From 294529e47024fd816266c3441bcd5e175aa761e0 Mon Sep 17 00:00:00 2001
From: Jiaming Yuan <jm.yuan@outlook.com>
Date: Sat, 15 Mar 2025 15:52:02 +0800
Subject: [PATCH 01/15] [Don't merge] Test macos hang.

---
 ops/conda_env/macos_cpu_test.yml | 8 +-------
 1 file changed, 1 insertion(+), 7 deletions(-)

diff --git a/ops/conda_env/macos_cpu_test.yml b/ops/conda_env/macos_cpu_test.yml
index 29ff99e3504f..29fc1cbf111a 100644
--- a/ops/conda_env/macos_cpu_test.yml
+++ b/ops/conda_env/macos_cpu_test.yml
@@ -6,8 +6,6 @@ dependencies:
 - pip
 - wheel
 - pyyaml
-- cpplint
-- pylint
 - numpy
 - scipy
 - llvm-openmp
@@ -20,13 +18,10 @@ dependencies:
 - python-graphviz
 - hypothesis
 - astroid
-- sphinx
 - sh
-- recommonmark
-- mock
-- breathe
 - pytest
 - pytest-cov
+- pytest-timeout
 - python-kubernetes
 - urllib3
 - jsonschema
@@ -38,4 +33,3 @@ dependencies:
 - cloudpickle
 - pip:
   - setuptools
-  - sphinx_rtd_theme

From d428977292b7ba190382fab25741ec65ca5eecbe Mon Sep 17 00:00:00 2001
From: Jiaming Yuan <jm.yuan@outlook.com>
Date: Sat, 15 Mar 2025 16:20:49 +0800
Subject: [PATCH 02/15] test rabit.

---
 ops/pipeline/test-python-macos.sh | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/ops/pipeline/test-python-macos.sh b/ops/pipeline/test-python-macos.sh
index 63b5690d1312..0d7ac3226224 100755
--- a/ops/pipeline/test-python-macos.sh
+++ b/ops/pipeline/test-python-macos.sh
@@ -19,5 +19,5 @@ python --version
 pip install -v .
 
 cd ..
-pytest -s -v -rxXs --durations=0 ./tests/python
-pytest -s -v -rxXs --durations=0 ./tests/test_distributed/test_with_dask
+pytest -s -v -rxXs --durations=0 ./tests/python/test_collective.py::test_rabit_communicator
+# pytest -s -v -rxXs --durations=0 ./tests/test_distributed/test_with_dask

From 92a3e7a19e6753d40c43b33543471cb1df0bc25e Mon Sep 17 00:00:00 2001
From: Jiaming Yuan <jm.yuan@outlook.com>
Date: Sat, 15 Mar 2025 16:21:19 +0800
Subject: [PATCH 03/15] Disable other tests for now.

---
 .github/workflows/doc.yml                 |  96 ---------
 .github/workflows/freebsd.yml             |  29 ---
 .github/workflows/i386.yml                |  27 ---
 .github/workflows/jvm_tests.yml           | 245 ----------------------
 .github/workflows/lint.yml                | 101 ---------
 .github/workflows/main.yml                | 223 --------------------
 .github/workflows/misc.yml                |  45 ----
 .github/workflows/python_wheels_macos.yml |  58 -----
 .github/workflows/r_nold.yml              |  41 ----
 .github/workflows/r_tests.yml             | 103 ---------
 .github/workflows/scorecards.yml          |  54 -----
 .github/workflows/sycl_tests.yml          |  48 -----
 .github/workflows/windows.yml             |  78 -------
 13 files changed, 1148 deletions(-)
 delete mode 100644 .github/workflows/doc.yml
 delete mode 100644 .github/workflows/freebsd.yml
 delete mode 100644 .github/workflows/i386.yml
 delete mode 100644 .github/workflows/jvm_tests.yml
 delete mode 100644 .github/workflows/lint.yml
 delete mode 100644 .github/workflows/misc.yml
 delete mode 100644 .github/workflows/python_wheels_macos.yml
 delete mode 100644 .github/workflows/r_nold.yml
 delete mode 100644 .github/workflows/r_tests.yml
 delete mode 100644 .github/workflows/scorecards.yml
 delete mode 100644 .github/workflows/sycl_tests.yml
 delete mode 100644 .github/workflows/windows.yml

diff --git a/.github/workflows/doc.yml b/.github/workflows/doc.yml
deleted file mode 100644
index 584af0987e92..000000000000
--- a/.github/workflows/doc.yml
+++ /dev/null
@@ -1,96 +0,0 @@
-name: XGBoost-docs
-
-on: [push, pull_request]
-
-env:
-  BRANCH_NAME: >-
-    ${{ github.event.pull_request.number && 'PR-' }}${{ github.event.pull_request.number || github.ref_name }}
-
-jobs:
-  build-jvm-docs:
-    name: Build docs for JVM packages
-    runs-on:
-      - runs-on=${{ github.run_id }}
-      - runner=linux-amd64-cpu
-      - tag=doc-build-jvm-docs
-
-    steps:
-      # Restart Docker daemon so that it recognizes the ephemeral disks
-      - run: sudo systemctl restart docker
-      - uses: actions/checkout@v4
-        with:
-          submodules: "true"
-
-      - name: Get the git hash for the push event.
-        if: ${{ github.event_name == 'push' }}
-        shell: bash
-        run: |
-          echo "HEAD_SHA=${GITHUB_SHA}" >> ${GITHUB_ENV}
-      - name: Get the git hash for the PR event.
-        if: ${{ github.event_name == 'pull_request' }}
-        shell: bash
-        run: |
-          echo "HEAD_SHA=${{ github.event.pull_request.head.sha }}" >> ${GITHUB_ENV}
-
-      - name: Log into Docker registry (AWS ECR)
-        run: bash ops/pipeline/login-docker-registry.sh
-      - run: bash ops/pipeline/build-jvm-gpu.sh
-      - run: bash ops/pipeline/build-jvm-doc.sh
-      - name: Upload JVM doc
-        run: |
-          # xgboost-docs/{branch}/{commit}/{branch}.tar.bz2
-          # branch can be the name of the dmlc/xgboost branch, or `PR-{number}`.
-          python3 ops/pipeline/manage-artifacts.py upload \
-            --s3-bucket xgboost-docs \
-            --prefix ${BRANCH_NAME}/${{ env.HEAD_SHA }} --make-public \
-            jvm-packages/${{ env.BRANCH_NAME }}.tar.bz2
-
-  build-r-docs:
-    name: Build docs for the R package
-    runs-on:
-      - runs-on=${{ github.run_id }}
-      - runner=linux-amd64-cpu
-      - tag=r-tests-build-docs
-    steps:
-      # Restart Docker daemon so that it recognizes the ephemeral disks
-      - run: sudo systemctl restart docker
-      - uses: actions/checkout@v4
-        with:
-          submodules: "true"
-
-      - name: Get the git hash for the push event.
-        if: ${{ github.event_name == 'push' }}
-        shell: bash
-        run: |
-          echo "HEAD_SHA=${GITHUB_SHA}" >> ${GITHUB_ENV}
-      - name: Get the git hash for the PR event.
-        if: ${{ github.event_name == 'pull_request' }}
-        shell: bash
-        run: |
-          echo "HEAD_SHA=${{ github.event.pull_request.head.sha }}" >> ${GITHUB_ENV}
-
-      - name: Log into Docker registry (AWS ECR)
-        run: bash ops/pipeline/login-docker-registry.sh
-      - run: bash ops/pipeline/build-r-docs.sh
-      - name: Upload R doc
-        run: |
-          python3 ops/pipeline/manage-artifacts.py upload \
-            --s3-bucket xgboost-docs \
-            --prefix ${BRANCH_NAME}/${{ env.HEAD_SHA }} --make-public \
-            r-docs-${{ env.BRANCH_NAME }}.tar.bz2
-
-  trigger-rtd-build:
-    needs: [build-jvm-docs]
-    name: Trigger Read The Docs build.
-    runs-on:
-      - runs-on=${{ github.run_id }}
-      - runner=linux-amd64-cpu
-      - tag=doc-trigger-rtd-build
-    steps:
-      # Restart Docker daemon so that it recognizes the ephemeral disks
-      - run: sudo systemctl restart docker
-      - uses: actions/checkout@v4
-        with:
-          submodules: "true"
-      - name: Trigger RTD
-        run: bash ops/pipeline/trigger-rtd.sh
diff --git a/.github/workflows/freebsd.yml b/.github/workflows/freebsd.yml
deleted file mode 100644
index 26e8fa34c119..000000000000
--- a/.github/workflows/freebsd.yml
+++ /dev/null
@@ -1,29 +0,0 @@
-name: FreeBSD
-
-on: [push, pull_request]
-
-permissions:
-  contents: read # to fetch code (actions/checkout)
-
-concurrency:
-  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
-  cancel-in-progress: true
-
-jobs:
-  test:
-    runs-on: ubuntu-latest
-    timeout-minutes: 20
-    name: A job to run test in FreeBSD
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          submodules: 'true'
-      - name: Test in FreeBSD
-        id: test
-        uses: vmactions/freebsd-vm@v1
-        with:
-          usesh: true
-          prepare: |
-            pkg install -y cmake git ninja googletest bash
-          run: |
-            bash ops/pipeline/test-freebsd.sh
diff --git a/.github/workflows/i386.yml b/.github/workflows/i386.yml
deleted file mode 100644
index 26ceaf758f3a..000000000000
--- a/.github/workflows/i386.yml
+++ /dev/null
@@ -1,27 +0,0 @@
-name: XGBoost-i386-test
-
-on: [push, pull_request]
-
-permissions:
-  contents: read  # to fetch code (actions/checkout)
-
-concurrency:
-  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
-  cancel-in-progress: true
-
-jobs:
-  build-32bit:
-    name: Build 32-bit
-    runs-on:
-      - runs-on=${{ github.run_id }}
-      - runner=linux-amd64-cpu
-      - tag=i386-build-32bit
-    steps:
-      # Restart Docker daemon so that it recognizes the ephemeral disks
-      - run: sudo systemctl restart docker
-      - uses: actions/checkout@v4
-        with:
-          submodules: "true"
-      - name: Log into Docker registry (AWS ECR)
-        run: bash ops/pipeline/login-docker-registry.sh
-      - run: bash ops/pipeline/test-cpp-i386.sh
diff --git a/.github/workflows/jvm_tests.yml b/.github/workflows/jvm_tests.yml
deleted file mode 100644
index a1b170c9d105..000000000000
--- a/.github/workflows/jvm_tests.yml
+++ /dev/null
@@ -1,245 +0,0 @@
-name: XGBoost CI (JVM packages)
-
-on: [push, pull_request]
-
-permissions:
-  contents: read  # to fetch code (actions/checkout)
-
-concurrency:
-  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
-  cancel-in-progress: true
-
-env:
-  BRANCH_NAME: >-
-    ${{ github.event.pull_request.number && 'PR-' }}${{ github.event.pull_request.number || github.ref_name }}
-
-jobs:
-  build-jvm-manylinux2014:
-    name: >-
-      Build libxgboost4j.so targeting glibc 2.17
-      (arch ${{ matrix.arch }}, runner ${{ matrix.runner }})
-    runs-on:
-      - runs-on
-      - runner=${{ matrix.runner }}
-      - run-id=${{ github.run_id }}
-      - tag=jvm-tests-build-jvm-manylinux2014-${{ matrix.arch }}
-    strategy:
-      fail-fast: false
-      matrix:
-        include:
-        - arch: aarch64
-          runner: linux-arm64-cpu
-        - arch: x86_64
-          runner: linux-amd64-cpu
-    steps:
-      # Restart Docker daemon so that it recognizes the ephemeral disks
-      - run: sudo systemctl restart docker
-      - uses: actions/checkout@v4
-        with:
-          submodules: "true"
-      - name: Log into Docker registry (AWS ECR)
-        run: bash ops/pipeline/login-docker-registry.sh
-      - run: bash ops/pipeline/build-jvm-manylinux2014.sh ${{ matrix.arch }}
-
-  build-jvm-gpu:
-    name: Build libxgboost4j.so with CUDA
-    runs-on:
-      - runs-on=${{ github.run_id }}
-      - runner=linux-amd64-cpu
-      - tag=jvm-tests-build-jvm-gpu
-    steps:
-      # Restart Docker daemon so that it recognizes the ephemeral disks
-      - run: sudo systemctl restart docker
-      - uses: actions/checkout@v4
-        with:
-          submodules: "true"
-      - name: Log into Docker registry (AWS ECR)
-        run: bash ops/pipeline/login-docker-registry.sh
-      - run: bash ops/pipeline/build-jvm-gpu.sh
-      - name: Stash files
-        run: |
-          python3 ops/pipeline/manage-artifacts.py upload \
-            --s3-bucket ${{ env.RUNS_ON_S3_BUCKET_CACHE }} \
-            --prefix cache/${{ github.run_id }}/build-jvm-gpu \
-            lib/libxgboost4j.so
-
-  build-jvm-mac:
-    name: "Build libxgboost4j.dylib for ${{ matrix.description }}"
-    runs-on: ${{ matrix.runner }}
-    strategy:
-      fail-fast: false
-      matrix:
-        include:
-          - description: "MacOS (Apple Silicon)"
-            script: ops/pipeline/build-jvm-macos-apple-silicon.sh
-            libname: libxgboost4j_m1.dylib
-            runner: macos-14
-          - description: "MacOS (Intel)"
-            script: ops/pipeline/build-jvm-macos-intel.sh
-            libname: libxgboost4j_intel.dylib
-            runner: macos-13
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          submodules: "true"
-      - run: bash ${{ matrix.script }}
-      - name: Upload libxgboost4j.dylib
-        if: github.ref == 'refs/heads/master' || contains(github.ref, 'refs/heads/release_')
-        run: |
-          mv -v lib/libxgboost4j.dylib ${{ matrix.libname }}
-          python3 ops/pipeline/manage-artifacts.py upload \
-            --s3-bucket xgboost-nightly-builds \
-            --prefix ${{ env.BRANCH_NAME }}/${{ github.sha }} --make-public \
-            ${{ matrix.libname }}
-        env:
-          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID_IAM_S3_UPLOADER }}
-          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY_IAM_S3_UPLOADER }}
-
-  build-test-jvm-packages:
-    name: Build and test JVM packages (Linux, Scala ${{ matrix.scala_version }})
-    runs-on:
-      - runs-on=${{ github.run_id }}
-      - runner=linux-amd64-cpu
-      - tag=jvm-tests-build-test-jvm-packages-scala${{ matrix.scala_version }}
-    strategy:
-      fail-fast: false
-      matrix:
-        scala_version: ["2.12", "2.13"]
-    steps:
-      # Restart Docker daemon so that it recognizes the ephemeral disks
-      - run: sudo systemctl restart docker
-      - uses: actions/checkout@v4
-        with:
-          submodules: "true"
-      - name: Log into Docker registry (AWS ECR)
-        run: bash ops/pipeline/login-docker-registry.sh
-      - name: Build and test JVM packages (Scala ${{ matrix.scala_version }})
-        run: bash ops/pipeline/build-test-jvm-packages.sh
-        env:
-          SCALA_VERSION: ${{ matrix.scala_version }}
-      - name: Stash files
-        run: |
-          python3 ops/pipeline/manage-artifacts.py upload \
-            --s3-bucket ${{ env.RUNS_ON_S3_BUCKET_CACHE }} \
-            --prefix cache/${{ github.run_id }}/build-test-jvm-packages \
-            lib/libxgboost4j.so
-        if: matrix.scala_version == '2.13'
-
-  build-test-jvm-packages-other-os:
-    name: Build and test JVM packages (${{ matrix.os }})
-    timeout-minutes: 30
-    runs-on: ${{ matrix.os }}
-    strategy:
-      fail-fast: false
-      matrix:
-        os: [windows-latest, macos-13]
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          submodules: 'true'
-      - uses: actions/setup-java@v4
-        with:
-          distribution: 'temurin'
-          java-version: '8'
-      - uses: dmlc/xgboost-devops/actions/miniforge-setup@main
-        with:
-          environment-name: minimal
-          environment-file: ops/conda_env/minimal.yml
-      - name: Cache Maven packages
-        uses: actions/cache@v4
-        with:
-          path: ~/.m2
-          key: ${{ runner.os }}-m2-${{ hashFiles('./jvm-packages/pom.xml') }}
-          restore-keys: ${{ runner.os }}-m2-${{ hashFiles('./jvm-packages/pom.xml') }}
-      - name: Test XGBoost4J (Core) on macos
-        if: matrix.os == 'macos-13'
-        run: |
-          cd jvm-packages
-          mvn test -B -pl :xgboost4j_2.12 -Duse.openmp=OFF
-      - name: Test XGBoost4J (Core) on windows
-        if: matrix.os == 'windows-latest'
-        run: |
-          cd jvm-packages
-          mvn test -B -pl :xgboost4j_2.12
-      - name: Publish artifact xgboost4j.dll to S3
-        run: |
-          python ops/pipeline/manage-artifacts.py upload `
-            --s3-bucket xgboost-nightly-builds `
-            --prefix ${{ env.BRANCH_NAME }}/${{ github.sha }} --make-public `
-            lib/xgboost4j.dll
-        if: |
-          (github.ref == 'refs/heads/master' || contains(github.ref, 'refs/heads/release_')) &&
-          matrix.os == 'windows-latest'
-        env:
-          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID_IAM_S3_UPLOADER }}
-          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY_IAM_S3_UPLOADER }}
-
-  test-jvm-packages-gpu:
-    name: Test JVM packages with CUDA (Scala ${{ matrix.scala_version }})
-    needs: [build-jvm-gpu]
-    runs-on:
-      - runs-on=${{ github.run_id }}
-      - runner=linux-amd64-mgpu
-      - tag=jvm-tests-test-jvm-packages-gpu-scala${{ matrix.scala_version }}
-    strategy:
-      fail-fast: false
-      matrix:
-        scala_version: ["2.12", "2.13"]
-    steps:
-      # Restart Docker daemon so that it recognizes the ephemeral disks
-      - run: sudo systemctl restart docker
-      - uses: actions/checkout@v4
-        with:
-          submodules: "true"
-      - name: Log into Docker registry (AWS ECR)
-        run: bash ops/pipeline/login-docker-registry.sh
-      - name: Unstash files
-        run: |
-          python3 ops/pipeline/manage-artifacts.py download \
-            --s3-bucket ${{ env.RUNS_ON_S3_BUCKET_CACHE }} \
-            --prefix cache/${{ github.run_id }}/build-jvm-gpu \
-            --dest-dir lib \
-            libxgboost4j.so
-      - run: bash ops/pipeline/test-jvm-gpu.sh
-        env:
-          SCALA_VERSION: ${{ matrix.scala_version }}
-
-  deploy-jvm-packages:
-    name: Deploy JVM packages to S3 (${{ matrix.variant.name }})
-    needs: [build-jvm-gpu, build-test-jvm-packages, test-jvm-packages-gpu]
-    runs-on:
-      - runs-on
-      - runner=linux-amd64-cpu
-      - run-id=${{ github.run_id }}
-      - tag=jvm-tests-deploy-jvm-packages-${{ matrix.variant.name }}-scala${{ matrix.scala_version }}
-    strategy:
-      fail-fast: false
-      matrix:
-        variant:
-          - name: cpu
-            image_repo: xgb-ci.jvm
-            artifact_from: build-test-jvm-packages
-          - name: gpu
-            image_repo: xgb-ci.jvm_gpu_build
-            artifact_from: build-jvm-gpu
-        scala_version: ['2.12', '2.13']
-    steps:
-      # Restart Docker daemon so that it recognizes the ephemeral disks
-      - run: sudo systemctl restart docker
-      - uses: actions/checkout@v4
-        with:
-          submodules: "true"
-      - name: Log into Docker registry (AWS ECR)
-        run: bash ops/pipeline/login-docker-registry.sh
-      - name: Unstash files
-        run: |
-          python3 ops/pipeline/manage-artifacts.py download \
-            --s3-bucket ${{ env.RUNS_ON_S3_BUCKET_CACHE }} \
-            --prefix cache/${{ github.run_id }}/${{ matrix.variant.artifact_from }} \
-            --dest-dir lib \
-            libxgboost4j.so
-          ls -lh lib/libxgboost4j.so
-      - name: Deploy JVM packages to S3
-        run: |
-          bash ops/pipeline/deploy-jvm-packages.sh ${{ matrix.variant.name }} \
-            ${{ matrix.variant.image_repo }} ${{ matrix.scala_version }}
diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
deleted file mode 100644
index 73636e7ce66d..000000000000
--- a/.github/workflows/lint.yml
+++ /dev/null
@@ -1,101 +0,0 @@
-name: XGBoost CI (Lint)
-
-on: [push, pull_request]
-
-permissions:
-  contents: read  # to fetch code (actions/checkout)
-
-concurrency:
-  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
-  cancel-in-progress: true
-
-env:
-  BRANCH_NAME: >-
-    ${{ github.event.pull_request.number && 'PR-' }}${{ github.event.pull_request.number || github.ref_name }}
-
-jobs:
-  clang-tidy:
-    name: Run clang-tidy
-    runs-on:
-      - runs-on=${{ github.run_id }}
-      - runner=linux-amd64-cpu
-      - tag=lint-clang-tidy
-    steps:
-      # Restart Docker daemon so that it recognizes the ephemeral disks
-      - run: sudo systemctl restart docker
-      - uses: actions/checkout@v4
-        with:
-          submodules: "true"
-      - name: Log into Docker registry (AWS ECR)
-        run: bash ops/pipeline/login-docker-registry.sh
-      - run: bash ops/pipeline/run-clang-tidy.sh
-
-  python-mypy-lint:
-    runs-on: ubuntu-latest
-    name: Type and format checks for the Python package
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          submodules: 'true'
-      - uses: dmlc/xgboost-devops/actions/miniforge-setup@main
-        with:
-          environment-name: python_lint
-          environment-file: ops/conda_env/python_lint.yml
-      - name: Run mypy
-        shell: bash -el {0}
-        run: |
-          python ops/script/lint_python.py --format=0 --type-check=1 --pylint=0
-      - name: Run formatter
-        shell: bash -el {0}
-        run: |
-          python ops/script/lint_python.py --format=1 --type-check=0 --pylint=0
-      - name: Run pylint
-        shell: bash -el {0}
-        run: |
-          python ops/script/lint_python.py --format=0 --type-check=0 --pylint=1
-
-  cpp-lint:
-    runs-on: ubuntu-latest
-    name: Code linting for C++
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          submodules: 'true'
-      - uses: actions/setup-python@v5
-        with:
-          python-version: "3.10"
-          architecture: 'x64'
-      - name: Install Python packages
-        run: |
-          python -m pip install wheel setuptools cmakelint cpplint==1.6.1 pylint
-      - name: Run lint
-        run: |
-          python3 ops/script/lint_cpp.py
-          bash ops/script/lint_cmake.sh
-
-  lintr:
-    runs-on: ubuntu-latest
-    name: Run R linters on Ubuntu
-    env:
-      R_REMOTES_NO_ERRORS_FROM_WARNINGS: true
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          submodules: 'true'
-      - uses: r-lib/actions/setup-r@v2
-        with:
-          r-version: "release"
-      - name: Cache R packages
-        uses: actions/cache@v4
-        with:
-          path: ${{ env.R_LIBS_USER }}
-          key: ${{ runner.os }}-r-release-7-${{ hashFiles('R-package/DESCRIPTION') }}
-          restore-keys: ${{ runner.os }}-r-release-7-${{ hashFiles('R-package/DESCRIPTION') }}
-      - name: Install dependencies
-        shell: Rscript {0}
-        run: |
-          source("./R-package/tests/helper_scripts/install_deps.R")
-      - name: Run lintr
-        run: |
-          MAKEFLAGS="-j$(nproc)" R CMD INSTALL R-package/
-          Rscript ops/script/lint_r.R $(pwd)
diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 332606cece7b..74866f9c223c 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -35,226 +35,3 @@ jobs:
             --s3-bucket ${{ env.RUNS_ON_S3_BUCKET_CACHE }} \
             --prefix cache/${{ github.run_id }}/build-cpu \
             ./xgboost
-
-  build-cpu-arm64:
-    name: Build CPU ARM64 + manylinux_2_28_aarch64 wheel
-    runs-on:
-      - runs-on=${{ github.run_id }}
-      - runner=linux-arm64-cpu
-      - tag=build-cpu-arm64
-    steps:
-      # Restart Docker daemon so that it recognizes the ephemeral disks
-      - run: sudo systemctl restart docker
-      - uses: actions/checkout@v4
-        with:
-          submodules: "true"
-      - name: Log into Docker registry (AWS ECR)
-        run: bash ops/pipeline/login-docker-registry.sh
-      - run: bash ops/pipeline/build-cpu-arm64.sh
-      - name: Stash files
-        run: |
-          python3 ops/pipeline/manage-artifacts.py upload \
-            --s3-bucket ${{ env.RUNS_ON_S3_BUCKET_CACHE }} \
-            --prefix cache/${{ github.run_id }}/build-cpu-arm64 \
-            ./xgboost python-package/dist/*.whl
-
-  build-cuda:
-    name: Build CUDA + manylinux_2_28_x86_64 wheel
-    runs-on:
-      - runs-on=${{ github.run_id }}
-      - runner=linux-amd64-cpu
-      - tag=main-build-cuda
-    steps:
-      # Restart Docker daemon so that it recognizes the ephemeral disks
-      - run: sudo systemctl restart docker
-      - uses: actions/checkout@v4
-        with:
-          submodules: "true"
-      - name: Log into Docker registry (AWS ECR)
-        run: bash ops/pipeline/login-docker-registry.sh
-      - run: |
-          bash ops/pipeline/build-cuda.sh xgb-ci.gpu_build_rockylinux8 disable-rmm
-      - name: Stash files
-        run: |
-          python3 ops/pipeline/manage-artifacts.py upload \
-            --s3-bucket ${{ env.RUNS_ON_S3_BUCKET_CACHE }} \
-            --prefix cache/${{ github.run_id }}/build-cuda \
-            build/testxgboost ./xgboost python-package/dist/*.whl
-
-  build-cuda-with-rmm:
-    name: Build CUDA with RMM
-    runs-on:
-      - runs-on=${{ github.run_id }}
-      - runner=linux-amd64-cpu
-      - tag=main-build-cuda-with-rmm
-    steps:
-      # Restart Docker daemon so that it recognizes the ephemeral disks
-      - run: sudo systemctl restart docker
-      - uses: actions/checkout@v4
-        with:
-          submodules: "true"
-      - name: Log into Docker registry (AWS ECR)
-        run: bash ops/pipeline/login-docker-registry.sh
-      - run: |
-          bash ops/pipeline/build-cuda.sh xgb-ci.gpu_build_rockylinux8 enable-rmm
-      - name: Stash files
-        run: |
-          python3 ops/pipeline/manage-artifacts.py upload \
-            --s3-bucket ${{ env.RUNS_ON_S3_BUCKET_CACHE }} \
-            --prefix cache/${{ github.run_id }}/build-cuda-with-rmm \
-            build/testxgboost
-
-  build-cuda-with-rmm-dev:
-    name: Build CUDA with RMM (dev)
-    runs-on:
-      - runs-on=${{ github.run_id }}
-      - runner=linux-amd64-cpu
-      - tag=main-build-cuda-with-rmm-dev
-    steps:
-      # Restart Docker daemon so that it recognizes the ephemeral disks
-      - run: sudo systemctl restart docker
-      - uses: actions/checkout@v4
-        with:
-          submodules: "true"
-      - name: Log into Docker registry (AWS ECR)
-        run: bash ops/pipeline/login-docker-registry.sh
-      - run: |
-          bash ops/pipeline/build-cuda.sh \
-            xgb-ci.gpu_build_rockylinux8_dev_ver enable-rmm
-
-  build-manylinux2014:
-    name: Build manylinux2014_${{ matrix.arch }} wheel
-    runs-on:
-      - runs-on
-      - runner=${{ matrix.runner }}
-      - run-id=${{ github.run_id }}
-      - tag=main-build-manylinux2014-${{ matrix.arch }}
-    strategy:
-      fail-fast: false
-      matrix:
-        include:
-        - arch: aarch64
-          runner: linux-arm64-cpu
-        - arch: x86_64
-          runner: linux-amd64-cpu
-    steps:
-      # Restart Docker daemon so that it recognizes the ephemeral disks
-      - run: sudo systemctl restart docker
-      - uses: actions/checkout@v4
-        with:
-          submodules: "true"
-      - name: Log into Docker registry (AWS ECR)
-        run: bash ops/pipeline/login-docker-registry.sh
-      - run: bash ops/pipeline/build-manylinux2014.sh ${{ matrix.arch }}
-
-  build-gpu-rpkg:
-    name: Build GPU-enabled R package
-    runs-on:
-      - runs-on=${{ github.run_id }}
-      - runner=linux-amd64-cpu
-      - tag=main-build-gpu-rpkg
-    steps:
-      # Restart Docker daemon so that it recognizes the ephemeral disks
-      - run: sudo systemctl restart docker
-      - uses: actions/checkout@v4
-        with:
-          submodules: "true"
-      - name: Log into Docker registry (AWS ECR)
-        run: bash ops/pipeline/login-docker-registry.sh
-      - run: bash ops/pipeline/build-gpu-rpkg.sh
-
-
-  test-cpp-gpu:
-    name: >-
-      Run Google Tests with GPUs
-      (Suite ${{ matrix.suite }}, Runner ${{ matrix.runner }})
-    needs: [build-cuda, build-cuda-with-rmm]
-    runs-on:
-      - runs-on
-      - runner=${{ matrix.runner }}
-      - run-id=${{ github.run_id }}
-      - tag=main-test-cpp-gpu-${{ matrix.suite }}
-    timeout-minutes: 30
-    strategy:
-      fail-fast: false
-      matrix:
-        include:
-          - suite: gpu
-            runner: linux-amd64-gpu
-            artifact_from: build-cuda
-          - suite: gpu-rmm
-            runner: linux-amd64-gpu
-            artifact_from: build-cuda-with-rmm
-          - suite: mgpu
-            runner: linux-amd64-mgpu
-            artifact_from: build-cuda
-    steps:
-      # Restart Docker daemon so that it recognizes the ephemeral disks
-      - run: sudo systemctl restart docker
-      - uses: actions/checkout@v4
-        with:
-          submodules: "true"
-      - name: Log into Docker registry (AWS ECR)
-        run: bash ops/pipeline/login-docker-registry.sh
-      - name: Unstash gtest
-        run: |
-          python3 ops/pipeline/manage-artifacts.py download \
-            --s3-bucket ${{ env.RUNS_ON_S3_BUCKET_CACHE }} \
-            --prefix cache/${{ github.run_id }}/${{ matrix.artifact_from }} \
-            --dest-dir build \
-            testxgboost
-          chmod +x build/testxgboost
-      - run: bash ops/pipeline/test-cpp-gpu.sh ${{ matrix.suite }}
-
-  test-python-wheel:
-    name: Run Python tests (${{ matrix.description }})
-    needs: [build-cuda, build-cpu-arm64]
-    runs-on:
-      - runs-on
-      - runner=${{ matrix.runner }}
-      - run-id=${{ github.run_id }}
-      - tag=main-test-python-wheel-${{ matrix.description }}
-    timeout-minutes: 60
-    strategy:
-      fail-fast: false
-      matrix:
-        include:
-          - description: single-gpu
-            image_repo: xgb-ci.gpu
-            suite: gpu
-            runner: linux-amd64-gpu
-            artifact_from: build-cuda
-          - description: multiple-gpu
-            image_repo: xgb-ci.gpu
-            suite: mgpu
-            runner: linux-amd64-mgpu
-            artifact_from: build-cuda
-          - description: cpu-amd64
-            image_repo: xgb-ci.cpu
-            suite: cpu
-            runner: linux-amd64-cpu
-            artifact_from: build-cuda
-          - description: cpu-arm64
-            image_repo: xgb-ci.aarch64
-            suite: cpu-arm64
-            runner: linux-arm64-cpu
-            artifact_from: build-cpu-arm64
-    steps:
-      # Restart Docker daemon so that it recognizes the ephemeral disks
-      - run: sudo systemctl restart docker
-      - uses: actions/checkout@v4
-        with:
-          submodules: "true"
-      - name: Log into Docker registry (AWS ECR)
-        run: bash ops/pipeline/login-docker-registry.sh
-      - name: Unstash Python wheel
-        run: |
-          python3 ops/pipeline/manage-artifacts.py download \
-            --s3-bucket ${{ env.RUNS_ON_S3_BUCKET_CACHE }} \
-            --prefix cache/${{ github.run_id }}/${{ matrix.artifact_from }} \
-            --dest-dir wheelhouse \
-            *.whl xgboost
-          mv -v wheelhouse/xgboost .
-          chmod +x ./xgboost
-      - name: Run Python tests, ${{ matrix.description }}
-        run: bash ops/pipeline/test-python-wheel.sh ${{ matrix.suite }} ${{ matrix.image_repo }}
diff --git a/.github/workflows/misc.yml b/.github/workflows/misc.yml
deleted file mode 100644
index 54d0078a6164..000000000000
--- a/.github/workflows/misc.yml
+++ /dev/null
@@ -1,45 +0,0 @@
-name: XGBoost CI (misc)
-
-on: [push, pull_request]
-
-permissions:
-  contents: read  # to fetch code (actions/checkout)
-
-concurrency:
-  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
-  cancel-in-progress: true
-
-env:
-  BRANCH_NAME: >-
-    ${{ github.event.pull_request.number && 'PR-' }}${{ github.event.pull_request.number || github.ref_name }}
-
-jobs:
-  gtest-cpu-nonomp:
-    name: Test Google C++ unittest (CPU Non-OMP)
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          submodules: 'true'
-      - name: Install system packages
-        run: |
-          sudo apt-get install -y --no-install-recommends ninja-build
-      - name: Build and test XGBoost
-        run: bash ops/pipeline/build-test-cpu-nonomp.sh
-
-  c-api-demo:
-    name: Test installing XGBoost lib + building the C API demo
-    runs-on: ubuntu-latest
-    defaults:
-      run:
-        shell: bash -l {0}
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          submodules: 'true'
-      - uses: dmlc/xgboost-devops/actions/miniforge-setup@main
-        with:
-          environment-name: cpp_test
-          environment-file: ops/conda_env/cpp_test.yml
-      - name: Build and run C API demo with shared
-        run: bash ops/pipeline/test-c-api-demo.sh
diff --git a/.github/workflows/python_wheels_macos.yml b/.github/workflows/python_wheels_macos.yml
deleted file mode 100644
index cbece0512274..000000000000
--- a/.github/workflows/python_wheels_macos.yml
+++ /dev/null
@@ -1,58 +0,0 @@
-name: Build Python wheels targeting MacOS
-
-on: [push, pull_request]
-
-permissions:
-  contents: read  # to fetch code (actions/checkout)
-
-defaults:
-  run:
-    shell: bash -l {0}
-
-concurrency:
-  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
-  cancel-in-progress: true
-
-env:
-  BRANCH_NAME: >-
-    ${{ github.event.pull_request.number && 'PR-' }}${{ github.event.pull_request.number || github.ref_name }}
-
-jobs:
-  python-wheels-macos:
-    name: Build wheel for ${{ matrix.platform_id }}
-    runs-on: ${{ matrix.os }}
-    strategy:
-      fail-fast: false
-      matrix:
-        include:
-        - os: macos-13
-          platform_id: macosx_x86_64
-        - os: macos-14
-          platform_id: macosx_arm64
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          submodules: 'true'
-      - name: Set up homebrew
-        uses: Homebrew/actions/setup-homebrew@13341b4d5e459a98bbe0b122b12c11bf90518cc8
-      - name: Install libomp
-        run: brew install libomp
-      - uses: dmlc/xgboost-devops/actions/miniforge-setup@main
-        with:
-          environment-name: minimal
-          environment-file: ops/conda_env/minimal.yml
-      - name: Build wheels
-        run: bash ops/pipeline/build-python-wheels-macos.sh ${{ matrix.platform_id }} ${{ github.sha }}
-      - name: Try installing XGBoost
-        run: |
-          python -m pip install -vvv wheelhouse/*.whl
-      - name: Upload Python wheel
-        if: github.ref == 'refs/heads/master' || contains(github.ref, 'refs/heads/release_')
-        run: |
-          python ops/pipeline/manage-artifacts.py upload \
-            --s3-bucket xgboost-nightly-builds \
-            --prefix ${{ env.BRANCH_NAME }}/${{ github.sha }} --make-public \
-            wheelhouse/*.whl
-        env:
-          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID_IAM_S3_UPLOADER }}
-          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY_IAM_S3_UPLOADER }}
diff --git a/.github/workflows/r_nold.yml b/.github/workflows/r_nold.yml
deleted file mode 100644
index da01f39f650b..000000000000
--- a/.github/workflows/r_nold.yml
+++ /dev/null
@@ -1,41 +0,0 @@
-# Run expensive R tests with the help of rhub. Only triggered by a pull request review
-# See discussion at https://github.com/dmlc/xgboost/pull/6378
-
-name: XGBoost-R-noLD
-
-on:
-  pull_request_review_comment:
-    types: [created]
-
-permissions:
-  contents: read # to fetch code (actions/checkout)
-
-concurrency:
-  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
-  cancel-in-progress: true
-
-jobs:
-  test-R-noLD:
-    if: github.event.comment.body == '/gha run r-nold-test' && contains('OWNER,MEMBER,COLLABORATOR', github.event.comment.author_association)
-    timeout-minutes: 120
-    runs-on: ubuntu-latest
-    container:
-      image: rhub/debian-gcc-devel-nold
-    steps:
-      - name: Install git and system packages
-        shell: bash
-        run: |
-          apt update && apt install libcurl4-openssl-dev libssl-dev libssh2-1-dev libgit2-dev libglpk-dev libxml2-dev libharfbuzz-dev libfribidi-dev git -y
-      - uses: actions/checkout@v4
-        with:
-          submodules: 'true'
-      - name: Install dependencies
-        shell: bash -l {0}
-        run: |
-          /tmp/R-devel/bin/Rscript -e "source('./R-package/tests/helper_scripts/install_deps.R')"
-      - name: Run R tests
-        shell: bash
-        run: |
-          cd R-package && \
-          /tmp/R-devel/bin/R CMD INSTALL . && \
-          /tmp/R-devel/bin/R -q -e "library(testthat); setwd('tests'); source('testthat.R')"
diff --git a/.github/workflows/r_tests.yml b/.github/workflows/r_tests.yml
deleted file mode 100644
index 43ad372a1e84..000000000000
--- a/.github/workflows/r_tests.yml
+++ /dev/null
@@ -1,103 +0,0 @@
-name: XGBoost-R-Tests
-
-on: [push, pull_request]
-
-env:
-  GITHUB_PAT: ${{ secrets.GITHUB_TOKEN }}
-
-permissions:
-  contents: read # to fetch code (actions/checkout)
-
-concurrency:
-  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
-  cancel-in-progress: true
-
-jobs:
-  test-Rpkg:
-    runs-on: ${{ matrix.os }}
-    name: Test R on OS ${{ matrix.os }}, R ${{ matrix.r }}, Compiler ${{ matrix.compiler }}, Build ${{ matrix.build }}
-    strategy:
-      fail-fast: false
-      matrix:
-        include:
-          - os: windows-latest
-            r: release
-            compiler: mingw
-            build: autotools
-          - os: ubuntu-latest
-            r: release
-            compiler: none
-            build: cmake
-    env:
-      R_REMOTES_NO_ERRORS_FROM_WARNINGS: true
-    steps:
-      - name: Install system dependencies
-        run: |
-          sudo apt update
-          sudo apt install libcurl4-openssl-dev libssl-dev libssh2-1-dev libgit2-dev libglpk-dev libxml2-dev libharfbuzz-dev libfribidi-dev librsvg2-dev librsvg2-2
-        if: matrix.os == 'ubuntu-latest'
-      - uses: actions/checkout@v4
-        with:
-          submodules: 'true'
-      - uses: r-lib/actions/setup-r@v2
-        with:
-          r-version: ${{ matrix.r }}
-      - name: Cache R packages
-        uses: actions/cache@v4
-        with:
-          path: ${{ env.R_LIBS_USER }}
-          key: ${{ runner.os }}-r-${{ matrix.r }}-8-${{ hashFiles('R-package/DESCRIPTION') }}
-          restore-keys: ${{ runner.os }}-r-${{ matrix.r }}-8-${{ hashFiles('R-package/DESCRIPTION') }}
-      - uses: actions/setup-python@v5
-        with:
-          python-version: "3.10"
-          architecture: 'x64'
-      - uses: r-lib/actions/setup-tinytex@v2
-      - name: Install dependencies
-        shell: Rscript {0}
-        run: |
-          source("./R-package/tests/helper_scripts/install_deps.R")
-      - name: Test R
-        run: |
-          python ops/script/test_r_package.py --compiler='${{ matrix.compiler }}' --build-tool="${{ matrix.build }}" --task=check
-        if: matrix.compiler != 'none'
-      - name: Test R
-        run: |
-          python ops/script/test_r_package.py --build-tool="${{ matrix.build }}" --task=check
-        if: matrix.compiler == 'none'
-
-  test-R-on-Debian:
-    name: Test R package on Debian
-    runs-on: ubuntu-latest
-    container:
-      image: rhub/debian-gcc-release
-    steps:
-      - name: Install system dependencies
-        run: |
-          # Must run before checkout to have the latest git installed.
-          # No need to add pandoc, the container has it figured out.
-          apt update && apt install libcurl4-openssl-dev libssl-dev libssh2-1-dev libgit2-dev libglpk-dev libxml2-dev libharfbuzz-dev libfribidi-dev git librsvg2-dev librsvg2-2 -y
-      - name: Trust git cloning project sources
-        run: |
-          git config --global --add safe.directory "${GITHUB_WORKSPACE}"
-      - uses: actions/checkout@v4
-        with:
-          submodules: 'true'
-      - name: Install dependencies
-        shell: bash -l {0}
-        run: |
-          Rscript -e "source('./R-package/tests/helper_scripts/install_deps.R')"
-      - name: Test R
-        shell: bash -l {0}
-        run: |
-          python3 ops/script/test_r_package.py --r=/usr/bin/R --build-tool=autotools --task=check
-      - uses: dorny/paths-filter@v3
-        id: changes
-        with:
-          filters: |
-            r_package:
-              - 'R-package/**'
-      - name: Run document check
-        if: steps.changes.outputs.r_package == 'true'
-        run: |
-          python3 ops/script/test_r_package.py --r=/usr/bin/R --task=doc
diff --git a/.github/workflows/scorecards.yml b/.github/workflows/scorecards.yml
deleted file mode 100644
index f3837391b4fe..000000000000
--- a/.github/workflows/scorecards.yml
+++ /dev/null
@@ -1,54 +0,0 @@
-name: Scorecards supply-chain security
-on:
-  # Only the default branch is supported.
-  branch_protection_rule:
-  schedule:
-    - cron: '17 2 * * 6'
-  push:
-    branches: [ "master" ]
-
-# Declare default permissions as read only.
-permissions: read-all
-
-jobs:
-  analysis:
-    name: Scorecards analysis
-    runs-on: ubuntu-latest
-    permissions:
-      # Needed to upload the results to code-scanning dashboard.
-      security-events: write
-      # Used to receive a badge.
-      id-token: write
-
-    steps:
-      - name: "Checkout code"
-        uses: actions/checkout@v4
-        with:
-          persist-credentials: false
-
-      - name: "Run analysis"
-        uses: ossf/scorecard-action@62b2cac7ed8198b15735ed49ab1e5cf35480ba46 # v2.4.0
-        with:
-          results_file: results.sarif
-          results_format: sarif
-
-          # Publish the results for public repositories to enable scorecard badges. For more details, see
-          # https://github.com/ossf/scorecard-action#publishing-results.
-          # For private repositories, `publish_results` will automatically be set to `false`, regardless
-          # of the value entered here.
-          publish_results: true
-
-      # Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF
-      # format to the repository Actions tab.
-      - name: "Upload artifact"
-        uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3
-        with:
-          name: SARIF file
-          path: results.sarif
-          retention-days: 5
-
-      # Upload the results to GitHub's code scanning dashboard.
-      - name: "Upload to code-scanning"
-        uses: github/codeql-action/upload-sarif@83a02f7883b12e0e4e1a146174f5e2292a01e601 # v2.16.4
-        with:
-          sarif_file: results.sarif
diff --git a/.github/workflows/sycl_tests.yml b/.github/workflows/sycl_tests.yml
deleted file mode 100644
index 8efdc98d7fd9..000000000000
--- a/.github/workflows/sycl_tests.yml
+++ /dev/null
@@ -1,48 +0,0 @@
-name: XGBoost CI (oneAPI)
-
-on: [push, pull_request]
-
-permissions:
-  contents: read  # to fetch code (actions/checkout)
-
-defaults:
-  run:
-    shell: bash -l {0}
-
-concurrency:
-  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
-  cancel-in-progress: true
-
-env:
-  BRANCH_NAME: >-
-    ${{ github.event.pull_request.number && 'PR-' }}${{ github.event.pull_request.number || github.ref_name }}
-
-jobs:
-  gtest-cpu-sycl:
-    name: Test Google C++ unittest (CPU SYCL)
-    runs-on: ubuntu-latest
-    steps:
-    - uses: actions/checkout@v4
-      with:
-        submodules: 'true'
-    - uses: dmlc/xgboost-devops/actions/miniforge-setup@main
-      with:
-        environment-name: linux_sycl_test
-        environment-file: ops/conda_env/linux_sycl_test.yml
-    - name: Run gtest
-      run: bash ops/pipeline/build-test-sycl.sh gtest
-
-  python-sycl-tests-on-ubuntu:
-    name: Test XGBoost Python package with SYCL
-    runs-on: ubuntu-latest
-    timeout-minutes: 90
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          submodules: 'true'
-      - uses: dmlc/xgboost-devops/actions/miniforge-setup@main
-        with:
-          environment-name: linux_sycl_test
-          environment-file: ops/conda_env/linux_sycl_test.yml
-      - name: Test Python package
-        run: bash ops/pipeline/build-test-sycl.sh pytest
diff --git a/.github/workflows/windows.yml b/.github/workflows/windows.yml
deleted file mode 100644
index 10d430c5f0f8..000000000000
--- a/.github/workflows/windows.yml
+++ /dev/null
@@ -1,78 +0,0 @@
-name: XGBoost CI (Windows)
-
-on: [push, pull_request]
-
-permissions:
-  contents: read  # to fetch code (actions/checkout)
-
-concurrency:
-  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
-  cancel-in-progress: true
-
-defaults:
-  run:
-    shell: powershell
-
-env:
-  BRANCH_NAME: >-
-    ${{ github.event.pull_request.number && 'PR-' }}${{ github.event.pull_request.number || github.ref_name }}
-
-jobs:
-  build-win64-gpu:
-    name: Build XGBoost for Windows with CUDA
-    runs-on:
-      - runs-on=${{ github.run_id }}
-      - runner=windows-cpu
-      - tag=windows-build-win64-gpu
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          submodules: "true"
-      - run: powershell ops/pipeline/build-win64-gpu.ps1
-      - name: Stash files
-        shell: powershell
-        run: |
-          conda activate
-          python ops/pipeline/manage-artifacts.py upload `
-            --s3-bucket ${{ env.RUNS_ON_S3_BUCKET_CACHE }} `
-            --prefix cache/${{ github.run_id }}/build-win64-gpu `
-            build/testxgboost.exe xgboost.exe `
-            (Get-ChildItem python-package/dist/*.whl | Select-Object -Expand FullName)
-
-  build-win64-cpu:
-    name: Build XGBoost for Windows (minimal)
-    runs-on:
-      - runs-on=${{ github.run_id }}
-      - runner=windows-cpu
-      - tag=windows-build-win64-cpu
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          submodules: "true"
-      - run: powershell ops/pipeline/build-win64-cpu.ps1
-
-  test-win64-gpu:
-    name: Test XGBoost on Windows
-    needs: build-win64-gpu
-    runs-on:
-      - runs-on=${{ github.run_id }}
-      - runner=windows-gpu
-      - tag=windows-test-win64-gpu
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          submodules: "true"
-      - name: Unstash files
-        shell: powershell
-        run: |
-          conda activate
-          python ops/pipeline/manage-artifacts.py download `
-            --s3-bucket ${{ env.RUNS_ON_S3_BUCKET_CACHE }} `
-            --prefix cache/${{ github.run_id }}/build-win64-gpu `
-            --dest-dir build `
-            *.whl testxgboost.exe xgboost.exe
-          Move-Item -Path build/xgboost.exe -Destination .
-          New-Item -ItemType Directory -Path python-package/dist/ -Force
-          Move-Item -Path (Get-ChildItem build/*.whl | Select-Object -Expand FullName) `
-            -Destination python-package/dist/
-      - run: powershell ops/pipeline/test-win64-gpu.ps1

From d9d8f3591ab4b2e406d53e7cc5c81ab014db38aa Mon Sep 17 00:00:00 2001
From: Jiaming Yuan <jm.yuan@outlook.com>
Date: Sun, 16 Mar 2025 00:57:05 +0800
Subject: [PATCH 04/15] full test suite.

---
 ops/pipeline/test-python-macos.sh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/ops/pipeline/test-python-macos.sh b/ops/pipeline/test-python-macos.sh
index 0d7ac3226224..08b956c7ecbf 100755
--- a/ops/pipeline/test-python-macos.sh
+++ b/ops/pipeline/test-python-macos.sh
@@ -19,5 +19,5 @@ python --version
 pip install -v .
 
 cd ..
-pytest -s -v -rxXs --durations=0 ./tests/python/test_collective.py::test_rabit_communicator
+pytest -s -v -rxXs --durations=0 ./tests/python/
 # pytest -s -v -rxXs --durations=0 ./tests/test_distributed/test_with_dask

From 3b94fab0b9c35b0cf3e179abd1fa4481c2d7e182 Mon Sep 17 00:00:00 2001
From: Jiaming Yuan <jm.yuan@outlook.com>
Date: Sun, 16 Mar 2025 02:19:41 +0800
Subject: [PATCH 05/15] Fail fast.

---
 .github/workflows/main.yml         | 37 ------------------------------
 .github/workflows/python_tests.yml | 22 ------------------
 ops/pipeline/test-python-macos.sh  |  2 +-
 3 files changed, 1 insertion(+), 60 deletions(-)
 delete mode 100644 .github/workflows/main.yml

diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
deleted file mode 100644
index 74866f9c223c..000000000000
--- a/.github/workflows/main.yml
+++ /dev/null
@@ -1,37 +0,0 @@
-name: XGBoost CI
-
-on: [push, pull_request]
-
-permissions:
-  contents: read  # to fetch code (actions/checkout)
-
-concurrency:
-  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
-  cancel-in-progress: true
-
-env:
-  BRANCH_NAME: >-
-    ${{ github.event.pull_request.number && 'PR-' }}${{ github.event.pull_request.number || github.ref_name }}
-
-jobs:
-  build-cpu:
-    name: Build CPU
-    runs-on:
-      - runs-on=${{ github.run_id }}
-      - runner=linux-amd64-cpu
-      - tag=main-build-cpu
-    steps:
-      # Restart Docker daemon so that it recognizes the ephemeral disks
-      - run: sudo systemctl restart docker
-      - uses: actions/checkout@v4
-        with:
-          submodules: "true"
-      - name: Log into Docker registry (AWS ECR)
-        run: bash ops/pipeline/login-docker-registry.sh
-      - run: bash ops/pipeline/build-cpu.sh
-      - name: Stash CLI executable
-        run: |
-          python3 ops/pipeline/manage-artifacts.py upload \
-            --s3-bucket ${{ env.RUNS_ON_S3_BUCKET_CACHE }} \
-            --prefix cache/${{ github.run_id }}/build-cpu \
-            ./xgboost
diff --git a/.github/workflows/python_tests.yml b/.github/workflows/python_tests.yml
index 180b1a855733..647567f8cd34 100644
--- a/.github/workflows/python_tests.yml
+++ b/.github/workflows/python_tests.yml
@@ -14,28 +14,6 @@ concurrency:
   cancel-in-progress: true
 
 jobs:
-  python-sdist-test:
-    runs-on: ${{ matrix.os }}
-    name: Test installing Python XGBoost from the source distribution (${{ matrix.os }})
-    strategy:
-      fail-fast: false
-      matrix:
-        os: [macos-13, windows-latest, ubuntu-latest]
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          submodules: 'true'
-      - uses: dmlc/xgboost-devops/actions/miniforge-setup@main
-        with:
-          environment-name: sdist_test
-          environment-file: ops/conda_env/sdist_test.yml
-      - name: Install extra package for MacOS
-        run: |
-          mamba install -c conda-forge llvm-openmp
-        if: matrix.os == 'macos-13'
-      - name: Build and install XGBoost
-        run: bash ops/pipeline/test-python-sdist.sh
-
   python-tests-on-macos:
     name: Test XGBoost Python package on macos-13
     runs-on: macos-13
diff --git a/ops/pipeline/test-python-macos.sh b/ops/pipeline/test-python-macos.sh
index 08b956c7ecbf..57bb186fa75d 100755
--- a/ops/pipeline/test-python-macos.sh
+++ b/ops/pipeline/test-python-macos.sh
@@ -19,5 +19,5 @@ python --version
 pip install -v .
 
 cd ..
-pytest -s -v -rxXs --durations=0 ./tests/python/
+pytest -s -v --maxfail=1 --durations=0 ./tests/python/
 # pytest -s -v -rxXs --durations=0 ./tests/test_distributed/test_with_dask

From 4b148866271ef4b9fb4922a60dbc3293a0a487c3 Mon Sep 17 00:00:00 2001
From: Jiaming Yuan <jm.yuan@outlook.com>
Date: Sun, 16 Mar 2025 02:45:16 +0800
Subject: [PATCH 06/15] freeze loky.

---
 .github/workflows/python_tests.yml | 13 -------------
 ops/conda_env/macos_cpu_test.yml   |  2 +-
 2 files changed, 1 insertion(+), 14 deletions(-)

diff --git a/.github/workflows/python_tests.yml b/.github/workflows/python_tests.yml
index 647567f8cd34..7e5c8ea58ec4 100644
--- a/.github/workflows/python_tests.yml
+++ b/.github/workflows/python_tests.yml
@@ -27,16 +27,3 @@ jobs:
           environment-name: macos_cpu_test
           environment-file: ops/conda_env/macos_cpu_test.yml
       - run: bash ops/pipeline/test-python-macos.sh
-
-  python-system-installation-on-ubuntu:
-    name: Test XGBoost Python package System Installation on Ubuntu
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          submodules: 'true'
-      - name: Set up Python 3.10
-        uses: actions/setup-python@v5
-        with:
-          python-version: "3.10"
-      - run: bash ops/pipeline/test-python-with-sysprefix.sh
diff --git a/ops/conda_env/macos_cpu_test.yml b/ops/conda_env/macos_cpu_test.yml
index 29fc1cbf111a..4cb2235e96ab 100644
--- a/ops/conda_env/macos_cpu_test.yml
+++ b/ops/conda_env/macos_cpu_test.yml
@@ -27,7 +27,7 @@ dependencies:
 - jsonschema
 - boto3
 - awscli
-- loky
+- loky=3.4.1
 - pyarrow
 - pyspark>=3.4.0
 - cloudpickle

From 6e5aba4f8f3a0c44f0b767cb235f16dc09214964 Mon Sep 17 00:00:00 2001
From: Jiaming Yuan <jm.yuan@outlook.com>
Date: Sun, 16 Mar 2025 03:01:58 +0800
Subject: [PATCH 07/15] Revert.

---
 .github/workflows/doc.yml                 |  96 ++++++++
 .github/workflows/freebsd.yml             |  29 +++
 .github/workflows/i386.yml                |  27 +++
 .github/workflows/jvm_tests.yml           | 245 ++++++++++++++++++++
 .github/workflows/lint.yml                | 101 +++++++++
 .github/workflows/main.yml                | 260 ++++++++++++++++++++++
 .github/workflows/misc.yml                |  45 ++++
 .github/workflows/python_tests.yml        |  35 +++
 .github/workflows/python_wheels_macos.yml |  58 +++++
 .github/workflows/r_nold.yml              |  41 ++++
 .github/workflows/r_tests.yml             | 103 +++++++++
 .github/workflows/scorecards.yml          |  54 +++++
 .github/workflows/sycl_tests.yml          |  48 ++++
 .github/workflows/windows.yml             |  78 +++++++
 ops/pipeline/test-python-macos.sh         |   4 +-
 15 files changed, 1222 insertions(+), 2 deletions(-)
 create mode 100644 .github/workflows/doc.yml
 create mode 100644 .github/workflows/freebsd.yml
 create mode 100644 .github/workflows/i386.yml
 create mode 100644 .github/workflows/jvm_tests.yml
 create mode 100644 .github/workflows/lint.yml
 create mode 100644 .github/workflows/main.yml
 create mode 100644 .github/workflows/misc.yml
 create mode 100644 .github/workflows/python_wheels_macos.yml
 create mode 100644 .github/workflows/r_nold.yml
 create mode 100644 .github/workflows/r_tests.yml
 create mode 100644 .github/workflows/scorecards.yml
 create mode 100644 .github/workflows/sycl_tests.yml
 create mode 100644 .github/workflows/windows.yml

diff --git a/.github/workflows/doc.yml b/.github/workflows/doc.yml
new file mode 100644
index 000000000000..584af0987e92
--- /dev/null
+++ b/.github/workflows/doc.yml
@@ -0,0 +1,96 @@
+name: XGBoost-docs
+
+on: [push, pull_request]
+
+env:
+  BRANCH_NAME: >-
+    ${{ github.event.pull_request.number && 'PR-' }}${{ github.event.pull_request.number || github.ref_name }}
+
+jobs:
+  build-jvm-docs:
+    name: Build docs for JVM packages
+    runs-on:
+      - runs-on=${{ github.run_id }}
+      - runner=linux-amd64-cpu
+      - tag=doc-build-jvm-docs
+
+    steps:
+      # Restart Docker daemon so that it recognizes the ephemeral disks
+      - run: sudo systemctl restart docker
+      - uses: actions/checkout@v4
+        with:
+          submodules: "true"
+
+      - name: Get the git hash for the push event.
+        if: ${{ github.event_name == 'push' }}
+        shell: bash
+        run: |
+          echo "HEAD_SHA=${GITHUB_SHA}" >> ${GITHUB_ENV}
+      - name: Get the git hash for the PR event.
+        if: ${{ github.event_name == 'pull_request' }}
+        shell: bash
+        run: |
+          echo "HEAD_SHA=${{ github.event.pull_request.head.sha }}" >> ${GITHUB_ENV}
+
+      - name: Log into Docker registry (AWS ECR)
+        run: bash ops/pipeline/login-docker-registry.sh
+      - run: bash ops/pipeline/build-jvm-gpu.sh
+      - run: bash ops/pipeline/build-jvm-doc.sh
+      - name: Upload JVM doc
+        run: |
+          # xgboost-docs/{branch}/{commit}/{branch}.tar.bz2
+          # branch can be the name of the dmlc/xgboost branch, or `PR-{number}`.
+          python3 ops/pipeline/manage-artifacts.py upload \
+            --s3-bucket xgboost-docs \
+            --prefix ${BRANCH_NAME}/${{ env.HEAD_SHA }} --make-public \
+            jvm-packages/${{ env.BRANCH_NAME }}.tar.bz2
+
+  build-r-docs:
+    name: Build docs for the R package
+    runs-on:
+      - runs-on=${{ github.run_id }}
+      - runner=linux-amd64-cpu
+      - tag=r-tests-build-docs
+    steps:
+      # Restart Docker daemon so that it recognizes the ephemeral disks
+      - run: sudo systemctl restart docker
+      - uses: actions/checkout@v4
+        with:
+          submodules: "true"
+
+      - name: Get the git hash for the push event.
+        if: ${{ github.event_name == 'push' }}
+        shell: bash
+        run: |
+          echo "HEAD_SHA=${GITHUB_SHA}" >> ${GITHUB_ENV}
+      - name: Get the git hash for the PR event.
+        if: ${{ github.event_name == 'pull_request' }}
+        shell: bash
+        run: |
+          echo "HEAD_SHA=${{ github.event.pull_request.head.sha }}" >> ${GITHUB_ENV}
+
+      - name: Log into Docker registry (AWS ECR)
+        run: bash ops/pipeline/login-docker-registry.sh
+      - run: bash ops/pipeline/build-r-docs.sh
+      - name: Upload R doc
+        run: |
+          python3 ops/pipeline/manage-artifacts.py upload \
+            --s3-bucket xgboost-docs \
+            --prefix ${BRANCH_NAME}/${{ env.HEAD_SHA }} --make-public \
+            r-docs-${{ env.BRANCH_NAME }}.tar.bz2
+
+  trigger-rtd-build:
+    needs: [build-jvm-docs]
+    name: Trigger Read The Docs build.
+    runs-on:
+      - runs-on=${{ github.run_id }}
+      - runner=linux-amd64-cpu
+      - tag=doc-trigger-rtd-build
+    steps:
+      # Restart Docker daemon so that it recognizes the ephemeral disks
+      - run: sudo systemctl restart docker
+      - uses: actions/checkout@v4
+        with:
+          submodules: "true"
+      - name: Trigger RTD
+        run: bash ops/pipeline/trigger-rtd.sh
diff --git a/.github/workflows/freebsd.yml b/.github/workflows/freebsd.yml
new file mode 100644
index 000000000000..26e8fa34c119
--- /dev/null
+++ b/.github/workflows/freebsd.yml
@@ -0,0 +1,29 @@
+name: FreeBSD
+
+on: [push, pull_request]
+
+permissions:
+  contents: read # to fetch code (actions/checkout)
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
+  cancel-in-progress: true
+
+jobs:
+  test:
+    runs-on: ubuntu-latest
+    timeout-minutes: 20
+    name: A job to run test in FreeBSD
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          submodules: 'true'
+      - name: Test in FreeBSD
+        id: test
+        uses: vmactions/freebsd-vm@v1
+        with:
+          usesh: true
+          prepare: |
+            pkg install -y cmake git ninja googletest bash
+          run: |
+            bash ops/pipeline/test-freebsd.sh
diff --git a/.github/workflows/i386.yml b/.github/workflows/i386.yml
new file mode 100644
index 000000000000..26ceaf758f3a
--- /dev/null
+++ b/.github/workflows/i386.yml
@@ -0,0 +1,27 @@
+name: XGBoost-i386-test
+
+on: [push, pull_request]
+
+permissions:
+  contents: read  # to fetch code (actions/checkout)
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
+  cancel-in-progress: true
+
+jobs:
+  build-32bit:
+    name: Build 32-bit
+    runs-on:
+      - runs-on=${{ github.run_id }}
+      - runner=linux-amd64-cpu
+      - tag=i386-build-32bit
+    steps:
+      # Restart Docker daemon so that it recognizes the ephemeral disks
+      - run: sudo systemctl restart docker
+      - uses: actions/checkout@v4
+        with:
+          submodules: "true"
+      - name: Log into Docker registry (AWS ECR)
+        run: bash ops/pipeline/login-docker-registry.sh
+      - run: bash ops/pipeline/test-cpp-i386.sh
diff --git a/.github/workflows/jvm_tests.yml b/.github/workflows/jvm_tests.yml
new file mode 100644
index 000000000000..a1b170c9d105
--- /dev/null
+++ b/.github/workflows/jvm_tests.yml
@@ -0,0 +1,245 @@
+name: XGBoost CI (JVM packages)
+
+on: [push, pull_request]
+
+permissions:
+  contents: read  # to fetch code (actions/checkout)
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
+  cancel-in-progress: true
+
+env:
+  BRANCH_NAME: >-
+    ${{ github.event.pull_request.number && 'PR-' }}${{ github.event.pull_request.number || github.ref_name }}
+
+jobs:
+  build-jvm-manylinux2014:
+    name: >-
+      Build libxgboost4j.so targeting glibc 2.17
+      (arch ${{ matrix.arch }}, runner ${{ matrix.runner }})
+    runs-on:
+      - runs-on
+      - runner=${{ matrix.runner }}
+      - run-id=${{ github.run_id }}
+      - tag=jvm-tests-build-jvm-manylinux2014-${{ matrix.arch }}
+    strategy:
+      fail-fast: false
+      matrix:
+        include:
+        - arch: aarch64
+          runner: linux-arm64-cpu
+        - arch: x86_64
+          runner: linux-amd64-cpu
+    steps:
+      # Restart Docker daemon so that it recognizes the ephemeral disks
+      - run: sudo systemctl restart docker
+      - uses: actions/checkout@v4
+        with:
+          submodules: "true"
+      - name: Log into Docker registry (AWS ECR)
+        run: bash ops/pipeline/login-docker-registry.sh
+      - run: bash ops/pipeline/build-jvm-manylinux2014.sh ${{ matrix.arch }}
+
+  build-jvm-gpu:
+    name: Build libxgboost4j.so with CUDA
+    runs-on:
+      - runs-on=${{ github.run_id }}
+      - runner=linux-amd64-cpu
+      - tag=jvm-tests-build-jvm-gpu
+    steps:
+      # Restart Docker daemon so that it recognizes the ephemeral disks
+      - run: sudo systemctl restart docker
+      - uses: actions/checkout@v4
+        with:
+          submodules: "true"
+      - name: Log into Docker registry (AWS ECR)
+        run: bash ops/pipeline/login-docker-registry.sh
+      - run: bash ops/pipeline/build-jvm-gpu.sh
+      - name: Stash files
+        run: |
+          python3 ops/pipeline/manage-artifacts.py upload \
+            --s3-bucket ${{ env.RUNS_ON_S3_BUCKET_CACHE }} \
+            --prefix cache/${{ github.run_id }}/build-jvm-gpu \
+            lib/libxgboost4j.so
+
+  build-jvm-mac:
+    name: "Build libxgboost4j.dylib for ${{ matrix.description }}"
+    runs-on: ${{ matrix.runner }}
+    strategy:
+      fail-fast: false
+      matrix:
+        include:
+          - description: "MacOS (Apple Silicon)"
+            script: ops/pipeline/build-jvm-macos-apple-silicon.sh
+            libname: libxgboost4j_m1.dylib
+            runner: macos-14
+          - description: "MacOS (Intel)"
+            script: ops/pipeline/build-jvm-macos-intel.sh
+            libname: libxgboost4j_intel.dylib
+            runner: macos-13
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          submodules: "true"
+      - run: bash ${{ matrix.script }}
+      - name: Upload libxgboost4j.dylib
+        if: github.ref == 'refs/heads/master' || contains(github.ref, 'refs/heads/release_')
+        run: |
+          mv -v lib/libxgboost4j.dylib ${{ matrix.libname }}
+          python3 ops/pipeline/manage-artifacts.py upload \
+            --s3-bucket xgboost-nightly-builds \
+            --prefix ${{ env.BRANCH_NAME }}/${{ github.sha }} --make-public \
+            ${{ matrix.libname }}
+        env:
+          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID_IAM_S3_UPLOADER }}
+          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY_IAM_S3_UPLOADER }}
+
+  build-test-jvm-packages:
+    name: Build and test JVM packages (Linux, Scala ${{ matrix.scala_version }})
+    runs-on:
+      - runs-on=${{ github.run_id }}
+      - runner=linux-amd64-cpu
+      - tag=jvm-tests-build-test-jvm-packages-scala${{ matrix.scala_version }}
+    strategy:
+      fail-fast: false
+      matrix:
+        scala_version: ["2.12", "2.13"]
+    steps:
+      # Restart Docker daemon so that it recognizes the ephemeral disks
+      - run: sudo systemctl restart docker
+      - uses: actions/checkout@v4
+        with:
+          submodules: "true"
+      - name: Log into Docker registry (AWS ECR)
+        run: bash ops/pipeline/login-docker-registry.sh
+      - name: Build and test JVM packages (Scala ${{ matrix.scala_version }})
+        run: bash ops/pipeline/build-test-jvm-packages.sh
+        env:
+          SCALA_VERSION: ${{ matrix.scala_version }}
+      - name: Stash files
+        run: |
+          python3 ops/pipeline/manage-artifacts.py upload \
+            --s3-bucket ${{ env.RUNS_ON_S3_BUCKET_CACHE }} \
+            --prefix cache/${{ github.run_id }}/build-test-jvm-packages \
+            lib/libxgboost4j.so
+        if: matrix.scala_version == '2.13'
+
+  build-test-jvm-packages-other-os:
+    name: Build and test JVM packages (${{ matrix.os }})
+    timeout-minutes: 30
+    runs-on: ${{ matrix.os }}
+    strategy:
+      fail-fast: false
+      matrix:
+        os: [windows-latest, macos-13]
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          submodules: 'true'
+      - uses: actions/setup-java@v4
+        with:
+          distribution: 'temurin'
+          java-version: '8'
+      - uses: dmlc/xgboost-devops/actions/miniforge-setup@main
+        with:
+          environment-name: minimal
+          environment-file: ops/conda_env/minimal.yml
+      - name: Cache Maven packages
+        uses: actions/cache@v4
+        with:
+          path: ~/.m2
+          key: ${{ runner.os }}-m2-${{ hashFiles('./jvm-packages/pom.xml') }}
+          restore-keys: ${{ runner.os }}-m2-${{ hashFiles('./jvm-packages/pom.xml') }}
+      - name: Test XGBoost4J (Core) on macOS
+        if: matrix.os == 'macos-13'
+        run: |
+          cd jvm-packages
+          mvn test -B -pl :xgboost4j_2.12 -Duse.openmp=OFF
+      - name: Test XGBoost4J (Core) on Windows
+        if: matrix.os == 'windows-latest'
+        run: |
+          cd jvm-packages
+          mvn test -B -pl :xgboost4j_2.12
+      - name: Publish artifact xgboost4j.dll to S3
+        run: |
+          python ops/pipeline/manage-artifacts.py upload `
+            --s3-bucket xgboost-nightly-builds `
+            --prefix ${{ env.BRANCH_NAME }}/${{ github.sha }} --make-public `
+            lib/xgboost4j.dll
+        if: |
+          (github.ref == 'refs/heads/master' || contains(github.ref, 'refs/heads/release_')) &&
+          matrix.os == 'windows-latest'
+        env:
+          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID_IAM_S3_UPLOADER }}
+          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY_IAM_S3_UPLOADER }}
+
+  test-jvm-packages-gpu:
+    name: Test JVM packages with CUDA (Scala ${{ matrix.scala_version }})
+    needs: [build-jvm-gpu]
+    runs-on:
+      - runs-on=${{ github.run_id }}
+      - runner=linux-amd64-mgpu
+      - tag=jvm-tests-test-jvm-packages-gpu-scala${{ matrix.scala_version }}
+    strategy:
+      fail-fast: false
+      matrix:
+        scala_version: ["2.12", "2.13"]
+    steps:
+      # Restart Docker daemon so that it recognizes the ephemeral disks
+      - run: sudo systemctl restart docker
+      - uses: actions/checkout@v4
+        with:
+          submodules: "true"
+      - name: Log into Docker registry (AWS ECR)
+        run: bash ops/pipeline/login-docker-registry.sh
+      - name: Unstash files
+        run: |
+          python3 ops/pipeline/manage-artifacts.py download \
+            --s3-bucket ${{ env.RUNS_ON_S3_BUCKET_CACHE }} \
+            --prefix cache/${{ github.run_id }}/build-jvm-gpu \
+            --dest-dir lib \
+            libxgboost4j.so
+      - run: bash ops/pipeline/test-jvm-gpu.sh
+        env:
+          SCALA_VERSION: ${{ matrix.scala_version }}
+
+  deploy-jvm-packages:
+    name: Deploy JVM packages to S3 (${{ matrix.variant.name }})
+    needs: [build-jvm-gpu, build-test-jvm-packages, test-jvm-packages-gpu]
+    runs-on:
+      - runs-on
+      - runner=linux-amd64-cpu
+      - run-id=${{ github.run_id }}
+      - tag=jvm-tests-deploy-jvm-packages-${{ matrix.variant.name }}-scala${{ matrix.scala_version }}
+    strategy:
+      fail-fast: false
+      matrix:
+        variant:
+          - name: cpu
+            image_repo: xgb-ci.jvm
+            artifact_from: build-test-jvm-packages
+          - name: gpu
+            image_repo: xgb-ci.jvm_gpu_build
+            artifact_from: build-jvm-gpu
+        scala_version: ['2.12', '2.13']
+    steps:
+      # Restart Docker daemon so that it recognizes the ephemeral disks
+      - run: sudo systemctl restart docker
+      - uses: actions/checkout@v4
+        with:
+          submodules: "true"
+      - name: Log into Docker registry (AWS ECR)
+        run: bash ops/pipeline/login-docker-registry.sh
+      - name: Unstash files
+        run: |
+          python3 ops/pipeline/manage-artifacts.py download \
+            --s3-bucket ${{ env.RUNS_ON_S3_BUCKET_CACHE }} \
+            --prefix cache/${{ github.run_id }}/${{ matrix.variant.artifact_from }} \
+            --dest-dir lib \
+            libxgboost4j.so
+          ls -lh lib/libxgboost4j.so
+      - name: Deploy JVM packages to S3
+        run: |
+          bash ops/pipeline/deploy-jvm-packages.sh ${{ matrix.variant.name }} \
+            ${{ matrix.variant.image_repo }} ${{ matrix.scala_version }}
diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
new file mode 100644
index 000000000000..73636e7ce66d
--- /dev/null
+++ b/.github/workflows/lint.yml
@@ -0,0 +1,101 @@
+name: XGBoost CI (Lint)
+
+on: [push, pull_request]
+
+permissions:
+  contents: read  # to fetch code (actions/checkout)
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
+  cancel-in-progress: true
+
+env:
+  BRANCH_NAME: >-
+    ${{ github.event.pull_request.number && 'PR-' }}${{ github.event.pull_request.number || github.ref_name }}
+
+jobs:
+  clang-tidy:
+    name: Run clang-tidy
+    runs-on:
+      - runs-on=${{ github.run_id }}
+      - runner=linux-amd64-cpu
+      - tag=lint-clang-tidy
+    steps:
+      # Restart Docker daemon so that it recognizes the ephemeral disks
+      - run: sudo systemctl restart docker
+      - uses: actions/checkout@v4
+        with:
+          submodules: "true"
+      - name: Log into Docker registry (AWS ECR)
+        run: bash ops/pipeline/login-docker-registry.sh
+      - run: bash ops/pipeline/run-clang-tidy.sh
+
+  python-mypy-lint:
+    runs-on: ubuntu-latest
+    name: Type and format checks for the Python package
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          submodules: 'true'
+      - uses: dmlc/xgboost-devops/actions/miniforge-setup@main
+        with:
+          environment-name: python_lint
+          environment-file: ops/conda_env/python_lint.yml
+      - name: Run mypy
+        shell: bash -el {0}
+        run: |
+          python ops/script/lint_python.py --format=0 --type-check=1 --pylint=0
+      - name: Run formatter
+        shell: bash -el {0}
+        run: |
+          python ops/script/lint_python.py --format=1 --type-check=0 --pylint=0
+      - name: Run pylint
+        shell: bash -el {0}
+        run: |
+          python ops/script/lint_python.py --format=0 --type-check=0 --pylint=1
+
+  cpp-lint:
+    runs-on: ubuntu-latest
+    name: Code linting for C++
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          submodules: 'true'
+      - uses: actions/setup-python@v5
+        with:
+          python-version: "3.10"
+          architecture: 'x64'
+      - name: Install Python packages
+        run: |
+          python -m pip install wheel setuptools cmakelint cpplint==1.6.1 pylint
+      - name: Run lint
+        run: |
+          python3 ops/script/lint_cpp.py
+          bash ops/script/lint_cmake.sh
+
+  lintr:
+    runs-on: ubuntu-latest
+    name: Run R linters on Ubuntu
+    env:
+      R_REMOTES_NO_ERRORS_FROM_WARNINGS: true
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          submodules: 'true'
+      - uses: r-lib/actions/setup-r@v2
+        with:
+          r-version: "release"
+      - name: Cache R packages
+        uses: actions/cache@v4
+        with:
+          path: ${{ env.R_LIBS_USER }}
+          key: ${{ runner.os }}-r-release-7-${{ hashFiles('R-package/DESCRIPTION') }}
+          restore-keys: ${{ runner.os }}-r-release-7-${{ hashFiles('R-package/DESCRIPTION') }}
+      - name: Install dependencies
+        shell: Rscript {0}
+        run: |
+          source("./R-package/tests/helper_scripts/install_deps.R")
+      - name: Run lintr
+        run: |
+          MAKEFLAGS="-j$(nproc)" R CMD INSTALL R-package/
+          Rscript ops/script/lint_r.R $(pwd)
diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
new file mode 100644
index 000000000000..332606cece7b
--- /dev/null
+++ b/.github/workflows/main.yml
@@ -0,0 +1,260 @@
+name: XGBoost CI
+
+on: [push, pull_request]
+
+permissions:
+  contents: read  # to fetch code (actions/checkout)
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
+  cancel-in-progress: true
+
+env:
+  BRANCH_NAME: >-
+    ${{ github.event.pull_request.number && 'PR-' }}${{ github.event.pull_request.number || github.ref_name }}
+
+jobs:
+  build-cpu:
+    name: Build CPU
+    runs-on:
+      - runs-on=${{ github.run_id }}
+      - runner=linux-amd64-cpu
+      - tag=main-build-cpu
+    steps:
+      # Restart Docker daemon so that it recognizes the ephemeral disks
+      - run: sudo systemctl restart docker
+      - uses: actions/checkout@v4
+        with:
+          submodules: "true"
+      - name: Log into Docker registry (AWS ECR)
+        run: bash ops/pipeline/login-docker-registry.sh
+      - run: bash ops/pipeline/build-cpu.sh
+      - name: Stash CLI executable
+        run: |
+          python3 ops/pipeline/manage-artifacts.py upload \
+            --s3-bucket ${{ env.RUNS_ON_S3_BUCKET_CACHE }} \
+            --prefix cache/${{ github.run_id }}/build-cpu \
+            ./xgboost
+
+  build-cpu-arm64:
+    name: Build CPU ARM64 + manylinux_2_28_aarch64 wheel
+    runs-on:
+      - runs-on=${{ github.run_id }}
+      - runner=linux-arm64-cpu
+      - tag=main-build-cpu-arm64
+    steps:
+      # Restart Docker daemon so that it recognizes the ephemeral disks
+      - run: sudo systemctl restart docker
+      - uses: actions/checkout@v4
+        with:
+          submodules: "true"
+      - name: Log into Docker registry (AWS ECR)
+        run: bash ops/pipeline/login-docker-registry.sh
+      - run: bash ops/pipeline/build-cpu-arm64.sh
+      - name: Stash files
+        run: |
+          python3 ops/pipeline/manage-artifacts.py upload \
+            --s3-bucket ${{ env.RUNS_ON_S3_BUCKET_CACHE }} \
+            --prefix cache/${{ github.run_id }}/build-cpu-arm64 \
+            ./xgboost python-package/dist/*.whl
+
+  build-cuda:
+    name: Build CUDA + manylinux_2_28_x86_64 wheel
+    runs-on:
+      - runs-on=${{ github.run_id }}
+      - runner=linux-amd64-cpu
+      - tag=main-build-cuda
+    steps:
+      # Restart Docker daemon so that it recognizes the ephemeral disks
+      - run: sudo systemctl restart docker
+      - uses: actions/checkout@v4
+        with:
+          submodules: "true"
+      - name: Log into Docker registry (AWS ECR)
+        run: bash ops/pipeline/login-docker-registry.sh
+      - run: |
+          bash ops/pipeline/build-cuda.sh xgb-ci.gpu_build_rockylinux8 disable-rmm
+      - name: Stash files
+        run: |
+          python3 ops/pipeline/manage-artifacts.py upload \
+            --s3-bucket ${{ env.RUNS_ON_S3_BUCKET_CACHE }} \
+            --prefix cache/${{ github.run_id }}/build-cuda \
+            build/testxgboost ./xgboost python-package/dist/*.whl
+
+  build-cuda-with-rmm:
+    name: Build CUDA with RMM
+    runs-on:
+      - runs-on=${{ github.run_id }}
+      - runner=linux-amd64-cpu
+      - tag=main-build-cuda-with-rmm
+    steps:
+      # Restart Docker daemon so that it recognizes the ephemeral disks
+      - run: sudo systemctl restart docker
+      - uses: actions/checkout@v4
+        with:
+          submodules: "true"
+      - name: Log into Docker registry (AWS ECR)
+        run: bash ops/pipeline/login-docker-registry.sh
+      - run: |
+          bash ops/pipeline/build-cuda.sh xgb-ci.gpu_build_rockylinux8 enable-rmm
+      - name: Stash files
+        run: |
+          python3 ops/pipeline/manage-artifacts.py upload \
+            --s3-bucket ${{ env.RUNS_ON_S3_BUCKET_CACHE }} \
+            --prefix cache/${{ github.run_id }}/build-cuda-with-rmm \
+            build/testxgboost
+
+  build-cuda-with-rmm-dev:
+    name: Build CUDA with RMM (dev)
+    runs-on:
+      - runs-on=${{ github.run_id }}
+      - runner=linux-amd64-cpu
+      - tag=main-build-cuda-with-rmm-dev
+    steps:
+      # Restart Docker daemon so that it recognizes the ephemeral disks
+      - run: sudo systemctl restart docker
+      - uses: actions/checkout@v4
+        with:
+          submodules: "true"
+      - name: Log into Docker registry (AWS ECR)
+        run: bash ops/pipeline/login-docker-registry.sh
+      - run: |
+          bash ops/pipeline/build-cuda.sh \
+            xgb-ci.gpu_build_rockylinux8_dev_ver enable-rmm
+
+  build-manylinux2014:
+    name: Build manylinux2014_${{ matrix.arch }} wheel
+    runs-on:
+      - runs-on
+      - runner=${{ matrix.runner }}
+      - run-id=${{ github.run_id }}
+      - tag=main-build-manylinux2014-${{ matrix.arch }}
+    strategy:
+      fail-fast: false
+      matrix:
+        include:
+        - arch: aarch64
+          runner: linux-arm64-cpu
+        - arch: x86_64
+          runner: linux-amd64-cpu
+    steps:
+      # Restart Docker daemon so that it recognizes the ephemeral disks
+      - run: sudo systemctl restart docker
+      - uses: actions/checkout@v4
+        with:
+          submodules: "true"
+      - name: Log into Docker registry (AWS ECR)
+        run: bash ops/pipeline/login-docker-registry.sh
+      - run: bash ops/pipeline/build-manylinux2014.sh ${{ matrix.arch }}
+
+  build-gpu-rpkg:
+    name: Build GPU-enabled R package
+    runs-on:
+      - runs-on=${{ github.run_id }}
+      - runner=linux-amd64-cpu
+      - tag=main-build-gpu-rpkg
+    steps:
+      # Restart Docker daemon so that it recognizes the ephemeral disks
+      - run: sudo systemctl restart docker
+      - uses: actions/checkout@v4
+        with:
+          submodules: "true"
+      - name: Log into Docker registry (AWS ECR)
+        run: bash ops/pipeline/login-docker-registry.sh
+      - run: bash ops/pipeline/build-gpu-rpkg.sh
+
+
+  test-cpp-gpu:
+    name: >-
+      Run Google Tests with GPUs
+      (Suite ${{ matrix.suite }}, Runner ${{ matrix.runner }})
+    needs: [build-cuda, build-cuda-with-rmm]
+    runs-on:
+      - runs-on
+      - runner=${{ matrix.runner }}
+      - run-id=${{ github.run_id }}
+      - tag=main-test-cpp-gpu-${{ matrix.suite }}
+    timeout-minutes: 30
+    strategy:
+      fail-fast: false
+      matrix:
+        include:
+          - suite: gpu
+            runner: linux-amd64-gpu
+            artifact_from: build-cuda
+          - suite: gpu-rmm
+            runner: linux-amd64-gpu
+            artifact_from: build-cuda-with-rmm
+          - suite: mgpu
+            runner: linux-amd64-mgpu
+            artifact_from: build-cuda
+    steps:
+      # Restart Docker daemon so that it recognizes the ephemeral disks
+      - run: sudo systemctl restart docker
+      - uses: actions/checkout@v4
+        with:
+          submodules: "true"
+      - name: Log into Docker registry (AWS ECR)
+        run: bash ops/pipeline/login-docker-registry.sh
+      - name: Unstash gtest
+        run: |
+          python3 ops/pipeline/manage-artifacts.py download \
+            --s3-bucket ${{ env.RUNS_ON_S3_BUCKET_CACHE }} \
+            --prefix cache/${{ github.run_id }}/${{ matrix.artifact_from }} \
+            --dest-dir build \
+            testxgboost
+          chmod +x build/testxgboost
+      - run: bash ops/pipeline/test-cpp-gpu.sh ${{ matrix.suite }}
+
+  test-python-wheel:
+    name: Run Python tests (${{ matrix.description }})
+    needs: [build-cuda, build-cpu-arm64]
+    runs-on:
+      - runs-on
+      - runner=${{ matrix.runner }}
+      - run-id=${{ github.run_id }}
+      - tag=main-test-python-wheel-${{ matrix.description }}
+    timeout-minutes: 60
+    strategy:
+      fail-fast: false
+      matrix:
+        include:
+          - description: single-gpu
+            image_repo: xgb-ci.gpu
+            suite: gpu
+            runner: linux-amd64-gpu
+            artifact_from: build-cuda
+          - description: multiple-gpu
+            image_repo: xgb-ci.gpu
+            suite: mgpu
+            runner: linux-amd64-mgpu
+            artifact_from: build-cuda
+          - description: cpu-amd64
+            image_repo: xgb-ci.cpu
+            suite: cpu
+            runner: linux-amd64-cpu
+            artifact_from: build-cuda
+          - description: cpu-arm64
+            image_repo: xgb-ci.aarch64
+            suite: cpu-arm64
+            runner: linux-arm64-cpu
+            artifact_from: build-cpu-arm64
+    steps:
+      # Restart Docker daemon so that it recognizes the ephemeral disks
+      - run: sudo systemctl restart docker
+      - uses: actions/checkout@v4
+        with:
+          submodules: "true"
+      - name: Log into Docker registry (AWS ECR)
+        run: bash ops/pipeline/login-docker-registry.sh
+      - name: Unstash Python wheel
+        run: |
+          python3 ops/pipeline/manage-artifacts.py download \
+            --s3-bucket ${{ env.RUNS_ON_S3_BUCKET_CACHE }} \
+            --prefix cache/${{ github.run_id }}/${{ matrix.artifact_from }} \
+            --dest-dir wheelhouse \
+            *.whl xgboost
+          mv -v wheelhouse/xgboost .
+          chmod +x ./xgboost
+      - name: Run Python tests, ${{ matrix.description }}
+        run: bash ops/pipeline/test-python-wheel.sh ${{ matrix.suite }} ${{ matrix.image_repo }}
diff --git a/.github/workflows/misc.yml b/.github/workflows/misc.yml
new file mode 100644
index 000000000000..54d0078a6164
--- /dev/null
+++ b/.github/workflows/misc.yml
@@ -0,0 +1,45 @@
+name: XGBoost CI (misc)
+
+on: [push, pull_request]
+
+permissions:
+  contents: read  # to fetch code (actions/checkout)
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
+  cancel-in-progress: true
+
+env:
+  BRANCH_NAME: >-
+    ${{ github.event.pull_request.number && 'PR-' }}${{ github.event.pull_request.number || github.ref_name }}
+
+jobs:
+  gtest-cpu-nonomp:
+    name: Test Google C++ unittest (CPU Non-OMP)
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          submodules: 'true'
+      - name: Install system packages
+        run: |
+          sudo apt-get install -y --no-install-recommends ninja-build
+      - name: Build and test XGBoost
+        run: bash ops/pipeline/build-test-cpu-nonomp.sh
+
+  c-api-demo:
+    name: Test installing XGBoost lib + building the C API demo
+    runs-on: ubuntu-latest
+    defaults:
+      run:
+        shell: bash -l {0}
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          submodules: 'true'
+      - uses: dmlc/xgboost-devops/actions/miniforge-setup@main
+        with:
+          environment-name: cpp_test
+          environment-file: ops/conda_env/cpp_test.yml
+      - name: Build and run C API demo with shared
+        run: bash ops/pipeline/test-c-api-demo.sh
diff --git a/.github/workflows/python_tests.yml b/.github/workflows/python_tests.yml
index 7e5c8ea58ec4..180b1a855733 100644
--- a/.github/workflows/python_tests.yml
+++ b/.github/workflows/python_tests.yml
@@ -14,6 +14,28 @@ concurrency:
   cancel-in-progress: true
 
 jobs:
+  python-sdist-test:
+    runs-on: ${{ matrix.os }}
+    name: Test installing Python XGBoost from the source distribution (${{ matrix.os }})
+    strategy:
+      fail-fast: false
+      matrix:
+        os: [macos-13, windows-latest, ubuntu-latest]
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          submodules: 'true'
+      - uses: dmlc/xgboost-devops/actions/miniforge-setup@main
+        with:
+          environment-name: sdist_test
+          environment-file: ops/conda_env/sdist_test.yml
+      - name: Install extra package for macOS
+        run: |
+          mamba install -c conda-forge llvm-openmp
+        if: matrix.os == 'macos-13'
+      - name: Build and install XGBoost
+        run: bash ops/pipeline/test-python-sdist.sh
+
   python-tests-on-macos:
     name: Test XGBoost Python package on macos-13
     runs-on: macos-13
@@ -27,3 +49,16 @@ jobs:
           environment-name: macos_cpu_test
           environment-file: ops/conda_env/macos_cpu_test.yml
       - run: bash ops/pipeline/test-python-macos.sh
+
+  python-system-installation-on-ubuntu:
+    name: Test XGBoost Python package System Installation on Ubuntu
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          submodules: 'true'
+      - name: Set up Python 3.10
+        uses: actions/setup-python@v5
+        with:
+          python-version: "3.10"
+      - run: bash ops/pipeline/test-python-with-sysprefix.sh
diff --git a/.github/workflows/python_wheels_macos.yml b/.github/workflows/python_wheels_macos.yml
new file mode 100644
index 000000000000..cbece0512274
--- /dev/null
+++ b/.github/workflows/python_wheels_macos.yml
@@ -0,0 +1,58 @@
+name: Build Python wheels targeting MacOS
+
+on: [push, pull_request]
+
+permissions:
+  contents: read  # to fetch code (actions/checkout)
+
+defaults:
+  run:
+    shell: bash -l {0}
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
+  cancel-in-progress: true
+
+env:
+  BRANCH_NAME: >-
+    ${{ github.event.pull_request.number && 'PR-' }}${{ github.event.pull_request.number || github.ref_name }}
+
+jobs:
+  python-wheels-macos:
+    name: Build wheel for ${{ matrix.platform_id }}
+    runs-on: ${{ matrix.os }}
+    strategy:
+      fail-fast: false
+      matrix:
+        include:
+        - os: macos-13
+          platform_id: macosx_x86_64
+        - os: macos-14
+          platform_id: macosx_arm64
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          submodules: 'true'
+      - name: Set up homebrew
+        uses: Homebrew/actions/setup-homebrew@13341b4d5e459a98bbe0b122b12c11bf90518cc8
+      - name: Install libomp
+        run: brew install libomp
+      - uses: dmlc/xgboost-devops/actions/miniforge-setup@main
+        with:
+          environment-name: minimal
+          environment-file: ops/conda_env/minimal.yml
+      - name: Build wheels
+        run: bash ops/pipeline/build-python-wheels-macos.sh ${{ matrix.platform_id }} ${{ github.sha }}
+      - name: Try installing XGBoost
+        run: |
+          python -m pip install -vvv wheelhouse/*.whl
+      - name: Upload Python wheel
+        if: github.ref == 'refs/heads/master' || contains(github.ref, 'refs/heads/release_')
+        run: |
+          python ops/pipeline/manage-artifacts.py upload \
+            --s3-bucket xgboost-nightly-builds \
+            --prefix ${{ env.BRANCH_NAME }}/${{ github.sha }} --make-public \
+            wheelhouse/*.whl
+        env:
+          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID_IAM_S3_UPLOADER }}
+          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY_IAM_S3_UPLOADER }}
diff --git a/.github/workflows/r_nold.yml b/.github/workflows/r_nold.yml
new file mode 100644
index 000000000000..da01f39f650b
--- /dev/null
+++ b/.github/workflows/r_nold.yml
@@ -0,0 +1,41 @@
+# Run expensive R tests with the help of rhub. Only triggered by a pull request review
+# See discussion at https://github.com/dmlc/xgboost/pull/6378
+
+name: XGBoost-R-noLD
+
+on:
+  pull_request_review_comment:
+    types: [created]
+
+permissions:
+  contents: read # to fetch code (actions/checkout)
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
+  cancel-in-progress: true
+
+jobs:
+  test-R-noLD:
+    if: github.event.comment.body == '/gha run r-nold-test' && contains('OWNER,MEMBER,COLLABORATOR', github.event.comment.author_association)
+    timeout-minutes: 120
+    runs-on: ubuntu-latest
+    container:
+      image: rhub/debian-gcc-devel-nold
+    steps:
+      - name: Install git and system packages
+        shell: bash
+        run: |
+          apt update && apt install libcurl4-openssl-dev libssl-dev libssh2-1-dev libgit2-dev libglpk-dev libxml2-dev libharfbuzz-dev libfribidi-dev git -y
+      - uses: actions/checkout@v4
+        with:
+          submodules: 'true'
+      - name: Install dependencies
+        shell: bash -l {0}
+        run: |
+          /tmp/R-devel/bin/Rscript -e "source('./R-package/tests/helper_scripts/install_deps.R')"
+      - name: Run R tests
+        shell: bash
+        run: |
+          cd R-package && \
+          /tmp/R-devel/bin/R CMD INSTALL . && \
+          /tmp/R-devel/bin/R -q -e "library(testthat); setwd('tests'); source('testthat.R')"
diff --git a/.github/workflows/r_tests.yml b/.github/workflows/r_tests.yml
new file mode 100644
index 000000000000..43ad372a1e84
--- /dev/null
+++ b/.github/workflows/r_tests.yml
@@ -0,0 +1,103 @@
+name: XGBoost-R-Tests
+
+on: [push, pull_request]
+
+env:
+  GITHUB_PAT: ${{ secrets.GITHUB_TOKEN }}
+
+permissions:
+  contents: read # to fetch code (actions/checkout)
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
+  cancel-in-progress: true
+
+jobs:
+  test-Rpkg:
+    runs-on: ${{ matrix.os }}
+    name: Test R on OS ${{ matrix.os }}, R ${{ matrix.r }}, Compiler ${{ matrix.compiler }}, Build ${{ matrix.build }}
+    strategy:
+      fail-fast: false
+      matrix:
+        include:
+          - os: windows-latest
+            r: release
+            compiler: mingw
+            build: autotools
+          - os: ubuntu-latest
+            r: release
+            compiler: none
+            build: cmake
+    env:
+      R_REMOTES_NO_ERRORS_FROM_WARNINGS: true
+    steps:
+      - name: Install system dependencies
+        run: |
+          sudo apt update
+          sudo apt install -y libcurl4-openssl-dev libssl-dev libssh2-1-dev libgit2-dev libglpk-dev libxml2-dev libharfbuzz-dev libfribidi-dev librsvg2-dev librsvg2-2
+        if: matrix.os == 'ubuntu-latest'
+      - uses: actions/checkout@v4
+        with:
+          submodules: 'true'
+      - uses: r-lib/actions/setup-r@v2
+        with:
+          r-version: ${{ matrix.r }}
+      - name: Cache R packages
+        uses: actions/cache@v4
+        with:
+          path: ${{ env.R_LIBS_USER }}
+          key: ${{ runner.os }}-r-${{ matrix.r }}-8-${{ hashFiles('R-package/DESCRIPTION') }}
+          restore-keys: ${{ runner.os }}-r-${{ matrix.r }}-8-${{ hashFiles('R-package/DESCRIPTION') }}
+      - uses: actions/setup-python@v5
+        with:
+          python-version: "3.10"
+          architecture: 'x64'
+      - uses: r-lib/actions/setup-tinytex@v2
+      - name: Install dependencies
+        shell: Rscript {0}
+        run: |
+          source("./R-package/tests/helper_scripts/install_deps.R")
+      - name: Test R (custom compiler)
+        run: |
+          python ops/script/test_r_package.py --compiler='${{ matrix.compiler }}' --build-tool="${{ matrix.build }}" --task=check
+        if: matrix.compiler != 'none'
+      - name: Test R (default compiler)
+        run: |
+          python ops/script/test_r_package.py --build-tool="${{ matrix.build }}" --task=check
+        if: matrix.compiler == 'none'
+
+  test-R-on-Debian:
+    name: Test R package on Debian
+    runs-on: ubuntu-latest
+    container:
+      image: rhub/debian-gcc-release
+    steps:
+      - name: Install system dependencies
+        run: |
+          # Must run before checkout to have the latest git installed.
+          # No need to add pandoc, the container has it figured out.
+          apt update && apt install libcurl4-openssl-dev libssl-dev libssh2-1-dev libgit2-dev libglpk-dev libxml2-dev libharfbuzz-dev libfribidi-dev git librsvg2-dev librsvg2-2 -y
+      - name: Trust git cloning project sources
+        run: |
+          git config --global --add safe.directory "${GITHUB_WORKSPACE}"
+      - uses: actions/checkout@v4
+        with:
+          submodules: 'true'
+      - name: Install dependencies
+        shell: bash -l {0}
+        run: |
+          Rscript -e "source('./R-package/tests/helper_scripts/install_deps.R')"
+      - name: Test R
+        shell: bash -l {0}
+        run: |
+          python3 ops/script/test_r_package.py --r=/usr/bin/R --build-tool=autotools --task=check
+      - uses: dorny/paths-filter@v3
+        id: changes
+        with:
+          filters: |
+            r_package:
+              - 'R-package/**'
+      - name: Run document check
+        if: steps.changes.outputs.r_package == 'true'
+        run: |
+          python3 ops/script/test_r_package.py --r=/usr/bin/R --task=doc
diff --git a/.github/workflows/scorecards.yml b/.github/workflows/scorecards.yml
new file mode 100644
index 000000000000..f3837391b4fe
--- /dev/null
+++ b/.github/workflows/scorecards.yml
@@ -0,0 +1,54 @@
+name: Scorecards supply-chain security
+on:
+  # Only the default branch is supported.
+  branch_protection_rule:
+  schedule:
+    - cron: '17 2 * * 6'
+  push:
+    branches: [ "master" ]
+
+# Declare default permissions as read only.
+permissions: read-all
+
+jobs:
+  analysis:
+    name: Scorecards analysis
+    runs-on: ubuntu-latest
+    permissions:
+      # Needed to upload the results to code-scanning dashboard.
+      security-events: write
+      # Used to receive a badge.
+      id-token: write
+
+    steps:
+      - name: "Checkout code"
+        uses: actions/checkout@v4
+        with:
+          persist-credentials: false
+
+      - name: "Run analysis"
+        uses: ossf/scorecard-action@62b2cac7ed8198b15735ed49ab1e5cf35480ba46 # v2.4.0
+        with:
+          results_file: results.sarif
+          results_format: sarif
+
+          # Publish the results for public repositories to enable scorecard badges. For more details, see
+          # https://github.com/ossf/scorecard-action#publishing-results.
+          # For private repositories, `publish_results` will automatically be set to `false`, regardless
+          # of the value entered here.
+          publish_results: true
+
+      # Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF
+      # format to the repository Actions tab.
+      - name: "Upload artifact"
+        uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3
+        with:
+          name: SARIF file
+          path: results.sarif
+          retention-days: 5
+
+      # Upload the results to GitHub's code scanning dashboard.
+      - name: "Upload to code-scanning"
+        uses: github/codeql-action/upload-sarif@83a02f7883b12e0e4e1a146174f5e2292a01e601 # v2.16.4
+        with:
+          sarif_file: results.sarif
diff --git a/.github/workflows/sycl_tests.yml b/.github/workflows/sycl_tests.yml
new file mode 100644
index 000000000000..8efdc98d7fd9
--- /dev/null
+++ b/.github/workflows/sycl_tests.yml
@@ -0,0 +1,48 @@
+name: XGBoost CI (oneAPI)
+
+on: [push, pull_request]
+
+permissions:
+  contents: read  # to fetch code (actions/checkout)
+
+defaults:
+  run:
+    shell: bash -l {0}
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
+  cancel-in-progress: true
+
+env:
+  BRANCH_NAME: >-
+    ${{ github.event.pull_request.number && 'PR-' }}${{ github.event.pull_request.number || github.ref_name }}
+
+jobs:
+  gtest-cpu-sycl:
+    name: Test Google C++ unittest (CPU SYCL)
+    runs-on: ubuntu-latest
+    steps:
+    - uses: actions/checkout@v4
+      with:
+        submodules: 'true'
+    - uses: dmlc/xgboost-devops/actions/miniforge-setup@main
+      with:
+        environment-name: linux_sycl_test
+        environment-file: ops/conda_env/linux_sycl_test.yml
+    - name: Run gtest
+      run: bash ops/pipeline/build-test-sycl.sh gtest
+
+  python-sycl-tests-on-ubuntu:
+    name: Test XGBoost Python package with SYCL
+    runs-on: ubuntu-latest
+    timeout-minutes: 90
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          submodules: 'true'
+      - uses: dmlc/xgboost-devops/actions/miniforge-setup@main
+        with:
+          environment-name: linux_sycl_test
+          environment-file: ops/conda_env/linux_sycl_test.yml
+      - name: Test Python package
+        run: bash ops/pipeline/build-test-sycl.sh pytest
diff --git a/.github/workflows/windows.yml b/.github/workflows/windows.yml
new file mode 100644
index 000000000000..10d430c5f0f8
--- /dev/null
+++ b/.github/workflows/windows.yml
@@ -0,0 +1,78 @@
+name: XGBoost CI (Windows)
+
+on: [push, pull_request]
+
+permissions:
+  contents: read  # to fetch code (actions/checkout)
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
+  cancel-in-progress: true
+
+defaults:
+  run:
+    shell: powershell
+
+env:
+  BRANCH_NAME: >-
+    ${{ github.event.pull_request.number && 'PR-' }}${{ github.event.pull_request.number || github.ref_name }}
+
+jobs:
+  build-win64-gpu:
+    name: Build XGBoost for Windows with CUDA
+    runs-on:
+      - runs-on=${{ github.run_id }}
+      - runner=windows-cpu
+      - tag=windows-build-win64-gpu
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          submodules: "true"
+      - run: powershell ops/pipeline/build-win64-gpu.ps1
+      - name: Stash files
+        shell: powershell
+        run: |
+          conda activate
+          python ops/pipeline/manage-artifacts.py upload `
+            --s3-bucket ${{ env.RUNS_ON_S3_BUCKET_CACHE }} `
+            --prefix cache/${{ github.run_id }}/build-win64-gpu `
+            build/testxgboost.exe xgboost.exe `
+            (Get-ChildItem python-package/dist/*.whl | Select-Object -Expand FullName)
+
+  build-win64-cpu:
+    name: Build XGBoost for Windows (minimal)
+    runs-on:
+      - runs-on=${{ github.run_id }}
+      - runner=windows-cpu
+      - tag=windows-build-win64-cpu
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          submodules: "true"
+      - run: powershell ops/pipeline/build-win64-cpu.ps1
+
+  test-win64-gpu:
+    name: Test XGBoost on Windows
+    needs: build-win64-gpu
+    runs-on:
+      - runs-on=${{ github.run_id }}
+      - runner=windows-gpu
+      - tag=windows-test-win64-gpu
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          submodules: "true"
+      - name: Unstash files
+        shell: powershell
+        run: |
+          conda activate
+          python ops/pipeline/manage-artifacts.py download `
+            --s3-bucket ${{ env.RUNS_ON_S3_BUCKET_CACHE }} `
+            --prefix cache/${{ github.run_id }}/build-win64-gpu `
+            --dest-dir build `
+            *.whl testxgboost.exe xgboost.exe
+          Move-Item -Path build/xgboost.exe -Destination .
+          New-Item -ItemType Directory -Path python-package/dist/ -Force
+          Move-Item -Path (Get-ChildItem build/*.whl | Select-Object -Expand FullName) `
+            -Destination python-package/dist/
+      - run: powershell ops/pipeline/test-win64-gpu.ps1
diff --git a/ops/pipeline/test-python-macos.sh b/ops/pipeline/test-python-macos.sh
index 57bb186fa75d..63b5690d1312 100755
--- a/ops/pipeline/test-python-macos.sh
+++ b/ops/pipeline/test-python-macos.sh
@@ -19,5 +19,5 @@ python --version
 pip install -v .
 
 cd ..
-pytest -s -v --maxfail=1 --durations=0 ./tests/python/
-# pytest -s -v -rxXs --durations=0 ./tests/test_distributed/test_with_dask
+pytest -s -v -rxXs --durations=0 ./tests/python
+pytest -s -v -rxXs --durations=0 ./tests/test_distributed/test_with_dask

From 5982ac92a7899302a2b66e44c739d6a77c091603 Mon Sep 17 00:00:00 2001
From: Jiaming Yuan <jm.yuan@outlook.com>
Date: Sun, 16 Mar 2025 03:02:54 +0800
Subject: [PATCH 08/15] remove pyspark.

---
 ops/conda_env/macos_cpu_test.yml | 1 -
 1 file changed, 1 deletion(-)

diff --git a/ops/conda_env/macos_cpu_test.yml b/ops/conda_env/macos_cpu_test.yml
index 4cb2235e96ab..eb6c4319e848 100644
--- a/ops/conda_env/macos_cpu_test.yml
+++ b/ops/conda_env/macos_cpu_test.yml
@@ -29,7 +29,6 @@ dependencies:
 - awscli
 - loky=3.4.1
 - pyarrow
-- pyspark>=3.4.0
 - cloudpickle
 - pip:
   - setuptools

From 8d2ca0901ee92dfe7b3676cb706420c36b60471a Mon Sep 17 00:00:00 2001
From: Jiaming Yuan <jm.yuan@outlook.com>
Date: Mon, 17 Mar 2025 22:13:41 +0800
Subject: [PATCH 09/15] Freeze loky on Linux.

---
 ops/pipeline/get-image-tag.sh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/ops/pipeline/get-image-tag.sh b/ops/pipeline/get-image-tag.sh
index 74458229bbce..68e916eefa66 100755
--- a/ops/pipeline/get-image-tag.sh
+++ b/ops/pipeline/get-image-tag.sh
@@ -1,4 +1,4 @@
 ## Update the following line to test changes to CI images
 ## See https://xgboost.readthedocs.io/en/latest/contrib/ci.html#making-changes-to-ci-containers
 
-IMAGE_TAG=main
+IMAGE_TAG=PR-15

From 722c99a1d9f9a30c849f2b8218e44c72a9d9bd82 Mon Sep 17 00:00:00 2001
From: Jiaming Yuan <jm.yuan@outlook.com>
Date: Tue, 18 Mar 2025 13:59:35 +0800
Subject: [PATCH 10/15] Linux.

---
 ops/conda_env/linux_cpu_test.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/ops/conda_env/linux_cpu_test.yml b/ops/conda_env/linux_cpu_test.yml
index e4c0b507c8e2..26d54179cc63 100644
--- a/ops/conda_env/linux_cpu_test.yml
+++ b/ops/conda_env/linux_cpu_test.yml
@@ -34,7 +34,7 @@ dependencies:
 - boto3
 - awscli
 - py-ubjson
-- loky
+- loky=3.4.1
 - pyarrow
 - protobuf
 - cloudpickle

From 04330f7c61c4b85f01e6696e5339f3b6d2a60b9e Mon Sep 17 00:00:00 2001
From: Jiaming Yuan <jm.yuan@outlook.com>
Date: Tue, 18 Mar 2025 14:25:15 +0800
Subject: [PATCH 11/15] Freeze.

---
 ops/conda_env/aarch64_test.yml | 2 +-
 ops/conda_env/win64_test.yml   | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/ops/conda_env/aarch64_test.yml b/ops/conda_env/aarch64_test.yml
index 14305ebbf090..a7efc707f5c6 100644
--- a/ops/conda_env/aarch64_test.yml
+++ b/ops/conda_env/aarch64_test.yml
@@ -26,7 +26,7 @@ dependencies:
 - awscli
 - numba
 - llvmlite
-- loky
+- loky=3.4.1
 - pyarrow
 - pyspark>=3.4.0
 - cloudpickle
diff --git a/ops/conda_env/win64_test.yml b/ops/conda_env/win64_test.yml
index 32b9339e6fc0..1b5e906dd395 100644
--- a/ops/conda_env/win64_test.yml
+++ b/ops/conda_env/win64_test.yml
@@ -16,5 +16,5 @@ dependencies:
 - python-graphviz
 - pip
 - py-ubjson
-- loky
+- loky=3.4.1
 - pyarrow

From 733caef00c92478533d8686309c4b0a787cbef95 Mon Sep 17 00:00:00 2001
From: Jiaming Yuan <jm.yuan@outlook.com>
Date: Tue, 18 Mar 2025 15:20:10 +0800
Subject: [PATCH 12/15] assert loky version.

---
 tests/python/test_collective.py | 9 ++++++++-
 1 file changed, 8 insertions(+), 1 deletion(-)

diff --git a/tests/python/test_collective.py b/tests/python/test_collective.py
index 473b38b5b742..0159f4671786 100644
--- a/tests/python/test_collective.py
+++ b/tests/python/test_collective.py
@@ -3,7 +3,6 @@
 
 import numpy as np
 import pytest
-from loky import get_reusable_executor
 
 import xgboost as xgb
 from xgboost import RabitTracker, build_info, federated
@@ -25,10 +24,16 @@ def run_rabit_worker(rabit_env: dict, world_size: int) -> int:
 
 @pytest.mark.skipif(**tm.no_loky())
 def test_rabit_communicator() -> None:
+    import loky
+    from loky import get_reusable_executor
+
+    assert loky.__version__ == "3.4.1"
+
     world_size = 2
     tracker = RabitTracker(host_ip="127.0.0.1", n_workers=world_size)
     tracker.start()
     workers = []
+
     with get_reusable_executor(max_workers=world_size) as pool:
         for _ in range(world_size):
             worker = pool.submit(
@@ -60,6 +65,8 @@ def run_federated_worker(port: int, world_size: int, rank: int) -> int:
 @pytest.mark.skipif(**tm.skip_win())
 @pytest.mark.skipif(**tm.no_loky())
 def test_federated_communicator() -> None:
+    from loky import get_reusable_executor
+
     if not build_info()["USE_FEDERATED"]:
         pytest.skip("XGBoost not built with federated learning enabled")
 

From bad70285d122bafd23a1ea905756c6fa461ca3d7 Mon Sep 17 00:00:00 2001
From: Jiaming Yuan <jm.yuan@outlook.com>
Date: Tue, 18 Mar 2025 22:22:51 +0800
Subject: [PATCH 13/15] revert assert.

---
 tests/python/test_collective.py | 3 ---
 1 file changed, 3 deletions(-)

diff --git a/tests/python/test_collective.py b/tests/python/test_collective.py
index 0159f4671786..1204c0faf8c9 100644
--- a/tests/python/test_collective.py
+++ b/tests/python/test_collective.py
@@ -24,11 +24,8 @@ def run_rabit_worker(rabit_env: dict, world_size: int) -> int:
 
 @pytest.mark.skipif(**tm.no_loky())
 def test_rabit_communicator() -> None:
-    import loky
     from loky import get_reusable_executor
 
-    assert loky.__version__ == "3.4.1"
-
     world_size = 2
     tracker = RabitTracker(host_ip="127.0.0.1", n_workers=world_size)
     tracker.start()

From 5d854c2fda4010c96a7535412d47f00650c3b8cd Mon Sep 17 00:00:00 2001
From: Jiaming Yuan <jm.yuan@outlook.com>
Date: Wed, 19 Mar 2025 18:50:23 +0800
Subject: [PATCH 14/15] 3.5.1

---
 ops/conda_env/aarch64_test.yml   | 2 +-
 ops/conda_env/linux_cpu_test.yml | 2 +-
 ops/conda_env/macos_cpu_test.yml | 2 +-
 ops/conda_env/win64_test.yml     | 2 +-
 4 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/ops/conda_env/aarch64_test.yml b/ops/conda_env/aarch64_test.yml
index a7efc707f5c6..d7dd13639ff3 100644
--- a/ops/conda_env/aarch64_test.yml
+++ b/ops/conda_env/aarch64_test.yml
@@ -26,7 +26,7 @@ dependencies:
 - awscli
 - numba
 - llvmlite
-- loky=3.4.1
+- loky>=3.5.1
 - pyarrow
 - pyspark>=3.4.0
 - cloudpickle
diff --git a/ops/conda_env/linux_cpu_test.yml b/ops/conda_env/linux_cpu_test.yml
index 26d54179cc63..55bac17f2dbb 100644
--- a/ops/conda_env/linux_cpu_test.yml
+++ b/ops/conda_env/linux_cpu_test.yml
@@ -34,7 +34,7 @@ dependencies:
 - boto3
 - awscli
 - py-ubjson
-- loky=3.4.1
+- loky>=3.5.1
 - pyarrow
 - protobuf
 - cloudpickle
diff --git a/ops/conda_env/macos_cpu_test.yml b/ops/conda_env/macos_cpu_test.yml
index eb6c4319e848..390abf141803 100644
--- a/ops/conda_env/macos_cpu_test.yml
+++ b/ops/conda_env/macos_cpu_test.yml
@@ -27,7 +27,7 @@ dependencies:
 - jsonschema
 - boto3
 - awscli
-- loky=3.4.1
+- loky>=3.5.1
 - pyarrow
 - cloudpickle
 - pip:
diff --git a/ops/conda_env/win64_test.yml b/ops/conda_env/win64_test.yml
index 1b5e906dd395..6e87e1560c21 100644
--- a/ops/conda_env/win64_test.yml
+++ b/ops/conda_env/win64_test.yml
@@ -16,5 +16,5 @@ dependencies:
 - python-graphviz
 - pip
 - py-ubjson
-- loky=3.4.1
+- loky>=3.5.1
 - pyarrow

From 36d104b4ca4d9c8eede489d6fd34c7acdfef435f Mon Sep 17 00:00:00 2001
From: Jiaming Yuan <jm.yuan@outlook.com>
Date: Wed, 19 Mar 2025 22:43:06 +0800
Subject: [PATCH 15/15] Return to main.

---
 ops/pipeline/get-image-tag.sh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/ops/pipeline/get-image-tag.sh b/ops/pipeline/get-image-tag.sh
index 68e916eefa66..74458229bbce 100755
--- a/ops/pipeline/get-image-tag.sh
+++ b/ops/pipeline/get-image-tag.sh
@@ -1,4 +1,4 @@
 ## Update the following line to test changes to CI images
 ## See https://xgboost.readthedocs.io/en/latest/contrib/ci.html#making-changes-to-ci-containers
 
-IMAGE_TAG=PR-15
+IMAGE_TAG=main