Merge branch 'intel:main' into dev/github_action_test
icfaust authored Jul 23, 2024
2 parents abab7c4 + ae8a398 commit 6ff0a1d
Showing 132 changed files with 3,554 additions and 1,718 deletions.
6 changes: 4 additions & 2 deletions .ci/pipeline/build-and-test-lnx.yml
@@ -23,8 +23,10 @@ steps:
bash .ci/scripts/describe_system.sh
displayName: "System info"
- script: |
conda config --add channels conda-forge
conda config --set channel_priority strict
conda update -y -q conda
conda create -q -y -n CB -c conda-forge -c intel python=$(PYTHON_VERSION) intel::dal-devel mpich pyyaml "dpcpp-cpp-rt=2024.1.0"
conda create -q -y -n CB -c conda-forge python=$(PYTHON_VERSION) dal-devel mpich pyyaml "dpcpp-cpp-rt=2024.2.0"
displayName: "Conda create"
- script: |
. /usr/share/miniconda/etc/profile.d/conda.sh
@@ -46,7 +48,7 @@
bash .ci/scripts/setup_sklearn.sh $(SKLEARN_VERSION)
pip install --upgrade -r requirements-test.txt
pip install $(python .ci/scripts/get_compatible_scipy_version.py)
if [ $(echo $(PYTHON_VERSION) | grep '3.9\|3.10') ]; then conda install -q -y -c intel dpctl=0.16.0 dpnp=0.14.0; fi
if [ $(echo $(PYTHON_VERSION) | grep '3.9\|3.11') ] && [ $(SKLEARN_VERSION) != "1.0" ]; then conda install -q -y -c https://software.repos.intel.com/python/conda/ dpctl=0.17.0 dpnp=0.15.0; fi
pip list
displayName: "Install testing requirements"
- script: |
3 changes: 2 additions & 1 deletion .ci/pipeline/build-and-test-win.yml
@@ -16,7 +16,7 @@
steps:
- powershell: Write-Host "##vso[task.prependpath]$env:CONDA\Scripts"
displayName: Add conda to PATH
- script: conda create -q -y -n CB -c conda-forge -c intel python=$(PYTHON_VERSION) intel::dal-devel impi-devel clang-format pyyaml
- script: conda create -q -y -n CB -c conda-forge python=$(PYTHON_VERSION) dal-devel impi-devel clang-format pyyaml
displayName: 'Create Anaconda environment'
- script: |
call activate CB
@@ -36,6 +36,7 @@ steps:
set PREFIX=%CONDA_PREFIX%
set PYTHON=python
call conda-recipe\bld.bat
IF %ERRORLEVEL% neq 0 EXIT /b %ERRORLEVEL%
set DALROOT=%CONDA_PREFIX%
python setup_sklearnex.py install --single-version-externally-managed --record=record_sklearnex.txt
displayName: 'Build daal4py/sklearnex'
14 changes: 10 additions & 4 deletions .ci/pipeline/ci.yml
@@ -60,8 +60,8 @@ jobs:
timeoutInMinutes: 120
strategy:
matrix:
Python3.8_Sklearn1.0:
PYTHON_VERSION: '3.8'
Python3.9_Sklearn1.0:
PYTHON_VERSION: '3.9'
SKLEARN_VERSION: '1.0'
Python3.9_Sklearn1.1:
PYTHON_VERSION: '3.9'
@@ -75,6 +75,9 @@ jobs:
Python3.12_Sklearn1.4:
PYTHON_VERSION: '3.12'
SKLEARN_VERSION: '1.4'
Python3.12_Sklearn1.5:
PYTHON_VERSION: '3.12'
SKLEARN_VERSION: '1.5'
pool:
vmImage: 'ubuntu-22.04'
steps:
@@ -84,8 +87,8 @@ jobs:
timeoutInMinutes: 120
strategy:
matrix:
Python3.8_Sklearn1.0:
PYTHON_VERSION: '3.8'
Python3.9_Sklearn1.0:
PYTHON_VERSION: '3.9'
SKLEARN_VERSION: '1.0'
Python3.9_Sklearn1.1:
PYTHON_VERSION: '3.9'
@@ -99,6 +102,9 @@ jobs:
Python3.12_Sklearn1.4:
PYTHON_VERSION: '3.12'
SKLEARN_VERSION: '1.4'
Python3.12_Sklearn1.5:
PYTHON_VERSION: '3.12'
SKLEARN_VERSION: '1.5'
pool:
vmImage: 'windows-latest'
steps:
4 changes: 3 additions & 1 deletion .ci/pipeline/nightly.yml
@@ -61,8 +61,10 @@ jobs:
vmImage: 'ubuntu-latest'
steps:
- script: |
conda config --append channels conda-forge
conda config --remove channels defaults
conda update -y -q conda
conda create -y -q -n CB -c intel python=$(python.version) dal-devel impi-devel
conda create -y -q -n CB -c conda-forge python=$(python.version) dal-devel impi-devel
displayName: 'Conda create'
- script: |
bash .ci/scripts/describe_system.sh
6 changes: 4 additions & 2 deletions .ci/pipeline/release.yml
@@ -45,7 +45,7 @@ jobs:
displayName: 'Sklearn testing'
- job: GeneratorConda
steps:
- bash: python .ci/scripts/gen_release_jobs.py --channels main intel conda-forge
- bash: python .ci/scripts/gen_release_jobs.py --channels conda-forge
name: MatrixGen
- job: ReleaseConda
dependsOn: GeneratorConda
@@ -68,7 +68,9 @@ jobs:
condition: eq( variables['Agent.OS'], 'Darwin')
displayName: Add sudo access
- script: |
conda update -y -q -c defaults conda
conda config --append channels conda-forge
conda config --remove channels defaults
conda update -y -q conda
conda create -y -q -n CB -c $(conda.channel) python=$(python.version) scikit-learn-intelex pandas pytest pyyaml
displayName: 'Install scikit-learn-intelex'
- script: |
10 changes: 8 additions & 2 deletions .ci/scripts/get_compatible_scipy_version.py
@@ -23,9 +23,15 @@
print("Scipy version is not specified for this sklearn/python version.", file=stderr)
print("scipy")
elif sklearn_check_version("1.3") or python_version[1] > 11:
print("scipy==1.11.*")
if python_version[1] > 8:
print("scipy==1.12.*")
else:
print("scipy==1.11.*")
elif sklearn_check_version("1.2") or python_version[1] > 10:
print("scipy==1.9.*")
if python_version[1] > 9:
print("scipy==1.12.*")
else:
print("scipy==1.9.*")
elif sklearn_check_version("1.1"):
print("scipy==1.8.*")
elif sklearn_check_version("1.0"):
2 changes: 1 addition & 1 deletion .ci/scripts/install_dpcpp.sh
@@ -21,5 +21,5 @@ rm GPG-PUB-KEY-INTEL-SW-PRODUCTS-2023.PUB
echo "deb https://apt.repos.intel.com/oneapi all main" | sudo tee /etc/apt/sources.list.d/oneAPI.list
sudo add-apt-repository -y "deb https://apt.repos.intel.com/oneapi all main"
sudo apt-get update
sudo apt-get install -y intel-dpcpp-cpp-compiler-2024.1
sudo apt-get install -y intel-dpcpp-cpp-compiler-2024.2
sudo bash -c 'echo libintelocl.so > /etc/OpenCL/vendors/intel-cpu.icd'
1 change: 1 addition & 0 deletions .ci/scripts/run_sklearn_tests.sh
@@ -25,6 +25,7 @@ cd $ci_dir
# ('all' - special value to run all tests)
export SELECTED_TESTS=${SELECTED_TESTS:-$(python scripts/select_sklearn_tests.py)}

export DESELECT_FLAGS="--public ${DESELECT_FLAGS}"
if [ -n "${SKLEARNEX_PREVIEW}" ]; then
export DESELECT_FLAGS="--preview ${DESELECT_FLAGS}"
fi
12 changes: 0 additions & 12 deletions INSTALL.md
@@ -26,7 +26,6 @@ To install Intel(R) Extension for Scikit-learn*, use one of the following scenar
- [Install from Anaconda Cloud](#install-from-anaconda-cloud)
- [Install via Anaconda Cloud from Conda-Forge Channel](#install-via-anaconda-cloud-from-conda-forge-channel)
- [Install via Anaconda Cloud from Intel Channel](#install-via-anaconda-cloud-from-intel-channel)
- [Install via Anaconda Cloud from Main Channel](#install-via-anaconda-cloud-from-main-channel)
- [Build from Sources](#build-from-sources)
- [Prerequisites](#prerequisites)
- [Configure the Build with Environment Variables](#configure-the-build-with-environment-variables)
@@ -52,7 +51,6 @@ Applicable for:
* PyPI
* Anaconda Cloud from Conda-Forge Channel
* Anaconda Cloud from Intel Channel
* Anaconda Cloud from Main Channel



@@ -128,16 +126,6 @@ We recommend this installation for the users of Intel® Distribution for Python.
conda install scikit-learn-intelex
```

### Install via Anaconda Cloud from Main Channel

> **_NOTE:_** You may not find the latest version on the Anaconda Main channel since it usually lags on versions deployed.

- Install into a newly created environment (recommended):

```bash
conda create -n env python=3.10 scikit-learn-intelex
```

> **_NOTE:_** If you do not specify the version of Python, the latest one is downloaded.

- Install into your current environment:
100 changes: 0 additions & 100 deletions daal4py/sklearn/_device_offload.py

This file was deleted.

3 changes: 0 additions & 3 deletions daal4py/sklearn/cluster/dbscan.py
@@ -24,7 +24,6 @@

import daal4py

from .._device_offload import support_usm_ndarray
from .._n_jobs_support import control_n_jobs
from .._utils import PatchingConditionsChain, getFPType, make2d, sklearn_check_version

@@ -83,7 +82,6 @@ def __init__(
self.p = p
self.n_jobs = n_jobs

@support_usm_ndarray()
def fit(self, X, y=None, sample_weight=None):
if sklearn_check_version("1.2"):
self._validate_params()
@@ -160,7 +158,6 @@ def fit(self, X, y=None, sample_weight=None):
return self
return super().fit(X, y, sample_weight=sample_weight)

@support_usm_ndarray()
def fit_predict(self, X, y=None, sample_weight=None):
return super().fit_predict(X, y, sample_weight)

8 changes: 0 additions & 8 deletions daal4py/sklearn/cluster/k_means.py
@@ -34,7 +34,6 @@

import daal4py

from .._device_offload import support_usm_ndarray
from .._n_jobs_support import control_n_jobs
from .._utils import PatchingConditionsChain, getFPType, sklearn_check_version

@@ -575,31 +574,24 @@ def __init__(
algorithm=algorithm,
)

@support_usm_ndarray()
def fit(self, X, y=None, sample_weight=None):
return _fit(self, X, y=y, sample_weight=sample_weight)

if sklearn_check_version("1.5"):

@support_usm_ndarray()
def predict(self, X):
return _predict(self, X)

else:

@support_usm_ndarray()
def predict(
self, X, sample_weight="deprecated" if sklearn_check_version("1.3") else None
):
return _predict(self, X, sample_weight=sample_weight)

@support_usm_ndarray()
def fit_predict(self, X, y=None, sample_weight=None):
return super().fit_predict(X, y, sample_weight)

score = support_usm_ndarray()(KMeans_original.score)

fit.__doc__ = KMeans_original.fit.__doc__
predict.__doc__ = KMeans_original.predict.__doc__
fit_predict.__doc__ = KMeans_original.fit_predict.__doc__
score.__doc__ = KMeans_original.score.__doc__
5 changes: 0 additions & 5 deletions daal4py/sklearn/decomposition/_pca.py
@@ -25,7 +25,6 @@

import daal4py

from .._device_offload import support_usm_ndarray
from .._n_jobs_support import control_n_jobs
from .._utils import PatchingConditionsChain, getFPType, sklearn_check_version

@@ -376,7 +375,6 @@ def _transform_daal4py(self, X, whiten=False, scale_eigenvalues=True, check_X=Tr

if sklearn_check_version("1.3"):

@support_usm_ndarray()
@_fit_context(prefer_skip_nested_validation=True)
def fit(self, X, y=None):
"""Fit the model with X.
@@ -400,7 +398,6 @@ def fit(self, X, y=None):

else:

@support_usm_ndarray()
def fit(self, X, y=None):
"""Fit the model with X.
@@ -431,7 +428,6 @@ def fit(self, X, y=None):
self._fit(X)
return self

@support_usm_ndarray()
def transform(self, X):
"""
Apply dimensionality reduction to X.
@@ -466,7 +462,6 @@ def transform(self, X):
)
return PCA_original.transform(self, X)

@support_usm_ndarray()
def fit_transform(self, X, y=None):
"""
Fit the model with X and apply the dimensionality reduction on X.