From 63a92f8337da224450547b1b5dcf04983fa6871d Mon Sep 17 00:00:00 2001 From: Ian Faust Date: Fri, 3 Jan 2025 23:53:50 +0100 Subject: [PATCH 01/61] Update build_backend.py --- scripts/build_backend.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/scripts/build_backend.py b/scripts/build_backend.py index f3546f480e..60c4c16fb1 100755 --- a/scripts/build_backend.py +++ b/scripts/build_backend.py @@ -131,16 +131,20 @@ def custom_build_cmake_clib( cmake_args += ["-DADD_ONEDAL_RPATH=ON"] cpu_count = multiprocessing.cpu_count() + memfree = sys.maxsize # limit parallel cmake jobs if memory size is insufficient - # TODO: add on all platforms if IS_LIN: with open("/proc/meminfo", "r") as meminfo_file_obj: memfree = meminfo_file_obj.read().split("\n")[1].split(" ") while "" in memfree: memfree.remove("") - memfree = int(memfree[1]) # total memory in kB - cpu_count = min(cpu_count, floor(max(1, memfree / 2**20))) + memfree = int(memfree[1]) # total free physical memory in kB + elif IS_WIN: + txt = subprocess.run(["powershell","Get-CIMInstance","Win32_OperatingSystem","|","Select","FreePhysicalMemory"], stdout=subprocess.PIPE, text=True) + memfree = int(txt.stdout.strip().split(" ")[-1]) # total free physical memory in kB + mem_per_proc = 2**20 # approximately 1GB + cpu_count = min(cpu_count, floor(max(1, memfree / mem_per_proc))) make_args = ["cmake", "--build", abs_build_temp_path, "-j " + str(cpu_count)] make_install_args = [ From 0a65818bde4a19968a5b96dec6a70c2611aa517c Mon Sep 17 00:00:00 2001 From: Ian Faust Date: Fri, 3 Jan 2025 23:54:37 +0100 Subject: [PATCH 02/61] Update build_backend.py --- scripts/build_backend.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/build_backend.py b/scripts/build_backend.py index 60c4c16fb1..4bdfc763c1 100755 --- a/scripts/build_backend.py +++ b/scripts/build_backend.py @@ -140,7 +140,7 @@ def custom_build_cmake_clib( memfree.remove("") memfree = int(memfree[1]) # total free physical memory in kB elif IS_WIN: - txt = subprocess.run(["powershell","Get-CIMInstance","Win32_OperatingSystem","|","Select","FreePhysicalMemory"], stdout=subprocess.PIPE, text=True) + txt = subprocess.run(["powershell.exe","Get-CIMInstance","Win32_OperatingSystem","|","Select","FreePhysicalMemory"], stdout=subprocess.PIPE, text=True) memfree = int(txt.stdout.strip().split(" ")[-1]) # total free physical memory in kB mem_per_proc = 2**20 # approximately 1GB From 2df497fe09199138664e639eccdc6f1193d0b3a0 Mon Sep 17 00:00:00 2001 From: "Faust, Ian" Date: Fri, 3 Jan 2025 23:56:49 +0100 Subject: [PATCH 03/61] formatting --- scripts/build_backend.py | 17 +++++++++++++++-- 1 file changed, 15 insertions(+), 2 deletions(-) diff --git a/scripts/build_backend.py b/scripts/build_backend.py index 4bdfc763c1..2d33e169e5 100755 --- a/scripts/build_backend.py +++ b/scripts/build_backend.py @@ -140,8 +140,21 @@ def custom_build_cmake_clib( memfree.remove("") memfree = int(memfree[1]) # total free physical memory in kB elif IS_WIN: - txt = subprocess.run(["powershell.exe","Get-CIMInstance","Win32_OperatingSystem","|","Select","FreePhysicalMemory"], stdout=subprocess.PIPE, text=True) - memfree = int(txt.stdout.strip().split(" ")[-1]) # total free physical memory in kB + txt = subprocess.run( + [ + "powershell.exe", + "Get-CIMInstance", + "Win32_OperatingSystem", + "|", + "Select", + "FreePhysicalMemory", + ], + stdout=subprocess.PIPE, + text=True, + ) + memfree = int( + txt.stdout.strip().split(" ")[-1] + ) # total free physical memory in kB 
mem_per_proc = 2**20 # approximately 1GB cpu_count = min(cpu_count, floor(max(1, memfree / mem_per_proc))) From 38985d3b747572626773f48beac72aeec33e09bf Mon Sep 17 00:00:00 2001 From: "Faust, Ian" Date: Sat, 4 Jan 2025 00:26:55 +0100 Subject: [PATCH 04/61] fix memfree --- scripts/build_backend.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/scripts/build_backend.py b/scripts/build_backend.py index 2d33e169e5..34247e7c14 100755 --- a/scripts/build_backend.py +++ b/scripts/build_backend.py @@ -131,7 +131,8 @@ def custom_build_cmake_clib( cmake_args += ["-DADD_ONEDAL_RPATH=ON"] cpu_count = multiprocessing.cpu_count() - memfree = sys.maxsize + # convert to max supported pointer to a memory size in kB + memfree = sys.maxsize // (128 if sys.maxisze > 2**32 else 256) # limit parallel cmake jobs if memory size is insufficient if IS_LIN: with open("/proc/meminfo", "r") as meminfo_file_obj: From ee7cbf91bb442ed554e15d5d059f0271939f90f3 Mon Sep 17 00:00:00 2001 From: Ian Faust Date: Sat, 4 Jan 2025 00:42:52 +0100 Subject: [PATCH 05/61] Update build_backend.py --- scripts/build_backend.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/build_backend.py b/scripts/build_backend.py index 34247e7c14..837f36817f 100755 --- a/scripts/build_backend.py +++ b/scripts/build_backend.py @@ -132,7 +132,7 @@ def custom_build_cmake_clib( cpu_count = multiprocessing.cpu_count() # convert to max supported pointer to a memory size in kB - memfree = sys.maxsize // (128 if sys.maxisze > 2**32 else 256) + memfree = sys.maxsize // (128 if sys.maxsize > 2**32 else 256) # limit parallel cmake jobs if memory size is insufficient if IS_LIN: with open("/proc/meminfo", "r") as meminfo_file_obj: From 8d427e896b295808eac4d56aa9be8ea8ed2c3692 Mon Sep 17 00:00:00 2001 From: Ian Faust Date: Sat, 4 Jan 2025 11:22:35 +0100 Subject: [PATCH 06/61] Update build_backend.py --- scripts/build_backend.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/build_backend.py b/scripts/build_backend.py index 837f36817f..f70815d4fe 100755 --- a/scripts/build_backend.py +++ b/scripts/build_backend.py @@ -143,7 +143,7 @@ def custom_build_cmake_clib( elif IS_WIN: txt = subprocess.run( [ - "powershell.exe", + "powershell", "Get-CIMInstance", "Win32_OperatingSystem", "|", From 73d787bb1809132dcda395217d5e2378f92080c3 Mon Sep 17 00:00:00 2001 From: Ian Faust Date: Tue, 7 Jan 2025 09:57:20 +0100 Subject: [PATCH 07/61] Update build_backend.py --- scripts/build_backend.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/build_backend.py b/scripts/build_backend.py index f70815d4fe..22cfb94a71 100755 --- a/scripts/build_backend.py +++ b/scripts/build_backend.py @@ -132,7 +132,7 @@ def custom_build_cmake_clib( cpu_count = multiprocessing.cpu_count() # convert to max supported pointer to a memory size in kB - memfree = sys.maxsize // (128 if sys.maxsize > 2**32 else 256) + memfree = sys.maxsize >> 10 # limit parallel cmake jobs if memory size is insufficient if IS_LIN: with open("/proc/meminfo", "r") as meminfo_file_obj: @@ -157,8 +157,8 @@ def custom_build_cmake_clib( txt.stdout.strip().split(" ")[-1] ) # total free physical memory in kB - mem_per_proc = 2**20 # approximately 1GB - cpu_count = min(cpu_count, floor(max(1, memfree / mem_per_proc))) + mem_per_proc = 20 # 2**20 kB or 1GB + cpu_count = min(cpu_count, floor(max(1, memfree >> mem_per_proc))) make_args = ["cmake", "--build", abs_build_temp_path, "-j " + str(cpu_count)] make_install_args = [ From 
6bcc7367041a25dafd179a532c691713fe7e937e Mon Sep 17 00:00:00 2001 From: Ian Faust Date: Tue, 7 Jan 2025 10:29:38 +0100 Subject: [PATCH 08/61] Update build_backend.py --- scripts/build_backend.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/scripts/build_backend.py b/scripts/build_backend.py index 22cfb94a71..98e48e14e6 100755 --- a/scripts/build_backend.py +++ b/scripts/build_backend.py @@ -136,10 +136,10 @@ def custom_build_cmake_clib( # limit parallel cmake jobs if memory size is insufficient if IS_LIN: with open("/proc/meminfo", "r") as meminfo_file_obj: - memfree = meminfo_file_obj.read().split("\n")[1].split(" ") - while "" in memfree: - memfree.remove("") - memfree = int(memfree[1]) # total free physical memory in kB + next(meminfo_file_obj) # skip MemTotal + memfree = int( + next(meminfo_file_obj).strip().split()[-2] + ) # total free physical memory in kB elif IS_WIN: txt = subprocess.run( [ @@ -154,7 +154,7 @@ def custom_build_cmake_clib( text=True, ) memfree = int( - txt.stdout.strip().split(" ")[-1] + txt.stdout.strip().split()[-1] ) # total free physical memory in kB mem_per_proc = 20 # 2**20 kB or 1GB From 050adc82131b8b8d1cd1043a08073b12966785fe Mon Sep 17 00:00:00 2001 From: "Faust, Ian" Date: Tue, 7 Jan 2025 11:07:24 +0100 Subject: [PATCH 09/61] formatting --- scripts/build_backend.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/scripts/build_backend.py b/scripts/build_backend.py index 98e48e14e6..0159c8e5c2 100755 --- a/scripts/build_backend.py +++ b/scripts/build_backend.py @@ -153,9 +153,7 @@ def custom_build_cmake_clib( stdout=subprocess.PIPE, text=True, ) - memfree = int( - txt.stdout.strip().split()[-1] - ) # total free physical memory in kB + memfree = int(txt.stdout.strip().split()[-1]) # total free physical memory in kB mem_per_proc = 20 # 2**20 kB or 1GB cpu_count = min(cpu_count, floor(max(1, memfree >> mem_per_proc))) From e9afcb356253596a01a129b8380654ac496bff3f Mon Sep 17 00:00:00 2001 From: "Faust, Ian" Date: Fri, 10 Jan 2025 00:26:45 +0100 Subject: [PATCH 10/61] see where CI fails --- scripts/build_backend.py | 28 ++-------------------------- setup.py | 23 +++++++++++++++++++++++ 2 files changed, 25 insertions(+), 26 deletions(-) diff --git a/scripts/build_backend.py b/scripts/build_backend.py index 0159c8e5c2..1da9bf3312 100755 --- a/scripts/build_backend.py +++ b/scripts/build_backend.py @@ -131,32 +131,8 @@ def custom_build_cmake_clib( cmake_args += ["-DADD_ONEDAL_RPATH=ON"] cpu_count = multiprocessing.cpu_count() - # convert to max supported pointer to a memory size in kB - memfree = sys.maxsize >> 10 - # limit parallel cmake jobs if memory size is insufficient - if IS_LIN: - with open("/proc/meminfo", "r") as meminfo_file_obj: - next(meminfo_file_obj) # skip MemTotal - memfree = int( - next(meminfo_file_obj).strip().split()[-2] - ) # total free physical memory in kB - elif IS_WIN: - txt = subprocess.run( - [ - "powershell", - "Get-CIMInstance", - "Win32_OperatingSystem", - "|", - "Select", - "FreePhysicalMemory", - ], - stdout=subprocess.PIPE, - text=True, - ) - memfree = int(txt.stdout.strip().split()[-1]) # total free physical memory in kB - - mem_per_proc = 20 # 2**20 kB or 1GB - cpu_count = min(cpu_count, floor(max(1, memfree >> mem_per_proc))) + max_jobs = os.getenv("MAX_JOBS") + cpu_count = min(cpu_count, max(1, max_jobs if max_jobs else cpu_count)) make_args = ["cmake", "--build", abs_build_temp_path, "-j " + str(cpu_count)] make_install_args = [ diff --git a/setup.py b/setup.py index 
8166f00d07..e99832d8b0 100644 --- a/setup.py +++ b/setup.py @@ -16,6 +16,29 @@ # limitations under the License. # ============================================================================== +# Environment variables which impact build process: +# +# DALROOT (required) +# folder location of oneDAL +# +# MPIROOT (required if NO_DIST not set) +# folder location of MPI +# +# NO_DIST (default: false) +# build sklearnex without MPI support +# +# NO_DPC (default: false) +# build oneDAL interfaces without SYCL support +# +# NO_STREAM (default: false) +# disables building daal4py with streaming mode +# +# OFF_ONEDAL_IFACE (default: false) +# do not build or use oneDAL interfaces (e.g. DAAL support only) +# +# MAX_JOBS (default: number of processors) +# maximum number of jobs used for compilation + import glob # System imports From 9964ffd67c12fc365f97e3b31b2bae81384818fc Mon Sep 17 00:00:00 2001 From: Ian Faust Date: Sun, 12 Jan 2025 22:20:54 +0100 Subject: [PATCH 11/61] Update build_backend.py --- scripts/build_backend.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/scripts/build_backend.py b/scripts/build_backend.py index 1da9bf3312..eba2449e55 100755 --- a/scripts/build_backend.py +++ b/scripts/build_backend.py @@ -131,8 +131,7 @@ def custom_build_cmake_clib( cmake_args += ["-DADD_ONEDAL_RPATH=ON"] cpu_count = multiprocessing.cpu_count() - max_jobs = os.getenv("MAX_JOBS") - cpu_count = min(cpu_count, max(1, max_jobs if max_jobs else cpu_count)) + cpu_count = min(cpu_count, max(1, os.getenv("MAX_JOBS", cpu_count))) make_args = ["cmake", "--build", abs_build_temp_path, "-j " + str(cpu_count)] make_install_args = [ From d54fb0d1f417a0cdafd6b9044a9382d1c17e3d30 Mon Sep 17 00:00:00 2001 From: Ian Faust Date: Sun, 12 Jan 2025 22:24:02 +0100 Subject: [PATCH 12/61] Update setup.py --- setup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/setup.py b/setup.py index e99832d8b0..f7dfbf9c57 100644 --- a/setup.py +++ b/setup.py @@ -19,10 +19,10 @@ # Environment variables which impact build process: # # DALROOT (required) -# folder location of oneDAL +# oneDAL root folder (containing `bin`, `lib`, and `include` folders) # # MPIROOT (required if NO_DIST not set) -# folder location of MPI +# MPI root folder (containing `bin`, `lib`, and `include` folders) # # NO_DIST (default: false) # build sklearnex without MPI support From ea3aaaa501bad3f47f4ded9bc702f21e1a913d3b Mon Sep 17 00:00:00 2001 From: Ian Faust Date: Sun, 12 Jan 2025 22:27:11 +0100 Subject: [PATCH 13/61] Update setup.py From 14a114a82881a1dc22849b84d58628113439b5fb Mon Sep 17 00:00:00 2001 From: Ian Faust Date: Sun, 12 Jan 2025 22:32:18 +0100 Subject: [PATCH 14/61] Update INSTALL.md --- INSTALL.md | 1 + 1 file changed, 1 insertion(+) diff --git a/INSTALL.md b/INSTALL.md index f05d7a4cb2..6f62ada6a8 100755 --- a/INSTALL.md +++ b/INSTALL.md @@ -172,6 +172,7 @@ The build-process (using setup.py) happens in 4 stages: * ``NO_STREAM``: set to '1', 'yes' or alike to build without support for streaming mode * ``NO_DPC``: set to '1', 'yes' or alike to build without support of oneDAL DPC++ interfaces * ``OFF_ONEDAL_IFACE``: set to '1' to build without the support of oneDAL interfaces +* ``MAX_JOBS``: max number processes for building sklearnex, default is the number of cpus **Note:** in order to use distributed mode, `mpi4py` is also required, and needs to be built with the same MPI backend as scikit-learn-intelex. 
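The MAX_JOBS handling the patches above converge toward amounts to clamping an optional environment override between 1 and the CPU count. A minimal, self-contained sketch of that logic (the helper name is hypothetical and this is illustrative only, not the literal patch contents; note the explicit `int()` conversion, since `os.getenv` returns a string whenever the variable is set):

```python
import os


def resolve_build_jobs():
    """Sketch: pick the number of parallel cmake jobs from MAX_JOBS."""
    cpu_count = os.cpu_count() or 1
    requested = os.getenv("MAX_JOBS")  # a string when set, else None
    jobs = int(requested) if requested else cpu_count
    # never exceed the machine, never drop below one job
    return max(1, min(cpu_count, jobs))
```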
From 5a9c55440812c9c4ff703cfffb0265f3799a48e7 Mon Sep 17 00:00:00 2001 From: Ian Faust Date: Sun, 12 Jan 2025 22:33:40 +0100 Subject: [PATCH 15/61] Update INSTALL.md --- INSTALL.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/INSTALL.md b/INSTALL.md index 6f62ada6a8..16a9960208 100755 --- a/INSTALL.md +++ b/INSTALL.md @@ -172,7 +172,7 @@ The build-process (using setup.py) happens in 4 stages: * ``NO_STREAM``: set to '1', 'yes' or alike to build without support for streaming mode * ``NO_DPC``: set to '1', 'yes' or alike to build without support of oneDAL DPC++ interfaces * ``OFF_ONEDAL_IFACE``: set to '1' to build without the support of oneDAL interfaces -* ``MAX_JOBS``: max number processes for building sklearnex, default is the number of cpus +* ``MAX_JOBS``: maximum number processes for building scikit-learn-intelex, default is the number of cpus **Note:** in order to use distributed mode, `mpi4py` is also required, and needs to be built with the same MPI backend as scikit-learn-intelex. From efebe4997d5d21c7bbbbafe061c4b7d7c8d24b91 Mon Sep 17 00:00:00 2001 From: Ian Faust Date: Sun, 12 Jan 2025 22:35:06 +0100 Subject: [PATCH 16/61] Update setup.py --- setup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/setup.py b/setup.py index f7dfbf9c57..0775ca0947 100644 --- a/setup.py +++ b/setup.py @@ -19,10 +19,10 @@ # Environment variables which impact build process: # # DALROOT (required) -# oneDAL root folder (containing `bin`, `lib`, and `include` folders) +# oneDAL root path (folder containing `bin`, `lib`, and `include` folders) # # MPIROOT (required if NO_DIST not set) -# MPI root folder (containing `bin`, `lib`, and `include` folders) +# MPI root path (folder containing `bin`, `lib`, and `include` folders) # # NO_DIST (default: false) # build sklearnex without MPI support From 912b2c66a32d5efa01a0532e0a00ec21616d3ea0 Mon Sep 17 00:00:00 2001 From: Ian Faust Date: Sun, 12 Jan 2025 22:36:18 +0100 Subject: [PATCH 17/61] Update setup.py --- setup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/setup.py b/setup.py index 0775ca0947..029207dd56 100644 --- a/setup.py +++ b/setup.py @@ -19,10 +19,10 @@ # Environment variables which impact build process: # # DALROOT (required) -# oneDAL root path (folder containing `bin`, `lib`, and `include` folders) +# oneDAL root path (folder containing `bin`,`include`, and `lib` folders) # # MPIROOT (required if NO_DIST not set) -# MPI root path (folder containing `bin`, `lib`, and `include` folders) +# MPI root path (folder containing `bin`,`include`, and `lib` folders) # # NO_DIST (default: false) # build sklearnex without MPI support From ff335f194660ab1bfb62111253b54eeddd45dd6a Mon Sep 17 00:00:00 2001 From: Ian Faust Date: Sun, 12 Jan 2025 22:37:00 +0100 Subject: [PATCH 18/61] Update setup.py --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 029207dd56..b2263486b2 100644 --- a/setup.py +++ b/setup.py @@ -16,7 +16,7 @@ # limitations under the License. 
# ============================================================================== -# Environment variables which impact build process: +# Environment variables which impact the build process: # # DALROOT (required) # oneDAL root path (folder containing `bin`,`include`, and `lib` folders) From 20be65011f34ed769ca18fa30d2728db78c5fd70 Mon Sep 17 00:00:00 2001 From: Ian Faust Date: Mon, 13 Jan 2025 11:33:33 +0100 Subject: [PATCH 19/61] Update setup.py --- setup.py | 51 ++++++++++++++++++++++++--------------------------- 1 file changed, 24 insertions(+), 27 deletions(-) diff --git a/setup.py b/setup.py index b2263486b2..b9422e9754 100644 --- a/setup.py +++ b/setup.py @@ -36,7 +36,7 @@ # OFF_ONEDAL_IFACE (default: false) # do not build or use oneDAL interfaces (e.g. DAAL support only) # -# MAX_JOBS (default: number of processors) +# NTHREADS (default: number of processors) # maximum number of jobs used for compilation import glob @@ -50,6 +50,7 @@ import time from concurrent.futures import ThreadPoolExecutor from ctypes.util import find_library +from functools import partial from os.path import join as jp from sysconfig import get_config_vars @@ -440,33 +441,19 @@ def build_extensions(self): class custom_build: def run(self): + cxx = os.getenv("CXX", "cl" if IS_WIN else "g++") + build_onedal = lambda iface: build.backend.custom_build_cmake_clib(iface=iface, + cxx=cxx, + onedal_major_binary_version=ONEDAL_MAJOR_BINARY_VERSION, + no_dist=no_dist, + use_parameters_lib=use_parameters_lib, + use_abs_rpath=USE_ABS_RPATH) if is_onedal_iface: - cxx = os.getenv("CXX", "cl" if IS_WIN else "g++") - build_backend.custom_build_cmake_clib( - iface="host", - cxx=cxx, - onedal_major_binary_version=ONEDAL_MAJOR_BINARY_VERSION, - no_dist=no_dist, - use_parameters_lib=use_parameters_lib, - use_abs_rpath=USE_ABS_RPATH, - ) - if dpcpp: - if is_onedal_iface: - build_backend.custom_build_cmake_clib( - iface="dpc", - onedal_major_binary_version=ONEDAL_MAJOR_BINARY_VERSION, - no_dist=no_dist, - use_parameters_lib=use_parameters_lib, - use_abs_rpath=USE_ABS_RPATH, - ) + build_onedal("host") + if dpcpp: + build_onedal("dpc") if build_distribute: - build_backend.custom_build_cmake_clib( - iface="spmd_dpc", - onedal_major_binary_version=ONEDAL_MAJOR_BINARY_VERSION, - no_dist=no_dist, - use_parameters_lib=use_parameters_lib, - use_abs_rpath=USE_ABS_RPATH, - ) + build_onedal("spmd_dpc") def post_build(self): if IS_MAC: @@ -492,13 +479,23 @@ def post_build(self): class develop(orig_develop.develop, custom_build): - def run(self): + def finalize_options(self) + # set parallel execution to n_threads + super().finalize_options() + self.parallel = n_threads + + def run(self): custom_build.run(self) super().run() custom_build.post_build(self) class build(orig_build.build, custom_build): + def finalize_options(self) + # set parallel execution to n_threads + super().finalize_options() + self.parallel = n_threads + def run(self): custom_build.run(self) super().run() From fb93809b5e51691ae5f31b59f46d88b1c5761501 Mon Sep 17 00:00:00 2001 From: "Faust, Ian" Date: Mon, 13 Jan 2025 11:40:33 +0100 Subject: [PATCH 20/61] switch to NTHREADS --- INSTALL.md | 2 +- scripts/build_backend.py | 5 ++--- setup.py | 30 ++++++++++++++++-------------- 3 files changed, 19 insertions(+), 18 deletions(-) diff --git a/INSTALL.md b/INSTALL.md index 16a9960208..e1f28bdc97 100755 --- a/INSTALL.md +++ b/INSTALL.md @@ -172,7 +172,7 @@ The build-process (using setup.py) happens in 4 stages: * ``NO_STREAM``: set to '1', 'yes' or alike to build without support for 
streaming mode * ``NO_DPC``: set to '1', 'yes' or alike to build without support of oneDAL DPC++ interfaces * ``OFF_ONEDAL_IFACE``: set to '1' to build without the support of oneDAL interfaces -* ``MAX_JOBS``: maximum number processes for building scikit-learn-intelex, default is the number of cpus +* ``NTHREADS``: number processes for building scikit-learn-intelex, default is the number of cpus **Note:** in order to use distributed mode, `mpi4py` is also required, and needs to be built with the same MPI backend as scikit-learn-intelex. diff --git a/scripts/build_backend.py b/scripts/build_backend.py index eba2449e55..9086bc264d 100755 --- a/scripts/build_backend.py +++ b/scripts/build_backend.py @@ -49,6 +49,7 @@ def custom_build_cmake_clib( no_dist=True, use_parameters_lib=True, use_abs_rpath=False, + n_threads=1, ): import pybind11 @@ -130,9 +131,7 @@ def custom_build_cmake_clib( if use_abs_rpath: cmake_args += ["-DADD_ONEDAL_RPATH=ON"] - cpu_count = multiprocessing.cpu_count() - cpu_count = min(cpu_count, max(1, os.getenv("MAX_JOBS", cpu_count))) - make_args = ["cmake", "--build", abs_build_temp_path, "-j " + str(cpu_count)] + make_args = ["cmake", "--build", abs_build_temp_path, "-j " + n_threads] make_install_args = [ "cmake", diff --git a/setup.py b/setup.py index b9422e9754..bd2bce08ae 100644 --- a/setup.py +++ b/setup.py @@ -76,8 +76,7 @@ IS_LIN = False dal_root = os.environ.get("DALROOT") -n_threads = int(os.environ.get("NTHREADS", os.cpu_count() or 1)) - +n_threads = min(os.cpu_count(), max(1, os.getenv("NTHREADS", os.cpu_count()))) arch_dir = plt.machine() plt_dict = {"x86_64": "intel64", "AMD64": "intel64", "aarch64": "arm"} arch_dir = plt_dict[arch_dir] if arch_dir in plt_dict else arch_dir @@ -442,12 +441,14 @@ def build_extensions(self): class custom_build: def run(self): cxx = os.getenv("CXX", "cl" if IS_WIN else "g++") - build_onedal = lambda iface: build.backend.custom_build_cmake_clib(iface=iface, - cxx=cxx, - onedal_major_binary_version=ONEDAL_MAJOR_BINARY_VERSION, - no_dist=no_dist, - use_parameters_lib=use_parameters_lib, - use_abs_rpath=USE_ABS_RPATH) + build_onedal = lambda iface: build.backend.custom_build_cmake_clib( + iface=iface, + cxx=cxx, + onedal_major_binary_version=ONEDAL_MAJOR_BINARY_VERSION, + no_dist=no_dist, + use_parameters_lib=use_parameters_lib, + use_abs_rpath=USE_ABS_RPATH, + ) if is_onedal_iface: build_onedal("host") if dpcpp: @@ -479,23 +480,24 @@ def post_build(self): class develop(orig_develop.develop, custom_build): - def finalize_options(self) - # set parallel execution to n_threads + def finalize_options(self): + # override setuptools.build finalize_options + # to set parallel execution to n_threads super().finalize_options() self.parallel = n_threads - - def run(self): + + def run(self): custom_build.run(self) super().run() custom_build.post_build(self) class build(orig_build.build, custom_build): - def finalize_options(self) + def finalize_options(self): # set parallel execution to n_threads super().finalize_options() self.parallel = n_threads - + def run(self): custom_build.run(self) super().run() From e64dd5487e31626555115ddf2f56a79a32bc86dd Mon Sep 17 00:00:00 2001 From: "Faust, Ian" Date: Mon, 13 Jan 2025 11:43:35 +0100 Subject: [PATCH 21/61] forgot to add n_threads --- setup.py | 1 + 1 file changed, 1 insertion(+) diff --git a/setup.py b/setup.py index bd2bce08ae..913a4fc81c 100644 --- a/setup.py +++ b/setup.py @@ -448,6 +448,7 @@ def run(self): no_dist=no_dist, use_parameters_lib=use_parameters_lib, use_abs_rpath=USE_ABS_RPATH, + 
n_threads=n_threads, ) if is_onedal_iface: build_onedal("host") From c981996e70ba863dbeea121a5331893b29199d12 Mon Sep 17 00:00:00 2001 From: "Faust, Ian" Date: Mon, 13 Jan 2025 11:53:14 +0100 Subject: [PATCH 22/61] fix mistake --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 913a4fc81c..45231d7b63 100644 --- a/setup.py +++ b/setup.py @@ -441,7 +441,7 @@ def build_extensions(self): class custom_build: def run(self): cxx = os.getenv("CXX", "cl" if IS_WIN else "g++") - build_onedal = lambda iface: build.backend.custom_build_cmake_clib( + build_onedal = lambda iface: build_backend.custom_build_cmake_clib( iface=iface, cxx=cxx, onedal_major_binary_version=ONEDAL_MAJOR_BINARY_VERSION, From 1269fda5b86aadb7dbb205040670e3b03db11d2c Mon Sep 17 00:00:00 2001 From: "Faust, Ian" Date: Mon, 13 Jan 2025 11:56:46 +0100 Subject: [PATCH 23/61] fix mistake2 --- scripts/build_backend.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/build_backend.py b/scripts/build_backend.py index 9086bc264d..5fb5a17253 100755 --- a/scripts/build_backend.py +++ b/scripts/build_backend.py @@ -131,7 +131,7 @@ def custom_build_cmake_clib( if use_abs_rpath: cmake_args += ["-DADD_ONEDAL_RPATH=ON"] - make_args = ["cmake", "--build", abs_build_temp_path, "-j " + n_threads] + make_args = ["cmake", "--build", abs_build_temp_path, "-j " + str(n_threads)] make_install_args = [ "cmake", From 35d12eeb3fbd98bcfaf7e8b38c4dce90fb68a32e Mon Sep 17 00:00:00 2001 From: "Faust, Ian" Date: Mon, 13 Jan 2025 11:57:33 +0100 Subject: [PATCH 24/61] remove import --- scripts/build_backend.py | 1 - 1 file changed, 1 deletion(-) diff --git a/scripts/build_backend.py b/scripts/build_backend.py index 5fb5a17253..30e5e4551e 100755 --- a/scripts/build_backend.py +++ b/scripts/build_backend.py @@ -17,7 +17,6 @@ # =============================================================================== import logging -import multiprocessing import os import platform as plt import subprocess From f905fe893a7c1fd895fa255c552bea0ee38a5278 Mon Sep 17 00:00:00 2001 From: Ian Faust Date: Mon, 13 Jan 2025 12:36:02 +0100 Subject: [PATCH 25/61] Update setup.py --- setup.py | 1 + 1 file changed, 1 insertion(+) diff --git a/setup.py b/setup.py index 45231d7b63..76cae422db 100644 --- a/setup.py +++ b/setup.py @@ -501,6 +501,7 @@ def finalize_options(self): def run(self): custom_build.run(self) + print(f"HELLO RIGHT HERE!! {self.parallel}") super().run() custom_build.post_build(self) From 035caa12a8c74e7ac5514d8a533a2cb5e5affa98 Mon Sep 17 00:00:00 2001 From: Ian Faust Date: Mon, 13 Jan 2025 12:54:23 +0100 Subject: [PATCH 26/61] Update setup.py --- setup.py | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/setup.py b/setup.py index 76cae422db..65668bc4e2 100644 --- a/setup.py +++ b/setup.py @@ -37,7 +37,7 @@ # do not build or use oneDAL interfaces (e.g. 
DAAL support only) # # NTHREADS (default: number of processors) -# maximum number of jobs used for compilation +# number of processes used for compilation, can be overridden with -j import glob @@ -428,14 +428,12 @@ def get_onedal_py_libs(): class parallel_build_ext(_build_ext): - def build_extensions(self): - with ThreadPoolExecutor(max_workers=n_threads) as executor: - result_list = [ - executor.submit(self.build_extension, ext) for ext in self.extensions - ] - assert all( - f.exception() is None for f in result_list - ), "There were errors building the extensions" + def finalize_options(self): + # set parallel execution to n_threads + super().finalize_options() + print("parallel_build_ext RAN") + if self.parallel is None: + self.parallel = n_threads class custom_build: @@ -485,7 +483,8 @@ def finalize_options(self): # override setuptools.build finalize_options # to set parallel execution to n_threads super().finalize_options() - self.parallel = n_threads + if self.parallel is None: + self.parallel = n_threads def run(self): custom_build.run(self) @@ -497,7 +496,8 @@ class build(orig_build.build, custom_build): def finalize_options(self): # set parallel execution to n_threads super().finalize_options() - self.parallel = n_threads + if self.parallel is None: + self.parallel = n_threads def run(self): custom_build.run(self) From 90b00beb701cd6eb25f4ffa151f1d27456897ddf Mon Sep 17 00:00:00 2001 From: Ian Faust Date: Mon, 13 Jan 2025 12:55:23 +0100 Subject: [PATCH 27/61] Update setup.py --- setup.py | 1 - 1 file changed, 1 deletion(-) diff --git a/setup.py b/setup.py index 65668bc4e2..bb70506726 100644 --- a/setup.py +++ b/setup.py @@ -48,7 +48,6 @@ import shutil import sys import time -from concurrent.futures import ThreadPoolExecutor from ctypes.util import find_library from functools import partial from os.path import join as jp From 6ccf3c23db5263af557e4de69acfe0d3efd6e5e8 Mon Sep 17 00:00:00 2001 From: Ian Faust Date: Mon, 13 Jan 2025 13:08:38 +0100 Subject: [PATCH 28/61] Update setup.py --- setup.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/setup.py b/setup.py index bb70506726..e2aeef3cbf 100644 --- a/setup.py +++ b/setup.py @@ -430,7 +430,6 @@ class parallel_build_ext(_build_ext): def finalize_options(self): # set parallel execution to n_threads super().finalize_options() - print("parallel_build_ext RAN") if self.parallel is None: self.parallel = n_threads @@ -500,7 +499,6 @@ def finalize_options(self): def run(self): custom_build.run(self) - print(f"HELLO RIGHT HERE!! 
{self.parallel}") super().run() custom_build.post_build(self) From 0750d9197074a3d87297e8f9de0901533016f5d7 Mon Sep 17 00:00:00 2001 From: "Faust, Ian" Date: Mon, 13 Jan 2025 14:05:11 +0100 Subject: [PATCH 29/61] monkeypatch a solution: --- setup.py | 21 +++++++++++++++------ 1 file changed, 15 insertions(+), 6 deletions(-) diff --git a/setup.py b/setup.py index e2aeef3cbf..2ec235e7f3 100644 --- a/setup.py +++ b/setup.py @@ -49,7 +49,7 @@ import sys import time from ctypes.util import find_library -from functools import partial +from multiprocessing import Pool from os.path import join as jp from sysconfig import get_config_vars @@ -427,11 +427,20 @@ def get_onedal_py_libs(): class parallel_build_ext(_build_ext): - def finalize_options(self): - # set parallel execution to n_threads - super().finalize_options() - if self.parallel is None: - self.parallel = n_threads + def build_extension(self, ext): + # monkeypatch a mulitprocess pool to multithread daal4py compilation + try: + p = Pool(n_threads) + base_compile = self.compiler.compile + + def parallel_compile(sources, **kwargs): + return p.map(lambda arg: base_compile(arg, **kwargs), sources) + + self.compiler.compile = parallel_compile + return super().build_extension(ext) + finally: + p.close() + self.compiler.compile = base_compile class custom_build: From 50661408714cc659536c7c403fcca3d2d773d1b0 Mon Sep 17 00:00:00 2001 From: "Faust, Ian" Date: Mon, 13 Jan 2025 14:06:50 +0100 Subject: [PATCH 30/61] add fix --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 2ec235e7f3..acc01c8990 100644 --- a/setup.py +++ b/setup.py @@ -75,7 +75,7 @@ IS_LIN = False dal_root = os.environ.get("DALROOT") -n_threads = min(os.cpu_count(), max(1, os.getenv("NTHREADS", os.cpu_count()))) +n_threads = min(os.cpu_count(), max(1, int(os.getenv("NTHREADS", os.cpu_count())))) arch_dir = plt.machine() plt_dict = {"x86_64": "intel64", "AMD64": "intel64", "aarch64": "arm"} arch_dir = plt_dict[arch_dir] if arch_dir in plt_dict else arch_dir From fd8a83dda9bd9f70ab7765e6b9e3e864103c0ec4 Mon Sep 17 00:00:00 2001 From: "Faust, Ian" Date: Mon, 13 Jan 2025 14:14:11 +0100 Subject: [PATCH 31/61] clean it up --- setup.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/setup.py b/setup.py index acc01c8990..6439b33644 100644 --- a/setup.py +++ b/setup.py @@ -427,10 +427,20 @@ def get_onedal_py_libs(): class parallel_build_ext(_build_ext): + def finalize_options(self): + # override setuptools.build finalize_options + # to set parallel execution to n_threads + super().finalize_options() + if self.parallel is None: + self.parallel = n_threads + def build_extension(self, ext): # monkeypatch a mulitprocess pool to multithread daal4py compilation + if self.parallel is None or self.parallel == 1: + return super().build_extension(ext) + try: - p = Pool(n_threads) + p = Pool(self.parallel) base_compile = self.compiler.compile def parallel_compile(sources, **kwargs): From 37c050b85921c72d95b9a7b630b17454c76276b2 Mon Sep 17 00:00:00 2001 From: "Faust, Ian" Date: Mon, 13 Jan 2025 14:29:17 +0100 Subject: [PATCH 32/61] remove all build_ext special classes --- setup.py | 32 ++------------------------------ 1 file changed, 2 insertions(+), 30 deletions(-) diff --git a/setup.py b/setup.py index 6439b33644..0a66a8d0df 100644 --- a/setup.py +++ b/setup.py @@ -37,7 +37,7 @@ # do not build or use oneDAL interfaces (e.g. 
DAAL support only) # # NTHREADS (default: number of processors) -# number of processes used for compilation, can be overridden with -j +# number of processes used for onedal pybind11 compilation import glob @@ -58,7 +58,6 @@ import setuptools.command.develop as orig_develop from Cython.Build import cythonize from setuptools import Extension, setup -from setuptools.command.build_ext import build_ext as _build_ext import scripts.build_backend as build_backend from scripts.package_helpers import get_packages_with_tests @@ -426,33 +425,6 @@ def get_onedal_py_libs(): return libs -class parallel_build_ext(_build_ext): - def finalize_options(self): - # override setuptools.build finalize_options - # to set parallel execution to n_threads - super().finalize_options() - if self.parallel is None: - self.parallel = n_threads - - def build_extension(self, ext): - # monkeypatch a mulitprocess pool to multithread daal4py compilation - if self.parallel is None or self.parallel == 1: - return super().build_extension(ext) - - try: - p = Pool(self.parallel) - base_compile = self.compiler.compile - - def parallel_compile(sources, **kwargs): - return p.map(lambda arg: base_compile(arg, **kwargs), sources) - - self.compiler.compile = parallel_compile - return super().build_extension(ext) - finally: - p.close() - self.compiler.compile = base_compile - - class custom_build: def run(self): cxx = os.getenv("CXX", "cl" if IS_WIN else "g++") @@ -617,7 +589,7 @@ def run(self): author_email="onedal.maintainers@intel.com", maintainer_email="onedal.maintainers@intel.com", project_urls=project_urls, - cmdclass={"develop": develop, "build": build, "build_ext": parallel_build_ext}, + cmdclass={"develop": develop, "build": build, classifiers=[ "Development Status :: 5 - Production/Stable", "Environment :: Console", From b1dee4224fc74b5b978ced6b0ed712806483262a Mon Sep 17 00:00:00 2001 From: "Faust, Ian" Date: Mon, 13 Jan 2025 14:31:04 +0100 Subject: [PATCH 33/61] cleanup --- INSTALL.md | 2 +- setup.py | 3 +-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/INSTALL.md b/INSTALL.md index e1f28bdc97..4fda48dc3e 100755 --- a/INSTALL.md +++ b/INSTALL.md @@ -172,7 +172,7 @@ The build-process (using setup.py) happens in 4 stages: * ``NO_STREAM``: set to '1', 'yes' or alike to build without support for streaming mode * ``NO_DPC``: set to '1', 'yes' or alike to build without support of oneDAL DPC++ interfaces * ``OFF_ONEDAL_IFACE``: set to '1' to build without the support of oneDAL interfaces -* ``NTHREADS``: number processes for building scikit-learn-intelex, default is the number of cpus +* ``NTHREADS``: number processes for building the onedal extension, default is the number of cpus **Note:** in order to use distributed mode, `mpi4py` is also required, and needs to be built with the same MPI backend as scikit-learn-intelex. 
diff --git a/setup.py b/setup.py index 0a66a8d0df..51bd2f276d 100644 --- a/setup.py +++ b/setup.py @@ -49,7 +49,6 @@ import sys import time from ctypes.util import find_library -from multiprocessing import Pool from os.path import join as jp from sysconfig import get_config_vars @@ -589,7 +588,7 @@ def run(self): author_email="onedal.maintainers@intel.com", maintainer_email="onedal.maintainers@intel.com", project_urls=project_urls, - cmdclass={"develop": develop, "build": build, + cmdclass={"develop": develop, "build": build} classifiers=[ "Development Status :: 5 - Production/Stable", "Environment :: Console", From 57dc078007ba2d5374317a3215f999f6118f078c Mon Sep 17 00:00:00 2001 From: "Faust, Ian" Date: Mon, 13 Jan 2025 14:38:41 +0100 Subject: [PATCH 34/61] last changes to conform with setuptools --- setup.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/setup.py b/setup.py index 51bd2f276d..29b6a4985e 100644 --- a/setup.py +++ b/setup.py @@ -427,6 +427,8 @@ def get_onedal_py_libs(): class custom_build: def run(self): cxx = os.getenv("CXX", "cl" if IS_WIN else "g++") + if self.parallel is None or self.parallel is True: + self.parallel = n_threads build_onedal = lambda iface: build_backend.custom_build_cmake_clib( iface=iface, cxx=cxx, @@ -434,7 +436,7 @@ def run(self): no_dist=no_dist, use_parameters_lib=use_parameters_lib, use_abs_rpath=USE_ABS_RPATH, - n_threads=n_threads, + n_threads=self.parallel, ) if is_onedal_iface: build_onedal("host") @@ -471,7 +473,7 @@ def finalize_options(self): # override setuptools.build finalize_options # to set parallel execution to n_threads super().finalize_options() - if self.parallel is None: + if self.parallel is None or self.parallel is True: self.parallel = n_threads def run(self): @@ -484,7 +486,7 @@ class build(orig_build.build, custom_build): def finalize_options(self): # set parallel execution to n_threads super().finalize_options() - if self.parallel is None: + if self.parallel is None or self.parallel is True: self.parallel = n_threads def run(self): From 575d93b3044d8d202f5716a8321e9a6c6cdc177a Mon Sep 17 00:00:00 2001 From: "Faust, Ian" Date: Mon, 13 Jan 2025 14:44:02 +0100 Subject: [PATCH 35/61] make better INSTALL.md comment --- INSTALL.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/INSTALL.md b/INSTALL.md index 4fda48dc3e..b5e0d9f068 100755 --- a/INSTALL.md +++ b/INSTALL.md @@ -172,7 +172,7 @@ The build-process (using setup.py) happens in 4 stages: * ``NO_STREAM``: set to '1', 'yes' or alike to build without support for streaming mode * ``NO_DPC``: set to '1', 'yes' or alike to build without support of oneDAL DPC++ interfaces * ``OFF_ONEDAL_IFACE``: set to '1' to build without the support of oneDAL interfaces -* ``NTHREADS``: number processes for building the onedal extension, default is the number of cpus +* ``NTHREADS``: number processes for building the onedal extension, default is the number of cpus. It is overridden by setuptool's `parallel`/ `-j` argument. **Note:** in order to use distributed mode, `mpi4py` is also required, and needs to be built with the same MPI backend as scikit-learn-intelex. 
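At this point in the series the thread count is routed through setuptools' own `--parallel`/`-j` machinery by overriding `finalize_options` on the `build` and `develop` commands. A stripped-down sketch of that pattern, assuming a recent setuptools that provides `setuptools.command.build` (the real commands in setup.py additionally drive the oneDAL cmake build):

```python
import os

import setuptools.command.build as orig_build


class build(orig_build.build):
    def finalize_options(self):
        super().finalize_options()
        # An explicit --parallel/-j N on the command line takes precedence;
        # otherwise default to an NTHREADS-style environment variable,
        # falling back to the number of CPUs.
        if self.parallel is None:
            env_jobs = os.getenv("NTHREADS")
            self.parallel = int(env_jobs) if env_jobs else (os.cpu_count() or 1)
```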
From 1eebf9455f31607d7a6c8b2072e0ea2050e28981 Mon Sep 17 00:00:00 2001 From: "Faust, Ian" Date: Mon, 13 Jan 2025 14:44:53 +0100 Subject: [PATCH 36/61] update --- INSTALL.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/INSTALL.md b/INSTALL.md index b5e0d9f068..72a6c684af 100755 --- a/INSTALL.md +++ b/INSTALL.md @@ -172,7 +172,7 @@ The build-process (using setup.py) happens in 4 stages: * ``NO_STREAM``: set to '1', 'yes' or alike to build without support for streaming mode * ``NO_DPC``: set to '1', 'yes' or alike to build without support of oneDAL DPC++ interfaces * ``OFF_ONEDAL_IFACE``: set to '1' to build without the support of oneDAL interfaces -* ``NTHREADS``: number processes for building the onedal extension, default is the number of cpus. It is overridden by setuptool's `parallel`/ `-j` argument. +* ``NTHREADS``: number processes for building the onedal extension, default is the number of cpus. It is overridden by setuptool's `--parallel`/ `-j` argument. **Note:** in order to use distributed mode, `mpi4py` is also required, and needs to be built with the same MPI backend as scikit-learn-intelex. From dc12a2c4a9a2b23b17a0f07591bdf138ab082d12 Mon Sep 17 00:00:00 2001 From: Ian Faust Date: Mon, 13 Jan 2025 14:52:22 +0100 Subject: [PATCH 37/61] Update setup.py --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 29b6a4985e..a9bbb3a881 100644 --- a/setup.py +++ b/setup.py @@ -590,7 +590,7 @@ def run(self): author_email="onedal.maintainers@intel.com", maintainer_email="onedal.maintainers@intel.com", project_urls=project_urls, - cmdclass={"develop": develop, "build": build} + cmdclass={"develop": develop, "build": build}, classifiers=[ "Development Status :: 5 - Production/Stable", "Environment :: Console", From 6f9f9121717404584eaf998fcf60dc5830665f86 Mon Sep 17 00:00:00 2001 From: Ian Faust Date: Tue, 14 Jan 2025 00:30:02 +0100 Subject: [PATCH 38/61] Update setup.py --- setup.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/setup.py b/setup.py index a9bbb3a881..954c093863 100644 --- a/setup.py +++ b/setup.py @@ -56,6 +56,7 @@ import setuptools.command.build as orig_build import setuptools.command.develop as orig_develop from Cython.Build import cythonize +from Cython.Distutils import build_ext from setuptools import Extension, setup import scripts.build_backend as build_backend @@ -470,7 +471,6 @@ def post_build(self): class develop(orig_develop.develop, custom_build): def finalize_options(self): - # override setuptools.build finalize_options # to set parallel execution to n_threads super().finalize_options() if self.parallel is None or self.parallel is True: @@ -484,6 +484,7 @@ def run(self): class build(orig_build.build, custom_build): def finalize_options(self): + # override setuptools.build finalize_options # set parallel execution to n_threads super().finalize_options() if self.parallel is None or self.parallel is True: @@ -590,7 +591,7 @@ def run(self): author_email="onedal.maintainers@intel.com", maintainer_email="onedal.maintainers@intel.com", project_urls=project_urls, - cmdclass={"develop": develop, "build": build}, + cmdclass={"develop": develop, "build": build, "build_ext": build_ext}, classifiers=[ "Development Status :: 5 - Production/Stable", "Environment :: Console", From 3dfc5924477acae6715a504916264ef5849c055c Mon Sep 17 00:00:00 2001 From: Ian Faust Date: Tue, 14 Jan 2025 09:46:09 +0100 Subject: [PATCH 39/61] Update setup.py --- setup.py | 9 +++++---- 1 file changed, 5 
insertions(+), 4 deletions(-) diff --git a/setup.py b/setup.py index 954c093863..46ad8d053f 100644 --- a/setup.py +++ b/setup.py @@ -35,9 +35,6 @@ # # OFF_ONEDAL_IFACE (default: false) # do not build or use oneDAL interfaces (e.g. DAAL support only) -# -# NTHREADS (default: number of processors) -# number of processes used for onedal pybind11 compilation import glob @@ -45,6 +42,7 @@ import os import pathlib import platform as plt +import re import shutil import sys import time @@ -74,7 +72,10 @@ IS_LIN = False dal_root = os.environ.get("DALROOT") -n_threads = min(os.cpu_count(), max(1, int(os.getenv("NTHREADS", os.cpu_count())))) +makeflags = os.getenv("MAKEFLAGS", "") +n_threads = re.findall(r"(?<=(? Date: Tue, 14 Jan 2025 13:15:47 +0100 Subject: [PATCH 40/61] lots of logic --- scripts/build_backend.py | 5 ++- setup.py | 66 +++++++++++++++++++++++++--------------- 2 files changed, 44 insertions(+), 27 deletions(-) diff --git a/scripts/build_backend.py b/scripts/build_backend.py index 30e5e4551e..d9526bc62f 100755 --- a/scripts/build_backend.py +++ b/scripts/build_backend.py @@ -21,7 +21,6 @@ import platform as plt import subprocess import sys -from math import floor from os.path import join as jp from sysconfig import get_config_var, get_paths @@ -48,7 +47,6 @@ def custom_build_cmake_clib( no_dist=True, use_parameters_lib=True, use_abs_rpath=False, - n_threads=1, ): import pybind11 @@ -130,7 +128,8 @@ def custom_build_cmake_clib( if use_abs_rpath: cmake_args += ["-DADD_ONEDAL_RPATH=ON"] - make_args = ["cmake", "--build", abs_build_temp_path, "-j " + str(n_threads)] + # the number of parallel processes is dictated by MAKEFLAGS (see setup.py) + make_args = ["cmake", "--build", abs_build_temp_path] make_install_args = [ "cmake", diff --git a/setup.py b/setup.py index 46ad8d053f..e18c3cd604 100644 --- a/setup.py +++ b/setup.py @@ -46,6 +46,7 @@ import shutil import sys import time +from contextlib import contextmanager from ctypes.util import find_library from os.path import join as jp from sysconfig import get_config_vars @@ -72,10 +73,7 @@ IS_LIN = False dal_root = os.environ.get("DALROOT") -makeflags = os.getenv("MAKEFLAGS", "") -n_threads = re.findall(r"(?<=(? Date: Tue, 14 Jan 2025 13:31:10 +0100 Subject: [PATCH 41/61] Update setup.py --- setup.py | 20 -------------------- 1 file changed, 20 deletions(-) diff --git a/setup.py b/setup.py index b257c7f709..28ed677375 100644 --- a/setup.py +++ b/setup.py @@ -16,26 +16,6 @@ # limitations under the License. # ============================================================================== -# Environment variables which impact the build process: -# -# DALROOT (required) -# oneDAL root path (folder containing `bin`,`include`, and `lib` folders) -# -# MPIROOT (required if NO_DIST not set) -# MPI root path (folder containing `bin`,`include`, and `lib` folders) -# -# NO_DIST (default: false) -# build sklearnex without MPI support -# -# NO_DPC (default: false) -# build oneDAL interfaces without SYCL support -# -# NO_STREAM (default: false) -# disables building daal4py with streaming mode -# -# OFF_ONEDAL_IFACE (default: false) -# do not build or use oneDAL interfaces (e.g. 
DAAL support only) - import glob # System imports From d8a63c65a2d811adb3a09b56a891afc22053ccf5 Mon Sep 17 00:00:00 2001 From: "Faust, Ian" Date: Tue, 14 Jan 2025 13:32:01 +0100 Subject: [PATCH 42/61] remove NTHREADS entirely --- INSTALL.md | 1 - 1 file changed, 1 deletion(-) diff --git a/INSTALL.md b/INSTALL.md index 6455542737..f60c1cb735 100755 --- a/INSTALL.md +++ b/INSTALL.md @@ -172,7 +172,6 @@ The build-process (using setup.py) happens in 4 stages: * ``NO_STREAM``: set to '1', 'yes' or alike to build without support for streaming mode * ``NO_DPC``: set to '1', 'yes' or alike to build without support of oneDAL DPC++ interfaces * ``OFF_ONEDAL_IFACE``: set to '1' to build without the support of oneDAL interfaces -* ``NTHREADS``: number processes for building the onedal extension, default is the number of cpus. It is overridden by setuptool's `--parallel`/ `-j` argument. **Note:** in order to use distributed mode, `mpi4py` is also required, and needs to be built with the same MPI backend as scikit-learn-intelex. From 83d855e704be97ed4417bef9ca22e28a3562bc17 Mon Sep 17 00:00:00 2001 From: "Faust, Ian" Date: Tue, 14 Jan 2025 13:33:37 +0100 Subject: [PATCH 43/61] bad merge --- scripts/build_backend.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/scripts/build_backend.py b/scripts/build_backend.py index 1c92ba0049..5dbd55b978 100755 --- a/scripts/build_backend.py +++ b/scripts/build_backend.py @@ -129,6 +129,9 @@ def custom_build_cmake_clib( if use_abs_rpath: cmake_args += ["-DADD_ONEDAL_RPATH=ON"] + if use_gcov: + cmake_args += ["-DSKLEARNEX_GCOV=ON"] + # the number of parallel processes is dictated by MAKEFLAGS (see setup.py) make_args = ["cmake", "--build", abs_build_temp_path] From ed661c89729d0d6322386fdda8416651d11e27dd Mon Sep 17 00:00:00 2001 From: "Faust, Ian" Date: Tue, 14 Jan 2025 13:34:12 +0100 Subject: [PATCH 44/61] formatting --- scripts/build_backend.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/build_backend.py b/scripts/build_backend.py index 5dbd55b978..c1e5d5716f 100755 --- a/scripts/build_backend.py +++ b/scripts/build_backend.py @@ -131,7 +131,7 @@ def custom_build_cmake_clib( if use_gcov: cmake_args += ["-DSKLEARNEX_GCOV=ON"] - + # the number of parallel processes is dictated by MAKEFLAGS (see setup.py) make_args = ["cmake", "--build", abs_build_temp_path] From 5317f09f511123f7ad7e4b852c12cf4bb5a23834 Mon Sep 17 00:00:00 2001 From: "Faust, Ian" Date: Tue, 14 Jan 2025 13:36:31 +0100 Subject: [PATCH 45/61] fix logic --- setup.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/setup.py b/setup.py index 28ed677375..e698f20efc 100644 --- a/setup.py +++ b/setup.py @@ -459,11 +459,13 @@ def set_nthreads(n_threads): if makeflags: # extract "-j" option value set in makeflags - if re.findall(r"(?<=(? Date: Tue, 14 Jan 2025 15:02:04 +0100 Subject: [PATCH 46/61] fixed logic for cythonize --- setup.py | 72 +++++++++++++++++++++++++++++--------------------------- 1 file changed, 37 insertions(+), 35 deletions(-) diff --git a/setup.py b/setup.py index e698f20efc..3c8f5b46e2 100644 --- a/setup.py +++ b/setup.py @@ -312,6 +312,41 @@ def get_build_options(): return eca, ela, include_dir_plat +@contextmanager +def set_nthreads(n_threads): + """MAKEFLAGS is used by the onedal cmake and cythonize to control the number + of processes. If it is set via the setup.py commmand with the --parallel or + -j argument, it will supercede this value. 
When both are not set, it will + default to the number of cpus, n_threads should be a positive integer, None, + or True""" + makeflags = os.getenv("MAKEFLAGS", None) + # True is used by setuptools to indicate cpu_count for `parallel` + # None is default for setuptools for single threading + orig_n_threads = re.findall(r"(?<=(? Date: Tue, 14 Jan 2025 15:15:26 +0100 Subject: [PATCH 47/61] switch None to " --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 3c8f5b46e2..a6093fb780 100644 --- a/setup.py +++ b/setup.py @@ -319,7 +319,7 @@ def set_nthreads(n_threads): -j argument, it will supercede this value. When both are not set, it will default to the number of cpus, n_threads should be a positive integer, None, or True""" - makeflags = os.getenv("MAKEFLAGS", None) + makeflags = os.getenv("MAKEFLAGS", "") # True is used by setuptools to indicate cpu_count for `parallel` # None is default for setuptools for single threading orig_n_threads = re.findall(r"(?<=(? Date: Tue, 14 Jan 2025 20:18:30 +0100 Subject: [PATCH 48/61] Update setup.py --- setup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/setup.py b/setup.py index a6093fb780..995493055f 100644 --- a/setup.py +++ b/setup.py @@ -322,7 +322,7 @@ def set_nthreads(n_threads): makeflags = os.getenv("MAKEFLAGS", "") # True is used by setuptools to indicate cpu_count for `parallel` # None is default for setuptools for single threading - orig_n_threads = re.findall(r"(?<=(? Date: Wed, 15 Jan 2025 09:13:46 +0100 Subject: [PATCH 49/61] Update setup.py --- setup.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/setup.py b/setup.py index 995493055f..3b8456560b 100644 --- a/setup.py +++ b/setup.py @@ -322,7 +322,11 @@ def set_nthreads(n_threads): makeflags = os.getenv("MAKEFLAGS", "") # True is used by setuptools to indicate cpu_count for `parallel` # None is default for setuptools for single threading - orig_n_threads = re.findall(r"(?<=(? 
Date: Wed, 15 Jan 2025 09:55:41 +0100 Subject: [PATCH 50/61] Attempt to single thread windows build --- conda-recipe/bld.bat | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/conda-recipe/bld.bat b/conda-recipe/bld.bat index 15321a39ec..1ab067a14c 100644 --- a/conda-recipe/bld.bat +++ b/conda-recipe/bld.bat @@ -30,4 +30,4 @@ IF DEFINED DPCPPROOT ( call "%DPCPPROOT%\env\vars.bat" ) -%PYTHON% setup.py install --single-version-externally-managed --record record.txt +%PYTHON% setup.py install -j1 --single-version-externally-managed --record record.txt From de185035461a42b050219b58d4481b77042cae4e Mon Sep 17 00:00:00 2001 From: Ian Faust Date: Wed, 15 Jan 2025 10:12:25 +0100 Subject: [PATCH 51/61] Update bld.bat --- conda-recipe/bld.bat | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/conda-recipe/bld.bat b/conda-recipe/bld.bat index 1ab067a14c..4524b77738 100644 --- a/conda-recipe/bld.bat +++ b/conda-recipe/bld.bat @@ -30,4 +30,5 @@ IF DEFINED DPCPPROOT ( call "%DPCPPROOT%\env\vars.bat" ) -%PYTHON% setup.py install -j1 --single-version-externally-managed --record record.txt +set MAKEFLAGS=-j1 +%PYTHON% setup.py install --single-version-externally-managed --record record.txt From f9b159b153028dd888d0ecc7a8d01bca0ffa5a3c Mon Sep 17 00:00:00 2001 From: "Faust, Ian" Date: Wed, 15 Jan 2025 11:35:01 +0100 Subject: [PATCH 52/61] fix windows issues, test again --- scripts/build_backend.py | 5 ++++- setup.py | 24 +++++++++++++----------- 2 files changed, 17 insertions(+), 12 deletions(-) diff --git a/scripts/build_backend.py b/scripts/build_backend.py index c1e5d5716f..4522d90de1 100755 --- a/scripts/build_backend.py +++ b/scripts/build_backend.py @@ -48,6 +48,7 @@ def custom_build_cmake_clib( use_parameters_lib=True, use_abs_rpath=False, use_gcov=False, + n_threads=1, ): import pybind11 @@ -133,7 +134,9 @@ def custom_build_cmake_clib( cmake_args += ["-DSKLEARNEX_GCOV=ON"] # the number of parallel processes is dictated by MAKEFLAGS (see setup.py) - make_args = ["cmake", "--build", abs_build_temp_path] + # using make conventions (i.e. 
-j flag) but is set as a cmake argument to + # support Windows and Linux simultaneously + make_args = ["cmake", "--build", abs_build_temp_path, "-j" + str(n_threads)] make_install_args = [ "cmake", diff --git a/setup.py b/setup.py index a6093fb780..ac23155853 100644 --- a/setup.py +++ b/setup.py @@ -441,8 +441,8 @@ def get_onedal_py_libs(): return libs -class custom_build: - def run(self): +class onedal_build: + def onedal_run(self, n_threads): cxx = os.getenv("CXX", "cl" if IS_WIN else "g++") build_onedal = lambda iface: build_backend.custom_build_cmake_clib( iface=iface, @@ -451,6 +451,8 @@ def run(self): no_dist=no_dist, use_parameters_lib=use_parameters_lib, use_abs_rpath=USE_ABS_RPATH, + use_gcov=use_gcov, + n_threads=n_threads, ) if is_onedal_iface: build_onedal("host") @@ -459,7 +461,7 @@ def run(self): if build_distribute: build_onedal("spmd_dpc") - def post_build(self): + def onedal_post_build(self): if IS_MAC: import subprocess @@ -482,22 +484,22 @@ def post_build(self): ) -class develop(orig_develop.develop, custom_build): +class develop(orig_develop.develop, onedal_build): def run(self): - with set_nthreads(self.parallel): - custom_build.run(self) + with set_nthreads(self.parallel) as n_threads: + self.onedal_run(n_threads) super().run() - custom_build.post_build(self) + self.onedal_post_build() -class build(orig_build.build, custom_build): +class build(orig_build.build, onedal_build): def run(self): - with set_nthreads(self.parallel): - custom_build.run(self) + with set_nthreads(self.parallel) as n_threads: + self.onedal_run(n_threads) super().run() - custom_build.post_build(self) + self.onedal_post_build() project_urls = { From 9fc4698347f24f6528ef0a96efedd940d440b63c Mon Sep 17 00:00:00 2001 From: "Faust, Ian" Date: Wed, 15 Jan 2025 11:38:13 +0100 Subject: [PATCH 53/61] fix again --- setup.py | 25 +++++++++++-------------- 1 file changed, 11 insertions(+), 14 deletions(-) diff --git a/setup.py b/setup.py index 888278a616..a952dd36c1 100644 --- a/setup.py +++ b/setup.py @@ -446,6 +446,13 @@ def get_onedal_py_libs(): class onedal_build: + + def run(self): + with set_nthreads(self.parallel) as n_threads: + self.onedal_run(n_threads) + super(onedal_build, self).run() + self.onedal_post_build() + def onedal_run(self, n_threads): cxx = os.getenv("CXX", "cl" if IS_WIN else "g++") build_onedal = lambda iface: build_backend.custom_build_cmake_clib( @@ -488,22 +495,12 @@ def onedal_post_build(self): ) -class develop(orig_develop.develop, onedal_build): - - def run(self): - with set_nthreads(self.parallel) as n_threads: - self.onedal_run(n_threads) - super().run() - self.onedal_post_build() - +class develop(onedal_build, orig_develop.develop): + pass -class build(orig_build.build, onedal_build): - def run(self): - with set_nthreads(self.parallel) as n_threads: - self.onedal_run(n_threads) - super().run() - self.onedal_post_build() +class build(onedal_build, orig_build.build): + pass project_urls = { From a9c86c54fdc3b3027bd7190c9e3a46108a538fff Mon Sep 17 00:00:00 2001 From: "Faust, Ian" Date: Wed, 15 Jan 2025 13:36:10 +0100 Subject: [PATCH 54/61] greatly reduce code based on misunderstanding env variable scopes --- conda-recipe/bld.bat | 1 - setup.py | 46 ++++++++++++++------------------------------ 2 files changed, 14 insertions(+), 33 deletions(-) diff --git a/conda-recipe/bld.bat b/conda-recipe/bld.bat index 4524b77738..15321a39ec 100644 --- a/conda-recipe/bld.bat +++ b/conda-recipe/bld.bat @@ -30,5 +30,4 @@ IF DEFINED DPCPPROOT ( call "%DPCPPROOT%\env\vars.bat" ) -set 
From a9c86c54fdc3b3027bd7190c9e3a46108a538fff Mon Sep 17 00:00:00 2001
From: "Faust, Ian"
Date: Wed, 15 Jan 2025 13:36:10 +0100
Subject: [PATCH 54/61] greatly reduce code based on misunderstanding env variable scopes

---
 conda-recipe/bld.bat |  1 -
 setup.py             | 46 ++++++++++++++------------------------
 2 files changed, 14 insertions(+), 33 deletions(-)

diff --git a/conda-recipe/bld.bat b/conda-recipe/bld.bat
index 4524b77738..15321a39ec 100644
--- a/conda-recipe/bld.bat
+++ b/conda-recipe/bld.bat
@@ -30,5 +30,4 @@ IF DEFINED DPCPPROOT (
     call "%DPCPPROOT%\env\vars.bat"
 )
 
-set MAKEFLAGS=-j1
 %PYTHON% setup.py install --single-version-externally-managed --record record.txt
diff --git a/setup.py b/setup.py
index a952dd36c1..6bc2509fd1 100644
--- a/setup.py
+++ b/setup.py
@@ -26,7 +26,6 @@
 import shutil
 import sys
 import time
-from contextlib import contextmanager
 from ctypes.util import find_library
 from os.path import join as jp
 from sysconfig import get_config_vars
@@ -312,43 +311,26 @@ def get_build_options():
     return eca, ela, include_dir_plat
 
 
-@contextmanager
-def set_nthreads(n_threads):
+def get_nthreads(n_threads=None):
     """MAKEFLAGS is used by the onedal cmake and cythonize to control the number
     of processes. If it is set via the setup.py commmand with the --parallel or
-    -j argument, it will supercede this value. When both are not set, it will
+    -j argument, it will supersede this value. When both are not set, it will
     default to the number of cpus, n_threads should be a positive integer, None,
     or True"""
 
     makeflags = os.getenv("MAKEFLAGS", "")
     # True is used by setuptools to indicate cpu_count for `parallel`
    # None is default for setuptools for single threading
-    # take the last defined value in MAKEFLAGS, do the regex on the
-    # reversed string because of the limitations in re.sub (only
-    # replace the last value)
-    regex_inv = r"(?

Date: Wed, 15 Jan 2025 14:02:12 +0100
Subject: [PATCH 55/61] simplify

---
 INSTALL.md | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/INSTALL.md b/INSTALL.md
index f60c1cb735..3ed98f9d8b 100755
--- a/INSTALL.md
+++ b/INSTALL.md
@@ -172,8 +172,11 @@ The build-process (using setup.py) happens in 4 stages:
 * ``NO_STREAM``: set to '1', 'yes' or alike to build without support for streaming mode
 * ``NO_DPC``: set to '1', 'yes' or alike to build without support of oneDAL DPC++ interfaces
 * ``OFF_ONEDAL_IFACE``: set to '1' to build without the support of oneDAL interfaces
+* ``MAKEFLAGS``: the last `-j` flag determines the number of threads for building the onedal extension. It will default to the number of processors when not set.
 
 **Note:** in order to use distributed mode, `mpi4py` is also required, and needs to be built with the same MPI backend as scikit-learn-intelex.
 
+**Note:** The `-j` flag in the ``MAKEFLAGS`` environment variable is superseded in `build` and `develop` mode by the ``--parallel`` and `-j` command line flags.
+
 ### Build Intel(R) Extension for Scikit-learn
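The INSTALL.md wording added in patch 55 ("the last `-j` flag determines the number of threads") can be illustrated with a rough sketch. The regular expression below is illustrative only; the expressions actually used in the surrounding setup.py hunks are truncated in this series.

    import os
    import re

    # Take the last -j value from MAKEFLAGS; fall back to the CPU count when the
    # variable is unset or the last flag carries no number (a bare "-j").
    makeflags = os.getenv("MAKEFLAGS", "-j8 -j2")  # example default for the sketch
    values = re.findall(r"-j\s*(\d*)", makeflags)
    last = values[-1] if values else ""
    n_threads = int(last) if last else os.cpu_count() or 1
    print(n_threads)  # 2 for the example value
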
From c04889a1ac5a0f125927c4fd4e421aaa8cc865b4 Mon Sep 17 00:00:00 2001
From: "Faust, Ian"
Date: Wed, 15 Jan 2025 14:02:53 +0100
Subject: [PATCH 56/61] forgotten save

---
 setup.py | 32 +++++++++++++++++---------------
 1 file changed, 17 insertions(+), 15 deletions(-)

diff --git a/setup.py b/setup.py
index 6bc2509fd1..3ce2fdcf26 100644
--- a/setup.py
+++ b/setup.py
@@ -317,18 +317,7 @@ def get_nthreads(n_threads=None):
     -j argument, it will supersede this value. When both are not set, it will
     default to the number of cpus, n_threads should be a positive integer, None,
     or True"""
 
-    makeflags = os.getenv("MAKEFLAGS", "")
-    # True is used by setuptools to indicate cpu_count for `parallel`
-    # None is default for setuptools for single threading
-    # take the last defined value in MAKEFLAGS, as it will be the one
-    # used by cmake/make
-    regex = r"(?<=(?

Date: Wed, 15 Jan 2025 14:04:11 +0100
Subject: [PATCH 57/61] formatting

---
 setup.py | 11 -----------
 1 file changed, 11 deletions(-)

diff --git a/setup.py b/setup.py
index 3ce2fdcf26..b11cbe4601 100644
--- a/setup.py
+++ b/setup.py
@@ -311,17 +311,6 @@ def get_build_options():
     return eca, ela, include_dir_plat
 
 
-def get_nthreads(n_threads=None):
-    """MAKEFLAGS is used by the onedal cmake and cythonize to control the number
-    of processes. If it is set via the setup.py commmand with the --parallel or
-    -j argument, it will supersede this value. When both are not set, it will
-    default to the number of cpus, n_threads should be a positive integer, None,
-    or True"""
-
-
-    return n_threads
-
-
 def getpyexts():
     eca, ela, include_dir_plat = get_build_options()
     libraries_plat = get_libs("daal")

From b14a7035a708889d1b1dedbe9ad03f17d3c306fb Mon Sep 17 00:00:00 2001
From: "Faust, Ian"
Date: Wed, 15 Jan 2025 14:08:24 +0100
Subject: [PATCH 58/61] fix mistake in reversion of regex

---
 setup.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setup.py b/setup.py
index b11cbe4601..96c192654f 100644
--- a/setup.py
+++ b/setup.py
@@ -419,7 +419,7 @@ def onedal_run(self):
         # None is default for setuptools for single threading
         # take the last defined value in MAKEFLAGS, as it will be the one
         # used by cmake/make
-        regex = r"(?<=(?

Date: Wed, 15 Jan 2025 14:19:36 +0100
Subject: [PATCH 59/61] fix if cpu_count is None

---
 setup.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/setup.py b/setup.py
index 96c192654f..6ac2e0b733 100644
--- a/setup.py
+++ b/setup.py
@@ -423,9 +423,9 @@ def onedal_run(self):
         orig_n_threads = re.findall(regex, makeflags)[-1]
 
         if n_threads is None:
-            n_threads = int(orig_n_threads) if orig_n_threads else os.cpu_count()
+            n_threads = int(orig_n_threads) if orig_n_threads else os.cpu_count() or 1
         elif n_threads is True:
-            n_threads = os.cpu_count()
+            n_threads = os.cpu_count() or 1
 
         cxx = os.getenv("CXX", "cl" if IS_WIN else "g++")
         build_onedal = lambda iface: build_backend.custom_build_cmake_clib(

From 0cbd40a0c9e975191d3e344936f07b821bd8aea9 Mon Sep 17 00:00:00 2001
From: "Faust, Ian"
Date: Wed, 15 Jan 2025 14:30:26 +0100
Subject: [PATCH 60/61] switch back to inverse because of nullstring on -1 index

---
 setup.py | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/setup.py b/setup.py
index 6ac2e0b733..936103f9a6 100644
--- a/setup.py
+++ b/setup.py
@@ -418,9 +418,10 @@ def onedal_run(self):
         # True is used by setuptools to indicate cpu_count for `parallel`
         # None is default for setuptools for single threading
         # take the last defined value in MAKEFLAGS, as it will be the one
-        # used by cmake/make
-        regex = r"(?<=(?
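Patches 58-60 go back and forth on how that value is extracted; the exact regular expressions are cut off above. The commit messages describe two safeguards: guarding `os.cpu_count()` with `or 1` (patch 59) and working on the reversed string so that an empty `findall()` result is never indexed with `[-1]` (patch 60). A hedged sketch of that reversed-string idea, not the project's actual code:

    import os
    import re

    makeflags = "-j8 --debug -j"  # example: the last -j carries no number
    # The first match in the reversed string corresponds to the last -j in the
    # original, so no indexing of a possibly empty list is needed.
    m = re.search(r"(\d*)j-", makeflags[::-1])
    digits = m.group(1)[::-1] if m else ""
    n_threads = int(digits) if digits else os.cpu_count() or 1  # patch 59's "or 1" guard
    print(n_threads)
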
Date: Wed, 15 Jan 2025 16:29:36 +0100
Subject: [PATCH 61/61] remove build_ext to use default setuptools's build_ext

---
 INSTALL.md | 4 ++--
 setup.py   | 3 +--
 2 files changed, 3 insertions(+), 4 deletions(-)

diff --git a/INSTALL.md b/INSTALL.md
index 3ed98f9d8b..d497741acc 100755
--- a/INSTALL.md
+++ b/INSTALL.md
@@ -172,10 +172,10 @@ The build-process (using setup.py) happens in 4 stages:
 * ``NO_STREAM``: set to '1', 'yes' or alike to build without support for streaming mode
 * ``NO_DPC``: set to '1', 'yes' or alike to build without support of oneDAL DPC++ interfaces
 * ``OFF_ONEDAL_IFACE``: set to '1' to build without the support of oneDAL interfaces
-* ``MAKEFLAGS``: the last `-j` flag determines the number of threads for building the onedal extension. It will default to the number of processors when not set.
+* ``MAKEFLAGS``: the last `-j` flag determines the number of threads for building the onedal extension. It will default to the number of CPU threads when not set.
 
 **Note:** in order to use distributed mode, `mpi4py` is also required, and needs to be built with the same MPI backend as scikit-learn-intelex.
 
-**Note:** The `-j` flag in the ``MAKEFLAGS`` environment variable is superseded in `build` and `develop` mode by the ``--parallel`` and `-j` command line flags.
+**Note:** The `-j` flag in the ``MAKEFLAGS`` environment variable is superseded in `setup.py` modes which support the ``--parallel`` and `-j` command line flags.
 
 ### Build Intel(R) Extension for Scikit-learn
diff --git a/setup.py b/setup.py
index 936103f9a6..3da8207b8d 100644
--- a/setup.py
+++ b/setup.py
@@ -34,7 +34,6 @@
 import setuptools.command.build as orig_build
 import setuptools.command.develop as orig_develop
 from Cython.Build import cythonize
-from Cython.Distutils import build_ext
 from setuptools import Extension, setup
 
 import scripts.build_backend as build_backend
@@ -572,7 +571,7 @@ class build(onedal_build, orig_build.build):
     author_email="onedal.maintainers@intel.com",
     maintainer_email="onedal.maintainers@intel.com",
     project_urls=project_urls,
-    cmdclass={"develop": develop, "build": build, "build_ext": build_ext},
+    cmdclass={"develop": develop, "build": build},
     classifiers=[
         "Development Status :: 5 - Production/Stable",
         "Environment :: Console",