You signed in with another tab or window. Reload to refresh your session.You signed out in another tab or window. Reload to refresh your session.You switched accounts on another tab or window. Reload to refresh your session.Dismiss alert
/root/miniconda3/lib/python3.9/site-packages/pyspark/ml/base.py:205: in fit
return self._fit(dataset)
src/spark_rapids_ml/core.py:812: in _fit
return self._fit_internal(dataset, None)[0]
src/spark_rapids_ml/core.py:781: in _fit_internal
pipelined_rdd = self._call_cuml_fit_func(
src/spark_rapids_ml/core.py:487: in _call_cuml_fit_func
cuml_fit_func = self._get_cuml_fit_func(
src/spark_rapids_ml/tree.py:287: in _get_cuml_fit_func
n_estimators_of_all_params.append(self._estimators_per_worker(num_trees))
def _estimators_per_worker(self, n_estimators: int) -> List[int]:
"""Calculate the number of trees each task should train according to n_estimators"""
n_workers = self.num_workers
if n_estimators < n_workers:
raise ValueError("n_estimators cannot be lower than number of spark tasks.")
E ValueError: n_estimators cannot be lower than number of spark tasks.
The text was updated successfully, but these errors were encountered:
Test environment:
There are 4 V100 GPUs.
Details:
tests/test_random_forest.py:499:
/root/miniconda3/lib/python3.9/site-packages/pyspark/ml/base.py:205: in fit
return self._fit(dataset)
src/spark_rapids_ml/core.py:812: in _fit
return self._fit_internal(dataset, None)[0]
src/spark_rapids_ml/core.py:781: in _fit_internal
pipelined_rdd = self._call_cuml_fit_func(
src/spark_rapids_ml/core.py:487: in _call_cuml_fit_func
cuml_fit_func = self._get_cuml_fit_func(
src/spark_rapids_ml/tree.py:287: in _get_cuml_fit_func
n_estimators_of_all_params.append(self._estimators_per_worker(num_trees))
self = RandomForestRegressor_5def7d11920a, n_estimators = 3
E ValueError: n_estimators cannot be lower than number of spark tasks.
The text was updated successfully, but these errors were encountered: