diff --git a/onnxmltools/convert/xgboost/operator_converters/XGBoost.py b/onnxmltools/convert/xgboost/operator_converters/XGBoost.py
index 519d94ce..a9f31211 100644
--- a/onnxmltools/convert/xgboost/operator_converters/XGBoost.py
+++ b/onnxmltools/convert/xgboost/operator_converters/XGBoost.py
@@ -264,8 +264,8 @@ def convert(scope, operator, container):
             )
 
         if objective == "count:poisson":
-            cst = scope.get_unique_variable_name("half")
-            container.add_initializer(cst, TensorProto.FLOAT, [1], [0.5])
+            cst = scope.get_unique_variable_name("poisson")
+            container.add_initializer(cst, TensorProto.FLOAT, [1], [base_score])
             new_name = scope.get_unique_variable_name("exp")
             container.add_node("Exp", names, [new_name])
             container.add_node("Mul", [new_name, cst], operator.output_full_names)
diff --git a/tests/xgboost/test_xgboost_converters.py b/tests/xgboost/test_xgboost_converters.py
index 475d5b2a..7a37dd3a 100644
--- a/tests/xgboost/test_xgboost_converters.py
+++ b/tests/xgboost/test_xgboost_converters.py
@@ -94,12 +94,12 @@ def test_xgb_regressor_poisson(self):
         x = iris.data
         y = iris.target / 100
         x_train, x_test, y_train, _ = train_test_split(
-            x, y, test_size=0.5, random_state=42
+            x, y, test_size=0.5, random_state=17
         )
         for nest in [5, 50]:
             xgb = XGBRegressor(
                 objective="count:poisson",
-                random_state=0,
+                random_state=5,
                 max_depth=3,
                 n_estimators=nest,
             )
@@ -716,5 +716,4 @@ def test_xgb_classifier_13(self):
 
 
 if __name__ == "__main__":
-    TestXGBoostModels().test_xgb_regressor_poisson()
     unittest.main(verbosity=2)
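
For reference, below is a quick end-to-end check in the spirit of test_xgb_regressor_poisson showing what the fix buys: the Poisson branch of the converted graph ends in Exp followed by Mul with a [base_score] initializer, so the ONNX output should now track predict() regardless of the model's base_score (previously it only matched when base_score happened to be 0.5). This is a sketch, not part of the patch; the input name "input", the iris setup and the tolerance are assumptions borrowed from the existing tests.

import numpy as np
import onnxruntime as rt
from onnxmltools import convert_xgboost
from onnxmltools.convert.common.data_types import FloatTensorType
from sklearn.datasets import load_iris
from sklearn.model_selection import train_test_split
from xgboost import XGBRegressor

# Same setup as the updated test: small non-negative targets for count:poisson.
x, y = load_iris(return_X_y=True)
y = y / 100
x_train, x_test, y_train, _ = train_test_split(x, y, test_size=0.5, random_state=17)

xgb = XGBRegressor(
    objective="count:poisson", random_state=5, max_depth=3, n_estimators=5
)
xgb.fit(x_train, y_train)

# Convert and run the ONNX graph; the Poisson branch is Exp(tree_sum)
# followed by Mul with the [base_score] initializer added by this patch.
onx = convert_xgboost(
    xgb, initial_types=[("input", FloatTensorType([None, x.shape[1]]))]
)
sess = rt.InferenceSession(
    onx.SerializeToString(), providers=["CPUExecutionProvider"]
)
got = sess.run(None, {"input": x_test.astype(np.float32)})[0].ravel()

# With the old hard-coded 0.5 this assertion only held when the trained
# model's base_score happened to be 0.5; with [base_score] it tracks predict().
np.testing.assert_allclose(got, xgb.predict(x_test), rtol=1e-4)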