@@ -110,9 +110,9 @@ def test_with_iterator(self, sparsity: float) -> None:

     @pytest.mark.parametrize("sparsity", [0.0, 0.1, 0.5, 0.8, 0.9])
     def test_training(self, sparsity: float) -> None:
-        n_samples_per_batch = 317
-        n_features = 8
-        n_batches = 7
+        n_samples_per_batch = 16
+        n_features = 2
+        n_batches = 2
         if sparsity == 0.0:
             it = IteratorForTest(
                 *make_batches(n_samples_per_batch, n_features, n_batches, False), None
@@ -127,27 +127,17 @@ def test_training(self, sparsity: float) -> None:

         parameters = {"tree_method": "hist", "max_bin": 256}
         Xy_it = xgb.QuantileDMatrix(it, max_bin=parameters["max_bin"])
-        from_it = xgb.train(parameters, Xy_it)
+        from_it = xgb.train(parameters, Xy_it, num_boost_round=1)

         X, y, w = it.as_arrays()
         w_it = Xy_it.get_weight()
         np.testing.assert_allclose(w_it, w)

         Xy_arr = xgb.DMatrix(X, y, weight=w)
-        from_arr = xgb.train(parameters, Xy_arr)
+        from_arr = xgb.train(parameters, Xy_arr, num_boost_round=1)

         np.testing.assert_allclose(from_arr.predict(Xy_it), from_it.predict(Xy_arr))

-        y -= y.min()
-        y += 0.01
-        Xy = xgb.QuantileDMatrix(X, y, weight=w)
-        with pytest.raises(ValueError, match=r"Only.*hist.*"):
-            parameters = {
-                "tree_method": "approx",
-                "max_bin": 256,
-                "objective": "reg:gamma",
-            }
-            xgb.train(parameters, Xy)

     def run_ref_dmatrix(self, rng: Any, tree_method: str, enable_cat: bool) -> None:
         n_samples, n_features = 2048, 17
0 commit comments