1616import scipy .spatial
1717
1818from ..learner import (AverageLearner , BalancingLearner , DataSaver ,
19- IntegratorLearner , Learner1D , Learner2D , LearnerND , SKOptLearner )
19+ IntegratorLearner , Learner1D , Learner2D , LearnerND )
2020from ..runner import simple
2121
2222
# `scikit-optimize` is an optional dependency: when it is not installed,
# `SKOptLearner` cannot be imported.  Expose the name as None in that case
# so the tests below can detect and skip it (see `maybe_skip`).
try:
    import skopt  # noqa: F401  -- availability probe only
    from ..learner import SKOptLearner
except ModuleNotFoundError:
    SKOptLearner = None
29+
2330def generate_random_parametrization (f ):
2431 """Return a realization of 'f' with parameters bound to random values.
2532
@@ -60,6 +67,10 @@ def xfail(learner):
6067 return pytest .mark .xfail , learner
6168
6269
def maybe_skip(learner):
    """Tag *learner* with a skip marker when it is unavailable.

    An unavailable (optional-dependency) learner is represented by None;
    it is returned as a ``(pytest.mark.skip, learner)`` pair that
    `run_with` knows how to unpack.  Available learners pass through
    unchanged.
    """
    if learner is None:
        return pytest.mark.skip, learner
    return learner
72+
73+
6374# All parameters except the first must be annotated with a callable that
6475# returns a random value for that parameter.
6576
@@ -100,15 +111,15 @@ def gaussian(n):
def run_with(*learner_types):
    """Return a ``pytest.mark.parametrize`` decorator covering every
    (function, kwargs) combination registered for the given learners.

    A learner may also be given as a ``(marker, learner)`` pair — as
    produced by `xfail` or `maybe_skip` — in which case the marker is
    attached to each parameter set generated for that learner.
    """
    pars = []
    for learner in learner_types:
        is_tagged = isinstance(learner, tuple)
        if is_tagged:
            marker, learner = learner
        for func, kwargs in learner_function_combos[learner]:
            # Check if learner was marked with our `xfail` decorator
            # XXX: doesn't work when feeding kwargs to xfail.
            if is_tagged:
                pars.append(pytest.param(learner, func, dict(kwargs),
                                         marks=[marker]))
            else:
                pars.append((learner, func, dict(kwargs)))
    return pytest.mark.parametrize('learner_type, f, learner_kwargs', pars)
@@ -391,8 +402,8 @@ def test_balancing_learner(learner_type, f, learner_kwargs):
391402 assert all (l .npoints > 10 for l in learner .learners ), [l .npoints for l in learner .learners ]
392403
393404
394- @run_with (Learner1D , Learner2D , LearnerND , AverageLearner , SKOptLearner ,
395- IntegratorLearner )
405+ @run_with (Learner1D , Learner2D , LearnerND , AverageLearner ,
406+ maybe_skip ( SKOptLearner ), IntegratorLearner )
396407def test_saving (learner_type , f , learner_kwargs ):
397408 f = generate_random_parametrization (f )
398409 learner = learner_type (f , ** learner_kwargs )
@@ -412,8 +423,8 @@ def test_saving(learner_type, f, learner_kwargs):
412423 os .remove (path )
413424
414425
415- @run_with (Learner1D , Learner2D , LearnerND , AverageLearner , SKOptLearner ,
416- IntegratorLearner )
426+ @run_with (Learner1D , Learner2D , LearnerND , AverageLearner ,
427+ maybe_skip ( SKOptLearner ), IntegratorLearner )
417428def test_saving_of_balancing_learner (learner_type , f , learner_kwargs ):
418429 f = generate_random_parametrization (f )
419430 learner = BalancingLearner ([learner_type (f , ** learner_kwargs )])
@@ -438,8 +449,8 @@ def test_saving_of_balancing_learner(learner_type, f, learner_kwargs):
438449 shutil .rmtree (folder )
439450
440451
441- @run_with (Learner1D , Learner2D , LearnerND , AverageLearner , SKOptLearner ,
442- IntegratorLearner )
452+ @run_with (Learner1D , Learner2D , LearnerND , AverageLearner ,
453+ maybe_skip ( SKOptLearner ), IntegratorLearner )
443454def test_saving_with_datasaver (learner_type , f , learner_kwargs ):
444455 f = generate_random_parametrization (f )
445456 g = lambda x : {'y' : f (x ), 't' : random .random ()}
0 commit comments