This article collects typical usage examples of Python's warnings.simplefilter function. If you have been wondering what simplefilter does, how to call it, or what it looks like in real code, the curated examples below should help.
Fifteen code examples of simplefilter are shown below, ordered by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
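Before going through the examples, here is a minimal sketch of the two patterns that recur throughout them: recording every warning raised inside a warnings.catch_warnings block (the "always" filter), and silencing or escalating a specific warning category (the "ignore" and "error" filters). The warning message text below is made up purely for illustration.

import warnings

# Pattern 1: record every warning emitted inside the block, even duplicates.
with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    warnings.warn("example message", DeprecationWarning)  # hypothetical warning
    assert len(caught) == 1
    assert issubclass(caught[-1].category, DeprecationWarning)

# Pattern 2: silence one category inside the block (use "error" instead of
# "ignore" to turn matching warnings into raised exceptions).
with warnings.catch_warnings():
    warnings.simplefilter("ignore", DeprecationWarning)
    warnings.warn("example message", DeprecationWarning)  # silently dropped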
Example 1: test_parseable_output_deprecated
def test_parseable_output_deprecated(self):
    with warnings.catch_warnings(record=True) as cm:
        warnings.simplefilter("always")
        ParseableTextReporter()

        self.assertEqual(len(cm), 1)
        self.assertIsInstance(cm[0].message, DeprecationWarning)
Example 2: test_idlever
def test_idlever(self):
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter("always")
        import idlelib.idlever
        self.assertEqual(len(w), 1)
        self.assertTrue(issubclass(w[-1].category, DeprecationWarning))
        self.assertIn("version", str(w[-1].message))
Example 3: test_op
def test_op(self):
    # motor.Op is deprecated in Motor 0.2, superseded by Tornado 3 Futures.
    # Just make sure it still works.
    collection = self.cx.pymongo_test.test_collection
    doc = {'_id': 'jesse'}

    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter("always")

        # Op works.
        _id = yield motor.Op(collection.insert, doc)
        self.assertEqual('jesse', _id)

        # Raised a DeprecationWarning.
        self.assertEqual(1, len(w))
        warning = w[-1]
        self.assertTrue(issubclass(warning.category, DeprecationWarning))
        message = str(warning.message)
        self.assertTrue("deprecated" in message)
        self.assertTrue("insert" in message)

    result = yield motor.Op(collection.find_one, doc)
    self.assertEqual(doc, result)

    # Make sure it works with no args.
    result = yield motor.Op(collection.find_one)
    self.assertTrue(isinstance(result, dict))

    with assert_raises(pymongo.errors.DuplicateKeyError):
        yield motor.Op(collection.insert, doc)
Example 4: _lasso_stability_path
def _lasso_stability_path(X, y, mask, weights, eps):
    "Inner loop of lasso_stability_path"
    X = X * weights[np.newaxis, :]
    X = X[safe_mask(X, mask), :]
    y = y[mask]

    alpha_max = np.max(np.abs(np.dot(X.T, y))) / X.shape[0]
    alpha_min = eps * alpha_max  # set for early stopping in path
    with warnings.catch_warnings():
        warnings.simplefilter('ignore', ConvergenceWarning)
        alphas, _, coefs = lars_path(X, y, method='lasso', verbose=False,
                                     alpha_min=alpha_min)
    # Scale alpha by alpha_max
    alphas /= alphas[0]
    # Sort alphas in ascending order
    alphas = alphas[::-1]
    coefs = coefs[:, ::-1]
    # Get rid of the alphas that are too small
    mask = alphas >= eps
    # We also want to keep the first one: it should be close to the OLS
    # solution
    mask[0] = True
    alphas = alphas[mask]
    coefs = coefs[:, mask]
    return alphas, coefs
Example 5: __unit_test_onset_function
def __unit_test_onset_function(metric):
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter('always')
        # First, test for a warning on empty onsets
        metric(np.array([]), np.arange(10))
        assert len(w) == 1
        assert issubclass(w[-1].category, UserWarning)
        assert str(w[-1].message) == "Reference onsets are empty."
        metric(np.arange(10), np.array([]))
        assert len(w) == 2
        assert issubclass(w[-1].category, UserWarning)
        assert str(w[-1].message) == "Estimated onsets are empty."
        # And that the metric is 0
        assert np.allclose(metric(np.array([]), np.array([])), 0)

        # Now test validation function - onsets must be 1d ndarray
        onsets = np.array([[1., 2.]])
        nose.tools.assert_raises(ValueError, metric, onsets, onsets)
        # onsets must be in seconds (so not huge)
        onsets = np.array([1e10, 1e11])
        nose.tools.assert_raises(ValueError, metric, onsets, onsets)
        # onsets must be sorted
        onsets = np.array([2., 1.])
        nose.tools.assert_raises(ValueError, metric, onsets, onsets)

        # Valid onsets which are the same produce a score of 1 for all metrics
        onsets = np.arange(10, dtype=float)
        assert np.allclose(metric(onsets, onsets), 1)
Example 6: test_get_default_base_name_deprecation
def test_get_default_base_name_deprecation(self):
    msg = "`CustomRouter.get_default_base_name` method should be renamed `get_default_basename`."

    # Class definition should raise a warning
    with pytest.warns(RemovedInDRF311Warning) as w:
        warnings.simplefilter('always')

        class CustomRouter(SimpleRouter):
            def get_default_base_name(self, viewset):
                return 'foo'

    assert len(w) == 1
    assert str(w[0].message) == msg

    # Deprecated method implementation should still be called
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter('always')

        router = CustomRouter()
        router.register('mock', MockViewSet)

    assert len(w) == 0
    assert router.registry == [
        ('mock', MockViewSet, 'foo'),
    ]
Example 7: test_usgs_eventype
def test_usgs_eventype(self):
    filename = os.path.join(self.path, 'usgs_event.xml')
    with warnings.catch_warnings(record=True):
        warnings.simplefilter("ignore")
        catalog = _read_quakeml(filename)
    self.assertEqual(len(catalog), 1)
    self.assertEqual(catalog[0].event_type, 'quarry blast')
Example 8: test_decorator_attrs
def test_decorator_attrs(self):
    def fxn(module): pass
    with warnings.catch_warnings():
        warnings.simplefilter('ignore', DeprecationWarning)
        wrapped = self.util.set_package(fxn)
    self.assertEqual(wrapped.__name__, fxn.__name__)
    self.assertEqual(wrapped.__qualname__, fxn.__qualname__)
Example 9: test_attribute_is_None
def test_attribute_is_None(self):
    loader = self.DummyLoader()
    loader.module = types.ModuleType('blah')
    loader.module.__loader__ = None
    with warnings.catch_warnings():
        warnings.simplefilter('ignore', DeprecationWarning)
        self.assertEqual(loader, loader.load_module('blah').__loader__)
Example 10: setUp
def setUp(self):
    super(converterTestsCDF, self).setUp()
    self.SDobj = dm.SpaceData(attrs={'global': 'test'})
    self.SDobj['var'] = dm.dmarray([1, 2, 3], attrs={'a': 'a'})
    self.testdir = tempfile.mkdtemp()
    self.testfile = os.path.join(self.testdir, 'test.cdf')
    warnings.simplefilter('error', dm.DMWarning)
Example 11: tearDown
def tearDown(self):
    super(converterTestsCDF, self).tearDown()
    del self.SDobj
    if os.path.exists(self.testfile):
        os.remove(self.testfile)
        os.rmdir(self.testdir)
    warnings.simplefilter('default', dm.DMWarning)
Example 12: properties
def properties(self):
    """
    return a dictionary mapping property name -> value
    """
    o = self.oorig
    getters = [name for name in dir(o)
               if name.startswith('get_')
               and six.callable(getattr(o, name))]
    getters.sort()
    d = dict()
    for name in getters:
        func = getattr(o, name)
        if self.is_alias(func):
            continue

        try:
            with warnings.catch_warnings():
                warnings.simplefilter('ignore')
                val = func()
        except:
            continue
        else:
            d[name[4:]] = val

    return d
Example 13: _evaluate_projection
def _evaluate_projection(self, x, y):
    """
    kNNEvaluate - evaluate class separation in the given projection using a k-NN method

    Parameters
    ----------
    x - variables to evaluate
    y - class

    Returns
    -------
    scores
    """
    if self.percent_data_used != 100:
        rand = np.random.choice(len(x), int(len(x) * self.percent_data_used / 100),
                                replace=False)
        x = x[rand]
        y = y[rand]
    neigh = KNeighborsClassifier(n_neighbors=3) if self.attr_color.is_discrete else \
        KNeighborsRegressor(n_neighbors=3)
    # neither the features nor the target may contain NaNs
    assert ~(np.isnan(x).any(axis=None) | np.isnan(y).any(axis=None))
    neigh.fit(x, y)
    with warnings.catch_warnings():
        warnings.simplefilter("ignore", category=UserWarning)
        scores = cross_val_score(neigh, x, y, cv=3)
    return scores.mean()
Example 14: test_ica_rank_reduction
def test_ica_rank_reduction():
    """Test recovery of full data when no source is rejected"""
    # Most basic recovery
    raw = Raw(raw_fname).crop(0.5, stop, False)
    raw.load_data()
    picks = pick_types(raw.info, meg=True, stim=False, ecg=False,
                       eog=False, exclude='bads')[:10]
    n_components = 5
    max_pca_components = len(picks)
    for n_pca_components in [6, 10]:
        with warnings.catch_warnings(record=True):  # non-convergence
            warnings.simplefilter('always')
            ica = ICA(n_components=n_components,
                      max_pca_components=max_pca_components,
                      n_pca_components=n_pca_components,
                      method='fastica', max_iter=1).fit(raw, picks=picks)

        rank_before = raw.estimate_rank(picks=picks)
        assert_equal(rank_before, len(picks))
        raw_clean = ica.apply(raw, copy=True)
        rank_after = raw_clean.estimate_rank(picks=picks)
        # interaction between ICA rejection and PCA components difficult
        # to predict. rank_after often seems to be 1 higher than
        # n_pca_components
        assert_true(n_components < n_pca_components <= rank_after <=
                    rank_before)
Example 15: test_unicode_decode_error
def test_unicode_decode_error():
    # decode_error defaults to strict, so this should fail
    # First, encode (as bytes) a unicode string.
    text = "J'ai mang\xe9 du kangourou ce midi, c'\xe9tait pas tr\xeas bon."
    text_bytes = text.encode('utf-8')

    # Then let the Analyzer try to decode it as ascii. It should fail,
    # because we have given it an incorrect encoding.
    wa = CountVectorizer(ngram_range=(1, 2), encoding='ascii').build_analyzer()
    assert_raises(UnicodeDecodeError, wa, text_bytes)

    ca = CountVectorizer(analyzer='char', ngram_range=(3, 6),
                         encoding='ascii').build_analyzer()
    assert_raises(UnicodeDecodeError, ca, text_bytes)

    # Check the old interface
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter("always")

        ca = CountVectorizer(analyzer='char', ngram_range=(3, 6),
                             charset='ascii').build_analyzer()
        assert_raises(UnicodeDecodeError, ca, text_bytes)

        assert_equal(len(w), 1)
        assert_true(issubclass(w[0].category, DeprecationWarning))
        assert_true("charset" in str(w[0].message).lower())