Python warnings.catch_warnings Function Code Examples


This article collects typical usage examples of Python's warnings.catch_warnings function. If you are wondering how to call catch_warnings, what it is used for, or what real-world code that uses it looks like, the hand-picked examples below should help.



Twenty code examples of the catch_warnings function are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
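Before the examples, here is a minimal, self-contained sketch of the two patterns that recur throughout this page: recording warnings with record=True so a test can assert on them, and temporarily silencing a warning category. The warn_deprecated helper is hypothetical and exists only for illustration.

import warnings

def warn_deprecated():
    # Hypothetical helper used only for this sketch: emits a DeprecationWarning.
    warnings.warn("this API is deprecated", DeprecationWarning)

# Pattern 1: record warnings raised inside the block so they can be inspected.
with warnings.catch_warnings(record=True) as w:
    warnings.simplefilter("always")  # ensure nothing is filtered out
    warn_deprecated()
assert len(w) == 1
assert issubclass(w[0].category, DeprecationWarning)
assert "deprecated" in str(w[0].message)

# Pattern 2: temporarily ignore a warning category; the previous filter
# state is restored automatically when the block exits.
with warnings.catch_warnings():
    warnings.simplefilter("ignore", DeprecationWarning)
    warn_deprecated()  # silenced here, but not outside the block

In both patterns the global warning filters are restored when the context manager exits, which is why the tests below can change filters freely without leaking state into other tests.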

Example 1: testWarnings

  def testWarnings(self):
    # Smaller than the threshold: no warning.
    c_sparse = ops.IndexedSlices(array_ops.placeholder(dtypes.float32),
                                 array_ops.placeholder(dtypes.int32),
                                 constant([4, 4, 4, 4]))
    with warnings.catch_warnings(record=True) as w:
      math_ops.mul(c_sparse, 1.0)
    self.assertEqual(0, len(w))

    # Greater than or equal to the threshold: warning.
    c_sparse = ops.IndexedSlices(array_ops.placeholder(dtypes.float32),
                                 array_ops.placeholder(dtypes.int32),
                                 constant([100, 100, 100, 100]))
    with warnings.catch_warnings(record=True) as w:
      math_ops.mul(c_sparse, 1.0)
    self.assertEqual(1, len(w))
    self.assertTrue(
        "with 100000000 elements. This may consume a large amount of memory."
        in str(w[0].message))

    # Unknown dense shape: warning.
    c_sparse = ops.IndexedSlices(array_ops.placeholder(dtypes.float32),
                                 array_ops.placeholder(dtypes.int32),
                                 array_ops.placeholder(dtypes.int32))
    with warnings.catch_warnings(record=True) as w:
      math_ops.mul(c_sparse, 1.0)
    self.assertEqual(1, len(w))
    self.assertTrue(
        "of unknown shape. This may consume a large amount of memory."
        in str(w[0].message))
Developer: Ambier, Project: tensorflow, Lines: 30, Source: gradients_test.py


Example 2: test_long_cache_keys_shortened

    def test_long_cache_keys_shortened(self):
        cache_settings = {
            'default': {
                'BACKEND': 'django.core.cache.backends.filebased.FileBasedCache',
                'LOCATION': os.path.join(TOP_DIR, 'test.cache'),
            }
        }
        long_key_string = "X" * 251

        with override_settings(CACHES=cache_settings):
            with warnings.catch_warnings(record=True) as w:
                warnings.simplefilter("always")
                # memcached limits key length to 250
                cache.set(long_key_string, "hello cached world")

                self.assertEqual(len(w), 1)
                self.assertIsInstance(w[0].message, CacheKeyWarning)

        # Activate optional cache key length checker
        cache_settings['default']['KEY_FUNCTION'] = 'mainsite.utils.filter_cache_key'

        with override_settings(CACHES=cache_settings):
            with warnings.catch_warnings(record=True) as w:
                warnings.simplefilter("always")
                # memcached limits key length to 250
                cache.set(long_key_string, "hello cached world")

                self.assertEqual(len(w), 0)

                retrieved = cache.get(long_key_string)

                self.assertEqual(retrieved, "hello cached world")
Developer: concentricsky, Project: badgr-server, Lines: 32, Source: test_misc.py


Example 3: test_calculate_chpi_positions

def test_calculate_chpi_positions():
    """Test calculation of cHPI positions
    """
    trans, rot, t = head_pos_to_trans_rot_t(read_head_pos(pos_fname))
    with warnings.catch_warnings(record=True):
        raw = Raw(chpi_fif_fname, allow_maxshield=True, preload=True)
    t -= raw.first_samp / raw.info['sfreq']
    quats = _calculate_chpi_positions(raw, verbose='debug')
    trans_est, rot_est, t_est = head_pos_to_trans_rot_t(quats)
    _compare_positions((trans, rot, t), (trans_est, rot_est, t_est), 0.003)

    # degenerate conditions
    raw_no_chpi = Raw(test_fif_fname)
    assert_raises(RuntimeError, _calculate_chpi_positions, raw_no_chpi)
    raw_bad = raw.copy()
    for d in raw_bad.info['dig']:
        if d['kind'] == FIFF.FIFFV_POINT_HPI:
            d['coord_frame'] = 999
            break
    assert_raises(RuntimeError, _calculate_chpi_positions, raw_bad)
    raw_bad = raw.copy()
    for d in raw_bad.info['dig']:
        if d['kind'] == FIFF.FIFFV_POINT_HPI:
            d['r'] = np.ones(3)
    raw_bad.crop(0, 1., copy=False)
    with warnings.catch_warnings(record=True):  # bad pos
        with catch_logging() as log_file:
            _calculate_chpi_positions(raw_bad, verbose=True)
    # ignore HPI info header and [done] footer
    for line in log_file.getvalue().strip().split('\n')[4:-1]:
        assert_true('0/5 good' in line)
Developer: mdclarke, Project: mne-python, Lines: 31, Source: test_chpi.py


Example 4: test_misspecifications

def test_misspecifications():
    # Tests for model specification and misspecification exceptions
    endog = np.arange(20).reshape(10,2)

    # Bad trend specification
    assert_raises(ValueError, varmax.VARMAX, endog, order=(1,0), trend='')

    # Bad error_cov_type specification
    assert_raises(ValueError, varmax.VARMAX, endog, order=(1,0), error_cov_type='')

    # Bad order specification
    assert_raises(ValueError, varmax.VARMAX, endog, order=(0,0))

    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter('always')
        varmax.VARMAX(endog, order=(1,1))

    # Warning with VARMA specification
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter('always')

        varmax.VARMAX(endog, order=(1,1))

        message = ('Estimation of VARMA(p,q) models is not generically robust,'
                   ' due especially to identification issues.')
        assert_equal(str(w[0].message), message)
    warnings.resetwarnings()
Developer: Bonfils-ebu, Project: statsmodels, Lines: 27, Source: test_varmax.py


Example 5: test_popen

    def test_popen(self):
        mswindows = (sys.platform == "win32")

        if mswindows:
            command = '"{}" -c "print(\'Hello\')"'.format(sys.executable)
        else:
            command = "'{}' -c 'print(\"Hello\")'".format(sys.executable)
        with warnings.catch_warnings():
            warnings.simplefilter("ignore", DeprecationWarning)
            with platform.popen(command) as stdout:
                hello = stdout.read().strip()
                stdout.close()
                self.assertEqual(hello, "Hello")

        data = 'plop'
        if mswindows:
            command = '"{}" -c "import sys; data=sys.stdin.read(); exit(len(data))"'
        else:
            command = "'{}' -c 'import sys; data=sys.stdin.read(); exit(len(data))'"
        command = command.format(sys.executable)
        with warnings.catch_warnings():
            warnings.simplefilter("ignore", DeprecationWarning)
            with platform.popen(command, 'w') as stdin:
                stdout = stdin.write(data)
                ret = stdin.close()
                self.assertIsNotNone(ret)
                if os.name == 'nt':
                    returncode = ret
                else:
                    returncode = ret >> 8
                self.assertEqual(returncode, len(data))
Developer: tiggerntatie, Project: brython, Lines: 31, Source: test_platform.py


Example 6: test_read_epochs_bad_events

def test_read_epochs_bad_events():
    """Test epochs when events are at the beginning or the end of the file
    """
    # Event at the beginning
    epochs = Epochs(
        raw, np.array([[raw.first_samp, 0, event_id]]), event_id, tmin, tmax, picks=picks, baseline=(None, 0)
    )
    with warnings.catch_warnings(record=True):
        evoked = epochs.average()

    epochs = Epochs(
        raw, np.array([[raw.first_samp, 0, event_id]]), event_id, tmin, tmax, picks=picks, baseline=(None, 0)
    )
    epochs.drop_bad_epochs()
    with warnings.catch_warnings(record=True):
        evoked = epochs.average()

    # Event at the end
    epochs = Epochs(
        raw, np.array([[raw.last_samp, 0, event_id]]), event_id, tmin, tmax, picks=picks, baseline=(None, 0)
    )

    with warnings.catch_warnings(record=True):
        evoked = epochs.average()
        assert evoked
    warnings.resetwarnings()
Developer: rgoj, Project: mne-python, Lines: 26, Source: test_epochs.py


Example 7: test_evoked_io_from_epochs

def test_evoked_io_from_epochs():
    """Test IO of evoked data made from epochs
    """
    # offset our tmin so we don't get exactly a zero value when decimating
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter("always")
        epochs = Epochs(raw, events[:4], event_id, tmin + 0.011, tmax, picks=picks, baseline=(None, 0), decim=5)
    assert_true(len(w) == 1)
    evoked = epochs.average()
    evoked.save(op.join(tempdir, "evoked-ave.fif"))
    evoked2 = read_evokeds(op.join(tempdir, "evoked-ave.fif"))[0]
    assert_allclose(evoked.data, evoked2.data, rtol=1e-4, atol=1e-20)
    assert_allclose(evoked.times, evoked2.times, rtol=1e-4, atol=1 / evoked.info["sfreq"])

    # now let's do one with negative time
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter("always")
        epochs = Epochs(raw, events[:4], event_id, 0.1, tmax, picks=picks, baseline=(0.1, 0.2), decim=5)
    evoked = epochs.average()
    evoked.save(op.join(tempdir, "evoked-ave.fif"))
    evoked2 = read_evokeds(op.join(tempdir, "evoked-ave.fif"))[0]
    assert_allclose(evoked.data, evoked2.data, rtol=1e-4, atol=1e-20)
    assert_allclose(evoked.times, evoked2.times, rtol=1e-4, atol=1e-20)

    # should be equivalent to a cropped original
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter("always")
        epochs = Epochs(raw, events[:4], event_id, -0.2, tmax, picks=picks, baseline=(0.1, 0.2), decim=5)
    evoked = epochs.average()
    evoked.crop(0.099, None)
    assert_allclose(evoked.data, evoked2.data, rtol=1e-4, atol=1e-20)
    assert_allclose(evoked.times, evoked2.times, rtol=1e-4, atol=1e-20)
Developer: rgoj, Project: mne-python, Lines: 32, Source: test_epochs.py


Example 8: _check_predict_proba

def _check_predict_proba(clf, X, y):
    proba = clf.predict_proba(X)
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        # We know that we can have division by zero
        log_proba = clf.predict_log_proba(X)

    y = np.atleast_1d(y)
    if y.ndim == 1:
        y = np.reshape(y, (-1, 1))

    n_outputs = y.shape[1]
    n_samples = len(X)

    if n_outputs == 1:
        proba = [proba]
        log_proba = [log_proba]

    for k in xrange(n_outputs):
        assert_equal(proba[k].shape[0], n_samples)
        assert_equal(proba[k].shape[1], len(np.unique(y[:, k])))
        assert_array_equal(proba[k].sum(axis=1), np.ones(len(X)))
        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            # We know that we can have division by zero
            assert_array_equal(np.log(proba[k]), log_proba[k])
Developer: RONNCC, Project: scikit-learn, Lines: 26, Source: test_dummy.py


Example 9: _check_roundtrip

    def _check_roundtrip(self, frame):
        _skip_if_no_MySQLdb()
        drop_sql = "DROP TABLE IF EXISTS test_table"
        cur = self.db.cursor()
        with warnings.catch_warnings():
            warnings.filterwarnings("ignore", "Unknown table.*")
            cur.execute(drop_sql)
        sql.write_frame(frame, name='test_table', con=self.db, flavor='mysql')
        result = sql.read_frame("select * from test_table", self.db)

        # HACK! Change this once indexes are handled properly.
        result.index = frame.index
        result.index.name = frame.index.name

        expected = frame
        tm.assert_frame_equal(result, expected)

        frame['txt'] = ['a'] * len(frame)
        frame2 = frame.copy()
        index = Index(lrange(len(frame2))) + 10
        frame2['Idx'] = index
        drop_sql = "DROP TABLE IF EXISTS test_table2"
        cur = self.db.cursor()
        with warnings.catch_warnings():
            warnings.filterwarnings("ignore", "Unknown table.*")
            cur.execute(drop_sql)
        sql.write_frame(frame2, name='test_table2', con=self.db, flavor='mysql')
        result = sql.read_frame("select * from test_table2", self.db,
                                index_col='Idx')
        expected = frame.copy()

        # HACK! Change this once indexes are handled properly.
        expected.index = index
        expected.index.names = result.index.names
        tm.assert_frame_equal(expected, result)
Developer: Al-Harazmi, Project: pandas, Lines: 35, Source: test_sql.py


Example 10: test_class_weight_classifiers

def test_class_weight_classifiers():
    # test that class_weight works and that the semantics are consistent
    classifiers = all_estimators(type_filter="classifier")

    with warnings.catch_warnings(record=True):
        classifiers = [c for c in classifiers if "class_weight" in c[1]().get_params().keys()]

    for n_centers in [2, 3]:
        # create a very noisy dataset
        X, y = make_blobs(centers=n_centers, random_state=0, cluster_std=20)
        X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.5, random_state=0)
        for name, Classifier in classifiers:
            if name == "NuSVC":
                # the sparse version has a parameter that doesn't do anything
                continue
            if name.endswith("NB"):
                # NaiveBayes classifiers have a somewhat different interface.
                # FIXME SOON!
                continue
            if n_centers == 2:
                class_weight = {0: 1000, 1: 0.0001}
            else:
                class_weight = {0: 1000, 1: 0.0001, 2: 0.0001}

            with warnings.catch_warnings(record=True):
                classifier = Classifier(class_weight=class_weight)
            if hasattr(classifier, "n_iter"):
                classifier.set_params(n_iter=100)

            set_random_state(classifier)
            classifier.fit(X_train, y_train)
            y_pred = classifier.predict(X_test)
            assert_greater(np.mean(y_pred == 0), 0.9)
Developer: nicomahler, Project: scikit-learn, Lines: 33, Source: test_common.py


Example 11: test_graph_iterative

    def test_graph_iterative(self):
        graph = MigrationGraph()
        root = ("app_a", "1")
        graph.add_node(root, None)
        expected = [root]
        for i in range(2, 1000):
            parent = ("app_a", str(i - 1))
            child = ("app_a", str(i))
            graph.add_node(child, None)
            graph.add_dependency(str(i), child, parent)
            expected.append(child)
        leaf = expected[-1]

        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter('always', RuntimeWarning)
            forwards_plan = graph.forwards_plan(leaf)

        self.assertEqual(len(w), 1)
        self.assertTrue(issubclass(w[-1].category, RuntimeWarning))
        self.assertEqual(str(w[-1].message), RECURSION_DEPTH_WARNING)
        self.assertEqual(expected, forwards_plan)

        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter('always', RuntimeWarning)
            backwards_plan = graph.backwards_plan(root)

        self.assertEqual(len(w), 1)
        self.assertTrue(issubclass(w[-1].category, RuntimeWarning))
        self.assertEqual(str(w[-1].message), RECURSION_DEPTH_WARNING)
        self.assertEqual(expected[::-1], backwards_plan)
Developer: 2015E8007361074, Project: django, Lines: 30, Source: test_graph.py


Example 12: test_sparse_randomized_pca_inverse

def test_sparse_randomized_pca_inverse():
    """Test that RandomizedPCA is inversible on sparse data"""
    rng = np.random.RandomState(0)
    n, p = 50, 3
    X = rng.randn(n, p)  # spherical data
    X[:, 1] *= 0.00001  # make middle component relatively small
    # no large means because the sparse version of randomized pca does not do
    # centering to avoid breaking the sparsity
    X = csr_matrix(X)

    # same check that we can find the original data from the transformed signal
    # (since the data is almost of rank n_components)
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter("always", DeprecationWarning)
        pca = RandomizedPCA(n_components=2, random_state=0).fit(X)
        assert_equal(len(w), 1)
        assert_equal(w[0].category, DeprecationWarning)

    Y = pca.transform(X)

    Y_inverse = pca.inverse_transform(Y)
    assert_almost_equal(X.todense(), Y_inverse, decimal=2)

    # same as above with whitening (approximate reconstruction)
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter("always", DeprecationWarning)
        pca = RandomizedPCA(n_components=2, whiten=True, random_state=0).fit(X)
        assert_equal(len(w), 1)
        assert_equal(w[0].category, DeprecationWarning)

    Y = pca.transform(X)
    Y_inverse = pca.inverse_transform(Y)
    relative_max_delta = (np.abs(X.todense() - Y_inverse) / np.abs(X).mean()).max()
    # XXX: this does not seem to work as expected:
    assert_almost_equal(relative_max_delta, 0.91, decimal=2)
Developer: mugiro, Project: elm-python, Lines: 35, Source: test_pca.py


Example 13: test_random_pair_match

    def test_random_pair_match(self) :
        self.assertRaises(ValueError, dedupe.core.randomPairsMatch, 1, 0, 10)
        self.assertRaises(ValueError, dedupe.core.randomPairsMatch, 0, 0, 10)
        self.assertRaises(ValueError, dedupe.core.randomPairsMatch, 0, 1, 10)

        assert len(dedupe.core.randomPairsMatch(100, 100, 100)) == 100
        assert len(dedupe.core.randomPairsMatch(10, 10, 99)) == 99


        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter("always")
            pairs = dedupe.core.randomPairsMatch(10, 10, 200)
            assert len(w) == 1
            assert str(w[-1].message) == "Requested sample of size 200, only returning 100 possible pairs"

        assert len(pairs) == 100

        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter("always")
            pairs = dedupe.core.randomPairsMatch(10, 10, 200)
            assert len(w) == 1
            assert str(w[-1].message) == "Requested sample of size 200, only returning 100 possible pairs"


        random.seed(123)
        numpy.random.seed(123)
        pairs = dedupe.core.randomPairsMatch(10, 10, 10)
        assert pairs == set([(7, 3), (3, 3), (2, 9), (6, 0), (2, 0), 
                             (1, 9), (9, 4), (0, 4), (1, 0), (1, 1)])
Developer: mhong19414, Project: dedupe, Lines: 29, Source: test_core.py


Example 14: test_random_pair

    def test_random_pair(self) :
        self.assertRaises(ValueError, dedupe.core.randomPairs, 1, 10)
        assert dedupe.core.randomPairs(10, 10).any()
        random.seed(123)
        numpy.random.seed(123)
        random_pairs = dedupe.core.randomPairs(10, 5)
        assert numpy.array_equal(random_pairs, 
                                 numpy.array([[ 0,  3],
                                              [ 3,  8],
                                              [ 4,  9],
                                              [ 5,  9],
                                              [ 2,  3]]))

        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter("always")
            dedupe.core.randomPairs(10, 10**6)
            assert len(w) == 1
            assert str(w[-1].message) == "Requested sample of size 1000000, only returning 45 possible pairs"

        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter("always")
            dedupe.core.randomPairs(10**40, 10)
            assert len(w) == 2
            assert str(w[0].message) == "There may be duplicates in the sample"
            assert "Asked to sample pairs from" in str(w[1].message)

        random.seed(123)
        numpy.random.seed(123)
        assert numpy.array_equal(dedupe.core.randomPairs(11**9, 1),
                                 numpy.array([[1228959102, 1840268610]]))
Developer: mhong19414, Project: dedupe, Lines: 30, Source: test_core.py


Example 15: test_deprecated_score_func

def test_deprecated_score_func():
    # test that old deprecated way of passing a score / loss function is still
    # supported
    X, y = make_classification(n_samples=200, n_features=100, random_state=0)
    clf = LinearSVC(random_state=0)
    cv = GridSearchCV(clf, {'C': [0.1, 1.0]}, scoring="f1")
    cv.fit(X[:180], y[:180])
    y_pred = cv.predict(X[180:])
    C = cv.best_estimator_.C

    clf = LinearSVC(random_state=0)
    cv = GridSearchCV(clf, {'C': [0.1, 1.0]}, score_func=f1_score)
    with warnings.catch_warnings(record=True):
        # catch deprecation warning
        cv.fit(X[:180], y[:180])
    y_pred_func = cv.predict(X[180:])
    C_func = cv.best_estimator_.C

    assert_array_equal(y_pred, y_pred_func)
    assert_equal(C, C_func)

    # test loss where greater is worse
    def f1_loss(y_true_, y_pred_):
        return -f1_score(y_true_, y_pred_)

    clf = LinearSVC(random_state=0)
    cv = GridSearchCV(clf, {'C': [0.1, 1.0]}, loss_func=f1_loss)
    with warnings.catch_warnings(record=True):
        # catch deprecation warning
        cv.fit(X[:180], y[:180])
    y_pred_loss = cv.predict(X[180:])
    C_loss = cv.best_estimator_.C

    assert_array_equal(y_pred, y_pred_loss)
    assert_equal(C, C_loss)
Developer: CheMcCandless, Project: scikit-learn, Lines: 35, Source: test_grid_search.py


Example 16: test_array_richcompare_legacy_weirdness

    def test_array_richcompare_legacy_weirdness(self):
        # It doesn't really work to use assert_deprecated here, b/c part of
        # the point of assert_deprecated is to check that when warnings are
        # set to "error" mode then the error is propagated -- which is good!
        # But here we are testing a bunch of code that is deprecated *because*
        # it has the habit of swallowing up errors and converting them into
        # different warnings. So assert_warns will have to be sufficient.
        assert_warns(FutureWarning, lambda: np.arange(2) == "a")
        assert_warns(FutureWarning, lambda: np.arange(2) != "a")
        # No warning for scalar comparisons
        with warnings.catch_warnings():
            warnings.filterwarnings("error")
            assert_(not (np.array(0) == "a"))
            assert_(np.array(0) != "a")
            assert_(not (np.int16(0) == "a"))
            assert_(np.int16(0) != "a")

        for arg1 in [np.asarray(0), np.int16(0)]:
            struct = np.zeros(2, dtype="i4,i4")
            for arg2 in [struct, "a"]:
                for f in [operator.lt, operator.le, operator.gt, operator.ge]:
                    if sys.version_info[0] >= 3:
                        # py3
                        with warnings.catch_warnings(record=True) as l:
                            warnings.filterwarnings("always")
                            assert_raises(TypeError, f, arg1, arg2)
                            assert_(not l)
                    else:
                        # py2
                        assert_warns(DeprecationWarning, f, arg1, arg2)
Developer: EelcoHoogendoorn, Project: numpy, Lines: 30, Source: test_deprecations.py


Example 17: test_iloc_getitem_multiindex

    def test_iloc_getitem_multiindex(self):
        mi_labels = DataFrame(np.random.randn(4, 3),
                              columns=[['i', 'i', 'j'], ['A', 'A', 'B']],
                              index=[['i', 'i', 'j', 'k'],
                                     ['X', 'X', 'Y', 'Y']])

        mi_int = DataFrame(np.random.randn(3, 3),
                           columns=[[2, 2, 4], [6, 8, 10]],
                           index=[[4, 4, 8], [8, 10, 12]])

        # the first row
        rs = mi_int.iloc[0]
        with catch_warnings(record=True):
            xp = mi_int.ix[4].ix[8]
        tm.assert_series_equal(rs, xp, check_names=False)
        assert rs.name == (4, 8)
        assert xp.name == 8

        # 2nd (last) columns
        rs = mi_int.iloc[:, 2]
        with catch_warnings(record=True):
            xp = mi_int.ix[:, 2]
        tm.assert_series_equal(rs, xp)

        # corner column
        rs = mi_int.iloc[2, 2]
        with catch_warnings(record=True):
            xp = mi_int.ix[:, 2].ix[2]
        assert rs == xp

        # this is basically regular indexing
        rs = mi_labels.iloc[2, 2]
        with catch_warnings(record=True):
            xp = mi_labels.ix['j'].ix[:, 'j'].ix[0, 0]
        assert rs == xp
Developer: BobMcFry, Project: pandas, Lines: 35, Source: test_multiindex.py


Example 18: test_get_url_shortener

    def test_get_url_shortener(self):
        us_settings.URL_SHORTENER_BACKEND = 'mymodule.myclass'
        try:
            with warnings.catch_warnings(record=True) as w:
                self.assertEquals(get_url_shortener(), default_backend)
                self.assertTrue(issubclass(w[-1].category, RuntimeWarning))
                self.assertEquals(
                    str(w[-1].message),
                    'mymodule.myclass backend cannot be imported')
        except AttributeError:
            # Fails under Python 2.5, because of 'warnings.catch_warnings'
            pass

        us_settings.URL_SHORTENER_BACKEND = 'gstudio.tests.custom_url_shortener'
        try:
            with warnings.catch_warnings(record=True) as w:
                self.assertEquals(get_url_shortener(), default_backend)
                self.assertTrue(issubclass(w[-1].category, RuntimeWarning))
                self.assertEquals(
                    str(w[-1].message),
                    'This backend only exists for testing')
        except AttributeError:
            # Fails under Python 2.5, because of 'warnings.catch_warnings'
            pass

        us_settings.URL_SHORTENER_BACKEND = 'gstudio.url_shortener'\
                                            '.backends.default'
        self.assertEquals(get_url_shortener(), default_backend)
Developer: Big-Data, Project: gnowsys-studio, Lines: 28, Source: url_shortener.py


Example 19: run_tests

        def run_tests(df, rhs, right):
            # label, index, slice
            r, i, s = list('bcd'), [1, 2, 3], slice(1, 4)
            c, j, l = ['joe', 'jolie'], [1, 2], slice(1, 3)

            left = df.copy()
            left.loc[r, c] = rhs
            tm.assert_frame_equal(left, right)

            left = df.copy()
            left.iloc[i, j] = rhs
            tm.assert_frame_equal(left, right)

            left = df.copy()
            with catch_warnings(record=True):
                left.ix[s, l] = rhs
            tm.assert_frame_equal(left, right)

            left = df.copy()
            with catch_warnings(record=True):
                left.ix[i, j] = rhs
            tm.assert_frame_equal(left, right)

            left = df.copy()
            with catch_warnings(record=True):
                left.ix[r, c] = rhs
            tm.assert_frame_equal(left, right)
Developer: cpcloud, Project: pandas, Lines: 27, Source: test_indexing.py


Example 20: test_deprecated_callbacks

    def test_deprecated_callbacks(self):
        # Tests that callback functions that return values are still supported
        # but that warnings are generated

        def returns_cube(cube, field, filename):
            return cube

        def returns_no_cube(cube, field, filename):
            return iris.io.NO_CUBE

        fname = tests.get_data_path(["PP", "trui", "air_temp_init", "200812011200__qwqu12ff.initanl.pp"])

        # Catch all warnings for returns_cube
        with warnings.catch_warnings(record=True) as generated_warnings_cube:
            warnings.simplefilter("always")
            r = iris.load(fname, callback=returns_cube)

            # Test that our warnings are present in the generated warnings:
            gen_warnings_cube = [str(x.message) for x in generated_warnings_cube]
            self.assertIn(iris.io.CALLBACK_DEPRECATION_MSG, gen_warnings_cube, "Callback deprecation warning message not issued.")

        # Catch all warnings for returns_no_cube
        with warnings.catch_warnings(record=True) as generated_warnings_no_cube:
            warnings.simplefilter("always")
            r = iris.load(fname, callback=returns_no_cube)

            # Test that our warnings are present in the generated warnings:
            gen_warnings_no_cube = [str(x.message) for x in generated_warnings_no_cube]
            self.assertIn(iris.io.CALLBACK_DEPRECATION_MSG, gen_warnings_no_cube, "Callback deprecation warning message not issued.")
Developer: omarjamil, Project: iris, Lines: 28, Source: test_uri_callback.py



Note: The warnings.catch_warnings examples in this article were compiled by 纯净天空 from source-code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by their respective authors, who retain copyright; refer to each project's license before redistributing or reusing the code. Do not reproduce this article without permission.

