

Python hmm.MultinomialHMM Code Examples

This article collects typical usage examples of the Python class hmmlearn.hmm.MultinomialHMM. If you are unsure what hmm.MultinomialHMM does, how to call it, or what working code looks like, the curated examples below may help. You can also explore further usage examples from the hmmlearn.hmm module.


The following presents 6 code examples of hmm.MultinomialHMM, drawn from open-source projects and ordered by popularity.

Example 1: _construct_model

# Required import: from hmmlearn import hmm [as alias]
# Or: from hmmlearn.hmm import MultinomialHMM [as alias]
def _construct_model(self, startprob, transmat, emissionprob, vocabulary):
        """
        Create internal HMM model with given matrices and store it to self.model_
        :param startprob: Starting probability [negative_starting_prob, positive_starting_prob]
        :param transmat: Transition matrix (an array where the [i][j]-th element is the probability of transitioning from the i-th to the j-th hidden state)
        :param emissionprob: Emission probability [[neg_pfam1, neg_pfam2, ...], [pos_pfam1, pos_pfam2, ...]] with pfam IDs indexed by their vocabulary index numbers
        :param vocabulary: Vocabulary dictionary with {pfam_id: index_number_in_emission}
        :return: self
        """
        try:
            from hmmlearn import hmm
        except ImportError:
            raise get_hmmlearn_import_error()
        self.model_ = hmm.MultinomialHMM(n_components=2)
        if isinstance(startprob, list):
            startprob = np.array(startprob)
        if isinstance(transmat, list):
            transmat = np.array(transmat)
        self.model_.startprob_ = startprob
        self.model_.transmat_ = transmat
        self.model_.emissionprob_ = emissionprob
        self.vocabulary_ = vocabulary
        return self 
Developer: Merck, Project: deepbgc, Lines of code: 25, Source file: hmm.py
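
For context, a model parameterised this way can be used directly for prediction. Below is a minimal decoding sketch with made-up probabilities and a hypothetical three-entry pfam vocabulary; it is not deepbgc code, and it assumes an hmmlearn release (< 0.3) in which MultinomialHMM consumes one integer symbol per time step (later versions call this CategoricalHMM).

import numpy as np
from hmmlearn import hmm

# hypothetical two-state model: state 0 = negative, state 1 = positive
model = hmm.MultinomialHMM(n_components=2)
model.startprob_ = np.array([0.9, 0.1])
model.transmat_ = np.array([[0.95, 0.05],
                            [0.10, 0.90]])
# three vocabulary entries, e.g. {'PF00001': 0, 'PF00002': 1, 'PF00003': 2}
model.emissionprob_ = np.array([[0.5, 0.4, 0.1],
                                [0.1, 0.2, 0.7]])

# observations are vocabulary indices, one per row
observations = np.array([[0], [1], [2], [2], [0]])
logprob, states = model.decode(observations)  # Viterbi path over the two states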

Example 2: fit

# Required import: from hmmlearn import hmm [as alias]
# Or: from hmmlearn.hmm import MultinomialHMM [as alias]
def fit(self, X_list, y_list, startprob=None, transmat=None, verbose=1, debug_progress_path=None, validation_X_list=None, validation_y_list=None):
        if validation_X_list:
            logging.warning('GeneBorderHMM: Validation is present but has no effect yet')
        if verbose:
            logging.info('Training two state model...')

        two_state_model = DiscreteHMM()
        two_state_model.fit(X_list, y_list, startprob=startprob, transmat=transmat, verbose=verbose)

        emission, self.vocabulary_ = self._convert_emission(two_state_model.model_.emissionprob_, two_state_model.vocabulary_)

        from hmmlearn import hmm
        self.model_ = hmm.MultinomialHMM(n_components=4)
        self.model_.startprob_ = self._convert_startprob(startprob)
        self.model_.transmat_ = self._convert_transmat(transmat, X_list)
        self.model_.emissionprob_ = emission
        return self 
Developer: Merck, Project: deepbgc, Lines of code: 19, Source file: hmm.py

Example 3: to_viterbi_cents

# Required import: from hmmlearn import hmm [as alias]
# Or: from hmmlearn.hmm import MultinomialHMM [as alias]
def to_viterbi_cents(salience):
    """
    Find the Viterbi path using a transition prior that induces pitch
    continuity.
    """
    from hmmlearn import hmm

    # uniform prior on the starting pitch
    starting = np.ones(360) / 360

    # transition probabilities inducing continuous pitch
    xx, yy = np.meshgrid(range(360), range(360))
    transition = np.maximum(12 - abs(xx - yy), 0)
    transition = transition / np.sum(transition, axis=1)[:, None]

    # emission probability = fixed probability for self, evenly distribute the
    # others
    self_emission = 0.1
    emission = (np.eye(360) * self_emission + np.ones(shape=(360, 360)) *
                ((1 - self_emission) / 360))

    # fix the model parameters because we are not optimizing the model
    model = hmm.MultinomialHMM(360, starting, transition)
    model.startprob_, model.transmat_, model.emissionprob_ = \
        starting, transition, emission

    # find the Viterbi path
    observations = np.argmax(salience, axis=1)
    path = model.predict(observations.reshape(-1, 1), [len(observations)])

    return np.array([to_local_average_cents(salience[i, :], path[i]) for i in
                     range(len(observations))]) 
Developer: marl, Project: crepe, Lines of code: 34, Source file: core.py
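
Called on a salience matrix of shape (n_frames, 360) -- one row of pitch-bin activations per audio frame -- the function returns one smoothed pitch estimate in cents per frame. A hedged usage sketch with random data standing in for real CREPE activations (it relies on to_viterbi_cents and to_local_average_cents from the same crepe module):

import numpy as np

# fake salience map: 100 frames x 360 pitch bins (real values come from the CREPE network)
salience = np.random.rand(100, 360).astype(np.float32)

cents = to_viterbi_cents(salience)
print(cents.shape)  # (100,): one Viterbi-smoothed pitch value per frame, in cents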

Example 4: setup_method

# Required import: from hmmlearn import hmm [as alias]
# Or: from hmmlearn.hmm import MultinomialHMM [as alias]
def setup_method(self, method):
        n_components = 2   # ['Rainy', 'Sunny']
        n_features = 3     # ['walk', 'shop', 'clean']
        self.h = hmm.MultinomialHMM(n_components)
        self.h.n_features = n_features
        self.h.startprob_ = np.array([0.6, 0.4])
        self.h.transmat_ = np.array([[0.7, 0.3], [0.4, 0.6]])
        self.h.emissionprob_ = np.array([[0.1, 0.4, 0.5],
                                         [0.6, 0.3, 0.1]]) 
Developer: hmmlearn, Project: hmmlearn, Lines of code: 11, Source file: test_multinomial_hmm.py
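
Outside the test class, the same Rainy/Sunny model can decode the classic walk/shop/clean observation sequence. A minimal sketch (again assuming a pre-0.3 hmmlearn, where MultinomialHMM models one categorical symbol per step; newer releases rename this behaviour to CategoricalHMM):

import numpy as np
from hmmlearn import hmm

# hidden states: 0 = Rainy, 1 = Sunny; observations: 0 = walk, 1 = shop, 2 = clean
model = hmm.MultinomialHMM(n_components=2)
model.startprob_ = np.array([0.6, 0.4])
model.transmat_ = np.array([[0.7, 0.3], [0.4, 0.6]])
model.emissionprob_ = np.array([[0.1, 0.4, 0.5],
                                [0.6, 0.3, 0.1]])

# observed sequence: walk, shop, clean
X = np.array([[0], [1], [2]])
logprob, states = model.decode(X, algorithm="viterbi")
print(states)  # most likely hidden weather on each day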

Example 5: test_fit_with_init

# Required import: from hmmlearn import hmm [as alias]
# Or: from hmmlearn.hmm import MultinomialHMM [as alias]
def test_fit_with_init(self, params='ste', n_iter=5):
        lengths = [10] * 10
        X, _state_sequence = self.h.sample(sum(lengths))

        # use the _init method to initialize parameters
        h = hmm.MultinomialHMM(self.n_components, params=params,
                               init_params=params)
        h._init(X, lengths=lengths)

        assert log_likelihood_increasing(h, X, lengths, n_iter) 
Developer: hmmlearn, Project: hmmlearn, Lines of code: 12, Source file: test_multinomial_hmm.py
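
The test exercises hmmlearn's multi-sequence fitting API: all sequences are concatenated into a single (n_samples, 1) array of symbols and lengths marks where each sequence ends. A self-contained sketch of the same pattern with made-up data (same pre-0.3 MultinomialHMM assumption as above):

import numpy as np
from hmmlearn import hmm

# three concatenated observation sequences over the symbols {0, 1, 2}
X = np.array([[0], [1], [2], [0],   # sequence 1 (length 4)
              [2], [2], [1],        # sequence 2 (length 3)
              [0], [0], [1], [2]])  # sequence 3 (length 4)
lengths = [4, 3, 4]

# let hmmlearn initialise start, transition and emission probabilities ('ste')
model = hmm.MultinomialHMM(n_components=2, n_iter=20,
                           init_params='ste', random_state=0)
model.fit(X, lengths=lengths)
print(model.transmat_)
print(model.score(X, lengths=lengths))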

Example 6: test_HMM

# Required import: from hmmlearn import hmm [as alias]
# Or: from hmmlearn.hmm import MultinomialHMM [as alias]
def test_HMM():
    np.random.seed(12345)
    np.set_printoptions(precision=5, suppress=True)

    P = default_hmm()
    ls, obs = P["latent_states"], P["obs_types"]

    # generate a new sequence
    O = generate_training_data(P, n_steps=30, n_examples=25)

    tol = 1e-5
    n_runs = 5
    best, best_theirs = (-np.inf, []), (-np.inf, [])
    for _ in range(n_runs):
        hmm = MultinomialHMM()
        A_, B_, pi_ = hmm.fit(O, ls, obs, tol=tol, verbose=True)

        theirs = MHMM(
            tol=tol,
            verbose=True,
            n_iter=int(1e9),
            transmat_prior=1,
            startprob_prior=1,
            algorithm="viterbi",
            n_components=len(ls),
        )

        O_flat = O.reshape(1, -1).flatten().reshape(-1, 1)
        theirs = theirs.fit(O_flat, lengths=[O.shape[1]] * O.shape[0])

        hmm2 = MultinomialHMM(A=A_, B=B_, pi=pi_)
        like = np.sum([hmm2.log_likelihood(obs) for obs in O])
        like_theirs = theirs.score(O_flat, lengths=[O.shape[1]] * O.shape[0])

        if like > best[0]:
            best = (like, {"A": A_, "B": B_, "pi": pi_})

        if like_theirs > best_theirs[0]:
            best_theirs = (
                like_theirs,
                {
                    "A": theirs.transmat_,
                    "B": theirs.emissionprob_,
                    "pi": theirs.startprob_,
                },
            )
    print("Final log likelihood of sequence: {:.5f}".format(best[0]))
    print("Final log likelihood of sequence (theirs): {:.5f}".format(best_theirs[0]))
    plot_matrices(P, best, best_theirs) 
Developer: ddbourgin, Project: numpy-ml, Lines of code: 51, Source file: hmm_plots.py
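
The reshape in the middle of this test is simply the hmmlearn multi-sequence convention: every training sequence is stacked row-wise into one (n_samples, 1) column of symbols, and lengths records how long each sequence is. A small illustration with made-up dimensions:

import numpy as np

# O: 25 training examples, each a sequence of 30 categorical symbols in {0, ..., 4}
O = np.random.randint(0, 5, size=(25, 30))

# equivalent to the O.reshape(1, -1).flatten().reshape(-1, 1) used in the test above
O_flat = O.reshape(-1, 1)            # shape (750, 1): all sequences stacked
lengths = [O.shape[1]] * O.shape[0]  # [30, 30, ..., 30], one entry per example

assert O_flat.shape == (sum(lengths), 1)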


Note: The hmmlearn.hmm.MultinomialHMM examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets come from open-source projects contributed by their original authors, who retain copyright; follow each project's license when distributing or using the code, and do not republish without permission.