

Python kaldi_io.read_mat_scp Method Code Examples

This article collects typical usage examples of the Python method kaldi_io.read_mat_scp. If you are wondering how exactly kaldi_io.read_mat_scp is used in Python, or what it looks like in real code, the curated examples below may help. You can also explore other usage examples from the kaldi_io module.


The following presents 9 code examples of the kaldi_io.read_mat_scp method, sorted by popularity.
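Before turning to the examples, here is a minimal usage sketch of the call pattern itself (a hedged illustration: the feats.scp path below is hypothetical, and each matrix comes back as a 2-D NumPy array of shape (num_frames, feat_dim)):

import kaldi_io

# Iterate over (utterance-key, feature-matrix) pairs described by a Kaldi scp file.
for key, mat in kaldi_io.read_mat_scp('data/train/feats.scp'):
    print(key, mat.shape, mat.dtype)

# Or materialize everything into a dictionary keyed by utterance id.
feats = {key: mat for key, mat in kaldi_io.read_mat_scp('data/train/feats.scp')}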

Example 1: read_mat_scp

# Required module: import kaldi_io [as alias]
# Or: from kaldi_io import read_mat_scp [as alias]
def read_mat_scp(file_or_fd: Any) -> Iterable[Tuple[str, Tensor]]:
    r"""Create generator of (key,matrix<float32/float64>) tuples, read according to Kaldi scp.

    Args:
        file_or_fd (str/FileDescriptor): scp, gzipped scp, pipe or opened file descriptor

    Returns:
        Iterable[Tuple[str, Tensor]]: The string is the key and the tensor is the matrix read from file

    Example
        >>> # read scp to a 'dictionary'
        >>> d = { u:d for u,d in torchaudio.kaldi_io.read_mat_scp(file) }
    """
    return _convert_method_output_to_tensor(file_or_fd, kaldi_io.read_mat_scp) 
Developer: pytorch, Project: audio, Lines: 16, Source: kaldi_io.py

Example 2: __init__

# Required module: import kaldi_io [as alias]
# Or: from kaldi_io import read_mat_scp [as alias]
def __init__(self, scp_file, utt2label_file, M):
        'Initialization'
        self.M = M
        self.scp_file  = scp_file
        self.utt2len   = ako.read_key_len(scp_file)
        self.utt2label = ako.read_key_label(utt2label_file)
        self.feat_gen  = ko.read_mat_scp(scp_file) # feature generator

        self.utt2mat = {}
        for key,mat in self.feat_gen:
            self.utt2mat[key] = mat 
Developer: jefflai108, Project: Attentive-Filtering-Network, Lines: 13, Source: v1_dataset.py

Example 3: __init__

# Required module: import kaldi_io [as alias]
# Or: from kaldi_io import read_mat_scp [as alias]
def __init__(self, scp_file, utt2label_file, M):
        'Initialization'
        self.M = M
        self.scp_file  = scp_file
        self.utt2len   = ako.read_key_len(scp_file)
        self.utt2label = ako.read_key_label(utt2label_file)
        self.feat_gen  = ko.read_mat_scp(scp_file) # feature generator

        mats, labels = [], [] # construct feature and label matrices
        for key,mat in self.feat_gen:
            mats.append(tensor_cnn(mat,M))
            labels.append(np.repeat(self.utt2label[key], len(mat)))
        self.label_mat  = np.hstack(labels)
        self.feat_mat   = np.vstack(mats) 
Developer: jefflai108, Project: Attentive-Filtering-Network, Lines: 16, Source: v5_dataset.py

Example 4: __init__

# Required module: import kaldi_io [as alias]
# Or: from kaldi_io import read_mat_scp [as alias]
def __init__(self, scp_file, utt2label_file, M):
        'Initialization'
        self.M = M
        self.scp_file  = scp_file
        self.utt2len   = ako.read_key_len(scp_file)
        self.utt2label = ako.read_key_label(utt2label_file)
        self.feat_gen  = ko.read_mat_scp(scp_file) # feature generator

        mats, labels = [], [] # construct feature and label matrices
        for key,mat in self.feat_gen:
            mats.append(slice(mat,M))
            labels.append(np.repeat(self.utt2label[key], len(mat)))
        self.label_mat  = np.hstack(labels)
        self.feat_mat   = np.vstack(mats) 
Developer: jefflai108, Project: Attentive-Filtering-Network, Lines: 16, Source: v4_dataset.py

Example 5: write_kaldi

# Required module: import kaldi_io [as alias]
# Or: from kaldi_io import read_mat_scp [as alias]
def write_kaldi(orig_feat_scp, ark_scp_output, max_len):
    """Write the slice feature matrix to ark_scp_output
    """
    with ko.open_or_fd(ark_scp_output,'wb') as f:
        for key,mat in ko.read_mat_scp(orig_feat_scp):
            tensor = tensor_cnn_utt(mat, max_len)
            if tensor.shape[1] != max_len:
                print(tensor.shape)
            ko.write_mat(f, tensor, key=key) 
Developer: jefflai108, Project: Attentive-Filtering-Network, Lines: 11, Source: feat_slicing.py
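In this example, ark_scp_output is a Kaldi write specifier. A hedged sketch of how such a specifier is typically built (the copy-feats pipe and file paths are illustrative assumptions following the usual kaldi_io write pattern, not taken from the original project):

import kaldi_io as ko

# Pipe the written ark stream through copy-feats so that both an .ark file
# and a matching .scp index are produced (paths are illustrative).
ark_scp_output = ('ark:| copy-feats --compress=true ark:- '
                  'ark,scp:data/feats_sliced.ark,data/feats_sliced.scp')

with ko.open_or_fd(ark_scp_output, 'wb') as f:
    for key, mat in ko.read_mat_scp('data/train/feats.scp'):
        ko.write_mat(f, mat, key=key)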

Example 6: plot_feat

# Required module: import kaldi_io [as alias]
# Or: from kaldi_io import read_mat_scp [as alias]
def plot_feat(orig_feat_scp, output_plot_wd):
    """plot one Kaldi logspec feat 
    """
    for key,mat in ko.read_mat_scp(orig_feat_scp):
        #mat = np.transpose(mat)
        print(mat.shape)
        
        fig, ax = plt.subplots(nrows=1, ncols=1, figsize=(20,4))
        cax = ax.matshow(mat, interpolation='nearest', aspect='auto', 
                cmap=plt.cm.afmhot, origin='lower')
        fig.colorbar(cax)
        plt.savefig(output_plot_wd + key + '.png') 
Developer: jefflai108, Project: Attentive-Filtering-Network, Lines: 14, Source: feat_plot.py

Example 7: __init__

# Required module: import kaldi_io [as alias]
# Or: from kaldi_io import read_mat_scp [as alias]
def __init__(self, scp_file, utt2label_file):
        'Initialization'
        self.scp_file  = scp_file
        self.utt2label = ako.read_key_label(utt2label_file)
        self.feat_gen  = ko.read_mat_scp(scp_file) # feature generator

        mats, labels = [], [] # construct feature and label matrices
        for key,mat in self.feat_gen:
            mats.append(mat)
            labels.append(np.repeat(self.utt2label[key], len(mat)))
        self.label_mat  = np.hstack(labels)
        self.feat_mat   = np.vstack(mats) 
Developer: jefflai108, Project: Attentive-Filtering-Network, Lines: 14, Source: v2_dataset.py

Example 8: testMatrixReadWrite

# Required module: import kaldi_io [as alias]
# Or: from kaldi_io import read_mat_scp [as alias]
def testMatrixReadWrite(self):
        """
        Test read/write for float matrices.
        """
        # read,
        flt_mat = { k:m for k,m in kaldi_io.read_mat_scp('tests/data/feats_ascii.scp') } # ascii-scp,
        flt_mat2 = { k:m for k,m in kaldi_io.read_mat_ark('tests/data/feats_ascii.ark') } # ascii-ark,
        flt_mat3 = { k:m for k,m in kaldi_io.read_mat_ark('tests/data/feats.ark') } # binary-ark,
        # store,
        with kaldi_io.open_or_fd('tests/data_re-saved/mat.ark','wb') as f:
            for k,m in flt_mat3.items(): kaldi_io.write_mat(f, m, k)
        # read and compare,
        for k,m in kaldi_io.read_mat_ark('tests/data_re-saved/mat.ark'):
            self.assertTrue(np.array_equal(m, flt_mat3[k]), msg="flt. matrix same after re-saving") 
Developer: vesis84, Project: kaldi-io-for-python, Lines: 16, Source: test_kaldi_io.py

Example 9: load_ranges_data

# Required module: import kaldi_io [as alias]
# Or: from kaldi_io import read_mat_scp [as alias]
def load_ranges_data(utt_to_chunks, minibatch_info, minibatch_size, scp_file_path, fea_dim, logger=None):
    num_err, num_done = 0, 0
    if logger is not None:
        logger.info('Start allocating memory for loading training examples ...')
    all_data = np.ndarray(len(minibatch_info), dtype=object)
    labels = np.ndarray(len(minibatch_info), dtype=object)
    for i in range(len(minibatch_info)):
        all_data[i] = np.zeros((minibatch_size, minibatch_info[i][1], fea_dim), dtype=np.float32)
        labels[i] = np.zeros(minibatch_size, dtype=np.int32)
    if logger is not None:
        logger.info('Start loading training examples to the memory ...')
    for key, mat in kaldi_io.read_mat_scp(scp_file_path):
        got = utt_to_chunks.get(key)
        if got is None:  # no entry in the ranges input file for this utterance
            if logger is not None:
                logger.info("Could not create examples from utterance '%s' "
                            "because it has no entry in the ranges input file." % key)
            num_err += 1
        else:
            num_done += 1
            for minibatch_index, offset, length, label in got:
                info = minibatch_info[minibatch_index]
                mm = mat[offset:offset + length, :]
                dat = all_data[minibatch_index]
                assert dat.shape[1] == mm.shape[0] and dat.shape[2] == mm.shape[1]
                dat[info[2], :, :] = mm
                labels[minibatch_index][info[2]] = label
                info[2] += 1
    if logger is not None:
        logger.info('Loading features finished with {0} errors and {1} success from total {2} files.'.
                    format(num_err, num_done, num_err + num_done))
    return all_data, labels 
Developer: hsn-zeinali, Project: x-vector-kaldi-tf, Lines: 34, Source: examples_io.py
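For orientation, a hedged sketch of the input structures this function appears to expect, inferred purely from the indexing in the code above (the names, values, and field order are illustrative assumptions, not taken from the original project):

# utt_to_chunks: utterance key -> list of (minibatch_index, offset, length, label) tuples.
utt_to_chunks = {'utt001': [(0, 0, 200, 3), (1, 200, 200, 3)]}

# minibatch_info[i]: a mutable record whose second field is the chunk length (in frames)
# for minibatch i and whose third field is a running row counter that the loader increments.
minibatch_info = [[None, 200, 0], [None, 200, 0]]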


Note: The kaldi_io.read_mat_scp examples in this article were compiled by 純淨天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by various developers; copyright of the source code remains with the original authors. For redistribution and use, please follow the License of the corresponding project; do not reproduce without permission.