

Python dataloader.ExceptionWrapper code examples

This article collects typical usage examples of torch.utils.data.dataloader.ExceptionWrapper in Python. ExceptionWrapper packages an exception together with its formatted traceback so that a DataLoader worker can report the error back to the main process over a queue. If you are unsure what dataloader.ExceptionWrapper is for or how to use it, the selected code examples below may help; you can also explore other usages within torch.utils.data.dataloader.


The following presents 3 code examples of dataloader.ExceptionWrapper, ordered by popularity by default.
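Before the examples, it may help to see the pattern they all rely on in isolation. The sketch below is not taken from the projects listed here; it simply shows an exception being wrapped on the producer side and re-raised on the consumer side. It is hedged because the ExceptionWrapper API (exc_type/exc_msg attributes versus a reraise() method, and the exact import location) varies across PyTorch versions.

import sys
from torch.utils.data.dataloader import ExceptionWrapper  # import path and API vary slightly across PyTorch versions

def _risky():
    raise ValueError("failure inside a worker")

try:
    _risky()
except Exception:
    # Wrap the exception type together with its formatted traceback so it can
    # be pickled and shipped over a multiprocessing queue.
    wrapper = ExceptionWrapper(sys.exc_info())

# Consumer side: surface the worker's error in the current process.
try:
    if hasattr(wrapper, "reraise"):      # newer PyTorch versions
        wrapper.reraise()
    else:                                # older versions expose exc_type / exc_msg
        raise wrapper.exc_type(wrapper.exc_msg)
except ValueError as err:
    print("re-raised from worker:", err)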

Example 1: _ms_loop

# Required import: from torch.utils.data import dataloader [as alias]
# Or: from torch.utils.data.dataloader import ExceptionWrapper [as alias]
# The snippet also relies on module-level names from the original file:
# sys, random, torch, _use_shared_memory and _set_worker_signal_handlers.
def _ms_loop(dataset, index_queue, data_queue, collate_fn, scale, seed, init_fn, worker_id):
    global _use_shared_memory
    _use_shared_memory = True
    _set_worker_signal_handlers()

    torch.set_num_threads(1)
    torch.manual_seed(seed)
    while True:
        r = index_queue.get()
        if r is None:
            break
        idx, batch_indices = r
        try:
            idx_scale = 0
            if len(scale) > 1 and dataset.train:
                idx_scale = random.randrange(0, len(scale))
                dataset.set_scale(idx_scale)

            samples = collate_fn([dataset[i] for i in batch_indices])
            samples.append(idx_scale)

        except Exception:
            data_queue.put((idx, ExceptionWrapper(sys.exc_info())))
        else:
            data_queue.put((idx, samples)) 
Developer ID: HolmesShuan, Project: OISR-PyTorch, Lines of code: 27, Source: dataloader.py

Example 2: _ms_loop

# Required import: from torch.utils.data import dataloader [as alias]
# Or: from torch.utils.data.dataloader import ExceptionWrapper [as alias]
# The snippet also relies on module-level names from the original file:
# sys, random, torch, _use_shared_memory and _set_worker_signal_handlers.
def _ms_loop(dataset, index_queue, data_queue, collate_fn, scale, seed, init_fn, worker_id):
    global _use_shared_memory
    _use_shared_memory = True
    _set_worker_signal_handlers()

    torch.set_num_threads(1)
    torch.manual_seed(seed)
    while True:
        r = index_queue.get()
        if r is None:
            break
        idx, batch_indices = r
        try:
            idx_scale = 0
            if len(scale) > 1 and dataset.train:
                idx_scale = random.randrange(0, len(scale))
                dataset.set_scale(idx_scale)

            samples = collate_fn([dataset[i] for i in batch_indices])
            samples.append(idx_scale)
            # This is why idx_scale appears in the samples of the train loader

        except Exception:
            data_queue.put((idx, ExceptionWrapper(sys.exc_info())))
        else:
            data_queue.put((idx, samples)) 
Developer ID: ofsoundof, Project: 3D_Appearance_SR, Lines of code: 28, Source: dataloader.py

Example 3: _ms_loop

# Required import: from torch.utils.data import dataloader [as alias]
# Or: from torch.utils.data.dataloader import ExceptionWrapper [as alias]
# The snippet also relies on module-level names from the original file: sys,
# random, torch, queue, MP_STATUS_CHECK_INTERVAL, ManagerWatchdog,
# _use_shared_memory and _set_worker_signal_handlers.
def _ms_loop(dataset, index_queue, data_queue, done_event, collate_fn, scale, seed, init_fn, worker_id):
    try:
        global _use_shared_memory
        _use_shared_memory = True

        _set_worker_signal_handlers()

        torch.set_num_threads(1)
        random.seed(seed)
        torch.manual_seed(seed)

        data_queue.cancel_join_thread()

        if init_fn is not None:
            init_fn(worker_id)

        watchdog = ManagerWatchdog()

        while watchdog.is_alive():
            try:
                r = index_queue.get(timeout=MP_STATUS_CHECK_INTERVAL)
            except queue.Empty:
                continue
            if r is None:
                # Received the final signal
                assert done_event.is_set()
                return
            elif done_event.is_set():
                # The done event is set, but the final signal (None) has not
                # arrived yet. Keep looping until it does, skipping the
                # processing steps.
                continue
            idx, batch_indices = r
            try:
                idx_scale = 0
                if len(scale) > 1 and dataset.train:
                    idx_scale = random.randrange(0, len(scale))
                    dataset.set_scale(idx_scale)

                samples = collate_fn([dataset[i] for i in batch_indices])
                samples.append(idx_scale)
                # This is why idx_scale appears in the samples of the train loader

            except Exception:
                # It is important that we don't store exc_info in a variable,
                # see NOTE [ Python Traceback Reference Cycle Problem ]
                data_queue.put((idx, ExceptionWrapper(sys.exc_info())))
            else:
                data_queue.put((idx, samples))
                del samples
    except KeyboardInterrupt:
        # The main process will raise KeyboardInterrupt anyway.
        pass 
Developer ID: ofsoundof, Project: 3D_Appearance_SR, Lines of code: 55, Source: dataloader_new.py


Note: The torch.utils.data.dataloader.ExceptionWrapper examples in this article were compiled by 純淨天空 from open-source code and documentation platforms such as GitHub/MSDocs. The code snippets are taken from open-source projects contributed by various developers, and copyright of the source code remains with the original authors. Please consult the corresponding project's license before distributing or using the code; do not reproduce without permission.