

Python dataloader.ExceptionWrapper Code Examples

This article collects typical usage examples of the torch.utils.data.dataloader.ExceptionWrapper class in Python. If you are unsure what ExceptionWrapper does or how to use it in practice, the examples selected below should help. You can also explore further usage examples from the torch.utils.data.dataloader module.


Three code examples of dataloader.ExceptionWrapper are shown below, sorted by popularity by default.
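Before diving into the examples, note the pattern they all share: ExceptionWrapper is the small helper that the DataLoader machinery uses to carry an exception (together with its formatted traceback) from a worker process back to the main process, where it can be detected and re-raised. The sketch below illustrates that pattern in isolation. The _worker/_get_batch functions and the queue wiring are illustrative assumptions, not the actual DataLoader internals, and the details of ExceptionWrapper vary by PyTorch version (in some releases it is defined in torch._utils and only made available through the dataloader module; newer versions also provide a reraise() method).

import sys

from torch.utils.data.dataloader import ExceptionWrapper


def _worker(dataset, index_queue, data_queue):
    # Hypothetical worker loop: build each requested batch and, on failure,
    # forward the wrapped exception instead of letting the process die silently.
    while True:
        r = index_queue.get()
        if r is None:
            break
        idx, batch_indices = r
        try:
            samples = [dataset[i] for i in batch_indices]
        except Exception:
            # sys.exc_info() is wrapped so the traceback text survives the
            # trip across the process boundary.
            data_queue.put((idx, ExceptionWrapper(sys.exc_info())))
        else:
            data_queue.put((idx, samples))


def _get_batch(data_queue):
    # Hypothetical consumer side, running in the main process.
    idx, result = data_queue.get()
    if isinstance(result, ExceptionWrapper):
        # On recent PyTorch you could call result.reraise() instead.
        raise result.exc_type(result.exc_msg)
    return idx, result

The three _ms_loop variants below follow exactly this structure, but additionally pick a random scale index (idx_scale) per batch for multi-scale super-resolution training and append it to the collated samples.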

Example 1: _ms_loop

# Required import: from torch.utils.data import dataloader [as alias]
# Or: from torch.utils.data.dataloader import ExceptionWrapper [as alias]
# (the snippet below also relies on random, sys, torch, and the private
#  helper _set_worker_signal_handlers from PyTorch's dataloader internals)
def _ms_loop(dataset, index_queue, data_queue, collate_fn, scale, seed, init_fn, worker_id):
    global _use_shared_memory
    _use_shared_memory = True
    _set_worker_signal_handlers()

    torch.set_num_threads(1)
    torch.manual_seed(seed)
    while True:
        r = index_queue.get()
        if r is None:
            # None is the sentinel telling the worker to shut down.
            break
        idx, batch_indices = r
        try:
            # Pick a random scale per batch when training a multi-scale model.
            idx_scale = 0
            if len(scale) > 1 and dataset.train:
                idx_scale = random.randrange(0, len(scale))
                dataset.set_scale(idx_scale)

            samples = collate_fn([dataset[i] for i in batch_indices])
            samples.append(idx_scale)

        except Exception:
            # Ship the exception (with traceback) back to the main process.
            data_queue.put((idx, ExceptionWrapper(sys.exc_info())))
        else:
            data_queue.put((idx, samples))
Author: HolmesShuan, Project: OISR-PyTorch, Lines of code: 27, Source: dataloader.py

Example 2: _ms_loop

# Required import: from torch.utils.data import dataloader [as alias]
# Or: from torch.utils.data.dataloader import ExceptionWrapper [as alias]
def _ms_loop(dataset, index_queue, data_queue, collate_fn, scale, seed, init_fn, worker_id):
    global _use_shared_memory
    _use_shared_memory = True
    _set_worker_signal_handlers()

    torch.set_num_threads(1)
    torch.manual_seed(seed)
    while True:
        r = index_queue.get()
        if r is None:
            break
        idx, batch_indices = r
        try:
            idx_scale = 0
            if len(scale) > 1 and dataset.train:
                idx_scale = random.randrange(0, len(scale))
                dataset.set_scale(idx_scale)

            samples = collate_fn([dataset[i] for i in batch_indices])
            samples.append(idx_scale)
            # This is why idx_scale appears in the samples of the train loader.

        except Exception:
            data_queue.put((idx, ExceptionWrapper(sys.exc_info())))
        else:
            data_queue.put((idx, samples)) 
Author: ofsoundof, Project: 3D_Appearance_SR, Lines of code: 28, Source: dataloader.py

Example 3: _ms_loop

# Required import: from torch.utils.data import dataloader [as alias]
# Or: from torch.utils.data.dataloader import ExceptionWrapper [as alias]
# (this variant also uses private helpers such as ManagerWatchdog and
#  MP_STATUS_CHECK_INTERVAL, which newer PyTorch keeps under torch.utils.data._utils)
def _ms_loop(dataset, index_queue, data_queue, done_event, collate_fn, scale, seed, init_fn, worker_id):
    try:
        global _use_shared_memory
        _use_shared_memory = True

        _set_worker_signal_handlers()

        torch.set_num_threads(1)
        random.seed(seed)
        torch.manual_seed(seed)

        data_queue.cancel_join_thread()

        if init_fn is not None:
            init_fn(worker_id)

        watchdog = ManagerWatchdog()

        while watchdog.is_alive():
            try:
                r = index_queue.get(timeout=MP_STATUS_CHECK_INTERVAL)
            except queue.Empty:
                continue
            if r is None:
                # Received the final signal
                assert done_event.is_set()
                return
            elif done_event.is_set():
                # The done event is set, but the final signal (None) has not
                # arrived yet; keep looping until it does, skipping any
                # processing in the meantime.
                continue
            idx, batch_indices = r
            try:
                idx_scale = 0
                if len(scale) > 1 and dataset.train:
                    idx_scale = random.randrange(0, len(scale))
                    dataset.set_scale(idx_scale)

                samples = collate_fn([dataset[i] for i in batch_indices])
                samples.append(idx_scale)
                # This is why idx_scale appears in the samples of the train loader.

            except Exception:
                # It is important that we don't store exc_info in a variable,
                # see NOTE [ Python Traceback Reference Cycle Problem ]
                data_queue.put((idx, ExceptionWrapper(sys.exc_info())))
            else:
                data_queue.put((idx, samples))
                del samples
    except KeyboardInterrupt:
        # The main process will raise KeyboardInterrupt anyway.
        pass 
Author: ofsoundof, Project: 3D_Appearance_SR, Lines of code: 55, Source: dataloader_new.py
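For completeness: these _ms_loop functions are not called directly. A customized multi-scale DataLoader iterator would typically spawn one worker process per _ms_loop, each with its own index queue and a shared data queue. The launcher below is only a hypothetical sketch of that wiring; the loader attribute names (loader.scale, loader.seed, and so on) and the per-worker seeding scheme are assumptions, not code taken from the projects above.

import multiprocessing


def _spawn_ms_workers(loader, ms_loop):
    # Hypothetical launcher: one ms_loop process per worker, all feeding a
    # shared data_queue that the iterator drains in the main process.
    data_queue = multiprocessing.Queue()
    index_queues, workers = [], []
    for worker_id in range(loader.num_workers):
        index_queue = multiprocessing.Queue()
        w = multiprocessing.Process(
            target=ms_loop,
            args=(loader.dataset, index_queue, data_queue,
                  loader.collate_fn, loader.scale, loader.seed + worker_id,
                  loader.worker_init_fn, worker_id),
        )
        w.daemon = True  # workers should not keep the interpreter alive
        w.start()
        index_queues.append(index_queue)
        workers.append(w)
    return index_queues, data_queue, workers

The third variant above additionally expects a done_event argument (between data_queue and collate_fn), which the main process sets during shutdown so that workers can drain their queues and exit cleanly.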


Note: The torch.utils.data.dataloader.ExceptionWrapper examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are selected from open-source projects contributed by their developers; copyright in the source code belongs to the original authors, and any distribution or use should follow the corresponding project's license. Please do not reproduce without permission.