

Python utils.utils Method Code Examples

This article compiles typical usage examples of the utils.utils method in Python (full path: utils.utils.utils). If you are wondering exactly what utils.utils does, how to call it, or what it looks like in real code, the curated examples below may help. You can also explore further usage examples from the containing module, utils.utils.


The sections below present 15 code examples of the utils.utils method, sorted by popularity by default.

Example 1: run

# Required import: from utils import utils [as alias]
# Or: from utils.utils import utils [as alias]
def run(self):
        self.set_logger()

        # Initialize progress bar
        bar = utils.set_progress_bar(self.total_iters)

        for cycle_num in range(int(self.total_iters / self.iters)):
            self.model.train()

            self.cycle(bar, cycle_num)

            with torch.no_grad():
                self.run_evaluation_cycle()

            self.log_losses(self.opt, self.losses)
            self.update_top_score(self.opt)
            self.save_model(self.get_tracked_score())

        self.stop_logger() 
Developer: atcbosselut, Project: comet-commonsense, Lines: 21, Source: train.py

Example 2: load_data

# Required import: from utils import utils [as alias]
# Or: from utils.utils import utils [as alias]
def load_data(self, path):
        if ".pickle" in path:
            print("Loading data from: {}".format(path))
            data_utils.load_existing_data_loader(self, path)

            return True

        for split in self.data:
            file_name = "v4_atomic_{}.csv".format(map_name(split))

            df = pandas.read_csv("{}/{}".format(path, file_name), index_col=0)
            df.iloc[:, :9] = df.iloc[:, :9].apply(
                lambda col: col.apply(json.loads))

            for cat in self.categories:
                attr = df[cat]
                self.data[split]["total"] += utils.zipped_flatten(zip(
                    attr.index, ["<{}>".format(cat)] * len(attr), attr.values))

        if do_take_partial_dataset(self.opt.data):
            self.data["train"]["total"] = select_partial_dataset(
                self.opt.data, self.data["train"]["total"])

        return False 
Developer: atcbosselut, Project: comet-commonsense, Lines: 26, Source: atomic.py
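
The per-column parsing above is not project-specific: each selected DataFrame column holds JSON-encoded lists that are decoded cell by cell. A minimal, self-contained sketch of that pattern, with made-up column names and values (not the actual ATOMIC data):

import json
import pandas

df = pandas.DataFrame({
    "cat_a": ['["none"]', '["x", "y"]'],
    "cat_b": ['[1, 2]', '[3]'],
})
# Decode every cell of the selected columns from a JSON string into a Python list.
parsed = df.iloc[:, :2].apply(lambda col: col.apply(json.loads))
print(parsed["cat_a"].tolist())  # [['none'], ['x', 'y']]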

Example 3: handle_underscores

# Required import: from utils import utils [as alias]
# Or: from utils.utils import utils [as alias]
def handle_underscores(suffix, text_encoder, prefix=False):
    encoder = text_encoder.encoder
    if prefix:
        tok = "___"
    else:
        tok = find_underscore_length(suffix)

    suffix_parts = [i.strip() for i in suffix.split("{}".format(tok))]
    to_flatten = []
    for i, part in enumerate(suffix_parts):
        if part:
            to_flatten.append(text_encoder.encode([part], verbose=False)[0])

            if i != len(suffix_parts) - 1 and suffix_parts[i+1]:
                to_flatten.append([encoder["<blank>"]])
        else:
            to_flatten.append([encoder["<blank>"]])

    final_suffix = utils.flatten(to_flatten)

    return final_suffix 
Developer: atcbosselut, Project: comet-commonsense, Lines: 23, Source: atomic.py

Example 4: dataloader_create

# Required import: from utils import utils [as alias]
# Or: from utils.utils import utils [as alias]
def dataloader_create(self, args):
        from torch.utils.data import DataLoader
        from myDatasets_stereo import dataset_stereo_by_name as dataset_stereo
        import myTransforms  # transform classes used below; imported in-function as in the companion snippet
        args.mode = args.mode.lower()
        if args.mode == 'test' or args.mode == 'submit':
            # dataloader
            transform=myTransforms.Stereo_eval()
            dataset = dataset_stereo(names_dataset=args.dataset, root=args.root, Train=False, transform=transform)
            self.dataloader_val = DataLoader(dataset, batch_size=args.batchsize, shuffle=False, num_workers=4, drop_last=False)
            msg = "Val dataset: %s " % (args.dataset)
            logging.info(msg)
        else:
            # dataloader
            transform=myTransforms.Stereo_Spatial(size_crop=[768, 384], scale_delt=0, shift_max=32)
            dataset = dataset_stereo(names_dataset=args.dataset, root=args.root, Train=True, transform=transform)
            self.dataloader_train = DataLoader(dataset, batch_size=args.batchsize, shuffle=True, num_workers=4, drop_last=False)
            transform=myTransforms.Stereo_ToTensor()
            dataset_val = dataset_stereo(names_dataset=args.dataset_val, root=args.root, Train=False, transform=transform)
            self.dataloader_val = DataLoader(dataset_val, batch_size=args.batchsize, shuffle=False, num_workers=4, drop_last=False)
            msg = "Train dataset: %s , Val dataset: %s " % (args.dataset, args.dataset_val)
            logging.info(msg) 
Developer: wyf2017, Project: DSMnet, Lines: 23, Source: stereo_selfsupervised.py
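
For readers unfamiliar with the DataLoader options used here (batch_size, shuffle, num_workers, drop_last), a minimal, self-contained sketch on a toy tensor dataset (not part of DSMnet):

import torch
from torch.utils.data import DataLoader, TensorDataset

dataset = TensorDataset(torch.randn(10, 3, 8, 8), torch.randint(0, 2, (10,)))
loader = DataLoader(dataset, batch_size=4, shuffle=True, num_workers=0, drop_last=False)
for images, labels in loader:
    # With drop_last=False the final batch may be smaller (here 10 % 4 = 2 samples).
    print(images.shape, labels.shape)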

Example 5: dataloader_create

# Required import: from utils import utils [as alias]
# Or: from utils.utils import utils [as alias]
def dataloader_create(self, args):
        from torch.utils.data import DataLoader
        from myDatasets_stereo import dataset_stereo_by_name as dataset_stereo
        import myTransforms
        args.mode = args.mode.lower()
        if args.mode == 'test' or args.mode == 'submit':
            # dataloader
            transform=myTransforms.Stereo_eval()
            dataset = dataset_stereo(names_dataset=args.dataset, root=args.root, Train=False, transform=transform)
            self.dataloader_val = DataLoader(dataset, batch_size=args.batchsize, shuffle=False, num_workers=4, drop_last=False)
            msg = "%s dataset: %s , model name: %s " % (args.mode, args.dataset, args.net)
            logging.info(msg)
        else:
            # dataloader
            transform = myTransforms.Stereo_train(size_crop=[768, 384], scale_delt=0, shift_max=32)
            dataset = dataset_stereo(names_dataset=args.dataset, root=args.root, Train=True, transform=transform)
            self.dataloader_train = DataLoader(dataset, batch_size=args.batchsize, shuffle=True, num_workers=4, drop_last=False)
            transform=myTransforms.Stereo_eval()
            dataset_val = dataset_stereo(names_dataset=args.dataset_val, root=args.root, Train=False, transform=transform)
            self.dataloader_val = DataLoader(dataset_val, batch_size=args.batchsize, shuffle=False, num_workers=4, drop_last=False)
            msg = "Train dataset: %s , val dataset: %s " % (args.dataset, args.dataset_val)
            logging.info(msg) 
Developer: wyf2017, Project: DSMnet, Lines: 24, Source: stereo_supervised.py

Example 6: clean_str

# Required import: from utils import utils [as alias]
# Or: from utils.utils import utils [as alias]
def clean_str(string):
    """
    Tokenization/string cleaning for all datasets except for SST.
    Original taken from https://github.com/yoonkim/CNN_sentence/blob/master/process_data.py
    """
    string = re.sub(r"[^A-Za-z0-9(),!?\'\`]", " ", string)
    string = re.sub(r"\'s", " \'s", string)
    string = re.sub(r"\'ve", " \'ve", string)
    string = re.sub(r"n\'t", " n\'t", string)
    string = re.sub(r"\'re", " \'re", string)
    string = re.sub(r"\'d", " \'d", string)
    string = re.sub(r"\'ll", " \'ll", string)
    string = re.sub(r",", " , ", string)
    string = re.sub(r"!", " ! ", string)
    string = re.sub(r"\(", " \( ", string)
    string = re.sub(r"\)", " \) ", string)
    string = re.sub(r"\?", " \? ", string)
    string = re.sub(r"\s{2,}", " ", string)
    return string.strip().lower()

# FIXME: Move to utils, just like we're doing with the mnist module 
Developer: cchio, Project: deep-pwning, Lines: 23, Source: semantic_driver.py
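
A quick input/output check (hypothetical sentence, not from the project) showing how the function splits contractions and pads punctuation with spaces:

print(clean_str("I don't like it, really!"))
# -> "i do n't like it , really !"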

Example 7: set_logger

# Required import: from utils import utils [as alias]
# Or: from utils.utils import utils [as alias]
def set_logger(self):
        if cfg.toy:
            self.logger = SummaryWriter(utils.make_name(
                self.opt, prefix="garbage/logs/", eval_=True, do_epoch=False))
        else:
            self.logger = SummaryWriter(utils.make_name(
                self.opt, prefix="logs/", eval_=True, do_epoch=False))
        print("Logging Tensorboard Files at: {}".format(self.logger.logdir)) 
Developer: atcbosselut, Project: comet-commonsense, Lines: 10, Source: train.py
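
The writer itself is standard TensorBoard logging. A minimal, hypothetical sketch, shown here with torch.utils.tensorboard (the SummaryWriter used above exposes the same add_scalar interface); the log directory is a placeholder:

from torch.utils.tensorboard import SummaryWriter

writer = SummaryWriter(log_dir="logs/demo")
writer.add_scalar("train/loss", 0.42, global_step=1)
writer.close()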

Example 8: epoch

# Required import: from utils import utils [as alias]
# Or: from utils.utils import utils [as alias]
def epoch(self):
        nums = self.reset_losses()

        # Initialize progress bar
        bar = utils.initialize_progress_bar(
            self.data_loader.sequences["train"])

        reset = False

        while not reset:
            loss, nums, reset = self.do_forward_pass(nums)
            self.do_backward_pass(loss)
            self.update_parameters()

            bar.update(self.opt.train.dynamic.bs)
            self.count += 1

            for loss_name in self.losses["train"]:
                self.logger.add_scalar(
                    "train/{}".format(loss_name),
                    loss.item() / self.opt.train.dynamic.bs,
                    self.count)

            if cfg.toy and self.counter(nums) > 300:
                break

        with torch.no_grad():
            self.run_evaluation_cycle()

        self.log_losses(self.opt, self.losses)
        self.update_top_score(self.opt)
        self.save_model(self.get_tracked_score())

        self.data_loader.reset_offsets("train") 
Developer: atcbosselut, Project: comet-commonsense, Lines: 36, Source: train.py

Example 9: clip_gradients

# Required import: from utils import utils [as alias]
# Or: from utils.utils import utils [as alias]
def clip_gradients(self):
        if self.opt.train.static.clip:
            torch.nn.utils.clip_grad_norm_(
                self.model.parameters(), self.opt.train.static.clip) 
Developer: atcbosselut, Project: comet-commonsense, Lines: 6, Source: train.py
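
clip_grad_norm_ rescales gradients in place, so it must run after backward() has populated them and before optimizer.step(). A self-contained sketch of that ordering (toy model and hypothetical max_norm value):

import torch

model = torch.nn.Linear(4, 1)
optimizer = torch.optim.SGD(model.parameters(), lr=0.1)

optimizer.zero_grad()
loss = model(torch.randn(8, 4)).pow(2).mean()
loss.backward()
torch.nn.utils.clip_grad_norm_(model.parameters(), max_norm=1.0)  # rescale gradients
optimizer.step()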

Example 10: save_step

# Required import: from utils import utils [as alias]
# Or: from utils.utils import utils [as alias]
def save_step(model, vocab, optimizer, opt, length, lrs):
    if cfg.test_save:
        name = "{}.pickle".format(utils.make_name(
            opt, prefix="garbage/models/", is_dir=False, eval_=True))
    else:
        name = "{}.pickle".format(utils.make_name(
            opt, prefix="models/", is_dir=False, eval_=True))
    save_checkpoint({
        "epoch": length, "state_dict": model.state_dict(),
        "optimizer": optimizer.state_dict(), "opt": opt,
        "vocab": vocab, "epoch_learning_rates": lrs},
        name) 
Developer: atcbosselut, Project: comet-commonsense, Lines: 14, Source: data.py

Example 11: load_atomic_data

# Required import: from utils import utils [as alias]
# Or: from utils.utils import utils [as alias]
def load_atomic_data(opt):
    # Hacky workaround, you may have to change this
    # if your models use different pad lengths for e1, e2, r
    if opt.data.get("maxe1", None) is None:
        opt.data.maxe1 = 17
        opt.data.maxe2 = 35
        opt.data.maxr = 1
    path = "data/atomic/processed/generation/{}.pickle".format(
        utils.make_name_string(opt.data))
    data_loader = data.make_data_loader(opt, opt.data.categories)
    loaded = data_loader.load_data(path)

    return data_loader 
Developer: atcbosselut, Project: comet-commonsense, Lines: 15, Source: functions.py

Example 12: save_checkpoint

# Required import: from utils import utils [as alias]
# Or: from utils.utils import utils [as alias]
def save_checkpoint(self, epoch, best_prec, is_best):
        state = {
                'epoch': epoch,
                'best_prec': best_prec,
                'state_dict': self.model.state_dict(),
                'optim' : self.optim.state_dict(),
                }
        utils.save_checkpoint(state, is_best, dirpath=self.dirpath, filename='model_checkpoint.pkl')
        if(is_best):
            path_save = os.path.join(self.dirpath, 'weight_best.pkl')
            torch.save({'state_dict': self.model.state_dict()}, path_save) 
Developer: wyf2017, Project: DSMnet, Lines: 13, Source: stereo.py

Example 13: load_checkpoint

# Required import: from utils import utils [as alias]
# Or: from utils.utils import utils [as alias]
def load_checkpoint(self, best=False):
        state = utils.load_checkpoint(self.dirpath, best)
        if state is not None:
            msg = 'load checkpoint successfully: %s \n' % self.dirpath
            logging.info(msg)
            self.epoch = state['epoch'] + 1
            self.best_prec = state['best_prec']
            self.model.load_state_dict(state['state_dict'])
            self.optim.load_state_dict(state['optim']) 
Developer: wyf2017, Project: DSMnet, Lines: 11, Source: stereo.py
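
The two helpers in Examples 12 and 13 appear to wrap a plain torch.save/torch.load dictionary round trip. A minimal sketch under that assumption, reusing the fields shown in Example 12 (the toy model, values, and file path are illustrative only):

import torch

model = torch.nn.Linear(4, 2)
optim = torch.optim.Adam(model.parameters())

state = {"epoch": 3, "best_prec": 0.91,
         "state_dict": model.state_dict(), "optim": optim.state_dict()}
torch.save(state, "model_checkpoint.pkl")

restored = torch.load("model_checkpoint.pkl")
model.load_state_dict(restored["state_dict"])
optim.load_state_dict(restored["optim"])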

Example 14: detect_and_draw

# Required import: from utils import utils [as alias]
# Or: from utils.utils import utils [as alias]
def detect_and_draw(model, bev_maps, Tensor, is_front=True):

    # If back side bev, flip around vertical axis
    if not is_front:
        bev_maps = torch.flip(bev_maps, [2, 3])
    imgs = Variable(bev_maps.type(Tensor))

    # Get Detections
    img_detections = []
    with torch.no_grad():
        detections = model(imgs)
        detections = utils.non_max_suppression_rotated_bbox(detections, opt.conf_thres, opt.nms_thres)

    img_detections.extend(detections)

    # Only supports single batch
    display_bev = np.zeros((cnf.BEV_WIDTH, cnf.BEV_WIDTH, 3))
    
    bev_map = bev_maps[0].numpy()
    display_bev[:, :, 2] = bev_map[0, :, :]  # r_map
    display_bev[:, :, 1] = bev_map[1, :, :]  # g_map
    display_bev[:, :, 0] = bev_map[2, :, :]  # b_map

    display_bev *= 255
    display_bev = display_bev.astype(np.uint8)

    for detections in img_detections:
        if detections is None:
            continue
        # Rescale boxes to original image
        detections = utils.rescale_boxes(detections, opt.img_size, display_bev.shape[:2])
        for x, y, w, l, im, re, conf, cls_conf, cls_pred in detections:
            yaw = np.arctan2(im, re)
            # Draw rotated box
            bev_utils.drawRotatedBox(display_bev, x, y, w, l, yaw, cnf.colors[int(cls_pred)])

    return display_bev, img_detections 
Developer: ghimiredhikura, Project: Complex-YOLOv3, Lines: 39, Source: test_both_side_detection.py
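
The line yaw = np.arctan2(im, re) decodes the box orientation from an (im, re) pair that encodes sin and cos of the angle; arctan2 recovers the angle from that pair. A quick numeric check with a hypothetical angle:

import numpy as np

yaw = 0.7
sin_yaw, cos_yaw = np.sin(yaw), np.cos(yaw)
print(np.arctan2(sin_yaw, cos_yaw))  # ~0.7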

Example 15: save_img

# Required import: from utils import utils [as alias]
# Or: from utils.utils import utils [as alias]
def save_img(self, fig, name):
        utils.save_img(fig, self.model_name, name, self.result_dir)
        return 
Developer: psanch21, Project: VAE-GMVAE, Lines: 5, Source: base_visualize.py


Note: The utils.utils.utils method examples in this article were compiled by 純淨天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are selected from open-source projects contributed by their respective developers; copyright remains with the original authors. Please consult each project's license before distributing or reusing the code, and do not reproduce this article without permission.