

Python path.getsize Method Code Examples

This article collects typical usage examples of the os.path.getsize method in Python. If you are wondering what path.getsize does, how to call it, or what it looks like in real code, the curated method examples below may help. You can also explore further usage examples from os.path, the module the method belongs to.


The following presents 15 code examples of the path.getsize method, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
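Before the collected examples, here is a minimal standalone sketch of how os.path.getsize is typically called (the file name is only an illustration):

# getsize returns the size of the named file in bytes and raises OSError
# if the file does not exist or is inaccessible.
from os import path

size_in_bytes = path.getsize("example.txt")  # hypothetical file
print("example.txt is {} bytes ({:.1f} KB)".format(size_in_bytes, size_in_bytes / 1024))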

Example 1: list

# Required import: from os import path [as alias]
# Or: from os.path import getsize [as alias]
def list(self):
		dirs = []
		# TODO: error management
		for d in listdir(self.datapath):
			dp = path.join(self.datapath, d)
			if path.isdir(dp):
				dirs.append({
					"name": d,
					"path": dp,
					"mtime": datetime.datetime.fromtimestamp(path.getmtime(dp)).strftime('%d.%m.%Y %H:%M:%S')
					# "size": path.getsize(dp),
					# "hsize": self.human_size(self.get_size(dp))
				})
		
		dirs.sort(key=lambda dir: dir['name'], reverse=True)
		
		return {'status': 'success', 'data': dirs} 
Developer: SecPi, Project: SecPi, Lines of code: 19, Source file: alarmdata.py
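The commented-out "size" and "hsize" entries above call get_size and human_size helpers that are not shown in the snippet. A minimal sketch of what such helpers could look like (names and behaviour assumed, not taken from the SecPi source):

from os import path, walk

def get_size(start_path):
    # Sum path.getsize over every file below start_path; directories
    # themselves do not report a meaningful size on most filesystems.
    total = 0
    for root, dirs, files in walk(start_path):
        for f in files:
            total += path.getsize(path.join(root, f))
    return total

def human_size(num_bytes):
    # Render a byte count with a binary unit suffix.
    for unit in ('B', 'KB', 'MB', 'GB', 'TB'):
        if num_bytes < 1024.0:
            return "{:.1f} {}".format(num_bytes, unit)
        num_bytes /= 1024.0
    return "{:.1f} PB".format(num_bytes)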

Example 2: __init__

# Required import: from os import path [as alias]
# Or: from os.path import getsize [as alias]
def __init__(self, path, split, tokenizer, bucket_size, ascending=False):
        # Setup
        self.path = path
        self.bucket_size = bucket_size

        # List all wave files
        file_list = []
        for s in split:
            split_list = list(Path(join(path, s)).rglob("*.flac"))
            assert len(split_list) > 0, "No data found @ {}".format(join(path,s))
            file_list += split_list
        # Read text
        text = Parallel(n_jobs=READ_FILE_THREADS)(
            delayed(read_text)(str(f)) for f in file_list)
        #text = Parallel(n_jobs=-1)(delayed(tokenizer.encode)(txt) for txt in text)
        text = [tokenizer.encode(txt) for txt in text]

        # Sort dataset by text length
        #file_len = Parallel(n_jobs=READ_FILE_THREADS)(delayed(getsize)(f) for f in file_list)
        self.file_list, self.text = zip(*[(f_name, txt)
                                          for f_name, txt in sorted(zip(file_list, text), reverse=not ascending, key=lambda x:len(x[1]))]) 
Developer: Alexander-H-Liu, Project: End-to-end-ASR-Pytorch, Lines of code: 23, Source file: librispeech.py

Example 3: verify

# Required import: from os import path [as alias]
# Or: from os.path import getsize [as alias]
def verify(self, sha1=None):
        _dlsize = getsize(self._destination)
        if _dlsize != self.get_size():
            raise FDSizeMismatch(_dlsize, self._fname, self.get_size())

        if not sha1:
            return

        dlsha1 = None
        try:
            result = util.exec_command(["sha1sum", self._destination])
            dlsha1 = result['out']
        except OSError:
            try:
                result = util.exec_command(
                    ["shasum", "-a", "1", self._destination])
                dlsha1 = result['out']
            except OSError:
                pass

        if dlsha1:
            dlsha1 = dlsha1[1:41] if dlsha1.startswith("\\") else dlsha1[:40]
            if sha1 != dlsha1:
                raise FDSHASumMismatch(dlsha1, self._fname, sha1) 
Developer: bq, Project: web2board, Lines of code: 26, Source file: downloader.py
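Example 3 shells out to sha1sum/shasum to compute the digest; the same size-plus-SHA1 check can also be done in pure Python with hashlib. A hedged sketch of that alternative, not part of the web2board code:

import hashlib
from os.path import getsize

def verify_download(destination, expected_size, expected_sha1=None):
    # Compare the on-disk size first, then the SHA1 digest if one was supplied.
    actual_size = getsize(destination)
    if actual_size != expected_size:
        raise ValueError("size mismatch: {} != {}".format(actual_size, expected_size))
    if not expected_sha1:
        return
    sha1 = hashlib.sha1()
    with open(destination, 'rb') as f:
        for chunk in iter(lambda: f.read(65536), b''):
            sha1.update(chunk)
    if sha1.hexdigest() != expected_sha1:
        raise ValueError("SHA1 mismatch: {} != {}".format(sha1.hexdigest(), expected_sha1))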

Example 4: clear_cache

# Required import: from os import path [as alias]
# Or: from os.path import getsize [as alias]
def clear_cache(force = False):
  """
  If the folder exists, and has more than 5MB of icons in the cache, delete
  it to clear all the icons then recreate it.
  """
  from os.path import getsize, join, isfile, exists
  from os import makedirs, listdir
  from sublime import cache_path
  from shutil import rmtree

  # The icon cache path
  icon_path = join(cache_path(), "GutterColor")

  # The maximum amount of space to take up
  limit = 5242880 # 5 MB

  if exists(icon_path):
    size = sum(getsize(join(icon_path, f)) for f in listdir(icon_path) if isfile(join(icon_path, f)))
    if force or (size > limit): rmtree(icon_path)

  if not exists(icon_path): makedirs(icon_path) 
Developer: ggordan, Project: GutterColor, Lines of code: 23, Source file: gutter_color.py

Example 5: _read

# Required import: from os import path [as alias]
# Or: from os.path import getsize [as alias]
def _read(filename):
    """ Reads Fortran style binary data into numpy array
    """
    nbytes = getsize(filename)
    with open(filename, 'rb') as file:
        # read size of record
        file.seek(0)
        n = np.fromfile(file, dtype='int32', count=1)[0]
        if n == nbytes-8:
            file.seek(4)
            data = np.fromfile(file, dtype='float32')
            return data[:-1]
        else:
            file.seek(0)
            data = np.fromfile(file, dtype='float32')
            return data 
Developer: rmodrak, Project: seisflows, Lines of code: 18, Source file: fortran_binary.py
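Example 5 reads the unformatted Fortran layout it checks for: a 4-byte int32 record length, a float32 payload, and a trailing 4-byte record marker (hence the n == nbytes-8 test and the data[:-1] slice). A minimal counterpart for writing that layout, useful for producing test files; this is assumed rather than taken from the seisflows source:

import numpy as np

def _write(filename, data):
    # One Fortran-style record: int32 byte count, float32 payload,
    # then the byte count repeated as the trailing record marker.
    payload = np.asarray(data, dtype='float32')
    marker = np.array([payload.nbytes], dtype='int32')
    with open(filename, 'wb') as file:
        marker.tofile(file)
        payload.tofile(file)
        marker.tofile(file)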

Example 6: _scan_dynamodb_and_upload_to_s3

# Required import: from os import path [as alias]
# Or: from os.path import getsize [as alias]
def _scan_dynamodb_and_upload_to_s3(self, temp_file, scan_kwargs, table):
        while True:
            response = table.scan(**scan_kwargs)
            items = response['Items']
            for item in items:
                temp_file.write(self.process_func(item))

            if 'LastEvaluatedKey' not in response:
                # no more items to scan
                break

            last_evaluated_key = response['LastEvaluatedKey']
            scan_kwargs['ExclusiveStartKey'] = last_evaluated_key

            # Upload the file to S3 if reach file size limit
            if getsize(temp_file.name) >= self.file_size:
                _upload_file_to_s3(temp_file, self.s3_bucket_name,
                                   self.s3_key_prefix)
                temp_file.close()
                temp_file = NamedTemporaryFile()
        return temp_file 
Developer: apache, Project: airflow, Lines of code: 23, Source file: dynamodb_to_s3.py

Example 7: test_write_gff_file

# Required import: from os import path [as alias]
# Or: from os.path import getsize [as alias]
def test_write_gff_file(self, seqprop_with_i, tmpdir):
        """Test writing the features, and that features are now loaded from a file"""
        outpath = tmpdir.join('test_seqprop_with_i_write_gff_file.gff').strpath
        seqprop_with_i.write_gff_file(outfile=outpath, force_rerun=True)

        # Test that the file was written
        assert op.exists(outpath)
        assert op.getsize(outpath) > 0

        # Test that file paths are correct
        assert seqprop_with_i.feature_path == outpath
        assert seqprop_with_i.feature_file == 'test_seqprop_with_i_write_gff_file.gff'
        assert seqprop_with_i.feature_dir == tmpdir

        # Test that features cannot be changed
        with pytest.raises(ValueError):
            seqprop_with_i.features = ['NOFEATURES'] 
Developer: SBRG, Project: ssbio, Lines of code: 19, Source file: test_protein_seqprop.py

Example 8: traverse_dir

# Required import: from os import path [as alias]
# Or: from os.path import getsize [as alias]
def traverse_dir(path):
    file_dict = {}
    dir_dict = {}
    count = 1
    for root, dirs, files in walk(path):
        for d in dirs:
            abs_p = join(root, d)
            dir_dict[abs_p] = 0
            print(abs_p)
            count += 1
            if count % 200 == 0:
                print('%s files scanned' % count)

        for f in files:
            abs_p = join(root, f)
            file_dict[abs_p] = getsize(abs_p)
            print(abs_p)
            count += 1
            if count % 200 == 0:
                print('%s files scanned' % count)

    return file_dict, dir_dict 
Developer: restran, Project: hacker-scripts, Lines of code: 24, Source file: dir_compare.py
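A short usage sketch for the function above, summing the getsize results into a total size (the '.' directory is only an illustration):

if __name__ == '__main__':
    file_dict, dir_dict = traverse_dir('.')  # any directory path works here
    total_bytes = sum(file_dict.values())
    print('%s files, %s directories, %s bytes in total'
          % (len(file_dict), len(dir_dict), total_bytes))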

Example 9: get_sha1_by_slice

# Required import: from os import path [as alias]
# Or: from os.path import getsize [as alias]
def get_sha1_by_slice(file_name, slice_size):
        """ Get SHA array based on Qcloud Slice Upload Interface

        :param file_name: local file path
        :param slice_size: slice size in bytes
        :return: sha array like [{"offset": 0, "datalen": 1024, "datasha": "aaa"}, {}, {}]
        """
        from os import path

        with open(file_name, 'rb') as f:

            result = []
            file_size = path.getsize(file_name)
            sha1_obj = Sha1Hash()
            for current_offset in range(0, file_size, slice_size):

                data_length = min(slice_size, file_size - current_offset)
                sha1_obj.update(f.read(data_length))
                sha1_val = sha1_obj.inner_digest()
                result.append({"offset": current_offset, "datalen": data_length, "datasha": sha1_val})

            result[-1]['datasha'] = sha1_obj.hexdigest()
            return result 
Developer: tencentyun, Project: cos-python-sdk-v4, Lines of code: 25, Source file: cos_common.py

Example 10: _close

# Required import: from os import path [as alias]
# Or: from os.path import getsize [as alias]
def _close(self, file):
        f = self.handles[file]
        del self.handles[file]
        if self.whandles.has_key(file):
            del self.whandles[file]
            f.flush()
            self.unlock_file(file, f)
            f.close()
            if os.path.isfile(file):
                self.tops[file] = getsize(file)
                self.mtimes[file] = getmtime(file)
            else:
                if DEBUG:
                    log(self.log_prefix + '_close: missing file', file)
                self.tops[file] = 0
                self.mtimes[file] = 0
        else:
            if self.lock_while_reading:
                self.unlock_file(file, f)
            f.close() 
Developer: alesnav, Project: p2ptv-pi, Lines of code: 22, Source file: Storage.py

Example 11: enable_file

# Required import: from os import path [as alias]
# Or: from os.path import getsize [as alias]
def enable_file(self, f):
        if self.config['encrypted_storage']:
            return
        if not self.disabled[f]:
            return
        self.disabled[f] = False
        r = self.file_ranges[f]
        if not r:
            return
        file = r[3]
        if not exists(file):
            h = open(file, 'wb+')
            h.flush()
            h.close()
        if not self.tops.has_key(file):
            self.tops[file] = getsize(file)
        if not self.mtimes.has_key(file):
            self.mtimes[file] = getmtime(file)
        self.working_ranges[f] = [r]
        if DEBUG:
            log(self.log_prefix + 'enable_file: f:', f, 'self.working_ranges:', self.working_ranges) 
Developer: alesnav, Project: p2ptv-pi, Lines of code: 23, Source file: Storage.py

Example 12: disable_file

# Required import: from os import path [as alias]
# Or: from os.path import getsize [as alias]
def disable_file(self, f):
        if self.config['encrypted_storage']:
            return
        if self.disabled[f]:
            return
        self.disabled[f] = True
        r = self._get_disabled_ranges(f)
        if not r:
            return
        for file, begin, end in r[2]:
            if not os.path.isdir(self.bufferdir):
                os.makedirs(self.bufferdir)
            if not exists(file):
                h = open(file, 'wb+')
                h.flush()
                h.close()
            if not self.tops.has_key(file):
                self.tops[file] = getsize(file)
            if not self.mtimes.has_key(file):
                self.mtimes[file] = getmtime(file)

        self.working_ranges[f] = r[0] 
Developer: alesnav, Project: p2ptv-pi, Lines of code: 24, Source file: Storage.py

Example 13: pickle

# Required import: from os import path [as alias]
# Or: from os.path import getsize [as alias]
def pickle(self):
        files = []
        pfiles = []
        for i in xrange(len(self.files)):
            if not self.files[i][1]:
                continue
            if self.disabled[i]:
                for file, start, end in self._get_disabled_ranges(i)[2]:
                    pfiles.extend([basename(file), getsize(file), getmtime(file)])

                continue
            file = self.files[i][0].encode('utf-8')
            files.extend([i, getsize(file), getmtime(file)])

        return {'files': files,
         'partial files': pfiles} 
Developer: alesnav, Project: p2ptv-pi, Lines of code: 18, Source file: Storage.py

Example 14: rinexobs

# Required import: from os import path [as alias]
# Or: from os.path import getsize [as alias]
def rinexobs(obsfn,writeh5=None,maxtimes=None):
    stem,ext = splitext(expanduser(obsfn))
    if ext[-1].lower() == 'o': #raw text file
        with open(obsfn,'r') as f:
            t=time.time()
            lines = f.read().splitlines(True)
            lines.append('')
            header,version,headlines,obstimes,sats,svset = scan(lines)
            print('{} is a RINEX {} file, {} kB.'.format(obsfn,version,getsize(obsfn)/1000.0))
            data = processBlocks(lines,header,obstimes,svset,headlines,sats)
            print("finished in {0:.2f} seconds".format(time.time()-t))
    #%% save to disk (optional)
        if writeh5:
            h5fn = stem + '.h5'
            print('saving OBS data to {}'.format(h5fn))
            data.to_hdf(h5fn,key='OBS',mode='a',complevel=6,append=False)
    elif ext.lower() == '.h5':
        data = read_hdf(obsfn,key='OBS')
        print('loaded OBS data from {} to {}'.format(data.items[0],data.items[-1]))
    return data


# this will scan the document for the header info and for the line on
# which each block starts 
Developer: gregstarr, Project: PyGPS, Lines of code: 26, Source file: readRinexObs.py

Example 15: __init__

# Required import: from os import path [as alias]
# Or: from os.path import getsize [as alias]
def __init__(self,addr=0,size=None,file=None,process=True):
        self.init()
        if not size:
            size = ospath_getsize(file)
        self.file_size = size
        self.file = file
        self.addr = addr
        self.size = size
        self.type = 'TGA'
        self.pallete = None
        self.Read_header()
        self.flipped = False
        if self.header['ImageDescriptor'] & 32:
            self.flipped = True
        self.alpha_bits = self.header['ImageDescriptor'] & 15
        self.Get_image_dimensions()
        self.size_of_plane = self.width * self.height
        self.sourceBpp = self.header['BitsPerPixel']/8
        self.data_size = self.width * self.height * self.sourceBpp
        self.Bps = self.width * self.sourceBpp
        self.Bpp = 4
        self.final_size = self.size_of_plane * self.Bpp
        self.RGBA = None
        if not process: return
        self.Process() 
Developer: elbowz, Project: xbmc.service.pushbullet, Lines of code: 27, Source file: limpp.py


Note: the os.path.getsize method examples in this article were compiled by 純淨天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The snippets were selected from open-source projects contributed by various developers; copyright in the source code remains with the original authors, and any distribution or use should follow the corresponding project's license. Do not reproduce without permission.