本文整理汇总了Python中pyon.util.file_sys.FileSystem.get_url方法的典型用法代码示例。如果您正苦于以下问题:Python FileSystem.get_url方法的具体用法?Python FileSystem.get_url怎么用?Python FileSystem.get_url使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类pyon.util.file_sys.FileSystem
的用法示例。
在下文中一共展示了FileSystem.get_url方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: __init__
# 需要导入模块: from pyon.util.file_sys import FileSystem [as 别名]
# 或者: from pyon.util.file_sys.FileSystem import get_url [as 别名]
def __init__(self, name = None):
    """
    Create an HDF encoder backed by an on-disk h5py file under the temp
    file system.

    @param name The name of the dataset; a random name is generated when
                it is not provided.
    @raises HDFEncoderException when the backing file cannot be opened.
    """
    # Build the target path; fall back to a random file name when the
    # caller did not supply one.
    self.filename = FileSystem.get_url(fs=FS.TEMP, filename=name or random_name(), ext='encoder.hdf5')
    # Inline import so hdf/numpy do not become hard dependencies.
    import h5py
    # Open an hdf file on disk (in /tmp) to write data to, since we
    # can't yet do it in memory.
    try:
        log.debug("Creating h5py file object for the encoder at %s" % self.filename)
        # Append to an existing file, otherwise create a new one.
        file_mode = 'r+' if os.path.isfile(self.filename) else 'w'
        self.h5pyfile = h5py.File(self.filename, mode=file_mode, driver='core')
        assert self.h5pyfile, 'No h5py file object created.'
    except IOError:
        log.debug("Error opening file for the HDFEncoder! ")
        raise HDFEncoderException("Error while trying to open file. ")
    except AssertionError as err:
        log.debug(err.message)
        raise HDFEncoderException(err.message)
示例2: create_known
# 需要导入模块: from pyon.util.file_sys import FileSystem [as 别名]
# 或者: from pyon.util.file_sys.FileSystem import get_url [as 别名]
def create_known(dataset_name, rootgrp_name, grp_name):
    """
    Write a small HDF5 file with known values and return both the array
    and the file's raw bytes, to compare against during tests.

    @param dataset_name Name of the dataset created inside the subgroup.
    @param rootgrp_name Name of the group created at the file root.
    @param grp_name     Name of the subgroup that holds the dataset.
    @retval (known_array, known_hdf_as_string) tuple
    """
    known_array = numpy.ones((10, 20))
    filename = FileSystem.get_url(FS.TEMP, random_name(), ".hdf5")
    # Write an hdf file with known values to compare against
    h5pyfile = h5py.File(filename, mode='w', driver='core')
    try:
        grp = h5pyfile.create_group(rootgrp_name)
        subgrp = grp.create_group(grp_name)
        dataset = subgrp.create_dataset(dataset_name, known_array.shape, known_array.dtype.str, maxshape=(None, None))
        dataset.write_direct(known_array)
    finally:
        # Close even when group/dataset creation fails so the temp file
        # handle is not leaked (original leaked it on exception).
        h5pyfile.close()
    # Read the binary string representation of the file; 'with'
    # guarantees the handle is closed before the file is unlinked.
    with open(filename, mode='rb') as f:
        known_hdf_as_string = f.read()  # known string to compare against during tests
    # cleaning up
    FileSystem.unlink(filename)
    return known_array, known_hdf_as_string
示例3: _create_coverage
# 需要导入模块: from pyon.util.file_sys import FileSystem [as 别名]
# 或者: from pyon.util.file_sys.FileSystem import get_url [as 别名]
def _create_coverage(self, dataset_id, description, parameter_dict, spatial_domain, temporal_domain):
    """
    Build a SimplexCoverage for a dataset from serialized parameter and
    domain definitions.

    @param dataset_id      Identifier used as the coverage name/root.
    @param description     Coverage description; falls back to dataset_id.
    @param parameter_dict  Serialized ParameterDictionary.
    @param spatial_domain  Serialized spatial GridDomain.
    @param temporal_domain Serialized temporal GridDomain.
    @retval the new SimplexCoverage
    """
    # Deserialize the parameter dictionary and both domains.
    pdict = ParameterDictionary.load(parameter_dict)
    tdom = GridDomain.load(temporal_domain)
    sdom = GridDomain.load(spatial_domain)
    # All dataset coverages live under the shared cache root.
    file_root = FileSystem.get_url(FS.CACHE, 'datasets')
    return SimplexCoverage(file_root, dataset_id, description or dataset_id,
                           parameter_dictionary=pdict,
                           temporal_domain=tdom,
                           spatial_domain=sdom,
                           inline_data_writes=self.inline_data_writes)
示例4: _splice_coverage
# 需要导入模块: from pyon.util.file_sys import FileSystem [as 别名]
# 或者: from pyon.util.file_sys.FileSystem import get_url [as 别名]
def _splice_coverage(cls, dataset_id, scov):
    """
    Splice a new simplex coverage into the dataset's view coverage.

    If the view currently references a SimplexCoverage, a new temporal-
    aggregation ComplexCoverage is created wrapping the existing head
    coverage plus the new simplex, and the view is repointed at it.
    If the view already references a ComplexCoverage, the new simplex is
    appended to that complex coverage in place.

    @param dataset_id The dataset whose view coverage is updated.
    @param scov       The new SimplexCoverage to splice in.
    """
    file_root = FileSystem.get_url(FS.CACHE,'datasets')
    # Open the dataset's view coverage for appending.
    vcov = cls._get_coverage(dataset_id,mode='a')
    scov_pth = scov.persistence_dir
    if isinstance(vcov.reference_coverage, SimplexCoverage):
        # First splice: wrap the current head coverage in a new complex
        # coverage (temporal aggregation), then add the new simplex.
        ccov = ComplexCoverage(file_root, uuid4().hex, 'Complex coverage for %s' % dataset_id,
                               reference_coverage_locs=[vcov.head_coverage_path,],
                               parameter_dictionary=ParameterDictionary(),
                               complex_type=ComplexCoverageType.TEMPORAL_AGGREGATION)
        log.info('Creating Complex Coverage: %s', ccov.persistence_dir)
        ccov.append_reference_coverage(scov_pth)
        ccov_pth = ccov.persistence_dir
        ccov.close()
        # Repoint the view at the newly created complex coverage.
        vcov.replace_reference_coverage(ccov_pth)
    elif isinstance(vcov.reference_coverage, ComplexCoverage):
        log.info('Appending simplex coverage to complex coverage')
        #vcov.reference_coverage.append_reference_coverage(scov_pth)
        # Reload the complex coverage in append mode rather than using
        # the (read-only) reference held by the view.
        dir_path = vcov.reference_coverage.persistence_dir
        vcov.close()
        ccov = AbstractCoverage.load(dir_path, mode='a')
        ccov.append_reference_coverage(scov_pth)
        ccov.refresh()
        ccov.close()
    # Refresh so the view picks up the spliced data, then release it.
    # NOTE(review): in the elif branch vcov was already closed above;
    # this relies on refresh/close being safe to call again — confirm.
    vcov.refresh()
    vcov.close()
示例5: _create_coverage
# 需要导入模块: from pyon.util.file_sys import FileSystem [as 别名]
# 或者: from pyon.util.file_sys.FileSystem import get_url [as 别名]
def _create_coverage(self, dataset_id, parameter_dict_id, time_dom, spatial_dom):
    """
    Build a SimplexCoverage for a dataset, resolving the parameter
    dictionary through the dataset management service.

    @param dataset_id        Identifier used as coverage name and description.
    @param parameter_dict_id Resource id of the parameter dictionary.
    @param time_dom          Temporal domain object (dumped before loading).
    @param spatial_dom       Spatial domain object (dumped before loading).
    @retval the new SimplexCoverage
    """
    # Resolve the parameter dictionary resource, then deserialize it.
    raw_pdict = self.dataset_management_client.read_parameter_dictionary(parameter_dict_id)
    pdict = ParameterDictionary.load(raw_pdict)
    # Round-trip the domains through dump/load to get GridDomain instances.
    tdom = GridDomain.load(time_dom.dump())
    sdom = GridDomain.load(spatial_dom.dump())
    file_root = FileSystem.get_url(FS.CACHE, 'datasets')
    return SimplexCoverage(file_root, dataset_id, dataset_id,
                           parameter_dictionary=pdict,
                           temporal_domain=tdom,
                           spatial_domain=sdom)
示例6: _create_coverage
# 需要导入模块: from pyon.util.file_sys import FileSystem [as 别名]
# 或者: from pyon.util.file_sys.FileSystem import get_url [as 别名]
def _create_coverage(self, dataset_id, description, parameter_dict, spatial_domain, temporal_domain):
    """
    Create a simplex coverage for the dataset and wrap it in a
    ViewCoverage pointing at its persistence directory.

    @param dataset_id      Identifier used as the coverage name.
    @param description     Coverage description; falls back to dataset_id.
    @param parameter_dict  Serialized ParameterDictionary.
    @param spatial_domain  Serialized spatial GridDomain.
    @param temporal_domain Serialized temporal GridDomain.
    @retval the new ViewCoverage
    """
    file_root = FileSystem.get_url(FS.CACHE, 'datasets')
    # Deserialize inputs before building the backing simplex coverage.
    pdict = ParameterDictionary.load(parameter_dict)
    sdom = GridDomain.load(spatial_domain)
    tdom = GridDomain.load(temporal_domain)
    scov = self._create_simplex_coverage(dataset_id, pdict, sdom, tdom, self.inline_data_writes)
    # The view references the simplex coverage by location, so the
    # simplex handle can be closed once the view is constructed.
    vcov = ViewCoverage(file_root, dataset_id, description or dataset_id,
                        reference_coverage_location=scov.persistence_dir)
    scov.close()
    return vcov
示例7: _create_view_coverage
# 需要导入模块: from pyon.util.file_sys import FileSystem [as 别名]
# 或者: from pyon.util.file_sys.FileSystem import get_url [as 别名]
def _create_view_coverage(self, dataset_id, description, parent_dataset_id):
    """
    Create a ViewCoverage that references the parent dataset's simplex
    coverage location.

    @param dataset_id        Identifier for the new view coverage.
    @param description       Description; falls back to dataset_id.
    @param parent_dataset_id Dataset whose coverage location is reused.
    @retval the new ViewCoverage
    """
    # As annoying as it is, we need to load the view coverage belonging
    # to the parent dataset id and use the information inside to build
    # the new one...
    file_root = FileSystem.get_url(FS.CACHE, 'datasets')
    parent_scov = self._get_simplex_coverage(parent_dataset_id, mode='r')
    # Only the persistence location is needed; release the handle early.
    scov_location = parent_scov.persistence_dir
    parent_scov.close()
    return ViewCoverage(file_root, dataset_id, description or dataset_id,
                        reference_coverage_location=scov_location)
示例8: process
# 需要导入模块: from pyon.util.file_sys import FileSystem [as 别名]
# 或者: from pyon.util.file_sys.FileSystem import get_url [as 别名]
def process(self, packet):
    """
    Transform a packet by incrementing its 'num' field by one, publish
    the result when this transform has an output, and append a trace
    line to a shared temp file.

    @param packet dict-like message; 'num' defaults to 0 when absent.
    """
    num = int(packet.get('num', 0))
    # Compute 1+num directly instead of shelling out to `bc` via the
    # deprecated commands.getoutput -- same string result, no subprocess.
    output = str(1 + num)
    if self.has_output:
        self.publish(dict(num=output))
    # Append a human-readable trace for tests/debugging; 'with' ensures
    # the file handle is closed even if the write fails.
    with open(FileSystem.get_url(FS.TEMP, "transform_output"), 'a') as f:
        f.write('(%s): Received %s, transform: %s\n' % (self.name, packet, output))
示例9: upload_qc
# 需要导入模块: from pyon.util.file_sys import FileSystem [as 别名]
# 或者: from pyon.util.file_sys.FileSystem import get_url [as 别名]
def upload_qc():
    """
    Service gateway endpoint: accept a QC file upload, save it under the
    temp upload folder, register a FileUploadContext document in the
    object store, and dispatch an UploadQcProcessing process to handle it.

    @retval gateway JSON response containing the FileUploadContext id, or
            an error response when anything in the pipeline fails.
    """
    upload_folder = FileSystem.get_url(FS.TEMP,'uploads')
    try:
        object_store = Container.instance.object_store
        # required fields
        upload = request.files['file'] # <input type=file name="file">
        if upload:
            # upload file - run filename through werkzeug.secure_filename
            filename = secure_filename(upload.filename)
            path = os.path.join(upload_folder, filename)
            upload_time = time.time()
            upload.save(path)
            # Sniff the saved file's type; default to CSV when detection
            # is inconclusive.
            filetype = _check_magic(upload) or 'CSV' # Either going to be ZIP or CSV, probably
            # register upload
            file_upload_context = {
                'name': 'User uploaded QC file %s' % filename,
                'filename': filename,
                'filetype': filetype, # only CSV, no detection necessary
                'path': path,
                'upload_time': upload_time,
                'status': 'File uploaded to server'
            }
            fuc_id, _ = object_store.create_doc(file_upload_context)
            # client to process dispatch
            pd_client = ProcessDispatcherServiceClient()
            # create process definition
            process_definition = ProcessDefinition(
                name='upload_qc_processor',
                executable={
                    'module': 'ion.processes.data.upload.upload_qc_processing',
                    'class': 'UploadQcProcessing'
                }
            )
            process_definition_id = pd_client.create_process_definition(process_definition)
            # create process
            process_id = pd_client.create_process(process_definition_id)
            # schedule process, passing the upload context id through the
            # process configuration.
            config = DotDict()
            config.process.fuc_id = fuc_id
            pid = pd_client.schedule_process(process_definition_id, process_id=process_id, configuration=config)
            log.info('UploadQcProcessing process created %s' % pid)
            # response - only FileUploadContext ID and determined filetype for UX display
            resp = {'fuc_id': fuc_id}
            return gateway_json_response(resp)
        # No file part in the request.
        raise BadRequest('Invalid Upload')
    except Exception as e:
        # Gateway boundary: convert any failure into an error response.
        return build_error_response(e)
示例10: check_msg
# 需要导入模块: from pyon.util.file_sys import FileSystem [as 别名]
# 或者: from pyon.util.file_sys.FileSystem import get_url [as 别名]
def check_msg(msg, header):
    """
    Verify a received message is a StreamGranuleContainer and dump its
    HDF payload to a temp file for inspection, then flip the result flag.

    @param msg    incoming message; must be a StreamGranuleContainer.
    @param header message header (unused).
    """
    assertions(isinstance(msg, StreamGranuleContainer), 'Msg is not a container')
    hdf_string = msg.identifiables[msg.data_stream_id].values
    sha1 = hashlib.sha1(hdf_string).hexdigest().upper()
    log.debug('Sha1 matches')
    log.debug('Dumping file so you can inspect it.')
    log.debug('Records: %d' % msg.identifiables['record_count'].value)
    # Write in binary mode ('wb', not 'w'): hdf_string is raw HDF5
    # bytes, not text.  'with' guarantees the handle is closed.
    with open(FileSystem.get_url(FS.TEMP, '%s.cap.hdf5' % sha1[:8]), 'wb') as f:
        f.write(hdf_string)
        log.debug('Stream Capture: %s', f.name)
    result.set(True)
示例11: setUp
# 需要导入模块: from pyon.util.file_sys import FileSystem [as 别名]
# 或者: from pyon.util.file_sys.FileSystem import get_url [as 别名]
def setUp(self):
    """
    Build three small HDF5 files of salinity/temperature/pressure test
    data and precompute their concatenations for later comparison.
    """
    import numpy, h5py
    FileSystem(DotDict())
    #--------------------------------------------------------------------
    # Create an hdf file for testing
    #--------------------------------------------------------------------
    # Three chunks of 50 values each per field.
    self.salinity = [numpy.arange(50), numpy.arange(50) + 50, numpy.arange(50) + 100]
    self.temperature = [numpy.random.normal(size=50) for _ in range(3)]
    self.pressure = [numpy.random.uniform(low=0.0, high=1.0, size=50) for _ in range(3)]
    # Give check_pieces the total dataset extent so it can do its checking.
    self.sl = slice(0, 150)
    self.fnames = [FileSystem.get_url(FS.TEMP, 'data%d.hdf5' % (i + 1)) for i in range(3)]
    # Write one HDF5 file per chunk, each with a 'fields' group holding
    # the three datasets.
    for fname, s, t, p in zip(self.fnames, self.salinity, self.temperature, self.pressure):
        hfile = h5py.File(fname, 'w')
        fields = hfile.create_group('fields')
        fields.create_dataset("salinity", data=s)
        fields.create_dataset("temperature", data=t)
        fields.create_dataset("pressure", data=p)
        hfile.close()
    # Concatenate the test values for comparison:
    self.t_result = numpy.concatenate(self.temperature, axis=0)
    self.s_result = numpy.concatenate(self.salinity, axis=0)
    self.p_result = numpy.concatenate(self.pressure, axis=0)
示例12: __init__
# 需要导入模块: from pyon.util.file_sys import FileSystem [as 别名]
# 或者: from pyon.util.file_sys.FileSystem import get_url [as 别名]
def __init__(self, hdf_string):
    """
    Create a decoder for the given HDF5 byte string by persisting it to
    a temp file that h5py can later open and read.

    @param hdf_string raw HDF5 file contents as a byte string.
    @raises AssertionError when hdf_string is not a string.
    """
    assert isinstance(hdf_string, basestring), 'The input for instantiating the HDFDecoder object is not a string'
    # Name the temp file after the content hash so identical payloads
    # map to the same path.
    self.filename = FileSystem.get_url(fs=FS.TEMP, filename=hashlib.sha1(hdf_string).hexdigest(), ext='_decoder.hdf5')
    # Save the hdf string to disk (in /tmp) so it can be reopened as an
    # hdf file; 'with' closes the handle even if the write fails
    # (original used open/write/close and had dead commented-out code).
    with open(self.filename, mode='wb') as f:
        f.write(hdf_string)
示例13: _get_coverage_path
# 需要导入模块: from pyon.util.file_sys import FileSystem [as 别名]
# 或者: from pyon.util.file_sys.FileSystem import get_url [as 别名]
def _get_coverage_path(cls, dataset_id):
    """
    Return the on-disk directory holding the dataset's coverage.

    @param dataset_id The dataset identifier (used as the directory name).
    @retval absolute path under the cache 'datasets' root.
    """
    datasets_root = FileSystem.get_url(FS.CACHE, 'datasets')
    # '%s' coerces non-string ids to str before joining.
    return os.path.join(datasets_root, '%s' % dataset_id)
示例14: _get_coverage
# 需要导入模块: from pyon.util.file_sys import FileSystem [as 别名]
# 或者: from pyon.util.file_sys.FileSystem import get_url [as 别名]
def _get_coverage(cls, dataset_id, mode='r'):
    """
    Load and return the coverage stored for the dataset.

    @param dataset_id The dataset whose coverage is loaded.
    @param mode       h5py-style open mode, read-only by default.
    @retval the loaded AbstractCoverage
    """
    datasets_root = FileSystem.get_url(FS.CACHE, 'datasets')
    return AbstractCoverage.load(datasets_root, dataset_id, mode=mode)
示例15: _create_simplex_coverage
# 需要导入模块: from pyon.util.file_sys import FileSystem [as 别名]
# 或者: from pyon.util.file_sys.FileSystem import get_url [as 别名]
def _create_simplex_coverage(cls, dataset_id, parameter_dictionary, spatial_domain, temporal_domain):
    """
    Create a new SimplexCoverage for the dataset under the cache root.

    @param dataset_id           Identifier used as coverage name.
    @param parameter_dictionary ParameterDictionary for the coverage.
    @param spatial_domain       Spatial GridDomain.
    @param temporal_domain      Temporal GridDomain.
    @retval the new SimplexCoverage
    """
    datasets_root = FileSystem.get_url(FS.CACHE, 'datasets')
    return SimplexCoverage(datasets_root, dataset_id,
                           'Simplex Coverage for %s' % dataset_id,
                           parameter_dictionary=parameter_dictionary,
                           temporal_domain=temporal_domain,
                           spatial_domain=spatial_domain)