This page collects typical usage examples of the Python method pydio.job.localdb.LocalDbHandler.get_node_status. If you have been wondering how LocalDbHandler.get_node_status works, how to call it, or what real-world usage looks like, the hand-picked code examples below may help. You can also explore further usage examples of the enclosing class, pydio.job.localdb.LocalDbHandler.
Below, 2 code examples of the LocalDbHandler.get_node_status method are shown, sorted by popularity by default.
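For orientation, here is a minimal sketch of calling the method directly. It assumes the two constructor arguments that Example 1 below passes to LocalDbHandler (the job's data path and the locally synchronised directory); the lookup_status helper name is made up for illustration, and the status strings mentioned are simply the ones that appear in these examples, not an exhaustive list.

from pydio.job.localdb import LocalDbHandler

def lookup_status(base_path, directory_path, relative_path):
    # base_path: the job's data folder, directory_path: the locally synchronised folder
    # (the same two constructor arguments used in Example 1 below).
    db_handler = LocalDbHandler(base_path, directory_path)
    # Paths are passed relative to the job directory, with a leading slash.
    return db_handler.get_node_status('/' + relative_path.strip('/'))

# Statuses seen in the examples below include 'SOLVED:KEEPLOCAL',
# 'SOLVED:KEEPREMOTE' and 'CONFLICT'.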
Example 1: get
# Required import: from pydio.job.localdb import LocalDbHandler [as alias]
# Alternatively: from pydio.job.localdb.LocalDbHandler import get_node_status [as alias]
def get(self, job_id='', relative_path=''):
    """
    retrieves the stat info for a given file / list the active job details
    :param job_id: (optional) Job Id of the file/ folder
    :param relative_path: (optional) relative path of the file/folder with respect
                          to the corresponding repository(job_id)
    :returns a json response
    """
    if request.path == '/stat':
        jobs = JobsLoader.Instance().get_jobs()
        json_jobs = {}
        for job in jobs:
            if jobs[job].active:
                json_jobs.update({jobs[job].id: [jobs[job].directory, jobs[job].server, jobs[job].label, jobs[job].workspace]})
        return json_jobs
    else:
        directory_path = JobsLoader.Instance().get_job(job_id).directory
        base_path = JobsLoader.Instance().build_job_data_path(job_id)
        path = os.path.join(directory_path, relative_path)
        #r = os.stat(path)
        # Get the status of the file idle/busy... by join of ajxp_index and ajxp_node_status tables
        db_handler = LocalDbHandler(base_path, directory_path)
        if Path(str(path.encode("utf-8"))).is_dir():
            node_status = db_handler.get_directory_node_status("/" + relative_path)
        else:
            node_status = db_handler.get_node_status("/" + relative_path)
        return {"node_status": node_status}
Example 2: ContinuousDiffMerger
# Required import: from pydio.job.localdb import LocalDbHandler [as alias]
# Alternatively: from pydio.job.localdb.LocalDbHandler import get_node_status [as alias]
#......... part of the code has been omitted here .........
                    if not (os.path.normpath(item['node']['node_path']) == os.path.normpath(otheritem['node']['node_path'])):
                        continue
                    if item['node']['bytesize'] == otheritem['node']['bytesize'] and item['node']['md5'] == otheritem['node']['md5']:
                        logging.debug('Reconciliation sequence for change (node)'+item['node']['node_path'])
                        lchanges.remove(item)
                        rchanges.remove(otheritem)
                        self.remove_seq(item['seq'], 'local')
                        self.remove_seq(otheritem['seq'], 'remote')
                        break
            except Exception as e:
                pass
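        # Illustrative sketch, not part of the original method: the (partially
        # omitted) reconciliation loop above cancels a local change against the
        # remote change that describes the same node with an identical size and
        # md5. Extracted into a helper with the same field names, the matching
        # rule is roughly:
        def _changes_cancel_out(local_item, remote_item):
            l_node, r_node = local_item.get('node'), remote_item.get('node')
            if not l_node or not r_node:
                return False
            if os.path.normpath(l_node['node_path']) != os.path.normpath(r_node['node_path']):
                return False
            return l_node['bytesize'] == r_node['bytesize'] and l_node['md5'] == r_node['md5']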
        test_stats = list(set(map(lambda it: it['source'] if it['source'] != 'NULL' else it['target'], lchanges)))
        remote_stats = None
        if len(test_stats):
            remote_stats = self.sdk.bulk_stat(test_stats, with_hash=True)
        rchanges = filter(lambda it: not self.filter_change(it, remote_stats, None), rchanges)
        lchanges = filter(lambda it: not self.filter_change(it, None, remote_stats), lchanges)
        last_ops = self.db_handler.get_last_operations()
        new_rchanges = []
        for item in lchanges:
            ignore = False
            for last in last_ops:
                if last['type'] == item['type'] and last['source'] == item['source'] and last['target'] == item['target']:
                    logging.info('IGNORING, RECENT MOVE FROM SERVER', last)
                    ignore = True
                    break
            if ignore:
                continue
            conflict = False
            for rItem in rchanges:
                if (not item['node'] and not rItem['node'] and rItem['source'] == rItem['source']) or (item['node'] and rItem['node'] and item['node']['node_path'] and rItem['node']['node_path'] and os.path.normpath(item['node']['node_path']) == os.path.normpath(rItem['node']['node_path'])):
                    # Seems there is a conflict - check
                    c_path = item['source']
                    if item['node']:
                        c_path = item['node']['node_path']
                    status = self.db_handler.get_node_status(c_path)
                    if status == 'SOLVED:KEEPLOCAL':
                        rchanges.remove(rItem)
                    elif status == 'SOLVED:KEEPREMOTE':
                        conflict = True
                    else:
                        conflict = True
                        rchanges.remove(rItem)
                        conflicts.append({'local':item,'remote':rItem})
                    break
            if conflict:
                continue
            new_rchanges.append(item)
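        # Illustrative sketch, not part of the original method: get_node_status()
        # drives the conflict branch above. The mapping from the stored status to
        # the action taken is roughly the following, expressed here as a pair of
        # (drop_remote_change, treat_as_conflict):
        def _conflict_action(status):
            if status == 'SOLVED:KEEPLOCAL':
                return True, False   # user kept the local version: discard the remote change
            if status == 'SOLVED:KEEPREMOTE':
                return False, True   # keep the remote change, skip the local one
            return True, True        # unresolved: hold both sides and record a conflict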
        self.db_handler.clear_operations_buffer()
        # Sort to make sure directory operations are applied first
        rchanges = sorted(rchanges + new_rchanges, cmp=self.changes_sorter)
        # Prune changes: for DELETE and MOVE of a directory, remove all children
        toremove = []
        for i in range(len(rchanges)):
            ch = rchanges[i]
            if ch['type'] == 'path' and not ch['source'] == 'NULL' and not ch['target'] == 'NULL' and ch['node']['md5'] == 'directory':
                if i < len(rchanges)-1:
                    for j in range(i+1, len(rchanges)):
                        if rchanges[j]['source'] and rchanges[j]['type'] == 'path' and rchanges[j]['source'].startswith(ch['source']+'/'):
                            toremove.append(rchanges[j])
        if len(toremove):
            for r in toremove:
                if r in rchanges: rchanges.remove(r)
        return rchanges
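    # Illustrative sketch, not part of the original class: the pruning pass at the
    # end of the method above drops 'path' changes that live under a directory that
    # is itself being moved or deleted. Ordering aside, it amounts to:
    @staticmethod
    def _prune_children_sketch(changes):
        dir_prefixes = [c['source'] + '/' for c in changes
                        if c['type'] == 'path' and c['source'] != 'NULL'
                        and c['target'] != 'NULL'
                        and (c.get('node') or {}).get('md5') == 'directory']
        return [c for c in changes
                if not (c['type'] == 'path' and c.get('source')
                        and any(c['source'].startswith(p) for p in dir_prefixes))]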
    def store_conflicts(self, conflicts):
        for conflict in conflicts:
            local = conflict["local"]
            remote = conflict["remote"]
            if local["node"]:
                path = local["node"]["node_path"]
            elif local["source"]:
                path = local["source"]
            else:
                path = local["target"]
            self.db_handler.update_node_status(path, 'CONFLICT', pickle.dumps(remote))
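    # Illustrative sketch, not part of the original class: store_conflicts() pickles
    # the raw remote change next to the 'CONFLICT' marker, so whatever later presents
    # the conflict can restore it with pickle.loads(). A hypothetical reader:
    @staticmethod
    def _load_conflicting_remote(stored_blob):
        return pickle.loads(stored_blob)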
    def get_remote_changes(self, seq_id, changes=dict()):
        logging.debug('Remote sequence ' + str(seq_id))
        data = self.sdk.changes(seq_id)
        for (i, item) in enumerate(data['changes']):
            item['location'] = 'remote'
            key = item['source'] if item['source'] != 'NULL' else item['target']
            if not key in changes['path_to_seqs']:
                changes['path_to_seqs'][key] = []
            changes['path_to_seqs'][key].append(item['seq'])
            changes['data'][item['seq']] = item
        return data['last_seq']
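get_remote_changes() fills a caller-supplied accumulator rather than building one itself (note that the dict() default argument is shared between calls, so callers are expected to pass their own). Based on the keys used above, the accumulator presumably has the shape sketched below; the merger variable and the starting sequence number are placeholders, as the real initialisation is not shown here.

# Shape implied by get_remote_changes(); the real initialisation happens elsewhere.
changes = {
    'path_to_seqs': {},   # path -> list of sequence numbers touching that path
    'data': {},           # sequence number -> raw change record from the server
}
# last_seq = merger.get_remote_changes(previous_seq, changes)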