本文整理汇总了Python中ryw.give_bad_news函数的典型用法代码示例。如果您正苦于以下问题:Python give_bad_news函数的具体用法?Python give_bad_news怎么用?Python give_bad_news使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了give_bad_news函数的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: process_repo_dir
def process_repo_dir(srcd, dstd, copyFileFunc, copyDirFunc):
    """Recursively copy a repository directory from srcd to dstd.

    Regular entries are copied via copy_tree_diff_common(); prefixed
    object entries surviving ryw.cleanup_partial_repo_dir() are copied
    via copy_an_outgoing_object().  Returns True only when every copy
    (and size probe) succeeded.
    """
    #check_village_log('test process_repo_dir')
    success,entries,regularEntries,prefixes = get_repo_dir_entries(srcd)
    goodPrefixes = ryw.cleanup_partial_repo_dir(srcd, prefixes)
    # logging.debug('goodPrefixes: ' + repr(goodPrefixes))
    # logging.debug('regular: ' + repr(regularEntries))
    success = True
    for regular in regularEntries:
        srcName = os.path.join(srcd, regular)
        dstName = os.path.join(dstd, regular)
        logging.debug('    regular is: ' + regular)
        thisSuccess = copy_tree_diff_common(srcName, dstName,
                                            copyFileFunc, copyDirFunc)
        success = success and thisSuccess
    for prefix in goodPrefixes:
        # BUG FIX: the status of is_big_data() used to be assigned
        # straight into 'success', wiping out failures recorded in
        # earlier loop iterations.  Keep it in its own variable and
        # fold it into the accumulator instead.
        sizeOk,isBig = is_big_data(srcd, prefix)
        if not sizeOk:
            ryw.give_bad_news(
                'process_repo_dir: failed to determine data size: '+srcd,
                logging.warning)
            success = False
            continue
        thisSuccess = copy_an_outgoing_object(srcd, dstd, prefix,
                                              bigdata = isBig)
        success = success and thisSuccess
    return success
示例2: get_paths
def get_paths(objroot, objID, version, meta, repositoryRoot):
    """Collect candidate filesystem paths for an object version.

    Starts from get_paths1() and appends the view-side path derived
    from the metadata's 'path' attribute, or None whenever that path
    cannot be determined.
    """
    paths = get_paths1(objroot, objID, version)

    # No metadata at all: no view path to add.
    if not meta:
        paths.append(None)
        return paths

    # Metadata present but lacking the 'path' attribute: complain, bail.
    if not meta.has_key('path'):
        ryw.give_bad_news('DeleteObject.get_paths: missing path attribute: '+
                          repr(meta), logging.error)
        paths.append(None)
        return paths

    relPath = meta['path']
    try:
        resources = su.parseKeyValueFile(
            os.path.join(repositoryRoot, 'Resources.txt'))
        viewroot = resources['viewroot']
    except:
        ryw.give_bad_news('DeleteObject.get_paths: failed to get view root.',
                          logging.critical)
        paths.append(None)
        return paths

    paths.append(os.path.join(viewroot, relPath))
    logging.debug('DeleteObject.get_paths: ' + repr(paths))
    return paths
示例3: get_meta
def get_meta(searchFile, objID, version, repositoryRoot):
    """Fetch metadata for objID#version, preferring the SearchFile.

    Returns (meta, objectstoreRoot).  Falls back to reading the
    hard-wired object store under repositoryRoot when the SearchFile
    misses; returns (None, None) when even that root is absent, and
    (None, objroot) when the store exists but the read fails.
    """
    success,meta = searchFile.get_meta(objID, version)
    if success:
        #
        # I'm doing this to hardwire all
        # places of getting objectstoreroot.
        #
        #return (meta, meta['objectstore'])
        return (meta, ryw.hard_wired_objectstore_root())

    logging.warning(
        'ryw_meta.get_meta: not finding it in the SearchFile: ' +
        objID + ' # ' + str(version) + ', but attempting to continue')

    #
    # look for the hardwired objectstore root. not nice but...
    #
    objroot = os.path.join(repositoryRoot, 'WWW', 'ObjectStore')
    if not os.path.exists(objroot):
        ryw.give_bad_news(
            'DeleteObject.get_meta: even the hardwired root does not exist: '+
            objroot, logging.critical)
        return (None, None)

    gotIt,meta = ryw.get_meta(objroot, objID, version)
    if gotIt:
        return (meta, objroot)
    logging.warning(
        'ryw.get_meta: failed to read metadata from objstore: '+
        objID + ' # ' + str(version))
    return (None, objroot)
示例4: launchExplorer
def launchExplorer(path):
    """Ask ryw_bizarro to open a file-browser window on *path*.

    Returns the helper's result, or False when it raised anything.
    """
    ryw.db_print2('launchExplorer: path is: ' + path, 59)
    try:
        result = ryw_bizarro.launch_explorer(path)
    except:
        ryw.give_bad_news("Failed to launch Explorer",logging.warning)
        return False
    return result
示例5: add_this_version_to_search_file
def add_this_version_to_search_file(self, meta):
    """same as above but does not increment version number.

    Unlike the auto-versioning variant, the caller must supply both
    'id' and 'version' in meta.  Returns True on success (including
    the already-present no-op case), False otherwise.
    """
    logging.debug('add_this_version_to_search_file: ' + repr(meta))
    if not meta.has_key('id') or not meta.has_key('version'):
        ryw.give_bad_news(
            'add_this_version_to_search_file: missing field(s)...',
            logging.critical)
        return False
    objID = meta['id']
    version = meta['version']
    # An already-present id#version is treated as success (no-op),
    # but the user is warned.
    success,existingMeta = self.get_meta(objID, version)
    if success:
        ryw.give_news(
            'warning: add_this_version_to_search_file: already exists: '+
            objID + ' ' + str(version), logging.warning)
        return True
    # Explicit name-mangled access to SearchFile's private helpers:
    # first append to the on-disk search file, then mirror the entry
    # into the in-memory index.
    if not self._SearchFile__append_to_search_file(meta):
        return False
    self._SearchFile__add_to_memory_index(objID, version, meta)
    logging.debug('add_this_version_to_search_file: success.')
    return True
示例6: out_obj_dir_name
def out_obj_dir_name(objStoreRoot, objname, version, currCounter):
    """Build an outgoing directory name for objname#version.

    The name is a zero-padded counter prefix followed by a sanitized
    author/content-alias string, truncated to 32 characters.
    Returns (success, dirName, nextCounter).
    """
    if currCounter >= 9999:
        ryw.give_bad_news('out_obj_dir_name: counter exceeded 9999.',
                          logging.warning)
        # deliberately only a warning -- do not abort on overflow.
        #return (False, None, currCounter)
    success,meta = ryw.get_meta(objStoreRoot, objname, version)
    if not success:
        ryw.give_bad_news('out_obj_dir_name: failed to get meta data: ' +
                          objname + '#' + str(version), logging.error)
        return (False, None, currCounter)
    if meta.has_key('content_alias'):
        author = meta['content_alias']
        author = stripStr(author)
    elif meta.has_key('author_name'):
        author = meta['author_name']
        author = stripStr(author)
        # BUG FIX: regex alternation matches left to right, so the old
        # order '(^[mM]s)|(^[mM]r)|(^[mM]rs)|(^[mM]iss)' let the 'Mr'
        # branch fire on 'Mrs...', leaving a stray 's'.  Longer titles
        # must be tried before their prefixes.
        author = re.sub('(^[mM]rs)|(^[mM]iss)|(^[mM]s)|(^[mM]r)', '', author)
    else:
        author = 'unknown'
    prefix = str(currCounter).zfill(2)
    dirName = prefix + author
    dirName = dirName[:32]
    logging.debug('out_obj_dir_name: dirName is: ' + dirName)
    return (True, dirName, currCounter + 1)
示例7: add_all
def add_all(queueName, searchFile):
    """Queue a download request for every object found in searchFile.

    Builds the set of 'id#version' strings, writes it with
    write_reqs(), then removes the temp/backup files.  Returns True on
    success, False on any failure.
    """
    try:
        requested = set()
        total = 0
        for meta in searchFile.iterator():
            objstr = meta['id'] + '#' + str(meta['version'])
            requested.add(objstr)
            total += 1
            logging.debug('add_all: ' + objstr)
        ryw.give_news(
            'add_all: number of objects added to the request queue: ' +
            str(total), logging.info)
        ok,tmppath,bakpath = write_reqs(queueName, requested)
        if not ok:
            ryw.give_bad_news('add_all: write_reqs failed: ' + queueName,
                              logging.critical)
            return False
        cleanup(tmppath, bakpath)
        return True
    except:
        ryw.give_bad_news('add_all: failed.', logging.critical)
        return False
示例8: NOTUSED_talk_to_search_server
def NOTUSED_talk_to_search_server(values):
    """NOT USED ANY MORE.
    need searchserver to send meta-data to.
    in turn, it gives us the version number to use for the object."""
    # Connect to the local XML-RPC search server.
    try:
        searchserver = xmlrpclib.ServerProxy("http://localhost:53972")
    except:
        ryw.give_bad_news(
            'fatal_error: uploadobject: failed to connect to search server.',
            logging.critical)
        return (False, None, None)

    # Hand the metadata over; the server replies with the version to use.
    version = None
    try:
        version = searchserver.addtosearchfile(values)
        values['version'] = version
    except:
        ryw.give_bad_news(
            'fatal_error: uploadobject: failed to addtosearchfile().',
            logging.critical)
        return (False, searchserver, version)

    logging.debug('talk_to_search_server passed: got version: ' +
                  repr(version))
    return (True, searchserver, version)
示例9: add_to_search_file
def add_to_search_file(values, hasVersion, cloneVersion=False):
    """need search file to send meta-data to.
    in turn, it gives us the version number to use for the object.

    Returns (success, version).  When hasVersion is true, values must
    already carry 'version'; otherwise the search file assigns one and
    it is written back into values['version'].
    """
    success,searchFile = ryw.open_search_file(
        'add_to_search_file:',
        os.path.join(RepositoryRoot, 'WWW', 'logs'),
        'upload.log',
        os.path.join(RepositoryRoot, 'SearchFile'),
        True)
    if not success:
        return (False, None)
    version = None
    try:
        if hasVersion:
            success = searchFile.add_this_version_to_search_file(values)
            version = values['version']
        else:
            success,version = searchFile.add_to_search_file(
                values, cloneVersion=cloneVersion)
        # Release the search file before mutating values; on the
        # exception path below, done() is called as well so the lock
        # is never leaked.
        searchFile.done()
        values['version'] = version
    except:
        ryw.give_bad_news(
            'fatal_error: failed to add_to_search_file().',
            logging.critical)
        searchFile.done()
        return (False, version)
    logging.debug('add_to_search_file passed: got version: ' +
                  repr(version))
    return (True, version)
示例10: show_one_server_object
def show_one_server_object(meta, searchFile):
    """like WebUpload_ryw.show_one_server_object() except that
    the searchFile is passed in.

    Renders one object's compact HTML view to stdout (CGI-style).
    Returns False on failure; NOTE(review): the success path falls off
    the end and returns None -- callers apparently don't check it.
    """
    print "<BR>"
    print Browse.script_str()
    #displayObject = ryw_view.DisplayObject(RepositoryRoot,
    #                                       calledByVillageSide = False,
    #                                       missingFileFunc = None)
    success,reverseLists = ReverseLists.open_reverse_lists(
        'EditObject:', '', '',
        os.path.join(RepositoryRoot, 'ReverseLists'), True,
        searchFile = searchFile,
        repositoryRoot = RepositoryRoot)
    if not (success and reverseLists):
        ryw.give_bad_news('EditObject: failed to open ReverseLists.',
                          logging.critical)
        # open_reverse_lists may hand back a handle even on failure;
        # close it so the lock is not leaked.
        if reverseLists:
            reverseLists.done()
        return False
    displayObject = ryw_view.DisplayObject(
        RepositoryRoot, calledByVillageSide = False,
        missingFileFunc = Browse.reqDownloadFunc,
        searchFile = searchFile,
        reverseLists = reverseLists)
    displayObject.begin_print()
    displayObject.show_an_object_compact(meta)
    displayObject.end_print()
    reverseLists.done()
示例11: delete_all
def delete_all(searchSel):
    """Delete every object named in searchSel (list of 'id#version').

    A single search file handle is threaded through the deletions and
    closed at the end.  Returns True only when every entry parsed and
    deleted successfully.
    """
    completeSuccess = True
    searchFile = None
    for objstr in searchSel:
        ok,objID,version = ryw.split_objstr(objstr)
        if not ok:
            ryw.give_bad_news('DelSearchAll: invalid objstr: ' + objstr,
                              logging.error)
            completeSuccess = False
            continue
        ok,searchFile = DeleteObject.do_delete(
            objID, version, searchFile=searchFile)
        if ok:
            ryw.db_print('DelSearchAll.delete_all: do_delete succeeded.',
                         18)
        else:
            ryw.give_bad_news(
                'DelSearchAll: DeleteObject.do_delete failed.' + objstr,
                logging.error)
            completeSuccess = False
    if searchFile:
        searchFile.done()
    return completeSuccess
示例12: do_update_metadata
def do_update_metadata(objroot, objID, version, meta, searchFile=None):
    """Rewrite an object's metadata and update the search file entry.

    Also called when merging incoming data in
    ProcessDiscs.deal_with_stub(); there the optional searchFile
    argument lets the caller reuse an already-open handle instead of
    re-opening it over and over again.  Returns (success, searchFile).
    """
    if searchFile:
        ryw.db_print('do_update_metadata: reusing searchFile', 11)
    else:
        ryw.db_print('do_update_metadata: null searchFile', 11)

    # Persist the new metadata into the object store first.
    if not ryw_meta.rewrite_meta(objroot, objID, version, meta):
        ryw.give_bad_news('EditObject: rewrite_meta failed.', logging.error)
        return (False, None)

    # Open the search file lazily, only when the caller didn't pass one.
    if not searchFile:
        searchFile = ryw_meta.open_search_file(RepositoryRoot,
                                               grabWriteLock = True)
        if not searchFile:
            ryw.give_bad_news('EditObject: failed to open search file.',
                              logging.critical)
            return (False, None)

    searchFile.modify(meta)
    return (True, searchFile)
示例13: copy_tree_diff_dir
def copy_tree_diff_dir(src, dst, copyFileFunc, copyDirFunc):
    """normal, except for moving _DONE items to the end of copying..."""
    assert(os.path.exists(src))
    assert(os.path.isdir(src))
    logging.debug('copy_tree_diff_dir: ' + src + ' -> ' + dst)
    try:
        make_dst_dir(src, dst)
        #
        # make sure we copy any _DONE items last.
        #
        success = True
        for entry in move_done_last(os.listdir(src)):
            ok = copy_tree_diff_common(os.path.join(src, entry),
                                       os.path.join(dst, entry),
                                       copyFileFunc,
                                       copyDirFunc)
            success = success and ok
        return success
    except:
        ryw.give_bad_news('copy_tree_diff_dir: failed to copy dir: ' +
                          src + ' -> ' + dst, logging.critical)
        return False
示例14: copy_tree_diff_file_repo
def copy_tree_diff_file_repo(src, dst):
    """used during recursive copying of the object store:
    files that are too big are not copied."""
    assert(os.path.exists(src))
    assert(os.path.isfile(src))
    try:
        src = os.path.normpath(src)
        dst = os.path.normpath(dst)
        srcBase = os.path.basename(src)
        kB = os.path.getsize(src) / 1024
        # Files above the small-file ceiling are deliberately skipped,
        # and skipping still counts as success.
        if kB > ryw.smallFileSizeCeilingKB:
            return True
        return copy_tree_diff_file(src, dst)
    except:
        ryw.give_bad_news('copy_tree_diff_file_repo: failed to copy file: ' +
                          src + ' -> ' + dst, logging.critical)
        return False
示例15: deleteRequested
def deleteRequested(form):
    """CGI handler: delete a queued outgoing disc image.

    Reads the 'Img' field, removes the robot job files (.JRQ/.ERR/.DON
    plus the job file itself) and the image under tmpout, then exits
    the process -- writes "True" and exits 0 on success, exits 1 on
    any failure.
    """
    image = form.getfirst("Img","")
    if not image:
        print "No Image specified to delete"
        sys.exit(1)
    success, resources = get_resources()
    if not success:
        ryw.give_bad_news("Error parsing resource file",logging.error)
        sys.exit(1)
    # Remove the robot job control files in every state, best-effort.
    robotsJobDir = resources['robotsjobdir']
    jobfile = os.path.join(robotsJobDir, image)
    ryw.cleanup_path(jobfile+".JRQ",'deleteOutgoingImage.deleteRequested:')
    ryw.cleanup_path(jobfile+".ERR",'deleteOutgoingImage.deleteRequested:')
    ryw.cleanup_path(jobfile+".DON",'deleteOutgoingImage.deleteRequested:')
    ryw.cleanup_path(jobfile,'deleteOutgoingImage.deleteRequested:')
    # Now remove the image file itself from the tmpout staging area.
    tmpout = resources['tmpout']
    image = os.path.join(tmpout,image)
    if not os.path.exists(image):
        ryw.give_bad_news("specified image doesnt exist",logging.info)
        sys.exit(1)
    ryw.cleanup_path(image,"deleteOutgoingImage.deleteRequested:")
    sys.stdout.write("True")
    sys.exit(0)