This article collects typical usage examples of the Python method threading.Condition._is_owned. If you are unsure exactly what Condition._is_owned does or how to use it, the selected code example below may help; you can also look further into usage examples of its containing class, threading.Condition.
The section below shows 1 code example of the Condition._is_owned method.
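Before the example, a brief note on what the method reports: _is_owned is a private helper of threading.Condition that tells whether the condition's underlying lock is currently held (with a plain Lock, CPython falls back to probing the lock with a non-blocking acquire). The short sketch below is illustrative only and is not taken from the example that follows.

from threading import Condition, Lock

cond = Condition(Lock())
print(cond._is_owned())      # False: the underlying lock is not held
with cond:                   # entering the condition acquires the lock
    print(cond._is_owned())  # True while the lock is held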
Example 1: Spooler
# Required module import: from threading import Condition [as alias]
# Or: from threading.Condition import _is_owned [as alias]
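# The example also relies on several aliased imports that this page does not
# show. The set below is inferred from how the names are used and is an
# assumption, not the original project's import block:
from collections import deque
from datetime import datetime
from os import listdir, makedirs, remove, stat
from os.path import exists, isdir, join as path_join
from struct import pack
from threading import Condition, Lock
from time import time
# json_dumps and json_util_default are aliases whose source is not shown;
# a common choice would be json.dumps and bson.json_util.default.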
class Spooler(object):
    # NOTE: Python 2 code (str-based struct packing, generator .next() calls).
    def __init__(self, config):
        self.config = config
        self.spool_dir = config.get('core', 'spool_dir')
        if not isdir(self.spool_dir):
            assert not exists(self.spool_dir), "specified spool_dir %s already exists, and isn't a dir!" % self.spool_dir
            makedirs(self.spool_dir)
        self._file_registry = {}
        self._sourcetype_registry = []
        self._remote_empties()
        self._queue = deque(self.keys())
        self._lock = Lock()
        self._not_empty = Condition(self._lock)

    def _remote_empties(self):
        files = self.keys()
        while 1:
            try:
                f = files.next()
            except StopIteration:
                break
            if not stat(f).st_size:
                remove(f)

    def _open(self, sourcetype):
        if sourcetype not in self._sourcetype_registry:
            self._sourcetype_registry.append(sourcetype)
        fname = path_join(self.spool_dir, '%s_%f' % (sourcetype, time()))
        self._file_registry[sourcetype] = open(fname, 'a')

    def _get_file(self, sourcetype):
        if sourcetype not in self._file_registry:
            self._open(sourcetype)
        return self._file_registry[sourcetype]

    def keys(self):
        spool_dir = self.spool_dir
        return (path_join(spool_dir, f) for f in listdir(spool_dir))

    def items(self):
        return ((f, open(f, 'r')) for f in self.keys())

    def values(self):
        return (v for k, v in self.items())

    def close(self):
        def _close(fh):
            fh.flush()
            fh.close()
        # map() runs eagerly under Python 2; under Python 3 this would need
        # list(map(...)) or an explicit loop to actually close the files.
        map(_close, self._file_registry.values())
        self._remote_empties()

    def extend(self, sourcetype, timestamp, extra, datas):
        formatter = self._format
        data = ''.join((formatter(timestamp, d) for d in datas))
        self._write(sourcetype, timestamp, extra, data)
        # Only notify while the condition's lock is actually held;
        # Condition.notify() raises RuntimeError("cannot notify on
        # un-acquired lock") whenever _is_owned() is False.
        if self._not_empty._is_owned():
            self._not_empty.notify()

    def append(self, sourcetype, timestamp, extra, data):
        data = self._format(timestamp, data)
        self._write(sourcetype, timestamp, extra, data)
        if self._not_empty._is_owned():
            self._not_empty.notify()

    def _format(self, timestamp, data):
        s = json_dumps([timestamp, data], default=json_util_default)
        #TODO: have json dump directly into the file. get the fp pos, +4 for
        #size, dump, get new fp pos, generate length struct, insert, and go back
        #to the end of the file.
        return pack('>L', len(s)) + s

    def _write(self, sourcetype, timestamp, extra, data):
        def _open():
            _f = self._get_file(sourcetype)
            new = False
            if not _f.tell():
                new = True
                #new file, needs metadata
                hostname = self.config.get('core', 'hostname')
                extra['started_timestamp'] = timestamp
                m = json_dumps([hostname, sourcetype, extra],
                               default=json_util_default)
                _f.write(pack('>L', len(m)) + m)
            return _f, new

        def _close(_f):
            _f.flush()
            _f.close()
            self._file_registry.pop(sourcetype)
            self._queue.append(_f.name)

        with self._lock:
            _f, new = _open()
            if not new and datetime.utcnow().month != \
                    datetime.utcfromtimestamp(stat(_f.name).st_ctime).month:
                #need to make sure that batching doesn't accidentally mix
                #......... the rest of this example is omitted here .........
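The example stops here, so the consumer side of the spooler is not shown. As an illustration only (not code from the original project), a consumer of a Spooler like this would typically hold the shared lock and wait on _not_empty, which is the lock that the _is_owned() guard in extend()/append() probes before calling notify():

# Hypothetical consumer loop; the names _queue and _not_empty follow the
# Spooler above, but this function is an assumption, not the project's code.
def consume(spooler, handle):
    while True:
        with spooler._not_empty:                  # acquire the shared Lock
            while not spooler._queue:
                # wait() releases the lock while blocked; a timeout is used
                # because the producer only notifies when the lock happens
                # to be held at the moment it finishes a write.
                spooler._not_empty.wait(1.0)
            path = spooler._queue.popleft()
        handle(path)                              # process outside the lock

Because notify() is skipped whenever nobody holds the lock, polling with a timeout keeps such a consumer from sleeping forever.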