This page collects typical usage examples of the Python function pyLibrary.convert.value2json. If you are wondering what value2json does, how to call it, or what real uses of it look like, the code samples selected below should help.
Fifteen code examples of value2json are shown below, ordered by popularity by default.
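Before the excerpts, here is a minimal sketch of the function's role, inferred from how it is used in the examples below: value2json serializes a Python value into a JSON unicode string, and convert.json2value is its inverse. The dictionary here is purely illustrative.

from pyLibrary import convert

# Serialize a plain Python value to a JSON string (unicode text in Python 2)
doc = {"name": "example", "count": 3}
json_text = convert.value2json(doc)

# json2value is the inverse, turning JSON text back into a value
roundtrip = convert.json2value(json_text)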
Example 1: solve
def solve():
    try:
        data = convert.json2value(convert.utf82unicode(flask.request.data))
        solved = noop.solve(data)
        response_data = convert.unicode2utf8(convert.value2json(solved))

        return Response(
            response_data,
            direct_passthrough=True,  # FOR STREAMING
            status=200,
            headers={
                "access-control-allow-origin": "*",
                "content-type": "application/json"
            }
        )
    except Exception, e:
        e = Except.wrap(e)
        Log.warning("Could not process", cause=e)
        e = e.as_dict()

        return Response(
            convert.unicode2utf8(convert.value2json(e)),
            status=400,
            headers={
                "access-control-allow-origin": "*",
                "content-type": "application/json"
            }
        )
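The conversion chain in Example 1 (utf82unicode/json2value on the way in, value2json/unicode2utf8 on the way out) recurs in several examples below. A minimal sketch of just that chain, with an invented byte string standing in for flask.request.data:

from pyLibrary import convert

raw_bytes = b'{"question": 42}'                               # what flask.request.data would hold
data = convert.json2value(convert.utf82unicode(raw_bytes))    # utf8 bytes -> unicode -> value
reply_bytes = convert.unicode2utf8(convert.value2json(data))  # value -> unicode -> utf8 bytes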
Example 2: _worker
def _worker(self, please_stop):
    curr = "0.0"
    acc = []
    last_count_written = -1
    next_write = Date.now()

    while not please_stop:
        d = self.temp_queue.pop(timeout=MINUTE)
        if d == None:
            if not acc:
                continue

            # WRITE THE INCOMPLETE DATA TO S3, BUT NOT TOO OFTEN
            next_write = Date.now() + MINUTE
            try:
                if last_count_written != len(acc):
                    if DEBUG:
                        Log.note("write incomplete data ({{num}} lines) to {{uid}} in S3 next (time = {{next_write}})", uid=curr, next_write=next_write, num=len(acc))
                    self.bucket.write_lines(curr, (convert.value2json(a) for a in acc))
                    last_count_written = len(acc)
            except Exception, e:
                Log.note("Problem with write to S3", cause=e)
        elif d[UID_PATH] != curr:
            # WRITE acc TO S3 IF WE ARE MOVING TO A NEW KEY
            try:
                if acc:
                    if DEBUG:
                        Log.note("write complete data ({{num}} lines) to {{curr}} in S3", num=len(acc), curr=curr)
                    self.bucket.write_lines(curr, (convert.value2json(a) for a in acc))
                    last_count_written = 0
                curr = d[UID_PATH]
                acc = [d]
            except Exception, e:
                Log.warning("Can not store data", cause=e)
                Thread.sleep(30*MINUTE)
Example 3: extend
def extend(self, records):
    """
    records - MUST HAVE FORM OF
        [{"value":value}, ... {"value":value}] OR
        [{"json":json}, ... {"json":json}]
        OPTIONAL "id" PROPERTY IS ALSO ACCEPTED
    """
    lines = []
    try:
        for r in records:
            id = r.get("id")
            if id == None:
                id = Random.hex(40)

            if "json" in r:
                json = r["json"]
            elif "value" in r:
                json = convert.value2json(r["value"])
            else:
                json = None
                Log.error("Expecting every record given to have \"value\" or \"json\" property")

            lines.append('{"index":{"_id": ' + convert.value2json(id) + '}}')
            lines.append(json)
        del records

        if not lines:
            return

        try:
            data_bytes = "\n".join(lines) + "\n"
            data_bytes = data_bytes.encode("utf8")
        except Exception, e:
            Log.error("can not make request body from\n{{lines|indent}}", lines=lines, cause=e)

        response = self.cluster._post(
            self.path + "/_bulk",
            data=data_bytes,
            headers={"Content-Type": "text"},
            timeout=self.settings.timeout
        )
        items = response["items"]

        for i, item in enumerate(items):
            if self.cluster.version.startswith("0.90."):
                if not item.index.ok:
                    Log.error(
                        "{{error}} while loading line:\n{{line}}",
                        error=item.index.error,
                        line=lines[i * 2 + 1]
                    )
            elif self.cluster.version.startswith("1.4."):
                if item.index.status not in [200, 201]:
                    Log.error(
                        "{{error}} while loading line:\n{{line}}",
                        error=item.index.error,
                        line=lines[i * 2 + 1]
                    )
            else:
                Log.error("version not supported {{version}}", version=self.cluster.version)

        if self.debug:
            Log.note("{{num}} documents added", num=len(items))
    except Exception, e:
        # ASSUMED handler: the excerpt ends before showing how the outer try is closed
        Log.error("problem with bulk load", cause=e)
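The docstring in Example 3 describes the two accepted record shapes. A hypothetical call illustrating both forms (the index object and documents are invented; only records carrying "value" are run through convert.value2json):

# `index` stands for an instance of the class defining extend(); documents are made up
index.extend([
    {"id": "doc-1", "value": {"name": "first", "size": 10}},  # serialized via convert.value2json
    {"json": '{"name": "second", "size": 20}'}                # pre-encoded JSON passes through as-is
])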
Example 4: set_refresh_interval
def set_refresh_interval(self, seconds):
    if seconds <= 0:
        interval = -1
    else:
        interval = unicode(seconds) + "s"

    if self.cluster.version.startswith("0.90."):
        response = self.cluster.put(
            "/" + self.settings.index + "/_settings",
            data='{"index":{"refresh_interval":' + convert.value2json(interval) + '}}'
        )

        result = convert.json2value(utf82unicode(response.all_content))
        if not result.ok:
            Log.error("Can not set refresh interval ({{error}})", {
                "error": utf82unicode(response.all_content)
            })
    elif any(map(self.cluster.version.startswith, ["1.4.", "1.5.", "1.6.", "1.7."])):
        response = self.cluster.put(
            "/" + self.settings.index + "/_settings",
            data=convert.unicode2utf8('{"index":{"refresh_interval":' + convert.value2json(interval) + '}}')
        )

        result = convert.json2value(utf82unicode(response.all_content))
        if not result.acknowledged:
            Log.error("Can not set refresh interval ({{error}})", {
                "error": utf82unicode(response.all_content)
            })
    else:
        Log.error("Do not know how to handle ES version {{version}}", version=self.cluster.version)
Example 5: close
def close(self):
    self.please_stop.go()
    with self.lock:
        if self.db is None:
            return

        self.add(Thread.STOP)

        if self.db.status.end == self.start:
            if DEBUG:
                Log.note("persistent queue clear and closed")
            self.file.delete()
        else:
            if DEBUG:
                Log.note("persistent queue closed with {{num}} items left", num=len(self))
            try:
                self._add_pending({"add": {"status.start": self.start}})
                for i in range(self.db.status.start, self.start):
                    self._add_pending({"remove": str(i)})
                self.file.write(
                    convert.value2json({"add": self.db})
                    + "\n"
                    + ("\n".join(convert.value2json(p) for p in self.pending))
                    + "\n"
                )
                self._apply_pending()
            except Exception, e:
                raise e
        self.db = None
Example 6: commit
def commit(self):
    with self.lock:
        if self.closed:
            Log.error("Queue is closed, commit not allowed")

        try:
            self._add_pending({"add": {"status.start": self.start}})
            for i in range(self.db.status.start, self.start):
                self._add_pending({"remove": str(i)})

            if (
                self.db.status.end - self.start < 10 or Random.range(0, 1000) == 0
            ):  # FORCE RE-WRITE TO LIMIT FILE SIZE
                # SIMPLY RE-WRITE FILE
                if DEBUG:
                    Log.note(
                        "Re-write {{num_keys}} keys to persistent queue", num_keys=self.db.status.end - self.start
                    )
                for k in self.db.keys():
                    if k == "status" or int(k) >= self.db.status.start:
                        continue
                    Log.error("Not expecting {{key}}", key=k)
                self._commit()
                self.file.write(convert.value2json({"add": self.db}) + "\n")
            else:
                self._commit()
        except Exception, e:
            raise e
Example 7: __new__
def __new__(cls, value=None, **kwargs):
    output = object.__new__(cls)
    if value == None:
        if kwargs:
            output.milli = datetime.timedelta(**kwargs).total_seconds() * 1000
            output.month = 0
            return output
        else:
            return None

    if Math.is_number(value):
        output._milli = float(value) * 1000
        output.month = 0
        return output
    elif isinstance(value, basestring):
        return parse(value)
    elif isinstance(value, Duration):
        output.milli = value.milli
        output.month = value.month
        return output
    elif isinstance(value, float) and Math.is_nan(value):
        return None
    else:
        from pyLibrary import convert
        from pyLibrary.debugs.logs import Log

        Log.error("Do not know type of object (" + convert.value2json(value) + ") to make a Duration")
Example 8: stream
def stream():  # IT'S A LOT OF PRICES, STREAM THEM TO FILE
    prefix = "[\n"
    for p in new_prices:
        yield prefix
        yield convert.value2json(p)
        prefix = ",\n"
    yield "]"
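The generator emits one value2json document per price, with separators chosen so the concatenation forms a single valid JSON array. A hypothetical consumer, assuming stream and new_prices are in scope and that value2json returns unicode text:

# Drain the generator into a file, producing one JSON array of prices
with open("prices.json", "wb") as output:
    for chunk in stream():
        output.write(chunk.encode("utf8"))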
Example 9: delete_record
def delete_record(self, filter):
    if self.settings.read_only:
        Log.error("Index opened in read only mode, no changes allowed")
    self.cluster.get_metadata()

    if self.cluster.cluster_state.version.number.startswith("0.90"):
        query = {"filtered": {
            "query": {"match_all": {}},
            "filter": filter
        }}
    elif self.cluster.cluster_state.version.number.startswith("1.0"):
        query = {"query": {"filtered": {
            "query": {"match_all": {}},
            "filter": filter
        }}}
    else:
        raise NotImplementedError

    if self.debug:
        Log.note("Delete bugs:\n{{query}}", query=query)

    result = self.cluster.delete(
        self.path + "/_query",
        data=convert.value2json(query),
        timeout=60
    )

    for name, status in result._indices.items():
        if status._shards.failed > 0:
            Log.error("Failure to delete from {{index}}", index=name)
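For a concrete sense of the request body, here is a hypothetical filter and the structure whose value2json rendering is sent on the ES 1.0 code path (the filter is invented, and key order in the serialized output is not guaranteed):

from pyLibrary import convert

filter = {"term": {"build.branch": "mozilla-central"}}  # hypothetical filter
query = {"query": {"filtered": {
    "query": {"match_all": {}},
    "filter": filter
}}}
body = convert.value2json(query)  # serialized body for the DELETE <index>/_query request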
Example 10: _shorten
def _shorten(value, source):
    if source.name.startswith("active-data-test-result"):
        value.result.subtests = [s for s in value.result.subtests if s.ok is False]
        value.result.missing_subtests = True
        value.repo.changeset.files = None

    shorter_length = len(convert.value2json(value))
    if shorter_length > MAX_RECORD_LENGTH:
        result_size = len(convert.value2json(value.result))
        if source.name == "active-data-test-result":
            if result_size > MAX_RECORD_LENGTH:
                Log.warning("Epic test failure in {{name}} results in big record for {{id}} of length {{length}}", id=value._id, name=source.name, length=shorter_length)
            else:
                pass  # NOT A PROBLEM
        else:
            Log.warning("Monstrous {{name}} record {{id}} of length {{length}}", id=value._id, name=source.name, length=shorter_length)
Example 11: create_index
def create_index(
    self,
    index,
    alias=None,
    schema=None,
    limit_replicas=None,
    settings=None
):
    if not settings.alias:
        settings.alias = settings.index
        settings.index = proto_name(settings.alias)

    if settings.alias == settings.index:
        Log.error("Expecting index name to conform to pattern")

    if settings.schema_file:
        Log.error('schema_file attribute not supported. Use {"$ref":<filename>} instead')

    if schema == None:
        Log.error("Expecting a schema")
    elif isinstance(schema, basestring):
        schema = convert.json2value(schema, paths=True)
    else:
        schema = convert.json2value(convert.value2json(schema), paths=True)

    if limit_replicas:
        # DO NOT ASK FOR TOO MANY REPLICAS
        health = self.get("/_cluster/health")
        if schema.settings.index.number_of_replicas >= health.number_of_nodes:
            Log.warning(
                "Reduced number of replicas: {{from}} requested, {{to}} realized",
                {"from": schema.settings.index.number_of_replicas},
                to=health.number_of_nodes - 1
            )
            schema.settings.index.number_of_replicas = health.number_of_nodes - 1

    self._post(
        "/" + settings.index,
        data=convert.value2json(schema).encode("utf8"),
        headers={"Content-Type": "application/json"}
    )

    while True:
        time.sleep(1)
        try:
            self.head("/" + settings.index)
            break
        except Exception, _:
            Log.note("{{index}} does not exist yet", index=settings.index)
Example 12: store_data
def store_data(path):
    try:
        request = flask.request
        auth = request.headers.get('Authorization')
        if not auth:
            # USE PATTERN MATCHING AUTH
            for c in all_creds:
                if c.path == path:
                    return store_public_data(path, c)
            raise Log.error(
                "No authentication provided. path={{path}} data.length={{length}}",
                path=path,
                length=len(request.get_data()),
            )

        try:
            receiver = Receiver(
                lookup_credentials,
                auth,
                request.url,
                request.method,
                content=request.get_data(),
                content_type=request.headers['Content-Type'],
                seen_nonce=seen_nonce
            )
        except Exception, e:
            e = Except.wrap(e)
            raise Log.error(
                "Authentication failed. path={{path}} data.length={{length}}\n{{auth|indent}}",
                path=path,
                length=len(request.get_data()),
                auth=auth,
                cause=e
            )

        permissions = lookup_user(receiver.parsed_header["id"])
        if path not in listwrap(permissions.resources):
            Log.error("{{user}} not allowed access to {{resource}}", user=permissions.hawk.id, resource=path)

        link, id = submit_data(path, permissions, request.json)

        response_content = convert.unicode2utf8(convert.value2json({
            "link": link,
            "etl": {"id": id}
        }))
        receiver.respond(
            content=response_content,
            content_type=RESPONSE_CONTENT_TYPE
        )

        return Response(
            response_content,
            status=200,
            headers={
                b'Server-Authorization': receiver.response_header,
                b'content-type': RESPONSE_CONTENT_TYPE
            }
        )
    except Exception, e:
        # ASSUMED handler: the excerpt ends before showing how the outer try is closed
        Log.warning("Problem with storing data", cause=e)
Example 13: ping
def ping(self):
    self.ping_time = Date.now()
    self.synch.write(convert.value2json({
        "action": "ping",
        "next_key": self.next_key,
        "source_key": self.source_key,
        "timestamp": self.ping_time.milli
    }))
Example 14: post_json
def post_json(url, **kwargs):
    """
    ASSUME RESPONSE IS IN JSON
    """
    if b"json" in kwargs:
        kwargs[b"data"] = convert.unicode2utf8(convert.value2json(kwargs[b"json"]))
    elif b'data' in kwargs:
        kwargs[b"data"] = convert.unicode2utf8(convert.value2json(kwargs[b"data"]))
    else:
        Log.error("Expecting `json` parameter")

    response = post(url, **kwargs)
    c = response.content
    try:
        details = convert.json2value(convert.utf82unicode(c))
    except Exception, e:
        Log.error("Unexpected return value {{content}}", content=c, cause=e)
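A hypothetical call to post_json; the URL and payload are invented. The json keyword argument is serialized with convert.value2json into the request body before post() is invoked:

# Hypothetical usage; the endpoint and payload are made up for illustration
post_json(
    "http://localhost:5000/query",
    json={"from": "unittest", "limit": 1}
)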
Example 15: create_index
def create_index(
    self,
    index,
    alias=None,
    create_timestamp=None,
    schema=None,
    limit_replicas=None,
    read_only=False,
    tjson=False,
    kwargs=None
):
    if not alias:
        alias = kwargs.alias = kwargs.index
        index = kwargs.index = proto_name(alias, create_timestamp)

    if kwargs.alias == index:
        Log.error("Expecting index name to conform to pattern")

    if kwargs.schema_file:
        Log.error('schema_file attribute not supported. Use {"$ref":<filename>} instead')

    if schema == None:
        Log.error("Expecting a schema")
    elif isinstance(schema, basestring):
        schema = mo_json.json2value(schema, leaves=True)
    else:
        schema = mo_json.json2value(convert.value2json(schema), leaves=True)

    if limit_replicas:
        # DO NOT ASK FOR TOO MANY REPLICAS
        health = self.get("/_cluster/health")
        if schema.settings.index.number_of_replicas >= health.number_of_nodes:
            Log.warning(
                "Reduced number of replicas: {{from}} requested, {{to}} realized",
                {"from": schema.settings.index.number_of_replicas},
                to=health.number_of_nodes - 1
            )
            schema.settings.index.number_of_replicas = health.number_of_nodes - 1

    self.post(
        "/" + index,
        data=schema,
        headers={"Content-Type": "application/json"}
    )

    # CONFIRM INDEX EXISTS
    while True:
        try:
            state = self.get("/_cluster/state", retry={"times": 5}, timeout=3)
            if index in state.metadata.indices:
                break
            Log.note("Waiting for index {{index}} to appear", index=index)
        except Exception as e:
            Log.warning("Problem while waiting for index {{index}} to appear", index=index, cause=e)
        Till(seconds=1).wait()
    Log.alert("Made new index {{index|quote}}", index=index)

    es = Index(kwargs=kwargs)
    return es