This article collects typical usage examples of the Python function pyLibrary.convert.value2json. If you are wondering how value2json is used in practice, how to call it, or what real-world code that uses it looks like, the curated examples here should help.
Below are 20 code examples of the value2json function, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code examples.
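Before diving into the examples, here is a minimal sketch of the basic round trip: value2json serializes a Python value into a JSON string, and json2value parses it back. The record contents below are invented purely for illustration, and the sketch assumes pyLibrary is importable in a Python 2 environment (most of the examples below use Python 2 syntax).

from pyLibrary import convert

# An invented record, used only to illustrate the round trip
record = {"name": "example", "count": 3, "tags": ["a", "b"]}

json_text = convert.value2json(record)    # Python value -> JSON unicode string
restored = convert.json2value(json_text)  # JSON string -> (wrapped) Python value

Every example that follows uses convert.value2json for exactly this serialization step, usually right before writing to a file, S3, Elasticsearch, or an HTTP response.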
Example 1: solve
def solve():
    try:
        data = convert.json2value(convert.utf82unicode(flask.request.data))
        solved = noop.solve(data)
        response_data = convert.unicode2utf8(convert.value2json(solved))

        return Response(
            response_data,
            direct_passthrough=True,  # FOR STREAMING
            status=200,
            headers={
                "access-control-allow-origin": "*",
                "content-type": "application/json"
            }
        )
    except Exception, e:
        e = Except.wrap(e)
        Log.warning("Could not process", cause=e)
        e = e.as_dict()

        return Response(
            convert.unicode2utf8(convert.value2json(e)),
            status=400,
            headers={
                "access-control-allow-origin": "*",
                "content-type": "application/json"
            }
        )
Author: mozilla | Project: ChangeDetector | Lines: 27 | Source: app.py
Example 2: _worker
def _worker(self, please_stop):
    curr = "0.0"
    acc = []
    last_count_written = -1
    next_write = Date.now()

    while not please_stop:
        d = self.temp_queue.pop(timeout=MINUTE)
        if d == None:
            if not acc:
                continue

            # WRITE THE INCOMPLETE DATA TO S3, BUT NOT TOO OFTEN
            next_write = Date.now() + MINUTE
            try:
                if last_count_written != len(acc):
                    if DEBUG:
                        Log.note("write incomplete data ({{num}} lines) to {{uid}} in S3 next (time = {{next_write}})", uid=curr, next_write=next_write, num=len(acc))
                    self.bucket.write_lines(curr, (convert.value2json(a) for a in acc))
                    last_count_written = len(acc)
            except Exception, e:
                Log.note("Problem with write to S3", cause=e)
        elif d[UID_PATH] != curr:
            # WRITE acc TO S3 IF WE ARE MOVING TO A NEW KEY
            try:
                if acc:
                    if DEBUG:
                        Log.note("write complete data ({{num}} lines) to {{curr}} in S3", num=len(acc), curr=curr)
                    self.bucket.write_lines(curr, (convert.value2json(a) for a in acc))
                last_count_written = 0
                curr = d[UID_PATH]
                acc = [d]
            except Exception, e:
                Log.warning("Can not store data", cause=e)
                Thread.sleep(30*MINUTE)
Author: klahnakoski | Project: MoDataSubmission | Lines: 34 | Source: storage.py
Example 3: extend
def extend(self, records):
    """
    records - MUST HAVE FORM OF
    [{"value":value}, ... {"value":value}] OR
    [{"json":json}, ... {"json":json}]
    OPTIONAL "id" PROPERTY IS ALSO ACCEPTED
    """
    lines = []
    try:
        for r in records:
            id = r.get("id")
            if id == None:
                id = Random.hex(40)

            if "json" in r:
                json = r["json"]
            elif "value" in r:
                json = convert.value2json(r["value"])
            else:
                json = None
                Log.error("Expecting every record given to have \"value\" or \"json\" property")

            lines.append('{"index":{"_id": ' + convert.value2json(id) + '}}')
            lines.append(json)
        del records

        if not lines:
            return

        try:
            data_bytes = "\n".join(lines) + "\n"
            data_bytes = data_bytes.encode("utf8")
        except Exception, e:
            Log.error("can not make request body from\n{{lines|indent}}", lines=lines, cause=e)

        response = self.cluster._post(
            self.path + "/_bulk",
            data=data_bytes,
            headers={"Content-Type": "text"},
            timeout=self.settings.timeout
        )
        items = response["items"]

        for i, item in enumerate(items):
            if self.cluster.version.startswith("0.90."):
                if not item.index.ok:
                    Log.error("{{error}} while loading line:\n{{line}}",
                        error=item.index.error,
                        line=lines[i * 2 + 1])
            elif self.cluster.version.startswith("1.4."):
                if item.index.status not in [200, 201]:
                    Log.error("{{error}} while loading line:\n{{line}}",
                        error=item.index.error,
                        line=lines[i * 2 + 1])
            else:
                Log.error("version not supported {{version}}", version=self.cluster.version)

        if self.debug:
            Log.note("{{num}} documents added", num=len(items))
Author: klahnakoski | Project: intermittents | Lines: 60 | Source: elasticsearch.py
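Given the record shapes documented in the docstring above, a hypothetical call might look like the following; the index object, ids, and payloads are invented for illustration and are not part of the original source:

# `index` is assumed to be an instance of the class that defines extend() above
index.extend([
    {"value": {"bug_id": 12345, "status": "NEW"}},                     # serialized via convert.value2json
    {"id": "abc123", "json": '{"bug_id": 67890, "status": "FIXED"}'},  # pre-encoded JSON with an explicit id
])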
Example 4: set_refresh_interval
def set_refresh_interval(self, seconds):
    if seconds <= 0:
        interval = -1
    else:
        interval = unicode(seconds) + "s"

    if self.cluster.version.startswith("0.90."):
        response = self.cluster.put(
            "/" + self.settings.index + "/_settings",
            data='{"index":{"refresh_interval":' + convert.value2json(interval) + '}}'
        )

        result = convert.json2value(utf82unicode(response.all_content))
        if not result.ok:
            Log.error("Can not set refresh interval ({{error}})", {
                "error": utf82unicode(response.all_content)
            })
    elif any(map(self.cluster.version.startswith, ["1.4.", "1.5.", "1.6.", "1.7."])):
        response = self.cluster.put(
            "/" + self.settings.index + "/_settings",
            data=convert.unicode2utf8('{"index":{"refresh_interval":' + convert.value2json(interval) + '}}')
        )

        result = convert.json2value(utf82unicode(response.all_content))
        if not result.acknowledged:
            Log.error("Can not set refresh interval ({{error}})", {
                "error": utf82unicode(response.all_content)
            })
    else:
        Log.error("Do not know how to handle ES version {{version}}", version=self.cluster.version)
Author: klahnakoski | Project: MoDataSubmission | Lines: 30 | Source: elasticsearch.py
Example 5: close
def close(self):
    self.please_stop.go()
    with self.lock:
        if self.db is None:
            return

        self.add(Thread.STOP)

        if self.db.status.end == self.start:
            if DEBUG:
                Log.note("persistent queue clear and closed")
            self.file.delete()
        else:
            if DEBUG:
                Log.note("persistent queue closed with {{num}} items left", num=len(self))
            try:
                self._add_pending({"add": {"status.start": self.start}})
                for i in range(self.db.status.start, self.start):
                    self._add_pending({"remove": str(i)})
                self.file.write(
                    convert.value2json({"add": self.db})
                    + "\n"
                    + ("\n".join(convert.value2json(p) for p in self.pending))
                    + "\n"
                )
                self._apply_pending()
            except Exception, e:
                raise e
        self.db = None
Author: klahnakoski | Project: MoDevETL | Lines: 29 | Source: persistent_queue.py
Example 6: commit
def commit(self):
    with self.lock:
        if self.closed:
            Log.error("Queue is closed, commit not allowed")

        try:
            self._add_pending({"add": {"status.start": self.start}})
            for i in range(self.db.status.start, self.start):
                self._add_pending({"remove": str(i)})

            if (
                self.db.status.end - self.start < 10 or Random.range(0, 1000) == 0
            ):  # FORCE RE-WRITE TO LIMIT FILE SIZE
                # SIMPLY RE-WRITE FILE
                if DEBUG:
                    Log.note(
                        "Re-write {{num_keys}} keys to persistent queue", num_keys=self.db.status.end - self.start
                    )
                for k in self.db.keys():
                    if k == "status" or int(k) >= self.db.status.start:
                        continue
                    Log.error("Not expecting {{key}}", key=k)
                self._commit()
                self.file.write(convert.value2json({"add": self.db}) + "\n")
            else:
                self._commit()
        except Exception, e:
            raise e
Author: klahnakoski | Project: MoDevETL | Lines: 29 | Source: persistent_queue.py
Example 7: __new__
def __new__(cls, value=None, **kwargs):
    output = object.__new__(cls)
    if value == None:
        if kwargs:
            output.milli = datetime.timedelta(**kwargs).total_seconds() * 1000
            output.month = 0
            return output
        else:
            return None

    if Math.is_number(value):
        output._milli = float(value) * 1000
        output.month = 0
        return output
    elif isinstance(value, basestring):
        return parse(value)
    elif isinstance(value, Duration):
        output.milli = value.milli
        output.month = value.month
        return output
    elif isinstance(value, float) and Math.is_nan(value):
        return None
    else:
        from pyLibrary import convert
        from pyLibrary.debugs.logs import Log

        Log.error("Do not know type of object (" + convert.value2json(value) + ")of to make a Duration")
Author: klahnakoski | Project: MoDataSubmission | Lines: 26 | Source: durations.py
Example 8: stream
def stream():  # IT'S A LOT OF PRICES, STREAM THEM TO FILE
    prefix = "[\n"
    for p in new_prices:
        yield prefix
        yield convert.value2json(p)
        prefix = ",\n"
    yield "]"
Author: klahnakoski | Project: SpotManager | Lines: 7 | Source: spot_manager.py
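As the comment notes, the generator is meant to be streamed to a file; a hypothetical consumer could look like this (the file name and the explicit utf8 encoding are assumptions, not part of the original source):

# Write the streamed JSON array to disk chunk by chunk (Python 2 style)
with open("prices.json", "wb") as f:
    for chunk in stream():
        f.write(chunk.encode("utf8"))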
Example 9: delete_record
def delete_record(self, filter):
    if self.settings.read_only:
        Log.error("Index opened in read only mode, no changes allowed")
    self.cluster.get_metadata()

    if self.cluster.cluster_state.version.number.startswith("0.90"):
        query = {"filtered": {
            "query": {"match_all": {}},
            "filter": filter
        }}
    elif self.cluster.cluster_state.version.number.startswith("1.0"):
        query = {"query": {"filtered": {
            "query": {"match_all": {}},
            "filter": filter
        }}}
    else:
        raise NotImplementedError

    if self.debug:
        Log.note("Delete bugs:\n{{query}}", query=query)

    result = self.cluster.delete(
        self.path + "/_query",
        data=convert.value2json(query),
        timeout=60
    )

    for name, status in result._indices.items():
        if status._shards.failed > 0:
            Log.error("Failure to delete from {{index}}", index=name)
Author: klahnakoski | Project: MoDataSubmission | Lines: 30 | Source: elasticsearch.py
Example 10: _shorten
def _shorten(value, source):
    if source.name.startswith("active-data-test-result"):
        value.result.subtests = [s for s in value.result.subtests if s.ok is False]
        value.result.missing_subtests = True
        value.repo.changeset.files = None

    shorter_length = len(convert.value2json(value))
    if shorter_length > MAX_RECORD_LENGTH:
        result_size = len(convert.value2json(value.result))
        if source.name == "active-data-test-result":
            if result_size > MAX_RECORD_LENGTH:
                Log.warning("Epic test failure in {{name}} results in big record for {{id}} of length {{length}}", id=value._id, name=source.name, length=shorter_length)
            else:
                pass  # NOT A PROBLEM
        else:
            Log.warning("Monstrous {{name}} record {{id}} of length {{length}}", id=value._id, name=source.name, length=shorter_length)
Author: klahnakoski | Project: SpotManager | Lines: 16 | Source: rollover_index.py
Example 11: create_index
def create_index(
    self,
    index,
    alias=None,
    schema=None,
    limit_replicas=None,
    settings=None
):
    if not settings.alias:
        settings.alias = settings.index
        settings.index = proto_name(settings.alias)

    if settings.alias == settings.index:
        Log.error("Expecting index name to conform to pattern")

    if settings.schema_file:
        Log.error('schema_file attribute not supported. Use {"$ref":<filename>} instead')

    if schema == None:
        Log.error("Expecting a schema")
    elif isinstance(schema, basestring):
        schema = convert.json2value(schema, paths=True)
    else:
        schema = convert.json2value(convert.value2json(schema), paths=True)

    if limit_replicas:
        # DO NOT ASK FOR TOO MANY REPLICAS
        health = self.get("/_cluster/health")
        if schema.settings.index.number_of_replicas >= health.number_of_nodes:
            Log.warning("Reduced number of replicas: {{from}} requested, {{to}} realized",
                {"from": schema.settings.index.number_of_replicas},
                to=health.number_of_nodes - 1
            )
            schema.settings.index.number_of_replicas = health.number_of_nodes - 1

    self._post(
        "/" + settings.index,
        data=convert.value2json(schema).encode("utf8"),
        headers={"Content-Type": "application/json"}
    )

    while True:
        time.sleep(1)
        try:
            self.head("/" + settings.index)
            break
        except Exception, _:
            Log.note("{{index}} does not exist yet", index=settings.index)
Author: klahnakoski | Project: intermittents | Lines: 47 | Source: elasticsearch.py
Example 12: store_data
def store_data(path):
    try:
        request = flask.request
        auth = request.headers.get('Authorization')
        if not auth:
            # USE PATTERN MATCHING AUTH
            for c in all_creds:
                if c.path == path:
                    return store_public_data(path, c)
            raise Log.error(
                "No authentication provided. path={{path}} data.length={{length}}",
                path=path,
                length=len(request.get_data()),
            )

        try:
            receiver = Receiver(
                lookup_credentials,
                auth,
                request.url,
                request.method,
                content=request.get_data(),
                content_type=request.headers['Content-Type'],
                seen_nonce=seen_nonce
            )
        except Exception, e:
            e = Except.wrap(e)
            raise Log.error(
                "Authentication failed. path={{path}} data.length={{length}}\n{{auth|indent}}",
                path=path,
                length=len(request.get_data()),
                auth=auth,
                cause=e
            )

        permissions = lookup_user(receiver.parsed_header["id"])
        if path not in listwrap(permissions.resources):
            Log.error("{{user}} not allowed access to {{resource}}", user=permissions.hawk.id, resource=path)

        link, id = submit_data(path, permissions, request.json)

        response_content = convert.unicode2utf8(convert.value2json({
            "link": link,
            "etl": {"id": id}
        }))
        receiver.respond(
            content=response_content,
            content_type=RESPONSE_CONTENT_TYPE
        )

        return Response(
            response_content,
            status=200,
            headers={
                b'Server-Authorization': receiver.response_header,
                b'content-type': RESPONSE_CONTENT_TYPE
            }
        )
Author: klahnakoski | Project: MoDataSubmission | Lines: 59 | Source: app.py
Example 13: ping
def ping(self):
    self.ping_time = Date.now()
    self.synch.write(convert.value2json({
        "action": "ping",
        "next_key": self.next_key,
        "source_key": self.source_key,
        "timestamp": self.ping_time.milli
    }))
Author: klahnakoski | Project: Activedata-ETL | Lines: 8 | Source: synchro.py
Example 14: post_json
def post_json(url, **kwargs):
    """
    ASSUME RESPONSE IS IN JSON
    """
    if b"json" in kwargs:
        kwargs[b"data"] = convert.unicode2utf8(convert.value2json(kwargs[b"json"]))
    elif b"data" in kwargs:
        kwargs[b"data"] = convert.unicode2utf8(convert.value2json(kwargs[b"data"]))
    else:
        Log.error("Expecting `json` parameter")

    response = post(url, **kwargs)
    c = response.content
    try:
        details = convert.json2value(convert.utf82unicode(c))
    except Exception, e:
        Log.error("Unexpected return value {{content}}", content=c, cause=e)
Author: klahnakoski | Project: MoDataSubmission | Lines: 17 | Source: http.py
Example 15: create_index
def create_index(
    self,
    index,
    alias=None,
    create_timestamp=None,
    schema=None,
    limit_replicas=None,
    read_only=False,
    tjson=False,
    kwargs=None
):
    if not alias:
        alias = kwargs.alias = kwargs.index
        index = kwargs.index = proto_name(alias, create_timestamp)

    if kwargs.alias == index:
        Log.error("Expecting index name to conform to pattern")

    if kwargs.schema_file:
        Log.error('schema_file attribute not supported. Use {"$ref":<filename>} instead')

    if schema == None:
        Log.error("Expecting a schema")
    elif isinstance(schema, basestring):
        schema = mo_json.json2value(schema, leaves=True)
    else:
        schema = mo_json.json2value(convert.value2json(schema), leaves=True)

    if limit_replicas:
        # DO NOT ASK FOR TOO MANY REPLICAS
        health = self.get("/_cluster/health")
        if schema.settings.index.number_of_replicas >= health.number_of_nodes:
            Log.warning("Reduced number of replicas: {{from}} requested, {{to}} realized",
                {"from": schema.settings.index.number_of_replicas},
                to=health.number_of_nodes - 1
            )
            schema.settings.index.number_of_replicas = health.number_of_nodes - 1

    self.post(
        "/" + index,
        data=schema,
        headers={"Content-Type": "application/json"}
    )

    # CONFIRM INDEX EXISTS
    while True:
        try:
            state = self.get("/_cluster/state", retry={"times": 5}, timeout=3)
            if index in state.metadata.indices:
                break
            Log.note("Waiting for index {{index}} to appear", index=index)
        except Exception as e:
            Log.warning("Problem while waiting for index {{index}} to appear", index=index, cause=e)
        Till(seconds=1).wait()
    Log.alert("Made new index {{index|quote}}", index=index)

    es = Index(kwargs=kwargs)
    return es
Author: klahnakoski | Project: SpotManager | Lines: 58 | Source: elasticsearch.py
Example 16: shutdown
def shutdown(self):
    self.pinger_thread.stop()
    self.pinger_thread.join()
    self.synch.write(convert.value2json({
        "action": "shutdown",
        "next_key": self.next_key,
        "source_key": self.source_key,
        "timestamp": Date.now().milli
    }))
Author: klahnakoski | Project: Activedata-ETL | Lines: 9 | Source: synchro.py
Example 17: test_missing_auth
def test_missing_auth(self):
    # MAKE SOME DATA
    data = {
        "constant": "this is a test",
        "random-data": convert.bytes2base64(Random.bytes(100))
    }

    response = requests.post(settings.bad_url, data=convert.unicode2utf8(convert.value2json(data)))
    self.assertEqual(response.status_code, 403)
Author: klahnakoski | Project: MoDataSubmission | Lines: 9 | Source: test_service.py
Example 18: post_json
def post_json(url, **kwargs):
    """
    ASSUME RESPONSE IS IN JSON
    """
    kwargs["data"] = convert.unicode2utf8(convert.value2json(kwargs["data"]))

    response = post(url, **kwargs)
    c = response.all_content
    return convert.json2value(convert.utf82unicode(c))
Author: klahnakoski | Project: Activedata-ETL | Lines: 9 | Source: http.py
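As a usage sketch, a caller passes the payload through the data keyword and post_json serializes it with value2json before issuing the request and parsing the response; the URL and query below are invented for illustration:

# Hypothetical call; the endpoint and payload are assumptions for illustration only
details = post_json(
    "http://localhost:9200/test_index/_search",
    data={"query": {"match_all": {}}, "size": 1}
)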
Example 19: log_loop
def log_loop(settings, synch, queue, bucket, please_stop):
    with aws.Queue(settings.work_queue) as work_queue:
        for i, g in qb.groupby(queue, size=settings.param.size):
            Log.note(
                "Preparing {{num}} pulse messages to bucket={{bucket}}",
                num=len(g),
                bucket=bucket.name
            )

            full_key = unicode(synch.next_key) + ":" + unicode(MIN(g.select("_meta.count")))
            try:
                output = [
                    set_default(
                        d,
                        {"etl": {
                            "name": "Pulse block",
                            "bucket": settings.destination.bucket,
                            "timestamp": Date.now().unix,
                            "id": synch.next_key,
                            "source": {
                                "name": "pulse.mozilla.org",
                                "id": d._meta.count,
                                "count": d._meta.count,
                                "message_id": d._meta.message_id,
                                "sent": Date(d._meta.sent),
                            },
                            "type": "aggregation"
                        }}
                    )
                    for i, d in enumerate(g)
                    if d != None  # HAPPENS WHEN PERSISTENT QUEUE FAILS TO LOG start
                ]
                bucket.write(full_key, "\n".join(convert.value2json(d) for d in output))
                synch.advance()
                synch.source_key = MAX(g.select("_meta.count")) + 1

                now = Date.now()
                work_queue.add({
                    "bucket": bucket.name,
                    "key": full_key,
                    "timestamp": now.unix,
                    "date/time": now.format()
                })

                synch.ping()
                queue.commit()
                Log.note("Wrote {{num}} pulse messages to bucket={{bucket}}, key={{key}} ",
                    num=len(g),
                    bucket=bucket.name,
                    key=full_key)
            except Exception, e:
                queue.rollback()
                if not queue.closed:
                    Log.warning("Problem writing {{key}} to S3", key=full_key, cause=e)

            if please_stop:
                break
Author: klahnakoski | Project: Activedata-ETL | Lines: 57 | Source: pulse_logger.py
Example 20: full_etl
def full_etl(settings):
    schema = convert.json2value(convert.value2json(SCHEMA), leaves=True)
    Cluster(settings.destination).get_or_create_index(settings=settings.destination, schema=schema, limit_replicas=True)
    destq = FromES(settings.destination)
    if settings.incremental:
        min_bug_id = destq.query({
            "from": coalesce(settings.destination.alias, settings.destination.index),
            "select": {"name": "max_bug_id", "value": "bug_id", "aggregate": "max"}
        })
        min_bug_id = int(MAX(min_bug_id-1000, 0))
    else:
        min_bug_id = 0

    sourceq = FromES(settings.source)
    max_bug_id = sourceq.query({
        "from": coalesce(settings.source.alias, settings.source.index),
        "select": {"name": "max_bug_id", "value": "bug_id", "aggregate": "max"}
    }) + 1
    max_bug_id = int(coalesce(max_bug_id, 0))

    # FIRST, GET ALL MISSING BUGS
    for s, e in qb.reverse(list(qb.intervals(min_bug_id, max_bug_id, 10000))):
        with Timer("pull {{start}}..{{end}} from ES", {"start": s, "end": e}):
            children = sourceq.query({
                "from": settings.source.alias,
                "select": ["bug_id", "dependson", "blocked", "modified_ts", "expires_on"],
                "where": {"and": [
                    {"range": {"bug_id": {"gte": s, "lt": e}}},
                    {"or": [
                        {"exists": "dependson"},
                        {"exists": "blocked"}
                    ]}
                ]},
                "limit": 10000
            })

        with Timer("fixpoint work"):
            to_fix_point(settings, destq, children.data)

    # PROCESS RECENT CHANGES
    with Timer("pull recent dependancies from ES"):
        children = sourceq.query({
            "from": settings.source.alias,
            "select": ["bug_id", "dependson", "blocked"],
            "where": {"and": [
                {"range": {"modified_ts": {"gte": convert.datetime2milli(datetime.utcnow() - timedelta(days=7))}}},
                {"or": [
                    {"exists": "dependson"},
                    {"exists": "blocked"}
                ]}
            ]},
            "limit": 100000
        })

    to_fix_point(settings, destq, children.data)
Author: klahnakoski | Project: MoDevETL | Lines: 56 | Source: hierarchy.py
Note: the pyLibrary.convert.value2json examples in this article were compiled from source-code and documentation hosting platforms such as GitHub/MSDocs. The code snippets are taken from open-source projects contributed by their respective developers, and copyright of the source code remains with the original authors. For distribution and use, please refer to the license of the corresponding project; do not reproduce without permission.