This article collects typical usage examples of the Queue class from pyLibrary.thread.threads in Python. If you are wondering what the Queue class does, how to use it, or want concrete examples, the hand-picked snippets below should help.
Twenty code examples of the Queue class are shown below, sorted by popularity.
Example 1: worker
def worker(please_stop):
    pending = Queue("pending ids", max=BATCH_SIZE*3, silent=False)

    pending_thread = Thread.run(
        "get pending",
        get_pending,
        source=source,
        since=last_updated,
        pending_bugs=pending,
        please_stop=please_stop
    )
    diff_thread = Thread.run(
        "diff",
        diff,
        source,
        destination,
        pending,
        please_stop=please_stop
    )
    replication_thread = Thread.run(
        "replication",
        replicate,
        source,
        destination,
        pending,
        config.fix,
        please_stop=please_stop
    )

    pending_thread.join()
    diff_thread.join()
    pending.add(Thread.STOP)
    replication_thread.join()

    done.go()
    please_stop.go()
Author: klahnakoski, Project: esReplicate, Lines: 34, Source: replicate.py
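A minimal sketch of the same hand-off pattern, assuming Queue and Thread are both importable from pyLibrary.thread.threads and that pop() returns the Thread.STOP sentinel once it is reached; the produce/consume names are hypothetical, not part of the library:

from pyLibrary.thread.threads import Queue, Thread

def produce(queue, please_stop):
    for i in range(10):
        queue.add(i)
    queue.add(Thread.STOP)  # SENTINEL TELLS THE CONSUMER TO EXIT

def consume(queue, please_stop):
    while not please_stop:
        item = queue.pop()
        if item is Thread.STOP:
            break
        print item

work = Queue("demo work", max=100, silent=True)
producer = Thread.run("producer", produce, work)
consumer = Thread.run("consumer", consume, work)
producer.join()
consumer.join()

Note that in Example 1 the sentinel is enqueued only after the upstream threads have joined, so the replication thread drains every pending id before it exits.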
Example 2: Log_usingThread
class Log_usingThread(BaseLog):
    def __init__(self, logger):
        # DELAYED LOAD FOR THREADS MODULE
        from pyLibrary.thread.threads import Queue

        self.queue = Queue("logs", max=10000, silent=True)
        self.logger = logger

        def worker(please_stop):
            while not please_stop:
                Thread.sleep(1)
                logs = self.queue.pop_all()
                for log in logs:
                    if log is Thread.STOP:
                        if DEBUG_LOGGING:
                            sys.stdout.write("Log_usingThread.worker() sees stop, filling rest of queue\n")
                        please_stop.go()
                    else:
                        self.logger.write(**log)

        self.thread = Thread("log thread", worker)
        self.thread.start()

    def write(self, template, params):
        try:
            self.queue.add({"template": template, "params": params})
            return self
        except Exception, e:
            sys.stdout.write("IF YOU SEE THIS, IT IS LIKELY YOU FORGOT TO RUN Log.start() FIRST\n")
            raise e  # OH NO!
Author: klahnakoski, Project: intermittents, Lines: 31, Source: logs.py
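The worker above wakes once a second and drains the whole queue with pop_all() instead of popping one item at a time. A minimal sketch of that drain idiom, assuming pop_all() returns every queued item (including a Thread.STOP sentinel, if one was added) without blocking:

from pyLibrary.thread.threads import Queue, Thread

logs = Queue("demo logs", max=10000, silent=True)

def drain(please_stop):
    while not please_stop:
        Thread.sleep(1)
        for log in logs.pop_all():  # EMPTIES THE QUEUE IN ONE NON-BLOCKING CALL
            if log is Thread.STOP:
                please_stop.go()  # FINISH THIS BATCH, THEN EXIT THE while LOOP
            else:
                print log

logs.add({"template": "hello {{name}}", "params": {"name": "world"}})
logs.add(Thread.STOP)
Thread.run("drain", drain).join()

The worker keeps processing the rest of the batch after seeing Thread.STOP, so messages queued before the sentinel are not lost.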
Example 3: etl_one
def etl_one(settings):
    queue = Queue("temp work queue")
    queue.__setattr__(b"commit", Null)
    queue.__setattr__(b"rollback", Null)

    settings.param.wait_forever = False
    already_in_queue = set()
    for w in settings.workers:
        source = get_container(w.source)
        # source.settings.fast_forward = True
        if id(source) in already_in_queue:
            continue
        try:
            for i in parse_id_argument(settings.args.id):
                data = source.get_key(i)
                if data != None:
                    already_in_queue.add(id(source))
                    queue.add(Dict(
                        bucket=w.source.bucket,
                        key=i
                    ))
        except Exception, e:
            if "Key {{key}} does not exist" in e:
                already_in_queue.add(id(source))
                queue.add(Dict(
                    bucket=w.source.bucket,
                    key=settings.args.id
                ))
            Log.warning("Problem", cause=e)
Author: klahnakoski, Project: Activedata-ETL, Lines: 29, Source: etl.py
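The two __setattr__ calls above duck-type the in-memory Queue so it can stand in for the ETL's persistent work queues, whose consumers call commit() and rollback(); binding those names to pyLibrary's Null turns the calls into silent no-ops. A sketch of the idea, assuming Null lives in pyLibrary.dot and returns Null when called, which is how the library's null object behaves elsewhere; consume_batch is a hypothetical consumer:

from pyLibrary.dot import Dict, Null
from pyLibrary.thread.threads import Queue

queue = Queue("temp work queue")
queue.__setattr__(b"commit", Null)    # queue.commit() NOW DOES NOTHING
queue.__setattr__(b"rollback", Null)  # SAME FOR queue.rollback()

def consume_batch(q):
    work = q.pop_all()
    q.commit()  # WOULD PERSIST PROGRESS ON A DURABLE QUEUE; HERE IT IS A NO-OP
    return work

queue.add(Dict(bucket="demo-bucket", key="123"))
print consume_batch(queue)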
Example 4: TextLog_usingElasticSearch
class TextLog_usingElasticSearch(TextLog):
    @use_settings
    def __init__(self, host, index, type="log", max_size=1000, batch_size=100, settings=None):
        """
        settings ARE FOR THE ELASTICSEARCH INDEX
        """
        self.es = Cluster(settings).get_or_create_index(
            schema=convert.json2value(convert.value2json(SCHEMA), leaves=True),
            limit_replicas=True,
            tjson=True,
            settings=settings,
        )
        self.batch_size = batch_size
        self.es.add_alias(coalesce(settings.alias, settings.index))
        self.queue = Queue("debug logs to es", max=max_size, silent=True)
        self.es.settings.retry.times = coalesce(self.es.settings.retry.times, 3)
        self.es.settings.retry.sleep = Duration(coalesce(self.es.settings.retry.sleep, MINUTE))
        Thread.run("add debug logs to es", self._insert_loop)

    def write(self, template, params):
        if params.get("template"):
            # DETECTED INNER TEMPLATE, ASSUME TRACE IS ON, SO DO NOT NEED THE OUTER TEMPLATE
            self.queue.add({"value": params})
        else:
            template = strings.limit(template, 2000)
            self.queue.add({"value": {"template": template, "params": params}}, timeout=3 * MINUTE)
        return self

    def _insert_loop(self, please_stop=None):
        bad_count = 0
        while not please_stop:
            try:
                Thread.sleep(seconds=1)
                messages = wrap(self.queue.pop_all())
                if messages:
                    # for m in messages:
                    #     m.value.params = leafer(m.value.params)
                    #     m.value.error = leafer(m.value.error)
                    for g, mm in jx.groupby(messages, size=self.batch_size):
                        self.es.extend(mm)
                    bad_count = 0
            except Exception, e:
                Log.warning("Problem inserting logs into ES", cause=e)
                bad_count += 1
                if bad_count > 5:
                    break

        Log.warning("Given up trying to write debug logs to ES index {{index}}", index=self.es.settings.index)

        # CONTINUE TO DRAIN THIS QUEUE
        while not please_stop:
            try:
                Thread.sleep(seconds=1)
                self.queue.pop_all()
            except Exception, e:
                Log.warning("Should not happen", cause=e)
Author: klahnakoski, Project: esReplicate, Lines: 55, Source: log_usingElasticSearch.py
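The _insert_loop above shows the standard shape for feeding a slow sink from a Queue: sleep, drain with pop_all(), send in bounded batches, and give up (but keep draining) after repeated failures. A stripped-down sketch of the batching step, using plain list slicing in place of jx.groupby and a hypothetical send_batch() standing in for self.es.extend():

from pyLibrary.thread.threads import Queue, Thread

queue = Queue("es buffer", max=1000, silent=True)

def send_batch(batch):
    print "sending %d messages" % len(batch)  # STUB; REAL CODE WOULD CALL es.extend(batch)

def insert_loop(please_stop):
    while not please_stop:
        Thread.sleep(seconds=1)
        messages = queue.pop_all()
        for i in range(0, len(messages), 100):  # SEND IN BATCHES OF 100
            send_batch(messages[i:i + 100])

Bounding the batch size keeps individual bulk requests small even when the queue has accumulated thousands of messages during an ES outage.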
Example 5: __init__
def __init__(self, stream):
    assert stream

    use_UTF8 = False

    if isinstance(stream, basestring):
        if stream.startswith("sys."):
            use_UTF8 = True  # sys.* ARE OLD AND CAN NOT HANDLE unicode
        self.stream = eval(stream)
        name = stream
    else:
        self.stream = stream
        name = "stream"

    # WRITE TO STREAMS CAN BE *REALLY* SLOW, WE WILL USE A THREAD
    from pyLibrary.thread.threads import Queue

    if use_UTF8:
        def utf8_appender(value):
            if isinstance(value, unicode):
                value = value.encode('utf8')
            self.stream.write(value)

        appender = utf8_appender
    else:
        appender = self.stream.write

    self.queue = Queue("log to stream", max=10000, silent=True)
    self.thread = Thread("log to " + name, time_delta_pusher, appender=appender, queue=self.queue, interval=timedelta(seconds=0.3))
    self.thread.start()
Author: klahnakoski, Project: intermittents, Lines: 30, Source: log_usingThreadedStream.py
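The constructor above hands the queue to time_delta_pusher, which batches writes to the slow stream on a 0.3-second interval. Its exact signature aside, the buffering idea reduces to a queue plus a timed drain; a minimal sketch driving the queue directly:

import sys
from pyLibrary.thread.threads import Queue, Thread

queue = Queue("log to stream", max=10000, silent=True)

def pusher(please_stop):
    while not please_stop:
        Thread.sleep(0.3)  # WAKE EVERY 300ms AND FLUSH EVERYTHING AT ONCE
        for value in queue.pop_all():
            sys.stdout.write(value)

queue.add("buffered line\n")  # RETURNS IMMEDIATELY, EVEN IF THE STREAM IS SLOW
Thread.run("demo pusher", pusher)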
Example 6: __init__
def __init__(self, host, index, alias=None, name=None, port=9200, settings=None):
    global _elasticsearch
    if hasattr(self, "settings"):
        return

    from pyLibrary.queries.containers.lists import ListContainer
    from pyLibrary.env import elasticsearch as _elasticsearch

    self.settings = settings
    self.default_name = coalesce(name, alias, index)
    self.default_es = _elasticsearch.Cluster(settings=settings)
    self.todo = Queue("refresh metadata", max=100000, unique=True)

    self.meta = Dict()
    table_columns = metadata_tables()
    column_columns = metadata_columns()
    self.meta.tables = ListContainer("meta.tables", [], wrap({c.name: c for c in table_columns}))
    self.meta.columns = ListContainer("meta.columns", [], wrap({c.name: c for c in column_columns}))
    self.meta.columns.insert(column_columns)
    self.meta.columns.insert(table_columns)

    # TODO: fix monitor so it does not bring down ES
    if ENABLE_META_SCAN:
        self.worker = Thread.run("refresh metadata", self.monitor)
    else:
        self.worker = Thread.run("refresh metadata", self.not_monitor)
    return
Author: klahnakoski, Project: esReplicate, Lines: 26, Source: meta.py
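The unique=True flag is the notable Queue feature here: a column can be flagged for refresh many times while it waits, but the todo queue keeps only one copy, so the monitor thread refreshes it once. A minimal sketch, assuming add() silently drops an item that is already queued:

from pyLibrary.thread.threads import Queue

todo = Queue("refresh metadata", max=100000, unique=True)
todo.add("meta.columns")
todo.add("meta.columns")  # ALREADY QUEUED; WITH unique=True THIS IS A NO-OP
print todo.pop_all()      # -> ONE ITEM, NOT TWO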
Example 7: __init__
def __init__(self, functions):
    self.outbound = Queue("out to process")
    self.inbound = Queue("in from stdin")
    self.inbound = Queue("in from stderr")  # NOTE: THIS REBINDS self.inbound, DISCARDING THE "in from stdin" QUEUE (BUG PRESERVED FROM THE SOURCE)

    # MAKE THREADS
    self.threads = []
    for t, f in enumerate(functions):
        thread = worker(
            "worker " + unicode(t),
            f,
            self.inbound,
            self.outbound,
        )
        self.threads.append(thread)
Author: klahnakoski, Project: Activedata-ETL, Lines: 17, Source: multiprocess.py
Example 8: Multithread
class Multithread(object):
    """
    SIMPLE SEMANTICS FOR SYMMETRIC MULTITHREADING
    PASS A SET OF functions TO BE EXECUTED (ONE PER THREAD)
    SET outbound==False TO SIMPLY THROW AWAY RETURN VALUES, IF ANY
    threads - IF functions IS NOT AN ARRAY, THEN threads IS USED TO MAKE AN ARRAY
    THE inbound QUEUE IS EXPECTING dicts, EACH dict IS USED AS kwargs TO GIVEN functions
    """

    def __init__(self, functions, threads=None, outbound=None, silent_queues=None):
        if outbound is None:
            self.outbound = Queue("multithread", silent=silent_queues)
        elif outbound is False:
            self.outbound = None
        else:
            self.outbound = outbound

        self.inbound = Queue("multithread", silent=silent_queues)

        # MAKE THREADS
        if isinstance(functions, Iterable):
            Log.error("Not supported anymore")

        self.threads = []
        for t in range(coalesce(threads, 1)):
            thread = worker_thread("worker " + unicode(t), self.inbound, self.outbound, functions)
            self.threads.append(thread)

    def __enter__(self):
        return self

    # WAIT FOR ALL QUEUED WORK TO BE DONE BEFORE RETURNING
    def __exit__(self, type, value, traceback):
        try:
            if isinstance(value, Exception):
                self.inbound.close()
                for t in self.threads:
                    t.keep_running = False
            else:
                # ADD STOP MESSAGE, ONE FOR EACH THREAD, FOR ORDERLY SHUTDOWN
                for t in self.threads:
                    self.inbound.add(Thread.STOP)
                self.join()
        except Exception, e:
            Log.warning("Problem sending stops", e)
Author: klahnakoski, Project: MoDataSubmission, Lines: 46, Source: multithread.py
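A hedged usage sketch: the docstring says the inbound queue expects dicts used as kwargs for the given function, and __exit__ enqueues one Thread.STOP per thread for an orderly shutdown. The square function and the exact worker kwarg handling are assumptions, not taken from the source:

def square(value=None):
    return value * value

with Multithread(square, threads=4) as pool:
    for i in range(10):
        pool.inbound.add({"value": i})
# LEAVING THE with BLOCK QUEUES ONE Thread.STOP PER THREAD, THEN JOINS THE WORKERS;
# RETURN VALUES, IF ANY, ACCUMULATE ON pool.outbound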
Example 9: __init__
def __init__(self, db=None):
    """
    :param db: Optional, wrap a sqlite db in a thread
    :return: Multithread-safe database
    """
    self.db = None
    self.queue = Queue("sql commands")  # HOLD (command, result, signal) PAIRS
    self.worker = Thread.run("sqlite db thread", self._worker)
    self.get_trace = DEBUG
Author: klahnakoski, Project: esReplicate, Lines: 9, Source: sqlite.py
Example 10: TextLog_usingThreadedStream
class TextLog_usingThreadedStream(TextLog):
    # stream CAN BE AN OBJECT WITH write() METHOD, OR A STRING
    # WHICH WILL eval() TO ONE
    def __init__(self, stream):
        assert stream

        use_UTF8 = False

        if isinstance(stream, basestring):
            if stream.startswith("sys."):
                use_UTF8 = True  # sys.* ARE OLD AND CAN NOT HANDLE unicode
            self.stream = eval(stream)
            name = stream
        else:
            self.stream = stream
            name = "stream"

        # WRITE TO STREAMS CAN BE *REALLY* SLOW, WE WILL USE A THREAD
        from pyLibrary.thread.threads import Queue

        if use_UTF8:
            def utf8_appender(value):
                if isinstance(value, unicode):
                    value = value.encode('utf8')
                self.stream.write(value)

            appender = utf8_appender
        else:
            appender = self.stream.write

        self.queue = Queue("log to stream", max=10000, silent=True)
        self.thread = Thread("log to " + name, time_delta_pusher, appender=appender, queue=self.queue, interval=timedelta(seconds=0.3))
        self.thread.parent.remove_child(self.thread)  # LOGGING WILL BE RESPONSIBLE FOR THREAD stop()
        self.thread.start()

    def write(self, template, params):
        try:
            self.queue.add({"template": template, "params": params})
            return self
        except Exception, e:
            raise e  # OH NO!
Author: klahnakoski, Project: MoDataSubmission, Lines: 41, Source: log_usingThreadedStream.py
Example 11: find_changeset
def find_changeset(self, revision, please_stop=False):
    locker = Lock()
    output = []
    queue = Queue("branches", max=2000)
    queue.extend(self.branches)
    queue.add(Thread.STOP)
    problems = []

    def _find(please_stop):
        for b in queue:
            if please_stop:
                return
            try:
                url = b.url + "json-info?node=" + revision
                response = http.get(url, timeout=30)
                if response.status_code == 200:
                    with locker:
                        output.append(b)
                    Log.note("{{revision}} found at {{url}}", url=url, revision=revision)
            except Exception, f:
                problems.append(f)
Author: klahnakoski, Project: MoHg, Lines: 21, Source: hg_mozilla_org.py
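Iterating a Queue (for b in queue) blocks waiting for more work, so the method pre-loads every branch and then a Thread.STOP sentinel; without the sentinel each _find worker would hang once the branches ran out. A minimal sketch of that pattern, assuming iteration ends when the sentinel is consumed:

from pyLibrary.thread.threads import Queue, Thread

queue = Queue("branches", max=2000)
queue.extend(["mozilla-central", "mozilla-beta"])
queue.add(Thread.STOP)  # LETS THE for LOOP TERMINATE INSTEAD OF BLOCKING

for b in queue:
    print b

In the full method, several _find threads share this queue, each popping branches until the work runs out.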
Example 12: __init__
def __init__(self, host, index, type="log", max_size=1000, batch_size=100, settings=None):
    """
    settings ARE FOR THE ELASTICSEARCH INDEX
    """
    self.es = Cluster(settings).get_or_create_index(
        schema=convert.json2value(convert.value2json(SCHEMA), leaves=True),
        limit_replicas=True,
        tjson=True,
        settings=settings
    )
    self.batch_size = batch_size
    self.es.add_alias("debug")
    self.queue = Queue("debug logs to es", max=max_size, silent=True)
    Thread.run("add debug logs to es", self._insert_loop)
Author: mozilla, Project: ChangeDetector, Lines: 14, Source: log_usingElasticSearch.py
Example 13: TextLog_usingQueue
class TextLog_usingQueue(TextLog):
    def __init__(self, name=None):
        queue_name = "log messages to queue"
        if name:
            queue_name += " " + name
        self.queue = Queue(queue_name)

    def write(self, template, params):
        self.queue.add(expand_template(template, params))

    def stop(self):
        self.queue.close()

    def pop(self):
        lines = self.queue.pop()
        output = []
        for l in lines.split("\n"):
            if l[19:22] == " - ":  # LINES APPEAR TO START WITH A 19-CHARACTER TIMESTAMP FOLLOWED BY " - "; STRIP IT
                l = l[22:]
            if l.strip().startswith("File"):
                continue
            output.append(l)
        return "\n".join(output).strip()
Author: klahnakoski, Project: MoDataSubmission, Lines: 24, Source: log_usingQueue.py
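A hedged usage sketch of this sink as a test helper: write() expands the template immediately, and pop() returns the rendered line with any leading timestamp and traceback noise stripped:

log = TextLog_usingQueue("test")
log.write("found {{num}} errors", {"num": 3})
print log.pop()  # -> "found 3 errors" (TIMESTAMP PREFIX, IF PRESENT, IS REMOVED)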
Example 14: Multiprocess
class Multiprocess(object):
    # THE COMPLICATION HERE IS CONNECTING THE DISPARATE LOGGING TO
    # A CENTRAL POINT
    # ONLY THE MAIN THREAD CAN CREATE AND COMMUNICATE WITH multiprocess.Process

    def __init__(self, functions):
        self.outbound = Queue("out to process")
        self.inbound = Queue("in from stdin")
        self.inbound = Queue("in from stderr")  # NOTE: THIS REBINDS self.inbound, DISCARDING THE "in from stdin" QUEUE (BUG PRESERVED FROM THE SOURCE)

        # MAKE THREADS
        self.threads = []
        for t, f in enumerate(functions):
            thread = worker(
                "worker " + unicode(t),
                f,
                self.inbound,
                self.outbound,
            )
            self.threads.append(thread)

    def __enter__(self):
        return self

    # WAIT FOR ALL QUEUED WORK TO BE DONE BEFORE RETURNING
    def __exit__(self, a, b, c):
        try:
            self.inbound.close()  # SEND STOPS TO WAKE UP THE WORKERS WAITING ON inbound.pop()
        except Exception, e:
            Log.warning("Problem adding to inbound", e)

        self.join()
Author: klahnakoski, Project: Activedata-ETL, Lines: 36, Source: multiprocess.py
Example 15: __init__
def __init__(self, functions, threads=None, outbound=None, silent_queues=None):
    if outbound is None:
        self.outbound = Queue("multithread", silent=silent_queues)
    elif outbound is False:
        self.outbound = None
    else:
        self.outbound = outbound

    self.inbound = Queue("multithread", silent=silent_queues)

    # MAKE THREADS
    if isinstance(functions, Iterable):
        Log.error("Not supported anymore")

    self.threads = []
    for t in range(coalesce(threads, 1)):
        thread = worker_thread("worker " + unicode(t), self.inbound, self.outbound, functions)
        self.threads.append(thread)
Author: klahnakoski, Project: MoDataSubmission, Lines: 18, Source: multithread.py
Example 16: __init__
def __init__(self, logger):
    # DELAYED LOAD FOR THREADS MODULE
    from pyLibrary.thread.threads import Queue

    self.queue = Queue("logs", max=10000, silent=True)
    self.logger = logger

    def worker(please_stop):
        while not please_stop:
            Thread.sleep(1)
            logs = self.queue.pop_all()
            for log in logs:
                if log is Thread.STOP:
                    if DEBUG_LOGGING:
                        sys.stdout.write("Log_usingThread.worker() sees stop, filling rest of queue\n")
                    please_stop.go()
                else:
                    self.logger.write(**log)

    self.thread = Thread("log thread", worker)
    self.thread.start()
Author: klahnakoski, Project: intermittents, Lines: 21, Source: logs.py
Example 17: FromESMetadata
class FromESMetadata(Schema):
    """
    QUERY THE METADATA
    """
    def __new__(cls, *args, **kwargs):
        global singlton
        if singlton:
            return singlton
        else:
            singlton = object.__new__(cls)
            return singlton

    @use_settings
    def __init__(self, host, index, alias=None, name=None, port=9200, settings=None):
        global _elasticsearch
        if hasattr(self, "settings"):
            return

        from pyLibrary.queries.containers.lists import ListContainer
        from pyLibrary.env import elasticsearch as _elasticsearch

        self.settings = settings
        self.default_name = coalesce(name, alias, index)
        self.default_es = _elasticsearch.Cluster(settings=settings)
        self.todo = Queue("refresh metadata", max=100000, unique=True)

        self.meta = Dict()
        table_columns = metadata_tables()
        column_columns = metadata_columns()
        self.meta.tables = ListContainer("meta.tables", [], wrap({c.name: c for c in table_columns}))
        self.meta.columns = ListContainer("meta.columns", [], wrap({c.name: c for c in column_columns}))
        self.meta.columns.insert(column_columns)
        self.meta.columns.insert(table_columns)

        # TODO: fix monitor so it does not bring down ES
        if ENABLE_META_SCAN:
            self.worker = Thread.run("refresh metadata", self.monitor)
        else:
            self.worker = Thread.run("refresh metadata", self.not_monitor)
        return

    @property
    def query_path(self):
        return None

    @property
    def url(self):
        return self.default_es.path + "/" + self.default_name.replace(".", "/")

    def get_table(self, table_name):
        with self.meta.tables.locker:
            return self.meta.tables.query({"where": {"eq": {"name": table_name}}})

    def _upsert_column(self, c):
        # ASSUMING THE self.meta.columns.locker IS HAD
        existing_columns = [r for r in self.meta.columns.data if r.table == c.table and r.name == c.name]

        if not existing_columns:
            self.meta.columns.add(c)
            Log.note("todo: {{table}}.{{column}}", table=c.table, column=c.es_column)
            self.todo.add(c)

            # MARK meta.columns AS DIRTY TOO
            cols = [r for r in self.meta.columns.data if r.table == "meta.columns"]
            for cc in cols:
                cc.partitions = cc.cardinality = None
                cc.last_updated = Date.now()
            self.todo.extend(cols)
        else:
            canonical = existing_columns[0]
            if canonical.relative and not c.relative:
                return  # RELATIVE COLUMNS WILL SHADOW ABSOLUTE COLUMNS
            for key in Column.__slots__:
                canonical[key] = c[key]
            Log.note("todo: {{table}}.{{column}}", table=canonical.table, column=canonical.es_column)
            self.todo.add(canonical)

    def _get_columns(self, table=None, metadata=None):
        # TODO: HANDLE MORE THAN ONE ES, MAP TABLE SHORT_NAME TO ES INSTANCE
        if not metadata:
            metadata = self.default_es.get_metadata(force=True)

        def parse_all(please_stop):
            for abs_index, meta in jx.sort(metadata.indices.items(), {"value": 0, "sort": -1}):
                if meta.index != abs_index:
                    continue

                for _, properties in meta.mappings.items():
                    if please_stop:
                        return
                    self._parse_properties(abs_index, properties, meta)

        if table:
            for abs_index, meta in jx.sort(metadata.indices.items(), {"value": 0, "sort": -1}):
                if table == meta.index:
                    for _, properties in meta.mappings.items():
                        self._parse_properties(abs_index, properties, meta)
                    return
                if table == abs_index:
                    self._get_columns(table=meta.index, metadata=metadata)
# ......... the rest of the code is omitted .........
Author: klahnakoski, Project: esReplicate, Lines: 101, Source: meta.py
Example 18: FromESMetadata
class FromESMetadata(Schema):
    """
    QUERY THE METADATA
    """
    def __new__(cls, *args, **kwargs):
        global singlton
        if singlton:
            return singlton
        else:
            singlton = object.__new__(cls)
            return singlton

    @use_settings
    def __init__(self, host, index, alias=None, name=None, port=9200, settings=None):
        global _elasticsearch
        if hasattr(self, "settings"):
            return

        from pyLibrary.queries.containers.list_usingPythonList import ListContainer
        from pyLibrary.env import elasticsearch as _elasticsearch

        self.settings = settings
        self.default_name = coalesce(name, alias, index)
        self.default_es = _elasticsearch.Cluster(settings=settings)
        self.todo = Queue("refresh metadata", max=100000, unique=True)

        self.es_metadata = Null
        self.last_es_metadata = Date.now() - OLD_METADATA

        self.meta = Dict()
        table_columns = metadata_tables()
        column_columns = metadata_columns()
        self.meta.tables = ListContainer("meta.tables", [], wrap({c.name: c for c in table_columns}))
        self.meta.columns = ColumnList()
        self.meta.columns.insert(column_columns)
        self.meta.columns.insert(table_columns)

        # TODO: fix monitor so it does not bring down ES
        if ENABLE_META_SCAN:
            self.worker = Thread.run("refresh metadata", self.monitor)
        else:
            self.worker = Thread.run("refresh metadata", self.not_monitor)
        return

    @property
    def query_path(self):
        return None

    @property
    def url(self):
        return self.default_es.path + "/" + self.default_name.replace(".", "/")

    def get_table(self, table_name):
        with self.meta.tables.locker:
            return wrap([t for t in self.meta.tables.data if t.name == table_name])

    def _upsert_column(self, c):
        # ASSUMING THE self.meta.columns.locker IS HAD
        existing_columns = self.meta.columns.find(c.table, c.name)

        if not existing_columns:
            self.meta.columns.add(c)
            self.todo.add(c)

            if ENABLE_META_SCAN:
                Log.note("todo: {{table}}::{{column}}", table=c.table, column=c.es_column)

            # MARK meta.columns AS DIRTY TOO
            cols = self.meta.columns.find("meta.columns", None)
            for cc in cols:
                cc.partitions = cc.cardinality = None
                cc.last_updated = Date.now()
            self.todo.extend(cols)
        else:
            canonical = existing_columns[0]
            if canonical.relative and not c.relative:
                return  # RELATIVE COLUMNS WILL SHADOW ABSOLUTE COLUMNS
            for key in Column.__slots__:
                canonical[key] = c[key]
            Log.note("todo: {{table}}::{{column}}", table=canonical.table, column=canonical.es_column)
            self.todo.add(canonical)

    def _get_columns(self, table=None):
        # TODO: HANDLE MORE THAN ONE ES, MAP TABLE SHORT_NAME TO ES INSTANCE
        meta = self.es_metadata.indices[table]
        if not meta or self.last_es_metadata < Date.now() - OLD_METADATA:
            self.es_metadata = self.default_es.get_metadata(force=True)
            meta = self.es_metadata.indices[table]

        for _, properties in meta.mappings.items():
            self._parse_properties(meta.index, properties, meta)

    def _parse_properties(self, abs_index, properties, meta):
        abs_columns = _elasticsearch.parse_properties(abs_index, None, properties.properties)
        abs_columns = abs_columns.filter(  # TODO: REMOVE WHEN jobs PROPERTY EXPLOSION IS CONTAINED
            lambda r: not r.es_column.startswith("other.") and
                      not r.es_column.startswith("previous_values.cf_") and
                      not r.es_index.startswith("debug") and
                      r.es_column.find("=") == -1 and
                      r.es_column.find(" ") == -1
        )
# ......... the rest of the code is omitted .........
Author: klahnakoski, Project: TestFailures, Lines: 101, Source: meta.py
Example 19: __init__
def __init__(self, name=None):
    queue_name = "log messages to queue"
    if name:
        queue_name += " " + name
    self.queue = Queue(queue_name)
Author: klahnakoski, Project: MoDataSubmission, Lines: 5, Source: log_usingQueue.py
Example 20: Sqlite
class Sqlite(object):
    canonical = None

    def __init__(self, db=None):
        """
        :param db: Optional, wrap a sqlite db in a thread
        :return: Multithread-safe database
        """
        self.db = None
        self.queue = Queue("sql commands")  # HOLD (command, result, signal) PAIRS
        self.worker = Thread.run("sqlite db thread", self._worker)
        self.get_trace = DEBUG

    def execute(self, command):
        """
        COMMANDS WILL BE EXECUTED IN THE ORDER THEY ARE GIVEN
        BUT CAN INTERLEAVE WITH OTHER THREAD COMMANDS
        :param command: COMMAND FOR SQLITE
        :return: None
        """
        if self.get_trace:
            trace = extract_stack(1)
        else:
            trace = None
        self.queue.add((command, None, None, trace))

    def query(self, command):
        """
        WILL STALL CALLING THREAD UNTIL THE command IS COMPLETED
        :param command: COMMAND FOR SQLITE
        :return: list OF RESULTS
        """
        signal = Signal()
        result = Dict()
        self.queue.add((command, result, signal, None))
        signal.wait_for_go()

        if result.exception:
            Log.error("Problem with Sqlite call", cause=result.exception)
        return result

    def _worker(self, please_stop):
        if Sqlite.canonical:
            self.db = Sqlite.canonical
        else:
            self.db = sqlite3.connect(':memory:')

        try:
            while not please_stop:
                if DEBUG:
                    Log.note("begin pop")
                command, result, signal, trace = self.queue.pop()
                if DEBUG:
                    Log.note("done pop")
                if DEBUG:
                    Log.note("Running command\n{{command|indent}}", command=command)
                with Timer("Run command", debug=DEBUG):
                    if signal is not None:
                        try:
                            curr = self.db.execute(command)
                            result.meta.format = "table"
                            result.data = curr.fetchall()
                        except Exception, e:
                            e = Except.wrap(e)
                            result.exception = Except(ERROR, "Problem with\n{{command|indent}}", command=command, cause=e)
                        finally:
                            signal.go()
Author: klahnakoski, Project: MoDataSubmission, Lines: 68, Source: sqlite.py
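The (command, result, signal) tuple is the heart of the pattern: query() parks the calling thread on a Signal while the single db thread fills in result, giving serialized access to a connection that is not itself thread-safe. A minimal sketch of the same rendezvous, assuming Signal is importable from pyLibrary.thread.threads alongside Queue and Thread, and Dict from pyLibrary.dot; demo_worker is a hypothetical stand-in for the real sqlite execution:

from pyLibrary.dot import Dict
from pyLibrary.thread.threads import Queue, Signal, Thread

commands = Queue("demo commands")

def demo_worker(please_stop):
    command, result, signal = commands.pop()
    result.data = command.upper()  # STAND-IN FOR self.db.execute(command).fetchall()
    signal.go()                    # WAKE THE WAITING CALLER

Thread.run("demo db thread", demo_worker)

signal = Signal()
result = Dict()
commands.add(("select 1", result, signal))
signal.wait_for_go()  # BLOCKS UNTIL demo_worker CALLS signal.go()
print result.data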
Note: the pyLibrary.thread.threads.Queue examples in this article were compiled by 纯净天空 from source-code and documentation platforms such as GitHub and MSDocs; the snippets are drawn from open-source projects contributed by various developers. Copyright of the source code belongs to the original authors; consult each project's license before redistributing or using it. Do not reproduce without permission.