This article collects typical usage examples of the Python class tq_config.TaskQueueConfig. If you are wondering what the TaskQueueConfig class does, how to call it, or what real code that uses it looks like, the selected examples below should help.
The following presents 16 code examples of the TaskQueueConfig class, ordered roughly by popularity.
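Most of the examples share the same basic pattern: build a TaskQueueConfig for an application with the RabbitMQ broker flag, load the queue definitions, then generate the Celery configuration and worker scripts. The sketch below is only a distillation of that pattern for orientation, not code taken from any one project; it assumes a working AppScale environment, and the app id 'guestbook' is a placeholder.
from tq_config import TaskQueueConfig

app_id = 'guestbook'  # placeholder application id
config = TaskQueueConfig(TaskQueueConfig.RABBITMQ, app_id)
# Parse the application's queue configuration file (queue.yaml or queue.xml).
queue_info = config.load_queues_from_file(app_id)
# Generate the Celery configuration module and the worker script
# from the file-based queue info.
config.create_celery_file(TaskQueueConfig.QUEUE_INFO_FILE)
config.create_celery_worker_scripts(TaskQueueConfig.QUEUE_INFO_FILE)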
Example 1: reload_worker
def reload_worker(self, json_request):
  """ Reloads taskqueue workers as needed.
  A worker can be started on both a master and slave node.
  Args:
    json_request: A JSON string with the application id.
  Returns:
    A JSON string with the error status and error reason.
  """
  request = self.__parse_json_and_validate_tags(json_request,
    self.SETUP_WORKERS_TAGS)
  logging.info("Reload worker request: {0}".format(request))
  if 'error' in request:
    return json.dumps(request)
  app_id = self.__cleanse(request['app_id'])
  config = TaskQueueConfig(TaskQueueConfig.RABBITMQ, app_id)
  old_queues = self.__queue_info_cache.get(app_id, {'queue': []})
  old_queue_dict = {}
  for queue in old_queues['queue']:
    old_queue_dict[queue['name']] = queue
  new_queue_dict = {}
  # Load the new queue info.
  try:
    new_queues = config.load_queues_from_file(app_id)
    for queue in new_queues['queue']:
      new_queue_dict[queue['name']] = queue
  except ValueError, value_error:
    return json.dumps({"error": True, "reason": str(value_error)})
Developer: not listed, Project: not listed, Lines: 31, Source: not listed
Example 2: stop_worker
def stop_worker(self, json_request):
  """ Stops the god watch for queues of an application on the current
      node.
  Args:
    json_request: A JSON string with the queue name for which we're
      stopping its queues.
  Returns:
    A JSON string with the result.
  """
  request = self.__parse_json_and_validate_tags(json_request,
    self.STOP_WORKERS_TAGS)
  if 'error' in request:
    return json.dumps(request)
  app_id = request['app_id']
  watch = "celery-" + str(app_id)
  try:
    if god_interface.stop(watch):
      stop_command = self.get_worker_stop_command(app_id)
      os.system(stop_command)
      TaskQueueConfig.remove_config_files(app_id)
      result = {'error': False}
    else:
      result = {'error': True, 'reason': "Unable to stop watch %s" % watch}
  except OSError, os_error:
    result = {'error': True, 'reason': str(os_error)}
Developer: yoshimov, Project: appscale, Lines: 27, Source: distributed_tq.py
Example 3: test_load_queues_from_xml_file
def test_load_queues_from_xml_file(self):
  flexmock(file_io) \
    .should_receive("read").and_return(sample_queue_xml)
  flexmock(file_io) \
    .should_receive("exists").and_return(False).and_return(True)
  flexmock(file_io) \
    .should_receive("write").and_return(None)
  flexmock(file_io) \
    .should_receive("mkdir").and_return(None)
  tqc = TaskQueueConfig(TaskQueueConfig.RABBITMQ, 'myapp')
  queue_info = tqc.load_queues_from_file('app_id')
  self.assertEquals(queue_info,
    {'queue': [{'max_concurrent_requests': '300', 'rate': '100/s',
                'bucket_size': '100', 'name': 'default',
                'retry_parameters': {'task_age_limit': '3d'}},
               {'max_concurrent_requests': '100', 'rate': '100/s',
                'bucket_size': '100', 'name': 'mapreduce-workers',
                'retry_parameters': {'task_age_limit': '3d'}}]})
Developer: GavinHwa, Project: appscale, Lines: 13, Source: test_tq_config.py
Example 4: test_load_queues
def test_load_queues(self):
  flexmock(file_io) \
    .should_receive("read").and_return(sample_queue_yaml2)
  flexmock(file_io) \
    .should_receive("exists").and_return(True)
  flexmock(file_io) \
    .should_receive("write").and_return(None)
  flexmock(file_io) \
    .should_receive("mkdir").and_return(None)
  flexmock(datastore).should_receive("Get").\
    and_return({TaskQueueConfig.QUEUE_INFO:
      '{"queue":[{"name": "foo", "rate": "10/m"}]}'})
  tqc = TaskQueueConfig(TaskQueueConfig.RABBITMQ, 'myapp')
  queue_info = tqc.load_queues_from_file('app_id')
  queue_info = tqc.load_queues_from_db()
Developer: GavinHwa, Project: appscale, Lines: 15, Source: test_tq_config.py
Example 5: test_load_queues_from_db
def test_load_queues_from_db(self):
  flexmock(file_io) \
    .should_receive("read").and_return(sample_queue_yaml2)
  flexmock(file_io) \
    .should_receive("write").and_return(None)
  flexmock(file_io) \
    .should_receive("mkdir").and_return(None)
  flexmock(datastore).should_receive("Get").\
    and_return({TaskQueueConfig.QUEUE_INFO:
      '{"queue":[{"name": "foo", "rate": "10/m"}]}'})
  tqc = TaskQueueConfig(TaskQueueConfig.RABBITMQ, 'myapp')
  queue_info = tqc.load_queues_from_db()
  self.assertEquals(queue_info, {'queue': [{'name': 'foo', 'rate': '10/m'}]})
Developer: GavinHwa, Project: appscale, Lines: 15, Source: test_tq_config.py
Example 6: __get_task_function
def __get_task_function(self, request):
  """ Returns a function pointer to a celery task.
  Load the module for the app/queue.
  Args:
    request: A taskqueue_service_pb.TaskQueueAddRequest.
  Returns:
    A function pointer to a celery task.
  Raises:
    taskqueue_service_pb.TaskQueueServiceError
  """
  try:
    task_module = __import__(TaskQueueConfig.\
      get_celery_worker_module_name(request.app_id()))
    # If a new queue was added we need to reload the python code.
    if self.__force_reload:
      start = time.time()
      reload(task_module)
      time_taken = time.time() - start
      self.__force_reload = False
      logging.info("Reloading module for {0} took {1} seconds.".\
        format(request.app_id(), time_taken))
    task_func = getattr(task_module,
      TaskQueueConfig.get_queue_function_name(request.queue_name()))
    return task_func
  except AttributeError, attribute_error:
    logging.exception(attribute_error)
    raise apiproxy_errors.ApplicationError(
      taskqueue_service_pb.TaskQueueServiceError.UNKNOWN_QUEUE)
Developer: not listed, Project: not listed, Lines: 31, Source: not listed
Example 7: start_worker
def start_worker(self, json_request):
  """ Starts taskqueue workers if they are not already running.
  A worker can be started on both a master and slave node.
  Args:
    json_request: A JSON string with the application id.
  Returns:
    A JSON string with the error status and error reason.
  """
  request = self.__parse_json_and_validate_tags(json_request,
    self.SETUP_WORKERS_TAGS)
  if 'error' in request:
    return json.dumps(request)
  app_id = self.__cleanse(request['app_id'])
  hostname = socket.gethostbyname(socket.gethostname())
  config = TaskQueueConfig(TaskQueueConfig.RABBITMQ, app_id)
  # Load the queue info
  try:
    config.load_queues_from_file(app_id)
    config.create_celery_file(TaskQueueConfig.QUEUE_INFO_FILE)
    config.create_celery_worker_scripts(TaskQueueConfig.QUEUE_INFO_FILE)
  except ValueError, value_error:
    return json.dumps({"error": True, "reason": str(value_error)})
Developer: GavinHwa, Project: appscale, Lines: 27, Source: distributed_tq.py
Example 8: test_save_queues_to_db
def test_save_queues_to_db(self):
  flexmock(file_io) \
    .should_receive("read").and_return(sample_queue_yaml2)
  flexmock(file_io) \
    .should_receive("write").and_return(None)
  flexmock(file_io) \
    .should_receive("mkdir").and_return(None)
  flexmock(file_io) \
    .should_receive('exists').and_return(True)
  flexmock(datastore).should_receive("Put").\
    and_return()
  tqc = TaskQueueConfig(TaskQueueConfig.RABBITMQ, 'myapp')
  try:
    queue_info = tqc.save_queues_to_db()
    raise
  except ValueError:
    pass
  queue_info = tqc.load_queues_from_file('app_id')
  queue_info = tqc.save_queues_to_db()
Developer: GavinHwa, Project: appscale, Lines: 20, Source: test_tq_config.py
Example 9: __enqueue_push_task
def __enqueue_push_task(self, request):
  """ Enqueues a batch of push tasks.
  Args:
    request: A taskqueue_service_pb.TaskQueueAddRequest.
  """
  self.__validate_push_task(request)
  self.__check_and_store_task_names(request)
  args = self.get_task_args(request)
  headers = self.get_task_headers(request)
  countdown = int(headers['X-AppEngine-TaskETA']) - \
    int(datetime.datetime.now().strftime("%s"))
  task_func = self.__get_task_function(request)
  result = task_func.apply_async(kwargs={'headers': headers,
    'args': args},
    expires=args['expires'],
    acks_late=True,
    countdown=countdown,
    queue=TaskQueueConfig.get_celery_queue_name(
      request.app_id(), request.queue_name()),
    routing_key=TaskQueueConfig.get_celery_queue_name(
      request.app_id(), request.queue_name()))
Developer: yoshimov, Project: appscale, Lines: 22, Source: distributed_tq.py
Example 10: test_create_celery_file
def test_create_celery_file(self):
  flexmock(file_io) \
    .should_receive("read").and_return(sample_queue_yaml2)
  flexmock(file_io) \
    .should_receive("exists").and_return(True)
  flexmock(file_io) \
    .should_receive("write").and_return(None)
  flexmock(file_io) \
    .should_receive("mkdir").and_return(None)
  flexmock(datastore).should_receive("Get").\
    and_return({TaskQueueConfig.QUEUE_INFO:
      '{"queue":[{"name": "foo", "rate": "10/m"}]}'})
  tqc = TaskQueueConfig(TaskQueueConfig.RABBITMQ, 'myapp')
  flexmock(file_io).should_receive("read").and_return(sample_queue_yaml2)
  queue_info = tqc.load_queues_from_file('app_id')
  queue_info = tqc.load_queues_from_db()
  # Making sure it does not throw an exception.
  self.assertEquals(tqc.create_celery_file(TaskQueueConfig.QUEUE_INFO_DB),
    TaskQueueConfig.CELERY_CONFIG_DIR + "myapp" + ".py")
  self.assertEquals(tqc.create_celery_file(TaskQueueConfig.QUEUE_INFO_FILE),
    TaskQueueConfig.CELERY_CONFIG_DIR + "myapp" + ".py")
Developer: GavinHwa, Project: appscale, Lines: 22, Source: test_tq_config.py
Example 11: get_task_args
def get_task_args(self, request):
  """ Gets the task args used when making a task web request.
  Args:
    request: A taskqueue_service_pb.TaskQueueAddRequest
  Returns:
    A dictionary used by a task worker.
  """
  args = {}
  args['task_name'] = request.task_name()
  args['url'] = request.url()
  args['app_id'] = request.app_id()
  args['queue_name'] = request.queue_name()
  args['method'] = self.__method_mapping(request.method())
  args['body'] = request.body()
  args['payload'] = request.payload()
  args['description'] = request.description()
  # Set defaults.
  args['max_retries'] = self.DEFAULT_MAX_RETRIES
  args['expires'] = self.__when_to_expire(request)
  args['max_retries'] = self.DEFAULT_MAX_RETRIES
  args['max_backoff_sec'] = self.DEFAULT_MAX_BACKOFF
  args['min_backoff_sec'] = self.DEFAULT_MIN_BACKOFF
  args['max_doublings'] = self.DEFAULT_MAX_DOUBLINGS
  # Load queue info into cache.
  if request.app_id() not in self.__queue_info_cache:
    try:
      config = TaskQueueConfig(TaskQueueConfig.RABBITMQ, request.app_id())
      self.__queue_info_cache[request.app_id()] = config.load_queues_from_file(
        request.app_id())
    except ValueError, value_error:
      logging.error("Unable to load queues for app id {0} using defaults."\
        .format(request.app_id()))
    except NameError, name_error:
      logging.error("Unable to load queues for app id {0} using defaults."\
        .format(request.app_id()))
Developer: yoshimov, Project: appscale, Lines: 38, Source: distributed_tq.py
Example 12: test_create_celery_worker_scripts
def test_create_celery_worker_scripts(self):
  flexmock(file_io).should_receive("read").and_return(sample_queue_yaml2)
  flexmock(file_io).should_receive("write").and_return(None)
  flexmock(file_io).should_receive("mkdir").and_return(None)
  flexmock(datastore).should_receive("Get").\
    and_return({TaskQueueConfig.QUEUE_INFO:
      '{"queue":[{"name": "foo", "rate": "10/m"}]}'})
  tqc = TaskQueueConfig(TaskQueueConfig.RABBITMQ, 'myapp')
  flexmock(file_io) \
    .should_receive("exists").and_return(True)
  queue_info = tqc.load_queues_from_file('app_id')
  queue_info = tqc.load_queues_from_db()
  FILE1 = open(os.path.dirname(os.path.realpath(__file__)) +
    '/../../templates/header.py', 'r')
  file1 = FILE1.read()
  FILE1.close()
  FILE2 = open(os.path.dirname(os.path.realpath(__file__)) +
    '/../../templates/task.py', 'r')
  file2 = FILE2.read()
  FILE2.close()
  flexmock(file_io).should_receive('write').and_return(None)
  flexmock(file_io).should_receive("read").and_return(file1).and_return(file2)
  self.assertEquals(
    tqc.create_celery_worker_scripts(TaskQueueConfig.QUEUE_INFO_DB),
    TaskQueueConfig.CELERY_WORKER_DIR + 'app___myapp.py')
  self.assertEquals(
    tqc.create_celery_worker_scripts(TaskQueueConfig.QUEUE_INFO_FILE),
    TaskQueueConfig.CELERY_WORKER_DIR + 'app___myapp.py')
Developer: GavinHwa, Project: appscale, Lines: 24, Source: test_tq_config.py
Example 13: test_load_queues_from_file
def test_load_queues_from_file(self):
  flexmock(file_io) \
    .should_receive("read").and_return(sample_queue_yaml)
  flexmock(file_io) \
    .should_receive("exists").and_return(True)
  flexmock(file_io) \
    .should_receive("write").and_return(None)
  flexmock(file_io) \
    .should_receive("mkdir").and_return(None)
  tqc = TaskQueueConfig(TaskQueueConfig.RABBITMQ, 'myapp')
  queue_info = tqc.load_queues_from_file('app_id')
  self.assertEquals(queue_info, {'queue': [{'name': 'default', 'rate': '5/s'},
                                           {'name': 'foo', 'rate': '10/m'}]})
  flexmock(file_io) \
    .should_receive("read").and_return('blah').and_raise(IOError)
  tqc = TaskQueueConfig(TaskQueueConfig.RABBITMQ, 'myapp')
  queue_info = tqc.load_queues_from_file('app_id')
  self.assertEquals(queue_info, {'queue': [{'name': 'default', 'rate': '5/s'}]})
  flexmock(file_io) \
    .should_receive("read").and_return(sample_queue_yaml2)
  flexmock(file_io) \
    .should_receive("write").and_return(None)
  tqc = TaskQueueConfig(TaskQueueConfig.RABBITMQ, 'myapp')
  queue_info = tqc.load_queues_from_file('app_id')
  self.assertEquals(queue_info, {'queue': [{'name': 'foo', 'rate': '10/m'},
                                           {'name': 'default', 'rate': '5/s'}]})
Developer: GavinHwa, Project: appscale, Lines: 37, Source: test_tq_config.py
Example 14: test_validate_queue_name
def test_validate_queue_name(self):
  flexmock(file_io).should_receive("read").and_return(sample_queue_yaml2)
  flexmock(file_io).should_receive("write").and_return(None)
  flexmock(file_io).should_receive("mkdir").and_return(None)
  flexmock(datastore).should_receive("Get").\
    and_return({TaskQueueConfig.QUEUE_INFO:
      '{"queue":[{"name": "foo", "rate": "10/m"}]}'})
  tqc = TaskQueueConfig(TaskQueueConfig.RABBITMQ, 'myapp')
  tqc.validate_queue_name("hello")
  tqc.validate_queue_name("hello_hello5354")
  try:
    tqc.validate_queue_name("hello-hello")
    raise
  except NameError:
    pass
  try:
    tqc.validate_queue_name("hello$hello")
    raise
  except NameError:
    pass
  try:
    tqc.validate_queue_name("hello@hello")
    raise
  except NameError:
    pass
  try:
    tqc.validate_queue_name("hello&hello")
    raise
  except NameError:
    pass
  try:
    tqc.validate_queue_name("hello*hello")
    raise
  except NameError:
    pass
Developer: GavinHwa, Project: appscale, Lines: 36, Source: test_tq_config.py
Example 15: TaskQueueConfig
import os
import sys

from celery import Celery
from celery.utils.log import get_task_logger

import appscale_info
import constants
from tq_config import TaskQueueConfig

from google.appengine.runtime import apiproxy_errors
from google.appengine.api import apiproxy_stub_map
from google.appengine.api import datastore_errors
from google.appengine.api import datastore_distributed
from google.appengine.api import datastore
from google.appengine.ext import db

sys.path.append(TaskQueueConfig.CELERY_CONFIG_DIR)
sys.path.append(TaskQueueConfig.CELERY_WORKER_DIR)

app_id = "APP_ID"
config = TaskQueueConfig(TaskQueueConfig.RABBITMQ, app_id)
module_name = TaskQueueConfig.get_celery_worker_module_name(app_id)
celery = Celery(module_name, broker=config.get_broker_string(), backend="amqp")
celery.config_from_object("CELERY_CONFIGURATION")

logger = get_task_logger(__name__)

master_db_ip = appscale_info.get_db_master_ip()
connection_str = master_db_ip + ":" + str(constants.DB_SERVER_PORT)
ds_distrib = datastore_distributed.DatastoreDistributed(
  "appscaledashboard", connection_str, require_indexes=False)
apiproxy_stub_map.apiproxy.RegisterStub("datastore_v3", ds_distrib)
os.environ["APPLICATION_ID"] = "appscaledashboard"

# This template header and tasks can be found in appscale/AppTaskQueue/templates
Developer: mackjoner, Project: appscale, Lines: 30, Source: header.py
Example 16: str
# Load the queue info
try:
  self.__queue_info_cache[app_id] = config.load_queues_from_file(app_id)
  config.create_celery_file(TaskQueueConfig.QUEUE_INFO_FILE)
  config.create_celery_worker_scripts(TaskQueueConfig.QUEUE_INFO_FILE)
except ValueError, value_error:
  return json.dumps({"error": True, "reason": str(value_error)})
except NameError, name_error:
  return json.dumps({"error": True, "reason": str(name_error)})
log_file = self.LOG_DIR + app_id + ".log"
command = ["celery",
           "worker",
           "--app=" + TaskQueueConfig.get_celery_worker_module_name(app_id),
           #"--autoscale=" + self.MIN_MAX_CONCURRENCY,
           "--hostname=" + hostname + "." + app_id,
           "--workdir=" + TaskQueueConfig.CELERY_WORKER_DIR,
           "--logfile=" + log_file,
           "--time-limit=" + str(self.HARD_TIME_LIMIT),
           "--soft-time-limit=" + str(self.TASK_SOFT_TIME_LIMIT),
           "--pidfile=" + self.PID_FILE_LOC + 'celery___' + app_id + ".pid",
           "--autoreload"]
start_command = str(' '.join(command))
stop_command = self.get_worker_stop_command(app_id)
watch = "celery-" + str(app_id)
god_config = god_app_configuration.create_config_file(watch,
                                                      start_command,
                                                      stop_command,
Developer: yoshimov, Project: appscale, Lines: 30, Source: distributed_tq.py
Note: The tq_config.TaskQueueConfig examples in this article were collected by 纯净天空 from open-source projects hosted on GitHub, MSDocs, and similar source-code and documentation platforms. The snippets come from projects contributed by open-source developers, and copyright of the code remains with its original authors; consult each project's license before redistributing or reusing it, and do not republish without permission.