This article collects typical usage examples of the Python class tvb.core.services.operation_service.OperationService. If you are wondering what OperationService is used for, or how to use it, the curated class examples below may help.
The following shows 15 code examples of the OperationService class, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Python code samples.
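Taken together, the examples below revolve around a small set of calls: constructing OperationService(), launching an operation with launch_operation, and stopping it with stop_operation. The following is a minimal, hypothetical sketch of that lifecycle based only on the call patterns visible in the examples; the operation id and adapter instance are assumed to be prepared elsewhere (e.g. via the TVB test factories and dao helpers shown below), so treat it as an illustration rather than a verified recipe.

from tvb.core.services.operation_service import OperationService

def launch_and_stop(operation_id, adapter_instance):
    """Hypothetical helper: launch an operation synchronously, then stop it.
    Both arguments are assumed to come from the surrounding TVB services
    (dao / TestFactory), exactly as in the examples below."""
    service = OperationService()
    # The 'False' flag mirrors the synchronous launches used in Example 1.
    service.launch_operation(operation_id, False, adapter_instance)
    # stop_operation returns a boolean result (see Example 5).
    return service.stop_operation(operation_id)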
Example 1: create_group
def create_group(test_user=None, test_project=None, subject="John Doe"):
    """
    Create a group of 2 operations, each with at least one resultant DataType.
    """
    if test_user is None:
        test_user = TestFactory.create_user()
    if test_project is None:
        test_project = TestFactory.create_project(test_user)

    ### Retrieve Adapter instance
    algo_group = dao.find_group('tvb.tests.framework.adapters.testadapter3', 'TestAdapter3')
    algo_category = dao.get_category_by_id(algo_group.fk_category)
    algo = dao.get_algorithm_by_group(algo_group.id)
    adapter_inst = TestFactory.create_adapter(algo_group=algo_group, test_project=test_project)
    adapter_inst.meta_data = {DataTypeMetaData.KEY_SUBJECT: subject}
    args = {model.RANGE_PARAMETER_1: 'param_5', 'param_5': [1, 2]}

    ### Prepare Operations group. Execute them synchronously
    service = OperationService()
    operations = service.prepare_operations(test_user.id, test_project.id, algo, algo_category, {}, **args)[0]
    service.launch_operation(operations[0].id, False, adapter_inst)
    service.launch_operation(operations[1].id, False, adapter_inst)

    resulted_dts = dao.get_datatype_in_group(operation_group_id=operations[0].fk_operation_group)
    return resulted_dts, operations[0].fk_operation_group
Author: unimauro | Project: tvb-framework | Lines: 26 | Source: test_factory.py
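Example 1 condenses the group-launch pattern: a range parameter turns one set of arguments into an operation group, prepare_operations builds the group, and each operation is then launched synchronously. Below is a stripped-down, hypothetical sketch of just that pattern; every argument is assumed to be prepared exactly as above.

from tvb.core.services.operation_service import OperationService

def launch_operation_group(user_id, project_id, algo, algo_category, adapter_inst, range_args):
    """Hypothetical sketch of the group-launch pattern from Example 1.
    'range_args' is assumed to look like
    {model.RANGE_PARAMETER_1: 'param_5', 'param_5': [1, 2]}."""
    service = OperationService()
    # prepare_operations returns a tuple; its first element is the list of operations.
    operations = service.prepare_operations(user_id, project_id, algo,
                                            algo_category, {}, **range_args)[0]
    # Launch every operation in the group synchronously.
    for operation in operations:
        service.launch_operation(operation.id, False, adapter_inst)
    return operations[0].fk_operation_group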
Example 2: cancel_all_operations
def cancel_all_operations(self):
    """
    To make sure that no running operations are left which could make some other
    test started afterwards to fail, cancel all operations after each test.
    """
    LOGGER.info("Stopping all operations.")
    op_service = OperationService()
    operations = self.get_all_entities(model.Operation)
    for operation in operations:
        op_service.stop_operation(operation.id)
Author: lcosters | Project: tvb-framework | Lines: 10 | Source: base_testcase.py
Example 3: setUp
def setUp(self):
    """
    Sets up the environment for testing;
    creates a `FlowController`
    """
    self.init()
    self.flow_c = FlowController()
    self.burst_c = BurstController()
    self.operation_service = OperationService()
Author: sdiazpier | Project: tvb-framework | Lines: 9 | Source: flow_controller_test.py
Example 4: setup_method
def setup_method(self):
    """
    Reset the database before each test.
    """
    self.clean_database()
    initialize_storage()
    self.test_user = TestFactory.create_user()
    self.test_project = TestFactory.create_project(self.test_user)
    self.operation_service = OperationService()
    self.backup_hdd_size = TvbProfile.current.MAX_DISK_SPACE
Author: maedoc | Project: tvb-framework | Lines: 10 | Source: operation_service_test.py
Example 5: stop_operation
def stop_operation(self, operation_id, is_group, remove_after_stop=False):
    """
    Stop the operation given by operation_id. If is_group is true stop all the
    operations from that group.
    """
    operation_service = OperationService()
    result = False
    if int(is_group) == 0:
        result = operation_service.stop_operation(operation_id)
        if remove_after_stop:
            ProjectService().remove_operation(operation_id)
    else:
        op_group = ProjectService.get_operation_group_by_id(operation_id)
        operations_in_group = ProjectService.get_operations_in_group(op_group)
        for operation in operations_in_group:
            tmp_res = operation_service.stop_operation(operation.id)
            if remove_after_stop:
                ProjectService().remove_operation(operation.id)
            result = result or tmp_res
    return result
Author: transpersonify | Project: tvb-framework | Lines: 20 | Source: flow_controller.py
Example 6: transactional_setup_method
def transactional_setup_method(self):
    """
    Sets up the testing environment;
    saves config file;
    creates a test user, a test project;
    creates burst, operation, flow and workflow services
    """
    self.test_user = TestFactory.create_user()
    self.test_project = TestFactory.create_project(self.test_user)
    self.workflow_service = WorkflowService()
    self.burst_service = BurstService()
    self.operation_service = OperationService()
    self.flow_service = FlowService()
Author: maedoc | Project: tvb-framework | Lines: 13 | Source: workflow_service_test.py
Example 7: __init__
def __init__(self, overwrites=None, settings_file=None):
    """ Parameters can be overwritten either from a settings file or from a dictionary. """
    if overwrites is not None:
        self.overwrites.update(overwrites)
    if settings_file is not None:
        settings = open(sys.argv[1]).read()
        for line in settings.split('\n'):
            key, value = line.split('=')
            self.overwrites[key.strip()] = value.strip()
    if KEY_PROJECT not in self.overwrites:
        raise Exception("Settings file should contain the id of the project: %s=1" % KEY_PROJECT)
    self.project = dao.get_project_by_id(self.overwrites[KEY_PROJECT])
    self.flow_service = FlowService()
    self.operation_service = OperationService()
Author: gummadhav | Project: tvb-framework | Lines: 14 | Source: model_validations.py
Example 8: ModelValidator
class ModelValidator(object):
    overwrites = {}

    def __init__(self, overwrites=None, settings_file=None):
        """ Parameters can be overwritten either from a settings file or from a dictionary. """
        if overwrites is not None:
            self.overwrites.update(overwrites)
        if settings_file is not None:
            settings = open(sys.argv[1]).read()
            for line in settings.split('\n'):
                key, value = line.split('=')
                self.overwrites[key.strip()] = value.strip()
        if KEY_PROJECT not in self.overwrites:
            raise Exception("Settings file should contain the id of the project: %s=1" % KEY_PROJECT)
        self.project = dao.get_project_by_id(self.overwrites[KEY_PROJECT])
        self.flow_service = FlowService()
        self.operation_service = OperationService()

    def launch_validation(self):
        """
        Prepare the arguments to be submitted and launch actual operations group.
        TODO: Now get the results and check if any errors
        """
        stored_adapter = self.flow_service.get_algorithm_by_module_and_class(SIMULATOR_MODULE, SIMULATOR_CLASS)
        simulator_adapter = ABCAdapter.build_adapter(stored_adapter)
        launch_args = {}
        flatten_interface = simulator_adapter.flaten_input_interface()
        itree_mngr = self.flow_service.input_tree_manager
        prepared_flatten_interface = itree_mngr.fill_input_tree_with_options(flatten_interface, self.project.id,
                                                                             stored_adapter.fk_category)
        for entry in prepared_flatten_interface:
            value = entry['default']
            if isinstance(value, dict):
                value = str(value)
            if hasattr(value, 'tolist'):
                value = value.tolist()
            launch_args[entry['name']] = value
        launch_args.update(self.overwrites)

        nr_of_operations = 1
        for key in self.overwrites:
            if key.startswith(PARAM_RANGE_PREFIX):
                range_values = self.operation_service.get_range_values(launch_args, key)
                nr_of_operations *= len(range_values)

        do_launch = False
        print "Warning! This will launch %s operations. Do you agree? (yes/no)" % nr_of_operations
        while 1:
            accept = raw_input()
            if accept.lower() == 'yes':
                do_launch = True
                break
            if accept.lower() == 'no':
                do_launch = False
                break
            print "Please type either yes or no"

        if do_launch:
            self.launched_operations = self.flow_service.fire_operation(simulator_adapter, self.project.administrator,
                                                                        self.project.id, **launch_args)
            return self.validate_results(0)
        else:
            return "Operation canceled by user."

    def validate_results(self, last_verified_index):
        error_count = 0
        while last_verified_index < len(self.launched_operations):
            operation_to_check = self.launched_operations[last_verified_index]
            operation = dao.get_operation_by_id(operation_to_check.id)
            if not operation.has_finished:
                sleep(10)
            if operation.status == STATUS_ERROR:
                sys.stdout.write("E(" + str(operation_to_check.id) + ")")
                error_count += 1
                last_verified_index += 1
                sys.stdout.flush()
            if operation.status == STATUS_FINISHED:
                last_verified_index += 1
                sys.stdout.write('.')
                sys.stdout.flush()
        if error_count:
            return "%s operations in error; %s operations finished successfully." % (
                error_count, len(self.launched_operations) - error_count)
        return "All operations finished successfully!"
Author: gummadhav | Project: tvb-framework | Lines: 86 | Source: model_validations.py
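One detail worth isolating from launch_validation above is the operation-count estimate: every overwrite key that names a range is passed to get_range_values, and the lengths of the returned ranges are multiplied together. Here is a hypothetical sketch of that step on its own; the 'range_' prefix stands in for PARAM_RANGE_PREFIX, which is an assumption about the constant imported in model_validations.py.

from tvb.core.services.operation_service import OperationService

def count_sweep_operations(launch_args, overwrites, range_prefix='range_'):
    """Hypothetical sketch of the counting loop from Example 8.
    'range_prefix' is assumed to match PARAM_RANGE_PREFIX."""
    service = OperationService()
    nr_of_operations = 1
    for key in overwrites:
        if key.startswith(range_prefix):
            # Each range multiplies the total number of operations to be launched.
            nr_of_operations *= len(service.get_range_values(launch_args, key))
    return nr_of_operations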
Example 9: __init__
def __init__(self):
    self.operation_service = OperationService()
    self.workflow_service = WorkflowService()
    self.logger = get_logger(self.__class__.__module__)
Author: lcosters | Project: tvb-framework | Lines: 4 | Source: burst_service.py
Example 10: FlowContollerTest
class FlowContollerTest(BaseControllersTest):
    """ Unit tests for FlowController """

    def setUp(self):
        """
        Sets up the environment for testing;
        creates a `FlowController`
        """
        self.init()
        self.flow_c = FlowController()
        self.burst_c = BurstController()
        self.operation_service = OperationService()

    def tearDown(self):
        """ Cleans up the testing environment """
        self.cleanup()
        self.clean_database()

    def test_context_selected(self):
        """
        Remove the project from CherryPy session and check that you are redirected to the projects page.
        """
        del cherrypy.session[common.KEY_PROJECT]
        self._expect_redirect('/project/viewall', self.flow_c.step)

    def test_invalid_step(self):
        """
        Pass an invalid step and make sure we are redirected to the tvb start page.
        """
        self._expect_redirect('/tvb', self.flow_c.step)

    def test_valid_step(self):
        """
        For all algorithm categories check that a submenu is generated and the result
        page has its title given by the category name.
        """
        categories = dao.get_algorithm_categories()
        for categ in categories:
            result_dict = self.flow_c.step(categ.id)
            self.assertTrue(common.KEY_SUBMENU_LIST in result_dict,
                            "Expect to have a submenu with available algorithms for category.")
            self.assertEqual(result_dict["section_name"], categ.displayname.lower())

    def test_step_connectivity(self):
        """
        Check that the correct section name and connectivity sub-menu are returned for the connectivity step.
        """
        result_dict = self.flow_c.step_connectivity()
        self.assertEqual(result_dict['section_name'], 'connectivity')
        self.assertEqual(result_dict['submenu_list'], self.flow_c.connectivity_submenu)

    def test_default(self):
        """
        Test the default method from step controllers. Check that the submit link is correct, that a mainContent
        key is present in the result dict and that the isAdapter flag is set to true.
        """
        cherrypy.request.method = "GET"
        categories = dao.get_algorithm_categories()
        for categ in categories:
            algo_groups = dao.get_groups_by_categories([categ.id])
            for algo in algo_groups:
                result_dict = self.flow_c.default(categ.id, algo.id)
                self.assertEqual(result_dict[common.KEY_SUBMIT_LINK], '/flow/%i/%i' % (categ.id, algo.id))
                self.assertTrue('mainContent' in result_dict)
                self.assertTrue(result_dict['isAdapter'])

    def test_default_cancel(self):
        """
        On cancel we should get a redirect to the back page link.
        """
        cherrypy.request.method = "POST"
        categories = dao.get_algorithm_categories()
        algo_groups = dao.get_groups_by_categories([categories[0].id])
        self._expect_redirect('/project/viewoperations/%i' % self.test_project.id, self.flow_c.default,
                              categories[0].id, algo_groups[0].id, cancel=True, back_page='operations')

    def test_default_invalid_key(self):
        """
        Pass invalid keys for adapter and step and check that you get redirected to the tvb entry
        page with error set.
        """
        self._expect_redirect('/tvb?error=True', self.flow_c.default, 'invalid', 'invalid')

    def test_read_datatype_attribute(self):
        """
        Read an attribute from a datatype.
        """
        dt = DatatypesFactory().create_datatype_with_storage("test_subject", "RAW_STATE",
                                                             'this is the stored data'.split())
        returned_data = self.flow_c.read_datatype_attribute(dt.gid, "string_data")
        self.assertEqual(returned_data, '["this", "is", "the", "stored", "data"]')
    # ... (remainder of the class omitted) ...
Author: sdiazpier | Project: tvb-framework | Lines: 101 | Source: flow_controller_test.py
Example 11: TestWorkflow
class TestWorkflow(TransactionalTestCase):
    """
    Test that workflow conversion methods are valid.
    """

    def transactional_setup_method(self):
        """
        Sets up the testing environment;
        saves config file;
        creates a test user, a test project;
        creates burst, operation, flow and workflow services
        """
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user)
        self.workflow_service = WorkflowService()
        self.burst_service = BurstService()
        self.operation_service = OperationService()
        self.flow_service = FlowService()

    def transactional_teardown_method(self):
        """
        Remove project folders and clean up database.
        """
        FilesHelper().remove_project_structure(self.test_project.name)
        self.delete_project_folders()

    def __create_complex_workflow(self, workflow_step_list):
        """
        Creates a burst with a complex workflow with a given list of workflow steps.
        :param workflow_step_list: a list of workflow steps that will be used in the
            creation of a new workflow for a new burst
        """
        burst_config = TestFactory.store_burst(self.test_project.id)
        stored_dt = datatypes_factory.DatatypesFactory()._store_datatype(Datatype1())
        first_step_algorithm = self.flow_service.get_algorithm_by_module_and_class(
            "tvb.tests.framework.adapters.testadapter1", "TestAdapterDatatypeInput")
        metadata = {DataTypeMetaData.KEY_BURST: burst_config.id}
        kwargs = {"test_dt_input": stored_dt.gid, 'test_non_dt_input': '0'}
        operations, group = self.operation_service.prepare_operations(self.test_user.id, self.test_project.id,
                                                                      first_step_algorithm,
                                                                      first_step_algorithm.algorithm_category,
                                                                      metadata, **kwargs)
        workflows = self.workflow_service.create_and_store_workflow(project_id=self.test_project.id,
                                                                    burst_id=burst_config.id,
                                                                    simulator_index=0,
                                                                    simulator_id=first_step_algorithm.id,
                                                                    operations=operations)
        self.operation_service.prepare_operations_for_workflowsteps(workflow_step_list, workflows, self.test_user.id,
                                                                    burst_config.id, self.test_project.id, group,
                                                                    operations)
        # fire the first op
        if len(operations) > 0:
            self.operation_service.launch_operation(operations[0].id, False)
        return burst_config.id

    def test_workflow_generation(self):
        """
        A simple test just for the fact that a workflow is created and run;
        no dynamic parameters are passed. In this case we create a two-step
        workflow: step1 - tvb.tests.framework.adapters.testadapter2.TestAdapter2
                  step2 - tvb.tests.framework.adapters.testadapter1.TestAdapter1
        The first adapter doesn't return anything and the second returns one
        tvb.datatypes.datatype1.Datatype1 instance. We check that the steps
        are actually run by checking that two operations are created and that
        one dataType is stored.
        """
        workflow_step_list = [TestFactory.create_workflow_step("tvb.tests.framework.adapters.testadapter2",
                                                               "TestAdapter2", step_index=1,
                                                               static_kwargs={"test2": 2}),
                              TestFactory.create_workflow_step("tvb.tests.framework.adapters.testadapter1",
                                                               "TestAdapter1", step_index=2,
                                                               static_kwargs={"test1_val1": 1, "test1_val2": 1})]
        self.__create_complex_workflow(workflow_step_list)
        stored_datatypes = dao.get_datatypes_in_project(self.test_project.id)
        assert len(stored_datatypes) == 2, "DataType from second step was not stored."
        assert stored_datatypes[0].type == 'Datatype1', "Wrong type was stored."
        assert stored_datatypes[1].type == 'Datatype1', "Wrong type was stored."
        finished, started, error, _, _ = dao.get_operation_numbers(self.test_project.id)
        assert finished == 3, "Didn't start operations for both adapters in workflow."
        assert started == 0, "Some operations from workflow didn't finish."
        assert error == 0, "Some operations finished with error status."

    def test_workflow_dynamic_params(self):
        """
        A simple test just for the fact that dynamic parameters are passed properly
        between two workflow steps:
            step1 - tvb.tests.framework.adapters.testadapter1.TestAdapter1
            step2 - tvb.tests.framework.adapters.testadapter3.TestAdapter3
        The first adapter returns a tvb.datatypes.datatype1.Datatype1 instance.
        The second adapter has this passed as a dynamic workflow parameter.
        We check that the steps are actually run by checking that two operations
    # ... (remainder of the class omitted) ...
Author: maedoc | Project: tvb-framework | Lines: 101 | Source: workflow_service_test.py
Example 12: TestOperationService
class TestOperationService(BaseTestCase):
    """
    Test class for the introspection module. Some tests from here do async launches. For those
    cases Transactional tests won't work.
    TODO: this is still to be refactored, for being huge, with duplicates and many irrelevant checks
    """

    def setup_method(self):
        """
        Reset the database before each test.
        """
        self.clean_database()
        initialize_storage()
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user)
        self.operation_service = OperationService()
        self.backup_hdd_size = TvbProfile.current.MAX_DISK_SPACE

    def teardown_method(self):
        """
        Reset the database when test is done.
        """
        TvbProfile.current.MAX_DISK_SPACE = self.backup_hdd_size
        self.clean_database()

    def _assert_no_dt2(self):
        count = dao.count_datatypes(self.test_project.id, Datatype2)
        assert 0 == count

    def _assert_stored_dt2(self, expected_cnt=1):
        count = dao.count_datatypes(self.test_project.id, Datatype2)
        assert expected_cnt == count
        datatype = dao.try_load_last_entity_of_type(self.test_project.id, Datatype2)
        assert datatype.subject == DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored."
        return datatype

    def test_datatypes_groups(self):
        """
        Tests if the dataType group is set correctly on the dataTypes resulting from the same operation group.
        """
        flow_service = FlowService()
        all_operations = dao.get_filtered_operations(self.test_project.id, None)
        assert len(all_operations) == 0, "There should be no operation"
        adapter_instance = TestFactory.create_adapter('tvb.tests.framework.adapters.testadapter3', 'TestAdapter3')
        data = {model.RANGE_PARAMETER_1: 'param_5', 'param_5': [1, 2]}
        ## Create Group of operations
        flow_service.fire_operation(adapter_instance, self.test_user, self.test_project.id, **data)
        all_operations = dao.get_filtered_operations(self.test_project.id, None)
        assert len(all_operations) == 1, "Expected one operation group"
        assert all_operations[0][2] == 2, "Expected 2 operations in group"
        operation_group_id = all_operations[0][3]
        assert operation_group_id is not None, "The operation should be part of a group."
        self.operation_service.stop_operation(all_operations[0][0])
        self.operation_service.stop_operation(all_operations[0][1])
        ## Make sure operations are executed
        self.operation_service.launch_operation(all_operations[0][0], False)
        self.operation_service.launch_operation(all_operations[0][1], False)
        resulted_datatypes = dao.get_datatype_in_group(operation_group_id=operation_group_id)
        assert len(resulted_datatypes) >= 2, "Expected at least 2, but: " + str(len(resulted_datatypes))
        dt = dao.get_datatype_by_id(resulted_datatypes[0].id)
        datatype_group = dao.get_datatypegroup_by_op_group_id(operation_group_id)
        assert dt.fk_datatype_group == datatype_group.id, "DataTypeGroup is incorrect"

    def test_initiate_operation(self):
        """
        Test the actual operation flow by executing a test adapter.
        """
        module = "tvb.tests.framework.adapters.testadapter1"
        class_name = "TestAdapter1"
        adapter = TestFactory.create_adapter(module, class_name)
        output = adapter.get_output()
        output_type = output[0].__name__
        data = {"test1_val1": 5, "test1_val2": 5}
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")
        res = self.operation_service.initiate_operation(self.test_user, self.test_project.id, adapter,
                                                        tmp_folder, **data)
        assert res.index("has finished.") > 10, "Operation didn't finish"
        group = dao.get_algorithm_by_module(module, class_name)
        assert group.module == 'tvb.tests.framework.adapters.testadapter1', "Wrong data stored."
        assert group.classname == 'TestAdapter1', "Wrong data stored."
        dts, count = dao.get_values_of_datatype(self.test_project.id, Datatype1)
        assert count == 1
        assert len(dts) == 1
        datatype = dao.get_datatype_by_id(dts[0][0])
        assert datatype.subject == DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored."
        assert datatype.type == output_type, "Wrong data stored."
    # ... (remainder of the class omitted) ...
Author: maedoc | Project: tvb-framework | Lines: 101 | Source: operation_service_test.py
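test_initiate_operation above exercises the other launch entry point: initiate_operation runs an adapter to completion and returns a human-readable status message that the test checks for "has finished.". Below is a condensed, hypothetical wrapper around that call; the adapter and temporary folder are assumed to be built exactly as in the test.

from tvb.core.services.operation_service import OperationService

def run_adapter(user, project, adapter, tmp_folder, **launch_data):
    """Hypothetical sketch of the initiate_operation call from Example 12.
    'adapter' is assumed to come from TestFactory.create_adapter and
    'tmp_folder' from FilesHelper().get_project_folder(project, "TEMP")."""
    service = OperationService()
    message = service.initiate_operation(user, project.id, adapter, tmp_folder, **launch_data)
    # The tests treat the presence of "has finished." as success.
    return "has finished." in message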
Example 13: __init__
def __init__(self):
    self.operation_service = OperationService()
    self.workflow_service = WorkflowService()
    self.logger = get_logger(self.__class__.__module__)
    self.cache_portlet_configurators = {}
Author: LauHoiYanGladys | Project: tvb-framework | Lines: 5 | Source: burst_service.py
Example 14: BurstService
class BurstService(object):
    """
    Service layer for Burst related entities.
    """

    def __init__(self):
        self.operation_service = OperationService()
        self.workflow_service = WorkflowService()
        self.logger = get_logger(self.__class__.__module__)
        self.cache_portlet_configurators = {}

    def build_portlet_interface(self, portlet_configuration, project_id):
        """
        From a portlet_id and a project_id, first build the portlet
        entity then get its configurable interface.
        :param portlet_configuration: a portlet configuration entity. It holds at the
            least the portlet_id, and in case any default parameters were saved
            they can be rebuilt from the analyzers // visualizer parameters
        :param project_id: the id of the current project
        :returns: the portlet interface will be of the following form::
            [{'interface': adapter_interface,
              'prefix': prefix_for_parameter_names,
              'subalg': {algorithm_field_name: default_algorithm_value},
              'algo_group': algorithm_group,
              'alg_ui_name': displayname},
              ......]
            A list of dictionaries for each adapter that makes up the portlet.
        """
        portlet_configurer = self._get_portlet_configurer(portlet_configuration.portlet_id)
        portlet_interface = portlet_configurer.get_configurable_interface()

        for adapter_conf in portlet_interface:
            interface = adapter_conf.interface
            itree_mngr = InputTreeManager()
            interface = itree_mngr.fill_input_tree_with_options(interface, project_id,
                                                                adapter_conf.stored_adapter.fk_category)
            adapter_conf.interface = itree_mngr.prepare_param_names(interface)

        portlet_configurer.update_default_values(portlet_interface, portlet_configuration)
        portlet_configurer.prefix_adapters_parameters(portlet_interface)
        return portlet_interface

    def _get_portlet_configurer(self, portlet_id):
        if portlet_id not in self.cache_portlet_configurators:
            portlet_entity = dao.get_portlet_by_id(portlet_id)
            if portlet_entity is None:
                raise InvalidPortletConfiguration("No portlet entity located in database with id=%s. " % portlet_id)
            self.cache_portlet_configurators[portlet_id] = PortletConfigurer(portlet_entity)
            self.logger.debug("Recently parsed portlet XML:" + str([portlet_entity]))
        return self.cache_portlet_configurators[portlet_id]

    def update_portlet_configuration(self, portlet_configuration, submited_parameters):
        """
        :param portlet_configuration: the portlet configuration that needs to be updated
        :param submited_parameters: a list of parameters as submitted from the UI. This
            is a dictionary in the form:
            {'dynamic' : {name:value pairs}, 'static' : {name:value pairs}}
            All names are prefixed with an adapter-specific generated prefix.
        """
        portlet_configurer = self._get_portlet_configurer(portlet_configuration.portlet_id)
        return portlet_configurer.update_portlet_configuration(portlet_configuration, submited_parameters)

    def new_burst_configuration(self, project_id):
        """
        Return a new burst configuration entity with all the default values.
        """
        burst_configuration = model.BurstConfiguration(project_id)
        burst_configuration.selected_tab = 0
        # Now set the default portlets for the specified burst configuration.
        # The default portlets are specified in the __init__.py script from tvb root.
        for tab_idx, value in DEFAULT_PORTLETS.items():
            for sel_idx, portlet_identifier in value.items():
                portlet = BurstService.get_portlet_by_identifier(portlet_identifier)
                if portlet is not None:
                    portlet_configuration = self.new_portlet_configuration(portlet.id, tab_idx, sel_idx,
                                                                           portlet.algorithm_identifier)
                    burst_configuration.set_portlet(tab_idx, sel_idx, portlet_configuration)
        return burst_configuration

    @staticmethod
    def _store_burst_config(burst_config):
        """
        Store a burst configuration entity.
        """
    # ... (remainder of the class omitted) ...
Author: LauHoiYanGladys | Project: tvb-framework | Lines: 101 | Source: burst_service.py
Example 15: OperationServiceTest
class OperationServiceTest(BaseTestCase):
    """
    Test class for the introspection module. Some tests from here do async launches. For those
    cases Transactional tests won't work.
    TODO: this is still to be refactored, for being huge, with duplicates and many irrelevant checks
    """

    def setUp(self):
        """
        Reset the database before each test.
        """
        self.clean_database()
        initialize_storage()
        self.test_user = TestFactory.create_user()
        self.test_project = TestFactory.create_project(self.test_user)
        self.operation_service = OperationService()
        self.backup_hdd_size = TvbProfile.current.MAX_DISK_SPACE

    def tearDown(self):
        """
        Reset the database when test is done.
        """
        TvbProfile.current.MAX_DISK_SPACE = self.backup_hdd_size
        self.clean_database()

    def _assert_no_dt2(self):
        count = dao.count_datatypes(self.test_project.id, Datatype2)
        self.assertEqual(0, count)

    def _assert_stored_dt2(self, expected_cnt=1):
        count = dao.count_datatypes(self.test_project.id, Datatype2)
        self.assertEqual(expected_cnt, count)
        datatype = dao.try_load_last_entity_of_type(self.test_project.id, Datatype2)
        self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored.")
        return datatype

    def test_datatypes_groups(self):
        """
        Tests if the dataType group is set correctly on the dataTypes resulting from the same operation group.
        """
        flow_service = FlowService()
        all_operations = dao.get_filtered_operations(self.test_project.id, None)
        self.assertEqual(len(all_operations), 0, "There should be no operation")
        algogroup = dao.find_group("tvb.tests.framework.adapters.testadapter3", "TestAdapter3")
        group, _ = flow_service.prepare_adapter(self.test_project.id, algogroup)
        adapter_instance = flow_service.build_adapter_instance(group)
        data = {model.RANGE_PARAMETER_1: "param_5", "param_5": [1, 2]}
        ## Create Group of operations
        flow_service.fire_operation(adapter_instance, self.test_user, self.test_project.id, **data)
        all_operations = dao.get_filtered_operations(self.test_project.id, None)
        self.assertEqual(len(all_operations), 1, "Expected one operation group")
        self.assertEqual(all_operations[0][2], 2, "Expected 2 operations in group")
        operation_group_id = all_operations[0][3]
        self.assertNotEquals(operation_group_id, None, "The operation should be part of a group.")
        self.operation_service.stop_operation(all_operations[0][0])
        self.operation_service.stop_operation(all_operations[0][1])
        ## Make sure operations are executed
        self.operation_service.launch_operation(all_operations[0][0], False)
        self.operation_service.launch_operation(all_operations[0][1], False)
        resulted_datatypes = dao.get_datatype_in_group(operation_group_id=operation_group_id)
        self.assertTrue(len(resulted_datatypes) >= 2, "Expected at least 2, but: " + str(len(resulted_datatypes)))
        dt = dao.get_datatype_by_id(resulted_datatypes[0].id)
        datatype_group = dao.get_datatypegroup_by_op_group_id(operation_group_id)
        self.assertEqual(dt.fk_datatype_group, datatype_group.id, "DataTypeGroup is incorrect")

    def test_initiate_operation(self):
        """
        Test the actual operation flow by executing a test adapter.
        """
        module = "tvb.tests.framework.adapters.testadapter1"
        class_name = "TestAdapter1"
        group = dao.find_group(module, class_name)
        adapter = FlowService().build_adapter_instance(group)
        output = adapter.get_output()
        output_type = output[0].__name__
        data = {"test1_val1": 5, "test1_val2": 5}
        tmp_folder = FilesHelper().get_project_folder(self.test_project, "TEMP")
        res = self.operation_service.initiate_operation(
            self.test_user, self.test_project.id, adapter, tmp_folder, **data
        )
        self.assertTrue(res.index("has finished.") > 10, "Operation didn't finish")
        group = dao.find_group(module, class_name)
        self.assertEqual(group.module, "tvb.tests.framework.adapters.testadapter1", "Wrong data stored.")
        self.assertEqual(group.classname, "TestAdapter1", "Wrong data stored.")
        dts, count = dao.get_values_of_datatype(self.test_project.id, Datatype1)
        self.assertEqual(count, 1)
        self.assertEqual(len(dts), 1)
        datatype = dao.get_datatype_by_id(dts[0][0])
        self.assertEqual(datatype.subject, DataTypeMetaData.DEFAULT_SUBJECT, "Wrong data stored.")
        self.assertEqual(datatype.type, output_type, "Wrong data stored.")

    def test_delete_dt_free_HDD_space(self):
        """
    # ... (remainder of the class omitted) ...
Author: lcosters | Project: tvb-framework | Lines: 101 | Source: operation_service_test.py
Note: The tvb.core.services.operation_service.OperationService examples in this article were compiled by 纯净天空 from source-code and documentation platforms such as GitHub and MSDocs. The code snippets are taken from open-source projects contributed by various developers, and copyright of the source code remains with the original authors. Please consult the License of the corresponding project before redistributing or using the code; do not reproduce without permission.