本文整理汇总了Python中tvb.core.entities.storage.dao.get_generic_entity函数的典型用法代码示例。如果您正苦于以下问题:Python get_generic_entity函数的具体用法?Python get_generic_entity怎么用?Python get_generic_entity使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了get_generic_entity函数的20个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于我们的系统推荐出更棒的Python代码示例。
示例1: submit_connectivity
def submit_connectivity(self, original_connectivity, new_weights, new_tracts, interest_area_indexes, **_):
    """
    Handle the user's submit of an edited Connectivity from the Visualizer.

    Builds a new Connectivity from the edited weights/tracts arrays, then
    regenerates every RegionMapping and ProjectionRegionEEG that referenced
    the original connectivity so they point at the new one.

    :returns: list of newly created datatypes (connectivity first)
    """
    source_conn = self.load_entity_by_gid(original_connectivity)
    self.meta_data[DataTypeMetaData.KEY_SUBJECT] = source_conn.subject

    # The UI posts the matrices as JSON strings; decode into numpy arrays.
    weights_array = numpy.asarray(json.loads(new_weights))
    tracts_array = numpy.asarray(json.loads(new_tracts))
    selected_indexes = numpy.asarray(json.loads(interest_area_indexes))

    new_conn = source_conn.generate_new_connectivity_from_ordered_arrays(weights_array, selected_indexes,
                                                                         self.storage_path, tracts_array)
    results = [new_conn]

    # Re-create linked datatypes against the freshly generated connectivity.
    for mapping in dao.get_generic_entity(RegionMapping, original_connectivity, '_connectivity'):
        results.append(mapping.generate_new_region_mapping(new_conn.gid, self.storage_path))
    for projection in dao.get_generic_entity(ProjectionRegionEEG, original_connectivity, '_sources'):
        results.append(projection.generate_new_projection(new_conn.gid, self.storage_path))
    return results
开发者ID:unimauro,项目名称:tvb-framework,代码行数:25,代码来源:connectivity.py
示例2: prepare_parameters
def prepare_parameters(datatype_group_gid, back_page, color_metric=None, size_metric=None):
    """
    Build a `ContextDiscretePSE` for the discrete PSE viewer.

    We suppose that there are max 2 ranges and from each operation results exactly one dataType.

    :param datatype_group_gid: the group gid for the `DataType` to be visualised
    :param back_page: page identifier to navigate back to from the viewer
    :param color_metric: a list of `DataTypeMeasure` which has been executed on `datatype_group_gid`
    :param size_metric: a list of `DataTypeMeasure` which has been executed on `datatype_group_gid`
    :returns: `ContextDiscretePSE`
    :raises Exception: when `datatype_group_gid` is invalid (not in database)
    """
    datatype_group = dao.get_datatype_group_by_gid(datatype_group_gid)
    if datatype_group is None:
        # Fixed wording of the error message ("been remove" -> "been removed").
        raise Exception("Selected DataTypeGroup is no longer present in the database. "
                        "It might have been removed or the specified id is not the correct one.")
    operation_group = dao.get_operationgroup_by_id(datatype_group.fk_operation_group)
    range1_name, range1_values, range1_labels = DiscretePSEAdapter.prepare_range_labels(operation_group,
                                                                                       operation_group.range1)
    range2_name, range2_values, range2_labels = DiscretePSEAdapter.prepare_range_labels(operation_group,
                                                                                       operation_group.range2)
    pse_context = ContextDiscretePSE(datatype_group_gid, color_metric, size_metric, back_page)
    pse_context.setRanges(range1_name, range1_values, range1_labels, range2_name, range2_values, range2_labels)
    final_dict = {}
    operations = dao.get_operations_in_group(operation_group.id)
    for operation_ in operations:
        if not operation_.has_finished:
            pse_context.has_started_ops = True
        # NOTE(review): eval() on the DB-stored repr of the range dict; data is
        # written by this application, but ast.literal_eval would be safer.
        range_values = eval(operation_.range_values)
        key_1 = range_values[range1_name]
        key_2 = model.RANGE_MISSING_STRING
        if range2_name is not None:
            key_2 = range_values[range2_name]
        datatype = None
        if operation_.status == model.STATUS_FINISHED:
            datatypes = dao.get_results_for_operation(operation_.id)
            if len(datatypes) > 0:
                datatype = datatypes[0]
                if datatype.type == "DatatypeMeasure":
                    ## Load proper entity class from DB (measures directly by id).
                    measures = dao.get_generic_entity(DatatypeMeasure, datatype.id)
                else:
                    ## Otherwise find the measure that analyzed this datatype.
                    measures = dao.get_generic_entity(DatatypeMeasure, datatype.gid, '_analyzed_datatype')
                pse_context.prepare_metrics_datatype(measures, datatype)
        if key_1 not in final_dict:
            final_dict[key_1] = {}
        final_dict[key_1][key_2] = pse_context.build_node_info(operation_, datatype)
    pse_context.fill_object(final_dict)
    ## datatypes_dict is not actually used in the drawing of the PSE and actually
    ## causes problems in case of NaN values, so just remove it before creating the json
    pse_context.datatypes_dict = {}
    return pse_context
开发者ID:unimauro,项目名称:tvb-framework,代码行数:60,代码来源:pse_discrete.py
示例3: _find_metrics
def _find_metrics(operations):
    """
    Search for an operation with results, then return the metrics of the generated datatype.

    :param operations: operations belonging to one PSE range; all must be finished
    :returns: metrics dict of the first DatatypeMeasure found
    :raises LaunchException: when an operation is unfinished, or no measure exists
    """
    dt_measure = None
    for operation in operations:
        if not operation.has_finished:
            raise LaunchException("Can not display until all operations from this range are finished!")
        op_results = dao.get_results_for_operation(operation.id)
        if len(op_results):
            datatype = op_results[0]
            if datatype.type == "DatatypeMeasure":
                ## Load proper entity class from DB (measures directly by id).
                measures = dao.get_generic_entity(DatatypeMeasure, datatype.id)
            else:
                ## Otherwise find the measure that analyzed this datatype.
                measures = dao.get_generic_entity(DatatypeMeasure, datatype.gid, '_analyzed_datatype')
            # Robustness fix: the "by id" branch used to index [0] without
            # checking, raising IndexError on an empty result; both branches
            # now skip empty results consistently.
            if measures:
                dt_measure = measures[0]
                break
    if dt_measure:
        return dt_measure.metrics
    raise LaunchException("No datatypes were generated due to simulation errors. Nothing to display.")
开发者ID:maedoc,项目名称:tvb-framework,代码行数:25,代码来源:pse_isocline.py
示例4: remove_datatype
def remove_datatype(self, skip_validation=False):
    """
    Called when a Connectivity is to be removed.

    Refuses removal while dependent datatypes still reference it, then
    re-parents any child connectivities before delegating to the base remover.
    """
    if not skip_validation:
        # Each dependent datatype kind blocks the removal; checked in order.
        msg = "Connectivity cannot be removed as it is used by at least one "
        blockers = [(dao.get_generic_entity(TimeSeriesRegion, self.handled_datatype.gid, '_connectivity'),
                     " TimeSeriesRegion."),
                    (dao.get_generic_entity(RegionMapping, self.handled_datatype.gid, '_connectivity'),
                     " RegionMapping."),
                    (dao.get_generic_entity(StimuliRegion, self.handled_datatype.gid, '_connectivity'),
                     " StimuliRegion."),
                    (dao.get_generic_entity(ConnectivityMeasure, self.handled_datatype.gid, '_connectivity'),
                     " ConnectivityMeasure.")]
        for dependents, type_label in blockers:
            if len(dependents) > 0:
                raise RemoveDataTypeException(msg + type_label)
    #### Update child Connectivities, if any.
    child_conns = dao.get_generic_entity(Connectivity, self.handled_datatype.gid, '_parent_connectivity')
    if len(child_conns) > 0:
        # The first child becomes the new parent of its siblings...
        promoted_gid = child_conns[0].gid
        for sibling in child_conns[1:]:
            sibling.parent_connectivity = promoted_gid
        # ...and itself inherits the removed connectivity's parent.
        if child_conns[0]:
            child_conns[0].parent_connectivity = self.handled_datatype.parent_connectivity
        for updated_child in child_conns:
            dao.store_entity(updated_child)
    ABCRemover.remove_datatype(self, skip_validation)
开发者ID:gummadhav,项目名称:tvb-framework,代码行数:31,代码来源:remover_connectivity.py
示例5: _edit_data
def _edit_data(self, datatype, new_data, from_group=False):
    """
    Private method, used for editing a meta-data XML file and a DataType row
    for a given custom DataType entity with new dictionary of data from UI.

    :param datatype: DataType entity to update
    :param new_data: dict of UI-submitted values, keyed by CommonDetails /
        DataTypeOverlayDetails constants
    :param from_group: True when editing is applied to a whole operation group
    :raises StructureException: when a group edit has an empty/duplicate name
    """
    # A MappedType whose storage file is gone can only be flagged invalid.
    if isinstance(datatype, MappedType) and not os.path.exists(datatype.get_storage_file_path()):
        if not datatype.invalid:
            datatype.invalid = True
            dao.store_entity(datatype)
        return
    # 1. First update Operation fields:
    #    Update group field if possible
    new_group_name = new_data[CommonDetails.CODE_OPERATION_TAG]
    empty_group_value = (new_group_name is None or new_group_name == "")
    if from_group:
        if empty_group_value:
            raise StructureException("Empty group is not allowed!")
        group = dao.get_generic_entity(model.OperationGroup, new_data[CommonDetails.CODE_OPERATION_GROUP_ID])
        # Only rename when the group exists and the name actually changed.
        if group and len(group) > 0 and new_group_name != group[0].name:
            group = group[0]
            # Group names must be unique; reject duplicates.
            exists_group = dao.get_generic_entity(model.OperationGroup, new_group_name, 'name')
            if exists_group:
                raise StructureException("Group '" + new_group_name + "' already exists.")
            group.name = new_group_name
            dao.store_entity(group)
    else:
        # Single datatype: the tag is stored on its originating operation.
        operation = dao.get_operation_by_id(datatype.fk_from_operation)
        operation.user_group = new_group_name
        dao.store_entity(operation)
    # 2. Update dateType fields:
    datatype.subject = new_data[DataTypeOverlayDetails.DATA_SUBJECT]
    datatype.state = new_data[DataTypeOverlayDetails.DATA_STATE]
    # Optional user tags: only overwrite the ones present in the submit.
    if DataTypeOverlayDetails.DATA_TAG_1 in new_data:
        datatype.user_tag_1 = new_data[DataTypeOverlayDetails.DATA_TAG_1]
    if DataTypeOverlayDetails.DATA_TAG_2 in new_data:
        datatype.user_tag_2 = new_data[DataTypeOverlayDetails.DATA_TAG_2]
    if DataTypeOverlayDetails.DATA_TAG_3 in new_data:
        datatype.user_tag_3 = new_data[DataTypeOverlayDetails.DATA_TAG_3]
    if DataTypeOverlayDetails.DATA_TAG_4 in new_data:
        datatype.user_tag_4 = new_data[DataTypeOverlayDetails.DATA_TAG_4]
    if DataTypeOverlayDetails.DATA_TAG_5 in new_data:
        datatype.user_tag_5 = new_data[DataTypeOverlayDetails.DATA_TAG_5]
    datatype = dao.store_entity(datatype)
    # 3. Update MetaData in H5 as well.
    datatype.persist_full_metadata()
    # 4. Update the group_name/user_group into the operation meta-data file
    operation = dao.get_operation_by_id(datatype.fk_from_operation)
    self.structure_helper.update_operation_metadata(operation.project.name, new_group_name,
                                                    str(datatype.fk_from_operation), from_group)
开发者ID:rajul,项目名称:tvb-framework,代码行数:52,代码来源:project_service.py
示例6: _create_bench_project
def _create_bench_project():
    """
    Create a fresh benchmark project and import three reference connectivities
    from tvb_data (68, 96 and 192 regions).

    :returns: tuple (project, [conn_68, conn_96, conn_192])
    """
    prj = lab.new_project("benchmark_project_ %s" % datetime.now())
    data_dir = path.abspath(path.dirname(tvb_data.__file__))
    connectivities = []
    # One loop instead of three copy-pasted import/query pairs; also fixes the
    # misleading local name (the 192-region result used to be called conn190).
    for nr_regions in (68, 96, 192):
        zip_path = path.join(data_dir, 'connectivity', 'connectivity_%d.zip' % nr_regions)
        lab.import_conn_zip(prj.id, zip_path)
        connectivities.append(dao.get_generic_entity(Connectivity, nr_regions, "_number_of_regions")[0])
    return prj, connectivities
开发者ID:amitsaroj001,项目名称:tvb-framework,代码行数:14,代码来源:benchmark.py
示例7: launch
def launch(self, datatype_group, **kwargs):
    """
    Also overwrite launch from ABCDisplayer, since we want to handle a list of figures,
    instead of only one Matplotlib figure.

    :param datatype_group: the DataTypeGroup whose operations provide the metrics
    :param kwargs: may contain PARAM_FIGURE_SIZE with a (width, height) in pixels
    :raises LaunchException: when called before all operations in the group are finished
    """
    if self.PARAM_FIGURE_SIZE in kwargs:
        figsize = kwargs[self.PARAM_FIGURE_SIZE]
        # Pixels -> inches at matplotlib's default-ish 80 dpi.
        figsize = ((figsize[0]) / 80, (figsize[1]) / 80)
        del kwargs[self.PARAM_FIGURE_SIZE]
    else:
        figsize = (15, 7)
    operation_group = dao.get_operationgroup_by_id(datatype_group.fk_operation_group)
    _, range1_name, self.range1 = operation_group.load_range_numbers(operation_group.range1)
    _, range2_name, self.range2 = operation_group.load_range_numbers(operation_group.range2)
    # Find the first operation with results and take its DatatypeMeasure.
    for operation in dao.get_operations_in_group(operation_group.id):
        if operation.status == model.STATUS_STARTED:
            raise LaunchException("Can not display until all operations from this range are finished!")
        op_results = dao.get_results_for_operation(operation.id)
        if len(op_results):
            datatype = op_results[0]
            if datatype.type == "DatatypeMeasure":
                ## Load proper entity class from DB.
                dt_measure = dao.get_generic_entity(DatatypeMeasure, datatype.id)[0]
            else:
                dt_measure = dao.get_generic_entity(DatatypeMeasure, datatype.gid, '_analyzed_datatype')
                if dt_measure:
                    dt_measure = dt_measure[0]
            break
    # for/else: only reached when no operation produced any results.
    else:
        dt_measure = None
    figure_nrs = {}
    metrics = dt_measure.metrics if dt_measure else {}
    if metrics:
        for metric in metrics:
            # Separate plot for each metric.
            self._create_plot(metric, figsize, operation_group, range1_name, range2_name, figure_nrs)
    else:
        raise LaunchException("No datatypes were generated due to simulation errors. Nothing to display.")
    parameters = dict(title=self._ui_name, showFullToolbar=True,
                      serverIp=config.SERVER_IP, serverPort=config.MPLH5_SERVER_PORT,
                      figureNumbers=figure_nrs, metrics=metrics, figuresJSON=json.dumps(figure_nrs))
    return self.build_display_result("pse_isocline/view", parameters)
开发者ID:HuifangWang,项目名称:the-virtual-brain-website,代码行数:50,代码来源:pse_isocline.py
示例8: remove_datatype
def remove_datatype(self, skip_validation=False):
    """
    Called when a Volume is to be removed.

    Refuses removal while TimeSeriesVolume or SpatialPatternVolume datatypes
    still reference this volume.
    """
    if not skip_validation:
        associated_ts = dao.get_generic_entity(TimeSeriesVolume, self.handled_datatype.gid, '_volume')
        associated_stim = dao.get_generic_entity(SpatialPatternVolume, self.handled_datatype.gid, '_volume')
        # Fixed message: this remover handles Volume entities (queried above by
        # '_volume'), not Surface; also fixed the grammar ("because is" -> "because it is").
        error_msg = "Volume cannot be removed because it is still used by a "
        if len(associated_ts) > 0:
            raise RemoveDataTypeException(error_msg + " TimeSeriesVolume.")
        if len(associated_stim) > 0:
            raise RemoveDataTypeException(error_msg + " SpatialPatternVolume.")
    ABCRemover.remove_datatype(self, skip_validation)
开发者ID:amitsaroj001,项目名称:tvb-framework,代码行数:14,代码来源:remover_volume.py
示例9: remove_datatype
def remove_datatype(self, skip_validation=False):
    """
    Called when a TimeSeries is removed.

    Refuses removal while derived analysis results (Covariance, PCA, ICA,
    CrossCorrelation, FourierSpectrum, WaveletCoefficients, CoherenceSpectrum)
    still reference this TimeSeries, and un-links any DatatypeMeasure first.
    """
    associated_cv = dao.get_generic_entity(Covariance, self.handled_datatype.gid, '_source')
    associated_pca = dao.get_generic_entity(PrincipalComponents, self.handled_datatype.gid, '_source')
    associated_is = dao.get_generic_entity(IndependentComponents, self.handled_datatype.gid, '_source')
    associated_cc = dao.get_generic_entity(CrossCorrelation, self.handled_datatype.gid, '_source')
    associated_fr = dao.get_generic_entity(FourierSpectrum, self.handled_datatype.gid, '_source')
    associated_wv = dao.get_generic_entity(WaveletCoefficients, self.handled_datatype.gid, '_source')
    associated_cs = dao.get_generic_entity(CoherenceSpectrum, self.handled_datatype.gid, '_source')
    associated_dm = dao.get_generic_entity(DatatypeMeasure, self.handled_datatype.gid, '_analyzed_datatype')
    for datatype_measure in associated_dm:
        # BUG FIX: was `_analyed_datatype` (typo), which only created a stray
        # attribute instead of clearing the `_analyzed_datatype` link queried above.
        datatype_measure._analyzed_datatype = None
        dao.store_entity(datatype_measure)
    msg = "TimeSeries cannot be removed as it is used by at least one "
    if not skip_validation:
        if len(associated_cv) > 0:
            raise RemoveDataTypeException(msg + " Covariance.")
        if len(associated_pca) > 0:
            raise RemoveDataTypeException(msg + " PrincipalComponents.")
        if len(associated_is) > 0:
            raise RemoveDataTypeException(msg + " IndependentComponents.")
        if len(associated_cc) > 0:
            raise RemoveDataTypeException(msg + " CrossCorrelation.")
        if len(associated_fr) > 0:
            raise RemoveDataTypeException(msg + " FourierSpectrum.")
        if len(associated_wv) > 0:
            raise RemoveDataTypeException(msg + " WaveletCoefficients.")
        if len(associated_cs) > 0:
            raise RemoveDataTypeException(msg + " CoherenceSpectrum.")
    ABCRemover.remove_datatype(self, skip_validation)
开发者ID:HuifangWang,项目名称:the-virtual-brain-website,代码行数:32,代码来源:remover_timeseries.py
示例10: _check_datatype_group_removed
def _check_datatype_group_removed(self, datatype_group_id, operation_groupp_id):
    """
    Checks that both the DataTypeGroup and the OperationGroup were removed.

    The DAO calls are expected to raise when the entity is gone.
    """
    # BUG FIX: self.fail() used to sit inside the try-block, so the
    # AssertionError it raises was swallowed by `except Exception: pass`
    # and this check could never actually fail the test. The fail() calls
    # now live in the `else` clause, outside the exception guard.
    try:
        dao.get_generic_entity(model.DataTypeGroup, datatype_group_id)
    except Exception:
        pass
    else:
        self.fail("The DataTypeGroup entity was not removed.")
    try:
        dao.get_operationgroup_by_id(operation_groupp_id)
    except Exception:
        pass
    else:
        self.fail("The OperationGroup entity was not removed.")
开发者ID:LauHoiYanGladys,项目名称:tvb-framework,代码行数:15,代码来源:project_structure_test.py
示例11: test_bct_all
def test_bct_all(self):
    """
    Iterate all BCT algorithms and execute them.

    Algorithms listed in BCTTest.EXPECTED_TO_FAIL_VALIDATION must raise
    InvalidParameterException; all others must finish successfully and
    produce at least one result DataType.

    NOTE: Python 2 syntax (`except X, e`) — kept as-is for this codebase.
    """
    for i in xrange(len(self.bct_adapters)):
        for bct_identifier in self.bct_adapters[i].get_algorithms_dictionary():
            ### Prepare Operation and parameters
            algorithm = dao.get_algorithm_by_group(self.algo_groups[i].id, bct_identifier)
            operation = TestFactory.create_operation(algorithm=algorithm, test_user=self.test_user,
                                                     test_project=self.test_project,
                                                     operation_status=model.STATUS_STARTED)
            self.assertEqual(model.STATUS_STARTED, operation.status)
            ### Launch BCT algorithm
            submit_data = {self.algo_groups[i].algorithm_param_name: bct_identifier,
                           algorithm.parameter_name: self.connectivity.gid}
            try:
                OperationService().initiate_prelaunch(operation, self.bct_adapters[i], {}, **submit_data)
                # Reaching this point means validation passed; that is a test
                # failure for algorithms expected to reject their input.
                if bct_identifier in BCTTest.EXPECTED_TO_FAIL_VALIDATION:
                    raise Exception("Algorithm %s was expected to throw input validation "
                                    "exception, but did not!" % (bct_identifier,))
                operation = dao.get_operation_by_id(operation.id)
                ### Check that operation status after execution is success.
                self.assertEqual(STATUS_FINISHED, operation.status)
                ### Make sure at least one result exists for each BCT algorithm
                results = dao.get_generic_entity(model.DataType, operation.id, 'fk_from_operation')
                self.assertTrue(len(results) > 0)
            except InvalidParameterException, excep:
                ## Some algorithms are expected to throw validation exception.
                if bct_identifier not in BCTTest.EXPECTED_TO_FAIL_VALIDATION:
                    raise excep
开发者ID:amitsaroj001,项目名称:tvb-framework,代码行数:32,代码来源:bct_test.py
示例12: upgrade
def upgrade(migrate_engine):
    """
    Upgrade operations go here.
    Don't create your own engine; bind migrate_engine to your metadata.

    Adds the two new range columns to DATA_TYPES_GROUPS, then back-fills
    `no_of_ranges` for pre-existing DataTypeGroups from their OperationGroup.
    """
    meta.bind = migrate_engine
    table = meta.tables['DATA_TYPES_GROUPS']
    create_column(COL_RANGES_1, table)
    create_column(COL_RANGES_2, table)
    try:
        ## Iterate DataTypeGroups from previous code-versions and try to update value for the new column.
        # "0" matches rows still holding the column default (no_of_ranges == 0).
        previous_groups = dao.get_generic_entity(model.DataTypeGroup, "0", "no_of_ranges")
        for group in previous_groups:
            operation_group = dao.get_operationgroup_by_id(group.fk_operation_group)
            #group.only_numeric_ranges = operation_group.has_only_numeric_ranges
            # Count defined ranges from the highest down; range3 implies 1 and 2.
            if operation_group.range3 is not None:
                group.no_of_ranges = 3
            elif operation_group.range2 is not None:
                group.no_of_ranges = 2
            elif operation_group.range1 is not None:
                group.no_of_ranges = 1
            else:
                group.no_of_ranges = 0
            dao.store_entity(group)
    except Exception, excep:
        ## we can live with a column only having default value. We will not stop the startup.
        logger = get_logger(__name__)
        logger.exception(excep)
开发者ID:amitsaroj001,项目名称:tvb-framework,代码行数:35,代码来源:004_update_db.py
示例13: test_noise_2d_happy_flow
def test_noise_2d_happy_flow(self):
    """
    Test a simulation with noise.

    First run: a full 2 x CONNECTIVITY_NODES nsig matrix for additive noise;
    second run: a scalar nsig '[1]'. Both configure the HeunStochastic
    integrator through the UI parameter tree and check the noise shape
    before launching.
    """
    SIMULATOR_PARAMETERS['integrator'] = u'HeunStochastic'
    # One noise dispersion value per (state-variable, node) pair.
    noise_2d_config = [[1 for _ in xrange(self.CONNECTIVITY_NODES)] for _ in xrange(2)]
    SIMULATOR_PARAMETERS['integrator_parameters_option_HeunStochastic_dt'] = u'0.01220703125'
    SIMULATOR_PARAMETERS['integrator_parameters_option_HeunStochastic_noise'] = u'Additive'
    SIMULATOR_PARAMETERS['integrator_parameters_option_HeunStochastic_noise_parameters_option_Additive_nsig'] = str(noise_2d_config)
    SIMULATOR_PARAMETERS['integrator_parameters_option_HeunStochastic_noise_parameters_option_Additive_ntau'] = u'0.0'
    SIMULATOR_PARAMETERS['integrator_parameters_option_HeunStochastic_noise_parameters_option_Additive_random_stream'] = u'RandomStream'
    SIMULATOR_PARAMETERS['integrator_parameters_option_HeunStochastic_noise_parameters_option_Additive_random_stream_parameters_option_RandomStream_init_seed'] = u'42'
    filtered_params = self.simulator_adapter.prepare_ui_inputs(SIMULATOR_PARAMETERS)
    self.simulator_adapter.configure(**filtered_params)
    if hasattr(self.simulator_adapter, 'algorithm'):
        # Matrix nsig must have been parsed to shape (state_vars, nodes, modes).
        self.assertEqual((2, 74, 1), self.simulator_adapter.algorithm.integrator.noise.nsig.shape)
    else:
        self.fail("Simulator adapter was not initialized properly")
    OperationService().initiate_prelaunch(self.operation, self.simulator_adapter, {}, **SIMULATOR_PARAMETERS)
    sim_result = dao.get_generic_entity(TimeSeriesRegion, 'TimeSeriesRegion', 'type')[0]
    self.assertEquals(sim_result.read_data_shape(), (32, 1, self.CONNECTIVITY_NODES, 1))
    # Second run: scalar noise configuration.
    SIMULATOR_PARAMETERS['integrator_parameters_option_HeunStochastic_noise_parameters_option_Additive_nsig'] = '[1]'
    filtered_params = self.simulator_adapter.prepare_ui_inputs(SIMULATOR_PARAMETERS)
    self.simulator_adapter.configure(**filtered_params)
    if hasattr(self.simulator_adapter, 'algorithm'):
        self.assertEqual((1,), self.simulator_adapter.algorithm.integrator.noise.nsig.shape)
    else:
        self.fail("Simulator adapter was not initialized properly")
    OperationService().initiate_prelaunch(self.operation, self.simulator_adapter, {}, **SIMULATOR_PARAMETERS)
开发者ID:sdiazpier,项目名称:tvb-framework,代码行数:30,代码来源:simulator_adapter_test.py
示例14: test_happy_flow_launch
def test_happy_flow_launch(self):
    """
    Launch a simulation exactly as the UI would, then verify the shape of the
    resulting TimeSeriesRegion.
    """
    service = OperationService()
    service.initiate_prelaunch(self.operation, self.simulator_adapter, {}, **SIMULATOR_PARAMETERS)
    results = dao.get_generic_entity(TimeSeriesRegion, 'TimeSeriesRegion', 'type')
    expected_shape = (32, 1, self.CONNECTIVITY_NODES, 1)
    self.assertEquals(results[0].read_data_shape(), expected_shape)
开发者ID:sdiazpier,项目名称:tvb-framework,代码行数:7,代码来源:simulator_adapter_test.py
示例15: _populate_values
def _populate_values(data_list, type_, category_key):
    """
    Populate meta-data fields for data_list (list of DataTypes).
    Private method, to be called recursively.
    It will receive a list of Attributes, and it will populate 'options'
    entry with data references from DB.

    :param data_list: DB rows; indices used below: [0]=id, [2]=gid,
        [3]=subject, [4]=completion date, [5]=group, [6]=group name
    :param type_: entity class to load each gid as
    :param category_key: optional category id; may prepend an "All" option
    :returns: list of {KEY_NAME, KEY_VALUE} option dicts
    """
    values = []
    # Consistency with the newer sibling implementation: collect gids in a
    # list and join once, instead of string concatenation plus a [:-1] slice.
    all_field_values = []
    for value in data_list:
        # Here we only populate with DB data, actual
        # XML check will be done after select and submit.
        entity_gid = value[2]
        actual_entity = dao.get_generic_entity(type_, entity_gid, "gid")
        display_name = ''
        if actual_entity is not None and len(actual_entity) > 0 and isinstance(actual_entity[0], model.DataType):
            display_name = actual_entity[0].display_name
        display_name += ' - ' + (value[3] or "None ")           # subject
        if value[5]:                                            # part of a group
            display_name += ' - From: ' + str(value[5])
        else:
            display_name += utils.date2string(value[4])         # completion date
        if value[6]:                                            # group name
            display_name += ' - ' + str(value[6])
        display_name += ' - ID:' + str(value[0])
        all_field_values.append(str(entity_gid))
        values.append({KEY_NAME: display_name, KEY_VALUE: entity_gid})
    if category_key is not None:
        category = dao.get_category_by_id(category_key)
        if not category.display and not category.rawinput and len(data_list) > 1:
            values.insert(0, {KEY_NAME: "All", KEY_VALUE: ','.join(all_field_values)})
    return values
开发者ID:gummadhav,项目名称:tvb-framework,代码行数:33,代码来源:input_tree.py
示例16: test_remove_project_node
def test_remove_project_node(self):
    """
    Test removing of a node from a project.

    Creates a datatype linked into a second project, removes it from the
    first project (data survives because of the link), then removes it from
    the linked project (data is gone).
    """
    inserted_project, gid, gid_op = self._create_value_wrapper(self.test_user)
    project_to_link = model.Project("Link", self.test_user.id, "descript")
    project_to_link = dao.store_entity(project_to_link)
    exact_data = dao.get_datatype_by_gid(gid)
    # Link the datatype into the second project.
    dao.store_entity(model.Links(exact_data.id, project_to_link.id))
    self.assertTrue(dao.get_datatype_by_gid(gid) is not None, "Initialization problem!")
    operation_id = dao.get_generic_entity(model.Operation, gid_op, 'gid')[0].id
    op_folder = self.structure_helper.get_project_folder("test_proj", str(operation_id))
    self.assertTrue(os.path.exists(op_folder))
    sub_files = os.listdir(op_folder)
    self.assertEqual(2, len(sub_files))
    ### Validate that no more files are created than needed.
    self.project_service._remove_project_node_files(inserted_project.id, gid)
    sub_files = os.listdir(op_folder)
    self.assertEqual(1, len(sub_files))
    ### operation.xml file should still be there
    op_folder = self.structure_helper.get_project_folder("Link", str(operation_id + 1))
    sub_files = os.listdir(op_folder)
    self.assertEqual(2, len(sub_files))
    self.assertTrue(dao.get_datatype_by_gid(gid) is not None, "Data should still be in DB, because of links")
    # Removing from the last project holding it deletes the datatype for real.
    self.project_service._remove_project_node_files(project_to_link.id, gid)
    self.assertTrue(dao.get_datatype_by_gid(gid) is None)
    sub_files = os.listdir(op_folder)
    self.assertEqual(1, len(sub_files))
开发者ID:sdiazpier,项目名称:tvb-framework,代码行数:31,代码来源:project_service_test.py
示例17: _populate_values
def _populate_values(data_list, type_, category_key, complex_dt_attributes=None):
    """
    Build UI 'options' entries for a list of DataType DB rows.

    Private method, to be called recursively. Each row is turned into a
    {KEY_NAME, KEY_VALUE} dict; when the category allows it, an extra "All"
    option holding every gid is prepended.
    """
    options = []
    gids = []
    for id_, _, entity_gid, subject, completion_date, group, gr_name, tag1 in data_list:
        # Only DB data is used here; the actual XML check happens after
        # select and submit.
        matching = dao.get_generic_entity(type_, entity_gid, "gid")
        if matching is not None and len(matching) > 0 and isinstance(matching[0], model.DataType):
            label = matching[0].display_name
        else:
            label = ''
        label += ' - ' + (subject or "None ")
        label += (' - From: ' + str(group)) if group else utils.date2string(completion_date)
        if gr_name:
            label += ' - ' + str(gr_name)
        label += ' - ID:' + str(id_)
        gids.append(str(entity_gid))
        option = {KEY_NAME: label, KEY_VALUE: entity_gid}
        if complex_dt_attributes is not None:
            ### TODO apply filter on sub-attributes
            option[KEY_ATTRIBUTES] = complex_dt_attributes  # copy of complex dtype attributes on all db options
        options.append(option)
    if category_key is not None:
        category = dao.get_category_by_id(category_key)
        if not category.display and not category.rawinput and len(data_list) > 1:
            options.insert(0, {KEY_NAME: "All", KEY_VALUE: ','.join(gids)})
    return options
开发者ID:gummadhav,项目名称:tvb-framework,代码行数:35,代码来源:input_tree2.py
示例18: test_db_mapping
def test_db_mapping(self):
    """ Test DB storage/retrieval of a simple traited attribute"""
    # Make sure all mapped tables exist in the test DB.
    session = SA_SESSIONMAKER()
    model.Base.metadata.create_all(bind=session.connection())
    session.commit()
    session.close()
    # test data: one value per mapped attribute kind.
    dikt = {'a': 6}
    tup = ('5', 9.348)
    dtype = numpy.dtype(float)
    json = {'a': 'asdf', 'b': {'23': '687568'}}
    test_inst = MappedTestClass()
    # deepcopy so the later equality checks compare against pristine values.
    test_inst.dikt = copy.deepcopy(dikt)
    test_inst.tup = copy.deepcopy(tup)
    test_inst.dtype = copy.deepcopy(dtype)
    test_inst.json = copy.deepcopy(json)
    test_inst.set_operation_id(self.operation.id)
    test_inst = dao.store_entity(test_inst)
    # Round-trip: reload by gid and verify every attribute survived storage.
    test_inst = dao.get_generic_entity(MappedTestClass, test_inst.gid, 'gid')[0]
    self.assertEqual(test_inst.dikt, dikt)
    self.assertEqual(test_inst.tup, tup)
    self.assertEqual(test_inst.dtype, dtype)
    self.assertEqual(test_inst.json, json)
开发者ID:LauHoiYanGladys,项目名称:tvb-framework,代码行数:26,代码来源:mapping_test.py
示例19: update_metadata
def update_metadata(self, submit_data):
    """
    Update DataType/ DataTypeGroup metadata
    THROW StructureException when input data is invalid.

    :param submit_data: UI dict; relevant keys are the entries of
        DataTypeOverlayDetails().meta_attributes_list
    """
    # Keep only the recognized meta attributes from the submit.
    new_data = dict()
    for key in DataTypeOverlayDetails().meta_attributes_list:
        if key in submit_data:
            new_data[key] = submit_data[key]
    # NOTE(review): assumes CODE_OPERATION_TAG is always present in the
    # submit; a missing key would raise KeyError here — confirm with callers.
    if new_data[CommonDetails.CODE_OPERATION_TAG] == '':
        new_data[CommonDetails.CODE_OPERATION_TAG] = None
    try:
        if (CommonDetails.CODE_OPERATION_GROUP_ID in new_data
                and new_data[CommonDetails.CODE_OPERATION_GROUP_ID]
                and new_data[CommonDetails.CODE_OPERATION_GROUP_ID] != ''):
            # We need to edit a group: apply the change to every datatype in
            # it, plus the DataTypeGroup entity itself.
            all_data_in_group = dao.get_datatype_in_group(operation_group_id=
                                                          new_data[CommonDetails.CODE_OPERATION_GROUP_ID])
            if len(all_data_in_group) < 1:
                raise StructureException("Inconsistent group, can not be updated!")
            datatype_group = dao.get_generic_entity(model.DataTypeGroup, all_data_in_group[0].fk_datatype_group)[0]
            all_data_in_group.append(datatype_group)
            for datatype in all_data_in_group:
                new_data[CommonDetails.CODE_GID] = datatype.gid
                self._edit_data(datatype, new_data, True)
        else:
            # Get the required DataType and operation from DB to store changes that will be done in XML.
            gid = new_data[CommonDetails.CODE_GID]
            datatype = dao.get_datatype_by_gid(gid)
            self._edit_data(datatype, new_data)
    except Exception, excep:
        self.logger.exception(excep)
        raise StructureException(str(excep))
开发者ID:rajul,项目名称:tvb-framework,代码行数:34,代码来源:project_service.py
示例20: test_remove_project_node
def test_remove_project_node(self):
    """
    Test removing of a node from a project.

    Creates a datatype linked into a second project, removes it from the
    first project (data survives because of the link), then removes it from
    the linked project (data is gone). pytest-assert variant.
    """
    inserted_project, gid, gid_op = self._create_value_wrapper(self.test_user)
    project_to_link = model.Project("Link", self.test_user.id, "descript")
    project_to_link = dao.store_entity(project_to_link)
    exact_data = dao.get_datatype_by_gid(gid)
    # Link the datatype into the second project.
    dao.store_entity(model.Links(exact_data.id, project_to_link.id))
    assert dao.get_datatype_by_gid(gid) is not None, "Initialization problem!"
    operation_id = dao.get_generic_entity(model.Operation, gid_op, 'gid')[0].id
    op_folder = self.structure_helper.get_project_folder("test_proj", str(operation_id))
    assert os.path.exists(op_folder)
    sub_files = os.listdir(op_folder)
    assert 2 == len(sub_files)
    ### Validate that no more files are created than needed.
    # Removal runs as the system user; make sure one exists.
    if(dao.get_system_user() is None):
        dao.store_entity(model.User(TvbProfile.current.web.admin.SYSTEM_USER_NAME, None, None, True, None))
    self.project_service._remove_project_node_files(inserted_project.id, gid)
    sub_files = os.listdir(op_folder)
    assert 1 == len(sub_files)
    ### operation.xml file should still be there
    op_folder = self.structure_helper.get_project_folder("Link", str(operation_id + 1))
    sub_files = os.listdir(op_folder)
    assert 2 == len(sub_files)
    assert dao.get_datatype_by_gid(gid) is not None, "Data should still be in DB, because of links"
    # Removing from the last project holding it deletes the datatype for real.
    self.project_service._remove_project_node_files(project_to_link.id, gid)
    assert dao.get_datatype_by_gid(gid) is None
    sub_files = os.listdir(op_folder)
    assert 1 == len(sub_files)
开发者ID:maedoc,项目名称:tvb-framework,代码行数:33,代码来源:project_service_test.py
注:本文中的tvb.core.entities.storage.dao.get_generic_entity函数示例由纯净天空整理自Github/MSDocs等源码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。
请发表评论