This article collects typical usage examples of the Python function tvb.core.entities.storage.dao.get_datatype_by_gid. If you are wondering what get_datatype_by_gid does, how to call it, or what real-world usage looks like, the hand-picked code samples below should help.
Twenty code examples of get_datatype_by_gid are listed below, sorted by popularity by default.
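Before the collected examples, here is a minimal sketch of the call pattern they all share: fetch a DataType by its GID and treat None as "not present in the database". The GID string and the surrounding project setup are hypothetical; only the dao import and the get_datatype_by_gid call are taken from the examples themselves.

from tvb.core.entities.storage import dao

# Hypothetical GID; in real code it comes from an existing DataType, an
# operation result, or a value persisted earlier in the project.
some_gid = "d6a8b2c4e0f14b5c9a7d3e1f2a3b4c5d"

# Look up the DataType entity by its globally unique identifier.
datatype = dao.get_datatype_by_gid(some_gid)

if datatype is None:
    # The examples below consistently treat None as "no such DataType in the DB".
    print("No DataType found for GID %s" % some_gid)
else:
    print("Found %s with id %s" % (datatype.type, datatype.id))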
Example 1: test_remove_project_node
def test_remove_project_node(self):
    """
    Test removing of a node from a project.
    """
    inserted_project, gid, gid_op = self._create_value_wrapper(self.test_user)
    project_to_link = model.Project("Link", self.test_user.id, "descript")
    project_to_link = dao.store_entity(project_to_link)
    exact_data = dao.get_datatype_by_gid(gid)
    dao.store_entity(model.Links(exact_data.id, project_to_link.id))
    assert dao.get_datatype_by_gid(gid) is not None, "Initialization problem!"

    operation_id = dao.get_generic_entity(model.Operation, gid_op, 'gid')[0].id
    op_folder = self.structure_helper.get_project_folder("test_proj", str(operation_id))
    assert os.path.exists(op_folder)
    sub_files = os.listdir(op_folder)
    assert 2 == len(sub_files)
    ### Validate that no more files are created than needed.

    if(dao.get_system_user() is None):
        dao.store_entity(model.User(TvbProfile.current.web.admin.SYSTEM_USER_NAME, None, None, True, None))
    self.project_service._remove_project_node_files(inserted_project.id, gid)
    sub_files = os.listdir(op_folder)
    assert 1 == len(sub_files)
    ### operation.xml file should still be there

    op_folder = self.structure_helper.get_project_folder("Link", str(operation_id + 1))
    sub_files = os.listdir(op_folder)
    assert 2 == len(sub_files)
    assert dao.get_datatype_by_gid(gid) is not None, "Data should still be in DB, because of links"
    self.project_service._remove_project_node_files(project_to_link.id, gid)

    assert dao.get_datatype_by_gid(gid) is None
    sub_files = os.listdir(op_folder)
    assert 1 == len(sub_files)
Developer: maedoc, Project: tvb-framework, Lines: 33, Source: project_service_test.py
Example 2: test_remove_project_node
def test_remove_project_node(self):
    """
    Test removing of a node from a project.
    """
    inserted_project, gid, gid_op = self._create_value_wrapper(self.test_user)
    project_to_link = model.Project("Link", self.test_user.id, "descript")
    project_to_link = dao.store_entity(project_to_link)
    exact_data = dao.get_datatype_by_gid(gid)
    dao.store_entity(model.Links(exact_data.id, project_to_link.id))
    self.assertTrue(dao.get_datatype_by_gid(gid) is not None, "Initialization problem!")

    operation_id = dao.get_generic_entity(model.Operation, gid_op, 'gid')[0].id
    op_folder = self.structure_helper.get_project_folder("test_proj", str(operation_id))
    self.assertTrue(os.path.exists(op_folder))
    sub_files = os.listdir(op_folder)
    self.assertEqual(2, len(sub_files))
    ### Validate that no more files are created than needed.

    self.project_service._remove_project_node_files(inserted_project.id, gid)
    sub_files = os.listdir(op_folder)
    self.assertEqual(1, len(sub_files))
    ### operation.xml file should still be there

    op_folder = self.structure_helper.get_project_folder("Link", str(operation_id + 1))
    sub_files = os.listdir(op_folder)
    self.assertEqual(2, len(sub_files))
    self.assertTrue(dao.get_datatype_by_gid(gid) is not None, "Data should still be in DB, because of links")
    self.project_service._remove_project_node_files(project_to_link.id, gid)

    self.assertTrue(dao.get_datatype_by_gid(gid) is None)
    sub_files = os.listdir(op_folder)
    self.assertEqual(1, len(sub_files))
Developer: sdiazpier, Project: tvb-framework, Lines: 31, Source: project_service_test.py
Example 3: test_set_visibility_datatype
def test_set_visibility_datatype(self):
    """
    Set datatype visibility to true and false and check results are updated.
    """
    datatype = DatatypesFactory().create_datatype_with_storage()
    self.assertTrue(datatype.visible)
    self.project_c.set_visibility('datatype', datatype.gid, 'False')
    datatype = dao.get_datatype_by_gid(datatype.gid)
    self.assertFalse(datatype.visible)
    self.project_c.set_visibility('datatype', datatype.gid, 'True')
    datatype = dao.get_datatype_by_gid(datatype.gid)
    self.assertTrue(datatype.visible)
Developer: LauHoiYanGladys, Project: tvb-framework, Lines: 12, Source: project_controller_test.py
Example 4: _store_imported_datatypes_in_db
def _store_imported_datatypes_in_db(self, project, all_datatypes, dt_burst_mappings, burst_ids_mapping):
    def by_time(dt):
        return dt.create_date or datetime.now()

    if burst_ids_mapping is None:
        burst_ids_mapping = {}
    if dt_burst_mappings is None:
        dt_burst_mappings = {}

    all_datatypes.sort(key=by_time)

    for datatype in all_datatypes:
        old_burst_id = dt_burst_mappings.get(datatype.gid)
        if old_burst_id is not None:
            datatype.fk_parent_burst = burst_ids_mapping[old_burst_id]

        datatype_allready_in_tvb = dao.get_datatype_by_gid(datatype.gid)

        if not datatype_allready_in_tvb:
            # Compute disk size. Similar to ABCAdapter._capture_operation_results.
            # No need to close the h5 as we have not written to it.
            associated_file = os.path.join(datatype.storage_path, datatype.get_storage_file_name())
            datatype.disk_size = FilesHelper.compute_size_on_disk(associated_file)

            self.store_datatype(datatype)
        else:
            FlowService.create_link([datatype_allready_in_tvb.id], project.id)
Developer: paolavals, Project: tvb-framework, Lines: 28, Source: import_service.py
Example 5: update_dt
def update_dt(dt_id, new_create_date):
    dt = dao.get_datatype_by_id(dt_id)
    dt.create_date = new_create_date
    dao.store_entity(dt)

    # Update MetaData in H5 as well.
    dt = dao.get_datatype_by_gid(dt.gid)
    dt.persist_full_metadata()
Developer: amitsaroj001, Project: tvb-framework, Lines: 7, Source: modify_h5_metadata.py
Example 6: set_datatype_visibility
def set_datatype_visibility(datatype_gid, is_visible):
    """
    Sets the dataType visibility. If the given dataType is a dataType group or it is part of a
    dataType group than this method will set the visibility for each dataType from this group.
    """
    def set_visibility(dt):
        """ set visibility flag, persist in db and h5"""
        dt.visible = is_visible
        dt = dao.store_entity(dt)
        dt.persist_full_metadata()

    def set_group_descendants_visibility(datatype_group_id):
        datatypes_in_group = dao.get_datatypes_from_datatype_group(datatype_group_id)
        for group_dt in datatypes_in_group:
            set_visibility(group_dt)

    datatype = dao.get_datatype_by_gid(datatype_gid)

    if isinstance(datatype, DataTypeGroup):  # datatype is a group
        set_group_descendants_visibility(datatype.id)
    elif datatype.fk_datatype_group is not None:  # datatype is member of a group
        set_group_descendants_visibility(datatype.fk_datatype_group)
        # the datatype to be updated is the parent datatype group
        datatype = dao.get_datatype_by_id(datatype.fk_datatype_group)

    # update the datatype or datatype group.
    set_visibility(datatype)
Developer: rajul, Project: tvb-framework, Lines: 27, Source: project_service.py
Example 7: __upgrade_datatype_list
def __upgrade_datatype_list(self, datatypes):
    """
    Upgrade a list of DataTypes to the current version.

    :param datatypes: The list of DataTypes that should be upgraded.

    :returns: (nr_of_dts_upgraded_fine, nr_of_dts_upgraded_fault) a two-tuple of integers representing
        the number of DataTypes for which the upgrade worked fine, and the number of DataTypes for which
        some kind of fault occurred
    """
    nr_of_dts_upgraded_fine = 0
    nr_of_dts_upgraded_fault = 0
    for datatype in datatypes:
        specific_datatype = dao.get_datatype_by_gid(datatype.gid)
        if isinstance(specific_datatype, MappedType):
            try:
                self.upgrade_file(specific_datatype.get_storage_file_path())
                nr_of_dts_upgraded_fine += 1
            except (MissingDataFileException, FileVersioningException) as ex:
                # The file is missing for some reason. Just mark the DataType as invalid.
                datatype.invalid = True
                dao.store_entity(datatype)
                nr_of_dts_upgraded_fault += 1
                self.log.exception(ex)
    return nr_of_dts_upgraded_fine, nr_of_dts_upgraded_fault
Developer: unimauro, Project: tvb-framework, Lines: 25, Source: files_update_manager.py
Example 8: update_metadata
def update_metadata(self, submit_data):
    """
    Update DataType/ DataTypeGroup metadata
    THROW StructureException when input data is invalid.
    """
    new_data = dict()
    for key in DataTypeOverlayDetails().meta_attributes_list:
        if key in submit_data:
            new_data[key] = submit_data[key]

    if new_data[CommonDetails.CODE_OPERATION_TAG] == '':
        new_data[CommonDetails.CODE_OPERATION_TAG] = None
    try:
        if (CommonDetails.CODE_OPERATION_GROUP_ID in new_data
                and new_data[CommonDetails.CODE_OPERATION_GROUP_ID]
                and new_data[CommonDetails.CODE_OPERATION_GROUP_ID] != ''):
            # We need to edit a group
            all_data_in_group = dao.get_datatype_in_group(operation_group_id=
                                                          new_data[CommonDetails.CODE_OPERATION_GROUP_ID])
            if len(all_data_in_group) < 1:
                raise StructureException("Inconsistent group, can not be updated!")
            datatype_group = dao.get_generic_entity(model.DataTypeGroup, all_data_in_group[0].fk_datatype_group)[0]
            all_data_in_group.append(datatype_group)
            for datatype in all_data_in_group:
                new_data[CommonDetails.CODE_GID] = datatype.gid
                self._edit_data(datatype, new_data, True)
        else:
            # Get the required DataType and operation from DB to store changes that will be done in XML.
            gid = new_data[CommonDetails.CODE_GID]
            datatype = dao.get_datatype_by_gid(gid)
            self._edit_data(datatype, new_data)
    except Exception, excep:
        self.logger.exception(excep)
        raise StructureException(str(excep))
Developer: rajul, Project: tvb-framework, Lines: 34, Source: project_service.py
Example 9: get_project_structure
def get_project_structure(self, project, visibility_filter, first_level, second_level, filter_value):
    """
    Find all DataTypes (including the linked ones and the groups) relevant for the current project.
    In case of a problem, will return an empty list.
    """
    metadata_list = []
    dt_list = dao.get_data_in_project(project.id, visibility_filter, filter_value)

    for dt in dt_list:
        # Prepare the DT results from DB, for usage in controller, by converting into DataTypeMetaData objects
        data = {}
        is_group = False
        group_op = None
        dt_entity = dao.get_datatype_by_gid(dt.gid)
        if dt_entity is None:
            self.logger.warning("Ignored entity (possibly removed DT class)" + str(dt))
            continue

        ## Filter by dt.type, otherwise Links to individual DT inside a group will be mistaken
        if dt.type == "DataTypeGroup" and dt.parent_operation.operation_group is not None:
            is_group = True
            group_op = dt.parent_operation.operation_group

        # All these fields are necessary here for dynamic Tree levels.
        data[DataTypeMetaData.KEY_DATATYPE_ID] = dt.id
        data[DataTypeMetaData.KEY_GID] = dt.gid
        data[DataTypeMetaData.KEY_NODE_TYPE] = dt.type
        data[DataTypeMetaData.KEY_STATE] = dt.state
        data[DataTypeMetaData.KEY_SUBJECT] = str(dt.subject)
        data[DataTypeMetaData.KEY_TITLE] = dt_entity.display_name
        data[DataTypeMetaData.KEY_RELEVANCY] = dt.visible
        data[DataTypeMetaData.KEY_LINK] = dt.parent_operation.fk_launched_in != project.id

        data[DataTypeMetaData.KEY_TAG_1] = dt.user_tag_1 if dt.user_tag_1 else ''
        data[DataTypeMetaData.KEY_TAG_2] = dt.user_tag_2 if dt.user_tag_2 else ''
        data[DataTypeMetaData.KEY_TAG_3] = dt.user_tag_3 if dt.user_tag_3 else ''
        data[DataTypeMetaData.KEY_TAG_4] = dt.user_tag_4 if dt.user_tag_4 else ''
        data[DataTypeMetaData.KEY_TAG_5] = dt.user_tag_5 if dt.user_tag_5 else ''

        # Operation related fields:
        operation_name = CommonDetails.compute_operation_name(
            dt.parent_operation.algorithm.algorithm_category.displayname,
            dt.parent_operation.algorithm.displayname)
        data[DataTypeMetaData.KEY_OPERATION_TYPE] = operation_name
        data[DataTypeMetaData.KEY_OPERATION_ALGORITHM] = dt.parent_operation.algorithm.displayname
        data[DataTypeMetaData.KEY_AUTHOR] = dt.parent_operation.user.username
        data[DataTypeMetaData.KEY_OPERATION_TAG] = group_op.name if is_group else dt.parent_operation.user_group
        data[DataTypeMetaData.KEY_OP_GROUP_ID] = group_op.id if is_group else None

        completion_date = dt.parent_operation.completion_date
        string_year = completion_date.strftime(MONTH_YEAR_FORMAT) if completion_date is not None else ""
        string_month = completion_date.strftime(DAY_MONTH_YEAR_FORMAT) if completion_date is not None else ""
        data[DataTypeMetaData.KEY_DATE] = date2string(completion_date) if (completion_date is not None) else ''
        data[DataTypeMetaData.KEY_CREATE_DATA_MONTH] = string_year
        data[DataTypeMetaData.KEY_CREATE_DATA_DAY] = string_month

        data[DataTypeMetaData.KEY_BURST] = dt._parent_burst.name if dt._parent_burst is not None else '-None-'

        metadata_list.append(DataTypeMetaData(data, dt.invalid))

    return StructureNode.metadata2tree(metadata_list, first_level, second_level, project.id, project.name)
Developer: gummadhav, Project: tvb-framework, Lines: 60, Source: project_service.py
Example 10: build_structure_for_datatype
def build_structure_for_datatype(datatype_gid):
    datatype = dao.get_datatype_by_gid(datatype_gid)
    is_group = dao.is_datatype_group(datatype_gid)

    structure = NodeStructure(datatype_gid, datatype.type)
    structure.data = NodeData.build_node_for_datatype(datatype.id, datatype.display_name, is_group=is_group)
    return structure
Developer: lcosters, Project: tvb-framework, Lines: 8, Source: graph_structures.py
Example 11: remove_datatype
def remove_datatype(self, project_id, datatype_gid, skip_validation=False):
    """
    Method used for removing a dataType. If the given dataType is a DatatypeGroup
    or a dataType from a DataTypeGroup than this method will remove the entire group.
    The operation(s) used for creating the dataType(s) will also be removed.
    """
    datatype = dao.get_datatype_by_gid(datatype_gid)
    if datatype is None:
        self.logger.warning("Attempt to delete DT[%s] which no longer exists." % datatype_gid)
        return

    user = dao.get_user_for_datatype(datatype.id)
    freed_space = datatype.disk_size or 0
    is_datatype_group = False
    if dao.is_datatype_group(datatype_gid):
        is_datatype_group = True
        freed_space = dao.get_datatype_group_disk_size(datatype.id)
    elif datatype.fk_datatype_group is not None:
        is_datatype_group = True
        datatype = dao.get_datatype_by_id(datatype.fk_datatype_group)
        freed_space = dao.get_datatype_group_disk_size(datatype.id)

    operations_set = [datatype.fk_from_operation]
    correct = True

    if is_datatype_group:
        self.logger.debug("Removing datatype group %s" % datatype)
        data_list = dao.get_datatypes_from_datatype_group(datatype.id)
        for adata in data_list:
            self._remove_project_node_files(project_id, adata.gid, skip_validation)
            if adata.fk_from_operation not in operations_set:
                operations_set.append(adata.fk_from_operation)

        datatype_group = dao.get_datatype_group_by_gid(datatype.gid)
        dao.remove_datatype(datatype_gid)
        correct = correct and dao.remove_entity(model.OperationGroup, datatype_group.fk_operation_group)
    else:
        self.logger.debug("Removing datatype %s" % datatype)
        self._remove_project_node_files(project_id, datatype.gid, skip_validation)

    ## Remove Operation entity in case no other DataType needs them.
    project = dao.get_project_by_id(project_id)
    for operation_id in operations_set:
        dependent_dt = dao.get_generic_entity(model.DataType, operation_id, "fk_from_operation")
        if len(dependent_dt) > 0:
            ### Do not remove Operation in case DataType still exist referring it.
            continue
        correct = correct and dao.remove_entity(model.Operation, operation_id)
        ## Make sure Operation folder is removed
        self.structure_helper.remove_operation_data(project.name, datatype.fk_from_operation)

    if not correct:
        raise RemoveDataTypeException("Could not remove DataType " + str(datatype_gid))

    user.used_disk_space = user.used_disk_space - freed_space
    dao.store_entity(user)
Developer: unimauro, Project: tvb-framework, Lines: 56, Source: project_service.py
Example 12: test_import_export
def test_import_export(self):
    """
    Test the import/export mechanism for a project structure.
    The project contains the following data types: Connectivity, Surface, MappedArray and ValueWrapper.
    """
    result = self.get_all_datatypes()
    expected_results = {}
    for one_data in result:
        expected_results[one_data.gid] = (one_data.module, one_data.type)

    # create an array mapped in DB
    data = {'param_1': 'some value'}
    OperationService().initiate_prelaunch(self.operation, self.adapter_instance, {}, **data)
    inserted = self.flow_service.get_available_datatypes(self.test_project.id,
                                                         "tvb.datatypes.arrays.MappedArray")[1]
    self.assertEqual(1, inserted, "Problems when inserting data")

    # create a value wrapper
    value_wrapper = self._create_value_wrapper()
    count_operations = dao.get_filtered_operations(self.test_project.id, None, is_count=True)
    self.assertEqual(2, count_operations, "Invalid ops number before export!")

    # Export project as ZIP
    self.zip_path = ExportManager().export_project(self.test_project)
    self.assertTrue(self.zip_path is not None, "Exported file is none")

    # Remove the original project
    self.project_service.remove_project(self.test_project.id)
    result, lng_ = self.project_service.retrieve_projects_for_user(self.test_user.id)
    self.assertEqual(0, len(result), "Project Not removed!")
    self.assertEqual(0, lng_, "Project Not removed!")

    # Now try to import again project
    self.import_service.import_project_structure(self.zip_path, self.test_user.id)
    result = self.project_service.retrieve_projects_for_user(self.test_user.id)[0]
    self.assertEqual(len(result), 1, "There should be only one project.")
    self.assertEqual(result[0].name, "GeneratedProject", "The project name is not correct.")
    self.assertEqual(result[0].description, "test_desc", "The project description is not correct.")
    self.test_project = result[0]

    count_operations = dao.get_filtered_operations(self.test_project.id, None, is_count=True)
    # 1 op. - import cff; 2 op. - save the array wrapper;
    self.assertEqual(2, count_operations, "Invalid ops number after export and import !")
    for gid in expected_results:
        datatype = dao.get_datatype_by_gid(gid)
        self.assertEqual(datatype.module, expected_results[gid][0], 'DataTypes not imported correctly')
        self.assertEqual(datatype.type, expected_results[gid][1], 'DataTypes not imported correctly')

    # check the value wrapper
    new_val = self.flow_service.get_available_datatypes(self.test_project.id,
                                                        "tvb.datatypes.mapped_values.ValueWrapper")[0]
    self.assertEqual(1, len(new_val), "One !=" + str(len(new_val)))
    new_val = ABCAdapter.load_entity_by_gid(new_val[0][2])
    self.assertEqual(value_wrapper.data_value, new_val.data_value, "Data value incorrect")
    self.assertEqual(value_wrapper.data_type, new_val.data_type, "Data type incorrect")
    self.assertEqual(value_wrapper.data_name, new_val.data_name, "Data name incorrect")
Developer: amitsaroj001, Project: tvb-framework, Lines: 56, Source: import_service_test.py
Example 13: test_remove_used_surface
def test_remove_used_surface(self):
    """
    Tries to remove an used surface
    """
    mapping, mapping_count = self.flow_service.get_available_datatypes(self.test_project.id, RegionMapping)
    self.assertEquals(1, mapping_count, "There should be one Mapping.")
    mapping_gid = mapping[0][2]
    mapping = ABCAdapter.load_entity_by_gid(mapping_gid)
    surface = dao.get_datatype_by_gid(mapping.surface.gid)
    self.assertEqual(surface.gid, mapping.surface.gid, "The surfaces should have the same GID")
    try:
        self.project_service.remove_datatype(self.test_project.id, surface.gid)
        self.fail("The surface should still be used by a RegionMapping " + str(surface.gid))
    except RemoveDataTypeException:
        # OK, do nothing
        pass
    res = dao.get_datatype_by_gid(surface.gid)
    self.assertEqual(surface.id, res.id, "A used surface was deleted")
Developer: unimauro, Project: tvb-framework, Lines: 19, Source: remove_test.py
Example 14: set_datatype_visibility
def set_datatype_visibility(datatype_gid, is_visible):
    """
    Sets the dataType visibility. If the given dataType is a dataType group or it is part of a
    dataType group than this method will set the visibility for each dataType from this group.
    """
    datatype = dao.get_datatype_by_gid(datatype_gid)
    if datatype.fk_datatype_group is not None:
        datatype_gid = dao.get_datatype_by_id(datatype.fk_datatype_group).gid
    dao.set_datatype_visibility(datatype_gid, is_visible)
Developer: HuifangWang, Project: the-virtual-brain-website, Lines: 10, Source: project_service.py
Example 15: test_remove_value_wrapper
def test_remove_value_wrapper(self):
    """
    Test the deletion of a value wrapper dataType
    """
    count_vals = self.count_all_entities(ValueWrapper)
    self.assertEqual(0, count_vals, "There should be no value wrapper")
    value_wrapper = self._create_value_wrapper()
    self.project_service.remove_datatype(self.test_project.id, value_wrapper.gid)
    res = dao.get_datatype_by_gid(value_wrapper.gid)
    self.assertEqual(None, res, "The value wrapper was not deleted.")
Developer: unimauro, Project: tvb-framework, Lines: 10, Source: remove_test.py
Example 16: update
def update(input_file):
    """
    :param input_file: the file that needs to be converted to a newer file storage version.
    """
    if not os.path.isfile(input_file):
        raise IncompatibleFileManagerException("The input path %s received for upgrading from 3 -> 4 is not a "
                                               "valid file on the disk." % input_file)

    folder, file_name = os.path.split(input_file)
    storage_manager = HDF5StorageManager(folder, file_name)

    root_metadata = storage_manager.get_metadata()
    if DataTypeMetaData.KEY_CLASS_NAME not in root_metadata:
        raise IncompatibleFileManagerException("File %s received for upgrading 3 -> 4 is not valid, due to missing "
                                               "metadata: %s" % (input_file, DataTypeMetaData.KEY_CLASS_NAME))
    class_name = root_metadata[DataTypeMetaData.KEY_CLASS_NAME]

    if "ProjectionSurface" in class_name and FIELD_PROJECTION_TYPE not in root_metadata:
        LOGGER.info("Updating ProjectionSurface %s from %s" % (file_name, folder))

        projection_type = projections.EEG_POLYMORPHIC_IDENTITY
        if "SEEG" in class_name:
            projection_type = projections.SEEG_POLYMORPHIC_IDENTITY
        elif "MEG" in class_name:
            projection_type = projections.MEG_POLYMORPHIC_IDENTITY

        root_metadata[FIELD_PROJECTION_TYPE] = json.dumps(projection_type)
        LOGGER.debug("Setting %s = %s" % (FIELD_PROJECTION_TYPE, projection_type))

    elif "TimeSeries" in class_name:
        LOGGER.info("Updating TS %s from %s" % (file_name, folder))

        service = ImportService()
        operation_id = int(os.path.split(folder)[1])
        dt = service.load_datatype_from_file(folder, file_name, operation_id, move=False)
        dt_db = dao.get_datatype_by_gid(dt.gid)

        if dt_db is not None:
            # DT already in DB (update of own storage, by making sure all fields are being correctly populated)
            dt_db.configure()
            dt_db.persist_full_metadata()
            try:
                # restore in DB, in case TVB 1.4 had wrongly imported flags
                dao.store_entity(dt_db)
            except Exception:
                LOGGER.exception("Could not update flags in DB, but we continue with the update!")
        elif FIELD_SURFACE_MAPPING not in root_metadata:
            # Have default values, to avoid the full project not being imported
            root_metadata[FIELD_SURFACE_MAPPING] = json.dumps(False)
            root_metadata[FIELD_VOLUME_MAPPING] = json.dumps(False)

    root_metadata[TvbProfile.current.version.DATA_VERSION_ATTRIBUTE] = TvbProfile.current.version.DATA_VERSION
    storage_manager.set_metadata(root_metadata)
Developer: gummadhav, Project: tvb-framework, Lines: 55, Source: 004_update_files.py
Example 17: _remove_entity
def _remove_entity(self, data_class, before_number):
    """
    Try to remove entity. Fail otherwise.
    """
    dts, count = self.flow_service.get_available_datatypes(self.test_project.id, data_class)
    self.assertEquals(count, before_number)
    for dt in dts:
        data_gid = dt[2]
        self.project_service.remove_datatype(self.test_project.id, data_gid)
        res = dao.get_datatype_by_gid(data_gid)
        self.assertEqual(None, res, "The entity was not deleted")
Developer: unimauro, Project: tvb-framework, Lines: 11, Source: remove_test.py
Example 18: _update_datatype_disk_size
def _update_datatype_disk_size(self, file_path):
    """
    Computes and updates the disk_size attribute of the DataType, for which was created the given file.
    """
    file_handler = FilesHelper()
    datatype_gid = self._get_manager(file_path).get_gid_attribute()
    datatype = dao.get_datatype_by_gid(datatype_gid)

    if datatype is not None:
        datatype.disk_size = file_handler.compute_size_on_disk(file_path)
        dao.store_entity(datatype)
Developer: unimauro, Project: tvb-framework, Lines: 11, Source: files_update_manager.py
Example 19: test_remove_time_series
def test_remove_time_series(self):
    """
    Tests the happy flow for the deletion of a time series.
    """
    count_ts = self.count_all_entities(TimeSeries)
    self.assertEqual(0, count_ts, "There should be no time series")
    self._create_timeseries()
    series = self.get_all_entities(TimeSeries)
    self.assertEqual(1, len(series), "There should be only one time series")
    self.project_service.remove_datatype(self.test_project.id, series[0].gid)
    res = dao.get_datatype_by_gid(series[0].gid)
    self.assertEqual(None, res, "The time series was not deleted.")
Developer: unimauro, Project: tvb-framework, Lines: 12, Source: remove_test.py
Example 20: _get_linked_datatypes_storage_path
def _get_linked_datatypes_storage_path(project):
    """
    :return: the file paths to the datatypes that are linked in `project`
    """
    paths = []
    for lnk_dt in dao.get_linked_datatypes_in_project(project.id):
        # get datatype as a mapped type
        lnk_dt = dao.get_datatype_by_gid(lnk_dt.gid)
        if lnk_dt.storage_path is not None:
            paths.append(lnk_dt.get_storage_file_path())
        else:
            LOG.warning("Problem when trying to retrieve path on %s:%s for export!" % (lnk_dt.type, lnk_dt.gid))
    return paths
Developer: LauHoiYanGladys, Project: tvb-framework, Lines: 13, Source: export_manager.py
Note: the tvb.core.entities.storage.dao.get_datatype_by_gid examples in this article were collected by 纯净天空 from source-code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by various developers; copyright of the source code remains with the original authors. For redistribution and use, please follow the license of the corresponding project; do not republish without permission.