本文整理汇总了Python中tool_shed.util.encoding_util.tool_shed_decode函数的典型用法代码示例。如果您正苦于以下问题:Python tool_shed_decode函数的具体用法?Python tool_shed_decode怎么用?Python tool_shed_decode使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了tool_shed_decode函数的20个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于我们的系统推荐出更棒的Python代码示例。
示例1: check_for_tool_dependencies
def check_for_tool_dependencies( self, trans, migration_stage ):
    """
    Parse the 000x_tools.xml file associated with migration_stage and, for each
    <repository> entry, ask the tool shed for its tool dependency definitions.

    Returns a list of ( repository_name, tool_dependencies ) tuples, where
    tool_dependencies is a list of ( name, version, type, readme ) tuples.
    """
    # Locate the 000x_tools.xml file associated with migration_stage.
    tools_xml_file_path = os.path.abspath(
        os.path.join( trans.app.config.root, 'scripts', 'migrate_tools', '%04d_tools.xml' % migration_stage ) )
    tree = galaxy.util.parse_xml( tools_xml_file_path )
    root = tree.getroot()
    tool_shed = root.get( 'name' )
    tool_shed_url = self.get_tool_shed_url_from_tools_xml_file_path( trans, tool_shed )
    repo_name_dependency_tups = []
    if not tool_shed_url:
        # Without a resolvable shed URL there is nothing to query.
        return repo_name_dependency_tups
    for repository_elem in ( elem for elem in root if elem.tag == 'repository' ):
        repository_name = repository_elem.get( 'name' )
        changeset_revision = repository_elem.get( 'changeset_revision' )
        url = '%s/repository/get_tool_dependencies?name=%s&owner=devteam&changeset_revision=%s&from_install_manager=True' % \
            ( tool_shed_url, repository_name, changeset_revision )
        response_text = common_util.tool_shed_get( trans.app, tool_shed_url, url )
        dependency_tups = []
        if response_text:
            # The shed returns an encoded dict keyed by dependency; only the
            # requirement values are needed here.
            decoded_dependencies = encoding_util.tool_shed_decode( response_text )
            for requirements_dict in decoded_dependencies.values():
                dependency_tups.append( ( requirements_dict[ 'name' ],
                                          requirements_dict[ 'version' ],
                                          requirements_dict[ 'type' ],
                                          requirements_dict.get( 'readme', '' ) ) )
        repo_name_dependency_tups.append( ( repository_name, dependency_tups ) )
    return repo_name_dependency_tups
开发者ID:knowingchaos,项目名称:galaxy,代码行数:28,代码来源:admin.py
示例2: check_for_tool_dependencies
def check_for_tool_dependencies( self, trans, migration_stage ):
    """
    Read the 000x_tools.xml file for migration_stage and collect, per listed
    repository, the tool dependency definitions reported by the tool shed.

    Returns a list of ( repository_name, tool_dependencies ) tuples, where
    tool_dependencies is a list of ( name, version, type, readme ) tuples.
    """
    # Locate the 000x_tools.xml file associated with migration_stage.
    tools_xml_file_path = os.path.abspath(
        os.path.join( trans.app.config.root, 'scripts', 'migrate_tools', '%04d_tools.xml' % migration_stage ) )
    tree = galaxy.util.parse_xml( tools_xml_file_path )
    root = tree.getroot()
    shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( trans.app, root.get( 'name' ) )
    repo_name_dependency_tups = []
    if shed_url:
        for repository_elem in ( elem for elem in root if elem.tag == 'repository' ):
            repository_name = repository_elem.get( 'name' )
            params = dict( name=repository_name,
                           owner='devteam',
                           changeset_revision=repository_elem.get( 'changeset_revision' ) )
            pathspec = [ 'repository', 'get_tool_dependencies' ]
            # Credentials for this shed come from the registry's auth mapping.
            response_text = url_get( shed_url,
                                     password_mgr=self.app.tool_shed_registry.url_auth( shed_url ),
                                     pathspec=pathspec,
                                     params=params )
            dependency_tups = []
            if response_text:
                decoded = encoding_util.tool_shed_decode( response_text )
                for requirements_dict in decoded.values():
                    dependency_tups.append( ( requirements_dict[ 'name' ],
                                              requirements_dict[ 'version' ],
                                              requirements_dict[ 'type' ],
                                              requirements_dict.get( 'readme', '' ) ) )
            repo_name_dependency_tups.append( ( repository_name, dependency_tups ) )
    return repo_name_dependency_tups
开发者ID:ashvark,项目名称:galaxy,代码行数:28,代码来源:admin.py
示例3: check_for_missing_tools
def check_for_missing_tools( app, tool_panel_configs, latest_tool_migration_script_number ):
    """
    Compare the repositories/tools listed in the 000x_tools.xml file for the
    current migrate_tools script number against the proprietary tool panel
    configs, collecting tool configs that have not yet been migrated.

    Returns a two-tuple ( tool_shed_accessible, missing_tool_configs_dict ).
    tool_shed_accessible is False when the shed could not be reached (or the
    migration XML could not be parsed), in which case the dict is empty.
    """
    # Get the 000x_tools.xml file associated with the current migrate_tools version number.
    tools_xml_file_path = os.path.abspath( os.path.join( 'scripts', 'migrate_tools', '%04d_tools.xml' % latest_tool_migration_script_number ) )
    # Parse the XML and load the file attributes for later checking against the
    # proprietary tool_panel_config.
    migrated_tool_configs_dict = odict()
    tree, error_message = xml_util.parse_xml( tools_xml_file_path )
    if tree is None:
        return False, odict()
    root = tree.getroot()
    tool_shed = root.get( 'name' )
    tool_shed_url = get_tool_shed_url_from_tools_xml_file_path( app, tool_shed )
    # The default behavior is that the tool shed is down.
    tool_shed_accessible = False
    missing_tool_configs_dict = odict()
    if tool_shed_url:
        for elem in root:
            if elem.tag == 'repository':
                tool_dependencies = []
                tool_dependencies_dict = {}
                repository_name = elem.get( 'name' )
                changeset_revision = elem.get( 'changeset_revision' )
                url = '%s/repository/get_tool_dependencies?name=%s&owner=%s&changeset_revision=%s&from_install_manager=True' % \
                    ( tool_shed_url, repository_name, REPOSITORY_OWNER, changeset_revision )
                try:
                    text = tool_shed_get( app, tool_shed_url, url )
                    tool_shed_accessible = True
                except Exception as e:
                    # Tool shed may be unavailable - we have to set tool_shed_accessible
                    # since we're looping.
                    tool_shed_accessible = False
                    print( "The URL\n%s\nraised the exception:\n%s\n" % ( url, str( e ) ) )
                if tool_shed_accessible:
                    if text:
                        tool_dependencies_dict = encoding_util.tool_shed_decode( text )
                        for dependency_key, requirements_dict in tool_dependencies_dict.items():
                            tool_dependency_name = requirements_dict[ 'name' ]
                            tool_dependency_version = requirements_dict[ 'version' ]
                            tool_dependency_type = requirements_dict[ 'type' ]
                            tool_dependency_readme = requirements_dict.get( 'readme', '' )
                            tool_dependencies.append( ( tool_dependency_name, tool_dependency_version, tool_dependency_type, tool_dependency_readme ) )
                    for tool_elem in elem.findall( 'tool' ):
                        migrated_tool_configs_dict[ tool_elem.get( 'file' ) ] = tool_dependencies
        if tool_shed_accessible:
            # Parse the proprietary tool_panel_configs (the default is tool_conf.xml)
            # and generate the list of missing tool config file names.
            for tool_panel_config in tool_panel_configs:
                tree, error_message = xml_util.parse_xml( tool_panel_config )
                if tree:
                    root = tree.getroot()
                    for elem in root:
                        if elem.tag == 'tool':
                            missing_tool_configs_dict = check_tool_tag_set( elem, migrated_tool_configs_dict, missing_tool_configs_dict )
                        elif elem.tag == 'section':
                            for section_elem in elem:
                                if section_elem.tag == 'tool':
                                    missing_tool_configs_dict = check_tool_tag_set( section_elem, migrated_tool_configs_dict, missing_tool_configs_dict )
    # Return the accumulated results; the original snippet built
    # missing_tool_configs_dict but fell off the end without returning it.
    return tool_shed_accessible, missing_tool_configs_dict
开发者ID:knowingchaos,项目名称:galaxy,代码行数:54,代码来源:common_util.py
示例4: generate_workflow_image
def generate_workflow_image( trans, workflow_name, repository_metadata_id=None, repository_id=None ):
    """
    Return an svg image representation of a workflow dictionary created when the
    workflow was exported.  This method is called from both Galaxy and the tool
    shed.  When called from the tool shed, repository_metadata_id will have a
    value and repository_id will be None.  When called from Galaxy,
    repository_metadata_id will be None and repository_id will have a value.
    """
    workflow_name = encoding_util.tool_shed_decode( workflow_name )
    if trans.webapp.name == 'tool_shed':
        # Tool shed side: resolve everything through the repository metadata record.
        repository_metadata = metadata_util.get_repository_metadata_by_id( trans.app, repository_metadata_id )
        repository_id = trans.security.encode_id( repository_metadata.repository_id )
        changeset_revision = repository_metadata.changeset_revision
        metadata = repository_metadata.metadata
    else:
        # Galaxy side: resolve through the installed tool shed repository.
        repository = suc.get_tool_shed_repository_by_id( trans.app, repository_id )
        changeset_revision = repository.changeset_revision
        metadata = repository.metadata
    # metadata[ 'workflows' ] is a list of
    # [ <relative path to the .ga file in the repository>, <exported workflow dict> ]
    # pairs; select the dict whose name matches the requested workflow.
    for workflow_tup in metadata[ 'workflows' ]:
        workflow_dict = workflow_tup[ 1 ]
        if workflow_dict[ 'name' ] == workflow_name:
            break
    tools_metadata = metadata[ 'tools' ] if 'tools' in metadata else []
    workflow, missing_tool_tups = get_workflow_from_dict( trans=trans,
                                                          workflow_dict=workflow_dict,
                                                          tools_metadata=tools_metadata,
                                                          repository_id=repository_id,
                                                          changeset_revision=changeset_revision )
    workflow_canvas = WorkflowCanvas()
    canvas = workflow_canvas.canvas
    # Store px width for boxes of each step.
    for step in workflow.steps:
        step.upgrade_messages = {}
        module = module_factory.from_workflow_step( trans, repository_id, changeset_revision, tools_metadata, step )
        has_tool_errors = module.type == 'tool' and not module.tool
        workflow_canvas.populate_data_for_step( step,
                                                get_workflow_module_name( module, missing_tool_tups ),
                                                get_workflow_data_inputs( step, module ),
                                                get_workflow_data_outputs( step, module, workflow.steps ),
                                                tool_errors=has_tool_errors )
    workflow_canvas.add_steps( highlight_errors=True )
    workflow_canvas.finish()
    trans.response.set_content_type( "image/svg+xml" )
    return canvas.tostring()
开发者ID:BenjaminHCCarr,项目名称:galaxy,代码行数:54,代码来源:workflow_util.py
示例5: get_repository_dependencies
def get_repository_dependencies(app, tool_shed_url, repository_name, repository_owner, changeset_revision):
    """
    Ask the tool shed at tool_shed_url for the repository dependency definitions
    of the given repository revision.

    Returns (tool_shed_accessible, repository_dependencies_dict); the dict is
    empty when the shed is unreachable or returned an empty payload.
    """
    repository_dependencies_dict = {}
    params = dict(name=repository_name, owner=repository_owner, changeset_revision=changeset_revision)
    pathspec = ['repository', 'get_repository_dependencies']
    try:
        raw_text = util.url_get(tool_shed_url, password_mgr=app.tool_shed_registry.url_auth(tool_shed_url), pathspec=pathspec, params=params)
        tool_shed_accessible = True
    except Exception as e:
        tool_shed_accessible = False
        log.warning("The URL\n%s\nraised the exception:\n%s\n", util.build_url(tool_shed_url, pathspec=pathspec, params=params), e)
    # A body of two characters or fewer is an empty JSON-encoded payload.
    if tool_shed_accessible and len(raw_text) > 2:
        repository_dependencies_dict = encoding_util.tool_shed_decode(json.loads(raw_text))
    return tool_shed_accessible, repository_dependencies_dict
开发者ID:ImmPortDB,项目名称:immport-galaxy,代码行数:16,代码来源:common_util.py
示例6: get_update_to_changeset_revision_and_ctx_rev
def get_update_to_changeset_revision_and_ctx_rev( self, repository ):
    """
    Return the changeset revision hash to which the repository can be updated.

    Queries the repository's tool shed and returns a dictionary of capability
    flags plus 'changeset_revision' and 'ctx_rev'.  On any error every flag is
    False and the revision values are None.  If the shed responds with an empty
    payload the returned dict is empty (preserved original behavior).
    """
    # Boolean capability keys returned by the tool shed; all default to False.
    flag_keys = ( 'includes_data_managers',
                  'includes_datatypes',
                  'includes_tools',
                  'includes_tools_for_display_in_tool_panel',
                  'includes_tool_dependencies',
                  'includes_workflows',
                  'has_repository_dependencies',
                  'has_repository_dependencies_only_if_compiling_contained_td' )
    changeset_revision_dict = {}
    tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( self.app, str( repository.tool_shed ) )
    params = dict( name=str( repository.name ),
                   owner=str( repository.owner ),
                   changeset_revision=str( repository.installed_changeset_revision ) )
    pathspec = [ 'repository', 'get_changeset_revision_and_ctx_rev' ]
    try:
        encoded_update_dict = util.url_get( tool_shed_url,
                                            password_mgr=self.app.tool_shed_registry.url_auth( tool_shed_url ),
                                            pathspec=pathspec,
                                            params=params )
        if encoded_update_dict:
            update_dict = encoding_util.tool_shed_decode( encoded_update_dict )
            # Copy each capability flag straight from the shed's response.
            for key in flag_keys:
                changeset_revision_dict[ key ] = update_dict.get( key, False )
            changeset_revision_dict[ 'changeset_revision' ] = update_dict.get( 'changeset_revision', None )
            changeset_revision_dict[ 'ctx_rev' ] = update_dict.get( 'ctx_rev', None )
    except Exception as e:
        log.debug( "Error getting change set revision for update from the tool shed for repository '%s': %s" % ( repository.name, str( e ) ) )
        # Fall back to the safe defaults for every key.
        for key in flag_keys:
            changeset_revision_dict[ key ] = False
        changeset_revision_dict[ 'changeset_revision' ] = None
        changeset_revision_dict[ 'ctx_rev' ] = None
    return changeset_revision_dict
开发者ID:glormph,项目名称:galaxy,代码行数:45,代码来源:update_repository_manager.py
示例7: get_tool_dependencies
def get_tool_dependencies(app, tool_shed_url, repository_name, repository_owner, changeset_revision):
    """
    Fetch and decode the tool dependency definitions for a repository revision
    from the tool shed.

    Returns (tool_shed_accessible, tool_dependencies) where tool_dependencies
    is a list of (name, version, type) tuples; the list is empty when the shed
    is unreachable or returned nothing.
    """
    tool_dependencies = []
    params = dict(name=repository_name, owner=repository_owner, changeset_revision=changeset_revision)
    pathspec = ['repository', 'get_tool_dependencies']
    try:
        text = util.url_get(tool_shed_url, password_mgr=app.tool_shed_registry.url_auth(tool_shed_url), pathspec=pathspec, params=params)
        tool_shed_accessible = True
    except Exception as e:
        tool_shed_accessible = False
        log.warning("The URL\n%s\nraised the exception:\n%s\n", util.build_url(tool_shed_url, pathspec=pathspec, params=params), e)
    if tool_shed_accessible and text:
        decoded = encoding_util.tool_shed_decode(text)
        tool_dependencies = [(req['name'], req['version'], req['type']) for req in decoded.values()]
    return tool_shed_accessible, tool_dependencies
开发者ID:ImmPortDB,项目名称:immport-galaxy,代码行数:20,代码来源:common_util.py
示例8: get_repository_dependencies_for_installed_tool_shed_repository
def get_repository_dependencies_for_installed_tool_shed_repository(self, app, repository):
    """
    Send a request to the appropriate tool shed to retrieve the dictionary of
    repository dependencies defined for the received repository which is
    installed into Galaxy.  This method is called only from Galaxy.

    Returns the decoded dependency payload, or '' when the shed is unreachable
    or the response is empty.
    """
    tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry(app, str(repository.tool_shed))
    pathspec = ['repository', 'get_repository_dependencies']
    params = dict(name=str(repository.name),
                  owner=str(repository.owner),
                  changeset_revision=str(repository.changeset_revision))
    try:
        raw_text = url_get(tool_shed_url, password_mgr=app.tool_shed_registry.url_auth(tool_shed_url), pathspec=pathspec, params=params)
    except Exception as e:
        log.error("The URL\n%s\nraised the exception:\n%s\n", build_url(tool_shed_url, pathspec=pathspec, params=params), str(e))
        return ''
    if len(raw_text) <= 2:
        # Two characters or fewer is an empty JSON-encoded payload.
        return ''
    return encoding_util.tool_shed_decode(json.loads(raw_text))
开发者ID:bwlang,项目名称:galaxy,代码行数:21,代码来源:repository_dependency_manager.py
示例9: check_for_tool_dependencies
def check_for_tool_dependencies(self, trans, migration_stage):
    """
    Load the 000x_tools.xml file for migration_stage and collect, for each
    listed repository, the tool dependency definitions reported by its shed.

    Returns a list of (repository_name, tool_dependencies) tuples, where
    tool_dependencies is a list of (name, version, type, readme) tuples.
    """
    # Get the 000x_tools.xml file associated with migration_stage.
    tools_xml_file_path = os.path.abspath(
        os.path.join(trans.app.config.root, "scripts", "migrate_tools", "%04d_tools.xml" % migration_stage)
    )
    root = galaxy.util.parse_xml(tools_xml_file_path).getroot()
    tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry(trans.app, root.get("name"))
    repo_name_dependency_tups = []
    if not tool_shed_url:
        return repo_name_dependency_tups
    for elem in root:
        if elem.tag != "repository":
            continue
        repository_name = elem.get("name")
        params = dict(name=repository_name, owner="devteam", changeset_revision=elem.get("changeset_revision"))
        pathspec = ["repository", "get_tool_dependencies"]
        text = common_util.tool_shed_get(trans.app, tool_shed_url, pathspec=pathspec, params=params)
        tool_dependencies = []
        if text:
            for requirements_dict in encoding_util.tool_shed_decode(text).values():
                tool_dependencies.append(
                    (
                        requirements_dict["name"],
                        requirements_dict["version"],
                        requirements_dict["type"],
                        requirements_dict.get("readme", ""),
                    )
                )
        repo_name_dependency_tups.append((repository_name, tool_dependencies))
    return repo_name_dependency_tups
开发者ID:NickSto,项目名称:galaxy,代码行数:37,代码来源:admin.py
示例10: validate_capsule
def validate_capsule( trans, **kwd ):
    """
    Inspect the uploaded capsule's manifest and its contained files to ensure
    it is a valid repository capsule.

    Returns the capsule dictionary with 'status' set to 'ok' on success, or to
    'error' (with 'error_message' populated) on the first failed check.
    """
    capsule_dict = {}
    capsule_dict.update( kwd )
    encoded_file_path = capsule_dict.get( 'encoded_file_path', '' )
    file_path = encoding_util.tool_shed_decode( encoded_file_path )

    def error_result( error_message ):
        # Record the failure on the capsule dict and flag it as invalid.
        capsule_dict[ 'error_message' ] = error_message
        capsule_dict[ 'status' ] = 'error'
        return capsule_dict

    # The capsule must contain a valid XML file named export_info.xml.
    export_info_file_path = os.path.join( file_path, 'export_info.xml' )
    export_info_tree, error_message = xml_util.parse_xml( export_info_file_path )
    if error_message:
        return error_result( error_message )
    # The capsule must contain a valid XML file named manifest.xml.
    manifest_file_path = os.path.join( file_path, 'manifest.xml' )
    # Validate the capsule manifest by inspecting name, owner, changeset_revision and
    # type information contained within each <repository> tag set.
    repository_info_dicts, error_message = get_repository_info_from_manifest( manifest_file_path )
    if error_message:
        return error_result( error_message )
    # Validate the capsule manifest by ensuring all <repository> tag sets contain a
    # valid <archive> sub-element.
    archives, error_message = get_archives_from_manifest( manifest_file_path )
    if error_message:
        return error_result( error_message )
    # Validate the capsule manifest by ensuring each defined archive file name exists
    # within the capsule.
    error_message = verify_archives_in_capsule( file_path, archives )
    if error_message:
        return error_result( error_message )
    capsule_dict[ 'status' ] = 'ok'
    return capsule_dict
开发者ID:knowingchaos,项目名称:galaxy-central,代码行数:36,代码来源:import_util.py
示例11: str
message = 'Error opening file %s: %s' % ( str( capsule_file_name ), str( e ) )
log.error( message, exc_info=True )
trans.response.status = 500
return message
capsule_dict[ 'tar_archive' ] = tar_archive
capsule_dict[ 'capsule_file_name' ] = capsule_file_name
capsule_dict = import_util.extract_capsule_files( trans, **capsule_dict )
capsule_dict = import_util.validate_capsule( trans, **capsule_dict )
status = capsule_dict.get( 'status', 'error' )
if status == 'error':
message = 'The capsule contents are invalid and cannpt be imported:<br/>%s' % str( capsule_dict.get( 'error_message', '' ) )
log.error( message, exc_info=True )
trans.response.status = 500
return message
encoded_file_path = capsule_dict.get( 'encoded_file_path', None )
file_path = encoding_util.tool_shed_decode( encoded_file_path )
export_info_file_path = os.path.join( file_path, 'export_info.xml' )
export_info_dict = import_util.get_export_info_dict( export_info_file_path )
manifest_file_path = os.path.join( file_path, 'manifest.xml' )
# The manifest.xml file has already been validated, so no error_message should be returned here.
repository_info_dicts, error_message = import_util.get_repository_info_from_manifest( manifest_file_path )
# Determine the status for each exported repository archive contained within the capsule.
repository_status_info_dicts = import_util.get_repository_status_from_tool_shed( trans, repository_info_dicts )
# Generate a list of repository name / import results message tuples for display after the capsule is imported.
import_results_tups = []
# Only create repositories that do not yet exist and that the current user is authorized to create. The
# status will be None for repositories that fall into the intersection of these 2 categories.
for repository_status_info_dict in repository_status_info_dicts:
# Add the capsule_file_name and encoded_file_path to the repository_status_info_dict.
repository_status_info_dict[ 'capsule_file_name' ] = capsule_file_name
repository_status_info_dict[ 'encoded_file_path' ] = encoded_file_path
开发者ID:knowingchaos,项目名称:galaxy-central,代码行数:31,代码来源:repositories.py
示例12: get_required_repo_info_dicts
def get_required_repo_info_dicts( self, tool_shed_url, repo_info_dicts ):
"""
Inspect the list of repo_info_dicts for repository dependencies and append a repo_info_dict for each of
them to the list. All repository_dependency entries in each of the received repo_info_dicts includes
all required repositories, so only one pass through this method is required to retrieve all repository
dependencies.
"""
all_required_repo_info_dict = {}
all_repo_info_dicts = []
if repo_info_dicts:
# We'll send tuples of ( tool_shed, repository_name, repository_owner, changeset_revision ) to the tool
# shed to discover repository ids.
required_repository_tups = []
for repo_info_dict in repo_info_dicts:
if repo_info_dict not in all_repo_info_dicts:
all_repo_info_dicts.append( repo_info_dict )
for repository_name, repo_info_tup in repo_info_dict.items():
description, \
repository_clone_url, \
changeset_revision, \
ctx_rev, \
repository_owner, \
repository_dependencies, \
tool_dependencies = \
suc.get_repo_info_tuple_contents( repo_info_tup )
if repository_dependencies:
for key, val in repository_dependencies.items():
if key in [ 'root_key', 'description' ]:
continue
repository_components_tuple = container_util.get_components_from_key( key )
components_list = suc.extract_components_from_tuple( repository_components_tuple )
# Skip listing a repository dependency if it is required only to compile a tool dependency
# defined for the dependent repository since in this case, the repository dependency is really
# a dependency of the dependent repository's contained tool dependency, and only if that
# tool dependency requires compilation.
# For backward compatibility to the 12/20/12 Galaxy release.
prior_installation_required = 'False'
only_if_compiling_contained_td = 'False'
if len( components_list ) == 4:
prior_installation_required = 'False'
only_if_compiling_contained_td = 'False'
elif len( components_list ) == 5:
prior_installation_required = components_list[ 4 ]
only_if_compiling_contained_td = 'False'
if not asbool( only_if_compiling_contained_td ):
if components_list not in required_repository_tups:
required_repository_tups.append( components_list )
for components_list in val:
try:
only_if_compiling_contained_td = components_list[ 5 ]
except:
only_if_compiling_contained_td = 'False'
# Skip listing a repository dependency if it is required only to compile a tool dependency
# defined for the dependent repository (see above comment).
if not asbool( only_if_compiling_contained_td ):
if components_list not in required_repository_tups:
required_repository_tups.append( components_list )
else:
# We have a single repository with no dependencies.
components_list = [ tool_shed_url, repository_name, repository_owner, changeset_revision ]
required_repository_tups.append( components_list )
if required_repository_tups:
# The value of required_repository_tups is a list of tuples, so we need to encode it.
encoded_required_repository_tups = []
for required_repository_tup in required_repository_tups:
# Convert every item in required_repository_tup to a string.
required_repository_tup = [ str( item ) for item in required_repository_tup ]
encoded_required_repository_tups.append( encoding_util.encoding_sep.join( required_repository_tup ) )
encoded_required_repository_str = encoding_util.encoding_sep2.join( encoded_required_repository_tups )
encoded_required_repository_str = encoding_util.tool_shed_encode( encoded_required_repository_str )
if suc.is_tool_shed_client( self.app ):
# Handle secure / insecure Tool Shed URL protocol changes and port changes.
tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( self.app, tool_shed_url )
url = common_util.url_join( tool_shed_url, '/repository/get_required_repo_info_dict' )
# Fix for handling 307 redirect not being handled nicely by urllib2.urlopen when the urllib2.Request has data provided
url = urllib2.urlopen( urllib2.Request( url ) ).geturl()
request = urllib2.Request( url, data=urllib.urlencode( dict( encoded_str=encoded_required_repository_str ) ) )
response = urllib2.urlopen( request ).read()
if response:
try:
required_repo_info_dict = json.loads( response )
except Exception, e:
log.exception( e )
return all_repo_info_dicts
required_repo_info_dicts = []
for k, v in required_repo_info_dict.items():
if k == 'repo_info_dicts':
encoded_dict_strings = required_repo_info_dict[ 'repo_info_dicts' ]
for encoded_dict_str in encoded_dict_strings:
decoded_dict = encoding_util.tool_shed_decode( encoded_dict_str )
required_repo_info_dicts.append( decoded_dict )
else:
if k not in all_required_repo_info_dict:
all_required_repo_info_dict[ k ] = v
else:
if v and not all_required_repo_info_dict[ k ]:
all_required_repo_info_dict[ k ] = v
if required_repo_info_dicts:
for required_repo_info_dict in required_repo_info_dicts:
# Each required_repo_info_dict has a single entry, and all_repo_info_dicts is a list
#.........这里部分代码省略.........
开发者ID:HullUni-bioinformatics,项目名称:ReproPhyloGalaxy,代码行数:101,代码来源:repository_dependency_manager.py
示例13: import_repository_archive
def import_repository_archive( trans, repository, repository_archive_dict ):
    """Import a repository archive contained within a repository capsule.

    Extracts the named archive into the repository's hg working directory,
    rejecting undesirable files/directories, rewrites dependency definition
    files whose changeset_revision values are missing, commits the result and
    refreshes repository metadata.  Builds a results dict with keys 'ok' and
    'error_message'.
    """
    archive_file_name = repository_archive_dict.get( 'archive_file_name', None )
    capsule_file_name = repository_archive_dict[ 'capsule_file_name' ]
    # The on-disk capsule location travels encoded; decode it before use.
    encoded_file_path = repository_archive_dict[ 'encoded_file_path' ]
    file_path = encoding_util.tool_shed_decode( encoded_file_path )
    results_dict = dict( ok=True, error_message='' )
    archive_file_path = os.path.join( file_path, archive_file_name )
    archive = tarfile.open( archive_file_path, 'r:*' )
    repo_dir = repository.repo_path( trans.app )
    repo = hg.repository( suc.get_configured_ui(), repo_dir )
    undesirable_dirs_removed = 0
    undesirable_files_removed = 0
    ok, error_message = commit_util.check_archive( repository, archive )
    if ok:
        full_path = os.path.abspath( repo_dir )
        filenames_in_archive = []
        for tarinfo_obj in archive.getmembers():
            # Check files and directories in the archive.
            ok = os.path.basename( tarinfo_obj.name ) not in commit_util.UNDESIRABLE_FILES
            if ok:
                # Reject the whole import on any undesirable path component
                # (e.g. hidden VCS directories) anywhere in the member path.
                for file_path_item in tarinfo_obj.name.split( '/' ):
                    if file_path_item in commit_util.UNDESIRABLE_DIRS:
                        undesirable_dirs_removed += 1
                        error_message = 'Import failed: invalid file path <b>%s</b> in archive <b>%s</b>' % \
                            ( str( file_path_item ), str( archive_file_name ) )
                        results_dict[ 'ok' ] = False
                        results_dict[ 'error_message' ] += error_message
                        return results_dict
                filenames_in_archive.append( tarinfo_obj.name )
            else:
                # Undesirable files are silently skipped rather than aborting.
                undesirable_files_removed += 1
        # Extract the uploaded archive to the repository root.
        archive.extractall( path=full_path )
        archive.close()
        for filename in filenames_in_archive:
            uploaded_file_name = os.path.join( full_path, filename )
            if os.path.split( uploaded_file_name )[ -1 ] == suc.REPOSITORY_DEPENDENCY_DEFINITION_FILENAME:
                # Inspect the contents of the file to see if changeset_revision values
                # are missing and if so, set them appropriately.
                altered, root_elem, error_message = commit_util.handle_repository_dependencies_definition( trans,
                                                                                                          uploaded_file_name,
                                                                                                          unpopulate=False )
                if error_message:
                    results_dict[ 'ok' ] = False
                    results_dict[ 'error_message' ] += error_message
                if altered:
                    # Replace the extracted file with the repopulated version.
                    tmp_filename = xml_util.create_and_write_tmp_file( root_elem )
                    shutil.move( tmp_filename, uploaded_file_name )
            elif os.path.split( uploaded_file_name )[ -1 ] == suc.TOOL_DEPENDENCY_DEFINITION_FILENAME:
                # Inspect the contents of the file to see if changeset_revision values
                # are missing and if so, set them appropriately.
                altered, root_elem, error_message = commit_util.handle_tool_dependencies_definition( trans, uploaded_file_name )
                if error_message:
                    results_dict[ 'ok' ] = False
                    results_dict[ 'error_message' ] += error_message
                if altered:
                    tmp_filename = xml_util.create_and_write_tmp_file( root_elem )
                    shutil.move( tmp_filename, uploaded_file_name )
        commit_message = 'Imported from capsule %s' % str( capsule_file_name )
        # Send email notification to those that have registered to receive alerts for
        # new repositories in this Tool Shed.
        new_repo_alert = True
        # Since the repository is new, the following must be False.
        remove_repo_files_not_in_tar = False
        ok, error_message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed = \
            commit_util.handle_directory_changes( trans,
                                                  repository,
                                                  full_path,
                                                  filenames_in_archive,
                                                  remove_repo_files_not_in_tar,
                                                  new_repo_alert,
                                                  commit_message,
                                                  undesirable_dirs_removed,
                                                  undesirable_files_removed )
        try:
            metadata_util.set_repository_metadata_due_to_new_tip( trans, repository, content_alert_str=content_alert_str )
        # NOTE(review): Python-2-only except syntax; the rest of this file mixes
        # py2 and py3 forms — confirm target interpreter before modernizing.
        except Exception, e:
            log.debug( "Error setting metadata on repository %s created from imported archive %s: %s" % \
                ( str( repository.name ), str( archive_file_name ), str( e ) ) )
    results_dict[ 'ok' ] = ok
    results_dict[ 'error_message' ] += error_message
    # NOTE(review): this excerpt ends without returning results_dict — presumably
    # 'return results_dict' follows; confirm against the full source.
示例14: install
def install( self, trans, **kwd ):
    """
    POST /api/tool_shed_repositories/install
    Initiate the installation of a repository.
    :param install_resolver_dependencies: True to install resolvable dependencies.
    :param install_tool_dependencies: True to install tool dependencies.
    :param install_repository_dependencies: True to install repository dependencies.
    :param tool_panel_section_id: The unique identifier for an existing tool panel section
    :param new_tool_panel_section_label: Create a new tool panel section with this label
    :param shed_tool_conf: The shed tool config file to use for this installation
    :param tool_shed_url: The URL for the toolshed whence this repository is being installed
    :param changeset: The changeset to update to after cloning the repository
    """
    irm = InstallRepositoryManager( self.app )
    tool_shed_url = kwd.get( 'tool_shed_url', None )
    # 'repositories' arrives as a JSON-encoded list of repository tuples.
    repositories = json.loads( kwd.get( 'repositories', '[]' ) )
    repo_info_dict = self.__get_repo_info_dict( trans, repositories, tool_shed_url )
    includes_tools = False
    includes_tools_for_display_in_tool_panel = False
    has_repository_dependencies = False
    includes_tool_dependencies = False
    install_resolver_dependencies = util.asbool( kwd.get( 'install_resolver_dependencies', False ) )
    # Aggregate capability flags across every repository being installed; once a
    # flag is True it stays True.
    for encoded_repo_info_dict in repo_info_dict.get( 'repo_info_dicts', [] ):
        decoded_repo_info_dict = encoding_util.tool_shed_decode( encoded_repo_info_dict )
        if not includes_tools:
            includes_tools = util.string_as_bool( decoded_repo_info_dict.get( 'includes_tools', False ) )
        if not includes_tools_for_display_in_tool_panel:
            includes_tools_for_display_in_tool_panel = \
                util.string_as_bool( decoded_repo_info_dict.get( 'includes_tools_for_display_in_tool_panel', False ) )
        if not has_repository_dependencies:
            # NOTE(review): reads the outer repo_info_dict rather than
            # decoded_repo_info_dict like the two flags above — confirm intended.
            has_repository_dependencies = util.string_as_bool( repo_info_dict.get( 'has_repository_dependencies', False ) )
        if not includes_tool_dependencies:
            # NOTE(review): same asymmetry — reads the outer dict; confirm intended.
            includes_tool_dependencies = util.string_as_bool( repo_info_dict.get( 'includes_tool_dependencies', False ) )
    encoded_repo_info_dicts = util.listify( repo_info_dict.get( 'repo_info_dicts', [] ) )
    repo_info_dicts = [ encoding_util.tool_shed_decode( encoded_repo_info_dict ) for encoded_repo_info_dict in encoded_repo_info_dicts ]
    tool_panel_section_id = kwd.get( 'tool_panel_section_id', None )
    new_tool_panel_section_label = kwd.get( 'new_tool_panel_section', None )
    tool_panel_section_mapping = json.loads( kwd.get( 'tool_panel_section', '{}' ) )
    install_tool_dependencies = util.asbool( kwd.get( 'install_tool_dependencies', False ) )
    install_repository_dependencies = util.asbool( kwd.get( 'install_repository_dependencies', False ) )
    shed_tool_conf = kwd.get( 'shed_tool_conf', None )
    tool_path = suc.get_tool_path_by_shed_tool_conf_filename( self.app, shed_tool_conf )
    # First pass: create or update the tool_shed_repository records.
    installation_dict = dict( install_repository_dependencies=install_repository_dependencies,
                              new_tool_panel_section_label=new_tool_panel_section_label,
                              no_changes_checked=False,
                              repo_info_dicts=repo_info_dicts,
                              tool_panel_section_id=tool_panel_section_id,
                              tool_path=tool_path,
                              tool_shed_url=tool_shed_url )
    new_repositories, tool_panel_keys, repo_info_dicts, filtered_repos = irm.handle_tool_shed_repositories( installation_dict )
    if new_repositories:
        # Second pass: kick off the actual installation for the newly created
        # or updated repository records.
        installation_dict = dict( created_or_updated_tool_shed_repositories=new_repositories,
                                  filtered_repo_info_dicts=filtered_repos,
                                  has_repository_dependencies=has_repository_dependencies,
                                  includes_tool_dependencies=includes_tool_dependencies,
                                  includes_tools=includes_tools,
                                  includes_tools_for_display_in_tool_panel=includes_tools_for_display_in_tool_panel,
                                  install_repository_dependencies=install_repository_dependencies,
                                  install_tool_dependencies=install_tool_dependencies,
                                  message='',
                                  new_tool_panel_section_label=new_tool_panel_section_label,
                                  tool_panel_section_mapping=tool_panel_section_mapping,
                                  install_resolver_dependencies=install_resolver_dependencies,
                                  shed_tool_conf=shed_tool_conf,
                                  status='ok',
                                  tool_panel_section_id=tool_panel_section_id,
                                  tool_panel_section_keys=tool_panel_keys,
                                  tool_path=tool_path,
                                  tool_shed_url=tool_shed_url )
        encoded_kwd, query, tool_shed_repositories, encoded_repository_ids = \
            irm.initiate_repository_installation( installation_dict )
        return json.dumps( dict( operation='install',
                                 api=True,
                                 install_resolver_dependencies=install_resolver_dependencies,
                                 install_tool_dependencies=install_tool_dependencies,
                                 encoded_kwd=encoded_kwd,
                                 reinstalling=False,
                                 tool_shed_repository_ids=json.dumps( [ repo[0] for repo in repositories ] ),
                                 repositories=[ trans.security.encode_id( repo.id ) for repo in new_repositories ] ) )
    # NOTE(review): this excerpt ends here; the no-new-repositories path
    # presumably continues beyond the snippet — confirm against the full source.
开发者ID:ashvark,项目名称:galaxy,代码行数:80,代码来源:tool_shed_repositories.py
示例15: get_required_repo_info_dicts
def get_required_repo_info_dicts( trans, tool_shed_url, repo_info_dicts ):
"""
Inspect the list of repo_info_dicts for repository dependencies and append a repo_info_dict for each of them to the list. All
repository_de
请发表评论