1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/a749bcb13792/
changeset: a749bcb13792
user: jgoecks
date: 2012-12-14 16:31:19
summary: Decouple set metadata tool from a web transaction and require only an app.
affected #: 1 file
diff -r 2d7a64c143d1136fdfa6ed2203c22527b04b5ff1 -r a749bcb137928ede156899ca5ef4d2eef6e9f45c lib/galaxy/tools/actions/metadata.py
--- a/lib/galaxy/tools/actions/metadata.py
+++ b/lib/galaxy/tools/actions/metadata.py
@@ -8,29 +8,50 @@
class SetMetadataToolAction( ToolAction ):
"""Tool action used for setting external metadata on an existing dataset"""
+
+ def execute( self, tool, trans, incoming={}, set_output_hid=False, overwrite=True, history=None, job_params=None ):
+ """
+ Execute using a web transaction.
+ """
+ user_id = None
+ if trans.user:
+ user_id = trans.user.id
+ job, odict = self.execute_via_app( tool, trans.app, trans.get_galaxy_session().id,
+ trans.history.id, user_id, incoming, set_output_hid,
+ overwrite, history, job_params )
+ # FIXME: can remove this when logging in execute_via_app method.
+ trans.log_event( "Added set external metadata job to the job queue, id: %s" % str(job.id), tool_id=job.tool_id )
+ return job, odict
- def execute( self, tool, trans, incoming = {}, set_output_hid = False, overwrite = True, history=None, job_params=None ):
+ def execute_via_app( self, tool, app, session_id, history_id, user_id = None,
+ incoming = {}, set_output_hid = False, overwrite = True,
+ history=None, job_params=None ):
+ """
+ Execute using application.
+ """
for name, value in incoming.iteritems():
- if isinstance( value, trans.app.model.HistoryDatasetAssociation ):
+ if isinstance( value, app.model.HistoryDatasetAssociation ):
dataset = value
dataset_name = name
type = 'hda'
break
- elif isinstance( value, trans.app.model.LibraryDatasetDatasetAssociation ):
+ elif isinstance( value, app.model.LibraryDatasetDatasetAssociation ):
dataset = value
dataset_name = name
type = 'ldda'
break
else:
raise Exception( 'The dataset to set metadata on could not be determined.' )
+
+ sa_session = app.model.context
# Create the job object
- job = trans.app.model.Job()
- job.session_id = trans.get_galaxy_session().id
- job.history_id = trans.history.id
+ job = app.model.Job()
+ job.session_id = session_id
+ job.history_id = history_id
job.tool_id = tool.id
- if trans.user:
- job.user_id = trans.user.id
+ if user_id:
+ job.user_id = user_id
if job_params:
job.params = to_json_string( job_params )
start_job_state = job.state #should be job.states.NEW
@@ -40,26 +61,26 @@
except:
job.tool_version = "1.0.1"
job.state = job.states.WAITING #we need to set job state to something other than NEW, or else when tracking jobs in db it will be picked up before we have added input / output parameters
- trans.sa_session.add( job )
- trans.sa_session.flush() #ensure job.id is available
+ sa_session.add( job )
+ sa_session.flush() #ensure job.id is available
#add parameters to job_parameter table
# Store original dataset state, so we can restore it. A separate table might be better (no chance of 'losing' the original state)?
incoming[ '__ORIGINAL_DATASET_STATE__' ] = dataset.state
external_metadata_wrapper = JobExternalOutputMetadataWrapper( job )
cmd_line = external_metadata_wrapper.setup_external_metadata( dataset,
- trans.sa_session,
+ sa_session,
exec_dir = None,
- tmp_dir = trans.app.config.new_file_path,
- dataset_files_path = trans.app.model.Dataset.file_path,
+ tmp_dir = app.config.new_file_path,
+ dataset_files_path = app.model.Dataset.file_path,
output_fnames = None,
- config_root = trans.app.config.root,
- config_file = trans.app.config.config_file,
- datatypes_config = trans.app.datatypes_registry.integrated_datatypes_configs,
+ config_root = app.config.root,
+ config_file = app.config.config_file,
+ datatypes_config = app.datatypes_registry.integrated_datatypes_configs,
job_metadata = None,
kwds = { 'overwrite' : overwrite } )
incoming[ '__SET_EXTERNAL_METADATA_COMMAND_LINE__' ] = cmd_line
- for name, value in tool.params_to_strings( incoming, trans.app ).iteritems():
+ for name, value in tool.params_to_strings( incoming, app ).iteritems():
job.add_parameter( name, value )
#add the dataset to job_to_input_dataset table
if type == 'hda':
@@ -70,11 +91,12 @@
# i.e. if state was set to 'running' the set metadata job would never run, as it would wait for input (the dataset to set metadata on) to be in a ready state
dataset._state = dataset.states.SETTING_METADATA
job.state = start_job_state #job inputs have been configured, restore initial job state
- trans.sa_session.flush()
+ sa_session.flush()
# Queue the job for execution
- trans.app.job_queue.put( job.id, tool )
- trans.log_event( "Added set external metadata job to the job queue, id: %s" % str(job.id), tool_id=job.tool_id )
+ app.job_queue.put( job.id, tool )
+ # FIXME: need to add event logging to app and log events there rather than trans.
+ #trans.log_event( "Added set external metadata job to the job queue, id: %s" % str(job.id), tool_id=job.tool_id )
#clear e.g. converted files
dataset.datatype.before_setting_metadata( dataset )
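Editor's sketch (not part of the changeset): with the action decoupled from the web layer, a non-web caller can drive it through execute_via_app. The sketch below assumes a loaded Galaxy app, an existing HDA, and valid session/history/user ids; the '__SET_METADATA__' tool id and the 'input1' parameter name are illustrative assumptions, not taken from this commit.

    # Hypothetical caller: queue a set-metadata job using only the app,
    # per the execute_via_app signature introduced above.
    from galaxy.tools.actions.metadata import SetMetadataToolAction

    def queue_set_metadata_job( app, hda, session_id, history_id, user_id=None ):
        tool = app.toolbox.tools_by_id[ '__SET_METADATA__' ]   # assumed tool id
        action = SetMetadataToolAction()
        job, out_data = action.execute_via_app( tool, app, session_id, history_id,
                                                user_id=user_id,
                                                incoming={ 'input1' : hda },   # assumed parameter name; any HDA/LDDA value is detected
                                                overwrite=True )
        return job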
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/0ec7f0b6dec8/
changeset: 0ec7f0b6dec8
user: jgoecks
date: 2012-12-14 14:47:08
summary: Include custom genomes in dbkey field when uploading library datasets.
affected #: 1 file
diff -r 1d73c973c9adfa27e1759c280e94c712145e1e03 -r 0ec7f0b6dec8707b6f4d114468367c9131e61200 lib/galaxy/webapps/galaxy/controllers/library_common.py
--- a/lib/galaxy/webapps/galaxy/controllers/library_common.py
+++ b/lib/galaxy/webapps/galaxy/controllers/library_common.py
@@ -944,11 +944,7 @@
# Send list of data formats to the upload form so the "extension" select list can be populated dynamically
file_formats = trans.app.datatypes_registry.upload_file_formats
- # Send list of genome builds to the form so the "dbkey" select list can be populated dynamically
- def get_dbkey_options( last_used_build ):
- for dbkey, build_name in util.dbnames:
- yield build_name, dbkey, ( dbkey==last_used_build )
- dbkeys = get_dbkey_options( last_used_build )
+ dbkeys = trans.app.genomes.get_dbkeys_with_chrom_info( trans )
# Send the current history to the form to enable importing datasets from history to library
history = trans.get_history()
if history is not None:
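Editor's note on the change above: the removed helper only enumerated built-in builds from util.dbnames, which is why custom genomes never appeared in the library upload form; the replacement delegates to the genomes registry, which also knows about user-defined custom builds. The deleted generator, reproduced standalone for reference ('hg19' is just an example dbkey):

    # Yields one ( build name, dbkey, selected ) tuple per built-in genome;
    # it cannot see custom builds, hence the switch to the genomes registry.
    from galaxy import util

    def get_dbkey_options( last_used_build ):
        for dbkey, build_name in util.dbnames:
            yield build_name, dbkey, ( dbkey == last_used_build )

    # options = list( get_dbkey_options( last_used_build='hg19' ) )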
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/1d73c973c9ad/
changeset: 1d73c973c9ad
user: greg
date: 2012-12-13 23:13:40
summary: When getting updates to an installed tool shed repository that contains tools, automatically load any updated tools into the Galaxy tool panel.
affected #: 3 files
diff -r 97ec62934b6b17e674706f2c41ce4b34032fa338 -r 1d73c973c9adfa27e1759c280e94c712145e1e03 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -397,7 +397,7 @@
# If there is not yet a tool_shed_repository record, we're in the process of installing
# a new repository, so any included tools can be loaded into the tool panel.
can_load_into_panel_dict = True
- tool = self.load_tool( os.path.join( tool_path, path ), guid=guid )
+ tool = self.load_tool( os.path.join( tool_path, path ), guid=guid )
key = 'tool_%s' % str( tool.id )
if can_load_into_panel_dict:
if guid is not None:
@@ -435,9 +435,12 @@
tta = self.app.model.ToolTagAssociation( tool_id=tool.id, tag_id=tag.id )
self.sa_session.add( tta )
self.sa_session.flush()
- if tool.id not in self.tools_by_id:
- # Allow for the same tool to be loaded into multiple places in the tool panel.
- self.tools_by_id[ tool.id ] = tool
+ #if tool.id not in self.tools_by_id:
+ # Allow for the same tool to be loaded into multiple places in the tool panel. We have to handle the case where the tool is contained
+ # in a repository installed from the tool shed, and the Galaxy administrator has retrieved updates to the installed repository. In this
+ # case, the tool may have been updated, but the version was not changed, so the tool should always be reloaded here. We used to only load
+    # the tool if it was not found in self.tools_by_id, but performing that check did not enable this scenario.
+ self.tools_by_id[ tool.id ] = tool
if load_panel_dict:
self.__add_tool_to_tool_panel( tool.id, panel_dict, section=isinstance( panel_dict, galaxy.tools.ToolSection ) )
# Always load the tool into the integrated_panel_dict, or it will not be included in the integrated_tool_panel.xml file.
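Editor's illustration (toy code, not Galaxy's) of why the `if tool.id not in self.tools_by_id` guard was dropped: when an updated tool keeps the same id and version, the old check left the stale object registered, so the new code always replaces the entry. The tool id and labels below are made up.

    tools_by_id = {}

    def register_with_old_guard( tool_id, tool ):
        if tool_id not in tools_by_id:      # old behavior: keep the first object seen
            tools_by_id[ tool_id ] = tool

    def register_unconditionally( tool_id, tool ):
        tools_by_id[ tool_id ] = tool       # new behavior: always replace

    register_with_old_guard( 'example_tool', 'original tool object' )
    register_with_old_guard( 'example_tool', 'updated tool object' )    # silently ignored
    register_unconditionally( 'example_tool', 'updated tool object' )   # stale entry replaced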
diff -r 97ec62934b6b17e674706f2c41ce4b34032fa338 -r 1d73c973c9adfa27e1759c280e94c712145e1e03 lib/galaxy/util/shed_util_common.py
--- a/lib/galaxy/util/shed_util_common.py
+++ b/lib/galaxy/util/shed_util_common.py
@@ -1036,7 +1036,7 @@
return metadata_dict
def generate_tool_panel_dict_from_shed_tool_conf_entries( app, repository ):
"""
- Keep track of the section in the tool panel in which this repository's tools will be contained by parsing the shed-tool_conf in
+ Keep track of the section in the tool panel in which this repository's tools will be contained by parsing the shed_tool_conf in
which the repository's tools are defined and storing the tool panel definition of each tool in the repository. This method is called
only when the repository is being deactivated or uninstalled and allows for activation or reinstallation using the original layout.
"""
@@ -2232,8 +2232,10 @@
sa_session.flush()
return new_tool_dependency
def update_in_shed_tool_config( app, repository ):
- # A tool shed repository is being updated so change the shed_tool_conf file. Parse the config file to generate the entire list
- # of config_elems instead of using the in-memory list.
+ """
+ A tool shed repository is being updated so change the shed_tool_conf file. Parse the config file to generate the entire list
+ of config_elems instead of using the in-memory list.
+ """
shed_conf_dict = repository.get_shed_config_dict( app )
shed_tool_conf = shed_conf_dict[ 'config_filename' ]
tool_path = shed_conf_dict[ 'tool_path' ]
diff -r 97ec62934b6b17e674706f2c41ce4b34032fa338 -r 1d73c973c9adfa27e1759c280e94c712145e1e03 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -327,7 +327,7 @@
shed_util.add_to_tool_panel( trans.app,
repository.name,
repository_clone_url,
- repository.changeset_revision,
+ repository.installed_changeset_revision,
repository_tools_tups,
repository.owner,
shed_tool_conf,
@@ -810,7 +810,7 @@
shed_util.add_to_tool_panel( app=trans.app,
repository_name=tool_shed_repository.name,
repository_clone_url=repository_clone_url,
- changeset_revision=tool_shed_repository.changeset_revision,
+ changeset_revision=tool_shed_repository.installed_changeset_revision,
repository_tools_tups=repository_tools_tups,
owner=tool_shed_repository.owner,
shed_tool_conf=shed_tool_conf,
@@ -1777,7 +1777,20 @@
repository.update_available = False
trans.sa_session.add( repository )
trans.sa_session.flush()
- # Fixme: call shed_util.add_to_tool_panel here?
+ if 'tools' in metadata_dict:
+ tool_panel_dict = metadata_dict.get( 'tool_panel_section', None )
+ if tool_panel_dict is None:
+ tool_panel_dict = suc.generate_tool_panel_dict_from_shed_tool_conf_entries( trans.app, repository )
+ repository_tools_tups = suc.get_repository_tools_tups( trans.app, metadata_dict )
+ shed_util.add_to_tool_panel( app=trans.app,
+ repository_name=repository.name,
+ repository_clone_url=repository_clone_url,
+ changeset_revision=repository.installed_changeset_revision,
+ repository_tools_tups=repository_tools_tups,
+ owner=repository.owner,
+ shed_tool_conf=shed_tool_conf,
+ tool_panel_dict=tool_panel_dict,
+ new_install=False )
# Create tool_dependency records if necessary.
if 'tool_dependencies' in metadata_dict:
tool_dependencies = shed_util.create_tool_dependency_objects( trans.app, repository, relative_install_dir, set_status=False )
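Editor's note for reading the add_to_tool_panel call above: the tool_panel_dict built by suc.generate_tool_panel_dict_from_shed_tool_conf_entries maps each tool guid to a list of section descriptors, as in the sketch below. The guid, file name, and section values are made up; only the keys mirror the dicts built in that function.

    tool_panel_dict = {
        'toolshed.g2.bx.psu.edu/repos/some_owner/some_repo/some_tool/1.0.0' : [
            dict( tool_config='some_tool.xml',   # tool config file name (path stripped)
                  id='example_section',          # tool panel section id, '' if outside any section
                  name='Example Section',        # section name, '' if outside any section
                  version='' )                   # section version, '' if not set
        ]
    }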
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/90b28e58bb11/
changeset: 90b28e58bb11
user: greg
date: 2012-12-13 22:00:34
summary: Tool shed util refactoring and import fixes and cleanup.
affected #: 5 files
diff -r 7fb4047a3d6be5847f38acba440e4113eb21e12c -r 90b28e58bb11eb65bd4f461e0b380c5921c626e2 lib/galaxy/tool_shed/__init__.py
--- a/lib/galaxy/tool_shed/__init__.py
+++ b/lib/galaxy/tool_shed/__init__.py
@@ -3,6 +3,7 @@
"""
import os
import galaxy.util.shed_util
+import galaxy.util.shed_util_common
from galaxy.model.orm import and_
from galaxy import eggs
@@ -27,7 +28,7 @@
ElementInclude.include( root )
tool_path = root.get( 'tool_path', None )
if tool_path:
- tool_shed = galaxy.util.shed_util.clean_tool_shed_url( tool_shed_repository.tool_shed )
+ tool_shed = galaxy.util.shed_util_common.clean_tool_shed_url( tool_shed_repository.tool_shed )
relative_path = os.path.join( tool_path,
tool_shed,
'repos',
diff -r 7fb4047a3d6be5847f38acba440e4113eb21e12c -r 90b28e58bb11eb65bd4f461e0b380c5921c626e2 lib/galaxy/tool_shed/install_manager.py
--- a/lib/galaxy/tool_shed/install_manager.py
+++ b/lib/galaxy/tool_shed/install_manager.py
@@ -37,7 +37,7 @@
self.tool_shed_install_config = tool_shed_install_config
tree = util.parse_xml( tool_shed_install_config )
root = tree.getroot()
- self.tool_shed = shed_util.clean_tool_shed_url( root.get( 'name' ) )
+ self.tool_shed = suc.clean_tool_shed_url( root.get( 'name' ) )
self.repository_owner = common_util.REPOSITORY_OWNER
index, self.shed_config_dict = shed_util.get_shed_tool_conf_dict( app, self.migrated_tools_config )
# Since tool migration scripts can be executed any number of times, we need to make sure the appropriate tools are defined in
@@ -198,6 +198,7 @@
tool_dependencies = None
if 'tools' in metadata_dict:
sample_files = metadata_dict.get( 'sample_files', [] )
+ sample_files = [ str( s ) for s in sample_files ]
tool_index_sample_files = shed_util.get_tool_index_sample_files( sample_files )
shed_util.copy_sample_files( self.app, tool_index_sample_files, tool_path=self.tool_path )
sample_files_copied = [ s for s in tool_index_sample_files ]
diff -r 7fb4047a3d6be5847f38acba440e4113eb21e12c -r 90b28e58bb11eb65bd4f461e0b380c5921c626e2 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -166,11 +166,6 @@
os.close( fd )
shutil.move( filename, os.path.abspath( config_filename ) )
os.chmod( config_filename, 0644 )
-def clean_tool_shed_url( tool_shed_url ):
- if tool_shed_url.find( ':' ) > 0:
- # Eliminate the port, if any, since it will result in an invalid directory name.
- return tool_shed_url.split( ':' )[ 0 ]
- return tool_shed_url.rstrip( '/' )
def copy_sample_files( app, sample_files, tool_path=None, sample_files_copied=None, dest_path=None ):
"""
Copy all appropriate files to dest_path in the local Galaxy environment that have not already been copied. Those that have been copied
@@ -379,53 +374,6 @@
else:
tool_panel_dict[ guid ] = [ tool_section_dict ]
return tool_panel_dict
-def generate_tool_panel_dict_from_shed_tool_conf_entries( trans, repository ):
- """
- Keep track of the section in the tool panel in which this repository's tools will be contained by parsing the shed-tool_conf in
- which the repository's tools are defined and storing the tool panel definition of each tool in the repository. This method is called
- only when the repository is being deactivated or uninstalled and allows for activation or reinstallation using the original layout.
- """
- tool_panel_dict = {}
- shed_tool_conf, tool_path, relative_install_dir = get_tool_panel_config_tool_path_install_dir( trans.app, repository )
- metadata = repository.metadata
- # Create a dictionary of tool guid and tool config file name for each tool in the repository.
- guids_and_configs = {}
- for tool_dict in metadata[ 'tools' ]:
- guid = tool_dict[ 'guid' ]
- tool_config = tool_dict[ 'tool_config' ]
- file_name = suc.strip_path( tool_config )
- guids_and_configs[ guid ] = file_name
- # Parse the shed_tool_conf file in which all of this repository's tools are defined and generate the tool_panel_dict.
- tree = util.parse_xml( shed_tool_conf )
- root = tree.getroot()
- for elem in root:
- if elem.tag == 'tool':
- guid = elem.get( 'guid' )
- if guid in guids_and_configs:
- # The tool is displayed in the tool panel outside of any tool sections.
- tool_section_dict = dict( tool_config=guids_and_configs[ guid ], id='', name='', version='' )
- if guid in tool_panel_dict:
- tool_panel_dict[ guid ].append( tool_section_dict )
- else:
- tool_panel_dict[ guid ] = [ tool_section_dict ]
- elif elem.tag == 'section':
- section_id = elem.get( 'id' ) or ''
- section_name = elem.get( 'name' ) or ''
- section_version = elem.get( 'version' ) or ''
- for section_elem in elem:
- if section_elem.tag == 'tool':
- guid = section_elem.get( 'guid' )
- if guid in guids_and_configs:
- # The tool is displayed in the tool panel inside the current tool section.
- tool_section_dict = dict( tool_config=guids_and_configs[ guid ],
- id=section_id,
- name=section_name,
- version=section_version )
- if guid in tool_panel_dict:
- tool_panel_dict[ guid ].append( tool_section_dict )
- else:
- tool_panel_dict[ guid ] = [ tool_section_dict ]
- return tool_panel_dict
def generate_tool_panel_dict_for_tool_config( guid, tool_config, tool_sections=None ):
"""
Create a dictionary of the following type for a single tool config file name. The intent is to call this method for every tool config
@@ -583,7 +531,7 @@
for s in sample_files:
# The problem with this is that Galaxy does not follow a standard naming convention for file names.
if s.endswith( '.loc.sample' ) or s.endswith( '.xml.sample' ) or s.endswith( '.txt.sample' ):
- tool_index_sample_files.append( s )
+ tool_index_sample_files.append( str( s ) )
return tool_index_sample_files
def get_tool_dependency( trans, id ):
"""Get a tool_dependency from the database via id"""
@@ -598,23 +546,6 @@
if as_string:
return ','.join( tool_dependency_ids )
return tool_dependency_ids
-def get_tool_panel_config_tool_path_install_dir( app, repository ):
- # Return shed-related tool panel config, the tool_path configured in it, and the relative path to the directory where the
- # repository is installed. This method assumes all repository tools are defined in a single shed-related tool panel config.
- tool_shed = clean_tool_shed_url( repository.tool_shed )
- partial_install_dir = '%s/repos/%s/%s/%s' % ( tool_shed, repository.owner, repository.name, repository.installed_changeset_revision )
- # Get the relative tool installation paths from each of the shed tool configs.
- relative_install_dir = None
- shed_config_dict = repository.get_shed_config_dict( app )
- if not shed_config_dict:
- #just pick a semi-random shed config
- for shed_config_dict in app.toolbox.shed_tool_confs:
- if ( repository.dist_to_shed and shed_config_dict['config_filename'] == app.config.migrated_tools_config ) or ( not repository.dist_to_shed and shed_config_dict['config_filename'] != app.config.migrated_tools_config ):
- break
- shed_tool_conf = shed_config_dict[ 'config_filename' ]
- tool_path = shed_config_dict[ 'tool_path' ]
- relative_install_dir = partial_install_dir
- return shed_tool_conf, tool_path, relative_install_dir
def get_tool_path_install_dir( partial_install_dir, shed_tool_conf_dict, tool_dict, config_elems ):
for elem in config_elems:
if elem.tag == 'tool':
@@ -913,7 +844,7 @@
"""A tool shed repository is being deactivated or uninstalled so handle tool panel alterations accordingly."""
# Determine where the tools are currently defined in the tool panel and store this information so the tools can be displayed
# in the same way when the repository is activated or reinstalled.
- tool_panel_dict = generate_tool_panel_dict_from_shed_tool_conf_entries( trans, repository )
+ tool_panel_dict = suc.generate_tool_panel_dict_from_shed_tool_conf_entries( trans, repository )
repository.metadata[ 'tool_panel_section' ] = tool_panel_dict
trans.sa_session.add( repository )
trans.sa_session.flush()
diff -r 7fb4047a3d6be5847f38acba440e4113eb21e12c -r 90b28e58bb11eb65bd4f461e0b380c5921c626e2 lib/galaxy/util/shed_util_common.py
--- a/lib/galaxy/util/shed_util_common.py
+++ b/lib/galaxy/util/shed_util_common.py
@@ -328,6 +328,11 @@
if can_delete:
trans.sa_session.delete( repository_metadata )
trans.sa_session.flush()
+def clean_tool_shed_url( tool_shed_url ):
+ if tool_shed_url.find( ':' ) > 0:
+ # Eliminate the port, if any, since it will result in an invalid directory name.
+ return tool_shed_url.split( ':' )[ 0 ]
+ return tool_shed_url.rstrip( '/' )
def clone_repository( repository_clone_url, repository_file_dir, ctx_rev ):
"""Clone the repository up to the specified changeset_revision. No subsequent revisions will be present in the cloned repository."""
try:
@@ -567,8 +572,8 @@
return repo_info_dict
def generate_clone_url_for_installed_repository( trans, repository ):
"""Generate the URL for cloning a repository that has been installed into a Galaxy instance."""
- tool_shed_url = suc.get_url_from_repository_tool_shed( trans.app, repository )
- return suc.url_join( tool_shed_url, 'repos', repository.owner, repository.name )
+ tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository )
+ return url_join( tool_shed_url, 'repos', repository.owner, repository.name )
def generate_clone_url_for_repository_in_tool_shed( trans, repository ):
"""Generate the URL for cloning a repository that is in the tool shed."""
base_url = url_for( '/', qualified=True ).rstrip( '/' )
@@ -689,6 +694,10 @@
original_repository_metadata = None
readme_file_names = get_readme_file_names( repository.name )
metadata_dict = { 'shed_config_filename' : shed_config_dict.get( 'config_filename' ) }
+ # If we're regenerating metadata for a repository that contains tools, make sure we keep the tool panel section information.
+ # Fixme: do we need this?
+ #if original_repository_metadata and 'tool_panel_section' in original_repository_metadata:
+ # metadata_dict[ 'tool_panel_section' ] = original_repository_metadata[ 'tool_panel_section' ]
readme_files = []
invalid_file_tups = []
invalid_tool_configs = []
@@ -994,6 +1003,53 @@
else:
metadata_dict[ 'tools' ] = [ tool_dict ]
return metadata_dict
+def generate_tool_panel_dict_from_shed_tool_conf_entries( trans, repository ):
+ """
+ Keep track of the section in the tool panel in which this repository's tools will be contained by parsing the shed-tool_conf in
+ which the repository's tools are defined and storing the tool panel definition of each tool in the repository. This method is called
+ only when the repository is being deactivated or uninstalled and allows for activation or reinstallation using the original layout.
+ """
+ tool_panel_dict = {}
+ shed_tool_conf, tool_path, relative_install_dir = get_tool_panel_config_tool_path_install_dir( trans.app, repository )
+ metadata = repository.metadata
+ # Create a dictionary of tool guid and tool config file name for each tool in the repository.
+ guids_and_configs = {}
+ for tool_dict in metadata[ 'tools' ]:
+ guid = tool_dict[ 'guid' ]
+ tool_config = tool_dict[ 'tool_config' ]
+ file_name = strip_path( tool_config )
+ guids_and_configs[ guid ] = file_name
+ # Parse the shed_tool_conf file in which all of this repository's tools are defined and generate the tool_panel_dict.
+ tree = util.parse_xml( shed_tool_conf )
+ root = tree.getroot()
+ for elem in root:
+ if elem.tag == 'tool':
+ guid = elem.get( 'guid' )
+ if guid in guids_and_configs:
+ # The tool is displayed in the tool panel outside of any tool sections.
+ tool_section_dict = dict( tool_config=guids_and_configs[ guid ], id='', name='', version='' )
+ if guid in tool_panel_dict:
+ tool_panel_dict[ guid ].append( tool_section_dict )
+ else:
+ tool_panel_dict[ guid ] = [ tool_section_dict ]
+ elif elem.tag == 'section':
+ section_id = elem.get( 'id' ) or ''
+ section_name = elem.get( 'name' ) or ''
+ section_version = elem.get( 'version' ) or ''
+ for section_elem in elem:
+ if section_elem.tag == 'tool':
+ guid = section_elem.get( 'guid' )
+ if guid in guids_and_configs:
+ # The tool is displayed in the tool panel inside the current tool section.
+ tool_section_dict = dict( tool_config=guids_and_configs[ guid ],
+ id=section_id,
+ name=section_name,
+ version=section_version )
+ if guid in tool_panel_dict:
+ tool_panel_dict[ guid ].append( tool_section_dict )
+ else:
+ tool_panel_dict[ guid ] = [ tool_section_dict ]
+ return tool_panel_dict
def generate_workflow_metadata( relative_path, exported_workflow_dict, metadata_dict ):
"""Update the received metadata_dict with changes that have been applied to the received exported_workflow_dict."""
if 'workflows' in metadata_dict:
@@ -1372,6 +1428,23 @@
relative_path_to_sample_file = relative_path_to_sample_file[ len( tool_path ) + 1 :]
sample_file_metadata_paths.append( relative_path_to_sample_file )
return sample_file_metadata_paths, sample_file_copy_paths
+def get_tool_panel_config_tool_path_install_dir( app, repository ):
+ # Return shed-related tool panel config, the tool_path configured in it, and the relative path to the directory where the
+ # repository is installed. This method assumes all repository tools are defined in a single shed-related tool panel config.
+ tool_shed = clean_tool_shed_url( repository.tool_shed )
+ partial_install_dir = '%s/repos/%s/%s/%s' % ( tool_shed, repository.owner, repository.name, repository.installed_changeset_revision )
+ # Get the relative tool installation paths from each of the shed tool configs.
+ relative_install_dir = None
+ shed_config_dict = repository.get_shed_config_dict( app )
+ if not shed_config_dict:
+ #just pick a semi-random shed config
+ for shed_config_dict in app.toolbox.shed_tool_confs:
+ if ( repository.dist_to_shed and shed_config_dict['config_filename'] == app.config.migrated_tools_config ) or ( not repository.dist_to_shed and shed_config_dict['config_filename'] != app.config.migrated_tools_config ):
+ break
+ shed_tool_conf = shed_config_dict[ 'config_filename' ]
+ tool_path = shed_config_dict[ 'tool_path' ]
+ relative_install_dir = partial_install_dir
+ return shed_tool_conf, tool_path, relative_install_dir
def get_tool_shed_from_clone_url( repository_clone_url ):
tmp_url = clean_repository_clone_url( repository_clone_url )
return tmp_url.split( 'repos' )[ 0 ].rstrip( '/' )
@@ -2019,7 +2092,7 @@
str( markupsafe.escape( ''.join( translated ) ) )
return ''.join( translated )
def tool_shed_from_repository_clone_url( repository_clone_url ):
- return suc.clean_repository_clone_url( repository_clone_url ).split( 'repos' )[ 0 ].rstrip( '/' )
+ return clean_repository_clone_url( repository_clone_url ).split( 'repos' )[ 0 ].rstrip( '/' )
def tool_shed_is_this_tool_shed( toolshed_base_url ):
return toolshed_base_url.rstrip( '/' ) == str( url_for( '/', qualified=True ) ).rstrip( '/' )
def translate_string( raw_text, to_html=True ):
@@ -2106,13 +2179,14 @@
shed_tool_conf = shed_conf_dict[ 'config_filename' ]
tool_path = shed_conf_dict[ 'tool_path' ]
+ # TODO Fix this - we should be able to pass only app - we should not need trans...
#hack for 'trans.app' used in lots of places. These places should just directly use app
trans = util.bunch.Bunch()
trans.app = app
tool_panel_dict = generate_tool_panel_dict_from_shed_tool_conf_entries( trans, repository )
repository_tools_tups = get_repository_tools_tups( app, repository.metadata )
- cleaned_repository_clone_url = suc.clean_repository_clone_url( suc.generate_clone_url_for_installed_repository( trans, repository ) )
+ cleaned_repository_clone_url = clean_repository_clone_url( generate_clone_url_for_installed_repository( trans, repository ) )
tool_shed = tool_shed_from_repository_clone_url( cleaned_repository_clone_url )
owner = repository.owner
if not owner:
diff -r 7fb4047a3d6be5847f38acba440e4113eb21e12c -r 90b28e58bb11eb65bd4f461e0b380c5921c626e2 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -309,7 +309,7 @@
def activate_repository( self, trans, **kwd ):
"""Activate a repository that was deactivated but not uninstalled."""
repository = suc.get_installed_tool_shed_repository( trans, kwd[ 'id' ] )
- shed_tool_conf, tool_path, relative_install_dir = shed_util.get_tool_panel_config_tool_path_install_dir( trans.app, repository )
+ shed_tool_conf, tool_path, relative_install_dir = suc.get_tool_panel_config_tool_path_install_dir( trans.app, repository )
repository_clone_url = suc.generate_clone_url_for_installed_repository( trans, repository )
repository.deleted = False
repository.status = trans.model.ToolShedRepository.installation_status.INSTALLED
@@ -460,7 +460,7 @@
remove_from_disk = params.get( 'remove_from_disk', '' )
remove_from_disk_checked = CheckboxField.is_checked( remove_from_disk )
tool_shed_repository = suc.get_installed_tool_shed_repository( trans, kwd[ 'id' ] )
- shed_tool_conf, tool_path, relative_install_dir = shed_util.get_tool_panel_config_tool_path_install_dir( trans.app, tool_shed_repository )
+ shed_tool_conf, tool_path, relative_install_dir = suc.get_tool_panel_config_tool_path_install_dir( trans.app, tool_shed_repository )
if relative_install_dir:
if tool_path:
relative_install_dir = os.path.join( tool_path, relative_install_dir )
@@ -553,7 +553,7 @@
items = tmp_url.split( 'repos' )
tool_shed_url = items[ 0 ]
repo_path = items[ 1 ]
- tool_shed_url = shed_util.clean_tool_shed_url( tool_shed_url )
+ tool_shed_url = suc.clean_tool_shed_url( tool_shed_url )
return suc.url_join( tool_shed_url, 'repos', repo_path, changeset_revision )
@web.json
@web.require_admin
@@ -794,7 +794,7 @@
sample_files = metadata_dict.get( 'sample_files', [] )
tool_index_sample_files = shed_util.get_tool_index_sample_files( sample_files )
shed_util.copy_sample_files( self.app, tool_index_sample_files, tool_path=tool_path )
- sample_files_copied = [ s for s in tool_index_sample_files ]
+ sample_files_copied = [ str( s ) for s in tool_index_sample_files ]
repository_tools_tups = shed_util.get_repository_tools_tups( trans.app, metadata_dict )
if repository_tools_tups:
# Handle missing data table entries for tool parameters that are dynamically generated select lists.
@@ -866,7 +866,7 @@
( repository.name, repository.owner, repository.installed_changeset_revision, ( url_for( '/', qualified=True ) ) ) )
return trans.response.send_redirect( url )
description = util.restore_text( params.get( 'description', repository.description ) )
- shed_tool_conf, tool_path, relative_install_dir = shed_util.get_tool_panel_config_tool_path_install_dir( trans.app, repository )
+ shed_tool_conf, tool_path, relative_install_dir = suc.get_tool_panel_config_tool_path_install_dir( trans.app, repository )
if relative_install_dir:
repo_files_dir = os.path.abspath( os.path.join( tool_path, relative_install_dir, repository.name ) )
else:
@@ -1294,7 +1294,7 @@
install_tool_dependencies = CheckboxField.is_checked( kwd.get( 'install_tool_dependencies', '' ) )
new_tool_panel_section = kwd.get( 'new_tool_panel_section', '' )
tool_panel_section = kwd.get( 'tool_panel_section', '' )
- shed_tool_conf, tool_path, relative_install_dir = shed_util.get_tool_panel_config_tool_path_install_dir( trans.app, tool_shed_repository )
+ shed_tool_conf, tool_path, relative_install_dir = suc.get_tool_panel_config_tool_path_install_dir( trans.app, tool_shed_repository )
repository_clone_url = suc.generate_clone_url_for_installed_repository( trans, tool_shed_repository )
clone_dir = os.path.join( tool_path, self.generate_tool_path( repository_clone_url, tool_shed_repository.installed_changeset_revision ) )
relative_install_dir = os.path.join( clone_dir, tool_shed_repository.name )
@@ -1660,7 +1660,7 @@
message += "Reset all of this reppository's metadata in the tool shed, then set the installed tool versions "
message ++ "from the installed repository's <b>Repository Actions</b> menu. "
status = 'error'
- shed_tool_conf, tool_path, relative_install_dir = shed_util.get_tool_panel_config_tool_path_install_dir( trans.app, repository )
+ shed_tool_conf, tool_path, relative_install_dir = suc.get_tool_panel_config_tool_path_install_dir( trans.app, repository )
repo_files_dir = os.path.abspath( os.path.join( relative_install_dir, repository.name ) )
return trans.fill_template( '/admin/tool_shed_repository/manage_repository.mako',
repository=repository,
@@ -1749,7 +1749,7 @@
if changeset_revision == latest_changeset_revision:
message = "The installed repository named '%s' is current, there are no updates available. " % name
else:
- shed_tool_conf, tool_path, relative_install_dir = shed_util.get_tool_panel_config_tool_path_install_dir( trans.app, repository )
+ shed_tool_conf, tool_path, relative_install_dir = suc.get_tool_panel_config_tool_path_install_dir( trans.app, repository )
if relative_install_dir:
if tool_path:
repo_files_dir = os.path.abspath( os.path.join( tool_path, relative_install_dir, name ) )
@@ -1759,7 +1759,7 @@
repository_clone_url = os.path.join( tool_shed_url, 'repos', owner, name )
shed_util.pull_repository( repo, repository_clone_url, latest_ctx_rev )
suc.update_repository( repo, latest_ctx_rev )
- tool_shed = shed_util.clean_tool_shed_url( tool_shed_url )
+ tool_shed = suc.clean_tool_shed_url( tool_shed_url )
# Update the repository metadata.
metadata_dict, invalid_file_tups = suc.generate_metadata_for_changeset_revision( app=trans.app,
repository=repository,
@@ -1777,12 +1777,13 @@
repository.update_available = False
trans.sa_session.add( repository )
trans.sa_session.flush()
+ # Fixme: call shed_util.add_to_tool_panel here?
# Create tool_dependency records if necessary.
if 'tool_dependencies' in metadata_dict:
tool_dependencies = shed_util.create_tool_dependency_objects( trans.app, repository, relative_install_dir, set_status=False )
message = "The installed repository named '%s' has been updated to change set revision '%s'. " % ( name, latest_changeset_revision )
# See if any tool dependencies can be installed.
- shed_tool_conf, tool_path, relative_install_dir = shed_util.get_tool_panel_config_tool_path_install_dir( trans.app, repository )
+ shed_tool_conf, tool_path, relative_install_dir = suc.get_tool_panel_config_tool_path_install_dir( trans.app, repository )
if repository.missing_tool_dependencies:
message += "Click the name of one of the missing tool dependencies listed below to install tool dependencies."
else:
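Editor's aside: clean_tool_shed_url, moved from shed_util to shed_util_common in this changeset, is small enough to quote, with illustrative inputs (the host name appears in the commit's own comments; the port is an example):

    def clean_tool_shed_url( tool_shed_url ):
        if tool_shed_url.find( ':' ) > 0:
            # Eliminate the port, if any, since it will result in an invalid directory name.
            return tool_shed_url.split( ':' )[ 0 ]
        return tool_shed_url.rstrip( '/' )

    clean_tool_shed_url( 'toolshed.g2.bx.psu.edu:9009' )   # -> 'toolshed.g2.bx.psu.edu'
    clean_tool_shed_url( 'toolshed.g2.bx.psu.edu/' )       # -> 'toolshed.g2.bx.psu.edu'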
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/34f61e53a563/
changeset: 34f61e53a563
user: greg
date: 2012-12-13 18:00:53
summary: More import tweaks.
affected #: 6 files
diff -r b96fe1d76b383ef2a77ff76bdc6580362aab4f0d -r 34f61e53a563e4b2b44b45b94aff8ed2d1e17738 lib/galaxy/tool_shed/install_manager.py
--- a/lib/galaxy/tool_shed/install_manager.py
+++ b/lib/galaxy/tool_shed/install_manager.py
@@ -2,13 +2,14 @@
Manage automatic installation of tools configured in the xxx.xml files in ~/scripts/migrate_tools (e.g., 0002_tools.xml).
All of the tools were at some point included in the Galaxy distribution, but are now hosted in the main Galaxy tool shed.
"""
-import urllib2, tempfile
+import os, urllib2, tempfile
+from galaxy import util
from galaxy.tools import ToolSection
from galaxy.util.json import from_json_string, to_json_string
import galaxy.util.shed_util as shed_util
import galaxy.util.shed_util_common as suc
from galaxy.util.odict import odict
-from galaxy.tool_shed.common_util import *
+from galaxy.tool_shed import common_util
class InstallManager( object ):
def __init__( self, app, latest_migration_script_number, tool_shed_install_config, migrated_tools_config, install_dependencies ):
@@ -37,17 +38,17 @@
tree = util.parse_xml( tool_shed_install_config )
root = tree.getroot()
self.tool_shed = shed_util.clean_tool_shed_url( root.get( 'name' ) )
- self.repository_owner = REPOSITORY_OWNER
+ self.repository_owner = common_util.REPOSITORY_OWNER
index, self.shed_config_dict = shed_util.get_shed_tool_conf_dict( app, self.migrated_tools_config )
# Since tool migration scripts can be executed any number of times, we need to make sure the appropriate tools are defined in
# tool_conf.xml. If no tools associated with the migration stage are defined, no repositories will be installed on disk.
# The default behavior is that the tool shed is down.
tool_shed_accessible = False
- tool_panel_configs = get_non_shed_tool_panel_configs( app )
+ tool_panel_configs = common_util.get_non_shed_tool_panel_configs( app )
if tool_panel_configs:
# The missing_tool_configs_dict contents are something like:
# {'emboss_antigenic.xml': [('emboss', '5.0.0', 'package', '\nreadme blah blah blah\n')]}
- tool_shed_accessible, missing_tool_configs_dict = check_for_missing_tools( app, tool_panel_configs, latest_migration_script_number )
+ tool_shed_accessible, missing_tool_configs_dict = common_util.check_for_missing_tools( app, tool_panel_configs, latest_migration_script_number )
else:
# It doesn't matter if the tool shed is accessible since there are no migrated tools defined in the local Galaxy instance, but
# we have to set the value of tool_shed_accessible to True so that the value of migrate_tools.version can be correctly set in
@@ -112,7 +113,7 @@
# Tools outside of sections.
file_path = elem.get( 'file', None )
if file_path:
- name = strip_path( file_path )
+ name = suc.strip_path( file_path )
if name in migrated_tool_configs:
if elem not in tool_panel_elems:
tool_panel_elems.append( elem )
@@ -122,7 +123,7 @@
if section_elem.tag == 'tool':
file_path = section_elem.get( 'file', None )
if file_path:
- name = strip_path( file_path )
+ name = suc.strip_path( file_path )
if name in migrated_tool_configs:
# Append the section, not the tool.
if elem not in tool_panel_elems:
@@ -139,7 +140,7 @@
if proprietary_tool_panel_elem.tag == 'tool':
# The proprietary_tool_panel_elem looks something like <tool file="emboss_5/emboss_antigenic.xml" />.
proprietary_tool_config = proprietary_tool_panel_elem.get( 'file' )
- proprietary_name = strip_path( proprietary_tool_config )
+ proprietary_name = suc.strip_path( proprietary_tool_config )
if tool_config == proprietary_name:
# The tool is loaded outside of any sections.
tool_sections.append( None )
@@ -151,7 +152,7 @@
if section_elem.tag == 'tool':
# The section_elem looks something like <tool file="emboss_5/emboss_antigenic.xml" />.
proprietary_tool_config = section_elem.get( 'file' )
- proprietary_name = strip_path( proprietary_tool_config )
+ proprietary_name = suc.strip_path( proprietary_tool_config )
if tool_config == proprietary_name:
# The tool is loaded inside of the section_elem.
tool_sections.append( ToolSection( proprietary_tool_panel_elem ) )
@@ -349,7 +350,7 @@
shed_util.update_tool_shed_repository_status( self.app, tool_shed_repository, self.app.model.ToolShedRepository.installation_status.INSTALLED )
@property
def non_shed_tool_panel_configs( self ):
- return get_non_shed_tool_panel_configs( self.app )
+ return common_util.get_non_shed_tool_panel_configs( self.app )
def __get_url_from_tool_shed( self, tool_shed ):
# The value of tool_shed is something like: toolshed.g2.bx.psu.edu. We need the URL to this tool shed, which is something like:
# http://toolshed.g2.bx.psu.edu/
diff -r b96fe1d76b383ef2a77ff76bdc6580362aab4f0d -r 34f61e53a563e4b2b44b45b94aff8ed2d1e17738 lib/galaxy/tool_shed/migrate/check.py
--- a/lib/galaxy/tool_shed/migrate/check.py
+++ b/lib/galaxy/tool_shed/migrate/check.py
@@ -6,6 +6,7 @@
from migrate.versioning import repository, schema
from sqlalchemy import *
from galaxy.util.odict import odict
+from galaxy.tool_shed import common_util
log = logging.getLogger( __name__ )
@@ -48,11 +49,11 @@
# New installations will not be missing tools, so we don't need to worry about them.
missing_tool_configs_dict = odict()
else:
- tool_panel_configs = get_non_shed_tool_panel_configs( app )
+ tool_panel_configs = common_util.get_non_shed_tool_panel_configs( app )
if tool_panel_configs:
# The missing_tool_configs_dict contents are something like:
# {'emboss_antigenic.xml': [('emboss', '5.0.0', 'package', '\nreadme blah blah blah\n')]}
- tool_shed_accessible, missing_tool_configs_dict = check_for_missing_tools( app, tool_panel_configs, latest_tool_migration_script_number )
+ tool_shed_accessible, missing_tool_configs_dict = common_util.check_for_missing_tools( app, tool_panel_configs, latest_tool_migration_script_number )
else:
# It doesn't matter if the tool shed is accessible since there are no migrated tools defined in the local Galaxy instance, but
# we have to set the value of tool_shed_accessible to True so that the value of migrate_tools.version can be correctly set in
diff -r b96fe1d76b383ef2a77ff76bdc6580362aab4f0d -r 34f61e53a563e4b2b44b45b94aff8ed2d1e17738 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -32,7 +32,7 @@
from cgi import FieldStorage
from galaxy.util.hash_util import *
from galaxy.util import listify
-import galaxy.util.shed_util as shed_util
+import galaxy.util.shed_util
from galaxy.web import url_for
from galaxy.visualization.genome.visual_analytics import TracksterConfig
@@ -887,11 +887,11 @@
def tool_shed_repository( self ):
# If this tool is included in an installed tool shed repository, return it.
if self.tool_shed:
- return shed_util.get_tool_shed_repository_by_shed_name_owner_installed_changeset_revision( self.app,
- self.tool_shed,
- self.repository_name,
- self.repository_owner,
- self.installed_changeset_revision )
+ return galaxy.util.shed_util.get_tool_shed_repository_by_shed_name_owner_installed_changeset_revision( self.app,
+ self.tool_shed,
+ self.repository_name,
+ self.repository_owner,
+ self.installed_changeset_revision )
return None
def __get_job_run_config( self, run_configs, key, job_params=None ):
# Look through runners/handlers to find one with matching parameters.
diff -r b96fe1d76b383ef2a77ff76bdc6580362aab4f0d -r 34f61e53a563e4b2b44b45b94aff8ed2d1e17738 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -393,7 +393,7 @@
for tool_dict in metadata[ 'tools' ]:
guid = tool_dict[ 'guid' ]
tool_config = tool_dict[ 'tool_config' ]
- file_name = strip_path( tool_config )
+ file_name = suc.strip_path( tool_config )
guids_and_configs[ guid ] = file_name
# Parse the shed_tool_conf file in which all of this repository's tools are defined and generate the tool_panel_dict.
tree = util.parse_xml( shed_tool_conf )
@@ -434,7 +434,7 @@
{<Tool guid> : [{ tool_config : <tool_config_file>, id: <ToolSection id>, version : <ToolSection version>, name : <ToolSection name>}]}
"""
tool_panel_dict = {}
- file_name = strip_path( tool_config )
+ file_name = suc.strip_path( tool_config )
tool_section_dicts = generate_tool_section_dicts( tool_config=file_name, tool_sections=tool_sections )
tool_panel_dict[ guid ] = tool_section_dicts
return tool_panel_dict
@@ -471,11 +471,11 @@
return tool_section
def get_config( config_file, repo, ctx, dir ):
"""Return the latest version of config_filename from the repository manifest."""
- config_file = strip_path( config_file )
+ config_file = suc.strip_path( config_file )
for changeset in suc.reversed_upper_bounded_changelog( repo, ctx ):
changeset_ctx = repo.changectx( changeset )
for ctx_file in changeset_ctx.files():
- ctx_file_name = strip_path( ctx_file )
+ ctx_file_name = suc.strip_path( ctx_file )
if ctx_file_name == config_file:
return suc.get_named_tmpfile_from_ctx( changeset_ctx, ctx_file, dir )
return None
@@ -491,7 +491,7 @@
for converter in elem.findall( 'converter' ):
converter_config = converter.get( 'file', None )
if converter_config:
- converter_config_file_name = strip_path( converter_config )
+ converter_config_file_name = suc.strip_path( converter_config )
for root, dirs, files in os.walk( relative_install_dir ):
if root.find( '.hg' ) < 0:
for name in files:
@@ -508,7 +508,7 @@
for display_app in elem.findall( 'display' ):
display_config = display_app.get( 'file', None )
if display_config:
- display_config_file_name = strip_path( display_config )
+ display_config_file_name = suc.strip_path( display_config )
for root, dirs, files in os.walk( relative_install_dir ):
if root.find( '.hg' ) < 0:
for name in files:
@@ -574,7 +574,7 @@
if shed_tool_conf == shed_tool_conf_dict[ 'config_filename' ]:
return index, shed_tool_conf_dict
else:
- file_name = strip_path( shed_tool_conf_dict[ 'config_filename' ] )
+ file_name = suc.strip_path( shed_tool_conf_dict[ 'config_filename' ] )
if shed_tool_conf == file_name:
return index, shed_tool_conf_dict
def get_tool_index_sample_files( sample_files ):
@@ -722,11 +722,11 @@
params_with_missing_index_file = repository_tool.params_with_missing_index_file
for param in params_with_missing_index_file:
options = param.options
- missing_file_name = strip_path( options.missing_index_file )
+ missing_file_name = suc.strip_path( options.missing_index_file )
if missing_file_name not in sample_files_copied:
# The repository must contain the required xxx.loc.sample file.
for sample_file in sample_files:
- sample_file_name = strip_path( sample_file )
+ sample_file_name = suc.strip_path( sample_file )
if sample_file_name == '%s.sample' % missing_file_name:
suc.copy_sample_file( app, sample_file )
if options.tool_data_table and options.tool_data_table.missing_index_file:
diff -r b96fe1d76b383ef2a77ff76bdc6580362aab4f0d -r 34f61e53a563e4b2b44b45b94aff8ed2d1e17738 lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -1316,7 +1316,7 @@
return suc.get_repository_file_contents( file_path )
def get_file_from_changeset_revision( self, repo_files_dir, changeset_revision, file_name, dir ):
"""Return file_name from the received changeset_revision of the repository manifest."""
- stripped_file_name = strip_path( file_name )
+ stripped_file_name = suc.strip_path( file_name )
repo = hg.repository( suc.get_configured_ui(), repo_files_dir )
ctx = suc.get_changectx_for_changeset( repo, changeset_revision )
named_tmp_file = suc.get_named_tmpfile_from_ctx( ctx, file_name, dir )
diff -r b96fe1d76b383ef2a77ff76bdc6580362aab4f0d -r 34f61e53a563e4b2b44b45b94aff8ed2d1e17738 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -1109,7 +1109,7 @@
tool_path = shed_tool_conf_dict[ 'tool_path' ]
break
else:
- file_name = strip_path( config_filename )
+ file_name = suc.strip_path( config_filename )
if file_name == shed_tool_conf:
tool_path = shed_tool_conf_dict[ 'tool_path' ]
break
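Editor's summary of the pattern in this changeset: bare names that previously resolved through wildcard imports (e.g. from galaxy.tool_shed.common_util import *) are replaced with module-qualified references so the origin of each helper is explicit at the call site. A sketch, assuming a Galaxy environment; the file path is the example used in the comments above.

    import galaxy.util.shed_util_common as suc
    from galaxy.tool_shed import common_util

    file_path = 'emboss_5/emboss_antigenic.xml'   # example path from the diff comments
    name = suc.strip_path( file_path )            # helper clearly comes from shed_util_common
    owner = common_util.REPOSITORY_OWNER          # constant clearly comes from common_util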
Repository URL: https://bitbucket.org/galaxy/galaxy-central/