1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/3b918b912fe6/
Changeset: 3b918b912fe6
User: natefoo
Date: 2013-03-26 20:40:59
Summary: merge next-stable.
Affected #: 7 files
diff -r 42b616e64d0575012d40acf69a355b98bf954db7 -r 3b918b912fe68718eb0b20c5c76e0e639f1cf42c lib/galaxy/jobs/runners/__init__.py
--- a/lib/galaxy/jobs/runners/__init__.py
+++ b/lib/galaxy/jobs/runners/__init__.py
@@ -95,35 +95,43 @@
"""
raise NotImplementedError()
- # Runners must override the job handling methods
- def queue_job(self, job_wrapper):
+ def prepare_job(self, job_wrapper, include_metadata=False, include_work_dir_outputs=True):
"""Some sanity checks that all runners' queue_job() methods are likely to want to do
"""
job_id = job_wrapper.get_id_tag()
job_state = job_wrapper.get_state()
job_wrapper.is_ready = False
+ job_wrapper.runner_command_line = None
# Make sure the job hasn't been deleted
if job_state == model.Job.states.DELETED:
log.debug( "(%s) Job deleted by user before it entered the %s queue" % ( job_id, self.runner_name ) )
if self.app.config.cleanup_job in ( "always", "onsuccess" ):
job_wrapper.cleanup()
- return
+ return False
elif job_state != model.Job.states.QUEUED:
log.info( "(%d) Job is in state %s, skipping execution" % ( job_id, job_state ) )
# cleanup may not be safe in all states
- return
+ return False
# Prepare the job
try:
job_wrapper.prepare()
- job_wrapper.runner_command_line = self.build_command_line( job_wrapper )
+ job_wrapper.runner_command_line = self.build_command_line( job_wrapper, include_metadata=include_metadata, include_work_dir_outputs=include_work_dir_outputs )
except:
log.exception("(%s) Failure preparing job" % job_id)
job_wrapper.fail( "failure preparing job", exception=True )
- return
+ return False
- job_wrapper.is_ready = True
+ if not job_wrapper.runner_command_line:
+ job_wrapper.finish( '', '' )
+ return False
+
+ return True
+
+ # Runners must override the job handling methods
+ def queue_job(self, job_wrapper):
+ raise NotImplementedError()
def stop_job(self, job):
raise NotImplementedError()
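
A minimal sketch of how a runner subclass now uses the prepare_job() hook introduced above; the runner class name and the final submission step are hypothetical, and the BaseJobRunner import path is assumed from the other runner modules:

from galaxy.jobs.runners import BaseJobRunner

class ExampleJobRunner( BaseJobRunner ):
    runner_name = "ExampleRunner"  # hypothetical runner, for illustration only

    def queue_job( self, job_wrapper ):
        # prepare_job() performs the shared sanity checks and builds the command line;
        # it returns False if the job was deleted, is in the wrong state, failed
        # preparation, or produced no command line (in which case it is finished early).
        if not self.prepare_job( job_wrapper, include_metadata=True ):
            return
        # command line has been added to the wrapper by prepare_job()
        command_line = job_wrapper.runner_command_line
        # ... hand command_line off to the actual execution backend here ...
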
diff -r 42b616e64d0575012d40acf69a355b98bf954db7 -r 3b918b912fe68718eb0b20c5c76e0e639f1cf42c lib/galaxy/jobs/runners/cli.py
--- a/lib/galaxy/jobs/runners/cli.py
+++ b/lib/galaxy/jobs/runners/cli.py
@@ -75,12 +75,11 @@
def queue_job( self, job_wrapper ):
"""Create job script and submit it to the DRM"""
- # Superclass method has some basic sanity checks
- super( ShellJobRunner, self ).queue_job( job_wrapper )
- if not job_wrapper.is_ready:
+ # prepare the job
+ if not self.prepare_job( job_wrapper, include_metadata=True ):
return
- # command line has been added to the wrapper by the superclass queue_job()
+ # command line has been added to the wrapper by prepare_job()
command_line = job_wrapper.runner_command_line
# Get shell and job execution interface
diff -r 42b616e64d0575012d40acf69a355b98bf954db7 -r 3b918b912fe68718eb0b20c5c76e0e639f1cf42c lib/galaxy/jobs/runners/condor.py
--- a/lib/galaxy/jobs/runners/condor.py
+++ b/lib/galaxy/jobs/runners/condor.py
@@ -58,12 +58,11 @@
def queue_job( self, job_wrapper ):
"""Create job script and submit it to the DRM"""
- # Superclass method has some basic sanity checks
- super( CondorJobRunner, self ).queue_job( job_wrapper )
- if not job_wrapper.is_ready:
+ # prepare the job
+ if not self.prepare_job( job_wrapper, include_metadata=True ):
return
- # command line has been added to the wrapper by the superclass queue_job()
+ # command line has been added to the wrapper by prepare_job()
command_line = job_wrapper.runner_command_line
# get configured job destination
diff -r 42b616e64d0575012d40acf69a355b98bf954db7 -r 3b918b912fe68718eb0b20c5c76e0e639f1cf42c lib/galaxy/jobs/runners/drmaa.py
--- a/lib/galaxy/jobs/runners/drmaa.py
+++ b/lib/galaxy/jobs/runners/drmaa.py
@@ -105,14 +105,13 @@
def queue_job( self, job_wrapper ):
"""Create job script and submit it to the DRM"""
- # Superclass method has some basic sanity checks
- super( DRMAAJobRunner, self ).queue_job( job_wrapper )
- if not job_wrapper.is_ready:
+ # prepare the job
+ if not self.prepare_job( job_wrapper, include_metadata=True ):
return
- # command line has been added to the wrapper by the superclass queue_job()
+ # command line has been added to the wrapper by prepare_job()
command_line = job_wrapper.runner_command_line
-
+
# get configured job destination
job_destination = job_wrapper.job_destination
diff -r 42b616e64d0575012d40acf69a355b98bf954db7 -r 3b918b912fe68718eb0b20c5c76e0e639f1cf42c lib/galaxy/jobs/runners/local.py
--- a/lib/galaxy/jobs/runners/local.py
+++ b/lib/galaxy/jobs/runners/local.py
@@ -35,15 +35,14 @@
self._init_worker_threads()
def queue_job( self, job_wrapper ):
- # Superclass method has some basic sanity checks
- super( LocalJobRunner, self ).queue_job( job_wrapper )
- if not job_wrapper.is_ready:
+ # prepare the job
+ if not self.prepare_job( job_wrapper ):
return
stderr = stdout = ''
exit_code = 0
- # command line has been added to the wrapper by the superclass queue_job()
+ # command line has been added to the wrapper by prepare_job()
command_line = job_wrapper.runner_command_line
job_id = job_wrapper.get_id_tag()
diff -r 42b616e64d0575012d40acf69a355b98bf954db7 -r 3b918b912fe68718eb0b20c5c76e0e639f1cf42c lib/galaxy/jobs/runners/pbs.py
--- a/lib/galaxy/jobs/runners/pbs.py
+++ b/lib/galaxy/jobs/runners/pbs.py
@@ -222,12 +222,11 @@
def queue_job( self, job_wrapper ):
"""Create PBS script for a job and submit it to the PBS queue"""
- # Superclass method has some basic sanity checks
- super( PBSJobRunner, self ).queue_job( job_wrapper )
- if not job_wrapper.is_ready:
+ # prepare the job
+ if not self.prepare_job( job_wrapper, include_metadata=not( self.app.config.pbs_stage_path ) ):
return
- # command line has been added to the wrapper by the superclass queue_job()
+ # command line has been added to the wrapper by prepare_job()
command_line = job_wrapper.runner_command_line
job_destination = job_wrapper.job_destination
diff -r 42b616e64d0575012d40acf69a355b98bf954db7 -r 3b918b912fe68718eb0b20c5c76e0e639f1cf42c lib/galaxy/jobs/runners/tasks.py
--- a/lib/galaxy/jobs/runners/tasks.py
+++ b/lib/galaxy/jobs/runners/tasks.py
@@ -26,12 +26,14 @@
self._init_worker_threads()
def queue_job( self, job_wrapper ):
- super( TaskedJobRunner, self ).queue_job( job_wrapper )
- if not job_wrapper.is_ready:
+ # prepare the job
+ if not self.prepare_job( job_wrapper ):
return
+ # command line has been added to the wrapper by prepare_job()
+ command_line = job_wrapper.runner_command_line
+
stderr = stdout = ''
- command_line = job_wrapper.runner_command_line
# Persist the destination
job_wrapper.set_job_destination(job_wrapper.job_destination)
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/c5d7d2bd7928/
Changeset: c5d7d2bd7928
Branch: next-stable
User: natefoo
Date: 2013-03-26 20:40:36
Summary: Fix setting of external metadata broken by refactoring job preparation.
Affected #: 7 files
diff -r 55b651e70aef895545bf88039f6a5558976a1842 -r c5d7d2bd7928c160a5163fe9fc9a5e20aa5470ae lib/galaxy/jobs/runners/__init__.py
--- a/lib/galaxy/jobs/runners/__init__.py
+++ b/lib/galaxy/jobs/runners/__init__.py
@@ -95,35 +95,43 @@
"""
raise NotImplementedError()
- # Runners must override the job handling methods
- def queue_job(self, job_wrapper):
+ def prepare_job(self, job_wrapper, include_metadata=False, include_work_dir_outputs=True):
"""Some sanity checks that all runners' queue_job() methods are likely to want to do
"""
job_id = job_wrapper.get_id_tag()
job_state = job_wrapper.get_state()
job_wrapper.is_ready = False
+ job_wrapper.runner_command_line = None
# Make sure the job hasn't been deleted
if job_state == model.Job.states.DELETED:
log.debug( "(%s) Job deleted by user before it entered the %s queue" % ( job_id, self.runner_name ) )
if self.app.config.cleanup_job in ( "always", "onsuccess" ):
job_wrapper.cleanup()
- return
+ return False
elif job_state != model.Job.states.QUEUED:
log.info( "(%d) Job is in state %s, skipping execution" % ( job_id, job_state ) )
# cleanup may not be safe in all states
- return
+ return False
# Prepare the job
try:
job_wrapper.prepare()
- job_wrapper.runner_command_line = self.build_command_line( job_wrapper )
+ job_wrapper.runner_command_line = self.build_command_line( job_wrapper, include_metadata=include_metadata, include_work_dir_outputs=include_work_dir_outputs )
except:
log.exception("(%s) Failure preparing job" % job_id)
job_wrapper.fail( "failure preparing job", exception=True )
- return
+ return False
- job_wrapper.is_ready = True
+ if not job_wrapper.runner_command_line:
+ job_wrapper.finish( '', '' )
+ return False
+
+ return True
+
+ # Runners must override the job handling methods
+ def queue_job(self, job_wrapper):
+ raise NotImplementedError()
def stop_job(self, job):
raise NotImplementedError()
diff -r 55b651e70aef895545bf88039f6a5558976a1842 -r c5d7d2bd7928c160a5163fe9fc9a5e20aa5470ae lib/galaxy/jobs/runners/cli.py
--- a/lib/galaxy/jobs/runners/cli.py
+++ b/lib/galaxy/jobs/runners/cli.py
@@ -75,12 +75,11 @@
def queue_job( self, job_wrapper ):
"""Create job script and submit it to the DRM"""
- # Superclass method has some basic sanity checks
- super( ShellJobRunner, self ).queue_job( job_wrapper )
- if not job_wrapper.is_ready:
+ # prepare the job
+ if not self.prepare_job( job_wrapper, include_metadata=True ):
return
- # command line has been added to the wrapper by the superclass queue_job()
+ # command line has been added to the wrapper by prepare_job()
command_line = job_wrapper.runner_command_line
# Get shell and job execution interface
diff -r 55b651e70aef895545bf88039f6a5558976a1842 -r c5d7d2bd7928c160a5163fe9fc9a5e20aa5470ae lib/galaxy/jobs/runners/condor.py
--- a/lib/galaxy/jobs/runners/condor.py
+++ b/lib/galaxy/jobs/runners/condor.py
@@ -58,12 +58,11 @@
def queue_job( self, job_wrapper ):
"""Create job script and submit it to the DRM"""
- # Superclass method has some basic sanity checks
- super( CondorJobRunner, self ).queue_job( job_wrapper )
- if not job_wrapper.is_ready:
+ # prepare the job
+ if not self.prepare_job( job_wrapper, include_metadata=True ):
return
- # command line has been added to the wrapper by the superclass queue_job()
+ # command line has been added to the wrapper by prepare_job()
command_line = job_wrapper.runner_command_line
# get configured job destination
diff -r 55b651e70aef895545bf88039f6a5558976a1842 -r c5d7d2bd7928c160a5163fe9fc9a5e20aa5470ae lib/galaxy/jobs/runners/drmaa.py
--- a/lib/galaxy/jobs/runners/drmaa.py
+++ b/lib/galaxy/jobs/runners/drmaa.py
@@ -105,14 +105,13 @@
def queue_job( self, job_wrapper ):
"""Create job script and submit it to the DRM"""
- # Superclass method has some basic sanity checks
- super( DRMAAJobRunner, self ).queue_job( job_wrapper )
- if not job_wrapper.is_ready:
+ # prepare the job
+ if not self.prepare_job( job_wrapper, include_metadata=True ):
return
- # command line has been added to the wrapper by the superclass queue_job()
+ # command line has been added to the wrapper by prepare_job()
command_line = job_wrapper.runner_command_line
-
+
# get configured job destination
job_destination = job_wrapper.job_destination
diff -r 55b651e70aef895545bf88039f6a5558976a1842 -r c5d7d2bd7928c160a5163fe9fc9a5e20aa5470ae lib/galaxy/jobs/runners/local.py
--- a/lib/galaxy/jobs/runners/local.py
+++ b/lib/galaxy/jobs/runners/local.py
@@ -35,15 +35,14 @@
self._init_worker_threads()
def queue_job( self, job_wrapper ):
- # Superclass method has some basic sanity checks
- super( LocalJobRunner, self ).queue_job( job_wrapper )
- if not job_wrapper.is_ready:
+ # prepare the job
+ if not self.prepare_job( job_wrapper ):
return
stderr = stdout = ''
exit_code = 0
- # command line has been added to the wrapper by the superclass queue_job()
+ # command line has been added to the wrapper by prepare_job()
command_line = job_wrapper.runner_command_line
job_id = job_wrapper.get_id_tag()
diff -r 55b651e70aef895545bf88039f6a5558976a1842 -r c5d7d2bd7928c160a5163fe9fc9a5e20aa5470ae lib/galaxy/jobs/runners/pbs.py
--- a/lib/galaxy/jobs/runners/pbs.py
+++ b/lib/galaxy/jobs/runners/pbs.py
@@ -222,12 +222,11 @@
def queue_job( self, job_wrapper ):
"""Create PBS script for a job and submit it to the PBS queue"""
- # Superclass method has some basic sanity checks
- super( PBSJobRunner, self ).queue_job( job_wrapper )
- if not job_wrapper.is_ready:
+ # prepare the job
+ if not self.prepare_job( job_wrapper, include_metadata=not( self.app.config.pbs_stage_path ) ):
return
- # command line has been added to the wrapper by the superclass queue_job()
+ # command line has been added to the wrapper by prepare_job()
command_line = job_wrapper.runner_command_line
job_destination = job_wrapper.job_destination
diff -r 55b651e70aef895545bf88039f6a5558976a1842 -r c5d7d2bd7928c160a5163fe9fc9a5e20aa5470ae lib/galaxy/jobs/runners/tasks.py
--- a/lib/galaxy/jobs/runners/tasks.py
+++ b/lib/galaxy/jobs/runners/tasks.py
@@ -26,12 +26,14 @@
self._init_worker_threads()
def queue_job( self, job_wrapper ):
- super( TaskedJobRunner, self ).queue_job( job_wrapper )
- if not job_wrapper.is_ready:
+ # prepare the job
+ if not self.prepare_job( job_wrapper ):
return
+ # command line has been added to the wrapper by prepare_job()
+ command_line = job_wrapper.runner_command_line
+
stderr = stdout = ''
- command_line = job_wrapper.runner_command_line
# Persist the destination
job_wrapper.set_job_destination(job_wrapper.job_destination)
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/eb7f1d35ad35/
Changeset: eb7f1d35ad35
User: james_taylor
Date: 2013-03-26 18:04:36
Summary: biostar: pass slugified version of tool name as tag when sending to question form
Affected #: 1 file
diff -r ac80dbfe7703102ff152760419361a55d3eb0f6f -r eb7f1d35ad35e2410393362b636039714c873b5e lib/galaxy/webapps/galaxy/controllers/biostar.py
--- a/lib/galaxy/webapps/galaxy/controllers/biostar.py
+++ b/lib/galaxy/webapps/galaxy/controllers/biostar.py
@@ -8,16 +8,35 @@
from galaxy.util import json
import hmac
+# Slugifying from Armin Ronacher (http://flask.pocoo.org/snippets/5/)
+
+import re
+from unicodedata import normalize
+
+_punct_re = re.compile(r'[\t !"#$%&\'()*\-/<=>?@\[\\\]^_`{|},.]+')
+
+
+def slugify(text, delim=u'-'):
+ """Generates an slightly worse ASCII-only slug."""
+ result = []
+ for word in _punct_re.split(text.lower()):
+ word = normalize('NFKD', word).encode('ascii', 'ignore')
+ if word:
+ result.append(word)
+ return unicode(delim.join(result))
+
+
# Biostar requires all keys to be present, so we start with a template
DEFAULT_PAYLOAD = {
- 'email': "",
- 'title': "Question about Galaxy",
+ 'email': "",
+ 'title': "Question about Galaxy",
'tags': 'galaxy',
- 'tool_name': '',
- 'tool_version': '',
+ 'tool_name': '',
+ 'tool_version': '',
'tool_id': ''
}
+
def encode_data( key, data ):
"""
Encode data to send a question to Biostar
@@ -28,6 +47,13 @@
return text, digest
+def tag_for_tool( tool ):
+ """
+ Generate a reasonable biostar tag for a tool.
+ """
+ return slugify( unicode( tool.name ) )
+
+
class BiostarController( BaseUIController ):
"""
Provides integration with Biostar through external authentication, see: http://liondb.com/help/x/
@@ -81,6 +107,10 @@
if not tool:
return error( "No tool found matching '%s'" % tool_id )
# Tool specific information for payload
- payload = { 'title': "Question about Galaxy tool '%s'" % tool.name, 'tool_name': tool.name, 'tool_version': tool.version, 'tool_id': tool.id }
+ payload = { 'title': "Question about Galaxy tool '%s'" % tool.name,
+ 'tool_name': tool.name,
+ 'tool_version': tool.version,
+ 'tool_id': tool.id,
+ 'tags': 'galaxy ' + tag_for_tool( tool ) }
# Pass on to regular question method
return self.biostar_question_redirect( trans, payload )
\ No newline at end of file
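
For reference, a standalone Python 2 sketch of what the slugify()/tag_for_tool() additions produce; the regex and normalization mirror the snippet above, and the tool name is hypothetical:

# -*- coding: utf-8 -*-
import re
from unicodedata import normalize

_punct_re = re.compile(r'[\t !"#$%&\'()*\-/<=>?@\[\\\]^_`{|},.]+')

def slugify(text, delim=u'-'):
    # Lowercase, split on punctuation/whitespace, drop non-ASCII, rejoin with the delimiter.
    result = []
    for word in _punct_re.split(text.lower()):
        word = normalize('NFKD', word).encode('ascii', 'ignore')
        if word:
            result.append(word)
    return unicode(delim.join(result))

print slugify(u"FASTQ Groomer (Sanger)")               # fastq-groomer-sanger
print 'galaxy ' + slugify(u"FASTQ Groomer (Sanger)")   # the value sent in the 'tags' field
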
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/504264153fe1/
Changeset: 504264153fe1
User: inithello
Date: 2013-03-26 15:02:17
Summary: Set do_not_test = True if the repository fails functional tests. Set do_not_test = False if the repository passes functional tests, so that the repository will always be re-tested against the most recent code.
Affected #: 1 file
diff -r b756a49b424565f5ec4037f25a89e987a47d184b -r 504264153fe1804c409c775be2c6db3f160b8fe2 test/install_and_test_tool_shed_repositories/functional_tests.py
--- a/test/install_and_test_tool_shed_repositories/functional_tests.py
+++ b/test/install_and_test_tool_shed_repositories/functional_tests.py
@@ -241,8 +241,7 @@
listing = file( location, 'r' ).read()
elif source == 'url':
assert tool_shed_api_key is not None, 'Cannot proceed without tool shed API key.'
- params = urllib.urlencode( dict( tools_functionally_correct='false',
- do_not_test='false',
+ params = urllib.urlencode( dict( do_not_test='false',
downloadable='true',
malicious='false',
includes_tools='true' ) )
@@ -281,10 +280,15 @@
return from_json_string( url_contents )
def register_test_result( url, metadata_id, test_results_dict, tests_passed=False ):
+ '''
+ Set do_not_test = True if the repository fails functional tests. Set do_not_test = False
+ if the repository passes functional tests, so that the repository will always be re-tested
+ against the most recent code.
+ '''
params = {}
if tests_passed:
params[ 'tools_functionally_correct' ] = 'true'
- params[ 'do_not_test' ] = 'true'
+ params[ 'do_not_test' ] = 'false'
else:
params[ 'tools_functionally_correct' ] = 'false'
params[ 'do_not_test' ] = 'true'
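
In other words, register_test_result() now posts one of two parameter sets (values exactly as in the diff above; the surrounding request code is omitted):

# Tests passed: record the tools as functionally correct, but leave the repository
# eligible for re-testing against newer code.
params = { 'tools_functionally_correct': 'true', 'do_not_test': 'false' }

# Tests failed: record the tools as not functionally correct and flag the repository
# to be skipped by subsequent test runs.
params = { 'tools_functionally_correct': 'false', 'do_not_test': 'true' }
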
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/b756a49b4245/
Changeset: b756a49b4245
User: greg
Date: 2013-03-26 14:29:02
Summary: Enhance and correct documentation in the new tool_shed_repositories Galaxy API controller as well as the documentation in the example API install scripts. Fix discovery of the tool_path setting in the selected shed-related tool panel config file in the tool_shed_repositories controller's install_repository_revision method.
Affected #: 3 files
diff -r 50e8e5efacadef15969a663ac3849883e0a49727 -r b756a49b424565f5ec4037f25a89e987a47d184b lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py
--- a/lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py
+++ b/lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py
@@ -58,7 +58,7 @@
GET /api/tool_shed_repositories/{encoded_tool_shed_repsository_id}
Display a dictionary containing information about a specified tool_shed_repository.
- :param tool_shed_repository_id: the encoded id of the `ToolShedRepository` object
+ :param id: the encoded id of the ToolShedRepository object
"""
# Example URL: http://localhost:8763/api/tool_shed_repositories/df7a1f0c02a5b08e
try:
@@ -80,18 +80,32 @@
POST /api/tool_shed_repositories/install_repository_revision
Install a specified repository revision from a specified tool shed into Galaxy.
- :param tool_shed_url: the base URL of the Tool Shed from which to install the Repository
- :param name: the name of the Repository
- :param owner: the owner of the Repository
- :param changset_revision: the changset_revision of the RepositoryMetadata object associated with the Repository
:param key: the current Galaxy admin user's API key
- :param new_tool_panel_section_label: optional label of a new section to be added to the Galaxy tool panel in which to load
- tools contained in the Repository. Either this parameter must be an empty string or
- the tool_panel_section_id parameter must be an empty string, as both cannot be used.
- :param tool_panel_section_id: optional id of the Galaxy tool panel section in which to load tools contained in the Repository.
- If not set, tools will be loaded outside of any sections in the tool panel. Either this
- parameter must be an empty string or the tool_panel_section_id parameter must be an empty string,
- as both cannot be used.
+
+ The following parameters are included in the payload.
+ :param tool_shed_url (required): the base URL of the Tool Shed from which to install the Repository
+ :param name (required): the name of the Repository
+ :param owner (required): the owner of the Repository
+ :param changeset_revision (required): the changeset_revision of the RepositoryMetadata object associated with the Repository
+ :param new_tool_panel_section_label (optional): label of a new section to be added to the Galaxy tool panel in which to load
+ tools contained in the Repository. Either this parameter must be an empty string or
+ the tool_panel_section_id parameter must be an empty string or both must be an empty
+ string (both cannot be used simultaneously).
+ :param tool_panel_section_id (optional): id of the Galaxy tool panel section in which to load tools contained in the Repository.
+ If this parameter is an empty string and the above new_tool_panel_section_label parameter is an
+ empty string, tools will be loaded outside of any sections in the tool panel. Either this
+ parameter must be an empty string or the new_tool_panel_section_label parameter must be an empty string
+ or both must be an empty string (both cannot be used simultaneously).
+ :param install_repository_dependencies (optional): Set to True if you want to install repository dependencies defined for the specified
+ repository being installed. The default setting is False.
+ :param install_tool_dependencies (optional): Set to True if you want to install tool dependencies defined for the specified repository being
+ installed. The default setting is False.
+ :param shed_tool_conf (optional): The shed-related tool panel configuration file configured in the "tool_config_file" setting in the Galaxy config file
+ (e.g., universe_wsgi.ini). At least one shed-related tool panel config file is required to be configured. Setting
+ this parameter to a specific file enables you to choose where the specified repository will be installed because
+ the tool_path attribute of the <toolbox> from the specified file is used as the installation location
+ (e.g., <toolbox tool_path="../shed_tools">). If this parameter is not set, a shed-related tool panel configuration
+ file will be selected automatically.
"""
# Get the information about the repository to be installed from the payload.
tool_shed_url = payload.get( 'tool_shed_url', '' )
@@ -160,29 +174,30 @@
includes_tools_for_display_in_tool_panel = repository_revision_dict[ 'includes_tools_for_display_in_tool_panel' ]
except:
raise HTTPBadRequest( detail="Missing required parameter 'includes_tools_for_display_in_tool_panel'." )
- # Get the information about the Galaxy components (e.g., tool pane section, tool config file, etc) that will contain the repository
- # information.
+ # Get the information about the Galaxy components (e.g., tool pane section, tool config file, etc) that will contain the repository information.
install_repository_dependencies = payload.get( 'install_repository_dependencies', False )
install_tool_dependencies = payload.get( 'install_tool_dependencies', False )
new_tool_panel_section = payload.get( 'new_tool_panel_section_label', '' )
shed_tool_conf = payload.get( 'shed_tool_conf', None )
- tool_path = payload.get( 'tool_path', None )
- tool_panel_section_id = payload.get( 'tool_panel_section_id', '' )
- if tool_panel_section_id not in [ None, '' ]:
- tool_panel_section = trans.app.toolbox.tool_panel[ tool_panel_section_id ]
+ if shed_tool_conf:
+ # Get the tool_path setting.
+ index, shed_config_dict = suc.get_shed_tool_conf_dict( trans.app, shed_tool_conf )
+ tool_path = shed_config_dict[ 'tool_path' ]
else:
- tool_panel_section = ''
- if not shed_tool_conf or not tool_path:
- # Pick a semi-random shed-related tool panel configuration file.
+ # Pick a semi-random shed-related tool panel configuration file and get the tool_path setting.
for shed_config_dict in trans.app.toolbox.shed_tool_confs:
+ # Don't use migrated_tools_conf.xml.
if shed_config_dict[ 'config_filename' ] != trans.app.config.migrated_tools_config:
break
shed_tool_conf = shed_config_dict[ 'config_filename' ]
tool_path = shed_config_dict[ 'tool_path' ]
if not shed_tool_conf:
raise HTTPBadRequest( detail="Missing required parameter 'shed_tool_conf'." )
- if not tool_path:
- raise HTTPBadRequest( detail="Missing required parameter 'tool_path'." )
+ tool_panel_section_id = payload.get( 'tool_panel_section_id', '' )
+ if tool_panel_section_id not in [ None, '' ]:
+ tool_panel_section = trans.app.toolbox.tool_panel[ tool_panel_section_id ]
+ else:
+ tool_panel_section = ''
# Build the dictionary of information necessary for creating tool_shed_repository database records for each repository being installed.
installation_dict = dict( install_repository_dependencies=install_repository_dependencies,
new_tool_panel_section=new_tool_panel_section,
diff -r 50e8e5efacadef15969a663ac3849883e0a49727 -r b756a49b424565f5ec4037f25a89e987a47d184b scripts/api/install_repository_tools_into_existing_tool_panel_section.py
--- a/scripts/api/install_repository_tools_into_existing_tool_panel_section.py
+++ b/scripts/api/install_repository_tools_into_existing_tool_panel_section.py
@@ -9,7 +9,7 @@
<section id="from_test_tool_shed" name="From Test Tool Shed" version=""></section>
-usage: ./install_repository_tools_into_existing_tool_panel_section <api_key <galaxy base url> tool_shed_url name owner changeset_revision tool_panel_section_id
+usage: ./install_repository_tools_into_existing_tool_panel_section.py <api_key <galaxy base url> tool_shed_url name owner changeset_revision tool_panel_section_id
Here is a working example of how to use this script to install a repository from the test tool shed.
./install_repository_tools_into_existing_tool_panel_section.py <api key><galaxy base url>/api/tool_shed_repositories/new/install_repository_revision http://testtoolshed.g2.bx.psu.edu gregs_filter greg f28d5018f9cb from_test_tool_shed
diff -r 50e8e5efacadef15969a663ac3849883e0a49727 -r b756a49b424565f5ec4037f25a89e987a47d184b scripts/api/install_repository_tools_into_new_tool_panel_section.py
--- a/scripts/api/install_repository_tools_into_new_tool_panel_section.py
+++ b/scripts/api/install_repository_tools_into_new_tool_panel_section.py
@@ -4,10 +4,10 @@
valid tools, loading them into a new section of the Galaxy tool panel. The repository has no tool dependencies or repository dependencies, so only
a single repository will be installed.
-usage: ./install_repository_tools_into_new_tool_panel_section <api_key <galaxy base url> tool_shed_url name owner changeset_revision new_tool_panel_section_label
+usage: ./install_repository_tools_into_new_tool_panel_section.py <api_key <galaxy base url> tool_shed_url name owner changeset_revision new_tool_panel_section_label
Here is a working example of how to use this script to install a repository from the test tool shed.
-./install_repository_tools_into_new_tool_panel_section.py <api key><galaxy base url>/api/tool_shed_repositories/new/install_repository_revision http://testtoolshed.g2.bx.psu.edu gregs_filter greg f28d5018f9cb From%20Test%20Tool%20Shed
+./install_repository_tools_into_new_tool_panel_section.py <api key><galaxy base url>/api/tool_shed_repositories/new/install_repository_revision http://testtoolshed.g2.bx.psu.edu gregs_filter greg f28d5018f9cb 'From Test Tool Shed'
"""
import os
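
A minimal sketch of calling the documented endpoint directly with Python 2's urllib2; the Galaxy URL and API key are placeholders, the repository coordinates come from the working example above, and passing the key as a ?key= query parameter follows the convention of the bundled scripts/api helpers:

import json
import urllib2

galaxy_url = 'http://localhost:8080'        # placeholder
api_key = 'REPLACE_WITH_ADMIN_API_KEY'      # placeholder

# Payload fields follow the install_repository_revision docstring above.
payload = { 'tool_shed_url': 'http://testtoolshed.g2.bx.psu.edu',
            'name': 'gregs_filter',
            'owner': 'greg',
            'changeset_revision': 'f28d5018f9cb',
            'tool_panel_section_id': 'from_test_tool_shed',
            'new_tool_panel_section_label': '',
            'install_repository_dependencies': False,
            'install_tool_dependencies': False }

url = '%s/api/tool_shed_repositories/new/install_repository_revision?key=%s' % ( galaxy_url, api_key )
request = urllib2.Request( url, data=json.dumps( payload ), headers={ 'Content-Type': 'application/json' } )
print json.loads( urllib2.urlopen( request ).read() )
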
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/50e8e5efacad/
Changeset: 50e8e5efacad
User: inithello
Date: 2013-03-26 14:27:39
Summary: More explicit specification of return codes, documentation of same.
Affected #: 1 file
diff -r 5b7bc81c3fc7712894a1ff1bd75476259bf6c591 -r 50e8e5efacadef15969a663ac3849883e0a49727 test/install_and_test_tool_shed_repositories/functional_tests.py
--- a/test/install_and_test_tool_shed_repositories/functional_tests.py
+++ b/test/install_and_test_tool_shed_repositories/functional_tests.py
@@ -787,14 +787,19 @@
print '# ----------------------------------------------------------------------------------'
print "# %d repositories not installed correctly:" % len( repositories_failed_install )
show_summary_output( repositories_failed_install )
+ else:
+ success = True
+ else:
+ success = True
print "####################################################################################"
- if repositories_tested > 0:
- if success:
- return 0
- else:
- return 1
+ # Normally, the value of 'success' would determine whether this test suite is marked as passed or failed
+ # in the automated buildbot framework. However, due to the procedure used here, we only want to report
+ # failure if a repository fails to install correctly. Therefore, we have overridden the value of 'success'
+ # here based on what actions the script has executed.
+ if success:
+ return 0
else:
- return 0
+ return 1
if __name__ == "__main__":
now = strftime( "%Y-%m-%d %H:%M:%S" )