galaxy-commits
Threads by month
- ----- 2024 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2023 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2022 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2021 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2020 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2019 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2018 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2017 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2016 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2015 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2014 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2013 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2012 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2011 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2010 -----
- December
- November
- October
- September
- August
- July
- June
- May
March 2013
- 1 participant
- 183 discussions
commit/galaxy-central: james_taylor: biostar: do not fill in question title
by commits-noreply@bitbucket.org 28 Mar '13
by commits-noreply@bitbucket.org 28 Mar '13
28 Mar '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/67f38ca2d9b3/
Changeset: 67f38ca2d9b3
User: james_taylor
Date: 2013-03-28 17:40:44
Summary: biostar: do not fill in question title
Affected #: 1 file
diff -r 4a150da14655b78c67cfa540496b64fe6a01433c -r 67f38ca2d9b3fa3721a93542ca9b26d7902d042b lib/galaxy/webapps/galaxy/controllers/biostar.py
--- a/lib/galaxy/webapps/galaxy/controllers/biostar.py
+++ b/lib/galaxy/webapps/galaxy/controllers/biostar.py
@@ -29,7 +29,7 @@
# Biostar requires all keys to be present, so we start with a template
DEFAULT_PAYLOAD = {
'email': "",
- 'title': "Question about Galaxy",
+ 'title': "",
'tags': 'galaxy',
'tool_name': '',
'tool_version': '',
@@ -109,8 +109,7 @@
if not tool:
return error( "No tool found matching '%s'" % tool_id )
# Tool specific information for payload
- payload = { 'title': "Question about Galaxy tool '%s'" % tool.name,
- 'tool_name': tool.name,
+ payload = { 'tool_name': tool.name,
'tool_version': tool.version,
'tool_id': tool.id,
'tags': 'galaxy ' + tag_for_tool( tool ) }
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: james_taylor: biostar: open tool question link in new window/tab
by commits-noreply@bitbucket.org 28 Mar '13
by commits-noreply@bitbucket.org 28 Mar '13
28 Mar '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/4a150da14655/
Changeset: 4a150da14655
User: james_taylor
Date: 2013-03-28 17:38:35
Summary: biostar: open tool question link in new window/tab
Affected #: 1 file
diff -r 2f0f9ce885682df7cee1b486634b401872fdff9e -r 4a150da14655b78c67cfa540496b64fe6a01433c templates/webapps/galaxy/tool_form.mako
--- a/templates/webapps/galaxy/tool_form.mako
+++ b/templates/webapps/galaxy/tool_form.mako
@@ -73,7 +73,7 @@
});
$(this).append(select_link).append(" ").append(unselect_link);
});
-
+
$(".add-librarydataset").live("click", function() {
var link = $(this);
$.ajax({
@@ -311,7 +311,7 @@
%if trans.app.config.biostar_url:
<!-- BioStar links -->
- <span class="pull-right"><a href="${h.url_for( controller='biostar', action='biostar_tool_question_redirect', tool_id=tool.id )}" target="galaxy_main" class="fa-icon-question-sign tooltip" data-original-title="Ask a question about this tool"></a></span>
+ <span class="pull-right"><a href="${h.url_for( controller='biostar', action='biostar_tool_question_redirect', tool_id=tool.id )}" target="_blank" class="fa-icon-question-sign tooltip" data-original-title="Ask a question about this tool"></a></span><!-- End of BioStar links -->
%endif
</div>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/76a1a432fc20/
Changeset: 76a1a432fc20
Branch: next-stable
User: carlfeberhard
Date: 2013-03-28 16:21:23
Summary: UsesHDA: fix new-style display apps; documentation
Affected #: 1 file
diff -r d64d821891f5d7678205c2261c66e13b348049f9 -r 76a1a432fc202cd7f44bf97e0e5d361d3159d2aa lib/galaxy/web/base/controller.py
--- a/lib/galaxy/web/base/controller.py
+++ b/lib/galaxy/web/base/controller.py
@@ -332,6 +332,8 @@
return None
def get_hda_dict( self, trans, hda ):
+ """Return full details of this HDA in dictionary form.
+ """
hda_dict = hda.get_api_value( view='element' )
history = hda.history
hda_dict[ 'api_type' ] = "file"
@@ -341,9 +343,10 @@
can_access_hda = ( trans.user_is_admin() or can_access_hda )
hda_dict[ 'accessible' ] = can_access_hda
- # return here if deleted and purged or can't access
+ # ---- return here if deleted AND purged OR can't access
purged = ( hda.purged or hda.dataset.purged )
if ( hda.deleted and purged ) or not can_access_hda:
+ #TODO: get_api_value should really go AFTER this - only summary data
return trans.security.encode_dict_ids( hda_dict )
if trans.user_is_admin() or trans.app.config.expose_dataset_path:
@@ -353,6 +356,7 @@
history_id = trans.security.encode_id( history.id ),
history_content_id = trans.security.encode_id( hda.id ) )
+ # indeces, assoc. metadata files, etc.
meta_files = []
for meta_type in hda.metadata.spec.keys():
if isinstance( hda.metadata.spec[ meta_type ].param, FileParameter ):
@@ -361,13 +365,15 @@
hda_dict[ 'meta_files' ] = meta_files
hda_dict[ 'display_types' ] = self.get_old_display_applications( trans, hda )
- #hda_dict[ 'display_apps' ] = self.get_display_apps( trans, hda )
+ hda_dict[ 'display_apps' ] = self.get_display_apps( trans, hda )
hda_dict[ 'visualizations' ] = hda.get_visualizations()
- # return here if deleted
+ # ---- return here if deleted
if hda.deleted and not purged:
return trans.security.encode_dict_ids( hda_dict )
+ # if a tool declares 'force_history_refresh' in its xml, when the hda -> ready, reload the history panel
+ # expensive
if( ( hda.state in [ 'running', 'queued' ] )
and ( hda.creating_job and hda.creating_job.tool_id ) ):
tool_used = trans.app.toolbox.get_tool( hda.creating_job.tool_id )
@@ -383,7 +389,6 @@
def get_display_app_url( display_app_link, hda, trans ):
web_url_for = routes.URLGenerator( trans.webapp.mapper, trans.environ )
dataset_hash, user_hash = da_util.encode_dataset_user( trans, hda, None )
- return ''
return web_url_for( controller='dataset',
action="display_application",
dataset_id=dataset_hash,
https://bitbucket.org/galaxy/galaxy-central/commits/2f0f9ce88568/
Changeset: 2f0f9ce88568
User: carlfeberhard
Date: 2013-03-28 16:21:56
Summary: merge next-stable
Affected #: 1 file
diff -r 2a1a820d0357b6d4bc7632dc88e6853acd5622ae -r 2f0f9ce885682df7cee1b486634b401872fdff9e lib/galaxy/web/base/controller.py
--- a/lib/galaxy/web/base/controller.py
+++ b/lib/galaxy/web/base/controller.py
@@ -332,6 +332,8 @@
return None
def get_hda_dict( self, trans, hda ):
+ """Return full details of this HDA in dictionary form.
+ """
hda_dict = hda.get_api_value( view='element' )
history = hda.history
hda_dict[ 'api_type' ] = "file"
@@ -341,9 +343,10 @@
can_access_hda = ( trans.user_is_admin() or can_access_hda )
hda_dict[ 'accessible' ] = can_access_hda
- # return here if deleted and purged or can't access
+ # ---- return here if deleted AND purged OR can't access
purged = ( hda.purged or hda.dataset.purged )
if ( hda.deleted and purged ) or not can_access_hda:
+ #TODO: get_api_value should really go AFTER this - only summary data
return trans.security.encode_dict_ids( hda_dict )
if trans.user_is_admin() or trans.app.config.expose_dataset_path:
@@ -353,6 +356,7 @@
history_id = trans.security.encode_id( history.id ),
history_content_id = trans.security.encode_id( hda.id ) )
+ # indeces, assoc. metadata files, etc.
meta_files = []
for meta_type in hda.metadata.spec.keys():
if isinstance( hda.metadata.spec[ meta_type ].param, FileParameter ):
@@ -361,13 +365,15 @@
hda_dict[ 'meta_files' ] = meta_files
hda_dict[ 'display_types' ] = self.get_old_display_applications( trans, hda )
- #hda_dict[ 'display_apps' ] = self.get_display_apps( trans, hda )
+ hda_dict[ 'display_apps' ] = self.get_display_apps( trans, hda )
hda_dict[ 'visualizations' ] = hda.get_visualizations()
- # return here if deleted
+ # ---- return here if deleted
if hda.deleted and not purged:
return trans.security.encode_dict_ids( hda_dict )
+ # if a tool declares 'force_history_refresh' in its xml, when the hda -> ready, reload the history panel
+ # expensive
if( ( hda.state in [ 'running', 'queued' ] )
and ( hda.creating_job and hda.creating_job.tool_id ) ):
tool_used = trans.app.toolbox.get_tool( hda.creating_job.tool_id )
@@ -383,7 +389,6 @@
def get_display_app_url( display_app_link, hda, trans ):
web_url_for = routes.URLGenerator( trans.webapp.mapper, trans.environ )
dataset_hash, user_hash = da_util.encode_dataset_user( trans, hda, None )
- return ''
return web_url_for( controller='dataset',
action="display_application",
dataset_id=dataset_hash,
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: james_taylor: biostar: send encoded unique ID for user as username
by commits-noreply@bitbucket.org 27 Mar '13
by commits-noreply@bitbucket.org 27 Mar '13
27 Mar '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/2a1a820d0357/
Changeset: 2a1a820d0357
User: james_taylor
Date: 2013-03-27 20:36:07
Summary: biostar: send encoded unique ID for user as username
Affected #: 1 file
diff -r bc62ff98a7e61a1fe6d24bddcfcd277b39573256 -r 2a1a820d0357b6d4bc7632dc88e6853acd5622ae lib/galaxy/webapps/galaxy/controllers/biostar.py
--- a/lib/galaxy/webapps/galaxy/controllers/biostar.py
+++ b/lib/galaxy/webapps/galaxy/controllers/biostar.py
@@ -72,14 +72,16 @@
payload = dict( DEFAULT_PAYLOAD, **payload )
# Do the best we can of providing user information for the payload
if trans.user:
+ payload['username'] = "user-" + trans.security.encode_id( trans.user.id )
payload['email'] = trans.user.email
if trans.user.username:
- payload['username'] = trans.user.username
payload['display_name'] = trans.user.username
else:
- payload['display_name'] = "Galaxy User"
+ payload['display_name'] = trans.user.email.split( "@" )[0]
else:
- payload['username'] = payload['display_name'] = "Anonymous Galaxy User %d" % trans.galaxy_session.id
+ encoded = trans.security.encode_id( trans.galaxy_session.id )
+ payload['username'] = "anon-" + encoded
+ payload['display_name'] = "Anonymous Galaxy User %d" % encoded[0:8]
data, digest = encode_data( trans.app.config.biostar_key, payload )
return trans.response.send_redirect( url_for( trans.app.config.biostar_url, data=data, digest=digest, name=trans.app.config.biostar_key_name, action=biostar_action ) )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/d64d821891f5/
Changeset: d64d821891f5
Branch: next-stable
User: carlfeberhard
Date: 2013-03-27 20:31:41
Summary: history panel: only fetch force_history_refresh for running/queued HDAs
Affected #: 1 file
diff -r 2af272e9f2565cd9272c0aa5004a5726cfafc9bf -r d64d821891f5d7678205c2261c66e13b348049f9 lib/galaxy/web/base/controller.py
--- a/lib/galaxy/web/base/controller.py
+++ b/lib/galaxy/web/base/controller.py
@@ -360,15 +360,16 @@
if meta_files:
hda_dict[ 'meta_files' ] = meta_files
- hda_dict[ 'display_apps' ] = self.get_display_apps( trans, hda )
hda_dict[ 'display_types' ] = self.get_old_display_applications( trans, hda )
+ #hda_dict[ 'display_apps' ] = self.get_display_apps( trans, hda )
hda_dict[ 'visualizations' ] = hda.get_visualizations()
# return here if deleted
if hda.deleted and not purged:
return trans.security.encode_dict_ids( hda_dict )
- if hda.creating_job and hda.creating_job.tool_id:
+ if( ( hda.state in [ 'running', 'queued' ] )
+ and ( hda.creating_job and hda.creating_job.tool_id ) ):
tool_used = trans.app.toolbox.get_tool( hda.creating_job.tool_id )
if tool_used and tool_used.force_history_refresh:
hda_dict[ 'force_history_refresh' ] = True
@@ -382,6 +383,7 @@
def get_display_app_url( display_app_link, hda, trans ):
web_url_for = routes.URLGenerator( trans.webapp.mapper, trans.environ )
dataset_hash, user_hash = da_util.encode_dataset_user( trans, hda, None )
+ return ''
return web_url_for( controller='dataset',
action="display_application",
dataset_id=dataset_hash,
https://bitbucket.org/galaxy/galaxy-central/commits/bc62ff98a7e6/
Changeset: bc62ff98a7e6
User: carlfeberhard
Date: 2013-03-27 20:32:19
Summary: merge next-stable
Affected #: 1 file
diff -r 5af3b1cbb2255fcf02dd4d214f211ffe4c4bc8b3 -r bc62ff98a7e61a1fe6d24bddcfcd277b39573256 lib/galaxy/web/base/controller.py
--- a/lib/galaxy/web/base/controller.py
+++ b/lib/galaxy/web/base/controller.py
@@ -360,15 +360,16 @@
if meta_files:
hda_dict[ 'meta_files' ] = meta_files
- hda_dict[ 'display_apps' ] = self.get_display_apps( trans, hda )
hda_dict[ 'display_types' ] = self.get_old_display_applications( trans, hda )
+ #hda_dict[ 'display_apps' ] = self.get_display_apps( trans, hda )
hda_dict[ 'visualizations' ] = hda.get_visualizations()
# return here if deleted
if hda.deleted and not purged:
return trans.security.encode_dict_ids( hda_dict )
- if hda.creating_job and hda.creating_job.tool_id:
+ if( ( hda.state in [ 'running', 'queued' ] )
+ and ( hda.creating_job and hda.creating_job.tool_id ) ):
tool_used = trans.app.toolbox.get_tool( hda.creating_job.tool_id )
if tool_used and tool_used.force_history_refresh:
hda_dict[ 'force_history_refresh' ] = True
@@ -382,6 +383,7 @@
def get_display_app_url( display_app_link, hda, trans ):
web_url_for = routes.URLGenerator( trans.webapp.mapper, trans.environ )
dataset_hash, user_hash = da_util.encode_dataset_user( trans, hda, None )
+ return ''
return web_url_for( controller='dataset',
action="display_application",
dataset_id=dataset_hash,
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/a0ee8a9673a7/
Changeset: a0ee8a9673a7
User: dannon
Date: 2013-03-27 19:26:38
Summary: Jobs: Fix job finish() to only set state after reconciling all outputs. This will allow us to eliminate force_history_refresh in the new history panel. Slight import cleanup.
Affected #: 1 file
diff -r d71a574758c9e49cbbdf889e5926c105745ef860 -r a0ee8a9673a70e6728ef1ed357c1606637e874e3 lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py
+++ b/lib/galaxy/jobs/__init__.py
@@ -2,24 +2,22 @@
Support for running a tool in Galaxy via an internal job management system
"""
+import copy
+import datetime
+import logging
import os
+import pwd
+import random
+import re
+import shutil
+import subprocess
import sys
-import pwd
-import time
-import copy
-import random
-import logging
-import datetime
import threading
import traceback
-import subprocess
import galaxy
from galaxy import util, model
from galaxy.util.bunch import Bunch
-from galaxy.datatypes.tabular import *
-from galaxy.datatypes.interval import *
-# tabular/interval imports appear to be unused. Clean up?
from galaxy.datatypes import metadata
from galaxy.util.json import from_json_string
from galaxy.util.expressions import ExpressionContext
@@ -882,13 +880,16 @@
return self.fail( job.info, stderr=stderr, stdout=stdout, exit_code=tool_exit_code )
# Check the tool's stdout, stderr, and exit code for errors, but only
- # if the job has not already been marked as having an error.
+ # if the job has not already been marked as having an error.
# The job's stdout and stderr will be set accordingly.
+
+ # We set final_job_state to use for dataset management, but *don't* set
+ # job.state until after dataset collection to prevent history issues
if job.states.ERROR != job.state:
if ( self.check_tool_output( stdout, stderr, tool_exit_code, job )):
- job.state = job.states.OK
+ final_job_state = job.states.OK
else:
- job.state = job.states.ERROR
+ final_job_state = job.states.ERROR
if self.version_string_cmd:
version_filename = self.get_version_string_path()
@@ -908,9 +909,11 @@
if os.path.exists( dataset_path.real_path ) and os.stat( dataset_path.real_path ).st_size > 0:
log.warning( "finish(): %s not found, but %s is not empty, so it will be used instead" % ( dataset_path.false_path, dataset_path.real_path ) )
else:
+ # Prior to fail we need to set job.state
+ job.state = final_job_state
return self.fail( "Job %s's output dataset(s) could not be read" % job.id )
+
job_context = ExpressionContext( dict( stdout = job.stdout, stderr = job.stderr ) )
- job_tool = self.app.toolbox.tools_by_id.get( job.tool_id, None )
for dataset_assoc in job.output_datasets + job.output_library_datasets:
context = self.get_dataset_finish_context( job_context, dataset_assoc.dataset.dataset )
@@ -926,10 +929,7 @@
# Update (non-library) job output datasets through the object store
if dataset not in job.output_library_datasets:
self.app.object_store.update_from_file(dataset.dataset, create=True)
- # TODO: The context['stderr'] holds stderr's contents. An error
- # only really occurs if the job also has an error. So check the
- # job's state:
- if job.states.ERROR == job.state:
+ if job.states.ERROR == final_job_state:
dataset.blurb = "error"
dataset.mark_unhidden()
elif dataset.has_data():
@@ -945,13 +945,7 @@
( not self.external_output_metadata.external_metadata_set_successfully( dataset, self.sa_session ) \
and self.app.config.retry_metadata_internally ):
dataset.datatype.set_meta( dataset, overwrite = False ) #call datatype.set_meta directly for the initial set_meta call during dataset creation
- # TODO: The context['stderr'] used to indicate that there
- # was an error. Now we must rely on the job's state instead;
- # that indicates whether the tool relied on stderr to indicate
- # the state or whether the tool used exit codes and regular
- # expressions to do so. So we use
- # job.state == job.states.ERROR to replace this same test.
- elif not self.external_output_metadata.external_metadata_set_successfully( dataset, self.sa_session ) and job.states.ERROR != job.state:
+ elif not self.external_output_metadata.external_metadata_set_successfully( dataset, self.sa_session ) and job.states.ERROR != final_job_state:
dataset._state = model.Dataset.states.FAILED_METADATA
else:
#load metadata from file
@@ -981,10 +975,7 @@
if dataset.ext == 'auto':
dataset.extension = 'txt'
self.sa_session.add( dataset )
- # TODO: job.states.ERROR == job.state now replaces checking
- # stderr for a problem:
- #if context['stderr']:
- if job.states.ERROR == job.state:
+ if job.states.ERROR == final_job_state:
log.debug( "setting dataset state to ERROR" )
# TODO: This is where the state is being set to error. Change it!
dataset_assoc.dataset.dataset.state = model.Dataset.states.ERROR
@@ -1054,7 +1045,12 @@
# fix permissions
for path in [ dp.real_path for dp in self.get_mutable_output_fnames() ]:
util.umask_fix_perms( path, self.app.config.umask, 0666, self.app.config.gid )
+
+ # Finally set the job state. This should only happen *after* all
+ # dataset creation, and will allow us to eliminate force_history_refresh.
+ job.state = final_job_state
self.sa_session.flush()
+
log.debug( 'job %d ended' % self.job_id )
if self.app.config.cleanup_job == 'always' or ( not stderr and self.app.config.cleanup_job == 'onsuccess' ):
self.cleanup()
https://bitbucket.org/galaxy/galaxy-central/commits/5af3b1cbb225/
Changeset: 5af3b1cbb225
User: dannon
Date: 2013-03-27 19:27:14
Summary: Strip whitespace in jobs/__init__.py
Affected #: 1 file
diff -r a0ee8a9673a70e6728ef1ed357c1606637e874e3 -r 5af3b1cbb2255fcf02dd4d214f211ffe4c4bc8b3 lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py
+++ b/lib/galaxy/jobs/__init__.py
@@ -84,7 +84,7 @@
class JobConfiguration( object ):
"""A parser and interface to advanced job management features.
-
+
These features are configured in the job configuration, by default, ``job_conf.xml``
"""
DEFAULT_NWORKERS = 4
@@ -609,7 +609,7 @@
Calling this method for the first time causes the dynamic runner to do
its calculation, if any.
-
+
:returns: ``JobDestination``
"""
return self.job_runner_mapper.get_job_destination(self.params)
@@ -673,7 +673,7 @@
special = self.sa_session.query( model.GenomeIndexToolData ).filter_by( job=job ).first()
if special:
out_data[ "output_file" ] = FakeDatasetAssociation( dataset=special.dataset )
-
+
# These can be passed on the command line if wanted as $__user_*__
if job.history and job.history.user:
user_id = '%d' % job.history.user.id
@@ -777,11 +777,11 @@
if ( len( stdout ) > 32768 ):
stdout = stdout[:32768]
log.info( "stdout for job %d is greater than 32K, only first part will be logged to database" % job.id )
- job.stdout = stdout
+ job.stdout = stdout
if ( len( stderr ) > 32768 ):
stderr = stderr[:32768]
log.info( "stderr for job %d is greater than 32K, only first part will be logged to database" % job.id )
- job.stderr = stderr
+ job.stderr = stderr
# Let the exit code be Null if one is not provided:
if ( exit_code != None ):
job.exit_code = exit_code
@@ -863,7 +863,7 @@
self.sa_session.expunge_all()
job = self.get_job()
- # TODO: After failing here, consider returning from the function.
+ # TODO: After failing here, consider returning from the function.
try:
self.reclaim_ownership()
except:
@@ -945,7 +945,7 @@
( not self.external_output_metadata.external_metadata_set_successfully( dataset, self.sa_session ) \
and self.app.config.retry_metadata_internally ):
dataset.datatype.set_meta( dataset, overwrite = False ) #call datatype.set_meta directly for the initial set_meta call during dataset creation
- elif not self.external_output_metadata.external_metadata_set_successfully( dataset, self.sa_session ) and job.states.ERROR != final_job_state:
+ elif not self.external_output_metadata.external_metadata_set_successfully( dataset, self.sa_session ) and job.states.ERROR != final_job_state:
dataset._state = model.Dataset.states.FAILED_METADATA
else:
#load metadata from file
@@ -1006,7 +1006,7 @@
job.stderr = job.stderr[:32768]
# The exit code will be null if there is no exit code to be set.
# This is so that we don't assign an exit code, such as 0, that
- # is either incorrect or has the wrong semantics.
+ # is either incorrect or has the wrong semantics.
if None != tool_exit_code:
job.exit_code = tool_exit_code
# custom post process setup
@@ -1058,26 +1058,26 @@
def check_tool_output( self, stdout, stderr, tool_exit_code, job ):
"""
Check the output of a tool - given the stdout, stderr, and the tool's
- exit code, return True if the tool exited succesfully and False
+ exit code, return True if the tool exited succesfully and False
otherwise. No exceptions should be thrown. If this code encounters
an exception, it returns True so that the workflow can continue;
- otherwise, a bug in this code could halt workflow progress.
+ otherwise, a bug in this code could halt workflow progress.
Note that, if the tool did not define any exit code handling or
any stdio/stderr handling, then it reverts back to previous behavior:
if stderr contains anything, then False is returned.
Note that the job id is just for messages.
"""
- # By default, the tool succeeded. This covers the case where the code
+ # By default, the tool succeeded. This covers the case where the code
# has a bug but the tool was ok, and it lets a workflow continue.
- success = True
+ success = True
try:
- # Check exit codes and match regular expressions against stdout and
+ # Check exit codes and match regular expressions against stdout and
# stderr if this tool was configured to do so.
# If there is a regular expression for scanning stdout/stderr,
- # then we assume that the tool writer overwrote the default
+ # then we assume that the tool writer overwrote the default
# behavior of just setting an error if there is *anything* on
- # stderr.
+ # stderr.
if ( len( self.tool.stdio_regexes ) > 0 or
len( self.tool.stdio_exit_codes ) > 0 ):
# Check the exit code ranges in the order in which
@@ -1088,9 +1088,9 @@
max_error_level = galaxy.tools.StdioErrorLevel.NO_ERROR
if tool_exit_code != None:
for stdio_exit_code in self.tool.stdio_exit_codes:
- if ( tool_exit_code >= stdio_exit_code.range_start and
+ if ( tool_exit_code >= stdio_exit_code.range_start and
tool_exit_code <= stdio_exit_code.range_end ):
- # Tack on a generic description of the code
+ # Tack on a generic description of the code
# plus a specific code description. For example,
# this might prepend "Job 42: Warning (Out of Memory)\n".
code_desc = stdio_exit_code.desc
@@ -1102,21 +1102,21 @@
code_desc ) )
log.info( "Job %s: %s" % (job.get_id_tag(), tool_msg) )
stderr = tool_msg + "\n" + stderr
- max_error_level = max( max_error_level,
+ max_error_level = max( max_error_level,
stdio_exit_code.error_level )
- if ( max_error_level >=
+ if ( max_error_level >=
galaxy.tools.StdioErrorLevel.FATAL ):
break
-
+
if max_error_level < galaxy.tools.StdioErrorLevel.FATAL:
# We'll examine every regex. Each regex specifies whether
- # it is to be run on stdout, stderr, or both. (It is
+ # it is to be run on stdout, stderr, or both. (It is
# possible for neither stdout nor stderr to be scanned,
# but those regexes won't be used.) We record the highest
# error level, which are currently "warning" and "fatal".
# If fatal, then we set the job's state to ERROR.
# If warning, then we still set the job's state to OK
- # but include a message. We'll do this if we haven't seen
+ # but include a message. We'll do this if we haven't seen
# a fatal error yet
for regex in self.tool.stdio_regexes:
# If ( this regex should be matched against stdout )
@@ -1126,16 +1126,16 @@
# Repeat the stdout stuff for stderr.
# TODO: Collapse this into a single function.
if ( regex.stdout_match ):
- regex_match = re.search( regex.match, stdout,
+ regex_match = re.search( regex.match, stdout,
re.IGNORECASE )
if ( regex_match ):
rexmsg = self.regex_err_msg( regex_match, regex)
- log.info( "Job %s: %s"
+ log.info( "Job %s: %s"
% ( job.get_id_tag(), rexmsg ) )
stdout = rexmsg + "\n" + stdout
- max_error_level = max( max_error_level,
+ max_error_level = max( max_error_level,
regex.error_level )
- if ( max_error_level >=
+ if ( max_error_level >=
galaxy.tools.StdioErrorLevel.FATAL ):
break
@@ -1144,33 +1144,33 @@
re.IGNORECASE )
if ( regex_match ):
rexmsg = self.regex_err_msg( regex_match, regex)
- log.info( "Job %s: %s"
+ log.info( "Job %s: %s"
% ( job.get_id_tag(), rexmsg ) )
stderr = rexmsg + "\n" + stderr
- max_error_level = max( max_error_level,
+ max_error_level = max( max_error_level,
regex.error_level )
- if ( max_error_level >=
+ if ( max_error_level >=
galaxy.tools.StdioErrorLevel.FATAL ):
break
-
+
# If we encountered a fatal error, then we'll need to set the
# job state accordingly. Otherwise the job is ok:
if max_error_level >= galaxy.tools.StdioErrorLevel.FATAL:
- success = False
+ success = False
else:
- success = True
-
+ success = True
+
# When there are no regular expressions and no exit codes to check,
# default to the previous behavior: when there's anything on stderr
- # the job has an error, and the job is ok otherwise.
+ # the job has an error, and the job is ok otherwise.
else:
- # TODO: Add in the tool and job id:
+ # TODO: Add in the tool and job id:
log.debug( "Tool did not define exit code or stdio handling; "
+ "checking stderr for success" )
if stderr:
- success = False
+ success = False
else:
- success = True
+ success = True
# On any exception, return True.
except:
@@ -1178,7 +1178,7 @@
log.warning( "Tool check encountered unexpected exception; "
+ "assuming tool was successful: " + tb )
success = True
-
+
# Store the modified stdout and stderr in the job:
if None != job:
job.stdout = stdout
@@ -1192,7 +1192,7 @@
ToolStdioRegex regex object. The regex_match is a MatchObject
that will contain the string matched on.
"""
- # Get the description for the error level:
+ # Get the description for the error level:
err_msg = galaxy.tools.StdioErrorLevel.desc( regex.error_level ) + ": "
# If there's a description for the regular expression, then use it.
# Otherwise, we'll take the first 256 characters of the match.
@@ -1206,7 +1206,7 @@
if mend - mstart > 256:
err_msg += match.string[ mstart : mstart+256 ] + "..."
else:
- err_msg += match.string[ mstart: mend ]
+ err_msg += match.string[ mstart: mend ]
return err_msg
def cleanup( self ):
@@ -1485,7 +1485,7 @@
self.status = task.states.NEW
def can_split( self ):
- # Should the job handler split this job up? TaskWrapper should
+ # Should the job handler split this job up? TaskWrapper should
# always return False as the job has already been split.
return False
@@ -1627,8 +1627,8 @@
the contents of the output files.
"""
# This may have ended too soon
- log.debug( 'task %s for job %d ended; exit code: %d'
- % (self.task_id, self.job_id,
+ log.debug( 'task %s for job %d ended; exit code: %d'
+ % (self.task_id, self.job_id,
tool_exit_code if tool_exit_code != None else -256 ) )
# default post job setup_external_metadata
self.sa_session.expunge_all()
@@ -1643,12 +1643,12 @@
self.fail( task.info )
return
- # Check what the tool returned. If the stdout or stderr matched
+ # Check what the tool returned. If the stdout or stderr matched
# regular expressions that indicate errors, then set an error.
# The same goes if the tool's exit code was in a given range.
if ( self.check_tool_output( stdout, stderr, tool_exit_code, task ) ):
task.state = task.states.OK
- else:
+ else:
task.state = task.states.ERROR
# Save stdout and stderr
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
3 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/4cfd8885a743/
Changeset: 4cfd8885a743
User: jmchilton
Date: 2013-03-07 18:16:53
Summary: select2 fixes for workflow running.
Affected #: 1 file
diff -r e3ae0bbd800a68532fc51625642e0abc29e1b085 -r 4cfd8885a74319559b1c31896dcfe64afd60bb87 templates/webapps/galaxy/workflow/run.mako
--- a/templates/webapps/galaxy/workflow/run.mako
+++ b/templates/webapps/galaxy/workflow/run.mako
@@ -8,7 +8,7 @@
<%def name="javascripts()">
${parent.javascripts()}
- ${h.js( "libs/jquery/jquery.autocomplete" )}
+ ${h.js( "libs/jquery/select2" )}
<script type="text/javascript">
$( function() {
function show_tool_body(title){
@@ -40,12 +40,12 @@
select.val($('option:last', select).val());
}
select.closest('.form-row').children('label').children('span.mode-icon').hide();
- select.removeAttr('multiple').removeAttr('size');
+ select.removeAttr('multiple').select2().removeAttr('size');
placeholder = 'type to filter';
} else {
$('.multiinput', select.closest('.form-row')).removeClass('disabled');
select.closest('.form-row').children('label').children('span.mode-icon').show();
- select.attr('multiple', 'multiple').attr('size', 8);
+ select.attr('multiple', 'multiple').select2().attr('size', 8);
placeholder = 'type to filter, [enter] to select all';
}
$('input.multiinput-filter', select.parent()).attr(
@@ -79,7 +79,7 @@
$("#new_history_cbx").click(function(){
$("#new_history_input").toggle(this.checked);
});
- $('span.multiinput_wrap select[name*="|input"]').removeAttr('multiple').each(function(i, s) {
+ $('span.multiinput_wrap select[name*="|input"]').removeAttr('multiple').select2().each(function(i, s) {
var select = $(s);
var new_width = Math.max(200, select.width()) + 20;
// Find the label for this element.
https://bitbucket.org/galaxy/galaxy-central/commits/d92b3bbccbfb/
Changeset: d92b3bbccbfb
User: jmchilton
Date: 2013-03-07 20:42:11
Summary: Abstract away logic for updating select boxes. For multiple select boxes set closeOnSelect to false, this vastly reduces the clicking required to select many items.
Affected #: 2 files
diff -r 4cfd8885a74319559b1c31896dcfe64afd60bb87 -r d92b3bbccbfb56c0c0b2cf203796a2d37e532ff3 static/scripts/galaxy.base.js
--- a/static/scripts/galaxy.base.js
+++ b/static/scripts/galaxy.base.js
@@ -239,6 +239,14 @@
return 0;
}
+$.fn.refresh_select2 = function() {
+ var select_elt = $(this);
+ var options = { width: "resolve",
+ closeOnSelect: !select_elt.is("[MULTIPLE]"),
+ };
+ return select_elt.select2( options );
+}
+
// Replace select box with a text input box + autocomplete.
function replace_big_select_inputs(min_length, max_length, select_elts) {
// To do replace, the select2 plugin must be loaded.
@@ -276,9 +284,7 @@
*
* - should we still sort dbkey fields here?
*/
-
- select_elt.select2( { width: "resolve" } );
-
+ select_elt.refresh_select2();
});
}
diff -r 4cfd8885a74319559b1c31896dcfe64afd60bb87 -r d92b3bbccbfb56c0c0b2cf203796a2d37e532ff3 templates/webapps/galaxy/workflow/run.mako
--- a/templates/webapps/galaxy/workflow/run.mako
+++ b/templates/webapps/galaxy/workflow/run.mako
@@ -40,12 +40,12 @@
select.val($('option:last', select).val());
}
select.closest('.form-row').children('label').children('span.mode-icon').hide();
- select.removeAttr('multiple').select2().removeAttr('size');
+ select.removeAttr('multiple').refresh_select2().removeAttr('size');
placeholder = 'type to filter';
} else {
$('.multiinput', select.closest('.form-row')).removeClass('disabled');
select.closest('.form-row').children('label').children('span.mode-icon').show();
- select.attr('multiple', 'multiple').select2().attr('size', 8);
+ select.attr('multiple', 'multiple').refresh_select2().attr('size', 8);
placeholder = 'type to filter, [enter] to select all';
}
$('input.multiinput-filter', select.parent()).attr(
@@ -79,7 +79,7 @@
$("#new_history_cbx").click(function(){
$("#new_history_input").toggle(this.checked);
});
- $('span.multiinput_wrap select[name*="|input"]').removeAttr('multiple').select2().each(function(i, s) {
+ $('span.multiinput_wrap select[name*="|input"]').removeAttr('multiple').refresh_select2().each(function(i, s) {
var select = $(s);
var new_width = Math.max(200, select.width()) + 20;
// Find the label for this element.
https://bitbucket.org/galaxy/galaxy-central/commits/d71a574758c9/
Changeset: d71a574758c9
User: dannon
Date: 2013-03-27 16:53:18
Summary: Merged in galaxyp/galaxy-central-parallelism-refactorings (pull request #136)
select2 fixes for workflow running.
Affected #: 2 files
diff -r 3826fb2deb717d7fce67e3b6351cd39c61b07379 -r d71a574758c9e49cbbdf889e5926c105745ef860 static/scripts/galaxy.base.js
--- a/static/scripts/galaxy.base.js
+++ b/static/scripts/galaxy.base.js
@@ -239,6 +239,14 @@
return 0;
}
+$.fn.refresh_select2 = function() {
+ var select_elt = $(this);
+ var options = { width: "resolve",
+ closeOnSelect: !select_elt.is("[MULTIPLE]"),
+ };
+ return select_elt.select2( options );
+}
+
// Replace select box with a text input box + autocomplete.
function replace_big_select_inputs(min_length, max_length, select_elts) {
// To do replace, the select2 plugin must be loaded.
@@ -276,9 +284,7 @@
*
* - should we still sort dbkey fields here?
*/
-
- select_elt.select2( { width: "resolve" } );
-
+ select_elt.refresh_select2();
});
}
diff -r 3826fb2deb717d7fce67e3b6351cd39c61b07379 -r d71a574758c9e49cbbdf889e5926c105745ef860 templates/webapps/galaxy/workflow/run.mako
--- a/templates/webapps/galaxy/workflow/run.mako
+++ b/templates/webapps/galaxy/workflow/run.mako
@@ -8,7 +8,7 @@
<%def name="javascripts()">
${parent.javascripts()}
- ${h.js( "libs/jquery/jquery.autocomplete" )}
+ ${h.js( "libs/jquery/select2" )}
<script type="text/javascript">
$( function() {
function show_tool_body(title){
@@ -40,12 +40,12 @@
select.val($('option:last', select).val());
}
select.closest('.form-row').children('label').children('span.mode-icon').hide();
- select.removeAttr('multiple').removeAttr('size');
+ select.removeAttr('multiple').refresh_select2().removeAttr('size');
placeholder = 'type to filter';
} else {
$('.multiinput', select.closest('.form-row')).removeClass('disabled');
select.closest('.form-row').children('label').children('span.mode-icon').show();
- select.attr('multiple', 'multiple').attr('size', 8);
+ select.attr('multiple', 'multiple').refresh_select2().attr('size', 8);
placeholder = 'type to filter, [enter] to select all';
}
$('input.multiinput-filter', select.parent()).attr(
@@ -79,7 +79,7 @@
$("#new_history_cbx").click(function(){
$("#new_history_input").toggle(this.checked);
});
- $('span.multiinput_wrap select[name*="|input"]').removeAttr('multiple').each(function(i, s) {
+ $('span.multiinput_wrap select[name*="|input"]').removeAttr('multiple').refresh_select2().each(function(i, s) {
var select = $(s);
var new_width = Math.max(200, select.width()) + 20;
// Find the label for this element.
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this email because the commit notification service is enabled for
this repository and you are the addressed recipient.
1
0
commit/galaxy-central: inithello: Change install and test script to never set do_not_test = True.
by commits-noreply@bitbucket.org 27 Mar '13
by commits-noreply@bitbucket.org 27 Mar '13
27 Mar '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/3826fb2deb71/
Changeset: 3826fb2deb71
User: inithello
Date: 2013-03-27 16:20:39
Summary: Change install and test script to never set do_not_test = True.
Affected #: 1 file
diff -r 2477c4ef8aa4742319bfd9d892f2dbcadf072743 -r 3826fb2deb717d7fce67e3b6351cd39c61b07379 test/install_and_test_tool_shed_repositories/functional_tests.py
--- a/test/install_and_test_tool_shed_repositories/functional_tests.py
+++ b/test/install_and_test_tool_shed_repositories/functional_tests.py
@@ -281,8 +281,7 @@
def register_test_result( url, metadata_id, test_results_dict, tests_passed=False ):
'''
- Set do_not_test = True if the repository fails functional tests. Set do_not_test = False
- if the repository passes functional tests, so that the repository will always be re-tested
+ This script should never set do_not_test = True, because the repositories should always be re-tested
against the most recent code.
'''
params = {}
@@ -291,7 +290,7 @@
params[ 'do_not_test' ] = 'false'
else:
params[ 'tools_functionally_correct' ] = 'false'
- params[ 'do_not_test' ] = 'true'
+ params[ 'do_not_test' ] = 'false'
params[ 'tool_test_errors' ] = test_results_dict
return update( tool_shed_api_key, '%s' % ( url_join( galaxy_tool_shed_url, 'api', 'repository_revisions', metadata_id ) ), params, return_formatted=False )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this email because the commit notification service is enabled for
this repository and you are the addressed recipient.
1
0
commit/galaxy-central: inithello: Merged correctly this time.
by commits-noreply@bitbucket.org 27 Mar '13
by commits-noreply@bitbucket.org 27 Mar '13
27 Mar '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/2477c4ef8aa4/
Changeset: 2477c4ef8aa4
User: inithello
Date: 2013-03-27 15:10:26
Summary: Merged correctly this time.
Affected #: 2 files
diff -r 3b918b912fe68718eb0b20c5c76e0e639f1cf42c -r 2477c4ef8aa4742319bfd9d892f2dbcadf072743 .hgignore
--- a/.hgignore
+++ b/.hgignore
@@ -24,15 +24,21 @@
# Python bytecode
*.pyc
+# Galaxy Runtime Files
+paster.lock
+paster.log
+paster.pid
+
# Tool Shed Runtime Files
-community_webapp.log
-community_webapp.pid
+tool_shed_webapp.lock
+tool_shed_webapp.log
+tool_shed_webapp.pid
hgweb.config*
# Config files
universe_wsgi.ini
reports_wsgi.ini
-community_wsgi.ini
+tool_shed_wsgi.ini
# Config files.
datatypes_conf.xml
diff -r 3b918b912fe68718eb0b20c5c76e0e639f1cf42c -r 2477c4ef8aa4742319bfd9d892f2dbcadf072743 lib/galaxy/webapps/tool_shed/framework/middleware/hg.py
--- a/lib/galaxy/webapps/tool_shed/framework/middleware/hg.py
+++ b/lib/galaxy/webapps/tool_shed/framework/middleware/hg.py
@@ -4,7 +4,8 @@
import os, logging
import sqlalchemy
from paste.auth.basic import AuthBasicAuthenticator
-from paste.httpheaders import REMOTE_USER, AUTH_TYPE
+from paste.httpheaders import AUTH_TYPE
+from paste.httpheaders import REMOTE_USER
from galaxy.webapps.tool_shed import model
from galaxy.util.hash_util import new_secure_hash
@@ -12,13 +13,15 @@
log = logging.getLogger(__name__)
+
class Hg( object ):
+
def __init__( self, app, config ):
print "mercurial version is:", mercurial.__version__.version
self.app = app
self.config = config
# Authenticate this mercurial request using basic authentication
- self.authentication = AuthBasicAuthenticator( '', self.__basic_authentication )
+ self.authentication = AuthBasicAuthenticator( 'hgweb in the tool shed', self.__basic_authentication )
self.remote_address = None
self.repository = None
self.username = None
@@ -28,6 +31,7 @@
self.db_url = self.config[ 'database_connection' ]
else:
self.db_url = "sqlite:///%s?isolation_level=IMMEDIATE" % self.config[ 'database_file' ]
+
def __call__( self, environ, start_response ):
cmd = self.__get_hg_command( **environ )
if cmd == 'changegroup':
@@ -89,16 +93,23 @@
else:
return result.wsgi_application( environ, start_response )
return self.app( environ, start_response )
+
def __get_hg_command( self, **kwd ):
- # Pulls mercurial commands from environ[ 'QUERY_STRING" ] and returns them.
+ """Pulls mercurial commands from environ[ 'QUERY_STRING" ] and returns them."""
if 'QUERY_STRING' in kwd:
for qry in kwd[ 'QUERY_STRING' ].split( '&' ):
if qry.startswith( 'cmd' ):
return qry.split( '=' )[ -1 ]
return None
+
def __basic_authentication( self, environ, username, password ):
- # The environ parameter is needed in basic authentication.
- return self.__authenticate( username, password )
+ """The environ parameter is needed in basic authentication. We also check it if use_remote_user is true."""
+ if asbool( self.config.get( 'use_remote_user', False ) ):
+ assert "HTTP_REMOTE_USER" in environ, "use_remote_user is set but no HTTP_REMOTE_USER variable"
+ return self.__authenticate_remote_user( environ, username, password )
+ else:
+ return self.__authenticate( username, password )
+
def __authenticate( self, username, password ):
# Instantiate a database connection
engine = sqlalchemy.create_engine( self.db_url )
@@ -111,3 +122,26 @@
connection.close()
# Check if password matches db_password when hashed.
return new_secure_hash( text_type=password ) == db_password
+
+ def __authenticate_remote_user( self, environ, username, password ):
+ """
+ Look after a remote user and "authenticate" - upstream server should already have achieved this for us, but we check that the
+ user exists at least. Hg allow_push = must include username - some versions of mercurial blow up with 500 errors.
+ """
+ ru_email = environ[ 'HTTP_REMOTE_USER' ].lower()
+ ## Instantiate a database connection...
+ engine = sqlalchemy.create_engine( self.db_url )
+ connection = engine.connect()
+ result_set = connection.execute( "select email, username, password from galaxy_user where email = '%s'" % ru_email )
+ for row in result_set:
+ # Should only be 1 row...
+ db_email = row[ 'email' ]
+ db_password = row[ 'password' ]
+ db_username = row[ 'username' ]
+ connection.close()
+
+ """
+ We could check the password here except that the function galaxy.web.framework.get_or_create_remote_user() does some random generation of
+ a password - so that no-one knows the password and only the hash is stored...
+ """
+ return db_username == username
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this email because the commit notification service is enabled for
this repository and you are the addressed recipient.
1
0
3 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/234138cd783c/
Changeset: 234138cd783c
Branch: next-stable
User: inithello
Date: 2013-03-27 14:50:32
Summary: Apply changes from pull request 145 by Roy Storey. Manually committed to exclude .hgflow mercurial extension configuration file.
Affected #: 1 file
diff -r c5d7d2bd7928c160a5163fe9fc9a5e20aa5470ae -r 234138cd783c7c9539eea3572cc8e1042741407b .hgignore
--- a/.hgignore
+++ b/.hgignore
@@ -24,15 +24,21 @@
# Python bytecode
*.pyc
+# Galaxy Runtime Files
+paster.lock
+paster.log
+paster.pid
+
# Tool Shed Runtime Files
-community_webapp.log
-community_webapp.pid
+tool_shed_webapp.lock
+tool_shed_webapp.log
+tool_shed_webapp.pid
hgweb.config*
# Config files
universe_wsgi.ini
reports_wsgi.ini
-community_wsgi.ini
+tool_shed_wsgi.ini
# Config files.
datatypes_conf.xml
https://bitbucket.org/galaxy/galaxy-central/commits/75415966a26b/
Changeset: 75415966a26b
Branch: next-stable
User: inithello
Date: 2013-03-27 14:52:55
Summary: Apply changes from pull request 146 by Roy Storey. Manually committed to exclude .hgflow mercurial extension configuration file.
Affected #: 1 file
diff -r 234138cd783c7c9539eea3572cc8e1042741407b -r 75415966a26b5597c1ec46a10f6071238a5578be lib/galaxy/webapps/tool_shed/framework/middleware/hg.py
--- a/lib/galaxy/webapps/tool_shed/framework/middleware/hg.py
+++ b/lib/galaxy/webapps/tool_shed/framework/middleware/hg.py
@@ -4,7 +4,8 @@
import os, logging
import sqlalchemy
from paste.auth.basic import AuthBasicAuthenticator
-from paste.httpheaders import REMOTE_USER, AUTH_TYPE
+from paste.httpheaders import AUTH_TYPE
+from paste.httpheaders import REMOTE_USER
from galaxy.webapps.tool_shed import model
from galaxy.util.hash_util import new_secure_hash
@@ -12,13 +13,15 @@
log = logging.getLogger(__name__)
+
class Hg( object ):
+
def __init__( self, app, config ):
print "mercurial version is:", mercurial.__version__.version
self.app = app
self.config = config
# Authenticate this mercurial request using basic authentication
- self.authentication = AuthBasicAuthenticator( '', self.__basic_authentication )
+ self.authentication = AuthBasicAuthenticator( 'hgweb in the tool shed', self.__basic_authentication )
self.remote_address = None
self.repository = None
self.username = None
@@ -28,6 +31,7 @@
self.db_url = self.config[ 'database_connection' ]
else:
self.db_url = "sqlite:///%s?isolation_level=IMMEDIATE" % self.config[ 'database_file' ]
+
def __call__( self, environ, start_response ):
cmd = self.__get_hg_command( **environ )
if cmd == 'changegroup':
@@ -89,16 +93,23 @@
else:
return result.wsgi_application( environ, start_response )
return self.app( environ, start_response )
+
def __get_hg_command( self, **kwd ):
- # Pulls mercurial commands from environ[ 'QUERY_STRING" ] and returns them.
+ """Pulls mercurial commands from environ[ 'QUERY_STRING" ] and returns them."""
if 'QUERY_STRING' in kwd:
for qry in kwd[ 'QUERY_STRING' ].split( '&' ):
if qry.startswith( 'cmd' ):
return qry.split( '=' )[ -1 ]
return None
+
def __basic_authentication( self, environ, username, password ):
- # The environ parameter is needed in basic authentication.
- return self.__authenticate( username, password )
+ """The environ parameter is needed in basic authentication. We also check it if use_remote_user is true."""
+ if asbool( self.config.get( 'use_remote_user', False ) ):
+ assert "HTTP_REMOTE_USER" in environ, "use_remote_user is set but no HTTP_REMOTE_USER variable"
+ return self.__authenticate_remote_user( environ, username, password )
+ else:
+ return self.__authenticate( username, password )
+
def __authenticate( self, username, password ):
# Instantiate a database connection
engine = sqlalchemy.create_engine( self.db_url )
@@ -111,3 +122,26 @@
connection.close()
# Check if password matches db_password when hashed.
return new_secure_hash( text_type=password ) == db_password
+
+ def __authenticate_remote_user( self, environ, username, password ):
+ """
+ Look after a remote user and "authenticate" - upstream server should already have achieved this for us, but we check that the
+ user exists at least. Hg allow_push = must include username - some versions of mercurial blow up with 500 errors.
+ """
+ ru_email = environ[ 'HTTP_REMOTE_USER' ].lower()
+ ## Instantiate a database connection...
+ engine = sqlalchemy.create_engine( self.db_url )
+ connection = engine.connect()
+ result_set = connection.execute( "select email, username, password from galaxy_user where email = '%s'" % ru_email )
+ for row in result_set:
+ # Should only be 1 row...
+ db_email = row[ 'email' ]
+ db_password = row[ 'password' ]
+ db_username = row[ 'username' ]
+ connection.close()
+
+ """
+ We could check the password here except that the function galaxy.web.framework.get_or_create_remote_user() does some random generation of
+ a password - so that no-one knows the password and only the hash is stored...
+ """
+ return db_username == username
https://bitbucket.org/galaxy/galaxy-central/commits/2af272e9f256/
Changeset: 2af272e9f256
Branch: next-stable
User: inithello
Date: 2013-03-27 14:53:55
Summary: Merge next-stable.
Affected #: 39 files
diff -r 75415966a26b5597c1ec46a10f6071238a5578be -r 2af272e9f2565cd9272c0aa5004a5726cfafc9bf lib/galaxy/datatypes/display_applications/__init__.py
--- a/lib/galaxy/datatypes/display_applications/__init__.py
+++ b/lib/galaxy/datatypes/display_applications/__init__.py
@@ -1,1 +1,3 @@
-
+"""
+Contains functionality of the newer XML defined external display applications (not hardcoded into datatype classes).
+"""
diff -r 75415966a26b5597c1ec46a10f6071238a5578be -r 2af272e9f2565cd9272c0aa5004a5726cfafc9bf lib/galaxy/datatypes/display_applications/link_generator.py
--- a/lib/galaxy/datatypes/display_applications/link_generator.py
+++ b/lib/galaxy/datatypes/display_applications/link_generator.py
@@ -1,8 +1,11 @@
-"""Classes to generate links for display applications.
+"""Classes to generate links for old-style display applications.
Separating Transaction based elements of display applications from datatypes.
"""
+#FIXME: The code contained within this file is for old-style display applications, but
+#this module namespace is intended to only handle the new-style display applications.
+
import urllib
# for the url_for hack
@@ -19,6 +22,8 @@
#TODO: these could be extended to handle file_function and parse/contain the builds.txt files
+#HACK: these duplicate functionality from the individual datatype classes themselves
+
def get_display_app_link_generator( display_app_name ):
"""Returns an instance of the proper link generator class
based on the display_app_name or DisplayAppLinkGenerator
diff -r 75415966a26b5597c1ec46a10f6071238a5578be -r 2af272e9f2565cd9272c0aa5004a5726cfafc9bf lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -3040,9 +3040,9 @@
pass
class ToolShedRepository( object ):
- api_collection_visible_keys = ( 'id', 'name', 'tool_shed', 'owner', 'installed_changeset_revision', 'changeset_revision', 'ctx_rev', 'includes_datatypes',
+ api_collection_visible_keys = ( 'id', 'tool_shed', 'name', 'owner', 'installed_changeset_revision', 'changeset_revision', 'ctx_rev', 'includes_datatypes',
'update_available', 'deleted', 'uninstalled', 'dist_to_shed', 'status', 'error_message' )
- api_element_visible_keys = ( 'id', 'name', 'tool_shed', 'owner', 'installed_changeset_revision', 'changeset_revision', 'ctx_rev', 'includes_datatypes',
+ api_element_visible_keys = ( 'id', 'tool_shed', 'name', 'owner', 'installed_changeset_revision', 'changeset_revision', 'ctx_rev', 'includes_datatypes',
'update_available', 'deleted', 'uninstalled', 'dist_to_shed', 'status', 'error_message' )
installation_status = Bunch( NEW='New',
CLONING='Cloning',
@@ -3079,10 +3079,8 @@
self.dist_to_shed = dist_to_shed
self.status = status
self.error_message = error_message
- def as_dict( self, trans ):
- tsr_dict = self.get_api_value( view='element' )
- tsr_dict[ 'id' ] = trans.security.encode_id( self.id )
- return tsr_dict
+ def as_dict( self, value_mapper=None ):
+ return self.get_api_value( view='element', value_mapper=value_mapper )
def repo_files_directory( self, app ):
repo_path = self.repo_path( app )
if repo_path:
@@ -3182,7 +3180,7 @@
try:
rval[ key ] = self.__getattribute__( key )
if key in value_mapper:
- rval[ key ] = value_mapper.get( key )( rval[ key ] )
+ rval[ key ] = value_mapper.get( key, rval[ key ] )
except AttributeError:
rval[ key ] = None
return rval
diff -r 75415966a26b5597c1ec46a10f6071238a5578be -r 2af272e9f2565cd9272c0aa5004a5726cfafc9bf lib/galaxy/web/__init__.py
--- a/lib/galaxy/web/__init__.py
+++ b/lib/galaxy/web/__init__.py
@@ -1,7 +1,16 @@
"""
The Galaxy web application framework
"""
-
-from framework import expose, json, json_pretty, require_login, require_admin, url_for, error, form, FormBuilder, expose_api, expose_api_raw
+from framework import expose
+from framework import json
+from framework import json_pretty
+from framework import require_login
+from framework import require_admin
+from framework import url_for
+from framework import error
+from framework import form
+from framework import FormBuilder
+from framework import expose_api
+from framework import expose_api_anonymous
+from framework import expose_api_raw
from framework.base import httpexceptions
-
diff -r 75415966a26b5597c1ec46a10f6071238a5578be -r 2af272e9f2565cd9272c0aa5004a5726cfafc9bf lib/galaxy/web/framework/__init__.py
--- a/lib/galaxy/web/framework/__init__.py
+++ b/lib/galaxy/web/framework/__init__.py
@@ -106,7 +106,13 @@
"""
return expose_api( func, to_json=False )
-def expose_api( func, to_json=True ):
+def expose_api_anonymous( func, to_json=True ):
+ """
+ Expose this function via the API but don't require an API key.
+ """
+ return expose_api( func, to_json=to_json, key_required=False )
+
+def expose_api( func, to_json=True, key_required=True ):
@wraps(func)
def decorator( self, trans, *args, **kwargs ):
def error( environ, start_response ):
@@ -114,7 +120,7 @@
return error_message
error_status = '403 Forbidden'
## If there is a user, we've authenticated a session.
- if not trans.user and isinstance(trans.galaxy_session, Bunch):
+ if key_required and not trans.user and isinstance( trans.galaxy_session, Bunch ):
# If trans.user is already set, don't check for a key.
# This happens when we're authenticating using session instead of an API key.
# The Bunch clause is used to prevent the case where there's no user, but there is a real session.
diff -r 75415966a26b5597c1ec46a10f6071238a5578be -r 2af272e9f2565cd9272c0aa5004a5726cfafc9bf lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py
--- /dev/null
+++ b/lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py
@@ -0,0 +1,339 @@
+import logging
+import urllib2
+from galaxy.util import json
+from galaxy import util
+from galaxy import web
+from galaxy.web.base.controller import BaseAPIController
+from tool_shed.galaxy_install import repository_util
+import tool_shed.util.shed_util_common as suc
+
+log = logging.getLogger( __name__ )
+
+def default_tool_shed_repository_value_mapper( trans, tool_shed_repository ):
+ value_mapper={ 'id' : trans.security.encode_id( tool_shed_repository.id ),
+ 'error_message' : tool_shed_repository.error_message or '' }
+ return value_mapper
+
+def get_message_for_no_shed_tool_config():
+ # This Galaxy instance is not configured with a shed-related tool panel configuration file.
+ message = 'The tool_config_file setting in universe_wsgi.ini must include at least one shed tool configuration file name with a <toolbox> '
+ message += 'tag that includes a tool_path attribute value which is a directory relative to the Galaxy installation directory in order to '
+ message += 'automatically install tools from a tool shed into Galaxy (e.g., the file name shed_tool_conf.xml whose <toolbox> tag is '
+ message += '<toolbox tool_path="../shed_tools">). For details, see the "Installation of Galaxy tool shed repository tools into a local '
+ message += 'Galaxy instance" section of the Galaxy tool shed wiki at http://wiki.galaxyproject.org/InstallingRepositoriesToGalaxy#'
+ message += 'Installing_Galaxy_tool_shed_repository_tools_into_a_local_Galaxy_instance.'
+ return message
+
+class ToolShedRepositoriesController( BaseAPIController ):
+ """RESTful controller for interactions with tool shed repositories."""
+
+ @web.expose_api
+ def index( self, trans, **kwd ):
+ """
+ GET /api/tool_shed_repositories
+ Display a list of dictionaries containing information about installed tool shed repositories.
+ """
+ # Example URL: http://localhost:8763/api/tool_shed_repositories
+ tool_shed_repository_dicts = []
+ try:
+ query = trans.sa_session.query( trans.app.model.ToolShedRepository ) \
+ .order_by( trans.app.model.ToolShedRepository.table.c.name ) \
+ .all()
+ for tool_shed_repository in query:
+ tool_shed_repository_dict = tool_shed_repository.get_api_value( value_mapper=default_tool_shed_repository_value_mapper( trans, tool_shed_repository ) )
+ tool_shed_repository_dict[ 'url' ] = web.url_for( controller='tool_shed_repositories',
+ action='show',
+ id=trans.security.encode_id( tool_shed_repository.id ) )
+ tool_shed_repository_dicts.append( tool_shed_repository_dict )
+ return tool_shed_repository_dicts
+ except Exception, e:
+ message = "Error in the tool_shed_repositories API in index: %s" % str( e )
+ log.error( message, exc_info=True )
+ trans.response.status = 500
+ return message
+
+ @web.expose_api
+ def show( self, trans, id, **kwd ):
+ """
+        GET /api/tool_shed_repositories/{encoded_tool_shed_repository_id}
+ Display a dictionary containing information about a specified tool_shed_repository.
+
+ :param id: the encoded id of the ToolShedRepository object
+ """
+ # Example URL: http://localhost:8763/api/tool_shed_repositories/df7a1f0c02a5b08e
+ try:
+ tool_shed_repository = suc.get_tool_shed_repository_by_id( trans, id )
+ tool_shed_repository_dict = tool_shed_repository.as_dict( value_mapper=default_tool_shed_repository_value_mapper( trans, tool_shed_repository ) )
+ tool_shed_repository_dict[ 'url' ] = web.url_for( controller='tool_shed_repositories',
+ action='show',
+ id=trans.security.encode_id( tool_shed_repository.id ) )
+ return tool_shed_repository_dict
+ except Exception, e:
+            message = "Error in tool_shed_repositories API in show: " + str( e )
+ log.error( message, exc_info=True )
+ trans.response.status = 500
+ return message
+
+ @web.expose_api
+ def install_repository_revision( self, trans, payload, **kwd ):
+ """
+ POST /api/tool_shed_repositories/install_repository_revision
+ Install a specified repository revision from a specified tool shed into Galaxy.
+
+ :param key: the current Galaxy admin user's API key
+
+ The following parameters are included in the payload.
+ :param tool_shed_url (required): the base URL of the Tool Shed from which to install the Repository
+ :param name (required): the name of the Repository
+ :param owner (required): the owner of the Repository
+        :param changeset_revision (required): the changeset_revision of the RepositoryMetadata object associated with the Repository
+ :param new_tool_panel_section_label (optional): label of a new section to be added to the Galaxy tool panel in which to load
+ tools contained in the Repository. Either this parameter must be an empty string or
+ the tool_panel_section_id parameter must be an empty string or both must be an empty
+ string (both cannot be used simultaneously).
+ :param tool_panel_section_id (optional): id of the Galaxy tool panel section in which to load tools contained in the Repository.
+ If this parameter is an empty string and the above new_tool_panel_section_label parameter is an
+ empty string, tools will be loaded outside of any sections in the tool panel. Either this
+ parameter must be an empty string or the tool_panel_section_id parameter must be an empty string
+                                                 or both must be an empty string (both cannot be used simultaneously).
+ :param install_repository_dependencies (optional): Set to True if you want to install repository dependencies defined for the specified
+ repository being installed. The default setting is False.
+ :param install_tool_dependencies (optional): Set to True if you want to install tool dependencies defined for the specified repository being
+ installed. The default setting is False.
+ :param shed_tool_conf (optional): The shed-related tool panel configuration file configured in the "tool_config_file" setting in the Galaxy config file
+ (e.g., universe_wsgi.ini). At least one shed-related tool panel config file is required to be configured. Setting
+ this parameter to a specific file enables you to choose where the specified repository will be installed because
+ the tool_path attribute of the <toolbox> from the specified file is used as the installation location
+ (e.g., <toolbox tool_path="../shed_tools">). If this parameter is not set, a shed-related tool panel configuration
+ file will be selected automatically.
+ """
+ # Get the information about the repository to be installed from the payload.
+ tool_shed_url = payload.get( 'tool_shed_url', '' )
+ if not tool_shed_url:
+ raise HTTPBadRequest( detail="Missing required parameter 'tool_shed_url'." )
+ name = payload.get( 'name', '' )
+ if not name:
+ raise HTTPBadRequest( detail="Missing required parameter 'name'." )
+ owner = payload.get( 'owner', '' )
+ if not owner:
+ raise HTTPBadRequest( detail="Missing required parameter 'owner'." )
+ changeset_revision = payload.get( 'changeset_revision', '' )
+ if not changeset_revision:
+ raise HTTPBadRequest( detail="Missing required parameter 'changeset_revision'." )
+ # Make sure this Galaxy instance is configured with a shed-related tool panel configuration file.
+ if not suc.have_shed_tool_conf_for_install( trans ):
+ message = get_message_for_no_shed_tool_config()
+ log.error( message, exc_info=True )
+ trans.response.status = 500
+ return dict( status='error', error=message )
+ # Make sure the current user's API key proves he is an admin user in this Galaxy instance.
+ if not trans.user_is_admin():
+ raise HTTPForbidden( detail='You are not authorized to install a tool shed repository into this Galaxy instance.' )
+ # Keep track of all repositories that are installed - there may be more than one if repository dependencies are installed.
+ installed_tool_shed_repositories = []
+ # Get all of the information necessary for installing the repository from the specified tool shed.
+ url = suc.url_join( tool_shed_url,
+ 'api/repositories/get_repository_revision_install_info?name=%s&owner=%s&changeset_revision=%s' % \
+ ( name, owner, changeset_revision ) )
+ try:
+ response = urllib2.urlopen( url )
+ raw_text = response.read()
+ response.close()
+ except Exception, e:
+            message = "Error attempting to retrieve installation information from tool shed %s for revision %s of repository %s owned by %s: %s" % \
+                ( str( tool_shed_url ), str( changeset_revision ), str( name ), str( owner ), str( e ) )
+ log.error( message, exc_info=True )
+ trans.response.status = 500
+ return dict( status='error', error=message )
+ if raw_text:
+ items = json.from_json_string( raw_text )
+ repository_dict = items[ 0 ]
+ repository_revision_dict = items[ 1 ]
+ repo_info_dict = items[ 2 ]
+ else:
+            message = "Unable to retrieve installation information from tool shed %s for revision %s of repository %s owned by %s" % \
+                ( str( tool_shed_url ), str( changeset_revision ), str( name ), str( owner ) )
+ log.error( message, exc_info=True )
+ trans.response.status = 500
+ return dict( status='error', error=message )
+ repo_info_dicts = [ repo_info_dict ]
+ # Make sure the tool shed returned everything we need for installing the repository.
+ try:
+ has_repository_dependencies = repository_revision_dict[ 'has_repository_dependencies' ]
+ except:
+ raise HTTPBadRequest( detail="Missing required parameter 'has_repository_dependencies'." )
+ try:
+ includes_tools = repository_revision_dict[ 'includes_tools' ]
+ except:
+ raise HTTPBadRequest( detail="Missing required parameter 'includes_tools'." )
+ try:
+ includes_tool_dependencies = repository_revision_dict[ 'includes_tool_dependencies' ]
+ except:
+ raise HTTPBadRequest( detail="Missing required parameter 'includes_tool_dependencies'." )
+ try:
+ includes_tools_for_display_in_tool_panel = repository_revision_dict[ 'includes_tools_for_display_in_tool_panel' ]
+ except:
+ raise HTTPBadRequest( detail="Missing required parameter 'includes_tools_for_display_in_tool_panel'." )
+ # Get the information about the Galaxy components (e.g., tool pane section, tool config file, etc) that will contain the repository information.
+ install_repository_dependencies = payload.get( 'install_repository_dependencies', False )
+ install_tool_dependencies = payload.get( 'install_tool_dependencies', False )
+ new_tool_panel_section = payload.get( 'new_tool_panel_section_label', '' )
+ shed_tool_conf = payload.get( 'shed_tool_conf', None )
+ if shed_tool_conf:
+ # Get the tool_path setting.
+            index, shed_config_dict = suc.get_shed_tool_conf_dict( trans.app, shed_tool_conf )
+            tool_path = shed_config_dict[ 'tool_path' ]
+ else:
+ # Pick a semi-random shed-related tool panel configuration file and get the tool_path setting.
+ for shed_config_dict in trans.app.toolbox.shed_tool_confs:
+ # Don't use migrated_tools_conf.xml.
+ if shed_config_dict[ 'config_filename' ] != trans.app.config.migrated_tools_config:
+ break
+ shed_tool_conf = shed_config_dict[ 'config_filename' ]
+ tool_path = shed_config_dict[ 'tool_path' ]
+ if not shed_tool_conf:
+ raise HTTPBadRequest( detail="Missing required parameter 'shed_tool_conf'." )
+ tool_panel_section_id = payload.get( 'tool_panel_section_id', '' )
+ if tool_panel_section_id not in [ None, '' ]:
+ tool_panel_section = trans.app.toolbox.tool_panel[ tool_panel_section_id ]
+ else:
+ tool_panel_section = ''
+ # Build the dictionary of information necessary for creating tool_shed_repository database records for each repository being installed.
+ installation_dict = dict( install_repository_dependencies=install_repository_dependencies,
+ new_tool_panel_section=new_tool_panel_section,
+ no_changes_checked=False,
+ reinstalling=False,
+ repo_info_dicts=repo_info_dicts,
+ tool_panel_section=tool_panel_section,
+ tool_path=tool_path,
+ tool_shed_url=tool_shed_url )
+ # Create the tool_shed_repository database records and gather additional information for repository installation.
+ created_or_updated_tool_shed_repositories, tool_panel_section_keys, repo_info_dicts, filtered_repo_info_dicts, message = \
+ repository_util.handle_tool_shed_repositories( trans, installation_dict, using_api=True )
+ if message and len( repo_info_dicts ) == 1:
+ # We're attempting to install a single repository that has already been installed into this Galaxy instance.
+ log.error( message, exc_info=True )
+ trans.response.status = 500
+ return dict( status='error', error=message )
+ if created_or_updated_tool_shed_repositories:
+ # Build the dictionary of information necessary for installing the repositories.
+ installation_dict = dict( created_or_updated_tool_shed_repositories=created_or_updated_tool_shed_repositories,
+ filtered_repo_info_dicts=filtered_repo_info_dicts,
+ has_repository_dependencies=has_repository_dependencies,
+ includes_tool_dependencies=includes_tool_dependencies,
+ includes_tools=includes_tools,
+ includes_tools_for_display_in_tool_panel=includes_tools_for_display_in_tool_panel,
+ install_repository_dependencies=install_repository_dependencies,
+ install_tool_dependencies=install_tool_dependencies,
+ message='',
+ new_tool_panel_section=new_tool_panel_section,
+ shed_tool_conf=shed_tool_conf,
+ status='done',
+ tool_panel_section=tool_panel_section,
+ tool_panel_section_keys=tool_panel_section_keys,
+ tool_path=tool_path,
+ tool_shed_url=tool_shed_url )
+ # Prepare the repositories for installation. Even though this method receives a single combination of tool_shed_url, name, owner and
+ # changeset_revision, there may be multiple repositories for installation at this point because repository dependencies may have added
+ # additional repositories for installation along with the single specified repository.
+ encoded_kwd, query, tool_shed_repositories, encoded_repository_ids = repository_util.initiate_repository_installation( trans, installation_dict )
+ # Install the repositories, keeping track of each one for later display.
+ for index, tool_shed_repository in enumerate( tool_shed_repositories ):
+ repo_info_dict = repo_info_dicts[ index ]
+ tool_panel_section_key = tool_panel_section_keys[ index ]
+ repository_util.install_tool_shed_repository( trans,
+ tool_shed_repository,
+ repo_info_dict,
+ tool_panel_section_key,
+ shed_tool_conf,
+ tool_path,
+ install_tool_dependencies,
+ reinstalling=False )
+ tool_shed_repository_dict = tool_shed_repository.as_dict( value_mapper=default_tool_shed_repository_value_mapper( trans, tool_shed_repository ) )
+ tool_shed_repository_dict[ 'url' ] = web.url_for( controller='tool_shed_repositories',
+ action='show',
+ id=trans.security.encode_id( tool_shed_repository.id ) )
+ installed_tool_shed_repositories.append( tool_shed_repository_dict )
+ else:
+ log.error( message, exc_info=True )
+ trans.response.status = 500
+ return dict( status='error', error=message )
+ # Display the list of installed repositories.
+ return installed_tool_shed_repositories
+
+ @web.expose_api
+ def install_repository_revisions( self, trans, payload, **kwd ):
+ """
+ POST /api/tool_shed_repositories/install_repository_revisions
+ Install one or more specified repository revisions from one or more specified tool sheds into Galaxy. The received parameters
+ must be ordered lists so that positional values in tool_shed_urls, names, owners and changeset_revisions are associated.
+
+ It's questionable whether this method is needed as the above method for installing a single repository can probably cover all
+ desired scenarios. We'll keep this one around just in case...
+
+ :param tool_shed_urls: the base URLs of the Tool Sheds from which to install a specified Repository
+ :param names: the names of the Repositories to be installed
+ :param owners: the owners of the Repositories to be installed
+        :param changeset_revisions: the changeset_revisions of each RepositoryMetadata object associated with each Repository to be installed
+ :param key: the current Galaxy admin user's API key
+ :param new_tool_panel_section_label: optional label of a new section to be added to the Galaxy tool panel in which to load
+ tools contained in the Repository. Either this parameter must be an empty string or
+ the tool_panel_section_id parameter must be an empty string, as both cannot be used.
+ :param tool_panel_section_id: optional id of the Galaxy tool panel section in which to load tools contained in the Repository.
+ If not set, tools will be loaded outside of any sections in the tool panel. Either this
+ parameter must be an empty string or the tool_panel_section_id parameter must be an empty string,
+ as both cannot be used.
+ """
+ if not suc.have_shed_tool_conf_for_install( trans ):
+ # This Galaxy instance is not configured with a shed-related tool panel configuration file.
+ message = get_message_for_no_shed_tool_config()
+ log.error( message, exc_info=True )
+ trans.response.status = 500
+ return dict( status='error', error=message )
+ if not trans.user_is_admin():
+ raise HTTPForbidden( detail='You are not authorized to install a tool shed repository into this Galaxy instance.' )
+ # Get the information about all of the repositories to be installed.
+ tool_shed_urls = util.listify( payload.get( 'tool_shed_urls', '' ) )
+ names = util.listify( payload.get( 'names', '' ) )
+ owners = util.listify( payload.get( 'owners', '' ) )
+ changeset_revisions = util.listify( payload.get( 'changeset_revisions', '' ) )
+ num_specified_repositories = len( tool_shed_urls )
+ if len( names ) != num_specified_repositories or \
+ len( owners ) != num_specified_repositories or \
+ len( changeset_revisions ) != num_specified_repositories:
+ message = 'Error in tool_shed_repositories API in install_repository_revisions: the received parameters must be ordered '
+ message += 'lists so that positional values in tool_shed_urls, names, owners and changeset_revisions are associated.'
+ log.error( message, exc_info=True )
+ trans.response.status = 500
+ return dict( status='error', error=message )
+ # Get the information about the Galaxy components (e.g., tool pane section, tool config file, etc) that will contain information
+ # about each of the repositories being installed.
+ # TODO: we may want to enhance this method to allow for each of the following to be associated with each repository instead of
+ # forcing all repositories to use the same settings.
+ install_repository_dependencies = payload.get( 'install_repository_dependencies', False )
+ install_tool_dependencies = payload.get( 'install_tool_dependencies', False )
+ new_tool_panel_section = payload.get( 'new_tool_panel_section_label', '' )
+ shed_tool_conf = payload.get( 'shed_tool_conf', None )
+ tool_path = payload.get( 'tool_path', None )
+ tool_panel_section_id = payload.get( 'tool_panel_section_id', '' )
+ all_installed_tool_shed_repositories = []
+ for index, tool_shed_url in enumerate( tool_shed_urls ):
+ current_payload = {}
+ current_payload[ 'tool_shed_url' ] = tool_shed_url
+ current_payload[ 'name' ] = names[ index ]
+ current_payload[ 'owner' ] = owners[ index ]
+ current_payload[ 'changeset_revision' ] = changeset_revisions[ index ]
+ current_payload[ 'install_repository_dependencies' ] = install_repository_dependencies
+ current_payload[ 'install_tool_dependencies' ] = install_tool_dependencies
+            current_payload[ 'new_tool_panel_section_label' ] = new_tool_panel_section
+ current_payload[ 'shed_tool_conf' ] = shed_tool_conf
+ current_payload[ 'tool_path' ] = tool_path
+ current_payload[ 'tool_panel_section_id' ] = tool_panel_section_id
+ installed_tool_shed_repositories = self.install_repository_revision( trans, **current_payload )
+ if isinstance( installed_tool_shed_repositories, dict ):
+ # We encountered an error.
+ return installed_tool_shed_repositories
+ elif isinstance( installed_tool_shed_repositories, list ):
+ all_installed_tool_shed_repositories.extend( installed_tool_shed_repositories )
+ return all_installed_tool_shed_repositories
+
\ No newline at end of file
diff -r 75415966a26b5597c1ec46a10f6071238a5578be -r 2af272e9f2565cd9272c0aa5004a5726cfafc9bf lib/galaxy/webapps/galaxy/buildapp.py
--- a/lib/galaxy/webapps/galaxy/buildapp.py
+++ b/lib/galaxy/webapps/galaxy/buildapp.py
@@ -104,12 +104,6 @@
name_prefix='group_',
path_prefix='/api/groups/:group_id',
parent_resources=dict( member_name='group', collection_name='groups' ) )
- webapp.api_mapper.resource( 'content',
- 'contents',
- controller='tool_shed_repository_contents',
- name_prefix='tool_shed_repository_',
- path_prefix='/api/tool_shed_repositories/:tool_shed_repository_id',
- parent_resources=dict( member_name='tool_shed_repository', collection_name='tool_shed_repositories' ) )
_add_item_tags_controller( webapp,
name_prefix="history_content_",
path_prefix='/api/histories/:history_id/contents/:history_content_id' )
@@ -142,7 +136,6 @@
webapp.api_mapper.resource( 'tool', 'tools', path_prefix='/api' )
webapp.api_mapper.resource_with_deleted( 'user', 'users', path_prefix='/api' )
webapp.api_mapper.resource( 'genome', 'genomes', path_prefix='/api' )
- webapp.api_mapper.resource( 'tool_shed_repository', 'tool_shed_repositories', path_prefix='/api' )
webapp.api_mapper.resource( 'visualization', 'visualizations', path_prefix='/api' )
webapp.api_mapper.resource( 'workflow', 'workflows', path_prefix='/api' )
webapp.api_mapper.resource_with_deleted( 'history', 'histories', path_prefix='/api' )
@@ -155,7 +148,14 @@
webapp.api_mapper.connect("workflow_dict", '/api/workflows/{workflow_id}/download', controller='workflows', action='workflow_dict', conditions=dict(method=['GET']))
# Preserve the following download route for now for dependent applications -- deprecate at some point
webapp.api_mapper.connect("workflow_dict", '/api/workflows/download/{workflow_id}', controller='workflows', action='workflow_dict', conditions=dict(method=['GET']))
-
+ # Galaxy API for tool shed features.
+ webapp.api_mapper.resource( 'tool_shed_repository',
+ 'tool_shed_repositories',
+ controller='tool_shed_repositories',
+ name_prefix='tool_shed_repository_',
+ path_prefix='/api',
+ new={ 'install_repository_revision' : 'POST' },
+ parent_resources=dict( member_name='tool_shed_repository', collection_name='tool_shed_repositories' ) )
# Connect logger from app
if app.trace_logger:
webapp.trace_logger = app.trace_logger
diff -r 75415966a26b5597c1ec46a10f6071238a5578be -r 2af272e9f2565cd9272c0aa5004a5726cfafc9bf lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -1,19 +1,35 @@
-import logging, os, shutil, tempfile, urllib2
+import logging
+import os
+import shutil
+import urllib2
from admin import AdminGalaxy
-from galaxy import web, util, eggs, tools
-from galaxy.web.form_builder import SelectField, CheckboxField
-from galaxy.web.framework.helpers import iff, grids
+from galaxy import eggs
+from galaxy import web
+from galaxy import util
+from galaxy.web.form_builder import CheckboxField
+from galaxy.web.framework.helpers import grids
+from galaxy.web.framework.helpers import iff
from galaxy.util import json
from galaxy.model.orm import or_
import tool_shed.util.shed_util_common as suc
-from tool_shed.util import common_install_util, data_manager_util, datatype_util, encoding_util, metadata_util
-from tool_shed.util import readme_util, repository_dependency_util, tool_dependency_util, tool_util, workflow_util
+from tool_shed.util import common_install_util
+from tool_shed.util import data_manager_util
+from tool_shed.util import datatype_util
+from tool_shed.util import encoding_util
+from tool_shed.util import metadata_util
+from tool_shed.util import readme_util
+from tool_shed.util import repository_dependency_util
+from tool_shed.util import tool_dependency_util
+from tool_shed.util import tool_util
+from tool_shed.util import workflow_util
from tool_shed.galaxy_install import repository_util
import tool_shed.galaxy_install.grids.admin_toolshed_grids as admin_toolshed_grids
import pkg_resources
eggs.require( 'mercurial' )
-from mercurial import hg, ui, commands
+from mercurial import commands
+from mercurial import hg
+from mercurial import ui
pkg_resources.require( 'elementtree' )
from elementtree import ElementTree
@@ -373,91 +389,6 @@
tool_version = tool_util.get_tool_version( app, guid )
return tool_version.get_version_ids( app, reverse=True )
- def handle_repository_contents( self, trans, tool_shed_repository, tool_path, repository_clone_url, relative_install_dir, tool_shed=None,
- tool_section=None, shed_tool_conf=None, reinstalling=False ):
- """
- Generate the metadata for the installed tool shed repository, among other things. This method is called from Galaxy (never the tool shed)
- when an admin is installing a new repository or reinstalling an uninstalled repository.
- """
- shed_config_dict = trans.app.toolbox.get_shed_config_dict_by_filename( shed_tool_conf )
- metadata_dict, invalid_file_tups = metadata_util.generate_metadata_for_changeset_revision( app=trans.app,
- repository=tool_shed_repository,
- changeset_revision=tool_shed_repository.changeset_revision,
- repository_clone_url=repository_clone_url,
- shed_config_dict=shed_config_dict,
- relative_install_dir=relative_install_dir,
- repository_files_dir=None,
- resetting_all_metadata_on_repository=False,
- updating_installed_repository=False,
- persist=True )
- tool_shed_repository.metadata = metadata_dict
- trans.sa_session.add( tool_shed_repository )
- trans.sa_session.flush()
- if 'tool_dependencies' in metadata_dict and not reinstalling:
- tool_dependencies = tool_dependency_util.create_tool_dependency_objects( trans.app, tool_shed_repository, relative_install_dir, set_status=True )
- if 'tools' in metadata_dict:
- tool_panel_dict = tool_util.generate_tool_panel_dict_for_new_install( metadata_dict[ 'tools' ], tool_section )
- sample_files = metadata_dict.get( 'sample_files', [] )
- tool_index_sample_files = tool_util.get_tool_index_sample_files( sample_files )
- tool_util.copy_sample_files( trans.app, tool_index_sample_files, tool_path=tool_path )
- sample_files_copied = [ str( s ) for s in tool_index_sample_files ]
- repository_tools_tups = suc.get_repository_tools_tups( trans.app, metadata_dict )
- if repository_tools_tups:
- # Handle missing data table entries for tool parameters that are dynamically generated select lists.
- repository_tools_tups = tool_util.handle_missing_data_table_entry( trans.app, relative_install_dir, tool_path, repository_tools_tups )
- # Handle missing index files for tool parameters that are dynamically generated select lists.
- repository_tools_tups, sample_files_copied = tool_util.handle_missing_index_file( trans.app,
- tool_path,
- sample_files,
- repository_tools_tups,
- sample_files_copied )
- # Copy remaining sample files included in the repository to the ~/tool-data directory of the local Galaxy instance.
- tool_util.copy_sample_files( trans.app, sample_files, tool_path=tool_path, sample_files_copied=sample_files_copied )
- tool_util.add_to_tool_panel( app=trans.app,
- repository_name=tool_shed_repository.name,
- repository_clone_url=repository_clone_url,
- changeset_revision=tool_shed_repository.installed_changeset_revision,
- repository_tools_tups=repository_tools_tups,
- owner=tool_shed_repository.owner,
- shed_tool_conf=shed_tool_conf,
- tool_panel_dict=tool_panel_dict,
- new_install=True )
- if 'data_manager' in metadata_dict:
- new_data_managers = data_manager_util.install_data_managers( trans.app,
- trans.app.config.shed_data_manager_config_file,
- metadata_dict,
- shed_config_dict,
- relative_install_dir,
- tool_shed_repository,
- repository_tools_tups )
- if 'datatypes' in metadata_dict:
- tool_shed_repository.status = trans.model.ToolShedRepository.installation_status.LOADING_PROPRIETARY_DATATYPES
- if not tool_shed_repository.includes_datatypes:
- tool_shed_repository.includes_datatypes = True
- trans.sa_session.add( tool_shed_repository )
- trans.sa_session.flush()
- files_dir = relative_install_dir
- if shed_config_dict.get( 'tool_path' ):
- files_dir = os.path.join( shed_config_dict['tool_path'], files_dir )
- datatypes_config = suc.get_config_from_disk( 'datatypes_conf.xml', files_dir )
- # Load data types required by tools.
- converter_path, display_path = datatype_util.alter_config_and_load_prorietary_datatypes( trans.app, datatypes_config, files_dir, override=False )
- if converter_path or display_path:
- # Create a dictionary of tool shed repository related information.
- repository_dict = datatype_util.create_repository_dict_for_proprietary_datatypes( tool_shed=tool_shed,
- name=tool_shed_repository.name,
- owner=tool_shed_repository.owner,
- installed_changeset_revision=tool_shed_repository.installed_changeset_revision,
- tool_dicts=metadata_dict.get( 'tools', [] ),
- converter_path=converter_path,
- display_path=display_path )
- if converter_path:
- # Load proprietary datatype converters
- trans.app.datatypes_registry.load_datatype_converters( trans.app.toolbox, installed_repository_dict=repository_dict )
- if display_path:
- # Load proprietary datatype display applications
- trans.app.datatypes_registry.load_display_applications( installed_repository_dict=repository_dict )
-
@web.expose
@web.require_admin
def import_workflow( self, trans, workflow_name, repository_id, **kwd ):
@@ -605,96 +536,21 @@
"""Install specified tool shed repositories."""
shed_tool_conf = kwd.get( 'shed_tool_conf', '' )
tool_path = kwd[ 'tool_path' ]
- includes_tool_dependencies = util.string_as_bool( kwd[ 'includes_tool_dependencies' ] )
install_tool_dependencies = CheckboxField.is_checked( kwd.get( 'install_tool_dependencies', '' ) )
- # There must be a one-to-one mapping between items in the 3 lists:tool_shed_repositories, tool_panel_section_keys, repo_info_dicts.
+ # There must be a one-to-one mapping between items in the 3 lists: tool_shed_repositories, tool_panel_section_keys, repo_info_dicts.
tool_panel_section_keys = util.listify( kwd[ 'tool_panel_section_keys' ] )
repo_info_dicts = util.listify( kwd[ 'repo_info_dicts' ] )
for index, tool_shed_repository in enumerate( tool_shed_repositories ):
repo_info_dict = repo_info_dicts[ index ]
tool_panel_section_key = tool_panel_section_keys[ index ]
- if tool_panel_section_key:
- tool_section = trans.app.toolbox.tool_panel[ tool_panel_section_key ]
- else:
- tool_section = None
- if isinstance( repo_info_dict, basestring ):
- repo_info_dict = encoding_util.tool_shed_decode( repo_info_dict )
- # Clone each repository to the configured location.
- suc.update_tool_shed_repository_status( trans.app, tool_shed_repository, trans.model.ToolShedRepository.installation_status.CLONING )
- repo_info_tuple = repo_info_dict[ tool_shed_repository.name ]
- description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = repo_info_tuple
- relative_clone_dir = suc.generate_tool_shed_repository_install_dir( repository_clone_url, tool_shed_repository.installed_changeset_revision )
- clone_dir = os.path.join( tool_path, relative_clone_dir )
- relative_install_dir = os.path.join( relative_clone_dir, tool_shed_repository.name )
- install_dir = os.path.join( tool_path, relative_install_dir )
- cloned_ok, error_message = suc.clone_repository( repository_clone_url, os.path.abspath( install_dir ), ctx_rev )
- if cloned_ok:
- if reinstalling:
- # Since we're reinstalling the repository we need to find the latest changeset revision to which is can be updated.
- changeset_revision_dict = repository_util.get_update_to_changeset_revision_and_ctx_rev( trans, tool_shed_repository )
- current_changeset_revision = changeset_revision_dict.get( 'changeset_revision', None )
- current_ctx_rev = changeset_revision_dict.get( 'ctx_rev', None )
- if current_ctx_rev != ctx_rev:
- repo = hg.repository( suc.get_configured_ui(), path=os.path.abspath( install_dir ) )
- repository_util.pull_repository( repo, repository_clone_url, current_changeset_revision )
- suc.update_repository( repo, ctx_rev=current_ctx_rev )
- self.handle_repository_contents( trans,
- tool_shed_repository=tool_shed_repository,
- tool_path=tool_path,
- repository_clone_url=repository_clone_url,
- relative_install_dir=relative_install_dir,
- tool_shed=tool_shed_repository.tool_shed,
- tool_section=tool_section,
- shed_tool_conf=shed_tool_conf,
- reinstalling=reinstalling )
- trans.sa_session.refresh( tool_shed_repository )
- metadata = tool_shed_repository.metadata
- if 'tools' in metadata:
- # Get the tool_versions from the tool shed for each tool in the installed change set.
- suc.update_tool_shed_repository_status( trans.app,
- tool_shed_repository,
- trans.model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS )
- tool_shed_url = suc.get_url_from_tool_shed( trans.app, tool_shed_repository.tool_shed )
- url = suc.url_join( tool_shed_url,
- '/repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s' % \
- ( tool_shed_repository.name, tool_shed_repository.owner, tool_shed_repository.changeset_revision ) )
- response = urllib2.urlopen( url )
- text = response.read()
- response.close()
- if text:
- tool_version_dicts = json.from_json_string( text )
- tool_util.handle_tool_versions( trans.app, tool_version_dicts, tool_shed_repository )
- else:
- message += "Version information for the tools included in the <b>%s</b> repository is missing. " % name
- message += "Reset all of this repository's metadata in the tool shed, then set the installed tool versions "
- message += "from the installed repository's <b>Repository Actions</b> menu. "
- status = 'error'
- if install_tool_dependencies and tool_shed_repository.tool_dependencies and 'tool_dependencies' in metadata:
- work_dir = tempfile.mkdtemp()
- # Install tool dependencies.
- suc.update_tool_shed_repository_status( trans.app,
- tool_shed_repository,
- trans.model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES )
- # Get the tool_dependencies.xml file from the repository.
- tool_dependencies_config = suc.get_config_from_disk( 'tool_dependencies.xml', install_dir )#relative_install_dir )
- installed_tool_dependencies = common_install_util.handle_tool_dependencies( app=trans.app,
- tool_shed_repository=tool_shed_repository,
- tool_dependencies_config=tool_dependencies_config,
- tool_dependencies=tool_shed_repository.tool_dependencies )
- try:
- shutil.rmtree( work_dir )
- except:
- pass
- suc.update_tool_shed_repository_status( trans.app, tool_shed_repository, trans.model.ToolShedRepository.installation_status.INSTALLED )
- else:
- # An error occurred while cloning the repository, so reset everything necessary to enable another attempt.
- self.set_repository_attributes( trans,
- tool_shed_repository,
- status=trans.model.ToolShedRepository.installation_status.ERROR,
- error_message=error_message,
- deleted=False,
- uninstalled=False,
- remove_from_disk=True )
+ repository_util.install_tool_shed_repository( trans,
+ tool_shed_repository,
+ repo_info_dict,
+ tool_panel_section_key,
+ shed_tool_conf,
+ tool_path,
+ install_tool_dependencies,
+ reinstalling=reinstalling )
tsr_ids_for_monitoring = [ trans.security.encode_id( tsr.id ) for tsr in tool_shed_repositories ]
return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
action='monitor_repository_installation',
@@ -929,7 +785,7 @@
@web.expose
@web.require_admin
def prepare_for_install( self, trans, **kwd ):
- if not have_shed_tool_conf_for_install( trans ):
+ if not suc.have_shed_tool_conf_for_install( trans ):
message = 'The <b>tool_config_file</b> setting in <b>universe_wsgi.ini</b> must include at least one shed tool configuration file name with a '
message += '<b><toolbox></b> tag that includes a <b>tool_path</b> attribute value which is a directory relative to the Galaxy installation '
message += 'directory in order to automatically install tools from a Galaxy tool shed (e.g., the file name <b>shed_tool_conf.xml</b> whose '
@@ -956,7 +812,7 @@
install_tool_dependencies = kwd.get( 'install_tool_dependencies', '' )
encoded_repo_info_dicts = util.listify( kwd.get( 'encoded_repo_info_dicts', None ) )
if not encoded_repo_info_dicts:
- # The request originated in the tool shed.
+ # The request originated in the tool shed via a tool search.
repository_ids = kwd.get( 'repository_ids', None )
changeset_revisions = kwd.get( 'changeset_revisions', None )
# Get the information necessary to install each repository.
@@ -981,78 +837,40 @@
else:
install_tool_dependencies = False
tool_path = suc.get_tool_path_by_shed_tool_conf_filename( trans, shed_tool_conf )
+ installation_dict = dict( install_repository_dependencies=install_repository_dependencies,
+ new_tool_panel_section=new_tool_panel_section,
+ no_changes_checked=False,
+ reinstalling=False,
+ repo_info_dicts=repo_info_dicts,
+ tool_panel_section=tool_panel_section,
+ tool_path=tool_path,
+ tool_shed_url=tool_shed_url )
created_or_updated_tool_shed_repositories, tool_panel_section_keys, repo_info_dicts, filtered_repo_info_dicts, message = \
- repository_dependency_util.create_repository_dependency_objects( trans,
- tool_path,
- tool_shed_url,
- repo_info_dicts,
- reinstalling=False,
- install_repository_dependencies=install_repository_dependencies,
- no_changes_checked=False,
- tool_panel_section=tool_panel_section,
- new_tool_panel_section=new_tool_panel_section )
+ repository_util.handle_tool_shed_repositories( trans, installation_dict, using_api=False )
if message and len( repo_info_dicts ) == 1:
- installed_tool_shed_repository = created_or_updated_tool_shed_repositories[ 0 ]
- message+= 'Click <a href="%s">here</a> to manage the repository. ' % \
- ( web.url_for( controller='admin_toolshed', action='manage_repository', id=trans.security.encode_id( installed_tool_shed_repository.id ) ) )
return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
action='browse_repositories',
message=message,
status='error' ) )
+
if created_or_updated_tool_shed_repositories:
- # Handle contained tools.
- if includes_tools_for_display_in_tool_panel and ( new_tool_panel_section or tool_panel_section ):
- if new_tool_panel_section:
- section_id = new_tool_panel_section.lower().replace( ' ', '_' )
- tool_panel_section_key = 'section_%s' % str( section_id )
- if tool_panel_section_key in trans.app.toolbox.tool_panel:
- # Appending a tool to an existing section in trans.app.toolbox.tool_panel
- log.debug( "Appending to tool panel section: %s" % new_tool_panel_section )
- tool_section = trans.app.toolbox.tool_panel[ tool_panel_section_key ]
- else:
- # Appending a new section to trans.app.toolbox.tool_panel
- log.debug( "Loading new tool panel section: %s" % new_tool_panel_section )
- elem = Element( 'section' )
- elem.attrib[ 'name' ] = new_tool_panel_section
- elem.attrib[ 'id' ] = section_id
- elem.attrib[ 'version' ] = ''
- tool_section = tools.ToolSection( elem )
- trans.app.toolbox.tool_panel[ tool_panel_section_key ] = tool_section
- else:
- tool_panel_section_key = 'section_%s' % tool_panel_section
- tool_section = trans.app.toolbox.tool_panel[ tool_panel_section_key ]
- else:
- tool_panel_section_key = None
- tool_section = None
- encoded_repository_ids = [ trans.security.encode_id( tsr.id ) for tsr in created_or_updated_tool_shed_repositories ]
- # Create a one-to-one mapping of tool shed repository id and tool panel section key. All tools contained in the repositories
- # being installed will be loaded into the same section in the tool panel.
- for tsr in created_or_updated_tool_shed_repositories:
- tool_panel_section_keys.append( tool_panel_section_key )
- new_kwd = dict( includes_tools=includes_tools,
- includes_tools_for_display_in_tool_panel=includes_tools_for_display_in_tool_panel,
- has_repository_dependencies=has_repository_dependencies,
- install_repository_dependencies=install_repository_dependencies,
- includes_tool_dependencies=includes_tool_dependencies,
- install_tool_dependencies=install_tool_dependencies,
- message=message,
- repo_info_dicts=filtered_repo_info_dicts,
- shed_tool_conf=shed_tool_conf,
- status=status,
- tool_path=tool_path,
- tool_panel_section_keys=tool_panel_section_keys,
- tool_shed_repository_ids=encoded_repository_ids,
- tool_shed_url=tool_shed_url )
- encoded_kwd = encoding_util.tool_shed_encode( new_kwd )
- tsr_ids = [ r.id for r in created_or_updated_tool_shed_repositories ]
- tool_shed_repositories = []
- for tsr_id in tsr_ids:
- tsr = trans.sa_session.query( trans.model.ToolShedRepository ).get( tsr_id )
- tool_shed_repositories.append( tsr )
- clause_list = []
- for tsr_id in tsr_ids:
- clause_list.append( trans.model.ToolShedRepository.table.c.id == tsr_id )
- query = trans.sa_session.query( trans.model.ToolShedRepository ).filter( or_( *clause_list ) )
+ installation_dict = dict( created_or_updated_tool_shed_repositories=created_or_updated_tool_shed_repositories,
+ filtered_repo_info_dicts=filtered_repo_info_dicts,
+ has_repository_dependencies=has_repository_dependencies,
+ includes_tool_dependencies=includes_tool_dependencies,
+ includes_tools=includes_tools,
+ includes_tools_for_display_in_tool_panel=includes_tools_for_display_in_tool_panel,
+ install_repository_dependencies=install_repository_dependencies,
+ install_tool_dependencies=install_tool_dependencies,
+ message=message,
+ new_tool_panel_section=new_tool_panel_section,
+ shed_tool_conf=shed_tool_conf,
+ status=status,
+ tool_panel_section=tool_panel_section,
+ tool_panel_section_keys=tool_panel_section_keys,
+ tool_path=tool_path,
+ tool_shed_url=tool_shed_url )
+ encoded_kwd, query, tool_shed_repositories, encoded_repository_ids = repository_util.initiate_repository_installation( trans, installation_dict )
return trans.fill_template( 'admin/tool_shed_repository/initiate_repository_installation.mako',
encoded_kwd=encoded_kwd,
query=query,
@@ -1065,9 +883,9 @@
return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
action='manage_repositories',
**kwd ) )
- shed_tool_conf_select_field = build_shed_tool_conf_select_field( trans )
+ shed_tool_conf_select_field = tool_util.build_shed_tool_conf_select_field( trans )
tool_path = suc.get_tool_path_by_shed_tool_conf_filename( trans, shed_tool_conf )
- tool_panel_section_select_field = build_tool_panel_section_select_field( trans )
+ tool_panel_section_select_field = tool_util.build_tool_panel_section_select_field( trans )
if len( repo_info_dicts ) == 1:
# If we're installing a single repository, see if it contains a readme or dependencies that we can display.
repo_info_dict = repo_info_dicts[ 0 ]
@@ -1401,7 +1219,7 @@
original_section_name = ''
else:
original_section_name = ''
- tool_panel_section_select_field = build_tool_panel_section_select_field( trans )
+ tool_panel_section_select_field = tool_util.build_tool_panel_section_select_field( trans )
no_changes_check_box = CheckboxField( 'no_changes', checked=True )
if original_section_name:
message += "The tools contained in your <b>%s</b> repository were last loaded into the tool panel section <b>%s</b>. " \
@@ -1417,7 +1235,7 @@
no_changes_check_box = None
original_section_name = ''
tool_panel_section_select_field = None
- shed_tool_conf_select_field = build_shed_tool_conf_select_field( trans )
+ shed_tool_conf_select_field = tool_util.build_shed_tool_conf_select_field( trans )
containers_dict = repository_util.populate_containers_dict_for_new_install( trans=trans,
tool_shed_url=tool_shed_url,
tool_path=tool_path,
@@ -1521,13 +1339,13 @@
"""An error occurred while cloning the repository, so reset everything necessary to enable another attempt."""
repository = suc.get_installed_tool_shed_repository( trans, kwd[ 'id' ] )
if kwd.get( 'reset_repository', False ):
- self.set_repository_attributes( trans,
- repository,
- status=trans.model.ToolShedRepository.installation_status.NEW,
- error_message=None,
- deleted=False,
- uninstalled=False,
- remove_from_disk=True )
+ suc.set_repository_attributes( trans,
+ repository,
+ status=trans.model.ToolShedRepository.installation_status.NEW,
+ error_message=None,
+ deleted=False,
+ uninstalled=False,
+ remove_from_disk=True )
new_kwd = {}
new_kwd[ 'message' ] = "You can now attempt to install the repository named <b>%s</b> again." % repository.name
new_kwd[ 'status' ] = "done"
@@ -1538,20 +1356,6 @@
action='manage_repository',
**kwd ) )
- def set_repository_attributes( self, trans, repository, status, error_message, deleted, uninstalled, remove_from_disk=False ):
- if remove_from_disk:
- relative_install_dir = repository.repo_path( trans.app )
- if relative_install_dir:
- clone_dir = os.path.abspath( relative_install_dir )
- shutil.rmtree( clone_dir )
- log.debug( "Removed repository installation directory: %s" % str( clone_dir ) )
- repository.error_message = error_message
- repository.status = status
- repository.deleted = deleted
- repository.uninstalled = uninstalled
- trans.sa_session.add( repository )
- trans.sa_session.flush()
-
@web.expose
@web.require_admin
def set_tool_versions( self, trans, **kwd ):
@@ -1803,46 +1607,3 @@
metadata=metadata,
message=message,
status=status )
-
-## ---- Utility methods -------------------------------------------------------
-
-def build_shed_tool_conf_select_field( trans ):
- """Build a SelectField whose options are the keys in trans.app.toolbox.shed_tool_confs."""
- options = []
- for shed_tool_conf_dict in trans.app.toolbox.shed_tool_confs:
- shed_tool_conf_filename = shed_tool_conf_dict[ 'config_filename' ]
- if shed_tool_conf_filename != trans.app.config.migrated_tools_config:
- if shed_tool_conf_filename.startswith( './' ):
- option_label = shed_tool_conf_filename.replace( './', '', 1 )
- else:
- option_label = shed_tool_conf_filename
- options.append( ( option_label, shed_tool_conf_filename ) )
- select_field = SelectField( name='shed_tool_conf' )
- for option_tup in options:
- select_field.add_option( option_tup[0], option_tup[1] )
- return select_field
-
-def build_tool_panel_section_select_field( trans ):
- """Build a SelectField whose options are the sections of the current in-memory toolbox."""
- options = []
- for k, v in trans.app.toolbox.tool_panel.items():
- if isinstance( v, tools.ToolSection ):
- options.append( ( v.name, v.id ) )
- select_field = SelectField( name='tool_panel_section', display='radio' )
- for option_tup in options:
- select_field.add_option( option_tup[0], option_tup[1] )
- return select_field
-
-def can_select_tool_panel_section():
- pass
-
-def have_shed_tool_conf_for_install( trans ):
- if not trans.app.toolbox.shed_tool_confs:
- return False
- migrated_tools_conf_path, migrated_tools_conf_name = os.path.split( trans.app.config.migrated_tools_config )
- for shed_tool_conf_dict in trans.app.toolbox.shed_tool_confs:
- shed_tool_conf = shed_tool_conf_dict[ 'config_filename' ]
- shed_tool_conf_path, shed_tool_conf_name = os.path.split( shed_tool_conf )
- if shed_tool_conf_name != migrated_tools_conf_name:
- return True
- return False
diff -r 75415966a26b5597c1ec46a10f6071238a5578be -r 2af272e9f2565cd9272c0aa5004a5726cfafc9bf lib/galaxy/webapps/galaxy/controllers/biostar.py
--- a/lib/galaxy/webapps/galaxy/controllers/biostar.py
+++ b/lib/galaxy/webapps/galaxy/controllers/biostar.py
@@ -8,26 +8,52 @@
from galaxy.util import json
import hmac
+# Slugifying from Armin Ronacher (http://flask.pocoo.org/snippets/5/)
+
+import re
+from unicodedata import normalize
+
+_punct_re = re.compile(r'[\t !"#$%&\'()*\-/<=>?@\[\\\]^_`{|},.]+')
+
+
+def slugify(text, delim=u'-'):
+    """Generates a slightly worse ASCII-only slug."""
+ result = []
+ for word in _punct_re.split(text.lower()):
+ word = normalize('NFKD', word).encode('ascii', 'ignore')
+ if word:
+ result.append(word)
+ return unicode(delim.join(result))
+
+
# Biostar requires all keys to be present, so we start with a template
DEFAULT_PAYLOAD = {
- 'email': "",
- 'title': "Question about Galaxy",
+ 'email': "",
+ 'title': "Question about Galaxy",
'tags': 'galaxy',
- 'tool_name': '',
- 'tool_version': '',
+ 'tool_name': '',
+ 'tool_version': '',
'tool_id': ''
}
+
def encode_data( key, data ):
"""
Encode data to send a question to Biostar
"""
- text = json.dumps(data)
+ text = json.to_json_string(data)
text = base64.urlsafe_b64encode(text)
digest = hmac.new(key, text).hexdigest()
return text, digest
+def tag_for_tool( tool ):
+ """
+    Generate a reasonable biostar tag for a tool.
+ """
+ return slugify( unicode( tool.name ) )
+
+
class BiostarController( BaseUIController ):
"""
Provides integration with Biostar through external authentication, see: http://liondb.com/help/x/
@@ -81,6 +107,10 @@
if not tool:
return error( "No tool found matching '%s'" % tool_id )
# Tool specific information for payload
- payload = { 'title': "Question about Galaxy tool '%s'" % tool.name, 'tool_name': tool.name, 'tool_version': tool.version, 'tool_id': tool.id }
+ payload = { 'title': "Question about Galaxy tool '%s'" % tool.name,
+ 'tool_name': tool.name,
+ 'tool_version': tool.version,
+ 'tool_id': tool.id,
+ 'tags': 'galaxy ' + tag_for_tool( tool ) }
# Pass on to regular question method
return self.biostar_question_redirect( trans, payload )
\ No newline at end of file
diff -r 75415966a26b5597c1ec46a10f6071238a5578be -r 2af272e9f2565cd9272c0aa5004a5726cfafc9bf lib/galaxy/webapps/tool_shed/api/repositories.py
--- a/lib/galaxy/webapps/tool_shed/api/repositories.py
+++ b/lib/galaxy/webapps/tool_shed/api/repositories.py
@@ -1,20 +1,42 @@
import logging
+from galaxy.web.framework.helpers import time_ago
+from galaxy import web
+from galaxy import util
+from galaxy.web.base.controller import BaseAPIController
import tool_shed.util.shed_util_common as suc
-from galaxy import web, util
-from galaxy.web.base.controller import BaseAPIController
+from tool_shed.galaxy_install import repository_util
+
+from galaxy import eggs
+import pkg_resources
+
+pkg_resources.require( 'mercurial' )
+from mercurial import hg, ui, commands
log = logging.getLogger( __name__ )
+def default_repository_value_mapper( trans, repository ):
+ value_mapper={ 'id' : trans.security.encode_id( repository.id ),
+ 'user_id' : trans.security.encode_id( repository.user_id ) }
+ return value_mapper
+
+def default_repository_metadata_value_mapper( trans, repository_metadata ):
+ value_mapper = { 'id' : trans.security.encode_id( repository_metadata.id ),
+ 'repository_id' : trans.security.encode_id( repository_metadata.repository_id ) }
+ if repository_metadata.time_last_tested:
+ value_mapper[ 'time_last_tested' ] = time_ago( repository_metadata.time_last_tested )
+ return value_mapper
+
class RepositoriesController( BaseAPIController ):
"""RESTful controller for interactions with repositories in the Tool Shed."""
- @web.expose_api
+ @web.expose_api_anonymous
def index( self, trans, deleted=False, **kwd ):
"""
GET /api/repositories
Displays a collection (list) of repositories.
"""
+ # Example URL: http://localhost:9009/api/repositories
repository_dicts = []
deleted = util.string_as_bool( deleted )
try:
@@ -23,12 +45,10 @@
.order_by( trans.app.model.Repository.table.c.name ) \
.all()
for repository in query:
- value_mapper={ 'id' : trans.security.encode_id( repository.id ),
- 'user_id' : trans.security.encode_id( repository.user_id ) }
- repository_dict = repository.get_api_value( view='collection', value_mapper=value_mapper )
- repository_dict[ 'url' ] = web.url_for( controller='repository_contents',
- action='index',
- repository_id=trans.security.encode_id( repository.id ) )
+ repository_dict = repository.get_api_value( view='collection', value_mapper=default_repository_value_mapper( trans, repository ) )
+ repository_dict[ 'url' ] = web.url_for( controller='repositories',
+ action='show',
+ id=trans.security.encode_id( repository.id ) )
repository_dicts.append( repository_dict )
return repository_dicts
except Exception, e:
@@ -37,25 +57,117 @@
trans.response.status = 500
return message
- @web.expose_api
+ @web.expose_api_anonymous
def show( self, trans, id, **kwd ):
"""
GET /api/repositories/{encoded_repository_id}
- Displays information about a repository in the Tool Shed.
+ Returns information about a repository in the Tool Shed.
- :param id: the encoded id of the `Repository` object
+ :param id: the encoded id of the Repository object
"""
+ # Example URL: http://localhost:9009/api/repositories/f9cad7b01a472135
try:
repository = suc.get_repository_in_tool_shed( trans, id )
- value_mapper={ 'id' : trans.security.encode_id( repository.id ),
- 'user_id' : trans.security.encode_id( repository.user_id ) }
- repository_dict = repository.get_api_value( view='element', value_mapper=value_mapper )
- repository_dict[ 'url' ] = web.url_for( controller='repository_contents',
- action='index',
- repository_id=trans.security.encode_id( repository.id ) )
+ repository_dict = repository.get_api_value( view='element', value_mapper=default_repository_value_mapper( trans, repository ) )
+ repository_dict[ 'url' ] = web.url_for( controller='repositories',
+ action='show',
+ id=trans.security.encode_id( repository.id ) )
return repository_dict
except Exception, e:
message = "Error in the Tool Shed repositories API in show: %s" % str( e )
log.error( message, exc_info=True )
trans.response.status = 500
return message
+
+ @web.expose_api_anonymous
+ def get_repository_revision_install_info( self, trans, name, owner, changeset_revision, **kwd ):
+ """
+ GET /api/repository/get_repository_revision_install_info
+
+ :param name: the name of the Repository
+ :param owner: the owner of the Repository
+    :param changeset_revision: the changeset_revision of the RepositoryMetadata object associated with the Repository
+
+ Returns a list of the following dictionaries::
+ - a dictionary defining the Repository. For example:
+ {
+ "deleted": false,
+ "deprecated": false,
+ "description": "add_column hello",
+ "id": "f9cad7b01a472135",
+ "long_description": "add_column hello",
+ "name": "add_column",
+ "owner": "test",
+ "private": false,
+ "times_downloaded": 6,
+ "url": "/api/repositories/f9cad7b01a472135",
+ "user_id": "f9cad7b01a472135"
+ }
+    - a dictionary defining the Repository revision (RepositoryMetadata). For example:
+ {
+ "changeset_revision": "3a08cc21466f",
+ "downloadable": true,
+ "has_repository_dependencies": false,
+ "id": "f9cad7b01a472135",
+ "includes_datatypes": false,
+ "includes_tool_dependencies": false,
+ "includes_tools": true,
+ "includes_tools_for_display_in_tool_panel": true,
+ "includes_workflows": false,
+ "malicious": false,
+ "repository_id": "f9cad7b01a472135",
+ "url": "/api/repository_revisions/f9cad7b01a472135"
+ }
+ - a dictionary including the additional information required to install the repository. For example:
+ {
+ "add_column": [
+ "add_column hello",
+ "http://test@localhost:9009/repos/test/add_column",
+ "3a08cc21466f",
+ "1",
+ "test",
+ {},
+ {}
+ ]
+ }
+ """
+ # Example URL: http://localhost:9009/api/repositories/get_repository_revision_install_info…
+ try:
+ # Get the repository information.
+ repository = suc.get_repository_by_name_and_owner( trans.app, name, owner )
+ encoded_repository_id = trans.security.encode_id( repository.id )
+ repository_dict = repository.get_api_value( view='element', value_mapper=default_repository_value_mapper( trans, repository ) )
+ repository_dict[ 'url' ] = web.url_for( controller='repositories',
+ action='show',
+ id=encoded_repository_id )
+ # Get the repository_metadata information.
+ repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, encoded_repository_id, changeset_revision )
+ if not repository_metadata:
+            # The changeset_revision column in the repository_metadata table has been updated with a new value, so find the
+ # changeset_revision to which we need to update.
+ repo_dir = repository.repo_path( trans.app )
+ repo = hg.repository( suc.get_configured_ui(), repo_dir )
+ new_changeset_revision = suc.get_next_downloadable_changeset_revision( repository, repo, changeset_revision )
+ repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, encoded_repository_id, new_changeset_revision )
+ changeset_revision = new_changeset_revision
+ if repository_metadata:
+ encoded_repository_metadata_id = trans.security.encode_id( repository_metadata.id )
+ repository_metadata_dict = repository_metadata.get_api_value( view='collection',
+ value_mapper=default_repository_metadata_value_mapper( trans, repository_metadata ) )
+ repository_metadata_dict[ 'url' ] = web.url_for( controller='repository_revisions',
+ action='show',
+ id=encoded_repository_metadata_id )
+ # Get the repo_info_dict for installing the repository.
+ repo_info_dict, includes_tools, includes_tool_dependencies, includes_tools_for_display_in_tool_panel, has_repository_dependencies = \
+ repository_util.get_repo_info_dict( trans, encoded_repository_id, changeset_revision )
+ return repository_dict, repository_metadata_dict, repo_info_dict
+ else:
+ message = "Unable to locate repository_metadata record for repository id %d and changeset_revision %s" % ( repository.id, changeset_revision )
+ log.error( message, exc_info=True )
+ trans.response.status = 500
+ return repository_dict, {}, {}
+ except Exception, e:
+ message = "Error in the Tool Shed repositories API in get_repository_revision_install_info: %s" % str( e )
+ log.error( message, exc_info=True )
+ trans.response.status = 500
+ return message
diff -r 75415966a26b5597c1ec46a10f6071238a5578be -r 2af272e9f2565cd9272c0aa5004a5726cfafc9bf lib/galaxy/webapps/tool_shed/api/repository_contents.py
--- a/lib/galaxy/webapps/tool_shed/api/repository_contents.py
+++ /dev/null
@@ -1,33 +0,0 @@
-import logging
-import tool_shed.util.shed_util_common as suc
-from galaxy import web
-from galaxy.web.base.controller import BaseAPIController
-
-log = logging.getLogger( __name__ )
-
-
-class RepositoryContentsController( BaseAPIController ):
-
- @web.expose_api
- def index( self, trans, **kwd ):
- """
- GET /api/repositories/{encoded_repository_id}
- Displays a collection (dictionary) of repository contents.
-
- :param repository_id: the encoded id of the `Repository` object
- """
- try:
- repository_id = kwd[ 'repository_id' ]
- repository = suc.get_repository_in_tool_shed( trans, repository_id )
- value_mapper={ 'id' : trans.security.encode_id( repository.id ),
- 'user_id' : trans.security.encode_id( repository.user_id ) }
- repository_dict = repository.as_dict( value_mapper )
- repository_dict[ 'url' ] = web.url_for( controller='repository_contents',
- action='index',
- repository_id=repository_id )
- return repository_dict
- except Exception, e:
- message = "Error in the Tool Shed repository_contents API in index: %s" % str( e )
- log.error( message, exc_info=True )
- trans.response.status = 500
- return message
diff -r 75415966a26b5597c1ec46a10f6071238a5578be -r 2af272e9f2565cd9272c0aa5004a5726cfafc9bf lib/galaxy/webapps/tool_shed/api/repository_revision_contents.py
--- a/lib/galaxy/webapps/tool_shed/api/repository_revision_contents.py
+++ /dev/null
@@ -1,39 +0,0 @@
-import logging
-from galaxy import web
-from galaxy.web.framework.helpers import time_ago
-from tool_shed.util import metadata_util
-from galaxy.web.base.controller import BaseAPIController
-
-log = logging.getLogger( __name__ )
-
-def default_value_mapper( trans, repository_metadata ):
- value_mapper = { 'id' : trans.security.encode_id( repository_metadata.id ),
- 'repository_id' : trans.security.encode_id( repository_metadata.repository_id ) }
- if repository_metadata.time_last_tested:
- value_mapper[ 'time_last_tested' ] = time_ago( repository_metadata.time_last_tested )
- return value_mapper
-
-
-class RepositoryRevisionContentsController( BaseAPIController ):
-
- @web.expose_api
- def index( self, trans, **kwd ):
- """
- GET /api/repository_revisions/{encoded_repository_metadata_id}
- Displays a collection (dictionary) of repository_metadata contents.
-
- :param repository_metadata_id: the encoded id of the `RepositoryMetadata` object
- """
- try:
- repository_metadata_id = kwd.get( 'repository_metadata_id', None )
- repository_metadata = metadata_util.get_repository_metadata_by_id( trans, repository_metadata_id )
- repository_dict = repository_metadata.as_dict( value_mapper=default_value_mapper( trans, repository_metadata ) )
- repository_dict[ 'url' ] = web.url_for( controller='repository_revision_contents',
- action='index',
- repository_metadata_id=repository_metadata_id )
- return repository_dict
- except Exception, e:
- message = "Error in the Tool Shed repository_revision_contents API in index: %s" % str( e )
- log.error( message, exc_info=True )
- trans.response.status = 500
- return message
diff -r 75415966a26b5597c1ec46a10f6071238a5578be -r 2af272e9f2565cd9272c0aa5004a5726cfafc9bf lib/galaxy/webapps/tool_shed/api/repository_revisions.py
--- a/lib/galaxy/webapps/tool_shed/api/repository_revisions.py
+++ b/lib/galaxy/webapps/tool_shed/api/repository_revisions.py
@@ -1,7 +1,9 @@
-import datetime, logging
+import datetime
+import logging
from galaxy.web.framework.helpers import time_ago
from tool_shed.util import metadata_util
-from galaxy import web, util
+from galaxy import web
+from galaxy import util
from galaxy.model.orm import and_
from galaxy.web.base.controller import BaseAPIController
@@ -24,6 +26,7 @@
GET /api/repository_revisions
Displays a collection (list) of repository revisions.
"""
+ # Example URL: http://localhost:9009/api/repository_revisions
repository_metadata_dicts = []
# Build up an anded clause list of filters.
clause_list = []
@@ -55,9 +58,9 @@
for repository_metadata in query:
repository_metadata_dict = repository_metadata.get_api_value( view='collection',
value_mapper=default_value_mapper( trans, repository_metadata ) )
- repository_metadata_dict[ 'url' ] = web.url_for( controller='repository_revision_contents',
- action='index',
- repository_metadata_id=trans.security.encode_id( repository_metadata.id ) )
+ repository_metadata_dict[ 'url' ] = web.url_for( controller='repository_revisions',
+ action='show',
+ id=trans.security.encode_id( repository_metadata.id ) )
repository_metadata_dicts.append( repository_metadata_dict )
return repository_metadata_dicts
except Exception, e:
@@ -74,12 +77,13 @@
:param id: the encoded id of the `RepositoryMetadata` object
"""
+ # Example URL: http://localhost:9009/api/repository_revisions/bb125606ff9ea620
try:
repository_metadata = metadata_util.get_repository_metadata_by_id( trans, id )
repository_metadata_dict = repository_metadata.as_dict( value_mapper=default_value_mapper( trans, repository_metadata ) )
- repository_metadata_dict[ 'url' ] = web.url_for( controller='repository_revision_contents',
- action='index',
- repository_metadata_id=trans.security.encode_id( repository_metadata.id ) )
+ repository_metadata_dict[ 'url' ] = web.url_for( controller='repository_revisions',
+ action='show',
+ id=trans.security.encode_id( repository_metadata.id ) )
return repository_metadata_dict
except Exception, e:
message = "Error in the Tool Shed repository_revisions API in show: %s" % str( e )
@@ -114,7 +118,7 @@
trans.response.status = 500
return message
repository_metadata_dict = repository_metadata.as_dict( value_mapper=default_value_mapper( trans, repository_metadata ) )
- repository_metadata_dict[ 'url' ] = web.url_for( controller='repository_revision_contents',
- action='index',
- repository_metadata_id=trans.security.encode_id( repository_metadata.id ) )
+ repository_metadata_dict[ 'url' ] = web.url_for( controller='repository_revisions',
+ action='show',
+ id=trans.security.encode_id( repository_metadata.id ) )
return repository_metadata_dict
diff -r 75415966a26b5597c1ec46a10f6071238a5578be -r 2af272e9f2565cd9272c0aa5004a5726cfafc9bf lib/galaxy/webapps/tool_shed/buildapp.py
--- a/lib/galaxy/webapps/tool_shed/buildapp.py
+++ b/lib/galaxy/webapps/tool_shed/buildapp.py
@@ -1,8 +1,11 @@
"""
Provides factory methods to assemble the Galaxy web application
"""
-
-import logging, atexit, os, os.path, sys, config
+import atexit
+import config
+import logging
+import os
+import sys
from inspect import isclass
@@ -69,22 +72,21 @@
webapp.add_route( '/:controller/:action', action='index' )
webapp.add_route( '/:action', controller='repository', action='index' )
webapp.add_route( '/repos/*path_info', controller='hg', action='handle_request', path_info='/' )
- # Add the web API.
+ # Add the web API. # A good resource for RESTful services - http://routes.readthedocs.org/en/latest/restful.html
webapp.add_api_controllers( 'galaxy.webapps.tool_shed.api', app )
- webapp.api_mapper.resource( 'content',
- 'contents',
- controller='repository_contents',
+ webapp.api_mapper.resource( 'repository',
+ 'repositories',
+ controller='repositories',
+ collection={ 'get_repository_revision_install_info' : 'GET' },
name_prefix='repository_',
- path_prefix='/api/repositories/:repository_id',
+ path_prefix='/api',
parent_resources=dict( member_name='repository', collection_name='repositories' ) )
- webapp.api_mapper.resource( 'content',
- 'contents',
- controller='repository_revision_contents',
+ webapp.api_mapper.resource( 'repository_revision',
+ 'repository_revisions',
+ controller='repository_revisions',
name_prefix='repository_revision_',
- path_prefix='/api/repository_revisions/:repository_metadata_id',
+ path_prefix='/api',
parent_resources=dict( member_name='repository_revision', collection_name='repository_revisions' ) )
- webapp.api_mapper.resource( 'repository', 'repositories', path_prefix='/api' )
- webapp.api_mapper.resource( 'repository_revision', 'repository_revisions', path_prefix='/api' )
webapp.finalize_config()
# Wrap the webapp in some useful middleware
if kwargs.get( 'middleware', True ):
diff -r 75415966a26b5597c1ec46a10f6071238a5578be -r 2af272e9f2565cd9272c0aa5004a5726cfafc9bf lib/galaxy/webapps/tool_shed/controllers/repository.py
--- a/lib/galaxy/webapps/tool_shed/controllers/repository.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/repository.py
@@ -1,26 +1,47 @@
-import os, logging, re, tempfile, ConfigParser, string
-from time import gmtime, strftime
+import ConfigParser
+import logging
+import os
+import re
+import string
+import tempfile
+from time import gmtime
+from time import strftime
from datetime import date, datetime
-from galaxy import util, web
+from galaxy import util
+from galaxy import web
from galaxy.util.odict import odict
from galaxy.web.base.controller import BaseUIController
-from galaxy.web.form_builder import CheckboxField, build_select_field
+from galaxy.web.form_builder import CheckboxField
+from galaxy.web.form_builder import build_select_field
from galaxy.webapps.tool_shed import model
from galaxy.webapps.tool_shed.model import directory_hash_id
from galaxy.web.framework.helpers import grids
from galaxy.util import json
-from galaxy.model.orm import and_, or_
+from galaxy.model.orm import and_
+from galaxy.model.orm import or_
import tool_shed.util.shed_util_common as suc
-from tool_shed.util import encoding_util, metadata_util, readme_util, repository_dependency_util, review_util, tool_dependency_util, tool_util, workflow_util
+from tool_shed.util import encoding_util
+from tool_shed.util import metadata_util
+from tool_shed.util import readme_util
+from tool_shed.util import repository_dependency_util
+from tool_shed.util import review_util
+from tool_shed.util import tool_dependency_util
+from tool_shed.util import tool_util
+from tool_shed.util import workflow_util
from tool_shed.galaxy_install import repository_util
-from galaxy.webapps.tool_shed.util import common_util, container_util
+from galaxy.webapps.tool_shed.util import common_util
+from galaxy.webapps.tool_shed.util import container_util
import galaxy.tools
import tool_shed.grids.repository_grids as repository_grids
import tool_shed.grids.util as grids_util
from galaxy import eggs
eggs.require('mercurial')
-from mercurial import hg, ui, patch, commands
+
+from mercurial import commands
+from mercurial import hg
+from mercurial import patch
+from mercurial import ui
log = logging.getLogger( __name__ )
@@ -1124,35 +1145,17 @@
includes_tool_dependencies = False
repo_info_dicts = []
for tup in zip( util.listify( repository_ids ), util.listify( changeset_revisions ) ):
- repository_id, changeset_revision = tup
- repository = suc.get_repository_in_tool_shed( trans, repository_id )
- repository_clone_url = suc.generate_clone_url_for_repository_in_tool_shed( trans, repository )
- repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision )
- metadata = repository_metadata.metadata
- if not includes_tools:
- if 'tools' in metadata:
- includes_tools = True
- if not includes_tools_for_display_in_tool_panel:
- includes_tools_for_display_in_tool_panel = repository_metadata.includes_tools_for_display_in_tool_panel
- if not has_repository_dependencies:
- if 'repository_dependencies' in metadata:
- has_repository_dependencies = True
- if not includes_tool_dependencies:
- if 'tool_dependencies' in metadata:
- includes_tool_dependencies = True
- repo_dir = repository.repo_path( trans.app )
- repo = hg.repository( suc.get_configured_ui(), repo_dir )
- ctx = suc.get_changectx_for_changeset( repo, changeset_revision )
- repo_info_dict = repository_util.create_repo_info_dict( trans=trans,
- repository_clone_url=repository_clone_url,
- changeset_revision=changeset_revision,
- ctx_rev=str( ctx.rev() ),
- repository_owner=repository.user.username,
- repository_name=repository.name,
- repository=repository,
- repository_metadata=repository_metadata,
- tool_dependencies=None,
- repository_dependencies=None )
+ repository_id, changeset_revision = tup
+ repo_info_dict, cur_includes_tools, cur_includes_tool_dependencies, cur_includes_tools_for_display_in_tool_panel, cur_has_repository_dependencies = \
+ repository_util.get_repo_info_dict( trans, repository_id, changeset_revision )
+ if cur_has_repository_dependencies and not has_repository_dependencies:
+ has_repository_dependencies = True
+ if cur_includes_tools and not includes_tools:
+ includes_tools = True
+ if cur_includes_tool_dependencies and not includes_tool_dependencies:
+ includes_tool_dependencies = True
+ if cur_includes_tools_for_display_in_tool_panel and not includes_tools_for_display_in_tool_panel:
+ includes_tools_for_display_in_tool_panel = True
repo_info_dicts.append( encoding_util.tool_shed_encode( repo_info_dict ) )
return dict( includes_tools=includes_tools,
includes_tools_for_display_in_tool_panel=includes_tools_for_display_in_tool_panel,
diff -r 75415966a26b5597c1ec46a10f6071238a5578be -r 2af272e9f2565cd9272c0aa5004a5726cfafc9bf lib/galaxy/webapps/tool_shed/model/__init__.py
--- a/lib/galaxy/webapps/tool_shed/model/__init__.py
+++ b/lib/galaxy/webapps/tool_shed/model/__init__.py
@@ -192,10 +192,10 @@
class RepositoryMetadata( object, APIItem ):
api_collection_visible_keys = ( 'id', 'repository_id', 'changeset_revision', 'malicious', 'downloadable', 'has_repository_dependencies', 'includes_datatypes',
- 'includes_tools', 'includes_tool_dependencies', 'includes_workflows' )
+ 'includes_tools', 'includes_tool_dependencies', 'includes_tools_for_display_in_tool_panel', 'includes_workflows' )
api_element_visible_keys = ( 'id', 'repository_id', 'changeset_revision', 'malicious', 'downloadable', 'tools_functionally_correct',
'do_not_test', 'time_last_tested', 'tool_test_errors', 'has_repository_dependencies', 'includes_datatypes', 'includes_tools',
- 'includes_tool_dependencies', 'includes_workflows' )
+ 'includes_tool_dependencies', 'includes_tools_for_display_in_tool_panel', 'includes_workflows' )
def __init__( self, id=None, repository_id=None, changeset_revision=None, metadata=None, tool_versions=None, malicious=False, downloadable=False,
tools_functionally_correct=False, do_not_test=False, time_last_tested=None, tool_test_errors=None, has_repository_dependencies=False,
includes_datatypes=False, includes_tools=False, includes_tool_dependencies=False, includes_workflows=False ):
diff -r 75415966a26b5597c1ec46a10f6071238a5578be -r 2af272e9f2565cd9272c0aa5004a5726cfafc9bf lib/tool_shed/galaxy_install/repository_util.py
--- a/lib/tool_shed/galaxy_install/repository_util.py
+++ b/lib/tool_shed/galaxy_install/repository_util.py
@@ -1,14 +1,35 @@
-import os, logging, threading, urllib2
-from galaxy.web import url_for
+import logging
+import os
+import shutil
+import tempfile
+import threading
+import urllib2
+from galaxy import tools
+from galaxy.util import json
+from galaxy import web
+from galaxy.model.orm import or_
from galaxy.webapps.tool_shed.util import container_util
import tool_shed.util.shed_util_common as suc
-from tool_shed.util import encoding_util, repository_dependency_util, tool_dependency_util, tool_util
+from tool_shed.util import common_install_util
+from tool_shed.util import data_manager_util
+from tool_shed.util import datatype_util
+from tool_shed.util import encoding_util
+from tool_shed.util import repository_dependency_util
+from tool_shed.util import metadata_util
+from tool_shed.util import tool_dependency_util
+from tool_shed.util import tool_util
from galaxy import eggs
import pkg_resources
pkg_resources.require( 'mercurial' )
-from mercurial import hg, ui, commands
+from mercurial import commands
+from mercurial import hg
+from mercurial import ui
+
+pkg_resources.require( 'elementtree' )
+from elementtree import ElementTree
+from elementtree.ElementTree import Element
log = logging.getLogger( __name__ )
@@ -41,12 +62,12 @@
repository_dependency_util.get_repository_dependencies_for_changeset_revision( trans=trans,
repository=repository,
repository_metadata=repository_metadata,
- toolshed_base_url=str( url_for( '/', qualified=True ) ).rstrip( '/' ),
+ toolshed_base_url=str( web.url_for( '/', qualified=True ) ).rstrip( '/' ),
key_rd_dicts_to_be_processed=None,
all_repository_dependencies=None,
handled_key_rd_dicts=None,
circular_repository_dependencies=None )
- tool_dependencies = metadata.get( 'tool_dependencies', None )
+ tool_dependencies = metadata.get( 'tool_dependencies', {} )
if tool_dependencies:
new_tool_dependencies = {}
for dependency_key, requirements_dict in tool_dependencies.items():
@@ -74,6 +95,39 @@
tool_dependencies )
return repo_info_dict
+def get_repo_info_dict( trans, repository_id, changeset_revision ):
+ repository = suc.get_repository_in_tool_shed( trans, repository_id )
+ repository_clone_url = suc.generate_clone_url_for_repository_in_tool_shed( trans, repository )
+ repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision )
+ metadata = repository_metadata.metadata
+ if 'tools' in metadata:
+ includes_tools = True
+ else:
+ includes_tools = False
+ includes_tools_for_display_in_tool_panel = repository_metadata.includes_tools_for_display_in_tool_panel
+ if 'repository_dependencies' in metadata:
+ has_repository_dependencies = True
+ else:
+ has_repository_dependencies = False
+ if 'tool_dependencies' in metadata:
+ includes_tool_dependencies = True
+ else:
+ includes_tool_dependencies = False
+ repo_dir = repository.repo_path( trans.app )
+ repo = hg.repository( suc.get_configured_ui(), repo_dir )
+ ctx = suc.get_changectx_for_changeset( repo, changeset_revision )
+ repo_info_dict = create_repo_info_dict( trans=trans,
+ repository_clone_url=repository_clone_url,
+ changeset_revision=changeset_revision,
+ ctx_rev=str( ctx.rev() ),
+ repository_owner=repository.user.username,
+ repository_name=repository.name,
+ repository=repository,
+ repository_metadata=repository_metadata,
+ tool_dependencies=None,
+ repository_dependencies=None )
+ return repo_info_dict, includes_tools, includes_tool_dependencies, includes_tools_for_display_in_tool_panel, has_repository_dependencies
+
def get_update_to_changeset_revision_and_ctx_rev( trans, repository ):
"""Return the changeset revision hash to which the repository can be updated."""
changeset_revision_dict = {}
@@ -117,6 +171,271 @@
changeset_revision_dict[ 'ctx_rev' ] = None
return changeset_revision_dict
+def handle_repository_contents( trans, tool_shed_repository, tool_path, repository_clone_url, relative_install_dir, tool_shed=None, tool_section=None, shed_tool_conf=None,
+ reinstalling=False ):
+ """
+ Generate the metadata for the installed tool shed repository, among other things. This method is called from Galaxy (never the tool shed)
+ when an administrator is installing a new repository or reinstalling an uninstalled repository.
+ """
+ shed_config_dict = trans.app.toolbox.get_shed_config_dict_by_filename( shed_tool_conf )
+ metadata_dict, invalid_file_tups = metadata_util.generate_metadata_for_changeset_revision( app=trans.app,
+ repository=tool_shed_repository,
+ changeset_revision=tool_shed_repository.changeset_revision,
+ repository_clone_url=repository_clone_url,
+ shed_config_dict=shed_config_dict,
+ relative_install_dir=relative_install_dir,
+ repository_files_dir=None,
+ resetting_all_metadata_on_repository=False,
+ updating_installed_repository=False,
+ persist=True )
+ tool_shed_repository.metadata = metadata_dict
+ trans.sa_session.add( tool_shed_repository )
+ trans.sa_session.flush()
+ if 'tool_dependencies' in metadata_dict and not reinstalling:
+ tool_dependencies = tool_dependency_util.create_tool_dependency_objects( trans.app, tool_shed_repository, relative_install_dir, set_status=True )
+ if 'tools' in metadata_dict:
+ tool_panel_dict = tool_util.generate_tool_panel_dict_for_new_install( metadata_dict[ 'tools' ], tool_section )
+ sample_files = metadata_dict.get( 'sample_files', [] )
+ tool_index_sample_files = tool_util.get_tool_index_sample_files( sample_files )
+ tool_util.copy_sample_files( trans.app, tool_index_sample_files, tool_path=tool_path )
+ sample_files_copied = [ str( s ) for s in tool_index_sample_files ]
+ repository_tools_tups = suc.get_repository_tools_tups( trans.app, metadata_dict )
+ if repository_tools_tups:
+ # Handle missing data table entries for tool parameters that are dynamically generated select lists.
+ repository_tools_tups = tool_util.handle_missing_data_table_entry( trans.app, relative_install_dir, tool_path, repository_tools_tups )
+ # Handle missing index files for tool parameters that are dynamically generated select lists.
+ repository_tools_tups, sample_files_copied = tool_util.handle_missing_index_file( trans.app, tool_path, sample_files, repository_tools_tups, sample_files_copied )
+ # Copy remaining sample files included in the repository to the ~/tool-data directory of the local Galaxy instance.
+ tool_util.copy_sample_files( trans.app, sample_files, tool_path=tool_path, sample_files_copied=sample_files_copied )
+ tool_util.add_to_tool_panel( app=trans.app,
+ repository_name=tool_shed_repository.name,
+ repository_clone_url=repository_clone_url,
+ changeset_revision=tool_shed_repository.installed_changeset_revision,
+ repository_tools_tups=repository_tools_tups,
+ owner=tool_shed_repository.owner,
+ shed_tool_conf=shed_tool_conf,
+ tool_panel_dict=tool_panel_dict,
+ new_install=True )
+ if 'data_manager' in metadata_dict:
+ new_data_managers = data_manager_util.install_data_managers( trans.app,
+ trans.app.config.shed_data_manager_config_file,
+ metadata_dict,
+ shed_config_dict,
+ relative_install_dir,
+ tool_shed_repository,
+ repository_tools_tups )
+ if 'datatypes' in metadata_dict:
+ tool_shed_repository.status = trans.model.ToolShedRepository.installation_status.LOADING_PROPRIETARY_DATATYPES
+ if not tool_shed_repository.includes_datatypes:
+ tool_shed_repository.includes_datatypes = True
+ trans.sa_session.add( tool_shed_repository )
+ trans.sa_session.flush()
+ files_dir = relative_install_dir
+ if shed_config_dict.get( 'tool_path' ):
+ files_dir = os.path.join( shed_config_dict[ 'tool_path' ], files_dir )
+ datatypes_config = suc.get_config_from_disk( 'datatypes_conf.xml', files_dir )
+ # Load data types required by tools.
+ converter_path, display_path = datatype_util.alter_config_and_load_prorietary_datatypes( trans.app, datatypes_config, files_dir, override=False )
+ if converter_path or display_path:
+ # Create a dictionary of tool shed repository related information.
+ repository_dict = datatype_util.create_repository_dict_for_proprietary_datatypes( tool_shed=tool_shed,
+ name=tool_shed_repository.name,
+ owner=tool_shed_repository.owner,
+ installed_changeset_revision=tool_shed_repository.installed_changeset_revision,
+ tool_dicts=metadata_dict.get( 'tools', [] ),
+ converter_path=converter_path,
+ display_path=display_path )
+ if converter_path:
+ # Load proprietary datatype converters
+ trans.app.datatypes_registry.load_datatype_converters( trans.app.toolbox, installed_repository_dict=repository_dict )
+ if display_path:
+ # Load proprietary datatype display applications
+ trans.app.datatypes_registry.load_display_applications( installed_repository_dict=repository_dict )
+
+def handle_tool_shed_repositories( trans, installation_dict, using_api=False ):
+ # The following installation_dict entries are all required.
+ install_repository_dependencies = installation_dict[ 'install_repository_dependencies' ]
+ new_tool_panel_section = installation_dict[ 'new_tool_panel_section' ]
+ no_changes_checked = installation_dict[ 'no_changes_checked' ]
+ reinstalling = installation_dict[ 'reinstalling' ]
+ repo_info_dicts = installation_dict[ 'repo_info_dicts' ]
+ tool_panel_section = installation_dict[ 'tool_panel_section' ]
+ tool_path = installation_dict[ 'tool_path' ]
+ tool_shed_url = installation_dict[ 'tool_shed_url' ]
+ created_or_updated_tool_shed_repositories, tool_panel_section_keys, repo_info_dicts, filtered_repo_info_dicts, message = \
+ repository_dependency_util.create_repository_dependency_objects( trans=trans,
+ tool_path=tool_path,
+ tool_shed_url=tool_shed_url,
+ repo_info_dicts=repo_info_dicts,
+ reinstalling=reinstalling,
+ install_repository_dependencies=install_repository_dependencies,
+ no_changes_checked=no_changes_checked,
+ tool_panel_section=tool_panel_section,
+ new_tool_panel_section=new_tool_panel_section )
+ if message and len( repo_info_dicts ) == 1 and not using_api:
+ installed_tool_shed_repository = created_or_updated_tool_shed_repositories[ 0 ]
+ message += 'Click <a href="%s">here</a> to manage the repository. ' % \
+ ( web.url_for( controller='admin_toolshed', action='manage_repository', id=trans.security.encode_id( installed_tool_shed_repository.id ) ) )
+ return created_or_updated_tool_shed_repositories, tool_panel_section_keys, repo_info_dicts, filtered_repo_info_dicts, message
+
+def initiate_repository_installation( trans, installation_dict ):
+ # The following installation_dict entries are all required.
+ created_or_updated_tool_shed_repositories = installation_dict[ 'created_or_updated_tool_shed_repositories' ]
+ filtered_repo_info_dicts = installation_dict[ 'filtered_repo_info_dicts' ]
+ has_repository_dependencies = installation_dict[ 'has_repository_dependencies' ]
+ includes_tool_dependencies = installation_dict[ 'includes_tool_dependencies' ]
+ includes_tools = installation_dict[ 'includes_tools' ]
+ includes_tools_for_display_in_tool_panel = installation_dict[ 'includes_tools_for_display_in_tool_panel' ]
+ install_repository_dependencies = installation_dict[ 'install_repository_dependencies' ]
+ install_tool_dependencies = installation_dict[ 'install_tool_dependencies' ]
+ message = installation_dict[ 'message' ]
+ new_tool_panel_section = installation_dict[ 'new_tool_panel_section' ]
+ shed_tool_conf = installation_dict[ 'shed_tool_conf' ]
+ status = installation_dict[ 'status' ]
+ tool_panel_section = installation_dict[ 'tool_panel_section' ]
+ tool_panel_section_keys = installation_dict[ 'tool_panel_section_keys' ]
+ tool_path = installation_dict[ 'tool_path' ]
+ tool_shed_url = installation_dict[ 'tool_shed_url' ]
+ # Handle contained tools.
+ if includes_tools_for_display_in_tool_panel and ( new_tool_panel_section or tool_panel_section ):
+ if new_tool_panel_section:
+ section_id = new_tool_panel_section.lower().replace( ' ', '_' )
+ tool_panel_section_key = 'section_%s' % str( section_id )
+ if tool_panel_section_key in trans.app.toolbox.tool_panel:
+ # Appending a tool to an existing section in trans.app.toolbox.tool_panel
+ log.debug( "Appending to tool panel section: %s" % new_tool_panel_section )
+ tool_section = trans.app.toolbox.tool_panel[ tool_panel_section_key ]
+ else:
+ # Appending a new section to trans.app.toolbox.tool_panel
+ log.debug( "Loading new tool panel section: %s" % new_tool_panel_section )
+ elem = Element( 'section' )
+ elem.attrib[ 'name' ] = new_tool_panel_section
+ elem.attrib[ 'id' ] = section_id
+ elem.attrib[ 'version' ] = ''
+ tool_section = tools.ToolSection( elem )
+ trans.app.toolbox.tool_panel[ tool_panel_section_key ] = tool_section
+ else:
+ tool_panel_section_key = 'section_%s' % tool_panel_section
+ tool_section = trans.app.toolbox.tool_panel[ tool_panel_section_key ]
+ else:
+ tool_panel_section_key = None
+ tool_section = None
+ encoded_repository_ids = [ trans.security.encode_id( tsr.id ) for tsr in created_or_updated_tool_shed_repositories ]
+ # Create a one-to-one mapping of tool shed repository id and tool panel section key. All tools contained in the repositories being installed will be loaded
+ # into the same section in the tool panel.
+ for tsr in created_or_updated_tool_shed_repositories:
+ tool_panel_section_keys.append( tool_panel_section_key )
+ new_kwd = dict( includes_tools=includes_tools,
+ includes_tools_for_display_in_tool_panel=includes_tools_for_display_in_tool_panel,
+ has_repository_dependencies=has_repository_dependencies,
+ install_repository_dependencies=install_repository_dependencies,
+ includes_tool_dependencies=includes_tool_dependencies,
+ install_tool_dependencies=install_tool_dependencies,
+ message=message,
+ repo_info_dicts=filtered_repo_info_dicts,
+ shed_tool_conf=shed_tool_conf,
+ status=status,
+ tool_path=tool_path,
+ tool_panel_section_keys=tool_panel_section_keys,
+ tool_shed_repository_ids=encoded_repository_ids,
+ tool_shed_url=tool_shed_url )
+ encoded_kwd = encoding_util.tool_shed_encode( new_kwd )
+ tsr_ids = [ r.id for r in created_or_updated_tool_shed_repositories ]
+ tool_shed_repositories = []
+ for tsr_id in tsr_ids:
+ tsr = trans.sa_session.query( trans.model.ToolShedRepository ).get( tsr_id )
+ tool_shed_repositories.append( tsr )
+ clause_list = []
+ for tsr_id in tsr_ids:
+ clause_list.append( trans.model.ToolShedRepository.table.c.id == tsr_id )
+ query = trans.sa_session.query( trans.model.ToolShedRepository ).filter( or_( *clause_list ) )
+ return encoded_kwd, query, tool_shed_repositories, encoded_repository_ids
+
+def install_tool_shed_repository( trans, tool_shed_repository, repo_info_dict, tool_panel_section_key, shed_tool_conf, tool_path, install_tool_dependencies,
+ reinstalling=False ):
+ if tool_panel_section_key:
+ tool_section = trans.app.toolbox.tool_panel[ tool_panel_section_key ]
+ else:
+ tool_section = None
+ if isinstance( repo_info_dict, basestring ):
+ repo_info_dict = encoding_util.tool_shed_decode( repo_info_dict )
+ # Clone each repository to the configured location.
+ suc.update_tool_shed_repository_status( trans.app, tool_shed_repository, trans.model.ToolShedRepository.installation_status.CLONING )
+ repo_info_tuple = repo_info_dict[ tool_shed_repository.name ]
+ description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = repo_info_tuple
+ relative_clone_dir = suc.generate_tool_shed_repository_install_dir( repository_clone_url, tool_shed_repository.installed_changeset_revision )
+ clone_dir = os.path.join( tool_path, relative_clone_dir )
+ relative_install_dir = os.path.join( relative_clone_dir, tool_shed_repository.name )
+ install_dir = os.path.join( tool_path, relative_install_dir )
+ cloned_ok, error_message = suc.clone_repository( repository_clone_url, os.path.abspath( install_dir ), ctx_rev )
+ if cloned_ok:
+ if reinstalling:
+            # Since we're reinstalling the repository we need to find the latest changeset revision to which it can be updated.
+ changeset_revision_dict = get_update_to_changeset_revision_and_ctx_rev( trans, tool_shed_repository )
+ current_changeset_revision = changeset_revision_dict.get( 'changeset_revision', None )
+ current_ctx_rev = changeset_revision_dict.get( 'ctx_rev', None )
+ if current_ctx_rev != ctx_rev:
+ repo = hg.repository( suc.get_configured_ui(), path=os.path.abspath( install_dir ) )
+ pull_repository( repo, repository_clone_url, current_changeset_revision )
+ suc.update_repository( repo, ctx_rev=current_ctx_rev )
+ handle_repository_contents( trans,
+ tool_shed_repository=tool_shed_repository,
+ tool_path=tool_path,
+ repository_clone_url=repository_clone_url,
+ relative_install_dir=relative_install_dir,
+ tool_shed=tool_shed_repository.tool_shed,
+ tool_section=tool_section,
+ shed_tool_conf=shed_tool_conf,
+ reinstalling=reinstalling )
+ trans.sa_session.refresh( tool_shed_repository )
+ metadata = tool_shed_repository.metadata
+ if 'tools' in metadata:
+ # Get the tool_versions from the tool shed for each tool in the installed change set.
+ suc.update_tool_shed_repository_status( trans.app,
+ tool_shed_repository,
+ trans.model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS )
+ tool_shed_url = suc.get_url_from_tool_shed( trans.app, tool_shed_repository.tool_shed )
+ url = suc.url_join( tool_shed_url,
+ '/repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s' % \
+ ( tool_shed_repository.name, tool_shed_repository.owner, tool_shed_repository.changeset_revision ) )
+ response = urllib2.urlopen( url )
+ text = response.read()
+ response.close()
+ if text:
+ tool_version_dicts = json.from_json_string( text )
+ tool_util.handle_tool_versions( trans.app, tool_version_dicts, tool_shed_repository )
+ else:
+ message += "Version information for the tools included in the <b>%s</b> repository is missing. " % name
+ message += "Reset all of this repository's metadata in the tool shed, then set the installed tool versions "
+ message += "from the installed repository's <b>Repository Actions</b> menu. "
+ status = 'error'
+ if install_tool_dependencies and tool_shed_repository.tool_dependencies and 'tool_dependencies' in metadata:
+ work_dir = tempfile.mkdtemp()
+ # Install tool dependencies.
+ suc.update_tool_shed_repository_status( trans.app,
+ tool_shed_repository,
+ trans.model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES )
+ # Get the tool_dependencies.xml file from the repository.
+ tool_dependencies_config = suc.get_config_from_disk( 'tool_dependencies.xml', install_dir )#relative_install_dir )
+ installed_tool_dependencies = common_install_util.handle_tool_dependencies( app=trans.app,
+ tool_shed_repository=tool_shed_repository,
+ tool_dependencies_config=tool_dependencies_config,
+ tool_dependencies=tool_shed_repository.tool_dependencies )
+ try:
+ shutil.rmtree( work_dir )
+ except:
+ pass
+ suc.update_tool_shed_repository_status( trans.app, tool_shed_repository, trans.model.ToolShedRepository.installation_status.INSTALLED )
+ else:
+ # An error occurred while cloning the repository, so reset everything necessary to enable another attempt.
+ suc.set_repository_attributes( trans,
+ tool_shed_repository,
+ status=trans.model.ToolShedRepository.installation_status.ERROR,
+ error_message=error_message,
+ deleted=False,
+ uninstalled=False,
+ remove_from_disk=True )
+
def merge_containers_dicts_for_new_install( containers_dicts ):
"""
When installing one or more tool shed repositories for the first time, the received list of containers_dicts contains a containers_dict for
This diff is so big that we needed to truncate the remainder.
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this email because you have the commit-notification service enabled and
are listed as a recipient for this repository.
1
0