1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/41bb60076082/
Changeset: 41bb60076082
User: devteam
Date: 2014-05-02 21:55:45
Summary: Enhance the framework that supports discovery of tool dependencies' scripts that are in the same repository as the tool. This eliminates the requirement for setting a variable _SCRIPT_PATH in the tool_dependencies.xml and requirements tagset in a tool config. Now it only needs to be set in the tool config and it should look like: <requirement type="set_environment">PATH</requirement>
Affected #: 5 files
diff -r 2f79c07c4438b627df03aa3f96dcd51f8b12c641 -r 41bb60076082ea46686a4bb9db8cb593b8f35851 lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
--- a/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
@@ -289,6 +289,55 @@
raise Exception( message )
return handled_tool_dependencies
+def handle_env_vars_for_set_environment_tool_dependency( app, tool_shed_repository, tool_shed_repository_install_dir ):
+ env_var_name = 'PATH'
+ install_dir = \
+ tool_dependency_util.get_tool_dependency_install_dir( app=app,
+ repository_name=tool_shed_repository.name,
+ repository_owner=tool_shed_repository.owner,
+ repository_changeset_revision=tool_shed_repository.installed_changeset_revision,
+ tool_dependency_type='set_environment',
+ tool_dependency_name=env_var_name,
+ tool_dependency_version=None )
+ env_var_dict = dict( name=env_var_name, action='prepend_to', value=tool_shed_repository_install_dir )
+ if not os.path.exists( install_dir ):
+ os.makedirs( install_dir )
+ status = app.install_model.ToolDependency.installation_status.INSTALLING
+ tool_dependency = \
+ tool_dependency_util.create_or_update_tool_dependency( app=app,
+ tool_shed_repository=tool_shed_repository,
+ name=env_var_name,
+ version=None,
+ type='set_environment',
+ status=status,
+ set_status=True )
+ env_file_builder = EnvFileBuilder( install_dir )
+ return_code = env_file_builder.append_line( make_executable=True, **env_var_dict )
+ if return_code:
+ error_message = 'Error creating env.sh file for tool dependency %s, return_code: %s' % \
+ ( str( tool_dependency.name ), str( return_code ) )
+ log.debug( error_message )
+ status = app.install_model.ToolDependency.installation_status.ERROR
+ tool_dependency = \
+ tool_dependency_util.set_tool_dependency_attributes( app,
+ tool_dependency=tool_dependency,
+ status=status,
+ error_message=error_message,
+ remove_from_disk=False )
+ else:
+ if tool_dependency.status not in [ app.install_model.ToolDependency.installation_status.ERROR,
+ app.install_model.ToolDependency.installation_status.INSTALLED ]:
+ status = app.install_model.ToolDependency.installation_status.INSTALLED
+ tool_dependency = \
+ tool_dependency_util.set_tool_dependency_attributes( app,
+ tool_dependency=tool_dependency,
+ status=status,
+ error_message=None,
+ remove_from_disk=False )
+ log.debug( 'Environment variable %s set in %s for tool dependency %s.' % \
+ ( str( env_var_name ), str( install_dir ), str( tool_dependency.name ) ) )
+ return tool_dependency
+
def install_and_build_package_via_fabric( app, tool_shed_repository, tool_dependency, actions_dict ):
sa_session = app.install_model.context
try:
@@ -638,12 +687,20 @@
"""
# TODO: Add support for a repository dependency definition within this tool dependency type's tag set. This should look something like
# the following. See the implementation of support for this in the tool dependency package type's method above.
+ # This function is only called for set environment actions as defined below, not within an <install version="1.0"> tool
+ # dependency type. Here is an example of the tag set this function does handle:
+ # <action type="set_environment">
+ # <environment_variable name="PATH" action="prepend_to">$INSTALL_DIR</environment_variable>
+ # </action>
+ # Here is an example of the tag set this function does not handle:
# <set_environment version="1.0">
# <repository toolshed="<tool shed>" name="<repository name>" owner="<repository owner>" changeset_revision="<changeset revision>" />
# </set_environment>
sa_session = app.install_model.context
- tool_dependency = None
+ tool_dependencies = []
env_var_version = elem.get( 'version', '1.0' )
+ tool_shed_repository_install_dir = fabric_util.get_tool_shed_repository_install_dir( app, tool_shed_repository )
+ tool_shed_repository_install_dir_added_to_path = False
for env_var_elem in elem:
# Although we're in a loop here, this method will always return only a single ToolDependency or None.
env_var_name = env_var_elem.get( 'name', None )
@@ -662,10 +719,10 @@
tool_dependency_type='set_environment',
tool_dependency_name=env_var_name,
tool_dependency_version=None )
- tool_shed_repository_install_dir = fabric_util.get_tool_shed_repository_install_dir( app, tool_shed_repository )
+ install_environment = InstallEnvironment( tool_shed_repository_install_dir=tool_shed_repository_install_dir,
+ install_dir=install_dir )
env_var_dict = td_common_util.create_env_var_dict( elem=env_var_elem,
- tool_dependency_install_dir=install_dir,
- tool_shed_repository_install_dir=tool_shed_repository_install_dir )
+ install_environment=install_environment )
if env_var_dict:
if not os.path.exists( install_dir ):
os.makedirs( install_dir )
@@ -714,7 +771,18 @@
status=status,
error_message=error_message,
remove_from_disk=False )
- return tool_dependency
+ if tool_dependency.status != app.install_model.ToolDependency.installation_status.ERROR:
+ if env_var_dict[ 'name' ] == 'PATH' and \
+ env_var_dict[ 'action' ] in [ 'prepend_to', 'set_to', 'append_to' ] and \
+ env_var_dict[ 'value' ] == install_environment.tool_shed_repository_install_dir:
+ tool_shed_repository_install_dir_added_to_path = True
+ tool_dependencies.append( tool_dependency )
+ if not tool_shed_repository_install_dir_added_to_path:
+ tool_dependency = handle_env_vars_for_set_environment_tool_dependency( app,
+ tool_shed_repository,
+ tool_shed_repository_install_dir )
+ tool_dependencies.append( tool_dependency )
+ return tool_dependencies
def strip_path( fpath ):
if not fpath:
diff -r 2f79c07c4438b627df03aa3f96dcd51f8b12c641 -r 41bb60076082ea46686a4bb9db8cb593b8f35851 lib/tool_shed/galaxy_install/tool_dependencies/recipe/step_handler.py
--- a/lib/tool_shed/galaxy_install/tool_dependencies/recipe/step_handler.py
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/recipe/step_handler.py
@@ -707,17 +707,22 @@
return env_var_dict
def prepare_step( self, app, tool_dependency, action_elem, action_dict, install_environment, is_binary_download ):
+ # This function is only called for set environment actions as defined above, not within a <set_environment> tool
+ # dependency type. Here is an example of the tag set this function does handle:
# <action type="set_environment">
# <environment_variable name="PYTHONPATH" action="append_to">$INSTALL_DIR/lib/python</environment_variable>
# <environment_variable name="PATH" action="prepend_to">$INSTALL_DIR/bin</environment_variable>
# </action>
+ # Here is an example of the tag set this function does not handle:
+ # <action type="set_environment">
+ # <environment_variable name="PATH" action="prepend_to">$INSTALL_DIR</environment_variable>
+ # </action>
env_var_dicts = []
for env_elem in action_elem:
if env_elem.tag == 'environment_variable':
env_var_dict = \
td_common_util.create_env_var_dict( elem=env_elem,
- tool_dependency_install_dir=install_environment.install_dir,
- tool_shed_repository_install_dir=install_environment.tool_shed_repository_install_dir )
+ install_environment=install_environment )
if env_var_dict:
env_var_dicts.append( env_var_dict )
if env_var_dicts:
diff -r 2f79c07c4438b627df03aa3f96dcd51f8b12c641 -r 41bb60076082ea46686a4bb9db8cb593b8f35851 lib/tool_shed/galaxy_install/tool_dependencies/td_common_util.py
--- a/lib/tool_shed/galaxy_install/tool_dependencies/td_common_util.py
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/td_common_util.py
@@ -168,10 +168,12 @@
return True
return False
-def create_env_var_dict( elem, tool_dependency_install_dir=None, tool_shed_repository_install_dir=None ):
+def create_env_var_dict( elem, install_environment ):
env_var_name = elem.get( 'name', 'PATH' )
env_var_action = elem.get( 'action', 'prepend_to' )
env_var_text = None
+ tool_dependency_install_dir = install_environment.install_dir
+ tool_shed_repository_install_dir = install_environment.tool_shed_repository_install_dir
if elem.text and elem.text.find( 'REPOSITORY_INSTALL_DIR' ) >= 0:
if tool_shed_repository_install_dir and elem.text.find( '$REPOSITORY_INSTALL_DIR' ) != -1:
env_var_text = elem.text.replace( '$REPOSITORY_INSTALL_DIR', tool_shed_repository_install_dir )
diff -r 2f79c07c4438b627df03aa3f96dcd51f8b12c641 -r 41bb60076082ea46686a4bb9db8cb593b8f35851 lib/tool_shed/util/common_install_util.py
--- a/lib/tool_shed/util/common_install_util.py
+++ b/lib/tool_shed/util/common_install_util.py
@@ -349,9 +349,15 @@
requirement_dict[ 'status' ] = tool_dependency_status
new_val.append( requirement_dict )
if tool_dependency_status in [ trans.install_model.ToolDependency.installation_status.INSTALLED ]:
- installed_tool_dependencies[ td_key ] = new_val
+ if td_key in installed_tool_dependencies:
+ installed_tool_dependencies[ td_key ].extend( new_val )
+ else:
+ installed_tool_dependencies[ td_key ] = new_val
else:
- missing_tool_dependencies[ td_key ] = new_val
+ if td_key in missing_tool_dependencies:
+ missing_tool_dependencies[ td_key ].extend( new_val )
+ else:
+ missing_tool_dependencies[ td_key ] = new_val
else:
# The val dictionary looks something like this:
# {'repository_name': 'xx',
@@ -497,6 +503,7 @@
return installed_tool_dependencies
root = tree.getroot()
fabric_version_checked = False
+ set_environment_handled = False
for elem in root:
if elem.tag == 'package':
# Only install the tool_dependency if it is not already installed and it is associated with a database record in the received
@@ -556,21 +563,31 @@
# <set_environment version="1.0">
# <environment_variable name="R_SCRIPT_PATH"action="set_to">$REPOSITORY_INSTALL_DIR</environment_variable>
# </set_environment>
+ set_environment_handled = True
try:
- tool_dependency = set_environment( app, elem, tool_shed_repository, attr_tups_of_dependencies_for_install )
+ tool_dependencies = set_environment( app, elem, tool_shed_repository, attr_tups_of_dependencies_for_install )
except Exception, e:
error_message = "Error setting environment for tool dependency: %s" % str( e )
log.debug( error_message )
- if tool_dependency:
- # Since there was an installation error, update the tool dependency status to Error. The remove_installation_path option must
- # be left False here.
- tool_dependency = tool_dependency_util.handle_tool_dependency_installation_error( app,
- tool_dependency,
- error_message,
- remove_installation_path=False )
- if tool_dependency and tool_dependency.status in [ app.install_model.ToolDependency.installation_status.INSTALLED,
- app.install_model.ToolDependency.installation_status.ERROR ]:
- installed_tool_dependencies.append( tool_dependency )
+ for tool_dependency in tool_dependencies:
+ if tool_dependency and tool_dependency.status == app.install_model.ToolDependency.installation_status.ERROR:
+ # Since there was an installation error, update the tool dependency status to Error. The remove_installation_path option must
+ # be left False here.
+ tool_dependency = tool_dependency_util.handle_tool_dependency_installation_error( app,
+ tool_dependency,
+ error_message,
+ remove_installation_path=False )
+ for tool_dependency in tool_dependencies:
+ if tool_dependency and tool_dependency.status in [ app.install_model.ToolDependency.installation_status.INSTALLED,
+ app.install_model.ToolDependency.installation_status.ERROR ]:
+ installed_tool_dependencies.append( tool_dependency )
+ if not set_environment_handled:
+ element_attributes = dict( name='PATH', action='prepend_to' )
+ generated_elem = xml_util.create_element( 'environment_variable', attributes=element_attributes, sub_elements=None )
+ generated_elem.text = '$REPOSITORY_INSTALL_DIR'
+ generated_attr_tups = [ ( 'PATH', None, 'set_environment' ) ]
+ tool_dependencies = set_environment( app, generated_elem, tool_shed_repository, generated_attr_tups )
+ installed_tool_dependencies.extend( tool_dependencies )
return installed_tool_dependencies
def repository_dependency_needed_only_for_compiling_tool_dependency( repository, repository_dependency ):
diff -r 2f79c07c4438b627df03aa3f96dcd51f8b12c641 -r 41bb60076082ea46686a4bb9db8cb593b8f35851 lib/tool_shed/util/metadata_util.py
--- a/lib/tool_shed/util/metadata_util.py
+++ b/lib/tool_shed/util/metadata_util.py
@@ -23,6 +23,7 @@
from tool_shed.galaxy_install.tool_dependencies import install_util
from tool_shed.galaxy_install.tool_dependencies import td_common_util
import tool_shed.repository_types.util as rt_util
+from xml.etree import ElementTree as XmlET
eggs.require( 'mercurial' )
@@ -921,7 +922,28 @@
invalid_tool_dependencies_dict = {}
valid_repository_dependency_tups = []
invalid_repository_dependency_tups = []
+ needs_set_environment_tool_dependency_for_path = False
+ tools_metadata = metadata_dict.get( 'tools', None )
+ if tools_metadata is not None:
+ for tools_dict in tools_metadata:
+ requirements = tools_dict.get( 'requirements', None )
+ if requirements is not None:
+ for requirements_dict in requirements:
+ if requirements_dict[ 'type' ] == 'set_environment' and requirements_dict[ 'name' ] == 'PATH':
+ needs_set_environment_tool_dependency_for_path = True
+ break
description = root.get( 'description' )
+ if needs_set_environment_tool_dependency_for_path:
+ # Add this to the in-memory XML tree that is parsed to determine the database tool dependency records. This will not
+ # modify the on-disk tool dependency definitions, but is needed in order for the tool to correctly source the env.sh
+ # file that was generated for the PATH variable.
+ # <set_environment version="1.0">
+ # <environment_variable action="prepend_to" name="PATH">$REPOSITORY_INSTALL_DIR</environment_variable>
+ # </set_environment>
+ env_var_elem_attributes = dict( name='PATH', action='prepend_to' )
+ set_environment_elem = xml_util.create_element( 'set_environment', attributes=dict( version='1.0' ) )
+ XmlET.SubElement( set_environment_elem, 'environment_variable', attrib=env_var_elem_attributes )
+ root.append( set_environment_elem )
for elem in root:
if elem.tag == 'package':
valid_tool_dependencies_dict, invalid_tool_dependencies_dict, repository_dependency_tup, repository_dependency_is_valid, message = \
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/2f79c07c4438/
Changeset: 2f79c07c4438
User: carlfeberhard
Date: 2014-05-02 21:45:25
Summary: History: cruft removal from controller and associated javascript
Affected #: 3 files
diff -r 3f8e4852623187152ffaea5fd529fac2001c8316 -r 2f79c07c4438b627df03aa3f96dcd51f8b12c641 lib/galaxy/webapps/galaxy/controllers/history.py
--- a/lib/galaxy/webapps/galaxy/controllers/history.py
+++ b/lib/galaxy/webapps/galaxy/controllers/history.py
@@ -549,6 +549,7 @@
count += 1
return trans.show_ok_message( "%d datasets have been deleted permanently" % count, refresh_frames=['history'] )
+ #TODO: use api instead
@web.expose
def delete_current( self, trans, purge=False ):
"""Delete just the active history -- this does not require a logged in user."""
@@ -604,6 +605,7 @@
trans.sa_session.add( history )
trans.sa_session.flush()
return trans.show_ok_message( "Your datasets have been unhidden.", refresh_frames=refresh_frames )
+ #TODO: used in index.mako
@web.expose
def resume_paused_jobs( self, trans, current=False, ids=None ):
@@ -618,6 +620,7 @@
trans.sa_session.add( history )
trans.sa_session.flush()
return trans.show_ok_message( "Your jobs have been resumed.", refresh_frames=refresh_frames )
+ #TODO: used in index.mako
@web.expose
@web.require_login( "rate items" )
@@ -630,34 +633,7 @@
# Rate history.
history_rating = self.rate_item( trans.sa_session, trans.get_user(), history, rating )
return self.get_ave_item_rating_data( trans.sa_session, history )
-
- @web.expose
- def rename_async( self, trans, id=None, new_name=None ):
- history = self.get_history( trans, id )
- # Check that the history exists, and is either owned by the current
- # user (if logged in) or the current history
- assert history is not None
- if history.user is None:
- assert history == trans.get_history()
- else:
- assert history.user == trans.user
- # Rename
- new_name = sanitize_html( new_name )
- history.name = new_name
- trans.sa_session.add( history )
- trans.sa_session.flush()
- return new_name
-
- @web.expose
- @web.require_login( "use Galaxy histories" )
- def annotate_async( self, trans, id, new_annotation=None, **kwargs ):
- history = self.get_history( trans, id )
- if new_annotation:
- # Sanitize annotation before adding it.
- new_annotation = sanitize_html( new_annotation, 'utf-8', 'text/html' )
- self.add_item_annotation( trans.sa_session, trans.get_user(), history, new_annotation )
- trans.sa_session.flush()
- return new_annotation
+ #TODO: used in display_base.mako
@web.expose
# TODO: Remove require_login when users are warned that, if they are not
@@ -686,6 +662,7 @@
self.queue_history_import( trans, archive_type=archive_type, archive_source=archive_source )
return trans.show_message( "Importing history from '%s'. \
This history will be visible when the import is complete" % archive_source )
+ #TODO: used in this file and index.mako
@web.expose
def export_archive( self, trans, id=None, gzip=True, include_hidden=False, include_deleted=False ):
@@ -719,6 +696,7 @@
return trans.show_message( "Exporting History '%(n)s'. Use this link to download \
the archive or import it to another Galaxy server: \
<a href='%(u)s'>%(u)s</a>" % ( { 'n' : history.name, 'u' : url } ) )
+ #TODO: used in this file and index.mako
@web.expose
@web.json
@@ -733,6 +711,7 @@
"link" : url_for(controller='history', action="display_by_username_and_slug",
username=history.user.username, slug=history.slug ) }
return return_dict
+ #TODO: used in page/editor.mako
@web.expose
@web.require_login( "set history's accessible flag" )
@@ -748,6 +727,7 @@
history.importable = importable
trans.sa_session.flush()
return
+ #TODO: used in page/editor.mako
@web.expose
def get_item_content_async( self, trans, id ):
@@ -764,6 +744,7 @@
for dataset in datasets:
dataset.annotation = self.get_item_annotation_str( trans.sa_session, history.user, dataset )
return trans.stream_template_mako( "/history/item_content.mako", item = history, item_data = datasets )
+ #TODO: used in embed_base.mako
@web.expose
def name_autocomplete_data( self, trans, q=None, limit=None, timestamp=None ):
@@ -773,9 +754,12 @@
return
ac_data = ""
- for history in trans.sa_session.query( model.History ).filter_by( user=user ).filter( func.lower( model.History.name ) .like(q.lower() + "%") ):
+ for history in ( trans.sa_session.query( model.History )
+ .filter_by( user=user )
+ .filter( func.lower( model.History.name ).like(q.lower() + "%") ) ):
ac_data = ac_data + history.name + "\n"
return ac_data
+ #TODO: used in grid_base.mako
@web.expose
def imp( self, trans, id=None, confirm=False, **kwd ):
@@ -852,6 +836,7 @@
Warning! If you import this history, you will lose your current
history. <br>You can <a href="%s">continue and import this history</a> or %s.
""" % ( web.url_for(controller='history', action='imp', id=id, confirm=True, referer=trans.request.referer ), referer_message ), use_panels=True )
+ #TODO: used in history/view, display, embed
@web.expose
def view( self, trans, id=None, show_deleted=False, show_hidden=False, use_panels=True ):
@@ -1390,51 +1375,4 @@
def get_item( self, trans, id ):
return self.get_history( trans, id )
-
- @web.json
- def get_display_application_links( self, trans, hda_ids=None ):
- """
- Returns external display application JSON data for all/given
- HDAs within the current history.
- """
- #TODO: fold into API and remove
- try:
- history = trans.get_history()
- #TODO: allow id for more flexibility? (the following doesn't work if anonymous...)
- #history = self.get_history( trans, id, check_ownership=True, check_accessible=True, deleted=None )
- if hda_ids:
- unencoded_hda_ids = [ trans.security.decode_id( hda_id ) for hda_id in galaxy.util.listify( hda_ids ) ]
- #TODO: this gets all - should narrow query by using hda_ids here - no way with this current method
- hdas = self.get_history_datasets( trans, history, show_deleted=False, show_hidden=True, show_purged=False )
-
- except Exception, exc:
- log.error( 'Failed loading data for ids (%s): %s', hda_ids, str( exc ), exc_info=True )
- trans.response.status = 500
- return str( exc )
-
- hda_display_links = []
- for hda in hdas:
- # only get requested hdas
- if hda_ids and hda.id not in unencoded_hda_ids:
- continue
-
- hda_link_data = { 'id': trans.security.encode_id( hda.id ) }
- # don't bail on entire list if one hda has an error; record and move on
- try:
- # 'old style': must be enabled in config (see universe_wsgi.ini)
- if trans.app.config.enable_old_display_applications:
- hda_link_data[ 'display_types' ] = self.get_old_display_applications( trans, hda )
-
- # 'new style'
- hda_link_data[ 'display_apps' ] = self.get_display_apps( trans, hda )
-
- except Exception, exc:
- log.error( 'Failed getting links, hda (%s): %s', hda_link_data[ 'id' ], str( exc ), exc_info=True )
- hda_link_data[ 'error' ] = str( exc )
-
- hda_display_links.append( hda_link_data )
-
- # send 'do not cache' headers to handle IE's caching of ajax get responses
- trans.response.headers[ 'Cache-Control' ] = "max-age=0,no-cache,no-store"
- trans.response.set_content_type( 'application/json' )
- return hda_display_links
+ #TODO: override of base ui controller?
diff -r 3f8e4852623187152ffaea5fd529fac2001c8316 -r 2f79c07c4438b627df03aa3f96dcd51f8b12c641 static/scripts/mvc/history/history-model.js
--- a/static/scripts/mvc/history/history-model.js
+++ b/static/scripts/mvc/history/history-model.js
@@ -34,26 +34,6 @@
// ........................................................................ urls
urlRoot: galaxy_config.root + 'api/histories',
- /** url for changing the name of the history */
- renameUrl : function(){
-//TODO: just use this.save()
- var id = this.get( 'id' );
- if( !id ){ return undefined; }
- return galaxy_config.root + 'history/rename_async?id=' + this.get( 'id' );
- },
- /** url for changing the annotation of the history */
- annotateUrl : function(){
- var id = this.get( 'id' );
- if( !id ){ return undefined; }
- return galaxy_config.root + 'history/annotate_async?id=' + this.get( 'id' );
- },
- /** url for changing the tags of the history */
- tagUrl : function(){
- var id = this.get( 'id' );
- if( !id ){ return undefined; }
- return galaxy_config.root + 'tag/get_tagging_elt_async?item_id=' + this.get( 'id' ) + '&item_class=History';
- },
-
// ........................................................................ set up/tear down
/** Set up the model
* @param {Object} historyJSON model data for this History
diff -r 3f8e4852623187152ffaea5fd529fac2001c8316 -r 2f79c07c4438b627df03aa3f96dcd51f8b12c641 static/scripts/packed/mvc/history/history-model.js
--- a/static/scripts/packed/mvc/history/history-model.js
+++ b/static/scripts/packed/mvc/history/history-model.js
@@ -1,1 +1,1 @@
-define(["mvc/dataset/hda-model","mvc/base-mvc","utils/localization"],function(c,a,b){var e=Backbone.Model.extend(a.LoggableMixin).extend({defaults:{model_class:"History",id:null,name:"Unnamed History",state:"new",diskSize:0,deleted:false},urlRoot:galaxy_config.root+"api/histories",renameUrl:function(){var g=this.get("id");if(!g){return undefined}return galaxy_config.root+"history/rename_async?id="+this.get("id")},annotateUrl:function(){var g=this.get("id");if(!g){return undefined}return galaxy_config.root+"history/annotate_async?id="+this.get("id")},tagUrl:function(){var g=this.get("id");if(!g){return undefined}return galaxy_config.root+"tag/get_tagging_elt_async?item_id="+this.get("id")+"&item_class=History"},initialize:function(h,i,g){g=g||{};this.logger=g.logger||null;this.log(this+".initialize:",h,i,g);this.hdas=new c.HDACollection(i||[],{historyId:this.get("id")});if(i&&_.isArray(i)){this.hdas.reset(i)}this._setUpListeners();this.updateTimeoutId=null},_setUpListeners:function(){this.on("error",function(h,k,g,j,i){this.errorHandler(h,k,g,j,i)});if(this.hdas){this.listenTo(this.hdas,"error",function(){this.trigger.apply(this,["error:hdas"].concat(jQuery.makeArray(arguments)))})}this.on("change:id",function(h,g){if(this.hdas){this.hdas.historyId=g}},this)},errorHandler:function(h,k,g,j,i){this.clearUpdateTimeout()},ownedByCurrUser:function(){if(!Galaxy||!Galaxy.currUser){return false}if(Galaxy.currUser.isAnonymous()||Galaxy.currUser.id!==this.get("user_id")){return false}return true},hdaCount:function(){return _.reduce(_.values(this.get("state_details")),function(g,h){return g+h},0)},checkForUpdates:function(g){if(this.hdas.running().length){this.setUpdateTimeout()}else{this.trigger("ready");if(_.isFunction(g)){g.call(this)}}return this},setUpdateTimeout:function(g){g=g||e.UPDATE_DELAY;var h=this;this.clearUpdateTimeout();this.updateTimeoutId=setTimeout(function(){h.refresh()},g);return 
this.updateTimeoutId},clearUpdateTimeout:function(){if(this.updateTimeoutId){clearTimeout(this.updateTimeoutId);this.updateTimeoutId=null}},refresh:function(h,g){h=h||[];g=g||{};var i=this;g.data=g.data||{};if(h.length){g.data.details=h.join(",")}var j=this.hdas.fetch(g);j.done(function(k){i.checkForUpdates(function(){this.fetch()})});return j},toString:function(){return"History("+this.get("id")+","+this.get("name")+")"}});e.UPDATE_DELAY=4000;e.getHistoryData=function f(h,r){r=r||{};var l=r.hdaDetailIds||[];var n=jQuery.Deferred(),m=null;function i(s){return jQuery.ajax(galaxy_config.root+"api/histories/"+h)}function g(s){if(!s||!s.state_ids){return 0}return _.reduce(s.state_ids,function(t,v,u){return t+v.length},0)}function q(t){if(!g(t)){return[]}if(_.isFunction(l)){l=l(t)}var s=(l.length)?({details:l.join(",")}):({});return jQuery.ajax(galaxy_config.root+"api/histories/"+t.id+"/contents",{data:s})}var p=r.historyFn||i,o=r.hdaFn||q;var k=p(h);k.done(function(s){m=s;n.notify({status:"history data retrieved",historyJSON:m})});k.fail(function(u,s,t){n.reject(u,"loading the history")});var j=k.then(o);j.then(function(s){n.notify({status:"dataset data retrieved",historyJSON:m,hdaJSON:s});n.resolve(m,s)});j.fail(function(u,s,t){n.reject(u,"loading the datasets",{history:m})});return n};var d=Backbone.Collection.extend(a.LoggableMixin).extend({model:e,urlRoot:galaxy_config.root+"api/histories"});return{History:e,HistoryCollection:d}});
\ No newline at end of file
+define(["mvc/dataset/hda-model","mvc/base-mvc","utils/localization"],function(c,a,b){var e=Backbone.Model.extend(a.LoggableMixin).extend({defaults:{model_class:"History",id:null,name:"Unnamed History",state:"new",diskSize:0,deleted:false},urlRoot:galaxy_config.root+"api/histories",initialize:function(h,i,g){g=g||{};this.logger=g.logger||null;this.log(this+".initialize:",h,i,g);this.hdas=new c.HDACollection(i||[],{historyId:this.get("id")});if(i&&_.isArray(i)){this.hdas.reset(i)}this._setUpListeners();this.updateTimeoutId=null},_setUpListeners:function(){this.on("error",function(h,k,g,j,i){this.errorHandler(h,k,g,j,i)});if(this.hdas){this.listenTo(this.hdas,"error",function(){this.trigger.apply(this,["error:hdas"].concat(jQuery.makeArray(arguments)))})}this.on("change:id",function(h,g){if(this.hdas){this.hdas.historyId=g}},this)},errorHandler:function(h,k,g,j,i){this.clearUpdateTimeout()},ownedByCurrUser:function(){if(!Galaxy||!Galaxy.currUser){return false}if(Galaxy.currUser.isAnonymous()||Galaxy.currUser.id!==this.get("user_id")){return false}return true},hdaCount:function(){return _.reduce(_.values(this.get("state_details")),function(g,h){return g+h},0)},checkForUpdates:function(g){if(this.hdas.running().length){this.setUpdateTimeout()}else{this.trigger("ready");if(_.isFunction(g)){g.call(this)}}return this},setUpdateTimeout:function(g){g=g||e.UPDATE_DELAY;var h=this;this.clearUpdateTimeout();this.updateTimeoutId=setTimeout(function(){h.refresh()},g);return this.updateTimeoutId},clearUpdateTimeout:function(){if(this.updateTimeoutId){clearTimeout(this.updateTimeoutId);this.updateTimeoutId=null}},refresh:function(h,g){h=h||[];g=g||{};var i=this;g.data=g.data||{};if(h.length){g.data.details=h.join(",")}var j=this.hdas.fetch(g);j.done(function(k){i.checkForUpdates(function(){this.fetch()})});return j},toString:function(){return"History("+this.get("id")+","+this.get("name")+")"}});e.UPDATE_DELAY=4000;e.getHistoryData=function f(h,r){r=r||{};var 
l=r.hdaDetailIds||[];var n=jQuery.Deferred(),m=null;function i(s){return jQuery.ajax(galaxy_config.root+"api/histories/"+h)}function g(s){if(!s||!s.state_ids){return 0}return _.reduce(s.state_ids,function(t,v,u){return t+v.length},0)}function q(t){if(!g(t)){return[]}if(_.isFunction(l)){l=l(t)}var s=(l.length)?({details:l.join(",")}):({});return jQuery.ajax(galaxy_config.root+"api/histories/"+t.id+"/contents",{data:s})}var p=r.historyFn||i,o=r.hdaFn||q;var k=p(h);k.done(function(s){m=s;n.notify({status:"history data retrieved",historyJSON:m})});k.fail(function(u,s,t){n.reject(u,"loading the history")});var j=k.then(o);j.then(function(s){n.notify({status:"dataset data retrieved",historyJSON:m,hdaJSON:s});n.resolve(m,s)});j.fail(function(u,s,t){n.reject(u,"loading the datasets",{history:m})});return n};var d=Backbone.Collection.extend(a.LoggableMixin).extend({model:e,urlRoot:galaxy_config.root+"api/histories"});return{History:e,HistoryCollection:d}});
\ No newline at end of file
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/3f8e48526231/
Changeset: 3f8e48526231
User: devteam
Date: 2014-05-02 18:50:57
Summary: Fix for generating metadata when a tool dependency definition sets multiple environment variables within a single <set_environment> tool dependency type.
Affected #: 1 file
diff -r 888cd02028b37f93beba8b175fec8c85a912ac9f -r 3f8e4852623187152ffaea5fd529fac2001c8316 lib/tool_shed/util/metadata_util.py
--- a/lib/tool_shed/util/metadata_util.py
+++ b/lib/tool_shed/util/metadata_util.py
@@ -563,14 +563,11 @@
# <set_environment version="1.0">
# <environment_variable name="JAVA_JAR_PATH" action="set_to">$INSTALL_DIR</environment_variable>
# </set_environment>
- requirements_dict = {}
for env_elem in elem:
# <environment_variable name="JAVA_JAR_PATH" action="set_to">$INSTALL_DIR</environment_variable>
env_name = env_elem.get( 'name', None )
if env_name:
- requirements_dict[ 'name' ] = env_name
- requirements_dict[ 'type' ] = 'set_environment'
- if requirements_dict:
+ requirements_dict = dict( name=env_name, type='set_environment' )
if 'set_environment' in valid_tool_dependencies_dict:
valid_tool_dependencies_dict[ 'set_environment' ].append( requirements_dict )
else:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/888cd02028b3/
Changeset: 888cd02028b3
User: jmchilton
Date: 2014-05-02 18:21:10
Summary: Small fixes for LWR.
Things added/modified for parameter rewriting at tool evaluation time that regressed when implementing message queue stuff I guess.
Affected #: 2 files
diff -r 42b9fb317fdd9248154216530883603110905301 -r 888cd02028b37f93beba8b175fec8c85a912ac9f lib/galaxy/jobs/runners/lwr_client/setup_handler.py
--- a/lib/galaxy/jobs/runners/lwr_client/setup_handler.py
+++ b/lib/galaxy/jobs/runners/lwr_client/setup_handler.py
@@ -32,7 +32,7 @@
def __init__(self, client, destination_args):
self.client = client
system_properties = self.__build_system_properties(destination_args)
- system_properties["sep"] = client.job_directory.separator
+ system_properties["separator"] = client.job_directory.separator
self.system_properties = system_properties
self.jobs_directory = destination_args["jobs_directory"]
diff -r 42b9fb317fdd9248154216530883603110905301 -r 888cd02028b37f93beba8b175fec8c85a912ac9f lib/galaxy/tools/evaluation.py
--- a/lib/galaxy/tools/evaluation.py
+++ b/lib/galaxy/tools/evaluation.py
@@ -251,7 +251,10 @@
if real_path in output_dataset_paths:
dataset_path = output_dataset_paths[ real_path ]
param_dict[name] = DatasetFilenameWrapper( hda, dataset_path=dataset_path )
- open( dataset_path.false_path, 'w' ).close()
+ try:
+ open( dataset_path.false_path, 'w' ).close()
+ except EnvironmentError:
+ pass # May well not exist - e.g. LWR.
else:
param_dict[name] = DatasetFilenameWrapper( hda )
# Provide access to a path to store additional files
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/f337aefda5c6/
Changeset: f337aefda5c6
User: greg
Date: 2014-05-01 22:07:48
Summary: Enhance the Tool Shed API to provide information about users that is public (i.e., public username) and to include a function for creating a new user. Add a Tool Shed API script that retrieves all of the public information about users from a Tool Shed, automatically generates test email accounts and test passwords, and creates the list of users in another Tool Shed. This script streamlines the process of populating a development Tool Shed with users, enabling import of a repository capsule exported from the specified Tool Shed. This simplifies the process of developing new repository hierarchies in a local development Tool Shed.
Affected #: 5 files
diff -r f6561aae5484240c8b76cc32a7b4b1f501b0e660 -r f337aefda5c6c709760c51019df777c083b036bb lib/galaxy/webapps/tool_shed/api/users.py
--- /dev/null
+++ b/lib/galaxy/webapps/tool_shed/api/users.py
@@ -0,0 +1,128 @@
+import logging
+import os
+
+from galaxy import util
+from galaxy import web
+from galaxy.web.base.controller import BaseAPIController
+from galaxy.security.validate_user_input import validate_email
+from galaxy.security.validate_user_input import validate_publicname
+from galaxy.security.validate_user_input import validate_password
+import tool_shed.util.shed_util_common as suc
+
+log = logging.getLogger( __name__ )
+
+
+class UsersController( BaseAPIController ):
+ """RESTful controller for interactions with users in the Tool Shed."""
+
+ @web.expose_api
+ def create_user( self, trans, payload, **kwd ):
+ """
+ POST /api/users/create_user
+ Returns a dictionary of information about the created user.
+
+: param key: the current Galaxy admin user's API key
+
+ The following parameters are included in the payload.
+ :param username (required): the public username of the user
+ """
+ user_dict = dict( message = '',
+ status = 'ok' )
+ # Make sure the current user's API key proves he is an admin user in this Tool Shed.
+ if trans.user_is_admin():
+ # Get the information about the user to be created from the payload.
+ email = payload.get( 'email', '' )
+ password = payload.get( 'password', '' )
+ username = payload.get( 'username', '' )
+ message = self.__validate( trans,
+ email=email,
+ password=password,
+ confirm=password,
+ username=username )
+ if message:
+ message = 'email: %s, username: %s - %s' % ( email, username, message )
+ user_dict[ 'message' ] = message
+ user_dict[ 'status' ] = 'error'
+ else:
+ # Create the user.
+ user = self.__create_user( trans, email, username, password )
+ user_dict = user.to_dict( view='element',
+ value_mapper=self.__get_value_mapper( trans ) )
+ user_dict[ 'message' ] = "User '%s' has been created." % str( user.username )
+ user_dict[ 'url' ] = web.url_for( controller='users',
+ action='show',
+ id=trans.security.encode_id( user.id ) )
+ else:
+ user_dict[ 'message' ] = 'You are not authorized to create a user in this Tool Shed.'
+ user_dict[ 'status' ] = 'error'
+ return user_dict
+
+ def __create_user( self, trans, email, username, password ):
+ user = trans.app.model.User( email=email )
+ user.set_password_cleartext( password )
+ user.username = username
+ if trans.app.config.user_activation_on:
+ user.active = False
+ else:
+ user.active = True # Activation is off, every new user is active by default.
+ trans.sa_session.add( user )
+ trans.sa_session.flush()
+ trans.app.security_agent.create_private_user_role( user )
+ return user
+
+ def __get_value_mapper( self, trans ):
+ value_mapper = { 'id' : trans.security.encode_id }
+ return value_mapper
+
+ @web.expose_api_anonymous
+ def index( self, trans, deleted=False, **kwd ):
+ """
+ GET /api/users
+ Returns a list of dictionaries that contain information about each user.
+ """
+ # Example URL: http://localhost:9009/api/users
+ user_dicts = []
+ deleted = util.asbool( deleted )
+ for user in trans.sa_session.query( trans.app.model.User ) \
+ .filter( trans.app.model.User.table.c.deleted == deleted ) \
+ .order_by( trans.app.model.User.table.c.username ):
+ user_dict = user.to_dict( view='collection',
+ value_mapper=self.__get_value_mapper( trans ) )
+ user_dict[ 'url' ] = web.url_for( controller='users',
+ action='show',
+ id=trans.security.encode_id( user.id ) )
+ user_dicts.append( user_dict )
+ return user_dicts
+
+ @web.expose_api_anonymous
+ def show( self, trans, id, **kwd ):
+ """
+ GET /api/users/{encoded_user_id}
+ Returns a dictionary of information about a user.
+
+ :param id: the encoded id of the User object.
+ """
+ # Example URL: http://localhost:9009/api/users/f9cad7b01a472135
+ user = suc.get_user( trans, id )
+ if user is None:
+ user_dict = dict( message = 'Unable to locate user record for id %s.' % ( str( id ) ),
+ status = 'error' )
+ return user_dict
+ user_dict = user.to_dict( view='element',
+ value_mapper=self.__get_value_mapper( trans ) )
+ user_dict[ 'url' ] = web.url_for( controller='users',
+ action='show',
+ id=trans.security.encode_id( user.id ) )
+ return user_dict
+
+ def __validate( self, trans, email, password, confirm, username ):
+ if not username:
+ return "A public user name is required in the Tool Shed."
+ if username in [ 'repos' ]:
+ return "The term <b>%s</b> is a reserved word in the Tool Shed, so it cannot be used as a public user name." % username
+ message = validate_email( trans, email )
+ if not message:
+ message = validate_password( trans, password, confirm )
+ if not message and username:
+ message = validate_publicname( trans, username )
+ return message
diff -r f6561aae5484240c8b76cc32a7b4b1f501b0e660 -r f337aefda5c6c709760c51019df777c083b036bb lib/galaxy/webapps/tool_shed/buildapp.py
--- a/lib/galaxy/webapps/tool_shed/buildapp.py
+++ b/lib/galaxy/webapps/tool_shed/buildapp.py
@@ -107,6 +107,13 @@
name_prefix='repository_revision_',
path_prefix='/api',
parent_resources=dict( member_name='repository_revision', collection_name='repository_revisions' ) )
+ webapp.mapper.resource( 'user',
+ 'users',
+ controller='users',
+ name_prefix='user_',
+ path_prefix='/api',
+ new={ 'create_user' : 'POST' },
+ parent_resources=dict( member_name='user', collection_name='users' ) )
webapp.finalize_config()
# Wrap the webapp in some useful middleware
if kwargs.get( 'middleware', True ):
diff -r f6561aae5484240c8b76cc32a7b4b1f501b0e660 -r f337aefda5c6c709760c51019df777c083b036bb lib/galaxy/webapps/tool_shed/model/__init__.py
--- a/lib/galaxy/webapps/tool_shed/model/__init__.py
+++ b/lib/galaxy/webapps/tool_shed/model/__init__.py
@@ -20,8 +20,8 @@
class User( object, Dictifiable ):
- dict_collection_visible_keys = ( 'id', 'email' )
- dict_element_visible_keys = ( 'id', 'email', 'username' )
+ dict_collection_visible_keys = ( 'id', 'username' )
+ dict_element_visible_keys = ( 'id', 'username' )
def __init__( self, email=None, password=None ):
self.email = email
diff -r f6561aae5484240c8b76cc32a7b4b1f501b0e660 -r f337aefda5c6c709760c51019df777c083b036bb lib/tool_shed/scripts/api/create_categories.py
--- a/lib/tool_shed/scripts/api/create_categories.py
+++ b/lib/tool_shed/scripts/api/create_categories.py
@@ -9,10 +9,10 @@
This script is very useful for populating a new development Tool Shed with the set of categories that
currently exist in either the test or main public Galaxy Tool Sheds. This will streamline building
-new repository hierarchies in the development Tool Shed and exportin gthem into a capsule that can be
+new repository hierarchies in the development Tool Shed and exporting them into a capsule that can be
imported into one of the public Tool Sheds.
-Here is a working example of how to use this script to retrieve the current set of repositories that are
+Here is a working example of how to use this script to retrieve the current set of categories that are
available in the test public Tool Shed and create each of them in a local development Tool Shed.
./create_categories.py -a <api key> -f http://testtoolshed.g2.bx.psu.edu -t http://localhost:9009
diff -r f6561aae5484240c8b76cc32a7b4b1f501b0e660 -r f337aefda5c6c709760c51019df777c083b036bb lib/tool_shed/scripts/api/create_users.py
--- /dev/null
+++ b/lib/tool_shed/scripts/api/create_users.py
@@ -0,0 +1,60 @@
+#!/usr/bin/env python
+"""
+This script will retrieve a list of dictionaries (one for each user) from the Tool Shed defined
+by the --from_tool_shed parameter, which should be a base Tool Shed URL. It will retrieve the
+username from each dictionary and create a new user with that username in the Tool Shed defined
+by the --to_tool_shed parameter (a different base Tool Shed URL). An email and password value
+will automatically be provided for each user. Email addresses will be <username>@test.org and
+passwords will be testuser. Users that already exist with a specified username in the Tool Shed
+in which the users are being created will not be affected.
+
+This script is very useful for populating a new development Tool Shed with the set of users that
+currently exist in either the test or main public Galaxy Tool Sheds. This will streamline building
+new repository hierarchies in the development Tool Shed and exporting them into a capsule that can
+be imported into one of the public Tool Sheds.
+
+Here is a working example of how to use this script to retrieve the current set of users that
+are available in the test public Tool Shed and create each of them in a local development Tool Shed.
+
+./create_users.py -a <api key> -f http://testtoolshed.g2.bx.psu.edu -t http://localhost:9009
+"""
+
+import os
+import sys
+import argparse
+sys.path.insert( 0, os.path.dirname( __file__ ) )
+from common import get
+from common import submit
+
+def main( options ):
+ api_key = options.api
+ from_tool_shed = options.from_tool_shed.rstrip( '/' )
+ to_tool_shed = options.to_tool_shed.rstrip( '/' )
+ # Get the users from the specified Tool Shed.
+ url = '%s/api/users' % from_tool_shed
+ user_dicts = get( url )
+ create_response_dicts = []
+ for user_dict in user_dicts:
+ username = user_dict.get( 'username', None )
+ if username is not None:
+            email = '%s@test.org' % username
+ password = 'testuser'
+ data = dict( email=email,
+ password=password,
+ username=username )
+ url = '%s/api/users/new/create_user' % to_tool_shed
+ try:
+ response = submit( url, data, api_key )
+ except Exception, e:
+ response = str( e )
+ log.exception( str( e ) )
+ create_response_dict = dict( response=response )
+ create_response_dicts.append( create_response_dict )
+
+if __name__ == '__main__':
+ parser = argparse.ArgumentParser( description='Retrieve a list of users from a Tool Shed and create them in another Tool Shed.' )
+ parser.add_argument( "-a", "--api", dest="api", required=True, help="API Key for Tool Shed in which users will be created" )
+ parser.add_argument( "-f", "--from_tool_shed", dest="from_tool_shed", required=True, help="URL of Tool Shed from which to retrieve the users" )
+ parser.add_argument( "-t", "--to_tool_shed", dest="to_tool_shed", required=True, help="URL of Tool Shed in which to create the users" )
+ options = parser.parse_args()
+ main( options )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.