galaxy-commits
February 2012
03 Feb '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/84baec18959c/
changeset: 84baec18959c
user: greg
date: 2012-02-03 22:30:30
summary: Correctly handle ToolPanelSection objects when presenting the tool panel section radio buttons for selecting a section to contain tools installed with a tool shed repository into a local Galaxy instance.
affected #: 1 file
diff -r 86d9c5ddbafc3f54acd8398cf3f3d9c10a5ca54c -r 84baec18959cb7f48f450f9aa676aaa1882044c1 lib/galaxy/web/controllers/admin_toolshed.py
--- a/lib/galaxy/web/controllers/admin_toolshed.py
+++ b/lib/galaxy/web/controllers/admin_toolshed.py
@@ -797,8 +797,9 @@
def build_tool_panel_section_select_field( trans ):
"""Build a SelectField whose options are the sections of the current in-memory toolbox."""
options = []
- for k, tool_section in trans.app.toolbox.tool_panel.items():
- options.append( ( tool_section.name, tool_section.id ) )
+ for k, v in trans.app.toolbox.tool_panel.items():
+ if isinstance( v, tools.ToolSection ):
+ options.append( ( v.name, v.id ) )
select_field = SelectField( name='tool_panel_section', display='radio' )
for option_tup in options:
select_field.add_option( option_tup[0], option_tup[1] )
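For context, the in-memory tool_panel iterated above maps keys to a mix of ToolSection objects and other entries (individual tools, for example), which is why the fix filters on isinstance before building the radio-button options. Below is a minimal standalone sketch of that pattern; the classes and panel contents are invented stand-ins, not Galaxy's real toolbox.

# Editor's illustration, not part of the changeset above.
class ToolSection( object ):
    def __init__( self, id, name ):
        self.id = id
        self.name = name

class Tool( object ):
    def __init__( self, id, name ):
        self.id = id
        self.name = name

# The panel mixes sections with bare tools (invented example data).
tool_panel = {}
tool_panel[ 'section_filter' ] = ToolSection( 'filter', 'Filter and Sort' )
tool_panel[ 'tool_cat1' ] = Tool( 'cat1', 'Concatenate datasets' )
tool_panel[ 'section_stats' ] = ToolSection( 'stats', 'Statistics' )

# Only ToolSection entries become ( name, id ) radio-button options.
options = [ ( v.name, v.id ) for v in tool_panel.values() if isinstance( v, ToolSection ) ]
print( options )  # e.g. [('Filter and Sort', 'filter'), ('Statistics', 'stats')]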
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: greg: Make sure a tool shed repository has metadata before setting versions for contained tools.
by Bitbucket 03 Feb '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/86d9c5ddbafc/
changeset: 86d9c5ddbafc
user: greg
date: 2012-02-03 21:29:02
summary: Make sure a tool shed repository has metadata before setting versions for contained tools.
affected #: 2 files
diff -r e6464387ed3fce7f235cf5c9af1281da14a32811 -r 86d9c5ddbafc3f54acd8398cf3f3d9c10a5ca54c lib/galaxy/webapps/community/controllers/common.py
--- a/lib/galaxy/webapps/community/controllers/common.py
+++ b/lib/galaxy/webapps/community/controllers/common.py
@@ -559,23 +559,25 @@
for index, changeset_revision in enumerate( changeset_revisions ):
tool_versions_dict = {}
repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
- metadata = repository_metadata.metadata
- tool_dicts = metadata.get( 'tools', [] )
- if index == 0:
- # The first changeset_revision is a special case because it will have no ancestor
- # changeset_revisions in which to match tools. The parent tool id for tools in
- # the first changeset_revision will be the "old_id" in the tool config.
- for tool_dict in tool_dicts:
- tool_versions_dict[ tool_dict[ 'guid' ] ] = tool_dict[ 'id' ]
- else:
- for tool_dict in tool_dicts:
- # We have at least 2 changeset revisions to compare tool guids and tool ids.
- parent_id = get_parent_id( trans, id, tool_dict[ 'id' ], tool_dict[ 'version' ], tool_dict[ 'guid' ], changeset_revisions[ 0:index ] )
- tool_versions_dict[ tool_dict[ 'guid' ] ] = parent_id
- if tool_versions_dict:
- repository_metadata.tool_versions = tool_versions_dict
- trans.sa_session.add( repository_metadata )
- trans.sa_session.flush()
+ if repository_metadata:
+ metadata = repository_metadata.metadata
+ if metadata:
+ tool_dicts = metadata.get( 'tools', [] )
+ if index == 0:
+ # The first changeset_revision is a special case because it will have no ancestor
+ # changeset_revisions in which to match tools. The parent tool id for tools in
+ # the first changeset_revision will be the "old_id" in the tool config.
+ for tool_dict in tool_dicts:
+ tool_versions_dict[ tool_dict[ 'guid' ] ] = tool_dict[ 'id' ]
+ else:
+ for tool_dict in tool_dicts:
+ # We have at least 2 changeset revisions to compare tool guids and tool ids.
+ parent_id = get_parent_id( trans, id, tool_dict[ 'id' ], tool_dict[ 'version' ], tool_dict[ 'guid' ], changeset_revisions[ 0:index ] )
+ tool_versions_dict[ tool_dict[ 'guid' ] ] = parent_id
+ if tool_versions_dict:
+ repository_metadata.tool_versions = tool_versions_dict
+ trans.sa_session.add( repository_metadata )
+ trans.sa_session.flush()
def get_parent_id( trans, id, old_id, version, guid, changeset_revisions ):
parent_id = None
# Compare from most recent to oldest.
diff -r e6464387ed3fce7f235cf5c9af1281da14a32811 -r 86d9c5ddbafc3f54acd8398cf3f3d9c10a5ca54c lib/galaxy/webapps/community/model/migrate/versions/0011_add_tool_versions_column.py
--- a/lib/galaxy/webapps/community/model/migrate/versions/0011_add_tool_versions_column.py
+++ b/lib/galaxy/webapps/community/model/migrate/versions/0011_add_tool_versions_column.py
@@ -1,5 +1,5 @@
"""
-Migration script to add the new_repo_alert column to the galaxy_user table.
+Migration script to add the tool_versions column to the repository_metadata table.
"""
from sqlalchemy import *
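The tool_versions column mentioned in the corrected docstring stores, for each changeset revision, the dictionary built by the guarded code above: { tool guid : parent tool id } pairs. A hypothetical illustration of those dictionaries for two revisions of a repository; every guid and id below is invented.

# Editor's illustration; all guids and ids are invented.

# First changeset revision of a repository: there is no ancestor revision,
# so each tool's parent id is simply the "old_id" from its tool config.
rev_0_tool_versions = {
    'toolshed.example.org/repos/greg/filtering/filter_tool/1.0.0': 'filter_tool',
}

# A later changeset revision that bumps the tool to 1.1.0: the parent id is
# the guid of the matching tool found in an earlier revision's metadata.
rev_1_tool_versions = {
    'toolshed.example.org/repos/greg/filtering/filter_tool/1.1.0':
        'toolshed.example.org/repos/greg/filtering/filter_tool/1.0.0',
}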
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/e6464387ed3f/
changeset: e6464387ed3f
user: greg
date: 2012-02-03 21:13:28
summary: Add the ability to chain Galaxy tools into a lineage of tool versions. This enables workflows and the rerun button on history items to work when a new version of a tool is loaded into the Galaxy tool panel. Version information for tools included in tool shed repositories is maintained for each changeset revision in the tool shed, and this information can be retrieved from the Administrative "Manage installed tool shed repositories" menu item for installed tool shed repositories that contain tools. This new functionality eliminates the need for the tool_id_guid_map table and associated features, so they have all been removed.
affected #: 17 files
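To make the lineage idea in the summary concrete before the diff: each tool version knows its parent (older) and child (newer) versions, and the full chain is assembled by walking ancestors and then descendants. The toy class and ids below are invented for illustration; Galaxy's actual ToolVersion and ToolVersionAssociation models in the diff perform the equivalent walk with SQLAlchemy queries (ToolVersion.get_versions).

# Editor's illustration of the ancestor/descendant walk, not Galaxy code.
class ToyToolVersion( object ):
    def __init__( self, tool_id ):
        self.tool_id = tool_id
        self.parent = None  # older version, if any
        self.child = None   # newer version, if any

def get_version_ids( version ):
    """Return tool ids for the whole lineage containing `version`, oldest first."""
    versions = [ version ]
    current = version
    while current.parent is not None:   # prepend ancestors
        versions.insert( 0, current.parent )
        current = current.parent
    current = version
    while current.child is not None:    # append descendants
        versions.append( current.child )
        current = current.child
    return [ v.tool_id for v in versions ]

old = ToyToolVersion( 'filter_tool' )  # id used when the tool shipped with Galaxy
new = ToyToolVersion( 'toolshed.example.org/repos/greg/filtering/filter_tool/1.1.0' )
old.child, new.parent = new, old
print( get_version_ids( old ) )
# ['filter_tool', 'toolshed.example.org/repos/greg/filtering/filter_tool/1.1.0']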
diff -r f87dff286e86368fb33032ac567a5c9368dc990b -r e6464387ed3fce7f235cf5c9af1281da14a32811 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -2687,16 +2687,46 @@
def includes_workflows( self ):
return 'workflows' in self.metadata
-class ToolIdGuidMap( object ):
- def __init__( self, id=None, create_time=None, tool_id=None, tool_version=None, tool_shed=None, repository_owner=None, repository_name=None, guid=None ):
+class ToolVersion( object ):
+ def __init__( self, id=None, create_time=None, tool_id=None, tool_shed_repository=None ):
self.id = id
self.create_time = create_time
self.tool_id = tool_id
- self.tool_version = tool_version
- self.tool_shed = tool_shed
- self.repository_owner = repository_owner
- self.repository_name = repository_name
- self.guid = guid
+ self.tool_shed_repository = tool_shed_repository
+ def get_versions( self, app ):
+ sa_session = app.model.context.current
+ tool_versions = []
+ # Prepend ancestors.
+ def __ancestors( tool_version ):
+ # Should we handle multiple parents at each level?
+ previous_tva = tool_version.previous_version
+ if previous_tva:
+ parent_version = previous_tva[0].parent_version
+ if parent_version not in tool_versions:
+ tool_versions.insert( 0, parent_version )
+ __ancestors( parent_version )
+ # Append descendants.
+ def __descendants( tool_version ):
+ # Should we handle multiple child siblings at each level?
+ next_tva = sa_session.query( app.model.ToolVersionAssociation ) \
+ .filter( app.model.ToolVersionAssociation.table.c.parent_id == tool_version.id ) \
+ .first()
+ if next_tva:
+ current_version = next_tva.tool_version
+ if current_version not in tool_versions:
+ tool_versions.append( current_version )
+ __descendants( current_version )
+ __ancestors( self )
+ __descendants( self )
+ return tool_versions
+ def get_version_ids( self, app ):
+ return [ tool_version.tool_id for tool_version in self.get_versions( app ) ]
+
+class ToolVersionAssociation( object ):
+ def __init__( self, id=None, tool_id=None, parent_id=None ):
+ self.id = id
+ self.tool_id = tool_id
+ self.parent_id = parent_id
## ---- Utility methods -------------------------------------------------------
diff -r f87dff286e86368fb33032ac567a5c9368dc990b -r e6464387ed3fce7f235cf5c9af1281da14a32811 lib/galaxy/model/mapping.py
--- a/lib/galaxy/model/mapping.py
+++ b/lib/galaxy/model/mapping.py
@@ -382,16 +382,17 @@
Column( "uninstalled", Boolean, default=False ),
Column( "dist_to_shed", Boolean, default=False ) )
-ToolIdGuidMap.table = Table( "tool_id_guid_map", metadata,
+ToolVersion.table = Table( "tool_version", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "tool_id", String( 255 ) ),
- Column( "tool_version", TEXT ),
- Column( "tool_shed", TrimmedString( 255 ) ),
- Column( "repository_owner", TrimmedString( 255 ) ),
- Column( "repository_name", TrimmedString( 255 ) ),
- Column( "guid", TEXT, index=True, unique=True ) )
+ Column( "tool_shed_repository_id", Integer, ForeignKey( "tool_shed_repository.id" ), index=True, nullable=True ) )
+
+ToolVersionAssociation.table = Table( "tool_version_association", metadata,
+ Column( "id", Integer, primary_key=True ),
+ Column( "tool_id", Integer, ForeignKey( "tool_version.id" ), index=True, nullable=False ),
+ Column( "parent_id", Integer, ForeignKey( "tool_version.id" ), index=True, nullable=False ) )
Job.table = Table( "job", metadata,
Column( "id", Integer, primary_key=True ),
@@ -1619,9 +1620,20 @@
ratings=relation( PageRatingAssociation, order_by=PageRatingAssociation.table.c.id, backref="pages" )
) )
-assign_mapper( context, ToolShedRepository, ToolShedRepository.table )
+assign_mapper( context, ToolShedRepository, ToolShedRepository.table,
+ properties=dict( tool_versions=relation( ToolVersion,
+ primaryjoin=( ToolShedRepository.table.c.id == ToolVersion.table.c.tool_shed_repository_id ),
+ backref='tool_shed_repository' ) ) )
-assign_mapper( context, ToolIdGuidMap, ToolIdGuidMap.table )
+assign_mapper( context, ToolVersion, ToolVersion.table )
+
+assign_mapper( context, ToolVersionAssociation, ToolVersionAssociation.table,
+ properties=dict( tool_version=relation( ToolVersion,
+ primaryjoin=( ToolVersionAssociation.table.c.tool_id == ToolVersion.table.c.id ),
+ backref='current_version' ),
+ parent_version=relation( ToolVersion,
+ primaryjoin=( ToolVersionAssociation.table.c.parent_id == ToolVersion.table.c.id ),
+ backref='previous_version' ) ) )
# Set up proxy so that
# Page.users_shared_with
diff -r f87dff286e86368fb33032ac567a5c9368dc990b -r e6464387ed3fce7f235cf5c9af1281da14a32811 lib/galaxy/model/migrate/versions/0091_add_tool_version_tables.py
--- /dev/null
+++ b/lib/galaxy/model/migrate/versions/0091_add_tool_version_tables.py
@@ -0,0 +1,119 @@
+"""
+Migration script to create the tool_version and tool_version_association tables and drop the tool_id_guid_map table.
+"""
+
+from sqlalchemy import *
+from sqlalchemy.orm import *
+from migrate import *
+from migrate.changeset import *
+
+import datetime
+now = datetime.datetime.utcnow
+# Need our custom types, but don't import anything else from model
+from galaxy.model.custom_types import *
+from galaxy.util.json import from_json_string, to_json_string
+
+import sys, logging
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+metadata = MetaData( migrate_engine )
+db_session = scoped_session( sessionmaker( bind=migrate_engine, autoflush=False, autocommit=True ) )
+
+def nextval( table, col='id' ):
+ if migrate_engine.name == 'postgres':
+ return "nextval('%s_%s_seq')" % ( table, col )
+ elif migrate_engine.name == 'mysql' or migrate_engine.name == 'sqlite':
+ return "null"
+ else:
+ raise Exception( 'Unable to convert data for unknown database type: %s' % migrate_engine.name )
+
+def localtimestamp():
+ if migrate_engine.name == 'postgres' or migrate_engine.name == 'mysql':
+ return "LOCALTIMESTAMP"
+ elif migrate_engine.name == 'sqlite':
+ return "current_date || ' ' || current_time"
+ else:
+ raise Exception( 'Unable to convert data for unknown database type: %s' % db )
+
+ToolVersion_table = Table( "tool_version", metadata,
+ Column( "id", Integer, primary_key=True ),
+ Column( "create_time", DateTime, default=now ),
+ Column( "update_time", DateTime, default=now, onupdate=now ),
+ Column( "tool_id", String( 255 ) ),
+ Column( "tool_shed_repository_id", Integer, ForeignKey( "tool_shed_repository.id" ), index=True, nullable=True ) )
+
+ToolVersionAssociation_table = Table( "tool_version_association", metadata,
+ Column( "id", Integer, primary_key=True ),
+ Column( "tool_id", Integer, ForeignKey( "tool_version.id" ), index=True, nullable=False ),
+ Column( "parent_id", Integer, ForeignKey( "tool_version.id" ), index=True, nullable=False ) )
+
+def upgrade():
+ print __doc__
+
+ ToolIdGuidMap_table = Table( "tool_id_guid_map", metadata, autoload=True )
+
+ metadata.reflect()
+ # Create the tables.
+ try:
+ ToolVersion_table.create()
+ except Exception, e:
+ log.debug( "Creating tool_version table failed: %s" % str( e ) )
+ try:
+ ToolVersionAssociation_table.create()
+ except Exception, e:
+ log.debug( "Creating tool_version_association table failed: %s" % str( e ) )
+ # Populate the tool table with tools included in installed tool shed repositories.
+ cmd = "SELECT id, metadata FROM tool_shed_repository"
+ result = db_session.execute( cmd )
+ count = 0
+ for row in result:
+ if row[1]:
+ tool_shed_repository_id = row[0]
+ repository_metadata = from_json_string( str( row[1] ) )
+ # Create a new row in the tool table for each tool included in the repository. We will NOT
+ # handle tool_version_associations because we do not have the information we need to do so.
+ tools = repository_metadata.get( 'tools', [] )
+ for tool_dict in tools:
+ cmd = "INSERT INTO tool_version VALUES (%s, %s, %s, '%s', %s)" % \
+ ( nextval( 'tool_version' ), localtimestamp(), localtimestamp(), tool_dict[ 'guid' ], tool_shed_repository_id )
+ db_session.execute( cmd )
+ count += 1
+ print "Added %d rows to the new tool_version table." % count
+ # Drop the tool_id_guid_map table since the 2 new tables render it unnecessary.
+ try:
+ ToolIdGuidMap_table.drop()
+ except Exception, e:
+ log.debug( "Dropping tool_id_guid_map table failed: %s" % str( e ) )
+
+def downgrade():
+
+ ToolIdGuidMap_table = Table( "tool_id_guid_map", metadata,
+ Column( "id", Integer, primary_key=True ),
+ Column( "create_time", DateTime, default=now ),
+ Column( "update_time", DateTime, default=now, onupdate=now ),
+ Column( "tool_id", String( 255 ) ),
+ Column( "tool_version", TEXT ),
+ Column( "tool_shed", TrimmedString( 255 ) ),
+ Column( "repository_owner", TrimmedString( 255 ) ),
+ Column( "repository_name", TrimmedString( 255 ) ),
+ Column( "guid", TEXT, index=True, unique=True ) )
+
+ metadata.reflect()
+ try:
+ ToolVersionAssociation_table.drop()
+ except Exception, e:
+ log.debug( "Dropping tool_version_association table failed: %s" % str( e ) )
+ try:
+ ToolVersion_table.drop()
+ except Exception, e:
+ log.debug( "Dropping tool_version table failed: %s" % str( e ) )
+ try:
+ ToolIdGuidMap_table.create()
+ except Exception, e:
+ log.debug( "Creating tool_id_guid_map table failed: %s" % str( e ) )
diff -r f87dff286e86368fb33032ac567a5c9368dc990b -r e6464387ed3fce7f235cf5c9af1281da14a32811 lib/galaxy/tool_shed/install_manager.py
--- a/lib/galaxy/tool_shed/install_manager.py
+++ b/lib/galaxy/tool_shed/install_manager.py
@@ -4,7 +4,9 @@
shed. Tools included in tool_shed_install.xml that have already been installed will not be
re-installed.
"""
+import urllib2
from galaxy.tools import ToolSection
+from galaxy.util.json import from_json_string, to_json_string
from galaxy.util.shed_util import *
log = logging.getLogger( __name__ )
@@ -53,7 +55,7 @@
changeset_revision = elem.get( 'changeset_revision' )
# Install path is of the form: <tool path>/<tool shed>/repos/<repository owner>/<repository name>/<installed changeset revision>
clone_dir = os.path.join( self.tool_path, self.tool_shed, 'repos', self.repository_owner, name, changeset_revision )
- if self.__isinstalled( elem, clone_dir ):
+ if self.__isinstalled( clone_dir ):
log.debug( "Skipping automatic install of repository '%s' because it has already been installed in location '%s'" % ( name, clone_dir ) )
else:
if section_name and section_id:
@@ -81,47 +83,59 @@
if returncode == 0:
returncode, tmp_name = update_repository( current_working_dir, relative_install_dir, changeset_revision )
if returncode == 0:
- metadata_dict = load_repository_contents( app=self.app,
- repository_name=name,
- description=description,
- owner=self.repository_owner,
- changeset_revision=changeset_revision,
- tool_path=self.tool_path,
- repository_clone_url=repository_clone_url,
- relative_install_dir=relative_install_dir,
- current_working_dir=current_working_dir,
- tmp_name=tmp_name,
- tool_shed=self.tool_shed,
- tool_section=tool_section,
- shed_tool_conf=self.install_tool_config,
- new_install=True,
- dist_to_shed=True )
- # Add a new record to the tool_id_guid_map table for each tool in the repository if one doesn't already exist.
+ tool_shed_repository, metadata_dict = load_repository_contents( app=self.app,
+ repository_name=name,
+ description=description,
+ owner=self.repository_owner,
+ changeset_revision=changeset_revision,
+ tool_path=self.tool_path,
+ repository_clone_url=repository_clone_url,
+ relative_install_dir=relative_install_dir,
+ current_working_dir=current_working_dir,
+ tmp_name=tmp_name,
+ tool_shed=self.tool_shed,
+ tool_section=tool_section,
+ shed_tool_conf=self.install_tool_config,
+ new_install=True,
+ dist_to_shed=True )
if 'tools' in metadata_dict:
- tools_mapped = 0
- for tool_dict in metadata_dict[ 'tools' ]:
- flush_needed = False
- tool_id = tool_dict[ 'id' ]
- tool_version = tool_dict[ 'version' ]
- guid = tool_dict[ 'guid' ]
- tool_id_guid_map = get_tool_id_guid_map( self.app, tool_id, tool_version, self.tool_shed, self.repository_owner, name )
- if tool_id_guid_map:
- if tool_id_guid_map.guid != guid:
- tool_id_guid_map.guid = guid
- flush_needed = True
- else:
- tool_id_guid_map = self.app.model.ToolIdGuidMap( tool_id=tool_id,
- tool_version=tool_version,
- tool_shed=self.tool_shed,
- repository_owner=self.repository_owner,
- repository_name=name,
- guid=guid )
- flush_needed = True
- if flush_needed:
- self.sa_session.add( tool_id_guid_map )
- self.sa_session.flush()
- tools_mapped += 1
- log.debug( "Mapped tool ids to guids for %d tools included in repository '%s'." % ( tools_mapped, name ) )
+ # Get the tool_versions from the tool shed for each tool in the installed change set.
+ url = '%s/repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s&webapp=galaxy' % \
+ ( tool_shed_url, name, self.repository_owner, changeset_revision )
+ response = urllib2.urlopen( url )
+ text = response.read()
+ response.close()
+ if text:
+ tool_versions_dict = from_json_string( text )
+ handle_tool_versions( self.app, tool_versions_dict, tool_shed_repository )
+ else:
+ # Set the tool versions since they seem to be missing for this repository in the tool shed.
+ for tool_dict in metadata_dict[ 'tools' ]:
+ flush_needed = False
+ tool_id = tool_dict[ 'guid' ]
+ old_tool_id = tool_dict[ 'id' ]
+ tool_version = tool_dict[ 'version' ]
+ tool_version_using_old_id = get_tool_version( self.app, old_tool_id )
+ tool_version_using_guid = get_tool_version( self.app, tool_id )
+ if not tool_version_using_old_id:
+ tool_version_using_old_id = self.app.model.ToolVersion( tool_id=old_tool_id,
+ tool_shed_repository=tool_shed_repository )
+ self.sa_session.add( tool_version_using_old_id )
+ self.sa_session.flush()
+ if not tool_version_using_guid:
+ tool_version_using_guid = self.app.model.ToolVersion( tool_id=tool_id,
+ tool_shed_repository=tool_shed_repository )
+ self.sa_session.add( tool_version_using_guid )
+ self.sa_session.flush()
+ # Associate the two versions as parent / child.
+ tool_version_association = get_tool_version_association( self.app,
+ tool_version_using_old_id,
+ tool_version_using_guid )
+ if not tool_version_association:
+ tool_version_association = self.app.model.ToolVersionAssociation( tool_id=tool_version_using_guid.id,
+ parent_id=tool_version_using_old_id.id )
+ self.sa_session.add( tool_version_association )
+ self.sa_session.flush()
else:
tmp_stderr = open( tmp_name, 'rb' )
log.debug( "Error updating repository '%s': %s" % ( name, tmp_stderr.read() ) )
@@ -154,26 +168,11 @@
# The tool shed from which the repository was originally
# installed must no longer be configured in tool_sheds_conf.xml.
return None
- def __isinstalled( self, repository_elem, clone_dir ):
- name = repository_elem.get( 'name' )
- installed = False
- for tool_elem in repository_elem:
- tool_config = tool_elem.get( 'file' )
- tool_id = tool_elem.get( 'id' )
- tool_version = tool_elem.get( 'version' )
- tigm = get_tool_id_guid_map( self.app, tool_id, tool_version, self.tool_shed, self.repository_owner, name )
- if tigm:
- # A record exists in the tool_id_guid_map table, so see if the repository is installed.
- if os.path.exists( clone_dir ):
- installed = True
- break
- if not installed:
- full_path = os.path.abspath( clone_dir )
- # We may have a repository that contains no tools.
- if os.path.exists( full_path ):
- for root, dirs, files in os.walk( full_path ):
- if '.hg' in dirs:
- # Assume that the repository has been installed if we find a .hg directory.
- installed = True
- break
- return installed
+ def __isinstalled( self, clone_dir ):
+ full_path = os.path.abspath( clone_dir )
+ if os.path.exists( full_path ):
+ for root, dirs, files in os.walk( full_path ):
+ if '.hg' in dirs:
+ # Assume that the repository has been installed if we find a .hg directory.
+ return True
+ return False
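Summarizing the new flow in install_manager.py above: instead of writing rows to the dropped tool_id_guid_map table, the InstallManager asks the tool shed's get_tool_versions method for the { tool guid : parent tool id } mapping of the installed changeset revision and hands it to handle_tool_versions(), falling back to building ToolVersion rows locally when the response is empty. A condensed sketch of the request half of that round trip; the host name and arguments are placeholders, and the standard-library json module stands in for Galaxy's from_json_string helper.

# Editor's illustration; placeholder host, no error handling.
import urllib2
import json  # stands in for galaxy.util.json.from_json_string

def fetch_tool_versions( tool_shed_url, name, owner, changeset_revision ):
    url = '%s/repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s&webapp=galaxy' % \
        ( tool_shed_url, name, owner, changeset_revision )
    response = urllib2.urlopen( url )
    try:
        text = response.read()
    finally:
        response.close()
    if text:
        # { tool guid : parent tool id } pairs, ready for handle_tool_versions().
        return json.loads( text )
    # An empty response means the tool shed has no version info for this revision.
    return None

# Example call with invented placeholder values (the host will not resolve):
versions = fetch_tool_versions( 'http://toolshed.example.org', 'filtering', 'greg', 'abc123def456' )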
diff -r f87dff286e86368fb33032ac567a5c9368dc990b -r e6464387ed3fce7f235cf5c9af1281da14a32811 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -120,36 +120,32 @@
return tool
else:
return tool
- # Handle the case where the tool was used when the tool was included in the Galaxy distribution,
- # but now the tool is contained in an installed tool shed repository. In this case, the original
- # tool id can be mapped to the new tool id, which is the tool's guid in the tool shed repository.
- # This scenario can occur in workflows and in a history item when the rerun icon is clicked.
- # The weakness here is that workflows currently handle only tool ids and not versions.
- tool_id_guid_map = self.__get_tool_id_guid_map( tool_id, tool_version=tool_version )
- if tool_id_guid_map:
- guid = tool_id_guid_map.guid
- if guid in self.tools_by_id:
- return self.tools_by_id[ guid ]
- # Handle the case where a proprietary tool was initially developed and hosted in a local Galaxy
- # instance, but the developer later uploaded the tool to a Galaxy tool shed, removed the original
- # tool from the local Galaxy instance and installed the tool's repository from the tool shed.
- for k, tool in self.tools_by_id.items():
- if tool_id == tool.old_id:
- if tool_version and tool.version == tool_version:
- return tool
- else:
- return tool
+ # Handle the case where the received tool_id has a tool_version. In this case, one of the following
+ # conditions is true.
+ # 1. The tool was used when it was included in the Galaxy distribution, but now the tool is contained
+ # in an installed tool shed repository. In this case, the original tool id can be mapped to the new
+ # tool id, which is the tool's guid in the tool shed repository. This scenario can occur in
+ # workflows and in a history item when the rerun icon is clicked. The weakness here is that workflows
+ # currently handle only tool ids and not versions.
+ # 2. A proprietary tool was initially developed and hosted in a local Galaxy instance, but the developer
+ # later uploaded the tool to a Galaxy tool shed, removed the original tool from the local Galaxy
+ # instance and installed the tool's repository from the tool shed.
+ tv = self.__get_tool_version( tool_id )
+ if tv:
+ tool_version_ids = tv.get_version_ids( self.app )
+ for tool_version_id in tool_version_ids:
+ if tool_version_id in self.tools_by_id:
+ tool = self.tools_by_id[ tool_version_id ]
+ if tool_version and tool.version == tool_version:
+ return tool
+ else:
+ return tool
return None
- def __get_tool_id_guid_map( self, tool_id, tool_version=None ):
- if tool_version:
- return self.sa_session.query( self.app.model.ToolIdGuidMap ) \
- .filter( and_( self.app.model.ToolIdGuidMap.table.c.tool_id == tool_id,
- self.app.model.ToolIdGuidMap.table.c.tool_version == tool_version ) ) \
- .first()
- else:
- return self.sa_session.query( self.app.model.ToolIdGuidMap ) \
- .filter( self.app.model.ToolIdGuidMap.table.c.tool_id == tool_id ) \
- .first()
+ def __get_tool_version( self, tool_id ):
+ """Return a ToolVersion if one exists for our tool_id"""
+ return self.sa_session.query( self.app.model.ToolVersion ) \
+ .filter( self.app.model.ToolVersion.table.c.tool_id == tool_id ) \
+ .first()
def __get_tool_shed_repository( self, tool_shed, name, owner, installed_changeset_revision ):
return self.sa_session.query( self.app.model.ToolShedRepository ) \
.filter( and_( self.app.model.ToolShedRepository.table.c.tool_shed == tool_shed,
@@ -160,7 +156,10 @@
def load_tool_tag_set( self, elem, panel_dict, tool_path, guid=None, section=None ):
try:
path = elem.get( "file" )
- if guid is not None:
+ if guid is None:
+ tool_shed_repository = None
+ can_load = True
+ else:
# The tool is contained in an installed tool shed repository, so load
# the tool only if the repository has not been marked deleted.
tool_shed = elem.find( "tool_shed" ).text
@@ -218,8 +217,6 @@
# If there is not yet a tool_shed_repository record, we're in the process of installing
# a new repository, so any included tools can be loaded into the tool panel.
can_load = True
- else:
- can_load = True
if can_load:
tool = self.load_tool( os.path.join( tool_path, path ), guid=guid )
if guid is not None:
@@ -230,6 +227,11 @@
tool.guid = guid
tool.old_id = elem.find( "id" ).text
tool.version = elem.find( "version" ).text
+ # Make sure the tool has a tool_version.
+ if not self.__get_tool_version( tool.id ):
+ tool_version = self.app.model.ToolVersion( tool_id=tool.id, tool_shed_repository=tool_shed_repository )
+ self.sa_session.add( tool_version )
+ self.sa_session.flush()
if self.app.config.get_bool( 'enable_tool_tags', False ):
tag_names = elem.get( "tags", "" ).split( "," )
for tag_name in tag_names:
@@ -542,13 +544,30 @@
# Parse XML element containing configuration
self.parse( root, guid=guid )
self.external_runJob_script = app.config.drmaa_external_runjob_script
-
@property
def sa_session( self ):
- """
- Returns a SQLAlchemy session
- """
+ """Returns a SQLAlchemy session"""
return self.app.model.context
+ @property
+ def tool_version( self ):
+ """Return a ToolVersion if one exists for our id"""
+ return self.sa_session.query( self.app.model.ToolVersion ) \
+ .filter( self.app.model.ToolVersion.table.c.tool_id == self.id ) \
+ .first()
+ @property
+ def tool_versions( self ):
+ # If we have versions, return them.
+ tool_version = self.tool_version
+ if tool_version:
+ return tool_version.get_versions( self.app )
+ return []
+ @property
+ def tool_version_ids( self ):
+ # If we have versions, return a list of their tool_ids.
+ tool_version = self.tool_version
+ if tool_version:
+ return tool_version.get_version_ids( self.app )
+ return []
def parse( self, root, guid=None ):
"""
Read tool configuration from the element `root` and fill in `self`.
diff -r f87dff286e86368fb33032ac567a5c9368dc990b -r e6464387ed3fce7f235cf5c9af1281da14a32811 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -110,6 +110,7 @@
dist_to_shed=dist_to_shed )
sa_session.add( tool_shed_repository )
sa_session.flush()
+ return tool_shed_repository
def generate_datatypes_metadata( datatypes_config, metadata_dict ):
"""
Update the received metadata_dict with changes that have been applied
@@ -358,15 +359,18 @@
tool = app.toolbox.load_tool( os.path.abspath( relative_path ), guid=guid )
repository_tools_tups.append( ( relative_path, guid, tool ) )
return repository_tools_tups
-def get_tool_id_guid_map( app, tool_id, version, tool_shed, repository_owner, repository_name ):
+def get_tool_version( app, tool_id ):
# This method is used by the InstallManager, which does not have access to trans.
sa_session = app.model.context.current
- return sa_session.query( app.model.ToolIdGuidMap ) \
- .filter( and_( app.model.ToolIdGuidMap.table.c.tool_id == tool_id,
- app.model.ToolIdGuidMap.table.c.tool_version == version,
- app.model.ToolIdGuidMap.table.c.tool_shed == tool_shed,
- app.model.ToolIdGuidMap.table.c.repository_owner == repository_owner,
- app.model.ToolIdGuidMap.table.c.repository_name == repository_name ) ) \
+ return sa_session.query( app.model.ToolVersion ) \
+ .filter( app.model.ToolVersion.table.c.tool_id == tool_id ) \
+ .first()
+def get_tool_version_association( app, parent_tool_version, tool_version ):
+ """Return a ToolVersionAssociation if one exists that associates the two received tool_versions"""
+ sa_session = app.model.context.current
+ return sa_session.query( app.model.ToolVersionAssociation ) \
+ .filter( and_( app.model.ToolVersionAssociation.table.c.parent_id == parent_tool_version.id,
+ app.model.ToolVersionAssociation.table.c.tool_id == tool_version.id ) ) \
.first()
def get_url_from_repository_tool_shed( app, repository ):
"""
@@ -472,6 +476,33 @@
error = tmp_stderr.read()
tmp_stderr.close()
log.debug( 'Problem installing dependencies for tool "%s"\n%s' % ( repository_tool.name, error ) )
+def handle_tool_versions( app, tool_versions, tool_shed_repository ):
+ """
+ This method is used by the InstallManager, which does not have access to trans. Using
+ the tool_versions dictionary retrieved from the tool shed, create the parent / child pairs
+ of tool versions. The tool_versions dictionary contains { tool id : parent tool id } pairs.
+ """
+ sa_session = app.model.context.current
+ for tool_guid, parent_id in tool_versions.items():
+ tool_version_using_tool_guid = get_tool_version( app, tool_guid )
+ tool_version_using_parent_id = get_tool_version( app, parent_id )
+ if not tool_version_using_tool_guid:
+ tool_version_using_tool_guid = app.model.ToolVersion( tool_id=tool_guid, tool_shed_repository=tool_shed_repository )
+ sa_session.add( tool_version_using_tool_guid )
+ sa_session.flush()
+ if not tool_version_using_parent_id:
+ tool_version_using_parent_id = app.model.ToolVersion( tool_id=parent_id, tool_shed_repository=tool_shed_repository )
+ sa_session.add( tool_version_using_parent_id )
+ sa_session.flush()
+ # Associate the two versions as parent / child.
+ tool_version_association = get_tool_version_association( app,
+ tool_version_using_parent_id,
+ tool_version_using_tool_guid )
+ if not tool_version_association:
+ tool_version_association = app.model.ToolVersionAssociation( tool_id=tool_version_using_tool_guid.id,
+ parent_id=tool_version_using_parent_id.id )
+ sa_session.add( tool_version_association )
+ sa_session.flush()
def load_datatype_items( app, repository, relative_install_dir, deactivate=False ):
# Load proprietary datatypes.
metadata = repository.metadata
@@ -638,13 +669,13 @@
# deleted, undelete it. It is imperative that this happens before the call to alter_tool_panel() below because
# tools will not be properly loaded if the repository is marked deleted.
log.debug( "Adding new row (or updating an existing row) for repository '%s' in the tool_shed_repository table." % repository_name )
- create_or_update_tool_shed_repository( app,
- repository_name,
- description,
- changeset_revision,
- repository_clone_url,
- metadata_dict,
- dist_to_shed=dist_to_shed )
+ tool_shed_repository = create_or_update_tool_shed_repository( app,
+ repository_name,
+ description,
+ changeset_revision,
+ repository_clone_url,
+ metadata_dict,
+ dist_to_shed=dist_to_shed )
if 'tools' in metadata_dict:
repository_tools_tups = get_repository_tools_tups( app, metadata_dict )
if repository_tools_tups:
@@ -696,7 +727,7 @@
if display_path:
# Load proprietary datatype display applications
app.datatypes_registry.load_display_applications( installed_repository_dict=repository_dict )
- return metadata_dict
+ return tool_shed_repository, metadata_dict
def alter_tool_panel( app, repository_name, repository_clone_url, changeset_revision, repository_tools_tups, tool_section,
shed_tool_conf, tool_path, owner, new_install=True, deactivate=False, uninstall=False ):
"""
diff -r f87dff286e86368fb33032ac567a5c9368dc990b -r e6464387ed3fce7f235cf5c9af1281da14a32811 lib/galaxy/web/controllers/admin_toolshed.py
--- a/lib/galaxy/web/controllers/admin_toolshed.py
+++ b/lib/galaxy/web/controllers/admin_toolshed.py
@@ -1,58 +1,11 @@
+import urllib2
from galaxy.web.controllers.admin import *
+from galaxy.util.json import from_json_string, to_json_string
from galaxy.util.shed_util import *
from galaxy import tools
log = logging.getLogger( __name__ )
-class ToolIdGuidMapGrid( grids.Grid ):
- class ToolIdColumn( grids.TextColumn ):
- def get_value( self, trans, grid, tool_id_guid_map ):
- return tool_id_guid_map.tool_id
- class ToolVersionColumn( grids.TextColumn ):
- def get_value( self, trans, grid, tool_id_guid_map ):
- return tool_id_guid_map.tool_version
- class ToolGuidColumn( grids.TextColumn ):
- def get_value( self, trans, grid, tool_id_guid_map ):
- return tool_id_guid_map.guid
- class ToolShedColumn( grids.TextColumn ):
- def get_value( self, trans, grid, tool_id_guid_map ):
- return tool_id_guid_map.tool_shed
- class RepositoryNameColumn( grids.TextColumn ):
- def get_value( self, trans, grid, tool_id_guid_map ):
- return tool_id_guid_map.repository_name
- class RepositoryOwnerColumn( grids.TextColumn ):
- def get_value( self, trans, grid, tool_id_guid_map ):
- return tool_id_guid_map.repository_owner
- # Grid definition
- title = "Map tool id to guid"
- model_class = model.ToolIdGuidMap
- template='/admin/tool_shed_repository/grid.mako'
- default_sort_key = "tool_id"
- columns = [
- ToolIdColumn( "Tool id" ),
- ToolVersionColumn( "Version" ),
- ToolGuidColumn( "Guid" ),
- ToolShedColumn( "Tool shed" ),
- RepositoryNameColumn( "Repository name" ),
- RepositoryOwnerColumn( "Repository owner" )
- ]
- columns.append( grids.MulticolFilterColumn( "Search repository name",
- cols_to_filter=[ columns[0], columns[2], columns[4], columns[5] ],
- key="free-text-search",
- visible=False,
- filterable="standard" ) )
- global_actions = [
- grids.GridAction( "Manage installed tool shed repositories", dict( controller='admin_toolshed', action='browse_repositories' ) )
- ]
- operations = []
- standard_filters = []
- default_filter = {}
- num_rows_per_page = 50
- preserve_state = False
- use_paging = True
- def build_initial_query( self, trans, **kwd ):
- return trans.sa_session.query( self.model_class )
-
class RepositoryListGrid( grids.Grid ):
class NameColumn( grids.TextColumn ):
def get_value( self, trans, grid, tool_shed_repository ):
@@ -96,9 +49,7 @@
key="free-text-search",
visible=False,
filterable="standard" ) )
- global_actions = [
- grids.GridAction( "View tool id guid map", dict( controller='admin_toolshed', action='browse_tool_id_guid_map' ) )
- ]
+ global_actions = []
operations = [ grids.GridOperation( "Get updates",
allow_multiple=False,
condition=( lambda item: not item.deleted ),
@@ -122,14 +73,9 @@
class AdminToolshed( AdminGalaxy ):
repository_list_grid = RepositoryListGrid()
- tool_id_guid_map_grid = ToolIdGuidMapGrid()
@web.expose
@web.require_admin
- def browse_tool_id_guid_map( self, trans, **kwd ):
- return self.tool_id_guid_map_grid( trans, **kwd )
- @web.expose
- @web.require_admin
def browse_repository( self, trans, **kwd ):
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
@@ -267,21 +213,38 @@
if returncode == 0:
owner = get_repository_owner( clean_repository_clone_url( repository_clone_url ) )
tool_shed = clean_tool_shed_url( tool_shed_url )
- metadata_dict = load_repository_contents( app=trans.app,
- repository_name=name,
- description=description,
- owner=owner,
- changeset_revision=changeset_revision,
- tool_path=tool_path,
- repository_clone_url=repository_clone_url,
- relative_install_dir=relative_install_dir,
- current_working_dir=current_working_dir,
- tmp_name=tmp_name,
- tool_shed=tool_shed,
- tool_section=tool_section,
- shed_tool_conf=shed_tool_conf,
- new_install=True,
- dist_to_shed=False )
+ tool_shed_repository, metadata_dict = load_repository_contents( app=trans.app,
+ repository_name=name,
+ description=description,
+ owner=owner,
+ changeset_revision=changeset_revision,
+ tool_path=tool_path,
+ repository_clone_url=repository_clone_url,
+ relative_install_dir=relative_install_dir,
+ current_working_dir=current_working_dir,
+ tmp_name=tmp_name,
+ tool_shed=tool_shed,
+ tool_section=tool_section,
+ shed_tool_conf=shed_tool_conf,
+ new_install=True,
+ dist_to_shed=False )
+
+
+ if 'tools' in metadata_dict:
+ # Get the tool_versions from the tool shed for each tool in the installed change set.
+ url = '%s/repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s&webapp=galaxy' % \
+ ( tool_shed_url, name, owner, changeset_revision )
+ response = urllib2.urlopen( url )
+ text = response.read()
+ response.close()
+ if text:
+ tool_versions_dict = from_json_string( text )
+ handle_tool_versions( trans.app, tool_versions_dict, tool_shed_repository )
+ else:
+ message += "Version information for the tools included in the <b>%s</b> repository is missing. " % name
+ message += "Reset all of this repository's metadata in the tool shed, then set the installed tool versions "
+ message += "from the installed repository's <b>Repository Actions</b> menu. "
+ status = 'error'
installed_repository_names.append( name )
else:
tmp_stderr = open( tmp_name, 'rb' )
@@ -293,7 +256,7 @@
message += '%s<br/>' % tmp_stderr.read()
tmp_stderr.close()
status = 'error'
- if installed_repository_names:
+ if installed_repository_names:
installed_repository_names.sort()
num_repositories_installed = len( installed_repository_names )
if tool_section:
@@ -332,6 +295,35 @@
status=status )
@web.expose
@web.require_admin
+ def set_tool_versions( self, trans, **kwd ):
+ # Get the tool_versions from the tool shed for each tool in the installed change set.
+ repository = get_repository( trans, kwd[ 'id' ] )
+ tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository )
+ url = '%s/repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s&webapp=galaxy' % \
+ ( tool_shed_url, repository.name, repository.owner, repository.changeset_revision )
+ response = urllib2.urlopen( url )
+ text = response.read()
+ response.close()
+ if text:
+ tool_versions_dict = from_json_string( text )
+ handle_tool_versions( trans.app, tool_versions_dict, repository )
+ message = "Tool versions have been set for all included tools."
+ status = 'done'
+ else:
+ message = "Version information for the tools included in the <b>%s</b> repository is missing. " % repository.name
+ message += "Reset all of this reppository's metadata in the tool shed, then set the installed tool versions "
+ message ++ "from the installed repository's <b>Repository Actions</b> menu. "
+ status = 'error'
+ shed_tool_conf, tool_path, relative_install_dir = self.__get_tool_path_and_relative_install_dir( trans, repository )
+ repo_files_dir = os.path.abspath( os.path.join( relative_install_dir, repository.name ) )
+ return trans.fill_template( '/admin/tool_shed_repository/manage_repository.mako',
+ repository=repository,
+ description=repository.description,
+ repo_files_dir=repo_files_dir,
+ message=message,
+ status=status )
+ @web.expose
+ @web.require_admin
def deactivate_or_uninstall_repository( self, trans, **kwd ):
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
@@ -392,17 +384,6 @@
log.debug( "Removed repository installation directory: %s" % str( relative_install_dir ) )
except Exception, e:
log.debug( "Error removing repository installation directory %s: %s" % ( str( relative_install_dir ), str( e ) ) )
- # If the repository was installed by the InstallManager, remove
- # all appropriate rows from the tool_id_guid_map database table.
- if repository.dist_to_shed:
- count = 0
- for tool_id_guid_map in trans.sa_session.query( trans.model.ToolIdGuidMap ) \
- .filter( and_( trans.model.ToolIdGuidMap.table.c.tool_shed==repository.tool_shed,
- trans.model.ToolIdGuidMap.table.c.repository_owner==repository.owner,
- trans.model.ToolIdGuidMap.table.c.repository_name==repository.name ) ):
- trans.sa_session.delete( tool_id_guid_map )
- count += 1
- log.debug( "Removed %d rows from the tool_id_guid_map database table." % count )
repository.uninstalled = True
repository.deleted = True
trans.sa_session.add( repository )
@@ -490,21 +471,21 @@
elem.attrib[ 'version' ] = ''
tool_section = tools.ToolSection( elem )
trans.app.toolbox.tool_panel[ section_key ] = tool_section
- metadata_dict = load_repository_contents( app=trans.app,
- repository_name=repository.name,
- description=repository.description,
- owner=repository.owner,
- changeset_revision=repository.installed_changeset_revision,
- tool_path=tool_path,
- repository_clone_url=repository_clone_url,
- relative_install_dir=relative_install_dir,
- current_working_dir=current_working_dir,
- tmp_name=tmp_name,
- tool_shed=repository.tool_shed,
- tool_section=tool_section,
- shed_tool_conf=shed_tool_conf,
- new_install=True,
- dist_to_shed=False )
+ tool_shed_repository, metadata_dict = load_repository_contents( app=trans.app,
+ repository_name=repository.name,
+ description=repository.description,
+ owner=repository.owner,
+ changeset_revision=repository.installed_changeset_revision,
+ tool_path=tool_path,
+ repository_clone_url=repository_clone_url,
+ relative_install_dir=relative_install_dir,
+ current_working_dir=current_working_dir,
+ tmp_name=tmp_name,
+ tool_shed=repository.tool_shed,
+ tool_section=tool_section,
+ shed_tool_conf=shed_tool_conf,
+ new_install=True,
+ dist_to_shed=False )
repository.uninstalled = False
repository.deleted = False
trans.sa_session.add( repository )
@@ -518,6 +499,22 @@
metadata = repository.metadata
repository_tools_tups = get_repository_tools_tups( trans.app, metadata )
guids_to_activate = [ repository_tool_tup[1] for repository_tool_tup in repository_tools_tups ]
+ # Undelete the tool_version for each guid.
+ for guid_to_activate in guids_to_activate:
+ tool_version = get_tool_version( trans.app, guid_to_activate )
+ if tool_version:
+ if tool_version.deleted:
+ # This should not happen as we are currently not marking tool versions as deleted
+ # upon deactivation. We may decide to eliminate the tool_version.deleted column
+ # at some point, but we'll keep it for now in case we decide it's useful.
+ tool_version.deleted = False
+ trans.sa_session.add( tool_version )
+ trans.sa_session.flush()
+ else:
+ # We're somehow missing a tool_version, so create a new one.
+ tool_version = trans.model.ToolVersion( tool_id=guid_to_activate, tool_shed_repository=repository )
+ trans.sa_session.add( tool_version )
+ trans.sa_session.flush()
tool_panel_section = metadata[ 'tool_panel_section' ]
original_section_id = tool_panel_section[ 'id' ]
if original_section_id in [ '' ]:
@@ -653,8 +650,8 @@
# Send a request to the relevant tool shed to see if there are any updates.
repository = get_repository( trans, kwd[ 'id' ] )
tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository )
- url = '%s/repository/check_for_updates?galaxy_url=%s&name=%s&owner=%s&changeset_revision=%s&webapp=galaxy' % \
- ( tool_shed_url, url_for( '', qualified=True ), repository.name, repository.owner, repository.changeset_revision )
+ url = '%s/repository/check_for_updates?galaxy_url=%s&name=%s&owner=%s&changeset_revision=%s&webapp=galaxy' % \
+ ( tool_shed_url, url_for( '/', qualified=True ), repository.name, repository.owner, repository.changeset_revision )
return trans.response.send_redirect( url )
@web.expose
@web.require_admin
@@ -684,20 +681,20 @@
# Update the repository metadata.
repository_clone_url = os.path.join( tool_shed_url, 'repos', owner, name )
tool_shed = clean_tool_shed_url( tool_shed_url )
- metadata_dict = load_repository_contents( app=trans.app,
- name=name,
- description=repository.description,
- owner=owner,
- changeset_revision=changeset_revision,
- tool_path=tool_path,
- repository_clone_url=repository_clone_url,
- relative_install_dir=relative_install_dir,
- current_working_dir=current_working_dir,
- tmp_name=tmp_name,
- tool_shed=tool_shed,
- tool_section=None,
- shed_tool_conf=None,
- new_install=False )
+ tool_shed_repository, metadata_dict = load_repository_contents( app=trans.app,
+ name=name,
+ description=repository.description,
+ owner=owner,
+ changeset_revision=changeset_revision,
+ tool_path=tool_path,
+ repository_clone_url=repository_clone_url,
+ relative_install_dir=relative_install_dir,
+ current_working_dir=current_working_dir,
+ tmp_name=tmp_name,
+ tool_shed=tool_shed,
+ tool_section=None,
+ shed_tool_conf=None,
+ new_install=False )
# Update the repository changeset_revision in the database.
repository.changeset_revision = latest_changeset_revision
repository.update_available = False
diff -r f87dff286e86368fb33032ac567a5c9368dc990b -r e6464387ed3fce7f235cf5c9af1281da14a32811 lib/galaxy/web/controllers/workflow.py
--- a/lib/galaxy/web/controllers/workflow.py
+++ b/lib/galaxy/web/controllers/workflow.py
@@ -1220,7 +1220,7 @@
for shed_name, shed_url in trans.app.tool_shed_registry.tool_sheds.items():
if shed_url.endswith( '/' ):
shed_url = shed_url.rstrip( '/' )
- url = '%s/repository/find_tools?galaxy_url=%s&webapp=%s' % ( shed_url, url_for( '', qualified=True ), webapp )
+ url = '%s/repository/find_tools?galaxy_url=%s&webapp=%s' % ( shed_url, url_for( '/', qualified=True ), webapp )
if missing_tool_tups:
url += '&tool_id='
for missing_tool_tup in missing_tool_tups:
diff -r f87dff286e86368fb33032ac567a5c9368dc990b -r e6464387ed3fce7f235cf5c9af1281da14a32811 lib/galaxy/webapps/community/controllers/common.py
--- a/lib/galaxy/webapps/community/controllers/common.py
+++ b/lib/galaxy/webapps/community/controllers/common.py
@@ -488,6 +488,7 @@
repo = hg.repository( get_configured_ui(), repo_dir )
if len( repo ) == 1:
message, status = set_repository_metadata( trans, id, repository.tip, **kwd )
+ add_repository_metadata_tool_versions( trans, id, [ repository.tip ] )
else:
# The list of changeset_revisions refers to repository_metadata records that have been
# created or updated. When the following loop completes, we'll delete all repository_metadata
@@ -543,6 +544,7 @@
ancestor_changeset_revision = None
ancestor_metadata_dict = None
clean_repository_metadata( trans, id, changeset_revisions )
+ add_repository_metadata_tool_versions( trans, id, changeset_revisions )
def clean_repository_metadata( trans, id, changeset_revisions ):
# Delete all repository_metadata records associated with the repository
# that have a changeset_revision that is not in changeset_revisions.
@@ -551,6 +553,47 @@
if repository_metadata.changeset_revision not in changeset_revisions:
trans.sa_session.delete( repository_metadata )
trans.sa_session.flush()
+def add_repository_metadata_tool_versions( trans, id, changeset_revisions ):
+ # If a repository includes tools, build a dictionary of { 'tool id' : 'parent tool id' }
+ # pairs for each tool in each changeset revision.
+ for index, changeset_revision in enumerate( changeset_revisions ):
+ tool_versions_dict = {}
+ repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
+ metadata = repository_metadata.metadata
+ tool_dicts = metadata.get( 'tools', [] )
+ if index == 0:
+ # The first changeset_revision is a special case because it will have no ancestor
+ # changeset_revisions in which to match tools. The parent tool id for tools in
+ # the first changeset_revision will be the "old_id" in the tool config.
+ for tool_dict in tool_dicts:
+ tool_versions_dict[ tool_dict[ 'guid' ] ] = tool_dict[ 'id' ]
+ else:
+ for tool_dict in tool_dicts:
+ # We have at least 2 changeset revisions to compare tool guids and tool ids.
+ parent_id = get_parent_id( trans, id, tool_dict[ 'id' ], tool_dict[ 'version' ], tool_dict[ 'guid' ], changeset_revisions[ 0:index ] )
+ tool_versions_dict[ tool_dict[ 'guid' ] ] = parent_id
+ if tool_versions_dict:
+ repository_metadata.tool_versions = tool_versions_dict
+ trans.sa_session.add( repository_metadata )
+ trans.sa_session.flush()
+def get_parent_id( trans, id, old_id, version, guid, changeset_revisions ):
+ parent_id = None
+ # Compare from most recent to oldest.
+ changeset_revisions.reverse()
+ for changeset_revision in changeset_revisions:
+ repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
+ metadata = repository_metadata.metadata
+ tools_dicts = metadata.get( 'tools', [] )
+ for tool_dict in tools_dicts:
+ if tool_dict[ 'guid' ] == guid:
+ # The tool has not changed between the compared changeset revisions.
+ continue
+ if tool_dict[ 'id' ] == old_id and tool_dict[ 'version' ] != version:
+ # The tool version is different, so we've found the parent.
+ return tool_dict[ 'guid' ]
+ if parent_id is None:
+ # The tool did not change through all of the changeset revisions.
+ return old_id
def create_or_update_repository_metadata( trans, id, repository, changeset_revision, metadata_dict ):
repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
if repository_metadata:
diff -r f87dff286e86368fb33032ac567a5c9368dc990b -r e6464387ed3fce7f235cf5c9af1281da14a32811 lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -438,7 +438,7 @@
galaxy_url = trans.get_cookie( name='toolshedgalaxyurl' )
encoded_repo_info_dict, includes_tools = self.__encode_repo_info_dict( trans, webapp, util.listify( item_id ) )
url = '%sadmin_toolshed/install_repository?tool_shed_url=%s&webapp=%s&repo_info_dict=%s&includes_tools=%s' % \
- ( galaxy_url, url_for( '', qualified=True ), webapp, encoded_repo_info_dict, str( includes_tools ) )
+ ( galaxy_url, url_for( '/', qualified=True ), webapp, encoded_repo_info_dict, str( includes_tools ) )
return trans.response.send_redirect( url )
else:
# This can only occur when there is a multi-select grid with check boxes and an operation,
@@ -514,7 +514,7 @@
galaxy_url = trans.get_cookie( name='toolshedgalaxyurl' )
encoded_repo_info_dict, includes_tools = self.__encode_repo_info_dict( trans, webapp, util.listify( item_id ) )
url = '%sadmin_toolshed/install_repository?tool_shed_url=%s&webapp=%s&repo_info_dict=%s&includes_tools=%s' % \
- ( galaxy_url, url_for( '', qualified=True ), webapp, encoded_repo_info_dict, str( includes_tools ) )
+ ( galaxy_url, url_for( '/', qualified=True ), webapp, encoded_repo_info_dict, str( includes_tools ) )
return trans.response.send_redirect( url )
else:
# This can only occur when there is a multi-select grid with check boxes and an operation,
@@ -770,9 +770,19 @@
encoded_repo_info_dict = encode( repo_info_dict )
# Redirect back to local Galaxy to perform install.
url = '%sadmin_toolshed/install_repository?tool_shed_url=%s&repo_info_dict=%s&includes_tools=%s' % \
- ( galaxy_url, url_for( '', qualified=True ), encoded_repo_info_dict, str( includes_tools ) )
+ ( galaxy_url, url_for( '/', qualified=True ), encoded_repo_info_dict, str( includes_tools ) )
return trans.response.send_redirect( url )
@web.expose
+ def get_tool_versions( self, trans, **kwd ):
+ name = kwd[ 'name' ]
+ owner = kwd[ 'owner' ]
+ changeset_revision = kwd[ 'changeset_revision' ]
+ repository = get_repository_by_name_and_owner( trans, name, owner )
+ repository_metadata = get_repository_metadata_by_changeset_revision( trans, trans.security.encode_id( repository.id ), changeset_revision )
+ if repository_metadata.tool_versions:
+ return to_json_string( repository_metadata.tool_versions )
+ return ''
+ @web.expose
def check_for_updates( self, trans, **kwd ):
# Handle a request from a local Galaxy instance. If the request originated with the
# Galaxy instances' UpdateManager, the value of 'webapp' will be 'update_manager'.
@@ -792,7 +802,7 @@
no_update = 'false'
else:
# Start building up the url to redirect back to the calling Galaxy instance.
- url = '%sadmin_toolshed/update_to_changeset_revision?tool_shed_url=%s' % ( galaxy_url, url_for( '', qualified=True ) )
+ url = '%sadmin_toolshed/update_to_changeset_revision?tool_shed_url=%s' % ( galaxy_url, url_for( '/', qualified=True ) )
url += '&name=%s&owner=%s&changeset_revision=%s&latest_changeset_revision=' % \
( repository.name, repository.user.username, changeset_revision )
if changeset_revision == repository.tip:
diff -r f87dff286e86368fb33032ac567a5c9368dc990b -r e6464387ed3fce7f235cf5c9af1281da14a32811 lib/galaxy/webapps/community/controllers/workflow.py
--- a/lib/galaxy/webapps/community/controllers/workflow.py
+++ b/lib/galaxy/webapps/community/controllers/workflow.py
@@ -402,8 +402,8 @@
to_file.write( to_json_string( workflow_data ) )
return open( tmp_fname )
galaxy_url = trans.get_cookie( name='toolshedgalaxyurl' )
- url = '%s/workflow/import_workflow?tool_shed_url=%s&repository_metadata_id=%s&workflow_name=%s&webapp=%s' % \
- ( galaxy_url, url_for( '', qualified=True ), repository_metadata_id, encode( workflow_name ), webapp )
+ url = '%sworkflow/import_workflow?tool_shed_url=%s&repository_metadata_id=%s&workflow_name=%s&webapp=%s' % \
+ ( galaxy_url, url_for( '/', qualified=True ), repository_metadata_id, encode( workflow_name ), webapp )
return trans.response.send_redirect( url )
return trans.response.send_redirect( web.url_for( controller='workflow',
action='view_workflow',
diff -r f87dff286e86368fb33032ac567a5c9368dc990b -r e6464387ed3fce7f235cf5c9af1281da14a32811 lib/galaxy/webapps/community/model/__init__.py
--- a/lib/galaxy/webapps/community/model/__init__.py
+++ b/lib/galaxy/webapps/community/model/__init__.py
@@ -162,10 +162,11 @@
fp.close()
class RepositoryMetadata( object ):
- def __init__( self, repository_id=None, changeset_revision=None, metadata=None, malicious=False ):
+ def __init__( self, repository_id=None, changeset_revision=None, metadata=None, tool_versions=None, malicious=False ):
self.repository_id = repository_id
self.changeset_revision = changeset_revision
self.metadata = metadata or dict()
+ self.tool_versions = tool_versions or dict()
self.malicious = malicious
class ItemRatingAssociation( object ):
diff -r f87dff286e86368fb33032ac567a5c9368dc990b -r e6464387ed3fce7f235cf5c9af1281da14a32811 lib/galaxy/webapps/community/model/mapping.py
--- a/lib/galaxy/webapps/community/model/mapping.py
+++ b/lib/galaxy/webapps/community/model/mapping.py
@@ -119,6 +119,7 @@
Column( "repository_id", Integer, ForeignKey( "repository.id" ), index=True ),
Column( "changeset_revision", TrimmedString( 255 ), index=True ),
Column( "metadata", JSONType, nullable=True ),
+ Column( "tool_versions", JSONType, nullable=True ),
Column( "malicious", Boolean, default=False ) )
RepositoryRatingAssociation.table = Table( "repository_rating_association", metadata,
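As a rough sketch (not part of this changeset), the new JSONType column could be written through the model layer as shown below. The dictionary layout is purely hypothetical, since the commit does not show how tool_versions is populated; only the attribute name, the session calls, and the get_repository_metadata_by_changeset_revision helper come from the diffs above.
def record_tool_versions( trans, repository, changeset_revision, tool_versions ):
    # Hypothetical helper: attach a tool_versions dict to the repository_metadata
    # record for the given changeset revision and flush it to the database.
    repository_metadata = get_repository_metadata_by_changeset_revision(
        trans, trans.security.encode_id( repository.id ), changeset_revision )
    repository_metadata.tool_versions = tool_versions or {}
    trans.sa_session.add( repository_metadata )
    trans.sa_session.flush()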
diff -r f87dff286e86368fb33032ac567a5c9368dc990b -r e6464387ed3fce7f235cf5c9af1281da14a32811 lib/galaxy/webapps/community/model/migrate/versions/0011_add_tool_versions_column.py
--- /dev/null
+++ b/lib/galaxy/webapps/community/model/migrate/versions/0011_add_tool_versions_column.py
@@ -0,0 +1,46 @@
+"""
+Migration script to add the tool_versions column to the repository_metadata table.
+"""
+
+from sqlalchemy import *
+from sqlalchemy.orm import *
+from migrate import *
+from migrate.changeset import *
+
+import datetime
+now = datetime.datetime.utcnow
+# Need our custom types, but don't import anything else from model
+from galaxy.model.custom_types import *
+
+import sys, logging
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+metadata = MetaData( migrate_engine )
+db_session = scoped_session( sessionmaker( bind=migrate_engine, autoflush=False, autocommit=True ) )
+
+def upgrade():
+ print __doc__
+ metadata.reflect()
+ RepositoryMetadata_table = Table( "repository_metadata", metadata, autoload=True )
+ c = Column( "tool_versions", JSONType, nullable=True )
+ try:
+ # Create
+ c.create( RepositoryMetadata_table )
+ assert c is RepositoryMetadata_table.c.tool_versions
+ except Exception, e:
+ print "Adding tool_versions column to the repository_metadata table failed: %s" % str( e )
+
+def downgrade():
+ metadata.reflect()
+ # Drop the tool_versions column from the repository_metadata table.
+ RepositoryMetadata_table = Table( "repository_metadata", metadata, autoload=True )
+ try:
+ RepositoryMetadata_table.c.tool_versions.drop()
+ except Exception, e:
+ print "Dropping column tool_versions from the repository_metadata table failed: %s" % str( e )
diff -r f87dff286e86368fb33032ac567a5c9368dc990b -r e6464387ed3fce7f235cf5c9af1281da14a32811 lib/galaxy/workflow/modules.py
--- a/lib/galaxy/workflow/modules.py
+++ b/lib/galaxy/workflow/modules.py
@@ -194,17 +194,19 @@
@classmethod
def from_workflow_step( Class, trans, step ):
tool_id = step.tool_id
- install_tool_id = None
if trans.app.toolbox and tool_id not in trans.app.toolbox.tools_by_id:
- # Handle the case where the tool was used when the tool was included in the Galaxy distribution,
- # but now the tool is contained in an installed tool shed repository. In this case, the original
- # tool id can be mapped to the new tool id, which is the tool's guid in the tool shed repository.
- tool_id_guid_map = trans.sa_session.query( trans.model.ToolIdGuidMap ) \
- .filter( trans.model.ToolIdGuidMap.table.c.tool_id == tool_id ) \
- .first()
- if tool_id_guid_map:
- install_tool_id = tool_id_guid_map.guid
- if ( trans.app.toolbox and tool_id in trans.app.toolbox.tools_by_id ) or install_tool_id:
+ # See if we have access to a different version of the tool.
+ # TODO: If workflows are ever enhanced to use tool version
+ # in addition to tool id, enhance the selection process here
+ # to retrieve the correct version of the tool.
+ tool_version = self.__get_tool_version( trans, tool_id )
+ if tool_version:
+ tool_version_ids = tool_version.get_version_ids( trans.app )
+ for tool_version_id in tool_version_ids:
+ if tool_version_id in trans.app.toolbox.tools_by_id:
+ tool_id = tool_version_id
+ break
+ if ( trans.app.toolbox and tool_id in trans.app.toolbox.tools_by_id ):
module = Class( trans, tool_id )
module.state = DefaultToolState()
module.state.inputs = module.tool.params_from_strings( step.tool_inputs, trans.app, ignore_errors=True )
@@ -217,6 +219,11 @@
module.post_job_actions = pjadict
return module
return None
+ def __get_tool_version( self, trans, tool_id ):
+ # Return a ToolVersion if one exists for tool_id.
+ return trans.sa_session.query( trans.app.model.ToolVersion ) \
+ .filter( trans.app.model.ToolVersion.table.c.tool_id == tool_id ) \
+ .first()
def save_to_step( self, step ):
step.type = self.type
step.tool_id = self.tool_id
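For readers tracing the new fallback logic above, here is a minimal standalone sketch of the same lookup (not part of this changeset). It reuses only names that appear in the diff (trans.app.toolbox.tools_by_id, trans.app.model.ToolVersion, get_version_ids), and because from_workflow_step() is a classmethod, the sketch takes trans explicitly rather than going through self.
def find_available_tool_id( trans, tool_id ):
    # Return tool_id itself if the toolbox already has it, otherwise the id of
    # any other loaded version of the same tool, otherwise None.
    if trans.app.toolbox and tool_id in trans.app.toolbox.tools_by_id:
        return tool_id
    tool_version = trans.sa_session.query( trans.app.model.ToolVersion ) \
                                   .filter( trans.app.model.ToolVersion.table.c.tool_id == tool_id ) \
                                   .first()
    if tool_version:
        for tool_version_id in tool_version.get_version_ids( trans.app ):
            if tool_version_id in trans.app.toolbox.tools_by_id:
                return tool_version_id
    return None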
diff -r f87dff286e86368fb33032ac567a5c9368dc990b -r e6464387ed3fce7f235cf5c9af1281da14a32811 templates/admin/tool_shed_repository/deactivate_or_uninstall_repository.mako
--- a/templates/admin/tool_shed_repository/deactivate_or_uninstall_repository.mako
+++ b/templates/admin/tool_shed_repository/deactivate_or_uninstall_repository.mako
@@ -84,11 +84,6 @@
<div class="toolParamHelp" style="clear: both;">
* The repository record's uninstalled column in the tool_shed_repository database table will be set to True.
</div>
- %if repository.dist_to_shed:
- <div class="toolParamHelp" style="clear: both;">
- * All records associated with this repository will be eliminated from the tool_id_guid_map database table.
- </div>
- %endif
<div style="clear: both"></div></div><div class="form-row">
diff -r f87dff286e86368fb33032ac567a5c9368dc990b -r e6464387ed3fce7f235cf5c9af1281da14a32811 templates/admin/tool_shed_repository/manage_repository.mako
--- a/templates/admin/tool_shed_repository/manage_repository.mako
+++ b/templates/admin/tool_shed_repository/manage_repository.mako
@@ -8,6 +8,9 @@
<div popupmenu="repository-${repository.id}-popup"><a class="action-button" href="${h.url_for( controller='admin_toolshed', action='browse_repository', id=trans.security.encode_id( repository.id ) )}">Browse repository</a><a class="action-button" href="${h.url_for( controller='admin_toolshed', action='check_for_updates', id=trans.security.encode_id( repository.id ) )}">Get updates</a>
+ %if repository.includes_tools:
+ <a class="action-button" href="${h.url_for( controller='admin_toolshed', action='set_tool_versions', id=trans.security.encode_id( repository.id ) )}">Set tool versions</a>
+ %endif
<a class="action-button" href="${h.url_for( controller='admin_toolshed', action='deactivate_or_uninstall_repository', id=trans.security.encode_id( repository.id ) )}">Deactivate or Uninstall</a></div></ul>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: natefoo: Use cluster_files_directory for drmaa job template.
by Bitbucket 03 Feb '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/f87dff286e86/
changeset: f87dff286e86
user: natefoo
date: 2012-02-03 18:22:16
summary: Use cluster_files_directory for drmaa job template.
affected #: 1 file
diff -r 0b7ae34da7fbc3a66f21eac243af22965c73e7cc -r f87dff286e86368fb33032ac567a5c9368dc990b lib/galaxy/jobs/runners/drmaa.py
--- a/lib/galaxy/jobs/runners/drmaa.py
+++ b/lib/galaxy/jobs/runners/drmaa.py
@@ -169,7 +169,7 @@
ofile = "%s.drmout" % os.path.join(os.getcwd(), job_wrapper.working_directory, job_wrapper.get_id_tag())
efile = "%s.drmerr" % os.path.join(os.getcwd(), job_wrapper.working_directory, job_wrapper.get_id_tag())
jt = self.ds.createJobTemplate()
- jt.remoteCommand = "%s/database/pbs/galaxy_%s.sh" % (os.getcwd(), job_wrapper.get_id_tag())
+ jt.remoteCommand = "%s/galaxy_%s.sh" % (self.app.config.cluster_files_directory, job_wrapper.get_id_tag())
jt.outputPath = ":%s" % ofile
jt.errorPath = ":%s" % efile
native_spec = self.get_native_spec( runner_url )
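As a quick illustration (not part of this changeset) of what the one-line change above does to the generated script path, assuming cluster_files_directory keeps its sample default of 'database/pbs' relative to the Galaxy root:
import os
def drm_script_paths( cluster_files_directory, id_tag ):
    # The old form hard-coded database/pbs under the current working directory;
    # the new form honors whatever cluster_files_directory is configured.
    old_path = "%s/database/pbs/galaxy_%s.sh" % ( os.getcwd(), id_tag )
    new_path = "%s/galaxy_%s.sh" % ( cluster_files_directory, id_tag )
    return old_path, new_path
With the assumed default the two resolve to the same script when Galaxy runs from its root directory; the point of the change is that a non-default cluster_files_directory is now respected.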
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/1327f2219423/
changeset: 1327f2219423
branch: meme_requirement_on_fimo
user: dan
date: 2012-02-03 17:15:29
summary: close branch
affected #: 0 files
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
03 Feb '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/8ba74c17e809/
changeset: 8ba74c17e809
branch: meme_requirement_on_fimo
user: jmchilton
date: 2012-02-03 04:29:30
summary: Specify meme requirement for fimo tool.
affected #: 1 file
diff -r 87d5491cc3fef55931c09eb6bd688d1b10e946f5 -r 8ba74c17e8091e448ea448c563d883bb35102283 tools/meme/fimo.xml
--- a/tools/meme/fimo.xml
+++ b/tools/meme/fimo.xml
@@ -1,4 +1,5 @@
<tool id="meme_fimo" name="FIMO" version="0.0.1">
+ <requirements><requirement type="package">meme</requirement></requirements>
<description>- Find Individual Motif Occurrences</description>
<command interpreter="python">fimo_wrapper.py 'fimo --o "${$html_outfile.files_path}" --verbosity "1"
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: dannon: Slight cleanup of the previous workflow share removal commit.
by Bitbucket 02 Feb '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/87d5491cc3fe/
changeset: 87d5491cc3fe
user: dannon
date: 2012-02-02 21:16:29
summary: Slight cleanup of the previous workflow share removal commit.
affected #: 1 file
diff -r 0a001dea361385d107989740582f855aa2b71dc4 -r 87d5491cc3fef55931c09eb6bd688d1b10e946f5 lib/galaxy/web/controllers/workflow.py
--- a/lib/galaxy/web/controllers/workflow.py
+++ b/lib/galaxy/web/controllers/workflow.py
@@ -290,9 +290,7 @@
@web.require_login( "use Galaxy workflows" )
def sharing( self, trans, id, **kwargs ):
""" Handle workflow sharing. """
-
session = trans.sa_session
-
if 'unshare_me' in kwargs:
# Remove self from shared associations with workflow.
stored = self.get_stored_workflow(trans, id, False, True)
@@ -332,9 +330,8 @@
if stored.importable and not stored.slug:
self._make_item_accessible( trans.sa_session, stored )
- session.flush()
-
- return trans.fill_template( "/workflow/sharing.mako", use_panels=True, item=stored )
+ session.flush()
+ return trans.fill_template( "/workflow/sharing.mako", use_panels=True, item=stored )
@web.expose
@web.require_login( "to import a workflow", use_panels=True )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/3b2acb89495b/
changeset: 3b2acb89495b
user: dannon
date: 2012-02-02 20:53:34
summary: Workflows: Add the ability to remove workflows that have been shared with you.
affected #: 2 files
diff -r 50bf77aa34c3689a122187d7ba9b55deb348df92 -r 3b2acb89495bee2355f20fc3954baa174d486a3c lib/galaxy/web/controllers/workflow.py
--- a/lib/galaxy/web/controllers/workflow.py
+++ b/lib/galaxy/web/controllers/workflow.py
@@ -22,11 +22,11 @@
from galaxy.web.framework.helpers import to_unicode
from galaxy.jobs.actions.post import ActionBox
-class StoredWorkflowListGrid( grids.Grid ):
+class StoredWorkflowListGrid( grids.Grid ):
class StepsColumn( grids.GridColumn ):
def get_value(self, trans, grid, workflow):
return len( workflow.latest_workflow.steps )
-
+
# Grid definition
use_panels = True
title = "Saved Workflows"
@@ -40,10 +40,10 @@
grids.GridColumn( "Created", key="create_time", format=time_ago ),
grids.GridColumn( "Last Updated", key="update_time", format=time_ago ),
]
- columns.append(
- grids.MulticolFilterColumn(
- "Search",
- cols_to_filter=[ columns[0], columns[1] ],
+ columns.append(
+ grids.MulticolFilterColumn(
+ "Search",
+ cols_to_filter=[ columns[0], columns[1] ],
key="free-text-search", visible=False, filterable="standard" )
)
operations = [
@@ -67,14 +67,14 @@
grids.PublicURLColumn( "Name", key="name", filterable="advanced" ),
grids.OwnerAnnotationColumn( "Annotation", key="annotation", model_annotation_association_class=model.StoredWorkflowAnnotationAssociation, filterable="advanced" ),
grids.OwnerColumn( "Owner", key="username", model_class=model.User, filterable="advanced" ),
- grids.CommunityRatingColumn( "Community Rating", key="rating" ),
+ grids.CommunityRatingColumn( "Community Rating", key="rating" ),
grids.CommunityTagsColumn( "Community Tags", key="tags", model_tag_association_class=model.StoredWorkflowTagAssociation, filterable="advanced", grid_name="PublicWorkflowListGrid" ),
grids.ReverseSortColumn( "Last Updated", key="update_time", format=time_ago )
]
- columns.append(
- grids.MulticolFilterColumn(
- "Search name, annotation, owner, and tags",
- cols_to_filter=[ columns[0], columns[1], columns[2], columns[4] ],
+ columns.append(
+ grids.MulticolFilterColumn(
+ "Search name, annotation, owner, and tags",
+ cols_to_filter=[ columns[0], columns[1], columns[2], columns[4] ],
key="free-text-search", visible=False, filterable="standard" )
)
operations = []
@@ -84,20 +84,20 @@
def apply_query_filter( self, trans, query, **kwargs ):
# A public workflow is published, has a slug, and is not deleted.
return query.filter( self.model_class.published==True ).filter( self.model_class.slug != None ).filter( self.model_class.deleted == False )
-
+
# Simple SGML parser to get all content in a single tag.
class SingleTagContentsParser( sgmllib.SGMLParser ):
-
+
def __init__( self, target_tag ):
sgmllib.SGMLParser.__init__( self )
self.target_tag = target_tag
self.cur_tag = None
self.tag_content = ""
-
+
def unknown_starttag( self, tag, attrs ):
""" Called for each start tag. """
self.cur_tag = tag
-
+
def handle_data( self, text ):
""" Called for each block of plain text. """
if self.cur_tag == self.target_tag:
@@ -106,13 +106,13 @@
class WorkflowController( BaseUIController, Sharable, UsesStoredWorkflow, UsesAnnotations, UsesItemRatings ):
stored_list_grid = StoredWorkflowListGrid()
published_list_grid = StoredWorkflowAllPublishedGrid()
-
+
__myexp_url = "www.myexperiment.org:80"
-
+
@web.expose
def index( self, trans ):
return self.list( trans )
-
+
@web.expose
@web.require_login( "use Galaxy workflows" )
def list_grid( self, trans, **kwargs ):
@@ -145,7 +145,7 @@
.filter( model.StoredWorkflow.deleted == False ) \
.order_by( desc( model.StoredWorkflow.update_time ) ) \
.all()
-
+
# Legacy issue: all shared workflows must have slugs.
slug_set = False
for workflow_assoc in shared_by_others:
@@ -157,7 +157,7 @@
return trans.fill_template( "workflow/list.mako",
workflows = workflows,
shared_by_others = shared_by_others )
-
+
@web.expose
@web.require_login( "use Galaxy workflows" )
def list_for_run( self, trans ):
@@ -179,7 +179,7 @@
return trans.fill_template( "workflow/list_for_run.mako",
workflows = workflows,
shared_by_others = shared_by_others )
-
+
@web.expose
def list_published( self, trans, **kwargs ):
grid = self.published_list_grid( trans, **kwargs )
@@ -235,11 +235,11 @@
@web.expose
def get_item_content_async( self, trans, id ):
""" Returns item content in HTML format. """
-
+
stored = self.get_stored_workflow( trans, id, False, True )
if stored is None:
raise web.httpexceptions.HTTPNotFound()
-
+
# Get data for workflow's steps.
self.get_stored_workflow_steps( trans, stored )
# Get annotations.
@@ -247,7 +247,7 @@
for step in stored.latest_workflow.steps:
step.annotation = self.get_item_annotation_str( trans.sa_session, stored.user, step )
return trans.stream_template_mako( "/workflow/item_content.mako", item = stored, item_data = stored.latest_workflow.steps )
-
+
@web.expose
@web.require_login( "use Galaxy workflows" )
def share( self, trans, id, email="", use_panels=False ):
@@ -285,45 +285,55 @@
item=stored,
email=email,
use_panels=use_panels )
-
+
@web.expose
@web.require_login( "use Galaxy workflows" )
def sharing( self, trans, id, **kwargs ):
""" Handle workflow sharing. """
-
- # Get session and workflow.
+
session = trans.sa_session
- stored = self.get_stored_workflow( trans, id )
- session.add( stored )
-
- # Do operation on workflow.
- if 'make_accessible_via_link' in kwargs:
- self._make_item_accessible( trans.sa_session, stored )
- elif 'make_accessible_and_publish' in kwargs:
- self._make_item_accessible( trans.sa_session, stored )
- stored.published = True
- elif 'publish' in kwargs:
- stored.published = True
- elif 'disable_link_access' in kwargs:
- stored.importable = False
- elif 'unpublish' in kwargs:
- stored.published = False
- elif 'disable_link_access_and_unpublish' in kwargs:
- stored.importable = stored.published = False
- elif 'unshare_user' in kwargs:
- user = session.query( model.User ).get( trans.security.decode_id( kwargs['unshare_user' ] ) )
- if not user:
- error( "User not found for provided id" )
+
+ if 'unshare_me' in kwargs:
+ # Remove self from shared associations with workflow.
+ stored = self.get_stored_workflow(trans, id, False, True)
association = session.query( model.StoredWorkflowUserShareAssociation ) \
- .filter_by( user=user, stored_workflow=stored ).one()
+ .filter_by( user=trans.user, stored_workflow=stored ).one()
session.delete( association )
-
- # Legacy issue: workflows made accessible before recent updates may not have a slug. Create slug for any workflows that need them.
- if stored.importable and not stored.slug:
- self._make_item_accessible( trans.sa_session, stored )
-
+ session.flush()
+ return self.list( trans )
+ else:
+ # Get session and workflow.
+ stored = self.get_stored_workflow( trans, id )
+ session.add( stored )
+
+ # Do operation on workflow.
+ if 'make_accessible_via_link' in kwargs:
+ self._make_item_accessible( trans.sa_session, stored )
+ elif 'make_accessible_and_publish' in kwargs:
+ self._make_item_accessible( trans.sa_session, stored )
+ stored.published = True
+ elif 'publish' in kwargs:
+ stored.published = True
+ elif 'disable_link_access' in kwargs:
+ stored.importable = False
+ elif 'unpublish' in kwargs:
+ stored.published = False
+ elif 'disable_link_access_and_unpublish' in kwargs:
+ stored.importable = stored.published = False
+ elif 'unshare_user' in kwargs:
+ user = session.query( model.User ).get( trans.security.decode_id( kwargs['unshare_user' ] ) )
+ if not user:
+ error( "User not found for provided id" )
+ association = session.query( model.StoredWorkflowUserShareAssociation ) \
+ .filter_by( user=user, stored_workflow=stored ).one()
+ session.delete( association )
+
+ # Legacy issue: workflows made accessible before recent updates may not have a slug. Create slug for any workflows that need them.
+ if stored.importable and not stored.slug:
+ self._make_item_accessible( trans.sa_session, stored )
+
session.flush()
-
+
return trans.fill_template( "/workflow/sharing.mako", use_panels=True, item=stored )
@web.expose
@@ -335,7 +345,7 @@
referer_message = "<a href='%s'>return to the previous page</a>" % referer
else:
referer_message = "<a href='%s'>go to Galaxy's start page</a>" % url_for( '/' )
-
+
# Do import.
session = trans.sa_session
stored = self.get_stored_workflow( trans, id, check_ownership=False )
@@ -353,19 +363,19 @@
session = trans.sa_session
session.add( imported_stored )
session.flush()
-
+
# Copy annotations.
self.copy_item_annotation( session, stored.user, stored, imported_stored.user, imported_stored )
for order_index, step in enumerate( stored.latest_workflow.steps ):
self.copy_item_annotation( session, stored.user, step, \
imported_stored.user, imported_stored.latest_workflow.steps[order_index] )
session.flush()
-
+
# Redirect to load galaxy frames.
return trans.show_ok_message(
- message="""Workflow "%s" has been imported. <br>You can <a href="%s">start using this workflow</a> or %s."""
+ message="""Workflow "%s" has been imported. <br>You can <a href="%s">start using this workflow</a> or %s."""
% ( stored.name, web.url_for( controller='workflow' ), referer_message ), use_panels=True )
-
+
@web.expose
@web.require_login( "use Galaxy workflows" )
def edit_attributes( self, trans, id, **kwargs ):
@@ -382,11 +392,11 @@
annotation = sanitize_html( kwargs[ 'annotation' ], 'utf-8', 'text/html' )
self.add_item_annotation( trans.sa_session, trans.get_user(), stored, annotation )
trans.sa_session.flush()
- return trans.fill_template( 'workflow/edit_attributes.mako',
- stored=stored,
- annotation=self.get_item_annotation_str( trans.sa_session, trans.user, stored )
+ return trans.fill_template( 'workflow/edit_attributes.mako',
+ stored=stored,
+ annotation=self.get_item_annotation_str( trans.sa_session, trans.user, stored )
)
-
+
@web.expose
@web.require_login( "use Galaxy workflows" )
def rename( self, trans, id, new_name=None, **kwargs ):
@@ -403,10 +413,10 @@
#message = "Workflow renamed to '%s'." % new_name
#return self.list_grid( trans, message=message, status='done' )
else:
- return form( url_for( action='rename', id=trans.security.encode_id(stored.id) ),
+ return form( url_for( action='rename', id=trans.security.encode_id(stored.id) ),
"Rename workflow", submit_text="Rename", use_panels=True ) \
.add_text( "new_name", "Workflow Name", value=to_unicode( stored.name ) )
-
+
@web.expose
@web.require_login( "use Galaxy workflows" )
def rename_async( self, trans, id, new_name=None, **kwargs ):
@@ -417,7 +427,7 @@
stored.latest_workflow.name = san_new_name
trans.sa_session.flush()
return stored.name
-
+
@web.expose
@web.require_login( "use Galaxy workflows" )
def annotate_async( self, trans, id, new_annotation=None, **kwargs ):
@@ -428,7 +438,7 @@
self.add_item_annotation( trans.sa_session, trans.get_user(), stored, new_annotation )
trans.sa_session.flush()
return new_annotation
-
+
@web.expose
@web.require_login( "rate items" )
@web.json
@@ -443,7 +453,7 @@
stored_rating = self.rate_item( trans.sa_session, trans.get_user(), stored, rating )
return self.get_ave_item_rating_data( trans.sa_session, stored )
-
+
@web.expose
@web.require_login( "use Galaxy workflows" )
def set_accessible_async( self, trans, id=None, accessible=False ):
@@ -459,7 +469,7 @@
stored.importable = importable
trans.sa_session.flush()
return
-
+
@web.expose
@web.require_login( "modify Galaxy items" )
def set_slug_async( self, trans, id, new_slug ):
@@ -468,7 +478,7 @@
stored.slug = new_slug
trans.sa_session.flush()
return stored.slug
-
+
@web.expose
def get_embed_html_async( self, trans, id ):
""" Returns HTML for embedding a workflow in a page. """
@@ -477,7 +487,7 @@
stored = self.get_stored_workflow( trans, id )
if stored:
return "Embedded Workflow '%s'" % stored.name
-
+
@web.expose
@web.json
@web.require_login( "use Galaxy workflows" )
@@ -489,33 +499,33 @@
trans.sa_session.flush()
return_dict = { "name" : stored.name, "link" : url_for( action="display_by_username_and_slug", username=stored.user.username, slug=stored.slug ) }
return return_dict
-
+
@web.expose
@web.require_login( "use Galaxy workflows" )
def gen_image( self, trans, id ):
stored = self.get_stored_workflow( trans, id, check_ownership=True )
session = trans.sa_session
-
+
workflow = stored.latest_workflow
data = []
-
+
canvas = svgfig.canvas(style="stroke:black; fill:none; stroke-width:1px; stroke-linejoin:round; text-anchor:left")
text = svgfig.SVG("g")
connectors = svgfig.SVG("g")
boxes = svgfig.SVG("g")
svgfig.Text.defaults["font-size"] = "10px"
-
+
in_pos = {}
out_pos = {}
margin = 5
line_px = 16 # how much spacing between input/outputs
widths = {} # store px width for boxes of each step
max_width, max_x, max_y = 0, 0, 0
-
+
for step in workflow.steps:
# Load from database representation
module = module_factory.from_workflow_step( trans, step )
-
+
# Pack attributes into plain dictionary
step_dict = {
'id': step.order_index,
@@ -523,21 +533,21 @@
'data_outputs': module.get_data_outputs(),
'position': step.position
}
-
+
input_conn_dict = {}
for conn in step.input_connections:
input_conn_dict[ conn.input_name ] = \
dict( id=conn.output_step.order_index, output_name=conn.output_name )
step_dict['input_connections'] = input_conn_dict
-
+
data.append(step_dict)
-
+
x, y = step.position['left'], step.position['top']
count = 0
-
+
max_len = len(module.get_name()) * 1.5
text.append( svgfig.Text(x, y + 20, module.get_name(), **{"font-size": "14px"} ).SVG() )
-
+
y += 45
for di in module.get_data_inputs():
cur_y = y+count*line_px
@@ -547,11 +557,11 @@
text.append( svgfig.Text(x, cur_y, di['label']).SVG() )
count += 1
max_len = max(max_len, len(di['label']))
-
-
+
+
if len(module.get_data_inputs()) > 0:
y += 15
-
+
for do in module.get_data_outputs():
cur_y = y+count*line_px
if step.order_index not in out_pos:
@@ -560,12 +570,12 @@
text.append( svgfig.Text(x, cur_y, do['name']).SVG() )
count += 1
max_len = max(max_len, len(do['name']))
-
+
widths[step.order_index] = max_len*5.5
max_x = max(max_x, step.position['left'])
max_y = max(max_y, step.position['top'])
max_width = max(max_width, widths[step.order_index])
-
+
for step_dict in data:
width = widths[step_dict['id']]
x, y = step_dict['position']['left'], step_dict['position']['top']
@@ -576,18 +586,18 @@
if len(step_dict['data_inputs']) > 0:
box_height += 15
sep_y = y + len(step_dict['data_inputs']) * line_px + 40
- text.append( svgfig.Line(x-margin, sep_y, x+width-margin, sep_y).SVG() ) #
-
+ text.append( svgfig.Line(x-margin, sep_y, x+width-margin, sep_y).SVG() ) #
+
# input/output box
boxes.append( svgfig.Rect(x-margin, y+30, x+width-margin, y+30+box_height, fill="#ffffff").SVG() )
-
+
for conn, output_dict in step_dict['input_connections'].iteritems():
in_coords = in_pos[step_dict['id']][conn]
out_conn_pos = out_pos[output_dict['id']][output_dict['output_name']]
adjusted = (out_conn_pos[0] + widths[output_dict['id']], out_conn_pos[1])
text.append( svgfig.SVG("circle", cx=out_conn_pos[0]+widths[output_dict['id']]-margin, cy=out_conn_pos[1]-margin, r=5, fill="#ffffff" ) )
connectors.append( svgfig.Line(adjusted[0], adjusted[1]-margin, in_coords[0]-10, in_coords[1], arrow_end="true" ).SVG() )
-
+
canvas.append(connectors)
canvas.append(boxes)
canvas.append(text)
@@ -597,8 +607,8 @@
canvas['viewBox'] = "0 0 %s %s" % (width, height)
trans.response.set_content_type("image/svg+xml")
return canvas.standalone_xml()
-
-
+
+
@web.expose
@web.require_login( "use Galaxy workflows" )
def clone( self, trans, id ):
@@ -612,7 +622,7 @@
.filter_by( user=user, stored_workflow=stored ).count() == 0:
error( "Workflow is not owned by or shared with current user" )
owner = False
-
+
# Clone.
new_stored = model.StoredWorkflow()
new_stored.name = "Clone of '%s'" % stored.name
@@ -629,7 +639,7 @@
new_swta.user_tname = swta.user_tname
new_swta.user_value = swta.user_value
new_swta.value = swta.value
- new_stored.tags.append( new_swta )
+ new_stored.tags.append( new_swta )
if not owner:
new_stored.name += " shared by '%s'" % stored.user.email
new_stored.user = user
@@ -640,7 +650,7 @@
# Display the management page
trans.set_message( 'Clone created with name "%s"' % new_stored.name )
return self.list( trans )
-
+
@web.expose
@web.require_login( "create workflows" )
def create( self, trans, workflow_name=None, workflow_annotation="" ):
@@ -672,12 +682,12 @@
return form( url_for(), "Create New Workflow", submit_text="Create", use_panels=True ) \
.add_text( "workflow_name", "Workflow Name", value="Unnamed workflow" ) \
.add_text( "workflow_annotation", "Workflow Annotation", value="", help="A description of the workflow; annotation is shown alongside shared or published workflows." )
-
+
@web.expose
def delete( self, trans, id=None ):
"""
Mark a workflow as deleted
- """
+ """
# Load workflow from database
stored = self.get_stored_workflow( trans, id )
# Marke as deleted and save
@@ -687,7 +697,7 @@
# Display the management page
trans.set_message( "Workflow '%s' deleted" % stored.name )
return self.list( trans )
-
+
@web.expose
@web.require_login( "edit workflows" )
def editor( self, trans, id=None ):
@@ -700,7 +710,7 @@
error( "Invalid workflow id" )
stored = self.get_stored_workflow( trans, id )
return trans.fill_template( "workflow/editor.mako", stored=stored, annotation=self.get_item_annotation_str( trans.sa_session, trans.user, stored ) )
-
+
@web.json
def editor_form_post( self, trans, type='tool', tool_id=None, annotation=None, **incoming ):
"""
@@ -709,7 +719,7 @@
This is used for the form shown in the right pane when a node
is selected.
"""
-
+
trans.workflow_building_mode = True
module = module_factory.from_dict( trans, {
'type': type,
@@ -717,7 +727,7 @@
'tool_state': incoming.pop("tool_state")
} )
module.update_state( incoming )
-
+
if type=='tool':
return {
'tool_state': module.get_state(),
@@ -737,7 +747,7 @@
'form_html': module.get_config_form(),
'annotation': annotation
}
-
+
@web.json
def get_new_module_info( self, trans, type, **kwargs ):
"""
@@ -855,7 +865,7 @@
# post_job_actions
pja_dict = {}
for pja in step.post_job_actions:
- pja_dict[pja.action_type+pja.output_name] = dict(action_type = pja.action_type,
+ pja_dict[pja.action_type+pja.output_name] = dict(action_type = pja.action_type,
output_name = pja.output_name,
action_arguments = pja.action_arguments)
step_dict['post_job_actions'] = pja_dict
@@ -961,7 +971,7 @@
rval = dict( message="Workflow saved" )
rval['name'] = workflow.name
return rval
-
+
@web.expose
@web.require_login( "use workflows" )
def export( self, trans, id=None, **kwd ):
@@ -1014,27 +1024,27 @@
"""
Exports a workflow to myExperiment website.
"""
-
- # Load encoded workflow from database
+
+ # Load encoded workflow from database
user = trans.get_user()
id = trans.security.decode_id( id )
trans.workflow_building_mode = True
stored = trans.sa_session.query( model.StoredWorkflow ).get( id )
self.security_check( trans, stored, False, True )
-
+
# Convert workflow to dict.
workflow_dict = self._workflow_to_dict( trans, stored )
-
+
#
# Create and submit workflow myExperiment request.
#
-
+
# Create workflow content XML.
workflow_dict_packed = simplejson.dumps( workflow_dict, indent=4, sort_keys=True )
workflow_content = trans.fill_template( "workflow/myexp_export_content.mako", \
workflow_dict_packed=workflow_dict_packed, \
workflow_steps=workflow_dict['steps'] )
-
+
# Create myExperiment request.
request_raw = trans.fill_template( "workflow/myexp_export.mako", \
workflow_name=workflow_dict['name'], \
@@ -1043,7 +1053,7 @@
)
# strip() b/c myExperiment XML parser doesn't allow white space before XML; utf-8 handles unicode characters.
request = unicode( request_raw.strip(), 'utf-8' )
-
+
# Do request and get result.
auth_header = base64.b64encode( '%s:%s' % ( myexp_username, myexp_password ))
headers = { "Content-type": "text/xml", "Accept": "text/xml", "Authorization" : "Basic %s" % auth_header }
@@ -1053,7 +1063,7 @@
response = conn.getresponse()
response_data = response.read()
conn.close()
-
+
# Do simple parse of response to see if export successful and provide user feedback.
parser = SingleTagContentsParser( 'id' )
parser.feed( response_data )
@@ -1062,13 +1072,13 @@
if myexp_workflow_id:
return trans.show_message( \
"Workflow '%s' successfully exported to myExperiment. %s" % \
- ( stored.name, workflow_list_str ),
+ ( stored.name, workflow_list_str ),
use_panels=True )
else:
return trans.show_error_message( \
"Workflow '%s' could not be exported to myExperiment. Error: %s. %s" % \
( stored.name, response_data, workflow_list_str ), use_panels=True )
-
+
@web.json_pretty
def for_direct_import( self, trans, id ):
"""
@@ -1087,15 +1097,15 @@
"""
Get the latest Workflow for the StoredWorkflow identified by `id` and
encode it as a json string that can be imported back into Galaxy
-
+
This has slightly different information than the above. In particular,
it does not attempt to decode forms and build UIs, it just stores
the raw state.
"""
-
+
# Get workflow.
stored = self.get_stored_workflow( trans, id, check_ownership=False, check_accessible=True )
-
+
# Stream workflow to file.
stored_dict = self._workflow_to_dict( trans, stored )
valid_chars = '.,^_-()[]0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
@@ -1146,7 +1156,7 @@
workflow_data = None
if url:
# Load workflow from external URL
- # NOTE: blocks the web thread.
+ # NOTE: blocks the web thread.
try:
workflow_data = urllib2.urlopen( url ).read()
except Exception, e:
@@ -1204,9 +1214,9 @@
# A required tool is not available in the local Galaxy instance.
# TODO: It would sure be nice to be able to redirect to a mako template here that displays a nice
# page including the links to the configured tool sheds instead of this message, but trying
- # to get the panels back is a nightmare since workflow eliminates the Galaxy panels. Someone
+ # to get the panels back is a nightmare since workflow eliminates the Galaxy panels. Someone
# involved in workflow development needs to figure out what it will take to be able to switch
- # back and forth between Galaxy (with panels ) and the workflow view (without panels ), having
+ # back and forth between Galaxy (with panels ) and the workflow view (without panels ), having
# the Galaxy panels displayed whenever in Galaxy.
message += "The workflow requires the following tools that are not available in this Galaxy instance."
message += "You can likely install the required tools from one of the Galaxy tool sheds listed below.<br/><br/>"
@@ -1222,7 +1232,7 @@
message += '<a href="%s">%s</a><br/>' % ( url, shed_name )
status = 'error'
if installed_repository_file or tool_shed_url:
- # Another Galaxy panels Hack: The request did not originate from the Galaxy
+ # Another Galaxy panels Hack: The request did not originate from the Galaxy
# workflow view, so we don't need to render the Galaxy panels.
action = 'center'
else:
@@ -1275,13 +1285,13 @@
if issubclass( base, Data ):
types.add( base.__module__ + "." + base.__name__ )
visit_bases( types, base )
- for c in classes:
+ for c in classes:
n = c.__module__ + "." + c.__name__
types = set( [ n ] )
visit_bases( types, c )
class_to_classes[ n ] = dict( ( t, True ) for t in types )
return dict( ext_to_class_name=ext_to_class_name, class_to_classes=class_to_classes )
-
+
@web.expose
def build_from_current_history( self, trans, job_ids=None, dataset_ids=None, workflow_name=None ):
user = trans.get_user()
@@ -1292,7 +1302,7 @@
jobs, warnings = get_job_dict( trans )
# Render
return trans.fill_template(
- "workflow/build_from_current_history.mako",
+ "workflow/build_from_current_history.mako",
jobs=jobs,
warnings=warnings,
history=history )
@@ -1310,7 +1320,7 @@
job_ids = [ int( id ) for id in job_ids ]
dataset_ids = [ int( id ) for id in dataset_ids ]
# Find each job, for security we (implicately) check that they are
- # associated witha job in the current history.
+ # associated witha job in the current history.
jobs, warnings = get_job_dict( trans )
jobs_by_id = dict( ( job.id, job ) for job in jobs.keys() )
steps = []
@@ -1351,7 +1361,7 @@
conn.output_step = other_step
conn.output_name = other_name
steps.append( step )
- steps_by_job_id[ job_id ] = step
+ steps_by_job_id[ job_id ] = step
# Store created dataset hids
for assoc in job.output_datasets:
hid_to_output_pair[ assoc.dataset.hid ] = ( step, assoc.name )
@@ -1380,8 +1390,8 @@
# Index page with message
return trans.show_message( "Workflow '%s' created from current history." % workflow_name )
## return trans.show_ok_message( "<p>Workflow '%s' created.</p><p><a target='_top' href='%s'>Click to load in workflow editor</a></p>"
- ## % ( workflow_name, web.url_for( action='editor', id=trans.security.encode_id(stored.id) ) ) )
-
+ ## % ( workflow_name, web.url_for( action='editor', id=trans.security.encode_id(stored.id) ) ) )
+
@web.expose
def run( self, trans, id, history_id=None, hide_fixed_params=False, **kwargs ):
stored = self.get_stored_workflow( trans, id, check_ownership=False )
@@ -1589,8 +1599,8 @@
trans.set_history(saved_history)
def get_item( self, trans, id ):
- return self.get_stored_workflow( trans, id )
-
+ return self.get_stored_workflow( trans, id )
+
@web.expose
def tag_outputs( self, trans, id, **kwargs ):
stored = self.get_stored_workflow( trans, id, check_ownership=False )
@@ -1644,7 +1654,7 @@
has_upgrade_messages = True
# Any connected input needs to have value DummyDataset (these
# are not persisted so we need to do it every time)
- step.module.add_dummy_datasets( connections=step.input_connections )
+ step.module.add_dummy_datasets( connections=step.input_connections )
# Store state with the step
step.state = step.module.state
# Error dict
@@ -1665,7 +1675,7 @@
has_upgrade_messages=has_upgrade_messages,
errors=errors,
incoming=kwargs )
-
+
@web.expose
def configure_menu( self, trans, workflow_ids=None ):
user = trans.get_user()
@@ -1693,7 +1703,7 @@
user.stored_workflow_menu_entries.append( m )
sess.flush()
return trans.show_message( "Menu updated", refresh_frames=['tools'] )
- else:
+ else:
user = trans.get_user()
ids_in_menu = set( [ x.stored_workflow_id for x in user.stored_workflow_menu_entries ] )
workflows = trans.sa_session.query( model.StoredWorkflow ) \
@@ -1709,7 +1719,7 @@
workflows=workflows,
shared_by_others=shared_by_others,
ids_in_menu=ids_in_menu )
-
+
def _workflow_to_dict( self, trans, stored ):
"""
Converts a workflow to a dict of attributes suitable for exporting.
@@ -1734,7 +1744,7 @@
step_annotation = self.get_item_annotation_obj(trans.sa_session, trans.user, step )
annotation_str = ""
if step_annotation:
- annotation_str = step_annotation.annotation
+ annotation_str = step_annotation.annotation
# Step info
step_dict = {
'id': step.order_index,
@@ -1752,7 +1762,7 @@
if module.type == 'tool':
pja_dict = {}
for pja in step.post_job_actions:
- pja_dict[pja.action_type+pja.output_name] = dict( action_type = pja.action_type,
+ pja_dict[pja.action_type+pja.output_name] = dict( action_type = pja.action_type,
output_name = pja.output_name,
action_arguments = pja.action_arguments )
step_dict[ 'post_job_actions' ] = pja_dict
@@ -1868,8 +1878,8 @@
# Unpack and add post-job actions.
post_job_actions = step_dict.get( 'post_job_actions', {} )
for name, pja_dict in post_job_actions.items():
- pja = PostJobAction( pja_dict[ 'action_type' ],
- step, pja_dict[ 'output_name' ],
+ pja = PostJobAction( pja_dict[ 'action_type' ],
+ step, pja_dict[ 'output_name' ],
pja_dict[ 'action_arguments' ] )
# Second pass to deal with connections between steps
for step in steps:
@@ -1894,7 +1904,7 @@
trans.sa_session.add( stored )
trans.sa_session.flush()
return stored, missing_tool_tups
-
+
## ---- Utility methods -------------------------------------------------------
def attach_ordered_steps( workflow, steps ):
@@ -1937,13 +1947,13 @@
return [ steps[i] for i in node_order ]
except CycleError:
return None
-
+
def order_workflow_steps_with_levels( steps ):
try:
return topsort_levels( edgelist_for_workflow_steps( steps ) )
except CycleError:
return None
-
+
class FakeJob( object ):
"""
Fake job object for datasets that have no creating_job_associations,
@@ -1952,7 +1962,7 @@
def __init__( self, dataset ):
self.is_fake = True
self.id = "fake_%s" % dataset.id
-
+
def get_job_dict( trans ):
"""
Return a dictionary of Job -> [ Dataset ] mappings, for all finished
@@ -1967,22 +1977,22 @@
if dataset.state in ( 'new', 'running', 'queued' ):
warnings.add( "Some datasets still queued or running were ignored" )
continue
-
+
#if this hda was copied from another, we need to find the job that created the origial hda
job_hda = dataset
while job_hda.copied_from_history_dataset_association:
job_hda = job_hda.copied_from_history_dataset_association
-
+
if not job_hda.creating_job_associations:
jobs[ FakeJob( dataset ) ] = [ ( None, dataset ) ]
-
+
for assoc in job_hda.creating_job_associations:
job = assoc.job
if job in jobs:
jobs[ job ].append( ( assoc.name, dataset ) )
else:
jobs[ job ] = [ ( assoc.name, dataset ) ]
- return jobs, warnings
+ return jobs, warnings
def cleanup_param_values( inputs, values ):
"""
@@ -2015,7 +2025,7 @@
key = prefix + key + "_"
for k in root_values.keys():
if k.startswith( key ):
- del root_values[k]
+ del root_values[k]
elif isinstance( input, Repeat ):
group_values = values[key]
for i, rep_values in enumerate( group_values ):
@@ -2029,4 +2039,4 @@
cleanup( prefix, input.cases[current_case].inputs, group_values )
cleanup( "", inputs, values )
return associations
-
+
diff -r 50bf77aa34c3689a122187d7ba9b55deb348df92 -r 3b2acb89495bee2355f20fc3954baa174d486a3c templates/workflow/list.mako
--- a/templates/workflow/list.mako
+++ b/templates/workflow/list.mako
@@ -103,6 +103,7 @@
<a class="action-button" href="${h.url_for( controller='workflow', action='display_by_username_and_slug', username=workflow.user.username, slug=workflow.slug )}" target="_top">View</a><a class="action-button" href="${h.url_for( controller='workflow', action='run', id=trans.security.encode_id( workflow.id ) )}">Run</a><a class="action-button" href="${h.url_for( controller='workflow', action='clone', id=trans.security.encode_id( workflow.id ) )}">Clone</a>
+ <a class="action-button" confirm="Are you sure you want to remove the shared workflow '${h.to_unicode( workflow.name ) | h}'?" href="${h.url_for( controller='workflow', action='sharing', unshare_me=True, id=trans.security.encode_id( workflow.id ))}">Remove</a></div></td></tr>
https://bitbucket.org/galaxy/galaxy-central/changeset/0a001dea3613/
changeset: 0a001dea3613
user: dannon
date: 2012-02-02 20:55:19
summary: Merge.
affected #: 7 files
diff -r 3b2acb89495bee2355f20fc3954baa174d486a3c -r 0a001dea361385d107989740582f855aa2b71dc4 lib/galaxy/config.py
--- a/lib/galaxy/config.py
+++ b/lib/galaxy/config.py
@@ -96,7 +96,7 @@
self.retry_job_output_collection = int( kwargs.get( 'retry_job_output_collection', 0 ) )
self.job_walltime = kwargs.get( 'job_walltime', None )
self.admin_users = kwargs.get( "admin_users", "" )
- self.mailing_join_addr = kwargs.get('mailing_join_addr',"galaxy-user-join@bx.psu.edu")
+ self.mailing_join_addr = kwargs.get('mailing_join_addr',"galaxy-announce-join@bx.psu.edu")
self.error_email_to = kwargs.get( 'error_email_to', None )
self.smtp_server = kwargs.get( 'smtp_server', None )
self.smtp_username = kwargs.get( 'smtp_username', None )
diff -r 3b2acb89495bee2355f20fc3954baa174d486a3c -r 0a001dea361385d107989740582f855aa2b71dc4 lib/galaxy/jobs/runners/drmaa.py
--- a/lib/galaxy/jobs/runners/drmaa.py
+++ b/lib/galaxy/jobs/runners/drmaa.py
@@ -196,7 +196,6 @@
if job_wrapper.get_state() == model.Job.states.DELETED:
log.debug( "Job %s deleted by user before it entered the queue" % job_wrapper.get_id_tag() )
if self.app.config.cleanup_job in ( "always", "onsuccess" ):
- self.cleanup( ( ofile, efile, jt.remoteCommand ) )
job_wrapper.cleanup()
return
@@ -334,25 +333,12 @@
except:
log.exception("Job wrapper finish method failed")
- # clean up the drm files
- if self.app.config.cleanup_job == "always" or ( not stderr and self.app.config.cleanup_job == "onsuccess" ):
- self.cleanup( ( ofile, efile, job_file ) )
-
def fail_job( self, drm_job_state ):
"""
Seperated out so we can use the worker threads for it.
"""
self.stop_job( self.sa_session.query( self.app.model.Job ).get( drm_job_state.job_wrapper.job_id ) )
drm_job_state.job_wrapper.fail( drm_job_state.fail_message )
- if self.app.config.cleanup_job == "always":
- self.cleanup( ( drm_job_state.ofile, drm_job_state.efile, drm_job_state.job_file ) )
-
- def cleanup( self, files ):
- for file in files:
- try:
- os.unlink( file )
- except Exception, e:
- log.warning( "Unable to cleanup: %s" % str( e ) )
def put( self, job_wrapper ):
"""Add a job to the queue (by job identifier)"""
@@ -440,6 +426,6 @@
# The expected output is a single line containing a single numeric value:
# the DRMAA job-ID. If not the case, will throw an error.
jobId = stdoutdata
- return jobId;
+ return jobId
diff -r 3b2acb89495bee2355f20fc3954baa174d486a3c -r 0a001dea361385d107989740582f855aa2b71dc4 lib/galaxy/webapps/community/config.py
--- a/lib/galaxy/webapps/community/config.py
+++ b/lib/galaxy/webapps/community/config.py
@@ -68,7 +68,7 @@
self.template_cache = resolve_path( kwargs.get( "template_cache_path", "database/compiled_templates/community" ), self.root )
self.admin_users = kwargs.get( "admin_users", "" )
self.sendmail_path = kwargs.get('sendmail_path',"/usr/sbin/sendmail")
- self.mailing_join_addr = kwargs.get('mailing_join_addr',"galaxy-user-join@bx.psu.edu")
+ self.mailing_join_addr = kwargs.get('mailing_join_addr',"galaxy-announce-join@bx.psu.edu")
self.error_email_to = kwargs.get( 'error_email_to', None )
self.smtp_server = kwargs.get( 'smtp_server', None )
self.smtp_username = kwargs.get( 'smtp_username', None )
diff -r 3b2acb89495bee2355f20fc3954baa174d486a3c -r 0a001dea361385d107989740582f855aa2b71dc4 lib/galaxy/webapps/demo_sequencer/config.py
--- a/lib/galaxy/webapps/demo_sequencer/config.py
+++ b/lib/galaxy/webapps/demo_sequencer/config.py
@@ -45,7 +45,7 @@
self.template_cache = resolve_path( kwargs.get( "template_cache_path", "database/compiled_templates/demo_sequencer" ), self.root )
self.admin_users = kwargs.get( "admin_users", "" )
self.sendmail_path = kwargs.get('sendmail_path',"/usr/sbin/sendmail")
- self.mailing_join_addr = kwargs.get('mailing_join_addr',"galaxy-user-join@bx.psu.edu")
+ self.mailing_join_addr = kwargs.get('mailing_join_addr',"galaxy-announce-join@bx.psu.edu")
self.error_email_to = kwargs.get( 'error_email_to', None )
self.smtp_server = kwargs.get( 'smtp_server', None )
self.log_actions = string_as_bool( kwargs.get( 'log_actions', 'False' ) )
diff -r 3b2acb89495bee2355f20fc3954baa174d486a3c -r 0a001dea361385d107989740582f855aa2b71dc4 templates/user/register.mako
--- a/templates/user/register.mako
+++ b/templates/user/register.mako
@@ -72,6 +72,8 @@
<% subscribe_check_box.checked = True %>
%endif
${subscribe_check_box.get_html()}
+ <p>See <a href="http://galaxyproject.org/wiki/Mailing%20Lists" target="_blank">
+ all Galaxy project mailing lists</a>.</p></div>
%endif
%if user_type_fd_id_select_field and len( user_type_fd_id_select_field.options ) > 1:
diff -r 3b2acb89495bee2355f20fc3954baa174d486a3c -r 0a001dea361385d107989740582f855aa2b71dc4 tools/variant_detection/freebayes.xml
--- a/tools/variant_detection/freebayes.xml
+++ b/tools/variant_detection/freebayes.xml
@@ -1,5 +1,5 @@
<?xml version="1.0"?>
-<tool id="freebayes" name="FreeBayes" version="0.0.1">
+<tool id="freebayes" name="FreeBayes" version="0.0.2"><requirements><requirement type="package" version="0.9.4">freebayes</requirement><requirement type="package">samtools</requirement>
@@ -29,11 +29,17 @@
##outputs
--vcf "${output_vcf}"
- --trace "${output_trace}"
- --failed-alleles "${output_failed_alleles_bed}"
##advanced options
#if str( $options_type.options_type_selector ) == "advanced":
+ ##additional outputs
+ #if $options_type.output_trace_option:
+ --trace "${output_trace}"
+ #end if
+ #if $options_type.output_failed_alleles_option:
+ --failed-alleles "${output_failed_alleles_bed}"
+ #end if
+
##additional inputs
#if str( $options_type.target_limit_type.target_limit_type_selector ) == "limit_by_target_file":
--targets "${options_type.target_limit_type.input_target_bed}"
@@ -186,8 +192,13 @@
<!-- Do nothing here --></when><when value="advanced">
-
- <!-- input and output -->
+
+ <!-- output -->
+ <param name="output_failed_alleles_option" type="boolean" truevalue="--failed-alleles" falsevalue="" checked="False" label="Write out failed alleles file" />
+ <param name="output_trace_option" type="boolean" truevalue="--trace" falsevalue="" checked="False" label="Write out algorithm trace file" />
+
+
+ <!-- input --><conditional name="target_limit_type"><param name="target_limit_type_selector" type="select" label="Limit analysis to listed targets"><option value="do_not_limit" selected="True">Do not limit</option>
@@ -398,8 +409,12 @@
</inputs><outputs><data format="vcf" name="output_vcf" label="${tool.name} on ${on_string} (variants)" />
- <data format="txt" name="output_trace" label="${tool.name} on ${on_string} (trace)" />
- <data format="bed" name="output_failed_alleles_bed" label="${tool.name} on ${on_string} (failed alleles)" />
+ <data format="bed" name="output_failed_alleles_bed" label="${tool.name} on ${on_string} (failed alleles)">
+ <filter>options_type['options_type_selector'] == "advanced" and options_type['output_failed_alleles_option'] is True</filter>
+ </data>
+ <data format="txt" name="output_trace" label="${tool.name} on ${on_string} (trace)">
+ <filter>options_type['options_type_selector'] == "advanced" and options_type['output_trace_option'] is True</filter>
+ </data></outputs><tests><test>
@@ -408,8 +423,8 @@
<param name="input_bam" ftype="bam" value="gatk/fake_phiX_reads_1.bam"/><param name="options_type_selector" value="basic"/><output name="output_vcf" file="variant_detection/freebayes/freebayes_out_1.vcf.contains" compare="contains"/>
- <output name="output_trace" file="variant_detection/freebayes/freebayes_out_1.output_trace" />
- <output name="output_failed_alleles_bed" file="empty_file.dat" />
+ <!-- <output name="output_failed_alleles_bed" file="empty_file.dat" />
+ <output name="output_trace" file="variant_detection/freebayes/freebayes_out_1.output_trace" /> --></test></tests><help>
diff -r 3b2acb89495bee2355f20fc3954baa174d486a3c -r 0a001dea361385d107989740582f855aa2b71dc4 universe_wsgi.ini.sample
--- a/universe_wsgi.ini.sample
+++ b/universe_wsgi.ini.sample
@@ -205,7 +205,7 @@
# On the user registration form, users may choose to join the mailing list.
# This is the address of the list they'll be subscribed to.
-#mailing_join_addr = galaxy-user-join@bx.psu.edu
+#mailing_join_addr = galaxy-announce-join@bx.psu.edu
# Datasets in an error state include a link to report the error. Those reports
# will be sent to this address. Error reports are disabled if no address is set.
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: clements: Changed the default new user mailing list from galaxy-user to galaxy-announce, which is a much lower-volume list. Also added text linking to the wiki page with all mailing lists.
by Bitbucket 02 Feb '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/eadc91223171/
changeset: eadc91223171
user: clements
date: 2012-02-02 06:25:41
summary: Changed the default new user mailing list from galaxy-user to galaxy-announce, which is a much lower-volume list. Also added text linking to the wiki page with all mailing lists.
affected #: 5 files
diff -r 238207122b68e961dac14238fe1af0a585a9e903 -r eadc91223171926a699f5f34ce088be2a9e17783 lib/galaxy/config.py
--- a/lib/galaxy/config.py
+++ b/lib/galaxy/config.py
@@ -96,7 +96,7 @@
self.retry_job_output_collection = int( kwargs.get( 'retry_job_output_collection', 0 ) )
self.job_walltime = kwargs.get( 'job_walltime', None )
self.admin_users = kwargs.get( "admin_users", "" )
- self.mailing_join_addr = kwargs.get('mailing_join_addr',"galaxy-user-join@bx.psu.edu")
+ self.mailing_join_addr = kwargs.get('mailing_join_addr',"galaxy-announce-join@bx.psu.edu")
self.error_email_to = kwargs.get( 'error_email_to', None )
self.smtp_server = kwargs.get( 'smtp_server', None )
self.smtp_username = kwargs.get( 'smtp_username', None )
diff -r 238207122b68e961dac14238fe1af0a585a9e903 -r eadc91223171926a699f5f34ce088be2a9e17783 lib/galaxy/webapps/community/config.py
--- a/lib/galaxy/webapps/community/config.py
+++ b/lib/galaxy/webapps/community/config.py
@@ -68,7 +68,7 @@
self.template_cache = resolve_path( kwargs.get( "template_cache_path", "database/compiled_templates/community" ), self.root )
self.admin_users = kwargs.get( "admin_users", "" )
self.sendmail_path = kwargs.get('sendmail_path',"/usr/sbin/sendmail")
- self.mailing_join_addr = kwargs.get('mailing_join_addr',"galaxy-user-join@bx.psu.edu")
+ self.mailing_join_addr = kwargs.get('mailing_join_addr',"galaxy-announce-join@bx.psu.edu")
self.error_email_to = kwargs.get( 'error_email_to', None )
self.smtp_server = kwargs.get( 'smtp_server', None )
self.smtp_username = kwargs.get( 'smtp_username', None )
diff -r 238207122b68e961dac14238fe1af0a585a9e903 -r eadc91223171926a699f5f34ce088be2a9e17783 lib/galaxy/webapps/demo_sequencer/config.py
--- a/lib/galaxy/webapps/demo_sequencer/config.py
+++ b/lib/galaxy/webapps/demo_sequencer/config.py
@@ -45,7 +45,7 @@
self.template_cache = resolve_path( kwargs.get( "template_cache_path", "database/compiled_templates/demo_sequencer" ), self.root )
self.admin_users = kwargs.get( "admin_users", "" )
self.sendmail_path = kwargs.get('sendmail_path',"/usr/sbin/sendmail")
- self.mailing_join_addr = kwargs.get('mailing_join_addr',"galaxy-user-join@bx.psu.edu")
+ self.mailing_join_addr = kwargs.get('mailing_join_addr',"galaxy-announce-join@bx.psu.edu")
self.error_email_to = kwargs.get( 'error_email_to', None )
self.smtp_server = kwargs.get( 'smtp_server', None )
self.log_actions = string_as_bool( kwargs.get( 'log_actions', 'False' ) )
diff -r 238207122b68e961dac14238fe1af0a585a9e903 -r eadc91223171926a699f5f34ce088be2a9e17783 templates/user/register.mako
--- a/templates/user/register.mako
+++ b/templates/user/register.mako
@@ -72,6 +72,8 @@
<% subscribe_check_box.checked = True %>
%endif
${subscribe_check_box.get_html()}
+ <p>See <a href="http://galaxyproject.org/wiki/Mailing%20Lists" target="_blank">
+ all Galaxy project mailing lists</a>.</p></div>
%endif
%if user_type_fd_id_select_field and len( user_type_fd_id_select_field.options ) > 1:
diff -r 238207122b68e961dac14238fe1af0a585a9e903 -r eadc91223171926a699f5f34ce088be2a9e17783 universe_wsgi.ini.sample
--- a/universe_wsgi.ini.sample
+++ b/universe_wsgi.ini.sample
@@ -205,7 +205,7 @@
# On the user registration form, users may choose to join the mailing list.
# This is the address of the list they'll be subscribed to.
-#mailing_join_addr = galaxy-user-join@bx.psu.edu
+#mailing_join_addr = galaxy-announce-join@bx.psu.edu
# Datasets in an error state include a link to report the error. Those reports
# will be sent to this address. Error reports are disabled if no address is set.
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: natefoo: DRMAA runner no longer needs to clean up its stdout/stderr since they live in the working directory which is cleaned up by the job wrapper.
by Bitbucket 02 Feb '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/238207122b68/
changeset: 238207122b68
user: natefoo
date: 2012-02-02 19:25:59
summary: DRMAA runner no longer needs to clean up its stdout/stderr since they live in the working directory which is cleaned up by the job wrapper.
affected #: 1 file
diff -r 37e787787128662a1aba3c6c831f9dabc9de3289 -r 238207122b68e961dac14238fe1af0a585a9e903 lib/galaxy/jobs/runners/drmaa.py
--- a/lib/galaxy/jobs/runners/drmaa.py
+++ b/lib/galaxy/jobs/runners/drmaa.py
@@ -196,7 +196,6 @@
if job_wrapper.get_state() == model.Job.states.DELETED:
log.debug( "Job %s deleted by user before it entered the queue" % job_wrapper.get_id_tag() )
if self.app.config.cleanup_job in ( "always", "onsuccess" ):
- self.cleanup( ( ofile, efile, jt.remoteCommand ) )
job_wrapper.cleanup()
return
@@ -334,25 +333,12 @@
except:
log.exception("Job wrapper finish method failed")
- # clean up the drm files
- if self.app.config.cleanup_job == "always" or ( not stderr and self.app.config.cleanup_job == "onsuccess" ):
- self.cleanup( ( ofile, efile, job_file ) )
-
def fail_job( self, drm_job_state ):
"""
Seperated out so we can use the worker threads for it.
"""
self.stop_job( self.sa_session.query( self.app.model.Job ).get( drm_job_state.job_wrapper.job_id ) )
drm_job_state.job_wrapper.fail( drm_job_state.fail_message )
- if self.app.config.cleanup_job == "always":
- self.cleanup( ( drm_job_state.ofile, drm_job_state.efile, drm_job_state.job_file ) )
-
- def cleanup( self, files ):
- for file in files:
- try:
- os.unlink( file )
- except Exception, e:
- log.warning( "Unable to cleanup: %s" % str( e ) )
def put( self, job_wrapper ):
"""Add a job to the queue (by job identifier)"""
@@ -440,6 +426,6 @@
# The expected output is a single line containing a single numeric value:
# the DRMAA job-ID. If not the case, will throw an error.
jobId = stdoutdata
- return jobId;
+ return jobId
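As a small illustration (not part of this changeset) of why the runner-side cleanup became redundant: the runner's stdout/stderr capture files are created inside the job's working directory, as in the sketch below, and per the commit message that directory is already cleaned up by the job wrapper.
import os
def drm_output_paths( working_directory, id_tag ):
    # Mirrors how the drmaa runner names its .drmout/.drmerr files: both live
    # under the job's working directory, so removing that directory removes them.
    ofile = "%s.drmout" % os.path.join( os.getcwd(), working_directory, id_tag )
    efile = "%s.drmerr" % os.path.join( os.getcwd(), working_directory, id_tag )
    return ofile, efile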
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.