galaxy-commits
Threads by month
- ----- 2024 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2023 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2022 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2021 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2020 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2019 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2018 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2017 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2016 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2015 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2014 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2013 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2012 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2011 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2010 -----
- December
- November
- October
- September
- August
- July
- June
- May
October 2012
- 1 participant
- 194 discussions
commit/galaxy-central: jgoecks: GFF/GTF reading and visualization enhancements: (a) enable GFF to FLI conversion and (b) better feature aggregation.
by Bitbucket 29 Oct '12
by Bitbucket 29 Oct '12
29 Oct '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/fa045aad74e9/
changeset: fa045aad74e9
user: jgoecks
date: 2012-10-29 15:44:32
summary: GFF/GTF reading and visualization enhancements: (a) enable GFF to FLI conversion and (b) better feature aggregation.
affected #: 4 files
diff -r ecd131b136d00d5753992061d12b12156be8b277 -r fa045aad74e90f16995e0cbb670a59e6b9becbed lib/galaxy/datatypes/converters/bed_to_fli_converter.xml
--- a/lib/galaxy/datatypes/converters/bed_to_fli_converter.xml
+++ b/lib/galaxy/datatypes/converters/bed_to_fli_converter.xml
@@ -1,7 +1,7 @@
<tool id="CONVERTER_bed_to_fli_0" name="Convert BED to Feature Location Index"><!-- <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> --><!-- Used on the metadata edit page. -->
- <command interpreter="python">interval_to_fli.py -B $input1 $output1</command>
+ <command interpreter="python">interval_to_fli.py -F bed $input1 $output1</command><inputs><param format="bed" name="input1" type="data" label="Choose BED file"/></inputs>
diff -r ecd131b136d00d5753992061d12b12156be8b277 -r fa045aad74e90f16995e0cbb670a59e6b9becbed lib/galaxy/datatypes/converters/gff_to_fli_converter.xml
--- a/lib/galaxy/datatypes/converters/gff_to_fli_converter.xml
+++ b/lib/galaxy/datatypes/converters/gff_to_fli_converter.xml
@@ -1,7 +1,7 @@
<tool id="CONVERTER_gff_to_fli_0" name="Convert GFF to Feature Location Index"><!-- <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> --><!-- Used on the metadata edit page. -->
- <command interpreter="python">interval_to_fli.py -G $input1 $output1</command>
+ <command interpreter="python">interval_to_fli.py -F $input1.extension $input1 $output1</command><inputs><param format="gff" name="input1" type="data" label="Choose GFF file"/></inputs>
diff -r ecd131b136d00d5753992061d12b12156be8b277 -r fa045aad74e90f16995e0cbb670a59e6b9becbed lib/galaxy/datatypes/converters/interval_to_fli.py
--- a/lib/galaxy/datatypes/converters/interval_to_fli.py
+++ b/lib/galaxy/datatypes/converters/interval_to_fli.py
@@ -12,22 +12,28 @@
import sys, optparse
from galaxy import eggs
-from galaxy.datatypes.util.gff_util import read_unordered_gtf, convert_gff_coords_to_bed
+from galaxy.datatypes.util.gff_util import GFFReaderWrapper, read_unordered_gtf, convert_gff_coords_to_bed
def main():
# Process arguments.
parser = optparse.OptionParser()
- parser.add_option( '-B', '--bed', action="store_true", dest="bed_input" )
- parser.add_option( '-G', '--gff', action="store_true", dest="gff_input" )
+ parser.add_option( '-F', '--format', dest="input_format" )
(options, args) = parser.parse_args()
in_fname, out_fname = args
-
+ input_format = options.input_format.lower()
# Create dict of name-location pairings.
name_loc_dict = {}
- if options.gff_input:
- # GFF format
- for feature in read_unordered_gtf( open( in_fname, 'r' ) ):
+ if input_format in [ 'gff', 'gtf' ]:
+ # GTF/GFF format
+
+ # Create reader.
+ if input_format == 'gff':
+ in_reader = GFFReaderWrapper( open( in_fname, 'r' ) )
+ else: #input_format == 'gtf'
+ in_reader = read_unordered_gtf( open( in_fname, 'r' ) )
+
+ for feature in in_reader:
for name in feature.attributes:
val = feature.attributes[ name ]
try:
@@ -50,7 +56,7 @@
loc[ 'start' ] = feature.start
if feature.end > loc[ 'end' ]:
loc[ 'end' ] = feature.end
- else:
+ elif input_format == 'bed':
# BED format.
for line in open( in_fname, 'r' ):
# Ignore track lines.
diff -r ecd131b136d00d5753992061d12b12156be8b277 -r fa045aad74e90f16995e0cbb670a59e6b9becbed lib/galaxy/datatypes/util/gff_util.py
--- a/lib/galaxy/datatypes/util/gff_util.py
+++ b/lib/galaxy/datatypes/util/gff_util.py
@@ -227,22 +227,20 @@
continue
# Determine if interval is part of feature.
- part_of = True
+ part_of = False
group = interval.attributes.get( 'group', None )
# GFF test:
- if group and feature_group != group:
- part_of = False
+ if group and feature_group == group:
+ part_of = True
# GFF3 test:
parent_id = interval.attributes.get( 'Parent', None )
cur_id = interval.attributes.get( 'ID', None )
- if ( cur_id and cur_id != feature_id ) or ( parent_id and parent_id != feature_id ):
- part_of = False
+ if ( cur_id and cur_id == feature_id ) or ( parent_id and parent_id == feature_id ):
+ part_of = True
# GTF test:
- gene_id = interval.attributes.get( 'gene_id', None )
transcript_id = interval.attributes.get( 'transcript_id', None )
- if ( transcript_id and transcript_id != feature_transcript_id ) or \
- ( gene_id and gene_id != feature_gene_id ):
- part_of = False
+ if transcript_id and transcript_id == feature_transcript_id:
+ part_of = True
# If interval is not part of feature, clean up and break.
if not part_of:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: james_taylor: libraries: fix in query for 'datasets_are_public'
by Bitbucket 26 Oct '12
by Bitbucket 26 Oct '12
26 Oct '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/ecd131b136d0/
changeset: ecd131b136d0
user: james_taylor
date: 2012-10-26 22:23:01
summary: libraries: fix in query for 'datasets_are_public'
affected #: 1 file
diff -r 50b1d7a65bd2760699d9982d6e1ab60f2dbd665a -r ecd131b136d00d5753992061d12b12156be8b277 lib/galaxy/security/__init__.py
--- a/lib/galaxy/security/__init__.py
+++ b/lib/galaxy/security/__init__.py
@@ -879,7 +879,7 @@
# Now get all datasets which have DATASET_ACCESS actions:
access_data_perms = trans.sa_session.query( trans.app.model.DatasetPermissions ) \
- .filter( and_( trans.app.model.DatasetPermissions.dataset_id in dataset_ids,
+ .filter( and_( trans.app.model.DatasetPermissions.dataset_id.in_( dataset_ids ),
trans.app.model.DatasetPermissions.action == self.permitted_actions.DATASET_ACCESS.action ) ) \
.all()
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: greg: Refactor code supporting setting metadata on repositories in the tool shed and installed in Galaxy - enables using same code across web apps.
by Bitbucket 26 Oct '12
by Bitbucket 26 Oct '12
26 Oct '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/50b1d7a65bd2/
changeset: 50b1d7a65bd2
user: greg
date: 2012-10-26 21:39:50
summary: Refactor code supporting setting metadata on repositories in the tool shed and installed in Galaxy - enables using same code across web apps.
affected #: 14 files
diff -r b2975e2fa6844b230ca4a656ecdb82f9e6612815 -r 50b1d7a65bd2760699d9982d6e1ab60f2dbd665a lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -3,9 +3,12 @@
from datetime import date, datetime, timedelta
from time import strftime, gmtime
from galaxy import util
+from galaxy.web import url_for
+from galaxy.web.form_builder import SelectField
from galaxy.tools import parameters
from galaxy.datatypes.checkers import *
from galaxy.util.json import *
+from galaxy.util import inflector
from galaxy.tools.search import ToolBoxSearch
from galaxy.tool_shed.tool_dependencies.install_util import create_or_update_tool_dependency, install_package, set_environment
from galaxy.tool_shed.encoding_util import *
@@ -23,6 +26,7 @@
log = logging.getLogger( __name__ )
+GALAXY_ADMIN_TOOL_SHED_CONTROLLER = 'GALAXY_ADMIN_TOOL_SHED_CONTROLLER'
INITIAL_CHANGELOG_HASH = '000000000000'
# Characters that must be html escaped
MAPPED_CHARS = { '>' :'&gt;',
@@ -33,6 +37,7 @@
MAX_CONTENT_SIZE = 32768
NOT_TOOL_CONFIGS = [ 'datatypes_conf.xml', 'tool_dependencies.xml' ]
VALID_CHARS = set( string.letters + string.digits + "'\"-=_.()/+*^,:?!#[]%\\$@;{}" )
+TOOL_SHED_ADMIN_CONTROLLER = 'TOOL_SHED_ADMIN_CONTROLLER'
class ShedCounter( object ):
def __init__( self, model ):
@@ -246,6 +251,27 @@
except:
pass
return converter_path, display_path
+def build_repository_ids_select_field( trans, cntrller, name='repository_ids', multiple=True, display='checkboxes' ):
+ """Method called from both Galaxy and the Tool Shed to generate the current list of repositories for resetting metadata."""
+ repositories_select_field = SelectField( name=name, multiple=multiple, display=display )
+ if cntrller == TOOL_SHED_ADMIN_CONTROLLER:
+ for repository in trans.sa_session.query( trans.model.Repository ) \
+ .filter( trans.model.Repository.table.c.deleted == False ) \
+ .order_by( trans.model.Repository.table.c.name,
+ trans.model.Repository.table.c.user_id ):
+ owner = repository.user.username
+ option_label = '%s (%s)' % ( repository.name, owner )
+ option_value = '%s' % trans.security.encode_id( repository.id )
+ repositories_select_field.add_option( option_label, option_value )
+ elif cntrller == GALAXY_ADMIN_TOOL_SHED_CONTROLLER:
+ for repository in trans.sa_session.query( trans.model.ToolShedRepository ) \
+ .filter( trans.model.ToolShedRepository.table.c.uninstalled == False ) \
+ .order_by( trans.model.ToolShedRepository.table.c.name,
+ trans.model.ToolShedRepository.table.c.owner ):
+ option_label = '%s (%s)' % ( repository.name, repository.owner )
+ option_value = trans.security.encode_id( repository.id )
+ repositories_select_field.add_option( option_label, option_value )
+ return repositories_select_field
def can_generate_tool_dependency_metadata( root, metadata_dict ):
"""
Make sure the combination of name, version and type (the type will be the value of elem.tag) of each root element tag in the tool_dependencies.xml
@@ -339,6 +365,105 @@
correction_msg += "Upload a file named <b>%s.sample</b> to the repository to correct this error." % str( index_file_name )
invalid_files_and_errors_tups.append( ( tool_config_name, correction_msg ) )
return invalid_files_and_errors_tups
+def clean_repository_metadata( trans, id, changeset_revisions ):
+ # Delete all repository_metadata records associated with the repository that have a changeset_revision that is not in changeset_revisions.
+ # We sometimes see multiple records with the same changeset revision value - no idea how this happens. We'll assume we can delete the older
+ # records, so we'll order by update_time descending and delete records that have the same changeset_revision we come across later..
+ changeset_revisions_checked = []
+ for repository_metadata in trans.sa_session.query( trans.model.RepositoryMetadata ) \
+ .filter( trans.model.RepositoryMetadata.table.c.repository_id == trans.security.decode_id( id ) ) \
+ .order_by( trans.model.RepositoryMetadata.table.c.changeset_revision,
+ trans.model.RepositoryMetadata.table.c.update_time.desc() ):
+ changeset_revision = repository_metadata.changeset_revision
+ can_delete = changeset_revision in changeset_revisions_checked or changeset_revision not in changeset_revisions
+ if can_delete:
+ trans.sa_session.delete( repository_metadata )
+ trans.sa_session.flush()
+def compare_changeset_revisions( ancestor_changeset_revision, ancestor_metadata_dict, current_changeset_revision, current_metadata_dict ):
+ # The metadata associated with ancestor_changeset_revision is ancestor_metadata_dict. This changeset_revision is an ancestor of
+ # current_changeset_revision which is associated with current_metadata_dict. A new repository_metadata record will be created only
+ # when this method returns the string 'not equal and not subset'.
+ ancestor_datatypes = ancestor_metadata_dict.get( 'datatypes', [] )
+ ancestor_tools = ancestor_metadata_dict.get( 'tools', [] )
+ ancestor_guids = [ tool_dict[ 'guid' ] for tool_dict in ancestor_tools ]
+ ancestor_guids.sort()
+ ancestor_tool_dependencies = ancestor_metadata_dict.get( 'tool_dependencies', [] )
+ ancestor_workflows = ancestor_metadata_dict.get( 'workflows', [] )
+ current_datatypes = current_metadata_dict.get( 'datatypes', [] )
+ current_tools = current_metadata_dict.get( 'tools', [] )
+ current_guids = [ tool_dict[ 'guid' ] for tool_dict in current_tools ]
+ current_guids.sort()
+ current_tool_dependencies = current_metadata_dict.get( 'tool_dependencies', [] )
+ current_workflows = current_metadata_dict.get( 'workflows', [] )
+ # Handle case where no metadata exists for either changeset.
+ if not ancestor_guids and not current_guids and not ancestor_workflows and not current_workflows and not ancestor_datatypes and not current_datatypes:
+ return 'no metadata'
+ workflow_comparison = compare_workflows( ancestor_workflows, current_workflows )
+ datatype_comparison = compare_datatypes( ancestor_datatypes, current_datatypes )
+ # Handle case where all metadata is the same.
+ if ancestor_guids == current_guids and workflow_comparison == 'equal' and datatype_comparison == 'equal':
+ return 'equal'
+ if workflow_comparison in [ 'equal', 'subset' ] and datatype_comparison in [ 'equal', 'subset' ]:
+ is_subset = True
+ for guid in ancestor_guids:
+ if guid not in current_guids:
+ is_subset = False
+ break
+ if is_subset:
+ return 'subset'
+ return 'not equal and not subset'
+def compare_datatypes( ancestor_datatypes, current_datatypes ):
+ # Determine if ancestor_datatypes is the same as current_datatypes
+ # or if ancestor_datatypes is a subset of current_datatypes. Each
+ # datatype dict looks something like:
+ # {"dtype": "galaxy.datatypes.images:Image", "extension": "pdf", "mimetype": "application/pdf"}
+ if len( ancestor_datatypes ) <= len( current_datatypes ):
+ for ancestor_datatype in ancestor_datatypes:
+ # Currently the only way to differentiate datatypes is by name.
+ ancestor_datatype_dtype = ancestor_datatype[ 'dtype' ]
+ ancestor_datatype_extension = ancestor_datatype[ 'extension' ]
+ ancestor_datatype_mimetype = ancestor_datatype.get( 'mimetype', None )
+ found_in_current = False
+ for current_datatype in current_datatypes:
+ if current_datatype[ 'dtype' ] == ancestor_datatype_dtype and \
+ current_datatype[ 'extension' ] == ancestor_datatype_extension and \
+ current_datatype.get( 'mimetype', None ) == ancestor_datatype_mimetype:
+ found_in_current = True
+ break
+ if not found_in_current:
+ return 'not equal and not subset'
+ if len( ancestor_datatypes ) == len( current_datatypes ):
+ return 'equal'
+ else:
+ return 'subset'
+ return 'not equal and not subset'
+def compare_workflows( ancestor_workflows, current_workflows ):
+ # Determine if ancestor_workflows is the same as current_workflows
+ # or if ancestor_workflows is a subset of current_workflows.
+ if len( ancestor_workflows ) <= len( current_workflows ):
+ for ancestor_workflow_tup in ancestor_workflows:
+ # ancestor_workflows is a list of tuples where each contained tuple is
+ # [ <relative path to the .ga file in the repository>, <exported workflow dict> ]
+ ancestor_workflow_dict = ancestor_workflow_tup[1]
+ # Currently the only way to differentiate workflows is by name.
+ ancestor_workflow_name = ancestor_workflow_dict[ 'name' ]
+ num_ancestor_workflow_steps = len( ancestor_workflow_dict[ 'steps' ] )
+ found_in_current = False
+ for current_workflow_tup in current_workflows:
+ current_workflow_dict = current_workflow_tup[1]
+ # Assume that if the name and number of steps are equal,
+ # then the workflows are the same. Of course, this may
+ # not be true...
+ if current_workflow_dict[ 'name' ] == ancestor_workflow_name and len( current_workflow_dict[ 'steps' ] ) == num_ancestor_workflow_steps:
+ found_in_current = True
+ break
+ if not found_in_current:
+ return 'not equal and not subset'
+ if len( ancestor_workflows ) == len( current_workflows ):
+ return 'equal'
+ else:
+ return 'subset'
+ return 'not equal and not subset'
def concat_messages( msg1, msg2 ):
if msg1:
if msg2:
@@ -447,6 +572,20 @@
tool_dicts=tool_dicts,
converter_path=converter_path,
display_path=display_path )
+def create_or_update_repository_metadata( trans, id, repository, changeset_revision, metadata_dict ):
+ downloadable = is_downloadable( metadata_dict )
+ repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
+ if repository_metadata:
+ repository_metadata.metadata = metadata_dict
+ repository_metadata.downloadable = downloadable
+ else:
+ repository_metadata = trans.model.RepositoryMetadata( repository_id=repository.id,
+ changeset_revision=changeset_revision,
+ metadata=metadata_dict,
+ downloadable=downloadable )
+ trans.sa_session.add( repository_metadata )
+ trans.sa_session.flush()
+ return repository_metadata
def create_or_update_tool_shed_repository( app, name, description, installed_changeset_revision, ctx_rev, repository_clone_url, metadata_dict,
status, current_changeset_revision=None, owner='', dist_to_shed=False ):
# The received value for dist_to_shed will be True if the InstallManager is installing a repository that contains tools or datatypes that used
@@ -546,10 +685,19 @@
set_status=set_status )
tool_dependency_objects.append( tool_dependency )
return tool_dependency_objects
-def generate_clone_url( trans, repository ):
- """Generate the URL for cloning a repository."""
+def generate_clone_url_for_installed_repository( trans, repository ):
+ """Generate the URL for cloning a repository that has been installed into a Galaxy instance."""
tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository )
return url_join( tool_shed_url, 'repos', repository.owner, repository.name )
+def generate_clone_url_for_repository_in_tool_shed( trans, repository ):
+ """Generate the URL for cloning a repository that is in the tool shed."""
+ base_url = url_for( '/', qualified=True ).rstrip( '/' )
+ if trans.user:
+ protocol, base = base_url.split( '://' )
+ username = '%s@' % trans.user.username
+ return '%s://%s%s/repos/%s/%s' % ( protocol, username, base, repository.user.username, repository.name )
+ else:
+ return '%s/repos/%s/%s' % ( base_url, repository.user.username, repository.name )
def generate_datatypes_metadata( datatypes_config, metadata_dict ):
"""Update the received metadata_dict with information from the parsed datatypes_config."""
tree = ElementTree.parse( datatypes_config )
@@ -750,6 +898,42 @@
app.config.tool_data_path = original_tool_data_path
app.config.tool_data_table_config_path = original_tool_data_table_config_path
return metadata_dict, invalid_file_tups
+def generate_message_for_invalid_tools( invalid_file_tups, repository, metadata_dict, as_html=True, displaying_invalid_tool=False ):
+ if as_html:
+ new_line = '<br/>'
+ bold_start = '<b>'
+ bold_end = '</b>'
+ else:
+ new_line = '\n'
+ bold_start = ''
+ bold_end = ''
+ message = ''
+ if not displaying_invalid_tool:
+ if metadata_dict:
+ message += "Metadata was defined for some items in revision '%s'. " % str( repository.tip )
+ message += "Correct the following problems if necessary and reset metadata.%s" % new_line
+ else:
+ message += "Metadata cannot be defined for revision '%s' so this revision cannot be automatically " % str( repository.tip )
+ message += "installed into a local Galaxy instance. Correct the following problems and reset metadata.%s" % new_line
+ for itc_tup in invalid_file_tups:
+ tool_file, exception_msg = itc_tup
+ if exception_msg.find( 'No such file or directory' ) >= 0:
+ exception_items = exception_msg.split()
+ missing_file_items = exception_items[ 7 ].split( '/' )
+ missing_file = missing_file_items[ -1 ].rstrip( '\'' )
+ if missing_file.endswith( '.loc' ):
+ sample_ext = '%s.sample' % missing_file
+ else:
+ sample_ext = missing_file
+ correction_msg = "This file refers to a missing file %s%s%s. " % ( bold_start, str( missing_file ), bold_end )
+ correction_msg += "Upload a file named %s%s%s to the repository to correct this error." % ( bold_start, sample_ext, bold_end )
+ else:
+ if as_html:
+ correction_msg = exception_msg
+ else:
+ correction_msg = exception_msg.replace( '<br/>', new_line ).replace( '<b>', bold_start ).replace( '</b>', bold_end )
+ message += "%s%s%s - %s%s" % ( bold_start, tool_file, bold_end, correction_msg, new_line )
+ return message
def generate_package_dependency_metadata( elem, tool_dependencies_dict ):
"""The value of package_name must match the value of the "package" type in the tool config's <requirements> tag set."""
requirements_dict = {}
@@ -1158,6 +1342,9 @@
ctx = get_changectx_for_changeset( repo, changeset_revision )
named_tmp_file = get_named_tmpfile_from_ctx( ctx, file_name, dir )
return named_tmp_file
+def get_installed_tool_shed_repository( trans, id ):
+ """Get a repository on the Galaxy side from the database via id"""
+ return trans.sa_session.query( trans.model.ToolShedRepository ).get( trans.security.decode_id( id ) )
def get_list_of_copied_sample_files( repo, ctx, dir ):
"""
Find all sample files (files in the repository with the special .sample extension) in the reversed repository manifest up to ctx. Copy
@@ -1211,6 +1398,24 @@
fh.close()
return tmp_filename
return None
+def get_parent_id( trans, id, old_id, version, guid, changeset_revisions ):
+ parent_id = None
+ # Compare from most recent to oldest.
+ changeset_revisions.reverse()
+ for changeset_revision in changeset_revisions:
+ repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
+ metadata = repository_metadata.metadata
+ tools_dicts = metadata.get( 'tools', [] )
+ for tool_dict in tools_dicts:
+ if tool_dict[ 'guid' ] == guid:
+ # The tool has not changed between the compared changeset revisions.
+ continue
+ if tool_dict[ 'id' ] == old_id and tool_dict[ 'version' ] != version:
+ # The tool version is different, so we've found the parent.
+ return tool_dict[ 'guid' ]
+ if parent_id is None:
+ # The tool did not change through all of the changeset revisions.
+ return old_id
def get_repository_file_contents( file_path ):
if is_gzip( file_path ):
to_html = to_html_str( '\ngzip compressed file\n' )
@@ -1242,6 +1447,9 @@
if contents:
contents.sort()
return contents
+def get_repository_in_tool_shed( trans, id ):
+ """Get a repository on the tool shed side from the database via id"""
+ return trans.sa_session.query( trans.model.Repository ).get( trans.security.decode_id( id ) )
def get_repository_metadata_by_changeset_revision( trans, id, changeset_revision ):
"""Get metadata for a specified repository change set from the database"""
# Make sure there are no duplicate records, and return the single unique record for the changeset_revision. Duplicate records were somehow
@@ -1635,6 +1843,8 @@
parent_id=tool_version_using_parent_id.id )
sa_session.add( tool_version_association )
sa_session.flush()
+def is_downloadable( metadata_dict ):
+ return 'datatypes' in metadata_dict or 'tools' in metadata_dict or 'workflows' in metadata_dict
def load_installed_datatype_converters( app, installed_repository_dict, deactivate=False ):
# Load or deactivate proprietary datatype converters
app.datatypes_registry.load_datatype_converters( app.toolbox, installed_repository_dict=installed_repository_dict, deactivate=deactivate )
@@ -1739,6 +1949,12 @@
repo,
source=repository_clone_url,
rev=[ ctx_rev ] )
+def remove_dir( dir ):
+ if os.path.exists( dir ):
+ try:
+ shutil.rmtree( dir )
+ except:
+ pass
def remove_from_shed_tool_config( trans, shed_tool_conf_dict, guids_to_remove ):
# A tool shed repository is being uninstalled so change the shed_tool_conf file. Parse the config file to generate the entire list
# of config_elems instead of using the in-memory list since it will be a subset of the entire list if one or more repositories have
@@ -1912,6 +2128,188 @@
removed = True
error_message = ''
return removed, error_message
+def reset_all_metadata_on_installed_repository( trans, id ):
+ """Reset all metadata on a single tool shed repository installed into a Galaxy instance."""
+ repository = get_installed_tool_shed_repository( trans, id )
+ tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository )
+ repository_clone_url = generate_clone_url_for_installed_repository( trans, repository )
+ tool_path, relative_install_dir = repository.get_tool_relative_path( trans.app )
+ if relative_install_dir:
+ original_metadata_dict = repository.metadata
+ metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( app=trans.app,
+ repository=repository,
+ repository_clone_url=repository_clone_url,
+ shed_config_dict = repository.get_shed_config_dict( trans.app ),
+ relative_install_dir=relative_install_dir,
+ repository_files_dir=None,
+ resetting_all_metadata_on_repository=False,
+ updating_installed_repository=False )
+ repository.metadata = metadata_dict
+ if metadata_dict != original_metadata_dict:
+ update_in_shed_tool_config( trans.app, repository )
+ trans.sa_session.add( repository )
+ trans.sa_session.flush()
+ log.debug( 'Metadata has been reset on repository %s.' % repository.name )
+ else:
+ log.debug( 'Metadata did not need to be reset on repository %s.' % repository.name )
+ else:
+ log.debug( 'Error locating installation directory for repository %s.' % repository.name )
+ return invalid_file_tups, metadata_dict
+def reset_all_metadata_on_repository_in_tool_shed( trans, id ):
+ """Reset all metadata on a single repository in a tool shed."""
+ def reset_all_tool_versions( trans, id, repo ):
+ changeset_revisions = []
+ for changeset in repo.changelog:
+ changeset_revision = str( repo.changectx( changeset ) )
+ repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
+ if repository_metadata:
+ metadata = repository_metadata.metadata
+ if metadata:
+ if metadata.get( 'tools', None ):
+ changeset_revisions.append( changeset_revision )
+ # The list of changeset_revisions is now filtered to contain only those that are downloadable and contain tools.
+ # If a repository includes tools, build a dictionary of { 'tool id' : 'parent tool id' } pairs for each tool in each changeset revision.
+ for index, changeset_revision in enumerate( changeset_revisions ):
+ tool_versions_dict = {}
+ repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
+ metadata = repository_metadata.metadata
+ tool_dicts = metadata[ 'tools' ]
+ if index == 0:
+ # The first changset_revision is a special case because it will have no ancestor changeset_revisions in which to match tools.
+ # The parent tool id for tools in the first changeset_revision will be the "old_id" in the tool config.
+ for tool_dict in tool_dicts:
+ tool_versions_dict[ tool_dict[ 'guid' ] ] = tool_dict[ 'id' ]
+ else:
+ for tool_dict in tool_dicts:
+ parent_id = get_parent_id( trans,
+ id,
+ tool_dict[ 'id' ],
+ tool_dict[ 'version' ],
+ tool_dict[ 'guid' ],
+ changeset_revisions[ 0:index ] )
+ tool_versions_dict[ tool_dict[ 'guid' ] ] = parent_id
+ if tool_versions_dict:
+ repository_metadata.tool_versions = tool_versions_dict
+ trans.sa_session.add( repository_metadata )
+ trans.sa_session.flush()
+ repository = get_repository_in_tool_shed( trans, id )
+ log.debug( "Resetting all metadata on repository: %s" % repository.name )
+ repo_dir = repository.repo_path
+ repo = hg.repository( get_configured_ui(), repo_dir )
+ repository_clone_url = generate_clone_url_for_repository_in_tool_shed( trans, repository )
+ # The list of changeset_revisions refers to repository_metadata records that have been created or updated. When the following loop
+ # completes, we'll delete all repository_metadata records for this repository that do not have a changeset_revision value in this list.
+ changeset_revisions = []
+ # When a new repository_metadata record is created, it always uses the values of metadata_changeset_revision and metadata_dict.
+ metadata_changeset_revision = None
+ metadata_dict = None
+ ancestor_changeset_revision = None
+ ancestor_metadata_dict = None
+ invalid_file_tups = []
+ home_dir = os.getcwd()
+ for changeset in repo.changelog:
+ work_dir = tempfile.mkdtemp()
+ current_changeset_revision = str( repo.changectx( changeset ) )
+ ctx = repo.changectx( changeset )
+ log.debug( "Cloning repository revision: %s", str( ctx.rev() ) )
+ cloned_ok, error_message = clone_repository( repository_clone_url, work_dir, str( ctx.rev() ) )
+ if cloned_ok:
+ log.debug( "Generating metadata for changset revision: %s", str( ctx.rev() ) )
+ current_metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( app=trans.app,
+ repository=repository,
+ repository_clone_url=repository_clone_url,
+ relative_install_dir=repo_dir,
+ repository_files_dir=work_dir,
+ resetting_all_metadata_on_repository=True,
+ updating_installed_repository=False )
+ if current_metadata_dict:
+ if not metadata_changeset_revision and not metadata_dict:
+ # We're at the first change set in the change log.
+ metadata_changeset_revision = current_changeset_revision
+ metadata_dict = current_metadata_dict
+ if ancestor_changeset_revision:
+ # Compare metadata from ancestor and current. The value of comparison will be one of:
+ # 'no metadata' - no metadata for either ancestor or current, so continue from current
+ # 'equal' - ancestor metadata is equivalent to current metadata, so continue from current
+ # 'subset' - ancestor metadata is a subset of current metadata, so continue from current
+ # 'not equal and not subset' - ancestor metadata is neither equal to nor a subset of current metadata, so persist ancestor metadata.
+ comparison = compare_changeset_revisions( ancestor_changeset_revision,
+ ancestor_metadata_dict,
+ current_changeset_revision,
+ current_metadata_dict )
+ if comparison in [ 'no metadata', 'equal', 'subset' ]:
+ ancestor_changeset_revision = current_changeset_revision
+ ancestor_metadata_dict = current_metadata_dict
+ elif comparison == 'not equal and not subset':
+ metadata_changeset_revision = ancestor_changeset_revision
+ metadata_dict = ancestor_metadata_dict
+ repository_metadata = create_or_update_repository_metadata( trans, id, repository, metadata_changeset_revision, metadata_dict )
+ changeset_revisions.append( metadata_changeset_revision )
+ ancestor_changeset_revision = current_changeset_revision
+ ancestor_metadata_dict = current_metadata_dict
+ else:
+ # We're at the beginning of the change log.
+ ancestor_changeset_revision = current_changeset_revision
+ ancestor_metadata_dict = current_metadata_dict
+ if not ctx.children():
+ metadata_changeset_revision = current_changeset_revision
+ metadata_dict = current_metadata_dict
+ # We're at the end of the change log.
+ repository_metadata = create_or_update_repository_metadata( trans, id, repository, metadata_changeset_revision, metadata_dict )
+ changeset_revisions.append( metadata_changeset_revision )
+ ancestor_changeset_revision = None
+ ancestor_metadata_dict = None
+ elif ancestor_metadata_dict:
+ # We reach here only if current_metadata_dict is empty and ancestor_metadata_dict is not.
+ if not ctx.children():
+ # We're at the end of the change log.
+ repository_metadata = create_or_update_repository_metadata( trans, id, repository, metadata_changeset_revision, metadata_dict )
+ changeset_revisions.append( metadata_changeset_revision )
+ ancestor_changeset_revision = None
+ ancestor_metadata_dict = None
+ remove_dir( work_dir )
+ # Delete all repository_metadata records for this repository that do not have a changeset_revision value in changeset_revisions.
+ clean_repository_metadata( trans, id, changeset_revisions )
+ # Set tool version information for all downloadable changeset revisions. Get the list of changeset revisions from the changelog.
+ reset_all_tool_versions( trans, id, repo )
+ # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
+ reset_tool_data_tables( trans.app )
+ return invalid_file_tups, metadata_dict
+def reset_metadata_on_selected_repositories( trans, **kwd ):
+ # This method is called from both Galaxy and the Tool Shed, so the cntrller param is required.
+ repository_ids = util.listify( kwd.get( 'repository_ids', None ) )
+ CONTROLLER = kwd[ 'CONTROLLER' ]
+ message = ''
+ status = 'done'
+ if repository_ids:
+ successful_count = 0
+ unsuccessful_count = 0
+ for repository_id in repository_ids:
+ try:
+ if CONTROLLER == 'TOOL_SHED_ADMIN_CONTROLLER':
+ repository = get_repository_in_tool_shed( trans, repository_id )
+ invalid_file_tups, metadata_dict = reset_all_metadata_on_repository_in_tool_shed( trans, repository_id )
+ elif CONTROLLER == 'GALAXY_ADMIN_TOOL_SHED_CONTROLLER':
+ repository = get_installed_tool_shed_repository( trans, repository_id )
+ invalid_file_tups, metadata_dict = reset_all_metadata_on_installed_repository( trans, repository_id )
+ if invalid_file_tups:
+ message = generate_message_for_invalid_tools( invalid_file_tups, repository, None, as_html=False )
+ log.debug( message )
+ unsuccessful_count += 1
+ else:
+ log.debug( "Successfully reset metadata on repository %s" % repository.name )
+ successful_count += 1
+ except Exception, e:
+ log.debug( "Error attempting to reset metadata on repository '%s': %s" % ( repository.name, str( e ) ) )
+ unsuccessful_count += 1
+ message = "Successfully reset metadata on %d %s. " % ( successful_count, inflector.cond_plural( successful_count, "repository" ) )
+ if unsuccessful_count:
+ message += "Error setting metadata on %d %s - see the paster log for details. " % ( unsuccessful_count,
+ inflector.cond_plural( unsuccessful_count, "repository" ) )
+ else:
+ message = 'Select at least one repository on which to reset all metadata.'
+ status = 'error'
+ return message, status
def reset_tool_data_tables( app ):
# Reset the tool_data_tables to an empty dictionary.
app.tool_data_tables.data_tables = {}
@@ -2061,7 +2459,7 @@
tool_panel_dict = generate_tool_panel_dict_from_shed_tool_conf_entries( trans, repository )
repository_tools_tups = get_repository_tools_tups( app, repository.metadata )
- cleaned_repository_clone_url = clean_repository_clone_url( generate_clone_url( trans, repository ) )
+ cleaned_repository_clone_url = clean_repository_clone_url( generate_clone_url_for_installed_repository( trans, repository ) )
tool_shed = tool_shed_from_repository_clone_url( cleaned_repository_clone_url )
owner = repository.owner
if not owner:
diff -r b2975e2fa6844b230ca4a656ecdb82f9e6612815 -r 50b1d7a65bd2760699d9982d6e1ab60f2dbd665a lib/galaxy/webapps/community/controllers/admin.py
--- a/lib/galaxy/webapps/community/controllers/admin.py
+++ b/lib/galaxy/webapps/community/controllers/admin.py
@@ -5,7 +5,9 @@
from galaxy.web.framework.helpers import time_ago, iff, grids
from galaxy.web.form_builder import SelectField
from galaxy.util import inflector
-from galaxy.util.shed_util import get_changectx_for_changeset, get_configured_ui
+# TODO: re-factor shed_util to eliminate the following restricted imports
+from galaxy.util.shed_util import build_repository_ids_select_field, get_changectx_for_changeset, get_configured_ui, get_repository_in_tool_shed
+from galaxy.util.shed_util import reset_metadata_on_selected_repositories, TOOL_SHED_ADMIN_CONTROLLER
from common import *
from repository import RepositoryGrid, CategoryGrid
@@ -481,7 +483,7 @@
# The received id is the repository id, so we need to get the id of the user
# that uploaded the repository.
repository_id = kwd.get( 'id', None )
- repository = get_repository( trans, repository_id )
+ repository = get_repository_in_tool_shed( trans, repository_id )
kwd[ 'f-email' ] = repository.user.email
elif operation == "repositories_by_category":
# Eliminate the current filters if any exist.
@@ -513,7 +515,7 @@
changset_revision_str = 'changeset_revision_'
if k.startswith( changset_revision_str ):
repository_id = trans.security.encode_id( int( k.lstrip( changset_revision_str ) ) )
- repository = get_repository( trans, repository_id )
+ repository = get_repository_in_tool_shed( trans, repository_id )
if repository.tip != v:
return trans.response.send_redirect( web.url_for( controller='repository',
action='browse_repositories',
@@ -586,7 +588,7 @@
count = 0
deleted_repositories = ""
for repository_id in ids:
- repository = get_repository( trans, repository_id )
+ repository = get_repository_in_tool_shed( trans, repository_id )
if not repository.deleted:
repository.deleted = True
trans.sa_session.add( repository )
@@ -715,57 +717,14 @@
status=status )
@web.expose
@web.require_admin
- def reset_metadata_on_selected_repositories( self, trans, **kwd ):
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
- repository_names_by_owner = util.listify( kwd.get( 'repository_names_by_owner', None ) )
+ def reset_metadata_on_selected_repositories_in_tool_shed( self, trans, **kwd ):
if 'reset_metadata_on_selected_repositories_button' in kwd:
- if repository_names_by_owner:
- successful_count = 0
- unsuccessful_count = 0
- for repository_name_owner_str in repository_names_by_owner:
- repository_name_owner_list = repository_name_owner_str.split( STRSEP )
- name = repository_name_owner_list[ 0 ]
- owner = repository_name_owner_list[ 1 ]
- repository = get_repository_by_name_and_owner( trans, name, owner )
- try:
- invalid_file_tups, metadata_dict = reset_all_metadata_on_repository( trans, trans.security.encode_id( repository.id ) )
- if invalid_file_tups:
- message = generate_message_for_invalid_tools( invalid_file_tups, repository, None, as_html=False )
- log.debug( message )
- unsuccessful_count += 1
- else:
- log.debug( "Successfully reset metadata on repository %s" % repository.name )
- successful_count += 1
- except Exception, e:
- log.debug( "Error attempting to reset metadata on repository '%s': %s" % ( repository.name, str( e ) ) )
- unsuccessful_count += 1
- message = "Successfully reset metadata on %d %s. " % ( successful_count,
- inflector.cond_plural( successful_count, "repository" ) )
- if unsuccessful_count:
- message += "Error setting metadata on %d %s - see the paster log for details. " % ( unsuccessful_count,
- inflector.cond_plural( unsuccessful_count,
- "repository" ) )
- trans.response.send_redirect( web.url_for( controller='admin',
- action='browse_repository_metadata',
- message=util.sanitize_text( message ),
- status=status ) )
- else:
- 'Select at least one repository to on which to reset all metadata.'
- status = 'error'
- repositories_select_field = SelectField( name='repository_names_by_owner',
- multiple=True,
- display='checkboxes' )
- for repository in trans.sa_session.query( trans.model.Repository ) \
- .filter( and_( trans.model.Repository.table.c.deleted == False,
- trans.model.Repository.table.c.deprecated == False ) ) \
- .order_by( trans.model.Repository.table.c.name,
- trans.model.Repository.table.c.user_id ):
- owner = repository.user.username
- option_label = '%s (%s)' % ( repository.name, owner )
- option_value = '%s%s%s' % ( repository.name, STRSEP, owner )
- repositories_select_field.add_option( option_label, option_value )
+ kwd[ 'CONTROLLER' ] = TOOL_SHED_ADMIN_CONTROLLER
+ message, status = reset_metadata_on_selected_repositories( trans, **kwd )
+ else:
+ message = util.restore_text( kwd.get( 'message', '' ) )
+ status = kwd.get( 'status', 'done' )
+ repositories_select_field = build_repository_ids_select_field( trans, TOOL_SHED_ADMIN_CONTROLLER )
return trans.fill_template( '/webapps/community/admin/reset_metadata_on_selected_repositories.mako',
repositories_select_field=repositories_select_field,
message=message,
@@ -783,7 +742,7 @@
count = 0
undeleted_repositories = ""
for repository_id in ids:
- repository = get_repository( trans, repository_id )
+ repository = get_repository_in_tool_shed( trans, repository_id )
if repository.deleted:
repository.deleted = False
trans.sa_session.add( repository )
diff -r b2975e2fa6844b230ca4a656ecdb82f9e6612815 -r 50b1d7a65bd2760699d9982d6e1ab60f2dbd665a lib/galaxy/webapps/community/controllers/common.py
--- a/lib/galaxy/webapps/community/controllers/common.py
+++ b/lib/galaxy/webapps/community/controllers/common.py
@@ -5,11 +5,14 @@
from galaxy.tools import *
from galaxy.util.json import from_json_string, to_json_string
from galaxy.util.hash_util import *
-from galaxy.util.shed_util import check_tool_input_params, clone_repository, concat_messages, copy_sample_file, generate_metadata_for_changeset_revision
+# TODO: re-factor shed_util to eliminate the following restricted imports
+from galaxy.util.shed_util import check_tool_input_params, clone_repository, concat_messages, copy_sample_file, create_or_update_repository_metadata
+from galaxy.util.shed_util import generate_clone_url_for_repository_in_tool_shed, generate_message_for_invalid_tools, generate_metadata_for_changeset_revision
from galaxy.util.shed_util import get_changectx_for_changeset, get_config_from_disk, get_configured_ui, get_file_context_from_ctx, get_named_tmpfile_from_ctx
-from galaxy.util.shed_util import get_repository_metadata_by_changeset_revision, handle_sample_files_and_load_tool_from_disk
-from galaxy.util.shed_util import handle_sample_files_and_load_tool_from_tmp_config, handle_sample_tool_data_table_conf_file, INITIAL_CHANGELOG_HASH
-from galaxy.util.shed_util import load_tool_from_config, reset_tool_data_tables, reversed_upper_bounded_changelog, strip_path
+from galaxy.util.shed_util import get_parent_id, get_repository_in_tool_shed, get_repository_metadata_by_changeset_revision
+from galaxy.util.shed_util import handle_sample_files_and_load_tool_from_disk, handle_sample_files_and_load_tool_from_tmp_config
+from galaxy.util.shed_util import handle_sample_tool_data_table_conf_file, INITIAL_CHANGELOG_HASH, is_downloadable, load_tool_from_config, remove_dir
+from galaxy.util.shed_util import reset_tool_data_tables, reversed_upper_bounded_changelog, strip_path
from galaxy.web.base.controller import *
from galaxy.web.base.controllers.admin import *
from galaxy.webapps.community import model
@@ -171,105 +174,6 @@
if user_email in admin_users:
return True
return False
-def clean_repository_metadata( trans, id, changeset_revisions ):
- # Delete all repository_metadata records associated with the repository that have a changeset_revision that is not in changeset_revisions.
- # We sometimes see multiple records with the same changeset revision value - no idea how this happens. We'll assume we can delete the older
- # records, so we'll order by update_time descending and delete records that have the same changeset_revision we come across later..
- changeset_revisions_checked = []
- for repository_metadata in trans.sa_session.query( trans.model.RepositoryMetadata ) \
- .filter( trans.model.RepositoryMetadata.table.c.repository_id == trans.security.decode_id( id ) ) \
- .order_by( trans.model.RepositoryMetadata.table.c.changeset_revision,
- trans.model.RepositoryMetadata.table.c.update_time.desc() ):
- changeset_revision = repository_metadata.changeset_revision
- can_delete = changeset_revision in changeset_revisions_checked or changeset_revision not in changeset_revisions
- if can_delete:
- trans.sa_session.delete( repository_metadata )
- trans.sa_session.flush()
-def compare_changeset_revisions( ancestor_changeset_revision, ancestor_metadata_dict, current_changeset_revision, current_metadata_dict ):
- # The metadata associated with ancestor_changeset_revision is ancestor_metadata_dict. This changeset_revision is an ancestor of
- # current_changeset_revision which is associated with current_metadata_dict. A new repository_metadata record will be created only
- # when this method returns the string 'not equal and not subset'.
- ancestor_datatypes = ancestor_metadata_dict.get( 'datatypes', [] )
- ancestor_tools = ancestor_metadata_dict.get( 'tools', [] )
- ancestor_guids = [ tool_dict[ 'guid' ] for tool_dict in ancestor_tools ]
- ancestor_guids.sort()
- ancestor_tool_dependencies = ancestor_metadata_dict.get( 'tool_dependencies', [] )
- ancestor_workflows = ancestor_metadata_dict.get( 'workflows', [] )
- current_datatypes = current_metadata_dict.get( 'datatypes', [] )
- current_tools = current_metadata_dict.get( 'tools', [] )
- current_guids = [ tool_dict[ 'guid' ] for tool_dict in current_tools ]
- current_guids.sort()
- current_tool_dependencies = current_metadata_dict.get( 'tool_dependencies', [] )
- current_workflows = current_metadata_dict.get( 'workflows', [] )
- # Handle case where no metadata exists for either changeset.
- if not ancestor_guids and not current_guids and not ancestor_workflows and not current_workflows and not ancestor_datatypes and not current_datatypes:
- return 'no metadata'
- workflow_comparison = compare_workflows( ancestor_workflows, current_workflows )
- datatype_comparison = compare_datatypes( ancestor_datatypes, current_datatypes )
- # Handle case where all metadata is the same.
- if ancestor_guids == current_guids and workflow_comparison == 'equal' and datatype_comparison == 'equal':
- return 'equal'
- if workflow_comparison in [ 'equal', 'subset' ] and datatype_comparison in [ 'equal', 'subset' ]:
- is_subset = True
- for guid in ancestor_guids:
- if guid not in current_guids:
- is_subset = False
- break
- if is_subset:
- return 'subset'
- return 'not equal and not subset'
-def compare_datatypes( ancestor_datatypes, current_datatypes ):
- # Determine if ancestor_datatypes is the same as current_datatypes
- # or if ancestor_datatypes is a subset of current_datatypes. Each
- # datatype dict looks something like:
- # {"dtype": "galaxy.datatypes.images:Image", "extension": "pdf", "mimetype": "application/pdf"}
- if len( ancestor_datatypes ) <= len( current_datatypes ):
- for ancestor_datatype in ancestor_datatypes:
- # Currently the only way to differentiate datatypes is by name.
- ancestor_datatype_dtype = ancestor_datatype[ 'dtype' ]
- ancestor_datatype_extension = ancestor_datatype[ 'extension' ]
- ancestor_datatype_mimetype = ancestor_datatype.get( 'mimetype', None )
- found_in_current = False
- for current_datatype in current_datatypes:
- if current_datatype[ 'dtype' ] == ancestor_datatype_dtype and \
- current_datatype[ 'extension' ] == ancestor_datatype_extension and \
- current_datatype.get( 'mimetype', None ) == ancestor_datatype_mimetype:
- found_in_current = True
- break
- if not found_in_current:
- return 'not equal and not subset'
- if len( ancestor_datatypes ) == len( current_datatypes ):
- return 'equal'
- else:
- return 'subset'
- return 'not equal and not subset'
-def compare_workflows( ancestor_workflows, current_workflows ):
- # Determine if ancestor_workflows is the same as current_workflows
- # or if ancestor_workflows is a subset of current_workflows.
- if len( ancestor_workflows ) <= len( current_workflows ):
- for ancestor_workflow_tup in ancestor_workflows:
- # ancestor_workflows is a list of tuples where each contained tuple is
- # [ <relative path to the .ga file in the repository>, <exported workflow dict> ]
- ancestor_workflow_dict = ancestor_workflow_tup[1]
- # Currently the only way to differentiate workflows is by name.
- ancestor_workflow_name = ancestor_workflow_dict[ 'name' ]
- num_ancestor_workflow_steps = len( ancestor_workflow_dict[ 'steps' ] )
- found_in_current = False
- for current_workflow_tup in current_workflows:
- current_workflow_dict = current_workflow_tup[1]
- # Assume that if the name and number of steps are euqal,
- # then the workflows are the same. Of course, this may
- # not be true...
- if current_workflow_dict[ 'name' ] == ancestor_workflow_name and len( current_workflow_dict[ 'steps' ] ) == num_ancestor_workflow_steps:
- found_in_current = True
- break
- if not found_in_current:
- return 'not equal and not subset'
- if len( ancestor_workflows ) == len( current_workflows ):
- return 'equal'
- else:
- return 'subset'
- return 'not equal and not subset'
def copy_file_from_disk( filename, repo_dir, dir ):
file_path = None
found = False
@@ -300,66 +204,6 @@
fh.close()
return file_path
return None
-def create_or_update_repository_metadata( trans, id, repository, changeset_revision, metadata_dict ):
- downloadable = is_downloadable( metadata_dict )
- repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
- if repository_metadata:
- repository_metadata.metadata = metadata_dict
- repository_metadata.downloadable = downloadable
- else:
- repository_metadata = trans.model.RepositoryMetadata( repository_id=repository.id,
- changeset_revision=changeset_revision,
- metadata=metadata_dict,
- downloadable=downloadable )
- trans.sa_session.add( repository_metadata )
- trans.sa_session.flush()
- return repository_metadata
-def generate_clone_url( trans, repository_id ):
- """Generate the URL for cloning a repository."""
- repository = get_repository( trans, repository_id )
- base_url = url_for( '/', qualified=True ).rstrip( '/' )
- if trans.user:
- protocol, base = base_url.split( '://' )
- username = '%s@' % trans.user.username
- return '%s://%s%s/repos/%s/%s' % ( protocol, username, base, repository.user.username, repository.name )
- else:
- return '%s/repos/%s/%s' % ( base_url, repository.user.username, repository.name )
-def generate_message_for_invalid_tools( invalid_file_tups, repository, metadata_dict, as_html=True, displaying_invalid_tool=False ):
- if as_html:
- new_line = '<br/>'
- bold_start = '<b>'
- bold_end = '</b>'
- else:
- new_line = '\n'
- bold_start = ''
- bold_end = ''
- message = ''
- if not displaying_invalid_tool:
- if metadata_dict:
- message += "Metadata was defined for some items in revision '%s'. " % str( repository.tip )
- message += "Correct the following problems if necessary and reset metadata.%s" % new_line
- else:
- message += "Metadata cannot be defined for revision '%s' so this revision cannot be automatically " % str( repository.tip )
- message += "installed into a local Galaxy instance. Correct the following problems and reset metadata.%s" % new_line
- for itc_tup in invalid_file_tups:
- tool_file, exception_msg = itc_tup
- if exception_msg.find( 'No such file or directory' ) >= 0:
- exception_items = exception_msg.split()
- missing_file_items = exception_items[ 7 ].split( '/' )
- missing_file = missing_file_items[ -1 ].rstrip( '\'' )
- if missing_file.endswith( '.loc' ):
- sample_ext = '%s.sample' % missing_file
- else:
- sample_ext = missing_file
- correction_msg = "This file refers to a missing file %s%s%s. " % ( bold_start, str( missing_file ), bold_end )
- correction_msg += "Upload a file named %s%s%s to the repository to correct this error." % ( bold_start, sample_ext, bold_end )
- else:
- if as_html:
- correction_msg = exception_msg
- else:
- correction_msg = exception_msg.replace( '<br/>', new_line ).replace( '<b>', bold_start ).replace( '</b>', bold_end )
- message += "%s%s%s - %s%s" % ( bold_start, tool_file, bold_end, correction_msg, new_line )
- return message
def generate_tool_guid( trans, repository, tool ):
"""
Generate a guid for the received tool. The form of the guid is
@@ -450,24 +294,6 @@
fh.close()
return tmp_filename
return None
-def get_parent_id( trans, id, old_id, version, guid, changeset_revisions ):
- parent_id = None
- # Compare from most recent to oldest.
- changeset_revisions.reverse()
- for changeset_revision in changeset_revisions:
- repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
- metadata = repository_metadata.metadata
- tools_dicts = metadata.get( 'tools', [] )
- for tool_dict in tools_dicts:
- if tool_dict[ 'guid' ] == guid:
- # The tool has not changed between the compared changeset revisions.
- continue
- if tool_dict[ 'id' ] == old_id and tool_dict[ 'version' ] != version:
- # The tool version is different, so we've found the parent.
- return tool_dict[ 'guid' ]
- if parent_id is None:
- # The tool did not change through all of the changeset revisions.
- return old_id
def get_previous_downloadable_changset_revision( repository, repo, before_changeset_revision ):
"""
Return the downloadable changeset_revision in the repository changelog just prior to the changeset to which before_changeset_revision
@@ -542,9 +368,6 @@
for changeset in repo.changelog:
reversed_changelog.insert( 0, changeset )
return reversed_changelog
-def get_repository( trans, id ):
- """Get a repository from the database via id"""
- return trans.sa_session.query( trans.model.Repository ).get( trans.security.decode_id( id ) )
def get_repository_by_name( trans, name ):
"""Get a repository from the database via name"""
return trans.sa_session.query( trans.model.Repository ).filter_by( name=name ).one()
@@ -707,8 +530,6 @@
if previous_changeset_revision in reviewed_revision_hashes:
return True
return False
-def is_downloadable( metadata_dict ):
- return 'datatypes' in metadata_dict or 'tools' in metadata_dict or 'workflows' in metadata_dict
def load_tool_from_changeset_revision( trans, repository_id, changeset_revision, tool_config_filename ):
"""
Return a loaded tool whose tool config file name (e.g., filtering.xml) is the value of tool_config_filename. The value of changeset_revision
@@ -716,7 +537,7 @@
revision and the first changeset revision in the repository, searching backwards.
"""
original_tool_data_path = trans.app.config.tool_data_path
- repository = get_repository( trans, repository_id )
+ repository = get_repository_in_tool_shed( trans, repository_id )
repo_files_dir = repository.repo_path
repo = hg.repository( get_configured_ui(), repo_files_dir )
message = ''
@@ -802,134 +623,6 @@
return True
# The received metadata_dict includes no metadata for workflows, so a new repository_metadata table record is not needed.
return False
-def remove_dir( dir ):
- if os.path.exists( dir ):
- try:
- shutil.rmtree( dir )
- except:
- pass
-def reset_all_metadata_on_repository( trans, id, **kwd ):
- def reset_all_tool_versions( trans, id, repo ):
- changeset_revisions = []
- for changeset in repo.changelog:
- changeset_revision = str( repo.changectx( changeset ) )
- repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
- if repository_metadata:
- metadata = repository_metadata.metadata
- if metadata:
- if metadata.get( 'tools', None ):
- changeset_revisions.append( changeset_revision )
- # The list of changeset_revisions is now filtered to contain only those that are downloadable and contain tools.
- # If a repository includes tools, build a dictionary of { 'tool id' : 'parent tool id' } pairs for each tool in each changeset revision.
- for index, changeset_revision in enumerate( changeset_revisions ):
- tool_versions_dict = {}
- repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
- metadata = repository_metadata.metadata
- tool_dicts = metadata[ 'tools' ]
- if index == 0:
- # The first changset_revision is a special case because it will have no ancestor changeset_revisions in which to match tools.
- # The parent tool id for tools in the first changeset_revision will be the "old_id" in the tool config.
- for tool_dict in tool_dicts:
- tool_versions_dict[ tool_dict[ 'guid' ] ] = tool_dict[ 'id' ]
- else:
- for tool_dict in tool_dicts:
- parent_id = get_parent_id( trans,
- id,
- tool_dict[ 'id' ],
- tool_dict[ 'version' ],
- tool_dict[ 'guid' ],
- changeset_revisions[ 0:index ] )
- tool_versions_dict[ tool_dict[ 'guid' ] ] = parent_id
- if tool_versions_dict:
- repository_metadata.tool_versions = tool_versions_dict
- trans.sa_session.add( repository_metadata )
- trans.sa_session.flush()
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
- repository = get_repository( trans, id )
- log.debug( "Resetting all metadata on repository: %s" % repository.name )
- repo_dir = repository.repo_path
- repo = hg.repository( get_configured_ui(), repo_dir )
- repository_clone_url = generate_clone_url( trans, id )
- # The list of changeset_revisions refers to repository_metadata records that have been created or updated. When the following loop
- # completes, we'll delete all repository_metadata records for this repository that do not have a changeset_revision value in this list.
- changeset_revisions = []
- # When a new repository_metadata record is created, it always uses the values of metadata_changeset_revision and metadata_dict.
- metadata_changeset_revision = None
- metadata_dict = None
- ancestor_changeset_revision = None
- ancestor_metadata_dict = None
- invalid_file_tups = []
- home_dir = os.getcwd()
- for changeset in repo.changelog:
- work_dir = tempfile.mkdtemp()
- current_changeset_revision = str( repo.changectx( changeset ) )
- ctx = repo.changectx( changeset )
- log.debug( "Cloning repository revision: %s", str( ctx.rev() ) )
- cloned_ok, error_message = clone_repository( repository_clone_url, work_dir, str( ctx.rev() ) )
- if cloned_ok:
- log.debug( "Generating metadata for changset revision: %s", str( ctx.rev() ) )
- current_metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( app=trans.app,
- repository=repository,
- repository_clone_url=repository_clone_url,
- relative_install_dir=repo_dir,
- repository_files_dir=work_dir,
- resetting_all_metadata_on_repository=True,
- updating_installed_repository=False )
- if current_metadata_dict:
- if not metadata_changeset_revision and not metadata_dict:
- # We're at the first change set in the change log.
- metadata_changeset_revision = current_changeset_revision
- metadata_dict = current_metadata_dict
- if ancestor_changeset_revision:
- # Compare metadata from ancestor and current. The value of comparison will be one of:
- # 'no metadata' - no metadata for either ancestor or current, so continue from current
- # 'equal' - ancestor metadata is equivalent to current metadata, so continue from current
- # 'subset' - ancestor metadata is a subset of current metadata, so continue from current
- # 'not equal and not subset' - ancestor metadata is neither equal to nor a subset of current metadata, so persist ancestor metadata.
- comparison = compare_changeset_revisions( ancestor_changeset_revision,
- ancestor_metadata_dict,
- current_changeset_revision,
- current_metadata_dict )
- if comparison in [ 'no metadata', 'equal', 'subset' ]:
- ancestor_changeset_revision = current_changeset_revision
- ancestor_metadata_dict = current_metadata_dict
- elif comparison == 'not equal and not subset':
- metadata_changeset_revision = ancestor_changeset_revision
- metadata_dict = ancestor_metadata_dict
- repository_metadata = create_or_update_repository_metadata( trans, id, repository, metadata_changeset_revision, metadata_dict )
- changeset_revisions.append( metadata_changeset_revision )
- ancestor_changeset_revision = current_changeset_revision
- ancestor_metadata_dict = current_metadata_dict
- else:
- # We're at the beginning of the change log.
- ancestor_changeset_revision = current_changeset_revision
- ancestor_metadata_dict = current_metadata_dict
- if not ctx.children():
- metadata_changeset_revision = current_changeset_revision
- metadata_dict = current_metadata_dict
- # We're at the end of the change log.
- repository_metadata = create_or_update_repository_metadata( trans, id, repository, metadata_changeset_revision, metadata_dict )
- changeset_revisions.append( metadata_changeset_revision )
- ancestor_changeset_revision = None
- ancestor_metadata_dict = None
- elif ancestor_metadata_dict:
- # We reach here only if current_metadata_dict is empty and ancestor_metadata_dict is not.
- if not ctx.children():
- # We're at the end of the change log.
- repository_metadata = create_or_update_repository_metadata( trans, id, repository, metadata_changeset_revision, metadata_dict )
- changeset_revisions.append( metadata_changeset_revision )
- ancestor_changeset_revision = None
- ancestor_metadata_dict = None
- remove_dir( work_dir )
- # Delete all repository_metadata records for this repository that do not have a changeset_revision value in changeset_revisions.
- clean_repository_metadata( trans, id, changeset_revisions )
- # Set tool version information for all downloadable changeset revisions. Get the list of changeset revisions from the changelog.
- reset_all_tool_versions( trans, id, repo )
- # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
- reset_tool_data_tables( trans.app )
- return invalid_file_tups, metadata_dict
def set_repository_metadata( trans, repository, content_alert_str='', **kwd ):
"""
Set metadata using the repository's current disk files, returning specific error messages (if any) to alert the repository owner that the changeset
@@ -938,7 +631,7 @@
message = ''
status = 'done'
encoded_id = trans.security.encode_id( repository.id )
- repository_clone_url = generate_clone_url( trans, encoded_id )
+ repository_clone_url = generate_clone_url_for_repository_in_tool_shed( trans, repository )
repo_dir = repository.repo_path
repo = hg.repository( get_configured_ui(), repo_dir )
metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( app=trans.app,
diff -r b2975e2fa6844b230ca4a656ecdb82f9e6612815 -r 50b1d7a65bd2760699d9982d6e1ab60f2dbd665a lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -9,11 +9,14 @@
from galaxy.web.framework.helpers import time_ago, iff, grids
from galaxy.util.json import from_json_string, to_json_string
from galaxy.model.orm import *
-from galaxy.util.shed_util import create_repo_info_dict, get_changectx_for_changeset, get_configured_ui, get_file_from_changeset_revision
-from galaxy.util.shed_util import get_repository_file_contents, get_repository_metadata_by_changeset_revision, handle_sample_files_and_load_tool_from_disk
+# TODO: re-factor shed_util to eliminate the following restricted imports
+from galaxy.util.shed_util import create_repo_info_dict, generate_clone_url_for_repository_in_tool_shed, generate_message_for_invalid_tools
+from galaxy.util.shed_util import get_changectx_for_changeset, get_configured_ui, get_file_from_changeset_revision, get_repository_file_contents
+from galaxy.util.shed_util import get_repository_in_tool_shed, get_repository_metadata_by_changeset_revision, handle_sample_files_and_load_tool_from_disk
from galaxy.util.shed_util import handle_sample_files_and_load_tool_from_tmp_config, INITIAL_CHANGELOG_HASH, load_tool_from_config, NOT_TOOL_CONFIGS
-from galaxy.util.shed_util import open_repository_files_folder, reversed_lower_upper_bounded_changelog, reversed_upper_bounded_changelog, strip_path
-from galaxy.util.shed_util import to_html_escaped, update_repository, url_join
+from galaxy.util.shed_util import open_repository_files_folder, remove_dir, reset_all_metadata_on_repository_in_tool_shed
+from galaxy.util.shed_util import reversed_lower_upper_bounded_changelog, reversed_upper_bounded_changelog, strip_path, to_html_escaped
+from galaxy.util.shed_util import update_repository, url_join
from galaxy.tool_shed.encoding_util import *
from common import *
@@ -639,7 +642,7 @@
else:
# The received id is the repository id, so we need to get the id of the user that uploaded the repository.
repository_id = kwd.get( 'id', None )
- repository = get_repository( trans, repository_id )
+ repository = get_repository_in_tool_shed( trans, repository_id )
kwd[ 'f-email' ] = repository.user.email
elif operation == "repositories_i_own":
# Eliminate the current filters if any exist.
@@ -696,7 +699,7 @@
changset_revision_str = 'changeset_revision_'
if k.startswith( changset_revision_str ):
repository_id = trans.security.encode_id( int( k.lstrip( changset_revision_str ) ) )
- repository = get_repository( trans, repository_id )
+ repository = get_repository_in_tool_shed( trans, repository_id )
if repository.tip != v:
return trans.response.send_redirect( web.url_for( controller='repository',
action='browse_repositories',
@@ -710,7 +713,7 @@
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
commit_message = util.restore_text( params.get( 'commit_message', 'Deleted selected files' ) )
- repository = get_repository( trans, id )
+ repository = get_repository_in_tool_shed( trans, id )
repo = hg.repository( get_configured_ui(), repository.repo_path )
# Update repository files for browsing.
update_repository( repo )
@@ -771,7 +774,7 @@
operation = kwd[ 'operation' ].lower()
if operation == "preview_tools_in_changeset":
repository_id = kwd.get( 'id', None )
- repository = get_repository( trans, repository_id )
+ repository = get_repository_in_tool_shed( trans, repository_id )
repository_metadata = get_latest_repository_metadata( trans, repository.id )
latest_installable_changeset_revision = repository_metadata.changeset_revision
return trans.response.send_redirect( web.url_for( controller='repository',
@@ -795,7 +798,7 @@
changset_revision_str = 'changeset_revision_'
if k.startswith( changset_revision_str ):
repository_id = trans.security.encode_id( int( k.lstrip( changset_revision_str ) ) )
- repository = get_repository( trans, repository_id )
+ repository = get_repository_in_tool_shed( trans, repository_id )
if repository.tip != v:
return trans.response.send_redirect( web.url_for( controller='repository',
action='preview_tools_in_changeset',
@@ -922,7 +925,7 @@
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
- repository = get_repository( trans, id )
+ repository = get_repository_in_tool_shed( trans, id )
metadata = self.get_metadata( trans, id, repository.tip )
if trans.user and trans.user.email:
return trans.fill_template( "/webapps/community/repository/contact_owner.mako",
@@ -1033,7 +1036,7 @@
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
repository_id = params.get( 'id', None )
- repository = get_repository( trans, repository_id )
+ repository = get_repository_in_tool_shed( trans, repository_id )
mark_deprecated = util.string_as_bool( params.get( 'mark_deprecated', False ) )
repository.deprecated = mark_deprecated
trans.sa_session.add( repository )
@@ -1086,7 +1089,7 @@
def download( self, trans, repository_id, changeset_revision, file_type, **kwd ):
# Download an archive of the repository files compressed as zip, gz or bz2.
params = util.Params( kwd )
- repository = get_repository( trans, repository_id )
+ repository = get_repository_in_tool_shed( trans, repository_id )
# Allow hgweb to handle the download. This requires the tool shed
# server account's .hgrc file to include the following setting:
# [web]
@@ -1119,7 +1122,7 @@
# The received id is a RepositoryMetadata id, so we have to get the repository id.
repository_metadata = get_repository_metadata_by_id( trans, item_id )
repository_id = trans.security.encode_id( repository_metadata.repository.id )
- repository = get_repository( trans, repository_id )
+ repository = get_repository_in_tool_shed( trans, repository_id )
kwd[ 'id' ] = repository_id
kwd[ 'changeset_revision' ] = repository_metadata.changeset_revision
if trans.webapp.name == 'community' and ( is_admin or repository.user == trans.user ):
@@ -1204,7 +1207,7 @@
# The received id is a RepositoryMetadata id, so we have to get the repository id.
repository_metadata = get_repository_metadata_by_id( trans, item_id )
repository_id = trans.security.encode_id( repository_metadata.repository.id )
- repository = get_repository( trans, repository_id )
+ repository = get_repository_in_tool_shed( trans, repository_id )
kwd[ 'id' ] = repository_id
kwd[ 'changeset_revision' ] = repository_metadata.changeset_revision
if trans.webapp.name == 'community' and ( is_admin or repository.user == trans.user ):
@@ -1309,8 +1312,8 @@
repo_info_dicts = []
for tup in zip( util.listify( repository_ids ), util.listify( changeset_revisions ) ):
repository_id, changeset_revision = tup
- repository_clone_url = generate_clone_url( trans, repository_id )
- repository = get_repository( trans, repository_id )
+ repository = get_repository_in_tool_shed( trans, repository_id )
+ repository_clone_url = generate_clone_url_for_repository_in_tool_shed( trans, repository )
repository_metadata = get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision )
metadata = repository_metadata.metadata
if not includes_tools and 'tools' in metadata:
@@ -1668,7 +1671,7 @@
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
cntrller = params.get( 'cntrller', 'repository' )
- repository = get_repository( trans, id )
+ repository = get_repository_in_tool_shed( trans, id )
repo_dir = repository.repo_path
repo = hg.repository( get_configured_ui(), repo_dir )
repo_name = util.restore_text( params.get( 'repo_name', repository.name ) )
@@ -1892,7 +1895,7 @@
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
- repository = get_repository( trans, repository_id )
+ repository = get_repository_in_tool_shed( trans, repository_id )
changeset_revision = util.restore_text( params.get( 'changeset_revision', repository.tip ) )
repository_metadata = get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision )
if repository_metadata:
@@ -1957,7 +1960,7 @@
action='browse_repositories',
message='Select a repository to rate',
status='error' ) )
- repository = get_repository( trans, id )
+ repository = get_repository_in_tool_shed( trans, id )
repo = hg.repository( get_configured_ui(), repository.repo_path )
if repository.user == trans.user:
return trans.response.send_redirect( web.url_for( controller='repository',
@@ -1985,9 +1988,11 @@
status=status )
@web.expose
def reset_all_metadata( self, trans, id, **kwd ):
- invalid_file_tups, metadata_dict = reset_all_metadata_on_repository( trans, id, **kwd )
+ # This method is called only from the ~/templates/webapps/community/repository/manage_repository.mako template.
+ # It resets all metadata on the complete changelog for a single repository in the tool shed.
+ invalid_file_tups, metadata_dict = reset_all_metadata_on_repository_in_tool_shed( trans, id, **kwd )
if invalid_file_tups:
- repository = get_repository( trans, id )
+ repository = get_repository_in_tool_shed( trans, id )
message = generate_message_for_invalid_tools( invalid_file_tups, repository, metadata_dict )
status = 'error'
else:
@@ -2093,7 +2098,7 @@
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
commit_message = util.restore_text( params.get( 'commit_message', 'Deleted selected files' ) )
- repository = get_repository( trans, id )
+ repository = get_repository_in_tool_shed( trans, id )
repo_dir = repository.repo_path
repo = hg.repository( get_configured_ui(), repo_dir )
selected_files_to_delete = util.restore_text( params.get( 'selected_files_to_delete', '' ) )
@@ -2155,7 +2160,7 @@
status=status )
@web.expose
def send_to_owner( self, trans, id, message='' ):
- repository = get_repository( trans, id )
+ repository = get_repository_in_tool_shed( trans, id )
if not message:
message = 'Enter a message'
status = 'error'
@@ -2205,7 +2210,7 @@
total_alerts_removed = 0
flush_needed = False
for repository_id in repository_ids:
- repository = get_repository( trans, repository_id )
+ repository = get_repository_in_tool_shed( trans, repository_id )
if repository.email_alerts:
email_alerts = from_json_string( repository.email_alerts )
else:
@@ -2277,7 +2282,7 @@
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
- repository = get_repository( trans, id )
+ repository = get_repository_in_tool_shed( trans, id )
repo = hg.repository( get_configured_ui(), repository.repo_path )
changesets = []
for changeset in repo.changelog:
@@ -2314,7 +2319,7 @@
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
- repository = get_repository( trans, id )
+ repository = get_repository_in_tool_shed( trans, id )
repo = hg.repository( get_configured_ui(), repository.repo_path )
ctx = get_changectx_for_changeset( repo, ctx_str )
if ctx is None:
@@ -2351,7 +2356,7 @@
status=status )
@web.expose
def view_or_manage_repository( self, trans, **kwd ):
- repository = get_repository( trans, kwd[ 'id' ] )
+ repository = get_repository_in_tool_shed( trans, kwd[ 'id' ] )
if trans.user_is_admin() or repository.user == trans.user:
return trans.response.send_redirect( web.url_for( controller='repository',
action='manage_repository',
@@ -2366,7 +2371,7 @@
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
cntrller = params.get( 'cntrller', 'repository' )
- repository = get_repository( trans, id )
+ repository = get_repository_in_tool_shed( trans, id )
repository_metadata = get_repository_metadata_by_changeset_revision( trans, trans.security.encode_id( repository.id ), changeset_revision )
if repository_metadata:
metadata = repository_metadata.metadata
@@ -2417,7 +2422,7 @@
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
cntrller = params.get( 'cntrller', 'repository' )
- repository = get_repository( trans, id )
+ repository = get_repository_in_tool_shed( trans, id )
repo = hg.repository( get_configured_ui(), repository.repo_path )
avg_rating, num_ratings = self.get_ave_item_rating_data( trans.sa_session, repository, webapp_model=trans.model )
changeset_revision = util.restore_text( params.get( 'changeset_revision', repository.tip ) )
@@ -2499,7 +2504,7 @@
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
- repository = get_repository( trans, repository_id )
+ repository = get_repository_in_tool_shed( trans, repository_id )
repo_files_dir = repository.repo_path
repo = hg.repository( get_configured_ui(), repo_files_dir )
tool_metadata_dict = {}
diff -r b2975e2fa6844b230ca4a656ecdb82f9e6612815 -r 50b1d7a65bd2760699d9982d6e1ab60f2dbd665a lib/galaxy/webapps/community/controllers/repository_review.py
--- a/lib/galaxy/webapps/community/controllers/repository_review.py
+++ b/lib/galaxy/webapps/community/controllers/repository_review.py
@@ -8,7 +8,8 @@
from sqlalchemy.sql.expression import func
from common import *
from repository import RepositoryGrid
-from galaxy.util.shed_util import get_configured_ui
+# TODO: re-factor shed_util to eliminate the following restricted imports
+from galaxy.util.shed_util import get_configured_ui, get_repository_in_tool_shed
from galaxy.util.odict import odict
from galaxy import eggs
@@ -381,7 +382,7 @@
message = "You have already created a review for revision <b>%s</b> of repository <b>%s</b>." % ( changeset_revision, repository.name )
status = "error"
else:
- repository = get_repository( trans, repository_id )
+ repository = get_repository_in_tool_shed( trans, repository_id )
# See if there are any reviews for previous changeset revisions that the user can copy.
if not create_without_copying and not previous_review_id and has_previous_repository_reviews( trans, repository, changeset_revision ):
return trans.response.send_redirect( web.url_for( controller='repository_review',
@@ -650,7 +651,7 @@
status = params.get( 'status', 'done' )
repository_id = kwd.get( 'id', None )
if repository_id:
- repository = get_repository( trans, repository_id )
+ repository = get_repository_in_tool_shed( trans, repository_id )
repo_dir = repository.repo_path
repo = hg.repository( get_configured_ui(), repo_dir )
metadata_revision_hashes = [ metadata_revision.changeset_revision for metadata_revision in repository.metadata_revisions ]
@@ -697,7 +698,7 @@
status = params.get( 'status', 'done' )
repository_id = kwd.get( 'id', None )
changeset_revision = kwd.get( 'changeset_revision', None )
- repository = get_repository( trans, repository_id )
+ repository = get_repository_in_tool_shed( trans, repository_id )
repo_dir = repository.repo_path
repo = hg.repository( get_configured_ui(), repo_dir )
installable = changeset_revision in [ metadata_revision.changeset_revision for metadata_revision in repository.metadata_revisions ]
@@ -762,7 +763,7 @@
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
- repository = get_repository( trans, kwd[ 'id' ] )
+ repository = get_repository_in_tool_shed( trans, kwd[ 'id' ] )
changeset_revision = kwd.get( 'changeset_revision', None )
repo = hg.repository( get_configured_ui(), repository.repo_path )
previous_reviews_dict = get_previous_repository_reviews( trans, repository, changeset_revision )
@@ -777,7 +778,7 @@
@web.expose
@web.require_login( "view or manage repository" )
def view_or_manage_repository( self, trans, **kwd ):
- repository = get_repository( trans, kwd[ 'id' ] )
+ repository = get_repository_in_tool_shed( trans, kwd[ 'id' ] )
if trans.user_is_admin() or repository.user == trans.user:
return trans.response.send_redirect( web.url_for( controller='repository',
action='manage_repository',
diff -r b2975e2fa6844b230ca4a656ecdb82f9e6612815 -r 50b1d7a65bd2760699d9982d6e1ab60f2dbd665a lib/galaxy/webapps/community/controllers/upload.py
--- a/lib/galaxy/webapps/community/controllers/upload.py
+++ b/lib/galaxy/webapps/community/controllers/upload.py
@@ -3,7 +3,9 @@
from galaxy.model.orm import *
from galaxy.datatypes.checkers import *
from common import *
-from galaxy.util.shed_util import get_configured_ui, reset_tool_data_tables, handle_sample_tool_data_table_conf_file, update_repository
+# TODO: re-factor shed_util to eliminate the following restricted imports
+from galaxy.util.shed_util import get_configured_ui, get_repository_in_tool_shed, reset_tool_data_tables, handle_sample_tool_data_table_conf_file
+from galaxy.util.shed_util import update_repository
from galaxy import eggs
eggs.require('mercurial')
@@ -28,7 +30,7 @@
category_ids = util.listify( params.get( 'category_id', '' ) )
categories = get_categories( trans )
repository_id = params.get( 'repository_id', '' )
- repository = get_repository( trans, repository_id )
+ repository = get_repository_in_tool_shed( trans, repository_id )
repo_dir = repository.repo_path
repo = hg.repository( get_configured_ui(), repo_dir )
uncompress_file = util.string_as_bool( params.get( 'uncompress_file', 'true' ) )
diff -r b2975e2fa6844b230ca4a656ecdb82f9e6612815 -r 50b1d7a65bd2760699d9982d6e1ab60f2dbd665a lib/galaxy/webapps/community/controllers/workflow.py
--- a/lib/galaxy/webapps/community/controllers/workflow.py
+++ b/lib/galaxy/webapps/community/controllers/workflow.py
@@ -10,6 +10,8 @@
from galaxy.webapps.galaxy.controllers.workflow import attach_ordered_steps
from galaxy.model.orm import *
from common import *
+# TODO: re-factor shed_util to eliminate the following restricted imports
+from galaxy.util.shed_util import get_repository_in_tool_shed
from galaxy.tool_shed.encoding_util import *
class RepoInputDataModule( InputDataModule ):
@@ -144,7 +146,7 @@
message = kwd.get( 'message', '' )
status = kwd.get( 'status', 'done' )
repository_metadata = get_repository_metadata_by_id( trans, repository_metadata_id )
- repository = get_repository( trans, trans.security.encode_id( repository_metadata.repository_id ) )
+ repository = get_repository_in_tool_shed( trans, trans.security.encode_id( repository_metadata.repository_id ) )
return trans.fill_template( "/webapps/community/repository/view_workflow.mako",
repository=repository,
changeset_revision=repository_metadata.changeset_revision,
diff -r b2975e2fa6844b230ca4a656ecdb82f9e6612815 -r 50b1d7a65bd2760699d9982d6e1ab60f2dbd665a lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -10,8 +10,6 @@
log = logging.getLogger( __name__ )
-MAX_CONTENT_SIZE = 32768
-
class InstalledRepositoryGrid( grids.Grid ):
class NameColumn( grids.TextColumn ):
def get_value( self, trans, grid, tool_shed_repository ):
@@ -300,7 +298,7 @@
return query
class AdminToolshed( AdminGalaxy ):
-
+
installed_repository_grid = InstalledRepositoryGrid()
repository_installation_grid = RepositoryInstallationGrid()
tool_dependency_grid = ToolDependencyGrid()
@@ -309,9 +307,9 @@
@web.require_admin
def activate_repository( self, trans, **kwd ):
"""Activate a repository that was deactivated but not uninstalled."""
- repository = get_repository( trans, kwd[ 'id' ] )
+ repository = get_installed_tool_shed_repository( trans, kwd[ 'id' ] )
shed_tool_conf, tool_path, relative_install_dir = get_tool_panel_config_tool_path_install_dir( trans.app, repository )
- repository_clone_url = self.__generate_clone_url( trans, repository )
+ repository_clone_url = generate_clone_url_for_installed_repository( trans, repository )
repository.deleted = False
repository.status = trans.model.ToolShedRepository.installation_status.INSTALLED
if repository.includes_tools:
@@ -356,7 +354,7 @@
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
- repository = get_repository( trans, kwd[ 'id' ] )
+ repository = get_installed_tool_shed_repository( trans, kwd[ 'id' ] )
return trans.fill_template( '/admin/tool_shed_repository/browse_repository.mako',
repository=repository,
message=message,
@@ -380,7 +378,7 @@
action='reset_to_install',
**kwd ) )
if operation == "activate or reinstall":
- repository = get_repository( trans, kwd[ 'id' ] )
+ repository = get_installed_tool_shed_repository( trans, kwd[ 'id' ] )
if repository.uninstalled:
if repository.includes_tools:
# Only allow selecting a different section in the tool panel if the repository was uninstalled.
@@ -440,7 +438,7 @@
@web.require_admin
def check_for_updates( self, trans, **kwd ):
# Send a request to the relevant tool shed to see if there are any updates.
- repository = get_repository( trans, kwd[ 'id' ] )
+ repository = get_installed_tool_shed_repository( trans, kwd[ 'id' ] )
tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository )
url = url_join( tool_shed_url,
'repository/check_for_updates?galaxy_url=%s&name=%s&owner=%s&changeset_revision=%s' % \
@@ -454,7 +452,7 @@
status = params.get( 'status', 'done' )
remove_from_disk = params.get( 'remove_from_disk', '' )
remove_from_disk_checked = CheckboxField.is_checked( remove_from_disk )
- tool_shed_repository = get_repository( trans, kwd[ 'id' ] )
+ tool_shed_repository = get_installed_tool_shed_repository( trans, kwd[ 'id' ] )
shed_tool_conf, tool_path, relative_install_dir = get_tool_panel_config_tool_path_install_dir( trans.app, tool_shed_repository )
if relative_install_dir:
if tool_path:
@@ -819,7 +817,7 @@
status = params.get( 'status', 'done' )
repository_id = kwd[ 'id' ]
operation = kwd.get( 'operation', None )
- repository = get_repository( trans, repository_id )
+ repository = get_installed_tool_shed_repository( trans, repository_id )
if not repository:
return trans.show_error_message( 'Invalid repository specified.' )
if repository.status in [ trans.model.ToolShedRepository.installation_status.CLONING ]:
@@ -1237,14 +1235,14 @@
message = kwd.get( 'message', '' )
status = kwd.get( 'status', 'done' )
repository_id = kwd[ 'id' ]
- tool_shed_repository = get_repository( trans, repository_id )
+ tool_shed_repository = get_installed_tool_shed_repository( trans, repository_id )
no_changes = kwd.get( 'no_changes', '' )
no_changes_checked = CheckboxField.is_checked( no_changes )
install_tool_dependencies = CheckboxField.is_checked( kwd.get( 'install_tool_dependencies', '' ) )
new_tool_panel_section = kwd.get( 'new_tool_panel_section', '' )
tool_panel_section = kwd.get( 'tool_panel_section', '' )
shed_tool_conf, tool_path, relative_install_dir = get_tool_panel_config_tool_path_install_dir( trans.app, tool_shed_repository )
- repository_clone_url = generate_clone_url( trans, tool_shed_repository )
+ repository_clone_url = generate_clone_url_for_installed_repository( trans, tool_shed_repository )
clone_dir = os.path.join( tool_path, self.generate_tool_path( repository_clone_url, tool_shed_repository.installed_changeset_revision ) )
relative_install_dir = os.path.join( clone_dir, tool_shed_repository.name )
tool_shed_url = get_url_from_repository_tool_shed( trans.app, tool_shed_repository )
@@ -1395,11 +1393,11 @@
@web.expose
@web.require_admin
def reselect_tool_panel_section( self, trans, **kwd ):
- repository = get_repository( trans, kwd[ 'id' ] )
+ repository = get_installed_tool_shed_repository( trans, kwd[ 'id' ] )
metadata = repository.metadata
tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository )
ctx_rev = get_ctx_rev( tool_shed_url, repository.name, repository.owner, repository.installed_changeset_revision )
- repository_clone_url = generate_clone_url( trans, repository )
+ repository_clone_url = generate_clone_url_for_installed_repository( trans, repository )
repo_info_dict = create_repo_info_dict( repository,
repository.owner,
repository_clone_url,
@@ -1460,63 +1458,25 @@
status=status )
@web.expose
@web.require_admin
- def reset_metadata_on_selected_repositories( self, trans, **kwd ):
- # TODO: merge this with the similar method in the repository controller.
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
- repository_ids = util.listify( kwd.get( 'repository_names_by_owner', None ) )
+ def reset_metadata_on_selected_installed_repositories( self, trans, **kwd ):
if 'reset_metadata_on_selected_repositories_button' in kwd:
- if repository_ids:
- successful_count = 0
- unsuccessful_count = 0
- for repository_id in repository_ids:
- repository = get_repository( trans, repository_id )
- try:
- invalid_file_tups, metadata_dict = self.reset_repository_metadata( trans,
- trans.security.encode_id( repository.id ),
- resetting_all_repositories=True )
- if invalid_file_tups:
- unsuccessful_count += 1
- else:
- successful_count += 1
- except Exception, e:
- log.debug( "Error attempting to reset metadata on repository '%s': %s" % ( repository.name, str( e ) ) )
- unsuccessful_count += 1
- message = "Successfully reset metadata on %d %s. " % ( successful_count,
- inflector.cond_plural( successful_count, "repository" ) )
- if unsuccessful_count:
- message += "Error setting metadata on %d %s - see the paster log for details. " % ( unsuccessful_count,
- inflector.cond_plural( unsuccessful_count,
- "repository" ) )
- trans.response.send_redirect( web.url_for( controller='admin_toolshed',
- action='browse_repositories',
- message=util.sanitize_text( message ),
- status=status ) )
- else:
- 'Select at least one repository to on which to reset all metadata.'
- status = 'error'
- repositories_select_field = SelectField( name='repository_names_by_owner',
- multiple=True,
- display='checkboxes' )
- for repository in trans.sa_session.query( trans.model.ToolShedRepository ) \
- .filter( trans.model.ToolShedRepository.table.c.uninstalled == False ) \
- .order_by( trans.model.ToolShedRepository.table.c.name,
- trans.model.ToolShedRepository.table.c.owner ):
- option_label = '%s (%s)' % ( repository.name, repository.owner )
- option_value = trans.security.encode_id( repository.id )
- repositories_select_field.add_option( option_label, option_value )
+ kwd[ 'CONTROLLER' ] = GALAXY_ADMIN_TOOL_SHED_CONTROLLER
+ message, status = reset_metadata_on_selected_repositories( trans, **kwd )
+ else:
+ message = util.restore_text( kwd.get( 'message', '' ) )
+ status = kwd.get( 'status', 'done' )
+ repositories_select_field = build_repository_ids_select_field( trans, GALAXY_ADMIN_TOOL_SHED_CONTROLLER )
return trans.fill_template( '/admin/tool_shed_repository/reset_metadata_on_selected_repositories.mako',
repositories_select_field=repositories_select_field,
message=message,
status=status )
@web.expose
@web.require_admin
- def reset_repository_metadata( self, trans, id, resetting_all_repositories=False ):
- """Reset all metadata on the installed tool shed repository."""
- repository = get_repository( trans, id )
+ def reset_repository_metadata( self, trans, id ):
+ """Reset all metadata on a single installed tool shed repository."""
+ repository = get_installed_tool_shed_repository( trans, id )
tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository )
- repository_clone_url = self.__generate_clone_url( trans, repository )
+ repository_clone_url = generate_clone_url_for_installed_repository( trans, repository )
tool_path, relative_install_dir = repository.get_tool_relative_path( trans.app )
if relative_install_dir:
original_metadata_dict = repository.metadata
@@ -1533,35 +1493,24 @@
update_in_shed_tool_config( trans.app, repository )
trans.sa_session.add( repository )
trans.sa_session.flush()
- if resetting_all_repositories:
- log.debug( 'Metadata has been reset on repository %s.' % repository.name )
- else:
- message = 'Metadata has been reset on repository <b>%s</b>.' % repository.name
- status = 'done'
+ message = 'Metadata has been reset on repository <b>%s</b>.' % repository.name
+ status = 'done'
else:
- if resetting_all_repositories:
- log.debug( 'Metadata did not need to be reset on repository %s.' % repository.name )
- else:
- message = 'Metadata did not need to be reset on repository <b>%s</b>.' % repository.name
- status = 'done'
+ message = 'Metadata did not need to be reset on repository <b>%s</b>.' % repository.name
+ status = 'done'
else:
- if resetting_all_repositories:
- log.debug( 'Error locating installation directory for repository %s.' % repository.name )
- else:
- message = 'Error locating installation directory for repository <b>%s</b>.' % repository.name
- status = 'error'
- if resetting_all_repositories:
- return invalid_file_tups, metadata_dict
- else:
- new_kwd = dict( id=id, message=message, status=status )
- return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
- action='manage_repository',
- **new_kwd ) )
+ message = 'Error locating installation directory for repository <b>%s</b>.' % repository.name
+ status = 'error'
+ return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
+ action='manage_repository',
+ id=id,
+ message=message,
+ status=status ) )
@web.expose
@web.require_admin
def reset_to_install( self, trans, **kwd ):
"""An error occurred while cloning the repository, so reset everything necessary to enable another attempt."""
- repository = get_repository( trans, kwd[ 'id' ] )
+ repository = get_installed_tool_shed_repository( trans, kwd[ 'id' ] )
if kwd.get( 'reset_repository', False ):
self.set_repository_attributes( trans,
repository,
@@ -1596,7 +1545,7 @@
@web.require_admin
def set_tool_versions( self, trans, **kwd ):
# Get the tool_versions from the tool shed for each tool in the installed change set.
- repository = get_repository( trans, kwd[ 'id' ] )
+ repository = get_installed_tool_shed_repository( trans, kwd[ 'id' ] )
tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository )
url = url_join( tool_shed_url,
'repository/get_tool_versions?name=%s&owner=%s&changeset_revision=%s' % \
@@ -1756,7 +1705,7 @@
message = util.restore_text( params.get( 'message', '' ) )
cntrller = params.get( 'cntrller', 'admin_toolshed' )
status = params.get( 'status', 'done' )
- repository = get_repository( trans, id )
+ repository = get_installed_tool_shed_repository( trans, id )
metadata = repository.metadata
shed_config_dict = repository.get_shed_config_dict( trans.app )
tool_path = shed_config_dict.get( 'tool_path', None )
@@ -1790,7 +1739,7 @@
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
- repository = get_repository( trans, repository_id )
+ repository = get_installed_tool_shed_repository( trans, repository_id )
repository_metadata = repository.metadata
shed_config_dict = repository.get_shed_config_dict( trans.app )
tool_metadata = {}
@@ -1815,10 +1764,6 @@
tool_lineage=tool_lineage,
message=message,
status=status )
- def __generate_clone_url( self, trans, repository ):
- """Generate the URL for cloning a repository."""
- tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository )
- return url_join( tool_shed_url, 'repos', repository.owner, repository.name )
## ---- Utility methods -------------------------------------------------------
@@ -1847,9 +1792,6 @@
for option_tup in options:
select_field.add_option( option_tup[0], option_tup[1] )
return select_field
-def get_repository( trans, id ):
- """Get a tool_shed_repository from the database via id"""
- return trans.sa_session.query( trans.model.ToolShedRepository ).get( trans.security.decode_id( id ) )
def get_tool_dependency( trans, id ):
"""Get a tool_dependency from the database via id"""
return trans.sa_session.query( trans.model.ToolDependency ).get( trans.security.decode_id( id ) )
diff -r b2975e2fa6844b230ca4a656ecdb82f9e6612815 -r 50b1d7a65bd2760699d9982d6e1ab60f2dbd665a templates/admin/tool_shed_repository/reset_metadata_on_selected_repositories.mako
--- a/templates/admin/tool_shed_repository/reset_metadata_on_selected_repositories.mako
+++ b/templates/admin/tool_shed_repository/reset_metadata_on_selected_repositories.mako
@@ -1,42 +1,10 @@
<%inherit file="/base.mako"/><%namespace file="/message.mako" import="render_msg" />
-
-<%def name="local_javascripts()">
- <script type="text/javascript">
- function checkAllFields()
- {
- var chkAll = document.getElementById('checkAll');
- var checks = document.getElementsByTagName('input');
- var boxLength = checks.length;
- var allChecked = false;
- var totalChecked = 0;
- if ( chkAll.checked == true )
- {
- for ( i=0; i < boxLength; i++ )
- {
- if ( checks[i].name.indexOf( 'repository_names_by_owner' ) != -1)
- {
- checks[i].checked = true;
- }
- }
- }
- else
- {
- for ( i=0; i < boxLength; i++ )
- {
- if ( checks[i].name.indexOf( 'repository_names_by_owner' ) != -1)
- {
- checks[i].checked = false
- }
- }
- }
- }
- </script>
-</%def>
+<%namespace file="/webapps/community/common/common.mako" import="common_misc_javascripts" /><%def name="javascripts()">
${parent.javascripts()}
- ${local_javascripts()}
+ ${common_misc_javascripts()}
</%def>
%if message:
@@ -50,13 +18,13 @@
<div class="toolForm"><div class="toolFormTitle">Reset all metadata on each selected tool shed repository</div>
- <form name="reset_metadata_on_selected_repositories" id="reset_metadata_on_selected_repositories" action="${h.url_for( controller='admin_toolshed', action='reset_metadata_on_selected_repositories' )}" method="post" >
+ <form name="reset_metadata_on_selected_repositories" id="reset_metadata_on_selected_repositories" action="${h.url_for( controller='admin_toolshed', action='reset_metadata_on_selected_installed_repositories' )}" method="post" ><div class="form-row">
Check each repository for which you want to reset metadata. Repository names are followed by owners in parentheses.
</div><div style="clear: both"></div><div class="form-row">
- <input type="checkbox" id="checkAll" name=select_all_repositories_checkbox value="true" onclick='checkAllFields(1);'/><input type="hidden" name=select_all_repositories_checkbox value="true"/><b>Select/unselect all repositories</b>
+ <input type="checkbox" id="checkAll" name="select_all_repositories_checkbox" value="true" onclick="checkAllFields('repository_ids');"/><input type="hidden" name="select_all_repositories_checkbox" value="true"/><b>Select/unselect all repositories</b></div><div style="clear: both"></div><div class="form-row">
diff -r b2975e2fa6844b230ca4a656ecdb82f9e6612815 -r 50b1d7a65bd2760699d9982d6e1ab60f2dbd665a templates/webapps/community/admin/index.mako
--- a/templates/webapps/community/admin/index.mako
+++ b/templates/webapps/community/admin/index.mako
@@ -55,7 +55,7 @@
<a target="galaxy_main" href="${h.url_for( controller='admin', action='browse_repositories' )}">Browse all repositories</a></div><div class="toolTitle">
- <a target="galaxy_main" href="${h.url_for( controller='admin', action='reset_metadata_on_selected_repositories' )}">Reset selected metadata</a>
+ <a target="galaxy_main" href="${h.url_for( controller='admin', action='reset_metadata_on_selected_repositories_in_tool_shed' )}">Reset selected metadata</a></div><div class="toolTitle"><a target="galaxy_main" href="${h.url_for( controller='admin', action='browse_repository_metadata' )}">Browse metadata</a>
diff -r b2975e2fa6844b230ca4a656ecdb82f9e6612815 -r 50b1d7a65bd2760699d9982d6e1ab60f2dbd665a templates/webapps/community/admin/reset_metadata_on_selected_repositories.mako
--- a/templates/webapps/community/admin/reset_metadata_on_selected_repositories.mako
+++ b/templates/webapps/community/admin/reset_metadata_on_selected_repositories.mako
@@ -1,42 +1,10 @@
<%inherit file="/base.mako"/><%namespace file="/message.mako" import="render_msg" />
-
-<%def name="local_javascripts()">
- <script type="text/javascript">
- function checkAllFields()
- {
- var chkAll = document.getElementById('checkAll');
- var checks = document.getElementsByTagName('input');
- var boxLength = checks.length;
- var allChecked = false;
- var totalChecked = 0;
- if ( chkAll.checked == true )
- {
- for ( i=0; i < boxLength; i++ )
- {
- if ( checks[i].name.indexOf( 'repository_names_by_owner' ) != -1)
- {
- checks[i].checked = true;
- }
- }
- }
- else
- {
- for ( i=0; i < boxLength; i++ )
- {
- if ( checks[i].name.indexOf( 'repository_names_by_owner' ) != -1)
- {
- checks[i].checked = false
- }
- }
- }
- }
- </script>
-</%def>
+<%namespace file="/webapps/community/common/common.mako" import="common_misc_javascripts" /><%def name="javascripts()">
${parent.javascripts()}
- ${local_javascripts()}
+ ${common_misc_javascripts()}
</%def>
%if message:
@@ -51,13 +19,13 @@
<div class="toolForm"><div class="toolFormTitle">Reset all metadata on each selected repository</div>
- <form name="reset_metadata_on_selected_repositories" id="reset_metadata_on_selected_repositories" action="${h.url_for( controller='admin', action='reset_metadata_on_selected_repositories' )}" method="post" >
+ <form name="reset_metadata_on_selected_repositories" id="reset_metadata_on_selected_repositories" action="${h.url_for( controller='admin', action='reset_metadata_on_selected_repositories_in_tool_shed' )}" method="post" ><div class="form-row">
Check each repository for which you want to reset metadata. Repository names are followed by owners in parentheses.
</div><div style="clear: both"></div><div class="form-row">
- <input type="checkbox" id="checkAll" name=select_all_repositories_checkbox value="true" onclick='checkAllFields(1);'/><input type="hidden" name=select_all_repositories_checkbox value="true"/><b>Select/unselect all repositories</b>
+ <input type="checkbox" id="checkAll" name="select_all_repositories_checkbox" value="true" onclick="checkAllFields('repository_ids');"/><input type="hidden" name="select_all_repositories_checkbox" value="true"/><b>Select/unselect all repositories</b></div><div style="clear: both"></div><div class="form-row">
diff -r b2975e2fa6844b230ca4a656ecdb82f9e6612815 -r 50b1d7a65bd2760699d9982d6e1ab60f2dbd665a templates/webapps/community/common/common.mako
--- a/templates/webapps/community/common/common.mako
+++ b/templates/webapps/community/common/common.mako
@@ -1,3 +1,36 @@
+<%def name="common_misc_javascripts()">
+ <script type="text/javascript">
+ function checkAllFields( name )
+ {
+ var chkAll = document.getElementById( 'checkAll' );
+ var checks = document.getElementsByTagName( 'input' );
+ var boxLength = checks.length;
+ var allChecked = false;
+ var totalChecked = 0;
+ if ( chkAll.checked == true )
+ {
+ for ( i=0; i < boxLength; i++ )
+ {
+ if ( checks[i].name.indexOf( name ) != -1 )
+ {
+ checks[i].checked = true;
+ }
+ }
+ }
+ else
+ {
+ for ( i=0; i < boxLength; i++ )
+ {
+ if ( checks[i].name.indexOf( name ) != -1 )
+ {
+ checks[i].checked = false
+ }
+ }
+ }
+ }
+ </script>
+</%def>
+
<%def name="escape_html_add_breaks( value )"><%
from galaxy import eggs
diff -r b2975e2fa6844b230ca4a656ecdb82f9e6612815 -r 50b1d7a65bd2760699d9982d6e1ab60f2dbd665a templates/webapps/community/repository/common.mako
--- a/templates/webapps/community/repository/common.mako
+++ b/templates/webapps/community/repository/common.mako
@@ -77,8 +77,8 @@
<%def name="render_clone_str( repository )"><%
- from galaxy.webapps.community.controllers.common import generate_clone_url
- clone_str = generate_clone_url( trans, trans.security.encode_id( repository.id ) )
+ from galaxy.util.shed_util import generate_clone_url_for_repository_in_tool_shed
+ clone_str = generate_clone_url_for_repository_in_tool_shed( trans, repository )
%>
hg clone <a href="${clone_str}">${clone_str}</a></%def>
diff -r b2975e2fa6844b230ca4a656ecdb82f9e6612815 -r 50b1d7a65bd2760699d9982d6e1ab60f2dbd665a templates/webapps/galaxy/admin/index.mako
--- a/templates/webapps/galaxy/admin/index.mako
+++ b/templates/webapps/galaxy/admin/index.mako
@@ -75,7 +75,7 @@
<div class="toolTitle"><a href="${h.url_for( controller='admin_toolshed', action='monitor_repository_installation', tool_shed_repository_ids=installing_repository_ids )}" target="galaxy_main">Monitor installing tool shed repositories</a></div>
%endif
%if installed_repositories:
- <div class="toolTitle"><a href="${h.url_for( controller='admin_toolshed', action='reset_metadata_on_selected_repositories' )}" target="galaxy_main">Reset metadata for tool shed repositories</a></div>
+ <div class="toolTitle"><a href="${h.url_for( controller='admin_toolshed', action='reset_metadata_on_selected_installed_repositories' )}" target="galaxy_main">Reset metadata for tool shed repositories</a></div><div class="toolTitle"><a href="${h.url_for( controller='admin_toolshed', action='browse_repositories' )}" target="galaxy_main">Manage installed tool shed repositories</a></div>
%endif
</div>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
26 Oct '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/b2975e2fa684/
changeset: b2975e2fa684
user: natefoo
date: 2012-10-26 18:56:23
summary: Download eggs from Galaxy resources in EC2.
affected #: 1 file
diff -r bfbd456ebbbe7cff89ffda39091d37ffdbdf2d2a -r b2975e2fa6844b230ca4a656ecdb82f9e6612815 eggs.ini
--- a/eggs.ini
+++ b/eggs.ini
@@ -7,7 +7,7 @@
;
[general]
-repository = http://eggs.g2.bx.psu.edu
+repository = http://eggs.galaxyproject.org
; these eggs must be scrambled for your local environment
no_auto = pbs_python DRMAA_python
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: natefoo: Log the underlying cause of job output collection failure in the drmaa runner.
by Bitbucket 26 Oct '12
by Bitbucket 26 Oct '12
26 Oct '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/bfbd456ebbbe/
changeset: bfbd456ebbbe
user: natefoo
date: 2012-10-26 16:55:46
summary: Log the underlying cause of job output collection failure in the drmaa runner.
affected #: 1 file
diff -r 720b4067ab5e5b617569278155ab21216c0de176 -r bfbd456ebbbe7cff89ffda39091d37ffdbdf2d2a lib/galaxy/jobs/runners/drmaa.py
--- a/lib/galaxy/jobs/runners/drmaa.py
+++ b/lib/galaxy/jobs/runners/drmaa.py
@@ -353,11 +353,11 @@
# The exit code should only be 8 bits, but read more anyway
exit_code_str = ecfh.read(32)
which_try = (self.app.config.retry_job_output_collection + 1)
- except:
+ except Exception, e:
if which_try == self.app.config.retry_job_output_collection:
stdout = ''
stderr = 'Job output not returned from cluster'
- log.debug( stderr )
+ log.warning( '%s: %s' % ( stderr, str( e ) ) )
else:
time.sleep(1)
which_try += 1
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/5c3675b6d1bd/
changeset: 5c3675b6d1bd
user: fubar
date: 2012-10-26 07:00:13
summary: Fix parameters for fastqc to work properly with bam and with a contaminants file
Been b0rken for a long time - probably not much used..
affected #: 3 files
diff -r f156b9e144437e541127009c2a32efc1019af5c7 -r 5c3675b6d1bd44f27225d4bef1d2ea7096cedee9 lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py
+++ b/lib/galaxy/tools/parameters/basic.py
@@ -911,7 +911,7 @@
self.ref_input = None
self.default_value = elem.get( "default_value", None )
self.is_dynamic = True
- self.usecolnames = string_as_bool( elem.get( "use_header_names", False ))
+ self.usecolnames = string_as_bool( elem.get( "use_header_names", True )) # much easier for the user - make default if not overridden
def from_html( self, value, trans=None, context={} ):
"""
diff -r f156b9e144437e541127009c2a32efc1019af5c7 -r 5c3675b6d1bd44f27225d4bef1d2ea7096cedee9 tools/filters/sorter.xml
--- a/tools/filters/sorter.xml
+++ b/tools/filters/sorter.xml
@@ -15,7 +15,7 @@
</command><inputs><param format="tabular" name="input" type="data" label="Sort Query" />
- <param name="column" label="on column" type="data_column" data_ref="input" accept_default="true" />
+ <param name="column" label="on column" type="data_column" data_ref="input" accept_default="true"/><param name="style" type="select" label="with flavor"><option value="num">Numerical sort</option><option value="alpha">Alphabetical sort</option>
diff -r f156b9e144437e541127009c2a32efc1019af5c7 -r 5c3675b6d1bd44f27225d4bef1d2ea7096cedee9 tools/rgenetics/rgFastQC.py
--- a/tools/rgenetics/rgFastQC.py
+++ b/tools/rgenetics/rgFastQC.py
@@ -51,9 +51,9 @@
fastq = os.path.basename(self.opts.input)
cl = [self.opts.executable,'--outdir=%s' % self.opts.outputdir]
if self.opts.informat in ['sam','bam']:
- cl.append('-f %s' % self.opts.informat)
+ cl.append('--f=%s' % self.opts.informat)
if self.opts.contaminants <> None :
- cl.append('-c %s' % self.opts.contaminants)
+ cl.append('--contaminants=%s' % self.opts.contaminants)
# patch suggested by bwlang https://bitbucket.org/galaxy/galaxy-central/pull-request/30
# use a symlink in a temporary directory so that the FastQC report reflects the history input file name
fastqinfilename = re.sub(ur'[^a-zA-Z0-9_\-\.]', '_', os.path.basename(self.opts.inputfilename))
https://bitbucket.org/galaxy/galaxy-central/changeset/720b4067ab5e/
changeset: 720b4067ab5e
user: fubar
date: 2012-10-26 07:03:10
summary: Revert use_header_names to default false - breaks a couple of existing tests - would need to repair those too.
affected #: 1 file
diff -r 5c3675b6d1bd44f27225d4bef1d2ea7096cedee9 -r 720b4067ab5e5b617569278155ab21216c0de176 lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py
+++ b/lib/galaxy/tools/parameters/basic.py
@@ -911,7 +911,7 @@
self.ref_input = None
self.default_value = elem.get( "default_value", None )
self.is_dynamic = True
- self.usecolnames = string_as_bool( elem.get( "use_header_names", True )) # much easier for the user - make default if not overridden
+ self.usecolnames = string_as_bool( elem.get( "use_header_names", False ))
def from_html( self, value, trans=None, context={} ):
"""
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: greg: Fix for determining the extraction directory for tool dependency tarballs when installing tool dependencies along with tool shed repositories.
by Bitbucket 25 Oct '12
by Bitbucket 25 Oct '12
25 Oct '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/f156b9e14443/
changeset: f156b9e14443
user: greg
date: 2012-10-25 20:46:57
summary: Fix for determining the extraction directory for tool dependency tarballs when installing tool dependencies along with tool shed repositories.
affected #: 2 files
diff -r 42eedc24fdd62034ce039bf56b2bbb0b382c63e0 -r f156b9e144437e541127009c2a32efc1019af5c7 lib/galaxy/tool_shed/tool_dependencies/common_util.py
--- a/lib/galaxy/tool_shed/tool_dependencies/common_util.py
+++ b/lib/galaxy/tool_shed/tool_dependencies/common_util.py
@@ -1,18 +1,6 @@
import os, shutil, tarfile, urllib2, zipfile
from galaxy.datatypes.checkers import *
-def zipfile_ok( path_to_archive ):
- """
- This function is a bit pedantic and not functionally necessary. It checks whether there is no file pointing outside of the extraction,
- because ZipFile.extractall() has some potential security holes. See python zipfile documentation for more details.
- """
- basename = os.path.realpath( os.path.dirname( path_to_archive ) )
- zip_archive = zipfile.ZipFile( path_to_archive )
- for member in zip_archive.namelist():
- member_path = os.path.realpath( os.path.join( basename, member ) )
- if not member_path.startswith( basename ):
- return False
- return True
def create_env_var_dict( elem, tool_dependency_install_dir=None, tool_shed_repository_install_dir=None ):
env_var_name = elem.get( 'name', 'PATH' )
env_var_action = elem.get( 'action', 'prepend_to' )
@@ -103,6 +91,7 @@
os.makedirs( destination_directory )
shutil.move( source_file, destination_directory )
def tar_extraction_directory( file_path, file_name ):
+ """Try to return the correct extraction directory."""
file_name = file_name.strip()
extensions = [ '.tar.gz', '.tgz', '.tar.bz2', '.zip' ]
for extension in extensions:
@@ -111,8 +100,8 @@
if os.path.exists( os.path.abspath( os.path.join( file_path, dir_name ) ) ):
return dir_name
if os.path.exists( os.path.abspath( os.path.join( file_path, file_name ) ) ):
- return os.path.abspath( os.path.join( file_path, file_name ) )
- raise ValueError( 'Could not find directory %s' % os.path.abspath( os.path.join( file_path, file_name[ :-len( extension ) ] ) ) )
+ return os.path.abspath( file_path )
+ raise ValueError( 'Could not find path to file %s' % os.path.abspath( os.path.join( file_path, file_name ) ) )
def url_download( install_dir, downloaded_file_name, download_url ):
file_path = os.path.join( install_dir, downloaded_file_name )
src = None
@@ -138,3 +127,15 @@
if os.path.isdir( os.path.join( file_path, files[ 0 ] ) ):
return os.path.abspath( os.path.join( file_path, files[ 0 ] ) )
raise ValueError( 'Could not find directory for the extracted file %s' % os.path.abspath( os.path.join( file_path, file_name ) ) )
+def zipfile_ok( path_to_archive ):
+ """
+ This function is a bit pedantic and not functionally necessary. It checks whether there is no file pointing outside of the extraction,
+ because ZipFile.extractall() has some potential security holes. See python zipfile documentation for more details.
+ """
+ basename = os.path.realpath( os.path.dirname( path_to_archive ) )
+ zip_archive = zipfile.ZipFile( path_to_archive )
+ for member in zip_archive.namelist():
+ member_path = os.path.realpath( os.path.join( basename, member ) )
+ if not member_path.startswith( basename ):
+ return False
+ return True
diff -r 42eedc24fdd62034ce039bf56b2bbb0b382c63e0 -r f156b9e144437e541127009c2a32efc1019af5c7 lib/galaxy/tool_shed/tool_dependencies/fabric_util.py
--- a/lib/galaxy/tool_shed/tool_dependencies/fabric_util.py
+++ b/lib/galaxy/tool_shed/tool_dependencies/fabric_util.py
@@ -79,7 +79,7 @@
if not os.path.exists( dir ):
os.makedirs( dir )
# The package has been down-loaded, so we can now perform all of the actions defined for building it.
- with lcd( dir ):
+ with lcd( dir ):
for action_tup in actions[ 1: ]:
action_type, action_dict = action_tup
current_dir = os.path.abspath( os.path.join( work_dir, dir ) )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: dannon: Pack workflow editor javascript to be compatible with changes in pull request #76
by Bitbucket 25 Oct '12
by Bitbucket 25 Oct '12
25 Oct '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/42eedc24fdd6/
changeset: 42eedc24fdd6
user: dannon
date: 2012-10-25 19:52:26
summary: Pack workflow editor javascript to be compatible with changes in pull request #76
affected #: 1 file
diff -r 1136605c015d0e28ba42c9ad83f860652ee453a4 -r 42eedc24fdd62034ce039bf56b2bbb0b382c63e0 static/scripts/packed/galaxy.workflow_editor.canvas.js
--- a/static/scripts/packed/galaxy.workflow_editor.canvas.js
+++ b/static/scripts/packed/galaxy.workflow_editor.canvas.js
@@ -1,1 +1,1 @@
-function Terminal(a){this.element=a;this.connectors=[]}$.extend(Terminal.prototype,{connect:function(a){this.connectors.push(a);if(this.node){this.node.changed()}},disconnect:function(a){this.connectors.splice($.inArray(a,this.connectors),1);if(this.node){this.node.changed()}},redraw:function(){$.each(this.connectors,function(a,b){b.redraw()})},destroy:function(){$.each(this.connectors.slice(),function(a,b){b.destroy()})}});function OutputTerminal(a,b){Terminal.call(this,a);this.datatypes=b}OutputTerminal.prototype=new Terminal();function InputTerminal(a,b){Terminal.call(this,a);this.datatypes=b}InputTerminal.prototype=new Terminal();$.extend(InputTerminal.prototype,{can_accept:function(a){if(this.connectors.length<1){for(var c in this.datatypes){var f=new Array();f=f.concat(a.datatypes);if(a.node.post_job_actions){for(var d in a.node.post_job_actions){var g=a.node.post_job_actions[d];if(g.action_type=="ChangeDatatypeAction"&&(g.output_name==""||g.output_name==a.name)&&g.action_arguments){f.push(g.action_arguments.newtype)}}}for(var b in f){if(f[b]=="input"||issubtype(f[b],this.datatypes[c])){return true}}}}return false}});function Connector(b,a){this.canvas=null;this.dragging=false;this.inner_color="#FFFFFF";this.outer_color="#D8B365";if(b&&a){this.connect(b,a)}}$.extend(Connector.prototype,{connect:function(b,a){this.handle1=b;this.handle1.connect(this);this.handle2=a;this.handle2.connect(this)},destroy:function(){if(this.handle1){this.handle1.disconnect(this)}if(this.handle2){this.handle2.disconnect(this)}$(this.canvas).remove()},redraw:function(){var d=$("#canvas-container");if(!this.canvas){this.canvas=document.createElement("canvas");if(window.G_vmlCanvasManager){G_vmlCanvasManager.initElement(this.canvas)}d.append($(this.canvas));if(this.dragging){this.canvas.style.zIndex="300"}}var n=function(c){return $(c).offset().left-d.offset().left};var i=function(c){return $(c).offset().top-d.offset().top};var h=n(this.handle1.element)+5;var 
g=i(this.handle1.element)+5;var p=n(this.handle2.element)+5;var m=i(this.handle2.element)+5;var f=100;var k=Math.min(h,p);var a=Math.max(h,p);var j=Math.min(g,m);var t=Math.max(g,m);var b=Math.min(Math.max(Math.abs(t-j)/2,100),300);var o=k-f;var s=j-f;var q=a-k+2*f;var l=t-j+2*f;this.canvas.style.left=o+"px";this.canvas.style.top=s+"px";this.canvas.setAttribute("width",q);this.canvas.setAttribute("height",l);h-=o;g-=s;p-=o;m-=s;var r=this.canvas.getContext("2d");r.lineCap="round";r.strokeStyle=this.outer_color;r.lineWidth=7;r.beginPath();r.moveTo(h,g);r.bezierCurveTo(h+b,g,p-b,m,p,m);r.stroke();r.strokeStyle=this.inner_color;r.lineWidth=5;r.beginPath();r.moveTo(h,g);r.bezierCurveTo(h+b,g,p-b,m,p,m);r.stroke()}});function Node(a){this.element=a;this.input_terminals={};this.output_terminals={};this.tool_errors={}}$.extend(Node.prototype,{enable_input_terminal:function(d,a,b){var c=this;$(d).each(function(){var f=this.terminal=new InputTerminal(this,b);f.node=c;f.name=a;$(this).bind("dropinit",function(g,h){return $(h.drag).hasClass("output-terminal")&&f.can_accept(h.drag.terminal)}).bind("dropstart",function(g,h){h.proxy.terminal.connectors[0].inner_color="#BBFFBB"}).bind("dropend",function(g,h){h.proxy.terminal.connectors[0].inner_color="#FFFFFF"}).bind("drop",function(g,h){(new Connector(h.drag.terminal,f)).redraw()}).bind("hover",function(){if(f.connectors.length>0){var g=$("<div class='callout'></div>").css({display:"none"}).appendTo("body").append($("<div class='buttons'></div>").append($("<img/>").attr("src",galaxy_paths.attributes.image_path+"/delete_icon.png").click(function(){$.each(f.connectors,function(i,h){h.destroy()});g.remove()}))).bind("mouseleave",function(){$(this).remove()});g.css({top:$(this).offset().top-2,left:$(this).offset().left-g.width(),"padding-right":$(this).width()}).show()}});c.input_terminals[a]=f})},enable_output_terminal:function(d,a,b){var c=this;$(d).each(function(){var g=this;var f=this.terminal=new 
OutputTerminal(this,b);f.node=c;f.name=a;$(this).bind("dragstart",function(j,k){$(k.available).addClass("input-terminal-active");workflow.check_changes_in_active_form();var i=$('<div class="drag-terminal" style="position: absolute;"></div>').appendTo("#canvas-container").get(0);i.terminal=new OutputTerminal(i);var l=new Connector();l.dragging=true;l.connect(this.terminal,i.terminal);return i}).bind("drag",function(i,j){var h=function(){var l=$(j.proxy).offsetParent().offset(),k=j.offsetX-l.left,m=j.offsetY-l.top;$(j.proxy).css({left:k,top:m});j.proxy.terminal.redraw();canvas_manager.update_viewport_overlay()};h();$("#canvas-container").get(0).scroll_panel.test(i,h)}).bind("dragend",function(h,i){i.proxy.terminal.connectors[0].destroy();$(i.proxy).remove();$(i.available).removeClass("input-terminal-active");$("#canvas-container").get(0).scroll_panel.stop()});c.output_terminals[a]=f})},redraw:function(){$.each(this.input_terminals,function(a,b){b.redraw()});$.each(this.output_terminals,function(a,b){b.redraw()})},destroy:function(){$.each(this.input_terminals,function(a,b){b.destroy()});$.each(this.output_terminals,function(a,b){b.destroy()});workflow.remove_node(this);$(this.element).remove()},make_active:function(){$(this.element).addClass("toolForm-active")},make_inactive:function(){var a=this.element.get(0);(function(b){b.removeChild(a);b.appendChild(a)})(a.parentNode);$(a).removeClass("toolForm-active")},init_field_data:function(h){var g=this.element;if(h.type){this.type=h.type}this.name=h.name;this.form_html=h.form_html;this.tool_state=h.tool_state;this.tool_errors=h.tool_errors;this.tooltip=h.tooltip?h.tooltip:"";this.annotation=h.annotation;this.post_job_actions=h.post_job_actions?h.post_job_actions:{};this.workflow_outputs=h.workflow_outputs?h.workflow_outputs:[];if(this.tool_errors){g.addClass("tool-node-error")}else{g.removeClass("tool-node-error")}var d=this;var c=Math.max(150,g.width());var a=g.find(".toolFormBody");a.find("div").remove();var i=$("<div 
class='inputs'></div>").appendTo(a);$.each(h.data_inputs,function(k,f){var j=$("<div class='terminal input-terminal'></div>");d.enable_input_terminal(j,f.name,f.extensions);var b=$("<div class='form-row dataRow input-data-row' name='"+f.name+"'>"+f.label+"</div>");b.css({position:"absolute",left:-1000,top:-1000,display:"none"});$("body").append(b);c=Math.max(c,b.outerWidth());b.css({position:"",left:"",top:"",display:""});b.remove();i.append(b.prepend(j))});if((h.data_inputs.length>0)&&(h.data_outputs.length>0)){a.append($("<div class='rule'></div>"))}$.each(h.data_outputs,function(k,b){var j=$("<div class='terminal output-terminal'></div>");d.enable_output_terminal(j,b.name,b.extensions);var f=b.name;if(b.extensions.indexOf("input")<0){f=f+" ("+b.extensions.join(", ")+")"}var m=$("<div class='form-row dataRow'>"+f+"</div>");if(d.type=="tool"){var l=$("<div class='callout "+f+"'></div>").css({display:"none"}).append($("<div class='buttons'></div>").append($("<img/>").attr("src",galaxy_paths.attributes.image_path+"/fugue/asterisk-small-outline.png").click(function(){if($.inArray(b.name,d.workflow_outputs)!=-1){d.workflow_outputs.splice($.inArray(b.name,d.workflow_outputs),1);l.find("img").attr("src",galaxy_paths.attributes.image_path+"/fugue/asterisk-small-outline.png")}else{d.workflow_outputs.push(b.name);l.find("img").attr("src",galaxy_paths.attributes.image_path+"/fugue/asterisk-small.png")}workflow.has_changes=true;canvas_manager.draw_overview()}))).tooltip({delay:500,title:"Flag this as a workflow output. 
All non-flagged outputs will be hidden."});l.css({top:"50%",margin:"-8px 0px 0px 0px",right:8});l.show();m.append(l);if($.inArray(b.name,d.workflow_outputs)===-1){l.find("img").attr("src",galaxy_paths.attributes.image_path+"/fugue/asterisk-small-outline.png")}else{l.find("img").attr("src",galaxy_paths.attributes.image_path+"/fugue/asterisk-small.png")}m.hover(function(){l.find("img").attr("src",galaxy_paths.attributes.image_path+"/fugue/asterisk-small-yellow.png")},function(){if($.inArray(b.name,d.workflow_outputs)===-1){l.find("img").attr("src",galaxy_paths.attributes.image_path+"/fugue/asterisk-small-outline.png")}else{l.find("img").attr("src",galaxy_paths.attributes.image_path+"/fugue/asterisk-small.png")}})}m.css({position:"absolute",left:-1000,top:-1000,display:"none"});$("body").append(m);c=Math.max(c,m.outerWidth()+17);m.css({position:"",left:"",top:"",display:""});m.detach();a.append(m.append(j))});g.css("width",Math.min(250,Math.max(g.width(),c)));workflow.node_changed(this)},update_field_data:function(f){var c=$(this.element),d=this;this.tool_state=f.tool_state;this.form_html=f.form_html;this.tool_errors=f.tool_errors;this.annotation=f.annotation;var g=$.parseJSON(f.post_job_actions);this.post_job_actions=g?g:{};if(this.tool_errors){c.addClass("tool-node-error")}else{c.removeClass("tool-node-error")}var h=c.find("div.inputs");var b=$("<div class='inputs'></div>");var a=h.find("div.input-data-row");$.each(f.data_inputs,function(l,j){var k=$("<div class='terminal input-terminal'></div>");d.enable_input_terminal(k,j.name,j.extensions);h.find("div[name='"+j.name+"']").each(function(){$(this).find(".input-terminal").each(function(){var i=this.terminal.connectors[0];if(i){k[0].terminal.connectors[0]=i;i.handle2=k[0].terminal}});$(this).remove()});b.append($("<div class='form-row dataRow input-data-row' name='"+j.name+"'>"+j.label+"</div>").prepend(k))});h.replaceWith(b);h.find("div.input-data-row > 
.terminal").each(function(){this.terminal.destroy()});this.changed();this.redraw()},error:function(d){var a=$(this.element).find(".toolFormBody");a.find("div").remove();var c="<div style='color: red; text-style: italic;'>"+d+"</div>";this.form_html=c;a.html(c);workflow.node_changed(this)},changed:function(){workflow.node_changed(this)}});function Workflow(a){this.canvas_container=a;this.id_counter=0;this.nodes={};this.name=null;this.has_changes=false;this.active_form_has_changes=false}$.extend(Workflow.prototype,{add_node:function(a){a.id=this.id_counter;a.element.attr("id","wf-node-step-"+a.id);this.id_counter++;this.nodes[a.id]=a;this.has_changes=true;a.workflow=this},remove_node:function(a){if(this.active_node==a){this.clear_active_node()}delete this.nodes[a.id];this.has_changes=true},remove_all:function(){wf=this;$.each(this.nodes,function(b,a){a.destroy();wf.remove_node(a)})},rectify_workflow_outputs:function(){var b,a=false;$.each(this.nodes,function(c,d){if(d.workflow_outputs&&d.workflow_outputs.length>0){b=true}$.each(d.post_job_actions,function(g,f){if(f.action_type==="HideDatasetAction"){a=true}})});if(b!==false||a!==false){$.each(this.nodes,function(c,g){if(g.type==="tool"){var f=false;if(g.post_job_actions==null){g.post_job_actions={};f=true}var d=[];$.each(g.post_job_actions,function(i,h){if(h.action_type=="HideDatasetAction"){d.push(i)}});if(d.length>0&&g==workflow.active_node){$.each(d,function(h,j){f=true;delete g.post_job_actions[j]})}if(b){$.each(g.output_terminals,function(i,j){var h=true;$.each(g.workflow_outputs,function(l,m){if(j.name===m){h=false}});if(h===true){f=true;var k={action_type:"HideDatasetAction",output_name:j.name,action_arguments:{}};g.post_job_actions["HideDatasetAction"+j.name]=null;g.post_job_actions["HideDatasetAction"+j.name]=k}})}if(workflow.active_node==g&&f===true){workflow.reload_active_node()}}})}},to_simple:function(){var a={};$.each(this.nodes,function(c,f){var 
g={};$.each(f.input_terminals,function(h,i){g[i.name]=null;$.each(i.connectors,function(j,k){g[i.name]={id:k.handle1.node.id,output_name:k.handle1.name}})});var b={};if(f.post_job_actions){$.each(f.post_job_actions,function(j,h){var k={action_type:h.action_type,output_name:h.output_name,action_arguments:h.action_arguments};b[h.action_type+h.output_name]=null;b[h.action_type+h.output_name]=k})}if(!f.workflow_outputs){f.workflow_outputs=[]}var d={id:f.id,type:f.type,tool_id:f.tool_id,tool_state:f.tool_state,tool_errors:f.tool_errors,input_connections:g,position:$(f.element).position(),annotation:f.annotation,post_job_actions:f.post_job_actions,workflow_outputs:f.workflow_outputs};a[f.id]=d});return{steps:a}},from_simple:function(b){wf=this;var c=0;wf.name=b.name;var a=false;$.each(b.steps,function(g,f){var d=prebuild_node("tool",f.name,f.tool_id);d.init_field_data(f);if(f.position){d.element.css({top:f.position.top,left:f.position.left})}d.id=f.id;wf.nodes[d.id]=d;c=Math.max(c,parseInt(g));if(!a&&d.type==="tool"){if(d.workflow_outputs.length>0){a=true}else{$.each(d.post_job_actions,function(i,h){if(h.action_type==="HideDatasetAction"){a=true}})}}});wf.id_counter=c+1;$.each(b.steps,function(g,f){var d=wf.nodes[g];$.each(f.input_connections,function(i,h){if(h){var j=wf.nodes[h.id];var l=new Connector();l.connect(j.output_terminals[h.output_name],d.input_terminals[i]);l.redraw()}});if(a&&d.type==="tool"){$.each(d.output_terminals,function(h,i){if(d.post_job_actions["HideDatasetAction"+i.name]===undefined){d.workflow_outputs.push(i.name);callout=$(d.element).find(".callout."+i.name);callout.find("img").attr("src",galaxy_paths.attributes.image_path+"/fugue/asterisk-small.png");workflow.has_changes=true}})}})},check_changes_in_active_form:function(){if(this.active_form_has_changes){this.has_changes=true;$("#right-content").find("form").submit();this.active_form_has_changes=false}},reload_active_node:function(){if(this.active_node){var 
a=this.active_node;this.clear_active_node();this.activate_node(a)}},clear_active_node:function(){if(this.active_node){this.active_node.make_inactive();this.active_node=null}parent.show_form_for_tool("<div>No node selected</div>")},activate_node:function(a){if(this.active_node!=a){this.check_changes_in_active_form();this.clear_active_node();parent.show_form_for_tool(a.form_html+a.tooltip,a);a.make_active();this.active_node=a}},node_changed:function(a){this.has_changes=true;if(this.active_node==a){this.check_changes_in_active_form();parent.show_form_for_tool(a.form_html+a.tooltip,a)}},layout:function(){this.check_changes_in_active_form();this.has_changes=true;var i={};var b={};$.each(this.nodes,function(l,k){if(i[l]===undefined){i[l]=0}if(b[l]===undefined){b[l]=[]}});$.each(this.nodes,function(l,k){$.each(k.input_terminals,function(m,n){$.each(n.connectors,function(p,q){var o=q.handle1.node;i[k.id]+=1;b[o.id].push(k.id)})})});node_ids_by_level=[];while(true){level_parents=[];for(var a in i){if(i[a]==0){level_parents.push(a)}}if(level_parents.length==0){break}node_ids_by_level.push(level_parents);for(var f in level_parents){var j=level_parents[f];delete i[j];for(var g in b[j]){i[b[j][g]]-=1}}}if(i.length){return}var d=this.nodes;var h=80;v_pad=30;var c=h;$.each(node_ids_by_level,function(k,l){l.sort(function(p,o){return $(d[p].element).position().top-$(d[o].element).position().top});var m=0;var n=v_pad;$.each(l,function(o,r){var q=d[r];var p=$(q.element);$(p).css({top:n,left:c});m=Math.max(m,$(p).width());n+=$(p).height()+v_pad});c+=m+h});$.each(d,function(k,l){l.redraw()})},bounds_for_all_nodes:function(){var d=Infinity,b=-Infinity,c=Infinity,a=-Infinity,f;$.each(this.nodes,function(h,g){e=$(g.element);f=e.position();d=Math.min(d,f.left);b=Math.max(b,f.left+e.width());c=Math.min(c,f.top);a=Math.max(a,f.top+e.width())});return{xmin:d,xmax:b,ymin:c,ymax:a}},fit_canvas_to_nodes:function(){var a=this.bounds_for_all_nodes();var f=this.canvas_container.position();var 
i=this.canvas_container.parent();var d=fix_delta(a.xmin,100);var h=fix_delta(a.ymin,100);d=Math.max(d,f.left);h=Math.max(h,f.top);var c=f.left-d;var g=f.top-h;var b=round_up(a.xmax+100,100)+d;var j=round_up(a.ymax+100,100)+h;b=Math.max(b,-c+i.width());j=Math.max(j,-g+i.height());this.canvas_container.css({left:c,top:g,width:b,height:j});this.canvas_container.children().each(function(){var k=$(this).position();$(this).css("left",k.left+d);$(this).css("top",k.top+h)})}});function fix_delta(a,b){if(a<b||a>3*b){new_pos=(Math.ceil(((a%b))/b)+1)*b;return(-(a-new_pos))}return 0}function round_up(a,b){return Math.ceil(a/b)*b}function prebuild_node(l,j,r){var i=$("<div class='toolForm toolFormInCanvas'></div>");var g=new Node(i);g.type=l;if(l=="tool"){g.tool_id=r}var n=$("<div class='toolFormTitle unselectable'>"+j+"</div>");i.append(n);i.css("left",$(window).scrollLeft()+20);i.css("top",$(window).scrollTop()+20);var m=$("<div class='toolFormBody'></div>");var h="<div><img height='16' align='middle' src='"+galaxy_paths.attributes.image_path+"/loading_small_white_bg.gif'/> loading tool info...</div>";m.append(h);g.form_html=h;i.append(m);var k=$("<div class='buttons' style='float: right;'></div>");k.append($("<img/>").attr("src",galaxy_paths.attributes.image_path+"/delete_icon.png").click(function(b){g.destroy()}).hover(function(){$(this).attr("src",galaxy_paths.attributes.image_path+"/delete_icon_dark.png")},function(){$(this).attr("src",galaxy_paths.attributes.image_path+"/delete_icon.png")}));i.appendTo("#canvas-container");var d=$("#canvas-container").position();var c=$("#canvas-container").parent();var a=i.width();var 
q=i.height();i.css({left:(-d.left)+(c.width()/2)-(a/2),top:(-d.top)+(c.height()/2)-(q/2)});k.prependTo(n);a+=(k.width()+10);i.css("width",a);$(i).bind("dragstart",function(){workflow.activate_node(g)}).bind("dragend",function(){workflow.node_changed(this);workflow.fit_canvas_to_nodes();canvas_manager.draw_overview()}).bind("dragclickonly",function(){workflow.activate_node(g)}).bind("drag",function(o,p){var f=$(this).offsetParent().offset(),b=p.offsetX-f.left,s=p.offsetY-f.top;$(this).css({left:b,top:s});$(this).find(".terminal").each(function(){this.terminal.redraw()})});return g}var ext_to_type=null;var type_to_type=null;function issubtype(b,a){b=ext_to_type[b];a=ext_to_type[a];return(type_to_type[b])&&(a in type_to_type[b])}function populate_datatype_info(a){ext_to_type=a.ext_to_class_name;type_to_type=a.class_to_classes}function ScrollPanel(a){this.panel=a}$.extend(ScrollPanel.prototype,{test:function(v,d){clearTimeout(this.timeout);var k=v.pageX,j=v.pageY,l=$(this.panel),c=l.position(),b=l.width(),i=l.height(),w=l.parent(),s=w.width(),a=w.height(),r=w.offset(),p=r.left,m=r.top,A=p+w.width(),u=m+w.height(),B=-(b-(s/2)),z=-(i-(a/2)),g=(s/2),f=(a/2),h=false,q=5,o=23;if(k-q<p){if(c.left<g){var n=Math.min(o,g-c.left);l.css("left",c.left+n);h=true}}else{if(k+q>A){if(c.left>B){var n=Math.min(o,c.left-B);l.css("left",c.left-n);h=true}}else{if(j-q<m){if(c.top<f){var n=Math.min(o,f-c.top);l.css("top",c.top+n);h=true}}else{if(j+q>u){if(c.top>z){var n=Math.min(o,c.top-B);l.css("top",(c.top-n)+"px");h=true}}}}}if(h){d();var l=this;this.timeout=setTimeout(function(){l.test(v,d)},50)}},stop:function(b,a){clearTimeout(this.timeout)}});function CanvasManager(b,a){this.cv=b;this.cc=this.cv.find("#canvas-container");this.oc=a.find("#overview-canvas");this.ov=a.find("#overview-viewport");this.init_drag()}$.extend(CanvasManager.prototype,{init_drag:function(){var b=this;var 
a=function(f,g){f=Math.min(f,b.cv.width()/2);f=Math.max(f,-b.cc.width()+b.cv.width()/2);g=Math.min(g,b.cv.height()/2);g=Math.max(g,-b.cc.height()+b.cv.height()/2);b.cc.css({left:f,top:g});b.update_viewport_overlay()};this.cc.each(function(){this.scroll_panel=new ScrollPanel(this)});var d,c;this.cv.bind("dragstart",function(){var g=$(this).offset();var f=b.cc.position();c=f.top-g.top;d=f.left-g.left}).bind("drag",function(f,g){a(g.offsetX+d,g.offsetY+c)}).bind("dragend",function(){workflow.fit_canvas_to_nodes();b.draw_overview()});this.ov.bind("drag",function(k,l){var h=b.cc.width(),n=b.cc.height(),m=b.oc.width(),j=b.oc.height(),f=$(this).offsetParent().offset(),i=l.offsetX-f.left,g=l.offsetY-f.top;a(-(i/m*h),-(g/j*n))}).bind("dragend",function(){workflow.fit_canvas_to_nodes();b.draw_overview()});$("#overview-border").bind("drag",function(g,i){var j=$(this).offsetParent();var h=j.offset();var f=Math.max(j.width()-(i.offsetX-h.left),j.height()-(i.offsetY-h.top));$(this).css({width:f,height:f});b.draw_overview()});$("#overview-border div").bind("drag",function(){})},update_viewport_overlay:function(){var b=this.cc,f=this.cv,a=this.oc,c=this.ov,d=b.width(),j=b.height(),i=a.width(),g=a.height(),h=b.position();c.css({left:-(h.left/d*i),top:-(h.top/j*g),width:(f.width()/d*i)-2,height:(f.height()/j*g)-2})},draw_overview:function(){var j=$("#overview-canvas"),m=j.parent().parent().width(),i=j.get(0).getContext("2d"),d=$("#canvas-container").width(),l=$("#canvas-container").height();var g,a,k,f;var h=this.cv.width();var b=this.cv.height();if(d<h&&l<b){k=d/h*m;f=(m-k)/2;g=l/b*m;a=(m-g)/2}else{if(d<l){a=0;g=m;k=Math.ceil(g*d/l);f=(m-k)/2}else{k=m;f=0;g=Math.ceil(k*l/d);a=(m-g)/2}}j.parent().css({left:f,top:a,width:k,height:g});j.attr("width",k);j.attr("height",g);$.each(workflow.nodes,function(t,q){i.fillStyle="#D2C099";i.strokeStyle="#D8B365";i.lineWidth=1;var 
s=$(q.element),n=s.position(),c=n.left/d*k,r=n.top/l*g,o=s.width()/d*k,p=s.height()/l*g;if(q.tool_errors){i.fillStyle="#FFCCCC";i.strokeStyle="#AA6666"}else{if(q.workflow_outputs!=undefined&&q.workflow_outputs.length>0){i.fillStyle="#E8A92D";i.strokeStyle="#E8A92D"}}i.fillRect(c,r,o,p);i.strokeRect(c,r,o,p)});this.update_viewport_overlay()}});
\ No newline at end of file
+function Terminal(a){this.element=a;this.connectors=[]}$.extend(Terminal.prototype,{connect:function(a){this.connectors.push(a);if(this.node){this.node.changed()}},disconnect:function(a){this.connectors.splice($.inArray(a,this.connectors),1);if(this.node){this.node.changed()}},redraw:function(){$.each(this.connectors,function(a,b){b.redraw()})},destroy:function(){$.each(this.connectors.slice(),function(a,b){b.destroy()})}});function OutputTerminal(a,b){Terminal.call(this,a);this.datatypes=b}OutputTerminal.prototype=new Terminal();function InputTerminal(b,c,a){Terminal.call(this,b);this.datatypes=c;this.multiple=a}InputTerminal.prototype=new Terminal();$.extend(InputTerminal.prototype,{can_accept:function(a){if(this.connectors.length<1||this.multiple){for(var c in this.datatypes){var f=new Array();f=f.concat(a.datatypes);if(a.node.post_job_actions){for(var d in a.node.post_job_actions){var g=a.node.post_job_actions[d];if(g.action_type=="ChangeDatatypeAction"&&(g.output_name==""||g.output_name==a.name)&&g.action_arguments){f.push(g.action_arguments.newtype)}}}for(var b in f){if(f[b]=="input"||issubtype(f[b],this.datatypes[c])){return true}}}}return false}});function Connector(b,a){this.canvas=null;this.dragging=false;this.inner_color="#FFFFFF";this.outer_color="#D8B365";if(b&&a){this.connect(b,a)}}$.extend(Connector.prototype,{connect:function(b,a){this.handle1=b;this.handle1.connect(this);this.handle2=a;this.handle2.connect(this)},destroy:function(){if(this.handle1){this.handle1.disconnect(this)}if(this.handle2){this.handle2.disconnect(this)}$(this.canvas).remove()},redraw:function(){var d=$("#canvas-container");if(!this.canvas){this.canvas=document.createElement("canvas");if(window.G_vmlCanvasManager){G_vmlCanvasManager.initElement(this.canvas)}d.append($(this.canvas));if(this.dragging){this.canvas.style.zIndex="300"}}var n=function(c){return $(c).offset().left-d.offset().left};var i=function(c){return 
$(c).offset().top-d.offset().top};if(!this.handle1||!this.handle2){return}var h=n(this.handle1.element)+5;var g=i(this.handle1.element)+5;var p=n(this.handle2.element)+5;var m=i(this.handle2.element)+5;var f=100;var k=Math.min(h,p);var a=Math.max(h,p);var j=Math.min(g,m);var t=Math.max(g,m);var b=Math.min(Math.max(Math.abs(t-j)/2,100),300);var o=k-f;var s=j-f;var q=a-k+2*f;var l=t-j+2*f;this.canvas.style.left=o+"px";this.canvas.style.top=s+"px";this.canvas.setAttribute("width",q);this.canvas.setAttribute("height",l);h-=o;g-=s;p-=o;m-=s;var r=this.canvas.getContext("2d");r.lineCap="round";r.strokeStyle=this.outer_color;r.lineWidth=7;r.beginPath();r.moveTo(h,g);r.bezierCurveTo(h+b,g,p-b,m,p,m);r.stroke();r.strokeStyle=this.inner_color;r.lineWidth=5;r.beginPath();r.moveTo(h,g);r.bezierCurveTo(h+b,g,p-b,m,p,m);r.stroke()}});function Node(a){this.element=a;this.input_terminals={};this.output_terminals={};this.tool_errors={}}$.extend(Node.prototype,{enable_input_terminal:function(f,b,c,a){var d=this;$(f).each(function(){var g=this.terminal=new InputTerminal(this,c,a);g.node=d;g.name=b;$(this).bind("dropinit",function(h,i){return $(i.drag).hasClass("output-terminal")&&g.can_accept(i.drag.terminal)}).bind("dropstart",function(h,i){if(i.proxy.terminal){i.proxy.terminal.connectors[0].inner_color="#BBFFBB"}}).bind("dropend",function(h,i){if(i.proxy.terminal){i.proxy.terminal.connectors[0].inner_color="#FFFFFF"}}).bind("drop",function(h,i){(new Connector(i.drag.terminal,g)).redraw()}).bind("hover",function(){if(g.connectors.length>0){var h=$("<div class='callout'></div>").css({display:"none"}).appendTo("body").append($("<div 
class='buttons'></div>").append($("<img/>").attr("src",galaxy_paths.attributes.image_path+"/delete_icon.png").click(function(){$.each(g.connectors,function(j,i){if(i){i.destroy()}});h.remove()}))).bind("mouseleave",function(){$(this).remove()});h.css({top:$(this).offset().top-2,left:$(this).offset().left-h.width(),"padding-right":$(this).width()}).show()}});d.input_terminals[b]=g})},enable_output_terminal:function(d,a,b){var c=this;$(d).each(function(){var g=this;var f=this.terminal=new OutputTerminal(this,b);f.node=c;f.name=a;$(this).bind("dragstart",function(j,k){$(k.available).addClass("input-terminal-active");workflow.check_changes_in_active_form();var i=$('<div class="drag-terminal" style="position: absolute;"></div>').appendTo("#canvas-container").get(0);i.terminal=new OutputTerminal(i);var l=new Connector();l.dragging=true;l.connect(this.terminal,i.terminal);return i}).bind("drag",function(i,j){var h=function(){var l=$(j.proxy).offsetParent().offset(),k=j.offsetX-l.left,m=j.offsetY-l.top;$(j.proxy).css({left:k,top:m});j.proxy.terminal.redraw();canvas_manager.update_viewport_overlay()};h();$("#canvas-container").get(0).scroll_panel.test(i,h)}).bind("dragend",function(h,i){i.proxy.terminal.connectors[0].destroy();$(i.proxy).remove();$(i.available).removeClass("input-terminal-active");$("#canvas-container").get(0).scroll_panel.stop()});c.output_terminals[a]=f})},redraw:function(){$.each(this.input_terminals,function(a,b){b.redraw()});$.each(this.output_terminals,function(a,b){b.redraw()})},destroy:function(){$.each(this.input_terminals,function(a,b){b.destroy()});$.each(this.output_terminals,function(a,b){b.destroy()});workflow.remove_node(this);$(this.element).remove()},make_active:function(){$(this.element).addClass("toolForm-active")},make_inactive:function(){var a=this.element.get(0);(function(b){b.removeChild(a);b.appendChild(a)})(a.parentNode);$(a).removeClass("toolForm-active")},init_field_data:function(h){var 
g=this.element;if(h.type){this.type=h.type}this.name=h.name;this.form_html=h.form_html;this.tool_state=h.tool_state;this.tool_errors=h.tool_errors;this.tooltip=h.tooltip?h.tooltip:"";this.annotation=h.annotation;this.post_job_actions=h.post_job_actions?h.post_job_actions:{};this.workflow_outputs=h.workflow_outputs?h.workflow_outputs:[];if(this.tool_errors){g.addClass("tool-node-error")}else{g.removeClass("tool-node-error")}var d=this;var c=Math.max(150,g.width());var a=g.find(".toolFormBody");a.find("div").remove();var i=$("<div class='inputs'></div>").appendTo(a);$.each(h.data_inputs,function(k,f){var j=$("<div class='terminal input-terminal'></div>");d.enable_input_terminal(j,f.name,f.extensions,f.multiple);var b=$("<div class='form-row dataRow input-data-row' name='"+f.name+"'>"+f.label+"</div>");b.css({position:"absolute",left:-1000,top:-1000,display:"none"});$("body").append(b);c=Math.max(c,b.outerWidth());b.css({position:"",left:"",top:"",display:""});b.remove();i.append(b.prepend(j))});if((h.data_inputs.length>0)&&(h.data_outputs.length>0)){a.append($("<div class='rule'></div>"))}$.each(h.data_outputs,function(k,b){var j=$("<div class='terminal output-terminal'></div>");d.enable_output_terminal(j,b.name,b.extensions);var f=b.name;if(b.extensions.indexOf("input")<0){f=f+" ("+b.extensions.join(", ")+")"}var m=$("<div class='form-row dataRow'>"+f+"</div>");if(d.type=="tool"){var l=$("<div class='callout "+f+"'></div>").css({display:"none"}).append($("<div 
class='buttons'></div>").append($("<img/>").attr("src",galaxy_paths.attributes.image_path+"/fugue/asterisk-small-outline.png").click(function(){if($.inArray(b.name,d.workflow_outputs)!=-1){d.workflow_outputs.splice($.inArray(b.name,d.workflow_outputs),1);l.find("img").attr("src",galaxy_paths.attributes.image_path+"/fugue/asterisk-small-outline.png")}else{d.workflow_outputs.push(b.name);l.find("img").attr("src",galaxy_paths.attributes.image_path+"/fugue/asterisk-small.png")}workflow.has_changes=true;canvas_manager.draw_overview()}))).tooltip({delay:500,title:"Flag this as a workflow output. All non-flagged outputs will be hidden."});l.css({top:"50%",margin:"-8px 0px 0px 0px",right:8});l.show();m.append(l);if($.inArray(b.name,d.workflow_outputs)===-1){l.find("img").attr("src",galaxy_paths.attributes.image_path+"/fugue/asterisk-small-outline.png")}else{l.find("img").attr("src",galaxy_paths.attributes.image_path+"/fugue/asterisk-small.png")}m.hover(function(){l.find("img").attr("src",galaxy_paths.attributes.image_path+"/fugue/asterisk-small-yellow.png")},function(){if($.inArray(b.name,d.workflow_outputs)===-1){l.find("img").attr("src",galaxy_paths.attributes.image_path+"/fugue/asterisk-small-outline.png")}else{l.find("img").attr("src",galaxy_paths.attributes.image_path+"/fugue/asterisk-small.png")}})}m.css({position:"absolute",left:-1000,top:-1000,display:"none"});$("body").append(m);c=Math.max(c,m.outerWidth()+17);m.css({position:"",left:"",top:"",display:""});m.detach();a.append(m.append(j))});g.css("width",Math.min(250,Math.max(g.width(),c)));workflow.node_changed(this)},update_field_data:function(f){var c=$(this.element),d=this;this.tool_state=f.tool_state;this.form_html=f.form_html;this.tool_errors=f.tool_errors;this.annotation=f.annotation;var g=$.parseJSON(f.post_job_actions);this.post_job_actions=g?g:{};if(this.tool_errors){c.addClass("tool-node-error")}else{c.removeClass("tool-node-error")}var h=c.find("div.inputs");var b=$("<div class='inputs'></div>");var 
a=h.find("div.input-data-row");$.each(f.data_inputs,function(l,j){var k=$("<div class='terminal input-terminal'></div>");d.enable_input_terminal(k,j.name,j.extensions,j.multiple);h.find("div[name='"+j.name+"']").each(function(){$(this).find(".input-terminal").each(function(){var i=this.terminal.connectors[0];if(i){k[0].terminal.connectors[0]=i;i.handle2=k[0].terminal}});$(this).remove()});b.append($("<div class='form-row dataRow input-data-row' name='"+j.name+"'>"+j.label+"</div>").prepend(k))});h.replaceWith(b);h.find("div.input-data-row > .terminal").each(function(){this.terminal.destroy()});this.changed();this.redraw()},error:function(d){var a=$(this.element).find(".toolFormBody");a.find("div").remove();var c="<div style='color: red; text-style: italic;'>"+d+"</div>";this.form_html=c;a.html(c);workflow.node_changed(this)},changed:function(){workflow.node_changed(this)}});
/* NOTE(review): the members above complete an object literal whose opening
   $.extend(Node.prototype, ...) call lies before this chunk; code left
   byte-identical. This file is minified and email-wrapped; wraps were
   rejoined at token boundaries only (no token changed). */
/* Workflow: model for the editor canvas. Owns the node map (this.nodes keyed
   by id), an id counter, and dirty flags for the canvas (has_changes) and the
   active tool form (active_form_has_changes). */
function Workflow(a){this.canvas_container=a;this.id_counter=0;this.nodes={};this.name=null;this.has_changes=false;this.active_form_has_changes=false}
/* Workflow methods: node add/remove, workflow-output rectification (syncing
   HideDatasetAction post-job actions with workflow_outputs), serialization
   via to_simple/from_simple, active-node bookkeeping, level-based auto-layout
   (layout), and canvas sizing (bounds_for_all_nodes / fit_canvas_to_nodes). */
$.extend(Workflow.prototype,{add_node:function(a){a.id=this.id_counter;a.element.attr("id","wf-node-step-"+a.id);this.id_counter++;this.nodes[a.id]=a;this.has_changes=true;a.workflow=this},remove_node:function(a){if(this.active_node==a){this.clear_active_node()}delete this.nodes[a.id];this.has_changes=true},remove_all:function(){wf=this;$.each(this.nodes,function(b,a){a.destroy();wf.remove_node(a)})},rectify_workflow_outputs:function(){var b,a=false;$.each(this.nodes,function(c,d){if(d.workflow_outputs&&d.workflow_outputs.length>0){b=true}$.each(d.post_job_actions,function(g,f){if(f.action_type==="HideDatasetAction"){a=true}})});if(b!==false||a!==false){$.each(this.nodes,function(c,g){if(g.type==="tool"){var f=false;if(g.post_job_actions==null){g.post_job_actions={};f=true}var d=[];$.each(g.post_job_actions,function(i,h){if(h.action_type=="HideDatasetAction"){d.push(i)}});if(d.length>0&&g==workflow.active_node){$.each(d,function(h,j){f=true;delete g.post_job_actions[j]})}if(b){$.each(g.output_terminals,function(i,j){var h=true;$.each(g.workflow_outputs,function(l,m){if(j.name===m){h=false}});if(h===true){f=true;var k={action_type:"HideDatasetAction",output_name:j.name,action_arguments:{}};g.post_job_actions["HideDatasetAction"+j.name]=null;g.post_job_actions["HideDatasetAction"+j.name]=k}})}if(workflow.active_node==g&&f===true){workflow.reload_active_node()}}})}},to_simple:function(){var a={};$.each(this.nodes,function(c,f){var g={};$.each(f.input_terminals,function(i,j){g[j.name]=null;var h=[];$.each(j.connectors,function(k,l){h[k]={id:l.handle1.node.id,output_name:l.handle1.name};g[j.name]=h})});var b={};if(f.post_job_actions){$.each(f.post_job_actions,function(j,h){var k={action_type:h.action_type,output_name:h.output_name,action_arguments:h.action_arguments};b[h.action_type+h.output_name]=null;b[h.action_type+h.output_name]=k})}if(!f.workflow_outputs){f.workflow_outputs=[]}var d={id:f.id,type:f.type,tool_id:f.tool_id,tool_state:f.tool_state,tool_errors:f.tool_errors,input_connections:g,position:$(f.element).position(),annotation:f.annotation,post_job_actions:f.post_job_actions,workflow_outputs:f.workflow_outputs};a[f.id]=d});return{steps:a}},from_simple:function(b){wf=this;var c=0;wf.name=b.name;var a=false;$.each(b.steps,function(g,f){var d=prebuild_node("tool",f.name,f.tool_id);d.init_field_data(f);if(f.position){d.element.css({top:f.position.top,left:f.position.left})}d.id=f.id;wf.nodes[d.id]=d;c=Math.max(c,parseInt(g));if(!a&&d.type==="tool"){if(d.workflow_outputs.length>0){a=true}else{$.each(d.post_job_actions,function(i,h){if(h.action_type==="HideDatasetAction"){a=true}})}}});wf.id_counter=c+1;$.each(b.steps,function(g,f){var d=wf.nodes[g];$.each(f.input_connections,function(i,h){if(h){if($.isArray(h)){$.each(h,function(m,k){var n=wf.nodes[k.id];var o=new Connector();o.connect(n.output_terminals[k.output_name],d.input_terminals[i]);o.redraw()})}else{var j=wf.nodes[h.id];var l=new Connector();l.connect(j.output_terminals[h.output_name],d.input_terminals[i]);l.redraw()}}});if(a&&d.type==="tool"){$.each(d.output_terminals,function(h,i){if(d.post_job_actions["HideDatasetAction"+i.name]===undefined){d.workflow_outputs.push(i.name);callout=$(d.element).find(".callout."+i.name);callout.find("img").attr("src",galaxy_paths.attributes.image_path+"/fugue/asterisk-small.png");workflow.has_changes=true}})}})},check_changes_in_active_form:function(){if(this.active_form_has_changes){this.has_changes=true;$("#right-content").find("form").submit();this.active_form_has_changes=false}},reload_active_node:function(){if(this.active_node){var a=this.active_node;this.clear_active_node();this.activate_node(a)}},clear_active_node:function(){if(this.active_node){this.active_node.make_inactive();this.active_node=null}parent.show_form_for_tool("<div>No node selected</div>")},activate_node:function(a){if(this.active_node!=a){this.check_changes_in_active_form();this.clear_active_node();parent.show_form_for_tool(a.form_html+a.tooltip,a);a.make_active();this.active_node=a}},node_changed:function(a){this.has_changes=true;if(this.active_node==a){this.check_changes_in_active_form();parent.show_form_for_tool(a.form_html+a.tooltip,a)}},layout:function(){this.check_changes_in_active_form();this.has_changes=true;var i={};var b={};$.each(this.nodes,function(l,k){if(i[l]===undefined){i[l]=0}if(b[l]===undefined){b[l]=[]}});$.each(this.nodes,function(l,k){$.each(k.input_terminals,function(m,n){$.each(n.connectors,function(p,q){var o=q.handle1.node;i[k.id]+=1;b[o.id].push(k.id)})})});node_ids_by_level=[];while(true){level_parents=[];for(var a in i){if(i[a]==0){level_parents.push(a)}}if(level_parents.length==0){break}node_ids_by_level.push(level_parents);for(var f in level_parents){var j=level_parents[f];delete i[j];for(var g in b[j]){i[b[j][g]]-=1}}}if(i.length){return}var d=this.nodes;var h=80;v_pad=30;var c=h;$.each(node_ids_by_level,function(k,l){l.sort(function(p,o){return $(d[p].element).position().top-$(d[o].element).position().top});var m=0;var n=v_pad;$.each(l,function(o,r){var q=d[r];var p=$(q.element);$(p).css({top:n,left:c});m=Math.max(m,$(p).width());n+=$(p).height()+v_pad});c+=m+h});$.each(d,function(k,l){l.redraw()})},bounds_for_all_nodes:function(){var d=Infinity,b=-Infinity,c=Infinity,a=-Infinity,f;$.each(this.nodes,function(h,g){e=$(g.element);f=e.position();d=Math.min(d,f.left);b=Math.max(b,f.left+e.width());c=Math.min(c,f.top);a=Math.max(a,f.top+e.width())});return{xmin:d,xmax:b,ymin:c,ymax:a}},fit_canvas_to_nodes:function(){var a=this.bounds_for_all_nodes();var f=this.canvas_container.position();var i=this.canvas_container.parent();var d=fix_delta(a.xmin,100);var h=fix_delta(a.ymin,100);d=Math.max(d,f.left);h=Math.max(h,f.top);var c=f.left-d;var g=f.top-h;var b=round_up(a.xmax+100,100)+d;var j=round_up(a.ymax+100,100)+h;b=Math.max(b,-c+i.width());j=Math.max(j,-g+i.height());this.canvas_container.css({left:c,top:g,width:b,height:j});this.canvas_container.children().each(function(){var k=$(this).position();$(this).css("left",k.left+d);$(this).css("top",k.top+h)})}});
/* fix_delta / round_up: 100px-grid snapping helpers used by
   fit_canvas_to_nodes above. */
function fix_delta(a,b){if(a<b||a>3*b){new_pos=(Math.ceil(((a%b))/b)+1)*b;return(-(a-new_pos))}return 0}function round_up(a,b){return Math.ceil(a/b)*b}
/* prebuild_node(type, title, tool_id): builds the DOM for a new canvas node
   (title bar, placeholder body, delete button), centers it in the visible
   canvas, and wires drag/dragstart/dragend handlers. Returns the new Node. */
function prebuild_node(l,j,r){var i=$("<div class='toolForm toolFormInCanvas'></div>");var g=new Node(i);g.type=l;if(l=="tool"){g.tool_id=r}var n=$("<div class='toolFormTitle unselectable'>"+j+"</div>");i.append(n);i.css("left",$(window).scrollLeft()+20);i.css("top",$(window).scrollTop()+20);var m=$("<div class='toolFormBody'></div>");var h="<div><img height='16' align='middle' src='"+galaxy_paths.attributes.image_path+"/loading_small_white_bg.gif'/> loading tool info...</div>";m.append(h);g.form_html=h;i.append(m);var k=$("<div class='buttons' style='float: right;'></div>");k.append($("<img/>").attr("src",galaxy_paths.attributes.image_path+"/delete_icon.png").click(function(b){g.destroy()}).hover(function(){$(this).attr("src",galaxy_paths.attributes.image_path+"/delete_icon_dark.png")},function(){$(this).attr("src",galaxy_paths.attributes.image_path+"/delete_icon.png")}));i.appendTo("#canvas-container");var d=$("#canvas-container").position();var c=$("#canvas-container").parent();var a=i.width();var q=i.height();i.css({left:(-d.left)+(c.width()/2)-(a/2),top:(-d.top)+(c.height()/2)-(q/2)});k.prependTo(n);a+=(k.width()+10);i.css("width",a);$(i).bind("dragstart",function(){workflow.activate_node(g)}).bind("dragend",function(){workflow.node_changed(this);workflow.fit_canvas_to_nodes();canvas_manager.draw_overview()}).bind("dragclickonly",function(){workflow.activate_node(g)}).bind("drag",function(o,p){var f=$(this).offsetParent().offset(),b=p.offsetX-f.left,s=p.offsetY-f.top;$(this).css({left:b,top:s});$(this).find(".terminal").each(function(){this.terminal.redraw()})});return g}
/* Datatype-compatibility tables, filled in by populate_datatype_info;
   issubtype(child_ext, parent_ext) answers whether one extension's class is
   (a subclass of) another's, per the class_to_classes map. */
var ext_to_type=null;var type_to_type=null;function issubtype(b,a){b=ext_to_type[b];a=ext_to_type[a];return(type_to_type[b])&&(a in type_to_type[b])}function populate_datatype_info(a){ext_to_type=a.ext_to_class_name;type_to_type=a.class_to_classes}
/* ScrollPanel: auto-scrolls a panel while the pointer is dragged near its
   edges; test() nudges the panel and re-arms itself on a 50ms timeout, stop()
   cancels it. */
function ScrollPanel(a){this.panel=a}$.extend(ScrollPanel.prototype,{test:function(v,d){clearTimeout(this.timeout);var k=v.pageX,j=v.pageY,l=$(this.panel),c=l.position(),b=l.width(),i=l.height(),w=l.parent(),s=w.width(),a=w.height(),r=w.offset(),p=r.left,m=r.top,A=p+w.width(),u=m+w.height(),B=-(b-(s/2)),z=-(i-(a/2)),g=(s/2),f=(a/2),h=false,q=5,o=23;if(k-q<p){if(c.left<g){var n=Math.min(o,g-c.left);l.css("left",c.left+n);h=true}}else{if(k+q>A){if(c.left>B){var n=Math.min(o,c.left-B);l.css("left",c.left-n);h=true}}else{if(j-q<m){if(c.top<f){var n=Math.min(o,f-c.top);l.css("top",c.top+n);h=true}}else{if(j+q>u){if(c.top>z){var n=Math.min(o,c.top-B);l.css("top",(c.top-n)+"px");h=true}}}}}if(h){d();var l=this;this.timeout=setTimeout(function(){l.test(v,d)},50)}},stop:function(b,a){clearTimeout(this.timeout)}});
/* CanvasManager: wires drag panning of the main canvas (cv/cc), the overview
   minimap (oc/ov) and its resize border, and renders the minimap rectangles
   for each workflow node (error nodes red, workflow-output nodes orange). */
function CanvasManager(b,a){this.cv=b;this.cc=this.cv.find("#canvas-container");this.oc=a.find("#overview-canvas");this.ov=a.find("#overview-viewport");this.init_drag()}$.extend(CanvasManager.prototype,{init_drag:function(){var b=this;var a=function(f,g){f=Math.min(f,b.cv.width()/2);f=Math.max(f,-b.cc.width()+b.cv.width()/2);g=Math.min(g,b.cv.height()/2);g=Math.max(g,-b.cc.height()+b.cv.height()/2);b.cc.css({left:f,top:g});b.update_viewport_overlay()};this.cc.each(function(){this.scroll_panel=new ScrollPanel(this)});var d,c;this.cv.bind("dragstart",function(){var g=$(this).offset();var f=b.cc.position();c=f.top-g.top;d=f.left-g.left}).bind("drag",function(f,g){a(g.offsetX+d,g.offsetY+c)}).bind("dragend",function(){workflow.fit_canvas_to_nodes();b.draw_overview()});this.ov.bind("drag",function(k,l){var h=b.cc.width(),n=b.cc.height(),m=b.oc.width(),j=b.oc.height(),f=$(this).offsetParent().offset(),i=l.offsetX-f.left,g=l.offsetY-f.top;a(-(i/m*h),-(g/j*n))}).bind("dragend",function(){workflow.fit_canvas_to_nodes();b.draw_overview()});$("#overview-border").bind("drag",function(g,i){var j=$(this).offsetParent();var h=j.offset();var f=Math.max(j.width()-(i.offsetX-h.left),j.height()-(i.offsetY-h.top));$(this).css({width:f,height:f});b.draw_overview()});$("#overview-border div").bind("drag",function(){})},update_viewport_overlay:function(){var b=this.cc,f=this.cv,a=this.oc,c=this.ov,d=b.width(),j=b.height(),i=a.width(),g=a.height(),h=b.position();c.css({left:-(h.left/d*i),top:-(h.top/j*g),width:(f.width()/d*i)-2,height:(f.height()/j*g)-2})},draw_overview:function(){var j=$("#overview-canvas"),m=j.parent().parent().width(),i=j.get(0).getContext("2d"),d=$("#canvas-container").width(),l=$("#canvas-container").height();var g,a,k,f;var h=this.cv.width();var b=this.cv.height();if(d<h&&l<b){k=d/h*m;f=(m-k)/2;g=l/b*m;a=(m-g)/2}else{if(d<l){a=0;g=m;k=Math.ceil(g*d/l);f=(m-k)/2}else{k=m;f=0;g=Math.ceil(k*l/d);a=(m-g)/2}}j.parent().css({left:f,top:a,width:k,height:g});j.attr("width",k);j.attr("height",g);$.each(workflow.nodes,function(t,q){i.fillStyle="#D2C099";i.strokeStyle="#D8B365";i.lineWidth=1;var s=$(q.element),n=s.position(),c=n.left/d*k,r=n.top/l*g,o=s.width()/d*k,p=s.height()/l*g;if(q.tool_errors){i.fillStyle="#FFCCCC";i.strokeStyle="#AA6666"}else{if(q.workflow_outputs!=undefined&&q.workflow_outputs.length>0){i.fillStyle="#E8A92D";i.strokeStyle="#E8A92D"}}i.fillRect(c,r,o,p);i.strokeRect(c,r,o,p)});this.update_viewport_overlay()}});
\ No newline at end of file
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: inithello: Updated migration script to reference repository with correct use of tool dependency tag sets.
by Bitbucket 25 Oct '12
by Bitbucket 25 Oct '12
25 Oct '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/1136605c015d/
changeset: 1136605c015d
user: inithello
date: 2012-10-25 19:25:39
summary: Updated migration script to reference repository with correct use of tool dependency tag sets.
affected #: 1 file
diff -r b121409702081725920cec548eea991b1bbe6a6f -r 1136605c015d0e28ba42c9ad83f860652ee453a4 scripts/migrate_tools/0006_tools.xml
--- a/scripts/migrate_tools/0006_tools.xml
+++ b/scripts/migrate_tools/0006_tools.xml
@@ -1,6 +1,6 @@
<?xml version="1.0"?><toolshed name="toolshed.g2.bx.psu.edu">
- <repository name="picard" description="Galaxy wrappers for captain picard." changeset_revision="1cd7f3b42609">
+ <repository name="picard" description="Galaxy wrappers for the Picard SAM/BAM manipulation tools." changeset_revision="e0232cbac965"><tool id="picard_FastqToSam" version="1.56.0" file="picard_FastqToSam.xml" /><tool id="picard_SamToFastq" version="1.56.1" file="picard_SamToFastq.xml" /><tool id="picard_BamIndexStats" version="1.56.0" file="picard_BamIndexStats.xml" />
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
25 Oct '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/b12140970208/
changeset: b12140970208
user: greg
date: 2012-10-25 17:54:07
summary: Add an altered version of the code contributed by Bjorn Gruning to support installation of tool dependencies that are zip archives when installing repositories from the tool shed. Bjorn's initial code required Python 2.6, so changes were made to support Python 2.5+.
affected #: 3 files
diff -r 6641f66fdafd6684deda5c8d132c28491c5d39c7 -r b121409702081725920cec548eea991b1bbe6a6f lib/galaxy/tool_shed/tool_dependencies/common_util.py
--- a/lib/galaxy/tool_shed/tool_dependencies/common_util.py
+++ b/lib/galaxy/tool_shed/tool_dependencies/common_util.py
@@ -1,12 +1,24 @@
-import os, shutil, tarfile, urllib2
+import os, shutil, tarfile, urllib2, zipfile
from galaxy.datatypes.checkers import *
+def zipfile_ok( path_to_archive ):
+ """
+ This function is a bit pedantic and not functionally necessary. It checks that no archive member would extract to a path outside of the extraction directory,
+ because ZipFile.extractall() has some potential security holes. See python zipfile documentation for more details.
+ """
+ basename = os.path.realpath( os.path.dirname( path_to_archive ) )
+ zip_archive = zipfile.ZipFile( path_to_archive )
+ for member in zip_archive.namelist():
+ member_path = os.path.realpath( os.path.join( basename, member ) )
+ if not member_path.startswith( basename ):
+ return False
+ return True
def create_env_var_dict( elem, tool_dependency_install_dir=None, tool_shed_repository_install_dir=None ):
env_var_name = elem.get( 'name', 'PATH' )
env_var_action = elem.get( 'action', 'prepend_to' )
env_var_text = None
if elem.text and elem.text.find( 'REPOSITORY_INSTALL_DIR' ) >= 0:
- if tool_shed_repository_install_dir:
+ if tool_shed_repository_install_dir and elem.text.find( '$REPOSITORY_INSTALL_DIR' ) != -1:
env_var_text = elem.text.replace( '$REPOSITORY_INSTALL_DIR', tool_shed_repository_install_dir )
return dict( name=env_var_name, action=env_var_action, value=env_var_text )
else:
@@ -50,6 +62,20 @@
tar = tarfile.open( file_name )
tar.extractall( path=file_path )
tar.close()
+def extract_zip( archive_path, extraction_path ):
+ # TODO: change this method to use zipfile.Zipfile.extractall() when we stop supporting Python 2.5.
+ if not zipfile_ok( archive_path ):
+ return False
+ zip_archive = zipfile.ZipFile( archive_path, 'r' )
+ for name in zip_archive.namelist():
+ uncompressed_path = os.path.join( extraction_path, name )
+ if uncompressed_path.endswith( '/' ):
+ if not os.path.isdir( uncompressed_path ):
+ os.makedirs( uncompressed_path )
+ else:
+ file( uncompressed_path, 'wb' ).write( zip_archive.read( name ) )
+ zip_archive.close()
+ return True
def isbz2( file_path ):
return is_bz2( file_path )
def isgzip( file_path ):
@@ -102,3 +128,13 @@
if dst:
dst.close()
return os.path.abspath( file_path )
+def zip_extraction_directory( file_path, file_name ):
+ """Try to return the correct extraction directory."""
+ files = [ filename for filename in os.listdir( file_path ) if not filename.endswith( '.zip' ) ]
+ if len( files ) > 1:
+ return os.path.abspath( file_path )
+ elif len( files ) == 1:
+ # If there is only one file it should be a directory.
+ if os.path.isdir( os.path.join( file_path, files[ 0 ] ) ):
+ return os.path.abspath( os.path.join( file_path, files[ 0 ] ) )
+ raise ValueError( 'Could not find directory for the extracted file %s' % os.path.abspath( os.path.join( file_path, file_name ) ) )
diff -r 6641f66fdafd6684deda5c8d132c28491c5d39c7 -r b121409702081725920cec548eea991b1bbe6a6f lib/galaxy/tool_shed/tool_dependencies/fabric_util.py
--- a/lib/galaxy/tool_shed/tool_dependencies/fabric_util.py
+++ b/lib/galaxy/tool_shed/tool_dependencies/fabric_util.py
@@ -49,8 +49,6 @@
sa_session = app.model.context.current
install_dir = actions_dict[ 'install_dir' ]
package_name = actions_dict[ 'package_name' ]
- #download_url = actions_dict.get( 'download_url', None )
- #clone_cmd = actions_dict.get( 'clone_cmd', None )
actions = actions_dict.get( 'actions', None )
if actions:
with make_tmp_dir() as work_dir:
@@ -59,13 +57,17 @@
# are currently only two supported processes; download_by_url and clone via a "shell_command" action type.
action_type, action_dict = actions[ 0 ]
if action_type == 'download_by_url':
- # <action type="download_by_url">http://sourceforge.net/projects/samtools/files/samtools/0.1.18/samtools-0.1…</action>
url = action_dict[ 'url' ]
downloaded_filename = os.path.split( url )[ -1 ]
downloaded_file_path = common_util.url_download( work_dir, downloaded_filename, url )
if common_util.istar( downloaded_file_path ):
+ # <action type="download_by_url">http://sourceforge.net/projects/samtools/files/samtools/0.1.18/samtools-0.1…</action>
common_util.extract_tar( downloaded_file_path, work_dir )
dir = common_util.tar_extraction_directory( work_dir, downloaded_filename )
+ elif common_util.iszip( downloaded_file_path ):
+ # <action type="download_by_url">http://downloads.sourceforge.net/project/picard/picard-tools/1.56/picard-to…</action>
+ zip_archive_extracted = common_util.extract_zip( downloaded_file_path, work_dir )
+ dir = common_util.zip_extraction_directory( work_dir, downloaded_filename )
else:
dir = work_dir
elif action_type == 'shell_command':
diff -r 6641f66fdafd6684deda5c8d132c28491c5d39c7 -r b121409702081725920cec548eea991b1bbe6a6f templates/admin/tool_shed_repository/browse_tool_dependency.mako
--- a/templates/admin/tool_shed_repository/browse_tool_dependency.mako
+++ b/templates/admin/tool_shed_repository/browse_tool_dependency.mako
@@ -58,7 +58,7 @@
${tool_dependency.status}
<div style="clear: both"></div></div>
- %if repository.in_error_state:
+ %if tool_dependency.in_error_state:
<div class="form-row" ><label>Tool dependency installation error:</label>
${tool_dependency.error_message}
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0