1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/3d0db17ca1fa/
changeset: 3d0db17ca1fa
user: greg
date: 2011-11-21 18:09:25
summary: Fix for generating metadata on the repository tip.
affected #: 1 file
diff -r 058c507d05fd41694261f9f679b7cccafe295faf -r 3d0db17ca1fa86aaeff546c1a3cb375f15148c72 lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -1631,7 +1631,10 @@
for changeset in repo.changelog:
current_changeset_revision = str( repo.changectx( changeset ) )
ctx = get_changectx_for_changeset( trans, repo, current_changeset_revision )
- current_metadata_dict, invalid_files = generate_metadata_for_changeset_revision( trans, id, ctx, current_changeset_revision, repo_dir )
+ if current_changeset_revision == repository.tip:
+ current_metadata_dict, invalid_files = generate_metadata_for_repository_tip( trans, id, ctx, current_changeset_revision, repo_dir )
+ else:
+ current_metadata_dict, invalid_files = generate_metadata_for_changeset_revision( trans, id, ctx, current_changeset_revision, repo_dir )
if current_metadata_dict:
if ancestor_changeset_revision:
# Compare metadata from ancestor and current. The value of comparison will be one of:
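The dispatch this changeset introduces is easier to see lifted out of the loop. A minimal sketch (the function names come from the diff; the standalone wrapper itself is illustrative):

    def generate_metadata( trans, id, repository, repo, changeset_revision, repo_dir ):
        # Resolve the change context for the requested revision.
        ctx = get_changectx_for_changeset( trans, repo, changeset_revision )
        if changeset_revision == repository.tip:
            # The tip's files already exist on disk, so metadata can be
            # generated by walking repo_dir directly.
            return generate_metadata_for_repository_tip( trans, id, ctx, changeset_revision, repo_dir )
        # Older revisions must read file contents out of the changeset context.
        return generate_metadata_for_changeset_revision( trans, id, ctx, changeset_revision, repo_dir )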
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/058c507d05fd/
changeset: 058c507d05fd
user: greg
date: 2011-11-21 17:21:23
summary: Subtle but critical fixes in resetting all metadata for tool shed repositories (and some code cleanup).
affected #: 4 files
diff -r 66038323cb438066cb76e255fd436fc245056bfd -r 058c507d05fd41694261f9f679b7cccafe295faf lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -1622,51 +1622,56 @@
if len( repo ) == 1:
message, status = set_repository_metadata( trans, id, repository.tip, **kwd )
else:
- # The following will be a list of changeset_revisions that have been created or updated.
- # When the following loop completes, we'll delete all repository_metadata records for
- # this repository that do not have a changeset_revision value in this list.
+ # The list of changeset_revisions refers to repository_metadata records that have been
+ # created or updated. When the following loop completes, we'll delete all repository_metadata
+ # records for this repository that do not have a changeset_revision value in this list.
changeset_revisions = []
- repository_metadata_tup_for_comparison = ()
+ ancestor_changeset_revision = None
+ ancestor_metadata_dict = None
for changeset in repo.changelog:
- ctx = repo.changectx( changeset )
- current_changeset_revision = str( ctx )
+ current_changeset_revision = str( repo.changectx( changeset ) )
ctx = get_changectx_for_changeset( trans, repo, current_changeset_revision )
current_metadata_dict, invalid_files = generate_metadata_for_changeset_revision( trans, id, ctx, current_changeset_revision, repo_dir )
if current_metadata_dict:
- if repository_metadata_tup_for_comparison:
- ancestor_changeset_revision, ancestor_metadata_dict = repository_metadata_tup_for_comparison
- comparison = self.__compare_changeset_revisions( ancestor_changeset_revision,
- ancestor_metadata_dict,
- current_changeset_revision,
- current_metadata_dict )
- # The value of comparison will be one of:
+ if ancestor_changeset_revision:
+ # Compare metadata from ancestor and current. The value of comparison will be one of:
# 'no metadata' - no metadata for either ancestor or current, so continue from current
# 'equal' - ancestor metadata is equivalent to current metadata, so continue from current
# 'subset' - ancestor metadata is a subset of current metadata, so continue from current
# 'not equal and not subset' - ancestor metadata is neither equal to nor a subset of current
# metadata, so persist ancestor metadata.
+ comparison = self.__compare_changeset_revisions( ancestor_changeset_revision,
+ ancestor_metadata_dict,
+ current_changeset_revision,
+ current_metadata_dict )
if comparison in [ 'no metadata', 'equal', 'subset' ]:
- repository_metadata_tup_for_comparison = ( current_changeset_revision, current_metadata_dict )
+ ancestor_changeset_revision = current_changeset_revision
+ ancestor_metadata_dict = current_metadata_dict
elif comparison == 'not equal and not subset':
self.__create_or_update_repository_metadata( trans, id, repository, ancestor_changeset_revision, ancestor_metadata_dict )
- # keep track of the changeset_revisions that we've persisted.
+ # Keep track of the changeset_revisions that we've persisted.
changeset_revisions.append( ancestor_changeset_revision )
- repository_metadata_tup_for_comparison = ()
+ ancestor_changeset_revision = None
+ ancestor_metadata_dict = None
else:
- # We're at the first change set in the change log.
- repository_metadata_tup_for_comparison = ( current_changeset_revision, current_metadata_dict )
+ # We're either at the first change set in the change log or we have just created or updated
+ # a repository_metadata record. At this point we set the ancestor changeset to the current
+ # changeset for comparison in the next iteration.
+ ancestor_changeset_revision = current_changeset_revision
+ ancestor_metadata_dict = current_metadata_dict
if not ctx.children():
# We're at the end of the change log.
self.__create_or_update_repository_metadata( trans, id, repository, current_changeset_revision, current_metadata_dict )
changeset_revisions.append( current_changeset_revision )
+ ancestor_changeset_revision = None
+ ancestor_metadata_dict = None
elif ancestor_metadata_dict:
# Our current change set has no metadata, but our ancestor change set has metadata, so save it.
self.__create_or_update_repository_metadata( trans, id, repository, ancestor_changeset_revision, ancestor_metadata_dict )
- # keep track of the changeset_revisions that we've persisted.
+ # Keep track of the changeset_revisions that we've persisted.
changeset_revisions.append( ancestor_changeset_revision )
- repository_metadata_tup_for_comparison = ()
- ancestor_changeset_revision = current_changeset_revision
- ancestor_metadata_dict = current_metadata_dict
+ ancestor_changeset_revision = None
+ ancestor_metadata_dict = None
self.__clean_repository_metadata( trans, id, changeset_revisions )
if not message:
message = "Repository metadata has been reset."
@@ -1699,6 +1704,12 @@
def __compare_changeset_revisions( self, ancestor_changeset_revision, ancestor_metadata_dict, current_changeset_revision, current_metadata_dict ):
# The metadata associated with ancestor_changeset_revision is ancestor_metadata_dict. This changeset_revision
# is an ancestor of current_changeset_revision which is associated with current_metadata_dict.
+ #
+ # TODO: a new repository_metadata record will be created only when this method returns the string
+ # 'not equal and not subset'. However, we're currently also returning the strings 'no metadata',
+ # 'equal' and 'subset', depending upon how the 2 change sets compare. We'll leave things this way
+ # for the current time in case we discover a use for these additional result strings.
+ #
# Get information about tools.
if 'tools' in ancestor_metadata_dict:
ancestor_tools = ancestor_metadata_dict[ 'tools' ]
@@ -1735,7 +1746,7 @@
else:
current_datatypes = []
# Handle case where no metadata exists for either changeset.
- if not ( ancestor_guids or current_guids or ancestor_workflows or current_workflows or ancestor_datatypes or current_datatypes ):
+ if not ancestor_guids and not current_guids and not ancestor_workflows and not current_workflows and not ancestor_datatypes and not current_datatypes:
return 'no metadata'
workflow_comparison = self.__compare_workflows( ancestor_workflows, current_workflows )
datatype_comparison = self.__compare_datatypes( ancestor_datatypes, current_datatypes )
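The condition rewrite in the hunk above is a pure De Morgan transformation: 'not ( a or b or ... )' and 'not a and not b and ...' are logically equivalent, so that part of the change is stylistic rather than behavioral. A quick standalone self-check over hypothetical booleans:

    from itertools import product

    # Both forms agree for every combination of truth values.
    for a, b, c in product( [ True, False ], repeat=3 ):
        assert ( not ( a or b or c ) ) == ( not a and not b and not c )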
@@ -1758,9 +1769,13 @@
for ancestor_workflow in ancestor_workflows:
# Currently the only way to differentiate workflows is by name.
ancestor_workflow_name = ancestor_workflow[ 'name' ]
+ num_ancestor_workflow_steps = len( ancestor_workflow[ 'steps' ] )
found_in_current = False
for current_workflow in current_workflows:
- if current_workflow[ 'name' ] == ancestor_workflow_name:
+ # Assume that if the name and number of steps are equal,
+ # then the workflows are the same. Of course, this may
+ # not be true...
+ if current_workflow[ 'name' ] == ancestor_workflow_name and len( current_workflow[ 'steps' ] ) == num_ancestor_workflow_steps:
found_in_current = True
break
if not found_in_current:
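Stepping back from the individual hunks, the rewritten loop in reset_all_metadata (first hunk above) behaves like a small state machine: it carries an ancestor ( revision, metadata ) pair forward while newer revisions compare as equal or a superset, and persists the ancestor when the lineage diverges, runs dry, or ends. A loose sketch of that intent, not a line-for-line port (generate, compare and persist stand in for the real helpers):

    def walk_and_persist( revisions, generate, compare, persist ):
        persisted = []
        ancestor = None  # ( revision, metadata_dict ) or None
        for i, revision in enumerate( revisions ):
            metadata = generate( revision )
            if metadata:
                if ancestor and compare( ancestor[ 1 ], metadata ) == 'not equal and not subset':
                    # Diverged, so the ancestor's metadata must be persisted.
                    persist( *ancestor )
                    persisted.append( ancestor[ 0 ] )
                ancestor = ( revision, metadata )
                if i == len( revisions ) - 1:
                    # End of the changelog, persist whatever is current.
                    persist( revision, metadata )
                    persisted.append( revision )
            elif ancestor:
                # Current revision has no metadata, so save the ancestor's.
                persist( *ancestor )
                persisted.append( ancestor[ 0 ] )
                ancestor = None
        return persisted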
diff -r 66038323cb438066cb76e255fd436fc245056bfd -r 058c507d05fd41694261f9f679b7cccafe295faf lib/galaxy/webapps/community/model/mapping.py
--- a/lib/galaxy/webapps/community/model/mapping.py
+++ b/lib/galaxy/webapps/community/model/mapping.py
@@ -194,7 +194,7 @@
categories=relation( RepositoryCategoryAssociation ),
ratings=relation( RepositoryRatingAssociation, order_by=desc( RepositoryRatingAssociation.table.c.update_time ), backref="repositories" ),
user=relation( User.mapper ),
- downloadable_revisions=relation( RepositoryMetadata, order_by=desc( RepositoryMetadata.table.c.id ) ) ) )
+ downloadable_revisions=relation( RepositoryMetadata, order_by=desc( RepositoryMetadata.table.c.update_time ) ) ) )
assign_mapper( context, RepositoryMetadata, RepositoryMetadata.table,
properties=dict( repository=relation( Repository ) ) )
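A plausible reading of this one-line mapping change (the commit message does not spell it out): now that reset_all_metadata updates repository_metadata rows in place, primary-key order no longer tracks which row carries the freshest metadata, while update_time does. A hypothetical illustration:

    # Hypothetical rows after a reset: the older row (lower id) was just updated.
    rows = [ { 'id': 1, 'update_time': '2011-11-21 18:09' },
             { 'id': 2, 'update_time': '2011-11-20 09:30' } ]
    newest_by_id = sorted( rows, key=lambda r: r[ 'id' ], reverse=True )[ 0 ]               # id 2, stale
    newest_by_update = sorted( rows, key=lambda r: r[ 'update_time' ], reverse=True )[ 0 ]  # id 1, fresh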
diff -r 66038323cb438066cb76e255fd436fc245056bfd -r 058c507d05fd41694261f9f679b7cccafe295faf templates/webapps/community/repository/common.mako
--- a/templates/webapps/community/repository/common.mako
+++ b/templates/webapps/community/repository/common.mako
@@ -181,7 +181,7 @@
<a href="${h.url_for( controller='workflow', action='view_workflow', repository_metadata_id=repository_metadata_id, workflow_name=encode( workflow_name ), webapp=webapp )}">${workflow_name}</a></td><td>
- %if 'steps' in workflow_dict:
+ %if steps:
${len( steps )}
%else:
unknown
diff -r 66038323cb438066cb76e255fd436fc245056bfd -r 058c507d05fd41694261f9f679b7cccafe295faf templates/webapps/community/repository/view_changelog.mako
--- a/templates/webapps/community/repository/view_changelog.mako
+++ b/templates/webapps/community/repository/view_changelog.mako
@@ -107,7 +107,7 @@
else:
ctx_parent_str = "%s:%s" % ( ctx_parent_rev, ctx_parent )
if changeset[ 'has_metadata' ]:
- has_metadata_str = 'Repository metadata is associated with this change set.'
+ has_metadata_str = '<table border="0" bgcolor="#D8D8D8"><tr><td>Repository metadata is associated with this change set.</td></tr></table>'
else:
has_metadata_str = ''
%>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/dfdd405b4115/
changeset: dfdd405b4115
user: jgoecks
date: 2011-11-19 21:28:16
summary: Fix GFFDataProvider bugs introduced in 9b7d5c1c0be6.
affected #: 1 file
diff -r 4c6f1004071d5055c9bf2d0b35f5bfe077b79e1e -r dfdd405b4115c1cf06bf7f362854088478560922 lib/galaxy/visualization/tracks/data_providers.py
--- a/lib/galaxy/visualization/tracks/data_providers.py
+++ b/lib/galaxy/visualization/tracks/data_providers.py
@@ -99,7 +99,7 @@
pass
- def get_data( self, chrom, start, end, start_val=0, max_vals=None, **kwargs ):
+ def get_data( self, chrom, start, end, start_val=0, max_vals=sys.maxint, **kwargs ):
"""
Returns data in region defined by chrom, start, and end. start_val and
max_vals are used to denote the data to return: start_val is the first element to
@@ -914,7 +914,15 @@
"""
start, end = int( start ), int( end )
source = open( self.original_dataset.file_name )
- return GFFReaderWrapper( source, fix_strand=True )
+
+ def features_in_region_iter():
+ for feature in GFFReaderWrapper( source, fix_strand=True ):
+ # Only provide features that are in region.
+ feature_start, feature_end = convert_gff_coords_to_bed( [ feature.start, feature.end ] )
+ if feature.chrom != chrom or feature_start < start or feature_end > end:
+ continue
+ yield feature
+ return features_in_region_iter()
def process_data( self, iterator, start_val=0, max_vals=None, **kwargs ):
"""
@@ -931,9 +939,6 @@
message = ERROR_MAX_VALS % ( max_vals, "reads" )
break
- feature_start, feature_end = convert_gff_coords_to_bed( [ feature.start, feature.end ] )
- if feature.chrom != chrom or feature_start < start or feature_end > end:
- continue
payload = package_gff_feature( feature )
payload.insert( 0, offset )
results.append( payload )
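Two things happen in this fix: region filtering moves out of process_data() and into the generator returned for the region, and get_data()'s max_vals defaults to sys.maxint instead of None, so a numeric comparison against it cannot fail. A sketch of how the pieces are assumed to fit together (it presumes 'import sys' at module level and that the base class feeds a get_iterator() result into process_data(); both are inferences from the hunks above):

    import sys

    def get_data( provider, chrom, start, end, start_val=0, max_vals=sys.maxint, **kwargs ):
        # The iterator now yields only features inside the requested region,
        # so process_data() can treat every feature it sees as in-region.
        iterator = provider.get_iterator( chrom, start, end )
        return provider.process_data( iterator, start_val, max_vals, **kwargs )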
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/4c6f1004071d/
changeset: 4c6f1004071d
user: greg
date: 2011-11-18 22:44:10
summary: Add the ability to reset all metadata (for every change set) in a tool shed repository (currently restricted to an admin), and add a fix for checking security when making changes to tool shed repository information on the Manage repository page.
affected #: 4 files
diff -r 07df6866125adb6f3723aec8303e9300338e4cdc -r 4c6f1004071d5055c9bf2d0b35f5bfe077b79e1e lib/galaxy/webapps/community/controllers/common.py
--- a/lib/galaxy/webapps/community/controllers/common.py
+++ b/lib/galaxy/webapps/community/controllers/common.py
@@ -343,6 +343,127 @@
mimetype=mimetype ) )
metadata_dict[ 'datatypes' ] = datatypes
return metadata_dict
+def generate_metadata_for_repository_tip( trans, id, ctx, changeset_revision, repo_dir ):
+ # Browse the repository tip files on disk to generate metadata. This is faster than
+ # the generate_metadata_for_changeset_revision() method below because fctx.data() does
+ # not have to be written to disk to load tools. Also, since changeset_revision is the
+ # repository tip, we handle things like .loc.sample files here.
+ metadata_dict = {}
+ invalid_files = []
+ sample_files = []
+ datatypes_config = None
+ # Find datatypes_conf.xml if it exists.
+ for root, dirs, files in os.walk( repo_dir ):
+ if root.find( '.hg' ) < 0:
+ for name in files:
+ if name == 'datatypes_conf.xml':
+ datatypes_config = os.path.abspath( os.path.join( root, name ) )
+ break
+ if datatypes_config:
+ metadata_dict = generate_datatypes_metadata( trans, id, changeset_revision, datatypes_config, metadata_dict )
+ # Find all special .sample files.
+ for root, dirs, files in os.walk( repo_dir ):
+ if root.find( '.hg' ) < 0:
+ for name in files:
+ if name.endswith( '.sample' ):
+ sample_files.append( os.path.abspath( os.path.join( root, name ) ) )
+ # Find all tool configs and exported workflows.
+ for root, dirs, files in os.walk( repo_dir ):
+ if root.find( '.hg' ) < 0 and root.find( 'hgrc' ) < 0:
+ if '.hg' in dirs:
+ dirs.remove( '.hg' )
+ for name in files:
+ # Find all tool configs.
+ if name != 'datatypes_conf.xml' and name.endswith( '.xml' ):
+ full_path = os.path.abspath( os.path.join( root, name ) )
+ try:
+ tool = load_tool( trans, full_path )
+ valid = True
+ except Exception, e:
+ valid = False
+ invalid_files.append( ( name, str( e ) ) )
+ if valid and tool is not None:
+ can_set_metadata, invalid_files = check_tool_input_params( trans, name, tool, sample_files, invalid_files )
+ if can_set_metadata:
+ # Update the list of metadata dictionaries for tools in metadata_dict.
+ tool_config = os.path.join( root, name )
+ metadata_dict = generate_tool_metadata( trans, id, changeset_revision, tool_config, tool, metadata_dict )
+ # Find all exported workflows
+ elif name.endswith( '.ga' ):
+ try:
+ full_path = os.path.abspath( os.path.join( root, name ) )
+ # Convert workflow data from json
+ fp = open( full_path, 'rb' )
+ workflow_text = fp.read()
+ fp.close()
+ exported_workflow_dict = from_json_string( workflow_text )
+ if 'a_galaxy_workflow' in exported_workflow_dict and exported_workflow_dict[ 'a_galaxy_workflow' ] == 'true':
+ # Update the list of metadata dictionaries for workflows in metadata_dict.
+ metadata_dict = generate_workflow_metadata( trans, id, changeset_revision, exported_workflow_dict, metadata_dict )
+ except Exception, e:
+ invalid_files.append( ( name, str( e ) ) )
+ return metadata_dict, invalid_files
+def generate_metadata_for_changeset_revision( trans, id, ctx, changeset_revision, repo_dir ):
+ # Browse repository files within a change set to generate metadata.
+ metadata_dict = {}
+ invalid_files = []
+ sample_files = []
+ datatypes_config = None
+ # Find datatypes_conf.xml if it exists.
+ for filename in ctx:
+ if filename == 'datatypes_conf.xml':
+ fctx = ctx[ filename ]
+ datatypes_config = fctx.data()
+ break
+ if datatypes_config:
+ metadata_dict = generate_datatypes_metadata( trans, id, changeset_revision, datatypes_config, metadata_dict )
+ # Get all tool config file names from the hgweb url, something like:
+ # /repos/test/convert_chars1/file/e58dcf0026c7/convert_characters.xml
+ for filename in ctx:
+ # Find all tool configs.
+ if filename != 'datatypes_conf.xml' and filename.endswith( '.xml' ):
+ fctx = ctx[ filename ]
+ # Write the contents of the old tool config to a temporary file.
+ # TODO: figure out how to enhance the load_tool method so that a
+ # temporary disk file is not necessary in order to pass the tool
+ # config.
+ fh = tempfile.NamedTemporaryFile( 'w' )
+ tmp_filename = fh.name
+ fh.close()
+ fh = open( tmp_filename, 'w' )
+ fh.write( fctx.data() )
+ fh.close()
+ try:
+ tool = load_tool( trans, tmp_filename )
+ valid = True
+ except Exception, e:
+ invalid_files.append( ( filename, str( e ) ) )
+ valid = False
+ if valid and tool is not None:
+ # Update the list of metadata dictionaries for tools in metadata_dict. Note that filename
+ # here is the relative path to the config file within the change set context, something
+ # like filtering.xml, but when the change set was the repository tip, the value was
+ # something like database/community_files/000/repo_1/filtering.xml. This shouldn't break
+ # anything, but may result in a bit of confusion when maintaining the code / data over time.
+ # IMPORTANT NOTE: Here we are assuming that since the current change set is not the repository
+ # tip, we do not have to handle any .loc.sample files since they would have been handled previously.
+ metadata_dict = generate_tool_metadata( trans, id, changeset_revision, filename, tool, metadata_dict )
+ try:
+ os.unlink( tmp_filename )
+ except:
+ pass
+ # Find all exported workflows.
+ elif filename.endswith( '.ga' ):
+ try:
+ fctx = ctx[ filename ]
+ workflow_text = fctx.data()
+ exported_workflow_dict = from_json_string( workflow_text )
+ if 'a_galaxy_workflow' in exported_workflow_dict and exported_workflow_dict[ 'a_galaxy_workflow' ] == 'true':
+ # Update the list of metadata dictionaries for workflows in metadata_dict.
+ metadata_dict = generate_workflow_metadata( trans, id, changeset_revision, exported_workflow_dict, metadata_dict )
+ except Exception, e:
+ invalid_files.append( ( filename, str( e ) ) )
+ return metadata_dict, invalid_files
def set_repository_metadata( trans, id, changeset_revision, **kwd ):
"""Set repository metadata"""
message = ''
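The temp-file round trip in generate_metadata_for_changeset_revision() (create a NamedTemporaryFile just to learn its name, close it, then reopen and write) works, but there is a small window between the close and the reopen. A sketch of the same idea with tempfile.mkstemp(), which hands back an already-open descriptor (load_tool and fctx are names from the diff; the helper itself is illustrative):

    import os
    import tempfile

    def load_tool_from_fctx( trans, fctx ):
        # Materialize fctx.data() so load_tool() can parse a real file on
        # disk, then always remove the temporary file.
        fd, tmp_filename = tempfile.mkstemp()
        try:
            os.write( fd, fctx.data() )
        finally:
            os.close( fd )
        try:
            return load_tool( trans, tmp_filename )
        finally:
            try:
                os.unlink( tmp_filename )
            except OSError:
                pass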
@@ -350,102 +471,12 @@
repository = get_repository( trans, id )
repo_dir = repository.repo_path
repo = hg.repository( get_configured_ui(), repo_dir )
- invalid_files = []
- sample_files = []
- datatypes_config = None
ctx = get_changectx_for_changeset( trans, repo, changeset_revision )
if ctx is not None:
- metadata_dict = {}
if changeset_revision == repository.tip:
- # Find datatypes_conf.xml if it exists.
- for root, dirs, files in os.walk( repo_dir ):
- if root.find( '.hg' ) < 0:
- for name in files:
- if name == 'datatypes_conf.xml':
- datatypes_config = os.path.abspath( os.path.join( root, name ) )
- break
- if datatypes_config:
- metadata_dict = generate_datatypes_metadata( trans, id, changeset_revision, datatypes_config, metadata_dict )
- # Find all special .sample files.
- for root, dirs, files in os.walk( repo_dir ):
- if root.find( '.hg' ) < 0:
- for name in files:
- if name.endswith( '.sample' ):
- sample_files.append( os.path.abspath( os.path.join( root, name ) ) )
- # Find all tool configs and exported workflows.
- for root, dirs, files in os.walk( repo_dir ):
- if root.find( '.hg' ) < 0 and root.find( 'hgrc' ) < 0:
- if '.hg' in dirs:
- dirs.remove( '.hg' )
- for name in files:
- # Find all tool configs.
- if name != 'datatypes_conf.xml' and name.endswith( '.xml' ):
- full_path = os.path.abspath( os.path.join( root, name ) )
- try:
- tool = load_tool( trans, full_path )
- valid = True
- except Exception, e:
- valid = False
- invalid_files.append( ( name, str( e ) ) )
- if valid and tool is not None:
- can_set_metadata, invalid_files = check_tool_input_params( trans, name, tool, sample_files, invalid_files )
- if can_set_metadata:
- # Update the list of metadata dictionaries for tools in metadata_dict.
- tool_config = os.path.join( root, name )
- metadata_dict = generate_tool_metadata( trans, id, changeset_revision, tool_config, tool, metadata_dict )
- # Find all exported workflows
- elif name.endswith( '.ga' ):
- try:
- full_path = os.path.abspath( os.path.join( root, name ) )
- # Convert workflow data from json
- fp = open( full_path, 'rb' )
- workflow_text = fp.read()
- fp.close()
- exported_workflow_dict = from_json_string( workflow_text )
- if exported_workflow_dict[ 'a_galaxy_workflow' ] == 'true':
- # Update the list of metadata dictionaries for workflows in metadata_dict.
- metadata_dict = generate_workflow_metadata( trans, id, changeset_revision, exported_workflow_dict, metadata_dict )
- except Exception, e:
- invalid_files.append( ( name, str( e ) ) )
+ metadata_dict, invalid_files = generate_metadata_for_repository_tip( trans, id, ctx, changeset_revision, repo_dir )
else:
- # Find all special .sample files first.
- for filename in ctx:
- if filename.endswith( '.sample' ):
- sample_files.append( os.path.abspath( filename ) )
- # Get all tool config file names from the hgweb url, something like:
- # /repos/test/convert_chars1/file/e58dcf0026c7/convert_characters.xml
- for filename in ctx:
- # Find all tool configs - we do not have to update metadata for workflows or datatypes in anything
- # but repository tips (handled above) since at the time this code was written, no workflows or
- # datatypes_conf.xml files exist in tool shed repositories, so they can only be added in future tips.
- if filename.endswith( '.xml' ):
- fctx = ctx[ filename ]
- # Write the contents of the old tool config to a temporary file.
- fh = tempfile.NamedTemporaryFile( 'w' )
- tmp_filename = fh.name
- fh.close()
- fh = open( tmp_filename, 'w' )
- fh.write( fctx.data() )
- fh.close()
- try:
- tool = load_tool( trans, tmp_filename )
- valid = True
- except Exception, e:
- invalid_files.append( ( filename, str( e ) ) )
- valid = False
- if valid and tool is not None:
- can_set_metadata, invalid_files = check_tool_input_params( trans, filename, tool, sample_files, invalid_files )
- if can_set_metadata:
- # Update the list of metadata dictionaries for tools in metadata_dict. Note that filename
- # here is the relative path to the config file within the change set context, something
- # like filtering.xml, but when the change set was the repository tip, the value was
- # something like database/community_files/000/repo_1/filtering.xml. This shouldn't break
- # anything, but may result in a bit of confusion when maintaining the code / data over time.
- metadata_dict = generate_tool_metadata( trans, id, changeset_revision, filename, tool, metadata_dict )
- try:
- os.unlink( tmp_filename )
- except:
- pass
+ metadata_dict, invalid_files = generate_metadata_for_changeset_revision( trans, id, ctx, changeset_revision, repo_dir )
if metadata_dict:
if changeset_revision == repository.tip:
if new_tool_metadata_required( trans, id, metadata_dict ) or new_workflow_metadata_required( trans, id, metadata_dict ):
@@ -467,8 +498,8 @@
trans.sa_session.add( repository_metadata )
trans.sa_session.flush()
else:
- message = "Revision '%s' includes no tools or exported workflows for which metadata can be defined " % str( changeset_revision )
- message += "so this revision cannot be automatically installed into a local Galaxy instance."
+ message = "Revision '%s' includes no tools, datatypes or exported workflows for which metadata can " % str( changeset_revision )
+ message += "be defined so this revision cannot be automatically installed into a local Galaxy instance."
status = "error"
else:
# change_set is None
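One side note on the directory walks in generate_metadata_for_repository_tip(): the repeated root.find( '.hg' ) < 0 tests filter matches but still descend into .hg. os.walk() also supports pruning in place, which skips the descent entirely; the third walk in the diff already uses it. A sketch of that idiom (illustration only, not the committed code):

    import os

    def walk_tracked_files( repo_dir ):
        # Yield absolute paths of working-copy files, pruning the .hg
        # metadata directory so os.walk() never descends into it.
        for root, dirs, files in os.walk( repo_dir ):
            if '.hg' in dirs:
                dirs.remove( '.hg' )  # in-place removal; os.walk honors it
            for name in files:
                yield os.path.abspath( os.path.join( root, name ) )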
diff -r 07df6866125adb6f3723aec8303e9300338e4cdc -r 4c6f1004071d5055c9bf2d0b35f5bfe077b79e1e lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -1311,15 +1311,14 @@
if params.get( 'edit_repository_button', False ):
flush_needed = False
# TODO: add a can_manage in the security agent.
- if user != repository.user or not trans.user_is_admin():
+ if not ( user.email == repository.user.email or trans.user_is_admin() ):
message = "You are not the owner of this repository, so you cannot manage it."
- status = error
return trans.response.send_redirect( web.url_for( controller='repository',
action='view_repository',
id=id,
webapp='community',
message=message,
- status=status ) )
+ status='error' ) )
if description != repository.description:
repository.description = description
flush_needed = True
@@ -1447,6 +1446,10 @@
changesets = []
for changeset in repo.changelog:
ctx = repo.changectx( changeset )
+ if get_repository_metadata_by_changeset_revision( trans, id, str( ctx ) ):
+ has_metadata = True
+ else:
+ has_metadata = False
t, tz = ctx.date()
date = datetime( *time.gmtime( float( t ) - tz )[:6] )
display_date = date.strftime( "%Y-%m-%d" )
@@ -1457,7 +1460,8 @@
'description' : ctx.description(),
'files' : ctx.files(),
'user' : ctx.user(),
- 'parent' : ctx.parents()[0] }
+ 'parent' : ctx.parents()[0],
+ 'has_metadata' : has_metadata }
# Make sure we'll view latest changeset first.
changesets.insert( 0, change_dict )
is_malicious = change_set_is_malicious( trans, id, repository.tip )
@@ -1608,6 +1612,190 @@
message=message,
status=status ) )
@web.expose
+ def reset_all_metadata( self, trans, id, **kwd ):
+ params = util.Params( kwd )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
+ repository = get_repository( trans, id )
+ repo_dir = repository.repo_path
+ repo = hg.repository( get_configured_ui(), repo_dir )
+ if len( repo ) == 1:
+ message, status = set_repository_metadata( trans, id, repository.tip, **kwd )
+ else:
+ # The following will be a list of changeset_revisions that have been created or updated.
+ # When the following loop completes, we'll delete all repository_metadata records for
+ # this repository that do not have a changeset_revision value in this list.
+ changeset_revisions = []
+ repository_metadata_tup_for_comparison = ()
+ for changeset in repo.changelog:
+ ctx = repo.changectx( changeset )
+ current_changeset_revision = str( ctx )
+ ctx = get_changectx_for_changeset( trans, repo, current_changeset_revision )
+ current_metadata_dict, invalid_files = generate_metadata_for_changeset_revision( trans, id, ctx, current_changeset_revision, repo_dir )
+ if current_metadata_dict:
+ if repository_metadata_tup_for_comparison:
+ ancestor_changeset_revision, ancestor_metadata_dict = repository_metadata_tup_for_comparison
+ comparison = self.__compare_changeset_revisions( ancestor_changeset_revision,
+ ancestor_metadata_dict,
+ current_changeset_revision,
+ current_metadata_dict )
+ # The value of comparison will be one of:
+ # 'no metadata' - no metadata for either ancestor or current, so continue from current
+ # 'equal' - ancestor metadata is equivalent to current metadata, so continue from current
+ # 'subset' - ancestor metadata is a subset of current metadata, so continue from current
+ # 'not equal and not subset' - ancestor metadata is neither equal to nor a subset of current
+ # metadata, so persist ancestor metadata.
+ if comparison in [ 'no metadata', 'equal', 'subset' ]:
+ repository_metadata_tup_for_comparison = ( current_changeset_revision, current_metadata_dict )
+ elif comparison == 'not equal and not subset':
+ self.__create_or_update_repository_metadata( trans, id, repository, ancestor_changeset_revision, ancestor_metadata_dict )
+ # keep track of the changeset_revisions that we've persisted.
+ changeset_revisions.append( ancestor_changeset_revision )
+ repository_metadata_tup_for_comparison = ()
+ else:
+ # We're at the first change set in the change log.
+ repository_metadata_tup_for_comparison = ( current_changeset_revision, current_metadata_dict )
+ if not ctx.children():
+ # We're at the end of the change log.
+ self.__create_or_update_repository_metadata( trans, id, repository, current_changeset_revision, current_metadata_dict )
+ changeset_revisions.append( current_changeset_revision )
+ elif ancestor_metadata_dict:
+ # Our current change set has no metadata, but our ancestor change set has metadata, so save it.
+ self.__create_or_update_repository_metadata( trans, id, repository, ancestor_changeset_revision, ancestor_metadata_dict )
+ # keep track of the changeset_revisions that we've persisted.
+ changeset_revisions.append( ancestor_changeset_revision )
+ repository_metadata_tup_for_comparison = ()
+ ancestor_changeset_revision = current_changeset_revision
+ ancestor_metadata_dict = current_metadata_dict
+ self.__clean_repository_metadata( trans, id, changeset_revisions )
+ if not message:
+ message = "Repository metadata has been reset."
+ status = 'done'
+ return trans.response.send_redirect( web.url_for( controller='repository',
+ action='manage_repository',
+ id=id,
+ message=message,
+ status=status ) )
+ def __clean_repository_metadata( self, trans, id, changeset_revisions ):
+ # Delete all repository_metadata records associated with the repository
+ # that have a changeset_revision that is not in changeset_revisions.
+ for repository_metadata in trans.sa_session.query( trans.model.RepositoryMetadata ) \
+ .filter( trans.model.RepositoryMetadata.table.c.repository_id == trans.security.decode_id( id ) ):
+ if repository_metadata.changeset_revision not in changeset_revisions:
+ trans.sa_session.delete( repository_metadata )
+ trans.sa_session.flush()
+ def __create_or_update_repository_metadata( self, trans, id, repository, changeset_revision, metadata_dict ):
+ repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
+ if repository_metadata:
+ # Update RepositoryMetadata.metadata.
+ repository_metadata.metadata = metadata_dict
+ trans.sa_session.add( repository_metadata )
+ trans.sa_session.flush()
+ else:
+ # Create a new repository_metadata table row.
+ repository_metadata = trans.model.RepositoryMetadata( repository.id, changeset_revision, metadata_dict )
+ trans.sa_session.add( repository_metadata )
+ trans.sa_session.flush()
+ def __compare_changeset_revisions( self, ancestor_changeset_revision, ancestor_metadata_dict, current_changeset_revision, current_metadata_dict ):
+ # The metadata associated with ancestor_changeset_revision is ancestor_metadata_dict. This changeset_revision
+ # is an ancestor of current_changeset_revision which is associated with current_metadata_dict.
+ # Get information about tools.
+ if 'tools' in ancestor_metadata_dict:
+ ancestor_tools = ancestor_metadata_dict[ 'tools' ]
+ else:
+ ancestor_tools = []
+ if 'tools' in current_metadata_dict:
+ current_tools = current_metadata_dict[ 'tools' ]
+ else:
+ current_tools = []
+ ancestor_guids = []
+ for tool_dict in ancestor_tools:
+ ancestor_guids.append( tool_dict[ 'guid' ] )
+ ancestor_guids.sort()
+ current_guids = []
+ for tool_dict in current_tools:
+ current_guids.append( tool_dict[ 'guid' ] )
+ current_guids.sort()
+ # Get information about workflows.
+ if 'workflows' in ancestor_metadata_dict:
+ ancestor_workflows = ancestor_metadata_dict[ 'workflows' ]
+ else:
+ ancestor_workflows = []
+ if 'workflows' in current_metadata_dict:
+ current_workflows = current_metadata_dict[ 'workflows' ]
+ else:
+ current_workflows = []
+ # Get information about datatypes.
+ if 'datatypes' in ancestor_metadata_dict:
+ ancestor_datatypes = ancestor_metadata_dict[ 'datatypes' ]
+ else:
+ ancestor_datatypes = []
+ if 'datatypes' in current_metadata_dict:
+ current_datatypes = current_metadata_dict[ 'datatypes' ]
+ else:
+ current_datatypes = []
+ # Handle case where no metadata exists for either changeset.
+ if not ( ancestor_guids or current_guids or ancestor_workflows or current_workflows or ancestor_datatypes or current_datatypes ):
+ return 'no metadata'
+ workflow_comparison = self.__compare_workflows( ancestor_workflows, current_workflows )
+ datatype_comparison = self.__compare_datatypes( ancestor_datatypes, current_datatypes )
+ # Handle case where all metadata is the same.
+ if ancestor_guids == current_guids and workflow_comparison == 'equal' and datatype_comparison == 'equal':
+ return 'equal'
+ if workflow_comparison == 'subset' and datatype_comparison == 'subset':
+ is_subset = True
+ for guid in ancestor_guids:
+ if guid not in current_guids:
+ is_subset = False
+ break
+ if is_subset:
+ return 'subset'
+ return 'not equal and not subset'
+ def __compare_workflows( self, ancestor_workflows, current_workflows ):
+ # Determine if ancestor_workflows is the same as current_workflows
+ # or if ancestor_workflows is a subset of current_workflows.
+ if len( ancestor_workflows ) <= len( current_workflows ):
+ for ancestor_workflow in ancestor_workflows:
+ # Currently the only way to differentiate workflows is by name.
+ ancestor_workflow_name = ancestor_workflow[ 'name' ]
+ found_in_current = False
+ for current_workflow in current_workflows:
+ if current_workflow[ 'name' ] == ancestor_workflow_name:
+ found_in_current = True
+ break
+ if not found_in_current:
+ return 'not equal and not subset'
+ if len( ancestor_workflows ) == len( current_workflows ):
+ return 'equal'
+ else:
+ return 'subset'
+ return 'not equal and not subset'
+ def __compare_datatypes( self, ancestor_datatypes, current_datatypes ):
+ # Determine if ancestor_datatypes is the same as current_datatypes
+ # or if ancestor_datatypes is a subset of current_datatypes. Each
+ # datatype dict looks something like:
+ # {"dtype": "galaxy.datatypes.images:Image", "extension": "pdf", "mimetype": "application/pdf"}
+ if len( ancestor_datatypes ) <= len( current_datatypes ):
+ for ancestor_datatype in ancestor_datatypes:
+ # Currently the only way to differentiate datatypes is by name.
+ ancestor_datatype_dtype = ancestor_datatype[ 'dtype' ]
+ ancestor_datatype_extension = ancestor_datatype[ 'extension' ]
+ ancestor_datatype_mimetype = ancestor_datatype[ 'mimetype' ]
+ found_in_current = False
+ for current_datatype in current_datatypes:
+ if current_datatype[ 'dtype' ] == ancestor_datatype_dtype and \
+ current_datatype[ 'extension' ] == ancestor_datatype_extension and \
+ current_datatype[ 'mimetype' ] == ancestor_datatype_mimetype:
+ found_in_current = True
+ break
+ if not found_in_current:
+ return 'not equal and not subset'
+ if len( ancestor_datatypes ) == len( current_datatypes ):
+ return 'equal'
+ else:
+ return 'subset'
+ return 'not equal and not subset'
+ @web.expose
def display_tool( self, trans, repository_id, tool_config, changeset_revision, **kwd ):
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
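The comparison helpers added here all answer the same question: is the ancestor's collection equal to, a subset of, or divergent from the current one? For the tool guids specifically, the logic distills to Python's set operations. A simplified sketch of just that piece (illustration, not the committed code, which also requires the workflow and datatype comparisons to agree):

    def compare_guid_lists( ancestor_guids, current_guids ):
        # 'equal' if identical, 'subset' if every ancestor guid survives
        # into current, otherwise the lineage has diverged.
        if ancestor_guids == current_guids:
            return 'equal'
        if set( ancestor_guids ).issubset( set( current_guids ) ):
            return 'subset'
        return 'not equal and not subset'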
diff -r 07df6866125adb6f3723aec8303e9300338e4cdc -r 4c6f1004071d5055c9bf2d0b35f5bfe077b79e1e templates/webapps/community/repository/manage_repository.mako
--- a/templates/webapps/community/repository/manage_repository.mako
+++ b/templates/webapps/community/repository/manage_repository.mako
@@ -20,6 +20,7 @@
else:
browse_label = 'Browse repository tip files'
can_set_malicious = metadata and can_set_metadata and is_admin and changeset_revision == repository.tip
+ can_reset_all_metadata = is_admin and len( repo ) > 0
%><%!
@@ -88,6 +89,9 @@
%if can_contact_owner:
<a class="action-button" href="${h.url_for( controller='repository', action='contact_owner', id=trans.security.encode_id( repository.id ), webapp='community' )}">Contact repository owner</a>
%endif
+ %if can_reset_all_metadata:
+ <a class="action-button" href="${h.url_for( controller='repository', action='reset_all_metadata', id=trans.security.encode_id( repository.id ), webapp='community' )}">Reset all repository metadata</a>
+ %endif
%if can_download:
<a class="action-button" href="${h.url_for( controller='repository', action='download', repository_id=trans.app.security.encode_id( repository.id ), changeset_revision=changeset_revision, file_type='gz' )}">Download as a .tar.gz file</a><a class="action-button" href="${h.url_for( controller='repository', action='download', repository_id=trans.app.security.encode_id( repository.id ), changeset_revision=changeset_revision, file_type='bz2' )}">Download as a .tar.bz2 file</a>
diff -r 07df6866125adb6f3723aec8303e9300338e4cdc -r 4c6f1004071d5055c9bf2d0b35f5bfe077b79e1e templates/webapps/community/repository/view_changelog.mako
--- a/templates/webapps/community/repository/view_changelog.mako
+++ b/templates/webapps/community/repository/view_changelog.mako
@@ -106,6 +106,10 @@
ctx_parent_str = 'None'
else:
ctx_parent_str = "%s:%s" % ( ctx_parent_rev, ctx_parent )
+ if changeset[ 'has_metadata' ]:
+ has_metadata_str = 'Repository metadata is associated with this change set.'
+ else:
+ has_metadata_str = ''
%><% display_date = changeset[ 'display_date' ] %>
%if test_date != display_date:
@@ -113,6 +117,11 @@
%endif
<tr><td>
+ %if is_admin and has_metadata_str:
+ <div class="form-row">
+ ${has_metadata_str}
+ </div>
+ %endif
<div class="form-row"><label>Description:</label><a href="${h.url_for( controller='repository', action='view_changeset', id=trans.security.encode_id( repository.id ), ctx_str=ctx_str )}">${changeset[ 'description' ]}</a>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.