galaxy-commits
August 2012
- 1 participant
- 118 discussions
commit/galaxy-central: jgoecks: Data providers: unified and comprehensive handling of differences between different chromosome naming schemes.
by Bitbucket 09 Aug '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/c6e1ef62c40b/
changeset: c6e1ef62c40b
user: jgoecks
date: 2012-08-09 23:32:47
summary: Data providers: unified and comprehensive handling of differences between different chromosome naming schemes.
affected #: 1 file
diff -r 3b5db939aebe61addd6f0b8d0c14267cf16c4144 -r c6e1ef62c40b6c212210356fa59222b60645070b lib/galaxy/visualization/tracks/data_providers.py
--- a/lib/galaxy/visualization/tracks/data_providers.py
+++ b/lib/galaxy/visualization/tracks/data_providers.py
@@ -44,6 +44,21 @@
if read[ end_pos_index ] > max_high:
max_high = read[ end_pos_index ]
return max_low, max_high
+
+def _convert_between_ucsc_and_ensemble_naming( chrom ):
+ '''
+ Convert between UCSC chromosome ('chr1') naming conventions and Ensembl
+ naming conventions ('1')
+ '''
+ if chrom.startswith( 'chr' ):
+ # Convert from UCSC to Ensembl
+ return chrom[ 3: ]
+ else:
+ # Convert from Ensembl to UCSC
+ return 'chr' + chrom
+
+def _chrom_naming_matches( chrom1, chrom2 ):
+ return ( chrom1.startswith( 'chr' ) and chrom2.startswith( 'chr' ) ) or ( not chrom1.startswith( 'chr' ) and not chrom2.startswith( 'chr' ) )
class TracksDataProvider( object ):
""" Base class for tracks data providers. """
@@ -159,7 +174,7 @@
return filters
def get_default_max_vals( self ):
- return 5000;
+ return 5000
#
# -- Base mixins and providers --
@@ -262,12 +277,9 @@
tabix = ctabix.Tabixfile(bgzip_fname, index_filename=self.converted_dataset.file_name)
- # If chrom is not found in indexes, try removing the first three
- # characters (e.g. 'chr') and see if that works. This enables the
- # provider to handle chrome names defined as chrXXX and as XXX.
- chrom = str(chrom)
- if chrom not in tabix.contigs and chrom.startswith("chr") and (chrom[3:] in tabix.contigs):
- chrom = chrom[3:]
+ # If chrom not in data, try alternative.
+ if chrom not in tabix.contigs:
+ chrom = _convert_between_ucsc_and_ensemble_naming( chrom )
return tabix.fetch(reference=chrom, start=start, end=end)
@@ -409,11 +421,6 @@
rval.append( payload )
continue
- # Simpler way to add stuff, but type casting is not done.
- # Name, score, strand, thick start, thick end.
- #end = min( len( feature ), 8 )
- #payload.extend( feature[ 3:end ] )
-
# Name, strand, thick start, thick end.
if length >= 4:
payload.append(feature[3])
@@ -470,6 +477,14 @@
"""
def get_iterator( self, chrom=None, start=None, end=None ):
+ # Read first line in order to match chrom naming format.
+ line = source.readline()
+ dataset_chrom = line.split()[0]
+ if not _chrom_naming_matches( chrom, dataset_chrom ):
+ chrom = _convert_between_ucsc_and_ensemble_naming( chrom )
+ # Undo read.
+ source.seek( 0 )
+
def line_filter_iter():
for line in open( self.original_dataset.file_name ):
if line.startswith( "track" ) or line.startswith( "browser" ):
@@ -483,6 +498,7 @@
or ( end is not None and feature_end < start ):
continue
yield line
+
return line_filter_iter()
#
@@ -601,6 +617,14 @@
"""
def get_iterator( self, chrom, start, end ):
+ # Read first line in order to match chrom naming format.
+ line = source.readline()
+ dataset_chrom = line.split()[0]
+ if not _chrom_naming_matches( chrom, dataset_chrom ):
+ chrom = _convert_between_ucsc_and_ensemble_naming( chrom )
+ # Undo read.
+ source.seek( 0 )
+
def line_filter_iter():
for line in open( self.original_dataset.file_name ):
if line.startswith("#"):
@@ -616,6 +640,7 @@
if variant_chrom != chrom or variant_start > end or variant_end < start:
continue
yield line
+
return line_filter_iter()
class SummaryTreeDataProvider( TracksDataProvider ):
@@ -639,15 +664,11 @@
st = summary_tree_from_file( self.converted_dataset.file_name )
self.CACHE[filename] = st
- # If chrom is not found in blocks, try removing the first three
- # characters (e.g. 'chr') and see if that works. This enables the
- # provider to handle chrome names defined as chrXXX and as XXX.
- if chrom in st.chrom_blocks:
- pass
- elif chrom[3:] in st.chrom_blocks:
- chrom = chrom[3:]
- else:
- return None
+ # Look for chrom in tree using both naming conventions.
+ if chrom not in st.chrom_blocks:
+ chrom = _convert_between_ucsc_and_ensemble_naming( chrom )
+ if chrom not in st.chrom_blocks:
+ return None
# Get or compute level.
if level:
@@ -684,7 +705,7 @@
self.CACHE[filename] = st
# Check for data.
- return st.chrom_blocks.get(chrom, None) is not None or (chrom and st.chrom_blocks.get(chrom[3:], None) is not None)
+ return st.chrom_blocks.get(chrom, None) or st.chrom_blocks.get(_convert_between_ucsc_and_ensemble_naming(chrom), None)
class BamDataProvider( TracksDataProvider, FilterableMixin ):
"""
@@ -727,13 +748,11 @@
try:
data = bamfile.fetch(start=start, end=end, reference=chrom)
except ValueError, e:
- # Some BAM files do not prefix chromosome names with chr, try without
- if chrom.startswith( 'chr' ):
- try:
- data = bamfile.fetch( start=start, end=end, reference=chrom[3:] )
- except ValueError:
- return None
- else:
+ # Try alternative chrom naming.
+ chrom = _convert_between_ucsc_and_ensemble_naming( chrom )
+ try:
+ data = bamfile.fetch( start=start, end=end, reference=chrom )
+ except ValueError:
return None
# Write reads in region.
@@ -757,13 +776,11 @@
try:
data = bamfile.fetch(start=start, end=end, reference=chrom)
except ValueError, e:
- # Some BAM files do not prefix chromosome names with chr, try without
- if chrom.startswith( 'chr' ):
- try:
- data = bamfile.fetch( start=start, end=end, reference=chrom[3:] )
- except ValueError:
- return None
- else:
+ # Try alternative chrom naming.
+ chrom = _convert_between_ucsc_and_ensemble_naming( chrom )
+ try:
+ data = bamfile.fetch( start=start, end=end, reference=chrom )
+ except ValueError:
return None
return data
@@ -1034,12 +1051,9 @@
source = open( self.original_dataset.file_name )
index = Indexes( self.converted_dataset.file_name )
- # If chrom is not found in indexes, try removing the first three
- # characters (e.g. 'chr') and see if that works. This enables the
- # provider to handle chrome names defined as chrXXX and as XXX.
- chrom = str(chrom)
- if chrom not in index.indexes and chrom[3:] in index.indexes:
- chrom = chrom[3:]
+ if chrom not in index.indexes:
+ # Try alternative naming.
+ chrom = _convert_between_ucsc_and_ensemble_naming( chrom )
return index.find(chrom, start, end)
@@ -1091,6 +1105,14 @@
a file offset.
"""
source = open( self.original_dataset.file_name )
+
+ # Read first line in order to match chrom naming format.
+ line = source.readline()
+ dataset_chrom = line.split()[0]
+ if not _chrom_naming_matches( chrom, dataset_chrom ):
+ chrom = _convert_between_ucsc_and_ensemble_naming( chrom )
+ # Undo read.
+ source.seek( 0 )
def features_in_region_iter():
offset = 0
@@ -1100,6 +1122,7 @@
if feature.chrom == chrom and feature_end > start and feature_start < end:
yield feature, offset
offset += feature.raw_size
+
return features_in_region_iter()
def process_data( self, iterator, start_val=0, max_vals=None, **kwargs ):
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
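
The helpers this changeset introduces are small enough to exercise standalone. A minimal sketch of the same conversion logic (function names follow the committed _convert_between_ucsc_and_ensemble_naming and _chrom_naming_matches, with spelling normalized to "ensembl" here; the test values are illustrative):

    def convert_between_ucsc_and_ensembl_naming(chrom):
        """Convert 'chr1' (UCSC) to '1' (Ensembl) and vice versa."""
        if chrom.startswith('chr'):
            return chrom[3:]      # UCSC -> Ensembl
        return 'chr' + chrom      # Ensembl -> UCSC

    def chrom_naming_matches(chrom1, chrom2):
        """True when both names use the same convention."""
        return chrom1.startswith('chr') == chrom2.startswith('chr')

    assert convert_between_ucsc_and_ensembl_naming('chr1') == '1'
    assert convert_between_ucsc_and_ensembl_naming('1') == 'chr1'
    assert chrom_naming_matches('chrX', 'chr1')
    assert not chrom_naming_matches('chrX', '1')

The simple prefix rule is not exact for every chromosome (UCSC 'chrM' corresponds to Ensembl 'MT', for example), which is consistent with the providers above applying it only as a fallback when the requested name is absent from the data.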
commit/galaxy-central: greg: Fixes to recent changes that broke the ability to install a tool shed repository to a local Galaxy instance.
by Bitbucket 09 Aug '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/3b5db939aebe/
changeset: 3b5db939aebe
user: greg
date: 2012-08-09 17:37:47
summary: Fixes to recent changes that broke the ability to install a tool shed repository to a local Galaxy instance.
affected #: 3 files
diff -r 1eab72ce0a483dd8bb09e43d549b119879d5435d -r 3b5db939aebe61addd6f0b8d0c14267cf16c4144 lib/galaxy/tool_shed/install_manager.py
--- a/lib/galaxy/tool_shed/install_manager.py
+++ b/lib/galaxy/tool_shed/install_manager.py
@@ -132,7 +132,7 @@
tool_panel_dict_for_tool_config = generate_tool_panel_dict_for_tool_config( guid, tool_config, tool_sections=tool_sections )
for k, v in tool_panel_dict_for_tool_config.items():
tool_panel_dict_for_display[ k ] = v
- metadata_dict = generate_metadata_for_changeset_revision( self.app, relative_install_dir, repository_clone_url )
+ metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( self.app, relative_install_dir, repository_clone_url )
tool_shed_repository.metadata = metadata_dict
self.app.sa_session.add( tool_shed_repository )
self.app.sa_session.flush()
diff -r 1eab72ce0a483dd8bb09e43d549b119879d5435d -r 3b5db939aebe61addd6f0b8d0c14267cf16c4144 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -598,7 +598,7 @@
invalid_tool_configs.append( name )
break
if can_set_metadata:
- metadata_dict = generate_tool_metadata( name, tool, repository_clone_url, metadata_dict )
+ metadata_dict = generate_tool_metadata( os.path.join( root, name ), tool, repository_clone_url, metadata_dict )
else:
invalid_file_tups.extend( invalid_files_and_errors_tups )
# Find all exported workflows
diff -r 1eab72ce0a483dd8bb09e43d549b119879d5435d -r 3b5db939aebe61addd6f0b8d0c14267cf16c4144 lib/galaxy/web/controllers/admin_toolshed.py
--- a/lib/galaxy/web/controllers/admin_toolshed.py
+++ b/lib/galaxy/web/controllers/admin_toolshed.py
@@ -684,7 +684,7 @@
Generate the metadata for the installed tool shed repository, among other things. This method is called from Galaxy (never the tool shed)
when an admin is installing a new repository or reinstalling an uninstalled repository.
"""
- metadata_dict = generate_metadata_for_changeset_revision( trans.app, relative_install_dir, repository_clone_url )
+ metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( trans.app, relative_install_dir, repository_clone_url )
tool_shed_repository.metadata = metadata_dict
trans.sa_session.add( tool_shed_repository )
trans.sa_session.flush()
@@ -779,7 +779,7 @@
message = "The repository information has been updated."
elif params.get( 'set_metadata_button', False ):
repository_clone_url = generate_clone_url( trans, repository )
- metadata_dict = generate_metadata_for_changeset_revision( trans.app, relative_install_dir, repository_clone_url )
+ metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( trans.app, relative_install_dir, repository_clone_url )
if metadata_dict:
repository.metadata = metadata_dict
trans.sa_session.add( repository )
@@ -1479,7 +1479,7 @@
update_repository( repo, latest_ctx_rev )
# Update the repository metadata.
tool_shed = clean_tool_shed_url( tool_shed_url )
- metadata_dict = generate_metadata_for_changeset_revision( trans.app, relative_install_dir, repository_clone_url )
+ metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( trans.app, relative_install_dir, repository_clone_url )
repository.metadata = metadata_dict
# Update the repository changeset_revision in the database.
repository.changeset_revision = latest_changeset_revision
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
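
The breakage being fixed is a signature change: generate_metadata_for_changeset_revision now returns a (metadata_dict, invalid_file_tups) pair, so callers that still bound the result to a single name received a tuple where they expected a dict. A sketch of the failure mode and the fix (the function body is a stand-in, not the Galaxy implementation):

    def generate_metadata_for_changeset_revision(app, relative_install_dir, repository_clone_url):
        metadata_dict = {'tools': []}   # stand-in result
        invalid_file_tups = []
        return metadata_dict, invalid_file_tups

    # Before the fix, callers wrote:
    #     metadata_dict = generate_metadata_for_changeset_revision(...)
    # binding the whole 2-tuple, so metadata_dict['tools'] raised TypeError.
    # The fix unpacks both values:
    metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision(None, '.', '')
    assert 'tools' in metadata_dict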
commit/galaxy-central: greg: Fix imports broken in 9f790bc90769 using patch from Bjorn Gruning.
by Bitbucket 09 Aug '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/1eab72ce0a48/
changeset: 1eab72ce0a48
user: greg
date: 2012-08-09 12:46:13
summary: Fix imports broken in 9f790bc90769 using patch from Bjorn Gruning.
affected #: 1 file
diff -r a10bb73f579386e67c52383b4c0722cb693d7cf9 -r 1eab72ce0a483dd8bb09e43d549b119879d5435d lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -2,7 +2,8 @@
import galaxy.tools.data
from datetime import date, datetime, timedelta
from time import strftime, gmtime
-from galaxy import tools, util
+from galaxy import util
+from galaxy.tools import parameters
from galaxy.datatypes.checkers import *
from galaxy.util.json import *
from galaxy.tools.search import ToolBoxSearch
@@ -255,7 +256,7 @@
invalid_files_and_errors_tups = []
correction_msg = ''
for input_param in tool.input_params:
- if isinstance( input_param, tools.parameters.basic.SelectToolParameter ) and input_param.is_dynamic:
+ if isinstance( input_param, parameters.basic.SelectToolParameter ) and input_param.is_dynamic:
# If the tool refers to .loc files or requires an entry in the tool_data_table_conf.xml, make sure all requirements exist.
options = input_param.dynamic_options or input_param.options
if options:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
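
The fix replaces an attribute lookup through the parent package (tools.parameters.basic...) with a direct submodule import (parameters.basic...). The commit does not say why the original import broke, but the pattern it applies is the usual remedy for partially initialized packages during circular imports: a subpackage is only available as an attribute of its parent once something has actually imported it. A stdlib-only illustration of the difference:

    # Accessing a subpackage as an attribute of its parent only works after
    # the subpackage has been imported somewhere in the process.
    import xml
    try:
        xml.etree                  # may raise AttributeError in a fresh interpreter
    except AttributeError:
        pass
    # Importing by dotted path binds the subpackage reliably:
    from xml.etree import ElementTree
    assert ElementTree.fromstring('<tool/>').tag == 'tool'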
commit/galaxy-central: greg: Fixes for setting versions for tools included in a repository in the tool shed.
by Bitbucket 08 Aug '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/a10bb73f5793/
changeset: a10bb73f5793
user: greg
date: 2012-08-08 22:15:06
summary: Fixes for setting versions for tools included in a repository in the tool shed.
affected #: 1 file
diff -r 6a7d9d3714e99f613fb8dbcdb709e7e2a2b813e0 -r a10bb73f579386e67c52383b4c0722cb693d7cf9 lib/galaxy/webapps/community/controllers/common.py
--- a/lib/galaxy/webapps/community/controllers/common.py
+++ b/lib/galaxy/webapps/community/controllers/common.py
@@ -122,7 +122,12 @@
else:
for tool_dict in tool_dicts:
# We have at least 2 changeset revisions to compare tool guids and tool ids.
- parent_id = get_parent_id( trans, id, tool_dict[ 'id' ], tool_dict[ 'version' ], tool_dict[ 'guid' ], changeset_revisions[ 0:index ] )
+ parent_id = get_parent_id( trans,
+ id,
+ tool_dict[ 'id' ],
+ tool_dict[ 'version' ],
+ tool_dict[ 'guid' ],
+ changeset_revisions[ 0:index ] )
tool_versions_dict[ tool_dict[ 'guid' ] ] = parent_id
if tool_versions_dict:
repository_metadata.tool_versions = tool_versions_dict
@@ -149,7 +154,8 @@
# Delete all repository_metadata records associated with the repository that have a changeset_revision that is not in changeset_revisions.
# We sometimes see multiple records with the same changeset revision value - no idea how this happens. We'll assume we can delete the older
# records, so we'll order by update_time descending and delete records that have the same changeset_revision we come across later..
- changeset_revisions_checked = []
+ changeset_revisions_checked = []
+ cleaned_changeset_revisions = []
for repository_metadata in trans.sa_session.query( trans.model.RepositoryMetadata ) \
.filter( trans.model.RepositoryMetadata.table.c.repository_id == trans.security.decode_id( id ) ) \
.order_by( trans.model.RepositoryMetadata.table.c.changeset_revision,
@@ -159,6 +165,9 @@
if can_delete:
trans.sa_session.delete( repository_metadata )
trans.sa_session.flush()
+ else:
+ cleaned_changeset_revisions.append( changeset_revision )
+ return cleaned_changeset_revisions
def compare_changeset_revisions( ancestor_changeset_revision, ancestor_metadata_dict, current_changeset_revision, current_metadata_dict ):
# The metadata associated with ancestor_changeset_revision is ancestor_metadata_dict. This changeset_revision is an ancestor of
# current_changeset_revision which is associated with current_metadata_dict. A new repository_metadata record will be created only
@@ -277,7 +286,7 @@
return file_path
return None
def create_or_update_repository_metadata( trans, id, repository, changeset_revision, metadata_dict ):
- downloadable = 'datatypes' in metadata_dict or 'tools' in metadata_dict or 'workflows' in metadata_dict
+ downloadable = is_downloadable( metadata_dict )
repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
if repository_metadata:
repository_metadata.metadata = metadata_dict
@@ -289,6 +298,7 @@
downloadable=downloadable )
trans.sa_session.add( repository_metadata )
trans.sa_session.flush()
+ return repository_metadata
def generate_clone_url( trans, repository_id ):
"""Generate the URL for cloning a repository."""
repository = get_repository( trans, repository_id )
@@ -562,6 +572,8 @@
util.send_mail( frm, to, subject, body, trans.app.config )
except Exception, e:
log.exception( "An error occurred sending a tool shed repository update alert by email." )
+def is_downloadable( metadata_dict ):
+ return 'datatypes' in metadata_dict or 'tools' in metadata_dict or 'workflows' in metadata_dict
def load_tool( trans, config_file ):
"""Load a single tool from the file named by `config_file` and return an instance of `Tool`."""
# Parse XML configuration file and get the root element
@@ -797,7 +809,7 @@
elif comparison == 'not equal and not subset':
metadata_changeset_revision = ancestor_changeset_revision
metadata_dict = ancestor_metadata_dict
- create_or_update_repository_metadata( trans, id, repository, metadata_changeset_revision, metadata_dict )
+ repository_metadata = create_or_update_repository_metadata( trans, id, repository, metadata_changeset_revision, metadata_dict )
changeset_revisions.append( metadata_changeset_revision )
ancestor_changeset_revision = current_changeset_revision
ancestor_metadata_dict = current_metadata_dict
@@ -809,7 +821,7 @@
metadata_changeset_revision = current_changeset_revision
metadata_dict = current_metadata_dict
# We're at the end of the change log.
- create_or_update_repository_metadata( trans, id, repository, metadata_changeset_revision, metadata_dict )
+ repository_metadata = create_or_update_repository_metadata( trans, id, repository, metadata_changeset_revision, metadata_dict )
changeset_revisions.append( metadata_changeset_revision )
ancestor_changeset_revision = None
ancestor_metadata_dict = None
@@ -820,7 +832,7 @@
metadata_dict = ancestor_metadata_dict
if not ctx.children():
# We're at the end of the change log.
- create_or_update_repository_metadata( trans, id, repository, metadata_changeset_revision, metadata_dict )
+ repository_metadata = create_or_update_repository_metadata( trans, id, repository, metadata_changeset_revision, metadata_dict )
changeset_revisions.append( metadata_changeset_revision )
ancestor_changeset_revision = None
ancestor_metadata_dict = None
@@ -830,10 +842,9 @@
except:
pass
# Delete all repository_metadata records for this repository that do not have a changeset_revision value in changeset_revisions.
- clean_repository_metadata( trans, id, changeset_revisions )
+ cleaned_changeset_revisions = clean_repository_metadata( trans, id, changeset_revisions )
# Set tool version information for all downloadable changeset revisions.
- downloadable_changeset_revisions = [ rm.changeset_revision for rm in repository.downloadable_revisions ]
- add_repository_metadata_tool_versions( trans, id, downloadable_changeset_revisions )
+ add_repository_metadata_tool_versions( trans, id, cleaned_changeset_revisions )
def set_repository_metadata( trans, repository, content_alert_str='', **kwd ):
"""
Set metadata using the repository's current disk files, returning specific error messages (if any) to alert the repository owner that the changeset
@@ -846,23 +857,22 @@
repo = hg.repository( get_configured_ui(), repo_dir )
metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( trans.app, repo_dir, repository_clone_url )
if metadata_dict:
+ downloadable = is_downloadable( metadata_dict )
repository_metadata = None
if new_tool_metadata_required( trans, repository, metadata_dict ) or new_workflow_metadata_required( trans, repository, metadata_dict ):
# Create a new repository_metadata table row.
- repository_metadata = trans.model.RepositoryMetadata( repository.id, repository.tip, metadata_dict )
- trans.sa_session.add( repository_metadata )
- try:
- trans.sa_session.flush()
- # If this is the first record stored for this repository, see if we need to send any email alerts.
- if len( repository.downloadable_revisions ) == 1:
- handle_email_alerts( trans, repository, content_alert_str='', new_repo_alert=True, admin_only=False )
- except TypeError, e:
- message = "Unable to save metadata for this repository, exception: %s" % str( e )
- status = 'error'
+ repository_metadata = create_or_update_repository_metadata( trans,
+ trans.security.encode_id( repository.id ),
+ repository,
+ repository.tip,
+ metadata_dict )
+ # If this is the first record stored for this repository, see if we need to send any email alerts.
+ if len( repository.downloadable_revisions ) == 1:
+ handle_email_alerts( trans, repository, content_alert_str='', new_repo_alert=True, admin_only=False )
else:
repository_metadata = get_latest_repository_metadata( trans, repository.id )
if repository_metadata:
- downloadable = 'datatypes' in metadata_dict or 'tools' in metadata_dict or 'workflows' in metadata_dict
+ downloadable = is_downloadable( metadata_dict )
# Update the last saved repository_metadata table row.
repository_metadata.changeset_revision = repository.tip
repository_metadata.metadata = metadata_dict
@@ -871,11 +881,22 @@
trans.sa_session.flush()
else:
# There are no tools in the repository, and we're setting metadata on the repository tip.
- repository_metadata = trans.model.RepositoryMetadata( repository.id, repository.tip, metadata_dict )
- trans.sa_session.add( repository_metadata )
- trans.sa_session.flush()
+ repository_metadata = create_or_update_repository_metadata( trans,
+ trans.security.encode_id( repository.id ),
+ repository,
+ repository.tip,
+ metadata_dict )
if 'tools' in metadata_dict and repository_metadata and status != 'error':
- add_repository_metadata_tool_versions( trans, trans.security.encode_id( repository.id ), [ repository_metadata.changeset_revision ] )
+ # Set tool versions on the new downloadable change set. The order of the list of changesets is critical, so we use the repo's changelog.
+ downloadable_changeset_revisions = [ rm.changeset_revision for rm in repository.downloadable_revisions ]
+ changeset_revisions = []
+ for changeset in repo.changelog:
+ changeset_revision = str( repo.changectx( changeset ) )
+ if changeset_revision in downloadable_changeset_revisions:
+ changeset_revisions.append( changeset_revision )
+ # Now append the latest changeset_revision we just updated above.
+ changeset_revisions.append( repository_metadata.changeset_revision )
+ add_repository_metadata_tool_versions( trans, trans.security.encode_id( repository.id ), changeset_revisions )
elif len( repo ) == 1 and not invalid_file_tups:
message = "Revision '%s' includes no tools, datatypes or exported workflows for which metadata can " % str( repository.tip )
message += "be defined so this revision cannot be automatically installed into a local Galaxy instance."
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
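
The version-setting walk above records, for each tool guid in each downloadable revision, the id of its parent: the first revision has no ancestors, so the parent is the tool's bare id (the "old_id"), and later revisions resolve the parent among earlier changeset revisions via get_parent_id. A simplified sketch of that bookkeeping (data shapes are illustrative, and this get_parent_id is a stand-in for the Galaxy helper):

    def get_parent_id(tool_id, earlier_revisions, tools_by_revision):
        # Parent is the guid of the same tool id in the nearest earlier
        # revision; with no match, fall back to the bare tool id.
        for revision in reversed(earlier_revisions):
            for tool in tools_by_revision.get(revision, []):
                if tool['id'] == tool_id:
                    return tool['guid']
        return tool_id

    def add_tool_versions(changeset_revisions, tools_by_revision):
        versions = {}
        for index, revision in enumerate(changeset_revisions):
            tool_versions_dict = {}
            for tool in tools_by_revision.get(revision, []):
                if index == 0:
                    # First revision: no ancestor revisions to match against.
                    tool_versions_dict[tool['guid']] = tool['id']
                else:
                    tool_versions_dict[tool['guid']] = get_parent_id(
                        tool['id'], changeset_revisions[0:index], tools_by_revision)
            versions[revision] = tool_versions_dict
        return versions

    tools_by_revision = {
        'rev0': [{'id': 'fastqc', 'guid': 'shed/fastqc/1.0'}],
        'rev1': [{'id': 'fastqc', 'guid': 'shed/fastqc/1.1'}],
    }
    versions = add_tool_versions(['rev0', 'rev1'], tools_by_revision)
    assert versions['rev0']['shed/fastqc/1.0'] == 'fastqc'
    assert versions['rev1']['shed/fastqc/1.1'] == 'shed/fastqc/1.0'

The changeset also makes clean_repository_metadata return the revisions it kept, so the caller can set tool versions on exactly the surviving downloadable revisions.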
commit/galaxy-central: greg: Apply changes to rendering tool help to the tool shed's version of the tool form mako template so tools can be displayed again in the tool shed.
by Bitbucket 08 Aug '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/6a7d9d3714e9/
changeset: 6a7d9d3714e9
user: greg
date: 2012-08-08 20:46:59
summary: Apply changes to rendering tool help to the tool shed's version of the tool form mako template so tools can be displayed again in the tool shed.
affected #: 1 file
diff -r 9af69e5a6db262ab5c277674c42cf0081259d0de -r 6a7d9d3714e99f613fb8dbcdb709e7e2a2b813e0 templates/webapps/community/repository/tool_form.mako
--- a/templates/webapps/community/repository/tool_form.mako
+++ b/templates/webapps/community/repository/tool_form.mako
@@ -186,12 +186,15 @@
<div class="toolHelp"><div class="toolHelpBody"><%
+ tool_help = tool.help
+ # Help is Mako template, so render using current static path.
+ tool_help = tool_help.render( static_path=h.url_for( '/static' ) )
# Convert to unicode to display non-ascii characters.
- if type( tool.help ) is not unicode:
- tool.help = unicode( tool.help, 'utf-8')
+ if type( tool_help ) is not unicode:
+ tool_help = unicode( tool_help, 'utf-8')
%>
- ${tool.help}
- </div>
+ ${tool_help}
+ </div></div>
%endif
%else:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
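
The template change renders the help text, which is itself a Mako template per the committed comment, into a local variable instead of assigning back onto tool.help, so the tool object is no longer mutated on every page view. A standalone sketch using the mako library (the template text and static path are illustrative):

    from mako.template import Template

    tool_help = Template("See <img src='${static_path}/images/help.png'/>")
    rendered = tool_help.render(static_path='/static')
    # Coerce to unicode for non-ascii help text; this mirrors the Python 2
    # idiom in the template (str is already unicode on Python 3).
    if not isinstance(rendered, type(u'')):
        rendered = rendered.decode('utf-8')
    print(rendered)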
commit/galaxy-central: carlfeberhard: fix for missing tool-data/shared/ucsc/publicbuilds.txt (related to test failure on functional.test_get_data.UploadData.test_0020_upload_file)
by Bitbucket 08 Aug '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/9af69e5a6db2/
changeset: 9af69e5a6db2
user: carlfeberhard
date: 2012-08-08 20:39:40
summary: fix for missing tool-data/shared/ucsc/publicbuilds.txt (related to test failure on functional.test_get_data.UploadData.test_0020_upload_file)
affected #: 1 file
diff -r 9f790bc90769df7a4f84f103707bdd8ceaf1115d -r 9af69e5a6db262ab5c277674c42cf0081259d0de buildbot_setup.sh
--- a/buildbot_setup.sh
+++ b/buildbot_setup.sh
@@ -68,8 +68,13 @@
datatypes_conf.xml.sample
universe_wsgi.ini.sample
tool_data_table_conf.xml.sample
+migrated_tools_conf.xml.sample
+tool-data/shared/ensembl/builds.txt.sample
+tool-data/shared/igv/igv_build_sites.txt.sample
+tool-data/shared/ncbi/builds.txt.sample
+tool-data/shared/rviewer/rviewer_build_sites.txt.sample
tool-data/shared/ucsc/builds.txt.sample
-migrated_tools_conf.xml.sample
+tool-data/shared/ucsc/publicbuilds.txt.sample
"
DIRS="
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
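
buildbot_setup.sh materializes each entry in its SAMPLES list by copying name.sample into place, so the missing publicbuilds.txt.sample entry left the live file absent and the upload test failing. A rough Python equivalent of that loop (the copy-if-absent behavior is an assumption about the shell script, and the list is abbreviated):

    import os
    import shutil

    SAMPLES = [
        'tool-data/shared/ucsc/builds.txt.sample',
        'tool-data/shared/ucsc/publicbuilds.txt.sample',  # the entry this commit adds
    ]

    for sample in SAMPLES:
        target = sample[:-len('.sample')]
        if os.path.exists(sample) and not os.path.exists(target):
            shutil.copy(sample, target)  # materialize the live config from its sample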
commit/galaxy-central: greg: More fixes for setting metadata on repositories in the tool shed.
by Bitbucket 08 Aug '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/9f790bc90769/
changeset: 9f790bc90769
user: greg
date: 2012-08-08 20:31:46
summary: More fixes for setting metadata on repositories in the tool shed. Change set includes other miscellaneous fixes, including the elimination of some problematic historical code that managed temporary working directories and resulted in race conditions.
affected #: 7 files
diff -r 5f2db4a18d3d3bef78f0a330cc6073f34db7c88b -r 9f790bc90769df7a4f84f103707bdd8ceaf1115d lib/galaxy/tool_shed/install_manager.py
--- a/lib/galaxy/tool_shed/install_manager.py
+++ b/lib/galaxy/tool_shed/install_manager.py
@@ -2,7 +2,7 @@
Manage automatic installation of tools configured in the xxx.xml files in ~/scripts/migrate_tools (e.g., 0002_tools.xml).
All of the tools were at some point included in the Galaxy distribution, but are now hosted in the main Galaxy tool shed.
"""
-import urllib2
+import urllib2, tempfile
from galaxy.tools import ToolSection
from galaxy.util.json import from_json_string, to_json_string
from galaxy.util.shed_util import *
@@ -132,7 +132,7 @@
tool_panel_dict_for_tool_config = generate_tool_panel_dict_for_tool_config( guid, tool_config, tool_sections=tool_sections )
for k, v in tool_panel_dict_for_tool_config.items():
tool_panel_dict_for_display[ k ] = v
- metadata_dict = generate_metadata_using_disk_files( self.toolbox, relative_install_dir, repository_clone_url )
+ metadata_dict = generate_metadata_for_changeset_revision( self.app, relative_install_dir, repository_clone_url )
tool_shed_repository.metadata = metadata_dict
self.app.sa_session.add( tool_shed_repository )
self.app.sa_session.flush()
@@ -142,7 +142,7 @@
else:
tool_dependencies = None
if 'tools' in metadata_dict:
- work_dir = make_tmp_directory()
+ work_dir = tempfile.mkdtemp()
repository_tools_tups = get_repository_tools_tups( self.app, metadata_dict )
if repository_tools_tups:
sample_files = metadata_dict.get( 'sample_files', [] )
@@ -195,7 +195,7 @@
tool_shed_repository.includes_datatypes = True
self.app.sa_session.add( tool_shed_repository )
self.app.sa_session.flush()
- work_dir = make_tmp_directory()
+ work_dir = tempfile.mkdtemp()
datatypes_config = get_config_from_repository( self.app,
'datatypes_conf.xml',
tool_shed_repository,
diff -r 5f2db4a18d3d3bef78f0a330cc6073f34db7c88b -r 9f790bc90769df7a4f84f103707bdd8ceaf1115d lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -2,7 +2,7 @@
import galaxy.tools.data
from datetime import date, datetime, timedelta
from time import strftime, gmtime
-from galaxy import util
+from galaxy import tools, util
from galaxy.datatypes.checkers import *
from galaxy.util.json import *
from galaxy.tools.search import ToolBoxSearch
@@ -247,6 +247,52 @@
except:
pass
return converter_path, display_path
+def check_tool_input_params( app, repo_dir, tool_config_name, tool, sample_files ):
+ """
+ Check all of the tool's input parameters, looking for any that are dynamically generated using external data files to make
+ sure the files exist.
+ """
+ invalid_files_and_errors_tups = []
+ correction_msg = ''
+ for input_param in tool.input_params:
+ if isinstance( input_param, tools.parameters.basic.SelectToolParameter ) and input_param.is_dynamic:
+ # If the tool refers to .loc files or requires an entry in the tool_data_table_conf.xml, make sure all requirements exist.
+ options = input_param.dynamic_options or input_param.options
+ if options:
+ if options.tool_data_table or options.missing_tool_data_table_name:
+ # Make sure the repository contains a tool_data_table_conf.xml.sample file.
+ sample_tool_data_table_conf = get_config_from_disk( 'tool_data_table_conf.xml.sample', repo_dir )
+ if sample_tool_data_table_conf:
+ error, correction_msg = handle_sample_tool_data_table_conf_file( app, sample_tool_data_table_conf )
+ if error:
+ invalid_files_and_errors_tups.append( ( 'tool_data_table_conf.xml.sample', correction_msg ) )
+ else:
+ options.missing_tool_data_table_name = None
+ else:
+ correction_msg = "This file requires an entry in the tool_data_table_conf.xml file. Upload a file named tool_data_table_conf.xml.sample "
+ correction_msg += "to the repository that includes the required entry to correct this error.<br/>"
+ invalid_files_and_errors_tups.append( ( tool_config_name, correction_msg ) )
+ if options.index_file or options.missing_index_file:
+ # Make sure the repository contains the required xxx.loc.sample file.
+ index_file = options.index_file or options.missing_index_file
+ index_file_name = strip_path( index_file )
+ sample_found = False
+ for sample_file in sample_files:
+ sample_file_name = strip_path( sample_file )
+ if sample_file_name == '%s.sample' % index_file_name:
+ options.index_file = index_file_name
+ options.missing_index_file = None
+ if options.tool_data_table:
+ options.tool_data_table.missing_index_file = None
+ sample_found = True
+ break
+ if not sample_found:
+ correction_msg = "This file refers to a file named <b>%s</b>. " % str( index_file )
+ correction_msg += "Upload a file named <b>%s.sample</b> to the repository to correct this error." % str( index_file_name )
+ invalid_files_and_errors_tups.append( ( tool_config_name, correction_msg ) )
+ # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
+ reset_tool_data_tables( app )
+ return invalid_files_and_errors_tups
def config_elems_to_xml_file( app, config_elems, config_filename, tool_path ):
# Persist the current in-memory list of config_elems to a file named by the value of config_filename.
fd, filename = tempfile.mkstemp()
@@ -383,7 +429,7 @@
def create_tool_dependency_objects( app, tool_shed_repository, current_changeset_revision, set_status=True ):
# Create or update a ToolDependency for each entry in tool_dependencies_config. This method is called when installing a new tool_shed_repository.
tool_dependency_objects = []
- work_dir = make_tmp_directory()
+ work_dir = tempfile.mkdtemp()
# Get the tool_dependencies.xml file from the repository.
tool_dependencies_config = get_config_from_repository( app,
'tool_dependencies.xml',
@@ -501,6 +547,76 @@
if not can_generate_dependency_metadata:
break
return can_generate_dependency_metadata
+def generate_metadata_for_changeset_revision( app, repository_files_dir, repository_clone_url ):
+ """
+ Generate metadata for a repository using it's files on disk. To generate metadata for changeset revisions older than the repository tip,
+ the repository will have been cloned to a temporary location and updated to a specified changeset revision to access that changeset revision's
+ disk files, so the value of repository_files_dir will not always be repository.repo_path (it could be a temporary directory containing a clone).
+ """
+ metadata_dict = {}
+ invalid_file_tups = []
+ invalid_tool_configs = []
+ tool_dependencies_config = None
+ datatypes_config = get_config_from_disk( 'datatypes_conf.xml', repository_files_dir )
+ if datatypes_config:
+ metadata_dict = generate_datatypes_metadata( datatypes_config, metadata_dict )
+ sample_files = get_sample_files_from_disk( repository_files_dir )
+ if sample_files:
+ metadata_dict[ 'sample_files' ] = sample_files
+ # Find all tool configs and exported workflows.
+ for root, dirs, files in os.walk( repository_files_dir ):
+ if root.find( '.hg' ) < 0 and root.find( 'hgrc' ) < 0:
+ if '.hg' in dirs:
+ dirs.remove( '.hg' )
+ for name in files:
+ # Find all tool configs.
+ if name not in NOT_TOOL_CONFIGS and name.endswith( '.xml' ):
+ full_path = os.path.abspath( os.path.join( root, name ) )
+ if not ( check_binary( full_path ) or check_image( full_path ) or check_gzip( full_path )[ 0 ]
+ or check_bz2( full_path )[ 0 ] or check_zip( full_path ) ):
+ try:
+ # Make sure we're looking at a tool config and not a display application config or something else.
+ element_tree = util.parse_xml( full_path )
+ element_tree_root = element_tree.getroot()
+ is_tool = element_tree_root.tag == 'tool'
+ except Exception, e:
+ print "Error parsing %s", full_path, ", exception: ", str( e )
+ is_tool = False
+ if is_tool:
+ try:
+ tool = app.toolbox.load_tool( full_path )
+ except Exception, e:
+ tool = None
+ invalid_tool_configs.append( name )
+ if tool is not None:
+ invalid_files_and_errors_tups = check_tool_input_params( app, repository_files_dir, name, tool, sample_files )
+ can_set_metadata = True
+ for tup in invalid_files_and_errors_tups:
+ if name in tup:
+ can_set_metadata = False
+ invalid_tool_configs.append( name )
+ break
+ if can_set_metadata:
+ metadata_dict = generate_tool_metadata( name, tool, repository_clone_url, metadata_dict )
+ else:
+ invalid_file_tups.extend( invalid_files_and_errors_tups )
+ # Find all exported workflows
+ elif name.endswith( '.ga' ):
+ relative_path = os.path.join( root, name )
+ fp = open( relative_path, 'rb' )
+ workflow_text = fp.read()
+ fp.close()
+ exported_workflow_dict = from_json_string( workflow_text )
+ if 'a_galaxy_workflow' in exported_workflow_dict and exported_workflow_dict[ 'a_galaxy_workflow' ] == 'true':
+ metadata_dict = generate_workflow_metadata( relative_path, exported_workflow_dict, metadata_dict )
+ if 'tools' in metadata_dict:
+ # This step must be done after metadata for tools has been defined.
+ tool_dependencies_config = get_config_from_disk( 'tool_dependencies.xml', repository_files_dir )
+ if tool_dependencies_config:
+ metadata_dict = generate_tool_dependency_metadata( tool_dependencies_config, metadata_dict )
+ if invalid_tool_configs:
+ metadata_dict [ 'invalid_tools' ] = invalid_tool_configs
+ return metadata_dict, invalid_file_tups
def generate_package_dependency_metadata( elem, tool_dependencies_dict ):
"""The value of package_name must match the value of the "package" type in the tool config's <requirements> tag set."""
requirements_dict = {}
@@ -517,58 +633,6 @@
if requirements_dict:
tool_dependencies_dict[ dependency_key ] = requirements_dict
return tool_dependencies_dict
-def generate_metadata_using_disk_files( toolbox, relative_install_dir, repository_clone_url ):
- """Generate metadata using only the repository files on disk - files are not retrieved from the repository manifest."""
- metadata_dict = {}
- tool_dependencies_config = None
- datatypes_config = get_config_from_disk( 'datatypes_conf.xml', relative_install_dir )
- if datatypes_config:
- metadata_dict = generate_datatypes_metadata( datatypes_config, metadata_dict )
- sample_files = get_sample_files_from_disk( relative_install_dir )
- if sample_files:
- metadata_dict[ 'sample_files' ] = sample_files
- # Find all tool configs and exported workflows.
- for root, dirs, files in os.walk( relative_install_dir ):
- if root.find( '.hg' ) < 0 and root.find( 'hgrc' ) < 0:
- if '.hg' in dirs:
- dirs.remove( '.hg' )
- for name in files:
- # Find all tool configs.
- if name not in NOT_TOOL_CONFIGS and name.endswith( '.xml' ):
- full_path = os.path.abspath( os.path.join( root, name ) )
- if not ( check_binary( full_path ) or check_image( full_path ) or check_gzip( full_path )[ 0 ]
- or check_bz2( full_path )[ 0 ] or check_zip( full_path ) ):
- try:
- # Make sure we're looking at a tool config and not a display application config or something else.
- element_tree = util.parse_xml( full_path )
- element_tree_root = element_tree.getroot()
- is_tool = element_tree_root.tag == 'tool'
- except Exception, e:
- log.debug( "Error parsing %s, exception: %s" % ( full_path, str( e ) ) )
- is_tool = False
- if is_tool:
- try:
- tool = toolbox.load_tool( full_path )
- except Exception, e:
- tool = None
- if tool is not None:
- tool_config = os.path.join( root, name )
- metadata_dict = generate_tool_metadata( tool_config, tool, repository_clone_url, metadata_dict )
- # Find all exported workflows
- elif name.endswith( '.ga' ):
- relative_path = os.path.join( root, name )
- fp = open( relative_path, 'rb' )
- workflow_text = fp.read()
- fp.close()
- exported_workflow_dict = from_json_string( workflow_text )
- if 'a_galaxy_workflow' in exported_workflow_dict and exported_workflow_dict[ 'a_galaxy_workflow' ] == 'true':
- metadata_dict = generate_workflow_metadata( relative_path, exported_workflow_dict, metadata_dict )
- if 'tools' in metadata_dict:
- # This step must be done after metadata for tools has been defined.
- tool_dependencies_config = get_config_from_disk( 'tool_dependencies.xml', relative_install_dir )
- if tool_dependencies_config:
- metadata_dict = generate_tool_dependency_metadata( tool_dependencies_config, metadata_dict )
- return metadata_dict
def generate_tool_guid( repository_clone_url, tool ):
"""
Generate a guid for the installed tool. It is critical that this guid matches the guid for
@@ -1266,7 +1330,7 @@
def load_installed_datatypes( app, repository, relative_install_dir, deactivate=False ):
# Load proprietary datatypes and return information needed for loading proprietary datatypes converters and display applications later.
metadata = repository.metadata
- work_dir = make_tmp_directory()
+ work_dir = tempfile.mkdtemp()
repository_dict = None
datatypes_config = get_config_from_repository( app,
'datatypes_conf.xml',
@@ -1293,17 +1357,6 @@
def load_installed_display_applications( app, installed_repository_dict, deactivate=False ):
# Load or deactivate proprietary datatype display applications
app.datatypes_registry.load_display_applications( installed_repository_dict=installed_repository_dict, deactivate=deactivate )
-def make_tmp_directory():
- tmp_dir = os.getenv( 'TMPDIR', '' )
- if tmp_dir:
- tmp_dir = tmp_dir.strip()
- else:
- home_dir = os.getenv( 'HOME' )
- tmp_dir = os.path.join( home_dir, 'tmp' )
- work_dir = os.path.join( tmp_dir, 'work_tmp' )
- if not os.path.exists( work_dir ):
- os.makedirs( work_dir )
- return work_dir
def open_repository_files_folder( trans, folder_path ):
try:
files_list = get_repository_files( trans, folder_path )
diff -r 5f2db4a18d3d3bef78f0a330cc6073f34db7c88b -r 9f790bc90769df7a4f84f103707bdd8ceaf1115d lib/galaxy/web/controllers/admin_toolshed.py
--- a/lib/galaxy/web/controllers/admin_toolshed.py
+++ b/lib/galaxy/web/controllers/admin_toolshed.py
@@ -1,4 +1,4 @@
-import urllib2
+import urllib2, tempfile
from galaxy.web.controllers.admin import *
from galaxy.util.json import from_json_string, to_json_string
from galaxy.util.shed_util import *
@@ -522,7 +522,7 @@
# Get the tool_shed_repository from one of the tool_dependencies.
message = ''
tool_shed_repository = tool_dependencies[ 0 ].tool_shed_repository
- work_dir = make_tmp_directory()
+ work_dir = tempfile.mkdtemp()
# Get the tool_dependencies.xml file from the repository.
tool_dependencies_config = get_config_from_repository( trans.app,
'tool_dependencies.xml',
@@ -654,7 +654,7 @@
message += "from the installed repository's <b>Repository Actions</b> menu. "
status = 'error'
if install_tool_dependencies and tool_shed_repository.tool_dependencies and 'tool_dependencies' in metadata:
- work_dir = make_tmp_directory()
+ work_dir = tempfile.mkdtemp()
# Install tool dependencies.
update_tool_shed_repository_status( trans.app,
tool_shed_repository,
@@ -684,7 +684,7 @@
Generate the metadata for the installed tool shed repository, among other things. This method is called from Galaxy (never the tool shed)
when an admin is installing a new repository or reinstalling an uninstalled repository.
"""
- metadata_dict = generate_metadata_using_disk_files( trans.app.toolbox, relative_install_dir, repository_clone_url )
+ metadata_dict = generate_metadata_for_changeset_revision( trans.app, relative_install_dir, repository_clone_url )
tool_shed_repository.metadata = metadata_dict
trans.sa_session.add( tool_shed_repository )
trans.sa_session.flush()
@@ -695,7 +695,7 @@
repository_tools_tups = get_repository_tools_tups( trans.app, metadata_dict )
if repository_tools_tups:
# Handle missing data table entries for tool parameters that are dynamically generated select lists.
- work_dir = make_tmp_directory()
+ work_dir = tempfile.mkdtemp()
repository_tools_tups = handle_missing_data_table_entry( trans.app,
tool_shed_repository,
tool_shed_repository.changeset_revision,
@@ -726,7 +726,7 @@
tool_shed_repository.includes_datatypes = True
trans.sa_session.add( tool_shed_repository )
trans.sa_session.flush()
- work_dir = make_tmp_directory()
+ work_dir = tempfile.mkdtemp()
datatypes_config = get_config_from_repository( trans.app,
'datatypes_conf.xml',
tool_shed_repository,
@@ -779,7 +779,7 @@
message = "The repository information has been updated."
elif params.get( 'set_metadata_button', False ):
repository_clone_url = generate_clone_url( trans, repository )
- metadata_dict = generate_metadata_using_disk_files( trans.app.toolbox, relative_install_dir, repository_clone_url )
+ metadata_dict = generate_metadata_for_changeset_revision( trans.app, relative_install_dir, repository_clone_url )
if metadata_dict:
repository.metadata = metadata_dict
trans.sa_session.add( repository )
@@ -1479,7 +1479,7 @@
update_repository( repo, latest_ctx_rev )
# Update the repository metadata.
tool_shed = clean_tool_shed_url( tool_shed_url )
- metadata_dict = generate_metadata_using_disk_files( trans.app.toolbox, relative_install_dir, repository_clone_url )
+ metadata_dict = generate_metadata_for_changeset_revision( trans.app, relative_install_dir, repository_clone_url )
repository.metadata = metadata_dict
# Update the repository changeset_revision in the database.
repository.changeset_revision = latest_changeset_revision
diff -r 5f2db4a18d3d3bef78f0a330cc6073f34db7c88b -r 9f790bc90769df7a4f84f103707bdd8ceaf1115d lib/galaxy/webapps/community/controllers/common.py
--- a/lib/galaxy/webapps/community/controllers/common.py
+++ b/lib/galaxy/webapps/community/controllers/common.py
@@ -1,15 +1,13 @@
-import os, string, socket, logging, simplejson, binascii
+import os, string, socket, logging, simplejson, binascii, tempfile
from time import strftime
from datetime import *
from galaxy.datatypes.checkers import *
from galaxy.tools import *
from galaxy.util.json import from_json_string, to_json_string
from galaxy.util.hash_util import *
-from galaxy.util.shed_util import clone_repository, copy_sample_file, generate_datatypes_metadata, generate_tool_dependency_metadata, generate_tool_metadata
-from galaxy.util.shed_util import generate_workflow_metadata, get_changectx_for_changeset, get_config, get_config_from_disk, get_configured_ui
-from galaxy.util.shed_util import get_named_tmpfile_from_ctx, get_sample_files_from_disk, handle_sample_tool_data_table_conf_file, INITIAL_CHANGELOG_HASH
-from galaxy.util.shed_util import make_tmp_directory, NOT_TOOL_CONFIGS, reset_tool_data_tables, reversed_upper_bounded_changelog, strip_path, to_html_escaped
-from galaxy.util.shed_util import to_html_str, update_repository
+from galaxy.util.shed_util import clone_repository, generate_metadata_for_changeset_revision, get_changectx_for_changeset, get_config_from_disk
+from galaxy.util.shed_util import get_configured_ui, get_named_tmpfile_from_ctx, handle_sample_tool_data_table_conf_file, INITIAL_CHANGELOG_HASH
+from galaxy.util.shed_util import reset_tool_data_tables, reversed_upper_bounded_changelog, strip_path
from galaxy.web.base.controller import *
from galaxy.webapps.community import model
from galaxy.model.orm import *
@@ -107,11 +105,8 @@
trans.sa_session.flush()
return item_rating
-## ---- Utility methods -------------------------------------------------------
-
def add_repository_metadata_tool_versions( trans, id, changeset_revisions ):
- # If a repository includes tools, build a dictionary of { 'tool id' : 'parent tool id' }
- # pairs for each tool in each changeset revision.
+ # If a repository includes tools, build a dictionary of { 'tool id' : 'parent tool id' } pairs for each tool in each changeset revision.
for index, changeset_revision in enumerate( changeset_revisions ):
tool_versions_dict = {}
repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
@@ -120,9 +115,8 @@
if metadata:
tool_dicts = metadata.get( 'tools', [] )
if index == 0:
- # The first changset_revision is a special case because it will have no ancestor
- # changeset_revisions in which to match tools. The parent tool id for tools in
- # the first changeset_revision will be the "old_id" in the tool config.
+ # The first changset_revision is a special case because it will have no ancestor changeset_revisions in which to match tools.
+ # The parent tool id for tools in the first changeset_revision will be the "old_id" in the tool config.
for tool_dict in tool_dicts:
tool_versions_dict[ tool_dict[ 'guid' ] ] = tool_dict[ 'id' ]
else:
@@ -134,43 +128,6 @@
repository_metadata.tool_versions = tool_versions_dict
trans.sa_session.add( repository_metadata )
trans.sa_session.flush()
-def build_changeset_revision_select_field( trans, repository, selected_value=None, add_id_to_name=True ):
- """Build a SelectField whose options are the changeset_rev strings of all downloadable revisions of the received repository."""
- repo = hg.repository( get_configured_ui(), repository.repo_path )
- options = []
- changeset_tups = []
- refresh_on_change_values = []
- for repository_metadata in repository.downloadable_revisions:
- changeset_revision = repository_metadata.changeset_revision
- ctx = get_changectx_for_changeset( repo, changeset_revision )
- if ctx:
- rev = '%04d' % ctx.rev()
- label = "%s:%s" % ( str( ctx.rev() ), changeset_revision )
- else:
- rev = '-1'
- label = "-1:%s" % changeset_revision
- changeset_tups.append( ( rev, label, changeset_revision ) )
- refresh_on_change_values.append( changeset_revision )
- # Sort options by the revision label. Even though the downloadable_revisions query sorts by update_time,
- # the changeset revisions may not be sorted correctly because setting metadata over time will reset update_time.
- for changeset_tup in sorted( changeset_tups ):
- # Display the latest revision first.
- options.insert( 0, ( changeset_tup[1], changeset_tup[2] ) )
- if add_id_to_name:
- name = 'changeset_revision_%d' % repository.id
- else:
- name = 'changeset_revision'
- select_field = SelectField( name=name,
- refresh_on_change=True,
- refresh_on_change_values=refresh_on_change_values )
- for option_tup in options:
- selected = selected_value and option_tup[1] == selected_value
- select_field.add_option( option_tup[0], option_tup[1], selected=selected )
- return select_field
-def changeset_is_downloadable( metadata_dict ):
- # A RepositoryMetadata record will be created if metadata_dict includes only invalid stuff like 'invalid_tools', but in this case
- # it won't be downloadable.
- return 'datatypes' in metadata_dict or 'tools' in metadata_dict or 'workflows' in metadata_dict
def changeset_is_malicious( trans, id, changeset_revision, **kwd ):
"""Check the malicious flag in repository metadata for a specified change set"""
repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
@@ -188,55 +145,6 @@
if user_email in admin_users:
return True
return False
-def check_tool_input_params( trans, repo_dir, tool_config, tool, sample_files, invalid_files ):
- """
- Check all of the tool's input parameters, looking for any that are dynamically generated using external data files to make
- sure the files exist.
- """
- can_set_metadata = True
- correction_msg = ''
- for input_param in tool.input_params:
- if isinstance( input_param, tools.parameters.basic.SelectToolParameter ) and input_param.is_dynamic:
- # If the tool refers to .loc files or requires an entry in the tool_data_table_conf.xml, make sure all requirements exist.
- options = input_param.dynamic_options or input_param.options
- if options:
- if options.tool_data_table or options.missing_tool_data_table_name:
- # Make sure the repository contains a tool_data_table_conf.xml.sample file.
- sample_tool_data_table_conf = get_config_from_disk( 'tool_data_table_conf.xml.sample', repo_dir )
- if sample_tool_data_table_conf:
- error, correction_msg = handle_sample_tool_data_table_conf_file( trans, sample_tool_data_table_conf )
- if error:
- can_set_metadata = False
- invalid_files.append( ( 'tool_data_table_conf.xml.sample', correction_msg ) )
- else:
- options.missing_tool_data_table_name = None
- else:
- can_set_metadata = False
- correction_msg = "This file requires an entry in the tool_data_table_conf.xml file. Upload a file named tool_data_table_conf.xml.sample "
- correction_msg += "to the repository that includes the required entry to correct this error.<br/>"
- invalid_files.append( ( tool_config, correction_msg ) )
- if options.index_file or options.missing_index_file:
- # Make sure the repository contains the required xxx.loc.sample file.
- index_file = options.index_file or options.missing_index_file
- index_file_name = strip_path( index_file )
- sample_found = False
- for sample_file in sample_files:
- sample_file_name = strip_path( sample_file )
- if sample_file_name == '%s.sample' % index_file_name:
- options.index_file = index_file_name
- options.missing_index_file = None
- if options.tool_data_table:
- options.tool_data_table.missing_index_file = None
- sample_found = True
- break
- if not sample_found:
- can_set_metadata = False
- correction_msg = "This file refers to a file named <b>%s</b>. " % str( index_file )
- correction_msg += "Upload a file named <b>%s.sample</b> to the repository to correct this error." % str( index_file_name )
- invalid_files.append( ( tool_config, correction_msg ) )
- # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
- reset_tool_data_tables( trans.app )
- return can_set_metadata, invalid_files
def clean_repository_metadata( trans, id, changeset_revisions ):
# Delete all repository_metadata records associated with the repository that have a changeset_revision that is not in changeset_revisions.
# We sometimes see multiple records with the same changeset revision value - no idea how this happens. We'll assume we can delete the older
@@ -369,12 +277,16 @@
return file_path
return None
def create_or_update_repository_metadata( trans, id, repository, changeset_revision, metadata_dict ):
+ downloadable = 'datatypes' in metadata_dict or 'tools' in metadata_dict or 'workflows' in metadata_dict
repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
if repository_metadata:
repository_metadata.metadata = metadata_dict
+ repository_metadata.downloadable = downloadable
else:
- repository_metadata = trans.model.RepositoryMetadata( repository.id, changeset_revision, metadata_dict )
- repository_metadata.downloadable = changeset_is_downloadable( metadata_dict )
+ repository_metadata = trans.model.RepositoryMetadata( repository_id=repository.id,
+ changeset_revision=changeset_revision,
+ metadata=metadata_dict,
+ downloadable=downloadable )
trans.sa_session.add( repository_metadata )
trans.sa_session.flush()
def generate_clone_url( trans, repository_id ):
@@ -387,69 +299,6 @@
return '%s://%s%s/repos/%s/%s' % ( protocol, username, base, repository.user.username, repository.name )
else:
return '%s/repos/%s/%s' % ( base_url, repository.user.username, repository.name )
-def generate_metadata_for_changeset_revision( trans, repository_files_dir, repository_clone_url ):
- """
- Generate metadata for a repository using it's files on disk. To generate metadata for changeset revisions older than the repository tip,
- the repository will have been cloned to a temporary location and updated to a specified changeset revision to access that changeset revision's
- disk files, so the value of repository_files_dir will not always be repository.repo_path (it could be a temporary directory containing a clone).
- """
- metadata_dict = {}
- invalid_files = []
- invalid_tool_configs = []
- tool_dependencies_config = None
- datatypes_config = get_config_from_disk( 'datatypes_conf.xml', repository_files_dir )
- if datatypes_config:
- metadata_dict = generate_datatypes_metadata( datatypes_config, metadata_dict )
- sample_files = get_sample_files_from_disk( repository_files_dir )
- if sample_files:
- metadata_dict[ 'sample_files' ] = sample_files
- # Find all tool configs and exported workflows.
- for root, dirs, files in os.walk( repository_files_dir ):
- if root.find( '.hg' ) < 0 and root.find( 'hgrc' ) < 0:
- if '.hg' in dirs:
- dirs.remove( '.hg' )
- for name in files:
- # Find all tool configs.
- if name not in NOT_TOOL_CONFIGS and name.endswith( '.xml' ):
- full_path = os.path.abspath( os.path.join( root, name ) )
- if not ( check_binary( full_path ) or check_image( full_path ) or check_gzip( full_path )[ 0 ]
- or check_bz2( full_path )[ 0 ] or check_zip( full_path ) ):
- try:
- # Make sure we're looking at a tool config and not a display application config or something else.
- element_tree = util.parse_xml( full_path )
- element_tree_root = element_tree.getroot()
- is_tool = element_tree_root.tag == 'tool'
- except Exception, e:
- print "Error parsing %s", full_path, ", exception: ", str( e )
- is_tool = False
- if is_tool:
- try:
- tool = trans.app.toolbox.load_tool( full_path )
- tool_config = os.path.join( root, name )
- except Exception, e:
- tool = None
- invalid_tool_configs.append( name )
- if tool is not None:
- can_set_metadata, invalid_files = check_tool_input_params( trans, repository_files_dir, tool_config, tool, sample_files, invalid_files )
- if can_set_metadata:
- metadata_dict = generate_tool_metadata( tool_config, tool, repository_clone_url, metadata_dict )
- # Find all exported workflows
- elif name.endswith( '.ga' ):
- relative_path = os.path.join( root, name )
- fp = open( relative_path, 'rb' )
- workflow_text = fp.read()
- fp.close()
- exported_workflow_dict = from_json_string( workflow_text )
- if 'a_galaxy_workflow' in exported_workflow_dict and exported_workflow_dict[ 'a_galaxy_workflow' ] == 'true':
- metadata_dict = generate_workflow_metadata( relative_path, exported_workflow_dict, metadata_dict )
- if 'tools' in metadata_dict:
- # This step must be done after metadata for tools has been defined.
- tool_dependencies_config = get_config_from_disk( 'tool_dependencies.xml', repository_files_dir )
- if tool_dependencies_config:
- metadata_dict = generate_tool_dependency_metadata( tool_dependencies_config, metadata_dict )
- if invalid_tool_configs:
- metadata_dict [ 'invalid_tools' ] = invalid_tool_configs
- return metadata_dict, invalid_files
def generate_tool_guid( trans, repository, tool ):
"""
Generate a guid for the received tool. The form of the guid is
@@ -588,10 +437,23 @@
.first()
def get_repository_metadata_by_changeset_revision( trans, id, changeset_revision ):
"""Get metadata for a specified repository change set from the database"""
- return trans.sa_session.query( trans.model.RepositoryMetadata ) \
- .filter( and_( trans.model.RepositoryMetadata.table.c.repository_id == trans.security.decode_id( id ),
- trans.model.RepositoryMetadata.table.c.changeset_revision == changeset_revision ) ) \
- .first()
+ # Make sure there are no duplicate records, and return the single unique record for the changeset_revision. Duplicate records were somehow
+ # created in the past. This may or may not be resolved, so when it is confirmed that the cause of duplicate records has been corrected, tweak
+ # this method accordingly.
+ all_metadata_records = trans.sa_session.query( trans.model.RepositoryMetadata ) \
+ .filter( and_( trans.model.RepositoryMetadata.table.c.repository_id == trans.security.decode_id( id ),
+ trans.model.RepositoryMetadata.table.c.changeset_revision == changeset_revision ) ) \
+ .order_by( trans.model.RepositoryMetadata.table.c.update_time.desc() ) \
+ .all()
+ if len( all_metadata_records ) > 1:
+ # Delete all records except the most recently updated one.
+ for repository_metadata in all_metadata_records[ 1: ]:
+ trans.sa_session.delete( repository_metadata )
+ trans.sa_session.flush()
+ return all_metadata_records[ 0 ]
+ elif all_metadata_records:
+ return all_metadata_records[ 0 ]
+ return None
def get_repository_metadata_by_id( trans, id ):
"""Get repository metadata from the database"""
return trans.sa_session.query( trans.model.RepositoryMetadata ).get( trans.security.decode_id( id ) )
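The rewritten lookup above tolerates historical duplicates by ordering on update_time and keeping only the newest row. A minimal sketch of the same keep-newest, delete-the-rest idea, assuming a SQLAlchemy session and a mapped Record class with key and update_time columns (all names here are illustrative):

    def get_unique_record( sa_session, Record, key ):
        records = sa_session.query( Record ) \
                            .filter( Record.key == key ) \
                            .order_by( Record.update_time.desc() ) \
                            .all()
        if not records:
            return None
        # Delete every duplicate older than the most recently updated record.
        for duplicate in records[ 1: ]:
            sa_session.delete( duplicate )
        if len( records ) > 1:
            sa_session.flush()
        return records[ 0 ]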
@@ -762,7 +624,7 @@
ctx = get_changectx_for_changeset( repo, changeset_revision )
tool = None
message = ''
- work_dir = make_tmp_directory()
+ work_dir = tempfile.mkdtemp()
sample_files, deleted_sample_files = get_list_of_copied_sample_files( repo, ctx, dir=work_dir )
if sample_files:
trans.app.config.tool_data_path = work_dir
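The make_tmp_directory helper is replaced above by the standard library's tempfile.mkdtemp, which creates a uniquely named directory readable, writable, and searchable only by the creating user and returns its path; nothing is deleted automatically, so the caller must clean up. A minimal usage sketch:

    import shutil, tempfile

    work_dir = tempfile.mkdtemp()  # e.g. '/tmp/tmpA1b2C3'
    try:
        # ... create and read scratch files under work_dir ...
        pass
    finally:
        shutil.rmtree( work_dir )  # mkdtemp never cleans up after itself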
@@ -913,7 +775,7 @@
print "Cloning repository revision: ", str( ctx.rev() )
clone_repository( repository_clone_url, work_dir, str( ctx.rev() ) )
print "Generating metadata for changset revision: ", str( ctx.rev() )
- current_metadata_dict, invalid_files = generate_metadata_for_changeset_revision( trans, work_dir, repository_clone_url )
+ current_metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( trans.app, work_dir, repository_clone_url )
if current_metadata_dict:
if not metadata_changeset_revision and not metadata_dict:
# We're at the first change set in the change log.
@@ -969,7 +831,9 @@
pass
# Delete all repository_metadata records for this repository that do not have a changeset_revision value in changeset_revisions.
clean_repository_metadata( trans, id, changeset_revisions )
- add_repository_metadata_tool_versions( trans, id, changeset_revisions )
+ # Set tool version information for all downloadable changeset revisions.
+ downloadable_changeset_revisions = [ rm.changeset_revision for rm in repository.downloadable_revisions ]
+ add_repository_metadata_tool_versions( trans, id, downloadable_changeset_revisions )
def set_repository_metadata( trans, repository, content_alert_str='', **kwd ):
"""
Set metadata using the repository's current disk files, returning specific error messages (if any) to alert the repository owner that the changeset
@@ -980,8 +844,9 @@
repository_clone_url = generate_clone_url( trans, trans.security.encode_id( repository.id ) )
repo_dir = repository.repo_path
repo = hg.repository( get_configured_ui(), repo_dir )
- metadata_dict, invalid_files = generate_metadata_for_changeset_revision( trans, repo_dir, repository_clone_url )
+ metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( trans.app, repo_dir, repository_clone_url )
if metadata_dict:
+ repository_metadata = None
if new_tool_metadata_required( trans, repository, metadata_dict ) or new_workflow_metadata_required( trans, repository, metadata_dict ):
# Create a new repository_metadata table row.
repository_metadata = trans.model.RepositoryMetadata( repository.id, repository.tip, metadata_dict )
@@ -997,10 +862,11 @@
else:
repository_metadata = get_latest_repository_metadata( trans, repository.id )
if repository_metadata:
+ downloadable = 'datatypes' in metadata_dict or 'tools' in metadata_dict or 'workflows' in metadata_dict
# Update the last saved repository_metadata table row.
repository_metadata.changeset_revision = repository.tip
repository_metadata.metadata = metadata_dict
- repository_metadata.downloadable = changeset_is_downloadable( metadata_dict )
+ repository_metadata.downloadable = downloadable
trans.sa_session.add( repository_metadata )
trans.sa_session.flush()
else:
@@ -1008,18 +874,20 @@
repository_metadata = trans.model.RepositoryMetadata( repository.id, repository.tip, metadata_dict )
trans.sa_session.add( repository_metadata )
trans.sa_session.flush()
- elif len( repo ) == 1 and not invalid_files:
+ if 'tools' in metadata_dict and repository_metadata and status != 'error':
+ add_repository_metadata_tool_versions( trans, trans.security.encode_id( repository.id ), [ repository_metadata.changeset_revision ] )
+ elif len( repo ) == 1 and not invalid_file_tups:
message = "Revision '%s' includes no tools, datatypes or exported workflows for which metadata can " % str( repository.tip )
message += "be defined so this revision cannot be automatically installed into a local Galaxy instance."
status = "error"
- if invalid_files:
+ if invalid_file_tups:
if metadata_dict:
message += "Metadata was defined for some items in revision '%s'. " % str( repository.tip )
message += "Correct the following problems if necessary and reset metadata.<br/>"
else:
message += "Metadata cannot be defined for revision '%s' so this revision cannot be automatically " % str( repository.tip )
message += "installed into a local Galaxy instance. Correct the following problems and reset metadata.<br/>"
- for itc_tup in invalid_files:
+ for itc_tup in invalid_file_tups:
tool_file, exception_msg = itc_tup
if exception_msg.find( 'No such file or directory' ) >= 0:
exception_items = exception_msg.split()
diff -r 5f2db4a18d3d3bef78f0a330cc6073f34db7c88b -r 9f790bc90769df7a4f84f103707bdd8ceaf1115d lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -9,8 +9,8 @@
from galaxy.web.framework.helpers import time_ago, iff, grids
from galaxy.util.json import from_json_string, to_json_string
from galaxy.model.orm import *
-from galaxy.util.shed_util import create_repo_info_dict, get_changectx_for_changeset, get_configured_ui, get_repository_file_contents, make_tmp_directory, NOT_TOOL_CONFIGS
-from galaxy.util.shed_util import open_repository_files_folder, reversed_lower_upper_bounded_changelog, strip_path
+from galaxy.util.shed_util import create_repo_info_dict, get_changectx_for_changeset, get_configured_ui, get_repository_file_contents, NOT_TOOL_CONFIGS
+from galaxy.util.shed_util import open_repository_files_folder, reversed_lower_upper_bounded_changelog, strip_path, to_html_escaped, update_repository
from galaxy.tool_shed.encoding_util import *
from common import *
@@ -113,7 +113,7 @@
grids.GridColumn.__init__( self, col_name )
def get_value( self, trans, grid, repository ):
"""Display a SelectField whose options are the changeset_revision strings of all downloadable_revisions of this repository."""
- select_field = build_changeset_revision_select_field( trans, repository )
+ select_field = build_changeset_revision_select_field( trans, repository, downloadable_only=False )
if len( select_field.options ) > 1:
return select_field.get_html()
return repository.revision
@@ -268,7 +268,7 @@
grids.GridColumn.__init__( self, col_name )
def get_value( self, trans, grid, repository ):
"""Display a SelectField whose options are the changeset_revision strings of all download-able revisions of this repository."""
- select_field = build_changeset_revision_select_field( trans, repository )
+ select_field = build_changeset_revision_select_field( trans, repository, downloadable_only=True )
if len( select_field.options ) > 1:
return select_field.get_html()
return repository.revision
@@ -1346,19 +1346,14 @@
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'error' )
webapp = get_webapp( trans, **kwd )
+ repository_clone_url = generate_clone_url( trans, repository_id )
repository = get_repository( trans, repository_id )
repo_dir = repository.repo_path
repo = hg.repository( get_configured_ui(), repo_dir )
ctx = get_changectx_for_changeset( repo, changeset_revision )
invalid_message = ''
- metadata_dict, invalid_files, deleted_sample_files = generate_metadata_for_changeset_revision( trans,
- repo,
- repository_id,
- ctx,
- changeset_revision,
- repo_dir,
- updating_tip=changeset_revision==repository.tip )
- for invalid_file_tup in invalid_files:
+ metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( trans.app, repo_dir, repository_clone_url )
+ for invalid_file_tup in invalid_file_tups:
invalid_tool_config, invalid_msg = invalid_file_tup
invalid_tool_config_name = strip_path( invalid_tool_config )
if tool_config == invalid_tool_config_name:
@@ -1554,7 +1549,8 @@
changeset_revision_select_field = build_changeset_revision_select_field( trans,
repository,
selected_value=changeset_revision,
- add_id_to_name=False )
+ add_id_to_name=False,
+ downloadable_only=False )
revision_label = get_revision_label( trans, repository, changeset_revision )
repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
if repository_metadata:
@@ -1657,7 +1653,8 @@
changeset_revision_select_field = build_changeset_revision_select_field( trans,
repository,
selected_value=changeset_revision,
- add_id_to_name=False )
+ add_id_to_name=False,
+ downloadable_only=False )
return trans.fill_template( '/webapps/community/repository/preview_tools_in_changeset.mako',
repository=repository,
repository_metadata_id=repository_metadata_id,
@@ -2128,7 +2125,8 @@
changeset_revision_select_field = build_changeset_revision_select_field( trans,
repository,
selected_value=changeset_revision,
- add_id_to_name=False )
+ add_id_to_name=False,
+ downloadable_only=False )
revision_label = get_revision_label( trans, repository, changeset_revision )
repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
if repository_metadata:
@@ -2185,7 +2183,8 @@
changeset_revision_select_field = build_changeset_revision_select_field( trans,
repository,
selected_value=changeset_revision,
- add_id_to_name=False )
+ add_id_to_name=False,
+ downloadable_only=False )
return trans.fill_template( "/webapps/community/repository/view_tool_metadata.mako",
repository=repository,
tool=tool,
@@ -2197,3 +2196,42 @@
webapp=webapp,
message=message,
status=status )
+
+# ----- Utility methods -----
+def build_changeset_revision_select_field( trans, repository, selected_value=None, add_id_to_name=True, downloadable_only=False ):
+ """Build a SelectField whose options are the changeset_rev strings of all downloadable revisions of the received repository."""
+ repo = hg.repository( get_configured_ui(), repository.repo_path )
+ options = []
+ changeset_tups = []
+ refresh_on_change_values = []
+ if downloadable_only:
+ repository_metadata_revisions = repository.downloadable_revisions
+ else:
+ repository_metadata_revisions = repository.metadata_revisions
+ for repository_metadata in repository_metadata_revisions:
+ changeset_revision = repository_metadata.changeset_revision
+ ctx = get_changectx_for_changeset( repo, changeset_revision )
+ if ctx:
+ rev = '%04d' % ctx.rev()
+ label = "%s:%s" % ( str( ctx.rev() ), changeset_revision )
+ else:
+ rev = '-1'
+ label = "-1:%s" % changeset_revision
+ changeset_tups.append( ( rev, label, changeset_revision ) )
+ refresh_on_change_values.append( changeset_revision )
+ # Sort options by the revision label. Even though the downloadable_revisions query sorts by update_time,
+ # the changeset revisions may not be sorted correctly because setting metadata over time will reset update_time.
+ for changeset_tup in sorted( changeset_tups ):
+ # Display the latest revision first.
+ options.insert( 0, ( changeset_tup[1], changeset_tup[2] ) )
+ if add_id_to_name:
+ name = 'changeset_revision_%d' % repository.id
+ else:
+ name = 'changeset_revision'
+ select_field = SelectField( name=name,
+ refresh_on_change=True,
+ refresh_on_change_values=refresh_on_change_values )
+ for option_tup in options:
+ selected = selected_value and option_tup[1] == selected_value
+ select_field.add_option( option_tup[0], option_tup[1], selected=selected )
+ return select_field
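A hypothetical call site for the utility above, mirroring the controller methods earlier in this changeset (trans, repository, and changeset_revision are assumed to be in scope):

    # Offer only installable revisions and preselect the current one.
    select_field = build_changeset_revision_select_field( trans,
                                                          repository,
                                                          selected_value=changeset_revision,
                                                          add_id_to_name=False,
                                                          downloadable_only=True )
    select_field_html = select_field.get_html()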
diff -r 5f2db4a18d3d3bef78f0a330cc6073f34db7c88b -r 9f790bc90769df7a4f84f103707bdd8ceaf1115d lib/galaxy/webapps/community/model/__init__.py
--- a/lib/galaxy/webapps/community/model/__init__.py
+++ b/lib/galaxy/webapps/community/model/__init__.py
@@ -166,12 +166,13 @@
fp.close()
class RepositoryMetadata( object ):
- def __init__( self, repository_id=None, changeset_revision=None, metadata=None, tool_versions=None, malicious=False ):
+ def __init__( self, repository_id=None, changeset_revision=None, metadata=None, tool_versions=None, malicious=False, downloadable=False ):
self.repository_id = repository_id
self.changeset_revision = changeset_revision
self.metadata = metadata or dict()
self.tool_versions = tool_versions or dict()
self.malicious = malicious
+ self.downloadable = downloadable
class ItemRatingAssociation( object ):
def __init__( self, id=None, user=None, item=None, rating=0, comment='' ):
diff -r 5f2db4a18d3d3bef78f0a330cc6073f34db7c88b -r 9f790bc90769df7a4f84f103707bdd8ceaf1115d templates/webapps/community/admin/reset_metadata_on_selected_repositories.mako
--- a/templates/webapps/community/admin/reset_metadata_on_selected_repositories.mako
+++ b/templates/webapps/community/admin/reset_metadata_on_selected_repositories.mako
@@ -43,14 +43,17 @@
${render_msg( message, status )}
%endif
+<div class="warningmessage">
+ Resetting metadata may take a while because this process clones each change set in each selected repository's change log to a temporary location on disk.
+ Wait until this page redirects after clicking the <b>Reset metadata on selected repositories</b> button, as doing anything else will not be helpful. Watch
+ the tool shed paster log to pass the time if necessary.
+</div>
+
<div class="toolForm"><div class="toolFormTitle">Reset all metadata on each selected repository</div><form name="reset_metadata_on_selected_repositories" id="reset_metadata_on_selected_repositories" action="${h.url_for( controller='admin', action='reset_metadata_on_selected_repositories' )}" method="post" ><div class="form-row">
- Check each repository for which you want to reset metadata. Repository names are followed by owners in parentheses. Resetting metadata
- may take a while because this process clones each change set in each selected repository's change log to a temporary location on disk.
- Wait until this page redirects after clicking the <b>Reset metadata on selected repositories</b> button, as doing anything else will not
- be helpful. Watch the tool shed paster log to pass the time if necessary.
+ Check each repository for which you want to reset metadata. Repository names are followed by owners in parentheses.
</div><div style="clear: both"></div><div class="form-row">
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: carlfeberhard: fix to mako import and some up-stack error reporting in tools
by Bitbucket 08 Aug '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/5f2db4a18d3d/
changeset: 5f2db4a18d3d
user: carlfeberhard
date: 2012-08-08 18:30:25
summary: fix to mako import and some up-stack error reporting in tools
affected #: 2 files
diff -r 68d253239882008ee143fd46d946d1a8a828d21e -r 5f2db4a18d3d3bef78f0a330cc6073f34db7c88b lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -4,6 +4,7 @@
import pkg_resources
pkg_resources.require( "simplejson" )
+pkg_resources.require( "Mako" )
import logging, os, string, sys, tempfile, glob, shutil, types, urllib, subprocess, random, math, traceback
import simplejson
@@ -2344,7 +2345,7 @@
command_line = command_line.replace( "\n", " " ).replace( "\r", " " ).strip()
except Exception, e:
# Modify exception message to be more clear
- #e.args = ( 'Error substituting into command line. Params: %r, Command: %s' % ( param_dict, self.command ) )
+ #e.args = ( 'Error substituting into command line. Params: %r, Command: %s' % ( param_dict, self.command ), )
raise
if self.interpreter:
# TODO: path munging for cluster/dataset server relocatability
@@ -2441,7 +2442,7 @@
if code:
return code( *args, **kwargs )
except Exception, e:
- e.args = ( "Error in '%s' hook '%s', original message: %s" % ( self.name, hook_name, e.args[0] ) )
+ e.args = ( "Error in '%s' hook '%s', original message: %s" % ( self.name, hook_name, e.args[0] ), )
raise
def exec_before_job( self, app, inp_data, out_data, param_dict={} ):
pass
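The trailing comma restored in the hook fix above is load-bearing: ( 'message' ) is just a parenthesized string, while ( 'message', ) is a one-element tuple. Assigning a bare string to an exception's .args coerces it to a tuple of individual characters, mangling the message. A standalone illustration of the fixed pattern (the wording is hypothetical):

    def annotate_and_reraise():
        try:
            raise Exception( 'boom' )
        except Exception, e:
            # The trailing comma makes this a one-element tuple; without it,
            # e.args would become ( 'E', 'r', 'r', 'o', 'r', ... ).
            e.args = ( "Error in hook, original message: %s" % e.args[0], )
            raise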
diff -r 68d253239882008ee143fd46d946d1a8a828d21e -r 5f2db4a18d3d3bef78f0a330cc6073f34db7c88b tools/new_operations/operation_filter.py
--- a/tools/new_operations/operation_filter.py
+++ b/tools/new_operations/operation_filter.py
@@ -3,6 +3,9 @@
from galaxy import eggs
from galaxy import jobs
from galaxy.tools.parameters import DataToolParameter
+
+from galaxy.jobs.handler import JOB_ERROR
+
# Older py compatibility
try:
set()
@@ -63,8 +66,8 @@
raise Exception( stderr )
except Exception, exc:
- data.blurb = jobs.JOB_ERROR
- data.state = jobs.JOB_ERROR
+ data.blurb = JOB_ERROR
+ data.state = JOB_ERROR
## def exec_after_process(app, inp_data, out_data, param_dict, tool=None, stdout=None, stderr=None):
## pass
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: natefoo: Remove contrib/multiprocess.sh since run.sh can run all of the processes itself now.
by Bitbucket 08 Aug '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/68d253239882/
changeset: 68d253239882
user: natefoo
date: 2012-08-08 17:12:42
summary: Remove contrib/multiprocess.sh since run.sh can run all of the processes itself now.
affected #: 1 file
diff -r 480b8c0003f1ab6fa78f88bd7854cf978f2ed575 -r 68d253239882008ee143fd46d946d1a8a828d21e contrib/multiproccess.sh
--- a/contrib/multiproccess.sh
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/bin/bash
-
-# copy this script to the top level galaxy directory and modify the following
-# for your environment
-
-web_server_names=(web{0..2}) # server names: web0 web1 web2
-runner_server_names=(runner0) # server name: runner0
-
-web_config='universe_wsgi.webapp.ini'
-runner_config='universe_wsgi.runner.ini'
-
-# actually do the requested action
-
-if [ -z "$1" ]; then
- echo "usage: multiprocess.sh <--daemon|--stop-daemon>"
- exit 1
-fi
-
-for server_name in ${web_server_names[@]}; do
- echo "[$server_name]"
- python ./scripts/paster.py serve $web_config --server-name=$server_name --pid-file=$server_name.pid --log-file=$server_name.log $@
-done
-for server_name in ${runner_server_names[@]}; do
- echo "[$server_name]"
- python ./scripts/paster.py serve $runner_config --server-name=$server_name --pid-file=$server_name.pid --log-file=$server_name.log $@
-done
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: inithello: Improved generation and display of local genome/indexes table. Handle missing tool data table entries.
by Bitbucket 08 Aug '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/480b8c0003f1/
changeset: 480b8c0003f1
user: inithello
date: 2012-08-08 16:37:27
summary: Improved generation and display of local genome/indexes table. Handle missing tool data table entries.
affected #: 4 files
diff -r 4d22e26e595a278f96b814908b8dc24b3f77a06b -r 480b8c0003f1ab6fa78f88bd7854cf978f2ed575 lib/galaxy/web/controllers/data_admin.py
--- a/lib/galaxy/web/controllers/data_admin.py
+++ b/lib/galaxy/web/controllers/data_admin.py
@@ -30,25 +30,28 @@
@web.expose
@web.require_admin
def manage_data( self, trans, **kwd ):
- genomes = dict()
if trans.app.config.get_bool( 'enable_beta_job_managers', False ) == False:
- return trans.fill_template( '/admin/data_admin/betajob.mako' )
- for line in trans.app.tool_data_tables.data_tables[ 'all_fasta' ].data:
- defstate = dict( state='Generate', style=self.jobstyles[ 'new' ] )
- indexers = dict( bowtie_indexes=defstate, bowtie2_indexes=defstate, bwa_indexes=defstate, perm_base_indexes=defstate, srma_indexes=defstate, sam_fa_indexes=defstate )
- dbkey = line[0]
- name = line[2]
- indexers[ 'name' ] = name
- indexers[ 'fapath' ] = line[3]
- genomes[ dbkey ] = indexers
- for table in [ 'bowtie_indexes', 'bowtie2_indexes', 'bwa_indexes', 'srma_indexes' ]:
- for line in trans.app.tool_data_tables.data_tables[ table ].data:
- dbkey = line[0]
- genomes[ dbkey ][ table ] = dict( state='Generated', style=self.jobstyles[ 'done' ] )
- for line in trans.app.tool_data_tables.data_tables[ 'sam_fa_indexes' ].data:
- genomes[ line[1] ][ 'sam_fa_indexes' ] = dict( state='Generated', style=self.jobstyles[ 'done' ] )
- for line in trans.app.tool_data_tables.data_tables[ 'perm_base_indexes' ].data:
- genomes[ line[1].split(':')[0] ][ 'perm_base_indexes' ] = dict( state='Generated', style=self.jobstyles[ 'done' ] )
+ return trans.fill_template( '/admin/data_admin/generic_error.mako', message='This feature requires that enable_beta_job_managers be set to True in your Galaxy configuration.' )
+ if 'all_fasta' not in trans.app.tool_data_tables.data_tables:
+ return trans.fill_template( '/admin/data_admin/generic_error.mako', message='The local data manager requires that an all_fasta entry exists in your tool_data_table_conf.xml.' )
+ indextable = {}
+ dbkeys = []
+ labels = { 'bowtie_indexes': 'Bowtie', 'bowtie2_indexes': 'Bowtie 2', 'bwa_indexes': 'BWA', 'srma_indexes': 'Picard', 'sam_fa_indexes': 'SAM', 'perm_base_indexes': 'PerM' }
+ tablenames = { 'Bowtie': 'bowtie_indexes', 'Bowtie 2': 'bowtie2_indexes', 'BWA': 'bwa_indexes', 'Picard': 'srma_indexes', 'SAM': 'sam_fa_indexes', 'PerM': 'perm_base_indexes' }
+ indexfuncs = dict( bowtie_indexes='bowtie', bowtie2_indexes='bowtie2', bwa_indexes='bwa', srma_indexes='picard', sam_fa_indexes='sam', perm_base_indexes='perm' )
+ for genome in trans.app.tool_data_tables.data_tables[ 'all_fasta' ].data:
+ dbkey = genome[0]
+ dbkeys.append( dbkey )
+ indextable[ dbkey ] = dict( indexes=dict(), name=genome[2], path=genome[3] )
+ for genome in indextable:
+ for label in labels:
+ indextable[ genome ][ 'indexes' ][ label ] = 'Generate'
+ if label not in trans.app.tool_data_tables.data_tables:
+ indextable[ genome ][ 'indexes' ][ label ] = 'Disabled'
+ else:
+ for row in trans.app.tool_data_tables.data_tables[ label ].data:
+ if genome in row or row[0].startswith( genome ):
+ indextable[ genome ][ 'indexes' ][ label ] = 'Generated'
jobgrid = []
sa_session = trans.app.model.context.current
jobs = sa_session.query( model.GenomeIndexToolData ).order_by( model.GenomeIndexToolData.created_time.desc() ).filter_by( user_id=trans.get_user().id ).group_by( model.GenomeIndexToolData.deferred ).limit( 20 ).all()
@@ -65,7 +68,8 @@
jobtype = 'index'
indexers = ', '.join( params['indexes'] )
jobgrid.append( dict( jobtype=jobtype, indexers=indexers, rowclass=state, deferred=job.deferred.id, state=state, intname=job.deferred.params[ 'intname' ], dbkey=job.deferred.params[ 'dbkey' ] ) )
- return trans.fill_template( '/admin/data_admin/local_data.mako', jobgrid=jobgrid, genomes=genomes )
+ styles = dict( Generate=self.jobstyles['new'], Generated=self.jobstyles['ok'], Disabled=self.jobstyles['error'] )
+ return trans.fill_template( '/admin/data_admin/local_data.mako', jobgrid=jobgrid, indextable=indextable, labels=labels, dbkeys=dbkeys, styles=styles, indexfuncs=indexfuncs )
@web.expose
@web.require_admin
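A simplified sketch of the table-building logic in manage_data above, assuming data_tables maps table names to objects whose .data attribute is a list of rows (a stand-in for trans.app.tool_data_tables.data_tables; the label set is trimmed for brevity):

    LABELS = { 'bowtie_indexes': 'Bowtie', 'bwa_indexes': 'BWA' }

    def build_index_table( data_tables ):
        indextable = {}
        # Seed one entry per genome in the canonical all_fasta table.
        for genome in data_tables[ 'all_fasta' ].data:
            dbkey = genome[0]
            indextable[ dbkey ] = dict( indexes={}, name=genome[2], path=genome[3] )
        # Mark each index type Generated, Generate (not yet built), or Disabled (no table entry).
        for dbkey in indextable:
            for table_name in LABELS:
                if table_name not in data_tables:
                    indextable[ dbkey ][ 'indexes' ][ table_name ] = 'Disabled'
                    continue
                indextable[ dbkey ][ 'indexes' ][ table_name ] = 'Generate'
                for row in data_tables[ table_name ].data:
                    if dbkey in row or row[0].startswith( dbkey ):
                        indextable[ dbkey ][ 'indexes' ][ table_name ] = 'Generated'
        return indextable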
diff -r 4d22e26e595a278f96b814908b8dc24b3f77a06b -r 480b8c0003f1ab6fa78f88bd7854cf978f2ed575 templates/admin/data_admin/betajob.mako
--- a/templates/admin/data_admin/betajob.mako
+++ /dev/null
@@ -1,35 +0,0 @@
-<%inherit file="/base.mako"/>
-<%namespace file="/message.mako" import="render_msg" />
-<%namespace file="/library/common/common.mako" import="common_javascripts" />
-
-<%!
- def inherit(context):
- if context.get('use_panels'):
- return '/webapps/galaxy/base_panels.mako'
- else:
- return '/base.mako'
-%>
-<%inherit file="${inherit(context)}"/>
-
-<%def name="init()">
-<%
- self.has_left_panel=False
- self.has_right_panel=False
- self.message_box_visible=False
- self.active_view="user"
- self.overlay_visible=False
- self.has_accessible_datasets = False
-%>
-</%def>
-<%def name="stylesheets()">
- ${parent.stylesheets()}
- ${h.css( "autocomplete_tagging" )}
-</%def>
-<%def name="javascripts()">
- ${parent.javascripts()}
- ${h.js("jquery.autocomplete", "autocomplete_tagging" )}
-</%def>
-##
-## Override methods from base.mako and base_panels.mako
-##
-<p class="panel-error-message">This feature requires that enable_beta_job_managers be set to True in your Galaxy configuration.</p>
\ No newline at end of file
diff -r 4d22e26e595a278f96b814908b8dc24b3f77a06b -r 480b8c0003f1ab6fa78f88bd7854cf978f2ed575 templates/admin/data_admin/generic_error.mako
--- /dev/null
+++ b/templates/admin/data_admin/generic_error.mako
@@ -0,0 +1,35 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/library/common/common.mako" import="common_javascripts" />
+
+<%!
+ def inherit(context):
+ if context.get('use_panels'):
+ return '/webapps/galaxy/base_panels.mako'
+ else:
+ return '/base.mako'
+%>
+<%inherit file="${inherit(context)}"/>
+
+<%def name="init()">
+<%
+ self.has_left_panel=False
+ self.has_right_panel=False
+ self.message_box_visible=False
+ self.active_view="user"
+ self.overlay_visible=False
+ self.has_accessible_datasets = False
+%>
+</%def>
+<%def name="stylesheets()">
+ ${parent.stylesheets()}
+ ${h.css( "autocomplete_tagging" )}
+</%def>
+<%def name="javascripts()">
+ ${parent.javascripts()}
+ ${h.js("jquery.autocomplete", "autocomplete_tagging" )}
+</%def>
+##
+## Override methods from base.mako and base_panels.mako
+##
+<p class="panel-error-message">${message}</p>
\ No newline at end of file
diff -r 4d22e26e595a278f96b814908b8dc24b3f77a06b -r 480b8c0003f1ab6fa78f88bd7854cf978f2ed575 templates/admin/data_admin/local_data.mako
--- a/templates/admin/data_admin/local_data.mako
+++ b/templates/admin/data_admin/local_data.mako
@@ -44,6 +44,7 @@
td, th { padding-left: 10px; padding-right: 10px; }
td.state-color-new { text-decoration: underline; }
td.panel-done-message { background-image: none; padding: 0px 10px 0px 10px; }
+ td.panel-error-message { background-image: none; padding: 0px 10px 0px 10px; }
</style><div class="toolForm">
%if message:
@@ -52,19 +53,23 @@
<div class="toolFormTitle">Currently tracked builds <a class="action-button" href="${h.url_for( controller='data_admin', action='add_genome' )}">Add new</a></div><div class="toolFormBody"><h2>Locally cached data:</h2>
- <h3>NOTE: Indexers queued here will not be reflected in the table until Galaxy is restarted.</h3>
+ <h3>NOTE: Indexes generated here will not be reflected in the table until Galaxy is restarted.</h3><table id="locfiles">
- <tr><th>Database ID</th><th>Name</th><th>Bowtie</th><th>Bowtie 2</th><th>BWA</th><th>Sam</th><th>Picard</th><th>PerM</th></tr>
- %for dbkey in sorted(genomes.keys()):
+ <tr>
+ <th>DB Key</th>
+ <th>Name</th>
+ %for label in labels:
+ <th>${labels[label]}</th>
+ %endfor
+ </tr>
+ %for dbkey in sorted(dbkeys):
<tr><td>${dbkey}</td>
- <td>${genomes[dbkey]['name']}</td>
- <td id="${dbkey}-bowtie" class="indexcell ${genomes[dbkey]['bowtie_indexes']['style']}" data-fapath="${genomes[dbkey]['fapath']}" data-longname="${genomes[dbkey]['name']}" data-index="bowtie" data-dbkey="${dbkey}">${genomes[dbkey]['bowtie_indexes']['state']}</td>
- <td id="${dbkey}-bowtie2" class="indexcell ${genomes[dbkey]['bowtie2_indexes']['style']}" data-fapath="${genomes[dbkey]['fapath']}" data-longname="${genomes[dbkey]['name']}" data-index="bowtie2" data-dbkey="${dbkey}">${genomes[dbkey]['bowtie2_indexes']['state']}</td>
- <td id="${dbkey}-bwa" class="indexcell ${genomes[dbkey]['bwa_indexes']['style']}" data-fapath="${genomes[dbkey]['fapath']}" data-longname="${genomes[dbkey]['name']}" data-index="bwa" data-dbkey="${dbkey}">${genomes[dbkey]['bwa_indexes']['state']}</td>
- <td id="${dbkey}-sam" class="indexcell ${genomes[dbkey]['sam_fa_indexes']['style']}" data-fapath="${genomes[dbkey]['fapath']}" data-longname="${genomes[dbkey]['name']}" data-index="sam" data-dbkey="${dbkey}">${genomes[dbkey]['sam_fa_indexes']['state']}</td>
- <td id="${dbkey}-picard" class="indexcell ${genomes[dbkey]['srma_indexes']['style']}" data-fapath="${genomes[dbkey]['fapath']}" data-longname="${genomes[dbkey]['name']}" data-index="picard" data-dbkey="${dbkey}">${genomes[dbkey]['srma_indexes']['state']}</td>
- <td id="${dbkey}-perm" class="indexcell ${genomes[dbkey]['perm_base_indexes']['style']}" data-fapath="${genomes[dbkey]['fapath']}" data-longname="${genomes[dbkey]['name']}" data-index="perm" data-dbkey="${dbkey}">${genomes[dbkey]['perm_base_indexes']['state']}</td>
+ <td>${indextable[dbkey]['name']}</td>
+ %for label in labels:
+ <td id="${dbkey}-${indexfuncs[label]}" class="indexcell ${styles[indextable[dbkey]['indexes'][label]]}" data-fapath="${indextable[dbkey]['path']}" data-longname="${indextable[dbkey]['name']}" data-index="${indexfuncs[label]}" data-dbkey="${dbkey}">${indextable[dbkey]['indexes'][label]}</td>
+ %endfor
+
</tr>
%endfor
</table>
@@ -124,6 +129,7 @@
jsondata["name"] = $('#job-' + jobid).attr('data-name');
jsondata["dbkey"] = $('#job-' + jobid).attr('data-dbkey');
jsondata["indexes"] = $('#job-' + jobid).attr('data-indexes');
+ tdid = jq(jsondata["dbkey"] + '-' + jsondata["indexes"]);
newhtml = makeNewHTML(jsondata);
$('#job-' + jobid).replaceWith(newhtml);
if ($.inArray(jsondata["status"], finalstates) == -1) {
@@ -133,7 +139,7 @@
});
}
if (jsondata["status"] == 'done' || jsondata["status"] == 'ok') {
- elem = $('#' + jsondata["dbkey"] + '-' + jsondata["indexes"]);
+ elem = $(tdid);
elem.html('Generated');
elem.attr('class', 'indexcell panel-done-message');
}
@@ -156,5 +162,8 @@
}
});
});
-
+
+ function jq(id) {
+ return '#' + id.replace(/(:|\.)/g,'\\$1');
+ }
</script>
\ No newline at end of file
Repository URL: https://bitbucket.org/galaxy/galaxy-central/