galaxy-commits
Threads by month
- ----- 2024 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2023 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2022 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2021 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2020 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2019 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2018 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2017 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2016 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2015 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2014 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2013 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2012 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2011 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2010 -----
- December
- November
- October
- September
- August
- July
- June
- May
June 2012
- 1 participants
- 98 discussions
commit/galaxy-central: greg: Refinements for setting metadata on the entire change log of tool shed repositories.
by Bitbucket 06 Jun '12
by Bitbucket 06 Jun '12
06 Jun '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/3db661b25774/
changeset: 3db661b25774
user: greg
date: 2012-06-06 21:15:15
summary: Refinements for setting metadata on the entire change log of tool shed repositories.
affected #: 1 file
diff -r 8a06e3e264ec912b67fb8388cf3874b32399ac07 -r 3db661b25774e2cb3eba6c156220f181eb28cb95 lib/galaxy/webapps/community/controllers/common.py
--- a/lib/galaxy/webapps/community/controllers/common.py
+++ b/lib/galaxy/webapps/community/controllers/common.py
@@ -962,6 +962,9 @@
# The list of changeset_revisions refers to repository_metadata records that have been created or updated. When the following loop
# completes, we'll delete all repository_metadata records for this repository that do not have a changeset_revision value in this list.
changeset_revisions = []
+ # When a new repository_metadata record is created, it always uses the values of metadata_changeset_revision and metadata_dict.
+ metadata_changeset_revision = None
+ metadata_dict = None
ancestor_changeset_revision = None
ancestor_metadata_dict = None
for changeset in repo.changelog:
@@ -978,13 +981,16 @@
if deleted_sample_file not in missing_sample_files:
missing_sample_files.append( deleted_sample_file )
if current_metadata_dict:
+ if not metadata_changeset_revision and not metadata_dict:
+ # We're at the first change set in the change log.
+ metadata_changeset_revision = current_changeset_revision
+ metadata_dict = current_metadata_dict
if ancestor_changeset_revision:
# Compare metadata from ancestor and current. The value of comparison will be one of:
# 'no metadata' - no metadata for either ancestor or current, so continue from current
# 'equal' - ancestor metadata is equivalent to current metadata, so continue from current
# 'subset' - ancestor metadata is a subset of current metadata, so continue from current
- # 'not equal and not subset' - ancestor metadata is neither equal to nor a subset of current
- # metadata, so persist ancestor metadata.
+ # 'not equal and not subset' - ancestor metadata is neither equal to nor a subset of current metadata, so persist ancestor metadata.
comparison = compare_changeset_revisions( ancestor_changeset_revision,
ancestor_metadata_dict,
current_changeset_revision,
@@ -993,30 +999,38 @@
ancestor_changeset_revision = current_changeset_revision
ancestor_metadata_dict = current_metadata_dict
elif comparison == 'not equal and not subset':
- create_or_update_repository_metadata( trans, id, repository, ancestor_changeset_revision, ancestor_metadata_dict )
+ metadata_changeset_revision = ancestor_changeset_revision
+ metadata_dict = ancestor_metadata_dict
+ create_or_update_repository_metadata( trans, id, repository, metadata_changeset_revision, metadata_dict )
# Keep track of the changeset_revisions that we've persisted.
- changeset_revisions.append( ancestor_changeset_revision )
+ changeset_revisions.append( metadata_changeset_revision )
ancestor_changeset_revision = current_changeset_revision
ancestor_metadata_dict = current_metadata_dict
else:
- # We're either at the first change set in the change log or we have just created or updated
- # a repository_metadata record. At this point we set the ancestor changeset to the current
- # changeset for comparison in the next iteration.
+ # We're either at the first change set in the change log or we have just created or updated a repository_metadata record. At
+ # this point we set the ancestor changeset to the current changeset for comparison in the next iteration.
ancestor_changeset_revision = current_changeset_revision
ancestor_metadata_dict = current_metadata_dict
if not ctx.children():
+ metadata_changeset_revision = current_changeset_revision
+ metadata_dict = current_metadata_dict
# We're at the end of the change log.
- create_or_update_repository_metadata( trans, id, repository, current_changeset_revision, current_metadata_dict )
- changeset_revisions.append( current_changeset_revision )
+ create_or_update_repository_metadata( trans, id, repository, metadata_changeset_revision, metadata_dict )
+ changeset_revisions.append( metadata_changeset_revision )
ancestor_changeset_revision = None
ancestor_metadata_dict = None
elif ancestor_metadata_dict:
+ # We reach here only if current_metadata_dict is empty and ancestor_metadata_dict is not.
+ ancestor_changeset_revision = current_changeset_revision
+ metadata_changeset_revision = current_changeset_revision
+ metadata_dict = ancestor_metadata_dict
if not ctx.children():
# We're at the end of the change log.
- create_or_update_repository_metadata( trans, id, repository, current_changeset_revision, ancestor_metadata_dict )
- changeset_revisions.append( current_changeset_revision )
+ create_or_update_repository_metadata( trans, id, repository, metadata_changeset_revision, metadata_dict )
+ changeset_revisions.append( metadata_changeset_revision )
ancestor_changeset_revision = None
ancestor_metadata_dict = None
+ # Delete all repository_metadata records for this repository that do not have a changeset_revision value in changeset_revisions.
clean_repository_metadata( trans, id, changeset_revisions )
add_repository_metadata_tool_versions( trans, id, changeset_revisions )
if missing_sample_files:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: jgoecks: Update default and help for Tophat2 coverage search parameter.
by Bitbucket 06 Jun '12
by Bitbucket 06 Jun '12
06 Jun '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/8a06e3e264ec/
changeset: 8a06e3e264ec
user: jgoecks
date: 2012-06-06 20:42:21
summary: Update default and help for Tophat2 coverage search parameter.
affected #: 1 file
diff -r 41f72445078999fd2a3f6471ee319a0be977097c -r 8a06e3e264ec912b67fb8388cf3874b32399ac07 tools/ngs_rna/tophat2_wrapper.xml
--- a/tools/ngs_rna/tophat2_wrapper.xml
+++ b/tools/ngs_rna/tophat2_wrapper.xml
@@ -231,9 +231,9 @@
<!-- Coverage search. --><conditional name="coverage_search">
- <param name="use_search" type="select" label="Use Coverage Search">
- <option selected="true" value="Yes">Yes</option>
- <option value="No">No</option>
+ <param name="use_search" type="select" label="Use Coverage Search" help="Enables the coverage based search for junctions. Use when coverage search is disabled by default (such as for reads 75bp or longer), for maximum sensitivity.">
+ <option selected="true" value="No">No</option>
+ <option value="Yes">Yes</option></param><when value="Yes"><param name="min_coverage_intron" type="integer" value="50" label="Minimum intron length that may be found during coverage search" />
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: greg: Fixes for getting updates to installed tool shed repositories.
by Bitbucket 06 Jun '12
by Bitbucket 06 Jun '12
06 Jun '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/41f724450789/
changeset: 41f724450789
user: greg
date: 2012-06-06 19:26:15
summary: Fixes for getting updates to installed tool shed repositories.
affected #: 2 files
diff -r 5343150e167ea8682f25849e7b01e1f40c4b440d -r 41f72445078999fd2a3f6471ee319a0be977097c lib/galaxy/web/controllers/admin_toolshed.py
--- a/lib/galaxy/web/controllers/admin_toolshed.py
+++ b/lib/galaxy/web/controllers/admin_toolshed.py
@@ -174,6 +174,40 @@
return trans.response.send_redirect( url )
@web.expose
@web.require_admin
+ def check_installed_tool_dependencies( self, trans, repository_id, relative_install_dir ):
+ """See if any tool dependencies need to be installed."""
+ tool_dependencies_missing = False
+ repository = get_repository( trans, repository_id )
+ if repository.includes_tool_dependencies:
+ # Get the tool_dependencies.xml file from the repository.
+ work_dir = make_tmp_directory()
+ tool_dependencies_config = get_config_from_repository( trans.app,
+ 'tool_dependencies.xml',
+ repository,
+ repository.changeset_revision,
+ work_dir,
+ install_dir=relative_install_dir )
+ # Parse the tool_dependencies.xml config.
+ tree = ElementTree.parse( tool_dependencies_config )
+ root = tree.getroot()
+ ElementInclude.include( root )
+ fabric_version_checked = False
+ for elem in root:
+ if elem.tag == 'package':
+ package_name = elem.get( 'name', None )
+ package_version = elem.get( 'version', None )
+ if package_name and package_version:
+ install_dir = get_install_dir( trans.app, repository, repository.installed_changeset_revision, package_name, package_version )
+ if not_installed( install_dir ):
+ tool_dependencies_missing = True
+ break
+ try:
+ shutil.rmtree( work_dir )
+ except:
+ pass
+ return tool_dependencies_missing
+ @web.expose
+ @web.require_admin
def deactivate_or_uninstall_repository( self, trans, **kwd ):
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
@@ -540,7 +574,8 @@
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
- repository = get_repository( trans, kwd[ 'id' ] )
+ repository_id = kwd[ 'id' ]
+ repository = get_repository( trans, repository_id )
description = util.restore_text( params.get( 'description', repository.description ) )
shed_tool_conf, tool_path, relative_install_dir = get_tool_panel_config_tool_path_install_dir( trans.app, repository )
repo_files_dir = os.path.abspath( os.path.join( relative_install_dir, repository.name ) )
@@ -558,34 +593,7 @@
trans.sa_session.add( repository )
trans.sa_session.flush()
message = "Repository metadata has been reset."
- tool_dependencies_missing = False
- if repository.includes_tool_dependencies:
- # See if any tool dependencies need to be installed, get the tool_dependencies.xml file from the repository.
- work_dir = make_tmp_directory()
- tool_dependencies_config = get_config_from_repository( trans.app,
- 'tool_dependencies.xml',
- repository,
- repository.changeset_revision,
- work_dir,
- install_dir=relative_install_dir )
- # Parse the tool_dependencies.xml config.
- tree = ElementTree.parse( tool_dependencies_config )
- root = tree.getroot()
- ElementInclude.include( root )
- fabric_version_checked = False
- for elem in root:
- if elem.tag == 'package':
- package_name = elem.get( 'name', None )
- package_version = elem.get( 'version', None )
- if package_name and package_version:
- install_dir = get_install_dir( trans.app, repository, repository.installed_changeset_revision, package_name, package_version )
- if not_installed( install_dir ):
- tool_dependencies_missing = True
- break
- try:
- shutil.rmtree( work_dir )
- except:
- pass
+ tool_dependencies_missing = self.check_installed_tool_dependencies( trans, repository_id, relative_install_dir )
return trans.fill_template( '/admin/tool_shed_repository/manage_repository.mako',
repository=repository,
description=description,
@@ -782,7 +790,7 @@
repository = get_repository_by_shed_name_owner_changeset_revision( trans.app, tool_shed_url, name, owner, changeset_revision )
if changeset_revision and latest_changeset_revision and latest_ctx_rev:
if changeset_revision == latest_changeset_revision:
- message = "The cloned tool shed repository named '%s' is current (there are no updates available)." % name
+ message = "The installed repository named '%s' is current, there are no updates available. " % name
else:
shed_tool_conf, tool_path, relative_install_dir = get_tool_panel_config_tool_path_install_dir( trans.app, repository )
if relative_install_dir:
@@ -801,13 +809,17 @@
repository.update_available = False
trans.sa_session.add( repository )
trans.sa_session.flush()
- message = "The cloned repository named '%s' has been updated to change set revision '%s'." % \
- ( name, latest_changeset_revision )
+ message = "The installed repository named '%s' has been updated to change set revision '%s'. " % ( name, latest_changeset_revision )
+ # See if any tool dependencies can be installed.
+ shed_tool_conf, tool_path, relative_install_dir = get_tool_panel_config_tool_path_install_dir( trans.app, repository )
+ tool_dependencies_missing = self.check_installed_tool_dependencies( trans, trans.security.encode_id( repository.id ), relative_install_dir )
+ if tool_dependencies_missing:
+ message += "Select <b>Install tool dependencies</b> from the repository's pop-up menu to install tool dependencies."
else:
- message = "The directory containing the cloned repository named '%s' cannot be found." % name
+ message = "The directory containing the installed repository named '%s' cannot be found. " % name
status = 'error'
else:
- message = "The latest changeset revision could not be retrieved for the repository named '%s'." % name
+ message = "The latest changeset revision could not be retrieved for the installed repository named '%s'. " % name
status = 'error'
return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
action='manage_repository',
diff -r 5343150e167ea8682f25849e7b01e1f40c4b440d -r 41f72445078999fd2a3f6471ee319a0be977097c lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -599,6 +599,9 @@
repository = get_repository_by_name_and_owner( trans, name, owner )
repo_dir = repository.repo_path
repo = hg.repository( get_configured_ui(), repo_dir )
+ # Default to the current changeset revision.
+ update_to_ctx = get_changectx_for_changeset( repo, changeset_revision )
+ latest_changeset_revision = changeset_revision
from_update_manager = webapp == 'update_manager'
if from_update_manager:
update = 'true'
@@ -608,80 +611,45 @@
url = '%sadmin_toolshed/update_to_changeset_revision?tool_shed_url=%s' % ( galaxy_url, url_for( '/', qualified=True ) )
url += '&name=%s&owner=%s&changeset_revision=%s&latest_changeset_revision=' % ( repository.name, repository.user.username, changeset_revision )
if changeset_revision == repository.tip:
- # If changeset_revision is the repository tip, we know there are no additional updates for the tools.
+ # If changeset_revision is the repository tip, there are no additional updates.
if from_update_manager:
return no_update
# Return the same value for changeset_revision and latest_changeset_revision.
- url += repository.tip
+ url += latest_changeset_revision
else:
repository_metadata = get_repository_metadata_by_changeset_revision( trans,
trans.security.encode_id( repository.id ),
changeset_revision )
if repository_metadata:
- # If changeset_revision is in the repository_metadata table for this repository, then we know there are no additional updates
- # for the tools.
+ # If changeset_revision is in the repository_metadata table for this repository, there are no additional updates.
if from_update_manager:
return no_update
else:
# Return the same value for changeset_revision and latest_changeset_revision.
- url += changeset_revision
+ url += latest_changeset_revision
else:
- # TODO: Re-engineer this to define the change set for update to be the one just before the next change set in the repository_metadata
- # table for this repository.
# The changeset_revision column in the repository_metadata table has been updated with a new changeset_revision value since the
- # repository was cloned. Load each tool in the repository's changeset_revision to generate a list of tool guids, since guids
- # differentiate tools by id and version.
- ctx = get_changectx_for_changeset( repo, changeset_revision )
- if ctx is not None:
- work_dir = make_tmp_directory()
- tool_guids = []
- for filename in ctx:
- # Find all tool configs in this repository changeset_revision.
- if filename not in NOT_TOOL_CONFIGS and filename.endswith( '.xml' ):
- is_tool_config, valid, tool, error_message = load_tool_from_tmp_directory( trans,
- repo,
- repo_dir,
- ctx,
- filename,
- work_dir )
- if valid and tool is not None:
- tool_guids.append( generate_tool_guid( trans, repository, tool ) )
- tool_guids.sort()
- if tool_guids:
- # Compare our list of tool guids against those in each repository_metadata record for the repository to find the
- # repository_metadata record with the changeset_revision value we want to pass back to the caller.
- found = False
- for repository_metadata in get_repository_metadata_by_repository_id( trans, trans.security.encode_id( repository.id ) ):
- metadata = repository_metadata.metadata
- metadata_tool_guids = []
- for tool_dict in metadata[ 'tools' ]:
- metadata_tool_guids.append( tool_dict[ 'guid' ] )
- metadata_tool_guids.sort()
- if tool_guids == metadata_tool_guids:
- # We've found the repository_metadata record whose changeset_revision value has been updated.
- if from_update_manager:
- return update
- url += repository_metadata.changeset_revision
- # Get the ctx_rev for the discovered changeset_revision.
- latest_ctx = get_changectx_for_changeset( repo, repository_metadata.changeset_revision )
- found = True
- break
- if not found:
- # There must be a problem in the data, so we'll just send back the received changeset_revision.
- log.debug( "Possible data corruption - updated repository_metadata cannot be found for repository id %d." % repository.id )
- if from_update_manager:
- return no_update
- url += changeset_revision
- else:
- # There are no tools in the changeset_revision, so no tool updates are possible.
- if from_update_manager:
- return no_update
- url += changeset_revision
- try:
- shutil.rmtree( work_dir )
- except:
- pass
- url += '&latest_ctx_rev=%s' % str( latest_ctx.rev() )
+ # repository was installed. We need to find the changeset_revision to which we need to update.
+ update_to_changeset_hash = None
+ for changeset in repo.changelog:
+ changeset_hash = str( repo.changectx( changeset ) )
+ ctx = get_changectx_for_changeset( repo, changeset_hash )
+ if update_to_changeset_hash:
+ if get_repository_metadata_by_changeset_revision( trans, trans.security.encode_id( repository.id ), changeset_hash ):
+ # We found a RepositoryMetadata record.
+ if changeset_hash == repository.tip:
+ # The current ctx is the repository tip, so use it.
+ update_to_ctx = get_changectx_for_changeset( repo, changeset_hash )
+ latest_changeset_revision = changeset_hash
+ else:
+ update_to_ctx = get_changectx_for_changeset( repo, update_to_changeset_hash )
+ latest_changeset_revision = update_to_changeset_hash
+ break
+ elif not update_to_changeset_hash and changeset_hash == changeset_revision:
+ # We've found the changeset in the changelog for which we need to get the next update.
+ update_to_changeset_hash = changeset_hash
+ url += str( latest_changeset_revision )
+ url += '&latest_ctx_rev=%s' % str( update_to_ctx.rev() )
return trans.response.send_redirect( url )
@web.expose
def contact_owner( self, trans, id, **kwd ):
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: greg: Fix for rendering the url for cloning a tool shed repository.
by Bitbucket 06 Jun '12
by Bitbucket 06 Jun '12
06 Jun '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/5343150e167e/
changeset: 5343150e167e
user: greg
date: 2012-06-06 15:48:15
summary: Fix for rendering the url for cloning a tool shed repository.
affected #: 1 file
diff -r da5b91a86b2f739eb9deb8dc51fe16446e5cda35 -r 5343150e167ea8682f25849e7b01e1f40c4b440d lib/galaxy/webapps/community/controllers/common.py
--- a/lib/galaxy/webapps/community/controllers/common.py
+++ b/lib/galaxy/webapps/community/controllers/common.py
@@ -407,12 +407,13 @@
def generate_clone_url( trans, repository_id ):
"""Generate the URL for cloning a repository."""
repository = get_repository( trans, repository_id )
- protocol, base = trans.request.base.split( '://' )
+ base_url = url_for( '/', qualified=True ).rstrip( '/' )
if trans.user:
+ protocol, base = base_url.split( '://' )
username = '%s@' % trans.user.username
+ return '%s://%s%s/repos/%s/%s' % ( protocol, username, base, repository.user.username, repository.name )
else:
- username = ''
- return '%s://%s%s/repos/%s/%s' % ( protocol, username, base, repository.user.username, repository.name )
+ return '%s/repos/%s/%s' % ( base_url, repository.user.username, repository.name )
def generate_metadata_for_changeset_revision( trans, repo, id, ctx, changeset_revision, repo_dir, updating_tip=False ):
if updating_tip:
# If a push from the command line is occurring, update the repository files on disk before setting metadata.
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: fubar: Added __admin_users__ as a parameter so tools can check __user_email__ and refuse to run unless
by Bitbucket 05 Jun '12
by Bitbucket 05 Jun '12
05 Jun '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/da5b91a86b2f/
changeset: da5b91a86b2f
user: fubar
date: 2012-06-06 06:02:41
summary: Added __admin_users__ as a parameter so tools can check __user_email__ and refuse to run unless
being invoked by a local admin - useful for any potentially dangerous tools.
affected #: 1 file
diff -r a4196ffaf11e22b9b0a76fe6aa1cbbcccd2fb219 -r da5b91a86b2f739eb9deb8dc51fe16446e5cda35 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -2056,6 +2056,7 @@
# datatypes conf path, so we can load the datatypes registry
param_dict['__root_dir__'] = param_dict['GALAXY_ROOT_DIR'] = os.path.abspath( self.app.config.root )
param_dict['__datatypes_config__'] = param_dict['GALAXY_DATATYPES_CONF_FILE'] = self.app.datatypes_registry.integrated_datatypes_configs
+ param_dict['__admin_users__'] = self.app.config.admin_users
# Return the dictionary of parameters
return param_dict
def build_param_file( self, param_dict, directory=None ):
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: fubar: Adding a new datatype to datatypes_conf.xml.sample - toolshed compatible gzips created by a new automated
by Bitbucket 05 Jun '12
by Bitbucket 05 Jun '12
05 Jun '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/a4196ffaf11e/
changeset: a4196ffaf11e
user: fubar
date: 2012-06-06 04:08:55
summary: Adding a new datatype to datatypes_conf.xml.sample - toolshed compatible gzips created by a new automated
script wrapper.
affected #: 1 file
diff -r 0b2d939a7870074977a30d7e9b57cd0c434fe637 -r a4196ffaf11e22b9b0a76fe6aa1cbbcccd2fb219 datatypes_conf.xml.sample
--- a/datatypes_conf.xml.sample
+++ b/datatypes_conf.xml.sample
@@ -89,6 +89,7 @@
<converter file="gff_to_interval_index_converter.xml" target_datatype="interval_index"/><converter file="gff_to_summary_tree_converter.xml" target_datatype="summary_tree"/></datatype>
+ <datatype extension="toolshed.gz" type="galaxy.datatypes.binary:Binary" mimetype="multipart/x-gzip" subclass="True" /><datatype extension="h5" type="galaxy.datatypes.binary:Binary" mimetype="application/octet-stream" subclass="True" /><datatype extension="html" type="galaxy.datatypes.images:Html" mimetype="text/html"/><datatype extension="interval" type="galaxy.datatypes.interval:Interval" display_in_upload="true">
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: jgoecks: Foundation for parameter sweeping visualization.
by Bitbucket 05 Jun '12
by Bitbucket 05 Jun '12
05 Jun '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/0b2d939a7870/
changeset: 0b2d939a7870
user: jgoecks
date: 2012-06-05 23:10:23
summary: Foundation for parameter sweeping visualization.
affected #: 6 files
diff -r 91217d6ead5a58609e0cc0ac839d2c2a6bc94691 -r 0b2d939a7870074977a30d7e9b57cd0c434fe637 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -2407,10 +2407,19 @@
if isinstance( input, DataToolParameter ):
param_dict.update( { 'type' : 'data', 'html' : urllib.quote( input.get_html( trans ) ) } )
elif isinstance( input, SelectToolParameter ):
- param_dict.update( { 'type' : 'select', 'html' : urllib.quote( input.get_html( trans ) ) } )
+ param_dict.update( { 'type' : 'select',
+ 'html' : urllib.quote( input.get_html( trans ) ),
+ 'options': input.static_options
+ } )
elif isinstance( input, Conditional ):
# TODO.
pass
+ elif isinstance( input, ( IntegerToolParameter, FloatToolParameter ) ):
+ param_dict.update( { 'type' : 'number', 'init_value' : input.value,
+ 'html' : urllib.quote( input.get_html( trans ) ),
+ 'min': input.min,
+ 'max': input.max
+ } )
else:
param_dict.update( { 'type' : '??', 'init_value' : input.value, \
'html' : urllib.quote( input.get_html( trans ) ) } )
diff -r 91217d6ead5a58609e0cc0ac839d2c2a6bc94691 -r 0b2d939a7870074977a30d7e9b57cd0c434fe637 lib/galaxy/web/controllers/tracks.py
--- a/lib/galaxy/web/controllers/tracks.py
+++ b/lib/galaxy/web/controllers/tracks.py
@@ -762,6 +762,15 @@
output_dataset = joda.dataset
return self.add_track_async( trans, output_dataset.id )
+
+ @web.expose
+ @web.require_login( "use Galaxy visualizations", use_panels=True )
+ def paramamonster( self, trans, hda_ldda, dataset_id ):
+ # Get dataset.
+ dataset = self._get_dataset( trans, hda_ldda, dataset_id )
+
+ return trans.fill_template_mako( "visualization/paramamonster.mako", dataset=dataset,
+ tool=self.app.toolbox.tools_by_id[ 'cufflinks' ].to_dict( trans, for_display=True ) )
@web.expose
@web.require_login( "use Galaxy visualizations", use_panels=True )
diff -r 91217d6ead5a58609e0cc0ac839d2c2a6bc94691 -r 0b2d939a7870074977a30d7e9b57cd0c434fe637 static/scripts/packed/viz/paramamonster.js
--- /dev/null
+++ b/static/scripts/packed/viz/paramamonster.js
@@ -0,0 +1,1 @@
+var ToolParameterTree=Backbone.Model.extend({defaults:{tool:null,samples:4},initialize:function(d){var c=this.get("tool"),b=this.get("samples"),a=c.get("inputs").filter(function(h){return(["number","select"].indexOf(h.get("type"))!==-1)}),f=_.map(a,function(h){return h.get("name")});sampling=_.map(a,function(h){var i=h.get("type");if(i==="number"){return d3.scale.linear().domain([h.get("min"),h.get("max")]).ticks(b)}else{if(i==="select"){return _.map(h.get("options"),function(j){return j[0]})}}});var g=function(j,h,i){if(h.length-1===i){return _.map(h[i],function(k){return{name:k}})}return _.map(h[i],function(k){return{name:j[i]+":"+k,children:g(j,h,i+1)}})};var e={name:"Parameter Tree for "+c.get("name"),children:g(f,sampling,0)};this.set("valid_inputs",a);this.set("tree_data",e)}});var TileView=Backbone.View.extend({});var ToolParameterTreeView=Backbone.View.extend({className:"paramamonster",initialize:function(a){this.model=a.model},render:function(){var e=960,b=2000;var a=d3.layout.cluster().size([b,e-160]);var d=d3.svg.diagonal().projection(function(i){return[i.y,i.x]});var h=d3.select(this.$el[0]).append("svg").attr("width",e).attr("height",b).append("g").attr("transform","translate(80, 0)");var c=a.nodes(this.model.get("tree_data"));var g=h.selectAll("path.link").data(a.links(c)).enter().append("path").attr("class","link").attr("d",d);var f=h.selectAll("g.node").data(c).enter().append("g").attr("class","node").attr("transform",function(i){return"translate("+i.y+","+i.x+")"});f.append("circle").attr("r",4.5);f.append("text").attr("dx",function(i){return i.children?-8:8}).attr("dy",3).attr("text-anchor",function(i){return i.children?"end":"start"}).text(function(i){return i.name})}});
\ No newline at end of file
diff -r 91217d6ead5a58609e0cc0ac839d2c2a6bc94691 -r 0b2d939a7870074977a30d7e9b57cd0c434fe637 static/scripts/viz/paramamonster.js
--- /dev/null
+++ b/static/scripts/viz/paramamonster.js
@@ -0,0 +1,123 @@
+/**
+ * Visualization and components for ParamaMonster, a visualization for exploring a tool's parameter space via
+ * genomic visualization.
+ */
+
+var ToolParameterTree = Backbone.Model.extend({
+ defaults: {
+ tool: null,
+ samples: 4
+ },
+
+
+ initialize: function(options) {
+ //
+ // -- Create tree data from tool. --
+ //
+
+ // Valid inputs for tree are number, select parameters.
+ var tool = this.get('tool'),
+ samples = this.get('samples'),
+ inputs = tool.get('inputs').filter(function(input) {
+ return ( ['number', 'select'].indexOf(input.get('type')) !== -1 );
+ }),
+ inputs_names = _.map(inputs, function(i) { return i.get('name')});
+ // Sample from all valid inputs.
+ sampling = _.map(inputs, function(input) {
+ var type = input.get('type');
+ if (type === 'number') {
+ return d3.scale.linear().domain([input.get('min'), input.get('max')]).ticks(samples);
+ }
+ else if (type === 'select') {
+ return _.map(input.get('options'), function(option) {
+ return option[0];
+ });
+ }
+ });
+
+ /**
+ * Returns tree data.
+ */
+ var create_tree_data = function(param_names, param_settings, index) {
+ // Terminate when last parameter setting is reached.
+ if (param_settings.length - 1 === index) {
+ return _.map(param_settings[index], function(setting) {
+ return {
+ name: setting
+ }
+ });
+ }
+
+ // Recurse to handle other parameters.
+ return _.map(param_settings[index], function(setting) {
+ return {
+ name: param_names[index] + ':' + setting,
+ children: create_tree_data(param_names, param_settings, index + 1)
+ }
+ });
+ };
+
+ var tree_data = {
+ name: 'Parameter Tree for ' + tool.get('name'),
+ children: create_tree_data(inputs_names, sampling, 0)
+ };
+
+ // Set valid inputs, tree data for later use.
+ this.set('valid_inputs', inputs);
+ this.set('tree_data', tree_data);
+ }
+
+});
+
+var TileView = Backbone.View.extend({
+
+});
+
+var ToolParameterTreeView = Backbone.View.extend({
+ className: 'paramamonster',
+
+ initialize: function(options) {
+ this.model = options.model;
+ },
+
+ render: function() {
+ var width = 960,
+ height = 2000;
+
+ var cluster = d3.layout.cluster()
+ .size([height, width - 160]);
+
+ var diagonal = d3.svg.diagonal()
+ .projection(function(d) { return [d.y, d.x]; });
+
+ var vis = d3.select(this.$el[0])
+ .append("svg")
+ .attr("width", width)
+ .attr("height", height)
+ .append("g")
+ .attr("transform", "translate(80, 0)");
+
+ var nodes = cluster.nodes(this.model.get('tree_data'));
+
+ var link = vis.selectAll("path.link")
+ .data(cluster.links(nodes))
+ .enter().append("path")
+ .attr("class", "link")
+ .attr("d", diagonal);
+
+ var node = vis.selectAll("g.node")
+ .data(nodes)
+ .enter().append("g")
+ .attr("class", "node")
+ .attr("transform", function(d) { return "translate(" + d.y + "," + d.x + ")"; })
+
+ node.append("circle")
+ .attr("r", 4.5);
+
+ node.append("text")
+ .attr("dx", function(d) { return d.children ? -8 : 8; })
+ .attr("dy", 3)
+ .attr("text-anchor", function(d) { return d.children ? "end" : "start"; })
+ .text(function(d) { return d.name; });
+ }
+});
\ No newline at end of file
diff -r 91217d6ead5a58609e0cc0ac839d2c2a6bc94691 -r 0b2d939a7870074977a30d7e9b57cd0c434fe637 templates/visualization/circster.mako
--- a/templates/visualization/circster.mako
+++ b/templates/visualization/circster.mako
@@ -16,7 +16,7 @@
<%def name="javascripts()">
${parent.javascripts()}
- ${h.js( "libs/d3", "mvc/visualization" )}
+ ${h.js( "libs/d3", "viz/visualization" )}
<script type="text/javascript">
$(function() {
@@ -81,7 +81,7 @@
// -- Render viz. --
circster.render();
- $('#vis').append(circster.$el);
+ $('#vis').append(circster.$el);
});
</script></%def>
diff -r 91217d6ead5a58609e0cc0ac839d2c2a6bc94691 -r 0b2d939a7870074977a30d7e9b57cd0c434fe637 templates/visualization/paramamonster.mako
--- /dev/null
+++ b/templates/visualization/paramamonster.mako
@@ -0,0 +1,62 @@
+<%inherit file="/webapps/galaxy/base_panels.mako"/>
+
+<%def name="init()">
+<%
+ self.has_left_panel=False
+ self.has_right_panel=False
+ self.active_view="visualization"
+ self.message_box_visible=False
+%>
+</%def>
+
+<%def name="stylesheets()">
+ ${parent.stylesheets()}
+ <style>
+ .unified-panel-body {
+ overflow: auto;
+ }
+ .link {
+ fill: none;
+ stroke: #ccc;
+ stroke-width: 1.5px;
+ }
+ .node {
+ font: 10px sans-serif;
+ }
+ .node circle {
+ fill: #fff;
+ stroke: steelblue;
+ stroke-width: 1.5px;
+ }
+ </style>
+</%def>
+
+<%def name="javascripts()">
+ ${parent.javascripts()}
+
+ ${h.templates( "tool_link", "panel_section", "tool_search" )}
+ ${h.js( "libs/d3", "viz/visualization", "viz/paramamonster", "mvc/tools" )}
+
+ <script type="text/javascript">
+ $(function() {
+ // -- Viz set up. --
+
+ var tool = new Tool(JSON.parse('${ h.to_json_string( tool ) }')),
+ tool_param_tree = new ToolParameterTree({ tool: tool }),
+ tool_param_tree_view = new ToolParameterTreeView({ model: tool_param_tree });
+
+ tool_param_tree_view.render();
+ $('#vis').append(tool_param_tree_view.$el);
+ });
+ </script>
+</%def>
+
+<%def name="center_panel()">
+ <div class="unified-panel-header" unselectable="on">
+ <div class="unified-panel-header-inner">
+ <div style="float:left;" id="title"></div>
+ </div>
+ <div style="clear: both"></div>
+ </div>
+ <div id="vis" class="unified-panel-body"></div>
+</%def>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: greg: Fixes for setting metadata on tool shed repositories.
by Bitbucket 05 Jun '12
by Bitbucket 05 Jun '12
05 Jun '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/91217d6ead5a/
changeset: 91217d6ead5a
user: greg
date: 2012-06-05 20:59:42
summary: Fixes for setting metadata on tool shed repositories.
affected #: 2 files
diff -r d7580315fa206bcc64bc7607ee52bccfbf95e42f -r 91217d6ead5a58609e0cc0ac839d2c2a6bc94691 lib/galaxy/webapps/community/controllers/common.py
--- a/lib/galaxy/webapps/community/controllers/common.py
+++ b/lib/galaxy/webapps/community/controllers/common.py
@@ -1010,12 +1010,12 @@
ancestor_changeset_revision = None
ancestor_metadata_dict = None
elif ancestor_metadata_dict:
- # Our current change set has no metadata, but our ancestor change set has metadata, so save it.
- create_or_update_repository_metadata( trans, id, repository, ancestor_changeset_revision, ancestor_metadata_dict )
- # Keep track of the changeset_revisions that we've persisted.
- changeset_revisions.append( ancestor_changeset_revision )
- ancestor_changeset_revision = None
- ancestor_metadata_dict = None
+ if not ctx.children():
+ # We're at the end of the change log.
+ create_or_update_repository_metadata( trans, id, repository, current_changeset_revision, ancestor_metadata_dict )
+ changeset_revisions.append( current_changeset_revision )
+ ancestor_changeset_revision = None
+ ancestor_metadata_dict = None
clean_repository_metadata( trans, id, changeset_revisions )
add_repository_metadata_tool_versions( trans, id, changeset_revisions )
if missing_sample_files:
@@ -1083,7 +1083,7 @@
repository_metadata.metadata = metadata_dict
trans.sa_session.add( repository_metadata )
trans.sa_session.flush()
- elif not invalid_files:
+ elif updating_tip and len( repo ) == 1 and not invalid_files:
message = "Revision '%s' includes no tools, datatypes or exported workflows for which metadata can " % str( changeset_revision )
message += "be defined so this revision cannot be automatically installed into a local Galaxy instance."
status = "error"
@@ -1120,7 +1120,6 @@
status = 'error'
return message, status
def set_repository_metadata_due_to_new_tip( trans, id, repository, content_alert_str=None, **kwd ):
- message = util.restore_text( kwd.get( 'message', '' ) )
# Set metadata on the repository tip.
error_message, status = set_repository_metadata( trans, id, repository.tip, content_alert_str=content_alert_str, **kwd )
if not error_message:
diff -r d7580315fa206bcc64bc7607ee52bccfbf95e42f -r 91217d6ead5a58609e0cc0ac839d2c2a6bc94691 lib/galaxy/webapps/community/controllers/upload.py
--- a/lib/galaxy/webapps/community/controllers/upload.py
+++ b/lib/galaxy/webapps/community/controllers/upload.py
@@ -158,6 +158,8 @@
message += " %d files were removed from the repository root. " % len( files_to_remove )
kwd[ 'message' ] = message
set_repository_metadata_due_to_new_tip( trans, repository_id, repository, content_alert_str=content_alert_str, **kwd )
+ # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
+ reset_tool_data_tables( trans.app )
trans.response.send_redirect( web.url_for( controller='repository',
action='browse_repository',
id=repository_id,
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: greg: Fixes for handling tool shed repository files that have been renamed or deleted.
by Bitbucket 05 Jun '12
by Bitbucket 05 Jun '12
05 Jun '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/d7580315fa20/
changeset: d7580315fa20
user: greg
date: 2012-06-05 18:51:54
summary: Fixes for handling tool shed repository files that have been renamed or deleted.
affected #: 3 files
diff -r f551610b687003fc09cd6ff7bbc506d237c67a75 -r d7580315fa206bcc64bc7607ee52bccfbf95e42f lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -856,14 +856,26 @@
response.close()
return ctx_rev
def get_named_tmpfile_from_ctx( ctx, filename, dir ):
- fctx = ctx[ filename ]
- fh = tempfile.NamedTemporaryFile( 'wb', dir=dir )
- tmp_filename = fh.name
- fh.close()
- fh = open( tmp_filename, 'wb' )
- fh.write( fctx.data() )
- fh.close()
- return tmp_filename
+ filename = strip_path( filename )
+ for ctx_file in ctx.files():
+ ctx_file_name = strip_path( ctx_file )
+ if filename == ctx_file_name:
+ try:
+ # If the file was moved, its destination file contents will be returned here.
+ fctx = ctx[ ctx_file ]
+ except LookupError, e:
+ # Continue looking in case the file was moved.
+ fctx = None
+ continue
+ if fctx:
+ fh = tempfile.NamedTemporaryFile( 'wb', dir=dir )
+ tmp_filename = fh.name
+ fh.close()
+ fh = open( tmp_filename, 'wb' )
+ fh.write( fctx.data() )
+ fh.close()
+ return tmp_filename
+ return None
def get_repository_by_shed_name_owner_changeset_revision( app, tool_shed, name, owner, changeset_revision ):
sa_session = app.model.context.current
if tool_shed.find( '//' ) > 0:
diff -r f551610b687003fc09cd6ff7bbc506d237c67a75 -r d7580315fa206bcc64bc7607ee52bccfbf95e42f lib/galaxy/webapps/community/controllers/common.py
--- a/lib/galaxy/webapps/community/controllers/common.py
+++ b/lib/galaxy/webapps/community/controllers/common.py
@@ -529,17 +529,22 @@
def get_file_context_from_ctx( ctx, filename ):
# We have to be careful in determining if we found the correct file because multiple files with the same name may be in different directories
# within ctx if the files were moved within the change set. For example, in the following ctx.files() list, the former may have been moved to
- # the latter:
- # ['tmap_wrapper_0.0.19/tool_data_table_conf.xml.sample', 'tmap_wrapper_0.3.3/tool_data_table_conf.xml.sample']
+ # the latter: ['tmap_wrapper_0.0.19/tool_data_table_conf.xml.sample', 'tmap_wrapper_0.3.3/tool_data_table_conf.xml.sample']. Another scenario
+ # is that the file has been deleted.
+ deleted = False
filename = strip_path( filename )
for ctx_file in ctx.files():
ctx_file_name = strip_path( ctx_file )
if filename == ctx_file_name:
try:
+ # If the file was moved, its destination will be returned here.
fctx = ctx[ ctx_file ]
return fctx
except LookupError, e:
- return 'DELETED'
+ # Set deleted for now, and continue looking in case the file was moved instead of deleted.
+ deleted = True
+ if deleted:
+ return 'DELETED'
return None
def get_latest_repository_metadata( trans, id ):
"""Get last metadata defined for a specified repository from the database"""
@@ -772,37 +777,37 @@
def load_tool_from_changeset_revision( trans, repository_id, changeset_revision, tool_config_filename ):
"""
Return a loaded tool whose tool config file name (e.g., filtering.xml) is the value of tool_config_filename. The value of changeset_revision
- is a valid (downloadable) changset revision. If changeset_revision is the repository tip, then the tool will be loaded from it's file on disk.
- Otherwise, the tool config will be located in the repository manifest between the received valid changeset revision and the previous valid
- changeset revision (if one exists) or the first changeset revision in the repository (if one doesn't).
+ is a valid (downloadable) changset revision. The tool config will be located in the repository manifest between the received valid changeset
+ revision and the first changeset revision in the repository, searching backwards.
"""
def load_from_tmp_config( ctx, ctx_file, work_dir ):
tool = None
message = ''
tmp_tool_config = get_named_tmpfile_from_ctx( ctx, ctx_file, work_dir )
- element_tree = util.parse_xml( tmp_tool_config )
- element_tree_root = element_tree.getroot()
- # Look for code files required by the tool config.
- tmp_code_files = []
- for code_elem in element_tree_root.findall( 'code' ):
- code_file_name = code_elem.get( 'file' )
- tmp_code_file_name = copy_file_from_manifest( repo, ctx, code_file_name, work_dir )
- if tmp_code_file_name:
- tmp_code_files.append( tmp_code_file_name )
- try:
- tool = load_tool( trans, tmp_tool_config )
- except Exception, e:
- tool = None
- message = "Error loading tool: %s. " % str( e )
- for tmp_code_file in tmp_code_files:
+ if tmp_tool_config:
+ element_tree = util.parse_xml( tmp_tool_config )
+ element_tree_root = element_tree.getroot()
+ # Look for code files required by the tool config.
+ tmp_code_files = []
+ for code_elem in element_tree_root.findall( 'code' ):
+ code_file_name = code_elem.get( 'file' )
+ tmp_code_file_name = copy_file_from_manifest( repo, ctx, code_file_name, work_dir )
+ if tmp_code_file_name:
+ tmp_code_files.append( tmp_code_file_name )
try:
- os.unlink( tmp_code_file )
+ tool = load_tool( trans, tmp_tool_config )
+ except Exception, e:
+ tool = None
+ message = "Error loading tool: %s. " % str( e )
+ for tmp_code_file in tmp_code_files:
+ try:
+ os.unlink( tmp_code_file )
+ except:
+ pass
+ try:
+ os.unlink( tmp_tool_config )
except:
pass
- try:
- os.unlink( tmp_tool_config )
- except:
- pass
return tool, message
tool_config_filename = strip_path( tool_config_filename )
repository = get_repository( trans, repository_id )
@@ -842,41 +847,42 @@
valid = False
error_message = ''
tmp_config = get_named_tmpfile_from_ctx( ctx, filename, dir )
- if not ( check_binary( tmp_config ) or check_image( tmp_config ) or check_gzip( tmp_config )[ 0 ]
- or check_bz2( tmp_config )[ 0 ] or check_zip( tmp_config ) ):
- try:
- # Make sure we're looking at a tool config and not a display application config or something else.
- element_tree = util.parse_xml( tmp_config )
- element_tree_root = element_tree.getroot()
- is_tool_config = element_tree_root.tag == 'tool'
- except Exception, e:
- log.debug( "Error parsing %s, exception: %s" % ( tmp_config, str( e ) ) )
- is_tool_config = False
- if is_tool_config:
- # Load entries into the tool_data_tables if the tool requires them.
- tool_data_table_config = copy_file_from_manifest( repo, ctx, 'tool_data_table_conf.xml.sample', dir )
- if tool_data_table_config:
- error, correction_msg = handle_sample_tool_data_table_conf_file( trans.app, tool_data_table_config )
- # Look for code files required by the tool config. The directory to which dir refers should be removed by the caller.
- for code_elem in element_tree_root.findall( 'code' ):
- code_file_name = code_elem.get( 'file' )
- if not os.path.exists( os.path.join( dir, code_file_name ) ):
- tmp_code_file_name = copy_file_from_disk( code_file_name, repo_dir, dir )
- if tmp_code_file_name is None:
- tmp_code_file_name = copy_file_from_manifest( repo, ctx, code_file_name, dir )
+ if tmp_config:
+ if not ( check_binary( tmp_config ) or check_image( tmp_config ) or check_gzip( tmp_config )[ 0 ]
+ or check_bz2( tmp_config )[ 0 ] or check_zip( tmp_config ) ):
try:
- tool = load_tool( trans, tmp_config )
- valid = True
- except KeyError, e:
- valid = False
- error_message = 'This file requires an entry for "%s" in the tool_data_table_conf.xml file. Upload a file ' % str( e )
- error_message += 'named tool_data_table_conf.xml.sample to the repository that includes the required entry to correct '
- error_message += 'this error. '
+ # Make sure we're looking at a tool config and not a display application config or something else.
+ element_tree = util.parse_xml( tmp_config )
+ element_tree_root = element_tree.getroot()
+ is_tool_config = element_tree_root.tag == 'tool'
except Exception, e:
- valid = False
- error_message = str( e )
- # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
- reset_tool_data_tables( trans.app )
+ log.debug( "Error parsing %s, exception: %s" % ( tmp_config, str( e ) ) )
+ is_tool_config = False
+ if is_tool_config:
+ # Load entries into the tool_data_tables if the tool requires them.
+ tool_data_table_config = copy_file_from_manifest( repo, ctx, 'tool_data_table_conf.xml.sample', dir )
+ if tool_data_table_config:
+ error, correction_msg = handle_sample_tool_data_table_conf_file( trans.app, tool_data_table_config )
+ # Look for code files required by the tool config. The directory to which dir refers should be removed by the caller.
+ for code_elem in element_tree_root.findall( 'code' ):
+ code_file_name = code_elem.get( 'file' )
+ if not os.path.exists( os.path.join( dir, code_file_name ) ):
+ tmp_code_file_name = copy_file_from_disk( code_file_name, repo_dir, dir )
+ if tmp_code_file_name is None:
+ tmp_code_file_name = copy_file_from_manifest( repo, ctx, code_file_name, dir )
+ try:
+ tool = load_tool( trans, tmp_config )
+ valid = True
+ except KeyError, e:
+ valid = False
+ error_message = 'This file requires an entry for "%s" in the tool_data_table_conf.xml file. Upload a file ' % str( e )
+ error_message += 'named tool_data_table_conf.xml.sample to the repository that includes the required entry to correct '
+ error_message += 'this error. '
+ except Exception, e:
+ valid = False
+ error_message = str( e )
+ # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
+ reset_tool_data_tables( trans.app )
return is_tool_config, valid, tool, error_message
def new_tool_metadata_required( trans, id, metadata_dict ):
"""
diff -r f551610b687003fc09cd6ff7bbc506d237c67a75 -r d7580315fa206bcc64bc7607ee52bccfbf95e42f lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -10,7 +10,7 @@
from galaxy.web.framework.helpers import time_ago, iff, grids
from galaxy.util.json import from_json_string, to_json_string
from galaxy.model.orm import *
-from galaxy.util.shed_util import get_changectx_for_changeset, get_configured_ui, get_named_tmpfile_from_ctx, make_tmp_directory, NOT_TOOL_CONFIGS, strip_path
+from galaxy.util.shed_util import get_changectx_for_changeset, get_configured_ui, make_tmp_directory, NOT_TOOL_CONFIGS, strip_path
from galaxy.tool_shed.encoding_util import *
from common import *
@@ -1257,12 +1257,6 @@
repo = hg.repository( get_configured_ui(), repo_dir )
ctx = get_changectx_for_changeset( repo, changeset_revision )
invalid_message = ''
- work_dir = make_tmp_directory()
- for filename in ctx:
- ctx_file_name = strip_path( filename )
- if ctx_file_name == tool_config:
- tool_config_path = get_named_tmpfile_from_ctx( ctx, filename, work_dir )
- break
metadata_dict, invalid_files, deleted_sample_files = generate_metadata_for_changeset_revision( trans,
repo,
repository_id,
@@ -1275,17 +1269,11 @@
invalid_tool_config_name = strip_path( invalid_tool_config )
if tool_config == invalid_tool_config_name:
invalid_message = invalid_msg
- break
+ break
tool, error_message = load_tool_from_changeset_revision( trans, repository_id, changeset_revision, tool_config )
- #if error_message:
- # message += error_message
tool_state = self.__new_state( trans )
is_malicious = changeset_is_malicious( trans, repository_id, repository.tip )
try:
- shutil.rmtree( work_dir )
- except:
- pass
- try:
if invalid_message:
message = invalid_message
return trans.fill_template( "/webapps/community/repository/tool_form.mako",
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: greg: Refinements for locating desired files within a tool shed repository manifest and setting metadata on tool shed repositories.
by Bitbucket 05 Jun '12
by Bitbucket 05 Jun '12
05 Jun '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/f551610b6870/
changeset: f551610b6870
user: greg
date: 2012-06-05 17:15:14
summary: Refinements for locating desired files within a tool shed repository manifest and setting metadata on tool shed repositories.
affected #: 5 files
diff -r 29b8e39db1094cb5a5586c4f7b404428641d518c -r f551610b687003fc09cd6ff7bbc506d237c67a75 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -21,6 +21,7 @@
log = logging.getLogger( __name__ )
+INITIAL_CHANGELOG_HASH = '000000000000'
# Characters that must be html escaped
MAPPED_CHARS = { '>' :'>',
'<' :'<',
@@ -771,28 +772,15 @@
if str( ctx ) == changeset_revision:
return ctx
return None
-def get_config( config_file, repo, repo_dir, ctx, dir ):
- """Return config_filename if it exists in some changeset of the repository."""
- # First look on disk.
- for root, dirs, files in os.walk( repo_dir ):
- if root.find( '.hg' ) < 0:
- for name in files:
- if name == config_file:
- dest_file_name = os.path.join( dir, name )
- shutil.copy( os.path.abspath( os.path.join( root, name ) ), dest_file_name )
- return os.path.abspath( dest_file_name )
- # Next look in the current change set.
- for filename in ctx:
- ctx_file_name = strip_path( filename )
- if ctx_file_name == config_file:
- return get_named_tmpfile_from_ctx( ctx, filename, dir )
- # Finally look in the repository manifest.
- for changeset in repo.changelog:
- prev_ctx = repo.changectx( changeset )
- for ctx_file in prev_ctx.files():
+def get_config( config_file, repo, ctx, dir ):
+ """Return the latest version of config_filename from the repository manifest."""
+ config_file = strip_path( config_file )
+ for changeset in reversed_upper_bounded_changelog( repo, ctx ):
+ changeset_ctx = repo.changectx( changeset )
+ for ctx_file in changeset_ctx.files():
ctx_file_name = strip_path( ctx_file )
if ctx_file_name == config_file:
- return get_named_tmpfile_from_ctx( prev_ctx, filename, dir )
+ return get_named_tmpfile_from_ctx( changeset_ctx, ctx_file, dir )
return None
def get_config_from_disk( config_file, relative_install_dir ):
for root, dirs, files in os.walk( relative_install_dir ):
@@ -808,7 +796,7 @@
repo_files_dir = os.path.join( install_dir, repository.name )
repo = hg.repository( get_configured_ui(), repo_files_dir )
ctx = get_changectx_for_changeset( repo, changeset_revision )
- config = get_config( config_file, repo, repo_files_dir, ctx, dir )
+ config = get_config( config_file, repo, ctx, dir )
return config
def get_configured_ui():
# Configure any desired ui settings.
@@ -867,6 +855,15 @@
ctx_rev = response.read()
response.close()
return ctx_rev
+def get_named_tmpfile_from_ctx( ctx, filename, dir ):
+ fctx = ctx[ filename ]
+ fh = tempfile.NamedTemporaryFile( 'wb', dir=dir )
+ tmp_filename = fh.name
+ fh.close()
+ fh = open( tmp_filename, 'wb' )
+ fh.write( fctx.data() )
+ fh.close()
+ return tmp_filename
def get_repository_by_shed_name_owner_changeset_revision( app, tool_shed, name, owner, changeset_revision ):
sa_session = app.model.context.current
if tool_shed.find( '//' ) > 0:
@@ -1383,6 +1380,31 @@
def reset_tool_data_tables( app ):
# Reset the tool_data_tables to an empty dictionary.
app.tool_data_tables.data_tables = {}
+def reversed_lower_upper_bounded_changelog( repo, excluded_lower_bounds_changeset_revision, included_upper_bounds_changeset_revision ):
+ """
+ Return a reversed list of changesets in the repository changelog after the excluded_lower_bounds_changeset_revision, but up to and
+ including the included_upper_bounds_changeset_revision. The value of excluded_lower_bounds_changeset_revision will be the value of
+ INITIAL_CHANGELOG_HASH if no valid changesets exist before included_upper_bounds_changeset_revision.
+ """
+ # To set excluded_lower_bounds_changeset_revision, calling methods should do the following, where the value of changeset_revision
+ # is a downloadable changeset_revision.
+ # excluded_lower_bounds_changeset_revision = get_previous_valid_changset_revision( repository, repo, changeset_revision )
+ if excluded_lower_bounds_changeset_revision == INITIAL_CHANGELOG_HASH:
+ appending_started = True
+ else:
+ appending_started = False
+ reversed_changelog = []
+ for changeset in repo.changelog:
+ changeset_hash = str( repo.changectx( changeset ) )
+ if appending_started:
+ reversed_changelog.insert( 0, changeset )
+ if changeset_hash == excluded_lower_bounds_changeset_revision and not appending_started:
+ appending_started = True
+ if changeset_hash == included_upper_bounds_changeset_revision:
+ break
+ return reversed_changelog
+def reversed_upper_bounded_changelog( repo, included_upper_bounds_changeset_revision ):
+ return reversed_lower_upper_bounded_changelog( repo, INITIAL_CHANGELOG_HASH, included_upper_bounds_changeset_revision )
def strip_path( fpath ):
file_path, file_name = os.path.split( fpath )
return file_name
diff -r 29b8e39db1094cb5a5586c4f7b404428641d518c -r f551610b687003fc09cd6ff7bbc506d237c67a75 lib/galaxy/webapps/community/controllers/admin.py
--- a/lib/galaxy/webapps/community/controllers/admin.py
+++ b/lib/galaxy/webapps/community/controllers/admin.py
@@ -473,9 +473,12 @@
.filter( trans.model.Repository.table.c.deleted == False ):
try:
error_message, status = reset_all_metadata_on_repository( trans, trans.security.encode_id( repository.id ) )
- if error_message:
+ if status not in [ 'ok' ] and error_message:
log.debug( "Error attempting to reset metadata on repository '%s': %s" % ( repository.name, error_message ) )
unsuccessful_count += 1
+ elif status in [ 'ok' ] and error_message:
+ log.debug( "Successfully reset metadata on repository %s, but encountered this problem: %s" % ( repository.name, error_message ) )
+ successful_count += 1
else:
log.debug( "Successfully reset metadata on repository %s" % repository.name )
successful_count += 1
diff -r 29b8e39db1094cb5a5586c4f7b404428641d518c -r f551610b687003fc09cd6ff7bbc506d237c67a75 lib/galaxy/webapps/community/controllers/common.py
--- a/lib/galaxy/webapps/community/controllers/common.py
+++ b/lib/galaxy/webapps/community/controllers/common.py
@@ -6,8 +6,9 @@
from galaxy.util.json import from_json_string, to_json_string
from galaxy.util.hash_util import *
from galaxy.util.shed_util import copy_sample_file, generate_datatypes_metadata, generate_tool_dependency_metadata, generate_tool_metadata
-from galaxy.util.shed_util import generate_workflow_metadata, get_changectx_for_changeset, get_config, get_configured_ui, handle_sample_tool_data_table_conf_file
-from galaxy.util.shed_util import make_tmp_directory, NOT_TOOL_CONFIGS, reset_tool_data_tables, strip_path, to_html_escaped, to_html_str, update_repository
+from galaxy.util.shed_util import generate_workflow_metadata, get_changectx_for_changeset, get_config, get_configured_ui, get_named_tmpfile_from_ctx
+from galaxy.util.shed_util import handle_sample_tool_data_table_conf_file, INITIAL_CHANGELOG_HASH, make_tmp_directory, NOT_TOOL_CONFIGS, reset_tool_data_tables
+from galaxy.util.shed_util import reversed_upper_bounded_changelog, strip_path, to_html_escaped, to_html_str, update_repository
from galaxy.web.base.controller import *
from galaxy.webapps.community import model
from galaxy.model.orm import *
@@ -201,7 +202,7 @@
if options:
if options.tool_data_table or options.missing_tool_data_table_name:
# Make sure the repository contains a tool_data_table_conf.xml.sample file.
- sample_tool_data_table_conf = get_config( 'tool_data_table_conf.xml.sample', repo, repo_dir, ctx, dir )
+ sample_tool_data_table_conf = get_config( 'tool_data_table_conf.xml.sample', repo, ctx, dir )
if sample_tool_data_table_conf:
error, correction_msg = handle_sample_tool_data_table_conf_file( trans.app, sample_tool_data_table_conf )
if error:
@@ -254,9 +255,9 @@
# The metadata associated with ancestor_changeset_revision is ancestor_metadata_dict. This changeset_revision is an ancestor of
# current_changeset_revision which is associated with current_metadata_dict.
#
- # TODO: a new repository_metadata record will be created only when this method returns the string 'not equal and not subset'. However,
- # we're currently also returning the strings 'no metadata', 'equal' and 'subset', depending upon how the 2 change sets compare. We'll
- # leave things this way for the current time in case we discover a use for these additional result strings.
+ # A new repository_metadata record will be created only when this method returns the string 'not equal and not subset'. However, we're
+ # currently also returning the strings 'no metadata', 'equal' and 'subset', depending upon how the 2 change sets compare. We'll leave
+ # things this way for the current time in case we discover a use for these additional result strings.
ancestor_datatypes = ancestor_metadata_dict.get( 'datatypes', [] )
ancestor_tools = ancestor_metadata_dict.get( 'tools', [] )
ancestor_guids = [ tool_dict[ 'guid' ] for tool_dict in ancestor_tools ]
@@ -357,41 +358,18 @@
tmp_filename = None
return tmp_filename
def copy_file_from_manifest( repo, ctx, filename, dir ):
- """Copy a file named filename from somewhere in the repository manifest to the directory to which dir refers."""
- filename = strip_path( filename )
- fctx = None
- found = False
- # First see if the file is in ctx. We have to be careful in determining if we found the correct file because multiple files
- # with the same name may be in different directories within ctx if the repository owner moved the files as part of the change set.
- # For example, in the following ctx.files() list, the former may have been moved to the latter:
- # ['tmap_wrapper_0.0.19/tool_data_table_conf.xml.sample', 'tmap_wrapper_0.3.3/tool_data_table_conf.xml.sample']
- for ctx_file in ctx.files():
- ctx_file_name = strip_path( ctx_file )
- if filename == ctx_file_name:
- try:
- fctx = ctx[ ctx_file ]
- found = True
- break
- except:
- continue
- if not found:
- # Find the file in the repository manifest.
- for changeset in repo.changelog:
- prev_ctx = repo.changectx( changeset )
- for ctx_file in prev_ctx.files():
- ctx_file_name = strip_path( ctx_file )
- if filename == ctx_file_name:
- try:
- fctx = prev_ctx[ ctx_file ]
- break
- except:
- continue
- if fctx:
- file_path = os.path.join( dir, filename )
- fh = open( file_path, 'wb' )
- fh.write( fctx.data() )
- fh.close()
- return file_path
+ """
+ Copy the latest version of the file named filename from the repository manifest to the directory to which dir refers.
+ """
+ for changeset in reversed_upper_bounded_changelog( repo, ctx ):
+ changeset_ctx = repo.changectx( changeset )
+ fctx = get_file_context_from_ctx( changeset_ctx, filename )
+ if fctx and fctx not in [ 'DELETED' ]:
+ file_path = os.path.join( dir, filename )
+ fh = open( file_path, 'wb' )
+ fh.write( fctx.data() )
+ fh.close()
+ return file_path
return None
def create_or_update_repository_metadata( trans, id, repository, changeset_revision, metadata_dict ):
repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
@@ -444,10 +422,10 @@
invalid_tool_configs = []
original_tool_data_path = trans.app.config.tool_data_path
work_dir = make_tmp_directory()
- datatypes_config = get_config( 'datatypes_conf.xml', repo, repo_dir, ctx, work_dir )
+ datatypes_config = get_config( 'datatypes_conf.xml', repo, ctx, work_dir )
if datatypes_config:
metadata_dict = generate_datatypes_metadata( datatypes_config, metadata_dict )
- sample_files = get_sample_files( repo, repo_dir, dir=work_dir )
+ sample_files, deleted_sample_files = get_list_of_copied_sample_files( repo, ctx, dir=work_dir )
# Handle the tool_data_table_conf.xml.sample file if it is included in the repository.
if 'tool_data_table_conf.xml.sample' in sample_files:
tool_data_table_config = copy_file_from_manifest( repo, ctx, 'tool_data_table_conf.xml.sample', work_dir )
@@ -515,7 +493,7 @@
invalid_files.append( ( ctx_file_name, str( e ) ) )
if 'tools' in metadata_dict:
# Find tool_dependencies.xml if it exists. This step must be done after metadata for tools has been defined.
- tool_dependencies_config = get_config( 'tool_dependencies.xml', repo, repo_dir, ctx, work_dir )
+ tool_dependencies_config = get_config( 'tool_dependencies.xml', repo, ctx, work_dir )
if tool_dependencies_config:
metadata_dict = generate_tool_dependency_metadata( tool_dependencies_config, metadata_dict )
if invalid_tool_configs:
@@ -529,7 +507,7 @@
shutil.rmtree( work_dir )
except:
pass
- return metadata_dict, invalid_files
+ return metadata_dict, invalid_files, deleted_sample_files
def generate_tool_guid( trans, repository, tool ):
"""
Generate a guid for the received tool. The form of the guid is
@@ -548,21 +526,59 @@
return trans.sa_session.query( trans.model.Category ) \
.filter( trans.model.Category.table.c.deleted==False ) \
.order_by( trans.model.Category.table.c.name ).all()
+def get_file_context_from_ctx( ctx, filename ):
+ # We have to be careful in determining if we found the correct file because multiple files with the same name may be in different directories
+ # within ctx if the files were moved within the change set. For example, in the following ctx.files() list, the former may have been moved to
+ # the latter:
+ # ['tmap_wrapper_0.0.19/tool_data_table_conf.xml.sample', 'tmap_wrapper_0.3.3/tool_data_table_conf.xml.sample']
+ filename = strip_path( filename )
+ for ctx_file in ctx.files():
+ ctx_file_name = strip_path( ctx_file )
+ if filename == ctx_file_name:
+ try:
+ fctx = ctx[ ctx_file ]
+ return fctx
+ except LookupError, e:
+ return 'DELETED'
+ return None
def get_latest_repository_metadata( trans, id ):
"""Get last metadata defined for a specified repository from the database"""
return trans.sa_session.query( trans.model.RepositoryMetadata ) \
.filter( trans.model.RepositoryMetadata.table.c.repository_id == trans.security.decode_id( id ) ) \
.order_by( trans.model.RepositoryMetadata.table.c.id.desc() ) \
.first()
-def get_named_tmpfile_from_ctx( ctx, filename, dir ):
- fctx = ctx[ filename ]
- fh = tempfile.NamedTemporaryFile( 'wb', dir=dir )
- tmp_filename = fh.name
- fh.close()
- fh = open( tmp_filename, 'wb' )
- fh.write( fctx.data() )
- fh.close()
- return tmp_filename
+def get_list_of_copied_sample_files( repo, ctx, dir ):
+ """
+ Find all sample files (files in the repository with the special .sample extension) in the reversed repository manifest up to ctx. Copy
+ each discovered file to dir and return the list of filenames. If a .sample file was added in a changeset and then deleted in a later
+ changeset, it will be returned in the deleted_sample_files list. The caller will set the value of app.config.tool_data_path to dir in
+ order to load the tools and generate metadata for them.
+ """
+ deleted_sample_files = []
+ sample_files = []
+ for changeset in reversed_upper_bounded_changelog( repo, ctx ):
+ changeset_ctx = repo.changectx( changeset )
+ for ctx_file in changeset_ctx.files():
+ ctx_file_name = strip_path( ctx_file )
+ # If we decide in the future that files deleted later in the changelog should not be used, we can use the following if statement.
+ # if ctx_file_name.endswith( '.sample' ) and ctx_file_name not in sample_files and ctx_file_name not in deleted_sample_files:
+ if ctx_file_name.endswith( '.sample' ) and ctx_file_name not in sample_files:
+ fctx = get_file_context_from_ctx( changeset_ctx, ctx_file )
+ if fctx in [ 'DELETED' ]:
+                    # Since the if statement above (kept commented out for possible future use) is disabled, the same file that was initially added will be
+ # discovered in an earlier changeset in the change log and fall through to the else block below. In other words, if
+ # a file named blast2go.loc.sample was added in change set 0 and then deleted in changeset 3, the deleted file in changeset
+ # 3 will be handled here, but the later discovered file in changeset 0 will be handled in the else block below. In this
+ # way, the file contents will always be found for future tools even though the file was deleted.
+ if ctx_file_name not in deleted_sample_files:
+ deleted_sample_files.append( ctx_file_name )
+ else:
+ sample_files.append( ctx_file_name )
+ tmp_ctx_file_name = os.path.join( dir, ctx_file_name.replace( '.sample', '' ) )
+ fh = open( tmp_ctx_file_name, 'wb' )
+ fh.write( fctx.data() )
+ fh.close()
+ return sample_files, deleted_sample_files
def get_parent_id( trans, id, old_id, version, guid, changeset_revisions ):
parent_id = None
# Compare from most recent to oldest.
@@ -582,6 +598,10 @@
# The tool did not change through all of the changeset revisions.
return old_id
def get_previous_valid_changset_revision( repository, repo, before_changeset_revision ):
+ """
+ Return the downloadable changeset_revision in the repository changelog just prior to the changeset to which before_changeset_revision
+    refers.  If there isn't one, return the hash value of an empty repository changelog, INITIAL_CHANGELOG_HASH.
+ """
changeset_tups = []
for repository_metadata in repository.downloadable_revisions:
changeset_revision = repository_metadata.changeset_revision
@@ -600,7 +620,7 @@
return previous_changeset_revision
else:
                 # Return the hash value of an empty repository changelog - note that this will not be a valid changeset revision.
- return '000000000000'
+ return INITIAL_CHANGELOG_HASH
else:
previous_changeset_revision = current_changeset_revision
def get_repository( trans, id ):
@@ -640,29 +660,6 @@
return "%s:%s" % ( str( ctx.rev() ), changeset_revision )
else:
return "-1:%s" % changeset_revision
-def get_sample_files( repo, repo_dir, dir ):
- """Return a list of all files in the repository with the special .sample extension"""
- sample_files = []
- # Copy all discovered sample files to dir, and the caller will set the value of app.config.tool_data_path to dir
- # in order to load the tools and generate metadata for them. First look on disk.
- for root, dirs, files in os.walk( repo_dir ):
- if root.find( '.hg' ) < 0:
- for name in files:
- if name.endswith( '.sample' ) and name not in sample_files:
- new_name = name.replace( '.sample', '' )
- file_path = os.path.join( dir, new_name )
- shutil.copy( os.path.abspath( os.path.join( root, name ) ), file_path )
- sample_files.append( name )
- # Next look in the repository manifest.
- for changeset in repo.changelog:
- ctx = repo.changectx( changeset )
- for ctx_file in ctx.files():
- ctx_file_name = strip_path( ctx_file )
- if ctx_file_name.endswith( '.sample' ) and ctx_file_name not in sample_files:
- new_ctx_file_name = ctx_file_name.replace( '.sample', '' )
- copy_file_from_manifest( repo, ctx, ctx_file, dir )
- sample_files.append( ctx_file_name )
- return sample_files
def get_user( trans, id ):
"""Get a user from the database by id"""
return trans.sa_session.query( trans.model.User ).get( trans.security.decode_id( id ) )
@@ -796,7 +793,7 @@
tool = load_tool( trans, tmp_tool_config )
except Exception, e:
tool = None
- message = "Error loading tool: %s. Clicking <b>Reset metadata</b> may correct this error." % str( e )
+ message = "Error loading tool: %s. " % str( e )
for tmp_code_file in tmp_code_files:
try:
os.unlink( tmp_code_file )
@@ -819,40 +816,19 @@
tool_data_table_config = copy_file_from_manifest( repo, ctx, 'tool_data_table_conf.xml.sample', work_dir )
if tool_data_table_config:
error, correction_msg = handle_sample_tool_data_table_conf_file( trans.app, tool_data_table_config )
- if changeset_revision == repository.tip:
- # Load the tool fron it's config file on disk.
- try:
- copied_tool_config = copy_file_from_disk( tool_config_filename, repo_files_dir, work_dir )
- tool = load_tool( trans, copied_tool_config )
- except Exception, e:
- tool = None
- message = "Error loading tool from config '%s': %s." % ( tool_config_filename, str( e ) )
- else:
- found = False
- tool = None
- # Get the tool config from ctx if present.
- for ctx_file in ctx.files():
+ found = False
+ # Get the latest revision of the tool config from the repository manifest up to the value of changeset_revision.
+ for changeset in reversed_upper_bounded_changelog( repo, changeset_revision ):
+ manifest_changeset_revision = str( repo.changectx( changeset ) )
+ manifest_ctx = repo.changectx( changeset )
+ for ctx_file in manifest_ctx.files():
ctx_file_name = strip_path( ctx_file )
if ctx_file_name == tool_config_filename:
found = True
break
if found:
- if found:
- tool, message = load_from_tmp_config( ctx, ctx_file, work_dir )
- else:
- # Get the tool config from the repository manifest between valid changeset revisions.
- previous_valid_changset_revision = get_previous_valid_changset_revision( repository, repo, changeset_revision )
- for changeset in reversed_filtered_changelog( repo, previous_valid_changset_revision, changeset_revision ):
- manifest_changeset_revision = str( repo.changectx( changeset ) )
- manifest_ctx = repo.changectx( changeset )
- for ctx_file in manifest_ctx.files():
- ctx_file_name = strip_path( ctx_file )
- if ctx_file_name == tool_config_filename:
- found = True
- break
- if found:
- tool, message = load_from_tmp_config( manifest_ctx, ctx_file, work_dir )
- break
+ tool, message = load_from_tmp_config( manifest_ctx, ctx_file, work_dir )
+ break
# Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
reset_tool_data_tables( trans.app )
try:
@@ -968,6 +944,7 @@
log.debug( "Resetting all metadata on repository: %s" % repository.name )
repo_dir = repository.repo_path
repo = hg.repository( get_configured_ui(), repo_dir )
+ missing_sample_files = []
if len( repo ) == 1:
error_message, status = set_repository_metadata( trans, id, repository.tip, **kwd )
if error_message:
@@ -983,13 +960,16 @@
for changeset in repo.changelog:
current_changeset_revision = str( repo.changectx( changeset ) )
ctx = get_changectx_for_changeset( repo, current_changeset_revision )
- current_metadata_dict, invalid_files = generate_metadata_for_changeset_revision( trans,
- repo,
- id,
- ctx,
- current_changeset_revision,
- repo_dir,
- updating_tip=current_changeset_revision==repository.tip )
+ current_metadata_dict, invalid_files, deleted_sample_files = generate_metadata_for_changeset_revision( trans,
+ repo,
+ id,
+ ctx,
+ current_changeset_revision,
+ repo_dir,
+ updating_tip=current_changeset_revision==repository.tip )
+ for deleted_sample_file in deleted_sample_files:
+ if deleted_sample_file not in missing_sample_files:
+ missing_sample_files.append( deleted_sample_file )
if current_metadata_dict:
if ancestor_changeset_revision:
                     # Compare metadata from ancestor and current.  The value of comparison will be one of:
@@ -1032,27 +1012,12 @@
ancestor_metadata_dict = None
clean_repository_metadata( trans, id, changeset_revisions )
add_repository_metadata_tool_versions( trans, id, changeset_revisions )
+ if missing_sample_files:
+ message += "Metadata was successfully reset, but the following required sample files have been deleted from the repository so the version "
+ message += "of each file just prior to its deletion is being used. These files should be re-added to the repository as soon as possible: "
+ message += "<b>%s</b><br/>" % ', '.join( missing_sample_files )
+ return message, 'ok'
return '', 'ok'
-def reversed_filtered_changelog( repo, excluded_lower_bounds_changeset_revision, included_upper_bounds_changeset_revision ):
- """
- Return a reversed list of changesets in the repository changelog after the excluded_lower_bounds_changeset_revision, but up to and
- including the included_upper_bounds_changeset_revision. The value of excluded_lower_bounds_changeset_revision will be '000000000000'
- if no valid changesets exist before included_upper_bounds_changeset_revision.
- """
- if excluded_lower_bounds_changeset_revision == '000000000000':
- appending_started = True
- else:
- appending_started = False
- reversed_changelog = []
- for changeset in repo.changelog:
- changeset_hash = str( repo.changectx( changeset ) )
- if appending_started:
- reversed_changelog.insert( 0, changeset )
- if changeset_hash == excluded_lower_bounds_changeset_revision and not appending_started:
- appending_started = True
- if changeset_hash == included_upper_bounds_changeset_revision:
- break
- return reversed_changelog
def set_repository_metadata( trans, id, changeset_revision, content_alert_str='', **kwd ):
"""
Set repository metadata on the repository tip, returning specific error messages (if any) to alert the repository owner that the changeset
@@ -1068,13 +1033,13 @@
invalid_files = []
updating_tip = changeset_revision == repository.tip
if ctx is not None:
- metadata_dict, invalid_files = generate_metadata_for_changeset_revision( trans,
- repo,
- id,
- ctx,
- changeset_revision,
- repo_dir,
- updating_tip=updating_tip )
+ metadata_dict, invalid_files, deleted_sample_files = generate_metadata_for_changeset_revision( trans,
+ repo,
+ id,
+ ctx,
+ changeset_revision,
+ repo_dir,
+ updating_tip=updating_tip )
if metadata_dict:
if updating_tip:
if new_tool_metadata_required( trans, id, metadata_dict ) or new_workflow_metadata_required( trans, id, metadata_dict ):
@@ -1120,6 +1085,10 @@
# Here ctx is None.
message = "This repository does not include revision '%s'." % str( changeset_revision )
status = 'error'
+ if deleted_sample_files:
+ message += "Metadata was successfully reset, but the following required sample files have been deleted from the repository so the version "
+ message += "of each file just prior to its deletion is being used. These files should be re-added to the repository as soon as possible: "
+ message += "<b>%s</b><br/>" % ', '.join( deleted_sample_files )
if invalid_files:
if metadata_dict:
message = "Metadata was defined for some items in revision '%s'. " % str( changeset_revision )
diff -r 29b8e39db1094cb5a5586c4f7b404428641d518c -r f551610b687003fc09cd6ff7bbc506d237c67a75 lib/galaxy/webapps/community/controllers/hg.py
--- a/lib/galaxy/webapps/community/controllers/hg.py
+++ b/lib/galaxy/webapps/community/controllers/hg.py
@@ -27,8 +27,10 @@
repository = get_repository_by_name_and_owner( trans, name, owner )
if repository:
error_message, status = reset_all_metadata_on_repository( trans, trans.security.encode_id( repository.id ) )
- if error_message:
- log.debug( "Error resetting all metadata on repository '%s': %s" % ( str( repository.name ), str( error_message ) ) )
+ if status not in [ 'ok' ] and error_message:
+ log.debug( "Error resetting metadata on repository '%s': %s" % ( str( repository.name ), str( error_message ) ) )
+ elif status in [ 'ok' ] and error_message:
+ log.debug( "Successfully reset metadata on repository %s, but encountered problem: %s" % ( str( repository.name ), str( error_message ) ) )
return wsgi_app
def make_web_app():
diff -r 29b8e39db1094cb5a5586c4f7b404428641d518c -r f551610b687003fc09cd6ff7bbc506d237c67a75 lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -10,7 +10,7 @@
from galaxy.web.framework.helpers import time_ago, iff, grids
from galaxy.util.json import from_json_string, to_json_string
from galaxy.model.orm import *
-from galaxy.util.shed_util import get_changectx_for_changeset, get_configured_ui, make_tmp_directory, NOT_TOOL_CONFIGS, strip_path
+from galaxy.util.shed_util import get_changectx_for_changeset, get_configured_ui, get_named_tmpfile_from_ctx, make_tmp_directory, NOT_TOOL_CONFIGS, strip_path
from galaxy.tool_shed.encoding_util import *
from common import *
@@ -323,17 +323,26 @@
email_alerts_repository_list_grid = EmailAlertsRepositoryListGrid()
category_list_grid = CategoryListGrid()
- @web.expose
- def index( self, trans, **kwd ):
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
- # See if there are any RepositoryMetadata records since menu items require them.
- repository_metadata = trans.sa_session.query( model.RepositoryMetadata ).first()
- return trans.fill_template( '/webapps/community/index.mako',
- repository_metadata=repository_metadata,
- message=message,
- status=status )
+ def __add_hgweb_config_entry( self, trans, repository, repository_path ):
+ # Add an entry in the hgweb.config file for a new repository. An entry looks something like:
+ # repos/test/mira_assembler = database/community_files/000/repo_123.
+ hgweb_config = "%s/hgweb.config" % trans.app.config.root
+ if repository_path.startswith( './' ):
+ repository_path = repository_path.replace( './', '', 1 )
+ entry = "repos/%s/%s = %s" % ( repository.user.username, repository.name, repository_path )
+ tmp_fd, tmp_fname = tempfile.mkstemp()
+ if os.path.exists( hgweb_config ):
+ # Make a backup of the hgweb.config file since we're going to be changing it.
+ self.__make_hgweb_config_copy( trans, hgweb_config )
+ new_hgweb_config = open( tmp_fname, 'wb' )
+ for i, line in enumerate( open( hgweb_config ) ):
+ new_hgweb_config.write( line )
+ else:
+ new_hgweb_config = open( tmp_fname, 'wb' )
+ new_hgweb_config.write( '[paths]\n' )
+ new_hgweb_config.write( "%s\n" % entry )
+ new_hgweb_config.flush()
+ shutil.move( tmp_fname, os.path.abspath( hgweb_config ) )
@web.expose
def browse_categories( self, trans, **kwd ):
if 'f-free-text-search' in kwd:
@@ -360,9 +369,146 @@
return trans.response.send_redirect( web.url_for( controller='repository',
action='browse_repositories',
**kwd ) )
- # Render the list view
return self.category_list_grid( trans, **kwd )
@web.expose
+ def browse_invalid_tools( self, trans, **kwd ):
+ params = util.Params( kwd )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
+ webapp = params.get( 'webapp', 'community' )
+ cntrller = params.get( 'cntrller', 'repository' )
+ is_admin = trans.user_is_admin()
+ invalid_tools_dict = odict()
+ if is_admin and cntrller == 'admin':
+ for repository in trans.sa_session.query( trans.model.Repository ) \
+ .filter( trans.model.Repository.table.c.deleted == False ) \
+ .order_by( trans.model.Repository.table.c.name ):
+ for downloadable_revision in repository.downloadable_revisions:
+ metadata = downloadable_revision.metadata
+ invalid_tools = metadata.get( 'invalid_tools', [] )
+ for invalid_tool_config in invalid_tools:
+ invalid_tools_dict[ invalid_tool_config ] = ( repository.id, repository.name, downloadable_revision.changeset_revision )
+ else:
+ for repository in trans.sa_session.query( trans.model.Repository ) \
+ .filter( and_( trans.model.Repository.table.c.deleted == False,
+ trans.model.Repository.table.c.user_id == trans.user.id ) ) \
+ .order_by( trans.model.Repository.table.c.name ):
+ for downloadable_revision in repository.downloadable_revisions:
+ metadata = downloadable_revision.metadata
+ invalid_tools = metadata.get( 'invalid_tools', [] )
+ for invalid_tool_config in invalid_tools:
+ invalid_tools_dict[ invalid_tool_config ] = ( repository.id, repository.name, downloadable_revision.changeset_revision )
+ return trans.fill_template( '/webapps/community/repository/browse_invalid_tools.mako',
+ cntrller=cntrller,
+ invalid_tools_dict=invalid_tools_dict,
+ webapp=webapp,
+ message=message,
+ status=status )
+ @web.expose
+ def browse_repositories( self, trans, **kwd ):
+ # We add params to the keyword dict in this method in order to rename the param
+ # with an "f-" prefix, simulating filtering by clicking a search link. We have
+ # to take this approach because the "-" character is illegal in HTTP requests.
+ if 'webapp' not in kwd:
+ kwd[ 'webapp' ] = 'community'
+ if 'operation' in kwd:
+ operation = kwd['operation'].lower()
+ if operation == "view_or_manage_repository":
+ repository_id = kwd[ 'id' ]
+ repository = get_repository( trans, repository_id )
+ is_admin = trans.user_is_admin()
+ if is_admin or repository.user == trans.user:
+ return trans.response.send_redirect( web.url_for( controller='repository',
+ action='manage_repository',
+ **kwd ) )
+ else:
+ return trans.response.send_redirect( web.url_for( controller='repository',
+ action='view_repository',
+ **kwd ) )
+ elif operation == "edit_repository":
+ return trans.response.send_redirect( web.url_for( controller='repository',
+ action='edit_repository',
+ **kwd ) )
+ elif operation == "repositories_by_user":
+ # Eliminate the current filters if any exist.
+ for k, v in kwd.items():
+ if k.startswith( 'f-' ):
+ del kwd[ k ]
+ if 'user_id' in kwd:
+ user = get_user( trans, kwd[ 'user_id' ] )
+ kwd[ 'f-email' ] = user.email
+ del kwd[ 'user_id' ]
+ else:
+ # The received id is the repository id, so we need to get the id of the user
+ # that uploaded the repository.
+ repository_id = kwd.get( 'id', None )
+ repository = get_repository( trans, repository_id )
+ kwd[ 'f-email' ] = repository.user.email
+ elif operation == "my_repositories":
+ # Eliminate the current filters if any exist.
+ for k, v in kwd.items():
+ if k.startswith( 'f-' ):
+ del kwd[ k ]
+ kwd[ 'f-email' ] = trans.user.email
+ elif operation == "repositories_by_category":
+ # Eliminate the current filters if any exist.
+ for k, v in kwd.items():
+ if k.startswith( 'f-' ):
+ del kwd[ k ]
+ category_id = kwd.get( 'id', None )
+ category = get_category( trans, category_id )
+ kwd[ 'f-Category.name' ] = category.name
+ elif operation == "receive email alerts":
+ if trans.user:
+ if kwd[ 'id' ]:
+ kwd[ 'caller' ] = 'browse_repositories'
+ return trans.response.send_redirect( web.url_for( controller='repository',
+ action='set_email_alerts',
+ **kwd ) )
+ else:
+ kwd[ 'message' ] = 'You must be logged in to set email alerts.'
+ kwd[ 'status' ] = 'error'
+ del kwd[ 'operation' ]
+ # The changeset_revision_select_field in the RepositoryListGrid performs a refresh_on_change
+        # which sends in request parameters like changeset_revision_1, changeset_revision_2, etc.  One
+ # of the many select fields on the grid performed the refresh_on_change, so we loop through
+ # all of the received values to see which value is not the repository tip. If we find it, we
+ # know the refresh_on_change occurred, and we have the necessary repository id and change set
+ # revision to pass on.
+ for k, v in kwd.items():
+ changset_revision_str = 'changeset_revision_'
+ if k.startswith( changset_revision_str ):
+ repository_id = trans.security.encode_id( int( k.lstrip( changset_revision_str ) ) )
+ repository = get_repository( trans, repository_id )
+ if repository.tip != v:
+ return trans.response.send_redirect( web.url_for( controller='repository',
+ action='browse_repositories',
+ operation='view_or_manage_repository',
+ id=trans.security.encode_id( repository.id ),
+ changeset_revision=v ) )
+ return self.repository_list_grid( trans, **kwd )
+ @web.expose
+ def browse_repository( self, trans, id, **kwd ):
+ params = util.Params( kwd )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
+ webapp = params.get( 'webapp', 'community' )
+ commit_message = util.restore_text( params.get( 'commit_message', 'Deleted selected files' ) )
+ repository = get_repository( trans, id )
+ repo = hg.repository( get_configured_ui(), repository.repo_path )
+ current_working_dir = os.getcwd()
+ # Update repository files for browsing.
+ update_repository( repo )
+ is_malicious = changeset_is_malicious( trans, id, repository.tip )
+ return trans.fill_template( '/webapps/community/repository/browse_repository.mako',
+ repo=repo,
+ repository=repository,
+ commit_message=commit_message,
+ is_malicious=is_malicious,
+ webapp=webapp,
+ message=message,
+ status=status )
+ @web.expose
def browse_valid_repositories( self, trans, **kwd ):
webapp = kwd.get( 'webapp', 'community' )
galaxy_url = kwd.get( 'galaxy_url', None )
@@ -404,40 +550,400 @@
allow_multiple=False,
async_compatible=False ) ]
return self.valid_repository_list_grid( trans, **kwd )
+ def __build_allow_push_select_field( self, trans, current_push_list, selected_value='none' ):
+ options = []
+ for user in trans.sa_session.query( trans.model.User ):
+ if user.username not in current_push_list:
+ options.append( user )
+ return build_select_field( trans,
+ objs=options,
+ label_attr='username',
+ select_field_name='allow_push',
+ selected_value=selected_value,
+ refresh_on_change=False,
+ multiple=True )
+ def __change_hgweb_config_entry( self, trans, repository, old_repository_name, new_repository_name ):
+ # Change an entry in the hgweb.config file for a repository. This only happens when
+ # the owner changes the name of the repository. An entry looks something like:
+ # repos/test/mira_assembler = database/community_files/000/repo_123.
+ hgweb_config = "%s/hgweb.config" % trans.app.config.root
+ # Make a backup of the hgweb.config file since we're going to be changing it.
+ self.__make_hgweb_config_copy( trans, hgweb_config )
+ repo_dir = repository.repo_path
+ old_lhs = "repos/%s/%s" % ( repository.user.username, old_repository_name )
+ new_entry = "repos/%s/%s = %s\n" % ( repository.user.username, new_repository_name, repo_dir )
+ tmp_fd, tmp_fname = tempfile.mkstemp()
+ new_hgweb_config = open( tmp_fname, 'wb' )
+ for i, line in enumerate( open( hgweb_config ) ):
+ if line.startswith( old_lhs ):
+ new_hgweb_config.write( new_entry )
+ else:
+ new_hgweb_config.write( line )
+ new_hgweb_config.flush()
+ shutil.move( tmp_fname, os.path.abspath( hgweb_config ) )
@web.expose
- def browse_invalid_tools( self, trans, **kwd ):
+ def check_for_updates( self, trans, **kwd ):
+ """
+ Handle a request from a local Galaxy instance. If the request originated with the Galaxy instance's UpdateManager, the value of 'webapp'
+ will be 'update_manager'.
+ """
+ params = util.Params( kwd )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
+ # If the request originated with the UpdateManager, it will not include a galaxy_url.
+ galaxy_url = kwd.get( 'galaxy_url', '' )
+ name = params.get( 'name', None )
+ owner = params.get( 'owner', None )
+ changeset_revision = params.get( 'changeset_revision', None )
+ webapp = params.get( 'webapp', 'community' )
+ repository = get_repository_by_name_and_owner( trans, name, owner )
+ repo_dir = repository.repo_path
+ repo = hg.repository( get_configured_ui(), repo_dir )
+ from_update_manager = webapp == 'update_manager'
+ if from_update_manager:
+ update = 'true'
+ no_update = 'false'
+ else:
+ # Start building up the url to redirect back to the calling Galaxy instance.
+ url = '%sadmin_toolshed/update_to_changeset_revision?tool_shed_url=%s' % ( galaxy_url, url_for( '/', qualified=True ) )
+ url += '&name=%s&owner=%s&changeset_revision=%s&latest_changeset_revision=' % ( repository.name, repository.user.username, changeset_revision )
+ if changeset_revision == repository.tip:
+ # If changeset_revision is the repository tip, we know there are no additional updates for the tools.
+ if from_update_manager:
+ return no_update
+ # Return the same value for changeset_revision and latest_changeset_revision.
+ url += repository.tip
+ else:
+ repository_metadata = get_repository_metadata_by_changeset_revision( trans,
+ trans.security.encode_id( repository.id ),
+ changeset_revision )
+ if repository_metadata:
+ # If changeset_revision is in the repository_metadata table for this repository, then we know there are no additional updates
+ # for the tools.
+ if from_update_manager:
+ return no_update
+ else:
+ # Return the same value for changeset_revision and latest_changeset_revision.
+ url += changeset_revision
+ else:
+ # TODO: Re-engineer this to define the change set for update to be the one just before the next change set in the repository_metadata
+ # table for this repository.
+ # The changeset_revision column in the repository_metadata table has been updated with a new changeset_revision value since the
+ # repository was cloned. Load each tool in the repository's changeset_revision to generate a list of tool guids, since guids
+ # differentiate tools by id and version.
+ ctx = get_changectx_for_changeset( repo, changeset_revision )
+ if ctx is not None:
+ work_dir = make_tmp_directory()
+ tool_guids = []
+ for filename in ctx:
+ # Find all tool configs in this repository changeset_revision.
+ if filename not in NOT_TOOL_CONFIGS and filename.endswith( '.xml' ):
+ is_tool_config, valid, tool, error_message = load_tool_from_tmp_directory( trans,
+ repo,
+ repo_dir,
+ ctx,
+ filename,
+ work_dir )
+ if valid and tool is not None:
+ tool_guids.append( generate_tool_guid( trans, repository, tool ) )
+ tool_guids.sort()
+ if tool_guids:
+ # Compare our list of tool guids against those in each repository_metadata record for the repository to find the
+ # repository_metadata record with the changeset_revision value we want to pass back to the caller.
+ found = False
+ for repository_metadata in get_repository_metadata_by_repository_id( trans, trans.security.encode_id( repository.id ) ):
+ metadata = repository_metadata.metadata
+ metadata_tool_guids = []
+ for tool_dict in metadata[ 'tools' ]:
+ metadata_tool_guids.append( tool_dict[ 'guid' ] )
+ metadata_tool_guids.sort()
+ if tool_guids == metadata_tool_guids:
+ # We've found the repository_metadata record whose changeset_revision value has been updated.
+ if from_update_manager:
+ return update
+ url += repository_metadata.changeset_revision
+ # Get the ctx_rev for the discovered changeset_revision.
+ latest_ctx = get_changectx_for_changeset( repo, repository_metadata.changeset_revision )
+ found = True
+ break
+ if not found:
+ # There must be a problem in the data, so we'll just send back the received changeset_revision.
+ log.debug( "Possible data corruption - updated repository_metadata cannot be found for repository id %d." % repository.id )
+ if from_update_manager:
+ return no_update
+ url += changeset_revision
+ else:
+ # There are no tools in the changeset_revision, so no tool updates are possible.
+ if from_update_manager:
+ return no_update
+ url += changeset_revision
+ try:
+ shutil.rmtree( work_dir )
+ except:
+ pass
+ url += '&latest_ctx_rev=%s' % str( latest_ctx.rev() )
+ return trans.response.send_redirect( url )
+ @web.expose
+ def contact_owner( self, trans, id, **kwd ):
+ params = util.Params( kwd )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
+ repository = get_repository( trans, id )
+ if trans.user and trans.user.email:
+ return trans.fill_template( "/webapps/community/repository/contact_owner.mako",
+ repository=repository,
+ message=message,
+ status=status )
+ else:
+ # Do all we can to eliminate spam.
+ return trans.show_error_message( "You must be logged in to contact the owner of a repository." )
+ def __create_hgrc_file( self, repository ):
+ # At this point, an entry for the repository is required to be in the hgweb.config file so we can call repository.repo_path.
+ # Since we support both http and https, we set push_ssl to False to override the default (which is True) in the mercurial api.
+ # The hg purge extension purges all files and directories not being tracked by mercurial in the current repository. It'll
+ # remove unknown files and empty directories. This is not currently used because it is not supported in the mercurial API.
+ repo = hg.repository( get_configured_ui(), path=repository.repo_path )
+ fp = repo.opener( 'hgrc', 'wb' )
+ fp.write( '[paths]\n' )
+ fp.write( 'default = .\n' )
+ fp.write( 'default-push = .\n' )
+ fp.write( '[web]\n' )
+ fp.write( 'allow_push = %s\n' % repository.user.username )
+ fp.write( 'name = %s\n' % repository.name )
+ fp.write( 'push_ssl = false\n' )
+ fp.write( '[extensions]\n' )
+ fp.write( 'hgext.purge=' )
+ fp.close()
+ @web.expose
+ def create_repository( self, trans, **kwd ):
+ params = util.Params( kwd )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
+ categories = get_categories( trans )
+ if not categories:
+ message = 'No categories have been configured in this instance of the Galaxy Tool Shed. ' + \
+ 'An administrator needs to create some via the Administrator control panel before creating repositories.',
+ status = 'error'
+ return trans.response.send_redirect( web.url_for( controller='repository',
+ action='browse_repositories',
+ message=message,
+ status=status ) )
+ name = util.restore_text( params.get( 'name', '' ) )
+ description = util.restore_text( params.get( 'description', '' ) )
+ long_description = util.restore_text( params.get( 'long_description', '' ) )
+ category_ids = util.listify( params.get( 'category_id', '' ) )
+ selected_categories = [ trans.security.decode_id( id ) for id in category_ids ]
+ if params.get( 'create_repository_button', False ):
+ error = False
+ message = self.__validate_repository_name( name, trans.user )
+ if message:
+ error = True
+ if not description:
+ message = 'Enter a description.'
+ error = True
+ if not error:
+ # Add the repository record to the db
+ repository = trans.app.model.Repository( name=name,
+ description=description,
+ long_description=long_description,
+ user_id=trans.user.id )
+ # Flush to get the id
+ trans.sa_session.add( repository )
+ trans.sa_session.flush()
+ # Determine the repository's repo_path on disk
+ dir = os.path.join( trans.app.config.file_path, *directory_hash_id( repository.id ) )
+ # Create directory if it does not exist
+ if not os.path.exists( dir ):
+ os.makedirs( dir )
+ # Define repo name inside hashed directory
+ repository_path = os.path.join( dir, "repo_%d" % repository.id )
+ # Create local repository directory
+ if not os.path.exists( repository_path ):
+ os.makedirs( repository_path )
+ # Create the local repository
+ repo = hg.repository( get_configured_ui(), repository_path, create=True )
+ # Add an entry in the hgweb.config file for the local repository
+ # This enables calls to repository.repo_path
+ self.__add_hgweb_config_entry( trans, repository, repository_path )
+ # Create a .hg/hgrc file for the local repository
+ self.__create_hgrc_file( repository )
+ flush_needed = False
+ if category_ids:
+ # Create category associations
+ for category_id in category_ids:
+ category = trans.app.model.Category.get( trans.security.decode_id( category_id ) )
+ rca = trans.app.model.RepositoryCategoryAssociation( repository, category )
+ trans.sa_session.add( rca )
+ flush_needed = True
+ if flush_needed:
+ trans.sa_session.flush()
+ message = "Repository '%s' has been created." % repository.name
+ trans.response.send_redirect( web.url_for( controller='repository',
+ action='view_repository',
+ webapp='community',
+ message=message,
+ id=trans.security.encode_id( repository.id ) ) )
+ return trans.fill_template( '/webapps/community/repository/create_repository.mako',
+ name=name,
+ description=description,
+ long_description=long_description,
+ selected_categories=selected_categories,
+ categories=categories,
+ message=message,
+ status=status )
+ @web.expose
+ def display_tool( self, trans, repository_id, tool_config, changeset_revision, **kwd ):
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
webapp = params.get( 'webapp', 'community' )
- cntrller = params.get( 'cntrller', 'repository' )
- is_admin = trans.user_is_admin()
- invalid_tools_dict = odict()
- if is_admin and cntrller == 'admin':
- for repository in trans.sa_session.query( trans.model.Repository ) \
- .filter( trans.model.Repository.table.c.deleted == False ) \
- .order_by( trans.model.Repository.table.c.name ):
- for downloadable_revision in repository.downloadable_revisions:
- metadata = downloadable_revision.metadata
- invalid_tools = metadata.get( 'invalid_tools', [] )
- for invalid_tool_config in invalid_tools:
- invalid_tools_dict[ invalid_tool_config ] = ( repository.id, repository.name, downloadable_revision.changeset_revision )
- else:
- for repository in trans.sa_session.query( trans.model.Repository ) \
- .filter( and_( trans.model.Repository.table.c.deleted == False,
- trans.model.Repository.table.c.user_id == trans.user.id ) ) \
- .order_by( trans.model.Repository.table.c.name ):
- for downloadable_revision in repository.downloadable_revisions:
- metadata = downloadable_revision.metadata
- invalid_tools = metadata.get( 'invalid_tools', [] )
- for invalid_tool_config in invalid_tools:
- invalid_tools_dict[ invalid_tool_config ] = ( repository.id, repository.name, downloadable_revision.changeset_revision )
- return trans.fill_template( '/webapps/community/repository/browse_invalid_tools.mako',
- cntrller=cntrller,
- invalid_tools_dict=invalid_tools_dict,
+ repository = get_repository( trans, repository_id )
+ tool, message = load_tool_from_changeset_revision( trans, repository_id, changeset_revision, tool_config )
+ tool_state = self.__new_state( trans )
+ is_malicious = changeset_is_malicious( trans, repository_id, repository.tip )
+ try:
+ return trans.fill_template( "/webapps/community/repository/tool_form.mako",
+ repository=repository,
+ changeset_revision=changeset_revision,
+ tool=tool,
+ tool_state=tool_state,
+ is_malicious=is_malicious,
+ webapp=webapp,
+ message=message,
+ status=status )
+ except Exception, e:
+ message = "Error displaying tool, probably due to a problem in the tool config. The exception is: %s." % str( e )
+ if webapp == 'galaxy':
+ return trans.response.send_redirect( web.url_for( controller='repository',
+ action='preview_tools_in_changeset',
+ repository_id=repository_id,
+ changeset_revision=changeset_revision,
+ message=message,
+ status='error' ) )
+ return trans.response.send_redirect( web.url_for( controller='repository',
+ action='browse_repositories',
+ operation='view_or_manage_repository',
+ id=repository_id,
+ changeset_revision=changeset_revision,
+ message=message,
+ status='error' ) )
+ @web.expose
+ def download( self, trans, repository_id, changeset_revision, file_type, **kwd ):
+ # Download an archive of the repository files compressed as zip, gz or bz2.
+ params = util.Params( kwd )
+ repository = get_repository( trans, repository_id )
+ # Allow hgweb to handle the download. This requires the tool shed
+ # server account's .hgrc file to include the following setting:
+ # [web]
+ # allow_archive = bz2, gz, zip
+ if file_type == 'zip':
+ file_type_str = '%s.zip' % changeset_revision
+ elif file_type == 'bz2':
+ file_type_str = '%s.tar.bz2' % changeset_revision
+ elif file_type == 'gz':
+ file_type_str = '%s.tar.gz' % changeset_revision
+ repository.times_downloaded += 1
+ trans.sa_session.add( repository )
+ trans.sa_session.flush()
+ download_url = '/repos/%s/%s/archive/%s' % ( repository.user.username, repository.name, file_type_str )
+ return trans.response.send_redirect( download_url )
+ def __encode_repo_info_dict( self, trans, webapp, repository_metadata_ids ):
+ repo_info_dict = {}
+ includes_tools = False
+ for repository_metadata_id in repository_metadata_ids:
+ repository_metadata = get_repository_metadata_by_id( trans, repository_metadata_id )
+ if not includes_tools and 'tools' in repository_metadata.metadata:
+ includes_tools = True
+ repository = get_repository( trans, trans.security.encode_id( repository_metadata.repository_id ) )
+ # Get the changelog rev for this changeset_revision.
+ repo_dir = repository.repo_path
+ repo = hg.repository( get_configured_ui(), repo_dir )
+ changeset_revision = repository_metadata.changeset_revision
+ ctx = get_changectx_for_changeset( repo, changeset_revision )
+ repository_id = trans.security.encode_id( repository.id )
+ repository_clone_url = generate_clone_url( trans, repository_id )
+ repo_info_dict[ repository.name ] = ( repository.description, repository_clone_url, changeset_revision, str( ctx.rev() ) )
+ return encode( repo_info_dict ), includes_tools
+ @web.expose
+ def find_tools( self, trans, **kwd ):
+ params = util.Params( kwd )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
+ webapp = params.get( 'webapp', 'community' )
+ galaxy_url = kwd.get( 'galaxy_url', None )
+ if galaxy_url:
+ trans.set_cookie( galaxy_url, name='toolshedgalaxyurl' )
+ if 'operation' in kwd:
+ item_id = kwd.get( 'id', '' )
+ if item_id:
+ operation = kwd[ 'operation' ].lower()
+ is_admin = trans.user_is_admin()
+ if operation == "view_or_manage_repository":
+ # The received id is a RepositoryMetadata id, so we have to get the repository id.
+ repository_metadata = get_repository_metadata_by_id( trans, item_id )
+ repository_id = trans.security.encode_id( repository_metadata.repository.id )
+ repository = get_repository( trans, repository_id )
+ kwd[ 'id' ] = repository_id
+ kwd[ 'changeset_revision' ] = repository_metadata.changeset_revision
+ if webapp == 'community' and ( is_admin or repository.user == trans.user ):
+ a = 'manage_repository'
+ else:
+ a = 'view_repository'
+ return trans.response.send_redirect( web.url_for( controller='repository',
+ action=a,
+ **kwd ) )
+ if operation == "install":
+ galaxy_url = trans.get_cookie( name='toolshedgalaxyurl' )
+ encoded_repo_info_dict, includes_tools = self.__encode_repo_info_dict( trans, webapp, util.listify( item_id ) )
+ url = '%sadmin_toolshed/install_repository?tool_shed_url=%s&webapp=%s&repo_info_dict=%s&includes_tools=%s' % \
+ ( galaxy_url, url_for( '/', qualified=True ), webapp, encoded_repo_info_dict, str( includes_tools ) )
+ return trans.response.send_redirect( url )
+ else:
+ # This can only occur when there is a multi-select grid with check boxes and an operation,
+ # and the user clicked the operation button without checking any of the check boxes.
+ return trans.show_error_message( "No items were selected." )
+ tool_ids = [ item.lower() for item in util.listify( kwd.get( 'tool_id', '' ) ) ]
+ tool_names = [ item.lower() for item in util.listify( kwd.get( 'tool_name', '' ) ) ]
+ tool_versions = [ item.lower() for item in util.listify( kwd.get( 'tool_version', '' ) ) ]
+ exact_matches = params.get( 'exact_matches', '' )
+ exact_matches_checked = CheckboxField.is_checked( exact_matches )
+ match_tuples = []
+ ok = True
+ if tool_ids or tool_names or tool_versions:
+ ok, match_tuples = self.__search_repository_metadata( trans, exact_matches_checked, tool_ids=tool_ids, tool_names=tool_names, tool_versions=tool_versions )
+ if ok:
+ kwd[ 'match_tuples' ] = match_tuples
+ # Render the list view
+ if webapp == 'galaxy':
+ # Our initial request originated from a Galaxy instance.
+ global_actions = [ grids.GridAction( "Browse valid repositories",
+ dict( controller='repository', action='browse_valid_repositories', webapp=webapp ) ),
+ grids.GridAction( "Search for valid tools",
+ dict( controller='repository', action='find_tools', webapp=webapp ) ),
+ grids.GridAction( "Search for workflows",
+ dict( controller='repository', action='find_workflows', webapp=webapp ) ) ]
+ self.install_matched_repository_list_grid.global_actions = global_actions
+ install_url_args = dict( controller='repository', action='find_tools', webapp=webapp )
+ operations = [ grids.GridOperation( "Install", url_args=install_url_args, allow_multiple=True, async_compatible=False ) ]
+ self.install_matched_repository_list_grid.operations = operations
+ return self.install_matched_repository_list_grid( trans, **kwd )
+ else:
+ kwd[ 'message' ] = "tool id: <b>%s</b><br/>tool name: <b>%s</b><br/>tool version: <b>%s</b><br/>exact matches only: <b>%s</b>" % \
+ ( self.__stringify( tool_ids ), self.__stringify( tool_names ), self.__stringify( tool_versions ), str( exact_matches_checked ) )
+ self.matched_repository_list_grid.title = "Repositories with matching tools"
+ return self.matched_repository_list_grid( trans, **kwd )
+ else:
+ message = "No search performed - each field must contain the same number of comma-separated items."
+ status = "error"
+ exact_matches_check_box = CheckboxField( 'exact_matches', checked=exact_matches_checked )
+ return trans.fill_template( '/webapps/community/repository/find_tools.mako',
webapp=webapp,
+ tool_id=self.__stringify( tool_ids ),
+ tool_name=self.__stringify( tool_names ),
+ tool_version=self.__stringify( tool_versions ),
+ exact_matches_check_box=exact_matches_check_box,
message=message,
- status=status )
+ status=status )
@web.expose
def find_workflows( self, trans, **kwd ):
params = util.Params( kwd )
@@ -521,160 +1027,127 @@
message=message,
status=status )
@web.expose
- def find_tools( self, trans, **kwd ):
+ def get_ctx_rev( self, trans, **kwd ):
+ """Given a repository and changeset_revision, return the correct ctx.rev() value."""
+ repository_name = kwd[ 'name' ]
+ repository_owner = kwd[ 'owner' ]
+ changeset_revision = kwd[ 'changeset_revision' ]
+ repository = get_repository_by_name_and_owner( trans, repository_name, repository_owner )
+ repo_dir = repository.repo_path
+ repo = hg.repository( get_configured_ui(), repo_dir )
+ ctx = get_changectx_for_changeset( repo, changeset_revision )
+ if ctx:
+ return str( ctx.rev() )
+ return ''
+ @web.json
+ def get_file_contents( self, trans, file_path ):
+ # Avoid caching
+ trans.response.headers['Pragma'] = 'no-cache'
+ trans.response.headers['Expires'] = '0'
+ if is_gzip( file_path ):
+ to_html = to_html_str( '\ngzip compressed file\n' )
+ elif is_bz2( file_path ):
+ to_html = to_html_str( '\nbz2 compressed file\n' )
+ elif check_zip( file_path ):
+ to_html = to_html_str( '\nzip compressed file\n' )
+ elif check_binary( file_path ):
+ to_html = to_html_str( '\nBinary file\n' )
+ else:
+ to_html = ''
+ for i, line in enumerate( open( file_path ) ):
+ to_html = '%s%s' % ( to_html, to_html_str( line ) )
+ if len( to_html ) > MAX_CONTENT_SIZE:
+ large_str = '\nFile contents truncated because file size is larger than maximum viewing size of %s\n' % util.nice_size( MAX_CONTENT_SIZE )
+ to_html = '%s%s' % ( to_html, to_html_str( large_str ) )
+ break
+ return to_html
+ def __get_files( self, trans, folder_path ):
+ contents = []
+ for item in os.listdir( folder_path ):
+ # Skip .hg directories
+ if str( item ).startswith( '.hg' ):
+ continue
+ if os.path.isdir( os.path.join( folder_path, item ) ):
+ # Append a '/' character so that our jquery dynatree will
+ # function properly.
+ item = '%s/' % item
+ contents.append( item )
+ if contents:
+ contents.sort()
+ return contents
+ @web.expose
+ def get_readme( self, trans, **kwd ):
+ """If the received changeset_revision includes a file named readme (case ignored), return its contents."""
+ repository_name = kwd[ 'name' ]
+ repository_owner = kwd[ 'owner' ]
+ changeset_revision = kwd[ 'changeset_revision' ]
+ valid_filenames = [ r for r in README_FILES ]
+ for r in README_FILES:
+ valid_filenames.append( '%s.txt' % r )
+ valid_filenames.append( '%s.txt' % repository_name )
+ repository = get_repository_by_name_and_owner( trans, repository_name, repository_owner )
+ repo_dir = repository.repo_path
+ for root, dirs, files in os.walk( repo_dir ):
+ for name in files:
+ if name.lower() in valid_filenames:
+ f = open( os.path.join( root, name ), 'r' )
+ text = f.read()
+ f.close()
+ return str( text )
+ return ''
+ @web.expose
+ def get_tool_dependencies( self, trans, **kwd ):
+ # Handle a request from a local Galaxy instance. If the request originated with the Galaxy instance's InstallManager, the value of 'webapp'
+ # will be 'install_manager'.
params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
+ message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
+ # If the request originated with the UpdateManager, it will not include a galaxy_url.
+ galaxy_url = kwd.get( 'galaxy_url', '' )
+ name = params.get( 'name', None )
+ owner = params.get( 'owner', None )
+ changeset_revision = params.get( 'changeset_revision', None )
webapp = params.get( 'webapp', 'community' )
- galaxy_url = kwd.get( 'galaxy_url', None )
- if galaxy_url:
- trans.set_cookie( galaxy_url, name='toolshedgalaxyurl' )
- if 'operation' in kwd:
- item_id = kwd.get( 'id', '' )
- if item_id:
- operation = kwd[ 'operation' ].lower()
- is_admin = trans.user_is_admin()
- if operation == "view_or_manage_repository":
- # The received id is a RepositoryMetadata id, so we have to get the repository id.
- repository_metadata = get_repository_metadata_by_id( trans, item_id )
- repository_id = trans.security.encode_id( repository_metadata.repository.id )
- repository = get_repository( trans, repository_id )
- kwd[ 'id' ] = repository_id
- kwd[ 'changeset_revision' ] = repository_metadata.changeset_revision
- if webapp == 'community' and ( is_admin or repository.user == trans.user ):
- a = 'manage_repository'
- else:
- a = 'view_repository'
- return trans.response.send_redirect( web.url_for( controller='repository',
- action=a,
- **kwd ) )
- if operation == "install":
- galaxy_url = trans.get_cookie( name='toolshedgalaxyurl' )
- encoded_repo_info_dict, includes_tools = self.__encode_repo_info_dict( trans, webapp, util.listify( item_id ) )
- url = '%sadmin_toolshed/install_repository?tool_shed_url=%s&webapp=%s&repo_info_dict=%s&includes_tools=%s' % \
- ( galaxy_url, url_for( '/', qualified=True ), webapp, encoded_repo_info_dict, str( includes_tools ) )
- return trans.response.send_redirect( url )
- else:
- # This can only occur when there is a multi-select grid with check boxes and an operation,
- # and the user clicked the operation button without checking any of the check boxes.
- return trans.show_error_message( "No items were selected." )
- tool_ids = [ item.lower() for item in util.listify( kwd.get( 'tool_id', '' ) ) ]
- tool_names = [ item.lower() for item in util.listify( kwd.get( 'tool_name', '' ) ) ]
- tool_versions = [ item.lower() for item in util.listify( kwd.get( 'tool_version', '' ) ) ]
- exact_matches = params.get( 'exact_matches', '' )
- exact_matches_checked = CheckboxField.is_checked( exact_matches )
- match_tuples = []
- ok = True
- if tool_ids or tool_names or tool_versions:
- ok, match_tuples = self.__search_repository_metadata( trans, exact_matches_checked, tool_ids=tool_ids, tool_names=tool_names, tool_versions=tool_versions )
- if ok:
- kwd[ 'match_tuples' ] = match_tuples
- # Render the list view
- if webapp == 'galaxy':
- # Our initial request originated from a Galaxy instance.
- global_actions = [ grids.GridAction( "Browse valid repositories",
- dict( controller='repository', action='browse_valid_repositories', webapp=webapp ) ),
- grids.GridAction( "Search for valid tools",
- dict( controller='repository', action='find_tools', webapp=webapp ) ),
- grids.GridAction( "Search for workflows",
- dict( controller='repository', action='find_workflows', webapp=webapp ) ) ]
- self.install_matched_repository_list_grid.global_actions = global_actions
- install_url_args = dict( controller='repository', action='find_tools', webapp=webapp )
- operations = [ grids.GridOperation( "Install", url_args=install_url_args, allow_multiple=True, async_compatible=False ) ]
- self.install_matched_repository_list_grid.operations = operations
- return self.install_matched_repository_list_grid( trans, **kwd )
- else:
- kwd[ 'message' ] = "tool id: <b>%s</b><br/>tool name: <b>%s</b><br/>tool version: <b>%s</b><br/>exact matches only: <b>%s</b>" % \
- ( self.__stringify( tool_ids ), self.__stringify( tool_names ), self.__stringify( tool_versions ), str( exact_matches_checked ) )
- self.matched_repository_list_grid.title = "Repositories with matching tools"
- return self.matched_repository_list_grid( trans, **kwd )
- else:
- message = "No search performed - each field must contain the same number of comma-separated items."
- status = "error"
- exact_matches_check_box = CheckboxField( 'exact_matches', checked=exact_matches_checked )
- return trans.fill_template( '/webapps/community/repository/find_tools.mako',
- webapp=webapp,
- tool_id=self.__stringify( tool_ids ),
- tool_name=self.__stringify( tool_names ),
- tool_version=self.__stringify( tool_versions ),
- exact_matches_check_box=exact_matches_check_box,
- message=message,
- status=status )
- def __search_repository_metadata( self, trans, exact_matches_checked, tool_ids='', tool_names='', tool_versions='', workflow_names='', all_workflows=False ):
- match_tuples = []
- ok = True
- for repository_metadata in trans.sa_session.query( model.RepositoryMetadata ):
- metadata = repository_metadata.metadata
- if tool_ids or tool_names or tool_versions:
- if 'tools' in metadata:
- tools = metadata[ 'tools' ]
- else:
- tools = []
- for tool_dict in tools:
- if tool_ids and not tool_names and not tool_versions:
- for tool_id in tool_ids:
- if self.__in_tool_dict( tool_dict, exact_matches_checked, tool_id=tool_id ):
- match_tuples.append( ( repository_metadata.repository_id, repository_metadata.changeset_revision ) )
- elif tool_names and not tool_ids and not tool_versions:
- for tool_name in tool_names:
- if self.__in_tool_dict( tool_dict, exact_matches_checked, tool_name=tool_name ):
- match_tuples.append( ( repository_metadata.repository_id, repository_metadata.changeset_revision ) )
- elif tool_versions and not tool_ids and not tool_names:
- for tool_version in tool_versions:
- if self.__in_tool_dict( tool_dict, exact_matches_checked, tool_version=tool_version ):
- match_tuples.append( ( repository_metadata.repository_id, repository_metadata.changeset_revision ) )
- elif tool_ids and tool_names and not tool_versions:
- if len( tool_ids ) == len( tool_names ):
- match_tuples = self.__search_ids_names( tool_dict, exact_matches_checked, match_tuples, repository_metadata, tool_ids, tool_names )
- elif len( tool_ids ) == 1 or len( tool_names ) == 1:
- tool_ids, tool_names = self.__make_same_length( tool_ids, tool_names )
- match_tuples = self.__search_ids_names( tool_dict, exact_matches_checked, match_tuples, repository_metadata, tool_ids, tool_names )
- else:
- ok = False
- elif tool_ids and tool_versions and not tool_names:
- if len( tool_ids ) == len( tool_versions ):
- match_tuples = self.__search_ids_versions( tool_dict, exact_matches_checked, match_tuples, repository_metadata, tool_ids, tool_versions )
- elif len( tool_ids ) == 1 or len( tool_versions ) == 1:
- tool_ids, tool_versions = self.__make_same_length( tool_ids, tool_versions )
- match_tuples = self.__search_ids_versions( tool_dict, exact_matches_checked, match_tuples, repository_metadata, tool_ids, tool_versions )
- else:
- ok = False
- elif tool_versions and tool_names and not tool_ids:
- if len( tool_versions ) == len( tool_names ):
- match_tuples = self.__search_names_versions( tool_dict, exact_matches_checked, match_tuples, repository_metadata, tool_names, tool_versions )
- elif len( tool_versions ) == 1 or len( tool_names ) == 1:
- tool_versions, tool_names = self.__make_same_length( tool_versions, tool_names )
- match_tuples = self.__search_names_versions( tool_dict, exact_matches_checked, match_tuples, repository_metadata, tool_names, tool_versions )
- else:
- ok = False
- elif tool_versions and tool_names and tool_ids:
- if len( tool_versions ) == len( tool_names ) and len( tool_names ) == len( tool_ids ):
- for i, tool_version in enumerate( tool_versions ):
- tool_name = tool_names[ i ]
- tool_id = tool_ids[ i ]
- if self.__in_tool_dict( tool_dict, exact_matches_checked, tool_id=tool_id, tool_name=tool_name, tool_version=tool_version ):
- match_tuples.append( ( repository_metadata.repository_id, repository_metadata.changeset_revision ) )
- else:
- ok = False
- elif workflow_names:
- if 'workflows' in metadata:
- # metadata[ 'workflows' ] is a list of tuples where each contained tuple is
- # [ <relative path to the .ga file in the repository>, <exported workflow dict> ]
- workflow_tups = metadata[ 'workflows' ]
- workflows = [ workflow_tup[1] for workflow_tup in workflow_tups ]
- else:
- workflows = []
- for workflow_dict in workflows:
- for workflow_name in workflow_names:
- if self.__in_workflow_dict( workflow_dict, exact_matches_checked, workflow_name ):
- match_tuples.append( ( repository_metadata.repository_id, repository_metadata.changeset_revision ) )
- elif all_workflows and 'workflows' in metadata:
- match_tuples.append( ( repository_metadata.repository_id, repository_metadata.changeset_revision ) )
- return ok, match_tuples
- def __in_workflow_dict( self, workflow_dict, exact_matches_checked, workflow_name ):
- workflow_dict_workflow_name = workflow_dict[ 'name' ].lower()
- return ( workflow_name == workflow_dict_workflow_name ) or \
- ( not exact_matches_checked and workflow_dict_workflow_name.find( workflow_name ) >= 0 )
+ repository = get_repository_by_name_and_owner( trans, name, owner )
+ for downloadable_revision in repository.downloadable_revisions:
+ if downloadable_revision.changeset_revision == changeset_revision:
+ break
+ metadata = downloadable_revision.metadata
+ tool_dependencies = metadata.get( 'tool_dependencies', '' )
+ if webapp == 'install_manager':
+ if tool_dependencies:
+ return tool_shed_encode( tool_dependencies )
+ return ''
+ # TODO: future handler where request comes from some Galaxy admin feature.
+ @web.expose
+ def get_tool_versions( self, trans, **kwd ):
+ """
+ For each valid /downloadable change set (up to the received changeset_revision) in the repository's change log, append the change
+ set's tool_versions dictionary to the list that will be returned.
+ """
+ name = kwd[ 'name' ]
+ owner = kwd[ 'owner' ]
+ changeset_revision = kwd[ 'changeset_revision' ]
+ repository = get_repository_by_name_and_owner( trans, name, owner )
+ repo_dir = repository.repo_path
+ repo = hg.repository( get_configured_ui(), repo_dir )
+ tool_version_dicts = []
+ for changeset in repo.changelog:
+ current_changeset_revision = str( repo.changectx( changeset ) )
+ repository_metadata = get_repository_metadata_by_changeset_revision( trans, trans.security.encode_id( repository.id ), current_changeset_revision )
+ if repository_metadata and repository_metadata.tool_versions:
+ tool_version_dicts.append( repository_metadata.tool_versions )
+ if current_changeset_revision == changeset_revision:
+ break
+ if tool_version_dicts:
+ return to_json_string( tool_version_dicts )
+ return ''
+ @web.expose
+ def help( self, trans, **kwd ):
+ params = util.Params( kwd )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
+ return trans.fill_template( '/webapps/community/repository/help.mako', message=message, status=status, **kwd )
def __in_tool_dict( self, tool_dict, exact_matches_checked, tool_id=None, tool_name=None, tool_version=None ):
found = False
if tool_id and not tool_name and not tool_version:
@@ -716,83 +1189,19 @@
tool_dict_tool_name.find( tool_name ) >= 0 and \
tool_dict_tool_id.find( tool_id ) >= 0 )
return found
- def __stringify( self, list ):
- if list:
- return ','.join( list )
- return ''
- def __make_same_length( self, list1, list2 ):
- # If either list is 1 item, we'll append to it until its
- # length is the same as the other.
- if len( list1 ) == 1:
- for i in range( 1, len( list2 ) ):
- list1.append( list1[ 0 ] )
- elif len( list2 ) == 1:
- for i in range( 1, len( list1 ) ):
- list2.append( list2[ 0 ] )
- return list1, list2
- def __search_ids_names( self, tool_dict, exact_matches_checked, match_tuples, repository_metadata, tool_ids, tool_names ):
- for i, tool_id in enumerate( tool_ids ):
- tool_name = tool_names[ i ]
- if self.__in_tool_dict( tool_dict, exact_matches_checked, tool_id=tool_id, tool_name=tool_name ):
- match_tuples.append( ( repository_metadata.repository_id, repository_metadata.changeset_revision ) )
- return match_tuples
- def __search_ids_versions( self, tool_dict, exact_matches_checked, match_tuples, repository_metadata, tool_ids, tool_versions ):
- for i, tool_id in enumerate( tool_ids ):
- tool_version = tool_versions[ i ]
- if self.__in_tool_dict( tool_dict, exact_matches_checked, tool_id=tool_id, tool_version=tool_version ):
- match_tuples.append( ( repository_metadata.repository_id, repository_metadata.changeset_revision ) )
- return match_tuples
- def __search_names_versions( self, tool_dict, exact_matches_checked, match_tuples, repository_metadata, tool_names, tool_versions ):
- for i, tool_name in enumerate( tool_names ):
- tool_version = tool_versions[ i ]
- if self.__in_tool_dict( tool_dict, exact_matches_checked, tool_name=tool_name, tool_version=tool_version ):
- match_tuples.append( ( repository_metadata.repository_id, repository_metadata.changeset_revision ) )
- return match_tuples
- def __encode_repo_info_dict( self, trans, webapp, repository_metadata_ids ):
- repo_info_dict = {}
- includes_tools = False
- for repository_metadata_id in repository_metadata_ids:
- repository_metadata = get_repository_metadata_by_id( trans, repository_metadata_id )
- if not includes_tools and 'tools' in repository_metadata.metadata:
- includes_tools = True
- repository = get_repository( trans, trans.security.encode_id( repository_metadata.repository_id ) )
- # Get the changelog rev for this changeset_revision.
- repo_dir = repository.repo_path
- repo = hg.repository( get_configured_ui(), repo_dir )
- changeset_revision = repository_metadata.changeset_revision
- ctx = get_changectx_for_changeset( repo, changeset_revision )
- repository_id = trans.security.encode_id( repository.id )
- repository_clone_url = generate_clone_url( trans, repository_id )
- repo_info_dict[ repository.name ] = ( repository.description, repository_clone_url, changeset_revision, str( ctx.rev() ) )
- return encode( repo_info_dict ), includes_tools
+ def __in_workflow_dict( self, workflow_dict, exact_matches_checked, workflow_name ):
+ workflow_dict_workflow_name = workflow_dict[ 'name' ].lower()
+ return ( workflow_name == workflow_dict_workflow_name ) or \
+ ( not exact_matches_checked and workflow_dict_workflow_name.find( workflow_name ) >= 0 )
@web.expose
- def preview_tools_in_changeset( self, trans, repository_id, **kwd ):
+ def index( self, trans, **kwd ):
params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
+ message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
- webapp = params.get( 'webapp', 'community' )
- repository = get_repository( trans, repository_id )
- changeset_revision = util.restore_text( params.get( 'changeset_revision', repository.tip ) )
- repository_metadata = get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision )
- if repository_metadata:
- repository_metadata_id = trans.security.encode_id( repository_metadata.id ),
- metadata = repository_metadata.metadata
- else:
- repository_metadata_id = None
- metadata = None
- revision_label = get_revision_label( trans, repository, changeset_revision )
- changeset_revision_select_field = build_changeset_revision_select_field( trans,
- repository,
- selected_value=changeset_revision,
- add_id_to_name=False )
- return trans.fill_template( '/webapps/community/repository/preview_tools_in_changeset.mako',
- repository=repository,
- repository_metadata_id=repository_metadata_id,
- changeset_revision=changeset_revision,
- revision_label=revision_label,
- changeset_revision_select_field=changeset_revision_select_field,
- metadata=metadata,
- webapp=webapp,
+ # See if there are any RepositoryMetadata records since menu items require them.
+ repository_metadata = trans.sa_session.query( model.RepositoryMetadata ).first()
+ return trans.fill_template( '/webapps/community/index.mako',
+ repository_metadata=repository_metadata,
message=message,
status=status )
@web.expose
@@ -838,633 +1247,117 @@
( galaxy_url, url_for( '/', qualified=True ), encoded_repo_info_dict, str( includes_tools ) )
return trans.response.send_redirect( url )
@web.expose
- def get_ctx_rev( self, trans, **kwd ):
- """Given a repository and changeset_revision, return the correct ctx.rev() value."""
- repository_name = kwd[ 'name' ]
- repository_owner = kwd[ 'owner' ]
- changeset_revision = kwd[ 'changeset_revision' ]
- repository = get_repository_by_name_and_owner( trans, repository_name, repository_owner )
+ def load_invalid_tool( self, trans, repository_id, tool_config, changeset_revision, **kwd ):
+ params = util.Params( kwd )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'error' )
+ webapp = params.get( 'webapp', 'community' )
+ repository = get_repository( trans, repository_id )
repo_dir = repository.repo_path
repo = hg.repository( get_configured_ui(), repo_dir )
ctx = get_changectx_for_changeset( repo, changeset_revision )
- if ctx:
- return str( ctx.rev() )
- return ''
- @web.expose
- def get_readme( self, trans, **kwd ):
- """If the received changeset_revision includes a file named readme (case ignored), return it's contents."""
- repository_name = kwd[ 'name' ]
- repository_owner = kwd[ 'owner' ]
- changeset_revision = kwd[ 'changeset_revision' ]
- valid_filenames = [ r for r in README_FILES ]
- for r in README_FILES:
- valid_filenames.append( '%s.txt' % r )
- valid_filenames.append( '%s.txt' % repository_name )
- repository = get_repository_by_name_and_owner( trans, repository_name, repository_owner )
- repo_dir = repository.repo_path
- for root, dirs, files in os.walk( repo_dir ):
- for name in files:
- if name.lower() in valid_filenames:
- f = open( os.path.join( root, name ), 'r' )
- text = f.read()
- f.close()
- return str( text )
- return ''
- @web.expose
- def get_tool_versions( self, trans, **kwd ):
- """
- For each valid /downloadable change set (up to the received changeset_revision) in the repository's change log, append the change
- set's tool_versions dictionary to the list that will be returned.
- """
- name = kwd[ 'name' ]
- owner = kwd[ 'owner' ]
- changeset_revision = kwd[ 'changeset_revision' ]
- repository = get_repository_by_name_and_owner( trans, name, owner )
- repo_dir = repository.repo_path
- repo = hg.repository( get_configured_ui(), repo_dir )
- tool_version_dicts = []
- for changeset in repo.changelog:
- current_changeset_revision = str( repo.changectx( changeset ) )
- repository_metadata = get_repository_metadata_by_changeset_revision( trans, trans.security.encode_id( repository.id ), current_changeset_revision )
- if repository_metadata and repository_metadata.tool_versions:
- tool_version_dicts.append( repository_metadata.tool_versions )
- if current_changeset_revision == changeset_revision:
- break
- if tool_version_dicts:
- return to_json_string( tool_version_dicts )
- return ''
- @web.expose
- def check_for_updates( self, trans, **kwd ):
- # Handle a request from a local Galaxy instance. If the request originated with the
- # Galaxy instances' UpdateManager, the value of 'webapp' will be 'update_manager'.
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
- # If the request originated with the UpdateManager, it will not include a galaxy_url.
- galaxy_url = kwd.get( 'galaxy_url', '' )
- name = params.get( 'name', None )
- owner = params.get( 'owner', None )
- changeset_revision = params.get( 'changeset_revision', None )
- webapp = params.get( 'webapp', 'community' )
- repository = get_repository_by_name_and_owner( trans, name, owner )
- repo_dir = repository.repo_path
- repo = hg.repository( get_configured_ui(), repo_dir )
- latest_ctx = get_changectx_for_changeset( repo, changeset_revision )
- from_update_manager = webapp == 'update_manager'
- if from_update_manager:
- update = 'true'
- no_update = 'false'
- else:
- # Start building up the url to redirect back to the calling Galaxy instance.
- url = '%sadmin_toolshed/update_to_changeset_revision?tool_shed_url=%s' % ( galaxy_url, url_for( '/', qualified=True ) )
- url += '&name=%s&owner=%s&changeset_revision=%s&latest_changeset_revision=' % \
- ( repository.name, repository.user.username, changeset_revision )
- if changeset_revision == repository.tip:
- # If changeset_revision is the repository tip, we know there are no additional updates for the tools.
- if from_update_manager:
- return no_update
- # Return the same value for changeset_revision and latest_changeset_revision.
- url += repository.tip
- else:
- repository_metadata = get_repository_metadata_by_changeset_revision( trans,
- trans.security.encode_id( repository.id ),
- changeset_revision )
- if repository_metadata:
- # If changeset_revision is in the repository_metadata table for this repository, then we know there are no additional updates
- # for the tools.
- if from_update_manager:
- return no_update
- else:
- # Return the same value for changeset_revision and latest_changeset_revision.
- url += changeset_revision
- else:
- # TODO: Re-engineer this to define the change set for update to be the one just before the next change set in the repository_metadata
- # table for this repository.
- # The changeset_revision column in the repository_metadata table has been updated with a new changeset_revision value since the
- # repository was cloned. Load each tool in the repository's changeset_revision to generate a list of tool guids, since guids
- # differentiate tools by id and version.
- ctx = get_changectx_for_changeset( repo, changeset_revision )
- if ctx is not None:
- work_dir = make_tmp_directory()
- tool_guids = []
- for filename in ctx:
- # Find all tool configs in this repository changeset_revision.
- if filename not in NOT_TOOL_CONFIGS and filename.endswith( '.xml' ):
- is_tool_config, valid, tool, error_message = load_tool_from_tmp_directory( trans,
+ invalid_message = ''
+ work_dir = make_tmp_directory()
+ for filename in ctx:
+ ctx_file_name = strip_path( filename )
+ if ctx_file_name == tool_config:
+ tool_config_path = get_named_tmpfile_from_ctx( ctx, filename, work_dir )
+ break
+ metadata_dict, invalid_files, deleted_sample_files = generate_metadata_for_changeset_revision( trans,
repo,
+ repository_id,
+ ctx,
+ changeset_revision,
repo_dir,
- ctx,
- filename,
- work_dir )
- if valid and tool is not None:
- tool_guids.append( generate_tool_guid( trans, repository, tool ) )
- tool_guids.sort()
- if tool_guids:
- # Compare our list of tool guids against those in each repository_metadata record for the repository to find the
- # repository_metadata record with the changeset_revision value we want to pass back to the caller.
- found = False
- for repository_metadata in get_repository_metadata_by_repository_id( trans, trans.security.encode_id( repository.id ) ):
- metadata = repository_metadata.metadata
- metadata_tool_guids = []
- for tool_dict in metadata[ 'tools' ]:
- metadata_tool_guids.append( tool_dict[ 'guid' ] )
- metadata_tool_guids.sort()
- if tool_guids == metadata_tool_guids:
- # We've found the repository_metadata record whose changeset_revision value has been updated.
- if from_update_manager:
- return update
- url += repository_metadata.changeset_revision
- # Get the ctx_rev for the discovered changeset_revision.
- latest_ctx = get_changectx_for_changeset( repo, repository_metadata.changeset_revision )
- found = True
- break
- if not found:
- # There must be a problem in the data, so we'll just send back the received changeset_revision.
- log.debug( "Possible data corruption - updated repository_metadata cannot be found for repository id %d." % repository.id )
- if from_update_manager:
- return no_update
- url += changeset_revision
- else:
- # There are no tools in the changeset_revision, so no tool updates are possible.
- if from_update_manager:
- return no_update
- url += changeset_revision
- try:
- shutil.rmtree( work_dir )
- except:
- pass
- url += '&latest_ctx_rev=%s' % str( latest_ctx.rev() )
- return trans.response.send_redirect( url )
- @web.expose
- def get_tool_dependencies( self, trans, **kwd ):
- # Handle a request from a local Galaxy instance. If the request originated with the Galaxy instances' InstallManager, the value of 'webapp'
- # will be 'install_manager'.
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
- # If the request originated with the UpdateManager, it will not include a galaxy_url.
- galaxy_url = kwd.get( 'galaxy_url', '' )
- name = params.get( 'name', None )
- owner = params.get( 'owner', None )
- changeset_revision = params.get( 'changeset_revision', None )
- webapp = params.get( 'webapp', 'community' )
- repository = get_repository_by_name_and_owner( trans, name, owner )
- for downloadable_revision in repository.downloadable_revisions:
- if downloadable_revision.changeset_revision == changeset_revision:
- break
- metadata = downloadable_revision.metadata
- tool_dependencies = metadata.get( 'tool_dependencies', '' )
- if webapp == 'install_manager':
- if tool_dependencies:
- return tool_shed_encode( tool_dependencies )
- return ''
- # TODO: future handler where request comes from some Galaxy admin feature.
- @web.expose
- def browse_repositories( self, trans, **kwd ):
- # We add params to the keyword dict in this method in order to rename the param
- # with an "f-" prefix, simulating filtering by clicking a search link. We have
- # to take this approach because the "-" character is illegal in HTTP requests.
- if 'webapp' not in kwd:
- kwd[ 'webapp' ] = 'community'
- if 'operation' in kwd:
- operation = kwd['operation'].lower()
- if operation == "view_or_manage_repository":
- repository_id = kwd[ 'id' ]
- repository = get_repository( trans, repository_id )
- is_admin = trans.user_is_admin()
- if is_admin or repository.user == trans.user:
- return trans.response.send_redirect( web.url_for( controller='repository',
- action='manage_repository',
- **kwd ) )
- else:
- return trans.response.send_redirect( web.url_for( controller='repository',
- action='view_repository',
- **kwd ) )
- elif operation == "edit_repository":
- return trans.response.send_redirect( web.url_for( controller='repository',
- action='edit_repository',
- **kwd ) )
- elif operation == "repositories_by_user":
- # Eliminate the current filters if any exist.
- for k, v in kwd.items():
- if k.startswith( 'f-' ):
- del kwd[ k ]
- if 'user_id' in kwd:
- user = get_user( trans, kwd[ 'user_id' ] )
- kwd[ 'f-email' ] = user.email
- del kwd[ 'user_id' ]
- else:
- # The received id is the repository id, so we need to get the id of the user
- # that uploaded the repository.
- repository_id = kwd.get( 'id', None )
- repository = get_repository( trans, repository_id )
- kwd[ 'f-email' ] = repository.user.email
- elif operation == "my_repositories":
- # Eliminate the current filters if any exist.
- for k, v in kwd.items():
- if k.startswith( 'f-' ):
- del kwd[ k ]
- kwd[ 'f-email' ] = trans.user.email
- elif operation == "repositories_by_category":
- # Eliminate the current filters if any exist.
- for k, v in kwd.items():
- if k.startswith( 'f-' ):
- del kwd[ k ]
- category_id = kwd.get( 'id', None )
- category = get_category( trans, category_id )
- kwd[ 'f-Category.name' ] = category.name
- elif operation == "receive email alerts":
- if trans.user:
- if kwd[ 'id' ]:
- kwd[ 'caller' ] = 'browse_repositories'
- return trans.response.send_redirect( web.url_for( controller='repository',
- action='set_email_alerts',
- **kwd ) )
- else:
- kwd[ 'message' ] = 'You must be logged in to set email alerts.'
- kwd[ 'status' ] = 'error'
- del kwd[ 'operation' ]
- # The changeset_revision_select_field in the RepositoryListGrid performs a refresh_on_change
- # which sends in request parameters like changeset_revison_1, changeset_revision_2, etc. One
- # of the many select fields on the grid performed the refresh_on_change, so we loop through
- # all of the received values to see which value is not the repository tip. If we find it, we
- # know the refresh_on_change occurred, and we have the necessary repository id and change set
- # revision to pass on.
- for k, v in kwd.items():
- changset_revision_str = 'changeset_revision_'
- if k.startswith( changset_revision_str ):
- repository_id = trans.security.encode_id( int( k.lstrip( changset_revision_str ) ) )
- repository = get_repository( trans, repository_id )
- if repository.tip != v:
- return trans.response.send_redirect( web.url_for( controller='repository',
- action='browse_repositories',
- operation='view_or_manage_repository',
- id=trans.security.encode_id( repository.id ),
- changeset_revision=v ) )
- # Render the list view
- return self.repository_list_grid( trans, **kwd )
- @web.expose
- def create_repository( self, trans, **kwd ):
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
- categories = get_categories( trans )
- if not categories:
- message = 'No categories have been configured in this instance of the Galaxy Tool Shed. ' + \
- 'An administrator needs to create some via the Administrator control panel before creating repositories.',
- status = 'error'
+ updating_tip=changeset_revision==repository.tip )
+ for invalid_file_tup in invalid_files:
+ invalid_tool_config, invalid_msg = invalid_file_tup
+ invalid_tool_config_name = strip_path( invalid_tool_config )
+ if tool_config == invalid_tool_config_name:
+ invalid_message = invalid_msg
+ break
+ tool, error_message = load_tool_from_changeset_revision( trans, repository_id, changeset_revision, tool_config )
+ #if error_message:
+ # message += error_message
+ tool_state = self.__new_state( trans )
+ is_malicious = changeset_is_malicious( trans, repository_id, repository.tip )
+ try:
+ shutil.rmtree( work_dir )
+ except:
+ pass
+ try:
+ if invalid_message:
+ message = invalid_message
+ return trans.fill_template( "/webapps/community/repository/tool_form.mako",
+ repository=repository,
+ changeset_revision=changeset_revision,
+ tool=tool,
+ tool_state=tool_state,
+ is_malicious=is_malicious,
+ webapp=webapp,
+ message=message,
+ status='error' )
+ except Exception, e:
+ message = "This tool is invalid because: %s." % str( e )
+ if webapp == 'galaxy':
return trans.response.send_redirect( web.url_for( controller='repository',
- action='browse_repositories',
+ action='preview_tools_in_changeset',
+ repository_id=repository_id,
+ changeset_revision=changeset_revision,
message=message,
- status=status ) )
- name = util.restore_text( params.get( 'name', '' ) )
- description = util.restore_text( params.get( 'description', '' ) )
- long_description = util.restore_text( params.get( 'long_description', '' ) )
- category_ids = util.listify( params.get( 'category_id', '' ) )
- selected_categories = [ trans.security.decode_id( id ) for id in category_ids ]
- if params.get( 'create_repository_button', False ):
- error = False
- message = self.__validate_repository_name( name, trans.user )
- if message:
- error = True
- if not description:
- message = 'Enter a description.'
- error = True
- if not error:
- # Add the repository record to the db
- repository = trans.app.model.Repository( name=name,
- description=description,
- long_description=long_description,
- user_id=trans.user.id )
- # Flush to get the id
- trans.sa_session.add( repository )
- trans.sa_session.flush()
- # Determine the repository's repo_path on disk
- dir = os.path.join( trans.app.config.file_path, *directory_hash_id( repository.id ) )
- # Create directory if it does not exist
- if not os.path.exists( dir ):
- os.makedirs( dir )
- # Define repo name inside hashed directory
- repository_path = os.path.join( dir, "repo_%d" % repository.id )
- # Create local repository directory
- if not os.path.exists( repository_path ):
- os.makedirs( repository_path )
- # Create the local repository
- repo = hg.repository( get_configured_ui(), repository_path, create=True )
- # Add an entry in the hgweb.config file for the local repository
- # This enables calls to repository.repo_path
- self.__add_hgweb_config_entry( trans, repository, repository_path )
- # Create a .hg/hgrc file for the local repository
- self.__create_hgrc_file( repository )
- flush_needed = False
- if category_ids:
- # Create category associations
- for category_id in category_ids:
- category = trans.app.model.Category.get( trans.security.decode_id( category_id ) )
- rca = trans.app.model.RepositoryCategoryAssociation( repository, category )
- trans.sa_session.add( rca )
- flush_needed = True
- if flush_needed:
- trans.sa_session.flush()
- message = "Repository '%s' has been created." % repository.name
- trans.response.send_redirect( web.url_for( controller='repository',
- action='view_repository',
- webapp='community',
- message=message,
- id=trans.security.encode_id( repository.id ) ) )
- return trans.fill_template( '/webapps/community/repository/create_repository.mako',
- name=name,
- description=description,
- long_description=long_description,
- selected_categories=selected_categories,
- categories=categories,
- message=message,
- status=status )
- def __validate_repository_name( self, name, user ):
- # Repository names must be unique for each user, must be at least four characters
- # in length and must contain only lower-case letters, numbers, and the '_' character.
- if name in [ 'None', None, '' ]:
- return 'Enter the required repository name.'
- for repository in user.active_repositories:
- if repository.name == name:
- return "You already have a repository named '%s', so choose a different name." % name
- if len( name ) < 4:
- return "Repository names must be at least 4 characters in length."
- if len( name ) > 80:
- return "Repository names cannot be more than 80 characters in length."
- if not( VALID_REPOSITORYNAME_RE.match( name ) ):
- return "Repository names must contain only lower-case letters, numbers and underscore '_'."
- return ''
+ status='error' ) )
+ return trans.response.send_redirect( web.url_for( controller='repository',
+ action='browse_repositories',
+ operation='view_or_manage_repository',
+ id=repository_id,
+ changeset_revision=changeset_revision,
+ message=message,
+ status='error' ) )
def __make_hgweb_config_copy( self, trans, hgweb_config ):
# Make a backup of the hgweb.config file
today = date.today()
backup_date = today.strftime( "%Y_%m_%d" )
hgweb_config_copy = '%s/hgweb.config_%s_backup' % ( trans.app.config.root, backup_date )
shutil.copy( os.path.abspath( hgweb_config ), os.path.abspath( hgweb_config_copy ) )
- def __add_hgweb_config_entry( self, trans, repository, repository_path ):
- # Add an entry in the hgweb.config file for a new repository. An entry looks something like:
- # repos/test/mira_assembler = database/community_files/000/repo_123.
- hgweb_config = "%s/hgweb.config" % trans.app.config.root
- if repository_path.startswith( './' ):
- repository_path = repository_path.replace( './', '', 1 )
- entry = "repos/%s/%s = %s" % ( repository.user.username, repository.name, repository_path )
- tmp_fd, tmp_fname = tempfile.mkstemp()
- if os.path.exists( hgweb_config ):
- # Make a backup of the hgweb.config file since we're going to be changing it.
- self.__make_hgweb_config_copy( trans, hgweb_config )
- new_hgweb_config = open( tmp_fname, 'wb' )
- for i, line in enumerate( open( hgweb_config ) ):
- new_hgweb_config.write( line )
- else:
- new_hgweb_config = open( tmp_fname, 'wb' )
- new_hgweb_config.write( '[paths]\n' )
- new_hgweb_config.write( "%s\n" % entry )
- new_hgweb_config.flush()
- shutil.move( tmp_fname, os.path.abspath( hgweb_config ) )
- def __change_hgweb_config_entry( self, trans, repository, old_repository_name, new_repository_name ):
- # Change an entry in the hgweb.config file for a repository. This only happens when
- # the owner changes the name of the repository. An entry looks something like:
- # repos/test/mira_assembler = database/community_files/000/repo_123.
- hgweb_config = "%s/hgweb.config" % trans.app.config.root
- # Make a backup of the hgweb.config file since we're going to be changing it.
- self.__make_hgweb_config_copy( trans, hgweb_config )
- repo_dir = repository.repo_path
- old_lhs = "repos/%s/%s" % ( repository.user.username, old_repository_name )
- new_entry = "repos/%s/%s = %s\n" % ( repository.user.username, new_repository_name, repo_dir )
- tmp_fd, tmp_fname = tempfile.mkstemp()
- new_hgweb_config = open( tmp_fname, 'wb' )
- for i, line in enumerate( open( hgweb_config ) ):
- if line.startswith( old_lhs ):
- new_hgweb_config.write( new_entry )
- else:
- new_hgweb_config.write( line )
- new_hgweb_config.flush()
- shutil.move( tmp_fname, os.path.abspath( hgweb_config ) )
- def __create_hgrc_file( self, repository ):
- # At this point, an entry for the repository is required to be in the hgweb.config file so we can call repository.repo_path.
- # Since we support both http and https, we set push_ssl to False to override the default (which is True) in the mercurial api.
- # The hg purge extension purges all files and directories not being tracked by mercurial in the current repository. It'll
- # remove unknown files and empty directories. This is not currently used because it is not supported in the mercurial API.
- repo = hg.repository( get_configured_ui(), path=repository.repo_path )
- fp = repo.opener( 'hgrc', 'wb' )
- fp.write( '[paths]\n' )
- fp.write( 'default = .\n' )
- fp.write( 'default-push = .\n' )
- fp.write( '[web]\n' )
- fp.write( 'allow_push = %s\n' % repository.user.username )
- fp.write( 'name = %s\n' % repository.name )
- fp.write( 'push_ssl = false\n' )
- fp.write( '[extensions]\n' )
- fp.write( 'hgext.purge=' )
- fp.close()
+ def __make_same_length( self, list1, list2 ):
+ # If either list is 1 item, we'll append to it until its length is the same as the other.
+ if len( list1 ) == 1:
+ for i in range( 1, len( list2 ) ):
+ list1.append( list1[ 0 ] )
+ elif len( list2 ) == 1:
+ for i in range( 1, len( list1 ) ):
+ list2.append( list2[ 0 ] )
+ return list1, list2
@web.expose
- def browse_repository( self, trans, id, **kwd ):
+ @web.require_login( "manage email alerts" )
+ def manage_email_alerts( self, trans, **kwd ):
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
- webapp = params.get( 'webapp', 'community' )
- commit_message = util.restore_text( params.get( 'commit_message', 'Deleted selected files' ) )
- repository = get_repository( trans, id )
- repo = hg.repository( get_configured_ui(), repository.repo_path )
- current_working_dir = os.getcwd()
- # Update repository files for browsing.
- update_repository( repo )
- is_malicious = changeset_is_malicious( trans, id, repository.tip )
- return trans.fill_template( '/webapps/community/repository/browse_repository.mako',
- repo=repo,
- repository=repository,
- commit_message=commit_message,
- is_malicious=is_malicious,
- webapp=webapp,
- message=message,
- status=status )
- @web.expose
- def contact_owner( self, trans, id, **kwd ):
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
- repository = get_repository( trans, id )
- if trans.user and trans.user.email:
- return trans.fill_template( "/webapps/community/repository/contact_owner.mako",
- repository=repository,
- message=message,
- status=status )
- else:
- # Do all we can to eliminate spam.
- return trans.show_error_message( "You must be logged in to contact the owner of a repository." )
- @web.expose
- def send_to_owner( self, trans, id, message='' ):
- repository = get_repository( trans, id )
- if not message:
- message = 'Enter a message'
- status = 'error'
- elif trans.user and trans.user.email:
- smtp_server = trans.app.config.smtp_server
- from_address = trans.app.config.email_from
- if smtp_server is None or from_address is None:
- return trans.show_error_message( "Mail is not configured for this Galaxy tool shed instance" )
- to_address = repository.user.email
- # Get the name of the server hosting the tool shed instance.
- host = trans.request.host
- # Build the email message
- body = string.Template( contact_owner_template ) \
- .safe_substitute( username=trans.user.username,
- repository_name=repository.name,
- email=trans.user.email,
- message=message,
- host=host )
- subject = "Regarding your tool shed repository named %s" % repository.name
- # Send it
- try:
- util.send_mail( from_address, to_address, subject, body, trans.app.config )
- message = "Your message has been sent"
- status = "done"
- except Exception, e:
- message = "An error occurred sending your message by email: %s" % str( e )
- status = "error"
- else:
- # Do all we can to eliminate spam.
- return trans.show_error_message( "You must be logged in to contact the owner of a repository." )
- return trans.response.send_redirect( web.url_for( controller='repository',
- action='contact_owner',
- id=id,
- message=message,
- status=status ) )
- @web.expose
- def select_files_to_delete( self, trans, id, **kwd ):
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
- commit_message = util.restore_text( params.get( 'commit_message', 'Deleted selected files' ) )
- repository = get_repository( trans, id )
- repo_dir = repository.repo_path
- repo = hg.repository( get_configured_ui(), repo_dir )
- selected_files_to_delete = util.restore_text( params.get( 'selected_files_to_delete', '' ) )
- if params.get( 'select_files_to_delete_button', False ):
- if selected_files_to_delete:
- selected_files_to_delete = selected_files_to_delete.split( ',' )
- current_working_dir = os.getcwd()
- # Get the current repository tip.
- tip = repository.tip
- for selected_file in selected_files_to_delete:
- try:
- commands.remove( repo.ui, repo, selected_file, force=True )
- except Exception, e:
- log.debug( "Error removing files using the mercurial API, so trying a different approach, the error was: %s" % str( e ))
- relative_selected_file = selected_file.split( 'repo_%d' % repository.id )[1].lstrip( '/' )
- repo.dirstate.remove( relative_selected_file )
- repo.dirstate.write()
- absolute_selected_file = os.path.abspath( selected_file )
- if os.path.isdir( absolute_selected_file ):
- try:
- os.rmdir( absolute_selected_file )
- except OSError, e:
- # The directory is not empty
- pass
- elif os.path.isfile( absolute_selected_file ):
- os.remove( absolute_selected_file )
- dir = os.path.split( absolute_selected_file )[0]
- try:
- os.rmdir( dir )
- except OSError, e:
- # The directory is not empty
- pass
- # Commit the change set.
- if not commit_message:
- commit_message = 'Deleted selected files'
- commands.commit( repo.ui, repo, repo_dir, user=trans.user.username, message=commit_message )
- handle_email_alerts( trans, repository )
- # Update the repository files for browsing.
- update_repository( repo )
- # Get the new repository tip.
- repo = hg.repository( get_configured_ui(), repo_dir )
- if tip == repository.tip:
- message += 'No changes to repository. '
- kwd[ 'message' ] = message
-
- else:
- message += 'The selected files were deleted from the repository. '
- kwd[ 'message' ] = message
- set_repository_metadata_due_to_new_tip( trans, id, repository, **kwd )
+ new_repo_alert = params.get( 'new_repo_alert', '' )
+ new_repo_alert_checked = CheckboxField.is_checked( new_repo_alert )
+ user = trans.user
+ if params.get( 'new_repo_alert_button', False ):
+ user.new_repo_alert = new_repo_alert_checked
+ trans.sa_session.add( user )
+ trans.sa_session.flush()
+ if new_repo_alert_checked:
+ message = 'You will receive email alerts for all new valid tool shed repositories.'
else:
- message = "Select at least 1 file to delete from the repository before clicking <b>Delete selected files</b>."
- status = "error"
- is_malicious = changeset_is_malicious( trans, id, repository.tip )
- return trans.fill_template( '/webapps/community/repository/browse_repository.mako',
- repo=repo,
- repository=repository,
- commit_message=commit_message,
- is_malicious=is_malicious,
- message=message,
- status=status )
- @web.expose
- def view_repository( self, trans, id, **kwd ):
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
- repository = get_repository( trans, id )
- webapp = params.get( 'webapp', 'community' )
- repo = hg.repository( get_configured_ui(), repository.repo_path )
- avg_rating, num_ratings = self.get_ave_item_rating_data( trans.sa_session, repository, webapp_model=trans.model )
- changeset_revision = util.restore_text( params.get( 'changeset_revision', repository.tip ) )
- display_reviews = util.string_as_bool( params.get( 'display_reviews', False ) )
- alerts = params.get( 'alerts', '' )
- alerts_checked = CheckboxField.is_checked( alerts )
- if repository.email_alerts:
- email_alerts = from_json_string( repository.email_alerts )
- else:
- email_alerts = []
- user = trans.user
- if user and params.get( 'receive_email_alerts_button', False ):
- flush_needed = False
- if alerts_checked:
- if user.email not in email_alerts:
- email_alerts.append( user.email )
- repository.email_alerts = to_json_string( email_alerts )
- flush_needed = True
- else:
- if user.email in email_alerts:
- email_alerts.remove( user.email )
- repository.email_alerts = to_json_string( email_alerts )
- flush_needed = True
- if flush_needed:
- trans.sa_session.add( repository )
- trans.sa_session.flush()
- checked = alerts_checked or ( user and user.email in email_alerts )
- alerts_check_box = CheckboxField( 'alerts', checked=checked )
- changeset_revision_select_field = build_changeset_revision_select_field( trans,
- repository,
- selected_value=changeset_revision,
- add_id_to_name=False )
- revision_label = get_revision_label( trans, repository, changeset_revision )
- repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
- if repository_metadata:
- repository_metadata_id = trans.security.encode_id( repository_metadata.id ),
- metadata = repository_metadata.metadata
- else:
- repository_metadata_id = None
- metadata = None
- is_malicious = changeset_is_malicious( trans, id, repository.tip )
- if is_malicious:
- if trans.app.security_agent.can_push( trans.user, repository ):
- message += malicious_error_can_push
- else:
- message += malicious_error
- status = 'error'
- return trans.fill_template( '/webapps/community/repository/view_repository.mako',
- repo=repo,
- repository=repository,
- repository_metadata_id=repository_metadata_id,
- metadata=metadata,
- avg_rating=avg_rating,
- display_reviews=display_reviews,
- num_ratings=num_ratings,
- alerts_check_box=alerts_check_box,
- changeset_revision=changeset_revision,
- changeset_revision_select_field=changeset_revision_select_field,
- revision_label=revision_label,
- is_malicious=is_malicious,
- webapp=webapp,
+ message = 'You will not receive any email alerts for new valid tool shed repositories.'
+ checked = new_repo_alert_checked or ( user and user.new_repo_alert )
+ new_repo_alert_check_box = CheckboxField( 'new_repo_alert', checked=checked )
+ email_alert_repositories = []
+ for repository in trans.sa_session.query( trans.model.Repository ) \
+ .filter( and_( trans.model.Repository.table.c.deleted == False,
+ trans.model.Repository.table.c.email_alerts != None ) ) \
+ .order_by( trans.model.Repository.table.c.name ):
+ if user.email in repository.email_alerts:
+ email_alert_repositories.append( repository )
+ return trans.fill_template( "/webapps/community/user/manage_email_alerts.mako",
+ webapp='community',
+ new_repo_alert_check_box=new_repo_alert_check_box,
+ email_alert_repositories=email_alert_repositories,
message=message,
status=status )
@web.expose
@@ -1621,6 +1514,429 @@
message=message,
status=status )
@web.expose
+ @web.require_login( "multi select email alerts" )
+ def multi_select_email_alerts( self, trans, **kwd ):
+ params = util.Params( kwd )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
+ if 'webapp' not in kwd:
+ kwd[ 'webapp' ] = 'community'
+ if 'operation' in kwd:
+ operation = kwd['operation'].lower()
+ if operation == "receive email alerts":
+ if trans.user:
+ if kwd[ 'id' ]:
+ kwd[ 'caller' ] = 'multi_select_email_alerts'
+ return trans.response.send_redirect( web.url_for( controller='repository',
+ action='set_email_alerts',
+ **kwd ) )
+ else:
+ kwd[ 'message' ] = 'You must be logged in to set email alerts.'
+ kwd[ 'status' ] = 'error'
+ del kwd[ 'operation' ]
+ return self.email_alerts_repository_list_grid( trans, **kwd )
+ def __new_state( self, trans, all_pages=False ):
+ """
+ Create a new `DefaultToolState` for this tool. It will not be initialized
+ with default values for inputs.
+
+ Only inputs on the first page will be initialized unless `all_pages` is
+ True, in which case all inputs regardless of page are initialized.
+ """
+ state = DefaultToolState()
+ state.inputs = {}
+ return state
+ @web.json
+ def open_folder( self, trans, repository_id, key ):
+ # The tool shed includes a repository source file browser, which currently depends upon
+ # copies of the hg repository file store in the repo_path for browsing.
+ # Avoid caching
+ trans.response.headers['Pragma'] = 'no-cache'
+ trans.response.headers['Expires'] = '0'
+ repository = trans.sa_session.query( trans.model.Repository ).get( trans.security.decode_id( repository_id ) )
+ folder_path = key
+ try:
+ files_list = self.__get_files( trans, folder_path )
+ except OSError, e:
+ if str( e ).find( 'No such file or directory' ) >= 0:
+ # We have a repository with no contents.
+ return []
+ folder_contents = []
+ for filename in files_list:
+ is_folder = False
+ if filename and filename[-1] == os.sep:
+ is_folder = True
+ if filename:
+ full_path = os.path.join( folder_path, filename )
+ node = { "title": filename,
+ "isFolder": is_folder,
+ "isLazy": is_folder,
+ "tooltip": full_path,
+ "key": full_path }
+ folder_contents.append( node )
+ return folder_contents
+ @web.expose
+ def preview_tools_in_changeset( self, trans, repository_id, **kwd ):
+ params = util.Params( kwd )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
+ webapp = params.get( 'webapp', 'community' )
+ repository = get_repository( trans, repository_id )
+ changeset_revision = util.restore_text( params.get( 'changeset_revision', repository.tip ) )
+ repository_metadata = get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision )
+ if repository_metadata:
+            repository_metadata_id = trans.security.encode_id( repository_metadata.id )
+ metadata = repository_metadata.metadata
+ else:
+ repository_metadata_id = None
+ metadata = None
+ revision_label = get_revision_label( trans, repository, changeset_revision )
+ changeset_revision_select_field = build_changeset_revision_select_field( trans,
+ repository,
+ selected_value=changeset_revision,
+ add_id_to_name=False )
+ return trans.fill_template( '/webapps/community/repository/preview_tools_in_changeset.mako',
+ repository=repository,
+ repository_metadata_id=repository_metadata_id,
+ changeset_revision=changeset_revision,
+ revision_label=revision_label,
+ changeset_revision_select_field=changeset_revision_select_field,
+ metadata=metadata,
+ webapp=webapp,
+ message=message,
+ status=status )
+ @web.expose
+ @web.require_login( "rate repositories" )
+ def rate_repository( self, trans, **kwd ):
+ """ Rate a repository and return updated rating data. """
+ params = util.Params( kwd )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
+ id = params.get( 'id', None )
+ if not id:
+ return trans.response.send_redirect( web.url_for( controller='repository',
+ action='browse_repositories',
+ message='Select a repository to rate',
+ status='error' ) )
+ repository = get_repository( trans, id )
+ repo = hg.repository( get_configured_ui(), repository.repo_path )
+ if repository.user == trans.user:
+ return trans.response.send_redirect( web.url_for( controller='repository',
+ action='browse_repositories',
+ message="You are not allowed to rate your own repository",
+ status='error' ) )
+ if params.get( 'rate_button', False ):
+ rating = int( params.get( 'rating', '0' ) )
+ comment = util.restore_text( params.get( 'comment', '' ) )
+ rating = self.rate_item( trans, trans.user, repository, rating, comment )
+ avg_rating, num_ratings = self.get_ave_item_rating_data( trans.sa_session, repository, webapp_model=trans.model )
+ display_reviews = util.string_as_bool( params.get( 'display_reviews', False ) )
+ rra = self.get_user_item_rating( trans.sa_session, trans.user, repository, webapp_model=trans.model )
+ is_malicious = changeset_is_malicious( trans, id, repository.tip )
+ return trans.fill_template( '/webapps/community/repository/rate_repository.mako',
+ repository=repository,
+ avg_rating=avg_rating,
+ display_reviews=display_reviews,
+ num_ratings=num_ratings,
+ rra=rra,
+ is_malicious=is_malicious,
+ message=message,
+ status=status )
+ @web.expose
+ def reset_all_metadata( self, trans, id, **kwd ):
+ error_message, status = reset_all_metadata_on_repository( trans, id, **kwd )
+ if error_message:
+ message = error_message
+ status = 'error'
+ else:
+ message = "All repository metadata has been reset."
+ status = 'done'
+ return trans.response.send_redirect( web.url_for( controller='repository',
+ action='manage_repository',
+ id=id,
+ message=message,
+ status=status ) )
+ def __search_ids_names( self, tool_dict, exact_matches_checked, match_tuples, repository_metadata, tool_ids, tool_names ):
+ for i, tool_id in enumerate( tool_ids ):
+ tool_name = tool_names[ i ]
+ if self.__in_tool_dict( tool_dict, exact_matches_checked, tool_id=tool_id, tool_name=tool_name ):
+ match_tuples.append( ( repository_metadata.repository_id, repository_metadata.changeset_revision ) )
+ return match_tuples
+ def __search_ids_versions( self, tool_dict, exact_matches_checked, match_tuples, repository_metadata, tool_ids, tool_versions ):
+ for i, tool_id in enumerate( tool_ids ):
+ tool_version = tool_versions[ i ]
+ if self.__in_tool_dict( tool_dict, exact_matches_checked, tool_id=tool_id, tool_version=tool_version ):
+ match_tuples.append( ( repository_metadata.repository_id, repository_metadata.changeset_revision ) )
+ return match_tuples
+ def __search_names_versions( self, tool_dict, exact_matches_checked, match_tuples, repository_metadata, tool_names, tool_versions ):
+ for i, tool_name in enumerate( tool_names ):
+ tool_version = tool_versions[ i ]
+ if self.__in_tool_dict( tool_dict, exact_matches_checked, tool_name=tool_name, tool_version=tool_version ):
+ match_tuples.append( ( repository_metadata.repository_id, repository_metadata.changeset_revision ) )
+ return match_tuples
+ def __search_repository_metadata( self, trans, exact_matches_checked, tool_ids='', tool_names='', tool_versions='', workflow_names='', all_workflows=False ):
+ match_tuples = []
+ ok = True
+ for repository_metadata in trans.sa_session.query( model.RepositoryMetadata ):
+ metadata = repository_metadata.metadata
+ if tool_ids or tool_names or tool_versions:
+ if 'tools' in metadata:
+ tools = metadata[ 'tools' ]
+ else:
+ tools = []
+ for tool_dict in tools:
+ if tool_ids and not tool_names and not tool_versions:
+ for tool_id in tool_ids:
+ if self.__in_tool_dict( tool_dict, exact_matches_checked, tool_id=tool_id ):
+ match_tuples.append( ( repository_metadata.repository_id, repository_metadata.changeset_revision ) )
+ elif tool_names and not tool_ids and not tool_versions:
+ for tool_name in tool_names:
+ if self.__in_tool_dict( tool_dict, exact_matches_checked, tool_name=tool_name ):
+ match_tuples.append( ( repository_metadata.repository_id, repository_metadata.changeset_revision ) )
+ elif tool_versions and not tool_ids and not tool_names:
+ for tool_version in tool_versions:
+ if self.__in_tool_dict( tool_dict, exact_matches_checked, tool_version=tool_version ):
+ match_tuples.append( ( repository_metadata.repository_id, repository_metadata.changeset_revision ) )
+ elif tool_ids and tool_names and not tool_versions:
+ if len( tool_ids ) == len( tool_names ):
+ match_tuples = self.__search_ids_names( tool_dict, exact_matches_checked, match_tuples, repository_metadata, tool_ids, tool_names )
+ elif len( tool_ids ) == 1 or len( tool_names ) == 1:
+ tool_ids, tool_names = self.__make_same_length( tool_ids, tool_names )
+ match_tuples = self.__search_ids_names( tool_dict, exact_matches_checked, match_tuples, repository_metadata, tool_ids, tool_names )
+ else:
+ ok = False
+ elif tool_ids and tool_versions and not tool_names:
+ if len( tool_ids ) == len( tool_versions ):
+ match_tuples = self.__search_ids_versions( tool_dict, exact_matches_checked, match_tuples, repository_metadata, tool_ids, tool_versions )
+ elif len( tool_ids ) == 1 or len( tool_versions ) == 1:
+ tool_ids, tool_versions = self.__make_same_length( tool_ids, tool_versions )
+ match_tuples = self.__search_ids_versions( tool_dict, exact_matches_checked, match_tuples, repository_metadata, tool_ids, tool_versions )
+ else:
+ ok = False
+ elif tool_versions and tool_names and not tool_ids:
+ if len( tool_versions ) == len( tool_names ):
+ match_tuples = self.__search_names_versions( tool_dict, exact_matches_checked, match_tuples, repository_metadata, tool_names, tool_versions )
+ elif len( tool_versions ) == 1 or len( tool_names ) == 1:
+ tool_versions, tool_names = self.__make_same_length( tool_versions, tool_names )
+ match_tuples = self.__search_names_versions( tool_dict, exact_matches_checked, match_tuples, repository_metadata, tool_names, tool_versions )
+ else:
+ ok = False
+ elif tool_versions and tool_names and tool_ids:
+ if len( tool_versions ) == len( tool_names ) and len( tool_names ) == len( tool_ids ):
+ for i, tool_version in enumerate( tool_versions ):
+ tool_name = tool_names[ i ]
+ tool_id = tool_ids[ i ]
+ if self.__in_tool_dict( tool_dict, exact_matches_checked, tool_id=tool_id, tool_name=tool_name, tool_version=tool_version ):
+ match_tuples.append( ( repository_metadata.repository_id, repository_metadata.changeset_revision ) )
+ else:
+ ok = False
+ elif workflow_names:
+ if 'workflows' in metadata:
+ # metadata[ 'workflows' ] is a list of tuples where each contained tuple is
+ # [ <relative path to the .ga file in the repository>, <exported workflow dict> ]
+ workflow_tups = metadata[ 'workflows' ]
+ workflows = [ workflow_tup[1] for workflow_tup in workflow_tups ]
+ else:
+ workflows = []
+ for workflow_dict in workflows:
+ for workflow_name in workflow_names:
+ if self.__in_workflow_dict( workflow_dict, exact_matches_checked, workflow_name ):
+ match_tuples.append( ( repository_metadata.repository_id, repository_metadata.changeset_revision ) )
+ elif all_workflows and 'workflows' in metadata:
+ match_tuples.append( ( repository_metadata.repository_id, repository_metadata.changeset_revision ) )
+ return ok, match_tuples
+ @web.expose
+ def select_files_to_delete( self, trans, id, **kwd ):
+ params = util.Params( kwd )
+ message = util.restore_text( params.get( 'message', '' ) )
+ status = params.get( 'status', 'done' )
+ commit_message = util.restore_text( params.get( 'commit_message', 'Deleted selected files' ) )
+ repository = get_repository( trans, id )
+ repo_dir = repository.repo_path
+ repo = hg.repository( get_configured_ui(), repo_dir )
+ selected_files_to_delete = util.restore_text( params.get( 'selected_files_to_delete', '' ) )
+ if params.get( 'select_files_to_delete_button', False ):
+ if selected_files_to_delete:
+ selected_files_to_delete = selected_files_to_delete.split( ',' )
+ current_working_dir = os.getcwd()
+ # Get the current repository tip.
+ tip = repository.tip
+ for selected_file in selected_files_to_delete:
+ try:
+ commands.remove( repo.ui, repo, selected_file, force=True )
+ except Exception, e:
+ log.debug( "Error removing files using the mercurial API, so trying a different approach, the error was: %s" % str( e ))
+ relative_selected_file = selected_file.split( 'repo_%d' % repository.id )[1].lstrip( '/' )
+ repo.dirstate.remove( relative_selected_file )
+ repo.dirstate.write()
+ absolute_selected_file = os.path.abspath( selected_file )
+ if os.path.isdir( absolute_selected_file ):
+ try:
+ os.rmdir( absolute_selected_file )
+ except OSError, e:
+ # The directory is not empty
+ pass
+ elif os.path.isfile( absolute_selected_file ):
+ os.remove( absolute_selected_file )
+ dir = os.path.split( absolute_selected_file )[0]
+ try:
+ os.rmdir( dir )
+ except OSError, e:
+ # The directory is not empty
+ pass
+ # Commit the change set.
+ if not commit_message:
+ commit_message = 'Deleted selected files'
+ commands.commit( repo.ui, repo, repo_dir, user=trans.user.username, message=commit_message )
+ handle_email_alerts( trans, repository )
+ # Update the repository files for browsing.
+ update_repository( repo )
+ # Get the new repository tip.
+ repo = hg.repository( get_configured_ui(), repo_dir )
+ if tip == repository.tip:
+ message += 'No changes to repository. '
+ kwd[ 'message' ] = message
+
+ else:
+ message += 'The selected files were deleted from the repository. '
+ kwd[ 'message' ] = message
+ set_repository_metadata_due_to_new_tip( trans, id, repository, **kwd )
+ else:
+ message = "Select at least 1 file to delete from the repository before clicking <b>Delete selected files</b>."
+ status = "error"
+ is_malicious = changeset_is_malicious( trans, id, repository.tip )
+ return trans.fill_template( '/webapps/community/repository/browse_repository.mako',
+ repo=repo,
+ repository=repository,
+ commit_message=commit_message,
+ is_malicious=is_malicious,
+ message=message,
+ status=status )
+ @web.expose
+ def send_to_owner( self, trans, id, message='' ):
+ repository = get_repository( trans, id )
+ if not message:
+ message = 'Enter a message'
+ status = 'error'
+ elif trans.user and trans.user.email:
+ smtp_server = trans.app.config.smtp_server
+ from_address = trans.app.config.email_from
+ if smtp_server is None or from_address is None:
+ return trans.show_error_message( "Mail is not configured for this Galaxy tool shed instance" )
+ to_address = repository.user.email
+ # Get the name of the server hosting the tool shed instance.
+ host = trans.request.host
+ # Build the email message
+ body = string.Template( contact_owner_template ) \
+ .safe_substitute( username=trans.user.username,
+ repository_name=repository.name,
+ email=trans.user.email,
+ message=message,
+ host=host )
+ subject = "Regarding your tool shed repository named %s" % repository.name
+ # Send it
+ try:
+ util.send_mail( from_address, to_address, subject, body, trans.app.config )
+ message = "Your message has been sent"
+ status = "done"
+ except Exception, e:
+ message = "An error occurred sending your message by email: %s" % str( e )
+ status = "error"
+ else:
+ # Do all we can to eliminate spam.
+ return trans.show_error_message( "You must be logged in to contact the owner of a repository." )
+ return trans.response.send_redirect( web.url_for( controller='repository',
+ action='contact_owner',
+ id=id,
+ message=message,
+ status=status ) )
+ @web.expose
+ @web.require_login( "set email alerts" )
+ def set_email_alerts( self, trans, **kwd ):
+ # Set email alerts for selected repositories
+ # This method is called from multiple grids, so
+ # the caller must be passed.
+ caller = kwd[ 'caller' ]
+ user = trans.user
+ if user:
+ repository_ids = util.listify( kwd.get( 'id', '' ) )
+ total_alerts_added = 0
+ total_alerts_removed = 0
+ flush_needed = False
+ for repository_id in repository_ids:
+ repository = get_repository( trans, repository_id )
+ if repository.email_alerts:
+ email_alerts = from_json_string( repository.email_alerts )
+ else:
+ email_alerts = []
+ if user.email in email_alerts:
+ email_alerts.remove( user.email )
+ repository.email_alerts = to_json_string( email_alerts )
+ trans.sa_session.add( repository )
+ flush_needed = True
+ total_alerts_removed += 1
+ else:
+ email_alerts.append( user.email )
+ repository.email_alerts = to_json_string( email_alerts )
+ trans.sa_session.add( repository )
+ flush_needed = True
+ total_alerts_added += 1
+ if flush_needed:
+ trans.sa_session.flush()
+ message = 'Total alerts added: %d, total alerts removed: %d' % ( total_alerts_added, total_alerts_removed )
+ kwd[ 'message' ] = message
+ kwd[ 'status' ] = 'done'
+ del kwd[ 'operation' ]
+ return trans.response.send_redirect( web.url_for( controller='repository',
+ action=caller,
+ **kwd ) )
+ @web.expose
+ @web.require_login( "set repository metadata" )
+ def set_metadata( self, trans, id, ctx_str, **kwd ):
+ malicious = kwd.get( 'malicious', '' )
+ if kwd.get( 'malicious_button', False ):
+ repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, ctx_str )
+ malicious_checked = CheckboxField.is_checked( malicious )
+ repository_metadata.malicious = malicious_checked
+ trans.sa_session.add( repository_metadata )
+ trans.sa_session.flush()
+ if malicious_checked:
+ message = "The repository tip has been defined as malicious."
+ else:
+ message = "The repository tip has been defined as <b>not</b> malicious."
+ status = 'done'
+ else:
+ # The set_metadata_button was clicked
+ message, status = set_repository_metadata( trans, id, ctx_str, **kwd )
+ if not message:
+ message = "Metadata for change set revision '%s' has been reset." % str( ctx_str )
+ return trans.response.send_redirect( web.url_for( controller='repository',
+ action='manage_repository',
+ id=id,
+ changeset_revision=ctx_str,
+ malicious=malicious,
+ message=message,
+ status=status ) )
+ def __stringify( self, list ):
+ if list:
+ return ','.join( list )
+ return ''
+ def __validate_repository_name( self, name, user ):
+ # Repository names must be unique for each user, must be at least four characters
+ # in length and must contain only lower-case letters, numbers, and the '_' character.
+ if name in [ 'None', None, '' ]:
+ return 'Enter the required repository name.'
+ for repository in user.active_repositories:
+ if repository.name == name:
+ return "You already have a repository named '%s', so choose a different name." % name
+ if len( name ) < 4:
+ return "Repository names must be at least 4 characters in length."
+ if len( name ) > 80:
+ return "Repository names cannot be more than 80 characters in length."
+ if not( VALID_REPOSITORYNAME_RE.match( name ) ):
+ return "Repository names must contain only lower-case letters, numbers and underscore '_'."
+ return ''
+ @web.expose
def view_changelog( self, trans, id, **kwd ):
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
@@ -1694,291 +2010,76 @@
message=message,
status=status )
@web.expose
- @web.require_login( "rate repositories" )
- def rate_repository( self, trans, **kwd ):
- """ Rate a repository and return updated rating data. """
+ def view_repository( self, trans, id, **kwd ):
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
- id = params.get( 'id', None )
- if not id:
- return trans.response.send_redirect( web.url_for( controller='repository',
- action='browse_repositories',
- message='Select a repository to rate',
- status='error' ) )
repository = get_repository( trans, id )
+ webapp = params.get( 'webapp', 'community' )
repo = hg.repository( get_configured_ui(), repository.repo_path )
- if repository.user == trans.user:
- return trans.response.send_redirect( web.url_for( controller='repository',
- action='browse_repositories',
- message="You are not allowed to rate your own repository",
- status='error' ) )
- if params.get( 'rate_button', False ):
- rating = int( params.get( 'rating', '0' ) )
- comment = util.restore_text( params.get( 'comment', '' ) )
- rating = self.rate_item( trans, trans.user, repository, rating, comment )
avg_rating, num_ratings = self.get_ave_item_rating_data( trans.sa_session, repository, webapp_model=trans.model )
+ changeset_revision = util.restore_text( params.get( 'changeset_revision', repository.tip ) )
display_reviews = util.string_as_bool( params.get( 'display_reviews', False ) )
- rra = self.get_user_item_rating( trans.sa_session, trans.user, repository, webapp_model=trans.model )
+ alerts = params.get( 'alerts', '' )
+ alerts_checked = CheckboxField.is_checked( alerts )
+ if repository.email_alerts:
+ email_alerts = from_json_string( repository.email_alerts )
+ else:
+ email_alerts = []
+ user = trans.user
+ if user and params.get( 'receive_email_alerts_button', False ):
+ flush_needed = False
+ if alerts_checked:
+ if user.email not in email_alerts:
+ email_alerts.append( user.email )
+ repository.email_alerts = to_json_string( email_alerts )
+ flush_needed = True
+ else:
+ if user.email in email_alerts:
+ email_alerts.remove( user.email )
+ repository.email_alerts = to_json_string( email_alerts )
+ flush_needed = True
+ if flush_needed:
+ trans.sa_session.add( repository )
+ trans.sa_session.flush()
+ checked = alerts_checked or ( user and user.email in email_alerts )
+ alerts_check_box = CheckboxField( 'alerts', checked=checked )
+ changeset_revision_select_field = build_changeset_revision_select_field( trans,
+ repository,
+ selected_value=changeset_revision,
+ add_id_to_name=False )
+ revision_label = get_revision_label( trans, repository, changeset_revision )
+ repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
+ if repository_metadata:
+            repository_metadata_id = trans.security.encode_id( repository_metadata.id )
+ metadata = repository_metadata.metadata
+ else:
+ repository_metadata_id = None
+ metadata = None
is_malicious = changeset_is_malicious( trans, id, repository.tip )
- return trans.fill_template( '/webapps/community/repository/rate_repository.mako',
+ if is_malicious:
+ if trans.app.security_agent.can_push( trans.user, repository ):
+ message += malicious_error_can_push
+ else:
+ message += malicious_error
+ status = 'error'
+ return trans.fill_template( '/webapps/community/repository/view_repository.mako',
+ repo=repo,
repository=repository,
+ repository_metadata_id=repository_metadata_id,
+ metadata=metadata,
avg_rating=avg_rating,
display_reviews=display_reviews,
num_ratings=num_ratings,
- rra=rra,
+ alerts_check_box=alerts_check_box,
+ changeset_revision=changeset_revision,
+ changeset_revision_select_field=changeset_revision_select_field,
+ revision_label=revision_label,
is_malicious=is_malicious,
+ webapp=webapp,
message=message,
status=status )
@web.expose
@web.require_login( "set email alerts" )
def set_email_alerts( self, trans, **kwd ):
    """Toggle email alerts on the selected repositories for the current user.

    This action is invoked from multiple grids, so the originating grid
    action must be supplied in kwd[ 'caller' ] so we can redirect back to it.
    """
    caller = kwd[ 'caller' ]
    user = trans.user
    if user:
        total_alerts_added = 0
        total_alerts_removed = 0
        flush_needed = False
        for repository_id in util.listify( kwd.get( 'id', '' ) ):
            repository = get_repository( trans, repository_id )
            # email_alerts is stored as a JSON-encoded list of addresses.
            email_alerts = from_json_string( repository.email_alerts ) if repository.email_alerts else []
            if user.email in email_alerts:
                email_alerts.remove( user.email )
                total_alerts_removed += 1
            else:
                email_alerts.append( user.email )
                total_alerts_added += 1
            repository.email_alerts = to_json_string( email_alerts )
            trans.sa_session.add( repository )
            flush_needed = True
        if flush_needed:
            trans.sa_session.flush()
        kwd[ 'message' ] = 'Total alerts added: %d, total alerts removed: %d' % ( total_alerts_added, total_alerts_removed )
        kwd[ 'status' ] = 'done'
    del kwd[ 'operation' ]
    return trans.response.send_redirect( web.url_for( controller='repository',
                                                      action=caller,
                                                      **kwd ) )
@web.expose
@web.require_login( "manage email alerts" )
def manage_email_alerts( self, trans, **kwd ):
    """Render the page where a user manages new-repository email alerts."""
    params = util.Params( kwd )
    message = util.restore_text( params.get( 'message', '' ) )
    status = params.get( 'status', 'done' )
    user = trans.user
    new_repo_alert = params.get( 'new_repo_alert', '' )
    new_repo_alert_checked = CheckboxField.is_checked( new_repo_alert )
    if params.get( 'new_repo_alert_button', False ):
        # Persist the user's preference for alerts on new repositories.
        user.new_repo_alert = new_repo_alert_checked
        trans.sa_session.add( user )
        trans.sa_session.flush()
        if new_repo_alert_checked:
            message = 'You will receive email alerts for all new valid tool shed repositories.'
        else:
            message = 'You will not receive any email alerts for new valid tool shed repositories.'
    new_repo_alert_check_box = CheckboxField( 'new_repo_alert',
                                              checked=( new_repo_alert_checked or ( user and user.new_repo_alert ) ) )
    # Collect the undeleted repositories for which this user currently
    # receives per-repository alerts.
    query = trans.sa_session.query( trans.model.Repository ) \
                            .filter( and_( trans.model.Repository.table.c.deleted == False,
                                           trans.model.Repository.table.c.email_alerts != None ) ) \
                            .order_by( trans.model.Repository.table.c.name )
    email_alert_repositories = [ repository for repository in query if user.email in repository.email_alerts ]
    return trans.fill_template( "/webapps/community/user/manage_email_alerts.mako",
                                webapp='community',
                                new_repo_alert_check_box=new_repo_alert_check_box,
                                email_alert_repositories=email_alert_repositories,
                                message=message,
                                status=status )
@web.expose
@web.require_login( "manage email alerts" )
def multi_select_email_alerts( self, trans, **kwd ):
    """Handle the 'receive email alerts' grid operation, then render the grid."""
    params = util.Params( kwd )
    message = util.restore_text( params.get( 'message', '' ) )
    status = params.get( 'status', 'done' )
    if 'webapp' not in kwd:
        kwd[ 'webapp' ] = 'community'
    if 'operation' in kwd:
        if kwd[ 'operation' ].lower() == "receive email alerts":
            if trans.user:
                # Delegate the actual toggling to set_email_alerts, telling
                # it to redirect back to this grid when it is done.
                if kwd[ 'id' ]:
                    kwd[ 'caller' ] = 'multi_select_email_alerts'
                    return trans.response.send_redirect( web.url_for( controller='repository',
                                                                      action='set_email_alerts',
                                                                      **kwd ) )
            else:
                kwd[ 'message' ] = 'You must be logged in to set email alerts.'
                kwd[ 'status' ] = 'error'
                del kwd[ 'operation' ]
    return self.email_alerts_repository_list_grid( trans, **kwd )
@web.expose
@web.require_login( "set repository metadata" )
def set_metadata( self, trans, id, ctx_str, **kwd ):
    """Handle the 'malicious' check box form or the reset-metadata form
    for the changeset identified by ctx_str, then redirect to the
    repository management page.
    """
    malicious = kwd.get( 'malicious', '' )
    if kwd.get( 'malicious_button', False ):
        # The malicious check box was submitted - record its state on the
        # changeset's metadata record.
        repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, ctx_str )
        malicious_checked = CheckboxField.is_checked( malicious )
        repository_metadata.malicious = malicious_checked
        trans.sa_session.add( repository_metadata )
        trans.sa_session.flush()
        message = ( "The repository tip has been defined as malicious."
                    if malicious_checked
                    else "The repository tip has been defined as <b>not</b> malicious." )
        status = 'done'
    else:
        # The set_metadata_button was clicked
        message, status = set_repository_metadata( trans, id, ctx_str, **kwd )
        if not message:
            message = "Metadata for change set revision '%s' has been reset." % str( ctx_str )
    return trans.response.send_redirect( web.url_for( controller='repository',
                                                      action='manage_repository',
                                                      id=id,
                                                      changeset_revision=ctx_str,
                                                      malicious=malicious,
                                                      message=message,
                                                      status=status ) )
@web.expose
def reset_all_metadata( self, trans, id, **kwd ):
    """Reset metadata across the repository's entire change log, then
    redirect to the repository management page with the outcome message.
    """
    error_message, status = reset_all_metadata_on_repository( trans, id, **kwd )
    if error_message:
        message = error_message
    else:
        message = "All repository metadata has been reset."
        status = 'done'
    return trans.response.send_redirect( web.url_for( controller='repository',
                                                      action='manage_repository',
                                                      id=id,
                                                      message=message,
                                                      status=status ) )
- @web.expose
- def display_tool( self, trans, repository_id, tool_config, changeset_revision, **kwd ):
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
- webapp = params.get( 'webapp', 'community' )
- repository = get_repository( trans, repository_id )
- tool, message = load_tool_from_changeset_revision( trans, repository_id, changeset_revision, tool_config )
- tool_state = self.__new_state( trans )
- is_malicious = changeset_is_malicious( trans, repository_id, repository.tip )
- try:
- return trans.fill_template( "/webapps/community/repository/tool_form.mako",
- repository=repository,
- changeset_revision=changeset_revision,
- tool=tool,
- tool_state=tool_state,
- is_malicious=is_malicious,
- webapp=webapp,
- message=message,
- status=status )
- except Exception, e:
- message = "Error displaying tool, probably due to a problem in the tool config. The exception is: %s." % str( e )
- if webapp == 'galaxy':
- return trans.response.send_redirect( web.url_for( controller='repository',
- action='preview_tools_in_changeset',
- repository_id=repository_id,
- changeset_revision=changeset_revision,
- message=message,
- status='error' ) )
- return trans.response.send_redirect( web.url_for( controller='repository',
- action='browse_repositories',
- operation='view_or_manage_repository',
- id=repository_id,
- changeset_revision=changeset_revision,
- message=message,
- status='error' ) )
@web.expose
def load_invalid_tool( self, trans, repository_id, tool_config, changeset_revision, **kwd ):
    """Render the tool form for a tool that failed validation, surfacing the
    reason the tool config is invalid.

    Metadata is regenerated for the changeset to recover the specific
    validation message associated with tool_config; if the tool cannot be
    rendered at all, redirect with an error message instead.
    """
    params = util.Params( kwd )
    message = util.restore_text( params.get( 'message', '' ) )
    status = params.get( 'status', 'error' )
    webapp = params.get( 'webapp', 'community' )
    repository = get_repository( trans, repository_id )
    repo_dir = repository.repo_path
    repo = hg.repository( get_configured_ui(), repo_dir )
    ctx = get_changectx_for_changeset( repo, changeset_revision )
    invalid_message = ''
    # Temporary working directory for files extracted from the changeset;
    # removed below before rendering.
    work_dir = make_tmp_directory()
    # Materialize the named tool config from the changeset into work_dir.
    for filename in ctx:
        ctx_file_name = strip_path( filename )
        if ctx_file_name == tool_config:
            tool_config_path = get_named_tmpfile_from_ctx( ctx, filename, work_dir )
            break
    # Regenerate metadata so we can recover the per-file validation errors.
    metadata_dict, invalid_files = generate_metadata_for_changeset_revision( trans,
                                                                             repo,
                                                                             repository_id,
                                                                             ctx,
                                                                             changeset_revision,
                                                                             repo_dir,
                                                                             updating_tip=changeset_revision==repository.tip )
    # invalid_files is a list of ( tool config name, error message ) tuples;
    # pick out the message for the tool config we were asked about.
    for invalid_file_tup in invalid_files:
        invalid_tool_config, invalid_msg = invalid_file_tup
        invalid_tool_config_name = strip_path( invalid_tool_config )
        if tool_config == invalid_tool_config_name:
            invalid_message = invalid_msg
            break
    tool, error_message = load_tool_from_changeset_revision( trans, repository_id, changeset_revision, tool_config )
    if error_message:
        message += error_message
    tool_state = self.__new_state( trans )
    is_malicious = changeset_is_malicious( trans, repository_id, repository.tip )
    # Best-effort cleanup of the temporary working directory.
    try:
        shutil.rmtree( work_dir )
    except:
        pass
    try:
        # Prefer the specific validation message when one was found.
        if invalid_message:
            message = invalid_message
        return trans.fill_template( "/webapps/community/repository/tool_form.mako",
                                    repository=repository,
                                    changeset_revision=changeset_revision,
                                    tool=tool,
                                    tool_state=tool_state,
                                    is_malicious=is_malicious,
                                    webapp=webapp,
                                    message=message,
                                    status='error' )
    except Exception, e:
        message = "This tool is invalid because: %s." % str( e )
    if webapp == 'galaxy':
        return trans.response.send_redirect( web.url_for( controller='repository',
                                                          action='preview_tools_in_changeset',
                                                          repository_id=repository_id,
                                                          changeset_revision=changeset_revision,
                                                          message=message,
                                                          status='error' ) )
    return trans.response.send_redirect( web.url_for( controller='repository',
                                                      action='browse_repositories',
                                                      operation='view_or_manage_repository',
                                                      id=repository_id,
                                                      changeset_revision=changeset_revision,
                                                      message=message,
                                                      status='error' ) )
def __new_state( self, trans, all_pages=False ):
    """Create a new `DefaultToolState` for this tool.  It will not be
    initialized with default values for inputs.

    Only inputs on the first page will be initialized unless `all_pages` is
    True, in which case all inputs regardless of page are initialized.
    """
    new_state = DefaultToolState()
    new_state.inputs = {}
    return new_state
- @web.expose
def view_tool_metadata( self, trans, repository_id, changeset_revision, tool_id, **kwd ):
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
@@ -2015,106 +2116,3 @@
webapp=webapp,
message=message,
status=status )
@web.expose
def download( self, trans, repository_id, changeset_revision, file_type, **kwd ):
    """Download an archive of the repository files compressed as zip, gz or bz2.

    Allow hgweb to handle the download.  This requires the tool shed
    server account's .hgrc file to include the following setting:
    [web]
    allow_archive = bz2, gz, zip
    """
    params = util.Params( kwd )
    repository = get_repository( trans, repository_id )
    # Map the requested archive type to the filename suffix hgweb expects.
    archive_suffixes = { 'zip': 'zip', 'bz2': 'tar.bz2', 'gz': 'tar.gz' }
    if file_type not in archive_suffixes:
        # BUG FIX: previously an unrecognized file_type left file_type_str
        # unbound, raising UnboundLocalError below.  Fail explicitly instead.
        raise ValueError( "Unsupported archive file_type: %s" % str( file_type ) )
    file_type_str = '%s.%s' % ( changeset_revision, archive_suffixes[ file_type ] )
    # Record the download before redirecting to hgweb.
    repository.times_downloaded += 1
    trans.sa_session.add( repository )
    trans.sa_session.flush()
    download_url = '/repos/%s/%s/archive/%s' % ( repository.user.username, repository.name, file_type_str )
    return trans.response.send_redirect( download_url )
- @web.json
- def open_folder( self, trans, repository_id, key ):
- # The tool shed includes a repository source file browser, which currently depends upon
- # copies of the hg repository file store in the repo_path for browsing.
- # Avoid caching
- trans.response.headers['Pragma'] = 'no-cache'
- trans.response.headers['Expires'] = '0'
- repository = trans.sa_session.query( trans.model.Repository ).get( trans.security.decode_id( repository_id ) )
- folder_path = key
- try:
- files_list = self.__get_files( trans, folder_path )
- except OSError, e:
- if str( e ).find( 'No such file or directory' ) >= 0:
- # We have a repository with no contents.
- return []
- folder_contents = []
- for filename in files_list:
- is_folder = False
- if filename and filename[-1] == os.sep:
- is_folder = True
- if filename:
- full_path = os.path.join( folder_path, filename )
- node = { "title": filename,
- "isFolder": is_folder,
- "isLazy": is_folder,
- "tooltip": full_path,
- "key": full_path }
- folder_contents.append( node )
- return folder_contents
def __get_files( self, trans, folder_path ):
    """Return the sorted entries of *folder_path*, skipping anything that
    starts with '.hg'; directory names get a trailing '/' appended.
    """
    entries = []
    for entry in os.listdir( folder_path ):
        # Skip .hg directories
        if str( entry ).startswith( '.hg' ):
            continue
        if os.path.isdir( os.path.join( folder_path, entry ) ):
            # Append a '/' character so that our jquery dynatree will
            # function properly.
            entry = '%s/' % entry
        entries.append( entry )
    entries.sort()
    return entries
@web.json
def get_file_contents( self, trans, file_path ):
    """Return the contents of file_path converted to an HTML string for the
    file browser, with placeholders for compressed/binary files and
    truncation of text files larger than MAX_CONTENT_SIZE.
    """
    # Avoid caching
    trans.response.headers['Pragma'] = 'no-cache'
    trans.response.headers['Expires'] = '0'
    # Compressed and binary files are never rendered inline.
    if is_gzip( file_path ):
        return to_html_str( '\ngzip compressed file\n' )
    if is_bz2( file_path ):
        return to_html_str( '\nbz2 compressed file\n' )
    if check_zip( file_path ):
        return to_html_str( '\nzip compressed file\n' )
    if check_binary( file_path ):
        return to_html_str( '\nBinary file\n' )
    # BUG FIX: accumulate converted lines in a list and join once - the
    # previous repeated '%s%s' concatenation was quadratic in file size -
    # and close the file handle, which was previously leaked.
    chunks = []
    rendered_size = 0
    fh = open( file_path )
    try:
        for line in fh:
            chunk = to_html_str( line )
            chunks.append( chunk )
            rendered_size += len( chunk )
            if rendered_size > MAX_CONTENT_SIZE:
                large_str = '\nFile contents truncated because file size is larger than maximum viewing size of %s\n' % util.nice_size( MAX_CONTENT_SIZE )
                chunks.append( to_html_str( large_str ) )
                break
    finally:
        fh.close()
    return ''.join( chunks )
@web.expose
def help( self, trans, **kwd ):
    """Render the tool shed help page."""
    params = util.Params( kwd )
    status = params.get( 'status', 'done' )
    message = util.restore_text( params.get( 'message', '' ) )
    return trans.fill_template( '/webapps/community/repository/help.mako', message=message, status=status, **kwd )
def __build_allow_push_select_field( self, trans, current_push_list, selected_value='none' ):
    """Build a multi-select listing users not already in current_push_list,
    for granting hg push access.
    """
    candidates = [ user for user in trans.sa_session.query( trans.model.User )
                   if user.username not in current_push_list ]
    return build_select_field( trans,
                               objs=candidates,
                               label_attr='username',
                               select_field_name='allow_push',
                               selected_value=selected_value,
                               refresh_on_change=False,
                               multiple=True )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
it because you have the service enabled, and it is addressed to the
recipient of this email.
1
0