galaxy-commits
Threads by month
- ----- 2026 -----
- March
- February
- January
- ----- 2025 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2024 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2023 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2022 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2021 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2020 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2019 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2018 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2017 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2016 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2015 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2014 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2013 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2012 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2011 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2010 -----
- December
- November
- October
- September
- August
- July
- June
- May
- 15302 discussions
commit/galaxy-central: dan: Add ToolShed metadata comparison method for Data Managers.
by commits-noreply@bitbucket.org 17 May '13
by commits-noreply@bitbucket.org 17 May '13
17 May '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/7aee96150913/
Changeset: 7aee96150913
User: dan
Date: 2013-05-17 17:26:00
Summary: Add ToolShed metadata comparison method for Data Managers.
Affected #: 1 file
diff -r 0481cf4b6cc3793902612af45cf0323de5dd3db6 -r 7aee9615091381ba6b4f037fa46d226f2d89d5a4 lib/tool_shed/util/metadata_util.py
--- a/lib/tool_shed/util/metadata_util.py
+++ b/lib/tool_shed/util/metadata_util.py
@@ -75,6 +75,7 @@
ancestor_repository_dependencies = ancestor_repository_dependencies_dict.get( 'repository_dependencies', [] )
ancestor_tool_dependencies = ancestor_metadata_dict.get( 'tool_dependencies', {} )
ancestor_workflows = ancestor_metadata_dict.get( 'workflows', [] )
+ ancestor_data_manager = ancestor_metadata_dict.get( 'data_manager', {} )
current_datatypes = current_metadata_dict.get( 'datatypes', [] )
current_tools = current_metadata_dict.get( 'tools', [] )
current_guids = [ tool_dict[ 'guid' ] for tool_dict in current_tools ]
@@ -84,6 +85,7 @@
current_repository_dependencies = current_repository_dependencies_dict.get( 'repository_dependencies', [] )
current_tool_dependencies = current_metadata_dict.get( 'tool_dependencies', {} )
current_workflows = current_metadata_dict.get( 'workflows', [] )
+ current_data_manager = current_metadata_dict.get( 'data_manager', {} )
# Handle case where no metadata exists for either changeset.
no_datatypes = not ancestor_datatypes and not current_datatypes
no_readme_files = not ancestor_readme_files and not current_readme_files
@@ -92,7 +94,8 @@
no_tool_dependencies = not ancestor_tool_dependencies and not current_tool_dependencies
no_tools = not ancestor_guids and not current_guids
no_workflows = not ancestor_workflows and not current_workflows
- if no_datatypes and no_readme_files and no_repository_dependencies and no_tool_dependencies and no_tools and no_workflows:
+ no_data_manager = not ancestor_data_manager and not current_data_manager
+ if no_datatypes and no_readme_files and no_repository_dependencies and no_tool_dependencies and no_tools and no_workflows and no_data_manager:
return 'no metadata'
# Uncomment the following if we decide that README files should affect how installable repository revisions are defined. See the NOTE in the
# compare_readme_files() method.
@@ -101,20 +104,25 @@
tool_dependency_comparison = compare_tool_dependencies( ancestor_tool_dependencies, current_tool_dependencies )
workflow_comparison = compare_workflows( ancestor_workflows, current_workflows )
datatype_comparison = compare_datatypes( ancestor_datatypes, current_datatypes )
+ data_manager_comparison = compare_data_manager( ancestor_data_manager, current_data_manager )
# Handle case where all metadata is the same.
+ # TODO: these values, ('equal', etc), should be abstracted out to constants
if ancestor_guids == current_guids and \
repository_dependency_comparison == 'equal' and \
tool_dependency_comparison == 'equal' and \
workflow_comparison == 'equal' and \
- datatype_comparison == 'equal':
+ datatype_comparison == 'equal' and \
+ data_manager_comparison == 'equal':
return 'equal'
# Handle case where ancestor metadata is a subset of current metadata.
# readme_file_is_subset = readme_file_comparision in [ 'equal', 'subset' ]
+ # TODO: this list [ 'equal', 'subset' ] should be created once
repository_dependency_is_subset = repository_dependency_comparison in [ 'equal', 'subset' ]
tool_dependency_is_subset = tool_dependency_comparison in [ 'equal', 'subset' ]
workflow_dependency_is_subset = workflow_comparison in [ 'equal', 'subset' ]
datatype_is_subset = datatype_comparison in [ 'equal', 'subset' ]
- if repository_dependency_is_subset and tool_dependency_is_subset and workflow_dependency_is_subset and datatype_is_subset:
+ datamanager_is_subset = data_manager_comparison in [ 'equal', 'subset' ]
+ if repository_dependency_is_subset and tool_dependency_is_subset and workflow_dependency_is_subset and datatype_is_subset and datamanager_is_subset:
is_subset = True
for guid in ancestor_guids:
if guid not in current_guids:
@@ -124,6 +132,21 @@
return 'subset'
return 'not equal and not subset'
+def compare_data_manager( ancestor_metadata, current_metadata ):
+ """Determine if ancestor_metadata is the same as or a subset of current_metadata for data_managers."""
+ def __data_manager_dict_to_tuple_list( metadata_dict ):
+ # we do not check tool_guid or tool conf file name
+ return set( sorted( [ ( name, tuple( sorted( value.get( 'data_tables', [] ) ) ), value.get( 'guid' ), value.get( 'version' ), value.get( 'name' ), value.get( 'id' ) ) for name, value in metadata_dict.iteritems() ] ) )
+ # only compare valid entries, any invalid entries are ignored
+ ancestor_metadata = __data_manager_dict_to_tuple_list( ancestor_metadata.get( 'data_managers', {} ) )
+ current_metadata = __data_manager_dict_to_tuple_list( current_metadata.get( 'data_managers', {} ) )
+ # use set comparisons
+ if ancestor_metadata.issubset( current_metadata ):
+ if ancestor_metadata == current_metadata:
+ return 'equal'
+ return 'subset'
+ return 'not equal and not subset'
+
def compare_datatypes( ancestor_datatypes, current_datatypes ):
"""Determine if ancestor_datatypes is the same as or a subset of current_datatypes."""
# Each datatype dict looks something like: {"dtype": "galaxy.datatypes.images:Image", "extension": "pdf", "mimetype": "application/pdf"}
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: dan: Fix for setting data manager version.
by commits-noreply@bitbucket.org 17 May '13
by commits-noreply@bitbucket.org 17 May '13
17 May '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/0481cf4b6cc3/
Changeset: 0481cf4b6cc3
User: dan
Date: 2013-05-17 17:21:00
Summary: Fix for setting data manager version.
Affected #: 1 file
diff -r 412d5434a559d1d49ebcd2b24b357f466aaf688b -r 0481cf4b6cc3793902612af45cf0323de5dd3db6 lib/galaxy/tools/data_manager/manager.py
--- a/lib/galaxy/tools/data_manager/manager.py
+++ b/lib/galaxy/tools/data_manager/manager.py
@@ -112,6 +112,7 @@
self.declared_id = elem.get( 'id', None )
self.guid = elem.get( 'guid', None )
path = elem.get( 'tool_file', None )
+ self.version = elem.get( 'version', self.version )
tool_guid = None
if path is None:
tool_elem = elem.find( 'tool' )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: dan: Fix for commit_util.UNDESIRABLE_DIRS in lib/galaxy/webapps/tool_shed/controllers/upload.py.
by commits-noreply@bitbucket.org 17 May '13
by commits-noreply@bitbucket.org 17 May '13
17 May '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/412d5434a559/
Changeset: 412d5434a559
User: dan
Date: 2013-05-17 16:47:30
Summary: Fix for commit_util.UNDESIRABLE_DIRS in lib/galaxy/webapps/tool_shed/controllers/upload.py.
Affected #: 1 file
diff -r 9c06caa86e2ac73f439e9b148fb200e036a06ea2 -r 412d5434a559d1d49ebcd2b24b357f466aaf688b lib/galaxy/webapps/tool_shed/controllers/upload.py
--- a/lib/galaxy/webapps/tool_shed/controllers/upload.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/upload.py
@@ -257,7 +257,7 @@
ok = os.path.basename( uploaded_file ) not in commit_util.UNDESIRABLE_FILES
if ok:
for file_path_item in relative_path.split( '/' ):
- if file_path_item in COMMIT_UTIL.UNDESIRABLE_DIRS:
+ if file_path_item in commit_util.UNDESIRABLE_DIRS:
undesirable_dirs_removed += 1
ok = False
break
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: greg: Fixes for committing to a tool shed repository - bug introduced when the tool shed upload controller was refactored.
by commits-noreply@bitbucket.org 17 May '13
by commits-noreply@bitbucket.org 17 May '13
17 May '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/9c06caa86e2a/
Changeset: 9c06caa86e2a
User: greg
Date: 2013-05-17 13:30:53
Summary: Fixes for committing to a tool shed repository - bug introduced when the tool shed upload controller was refactored.
Affected #: 2 files
diff -r c499e581b0f603528b66df0efe7e0e656a8e16f9 -r 9c06caa86e2ac73f439e9b148fb200e036a06ea2 lib/galaxy/webapps/tool_shed/controllers/upload.py
--- a/lib/galaxy/webapps/tool_shed/controllers/upload.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/upload.py
@@ -23,10 +23,6 @@
log = logging.getLogger( __name__ )
-undesirable_dirs = [ '.hg', '.svn', '.git', '.cvs' ]
-undesirable_files = [ '.hg_archival.txt', 'hgrc', '.DS_Store' ]
-CHUNK_SIZE = 2**20 # 1Mb
-
class UploadController( BaseUIController ):
@@ -258,10 +254,10 @@
for root, dirs, files in os.walk( uploaded_directory ):
for uploaded_file in files:
relative_path = os.path.normpath( os.path.join( os.path.relpath( root, uploaded_directory ), uploaded_file ) )
- ok = os.path.basename( uploaded_file ) not in undesirable_files
+ ok = os.path.basename( uploaded_file ) not in commit_util.UNDESIRABLE_FILES
if ok:
for file_path_item in relative_path.split( '/' ):
- if file_path_item in undesirable_dirs:
+ if file_path_item in COMMIT_UTIL.UNDESIRABLE_DIRS:
undesirable_dirs_removed += 1
ok = False
break
@@ -300,10 +296,10 @@
full_path = os.path.abspath( repo_dir )
filenames_in_archive = []
for tarinfo_obj in tar.getmembers():
- ok = os.path.basename( tarinfo_obj.name ) not in undesirable_files
+ ok = os.path.basename( tarinfo_obj.name ) not in commit_util.UNDESIRABLE_FILES
if ok:
for file_path_item in tarinfo_obj.name.split( '/' ):
- if file_path_item in undesirable_dirs:
+ if file_path_item in commit_util.UNDESIRABLE_DIRS:
undesirable_dirs_removed += 1
ok = False
break
diff -r c499e581b0f603528b66df0efe7e0e656a8e16f9 -r 9c06caa86e2ac73f439e9b148fb200e036a06ea2 lib/tool_shed/util/commit_util.py
--- a/lib/tool_shed/util/commit_util.py
+++ b/lib/tool_shed/util/commit_util.py
@@ -16,6 +16,10 @@
log = logging.getLogger( __name__ )
+UNDESIRABLE_DIRS = [ '.hg', '.svn', '.git', '.cvs' ]
+UNDESIRABLE_FILES = [ '.hg_archival.txt', 'hgrc', '.DS_Store' ]
+CHUNK_SIZE = 2**20 # 1Mb
+
def check_archive( archive ):
for member in archive.getmembers():
# Allow regular files and directories only
@@ -113,11 +117,11 @@
# We have a repository that is not new (it contains files), so discover those files that are in the repository, but not in the uploaded archive.
for root, dirs, files in os.walk( full_path ):
if root.find( '.hg' ) < 0 and root.find( 'hgrc' ) < 0:
- for undesirable_dir in undesirable_dirs:
+ for undesirable_dir in UNDESIRABLE_DIRS:
if undesirable_dir in dirs:
dirs.remove( undesirable_dir )
undesirable_dirs_removed += 1
- for undesirable_file in undesirable_files:
+ for undesirable_file in UNDESIRABLE_FILES:
if undesirable_file in files:
files.remove( undesirable_file )
undesirable_files_removed += 1
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: greg: Refactor the tool shed upload controller.
by commits-noreply@bitbucket.org 16 May '13
by commits-noreply@bitbucket.org 16 May '13
16 May '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/c499e581b0f6/
Changeset: c499e581b0f6
User: greg
Date: 2013-05-17 03:45:54
Summary: Refactor the tool shed upload controller.
Affected #: 2 files
diff -r 35de5a8a928bf63fd5de3d1ec066097602acf235 -r c499e581b0f603528b66df0efe7e0e656a8e16f9 lib/galaxy/webapps/tool_shed/controllers/upload.py
--- a/lib/galaxy/webapps/tool_shed/controllers/upload.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/upload.py
@@ -1,7 +1,6 @@
import logging
import os
import shutil
-import sys
import tarfile
import tempfile
import urllib
@@ -9,9 +8,8 @@
from galaxy import util
from galaxy import web
from galaxy.datatypes import checkers
-from galaxy.util import json
-
import tool_shed.util.shed_util_common as suc
+from tool_shed.util import commit_util
from tool_shed.util import metadata_util
from tool_shed.util import repository_dependency_util
from tool_shed.util import tool_dependency_util
@@ -32,278 +30,6 @@
class UploadController( BaseUIController ):
- def check_archive( self, archive ):
- for member in archive.getmembers():
- # Allow regular files and directories only
- if not ( member.isdir() or member.isfile() or member.islnk() ):
- message = "Uploaded archives can only include regular directories and files (no symbolic links, devices, etc). Offender: %s" % str( member )
- return False, message
- for item in [ '.hg', '..', '/' ]:
- if member.name.startswith( item ):
- message = "Uploaded archives cannot contain .hg directories, absolute filenames starting with '/', or filenames with two dots '..'."
- return False, message
- if member.name in [ 'hgrc' ]:
- message = "Uploaded archives cannot contain hgrc files."
- return False, message
- return True, ''
-
- def check_file_contents_for_email_alerts( self, trans ):
- """
- See if any admin users have chosen to receive email alerts when a repository is updated. If so, the file contents of the update must be
- checked for inappropriate content.
- """
- admin_users = trans.app.config.get( "admin_users", "" ).split( "," )
- for repository in trans.sa_session.query( trans.model.Repository ) \
- .filter( trans.model.Repository.table.c.email_alerts != None ):
- email_alerts = json.from_json_string( repository.email_alerts )
- for user_email in email_alerts:
- if user_email in admin_users:
- return True
- return False
-
- def check_file_content_for_html_and_images( self, file_path ):
- message = ''
- if checkers.check_html( file_path ):
- message = 'The file "%s" contains HTML content.\n' % str( file_path )
- elif checkers.check_image( file_path ):
- message = 'The file "%s" contains image content.\n' % str( file_path )
- return message
-
- def create_and_write_tmp_file( self, text ):
- fh = tempfile.NamedTemporaryFile( 'wb' )
- tmp_filename = fh.name
- fh.close()
- fh = open( tmp_filename, 'wb' )
- fh.write( '<?xml version="1.0"?>\n' )
- fh.write( text )
- fh.close()
- return tmp_filename
-
- def get_upload_point( self, repository, **kwd ):
- upload_point = kwd.get( 'upload_point', None )
- if upload_point is not None:
- # The value of upload_point will be something like: database/community_files/000/repo_12/1.bed
- if os.path.exists( upload_point ):
- if os.path.isfile( upload_point ):
- # Get the parent directory
- upload_point, not_needed = os.path.split( upload_point )
- # Now the value of uplaod_point will be something like: database/community_files/000/repo_12/
- upload_point = upload_point.split( 'repo_%d' % repository.id )[ 1 ]
- if upload_point:
- upload_point = upload_point.lstrip( '/' )
- upload_point = upload_point.rstrip( '/' )
- # Now the value of uplaod_point will be something like: /
- if upload_point == '/':
- upload_point = None
- else:
- # Must have been an error selecting something that didn't exist, so default to repository root
- upload_point = None
- return upload_point
-
- def handle_bz2( self, repository, uploaded_file_name ):
- fd, uncompressed = tempfile.mkstemp( prefix='repo_%d_upload_bunzip2_' % repository.id, dir=os.path.dirname( uploaded_file_name ), text=False )
- bzipped_file = bz2.BZ2File( uploaded_file_name, 'rb' )
- while 1:
- try:
- chunk = bzipped_file.read( CHUNK_SIZE )
- except IOError:
- os.close( fd )
- os.remove( uncompressed )
- log.exception( 'Problem uncompressing bz2 data "%s": %s' % ( uploaded_file_name, str( e ) ) )
- return
- if not chunk:
- break
- os.write( fd, chunk )
- os.close( fd )
- bzipped_file.close()
- shutil.move( uncompressed, uploaded_file_name )
-
- def handle_directory_changes( self, trans, repository, full_path, filenames_in_archive, remove_repo_files_not_in_tar, new_repo_alert, commit_message,
- undesirable_dirs_removed, undesirable_files_removed ):
- repo_dir = repository.repo_path( trans.app )
- repo = hg.repository( suc.get_configured_ui(), repo_dir )
- content_alert_str = ''
- files_to_remove = []
- filenames_in_archive = [ os.path.join( full_path, name ) for name in filenames_in_archive ]
- if remove_repo_files_not_in_tar and not repository.is_new( trans.app ):
- # We have a repository that is not new (it contains files), so discover those files that are in the repository, but not in the uploaded archive.
- for root, dirs, files in os.walk( full_path ):
- if root.find( '.hg' ) < 0 and root.find( 'hgrc' ) < 0:
- for undesirable_dir in undesirable_dirs:
- if undesirable_dir in dirs:
- dirs.remove( undesirable_dir )
- undesirable_dirs_removed += 1
- for undesirable_file in undesirable_files:
- if undesirable_file in files:
- files.remove( undesirable_file )
- undesirable_files_removed += 1
- for name in files:
- full_name = os.path.join( root, name )
- if full_name not in filenames_in_archive:
- files_to_remove.append( full_name )
- for repo_file in files_to_remove:
- # Remove files in the repository (relative to the upload point) that are not in the uploaded archive.
- try:
- commands.remove( repo.ui, repo, repo_file, force=True )
- except Exception, e:
- log.debug( "Error removing files using the mercurial API, so trying a different approach, the error was: %s" % str( e ))
- relative_selected_file = selected_file.split( 'repo_%d' % repository.id )[1].lstrip( '/' )
- repo.dirstate.remove( relative_selected_file )
- repo.dirstate.write()
- absolute_selected_file = os.path.abspath( selected_file )
- if os.path.isdir( absolute_selected_file ):
- try:
- os.rmdir( absolute_selected_file )
- except OSError, e:
- # The directory is not empty.
- pass
- elif os.path.isfile( absolute_selected_file ):
- os.remove( absolute_selected_file )
- dir = os.path.split( absolute_selected_file )[0]
- try:
- os.rmdir( dir )
- except OSError, e:
- # The directory is not empty.
- pass
- # See if any admin users have chosen to receive email alerts when a repository is
- # updated. If so, check every uploaded file to ensure content is appropriate.
- check_contents = self.check_file_contents_for_email_alerts( trans )
- for filename_in_archive in filenames_in_archive:
- # Check file content to ensure it is appropriate.
- if check_contents and os.path.isfile( filename_in_archive ):
- content_alert_str += self.check_file_content_for_html_and_images( filename_in_archive )
- commands.add( repo.ui, repo, filename_in_archive )
- if filename_in_archive.endswith( 'tool_data_table_conf.xml.sample' ):
- # Handle the special case where a tool_data_table_conf.xml.sample file is being uploaded by parsing the file and adding new entries
- # to the in-memory trans.app.tool_data_tables dictionary.
- error, message = tool_util.handle_sample_tool_data_table_conf_file( trans.app, filename_in_archive )
- if error:
- return False, message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed
- commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message )
- admin_only = len( repository.downloadable_revisions ) != 1
- suc.handle_email_alerts( trans, repository, content_alert_str=content_alert_str, new_repo_alert=new_repo_alert, admin_only=admin_only )
- return True, '', files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed
-
- def handle_gzip( self, repository, uploaded_file_name ):
- fd, uncompressed = tempfile.mkstemp( prefix='repo_%d_upload_gunzip_' % repository.id, dir=os.path.dirname( uploaded_file_name ), text=False )
- gzipped_file = gzip.GzipFile( uploaded_file_name, 'rb' )
- while 1:
- try:
- chunk = gzipped_file.read( CHUNK_SIZE )
- except IOError, e:
- os.close( fd )
- os.remove( uncompressed )
- log.exception( 'Problem uncompressing gz data "%s": %s' % ( uploaded_file_name, str( e ) ) )
- return
- if not chunk:
- break
- os.write( fd, chunk )
- os.close( fd )
- gzipped_file.close()
- shutil.move( uncompressed, uploaded_file_name )
-
- def handle_repository_dependencies_definition( self, trans, repository_dependencies_config ):
- altered = False
- try:
- # Make sure we're looking at a valid repository_dependencies.xml file.
- tree = util.parse_xml( repository_dependencies_config )
- root = tree.getroot()
- except Exception, e:
- error_message = "Error parsing %s in handle_repository_dependencies_definition: " % str( repository_dependencies_config )
- log.exception( error_message )
- return False, None
- if root.tag == 'repositories':
- for index, elem in enumerate( root ):
- # <repository name="molecule_datatypes" owner="test" changeset_revision="1a070566e9c6" />
- populated, elem = self.handle_repository_dependency_elem( trans, elem )
- if populated:
- root[ index ] = elem
- if not altered:
- altered = True
- return altered, root
- return False, None
-
- def handle_repository_dependency_elem( self, trans, elem ):
- # <repository name="molecule_datatypes" owner="test" changeset_revision="1a070566e9c6" />
- populated = False
- name = elem.get( 'name' )
- owner = elem.get( 'owner' )
- changeset_revision = elem.get( 'changeset_revision' )
- if not changeset_revision:
- # Populate the changeset_revision attribute with the latest installable metadata revision for the defined repository.
- # We use the latest installable revision instead of the latest metadata revision to ensure that the contents of the
- # revision are valid.
- repository = suc.get_repository_by_name_and_owner( trans.app, name, owner )
- if repository:
- repo_dir = repository.repo_path( trans.app )
- repo = hg.repository( suc.get_configured_ui(), repo_dir )
- lastest_installable_changeset_revision = suc.get_latest_downloadable_changeset_revision( trans, repository, repo )
- if lastest_installable_changeset_revision != suc.INITIAL_CHANGELOG_HASH:
- elem.attrib[ 'changeset_revision' ] = lastest_installable_changeset_revision
- populated = True
- return populated, elem
-
- def handle_tool_dependencies_definition( self, trans, tool_dependencies_config ):
- altered = False
- try:
- # Make sure we're looking at a valid tool_dependencies.xml file.
- tree = util.parse_xml( tool_dependencies_config )
- root = tree.getroot()
- except Exception, e:
- error_message = "Error parsing %s in handle_tool_dependencies_definition: " % str( tool_dependencies_config )
- log.exception( error_message )
- return False, None
- if root.tag == 'tool_dependency':
- for root_index, root_elem in enumerate( root ):
- # <package name="eigen" version="2.0.17">
- if root_elem.tag == 'package':
- package_altered = False
- for package_index, package_elem in enumerate( root_elem ):
- if package_elem.tag == 'repository':
- # <repository name="package_eigen_2_0" owner="test" changeset_revision="09eb05087cd0" prior_installation_required="True" />
- populated, repository_elem = self.handle_repository_dependency_elem( trans, package_elem )
- if populated:
- root_elem[ package_index ] = repository_elem
- package_altered = True
- if not altered:
- altered = True
-
- elif package_elem.tag == 'install':
- # <install version="1.0">
- for actions_index, actions_elem in enumerate( package_elem ):
- for action_index, action_elem in enumerate( actions_elem ):
- action_type = action_elem.get( 'type' )
- if action_type == 'set_environment_for_install':
- # <action type="set_environment_for_install">
- # <repository name="package_eigen_2_0" owner="test" changeset_revision="09eb05087cd0">
- # <package name="eigen" version="2.0.17" />
- # </repository>
- # </action>
- for repo_index, repo_elem in enumerate( action_elem ):
- populated, repository_elem = self.handle_repository_dependency_elem( trans, repo_elem )
- if populated:
- action_elem[ repo_index ] = repository_elem
- package_altered = True
- if not altered:
- altered = True
- if package_altered:
- actions_elem[ action_index ] = action_elem
- if package_altered:
- root_elem[ actions_index ] = actions_elem
-
- if package_altered:
- root[ root_index ] = root_elem
- return altered, root
- return False, None
-
- def uncompress( self, repository, uploaded_file_name, uploaded_file_filename, isgzip, isbz2 ):
- if isgzip:
- self.handle_gzip( repository, uploaded_file_name )
- return uploaded_file_filename.rstrip( '.gz' )
- if isbz2:
- self.handle_bz2( repository, uploaded_file_name )
- return uploaded_file_filename.rstrip( '.bz2' )
-
@web.expose
@web.require_login( 'upload', use_panels=True )
def upload( self, trans, **kwd ):
@@ -319,7 +45,7 @@
uncompress_file = util.string_as_bool( kwd.get( 'uncompress_file', 'true' ) )
remove_repo_files_not_in_tar = util.string_as_bool( kwd.get( 'remove_repo_files_not_in_tar', 'true' ) )
uploaded_file = None
- upload_point = self.get_upload_point( repository, **kwd )
+ upload_point = commit_util.get_upload_point( repository, **kwd )
tip = repository.tip( trans.app )
file_data = kwd.get( 'file_data', '' )
url = kwd.get( 'url', '' )
@@ -399,7 +125,7 @@
self.upload_directory( trans, repository, uploaded_directory, upload_point, remove_repo_files_not_in_tar, commit_message, new_repo_alert )
else:
if ( isgzip or isbz2 ) and uncompress_file:
- uploaded_file_filename = self.uncompress( repository, uploaded_file_name, uploaded_file_filename, isgzip, isbz2 )
+ uploaded_file_filename = commit_util.uncompress( repository, uploaded_file_name, uploaded_file_filename, isgzip, isbz2 )
if upload_point is not None:
full_path = os.path.abspath( os.path.join( repo_dir, upload_point, uploaded_file_filename ) )
else:
@@ -407,18 +133,18 @@
# Move some version of the uploaded file to the load_point within the repository hierarchy.
if uploaded_file_filename in [ 'repository_dependencies.xml' ]:
# Inspect the contents of the file to see if changeset_revision values are missing and if so, set them appropriately.
- altered, root = self.handle_repository_dependencies_definition( trans, uploaded_file_name )
+ altered, root = commit_util.handle_repository_dependencies_definition( trans, uploaded_file_name )
if altered:
- tmp_filename = self.create_and_write_tmp_file( util.xml_to_string( root, pretty=True ) )
+ tmp_filename = commit_util.create_and_write_tmp_file( util.xml_to_string( root, pretty=True ) )
shutil.move( tmp_filename, full_path )
else:
shutil.move( uploaded_file_name, full_path )
elif uploaded_file_filename in [ 'tool_dependencies.xml' ]:
# Inspect the contents of the file to see if it defines a complex repository dependency definition whose changeset_revision values
# are missing and if so, set them appropriately.
- altered, root = self.handle_tool_dependencies_definition( trans, uploaded_file_name )
+ altered, root = commit_util.handle_tool_dependencies_definition( trans, uploaded_file_name )
if altered:
- tmp_filename = self.create_and_write_tmp_file( util.xml_to_string( root, pretty=True ) )
+ tmp_filename = commit_util.create_and_write_tmp_file( util.xml_to_string( root, pretty=True ) )
shutil.move( tmp_filename, full_path )
else:
shutil.move( uploaded_file_name, full_path )
@@ -426,9 +152,9 @@
shutil.move( uploaded_file_name, full_path )
# See if any admin users have chosen to receive email alerts when a repository is updated. If so, check every uploaded file to ensure
# content is appropriate.
- check_contents = self.check_file_contents_for_email_alerts( trans )
+ check_contents = commit_util.check_file_contents_for_email_alerts( trans )
if check_contents and os.path.isfile( full_path ):
- content_alert_str = self.check_file_content_for_html_and_images( full_path )
+ content_alert_str = commit_util.check_file_content_for_html_and_images( full_path )
else:
content_alert_str = ''
commands.add( repo.ui, repo, full_path )
@@ -553,8 +279,8 @@
os.remove( repo_path )
shutil.move( os.path.join( uploaded_directory, relative_path ), repo_path )
filenames_in_archive.append( relative_path )
- return self.handle_directory_changes( trans, repository, full_path, filenames_in_archive, remove_repo_files_not_in_tar, new_repo_alert, commit_message,
- undesirable_dirs_removed, undesirable_files_removed )
+ return commit_util.handle_directory_changes( trans, repository, full_path, filenames_in_archive, remove_repo_files_not_in_tar,
+ new_repo_alert, commit_message, undesirable_dirs_removed, undesirable_files_removed )
def upload_tar( self, trans, repository, tar, uploaded_file, upload_point, remove_repo_files_not_in_tar, commit_message, new_repo_alert ):
# Upload a tar archive of files.
@@ -562,7 +288,7 @@
repo = hg.repository( suc.get_configured_ui(), repo_dir )
undesirable_dirs_removed = 0
undesirable_files_removed = 0
- ok, message = self.check_archive( tar )
+ ok, message = commit_util.check_archive( tar )
if not ok:
tar.close()
uploaded_file.close()
@@ -589,12 +315,12 @@
tar.extractall( path=full_path )
tar.close()
uploaded_file.close()
- return self.handle_directory_changes( trans,
- repository,
- full_path,
- filenames_in_archive,
- remove_repo_files_not_in_tar,
- new_repo_alert,
- commit_message,
- undesirable_dirs_removed,
- undesirable_files_removed )
+ return commit_util.handle_directory_changes( trans,
+ repository,
+ full_path,
+ filenames_in_archive,
+ remove_repo_files_not_in_tar,
+ new_repo_alert,
+ commit_message,
+ undesirable_dirs_removed,
+ undesirable_files_removed )
diff -r 35de5a8a928bf63fd5de3d1ec066097602acf235 -r c499e581b0f603528b66df0efe7e0e656a8e16f9 lib/tool_shed/util/commit_util.py
--- /dev/null
+++ b/lib/tool_shed/util/commit_util.py
@@ -0,0 +1,289 @@
+import logging
+import os
+import shutil
+import tempfile
+from galaxy import util
+from galaxy.datatypes import checkers
+from galaxy.util import json
+import tool_shed.util.shed_util_common as suc
+from tool_shed.util import tool_util
+
+from galaxy import eggs
+eggs.require( 'mercurial' )
+from mercurial import commands
+from mercurial import hg
+from mercurial import ui
+
+log = logging.getLogger( __name__ )
+
+def check_archive( archive ):
+ for member in archive.getmembers():
+ # Allow regular files and directories only
+ if not ( member.isdir() or member.isfile() or member.islnk() ):
+ message = "Uploaded archives can only include regular directories and files (no symbolic links, devices, etc). Offender: %s" % str( member )
+ return False, message
+ for item in [ '.hg', '..', '/' ]:
+ if member.name.startswith( item ):
+ message = "Uploaded archives cannot contain .hg directories, absolute filenames starting with '/', or filenames with two dots '..'."
+ return False, message
+ if member.name in [ 'hgrc' ]:
+ message = "Uploaded archives cannot contain hgrc files."
+ return False, message
+ return True, ''
+
+def check_file_contents_for_email_alerts( trans ):
+ """
+ See if any admin users have chosen to receive email alerts when a repository is updated. If so, the file contents of the update must be
+ checked for inappropriate content.
+ """
+ admin_users = trans.app.config.get( "admin_users", "" ).split( "," )
+ for repository in trans.sa_session.query( trans.model.Repository ) \
+ .filter( trans.model.Repository.table.c.email_alerts != None ):
+ email_alerts = json.from_json_string( repository.email_alerts )
+ for user_email in email_alerts:
+ if user_email in admin_users:
+ return True
+ return False
+
+def check_file_content_for_html_and_images( file_path ):
+ message = ''
+ if checkers.check_html( file_path ):
+ message = 'The file "%s" contains HTML content.\n' % str( file_path )
+ elif checkers.check_image( file_path ):
+ message = 'The file "%s" contains image content.\n' % str( file_path )
+ return message
+
+def create_and_write_tmp_file( text ):
+ fh = tempfile.NamedTemporaryFile( 'wb' )
+ tmp_filename = fh.name
+ fh.close()
+ fh = open( tmp_filename, 'wb' )
+ fh.write( '<?xml version="1.0"?>\n' )
+ fh.write( text )
+ fh.close()
+ return tmp_filename
+
+def get_upload_point( repository, **kwd ):
+ upload_point = kwd.get( 'upload_point', None )
+ if upload_point is not None:
+ # The value of upload_point will be something like: database/community_files/000/repo_12/1.bed
+ if os.path.exists( upload_point ):
+ if os.path.isfile( upload_point ):
+ # Get the parent directory
+ upload_point, not_needed = os.path.split( upload_point )
+ # Now the value of uplaod_point will be something like: database/community_files/000/repo_12/
+ upload_point = upload_point.split( 'repo_%d' % repository.id )[ 1 ]
+ if upload_point:
+ upload_point = upload_point.lstrip( '/' )
+ upload_point = upload_point.rstrip( '/' )
+ # Now the value of uplaod_point will be something like: /
+ if upload_point == '/':
+ upload_point = None
+ else:
+ # Must have been an error selecting something that didn't exist, so default to repository root
+ upload_point = None
+ return upload_point
+
+def handle_bz2( repository, uploaded_file_name ):
+ fd, uncompressed = tempfile.mkstemp( prefix='repo_%d_upload_bunzip2_' % repository.id, dir=os.path.dirname( uploaded_file_name ), text=False )
+ bzipped_file = bz2.BZ2File( uploaded_file_name, 'rb' )
+ while 1:
+ try:
+ chunk = bzipped_file.read( CHUNK_SIZE )
+ except IOError:
+ os.close( fd )
+ os.remove( uncompressed )
+ log.exception( 'Problem uncompressing bz2 data "%s": %s' % ( uploaded_file_name, str( e ) ) )
+ return
+ if not chunk:
+ break
+ os.write( fd, chunk )
+ os.close( fd )
+ bzipped_file.close()
+ shutil.move( uncompressed, uploaded_file_name )
+
+def handle_directory_changes( trans, repository, full_path, filenames_in_archive, remove_repo_files_not_in_tar, new_repo_alert, commit_message,
+ undesirable_dirs_removed, undesirable_files_removed ):
+ repo_dir = repository.repo_path( trans.app )
+ repo = hg.repository( suc.get_configured_ui(), repo_dir )
+ content_alert_str = ''
+ files_to_remove = []
+ filenames_in_archive = [ os.path.join( full_path, name ) for name in filenames_in_archive ]
+ if remove_repo_files_not_in_tar and not repository.is_new( trans.app ):
+ # We have a repository that is not new (it contains files), so discover those files that are in the repository, but not in the uploaded archive.
+ for root, dirs, files in os.walk( full_path ):
+ if root.find( '.hg' ) < 0 and root.find( 'hgrc' ) < 0:
+ for undesirable_dir in undesirable_dirs:
+ if undesirable_dir in dirs:
+ dirs.remove( undesirable_dir )
+ undesirable_dirs_removed += 1
+ for undesirable_file in undesirable_files:
+ if undesirable_file in files:
+ files.remove( undesirable_file )
+ undesirable_files_removed += 1
+ for name in files:
+ full_name = os.path.join( root, name )
+ if full_name not in filenames_in_archive:
+ files_to_remove.append( full_name )
+ for repo_file in files_to_remove:
+ # Remove files in the repository (relative to the upload point) that are not in the uploaded archive.
+ try:
+ commands.remove( repo.ui, repo, repo_file, force=True )
+ except Exception, e:
+ log.debug( "Error removing files using the mercurial API, so trying a different approach, the error was: %s" % str( e ))
+ relative_selected_file = selected_file.split( 'repo_%d' % repository.id )[1].lstrip( '/' )
+ repo.dirstate.remove( relative_selected_file )
+ repo.dirstate.write()
+ absolute_selected_file = os.path.abspath( selected_file )
+ if os.path.isdir( absolute_selected_file ):
+ try:
+ os.rmdir( absolute_selected_file )
+ except OSError, e:
+ # The directory is not empty.
+ pass
+ elif os.path.isfile( absolute_selected_file ):
+ os.remove( absolute_selected_file )
+ dir = os.path.split( absolute_selected_file )[0]
+ try:
+ os.rmdir( dir )
+ except OSError, e:
+ # The directory is not empty.
+ pass
+ # See if any admin users have chosen to receive email alerts when a repository is
+ # updated. If so, check every uploaded file to ensure content is appropriate.
+ check_contents = check_file_contents_for_email_alerts( trans )
+ for filename_in_archive in filenames_in_archive:
+ # Check file content to ensure it is appropriate.
+ if check_contents and os.path.isfile( filename_in_archive ):
+ content_alert_str += check_file_content_for_html_and_images( filename_in_archive )
+ commands.add( repo.ui, repo, filename_in_archive )
+ if filename_in_archive.endswith( 'tool_data_table_conf.xml.sample' ):
+ # Handle the special case where a tool_data_table_conf.xml.sample file is being uploaded by parsing the file and adding new entries
+ # to the in-memory trans.app.tool_data_tables dictionary.
+ error, message = tool_util.handle_sample_tool_data_table_conf_file( trans.app, filename_in_archive )
+ if error:
+ return False, message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed
+ commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message )
+ admin_only = len( repository.downloadable_revisions ) != 1
+ suc.handle_email_alerts( trans, repository, content_alert_str=content_alert_str, new_repo_alert=new_repo_alert, admin_only=admin_only )
+ return True, '', files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed
+
+def handle_gzip( repository, uploaded_file_name ):
+ fd, uncompressed = tempfile.mkstemp( prefix='repo_%d_upload_gunzip_' % repository.id, dir=os.path.dirname( uploaded_file_name ), text=False )
+ gzipped_file = gzip.GzipFile( uploaded_file_name, 'rb' )
+ while 1:
+ try:
+ chunk = gzipped_file.read( CHUNK_SIZE )
+ except IOError, e:
+ os.close( fd )
+ os.remove( uncompressed )
+ log.exception( 'Problem uncompressing gz data "%s": %s' % ( uploaded_file_name, str( e ) ) )
+ return
+ if not chunk:
+ break
+ os.write( fd, chunk )
+ os.close( fd )
+ gzipped_file.close()
+ shutil.move( uncompressed, uploaded_file_name )
+
+def handle_repository_dependencies_definition( trans, repository_dependencies_config ):
+ altered = False
+ try:
+ # Make sure we're looking at a valid repository_dependencies.xml file.
+ tree = util.parse_xml( repository_dependencies_config )
+ root = tree.getroot()
+ except Exception, e:
+ error_message = "Error parsing %s in handle_repository_dependencies_definition: " % str( repository_dependencies_config )
+ log.exception( error_message )
+ return False, None
+ if root.tag == 'repositories':
+ for index, elem in enumerate( root ):
+ # <repository name="molecule_datatypes" owner="test" changeset_revision="1a070566e9c6" />
+ populated, elem = handle_repository_dependency_elem( trans, elem )
+ if populated:
+ root[ index ] = elem
+ if not altered:
+ altered = True
+ return altered, root
+ return False, None
+
+def handle_repository_dependency_elem( trans, elem ):
+ # <repository name="molecule_datatypes" owner="test" changeset_revision="1a070566e9c6" />
+ populated = False
+ name = elem.get( 'name' )
+ owner = elem.get( 'owner' )
+ changeset_revision = elem.get( 'changeset_revision' )
+ if not changeset_revision:
+ # Populate the changeset_revision attribute with the latest installable metadata revision for the defined repository.
+ # We use the latest installable revision instead of the latest metadata revision to ensure that the contents of the
+ # revision are valid.
+ repository = suc.get_repository_by_name_and_owner( trans.app, name, owner )
+ if repository:
+ repo_dir = repository.repo_path( trans.app )
+ repo = hg.repository( suc.get_configured_ui(), repo_dir )
+ lastest_installable_changeset_revision = suc.get_latest_downloadable_changeset_revision( trans, repository, repo )
+ if lastest_installable_changeset_revision != suc.INITIAL_CHANGELOG_HASH:
+ elem.attrib[ 'changeset_revision' ] = lastest_installable_changeset_revision
+ populated = True
+ return populated, elem
+
+def handle_tool_dependencies_definition( trans, tool_dependencies_config ):
+ altered = False
+ try:
+ # Make sure we're looking at a valid tool_dependencies.xml file.
+ tree = util.parse_xml( tool_dependencies_config )
+ root = tree.getroot()
+ except Exception, e:
+ error_message = "Error parsing %s in handle_tool_dependencies_definition: " % str( tool_dependencies_config )
+ log.exception( error_message )
+ return False, None
+ if root.tag == 'tool_dependency':
+ for root_index, root_elem in enumerate( root ):
+ # <package name="eigen" version="2.0.17">
+ if root_elem.tag == 'package':
+ package_altered = False
+ for package_index, package_elem in enumerate( root_elem ):
+ if package_elem.tag == 'repository':
+ # <repository name="package_eigen_2_0" owner="test" changeset_revision="09eb05087cd0" prior_installation_required="True" />
+ populated, repository_elem = handle_repository_dependency_elem( trans, package_elem )
+ if populated:
+ root_elem[ package_index ] = repository_elem
+ package_altered = True
+ if not altered:
+ altered = True
+
+ elif package_elem.tag == 'install':
+ # <install version="1.0">
+ for actions_index, actions_elem in enumerate( package_elem ):
+ for action_index, action_elem in enumerate( actions_elem ):
+ action_type = action_elem.get( 'type' )
+ if action_type == 'set_environment_for_install':
+ # <action type="set_environment_for_install">
+ # <repository name="package_eigen_2_0" owner="test" changeset_revision="09eb05087cd0">
+ # <package name="eigen" version="2.0.17" />
+ # </repository>
+ # </action>
+ for repo_index, repo_elem in enumerate( action_elem ):
+ populated, repository_elem = handle_repository_dependency_elem( trans, repo_elem )
+ if populated:
+ action_elem[ repo_index ] = repository_elem
+ package_altered = True
+ if not altered:
+ altered = True
+ if package_altered:
+ actions_elem[ action_index ] = action_elem
+ if package_altered:
+ root_elem[ actions_index ] = actions_elem
+
+ if package_altered:
+ root[ root_index ] = root_elem
+ return altered, root
+ return False, None
+
+def uncompress( repository, uploaded_file_name, uploaded_file_filename, isgzip, isbz2 ):
+ if isgzip:
+ handle_gzip( repository, uploaded_file_name )
+ return uploaded_file_filename.rstrip( '.gz' )
+ if isbz2:
+ handle_bz2( repository, uploaded_file_name )
+ return uploaded_file_filename.rstrip( '.bz2' )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: greg: Eliminate the requirement for repository dependency tag sets to include a changeset_revision attribute.
by commits-noreply@bitbucket.org 16 May '13
by commits-noreply@bitbucket.org 16 May '13
16 May '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/35de5a8a928b/
Changeset: 35de5a8a928b
User: greg
Date: 2013-05-16 22:57:10
Summary: Eliminate the requirement for repository dependency tag sets to include a changeset_revision attribute.
Here is an example of what used to be required:
<repository name="molecule_datatypes" owner="test" changeset_revision="1a070566e9c6" />
Now the following is supported:
<repository name="molecule_datatypes" owner="test" />
When the changeset_revision is missing, the tool shed upload process will automatically determine the latest installable changeset revision for the repository
defined by the name and owner tag attributes and populate the tag with the discovered changeset revision value.
Caveat emptor:
0) I wanted to get this changeset committed before the next stable branch is created on Monday, 5/20/13, so that it would be included in the next Galaxy release.
By definition, anything I do to this new feature from now on will be a bug fix. ;) This feature should be fully functional within the next week.
1) This currently only works when uploading a single file named repository_dependencies.xml or tool_dependencies.xml. It does not yet work when files with this name
are included in a tarball or are uploaded using the ftp or hg clone options in the tool shed upload form.
2) This feature is not yet fully tested, but doesn't break anything I've tried (all tool shed functional tests pass). I've tested it with both simple repository
dependency definitions and complex repository dependency definitions. I've also tested it with the recently supported <action type="set_environment_for_install">
tag set.
Affected #: 3 files
diff -r 03c0f4d30880d3286e0ccc61c3f2554343c8c53e -r 35de5a8a928bf63fd5de3d1ec066097602acf235 lib/galaxy/webapps/tool_shed/controllers/upload.py
--- a/lib/galaxy/webapps/tool_shed/controllers/upload.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/upload.py
@@ -32,6 +32,21 @@
class UploadController( BaseUIController ):
+ def check_archive( self, archive ):
+ for member in archive.getmembers():
+ # Allow regular files and directories only
+ if not ( member.isdir() or member.isfile() or member.islnk() ):
+ message = "Uploaded archives can only include regular directories and files (no symbolic links, devices, etc). Offender: %s" % str( member )
+ return False, message
+ for item in [ '.hg', '..', '/' ]:
+ if member.name.startswith( item ):
+ message = "Uploaded archives cannot contain .hg directories, absolute filenames starting with '/', or filenames with two dots '..'."
+ return False, message
+ if member.name in [ 'hgrc' ]:
+ message = "Uploaded archives cannot contain hgrc files."
+ return False, message
+ return True, ''
+
def check_file_contents_for_email_alerts( self, trans ):
"""
See if any admin users have chosen to receive email alerts when a repository is updated. If so, the file contents of the update must be
@@ -54,32 +69,266 @@
message = 'The file "%s" contains image content.\n' % str( file_path )
return message
+ def create_and_write_tmp_file( self, text ):
+ fh = tempfile.NamedTemporaryFile( 'wb' )
+ tmp_filename = fh.name
+ fh.close()
+ fh = open( tmp_filename, 'wb' )
+ fh.write( '<?xml version="1.0"?>\n' )
+ fh.write( text )
+ fh.close()
+ return tmp_filename
+
+ def get_upload_point( self, repository, **kwd ):
+ upload_point = kwd.get( 'upload_point', None )
+ if upload_point is not None:
+ # The value of upload_point will be something like: database/community_files/000/repo_12/1.bed
+ if os.path.exists( upload_point ):
+ if os.path.isfile( upload_point ):
+ # Get the parent directory
+ upload_point, not_needed = os.path.split( upload_point )
+ # Now the value of uplaod_point will be something like: database/community_files/000/repo_12/
+ upload_point = upload_point.split( 'repo_%d' % repository.id )[ 1 ]
+ if upload_point:
+ upload_point = upload_point.lstrip( '/' )
+ upload_point = upload_point.rstrip( '/' )
+ # Now the value of uplaod_point will be something like: /
+ if upload_point == '/':
+ upload_point = None
+ else:
+ # Must have been an error selecting something that didn't exist, so default to repository root
+ upload_point = None
+ return upload_point
+
+ def handle_bz2( self, repository, uploaded_file_name ):
+ fd, uncompressed = tempfile.mkstemp( prefix='repo_%d_upload_bunzip2_' % repository.id, dir=os.path.dirname( uploaded_file_name ), text=False )
+ bzipped_file = bz2.BZ2File( uploaded_file_name, 'rb' )
+ while 1:
+ try:
+ chunk = bzipped_file.read( CHUNK_SIZE )
+ except IOError:
+ os.close( fd )
+ os.remove( uncompressed )
+ log.exception( 'Problem uncompressing bz2 data "%s": %s' % ( uploaded_file_name, str( e ) ) )
+ return
+ if not chunk:
+ break
+ os.write( fd, chunk )
+ os.close( fd )
+ bzipped_file.close()
+ shutil.move( uncompressed, uploaded_file_name )
+
+ def handle_directory_changes( self, trans, repository, full_path, filenames_in_archive, remove_repo_files_not_in_tar, new_repo_alert, commit_message,
+ undesirable_dirs_removed, undesirable_files_removed ):
+ repo_dir = repository.repo_path( trans.app )
+ repo = hg.repository( suc.get_configured_ui(), repo_dir )
+ content_alert_str = ''
+ files_to_remove = []
+ filenames_in_archive = [ os.path.join( full_path, name ) for name in filenames_in_archive ]
+ if remove_repo_files_not_in_tar and not repository.is_new( trans.app ):
+ # We have a repository that is not new (it contains files), so discover those files that are in the repository, but not in the uploaded archive.
+ for root, dirs, files in os.walk( full_path ):
+ if root.find( '.hg' ) < 0 and root.find( 'hgrc' ) < 0:
+ for undesirable_dir in undesirable_dirs:
+ if undesirable_dir in dirs:
+ dirs.remove( undesirable_dir )
+ undesirable_dirs_removed += 1
+ for undesirable_file in undesirable_files:
+ if undesirable_file in files:
+ files.remove( undesirable_file )
+ undesirable_files_removed += 1
+ for name in files:
+ full_name = os.path.join( root, name )
+ if full_name not in filenames_in_archive:
+ files_to_remove.append( full_name )
+ for repo_file in files_to_remove:
+ # Remove files in the repository (relative to the upload point) that are not in the uploaded archive.
+ try:
+ commands.remove( repo.ui, repo, repo_file, force=True )
+ except Exception, e:
+ log.debug( "Error removing files using the mercurial API, so trying a different approach, the error was: %s" % str( e ))
+ relative_selected_file = selected_file.split( 'repo_%d' % repository.id )[1].lstrip( '/' )
+ repo.dirstate.remove( relative_selected_file )
+ repo.dirstate.write()
+ absolute_selected_file = os.path.abspath( selected_file )
+ if os.path.isdir( absolute_selected_file ):
+ try:
+ os.rmdir( absolute_selected_file )
+ except OSError, e:
+ # The directory is not empty.
+ pass
+ elif os.path.isfile( absolute_selected_file ):
+ os.remove( absolute_selected_file )
+ dir = os.path.split( absolute_selected_file )[0]
+ try:
+ os.rmdir( dir )
+ except OSError, e:
+ # The directory is not empty.
+ pass
+ # See if any admin users have chosen to receive email alerts when a repository is
+ # updated. If so, check every uploaded file to ensure content is appropriate.
+ check_contents = self.check_file_contents_for_email_alerts( trans )
+ for filename_in_archive in filenames_in_archive:
+ # Check file content to ensure it is appropriate.
+ if check_contents and os.path.isfile( filename_in_archive ):
+ content_alert_str += self.check_file_content_for_html_and_images( filename_in_archive )
+ commands.add( repo.ui, repo, filename_in_archive )
+ if filename_in_archive.endswith( 'tool_data_table_conf.xml.sample' ):
+ # Handle the special case where a tool_data_table_conf.xml.sample file is being uploaded by parsing the file and adding new entries
+ # to the in-memory trans.app.tool_data_tables dictionary.
+ error, message = tool_util.handle_sample_tool_data_table_conf_file( trans.app, filename_in_archive )
+ if error:
+ return False, message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed
+ commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message )
+ admin_only = len( repository.downloadable_revisions ) != 1
+ suc.handle_email_alerts( trans, repository, content_alert_str=content_alert_str, new_repo_alert=new_repo_alert, admin_only=admin_only )
+ return True, '', files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed
+
+ def handle_gzip( self, repository, uploaded_file_name ):
+ fd, uncompressed = tempfile.mkstemp( prefix='repo_%d_upload_gunzip_' % repository.id, dir=os.path.dirname( uploaded_file_name ), text=False )
+ gzipped_file = gzip.GzipFile( uploaded_file_name, 'rb' )
+ while 1:
+ try:
+ chunk = gzipped_file.read( CHUNK_SIZE )
+ except IOError, e:
+ os.close( fd )
+ os.remove( uncompressed )
+ log.exception( 'Problem uncompressing gz data "%s": %s' % ( uploaded_file_name, str( e ) ) )
+ return
+ if not chunk:
+ break
+ os.write( fd, chunk )
+ os.close( fd )
+ gzipped_file.close()
+ shutil.move( uncompressed, uploaded_file_name )
+
+ def handle_repository_dependencies_definition( self, trans, repository_dependencies_config ):
+ altered = False
+ try:
+ # Make sure we're looking at a valid repository_dependencies.xml file.
+ tree = util.parse_xml( repository_dependencies_config )
+ root = tree.getroot()
+ except Exception, e:
+ error_message = "Error parsing %s in handle_repository_dependencies_definition: " % str( repository_dependencies_config )
+ log.exception( error_message )
+ return False, None
+ if root.tag == 'repositories':
+ for index, elem in enumerate( root ):
+ # <repository name="molecule_datatypes" owner="test" changeset_revision="1a070566e9c6" />
+ populated, elem = self.handle_repository_dependency_elem( trans, elem )
+ if populated:
+ root[ index ] = elem
+ if not altered:
+ altered = True
+ return altered, root
+ return False, None
+
+ def handle_repository_dependency_elem( self, trans, elem ):
+ # <repository name="molecule_datatypes" owner="test" changeset_revision="1a070566e9c6" />
+ populated = False
+ name = elem.get( 'name' )
+ owner = elem.get( 'owner' )
+ changeset_revision = elem.get( 'changeset_revision' )
+ if not changeset_revision:
+ # Populate the changeset_revision attribute with the latest installable metadata revision for the defined repository.
+ # We use the latest installable revision instead of the latest metadata revision to ensure that the contents of the
+ # revision are valid.
+ repository = suc.get_repository_by_name_and_owner( trans.app, name, owner )
+ if repository:
+ repo_dir = repository.repo_path( trans.app )
+ repo = hg.repository( suc.get_configured_ui(), repo_dir )
+ lastest_installable_changeset_revision = suc.get_latest_downloadable_changeset_revision( trans, repository, repo )
+ if lastest_installable_changeset_revision != suc.INITIAL_CHANGELOG_HASH:
+ elem.attrib[ 'changeset_revision' ] = lastest_installable_changeset_revision
+ populated = True
+ return populated, elem
+
+ def handle_tool_dependencies_definition( self, trans, tool_dependencies_config ):
+ altered = False
+ try:
+ # Make sure we're looking at a valid tool_dependencies.xml file.
+ tree = util.parse_xml( tool_dependencies_config )
+ root = tree.getroot()
+ except Exception, e:
+ error_message = "Error parsing %s in handle_tool_dependencies_definition: " % str( tool_dependencies_config )
+ log.exception( error_message )
+ return False, None
+ if root.tag == 'tool_dependency':
+ for root_index, root_elem in enumerate( root ):
+ # <package name="eigen" version="2.0.17">
+ if root_elem.tag == 'package':
+ package_altered = False
+ for package_index, package_elem in enumerate( root_elem ):
+ if package_elem.tag == 'repository':
+ # <repository name="package_eigen_2_0" owner="test" changeset_revision="09eb05087cd0" prior_installation_required="True" />
+ populated, repository_elem = self.handle_repository_dependency_elem( trans, package_elem )
+ if populated:
+ root_elem[ package_index ] = repository_elem
+ package_altered = True
+ if not altered:
+ altered = True
+
+ elif package_elem.tag == 'install':
+ # <install version="1.0">
+ for actions_index, actions_elem in enumerate( package_elem ):
+ for action_index, action_elem in enumerate( actions_elem ):
+ action_type = action_elem.get( 'type' )
+ if action_type == 'set_environment_for_install':
+ # <action type="set_environment_for_install">
+ # <repository name="package_eigen_2_0" owner="test" changeset_revision="09eb05087cd0">
+ # <package name="eigen" version="2.0.17" />
+ # </repository>
+ # </action>
+ for repo_index, repo_elem in enumerate( action_elem ):
+ populated, repository_elem = self.handle_repository_dependency_elem( trans, repo_elem )
+ if populated:
+ action_elem[ repo_index ] = repository_elem
+ package_altered = True
+ if not altered:
+ altered = True
+ if package_altered:
+ actions_elem[ action_index ] = action_elem
+ if package_altered:
+ root_elem[ actions_index ] = actions_elem
+
+ if package_altered:
+ root[ root_index ] = root_elem
+ return altered, root
+ return False, None
+
+ def uncompress( self, repository, uploaded_file_name, uploaded_file_filename, isgzip, isbz2 ):
+ if isgzip:
+ self.handle_gzip( repository, uploaded_file_name )
+ return uploaded_file_filename.rstrip( '.gz' )
+ if isbz2:
+ self.handle_bz2( repository, uploaded_file_name )
+ return uploaded_file_filename.rstrip( '.bz2' )
+
@web.expose
@web.require_login( 'upload', use_panels=True )
def upload( self, trans, **kwd ):
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
- commit_message = util.restore_text( params.get( 'commit_message', 'Uploaded' ) )
- category_ids = util.listify( params.get( 'category_id', '' ) )
+ message = kwd.get( 'message', '' )
+ status = kwd.get( 'status', 'done' )
+ commit_message = kwd.get( 'commit_message', 'Uploaded' )
+ category_ids = util.listify( kwd.get( 'category_id', '' ) )
categories = suc.get_categories( trans )
- repository_id = params.get( 'repository_id', '' )
+ repository_id = kwd.get( 'repository_id', '' )
repository = suc.get_repository_in_tool_shed( trans, repository_id )
repo_dir = repository.repo_path( trans.app )
repo = hg.repository( suc.get_configured_ui(), repo_dir )
- uncompress_file = util.string_as_bool( params.get( 'uncompress_file', 'true' ) )
- remove_repo_files_not_in_tar = util.string_as_bool( params.get( 'remove_repo_files_not_in_tar', 'true' ) )
+ uncompress_file = util.string_as_bool( kwd.get( 'uncompress_file', 'true' ) )
+ remove_repo_files_not_in_tar = util.string_as_bool( kwd.get( 'remove_repo_files_not_in_tar', 'true' ) )
uploaded_file = None
- upload_point = self.__get_upload_point( repository, **kwd )
+ upload_point = self.get_upload_point( repository, **kwd )
tip = repository.tip( trans.app )
- file_data = params.get( 'file_data', '' )
- url = params.get( 'url', '' )
+ file_data = kwd.get( 'file_data', '' )
+ url = kwd.get( 'url', '' )
# Part of the upload process is sending email notification to those that have registered to
# receive them. One scenario occurs when the first change set is produced for the repository.
# See the suc.handle_email_alerts() method for the definition of the scenarios.
new_repo_alert = repository.is_new( trans.app )
uploaded_directory = None
- if params.get( 'upload_button', False ):
+ if kwd.get( 'upload_button', False ):
if file_data == '' and url == '':
message = 'No files were entered on the upload form.'
status = 'error'
@@ -155,10 +404,28 @@
full_path = os.path.abspath( os.path.join( repo_dir, upload_point, uploaded_file_filename ) )
else:
full_path = os.path.abspath( os.path.join( repo_dir, uploaded_file_filename ) )
- # Move the uploaded file to the load_point within the repository hierarchy.
- shutil.move( uploaded_file_name, full_path )
- # See if any admin users have chosen to receive email alerts when a repository is
- # updated. If so, check every uploaded file to ensure content is appropriate.
+ # Move some version of the uploaded file to the load_point within the repository hierarchy.
+ if uploaded_file_filename in [ 'repository_dependencies.xml' ]:
+ # Inspect the contents of the file to see if changeset_revision values are missing and if so, set them appropriately.
+ altered, root = self.handle_repository_dependencies_definition( trans, uploaded_file_name )
+ if altered:
+ tmp_filename = self.create_and_write_tmp_file( util.xml_to_string( root, pretty=True ) )
+ shutil.move( tmp_filename, full_path )
+ else:
+ shutil.move( uploaded_file_name, full_path )
+ elif uploaded_file_filename in [ 'tool_dependencies.xml' ]:
+ # Inspect the contents of the file to see if it defines a complex repository dependency definition whose changeset_revision values
+ # are missing and if so, set them appropriately.
+ altered, root = self.handle_tool_dependencies_definition( trans, uploaded_file_name )
+ if altered:
+ tmp_filename = self.create_and_write_tmp_file( util.xml_to_string( root, pretty=True ) )
+ shutil.move( tmp_filename, full_path )
+ else:
+ shutil.move( uploaded_file_name, full_path )
+ else:
+ shutil.move( uploaded_file_name, full_path )
+ # See if any admin users have chosen to receive email alerts when a repository is updated. If so, check every uploaded file to ensure
+ # content is appropriate.
check_contents = self.check_file_contents_for_email_alerts( trans )
if check_contents and os.path.isfile( full_path ):
content_alert_str = self.check_file_content_for_html_and_images( full_path )
@@ -264,7 +531,7 @@
filenames_in_archive = []
for root, dirs, files in os.walk( uploaded_directory ):
for uploaded_file in files:
- relative_path = os.path.normpath(os.path.join(os.path.relpath(root, uploaded_directory), uploaded_file))
+ relative_path = os.path.normpath( os.path.join( os.path.relpath( root, uploaded_directory ), uploaded_file ) )
ok = os.path.basename( uploaded_file ) not in undesirable_files
if ok:
for file_path_item in relative_path.split( '/' ):
@@ -275,19 +542,19 @@
else:
undesirable_files_removed += 1
if ok:
- repo_path = os.path.join(full_path, relative_path)
- repo_basedir = os.path.normpath(os.path.join(repo_path, os.path.pardir))
- if not os.path.exists(repo_basedir):
- os.makedirs(repo_basedir)
- if os.path.exists(repo_path):
- if os.path.isdir(repo_path):
- shutil.rmtree(repo_path)
+ repo_path = os.path.join( full_path, relative_path )
+ repo_basedir = os.path.normpath( os.path.join( repo_path, os.path.pardir ) )
+ if not os.path.exists( repo_basedir ):
+ os.makedirs( repo_basedir )
+ if os.path.exists( repo_path ):
+ if os.path.isdir( repo_path ):
+ shutil.rmtree( repo_path )
else:
- os.remove(repo_path)
- shutil.move(os.path.join(uploaded_directory, relative_path), repo_path)
+ os.remove( repo_path )
+ shutil.move( os.path.join( uploaded_directory, relative_path ), repo_path )
filenames_in_archive.append( relative_path )
- return self.__handle_directory_changes(trans, repository, full_path, filenames_in_archive, remove_repo_files_not_in_tar, new_repo_alert, commit_message,
- undesirable_dirs_removed, undesirable_files_removed)
+ return self.handle_directory_changes( trans, repository, full_path, filenames_in_archive, remove_repo_files_not_in_tar, new_repo_alert, commit_message,
+ undesirable_dirs_removed, undesirable_files_removed )
def upload_tar( self, trans, repository, tar, uploaded_file, upload_point, remove_repo_files_not_in_tar, commit_message, new_repo_alert ):
# Upload a tar archive of files.
@@ -295,7 +562,7 @@
repo = hg.repository( suc.get_configured_ui(), repo_dir )
undesirable_dirs_removed = 0
undesirable_files_removed = 0
- ok, message = self.__check_archive( tar )
+ ok, message = self.check_archive( tar )
if not ok:
tar.close()
uploaded_file.close()
@@ -322,159 +589,12 @@
tar.extractall( path=full_path )
tar.close()
uploaded_file.close()
- return self.__handle_directory_changes( trans,
- repository,
- full_path,
- filenames_in_archive,
- remove_repo_files_not_in_tar,
- new_repo_alert,
- commit_message,
- undesirable_dirs_removed,
- undesirable_files_removed )
-
- def __handle_directory_changes( self, trans, repository, full_path, filenames_in_archive, remove_repo_files_not_in_tar, new_repo_alert, commit_message,
- undesirable_dirs_removed, undesirable_files_removed ):
- repo_dir = repository.repo_path( trans.app )
- repo = hg.repository( suc.get_configured_ui(), repo_dir )
- content_alert_str = ''
- files_to_remove = []
- filenames_in_archive = [ os.path.join( full_path, name ) for name in filenames_in_archive ]
- if remove_repo_files_not_in_tar and not repository.is_new( trans.app ):
- # We have a repository that is not new (it contains files), so discover
- # those files that are in the repository, but not in the uploaded archive.
- for root, dirs, files in os.walk( full_path ):
- if root.find( '.hg' ) < 0 and root.find( 'hgrc' ) < 0:
- for undesirable_dir in undesirable_dirs:
- if undesirable_dir in dirs:
- dirs.remove( undesirable_dir )
- undesirable_dirs_removed += 1
- for undesirable_file in undesirable_files:
- if undesirable_file in files:
- files.remove( undesirable_file )
- undesirable_files_removed += 1
- for name in files:
- full_name = os.path.join( root, name )
- if full_name not in filenames_in_archive:
- files_to_remove.append( full_name )
- for repo_file in files_to_remove:
- # Remove files in the repository (relative to the upload point) that are not in the uploaded archive.
- try:
- commands.remove( repo.ui, repo, repo_file, force=True )
- except Exception, e:
- log.debug( "Error removing files using the mercurial API, so trying a different approach, the error was: %s" % str( e ))
- relative_selected_file = selected_file.split( 'repo_%d' % repository.id )[1].lstrip( '/' )
- repo.dirstate.remove( relative_selected_file )
- repo.dirstate.write()
- absolute_selected_file = os.path.abspath( selected_file )
- if os.path.isdir( absolute_selected_file ):
- try:
- os.rmdir( absolute_selected_file )
- except OSError, e:
- # The directory is not empty
- pass
- elif os.path.isfile( absolute_selected_file ):
- os.remove( absolute_selected_file )
- dir = os.path.split( absolute_selected_file )[0]
- try:
- os.rmdir( dir )
- except OSError, e:
- # The directory is not empty
- pass
- # See if any admin users have chosen to receive email alerts when a repository is
- # updated. If so, check every uploaded file to ensure content is appropriate.
- check_contents = self.check_file_contents_for_email_alerts( trans )
- for filename_in_archive in filenames_in_archive:
- # Check file content to ensure it is appropriate.
- if check_contents and os.path.isfile( filename_in_archive ):
- content_alert_str += self.check_file_content_for_html_and_images( filename_in_archive )
- commands.add( repo.ui, repo, filename_in_archive )
- if filename_in_archive.endswith( 'tool_data_table_conf.xml.sample' ):
- # Handle the special case where a tool_data_table_conf.xml.sample file is being uploaded by parsing the file and adding new entries
- # to the in-memory trans.app.tool_data_tables dictionary.
- error, message = tool_util.handle_sample_tool_data_table_conf_file( trans.app, filename_in_archive )
- if error:
- return False, message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed
- commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message )
- admin_only = len( repository.downloadable_revisions ) != 1
- suc.handle_email_alerts( trans, repository, content_alert_str=content_alert_str, new_repo_alert=new_repo_alert, admin_only=admin_only )
- return True, '', files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed
-
- def uncompress( self, repository, uploaded_file_name, uploaded_file_filename, isgzip, isbz2 ):
- if isgzip:
- self.__handle_gzip( repository, uploaded_file_name )
- return uploaded_file_filename.rstrip( '.gz' )
- if isbz2:
- self.__handle_bz2( repository, uploaded_file_name )
- return uploaded_file_filename.rstrip( '.bz2' )
-
- def __handle_gzip( self, repository, uploaded_file_name ):
- fd, uncompressed = tempfile.mkstemp( prefix='repo_%d_upload_gunzip_' % repository.id, dir=os.path.dirname( uploaded_file_name ), text=False )
- gzipped_file = gzip.GzipFile( uploaded_file_name, 'rb' )
- while 1:
- try:
- chunk = gzipped_file.read( CHUNK_SIZE )
- except IOError, e:
- os.close( fd )
- os.remove( uncompressed )
- log.exception( 'Problem uncompressing gz data "%s": %s' % ( uploaded_file_name, str( e ) ) )
- return
- if not chunk:
- break
- os.write( fd, chunk )
- os.close( fd )
- gzipped_file.close()
- shutil.move( uncompressed, uploaded_file_name )
-
- def __handle_bz2( self, repository, uploaded_file_name ):
- fd, uncompressed = tempfile.mkstemp( prefix='repo_%d_upload_bunzip2_' % repository.id, dir=os.path.dirname( uploaded_file_name ), text=False )
- bzipped_file = bz2.BZ2File( uploaded_file_name, 'rb' )
- while 1:
- try:
- chunk = bzipped_file.read( CHUNK_SIZE )
- except IOError:
- os.close( fd )
- os.remove( uncompressed )
- log.exception( 'Problem uncompressing bz2 data "%s": %s' % ( uploaded_file_name, str( e ) ) )
- return
- if not chunk:
- break
- os.write( fd, chunk )
- os.close( fd )
- bzipped_file.close()
- shutil.move( uncompressed, uploaded_file_name )
-
- def __get_upload_point( self, repository, **kwd ):
- upload_point = kwd.get( 'upload_point', None )
- if upload_point is not None:
- # The value of upload_point will be something like: database/community_files/000/repo_12/1.bed
- if os.path.exists( upload_point ):
- if os.path.isfile( upload_point ):
- # Get the parent directory
- upload_point, not_needed = os.path.split( upload_point )
- # Now the value of upload_point will be something like: database/community_files/000/repo_12/
- upload_point = upload_point.split( 'repo_%d' % repository.id )[ 1 ]
- if upload_point:
- upload_point = upload_point.lstrip( '/' )
- upload_point = upload_point.rstrip( '/' )
- # Now the value of upload_point will be something like: /
- if upload_point == '/':
- upload_point = None
- else:
- # Must have been an error selecting something that didn't exist, so default to repository root
- upload_point = None
- return upload_point
-
- def __check_archive( self, archive ):
- for member in archive.getmembers():
- # Allow regular files and directories only
- if not ( member.isdir() or member.isfile() or member.islnk() ):
- message = "Uploaded archives can only include regular directories and files (no symbolic links, devices, etc). Offender: %s" % str( member )
- return False, message
- for item in [ '.hg', '..', '/' ]:
- if member.name.startswith( item ):
- message = "Uploaded archives cannot contain .hg directories, absolute filenames starting with '/', or filenames with two dots '..'."
- return False, message
- if member.name in [ 'hgrc' ]:
- message = "Uploaded archives cannot contain hgrc files."
- return False, message
- return True, ''
+ return self.handle_directory_changes( trans,
+ repository,
+ full_path,
+ filenames_in_archive,
+ remove_repo_files_not_in_tar,
+ new_repo_alert,
+ commit_message,
+ undesirable_dirs_removed,
+ undesirable_files_removed )
diff -r 03c0f4d30880d3286e0ccc61c3f2554343c8c53e -r 35de5a8a928bf63fd5de3d1ec066097602acf235 lib/tool_shed/util/metadata_util.py
--- a/lib/tool_shed/util/metadata_util.py
+++ b/lib/tool_shed/util/metadata_util.py
@@ -1283,6 +1283,13 @@
new_repository_dependencies_metadata = metadata_dict.get( 'repository_dependencies', None )
if new_repository_dependencies_metadata:
new_repository_dependencies = metadata_dict[ 'repository_dependencies' ][ 'repository_dependencies' ]
+ # TODO: We used to include the following here to handle the case where repository dependency definitions were deleted. However,
+ # this erroneously returned True in cases where it should not have done so. This usually occurred where multiple single files
+ # were uploaded when a single tarball should have been. We need to implement support for handling deleted repository dependency
+ # definitions so that we can guarantee reproducibility, but we need to do it in a way that is better than the following.
+ # for new_repository_dependency in new_repository_dependencies:
+ # if new_repository_dependency not in saved_repository_dependencies:
+ # return True
# The saved metadata must be a subset of the new metadata.
for saved_repository_dependency in saved_repository_dependencies:
if saved_repository_dependency not in new_repository_dependencies:
@@ -1314,6 +1321,13 @@
saved_tool_dependencies = metadata[ 'tool_dependencies' ]
new_tool_dependencies = metadata_dict.get( 'tool_dependencies', None )
if new_tool_dependencies:
+ # TODO: We used to include the following here to handle the case where tool dependency definitions were deleted. However,
+ # this erroneously returned True in cases where it should not have done so. This usually occurred where multiple single files
+ # were uploaded when a single tarball should have been. We need to implement support for handling deleted tool dependency
+ # definitions so that we can guarantee reproducibility, but we need to do it in a way that is better than the following.
+ # for new_tool_dependency in new_tool_dependencies:
+ # if new_tool_dependency not in saved_tool_dependencies:
+ # return True
# The saved metadata must be a subset of the new metadata.
for saved_tool_dependency in saved_tool_dependencies:
if saved_tool_dependency not in new_tool_dependencies:
diff -r 03c0f4d30880d3286e0ccc61c3f2554343c8c53e -r 35de5a8a928bf63fd5de3d1ec066097602acf235 lib/tool_shed/util/shed_util_common.py
--- a/lib/tool_shed/util/shed_util_common.py
+++ b/lib/tool_shed/util/shed_util_common.py
@@ -492,6 +492,13 @@
"""Get a tool shed repository record from the Galaxy database defined by the id."""
return trans.sa_session.query( trans.model.ToolShedRepository ).get( trans.security.decode_id( id ) )
+def get_latest_downloadable_changeset_revision( trans, repository, repo ):
+ repository_tip = repository.tip( trans.app )
+ repository_metadata = get_repository_metadata_by_changeset_revision( trans, trans.security.encode_id( repository.id ), repository_tip )
+ if repository_metadata and repository_metadata.downloadable:
+ return repository_tip
+ return get_previous_metadata_changeset_revision( repository, repo, repository_tip, downloadable=True )
+
def get_named_tmpfile_from_ctx( ctx, filename, dir ):
"""Return a named temporary file created from a specified file with a given name included in a repository changeset revision."""
filename = strip_path( filename )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: Dave Bouvier: Only display the tool test results folder if a repository has been installed and tested.
by commits-noreply@bitbucket.org 16 May '13
by commits-noreply@bitbucket.org 16 May '13
16 May '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/03c0f4d30880/
Changeset: 03c0f4d30880
User: Dave Bouvier
Date: 2013-05-16 21:55:01
Summary: Only display the tool test results folder if a repository has been installed and tested.
Affected #: 1 file
diff -r 487b92cec404614ddf29b6dfb36b6b47ba6fc086 -r 03c0f4d30880d3286e0ccc61c3f2554343c8c53e lib/galaxy/webapps/tool_shed/util/container_util.py
--- a/lib/galaxy/webapps/tool_shed/util/container_util.py
+++ b/lib/galaxy/webapps/tool_shed/util/container_util.py
@@ -799,8 +799,11 @@
containers_dict[ 'valid_tools' ] = valid_tools_root_folder
# Tool test results container.
if tool_test_results:
- folder_id, tool_test_results_root_folder = build_tool_test_results_folder( trans, folder_id, tool_test_results, time_last_tested=time_last_tested )
- containers_dict[ 'tool_test_results' ] = tool_test_results_root_folder
+ # Only create and populate this folder if there are actual tool test results to display, since the
+ # display of the 'Test environment' folder by itself can be misleading.
+ if 'passed_tests' in tool_test_results or 'failed_tests' in tool_test_results or 'installation_errors' in tool_test_results:
+ folder_id, tool_test_results_root_folder = build_tool_test_results_folder( trans, folder_id, tool_test_results, time_last_tested=time_last_tested )
+ containers_dict[ 'tool_test_results' ] = tool_test_results_root_folder
# Workflows container.
if metadata:
if 'workflows' in metadata:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: Dave Bouvier: Add tool shed functional test temporary directory to .hgignore.
by commits-noreply@bitbucket.org 16 May '13
by commits-noreply@bitbucket.org 16 May '13
16 May '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/487b92cec404/
Changeset: 487b92cec404
User: Dave Bouvier
Date: 2013-05-16 21:50:58
Summary: Add tool shed functional test temporary directory to .hgignore.
Affected #: 1 file
diff -r 1001212761bbd65fc3b3a69a2ae57ffa257f29f3 -r 487b92cec404614ddf29b6dfb36b6b47ba6fc086 .hgignore
--- a/.hgignore
+++ b/.hgignore
@@ -81,6 +81,7 @@
# Test output
run_functional_tests.html
+test/tool_shed/tmp/*
# Project files
*.kpf
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: Dave Bouvier: Additional exception handling when testing repositories that fail installation before a row is created in the Galaxy database.
by commits-noreply@bitbucket.org 16 May '13
by commits-noreply@bitbucket.org 16 May '13
16 May '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/99565a21b772/
Changeset: 99565a21b772
User: Dave Bouvier
Date: 2013-05-16 21:22:18
Summary: Additional exception handling when testing repositories that fail installation before a row is created in the Galaxy database.
Affected #: 1 file
diff -r 23e0fd64cfd4f985aafc8499e123167b28cfe8a0 -r 99565a21b77259aa8ad3a470e49de877fe57f2c8 test/install_and_test_tool_shed_repositories/functional_tests.py
--- a/test/install_and_test_tool_shed_repositories/functional_tests.py
+++ b/test/install_and_test_tool_shed_repositories/functional_tests.py
@@ -650,7 +650,8 @@
repository = test_db_util.get_installed_repository_by_name_owner_changeset_revision( name, owner, changeset_revision )
except:
log.exception( 'Error getting installed repository.' )
- continue
+ success = False
+ pass
# If the installation succeeds, configure and run functional tests for this repository. This is equivalent to
# sh run_functional_tests.sh -installed
if success:
@@ -921,7 +922,11 @@
else:
# Even if the repository failed to install, execute the uninstall method, in case a dependency did succeed.
log.debug( 'Uninstalling repository %s', repository_info_dict[ 'name' ] )
- repository = test_db_util.get_installed_repository_by_name_owner_changeset_revision( name, owner, changeset_revision )
+ try:
+ repository = test_db_util.get_installed_repository_by_name_owner_changeset_revision( name, owner, changeset_revision )
+ except:
+ log.exception( 'Unable to uninstall, no installed repository found.' )
+ continue
test_result = dict( tool_shed=repository.tool_shed,
name=repository.name,
owner=repository.owner,
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: Dave Bouvier: Backward compatibility fix for installing repositories with repository dependencies.
by commits-noreply@bitbucket.org 16 May '13
by commits-noreply@bitbucket.org 16 May '13
16 May '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/23e0fd64cfd4/
Changeset: 23e0fd64cfd4
User: Dave Bouvier
Date: 2013-05-16 21:18:39
Summary: Backward compatibility fix for installing repositories with repository dependencies.
Affected #: 1 file
diff -r 477979aa944f14ab7189aa51e67168da333f0840 -r 23e0fd64cfd4f985aafc8499e123167b28cfe8a0 lib/tool_shed/util/metadata_util.py
--- a/lib/tool_shed/util/metadata_util.py
+++ b/lib/tool_shed/util/metadata_util.py
@@ -1087,7 +1087,7 @@
name = repository_elem.get( 'name' )
owner = repository_elem.get( 'owner' )
changeset_revision = repository_elem.get( 'changeset_revision' )
- prior_installation_required = repository_elem.get( 'prior_installation_required', False )
+ prior_installation_required = str( repository_elem.get( 'prior_installation_required', False ) )
repository_dependency_tup = [ toolshed, name, owner, changeset_revision, prior_installation_required ]
user = None
repository = None
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0