galaxy-commits
Threads by month
- ----- 2024 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2023 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2022 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2021 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2020 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2019 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2018 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2017 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2016 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2015 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2014 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2013 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2012 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2011 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2010 -----
- December
- November
- October
- September
- August
- July
- June
- May
May 2013
- 1 participants
- 218 discussions
commit/galaxy-central: greg: Refactor the tool shed upload controller.
by commits-noreply@bitbucket.org 16 May '13
by commits-noreply@bitbucket.org 16 May '13
16 May '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/c499e581b0f6/
Changeset: c499e581b0f6
User: greg
Date: 2013-05-17 03:45:54
Summary: Refactor the tool shed upload controller.
Affected #: 2 files
diff -r 35de5a8a928bf63fd5de3d1ec066097602acf235 -r c499e581b0f603528b66df0efe7e0e656a8e16f9 lib/galaxy/webapps/tool_shed/controllers/upload.py
--- a/lib/galaxy/webapps/tool_shed/controllers/upload.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/upload.py
@@ -1,7 +1,6 @@
import logging
import os
import shutil
-import sys
import tarfile
import tempfile
import urllib
@@ -9,9 +8,8 @@
from galaxy import util
from galaxy import web
from galaxy.datatypes import checkers
-from galaxy.util import json
-
import tool_shed.util.shed_util_common as suc
+from tool_shed.util import commit_util
from tool_shed.util import metadata_util
from tool_shed.util import repository_dependency_util
from tool_shed.util import tool_dependency_util
@@ -32,278 +30,6 @@
class UploadController( BaseUIController ):
- def check_archive( self, archive ):
- for member in archive.getmembers():
- # Allow regular files and directories only
- if not ( member.isdir() or member.isfile() or member.islnk() ):
- message = "Uploaded archives can only include regular directories and files (no symbolic links, devices, etc). Offender: %s" % str( member )
- return False, message
- for item in [ '.hg', '..', '/' ]:
- if member.name.startswith( item ):
- message = "Uploaded archives cannot contain .hg directories, absolute filenames starting with '/', or filenames with two dots '..'."
- return False, message
- if member.name in [ 'hgrc' ]:
- message = "Uploaded archives cannot contain hgrc files."
- return False, message
- return True, ''
-
- def check_file_contents_for_email_alerts( self, trans ):
- """
- See if any admin users have chosen to receive email alerts when a repository is updated. If so, the file contents of the update must be
- checked for inappropriate content.
- """
- admin_users = trans.app.config.get( "admin_users", "" ).split( "," )
- for repository in trans.sa_session.query( trans.model.Repository ) \
- .filter( trans.model.Repository.table.c.email_alerts != None ):
- email_alerts = json.from_json_string( repository.email_alerts )
- for user_email in email_alerts:
- if user_email in admin_users:
- return True
- return False
-
- def check_file_content_for_html_and_images( self, file_path ):
- message = ''
- if checkers.check_html( file_path ):
- message = 'The file "%s" contains HTML content.\n' % str( file_path )
- elif checkers.check_image( file_path ):
- message = 'The file "%s" contains image content.\n' % str( file_path )
- return message
-
- def create_and_write_tmp_file( self, text ):
- fh = tempfile.NamedTemporaryFile( 'wb' )
- tmp_filename = fh.name
- fh.close()
- fh = open( tmp_filename, 'wb' )
- fh.write( '<?xml version="1.0"?>\n' )
- fh.write( text )
- fh.close()
- return tmp_filename
-
- def get_upload_point( self, repository, **kwd ):
- upload_point = kwd.get( 'upload_point', None )
- if upload_point is not None:
- # The value of upload_point will be something like: database/community_files/000/repo_12/1.bed
- if os.path.exists( upload_point ):
- if os.path.isfile( upload_point ):
- # Get the parent directory
- upload_point, not_needed = os.path.split( upload_point )
- # Now the value of uplaod_point will be something like: database/community_files/000/repo_12/
- upload_point = upload_point.split( 'repo_%d' % repository.id )[ 1 ]
- if upload_point:
- upload_point = upload_point.lstrip( '/' )
- upload_point = upload_point.rstrip( '/' )
- # Now the value of uplaod_point will be something like: /
- if upload_point == '/':
- upload_point = None
- else:
- # Must have been an error selecting something that didn't exist, so default to repository root
- upload_point = None
- return upload_point
-
- def handle_bz2( self, repository, uploaded_file_name ):
- fd, uncompressed = tempfile.mkstemp( prefix='repo_%d_upload_bunzip2_' % repository.id, dir=os.path.dirname( uploaded_file_name ), text=False )
- bzipped_file = bz2.BZ2File( uploaded_file_name, 'rb' )
- while 1:
- try:
- chunk = bzipped_file.read( CHUNK_SIZE )
- except IOError:
- os.close( fd )
- os.remove( uncompressed )
- log.exception( 'Problem uncompressing bz2 data "%s": %s' % ( uploaded_file_name, str( e ) ) )
- return
- if not chunk:
- break
- os.write( fd, chunk )
- os.close( fd )
- bzipped_file.close()
- shutil.move( uncompressed, uploaded_file_name )
-
- def handle_directory_changes( self, trans, repository, full_path, filenames_in_archive, remove_repo_files_not_in_tar, new_repo_alert, commit_message,
- undesirable_dirs_removed, undesirable_files_removed ):
- repo_dir = repository.repo_path( trans.app )
- repo = hg.repository( suc.get_configured_ui(), repo_dir )
- content_alert_str = ''
- files_to_remove = []
- filenames_in_archive = [ os.path.join( full_path, name ) for name in filenames_in_archive ]
- if remove_repo_files_not_in_tar and not repository.is_new( trans.app ):
- # We have a repository that is not new (it contains files), so discover those files that are in the repository, but not in the uploaded archive.
- for root, dirs, files in os.walk( full_path ):
- if root.find( '.hg' ) < 0 and root.find( 'hgrc' ) < 0:
- for undesirable_dir in undesirable_dirs:
- if undesirable_dir in dirs:
- dirs.remove( undesirable_dir )
- undesirable_dirs_removed += 1
- for undesirable_file in undesirable_files:
- if undesirable_file in files:
- files.remove( undesirable_file )
- undesirable_files_removed += 1
- for name in files:
- full_name = os.path.join( root, name )
- if full_name not in filenames_in_archive:
- files_to_remove.append( full_name )
- for repo_file in files_to_remove:
- # Remove files in the repository (relative to the upload point) that are not in the uploaded archive.
- try:
- commands.remove( repo.ui, repo, repo_file, force=True )
- except Exception, e:
- log.debug( "Error removing files using the mercurial API, so trying a different approach, the error was: %s" % str( e ))
- relative_selected_file = selected_file.split( 'repo_%d' % repository.id )[1].lstrip( '/' )
- repo.dirstate.remove( relative_selected_file )
- repo.dirstate.write()
- absolute_selected_file = os.path.abspath( selected_file )
- if os.path.isdir( absolute_selected_file ):
- try:
- os.rmdir( absolute_selected_file )
- except OSError, e:
- # The directory is not empty.
- pass
- elif os.path.isfile( absolute_selected_file ):
- os.remove( absolute_selected_file )
- dir = os.path.split( absolute_selected_file )[0]
- try:
- os.rmdir( dir )
- except OSError, e:
- # The directory is not empty.
- pass
- # See if any admin users have chosen to receive email alerts when a repository is
- # updated. If so, check every uploaded file to ensure content is appropriate.
- check_contents = self.check_file_contents_for_email_alerts( trans )
- for filename_in_archive in filenames_in_archive:
- # Check file content to ensure it is appropriate.
- if check_contents and os.path.isfile( filename_in_archive ):
- content_alert_str += self.check_file_content_for_html_and_images( filename_in_archive )
- commands.add( repo.ui, repo, filename_in_archive )
- if filename_in_archive.endswith( 'tool_data_table_conf.xml.sample' ):
- # Handle the special case where a tool_data_table_conf.xml.sample file is being uploaded by parsing the file and adding new entries
- # to the in-memory trans.app.tool_data_tables dictionary.
- error, message = tool_util.handle_sample_tool_data_table_conf_file( trans.app, filename_in_archive )
- if error:
- return False, message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed
- commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message )
- admin_only = len( repository.downloadable_revisions ) != 1
- suc.handle_email_alerts( trans, repository, content_alert_str=content_alert_str, new_repo_alert=new_repo_alert, admin_only=admin_only )
- return True, '', files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed
-
- def handle_gzip( self, repository, uploaded_file_name ):
- fd, uncompressed = tempfile.mkstemp( prefix='repo_%d_upload_gunzip_' % repository.id, dir=os.path.dirname( uploaded_file_name ), text=False )
- gzipped_file = gzip.GzipFile( uploaded_file_name, 'rb' )
- while 1:
- try:
- chunk = gzipped_file.read( CHUNK_SIZE )
- except IOError, e:
- os.close( fd )
- os.remove( uncompressed )
- log.exception( 'Problem uncompressing gz data "%s": %s' % ( uploaded_file_name, str( e ) ) )
- return
- if not chunk:
- break
- os.write( fd, chunk )
- os.close( fd )
- gzipped_file.close()
- shutil.move( uncompressed, uploaded_file_name )
-
- def handle_repository_dependencies_definition( self, trans, repository_dependencies_config ):
- altered = False
- try:
- # Make sure we're looking at a valid repository_dependencies.xml file.
- tree = util.parse_xml( repository_dependencies_config )
- root = tree.getroot()
- except Exception, e:
- error_message = "Error parsing %s in handle_repository_dependencies_definition: " % str( repository_dependencies_config )
- log.exception( error_message )
- return False, None
- if root.tag == 'repositories':
- for index, elem in enumerate( root ):
- # <repository name="molecule_datatypes" owner="test" changeset_revision="1a070566e9c6" />
- populated, elem = self.handle_repository_dependency_elem( trans, elem )
- if populated:
- root[ index ] = elem
- if not altered:
- altered = True
- return altered, root
- return False, None
-
- def handle_repository_dependency_elem( self, trans, elem ):
- # <repository name="molecule_datatypes" owner="test" changeset_revision="1a070566e9c6" />
- populated = False
- name = elem.get( 'name' )
- owner = elem.get( 'owner' )
- changeset_revision = elem.get( 'changeset_revision' )
- if not changeset_revision:
- # Populate the changeset_revision attribute with the latest installable metadata revision for the defined repository.
- # We use the latest installable revision instead of the latest metadata revision to ensure that the contents of the
- # revision are valid.
- repository = suc.get_repository_by_name_and_owner( trans.app, name, owner )
- if repository:
- repo_dir = repository.repo_path( trans.app )
- repo = hg.repository( suc.get_configured_ui(), repo_dir )
- lastest_installable_changeset_revision = suc.get_latest_downloadable_changeset_revision( trans, repository, repo )
- if lastest_installable_changeset_revision != suc.INITIAL_CHANGELOG_HASH:
- elem.attrib[ 'changeset_revision' ] = lastest_installable_changeset_revision
- populated = True
- return populated, elem
-
- def handle_tool_dependencies_definition( self, trans, tool_dependencies_config ):
- altered = False
- try:
- # Make sure we're looking at a valid tool_dependencies.xml file.
- tree = util.parse_xml( tool_dependencies_config )
- root = tree.getroot()
- except Exception, e:
- error_message = "Error parsing %s in handle_tool_dependencies_definition: " % str( tool_dependencies_config )
- log.exception( error_message )
- return False, None
- if root.tag == 'tool_dependency':
- for root_index, root_elem in enumerate( root ):
- # <package name="eigen" version="2.0.17">
- if root_elem.tag == 'package':
- package_altered = False
- for package_index, package_elem in enumerate( root_elem ):
- if package_elem.tag == 'repository':
- # <repository name="package_eigen_2_0" owner="test" changeset_revision="09eb05087cd0" prior_installation_required="True" />
- populated, repository_elem = self.handle_repository_dependency_elem( trans, package_elem )
- if populated:
- root_elem[ package_index ] = repository_elem
- package_altered = True
- if not altered:
- altered = True
-
- elif package_elem.tag == 'install':
- # <install version="1.0">
- for actions_index, actions_elem in enumerate( package_elem ):
- for action_index, action_elem in enumerate( actions_elem ):
- action_type = action_elem.get( 'type' )
- if action_type == 'set_environment_for_install':
- # <action type="set_environment_for_install">
- # <repository name="package_eigen_2_0" owner="test" changeset_revision="09eb05087cd0">
- # <package name="eigen" version="2.0.17" />
- # </repository>
- # </action>
- for repo_index, repo_elem in enumerate( action_elem ):
- populated, repository_elem = self.handle_repository_dependency_elem( trans, repo_elem )
- if populated:
- action_elem[ repo_index ] = repository_elem
- package_altered = True
- if not altered:
- altered = True
- if package_altered:
- actions_elem[ action_index ] = action_elem
- if package_altered:
- root_elem[ actions_index ] = actions_elem
-
- if package_altered:
- root[ root_index ] = root_elem
- return altered, root
- return False, None
-
- def uncompress( self, repository, uploaded_file_name, uploaded_file_filename, isgzip, isbz2 ):
- if isgzip:
- self.handle_gzip( repository, uploaded_file_name )
- return uploaded_file_filename.rstrip( '.gz' )
- if isbz2:
- self.handle_bz2( repository, uploaded_file_name )
- return uploaded_file_filename.rstrip( '.bz2' )
-
@web.expose
@web.require_login( 'upload', use_panels=True )
def upload( self, trans, **kwd ):
@@ -319,7 +45,7 @@
uncompress_file = util.string_as_bool( kwd.get( 'uncompress_file', 'true' ) )
remove_repo_files_not_in_tar = util.string_as_bool( kwd.get( 'remove_repo_files_not_in_tar', 'true' ) )
uploaded_file = None
- upload_point = self.get_upload_point( repository, **kwd )
+ upload_point = commit_util.get_upload_point( repository, **kwd )
tip = repository.tip( trans.app )
file_data = kwd.get( 'file_data', '' )
url = kwd.get( 'url', '' )
@@ -399,7 +125,7 @@
self.upload_directory( trans, repository, uploaded_directory, upload_point, remove_repo_files_not_in_tar, commit_message, new_repo_alert )
else:
if ( isgzip or isbz2 ) and uncompress_file:
- uploaded_file_filename = self.uncompress( repository, uploaded_file_name, uploaded_file_filename, isgzip, isbz2 )
+ uploaded_file_filename = commit_util.uncompress( repository, uploaded_file_name, uploaded_file_filename, isgzip, isbz2 )
if upload_point is not None:
full_path = os.path.abspath( os.path.join( repo_dir, upload_point, uploaded_file_filename ) )
else:
@@ -407,18 +133,18 @@
# Move some version of the uploaded file to the load_point within the repository hierarchy.
if uploaded_file_filename in [ 'repository_dependencies.xml' ]:
# Inspect the contents of the file to see if changeset_revision values are missing and if so, set them appropriately.
- altered, root = self.handle_repository_dependencies_definition( trans, uploaded_file_name )
+ altered, root = commit_util.handle_repository_dependencies_definition( trans, uploaded_file_name )
if altered:
- tmp_filename = self.create_and_write_tmp_file( util.xml_to_string( root, pretty=True ) )
+ tmp_filename = commit_util.create_and_write_tmp_file( util.xml_to_string( root, pretty=True ) )
shutil.move( tmp_filename, full_path )
else:
shutil.move( uploaded_file_name, full_path )
elif uploaded_file_filename in [ 'tool_dependencies.xml' ]:
# Inspect the contents of the file to see if it defines a complex repository dependency definition whose changeset_revision values
# are missing and if so, set them appropriately.
- altered, root = self.handle_tool_dependencies_definition( trans, uploaded_file_name )
+ altered, root = commit_util.handle_tool_dependencies_definition( trans, uploaded_file_name )
if altered:
- tmp_filename = self.create_and_write_tmp_file( util.xml_to_string( root, pretty=True ) )
+ tmp_filename = commit_util.create_and_write_tmp_file( util.xml_to_string( root, pretty=True ) )
shutil.move( tmp_filename, full_path )
else:
shutil.move( uploaded_file_name, full_path )
@@ -426,9 +152,9 @@
shutil.move( uploaded_file_name, full_path )
# See if any admin users have chosen to receive email alerts when a repository is updated. If so, check every uploaded file to ensure
# content is appropriate.
- check_contents = self.check_file_contents_for_email_alerts( trans )
+ check_contents = commit_util.check_file_contents_for_email_alerts( trans )
if check_contents and os.path.isfile( full_path ):
- content_alert_str = self.check_file_content_for_html_and_images( full_path )
+ content_alert_str = commit_util.check_file_content_for_html_and_images( full_path )
else:
content_alert_str = ''
commands.add( repo.ui, repo, full_path )
@@ -553,8 +279,8 @@
os.remove( repo_path )
shutil.move( os.path.join( uploaded_directory, relative_path ), repo_path )
filenames_in_archive.append( relative_path )
- return self.handle_directory_changes( trans, repository, full_path, filenames_in_archive, remove_repo_files_not_in_tar, new_repo_alert, commit_message,
- undesirable_dirs_removed, undesirable_files_removed )
+ return commit_util.handle_directory_changes( trans, repository, full_path, filenames_in_archive, remove_repo_files_not_in_tar,
+ new_repo_alert, commit_message, undesirable_dirs_removed, undesirable_files_removed )
def upload_tar( self, trans, repository, tar, uploaded_file, upload_point, remove_repo_files_not_in_tar, commit_message, new_repo_alert ):
# Upload a tar archive of files.
@@ -562,7 +288,7 @@
repo = hg.repository( suc.get_configured_ui(), repo_dir )
undesirable_dirs_removed = 0
undesirable_files_removed = 0
- ok, message = self.check_archive( tar )
+ ok, message = commit_util.check_archive( tar )
if not ok:
tar.close()
uploaded_file.close()
@@ -589,12 +315,12 @@
tar.extractall( path=full_path )
tar.close()
uploaded_file.close()
- return self.handle_directory_changes( trans,
- repository,
- full_path,
- filenames_in_archive,
- remove_repo_files_not_in_tar,
- new_repo_alert,
- commit_message,
- undesirable_dirs_removed,
- undesirable_files_removed )
+ return commit_util.handle_directory_changes( trans,
+ repository,
+ full_path,
+ filenames_in_archive,
+ remove_repo_files_not_in_tar,
+ new_repo_alert,
+ commit_message,
+ undesirable_dirs_removed,
+ undesirable_files_removed )
diff -r 35de5a8a928bf63fd5de3d1ec066097602acf235 -r c499e581b0f603528b66df0efe7e0e656a8e16f9 lib/tool_shed/util/commit_util.py
--- /dev/null
+++ b/lib/tool_shed/util/commit_util.py
@@ -0,0 +1,289 @@
+import logging
+import os
+import shutil
+import tempfile
+from galaxy import util
+from galaxy.datatypes import checkers
+from galaxy.util import json
+import tool_shed.util.shed_util_common as suc
+from tool_shed.util import tool_util
+
+from galaxy import eggs
+eggs.require( 'mercurial' )
+from mercurial import commands
+from mercurial import hg
+from mercurial import ui
+
+log = logging.getLogger( __name__ )
+
+def check_archive( archive ):
+ for member in archive.getmembers():
+ # Allow regular files and directories only
+ if not ( member.isdir() or member.isfile() or member.islnk() ):
+ message = "Uploaded archives can only include regular directories and files (no symbolic links, devices, etc). Offender: %s" % str( member )
+ return False, message
+ for item in [ '.hg', '..', '/' ]:
+ if member.name.startswith( item ):
+ message = "Uploaded archives cannot contain .hg directories, absolute filenames starting with '/', or filenames with two dots '..'."
+ return False, message
+ if member.name in [ 'hgrc' ]:
+ message = "Uploaded archives cannot contain hgrc files."
+ return False, message
+ return True, ''
+
+def check_file_contents_for_email_alerts( trans ):
+ """
+ See if any admin users have chosen to receive email alerts when a repository is updated. If so, the file contents of the update must be
+ checked for inappropriate content.
+ """
+ admin_users = trans.app.config.get( "admin_users", "" ).split( "," )
+ for repository in trans.sa_session.query( trans.model.Repository ) \
+ .filter( trans.model.Repository.table.c.email_alerts != None ):
+ email_alerts = json.from_json_string( repository.email_alerts )
+ for user_email in email_alerts:
+ if user_email in admin_users:
+ return True
+ return False
+
+def check_file_content_for_html_and_images( file_path ):
+ message = ''
+ if checkers.check_html( file_path ):
+ message = 'The file "%s" contains HTML content.\n' % str( file_path )
+ elif checkers.check_image( file_path ):
+ message = 'The file "%s" contains image content.\n' % str( file_path )
+ return message
+
+def create_and_write_tmp_file( text ):
+ fh = tempfile.NamedTemporaryFile( 'wb' )
+ tmp_filename = fh.name
+ fh.close()
+ fh = open( tmp_filename, 'wb' )
+ fh.write( '<?xml version="1.0"?>\n' )
+ fh.write( text )
+ fh.close()
+ return tmp_filename
+
+def get_upload_point( repository, **kwd ):
+ upload_point = kwd.get( 'upload_point', None )
+ if upload_point is not None:
+ # The value of upload_point will be something like: database/community_files/000/repo_12/1.bed
+ if os.path.exists( upload_point ):
+ if os.path.isfile( upload_point ):
+ # Get the parent directory
+ upload_point, not_needed = os.path.split( upload_point )
+ # Now the value of uplaod_point will be something like: database/community_files/000/repo_12/
+ upload_point = upload_point.split( 'repo_%d' % repository.id )[ 1 ]
+ if upload_point:
+ upload_point = upload_point.lstrip( '/' )
+ upload_point = upload_point.rstrip( '/' )
+ # Now the value of uplaod_point will be something like: /
+ if upload_point == '/':
+ upload_point = None
+ else:
+ # Must have been an error selecting something that didn't exist, so default to repository root
+ upload_point = None
+ return upload_point
+
+def handle_bz2( repository, uploaded_file_name ):
+ fd, uncompressed = tempfile.mkstemp( prefix='repo_%d_upload_bunzip2_' % repository.id, dir=os.path.dirname( uploaded_file_name ), text=False )
+ bzipped_file = bz2.BZ2File( uploaded_file_name, 'rb' )
+ while 1:
+ try:
+ chunk = bzipped_file.read( CHUNK_SIZE )
+ except IOError:
+ os.close( fd )
+ os.remove( uncompressed )
+ log.exception( 'Problem uncompressing bz2 data "%s": %s' % ( uploaded_file_name, str( e ) ) )
+ return
+ if not chunk:
+ break
+ os.write( fd, chunk )
+ os.close( fd )
+ bzipped_file.close()
+ shutil.move( uncompressed, uploaded_file_name )
+
+def handle_directory_changes( trans, repository, full_path, filenames_in_archive, remove_repo_files_not_in_tar, new_repo_alert, commit_message,
+ undesirable_dirs_removed, undesirable_files_removed ):
+ repo_dir = repository.repo_path( trans.app )
+ repo = hg.repository( suc.get_configured_ui(), repo_dir )
+ content_alert_str = ''
+ files_to_remove = []
+ filenames_in_archive = [ os.path.join( full_path, name ) for name in filenames_in_archive ]
+ if remove_repo_files_not_in_tar and not repository.is_new( trans.app ):
+ # We have a repository that is not new (it contains files), so discover those files that are in the repository, but not in the uploaded archive.
+ for root, dirs, files in os.walk( full_path ):
+ if root.find( '.hg' ) < 0 and root.find( 'hgrc' ) < 0:
+ for undesirable_dir in undesirable_dirs:
+ if undesirable_dir in dirs:
+ dirs.remove( undesirable_dir )
+ undesirable_dirs_removed += 1
+ for undesirable_file in undesirable_files:
+ if undesirable_file in files:
+ files.remove( undesirable_file )
+ undesirable_files_removed += 1
+ for name in files:
+ full_name = os.path.join( root, name )
+ if full_name not in filenames_in_archive:
+ files_to_remove.append( full_name )
+ for repo_file in files_to_remove:
+ # Remove files in the repository (relative to the upload point) that are not in the uploaded archive.
+ try:
+ commands.remove( repo.ui, repo, repo_file, force=True )
+ except Exception, e:
+ log.debug( "Error removing files using the mercurial API, so trying a different approach, the error was: %s" % str( e ))
+ relative_selected_file = selected_file.split( 'repo_%d' % repository.id )[1].lstrip( '/' )
+ repo.dirstate.remove( relative_selected_file )
+ repo.dirstate.write()
+ absolute_selected_file = os.path.abspath( selected_file )
+ if os.path.isdir( absolute_selected_file ):
+ try:
+ os.rmdir( absolute_selected_file )
+ except OSError, e:
+ # The directory is not empty.
+ pass
+ elif os.path.isfile( absolute_selected_file ):
+ os.remove( absolute_selected_file )
+ dir = os.path.split( absolute_selected_file )[0]
+ try:
+ os.rmdir( dir )
+ except OSError, e:
+ # The directory is not empty.
+ pass
+ # See if any admin users have chosen to receive email alerts when a repository is
+ # updated. If so, check every uploaded file to ensure content is appropriate.
+ check_contents = check_file_contents_for_email_alerts( trans )
+ for filename_in_archive in filenames_in_archive:
+ # Check file content to ensure it is appropriate.
+ if check_contents and os.path.isfile( filename_in_archive ):
+ content_alert_str += check_file_content_for_html_and_images( filename_in_archive )
+ commands.add( repo.ui, repo, filename_in_archive )
+ if filename_in_archive.endswith( 'tool_data_table_conf.xml.sample' ):
+ # Handle the special case where a tool_data_table_conf.xml.sample file is being uploaded by parsing the file and adding new entries
+ # to the in-memory trans.app.tool_data_tables dictionary.
+ error, message = tool_util.handle_sample_tool_data_table_conf_file( trans.app, filename_in_archive )
+ if error:
+ return False, message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed
+ commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message )
+ admin_only = len( repository.downloadable_revisions ) != 1
+ suc.handle_email_alerts( trans, repository, content_alert_str=content_alert_str, new_repo_alert=new_repo_alert, admin_only=admin_only )
+ return True, '', files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed
+
+def handle_gzip( repository, uploaded_file_name ):
+ fd, uncompressed = tempfile.mkstemp( prefix='repo_%d_upload_gunzip_' % repository.id, dir=os.path.dirname( uploaded_file_name ), text=False )
+ gzipped_file = gzip.GzipFile( uploaded_file_name, 'rb' )
+ while 1:
+ try:
+ chunk = gzipped_file.read( CHUNK_SIZE )
+ except IOError, e:
+ os.close( fd )
+ os.remove( uncompressed )
+ log.exception( 'Problem uncompressing gz data "%s": %s' % ( uploaded_file_name, str( e ) ) )
+ return
+ if not chunk:
+ break
+ os.write( fd, chunk )
+ os.close( fd )
+ gzipped_file.close()
+ shutil.move( uncompressed, uploaded_file_name )
+
+def handle_repository_dependencies_definition( trans, repository_dependencies_config ):
+ altered = False
+ try:
+ # Make sure we're looking at a valid repository_dependencies.xml file.
+ tree = util.parse_xml( repository_dependencies_config )
+ root = tree.getroot()
+ except Exception, e:
+ error_message = "Error parsing %s in handle_repository_dependencies_definition: " % str( repository_dependencies_config )
+ log.exception( error_message )
+ return False, None
+ if root.tag == 'repositories':
+ for index, elem in enumerate( root ):
+ # <repository name="molecule_datatypes" owner="test" changeset_revision="1a070566e9c6" />
+ populated, elem = handle_repository_dependency_elem( trans, elem )
+ if populated:
+ root[ index ] = elem
+ if not altered:
+ altered = True
+ return altered, root
+ return False, None
+
+def handle_repository_dependency_elem( trans, elem ):
+ # <repository name="molecule_datatypes" owner="test" changeset_revision="1a070566e9c6" />
+ populated = False
+ name = elem.get( 'name' )
+ owner = elem.get( 'owner' )
+ changeset_revision = elem.get( 'changeset_revision' )
+ if not changeset_revision:
+ # Populate the changeset_revision attribute with the latest installable metadata revision for the defined repository.
+ # We use the latest installable revision instead of the latest metadata revision to ensure that the contents of the
+ # revision are valid.
+ repository = suc.get_repository_by_name_and_owner( trans.app, name, owner )
+ if repository:
+ repo_dir = repository.repo_path( trans.app )
+ repo = hg.repository( suc.get_configured_ui(), repo_dir )
+ lastest_installable_changeset_revision = suc.get_latest_downloadable_changeset_revision( trans, repository, repo )
+ if lastest_installable_changeset_revision != suc.INITIAL_CHANGELOG_HASH:
+ elem.attrib[ 'changeset_revision' ] = lastest_installable_changeset_revision
+ populated = True
+ return populated, elem
+
+def handle_tool_dependencies_definition( trans, tool_dependencies_config ):
+ altered = False
+ try:
+ # Make sure we're looking at a valid tool_dependencies.xml file.
+ tree = util.parse_xml( tool_dependencies_config )
+ root = tree.getroot()
+ except Exception, e:
+ error_message = "Error parsing %s in handle_tool_dependencies_definition: " % str( tool_dependencies_config )
+ log.exception( error_message )
+ return False, None
+ if root.tag == 'tool_dependency':
+ for root_index, root_elem in enumerate( root ):
+ # <package name="eigen" version="2.0.17">
+ if root_elem.tag == 'package':
+ package_altered = False
+ for package_index, package_elem in enumerate( root_elem ):
+ if package_elem.tag == 'repository':
+ # <repository name="package_eigen_2_0" owner="test" changeset_revision="09eb05087cd0" prior_installation_required="True" />
+ populated, repository_elem = handle_repository_dependency_elem( trans, package_elem )
+ if populated:
+ root_elem[ package_index ] = repository_elem
+ package_altered = True
+ if not altered:
+ altered = True
+
+ elif package_elem.tag == 'install':
+ # <install version="1.0">
+ for actions_index, actions_elem in enumerate( package_elem ):
+ for action_index, action_elem in enumerate( actions_elem ):
+ action_type = action_elem.get( 'type' )
+ if action_type == 'set_environment_for_install':
+ # <action type="set_environment_for_install">
+ # <repository name="package_eigen_2_0" owner="test" changeset_revision="09eb05087cd0">
+ # <package name="eigen" version="2.0.17" />
+ # </repository>
+ # </action>
+ for repo_index, repo_elem in enumerate( action_elem ):
+ populated, repository_elem = handle_repository_dependency_elem( trans, repo_elem )
+ if populated:
+ action_elem[ repo_index ] = repository_elem
+ package_altered = True
+ if not altered:
+ altered = True
+ if package_altered:
+ actions_elem[ action_index ] = action_elem
+ if package_altered:
+ root_elem[ actions_index ] = actions_elem
+
+ if package_altered:
+ root[ root_index ] = root_elem
+ return altered, root
+ return False, None
+
+def uncompress( repository, uploaded_file_name, uploaded_file_filename, isgzip, isbz2 ):
+ if isgzip:
+ handle_gzip( repository, uploaded_file_name )
+ return uploaded_file_filename.rstrip( '.gz' )
+ if isbz2:
+ handle_bz2( repository, uploaded_file_name )
+ return uploaded_file_filename.rstrip( '.bz2' )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: greg: Eliminate the requirement for repository dependency tag sets to include a changeset_revision attribute.
by commits-noreply@bitbucket.org 16 May '13
by commits-noreply@bitbucket.org 16 May '13
16 May '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/35de5a8a928b/
Changeset: 35de5a8a928b
User: greg
Date: 2013-05-16 22:57:10
Summary: Eliminate the requirement for repository dependency tag sets to include a changeset_revision attribute.
Here is an example of what used to be required:
<repository name="molecule_datatypes" owner="test" changeset_revision="1a070566e9c6" />
Now the following is supported:
<repository name="molecule_datatypes" owner="test" />
When the changeset_revision is missing, the tool shed upload process will automatically determine the latest installable changeset revision for the repository
defined by the name and owner tag attributes and populate the tag with the discovered changeset revision value.
Caveat emptor:
0) I wanted to get this changeset committed before the next stable branch is created on Monday, 5/20/13, so that it would be included in the next Galaxy release.
By definition, anything I do to this new feature from now on will be a bug fix. ;) This feature should be fully functional within the next week.
1) This currently only works when uploading a single file named repository_dependencies.xml or tool_dependencies.xml. It does not yet work when files with these names
are included in a tarball or are uploaded using the ftp or hg clone options in the tool shed upload form.
2) This feature is not yet fully tested, but doesn't break anything I've tried (all tool shed functional tests pass). I've tested it with both simple repository
dependency definitions and complex repository dependency definitions. I've also tested it with the recently supported <action type="set_environment_for_install">
tag set.
Affected #: 3 files
diff -r 03c0f4d30880d3286e0ccc61c3f2554343c8c53e -r 35de5a8a928bf63fd5de3d1ec066097602acf235 lib/galaxy/webapps/tool_shed/controllers/upload.py
--- a/lib/galaxy/webapps/tool_shed/controllers/upload.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/upload.py
@@ -32,6 +32,21 @@
class UploadController( BaseUIController ):
+ def check_archive( self, archive ):
+ for member in archive.getmembers():
+ # Allow regular files and directories only
+ if not ( member.isdir() or member.isfile() or member.islnk() ):
+ message = "Uploaded archives can only include regular directories and files (no symbolic links, devices, etc). Offender: %s" % str( member )
+ return False, message
+ for item in [ '.hg', '..', '/' ]:
+ if member.name.startswith( item ):
+ message = "Uploaded archives cannot contain .hg directories, absolute filenames starting with '/', or filenames with two dots '..'."
+ return False, message
+ if member.name in [ 'hgrc' ]:
+ message = "Uploaded archives cannot contain hgrc files."
+ return False, message
+ return True, ''
+
def check_file_contents_for_email_alerts( self, trans ):
"""
See if any admin users have chosen to receive email alerts when a repository is updated. If so, the file contents of the update must be
@@ -54,32 +69,266 @@
message = 'The file "%s" contains image content.\n' % str( file_path )
return message
+ def create_and_write_tmp_file( self, text ):
+ fh = tempfile.NamedTemporaryFile( 'wb' )
+ tmp_filename = fh.name
+ fh.close()
+ fh = open( tmp_filename, 'wb' )
+ fh.write( '<?xml version="1.0"?>\n' )
+ fh.write( text )
+ fh.close()
+ return tmp_filename
+
+ def get_upload_point( self, repository, **kwd ):
+ upload_point = kwd.get( 'upload_point', None )
+ if upload_point is not None:
+ # The value of upload_point will be something like: database/community_files/000/repo_12/1.bed
+ if os.path.exists( upload_point ):
+ if os.path.isfile( upload_point ):
+ # Get the parent directory
+ upload_point, not_needed = os.path.split( upload_point )
+ # Now the value of uplaod_point will be something like: database/community_files/000/repo_12/
+ upload_point = upload_point.split( 'repo_%d' % repository.id )[ 1 ]
+ if upload_point:
+ upload_point = upload_point.lstrip( '/' )
+ upload_point = upload_point.rstrip( '/' )
+ # Now the value of uplaod_point will be something like: /
+ if upload_point == '/':
+ upload_point = None
+ else:
+ # Must have been an error selecting something that didn't exist, so default to repository root
+ upload_point = None
+ return upload_point
+
+ def handle_bz2( self, repository, uploaded_file_name ):
+ fd, uncompressed = tempfile.mkstemp( prefix='repo_%d_upload_bunzip2_' % repository.id, dir=os.path.dirname( uploaded_file_name ), text=False )
+ bzipped_file = bz2.BZ2File( uploaded_file_name, 'rb' )
+ while 1:
+ try:
+ chunk = bzipped_file.read( CHUNK_SIZE )
+ except IOError:
+ os.close( fd )
+ os.remove( uncompressed )
+ log.exception( 'Problem uncompressing bz2 data "%s": %s' % ( uploaded_file_name, str( e ) ) )
+ return
+ if not chunk:
+ break
+ os.write( fd, chunk )
+ os.close( fd )
+ bzipped_file.close()
+ shutil.move( uncompressed, uploaded_file_name )
+
+ def handle_directory_changes( self, trans, repository, full_path, filenames_in_archive, remove_repo_files_not_in_tar, new_repo_alert, commit_message,
+ undesirable_dirs_removed, undesirable_files_removed ):
+ repo_dir = repository.repo_path( trans.app )
+ repo = hg.repository( suc.get_configured_ui(), repo_dir )
+ content_alert_str = ''
+ files_to_remove = []
+ filenames_in_archive = [ os.path.join( full_path, name ) for name in filenames_in_archive ]
+ if remove_repo_files_not_in_tar and not repository.is_new( trans.app ):
+ # We have a repository that is not new (it contains files), so discover those files that are in the repository, but not in the uploaded archive.
+ for root, dirs, files in os.walk( full_path ):
+ if root.find( '.hg' ) < 0 and root.find( 'hgrc' ) < 0:
+ for undesirable_dir in undesirable_dirs:
+ if undesirable_dir in dirs:
+ dirs.remove( undesirable_dir )
+ undesirable_dirs_removed += 1
+ for undesirable_file in undesirable_files:
+ if undesirable_file in files:
+ files.remove( undesirable_file )
+ undesirable_files_removed += 1
+ for name in files:
+ full_name = os.path.join( root, name )
+ if full_name not in filenames_in_archive:
+ files_to_remove.append( full_name )
+ for repo_file in files_to_remove:
+ # Remove files in the repository (relative to the upload point) that are not in the uploaded archive.
+ try:
+ commands.remove( repo.ui, repo, repo_file, force=True )
+ except Exception, e:
+ log.debug( "Error removing files using the mercurial API, so trying a different approach, the error was: %s" % str( e ))
+ relative_selected_file = selected_file.split( 'repo_%d' % repository.id )[1].lstrip( '/' )
+ repo.dirstate.remove( relative_selected_file )
+ repo.dirstate.write()
+ absolute_selected_file = os.path.abspath( selected_file )
+ if os.path.isdir( absolute_selected_file ):
+ try:
+ os.rmdir( absolute_selected_file )
+ except OSError, e:
+ # The directory is not empty.
+ pass
+ elif os.path.isfile( absolute_selected_file ):
+ os.remove( absolute_selected_file )
+ dir = os.path.split( absolute_selected_file )[0]
+ try:
+ os.rmdir( dir )
+ except OSError, e:
+ # The directory is not empty.
+ pass
+ # See if any admin users have chosen to receive email alerts when a repository is
+ # updated. If so, check every uploaded file to ensure content is appropriate.
+ check_contents = self.check_file_contents_for_email_alerts( trans )
+ for filename_in_archive in filenames_in_archive:
+ # Check file content to ensure it is appropriate.
+ if check_contents and os.path.isfile( filename_in_archive ):
+ content_alert_str += self.check_file_content_for_html_and_images( filename_in_archive )
+ commands.add( repo.ui, repo, filename_in_archive )
+ if filename_in_archive.endswith( 'tool_data_table_conf.xml.sample' ):
+ # Handle the special case where a tool_data_table_conf.xml.sample file is being uploaded by parsing the file and adding new entries
+ # to the in-memory trans.app.tool_data_tables dictionary.
+ error, message = tool_util.handle_sample_tool_data_table_conf_file( trans.app, filename_in_archive )
+ if error:
+ return False, message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed
+ commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message )
+ admin_only = len( repository.downloadable_revisions ) != 1
+ suc.handle_email_alerts( trans, repository, content_alert_str=content_alert_str, new_repo_alert=new_repo_alert, admin_only=admin_only )
+ return True, '', files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed
+
+ def handle_gzip( self, repository, uploaded_file_name ):
+ fd, uncompressed = tempfile.mkstemp( prefix='repo_%d_upload_gunzip_' % repository.id, dir=os.path.dirname( uploaded_file_name ), text=False )
+ gzipped_file = gzip.GzipFile( uploaded_file_name, 'rb' )
+ while 1:
+ try:
+ chunk = gzipped_file.read( CHUNK_SIZE )
+ except IOError, e:
+ os.close( fd )
+ os.remove( uncompressed )
+ log.exception( 'Problem uncompressing gz data "%s": %s' % ( uploaded_file_name, str( e ) ) )
+ return
+ if not chunk:
+ break
+ os.write( fd, chunk )
+ os.close( fd )
+ gzipped_file.close()
+ shutil.move( uncompressed, uploaded_file_name )
+
+ def handle_repository_dependencies_definition( self, trans, repository_dependencies_config ):
+ altered = False
+ try:
+ # Make sure we're looking at a valid repository_dependencies.xml file.
+ tree = util.parse_xml( repository_dependencies_config )
+ root = tree.getroot()
+ except Exception, e:
+ error_message = "Error parsing %s in handle_repository_dependencies_definition: " % str( repository_dependencies_config )
+ log.exception( error_message )
+ return False, None
+ if root.tag == 'repositories':
+ for index, elem in enumerate( root ):
+ # <repository name="molecule_datatypes" owner="test" changeset_revision="1a070566e9c6" />
+ populated, elem = self.handle_repository_dependency_elem( trans, elem )
+ if populated:
+ root[ index ] = elem
+ if not altered:
+ altered = True
+ return altered, root
+ return False, None
+
+ def handle_repository_dependency_elem( self, trans, elem ):
+ # <repository name="molecule_datatypes" owner="test" changeset_revision="1a070566e9c6" />
+ populated = False
+ name = elem.get( 'name' )
+ owner = elem.get( 'owner' )
+ changeset_revision = elem.get( 'changeset_revision' )
+ if not changeset_revision:
+ # Populate the changeset_revision attribute with the latest installable metadata revision for the defined repository.
+ # We use the latest installable revision instead of the latest metadata revision to ensure that the contents of the
+ # revision are valid.
+ repository = suc.get_repository_by_name_and_owner( trans.app, name, owner )
+ if repository:
+ repo_dir = repository.repo_path( trans.app )
+ repo = hg.repository( suc.get_configured_ui(), repo_dir )
+ lastest_installable_changeset_revision = suc.get_latest_downloadable_changeset_revision( trans, repository, repo )
+ if lastest_installable_changeset_revision != suc.INITIAL_CHANGELOG_HASH:
+ elem.attrib[ 'changeset_revision' ] = lastest_installable_changeset_revision
+ populated = True
+ return populated, elem
+
+ def handle_tool_dependencies_definition( self, trans, tool_dependencies_config ):
+ altered = False
+ try:
+ # Make sure we're looking at a valid tool_dependencies.xml file.
+ tree = util.parse_xml( tool_dependencies_config )
+ root = tree.getroot()
+ except Exception, e:
+ error_message = "Error parsing %s in handle_tool_dependencies_definition: " % str( tool_dependencies_config )
+ log.exception( error_message )
+ return False, None
+ if root.tag == 'tool_dependency':
+ for root_index, root_elem in enumerate( root ):
+ # <package name="eigen" version="2.0.17">
+ if root_elem.tag == 'package':
+ package_altered = False
+ for package_index, package_elem in enumerate( root_elem ):
+ if package_elem.tag == 'repository':
+ # <repository name="package_eigen_2_0" owner="test" changeset_revision="09eb05087cd0" prior_installation_required="True" />
+ populated, repository_elem = self.handle_repository_dependency_elem( trans, package_elem )
+ if populated:
+ root_elem[ package_index ] = repository_elem
+ package_altered = True
+ if not altered:
+ altered = True
+
+ elif package_elem.tag == 'install':
+ # <install version="1.0">
+ for actions_index, actions_elem in enumerate( package_elem ):
+ for action_index, action_elem in enumerate( actions_elem ):
+ action_type = action_elem.get( 'type' )
+ if action_type == 'set_environment_for_install':
+ # <action type="set_environment_for_install">
+ # <repository name="package_eigen_2_0" owner="test" changeset_revision="09eb05087cd0">
+ # <package name="eigen" version="2.0.17" />
+ # </repository>
+ # </action>
+ for repo_index, repo_elem in enumerate( action_elem ):
+ populated, repository_elem = self.handle_repository_dependency_elem( trans, repo_elem )
+ if populated:
+ action_elem[ repo_index ] = repository_elem
+ package_altered = True
+ if not altered:
+ altered = True
+ if package_altered:
+ actions_elem[ action_index ] = action_elem
+ if package_altered:
+ root_elem[ actions_index ] = actions_elem
+
+ if package_altered:
+ root[ root_index ] = root_elem
+ return altered, root
+ return False, None
+
+ def uncompress( self, repository, uploaded_file_name, uploaded_file_filename, isgzip, isbz2 ):
+ if isgzip:
+ self.handle_gzip( repository, uploaded_file_name )
+ return uploaded_file_filename.rstrip( '.gz' )
+ if isbz2:
+ self.handle_bz2( repository, uploaded_file_name )
+ return uploaded_file_filename.rstrip( '.bz2' )
+
@web.expose
@web.require_login( 'upload', use_panels=True )
def upload( self, trans, **kwd ):
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
- commit_message = util.restore_text( params.get( 'commit_message', 'Uploaded' ) )
- category_ids = util.listify( params.get( 'category_id', '' ) )
+ message = kwd.get( 'message', '' )
+ status = kwd.get( 'status', 'done' )
+ commit_message = kwd.get( 'commit_message', 'Uploaded' )
+ category_ids = util.listify( kwd.get( 'category_id', '' ) )
categories = suc.get_categories( trans )
- repository_id = params.get( 'repository_id', '' )
+ repository_id = kwd.get( 'repository_id', '' )
repository = suc.get_repository_in_tool_shed( trans, repository_id )
repo_dir = repository.repo_path( trans.app )
repo = hg.repository( suc.get_configured_ui(), repo_dir )
- uncompress_file = util.string_as_bool( params.get( 'uncompress_file', 'true' ) )
- remove_repo_files_not_in_tar = util.string_as_bool( params.get( 'remove_repo_files_not_in_tar', 'true' ) )
+ uncompress_file = util.string_as_bool( kwd.get( 'uncompress_file', 'true' ) )
+ remove_repo_files_not_in_tar = util.string_as_bool( kwd.get( 'remove_repo_files_not_in_tar', 'true' ) )
uploaded_file = None
- upload_point = self.__get_upload_point( repository, **kwd )
+ upload_point = self.get_upload_point( repository, **kwd )
tip = repository.tip( trans.app )
- file_data = params.get( 'file_data', '' )
- url = params.get( 'url', '' )
+ file_data = kwd.get( 'file_data', '' )
+ url = kwd.get( 'url', '' )
# Part of the upload process is sending email notification to those that have registered to
# receive them. One scenario occurs when the first change set is produced for the repository.
# See the suc.handle_email_alerts() method for the definition of the scenarios.
new_repo_alert = repository.is_new( trans.app )
uploaded_directory = None
- if params.get( 'upload_button', False ):
+ if kwd.get( 'upload_button', False ):
if file_data == '' and url == '':
message = 'No files were entered on the upload form.'
status = 'error'
@@ -155,10 +404,28 @@
full_path = os.path.abspath( os.path.join( repo_dir, upload_point, uploaded_file_filename ) )
else:
full_path = os.path.abspath( os.path.join( repo_dir, uploaded_file_filename ) )
- # Move the uploaded file to the load_point within the repository hierarchy.
- shutil.move( uploaded_file_name, full_path )
- # See if any admin users have chosen to receive email alerts when a repository is
- # updated. If so, check every uploaded file to ensure content is appropriate.
+ # Move some version of the uploaded file to the load_point within the repository hierarchy.
+ if uploaded_file_filename in [ 'repository_dependencies.xml' ]:
+ # Inspect the contents of the file to see if changeset_revision values are missing and if so, set them appropriately.
+ altered, root = self.handle_repository_dependencies_definition( trans, uploaded_file_name )
+ if altered:
+ tmp_filename = self.create_and_write_tmp_file( util.xml_to_string( root, pretty=True ) )
+ shutil.move( tmp_filename, full_path )
+ else:
+ shutil.move( uploaded_file_name, full_path )
+ elif uploaded_file_filename in [ 'tool_dependencies.xml' ]:
+ # Inspect the contents of the file to see if it defines a complex repository dependency definition whose changeset_revision values
+ # are missing and if so, set them appropriately.
+ altered, root = self.handle_tool_dependencies_definition( trans, uploaded_file_name )
+ if altered:
+ tmp_filename = self.create_and_write_tmp_file( util.xml_to_string( root, pretty=True ) )
+ shutil.move( tmp_filename, full_path )
+ else:
+ shutil.move( uploaded_file_name, full_path )
+ else:
+ shutil.move( uploaded_file_name, full_path )
+ # See if any admin users have chosen to receive email alerts when a repository is updated. If so, check every uploaded file to ensure
+ # content is appropriate.
check_contents = self.check_file_contents_for_email_alerts( trans )
if check_contents and os.path.isfile( full_path ):
content_alert_str = self.check_file_content_for_html_and_images( full_path )
@@ -264,7 +531,7 @@
filenames_in_archive = []
for root, dirs, files in os.walk( uploaded_directory ):
for uploaded_file in files:
- relative_path = os.path.normpath(os.path.join(os.path.relpath(root, uploaded_directory), uploaded_file))
+ relative_path = os.path.normpath( os.path.join( os.path.relpath( root, uploaded_directory ), uploaded_file ) )
ok = os.path.basename( uploaded_file ) not in undesirable_files
if ok:
for file_path_item in relative_path.split( '/' ):
@@ -275,19 +542,19 @@
else:
undesirable_files_removed += 1
if ok:
- repo_path = os.path.join(full_path, relative_path)
- repo_basedir = os.path.normpath(os.path.join(repo_path, os.path.pardir))
- if not os.path.exists(repo_basedir):
- os.makedirs(repo_basedir)
- if os.path.exists(repo_path):
- if os.path.isdir(repo_path):
- shutil.rmtree(repo_path)
+ repo_path = os.path.join( full_path, relative_path )
+ repo_basedir = os.path.normpath( os.path.join( repo_path, os.path.pardir ) )
+ if not os.path.exists( repo_basedir ):
+ os.makedirs( repo_basedir )
+ if os.path.exists( repo_path ):
+ if os.path.isdir( repo_path ):
+ shutil.rmtree( repo_path )
else:
- os.remove(repo_path)
- shutil.move(os.path.join(uploaded_directory, relative_path), repo_path)
+ os.remove( repo_path )
+ shutil.move( os.path.join( uploaded_directory, relative_path ), repo_path )
filenames_in_archive.append( relative_path )
- return self.__handle_directory_changes(trans, repository, full_path, filenames_in_archive, remove_repo_files_not_in_tar, new_repo_alert, commit_message,
- undesirable_dirs_removed, undesirable_files_removed)
+ return self.handle_directory_changes( trans, repository, full_path, filenames_in_archive, remove_repo_files_not_in_tar, new_repo_alert, commit_message,
+ undesirable_dirs_removed, undesirable_files_removed )
def upload_tar( self, trans, repository, tar, uploaded_file, upload_point, remove_repo_files_not_in_tar, commit_message, new_repo_alert ):
# Upload a tar archive of files.
@@ -295,7 +562,7 @@
repo = hg.repository( suc.get_configured_ui(), repo_dir )
undesirable_dirs_removed = 0
undesirable_files_removed = 0
- ok, message = self.__check_archive( tar )
+ ok, message = self.check_archive( tar )
if not ok:
tar.close()
uploaded_file.close()
@@ -322,159 +589,12 @@
tar.extractall( path=full_path )
tar.close()
uploaded_file.close()
- return self.__handle_directory_changes( trans,
- repository,
- full_path,
- filenames_in_archive,
- remove_repo_files_not_in_tar,
- new_repo_alert,
- commit_message,
- undesirable_dirs_removed,
- undesirable_files_removed )
-
- def __handle_directory_changes( self, trans, repository, full_path, filenames_in_archive, remove_repo_files_not_in_tar, new_repo_alert, commit_message,
- undesirable_dirs_removed, undesirable_files_removed ):
- repo_dir = repository.repo_path( trans.app )
- repo = hg.repository( suc.get_configured_ui(), repo_dir )
- content_alert_str = ''
- files_to_remove = []
- filenames_in_archive = [ os.path.join( full_path, name ) for name in filenames_in_archive ]
- if remove_repo_files_not_in_tar and not repository.is_new( trans.app ):
- # We have a repository that is not new (it contains files), so discover
- # those files that are in the repository, but not in the uploaded archive.
- for root, dirs, files in os.walk( full_path ):
- if root.find( '.hg' ) < 0 and root.find( 'hgrc' ) < 0:
- for undesirable_dir in undesirable_dirs:
- if undesirable_dir in dirs:
- dirs.remove( undesirable_dir )
- undesirable_dirs_removed += 1
- for undesirable_file in undesirable_files:
- if undesirable_file in files:
- files.remove( undesirable_file )
- undesirable_files_removed += 1
- for name in files:
- full_name = os.path.join( root, name )
- if full_name not in filenames_in_archive:
- files_to_remove.append( full_name )
- for repo_file in files_to_remove:
- # Remove files in the repository (relative to the upload point) that are not in the uploaded archive.
- try:
- commands.remove( repo.ui, repo, repo_file, force=True )
- except Exception, e:
- log.debug( "Error removing files using the mercurial API, so trying a different approach, the error was: %s" % str( e ))
- relative_selected_file = selected_file.split( 'repo_%d' % repository.id )[1].lstrip( '/' )
- repo.dirstate.remove( relative_selected_file )
- repo.dirstate.write()
- absolute_selected_file = os.path.abspath( selected_file )
- if os.path.isdir( absolute_selected_file ):
- try:
- os.rmdir( absolute_selected_file )
- except OSError, e:
- # The directory is not empty
- pass
- elif os.path.isfile( absolute_selected_file ):
- os.remove( absolute_selected_file )
- dir = os.path.split( absolute_selected_file )[0]
- try:
- os.rmdir( dir )
- except OSError, e:
- # The directory is not empty
- pass
- # See if any admin users have chosen to receive email alerts when a repository is
- # updated. If so, check every uploaded file to ensure content is appropriate.
- check_contents = self.check_file_contents_for_email_alerts( trans )
- for filename_in_archive in filenames_in_archive:
- # Check file content to ensure it is appropriate.
- if check_contents and os.path.isfile( filename_in_archive ):
- content_alert_str += self.check_file_content_for_html_and_images( filename_in_archive )
- commands.add( repo.ui, repo, filename_in_archive )
- if filename_in_archive.endswith( 'tool_data_table_conf.xml.sample' ):
- # Handle the special case where a tool_data_table_conf.xml.sample file is being uploaded by parsing the file and adding new entries
- # to the in-memory trans.app.tool_data_tables dictionary.
- error, message = tool_util.handle_sample_tool_data_table_conf_file( trans.app, filename_in_archive )
- if error:
- return False, message, files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed
- commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message )
- admin_only = len( repository.downloadable_revisions ) != 1
- suc.handle_email_alerts( trans, repository, content_alert_str=content_alert_str, new_repo_alert=new_repo_alert, admin_only=admin_only )
- return True, '', files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed
-
- def uncompress( self, repository, uploaded_file_name, uploaded_file_filename, isgzip, isbz2 ):
- if isgzip:
- self.__handle_gzip( repository, uploaded_file_name )
- return uploaded_file_filename.rstrip( '.gz' )
- if isbz2:
- self.__handle_bz2( repository, uploaded_file_name )
- return uploaded_file_filename.rstrip( '.bz2' )
-
- def __handle_gzip( self, repository, uploaded_file_name ):
- fd, uncompressed = tempfile.mkstemp( prefix='repo_%d_upload_gunzip_' % repository.id, dir=os.path.dirname( uploaded_file_name ), text=False )
- gzipped_file = gzip.GzipFile( uploaded_file_name, 'rb' )
- while 1:
- try:
- chunk = gzipped_file.read( CHUNK_SIZE )
- except IOError, e:
- os.close( fd )
- os.remove( uncompressed )
- log.exception( 'Problem uncompressing gz data "%s": %s' % ( uploaded_file_name, str( e ) ) )
- return
- if not chunk:
- break
- os.write( fd, chunk )
- os.close( fd )
- gzipped_file.close()
- shutil.move( uncompressed, uploaded_file_name )
-
- def __handle_bz2( self, repository, uploaded_file_name ):
- fd, uncompressed = tempfile.mkstemp( prefix='repo_%d_upload_bunzip2_' % repository.id, dir=os.path.dirname( uploaded_file_name ), text=False )
- bzipped_file = bz2.BZ2File( uploaded_file_name, 'rb' )
- while 1:
- try:
- chunk = bzipped_file.read( CHUNK_SIZE )
- except IOError:
- os.close( fd )
- os.remove( uncompressed )
- log.exception( 'Problem uncompressing bz2 data "%s": %s' % ( uploaded_file_name, str( e ) ) )
- return
- if not chunk:
- break
- os.write( fd, chunk )
- os.close( fd )
- bzipped_file.close()
- shutil.move( uncompressed, uploaded_file_name )
-
- def __get_upload_point( self, repository, **kwd ):
- upload_point = kwd.get( 'upload_point', None )
- if upload_point is not None:
- # The value of upload_point will be something like: database/community_files/000/repo_12/1.bed
- if os.path.exists( upload_point ):
- if os.path.isfile( upload_point ):
- # Get the parent directory
- upload_point, not_needed = os.path.split( upload_point )
- # Now the value of uplaod_point will be something like: database/community_files/000/repo_12/
- upload_point = upload_point.split( 'repo_%d' % repository.id )[ 1 ]
- if upload_point:
- upload_point = upload_point.lstrip( '/' )
- upload_point = upload_point.rstrip( '/' )
- # Now the value of uplaod_point will be something like: /
- if upload_point == '/':
- upload_point = None
- else:
- # Must have been an error selecting something that didn't exist, so default to repository root
- upload_point = None
- return upload_point
-
- def __check_archive( self, archive ):
- for member in archive.getmembers():
- # Allow regular files and directories only
- if not ( member.isdir() or member.isfile() or member.islnk() ):
- message = "Uploaded archives can only include regular directories and files (no symbolic links, devices, etc). Offender: %s" % str( member )
- return False, message
- for item in [ '.hg', '..', '/' ]:
- if member.name.startswith( item ):
- message = "Uploaded archives cannot contain .hg directories, absolute filenames starting with '/', or filenames with two dots '..'."
- return False, message
- if member.name in [ 'hgrc' ]:
- message = "Uploaded archives cannot contain hgrc files."
- return False, message
- return True, ''
+ return self.handle_directory_changes( trans,
+ repository,
+ full_path,
+ filenames_in_archive,
+ remove_repo_files_not_in_tar,
+ new_repo_alert,
+ commit_message,
+ undesirable_dirs_removed,
+ undesirable_files_removed )
diff -r 03c0f4d30880d3286e0ccc61c3f2554343c8c53e -r 35de5a8a928bf63fd5de3d1ec066097602acf235 lib/tool_shed/util/metadata_util.py
--- a/lib/tool_shed/util/metadata_util.py
+++ b/lib/tool_shed/util/metadata_util.py
@@ -1283,6 +1283,13 @@
new_repository_dependencies_metadata = metadata_dict.get( 'repository_dependencies', None )
if new_repository_dependencies_metadata:
new_repository_dependencies = metadata_dict[ 'repository_dependencies' ][ 'repository_dependencies' ]
+ # TODO: We used to include the following here to handle the case where repository dependency definitions were deleted. However,
+ # this erroneously returned True in cases where is should not have done so. This usually occurred where multiple single files
+ # were uploaded when a single tarball should have been. We need to implement support for handling deleted repository dependency
+ # definitions so that we can guarantee reproducibility, but we need to do it in a way that is better than the following.
+ # for new_repository_dependency in new_repository_dependencies:
+ # if new_repository_dependency not in saved_repository_dependencies:
+ # return True
# The saved metadata must be a subset of the new metadata.
for saved_repository_dependency in saved_repository_dependencies:
if saved_repository_dependency not in new_repository_dependencies:
@@ -1314,6 +1321,13 @@
saved_tool_dependencies = metadata[ 'tool_dependencies' ]
new_tool_dependencies = metadata_dict.get( 'tool_dependencies', None )
if new_tool_dependencies:
+ # TODO: We used to include the following here to handle the case where tool dependency definitions were deleted. However,
+ # this erroneously returned True in cases where is should not have done so. This usually occurred where multiple single files
+ # were uploaded when a single tarball should have been. We need to implement support for handling deleted tool dependency
+ # definitions so that we can guarantee reproducibility, but we need to do it in a way that is better than the following.
+ # for new_tool_dependency in new_tool_dependencies:
+ # if new_tool_dependency not in saved_tool_dependencies:
+ # return True
# The saved metadata must be a subset of the new metadata.
for saved_tool_dependency in saved_tool_dependencies:
if saved_tool_dependency not in new_tool_dependencies:
diff -r 03c0f4d30880d3286e0ccc61c3f2554343c8c53e -r 35de5a8a928bf63fd5de3d1ec066097602acf235 lib/tool_shed/util/shed_util_common.py
--- a/lib/tool_shed/util/shed_util_common.py
+++ b/lib/tool_shed/util/shed_util_common.py
@@ -492,6 +492,13 @@
"""Get a tool shed repository record from the Galaxy database defined by the id."""
return trans.sa_session.query( trans.model.ToolShedRepository ).get( trans.security.decode_id( id ) )
+def get_latest_downloadable_changeset_revision( trans, repository, repo ):
+ repository_tip = repository.tip( trans.app )
+ repository_metadata = get_repository_metadata_by_changeset_revision( trans, trans.security.encode_id( repository.id ), repository_tip )
+ if repository_metadata and repository_metadata.downloadable:
+ return repository_tip
+ return get_previous_metadata_changeset_revision( repository, repo, repository_tip, downloadable=True )
+
def get_named_tmpfile_from_ctx( ctx, filename, dir ):
"""Return a named temporary file created from a specified file with a given name included in a repository changeset revision."""
filename = strip_path( filename )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: Dave Bouvier: Only display the tool test results folder if a repository has been installed and tested.
by commits-noreply@bitbucket.org 16 May '13
by commits-noreply@bitbucket.org 16 May '13
16 May '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/03c0f4d30880/
Changeset: 03c0f4d30880
User: Dave Bouvier
Date: 2013-05-16 21:55:01
Summary: Only display the tool test results folder if a repository has been installed and tested.
Affected #: 1 file
diff -r 487b92cec404614ddf29b6dfb36b6b47ba6fc086 -r 03c0f4d30880d3286e0ccc61c3f2554343c8c53e lib/galaxy/webapps/tool_shed/util/container_util.py
--- a/lib/galaxy/webapps/tool_shed/util/container_util.py
+++ b/lib/galaxy/webapps/tool_shed/util/container_util.py
@@ -799,8 +799,11 @@
containers_dict[ 'valid_tools' ] = valid_tools_root_folder
# Tool test results container.
if tool_test_results:
- folder_id, tool_test_results_root_folder = build_tool_test_results_folder( trans, folder_id, tool_test_results, time_last_tested=time_last_tested )
- containers_dict[ 'tool_test_results' ] = tool_test_results_root_folder
+ # Only create and populate this folder if there are actual tool test results to display, since the
+ # display of the 'Test environment' folder by itself can be misleading.
+ if 'passed_tests' in tool_test_results or 'failed_tests' in tool_test_results or 'installation_errors' in tool_test_results:
+ folder_id, tool_test_results_root_folder = build_tool_test_results_folder( trans, folder_id, tool_test_results, time_last_tested=time_last_tested )
+ containers_dict[ 'tool_test_results' ] = tool_test_results_root_folder
# Workflows container.
if metadata:
if 'workflows' in metadata:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: Dave Bouvier: Add tool shed functional test temporary directory to .hgignore.
by commits-noreply@bitbucket.org 16 May '13
by commits-noreply@bitbucket.org 16 May '13
16 May '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/487b92cec404/
Changeset: 487b92cec404
User: Dave Bouvier
Date: 2013-05-16 21:50:58
Summary: Add tool shed functional test temporary directory to .hgignore.
Affected #: 1 file
diff -r 1001212761bbd65fc3b3a69a2ae57ffa257f29f3 -r 487b92cec404614ddf29b6dfb36b6b47ba6fc086 .hgignore
--- a/.hgignore
+++ b/.hgignore
@@ -81,6 +81,7 @@
# Test output
run_functional_tests.html
+test/tool_shed/tmp/*
# Project files
*.kpf
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: Dave Bouvier: Additional exception handling when testing repositories that fail installation before a row is created in the Galaxy database.
by commits-noreply@bitbucket.org 16 May '13
by commits-noreply@bitbucket.org 16 May '13
16 May '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/99565a21b772/
Changeset: 99565a21b772
User: Dave Bouvier
Date: 2013-05-16 21:22:18
Summary: Additional exception handling when testing repositories that fail installation before a row is created in the Galaxy database.
Affected #: 1 file
diff -r 23e0fd64cfd4f985aafc8499e123167b28cfe8a0 -r 99565a21b77259aa8ad3a470e49de877fe57f2c8 test/install_and_test_tool_shed_repositories/functional_tests.py
--- a/test/install_and_test_tool_shed_repositories/functional_tests.py
+++ b/test/install_and_test_tool_shed_repositories/functional_tests.py
@@ -650,7 +650,8 @@
repository = test_db_util.get_installed_repository_by_name_owner_changeset_revision( name, owner, changeset_revision )
except:
log.exception( 'Error getting installed repository.' )
- continue
+ success = False
+ pass
# If the installation succeeds, configure and run functional tests for this repository. This is equivalent to
# sh run_functional_tests.sh -installed
if success:
@@ -921,7 +922,11 @@
else:
# Even if the repository failed to install, execute the uninstall method, in case a dependency did succeed.
log.debug( 'Uninstalling repository %s', repository_info_dict[ 'name' ] )
- repository = test_db_util.get_installed_repository_by_name_owner_changeset_revision( name, owner, changeset_revision )
+ try:
+ repository = test_db_util.get_installed_repository_by_name_owner_changeset_revision( name, owner, changeset_revision )
+ except:
+ log.exception( 'Unable to uninstall, no installed repository found.' )
+ continue
test_result = dict( tool_shed=repository.tool_shed,
name=repository.name,
owner=repository.owner,
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: Dave Bouvier: Backward compatibility fix for installing repositories with repository dependencies.
by commits-noreply@bitbucket.org 16 May '13
by commits-noreply@bitbucket.org 16 May '13
16 May '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/23e0fd64cfd4/
Changeset: 23e0fd64cfd4
User: Dave Bouvier
Date: 2013-05-16 21:18:39
Summary: Backward compatibility fix for installing repositories with repository dependencies.
Affected #: 1 file
diff -r 477979aa944f14ab7189aa51e67168da333f0840 -r 23e0fd64cfd4f985aafc8499e123167b28cfe8a0 lib/tool_shed/util/metadata_util.py
--- a/lib/tool_shed/util/metadata_util.py
+++ b/lib/tool_shed/util/metadata_util.py
@@ -1087,7 +1087,7 @@
name = repository_elem.get( 'name' )
owner = repository_elem.get( 'owner' )
changeset_revision = repository_elem.get( 'changeset_revision' )
- prior_installation_required = repository_elem.get( 'prior_installation_required', False )
+ prior_installation_required = str( repository_elem.get( 'prior_installation_required', False ) )
repository_dependency_tup = [ toolshed, name, owner, changeset_revision, prior_installation_required ]
user = None
repository = None
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: greg: Fixes for determining if a new metadata revision is required when inspecting tool shed repository dependency definitions.
by commits-noreply@bitbucket.org 16 May '13
by commits-noreply@bitbucket.org 16 May '13
16 May '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/477979aa944f/
Changeset: 477979aa944f
User: greg
Date: 2013-05-16 17:55:41
Summary: Fixes for determining if a new metadata revision is required when inspecting tool shed repository dependency definitions.
Affected #: 2 files
diff -r 311cf2ae1623289551b0b568975e9840d46097a2 -r 477979aa944f14ab7189aa51e67168da333f0840 lib/galaxy/webapps/tool_shed/controllers/upload.py
--- a/lib/galaxy/webapps/tool_shed/controllers/upload.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/upload.py
@@ -1,15 +1,27 @@
-import sys, os, shutil, logging, tarfile, tempfile, urllib
+import logging
+import os
+import shutil
+import sys
+import tarfile
+import tempfile
+import urllib
from galaxy.web.base.controller import BaseUIController
-from galaxy import web, util
+from galaxy import util
+from galaxy import web
from galaxy.datatypes import checkers
from galaxy.util import json
import tool_shed.util.shed_util_common as suc
-from tool_shed.util import metadata_util, repository_dependency_util, tool_dependency_util, tool_util
+from tool_shed.util import metadata_util
+from tool_shed.util import repository_dependency_util
+from tool_shed.util import tool_dependency_util
+from tool_shed.util import tool_util
from galaxy import eggs
-eggs.require('mercurial')
-from mercurial import hg, ui, commands
+eggs.require( 'mercurial' )
+from mercurial import commands
+from mercurial import hg
+from mercurial import ui
log = logging.getLogger( __name__ )
@@ -17,7 +29,9 @@
undesirable_files = [ '.hg_archival.txt', 'hgrc', '.DS_Store' ]
CHUNK_SIZE = 2**20 # 1Mb
+
class UploadController( BaseUIController ):
+
def check_file_contents_for_email_alerts( self, trans ):
"""
See if any admin users have chosen to receive email alerts when a repository is updated. If so, the file contents of the update must be
@@ -31,6 +45,7 @@
if user_email in admin_users:
return True
return False
+
def check_file_content_for_html_and_images( self, file_path ):
message = ''
if checkers.check_html( file_path ):
@@ -38,6 +53,7 @@
elif checkers.check_image( file_path ):
message = 'The file "%s" contains image content.\n' % str( file_path )
return message
+
@web.expose
@web.require_login( 'upload', use_panels=True )
def upload( self, trans, **kwd ):
@@ -235,6 +251,7 @@
remove_repo_files_not_in_tar=remove_repo_files_not_in_tar,
message=message,
status=status )
+
def upload_directory( self, trans, repository, uploaded_directory, upload_point, remove_repo_files_not_in_tar, commit_message, new_repo_alert ):
repo_dir = repository.repo_path( trans.app )
repo = hg.repository( suc.get_configured_ui(), repo_dir )
@@ -269,7 +286,9 @@
os.remove(repo_path)
shutil.move(os.path.join(uploaded_directory, relative_path), repo_path)
filenames_in_archive.append( relative_path )
- return self.__handle_directory_changes(trans, repository, full_path, filenames_in_archive, remove_repo_files_not_in_tar, new_repo_alert, commit_message, undesirable_dirs_removed, undesirable_files_removed)
+ return self.__handle_directory_changes(trans, repository, full_path, filenames_in_archive, remove_repo_files_not_in_tar, new_repo_alert, commit_message,
+ undesirable_dirs_removed, undesirable_files_removed)
+
def upload_tar( self, trans, repository, tar, uploaded_file, upload_point, remove_repo_files_not_in_tar, commit_message, new_repo_alert ):
# Upload a tar archive of files.
repo_dir = repository.repo_path( trans.app )
@@ -303,8 +322,18 @@
tar.extractall( path=full_path )
tar.close()
uploaded_file.close()
- return self.__handle_directory_changes(trans, repository, full_path, filenames_in_archive, remove_repo_files_not_in_tar, new_repo_alert, commit_message, undesirable_dirs_removed, undesirable_files_removed)
- def __handle_directory_changes( self, trans, repository, full_path, filenames_in_archive, remove_repo_files_not_in_tar, new_repo_alert, commit_message, undesirable_dirs_removed, undesirable_files_removed ):
+ return self.__handle_directory_changes( trans,
+ repository,
+ full_path,
+ filenames_in_archive,
+ remove_repo_files_not_in_tar,
+ new_repo_alert,
+ commit_message,
+ undesirable_dirs_removed,
+ undesirable_files_removed )
+
+ def __handle_directory_changes( self, trans, repository, full_path, filenames_in_archive, remove_repo_files_not_in_tar, new_repo_alert, commit_message,
+ undesirable_dirs_removed, undesirable_files_removed ):
repo_dir = repository.repo_path( trans.app )
repo = hg.repository( suc.get_configured_ui(), repo_dir )
content_alert_str = ''
@@ -369,6 +398,7 @@
admin_only = len( repository.downloadable_revisions ) != 1
suc.handle_email_alerts( trans, repository, content_alert_str=content_alert_str, new_repo_alert=new_repo_alert, admin_only=admin_only )
return True, '', files_to_remove, content_alert_str, undesirable_dirs_removed, undesirable_files_removed
+
def uncompress( self, repository, uploaded_file_name, uploaded_file_filename, isgzip, isbz2 ):
if isgzip:
self.__handle_gzip( repository, uploaded_file_name )
@@ -376,6 +406,7 @@
if isbz2:
self.__handle_bz2( repository, uploaded_file_name )
return uploaded_file_filename.rstrip( '.bz2' )
+
def __handle_gzip( self, repository, uploaded_file_name ):
fd, uncompressed = tempfile.mkstemp( prefix='repo_%d_upload_gunzip_' % repository.id, dir=os.path.dirname( uploaded_file_name ), text=False )
gzipped_file = gzip.GzipFile( uploaded_file_name, 'rb' )
@@ -393,6 +424,7 @@
os.close( fd )
gzipped_file.close()
shutil.move( uncompressed, uploaded_file_name )
+
def __handle_bz2( self, repository, uploaded_file_name ):
fd, uncompressed = tempfile.mkstemp( prefix='repo_%d_upload_bunzip2_' % repository.id, dir=os.path.dirname( uploaded_file_name ), text=False )
bzipped_file = bz2.BZ2File( uploaded_file_name, 'rb' )
@@ -410,6 +442,7 @@
os.close( fd )
bzipped_file.close()
shutil.move( uncompressed, uploaded_file_name )
+
def __get_upload_point( self, repository, **kwd ):
upload_point = kwd.get( 'upload_point', None )
if upload_point is not None:
@@ -430,6 +463,7 @@
# Must have been an error selecting something that didn't exist, so default to repository root
upload_point = None
return upload_point
+
def __check_archive( self, archive ):
for member in archive.getmembers():
# Allow regular files and directories only
diff -r 311cf2ae1623289551b0b568975e9840d46097a2 -r 477979aa944f14ab7189aa51e67168da333f0840 lib/tool_shed/util/metadata_util.py
--- a/lib/tool_shed/util/metadata_util.py
+++ b/lib/tool_shed/util/metadata_util.py
@@ -1088,7 +1088,7 @@
owner = repository_elem.get( 'owner' )
changeset_revision = repository_elem.get( 'changeset_revision' )
prior_installation_required = repository_elem.get( 'prior_installation_required', False )
- repository_dependency_tup = ( toolshed, name, owner, changeset_revision, prior_installation_required )
+ repository_dependency_tup = [ toolshed, name, owner, changeset_revision, prior_installation_required ]
user = None
repository = None
if app.name == 'galaxy':
@@ -1284,11 +1284,8 @@
if new_repository_dependencies_metadata:
new_repository_dependencies = metadata_dict[ 'repository_dependencies' ][ 'repository_dependencies' ]
# The saved metadata must be a subset of the new metadata.
- for new_repository_dependency in new_repository_dependencies:
- if new_repository_dependency not in saved_repository_dependencies:
- return True
- for saved_repository_dependency_metadata in saved_repository_dependencies:
- if saved_repository_dependency_metadata not in new_repository_dependencies:
+ for saved_repository_dependency in saved_repository_dependencies:
+ if saved_repository_dependency not in new_repository_dependencies:
return True
return False
else:
@@ -1318,9 +1315,6 @@
new_tool_dependencies = metadata_dict.get( 'tool_dependencies', None )
if new_tool_dependencies:
# The saved metadata must be a subset of the new metadata.
- for new_tool_dependency in new_tool_dependencies:
- if new_tool_dependency not in saved_tool_dependencies:
- return True
for saved_tool_dependency in saved_tool_dependencies:
if saved_tool_dependency not in new_tool_dependencies:
return True
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: Dave Bouvier: Add exception handling for edge case when a repository record is not created by the install method.
by commits-noreply@bitbucket.org 15 May '13
by commits-noreply@bitbucket.org 15 May '13
15 May '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/311cf2ae1623/
Changeset: 311cf2ae1623
User: Dave Bouvier
Date: 2013-05-16 05:51:39
Summary: Add exception handling for edge case when a repository record is not created by the install method.
Affected #: 1 file
diff -r 53d3c967d373c3b0589a31a4e400b01786cea091 -r 311cf2ae1623289551b0b568975e9840d46097a2 test/install_and_test_tool_shed_repositories/functional_tests.py
--- a/test/install_and_test_tool_shed_repositories/functional_tests.py
+++ b/test/install_and_test_tool_shed_repositories/functional_tests.py
@@ -646,7 +646,11 @@
repository_status[ 'passed_tests' ] = []
repository_status[ 'failed_tests' ] = []
repository_status[ 'installation_errors' ] = dict( current_repository=[], repository_dependencies=[], tool_dependencies=[] )
- repository = test_db_util.get_installed_repository_by_name_owner_changeset_revision( name, owner, changeset_revision )
+ try:
+ repository = test_db_util.get_installed_repository_by_name_owner_changeset_revision( name, owner, changeset_revision )
+ except:
+ log.exception( 'Error getting installed repository.' )
+ continue
# If the installation succeeds, configure and run functional tests for this repository. This is equivalent to
# sh run_functional_tests.sh -installed
if success:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: greg: Add the ability for a repository owner to check a "Skip automated testing of tools in this revision" checkbox on the Manage repository page. If checked, the tool shed automated test framework will skip testing the tools in that revision.
by commits-noreply@bitbucket.org 15 May '13
by commits-noreply@bitbucket.org 15 May '13
15 May '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/53d3c967d373/
Changeset: 53d3c967d373
User: greg
Date: 2013-05-16 02:15:11
Summary: Add the ability for a repository owner to check a "Skip automated testing of tools in this revision" checkbox on the Manage repository page. If checked, the tool shed automated test framework will skip testing the tools in that revision.
Affected #: 2 files
diff -r f2e183a522d8b1d08ff293f23cce254cc4597721 -r 53d3c967d373c3b0589a31a4e400b01786cea091 lib/galaxy/webapps/tool_shed/controllers/repository.py
--- a/lib/galaxy/webapps/tool_shed/controllers/repository.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/repository.py
@@ -341,7 +341,7 @@
message += '<ul>'
message += '<li>you are authorized to update them</li>'
message += '<li>the latest installable revision contains at least 1 tool</li>'
- message += '<li>the latest installable revision has <b>Skip tool tests</b> checked</li>'
+ message += '<li>the latest installable revision has <b>Skip automated testing of tools in this revision</b> checked</li>'
message += '</ul>'
kwd[ 'message' ] = message
kwd[ 'status' ] = 'warning'
@@ -649,7 +649,7 @@
message = 'This list contains repositories that match the following criteria:<br>'
message += '<ul>'
message += '<li>the latest installable revision contains at least 1 tool</li>'
- message += '<li>the latest installable revision has <b>Skip tool tests</b> checked</li>'
+ message += '<li>the latest installable revision has <b>Skip automated testing of tools in this revision</b> checked</li>'
message += '</ul>'
kwd[ 'message' ] = message
kwd[ 'status' ] = 'warning'
@@ -1976,29 +1976,31 @@
@web.expose
@web.require_login( "manage repository" )
def manage_repository( self, trans, id, **kwd ):
- params = util.Params( kwd )
- message = util.restore_text( params.get( 'message', '' ) )
- status = params.get( 'status', 'done' )
+ message = util.restore_text( kwd.get( 'message', '' ) )
+ status = kwd.get( 'status', 'done' )
repository = suc.get_repository_in_tool_shed( trans, id )
repo_dir = repository.repo_path( trans.app )
repo = hg.repository( suc.get_configured_ui(), repo_dir )
- repo_name = util.restore_text( params.get( 'repo_name', repository.name ) )
- changeset_revision = util.restore_text( params.get( 'changeset_revision', repository.tip( trans.app ) ) )
- description = util.restore_text( params.get( 'description', repository.description ) )
- long_description = util.restore_text( params.get( 'long_description', repository.long_description ) )
+ repo_name = util.restore_text( kwd.get( 'repo_name', repository.name ) )
+ changeset_revision = util.restore_text( kwd.get( 'changeset_revision', repository.tip( trans.app ) ) )
+ description = util.restore_text( kwd.get( 'description', repository.description ) )
+ long_description = util.restore_text( kwd.get( 'long_description', repository.long_description ) )
avg_rating, num_ratings = self.get_ave_item_rating_data( trans.sa_session, repository, webapp_model=trans.model )
- display_reviews = util.string_as_bool( params.get( 'display_reviews', False ) )
- alerts = params.get( 'alerts', '' )
+ display_reviews = util.string_as_bool( kwd.get( 'display_reviews', False ) )
+ alerts = kwd.get( 'alerts', '' )
alerts_checked = CheckboxField.is_checked( alerts )
- category_ids = util.listify( params.get( 'category_id', '' ) )
+ skip_tool_tests = kwd.get( 'skip_tool_tests', '' )
+ skip_tool_tests_checked = CheckboxField.is_checked( skip_tool_tests )
+ skip_tool_tests_comment = kwd.get( 'skip_tool_tests_comment', '' )
+ category_ids = util.listify( kwd.get( 'category_id', '' ) )
if repository.email_alerts:
email_alerts = json.from_json_string( repository.email_alerts )
else:
email_alerts = []
- allow_push = params.get( 'allow_push', '' )
+ allow_push = kwd.get( 'allow_push', '' )
error = False
user = trans.user
- if params.get( 'edit_repository_button', False ):
+ if kwd.get( 'edit_repository_button', False ):
flush_needed = False
# TODO: add a can_manage in the security agent.
if not ( user.email == repository.user.email or trans.user_is_admin() ):
@@ -2034,7 +2036,32 @@
trans.sa_session.add( repository )
trans.sa_session.flush()
message += "The repository information has been updated."
- elif params.get( 'manage_categories_button', False ):
+ elif kwd.get( 'skip_tool_tests_button', False ):
+ repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
+ skip_tool_test = repository_metadata.skip_tool_tests
+ if skip_tool_test:
+ # Handle the mapper behavior.
+ skip_tool_test = skip_tool_test[ 0 ]
+ if skip_tool_tests_checked:
+ if skip_tool_test:
+ comment = skip_tool_test.comment
+ if comment != skip_tool_tests_comment:
+ skip_tool_test.comment = skip_tool_tests_comment
+ trans.sa_session.add( skip_tool_test )
+ trans.sa_session.flush()
+ else:
+ skip_tool_test = trans.model.SkipToolTest( repository_metadata_id=repository_metadata.id,
+ initial_changeset_revision=changeset_revision,
+ comment=skip_tool_tests_comment )
+ trans.sa_session.add( skip_tool_test )
+ trans.sa_session.flush()
+ message = "Tools in this revision will be tested by the automated test framework."
+ else:
+ if skip_tool_test:
+ trans.sa_session.delete( skip_tool_test )
+ trans.sa_session.flush()
+ message = "Tools in this revision will not be tested by the automated test framework."
+ elif kwd.get( 'manage_categories_button', False ):
flush_needed = False
# Delete all currently existing categories.
for rca in repository.categories:
@@ -2043,14 +2070,14 @@
if category_ids:
# Create category associations
for category_id in category_ids:
- category = trans.sa_session.query(model.Category).get( trans.security.decode_id( category_id ) )
+ category = trans.sa_session.query( trans.model.Category ).get( trans.security.decode_id( category_id ) )
rca = trans.app.model.RepositoryCategoryAssociation( repository, category )
trans.sa_session.add( rca )
trans.sa_session.flush()
message = "The repository information has been updated."
- elif params.get( 'user_access_button', False ):
+ elif kwd.get( 'user_access_button', False ):
if allow_push not in [ 'none' ]:
- remove_auth = params.get( 'remove_auth', '' )
+ remove_auth = kwd.get( 'remove_auth', '' )
if remove_auth:
usernames = ''
else:
@@ -2062,7 +2089,7 @@
usernames = ','.join( usernames )
repository.set_allow_push( trans.app, usernames, remove_auth=remove_auth )
message = "The repository information has been updated."
- elif params.get( 'receive_email_alerts_button', False ):
+ elif kwd.get( 'receive_email_alerts_button', False ):
flush_needed = False
if alerts_checked:
if user.email not in email_alerts:
@@ -2097,6 +2124,7 @@
repository_metadata = None
metadata = None
is_malicious = False
+ skip_tool_test = None
repository_dependencies = None
if changeset_revision != suc.INITIAL_CHANGELOG_HASH:
repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
@@ -2114,6 +2142,11 @@
metadata = repository_metadata.metadata
is_malicious = repository_metadata.malicious
if repository_metadata:
+ skip_tool_test = repository_metadata.skip_tool_tests
+ if skip_tool_test:
+ # Handle the mapper behavior.
+ skip_tool_test = skip_tool_test[ 0 ]
+ skip_tool_tests_checked = True
metadata = repository_metadata.metadata
# Get a dictionary of all repositories upon which the contents of the current repository_metadata record depend.
repository_dependencies = \
@@ -2136,6 +2169,7 @@
message += malicious_error
status = 'error'
malicious_check_box = CheckboxField( 'malicious', checked=is_malicious )
+ skip_tool_tests_check_box = CheckboxField( 'skip_tool_tests', checked=skip_tool_tests_checked )
categories = suc.get_categories( trans )
selected_categories = [ rca.category_id for rca in repository.categories ]
containers_dict = container_util.build_repository_containers_for_tool_shed( trans, repository, changeset_revision, repository_dependencies, repository_metadata )
@@ -2159,6 +2193,8 @@
display_reviews=display_reviews,
num_ratings=num_ratings,
alerts_check_box=alerts_check_box,
+ skip_tool_tests_check_box=skip_tool_tests_check_box,
+ skip_tool_test=skip_tool_test,
malicious_check_box=malicious_check_box,
message=message,
status=status )
diff -r f2e183a522d8b1d08ff293f23cce254cc4597721 -r 53d3c967d373c3b0589a31a4e400b01786cea091 templates/webapps/tool_shed/repository/manage_repository.mako
--- a/templates/webapps/tool_shed/repository/manage_repository.mako
+++ b/templates/webapps/tool_shed/repository/manage_repository.mako
@@ -58,6 +58,11 @@
can_view_change_log = not is_new
+ if repository_metadata and repository_metadata.includes_tools:
+ includes_tools = True
+ else:
+ includes_tools = False
+
if changeset_revision_is_repository_tip:
tip_str = 'repository tip'
sharable_link_label = 'Sharable link to this repository:'
@@ -193,6 +198,36 @@
</div></div>
${render_repository_items( metadata, containers_dict, can_set_metadata=True )}
+%if includes_tools:
+ <p/>
+ <div class="toolForm">
+ <div class="toolFormTitle">Automated tool tests</div>
+ <div class="toolFormBody">
+ <form name="skip_tool_tests" id="skip_tool_tests" action="${h.url_for( controller='repository', action='manage_repository', id=trans.security.encode_id( repository.id ), changeset_revision=repository_metadata.changeset_revision )}" method="post" >
+ <div class="form-row">
+ <label>Skip automated testing of tools in this revision:</label>
+ ${skip_tool_tests_check_box.get_html()}
+ <div class="toolParamHelp" style="clear: both;">
+ Check the box and click <b>Save</b> to skip automated testing of the tools in this revision.
+ </div>
+ </div>
+ <div style="clear: both"></div>
+ <div class="form-row">
+ <label>Reason for skipping automated testing:</label>
+ %if skip_tool_test:
+ <pre><textarea name="skip_tool_tests_comment" rows="3" cols="80">${skip_tool_test.comment | h}</textarea></pre>
+ %else:
+ <textarea name="skip_tool_tests_comment" rows="3" cols="80"></textarea>
+ %endif
+ </div>
+ <div style="clear: both"></div>
+ <div class="form-row">
+ <input type="submit" name="skip_tool_tests_button" value="Save"/>
+ </div>
+ </form>
+ </div>
+ </div>
+%endif
<p/><div class="toolForm"><div class="toolFormTitle">Manage categories</div>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: greg: Fixes for determining if a new repository_metadata record is required for a tool shed repository.
by commits-noreply@bitbucket.org 15 May '13
by commits-noreply@bitbucket.org 15 May '13
15 May '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/f2e183a522d8/
Changeset: f2e183a522d8
User: greg
Date: 2013-05-16 02:06:44
Summary: Fixes for determining if a new repository_metadata record is required for a tool shed repository.
Affected #: 2 files
diff -r 590f27390d0dca446fe804a99ba819663653207e -r f2e183a522d8b1d08ff293f23cce254cc4597721 lib/tool_shed/util/metadata_util.py
--- a/lib/tool_shed/util/metadata_util.py
+++ b/lib/tool_shed/util/metadata_util.py
@@ -300,18 +300,17 @@
skip_tool_test = suc.get_skip_tool_test_by_changeset_revision( trans, changeset_hash )
if skip_tool_test:
# We found a skip_tool_test record associated with the changeset_revision, so see if it has a valid repository_revision.
- try:
- repository_revision = skip_tool_test.repository_revision
+ repository_revision = suc.get_repository_metadata_by_id( trans, trans.security.encode_id( repository_metadata.id ) )
+ if repository_revision:
# The skip_tool_test record is associated with a valid repository_metadata record, so proceed.
continue
- except:
- # We found a skip_tool_test record that is associated with an invalid repository_metadata record, so update it to point to
- # the newly created repository_metadata record. In some special cases there may be multiple skip_tool_test records that
- # require updating, so we won't break here, we'll continue to inspect the rest of the changelog up to the received
- # changeset_revision.
- skip_tool_test.repository_revision = repository_metadata
- trans.sa_session.add( skip_tool_test )
- trans.sa_session.flush()
+ # We found a skip_tool_test record that is associated with an invalid repository_metadata record, so update it to point to
+ # the newly created repository_metadata record. In some special cases there may be multiple skip_tool_test records that
+ # require updating, so we won't break here, we'll continue to inspect the rest of the changelog up to the received
+ # changeset_revision.
+ skip_tool_test.repository_metadata_id = repository_metadata.id
+ trans.sa_session.add( skip_tool_test )
+ trans.sa_session.flush()
if changeset_hash == changeset_revision:
# Proceed no further than the received changeset_revision.
break
@@ -1285,16 +1284,19 @@
if new_repository_dependencies_metadata:
new_repository_dependencies = metadata_dict[ 'repository_dependencies' ][ 'repository_dependencies' ]
# The saved metadata must be a subset of the new metadata.
- for new_repository_dependency_metadata in new_repository_dependencies:
- if new_repository_dependency_metadata not in saved_repository_dependencies:
+ for new_repository_dependency in new_repository_dependencies:
+ if new_repository_dependency not in saved_repository_dependencies:
return True
for saved_repository_dependency_metadata in saved_repository_dependencies:
if saved_repository_dependency_metadata not in new_repository_dependencies:
return True
+ return False
else:
# The repository_dependencies.xml file must have been deleted, so create a new repository_metadata record so we always have
# access to the deleted file.
return True
+ else:
+ return False
else:
if 'repository_dependencies' in metadata_dict:
# There is no saved repository metadata, so we need to create a new repository_metadata record.
@@ -1316,16 +1318,19 @@
new_tool_dependencies = metadata_dict.get( 'tool_dependencies', None )
if new_tool_dependencies:
# The saved metadata must be a subset of the new metadata.
- for new_repository_dependency_metadata in new_tool_dependencies:
- if new_repository_dependency_metadata not in saved_tool_dependencies:
+ for new_tool_dependency in new_tool_dependencies:
+ if new_tool_dependency not in saved_tool_dependencies:
return True
- for saved_repository_dependency_metadata in saved_tool_dependencies:
- if saved_repository_dependency_metadata not in new_tool_dependencies:
+ for saved_tool_dependency in saved_tool_dependencies:
+ if saved_tool_dependency not in new_tool_dependencies:
return True
+ return False
else:
# The tool_dependencies.xml file must have been deleted, so create a new repository_metadata record so we always have
# access to the deleted file.
return True
+ else:
+ return False
else:
# We have repository metadata that does not include metadata for any tool dependencies in the repository, so we can update
# the existing repository metadata.
@@ -1367,6 +1372,7 @@
for new_tool_metadata_dict in metadata_dict[ 'tools' ]:
if new_tool_metadata_dict[ 'id' ] not in saved_tool_ids:
return True
+ return False
else:
# The new metadata includes tools, but the stored metadata does not, so we can update the stored metadata.
return False
diff -r 590f27390d0dca446fe804a99ba819663653207e -r f2e183a522d8b1d08ff293f23cce254cc4597721 lib/tool_shed/util/shed_util_common.py
--- a/lib/tool_shed/util/shed_util_common.py
+++ b/lib/tool_shed/util/shed_util_common.py
@@ -724,6 +724,9 @@
return all_metadata_records[ 0 ]
return None
+def get_repository_metadata_by_id( trans, id ):
+ return trans.sa_session.query( trans.model.RepositoryMetadata ).get( trans.security.decode_id( id ) )
+
def get_repository_owner( cleaned_repository_url ):
"""Gvien a "cleaned" repository clone URL, return the owner of the repository."""
items = cleaned_repository_url.split( '/repos/' )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0