galaxy-commits
Threads by month
- ----- 2024 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2023 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2022 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2021 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2020 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2019 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2018 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2017 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2016 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2015 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2014 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2013 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2012 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2011 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2010 -----
- December
- November
- October
- September
- August
- July
- June
- May
May 2013
- 1 participant
- 218 discussions
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/2f4642f06044/
Changeset: 2f4642f06044
User: jmchilton
Date: 2013-05-26 22:10:43
Summary: Fix API uploads (must have broken with API/Web transaction unification).
Affected #: 3 files
diff -r 31714646a7b441f34a43748065278a1b08940c3c -r 2f4642f0604418e3bc33cb779aa352d3f632ca0c lib/galaxy/tools/actions/upload.py
--- a/lib/galaxy/tools/actions/upload.py
+++ b/lib/galaxy/tools/actions/upload.py
@@ -16,7 +16,7 @@
incoming = upload_common.persist_uploads( incoming )
# We can pass an empty string as the cntrller here since it is used to check whether we
# are in an admin view, and this tool is currently not used there.
- uploaded_datasets = upload_common.get_uploaded_datasets( trans, '', incoming, precreated_datasets, dataset_upload_inputs )
+ uploaded_datasets = upload_common.get_uploaded_datasets( trans, '', incoming, precreated_datasets, dataset_upload_inputs, history=history )
upload_common.cleanup_unused_precreated_datasets( precreated_datasets )
if not uploaded_datasets:
@@ -24,4 +24,4 @@
json_file_path = upload_common.create_paramfile( trans, uploaded_datasets )
data_list = [ ud.data for ud in uploaded_datasets ]
- return upload_common.create_job( trans, incoming, tool, json_file_path, data_list )
+ return upload_common.create_job( trans, incoming, tool, json_file_path, data_list, history=history )
diff -r 31714646a7b441f34a43748065278a1b08940c3c -r 2f4642f0604418e3bc33cb779aa352d3f632ca0c lib/galaxy/tools/actions/upload_common.py
--- a/lib/galaxy/tools/actions/upload_common.py
+++ b/lib/galaxy/tools/actions/upload_common.py
@@ -107,11 +107,13 @@
data.state = data.states.ERROR
data.info = 'No file contents were available.'
-def new_history_upload( trans, uploaded_dataset, state=None ):
+def __new_history_upload( trans, uploaded_dataset, history=None, state=None ):
+ if not history:
+ history = trans.history
hda = trans.app.model.HistoryDatasetAssociation( name = uploaded_dataset.name,
extension = uploaded_dataset.file_type,
dbkey = uploaded_dataset.dbkey,
- history = trans.history,
+ history = history,
create_dataset = True,
sa_session = trans.sa_session )
if state:
@@ -120,12 +122,13 @@
hda.state = hda.states.QUEUED
trans.sa_session.add( hda )
trans.sa_session.flush()
- trans.history.add_dataset( hda, genome_build = uploaded_dataset.dbkey )
- permissions = trans.app.security_agent.history_get_default_permissions( trans.history )
+ history.add_dataset( hda, genome_build=uploaded_dataset.dbkey )
+ permissions = trans.app.security_agent.history_get_default_permissions( history )
trans.app.security_agent.set_all_dataset_permissions( hda.dataset, permissions )
trans.sa_session.flush()
return hda
-def new_library_upload( trans, cntrller, uploaded_dataset, library_bunch, state=None ):
+
+def __new_library_upload( trans, cntrller, uploaded_dataset, library_bunch, state=None ):
current_user_roles = trans.get_current_user_roles()
if not ( ( trans.user_is_admin() and cntrller in [ 'library_admin', 'api' ] ) or trans.app.security_agent.can_add_library_item( current_user_roles, library_bunch.folder ) ):
# This doesn't have to be pretty - the only time this should happen is if someone's being malicious.
@@ -206,19 +209,22 @@
trans.sa_session.add( dp )
trans.sa_session.flush()
return ldda
-def new_upload( trans, cntrller, uploaded_dataset, library_bunch=None, state=None ):
+def new_upload( trans, cntrller, uploaded_dataset, library_bunch=None, history=None, state=None ):
if library_bunch:
- return new_library_upload( trans, cntrller, uploaded_dataset, library_bunch, state )
+ return __new_library_upload( trans, cntrller, uploaded_dataset, library_bunch, state )
+ elif history:
+ return __new_history_upload( trans, uploaded_dataset, history=history, state=state )
else:
- return new_history_upload( trans, uploaded_dataset, state )
-def get_uploaded_datasets( trans, cntrller, params, precreated_datasets, dataset_upload_inputs, library_bunch=None ):
+ raise Exception("new_upload must be called with empty values for library_bunch and history")
+
+def get_uploaded_datasets( trans, cntrller, params, precreated_datasets, dataset_upload_inputs, library_bunch=None, history=None ):
uploaded_datasets = []
for dataset_upload_input in dataset_upload_inputs:
uploaded_datasets.extend( dataset_upload_input.get_uploaded_datasets( trans, params ) )
for uploaded_dataset in uploaded_datasets:
data = get_precreated_dataset( precreated_datasets, uploaded_dataset.name )
if not data:
- data = new_upload( trans, cntrller, uploaded_dataset, library_bunch )
+ data = new_upload( trans, cntrller, uploaded_dataset, library_bunch=library_bunch, history=history )
else:
data.extension = uploaded_dataset.file_type
data.dbkey = uploaded_dataset.dbkey
@@ -246,7 +252,9 @@
trans.sa_session.add( info_association )
trans.sa_session.flush()
else:
- trans.history.genome_build = uploaded_dataset.dbkey
+ if not history:
+ history = trans.history
+ history.genome_build = uploaded_dataset.dbkey
uploaded_dataset.data = data
return uploaded_datasets
def create_paramfile( trans, uploaded_datasets ):
@@ -320,7 +328,7 @@
if trans.app.config.external_chown_script:
_chown( json_file_path )
return json_file_path
-def create_job( trans, params, tool, json_file_path, data_list, folder=None ):
+def create_job( trans, params, tool, json_file_path, data_list, folder=None, history=None ):
"""
Create the upload job.
"""
@@ -333,7 +341,9 @@
if folder:
job.library_folder_id = folder.id
else:
- job.history_id = trans.history.id
+ if not history:
+ history = trans.history
+ job.history_id = history.id
job.tool_id = tool.id
job.tool_version = tool.version
job.state = job.states.UPLOAD
diff -r 31714646a7b441f34a43748065278a1b08940c3c -r 2f4642f0604418e3bc33cb779aa352d3f632ca0c lib/galaxy/webapps/galaxy/api/tools.py
--- a/lib/galaxy/webapps/galaxy/api/tools.py
+++ b/lib/galaxy/webapps/galaxy/api/tools.py
@@ -78,7 +78,6 @@
if history_id:
target_history = trans.sa_session.query(trans.app.model.History).get(
trans.security.decode_id(history_id))
- trans.galaxy_session.current_history = target_history
else:
target_history = None
https://bitbucket.org/galaxy/galaxy-central/commits/f3f543b06ba6/
Changeset: f3f543b06ba6
User: dannon
Date: 2013-05-28 20:24:38
Summary: Merged in jmchilton/galaxy-central-library-refactoring (pull request #168)
BugFix: Fix API uploads (must have broken with API/Web transaction unification).
Affected #: 3 files
diff -r 3c8ebea6f6caf344ff4d168610d8f0fe426adb85 -r f3f543b06ba66a6867b18d8331a1d6f5d52e4d0b lib/galaxy/tools/actions/upload.py
--- a/lib/galaxy/tools/actions/upload.py
+++ b/lib/galaxy/tools/actions/upload.py
@@ -16,7 +16,7 @@
incoming = upload_common.persist_uploads( incoming )
# We can pass an empty string as the cntrller here since it is used to check whether we
# are in an admin view, and this tool is currently not used there.
- uploaded_datasets = upload_common.get_uploaded_datasets( trans, '', incoming, precreated_datasets, dataset_upload_inputs )
+ uploaded_datasets = upload_common.get_uploaded_datasets( trans, '', incoming, precreated_datasets, dataset_upload_inputs, history=history )
upload_common.cleanup_unused_precreated_datasets( precreated_datasets )
if not uploaded_datasets:
@@ -24,4 +24,4 @@
json_file_path = upload_common.create_paramfile( trans, uploaded_datasets )
data_list = [ ud.data for ud in uploaded_datasets ]
- return upload_common.create_job( trans, incoming, tool, json_file_path, data_list )
+ return upload_common.create_job( trans, incoming, tool, json_file_path, data_list, history=history )
diff -r 3c8ebea6f6caf344ff4d168610d8f0fe426adb85 -r f3f543b06ba66a6867b18d8331a1d6f5d52e4d0b lib/galaxy/tools/actions/upload_common.py
--- a/lib/galaxy/tools/actions/upload_common.py
+++ b/lib/galaxy/tools/actions/upload_common.py
@@ -107,11 +107,13 @@
data.state = data.states.ERROR
data.info = 'No file contents were available.'
-def new_history_upload( trans, uploaded_dataset, state=None ):
+def __new_history_upload( trans, uploaded_dataset, history=None, state=None ):
+ if not history:
+ history = trans.history
hda = trans.app.model.HistoryDatasetAssociation( name = uploaded_dataset.name,
extension = uploaded_dataset.file_type,
dbkey = uploaded_dataset.dbkey,
- history = trans.history,
+ history = history,
create_dataset = True,
sa_session = trans.sa_session )
if state:
@@ -120,12 +122,13 @@
hda.state = hda.states.QUEUED
trans.sa_session.add( hda )
trans.sa_session.flush()
- trans.history.add_dataset( hda, genome_build = uploaded_dataset.dbkey )
- permissions = trans.app.security_agent.history_get_default_permissions( trans.history )
+ history.add_dataset( hda, genome_build=uploaded_dataset.dbkey )
+ permissions = trans.app.security_agent.history_get_default_permissions( history )
trans.app.security_agent.set_all_dataset_permissions( hda.dataset, permissions )
trans.sa_session.flush()
return hda
-def new_library_upload( trans, cntrller, uploaded_dataset, library_bunch, state=None ):
+
+def __new_library_upload( trans, cntrller, uploaded_dataset, library_bunch, state=None ):
current_user_roles = trans.get_current_user_roles()
if not ( ( trans.user_is_admin() and cntrller in [ 'library_admin', 'api' ] ) or trans.app.security_agent.can_add_library_item( current_user_roles, library_bunch.folder ) ):
# This doesn't have to be pretty - the only time this should happen is if someone's being malicious.
@@ -206,19 +209,22 @@
trans.sa_session.add( dp )
trans.sa_session.flush()
return ldda
-def new_upload( trans, cntrller, uploaded_dataset, library_bunch=None, state=None ):
+def new_upload( trans, cntrller, uploaded_dataset, library_bunch=None, history=None, state=None ):
if library_bunch:
- return new_library_upload( trans, cntrller, uploaded_dataset, library_bunch, state )
+ return __new_library_upload( trans, cntrller, uploaded_dataset, library_bunch, state )
+ elif history:
+ return __new_history_upload( trans, uploaded_dataset, history=history, state=state )
else:
- return new_history_upload( trans, uploaded_dataset, state )
-def get_uploaded_datasets( trans, cntrller, params, precreated_datasets, dataset_upload_inputs, library_bunch=None ):
+ raise Exception("new_upload must be called with empty values for library_bunch and history")
+
+def get_uploaded_datasets( trans, cntrller, params, precreated_datasets, dataset_upload_inputs, library_bunch=None, history=None ):
uploaded_datasets = []
for dataset_upload_input in dataset_upload_inputs:
uploaded_datasets.extend( dataset_upload_input.get_uploaded_datasets( trans, params ) )
for uploaded_dataset in uploaded_datasets:
data = get_precreated_dataset( precreated_datasets, uploaded_dataset.name )
if not data:
- data = new_upload( trans, cntrller, uploaded_dataset, library_bunch )
+ data = new_upload( trans, cntrller, uploaded_dataset, library_bunch=library_bunch, history=history )
else:
data.extension = uploaded_dataset.file_type
data.dbkey = uploaded_dataset.dbkey
@@ -246,7 +252,9 @@
trans.sa_session.add( info_association )
trans.sa_session.flush()
else:
- trans.history.genome_build = uploaded_dataset.dbkey
+ if not history:
+ history = trans.history
+ history.genome_build = uploaded_dataset.dbkey
uploaded_dataset.data = data
return uploaded_datasets
def create_paramfile( trans, uploaded_datasets ):
@@ -320,7 +328,7 @@
if trans.app.config.external_chown_script:
_chown( json_file_path )
return json_file_path
-def create_job( trans, params, tool, json_file_path, data_list, folder=None ):
+def create_job( trans, params, tool, json_file_path, data_list, folder=None, history=None ):
"""
Create the upload job.
"""
@@ -333,7 +341,9 @@
if folder:
job.library_folder_id = folder.id
else:
- job.history_id = trans.history.id
+ if not history:
+ history = trans.history
+ job.history_id = history.id
job.tool_id = tool.id
job.tool_version = tool.version
job.state = job.states.UPLOAD
diff -r 3c8ebea6f6caf344ff4d168610d8f0fe426adb85 -r f3f543b06ba66a6867b18d8331a1d6f5d52e4d0b lib/galaxy/webapps/galaxy/api/tools.py
--- a/lib/galaxy/webapps/galaxy/api/tools.py
+++ b/lib/galaxy/webapps/galaxy/api/tools.py
@@ -78,7 +78,6 @@
if history_id:
target_history = trans.sa_session.query(trans.app.model.History).get(
trans.security.decode_id(history_id))
- trans.galaxy_session.current_history = target_history
else:
target_history = None
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled and are a recipient of
this email.
1
0
commit/galaxy-central: Dave Bouvier: Merge with next-stable.
by commits-noreply@bitbucket.org 28 May '13
by commits-noreply@bitbucket.org 28 May '13
28 May '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/3c8ebea6f6ca/
Changeset: 3c8ebea6f6ca
User: Dave Bouvier
Date: 2013-05-28 18:30:14
Summary: Merge with next-stable.
Affected #: 18 files
diff -r f639dfdbf3a844ad54eba487afe71d8c100f15d2 -r 3c8ebea6f6caf344ff4d168610d8f0fe426adb85 lib/tool_shed/galaxy_install/__init__.py
--- a/lib/tool_shed/galaxy_install/__init__.py
+++ b/lib/tool_shed/galaxy_install/__init__.py
@@ -5,12 +5,7 @@
import tool_shed.util.shed_util_common
import tool_shed.util.datatype_util
from galaxy.model.orm import and_
-
-from galaxy import eggs
-import pkg_resources
-
-pkg_resources.require( 'elementtree' )
-from elementtree import ElementTree, ElementInclude
+from tool_shed.util import xml_util
log = logging.getLogger( __name__ )
@@ -25,13 +20,10 @@
self.installed_repository_dicts = []
def get_repository_install_dir( self, tool_shed_repository ):
for tool_config in self.tool_configs:
- try:
- tree = ElementTree.parse( tool_config )
- except Exception, e:
- log.debug( "Exception attempting to parse %s: %s" % ( str( tool_config ), str( e ) ) )
+ tree, error_message = xml_util.parse_xml( tool_config )
+ if tree is None:
return None
root = tree.getroot()
- ElementInclude.include( root )
tool_path = root.get( 'tool_path', None )
if tool_path:
ts = tool_shed.util.shed_util_common.clean_tool_shed_url( tool_shed_repository.tool_shed )
diff -r f639dfdbf3a844ad54eba487afe71d8c100f15d2 -r 3c8ebea6f6caf344ff4d168610d8f0fe426adb85 lib/tool_shed/galaxy_install/install_manager.py
--- a/lib/tool_shed/galaxy_install/install_manager.py
+++ b/lib/tool_shed/galaxy_install/install_manager.py
@@ -38,50 +38,56 @@
self.proprietary_tool_confs = self.non_shed_tool_panel_configs
self.proprietary_tool_panel_elems = self.get_proprietary_tool_panel_elems( latest_migration_script_number )
# Set the location where the repositories will be installed by retrieving the tool_path setting from migrated_tools_config.
- tree = xml_util.parse_xml( migrated_tools_config )
- root = tree.getroot()
- self.tool_path = root.get( 'tool_path' )
- print "Repositories will be installed into configured tool_path location ", str( self.tool_path )
- # Parse tool_shed_install_config to check each of the tools.
- self.tool_shed_install_config = tool_shed_install_config
- tree = xml_util.parse_xml( tool_shed_install_config )
- root = tree.getroot()
- self.tool_shed = suc.clean_tool_shed_url( root.get( 'name' ) )
- self.repository_owner = common_util.REPOSITORY_OWNER
- index, self.shed_config_dict = suc.get_shed_tool_conf_dict( app, self.migrated_tools_config )
- # Since tool migration scripts can be executed any number of times, we need to make sure the appropriate tools are defined in
- # tool_conf.xml. If no tools associated with the migration stage are defined, no repositories will be installed on disk.
- # The default behavior is that the tool shed is down.
- tool_shed_accessible = False
- tool_panel_configs = common_util.get_non_shed_tool_panel_configs( app )
- if tool_panel_configs:
- # The missing_tool_configs_dict contents are something like:
- # {'emboss_antigenic.xml': [('emboss', '5.0.0', 'package', '\nreadme blah blah blah\n')]}
- tool_shed_accessible, missing_tool_configs_dict = common_util.check_for_missing_tools( app, tool_panel_configs, latest_migration_script_number )
+ tree, error_message = xml_util.parse_xml( migrated_tools_config )
+ if tree is None:
+ print error_message
else:
- # It doesn't matter if the tool shed is accessible since there are no migrated tools defined in the local Galaxy instance, but
- # we have to set the value of tool_shed_accessible to True so that the value of migrate_tools.version can be correctly set in
- # the database.
- tool_shed_accessible = True
- missing_tool_configs_dict = odict()
- if tool_shed_accessible:
- if len( self.proprietary_tool_confs ) == 1:
- plural = ''
- file_names = self.proprietary_tool_confs[ 0 ]
+ root = tree.getroot()
+ self.tool_path = root.get( 'tool_path' )
+ print "Repositories will be installed into configured tool_path location ", str( self.tool_path )
+ # Parse tool_shed_install_config to check each of the tools.
+ self.tool_shed_install_config = tool_shed_install_config
+ tree, error_message = xml_util.parse_xml( tool_shed_install_config )
+ if tree is None:
+ print error_message
else:
- plural = 's'
- file_names = ', '.join( self.proprietary_tool_confs )
- if missing_tool_configs_dict:
- for repository_elem in root:
- self.install_repository( repository_elem, install_dependencies )
- else:
- message = "\nNo tools associated with migration stage %s are defined in your " % str( latest_migration_script_number )
- message += "file%s named %s,\nso no repositories will be installed on disk.\n" % ( plural, file_names )
- print message
- else:
- message = "\nThe main Galaxy tool shed is not currently available, so skipped migration stage %s.\n" % str( latest_migration_script_number )
- message += "Try again later.\n"
- print message
+ root = tree.getroot()
+ self.tool_shed = suc.clean_tool_shed_url( root.get( 'name' ) )
+ self.repository_owner = common_util.REPOSITORY_OWNER
+ index, self.shed_config_dict = suc.get_shed_tool_conf_dict( app, self.migrated_tools_config )
+ # Since tool migration scripts can be executed any number of times, we need to make sure the appropriate tools are defined in
+ # tool_conf.xml. If no tools associated with the migration stage are defined, no repositories will be installed on disk.
+ # The default behavior is that the tool shed is down.
+ tool_shed_accessible = False
+ tool_panel_configs = common_util.get_non_shed_tool_panel_configs( app )
+ if tool_panel_configs:
+ # The missing_tool_configs_dict contents are something like:
+ # {'emboss_antigenic.xml': [('emboss', '5.0.0', 'package', '\nreadme blah blah blah\n')]}
+ tool_shed_accessible, missing_tool_configs_dict = common_util.check_for_missing_tools( app, tool_panel_configs, latest_migration_script_number )
+ else:
+ # It doesn't matter if the tool shed is accessible since there are no migrated tools defined in the local Galaxy instance, but
+ # we have to set the value of tool_shed_accessible to True so that the value of migrate_tools.version can be correctly set in
+ # the database.
+ tool_shed_accessible = True
+ missing_tool_configs_dict = odict()
+ if tool_shed_accessible:
+ if len( self.proprietary_tool_confs ) == 1:
+ plural = ''
+ file_names = self.proprietary_tool_confs[ 0 ]
+ else:
+ plural = 's'
+ file_names = ', '.join( self.proprietary_tool_confs )
+ if missing_tool_configs_dict:
+ for repository_elem in root:
+ self.install_repository( repository_elem, install_dependencies )
+ else:
+ message = "\nNo tools associated with migration stage %s are defined in your " % str( latest_migration_script_number )
+ message += "file%s named %s,\nso no repositories will be installed on disk.\n" % ( plural, file_names )
+ print message
+ else:
+ message = "\nThe main Galaxy tool shed is not currently available, so skipped migration stage %s.\n" % str( latest_migration_script_number )
+ message += "Try again later.\n"
+ print message
def get_guid( self, repository_clone_url, relative_install_dir, tool_config ):
if self.shed_config_dict.get( 'tool_path' ):
@@ -108,7 +114,9 @@
tools_xml_file_path = os.path.abspath( os.path.join( 'scripts', 'migrate_tools', '%04d_tools.xml' % latest_tool_migration_script_number ) )
# Parse the XML and load the file attributes for later checking against the integrated elements from self.proprietary_tool_confs.
migrated_tool_configs = []
- tree = xml_util.parse_xml( tools_xml_file_path )
+ tree, error_message = xml_util.parse_xml( tools_xml_file_path )
+ if tree is None:
+ return []
root = tree.getroot()
for elem in root:
if elem.tag == 'repository':
@@ -117,7 +125,9 @@
# Parse each file in self.proprietary_tool_confs and generate the integrated list of tool panel Elements that contain them.
tool_panel_elems = []
for proprietary_tool_conf in self.proprietary_tool_confs:
- tree = xml_util.parse_xml( proprietary_tool_conf )
+ tree, error_message = xml_util.parse_xml( proprietary_tool_conf )
+ if tree is None:
+ return []
root = tree.getroot()
for elem in root:
if elem.tag == 'tool':
diff -r f639dfdbf3a844ad54eba487afe71d8c100f15d2 -r 3c8ebea6f6caf344ff4d168610d8f0fe426adb85 lib/tool_shed/galaxy_install/repository_util.py
--- a/lib/tool_shed/galaxy_install/repository_util.py
+++ b/lib/tool_shed/galaxy_install/repository_util.py
@@ -19,6 +19,7 @@
from tool_shed.util import metadata_util
from tool_shed.util import tool_dependency_util
from tool_shed.util import tool_util
+from xml.etree import ElementTree as XmlET
from galaxy import eggs
import pkg_resources
@@ -28,10 +29,6 @@
from mercurial import hg
from mercurial import ui
-pkg_resources.require( 'elementtree' )
-from elementtree import ElementTree
-from elementtree.ElementTree import Element
-
log = logging.getLogger( __name__ )
def create_repo_info_dict( trans, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_name=None, repository=None,
@@ -401,7 +398,7 @@
else:
# Appending a new section to trans.app.toolbox.tool_panel
log.debug( "Loading new tool panel section: %s" % new_tool_panel_section )
- elem = Element( 'section' )
+ elem = XmlET.Element( 'section' )
elem.attrib[ 'name' ] = new_tool_panel_section
elem.attrib[ 'id' ] = section_id
elem.attrib[ 'version' ] = ''
diff -r f639dfdbf3a844ad54eba487afe71d8c100f15d2 -r 3c8ebea6f6caf344ff4d168610d8f0fe426adb85 lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
--- a/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
@@ -14,15 +14,6 @@
from galaxy.model.orm import and_
from galaxy.web import url_for
-from galaxy import eggs
-import pkg_resources
-
-pkg_resources.require( 'elementtree' )
-from elementtree import ElementTree
-from elementtree import ElementInclude
-from elementtree.ElementTree import Element
-from elementtree.ElementTree import SubElement
-
log = logging.getLogger( __name__ )
def clean_tool_shed_url( base_url ):
@@ -479,7 +470,7 @@
tool_dependency = None
action_dict = {}
if tool_dependencies_config:
- required_td_tree = xml_util.parse_xml( tool_dependencies_config )
+ required_td_tree, error_message = xml_util.parse_xml( tool_dependencies_config )
if required_td_tree:
required_td_root = required_td_tree.getroot()
for required_td_elem in required_td_root:
diff -r f639dfdbf3a844ad54eba487afe71d8c100f15d2 -r 3c8ebea6f6caf344ff4d168610d8f0fe426adb85 lib/tool_shed/tool_shed_registry.py
--- a/lib/tool_shed/tool_shed_registry.py
+++ b/lib/tool_shed/tool_shed_registry.py
@@ -14,25 +14,28 @@
self.tool_sheds_auth = odict()
if root_dir and config:
# Parse tool_sheds_conf.xml
- tree = xml_util.parse_xml( config )
- root = tree.getroot()
- log.debug( 'Loading references to tool sheds from %s' % config )
- for elem in root.findall( 'tool_shed' ):
- try:
- name = elem.get( 'name', None )
- url = elem.get( 'url', None )
- username = elem.get( 'user', None )
- password = elem.get( 'pass', None )
- if name and url:
- self.tool_sheds[ name ] = url
- self.tool_sheds_auth[ name ] = None
- log.debug( 'Loaded reference to tool shed: %s' % name )
- if name and url and username and password:
- pass_mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
- pass_mgr.add_password( None, url, username, password )
- self.tool_sheds_auth[ name ] = pass_mgr
- except Exception, e:
- log.warning( 'Error loading reference to tool shed "%s", problem: %s' % ( name, str( e ) ) )
+ tree, error_message = xml_util.parse_xml( config )
+ if tree is None:
+ log.warning( "Unable to load references to tool sheds defined in file %s" % str( config ) )
+ else:
+ root = tree.getroot()
+ log.debug( 'Loading references to tool sheds from %s' % config )
+ for elem in root.findall( 'tool_shed' ):
+ try:
+ name = elem.get( 'name', None )
+ url = elem.get( 'url', None )
+ username = elem.get( 'user', None )
+ password = elem.get( 'pass', None )
+ if name and url:
+ self.tool_sheds[ name ] = url
+ self.tool_sheds_auth[ name ] = None
+ log.debug( 'Loaded reference to tool shed: %s' % name )
+ if name and url and username and password:
+ pass_mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
+ pass_mgr.add_password( None, url, username, password )
+ self.tool_sheds_auth[ name ] = pass_mgr
+ except Exception, e:
+ log.warning( 'Error loading reference to tool shed "%s", problem: %s' % ( name, str( e ) ) )
def password_manager_for_url( self, url ):
"""
diff -r f639dfdbf3a844ad54eba487afe71d8c100f15d2 -r 3c8ebea6f6caf344ff4d168610d8f0fe426adb85 lib/tool_shed/util/commit_util.py
--- a/lib/tool_shed/util/commit_util.py
+++ b/lib/tool_shed/util/commit_util.py
@@ -185,14 +185,11 @@
def handle_repository_dependencies_definition( trans, repository_dependencies_config ):
altered = False
- try:
- # Make sure we're looking at a valid repository_dependencies.xml file.
- tree = xml_util.parse_xml( repository_dependencies_config )
- root = tree.getroot()
- except Exception, e:
- error_message = "Error parsing %s in handle_repository_dependencies_definition: " % str( repository_dependencies_config )
- log.exception( error_message )
+ # Make sure we're looking at a valid repository_dependencies.xml file.
+ tree, error_message = xml_util.parse_xml( repository_dependencies_config )
+ if tree is None:
return False, None
+ root = tree.getroot()
if root.tag == 'repositories':
for index, elem in enumerate( root ):
# <repository name="molecule_datatypes" owner="test" changeset_revision="1a070566e9c6" />
@@ -232,14 +229,11 @@
def handle_tool_dependencies_definition( trans, tool_dependencies_config ):
altered = False
- try:
- # Make sure we're looking at a valid tool_dependencies.xml file.
- tree = xml_util.parse_xml( tool_dependencies_config )
- root = tree.getroot()
- except Exception, e:
- error_message = "Error parsing %s in handle_tool_dependencies_definition: " % str( tool_dependencies_config )
- log.exception( error_message )
+ # Make sure we're looking at a valid tool_dependencies.xml file.
+ tree, error_message = xml_util.parse_xml( tool_dependencies_config )
+ if tree is None:
return False, None
+ root = tree.getroot()
if root.tag == 'tool_dependency':
for root_index, root_elem in enumerate( root ):
# <package name="eigen" version="2.0.17">
diff -r f639dfdbf3a844ad54eba487afe71d8c100f15d2 -r 3c8ebea6f6caf344ff4d168610d8f0fe426adb85 lib/tool_shed/util/common_install_util.py
--- a/lib/tool_shed/util/common_install_util.py
+++ b/lib/tool_shed/util/common_install_util.py
@@ -13,17 +13,10 @@
from tool_shed.util import data_manager_util
from tool_shed.util import datatype_util
from tool_shed.util import tool_util
+from tool_shed.util import xml_util
from tool_shed.galaxy_install.tool_dependencies.install_util import install_package
from tool_shed.galaxy_install.tool_dependencies.install_util import set_environment
-import pkg_resources
-
-pkg_resources.require( 'elementtree' )
-from elementtree import ElementTree
-from elementtree import ElementInclude
-from elementtree.ElementTree import Element
-from elementtree.ElementTree import SubElement
-
log = logging.getLogger( __name__ )
def activate_repository( trans, repository ):
@@ -318,13 +311,10 @@
sa_session = app.model.context.current
installed_tool_dependencies = []
# Parse the tool_dependencies.xml config.
- try:
- tree = ElementTree.parse( tool_dependencies_config )
- except Exception, e:
- log.debug( "Exception attempting to parse %s: %s" % ( str( tool_dependencies_config ), str( e ) ) )
+ tree, error_message = xml_util.parse_xml( tool_dependencies_config )
+ if tree is None:
return installed_tool_dependencies
root = tree.getroot()
- ElementInclude.include( root )
fabric_version_checked = False
for elem in root:
if elem.tag == 'package':
diff -r f639dfdbf3a844ad54eba487afe71d8c100f15d2 -r 3c8ebea6f6caf344ff4d168610d8f0fe426adb85 lib/tool_shed/util/common_util.py
--- a/lib/tool_shed/util/common_util.py
+++ b/lib/tool_shed/util/common_util.py
@@ -11,7 +11,9 @@
tools_xml_file_path = os.path.abspath( os.path.join( 'scripts', 'migrate_tools', '%04d_tools.xml' % latest_tool_migration_script_number ) )
# Parse the XML and load the file attributes for later checking against the proprietary tool_panel_config.
migrated_tool_configs_dict = odict()
- tree = xml_util.parse_xml( tools_xml_file_path )
+ tree, error_message = xml_util.parse_xml( tools_xml_file_path )
+ if tree is None:
+ return False, odict()
root = tree.getroot()
tool_shed = root.get( 'name' )
tool_shed_url = get_tool_shed_url_from_tools_xml_file_path( app, tool_shed )
@@ -48,15 +50,16 @@
# Parse the proprietary tool_panel_configs (the default is tool_conf.xml) and generate the list of missing tool config file names.
missing_tool_configs_dict = odict()
for tool_panel_config in tool_panel_configs:
- tree = xml_util.parse_xml( tool_panel_config )
- root = tree.getroot()
- for elem in root:
- if elem.tag == 'tool':
- missing_tool_configs_dict = check_tool_tag_set( elem, migrated_tool_configs_dict, missing_tool_configs_dict )
- elif elem.tag == 'section':
- for section_elem in elem:
- if section_elem.tag == 'tool':
- missing_tool_configs_dict = check_tool_tag_set( section_elem, migrated_tool_configs_dict, missing_tool_configs_dict )
+ tree, error_message = xml_util.parse_xml( tool_panel_config )
+ if tree:
+ root = tree.getroot()
+ for elem in root:
+ if elem.tag == 'tool':
+ missing_tool_configs_dict = check_tool_tag_set( elem, migrated_tool_configs_dict, missing_tool_configs_dict )
+ elif elem.tag == 'section':
+ for section_elem in elem:
+ if section_elem.tag == 'tool':
+ missing_tool_configs_dict = check_tool_tag_set( section_elem, migrated_tool_configs_dict, missing_tool_configs_dict )
else:
exception_msg = '\n\nThe entry for the main Galaxy tool shed at %s is missing from the %s file. ' % ( tool_shed, app.config.tool_sheds_config )
exception_msg += 'The entry for this tool shed must always be available in this file, so re-add it before attempting to start your Galaxy server.\n'
@@ -78,7 +81,9 @@
for config_filename in app.config.tool_configs:
# Any config file that includes a tool_path attribute in the root tag set like the following is shed-related.
# <toolbox tool_path="../shed_tools">
- tree = xml_util.parse_xml( config_filename )
+ tree, error_message = xml_util.parse_xml( config_filename )
+ if tree is None:
+ continue
root = tree.getroot()
tool_path = root.get( 'tool_path', None )
if tool_path is None:
diff -r f639dfdbf3a844ad54eba487afe71d8c100f15d2 -r 3c8ebea6f6caf344ff4d168610d8f0fe426adb85 lib/tool_shed/util/data_manager_util.py
--- a/lib/tool_shed/util/data_manager_util.py
+++ b/lib/tool_shed/util/data_manager_util.py
@@ -21,7 +21,10 @@
for tool_tup in repository_tools_tups:
repository_tools_by_guid[ tool_tup[ 1 ] ] = dict( tool_config_filename=tool_tup[ 0 ], tool=tool_tup[ 2 ] )
# Load existing data managers.
- config_elems = [ elem for elem in xml_util.parse_xml( shed_data_manager_conf_filename ).getroot() ]
+ tree, error_message = xml_util.parse_xml( shed_data_manager_conf_filename )
+ if tree is None:
+ return rval
+ config_elems = [ elem for elem in tree.getroot() ]
repo_data_manager_conf_filename = metadata_dict['data_manager'].get( 'config_filename', None )
if repo_data_manager_conf_filename is None:
log.debug( "No data_manager_conf.xml file has been defined." )
@@ -29,7 +32,9 @@
data_manager_config_has_changes = False
relative_repo_data_manager_dir = os.path.join( shed_config_dict.get( 'tool_path', '' ), relative_install_dir )
repo_data_manager_conf_filename = os.path.join( relative_repo_data_manager_dir, repo_data_manager_conf_filename )
- tree = xml_util.parse_xml( repo_data_manager_conf_filename )
+ tree, error_message = xml_util.parse_xml( repo_data_manager_conf_filename )
+ if tree is None:
+ return rval
root = tree.getroot()
for elem in root:
if elem.tag == 'data_manager':
@@ -89,37 +94,38 @@
metadata_dict = repository.metadata
if metadata_dict and 'data_manager' in metadata_dict:
shed_data_manager_conf_filename = app.config.shed_data_manager_config_file
- tree = xml_util.parse_xml( shed_data_manager_conf_filename )
- root = tree.getroot()
- assert root.tag == 'data_managers', 'The file provided (%s) for removing data managers from is not a valid data manager xml file.' % ( shed_data_manager_conf_filename )
- guids = [ data_manager_dict.get( 'guid' ) for data_manager_dict in metadata_dict.get( 'data_manager', {} ).get( 'data_managers', {} ).itervalues() if 'guid' in data_manager_dict ]
- load_old_data_managers_by_guid = {}
- data_manager_config_has_changes = False
- config_elems = []
- for elem in root:
- # Match Data Manager elements by guid and installed_changeset_revision
- elem_matches_removed_data_manager = False
- if elem.tag == 'data_manager':
- guid = elem.get( 'guid', None )
- if guid in guids:
- tool_elem = elem.find( 'tool' )
- if tool_elem is not None:
- installed_changeset_revision_elem = tool_elem.find( 'installed_changeset_revision' )
- if installed_changeset_revision_elem is not None:
- if installed_changeset_revision_elem.text == repository.installed_changeset_revision:
- elem_matches_removed_data_manager = True
- else:
- # This is a different version, which had been previously overridden
- load_old_data_managers_by_guid[ guid ] = elem
- if elem_matches_removed_data_manager:
- data_manager_config_has_changes = True
- else:
- config_elems.append( elem )
- # Remove data managers from in memory
- app.data_managers.remove_manager( guids )
- # Load other versions of any now uninstalled data managers, if any
- for elem in load_old_data_managers_by_guid.itervalues():
- app.data_managers.load_manager_from_elem( elem )
- # Persist the altered shed_data_manager_config file.
- if data_manager_config_has_changes:
- data_manager_config_elems_to_xml_file( app, config_elems, shed_data_manager_conf_filename )
+ tree, error_message = xml_util.parse_xml( shed_data_manager_conf_filename )
+ if tree:
+ root = tree.getroot()
+ assert root.tag == 'data_managers', 'The file provided (%s) for removing data managers from is not a valid data manager xml file.' % ( shed_data_manager_conf_filename )
+ guids = [ data_manager_dict.get( 'guid' ) for data_manager_dict in metadata_dict.get( 'data_manager', {} ).get( 'data_managers', {} ).itervalues() if 'guid' in data_manager_dict ]
+ load_old_data_managers_by_guid = {}
+ data_manager_config_has_changes = False
+ config_elems = []
+ for elem in root:
+ # Match Data Manager elements by guid and installed_changeset_revision
+ elem_matches_removed_data_manager = False
+ if elem.tag == 'data_manager':
+ guid = elem.get( 'guid', None )
+ if guid in guids:
+ tool_elem = elem.find( 'tool' )
+ if tool_elem is not None:
+ installed_changeset_revision_elem = tool_elem.find( 'installed_changeset_revision' )
+ if installed_changeset_revision_elem is not None:
+ if installed_changeset_revision_elem.text == repository.installed_changeset_revision:
+ elem_matches_removed_data_manager = True
+ else:
+ # This is a different version, which had been previously overridden
+ load_old_data_managers_by_guid[ guid ] = elem
+ if elem_matches_removed_data_manager:
+ data_manager_config_has_changes = True
+ else:
+ config_elems.append( elem )
+ # Remove data managers from in memory
+ app.data_managers.remove_manager( guids )
+ # Load other versions of any now uninstalled data managers, if any
+ for elem in load_old_data_managers_by_guid.itervalues():
+ app.data_managers.load_manager_from_elem( elem )
+ # Persist the altered shed_data_manager_config file.
+ if data_manager_config_has_changes:
+ data_manager_config_elems_to_xml_file( app, config_elems, shed_data_manager_conf_filename )
diff -r f639dfdbf3a844ad54eba487afe71d8c100f15d2 -r 3c8ebea6f6caf344ff4d168610d8f0fe426adb85 lib/tool_shed/util/datatype_util.py
--- a/lib/tool_shed/util/datatype_util.py
+++ b/lib/tool_shed/util/datatype_util.py
@@ -5,14 +5,6 @@
from tool_shed.util import xml_util
import tool_shed.util.shed_util_common as suc
-import pkg_resources
-
-pkg_resources.require( 'elementtree' )
-from elementtree import ElementTree
-from elementtree import ElementInclude
-from elementtree.ElementTree import Element
-from elementtree.ElementTree import SubElement
-
log = logging.getLogger( __name__ )
def alter_config_and_load_prorietary_datatypes( app, datatypes_config, relative_install_dir, deactivate=False, override=True ):
@@ -23,10 +15,8 @@
be False when a tool shed repository is being installed. Since installation is occurring after the datatypes registry
has been initialized, the registry's contents cannot be overridden by conflicting data types.
"""
- try:
- tree = xml_util.parse_xml( datatypes_config )
- except Exception, e:
- log.debug( "Error parsing %s, exception: %s" % ( datatypes_config, str( e ) ) )
+ tree, error_message = xml_util.parse_xml( datatypes_config )
+ if tree is None:
return None, None
datatypes_config_root = tree.getroot()
registration = datatypes_config_root.find( 'registration' )
diff -r f639dfdbf3a844ad54eba487afe71d8c100f15d2 -r 3c8ebea6f6caf344ff4d168610d8f0fe426adb85 lib/tool_shed/util/metadata_util.py
--- a/lib/tool_shed/util/metadata_util.py
+++ b/lib/tool_shed/util/metadata_util.py
@@ -26,10 +26,6 @@
from mercurial import hg
from mercurial import ui
-pkg_resources.require( 'elementtree' )
-from elementtree import ElementTree
-from elementtree import ElementInclude
-
log = logging.getLogger( __name__ )
REPOSITORY_DATA_MANAGER_CONFIG_FILENAME = "data_manager_conf.xml"
@@ -363,12 +359,9 @@
'invalid_data_managers': invalid_data_managers,
'error_messages': [] }
metadata_dict[ 'data_manager' ] = data_manager_metadata
- try:
- tree = xml_util.parse_xml( data_manager_config_filename )
- except Exception, e:
+ tree, error_message = xml_util.parse_xml( data_manager_config_filename )
+ if tree is None:
# We are not able to load any data managers.
- error_message = 'There was an error parsing your Data Manager config file "%s": %s' % ( data_manager_config_filename, e )
- log.error( error_message )
data_manager_metadata[ 'error_messages' ].append( error_message )
return metadata_dict
tool_path = None
@@ -436,13 +429,10 @@
def generate_datatypes_metadata( app, repository, repository_clone_url, repository_files_dir, datatypes_config, metadata_dict ):
"""Update the received metadata_dict with information from the parsed datatypes_config."""
- try:
- tree = ElementTree.parse( datatypes_config )
- except Exception, e:
- log.debug( "Exception attempting to parse %s: %s" % ( str( datatypes_config ), str( e ) ) )
+ tree, error_message = xml_util.parse_xml( datatypes_config )
+ if tree is None:
return metadata_dict
root = tree.getroot()
- ElementInclude.include( root )
repository_datatype_code_files = []
datatype_files = root.find( 'datatype_files' )
if datatype_files:
@@ -637,14 +627,13 @@
if os.path.getsize( full_path ) > 0:
if not ( checkers.check_binary( full_path ) or checkers.check_image( full_path ) or checkers.check_gzip( full_path )[ 0 ]
or checkers.check_bz2( full_path )[ 0 ] or checkers.check_zip( full_path ) ):
- try:
- # Make sure we're looking at a tool config and not a display application config or something else.
- element_tree = xml_util.parse_xml( full_path )
+ # Make sure we're looking at a tool config and not a display application config or something else.
+ element_tree, error_message = xml_util.parse_xml( full_path )
+ if element_tree is None:
+ is_tool = False
+ else:
element_tree_root = element_tree.getroot()
is_tool = element_tree_root.tag == 'tool'
- except Exception, e:
- log.debug( "Error parsing %s, exception: %s" % ( full_path, str( e ) ) )
- is_tool = False
if is_tool:
tool, valid, error_message = tool_util.load_tool_from_config( app, app.security.encode_id( repository.id ), full_path )
if tool is None:
@@ -750,15 +739,13 @@
is called from the tool shed as well as from Galaxy.
"""
error_message = ''
- try:
- # Make sure we're looking at a valid repository_dependencies.xml file.
- tree = xml_util.parse_xml( repository_dependencies_config )
+ # Make sure we're looking at a valid repository_dependencies.xml file.
+ tree, error_message = xml_util.parse_xml( repository_dependencies_config )
+ if tree is None:
+ xml_is_valid = False
+ else:
root = tree.getroot()
xml_is_valid = root.tag == 'repositories'
- except Exception, e:
- error_message = "Error parsing %s, exception: %s" % ( repository_dependencies_config, str( e ) )
- log.debug( error_message )
- xml_is_valid = False
if xml_is_valid:
invalid_repository_dependencies_dict = dict( description=root.get( 'description' ) )
invalid_repository_dependency_tups = []
@@ -795,14 +782,10 @@
else:
original_valid_tool_dependencies_dict = None
original_invalid_tool_dependencies_dict = None
- try:
- tree = ElementTree.parse( tool_dependencies_config )
- except Exception, e:
- error_message = "Exception attempting to parse tool_dependencies.xml: %s" %str( e )
- log.debug( error_message )
+ tree, error_message = xml_util.parse_xml( tool_dependencies_config )
+ if tree is None:
return metadata_dict, error_message
root = tree.getroot()
- ElementInclude.include( root )
tool_dependency_is_valid = True
valid_tool_dependencies_dict = {}
invalid_tool_dependencies_dict = {}
diff -r f639dfdbf3a844ad54eba487afe71d8c100f15d2 -r 3c8ebea6f6caf344ff4d168610d8f0fe426adb85 lib/tool_shed/util/shed_util_common.py
--- a/lib/tool_shed/util/shed_util_common.py
+++ b/lib/tool_shed/util/shed_util_common.py
@@ -15,18 +15,13 @@
import sqlalchemy.orm.exc
from tool_shed.util import common_util
from tool_shed.util import xml_util
+from xml.etree import ElementTree as XmlET
from galaxy import eggs
import pkg_resources
pkg_resources.require( 'mercurial' )
from mercurial import hg, ui, commands
-pkg_resources.require( 'elementtree' )
-from elementtree import ElementTree
-from elementtree import ElementInclude
-from elementtree.ElementTree import Element
-from elementtree.ElementTree import SubElement
-
eggs.require( 'markupsafe' )
import markupsafe
@@ -290,22 +285,22 @@
def generate_tool_elem( tool_shed, repository_name, changeset_revision, owner, tool_file_path, tool, tool_section ):
"""Create and return an ElementTree tool Element."""
if tool_section is not None:
- tool_elem = SubElement( tool_section, 'tool' )
+ tool_elem = XmlET.SubElement( tool_section, 'tool' )
else:
- tool_elem = Element( 'tool' )
+ tool_elem = XmlET.Element( 'tool' )
tool_elem.attrib[ 'file' ] = tool_file_path
tool_elem.attrib[ 'guid' ] = tool.guid
- tool_shed_elem = SubElement( tool_elem, 'tool_shed' )
+ tool_shed_elem = XmlET.SubElement( tool_elem, 'tool_shed' )
tool_shed_elem.text = tool_shed
- repository_name_elem = SubElement( tool_elem, 'repository_name' )
+ repository_name_elem = XmlET.SubElement( tool_elem, 'repository_name' )
repository_name_elem.text = repository_name
- repository_owner_elem = SubElement( tool_elem, 'repository_owner' )
+ repository_owner_elem = XmlET.SubElement( tool_elem, 'repository_owner' )
repository_owner_elem.text = owner
- changeset_revision_elem = SubElement( tool_elem, 'installed_changeset_revision' )
+ changeset_revision_elem = XmlET.SubElement( tool_elem, 'installed_changeset_revision' )
changeset_revision_elem.text = changeset_revision
- id_elem = SubElement( tool_elem, 'id' )
+ id_elem = XmlET.SubElement( tool_elem, 'id' )
id_elem.text = tool.id
- version_elem = SubElement( tool_elem, 'version' )
+ version_elem = XmlET.SubElement( tool_elem, 'version' )
version_elem.text = tool.version
return tool_elem
@@ -335,7 +330,9 @@
file_name = strip_path( tool_config )
guids_and_configs[ guid ] = file_name
# Parse the shed_tool_conf file in which all of this repository's tools are defined and generate the tool_panel_dict.
- tree = xml_util.parse_xml( shed_tool_conf )
+ tree, error_message = xml_util.parse_xml( shed_tool_conf )
+ if tree is None:
+ return tool_panel_dict
root = tree.getroot()
for elem in root:
if elem.tag == 'tool':
@@ -1253,20 +1250,21 @@
for tool_config_filename, guid, tool in repository_tools_tups:
guid_to_tool_elem_dict[ guid ] = generate_tool_elem( tool_shed, repository.name, repository.changeset_revision, repository.owner or '', tool_config_filename, tool, None )
config_elems = []
- tree = xml_util.parse_xml( shed_tool_conf )
- root = tree.getroot()
- for elem in root:
- if elem.tag == 'section':
- for i, tool_elem in enumerate( elem ):
- guid = tool_elem.attrib.get( 'guid' )
+ tree, error_message = xml_util.parse_xml( shed_tool_conf )
+ if tree:
+ root = tree.getroot()
+ for elem in root:
+ if elem.tag == 'section':
+ for i, tool_elem in enumerate( elem ):
+ guid = tool_elem.attrib.get( 'guid' )
+ if guid in guid_to_tool_elem_dict:
+ elem[i] = guid_to_tool_elem_dict[ guid ]
+ elif elem.tag == 'tool':
+ guid = elem.attrib.get( 'guid' )
if guid in guid_to_tool_elem_dict:
- elem[i] = guid_to_tool_elem_dict[ guid ]
- elif elem.tag == 'tool':
- guid = elem.attrib.get( 'guid' )
- if guid in guid_to_tool_elem_dict:
- elem = guid_to_tool_elem_dict[ guid ]
- config_elems.append( elem )
- config_elems_to_xml_file( app, config_elems, shed_tool_conf, tool_path )
+ elem = guid_to_tool_elem_dict[ guid ]
+ config_elems.append( elem )
+ config_elems_to_xml_file( app, config_elems, shed_tool_conf, tool_path )
def update_repository( repo, ctx_rev=None ):
"""
diff -r f639dfdbf3a844ad54eba487afe71d8c100f15d2 -r 3c8ebea6f6caf344ff4d168610d8f0fe426adb85 lib/tool_shed/util/tool_dependency_util.py
--- a/lib/tool_shed/util/tool_dependency_util.py
+++ b/lib/tool_shed/util/tool_dependency_util.py
@@ -5,10 +5,7 @@
from galaxy import util
from galaxy.model.orm import and_
import tool_shed.util.shed_util_common as suc
-
-eggs.require( 'elementtree' )
-from elementtree import ElementTree
-from elementtree import ElementInclude
+from tool_shed.util import xml_util
log = logging.getLogger( __name__ )
@@ -67,13 +64,10 @@
relative_install_dir = os.path.join( shed_config_dict.get( 'tool_path' ), relative_install_dir )
# Get the tool_dependencies.xml file from the repository.
tool_dependencies_config = suc.get_config_from_disk( 'tool_dependencies.xml', relative_install_dir )
- try:
- tree = ElementTree.parse( tool_dependencies_config )
- except Exception, e:
- log.debug( "Exception attempting to parse tool_dependencies.xml: %s" %str( e ) )
+ tree, error_message = xml_util.parse_xml( tool_dependencies_config )
+ if tree is None:
return tool_dependency_objects
root = tree.getroot()
- ElementInclude.include( root )
fabric_version_checked = False
for elem in root:
tool_dependency_type = elem.tag
diff -r f639dfdbf3a844ad54eba487afe71d8c100f15d2 -r 3c8ebea6f6caf344ff4d168610d8f0fe426adb85 lib/tool_shed/util/tool_util.py
--- a/lib/tool_shed/util/tool_util.py
+++ b/lib/tool_shed/util/tool_util.py
@@ -14,6 +14,7 @@
from galaxy.web.form_builder import SelectField
from tool_shed.util import xml_util
import tool_shed.util.shed_util_common as suc
+from xml.etree import ElementTree as XmlET
import pkg_resources
@@ -22,12 +23,6 @@
from mercurial import hg
from mercurial import ui
-pkg_resources.require( 'elementtree' )
-from elementtree import ElementTree
-from elementtree import ElementInclude
-from elementtree.ElementTree import Element
-from elementtree.ElementTree import SubElement
-
log = logging.getLogger( __name__ )
def add_to_shed_tool_config( app, shed_tool_conf_dict, elem_list ):
@@ -37,15 +32,16 @@
shed_tool_conf = shed_tool_conf_dict[ 'config_filename' ]
tool_path = shed_tool_conf_dict[ 'tool_path' ]
config_elems = []
- tree = xml_util.parse_xml( shed_tool_conf )
- root = tree.getroot()
- for elem in root:
- config_elems.append( elem )
- # Add the elements to the in-memory list of config_elems.
- for elem_entry in elem_list:
- config_elems.append( elem_entry )
- # Persist the altered shed_tool_config file.
- suc.config_elems_to_xml_file( app, config_elems, shed_tool_conf, tool_path )
+ tree, error_message = xml_util.parse_xml( shed_tool_conf )
+ if tree:
+ root = tree.getroot()
+ for elem in root:
+ config_elems.append( elem )
+ # Add the elements to the in-memory list of config_elems.
+ for elem_entry in elem_list:
+ config_elems.append( elem_entry )
+ # Persist the altered shed_tool_config file.
+ suc.config_elems_to_xml_file( app, config_elems, shed_tool_conf, tool_path )
def add_to_tool_panel( app, repository_name, repository_clone_url, changeset_revision, repository_tools_tups, owner, shed_tool_conf, tool_panel_dict,
new_install=True ):
@@ -381,7 +377,7 @@
# { id: <ToolSection id>, version : <ToolSection version>, name : <TooSection name>}
if tool_section_dict[ 'id' ]:
# Create a new tool section.
- tool_section = Element( 'section' )
+ tool_section = XmlET.Element( 'section' )
tool_section.attrib[ 'id' ] = tool_section_dict[ 'id' ]
tool_section.attrib[ 'name' ] = tool_section_dict[ 'name' ]
tool_section.attrib[ 'version' ] = tool_section_dict[ 'version' ]
@@ -639,7 +635,7 @@
tool_section = trans.app.toolbox.tool_panel[ tool_panel_section_key ]
else:
# The section in which the tool was originally loaded used to be in the tool panel, but no longer is.
- elem = Element( 'section' )
+ elem = XmlET.Element( 'section' )
elem.attrib[ 'name' ] = original_section_name
elem.attrib[ 'id' ] = original_section_id
elem.attrib[ 'version' ] = ''
@@ -657,7 +653,7 @@
else:
# Appending a new section to trans.app.toolbox.tool_panel
log.debug( "Loading new tool panel section: %s" % new_tool_panel_section )
- elem = Element( 'section' )
+ elem = XmlET.Element( 'section' )
elem.attrib[ 'name' ] = new_tool_panel_section
elem.attrib[ 'id' ] = section_id
elem.attrib[ 'version' ] = ''
@@ -804,7 +800,9 @@
message = ''
tmp_tool_config = suc.get_named_tmpfile_from_ctx( ctx, ctx_file, work_dir )
if tmp_tool_config:
- element_tree = xml_util.parse_xml( tmp_tool_config )
+ element_tree, error_message = xml_util.parse_xml( tmp_tool_config )
+ if element_tree is None:
+ return tool, message
element_tree_root = element_tree.getroot()
# Look for code files required by the tool config.
tmp_code_files = []
@@ -846,30 +844,31 @@
shed_tool_conf = shed_tool_conf_dict[ 'config_filename' ]
tool_path = shed_tool_conf_dict[ 'tool_path' ]
config_elems = []
- tree = xml_util.parse_xml( shed_tool_conf )
- root = tree.getroot()
- for elem in root:
- config_elems.append( elem )
- config_elems_to_remove = []
- for config_elem in config_elems:
- if config_elem.tag == 'section':
- tool_elems_to_remove = []
- for tool_elem in config_elem:
- if tool_elem.get( 'guid' ) in guids_to_remove:
- tool_elems_to_remove.append( tool_elem )
- for tool_elem in tool_elems_to_remove:
- # Remove all of the appropriate tool sub-elements from the section element.
- config_elem.remove( tool_elem )
- if len( config_elem ) < 1:
- # Keep a list of all empty section elements so they can be removed.
- config_elems_to_remove.append( config_elem )
- elif config_elem.tag == 'tool':
- if config_elem.get( 'guid' ) in guids_to_remove:
- config_elems_to_remove.append( config_elem )
- for config_elem in config_elems_to_remove:
- config_elems.remove( config_elem )
- # Persist the altered in-memory version of the tool config.
- suc.config_elems_to_xml_file( trans.app, config_elems, shed_tool_conf, tool_path )
+ tree, error_message = xml_util.parse_xml( shed_tool_conf )
+ if tree:
+ root = tree.getroot()
+ for elem in root:
+ config_elems.append( elem )
+ config_elems_to_remove = []
+ for config_elem in config_elems:
+ if config_elem.tag == 'section':
+ tool_elems_to_remove = []
+ for tool_elem in config_elem:
+ if tool_elem.get( 'guid' ) in guids_to_remove:
+ tool_elems_to_remove.append( tool_elem )
+ for tool_elem in tool_elems_to_remove:
+ # Remove all of the appropriate tool sub-elements from the section element.
+ config_elem.remove( tool_elem )
+ if len( config_elem ) < 1:
+ # Keep a list of all empty section elements so they can be removed.
+ config_elems_to_remove.append( config_elem )
+ elif config_elem.tag == 'tool':
+ if config_elem.get( 'guid' ) in guids_to_remove:
+ config_elems_to_remove.append( config_elem )
+ for config_elem in config_elems_to_remove:
+ config_elems.remove( config_elem )
+ # Persist the altered in-memory version of the tool config.
+ suc.config_elems_to_xml_file( trans.app, config_elems, shed_tool_conf, tool_path )
def remove_from_tool_panel( trans, repository, shed_tool_conf, uninstall ):
"""A tool shed repository is being deactivated or uninstalled so handle tool panel alterations accordingly."""
diff -r f639dfdbf3a844ad54eba487afe71d8c100f15d2 -r 3c8ebea6f6caf344ff4d168610d8f0fe426adb85 lib/tool_shed/util/xml_util.py
--- a/lib/tool_shed/util/xml_util.py
+++ b/lib/tool_shed/util/xml_util.py
@@ -1,13 +1,17 @@
import logging
import os
+import sys
import tempfile
from xml.etree import ElementTree as XmlET
import xml.etree.ElementTree
log = logging.getLogger( __name__ )
+using_python_27 = sys.version_info[ :2 ] >= ( 2, 7 )
-class CommentedTreeBuilder ( XmlET.XMLTreeBuilder ):
+class Py26CommentedTreeBuilder ( XmlET.XMLTreeBuilder ):
+ # Python 2.6 uses ElementTree 1.2.x.
+
def __init__ ( self, html=0, target=None ):
XmlET.XMLTreeBuilder.__init__( self, html, target )
self._parser.CommentHandler = self.handle_comment
@@ -17,6 +21,15 @@
self._target.data( data )
self._target.end( XmlET.Comment )
+
+class Py27CommentedTreeBuilder ( XmlET.TreeBuilder ):
+ # Python 2.7 uses ElementTree 1.3.x.
+
+ def comment( self, data ):
+ self.start( XmlET.Comment, {} )
+ self.data( data )
+ self.end( XmlET.Comment )
+
def create_and_write_tmp_file( elem ):
tmp_str = xml_to_string( elem )
fh = tempfile.NamedTemporaryFile( 'wb' )
@@ -30,14 +43,30 @@
def parse_xml( file_name ):
"""Returns a parsed xml tree with comments intact."""
- try:
- fobj = open( file_name, 'r' )
- tree = XmlET.parse( fobj, parser=CommentedTreeBuilder() )
- fobj.close()
- except Exception, e:
- log.exception( "Exception attempting to parse %s: %s" % ( str( file_name ), str( e ) ) )
- return None
- return tree
+ error_message = ''
+ fobj = open( file_name, 'r' )
+ if using_python_27:
+ try:
+ tree = XmlET.parse( fobj, parser=XmlET.XMLParser( target=Py27CommentedTreeBuilder() ) )
+ except Exception, e:
+ fobj.close()
+ error_message = "Exception attempting to parse %s: %s" % ( str( file_name ), str( e ) )
+ log.exception( error_message )
+ return None, error_message
+ else:
+ try:
+ tree = XmlET.parse( fobj, parser=Py26CommentedTreeBuilder() )
+ except Exception, e:
+ fobj.close()
+ error_message = "Exception attempting to parse %s: %s" % ( str( file_name ), str( e ) )
+ log.exception( error_message )
+ return None, error_message
+ fobj.close()
+ return tree, error_message
def xml_to_string( elem, encoding='utf-8' ):
- return '%s\n' % xml.etree.ElementTree.tostring( elem, encoding=encoding )
+ if using_python_27:
+ xml_str = '%s\n' % xml.etree.ElementTree.tostring( elem, encoding=encoding, method="xml" )
+ else:
+ xml_str = '%s\n' % xml.etree.ElementTree.tostring( elem, encoding=encoding )
+ return xml_str
diff -r f639dfdbf3a844ad54eba487afe71d8c100f15d2 -r 3c8ebea6f6caf344ff4d168610d8f0fe426adb85 test/tool_shed/functional/test_0010_repository_with_tool_dependencies.py
--- a/test/tool_shed/functional/test_0010_repository_with_tool_dependencies.py
+++ b/test/tool_shed/functional/test_0010_repository_with_tool_dependencies.py
@@ -118,7 +118,7 @@
uncompress_file=False,
remove_repo_files_not_in_tar=False,
commit_message='Uploaded malformed tool dependency XML.',
- strings_displayed=[ 'Exception attempting to parse tool_dependencies.xml', 'not well-formed' ],
+ strings_displayed=[ 'Exception attempting to parse', 'not well-formed' ],
strings_not_displayed=[] )
def test_0030_upload_invalid_tool_dependency_xml( self ):
diff -r f639dfdbf3a844ad54eba487afe71d8c100f15d2 -r 3c8ebea6f6caf344ff4d168610d8f0fe426adb85 test/tool_shed/functional/test_1010_install_repository_with_tool_dependencies.py
--- a/test/tool_shed/functional/test_1010_install_repository_with_tool_dependencies.py
+++ b/test/tool_shed/functional/test_1010_install_repository_with_tool_dependencies.py
@@ -71,7 +71,7 @@
uncompress_file=False,
remove_repo_files_not_in_tar=False,
commit_message='Uploaded malformed tool dependency XML.',
- strings_displayed=[ 'Exception attempting to parse tool_dependencies.xml', 'not well-formed' ],
+ strings_displayed=[ 'Exception attempting to parse', 'not well-formed' ],
strings_not_displayed=[] )
self.upload_file( repository,
filename=os.path.join( 'freebayes', 'invalid_tool_dependencies', 'tool_dependencies.xml' ),
diff -r f639dfdbf3a844ad54eba487afe71d8c100f15d2 -r 3c8ebea6f6caf344ff4d168610d8f0fe426adb85 test/tool_shed/functional/test_1210_uninstall_reinstall_repository_with_tool_dependencies.py
--- a/test/tool_shed/functional/test_1210_uninstall_reinstall_repository_with_tool_dependencies.py
+++ b/test/tool_shed/functional/test_1210_uninstall_reinstall_repository_with_tool_dependencies.py
@@ -65,7 +65,7 @@
uncompress_file=False,
remove_repo_files_not_in_tar=False,
commit_message='Uploaded malformed tool dependency XML.',
- strings_displayed=[ 'Exception attempting to parse tool_dependencies.xml', 'not well-formed' ],
+ strings_displayed=[ 'Exception attempting to parse', 'not well-formed' ],
strings_not_displayed=[] )
self.upload_file( repository,
filename=os.path.join( 'freebayes', 'invalid_tool_dependencies', 'tool_dependencies.xml' ),
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this message because you have the commit notification service enabled
and are the addressed recipient of this email.
1
0
commit/galaxy-central: greg: Eliminate the use of Galaxy's elementtree egg within the tool shed.
by commits-noreply@bitbucket.org 28 May '13
by commits-noreply@bitbucket.org 28 May '13
28 May '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/4df405e7a2af/
Changeset: 4df405e7a2af
Branch: next-stable
User: greg
Date: 2013-05-28 18:24:52
Summary: Eliminate the use of Galaxy's elementtree egg within the tool shed.
Affected #: 18 files
diff -r c666aa204369e3c36498e1d4211e1ec591c3aeeb -r 4df405e7a2aff50c4ef83806c55d01448227cc5d lib/tool_shed/galaxy_install/__init__.py
--- a/lib/tool_shed/galaxy_install/__init__.py
+++ b/lib/tool_shed/galaxy_install/__init__.py
@@ -5,12 +5,7 @@
import tool_shed.util.shed_util_common
import tool_shed.util.datatype_util
from galaxy.model.orm import and_
-
-from galaxy import eggs
-import pkg_resources
-
-pkg_resources.require( 'elementtree' )
-from elementtree import ElementTree, ElementInclude
+from tool_shed.util import xml_util
log = logging.getLogger( __name__ )
@@ -25,13 +20,10 @@
self.installed_repository_dicts = []
def get_repository_install_dir( self, tool_shed_repository ):
for tool_config in self.tool_configs:
- try:
- tree = ElementTree.parse( tool_config )
- except Exception, e:
- log.debug( "Exception attempting to parse %s: %s" % ( str( tool_config ), str( e ) ) )
+ tree, error_message = xml_util.parse_xml( tool_config )
+ if tree is None:
return None
root = tree.getroot()
- ElementInclude.include( root )
tool_path = root.get( 'tool_path', None )
if tool_path:
ts = tool_shed.util.shed_util_common.clean_tool_shed_url( tool_shed_repository.tool_shed )
diff -r c666aa204369e3c36498e1d4211e1ec591c3aeeb -r 4df405e7a2aff50c4ef83806c55d01448227cc5d lib/tool_shed/galaxy_install/install_manager.py
--- a/lib/tool_shed/galaxy_install/install_manager.py
+++ b/lib/tool_shed/galaxy_install/install_manager.py
@@ -38,50 +38,56 @@
self.proprietary_tool_confs = self.non_shed_tool_panel_configs
self.proprietary_tool_panel_elems = self.get_proprietary_tool_panel_elems( latest_migration_script_number )
# Set the location where the repositories will be installed by retrieving the tool_path setting from migrated_tools_config.
- tree = xml_util.parse_xml( migrated_tools_config )
- root = tree.getroot()
- self.tool_path = root.get( 'tool_path' )
- print "Repositories will be installed into configured tool_path location ", str( self.tool_path )
- # Parse tool_shed_install_config to check each of the tools.
- self.tool_shed_install_config = tool_shed_install_config
- tree = xml_util.parse_xml( tool_shed_install_config )
- root = tree.getroot()
- self.tool_shed = suc.clean_tool_shed_url( root.get( 'name' ) )
- self.repository_owner = common_util.REPOSITORY_OWNER
- index, self.shed_config_dict = suc.get_shed_tool_conf_dict( app, self.migrated_tools_config )
- # Since tool migration scripts can be executed any number of times, we need to make sure the appropriate tools are defined in
- # tool_conf.xml. If no tools associated with the migration stage are defined, no repositories will be installed on disk.
- # The default behavior is that the tool shed is down.
- tool_shed_accessible = False
- tool_panel_configs = common_util.get_non_shed_tool_panel_configs( app )
- if tool_panel_configs:
- # The missing_tool_configs_dict contents are something like:
- # {'emboss_antigenic.xml': [('emboss', '5.0.0', 'package', '\nreadme blah blah blah\n')]}
- tool_shed_accessible, missing_tool_configs_dict = common_util.check_for_missing_tools( app, tool_panel_configs, latest_migration_script_number )
+ tree, error_message = xml_util.parse_xml( migrated_tools_config )
+ if tree is None:
+ print error_message
else:
- # It doesn't matter if the tool shed is accessible since there are no migrated tools defined in the local Galaxy instance, but
- # we have to set the value of tool_shed_accessible to True so that the value of migrate_tools.version can be correctly set in
- # the database.
- tool_shed_accessible = True
- missing_tool_configs_dict = odict()
- if tool_shed_accessible:
- if len( self.proprietary_tool_confs ) == 1:
- plural = ''
- file_names = self.proprietary_tool_confs[ 0 ]
+ root = tree.getroot()
+ self.tool_path = root.get( 'tool_path' )
+ print "Repositories will be installed into configured tool_path location ", str( self.tool_path )
+ # Parse tool_shed_install_config to check each of the tools.
+ self.tool_shed_install_config = tool_shed_install_config
+ tree, error_message = xml_util.parse_xml( tool_shed_install_config )
+ if tree is None:
+ print error_message
else:
- plural = 's'
- file_names = ', '.join( self.proprietary_tool_confs )
- if missing_tool_configs_dict:
- for repository_elem in root:
- self.install_repository( repository_elem, install_dependencies )
- else:
- message = "\nNo tools associated with migration stage %s are defined in your " % str( latest_migration_script_number )
- message += "file%s named %s,\nso no repositories will be installed on disk.\n" % ( plural, file_names )
- print message
- else:
- message = "\nThe main Galaxy tool shed is not currently available, so skipped migration stage %s.\n" % str( latest_migration_script_number )
- message += "Try again later.\n"
- print message
+ root = tree.getroot()
+ self.tool_shed = suc.clean_tool_shed_url( root.get( 'name' ) )
+ self.repository_owner = common_util.REPOSITORY_OWNER
+ index, self.shed_config_dict = suc.get_shed_tool_conf_dict( app, self.migrated_tools_config )
+ # Since tool migration scripts can be executed any number of times, we need to make sure the appropriate tools are defined in
+ # tool_conf.xml. If no tools associated with the migration stage are defined, no repositories will be installed on disk.
+ # The default behavior is that the tool shed is down.
+ tool_shed_accessible = False
+ tool_panel_configs = common_util.get_non_shed_tool_panel_configs( app )
+ if tool_panel_configs:
+ # The missing_tool_configs_dict contents are something like:
+ # {'emboss_antigenic.xml': [('emboss', '5.0.0', 'package', '\nreadme blah blah blah\n')]}
+ tool_shed_accessible, missing_tool_configs_dict = common_util.check_for_missing_tools( app, tool_panel_configs, latest_migration_script_number )
+ else:
+ # It doesn't matter if the tool shed is accessible since there are no migrated tools defined in the local Galaxy instance, but
+ # we have to set the value of tool_shed_accessible to True so that the value of migrate_tools.version can be correctly set in
+ # the database.
+ tool_shed_accessible = True
+ missing_tool_configs_dict = odict()
+ if tool_shed_accessible:
+ if len( self.proprietary_tool_confs ) == 1:
+ plural = ''
+ file_names = self.proprietary_tool_confs[ 0 ]
+ else:
+ plural = 's'
+ file_names = ', '.join( self.proprietary_tool_confs )
+ if missing_tool_configs_dict:
+ for repository_elem in root:
+ self.install_repository( repository_elem, install_dependencies )
+ else:
+ message = "\nNo tools associated with migration stage %s are defined in your " % str( latest_migration_script_number )
+ message += "file%s named %s,\nso no repositories will be installed on disk.\n" % ( plural, file_names )
+ print message
+ else:
+ message = "\nThe main Galaxy tool shed is not currently available, so skipped migration stage %s.\n" % str( latest_migration_script_number )
+ message += "Try again later.\n"
+ print message
def get_guid( self, repository_clone_url, relative_install_dir, tool_config ):
if self.shed_config_dict.get( 'tool_path' ):
@@ -108,7 +114,9 @@
tools_xml_file_path = os.path.abspath( os.path.join( 'scripts', 'migrate_tools', '%04d_tools.xml' % latest_tool_migration_script_number ) )
# Parse the XML and load the file attributes for later checking against the integrated elements from self.proprietary_tool_confs.
migrated_tool_configs = []
- tree = xml_util.parse_xml( tools_xml_file_path )
+ tree, error_message = xml_util.parse_xml( tools_xml_file_path )
+ if tree is None:
+ return []
root = tree.getroot()
for elem in root:
if elem.tag == 'repository':
@@ -117,7 +125,9 @@
# Parse each file in self.proprietary_tool_confs and generate the integrated list of tool panel Elements that contain them.
tool_panel_elems = []
for proprietary_tool_conf in self.proprietary_tool_confs:
- tree = xml_util.parse_xml( proprietary_tool_conf )
+ tree, error_message = xml_util.parse_xml( proprietary_tool_conf )
+ if tree is None:
+ return []
root = tree.getroot()
for elem in root:
if elem.tag == 'tool':
diff -r c666aa204369e3c36498e1d4211e1ec591c3aeeb -r 4df405e7a2aff50c4ef83806c55d01448227cc5d lib/tool_shed/galaxy_install/repository_util.py
--- a/lib/tool_shed/galaxy_install/repository_util.py
+++ b/lib/tool_shed/galaxy_install/repository_util.py
@@ -19,6 +19,7 @@
from tool_shed.util import metadata_util
from tool_shed.util import tool_dependency_util
from tool_shed.util import tool_util
+from xml.etree import ElementTree as XmlET
from galaxy import eggs
import pkg_resources
@@ -28,10 +29,6 @@
from mercurial import hg
from mercurial import ui
-pkg_resources.require( 'elementtree' )
-from elementtree import ElementTree
-from elementtree.ElementTree import Element
-
log = logging.getLogger( __name__ )
def create_repo_info_dict( trans, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_name=None, repository=None,
@@ -401,7 +398,7 @@
else:
# Appending a new section to trans.app.toolbox.tool_panel
log.debug( "Loading new tool panel section: %s" % new_tool_panel_section )
- elem = Element( 'section' )
+ elem = XmlET.Element( 'section' )
elem.attrib[ 'name' ] = new_tool_panel_section
elem.attrib[ 'id' ] = section_id
elem.attrib[ 'version' ] = ''
diff -r c666aa204369e3c36498e1d4211e1ec591c3aeeb -r 4df405e7a2aff50c4ef83806c55d01448227cc5d lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
--- a/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
@@ -14,15 +14,6 @@
from galaxy.model.orm import and_
from galaxy.web import url_for
-from galaxy import eggs
-import pkg_resources
-
-pkg_resources.require( 'elementtree' )
-from elementtree import ElementTree
-from elementtree import ElementInclude
-from elementtree.ElementTree import Element
-from elementtree.ElementTree import SubElement
-
log = logging.getLogger( __name__ )
def clean_tool_shed_url( base_url ):
@@ -479,7 +470,7 @@
tool_dependency = None
action_dict = {}
if tool_dependencies_config:
- required_td_tree = xml_util.parse_xml( tool_dependencies_config )
+ required_td_tree, error_message = xml_util.parse_xml( tool_dependencies_config )
if required_td_tree:
required_td_root = required_td_tree.getroot()
for required_td_elem in required_td_root:
diff -r c666aa204369e3c36498e1d4211e1ec591c3aeeb -r 4df405e7a2aff50c4ef83806c55d01448227cc5d lib/tool_shed/tool_shed_registry.py
--- a/lib/tool_shed/tool_shed_registry.py
+++ b/lib/tool_shed/tool_shed_registry.py
@@ -10,29 +10,32 @@
class Registry( object ):
def __init__( self, root_dir=None, config=None ):
- self.tool_sheds = odict()
+ self.tool_sheds = odict()
self.tool_sheds_auth = odict()
if root_dir and config:
# Parse tool_sheds_conf.xml
- tree = xml_util.parse_xml( config )
- root = tree.getroot()
- log.debug( 'Loading references to tool sheds from %s' % config )
- for elem in root.findall( 'tool_shed' ):
- try:
- name = elem.get( 'name', None )
- url = elem.get( 'url', None )
- username = elem.get( 'user', None )
- password = elem.get( 'pass', None )
- if name and url:
- self.tool_sheds[ name ] = url
- self.tool_sheds_auth[ name ] = None
- log.debug( 'Loaded reference to tool shed: %s' % name )
- if name and url and username and password:
- pass_mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
- pass_mgr.add_password(None, url, username, password)
- self.tool_sheds_auth[ name ] = pass_mgr
- except Exception, e:
- log.warning( 'Error loading reference to tool shed "%s", problem: %s' % ( name, str( e ) ) )
+ tree, error_message = xml_util.parse_xml( config )
+ if tree is None:
+ log.warning( "Unable to load references to tool sheds defined in file %s" % str( config ) )
+ else:
+ root = tree.getroot()
+ log.debug( 'Loading references to tool sheds from %s' % config )
+ for elem in root.findall( 'tool_shed' ):
+ try:
+ name = elem.get( 'name', None )
+ url = elem.get( 'url', None )
+ username = elem.get( 'user', None )
+ password = elem.get( 'pass', None )
+ if name and url:
+ self.tool_sheds[ name ] = url
+ self.tool_sheds_auth[ name ] = None
+ log.debug( 'Loaded reference to tool shed: %s' % name )
+ if name and url and username and password:
+ pass_mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
+ pass_mgr.add_password( None, url, username, password )
+ self.tool_sheds_auth[ name ] = pass_mgr
+ except Exception, e:
+ log.warning( 'Error loading reference to tool shed "%s", problem: %s' % ( name, str( e ) ) )
def password_manager_for_url( self, url ):
"""
diff -r c666aa204369e3c36498e1d4211e1ec591c3aeeb -r 4df405e7a2aff50c4ef83806c55d01448227cc5d lib/tool_shed/util/commit_util.py
--- a/lib/tool_shed/util/commit_util.py
+++ b/lib/tool_shed/util/commit_util.py
@@ -185,14 +185,11 @@
def handle_repository_dependencies_definition( trans, repository_dependencies_config ):
altered = False
- try:
- # Make sure we're looking at a valid repository_dependencies.xml file.
- tree = xml_util.parse_xml( repository_dependencies_config )
- root = tree.getroot()
- except Exception, e:
- error_message = "Error parsing %s in handle_repository_dependencies_definition: " % str( repository_dependencies_config )
- log.exception( error_message )
+ # Make sure we're looking at a valid repository_dependencies.xml file.
+ tree, error_message = xml_util.parse_xml( repository_dependencies_config )
+ if tree is None:
return False, None
+ root = tree.getroot()
if root.tag == 'repositories':
for index, elem in enumerate( root ):
# <repository name="molecule_datatypes" owner="test" changeset_revision="1a070566e9c6" />
@@ -232,14 +229,11 @@
def handle_tool_dependencies_definition( trans, tool_dependencies_config ):
altered = False
- try:
- # Make sure we're looking at a valid tool_dependencies.xml file.
- tree = xml_util.parse_xml( tool_dependencies_config )
- root = tree.getroot()
- except Exception, e:
- error_message = "Error parsing %s in handle_tool_dependencies_definition: " % str( tool_dependencies_config )
- log.exception( error_message )
+ # Make sure we're looking at a valid tool_dependencies.xml file.
+ tree, error_message = xml_util.parse_xml( tool_dependencies_config )
+ if tree is None:
return False, None
+ root = tree.getroot()
if root.tag == 'tool_dependency':
for root_index, root_elem in enumerate( root ):
# <package name="eigen" version="2.0.17">
diff -r c666aa204369e3c36498e1d4211e1ec591c3aeeb -r 4df405e7a2aff50c4ef83806c55d01448227cc5d lib/tool_shed/util/common_install_util.py
--- a/lib/tool_shed/util/common_install_util.py
+++ b/lib/tool_shed/util/common_install_util.py
@@ -13,17 +13,10 @@
from tool_shed.util import data_manager_util
from tool_shed.util import datatype_util
from tool_shed.util import tool_util
+from tool_shed.util import xml_util
from tool_shed.galaxy_install.tool_dependencies.install_util import install_package
from tool_shed.galaxy_install.tool_dependencies.install_util import set_environment
-import pkg_resources
-
-pkg_resources.require( 'elementtree' )
-from elementtree import ElementTree
-from elementtree import ElementInclude
-from elementtree.ElementTree import Element
-from elementtree.ElementTree import SubElement
-
log = logging.getLogger( __name__ )
def activate_repository( trans, repository ):
@@ -318,13 +311,10 @@
sa_session = app.model.context.current
installed_tool_dependencies = []
# Parse the tool_dependencies.xml config.
- try:
- tree = ElementTree.parse( tool_dependencies_config )
- except Exception, e:
- log.debug( "Exception attempting to parse %s: %s" % ( str( tool_dependencies_config ), str( e ) ) )
+ tree, error_message = xml_util.parse_xml( tool_dependencies_config )
+ if tree is None:
return installed_tool_dependencies
root = tree.getroot()
- ElementInclude.include( root )
fabric_version_checked = False
for elem in root:
if elem.tag == 'package':
diff -r c666aa204369e3c36498e1d4211e1ec591c3aeeb -r 4df405e7a2aff50c4ef83806c55d01448227cc5d lib/tool_shed/util/common_util.py
--- a/lib/tool_shed/util/common_util.py
+++ b/lib/tool_shed/util/common_util.py
@@ -11,7 +11,9 @@
tools_xml_file_path = os.path.abspath( os.path.join( 'scripts', 'migrate_tools', '%04d_tools.xml' % latest_tool_migration_script_number ) )
# Parse the XML and load the file attributes for later checking against the proprietary tool_panel_config.
migrated_tool_configs_dict = odict()
- tree = xml_util.parse_xml( tools_xml_file_path )
+ tree, error_message = xml_util.parse_xml( tools_xml_file_path )
+ if tree is None:
+ return False, odict()
root = tree.getroot()
tool_shed = root.get( 'name' )
tool_shed_url = get_tool_shed_url_from_tools_xml_file_path( app, tool_shed )
@@ -48,15 +50,16 @@
# Parse the proprietary tool_panel_configs (the default is tool_conf.xml) and generate the list of missing tool config file names.
missing_tool_configs_dict = odict()
for tool_panel_config in tool_panel_configs:
- tree = xml_util.parse_xml( tool_panel_config )
- root = tree.getroot()
- for elem in root:
- if elem.tag == 'tool':
- missing_tool_configs_dict = check_tool_tag_set( elem, migrated_tool_configs_dict, missing_tool_configs_dict )
- elif elem.tag == 'section':
- for section_elem in elem:
- if section_elem.tag == 'tool':
- missing_tool_configs_dict = check_tool_tag_set( section_elem, migrated_tool_configs_dict, missing_tool_configs_dict )
+ tree, error_message = xml_util.parse_xml( tool_panel_config )
+ if tree:
+ root = tree.getroot()
+ for elem in root:
+ if elem.tag == 'tool':
+ missing_tool_configs_dict = check_tool_tag_set( elem, migrated_tool_configs_dict, missing_tool_configs_dict )
+ elif elem.tag == 'section':
+ for section_elem in elem:
+ if section_elem.tag == 'tool':
+ missing_tool_configs_dict = check_tool_tag_set( section_elem, migrated_tool_configs_dict, missing_tool_configs_dict )
else:
exception_msg = '\n\nThe entry for the main Galaxy tool shed at %s is missing from the %s file. ' % ( tool_shed, app.config.tool_sheds_config )
exception_msg += 'The entry for this tool shed must always be available in this file, so re-add it before attempting to start your Galaxy server.\n'
@@ -78,7 +81,9 @@
for config_filename in app.config.tool_configs:
# Any config file that includes a tool_path attribute in the root tag set like the following is shed-related.
# <toolbox tool_path="../shed_tools">
- tree = xml_util.parse_xml( config_filename )
+ tree, error_message = xml_util.parse_xml( config_filename )
+ if tree is None:
+ continue
root = tree.getroot()
tool_path = root.get( 'tool_path', None )
if tool_path is None:
diff -r c666aa204369e3c36498e1d4211e1ec591c3aeeb -r 4df405e7a2aff50c4ef83806c55d01448227cc5d lib/tool_shed/util/data_manager_util.py
--- a/lib/tool_shed/util/data_manager_util.py
+++ b/lib/tool_shed/util/data_manager_util.py
@@ -21,7 +21,10 @@
for tool_tup in repository_tools_tups:
repository_tools_by_guid[ tool_tup[ 1 ] ] = dict( tool_config_filename=tool_tup[ 0 ], tool=tool_tup[ 2 ] )
# Load existing data managers.
- config_elems = [ elem for elem in xml_util.parse_xml( shed_data_manager_conf_filename ).getroot() ]
+ tree, error_message = xml_util.parse_xml( shed_data_manager_conf_filename )
+ if tree is None:
+ return rval
+ config_elems = [ elem for elem in tree.getroot() ]
repo_data_manager_conf_filename = metadata_dict['data_manager'].get( 'config_filename', None )
if repo_data_manager_conf_filename is None:
log.debug( "No data_manager_conf.xml file has been defined." )
@@ -29,7 +32,9 @@
data_manager_config_has_changes = False
relative_repo_data_manager_dir = os.path.join( shed_config_dict.get( 'tool_path', '' ), relative_install_dir )
repo_data_manager_conf_filename = os.path.join( relative_repo_data_manager_dir, repo_data_manager_conf_filename )
- tree = xml_util.parse_xml( repo_data_manager_conf_filename )
+ tree, error_message = xml_util.parse_xml( repo_data_manager_conf_filename )
+ if tree is None:
+ return rval
root = tree.getroot()
for elem in root:
if elem.tag == 'data_manager':
@@ -89,37 +94,38 @@
metadata_dict = repository.metadata
if metadata_dict and 'data_manager' in metadata_dict:
shed_data_manager_conf_filename = app.config.shed_data_manager_config_file
- tree = xml_util.parse_xml( shed_data_manager_conf_filename )
- root = tree.getroot()
- assert root.tag == 'data_managers', 'The file provided (%s) for removing data managers from is not a valid data manager xml file.' % ( shed_data_manager_conf_filename )
- guids = [ data_manager_dict.get( 'guid' ) for data_manager_dict in metadata_dict.get( 'data_manager', {} ).get( 'data_managers', {} ).itervalues() if 'guid' in data_manager_dict ]
- load_old_data_managers_by_guid = {}
- data_manager_config_has_changes = False
- config_elems = []
- for elem in root:
- # Match Data Manager elements by guid and installed_changeset_revision
- elem_matches_removed_data_manager = False
- if elem.tag == 'data_manager':
- guid = elem.get( 'guid', None )
- if guid in guids:
- tool_elem = elem.find( 'tool' )
- if tool_elem is not None:
- installed_changeset_revision_elem = tool_elem.find( 'installed_changeset_revision' )
- if installed_changeset_revision_elem is not None:
- if installed_changeset_revision_elem.text == repository.installed_changeset_revision:
- elem_matches_removed_data_manager = True
- else:
- # This is a different version, which had been previously overridden
- load_old_data_managers_by_guid[ guid ] = elem
- if elem_matches_removed_data_manager:
- data_manager_config_has_changes = True
- else:
- config_elems.append( elem )
- # Remove data managers from in memory
- app.data_managers.remove_manager( guids )
- # Load other versions of any now uninstalled data managers, if any
- for elem in load_old_data_managers_by_guid.itervalues():
- app.data_managers.load_manager_from_elem( elem )
- # Persist the altered shed_data_manager_config file.
- if data_manager_config_has_changes:
- data_manager_config_elems_to_xml_file( app, config_elems, shed_data_manager_conf_filename )
+ tree, error_message = xml_util.parse_xml( shed_data_manager_conf_filename )
+ if tree:
+ root = tree.getroot()
+ assert root.tag == 'data_managers', 'The file provided (%s) for removing data managers from is not a valid data manager xml file.' % ( shed_data_manager_conf_filename )
+ guids = [ data_manager_dict.get( 'guid' ) for data_manager_dict in metadata_dict.get( 'data_manager', {} ).get( 'data_managers', {} ).itervalues() if 'guid' in data_manager_dict ]
+ load_old_data_managers_by_guid = {}
+ data_manager_config_has_changes = False
+ config_elems = []
+ for elem in root:
+ # Match Data Manager elements by guid and installed_changeset_revision
+ elem_matches_removed_data_manager = False
+ if elem.tag == 'data_manager':
+ guid = elem.get( 'guid', None )
+ if guid in guids:
+ tool_elem = elem.find( 'tool' )
+ if tool_elem is not None:
+ installed_changeset_revision_elem = tool_elem.find( 'installed_changeset_revision' )
+ if installed_changeset_revision_elem is not None:
+ if installed_changeset_revision_elem.text == repository.installed_changeset_revision:
+ elem_matches_removed_data_manager = True
+ else:
+ # This is a different version, which had been previously overridden
+ load_old_data_managers_by_guid[ guid ] = elem
+ if elem_matches_removed_data_manager:
+ data_manager_config_has_changes = True
+ else:
+ config_elems.append( elem )
+ # Remove data managers from in memory
+ app.data_managers.remove_manager( guids )
+ # Load other versions of any now uninstalled data managers, if any
+ for elem in load_old_data_managers_by_guid.itervalues():
+ app.data_managers.load_manager_from_elem( elem )
+ # Persist the altered shed_data_manager_config file.
+ if data_manager_config_has_changes:
+ data_manager_config_elems_to_xml_file( app, config_elems, shed_data_manager_conf_filename )
diff -r c666aa204369e3c36498e1d4211e1ec591c3aeeb -r 4df405e7a2aff50c4ef83806c55d01448227cc5d lib/tool_shed/util/datatype_util.py
--- a/lib/tool_shed/util/datatype_util.py
+++ b/lib/tool_shed/util/datatype_util.py
@@ -5,14 +5,6 @@
from tool_shed.util import xml_util
import tool_shed.util.shed_util_common as suc
-import pkg_resources
-
-pkg_resources.require( 'elementtree' )
-from elementtree import ElementTree
-from elementtree import ElementInclude
-from elementtree.ElementTree import Element
-from elementtree.ElementTree import SubElement
-
log = logging.getLogger( __name__ )
def alter_config_and_load_prorietary_datatypes( app, datatypes_config, relative_install_dir, deactivate=False, override=True ):
@@ -23,10 +15,8 @@
be False when a tool shed repository is being installed. Since installation is occurring after the datatypes registry
has been initialized, the registry's contents cannot be overridden by conflicting data types.
"""
- try:
- tree = xml_util.parse_xml( datatypes_config )
- except Exception, e:
- log.debug( "Error parsing %s, exception: %s" % ( datatypes_config, str( e ) ) )
+ tree, error_message = xml_util.parse_xml( datatypes_config )
+ if tree is None:
return None, None
datatypes_config_root = tree.getroot()
registration = datatypes_config_root.find( 'registration' )
diff -r c666aa204369e3c36498e1d4211e1ec591c3aeeb -r 4df405e7a2aff50c4ef83806c55d01448227cc5d lib/tool_shed/util/metadata_util.py
--- a/lib/tool_shed/util/metadata_util.py
+++ b/lib/tool_shed/util/metadata_util.py
@@ -26,10 +26,6 @@
from mercurial import hg
from mercurial import ui
-pkg_resources.require( 'elementtree' )
-from elementtree import ElementTree
-from elementtree import ElementInclude
-
log = logging.getLogger( __name__ )
REPOSITORY_DATA_MANAGER_CONFIG_FILENAME = "data_manager_conf.xml"
@@ -363,12 +359,9 @@
'invalid_data_managers': invalid_data_managers,
'error_messages': [] }
metadata_dict[ 'data_manager' ] = data_manager_metadata
- try:
- tree = xml_util.parse_xml( data_manager_config_filename )
- except Exception, e:
+ tree, error_message = xml_util.parse_xml( data_manager_config_filename )
+ if tree is None:
# We are not able to load any data managers.
- error_message = 'There was an error parsing your Data Manager config file "%s": %s' % ( data_manager_config_filename, e )
- log.error( error_message )
data_manager_metadata[ 'error_messages' ].append( error_message )
return metadata_dict
tool_path = None
@@ -436,13 +429,10 @@
def generate_datatypes_metadata( app, repository, repository_clone_url, repository_files_dir, datatypes_config, metadata_dict ):
"""Update the received metadata_dict with information from the parsed datatypes_config."""
- try:
- tree = ElementTree.parse( datatypes_config )
- except Exception, e:
- log.debug( "Exception attempting to parse %s: %s" % ( str( datatypes_config ), str( e ) ) )
+ tree, error_message = xml_util.parse_xml( datatypes_config )
+ if tree is None:
return metadata_dict
root = tree.getroot()
- ElementInclude.include( root )
repository_datatype_code_files = []
datatype_files = root.find( 'datatype_files' )
if datatype_files:
@@ -637,14 +627,13 @@
if os.path.getsize( full_path ) > 0:
if not ( checkers.check_binary( full_path ) or checkers.check_image( full_path ) or checkers.check_gzip( full_path )[ 0 ]
or checkers.check_bz2( full_path )[ 0 ] or checkers.check_zip( full_path ) ):
- try:
- # Make sure we're looking at a tool config and not a display application config or something else.
- element_tree = xml_util.parse_xml( full_path )
+ # Make sure we're looking at a tool config and not a display application config or something else.
+ element_tree, error_message = xml_util.parse_xml( full_path )
+ if element_tree is None:
+ is_tool = False
+ else:
element_tree_root = element_tree.getroot()
is_tool = element_tree_root.tag == 'tool'
- except Exception, e:
- log.debug( "Error parsing %s, exception: %s" % ( full_path, str( e ) ) )
- is_tool = False
if is_tool:
tool, valid, error_message = tool_util.load_tool_from_config( app, app.security.encode_id( repository.id ), full_path )
if tool is None:
@@ -750,15 +739,13 @@
is called from the tool shed as well as from Galaxy.
"""
error_message = ''
- try:
- # Make sure we're looking at a valid repository_dependencies.xml file.
- tree = xml_util.parse_xml( repository_dependencies_config )
+ # Make sure we're looking at a valid repository_dependencies.xml file.
+ tree, error_message = xml_util.parse_xml( repository_dependencies_config )
+ if tree is None:
+ xml_is_valid = False
+ else:
root = tree.getroot()
xml_is_valid = root.tag == 'repositories'
- except Exception, e:
- error_message = "Error parsing %s, exception: %s" % ( repository_dependencies_config, str( e ) )
- log.debug( error_message )
- xml_is_valid = False
if xml_is_valid:
invalid_repository_dependencies_dict = dict( description=root.get( 'description' ) )
invalid_repository_dependency_tups = []
@@ -795,14 +782,10 @@
else:
original_valid_tool_dependencies_dict = None
original_invalid_tool_dependencies_dict = None
- try:
- tree = ElementTree.parse( tool_dependencies_config )
- except Exception, e:
- error_message = "Exception attempting to parse tool_dependencies.xml: %s" %str( e )
- log.debug( error_message )
+ tree, error_message = xml_util.parse_xml( tool_dependencies_config )
+ if tree is None:
return metadata_dict, error_message
root = tree.getroot()
- ElementInclude.include( root )
tool_dependency_is_valid = True
valid_tool_dependencies_dict = {}
invalid_tool_dependencies_dict = {}
diff -r c666aa204369e3c36498e1d4211e1ec591c3aeeb -r 4df405e7a2aff50c4ef83806c55d01448227cc5d lib/tool_shed/util/shed_util_common.py
--- a/lib/tool_shed/util/shed_util_common.py
+++ b/lib/tool_shed/util/shed_util_common.py
@@ -15,18 +15,13 @@
import sqlalchemy.orm.exc
from tool_shed.util import common_util
from tool_shed.util import xml_util
+from xml.etree import ElementTree as XmlET
from galaxy import eggs
import pkg_resources
pkg_resources.require( 'mercurial' )
from mercurial import hg, ui, commands
-pkg_resources.require( 'elementtree' )
-from elementtree import ElementTree
-from elementtree import ElementInclude
-from elementtree.ElementTree import Element
-from elementtree.ElementTree import SubElement
-
eggs.require( 'markupsafe' )
import markupsafe
@@ -290,22 +285,22 @@
def generate_tool_elem( tool_shed, repository_name, changeset_revision, owner, tool_file_path, tool, tool_section ):
"""Create and return an ElementTree tool Element."""
if tool_section is not None:
- tool_elem = SubElement( tool_section, 'tool' )
+ tool_elem = XmlET.SubElement( tool_section, 'tool' )
else:
- tool_elem = Element( 'tool' )
+ tool_elem = XmlET.Element( 'tool' )
tool_elem.attrib[ 'file' ] = tool_file_path
tool_elem.attrib[ 'guid' ] = tool.guid
- tool_shed_elem = SubElement( tool_elem, 'tool_shed' )
+ tool_shed_elem = XmlET.SubElement( tool_elem, 'tool_shed' )
tool_shed_elem.text = tool_shed
- repository_name_elem = SubElement( tool_elem, 'repository_name' )
+ repository_name_elem = XmlET.SubElement( tool_elem, 'repository_name' )
repository_name_elem.text = repository_name
- repository_owner_elem = SubElement( tool_elem, 'repository_owner' )
+ repository_owner_elem = XmlET.SubElement( tool_elem, 'repository_owner' )
repository_owner_elem.text = owner
- changeset_revision_elem = SubElement( tool_elem, 'installed_changeset_revision' )
+ changeset_revision_elem = XmlET.SubElement( tool_elem, 'installed_changeset_revision' )
changeset_revision_elem.text = changeset_revision
- id_elem = SubElement( tool_elem, 'id' )
+ id_elem = XmlET.SubElement( tool_elem, 'id' )
id_elem.text = tool.id
- version_elem = SubElement( tool_elem, 'version' )
+ version_elem = XmlET.SubElement( tool_elem, 'version' )
version_elem.text = tool.version
return tool_elem
@@ -335,7 +330,9 @@
file_name = strip_path( tool_config )
guids_and_configs[ guid ] = file_name
# Parse the shed_tool_conf file in which all of this repository's tools are defined and generate the tool_panel_dict.
- tree = xml_util.parse_xml( shed_tool_conf )
+ tree, error_message = xml_util.parse_xml( shed_tool_conf )
+ if tree is None:
+ return tool_panel_dict
root = tree.getroot()
for elem in root:
if elem.tag == 'tool':
@@ -1253,20 +1250,21 @@
for tool_config_filename, guid, tool in repository_tools_tups:
guid_to_tool_elem_dict[ guid ] = generate_tool_elem( tool_shed, repository.name, repository.changeset_revision, repository.owner or '', tool_config_filename, tool, None )
config_elems = []
- tree = xml_util.parse_xml( shed_tool_conf )
- root = tree.getroot()
- for elem in root:
- if elem.tag == 'section':
- for i, tool_elem in enumerate( elem ):
- guid = tool_elem.attrib.get( 'guid' )
+ tree, error_message = xml_util.parse_xml( shed_tool_conf )
+ if tree:
+ root = tree.getroot()
+ for elem in root:
+ if elem.tag == 'section':
+ for i, tool_elem in enumerate( elem ):
+ guid = tool_elem.attrib.get( 'guid' )
+ if guid in guid_to_tool_elem_dict:
+ elem[i] = guid_to_tool_elem_dict[ guid ]
+ elif elem.tag == 'tool':
+ guid = elem.attrib.get( 'guid' )
if guid in guid_to_tool_elem_dict:
- elem[i] = guid_to_tool_elem_dict[ guid ]
- elif elem.tag == 'tool':
- guid = elem.attrib.get( 'guid' )
- if guid in guid_to_tool_elem_dict:
- elem = guid_to_tool_elem_dict[ guid ]
- config_elems.append( elem )
- config_elems_to_xml_file( app, config_elems, shed_tool_conf, tool_path )
+ elem = guid_to_tool_elem_dict[ guid ]
+ config_elems.append( elem )
+ config_elems_to_xml_file( app, config_elems, shed_tool_conf, tool_path )
def update_repository( repo, ctx_rev=None ):
"""
diff -r c666aa204369e3c36498e1d4211e1ec591c3aeeb -r 4df405e7a2aff50c4ef83806c55d01448227cc5d lib/tool_shed/util/tool_dependency_util.py
--- a/lib/tool_shed/util/tool_dependency_util.py
+++ b/lib/tool_shed/util/tool_dependency_util.py
@@ -5,10 +5,7 @@
from galaxy import util
from galaxy.model.orm import and_
import tool_shed.util.shed_util_common as suc
-
-eggs.require( 'elementtree' )
-from elementtree import ElementTree
-from elementtree import ElementInclude
+from tool_shed.util import xml_util
log = logging.getLogger( __name__ )
@@ -67,13 +64,10 @@
relative_install_dir = os.path.join( shed_config_dict.get( 'tool_path' ), relative_install_dir )
# Get the tool_dependencies.xml file from the repository.
tool_dependencies_config = suc.get_config_from_disk( 'tool_dependencies.xml', relative_install_dir )
- try:
- tree = ElementTree.parse( tool_dependencies_config )
- except Exception, e:
- log.debug( "Exception attempting to parse tool_dependencies.xml: %s" %str( e ) )
+ tree, error_message = xml_util.parse_xml( tool_dependencies_config )
+ if tree is None:
return tool_dependency_objects
root = tree.getroot()
- ElementInclude.include( root )
fabric_version_checked = False
for elem in root:
tool_dependency_type = elem.tag
diff -r c666aa204369e3c36498e1d4211e1ec591c3aeeb -r 4df405e7a2aff50c4ef83806c55d01448227cc5d lib/tool_shed/util/tool_util.py
--- a/lib/tool_shed/util/tool_util.py
+++ b/lib/tool_shed/util/tool_util.py
@@ -14,6 +14,7 @@
from galaxy.web.form_builder import SelectField
from tool_shed.util import xml_util
import tool_shed.util.shed_util_common as suc
+from xml.etree import ElementTree as XmlET
import pkg_resources
@@ -22,12 +23,6 @@
from mercurial import hg
from mercurial import ui
-pkg_resources.require( 'elementtree' )
-from elementtree import ElementTree
-from elementtree import ElementInclude
-from elementtree.ElementTree import Element
-from elementtree.ElementTree import SubElement
-
log = logging.getLogger( __name__ )
def add_to_shed_tool_config( app, shed_tool_conf_dict, elem_list ):
@@ -37,15 +32,16 @@
shed_tool_conf = shed_tool_conf_dict[ 'config_filename' ]
tool_path = shed_tool_conf_dict[ 'tool_path' ]
config_elems = []
- tree = xml_util.parse_xml( shed_tool_conf )
- root = tree.getroot()
- for elem in root:
- config_elems.append( elem )
- # Add the elements to the in-memory list of config_elems.
- for elem_entry in elem_list:
- config_elems.append( elem_entry )
- # Persist the altered shed_tool_config file.
- suc.config_elems_to_xml_file( app, config_elems, shed_tool_conf, tool_path )
+ tree, error_message = xml_util.parse_xml( shed_tool_conf )
+ if tree:
+ root = tree.getroot()
+ for elem in root:
+ config_elems.append( elem )
+ # Add the elements to the in-memory list of config_elems.
+ for elem_entry in elem_list:
+ config_elems.append( elem_entry )
+ # Persist the altered shed_tool_config file.
+ suc.config_elems_to_xml_file( app, config_elems, shed_tool_conf, tool_path )
def add_to_tool_panel( app, repository_name, repository_clone_url, changeset_revision, repository_tools_tups, owner, shed_tool_conf, tool_panel_dict,
new_install=True ):
@@ -381,7 +377,7 @@
# { id: <ToolSection id>, version : <ToolSection version>, name : <ToolSection name>}
if tool_section_dict[ 'id' ]:
# Create a new tool section.
- tool_section = Element( 'section' )
+ tool_section = XmlET.Element( 'section' )
tool_section.attrib[ 'id' ] = tool_section_dict[ 'id' ]
tool_section.attrib[ 'name' ] = tool_section_dict[ 'name' ]
tool_section.attrib[ 'version' ] = tool_section_dict[ 'version' ]
@@ -639,7 +635,7 @@
tool_section = trans.app.toolbox.tool_panel[ tool_panel_section_key ]
else:
# The section in which the tool was originally loaded used to be in the tool panel, but no longer is.
- elem = Element( 'section' )
+ elem = XmlET.Element( 'section' )
elem.attrib[ 'name' ] = original_section_name
elem.attrib[ 'id' ] = original_section_id
elem.attrib[ 'version' ] = ''
@@ -657,7 +653,7 @@
else:
# Appending a new section to trans.app.toolbox.tool_panel
log.debug( "Loading new tool panel section: %s" % new_tool_panel_section )
- elem = Element( 'section' )
+ elem = XmlET.Element( 'section' )
elem.attrib[ 'name' ] = new_tool_panel_section
elem.attrib[ 'id' ] = section_id
elem.attrib[ 'version' ] = ''
@@ -804,7 +800,9 @@
message = ''
tmp_tool_config = suc.get_named_tmpfile_from_ctx( ctx, ctx_file, work_dir )
if tmp_tool_config:
- element_tree = xml_util.parse_xml( tmp_tool_config )
+ element_tree, error_message = xml_util.parse_xml( tmp_tool_config )
+ if element_tree is None:
+ return tool, message
element_tree_root = element_tree.getroot()
# Look for code files required by the tool config.
tmp_code_files = []
@@ -846,30 +844,31 @@
shed_tool_conf = shed_tool_conf_dict[ 'config_filename' ]
tool_path = shed_tool_conf_dict[ 'tool_path' ]
config_elems = []
- tree = xml_util.parse_xml( shed_tool_conf )
- root = tree.getroot()
- for elem in root:
- config_elems.append( elem )
- config_elems_to_remove = []
- for config_elem in config_elems:
- if config_elem.tag == 'section':
- tool_elems_to_remove = []
- for tool_elem in config_elem:
- if tool_elem.get( 'guid' ) in guids_to_remove:
- tool_elems_to_remove.append( tool_elem )
- for tool_elem in tool_elems_to_remove:
- # Remove all of the appropriate tool sub-elements from the section element.
- config_elem.remove( tool_elem )
- if len( config_elem ) < 1:
- # Keep a list of all empty section elements so they can be removed.
- config_elems_to_remove.append( config_elem )
- elif config_elem.tag == 'tool':
- if config_elem.get( 'guid' ) in guids_to_remove:
- config_elems_to_remove.append( config_elem )
- for config_elem in config_elems_to_remove:
- config_elems.remove( config_elem )
- # Persist the altered in-memory version of the tool config.
- suc.config_elems_to_xml_file( trans.app, config_elems, shed_tool_conf, tool_path )
+ tree, error_message = xml_util.parse_xml( shed_tool_conf )
+ if tree:
+ root = tree.getroot()
+ for elem in root:
+ config_elems.append( elem )
+ config_elems_to_remove = []
+ for config_elem in config_elems:
+ if config_elem.tag == 'section':
+ tool_elems_to_remove = []
+ for tool_elem in config_elem:
+ if tool_elem.get( 'guid' ) in guids_to_remove:
+ tool_elems_to_remove.append( tool_elem )
+ for tool_elem in tool_elems_to_remove:
+ # Remove all of the appropriate tool sub-elements from the section element.
+ config_elem.remove( tool_elem )
+ if len( config_elem ) < 1:
+ # Keep a list of all empty section elements so they can be removed.
+ config_elems_to_remove.append( config_elem )
+ elif config_elem.tag == 'tool':
+ if config_elem.get( 'guid' ) in guids_to_remove:
+ config_elems_to_remove.append( config_elem )
+ for config_elem in config_elems_to_remove:
+ config_elems.remove( config_elem )
+ # Persist the altered in-memory version of the tool config.
+ suc.config_elems_to_xml_file( trans.app, config_elems, shed_tool_conf, tool_path )
def remove_from_tool_panel( trans, repository, shed_tool_conf, uninstall ):
"""A tool shed repository is being deactivated or uninstalled so handle tool panel alterations accordingly."""
diff -r c666aa204369e3c36498e1d4211e1ec591c3aeeb -r 4df405e7a2aff50c4ef83806c55d01448227cc5d lib/tool_shed/util/xml_util.py
--- a/lib/tool_shed/util/xml_util.py
+++ b/lib/tool_shed/util/xml_util.py
@@ -1,13 +1,17 @@
import logging
import os
+import sys
import tempfile
from xml.etree import ElementTree as XmlET
import xml.etree.ElementTree
log = logging.getLogger( __name__ )
+using_python_27 = sys.version_info[ :2 ] >= ( 2, 7 )
-class CommentedTreeBuilder ( XmlET.XMLTreeBuilder ):
+class Py26CommentedTreeBuilder ( XmlET.XMLTreeBuilder ):
+ # Python 2.6 uses ElementTree 1.2.x.
+
def __init__ ( self, html=0, target=None ):
XmlET.XMLTreeBuilder.__init__( self, html, target )
self._parser.CommentHandler = self.handle_comment
@@ -17,6 +21,15 @@
self._target.data( data )
self._target.end( XmlET.Comment )
+
+class Py27CommentedTreeBuilder ( XmlET.TreeBuilder ):
+ # Python 2.7 uses ElementTree 1.3.x.
+
+ def comment( self, data ):
+ self.start( XmlET.Comment, {} )
+ self.data( data )
+ self.end( XmlET.Comment )
+
def create_and_write_tmp_file( elem ):
tmp_str = xml_to_string( elem )
fh = tempfile.NamedTemporaryFile( 'wb' )
@@ -30,14 +43,30 @@
def parse_xml( file_name ):
"""Returns a parsed xml tree with comments intact."""
- try:
- fobj = open( file_name, 'r' )
- tree = XmlET.parse( fobj, parser=CommentedTreeBuilder() )
- fobj.close()
- except Exception, e:
- log.exception( "Exception attempting to parse %s: %s" % ( str( file_name ), str( e ) ) )
- return None
- return tree
+ error_message = ''
+ fobj = open( file_name, 'r' )
+ if using_python_27:
+ try:
+ tree = XmlET.parse( fobj, parser=XmlET.XMLParser( target=Py27CommentedTreeBuilder() ) )
+ except Exception, e:
+ fobj.close()
+ error_message = "Exception attempting to parse %s: %s" % ( str( file_name ), str( e ) )
+ log.exception( error_message )
+ return None, error_message
+ else:
+ try:
+ tree = XmlET.parse( fobj, parser=Py26CommentedTreeBuilder() )
+ except Exception, e:
+ fobj.close()
+ error_message = "Exception attempting to parse %s: %s" % ( str( file_name ), str( e ) )
+ log.exception( error_message )
+ return None, error_message
+ fobj.close()
+ return tree, error_message
def xml_to_string( elem, encoding='utf-8' ):
- return '%s\n' % xml.etree.ElementTree.tostring( elem, encoding=encoding )
+ if using_python_27:
+ xml_str = '%s\n' % xml.etree.ElementTree.tostring( elem, encoding=encoding, method="xml" )
+ else:
+ xml_str = '%s\n' % xml.etree.ElementTree.tostring( elem, encoding=encoding )
+ return xml_str
diff -r c666aa204369e3c36498e1d4211e1ec591c3aeeb -r 4df405e7a2aff50c4ef83806c55d01448227cc5d test/tool_shed/functional/test_0010_repository_with_tool_dependencies.py
--- a/test/tool_shed/functional/test_0010_repository_with_tool_dependencies.py
+++ b/test/tool_shed/functional/test_0010_repository_with_tool_dependencies.py
@@ -118,7 +118,7 @@
uncompress_file=False,
remove_repo_files_not_in_tar=False,
commit_message='Uploaded malformed tool dependency XML.',
- strings_displayed=[ 'Exception attempting to parse tool_dependencies.xml', 'not well-formed' ],
+ strings_displayed=[ 'Exception attempting to parse', 'not well-formed' ],
strings_not_displayed=[] )
def test_0030_upload_invalid_tool_dependency_xml( self ):
diff -r c666aa204369e3c36498e1d4211e1ec591c3aeeb -r 4df405e7a2aff50c4ef83806c55d01448227cc5d test/tool_shed/functional/test_1010_install_repository_with_tool_dependencies.py
--- a/test/tool_shed/functional/test_1010_install_repository_with_tool_dependencies.py
+++ b/test/tool_shed/functional/test_1010_install_repository_with_tool_dependencies.py
@@ -71,7 +71,7 @@
uncompress_file=False,
remove_repo_files_not_in_tar=False,
commit_message='Uploaded malformed tool dependency XML.',
- strings_displayed=[ 'Exception attempting to parse tool_dependencies.xml', 'not well-formed' ],
+ strings_displayed=[ 'Exception attempting to parse', 'not well-formed' ],
strings_not_displayed=[] )
self.upload_file( repository,
filename=os.path.join( 'freebayes', 'invalid_tool_dependencies', 'tool_dependencies.xml' ),
diff -r c666aa204369e3c36498e1d4211e1ec591c3aeeb -r 4df405e7a2aff50c4ef83806c55d01448227cc5d test/tool_shed/functional/test_1210_uninstall_reinstall_repository_with_tool_dependencies.py
--- a/test/tool_shed/functional/test_1210_uninstall_reinstall_repository_with_tool_dependencies.py
+++ b/test/tool_shed/functional/test_1210_uninstall_reinstall_repository_with_tool_dependencies.py
@@ -65,7 +65,7 @@
uncompress_file=False,
remove_repo_files_not_in_tar=False,
commit_message='Uploaded malformed tool dependency XML.',
- strings_displayed=[ 'Exception attempting to parse tool_dependencies.xml', 'not well-formed' ],
+ strings_displayed=[ 'Exception attempting to parse', 'not well-formed' ],
strings_not_displayed=[] )
self.upload_file( repository,
filename=os.path.join( 'freebayes', 'invalid_tool_dependencies', 'tool_dependencies.xml' ),
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/c666aa204369/
Changeset: c666aa204369
Branch: next-stable
User: Dave Bouvier
Date: 2013-05-28 15:53:36
Summary: Fixes for backward compatibility with 2012-12-20 release of Galaxy.
Affected #: 6 files
diff -r b51fa0eb15244712e2d410dc2930b8c4fb657d59 -r c666aa204369e3c36498e1d4211e1ec591c3aeeb lib/galaxy/webapps/tool_shed/util/container_util.py
--- a/lib/galaxy/webapps/tool_shed/util/container_util.py
+++ b/lib/galaxy/webapps/tool_shed/util/container_util.py
@@ -68,7 +68,7 @@
repository_name=name,
repository_owner=owner,
changeset_revision=changeset_revision,
- prior_installation_required=prior_installation_required )
+ prior_installation_required=asbool( prior_installation_required ) )
class DataManager( object ):
@@ -467,7 +467,7 @@
repository_name=name,
repository_owner=owner,
changeset_revision=changeset_revision,
- prior_installation_required=prior_installation_required,
+ prior_installation_required=asbool( prior_installation_required ),
error=error )
folder.invalid_repository_dependencies.append( ird )
invalid_repository_dependencies_folder.folders.append( folder )
@@ -1328,7 +1328,7 @@
repository_name=repository_name,
repository_owner=repository_owner,
changeset_revision=changeset_revision,
- prior_installation_required=prior_installation_required,
+ prior_installation_required=asbool( prior_installation_required ),
installation_status=installation_status,
tool_shed_repository_id=tool_shed_repository_id )
# Insert the repository_dependency into the folder.
@@ -1338,7 +1338,7 @@
def is_subfolder_of( folder, repository_dependency ):
toolshed, repository_name, repository_owner, changeset_revision, prior_installation_required = \
suc.parse_repository_dependency_tuple( repository_dependency )
- key = generate_repository_dependencies_key_for_repository( toolshed, repository_name, repository_owner, changeset_revision, prior_installation_required )
+ key = generate_repository_dependencies_key_for_repository( toolshed, repository_name, repository_owner, changeset_revision, asbool( prior_installation_required ) )
for sub_folder in folder.folders:
if key == sub_folder.key:
return True
diff -r b51fa0eb15244712e2d410dc2930b8c4fb657d59 -r c666aa204369e3c36498e1d4211e1ec591c3aeeb lib/tool_shed/galaxy_install/repository_util.py
--- a/lib/tool_shed/galaxy_install/repository_util.py
+++ b/lib/tool_shed/galaxy_install/repository_util.py
@@ -4,6 +4,7 @@
import tempfile
import threading
from galaxy import tools
+from galaxy.util import asbool
from galaxy.util import json
from galaxy import web
from galaxy.model.orm import or_
@@ -214,7 +215,7 @@
continue
for rd_tup in rd_tups:
tool_shed, name, owner, changeset_revision, prior_installation_required = suc.parse_repository_dependency_tuple( rd_tup )
- if prior_installation_required:
+ if asbool( prior_installation_required ):
repository = suc.get_repository_for_dependency_relationship( trans.app, tool_shed, name, owner, changeset_revision )
if repository:
encoded_repository_id = trans.security.encode_id( repository.id )
diff -r b51fa0eb15244712e2d410dc2930b8c4fb657d59 -r c666aa204369e3c36498e1d4211e1ec591c3aeeb lib/tool_shed/util/common_install_util.py
--- a/lib/tool_shed/util/common_install_util.py
+++ b/lib/tool_shed/util/common_install_util.py
@@ -202,7 +202,7 @@
tmp_repo_info_tuple = ( None, tmp_clone_url, changeset_revision, None, owner, None, None )
repository, current_changeset_revision = suc.repository_was_previously_installed( trans, tool_shed, name, tmp_repo_info_tuple )
if repository:
- new_rd_tup = [ tool_shed, name, owner, changeset_revision, prior_installation_required, repository.id, repository.status ]
+ new_rd_tup = [ tool_shed, name, owner, changeset_revision, str( prior_installation_required ), repository.id, repository.status ]
if repository.status == trans.model.ToolShedRepository.installation_status.INSTALLED:
if new_rd_tup not in installed_rd_tups:
installed_rd_tups.append( new_rd_tup )
@@ -210,7 +210,7 @@
if new_rd_tup not in missing_rd_tups:
missing_rd_tups.append( new_rd_tup )
else:
- new_rd_tup = [ tool_shed, name, owner, changeset_revision, prior_installation_required, None, 'Never installed' ]
+ new_rd_tup = [ tool_shed, name, owner, changeset_revision, str( prior_installation_required ), None, 'Never installed' ]
if new_rd_tup not in missing_rd_tups:
missing_rd_tups.append( new_rd_tup )
if installed_rd_tups:
diff -r b51fa0eb15244712e2d410dc2930b8c4fb657d59 -r c666aa204369e3c36498e1d4211e1ec591c3aeeb lib/tool_shed/util/metadata_util.py
--- a/lib/tool_shed/util/metadata_util.py
+++ b/lib/tool_shed/util/metadata_util.py
@@ -206,7 +206,7 @@
current_repository_name == ancestor_repository_name and \
current_repository_owner == ancestor_repository_owner and \
current_changeset_revision == ancestor_changeset_revision and \
- current_prior_installation_required == ancestor_prior_installation_required:
+ util.string_as_bool( current_prior_installation_required ) == util.string_as_bool( ancestor_prior_installation_required ):
found_in_current = True
break
if not found_in_current:
@@ -771,7 +771,7 @@
else:
# Append the error_message to the repository dependencies tuple.
toolshed, name, owner, changeset_revision, prior_installation_required = repository_dependency_tup
- repository_dependency_tup = ( toolshed, name, owner, changeset_revision, prior_installation_required, error_message )
+ repository_dependency_tup = ( toolshed, name, owner, changeset_revision, str( prior_installation_required ), error_message )
invalid_repository_dependency_tups.append( repository_dependency_tup )
if invalid_repository_dependency_tups:
invalid_repository_dependencies_dict[ 'repository_dependencies' ] = invalid_repository_dependency_tups
@@ -823,7 +823,7 @@
tool_dependency_is_valid = False
# Append the error message to the invalid repository dependency tuple.
toolshed, name, owner, changeset_revision, prior_installation_required = repository_dependency_tup
- repository_dependency_tup = ( toolshed, name, owner, changeset_revision, prior_installation_required, message )
+ repository_dependency_tup = ( toolshed, name, owner, changeset_revision, str( prior_installation_required ), message )
invalid_repository_dependency_tups.append( repository_dependency_tup )
error_message = '%s %s' % ( error_message, message )
elif elem.tag == 'set_environment':
@@ -1112,7 +1112,7 @@
owner = repository_elem.get( 'owner' )
changeset_revision = repository_elem.get( 'changeset_revision' )
prior_installation_required = str( repository_elem.get( 'prior_installation_required', False ) )
- repository_dependency_tup = [ toolshed, name, owner, changeset_revision, prior_installation_required ]
+ repository_dependency_tup = [ toolshed, name, owner, changeset_revision, str( prior_installation_required ) ]
user = None
repository = None
if app.name == 'galaxy':
@@ -1699,8 +1699,8 @@
else:
log.debug( "Successfully reset metadata on repository %s" % repository.name )
successful_count += 1
- except Exception, e:
- log.debug( "Error attempting to reset metadata on repository: %s" % str( e ) )
+ except:
+ log.exception( "Error attempting to reset metadata on repository %s", repository.name )
unsuccessful_count += 1
message = "Successfully reset metadata on %d %s. " % ( successful_count, inflector.cond_plural( successful_count, "repository" ) )
if unsuccessful_count:
@@ -1929,7 +1929,6 @@
tool_shed, name, owner, changeset_revision, prior_installation_required = repository_dependency_tup
else:
tool_shed, name, owner, changeset_revision, prior_installation_required, error_message = repository_dependency_tup
- prior_installation_required = util.asbool( str( prior_installation_required ) )
if repository_dependencies_dict:
repository_dependencies = repository_dependencies_dict.get( 'repository_dependencies', [] )
for repository_dependency_tup in repository_dependency_tups:
diff -r b51fa0eb15244712e2d410dc2930b8c4fb657d59 -r c666aa204369e3c36498e1d4211e1ec591c3aeeb lib/tool_shed/util/repository_dependency_util.py
--- a/lib/tool_shed/util/repository_dependency_util.py
+++ b/lib/tool_shed/util/repository_dependency_util.py
@@ -372,7 +372,7 @@
changeset_revision )
if repository_metadata:
new_key_rd_dict = {}
- new_key_rd_dict[ key ] = [ rd_toolshed, rd_name, rd_owner, repository_metadata.changeset_revision, rd_prior_installation_required ]
+ new_key_rd_dict[ key ] = [ rd_toolshed, rd_name, rd_owner, repository_metadata.changeset_revision, str( rd_prior_installation_required ) ]
# We have the updated changeset revision.
updated_key_rd_dicts.append( new_key_rd_dict )
else:
@@ -681,7 +681,7 @@
def get_repository_dependency_as_key( repository_dependency ):
tool_shed, name, owner, changeset_revision, prior_installation_required = suc.parse_repository_dependency_tuple( repository_dependency )
- return container_util.generate_repository_dependencies_key_for_repository( tool_shed, name, owner, changeset_revision, prior_installation_required )
+ return container_util.generate_repository_dependencies_key_for_repository( tool_shed, name, owner, changeset_revision, str( prior_installation_required ) )
def get_repository_dependency_by_repository_id( trans, decoded_repository_id ):
return trans.sa_session.query( trans.model.RepositoryDependency ) \
diff -r b51fa0eb15244712e2d410dc2930b8c4fb657d59 -r c666aa204369e3c36498e1d4211e1ec591c3aeeb lib/tool_shed/util/shed_util_common.py
--- a/lib/tool_shed/util/shed_util_common.py
+++ b/lib/tool_shed/util/shed_util_common.py
@@ -1060,7 +1060,7 @@
prior_installation_required = False
elif len( repository_dependency_tuple ) == 6:
toolshed, name, owner, changeset_revision, prior_installation_required, error = repository_dependency_tuple
- prior_installation_required = util.asbool( str( prior_installation_required ) )
+ prior_installation_required = str( prior_installation_required )
return toolshed, name, owner, changeset_revision, prior_installation_required, error
else:
if len( repository_dependency_tuple ) == 4:
@@ -1070,7 +1070,7 @@
prior_installation_required = False
elif len( repository_dependency_tuple ) == 5:
tool_shed, name, owner, changeset_revision, prior_installation_required = repository_dependency_tuple
- prior_installation_required = util.asbool( str( prior_installation_required ) )
+ prior_installation_required = str( prior_installation_required )
return tool_shed, name, owner, changeset_revision, prior_installation_required
def pretty_print( dict=None ):
https://bitbucket.org/galaxy/galaxy-central/commits/f639dfdbf3a8/
Changeset: f639dfdbf3a8
User: Dave Bouvier
Date: 2013-05-28 15:53:59
Summary: Merged in fixes from next-stable.
Affected #: 6 files
diff -r 31714646a7b441f34a43748065278a1b08940c3c -r f639dfdbf3a844ad54eba487afe71d8c100f15d2 lib/galaxy/webapps/tool_shed/util/container_util.py
--- a/lib/galaxy/webapps/tool_shed/util/container_util.py
+++ b/lib/galaxy/webapps/tool_shed/util/container_util.py
@@ -68,7 +68,7 @@
repository_name=name,
repository_owner=owner,
changeset_revision=changeset_revision,
- prior_installation_required=prior_installation_required )
+ prior_installation_required=asbool( prior_installation_required ) )
class DataManager( object ):
@@ -467,7 +467,7 @@
repository_name=name,
repository_owner=owner,
changeset_revision=changeset_revision,
- prior_installation_required=prior_installation_required,
+ prior_installation_required=asbool( prior_installation_required ),
error=error )
folder.invalid_repository_dependencies.append( ird )
invalid_repository_dependencies_folder.folders.append( folder )
@@ -1329,7 +1329,7 @@
repository_name=repository_name,
repository_owner=repository_owner,
changeset_revision=changeset_revision,
- prior_installation_required=prior_installation_required,
+ prior_installation_required=asbool( prior_installation_required ),
installation_status=installation_status,
tool_shed_repository_id=tool_shed_repository_id )
# Insert the repository_dependency into the folder.
@@ -1339,7 +1339,7 @@
def is_subfolder_of( folder, repository_dependency ):
toolshed, repository_name, repository_owner, changeset_revision, prior_installation_required = \
suc.parse_repository_dependency_tuple( repository_dependency )
- key = generate_repository_dependencies_key_for_repository( toolshed, repository_name, repository_owner, changeset_revision, prior_installation_required )
+ key = generate_repository_dependencies_key_for_repository( toolshed, repository_name, repository_owner, changeset_revision, asbool( prior_installation_required ) )
for sub_folder in folder.folders:
if key == sub_folder.key:
return True
diff -r 31714646a7b441f34a43748065278a1b08940c3c -r f639dfdbf3a844ad54eba487afe71d8c100f15d2 lib/tool_shed/galaxy_install/repository_util.py
--- a/lib/tool_shed/galaxy_install/repository_util.py
+++ b/lib/tool_shed/galaxy_install/repository_util.py
@@ -4,6 +4,7 @@
import tempfile
import threading
from galaxy import tools
+from galaxy.util import asbool
from galaxy.util import json
from galaxy import web
from galaxy.model.orm import or_
@@ -214,7 +215,7 @@
continue
for rd_tup in rd_tups:
tool_shed, name, owner, changeset_revision, prior_installation_required = suc.parse_repository_dependency_tuple( rd_tup )
- if prior_installation_required:
+ if asbool( prior_installation_required ):
repository = suc.get_repository_for_dependency_relationship( trans.app, tool_shed, name, owner, changeset_revision )
if repository:
encoded_repository_id = trans.security.encode_id( repository.id )
diff -r 31714646a7b441f34a43748065278a1b08940c3c -r f639dfdbf3a844ad54eba487afe71d8c100f15d2 lib/tool_shed/util/common_install_util.py
--- a/lib/tool_shed/util/common_install_util.py
+++ b/lib/tool_shed/util/common_install_util.py
@@ -202,7 +202,7 @@
tmp_repo_info_tuple = ( None, tmp_clone_url, changeset_revision, None, owner, None, None )
repository, current_changeset_revision = suc.repository_was_previously_installed( trans, tool_shed, name, tmp_repo_info_tuple )
if repository:
- new_rd_tup = [ tool_shed, name, owner, changeset_revision, prior_installation_required, repository.id, repository.status ]
+ new_rd_tup = [ tool_shed, name, owner, changeset_revision, str( prior_installation_required ), repository.id, repository.status ]
if repository.status == trans.model.ToolShedRepository.installation_status.INSTALLED:
if new_rd_tup not in installed_rd_tups:
installed_rd_tups.append( new_rd_tup )
@@ -210,7 +210,7 @@
if new_rd_tup not in missing_rd_tups:
missing_rd_tups.append( new_rd_tup )
else:
- new_rd_tup = [ tool_shed, name, owner, changeset_revision, prior_installation_required, None, 'Never installed' ]
+ new_rd_tup = [ tool_shed, name, owner, changeset_revision, str( prior_installation_required ), None, 'Never installed' ]
if new_rd_tup not in missing_rd_tups:
missing_rd_tups.append( new_rd_tup )
if installed_rd_tups:
diff -r 31714646a7b441f34a43748065278a1b08940c3c -r f639dfdbf3a844ad54eba487afe71d8c100f15d2 lib/tool_shed/util/metadata_util.py
--- a/lib/tool_shed/util/metadata_util.py
+++ b/lib/tool_shed/util/metadata_util.py
@@ -206,7 +206,7 @@
current_repository_name == ancestor_repository_name and \
current_repository_owner == ancestor_repository_owner and \
current_changeset_revision == ancestor_changeset_revision and \
- current_prior_installation_required == ancestor_prior_installation_required:
+ util.string_as_bool( current_prior_installation_required ) == util.string_as_bool( ancestor_prior_installation_required ):
found_in_current = True
break
if not found_in_current:
@@ -771,7 +771,7 @@
else:
# Append the error_message to the repository dependencies tuple.
toolshed, name, owner, changeset_revision, prior_installation_required = repository_dependency_tup
- repository_dependency_tup = ( toolshed, name, owner, changeset_revision, prior_installation_required, error_message )
+ repository_dependency_tup = ( toolshed, name, owner, changeset_revision, str( prior_installation_required ), error_message )
invalid_repository_dependency_tups.append( repository_dependency_tup )
if invalid_repository_dependency_tups:
invalid_repository_dependencies_dict[ 'repository_dependencies' ] = invalid_repository_dependency_tups
@@ -823,7 +823,7 @@
tool_dependency_is_valid = False
# Append the error message to the invalid repository dependency tuple.
toolshed, name, owner, changeset_revision, prior_installation_required = repository_dependency_tup
- repository_dependency_tup = ( toolshed, name, owner, changeset_revision, prior_installation_required, message )
+ repository_dependency_tup = ( toolshed, name, owner, changeset_revision, str( prior_installation_required ), message )
invalid_repository_dependency_tups.append( repository_dependency_tup )
error_message = '%s %s' % ( error_message, message )
elif elem.tag == 'set_environment':
@@ -1112,7 +1112,7 @@
owner = repository_elem.get( 'owner' )
changeset_revision = repository_elem.get( 'changeset_revision' )
prior_installation_required = str( repository_elem.get( 'prior_installation_required', False ) )
- repository_dependency_tup = [ toolshed, name, owner, changeset_revision, prior_installation_required ]
+ repository_dependency_tup = [ toolshed, name, owner, changeset_revision, str( prior_installation_required ) ]
user = None
repository = None
if app.name == 'galaxy':
@@ -1699,8 +1699,8 @@
else:
log.debug( "Successfully reset metadata on repository %s" % repository.name )
successful_count += 1
- except Exception, e:
- log.debug( "Error attempting to reset metadata on repository: %s" % str( e ) )
+ except:
+ log.exception( "Error attempting to reset metadata on repository %s", repository.name )
unsuccessful_count += 1
message = "Successfully reset metadata on %d %s. " % ( successful_count, inflector.cond_plural( successful_count, "repository" ) )
if unsuccessful_count:
@@ -1929,7 +1929,6 @@
tool_shed, name, owner, changeset_revision, prior_installation_required = repository_dependency_tup
else:
tool_shed, name, owner, changeset_revision, prior_installation_required, error_message = repository_dependency_tup
- prior_installation_required = util.asbool( str( prior_installation_required ) )
if repository_dependencies_dict:
repository_dependencies = repository_dependencies_dict.get( 'repository_dependencies', [] )
for repository_dependency_tup in repository_dependency_tups:
diff -r 31714646a7b441f34a43748065278a1b08940c3c -r f639dfdbf3a844ad54eba487afe71d8c100f15d2 lib/tool_shed/util/repository_dependency_util.py
--- a/lib/tool_shed/util/repository_dependency_util.py
+++ b/lib/tool_shed/util/repository_dependency_util.py
@@ -372,7 +372,7 @@
changeset_revision )
if repository_metadata:
new_key_rd_dict = {}
- new_key_rd_dict[ key ] = [ rd_toolshed, rd_name, rd_owner, repository_metadata.changeset_revision, rd_prior_installation_required ]
+ new_key_rd_dict[ key ] = [ rd_toolshed, rd_name, rd_owner, repository_metadata.changeset_revision, str( rd_prior_installation_required ) ]
# We have the updated changeset revision.
updated_key_rd_dicts.append( new_key_rd_dict )
else:
@@ -681,7 +681,7 @@
def get_repository_dependency_as_key( repository_dependency ):
tool_shed, name, owner, changeset_revision, prior_installation_required = suc.parse_repository_dependency_tuple( repository_dependency )
- return container_util.generate_repository_dependencies_key_for_repository( tool_shed, name, owner, changeset_revision, prior_installation_required )
+ return container_util.generate_repository_dependencies_key_for_repository( tool_shed, name, owner, changeset_revision, str( prior_installation_required ) )
def get_repository_dependency_by_repository_id( trans, decoded_repository_id ):
return trans.sa_session.query( trans.model.RepositoryDependency ) \
diff -r 31714646a7b441f34a43748065278a1b08940c3c -r f639dfdbf3a844ad54eba487afe71d8c100f15d2 lib/tool_shed/util/shed_util_common.py
--- a/lib/tool_shed/util/shed_util_common.py
+++ b/lib/tool_shed/util/shed_util_common.py
@@ -1060,7 +1060,7 @@
prior_installation_required = False
elif len( repository_dependency_tuple ) == 6:
toolshed, name, owner, changeset_revision, prior_installation_required, error = repository_dependency_tuple
- prior_installation_required = util.asbool( str( prior_installation_required ) )
+ prior_installation_required = str( prior_installation_required )
return toolshed, name, owner, changeset_revision, prior_installation_required, error
else:
if len( repository_dependency_tuple ) == 4:
@@ -1070,7 +1070,7 @@
prior_installation_required = False
elif len( repository_dependency_tuple ) == 5:
tool_shed, name, owner, changeset_revision, prior_installation_required = repository_dependency_tuple
- prior_installation_required = util.asbool( str( prior_installation_required ) )
+ prior_installation_required = str( prior_installation_required )
return tool_shed, name, owner, changeset_revision, prior_installation_required
def pretty_print( dict=None ):
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: greg: Add some important documentation to some tool shed methods.
by commits-noreply@bitbucket.org 24 May '13
by commits-noreply@bitbucket.org 24 May '13
24 May '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/31714646a7b4/
Changeset: 31714646a7b4
User: greg
Date: 2013-05-24 21:14:08
Summary: Add some important documentation to some tool shed methods.
Affected #: 2 files
diff -r 308719efff061598955f5b54fae2621c4f902ee9 -r 31714646a7b441f34a43748065278a1b08940c3c lib/galaxy/webapps/tool_shed/util/container_util.py
--- a/lib/galaxy/webapps/tool_shed/util/container_util.py
+++ b/lib/galaxy/webapps/tool_shed/util/container_util.py
@@ -798,12 +798,13 @@
label='Valid tools' )
containers_dict[ 'valid_tools' ] = valid_tools_root_folder
# Tool test results container.
- if tool_test_results:
- # Only create and populate this folder if there are actual tool test results to display, since the
- # display of the 'Test environment' folder by itself can be misleading.
- if len( tool_test_results ) > 1:
- folder_id, tool_test_results_root_folder = build_tool_test_results_folder( trans, folder_id, tool_test_results, time_last_tested=time_last_tested )
- containers_dict[ 'tool_test_results' ] = tool_test_results_root_folder
+ if tool_test_results and len( tool_test_results ) > 1:
+ # Only create and populate this folder if there are actual tool test results to display, since the display of the 'Test environment'
+ # folder by itself can be misleading. We check for more than a single entry in the tool_test_results dictionary because it may have
+ # only the "test_environment" entry, but we want at least 1 of "passed_tests", "failed_tests", "installation_errors", "missing_test_components"
+ # "skipped_tests", "not_tested" or any other entry that may be added in the future.
+ folder_id, tool_test_results_root_folder = build_tool_test_results_folder( trans, folder_id, tool_test_results, time_last_tested=time_last_tested )
+ containers_dict[ 'tool_test_results' ] = tool_test_results_root_folder
# Workflows container.
if metadata:
if 'workflows' in metadata:
diff -r 308719efff061598955f5b54fae2621c4f902ee9 -r 31714646a7b441f34a43748065278a1b08940c3c lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py
--- a/lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py
@@ -51,6 +51,70 @@
return output.return_code
def handle_environment_variables( app, tool_dependency, install_dir, env_var_dict, set_prior_environment_commands ):
+ """
+    This method works with a combination of three tool dependency definition tag sets, which are defined in the tool_dependencies.xml file in the
+ order discussed here. The example for this discussion is the tool_dependencies.xml file contained in the osra repository, which is available at:
+
+ http://testtoolshed.g2.bx.psu.edu/view/bgruening/osra
+
+ The first tag set defines a complex repository dependency like this. This tag set ensures that changeset revision XXX of the repository named
+ package_graphicsmagick_1_3 owned by YYY in the tool shed ZZZ has been previously installed.
+
+ <tool_dependency>
+ <package name="graphicsmagick" version="1.3.18">
+ <repository changeset_revision="XXX" name="package_graphicsmagick_1_3" owner="YYY" prior_installation_required="True" toolshed="ZZZ" />
+ </package>
+ ...
+
+ * By the way, there is an env.sh file associated with version 1.3.18 of the graphicsmagick package which looks something like this (we'll reference
+    this file later in this discussion).
+ ----
+ GRAPHICSMAGICK_ROOT_DIR=/<my configured tool dependency path>/graphicsmagick/1.3.18/YYY/package_graphicsmagick_1_3/XXX/gmagick;
+ export GRAPHICSMAGICK_ROOT_DIR
+ ----
+
+ The second tag set defines a specific package dependency that has been previously installed (guaranteed by the tag set discussed above) and compiled,
+    where the compiled dependency is needed by the tool dependency currently being installed (osra version 2.0.0 in this case) and compiled in order for
+    its installation and compilation to succeed.  This tag set is contained within the <package name="osra" version="2.0.0"> tag set, which implies that
+ version 2.0.0 of the osra package requires version 1.3.18 of the graphicsmagick package in order to successfully compile. When this tag set is handled,
+ one of the effects is that the env.sh file associated with graphicsmagick version 1.3.18 is "sourced", which undoubtedly sets or alters certain environment
+ variables (e.g. PATH, PYTHONPATH, etc).
+
+ <!-- populate the environment variables from the dependent repositories -->
+ <action type="set_environment_for_install">
+ <repository changeset_revision="XXX" name="package_graphicsmagick_1_3" owner="YYY" toolshed="ZZZ">
+ <package name="graphicsmagick" version="1.3.18" />
+ </repository>
+ </action>
+
+ The third tag set enables discovery of the same required package dependency discussed above for correctly compiling the osra version 2.0.0 package, but
+ in this case the package can be discovered at tool execution time. Using the $ENV[] option as shown in this example, the value of the environment
+ variable named GRAPHICSMAGICK_ROOT_DIR (which was set in the environment using the second tag set described above) will be used to automatically alter
+ the env.sh file associated with the osra version 2.0.0 tool dependency when it is installed into Galaxy. * Refer to where we discussed the env.sh file
+ for version 1.3.18 of the graphicsmagick package above.
+
+ <action type="set_environment">
+ <environment_variable action="prepend_to" name="LD_LIBRARY_PATH">$ENV[$GRAPHICSMAGICK_ROOT_DIR]/lib/</environment_variable>
+ <environment_variable action="prepend_to" name="LD_LIBRARY_PATH">$INSTALL_DIR/potrace/build/lib/</environment_variable>
+ <environment_variable action="prepend_to" name="PATH">$INSTALL_DIR/bin</environment_variable>
+ <!-- OSRA_DATA_FILES is only used by the galaxy wrapper and is not part of OSRA -->
+ <environment_variable action="set_to" name="OSRA_DATA_FILES">$INSTALL_DIR/share</environment_variable>
+ </action>
+
+    The above tag will produce an env.sh file for version 2.0.0 of the osra package when it is installed into Galaxy that looks something like this.  Notice
+ that the path to the gmagick binary is included here since it expands the defined $ENV[$GRAPHICSMAGICK_ROOT_DIR] value in the above tag set.
+
+ ----
+ LD_LIBRARY_PATH=/<my configured tool dependency path>/graphicsmagick/1.3.18/YYY/package_graphicsmagick_1_3/XXX/gmagick/lib/:$LD_LIBRARY_PATH;
+ export LD_LIBRARY_PATH
+ LD_LIBRARY_PATH=/<my configured tool dependency path>/osra/1.4.0/YYY/depends_on/XXX/potrace/build/lib/:$LD_LIBRARY_PATH;
+ export LD_LIBRARY_PATH
+ PATH=/<my configured tool dependency path>/osra/1.4.0/YYY/depends_on/XXX/bin:$PATH;
+ export PATH
+ OSRA_DATA_FILES=/<my configured tool dependency path>/osra/1.4.0/YYY/depends_on/XXX/share;
+ export OSRA_DATA_FILES
+ ----
+ """
env_var_value = env_var_dict[ 'value' ]
if '$ENV[' in env_var_value and ']' in env_var_value:
# Pull out the name of the environment variable to populate.
@@ -107,6 +171,8 @@
filtered_actions = actions[ 1: ]
url = action_dict[ 'url' ]
if 'target_filename' in action_dict:
+            # Sometimes compressed archives extract their content to a folder other than the default defined file name.  Using this
+ # attribute will ensure that the file name is set appropriately and can be located after download, decompression and extraction.
downloaded_filename = action_dict[ 'target_filename' ]
else:
downloaded_filename = os.path.split( url )[ -1 ]
@@ -137,6 +203,8 @@
filtered_actions = actions[ 1: ]
url = action_dict[ 'url' ]
if action_dict[ 'target_filename' ]:
+            # Sometimes compressed archives extract their content to a folder other than the default defined file name.  Using this
+ # attribute will ensure that the file name is set appropriately and can be located after download, decompression and extraction.
filename = action_dict[ 'target_filename' ]
else:
filename = url.split( '/' )[ -1 ]
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/b51fa0eb1524/
Changeset: b51fa0eb1524
Branch: next-stable
User: Dave Bouvier
Date: 2013-05-24 20:02:14
Summary: Fix for missing_test_components always being present, causing misleading test result display on repository pages.
Affected #: 1 file
diff -r 12978bbb5ffa91743afdf6aed29355843735f96f -r b51fa0eb15244712e2d410dc2930b8c4fb657d59 lib/tool_shed/scripts/check_repositories_for_functional_tests.py
--- a/lib/tool_shed/scripts/check_repositories_for_functional_tests.py
+++ b/lib/tool_shed/scripts/check_repositories_for_functional_tests.py
@@ -117,6 +117,8 @@
and test repositories script to process. If the tested changeset revision does not have a test-data directory, this script will also mark the revision
not to be tested.
+ TODO: Update this dict structure with the recently added components.
+
If any error is encountered, the script will update the repository_metadata.tool_test_results attribute following this structure:
{
"test_environment":
@@ -137,24 +139,62 @@
"tool_id": "The tool ID that was tested",
"tool_version": "The tool version that was tested",
},
- ],
+ ]
"failed_tests":
[
{
"test_id": "The test ID, generated by twill",
"tool_id": "The tool ID that was tested",
"tool_version": "The tool version that was tested",
- "stderr": "The output of the test, or a more detailed description of what was tested and what the error was.",
- "traceback": "The traceback, if any."
+ "stderr": "The output of the test, or a more detailed description of what was tested and what the outcome was."
+ "traceback": "The captured traceback."
},
- ],
+ ]
+ "installation_errors":
+ {
+ 'tool_dependencies':
+ [
+ {
+ 'type': 'Type of tool dependency, e.g. package, set_environment, etc.',
+ 'name': 'Name of the tool dependency.',
+ 'version': 'Version if this is a package, otherwise blank.',
+ 'error_message': 'The error message returned when installation was attempted.',
+ },
+ ],
+ 'repository_dependencies':
+ [
+ {
+ 'tool_shed': 'The tool shed that this repository was installed from.',
+ 'name': 'The name of the repository that failed to install.',
+ 'owner': 'Owner of the failed repository.',
+ 'changeset_revision': 'Changeset revision of the failed repository.',
+ 'error_message': 'The error message that was returned when the repository failed to install.',
+ },
+ ],
+ 'current_repository':
+ [
+ {
+ 'tool_shed': 'The tool shed that this repository was installed from.',
+ 'name': 'The name of the repository that failed to install.',
+ 'owner': 'Owner of the failed repository.',
+ 'changeset_revision': 'Changeset revision of the failed repository.',
+ 'error_message': 'The error message that was returned when the repository failed to install.',
+ },
+ ],
+ {
+ "name": "The name of the repository.",
+ "owner": "The owner of the repository.",
+ "changeset_revision": "The changeset revision of the repository.",
+ "error_message": "The message stored in tool_dependency.error_message."
+ },
+ }
"missing_test_components":
[
{
- "tool_id": "The tool ID that is missing functional test definitions and/or test data.",
- "tool_version": "The version of the tool.",
- "tool_guid": "The guid of the tool.",
- "missing_components": "The components that are missing for this tool to be considered testable."
+             "tool_id": "The tool ID that is missing components.",
+ "tool_version": "The version of the tool."
+ "tool_guid": "The guid of the tool."
+ "missing_components": "Which components are missing, e.g. the test data filename, or the test-data directory."
},
]
}
@@ -167,22 +207,22 @@
no_tools = 0
valid_revisions = 0
invalid_revisions = 0
+ records_checked = 0
# Get the list of metadata records to check for functional tests and test data. Limit this to records that have not been flagged do_not_test,
# since there's no need to check them again if they won't be tested anyway. Also filter out changeset revisions that are not downloadable,
# because it's redundant to test a revision that a user can't install.
- metadata_records_to_check = app.sa_session.query( app.model.RepositoryMetadata ) \
- .filter( and_( app.model.RepositoryMetadata.table.c.downloadable == True,
- app.model.RepositoryMetadata.table.c.includes_tools == True,
- app.model.RepositoryMetadata.table.c.do_not_test == False ) ) \
- .all()
- for metadata_record in metadata_records_to_check:
+ for metadata_record in app.sa_session.query( app.model.RepositoryMetadata ) \
+ .filter( and_( app.model.RepositoryMetadata.table.c.downloadable == True,
+ app.model.RepositoryMetadata.table.c.includes_tools == True,
+ app.model.RepositoryMetadata.table.c.do_not_test == False ) ):
+ records_checked += 1
# Initialize the repository_status dict with the test environment, but leave the test_errors empty.
repository_status = {}
if metadata_record.tool_test_results:
repository_status = metadata_record.tool_test_results
# Clear any old invalid tests for this metadata revision, since this could lead to duplication of invalid test rows,
# or tests incorrectly labeled as invalid.
- repository_status[ 'missing_test_components' ] = []
+ missing_test_components = []
if 'test_environment' in repository_status:
repository_status[ 'test_environment' ] = get_test_environment( repository_status[ 'test_environment' ] )
else:
@@ -295,7 +335,7 @@
# "tool_id": "The tool ID that was tested",
# "tool_version": "The tool version that was tested",
# },
- # ],
+ # ]
# "failed_tests":
# [
# {
@@ -305,14 +345,52 @@
# "stderr": "The output of the test, or a more detailed description of what was tested and what the outcome was."
# "traceback": "The captured traceback."
# },
- # ],
+ # ]
+ # "installation_errors":
+ # {
+ # 'tool_dependencies':
+ # [
+ # {
+ # 'type': 'Type of tool dependency, e.g. package, set_environment, etc.',
+ # 'name': 'Name of the tool dependency.',
+ # 'version': 'Version if this is a package, otherwise blank.',
+ # 'error_message': 'The error message returned when installation was attempted.',
+ # },
+ # ],
+ # 'repository_dependencies':
+ # [
+ # {
+ # 'tool_shed': 'The tool shed that this repository was installed from.',
+ # 'name': 'The name of the repository that failed to install.',
+ # 'owner': 'Owner of the failed repository.',
+ # 'changeset_revision': 'Changeset revision of the failed repository.',
+ # 'error_message': 'The error message that was returned when the repository failed to install.',
+ # },
+ # ],
+ # 'current_repository':
+ # [
+ # {
+ # 'tool_shed': 'The tool shed that this repository was installed from.',
+ # 'name': 'The name of the repository that failed to install.',
+ # 'owner': 'Owner of the failed repository.',
+ # 'changeset_revision': 'Changeset revision of the failed repository.',
+ # 'error_message': 'The error message that was returned when the repository failed to install.',
+ # },
+ # ],
+ # {
+ # "name": "The name of the repository.",
+ # "owner": "The owner of the repository.",
+ # "changeset_revision": "The changeset revision of the repository.",
+ # "error_message": "The message stored in tool_dependency.error_message."
+ # },
+ # }
# "missing_test_components":
# [
# {
- # "tool_id": "The ID of the tool that does not have valid tests.",
+            #             "tool_id": "The tool ID that is missing components.",
# "tool_version": "The version of the tool."
# "tool_guid": "The guid of the tool."
- # "missing_components": "The components that are missing for this tool to be considered testable."
+ # "missing_components": "Which components are missing, e.g. the test data filename, or the test-data directory."
# },
# ]
# }
@@ -321,14 +399,14 @@
# than the list relevant to what it is testing.
# Only append this error dict if it hasn't already been added.
if problem_found:
- if test_errors not in repository_status[ 'missing_test_components' ]:
- repository_status[ 'missing_test_components' ].append( test_errors )
+ if test_errors not in missing_test_components:
+ missing_test_components.append( test_errors )
if tool_has_tests and has_test_files:
testable_revision_found = True
# Remove the cloned repository path. This has to be done after the check for required test files, for obvious reasons.
if os.path.exists( work_dir ):
shutil.rmtree( work_dir )
- if not repository_status[ 'missing_test_components' ]:
+ if not missing_test_components:
valid_revisions += 1
if verbosity >= 1:
print '# All tools have functional tests in changeset revision %s of repository %s owned by %s.' % ( changeset_revision, name, owner )
@@ -337,13 +415,13 @@
if verbosity >= 1:
print '# Some tools have problematic functional tests in changeset revision %s of repository %s owned by %s.' % ( changeset_revision, name, owner )
if verbosity >= 2:
- for invalid_test in repository_status[ 'missing_test_components' ]:
+ for invalid_test in missing_test_components:
if 'missing_components' in invalid_test:
print '# %s' % invalid_test[ 'missing_components' ]
if not info_only:
# If repository_status[ 'test_errors' ] is empty, no issues were found, and we can just update time_last_tested with the platform
# on which this script was run.
- if repository_status[ 'missing_test_components' ]:
+ if missing_test_components:
# If functional test definitions or test data are missing, set do_not_test = True if no tool with valid tests has been
# found in this revision, and:
# a) There are multiple downloadable revisions, and the revision being tested is not the most recent downloadable revision.
@@ -358,13 +436,14 @@
metadata_record.do_not_test = True
metadata_record.tools_functionally_correct = False
metadata_record.missing_test_components = True
+ repository_status[ 'missing_test_components' ] = missing_test_components
metadata_record.tool_test_results = repository_status
metadata_record.time_last_tested = datetime.utcnow()
app.sa_session.add( metadata_record )
app.sa_session.flush()
stop = time.time()
print '# -------------------------------------------------------------------------------------------'
- print '# Checked %d repositories with %d tools in %d changeset revisions.' % ( len( checked_repository_ids ), tool_count, len( metadata_records_to_check ) )
+ print '# Checked %d repositories with %d tools in %d changeset revisions.' % ( len( checked_repository_ids ), tool_count, records_checked )
print '# %d revisions found with functional tests and test data for all tools.' % valid_revisions
print '# %d revisions found with one or more tools missing functional tests and/or test data.' % invalid_revisions
print '# Found %d tools without functional tests.' % no_tests
https://bitbucket.org/galaxy/galaxy-central/commits/308719efff06/
Changeset: 308719efff06
User: Dave Bouvier
Date: 2013-05-24 20:02:41
Summary: Merge in next-stable.
Affected #: 1 file
diff -r ffa2061034a8266d27c34dc47defb485addf469e -r 308719efff061598955f5b54fae2621c4f902ee9 lib/tool_shed/scripts/check_repositories_for_functional_tests.py
--- a/lib/tool_shed/scripts/check_repositories_for_functional_tests.py
+++ b/lib/tool_shed/scripts/check_repositories_for_functional_tests.py
@@ -117,6 +117,8 @@
and test repositories script to process. If the tested changeset revision does not have a test-data directory, this script will also mark the revision
not to be tested.
+ TODO: Update this dict structure with the recently added components.
+
If any error is encountered, the script will update the repository_metadata.tool_test_results attribute following this structure:
{
"test_environment":
@@ -137,24 +139,62 @@
"tool_id": "The tool ID that was tested",
"tool_version": "The tool version that was tested",
},
- ],
+ ]
"failed_tests":
[
{
"test_id": "The test ID, generated by twill",
"tool_id": "The tool ID that was tested",
"tool_version": "The tool version that was tested",
- "stderr": "The output of the test, or a more detailed description of what was tested and what the error was.",
- "traceback": "The traceback, if any."
+ "stderr": "The output of the test, or a more detailed description of what was tested and what the outcome was."
+ "traceback": "The captured traceback."
},
- ],
+ ]
+ "installation_errors":
+ {
+ 'tool_dependencies':
+ [
+ {
+ 'type': 'Type of tool dependency, e.g. package, set_environment, etc.',
+ 'name': 'Name of the tool dependency.',
+ 'version': 'Version if this is a package, otherwise blank.',
+ 'error_message': 'The error message returned when installation was attempted.',
+ },
+ ],
+ 'repository_dependencies':
+ [
+ {
+ 'tool_shed': 'The tool shed that this repository was installed from.',
+ 'name': 'The name of the repository that failed to install.',
+ 'owner': 'Owner of the failed repository.',
+ 'changeset_revision': 'Changeset revision of the failed repository.',
+ 'error_message': 'The error message that was returned when the repository failed to install.',
+ },
+ ],
+ 'current_repository':
+ [
+ {
+ 'tool_shed': 'The tool shed that this repository was installed from.',
+ 'name': 'The name of the repository that failed to install.',
+ 'owner': 'Owner of the failed repository.',
+ 'changeset_revision': 'Changeset revision of the failed repository.',
+ 'error_message': 'The error message that was returned when the repository failed to install.',
+ },
+ ],
+ {
+ "name": "The name of the repository.",
+ "owner": "The owner of the repository.",
+ "changeset_revision": "The changeset revision of the repository.",
+ "error_message": "The message stored in tool_dependency.error_message."
+ },
+ }
"missing_test_components":
[
{
- "tool_id": "The tool ID that is missing functional test definitions and/or test data.",
- "tool_version": "The version of the tool.",
- "tool_guid": "The guid of the tool.",
- "missing_components": "The components that are missing for this tool to be considered testable."
+             "tool_id": "The tool ID that is missing components.",
+ "tool_version": "The version of the tool."
+ "tool_guid": "The guid of the tool."
+ "missing_components": "Which components are missing, e.g. the test data filename, or the test-data directory."
},
]
}
@@ -167,22 +207,22 @@
no_tools = 0
valid_revisions = 0
invalid_revisions = 0
+ records_checked = 0
# Get the list of metadata records to check for functional tests and test data. Limit this to records that have not been flagged do_not_test,
# since there's no need to check them again if they won't be tested anyway. Also filter out changeset revisions that are not downloadable,
# because it's redundant to test a revision that a user can't install.
- metadata_records_to_check = app.sa_session.query( app.model.RepositoryMetadata ) \
- .filter( and_( app.model.RepositoryMetadata.table.c.downloadable == True,
- app.model.RepositoryMetadata.table.c.includes_tools == True,
- app.model.RepositoryMetadata.table.c.do_not_test == False ) ) \
- .all()
- for metadata_record in metadata_records_to_check:
+ for metadata_record in app.sa_session.query( app.model.RepositoryMetadata ) \
+ .filter( and_( app.model.RepositoryMetadata.table.c.downloadable == True,
+ app.model.RepositoryMetadata.table.c.includes_tools == True,
+ app.model.RepositoryMetadata.table.c.do_not_test == False ) ):
+ records_checked += 1
# Initialize the repository_status dict with the test environment, but leave the test_errors empty.
repository_status = {}
if metadata_record.tool_test_results:
repository_status = metadata_record.tool_test_results
# Clear any old invalid tests for this metadata revision, since this could lead to duplication of invalid test rows,
# or tests incorrectly labeled as invalid.
- repository_status[ 'missing_test_components' ] = []
+ missing_test_components = []
if 'test_environment' in repository_status:
repository_status[ 'test_environment' ] = get_test_environment( repository_status[ 'test_environment' ] )
else:
@@ -295,7 +335,7 @@
# "tool_id": "The tool ID that was tested",
# "tool_version": "The tool version that was tested",
# },
- # ],
+ # ]
# "failed_tests":
# [
# {
@@ -305,14 +345,52 @@
# "stderr": "The output of the test, or a more detailed description of what was tested and what the outcome was."
# "traceback": "The captured traceback."
# },
- # ],
+ # ]
+ # "installation_errors":
+ # {
+ # 'tool_dependencies':
+ # [
+ # {
+ # 'type': 'Type of tool dependency, e.g. package, set_environment, etc.',
+ # 'name': 'Name of the tool dependency.',
+ # 'version': 'Version if this is a package, otherwise blank.',
+ # 'error_message': 'The error message returned when installation was attempted.',
+ # },
+ # ],
+ # 'repository_dependencies':
+ # [
+ # {
+ # 'tool_shed': 'The tool shed that this repository was installed from.',
+ # 'name': 'The name of the repository that failed to install.',
+ # 'owner': 'Owner of the failed repository.',
+ # 'changeset_revision': 'Changeset revision of the failed repository.',
+ # 'error_message': 'The error message that was returned when the repository failed to install.',
+ # },
+ # ],
+ # 'current_repository':
+ # [
+ # {
+ # 'tool_shed': 'The tool shed that this repository was installed from.',
+ # 'name': 'The name of the repository that failed to install.',
+ # 'owner': 'Owner of the failed repository.',
+ # 'changeset_revision': 'Changeset revision of the failed repository.',
+ # 'error_message': 'The error message that was returned when the repository failed to install.',
+ # },
+ # ],
+ # {
+ # "name": "The name of the repository.",
+ # "owner": "The owner of the repository.",
+ # "changeset_revision": "The changeset revision of the repository.",
+ # "error_message": "The message stored in tool_dependency.error_message."
+ # },
+ # }
# "missing_test_components":
# [
# {
- # "tool_id": "The ID of the tool that does not have valid tests.",
+            #             "tool_id": "The tool ID that is missing components.",
# "tool_version": "The version of the tool."
# "tool_guid": "The guid of the tool."
- # "missing_components": "The components that are missing for this tool to be considered testable."
+ # "missing_components": "Which components are missing, e.g. the test data filename, or the test-data directory."
# },
# ]
# }
@@ -321,14 +399,14 @@
# than the list relevant to what it is testing.
# Only append this error dict if it hasn't already been added.
if problem_found:
- if test_errors not in repository_status[ 'missing_test_components' ]:
- repository_status[ 'missing_test_components' ].append( test_errors )
+ if test_errors not in missing_test_components:
+ missing_test_components.append( test_errors )
if tool_has_tests and has_test_files:
testable_revision_found = True
# Remove the cloned repository path. This has to be done after the check for required test files, for obvious reasons.
if os.path.exists( work_dir ):
shutil.rmtree( work_dir )
- if not repository_status[ 'missing_test_components' ]:
+ if not missing_test_components:
valid_revisions += 1
if verbosity >= 1:
print '# All tools have functional tests in changeset revision %s of repository %s owned by %s.' % ( changeset_revision, name, owner )
@@ -337,13 +415,13 @@
if verbosity >= 1:
print '# Some tools have problematic functional tests in changeset revision %s of repository %s owned by %s.' % ( changeset_revision, name, owner )
if verbosity >= 2:
- for invalid_test in repository_status[ 'missing_test_components' ]:
+ for invalid_test in missing_test_components:
if 'missing_components' in invalid_test:
print '# %s' % invalid_test[ 'missing_components' ]
if not info_only:
# If repository_status[ 'test_errors' ] is empty, no issues were found, and we can just update time_last_tested with the platform
# on which this script was run.
- if repository_status[ 'missing_test_components' ]:
+ if missing_test_components:
# If functional test definitions or test data are missing, set do_not_test = True if no tool with valid tests has been
# found in this revision, and:
# a) There are multiple downloadable revisions, and the revision being tested is not the most recent downloadable revision.
@@ -358,13 +436,14 @@
metadata_record.do_not_test = True
metadata_record.tools_functionally_correct = False
metadata_record.missing_test_components = True
+ repository_status[ 'missing_test_components' ] = missing_test_components
metadata_record.tool_test_results = repository_status
metadata_record.time_last_tested = datetime.utcnow()
app.sa_session.add( metadata_record )
app.sa_session.flush()
stop = time.time()
print '# -------------------------------------------------------------------------------------------'
- print '# Checked %d repositories with %d tools in %d changeset revisions.' % ( len( checked_repository_ids ), tool_count, len( metadata_records_to_check ) )
+ print '# Checked %d repositories with %d tools in %d changeset revisions.' % ( len( checked_repository_ids ), tool_count, records_checked )
print '# %d revisions found with functional tests and test data for all tools.' % valid_revisions
print '# %d revisions found with one or more tools missing functional tests and/or test data.' % invalid_revisions
print '# Found %d tools without functional tests.' % no_tests
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
3 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/dfeca47bd70e/
Changeset: dfeca47bd70e
User: dan
Date: 2013-05-24 19:59:26
Summary: Unicode fixes for tool parameters
Affected #: 1 file
diff -r 9a97a7fd23bce7bdc6e7b5faa421b2fba932fd39 -r dfeca47bd70ec4252b2cde76411d1d771dab9dba lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py
+++ b/lib/galaxy/tools/parameters/basic.py
@@ -7,7 +7,7 @@
from galaxy import config, datatypes, util
from galaxy.web import form_builder
from galaxy.util.bunch import Bunch
-from galaxy.util import string_as_bool, sanitize_param
+from galaxy.util import string_as_bool, sanitize_param, unicodify
from sanitize import ToolParameterSanitizer
import validation, dynamic_options
# For BaseURLToolParameter
@@ -112,7 +112,9 @@
def to_string( self, value, app ):
"""Convert a value to a string representation suitable for persisting"""
- return str( value )
+ if not isinstance( value, basestring ):
+ value = str( value )
+ return unicodify( value )
def to_python( self, value, app ):
"""Convert a value created with to_string back to an object representation"""
@@ -144,13 +146,13 @@
Convert a value to a text representation suitable for displaying to
the user
"""
- return value
+ return unicodify( value )
def to_param_dict_string( self, value, other_values={} ):
"""Called via __str__ when used in the Cheetah template"""
if value is None:
value = ""
- else:
+ elif not isinstance( value, basestring ):
value = str( value )
if self.tool is None or self.tool.options.sanitize:
if self.sanitizer:
https://bitbucket.org/galaxy/galaxy-central/commits/12978bbb5ffa/
Changeset: 12978bbb5ffa
Branch: next-stable
User: dan
Date: 2013-05-24 19:59:26
Summary: Unicode fixes for tool parameters
Affected #: 1 file
diff -r d57826b9c6052bd6278631228f8ce09ebfbabb98 -r 12978bbb5ffa91743afdf6aed29355843735f96f lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py
+++ b/lib/galaxy/tools/parameters/basic.py
@@ -7,7 +7,7 @@
from galaxy import config, datatypes, util
from galaxy.web import form_builder
from galaxy.util.bunch import Bunch
-from galaxy.util import string_as_bool, sanitize_param
+from galaxy.util import string_as_bool, sanitize_param, unicodify
from sanitize import ToolParameterSanitizer
import validation, dynamic_options
# For BaseURLToolParameter
@@ -112,7 +112,9 @@
def to_string( self, value, app ):
"""Convert a value to a string representation suitable for persisting"""
- return str( value )
+ if not isinstance( value, basestring ):
+ value = str( value )
+ return unicodify( value )
def to_python( self, value, app ):
"""Convert a value created with to_string back to an object representation"""
@@ -144,13 +146,13 @@
Convert a value to a text representation suitable for displaying to
the user
"""
- return value
+ return unicodify( value )
def to_param_dict_string( self, value, other_values={} ):
"""Called via __str__ when used in the Cheetah template"""
if value is None:
value = ""
- else:
+ elif not isinstance( value, basestring ):
value = str( value )
if self.tool is None or self.tool.options.sanitize:
if self.sanitizer:
https://bitbucket.org/galaxy/galaxy-central/commits/ffa2061034a8/
Changeset: ffa2061034a8
User: dan
Date: 2013-05-24 19:59:49
Summary: merge
Affected #: 0 files
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this message because you have the commit-notification service enabled
and you are the addressed recipient of this email.
1
0
3 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/58de45b27662/
Changeset: 58de45b27662
User: dan
Date: 2013-05-24 19:56:42
Summary: Fix for displaying unicode characters in TextField in form_builder.
Affected #: 1 file
diff -r ca744d4ef4abccc800226e8573b0cb4b3406efb5 -r 58de45b27662ce59994ca6990df1aa8e25f83d53 lib/galaxy/web/form_builder.py
--- a/lib/galaxy/web/form_builder.py
+++ b/lib/galaxy/web/form_builder.py
@@ -34,8 +34,12 @@
self.size = int( size or 10 )
self.value = value or ""
def get_html( self, prefix="", disabled=False ):
+ value = self.value
+ if not isinstance( value, basestring ):
+ value = str( value )
+ value = unicodify( value )
return unicodify( '<input type="text" name="%s%s" size="%d" value="%s"%s>' \
- % ( prefix, self.name, self.size, escape( str( self.value ), quote=True ), self.get_disabled_str( disabled ) ) )
+ % ( prefix, self.name, self.size, escape( value, quote=True ), self.get_disabled_str( disabled ) ) )
def set_size(self, size):
self.size = int( size )
https://bitbucket.org/galaxy/galaxy-central/commits/d57826b9c605/
Changeset: d57826b9c605
Branch: next-stable
User: dan
Date: 2013-05-24 19:56:42
Summary: Fix for displaying unicode characters in TextField in form_builder.
Affected #: 1 file
diff -r 9c138ae94a5ee5b7f58d2ce9ff6a2222744b4b31 -r d57826b9c6052bd6278631228f8ce09ebfbabb98 lib/galaxy/web/form_builder.py
--- a/lib/galaxy/web/form_builder.py
+++ b/lib/galaxy/web/form_builder.py
@@ -34,8 +34,12 @@
self.size = int( size or 10 )
self.value = value or ""
def get_html( self, prefix="", disabled=False ):
+ value = self.value
+ if not isinstance( value, basestring ):
+ value = str( value )
+ value = unicodify( value )
return unicodify( '<input type="text" name="%s%s" size="%d" value="%s"%s>' \
- % ( prefix, self.name, self.size, escape( str( self.value ), quote=True ), self.get_disabled_str( disabled ) ) )
+ % ( prefix, self.name, self.size, escape( value, quote=True ), self.get_disabled_str( disabled ) ) )
def set_size(self, size):
self.size = int( size )
https://bitbucket.org/galaxy/galaxy-central/commits/9a97a7fd23bc/
Changeset: 9a97a7fd23bc
User: dan
Date: 2013-05-24 19:57:02
Summary: merge
Affected #: 0 files
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this message because you have the commit-notification service enabled
and you are the addressed recipient of this email.
1
0
3 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/e9613543d7e0/
Changeset: e9613543d7e0
User: dan
Date: 2013-05-24 19:43:25
Summary: Fix for displaying unicode characters in select checkboxes in form_builder.
Affected #: 1 file
diff -r 60761f32895fa480de3f5c9267c4be01992979d0 -r e9613543d7e07b9f3695b284d80a78d032b0bf3a lib/galaxy/web/form_builder.py
--- a/lib/galaxy/web/form_builder.py
+++ b/lib/galaxy/web/form_builder.py
@@ -300,7 +300,12 @@
rval.append ( '<div class="checkUncheckAllPlaceholder" checkbox_name="%s%s"></div>' % ( prefix, self.name ) ) #placeholder for the insertion of the Select All/Unselect All buttons
for text, value, selected in self.options:
style = ""
- escaped_value = escape( str( value ), quote=True )
+ if not isinstance( value, basestring ):
+ value = str( value )
+ if not isinstance( text, basestring ):
+ text = str( text )
+ text = unicodify( text )
+ escaped_value = escape( unicodify( value ), quote=True )
uniq_id = "%s%s|%s" % (prefix, self.name, escaped_value)
if len(self.options) > 2 and ctr % 2 == 1:
style = " class=\"odd_row\""
@@ -308,7 +313,7 @@
if selected:
selected_text = " checked='checked'"
rval.append( '<div%s><input type="checkbox" name="%s%s" value="%s" id="%s"%s%s><label class="inline" for="%s">%s</label></div>' % \
- ( style, prefix, self.name, escaped_value, uniq_id, selected_text, self.get_disabled_str( disabled ), uniq_id, escape( str( text ), quote=True ) ) )
+ ( style, prefix, self.name, escaped_value, uniq_id, selected_text, self.get_disabled_str( disabled ), uniq_id, escape( text, quote=True ) ) )
ctr += 1
return unicodify( "\n".join( rval ) )
def get_html_radio( self, prefix="", disabled=False ):
https://bitbucket.org/galaxy/galaxy-central/commits/9c138ae94a5e/
Changeset: 9c138ae94a5e
Branch: next-stable
User: dan
Date: 2013-05-24 19:43:25
Summary: Fix for displaying unicode characters in select checkboxes in form_builder.
Affected #: 1 file
diff -r 733ce3f5d61fc82aedcbee6955dbdb4a41d6ca90 -r 9c138ae94a5ee5b7f58d2ce9ff6a2222744b4b31 lib/galaxy/web/form_builder.py
--- a/lib/galaxy/web/form_builder.py
+++ b/lib/galaxy/web/form_builder.py
@@ -300,7 +300,12 @@
rval.append ( '<div class="checkUncheckAllPlaceholder" checkbox_name="%s%s"></div>' % ( prefix, self.name ) ) #placeholder for the insertion of the Select All/Unselect All buttons
for text, value, selected in self.options:
style = ""
- escaped_value = escape( str( value ), quote=True )
+ if not isinstance( value, basestring ):
+ value = str( value )
+ if not isinstance( text, basestring ):
+ text = str( text )
+ text = unicodify( text )
+ escaped_value = escape( unicodify( value ), quote=True )
uniq_id = "%s%s|%s" % (prefix, self.name, escaped_value)
if len(self.options) > 2 and ctr % 2 == 1:
style = " class=\"odd_row\""
@@ -308,7 +313,7 @@
if selected:
selected_text = " checked='checked'"
rval.append( '<div%s><input type="checkbox" name="%s%s" value="%s" id="%s"%s%s><label class="inline" for="%s">%s</label></div>' % \
- ( style, prefix, self.name, escaped_value, uniq_id, selected_text, self.get_disabled_str( disabled ), uniq_id, escape( str( text ), quote=True ) ) )
+ ( style, prefix, self.name, escaped_value, uniq_id, selected_text, self.get_disabled_str( disabled ), uniq_id, escape( text, quote=True ) ) )
ctr += 1
return unicodify( "\n".join( rval ) )
def get_html_radio( self, prefix="", disabled=False ):
https://bitbucket.org/galaxy/galaxy-central/commits/ca744d4ef4ab/
Changeset: ca744d4ef4ab
User: dan
Date: 2013-05-24 19:43:58
Summary: merge
Affected #: 0 files
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this message because you have the commit-notification service enabled
and you are the addressed recipient of this email.
1
0
3 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/304d6d2712ed/
Changeset: 304d6d2712ed
User: dan
Date: 2013-05-24 19:36:17
Summary: Fix for displaying unicode characters in select menus in form_builder.
Affected #: 1 file
diff -r 975f94138a55ef728f394c014c02ee54ef4079b1 -r 304d6d2712ed20e68eb54a10a9ff9e6c98adaf85 lib/galaxy/web/form_builder.py
--- a/lib/galaxy/web/form_builder.py
+++ b/lib/galaxy/web/form_builder.py
@@ -351,11 +351,17 @@
if selected:
selected_text = " selected"
last_selected_value = value
+ if not isinstance( last_selected_value, basestring ):
+ last_selected_value = str( last_selected_value )
else:
selected_text = ""
- rval.append( '<option value="%s"%s>%s</option>' % ( escape( str( value ), quote=True ), selected_text, escape( str( text ), quote=True ) ) )
+ if not isinstance( value, basestring ):
+ value = str( value )
+ if not isinstance( text, basestring ):
+ text = str( text )
+ rval.append( '<option value="%s"%s>%s</option>' % ( escape( unicodify( value ), quote=True ), selected_text, escape( unicodify( text ), quote=True ) ) )
if last_selected_value:
- last_selected_value = ' last_selected_value="%s"' % escape( str( last_selected_value ), quote=True )
+ last_selected_value = ' last_selected_value="%s"' % escape( unicodify( last_selected_value ), quote=True )
rval.insert( 0, '<select name="%s%s"%s%s%s%s%s>' % \
( prefix, self.name, multiple, size, self.refresh_on_change_text, last_selected_value, self.get_disabled_str( disabled ) ) )
rval.append( '</select>' )
https://bitbucket.org/galaxy/galaxy-central/commits/733ce3f5d61f/
Changeset: 733ce3f5d61f
Branch: next-stable
User: dan
Date: 2013-05-24 19:36:17
Summary: Fix for displaying unicode characters in select menus in form_builder.
Affected #: 1 file
diff -r ec277b165b13131d02998c3cbab3b3e664141a58 -r 733ce3f5d61fc82aedcbee6955dbdb4a41d6ca90 lib/galaxy/web/form_builder.py
--- a/lib/galaxy/web/form_builder.py
+++ b/lib/galaxy/web/form_builder.py
@@ -351,11 +351,17 @@
if selected:
selected_text = " selected"
last_selected_value = value
+ if not isinstance( last_selected_value, basestring ):
+ last_selected_value = str( last_selected_value )
else:
selected_text = ""
- rval.append( '<option value="%s"%s>%s</option>' % ( escape( str( value ), quote=True ), selected_text, escape( str( text ), quote=True ) ) )
+ if not isinstance( value, basestring ):
+ value = str( value )
+ if not isinstance( text, basestring ):
+ text = str( text )
+ rval.append( '<option value="%s"%s>%s</option>' % ( escape( unicodify( value ), quote=True ), selected_text, escape( unicodify( text ), quote=True ) ) )
if last_selected_value:
- last_selected_value = ' last_selected_value="%s"' % escape( str( last_selected_value ), quote=True )
+ last_selected_value = ' last_selected_value="%s"' % escape( unicodify( last_selected_value ), quote=True )
rval.insert( 0, '<select name="%s%s"%s%s%s%s%s>' % \
( prefix, self.name, multiple, size, self.refresh_on_change_text, last_selected_value, self.get_disabled_str( disabled ) ) )
rval.append( '</select>' )
https://bitbucket.org/galaxy/galaxy-central/commits/60761f32895f/
Changeset: 60761f32895f
User: dan
Date: 2013-05-24 19:36:44
Summary: merge
Affected #: 0 files
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0