commit/galaxy-central: greg: Enhance tool shed repository installation process to automatically reset the attributes of a previously installed repository (that is no longer in the installed state) so that it can be installed. This streamlines the installation process when installing multiple repositories so that one or more of them do not need to be uninstalled.
by commits-noreply@bitbucket.org
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/b31177004e80/
Changeset: b31177004e80
User: greg
Date: 2013-06-21 20:03:03
Summary: Enhance tool shed repository installation process to automatically reset the attributes of a previously installed repository (that is no longer in the installed state) so that it can be installed. This streamlines the installation process when installing multiple repositories so that one or more of them do not need to be uninstalled.
Affected #: 3 files
diff -r ecda22758c813923fee40c2a0b07c84427d24bd9 -r b31177004e80656b8efdb553291244b2a3e53d6f lib/tool_shed/util/repository_dependency_util.py
--- a/lib/tool_shed/util/repository_dependency_util.py
+++ b/lib/tool_shed/util/repository_dependency_util.py
@@ -147,41 +147,48 @@
dist_to_shed = installed_tool_shed_repository.dist_to_shed
elif installed_tool_shed_repository.status in [ trans.model.ToolShedRepository.installation_status.DEACTIVATED ]:
# The current tool shed repository is deactivated, so updating its database record is not necessary - just activate it.
+ log.debug( "Reactivating deactivated tool_shed_repository '%s'." % str( installed_tool_shed_repository.name ) )
common_install_util.activate_repository( trans, installed_tool_shed_repository )
can_update = False
else:
# The tool shed repository currently being processed is already installed or is in the process of being installed, so it's record
# in the database cannot be updated.
+ if installed_tool_shed_repository.status in [ trans.model.ToolShedRepository.installation_status.INSTALLED ]:
+ log.debug( "Skipping installation of tool_shed_repository '%s' because it is already installed." % \
+ str( installed_tool_shed_repository.name ) )
+ else:
+ log.debug( "Skipping installation of tool_shed_repository '%s' because its installation status is '%s'." % \
+ ( str( installed_tool_shed_repository.name ), str( installed_tool_shed_repository.status ) ) )
can_update = False
else:
# This block will be reached only if reinstalling is True, install_repository_dependencies is False and is_in_repo_info_dicts is False.
# The tool shed repository currently being processed must be a repository dependency that the user elected to not install, so its
# record in the database cannot be updated.
+ debug_msg = "Skipping installation of tool_shed_repository '%s' because it is likely a " % str( installed_tool_shed_repository.name )
+ debug_msg += "repository dependency that was elected to not be installed."
+ log.debug( debug_msg )
can_update = False
else:
# This block will be reached only if reinstalling is False and install_repository_dependencies is False. This implies that the tool shed
# repository currently being processed has already been installed.
- if len( all_repo_info_dicts ) == 1:
- # If only a single repository is being installed, return an informative message to the user.
- message += "Revision <b>%s</b> of tool shed repository <b>%s</b> owned by <b>%s</b> " % ( changeset_revision, name, repository_owner )
- if installed_changeset_revision != changeset_revision:
- message += "was previously installed using changeset revision <b>%s</b>. " % installed_changeset_revision
- else:
- message += "was previously installed. "
- if installed_tool_shed_repository.uninstalled:
- message += "The repository has been uninstalled, however, so reinstall the original repository instead of installing it again. "
- elif installed_tool_shed_repository.deleted:
- message += "The repository has been deactivated, however, so activate the original repository instead of installing it again. "
- if installed_changeset_revision != changeset_revision:
- message += "You can get the latest updates for the repository using the <b>Get updates</b> option from the repository's "
- message += "<b>Repository Actions</b> pop-up menu. "
- created_or_updated_tool_shed_repositories.append( installed_tool_shed_repository )
- tool_panel_section_keys.append( tool_panel_section_key )
- return created_or_updated_tool_shed_repositories, tool_panel_section_keys, all_repo_info_dicts, filtered_repo_info_dicts, message
+ if installed_tool_shed_repository.status in [ trans.model.ToolShedRepository.installation_status.INSTALLED ]:
+ # Since the repository currently being processed is already in the INSTALLED state, skip it and process the next repository in the
+ # list if there is one.
+ log.debug( "Skipping installation of tool_shed_repository '%s' because its installation status is '%s'." % \
+ ( str( installed_tool_shed_repository.name ), str( installed_tool_shed_repository.status ) ) )
+ can_update = False
else:
- # We're in the process of installing multiple tool shed repositories into Galaxy. Since the repository currently being processed
- # has already been installed, skip it and process the next repository in the list.
- can_update = False
+ # The repository currently being processed is in some state other than INSTALLED, so reset it for installation.
+ debug_msg = "Resetting tool_shed_repository '%s' for installation.\n" % str( installed_tool_shed_repository.name )
+ debug_msg += "The current state of the tool_shed_repository is:\n"
+ debug_msg += "deleted: %s\n" % str( installed_tool_shed_repository.deleted )
+ debug_msg += "update_available: %s\n" % str( installed_tool_shed_repository.update_available )
+ debug_msg += "uninstalled: %s\n" % str( installed_tool_shed_repository.uninstalled )
+ debug_msg += "status: %s\n" % str( installed_tool_shed_repository.status )
+ debug_msg += "error_message: %s\n" % str( installed_tool_shed_repository.error_message )
+ log.debug( debug_msg )
+ suc.reset_previously_installed_repository( trans, installed_tool_shed_repository )
+ can_update = True
else:
# A tool shed repository is being installed into a Galaxy instance for the first time, or we're attempting to install it or reinstall it resulted
# in an error. In the latter case, the repository record in the database has no metadata and its status has been set to 'New'. In either case,
diff -r ecda22758c813923fee40c2a0b07c84427d24bd9 -r b31177004e80656b8efdb553291244b2a3e53d6f lib/tool_shed/util/shed_util_common.py
--- a/lib/tool_shed/util/shed_util_common.py
+++ b/lib/tool_shed/util/shed_util_common.py
@@ -1182,6 +1182,19 @@
return tool_shed_repository, previous_changeset_revision
return None, None
+def reset_previously_installed_repository( trans, repository ):
+ """
+ Reset the attributes of a tool_shed_repository that was previously installed. The repository will be in some state other than with a
+ status of INSTALLED, so all attributes will be set to the default NEW state. This will enable the repository to be freshly installed.
+ """
+ repository.deleted = False
+ repository.update_available = False
+ repository.uninstalled = False
+ repository.status = trans.model.ToolShedRepository.installation_status.NEW
+ repository.error_message = None
+ trans.sa_session.add( repository )
+ trans.sa_session.flush()
+
def reversed_lower_upper_bounded_changelog( repo, excluded_lower_bounds_changeset_revision, included_upper_bounds_changeset_revision ):
"""
Return a reversed list of changesets in the repository changelog after the excluded_lower_bounds_changeset_revision, but up to and
diff -r ecda22758c813923fee40c2a0b07c84427d24bd9 -r b31177004e80656b8efdb553291244b2a3e53d6f test/tool_shed/functional/test_1000_install_basic_repository.py
--- a/test/tool_shed/functional/test_1000_install_basic_repository.py
+++ b/test/tool_shed/functional/test_1000_install_basic_repository.py
@@ -106,20 +106,21 @@
self.verify_tool_metadata_for_installed_repository( installed_repository )
def test_0030_install_filtering_repository_again( self ):
- '''Attempt to install the already installed filtering repository, and check for the resulting error message.'''
+ '''Attempt to install the already installed filtering repository.'''
installed_repository = test_db_util.get_installed_repository_by_name_owner( 'filtering_0000', common.test_user_1_name )
- post_submit_strings_displayed = [ installed_repository.name,
- installed_repository.owner,
- installed_repository.installed_changeset_revision,
- 'was previously installed',
- 'to manage the repository' ]
+ # The page displayed after installation is the ajaxian "Monitor installing tool shed repositories" page. Since the filtering
+ # repository was already installed, nothing will be in the process of being installed, so the grid will display 'No Items'.
+ post_submit_strings_displayed = [ 'No Items' ]
self.install_repository( 'filtering_0000',
common.test_user_1_name,
'Test 0000 Basic Repository Features 1',
post_submit_strings_displayed=post_submit_strings_displayed )
strings_displayed = [ 'filtering_0000',
- 'user1',
+ "Galaxy's filtering tool",
+ 'user1',
+ self.url.replace( 'http://', '' ),
installed_repository.installed_changeset_revision ]
+ self.display_installed_repository_manage_page( installed_repository, strings_displayed=strings_displayed )
self.display_galaxy_browse_repositories_page( strings_displayed=strings_displayed )
def test_0035_verify_installed_repository_metadata( self ):
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
9 years, 7 months
commit/galaxy-central: jgoecks: Add semicolon to the list of delimiters in the 'Convert delimiters to whitespace tool'.
by commits-noreply@bitbucket.org
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/ecda22758c81/
Changeset: ecda22758c81
User: jgoecks
Date: 2013-06-21 17:59:13
Summary: Add semicolon to the list of delimiters in the 'Convert delimiters to whitespace tool'.
Affected #: 2 files
diff -r 01dd0d00de0112b53c05eac5fe3ffbb6ce1526b1 -r ecda22758c813923fee40c2a0b07c84427d24bd9 tools/filters/convert_characters.py
--- a/tools/filters/convert_characters.py
+++ b/tools/filters/convert_characters.py
@@ -24,7 +24,17 @@
except:
stop_err("Output file cannot be opened for writing.")
- char_dict = {'T':'\t','s':'\s','Dt':'\.','C':',','D':'-','U':'_','P':'\|','Co':':'}
+ char_dict = {
+ 'T': '\t',
+ 's': '\s',
+ 'Dt': '\.',
+ 'C': ',',
+ 'D': '-',
+ 'U': '_',
+ 'P': '\|',
+ 'Co': ':',
+ 'Sc': ';'
+ }
from_ch = char_dict[from_char] + '+' #making an RE to match 1 or more occurrences.
skipped = 0
diff -r 01dd0d00de0112b53c05eac5fe3ffbb6ce1526b1 -r ecda22758c813923fee40c2a0b07c84427d24bd9 tools/filters/convert_characters.xml
--- a/tools/filters/convert_characters.xml
+++ b/tools/filters/convert_characters.xml
@@ -12,6 +12,7 @@
<option value="U">Underscores</option><option value="P">Pipes</option><option value="Co">Colons</option>
+ <option value="Sc">Semicolons</option></param><param format="txt" name="input" type="data" label="in Dataset"/></inputs>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
9 years, 7 months
commit/galaxy-central: greg: Fix for generating information about a repository being installed that has no defined repository dependencies.
by commits-noreply@bitbucket.org
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/01dd0d00de01/
Changeset: 01dd0d00de01
User: greg
Date: 2013-06-21 17:56:20
Summary: Fix for generating information about a repository being installed that has no defined repository dependencies.
Affected #: 1 file
diff -r b5887125a2164ef4552464031d78dd783f08f06b -r 01dd0d00de0112b53c05eac5fe3ffbb6ce1526b1 lib/tool_shed/util/common_install_util.py
--- a/lib/tool_shed/util/common_install_util.py
+++ b/lib/tool_shed/util/common_install_util.py
@@ -74,6 +74,7 @@
description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, installed_td = \
suc.get_repo_info_tuple_contents( repo_info_tuple )
if repository_dependencies:
+ # We have a repository with one or more defined repository dependencies.
missing_td = {}
# Handle the scenario where a repository was installed, then uninstalled and an error occurred during the re-installation process.
# In this case, a record for the repository will exist in the database with the status of 'New'.
@@ -125,9 +126,13 @@
if td_key not in missing_td:
missing_td[ td_key ] = td_dict
else:
- has_repository_dependencies = False
- includes_tools = False
- includes_tools_for_display_in_tool_panel = False
+ # We have a single repository with no defined repository dependencies.
+ all_repo_info_dict = get_required_repo_info_dicts( trans, tool_shed_url, util.listify( repo_info_dict ) )
+ has_repository_dependencies = all_repo_info_dict.get( 'has_repository_dependencies', False )
+ includes_tools_for_display_in_tool_panel = all_repo_info_dict.get( 'includes_tools_for_display_in_tool_panel', False )
+ includes_tool_dependencies = all_repo_info_dict.get( 'includes_tool_dependencies', False )
+ includes_tools = all_repo_info_dict.get( 'includes_tools', False )
+ required_repo_info_dicts = all_repo_info_dict.get( 'all_repo_info_dicts', [] )
installed_rd = None
missing_rd = None
missing_td = None
@@ -293,6 +298,10 @@
for components_list in val:
if components_list not in required_repository_tups:
required_repository_tups.append( components_list )
+ else:
+ # We have a single repository with no dependencies.
+ components_list = [ tool_shed_url, repository_name, repository_owner, changeset_revision, 'False' ]
+ required_repository_tups.append( components_list )
if required_repository_tups:
# The value of required_repository_tups is a list of tuples, so we need to encode it.
encoded_required_repository_tups = []
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
9 years, 7 months
commit/galaxy-central: carlfeberhard: api/library_contents.create: encode ldda api ids when copying from hda
by commits-noreply@bitbucket.org
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/b5887125a216/
Changeset: b5887125a216
User: carlfeberhard
Date: 2013-06-21 17:28:22
Summary: api/library_contents.create: encode ldda api ids when copying from hda
Affected #: 1 file
diff -r afe5cf0ab4c084ec427ffaa3824bfbef973ce9db -r b5887125a2164ef4552464031d78dd783f08f06b lib/galaxy/webapps/galaxy/api/library_contents.py
--- a/lib/galaxy/webapps/galaxy/api/library_contents.py
+++ b/lib/galaxy/webapps/galaxy/api/library_contents.py
@@ -220,7 +220,8 @@
return { 'error' : 'user has no permission to add to library folder (%s)' %( folder_id ) }
ldda = self.copy_hda_to_library_folder( trans, hda, folder, ldda_message=ldda_message )
- rval = ldda.get_api_value()
+ ldda_dict = ldda.get_api_value()
+ rval = trans.security.encode_dict_ids( ldda_dict )
except Exception, exc:
#TODO: grrr...
@@ -234,7 +235,6 @@
return rval
-
@web.expose_api
def update( self, trans, id, library_id, payload, **kwd ):
"""
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
9 years, 7 months
commit/galaxy-central: dannon: Add package requirements tag to bam_to_bigwig converter to support galaxy dependency management installations (cloud)
by commits-noreply@bitbucket.org
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/afe5cf0ab4c0/
Changeset: afe5cf0ab4c0
User: dannon
Date: 2013-06-21 17:07:47
Summary: Add package requirements tag to bam_to_bigwig converter to support galaxy dependency management installations (cloud)
Affected #: 1 file
diff -r 41fc6d8c37db638d8379536639fae02e6e004e1d -r afe5cf0ab4c084ec427ffaa3824bfbef973ce9db lib/galaxy/datatypes/converters/bam_to_bigwig_converter.xml
--- a/lib/galaxy/datatypes/converters/bam_to_bigwig_converter.xml
+++ b/lib/galaxy/datatypes/converters/bam_to_bigwig_converter.xml
@@ -1,5 +1,9 @@
<tool id="CONVERTER_bam_to_bigwig_0" name="Convert BAM to BigWig" version="1.0.0" hidden="true"><!-- <description>__NOT_USED_CURRENTLY_FOR_CONVERTERS__</description> -->
+ <requirements>
+ <requirement type="package">ucsc_tools</requirement>
+ <requirement type="package">bedtools</requirement>
+ </requirements><command>
bedtools genomecov -bg -split -ibam $input -g $chromInfo
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
9 years, 7 months
commit/galaxy-central: greg: Use constants in the tool shed's metadata utility.
by commits-noreply@bitbucket.org
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/41fc6d8c37db/
Changeset: 41fc6d8c37db
User: greg
Date: 2013-06-21 15:44:55
Summary: Use constants in the tool shed's metadata utility.
Affected #: 1 file
diff -r 3180391de8912dc275a7babfca54e25dfe8a75ee -r 41fc6d8c37db638d8379536639fae02e6e004e1d lib/tool_shed/util/metadata_util.py
--- a/lib/tool_shed/util/metadata_util.py
+++ b/lib/tool_shed/util/metadata_util.py
@@ -28,7 +28,14 @@
log = logging.getLogger( __name__ )
-REPOSITORY_DATA_MANAGER_CONFIG_FILENAME = "data_manager_conf.xml"
+# Repository metadata comparisons for changeset revisions.
+EQUAL = 'equal'
+NO_METADATA = 'no metadata'
+NOT_EQUAL_AND_NOT_SUBSET = 'not equal and not subset'
+SUBSET = 'subset'
+SUBSET_VALUES = [ EQUAL, SUBSET ]
+
+REPOSITORY_DATA_MANAGER_CONFIG_FILENAME = 'data_manager_conf.xml'
NOT_TOOL_CONFIGS = [ 'datatypes_conf.xml', 'repository_dependencies.xml', 'tool_dependencies.xml', REPOSITORY_DATA_MANAGER_CONFIG_FILENAME ]
def add_tool_versions( trans, id, repository_metadata, changeset_revisions ):
@@ -62,7 +69,7 @@
"""Compare the contents of two changeset revisions to determine if a new repository metadata revision should be created."""
# The metadata associated with ancestor_changeset_revision is ancestor_metadata_dict. This changeset_revision is an ancestor of
# current_changeset_revision which is associated with current_metadata_dict. A new repository_metadata record will be created only
- # when this method returns the string 'not equal and not subset'.
+ # when this method returns the constant value NOT_EQUAL_AND_NOT_SUBSET.
ancestor_datatypes = ancestor_metadata_dict.get( 'datatypes', [] )
ancestor_tools = ancestor_metadata_dict.get( 'tools', [] )
ancestor_guids = [ tool_dict[ 'guid' ] for tool_dict in ancestor_tools ]
@@ -93,7 +100,7 @@
no_workflows = not ancestor_workflows and not current_workflows
no_data_manager = not ancestor_data_manager and not current_data_manager
if no_datatypes and no_readme_files and no_repository_dependencies and no_tool_dependencies and no_tools and no_workflows and no_data_manager:
- return 'no metadata'
+ return NO_METADATA
# Uncomment the following if we decide that README files should affect how installable repository revisions are defined. See the NOTE in the
# compare_readme_files() method.
# readme_file_comparision = compare_readme_files( ancestor_readme_files, current_readme_files )
@@ -103,22 +110,20 @@
datatype_comparison = compare_datatypes( ancestor_datatypes, current_datatypes )
data_manager_comparison = compare_data_manager( ancestor_data_manager, current_data_manager )
# Handle case where all metadata is the same.
- # TODO: these values, ('equal', etc), should be abstracted out to constants
if ancestor_guids == current_guids and \
- repository_dependency_comparison == 'equal' and \
- tool_dependency_comparison == 'equal' and \
- workflow_comparison == 'equal' and \
- datatype_comparison == 'equal' and \
- data_manager_comparison == 'equal':
- return 'equal'
+ repository_dependency_comparison == EQUAL and \
+ tool_dependency_comparison == EQUAL and \
+ workflow_comparison == EQUAL and \
+ datatype_comparison == EQUAL and \
+ data_manager_comparison == EQUAL:
+ return EQUAL
# Handle case where ancestor metadata is a subset of current metadata.
- # readme_file_is_subset = readme_file_comparision in [ 'equal', 'subset' ]
- # TODO: this list [ 'equal', 'subset' ] should be created once
- repository_dependency_is_subset = repository_dependency_comparison in [ 'equal', 'subset' ]
- tool_dependency_is_subset = tool_dependency_comparison in [ 'equal', 'subset' ]
- workflow_dependency_is_subset = workflow_comparison in [ 'equal', 'subset' ]
- datatype_is_subset = datatype_comparison in [ 'equal', 'subset' ]
- datamanager_is_subset = data_manager_comparison in [ 'equal', 'subset' ]
+ # readme_file_is_subset = readme_file_comparision in [ EQUAL, SUBSET ]
+ repository_dependency_is_subset = repository_dependency_comparison in SUBSET_VALUES
+ tool_dependency_is_subset = tool_dependency_comparison in SUBSET_VALUES
+ workflow_dependency_is_subset = workflow_comparison in SUBSET_VALUES
+ datatype_is_subset = datatype_comparison in SUBSET_VALUES
+ datamanager_is_subset = data_manager_comparison in SUBSET_VALUES
if repository_dependency_is_subset and tool_dependency_is_subset and workflow_dependency_is_subset and datatype_is_subset and datamanager_is_subset:
is_subset = True
for guid in ancestor_guids:
@@ -126,8 +131,8 @@
is_subset = False
break
if is_subset:
- return 'subset'
- return 'not equal and not subset'
+ return SUBSET
+ return NOT_EQUAL_AND_NOT_SUBSET
def compare_data_manager( ancestor_metadata, current_metadata ):
"""Determine if ancestor_metadata is the same as or a subset of current_metadata for data_managers."""
@@ -140,9 +145,9 @@
# use set comparisons
if ancestor_metadata.issubset( current_metadata ):
if ancestor_metadata == current_metadata:
- return 'equal'
- return 'subset'
- return 'not equal and not subset'
+ return EQUAL
+ return SUBSET
+ return NOT_EQUAL_AND_NOT_SUBSET
def compare_datatypes( ancestor_datatypes, current_datatypes ):
"""Determine if ancestor_datatypes is the same as or a subset of current_datatypes."""
@@ -161,12 +166,12 @@
found_in_current = True
break
if not found_in_current:
- return 'not equal and not subset'
+ return NOT_EQUAL_AND_NOT_SUBSET
if len( ancestor_datatypes ) == len( current_datatypes ):
- return 'equal'
+ return EQUAL
else:
- return 'subset'
- return 'not equal and not subset'
+ return SUBSET
+ return NOT_EQUAL_AND_NOT_SUBSET
def compare_readme_files( ancestor_readme_files, current_readme_files ):
"""Determine if ancestor_readme_files is equal to or a subset of current_readme_files."""
@@ -181,12 +186,12 @@
if len( ancestor_readme_files ) <= len( current_readme_files ):
for ancestor_readme_file in ancestor_readme_files:
if ancestor_readme_file not in current_readme_files:
- return 'not equal and not subset'
+ return NOT_EQUAL_AND_NOT_SUBSET
if len( ancestor_readme_files ) == len( current_readme_files ):
- return 'equal'
+ return EQUAL
else:
- return 'subset'
- return 'not equal and not subset'
+ return SUBSET
+ return NOT_EQUAL_AND_NOT_SUBSET
def compare_repository_dependencies( ancestor_repository_dependencies, current_repository_dependencies ):
"""Determine if ancestor_repository_dependencies is the same as or a subset of current_repository_dependencies."""
@@ -206,12 +211,12 @@
found_in_current = True
break
if not found_in_current:
- return 'not equal and not subset'
+ return NOT_EQUAL_AND_NOT_SUBSET
if len( ancestor_repository_dependencies ) == len( current_repository_dependencies ):
- return 'equal'
+ return EQUAL
else:
- return 'subset'
- return 'not equal and not subset'
+ return SUBSET
+ return NOT_EQUAL_AND_NOT_SUBSET
def compare_tool_dependencies( ancestor_tool_dependencies, current_tool_dependencies ):
"""Determine if ancestor_tool_dependencies is the same as or a subset of current_tool_dependencies."""
@@ -225,13 +230,13 @@
# shouldn't be generated.
continue
else:
- return 'not equal and not subset'
+ return NOT_EQUAL_AND_NOT_SUBSET
# At this point we know that ancestor_tool_dependencies is at least a subset of current_tool_dependencies.
if len( ancestor_tool_dependencies ) == len( current_tool_dependencies ):
- return 'equal'
+ return EQUAL
else:
- return 'subset'
- return 'not equal and not subset'
+ return SUBSET
+ return NOT_EQUAL_AND_NOT_SUBSET
def compare_workflows( ancestor_workflows, current_workflows ):
"""Determine if ancestor_workflows is the same as current_workflows or if ancestor_workflows is a subset of current_workflows."""
@@ -251,12 +256,12 @@
found_in_current = True
break
if not found_in_current:
- return 'not equal and not subset'
+ return NOT_EQUAL_AND_NOT_SUBSET
if len( ancestor_workflows ) == len( current_workflows ):
- return 'equal'
+ return EQUAL
else:
- return 'subset'
- return 'not equal and not subset'
+ return SUBSET
+ return NOT_EQUAL_AND_NOT_SUBSET
def create_or_update_repository_metadata( trans, id, repository, changeset_revision, metadata_dict ):
"""Create or update a repository_metadatqa record in the tool shed."""
@@ -1212,7 +1217,7 @@
ancestor_datatypes = metadata[ 'datatypes' ]
# The saved metadata must be a subset of the new metadata.
datatype_comparison = compare_datatypes( ancestor_datatypes, current_datatypes )
- if datatype_comparison == 'not equal and not subset':
+ if datatype_comparison == NOT_EQUAL_AND_NOT_SUBSET:
return True
else:
return False
@@ -1265,7 +1270,7 @@
ancestor_readme_files = metadata[ 'readme_files' ]
# The saved metadata must be a subset of the new metadata.
readme_file_comparison = compare_readme_files( ancestor_readme_files, current_readme_files )
- if readme_file_comparison == 'not equal and not subset':
+ if readme_file_comparison == NOT_EQUAL_AND_NOT_SUBSET:
return True
else:
return False
@@ -1610,18 +1615,18 @@
metadata_dict = current_metadata_dict
if ancestor_changeset_revision:
# Compare metadata from ancestor and current. The value of comparison will be one of:
- # 'no metadata' - no metadata for either ancestor or current, so continue from current
- # 'equal' - ancestor metadata is equivalent to current metadata, so continue from current
- # 'subset' - ancestor metadata is a subset of current metadata, so continue from current
- # 'not equal and not subset' - ancestor metadata is neither equal to nor a subset of current metadata, so persist ancestor metadata.
+ # NO_METADATA - no metadata for either ancestor or current, so continue from current
+ # EQUAL - ancestor metadata is equivalent to current metadata, so continue from current
+ # SUBSET - ancestor metadata is a subset of current metadata, so continue from current
+ # NOT_EQUAL_AND_NOT_SUBSET - ancestor metadata is neither equal to nor a subset of current metadata, so persist ancestor metadata.
comparison = compare_changeset_revisions( ancestor_changeset_revision,
ancestor_metadata_dict,
current_changeset_revision,
current_metadata_dict )
- if comparison in [ 'no metadata', 'equal', 'subset' ]:
+ if comparison in [ NO_METADATA, EQUAL, SUBSET ]:
ancestor_changeset_revision = current_changeset_revision
ancestor_metadata_dict = current_metadata_dict
- elif comparison == 'not equal and not subset':
+ elif comparison == NOT_EQUAL_AND_NOT_SUBSET:
metadata_changeset_revision = ancestor_changeset_revision
metadata_dict = ancestor_metadata_dict
repository_metadata = create_or_update_repository_metadata( trans, id, repository, metadata_changeset_revision, metadata_dict )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
9 years, 7 months
commit/galaxy-central: 2 new changesets
by commits-noreply@bitbucket.org
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/3180391de891/
Changeset: 3180391de891
User: jgoecks
Date: 2013-06-21 15:25:03
Summary: Remove unneeded webapp parameter.
Affected #: 1 file
diff -r 6fc05699ebafaf8acaae58f281bd576bebcfcd90 -r 3180391de8912dc275a7babfca54e25dfe8a75ee lib/galaxy/webapps/galaxy/controllers/workflow.py
--- a/lib/galaxy/webapps/galaxy/controllers/workflow.py
+++ b/lib/galaxy/webapps/galaxy/controllers/workflow.py
@@ -98,7 +98,7 @@
grids.GridOperation( "Import",
condition=( lambda item: not item.deleted ),
allow_multiple=False,
- url_args=dict( webapp="galaxy", action="imp" ) ),
+ url_args=dict( action="imp" ) ),
grids.GridOperation( "Save as File",
condition=( lambda item: not item.deleted ),
allow_multiple=False,
https://bitbucket.org/galaxy/galaxy-central/commits/10810850afaa/
Changeset: 10810850afaa
Branch: list_published_export
User: jgoecks
Date: 2013-06-21 15:27:35
Summary: Close list_published_export branch
Affected #: 0 files
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
9 years, 7 months
commit/galaxy-central: 7 new changesets
by commits-noreply@bitbucket.org
7 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/b16496b8311b/
Changeset: b16496b8311b
Branch: list_published_export
User: saketkc
Date: 2013-06-17 21:03:41
Summary: Adding new branch for export support of published_workflows
Affected #: 0 files
https://bitbucket.org/galaxy/galaxy-central/commits/21a6b29982e9/
Changeset: 21a6b29982e9
Branch: list_published_export
User: saketkc
Date: 2013-06-17 21:35:15
Summary: Add support for exporting published workflows directly from the main '/list_published page'
Affected #: 1 file
diff -r b16496b8311b8e09ac11aa0c634dd2fbe4bdac4f -r 21a6b29982e97a764097464595c024b8777e713f lib/galaxy/webapps/galaxy/controllers/workflow.py
--- a/lib/galaxy/webapps/galaxy/controllers/workflow.py
+++ b/lib/galaxy/webapps/galaxy/controllers/workflow.py
@@ -81,7 +81,7 @@
default_filter = dict( public_url="All", username="All", tags="All" )
use_async = True
columns = [
- grids.PublicURLColumn( "Name", key="name", filterable="advanced" ),
+ grids.PublicURLColumn( "Name", key="name", filterable="advanced", attach_popup=True ),
grids.OwnerAnnotationColumn( "Annotation", key="annotation", model_annotation_association_class=model.StoredWorkflowAnnotationAssociation, filterable="advanced" ),
grids.OwnerColumn( "Owner", key="username", model_class=model.User, filterable="advanced" ),
grids.CommunityRatingColumn( "Community Rating", key="rating" ),
@@ -94,7 +94,16 @@
cols_to_filter=[ columns[0], columns[1], columns[2], columns[4] ],
key="free-text-search", visible=False, filterable="standard" )
)
- operations = []
+ operations = [
+ grids.GridOperation( "Import",
+ condition=( lambda item: not item.deleted ),
+ allow_multiple=False,
+ url_args=dict( webapp="galaxy", action="imp" ) ),
+ grids.GridOperation( "Export",
+ condition=( lambda item: not item.deleted ),
+ allow_multiple=False,
+ url_args=dict( webapp="galaxy", action="export_to_file" ) ),
+ ]
def build_initial_query( self, trans, **kwargs ):
# Join so that searching stored_workflow.user makes sense.
@@ -1013,7 +1022,9 @@
return self._workflow_to_dict( trans, stored )
@web.json_pretty
- def export_to_file( self, trans, id ):
+ def export_to_file( self, trans, id, **kwd ):
+ ## NOTE: kwd added to allow passing custom named arguments like '?webapp=galaxy' as while
+ ## exporting published_workflows directly from '/list_published'
"""
Get the latest Workflow for the StoredWorkflow identified by `id` and
encode it as a json string that can be imported back into Galaxy
@@ -1869,7 +1880,7 @@
stored.user = trans.user
if data[ 'annotation' ]:
self.add_item_annotation( trans.sa_session, stored.user, stored, data[ 'annotation' ] )
-
+
# Persist
trans.sa_session.add( stored )
trans.sa_session.flush()
https://bitbucket.org/galaxy/galaxy-central/commits/1a1120f46692/
Changeset: 1a1120f46692
Branch: list_published_export
User: saketkc
Date: 2013-06-18 05:40:44
Summary: Removing optional arguments from export_to_file as it was not required from the last commit
Affected #: 1 file
diff -r 21a6b29982e97a764097464595c024b8777e713f -r 1a1120f466923ca6b1e3613f4b0ff9efdb5345e7 lib/galaxy/webapps/galaxy/controllers/workflow.py
--- a/lib/galaxy/webapps/galaxy/controllers/workflow.py
+++ b/lib/galaxy/webapps/galaxy/controllers/workflow.py
@@ -102,7 +102,7 @@
grids.GridOperation( "Export",
condition=( lambda item: not item.deleted ),
allow_multiple=False,
- url_args=dict( webapp="galaxy", action="export_to_file" ) ),
+ url_args=dict( action="export_to_file" ) ),
]
def build_initial_query( self, trans, **kwargs ):
@@ -1023,8 +1023,6 @@
@web.json_pretty
def export_to_file( self, trans, id, **kwd ):
- ## NOTE: kwd added to allow passing custom named arguments like '?webapp=galaxy' as while
- ## exporting published_workflows directly from '/list_published'
"""
Get the latest Workflow for the StoredWorkflow identified by `id` and
encode it as a json string that can be imported back into Galaxy
https://bitbucket.org/galaxy/galaxy-central/commits/e3d7bd8ef0df/
Changeset: e3d7bd8ef0df
Branch: list_published_export
User: saketkc
Date: 2013-06-19 08:59:39
Summary: Changing 'export' text
Affected #: 1 file
diff -r 1a1120f466923ca6b1e3613f4b0ff9efdb5345e7 -r e3d7bd8ef0df4637bedbbdd8690a280c713addea lib/galaxy/webapps/galaxy/controllers/workflow.py
--- a/lib/galaxy/webapps/galaxy/controllers/workflow.py
+++ b/lib/galaxy/webapps/galaxy/controllers/workflow.py
@@ -99,7 +99,7 @@
condition=( lambda item: not item.deleted ),
allow_multiple=False,
url_args=dict( webapp="galaxy", action="imp" ) ),
- grids.GridOperation( "Export",
+ grids.GridOperation( "Save as File",
condition=( lambda item: not item.deleted ),
allow_multiple=False,
url_args=dict( action="export_to_file" ) ),
https://bitbucket.org/galaxy/galaxy-central/commits/fe1fe2a8ad70/
Changeset: fe1fe2a8ad70
Branch: list_published_export
User: saketkc
Date: 2013-06-21 06:55:06
Summary: *kwd argument is not required
Affected #: 1 file
diff -r e3d7bd8ef0df4637bedbbdd8690a280c713addea -r fe1fe2a8ad706ef577596ef9841c32dda2a06544 lib/galaxy/webapps/galaxy/controllers/workflow.py
--- a/lib/galaxy/webapps/galaxy/controllers/workflow.py
+++ b/lib/galaxy/webapps/galaxy/controllers/workflow.py
@@ -1022,7 +1022,7 @@
return self._workflow_to_dict( trans, stored )
@web.json_pretty
- def export_to_file( self, trans, id, **kwd ):
+ def export_to_file( self, trans, id):
"""
Get the latest Workflow for the StoredWorkflow identified by `id` and
encode it as a json string that can be imported back into Galaxy
https://bitbucket.org/galaxy/galaxy-central/commits/1d71126419b2/
Changeset: 1d71126419b2
Branch: list_published_export
User: saketkc
Date: 2013-06-21 07:03:28
Summary: fixed the spacing
Affected #: 1 file
diff -r fe1fe2a8ad706ef577596ef9841c32dda2a06544 -r 1d71126419b29293159599ff5d42b42aee9ad62a lib/galaxy/webapps/galaxy/controllers/workflow.py
--- a/lib/galaxy/webapps/galaxy/controllers/workflow.py
+++ b/lib/galaxy/webapps/galaxy/controllers/workflow.py
@@ -1022,7 +1022,7 @@
return self._workflow_to_dict( trans, stored )
@web.json_pretty
- def export_to_file( self, trans, id):
+ def export_to_file( self, trans, id ):
"""
Get the latest Workflow for the StoredWorkflow identified by `id` and
encode it as a json string that can be imported back into Galaxy
https://bitbucket.org/galaxy/galaxy-central/commits/6fc05699ebaf/
Changeset: 6fc05699ebaf
User: jgoecks
Date: 2013-06-21 14:58:47
Summary: Merged in saketkc/galaxy-central/list_published_export (pull request #183)
UI Add Support for directly exporting published workflows from the main 'list_published' page
Affected #: 1 file
diff -r 434335f801613d882ff57ea8c7dcdc00a1b8d4f2 -r 6fc05699ebafaf8acaae58f281bd576bebcfcd90 lib/galaxy/webapps/galaxy/controllers/workflow.py
--- a/lib/galaxy/webapps/galaxy/controllers/workflow.py
+++ b/lib/galaxy/webapps/galaxy/controllers/workflow.py
@@ -81,7 +81,7 @@
default_filter = dict( public_url="All", username="All", tags="All" )
use_async = True
columns = [
- grids.PublicURLColumn( "Name", key="name", filterable="advanced" ),
+ grids.PublicURLColumn( "Name", key="name", filterable="advanced", attach_popup=True ),
grids.OwnerAnnotationColumn( "Annotation", key="annotation", model_annotation_association_class=model.StoredWorkflowAnnotationAssociation, filterable="advanced" ),
grids.OwnerColumn( "Owner", key="username", model_class=model.User, filterable="advanced" ),
grids.CommunityRatingColumn( "Community Rating", key="rating" ),
@@ -94,7 +94,16 @@
cols_to_filter=[ columns[0], columns[1], columns[2], columns[4] ],
key="free-text-search", visible=False, filterable="standard" )
)
- operations = []
+ operations = [
+ grids.GridOperation( "Import",
+ condition=( lambda item: not item.deleted ),
+ allow_multiple=False,
+ url_args=dict( webapp="galaxy", action="imp" ) ),
+ grids.GridOperation( "Save as File",
+ condition=( lambda item: not item.deleted ),
+ allow_multiple=False,
+ url_args=dict( action="export_to_file" ) ),
+ ]
def build_initial_query( self, trans, **kwargs ):
# Join so that searching stored_workflow.user makes sense.
@@ -1827,7 +1836,7 @@
stored.user = trans.user
if data[ 'annotation' ]:
self.add_item_annotation( trans.sa_session, stored.user, stored, data[ 'annotation' ] )
-
+
# Persist
trans.sa_session.add( stored )
trans.sa_session.flush()
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
9 years, 7 months
commit/galaxy-central: dannon: Workflow import fix for 10117
by commits-noreply@bitbucket.org
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/434335f80161/
Changeset: 434335f80161
User: dannon
Date: 2013-06-21 10:37:07
Summary: Workflow import fix for 10117
Affected #: 1 file
diff -r cf58e35b3a973311ab291e90b501c6f4bc31b6d3 -r 434335f801613d882ff57ea8c7dcdc00a1b8d4f2 lib/galaxy/workflow/modules.py
--- a/lib/galaxy/workflow/modules.py
+++ b/lib/galaxy/workflow/modules.py
@@ -246,8 +246,8 @@
return module_factory.from_dict(trans, from_json_string(step.config), secure=False)
module = Class( trans, tool_id )
module.state = galaxy.tools.DefaultToolState()
- if step.tool_version and (step.tool_version != tool.version):
- module.version_changes.append("%s: using version '%s' instead of version '%s' indicated in this workflow." % (tool_id, tool.version, step.tool_version))
+ if step.tool_version and (step.tool_version != module.tool.version):
+ module.version_changes.append("%s: using version '%s' instead of version '%s' indicated in this workflow." % (tool_id, module.tool.version, step.tool_version))
module.state.inputs = module.tool.params_from_strings( step.tool_inputs, trans.app, ignore_errors=True )
module.errors = step.tool_errors
# module.post_job_actions = step.post_job_actions
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
9 years, 7 months
commit/galaxy-central: 2 new changesets
by commits-noreply@bitbucket.org
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/cf58e35b3a97/
Changeset: cf58e35b3a97
User: dan
Date: 2013-06-20 22:24:33
Summary: Fix for GenomeSpace Export root directory listing.
Affected #: 2 files
diff -r a5dd3ca6f5ef97eadf62bc4642c8d36c4ed3bce4 -r cf58e35b3a973311ab291e90b501c6f4bc31b6d3 tools/genomespace/genomespace_exporter.py
--- a/tools/genomespace/genomespace_exporter.py
+++ b/tools/genomespace/genomespace_exporter.py
@@ -70,7 +70,7 @@
return ( dir_dict, path )
def get_default_directory( url_opener, dm_url ):
- return get_directory( url_opener, dm_url, ["defaultdirectory"] )[0]
+ return get_directory( url_opener, dm_url, ["%s/defaultdirectory" % ( GENOMESPACE_API_VERSION_STRING ) ] )[0]
def get_personal_directory( url_opener, dm_url ):
return get_directory( url_opener, dm_url, [ "%s/personaldirectory" % ( GENOMESPACE_API_VERSION_STRING ) ] )[0]
@@ -143,8 +143,9 @@
url_opener = get_cookie_opener( username, token )
genomespace_site_dict = get_genomespace_site_urls()[ genomespace_site ]
dm_url = genomespace_site_dict['dmServer']
- #get default directory
- directory_dict = get_default_directory( url_opener, dm_url ).get( 'directory', None )
+ #get export root directory
+ #directory_dict = get_default_directory( url_opener, dm_url ).get( 'directory', None ) #This directory contains shares and other items outside of the users home
+ directory_dict = get_personal_directory( url_opener, dm_url ).get( 'directory', None ) #Limit export list to only user's home dir
if directory_dict is None:
return []
#what directory to stuff this in
diff -r a5dd3ca6f5ef97eadf62bc4642c8d36c4ed3bce4 -r cf58e35b3a973311ab291e90b501c6f4bc31b6d3 tools/genomespace/genomespace_exporter.xml
--- a/tools/genomespace/genomespace_exporter.xml
+++ b/tools/genomespace/genomespace_exporter.xml
@@ -1,5 +1,5 @@
<?xml version="1.0"?>
-<tool name="GenomeSpace Exporter" id="genomespace_exporter" require_login="True" version="0.0.1">
+<tool name="GenomeSpace Exporter" id="genomespace_exporter" require_login="True" version="0.0.2"><description> - send data to GenomeSpace</description><command interpreter="python">genomespace_exporter.py
--genomespace_site "prod"
https://bitbucket.org/galaxy/galaxy-central/commits/5a8069419967/
Changeset: 5a8069419967
Branch: stable
User: dan
Date: 2013-06-20 22:24:33
Summary: Fix for GenomeSpace Export root directory listing.
Affected #: 2 files
diff -r adee6fc31991ee4291a28e4e209ef9583ba811ab -r 5a8069419967cfdb2157482ad239f75624354b78 tools/genomespace/genomespace_exporter.py
--- a/tools/genomespace/genomespace_exporter.py
+++ b/tools/genomespace/genomespace_exporter.py
@@ -70,7 +70,7 @@
return ( dir_dict, path )
def get_default_directory( url_opener, dm_url ):
- return get_directory( url_opener, dm_url, ["defaultdirectory"] )[0]
+ return get_directory( url_opener, dm_url, ["%s/defaultdirectory" % ( GENOMESPACE_API_VERSION_STRING ) ] )[0]
def get_personal_directory( url_opener, dm_url ):
return get_directory( url_opener, dm_url, [ "%s/personaldirectory" % ( GENOMESPACE_API_VERSION_STRING ) ] )[0]
@@ -143,8 +143,9 @@
url_opener = get_cookie_opener( username, token )
genomespace_site_dict = get_genomespace_site_urls()[ genomespace_site ]
dm_url = genomespace_site_dict['dmServer']
- #get default directory
- directory_dict = get_default_directory( url_opener, dm_url ).get( 'directory', None )
+ #get export root directory
+ #directory_dict = get_default_directory( url_opener, dm_url ).get( 'directory', None ) #This directory contains shares and other items outside of the users home
+ directory_dict = get_personal_directory( url_opener, dm_url ).get( 'directory', None ) #Limit export list to only user's home dir
if directory_dict is None:
return []
#what directory to stuff this in
diff -r adee6fc31991ee4291a28e4e209ef9583ba811ab -r 5a8069419967cfdb2157482ad239f75624354b78 tools/genomespace/genomespace_exporter.xml
--- a/tools/genomespace/genomespace_exporter.xml
+++ b/tools/genomespace/genomespace_exporter.xml
@@ -1,5 +1,5 @@
<?xml version="1.0"?>
-<tool name="GenomeSpace Exporter" id="genomespace_exporter" require_login="True" version="0.0.1">
+<tool name="GenomeSpace Exporter" id="genomespace_exporter" require_login="True" version="0.0.2"><description> - send data to GenomeSpace</description><command interpreter="python">genomespace_exporter.py
--genomespace_site "prod"
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
9 years, 7 months