galaxy-commits
commit/galaxy-central: greg: Add the framework for discovering and installing simple repository dependencies.
by Bitbucket 17 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/fbfa0c78ea29/
changeset: fbfa0c78ea29
user: greg
date: 2012-12-17 21:02:54
summary: Add the framework for discovering and installing simple repository dependencies.
affected #: 9 files
diff -r a3ecba6f3e0c1ecc719a42b6bccfa0b32746ba27 -r fbfa0c78ea29b1e8d1ad339ab2d7d6ababb17e2a lib/galaxy/tool_shed/common_util.py
--- a/lib/galaxy/tool_shed/common_util.py
+++ b/lib/galaxy/tool_shed/common_util.py
@@ -1,7 +1,7 @@
import os, urllib2
from galaxy import util
from galaxy.util.odict import odict
-from galaxy.tool_shed.encoding_util import tool_shed_decode
+from galaxy.tool_shed import encoding_util
REPOSITORY_OWNER = 'devteam'
@@ -36,7 +36,7 @@
print "The URL\n%s\nraised the exception:\n%s\n" % ( url, str( e ) )
if tool_shed_accessible:
if text:
- tool_dependencies_dict = tool_shed_decode( text )
+ tool_dependencies_dict = encoding_util.tool_shed_decode( text )
for dependency_key, requirements_dict in tool_dependencies_dict.items():
tool_dependency_name = requirements_dict[ 'name' ]
tool_dependency_version = requirements_dict[ 'version' ]
diff -r a3ecba6f3e0c1ecc719a42b6bccfa0b32746ba27 -r fbfa0c78ea29b1e8d1ad339ab2d7d6ababb17e2a lib/galaxy/tool_shed/encoding_util.py
--- a/lib/galaxy/tool_shed/encoding_util.py
+++ b/lib/galaxy/tool_shed/encoding_util.py
@@ -11,6 +11,7 @@
log = logging.getLogger( __name__ )
encoding_sep = '__esep__'
+encoding_sep2 = '__esepii__'
def tool_shed_decode( value ):
# Extract and verify hash
@@ -23,12 +24,12 @@
try:
values = simplejson.loads( value )
except Exception, e:
- log.debug( "Decoding json value from tool shed threw exception: %s" % str( e ) )
+ log.debug( "Decoding json value from tool shed for value '%s' threw exception: %s" % ( str( value ), str( e ) ) )
if values is not None:
try:
return json_fix( values )
except Exception, e:
- log.debug( "Fixing decoded json value from tool shed threw exception: %s" % str( e ) )
+ log.debug( "Fixing decoded json values '%s' from tool shed threw exception: %s" % ( str( values ), str( e ) ) )
fixed_values = values
if values is None:
values = value
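The new encoding_sep2 separator enables a second level of packing: each repository tuple is joined with encoding_sep, and the resulting strings are joined with encoding_sep2 before the whole payload is encoded. A minimal sketch of that round trip, with tool_shed_encode/tool_shed_decode stubbed as identity functions and made-up repository values (the real codecs also serialize and protect the payload):
encoding_sep = '__esep__'
encoding_sep2 = '__esepii__'
def tool_shed_encode( value ):
    # Stand-in for the real codec; illustration only.
    return value
def tool_shed_decode( value ):
    # Stand-in inverse of tool_shed_encode.
    return value
tups = [ ( 'toolshed', 'emboss_5', 'devteam', 'abc123' ),
         ( 'toolshed', 'filtering', 'devteam', 'def456' ) ]
packed = tool_shed_encode( encoding_sep2.join( encoding_sep.join( t ) for t in tups ) )
unpacked = [ tuple( s.split( encoding_sep ) ) for s in tool_shed_decode( packed ).split( encoding_sep2 ) ]
assert unpacked == tups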
diff -r a3ecba6f3e0c1ecc719a42b6bccfa0b32746ba27 -r fbfa0c78ea29b1e8d1ad339ab2d7d6ababb17e2a lib/galaxy/tool_shed/tool_dependencies/install_util.py
--- a/lib/galaxy/tool_shed/tool_dependencies/install_util.py
+++ b/lib/galaxy/tool_shed/tool_dependencies/install_util.py
@@ -1,7 +1,7 @@
import sys, os, subprocess, tempfile
import common_util
import fabric_util
-from galaxy.tool_shed.encoding_util import encoding_sep, tool_shed_encode, tool_shed_decode
+from galaxy.tool_shed import encoding_util
from galaxy.model.orm import and_
from galaxy import eggs
@@ -214,10 +214,10 @@
for action_elem in param_elem:
actions.append( action_elem.text.replace( '$INSTALL_DIR', install_dir ) )
if actions:
- params_str += 'actions=%s,' % tool_shed_encode( encoding_sep.join( actions ) )
+ params_str += 'actions=%s,' % encoding_util.tool_shed_encode( encoding_util.encoding_sep.join( actions ) )
else:
if param_elem.text:
- param_value = tool_shed_encode( param_elem.text )
+ param_value = encoding_util.tool_shed_encode( param_elem.text )
params_str += '%s=%s,' % ( param_name, param_value )
if package_name:
params_str += 'package_name=%s' % package_name
diff -r a3ecba6f3e0c1ecc719a42b6bccfa0b32746ba27 -r fbfa0c78ea29b1e8d1ad339ab2d7d6ababb17e2a lib/galaxy/util/shed_util_common.py
--- a/lib/galaxy/util/shed_util_common.py
+++ b/lib/galaxy/util/shed_util_common.py
@@ -1279,6 +1279,14 @@
valid_filenames.append( '%s.txt' % r )
valid_filenames.append( '%s.txt' % repository_name )
return valid_filenames
+def get_repo_info_tuple_contents( repo_info_tuple ):
+ # Take care in handling the repo_info_tuple as it evolves over time as new tool shed features are introduced.
+ if len( repo_info_tuple ) == 6:
+ description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, tool_dependencies = repo_info_tuple
+ repository_dependencies = None
+ elif len( repo_info_tuple ) == 7:
+ description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = repo_info_tuple
+ return description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies
def get_repository_by_name_and_owner( trans, name, owner ):
"""Get a repository from the database via name and owner"""
if trans.webapp.name == 'galaxy':
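With get_repo_info_tuple_contents in place, callers can decode either tuple shape through one helper instead of branching on length themselves. A quick illustration with hypothetical values (the helper body mirrors the one added above):
def get_repo_info_tuple_contents( repo_info_tuple ):
    if len( repo_info_tuple ) == 6:
        description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, tool_dependencies = repo_info_tuple
        repository_dependencies = None
    elif len( repo_info_tuple ) == 7:
        description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = repo_info_tuple
    return description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies
old_style = ( 'desc', 'http://shed/repos/devteam/filtering', 'abc123', '1', 'devteam', {} )
new_style = ( 'desc', 'http://shed/repos/devteam/filtering', 'abc123', '1', 'devteam', { 'root_key': '...' }, {} )
assert get_repo_info_tuple_contents( old_style )[ 5 ] is None
assert get_repo_info_tuple_contents( new_style )[ 5 ] == { 'root_key': '...' }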
diff -r a3ecba6f3e0c1ecc719a42b6bccfa0b32746ba27 -r fbfa0c78ea29b1e8d1ad339ab2d7d6ababb17e2a lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -11,7 +11,7 @@
from galaxy.util.json import from_json_string, to_json_string
from galaxy.model.orm import and_
import galaxy.util.shed_util_common as suc
-from galaxy.tool_shed.encoding_util import tool_shed_encode
+from galaxy.tool_shed import encoding_util
import common
from galaxy import eggs
@@ -1264,14 +1264,14 @@
update_dict = dict( changeset_revision=changeset_revision, ctx_rev=ctx_rev )
if changeset_revision == repository.tip( trans.app ):
# If changeset_revision is the repository tip, there are no additional updates.
- return tool_shed_encode( update_dict )
+ return encoding_util.tool_shed_encode( update_dict )
else:
repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans,
trans.security.encode_id( repository.id ),
changeset_revision )
if repository_metadata:
# If changeset_revision is in the repository_metadata table for this repository, there are no additional updates.
- return tool_shed_encode( update_dict )
+ return encoding_util.tool_shed_encode( update_dict )
else:
# The changeset_revision column in the repository_metadata table has been updated with a new changeset_revision value since the
# repository was installed. We need to find the changeset_revision to which we need to update.
@@ -1295,7 +1295,7 @@
update_to_changeset_hash = changeset_hash
update_dict[ 'changeset_revision' ] = str( latest_changeset_revision )
update_dict[ 'ctx_rev' ] = str( update_to_ctx.rev() )
- return tool_shed_encode( update_dict )
+ return encoding_util.tool_shed_encode( update_dict )
@web.expose
def get_ctx_rev( self, trans, **kwd ):
"""Given a repository and changeset_revision, return the correct ctx.rev() value."""
@@ -1328,7 +1328,20 @@
return repository_metadata.metadata
return None
@web.json
+ def get_readme_files( self, trans, **kwd ):
+ """
+ This method is called when installing or re-installing a single repository into a Galaxy instance. If the received changeset_revision
+ includes one or more readme files, return them in a dictionary.
+ """
+ repository_name = kwd[ 'name' ]
+ repository_owner = kwd[ 'owner' ]
+ changeset_revision = kwd[ 'changeset_revision' ]
+ repository = suc.get_repository_by_name_and_owner( trans, repository_name, repository_owner )
+ repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, trans.security.encode_id( repository.id ), changeset_revision )
+ return suc.build_readme_files_dict( repository_metadata )
+ @web.json
def get_repository_dependencies( self, trans, **kwd ):
+ """Return an encoded dictionary of all repositories upon which the contents of the received repository depends."""
params = util.Params( kwd )
name = params.get( 'name', None )
owner = params.get( 'owner', None )
@@ -1339,7 +1352,6 @@
if repository_metadata:
metadata = repository_metadata.metadata
if metadata:
- # Get a dictionary of all repositories upon which the contents of the received repository depends.
repository_dependencies = suc.get_repository_dependencies_for_changeset_revision( trans=trans,
repository=repository,
repository_metadata=repository_metadata,
@@ -1349,7 +1361,7 @@
handled_key_rd_dicts=None,
circular_repository_dependencies=None )
if repository_dependencies:
- return tool_shed_encode( repository_dependencies )
+ return encoding_util.tool_shed_encode( repository_dependencies )
return ''
@web.json
def get_repository_information( self, trans, repository_ids, changeset_revisions, **kwd ):
@@ -1385,26 +1397,34 @@
repository=repository,
metadata=None,
repository_metadata=repository_metadata )
- repo_info_dicts.append( tool_shed_encode( repo_info_dict ) )
+ repo_info_dicts.append( encoding_util.tool_shed_encode( repo_info_dict ) )
return dict( includes_tools=includes_tools,
includes_repository_dependencies=includes_repository_dependencies,
includes_tool_dependencies=includes_tool_dependencies,
repo_info_dicts=repo_info_dicts )
@web.json
- def get_readme_files( self, trans, **kwd ):
- """
- This method is called when installing or re-installing a single repository into a Galaxy instance. If the received changeset_revision
- includes one or more readme files, return them in a dictionary.
- """
- repository_name = kwd[ 'name' ]
- repository_owner = kwd[ 'owner' ]
- changeset_revision = kwd[ 'changeset_revision' ]
- repository = suc.get_repository_by_name_and_owner( trans, repository_name, repository_owner )
- repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, trans.security.encode_id( repository.id ), changeset_revision )
- return suc.build_readme_files_dict( repository_metadata )
+ def get_required_repo_info_dict( self, trans, encoded_str ):
+ """Retrive a list of dictionaries that each contain all of the information needed to install the list of repositories defined by encoded_str."""
+ encoded_required_repository_str = encoding_util.tool_shed_decode( encoded_str )
+ encoded_required_repository_tups = encoded_required_repository_str.split( encoding_util.encoding_sep2 )
+ decoded_required_repository_tups = []
+ for encoded_required_repository_tup in encoded_required_repository_tups:
+ decoded_required_repository_tups.append( encoded_required_repository_tup.split( encoding_util.encoding_sep ) )
+ encoded_repository_ids = []
+ changeset_revisions = []
+ for required_repository_tup in decoded_required_repository_tups:
+ tool_shed, name, owner, changeset_revision = required_repository_tup
+ repository = suc.get_repository_by_name_and_owner( trans, name, owner )
+ encoded_repository_ids.append( trans.security.encode_id( repository.id ) )
+ changeset_revisions.append( changeset_revision )
+ if encoded_repository_ids and changeset_revisions:
+ repo_info_dict = from_json_string( self.get_repository_information( trans, encoded_repository_ids, changeset_revisions ) )
+ else:
+ repo_info_dict = {}
+ return repo_info_dict
@web.expose
def get_tool_dependencies( self, trans, **kwd ):
- """Handle a request from a local Galaxy instance."""
+ """Handle a request from the InstallManager of a local Galaxy instance."""
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
@@ -1422,9 +1442,8 @@
from_install_manager = kwd.get( 'from_install_manager', False )
if from_install_manager:
if tool_dependencies:
- return tool_shed_encode( tool_dependencies )
- return ''
- # TODO: future handler where request comes from some Galaxy admin feature.
+ return encoding_util.tool_shed_encode( tool_dependencies )
+ return ''
@web.expose
def get_tool_versions( self, trans, **kwd ):
"""
diff -r a3ecba6f3e0c1ecc719a42b6bccfa0b32746ba27 -r fbfa0c78ea29b1e8d1ad339ab2d7d6ababb17e2a lib/galaxy/webapps/community/controllers/workflow.py
--- a/lib/galaxy/webapps/community/controllers/workflow.py
+++ b/lib/galaxy/webapps/community/controllers/workflow.py
@@ -11,7 +11,7 @@
from galaxy.webapps.galaxy.controllers.workflow import attach_ordered_steps
import common
import galaxy.util.shed_util_common as suc
-from galaxy.tool_shed.encoding_util import tool_shed_encode, tool_shed_decode
+from galaxy.tool_shed import encoding_util
class RepoInputDataModule( InputDataModule ):
@@ -141,7 +141,7 @@
repository_metadata_id = kwd.get( 'repository_metadata_id', '' )
workflow_name = kwd.get( 'workflow_name', '' )
if workflow_name:
- workflow_name = tool_shed_decode( workflow_name )
+ workflow_name = encoding_util.tool_shed_decode( workflow_name )
message = kwd.get( 'message', '' )
status = kwd.get( 'status', 'done' )
repository_metadata = common.get_repository_metadata_by_id( trans, repository_metadata_id )
@@ -160,7 +160,7 @@
repository_id = trans.security.encode_id( repository_metadata.repository_id )
changeset_revision = repository_metadata.changeset_revision
metadata = repository_metadata.metadata
- workflow_name = tool_shed_decode( workflow_name )
+ workflow_name = encoding_util.tool_shed_decode( workflow_name )
# metadata[ 'workflows' ] is a list of tuples where each contained tuple is
# [ <relative path to the .ga file in the repository>, <exported workflow dict> ]
for workflow_tup in metadata[ 'workflows' ]:
@@ -386,7 +386,7 @@
repository_metadata_id = kwd.get( 'repository_metadata_id', '' )
workflow_name = kwd.get( 'workflow_name', '' )
if workflow_name:
- workflow_name = tool_shed_decode( workflow_name )
+ workflow_name = encoding_util.tool_shed_decode( workflow_name )
message = kwd.get( 'message', '' )
status = kwd.get( 'status', 'done' )
repository_metadata = get_repository_metadata_by_id( trans, repository_metadata_id )
@@ -403,7 +403,7 @@
return open( tmp_fname )
galaxy_url = trans.get_cookie( name='toolshedgalaxyurl' )
url = '%sworkflow/import_workflow?tool_shed_url=%s&repository_metadata_id=%s&workflow_name=%s' % \
- ( galaxy_url, url_for( '/', qualified=True ), repository_metadata_id, tool_shed_encode( workflow_name ) )
+ ( galaxy_url, url_for( '/', qualified=True ), repository_metadata_id, encoding_util.tool_shed_encode( workflow_name ) )
return trans.response.send_redirect( url )
return trans.response.send_redirect( web.url_for( controller='workflow',
action='view_workflow',
diff -r a3ecba6f3e0c1ecc719a42b6bccfa0b32746ba27 -r fbfa0c78ea29b1e8d1ad339ab2d7d6ababb17e2a lib/galaxy/webapps/galaxy/controllers/admin.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin.py
@@ -10,7 +10,7 @@
from galaxy.web.params import QuotaParamParser
from galaxy.exceptions import *
from galaxy.util.odict import *
-from galaxy.tool_shed.encoding_util import tool_shed_decode
+from galaxy.tool_shed import encoding_util
import galaxy.datatypes.registry
import logging, imp, subprocess, urllib2
@@ -717,7 +717,7 @@
text = response.read()
response.close()
if text:
- tool_dependencies_dict = tool_shed_decode( text )
+ tool_dependencies_dict = encoding_util.tool_shed_decode( text )
for dependency_key, requirements_dict in tool_dependencies_dict.items():
tool_dependency_name = requirements_dict[ 'name' ]
tool_dependency_version = requirements_dict[ 'version' ]
diff -r a3ecba6f3e0c1ecc719a42b6bccfa0b32746ba27 -r fbfa0c78ea29b1e8d1ad339ab2d7d6ababb17e2a lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -3,7 +3,8 @@
from galaxy.util.json import from_json_string, to_json_string
import galaxy.util.shed_util as shed_util
import galaxy.util.shed_util_common as suc
-from galaxy.tool_shed.encoding_util import tool_shed_encode, tool_shed_decode
+from galaxy.tool_shed import encoding_util
+from galaxy.webapps.community.util import container_util
from galaxy import eggs, tools
eggs.require( 'mercurial' )
@@ -584,11 +585,59 @@
raw_text = response.read()
response.close()
if len( raw_text ) > 2:
- text = json.from_json_string( tool_shed_decode( raw_text ) )
+ text = json.from_json_string( encoding_util.tool_shed_decode( raw_text ) )
log.debug( text )
else:
text = ''
return text
+ def get_required_repo_info_dicts( self, tool_shed_url, repo_info_dicts ):
+ """
+ Inspect the list of repo_info_dicts for repository dependencies and append a repo_info_dict for each of them to the list. All
+ repository_dependencies entries in each of the received repo_info_dicts include all required repositories, so only one pass through
+ this method is required to retrieve all repository dependencies.
+ """
+ if repo_info_dicts:
+ all_repo_info_dicts = [ rid for rid in repo_info_dicts ]
+ # We'll send tuples of ( tool_shed, repository_name, repository_owner, changeset_revision ) to the tool shed to discover repository ids.
+ required_repository_tups = []
+ for repo_info_dict in repo_info_dicts:
+ for repository_name, repo_info_tup in repo_info_dict.items():
+ description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \
+ suc.get_repo_info_tuple_contents( repo_info_tup )
+ if repository_dependencies:
+ for key, val in repository_dependencies.items():
+ if key in [ 'root_key', 'description' ]:
+ continue
+ toolshed, name, owner, changeset_revision = container_util.get_components_from_key( key )
+ components_list = [ toolshed, name, owner, changeset_revision ]
+ if components_list not in required_repository_tups:
+ required_repository_tups.append( components_list )
+ for components_list in val:
+ if components_list not in required_repository_tups:
+ required_repository_tups.append( components_list )
+ if required_repository_tups:
+ # The value of required_repository_tups is a list of tuples, so we need to encode it.
+ encoded_required_repository_tups = []
+ for required_repository_tup in required_repository_tups:
+ encoded_required_repository_tups.append( encoding_util.encoding_sep.join( required_repository_tup ) )
+ encoded_required_repository_str = encoding_util.encoding_sep2.join( encoded_required_repository_tups )
+ encoded_required_repository_str = encoding_util.tool_shed_encode( encoded_required_repository_str )
+ url = suc.url_join( tool_shed_url, '/repository/get_required_repo_info_dict?encoded_str=%s' % encoded_required_repository_str )
+ response = urllib2.urlopen( url )
+ text = response.read()
+ response.close()
+ if text:
+ required_repo_info_dict = from_json_string( text )
+ required_repo_info_dicts = []
+ encoded_dict_strings = required_repo_info_dict[ 'repo_info_dicts' ]
+ for encoded_dict_str in encoded_dict_strings:
+ decoded_dict = encoding_util.tool_shed_decode( encoded_dict_str )
+ required_repo_info_dicts.append( decoded_dict )
+ if required_repo_info_dicts:
+ for required_repo_info_dict in required_repo_info_dicts:
+ if required_repo_info_dict not in all_repo_info_dicts:
+ all_repo_info_dicts.append( required_repo_info_dict )
+ return all_repo_info_dicts
def get_versions_of_tool( self, app, guid ):
tool_version = shed_util.get_tool_version( app, guid )
return tool_version.get_version_ids( app, reverse=True )
@@ -694,7 +743,7 @@
tool_section = None
for tup in zip( tool_shed_repositories, repo_info_dicts ):
tool_shed_repository, repo_info_dict = tup
- repo_info_dict = tool_shed_decode( repo_info_dict )
+ repo_info_dict = encoding_util.tool_shed_decode( repo_info_dict )
# Clone each repository to the configured location.
shed_util.update_tool_shed_repository_status( trans.app, tool_shed_repository, trans.model.ToolShedRepository.installation_status.CLONING )
repo_info_tuple = repo_info_dict[ tool_shed_repository.name ]
@@ -939,7 +988,7 @@
elif operation == "install":
reinstalling = util.string_as_bool( params.get( 'reinstalling', False ) )
encoded_kwd = kwd[ 'encoded_kwd' ]
- decoded_kwd = tool_shed_decode( encoded_kwd )
+ decoded_kwd = encoding_util.tool_shed_decode( encoded_kwd )
tsr_ids = decoded_kwd[ 'tool_shed_repository_ids' ]
repositories_for_installation = []
for tsr_id in tsr_ids:
@@ -1095,7 +1144,7 @@
includes_repository_dependencies = util.string_as_bool( repo_information_dict.get( 'includes_repository_dependencies', False ) )
includes_tool_dependencies = util.string_as_bool( repo_information_dict.get( 'includes_tool_dependencies', False ) )
encoded_repo_info_dicts = util.listify( repo_information_dict.get( 'repo_info_dicts', [] ) )
- repo_info_dicts = [ tool_shed_decode( encoded_repo_info_dict ) for encoded_repo_info_dict in encoded_repo_info_dicts ]
+ repo_info_dicts = [ encoding_util.tool_shed_decode( encoded_repo_info_dict ) for encoded_repo_info_dict in encoded_repo_info_dicts ]
if ( not includes_tools and not includes_repository_dependencies ) or \
( ( includes_tools or includes_repository_dependencies ) and kwd.get( 'select_tool_panel_section_button', False ) ):
install_repository_dependencies = CheckboxField.is_checked( install_repository_dependencies )
@@ -1122,14 +1171,12 @@
created_or_updated_tool_shed_repositories = []
# Repositories will be filtered (e.g., if already installed, etc), so filter the associated repo_info_dicts accordingly.
filtered_repo_info_dicts = []
+ # Discover all repository dependencies and retrieve information for installing them.
+ repo_info_dicts = self.get_required_repo_info_dicts( tool_shed_url, repo_info_dicts )
for repo_info_dict in repo_info_dicts:
for name, repo_info_tuple in repo_info_dict.items():
- # Take care in handling the repo_info_tuple as it evolves over time as new features are introduced.
- if len( repo_info_tuple ) == 6:
- description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, tool_dependencies = repo_info_tuple
- repository_dependencies = None
- elif len( repo_info_tuple ) == 7:
- description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = repo_info_tuple
+ description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \
+ suc.get_repo_info_tuple_contents( repo_info_tuple )
clone_dir = os.path.join( tool_path, self.generate_tool_path( repository_clone_url, changeset_revision ) )
relative_install_dir = os.path.join( clone_dir, name )
# Make sure the repository was not already installed.
@@ -1173,7 +1220,7 @@
owner=repository_owner,
dist_to_shed=False )
created_or_updated_tool_shed_repositories.append( tool_shed_repository )
- filtered_repo_info_dicts.append( tool_shed_encode( repo_info_dict ) )
+ filtered_repo_info_dicts.append( encoding_util.tool_shed_encode( repo_info_dict ) )
if created_or_updated_tool_shed_repositories:
if includes_tools and ( new_tool_panel_section or tool_panel_section ):
if new_tool_panel_section:
@@ -1201,6 +1248,7 @@
tsrids_list = [ trans.security.encode_id( tsr.id ) for tsr in created_or_updated_tool_shed_repositories ]
new_kwd = dict( includes_tools=includes_tools,
includes_repository_dependencies=includes_repository_dependencies,
+ install_repository_dependencies=install_repository_dependencies,
includes_tool_dependencies=includes_tool_dependencies,
install_tool_dependencies=install_tool_dependencies,
message=message,
@@ -1211,7 +1259,7 @@
tool_panel_section_key=tool_panel_section_key,
tool_shed_repository_ids=tsrids_list,
tool_shed_url=tool_shed_url )
- encoded_kwd = tool_shed_encode( new_kwd )
+ encoded_kwd = encoding_util.tool_shed_encode( new_kwd )
tsrids_str = ','.join( tsrids_list )
return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
action='initiate_repository_installation',
@@ -1391,7 +1439,7 @@
repository_metadata=None,
metadata=metadata,
repository_dependencies=repository_dependencies )
- repo_info_dict = tool_shed_encode( repo_info_dict )
+ repo_info_dict = encoding_util.tool_shed_encode( repo_info_dict )
new_kwd = dict( includes_tool_dependencies=tool_shed_repository.includes_tool_dependencies,
includes_tools=tool_shed_repository.includes_tools,
install_tool_dependencies=install_tool_dependencies,
@@ -1405,7 +1453,7 @@
tool_panel_section_key=tool_panel_section_key,
tool_shed_repository_ids=[ repository_id ],
tool_shed_url=tool_shed_url )
- encoded_kwd = tool_shed_encode( new_kwd )
+ encoded_kwd = encoding_util.tool_shed_encode( new_kwd )
return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
action='initiate_repository_installation',
shed_repository_ids=repository_id,
@@ -1437,11 +1485,8 @@
# Handle case where the repository was previously installed using an older changeset_revision, but later the repository was updated
# in the tool shed and now we're trying to install the latest changeset revision of the same repository instead of updating the one
# that was previously installed. We'll look in the database instead of on disk since the repository may be uninstalled.
- if len( repo_info_tuple ) == 6:
- description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, tool_dependencies = repo_info_tuple
- repository_dependencies = None
- elif len( repo_info_tuple ) == 7:
- description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = repo_info_tuple
+ description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \
+ suc.get_repo_info_tuple_contents( repo_info_tuple )
tool_shed = suc.get_tool_shed_from_clone_url( repository_clone_url )
# Get all previous change set revisions from the tool shed for the repository back to, but excluding, the previous valid changeset
# revision to see if it was previously installed using one of them.
@@ -1553,7 +1598,7 @@
install_tool_dependencies_check_box=install_tool_dependencies_check_box,
containers_dict=containers_dict,
tool_panel_section_select_field=tool_panel_section_select_field,
- encoded_repo_info_dict=tool_shed_encode( repo_info_dict ),
+ encoded_repo_info_dict=encoding_util.tool_shed_encode( repo_info_dict ),
repo_info_dict=repo_info_dict,
message=message,
status=status )
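Taken together, get_required_repo_info_dicts encodes the dependency tuples, calls the tool shed's get_required_repo_info_dict controller, and decodes each entry of the JSON reply. A rough, self-contained sketch of that round trip; the URL layout and the repo_info_dicts key follow the handlers above, while the decode argument stands in for encoding_util.tool_shed_decode:
import json
try:
    from urllib2 import urlopen          # Python 2
except ImportError:
    from urllib.request import urlopen   # Python 3
def fetch_required_repo_info_dicts( tool_shed_url, encoded_required_repository_str, decode ):
    url = '%s/repository/get_required_repo_info_dict?encoded_str=%s' % \
        ( tool_shed_url.rstrip( '/' ), encoded_required_repository_str )
    response = urlopen( url )
    try:
        payload = json.loads( response.read() )
    finally:
        response.close()
    # Each repo_info_dict in the reply is itself tool-shed-encoded.
    return [ decode( encoded ) for encoded in payload.get( 'repo_info_dicts', [] ) ]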
diff -r a3ecba6f3e0c1ecc719a42b6bccfa0b32746ba27 -r fbfa0c78ea29b1e8d1ad339ab2d7d6ababb17e2a lib/galaxy/webapps/galaxy/controllers/workflow.py
--- a/lib/galaxy/webapps/galaxy/controllers/workflow.py
+++ b/lib/galaxy/webapps/galaxy/controllers/workflow.py
@@ -14,7 +14,7 @@
from galaxy.util.odict import odict
from galaxy.util.sanitize_html import sanitize_html
from galaxy.util.topsort import topsort, topsort_levels, CycleError
-from galaxy.tool_shed.encoding_util import tool_shed_encode, tool_shed_decode
+from galaxy.tool_shed import encoding_util
from galaxy.workflow.modules import *
from galaxy import model
from galaxy import util
@@ -1044,7 +1044,7 @@
# from a Galaxy tool shed, in which case the value was encoded.
workflow_name = kwd.get( 'workflow_name', '' )
if workflow_name:
- workflow_name = tool_shed_decode( workflow_name )
+ workflow_name = encoding_util.tool_shed_decode( workflow_name )
# The following parameters will have a value only if the import originated
# from a tool shed repository installed locally or from the API.
installed_repository_file = kwd.get( 'installed_repository_file', '' )
@@ -1057,7 +1057,7 @@
if tool_shed_url and not import_button:
# Use urllib (send another request to the tool shed) to retrieve the workflow.
workflow_url = '%s/workflow/import_workflow?repository_metadata_id=%s&workflow_name=%s&open_for_url=true' % \
- ( tool_shed_url, repository_metadata_id, tool_shed_encode( workflow_name ) )
+ ( tool_shed_url, repository_metadata_id, encoding_util.tool_shed_encode( workflow_name ) )
response = urllib2.urlopen( workflow_url )
workflow_text = response.read()
response.close()
@@ -1164,7 +1164,7 @@
# We've received the textual representation of a workflow from a Galaxy tool shed.
message = "Workflow <b>%s</b> imported successfully." % workflow.name
url = '%s/workflow/view_workflow?repository_metadata_id=%s&workflow_name=%s&message=%s' % \
- ( tool_shed_url, repository_metadata_id, tool_shed_encode( workflow_name ), message )
+ ( tool_shed_url, repository_metadata_id, encoding_util.tool_shed_encode( workflow_name ), message )
return trans.response.send_redirect( url )
elif installed_repository_file:
# The workflow was read from a file included with an installed tool shed repository.
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: carlfeberhard: api/history_contents: temporarily disabling get_display_types
by Bitbucket 17 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/a3ecba6f3e0c/
changeset: a3ecba6f3e0c
user: carlfeberhard
date: 2012-12-17 16:50:34
summary: api/history_contents: temporarily disabling get_display_types
affected #: 1 file
diff -r 4033d81b3311e105f79377cfe64df5861851f5d1 -r a3ecba6f3e0c1ecc719a42b6bccfa0b32746ba27 lib/galaxy/webapps/galaxy/api/history_contents.py
--- a/lib/galaxy/webapps/galaxy/api/history_contents.py
+++ b/lib/galaxy/webapps/galaxy/api/history_contents.py
@@ -196,7 +196,7 @@
hda_dict[ 'meta_files' ] = meta_files
hda_dict[ 'display_apps' ] = get_display_apps( trans, hda )
- hda_dict[ 'display_types' ] = get_display_types( trans, hda )
+ #hda_dict[ 'display_types' ] = get_display_types( trans, hda )
hda_dict[ 'visualizations' ] = hda.get_visualizations()
hda_dict[ 'peek' ] = to_unicode( hda.display_peek() )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: greg: Missed an import.
by Bitbucket 14 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/4033d81b3311/
changeset: 4033d81b3311
user: greg
date: 2012-12-14 22:37:42
summary: Missed an import.
affected #: 1 file
diff -r f14ca41c8cf34779d0b89979320488ebe04eb943 -r 4033d81b3311e105f79377cfe64df5861851f5d1 lib/galaxy/tool_shed/tool_dependencies/install_util.py
--- a/lib/galaxy/tool_shed/tool_dependencies/install_util.py
+++ b/lib/galaxy/tool_shed/tool_dependencies/install_util.py
@@ -1,7 +1,7 @@
import sys, os, subprocess, tempfile
import common_util
import fabric_util
-from galaxy.tool_shed.encoding_util import tool_shed_encode
+from galaxy.tool_shed.encoding_util import encoding_sep, tool_shed_encode, tool_shed_decode
from galaxy.model.orm import and_
from galaxy import eggs
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: inithello: More tool shed functional test enhancements.
by Bitbucket 14 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/f14ca41c8cf3/
changeset: f14ca41c8cf3
user: inithello
date: 2012-12-14 22:20:22
summary: More tool shed functional test enhancements.
affected #: 13 files
diff -r 33d4ab5cc642f2bdc5ad8670abf4400063bb8415 -r f14ca41c8cf34779d0b89979320488ebe04eb943 lib/galaxy/visualization/genomes.py
--- a/lib/galaxy/visualization/genomes.py
+++ b/lib/galaxy/visualization/genomes.py
@@ -1,9 +1,11 @@
-import os, re, sys, glob
+import os, re, sys, glob, logging
from bx.seq.twobit import TwoBitFile
from galaxy.util.json import from_json_string
from galaxy import model
from galaxy.util.bunch import Bunch
+log = logging.getLogger( __name__ )
+
# FIXME: copied from tracks.py
# Message strings returned to browser
messages = Bunch(
@@ -166,13 +168,18 @@
self.genomes[ key ] = Genome( key, len_file=f )
# Add genome data (twobit files) to genomes.
- for line in open( os.path.join( app.config.tool_data_path, "twobit.loc" ) ):
- if line.startswith("#"): continue
- val = line.split()
- if len( val ) == 2:
- key, path = val
- if key in self.genomes:
- self.genomes[ key ].twobit_file = path
+ # FIXME: If a galaxy instance does not have ~/tool-data/twobit.loc file, the following error is thrown:
+ # IOError: [Errno 2] No such file or directory: '~/tool-data/twobit.loc'
+ try:
+ for line in open( os.path.join( app.config.tool_data_path, "twobit.loc" ) ):
+ if line.startswith("#"): continue
+ val = line.split()
+ if len( val ) == 2:
+ key, path = val
+ if key in self.genomes:
+ self.genomes[ key ].twobit_file = path
+ except IOError, e:
+ log.exception( str( e ) )
def get_build( self, dbkey ):
""" Returns build for the given key. """
diff -r 33d4ab5cc642f2bdc5ad8670abf4400063bb8415 -r f14ca41c8cf34779d0b89979320488ebe04eb943 test/tool_shed/base/twilltestcase.py
--- a/test/tool_shed/base/twilltestcase.py
+++ b/test/tool_shed/base/twilltestcase.py
@@ -1,6 +1,7 @@
import galaxy.webapps.community.util.hgweb_config
import galaxy.model as galaxy_model
-import common, string, os, re, test_db_util
+import common, string, os, re, test_db_util, simplejson
+import galaxy.util as util
from base.twilltestcase import tc, from_json_string, TwillTestCase, security, urllib
from galaxy.tool_shed.encoding_util import tool_shed_encode
@@ -23,8 +24,10 @@
self.galaxy_host = os.environ.get( 'GALAXY_TEST_HOST' )
self.galaxy_port = os.environ.get( 'GALAXY_TEST_PORT' )
self.galaxy_url = "http://%s:%s" % ( self.galaxy_host, self.galaxy_port )
+ self.shed_tool_data_table_conf = os.environ.get( 'TOOL_SHED_TEST_TOOL_DATA_TABLE_CONF' )
self.file_dir = os.environ.get( 'TOOL_SHED_TEST_FILE_DIR', None )
self.tool_shed_test_file = None
+ self.tool_data_path = os.environ.get( 'GALAXY_TEST_TOOL_DATA_PATH' )
self.shed_tools_dict = {}
self.home()
def browse_category( self, category, strings_displayed=[], strings_not_displayed=[] ):
@@ -439,9 +442,14 @@
repo = hg.repository( ui.ui(), self.get_repo_path( repository ) )
tip_ctx = repo.changectx( repo.changelog.tip() )
return tip_ctx.rev() < 0
+ def reset_installed_repository_metadata( self, repository ):
+ url = '/admin_toolshed/reset_repository_metadata?id=%s' % self.security.encode_id( repository.id )
+ self.visit_galaxy_url( url )
+ self.check_for_strings( [ 'Metadata has been reset' ] )
def reset_repository_metadata( self, repository ):
url = '/repository/reset_all_metadata?id=%s' % self.security.encode_id( repository.id )
self.visit_url( url )
+ self.check_for_strings( [ 'All repository metadata has been reset.' ] )
def revoke_write_access( self, repository, username ):
url = '/repository/manage_repository?user_access_button=Remove&id=%s&remove_auth=%s' % \
( self.security.encode_id( repository.id ), username )
@@ -466,6 +474,10 @@
def tip_has_metadata( self, repository ):
tip = self.get_repository_tip( repository )
return test_db_util.get_repository_metadata_by_repository_id_changeset_revision( repository.id, tip )
+ def update_installed_repository( self, installed_repository, strings_displayed=[], strings_not_displayed=[] ):
+ url = '/admin_toolshed/check_for_updates?id=%s' % self.security.encode_id( installed_repository.id )
+ self.visit_galaxy_url( url )
+ self.check_for_strings( strings_displayed, strings_not_displayed )
def upload_file( self,
repository,
filename,
@@ -482,6 +494,27 @@
tc.formfile( "1", "file_data", self.get_filename( filename, filepath ) )
tc.submit( "upload_button" )
self.check_for_strings( strings_displayed, strings_not_displayed )
+ def verify_installed_repository_metadata_unchanged( self, name, owner ):
+ installed_repository = test_db_util.get_installed_repository_by_name_owner( name, owner )
+ differs = False
+ metadata = installed_repository.metadata
+ self.reset_installed_repository_metadata( installed_repository )
+ new_metadata = installed_repository.metadata
+ # This test assumes that the different metadata components will always appear in the same order. If this is ever not
+ # the case, this test must be updated.
+ for metadata_key in [ 'datatypes', 'tools', 'tool_dependencies', 'repository_dependencies', 'workflows' ]:
+ if ( metadata_key in metadata and metadata_key not in new_metadata ) or \
+ ( metadata_key not in metadata and metadata_key in new_metadata ):
+ differs = True
+ break
+ elif metadata_key not in metadata and metadata_key not in new_metadata:
+ continue
+ else:
+ if metadata[ metadata_key ] != new_metadata[ metadata_key ]:
+ differs = True
+ break
+ if differs:
+ raise AssertionError( 'Metadata for installed repository %s differs after metadata reset.' % name )
def verify_installed_repository_on_browse_page( self, installed_repository, strings_displayed=[], strings_not_displayed=[] ):
url = '/admin_toolshed/browse_repositories'
self.visit_galaxy_url( url )
@@ -491,6 +524,17 @@
installed_repository.tool_shed,
installed_repository.installed_changeset_revision ] )
self.check_for_strings( strings_displayed, strings_not_displayed )
+ def verify_installed_repository_data_table_entries( self, data_tables=[] ):
+ data_table = util.parse_xml( self.shed_tool_data_table_conf )
+ found = False
+ for table_elem in data_table.findall( 'table' ):
+ for data_table in data_tables:
+ if 'name' in table_elem.attrib and table_elem.attrib[ 'name' ] == data_table:
+ file_elem = table_elem.find( 'file' )
+ assert os.path.exists( file_elem.attrib[ 'path' ] ), 'Tool data table file %s not found.' % file_elem.attrib[ 'path' ]
+ found = True
+ break
+ assert found, 'No entry for %s in %s.' % ( data_table, self.shed_tool_data_table_conf )
def verify_tool_metadata_for_installed_repository( self, installed_repository, strings_displayed=[], strings_not_displayed=[] ):
repository_id = self.security.encode_id( installed_repository.id )
for tool in installed_repository.metadata[ 'tools' ]:
@@ -499,6 +543,11 @@
url = '/admin_toolshed/view_tool_metadata?repository_id=%s&tool_id=%s' % ( repository_id, urllib.quote_plus( tool[ 'id' ] ) )
self.visit_galaxy_url( url )
self.check_for_strings( strings, strings_not_displayed )
+ def verify_unchanged_repository_metadata( self, repository ):
+ self.check_repository_changelog( repository )
+ html = self.last_page()
+ self.reset_repository_metadata( repository )
+ self.check_repository_changelog( repository, strings_displayed=[ html ] )
def visit_galaxy_url( self, url ):
url = '%s%s' % ( self.galaxy_url, url )
self.visit_url( url )
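The key-by-key comparison in verify_installed_repository_metadata_unchanged can be stated more compactly. A sketch of the same check with hypothetical metadata values; it keeps the test's stated assumption that components within each value stay in the same order:
METADATA_KEYS = [ 'datatypes', 'tools', 'tool_dependencies', 'repository_dependencies', 'workflows' ]
def metadata_differs( old, new ):
    for key in METADATA_KEYS:
        if ( key in old ) != ( key in new ):
            return True
        if key in old and old[ key ] != new[ key ]:
            return True
    return False
assert not metadata_differs( { 'tools': [ 'Filter1' ] }, { 'tools': [ 'Filter1' ] } )
assert metadata_differs( { 'tools': [ 'Filter1' ] }, {} )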
diff -r 33d4ab5cc642f2bdc5ad8670abf4400063bb8415 -r f14ca41c8cf34779d0b89979320488ebe04eb943 test/tool_shed/functional/test_0000_basic_repository_features.py
--- a/test/tool_shed/functional/test_0000_basic_repository_features.py
+++ b/test/tool_shed/functional/test_0000_basic_repository_features.py
@@ -155,3 +155,7 @@
tip_changeset = self.get_repository_tip( repository )
search_fields = dict( tool_id='Filter1', tool_name='filter', tool_version='2.2.0' )
self.search_for_valid_tools( search_fields=search_fields, strings_displayed=[ tip_changeset ], strings_not_displayed=[] )
+ def test_0085_verify_repository_metadata( self ):
+ '''Verify that resetting the metadata does not change it.'''
+ repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ self.verify_unchanged_repository_metadata( repository )
diff -r 33d4ab5cc642f2bdc5ad8670abf4400063bb8415 -r f14ca41c8cf34779d0b89979320488ebe04eb943 test/tool_shed/functional/test_0020_basic_repository_dependencies.py
--- a/test/tool_shed/functional/test_0020_basic_repository_dependencies.py
+++ b/test/tool_shed/functional/test_0020_basic_repository_dependencies.py
@@ -68,3 +68,9 @@
changeset_revision = self.get_repository_tip( datatypes_repository )
strings_displayed = [ datatypes_repository_name, common.test_user_1_name, changeset_revision, 'Repository dependencies' ]
self.display_manage_repository_page( repository, strings_displayed=strings_displayed )
+ def test_0040_verify_repository_metadata( self ):
+ '''Verify that resetting the metadata does not change it.'''
+ emboss_repository = test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+ datatypes_repository = test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
+ self.verify_unchanged_repository_metadata( emboss_repository )
+ self.verify_unchanged_repository_metadata( datatypes_repository )
diff -r 33d4ab5cc642f2bdc5ad8670abf4400063bb8415 -r f14ca41c8cf34779d0b89979320488ebe04eb943 test/tool_shed/functional/test_0030_repository_dependency_revisions.py
--- a/test/tool_shed/functional/test_0030_repository_dependency_revisions.py
+++ b/test/tool_shed/functional/test_0030_repository_dependency_revisions.py
@@ -131,3 +131,11 @@
self.display_manage_repository_page( repository,
changeset_revision=changeset_revision,
strings_displayed=[ str( metadata ) for metadata in repository_dependency_metadata ] )
+ def test_0040_verify_repository_metadata( self ):
+ '''Verify that resetting the metadata does not change it.'''
+ emboss_repository = test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+ emboss_5_repository = test_db_util.get_repository_by_name_and_owner( emboss_5_repository_name, common.test_user_1_name )
+ emboss_6_repository = test_db_util.get_repository_by_name_and_owner( emboss_6_repository_name, common.test_user_1_name )
+ datatypes_repository = test_db_util.get_repository_by_name_and_owner( datatypes_repository_name, common.test_user_1_name )
+ for repository in [ emboss_repository, emboss_5_repository, emboss_6_repository, datatypes_repository ]:
+ self.verify_unchanged_repository_metadata( repository )
diff -r 33d4ab5cc642f2bdc5ad8670abf4400063bb8415 -r f14ca41c8cf34779d0b89979320488ebe04eb943 test/tool_shed/functional/test_0040_repository_circular_dependencies.py
--- a/test/tool_shed/functional/test_0040_repository_circular_dependencies.py
+++ b/test/tool_shed/functional/test_0040_repository_circular_dependencies.py
@@ -97,3 +97,9 @@
# In this case, the displayed dependency will specify the tip revision, but this will not always be the case.
self.check_repository_dependency( filtering_repository, freebayes_repository, self.get_repository_tip( freebayes_repository ) )
self.check_repository_dependency( freebayes_repository, filtering_repository, self.get_repository_tip( filtering_repository ) )
+ def test_0035_verify_repository_metadata( self ):
+ '''Verify that resetting the metadata does not change it.'''
+ freebayes_repository = test_db_util.get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
+ filtering_repository = test_db_util.get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
+ for repository in [ freebayes_repository, filtering_repository ]:
+ self.verify_unchanged_repository_metadata( repository )
diff -r 33d4ab5cc642f2bdc5ad8670abf4400063bb8415 -r f14ca41c8cf34779d0b89979320488ebe04eb943 test/tool_shed/functional/test_0050_circular_n_levels.py
--- a/test/tool_shed/functional/test_0050_circular_n_levels.py
+++ b/test/tool_shed/functional/test_0050_circular_n_levels.py
@@ -147,3 +147,11 @@
for changeset_revision in self.get_repository_metadata_revisions( emboss_repository ):
self.check_repository_dependency( freebayes_repository, emboss_repository, changeset_revision )
self.display_manage_repository_page( freebayes_repository, strings_displayed=[ 'Freebayes depends on the filtering repository.' ] )
+ def test_0035_verify_repository_metadata( self ):
+ '''Verify that resetting the metadata does not change it.'''
+ emboss_datatypes_repository = test_db_util.get_repository_by_name_and_owner( emboss_datatypes_repository_name, common.test_user_1_name )
+ emboss_repository = test_db_util.get_repository_by_name_and_owner( emboss_repository_name, common.test_user_1_name )
+ filtering_repository = test_db_util.get_repository_by_name_and_owner( filtering_repository_name, common.test_user_1_name )
+ freebayes_repository = test_db_util.get_repository_by_name_and_owner( freebayes_repository_name, common.test_user_1_name )
+ for repository in [ emboss_datatypes_repository, emboss_repository, freebayes_repository, filtering_repository ]:
+ self.verify_unchanged_repository_metadata( repository )
diff -r 33d4ab5cc642f2bdc5ad8670abf4400063bb8415 -r f14ca41c8cf34779d0b89979320488ebe04eb943 test/tool_shed/functional/test_0060_workflows.py
--- a/test/tool_shed/functional/test_0060_workflows.py
+++ b/test/tool_shed/functional/test_0060_workflows.py
@@ -54,3 +54,7 @@
commit_message="Uploaded filtering 2.2.0",
remove_repo_files_not_in_tar='No' )
self.load_workflow_image( repository, workflow_name, strings_not_displayed=[ '#EBBCB2' ] )
+ def test_0025_verify_repository_metadata( self ):
+ '''Verify that resetting the metadata does not change it.'''
+ repository = test_db_util.get_repository_by_name_and_owner( repository_name, common.test_user_1_name )
+ self.verify_unchanged_repository_metadata( repository )
diff -r 33d4ab5cc642f2bdc5ad8670abf4400063bb8415 -r f14ca41c8cf34779d0b89979320488ebe04eb943 test/tool_shed/functional/test_1000_install_basic_repository.py
--- a/test/tool_shed/functional/test_1000_install_basic_repository.py
+++ b/test/tool_shed/functional/test_1000_install_basic_repository.py
@@ -28,4 +28,7 @@
self.verify_installed_repository_on_browse_page( installed_repository )
self.display_installed_repository_manage_page( installed_repository,
strings_displayed=[ 'Installed tool shed repository', 'Tools', 'Filter1' ] )
- self.verify_tool_metadata_for_installed_repository( installed_repository )
\ No newline at end of file
+ self.verify_tool_metadata_for_installed_repository( installed_repository )
+ def test_0025_verify_installed_repository_metadata( self ):
+ '''Verify that resetting the metadata on an installed repository does not change the metadata.'''
+ self.verify_installed_repository_metadata_unchanged( 'filtering_0000', common.test_user_1_name )
diff -r 33d4ab5cc642f2bdc5ad8670abf4400063bb8415 -r f14ca41c8cf34779d0b89979320488ebe04eb943 test/tool_shed/functional/test_1010_install_repository_with_tool_dependencies.py
--- a/test/tool_shed/functional/test_1010_install_repository_with_tool_dependencies.py
+++ b/test/tool_shed/functional/test_1010_install_repository_with_tool_dependencies.py
@@ -25,3 +25,10 @@
strings_displayed=[ 'Installed tool shed repository', 'Tools', 'FreeBayes' ] )
self.check_installed_repository_tool_dependencies( installed_repository, dependencies_installed=False )
self.verify_tool_metadata_for_installed_repository( installed_repository )
+ def test_0020_verify_installed_repository_metadata( self ):
+ '''Verify that resetting the metadata on an installed repository does not change the metadata.'''
+ self.verify_installed_repository_metadata_unchanged( 'freebayes_0010', common.test_user_1_name )
+ def test_0025_verify_sample_files( self ):
+ '''Verify that the installed repository populated shed_tool_data_table.xml and the sample files.'''
+ self.verify_installed_repository_data_table_entries( data_tables=[ 'sam_fa_indexes' ] )
+
diff -r 33d4ab5cc642f2bdc5ad8670abf4400063bb8415 -r f14ca41c8cf34779d0b89979320488ebe04eb943 test/tool_shed/functional/test_1020_install_repository_with_repository_dependencies.py
--- a/test/tool_shed/functional/test_1020_install_repository_with_repository_dependencies.py
+++ b/test/tool_shed/functional/test_1020_install_repository_with_repository_dependencies.py
@@ -25,3 +25,6 @@
strings_displayed=[ 'Installed tool shed repository', 'Tools', 'antigenic' ] )
self.check_installed_repository_tool_dependencies( installed_repository, dependencies_installed=False )
self.verify_tool_metadata_for_installed_repository( installed_repository )
+ def test_0020_verify_installed_repository_metadata( self ):
+ '''Verify that resetting the metadata on an installed repository does not change the metadata.'''
+ self.verify_installed_repository_metadata_unchanged( 'emboss_0020', common.test_user_1_name )
diff -r 33d4ab5cc642f2bdc5ad8670abf4400063bb8415 -r f14ca41c8cf34779d0b89979320488ebe04eb943 test/tool_shed/functional/test_1030_install_repository_with_dependency_revisions.py
--- a/test/tool_shed/functional/test_1030_install_repository_with_dependency_revisions.py
+++ b/test/tool_shed/functional/test_1030_install_repository_with_dependency_revisions.py
@@ -27,3 +27,7 @@
strings_displayed=[ 'Installed tool shed repository', 'Tools', 'antigenic' ] )
self.check_installed_repository_tool_dependencies( installed_repository, dependencies_installed=False )
self.verify_tool_metadata_for_installed_repository( installed_repository )
+ self.update_installed_repository( installed_repository, strings_displayed=[ "there are no updates available" ] )
+ def test_0025_verify_installed_repository_metadata( self ):
+ '''Verify that resetting the metadata on an installed repository does not change the metadata.'''
+ self.verify_installed_repository_metadata_unchanged( 'emboss_0030', common.test_user_1_name )
diff -r 33d4ab5cc642f2bdc5ad8670abf4400063bb8415 -r f14ca41c8cf34779d0b89979320488ebe04eb943 test/tool_shed/functional_tests.py
--- a/test/tool_shed/functional_tests.py
+++ b/test/tool_shed/functional_tests.py
@@ -68,6 +68,14 @@
</toolbox>
'''
+tool_conf_xml = '''<?xml version="1.0"?>
+<toolbox>
+ <section name="Get Data" id="getext">
+ <tool file="data_source/upload.xml"/>
+ </section>
+</toolbox>
+'''
+
tool_data_table_conf_xml_template = '''<?xml version="1.0"?><tables></tables>
@@ -113,8 +121,14 @@
shed_db_path = os.path.join( tempdir, 'database' )
shed_tool_data_table_conf_file = os.environ.get( 'TOOL_SHED_TEST_TOOL_DATA_TABLE_CONF', os.path.join( tool_shed_test_tmp_dir, 'shed_tool_data_table_conf.xml' ) )
galaxy_tool_data_table_conf_file = os.environ.get( 'GALAXY_TEST_TOOL_DATA_TABLE_CONF', os.path.join( tool_shed_test_tmp_dir, 'tool_data_table_conf.xml' ) )
- galaxy_shed_tool_conf_file = os.environ.get( 'GALAXY_TEST_TOOL_CONF', os.path.join( tool_shed_test_tmp_dir, 'test_tool_conf.xml' ) )
+ galaxy_tool_conf_file = os.environ.get( 'GALAXY_TEST_TOOL_CONF', os.path.join( tool_shed_test_tmp_dir, 'test_tool_conf.xml' ) )
+ galaxy_shed_tool_conf_file = os.environ.get( 'GALAXY_TEST_SHED_TOOL_CONF', os.path.join( tool_shed_test_tmp_dir, 'test_shed_tool_conf.xml' ) )
galaxy_tool_sheds_conf_file = os.environ.get( 'GALAXY_TEST_SHED_TOOLS_CONF', os.path.join( tool_shed_test_tmp_dir, 'test_sheds_conf.xml' ) )
+ if 'GALAXY_TEST_TOOL_DATA_PATH' in os.environ:
+ tool_data_path = os.environ.get( 'GALAXY_TEST_TOOL_DATA_PATH' )
+ else:
+ tool_data_path = tempfile.mkdtemp( dir=tool_shed_test_tmp_dir )
+ os.environ[ 'GALAXY_TEST_TOOL_DATA_PATH' ] = tool_data_path
if 'GALAXY_TEST_DBPATH' in os.environ:
galaxy_db_path = os.environ[ 'GALAXY_TEST_DBPATH' ]
else:
@@ -153,7 +167,7 @@
file( galaxy_tool_data_table_conf_file, 'w' ).write( tool_data_table_conf_xml_template )
# Generate the shed_tool_data_table_conf.xml file.
file( shed_tool_data_table_conf_file, 'w' ).write( tool_data_table_conf_xml_template )
-
+ os.environ[ 'TOOL_SHED_TEST_TOOL_DATA_TABLE_CONF' ] = shed_tool_data_table_conf_file
# ---- Build Tool Shed Application --------------------------------------------------
toolshedapp = None
global_conf = { '__file__' : 'community_wsgi.ini.sample' }
@@ -231,6 +245,8 @@
# ---- Optionally start up a Galaxy instance ------------------------------------------------------
if 'TOOL_SHED_TEST_OMIT_GALAXY' not in os.environ:
+ # Generate the tool_conf.xml file.
+ file( galaxy_tool_conf_file, 'w' ).write( tool_conf_xml )
# Generate the shed_tool_conf.xml file.
tool_sheds_conf_template_parser = string.Template( tool_sheds_conf_xml_template )
tool_sheds_conf_xml = tool_sheds_conf_template_parser.safe_substitute( shed_url=tool_shed_test_host, shed_port=tool_shed_test_port )
@@ -251,10 +267,11 @@
database_engine_option_pool_size = '10',
file_path = galaxy_file_path,
tool_path = tool_path,
+ tool_data_path = tool_data_path,
tool_dependency_dir=galaxy_tool_dependency_dir,
shed_tool_path=galaxy_shed_tool_path,
update_integrated_tool_panel = False,
- tool_config_file = galaxy_shed_tool_conf_file,
+ tool_config_file = [ galaxy_tool_conf_file, galaxy_shed_tool_conf_file ],
tool_sheds_config_file = galaxy_tool_sheds_conf_file,
datatype_converters_config_file = "datatype_converters_conf.xml.sample",
tool_parse_help = False,
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: carlfeberhard: history panel: do not render body html until hda is expanded
by Bitbucket 14 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/33d4ab5cc642/
changeset: 33d4ab5cc642
user: carlfeberhard
date: 2012-12-14 21:41:51
summary: history panel: do not render body html until hda is expanded
affected #: 1 file
diff -r f45730d9e8904783f5f8a29cf2a2a0c93e5a2047 -r 33d4ab5cc642f2bdc5ad8670abf4400063bb8415 static/scripts/mvc/dataset/hda-base.js
--- a/static/scripts/mvc/dataset/hda-base.js
+++ b/static/scripts/mvc/dataset/hda-base.js
@@ -77,6 +77,7 @@
itemWrapper.append( this._render_warnings() );
itemWrapper.append( this._render_titleBar() );
+
this.body = $( this._render_body() );
itemWrapper.append( this.body );
@@ -321,18 +322,31 @@
},
// ......................................................................... state body renderers
- /** Render the (expanded) body of an HDA, dispatching to other functions based on the HDA state
+ /** Render the enclosing div of the hda body and, if expanded, the html in the body
* @returns {jQuery} rendered DOM
*/
//TODO: only render these on expansion (or already expanded)
_render_body : function(){
- //this.log( this + '_render_body' );
-
var body = $( '<div/>' )
.attr( 'id', 'info-' + this.model.get( 'id' ) )
.addClass( 'historyItemBody' )
- .attr( 'style', 'display: block' );
+ .attr( 'style', 'display: none' );
+ if( this.expanded ){
+ // only render the body html if it's being shown
+ this._render_body_html( body );
+ body.show();
+ }
+ return body;
+ },
+
+ /** Render the (expanded) body of an HDA, dispatching to other functions based on the HDA state
+ * @param {jQuery} body the body element to append the html to
+ */
+ //TODO: only render these on expansion (or already expanded)
+ _render_body_html : function( body ){
+ //this.log( this + '_render_body' );
+ body.html( '' );
//TODO: not a fan of this dispatch
switch( this.model.get( 'state' ) ){
case HistoryDatasetAssociation.STATES.NEW :
@@ -375,13 +389,6 @@
body.append( $( '<div>Error: unknown dataset state "' + this.model.get( 'state' ) + '".</div>' ) );
}
body.append( '<div style="clear: both"></div>' );
-
- if( this.expanded ){
- body.show();
- } else {
- body.hide();
- }
- return body;
},
/** Render inaccessible, not-owned by curr user.
@@ -514,17 +521,17 @@
* @fires body-collapsed when a body has been collapsed
*/
toggleBodyVisibility : function( event, expanded ){
- var hdaView = this,
- $body = this.$el.find( '.historyItemBody' );
- expanded = ( expanded === undefined )?( !$body.is( ':visible' ) ):( expanded );
+ var hdaView = this;
+ this.expanded = ( expanded === undefined )?( !this.body.is( ':visible' ) ):( expanded );
//this.log( 'toggleBodyVisibility, expanded:', expanded, '$body:', $body );
- if( expanded ){
- $body.slideDown( 'fast', function(){
+ if( this.expanded ){
+ hdaView._render_body_html( hdaView.body );
+ this.body.slideDown( 'fast', function(){
hdaView.trigger( 'body-expanded', hdaView.model.get( 'id' ) );
});
} else {
- $body.slideUp( 'fast', function(){
+ this.body.slideUp( 'fast', function(){
hdaView.trigger( 'body-collapsed', hdaView.model.get( 'id' ) );
});
}
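The structural change is easier to see outside the DOM code: render the cheap title bar immediately and build the expensive body only when it is first shown. A minimal Python transposition of that pattern (class and names invented for illustration; the actual view is the Backbone code above):

class CollapsiblePanel:
    """Toy stand-in for the HDA view: body HTML is built only when shown."""

    def __init__(self, model):
        self.model = model
        self.expanded = False
        self._body_html = ''  # stays empty until first expansion

    def render(self):
        # The title bar always renders; the body div starts hidden and empty.
        title = '<div class="title">%s</div>' % self.model['name']
        style = 'display: block' if self.expanded else 'display: none'
        return title + '<div class="body" style="%s">%s</div>' % (style, self._body_html)

    def toggle(self):
        self.expanded = not self.expanded
        if self.expanded:
            # Rebuild on each expansion, mirroring _render_body_html in the diff,
            # so state changes made while collapsed are picked up.
            self._body_html = 'state: %s' % self.model['state']

panel = CollapsiblePanel({'name': 'dataset 1', 'state': 'ok'})
print(panel.render())  # hidden, empty body
panel.toggle()
print(panel.render())  # visible body with rendered content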
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: carlfeberhard: Make hda name text selectable
by Bitbucket 14 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/f45730d9e890/
changeset: f45730d9e890
user: carlfeberhard
date: 2012-12-14 20:53:41
summary: Make hda name text selectable
affected #: 1 file
diff -r 4b4ea995233684e49259a8126257a2d226b8a026 -r f45730d9e8904783f5f8a29cf2a2a0c93e5a2047 templates/root/alternate_history.mako
--- a/templates/root/alternate_history.mako
+++ b/templates/root/alternate_history.mako
@@ -537,9 +537,6 @@
.historyItemTitle {
text-decoration: underline;
cursor: pointer;
- -webkit-user-select: none;
- -moz-user-select: none;
- -khtml-user-select: none;
}
.historyItemTitle:hover {
text-decoration: underline;
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: jgoecks: Remove deprecated parameters from Tophat2 wrapper.
by Bitbucket 14 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/4b4ea9952336/
changeset: 4b4ea9952336
user: jgoecks
date: 2012-12-14 19:10:39
summary: Remove deprecated parameters from Tophat2 wrapper.
affected #: 2 files
diff -r 52ecc79edd361051da70e81d63cb670a3a265dc7 -r 4b4ea995233684e49259a8126257a2d226b8a026 tools/ngs_rna/tophat2_wrapper.py
--- a/tools/ngs_rna/tophat2_wrapper.py
+++ b/tools/ngs_rna/tophat2_wrapper.py
@@ -20,8 +20,6 @@
where each end is 50bp, you should set -r to be 200. There is no default, \
and this parameter is required for paired end runs.')
parser.add_option( '', '--mate-std-dev', dest='mate_std_dev', help='Standard deviation of distribution on inner distances between mate pairs.' )
- parser.add_option( '-n', '--transcriptome-mismatches', dest='transcriptome_mismatches' )
- parser.add_option( '', '--genome-read-mismatches', dest='genome_read_mismatches' )
parser.add_option( '', '--read-mismatches', dest='read_mismatches' )
parser.add_option( '', '--bowtie-n', action="store_true", dest='bowtie_n' )
parser.add_option( '', '--report-discordant-pair-alignments', action="store_true", dest='report_discordant_pairs' )
@@ -171,10 +169,6 @@
# need to warn user of this fact
#sys.stdout.write( "Max insertion length and max deletion length options don't work in Tophat v1.2.0\n" )
- if options.transcriptome_mismatches:
- opts += ' --transcriptome-mismatches %i' % int( options.transcriptome_mismatches )
- if options.genome_read_mismatches:
- opts += ' --genome-read-mismatches %i' % int( options.genome_read_mismatches )
if options.read_mismatches:
opts += ' --read-mismatches %i' % int( options.read_mismatches )
if options.bowtie_n:
diff -r 52ecc79edd361051da70e81d63cb670a3a265dc7 -r 4b4ea995233684e49259a8126257a2d226b8a026 tools/ngs_rna/tophat2_wrapper.xml
--- a/tools/ngs_rna/tophat2_wrapper.xml
+++ b/tools/ngs_rna/tophat2_wrapper.xml
@@ -45,8 +45,6 @@
## Set params.
--settings=$params.settingsType
#if $params.settingsType == "full":
- -n $params.transcriptome_mismatches
- --genome-read-mismatches $params.genome_read_mismatches
--read-mismatches $params.read_mismatches
#if str($params.bowtie_n) == "Yes":
--bowtie-n
@@ -167,8 +165,6 @@
<option value="fr-firststrand">FR First Strand</option><option value="fr-secondstrand">FR Second Strand</option></param>
- <param name="transcriptome_mismatches" type="integer" value="2" label="Transcriptome mismatches" help="Maximum number of mismatches allowed when reads are aligned to the transcriptome. When Bowtie2 is used, this number is also used to decide whether or not to further re-align some of the transcriptome-mapped reads to the genome. If the alignment score of the best alignment among multiple candidates for a read is lower than 'bowtie2-min-score', which is internally defined as (max_penalty - 1) * max_mismatches, then the reads will be kept for re-alignment through the rest of the pipeline. You can specify max_penalty via '--b2-mp' option." />
- <param name="genome_read_mismatches" type="integer" value="2" label="Genome read mismatches" help="When whole reads are first mapped on the genome, this many mismatches in each read alignment are allowed. The default is 2. This number is also used to decide whether to further re-align some of the reads (by splitting them into segments) with a similar scoring threshold scheme as described for the --transcriptome-mismatches option above." /><param name="read_mismatches" type="integer" value="2" label="Final read mismatches" help="Final read alignments having more than these many mismatches are discarded." /><param name="bowtie_n" type="select" label="Use bowtie -n mode"><option selected="true" value="No">No</option>
@@ -425,8 +421,6 @@
<param name="ownFile" value="tophat_in1.fasta"/><param name="settingsType" value="full"/><param name="library_type" value="FR Unstranded"/>
- <param name="transcriptome_mismatches" value="2"/>
- <param name="genome_read_mismatches" value="2"/><param name="read_mismatches" value="2"/><param name="bowtie_n" value="No"/><param name="anchor_length" value="8"/>
@@ -478,8 +472,6 @@
<param name="mate_inner_distance" value="20"/><param name="settingsType" value="full"/><param name="library_type" value="FR Unstranded"/>
- <param name="transcriptome_mismatches" value="3"/>
- <param name="genome_read_mismatches" value="4"/><param name="read_mismatches" value="5"/><param name="bowtie_n" value="Yes"/><param name="mate_std_dev" value="20"/>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: jgoecks: Allow spaces in Cuffdiff group names.
by Bitbucket 14 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/52ecc79edd36/
changeset: 52ecc79edd36
user: jgoecks
date: 2012-12-14 18:47:44
summary: Allow spaces in Cuffdiff group names.
affected #: 2 files
diff -r 3a71d40d7b9922e13c1e2d9d50d4ed6b85408137 -r 52ecc79edd361051da70e81d63cb670a3a265dc7 tools/ngs_rna/cuffdiff_wrapper.py
--- a/tools/ngs_rna/cuffdiff_wrapper.py
+++ b/tools/ngs_rna/cuffdiff_wrapper.py
@@ -172,7 +172,7 @@
if options.groups:
cmd += " --labels "
for label in options.labels:
- cmd += label + ","
+ cmd += '"%s",' % label
cmd = cmd[:-1]
cmd += " " + options.inputA + " "
diff -r 3a71d40d7b9922e13c1e2d9d50d4ed6b85408137 -r 52ecc79edd361051da70e81d63cb670a3a265dc7 tools/ngs_rna/cuffdiff_wrapper.xml
--- a/tools/ngs_rna/cuffdiff_wrapper.xml
+++ b/tools/ngs_rna/cuffdiff_wrapper.xml
@@ -59,7 +59,7 @@
## Replicates.
--labels
#for $group in $group_analysis.groups
- ${group.group}
+ "${group.group}"
#end for
--files
#for $group in $group_analysis.groups
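The fix quotes each label so the shell does not split group names on spaces. A stricter variant of the same idea, using shlex.quote in place of the bare double quotes from the commit (label values invented):

import shlex

labels = ['control group', 'treated group']
cmd = 'cuffdiff --labels ' + ','.join(shlex.quote(label) for label in labels)
print(cmd)  # cuffdiff --labels 'control group','treated group'

After shell word-splitting, the labels still reach cuffdiff as the single comma-separated argument it expects.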
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: jgoecks: History importing/exporting: (a) create BAIs when importing BAM datasets and (b) bug fixes and code cleanup.
by Bitbucket 14 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/3a71d40d7b99/
changeset: 3a71d40d7b99
user: jgoecks
date: 2012-12-14 17:48:27
summary: History importing/exporting: (a) create BAIs when importing BAM datasets and (b) bug fixes and code cleanup.
affected #: 4 files
diff -r 4929a37823b5454178bf811b6b65f2216c7ac4c3 -r 3a71d40d7b9922e13c1e2d9d50d4ed6b85408137 lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py
+++ b/lib/galaxy/jobs/__init__.py
@@ -686,7 +686,7 @@
if self.app.config.set_metadata_externally:
self.external_output_metadata.cleanup_external_metadata( self.sa_session )
galaxy.tools.imp_exp.JobExportHistoryArchiveWrapper( self.job_id ).cleanup_after_job( self.sa_session )
- galaxy.tools.imp_exp.JobImportHistoryArchiveWrapper( self.job_id ).cleanup_after_job( self.sa_session )
+ galaxy.tools.imp_exp.JobImportHistoryArchiveWrapper( self.app, self.job_id ).cleanup_after_job()
galaxy.tools.genome_index.GenomeIndexToolWrapper( self.job_id ).postprocessing( self.sa_session, self.app )
self.app.object_store.delete(self.get_job(), base_dir='job_work', entire_dir=True, dir_only=True, extra_dir=str(self.job_id))
except:
diff -r 4929a37823b5454178bf811b6b65f2216c7ac4c3 -r 3a71d40d7b9922e13c1e2d9d50d4ed6b85408137 lib/galaxy/tools/actions/history_imp_exp.py
--- a/lib/galaxy/tools/actions/history_imp_exp.py
+++ b/lib/galaxy/tools/actions/history_imp_exp.py
@@ -34,8 +34,7 @@
archive_dir = os.path.abspath( tempfile.mkdtemp() )
jiha = trans.app.model.JobImportHistoryArchive( job=job, archive_dir=archive_dir )
trans.sa_session.add( jiha )
- job_wrapper = JobImportHistoryArchiveWrapper( job )
-
+
#
# Add parameters to job_parameter table.
#
diff -r 4929a37823b5454178bf811b6b65f2216c7ac4c3 -r 3a71d40d7b9922e13c1e2d9d50d4ed6b85408137 lib/galaxy/tools/actions/metadata.py
--- a/lib/galaxy/tools/actions/metadata.py
+++ b/lib/galaxy/tools/actions/metadata.py
@@ -13,17 +13,14 @@
"""
Execute using a web transaction.
"""
- user_id = None
- if trans.user:
- user_id = trans.user.id
job, odict = self.execute_via_app( tool, trans.app, trans.get_galaxy_session().id,
- trans.history.id, user_id, incoming, set_output_hid,
+ trans.history.id, trans.user, incoming, set_output_hid,
overwrite, history, job_params )
# FIXME: can remove this when logging in execute_via_app method.
trans.log_event( "Added set external metadata job to the job queue, id: %s" % str(job.id), tool_id=job.tool_id )
return job, odict
- def execute_via_app( self, tool, app, session_id, history_id, user_id = None,
+ def execute_via_app( self, tool, app, session_id, history_id, user=None,
incoming = {}, set_output_hid = False, overwrite = True,
history=None, job_params=None ):
"""
@@ -50,8 +47,8 @@
job.session_id = session_id
job.history_id = history_id
job.tool_id = tool.id
- if user_id:
- job.user_id = user_id
+ if user:
+ job.user_id = user.id
if job_params:
job.params = to_json_string( job_params )
start_job_state = job.state #should be job.states.NEW
diff -r 4929a37823b5454178bf811b6b65f2216c7ac4c3 -r 3a71d40d7b9922e13c1e2d9d50d4ed6b85408137 lib/galaxy/tools/imp_exp/__init__.py
--- a/lib/galaxy/tools/imp_exp/__init__.py
+++ b/lib/galaxy/tools/imp_exp/__init__.py
@@ -3,7 +3,7 @@
from galaxy.tools.parameters.basic import UnvalidatedValue
from galaxy.web.framework.helpers import to_unicode
from galaxy.model.item_attrs import UsesAnnotations
-from galaxy.util.json import *
+from galaxy.util.json import from_json_string, to_json_string
from galaxy.web.base.controller import UsesHistoryMixin
log = logging.getLogger(__name__)
@@ -47,10 +47,12 @@
Class provides support for performing jobs that import a history from
an archive.
"""
- def __init__( self, job_id ):
+ def __init__( self, app, job_id ):
+ self.app = app
self.job_id = job_id
+ self.sa_session = self.app.model.context
- def cleanup_after_job( self, db_session ):
+ def cleanup_after_job( self ):
""" Set history, datasets, and jobs' attributes and clean up archive directory. """
#
@@ -88,7 +90,7 @@
# Import history.
#
- jiha = db_session.query( model.JobImportHistoryArchive ).filter_by( job_id=self.job_id ).first()
+ jiha = self.sa_session.query( model.JobImportHistoryArchive ).filter_by( job_id=self.job_id ).first()
if jiha:
try:
archive_dir = jiha.archive_dir
@@ -107,13 +109,13 @@
new_history.importing = True
new_history.hid_counter = history_attrs['hid_counter']
new_history.genome_build = history_attrs['genome_build']
- db_session.add( new_history )
+ self.sa_session.add( new_history )
jiha.history = new_history
- db_session.flush()
+ self.sa_session.flush()
# Add annotation, tags.
if user:
- self.add_item_annotation( db_session, user, new_history, history_attrs[ 'annotation' ] )
+ self.add_item_annotation( self.sa_session, user, new_history, history_attrs[ 'annotation' ] )
"""
TODO: figure out to how add tags to item.
for tag, value in history_attrs[ 'tags' ].items():
@@ -153,16 +155,16 @@
metadata = metadata,
history = new_history,
create_dataset = True,
- sa_session = db_session )
+ sa_session = self.sa_session )
hda.state = hda.states.OK
- db_session.add( hda )
- db_session.flush()
+ self.sa_session.add( hda )
+ self.sa_session.flush()
new_history.add_dataset( hda, genome_build = None )
hda.hid = dataset_attrs['hid'] # Overwrite default hid set when HDA added to history.
# TODO: Is there a way to recover permissions? Is this needed?
#permissions = trans.app.security_agent.history_get_default_permissions( new_history )
#trans.app.security_agent.set_all_dataset_permissions( hda.dataset, permissions )
- db_session.flush()
+ self.sa_session.flush()
# Do security check and move/copy dataset data.
temp_dataset_file_name = \
@@ -177,13 +179,25 @@
# Set tags, annotations.
if user:
- self.add_item_annotation( db_session, user, hda, dataset_attrs[ 'annotation' ] )
+ self.add_item_annotation( self.sa_session, user, hda, dataset_attrs[ 'annotation' ] )
# TODO: Set tags.
"""
for tag, value in dataset_attrs[ 'tags' ].items():
trans.app.tag_handler.apply_item_tags( trans, trans.user, hda, get_tag_str( tag, value ) )
- db_session.flush()
+ self.sa_session.flush()
"""
+
+ # Although metadata is set above, need to set metadata to recover BAI for BAMs.
+ if hda.extension == 'bam':
+ if self.app.config.set_metadata_externally:
+ self.app.datatypes_registry.set_external_metadata_tool.tool_action.execute_via_app(
+ self.app.datatypes_registry.set_external_metadata_tool, self.app, jiha.job.session_id,
+ new_history.id, jiha.job.user, incoming={ 'input1': hda }, overwrite=False
+ )
+ else:
+ message = 'Attributes updated'
+ hda.set_meta()
+ hda.datatype.after_setting_metadata( hda )
#
# Create jobs.
@@ -198,7 +212,7 @@
""" Hook to 'decode' an HDA; method uses history and HID to get the HDA represented by
the encoded object. This only works because HDAs are created above. """
if obj_dct.get( '__HistoryDatasetAssociation__', False ):
- return db_session.query( model.HistoryDatasetAssociation ) \
+ return self.sa_session.query( model.HistoryDatasetAssociation ) \
.filter_by( history=new_history, hid=obj_dct['hid'] ).first()
return obj_dct
jobs_attrs = from_json_string( jobs_attr_str, object_hook=as_hda )
@@ -214,8 +228,8 @@
imported_job.tool_version = job_attrs[ 'tool_version' ]
imported_job.set_state( job_attrs[ 'state' ] )
imported_job.imported = True
- db_session.add( imported_job )
- db_session.flush()
+ self.sa_session.add( imported_job )
+ self.sa_session.flush()
class HistoryDatasetAssociationIDEncoder( simplejson.JSONEncoder ):
""" Custom JSONEncoder for a HistoryDatasetAssociation that encodes an HDA as its ID. """
@@ -235,7 +249,7 @@
# Transform parameter values when necessary.
if isinstance( value, model.HistoryDatasetAssociation ):
# HDA input: use hid to find input.
- input_hda = db_session.query( model.HistoryDatasetAssociation ) \
+ input_hda = self.sa_session.query( model.HistoryDatasetAssociation ) \
.filter_by( history=new_history, hid=value.hid ).first()
value = input_hda.id
#print "added parameter %s-->%s to job %i" % ( name, value, imported_job.id )
@@ -246,22 +260,23 @@
# Connect jobs to output datasets.
for output_hid in job_attrs[ 'output_datasets' ]:
#print "%s job has output dataset %i" % (imported_job.id, output_hid)
- output_hda = db_session.query( model.HistoryDatasetAssociation ) \
+ output_hda = self.sa_session.query( model.HistoryDatasetAssociation ) \
.filter_by( history=new_history, hid=output_hid ).first()
if output_hda:
imported_job.add_output_dataset( output_hda.name, output_hda )
- # Done importing.
- new_history.importing = False
+ self.sa_session.flush()
- db_session.flush()
+ # Done importing.
+ new_history.importing = False
+ self.sa_session.flush()
# Cleanup.
if os.path.exists( archive_dir ):
shutil.rmtree( archive_dir )
except Exception, e:
jiha.job.stderr += "Error cleaning up history import job: %s" % e
- db_session.flush()
+ self.sa_session.flush()
class JobExportHistoryArchiveWrapper( object, UsesHistoryMixin, UsesAnnotations ):
"""
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: carlfeberhard: api/histories, show: remove get_api_value for each dataset in get_dataset_state_summaries
by Bitbucket 14 Dec '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/4929a37823b5/
changeset: 4929a37823b5
user: carlfeberhard
date: 2012-12-14 17:05:43
summary: api/histories, show: remove get_api_value for each dataset in get_dataset_state_summaries
affected #: 1 file
diff -r 742aac57ec941aa89ac7ca07548bf531127bc31b -r 4929a37823b5454178bf811b6b65f2216c7ac4c3 lib/galaxy/webapps/galaxy/api/histories.py
--- a/lib/galaxy/webapps/galaxy/api/histories.py
+++ b/lib/galaxy/webapps/galaxy/api/histories.py
@@ -75,14 +75,10 @@
# cycle through datasets saving each ds' state
for dataset in datasets:
- dataset_dict = dataset.get_api_value( view='element' )
- item_state = dataset_dict[ 'state' ]
-
- if not dataset_dict['deleted']:
+ item_state = dataset.state
+ if not dataset.deleted:
state_counts[ item_state ] = state_counts[ item_state ] + 1
-
- state_ids[ item_state ].append( trans.security.encode_id( dataset_dict[ 'id' ] ) )
-
+ state_ids[ item_state ].append( trans.security.encode_id( dataset.id ) )
return ( state_counts, state_ids )
# try to load the history, by most_recently_used or the given id
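The simplification reads state and deleted straight off the model instead of serializing every dataset through get_api_value first. A minimal sketch of the resulting summary logic (attribute names mirror the diff; the Dataset stand-in is invented for the example):

from collections import defaultdict, namedtuple

Dataset = namedtuple('Dataset', 'id state deleted')

def summarize_dataset_states(datasets):
    """Return per-state counts (non-deleted only) and per-state id lists."""
    state_counts = defaultdict(int)
    state_ids = defaultdict(list)
    for dataset in datasets:
        if not dataset.deleted:
            state_counts[dataset.state] += 1
        state_ids[dataset.state].append(dataset.id)
    return dict(state_counts), dict(state_ids)

counts, ids = summarize_dataset_states([Dataset(1, 'ok', False), Dataset(2, 'error', True)])
print(counts)  # {'ok': 1}
print(ids)     # {'ok': [1], 'error': [2]}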
Repository URL: https://bitbucket.org/galaxy/galaxy-central/