galaxy-commits
Threads by month
- ----- 2024 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2023 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2022 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2021 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2020 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2019 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2018 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2017 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2016 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2015 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2014 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2013 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2012 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2011 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2010 -----
- December
- November
- October
- September
- August
- July
- June
- May
July 2014
- 1 participants
- 146 discussions
commit/galaxy-central: greg: Eliminate the use of trans in all remaining functions in the metadata_util.py tool shed module.
by commits-noreply@bitbucket.org 15 Jul '14
by commits-noreply@bitbucket.org 15 Jul '14
15 Jul '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/70272fb20051/
Changeset: 70272fb20051
User: greg
Date: 2014-07-15 22:44:02
Summary: Eliminate the use of trans in all remaining functions in the metadata_util.py tool shed module.
Affected #: 5 files
diff -r 0874eb058a0cb79508672cc5ebf25a4ce5063d75 -r 70272fb2005161f34e13173df323b8e5363a9319 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -779,7 +779,7 @@
trans.install_model.context.add( repository )
trans.install_model.context.flush()
message = "The repository information has been updated."
- containers_dict = metadata_util.populate_containers_dict_from_repository_metadata( trans=trans,
+ containers_dict = metadata_util.populate_containers_dict_from_repository_metadata( app=trans.app,
tool_shed_url=tool_shed_url,
tool_path=tool_path,
repository=repository,
@@ -1697,7 +1697,7 @@
@web.require_admin
def reset_metadata_on_selected_installed_repositories( self, trans, **kwd ):
if 'reset_metadata_on_selected_repositories_button' in kwd:
- message, status = metadata_util.reset_metadata_on_selected_repositories( trans, **kwd )
+ message, status = metadata_util.reset_metadata_on_selected_repositories( trans.app, trans.user, **kwd )
else:
message = kwd.get( 'message', '' )
status = kwd.get( 'status', 'done' )
@@ -1796,7 +1796,7 @@
status = 'error'
shed_tool_conf, tool_path, relative_install_dir = suc.get_tool_panel_config_tool_path_install_dir( trans.app, repository )
repo_files_dir = os.path.abspath( os.path.join( relative_install_dir, repository.name ) )
- containers_dict = metadata_util.populate_containers_dict_from_repository_metadata( trans=trans,
+ containers_dict = metadata_util.populate_containers_dict_from_repository_metadata( app=trans.app,
tool_shed_url=tool_shed_url,
tool_path=tool_path,
repository=repository,
diff -r 0874eb058a0cb79508672cc5ebf25a4ce5063d75 -r 70272fb2005161f34e13173df323b8e5363a9319 lib/galaxy/webapps/tool_shed/api/repositories.py
--- a/lib/galaxy/webapps/tool_shed/api/repositories.py
+++ b/lib/galaxy/webapps/tool_shed/api/repositories.py
@@ -429,7 +429,7 @@
repository_id = trans.security.encode_id( repository.id )
try:
invalid_file_tups, metadata_dict = \
- metadata_util.reset_all_metadata_on_repository_in_tool_shed( trans, repository_id )
+ metadata_util.reset_all_metadata_on_repository_in_tool_shed( trans.app, trans.user, repository_id )
if invalid_file_tups:
message = tool_util.generate_message_for_invalid_tools( trans.app,
invalid_file_tups,
@@ -509,7 +509,8 @@
repository_status=[] )
try:
invalid_file_tups, metadata_dict = \
- metadata_util.reset_all_metadata_on_repository_in_tool_shed( trans,
+ metadata_util.reset_all_metadata_on_repository_in_tool_shed( trans.app,
+ trans.user,
trans.security.encode_id( repository.id ) )
if invalid_file_tups:
message = tool_util.generate_message_for_invalid_tools( trans.app,
diff -r 0874eb058a0cb79508672cc5ebf25a4ce5063d75 -r 70272fb2005161f34e13173df323b8e5363a9319 lib/galaxy/webapps/tool_shed/controllers/admin.py
--- a/lib/galaxy/webapps/tool_shed/controllers/admin.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/admin.py
@@ -344,7 +344,7 @@
@web.require_admin
def reset_metadata_on_selected_repositories_in_tool_shed( self, trans, **kwd ):
if 'reset_metadata_on_selected_repositories_button' in kwd:
- message, status = metadata_util.reset_metadata_on_selected_repositories( trans, **kwd )
+ message, status = metadata_util.reset_metadata_on_selected_repositories( trans.app, trans.user, **kwd )
else:
message = util.restore_text( kwd.get( 'message', '' ) )
status = kwd.get( 'status', 'done' )
diff -r 0874eb058a0cb79508672cc5ebf25a4ce5063d75 -r 70272fb2005161f34e13173df323b8e5363a9319 lib/galaxy/webapps/tool_shed/controllers/repository.py
--- a/lib/galaxy/webapps/tool_shed/controllers/repository.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/repository.py
@@ -2733,7 +2733,8 @@
def reset_all_metadata( self, trans, id, **kwd ):
"""Reset all metadata on the complete changelog for a single repository in the tool shed."""
# This method is called only from the ~/templates/webapps/tool_shed/repository/manage_repository.mako template.
- invalid_file_tups, metadata_dict = metadata_util.reset_all_metadata_on_repository_in_tool_shed( trans, id, **kwd )
+ invalid_file_tups, metadata_dict = \
+ metadata_util.reset_all_metadata_on_repository_in_tool_shed( trans.app, trans.user, id, **kwd )
if invalid_file_tups:
repository = suc.get_repository_in_tool_shed( trans.app, id )
message = tool_util.generate_message_for_invalid_tools( trans.app, invalid_file_tups, repository, metadata_dict )
@@ -2750,7 +2751,7 @@
@web.expose
def reset_metadata_on_my_writable_repositories_in_tool_shed( self, trans, **kwd ):
if 'reset_metadata_on_selected_repositories_button' in kwd:
- message, status = metadata_util.reset_metadata_on_selected_repositories( trans, **kwd )
+ message, status = metadata_util.reset_metadata_on_selected_repositories( trans.app, trans.user, **kwd )
else:
message = kwd.get( 'message', '' )
status = kwd.get( 'status', 'done' )
diff -r 0874eb058a0cb79508672cc5ebf25a4ce5063d75 -r 70272fb2005161f34e13173df323b8e5363a9319 lib/tool_shed/util/metadata_util.py
--- a/lib/tool_shed/util/metadata_util.py
+++ b/lib/tool_shed/util/metadata_util.py
@@ -58,19 +58,22 @@
sa_session.add( repository_metadata )
sa_session.flush()
-def clean_repository_metadata( trans, id, changeset_revisions ):
- # Delete all repository_metadata records associated with the repository that have a changeset_revision that is not in changeset_revisions.
- # We sometimes see multiple records with the same changeset revision value - no idea how this happens. We'll assume we can delete the older
- # records, so we'll order by update_time descending and delete records that have the same changeset_revision we come across later..
+def clean_repository_metadata( app, id, changeset_revisions ):
+ # Delete all repository_metadata records associated with the repository that have
+ # a changeset_revision that is not in changeset_revisions. We sometimes see multiple
+ # records with the same changeset revision value - no idea how this happens. We'll
+ # assume we can delete the older records, so we'll order by update_time descending and
+ # delete records that have the same changeset_revision we come across later.
+ sa_session = app.model.context.current
changeset_revisions_checked = []
- for repository_metadata in trans.sa_session.query( trans.model.RepositoryMetadata ) \
- .filter( trans.model.RepositoryMetadata.table.c.repository_id == trans.security.decode_id( id ) ) \
- .order_by( trans.model.RepositoryMetadata.table.c.changeset_revision,
- trans.model.RepositoryMetadata.table.c.update_time.desc() ):
+ for repository_metadata in sa_session.query( app.model.RepositoryMetadata ) \
+ .filter( app.model.RepositoryMetadata.table.c.repository_id == app.security.decode_id( id ) ) \
+ .order_by( app.model.RepositoryMetadata.table.c.changeset_revision,
+ app.model.RepositoryMetadata.table.c.update_time.desc() ):
changeset_revision = repository_metadata.changeset_revision
if changeset_revision in changeset_revisions_checked or changeset_revision not in changeset_revisions:
- trans.sa_session.delete( repository_metadata )
- trans.sa_session.flush()
+ sa_session.delete( repository_metadata )
+ sa_session.flush()
def compare_changeset_revisions( app, ancestor_changeset_revision, ancestor_metadata_dict,
current_changeset_revision, current_metadata_dict ):
@@ -1547,13 +1550,15 @@
return True
return False
-def new_readme_files_metadata_required( trans, repository_metadata, metadata_dict ):
+def new_readme_files_metadata_required( repository_metadata, metadata_dict ):
"""
- Compare the last saved metadata for each readme file in the repository with the new metadata in metadata_dict to determine if a new
- repository_metadata table record is required or if the last saved metadata record can be updated for readme files instead.
+ Compare the last saved metadata for each readme file in the repository with the new metadata
+ in metadata_dict to determine if a new repository_metadata table record is required or if the
+ last saved metadata record can be updated for readme files instead.
"""
- # Repository README files are kind of a special case because they have no effect on reproducibility. We'll simply inspect the file names to
- # determine if any that exist in the saved metadata are eliminated from the new metadata in the received metadata_dict.
+ # Repository README files are kind of a special case because they have no effect on reproducibility.
+ # We'll simply inspect the file names to determine if any that exist in the saved metadata are
+ # eliminated from the new metadata in the received metadata_dict.
if 'readme_files' in metadata_dict:
current_readme_files = metadata_dict[ 'readme_files' ]
if repository_metadata:
@@ -1568,15 +1573,19 @@
else:
return False
else:
- # The new metadata includes readme_files, but the stored metadata does not, so we can update the stored metadata.
+ # The new metadata includes readme_files, but the stored metadata does not, so
+ # we can update the stored metadata.
return False
else:
- # There is no stored metadata, so we can update the metadata column in the repository_metadata table.
+ # There is no stored metadata, so we can update the metadata column in the repository_metadata
+ # table.
return False
else:
- # There is no stored repository metadata, so we need to create a new repository_metadata table record.
+ # There is no stored repository metadata, so we need to create a new repository_metadata
+ # table record.
return True
- # The received metadata_dict includes no metadata for readme_files, so a new repository_metadata table record is not needed.
+ # The received metadata_dict includes no metadata for readme_files, so a new repository_metadata
+ # table record is not needed.
return False
def new_repository_dependency_metadata_required( app, repository_metadata, metadata_dict ):
@@ -1728,7 +1737,7 @@
# The received metadata_dict includes no metadata for workflows, so a new repository_metadata table record is not needed.
return False
-def populate_containers_dict_from_repository_metadata( trans, tool_shed_url, tool_path, repository,
+def populate_containers_dict_from_repository_metadata( app, tool_shed_url, tool_path, repository,
reinstalling=False, required_repo_info_dicts=None ):
"""
Retrieve necessary information from the received repository's metadata to populate the
@@ -1744,19 +1753,19 @@
invalid_tools = metadata.get( 'invalid_tools', None )
# Handle README files.
if repository.has_readme_files:
- if reinstalling or repository.status not in [ trans.install_model.ToolShedRepository.installation_status.DEACTIVATED,
- trans.install_model.ToolShedRepository.installation_status.INSTALLED ]:
+ if reinstalling or repository.status not in [ app.install_model.ToolShedRepository.installation_status.DEACTIVATED,
+ app.install_model.ToolShedRepository.installation_status.INSTALLED ]:
# Since we're reinstalling, we need to send a request to the tool shed to get the README files.
- tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( trans.app, tool_shed_url )
+ tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( app, tool_shed_url )
params = '?name=%s&owner=%s&changeset_revision=%s' % ( str( repository.name ),
str( repository.owner ),
str( repository.installed_changeset_revision ) )
url = common_util.url_join( tool_shed_url,
'repository/get_readme_files%s' % params )
- raw_text = common_util.tool_shed_get( trans.app, tool_shed_url, url )
+ raw_text = common_util.tool_shed_get( app, tool_shed_url, url )
readme_files_dict = json.from_json_string( raw_text )
else:
- readme_files_dict = readme_util.build_readme_files_dict( trans.app,
+ readme_files_dict = readme_util.build_readme_files_dict( app,
repository,
repository.changeset_revision,
repository.metadata, tool_path )
@@ -1764,7 +1773,7 @@
readme_files_dict = None
# Handle repository dependencies.
installed_repository_dependencies, missing_repository_dependencies = \
- trans.app.installed_repository_manager.get_installed_and_missing_repository_dependencies( repository )
+ app.installed_repository_manager.get_installed_and_missing_repository_dependencies( repository )
# Handle the current repository's tool dependencies.
repository_tool_dependencies = metadata.get( 'tool_dependencies', None )
# Make sure to display missing tool dependencies as well.
@@ -1774,17 +1783,17 @@
repository_tool_dependencies = {}
repository_tool_dependencies.update( repository_invalid_tool_dependencies )
repository_installed_tool_dependencies, repository_missing_tool_dependencies = \
- tool_dependency_util.get_installed_and_missing_tool_dependencies_for_installed_repository( trans,
+ tool_dependency_util.get_installed_and_missing_tool_dependencies_for_installed_repository( app,
repository,
repository_tool_dependencies )
if reinstalling:
installed_tool_dependencies, missing_tool_dependencies = \
- tool_dependency_util.populate_tool_dependencies_dicts( app=trans.app,
- tool_shed_url=tool_shed_url,
- tool_path=tool_path,
- repository_installed_tool_dependencies=repository_installed_tool_dependencies,
- repository_missing_tool_dependencies=repository_missing_tool_dependencies,
- required_repo_info_dicts=required_repo_info_dicts )
+ tool_dependency_util.populate_tool_dependencies_dicts( app,
+ tool_shed_url,
+ tool_path,
+ repository_installed_tool_dependencies,
+ repository_missing_tool_dependencies,
+ required_repo_info_dicts )
else:
installed_tool_dependencies = repository_installed_tool_dependencies
missing_tool_dependencies = repository_missing_tool_dependencies
@@ -1800,7 +1809,7 @@
valid_data_managers = metadata['data_manager'].get( 'data_managers', None )
invalid_data_managers = metadata['data_manager'].get( 'invalid_data_managers', None )
data_managers_errors = metadata['data_manager'].get( 'messages', None )
- containers_dict = container_util.build_repository_containers_for_galaxy( app=trans.app,
+ containers_dict = container_util.build_repository_containers_for_galaxy( app=app,
repository=repository,
datatypes=datatypes,
invalid_tools=invalid_tools,
@@ -1858,35 +1867,38 @@
log.debug( 'Error locating installation directory for repository %s.' % repository.name )
return invalid_file_tups, metadata_dict
-def reset_all_metadata_on_repository_in_tool_shed( trans, id ):
+def reset_all_metadata_on_repository_in_tool_shed( app, user, id ):
"""Reset all metadata on a single repository in a tool shed."""
- def reset_all_tool_versions( trans, id, repo ):
+ def reset_all_tool_versions( app, id, repo ):
"""Reset tool version lineage for those changeset revisions that include valid tools."""
+ sa_session = app.model.context.current
changeset_revisions_that_contain_tools = []
for changeset in repo.changelog:
changeset_revision = str( repo.changectx( changeset ) )
- repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans.app, id, changeset_revision )
+ repository_metadata = suc.get_repository_metadata_by_changeset_revision( app, id, changeset_revision )
if repository_metadata:
metadata = repository_metadata.metadata
if metadata:
if metadata.get( 'tools', None ):
changeset_revisions_that_contain_tools.append( changeset_revision )
- # The list of changeset_revisions_that_contain_tools is now filtered to contain only those that are downloadable and contain tools.
- # If a repository includes tools, build a dictionary of { 'tool id' : 'parent tool id' } pairs for each tool in each changeset revision.
+ # The list of changeset_revisions_that_contain_tools is now filtered to contain only those that
+ # are downloadable and contain tools. If a repository includes tools, build a dictionary of
+ # { 'tool id' : 'parent tool id' } pairs for each tool in each changeset revision.
for index, changeset_revision in enumerate( changeset_revisions_that_contain_tools ):
tool_versions_dict = {}
- repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans.app, id, changeset_revision )
+ repository_metadata = suc.get_repository_metadata_by_changeset_revision( app, id, changeset_revision )
metadata = repository_metadata.metadata
tool_dicts = metadata[ 'tools' ]
if index == 0:
- # The first changeset_revision is a special case because it will have no ancestor changeset_revisions in which to match tools.
- # The parent tool id for tools in the first changeset_revision will be the "old_id" in the tool config.
+ # The first changeset_revision is a special case because it will have no ancestor
+ # changeset_revisions in which to match tools. The parent tool id for tools in the
+ # first changeset_revision will be the "old_id" in the tool config.
for tool_dict in tool_dicts:
tool_versions_dict[ tool_dict[ 'guid' ] ] = tool_dict[ 'id' ]
else:
for tool_dict in tool_dicts:
- parent_id = get_parent_id( trans.app,
+ parent_id = get_parent_id( app,
id,
tool_dict[ 'id' ],
tool_dict[ 'version' ],
@@ -1895,24 +1907,26 @@
tool_versions_dict[ tool_dict[ 'guid' ] ] = parent_id
if tool_versions_dict:
repository_metadata.tool_versions = tool_versions_dict
- trans.sa_session.add( repository_metadata )
- trans.sa_session.flush()
+ sa_session.add( repository_metadata )
+ sa_session.flush()
- repository = suc.get_repository_in_tool_shed( trans.app, id )
+ repository = suc.get_repository_in_tool_shed( app, id )
log.debug( "Resetting all metadata on repository: %s" % repository.name )
- repo_dir = repository.repo_path( trans.app )
- repo = hg_util.get_repo_for_repository( trans.app, repository=None, repo_path=repo_dir, create=False )
- repository_clone_url = common_util.generate_clone_url_for_repository_in_tool_shed( trans.user, repository )
- # The list of changeset_revisions refers to repository_metadata records that have been created or updated. When the following loop
- # completes, we'll delete all repository_metadata records for this repository that do not have a changeset_revision value in this list.
+ repo_dir = repository.repo_path( app )
+ repo = hg_util.get_repo_for_repository( app, repository=None, repo_path=repo_dir, create=False )
+ repository_clone_url = common_util.generate_clone_url_for_repository_in_tool_shed( user, repository )
+ # The list of changeset_revisions refers to repository_metadata records that have been created
+ # or updated. When the following loop completes, we'll delete all repository_metadata records
+ # for this repository that do not have a changeset_revision value in this list.
changeset_revisions = []
- # When a new repository_metadata record is created, it always uses the values of metadata_changeset_revision and metadata_dict.
+ # When a new repository_metadata record is created, it always uses the values of
+ # metadata_changeset_revision and metadata_dict.
metadata_changeset_revision = None
metadata_dict = None
ancestor_changeset_revision = None
ancestor_metadata_dict = None
invalid_file_tups = []
- for changeset in repository.get_changesets_for_setting_metadata( trans.app ):
+ for changeset in repository.get_changesets_for_setting_metadata( app ):
work_dir = tempfile.mkdtemp( prefix="tmp-toolshed-ramorits" )
current_changeset_revision = str( repo.changectx( changeset ) )
ctx = repo.changectx( changeset )
@@ -1920,18 +1934,19 @@
cloned_ok, error_message = hg_util.clone_repository( repository_clone_url, work_dir, str( ctx.rev() ) )
if cloned_ok:
log.debug( "Generating metadata for changset revision: %s", str( ctx.rev() ) )
- current_metadata_dict, invalid_tups = generate_metadata_for_changeset_revision( app=trans.app,
- repository=repository,
- changeset_revision=current_changeset_revision,
- repository_clone_url=repository_clone_url,
- relative_install_dir=repo_dir,
- repository_files_dir=work_dir,
- resetting_all_metadata_on_repository=True,
- updating_installed_repository=False,
- persist=False )
+ current_metadata_dict, invalid_tups = \
+ generate_metadata_for_changeset_revision( app=app,
+ repository=repository,
+ changeset_revision=current_changeset_revision,
+ repository_clone_url=repository_clone_url,
+ relative_install_dir=repo_dir,
+ repository_files_dir=work_dir,
+ resetting_all_metadata_on_repository=True,
+ updating_installed_repository=False,
+ persist=False )
# We'll only display error messages for the repository tip (it may be better to display error
# messages for each installable changeset revision).
- if current_changeset_revision == repository.tip( trans.app ):
+ if current_changeset_revision == repository.tip( app ):
invalid_file_tups.extend( invalid_tups )
if current_metadata_dict:
if metadata_changeset_revision is None and metadata_dict is None:
@@ -1945,7 +1960,7 @@
# SUBSET - ancestor metadata is a subset of current metadata, so continue from current
# NOT_EQUAL_AND_NOT_SUBSET - ancestor metadata is neither equal to nor a subset of current
# metadata, so persist ancestor metadata.
- comparison = compare_changeset_revisions( trans.app,
+ comparison = compare_changeset_revisions( app,
ancestor_changeset_revision,
ancestor_metadata_dict,
current_changeset_revision,
@@ -1956,7 +1971,7 @@
elif comparison == NOT_EQUAL_AND_NOT_SUBSET:
metadata_changeset_revision = ancestor_changeset_revision
metadata_dict = ancestor_metadata_dict
- repository_metadata = create_or_update_repository_metadata( trans.app,
+ repository_metadata = create_or_update_repository_metadata( app,
id,
repository,
metadata_changeset_revision,
@@ -1972,7 +1987,7 @@
metadata_changeset_revision = current_changeset_revision
metadata_dict = current_metadata_dict
# We're at the end of the change log.
- repository_metadata = create_or_update_repository_metadata( trans.app,
+ repository_metadata = create_or_update_repository_metadata( app,
id,
repository,
metadata_changeset_revision,
@@ -1984,7 +1999,7 @@
# We reach here only if current_metadata_dict is empty and ancestor_metadata_dict is not.
if not ctx.children():
# We're at the end of the change log.
- repository_metadata = create_or_update_repository_metadata( trans.app,
+ repository_metadata = create_or_update_repository_metadata( app,
id,
repository,
metadata_changeset_revision,
@@ -1995,15 +2010,15 @@
basic_util.remove_dir( work_dir )
# Delete all repository_metadata records for this repository that do not have a changeset_revision
# value in changeset_revisions.
- clean_repository_metadata( trans, id, changeset_revisions )
+ clean_repository_metadata( app, id, changeset_revisions )
# Set tool version information for all downloadable changeset revisions. Get the list of changeset
# revisions from the changelog.
- reset_all_tool_versions( trans, id, repo )
+ reset_all_tool_versions( app, id, repo )
# Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
- tool_util.reset_tool_data_tables( trans.app )
+ tool_util.reset_tool_data_tables( app )
return invalid_file_tups, metadata_dict
-def reset_metadata_on_selected_repositories( trans, **kwd ):
+def reset_metadata_on_selected_repositories( app, user, **kwd ):
"""
Inspect the repository changelog to reset metadata for all appropriate changeset revisions.
This method is called from both Galaxy and the Tool Shed.
@@ -2016,18 +2031,23 @@
unsuccessful_count = 0
for repository_id in repository_ids:
try:
- if trans.webapp.name == 'tool_shed':
+ if app.name == 'tool_shed':
# We're in the tool shed.
- repository = suc.get_repository_in_tool_shed( trans.app, repository_id )
+ repository = suc.get_repository_in_tool_shed( app, repository_id )
owner = str( repository.user.username )
- invalid_file_tups, metadata_dict = reset_all_metadata_on_repository_in_tool_shed( trans, repository_id )
+ invalid_file_tups, metadata_dict = \
+ reset_all_metadata_on_repository_in_tool_shed( app, user, repository_id )
else:
# We're in Galaxy.
- repository = suc.get_installed_tool_shed_repository( trans.app, repository_id )
+ repository = suc.get_installed_tool_shed_repository( app, repository_id )
owner = str( repository.owner )
- invalid_file_tups, metadata_dict = reset_all_metadata_on_installed_repository( trans.app, repository_id )
+ invalid_file_tups, metadata_dict = reset_all_metadata_on_installed_repository( app, repository_id )
if invalid_file_tups:
- message = tool_util.generate_message_for_invalid_tools( trans.app, invalid_file_tups, repository, None, as_html=False )
+ message = tool_util.generate_message_for_invalid_tools( app,
+ invalid_file_tups,
+ repository,
+ None,
+ as_html=False )
log.debug( message )
unsuccessful_count += 1
else:
@@ -2036,10 +2056,11 @@
except:
log.exception( "Error attempting to reset metadata on repository %s", str( repository.name ) )
unsuccessful_count += 1
- message = "Successfully reset metadata on %d %s. " % ( successful_count, inflector.cond_plural( successful_count, "repository" ) )
+ message = "Successfully reset metadata on %d %s. " % \
+ ( successful_count, inflector.cond_plural( successful_count, "repository" ) )
if unsuccessful_count:
- message += "Error setting metadata on %d %s - see the paster log for details. " % ( unsuccessful_count,
- inflector.cond_plural( unsuccessful_count, "repository" ) )
+ message += "Error setting metadata on %d %s - see the paster log for details. " % \
+ ( unsuccessful_count, inflector.cond_plural( unsuccessful_count, "repository" ) )
else:
message = 'Select at least one repository to on which to reset all metadata.'
status = 'error'
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/47d7c03dc46f/
Changeset: 47d7c03dc46f
User: davebgx
Date: 2014-07-15 22:13:28
Summary: Fix rare scenario where installing a set of repositories that depend on each other would result in one of them being installed twice.
Affected #: 1 file
diff -r 1b1748573103faca069962324a96ded3c801bc7a -r 47d7c03dc46f5fad021af94c44d377192dc534f7 lib/tool_shed/galaxy_install/install_manager.py
--- a/lib/tool_shed/galaxy_install/install_manager.py
+++ b/lib/tool_shed/galaxy_install/install_manager.py
@@ -1091,9 +1091,10 @@
tsr_ids,
repo_info_dicts,
tool_panel_section_keys=tool_panel_section_keys )
- ordered_tsr_ids.append( tsr_id )
- ordered_repo_info_dicts.append( repo_info_dict )
- ordered_tool_panel_section_keys.append( tool_panel_section_key )
+ if tsr_id not in ordered_tsr_ids:
+ ordered_tsr_ids.append( tsr_id )
+ ordered_repo_info_dicts.append( repo_info_dict )
+ ordered_tool_panel_section_keys.append( tool_panel_section_key )
return ordered_tsr_ids, ordered_repo_info_dicts, ordered_tool_panel_section_keys
def populate_containers_dict_for_new_install( self, tool_shed_url, tool_path, readme_files_dict, installed_repository_dependencies,
https://bitbucket.org/galaxy/galaxy-central/commits/0874eb058a0c/
Changeset: 0874eb058a0c
User: davebgx
Date: 2014-07-15 22:30:35
Summary: Fix display of job stdio when using the twill interactor.
Affected #: 2 files
diff -r 47d7c03dc46f5fad021af94c44d377192dc534f7 -r 0874eb058a0cb79508672cc5ebf25a4ce5063d75 test/base/interactor.py
--- a/test/base/interactor.py
+++ b/test/base/interactor.py
@@ -386,7 +386,8 @@
self.twill_test_case.verify_dataset_correctness( outfile, hid=hid, attributes=attributes, shed_tool_id=shed_tool_id, maxseconds=maxseconds )
def get_job_stream( self, history_id, output_data, stream ):
- return self.twill_test_case._get_job_stream_output( output_data.get( 'id' ), stream=stream, format=False )
+ data_id = self.twill_test_case.security.encode_id( output_data.get( 'id' ) )
+ return self.twill_test_case._get_job_stream_output( data_id, stream=stream, format=False )
def stage_data_async( self, test_data, history, shed_tool_id, async=True ):
name = test_data.get( 'name', None )
diff -r 47d7c03dc46f5fad021af94c44d377192dc534f7 -r 0874eb058a0cb79508672cc5ebf25a4ce5063d75 test/base/twilltestcase.py
--- a/test/base/twilltestcase.py
+++ b/test/base/twilltestcase.py
@@ -2496,7 +2496,7 @@
return msg
def _get_job_stream_output( self, hda_id, stream, format ):
- self.visit_url( "/datasets/%s/%s" % ( self.security.encode_id( hda_id ), stream ) )
+ self.visit_url( "/datasets/%s/%s" % ( hda_id, stream ) )
output = self.last_page()
return self._format_stream( output, stream, format )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: jgoecks: Visualization framework bug fixes + remove debugging statement.
by commits-noreply@bitbucket.org 15 Jul '14
by commits-noreply@bitbucket.org 15 Jul '14
15 Jul '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/1b1748573103/
Changeset: 1b1748573103
User: jgoecks
Date: 2014-07-15 21:41:38
Summary: Visualization framework bug fixes + remove debugging statement.
Affected #: 3 files
diff -r 7628e1d9d8e2900cda5fb90ff239a32c0e091df9 -r 1b1748573103faca069962324a96ded3c801bc7a lib/galaxy/visualization/data_providers/cigar.py
--- a/lib/galaxy/visualization/data_providers/cigar.py
+++ b/lib/galaxy/visualization/data_providers/cigar.py
@@ -18,7 +18,7 @@
return read_seq, cigar
# Set up position for reference, read.
- ref_seq_pos = read_start
+ ref_seq_pos = read_start - ref_seq_start
read_pos = 0
# Create new read sequence, cigar.
diff -r 7628e1d9d8e2900cda5fb90ff239a32c0e091df9 -r 1b1748573103faca069962324a96ded3c801bc7a lib/galaxy/webapps/galaxy/api/datasets.py
--- a/lib/galaxy/webapps/galaxy/api/datasets.py
+++ b/lib/galaxy/webapps/galaxy/api/datasets.py
@@ -203,7 +203,8 @@
# get longer, this will need to be increased and/or a handle to the genomic data may be need
# to be given to the data provider.
region = self.app.genomes.reference( trans, dbkey=dataset.dbkey, chrom=chrom,
- low=( int( low ) - 500 ), high=( int( high ) + 500 ) )
+ low=( max( 0, int( low ) - 500 ) ),
+ high=( int( high ) + 500 ) )
# Get mean depth.
if not indexer:
@@ -211,7 +212,6 @@
stats = indexer.get_data( chrom, low, high, stats=True )
mean_depth = stats[ 'data' ][ 'mean' ]
-
# Get and return data from data_provider.
result = data_provider.get_data( chrom, int( low ), int( high ), int( start_val ), int( max_vals ),
ref_seq=region, mean_depth=mean_depth, **kwargs )
diff -r 7628e1d9d8e2900cda5fb90ff239a32c0e091df9 -r 1b1748573103faca069962324a96ded3c801bc7a static/scripts/viz/trackster/tracks.js
--- a/static/scripts/viz/trackster/tracks.js
+++ b/static/scripts/viz/trackster/tracks.js
@@ -1385,7 +1385,6 @@
// captures most (all?) state change that needs to be saved.
var self = this;
drawable.config.on('change', function() {
- console.log(drawable.config.get_value('name') + " changed");
self.changed();
});
},
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: greg: Add a new RelationBuilder class to handle building repository relationships, eliminating the use of the repository_dependency_util.py module, and rename the RepositoryDependencyManager class to be RepositoryDependencyInstallManager.
by commits-noreply@bitbucket.org 15 Jul '14
by commits-noreply@bitbucket.org 15 Jul '14
15 Jul '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/7628e1d9d8e2/
Changeset: 7628e1d9d8e2
User: greg
Date: 2014-07-15 19:56:12
Summary: Add a new RelationBuilder class to handle building repository relationships, eliminating the use of the repository_dependency_util.py module, and rename the RepositoryDependencyManager class to be RepositoryDependencyInstallManager.
Affected #: 13 files
diff -r 893a727a7b7cc83355bc977d034622da50af16e5 -r 7628e1d9d8e2900cda5fb90ff239a32c0e091df9 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -22,7 +22,6 @@
from tool_shed.util import hg_util
from tool_shed.util import metadata_util
from tool_shed.util import readme_util
-from tool_shed.util import repository_dependency_util
from tool_shed.util import repository_maintenance_util
from tool_shed.util import tool_dependency_util
from tool_shed.util import tool_util
@@ -31,7 +30,7 @@
from tool_shed.galaxy_install import install_manager
from tool_shed.galaxy_install.repair_repository_manager import RepairRepositoryManager
import tool_shed.galaxy_install.grids.admin_toolshed_grids as admin_toolshed_grids
-from tool_shed.galaxy_install.repository_dependencies.repository_dependency_manager import RepositoryDependencyManager
+from tool_shed.galaxy_install.repository_dependencies import repository_dependency_manager
log = logging.getLogger( __name__ )
@@ -1301,7 +1300,7 @@
Reinstall a tool shed repository that has been previously uninstalled, making sure to handle all repository
and tool dependencies of the repository.
"""
- rdm = RepositoryDependencyManager( trans.app )
+ rdim = repository_dependency_manager.RepositoryDependencyInstallManager( trans.app )
message = kwd.get( 'message', '' )
status = kwd.get( 'status', 'done' )
repository_id = kwd[ 'id' ]
@@ -1365,35 +1364,33 @@
else:
# Entering this else block occurs only if the tool_shed_repository does not include any valid tools.
if install_repository_dependencies:
- repository_dependencies = rdm.get_repository_dependencies_for_installed_tool_shed_repository( trans.app,
- tool_shed_repository )
+ repository_dependencies = \
+ rdim.get_repository_dependencies_for_installed_tool_shed_repository( trans.app,
+ tool_shed_repository )
else:
repository_dependencies = None
if metadata:
tool_dependencies = metadata.get( 'tool_dependencies', None )
else:
tool_dependencies = None
- repo_info_dict = \
- repository_maintenance_util.create_repo_info_dict( app=trans.app,
- repository_clone_url=repository_clone_url,
- changeset_revision=tool_shed_repository.changeset_revision,
- ctx_rev=ctx_rev,
- repository_owner=tool_shed_repository.owner,
- repository_name=tool_shed_repository.name,
- repository=None,
- repository_metadata=None,
- tool_dependencies=tool_dependencies,
- repository_dependencies=repository_dependencies )
+ repo_info_dict = repository_maintenance_util.create_repo_info_dict( trans.app,
+ repository_clone_url=repository_clone_url,
+ changeset_revision=tool_shed_repository.changeset_revision,
+ ctx_rev=ctx_rev,
+ repository_owner=tool_shed_repository.owner,
+ repository_name=tool_shed_repository.name,
+ tool_dependencies=tool_dependencies,
+ repository_dependencies=repository_dependencies )
if repo_info_dict not in repo_info_dicts:
repo_info_dicts.append( repo_info_dict )
# Make sure all tool_shed_repository records exist.
created_or_updated_tool_shed_repositories, tool_panel_section_keys, repo_info_dicts, filtered_repo_info_dicts = \
- rdm.create_repository_dependency_objects( tool_path=tool_path,
- tool_shed_url=tool_shed_url,
- repo_info_dicts=repo_info_dicts,
- install_repository_dependencies=install_repository_dependencies,
- no_changes_checked=no_changes_checked,
- tool_panel_section_id=tool_panel_section_id )
+ rdim.create_repository_dependency_objects( tool_path=tool_path,
+ tool_shed_url=tool_shed_url,
+ repo_info_dicts=repo_info_dicts,
+ install_repository_dependencies=install_repository_dependencies,
+ no_changes_checked=no_changes_checked,
+ tool_panel_section_id=tool_panel_section_id )
# Default the selected tool panel location for loading tools included in each newly installed required
# tool shed repository to the location selected for the repository selected for re-installation.
for index, tps_key in enumerate( tool_panel_section_keys ):
@@ -1518,10 +1515,11 @@
@web.require_admin
def reselect_tool_panel_section( self, trans, **kwd ):
"""
- Select or change the tool panel section to contain the tools included in the tool shed repository being reinstalled. If there are updates
- available for the repository in the tool shed, the tool_dependencies and repository_dependencies associated with the updated changeset revision
- will have been retrieved from the tool shed and passed in the received kwd. In this case, the stored tool shed repository metadata from the
- Galaxy database will not be used since it is outdated.
+ Select or change the tool panel section to contain the tools included in the tool shed repository
+ being reinstalled. If there are updates available for the repository in the tool shed, the
+ tool_dependencies and repository_dependencies associated with the updated changeset revision will
+ have been retrieved from the tool shed and passed in the received kwd. In this case, the stored
+ tool shed repository metadata from the Galaxy database will not be used since it is outdated.
"""
message = ''
status = 'done'
@@ -1534,8 +1532,8 @@
tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( trans.app, str( tool_shed_repository.tool_shed ) )
tool_path, relative_install_dir = tool_shed_repository.get_tool_relative_path( trans.app )
if latest_changeset_revision and latest_ctx_rev:
- # There are updates available in the tool shed for the repository, so use the receieved dependency information which was retrieved from
- # the tool shed.
+ # There are updates available in the tool shed for the repository, so use the received
+ # dependency information which was retrieved from the tool shed.
encoded_updated_repo_info_dict = kwd.get( 'updated_repo_info_dict', None )
updated_repo_info_dict = encoding_util.tool_shed_decode( encoded_updated_repo_info_dict )
readme_files_dict = updated_repo_info_dict.get( 'readme_files_dict', None )
@@ -1584,20 +1582,18 @@
raw_text = common_util.tool_shed_get( trans.app, tool_shed_url, url )
readme_files_dict = json.from_json_string( raw_text )
tool_dependencies = metadata.get( 'tool_dependencies', None )
- rdm = RepositoryDependencyManager( trans.app )
- repository_dependencies = rdm.get_repository_dependencies_for_installed_tool_shed_repository( trans.app,
- tool_shed_repository )
- repo_info_dict = \
- repository_maintenance_util.create_repo_info_dict( app=trans.app,
- repository_clone_url=repository_clone_url,
- changeset_revision=tool_shed_repository.installed_changeset_revision,
- ctx_rev=tool_shed_repository.ctx_rev,
- repository_owner=tool_shed_repository.owner,
- repository_name=tool_shed_repository.name,
- repository=None,
- repository_metadata=None,
- tool_dependencies=tool_dependencies,
- repository_dependencies=repository_dependencies )
+ rdim = repository_dependency_manager.RepositoryDependencyInstallManager( trans.app )
+ repository_dependencies = \
+ rdim.get_repository_dependencies_for_installed_tool_shed_repository( trans.app,
+ tool_shed_repository )
+ repo_info_dict = repository_maintenance_util.create_repo_info_dict( trans.app,
+ repository_clone_url=repository_clone_url,
+ changeset_revision=tool_shed_repository.installed_changeset_revision,
+ ctx_rev=tool_shed_repository.ctx_rev,
+ repository_owner=tool_shed_repository.owner,
+ repository_name=tool_shed_repository.name,
+ tool_dependencies=tool_dependencies,
+ repository_dependencies=repository_dependencies )
irm = trans.app.installed_repository_manager
dependencies_for_repository_dict = irm.get_dependencies_for_repository( tool_shed_url,
repo_info_dict,
diff -r 893a727a7b7cc83355bc977d034622da50af16e5 -r 7628e1d9d8e2900cda5fb90ff239a32c0e091df9 lib/galaxy/webapps/tool_shed/controllers/repository.py
--- a/lib/galaxy/webapps/tool_shed/controllers/repository.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/repository.py
@@ -15,6 +15,7 @@
from galaxy.util import json
from galaxy.model.orm import and_
from tool_shed.capsule import capsule_manager
+from tool_shed.dependencies.repository import relation_builder
from tool_shed.util import basic_util
from tool_shed.util import common_util
from tool_shed.util import container_util
@@ -22,7 +23,6 @@
from tool_shed.util import hg_util
from tool_shed.util import metadata_util
from tool_shed.util import readme_util
-from tool_shed.util import repository_dependency_util
from tool_shed.util import repository_maintenance_util
from tool_shed.util import review_util
from tool_shed.util import search_util
@@ -1251,15 +1251,10 @@
return opened_archive
repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans.app, repository_id, changeset_revision )
metadata = repository_metadata.metadata
+ toolshed_base_url = str( web.url_for( '/', qualified=True ) ).rstrip( '/' )
+ rb = relation_builder.RelationBuilder( trans.app, repository, repository_metadata, toolshed_base_url )
# Get a dictionary of all repositories upon which the contents of the current repository_metadata record depend.
- repository_dependencies = \
- repository_dependency_util.get_repository_dependencies_for_changeset_revision( app=trans.app,
- repository=repository,
- repository_metadata=repository_metadata,
- toolshed_base_url=str( web.url_for( '/', qualified=True ) ).rstrip( '/' ),
- key_rd_dicts_to_be_processed=None,
- all_repository_dependencies=None,
- handled_key_rd_dicts=None )
+ repository_dependencies = rb.get_repository_dependencies_for_changeset_revision()
if repository_dependencies:
# Only display repository dependencies if they exist.
exclude = [ 'datatypes', 'invalid_repository_dependencies', 'invalid_tool_dependencies', 'invalid_tools',
@@ -1777,15 +1772,9 @@
if repository_metadata:
metadata = repository_metadata.metadata
if metadata:
- repository_dependencies = \
- repository_dependency_util.get_repository_dependencies_for_changeset_revision( app=trans.app,
- repository=repository,
- repository_metadata=repository_metadata,
- toolshed_base_url=str( web.url_for( '/', qualified=True ) ).rstrip( '/' ),
- key_rd_dicts_to_be_processed=None,
- all_repository_dependencies=None,
- handled_key_rd_dicts=None,
- circular_repository_dependencies=None )
+ toolshed_base_url = str( web.url_for( '/', qualified=True ) ).rstrip( '/' )
+ rb = relation_builder.RelationBuilder( trans.app, repository, repository_metadata, toolshed_base_url )
+ repository_dependencies = rb.get_repository_dependencies_for_changeset_revision()
if repository_dependencies:
return encoding_util.tool_shed_encode( repository_dependencies )
return ''
@@ -2413,14 +2402,9 @@
skip_tool_tests_checked = True
metadata = repository_metadata.metadata
# Get a dictionary of all repositories upon which the contents of the current repository_metadata record depend.
- repository_dependencies = \
- repository_dependency_util.get_repository_dependencies_for_changeset_revision( app=trans.app,
- repository=repository,
- repository_metadata=repository_metadata,
- toolshed_base_url=str( web.url_for( '/', qualified=True ) ).rstrip( '/' ),
- key_rd_dicts_to_be_processed=None,
- all_repository_dependencies=None,
- handled_key_rd_dicts=None )
+ toolshed_base_url = str( web.url_for( '/', qualified=True ) ).rstrip( '/' )
+ rb = relation_builder.RelationBuilder( trans.app, repository, repository_metadata, toolshed_base_url )
+ repository_dependencies = rb.get_repository_dependencies_for_changeset_revision()
if str( repository.type ) != rt_util.REPOSITORY_SUITE_DEFINITION:
# Handle messaging for resetting repository type to the optimal value.
change_repository_type_message = rt_util.generate_message_for_repository_type_change( trans.app,
@@ -2461,9 +2445,9 @@
repository_metadata )
heads = hg_util.get_repository_heads( repo )
deprecated_repository_dependency_tups = \
- repository_dependency_util.get_repository_dependency_tups_from_repository_metadata( trans.app,
- repository_metadata,
- deprecated_only=True )
+ metadata_util.get_repository_dependency_tups_from_repository_metadata( trans.app,
+ repository_metadata,
+ deprecated_only=True )
return trans.fill_template( '/webapps/tool_shed/repository/manage_repository.mako',
repo_name=repo_name,
description=description,
@@ -2614,14 +2598,8 @@
metadata = repository_metadata.metadata
# Get a dictionary of all repositories upon which the contents of the current repository_metadata record depend.
toolshed_base_url = str( web.url_for( '/', qualified=True ) ).rstrip( '/' )
- repository_dependencies = \
- repository_dependency_util.get_repository_dependencies_for_changeset_revision( app=trans.app,
- repository=repository,
- repository_metadata=repository_metadata,
- toolshed_base_url=toolshed_base_url,
- key_rd_dicts_to_be_processed=None,
- all_repository_dependencies=None,
- handled_key_rd_dicts=None )
+ rb = relation_builder.RelationBuilder( trans.app, repository, repository_metadata, toolshed_base_url )
+ repository_dependencies = rb.get_repository_dependencies_for_changeset_revision()
if metadata:
if 'repository_dependencies' in metadata and not repository_dependencies:
# See if we have an invalid repository dependency definition or if the repository dependency is required
@@ -2636,8 +2614,8 @@
invalid = True
break
if invalid:
- message = repository_dependency_util.generate_message_for_invalid_repository_dependencies( metadata,
- error_from_tuple=False )
+ message = metadata_util.generate_message_for_invalid_repository_dependencies( metadata,
+ error_from_tuple=False )
status = 'error'
else:
repository_metadata_id = None
@@ -3329,14 +3307,9 @@
if repository_metadata:
metadata = repository_metadata.metadata
# Get a dictionary of all repositories upon which the contents of the current repository_metadata record depend.
- repository_dependencies = \
- repository_dependency_util.get_repository_dependencies_for_changeset_revision( app=trans.app,
- repository=repository,
- repository_metadata=repository_metadata,
- toolshed_base_url=str( web.url_for( '/', qualified=True ) ).rstrip( '/' ),
- key_rd_dicts_to_be_processed=None,
- all_repository_dependencies=None,
- handled_key_rd_dicts=None )
+ toolshed_base_url = str( web.url_for( '/', qualified=True ) ).rstrip( '/' )
+ rb = relation_builder.RelationBuilder( trans.app, repository, repository_metadata, toolshed_base_url )
+ repository_dependencies = rb.get_repository_dependencies_for_changeset_revision()
if str( repository.type ) != rt_util.TOOL_DEPENDENCY_DEFINITION:
# Handle messaging for orphan tool dependency definitions.
orphan_message = tool_dependency_util.generate_message_for_orphan_tool_dependencies( trans, repository, metadata )
diff -r 893a727a7b7cc83355bc977d034622da50af16e5 -r 7628e1d9d8e2900cda5fb90ff239a32c0e091df9 lib/galaxy/webapps/tool_shed/controllers/upload.py
--- a/lib/galaxy/webapps/tool_shed/controllers/upload.py
+++ b/lib/galaxy/webapps/tool_shed/controllers/upload.py
@@ -14,7 +14,6 @@
from tool_shed.util import commit_util
from tool_shed.util import hg_util
from tool_shed.util import metadata_util
-from tool_shed.util import repository_dependency_util
from tool_shed.util import shed_util_common as suc
from tool_shed.util import tool_dependency_util
from tool_shed.util import tool_util
@@ -303,8 +302,8 @@
status = 'error'
# Handle messaging for invalid repository dependencies.
invalid_repository_dependencies_message = \
- repository_dependency_util.generate_message_for_invalid_repository_dependencies( metadata_dict,
- error_from_tuple=True )
+ metadata_util.generate_message_for_invalid_repository_dependencies( metadata_dict,
+ error_from_tuple=True )
if invalid_repository_dependencies_message:
message += invalid_repository_dependencies_message
status = 'error'
diff -r 893a727a7b7cc83355bc977d034622da50af16e5 -r 7628e1d9d8e2900cda5fb90ff239a32c0e091df9 lib/tool_shed/capsule/capsule_manager.py
--- a/lib/tool_shed/capsule/capsule_manager.py
+++ b/lib/tool_shed/capsule/capsule_manager.py
@@ -12,18 +12,19 @@
from galaxy.util import asbool
from galaxy.util import CHUNK_SIZE
from galaxy.util.odict import odict
-from tool_shed.dependencies import dependency_manager
+from tool_shed.dependencies.repository.relation_builder import RelationBuilder
+from tool_shed.dependencies.dependency_manager import RepositoryDependencyAttributeHandler
+from tool_shed.dependencies.dependency_manager import ToolDependencyAttributeHandler
from tool_shed.util import basic_util
from tool_shed.util import commit_util
from tool_shed.util import common_util
from tool_shed.util import encoding_util
from tool_shed.util import hg_util
from tool_shed.util import metadata_util
-from tool_shed.util import repository_dependency_util
from tool_shed.util import repository_maintenance_util
from tool_shed.util import shed_util_common as suc
from tool_shed.util import xml_util
-from tool_shed.galaxy_install.repository_dependencies.repository_dependency_manager import RepositoryDependencyManager
+from tool_shed.galaxy_install.repository_dependencies.repository_dependency_manager import RepositoryDependencyInstallManager
log = logging.getLogger( __name__ )
@@ -129,8 +130,8 @@
return sub_elements
def generate_repository_archive( self, repository, changeset_revision, work_dir ):
- rdah = dependency_manager.RepositoryDependencyAttributeHandler( self.app, unpopulate=True )
- tdah = dependency_manager.ToolDependencyAttributeHandler( self.app, unpopulate=True )
+ rdah = RepositoryDependencyAttributeHandler( self.app, unpopulate=True )
+ tdah = ToolDependencyAttributeHandler( self.app, unpopulate=True )
file_type_str = basic_util.get_file_type_str( changeset_revision, self.file_type )
file_name = '%s-%s' % ( repository.name, file_type_str )
return_code, error_message = hg_util.archive_repository_revision( self.app,
@@ -235,21 +236,15 @@
Return a list of dictionaries defining repositories that are required by the repository
associated with self.repository_id.
"""
- rdm = RepositoryDependencyManager( self.app )
+ rdim = RepositoryDependencyInstallManager( self.app )
repository = suc.get_repository_in_tool_shed( self.app, self.repository_id )
repository_metadata = suc.get_repository_metadata_by_changeset_revision( self.app,
self.repository_id,
self.changeset_revision )
# Get a dictionary of all repositories upon which the contents of the current repository_metadata record depend.
toolshed_base_url = str( web.url_for( '/', qualified=True ) ).rstrip( '/' )
- repository_dependencies = \
- repository_dependency_util.get_repository_dependencies_for_changeset_revision( app=self.app,
- repository=self.repository,
- repository_metadata=repository_metadata,
- toolshed_base_url=toolshed_base_url,
- key_rd_dicts_to_be_processed=None,
- all_repository_dependencies=None,
- handled_key_rd_dicts=None )
+ rb = RelationBuilder( self.app, repository, repository_metadata, toolshed_base_url )
+ repository_dependencies = rb.get_repository_dependencies_for_changeset_revision()
repo = hg_util.get_repo_for_repository( self.app,
repository=self.repository,
repo_path=None,
@@ -265,7 +260,7 @@
str( self.repository.user.username ),
repository_dependencies,
None )
- all_required_repo_info_dict = rdm.get_required_repo_info_dicts( self.tool_shed_url, [ repo_info_dict ] )
+ all_required_repo_info_dict = rdim.get_required_repo_info_dicts( self.tool_shed_url, [ repo_info_dict ] )
all_repo_info_dicts = all_required_repo_info_dict.get( 'all_repo_info_dicts', [] )
return all_repo_info_dicts
@@ -651,8 +646,8 @@
def import_repository_archive( self, repository, repository_archive_dict ):
"""Import a repository archive contained within a repository capsule."""
- rdah = dependency_manager.RepositoryDependencyAttributeHandler( self.app, unpopulate=False )
- tdah = dependency_manager.ToolDependencyAttributeHandler( self.app, unpopulate=False )
+ rdah = RepositoryDependencyAttributeHandler( self.app, unpopulate=False )
+ tdah = ToolDependencyAttributeHandler( self.app, unpopulate=False )
archive_file_name = repository_archive_dict.get( 'archive_file_name', None )
capsule_file_name = repository_archive_dict[ 'capsule_file_name' ]
encoded_file_path = repository_archive_dict[ 'encoded_file_path' ]
diff -r 893a727a7b7cc83355bc977d034622da50af16e5 -r 7628e1d9d8e2900cda5fb90ff239a32c0e091df9 lib/tool_shed/dependencies/repository/relation_builder.py
--- /dev/null
+++ b/lib/tool_shed/dependencies/repository/relation_builder.py
@@ -0,0 +1,497 @@
+import logging
+import os
+
+from galaxy.util import asbool
+from galaxy.util import listify
+from tool_shed.util import common_util
+from tool_shed.util import container_util
+from tool_shed.util import hg_util
+from tool_shed.util import metadata_util
+from tool_shed.util import shed_util_common as suc
+
+log = logging.getLogger( __name__ )
+
+
+class RelationBuilder( object ):
+
+ def __init__( self, app, repository, repository_metadata, tool_shed_url ):
+ self.all_repository_dependencies = {}
+ self.app = app
+ self.circular_repository_dependencies = []
+ self.repository = repository
+ self.repository_metadata = repository_metadata
+ self.handled_key_rd_dicts = []
+ self.key_rd_dicts_to_be_processed = []
+ self.tool_shed_url = tool_shed_url
+
+ def can_add_to_key_rd_dicts( self, key_rd_dict, key_rd_dicts ):
+ """Handle the case where an update to the changeset revision was done."""
+ k = key_rd_dict.keys()[ 0 ]
+ rd = key_rd_dict[ k ]
+ partial_rd = rd[ 0:3 ]
+ for kr_dict in key_rd_dicts:
+ key = kr_dict.keys()[ 0 ]
+ if key == k:
+ repository_dependency = kr_dict[ key ]
+ if repository_dependency[ 0:3 ] == partial_rd:
+ return False
+ return True
+
+ def filter_only_if_compiling_contained_td( self, key_rd_dict ):
+ """
+ Return a copy of the received key_rd_dict with repository dependencies that are needed
+ only_if_compiling_contained_td filtered out of the list of repository dependencies for
+ each rd_key.
+ """
+ filtered_key_rd_dict = {}
+ for rd_key, required_rd_tup in key_rd_dict.items():
+ tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \
+ common_util.parse_repository_dependency_tuple( required_rd_tup )
+ if not asbool( only_if_compiling_contained_td ):
+ filtered_key_rd_dict[ rd_key ] = required_rd_tup
+ return filtered_key_rd_dict
+
+ def get_prior_installation_required_and_only_if_compiling_contained_td( self ):
+ """
+ This method is called from the tool shed and never Galaxy. If self.all_repository_dependencies
+ contains a repository dependency tuple that is associated with self.repository, return the
+ value of the tuple's prior_installation_required component.
+ """
+ cleaned_toolshed_base_url = common_util.remove_protocol_from_tool_shed_url( self.tool_shed_url )
+ if self.all_repository_dependencies:
+ for rd_key, rd_tups in self.all_repository_dependencies.items():
+ if rd_key in [ 'root_key', 'description' ]:
+ continue
+ for rd_tup in rd_tups:
+ rd_toolshed, \
+ rd_name, \
+ rd_owner, \
+ rd_changeset_revision, \
+ rd_prior_installation_required, \
+ rd_only_if_compiling_contained_td = \
+ common_util.parse_repository_dependency_tuple( rd_tup )
+ cleaned_rd_toolshed = common_util.remove_protocol_from_tool_shed_url( rd_toolshed )
+ if cleaned_rd_toolshed == cleaned_toolshed_base_url and \
+ rd_name == self.repository.name and \
+ rd_owner == self.repository.user.username and \
+ rd_changeset_revision == self.repository_metadata.changeset_revision:
+ return rd_prior_installation_required, rd_only_if_compiling_contained_td
+ elif self.repository_metadata:
+ # Get the list of changeset revisions from the tool shed to which self.repository may be updated.
+ metadata = self.repository_metadata.metadata
+ current_changeset_revision = str( self.repository_metadata.changeset_revision )
+ # Get the changeset revision to which the current value of required_repository_changeset_revision
+ # should be updated if it's not current.
+ text = suc.get_updated_changeset_revisions( self.app,
+ name=str( self.repository.name ),
+ owner=str( self.repository.user.username ),
+ changeset_revision=current_changeset_revision )
+ if text:
+ valid_changeset_revisions = listify( text )
+ if current_changeset_revision not in valid_changeset_revisions:
+ valid_changeset_revisions.append( current_changeset_revision )
+ else:
+ valid_changeset_revisions = [ current_changeset_revision ]
+ repository_dependencies_dict = metadata[ 'repository_dependencies' ]
+ rd_tups = repository_dependencies_dict.get( 'repository_dependencies', [] )
+ for rd_tup in rd_tups:
+ rd_toolshed, \
+ rd_name, \
+ rd_owner, \
+ rd_changeset_revision, \
+ rd_prior_installation_required, \
+ rd_only_if_compiling_contained_td = \
+ common_util.parse_repository_dependency_tuple( rd_tup )
+ cleaned_rd_toolshed = common_util.remove_protocol_from_tool_shed_url( rd_toolshed )
+ if cleaned_rd_toolshed == cleaned_toolshed_base_url and \
+ rd_name == self.repository.name and \
+ rd_owner == self.repository.user.username and \
+ rd_changeset_revision in valid_changeset_revisions:
+ return rd_prior_installation_required, rd_only_if_compiling_contained_td
+ # Default both prior_installation_required and only_if_compiling_contained_td to False.
+ return 'False', 'False'
+
+ def get_key_for_repository_changeset_revision( self ):
+ # The received toolshed_base_url must include the port, but doesn't have to include the protocol.
+ prior_installation_required, only_if_compiling_contained_td = \
+ self.get_prior_installation_required_and_only_if_compiling_contained_td()
+ # Create a key with the value of prior_installation_required defaulted to False.
+ key = container_util.generate_repository_dependencies_key_for_repository( self.tool_shed_url,
+ self.repository.name,
+ self.repository.user.username,
+ self.repository_metadata.changeset_revision,
+ prior_installation_required,
+ only_if_compiling_contained_td )
+ return key
+
+ def get_repository_dependencies_for_changeset_revision( self ):
+ """
+ Return a dictionary of all repositories upon which the contents of self.repository_metadata
+ record depend. The dictionary keys are name-spaced values consisting of:
+ self.tool_shed_url/repository_name/repository_owner/changeset_revision
+ and the values are lists of repository_dependency tuples consisting of:
+ ( self.tool_shed_url, repository_name, repository_owner, changeset_revision ).
+ This method ensures that all required repositories to the nth degree are returned.
+ """
+ # Assume the current repository does not have repository dependencies defined for it.
+ current_repository_key = None
+ metadata = self.repository_metadata.metadata
+ if metadata:
+ # The value of self.tool_shed_url must include the port, but doesn't have to include
+ # the protocol.
+ if 'repository_dependencies' in metadata:
+ current_repository_key = self.get_key_for_repository_changeset_revision()
+ repository_dependencies_dict = metadata[ 'repository_dependencies' ]
+ if not self.all_repository_dependencies:
+ self.initialize_all_repository_dependencies( current_repository_key, repository_dependencies_dict )
+ # Handle the repository dependencies defined in the current repository, if any, and populate
+ # the various repository dependency objects for this round of processing.
+ current_repository_key_rd_dicts = \
+ self.populate_repository_dependency_objects_for_processing( current_repository_key,
+ repository_dependencies_dict )
+ if current_repository_key:
+ if current_repository_key_rd_dicts:
+ # There should be only a single current_repository_key_rd_dict in this list.
+ current_repository_key_rd_dict = current_repository_key_rd_dicts[ 0 ]
+ # Handle circular repository dependencies.
+ if not self.in_circular_repository_dependencies( current_repository_key_rd_dict ):
+ if current_repository_key in self.all_repository_dependencies:
+ self.handle_current_repository_dependency( current_repository_key )
+ elif self.key_rd_dicts_to_be_processed:
+ self.handle_next_repository_dependency()
+ elif self.key_rd_dicts_to_be_processed:
+ self.handle_next_repository_dependency()
+ elif self.key_rd_dicts_to_be_processed:
+ self.handle_next_repository_dependency()
+ self.all_repository_dependencies = self.prune_invalid_repository_dependencies( self.all_repository_dependencies )
+ return self.all_repository_dependencies
+
+ def get_repository_dependency_as_key( self, repository_dependency ):
+ tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \
+ common_util.parse_repository_dependency_tuple( repository_dependency )
+ return container_util.generate_repository_dependencies_key_for_repository( tool_shed,
+ name,
+ owner,
+ changeset_revision,
+ prior_installation_required,
+ only_if_compiling_contained_td )
+
+ def get_updated_changeset_revisions_for_repository_dependencies( self, key_rd_dicts ):
+ updated_key_rd_dicts = []
+ for key_rd_dict in key_rd_dicts:
+ key = key_rd_dict.keys()[ 0 ]
+ repository_dependency = key_rd_dict[ key ]
+ rd_toolshed, \
+ rd_name, \
+ rd_owner, \
+ rd_changeset_revision, \
+ rd_prior_installation_required, \
+ rd_only_if_compiling_contained_td = \
+ common_util.parse_repository_dependency_tuple( repository_dependency )
+ if suc.tool_shed_is_this_tool_shed( rd_toolshed ):
+ repository = suc.get_repository_by_name_and_owner( self.app, rd_name, rd_owner )
+ if repository:
+ repository_id = self.app.security.encode_id( repository.id )
+ repository_metadata = \
+ metadata_util.get_repository_metadata_by_repository_id_changeset_revision( self.app,
+ repository_id,
+ rd_changeset_revision )
+ if repository_metadata:
+ # The repository changeset_revision is installable, so no updates are available.
+ new_key_rd_dict = {}
+ new_key_rd_dict[ key ] = repository_dependency
+ updated_key_rd_dicts.append( key_rd_dict )
+ else:
+ # The repository changeset_revision is no longer installable, so see if there's been an update.
+ repo = hg_util.get_repo_for_repository( self.app, repository=repository, repo_path=None, create=False )
+ changeset_revision = suc.get_next_downloadable_changeset_revision( repository, repo, rd_changeset_revision )
+ repository_metadata = \
+ metadata_util.get_repository_metadata_by_repository_id_changeset_revision( self.app,
+ repository_id,
+ changeset_revision )
+ if repository_metadata:
+ new_key_rd_dict = {}
+ new_key_rd_dict[ key ] = \
+ [ rd_toolshed, \
+ rd_name, \
+ rd_owner, \
+ repository_metadata.changeset_revision, \
+ rd_prior_installation_required, \
+ rd_only_if_compiling_contained_td ]
+                            # We have the updated changeset revision.
+ updated_key_rd_dicts.append( new_key_rd_dict )
+ else:
+ repository_components_tuple = container_util.get_components_from_key( key )
+ components_list = suc.extract_components_from_tuple( repository_components_tuple )
+ toolshed, repository_name, repository_owner, repository_changeset_revision = components_list[ 0:4 ]
+ # For backward compatibility to the 12/20/12 Galaxy release.
+ if len( components_list ) == 4:
+ prior_installation_required = 'False'
+ rd_only_if_compiling_contained_td = 'False'
+ elif len( components_list ) == 5:
+ rd_only_if_compiling_contained_td = 'False'
+ message = "The revision %s defined for repository %s owned by %s is invalid, so repository " % \
+ ( str( rd_changeset_revision ), str( rd_name ), str( rd_owner ) )
+ message += "dependencies defined for repository %s will be ignored." % str( repository_name )
+ log.debug( message )
+ else:
+ repository_components_tuple = container_util.get_components_from_key( key )
+ components_list = suc.extract_components_from_tuple( repository_components_tuple )
+ toolshed, repository_name, repository_owner, repository_changeset_revision = components_list[ 0:4 ]
+ message = "The revision %s defined for repository %s owned by %s is invalid, so repository " % \
+ ( str( rd_changeset_revision ), str( rd_name ), str( rd_owner ) )
+ message += "dependencies defined for repository %s will be ignored." % str( repository_name )
+ log.debug( message )
+ return updated_key_rd_dicts
+
+ def handle_circular_repository_dependency( self, repository_key, repository_dependency ):
+ all_repository_dependencies_root_key = self.all_repository_dependencies[ 'root_key' ]
+ repository_dependency_as_key = self.get_repository_dependency_as_key( repository_dependency )
+ repository_key_as_repository_dependency = repository_key.split( container_util.STRSEP )
+ self.update_circular_repository_dependencies( repository_key,
+ repository_dependency,
+ self.all_repository_dependencies[ repository_dependency_as_key ] )
+ if all_repository_dependencies_root_key != repository_dependency_as_key:
+ self.all_repository_dependencies[ repository_key ] = [ repository_dependency ]
+
+ def handle_current_repository_dependency( self, current_repository_key ):
+ current_repository_key_rd_dicts = []
+ for rd in self.all_repository_dependencies[ current_repository_key ]:
+ rd_copy = [ str( item ) for item in rd ]
+ new_key_rd_dict = {}
+ new_key_rd_dict[ current_repository_key ] = rd_copy
+ current_repository_key_rd_dicts.append( new_key_rd_dict )
+ if current_repository_key_rd_dicts:
+ self.handle_key_rd_dicts_for_repository( current_repository_key, current_repository_key_rd_dicts )
+ return self.get_repository_dependencies_for_changeset_revision()
+
+ def handle_key_rd_dicts_for_repository( self, current_repository_key, repository_key_rd_dicts ):
+ key_rd_dict = repository_key_rd_dicts.pop( 0 )
+ repository_dependency = key_rd_dict[ current_repository_key ]
+ toolshed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \
+ common_util.parse_repository_dependency_tuple( repository_dependency )
+ if suc.tool_shed_is_this_tool_shed( toolshed ):
+ required_repository = suc.get_repository_by_name_and_owner( self.app, name, owner )
+ self.repository = required_repository
+ repository_id = self.app.security.encode_id( required_repository.id )
+ required_repository_metadata = \
+ metadata_util.get_repository_metadata_by_repository_id_changeset_revision( self.app,
+ repository_id,
+ changeset_revision )
+ self.repository_metadata = required_repository_metadata
+ if required_repository_metadata:
+ # The required_repository_metadata changeset_revision is installable.
+ required_metadata = required_repository_metadata.metadata
+ if required_metadata:
+ for current_repository_key_rd_dict in repository_key_rd_dicts:
+ if not self.in_key_rd_dicts( current_repository_key_rd_dict, self.key_rd_dicts_to_be_processed ):
+ # Add the current repository_dependency into self.key_rd_dicts_to_be_processed.
+ self.key_rd_dicts_to_be_processed.append( current_repository_key_rd_dict )
+ if not self.in_key_rd_dicts( key_rd_dict, self.handled_key_rd_dicts ):
+ # Add the current repository_dependency into self.handled_key_rd_dicts.
+ self.handled_key_rd_dicts.append( key_rd_dict )
+ if self.in_key_rd_dicts( key_rd_dict, self.key_rd_dicts_to_be_processed ):
+ # Remove the current repository from self.key_rd_dicts_to_be_processed.
+ self.key_rd_dicts_to_be_processed = self.remove_from_key_rd_dicts( key_rd_dict, self.key_rd_dicts_to_be_processed )
+ else:
+ # The repository is in a different tool shed, so build an url and send a request.
+ error_message = "Repository dependencies are currently supported only within the same Tool Shed. "
+ error_message += "Ignoring repository dependency definition for tool shed "
+ error_message += "%s, name %s, owner %s, changeset revision %s" % ( toolshed, name, owner, changeset_revision )
+ log.debug( error_message )
+
+ def handle_next_repository_dependency( self ):
+ next_repository_key_rd_dict = self.key_rd_dicts_to_be_processed.pop( 0 )
+ next_repository_key_rd_dicts = [ next_repository_key_rd_dict ]
+ next_repository_key = next_repository_key_rd_dict.keys()[ 0 ]
+ self.handle_key_rd_dicts_for_repository( next_repository_key, next_repository_key_rd_dicts )
+ return self.get_repository_dependencies_for_changeset_revision()
+
+ def in_all_repository_dependencies( self, repository_key, repository_dependency ):
+ """
+ Return True if { repository_key : repository_dependency } is in self.all_repository_dependencies.
+ """
+ for key, val in self.all_repository_dependencies.items():
+ if key != repository_key:
+ continue
+ if repository_dependency in val:
+ return True
+ return False
+
+ def in_circular_repository_dependencies( self, repository_key_rd_dict ):
+ """
+ Return True if any combination of a circular dependency tuple is the key : value pair defined
+ in the received repository_key_rd_dict. This means that each circular dependency tuple is converted
+ into the key : value pair for comparison.
+ """
+ for tup in self.circular_repository_dependencies:
+ rd_0, rd_1 = tup
+ rd_0_as_key = self.get_repository_dependency_as_key( rd_0 )
+ rd_1_as_key = self.get_repository_dependency_as_key( rd_1 )
+ if rd_0_as_key in repository_key_rd_dict and repository_key_rd_dict[ rd_0_as_key ] == rd_1:
+ return True
+ if rd_1_as_key in repository_key_rd_dict and repository_key_rd_dict[ rd_1_as_key ] == rd_0:
+ return True
+ return False
+
+ def in_key_rd_dicts( self, key_rd_dict, key_rd_dicts ):
+ """Return True if key_rd_dict is contained in the list of key_rd_dicts."""
+ k = key_rd_dict.keys()[ 0 ]
+ v = key_rd_dict[ k ]
+ for key_rd_dict in key_rd_dicts:
+ for key, val in key_rd_dict.items():
+ if key == k and val == v:
+ return True
+ return False
+
+ def initialize_all_repository_dependencies( self, current_repository_key, repository_dependencies_dict ):
+ """Initialize the self.all_repository_dependencies dictionary."""
+ # It's safe to assume that current_repository_key in this case will have a value.
+ self.all_repository_dependencies[ 'root_key' ] = current_repository_key
+ self.all_repository_dependencies[ current_repository_key ] = []
+ # Store the value of the 'description' key only once, the first time through this recursive method.
+ description = repository_dependencies_dict.get( 'description', None )
+ self.all_repository_dependencies[ 'description' ] = description
+
+ def is_circular_repository_dependency( self, repository_key, repository_dependency ):
+ """
+ Return True if the received repository_dependency is a key in self.all_repository_dependencies
+ whose list of repository dependencies includes the received repository_key.
+ """
+ repository_dependency_as_key = self.get_repository_dependency_as_key( repository_dependency )
+ repository_key_as_repository_dependency = repository_key.split( container_util.STRSEP )
+ for key, val in self.all_repository_dependencies.items():
+ if key != repository_dependency_as_key:
+ continue
+ if repository_key_as_repository_dependency in val:
+ return True
+ return False
+
+ def populate_repository_dependency_objects_for_processing( self, current_repository_key, repository_dependencies_dict ):
+ """
+ The process that discovers all repository dependencies for a specified repository's changeset
+ revision uses this method to populate the following items for the current processing loop:
+ filtered_current_repository_key_rd_dicts, self.key_rd_dicts_to_be_processed,
+ self.handled_key_rd_dicts, self.all_repository_dependencies. Each processing loop may discover
+ more repository dependencies, so this method is repeatedly called until all repository
+ dependencies have been discovered.
+ """
+ current_repository_key_rd_dicts = []
+ filtered_current_repository_key_rd_dicts = []
+ for rd_tup in repository_dependencies_dict[ 'repository_dependencies' ]:
+ new_key_rd_dict = {}
+ new_key_rd_dict[ current_repository_key ] = rd_tup
+ current_repository_key_rd_dicts.append( new_key_rd_dict )
+ if current_repository_key_rd_dicts and current_repository_key:
+ # Remove all repository dependencies that point to a revision within its own repository.
+ current_repository_key_rd_dicts = \
+ self.remove_ropository_dependency_reference_to_self( current_repository_key_rd_dicts )
+ current_repository_key_rd_dicts = \
+ self.get_updated_changeset_revisions_for_repository_dependencies( current_repository_key_rd_dicts )
+ for key_rd_dict in current_repository_key_rd_dicts:
+ # Filter out repository dependencies that are required only if compiling the dependent
+ # repository's tool dependency.
+ key_rd_dict = self.filter_only_if_compiling_contained_td( key_rd_dict )
+ if key_rd_dict:
+ is_circular = False
+ in_handled_key_rd_dicts = self.in_key_rd_dicts( key_rd_dict, self.handled_key_rd_dicts )
+ in_key_rd_dicts_to_be_processed = self.in_key_rd_dicts( key_rd_dict, self.key_rd_dicts_to_be_processed )
+ if not in_handled_key_rd_dicts and not in_key_rd_dicts_to_be_processed:
+ filtered_current_repository_key_rd_dicts.append( key_rd_dict )
+ repository_dependency = key_rd_dict[ current_repository_key ]
+ if current_repository_key in self.all_repository_dependencies:
+ # Add all repository dependencies for the current repository into its entry
+ # in self.all_repository_dependencies.
+ all_repository_dependencies_val = self.all_repository_dependencies[ current_repository_key ]
+ if repository_dependency not in all_repository_dependencies_val:
+ all_repository_dependencies_val.append( repository_dependency )
+ self.all_repository_dependencies[ current_repository_key ] = all_repository_dependencies_val
+ elif not self.in_all_repository_dependencies( current_repository_key, repository_dependency ):
+ # Handle circular repository dependencies.
+ if self.is_circular_repository_dependency( current_repository_key, repository_dependency ):
+ is_circular = True
+ self.handle_circular_repository_dependency( current_repository_key, repository_dependency )
+ else:
+ self.all_repository_dependencies[ current_repository_key ] = [ repository_dependency ]
+ if not is_circular and self.can_add_to_key_rd_dicts( key_rd_dict, self.key_rd_dicts_to_be_processed ):
+ new_key_rd_dict = {}
+ new_key_rd_dict[ current_repository_key ] = repository_dependency
+ self.key_rd_dicts_to_be_processed.append( new_key_rd_dict )
+ return filtered_current_repository_key_rd_dicts
+
+ def prune_invalid_repository_dependencies( self, repository_dependencies ):
+ """
+ Eliminate all invalid entries in the received repository_dependencies dictionary. An entry
+ is invalid if the value_list of the key/value pair is empty. This occurs when an invalid
+        combination of tool shed, name, owner, changeset_revision is used and a repository_metadata
+ record is not found.
+ """
+ valid_repository_dependencies = {}
+ description = repository_dependencies.get( 'description', None )
+ root_key = repository_dependencies.get( 'root_key', None )
+ if root_key is None:
+ return valid_repository_dependencies
+ for key, value in repository_dependencies.items():
+ if key in [ 'description', 'root_key' ]:
+ continue
+ if value:
+ valid_repository_dependencies[ key ] = value
+ if valid_repository_dependencies:
+ valid_repository_dependencies[ 'description' ] = description
+ valid_repository_dependencies[ 'root_key' ] = root_key
+ return valid_repository_dependencies
+
+ def remove_from_key_rd_dicts( self, key_rd_dict, key_rd_dicts ):
+ """Eliminate the key_rd_dict from the list of key_rd_dicts if it is contained in the list."""
+ k = key_rd_dict.keys()[ 0 ]
+ v = key_rd_dict[ k ]
+ clean_key_rd_dicts = []
+ for krd_dict in key_rd_dicts:
+ key = krd_dict.keys()[ 0 ]
+ val = krd_dict[ key ]
+ if key == k and val == v:
+ continue
+ clean_key_rd_dicts.append( krd_dict )
+ return clean_key_rd_dicts
+
+ def remove_ropository_dependency_reference_to_self( self, key_rd_dicts ):
+ """Remove all repository dependencies that point to a revision within its own repository."""
+ clean_key_rd_dicts = []
+ key = key_rd_dicts[ 0 ].keys()[ 0 ]
+ repository_tup = key.split( container_util.STRSEP )
+ rd_toolshed, \
+ rd_name, \
+ rd_owner, \
+ rd_changeset_revision, \
+ rd_prior_installation_required, \
+ rd_only_if_compiling_contained_td = \
+ common_util.parse_repository_dependency_tuple( repository_tup )
+ cleaned_rd_toolshed = common_util.remove_protocol_from_tool_shed_url( rd_toolshed )
+ for key_rd_dict in key_rd_dicts:
+ k = key_rd_dict.keys()[ 0 ]
+ repository_dependency = key_rd_dict[ k ]
+ toolshed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \
+ common_util.parse_repository_dependency_tuple( repository_dependency )
+ cleaned_toolshed = common_util.remove_protocol_from_tool_shed_url( toolshed )
+ if cleaned_rd_toolshed == cleaned_toolshed and rd_name == name and rd_owner == owner:
+ debug_msg = "Removing repository dependency for repository %s owned by %s " % ( name, owner )
+ debug_msg += 'since it refers to a revision within itself.'
+ log.debug( debug_msg )
+ else:
+ new_key_rd_dict = {}
+ new_key_rd_dict[ key ] = repository_dependency
+ clean_key_rd_dicts.append( new_key_rd_dict )
+ return clean_key_rd_dicts
+
+ def update_circular_repository_dependencies( self, repository_key, repository_dependency, repository_dependencies ):
+ repository_dependency_as_key = self.get_repository_dependency_as_key( repository_dependency )
+ repository_key_as_repository_dependency = repository_key.split( container_util.STRSEP )
+ if repository_key_as_repository_dependency in repository_dependencies:
+ found = False
+ for tup in self.circular_repository_dependencies:
+ if repository_dependency in tup and repository_key_as_repository_dependency in tup:
+ # The circular dependency has already been included.
+ found = True
+ if not found:
+ new_circular_tup = [ repository_dependency, repository_key_as_repository_dependency ]
+ self.circular_repository_dependencies.append( new_circular_tup )
diff -r 893a727a7b7cc83355bc977d034622da50af16e5 -r 7628e1d9d8e2900cda5fb90ff239a32c0e091df9 lib/tool_shed/galaxy_install/install_manager.py
--- a/lib/tool_shed/galaxy_install/install_manager.py
+++ b/lib/tool_shed/galaxy_install/install_manager.py
@@ -25,7 +25,6 @@
from tool_shed.util import encoding_util
from tool_shed.util import hg_util
from tool_shed.util import metadata_util
-from tool_shed.util import repository_dependency_util
from tool_shed.util import shed_util_common as suc
from tool_shed.util import tool_dependency_util
from tool_shed.util import tool_util
@@ -35,7 +34,7 @@
from tool_shed.galaxy_install.tool_dependencies.recipe.install_environment import InstallEnvironment
from tool_shed.galaxy_install.tool_dependencies.recipe.recipe_manager import StepManager
from tool_shed.galaxy_install.tool_dependencies.recipe.recipe_manager import TagManager
-from tool_shed.galaxy_install.repository_dependencies.repository_dependency_manager import RepositoryDependencyManager
+from tool_shed.galaxy_install.repository_dependencies import repository_dependency_manager
log = logging.getLogger( __name__ )
@@ -612,15 +611,15 @@
tool_panel_section_id = installation_dict[ 'tool_panel_section_id' ]
tool_path = installation_dict[ 'tool_path' ]
tool_shed_url = installation_dict[ 'tool_shed_url' ]
- rdm = RepositoryDependencyManager( self.app )
+ rdim = repository_dependency_manager.RepositoryDependencyInstallManager( self.app )
created_or_updated_tool_shed_repositories, tool_panel_section_keys, repo_info_dicts, filtered_repo_info_dicts = \
- rdm.create_repository_dependency_objects( tool_path=tool_path,
- tool_shed_url=tool_shed_url,
- repo_info_dicts=repo_info_dicts,
- install_repository_dependencies=install_repository_dependencies,
- no_changes_checked=no_changes_checked,
- tool_panel_section_id=tool_panel_section_id,
- new_tool_panel_section_label=new_tool_panel_section_label )
+ rdim.create_repository_dependency_objects( tool_path=tool_path,
+ tool_shed_url=tool_shed_url,
+ repo_info_dicts=repo_info_dicts,
+ install_repository_dependencies=install_repository_dependencies,
+ no_changes_checked=no_changes_checked,
+ tool_panel_section_id=tool_panel_section_id,
+ new_tool_panel_section_label=new_tool_panel_section_label )
return created_or_updated_tool_shed_repositories, tool_panel_section_keys, repo_info_dicts, filtered_repo_info_dicts
def initiate_repository_installation( self, installation_dict ):
diff -r 893a727a7b7cc83355bc977d034622da50af16e5 -r 7628e1d9d8e2900cda5fb90ff239a32c0e091df9 lib/tool_shed/galaxy_install/installed_repository_manager.py
--- a/lib/tool_shed/galaxy_install/installed_repository_manager.py
+++ b/lib/tool_shed/galaxy_install/installed_repository_manager.py
@@ -9,14 +9,13 @@
from tool_shed.util import container_util
from tool_shed.util import data_manager_util
from tool_shed.util import datatype_util
-from tool_shed.util import repository_dependency_util
from tool_shed.util import shed_util_common as suc
from tool_shed.util import tool_dependency_util
from tool_shed.util import tool_util
from tool_shed.util import xml_util
from galaxy.model.orm import and_
-from tool_shed.galaxy_install.repository_dependencies.repository_dependency_manager import RepositoryDependencyManager
+from tool_shed.galaxy_install.repository_dependencies import repository_dependency_manager
log = logging.getLogger( __name__ )
@@ -229,7 +228,7 @@
Return dictionaries containing the sets of installed and missing tool dependencies and repository
dependencies associated with the repository defined by the received repo_info_dict.
"""
- rdm = RepositoryDependencyManager( self.app )
+ rdim = repository_dependency_manager.RepositoryDependencyInstallManager( self.app )
repository = None
installed_rd = {}
installed_td = {}
@@ -261,7 +260,7 @@
installed_rd, missing_rd = \
self.get_installed_and_missing_repository_dependencies_for_new_or_updated_install( repo_info_tuple )
# Discover all repository dependencies and retrieve information for installing them.
- all_repo_info_dict = rdm.get_required_repo_info_dicts( tool_shed_url, util.listify( repo_info_dict ) )
+ all_repo_info_dict = rdim.get_required_repo_info_dicts( tool_shed_url, util.listify( repo_info_dict ) )
has_repository_dependencies = all_repo_info_dict.get( 'has_repository_dependencies', False )
has_repository_dependencies_only_if_compiling_contained_td = \
all_repo_info_dict.get( 'has_repository_dependencies_only_if_compiling_contained_td', False )
@@ -299,7 +298,7 @@
missing_td[ td_key ] = td_dict
else:
# We have a single repository with (possibly) no defined repository dependencies.
- all_repo_info_dict = rdm.get_required_repo_info_dicts( tool_shed_url, util.listify( repo_info_dict ) )
+ all_repo_info_dict = rdim.get_required_repo_info_dicts( tool_shed_url, util.listify( repo_info_dict ) )
has_repository_dependencies = all_repo_info_dict.get( 'has_repository_dependencies', False )
has_repository_dependencies_only_if_compiling_contained_td = \
all_repo_info_dict.get( 'has_repository_dependencies_only_if_compiling_contained_td', False )
diff -r 893a727a7b7cc83355bc977d034622da50af16e5 -r 7628e1d9d8e2900cda5fb90ff239a32c0e091df9 lib/tool_shed/galaxy_install/repair_repository_manager.py
--- a/lib/tool_shed/galaxy_install/repair_repository_manager.py
+++ b/lib/tool_shed/galaxy_install/repair_repository_manager.py
@@ -4,12 +4,11 @@
log = logging.getLogger( __name__ )
from tool_shed.galaxy_install import install_manager
-from tool_shed.galaxy_install.repository_dependencies.repository_dependency_manager import RepositoryDependencyManager
+from tool_shed.galaxy_install.repository_dependencies import repository_dependency_manager
from tool_shed.util import common_util
from tool_shed.util import container_util
from tool_shed.util import shed_util_common as suc
-from tool_shed.util import repository_dependency_util
from tool_shed.util import repository_maintenance_util
from tool_shed.util import tool_dependency_util
from tool_shed.util import tool_util
@@ -56,7 +55,7 @@
issues with an installed repository that has installation problems somewhere in its
dependency hierarchy.
"""
- rdm = RepositoryDependencyManager( self.app )
+ rdim = repository_dependency_manager.RepositoryDependencyInstallManager( self.app )
tsr_ids = []
repo_info_dicts = []
tool_panel_section_keys = []
@@ -64,8 +63,8 @@
irm = install_manager.InstallRepositoryManager( self.app )
# Get a dictionary of all repositories upon which the contents of the current repository_metadata
#record depend.
- repository_dependencies_dict = rdm.get_repository_dependencies_for_installed_tool_shed_repository( self.app,
- repository )
+ repository_dependencies_dict = rdim.get_repository_dependencies_for_installed_tool_shed_repository( self.app,
+ repository )
if repository_dependencies_dict:
# Generate the list of installed repositories from the information contained in the
# repository_dependencies dictionary.
@@ -75,14 +74,14 @@
# repaired in the required order.
for installed_repository in installed_repositories:
tsr_ids.append( self.app.security.encode_id( installed_repository.id ) )
- repo_info_dict, tool_panel_section_key = self.get_repo_info_dict_for_repair( rdm,
+ repo_info_dict, tool_panel_section_key = self.get_repo_info_dict_for_repair( rdim,
installed_repository )
tool_panel_section_keys.append( tool_panel_section_key )
repo_info_dicts.append( repo_info_dict )
else:
# The received repository has no repository dependencies.
tsr_ids.append( self.app.security.encode_id( repository.id ) )
- repo_info_dict, tool_panel_section_key = self.get_repo_info_dict_for_repair( rdm,
+ repo_info_dict, tool_panel_section_key = self.get_repo_info_dict_for_repair( rdim,
repository )
tool_panel_section_keys.append( tool_panel_section_key )
repo_info_dicts.append( repo_info_dict )
@@ -95,11 +94,11 @@
repair_dict[ 'ordered_tool_panel_section_keys' ] = ordered_tool_panel_section_keys
return repair_dict
- def get_repo_info_dict_for_repair( self, rdm, repository ):
+ def get_repo_info_dict_for_repair( self, rdim, repository ):
tool_panel_section_key = None
repository_clone_url = common_util.generate_clone_url_for_installed_repository( self.app, repository )
- repository_dependencies = rdm.get_repository_dependencies_for_installed_tool_shed_repository( self.app,
- repository )
+ repository_dependencies = rdim.get_repository_dependencies_for_installed_tool_shed_repository( self.app,
+ repository )
metadata = repository.metadata
if metadata:
tool_dependencies = metadata.get( 'tool_dependencies', None )
diff -r 893a727a7b7cc83355bc977d034622da50af16e5 -r 7628e1d9d8e2900cda5fb90ff239a32c0e091df9 lib/tool_shed/galaxy_install/repository_dependencies/repository_dependency_manager.py
--- a/lib/tool_shed/galaxy_install/repository_dependencies/repository_dependency_manager.py
+++ b/lib/tool_shed/galaxy_install/repository_dependencies/repository_dependency_manager.py
@@ -19,7 +19,7 @@
log = logging.getLogger( __name__ )
-class RepositoryDependencyManager( object ):
+class RepositoryDependencyInstallManager( object ):
def __init__( self, app ):
self.app = app
diff -r 893a727a7b7cc83355bc977d034622da50af16e5 -r 7628e1d9d8e2900cda5fb90ff239a32c0e091df9 lib/tool_shed/util/metadata_util.py
--- a/lib/tool_shed/util/metadata_util.py
+++ b/lib/tool_shed/util/metadata_util.py
@@ -607,6 +607,48 @@
tmp_url = common_util.remove_protocol_and_user_from_clone_url( repository_clone_url )
return '%s/%s/%s/%s' % ( tmp_url, guid_type, obj_id, version )
+def generate_message_for_invalid_repository_dependencies( metadata_dict, error_from_tuple=False ):
+ """Get or generate and return an error message associated with an invalid repository dependency."""
+ message = ''
+ if metadata_dict:
+ if error_from_tuple:
+ # Return the error messages associated with a set of one or more invalid repository dependency tuples.
+ invalid_repository_dependencies_dict = metadata_dict.get( 'invalid_repository_dependencies', None )
+ if invalid_repository_dependencies_dict is not None:
+ invalid_repository_dependencies = invalid_repository_dependencies_dict.get( 'invalid_repository_dependencies', [] )
+ for repository_dependency_tup in invalid_repository_dependencies:
+ toolshed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td, error = \
+ common_util.parse_repository_dependency_tuple( repository_dependency_tup, contains_error=True )
+ if error:
+ message += '%s ' % str( error )
+ else:
+ # The complete dependency hierarchy could not be determined for a repository being installed into
+ # Galaxy. This is likely due to invalid repository dependency definitions, so we'll get them from
+ # the metadata and parse them for display in an error message. This will hopefully communicate the
+ # problem to the user in such a way that a resolution can be determined.
+ message += 'The complete dependency hierarchy could not be determined for this repository, so no required '
+ message += 'repositories will not be installed. This is likely due to invalid repository dependency definitions. '
+ repository_dependencies_dict = metadata_dict.get( 'repository_dependencies', None )
+ if repository_dependencies_dict is not None:
+ rd_tups = repository_dependencies_dict.get( 'repository_dependencies', None )
+ if rd_tups is not None:
+ message += 'Here are the attributes of the dependencies defined for this repository to help determine the '
+ message += 'cause of this problem.<br/>'
+ message += '<table cellpadding="2" cellspacing="2">'
+ message += '<tr><th>Tool shed</th><th>Repository name</th><th>Owner</th><th>Changeset revision</th>'
+ message += '<th>Prior install required</th></tr>'
+ for rd_tup in rd_tups:
+ tool_shed, name, owner, changeset_revision, pir, oicct = \
+ common_util.parse_repository_dependency_tuple( rd_tup )
+ if util.asbool( pir ):
+ pir_str = 'True'
+ else:
+ pir_str = ''
+ message += '<tr><td>%s</td><td>%s</td><td>%s</td><td>%s</td><td>%s</td></tr>' % \
+ ( tool_shed, name, owner, changeset_revision, pir_str )
+ message += '</table>'
+ return message
+
def generate_metadata_for_changeset_revision( app, repository, changeset_revision, repository_clone_url,
shed_config_dict=None, relative_install_dir=None, repository_files_dir=None,
resetting_all_metadata_on_repository=False, updating_installed_repository=False,
@@ -1156,6 +1198,36 @@
relative_path_to_file = relative_path_to_file[ len( shed_config_dict.get( 'tool_path' ) ) + 1: ]
return relative_path_to_file
+def get_repository_dependency_tups_from_repository_metadata( app, repository_metadata, deprecated_only=False ):
+ """
+    Return a list of tuples defining repository objects required by the received repository. The returned
+ list defines the entire repository dependency tree. This method is called only from the Tool Shed.
+ """
+ dependency_tups = []
+ if repository_metadata is not None:
+ metadata = repository_metadata.metadata
+ if metadata:
+ repository_dependencies_dict = metadata.get( 'repository_dependencies', None )
+ if repository_dependencies_dict is not None:
+ repository_dependency_tups = repository_dependencies_dict.get( 'repository_dependencies', None )
+ if repository_dependency_tups is not None:
+ # The value of repository_dependency_tups is a list of repository dependency tuples like this:
+ # ['http://localhost:9009', 'package_samtools_0_1_18', 'devteam', 'ef37fc635cb9', 'False', 'False']
+ for repository_dependency_tup in repository_dependency_tups:
+ toolshed, name, owner, changeset_revision, pir, oicct = \
+ common_util.parse_repository_dependency_tuple( repository_dependency_tup )
+ repository = suc.get_repository_by_name_and_owner( app, name, owner )
+ if repository:
+ if deprecated_only:
+ if repository.deprecated:
+ dependency_tups.append( repository_dependency_tup )
+ else:
+ dependency_tups.append( repository_dependency_tup )
+ else:
+ log.debug( "Cannot locate repository %s owned by %s for inclusion in repository dependency tups." % \
+ ( name, owner ) )
+ return dependency_tups
+
def get_repository_metadata_by_id( app, id ):
"""Get repository metadata from the database"""
sa_session = app.model.context.current
diff -r 893a727a7b7cc83355bc977d034622da50af16e5 -r 7628e1d9d8e2900cda5fb90ff239a32c0e091df9 lib/tool_shed/util/repository_dependency_util.py
--- a/lib/tool_shed/util/repository_dependency_util.py
+++ /dev/null
@@ -1,609 +0,0 @@
-import json
-import logging
-import os
-
-from galaxy.util import asbool
-from galaxy.util import listify
-
-import tool_shed.util.shed_util_common as suc
-from tool_shed.util import common_util
-from tool_shed.util import container_util
-from tool_shed.util import encoding_util
-from tool_shed.util import hg_util
-from tool_shed.util import metadata_util
-from tool_shed.util import tool_util
-
-log = logging.getLogger( __name__ )
-
-def can_add_to_key_rd_dicts( key_rd_dict, key_rd_dicts ):
- """Handle the case where an update to the changeset revision was done."""
- k = key_rd_dict.keys()[ 0 ]
- rd = key_rd_dict[ k ]
- partial_rd = rd[ 0:3 ]
- for kr_dict in key_rd_dicts:
- key = kr_dict.keys()[ 0 ]
- if key == k:
- repository_dependency = kr_dict[ key ]
- if repository_dependency[ 0:3 ] == partial_rd:
- return False
- return True
-
-def generate_message_for_invalid_repository_dependencies( metadata_dict, error_from_tuple=False ):
- """Get or generate and return an error message associated with an invalid repository dependency."""
- message = ''
- if metadata_dict:
- if error_from_tuple:
- # Return the error messages associated with a set of one or more invalid repository dependency tuples.
- invalid_repository_dependencies_dict = metadata_dict.get( 'invalid_repository_dependencies', None )
- if invalid_repository_dependencies_dict is not None:
- invalid_repository_dependencies = invalid_repository_dependencies_dict.get( 'invalid_repository_dependencies', [] )
- for repository_dependency_tup in invalid_repository_dependencies:
- toolshed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td, error = \
- common_util.parse_repository_dependency_tuple( repository_dependency_tup, contains_error=True )
- if error:
- message += '%s ' % str( error )
- else:
- # The complete dependency hierarchy could not be determined for a repository being installed into
- # Galaxy. This is likely due to invalid repository dependency definitions, so we'll get them from
- # the metadata and parse them for display in an error message. This will hopefully communicate the
- # problem to the user in such a way that a resolution can be determined.
- message += 'The complete dependency hierarchy could not be determined for this repository, so no required '
- message += 'repositories will not be installed. This is likely due to invalid repository dependency definitions. '
- repository_dependencies_dict = metadata_dict.get( 'repository_dependencies', None )
- if repository_dependencies_dict is not None:
- rd_tups = repository_dependencies_dict.get( 'repository_dependencies', None )
- if rd_tups is not None:
- message += 'Here are the attributes of the dependencies defined for this repository to help determine the '
- message += 'cause of this problem.<br/>'
- message += '<table cellpadding="2" cellspacing="2">'
- message += '<tr><th>Tool shed</th><th>Repository name</th><th>Owner</th><th>Changeset revision</th>'
- message += '<th>Prior install required</th></tr>'
- for rd_tup in rd_tups:
- tool_shed, name, owner, changeset_revision, pir, oicct = \
- common_util.parse_repository_dependency_tuple( rd_tup )
- if asbool( pir ):
- pir_str = 'True'
- else:
- pir_str = ''
- message += '<tr><td>%s</td><td>%s</td><td>%s</td><td>%s</td><td>%s</td></tr>' % \
- ( tool_shed, name, owner, changeset_revision, pir_str )
- message += '</table>'
- return message
-
-def get_key_for_repository_changeset_revision( app, toolshed_base_url, repository, repository_metadata, all_repository_dependencies ):
- # The received toolshed_base_url must include the port, but doesn't have to include the protocol.
- prior_installation_required, only_if_compiling_contained_td = \
- get_prior_installation_required_and_only_if_compiling_contained_td( app,
- toolshed_base_url,
- repository,
- repository_metadata,
- all_repository_dependencies )
- # Create a key with the value of prior_installation_required defaulted to False.
- key = container_util.generate_repository_dependencies_key_for_repository( toolshed_base_url=toolshed_base_url,
- repository_name=repository.name,
- repository_owner=repository.user.username,
- changeset_revision=repository_metadata.changeset_revision,
- prior_installation_required=prior_installation_required,
- only_if_compiling_contained_td=only_if_compiling_contained_td )
- return key
-
-def get_prior_installation_required_and_only_if_compiling_contained_td( app, toolshed_base_url, repository, repository_metadata,
- all_repository_dependencies ):
- """
- This method is called from the tool shed and never Galaxy. If all_repository_dependencies contains
- a repository dependency tuple that is associated with the received repository, return the value of
- the tuple's prior_installation_required component.
- """
- cleaned_toolshed_base_url = common_util.remove_protocol_from_tool_shed_url( toolshed_base_url )
- if all_repository_dependencies:
- for rd_key, rd_tups in all_repository_dependencies.items():
- if rd_key in [ 'root_key', 'description' ]:
- continue
- for rd_tup in rd_tups:
- rd_toolshed, rd_name, rd_owner, rd_changeset_revision, rd_prior_installation_required, rd_only_if_compiling_contained_td = \
- common_util.parse_repository_dependency_tuple( rd_tup )
- cleaned_rd_toolshed = common_util.remove_protocol_from_tool_shed_url( rd_toolshed )
- if cleaned_rd_toolshed == cleaned_toolshed_base_url and \
- rd_name == repository.name and \
- rd_owner == repository.user.username and \
- rd_changeset_revision == repository_metadata.changeset_revision:
- return rd_prior_installation_required, rd_only_if_compiling_contained_td
- elif repository_metadata:
- # Get the list of changeset revisions from the tool shed to which the repository may be updated.
- metadata = repository_metadata.metadata
- current_changeset_revision = str( repository_metadata.changeset_revision )
- # Get the changeset revision to which the current value of required_repository_changeset_revision should be updated if it's not current.
- text = suc.get_updated_changeset_revisions( app,
- name=str( repository.name ),
- owner=str( repository.user.username ),
- changeset_revision=current_changeset_revision )
- if text:
- valid_changeset_revisions = listify( text )
- if current_changeset_revision not in valid_changeset_revisions:
- valid_changeset_revisions.append( current_changeset_revision )
- else:
- valid_changeset_revisions = [ current_changeset_revision ]
- repository_dependencies_dict = metadata[ 'repository_dependencies' ]
- rd_tups = repository_dependencies_dict.get( 'repository_dependencies', [] )
- for rd_tup in rd_tups:
- rd_toolshed, rd_name, rd_owner, rd_changeset_revision, rd_prior_installation_required, rd_only_if_compiling_contained_td = \
- common_util.parse_repository_dependency_tuple( rd_tup )
- cleaned_rd_toolshed = common_util.remove_protocol_from_tool_shed_url( rd_toolshed )
- if cleaned_rd_toolshed == cleaned_toolshed_base_url and \
- rd_name == repository.name and \
- rd_owner == repository.user.username and \
- rd_changeset_revision in valid_changeset_revisions:
- return rd_prior_installation_required, rd_only_if_compiling_contained_td
- # Default both prior_installation_required and only_if_compiling_contained_td to False.
- return 'False', 'False'
-
-def get_repository_dependency_as_key( repository_dependency ):
- tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \
- common_util.parse_repository_dependency_tuple( repository_dependency )
- return container_util.generate_repository_dependencies_key_for_repository( tool_shed,
- name,
- owner,
- changeset_revision,
- prior_installation_required,
- only_if_compiling_contained_td )
-
-def get_repository_dependencies_for_changeset_revision( app, repository, repository_metadata, toolshed_base_url,
- key_rd_dicts_to_be_processed=None, all_repository_dependencies=None,
- handled_key_rd_dicts=None, circular_repository_dependencies=None ):
- """
- Return a dictionary of all repositories upon which the contents of the received
- repository_metadata record depend. The dictionary keys are name-spaced values
- consisting of:
- toolshed_base_url/repository_name/repository_owner/changeset_revision
- and the values are lists of repository_dependency tuples consisting of:
- ( toolshed_base_url, repository_name, repository_owner, changeset_revision ).
- This method ensures that all required repositories to the nth degree are returned.
- """
- if handled_key_rd_dicts is None:
- handled_key_rd_dicts = []
- if all_repository_dependencies is None:
- all_repository_dependencies = {}
- if key_rd_dicts_to_be_processed is None:
- key_rd_dicts_to_be_processed = []
- if circular_repository_dependencies is None:
- circular_repository_dependencies = []
- # Assume the current repository does not have repository dependencies defined for it.
- current_repository_key = None
- metadata = repository_metadata.metadata
- if metadata:
- # The value of the received toolshed_base_url must include the port, but doesn't have
- # to include the protocol.
- if 'repository_dependencies' in metadata:
- current_repository_key = get_key_for_repository_changeset_revision( app,
- toolshed_base_url,
- repository,
- repository_metadata,
- all_repository_dependencies )
- repository_dependencies_dict = metadata[ 'repository_dependencies' ]
- if not all_repository_dependencies:
- all_repository_dependencies = initialize_all_repository_dependencies( current_repository_key,
- repository_dependencies_dict,
- all_repository_dependencies )
- # Handle the repository dependencies defined in the current repository, if any, and populate
- # the various repository dependency objects for this round of processing.
- current_repository_key_rd_dicts, key_rd_dicts_to_be_processed, handled_key_rd_dicts, all_repository_dependencies = \
- populate_repository_dependency_objects_for_processing( app,
- current_repository_key,
- repository_dependencies_dict,
- key_rd_dicts_to_be_processed,
- handled_key_rd_dicts,
- circular_repository_dependencies,
- all_repository_dependencies )
- if current_repository_key:
- if current_repository_key_rd_dicts:
- # There should be only a single current_repository_key_rd_dict in this list.
- current_repository_key_rd_dict = current_repository_key_rd_dicts[ 0 ]
- # Handle circular repository dependencies.
- if not in_circular_repository_dependencies( current_repository_key_rd_dict,
- circular_repository_dependencies ):
- if current_repository_key in all_repository_dependencies:
- handle_current_repository_dependency( app,
- current_repository_key,
- key_rd_dicts_to_be_processed,
- all_repository_dependencies,
- handled_key_rd_dicts,
- circular_repository_dependencies )
- elif key_rd_dicts_to_be_processed:
- handle_next_repository_dependency( app,
- key_rd_dicts_to_be_processed,
- all_repository_dependencies,
- handled_key_rd_dicts,
- circular_repository_dependencies )
- elif key_rd_dicts_to_be_processed:
- handle_next_repository_dependency( app,
- key_rd_dicts_to_be_processed,
- all_repository_dependencies,
- handled_key_rd_dicts,
- circular_repository_dependencies )
- elif key_rd_dicts_to_be_processed:
- handle_next_repository_dependency( app,
- key_rd_dicts_to_be_processed,
- all_repository_dependencies,
- handled_key_rd_dicts,
- circular_repository_dependencies )
- all_repository_dependencies = prune_invalid_repository_dependencies( all_repository_dependencies )
- return all_repository_dependencies
-
-def get_repository_dependency_tups_from_repository_metadata( app, repository_metadata, deprecated_only=False ):
- """
- Return a list of of tuples defining repository objects required by the received repository. The returned
- list defines the entire repository dependency tree. This method is called only from the Tool Shed.
- """
- dependency_tups = []
- if repository_metadata is not None:
- metadata = repository_metadata.metadata
- if metadata:
- repository_dependencies_dict = metadata.get( 'repository_dependencies', None )
- if repository_dependencies_dict is not None:
- repository_dependency_tups = repository_dependencies_dict.get( 'repository_dependencies', None )
- if repository_dependency_tups is not None:
- # The value of repository_dependency_tups is a list of repository dependency tuples like this:
- # ['http://localhost:9009', 'package_samtools_0_1_18', 'devteam', 'ef37fc635cb9', 'False', 'False']
- for repository_dependency_tup in repository_dependency_tups:
- toolshed, name, owner, changeset_revision, pir, oicct = \
- common_util.parse_repository_dependency_tuple( repository_dependency_tup )
- repository = suc.get_repository_by_name_and_owner( app, name, owner )
- if repository:
- if deprecated_only:
- if repository.deprecated:
- dependency_tups.append( repository_dependency_tup )
- else:
- dependency_tups.append( repository_dependency_tup )
- else:
- log.debug( "Cannot locate repository %s owned by %s for inclusion in repository dependency tups." % \
- ( name, owner ) )
- return dependency_tups
-
-def get_updated_changeset_revisions_for_repository_dependencies( app, key_rd_dicts ):
- updated_key_rd_dicts = []
- for key_rd_dict in key_rd_dicts:
- key = key_rd_dict.keys()[ 0 ]
- repository_dependency = key_rd_dict[ key ]
- rd_toolshed, rd_name, rd_owner, rd_changeset_revision, rd_prior_installation_required, rd_only_if_compiling_contained_td = \
- common_util.parse_repository_dependency_tuple( repository_dependency )
- if suc.tool_shed_is_this_tool_shed( rd_toolshed ):
- repository = suc.get_repository_by_name_and_owner( app, rd_name, rd_owner )
- if repository:
- repository_metadata = \
- metadata_util.get_repository_metadata_by_repository_id_changeset_revision( app,
- app.security.encode_id( repository.id ),
- rd_changeset_revision )
- if repository_metadata:
- # The repository changeset_revision is installable, so no updates are available.
- new_key_rd_dict = {}
- new_key_rd_dict[ key ] = repository_dependency
- updated_key_rd_dicts.append( key_rd_dict )
- else:
- # The repository changeset_revision is no longer installable, so see if there's been an update.
- repo = hg_util.get_repo_for_repository( app, repository=repository, repo_path=None, create=False )
- changeset_revision = suc.get_next_downloadable_changeset_revision( repository, repo, rd_changeset_revision )
- repository_metadata = \
- metadata_util.get_repository_metadata_by_repository_id_changeset_revision( app,
- app.security.encode_id( repository.id ),
- changeset_revision )
- if repository_metadata:
- new_key_rd_dict = {}
- new_key_rd_dict[ key ] = \
- [ rd_toolshed, rd_name, rd_owner, repository_metadata.changeset_revision, rd_prior_installation_required, rd_only_if_compiling_contained_td ]
- # We have the updated changset revision.
- updated_key_rd_dicts.append( new_key_rd_dict )
- else:
- repository_components_tuple = container_util.get_components_from_key( key )
- components_list = suc.extract_components_from_tuple( repository_components_tuple )
- toolshed, repository_name, repository_owner, repository_changeset_revision = components_list[ 0:4 ]
- # For backward compatibility to the 12/20/12 Galaxy release.
- if len( components_list ) == 4:
- prior_installation_required = 'False'
- rd_only_if_compiling_contained_td = 'False'
- elif len( components_list ) == 5:
- rd_only_if_compiling_contained_td = 'False'
- message = "The revision %s defined for repository %s owned by %s is invalid, so repository dependencies defined for repository %s will be ignored." % \
- ( str( rd_changeset_revision ), str( rd_name ), str( rd_owner ), str( repository_name ) )
- log.debug( message )
- else:
- repository_components_tuple = container_util.get_components_from_key( key )
- components_list = suc.extract_components_from_tuple( repository_components_tuple )
- toolshed, repository_name, repository_owner, repository_changeset_revision = components_list[ 0:4 ]
- message = "The revision %s defined for repository %s owned by %s is invalid, so repository dependencies defined for repository %s will be ignored." % \
- ( str( rd_changeset_revision ), str( rd_name ), str( rd_owner ), str( repository_name ) )
- log.debug( message )
- return updated_key_rd_dicts
-
-def handle_circular_repository_dependency( repository_key, repository_dependency, circular_repository_dependencies,
- handled_key_rd_dicts, all_repository_dependencies ):
- all_repository_dependencies_root_key = all_repository_dependencies[ 'root_key' ]
- repository_dependency_as_key = get_repository_dependency_as_key( repository_dependency )
- repository_key_as_repository_dependency = repository_key.split( container_util.STRSEP )
- update_circular_repository_dependencies( repository_key,
- repository_dependency,
- all_repository_dependencies[ repository_dependency_as_key ],
- circular_repository_dependencies )
- if all_repository_dependencies_root_key != repository_dependency_as_key:
- all_repository_dependencies[ repository_key ] = [ repository_dependency ]
- return circular_repository_dependencies, handled_key_rd_dicts, all_repository_dependencies
-
-def handle_current_repository_dependency( app, current_repository_key, key_rd_dicts_to_be_processed, all_repository_dependencies, handled_key_rd_dicts,
- circular_repository_dependencies ):
- current_repository_key_rd_dicts = []
- for rd in all_repository_dependencies[ current_repository_key ]:
- rd_copy = [ str( item ) for item in rd ]
- new_key_rd_dict = {}
- new_key_rd_dict[ current_repository_key ] = rd_copy
- current_repository_key_rd_dicts.append( new_key_rd_dict )
- if current_repository_key_rd_dicts:
- toolshed, required_repository, required_repository_metadata, repository_key_rd_dicts, key_rd_dicts_to_be_processed, handled_key_rd_dicts = \
- handle_key_rd_dicts_for_repository( app,
- current_repository_key,
- current_repository_key_rd_dicts,
- key_rd_dicts_to_be_processed,
- handled_key_rd_dicts,
- circular_repository_dependencies )
- return get_repository_dependencies_for_changeset_revision( app=app,
- repository=required_repository,
- repository_metadata=required_repository_metadata,
- toolshed_base_url=toolshed,
- key_rd_dicts_to_be_processed=key_rd_dicts_to_be_processed,
- all_repository_dependencies=all_repository_dependencies,
- handled_key_rd_dicts=handled_key_rd_dicts,
- circular_repository_dependencies=circular_repository_dependencies )
-
-def handle_key_rd_dicts_for_repository( app, current_repository_key, repository_key_rd_dicts, key_rd_dicts_to_be_processed, handled_key_rd_dicts, circular_repository_dependencies ):
- key_rd_dict = repository_key_rd_dicts.pop( 0 )
- repository_dependency = key_rd_dict[ current_repository_key ]
- toolshed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \
- common_util.parse_repository_dependency_tuple( repository_dependency )
- if suc.tool_shed_is_this_tool_shed( toolshed ):
- required_repository = suc.get_repository_by_name_and_owner( app, name, owner )
- required_repository_metadata = \
- metadata_util.get_repository_metadata_by_repository_id_changeset_revision( app,
- app.security.encode_id( required_repository.id ),
- changeset_revision )
- if required_repository_metadata:
- # The required_repository_metadata changeset_revision is installable.
- required_metadata = required_repository_metadata.metadata
- if required_metadata:
- for current_repository_key_rd_dict in repository_key_rd_dicts:
- if not in_key_rd_dicts( current_repository_key_rd_dict, key_rd_dicts_to_be_processed ):
- key_rd_dicts_to_be_processed.append( current_repository_key_rd_dict )
- # Mark the current repository_dependency as handled_key_rd_dicts.
- if not in_key_rd_dicts( key_rd_dict, handled_key_rd_dicts ):
- handled_key_rd_dicts.append( key_rd_dict )
- # Remove the current repository from the list of repository_dependencies to be processed.
- if in_key_rd_dicts( key_rd_dict, key_rd_dicts_to_be_processed ):
- key_rd_dicts_to_be_processed = remove_from_key_rd_dicts( key_rd_dict, key_rd_dicts_to_be_processed )
- else:
- # The repository is in a different tool shed, so build an url and send a request.
- error_message = "Repository dependencies are currently supported only within the same Tool Shed. Ignoring repository dependency definition "
- error_message += "for tool shed %s, name %s, owner %s, changeset revision %s" % ( toolshed, name, owner, changeset_revision )
- log.debug( error_message )
- return toolshed, required_repository, required_repository_metadata, repository_key_rd_dicts, key_rd_dicts_to_be_processed, handled_key_rd_dicts
-
-def handle_next_repository_dependency( app, key_rd_dicts_to_be_processed, all_repository_dependencies, handled_key_rd_dicts,
- circular_repository_dependencies ):
- next_repository_key_rd_dict = key_rd_dicts_to_be_processed.pop( 0 )
- next_repository_key_rd_dicts = [ next_repository_key_rd_dict ]
- next_repository_key = next_repository_key_rd_dict.keys()[ 0 ]
- toolshed, required_repository, required_repository_metadata, repository_key_rd_dicts, key_rd_dicts_to_be_processed, handled_key_rd_dicts = \
- handle_key_rd_dicts_for_repository( app,
- next_repository_key,
- next_repository_key_rd_dicts,
- key_rd_dicts_to_be_processed,
- handled_key_rd_dicts,
- circular_repository_dependencies )
- return get_repository_dependencies_for_changeset_revision( app=app,
- repository=required_repository,
- repository_metadata=required_repository_metadata,
- toolshed_base_url=toolshed,
- key_rd_dicts_to_be_processed=key_rd_dicts_to_be_processed,
- all_repository_dependencies=all_repository_dependencies,
- handled_key_rd_dicts=handled_key_rd_dicts,
- circular_repository_dependencies=circular_repository_dependencies )
-
-def in_all_repository_dependencies( repository_key, repository_dependency, all_repository_dependencies ):
- """Return True if { repository_key : repository_dependency } is in all_repository_dependencies."""
- for key, val in all_repository_dependencies.items():
- if key != repository_key:
- continue
- if repository_dependency in val:
- return True
- return False
-
-def in_circular_repository_dependencies( repository_key_rd_dict, circular_repository_dependencies ):
- """
- Return True if any combination of a circular dependency tuple is the key : value pair defined
- in the received repository_key_rd_dict. This means that each circular dependency tuple is converted
- into the key : value pair for comparison.
- """
- for tup in circular_repository_dependencies:
- rd_0, rd_1 = tup
- rd_0_as_key = get_repository_dependency_as_key( rd_0 )
- rd_1_as_key = get_repository_dependency_as_key( rd_1 )
- if rd_0_as_key in repository_key_rd_dict and repository_key_rd_dict[ rd_0_as_key ] == rd_1:
- return True
- if rd_1_as_key in repository_key_rd_dict and repository_key_rd_dict[ rd_1_as_key ] == rd_0:
- return True
- return False
-
-def initialize_all_repository_dependencies( current_repository_key, repository_dependencies_dict, all_repository_dependencies ):
- """Initialize the all_repository_dependencies dictionary."""
- # It's safe to assume that current_repository_key in this case will have a value.
- all_repository_dependencies[ 'root_key' ] = current_repository_key
- all_repository_dependencies[ current_repository_key ] = []
- # Store the value of the 'description' key only once, the first time through this recursive method.
- description = repository_dependencies_dict.get( 'description', None )
- all_repository_dependencies[ 'description' ] = description
- return all_repository_dependencies
-
-def in_key_rd_dicts( key_rd_dict, key_rd_dicts ):
- """Return True if key_rd_dict is contained in the list of key_rd_dicts."""
- k = key_rd_dict.keys()[ 0 ]
- v = key_rd_dict[ k ]
- for key_rd_dict in key_rd_dicts:
- for key, val in key_rd_dict.items():
- if key == k and val == v:
- return True
- return False
-
-def is_circular_repository_dependency( repository_key, repository_dependency, all_repository_dependencies ):
- """
- Return True if the received repository_dependency is a key in all_repository_dependencies whose list of repository dependencies
- includes the received repository_key.
- """
- repository_dependency_as_key = get_repository_dependency_as_key( repository_dependency )
- repository_key_as_repository_dependency = repository_key.split( container_util.STRSEP )
- for key, val in all_repository_dependencies.items():
- if key != repository_dependency_as_key:
- continue
- if repository_key_as_repository_dependency in val:
- return True
- return False
-
-def filter_only_if_compiling_contained_td( key_rd_dict ):
- """
- Return a copy of the received key_rd_dict with repository dependencies that are needed
- only_if_compiling_contained_td filtered out of the list of repository dependencies for
- each rd_key.
- """
- filtered_key_rd_dict = {}
- for rd_key, required_rd_tup in key_rd_dict.items():
- tool_shed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \
- common_util.parse_repository_dependency_tuple( required_rd_tup )
- if not asbool( only_if_compiling_contained_td ):
- filtered_key_rd_dict[ rd_key ] = required_rd_tup
- return filtered_key_rd_dict
-
-def populate_repository_dependency_objects_for_processing( app, current_repository_key, repository_dependencies_dict,
- key_rd_dicts_to_be_processed, handled_key_rd_dicts,
- circular_repository_dependencies, all_repository_dependencies ):
- """
- The process that discovers all repository dependencies for a specified repository's
- changeset revision uses this method to populate the following items for the current
- processing loop: filtered_current_repository_key_rd_dicts, key_rd_dicts_to_be_processed,
- handled_key_rd_dicts, all_repository_dependencies. Each processing loop may discover
- more repository dependencies, so this method is repeatedly called until all repository
- dependencies have been discovered.
- """
- current_repository_key_rd_dicts = []
- filtered_current_repository_key_rd_dicts = []
- for rd_tup in repository_dependencies_dict[ 'repository_dependencies' ]:
- new_key_rd_dict = {}
- new_key_rd_dict[ current_repository_key ] = rd_tup
- current_repository_key_rd_dicts.append( new_key_rd_dict )
- if current_repository_key_rd_dicts and current_repository_key:
- # Remove all repository dependencies that point to a revision within its own repository.
- current_repository_key_rd_dicts = remove_ropository_dependency_reference_to_self( current_repository_key_rd_dicts )
- current_repository_key_rd_dicts = \
- get_updated_changeset_revisions_for_repository_dependencies( app, current_repository_key_rd_dicts )
- for key_rd_dict in current_repository_key_rd_dicts:
- # Filter out repository dependencies that are required only if compiling the dependent repository's tool dependency.
- key_rd_dict = filter_only_if_compiling_contained_td( key_rd_dict )
- if key_rd_dict:
- is_circular = False
- if not in_key_rd_dicts( key_rd_dict, handled_key_rd_dicts ) and not in_key_rd_dicts( key_rd_dict,
- key_rd_dicts_to_be_processed ):
- filtered_current_repository_key_rd_dicts.append( key_rd_dict )
- repository_dependency = key_rd_dict[ current_repository_key ]
- if current_repository_key in all_repository_dependencies:
- # Add all repository dependencies for the current repository into its entry in all_repository_dependencies.
- all_repository_dependencies_val = all_repository_dependencies[ current_repository_key ]
- if repository_dependency not in all_repository_dependencies_val:
- all_repository_dependencies_val.append( repository_dependency )
- all_repository_dependencies[ current_repository_key ] = all_repository_dependencies_val
- elif not in_all_repository_dependencies( current_repository_key, repository_dependency, all_repository_dependencies ):
- # Handle circular repository dependencies.
- if is_circular_repository_dependency( current_repository_key,
- repository_dependency,
- all_repository_dependencies ):
- is_circular = True
- circular_repository_dependencies, handled_key_rd_dicts, all_repository_dependencies = \
- handle_circular_repository_dependency( current_repository_key,
- repository_dependency,
- circular_repository_dependencies,
- handled_key_rd_dicts,
- all_repository_dependencies )
- else:
- all_repository_dependencies[ current_repository_key ] = [ repository_dependency ]
- if not is_circular and can_add_to_key_rd_dicts( key_rd_dict, key_rd_dicts_to_be_processed ):
- new_key_rd_dict = {}
- new_key_rd_dict[ current_repository_key ] = repository_dependency
- key_rd_dicts_to_be_processed.append( new_key_rd_dict )
- return filtered_current_repository_key_rd_dicts, key_rd_dicts_to_be_processed, handled_key_rd_dicts, all_repository_dependencies
-
-def prune_invalid_repository_dependencies( repository_dependencies ):
- """
- Eliminate all invalid entries in the received repository_dependencies dictionary. An entry
- is invalid if the value_list of the key/value pair is empty. This occurs when an invalid
- combination of tool shed, name , owner, changeset_revision is used and a repository_metadata
- record is not found.
- """
- valid_repository_dependencies = {}
- description = repository_dependencies.get( 'description', None )
- root_key = repository_dependencies.get( 'root_key', None )
- if root_key is None:
- return valid_repository_dependencies
- for key, value in repository_dependencies.items():
- if key in [ 'description', 'root_key' ]:
- continue
- if value:
- valid_repository_dependencies[ key ] = value
- if valid_repository_dependencies:
- valid_repository_dependencies[ 'description' ] = description
- valid_repository_dependencies[ 'root_key' ] = root_key
- return valid_repository_dependencies
-
-def remove_from_key_rd_dicts( key_rd_dict, key_rd_dicts ):
- """Eliminate the key_rd_dict from the list of key_rd_dicts if it is contained in the list."""
- k = key_rd_dict.keys()[ 0 ]
- v = key_rd_dict[ k ]
- clean_key_rd_dicts = []
- for krd_dict in key_rd_dicts:
- key = krd_dict.keys()[ 0 ]
- val = krd_dict[ key ]
- if key == k and val == v:
- continue
- clean_key_rd_dicts.append( krd_dict )
- return clean_key_rd_dicts
-
-def remove_ropository_dependency_reference_to_self( key_rd_dicts ):
- """Remove all repository dependencies that point to a revision within its own repository."""
- clean_key_rd_dicts = []
- key = key_rd_dicts[ 0 ].keys()[ 0 ]
- repository_tup = key.split( container_util.STRSEP )
- rd_toolshed, rd_name, rd_owner, rd_changeset_revision, rd_prior_installation_required, rd_only_if_compiling_contained_td = \
- common_util.parse_repository_dependency_tuple( repository_tup )
- cleaned_rd_toolshed = common_util.remove_protocol_from_tool_shed_url( rd_toolshed )
- for key_rd_dict in key_rd_dicts:
- k = key_rd_dict.keys()[ 0 ]
- repository_dependency = key_rd_dict[ k ]
- toolshed, name, owner, changeset_revision, prior_installation_required, only_if_compiling_contained_td = \
- common_util.parse_repository_dependency_tuple( repository_dependency )
- cleaned_toolshed = common_util.remove_protocol_from_tool_shed_url( toolshed )
- if cleaned_rd_toolshed == cleaned_toolshed and rd_name == name and rd_owner == owner:
- debug_msg = "Removing repository dependency for repository %s owned by %s " % ( name, owner )
- debug_msg += 'since it refers to a revision within itself.'
- log.debug( debug_msg )
- else:
- new_key_rd_dict = {}
- new_key_rd_dict[ key ] = repository_dependency
- clean_key_rd_dicts.append( new_key_rd_dict )
- return clean_key_rd_dicts
-
-def update_circular_repository_dependencies( repository_key, repository_dependency, repository_dependencies,
- circular_repository_dependencies ):
- repository_dependency_as_key = get_repository_dependency_as_key( repository_dependency )
- repository_key_as_repository_dependency = repository_key.split( container_util.STRSEP )
- if repository_key_as_repository_dependency in repository_dependencies:
- found = False
- for tup in circular_repository_dependencies:
- if repository_dependency in tup and repository_key_as_repository_dependency in tup:
- # The circular dependency has already been included.
- found = True
- if not found:
- new_circular_tup = [ repository_dependency, repository_key_as_repository_dependency ]
- circular_repository_dependencies.append( new_circular_tup )
- return circular_repository_dependencies
diff -r 893a727a7b7cc83355bc977d034622da50af16e5 -r 7628e1d9d8e2900cda5fb90ff239a32c0e091df9 lib/tool_shed/util/repository_maintenance_util.py
--- a/lib/tool_shed/util/repository_maintenance_util.py
+++ b/lib/tool_shed/util/repository_maintenance_util.py
@@ -2,10 +2,10 @@
import logging
import os
import re
-import tool_shed.util.shed_util_common as suc
+from tool_shed.dependencies.repository import relation_builder
from tool_shed.util import common_util
from tool_shed.util import hg_util
-from tool_shed.util import repository_dependency_util
+from tool_shed.util import shed_util_common as suc
from galaxy import util
from galaxy import web
from galaxy.web.form_builder import build_select_field
@@ -90,16 +90,9 @@
metadata = repository_metadata.metadata
if metadata:
tool_shed_url = str( web.url_for( '/', qualified=True ) ).rstrip( '/' )
+ rb = relation_builder.RelationBuilder( app, repository, repository_metadata, tool_shed_url )
# Get a dictionary of all repositories upon which the contents of the received repository depends.
- repository_dependencies = \
- repository_dependency_util.get_repository_dependencies_for_changeset_revision( app=app,
- repository=repository,
- repository_metadata=repository_metadata,
- toolshed_base_url=tool_shed_url,
- key_rd_dicts_to_be_processed=None,
- all_repository_dependencies=None,
- handled_key_rd_dicts=None,
- circular_repository_dependencies=None )
+ repository_dependencies = rb.get_repository_dependencies_for_changeset_revision()
tool_dependencies = metadata.get( 'tool_dependencies', {} )
if tool_dependencies:
new_tool_dependencies = {}
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
4 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/518d296e4ed8/
Changeset: 518d296e4ed8
User: jgoecks
Date: 2014-07-15 17:07:41
Summary: Trackster: readd long lost code to detect unsaved changes and prompt to save on exit.
Affected #: 1 file
diff -r 7c5d92187dfd5b92fd05e0fcab163a8a0ef89ffa -r 518d296e4ed8646b9e738e5293dab9f77834bf17 static/scripts/viz/trackster_ui.js
--- a/static/scripts/viz/trackster_ui.js
+++ b/static/scripts/viz/trackster_ui.js
@@ -35,9 +35,62 @@
},
/**
+ * Save visualization, returning a Deferred object for the remote call to save.
+ */
+ save_viz: function() {
+ // show dialog
+ Galaxy.modal.show({title: "Saving...", body: "progress" });
+
+ // Save bookmarks.
+ var bookmarks = [];
+ $(".bookmark").each(function() {
+ bookmarks.push({
+ position: $(this).children(".position").text(),
+ annotation: $(this).children(".annotation").text()
+ });
+ });
+
+ // FIXME: give unique IDs to Drawables and save overview as ID.
+ var overview_track_name = (view.overview_drawable ? view.overview_drawable.config.get_value('name') : null),
+ viz_config = {
+ 'view': view.to_dict(),
+ 'viewport': { 'chrom': view.chrom, 'start': view.low , 'end': view.high, 'overview': overview_track_name },
+ 'bookmarks': bookmarks
+ };
+
+ // Make call to save visualization.
+ return $.ajax({
+ url: galaxy_config.root + "visualization/save",
+ type: "POST",
+ dataType: "json",
+ data: {
+ 'id' : view.vis_id,
+ 'title' : view.config.get_value('name'),
+ 'dbkey' : view.dbkey,
+ 'type' : 'trackster',
+ 'vis_json' : JSON.stringify(viz_config)
+ }
+ }).success(function(vis_info) {
+ Galaxy.modal.hide();
+ view.vis_id = vis_info.vis_id;
+ view.has_changes = false;
+
+ // Needed to set URL when first saving a visualization.
+ window.history.pushState({}, "", vis_info.url + window.location.hash);
+ }).error(function() {
+ // show dialog
+ Galaxy.modal.show({
+ title : "Could Not Save",
+ body : "Could not save visualization. Please try again later.",
+ buttons : { "Cancel": function() { Galaxy.modal.hide(); } }
+ });
+ });
+ },
+
+ /**
* Create button menu
*/
- createButtonMenu: function() {
+ createButtonMenu: function() {
var self = this,
menu = create_icon_buttons_menu([
{ icon_class: 'plus-button', title: 'Add tracks', on_click: function() {
@@ -62,58 +115,13 @@
}
},
{ icon_class: 'disk--arrow', title: 'Save', on_click: function() {
- // show dialog
- Galaxy.modal.show({title: "Saving...", body: "progress" });
-
- // Save bookmarks.
- var bookmarks = [];
- $(".bookmark").each(function() {
- bookmarks.push({
- position: $(this).children(".position").text(),
- annotation: $(this).children(".annotation").text()
- });
- });
-
- // FIXME: give unique IDs to Drawables and save overview as ID.
- var overview_track_name = (view.overview_drawable ? view.overview_drawable.config.get_value('name') : null),
- viz_config = {
- 'view': view.to_dict(),
- 'viewport': { 'chrom': view.chrom, 'start': view.low , 'end': view.high, 'overview': overview_track_name },
- 'bookmarks': bookmarks
- };
-
- $.ajax({
- url: galaxy_config.root + "visualization/save",
- type: "POST",
- dataType: "json",
- data: {
- 'id' : view.vis_id,
- 'title' : view.config.get_value('name'),
- 'dbkey' : view.dbkey,
- 'type' : 'trackster',
- 'vis_json' : JSON.stringify(viz_config)
- }
- }).success(function(vis_info) {
- Galaxy.modal.hide();
- view.vis_id = vis_info.vis_id;
- view.has_changes = false;
-
- // Needed to set URL when first saving a visualization.
- window.history.pushState({}, "", vis_info.url + window.location.hash);
- }).error(function() {
- // show dialog
- Galaxy.modal.show({
- title : "Could Not Save",
- body : "Could not save visualization. Please try again later.",
- buttons : { "Cancel": function() { Galaxy.modal.hide() } }
- });
- });
+ self.save_viz();
} },
{
icon_class: 'cross-circle',
title: 'Close',
on_click: function() {
- window.location = galaxy_config.root + 'visualization';
+ self.handle_unsaved_changes(view);
}
}
],
@@ -302,7 +310,7 @@
/**
* Set up keyboard navigation for a visualization.
*/
- init_keyboard_nav: function(view) {
+ init_keyboard_nav: function(view) {
// Keyboard navigation. Scroll ~7% of height when scrolling up/down.
$(document).keyup(function(e) {
// Do not navigate if arrow keys used in input element.
@@ -328,6 +336,35 @@
break;
}
});
+ },
+
+ /**
+ * Handle unsaved changes in visualization.
+ */
+ handle_unsaved_changes: function(view) {
+ if (view.has_changes) {
+ var self = this;
+ Galaxy.modal.show({
+ title: "Close visualization",
+ body: "There are unsaved changes to your visualization which will be lost if you do not save them.",
+ buttons: {
+ "Cancel": function() { Galaxy.modal.hide(); },
+ "Leave without Saving" : function() {
+ window.onbeforeunload = undefined;
+ window.location = galaxy_config.root + 'visualization';
+ },
+ "Save" : function() {
+ $.when(self.save_viz()).then(function() {
+ window.location = galaxy_config.root + 'visualization';
+ });
+ }
+ }
+ });
+
+ }
+ else {
+ window.location = galaxy_config.root + 'visualization';
+ }
}
});
https://bitbucket.org/galaxy/galaxy-central/commits/08a1e9d70ebc/
Changeset: 08a1e9d70ebc
User: jgoecks
Date: 2014-07-15 18:19:47
Summary: Trackster: add dialog to indicate changes will be lost if navigating away from the viz.
Affected #: 1 file
diff -r 518d296e4ed8646b9e738e5293dab9f77834bf17 -r 08a1e9d70ebcf08bd8ec850e678a6a5a79cf9f5f static/scripts/viz/trackster.js
--- a/static/scripts/viz/trackster.js
+++ b/static/scripts/viz/trackster.js
@@ -211,6 +211,10 @@
// initialize keyboard
ui.init_keyboard_nav(view);
+
+ $(window).on('beforeunload', function() {
+ return "There are unsaved changes to your visualization that will be lost if you leave this page.";
+ });
}
});
https://bitbucket.org/galaxy/galaxy-central/commits/a34d766b17c7/
Changeset: a34d766b17c7
User: jgoecks
Date: 2014-07-15 18:33:54
Summary: Trackster: bug fixes for managing state.
Affected #: 2 files
diff -r 08a1e9d70ebcf08bd8ec850e678a6a5a79cf9f5f -r a34d766b17c7f3a7e6683e6b2d7aa6cb2a3b7215 static/scripts/viz/trackster.js
--- a/static/scripts/viz/trackster.js
+++ b/static/scripts/viz/trackster.js
@@ -213,7 +213,9 @@
ui.init_keyboard_nav(view);
$(window).on('beforeunload', function() {
- return "There are unsaved changes to your visualization that will be lost if you leave this page.";
+ if (view.has_changes) {
+ return "There are unsaved changes to your visualization that will be lost if you leave this page.";
+ }
});
}
});
diff -r 08a1e9d70ebcf08bd8ec850e678a6a5a79cf9f5f -r a34d766b17c7f3a7e6683e6b2d7aa6cb2a3b7215 static/scripts/viz/trackster_ui.js
--- a/static/scripts/viz/trackster_ui.js
+++ b/static/scripts/viz/trackster_ui.js
@@ -267,9 +267,6 @@
}
}
- // Need to update intro div after drawables have been added.
- view.update_intro_div();
-
// Set overview.
var overview_drawable;
for (var i = 0; i < view.drawables.length; i++) {
https://bitbucket.org/galaxy/galaxy-central/commits/893a727a7b7c/
Changeset: 893a727a7b7c
User: jgoecks
Date: 2014-07-15 18:41:06
Summary: Trackster: mark visualization changed when tracks change.
Affected #: 1 file
diff -r a34d766b17c7f3a7e6683e6b2d7aa6cb2a3b7215 -r 893a727a7b7cc83355bc977d034622da50af16e5 static/scripts/viz/trackster/tracks.js
--- a/static/scripts/viz/trackster/tracks.js
+++ b/static/scripts/viz/trackster/tracks.js
@@ -1380,6 +1380,14 @@
drawable.init();
this.changed();
this.update_intro_div();
+
+ // When drawable config changes, mark view as changed. This
+ // captures most (all?) state change that needs to be saved.
+ var self = this;
+ drawable.config.on('change', function() {
+ console.log(drawable.config.get_value('name') + " changed");
+ self.changed();
+ });
},
add_label_track: function (label_track) {
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: natefoo: Merged in dannon/galaxy-central-prmaker/stable (pull request #439)
by commits-noreply@bitbucket.org 15 Jul '14
by commits-noreply@bitbucket.org 15 Jul '14
15 Jul '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/d3b1f484c4b6/
Changeset: d3b1f484c4b6
Branch: stable
User: natefoo
Date: 2014-07-15 17:33:46
Summary: Merged in dannon/galaxy-central-prmaker/stable (pull request #439)
#STABLE migrate104 fix w/ threadlocal. "103 is not 105"
Affected #: 1 file
diff -r deba1986ea22e42fb1f290f50a62b51d4cfad762 -r d3b1f484c4b6bbb3daa50fa167eef97a384890b3 lib/galaxy/model/migrate/versions/0104_update_genome_downloader_job_parameters.py
--- a/lib/galaxy/model/migrate/versions/0104_update_genome_downloader_job_parameters.py
+++ b/lib/galaxy/model/migrate/versions/0104_update_genome_downloader_job_parameters.py
@@ -23,6 +23,8 @@
handler.setFormatter( formatter )
log.addHandler( handler )
+metadata = MetaData()
+context = scoped_session( sessionmaker( autoflush=False, autocommit=True ) )
class DeferredJob( object ):
states = Bunch( NEW = 'new',
@@ -37,12 +39,8 @@
self.params = params
def upgrade(migrate_engine):
- metadata = MetaData()
metadata.bind = migrate_engine
- Session = sessionmaker( bind=migrate_engine)
- context = Session()
-
DeferredJob.table = Table( "deferred_job", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
@@ -70,12 +68,8 @@
context.flush()
def downgrade(migrate_engine):
- metadata = MetaData()
metadata.bind = migrate_engine
- Session = sessionmaker( bind=migrate_engine)
- context = Session()
-
jobs = context.query( DeferredJob ).filter_by( plugin='GenomeTransferPlugin' ).all()
for job in jobs:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/0af879ff3097/
Changeset: 0af879ff3097
Branch: stable
User: dannon
Date: 2014-07-11 17:54:27
Summary: Migration 104 was using a Session() object that conflicts with the threadlocal strategy, which has been causing the '103 is not 105' migration errors we've seen. scoped_session works, but the threadlocal should probably be completely deprecated moving forward (removed from universe_wsgi.ini.sample by nate just now). See http://docs.sqlalchemy.org/en/rel_0_9/core/connections.html#threadlocal-strategy
Affected #: 1 file
diff -r b1441a96b8401fd14ab3364c5d6dfa0733df263e -r 0af879ff30978e2c3b2c6d5df2de93ae40a39a16 lib/galaxy/model/migrate/versions/0104_update_genome_downloader_job_parameters.py
--- a/lib/galaxy/model/migrate/versions/0104_update_genome_downloader_job_parameters.py
+++ b/lib/galaxy/model/migrate/versions/0104_update_genome_downloader_job_parameters.py
@@ -23,6 +23,8 @@
handler.setFormatter( formatter )
log.addHandler( handler )
+metadata = MetaData()
+context = scoped_session( sessionmaker( autoflush=False, autocommit=True ) )
class DeferredJob( object ):
states = Bunch( NEW = 'new',
@@ -37,12 +39,8 @@
self.params = params
def upgrade(migrate_engine):
- metadata = MetaData()
metadata.bind = migrate_engine
- Session = sessionmaker( bind=migrate_engine)
- context = Session()
-
DeferredJob.table = Table( "deferred_job", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
@@ -70,12 +68,8 @@
context.flush()
def downgrade(migrate_engine):
- metadata = MetaData()
metadata.bind = migrate_engine
- Session = sessionmaker( bind=migrate_engine)
- context = Session()
-
jobs = context.query( DeferredJob ).filter_by( plugin='GenomeTransferPlugin' ).all()
for job in jobs:
https://bitbucket.org/galaxy/galaxy-central/commits/d3b1f484c4b6/
Changeset: d3b1f484c4b6
Branch: stable
User: natefoo
Date: 2014-07-15 17:33:46
Summary: Merged in dannon/galaxy-central-prmaker/stable (pull request #439)
#STABLE migrate104 fix w/ threadlocal. "103 is not 105"
Affected #: 1 file
diff -r deba1986ea22e42fb1f290f50a62b51d4cfad762 -r d3b1f484c4b6bbb3daa50fa167eef97a384890b3 lib/galaxy/model/migrate/versions/0104_update_genome_downloader_job_parameters.py
--- a/lib/galaxy/model/migrate/versions/0104_update_genome_downloader_job_parameters.py
+++ b/lib/galaxy/model/migrate/versions/0104_update_genome_downloader_job_parameters.py
@@ -23,6 +23,8 @@
handler.setFormatter( formatter )
log.addHandler( handler )
+metadata = MetaData()
+context = scoped_session( sessionmaker( autoflush=False, autocommit=True ) )
class DeferredJob( object ):
states = Bunch( NEW = 'new',
@@ -37,12 +39,8 @@
self.params = params
def upgrade(migrate_engine):
- metadata = MetaData()
metadata.bind = migrate_engine
- Session = sessionmaker( bind=migrate_engine)
- context = Session()
-
DeferredJob.table = Table( "deferred_job", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
@@ -70,12 +68,8 @@
context.flush()
def downgrade(migrate_engine):
- metadata = MetaData()
metadata.bind = migrate_engine
- Session = sessionmaker( bind=migrate_engine)
- context = Session()
-
jobs = context.query( DeferredJob ).filter_by( plugin='GenomeTransferPlugin' ).all()
for job in jobs:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: jgoecks: Visualization bug fixes.
by commits-noreply@bitbucket.org 14 Jul '14
by commits-noreply@bitbucket.org 14 Jul '14
14 Jul '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/7c5d92187dfd/
Changeset: 7c5d92187dfd
User: jgoecks
Date: 2014-07-14 20:07:04
Summary: Visualization bug fixes.
Affected #: 2 files
diff -r d09b3a2bd7b2f0fe07f9894923b9a73d02176a82 -r 7c5d92187dfd5b92fd05e0fcab163a8a0ef89ffa lib/galaxy/webapps/galaxy/api/datasets.py
--- a/lib/galaxy/webapps/galaxy/api/datasets.py
+++ b/lib/galaxy/webapps/galaxy/api/datasets.py
@@ -194,7 +194,7 @@
max_vals = data_provider.get_default_max_vals()
# Get reference sequence and mean depth for region; these is used by providers for aligned reads.
- ref_seq = None
+ region = None
mean_depth = None
if isinstance( data_provider, (SamDataProvider, BamDataProvider ) ):
# Get reference sequence.
diff -r d09b3a2bd7b2f0fe07f9894923b9a73d02176a82 -r 7c5d92187dfd5b92fd05e0fcab163a8a0ef89ffa static/scripts/viz/trackster/painters.js
--- a/static/scripts/viz/trackster/painters.js
+++ b/static/scripts/viz/trackster/painters.js
@@ -1462,6 +1462,7 @@
if (num_samples === 1) {
row_height = feature_height =
(w_scale < ctx.canvas.manager.char_width_px ? this.prefs.summary_height : row_height);
+ paint_utils.row_height = row_height;
// No summary when there's a single sample.
draw_summary = false;
}
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/46df437c4059/
Changeset: 46df437c4059
User: jgoecks
Date: 2014-07-14 16:13:44
Summary: Clean up for fetching genome reference data for visualization.
Affected #: 3 files
diff -r 018d08d81c1e58e9aa42e8c5b179c413f6fb509b -r 46df437c405989b2d93fcccc0937aa804f0d9555 lib/galaxy/visualization/genomes.py
--- a/lib/galaxy/visualization/genomes.py
+++ b/lib/galaxy/visualization/genomes.py
@@ -31,10 +31,11 @@
A genomic region on an individual chromosome.
"""
- def __init__( self, chrom = None, start = 0, end = 0 ):
+ def __init__( self, chrom = None, start = 0, end = 0, sequence=None ):
self.chrom = chrom
self.start = int( start )
self.end = int( end )
+ self.sequence = sequence
def __str__( self ):
return self.chrom + ":" + str( self.start ) + "-" + str( self.end )
@@ -356,6 +357,6 @@
twobit = TwoBitFile( open( twobit_file_name ) )
if chrom in twobit:
seq_data = twobit[chrom].get( int(low), int(high) )
- return { 'dataset_type': 'refseq', 'data': seq_data }
+ return GenomeRegion( chrom=chrom, start=low, end=high, sequence=seq_data )
except IOError:
return None
diff -r 018d08d81c1e58e9aa42e8c5b179c413f6fb509b -r 46df437c405989b2d93fcccc0937aa804f0d9555 lib/galaxy/webapps/galaxy/api/datasets.py
--- a/lib/galaxy/webapps/galaxy/api/datasets.py
+++ b/lib/galaxy/webapps/galaxy/api/datasets.py
@@ -199,9 +199,9 @@
if isinstance( data_provider, (SamDataProvider, BamDataProvider ) ):
# Get reference sequence.
if dataset.dbkey:
- data_dict = self.app.genomes.reference( trans, dbkey=dataset.dbkey, chrom=chrom, low=low, high=high )
- if data_dict:
- ref_seq = data_dict[ 'data' ]
+ region = self.app.genomes.reference( trans, dbkey=dataset.dbkey, chrom=chrom, low=low, high=high )
+ if region:
+ ref_seq = region.sequence
# Get mean depth.
if not indexer:
diff -r 018d08d81c1e58e9aa42e8c5b179c413f6fb509b -r 46df437c405989b2d93fcccc0937aa804f0d9555 lib/galaxy/webapps/galaxy/api/genomes.py
--- a/lib/galaxy/webapps/galaxy/api/genomes.py
+++ b/lib/galaxy/webapps/galaxy/api/genomes.py
@@ -36,7 +36,8 @@
# Return info.
rval = None
if reference:
- rval = self.app.genomes.reference( trans, dbkey=id, chrom=chrom, low=low, high=high )
+ region = self.app.genomes.reference( trans, dbkey=id, chrom=chrom, low=low, high=high )
+ rval = { 'dataset_type': 'refseq', 'data': region.sequence }
else:
rval = self.app.genomes.chroms( trans, dbkey=id, num=num, chrom=chrom, low=low )
return rval
https://bitbucket.org/galaxy/galaxy-central/commits/d09b3a2bd7b2/
Changeset: d09b3a2bd7b2
User: jgoecks
Date: 2014-07-14 17:35:14
Summary: Aligned read visualization data providers: do reference-based compression for complete read rather than read in requested region so that read representation is uniform.
Affected #: 3 files
diff -r 46df437c405989b2d93fcccc0937aa804f0d9555 -r d09b3a2bd7b2f0fe07f9894923b9a73d02176a82 lib/galaxy/visualization/data_providers/cigar.py
--- a/lib/galaxy/visualization/data_providers/cigar.py
+++ b/lib/galaxy/visualization/data_providers/cigar.py
@@ -18,7 +18,7 @@
return read_seq, cigar
# Set up position for reference, read.
- ref_seq_pos = read_start - ref_seq_start
+ ref_seq_pos = read_start
read_pos = 0
# Create new read sequence, cigar.
@@ -27,42 +27,30 @@
cigar_ops = 'MIDNSHP=X'
for op_tuple in cigar:
op, op_len = op_tuple
-
+
# Op is index into string 'MIDNSHP=X'
if op == 0: # Match
- # If region falls outside ref_seq data, leave as M.
- if ref_seq_start - read_start > op_len:
- # Region falls completely outside of reference.
- new_cigar += '%iM' % ( op_len )
- else:
- # Some of region overlap reference.
- total_count = 0
- if read_start < ref_seq_start:
- new_cigar += '%iM' % ( ref_seq_start - read_start )
- read_pos = ref_seq_start - read_start
- ref_seq_pos = 0
- total_count = read_pos
+ # Transform Ms to =s and Xs using reference.
+ new_op = ''
+ total_count = 0
+ while total_count < op_len and ref_seq_pos < len( ref_seq ):
+ match, count = _match_mismatch_counter( read_seq, read_pos, ref_seq, ref_seq_pos )
+ # Use min because count cannot exceed remainder of operation.
+ count = min( count, op_len - total_count )
+ if match:
+ new_op = '='
+ else:
+ new_op = 'X'
+ # Include mismatched bases in new read sequence.
+ new_read_seq += read_seq[ read_pos:read_pos + count ]
+ new_cigar += '%i%s' % ( count, new_op )
+ total_count += count
+ read_pos += count
+ ref_seq_pos += count
- # Transform Ms to =s and Xs using reference.
- new_op = ''
- while total_count < op_len and ref_seq_pos < len( ref_seq ):
- match, count = _match_mismatch_counter( read_seq, read_pos, ref_seq, ref_seq_pos )
- # Use min because count cannot exceed remainder of operation.
- count = min( count, op_len - total_count )
- if match:
- new_op = '='
- else:
- new_op = 'X'
- # Include mismatched bases in new read sequence.
- new_read_seq += read_seq[ read_pos:read_pos + count ]
- new_cigar += '%i%s' % ( count, new_op )
- total_count += count
- read_pos += count
- ref_seq_pos += count
-
- # If end of read falls outside of ref_seq data, leave as M.
- if total_count < op_len:
- new_cigar += '%iM' % ( op_len - total_count )
+ # If end of read falls outside of ref_seq data, leave as M.
+ if total_count < op_len:
+ new_cigar += '%iM' % ( op_len - total_count )
elif op == 1: # Insertion
new_cigar += '%i%s' % ( op_len, cigar_ops[ op ] )
# Include insertion bases in new read sequence.
diff -r 46df437c405989b2d93fcccc0937aa804f0d9555 -r d09b3a2bd7b2f0fe07f9894923b9a73d02176a82 lib/galaxy/visualization/data_providers/genome.py
--- a/lib/galaxy/visualization/data_providers/genome.py
+++ b/lib/galaxy/visualization/data_providers/genome.py
@@ -1068,8 +1068,8 @@
'''
read_seq, read_cigar = get_ref_based_read_seq_and_cigar( read[ seq_field ].upper(),
read[ start_field ],
- ref_seq,
- start,
+ ref_seq.sequence,
+ ref_seq.start,
read[ cigar_field ] )
read[ seq_field ] = read_seq
read[ cigar_field ] = read_cigar
@@ -1088,7 +1088,7 @@
# if possible. Otherwise, convert cigar.
if ref_seq:
# Uppercase for easy comparison.
- ref_seq = ref_seq.upper()
+ ref_seq.sequence = ref_seq.sequence.upper()
process_read = compress_seq_and_cigar
else:
process_read = convert_cigar
diff -r 46df437c405989b2d93fcccc0937aa804f0d9555 -r d09b3a2bd7b2f0fe07f9894923b9a73d02176a82 lib/galaxy/webapps/galaxy/api/datasets.py
--- a/lib/galaxy/webapps/galaxy/api/datasets.py
+++ b/lib/galaxy/webapps/galaxy/api/datasets.py
@@ -199,9 +199,11 @@
if isinstance( data_provider, (SamDataProvider, BamDataProvider ) ):
# Get reference sequence.
if dataset.dbkey:
- region = self.app.genomes.reference( trans, dbkey=dataset.dbkey, chrom=chrom, low=low, high=high )
- if region:
- ref_seq = region.sequence
+ # FIXME: increase region 500bp each way to provide sequence for overlapping reads. As reads
+ # get longer, this will need to be increased and/or a handle to the genomic data may be need
+ # to be given to the data provider.
+ region = self.app.genomes.reference( trans, dbkey=dataset.dbkey, chrom=chrom,
+ low=( int( low ) - 500 ), high=( int( high ) + 500 ) )
# Get mean depth.
if not indexer:
@@ -212,7 +214,7 @@
# Get and return data from data_provider.
result = data_provider.get_data( chrom, int( low ), int( high ), int( start_val ), int( max_vals ),
- ref_seq=ref_seq, mean_depth=mean_depth, **kwargs )
+ ref_seq=region, mean_depth=mean_depth, **kwargs )
result.update( { 'dataset_type': data_provider.dataset_type, 'extra_info': extra_info } )
return result
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
14 Jul '14
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/018d08d81c1e/
Changeset: 018d08d81c1e
User: davebgx
Date: 2014-07-14 15:19:10
Summary: Merge fix from stable.
Affected #: 1 file
diff -r 5b7e495d6f2fefe2b51da8bc438e4053b2a7a6af -r 018d08d81c1e58e9aa42e8c5b179c413f6fb509b lib/tool_shed/galaxy_install/tool_dependencies/recipe/step_handler.py
--- a/lib/tool_shed/galaxy_install/tool_dependencies/recipe/step_handler.py
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/recipe/step_handler.py
@@ -703,7 +703,9 @@
filename = action_dict[ 'target_filename' ]
else:
filename = url.split( '/' )[ -1 ]
- self.url_download( work_dir, filename, url )
+ if current_dir is not None:
+ work_dir = current_dir
+ self.url_download( work_dir, filename, url, extract=action_dict[ 'extract' ] )
if initial_download:
dir = os.path.curdir
return tool_dependency, filtered_actions, dir
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0