galaxy-commits
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/4212a41ba5ee/
changeset: 4212a41ba5ee
user: greg
date: 2012-11-16 15:19:11
summary: More shed_util refactoring.
affected #: 2 files
diff -r 4f68935907f4f86313f63cad33a310e65b2ee02c -r 4212a41ba5ee1f2a45f4d5f84c110808b167c87a lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -596,19 +596,6 @@
fh.write( fctx.data() )
fh.close()
return sample_files, deleted_sample_files
-def get_repository_files( trans, folder_path ):
- contents = []
- for item in os.listdir( folder_path ):
- # Skip .hg directories
- if str( item ).startswith( '.hg' ):
- continue
- if os.path.isdir( os.path.join( folder_path, item ) ):
- # Append a '/' character so that our jquery dynatree will function properly.
- item = '%s/' % item
- contents.append( item )
- if contents:
- contents.sort()
- return contents
def get_repository_owner( cleaned_repository_url ):
items = cleaned_repository_url.split( 'repos' )
repo_path = items[ 1 ]
diff -r 4f68935907f4f86313f63cad33a310e65b2ee02c -r 4212a41ba5ee1f2a45f4d5f84c110808b167c87a lib/galaxy/util/shed_util_common.py
--- a/lib/galaxy/util/shed_util_common.py
+++ b/lib/galaxy/util/shed_util_common.py
@@ -768,6 +768,19 @@
to_html = '%s%s' % ( to_html, to_html_str( large_str ) )
break
return to_html
+def get_repository_files( trans, folder_path ):
+ contents = []
+ for item in os.listdir( folder_path ):
+ # Skip .hg directories
+ if str( item ).startswith( '.hg' ):
+ continue
+ if os.path.isdir( os.path.join( folder_path, item ) ):
+ # Append a '/' character so that our jquery dynatree will function properly.
+ item = '%s/' % item
+ contents.append( item )
+ if contents:
+ contents.sort()
+ return contents
def get_repository_in_tool_shed( trans, id ):
"""Get a repository on the tool shed side from the database via id"""
return trans.sa_session.query( trans.model.Repository ).get( trans.security.decode_id( id ) )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
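The function relocated by this changeset is unchanged: it lists a repository folder for the tool shed's file browser, skipping Mercurial metadata and appending a trailing slash to directory names so the jQuery Dynatree widget can render them as expandable nodes. A minimal, modernized sketch of the same logic (Python 3; unlike the original, which returns None for an empty folder, this version always returns a sorted list):

    import os

    def get_repository_files(folder_path):
        """List the entries of folder_path for a Dynatree file browser.

        Skips anything starting with '.hg' (the Mercurial store) and
        marks directories with a trailing '/' so the tree widget can
        tell folders from files.
        """
        contents = []
        for item in os.listdir(folder_path):
            if item.startswith('.hg'):
                continue
            if os.path.isdir(os.path.join(folder_path, item)):
                item += '/'  # trailing slash marks a directory
            contents.append(item)
        return sorted(contents)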
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/4f68935907f4/
changeset: 4f68935907f4
user: greg
date: 2012-11-15 23:01:56
summary: Fix imports in shed_util_common.py
affected #: 1 file
diff -r a43b16b64b1a4b0b7fe2d5be0d87a9047c3a54a3 -r 4f68935907f4f86313f63cad33a310e65b2ee02c lib/galaxy/util/shed_util_common.py
--- a/lib/galaxy/util/shed_util_common.py
+++ b/lib/galaxy/util/shed_util_common.py
@@ -13,6 +13,10 @@
pkg_resources.require( 'mercurial' )
from mercurial import hg, ui, commands
+pkg_resources.require( 'elementtree' )
+from elementtree import ElementTree, ElementInclude
+from elementtree.ElementTree import Element, SubElement
+
log = logging.getLogger( __name__ )
INITIAL_CHANGELOG_HASH = '000000000000'
@@ -592,8 +596,8 @@
try:
# Make sure we're looking at a valid repository_dependencies.xml file.
tree = util.parse_xml( repository_dependencies_config )
- root = element_tree.getroot()
- is_valid = element_tree_root.tag == 'repositories'
+ root = tree.getroot()
+ is_valid = root.tag == 'repositories'
except Exception, e:
log.debug( "Error parsing %s, exception: %s" % ( repository_dependencies_config, str( e ) ) )
is_valid = False
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
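This fix corrects two problems at once: the elementtree egg was never imported in shed_util_common.py, and the validation block referenced the undefined names element_tree and element_tree_root instead of the tree object returned by util.parse_xml(). The corrected pattern, sketched here against the standard-library xml.etree.ElementTree (a substitution; the 2012 code required Galaxy's bundled elementtree egg via pkg_resources):

    import xml.etree.ElementTree as ElementTree

    def is_valid_repository_dependencies(path):
        """Return True only if path parses as XML and its root tag is
        <repositories>, mirroring the corrected check in the changeset."""
        try:
            tree = ElementTree.parse(path)
        except ElementTree.ParseError:
            return False
        return tree.getroot().tag == 'repositories'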
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/a43b16b64b1a/
changeset: a43b16b64b1a
user: greg
date: 2012-11-15 22:54:10
summary: More shed_util refactoring.
affected #: 6 files
diff -r 7a51b701af8825baaf4aeb68f422304434c01a10 -r a43b16b64b1a4b0b7fe2d5be0d87a9047c3a54a3 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -157,154 +157,6 @@
except:
pass
return converter_path, display_path
-def can_generate_tool_dependency_metadata( root, metadata_dict ):
- """
- Make sure the combination of name, version and type (the type will be the value of elem.tag) of each root element tag in the tool_dependencies.xml
- file is defined in the <requirement> tag for at least one tool in the repository.
- """
- can_generate_dependency_metadata = False
- for elem in root:
- tool_dependency_type = elem.tag
- tool_dependency_version = elem.get( 'version', None )
- if tool_dependency_type == 'package':
- can_generate_dependency_metadata = False
- tool_dependency_name = elem.get( 'name', None )
- if tool_dependency_name and tool_dependency_version:
- for tool_dict in metadata_dict.get( 'tools', [] ):
- requirements = tool_dict.get( 'requirements', [] )
- for requirement_dict in requirements:
- req_name = requirement_dict.get( 'name', None )
- req_version = requirement_dict.get( 'version', None )
- req_type = requirement_dict.get( 'type', None )
- if req_name==tool_dependency_name and req_version==tool_dependency_version and req_type==tool_dependency_type:
- can_generate_dependency_metadata = True
- break
- if requirements and not can_generate_dependency_metadata:
- # We've discovered at least 1 combination of name, version and type that is not defined in the <requirement>
- # tag for any tool in the repository.
- break
- if not can_generate_dependency_metadata:
- break
- elif tool_dependency_type == 'set_environment':
- # Here elem is something like: <set_environment version="1.0">
- for env_var_elem in elem:
- can_generate_dependency_metadata = False
- # <environment_variable name="R_SCRIPT_PATH" action="set_to">$REPOSITORY_INSTALL_DIR</environment_variable>
- env_var_name = env_var_elem.get( 'name', None )
- if env_var_name:
- for tool_dict in metadata_dict.get( 'tools', [] ):
- requirements = tool_dict.get( 'requirements', [] )
- for requirement_dict in requirements:
- # {"name": "R_SCRIPT_PATH", "type": "set_environment", "version": null}
- req_name = requirement_dict.get( 'name', None )
- req_type = requirement_dict.get( 'type', None )
- if req_name==env_var_name and req_type==tool_dependency_type:
- can_generate_dependency_metadata = True
- break
- if requirements and not can_generate_dependency_metadata:
- # We've discovered at least 1 combination of name, version and type that is not defined in the <requirement>
- # tag for any tool in the repository.
- break
- return can_generate_dependency_metadata
-def clean_repository_metadata( trans, id, changeset_revisions ):
- # Delete all repository_metadata records associated with the repository that have a changeset_revision that is not in changeset_revisions.
- # We sometimes see multiple records with the same changeset revision value - no idea how this happens. We'll assume we can delete the older
- # records, so we'll order by update_time descending and delete records that have the same changeset_revision we come across later..
- changeset_revisions_checked = []
- for repository_metadata in trans.sa_session.query( trans.model.RepositoryMetadata ) \
- .filter( trans.model.RepositoryMetadata.table.c.repository_id == trans.security.decode_id( id ) ) \
- .order_by( trans.model.RepositoryMetadata.table.c.changeset_revision,
- trans.model.RepositoryMetadata.table.c.update_time.desc() ):
- changeset_revision = repository_metadata.changeset_revision
- can_delete = changeset_revision in changeset_revisions_checked or changeset_revision not in changeset_revisions
- if can_delete:
- trans.sa_session.delete( repository_metadata )
- trans.sa_session.flush()
-def compare_changeset_revisions( ancestor_changeset_revision, ancestor_metadata_dict, current_changeset_revision, current_metadata_dict ):
- # The metadata associated with ancestor_changeset_revision is ancestor_metadata_dict. This changeset_revision is an ancestor of
- # current_changeset_revision which is associated with current_metadata_dict. A new repository_metadata record will be created only
- # when this method returns the string 'not equal and not subset'.
- ancestor_datatypes = ancestor_metadata_dict.get( 'datatypes', [] )
- ancestor_tools = ancestor_metadata_dict.get( 'tools', [] )
- ancestor_guids = [ tool_dict[ 'guid' ] for tool_dict in ancestor_tools ]
- ancestor_guids.sort()
- ancestor_tool_dependencies = ancestor_metadata_dict.get( 'tool_dependencies', [] )
- ancestor_workflows = ancestor_metadata_dict.get( 'workflows', [] )
- current_datatypes = current_metadata_dict.get( 'datatypes', [] )
- current_tools = current_metadata_dict.get( 'tools', [] )
- current_guids = [ tool_dict[ 'guid' ] for tool_dict in current_tools ]
- current_guids.sort()
- current_tool_dependencies = current_metadata_dict.get( 'tool_dependencies', [] )
- current_workflows = current_metadata_dict.get( 'workflows', [] )
- # Handle case where no metadata exists for either changeset.
- if not ancestor_guids and not current_guids and not ancestor_workflows and not current_workflows and not ancestor_datatypes and not current_datatypes:
- return 'no metadata'
- workflow_comparison = compare_workflows( ancestor_workflows, current_workflows )
- datatype_comparison = compare_datatypes( ancestor_datatypes, current_datatypes )
- # Handle case where all metadata is the same.
- if ancestor_guids == current_guids and workflow_comparison == 'equal' and datatype_comparison == 'equal':
- return 'equal'
- if workflow_comparison in [ 'equal', 'subset' ] and datatype_comparison in [ 'equal', 'subset' ]:
- is_subset = True
- for guid in ancestor_guids:
- if guid not in current_guids:
- is_subset = False
- break
- if is_subset:
- return 'subset'
- return 'not equal and not subset'
-def compare_datatypes( ancestor_datatypes, current_datatypes ):
- # Determine if ancestor_datatypes is the same as current_datatypes
- # or if ancestor_datatypes is a subset of current_datatypes. Each
- # datatype dict looks something like:
- # {"dtype": "galaxy.datatypes.images:Image", "extension": "pdf", "mimetype": "application/pdf"}
- if len( ancestor_datatypes ) <= len( current_datatypes ):
- for ancestor_datatype in ancestor_datatypes:
- # Currently the only way to differentiate datatypes is by name.
- ancestor_datatype_dtype = ancestor_datatype[ 'dtype' ]
- ancestor_datatype_extension = ancestor_datatype[ 'extension' ]
- ancestor_datatype_mimetype = ancestor_datatype.get( 'mimetype', None )
- found_in_current = False
- for current_datatype in current_datatypes:
- if current_datatype[ 'dtype' ] == ancestor_datatype_dtype and \
- current_datatype[ 'extension' ] == ancestor_datatype_extension and \
- current_datatype.get( 'mimetype', None ) == ancestor_datatype_mimetype:
- found_in_current = True
- break
- if not found_in_current:
- return 'not equal and not subset'
- if len( ancestor_datatypes ) == len( current_datatypes ):
- return 'equal'
- else:
- return 'subset'
- return 'not equal and not subset'
-def compare_workflows( ancestor_workflows, current_workflows ):
- # Determine if ancestor_workflows is the same as current_workflows
- # or if ancestor_workflows is a subset of current_workflows.
- if len( ancestor_workflows ) <= len( current_workflows ):
- for ancestor_workflow_tup in ancestor_workflows:
- # ancestor_workflows is a list of tuples where each contained tuple is
- # [ <relative path to the .ga file in the repository>, <exported workflow dict> ]
- ancestor_workflow_dict = ancestor_workflow_tup[1]
- # Currently the only way to differentiate workflows is by name.
- ancestor_workflow_name = ancestor_workflow_dict[ 'name' ]
- num_ancestor_workflow_steps = len( ancestor_workflow_dict[ 'steps' ] )
- found_in_current = False
- for current_workflow_tup in current_workflows:
- current_workflow_dict = current_workflow_tup[1]
- # Assume that if the name and number of steps are euqal,
- # then the workflows are the same. Of course, this may
- # not be true...
- if current_workflow_dict[ 'name' ] == ancestor_workflow_name and len( current_workflow_dict[ 'steps' ] ) == num_ancestor_workflow_steps:
- found_in_current = True
- break
- if not found_in_current:
- return 'not equal and not subset'
- if len( ancestor_workflows ) == len( current_workflows ):
- return 'equal'
- else:
- return 'subset'
- return 'not equal and not subset'
def config_elems_to_xml_file( app, config_elems, config_filename, tool_path ):
# Persist the current in-memory list of config_elems to a file named by the value of config_filename.
fd, filename = tempfile.mkstemp()
@@ -326,20 +178,6 @@
copy_sample_file( trans.app, relative_path, dest_path=dest_path )
sample_files.append( name )
return sample_files
-def clean_repository_clone_url( repository_clone_url ):
- if repository_clone_url.find( '@' ) > 0:
- # We have an url that includes an authenticated user, something like:
- # http://test@bx.psu.edu:9009/repos/some_username/column
- items = repository_clone_url.split( '@' )
- tmp_url = items[ 1 ]
- elif repository_clone_url.find( '//' ) > 0:
- # We have an url that includes only a protocol, something like:
- # http://bx.psu.edu:9009/repos/some_username/column
- items = repository_clone_url.split( '//' )
- tmp_url = items[ 1 ]
- else:
- tmp_url = repository_clone_url
- return tmp_url
def clean_tool_shed_url( tool_shed_url ):
if tool_shed_url.find( ':' ) > 0:
# Eliminate the port, if any, since it will result in an invalid directory name.
@@ -472,166 +310,6 @@
"""Generate the URL for cloning a repository that has been installed into a Galaxy instance."""
tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository )
return url_join( tool_shed_url, 'repos', repository.owner, repository.name )
-def generate_datatypes_metadata( datatypes_config, metadata_dict ):
- """Update the received metadata_dict with information from the parsed datatypes_config."""
- tree = ElementTree.parse( datatypes_config )
- root = tree.getroot()
- ElementInclude.include( root )
- repository_datatype_code_files = []
- datatype_files = root.find( 'datatype_files' )
- if datatype_files:
- for elem in datatype_files.findall( 'datatype_file' ):
- name = elem.get( 'name', None )
- repository_datatype_code_files.append( name )
- metadata_dict[ 'datatype_files' ] = repository_datatype_code_files
- datatypes = []
- registration = root.find( 'registration' )
- if registration:
- for elem in registration.findall( 'datatype' ):
- datatypes_dict = {}
- display_in_upload = elem.get( 'display_in_upload', None )
- if display_in_upload:
- datatypes_dict[ 'display_in_upload' ] = display_in_upload
- dtype = elem.get( 'type', None )
- if dtype:
- datatypes_dict[ 'dtype' ] = dtype
- extension = elem.get( 'extension', None )
- if extension:
- datatypes_dict[ 'extension' ] = extension
- max_optional_metadata_filesize = elem.get( 'max_optional_metadata_filesize', None )
- if max_optional_metadata_filesize:
- datatypes_dict[ 'max_optional_metadata_filesize' ] = max_optional_metadata_filesize
- mimetype = elem.get( 'mimetype', None )
- if mimetype:
- datatypes_dict[ 'mimetype' ] = mimetype
- subclass = elem.get( 'subclass', None )
- if subclass:
- datatypes_dict[ 'subclass' ] = subclass
- if datatypes_dict:
- datatypes.append( datatypes_dict )
- if datatypes:
- metadata_dict[ 'datatypes' ] = datatypes
- return metadata_dict
-def generate_environment_dependency_metadata( elem, tool_dependencies_dict ):
- """The value of env_var_name must match the value of the "set_environment" type in the tool config's <requirements> tag set."""
- requirements_dict = {}
- for env_elem in elem:
- env_name = env_elem.get( 'name', None )
- if env_name:
- requirements_dict [ 'name' ] = env_name
- requirements_dict [ 'type' ] = 'environment variable'
- if requirements_dict:
- if 'set_environment' in tool_dependencies_dict:
- tool_dependencies_dict[ 'set_environment' ].append( requirements_dict )
- else:
- tool_dependencies_dict[ 'set_environment' ] = [ requirements_dict ]
- return tool_dependencies_dict
-def generate_package_dependency_metadata( elem, tool_dependencies_dict ):
- """The value of package_name must match the value of the "package" type in the tool config's <requirements> tag set."""
- requirements_dict = {}
- package_name = elem.get( 'name', None )
- package_version = elem.get( 'version', None )
- if package_name and package_version:
- dependency_key = '%s/%s' % ( package_name, package_version )
- requirements_dict [ 'name' ] = package_name
- requirements_dict [ 'version' ] = package_version
- requirements_dict [ 'type' ] = 'package'
- for sub_elem in elem:
- if sub_elem.tag == 'readme':
- requirements_dict[ 'readme' ] = sub_elem.text
- if requirements_dict:
- tool_dependencies_dict[ dependency_key ] = requirements_dict
- return tool_dependencies_dict
-def generate_tool_dependency_metadata( app, repository, tool_dependencies_config, metadata_dict, original_repository_metadata=None ):
- """
- If the combination of name, version and type of each element is defined in the <requirement> tag for at least one tool in the repository,
- then update the received metadata_dict with information from the parsed tool_dependencies_config.
- """
- if original_repository_metadata:
- # Keep a copy of the original tool dependencies dictionary in the metadata.
- original_tool_dependencies_dict = original_repository_metadata.get( 'tool_dependencies', None )
- else:
- original_tool_dependencies_dict = None
- try:
- tree = ElementTree.parse( tool_dependencies_config )
- except Exception, e:
- log.debug( "Exception attempting to parse tool_dependencies.xml: %s" %str( e ) )
- return metadata_dict
- root = tree.getroot()
- ElementInclude.include( root )
- tool_dependencies_dict = {}
- if can_generate_tool_dependency_metadata( root, metadata_dict ):
- for elem in root:
- if elem.tag == 'package':
- tool_dependencies_dict = generate_package_dependency_metadata( elem, tool_dependencies_dict )
- elif elem.tag == 'set_environment':
- tool_dependencies_dict = generate_environment_dependency_metadata( elem, tool_dependencies_dict )
- # Handle tool dependency installation via other means here (future).
- if tool_dependencies_dict:
- metadata_dict[ 'tool_dependencies' ] = tool_dependencies_dict
- else:
- log.debug( "Name, version and type from the <requirement> tag does not match the information in the tool_dependencies.xml file. Tool dependencies will be ignored." )
- if tool_dependencies_dict:
- if original_tool_dependencies_dict:
- # We're generating metadata on an update pulled to a tool shed repository installed into a Galaxy instance, so handle changes to
- # tool dependencies appropriately.
- handle_existing_tool_dependencies_that_changed_in_update( app, repository, original_tool_dependencies_dict, tool_dependencies_dict )
- metadata_dict[ 'tool_dependencies' ] = tool_dependencies_dict
- return metadata_dict
-def generate_tool_guid( repository_clone_url, tool ):
- """
- Generate a guid for the installed tool. It is critical that this guid matches the guid for
- the tool in the Galaxy tool shed from which it is being installed. The form of the guid is
- <tool shed host>/repos/<repository owner>/<repository name>/<tool id>/<tool version>
- """
- tmp_url = clean_repository_clone_url( repository_clone_url )
- return '%s/%s/%s' % ( tmp_url, tool.id, tool.version )
-def generate_tool_metadata( tool_config, tool, repository_clone_url, metadata_dict ):
- """Update the received metadata_dict with changes that have been applied to the received tool."""
- # Generate the guid
- guid = generate_tool_guid( repository_clone_url, tool )
- # Handle tool.requirements.
- tool_requirements = []
- for tr in tool.requirements:
- requirement_dict = dict( name=tr.name,
- type=tr.type,
- version=tr.version )
- tool_requirements.append( requirement_dict )
- # Handle tool.tests.
- tool_tests = []
- if tool.tests:
- for ttb in tool.tests:
- required_files = []
- for required_file in ttb.required_files:
- value, extra = required_file
- required_files.append( ( value ) )
- inputs = []
- for input in ttb.inputs:
- name, value, extra = input
- inputs.append( ( name, value ) )
- outputs = []
- for output in ttb.outputs:
- name, file_name, extra = output
- outputs.append( ( name, strip_path( file_name ) if file_name else None ) )
- test_dict = dict( name=ttb.name,
- required_files=required_files,
- inputs=inputs,
- outputs=outputs )
- tool_tests.append( test_dict )
- tool_dict = dict( id=tool.id,
- guid=guid,
- name=tool.name,
- version=tool.version,
- description=tool.description,
- version_string_cmd = tool.version_string_cmd,
- tool_config=tool_config,
- requirements=tool_requirements,
- tests=tool_tests )
- if 'tools' in metadata_dict:
- metadata_dict[ 'tools' ].append( tool_dict )
- else:
- metadata_dict[ 'tools' ] = [ tool_dict ]
- return metadata_dict
def generate_tool_elem( tool_shed, repository_name, changeset_revision, owner, tool_file_path, tool, tool_section ):
if tool_section is not None:
tool_elem = SubElement( tool_section, 'tool' )
@@ -652,7 +330,6 @@
version_elem = SubElement( tool_elem, 'version' )
version_elem.text = tool.version
return tool_elem
-
def generate_tool_panel_elem_list( repository_name, repository_clone_url, changeset_revision, tool_panel_dict, repository_tools_tups, owner='' ):
"""Generate a list of ElementTree Element objects for each section or tool."""
elem_list = []
@@ -958,34 +635,6 @@
if tool:
repository_tools_tups.append( ( relative_path, guid, tool ) )
return repository_tools_tups
-def get_sample_files_from_disk( repository_files_dir, tool_path = None, relative_install_dir=None, resetting_all_metadata_on_repository=False ):
- if resetting_all_metadata_on_repository:
- # Keep track of the location where the repository is temporarily cloned so that we can strip it when setting metadata.
- work_dir = repository_files_dir
- sample_file_metadata_paths = []
- sample_file_copy_paths = []
- for root, dirs, files in os.walk( repository_files_dir ):
- if root.find( '.hg' ) < 0:
- for name in files:
- if name.endswith( '.sample' ):
- if resetting_all_metadata_on_repository:
- full_path_to_sample_file = os.path.join( root, name )
- stripped_path_to_sample_file = full_path_to_sample_file.replace( work_dir, '' )
- if stripped_path_to_sample_file.startswith( '/' ):
- stripped_path_to_sample_file = stripped_path_to_sample_file[ 1: ]
- relative_path_to_sample_file = os.path.join( relative_install_dir, stripped_path_to_sample_file )
- if os.path.exists( relative_path_to_sample_file ):
- sample_file_copy_paths.append( relative_path_to_sample_file )
- else:
- sample_file_copy_paths.append( full_path_to_sample_file )
- else:
- relative_path_to_sample_file = os.path.join( root, name )
- sample_file_copy_paths.append( relative_path_to_sample_file )
- if tool_path and relative_install_dir:
- if relative_path_to_sample_file.startswith( os.path.join( tool_path, relative_install_dir ) ):
- relative_path_to_sample_file = relative_path_to_sample_file[ len( tool_path ) + 1 :]
- sample_file_metadata_paths.append( relative_path_to_sample_file )
- return sample_file_metadata_paths, sample_file_copy_paths
def get_shed_tool_conf_dict( app, shed_tool_conf ):
"""
Return the in-memory version of the shed_tool_conf file, which is stored in the config_elems entry
@@ -1120,13 +769,6 @@
return shed_url
# The tool shed from which the repository was originally installed must no longer be configured in tool_sheds_conf.xml.
return None
-def get_readme_file_names( repository_name ):
- readme_files = [ 'readme', 'read_me', 'install' ]
- valid_filenames = [ r for r in readme_files ]
- for r in readme_files:
- valid_filenames.append( '%s.txt' % r )
- valid_filenames.append( '%s.txt' % repository_name )
- return valid_filenames
def handle_missing_data_table_entry( app, relative_install_dir, tool_path, repository_tools_tups ):
"""
Inspect each tool to see if any have input parameters that are dynamically generated select lists that require entries in the
@@ -1522,34 +1164,6 @@
removed = True
error_message = ''
return removed, error_message
-def reset_all_metadata_on_installed_repository( trans, id ):
- """Reset all metadata on a single tool shed repository installed into a Galaxy instance."""
- repository = get_installed_tool_shed_repository( trans, id )
- tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository )
- repository_clone_url = generate_clone_url_for_installed_repository( trans, repository )
- tool_path, relative_install_dir = repository.get_tool_relative_path( trans.app )
- if relative_install_dir:
- original_metadata_dict = repository.metadata
- metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( app=trans.app,
- repository=repository,
- repository_clone_url=repository_clone_url,
- shed_config_dict = repository.get_shed_config_dict( trans.app ),
- relative_install_dir=relative_install_dir,
- repository_files_dir=None,
- resetting_all_metadata_on_repository=False,
- updating_installed_repository=False,
- persist=False )
- repository.metadata = metadata_dict
- if metadata_dict != original_metadata_dict:
- update_in_shed_tool_config( trans.app, repository )
- trans.sa_session.add( repository )
- trans.sa_session.flush()
- log.debug( 'Metadata has been reset on repository %s.' % repository.name )
- else:
- log.debug( 'Metadata did not need to be reset on repository %s.' % repository.name )
- else:
- log.debug( 'Error locating installation directory for repository %s.' % repository.name )
- return invalid_file_tups, metadata_dict
def to_html_str( text ):
"""Translates the characters in text to an html string"""
translated = []
diff -r 7a51b701af8825baaf4aeb68f422304434c01a10 -r a43b16b64b1a4b0b7fe2d5be0d87a9047c3a54a3 lib/galaxy/util/shed_util_common.py
--- a/lib/galaxy/util/shed_util_common.py
+++ b/lib/galaxy/util/shed_util_common.py
@@ -1,4 +1,4 @@
-import os, shutil, tempfile, logging
+import os, shutil, tempfile, logging, string
from galaxy import util
from galaxy.tools import parameters
from galaxy.util import inflector
@@ -23,7 +23,7 @@
'&' : '&amp;',
'\'' : '&apos;' }
MAX_CONTENT_SIZE = 32768
-NOT_TOOL_CONFIGS = [ 'datatypes_conf.xml', 'tool_dependencies.xml' ]
+NOT_TOOL_CONFIGS = [ 'datatypes_conf.xml', 'repository_dependencies.xml', 'tool_dependencies.xml' ]
GALAXY_ADMIN_TOOL_SHED_CONTROLLER = 'GALAXY_ADMIN_TOOL_SHED_CONTROLLER'
TOOL_SHED_ADMIN_CONTROLLER = 'TOOL_SHED_ADMIN_CONTROLLER'
VALID_CHARS = set( string.letters + string.digits + "'\"-=_.()/+*^,:?!#[]%\\$@;{}" )
@@ -49,6 +49,55 @@
option_value = trans.security.encode_id( repository.id )
repositories_select_field.add_option( option_label, option_value )
return repositories_select_field
+def can_generate_tool_dependency_metadata( root, metadata_dict ):
+ """
+ Make sure the combination of name, version and type (the type will be the value of elem.tag) of each root element tag in the tool_dependencies.xml
+ file is defined in the <requirement> tag for at least one tool in the repository.
+ """
+ can_generate_dependency_metadata = False
+ for elem in root:
+ tool_dependency_type = elem.tag
+ tool_dependency_version = elem.get( 'version', None )
+ if tool_dependency_type == 'package':
+ can_generate_dependency_metadata = False
+ tool_dependency_name = elem.get( 'name', None )
+ if tool_dependency_name and tool_dependency_version:
+ for tool_dict in metadata_dict.get( 'tools', [] ):
+ requirements = tool_dict.get( 'requirements', [] )
+ for requirement_dict in requirements:
+ req_name = requirement_dict.get( 'name', None )
+ req_version = requirement_dict.get( 'version', None )
+ req_type = requirement_dict.get( 'type', None )
+ if req_name==tool_dependency_name and req_version==tool_dependency_version and req_type==tool_dependency_type:
+ can_generate_dependency_metadata = True
+ break
+ if requirements and not can_generate_dependency_metadata:
+ # We've discovered at least 1 combination of name, version and type that is not defined in the <requirement>
+ # tag for any tool in the repository.
+ break
+ if not can_generate_dependency_metadata:
+ break
+ elif tool_dependency_type == 'set_environment':
+ # Here elem is something like: <set_environment version="1.0">
+ for env_var_elem in elem:
+ can_generate_dependency_metadata = False
+ # <environment_variable name="R_SCRIPT_PATH" action="set_to">$REPOSITORY_INSTALL_DIR</environment_variable>
+ env_var_name = env_var_elem.get( 'name', None )
+ if env_var_name:
+ for tool_dict in metadata_dict.get( 'tools', [] ):
+ requirements = tool_dict.get( 'requirements', [] )
+ for requirement_dict in requirements:
+ # {"name": "R_SCRIPT_PATH", "type": "set_environment", "version": null}
+ req_name = requirement_dict.get( 'name', None )
+ req_type = requirement_dict.get( 'type', None )
+ if req_name==env_var_name and req_type==tool_dependency_type:
+ can_generate_dependency_metadata = True
+ break
+ if requirements and not can_generate_dependency_metadata:
+ # We've discovered at least 1 combination of name, version and type that is not defined in the <requirement>
+ # tag for any tool in the repository.
+ break
+ return can_generate_dependency_metadata
def check_tool_input_params( app, repo_dir, tool_config_name, tool, sample_files ):
"""
Check all of the tool's input parameters, looking for any that are dynamically generated using external data files to make
@@ -93,6 +142,34 @@
correction_msg += "Upload a file named <b>%s.sample</b> to the repository to correct this error." % str( index_file_name )
invalid_files_and_errors_tups.append( ( tool_config_name, correction_msg ) )
return invalid_files_and_errors_tups
+def clean_repository_clone_url( repository_clone_url ):
+ if repository_clone_url.find( '@' ) > 0:
+ # We have an url that includes an authenticated user, something like:
+ # http://test@bx.psu.edu:9009/repos/some_username/column
+ items = repository_clone_url.split( '@' )
+ tmp_url = items[ 1 ]
+ elif repository_clone_url.find( '//' ) > 0:
+ # We have an url that includes only a protocol, something like:
+ # http://bx.psu.edu:9009/repos/some_username/column
+ items = repository_clone_url.split( '//' )
+ tmp_url = items[ 1 ]
+ else:
+ tmp_url = repository_clone_url
+ return tmp_url
+def clean_repository_metadata( trans, id, changeset_revisions ):
+ # Delete all repository_metadata records associated with the repository that have a changeset_revision that is not in changeset_revisions.
+ # We sometimes see multiple records with the same changeset revision value - no idea how this happens. We'll assume we can delete the older
+ # records, so we'll order by update_time descending and delete records that have the same changeset_revision we come across later..
+ changeset_revisions_checked = []
+ for repository_metadata in trans.sa_session.query( trans.model.RepositoryMetadata ) \
+ .filter( trans.model.RepositoryMetadata.table.c.repository_id == trans.security.decode_id( id ) ) \
+ .order_by( trans.model.RepositoryMetadata.table.c.changeset_revision,
+ trans.model.RepositoryMetadata.table.c.update_time.desc() ):
+ changeset_revision = repository_metadata.changeset_revision
+ can_delete = changeset_revision in changeset_revisions_checked or changeset_revision not in changeset_revisions
+ if can_delete:
+ trans.sa_session.delete( repository_metadata )
+ trans.sa_session.flush()
def clone_repository( repository_clone_url, repository_file_dir, ctx_rev ):
"""Clone the repository up to the specified changeset_revision. No subsequent revisions will be present in the cloned repository."""
try:
@@ -107,6 +184,91 @@
error_message = 'Error cloning repository: %s' % str( e )
log.debug( error_message )
return False, error_message
+def compare_changeset_revisions( ancestor_changeset_revision, ancestor_metadata_dict, current_changeset_revision, current_metadata_dict ):
+ # The metadata associated with ancestor_changeset_revision is ancestor_metadata_dict. This changeset_revision is an ancestor of
+ # current_changeset_revision which is associated with current_metadata_dict. A new repository_metadata record will be created only
+ # when this method returns the string 'not equal and not subset'.
+ ancestor_datatypes = ancestor_metadata_dict.get( 'datatypes', [] )
+ ancestor_tools = ancestor_metadata_dict.get( 'tools', [] )
+ ancestor_guids = [ tool_dict[ 'guid' ] for tool_dict in ancestor_tools ]
+ ancestor_guids.sort()
+ ancestor_tool_dependencies = ancestor_metadata_dict.get( 'tool_dependencies', [] )
+ ancestor_workflows = ancestor_metadata_dict.get( 'workflows', [] )
+ current_datatypes = current_metadata_dict.get( 'datatypes', [] )
+ current_tools = current_metadata_dict.get( 'tools', [] )
+ current_guids = [ tool_dict[ 'guid' ] for tool_dict in current_tools ]
+ current_guids.sort()
+ current_tool_dependencies = current_metadata_dict.get( 'tool_dependencies', [] )
+ current_workflows = current_metadata_dict.get( 'workflows', [] )
+ # Handle case where no metadata exists for either changeset.
+ if not ancestor_guids and not current_guids and not ancestor_workflows and not current_workflows and not ancestor_datatypes and not current_datatypes:
+ return 'no metadata'
+ workflow_comparison = compare_workflows( ancestor_workflows, current_workflows )
+ datatype_comparison = compare_datatypes( ancestor_datatypes, current_datatypes )
+ # Handle case where all metadata is the same.
+ if ancestor_guids == current_guids and workflow_comparison == 'equal' and datatype_comparison == 'equal':
+ return 'equal'
+ if workflow_comparison in [ 'equal', 'subset' ] and datatype_comparison in [ 'equal', 'subset' ]:
+ is_subset = True
+ for guid in ancestor_guids:
+ if guid not in current_guids:
+ is_subset = False
+ break
+ if is_subset:
+ return 'subset'
+ return 'not equal and not subset'
+def compare_datatypes( ancestor_datatypes, current_datatypes ):
+ # Determine if ancestor_datatypes is the same as current_datatypes
+ # or if ancestor_datatypes is a subset of current_datatypes. Each
+ # datatype dict looks something like:
+ # {"dtype": "galaxy.datatypes.images:Image", "extension": "pdf", "mimetype": "application/pdf"}
+ if len( ancestor_datatypes ) <= len( current_datatypes ):
+ for ancestor_datatype in ancestor_datatypes:
+ # Currently the only way to differentiate datatypes is by name.
+ ancestor_datatype_dtype = ancestor_datatype[ 'dtype' ]
+ ancestor_datatype_extension = ancestor_datatype[ 'extension' ]
+ ancestor_datatype_mimetype = ancestor_datatype.get( 'mimetype', None )
+ found_in_current = False
+ for current_datatype in current_datatypes:
+ if current_datatype[ 'dtype' ] == ancestor_datatype_dtype and \
+ current_datatype[ 'extension' ] == ancestor_datatype_extension and \
+ current_datatype.get( 'mimetype', None ) == ancestor_datatype_mimetype:
+ found_in_current = True
+ break
+ if not found_in_current:
+ return 'not equal and not subset'
+ if len( ancestor_datatypes ) == len( current_datatypes ):
+ return 'equal'
+ else:
+ return 'subset'
+ return 'not equal and not subset'
+def compare_workflows( ancestor_workflows, current_workflows ):
+ # Determine if ancestor_workflows is the same as current_workflows
+ # or if ancestor_workflows is a subset of current_workflows.
+ if len( ancestor_workflows ) <= len( current_workflows ):
+ for ancestor_workflow_tup in ancestor_workflows:
+ # ancestor_workflows is a list of tuples where each contained tuple is
+ # [ <relative path to the .ga file in the repository>, <exported workflow dict> ]
+ ancestor_workflow_dict = ancestor_workflow_tup[1]
+ # Currently the only way to differentiate workflows is by name.
+ ancestor_workflow_name = ancestor_workflow_dict[ 'name' ]
+ num_ancestor_workflow_steps = len( ancestor_workflow_dict[ 'steps' ] )
+ found_in_current = False
+ for current_workflow_tup in current_workflows:
+ current_workflow_dict = current_workflow_tup[1]
+ # Assume that if the name and number of steps are euqal,
+ # then the workflows are the same. Of course, this may
+ # not be true...
+ if current_workflow_dict[ 'name' ] == ancestor_workflow_name and len( current_workflow_dict[ 'steps' ] ) == num_ancestor_workflow_steps:
+ found_in_current = True
+ break
+ if not found_in_current:
+ return 'not equal and not subset'
+ if len( ancestor_workflows ) == len( current_workflows ):
+ return 'equal'
+ else:
+ return 'subset'
+ return 'not equal and not subset'
def concat_messages( msg1, msg2 ):
if msg1:
if msg2:
@@ -165,6 +327,60 @@
return '%s://%s%s/repos/%s/%s' % ( protocol, username, base, repository.user.username, repository.name )
else:
return '%s/repos/%s/%s' % ( base_url, repository.user.username, repository.name )
+def generate_datatypes_metadata( datatypes_config, metadata_dict ):
+ """Update the received metadata_dict with information from the parsed datatypes_config."""
+ tree = ElementTree.parse( datatypes_config )
+ root = tree.getroot()
+ ElementInclude.include( root )
+ repository_datatype_code_files = []
+ datatype_files = root.find( 'datatype_files' )
+ if datatype_files:
+ for elem in datatype_files.findall( 'datatype_file' ):
+ name = elem.get( 'name', None )
+ repository_datatype_code_files.append( name )
+ metadata_dict[ 'datatype_files' ] = repository_datatype_code_files
+ datatypes = []
+ registration = root.find( 'registration' )
+ if registration:
+ for elem in registration.findall( 'datatype' ):
+ datatypes_dict = {}
+ display_in_upload = elem.get( 'display_in_upload', None )
+ if display_in_upload:
+ datatypes_dict[ 'display_in_upload' ] = display_in_upload
+ dtype = elem.get( 'type', None )
+ if dtype:
+ datatypes_dict[ 'dtype' ] = dtype
+ extension = elem.get( 'extension', None )
+ if extension:
+ datatypes_dict[ 'extension' ] = extension
+ max_optional_metadata_filesize = elem.get( 'max_optional_metadata_filesize', None )
+ if max_optional_metadata_filesize:
+ datatypes_dict[ 'max_optional_metadata_filesize' ] = max_optional_metadata_filesize
+ mimetype = elem.get( 'mimetype', None )
+ if mimetype:
+ datatypes_dict[ 'mimetype' ] = mimetype
+ subclass = elem.get( 'subclass', None )
+ if subclass:
+ datatypes_dict[ 'subclass' ] = subclass
+ if datatypes_dict:
+ datatypes.append( datatypes_dict )
+ if datatypes:
+ metadata_dict[ 'datatypes' ] = datatypes
+ return metadata_dict
+def generate_environment_dependency_metadata( elem, tool_dependencies_dict ):
+ """The value of env_var_name must match the value of the "set_environment" type in the tool config's <requirements> tag set."""
+ requirements_dict = {}
+ for env_elem in elem:
+ env_name = env_elem.get( 'name', None )
+ if env_name:
+ requirements_dict [ 'name' ] = env_name
+ requirements_dict [ 'type' ] = 'environment variable'
+ if requirements_dict:
+ if 'set_environment' in tool_dependencies_dict:
+ tool_dependencies_dict[ 'set_environment' ].append( requirements_dict )
+ else:
+ tool_dependencies_dict[ 'set_environment' ] = [ requirements_dict ]
+ return tool_dependencies_dict
def generate_message_for_invalid_tools( trans, invalid_file_tups, repository, metadata_dict, as_html=True, displaying_invalid_tool=False ):
if as_html:
new_line = '<br/>'
@@ -201,7 +417,7 @@
correction_msg = exception_msg.replace( '<br/>', new_line ).replace( '<b>', bold_start ).replace( '</b>', bold_end )
message += "%s%s%s - %s%s" % ( bold_start, tool_file, bold_end, correction_msg, new_line )
return message
-def generate_metadata_for_changeset_revision( app, repository, repository_clone_url, shed_config_dict={}, relative_install_dir=None, repository_files_dir=None,
+def generate_metadata_for_changeset_revision( app, repository, repository_clone_url, shed_config_dict=None, relative_install_dir=None, repository_files_dir=None,
resetting_all_metadata_on_repository=False, updating_installed_repository=False, persist=False ):
"""
Generate metadata for a repository using it's files on disk. To generate metadata for changeset revisions older than the repository tip,
@@ -212,6 +428,8 @@
The value of persist will be True when the installed repository contains a valid tool_data_table_conf.xml.sample file, in which case the entries
should ultimately be persisted to the file referred to by app.config.shed_tool_data_table_config.
"""
+ if shed_config_dict is None:
+ shed_config_dict = {}
if updating_installed_repository:
# Keep the original tool shed repository metadata if setting metadata on a repository installed into a local Galaxy instance for which
# we have pulled updates.
@@ -270,18 +488,23 @@
if '.hg' in dirs:
dirs.remove( '.hg' )
for name in files:
+ # See if we have a repository dependencies defined.
+ if name == 'repository_dependencies.xml':
+ relative_path_to_repository_dependencies = get_relative_path_to_repository_file( root,
+ name,
+ relative_install_dir,
+ work_dir,
+ shed_config_dict,
+ resetting_all_metadata_on_repository )
+ metadata_dict = generate_repository_dependency_metadata( relative_path_to_repository_dependencies, metadata_dict )
# See if we have a READ_ME file.
- if name.lower() in readme_file_names:
- if resetting_all_metadata_on_repository:
- full_path_to_readme = os.path.join( root, name )
- stripped_path_to_readme = full_path_to_readme.replace( work_dir, '' )
- if stripped_path_to_readme.startswith( '/' ):
- stripped_path_to_readme = stripped_path_to_readme[ 1: ]
- relative_path_to_readme = os.path.join( relative_install_dir, stripped_path_to_readme )
- else:
- relative_path_to_readme = os.path.join( root, name )
- if relative_install_dir and shed_config_dict.get( 'tool_path' ) and relative_path_to_readme.startswith( os.path.join( shed_config_dict.get( 'tool_path' ), relative_install_dir ) ):
- relative_path_to_readme = relative_path_to_readme[ len( shed_config_dict.get( 'tool_path' ) ) + 1: ]
+ elif name.lower() in readme_file_names:
+ relative_path_to_readme = get_relative_path_to_repository_file( root,
+ name,
+ relative_install_dir,
+ work_dir,
+ shed_config_dict,
+ resetting_all_metadata_on_repository )
metadata_dict[ 'readme' ] = relative_path_to_readme
# See if we have a tool config.
elif name not in NOT_TOOL_CONFIGS and name.endswith( '.xml' ):
@@ -311,16 +534,14 @@
invalid_tool_configs.append( name )
break
if can_set_metadata:
- if resetting_all_metadata_on_repository:
- full_path_to_tool_config = os.path.join( root, name )
- stripped_path_to_tool_config = full_path_to_tool_config.replace( work_dir, '' )
- if stripped_path_to_tool_config.startswith( '/' ):
- stripped_path_to_tool_config = stripped_path_to_tool_config[ 1: ]
- relative_path_to_tool_config = os.path.join( relative_install_dir, stripped_path_to_tool_config )
- else:
- relative_path_to_tool_config = os.path.join( root, name )
- if relative_install_dir and shed_config_dict.get( 'tool_path' ) and relative_path_to_tool_config.startswith( os.path.join( shed_config_dict.get( 'tool_path' ), relative_install_dir ) ):
- relative_path_to_tool_config = relative_path_to_tool_config[ len( shed_config_dict.get( 'tool_path' ) ) + 1: ]
+ relative_path_to_tool_config = get_relative_path_to_repository_file( root,
+ name,
+ relative_install_dir,
+ work_dir,
+ shed_config_dict,
+ resetting_all_metadata_on_repository )
+
+
metadata_dict = generate_tool_metadata( relative_path_to_tool_config, tool, repository_clone_url, metadata_dict )
else:
for tup in invalid_files_and_errors_tups:
@@ -350,6 +571,131 @@
app.config.tool_data_path = original_tool_data_path
app.config.tool_data_table_config_path = original_tool_data_table_config_path
return metadata_dict, invalid_file_tups
+def generate_package_dependency_metadata( elem, tool_dependencies_dict ):
+ """The value of package_name must match the value of the "package" type in the tool config's <requirements> tag set."""
+ requirements_dict = {}
+ package_name = elem.get( 'name', None )
+ package_version = elem.get( 'version', None )
+ if package_name and package_version:
+ dependency_key = '%s/%s' % ( package_name, package_version )
+ requirements_dict [ 'name' ] = package_name
+ requirements_dict [ 'version' ] = package_version
+ requirements_dict [ 'type' ] = 'package'
+ for sub_elem in elem:
+ if sub_elem.tag == 'readme':
+ requirements_dict[ 'readme' ] = sub_elem.text
+ if requirements_dict:
+ tool_dependencies_dict[ dependency_key ] = requirements_dict
+ return tool_dependencies_dict
+def generate_repository_dependency_metadata( repository_dependencies_config, metadata_dict ):
+ repository_dependencies_tups = []
+ try:
+ # Make sure we're looking at a valid repository_dependencies.xml file.
+ tree = util.parse_xml( repository_dependencies_config )
+ root = element_tree.getroot()
+ is_valid = element_tree_root.tag == 'repositories'
+ except Exception, e:
+ log.debug( "Error parsing %s, exception: %s" % ( repository_dependencies_config, str( e ) ) )
+ is_valid = False
+ if is_valid:
+ for repository_elem in root.findall( 'repository' ):
+ repository_dependencies_tups.append( ( repository_elem.attrib[ 'toolshed' ],
+ repository_elem.attrib[ 'name' ],
+ repository_elem.attrib[ 'owner'],
+ repository_elem.attrib[ 'changeset_revision' ] ) )
+ if repository_dependencies_tups:
+ metadata_dict[ 'repository_dependencies' ] = repository_dependencies_tups
+ return metadata_dict
+def generate_tool_dependency_metadata( app, repository, tool_dependencies_config, metadata_dict, original_repository_metadata=None ):
+ """
+ If the combination of name, version and type of each element is defined in the <requirement> tag for at least one tool in the repository,
+ then update the received metadata_dict with information from the parsed tool_dependencies_config.
+ """
+ if original_repository_metadata:
+ # Keep a copy of the original tool dependencies dictionary in the metadata.
+ original_tool_dependencies_dict = original_repository_metadata.get( 'tool_dependencies', None )
+ else:
+ original_tool_dependencies_dict = None
+ try:
+ tree = ElementTree.parse( tool_dependencies_config )
+ except Exception, e:
+ log.debug( "Exception attempting to parse tool_dependencies.xml: %s" %str( e ) )
+ return metadata_dict
+ root = tree.getroot()
+ ElementInclude.include( root )
+ tool_dependencies_dict = {}
+ if can_generate_tool_dependency_metadata( root, metadata_dict ):
+ for elem in root:
+ if elem.tag == 'package':
+ tool_dependencies_dict = generate_package_dependency_metadata( elem, tool_dependencies_dict )
+ elif elem.tag == 'set_environment':
+ tool_dependencies_dict = generate_environment_dependency_metadata( elem, tool_dependencies_dict )
+ # Handle tool dependency installation via other means here (future).
+ if tool_dependencies_dict:
+ metadata_dict[ 'tool_dependencies' ] = tool_dependencies_dict
+ else:
+ log.debug( "Name, version and type from the <requirement> tag does not match the information in the tool_dependencies.xml file. Tool dependencies will be ignored." )
+ if tool_dependencies_dict:
+ if original_tool_dependencies_dict:
+ # We're generating metadata on an update pulled to a tool shed repository installed into a Galaxy instance, so handle changes to
+ # tool dependencies appropriately.
+ handle_existing_tool_dependencies_that_changed_in_update( app, repository, original_tool_dependencies_dict, tool_dependencies_dict )
+ metadata_dict[ 'tool_dependencies' ] = tool_dependencies_dict
+ return metadata_dict
+def generate_tool_guid( repository_clone_url, tool ):
+ """
+ Generate a guid for the installed tool. It is critical that this guid matches the guid for
+ the tool in the Galaxy tool shed from which it is being installed. The form of the guid is
+ <tool shed host>/repos/<repository owner>/<repository name>/<tool id>/<tool version>
+ """
+ tmp_url = clean_repository_clone_url( repository_clone_url )
+ return '%s/%s/%s' % ( tmp_url, tool.id, tool.version )
+def generate_tool_metadata( tool_config, tool, repository_clone_url, metadata_dict ):
+ """Update the received metadata_dict with changes that have been applied to the received tool."""
+ # Generate the guid
+ guid = generate_tool_guid( repository_clone_url, tool )
+ # Handle tool.requirements.
+ tool_requirements = []
+ for tr in tool.requirements:
+ requirement_dict = dict( name=tr.name,
+ type=tr.type,
+ version=tr.version )
+ tool_requirements.append( requirement_dict )
+ # Handle tool.tests.
+ tool_tests = []
+ if tool.tests:
+ for ttb in tool.tests:
+ required_files = []
+ for required_file in ttb.required_files:
+ value, extra = required_file
+ required_files.append( ( value ) )
+ inputs = []
+ for input in ttb.inputs:
+ name, value, extra = input
+ inputs.append( ( name, value ) )
+ outputs = []
+ for output in ttb.outputs:
+ name, file_name, extra = output
+ outputs.append( ( name, strip_path( file_name ) if file_name else None ) )
+ test_dict = dict( name=ttb.name,
+ required_files=required_files,
+ inputs=inputs,
+ outputs=outputs )
+ tool_tests.append( test_dict )
+ tool_dict = dict( id=tool.id,
+ guid=guid,
+ name=tool.name,
+ version=tool.version,
+ description=tool.description,
+ version_string_cmd = tool.version_string_cmd,
+ tool_config=tool_config,
+ requirements=tool_requirements,
+ tests=tool_tests )
+ if 'tools' in metadata_dict:
+ metadata_dict[ 'tools' ].append( tool_dict )
+ else:
+ metadata_dict[ 'tools' ] = [ tool_dict ]
+ return metadata_dict
def get_changectx_for_changeset( repo, changeset_revision, **kwd ):
"""Retrieve a specified changectx from a repository"""
for changeset in repo.changelog:
@@ -393,6 +739,13 @@
if deleted:
return 'DELETED'
return None
+def get_readme_file_names( repository_name ):
+ readme_files = [ 'readme', 'read_me', 'install' ]
+ valid_filenames = [ r for r in readme_files ]
+ for r in readme_files:
+ valid_filenames.append( '%s.txt' % r )
+ valid_filenames.append( '%s.txt' % repository_name )
+ return valid_filenames
def get_repository_file_contents( file_path ):
if is_gzip( file_path ):
to_html = to_html_str( '\ngzip compressed file\n' )
@@ -454,6 +807,34 @@
fh.close()
return tmp_filename
return None
+def get_sample_files_from_disk( repository_files_dir, tool_path = None, relative_install_dir=None, resetting_all_metadata_on_repository=False ):
+ if resetting_all_metadata_on_repository:
+ # Keep track of the location where the repository is temporarily cloned so that we can strip it when setting metadata.
+ work_dir = repository_files_dir
+ sample_file_metadata_paths = []
+ sample_file_copy_paths = []
+ for root, dirs, files in os.walk( repository_files_dir ):
+ if root.find( '.hg' ) < 0:
+ for name in files:
+ if name.endswith( '.sample' ):
+ if resetting_all_metadata_on_repository:
+ full_path_to_sample_file = os.path.join( root, name )
+ stripped_path_to_sample_file = full_path_to_sample_file.replace( work_dir, '' )
+ if stripped_path_to_sample_file.startswith( '/' ):
+ stripped_path_to_sample_file = stripped_path_to_sample_file[ 1: ]
+ relative_path_to_sample_file = os.path.join( relative_install_dir, stripped_path_to_sample_file )
+ if os.path.exists( relative_path_to_sample_file ):
+ sample_file_copy_paths.append( relative_path_to_sample_file )
+ else:
+ sample_file_copy_paths.append( full_path_to_sample_file )
+ else:
+ relative_path_to_sample_file = os.path.join( root, name )
+ sample_file_copy_paths.append( relative_path_to_sample_file )
+ if tool_path and relative_install_dir:
+ if relative_path_to_sample_file.startswith( os.path.join( tool_path, relative_install_dir ) ):
+ relative_path_to_sample_file = relative_path_to_sample_file[ len( tool_path ) + 1 :]
+ sample_file_metadata_paths.append( relative_path_to_sample_file )
+ return sample_file_metadata_paths, sample_file_copy_paths
def get_parent_id( trans, id, old_id, version, guid, changeset_revisions ):
parent_id = None
# Compare from most recent to oldest.
@@ -472,6 +853,20 @@
if parent_id is None:
# The tool did not change through all of the changeset revisions.
return old_id
+def get_relative_path_to_repository_file( root, name, relative_install_dir, work_dir, shed_config_dict, resetting_all_metadata_on_repository ):
+ if resetting_all_metadata_on_repository:
+ full_path_to_file = os.path.join( root, name )
+ stripped_path_to_file = full_path_to_file.replace( work_dir, '' )
+ if stripped_path_to_file.startswith( '/' ):
+ stripped_path_to_file = stripped_path_to_file[ 1: ]
+ relative_path_to_file = os.path.join( relative_install_dir, stripped_path_to_file )
+ else:
+ relative_path_to_file = os.path.join( root, name )
+ if relative_install_dir and \
+ shed_config_dict.get( 'tool_path' ) and \
+ relative_path_to_file.startswith( os.path.join( shed_config_dict.get( 'tool_path' ), relative_install_dir ) ):
+ relative_path_to_file = relative_path_to_file[ len( shed_config_dict.get( 'tool_path' ) ) + 1: ]
+ return relative_path_to_file
def handle_sample_files_and_load_tool_from_disk( trans, repo_files_dir, tool_config_filepath, work_dir ):
# Copy all sample files from disk to a temporary directory since the sample files may be in multiple directories.
message = ''
@@ -566,6 +961,34 @@
shutil.rmtree( dir )
except:
pass
+def reset_all_metadata_on_installed_repository( trans, id ):
+ """Reset all metadata on a single tool shed repository installed into a Galaxy instance."""
+ repository = get_installed_tool_shed_repository( trans, id )
+ tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository )
+ repository_clone_url = generate_clone_url_for_installed_repository( trans, repository )
+ tool_path, relative_install_dir = repository.get_tool_relative_path( trans.app )
+ if relative_install_dir:
+ original_metadata_dict = repository.metadata
+ metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( app=trans.app,
+ repository=repository,
+ repository_clone_url=repository_clone_url,
+ shed_config_dict = repository.get_shed_config_dict( trans.app ),
+ relative_install_dir=relative_install_dir,
+ repository_files_dir=None,
+ resetting_all_metadata_on_repository=False,
+ updating_installed_repository=False,
+ persist=False )
+ repository.metadata = metadata_dict
+ if metadata_dict != original_metadata_dict:
+ update_in_shed_tool_config( trans.app, repository )
+ trans.sa_session.add( repository )
+ trans.sa_session.flush()
+ log.debug( 'Metadata has been reset on repository %s.' % repository.name )
+ else:
+ log.debug( 'Metadata did not need to be reset on repository %s.' % repository.name )
+ else:
+ log.debug( 'Error locating installation directory for repository %s.' % repository.name )
+ return invalid_file_tups, metadata_dict
def reset_all_metadata_on_repository_in_tool_shed( trans, id ):
"""Reset all metadata on a single repository in a tool shed."""
def reset_all_tool_versions( trans, id, repo ):
diff -r 7a51b701af8825baaf4aeb68f422304434c01a10 -r a43b16b64b1a4b0b7fe2d5be0d87a9047c3a54a3 templates/webapps/community/repository/common.mako
--- a/templates/webapps/community/repository/common.mako
+++ b/templates/webapps/community/repository/common.mako
@@ -83,7 +83,7 @@
hg clone <a href="${clone_str}">${clone_str}</a></%def>
-<%def name="render_repository_items( repository_metadata_id, metadata, can_set_metadata=False )">
+<%def name="render_repository_items( repository_metadata_id, changeset_revision, metadata, can_set_metadata=False )">
+ <% from galaxy.tool_shed.encoding_util import tool_shed_encode %>
%if metadata or can_set_metadata:
<p/>
@@ -91,6 +91,28 @@
<div class="toolFormTitle">Preview tools and inspect metadata by tool version</div><div class="toolFormBody">
%if metadata:
+ %if 'repository_dependencies' in metadata:
+ <div class="form-row">
+ <table class="grid">
+ <tr>
+ <td><b>tool shed</b></td>
+ <td><b>name</b></td>
+ <td><b>owner</b></td>
+ <td><b>changeset revision</b></td>
+ </tr>
+ %for repository_dependency_tup in metadata[ 'repository_dependencies' ]:
+ <% toolshed, name, owner, changeset_revision = repository_dependency_tup %>
+ <tr>
+ <td>${toolshed | h}</td>
+ <td>${name | h}</td>
+ <td>${owner | h}</td>
+ <td>${changeset_revision | h}</td>
+ </tr>
+ %endfor
+ </table>
+ </div>
+ <div style="clear: both"></div>
+ %endif
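The table above assumes each entry in metadata[ 'repository_dependencies' ] is a 4-tuple of tool shed, name, owner and changeset revision; a hypothetical example of the structure the template unpacks:

    # Hypothetical metadata entry consumed by the table above.
    metadata = { 'repository_dependencies':
                     [ ( 'toolshed.g2.bx.psu.edu', 'package_samtools', 'iuc', '171cd8bc208d' ) ] }
    for toolshed, name, owner, changeset_revision in metadata[ 'repository_dependencies' ]:
        print toolshed, name, owner, changeset_revision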
%if 'tool_dependencies' in metadata:
<%
# See if tool dependencies are packages, environment settings or both.
diff -r 7a51b701af8825baaf4aeb68f422304434c01a10 -r a43b16b64b1a4b0b7fe2d5be0d87a9047c3a54a3 templates/webapps/community/repository/manage_repository.mako
--- a/templates/webapps/community/repository/manage_repository.mako
+++ b/templates/webapps/community/repository/manage_repository.mako
@@ -204,7 +204,7 @@
</form>
</div>
</div>
-${render_repository_items( repository_metadata_id, metadata, can_set_metadata=True )}
+${render_repository_items( repository_metadata_id, changeset_revision, metadata, can_set_metadata=True )}
<p/><div class="toolForm"><div class="toolFormTitle">Manage categories</div>
diff -r 7a51b701af8825baaf4aeb68f422304434c01a10 -r a43b16b64b1a4b0b7fe2d5be0d87a9047c3a54a3 templates/webapps/community/repository/preview_tools_in_changeset.mako
--- a/templates/webapps/community/repository/preview_tools_in_changeset.mako
+++ b/templates/webapps/community/repository/preview_tools_in_changeset.mako
@@ -79,4 +79,4 @@
</div>
</div>
<p/>
-${render_repository_items( repository_metadata_id, metadata )}
+${render_repository_items( repository_metadata_id, changeset_revision, metadata, can_set_metadata=False )}
diff -r 7a51b701af8825baaf4aeb68f422304434c01a10 -r a43b16b64b1a4b0b7fe2d5be0d87a9047c3a54a3 templates/webapps/community/repository/view_repository.mako
--- a/templates/webapps/community/repository/view_repository.mako
+++ b/templates/webapps/community/repository/view_repository.mako
@@ -181,7 +181,7 @@
%endif
</div>
</div>
-${render_repository_items( repository_metadata_id, metadata )}
+${render_repository_items( repository_metadata_id, changeset_revision, metadata, can_set_metadata=False )}
%if repository.categories:
<p/><div class="toolForm">
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/e1895e14176e/
changeset: e1895e14176e
user: greg
date: 2012-11-15 20:57:34
summary: Refactor ~/galaxy/util/shed_util.py
affected #: 13 files
diff -r 0a1db986221074e72131afd6d16320357843c5f8 -r e1895e14176e80bbeccb918dba03723b07cc8112 lib/galaxy/tool_shed/install_manager.py
--- a/lib/galaxy/tool_shed/install_manager.py
+++ b/lib/galaxy/tool_shed/install_manager.py
@@ -6,6 +6,7 @@
from galaxy.tools import ToolSection
from galaxy.util.json import from_json_string, to_json_string
from galaxy.util.shed_util import *
+from galaxy.util.shed_util_common import *
from galaxy.util.odict import odict
from galaxy.tool_shed.common_util import *
diff -r 0a1db986221074e72131afd6d16320357843c5f8 -r e1895e14176e80bbeccb918dba03723b07cc8112 lib/galaxy/tool_shed/update_manager.py
--- a/lib/galaxy/tool_shed/update_manager.py
+++ b/lib/galaxy/tool_shed/update_manager.py
@@ -4,6 +4,7 @@
import threading, urllib2, logging
from galaxy.util import string_as_bool
from galaxy.util.shed_util import *
+from galaxy.util.shed_util_common import *
log = logging.getLogger( __name__ )
diff -r 0a1db986221074e72131afd6d16320357843c5f8 -r e1895e14176e80bbeccb918dba03723b07cc8112 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -33,6 +33,7 @@
from galaxy.util.hash_util import *
from galaxy.util import listify
from galaxy.util.shed_util import *
+from galaxy.util.shed_util_common import *
from galaxy.web import url_for
from galaxy.visualization.genome.visual_analytics import TracksterConfig
diff -r 0a1db986221074e72131afd6d16320357843c5f8 -r e1895e14176e80bbeccb918dba03723b07cc8112 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -1,14 +1,9 @@
-import sys, os, tempfile, shutil, logging, string, urllib2
-import galaxy.tools.data
-from datetime import date, datetime, timedelta
+import os, tempfile, shutil, logging, urllib2
from galaxy import util
-from galaxy.web import url_for
-from galaxy.web.form_builder import SelectField
-from galaxy.tools import parameters
from galaxy.datatypes.checkers import *
from galaxy.datatypes.sniff import is_column_based
from galaxy.util.json import *
-from galaxy.util import inflector
+from galaxy.util.shed_util_common import *
from galaxy.tools.search import ToolBoxSearch
from galaxy.tool_shed.tool_dependencies.install_util import create_or_update_tool_dependency, install_package, set_environment
from galaxy.tool_shed.encoding_util import *
@@ -26,19 +21,6 @@
log = logging.getLogger( __name__ )
-GALAXY_ADMIN_TOOL_SHED_CONTROLLER = 'GALAXY_ADMIN_TOOL_SHED_CONTROLLER'
-INITIAL_CHANGELOG_HASH = '000000000000'
-# Characters that must be html escaped
-MAPPED_CHARS = { '>' :'&gt;',
- '<' :'&lt;',
- '"' : '&quot;',
- '&' : '&amp;',
- '\'' : '&apos;' }
-MAX_CONTENT_SIZE = 32768
-NOT_TOOL_CONFIGS = [ 'datatypes_conf.xml', 'tool_dependencies.xml' ]
-VALID_CHARS = set( string.letters + string.digits + "'\"-=_.()/+*^,:?!#[]%\\$@;{}" )
-TOOL_SHED_ADMIN_CONTROLLER = 'TOOL_SHED_ADMIN_CONTROLLER'
-
def add_to_shed_tool_config( app, shed_tool_conf_dict, elem_list ):
# A tool shed repository is being installed so change the shed_tool_conf file. Parse the config file to generate the entire list
# of config_elems instead of using the in-memory list since it will be a subset of the entire list if one or more repositories have
@@ -175,27 +157,6 @@
except:
pass
return converter_path, display_path
-def build_repository_ids_select_field( trans, cntrller, name='repository_ids', multiple=True, display='checkboxes' ):
- """Method called from both Galaxy and the Tool Shed to generate the current list of repositories for resetting metadata."""
- repositories_select_field = SelectField( name=name, multiple=multiple, display=display )
- if cntrller == TOOL_SHED_ADMIN_CONTROLLER:
- for repository in trans.sa_session.query( trans.model.Repository ) \
- .filter( trans.model.Repository.table.c.deleted == False ) \
- .order_by( trans.model.Repository.table.c.name,
- trans.model.Repository.table.c.user_id ):
- owner = repository.user.username
- option_label = '%s (%s)' % ( repository.name, owner )
- option_value = '%s' % trans.security.encode_id( repository.id )
- repositories_select_field.add_option( option_label, option_value )
- elif cntrller == GALAXY_ADMIN_TOOL_SHED_CONTROLLER:
- for repository in trans.sa_session.query( trans.model.ToolShedRepository ) \
- .filter( trans.model.ToolShedRepository.table.c.uninstalled == False ) \
- .order_by( trans.model.ToolShedRepository.table.c.name,
- trans.model.ToolShedRepository.table.c.owner ):
- option_label = '%s (%s)' % ( repository.name, repository.owner )
- option_value = trans.security.encode_id( repository.id )
- repositories_select_field.add_option( option_label, option_value )
- return repositories_select_field
def can_generate_tool_dependency_metadata( root, metadata_dict ):
"""
Make sure the combination of name, version and type (the type will be the value of elem.tag) of each root element tag in the tool_dependencies.xml
@@ -245,50 +206,6 @@
# tag for any tool in the repository.
break
return can_generate_dependency_metadata
-def check_tool_input_params( app, repo_dir, tool_config_name, tool, sample_files ):
- """
- Check all of the tool's input parameters, looking for any that are dynamically generated using external data files to make
- sure the files exist.
- """
- invalid_files_and_errors_tups = []
- correction_msg = ''
- for input_param in tool.input_params:
- if isinstance( input_param, parameters.basic.SelectToolParameter ) and input_param.is_dynamic:
- # If the tool refers to .loc files or requires an entry in the tool_data_table_conf.xml, make sure all requirements exist.
- options = input_param.dynamic_options or input_param.options
- if options:
- if options.tool_data_table or options.missing_tool_data_table_name:
- # Make sure the repository contains a tool_data_table_conf.xml.sample file.
- sample_tool_data_table_conf = get_config_from_disk( 'tool_data_table_conf.xml.sample', repo_dir )
- if sample_tool_data_table_conf:
- error, correction_msg = handle_sample_tool_data_table_conf_file( app, sample_tool_data_table_conf )
- if error:
- invalid_files_and_errors_tups.append( ( 'tool_data_table_conf.xml.sample', correction_msg ) )
- else:
- options.missing_tool_data_table_name = None
- else:
- correction_msg = "This file requires an entry in the tool_data_table_conf.xml file. Upload a file named tool_data_table_conf.xml.sample "
- correction_msg += "to the repository that includes the required entry to correct this error.<br/>"
- invalid_files_and_errors_tups.append( ( tool_config_name, correction_msg ) )
- if options.index_file or options.missing_index_file:
- # Make sure the repository contains the required xxx.loc.sample file.
- index_file = options.index_file or options.missing_index_file
- index_file_name = strip_path( index_file )
- sample_found = False
- for sample_file in sample_files:
- sample_file_name = strip_path( sample_file )
- if sample_file_name == '%s.sample' % index_file_name:
- options.index_file = index_file_name
- options.missing_index_file = None
- if options.tool_data_table:
- options.tool_data_table.missing_index_file = None
- sample_found = True
- break
- if not sample_found:
- correction_msg = "This file refers to a file named <b>%s</b>. " % str( index_file )
- correction_msg += "Upload a file named <b>%s.sample</b> to the repository to correct this error." % str( index_file_name )
- invalid_files_and_errors_tups.append( ( tool_config_name, correction_msg ) )
- return invalid_files_and_errors_tups
def clean_repository_metadata( trans, id, changeset_revisions ):
# Delete all repository_metadata records associated with the repository that have a changeset_revision that is not in changeset_revisions.
# We sometimes see multiple records with the same changeset revision value - no idea how this happens. We'll assume we can delete the older
@@ -388,17 +305,6 @@
else:
return 'subset'
return 'not equal and not subset'
-def concat_messages( msg1, msg2 ):
- if msg1:
- if msg2:
- message = '%s %s' % ( msg1, msg2 )
- else:
- message = msg1
- elif msg2:
- message = msg2
- else:
- message = ''
- return message
def config_elems_to_xml_file( app, config_elems, config_filename, tool_path ):
# Persist the current in-memory list of config_elems to a file named by the value of config_filename.
fd, filename = tempfile.mkstemp()
@@ -439,35 +345,6 @@
# Eliminate the port, if any, since it will result in an invalid directory name.
return tool_shed_url.split( ':' )[ 0 ]
return tool_shed_url.rstrip( '/' )
-def clone_repository( repository_clone_url, repository_file_dir, ctx_rev ):
- """Clone the repository up to the specified changeset_revision. No subsequent revisions will be present in the cloned repository."""
- try:
- commands.clone( get_configured_ui(),
- str( repository_clone_url ),
- dest=str( repository_file_dir ),
- pull=True,
- noupdate=False,
- rev=util.listify( str( ctx_rev ) ) )
- return True, None
- except Exception, e:
- error_message = 'Error cloning repository: %s' % str( e )
- log.debug( error_message )
- return False, error_message
-def copy_sample_file( app, filename, dest_path=None ):
- """Copy xxx.sample to dest_path/xxx.sample and dest_path/xxx. The default value for dest_path is ~/tool-data."""
- if dest_path is None:
- dest_path = os.path.abspath( app.config.tool_data_path )
- sample_file_name = strip_path( filename )
- copied_file = sample_file_name.replace( '.sample', '' )
- full_source_path = os.path.abspath( filename )
- full_destination_path = os.path.join( dest_path, sample_file_name )
- # Don't copy a file to itself - not sure how this happens, but sometimes it does...
- if full_source_path != full_destination_path:
- # It's ok to overwrite the .sample version of the file.
- shutil.copy( full_source_path, full_destination_path )
- # Only create the .loc file if it does not yet exist. We don't overwrite it in case it contains stuff proprietary to the local instance.
- if not os.path.exists( os.path.join( dest_path, copied_file ) ):
- shutil.copy( full_source_path, os.path.join( dest_path, copied_file ) )
def copy_sample_files( app, sample_files, tool_path=None, sample_files_copied=None, dest_path=None ):
"""
Copy all appropriate files to dest_path in the local Galaxy environment that have not already been copied. Those that have been copied
@@ -484,15 +361,6 @@
# Attempt to ensure we're copying an appropriate file.
if is_data_index_sample_file( filename ):
copy_sample_file( app, filename, dest_path=dest_path )
-def create_repo_info_dict( repository, owner, repository_clone_url, changeset_revision, ctx_rev, metadata ):
- repo_info_dict = {}
- repo_info_dict[ repository.name ] = ( repository.description,
- repository_clone_url,
- changeset_revision,
- ctx_rev,
- owner,
- metadata.get( 'tool_dependencies', None ) )
- return repo_info_dict
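For reference, a hypothetical repo_info_dict as built by this helper, keyed by repository name with a 6-tuple value:

    repo_info_dict = { 'filtering': ( 'Tools for filtering data',               # description
                                      'http://toolshed/repos/owner/filtering',  # clone URL
                                      'abc123def456',                           # changeset_revision
                                      '7',                                      # ctx_rev
                                      'owner',                                  # owner
                                      None ) }                                  # tool_dependencies metadata, if any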
def create_repository_dict_for_proprietary_datatypes( tool_shed, name, owner, installed_changeset_revision, tool_dicts, converter_path=None, display_path=None ):
return dict( tool_shed=tool_shed,
repository_name=name,
@@ -501,20 +369,6 @@
tool_dicts=tool_dicts,
converter_path=converter_path,
display_path=display_path )
-def create_or_update_repository_metadata( trans, id, repository, changeset_revision, metadata_dict ):
- downloadable = is_downloadable( metadata_dict )
- repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
- if repository_metadata:
- repository_metadata.metadata = metadata_dict
- repository_metadata.downloadable = downloadable
- else:
- repository_metadata = trans.model.RepositoryMetadata( repository_id=repository.id,
- changeset_revision=changeset_revision,
- metadata=metadata_dict,
- downloadable=downloadable )
- trans.sa_session.add( repository_metadata )
- trans.sa_session.flush()
- return repository_metadata
def create_or_update_tool_shed_repository( app, name, description, installed_changeset_revision, ctx_rev, repository_clone_url, metadata_dict,
status, current_changeset_revision=None, owner='', dist_to_shed=False ):
# The received value for dist_to_shed will be True if the InstallManager is installing a repository that contains tools or datatypes that used
@@ -618,15 +472,6 @@
"""Generate the URL for cloning a repository that has been installed into a Galaxy instance."""
tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository )
return url_join( tool_shed_url, 'repos', repository.owner, repository.name )
-def generate_clone_url_for_repository_in_tool_shed( trans, repository ):
- """Generate the URL for cloning a repository that is in the tool shed."""
- base_url = url_for( '/', qualified=True ).rstrip( '/' )
- if trans.user:
- protocol, base = base_url.split( '://' )
- username = '%s@' % trans.user.username
- return '%s://%s%s/repos/%s/%s' % ( protocol, username, base, repository.user.username, repository.name )
- else:
- return '%s/repos/%s/%s' % ( base_url, repository.user.username, repository.name )
def generate_datatypes_metadata( datatypes_config, metadata_dict ):
"""Update the received metadata_dict with information from the parsed datatypes_config."""
tree = ElementTree.parse( datatypes_config )
@@ -681,191 +526,6 @@
else:
tool_dependencies_dict[ 'set_environment' ] = [ requirements_dict ]
return tool_dependencies_dict
-def generate_metadata_for_changeset_revision( app, repository, repository_clone_url, shed_config_dict={}, relative_install_dir=None, repository_files_dir=None,
- resetting_all_metadata_on_repository=False, updating_installed_repository=False, persist=False ):
- """
- Generate metadata for a repository using its files on disk. To generate metadata for changeset revisions older than the repository tip,
- the repository will have been cloned to a temporary location and updated to a specified changeset revision to access that changeset revision's
- disk files, so the value of repository_files_dir will not always be repository.repo_path( app ) (it could be an absolute path to a temporary
- directory containing a clone). If it is an absolute path, the value of relative_install_dir must contain repository.repo_path( app ).
-
- The value of persist will be True when the installed repository contains a valid tool_data_table_conf.xml.sample file, in which case the entries
- should ultimately be persisted to the file referred to by app.config.shed_tool_data_table_config.
- """
- if updating_installed_repository:
- # Keep the original tool shed repository metadata if setting metadata on a repository installed into a local Galaxy instance for which
- # we have pulled updates.
- original_repository_metadata = repository.metadata
- else:
- original_repository_metadata = None
- readme_file_names = get_readme_file_names( repository.name )
- metadata_dict = { 'shed_config_filename': shed_config_dict.get( 'config_filename' ) }
- invalid_file_tups = []
- invalid_tool_configs = []
- tool_dependencies_config = None
- original_tool_data_path = app.config.tool_data_path
- original_tool_data_table_config_path = app.config.tool_data_table_config_path
- if resetting_all_metadata_on_repository:
- if not relative_install_dir:
- raise Exception( "The value of repository.repo_path( app ) must be sent when resetting all metadata on a repository." )
- # Keep track of the location where the repository is temporarily cloned so that we can strip the path when setting metadata. The value of
- # repository_files_dir is the full path to the temporary directory to which the repository was cloned.
- work_dir = repository_files_dir
- files_dir = repository_files_dir
- # Since we're working from a temporary directory, we can safely copy sample files included in the repository to the repository root.
- app.config.tool_data_path = repository_files_dir
- app.config.tool_data_table_config_path = repository_files_dir
- else:
- # Use a temporary working directory to copy all sample files.
- work_dir = tempfile.mkdtemp()
- # All other files are on disk in the repository's repo_path, which is the value of relative_install_dir.
- files_dir = relative_install_dir
- if shed_config_dict.get( 'tool_path' ):
- files_dir = os.path.join( shed_config_dict['tool_path'], files_dir )
- app.config.tool_data_path = work_dir
- app.config.tool_data_table_config_path = work_dir
- # Handle proprietary datatypes, if any.
- datatypes_config = get_config_from_disk( 'datatypes_conf.xml', files_dir )
- if datatypes_config:
- metadata_dict = generate_datatypes_metadata( datatypes_config, metadata_dict )
- # Get the relative path to all sample files included in the repository for storage in the repository's metadata.
- sample_file_metadata_paths, sample_file_copy_paths = get_sample_files_from_disk( repository_files_dir=files_dir,
- tool_path=shed_config_dict.get( 'tool_path' ),
- relative_install_dir=relative_install_dir,
- resetting_all_metadata_on_repository=resetting_all_metadata_on_repository )
- if sample_file_metadata_paths:
- metadata_dict[ 'sample_files' ] = sample_file_metadata_paths
- # Copy all sample files included in the repository to a single directory location so we can load tools that depend on them.
- for sample_file in sample_file_copy_paths:
- copy_sample_file( app, sample_file, dest_path=work_dir )
- # If the list of sample files includes a tool_data_table_conf.xml.sample file, load its table elements into memory.
- relative_path, filename = os.path.split( sample_file )
- if filename == 'tool_data_table_conf.xml.sample':
- new_table_elems = app.tool_data_tables.add_new_entries_from_config_file( config_filename=sample_file,
- tool_data_path=original_tool_data_path,
- shed_tool_data_table_config=app.config.shed_tool_data_table_config,
- persist=persist )
- for root, dirs, files in os.walk( files_dir ):
- if root.find( '.hg' ) < 0 and root.find( 'hgrc' ) < 0:
- if '.hg' in dirs:
- dirs.remove( '.hg' )
- for name in files:
- # See if we have a READ_ME file.
- if name.lower() in readme_file_names:
- if resetting_all_metadata_on_repository:
- full_path_to_readme = os.path.join( root, name )
- stripped_path_to_readme = full_path_to_readme.replace( work_dir, '' )
- if stripped_path_to_readme.startswith( '/' ):
- stripped_path_to_readme = stripped_path_to_readme[ 1: ]
- relative_path_to_readme = os.path.join( relative_install_dir, stripped_path_to_readme )
- else:
- relative_path_to_readme = os.path.join( root, name )
- if relative_install_dir and shed_config_dict.get( 'tool_path' ) and relative_path_to_readme.startswith( os.path.join( shed_config_dict.get( 'tool_path' ), relative_install_dir ) ):
- relative_path_to_readme = relative_path_to_readme[ len( shed_config_dict.get( 'tool_path' ) ) + 1: ]
- metadata_dict[ 'readme' ] = relative_path_to_readme
- # See if we have a tool config.
- elif name not in NOT_TOOL_CONFIGS and name.endswith( '.xml' ):
- full_path = str( os.path.abspath( os.path.join( root, name ) ) )
- if os.path.getsize( full_path ) > 0:
- if not ( check_binary( full_path ) or check_image( full_path ) or check_gzip( full_path )[ 0 ]
- or check_bz2( full_path )[ 0 ] or check_zip( full_path ) ):
- try:
- # Make sure we're looking at a tool config and not a display application config or something else.
- element_tree = util.parse_xml( full_path )
- element_tree_root = element_tree.getroot()
- is_tool = element_tree_root.tag == 'tool'
- except Exception, e:
- log.debug( "Error parsing %s, exception: %s" % ( full_path, str( e ) ) )
- is_tool = False
- if is_tool:
- tool, valid, error_message = load_tool_from_config( app, full_path )
- if tool is None:
- if not valid:
- invalid_file_tups.append( ( name, error_message ) )
- else:
- invalid_files_and_errors_tups = check_tool_input_params( app, files_dir, name, tool, sample_file_metadata_paths )
- can_set_metadata = True
- for tup in invalid_files_and_errors_tups:
- if name in tup:
- can_set_metadata = False
- invalid_tool_configs.append( name )
- break
- if can_set_metadata:
- if resetting_all_metadata_on_repository:
- full_path_to_tool_config = os.path.join( root, name )
- stripped_path_to_tool_config = full_path_to_tool_config.replace( work_dir, '' )
- if stripped_path_to_tool_config.startswith( '/' ):
- stripped_path_to_tool_config = stripped_path_to_tool_config[ 1: ]
- relative_path_to_tool_config = os.path.join( relative_install_dir, stripped_path_to_tool_config )
- else:
- relative_path_to_tool_config = os.path.join( root, name )
- if relative_install_dir and shed_config_dict.get( 'tool_path' ) and relative_path_to_tool_config.startswith( os.path.join( shed_config_dict.get( 'tool_path' ), relative_install_dir ) ):
- relative_path_to_tool_config = relative_path_to_tool_config[ len( shed_config_dict.get( 'tool_path' ) ) + 1: ]
- metadata_dict = generate_tool_metadata( relative_path_to_tool_config, tool, repository_clone_url, metadata_dict )
- else:
- for tup in invalid_files_and_errors_tups:
- invalid_file_tups.append( tup )
- # Find all exported workflows.
- elif name.endswith( '.ga' ):
- relative_path = os.path.join( root, name )
- if os.path.getsize( os.path.abspath( relative_path ) ) > 0:
- fp = open( relative_path, 'rb' )
- workflow_text = fp.read()
- fp.close()
- exported_workflow_dict = from_json_string( workflow_text )
- if 'a_galaxy_workflow' in exported_workflow_dict and exported_workflow_dict[ 'a_galaxy_workflow' ] == 'true':
- metadata_dict = generate_workflow_metadata( relative_path, exported_workflow_dict, metadata_dict )
- if 'tools' in metadata_dict:
- # This step must be done after metadata for tools has been defined.
- tool_dependencies_config = get_config_from_disk( 'tool_dependencies.xml', files_dir )
- if tool_dependencies_config:
- metadata_dict = generate_tool_dependency_metadata( app,
- repository,
- tool_dependencies_config,
- metadata_dict,
- original_repository_metadata=original_repository_metadata )
- if invalid_tool_configs:
- metadata_dict[ 'invalid_tools' ] = invalid_tool_configs
- # Reset the value of the app's tool_data_path and tool_data_table_config_path to their respective original values.
- app.config.tool_data_path = original_tool_data_path
- app.config.tool_data_table_config_path = original_tool_data_table_config_path
- return metadata_dict, invalid_file_tups
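A rough sketch of the metadata_dict this function can return, limited to the keys set above, with hypothetical values:

    metadata_dict = { 'shed_config_filename': 'shed_tool_conf.xml',
                      'sample_files': [ 'tool-data/blast.loc.sample' ],
                      'readme': 'README.txt',
                      'tools': [ { 'id': 'filter1', 'version': '1.1.0' } ],        # via generate_tool_metadata
                      'workflows': [ ( 'flow.ga', { 'a_galaxy_workflow': 'true' } ) ],
                      'invalid_tools': [ 'broken_tool.xml' ] }
    invalid_file_tups = [ ( 'broken_tool.xml', 'error message text' ) ]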
-def generate_message_for_invalid_tools( trans, invalid_file_tups, repository, metadata_dict, as_html=True, displaying_invalid_tool=False ):
- if as_html:
- new_line = '<br/>'
- bold_start = '<b>'
- bold_end = '</b>'
- else:
- new_line = '\n'
- bold_start = ''
- bold_end = ''
- message = ''
- if not displaying_invalid_tool:
- if metadata_dict:
- message += "Metadata was defined for some items in revision '%s'. " % str( repository.tip( trans.app ) )
- message += "Correct the following problems if necessary and reset metadata.%s" % new_line
- else:
- message += "Metadata cannot be defined for revision '%s' so this revision cannot be automatically " % str( repository.tip( trans.app ) )
- message += "installed into a local Galaxy instance. Correct the following problems and reset metadata.%s" % new_line
- for itc_tup in invalid_file_tups:
- tool_file, exception_msg = itc_tup
- if exception_msg.find( 'No such file or directory' ) >= 0:
- exception_items = exception_msg.split()
- missing_file_items = exception_items[ 7 ].split( '/' )
- missing_file = missing_file_items[ -1 ].rstrip( '\'' )
- if missing_file.endswith( '.loc' ):
- sample_ext = '%s.sample' % missing_file
- else:
- sample_ext = missing_file
- correction_msg = "This file refers to a missing file %s%s%s. " % ( bold_start, str( missing_file ), bold_end )
- correction_msg += "Upload a file named %s%s%s to the repository to correct this error." % ( bold_start, sample_ext, bold_end )
- else:
- if as_html:
- correction_msg = exception_msg
- else:
- correction_msg = exception_msg.replace( '<br/>', new_line ).replace( '<b>', bold_start ).replace( '</b>', bold_end )
- message += "%s%s%s - %s%s" % ( bold_start, tool_file, bold_end, correction_msg, new_line )
- return message
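The hard-coded index of 7 above assumes the exception text follows the standard IOError layout; a hypothetical message and how it splits:

    exception_msg = "[Errno 2] No such file or directory: '/tmp/repo/tool-data/blast.loc'"
    items = exception_msg.split()                                # items[ 7 ] is "'/tmp/repo/tool-data/blast.loc'"
    missing_file = items[ 7 ].split( '/' )[ -1 ].rstrip( "'" )   # blast.loc
    sample_ext = '%s.sample' % missing_file                      # blast.loc.sample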
def generate_package_dependency_metadata( elem, tool_dependencies_dict ):
"""The value of package_name must match the value of the "package" type in the tool config's <requirements> tag set."""
requirements_dict = {}
@@ -1155,13 +815,6 @@
else:
metadata_dict[ 'workflows' ] = [ ( relative_path, exported_workflow_dict ) ]
return metadata_dict
-def get_changectx_for_changeset( repo, changeset_revision, **kwd ):
- """Retrieve a specified changectx from a repository"""
- for changeset in repo.changelog:
- ctx = repo.changectx( changeset )
- if str( ctx ) == changeset_revision:
- return ctx
- return None
def get_config( config_file, repo, ctx, dir ):
"""Return the latest version of config_filename from the repository manifest."""
config_file = strip_path( config_file )
@@ -1172,22 +825,6 @@
if ctx_file_name == config_file:
return get_named_tmpfile_from_ctx( changeset_ctx, ctx_file, dir )
return None
-def get_config_from_disk( config_file, relative_install_dir ):
- for root, dirs, files in os.walk( relative_install_dir ):
- if root.find( '.hg' ) < 0:
- for name in files:
- if name == config_file:
- return os.path.abspath( os.path.join( root, name ) )
- return None
-def get_configured_ui():
- # Configure any desired ui settings.
- _ui = ui.ui()
- # The following will suppress all messages. This is
- # the same as adding the following setting to the repo's
- # hgrc file [ui] section:
- # quiet = True
- _ui.setconfig( 'ui', 'quiet', True )
- return _ui
def get_converter_and_display_paths( registration_elem, relative_install_dir ):
"""Find the relative path to data type converters and display applications included in installed tool shed repositories."""
converter_path = None
@@ -1247,33 +884,6 @@
ctx_rev = response.read()
response.close()
return ctx_rev
-def get_file_context_from_ctx( ctx, filename ):
- # We have to be careful in determining if we found the correct file because multiple files with the same name may be in different directories
- # within ctx if the files were moved within the change set. For example, in the following ctx.files() list, the former may have been moved to
- # the latter: ['tmap_wrapper_0.0.19/tool_data_table_conf.xml.sample', 'tmap_wrapper_0.3.3/tool_data_table_conf.xml.sample']. Another scenario
- # is that the file has been deleted.
- deleted = False
- filename = strip_path( filename )
- for ctx_file in ctx.files():
- ctx_file_name = strip_path( ctx_file )
- if filename == ctx_file_name:
- try:
- # If the file was moved, its destination will be returned here.
- fctx = ctx[ ctx_file ]
- return fctx
- except LookupError, e:
- # Set deleted for now, and continue looking in case the file was moved instead of deleted.
- deleted = True
- if deleted:
- return 'DELETED'
- return None
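Callers must distinguish three possible returns from this helper; a minimal hypothetical sketch:

    fctx = get_file_context_from_ctx( ctx, 'tool_data_table_conf.xml.sample' )
    if fctx == 'DELETED':
        pass                # the file was deleted in this changeset
    elif fctx is None:
        pass                # the file is not present in this changeset at all
    else:
        data = fctx.data()  # the file's contents at this changeset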
-def get_file_from_changeset_revision( app, repository, repo_files_dir, changeset_revision, file_name, dir ):
- """Return file_name from the received changeset_revision of the repository manifest."""
- stripped_file_name = strip_path( file_name )
- repo = hg.repository( get_configured_ui(), repo_files_dir )
- ctx = get_changectx_for_changeset( repo, changeset_revision )
- named_tmp_file = get_named_tmpfile_from_ctx( ctx, file_name, dir )
- return named_tmp_file
def get_installed_tool_shed_repository( trans, id ):
"""Get a repository on the Galaxy side from the database via id"""
return trans.sa_session.query( trans.model.ToolShedRepository ).get( trans.security.decode_id( id ) )
@@ -1309,63 +919,6 @@
fh.write( fctx.data() )
fh.close()
return sample_files, deleted_sample_files
-def get_named_tmpfile_from_ctx( ctx, filename, dir ):
- filename = strip_path( filename )
- for ctx_file in ctx.files():
- ctx_file_name = strip_path( ctx_file )
- if filename == ctx_file_name:
- try:
- # If the file was moved, its destination file contents will be returned here.
- fctx = ctx[ ctx_file ]
- except LookupError, e:
- # Continue looking in case the file was moved.
- fctx = None
- continue
- if fctx:
- fh = tempfile.NamedTemporaryFile( 'wb', dir=dir )
- tmp_filename = fh.name
- fh.close()
- fh = open( tmp_filename, 'wb' )
- fh.write( fctx.data() )
- fh.close()
- return tmp_filename
- return None
-def get_parent_id( trans, id, old_id, version, guid, changeset_revisions ):
- parent_id = None
- # Compare from most recent to oldest.
- changeset_revisions.reverse()
- for changeset_revision in changeset_revisions:
- repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
- metadata = repository_metadata.metadata
- tools_dicts = metadata.get( 'tools', [] )
- for tool_dict in tools_dicts:
- if tool_dict[ 'guid' ] == guid:
- # The tool has not changed between the compared changeset revisions.
- continue
- if tool_dict[ 'id' ] == old_id and tool_dict[ 'version' ] != version:
- # The tool version is different, so we've found the parent.
- return tool_dict[ 'guid' ]
- if parent_id is None:
- # The tool did not change through all of the changeset revisions.
- return old_id
-def get_repository_file_contents( file_path ):
- if is_gzip( file_path ):
- to_html = to_html_str( '\ngzip compressed file\n' )
- elif is_bz2( file_path ):
- to_html = to_html_str( '\nbz2 compressed file\n' )
- elif check_zip( file_path ):
- to_html = to_html_str( '\nzip compressed file\n' )
- elif check_binary( file_path ):
- to_html = to_html_str( '\nBinary file\n' )
- else:
- to_html = ''
- for i, line in enumerate( open( file_path ) ):
- to_html = '%s%s' % ( to_html, to_html_str( line ) )
- if len( to_html ) > MAX_CONTENT_SIZE:
- large_str = '\nFile contents truncated because file size is larger than maximum viewing size of %s\n' % util.nice_size( MAX_CONTENT_SIZE )
- to_html = '%s%s' % ( to_html, to_html_str( large_str ) )
- break
- return to_html
def get_repository_files( trans, folder_path ):
contents = []
for item in os.listdir( folder_path ):
@@ -1379,28 +932,6 @@
if contents:
contents.sort()
return contents
-def get_repository_in_tool_shed( trans, id ):
- """Get a repository on the tool shed side from the database via id"""
- return trans.sa_session.query( trans.model.Repository ).get( trans.security.decode_id( id ) )
-def get_repository_metadata_by_changeset_revision( trans, id, changeset_revision ):
- """Get metadata for a specified repository change set from the database"""
- # Make sure there are no duplicate records, and return the single unique record for the changeset_revision. Duplicate records were somehow
- # created in the past. The cause of this issue has been resolved, but we'll leave this method as is for a while longer to ensure all duplicate
- # records are removed.
- all_metadata_records = trans.sa_session.query( trans.model.RepositoryMetadata ) \
- .filter( and_( trans.model.RepositoryMetadata.table.c.repository_id == trans.security.decode_id( id ),
- trans.model.RepositoryMetadata.table.c.changeset_revision == changeset_revision ) ) \
- .order_by( trans.model.RepositoryMetadata.table.c.update_time.desc() ) \
- .all()
- if len( all_metadata_records ) > 1:
- # Delete all records older than the last one updated.
- for repository_metadata in all_metadata_records[ 1: ]:
- trans.sa_session.delete( repository_metadata )
- trans.sa_session.flush()
- return all_metadata_records[ 0 ]
- elif all_metadata_records:
- return all_metadata_records[ 0 ]
- return None
def get_repository_owner( cleaned_repository_url ):
items = cleaned_repository_url.split( 'repos' )
repo_path = items[ 1 ]
@@ -1667,55 +1198,6 @@
repository_tool = app.toolbox.load_tool( os.path.join( tool_path, tup_path ), guid=guid )
repository_tools_tups[ index ] = ( tup_path, guid, repository_tool )
return repository_tools_tups, sample_files_copied
-def handle_sample_files_and_load_tool_from_disk( trans, repo_files_dir, tool_config_filepath, work_dir ):
- # Copy all sample files from disk to a temporary directory since the sample files may be in multiple directories.
- message = ''
- sample_files = copy_disk_sample_files_to_dir( trans, repo_files_dir, work_dir )
- if sample_files:
- if 'tool_data_table_conf.xml.sample' in sample_files:
- # Load entries into the tool_data_tables if the tool requires them.
- tool_data_table_config = os.path.join( work_dir, 'tool_data_table_conf.xml' )
- error, message = handle_sample_tool_data_table_conf_file( trans.app, tool_data_table_config )
- tool, valid, message2 = load_tool_from_config( trans.app, tool_config_filepath )
- message = concat_messages( message, message2 )
- return tool, valid, message, sample_files
-def handle_sample_files_and_load_tool_from_tmp_config( trans, repo, changeset_revision, tool_config_filename, work_dir ):
- tool = None
- message = ''
- ctx = get_changectx_for_changeset( repo, changeset_revision )
- # We're not currently doing anything with the returned list of deleted_sample_files here. It is intended to help handle sample files that are in
- # the manifest, but have been deleted from disk.
- sample_files, deleted_sample_files = get_list_of_copied_sample_files( repo, ctx, dir=work_dir )
- if sample_files:
- trans.app.config.tool_data_path = work_dir
- if 'tool_data_table_conf.xml.sample' in sample_files:
- # Load entries into the tool_data_tables if the tool requires them.
- tool_data_table_config = os.path.join( work_dir, 'tool_data_table_conf.xml' )
- if tool_data_table_config:
- error, message = handle_sample_tool_data_table_conf_file( trans.app, tool_data_table_config )
- if error:
- log.debug( message )
- manifest_ctx, ctx_file = get_ctx_file_path_from_manifest( tool_config_filename, repo, changeset_revision )
- if manifest_ctx and ctx_file:
- tool, message2 = load_tool_from_tmp_config( trans, repo, manifest_ctx, ctx_file, work_dir )
- message = concat_messages( message, message2 )
- return tool, message, sample_files
-def handle_sample_tool_data_table_conf_file( app, filename, persist=False ):
- """
- Parse the incoming filename and add new entries to the in-memory app.tool_data_tables dictionary. If persist is True (should only occur
- if call is from the Galaxy side, not the tool shed), the new entries will be appended to Galaxy's shed_tool_data_table_conf.xml file on disk.
- """
- error = False
- message = ''
- try:
- new_table_elems = app.tool_data_tables.add_new_entries_from_config_file( config_filename=filename,
- tool_data_path=app.config.tool_data_path,
- shed_tool_data_table_config=app.config.shed_tool_data_table_config,
- persist=persist )
- except Exception, e:
- message = str( e )
- error = True
- return error, message
def handle_tool_dependencies( app, tool_shed_repository, tool_dependencies_config, tool_dependencies ):
"""
Install and build tool dependencies defined in the tool_dependencies_config. This config's tag sets can currently refer to installation
@@ -1800,8 +1282,6 @@
return False
# Default to copying the file if none of the above are true.
return True
-def is_downloadable( metadata_dict ):
- return 'datatypes' in metadata_dict or 'tools' in metadata_dict or 'workflows' in metadata_dict
def load_installed_datatype_converters( app, installed_repository_dict, deactivate=False ):
# Load or deactivate proprietary datatype converters
app.datatypes_registry.load_datatype_converters( app.toolbox, installed_repository_dict=installed_repository_dict, deactivate=deactivate )
@@ -1825,22 +1305,6 @@
def load_installed_display_applications( app, installed_repository_dict, deactivate=False ):
# Load or deactivate proprietary datatype display applications
app.datatypes_registry.load_display_applications( installed_repository_dict=installed_repository_dict, deactivate=deactivate )
-def load_tool_from_config( app, full_path ):
- try:
- tool = app.toolbox.load_tool( full_path )
- valid = True
- error_message = None
- except KeyError, e:
- tool = None
- valid = False
- error_message = 'This file requires an entry for "%s" in the tool_data_table_conf.xml file. Upload a file ' % str( e )
- error_message += 'named tool_data_table_conf.xml.sample to the repository that includes the required entry to correct '
- error_message += 'this error. '
- except Exception, e:
- tool = None
- valid = False
- error_message = str( e )
- return tool, valid, error_message
def load_tool_from_tmp_config( trans, repo, ctx, ctx_file, work_dir ):
tool = None
message = ''
@@ -1866,27 +1330,6 @@
except:
pass
return tool, message
-def open_repository_files_folder( trans, folder_path ):
- try:
- files_list = get_repository_files( trans, folder_path )
- except OSError, e:
- if str( e ).find( 'No such file or directory' ) >= 0:
- # We have a repository with no contents.
- return []
- folder_contents = []
- for filename in files_list:
- is_folder = False
- if filename and filename[-1] == os.sep:
- is_folder = True
- if filename:
- full_path = os.path.join( folder_path, filename )
- node = { "title": filename,
- "isFolder": is_folder,
- "isLazy": is_folder,
- "tooltip": full_path,
- "key": full_path }
- folder_contents.append( node )
- return folder_contents
def panel_entry_per_tool( tool_section_dict ):
# Return True if tool_section_dict looks like this.
# {<Tool guid> : [{ tool_config : <tool_config_file>, id: <ToolSection id>, version : <ToolSection version>, name : <ToolSection name>}]}
@@ -1906,12 +1349,6 @@
repo,
source=repository_clone_url,
rev=[ ctx_rev ] )
-def remove_dir( dir ):
- if os.path.exists( dir ):
- try:
- shutil.rmtree( dir )
- except:
- pass
def remove_from_shed_tool_config( trans, shed_tool_conf_dict, guids_to_remove ):
# A tool shed repository is being uninstalled so change the shed_tool_conf file. Parse the config file to generate the entire list
# of config_elems instead of using the in-memory list since it will be a subset of the entire list if one or more repositories have
@@ -2113,209 +1550,6 @@
else:
log.debug( 'Error locating installation directory for repository %s.' % repository.name )
return invalid_file_tups, metadata_dict
-def reset_all_metadata_on_repository_in_tool_shed( trans, id ):
- """Reset all metadata on a single repository in a tool shed."""
- def reset_all_tool_versions( trans, id, repo ):
- changeset_revisions = []
- for changeset in repo.changelog:
- changeset_revision = str( repo.changectx( changeset ) )
- repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
- if repository_metadata:
- metadata = repository_metadata.metadata
- if metadata:
- if metadata.get( 'tools', None ):
- changeset_revisions.append( changeset_revision )
- # The list of changeset_revisions is now filtered to contain only those that are downloadable and contain tools.
- # If a repository includes tools, build a dictionary of { 'tool id' : 'parent tool id' } pairs for each tool in each changeset revision.
- for index, changeset_revision in enumerate( changeset_revisions ):
- tool_versions_dict = {}
- repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
- metadata = repository_metadata.metadata
- tool_dicts = metadata[ 'tools' ]
- if index == 0:
- # The first changeset_revision is a special case because it will have no ancestor changeset_revisions in which to match tools.
- # The parent tool id for tools in the first changeset_revision will be the "old_id" in the tool config.
- for tool_dict in tool_dicts:
- tool_versions_dict[ tool_dict[ 'guid' ] ] = tool_dict[ 'id' ]
- else:
- for tool_dict in tool_dicts:
- parent_id = get_parent_id( trans,
- id,
- tool_dict[ 'id' ],
- tool_dict[ 'version' ],
- tool_dict[ 'guid' ],
- changeset_revisions[ 0:index ] )
- tool_versions_dict[ tool_dict[ 'guid' ] ] = parent_id
- if tool_versions_dict:
- repository_metadata.tool_versions = tool_versions_dict
- trans.sa_session.add( repository_metadata )
- trans.sa_session.flush()
- repository = get_repository_in_tool_shed( trans, id )
- log.debug( "Resetting all metadata on repository: %s" % repository.name )
- repo_dir = repository.repo_path( trans.app )
- repo = hg.repository( get_configured_ui(), repo_dir )
- repository_clone_url = generate_clone_url_for_repository_in_tool_shed( trans, repository )
- # The list of changeset_revisions refers to repository_metadata records that have been created or updated. When the following loop
- # completes, we'll delete all repository_metadata records for this repository that do not have a changeset_revision value in this list.
- changeset_revisions = []
- # When a new repository_metadata record is created, it always uses the values of metadata_changeset_revision and metadata_dict.
- metadata_changeset_revision = None
- metadata_dict = None
- ancestor_changeset_revision = None
- ancestor_metadata_dict = None
- invalid_file_tups = []
- home_dir = os.getcwd()
- for changeset in repo.changelog:
- work_dir = tempfile.mkdtemp()
- current_changeset_revision = str( repo.changectx( changeset ) )
- ctx = repo.changectx( changeset )
- log.debug( "Cloning repository revision: %s", str( ctx.rev() ) )
- cloned_ok, error_message = clone_repository( repository_clone_url, work_dir, str( ctx.rev() ) )
- if cloned_ok:
- log.debug( "Generating metadata for changset revision: %s", str( ctx.rev() ) )
- current_metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( app=trans.app,
- repository=repository,
- repository_clone_url=repository_clone_url,
- relative_install_dir=repo_dir,
- repository_files_dir=work_dir,
- resetting_all_metadata_on_repository=True,
- updating_installed_repository=False,
- persist=False )
- if current_metadata_dict:
- if not metadata_changeset_revision and not metadata_dict:
- # We're at the first change set in the change log.
- metadata_changeset_revision = current_changeset_revision
- metadata_dict = current_metadata_dict
- if ancestor_changeset_revision:
- # Compare metadata from ancestor and current. The value of comparison will be one of:
- # 'no metadata' - no metadata for either ancestor or current, so continue from current
- # 'equal' - ancestor metadata is equivalent to current metadata, so continue from current
- # 'subset' - ancestor metadata is a subset of current metadata, so continue from current
- # 'not equal and not subset' - ancestor metadata is neither equal to nor a subset of current metadata, so persist ancestor metadata.
- comparison = compare_changeset_revisions( ancestor_changeset_revision,
- ancestor_metadata_dict,
- current_changeset_revision,
- current_metadata_dict )
- if comparison in [ 'no metadata', 'equal', 'subset' ]:
- ancestor_changeset_revision = current_changeset_revision
- ancestor_metadata_dict = current_metadata_dict
- elif comparison == 'not equal and not subset':
- metadata_changeset_revision = ancestor_changeset_revision
- metadata_dict = ancestor_metadata_dict
- repository_metadata = create_or_update_repository_metadata( trans, id, repository, metadata_changeset_revision, metadata_dict )
- changeset_revisions.append( metadata_changeset_revision )
- ancestor_changeset_revision = current_changeset_revision
- ancestor_metadata_dict = current_metadata_dict
- else:
- # We're at the beginning of the change log.
- ancestor_changeset_revision = current_changeset_revision
- ancestor_metadata_dict = current_metadata_dict
- if not ctx.children():
- metadata_changeset_revision = current_changeset_revision
- metadata_dict = current_metadata_dict
- # We're at the end of the change log.
- repository_metadata = create_or_update_repository_metadata( trans, id, repository, metadata_changeset_revision, metadata_dict )
- changeset_revisions.append( metadata_changeset_revision )
- ancestor_changeset_revision = None
- ancestor_metadata_dict = None
- elif ancestor_metadata_dict:
- # We reach here only if current_metadata_dict is empty and ancestor_metadata_dict is not.
- if not ctx.children():
- # We're at the end of the change log.
- repository_metadata = create_or_update_repository_metadata( trans, id, repository, metadata_changeset_revision, metadata_dict )
- changeset_revisions.append( metadata_changeset_revision )
- ancestor_changeset_revision = None
- ancestor_metadata_dict = None
- remove_dir( work_dir )
- # Delete all repository_metadata records for this repository that do not have a changeset_revision value in changeset_revisions.
- clean_repository_metadata( trans, id, changeset_revisions )
- # Set tool version information for all downloadable changeset revisions. Get the list of changeset revisions from the changelog.
- reset_all_tool_versions( trans, id, repo )
- # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
- reset_tool_data_tables( trans.app )
- return invalid_file_tups, metadata_dict
-def reset_metadata_on_selected_repositories( trans, **kwd ):
- # This method is called from both Galaxy and the Tool Shed, so the cntrller param is required.
- repository_ids = util.listify( kwd.get( 'repository_ids', None ) )
- CONTROLLER = kwd[ 'CONTROLLER' ]
- message = ''
- status = 'done'
- if repository_ids:
- successful_count = 0
- unsuccessful_count = 0
- for repository_id in repository_ids:
- try:
- if CONTROLLER == 'TOOL_SHED_ADMIN_CONTROLLER':
- repository = get_repository_in_tool_shed( trans, repository_id )
- invalid_file_tups, metadata_dict = reset_all_metadata_on_repository_in_tool_shed( trans, repository_id )
- elif CONTROLLER == 'GALAXY_ADMIN_TOOL_SHED_CONTROLLER':
- repository = get_installed_tool_shed_repository( trans, repository_id )
- invalid_file_tups, metadata_dict = reset_all_metadata_on_installed_repository( trans, repository_id )
- if invalid_file_tups:
- message = generate_message_for_invalid_tools( trans, invalid_file_tups, repository, None, as_html=False )
- log.debug( message )
- unsuccessful_count += 1
- else:
- log.debug( "Successfully reset metadata on repository %s" % repository.name )
- successful_count += 1
- except Exception, e:
- log.debug( "Error attempting to reset metadata on repository '%s': %s" % ( repository.name, str( e ) ) )
- unsuccessful_count += 1
- message = "Successfully reset metadata on %d %s. " % ( successful_count, inflector.cond_plural( successful_count, "repository" ) )
- if unsuccessful_count:
- message += "Error setting metadata on %d %s - see the paster log for details. " % ( unsuccessful_count,
- inflector.cond_plural( unsuccessful_count, "repository" ) )
- else:
- message = 'Select at least one repository on which to reset all metadata.'
- status = 'error'
- return message, status
-def reset_tool_data_tables( app ):
- # Reset the tool_data_tables to an empty dictionary.
- app.tool_data_tables.data_tables = {}
-def reversed_lower_upper_bounded_changelog( repo, excluded_lower_bounds_changeset_revision, included_upper_bounds_changeset_revision ):
- """
- Return a reversed list of changesets in the repository changelog after the excluded_lower_bounds_changeset_revision, but up to and
- including the included_upper_bounds_changeset_revision. The value of excluded_lower_bounds_changeset_revision will be the value of
- INITIAL_CHANGELOG_HASH if no valid changesets exist before included_upper_bounds_changeset_revision.
- """
- # To set excluded_lower_bounds_changeset_revision, calling methods should do the following, where the value of changeset_revision
- # is a downloadable changeset_revision.
- # excluded_lower_bounds_changeset_revision = get_previous_downloadable_changset_revision( repository, repo, changeset_revision )
- if excluded_lower_bounds_changeset_revision == INITIAL_CHANGELOG_HASH:
- appending_started = True
- else:
- appending_started = False
- reversed_changelog = []
- for changeset in repo.changelog:
- changeset_hash = str( repo.changectx( changeset ) )
- if appending_started:
- reversed_changelog.insert( 0, changeset )
- if changeset_hash == excluded_lower_bounds_changeset_revision and not appending_started:
- appending_started = True
- if changeset_hash == included_upper_bounds_changeset_revision:
- break
- return reversed_changelog
-def reversed_upper_bounded_changelog( repo, included_upper_bounds_changeset_revision ):
- return reversed_lower_upper_bounded_changelog( repo, INITIAL_CHANGELOG_HASH, included_upper_bounds_changeset_revision )
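A worked example of the bounds, assuming a hypothetical changelog c0..c4 (oldest to newest):

    # reversed_lower_upper_bounded_changelog( repo, 'c1', 'c3' ) starts appending
    # only after passing c1 and stops at c3, inserting each changeset at the front,
    # so it returns the revisions newest first: [ c3, c2 ].
    # reversed_upper_bounded_changelog( repo, 'c3' ) uses INITIAL_CHANGELOG_HASH as
    # the excluded lower bound and therefore returns [ c3, c2, c1, c0 ].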
-def strip_path( fpath ):
- if not fpath:
- return fpath
- try:
- file_path, file_name = os.path.split( fpath )
- except:
- file_name = fpath
- return file_name
-def to_html_escaped( text ):
- """Translates the characters in text to html values"""
- translated = []
- for c in text:
- if c in [ '\r\n', '\n', ' ', '\t' ] or c in VALID_CHARS:
- translated.append( c )
- elif c in MAPPED_CHARS:
- translated.append( MAPPED_CHARS[ c ] )
- else:
- translated.append( '' )
- return ''.join( translated )
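With the entity map above, a hypothetical call:

    to_html_escaped( '<b>5 & 6</b>' )   # returns '&lt;b&gt;5 &amp; 6&lt;/b&gt;'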
def to_html_str( text ):
"""Translates the characters in text to an html string"""
translated = []
@@ -2443,32 +1677,8 @@
elem = guid_to_tool_elem_dict[ guid ]
config_elems.append( elem )
config_elems_to_xml_file( app, config_elems, shed_tool_conf, tool_path )
-def update_repository( repo, ctx_rev=None ):
- """
- Update the cloned repository to changeset_revision. It is critical that the installed repository is updated to the desired
- changeset_revision before metadata is set because the process for setting metadata uses the repository files on disk.
- """
- # TODO: We may have files on disk in the repo directory that aren't being tracked, so they must be removed.
- # The codes used to show the status of files are as follows.
- # M = modified
- # A = added
- # R = removed
- # C = clean
- # ! = deleted, but still tracked
- # ? = not tracked
- # I = ignored
- # It would be nice if we could use mercurial's purge extension to remove untracked files. The problem is that
- # purging is not supported by the mercurial API. See the deprecated update_for_browsing() method in common.py.
- commands.update( get_configured_ui(),
- repo,
- rev=ctx_rev )
def update_tool_shed_repository_status( app, tool_shed_repository, status ):
sa_session = app.model.context.current
tool_shed_repository.status = status
sa_session.add( tool_shed_repository )
sa_session.flush()
-def url_join( *args ):
- parts = []
- for arg in args:
- parts.append( arg.strip( '/' ) )
- return '/'.join( parts )
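A hypothetical call, showing why each part is stripped of slashes before joining:

    url_join( 'http://toolshed.example.org/', '/repos/', 'owner', 'filtering' )
    # returns 'http://toolshed.example.org/repos/owner/filtering'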
diff -r 0a1db986221074e72131afd6d16320357843c5f8 -r e1895e14176e80bbeccb918dba03723b07cc8112 lib/galaxy/util/shed_util_common.py
--- /dev/null
+++ b/lib/galaxy/util/shed_util_common.py
@@ -0,0 +1,784 @@
+import os, shutil, string, tempfile, logging
+from galaxy import util
+from galaxy.tools import parameters
+from galaxy.util import inflector
+from galaxy.web import url_for
+from galaxy.web.form_builder import SelectField
+from galaxy.datatypes.checkers import *
+from galaxy.model.orm import *
+
+from galaxy import eggs
+import pkg_resources
+
+pkg_resources.require( 'mercurial' )
+from mercurial import hg, ui, commands
+
+log = logging.getLogger( __name__ )
+
+INITIAL_CHANGELOG_HASH = '000000000000'
+# Characters that must be html escaped
+MAPPED_CHARS = { '>' :'&gt;',
+ '<' :'&lt;',
+ '"' : '&quot;',
+ '&' : '&amp;',
+ '\'' : '&apos;' }
+MAX_CONTENT_SIZE = 32768
+NOT_TOOL_CONFIGS = [ 'datatypes_conf.xml', 'tool_dependencies.xml' ]
+GALAXY_ADMIN_TOOL_SHED_CONTROLLER = 'GALAXY_ADMIN_TOOL_SHED_CONTROLLER'
+TOOL_SHED_ADMIN_CONTROLLER = 'TOOL_SHED_ADMIN_CONTROLLER'
+VALID_CHARS = set( string.letters + string.digits + "'\"-=_.()/+*^,:?!#[]%\\$@;{}" )
+
+def build_repository_ids_select_field( trans, cntrller, name='repository_ids', multiple=True, display='checkboxes' ):
+ """Method called from both Galaxy and the Tool Shed to generate the current list of repositories for resetting metadata."""
+ repositories_select_field = SelectField( name=name, multiple=multiple, display=display )
+ if cntrller == TOOL_SHED_ADMIN_CONTROLLER:
+ for repository in trans.sa_session.query( trans.model.Repository ) \
+ .filter( trans.model.Repository.table.c.deleted == False ) \
+ .order_by( trans.model.Repository.table.c.name,
+ trans.model.Repository.table.c.user_id ):
+ owner = repository.user.username
+ option_label = '%s (%s)' % ( repository.name, owner )
+ option_value = '%s' % trans.security.encode_id( repository.id )
+ repositories_select_field.add_option( option_label, option_value )
+ elif cntrller == GALAXY_ADMIN_TOOL_SHED_CONTROLLER:
+ for repository in trans.sa_session.query( trans.model.ToolShedRepository ) \
+ .filter( trans.model.ToolShedRepository.table.c.uninstalled == False ) \
+ .order_by( trans.model.ToolShedRepository.table.c.name,
+ trans.model.ToolShedRepository.table.c.owner ):
+ option_label = '%s (%s)' % ( repository.name, repository.owner )
+ option_value = trans.security.encode_id( repository.id )
+ repositories_select_field.add_option( option_label, option_value )
+ return repositories_select_field
+def check_tool_input_params( app, repo_dir, tool_config_name, tool, sample_files ):
+ """
+ Check all of the tool's input parameters, looking for any that are dynamically generated using external data files to make
+ sure the files exist.
+ """
+ invalid_files_and_errors_tups = []
+ correction_msg = ''
+ for input_param in tool.input_params:
+ if isinstance( input_param, parameters.basic.SelectToolParameter ) and input_param.is_dynamic:
+ # If the tool refers to .loc files or requires an entry in the tool_data_table_conf.xml, make sure all requirements exist.
+ options = input_param.dynamic_options or input_param.options
+ if options:
+ if options.tool_data_table or options.missing_tool_data_table_name:
+ # Make sure the repository contains a tool_data_table_conf.xml.sample file.
+ sample_tool_data_table_conf = get_config_from_disk( 'tool_data_table_conf.xml.sample', repo_dir )
+ if sample_tool_data_table_conf:
+ error, correction_msg = handle_sample_tool_data_table_conf_file( app, sample_tool_data_table_conf )
+ if error:
+ invalid_files_and_errors_tups.append( ( 'tool_data_table_conf.xml.sample', correction_msg ) )
+ else:
+ options.missing_tool_data_table_name = None
+ else:
+ correction_msg = "This file requires an entry in the tool_data_table_conf.xml file. Upload a file named tool_data_table_conf.xml.sample "
+ correction_msg += "to the repository that includes the required entry to correct this error.<br/>"
+ invalid_files_and_errors_tups.append( ( tool_config_name, correction_msg ) )
+ if options.index_file or options.missing_index_file:
+ # Make sure the repository contains the required xxx.loc.sample file.
+ index_file = options.index_file or options.missing_index_file
+ index_file_name = strip_path( index_file )
+ sample_found = False
+ for sample_file in sample_files:
+ sample_file_name = strip_path( sample_file )
+ if sample_file_name == '%s.sample' % index_file_name:
+ options.index_file = index_file_name
+ options.missing_index_file = None
+ if options.tool_data_table:
+ options.tool_data_table.missing_index_file = None
+ sample_found = True
+ break
+ if not sample_found:
+ correction_msg = "This file refers to a file named <b>%s</b>. " % str( index_file )
+ correction_msg += "Upload a file named <b>%s.sample</b> to the repository to correct this error." % str( index_file_name )
+ invalid_files_and_errors_tups.append( ( tool_config_name, correction_msg ) )
+ return invalid_files_and_errors_tups
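
For illustration, each entry in the returned list pairs a file name with a correction message; the values below are hypothetical:

    invalid_files_and_errors_tups = [
        ( 'tmap_wrapper.xml',
          'This file refers to a file named <b>tmap_indexes.loc</b>. '
          'Upload a file named <b>tmap_indexes.loc.sample</b> to the repository to correct this error.' ) ]
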
+def clone_repository( repository_clone_url, repository_file_dir, ctx_rev ):
+ """Clone the repository up to the specified changeset_revision. No subsequent revisions will be present in the cloned repository."""
+ try:
+ commands.clone( get_configured_ui(),
+ str( repository_clone_url ),
+ dest=str( repository_file_dir ),
+ pull=True,
+ noupdate=False,
+ rev=util.listify( str( ctx_rev ) ) )
+ return True, None
+ except Exception, e:
+ error_message = 'Error cloning repository: %s' % str( e )
+ log.debug( error_message )
+ return False, error_message
+def concat_messages( msg1, msg2 ):
+ if msg1:
+ if msg2:
+ message = '%s %s' % ( msg1, msg2 )
+ else:
+ message = msg1
+ elif msg2:
+ message = msg2
+ else:
+ message = ''
+ return message
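
A quick sketch of the joining behavior above; concat_messages always returns a string, never None:

    assert concat_messages( 'Bad tool config.', 'Missing sample file.' ) == 'Bad tool config. Missing sample file.'
    assert concat_messages( 'Bad tool config.', '' ) == 'Bad tool config.'
    assert concat_messages( '', '' ) == ''
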
+def copy_sample_file( app, filename, dest_path=None ):
+ """Copy xxx.sample to dest_path/xxx.sample and dest_path/xxx. The default value for dest_path is ~/tool-data."""
+ if dest_path is None:
+ dest_path = os.path.abspath( app.config.tool_data_path )
+ sample_file_name = strip_path( filename )
+ copied_file = sample_file_name.replace( '.sample', '' )
+ full_source_path = os.path.abspath( filename )
+ full_destination_path = os.path.join( dest_path, sample_file_name )
+ # Don't copy a file to itself - not sure how this happens, but sometimes it does...
+ if full_source_path != full_destination_path:
+ # It's ok to overwrite the .sample version of the file.
+ shutil.copy( full_source_path, full_destination_path )
+ # Only create the .loc file if it does not yet exist. We don't overwrite it in case it contains stuff proprietary to the local instance.
+ if not os.path.exists( os.path.join( dest_path, copied_file ) ):
+ shutil.copy( full_source_path, os.path.join( dest_path, copied_file ) )
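
A hypothetical example of the resulting layout (paths are illustrative only, and app is assumed to be in scope):

    # Copying 'tools/blast/blast.loc.sample' with dest_path='/galaxy/tool-data' yields:
    #   /galaxy/tool-data/blast.loc.sample   (the .sample copy is always overwritten)
    #   /galaxy/tool-data/blast.loc          (created only if it does not already exist)
    copy_sample_file( app, 'tools/blast/blast.loc.sample', dest_path='/galaxy/tool-data' )
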
+def create_or_update_repository_metadata( trans, id, repository, changeset_revision, metadata_dict ):
+ downloadable = is_downloadable( metadata_dict )
+ repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
+ if repository_metadata:
+ repository_metadata.metadata = metadata_dict
+ repository_metadata.downloadable = downloadable
+ else:
+ repository_metadata = trans.model.RepositoryMetadata( repository_id=repository.id,
+ changeset_revision=changeset_revision,
+ metadata=metadata_dict,
+ downloadable=downloadable )
+ trans.sa_session.add( repository_metadata )
+ trans.sa_session.flush()
+ return repository_metadata
+def create_repo_info_dict( repository, owner, repository_clone_url, changeset_revision, ctx_rev, metadata ):
+ repo_info_dict = {}
+ repo_info_dict[ repository.name ] = ( repository.description,
+ repository_clone_url,
+ changeset_revision,
+ ctx_rev,
+ owner,
+ metadata.get( 'tool_dependencies', None ) )
+ return repo_info_dict
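
A sketch of consuming the returned structure, assuming the repository, owner, clone URL, revision and metadata values are already in scope:

    repo_info_dict = create_repo_info_dict( repository, owner, repository_clone_url, changeset_revision, ctx_rev, metadata )
    description, clone_url, changeset_revision, ctx_rev, owner, tool_dependencies = repo_info_dict[ repository.name ]
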
+def generate_clone_url_for_repository_in_tool_shed( trans, repository ):
+ """Generate the URL for cloning a repository that is in the tool shed."""
+ base_url = url_for( '/', qualified=True ).rstrip( '/' )
+ if trans.user:
+ protocol, base = base_url.split( '://' )
+ username = '%s@' % trans.user.username
+ return '%s://%s%s/repos/%s/%s' % ( protocol, username, base, repository.user.username, repository.name )
+ else:
+ return '%s/repos/%s/%s' % ( base_url, repository.user.username, repository.name )
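
For example (hypothetical host and names), the two branches above produce:

    # Authenticated user 'jane' viewing repository 'fastq_filter' owned by 'devteam':
    #   http://jane@toolshed.example.org/repos/devteam/fastq_filter
    # Anonymous user:
    #   http://toolshed.example.org/repos/devteam/fastq_filter
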
+def generate_message_for_invalid_tools( trans, invalid_file_tups, repository, metadata_dict, as_html=True, displaying_invalid_tool=False ):
+ if as_html:
+ new_line = '<br/>'
+ bold_start = '<b>'
+ bold_end = '</b>'
+ else:
+ new_line = '\n'
+ bold_start = ''
+ bold_end = ''
+ message = ''
+ if not displaying_invalid_tool:
+ if metadata_dict:
+ message += "Metadata was defined for some items in revision '%s'. " % str( repository.tip( trans.app ) )
+ message += "Correct the following problems if necessary and reset metadata.%s" % new_line
+ else:
+ message += "Metadata cannot be defined for revision '%s' so this revision cannot be automatically " % str( repository.tip( trans.app ) )
+ message += "installed into a local Galaxy instance. Correct the following problems and reset metadata.%s" % new_line
+ for itc_tup in invalid_file_tups:
+ tool_file, exception_msg = itc_tup
+ if exception_msg.find( 'No such file or directory' ) >= 0:
+ exception_items = exception_msg.split()
+ missing_file_items = exception_items[ 7 ].split( '/' )
+ missing_file = missing_file_items[ -1 ].rstrip( '\'' )
+ if missing_file.endswith( '.loc' ):
+ sample_ext = '%s.sample' % missing_file
+ else:
+ sample_ext = missing_file
+ correction_msg = "This file refers to a missing file %s%s%s. " % ( bold_start, str( missing_file ), bold_end )
+ correction_msg += "Upload a file named %s%s%s to the repository to correct this error." % ( bold_start, sample_ext, bold_end )
+ else:
+ if as_html:
+ correction_msg = exception_msg
+ else:
+ correction_msg = exception_msg.replace( '<br/>', new_line ).replace( '<b>', bold_start ).replace( '</b>', bold_end )
+ message += "%s%s%s - %s%s" % ( bold_start, tool_file, bold_end, correction_msg, new_line )
+ return message
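
A worked example of the 'No such file or directory' parsing above, using a typical (hypothetical) IOError message:

    # exception_msg = "[Errno 2] No such file or directory: '/path/to/tool-data/blast.loc'"
    # exception_msg.split()[ 7 ]   ->  "'/path/to/tool-data/blast.loc'"
    # ...split( '/' )[ -1 ]        ->  "blast.loc'"
    # ...rstrip( '\'' )            ->  'blast.loc'
    # so the suggested upload becomes 'blast.loc.sample'.
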
+def generate_metadata_for_changeset_revision( app, repository, repository_clone_url, shed_config_dict={}, relative_install_dir=None, repository_files_dir=None,
+ resetting_all_metadata_on_repository=False, updating_installed_repository=False, persist=False ):
+ """
+    Generate metadata for a repository using its files on disk. To generate metadata for changeset revisions older than the repository tip,
+ the repository will have been cloned to a temporary location and updated to a specified changeset revision to access that changeset revision's
+ disk files, so the value of repository_files_dir will not always be repository.repo_path( app ) (it could be an absolute path to a temporary
+ directory containing a clone). If it is an absolute path, the value of relative_install_dir must contain repository.repo_path( app ).
+
+ The value of persist will be True when the installed repository contains a valid tool_data_table_conf.xml.sample file, in which case the entries
+ should ultimately be persisted to the file referred to by app.config.shed_tool_data_table_config.
+ """
+ if updating_installed_repository:
+ # Keep the original tool shed repository metadata if setting metadata on a repository installed into a local Galaxy instance for which
+ # we have pulled updates.
+ original_repository_metadata = repository.metadata
+ else:
+ original_repository_metadata = None
+ readme_file_names = get_readme_file_names( repository.name )
+ metadata_dict = { 'shed_config_filename': shed_config_dict.get( 'config_filename' ) }
+ invalid_file_tups = []
+ invalid_tool_configs = []
+ tool_dependencies_config = None
+ original_tool_data_path = app.config.tool_data_path
+ original_tool_data_table_config_path = app.config.tool_data_table_config_path
+ if resetting_all_metadata_on_repository:
+ if not relative_install_dir:
+ raise Exception( "The value of repository.repo_path( app ) must be sent when resetting all metadata on a repository." )
+ # Keep track of the location where the repository is temporarily cloned so that we can strip the path when setting metadata. The value of
+ # repository_files_dir is the full path to the temporary directory to which the repository was cloned.
+ work_dir = repository_files_dir
+ files_dir = repository_files_dir
+ # Since we're working from a temporary directory, we can safely copy sample files included in the repository to the repository root.
+ app.config.tool_data_path = repository_files_dir
+ app.config.tool_data_table_config_path = repository_files_dir
+ else:
+ # Use a temporary working directory to copy all sample files.
+ work_dir = tempfile.mkdtemp()
+ # All other files are on disk in the repository's repo_path, which is the value of relative_install_dir.
+ files_dir = relative_install_dir
+ if shed_config_dict.get( 'tool_path' ):
+ files_dir = os.path.join( shed_config_dict['tool_path'], files_dir )
+ app.config.tool_data_path = work_dir
+ app.config.tool_data_table_config_path = work_dir
+ # Handle proprietary datatypes, if any.
+ datatypes_config = get_config_from_disk( 'datatypes_conf.xml', files_dir )
+ if datatypes_config:
+ metadata_dict = generate_datatypes_metadata( datatypes_config, metadata_dict )
+ # Get the relative path to all sample files included in the repository for storage in the repository's metadata.
+ sample_file_metadata_paths, sample_file_copy_paths = get_sample_files_from_disk( repository_files_dir=files_dir,
+ tool_path=shed_config_dict.get( 'tool_path' ),
+ relative_install_dir=relative_install_dir,
+ resetting_all_metadata_on_repository=resetting_all_metadata_on_repository )
+ if sample_file_metadata_paths:
+ metadata_dict[ 'sample_files' ] = sample_file_metadata_paths
+ # Copy all sample files included in the repository to a single directory location so we can load tools that depend on them.
+ for sample_file in sample_file_copy_paths:
+ copy_sample_file( app, sample_file, dest_path=work_dir )
+            # If the list of sample files includes a tool_data_table_conf.xml.sample file, load its table elements into memory.
+ relative_path, filename = os.path.split( sample_file )
+ if filename == 'tool_data_table_conf.xml.sample':
+ new_table_elems = app.tool_data_tables.add_new_entries_from_config_file( config_filename=sample_file,
+ tool_data_path=original_tool_data_path,
+ shed_tool_data_table_config=app.config.shed_tool_data_table_config,
+ persist=persist )
+ for root, dirs, files in os.walk( files_dir ):
+ if root.find( '.hg' ) < 0 and root.find( 'hgrc' ) < 0:
+ if '.hg' in dirs:
+ dirs.remove( '.hg' )
+ for name in files:
+ # See if we have a READ_ME file.
+ if name.lower() in readme_file_names:
+ if resetting_all_metadata_on_repository:
+ full_path_to_readme = os.path.join( root, name )
+ stripped_path_to_readme = full_path_to_readme.replace( work_dir, '' )
+ if stripped_path_to_readme.startswith( '/' ):
+ stripped_path_to_readme = stripped_path_to_readme[ 1: ]
+ relative_path_to_readme = os.path.join( relative_install_dir, stripped_path_to_readme )
+ else:
+ relative_path_to_readme = os.path.join( root, name )
+ if relative_install_dir and shed_config_dict.get( 'tool_path' ) and relative_path_to_readme.startswith( os.path.join( shed_config_dict.get( 'tool_path' ), relative_install_dir ) ):
+ relative_path_to_readme = relative_path_to_readme[ len( shed_config_dict.get( 'tool_path' ) ) + 1: ]
+ metadata_dict[ 'readme' ] = relative_path_to_readme
+ # See if we have a tool config.
+ elif name not in NOT_TOOL_CONFIGS and name.endswith( '.xml' ):
+ full_path = str( os.path.abspath( os.path.join( root, name ) ) )
+ if os.path.getsize( full_path ) > 0:
+ if not ( check_binary( full_path ) or check_image( full_path ) or check_gzip( full_path )[ 0 ]
+ or check_bz2( full_path )[ 0 ] or check_zip( full_path ) ):
+ try:
+ # Make sure we're looking at a tool config and not a display application config or something else.
+ element_tree = util.parse_xml( full_path )
+ element_tree_root = element_tree.getroot()
+ is_tool = element_tree_root.tag == 'tool'
+ except Exception, e:
+ log.debug( "Error parsing %s, exception: %s" % ( full_path, str( e ) ) )
+ is_tool = False
+ if is_tool:
+ tool, valid, error_message = load_tool_from_config( app, full_path )
+ if tool is None:
+ if not valid:
+ invalid_file_tups.append( ( name, error_message ) )
+ else:
+ invalid_files_and_errors_tups = check_tool_input_params( app, files_dir, name, tool, sample_file_metadata_paths )
+ can_set_metadata = True
+ for tup in invalid_files_and_errors_tups:
+ if name in tup:
+ can_set_metadata = False
+ invalid_tool_configs.append( name )
+ break
+ if can_set_metadata:
+ if resetting_all_metadata_on_repository:
+ full_path_to_tool_config = os.path.join( root, name )
+ stripped_path_to_tool_config = full_path_to_tool_config.replace( work_dir, '' )
+ if stripped_path_to_tool_config.startswith( '/' ):
+ stripped_path_to_tool_config = stripped_path_to_tool_config[ 1: ]
+ relative_path_to_tool_config = os.path.join( relative_install_dir, stripped_path_to_tool_config )
+ else:
+ relative_path_to_tool_config = os.path.join( root, name )
+ if relative_install_dir and shed_config_dict.get( 'tool_path' ) and relative_path_to_tool_config.startswith( os.path.join( shed_config_dict.get( 'tool_path' ), relative_install_dir ) ):
+ relative_path_to_tool_config = relative_path_to_tool_config[ len( shed_config_dict.get( 'tool_path' ) ) + 1: ]
+ metadata_dict = generate_tool_metadata( relative_path_to_tool_config, tool, repository_clone_url, metadata_dict )
+ else:
+ for tup in invalid_files_and_errors_tups:
+ invalid_file_tups.append( tup )
+ # Find all exported workflows.
+ elif name.endswith( '.ga' ):
+ relative_path = os.path.join( root, name )
+ if os.path.getsize( os.path.abspath( relative_path ) ) > 0:
+ fp = open( relative_path, 'rb' )
+ workflow_text = fp.read()
+ fp.close()
+ exported_workflow_dict = from_json_string( workflow_text )
+ if 'a_galaxy_workflow' in exported_workflow_dict and exported_workflow_dict[ 'a_galaxy_workflow' ] == 'true':
+ metadata_dict = generate_workflow_metadata( relative_path, exported_workflow_dict, metadata_dict )
+ if 'tools' in metadata_dict:
+ # This step must be done after metadata for tools has been defined.
+ tool_dependencies_config = get_config_from_disk( 'tool_dependencies.xml', files_dir )
+ if tool_dependencies_config:
+ metadata_dict = generate_tool_dependency_metadata( app,
+ repository,
+ tool_dependencies_config,
+ metadata_dict,
+ original_repository_metadata=original_repository_metadata )
+ if invalid_tool_configs:
+        metadata_dict[ 'invalid_tools' ] = invalid_tool_configs
+ # Reset the value of the app's tool_data_path and tool_data_table_config_path to their respective original values.
+ app.config.tool_data_path = original_tool_data_path
+ app.config.tool_data_table_config_path = original_tool_data_table_config_path
+ return metadata_dict, invalid_file_tups
+def get_changectx_for_changeset( repo, changeset_revision, **kwd ):
+ """Retrieve a specified changectx from a repository"""
+ for changeset in repo.changelog:
+ ctx = repo.changectx( changeset )
+ if str( ctx ) == changeset_revision:
+ return ctx
+ return None
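
A minimal usage sketch, assuming repo_dir points at a repository clone on disk:

    repo = hg.repository( get_configured_ui(), repo_dir )
    ctx = get_changectx_for_changeset( repo, '7a51b701af88' )
    if ctx is None:
        log.debug( 'Changeset not found in this repository changelog.' )
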
+def get_config_from_disk( config_file, relative_install_dir ):
+ for root, dirs, files in os.walk( relative_install_dir ):
+ if root.find( '.hg' ) < 0:
+ for name in files:
+ if name == config_file:
+ return os.path.abspath( os.path.join( root, name ) )
+ return None
+def get_configured_ui():
+ # Configure any desired ui settings.
+ _ui = ui.ui()
+ # The following will suppress all messages. This is
+ # the same as adding the following setting to the repo
+    # hgrc file's [ui] section:
+ # quiet = True
+ _ui.setconfig( 'ui', 'quiet', True )
+ return _ui
+def get_file_context_from_ctx( ctx, filename ):
+ # We have to be careful in determining if we found the correct file because multiple files with the same name may be in different directories
+ # within ctx if the files were moved within the change set. For example, in the following ctx.files() list, the former may have been moved to
+ # the latter: ['tmap_wrapper_0.0.19/tool_data_table_conf.xml.sample', 'tmap_wrapper_0.3.3/tool_data_table_conf.xml.sample']. Another scenario
+ # is that the file has been deleted.
+ deleted = False
+ filename = strip_path( filename )
+ for ctx_file in ctx.files():
+ ctx_file_name = strip_path( ctx_file )
+ if filename == ctx_file_name:
+ try:
+ # If the file was moved, its destination will be returned here.
+ fctx = ctx[ ctx_file ]
+ return fctx
+ except LookupError, e:
+ # Set deleted for now, and continue looking in case the file was moved instead of deleted.
+ deleted = True
+ if deleted:
+ return 'DELETED'
+ return None
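
Callers must distinguish the three possible return values; a sketch, assuming ctx is already in scope:

    fctx = get_file_context_from_ctx( ctx, 'tool_data_table_conf.xml.sample' )
    if fctx is None:
        log.debug( 'File is not present in this changeset.' )
    elif fctx == 'DELETED':
        log.debug( 'File was deleted in this changeset.' )
    else:
        file_contents = fctx.data()
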
+def get_repository_file_contents( file_path ):
+ if is_gzip( file_path ):
+ to_html = to_html_str( '\ngzip compressed file\n' )
+ elif is_bz2( file_path ):
+ to_html = to_html_str( '\nbz2 compressed file\n' )
+ elif check_zip( file_path ):
+ to_html = to_html_str( '\nzip compressed file\n' )
+ elif check_binary( file_path ):
+ to_html = to_html_str( '\nBinary file\n' )
+ else:
+ to_html = ''
+ for i, line in enumerate( open( file_path ) ):
+ to_html = '%s%s' % ( to_html, to_html_str( line ) )
+ if len( to_html ) > MAX_CONTENT_SIZE:
+ large_str = '\nFile contents truncated because file size is larger than maximum viewing size of %s\n' % util.nice_size( MAX_CONTENT_SIZE )
+ to_html = '%s%s' % ( to_html, to_html_str( large_str ) )
+ break
+ return to_html
+def get_repository_in_tool_shed( trans, id ):
+ """Get a repository on the tool shed side from the database via id"""
+ return trans.sa_session.query( trans.model.Repository ).get( trans.security.decode_id( id ) )
+def get_repository_metadata_by_changeset_revision( trans, id, changeset_revision ):
+ """Get metadata for a specified repository change set from the database"""
+ # Make sure there are no duplicate records, and return the single unique record for the changeset_revision. Duplicate records were somehow
+ # created in the past. The cause of this issue has been resolved, but we'll leave this method as is for a while longer to ensure all duplicate
+ # records are removed.
+ all_metadata_records = trans.sa_session.query( trans.model.RepositoryMetadata ) \
+ .filter( and_( trans.model.RepositoryMetadata.table.c.repository_id == trans.security.decode_id( id ),
+ trans.model.RepositoryMetadata.table.c.changeset_revision == changeset_revision ) ) \
+ .order_by( trans.model.RepositoryMetadata.table.c.update_time.desc() ) \
+ .all()
+ if len( all_metadata_records ) > 1:
+        # Delete all records older than the last one updated.
+ for repository_metadata in all_metadata_records[ 1: ]:
+ trans.sa_session.delete( repository_metadata )
+ trans.sa_session.flush()
+ return all_metadata_records[ 0 ]
+ elif all_metadata_records:
+ return all_metadata_records[ 0 ]
+ return None
+def get_named_tmpfile_from_ctx( ctx, filename, dir ):
+ filename = strip_path( filename )
+ for ctx_file in ctx.files():
+ ctx_file_name = strip_path( ctx_file )
+ if filename == ctx_file_name:
+ try:
+ # If the file was moved, its destination file contents will be returned here.
+ fctx = ctx[ ctx_file ]
+ except LookupError, e:
+ # Continue looking in case the file was moved.
+ fctx = None
+ continue
+ if fctx:
+ fh = tempfile.NamedTemporaryFile( 'wb', dir=dir )
+ tmp_filename = fh.name
+ fh.close()
+ fh = open( tmp_filename, 'wb' )
+ fh.write( fctx.data() )
+ fh.close()
+ return tmp_filename
+ return None
+def get_parent_id( trans, id, old_id, version, guid, changeset_revisions ):
+ parent_id = None
+ # Compare from most recent to oldest.
+ changeset_revisions.reverse()
+ for changeset_revision in changeset_revisions:
+ repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
+ metadata = repository_metadata.metadata
+ tools_dicts = metadata.get( 'tools', [] )
+ for tool_dict in tools_dicts:
+ if tool_dict[ 'guid' ] == guid:
+ # The tool has not changed between the compared changeset revisions.
+ continue
+ if tool_dict[ 'id' ] == old_id and tool_dict[ 'version' ] != version:
+ # The tool version is different, so we've found the parent.
+ return tool_dict[ 'guid' ]
+ if parent_id is None:
+ # The tool did not change through all of the changeset revisions.
+ return old_id
+def handle_sample_files_and_load_tool_from_disk( trans, repo_files_dir, tool_config_filepath, work_dir ):
+ # Copy all sample files from disk to a temporary directory since the sample files may be in multiple directories.
+ message = ''
+ sample_files = copy_disk_sample_files_to_dir( trans, repo_files_dir, work_dir )
+ if sample_files:
+ if 'tool_data_table_conf.xml.sample' in sample_files:
+ # Load entries into the tool_data_tables if the tool requires them.
+ tool_data_table_config = os.path.join( work_dir, 'tool_data_table_conf.xml' )
+ error, message = handle_sample_tool_data_table_conf_file( trans.app, tool_data_table_config )
+ tool, valid, message2 = load_tool_from_config( trans.app, tool_config_filepath )
+ message = concat_messages( message, message2 )
+ return tool, valid, message, sample_files
+def handle_sample_files_and_load_tool_from_tmp_config( trans, repo, changeset_revision, tool_config_filename, work_dir ):
+ tool = None
+ message = ''
+ ctx = get_changectx_for_changeset( repo, changeset_revision )
+ # We're not currently doing anything with the returned list of deleted_sample_files here. It is intended to help handle sample files that are in
+ # the manifest, but have been deleted from disk.
+ sample_files, deleted_sample_files = get_list_of_copied_sample_files( repo, ctx, dir=work_dir )
+ if sample_files:
+ trans.app.config.tool_data_path = work_dir
+ if 'tool_data_table_conf.xml.sample' in sample_files:
+ # Load entries into the tool_data_tables if the tool requires them.
+ tool_data_table_config = os.path.join( work_dir, 'tool_data_table_conf.xml' )
+ if tool_data_table_config:
+ error, message = handle_sample_tool_data_table_conf_file( trans.app, tool_data_table_config )
+ if error:
+ log.debug( message )
+ manifest_ctx, ctx_file = get_ctx_file_path_from_manifest( tool_config_filename, repo, changeset_revision )
+ if manifest_ctx and ctx_file:
+ tool, message2 = load_tool_from_tmp_config( trans, repo, manifest_ctx, ctx_file, work_dir )
+ message = concat_messages( message, message2 )
+ return tool, message, sample_files
+def handle_sample_tool_data_table_conf_file( app, filename, persist=False ):
+ """
+    Parse the incoming filename and add new entries to the in-memory app.tool_data_tables dictionary. If persist is True (which should occur
+    only when the call is from the Galaxy side, not the tool shed), the new entries will be appended to Galaxy's shed_tool_data_table_conf.xml file on disk.
+ """
+ error = False
+ message = ''
+ try:
+ new_table_elems = app.tool_data_tables.add_new_entries_from_config_file( config_filename=filename,
+ tool_data_path=app.config.tool_data_path,
+ shed_tool_data_table_config=app.config.shed_tool_data_table_config,
+ persist=persist )
+ except Exception, e:
+ message = str( e )
+ error = True
+ return error, message
+def is_downloadable( metadata_dict ):
+ return 'datatypes' in metadata_dict or 'tools' in metadata_dict or 'workflows' in metadata_dict
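
In other words, a revision is downloadable as soon as its metadata defines datatypes, tools or workflows:

    assert is_downloadable( { 'tools': [ {} ] } )
    assert not is_downloadable( { 'shed_config_filename': None } )
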
+def load_tool_from_config( app, full_path ):
+ try:
+ tool = app.toolbox.load_tool( full_path )
+ valid = True
+ error_message = None
+ except KeyError, e:
+ tool = None
+ valid = False
+ error_message = 'This file requires an entry for "%s" in the tool_data_table_conf.xml file. Upload a file ' % str( e )
+ error_message += 'named tool_data_table_conf.xml.sample to the repository that includes the required entry to correct '
+ error_message += 'this error. '
+ except Exception, e:
+ tool = None
+ valid = False
+ error_message = str( e )
+ return tool, valid, error_message
+def open_repository_files_folder( trans, folder_path ):
+ try:
+ files_list = get_repository_files( trans, folder_path )
+    except OSError, e:
+        if str( e ).find( 'No such file or directory' ) >= 0:
+            # We have a repository with no contents.
+            return []
+        # Re-raise other errors rather than falling through with files_list undefined.
+        raise
+ folder_contents = []
+ for filename in files_list:
+ is_folder = False
+ if filename and filename[-1] == os.sep:
+ is_folder = True
+ if filename:
+ full_path = os.path.join( folder_path, filename )
+ node = { "title": filename,
+ "isFolder": is_folder,
+ "isLazy": is_folder,
+ "tooltip": full_path,
+ "key": full_path }
+ folder_contents.append( node )
+ return folder_contents
+def remove_dir( dir ):
+ if os.path.exists( dir ):
+ try:
+ shutil.rmtree( dir )
+ except:
+ pass
+def reset_all_metadata_on_repository_in_tool_shed( trans, id ):
+ """Reset all metadata on a single repository in a tool shed."""
+ def reset_all_tool_versions( trans, id, repo ):
+ changeset_revisions = []
+ for changeset in repo.changelog:
+ changeset_revision = str( repo.changectx( changeset ) )
+ repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
+ if repository_metadata:
+ metadata = repository_metadata.metadata
+ if metadata:
+ if metadata.get( 'tools', None ):
+ changeset_revisions.append( changeset_revision )
+ # The list of changeset_revisions is now filtered to contain only those that are downloadable and contain tools.
+ # If a repository includes tools, build a dictionary of { 'tool id' : 'parent tool id' } pairs for each tool in each changeset revision.
+ for index, changeset_revision in enumerate( changeset_revisions ):
+ tool_versions_dict = {}
+ repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
+ metadata = repository_metadata.metadata
+ tool_dicts = metadata[ 'tools' ]
+ if index == 0:
+                # The first changeset_revision is a special case because it will have no ancestor changeset_revisions in which to match tools.
+ # The parent tool id for tools in the first changeset_revision will be the "old_id" in the tool config.
+ for tool_dict in tool_dicts:
+ tool_versions_dict[ tool_dict[ 'guid' ] ] = tool_dict[ 'id' ]
+ else:
+ for tool_dict in tool_dicts:
+ parent_id = get_parent_id( trans,
+ id,
+ tool_dict[ 'id' ],
+ tool_dict[ 'version' ],
+ tool_dict[ 'guid' ],
+ changeset_revisions[ 0:index ] )
+ tool_versions_dict[ tool_dict[ 'guid' ] ] = parent_id
+ if tool_versions_dict:
+ repository_metadata.tool_versions = tool_versions_dict
+ trans.sa_session.add( repository_metadata )
+ trans.sa_session.flush()
+ repository = get_repository_in_tool_shed( trans, id )
+ log.debug( "Resetting all metadata on repository: %s" % repository.name )
+ repo_dir = repository.repo_path( trans.app )
+ repo = hg.repository( get_configured_ui(), repo_dir )
+ repository_clone_url = generate_clone_url_for_repository_in_tool_shed( trans, repository )
+ # The list of changeset_revisions refers to repository_metadata records that have been created or updated. When the following loop
+ # completes, we'll delete all repository_metadata records for this repository that do not have a changeset_revision value in this list.
+ changeset_revisions = []
+ # When a new repository_metadata record is created, it always uses the values of metadata_changeset_revision and metadata_dict.
+ metadata_changeset_revision = None
+ metadata_dict = None
+ ancestor_changeset_revision = None
+ ancestor_metadata_dict = None
+ invalid_file_tups = []
+ home_dir = os.getcwd()
+ for changeset in repo.changelog:
+ work_dir = tempfile.mkdtemp()
+ current_changeset_revision = str( repo.changectx( changeset ) )
+ ctx = repo.changectx( changeset )
+ log.debug( "Cloning repository revision: %s", str( ctx.rev() ) )
+ cloned_ok, error_message = clone_repository( repository_clone_url, work_dir, str( ctx.rev() ) )
+ if cloned_ok:
+            log.debug( "Generating metadata for changeset revision: %s", str( ctx.rev() ) )
+ current_metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( app=trans.app,
+ repository=repository,
+ repository_clone_url=repository_clone_url,
+ relative_install_dir=repo_dir,
+ repository_files_dir=work_dir,
+ resetting_all_metadata_on_repository=True,
+ updating_installed_repository=False,
+ persist=False )
+ if current_metadata_dict:
+ if not metadata_changeset_revision and not metadata_dict:
+ # We're at the first change set in the change log.
+ metadata_changeset_revision = current_changeset_revision
+ metadata_dict = current_metadata_dict
+ if ancestor_changeset_revision:
+ # Compare metadata from ancestor and current. The value of comparison will be one of:
+ # 'no metadata' - no metadata for either ancestor or current, so continue from current
+ # 'equal' - ancestor metadata is equivalent to current metadata, so continue from current
+ # 'subset' - ancestor metadata is a subset of current metadata, so continue from current
+ # 'not equal and not subset' - ancestor metadata is neither equal to nor a subset of current metadata, so persist ancestor metadata.
+ comparison = compare_changeset_revisions( ancestor_changeset_revision,
+ ancestor_metadata_dict,
+ current_changeset_revision,
+ current_metadata_dict )
+ if comparison in [ 'no metadata', 'equal', 'subset' ]:
+ ancestor_changeset_revision = current_changeset_revision
+ ancestor_metadata_dict = current_metadata_dict
+ elif comparison == 'not equal and not subset':
+ metadata_changeset_revision = ancestor_changeset_revision
+ metadata_dict = ancestor_metadata_dict
+ repository_metadata = create_or_update_repository_metadata( trans, id, repository, metadata_changeset_revision, metadata_dict )
+ changeset_revisions.append( metadata_changeset_revision )
+ ancestor_changeset_revision = current_changeset_revision
+ ancestor_metadata_dict = current_metadata_dict
+ else:
+ # We're at the beginning of the change log.
+ ancestor_changeset_revision = current_changeset_revision
+ ancestor_metadata_dict = current_metadata_dict
+ if not ctx.children():
+ metadata_changeset_revision = current_changeset_revision
+ metadata_dict = current_metadata_dict
+ # We're at the end of the change log.
+ repository_metadata = create_or_update_repository_metadata( trans, id, repository, metadata_changeset_revision, metadata_dict )
+ changeset_revisions.append( metadata_changeset_revision )
+ ancestor_changeset_revision = None
+ ancestor_metadata_dict = None
+ elif ancestor_metadata_dict:
+ # We reach here only if current_metadata_dict is empty and ancestor_metadata_dict is not.
+ if not ctx.children():
+ # We're at the end of the change log.
+ repository_metadata = create_or_update_repository_metadata( trans, id, repository, metadata_changeset_revision, metadata_dict )
+ changeset_revisions.append( metadata_changeset_revision )
+ ancestor_changeset_revision = None
+ ancestor_metadata_dict = None
+ remove_dir( work_dir )
+ # Delete all repository_metadata records for this repository that do not have a changeset_revision value in changeset_revisions.
+ clean_repository_metadata( trans, id, changeset_revisions )
+ # Set tool version information for all downloadable changeset revisions. Get the list of changeset revisions from the changelog.
+ reset_all_tool_versions( trans, id, repo )
+ # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
+ reset_tool_data_tables( trans.app )
+ return invalid_file_tups, metadata_dict
+def reset_metadata_on_selected_repositories( trans, **kwd ):
+    # This method is called from both Galaxy and the Tool Shed, so the CONTROLLER param is required.
+ repository_ids = util.listify( kwd.get( 'repository_ids', None ) )
+ CONTROLLER = kwd[ 'CONTROLLER' ]
+ message = ''
+ status = 'done'
+ if repository_ids:
+ successful_count = 0
+ unsuccessful_count = 0
+ for repository_id in repository_ids:
+ try:
+ if CONTROLLER == 'TOOL_SHED_ADMIN_CONTROLLER':
+ repository = get_repository_in_tool_shed( trans, repository_id )
+ invalid_file_tups, metadata_dict = reset_all_metadata_on_repository_in_tool_shed( trans, repository_id )
+ elif CONTROLLER == 'GALAXY_ADMIN_TOOL_SHED_CONTROLLER':
+ repository = get_installed_tool_shed_repository( trans, repository_id )
+ invalid_file_tups, metadata_dict = reset_all_metadata_on_installed_repository( trans, repository_id )
+ if invalid_file_tups:
+ message = generate_message_for_invalid_tools( trans, invalid_file_tups, repository, None, as_html=False )
+ log.debug( message )
+ unsuccessful_count += 1
+ else:
+ log.debug( "Successfully reset metadata on repository %s" % repository.name )
+ successful_count += 1
+ except Exception, e:
+ log.debug( "Error attempting to reset metadata on repository '%s': %s" % ( repository.name, str( e ) ) )
+ unsuccessful_count += 1
+ message = "Successfully reset metadata on %d %s. " % ( successful_count, inflector.cond_plural( successful_count, "repository" ) )
+ if unsuccessful_count:
+ message += "Error setting metadata on %d %s - see the paster log for details. " % ( unsuccessful_count,
+ inflector.cond_plural( unsuccessful_count, "repository" ) )
+ else:
+        message = 'Select at least one repository on which to reset all metadata.'
+ status = 'error'
+ return message, status
+def reset_tool_data_tables( app ):
+ # Reset the tool_data_tables to an empty dictionary.
+ app.tool_data_tables.data_tables = {}
+def reversed_lower_upper_bounded_changelog( repo, excluded_lower_bounds_changeset_revision, included_upper_bounds_changeset_revision ):
+ """
+ Return a reversed list of changesets in the repository changelog after the excluded_lower_bounds_changeset_revision, but up to and
+ including the included_upper_bounds_changeset_revision. The value of excluded_lower_bounds_changeset_revision will be the value of
+ INITIAL_CHANGELOG_HASH if no valid changesets exist before included_upper_bounds_changeset_revision.
+ """
+ # To set excluded_lower_bounds_changeset_revision, calling methods should do the following, where the value of changeset_revision
+ # is a downloadable changeset_revision.
+ # excluded_lower_bounds_changeset_revision = get_previous_downloadable_changset_revision( repository, repo, changeset_revision )
+ if excluded_lower_bounds_changeset_revision == INITIAL_CHANGELOG_HASH:
+ appending_started = True
+ else:
+ appending_started = False
+ reversed_changelog = []
+ for changeset in repo.changelog:
+ changeset_hash = str( repo.changectx( changeset ) )
+ if appending_started:
+ reversed_changelog.insert( 0, changeset )
+ if changeset_hash == excluded_lower_bounds_changeset_revision and not appending_started:
+ appending_started = True
+ if changeset_hash == included_upper_bounds_changeset_revision:
+ break
+ return reversed_changelog
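
A worked example with hypothetical hashes: for a changelog of revisions 0..4 with hashes [ 'aaa', 'bbb', 'ccc', 'ddd', 'eee' ]:

    # reversed_lower_upper_bounded_changelog( repo, 'bbb', 'ddd' )
    # skips revision 0 ('aaa'), starts appending after revision 1 ('bbb'),
    # collects revisions 2 and 3, and breaks at 'ddd', returning [ 3, 2 ]:
    # newest first, lower bound excluded, upper bound included. The list
    # holds changelog revision numbers, not hashes.
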
+def reversed_upper_bounded_changelog( repo, included_upper_bounds_changeset_revision ):
+ return reversed_lower_upper_bounded_changelog( repo, INITIAL_CHANGELOG_HASH, included_upper_bounds_changeset_revision )
+def strip_path( fpath ):
+ if not fpath:
+ return fpath
+ try:
+ file_path, file_name = os.path.split( fpath )
+ except:
+ file_name = fpath
+ return file_name
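
For illustration:

    assert strip_path( 'tmap_wrapper_0.0.19/tool_data_table_conf.xml.sample' ) == 'tool_data_table_conf.xml.sample'
    assert strip_path( 'blast.loc' ) == 'blast.loc'
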
+def update_repository( repo, ctx_rev=None ):
+ """
+ Update the cloned repository to changeset_revision. It is critical that the installed repository is updated to the desired
+ changeset_revision before metadata is set because the process for setting metadata uses the repository files on disk.
+ """
+ # TODO: We may have files on disk in the repo directory that aren't being tracked, so they must be removed.
+ # The codes used to show the status of files are as follows.
+ # M = modified
+ # A = added
+ # R = removed
+ # C = clean
+ # ! = deleted, but still tracked
+ # ? = not tracked
+ # I = ignored
+ # It would be nice if we could use mercurial's purge extension to remove untracked files. The problem is that
+ # purging is not supported by the mercurial API. See the deprecated update_for_browsing() method in common.py.
+ commands.update( get_configured_ui(),
+ repo,
+ rev=ctx_rev )
+def url_join( *args ):
+ parts = []
+ for arg in args:
+ parts.append( arg.strip( '/' ) )
+ return '/'.join( parts )
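
For illustration, surrounding slashes are stripped from each part before joining (host name hypothetical):

    assert url_join( 'http://toolshed.example.org/', '/repos/', 'jane', 'fastq_filter' ) == 'http://toolshed.example.org/repos/jane/fastq_filter'
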
diff -r 0a1db986221074e72131afd6d16320357843c5f8 -r e1895e14176e80bbeccb918dba03723b07cc8112 lib/galaxy/webapps/community/controllers/admin.py
--- a/lib/galaxy/webapps/community/controllers/admin.py
+++ b/lib/galaxy/webapps/community/controllers/admin.py
@@ -5,9 +5,7 @@
from galaxy.web.framework.helpers import time_ago, iff, grids
from galaxy.web.form_builder import SelectField
from galaxy.util import inflector
-# TODO: re-factor shed_util to eliminate the following restricted imports
-from galaxy.util.shed_util import build_repository_ids_select_field, get_changectx_for_changeset, get_configured_ui, get_repository_in_tool_shed
-from galaxy.util.shed_util import reset_metadata_on_selected_repositories, TOOL_SHED_ADMIN_CONTROLLER
+from galaxy.util.shed_util_common import *
from common import *
from repository import RepositoryGrid, CategoryGrid
diff -r 0a1db986221074e72131afd6d16320357843c5f8 -r e1895e14176e80bbeccb918dba03723b07cc8112 lib/galaxy/webapps/community/controllers/common.py
--- a/lib/galaxy/webapps/community/controllers/common.py
+++ b/lib/galaxy/webapps/community/controllers/common.py
@@ -5,13 +5,7 @@
from galaxy.tools import *
from galaxy.util.json import from_json_string, to_json_string
from galaxy.util.hash_util import *
-# TODO: re-factor shed_util to eliminate the following restricted imports
-from galaxy.util.shed_util import check_tool_input_params, clone_repository, concat_messages, copy_sample_file, create_or_update_repository_metadata
-from galaxy.util.shed_util import generate_clone_url_for_repository_in_tool_shed, generate_message_for_invalid_tools, generate_metadata_for_changeset_revision
-from galaxy.util.shed_util import get_changectx_for_changeset, get_config_from_disk, get_configured_ui, get_file_context_from_ctx, get_named_tmpfile_from_ctx
-from galaxy.util.shed_util import get_parent_id, get_repository_in_tool_shed, get_repository_metadata_by_changeset_revision
-from galaxy.util.shed_util import handle_sample_files_and_load_tool_from_disk, handle_sample_files_and_load_tool_from_tmp_config, INITIAL_CHANGELOG_HASH
-from galaxy.util.shed_util import is_downloadable, load_tool_from_config, remove_dir, reset_tool_data_tables, reversed_upper_bounded_changelog, strip_path
+from galaxy.util.shed_util_common import *
from galaxy.web.base.controller import *
from galaxy.web.base.controllers.admin import *
from galaxy.webapps.community import model
diff -r 0a1db986221074e72131afd6d16320357843c5f8 -r e1895e14176e80bbeccb918dba03723b07cc8112 lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -9,14 +9,7 @@
from galaxy.web.framework.helpers import time_ago, iff, grids
from galaxy.util.json import from_json_string, to_json_string
from galaxy.model.orm import *
-# TODO: re-factor shed_util to eliminate the following restricted imports
-from galaxy.util.shed_util import create_repo_info_dict, generate_clone_url_for_repository_in_tool_shed, generate_message_for_invalid_tools
-from galaxy.util.shed_util import get_changectx_for_changeset, get_configured_ui, get_file_from_changeset_revision
-from galaxy.util.shed_util import get_repository_file_contents, get_repository_in_tool_shed, get_repository_metadata_by_changeset_revision
-from galaxy.util.shed_util import handle_sample_files_and_load_tool_from_disk, handle_sample_files_and_load_tool_from_tmp_config
-from galaxy.util.shed_util import INITIAL_CHANGELOG_HASH, load_tool_from_config, NOT_TOOL_CONFIGS, open_repository_files_folder, remove_dir
-from galaxy.util.shed_util import reset_all_metadata_on_repository_in_tool_shed, reversed_lower_upper_bounded_changelog
-from galaxy.util.shed_util import reversed_upper_bounded_changelog, strip_path, to_html_escaped, update_repository, url_join
+from galaxy.util.shed_util_common import *
from galaxy.tool_shed.encoding_util import *
from common import *
@@ -1265,6 +1258,13 @@
trans.response.headers['Pragma'] = 'no-cache'
trans.response.headers['Expires'] = '0'
return get_repository_file_contents( file_path )
+ def get_file_from_changeset_revision( self, repo_files_dir, changeset_revision, file_name, dir ):
+ """Return file_name from the received changeset_revision of the repository manifest."""
+ stripped_file_name = strip_path( file_name )
+ repo = hg.repository( get_configured_ui(), repo_files_dir )
+ ctx = get_changectx_for_changeset( repo, changeset_revision )
+ named_tmp_file = get_named_tmpfile_from_ctx( ctx, file_name, dir )
+ return named_tmp_file
def get_metadata( self, trans, repository_id, changeset_revision ):
repository_metadata = get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision )
if repository_metadata and repository_metadata.metadata:
@@ -2231,6 +2231,17 @@
if list:
return ','.join( list )
return ''
+ def to_html_escaped( self, text ):
+ """Translates the characters in text to html values"""
+ translated = []
+ for c in text:
+ if c in [ '\r\n', '\n', ' ', '\t' ] or c in VALID_CHARS:
+ translated.append( c )
+ elif c in MAPPED_CHARS:
+ translated.append( MAPPED_CHARS[ c ] )
+ else:
+ translated.append( '' )
+ return ''.join( translated )
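
For illustration, characters in MAPPED_CHARS are replaced with their HTML entities and anything outside VALID_CHARS is dropped:

    # self.to_html_escaped( 'a < b & c' )  ->  'a &lt; b &amp; c'
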
def __validate_repository_name( self, name, user ):
# Repository names must be unique for each user, must be at least four characters
# in length and must contain only lower-case letters, numbers, and the '_' character.
@@ -2304,7 +2315,7 @@
anchors = modified + added + removed + deleted + unknown + ignored + clean
diffs = []
for diff in patch.diff( repo, node1=ctx_parent.node(), node2=ctx.node() ):
- diffs.append( to_html_escaped( diff ) )
+ diffs.append( self.to_html_escaped( diff ) )
is_malicious = changeset_is_malicious( trans, id, repository.tip( trans.app ) )
metadata = self.get_metadata( trans, id, ctx_str )
return trans.fill_template( '/webapps/community/repository/view_changeset.mako',
@@ -2356,12 +2367,7 @@
except IOError:
work_dir = tempfile.mkdtemp()
try:
- manifest_readme_file = get_file_from_changeset_revision( trans.app,
- repository,
- repo_files_dir,
- changeset_revision,
- readme_file,
- work_dir )
+ manifest_readme_file = self.get_file_from_changeset_revision( repo_files_dir, changeset_revision, readme_file, work_dir )
f = open( manifest_readme_file, 'r' )
raw_text = f.read()
f.close()
diff -r 0a1db986221074e72131afd6d16320357843c5f8 -r e1895e14176e80bbeccb918dba03723b07cc8112 lib/galaxy/webapps/community/controllers/repository_review.py
--- a/lib/galaxy/webapps/community/controllers/repository_review.py
+++ b/lib/galaxy/webapps/community/controllers/repository_review.py
@@ -8,8 +8,7 @@
from sqlalchemy.sql.expression import func
from common import *
from repository import RepositoryGrid
-# TODO: re-factor shed_util to eliminate the following restricted imports
-from galaxy.util.shed_util import get_configured_ui, get_repository_in_tool_shed
+from galaxy.util.shed_util_common import *
from galaxy.util.odict import odict
from galaxy import eggs
diff -r 0a1db986221074e72131afd6d16320357843c5f8 -r e1895e14176e80bbeccb918dba03723b07cc8112 lib/galaxy/webapps/community/controllers/upload.py
--- a/lib/galaxy/webapps/community/controllers/upload.py
+++ b/lib/galaxy/webapps/community/controllers/upload.py
@@ -3,9 +3,7 @@
from galaxy.model.orm import *
from galaxy.datatypes.checkers import *
from common import *
-# TODO: re-factor shed_util to eliminate the following restricted imports
-from galaxy.util.shed_util import get_configured_ui, get_repository_in_tool_shed, reset_tool_data_tables, handle_sample_tool_data_table_conf_file
-from galaxy.util.shed_util import update_repository
+from galaxy.util.shed_util_common import *
from galaxy import eggs
eggs.require('mercurial')
diff -r 0a1db986221074e72131afd6d16320357843c5f8 -r e1895e14176e80bbeccb918dba03723b07cc8112 lib/galaxy/webapps/community/controllers/workflow.py
--- a/lib/galaxy/webapps/community/controllers/workflow.py
+++ b/lib/galaxy/webapps/community/controllers/workflow.py
@@ -10,8 +10,7 @@
from galaxy.webapps.galaxy.controllers.workflow import attach_ordered_steps
from galaxy.model.orm import *
from common import *
-# TODO: re-factor shed_util to eliminate the following restricted imports
-from galaxy.util.shed_util import get_repository_in_tool_shed
+from galaxy.util.shed_util_common import *
from galaxy.tool_shed.encoding_util import *
class RepoInputDataModule( InputDataModule ):
diff -r 0a1db986221074e72131afd6d16320357843c5f8 -r e1895e14176e80bbeccb918dba03723b07cc8112 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -2,6 +2,7 @@
from admin import *
from galaxy.util.json import from_json_string, to_json_string
from galaxy.util.shed_util import *
+from galaxy.util.shed_util_common import *
from galaxy.tool_shed.encoding_util import *
from galaxy import eggs, tools
diff -r 0a1db986221074e72131afd6d16320357843c5f8 -r e1895e14176e80bbeccb918dba03723b07cc8112 templates/webapps/community/repository/common.mako
--- a/templates/webapps/community/repository/common.mako
+++ b/templates/webapps/community/repository/common.mako
@@ -77,7 +77,7 @@
<%def name="render_clone_str( repository )"><%
- from galaxy.util.shed_util import generate_clone_url_for_repository_in_tool_shed
+ from galaxy.util.shed_util_common import generate_clone_url_for_repository_in_tool_shed
clone_str = generate_clone_url_for_repository_in_tool_shed( trans, repository )
%>
hg clone <a href="${clone_str}">${clone_str}</a>
https://bitbucket.org/galaxy/galaxy-central/changeset/7a51b701af88/
changeset: 7a51b701af88
user: natefoo
date: 2012-11-15 21:06:43
summary: Merge
affected #: 13 files
diff -r ad6c2f4b3433eab6ba577aee8fcd88266121798f -r 7a51b701af8825baaf4aeb68f422304434c01a10 lib/galaxy/tool_shed/install_manager.py
--- a/lib/galaxy/tool_shed/install_manager.py
+++ b/lib/galaxy/tool_shed/install_manager.py
@@ -6,6 +6,7 @@
from galaxy.tools import ToolSection
from galaxy.util.json import from_json_string, to_json_string
from galaxy.util.shed_util import *
+from galaxy.util.shed_util_common import *
from galaxy.util.odict import odict
from galaxy.tool_shed.common_util import *
diff -r ad6c2f4b3433eab6ba577aee8fcd88266121798f -r 7a51b701af8825baaf4aeb68f422304434c01a10 lib/galaxy/tool_shed/update_manager.py
--- a/lib/galaxy/tool_shed/update_manager.py
+++ b/lib/galaxy/tool_shed/update_manager.py
@@ -4,6 +4,7 @@
import threading, urllib2, logging
from galaxy.util import string_as_bool
from galaxy.util.shed_util import *
+from galaxy.util.shed_util_common import *
log = logging.getLogger( __name__ )
diff -r ad6c2f4b3433eab6ba577aee8fcd88266121798f -r 7a51b701af8825baaf4aeb68f422304434c01a10 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -33,6 +33,7 @@
from galaxy.util.hash_util import *
from galaxy.util import listify
from galaxy.util.shed_util import *
+from galaxy.util.shed_util_common import *
from galaxy.web import url_for
from galaxy.visualization.genome.visual_analytics import TracksterConfig
diff -r ad6c2f4b3433eab6ba577aee8fcd88266121798f -r 7a51b701af8825baaf4aeb68f422304434c01a10 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -1,14 +1,9 @@
-import sys, os, tempfile, shutil, logging, string, urllib2
-import galaxy.tools.data
-from datetime import date, datetime, timedelta
+import os, tempfile, shutil, logging, urllib2
from galaxy import util
-from galaxy.web import url_for
-from galaxy.web.form_builder import SelectField
-from galaxy.tools import parameters
from galaxy.datatypes.checkers import *
from galaxy.datatypes.sniff import is_column_based
from galaxy.util.json import *
-from galaxy.util import inflector
+from galaxy.util.shed_util_common import *
from galaxy.tools.search import ToolBoxSearch
from galaxy.tool_shed.tool_dependencies.install_util import create_or_update_tool_dependency, install_package, set_environment
from galaxy.tool_shed.encoding_util import *
@@ -26,19 +21,6 @@
log = logging.getLogger( __name__ )
-GALAXY_ADMIN_TOOL_SHED_CONTROLLER = 'GALAXY_ADMIN_TOOL_SHED_CONTROLLER'
-INITIAL_CHANGELOG_HASH = '000000000000'
-# Characters that must be html escaped
-MAPPED_CHARS = { '>' : '&gt;',
-                 '<' : '&lt;',
-                 '"' : '&quot;',
-                 '&' : '&amp;',
-                 '\'' : '&apos;' }
-MAX_CONTENT_SIZE = 32768
-NOT_TOOL_CONFIGS = [ 'datatypes_conf.xml', 'tool_dependencies.xml' ]
-VALID_CHARS = set( string.letters + string.digits + "'\"-=_.()/+*^,:?!#[]%\\$@;{}" )
-TOOL_SHED_ADMIN_CONTROLLER = 'TOOL_SHED_ADMIN_CONTROLLER'
-
def add_to_shed_tool_config( app, shed_tool_conf_dict, elem_list ):
# A tool shed repository is being installed so change the shed_tool_conf file. Parse the config file to generate the entire list
# of config_elems instead of using the in-memory list since it will be a subset of the entire list if one or more repositories have
@@ -175,27 +157,6 @@
except:
pass
return converter_path, display_path
-def build_repository_ids_select_field( trans, cntrller, name='repository_ids', multiple=True, display='checkboxes' ):
- """Method called from both Galaxy and the Tool Shed to generate the current list of repositories for resetting metadata."""
- repositories_select_field = SelectField( name=name, multiple=multiple, display=display )
- if cntrller == TOOL_SHED_ADMIN_CONTROLLER:
- for repository in trans.sa_session.query( trans.model.Repository ) \
- .filter( trans.model.Repository.table.c.deleted == False ) \
- .order_by( trans.model.Repository.table.c.name,
- trans.model.Repository.table.c.user_id ):
- owner = repository.user.username
- option_label = '%s (%s)' % ( repository.name, owner )
- option_value = '%s' % trans.security.encode_id( repository.id )
- repositories_select_field.add_option( option_label, option_value )
- elif cntrller == GALAXY_ADMIN_TOOL_SHED_CONTROLLER:
- for repository in trans.sa_session.query( trans.model.ToolShedRepository ) \
- .filter( trans.model.ToolShedRepository.table.c.uninstalled == False ) \
- .order_by( trans.model.ToolShedRepository.table.c.name,
- trans.model.ToolShedRepository.table.c.owner ):
- option_label = '%s (%s)' % ( repository.name, repository.owner )
- option_value = trans.security.encode_id( repository.id )
- repositories_select_field.add_option( option_label, option_value )
- return repositories_select_field
def can_generate_tool_dependency_metadata( root, metadata_dict ):
"""
Make sure the combination of name, version and type (the type will be the value of elem.tag) of each root element tag in the tool_dependencies.xml
@@ -245,50 +206,6 @@
# tag for any tool in the repository.
break
return can_generate_dependency_metadata
-def check_tool_input_params( app, repo_dir, tool_config_name, tool, sample_files ):
- """
- Check all of the tool's input parameters, looking for any that are dynamically generated using external data files to make
- sure the files exist.
- """
- invalid_files_and_errors_tups = []
- correction_msg = ''
- for input_param in tool.input_params:
- if isinstance( input_param, parameters.basic.SelectToolParameter ) and input_param.is_dynamic:
- # If the tool refers to .loc files or requires an entry in the tool_data_table_conf.xml, make sure all requirements exist.
- options = input_param.dynamic_options or input_param.options
- if options:
- if options.tool_data_table or options.missing_tool_data_table_name:
- # Make sure the repository contains a tool_data_table_conf.xml.sample file.
- sample_tool_data_table_conf = get_config_from_disk( 'tool_data_table_conf.xml.sample', repo_dir )
- if sample_tool_data_table_conf:
- error, correction_msg = handle_sample_tool_data_table_conf_file( app, sample_tool_data_table_conf )
- if error:
- invalid_files_and_errors_tups.append( ( 'tool_data_table_conf.xml.sample', correction_msg ) )
- else:
- options.missing_tool_data_table_name = None
- else:
- correction_msg = "This file requires an entry in the tool_data_table_conf.xml file. Upload a file named tool_data_table_conf.xml.sample "
- correction_msg += "to the repository that includes the required entry to correct this error.<br/>"
- invalid_files_and_errors_tups.append( ( tool_config_name, correction_msg ) )
- if options.index_file or options.missing_index_file:
- # Make sure the repository contains the required xxx.loc.sample file.
- index_file = options.index_file or options.missing_index_file
- index_file_name = strip_path( index_file )
- sample_found = False
- for sample_file in sample_files:
- sample_file_name = strip_path( sample_file )
- if sample_file_name == '%s.sample' % index_file_name:
- options.index_file = index_file_name
- options.missing_index_file = None
- if options.tool_data_table:
- options.tool_data_table.missing_index_file = None
- sample_found = True
- break
- if not sample_found:
- correction_msg = "This file refers to a file named <b>%s</b>. " % str( index_file )
- correction_msg += "Upload a file named <b>%s.sample</b> to the repository to correct this error." % str( index_file_name )
- invalid_files_and_errors_tups.append( ( tool_config_name, correction_msg ) )
- return invalid_files_and_errors_tups
def clean_repository_metadata( trans, id, changeset_revisions ):
# Delete all repository_metadata records associated with the repository that have a changeset_revision that is not in changeset_revisions.
# We sometimes see multiple records with the same changeset revision value - no idea how this happens. We'll assume we can delete the older
@@ -388,17 +305,6 @@
else:
return 'subset'
return 'not equal and not subset'
-def concat_messages( msg1, msg2 ):
- if msg1:
- if msg2:
- message = '%s %s' % ( msg1, msg2 )
- else:
- message = msg1
- elif msg2:
- message = msg2
- else:
- message = ''
- return message
def config_elems_to_xml_file( app, config_elems, config_filename, tool_path ):
# Persist the current in-memory list of config_elems to a file named by the value of config_filename.
fd, filename = tempfile.mkstemp()
@@ -439,35 +345,6 @@
# Eliminate the port, if any, since it will result in an invalid directory name.
return tool_shed_url.split( ':' )[ 0 ]
return tool_shed_url.rstrip( '/' )
-def clone_repository( repository_clone_url, repository_file_dir, ctx_rev ):
- """Clone the repository up to the specified changeset_revision. No subsequent revisions will be present in the cloned repository."""
- try:
- commands.clone( get_configured_ui(),
- str( repository_clone_url ),
- dest=str( repository_file_dir ),
- pull=True,
- noupdate=False,
- rev=util.listify( str( ctx_rev ) ) )
- return True, None
- except Exception, e:
- error_message = 'Error cloning repository: %s' % str( e )
- log.debug( error_message )
- return False, error_message
-def copy_sample_file( app, filename, dest_path=None ):
- """Copy xxx.sample to dest_path/xxx.sample and dest_path/xxx. The default value for dest_path is ~/tool-data."""
- if dest_path is None:
- dest_path = os.path.abspath( app.config.tool_data_path )
- sample_file_name = strip_path( filename )
- copied_file = sample_file_name.replace( '.sample', '' )
- full_source_path = os.path.abspath( filename )
- full_destination_path = os.path.join( dest_path, sample_file_name )
- # Don't copy a file to itself - not sure how this happens, but sometimes it does...
- if full_source_path != full_destination_path:
- # It's ok to overwrite the .sample version of the file.
- shutil.copy( full_source_path, full_destination_path )
- # Only create the .loc file if it does not yet exist. We don't overwrite it in case it contains stuff proprietary to the local instance.
- if not os.path.exists( os.path.join( dest_path, copied_file ) ):
- shutil.copy( full_source_path, os.path.join( dest_path, copied_file ) )
def copy_sample_files( app, sample_files, tool_path=None, sample_files_copied=None, dest_path=None ):
"""
Copy all appropriate files to dest_path in the local Galaxy environment that have not already been copied. Those that have been copied
@@ -484,15 +361,6 @@
# Attempt to ensure we're copying an appropriate file.
if is_data_index_sample_file( filename ):
copy_sample_file( app, filename, dest_path=dest_path )
-def create_repo_info_dict( repository, owner, repository_clone_url, changeset_revision, ctx_rev, metadata ):
- repo_info_dict = {}
- repo_info_dict[ repository.name ] = ( repository.description,
- repository_clone_url,
- changeset_revision,
- ctx_rev,
- owner,
- metadata.get( 'tool_dependencies', None ) )
- return repo_info_dict
def create_repository_dict_for_proprietary_datatypes( tool_shed, name, owner, installed_changeset_revision, tool_dicts, converter_path=None, display_path=None ):
return dict( tool_shed=tool_shed,
repository_name=name,
@@ -501,20 +369,6 @@
tool_dicts=tool_dicts,
converter_path=converter_path,
display_path=display_path )
-def create_or_update_repository_metadata( trans, id, repository, changeset_revision, metadata_dict ):
- downloadable = is_downloadable( metadata_dict )
- repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
- if repository_metadata:
- repository_metadata.metadata = metadata_dict
- repository_metadata.downloadable = downloadable
- else:
- repository_metadata = trans.model.RepositoryMetadata( repository_id=repository.id,
- changeset_revision=changeset_revision,
- metadata=metadata_dict,
- downloadable=downloadable )
- trans.sa_session.add( repository_metadata )
- trans.sa_session.flush()
- return repository_metadata
def create_or_update_tool_shed_repository( app, name, description, installed_changeset_revision, ctx_rev, repository_clone_url, metadata_dict,
status, current_changeset_revision=None, owner='', dist_to_shed=False ):
# The received value for dist_to_shed will be True if the InstallManager is installing a repository that contains tools or datatypes that used
@@ -618,15 +472,6 @@
"""Generate the URL for cloning a repository that has been installed into a Galaxy instance."""
tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository )
return url_join( tool_shed_url, 'repos', repository.owner, repository.name )
-def generate_clone_url_for_repository_in_tool_shed( trans, repository ):
- """Generate the URL for cloning a repository that is in the tool shed."""
- base_url = url_for( '/', qualified=True ).rstrip( '/' )
- if trans.user:
- protocol, base = base_url.split( '://' )
- username = '%s@' % trans.user.username
- return '%s://%s%s/repos/%s/%s' % ( protocol, username, base, repository.user.username, repository.name )
- else:
- return '%s/repos/%s/%s' % ( base_url, repository.user.username, repository.name )
def generate_datatypes_metadata( datatypes_config, metadata_dict ):
"""Update the received metadata_dict with information from the parsed datatypes_config."""
tree = ElementTree.parse( datatypes_config )
@@ -681,191 +526,6 @@
else:
tool_dependencies_dict[ 'set_environment' ] = [ requirements_dict ]
return tool_dependencies_dict
-def generate_metadata_for_changeset_revision( app, repository, repository_clone_url, shed_config_dict={}, relative_install_dir=None, repository_files_dir=None,
- resetting_all_metadata_on_repository=False, updating_installed_repository=False, persist=False ):
- """
- Generate metadata for a repository using its files on disk. To generate metadata for changeset revisions older than the repository tip,
- the repository will have been cloned to a temporary location and updated to a specified changeset revision to access that changeset revision's
- disk files, so the value of repository_files_dir will not always be repository.repo_path( app ) (it could be an absolute path to a temporary
- directory containing a clone). If it is an absolute path, the value of relative_install_dir must contain repository.repo_path( app ).
-
- The value of persist will be True when the installed repository contains a valid tool_data_table_conf.xml.sample file, in which case the entries
- should ultimately be persisted to the file referred to by app.config.shed_tool_data_table_config.
- """
- if updating_installed_repository:
- # Keep the original tool shed repository metadata if setting metadata on a repository installed into a local Galaxy instance for which
- # we have pulled updates.
- original_repository_metadata = repository.metadata
- else:
- original_repository_metadata = None
- readme_file_names = get_readme_file_names( repository.name )
- metadata_dict = { 'shed_config_filename': shed_config_dict.get( 'config_filename' ) }
- invalid_file_tups = []
- invalid_tool_configs = []
- tool_dependencies_config = None
- original_tool_data_path = app.config.tool_data_path
- original_tool_data_table_config_path = app.config.tool_data_table_config_path
- if resetting_all_metadata_on_repository:
- if not relative_install_dir:
- raise Exception( "The value of repository.repo_path( app ) must be sent when resetting all metadata on a repository." )
- # Keep track of the location where the repository is temporarily cloned so that we can strip the path when setting metadata. The value of
- # repository_files_dir is the full path to the temporary directory to which the repository was cloned.
- work_dir = repository_files_dir
- files_dir = repository_files_dir
- # Since we're working from a temporary directory, we can safely copy sample files included in the repository to the repository root.
- app.config.tool_data_path = repository_files_dir
- app.config.tool_data_table_config_path = repository_files_dir
- else:
- # Use a temporary working directory to copy all sample files.
- work_dir = tempfile.mkdtemp()
- # All other files are on disk in the repository's repo_path, which is the value of relative_install_dir.
- files_dir = relative_install_dir
- if shed_config_dict.get( 'tool_path' ):
- files_dir = os.path.join( shed_config_dict['tool_path'], files_dir )
- app.config.tool_data_path = work_dir
- app.config.tool_data_table_config_path = work_dir
- # Handle proprietary datatypes, if any.
- datatypes_config = get_config_from_disk( 'datatypes_conf.xml', files_dir )
- if datatypes_config:
- metadata_dict = generate_datatypes_metadata( datatypes_config, metadata_dict )
- # Get the relative path to all sample files included in the repository for storage in the repository's metadata.
- sample_file_metadata_paths, sample_file_copy_paths = get_sample_files_from_disk( repository_files_dir=files_dir,
- tool_path=shed_config_dict.get( 'tool_path' ),
- relative_install_dir=relative_install_dir,
- resetting_all_metadata_on_repository=resetting_all_metadata_on_repository )
- if sample_file_metadata_paths:
- metadata_dict[ 'sample_files' ] = sample_file_metadata_paths
- # Copy all sample files included in the repository to a single directory location so we can load tools that depend on them.
- for sample_file in sample_file_copy_paths:
- copy_sample_file( app, sample_file, dest_path=work_dir )
- # If the list of sample files includes a tool_data_table_conf.xml.sample file, load its table elements into memory.
- relative_path, filename = os.path.split( sample_file )
- if filename == 'tool_data_table_conf.xml.sample':
- new_table_elems = app.tool_data_tables.add_new_entries_from_config_file( config_filename=sample_file,
- tool_data_path=original_tool_data_path,
- shed_tool_data_table_config=app.config.shed_tool_data_table_config,
- persist=persist )
- for root, dirs, files in os.walk( files_dir ):
- if root.find( '.hg' ) < 0 and root.find( 'hgrc' ) < 0:
- if '.hg' in dirs:
- dirs.remove( '.hg' )
- for name in files:
- # See if we have a READ_ME file.
- if name.lower() in readme_file_names:
- if resetting_all_metadata_on_repository:
- full_path_to_readme = os.path.join( root, name )
- stripped_path_to_readme = full_path_to_readme.replace( work_dir, '' )
- if stripped_path_to_readme.startswith( '/' ):
- stripped_path_to_readme = stripped_path_to_readme[ 1: ]
- relative_path_to_readme = os.path.join( relative_install_dir, stripped_path_to_readme )
- else:
- relative_path_to_readme = os.path.join( root, name )
- if relative_install_dir and shed_config_dict.get( 'tool_path' ) and relative_path_to_readme.startswith( os.path.join( shed_config_dict.get( 'tool_path' ), relative_install_dir ) ):
- relative_path_to_readme = relative_path_to_readme[ len( shed_config_dict.get( 'tool_path' ) ) + 1: ]
- metadata_dict[ 'readme' ] = relative_path_to_readme
- # See if we have a tool config.
- elif name not in NOT_TOOL_CONFIGS and name.endswith( '.xml' ):
- full_path = str( os.path.abspath( os.path.join( root, name ) ) )
- if os.path.getsize( full_path ) > 0:
- if not ( check_binary( full_path ) or check_image( full_path ) or check_gzip( full_path )[ 0 ]
- or check_bz2( full_path )[ 0 ] or check_zip( full_path ) ):
- try:
- # Make sure we're looking at a tool config and not a display application config or something else.
- element_tree = util.parse_xml( full_path )
- element_tree_root = element_tree.getroot()
- is_tool = element_tree_root.tag == 'tool'
- except Exception, e:
- log.debug( "Error parsing %s, exception: %s" % ( full_path, str( e ) ) )
- is_tool = False
- if is_tool:
- tool, valid, error_message = load_tool_from_config( app, full_path )
- if tool is None:
- if not valid:
- invalid_file_tups.append( ( name, error_message ) )
- else:
- invalid_files_and_errors_tups = check_tool_input_params( app, files_dir, name, tool, sample_file_metadata_paths )
- can_set_metadata = True
- for tup in invalid_files_and_errors_tups:
- if name in tup:
- can_set_metadata = False
- invalid_tool_configs.append( name )
- break
- if can_set_metadata:
- if resetting_all_metadata_on_repository:
- full_path_to_tool_config = os.path.join( root, name )
- stripped_path_to_tool_config = full_path_to_tool_config.replace( work_dir, '' )
- if stripped_path_to_tool_config.startswith( '/' ):
- stripped_path_to_tool_config = stripped_path_to_tool_config[ 1: ]
- relative_path_to_tool_config = os.path.join( relative_install_dir, stripped_path_to_tool_config )
- else:
- relative_path_to_tool_config = os.path.join( root, name )
- if relative_install_dir and shed_config_dict.get( 'tool_path' ) and relative_path_to_tool_config.startswith( os.path.join( shed_config_dict.get( 'tool_path' ), relative_install_dir ) ):
- relative_path_to_tool_config = relative_path_to_tool_config[ len( shed_config_dict.get( 'tool_path' ) ) + 1: ]
- metadata_dict = generate_tool_metadata( relative_path_to_tool_config, tool, repository_clone_url, metadata_dict )
- else:
- for tup in invalid_files_and_errors_tups:
- invalid_file_tups.append( tup )
- # Find all exported workflows.
- elif name.endswith( '.ga' ):
- relative_path = os.path.join( root, name )
- if os.path.getsize( os.path.abspath( relative_path ) ) > 0:
- fp = open( relative_path, 'rb' )
- workflow_text = fp.read()
- fp.close()
- exported_workflow_dict = from_json_string( workflow_text )
- if 'a_galaxy_workflow' in exported_workflow_dict and exported_workflow_dict[ 'a_galaxy_workflow' ] == 'true':
- metadata_dict = generate_workflow_metadata( relative_path, exported_workflow_dict, metadata_dict )
- if 'tools' in metadata_dict:
- # This step must be done after metadata for tools has been defined.
- tool_dependencies_config = get_config_from_disk( 'tool_dependencies.xml', files_dir )
- if tool_dependencies_config:
- metadata_dict = generate_tool_dependency_metadata( app,
- repository,
- tool_dependencies_config,
- metadata_dict,
- original_repository_metadata=original_repository_metadata )
- if invalid_tool_configs:
- metadata_dict[ 'invalid_tools' ] = invalid_tool_configs
- # Reset the value of the app's tool_data_path and tool_data_table_config_path to their respective original values.
- app.config.tool_data_path = original_tool_data_path
- app.config.tool_data_table_config_path = original_tool_data_table_config_path
- return metadata_dict, invalid_file_tups
-def generate_message_for_invalid_tools( trans, invalid_file_tups, repository, metadata_dict, as_html=True, displaying_invalid_tool=False ):
- if as_html:
- new_line = '<br/>'
- bold_start = '<b>'
- bold_end = '</b>'
- else:
- new_line = '\n'
- bold_start = ''
- bold_end = ''
- message = ''
- if not displaying_invalid_tool:
- if metadata_dict:
- message += "Metadata was defined for some items in revision '%s'. " % str( repository.tip( trans.app ) )
- message += "Correct the following problems if necessary and reset metadata.%s" % new_line
- else:
- message += "Metadata cannot be defined for revision '%s' so this revision cannot be automatically " % str( repository.tip( trans.app ) )
- message += "installed into a local Galaxy instance. Correct the following problems and reset metadata.%s" % new_line
- for itc_tup in invalid_file_tups:
- tool_file, exception_msg = itc_tup
- if exception_msg.find( 'No such file or directory' ) >= 0:
- exception_items = exception_msg.split()
- missing_file_items = exception_items[ 7 ].split( '/' )
- missing_file = missing_file_items[ -1 ].rstrip( '\'' )
- if missing_file.endswith( '.loc' ):
- sample_ext = '%s.sample' % missing_file
- else:
- sample_ext = missing_file
- correction_msg = "This file refers to a missing file %s%s%s. " % ( bold_start, str( missing_file ), bold_end )
- correction_msg += "Upload a file named %s%s%s to the repository to correct this error." % ( bold_start, sample_ext, bold_end )
- else:
- if as_html:
- correction_msg = exception_msg
- else:
- correction_msg = exception_msg.replace( '<br/>', new_line ).replace( '<b>', bold_start ).replace( '</b>', bold_end )
- message += "%s%s%s - %s%s" % ( bold_start, tool_file, bold_end, correction_msg, new_line )
- return message
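As a note, the missing-file branch above assumes the exception text has the shape of a Python IOError for a path containing no spaces; a small sketch with a hypothetical message:

    exception_msg = "[Errno 2] No such file or directory: '/tmp/tool-data/blastdb.loc'"
    exception_items = exception_msg.split()
    missing_file_items = exception_items[ 7 ].split( '/' )
    missing_file = missing_file_items[ -1 ].rstrip( '\'' )
    print missing_file                 # blastdb.loc
    print '%s.sample' % missing_file   # blastdb.loc.sample, the suggested upload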
def generate_package_dependency_metadata( elem, tool_dependencies_dict ):
"""The value of package_name must match the value of the "package" type in the tool config's <requirements> tag set."""
requirements_dict = {}
@@ -1155,13 +815,6 @@
else:
metadata_dict[ 'workflows' ] = [ ( relative_path, exported_workflow_dict ) ]
return metadata_dict
-def get_changectx_for_changeset( repo, changeset_revision, **kwd ):
- """Retrieve a specified changectx from a repository"""
- for changeset in repo.changelog:
- ctx = repo.changectx( changeset )
- if str( ctx ) == changeset_revision:
- return ctx
- return None
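A hedged usage sketch (the repository path and changeset hash below are placeholders):

    repo = hg.repository( get_configured_ui(), '/path/to/repos/some_owner/some_repo' )
    ctx = get_changectx_for_changeset( repo, 'abcdef012345' )
    if ctx is None:
        log.debug( 'No changeset matching that revision was found.' )
    else:
        log.debug( 'Found revision %s (%s).' % ( str( ctx.rev() ), str( ctx ) ) )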
def get_config( config_file, repo, ctx, dir ):
"""Return the latest version of config_filename from the repository manifest."""
config_file = strip_path( config_file )
@@ -1172,22 +825,6 @@
if ctx_file_name == config_file:
return get_named_tmpfile_from_ctx( changeset_ctx, ctx_file, dir )
return None
-def get_config_from_disk( config_file, relative_install_dir ):
- for root, dirs, files in os.walk( relative_install_dir ):
- if root.find( '.hg' ) < 0:
- for name in files:
- if name == config_file:
- return os.path.abspath( os.path.join( root, name ) )
- return None
-def get_configured_ui():
- # Configure any desired ui settings.
- _ui = ui.ui()
- # The following will suppress all messages. This is
- # the same as adding the following setting to the repo
- # hgrc file's [ui] section:
- # quiet = True
- _ui.setconfig( 'ui', 'quiet', True )
- return _ui
def get_converter_and_display_paths( registration_elem, relative_install_dir ):
"""Find the relative path to data type converters and display applications included in installed tool shed repositories."""
converter_path = None
@@ -1247,33 +884,6 @@
ctx_rev = response.read()
response.close()
return ctx_rev
-def get_file_context_from_ctx( ctx, filename ):
- # We have to be careful in determining if we found the correct file because multiple files with the same name may be in different directories
- # within ctx if the files were moved within the change set. For example, in the following ctx.files() list, the former may have been moved to
- # the latter: ['tmap_wrapper_0.0.19/tool_data_table_conf.xml.sample', 'tmap_wrapper_0.3.3/tool_data_table_conf.xml.sample']. Another scenario
- # is that the file has been deleted.
- deleted = False
- filename = strip_path( filename )
- for ctx_file in ctx.files():
- ctx_file_name = strip_path( ctx_file )
- if filename == ctx_file_name:
- try:
- # If the file was moved, its destination will be returned here.
- fctx = ctx[ ctx_file ]
- return fctx
- except LookupError, e:
- # Set deleted for now, and continue looking in case the file was moved instead of deleted.
- deleted = True
- if deleted:
- return 'DELETED'
- return None
-def get_file_from_changeset_revision( app, repository, repo_files_dir, changeset_revision, file_name, dir ):
- """Return file_name from the received changeset_revision of the repository manifest."""
- stripped_file_name = strip_path( file_name )
- repo = hg.repository( get_configured_ui(), repo_files_dir )
- ctx = get_changectx_for_changeset( repo, changeset_revision )
- named_tmp_file = get_named_tmpfile_from_ctx( ctx, file_name, dir )
- return named_tmp_file
def get_installed_tool_shed_repository( trans, id ):
"""Get a repository on the Galaxy side from the database via id"""
return trans.sa_session.query( trans.model.ToolShedRepository ).get( trans.security.decode_id( id ) )
@@ -1309,63 +919,6 @@
fh.write( fctx.data() )
fh.close()
return sample_files, deleted_sample_files
-def get_named_tmpfile_from_ctx( ctx, filename, dir ):
- filename = strip_path( filename )
- for ctx_file in ctx.files():
- ctx_file_name = strip_path( ctx_file )
- if filename == ctx_file_name:
- try:
- # If the file was moved, its destination file contents will be returned here.
- fctx = ctx[ ctx_file ]
- except LookupError, e:
- # Continue looking in case the file was moved.
- fctx = None
- continue
- if fctx:
- fh = tempfile.NamedTemporaryFile( 'wb', dir=dir )
- tmp_filename = fh.name
- fh.close()
- fh = open( tmp_filename, 'wb' )
- fh.write( fctx.data() )
- fh.close()
- return tmp_filename
- return None
-def get_parent_id( trans, id, old_id, version, guid, changeset_revisions ):
- parent_id = None
- # Compare from most recent to oldest.
- changeset_revisions.reverse()
- for changeset_revision in changeset_revisions:
- repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
- metadata = repository_metadata.metadata
- tools_dicts = metadata.get( 'tools', [] )
- for tool_dict in tools_dicts:
- if tool_dict[ 'guid' ] == guid:
- # The tool has not changed between the compared changeset revisions.
- continue
- if tool_dict[ 'id' ] == old_id and tool_dict[ 'version' ] != version:
- # The tool version is different, so we've found the parent.
- return tool_dict[ 'guid' ]
- if parent_id is None:
- # The tool did not change through all of the changeset revisions.
- return old_id
-def get_repository_file_contents( file_path ):
- if is_gzip( file_path ):
- to_html = to_html_str( '\ngzip compressed file\n' )
- elif is_bz2( file_path ):
- to_html = to_html_str( '\nbz2 compressed file\n' )
- elif check_zip( file_path ):
- to_html = to_html_str( '\nzip compressed file\n' )
- elif check_binary( file_path ):
- to_html = to_html_str( '\nBinary file\n' )
- else:
- to_html = ''
- for i, line in enumerate( open( file_path ) ):
- to_html = '%s%s' % ( to_html, to_html_str( line ) )
- if len( to_html ) > MAX_CONTENT_SIZE:
- large_str = '\nFile contents truncated because file size is larger than maximum viewing size of %s\n' % util.nice_size( MAX_CONTENT_SIZE )
- to_html = '%s%s' % ( to_html, to_html_str( large_str ) )
- break
- return to_html
def get_repository_files( trans, folder_path ):
contents = []
for item in os.listdir( folder_path ):
@@ -1379,28 +932,6 @@
if contents:
contents.sort()
return contents
-def get_repository_in_tool_shed( trans, id ):
- """Get a repository on the tool shed side from the database via id"""
- return trans.sa_session.query( trans.model.Repository ).get( trans.security.decode_id( id ) )
-def get_repository_metadata_by_changeset_revision( trans, id, changeset_revision ):
- """Get metadata for a specified repository change set from the database"""
- # Make sure there are no duplicate records, and return the single unique record for the changeset_revision. Duplicate records were somehow
- # created in the past. The cause of this issue has been resolved, but we'll leave this method as is for a while longer to ensure all duplicate
- # records are removed.
- all_metadata_records = trans.sa_session.query( trans.model.RepositoryMetadata ) \
- .filter( and_( trans.model.RepositoryMetadata.table.c.repository_id == trans.security.decode_id( id ),
- trans.model.RepositoryMetadata.table.c.changeset_revision == changeset_revision ) ) \
- .order_by( trans.model.RepositoryMetadata.table.c.update_time.desc() ) \
- .all()
- if len( all_metadata_records ) > 1:
- # Delete all records older than the last one updated.
- for repository_metadata in all_metadata_records[ 1: ]:
- trans.sa_session.delete( repository_metadata )
- trans.sa_session.flush()
- return all_metadata_records[ 0 ]
- elif all_metadata_records:
- return all_metadata_records[ 0 ]
- return None
def get_repository_owner( cleaned_repository_url ):
items = cleaned_repository_url.split( 'repos' )
repo_path = items[ 1 ]
@@ -1667,55 +1198,6 @@
repository_tool = app.toolbox.load_tool( os.path.join( tool_path, tup_path ), guid=guid )
repository_tools_tups[ index ] = ( tup_path, guid, repository_tool )
return repository_tools_tups, sample_files_copied
-def handle_sample_files_and_load_tool_from_disk( trans, repo_files_dir, tool_config_filepath, work_dir ):
- # Copy all sample files from disk to a temporary directory since the sample files may be in multiple directories.
- message = ''
- sample_files = copy_disk_sample_files_to_dir( trans, repo_files_dir, work_dir )
- if sample_files:
- if 'tool_data_table_conf.xml.sample' in sample_files:
- # Load entries into the tool_data_tables if the tool requires them.
- tool_data_table_config = os.path.join( work_dir, 'tool_data_table_conf.xml' )
- error, message = handle_sample_tool_data_table_conf_file( trans.app, tool_data_table_config )
- tool, valid, message2 = load_tool_from_config( trans.app, tool_config_filepath )
- message = concat_messages( message, message2 )
- return tool, valid, message, sample_files
-def handle_sample_files_and_load_tool_from_tmp_config( trans, repo, changeset_revision, tool_config_filename, work_dir ):
- tool = None
- message = ''
- ctx = get_changectx_for_changeset( repo, changeset_revision )
- # We're not currently doing anything with the returned list of deleted_sample_files here. It is intended to help handle sample files that are in
- # the manifest, but have been deleted from disk.
- sample_files, deleted_sample_files = get_list_of_copied_sample_files( repo, ctx, dir=work_dir )
- if sample_files:
- trans.app.config.tool_data_path = work_dir
- if 'tool_data_table_conf.xml.sample' in sample_files:
- # Load entries into the tool_data_tables if the tool requires them.
- tool_data_table_config = os.path.join( work_dir, 'tool_data_table_conf.xml' )
- if tool_data_table_config:
- error, message = handle_sample_tool_data_table_conf_file( trans.app, tool_data_table_config )
- if error:
- log.debug( message )
- manifest_ctx, ctx_file = get_ctx_file_path_from_manifest( tool_config_filename, repo, changeset_revision )
- if manifest_ctx and ctx_file:
- tool, message2 = load_tool_from_tmp_config( trans, repo, manifest_ctx, ctx_file, work_dir )
- message = concat_messages( message, message2 )
- return tool, message, sample_files
-def handle_sample_tool_data_table_conf_file( app, filename, persist=False ):
- """
- Parse the incoming filename and add new entries to the in-memory app.tool_data_tables dictionary. If persist is True (should only occur
- if call is from the Galaxy side, not the tool shed), the new entries will be appended to Galaxy's shed_tool_data_table_conf.xml file on disk.
- """
- error = False
- message = ''
- try:
- new_table_elems = app.tool_data_tables.add_new_entries_from_config_file( config_filename=filename,
- tool_data_path=app.config.tool_data_path,
- shed_tool_data_table_config=app.config.shed_tool_data_table_config,
- persist=persist )
- except Exception, e:
- message = str( e )
- error = True
- return error, message
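Callers consume the ( error, message ) pair rather than catching exceptions themselves; a sketch assuming a populated work_dir and a trans object as in the callers above:

    tool_data_table_config = os.path.join( work_dir, 'tool_data_table_conf.xml' )
    error, message = handle_sample_tool_data_table_conf_file( trans.app, tool_data_table_config )
    if error:
        # Loading the table entries failed; surface the message in the log.
        log.debug( message )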
def handle_tool_dependencies( app, tool_shed_repository, tool_dependencies_config, tool_dependencies ):
"""
Install and build tool dependencies defined in the tool_dependencies_config. This config's tag sets can currently refer to installation
@@ -1800,8 +1282,6 @@
return False
# Default to copying the file if none of the above are true.
return True
-def is_downloadable( metadata_dict ):
- return 'datatypes' in metadata_dict or 'tools' in metadata_dict or 'workflows' in metadata_dict
def load_installed_datatype_converters( app, installed_repository_dict, deactivate=False ):
# Load or deactivate proprietary datatype converters
app.datatypes_registry.load_datatype_converters( app.toolbox, installed_repository_dict=installed_repository_dict, deactivate=deactivate )
@@ -1825,22 +1305,6 @@
def load_installed_display_applications( app, installed_repository_dict, deactivate=False ):
# Load or deactivate proprietary datatype display applications
app.datatypes_registry.load_display_applications( installed_repository_dict=installed_repository_dict, deactivate=deactivate )
-def load_tool_from_config( app, full_path ):
- try:
- tool = app.toolbox.load_tool( full_path )
- valid = True
- error_message = None
- except KeyError, e:
- tool = None
- valid = False
- error_message = 'This file requires an entry for "%s" in the tool_data_table_conf.xml file. Upload a file ' % str( e )
- error_message += 'named tool_data_table_conf.xml.sample to the repository that includes the required entry to correct '
- error_message += 'this error. '
- except Exception, e:
- tool = None
- valid = False
- error_message = str( e )
- return tool, valid, error_message
def load_tool_from_tmp_config( trans, repo, ctx, ctx_file, work_dir ):
tool = None
message = ''
@@ -1866,27 +1330,6 @@
except:
pass
return tool, message
-def open_repository_files_folder( trans, folder_path ):
- try:
- files_list = get_repository_files( trans, folder_path )
- except OSError, e:
- if str( e ).find( 'No such file or directory' ) >= 0:
- # We have a repository with no contents.
- return []
- # Re-raise anything else so files_list is never referenced while unbound.
- raise
- folder_contents = []
- for filename in files_list:
- is_folder = False
- if filename and filename[-1] == os.sep:
- is_folder = True
- if filename:
- full_path = os.path.join( folder_path, filename )
- node = { "title": filename,
- "isFolder": is_folder,
- "isLazy": is_folder,
- "tooltip": full_path,
- "key": full_path }
- folder_contents.append( node )
- return folder_contents
def panel_entry_per_tool( tool_section_dict ):
# Return True if tool_section_dict looks like this.
# {<Tool guid> : [{ tool_config : <tool_config_file>, id: <ToolSection id>, version : <ToolSection version>, name : <ToolSection name>}]}
@@ -1906,12 +1349,6 @@
repo,
source=repository_clone_url,
rev=[ ctx_rev ] )
-def remove_dir( dir ):
- if os.path.exists( dir ):
- try:
- shutil.rmtree( dir )
- except:
- pass
def remove_from_shed_tool_config( trans, shed_tool_conf_dict, guids_to_remove ):
# A tool shed repository is being uninstalled so change the shed_tool_conf file. Parse the config file to generate the entire list
# of config_elems instead of using the in-memory list since it will be a subset of the entire list if one or more repositories have
@@ -2113,209 +1550,6 @@
else:
log.debug( 'Error locating installation directory for repository %s.' % repository.name )
return invalid_file_tups, metadata_dict
-def reset_all_metadata_on_repository_in_tool_shed( trans, id ):
- """Reset all metadata on a single repository in a tool shed."""
- def reset_all_tool_versions( trans, id, repo ):
- changeset_revisions = []
- for changeset in repo.changelog:
- changeset_revision = str( repo.changectx( changeset ) )
- repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
- if repository_metadata:
- metadata = repository_metadata.metadata
- if metadata:
- if metadata.get( 'tools', None ):
- changeset_revisions.append( changeset_revision )
- # The list of changeset_revisions is now filtered to contain only those that are downloadable and contain tools.
- # If a repository includes tools, build a dictionary of { 'tool id' : 'parent tool id' } pairs for each tool in each changeset revision.
- for index, changeset_revision in enumerate( changeset_revisions ):
- tool_versions_dict = {}
- repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
- metadata = repository_metadata.metadata
- tool_dicts = metadata[ 'tools' ]
- if index == 0:
- # The first changeset_revision is a special case because it will have no ancestor changeset_revisions in which to match tools.
- # The parent tool id for tools in the first changeset_revision will be the "old_id" in the tool config.
- for tool_dict in tool_dicts:
- tool_versions_dict[ tool_dict[ 'guid' ] ] = tool_dict[ 'id' ]
- else:
- for tool_dict in tool_dicts:
- parent_id = get_parent_id( trans,
- id,
- tool_dict[ 'id' ],
- tool_dict[ 'version' ],
- tool_dict[ 'guid' ],
- changeset_revisions[ 0:index ] )
- tool_versions_dict[ tool_dict[ 'guid' ] ] = parent_id
- if tool_versions_dict:
- repository_metadata.tool_versions = tool_versions_dict
- trans.sa_session.add( repository_metadata )
- trans.sa_session.flush()
- repository = get_repository_in_tool_shed( trans, id )
- log.debug( "Resetting all metadata on repository: %s" % repository.name )
- repo_dir = repository.repo_path( trans.app )
- repo = hg.repository( get_configured_ui(), repo_dir )
- repository_clone_url = generate_clone_url_for_repository_in_tool_shed( trans, repository )
- # The list of changeset_revisions refers to repository_metadata records that have been created or updated. When the following loop
- # completes, we'll delete all repository_metadata records for this repository that do not have a changeset_revision value in this list.
- changeset_revisions = []
- # When a new repository_metadata record is created, it always uses the values of metadata_changeset_revision and metadata_dict.
- metadata_changeset_revision = None
- metadata_dict = None
- ancestor_changeset_revision = None
- ancestor_metadata_dict = None
- invalid_file_tups = []
- home_dir = os.getcwd()
- for changeset in repo.changelog:
- work_dir = tempfile.mkdtemp()
- current_changeset_revision = str( repo.changectx( changeset ) )
- ctx = repo.changectx( changeset )
- log.debug( "Cloning repository revision: %s", str( ctx.rev() ) )
- cloned_ok, error_message = clone_repository( repository_clone_url, work_dir, str( ctx.rev() ) )
- if cloned_ok:
- log.debug( "Generating metadata for changset revision: %s", str( ctx.rev() ) )
- current_metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( app=trans.app,
- repository=repository,
- repository_clone_url=repository_clone_url,
- relative_install_dir=repo_dir,
- repository_files_dir=work_dir,
- resetting_all_metadata_on_repository=True,
- updating_installed_repository=False,
- persist=False )
- if current_metadata_dict:
- if not metadata_changeset_revision and not metadata_dict:
- # We're at the first change set in the change log.
- metadata_changeset_revision = current_changeset_revision
- metadata_dict = current_metadata_dict
- if ancestor_changeset_revision:
- # Compare metadata from ancestor and current. The value of comparison will be one of:
- # 'no metadata' - no metadata for either ancestor or current, so continue from current
- # 'equal' - ancestor metadata is equivalent to current metadata, so continue from current
- # 'subset' - ancestor metadata is a subset of current metadata, so continue from current
- # 'not equal and not subset' - ancestor metadata is neither equal to nor a subset of current metadata, so persist ancestor metadata.
- comparison = compare_changeset_revisions( ancestor_changeset_revision,
- ancestor_metadata_dict,
- current_changeset_revision,
- current_metadata_dict )
- if comparison in [ 'no metadata', 'equal', 'subset' ]:
- ancestor_changeset_revision = current_changeset_revision
- ancestor_metadata_dict = current_metadata_dict
- elif comparison == 'not equal and not subset':
- metadata_changeset_revision = ancestor_changeset_revision
- metadata_dict = ancestor_metadata_dict
- repository_metadata = create_or_update_repository_metadata( trans, id, repository, metadata_changeset_revision, metadata_dict )
- changeset_revisions.append( metadata_changeset_revision )
- ancestor_changeset_revision = current_changeset_revision
- ancestor_metadata_dict = current_metadata_dict
- else:
- # We're at the beginning of the change log.
- ancestor_changeset_revision = current_changeset_revision
- ancestor_metadata_dict = current_metadata_dict
- if not ctx.children():
- metadata_changeset_revision = current_changeset_revision
- metadata_dict = current_metadata_dict
- # We're at the end of the change log.
- repository_metadata = create_or_update_repository_metadata( trans, id, repository, metadata_changeset_revision, metadata_dict )
- changeset_revisions.append( metadata_changeset_revision )
- ancestor_changeset_revision = None
- ancestor_metadata_dict = None
- elif ancestor_metadata_dict:
- # We reach here only if current_metadata_dict is empty and ancestor_metadata_dict is not.
- if not ctx.children():
- # We're at the end of the change log.
- repository_metadata = create_or_update_repository_metadata( trans, id, repository, metadata_changeset_revision, metadata_dict )
- changeset_revisions.append( metadata_changeset_revision )
- ancestor_changeset_revision = None
- ancestor_metadata_dict = None
- remove_dir( work_dir )
- # Delete all repository_metadata records for this repository that do not have a changeset_revision value in changeset_revisions.
- clean_repository_metadata( trans, id, changeset_revisions )
- # Set tool version information for all downloadable changeset revisions. Get the list of changeset revisions from the changelog.
- reset_all_tool_versions( trans, id, repo )
- # Reset the in-memory tool_data_tables to an empty dictionary.
- reset_tool_data_tables( trans.app )
- return invalid_file_tups, metadata_dict
-def reset_metadata_on_selected_repositories( trans, **kwd ):
- # This method is called from both Galaxy and the Tool Shed, so the CONTROLLER param is required.
- repository_ids = util.listify( kwd.get( 'repository_ids', None ) )
- CONTROLLER = kwd[ 'CONTROLLER' ]
- message = ''
- status = 'done'
- if repository_ids:
- successful_count = 0
- unsuccessful_count = 0
- for repository_id in repository_ids:
- try:
- if CONTROLLER == 'TOOL_SHED_ADMIN_CONTROLLER':
- repository = get_repository_in_tool_shed( trans, repository_id )
- invalid_file_tups, metadata_dict = reset_all_metadata_on_repository_in_tool_shed( trans, repository_id )
- elif CONTROLLER == 'GALAXY_ADMIN_TOOL_SHED_CONTROLLER':
- repository = get_installed_tool_shed_repository( trans, repository_id )
- invalid_file_tups, metadata_dict = reset_all_metadata_on_installed_repository( trans, repository_id )
- if invalid_file_tups:
- message = generate_message_for_invalid_tools( trans, invalid_file_tups, repository, None, as_html=False )
- log.debug( message )
- unsuccessful_count += 1
- else:
- log.debug( "Successfully reset metadata on repository %s" % repository.name )
- successful_count += 1
- except Exception, e:
- log.debug( "Error attempting to reset metadata on repository '%s': %s" % ( repository.name, str( e ) ) )
- unsuccessful_count += 1
- message = "Successfully reset metadata on %d %s. " % ( successful_count, inflector.cond_plural( successful_count, "repository" ) )
- if unsuccessful_count:
- message += "Error setting metadata on %d %s - see the paster log for details. " % ( unsuccessful_count,
- inflector.cond_plural( unsuccessful_count, "repository" ) )
- else:
- message = 'Select at least one repository on which to reset all metadata.'
- status = 'error'
- return message, status
-def reset_tool_data_tables( app ):
- # Reset the tool_data_tables to an empty dictionary.
- app.tool_data_tables.data_tables = {}
-def reversed_lower_upper_bounded_changelog( repo, excluded_lower_bounds_changeset_revision, included_upper_bounds_changeset_revision ):
- """
- Return a reversed list of changesets in the repository changelog after the excluded_lower_bounds_changeset_revision, but up to and
- including the included_upper_bounds_changeset_revision. The value of excluded_lower_bounds_changeset_revision will be the value of
- INITIAL_CHANGELOG_HASH if no valid changesets exist before included_upper_bounds_changeset_revision.
- """
- # To set excluded_lower_bounds_changeset_revision, calling methods should do the following, where the value of changeset_revision
- # is a downloadable changeset_revision.
- # excluded_lower_bounds_changeset_revision = get_previous_downloadable_changset_revision( repository, repo, changeset_revision )
- if excluded_lower_bounds_changeset_revision == INITIAL_CHANGELOG_HASH:
- appending_started = True
- else:
- appending_started = False
- reversed_changelog = []
- for changeset in repo.changelog:
- changeset_hash = str( repo.changectx( changeset ) )
- if appending_started:
- reversed_changelog.insert( 0, changeset )
- if changeset_hash == excluded_lower_bounds_changeset_revision and not appending_started:
- appending_started = True
- if changeset_hash == included_upper_bounds_changeset_revision:
- break
- return reversed_changelog
-def reversed_upper_bounded_changelog( repo, included_upper_bounds_changeset_revision ):
- return reversed_lower_upper_bounded_changelog( repo, INITIAL_CHANGELOG_HASH, included_upper_bounds_changeset_revision )
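To make the bounds concrete, here is a self-contained sketch of the same windowing logic over a plain list of fake changeset hashes (all values hypothetical):

    INITIAL_CHANGELOG_HASH = '000000000000'
    def reversed_window( hashes, excluded_lower, included_upper ):
        appending_started = excluded_lower == INITIAL_CHANGELOG_HASH
        reversed_changelog = []
        for changeset_hash in hashes:
            if appending_started:
                reversed_changelog.insert( 0, changeset_hash )
            if changeset_hash == excluded_lower and not appending_started:
                appending_started = True
            if changeset_hash == included_upper:
                break
        return reversed_changelog
    # Exclusive of the lower bound, inclusive of the upper bound, reversed.
    print reversed_window( [ 'aaa', 'bbb', 'ccc', 'ddd' ], 'aaa', 'ccc' )   # ['ccc', 'bbb']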
-def strip_path( fpath ):
- if not fpath:
- return fpath
- try:
- file_path, file_name = os.path.split( fpath )
- except:
- file_name = fpath
- return file_name
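For example:

    print strip_path( 'tmap_wrapper_0.3.3/tool_data_table_conf.xml.sample' )   # tool_data_table_conf.xml.sample
    print strip_path( 'tool_data_table_conf.xml.sample' )                      # unchanged when there is no directory part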
-def to_html_escaped( text ):
- """Translates the characters in text to html values"""
- translated = []
- for c in text:
- if c in [ '\r\n', '\n', ' ', '\t' ] or c in VALID_CHARS:
- translated.append( c )
- elif c in MAPPED_CHARS:
- translated.append( MAPPED_CHARS[ c ] )
- else:
- translated.append( '' )
- return ''.join( translated )
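Note that '"' and '\'' appear in VALID_CHARS, so the quote characters pass through untouched and only characters such as '<', '>' and '&' are mapped; for example:

    print to_html_escaped( '<b>R&D</b>' )   # &lt;b&gt;R&amp;D&lt;/b&gt;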
def to_html_str( text ):
"""Translates the characters in text to an html string"""
translated = []
@@ -2443,32 +1677,8 @@
elem = guid_to_tool_elem_dict[ guid ]
config_elems.append( elem )
config_elems_to_xml_file( app, config_elems, shed_tool_conf, tool_path )
-def update_repository( repo, ctx_rev=None ):
- """
- Update the cloned repository to changeset_revision. It is critical that the installed repository is updated to the desired
- changeset_revision before metadata is set because the process for setting metadata uses the repository files on disk.
- """
- # TODO: We may have files on disk in the repo directory that aren't being tracked, so they must be removed.
- # The codes used to show the status of files are as follows.
- # M = modified
- # A = added
- # R = removed
- # C = clean
- # ! = deleted, but still tracked
- # ? = not tracked
- # I = ignored
- # It would be nice if we could use mercurial's purge extension to remove untracked files. The problem is that
- # purging is not supported by the mercurial API. See the deprecated update_for_browsing() method in common.py.
- commands.update( get_configured_ui(),
- repo,
- rev=ctx_rev )
def update_tool_shed_repository_status( app, tool_shed_repository, status ):
sa_session = app.model.context.current
tool_shed_repository.status = status
sa_session.add( tool_shed_repository )
sa_session.flush()
-def url_join( *args ):
- parts = []
- for arg in args:
- parts.append( arg.strip( '/' ) )
- return '/'.join( parts )
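Each argument is stripped of leading and trailing slashes before joining; for a hypothetical tool shed:

    print url_join( 'http://toolshed.example.org/', 'repos', 'some_owner', 'some_repo' )
    # http://toolshed.example.org/repos/some_owner/some_repo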
diff -r ad6c2f4b3433eab6ba577aee8fcd88266121798f -r 7a51b701af8825baaf4aeb68f422304434c01a10 lib/galaxy/util/shed_util_common.py
--- /dev/null
+++ b/lib/galaxy/util/shed_util_common.py
@@ -0,0 +1,784 @@
+import os, shutil, tempfile, logging, string
+from galaxy.util.json import from_json_string
+from galaxy import util
+from galaxy.tools import parameters
+from galaxy.util import inflector
+from galaxy.web import url_for
+from galaxy.web.form_builder import SelectField
+from galaxy.datatypes.checkers import *
+from galaxy.model.orm import *
+
+from galaxy import eggs
+import pkg_resources
+
+pkg_resources.require( 'mercurial' )
+from mercurial import hg, ui, commands
+
+log = logging.getLogger( __name__ )
+
+INITIAL_CHANGELOG_HASH = '000000000000'
+# Characters that must be html escaped
+MAPPED_CHARS = { '>' : '&gt;',
+ '<' : '&lt;',
+ '"' : '&quot;',
+ '&' : '&amp;',
+ '\'' : '&apos;' }
+MAX_CONTENT_SIZE = 32768
+NOT_TOOL_CONFIGS = [ 'datatypes_conf.xml', 'tool_dependencies.xml' ]
+GALAXY_ADMIN_TOOL_SHED_CONTROLLER = 'GALAXY_ADMIN_TOOL_SHED_CONTROLLER'
+TOOL_SHED_ADMIN_CONTROLLER = 'TOOL_SHED_ADMIN_CONTROLLER'
+VALID_CHARS = set( string.letters + string.digits + "'\"-=_.()/+*^,:?!#[]%\\$@;{}" )
+
+def build_repository_ids_select_field( trans, cntrller, name='repository_ids', multiple=True, display='checkboxes' ):
+ """Method called from both Galaxy and the Tool Shed to generate the current list of repositories for resetting metadata."""
+ repositories_select_field = SelectField( name=name, multiple=multiple, display=display )
+ if cntrller == TOOL_SHED_ADMIN_CONTROLLER:
+ for repository in trans.sa_session.query( trans.model.Repository ) \
+ .filter( trans.model.Repository.table.c.deleted == False ) \
+ .order_by( trans.model.Repository.table.c.name,
+ trans.model.Repository.table.c.user_id ):
+ owner = repository.user.username
+ option_label = '%s (%s)' % ( repository.name, owner )
+ option_value = '%s' % trans.security.encode_id( repository.id )
+ repositories_select_field.add_option( option_label, option_value )
+ elif cntrller == GALAXY_ADMIN_TOOL_SHED_CONTROLLER:
+ for repository in trans.sa_session.query( trans.model.ToolShedRepository ) \
+ .filter( trans.model.ToolShedRepository.table.c.uninstalled == False ) \
+ .order_by( trans.model.ToolShedRepository.table.c.name,
+ trans.model.ToolShedRepository.table.c.owner ):
+ option_label = '%s (%s)' % ( repository.name, repository.owner )
+ option_value = trans.security.encode_id( repository.id )
+ repositories_select_field.add_option( option_label, option_value )
+ return repositories_select_field
+def check_tool_input_params( app, repo_dir, tool_config_name, tool, sample_files ):
+ """
+ Check all of the tool's input parameters, looking for any that are dynamically generated using external data files to make
+ sure the files exist.
+ """
+ invalid_files_and_errors_tups = []
+ correction_msg = ''
+ for input_param in tool.input_params:
+ if isinstance( input_param, parameters.basic.SelectToolParameter ) and input_param.is_dynamic:
+ # If the tool refers to .loc files or requires an entry in the tool_data_table_conf.xml, make sure all requirements exist.
+ options = input_param.dynamic_options or input_param.options
+ if options:
+ if options.tool_data_table or options.missing_tool_data_table_name:
+ # Make sure the repository contains a tool_data_table_conf.xml.sample file.
+ sample_tool_data_table_conf = get_config_from_disk( 'tool_data_table_conf.xml.sample', repo_dir )
+ if sample_tool_data_table_conf:
+ error, correction_msg = handle_sample_tool_data_table_conf_file( app, sample_tool_data_table_conf )
+ if error:
+ invalid_files_and_errors_tups.append( ( 'tool_data_table_conf.xml.sample', correction_msg ) )
+ else:
+ options.missing_tool_data_table_name = None
+ else:
+ correction_msg = "This file requires an entry in the tool_data_table_conf.xml file. Upload a file named tool_data_table_conf.xml.sample "
+ correction_msg += "to the repository that includes the required entry to correct this error.<br/>"
+ invalid_files_and_errors_tups.append( ( tool_config_name, correction_msg ) )
+ if options.index_file or options.missing_index_file:
+ # Make sure the repository contains the required xxx.loc.sample file.
+ index_file = options.index_file or options.missing_index_file
+ index_file_name = strip_path( index_file )
+ sample_found = False
+ for sample_file in sample_files:
+ sample_file_name = strip_path( sample_file )
+ if sample_file_name == '%s.sample' % index_file_name:
+ options.index_file = index_file_name
+ options.missing_index_file = None
+ if options.tool_data_table:
+ options.tool_data_table.missing_index_file = None
+ sample_found = True
+ break
+ if not sample_found:
+ correction_msg = "This file refers to a file named <b>%s</b>. " % str( index_file )
+ correction_msg += "Upload a file named <b>%s.sample</b> to the repository to correct this error." % str( index_file_name )
+ invalid_files_and_errors_tups.append( ( tool_config_name, correction_msg ) )
+ return invalid_files_and_errors_tups
+def clone_repository( repository_clone_url, repository_file_dir, ctx_rev ):
+ """Clone the repository up to the specified changeset_revision. No subsequent revisions will be present in the cloned repository."""
+ try:
+ commands.clone( get_configured_ui(),
+ str( repository_clone_url ),
+ dest=str( repository_file_dir ),
+ pull=True,
+ noupdate=False,
+ rev=util.listify( str( ctx_rev ) ) )
+ return True, None
+ except Exception, e:
+ error_message = 'Error cloning repository: %s' % str( e )
+ log.debug( error_message )
+ return False, error_message
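A hedged usage sketch mirroring the call made when resetting metadata (the clone URL is a placeholder):

    work_dir = tempfile.mkdtemp()
    cloned_ok, error_message = clone_repository( 'http://toolshed.example.org/repos/some_owner/some_repo',
                                                 work_dir,
                                                 '4' )
    if not cloned_ok:
        log.debug( error_message )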
+def concat_messages( msg1, msg2 ):
+ if msg1:
+ if msg2:
+ message = '%s %s' % ( msg1, msg2 )
+ else:
+ message = msg1
+ elif msg2:
+ message = msg2
+ else:
+ message = ''
+ return message
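For example:

    print concat_messages( 'First problem.', 'Second problem.' )   # First problem. Second problem.
    print concat_messages( '', 'Only problem.' )                   # Only problem.
    print concat_messages( None, None )                            # prints an empty string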
+def copy_sample_file( app, filename, dest_path=None ):
+ """Copy xxx.sample to dest_path/xxx.sample and dest_path/xxx. The default value for dest_path is ~/tool-data."""
+ if dest_path is None:
+ dest_path = os.path.abspath( app.config.tool_data_path )
+ sample_file_name = strip_path( filename )
+ copied_file = sample_file_name.replace( '.sample', '' )
+ full_source_path = os.path.abspath( filename )
+ full_destination_path = os.path.join( dest_path, sample_file_name )
+ # Don't copy a file to itself - not sure how this happens, but sometimes it does...
+ if full_source_path != full_destination_path:
+ # It's ok to overwrite the .sample version of the file.
+ shutil.copy( full_source_path, full_destination_path )
+ # Only create the .loc file if it does not yet exist. We don't overwrite it in case it contains stuff proprietary to the local instance.
+ if not os.path.exists( os.path.join( dest_path, copied_file ) ):
+ shutil.copy( full_source_path, os.path.join( dest_path, copied_file ) )
+def create_or_update_repository_metadata( trans, id, repository, changeset_revision, metadata_dict ):
+ downloadable = is_downloadable( metadata_dict )
+ repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
+ if repository_metadata:
+ repository_metadata.metadata = metadata_dict
+ repository_metadata.downloadable = downloadable
+ else:
+ repository_metadata = trans.model.RepositoryMetadata( repository_id=repository.id,
+ changeset_revision=changeset_revision,
+ metadata=metadata_dict,
+ downloadable=downloadable )
+ trans.sa_session.add( repository_metadata )
+ trans.sa_session.flush()
+ return repository_metadata
+def create_repo_info_dict( repository, owner, repository_clone_url, changeset_revision, ctx_rev, metadata ):
+ repo_info_dict = {}
+ repo_info_dict[ repository.name ] = ( repository.description,
+ repository_clone_url,
+ changeset_revision,
+ ctx_rev,
+ owner,
+ metadata.get( 'tool_dependencies', None ) )
+ return repo_info_dict
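The resulting dictionary maps the repository name to a 6-tuple; a hypothetical instance:

    repo_info_dict = { 'some_repo': ( 'Example description',
                                      'http://toolshed.example.org/repos/some_owner/some_repo',
                                      'abcdef012345',    # changeset_revision
                                      '4',               # ctx_rev
                                      'some_owner',
                                      None ) }           # tool_dependencies metadata, if any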
+def generate_clone_url_for_repository_in_tool_shed( trans, repository ):
+ """Generate the URL for cloning a repository that is in the tool shed."""
+ base_url = url_for( '/', qualified=True ).rstrip( '/' )
+ if trans.user:
+ protocol, base = base_url.split( '://' )
+ username = '%s@' % trans.user.username
+ return '%s://%s%s/repos/%s/%s' % ( protocol, username, base, repository.user.username, repository.name )
+ else:
+ return '%s/repos/%s/%s' % ( base_url, repository.user.username, repository.name )
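A sketch of the authenticated form, with hypothetical host and user names:

    base_url = 'http://toolshed.example.org'
    protocol, base = base_url.split( '://' )
    username = '%s@' % 'bob'
    print '%s://%s%s/repos/%s/%s' % ( protocol, username, base, 'jane', 'some_repo' )
    # http://bob@toolshed.example.org/repos/jane/some_repo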
+def generate_message_for_invalid_tools( trans, invalid_file_tups, repository, metadata_dict, as_html=True, displaying_invalid_tool=False ):
+ if as_html:
+ new_line = '<br/>'
+ bold_start = '<b>'
+ bold_end = '</b>'
+ else:
+ new_line = '\n'
+ bold_start = ''
+ bold_end = ''
+ message = ''
+ if not displaying_invalid_tool:
+ if metadata_dict:
+ message += "Metadata was defined for some items in revision '%s'. " % str( repository.tip( trans.app ) )
+ message += "Correct the following problems if necessary and reset metadata.%s" % new_line
+ else:
+ message += "Metadata cannot be defined for revision '%s' so this revision cannot be automatically " % str( repository.tip( trans.app ) )
+ message += "installed into a local Galaxy instance. Correct the following problems and reset metadata.%s" % new_line
+ for itc_tup in invalid_file_tups:
+ tool_file, exception_msg = itc_tup
+ if exception_msg.find( 'No such file or directory' ) >= 0:
+ exception_items = exception_msg.split()
+ missing_file_items = exception_items[ 7 ].split( '/' )
+ missing_file = missing_file_items[ -1 ].rstrip( '\'' )
+ if missing_file.endswith( '.loc' ):
+ sample_ext = '%s.sample' % missing_file
+ else:
+ sample_ext = missing_file
+ correction_msg = "This file refers to a missing file %s%s%s. " % ( bold_start, str( missing_file ), bold_end )
+ correction_msg += "Upload a file named %s%s%s to the repository to correct this error." % ( bold_start, sample_ext, bold_end )
+ else:
+ if as_html:
+ correction_msg = exception_msg
+ else:
+ correction_msg = exception_msg.replace( '<br/>', new_line ).replace( '<b>', bold_start ).replace( '</b>', bold_end )
+ message += "%s%s%s - %s%s" % ( bold_start, tool_file, bold_end, correction_msg, new_line )
+ return message
+def generate_metadata_for_changeset_revision( app, repository, repository_clone_url, shed_config_dict={}, relative_install_dir=None, repository_files_dir=None,
+ resetting_all_metadata_on_repository=False, updating_installed_repository=False, persist=False ):
+ """
+ Generate metadata for a repository using its files on disk. To generate metadata for changeset revisions older than the repository tip,
+ the repository will have been cloned to a temporary location and updated to a specified changeset revision to access that changeset revision's
+ disk files, so the value of repository_files_dir will not always be repository.repo_path( app ) (it could be an absolute path to a temporary
+ directory containing a clone). If it is an absolute path, the value of relative_install_dir must contain repository.repo_path( app ).
+
+ The value of persist will be True when the installed repository contains a valid tool_data_table_conf.xml.sample file, in which case the entries
+ should ultimately be persisted to the file referred to by app.config.shed_tool_data_table_config.
+ """
+ if updating_installed_repository:
+ # Keep the original tool shed repository metadata if setting metadata on a repository installed into a local Galaxy instance for which
+ # we have pulled updates.
+ original_repository_metadata = repository.metadata
+ else:
+ original_repository_metadata = None
+ readme_file_names = get_readme_file_names( repository.name )
+ metadata_dict = { 'shed_config_filename': shed_config_dict.get( 'config_filename' ) }
+ invalid_file_tups = []
+ invalid_tool_configs = []
+ tool_dependencies_config = None
+ original_tool_data_path = app.config.tool_data_path
+ original_tool_data_table_config_path = app.config.tool_data_table_config_path
+ if resetting_all_metadata_on_repository:
+ if not relative_install_dir:
+ raise Exception( "The value of repository.repo_path( app ) must be sent when resetting all metadata on a repository." )
+ # Keep track of the location where the repository is temporarily cloned so that we can strip the path when setting metadata. The value of
+ # repository_files_dir is the full path to the temporary directory to which the repository was cloned.
+ work_dir = repository_files_dir
+ files_dir = repository_files_dir
+ # Since we're working from a temporary directory, we can safely copy sample files included in the repository to the repository root.
+ app.config.tool_data_path = repository_files_dir
+ app.config.tool_data_table_config_path = repository_files_dir
+ else:
+ # Use a temporary working directory to copy all sample files.
+ work_dir = tempfile.mkdtemp()
+ # All other files are on disk in the repository's repo_path, which is the value of relative_install_dir.
+ files_dir = relative_install_dir
+ if shed_config_dict.get( 'tool_path' ):
+ files_dir = os.path.join( shed_config_dict['tool_path'], files_dir )
+ app.config.tool_data_path = work_dir
+ app.config.tool_data_table_config_path = work_dir
+ # Handle proprietary datatypes, if any.
+ datatypes_config = get_config_from_disk( 'datatypes_conf.xml', files_dir )
+ if datatypes_config:
+ metadata_dict = generate_datatypes_metadata( datatypes_config, metadata_dict )
+ # Get the relative path to all sample files included in the repository for storage in the repository's metadata.
+ sample_file_metadata_paths, sample_file_copy_paths = get_sample_files_from_disk( repository_files_dir=files_dir,
+ tool_path=shed_config_dict.get( 'tool_path' ),
+ relative_install_dir=relative_install_dir,
+ resetting_all_metadata_on_repository=resetting_all_metadata_on_repository )
+ if sample_file_metadata_paths:
+ metadata_dict[ 'sample_files' ] = sample_file_metadata_paths
+ # Copy all sample files included in the repository to a single directory location so we can load tools that depend on them.
+ for sample_file in sample_file_copy_paths:
+ copy_sample_file( app, sample_file, dest_path=work_dir )
+ # If the list of sample files includes a tool_data_table_conf.xml.sample file, load its table elements into memory.
+ relative_path, filename = os.path.split( sample_file )
+ if filename == 'tool_data_table_conf.xml.sample':
+ new_table_elems = app.tool_data_tables.add_new_entries_from_config_file( config_filename=sample_file,
+ tool_data_path=original_tool_data_path,
+ shed_tool_data_table_config=app.config.shed_tool_data_table_config,
+ persist=persist )
+ for root, dirs, files in os.walk( files_dir ):
+ if root.find( '.hg' ) < 0 and root.find( 'hgrc' ) < 0:
+ if '.hg' in dirs:
+ dirs.remove( '.hg' )
+ for name in files:
+ # See if we have a READ_ME file.
+ if name.lower() in readme_file_names:
+ if resetting_all_metadata_on_repository:
+ full_path_to_readme = os.path.join( root, name )
+ stripped_path_to_readme = full_path_to_readme.replace( work_dir, '' )
+ if stripped_path_to_readme.startswith( '/' ):
+ stripped_path_to_readme = stripped_path_to_readme[ 1: ]
+ relative_path_to_readme = os.path.join( relative_install_dir, stripped_path_to_readme )
+ else:
+ relative_path_to_readme = os.path.join( root, name )
+ if relative_install_dir and shed_config_dict.get( 'tool_path' ) and relative_path_to_readme.startswith( os.path.join( shed_config_dict.get( 'tool_path' ), relative_install_dir ) ):
+ relative_path_to_readme = relative_path_to_readme[ len( shed_config_dict.get( 'tool_path' ) ) + 1: ]
+ metadata_dict[ 'readme' ] = relative_path_to_readme
+ # See if we have a tool config.
+ elif name not in NOT_TOOL_CONFIGS and name.endswith( '.xml' ):
+ full_path = str( os.path.abspath( os.path.join( root, name ) ) )
+ if os.path.getsize( full_path ) > 0:
+ if not ( check_binary( full_path ) or check_image( full_path ) or check_gzip( full_path )[ 0 ]
+ or check_bz2( full_path )[ 0 ] or check_zip( full_path ) ):
+ try:
+ # Make sure we're looking at a tool config and not a display application config or something else.
+ element_tree = util.parse_xml( full_path )
+ element_tree_root = element_tree.getroot()
+ is_tool = element_tree_root.tag == 'tool'
+ except Exception, e:
+ log.debug( "Error parsing %s, exception: %s" % ( full_path, str( e ) ) )
+ is_tool = False
+ if is_tool:
+ tool, valid, error_message = load_tool_from_config( app, full_path )
+ if tool is None:
+ if not valid:
+ invalid_file_tups.append( ( name, error_message ) )
+ else:
+ invalid_files_and_errors_tups = check_tool_input_params( app, files_dir, name, tool, sample_file_metadata_paths )
+ can_set_metadata = True
+ for tup in invalid_files_and_errors_tups:
+ if name in tup:
+ can_set_metadata = False
+ invalid_tool_configs.append( name )
+ break
+ if can_set_metadata:
+ if resetting_all_metadata_on_repository:
+ full_path_to_tool_config = os.path.join( root, name )
+ stripped_path_to_tool_config = full_path_to_tool_config.replace( work_dir, '' )
+ if stripped_path_to_tool_config.startswith( '/' ):
+ stripped_path_to_tool_config = stripped_path_to_tool_config[ 1: ]
+ relative_path_to_tool_config = os.path.join( relative_install_dir, stripped_path_to_tool_config )
+ else:
+ relative_path_to_tool_config = os.path.join( root, name )
+ if relative_install_dir and shed_config_dict.get( 'tool_path' ) and relative_path_to_tool_config.startswith( os.path.join( shed_config_dict.get( 'tool_path' ), relative_install_dir ) ):
+ relative_path_to_tool_config = relative_path_to_tool_config[ len( shed_config_dict.get( 'tool_path' ) ) + 1: ]
+ metadata_dict = generate_tool_metadata( relative_path_to_tool_config, tool, repository_clone_url, metadata_dict )
+ else:
+ for tup in invalid_files_and_errors_tups:
+ invalid_file_tups.append( tup )
+ # Find all exported workflows.
+ elif name.endswith( '.ga' ):
+ relative_path = os.path.join( root, name )
+ if os.path.getsize( os.path.abspath( relative_path ) ) > 0:
+ fp = open( relative_path, 'rb' )
+ workflow_text = fp.read()
+ fp.close()
+ exported_workflow_dict = from_json_string( workflow_text )
+ if 'a_galaxy_workflow' in exported_workflow_dict and exported_workflow_dict[ 'a_galaxy_workflow' ] == 'true':
+ metadata_dict = generate_workflow_metadata( relative_path, exported_workflow_dict, metadata_dict )
+ if 'tools' in metadata_dict:
+ # This step must be done after metadata for tools has been defined.
+ tool_dependencies_config = get_config_from_disk( 'tool_dependencies.xml', files_dir )
+ if tool_dependencies_config:
+ metadata_dict = generate_tool_dependency_metadata( app,
+ repository,
+ tool_dependencies_config,
+ metadata_dict,
+ original_repository_metadata=original_repository_metadata )
+ if invalid_tool_configs:
+ metadata_dict[ 'invalid_tools' ] = invalid_tool_configs
+ # Reset the value of the app's tool_data_path and tool_data_table_config_path to their respective original values.
+ app.config.tool_data_path = original_tool_data_path
+ app.config.tool_data_table_config_path = original_tool_data_table_config_path
+ return metadata_dict, invalid_file_tups
+def get_changectx_for_changeset( repo, changeset_revision, **kwd ):
+ """Retrieve a specified changectx from a repository"""
+ for changeset in repo.changelog:
+ ctx = repo.changectx( changeset )
+ if str( ctx ) == changeset_revision:
+ return ctx
+ return None
+def get_config_from_disk( config_file, relative_install_dir ):
+ for root, dirs, files in os.walk( relative_install_dir ):
+ if root.find( '.hg' ) < 0:
+ for name in files:
+ if name == config_file:
+ return os.path.abspath( os.path.join( root, name ) )
+ return None
+def get_configured_ui():
+ # Configure any desired ui settings.
+ _ui = ui.ui()
+ # The following will suppress all messages. This is
+ # the same as adding the following setting to the repo
+ # hgrc file's [ui] section:
+ # quiet = True
+ _ui.setconfig( 'ui', 'quiet', True )
+ return _ui
+def get_file_context_from_ctx( ctx, filename ):
+ # We have to be careful in determining if we found the correct file because multiple files with the same name may be in different directories
+ # within ctx if the files were moved within the change set. For example, in the following ctx.files() list, the former may have been moved to
+ # the latter: ['tmap_wrapper_0.0.19/tool_data_table_conf.xml.sample', 'tmap_wrapper_0.3.3/tool_data_table_conf.xml.sample']. Another scenario
+ # is that the file has been deleted.
+ deleted = False
+ filename = strip_path( filename )
+ for ctx_file in ctx.files():
+ ctx_file_name = strip_path( ctx_file )
+ if filename == ctx_file_name:
+ try:
+ # If the file was moved, its destination will be returned here.
+ fctx = ctx[ ctx_file ]
+ return fctx
+ except LookupError, e:
+ # Set deleted for now, and continue looking in case the file was moved instead of deleted.
+ deleted = True
+ if deleted:
+ return 'DELETED'
+ return None
+def get_repository_file_contents( file_path ):
+ if is_gzip( file_path ):
+ to_html = to_html_str( '\ngzip compressed file\n' )
+ elif is_bz2( file_path ):
+ to_html = to_html_str( '\nbz2 compressed file\n' )
+ elif check_zip( file_path ):
+ to_html = to_html_str( '\nzip compressed file\n' )
+ elif check_binary( file_path ):
+ to_html = to_html_str( '\nBinary file\n' )
+ else:
+ to_html = ''
+ for i, line in enumerate( open( file_path ) ):
+ to_html = '%s%s' % ( to_html, to_html_str( line ) )
+ if len( to_html ) > MAX_CONTENT_SIZE:
+ large_str = '\nFile contents truncated because file size is larger than maximum viewing size of %s\n' % util.nice_size( MAX_CONTENT_SIZE )
+ to_html = '%s%s' % ( to_html, to_html_str( large_str ) )
+ break
+ return to_html
+def get_repository_in_tool_shed( trans, id ):
+ """Get a repository on the tool shed side from the database via id"""
+ return trans.sa_session.query( trans.model.Repository ).get( trans.security.decode_id( id ) )
+def get_repository_metadata_by_changeset_revision( trans, id, changeset_revision ):
+ """Get metadata for a specified repository change set from the database"""
+ # Make sure there are no duplicate records, and return the single unique record for the changeset_revision. Duplicate records were somehow
+ # created in the past. The cause of this issue has been resolved, but we'll leave this method as is for a while longer to ensure all duplicate
+ # records are removed.
+ all_metadata_records = trans.sa_session.query( trans.model.RepositoryMetadata ) \
+ .filter( and_( trans.model.RepositoryMetadata.table.c.repository_id == trans.security.decode_id( id ),
+ trans.model.RepositoryMetadata.table.c.changeset_revision == changeset_revision ) ) \
+ .order_by( trans.model.RepositoryMetadata.table.c.update_time.desc() ) \
+ .all()
+ if len( all_metadata_records ) > 1:
+ # Delete all records older than the most recently updated one.
+ for repository_metadata in all_metadata_records[ 1: ]:
+ trans.sa_session.delete( repository_metadata )
+ trans.sa_session.flush()
+ return all_metadata_records[ 0 ]
+ elif all_metadata_records:
+ return all_metadata_records[ 0 ]
+ return None
+def get_named_tmpfile_from_ctx( ctx, filename, dir ):
+ filename = strip_path( filename )
+ for ctx_file in ctx.files():
+ ctx_file_name = strip_path( ctx_file )
+ if filename == ctx_file_name:
+ try:
+ # If the file was moved, its destination file contents will be returned here.
+ fctx = ctx[ ctx_file ]
+ except LookupError, e:
+ # Continue looking in case the file was moved.
+ fctx = None
+ continue
+ if fctx:
+ fh = tempfile.NamedTemporaryFile( 'wb', dir=dir )
+ tmp_filename = fh.name
+ fh.close()
+ fh = open( tmp_filename, 'wb' )
+ fh.write( fctx.data() )
+ fh.close()
+ return tmp_filename
+ return None
+def get_parent_id( trans, id, old_id, version, guid, changeset_revisions ):
+ parent_id = None
+ # Compare from most recent to oldest.
+ changeset_revisions.reverse()
+ for changeset_revision in changeset_revisions:
+ repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
+ metadata = repository_metadata.metadata
+ tools_dicts = metadata.get( 'tools', [] )
+ for tool_dict in tools_dicts:
+ if tool_dict[ 'guid' ] == guid:
+ # The tool has not changed between the compared changeset revisions.
+ continue
+ if tool_dict[ 'id' ] == old_id and tool_dict[ 'version' ] != version:
+ # The tool version is different, so we've found the parent.
+ return tool_dict[ 'guid' ]
+ if parent_id is None:
+ # The tool did not change through all of the changeset revisions.
+ return old_id
+def handle_sample_files_and_load_tool_from_disk( trans, repo_files_dir, tool_config_filepath, work_dir ):
+ # Copy all sample files from disk to a temporary directory since the sample files may be in multiple directories.
+ message = ''
+ sample_files = copy_disk_sample_files_to_dir( trans, repo_files_dir, work_dir )
+ if sample_files:
+ if 'tool_data_table_conf.xml.sample' in sample_files:
+ # Load entries into the tool_data_tables if the tool requires them.
+ tool_data_table_config = os.path.join( work_dir, 'tool_data_table_conf.xml' )
+ error, message = handle_sample_tool_data_table_conf_file( trans.app, tool_data_table_config )
+ tool, valid, message2 = load_tool_from_config( trans.app, tool_config_filepath )
+ message = concat_messages( message, message2 )
+ return tool, valid, message, sample_files
+def handle_sample_files_and_load_tool_from_tmp_config( trans, repo, changeset_revision, tool_config_filename, work_dir ):
+ tool = None
+ message = ''
+ ctx = get_changectx_for_changeset( repo, changeset_revision )
+ # We're not currently doing anything with the returned list of deleted_sample_files here. It is intended to help handle sample files that are in
+ # the manifest, but have been deleted from disk.
+ sample_files, deleted_sample_files = get_list_of_copied_sample_files( repo, ctx, dir=work_dir )
+ if sample_files:
+ trans.app.config.tool_data_path = work_dir
+ if 'tool_data_table_conf.xml.sample' in sample_files:
+ # Load entries into the tool_data_tables if the tool requires them.
+ tool_data_table_config = os.path.join( work_dir, 'tool_data_table_conf.xml' )
+ if tool_data_table_config:
+ error, message = handle_sample_tool_data_table_conf_file( trans.app, tool_data_table_config )
+ if error:
+ log.debug( message )
+ manifest_ctx, ctx_file = get_ctx_file_path_from_manifest( tool_config_filename, repo, changeset_revision )
+ if manifest_ctx and ctx_file:
+ tool, message2 = load_tool_from_tmp_config( trans, repo, manifest_ctx, ctx_file, work_dir )
+ message = concat_messages( message, message2 )
+ return tool, message, sample_files
+def handle_sample_tool_data_table_conf_file( app, filename, persist=False ):
+ """
+ Parse the incoming filename and add new entries to the in-memory app.tool_data_tables dictionary. If persist is True (should only occur
+ if call is from the Galaxy side, not the tool shed), the new entries will be appended to Galaxy's shed_tool_data_table_conf.xml file on disk.
+ """
+ error = False
+ message = ''
+ try:
+ new_table_elems = app.tool_data_tables.add_new_entries_from_config_file( config_filename=filename,
+ tool_data_path=app.config.tool_data_path,
+ shed_tool_data_table_config=app.config.shed_tool_data_table_config,
+ persist=persist )
+ except Exception, e:
+ message = str( e )
+ error = True
+ return error, message
+def is_downloadable( metadata_dict ):
+ return 'datatypes' in metadata_dict or 'tools' in metadata_dict or 'workflows' in metadata_dict
+def load_tool_from_config( app, full_path ):
+ try:
+ tool = app.toolbox.load_tool( full_path )
+ valid = True
+ error_message = None
+ except KeyError, e:
+ tool = None
+ valid = False
+ error_message = 'This file requires an entry for "%s" in the tool_data_table_conf.xml file. Upload a file ' % str( e )
+ error_message += 'named tool_data_table_conf.xml.sample to the repository that includes the required entry to correct '
+ error_message += 'this error. '
+ except Exception, e:
+ tool = None
+ valid = False
+ error_message = str( e )
+ return tool, valid, error_message
+def open_repository_files_folder( trans, folder_path ):
+ try:
+ files_list = get_repository_files( trans, folder_path )
+ except OSError, e:
+ if str( e ).find( 'No such file or directory' ) >= 0:
+ # We have a repository with no contents.
+ return []
+ folder_contents = []
+ for filename in files_list:
+ is_folder = False
+ if filename and filename[-1] == os.sep:
+ is_folder = True
+ if filename:
+ full_path = os.path.join( folder_path, filename )
+ node = { "title": filename,
+ "isFolder": is_folder,
+ "isLazy": is_folder,
+ "tooltip": full_path,
+ "key": full_path }
+ folder_contents.append( node )
+ return folder_contents
+def remove_dir( dir ):
+ if os.path.exists( dir ):
+ try:
+ shutil.rmtree( dir )
+ except:
+ pass
+def reset_all_metadata_on_repository_in_tool_shed( trans, id ):
+ """Reset all metadata on a single repository in a tool shed."""
+ def reset_all_tool_versions( trans, id, repo ):
+ changeset_revisions = []
+ for changeset in repo.changelog:
+ changeset_revision = str( repo.changectx( changeset ) )
+ repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
+ if repository_metadata:
+ metadata = repository_metadata.metadata
+ if metadata:
+ if metadata.get( 'tools', None ):
+ changeset_revisions.append( changeset_revision )
+ # The list of changeset_revisions is now filtered to contain only those that are downloadable and contain tools.
+ # If a repository includes tools, build a dictionary of { 'tool id' : 'parent tool id' } pairs for each tool in each changeset revision.
+ for index, changeset_revision in enumerate( changeset_revisions ):
+ tool_versions_dict = {}
+ repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
+ metadata = repository_metadata.metadata
+ tool_dicts = metadata[ 'tools' ]
+ if index == 0:
+ # The first changeset_revision is a special case because it will have no ancestor changeset_revisions in which to match tools.
+ # The parent tool id for tools in the first changeset_revision will be the "old_id" in the tool config.
+ for tool_dict in tool_dicts:
+ tool_versions_dict[ tool_dict[ 'guid' ] ] = tool_dict[ 'id' ]
+ else:
+ for tool_dict in tool_dicts:
+ parent_id = get_parent_id( trans,
+ id,
+ tool_dict[ 'id' ],
+ tool_dict[ 'version' ],
+ tool_dict[ 'guid' ],
+ changeset_revisions[ 0:index ] )
+ tool_versions_dict[ tool_dict[ 'guid' ] ] = parent_id
+ if tool_versions_dict:
+ repository_metadata.tool_versions = tool_versions_dict
+ trans.sa_session.add( repository_metadata )
+ trans.sa_session.flush()
+ repository = get_repository_in_tool_shed( trans, id )
+ log.debug( "Resetting all metadata on repository: %s" % repository.name )
+ repo_dir = repository.repo_path( trans.app )
+ repo = hg.repository( get_configured_ui(), repo_dir )
+ repository_clone_url = generate_clone_url_for_repository_in_tool_shed( trans, repository )
+ # The list of changeset_revisions refers to repository_metadata records that have been created or updated. When the following loop
+ # completes, we'll delete all repository_metadata records for this repository that do not have a changeset_revision value in this list.
+ changeset_revisions = []
+ # When a new repository_metadata record is created, it always uses the values of metadata_changeset_revision and metadata_dict.
+ metadata_changeset_revision = None
+ metadata_dict = None
+ ancestor_changeset_revision = None
+ ancestor_metadata_dict = None
+ invalid_file_tups = []
+ home_dir = os.getcwd()
+ for changeset in repo.changelog:
+ work_dir = tempfile.mkdtemp()
+ current_changeset_revision = str( repo.changectx( changeset ) )
+ ctx = repo.changectx( changeset )
+ log.debug( "Cloning repository revision: %s", str( ctx.rev() ) )
+ cloned_ok, error_message = clone_repository( repository_clone_url, work_dir, str( ctx.rev() ) )
+ if cloned_ok:
+ log.debug( "Generating metadata for changset revision: %s", str( ctx.rev() ) )
+ current_metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( app=trans.app,
+ repository=repository,
+ repository_clone_url=repository_clone_url,
+ relative_install_dir=repo_dir,
+ repository_files_dir=work_dir,
+ resetting_all_metadata_on_repository=True,
+ updating_installed_repository=False,
+ persist=False )
+ if current_metadata_dict:
+ if not metadata_changeset_revision and not metadata_dict:
+ # We're at the first change set in the change log.
+ metadata_changeset_revision = current_changeset_revision
+ metadata_dict = current_metadata_dict
+ if ancestor_changeset_revision:
+ # Compare metadata from ancestor and current. The value of comparison will be one of:
+ # 'no metadata' - no metadata for either ancestor or current, so continue from current
+ # 'equal' - ancestor metadata is equivalent to current metadata, so continue from current
+ # 'subset' - ancestor metadata is a subset of current metadata, so continue from current
+ # 'not equal and not subset' - ancestor metadata is neither equal to nor a subset of current metadata, so persist ancestor metadata.
+ comparison = compare_changeset_revisions( ancestor_changeset_revision,
+ ancestor_metadata_dict,
+ current_changeset_revision,
+ current_metadata_dict )
+ if comparison in [ 'no metadata', 'equal', 'subset' ]:
+ ancestor_changeset_revision = current_changeset_revision
+ ancestor_metadata_dict = current_metadata_dict
+ elif comparison == 'not equal and not subset':
+ metadata_changeset_revision = ancestor_changeset_revision
+ metadata_dict = ancestor_metadata_dict
+ repository_metadata = create_or_update_repository_metadata( trans, id, repository, metadata_changeset_revision, metadata_dict )
+ changeset_revisions.append( metadata_changeset_revision )
+ ancestor_changeset_revision = current_changeset_revision
+ ancestor_metadata_dict = current_metadata_dict
+ else:
+ # We're at the beginning of the change log.
+ ancestor_changeset_revision = current_changeset_revision
+ ancestor_metadata_dict = current_metadata_dict
+ if not ctx.children():
+ metadata_changeset_revision = current_changeset_revision
+ metadata_dict = current_metadata_dict
+ # We're at the end of the change log.
+ repository_metadata = create_or_update_repository_metadata( trans, id, repository, metadata_changeset_revision, metadata_dict )
+ changeset_revisions.append( metadata_changeset_revision )
+ ancestor_changeset_revision = None
+ ancestor_metadata_dict = None
+ elif ancestor_metadata_dict:
+ # We reach here only if current_metadata_dict is empty and ancestor_metadata_dict is not.
+ if not ctx.children():
+ # We're at the end of the change log.
+ repository_metadata = create_or_update_repository_metadata( trans, id, repository, metadata_changeset_revision, metadata_dict )
+ changeset_revisions.append( metadata_changeset_revision )
+ ancestor_changeset_revision = None
+ ancestor_metadata_dict = None
+ remove_dir( work_dir )
+ # Delete all repository_metadata records for this repository that do not have a changeset_revision value in changeset_revisions.
+ clean_repository_metadata( trans, id, changeset_revisions )
+ # Set tool version information for all downloadable changeset revisions. Get the list of changeset revisions from the changelog.
+ reset_all_tool_versions( trans, id, repo )
+ # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
+ reset_tool_data_tables( trans.app )
+ return invalid_file_tups, metadata_dict
+def reset_metadata_on_selected_repositories( trans, **kwd ):
+ # This method is called from both Galaxy and the Tool Shed, so the CONTROLLER param is required.
+ repository_ids = util.listify( kwd.get( 'repository_ids', None ) )
+ CONTROLLER = kwd[ 'CONTROLLER' ]
+ message = ''
+ status = 'done'
+ if repository_ids:
+ successful_count = 0
+ unsuccessful_count = 0
+ for repository_id in repository_ids:
+ try:
+ if CONTROLLER == 'TOOL_SHED_ADMIN_CONTROLLER':
+ repository = get_repository_in_tool_shed( trans, repository_id )
+ invalid_file_tups, metadata_dict = reset_all_metadata_on_repository_in_tool_shed( trans, repository_id )
+ elif CONTROLLER == 'GALAXY_ADMIN_TOOL_SHED_CONTROLLER':
+ repository = get_installed_tool_shed_repository( trans, repository_id )
+ invalid_file_tups, metadata_dict = reset_all_metadata_on_installed_repository( trans, repository_id )
+ if invalid_file_tups:
+ message = generate_message_for_invalid_tools( trans, invalid_file_tups, repository, None, as_html=False )
+ log.debug( message )
+ unsuccessful_count += 1
+ else:
+ log.debug( "Successfully reset metadata on repository %s" % repository.name )
+ successful_count += 1
+ except Exception, e:
+ log.debug( "Error attempting to reset metadata on repository '%s': %s" % ( repository.name, str( e ) ) )
+ unsuccessful_count += 1
+ message = "Successfully reset metadata on %d %s. " % ( successful_count, inflector.cond_plural( successful_count, "repository" ) )
+ if unsuccessful_count:
+ message += "Error setting metadata on %d %s - see the paster log for details. " % ( unsuccessful_count,
+ inflector.cond_plural( unsuccessful_count, "repository" ) )
+ else:
+ message = 'Select at least one repository on which to reset all metadata.'
+ status = 'error'
+ return message, status
+def reset_tool_data_tables( app ):
+ # Reset the tool_data_tables to an empty dictionary.
+ app.tool_data_tables.data_tables = {}
+def reversed_lower_upper_bounded_changelog( repo, excluded_lower_bounds_changeset_revision, included_upper_bounds_changeset_revision ):
+ """
+ Return a reversed list of changesets in the repository changelog after the excluded_lower_bounds_changeset_revision, but up to and
+ including the included_upper_bounds_changeset_revision. The value of excluded_lower_bounds_changeset_revision will be the value of
+ INITIAL_CHANGELOG_HASH if no valid changesets exist before included_upper_bounds_changeset_revision.
+ """
+ # To set excluded_lower_bounds_changeset_revision, calling methods should do the following, where the value of changeset_revision
+ # is a downloadable changeset_revision.
+ # excluded_lower_bounds_changeset_revision = get_previous_downloadable_changset_revision( repository, repo, changeset_revision )
+ if excluded_lower_bounds_changeset_revision == INITIAL_CHANGELOG_HASH:
+ appending_started = True
+ else:
+ appending_started = False
+ reversed_changelog = []
+ for changeset in repo.changelog:
+ changeset_hash = str( repo.changectx( changeset ) )
+ if appending_started:
+ reversed_changelog.insert( 0, changeset )
+ if changeset_hash == excluded_lower_bounds_changeset_revision and not appending_started:
+ appending_started = True
+ if changeset_hash == included_upper_bounds_changeset_revision:
+ break
+ return reversed_changelog
+def reversed_upper_bounded_changelog( repo, included_upper_bounds_changeset_revision ):
+ return reversed_lower_upper_bounded_changelog( repo, INITIAL_CHANGELOG_HASH, included_upper_bounds_changeset_revision )
+def strip_path( fpath ):
+ if not fpath:
+ return fpath
+ try:
+ file_path, file_name = os.path.split( fpath )
+ except:
+ file_name = fpath
+ return file_name
+def update_repository( repo, ctx_rev=None ):
+ """
+ Update the cloned repository to changeset_revision. It is critical that the installed repository is updated to the desired
+ changeset_revision before metadata is set because the process for setting metadata uses the repository files on disk.
+ """
+ # TODO: We may have files on disk in the repo directory that aren't being tracked, so they must be removed.
+ # The codes used to show the status of files are as follows.
+ # M = modified
+ # A = added
+ # R = removed
+ # C = clean
+ # ! = deleted, but still tracked
+ # ? = not tracked
+ # I = ignored
+ # It would be nice if we could use mercurial's purge extension to remove untracked files. The problem is that
+ # purging is not supported by the mercurial API. See the deprecated update_for_browsing() method in common.py.
+ commands.update( get_configured_ui(),
+ repo,
+ rev=ctx_rev )
+def url_join( *args ):
+ parts = []
+ for arg in args:
+ parts.append( arg.strip( '/' ) )
+ return '/'.join( parts )
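A minimal usage sketch of two of the helpers above (not part of the commit; repo_path and the changeset hash are illustrative values, and the import assumes the Mercurial Python API this module already uses):

    import tempfile
    from mercurial import hg

    repo_path = '/path/to/a/local/repository'  # illustrative only
    repo = hg.repository( get_configured_ui(), repo_path )
    ctx = get_changectx_for_changeset( repo, '4a581a24f07d' )  # any changeset hash in the repo
    if ctx is not None:
        work_dir = tempfile.mkdtemp()
        # Writes the file's contents at that changeset to a named temp file and
        # returns its path, or returns None if the file is not in the changeset.
        tmp_filename = get_named_tmpfile_from_ctx( ctx, 'tool_data_table_conf.xml.sample', work_dir )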
diff -r ad6c2f4b3433eab6ba577aee8fcd88266121798f -r 7a51b701af8825baaf4aeb68f422304434c01a10 lib/galaxy/webapps/community/controllers/admin.py
--- a/lib/galaxy/webapps/community/controllers/admin.py
+++ b/lib/galaxy/webapps/community/controllers/admin.py
@@ -5,9 +5,7 @@
from galaxy.web.framework.helpers import time_ago, iff, grids
from galaxy.web.form_builder import SelectField
from galaxy.util import inflector
-# TODO: re-factor shed_util to eliminate the following restricted imports
-from galaxy.util.shed_util import build_repository_ids_select_field, get_changectx_for_changeset, get_configured_ui, get_repository_in_tool_shed
-from galaxy.util.shed_util import reset_metadata_on_selected_repositories, TOOL_SHED_ADMIN_CONTROLLER
+from galaxy.util.shed_util_common import *
from common import *
from repository import RepositoryGrid, CategoryGrid
diff -r ad6c2f4b3433eab6ba577aee8fcd88266121798f -r 7a51b701af8825baaf4aeb68f422304434c01a10 lib/galaxy/webapps/community/controllers/common.py
--- a/lib/galaxy/webapps/community/controllers/common.py
+++ b/lib/galaxy/webapps/community/controllers/common.py
@@ -5,13 +5,7 @@
from galaxy.tools import *
from galaxy.util.json import from_json_string, to_json_string
from galaxy.util.hash_util import *
-# TODO: re-factor shed_util to eliminate the following restricted imports
-from galaxy.util.shed_util import check_tool_input_params, clone_repository, concat_messages, copy_sample_file, create_or_update_repository_metadata
-from galaxy.util.shed_util import generate_clone_url_for_repository_in_tool_shed, generate_message_for_invalid_tools, generate_metadata_for_changeset_revision
-from galaxy.util.shed_util import get_changectx_for_changeset, get_config_from_disk, get_configured_ui, get_file_context_from_ctx, get_named_tmpfile_from_ctx
-from galaxy.util.shed_util import get_parent_id, get_repository_in_tool_shed, get_repository_metadata_by_changeset_revision
-from galaxy.util.shed_util import handle_sample_files_and_load_tool_from_disk, handle_sample_files_and_load_tool_from_tmp_config, INITIAL_CHANGELOG_HASH
-from galaxy.util.shed_util import is_downloadable, load_tool_from_config, remove_dir, reset_tool_data_tables, reversed_upper_bounded_changelog, strip_path
+from galaxy.util.shed_util_common import *
from galaxy.web.base.controller import *
from galaxy.web.base.controllers.admin import *
from galaxy.webapps.community import model
diff -r ad6c2f4b3433eab6ba577aee8fcd88266121798f -r 7a51b701af8825baaf4aeb68f422304434c01a10 lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -9,14 +9,7 @@
from galaxy.web.framework.helpers import time_ago, iff, grids
from galaxy.util.json import from_json_string, to_json_string
from galaxy.model.orm import *
-# TODO: re-factor shed_util to eliminate the following restricted imports
-from galaxy.util.shed_util import create_repo_info_dict, generate_clone_url_for_repository_in_tool_shed, generate_message_for_invalid_tools
-from galaxy.util.shed_util import get_changectx_for_changeset, get_configured_ui, get_file_from_changeset_revision
-from galaxy.util.shed_util import get_repository_file_contents, get_repository_in_tool_shed, get_repository_metadata_by_changeset_revision
-from galaxy.util.shed_util import handle_sample_files_and_load_tool_from_disk, handle_sample_files_and_load_tool_from_tmp_config
-from galaxy.util.shed_util import INITIAL_CHANGELOG_HASH, load_tool_from_config, NOT_TOOL_CONFIGS, open_repository_files_folder, remove_dir
-from galaxy.util.shed_util import reset_all_metadata_on_repository_in_tool_shed, reversed_lower_upper_bounded_changelog
-from galaxy.util.shed_util import reversed_upper_bounded_changelog, strip_path, to_html_escaped, update_repository, url_join
+from galaxy.util.shed_util_common import *
from galaxy.tool_shed.encoding_util import *
from common import *
@@ -1265,6 +1258,13 @@
trans.response.headers['Pragma'] = 'no-cache'
trans.response.headers['Expires'] = '0'
return get_repository_file_contents( file_path )
+ def get_file_from_changeset_revision( self, repo_files_dir, changeset_revision, file_name, dir ):
+ """Return file_name from the received changeset_revision of the repository manifest."""
+ stripped_file_name = strip_path( file_name )
+ repo = hg.repository( get_configured_ui(), repo_files_dir )
+ ctx = get_changectx_for_changeset( repo, changeset_revision )
+ named_tmp_file = get_named_tmpfile_from_ctx( ctx, file_name, dir )
+ return named_tmp_file
def get_metadata( self, trans, repository_id, changeset_revision ):
repository_metadata = get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision )
if repository_metadata and repository_metadata.metadata:
@@ -2231,6 +2231,17 @@
if list:
return ','.join( list )
return ''
+ def to_html_escaped( self, text ):
+ """Translates the characters in text to html values"""
+ translated = []
+ for c in text:
+ if c in [ '\r\n', '\n', ' ', '\t' ] or c in VALID_CHARS:
+ translated.append( c )
+ elif c in MAPPED_CHARS:
+ translated.append( MAPPED_CHARS[ c ] )
+ else:
+ translated.append( '' )
+ return ''.join( translated )
def __validate_repository_name( self, name, user ):
# Repository names must be unique for each user, must be at least four characters
# in length and must contain only lower-case letters, numbers, and the '_' character.
@@ -2304,7 +2315,7 @@
anchors = modified + added + removed + deleted + unknown + ignored + clean
diffs = []
for diff in patch.diff( repo, node1=ctx_parent.node(), node2=ctx.node() ):
- diffs.append( to_html_escaped( diff ) )
+ diffs.append( self.to_html_escaped( diff ) )
is_malicious = changeset_is_malicious( trans, id, repository.tip( trans.app ) )
metadata = self.get_metadata( trans, id, ctx_str )
return trans.fill_template( '/webapps/community/repository/view_changeset.mako',
@@ -2356,12 +2367,7 @@
except IOError:
work_dir = tempfile.mkdtemp()
try:
- manifest_readme_file = get_file_from_changeset_revision( trans.app,
- repository,
- repo_files_dir,
- changeset_revision,
- readme_file,
- work_dir )
+ manifest_readme_file = self.get_file_from_changeset_revision( repo_files_dir, changeset_revision, readme_file, work_dir )
f = open( manifest_readme_file, 'r' )
raw_text = f.read()
f.close()
diff -r ad6c2f4b3433eab6ba577aee8fcd88266121798f -r 7a51b701af8825baaf4aeb68f422304434c01a10 lib/galaxy/webapps/community/controllers/repository_review.py
--- a/lib/galaxy/webapps/community/controllers/repository_review.py
+++ b/lib/galaxy/webapps/community/controllers/repository_review.py
@@ -8,8 +8,7 @@
from sqlalchemy.sql.expression import func
from common import *
from repository import RepositoryGrid
-# TODO: re-factor shed_util to eliminate the following restricted imports
-from galaxy.util.shed_util import get_configured_ui, get_repository_in_tool_shed
+from galaxy.util.shed_util_common import *
from galaxy.util.odict import odict
from galaxy import eggs
diff -r ad6c2f4b3433eab6ba577aee8fcd88266121798f -r 7a51b701af8825baaf4aeb68f422304434c01a10 lib/galaxy/webapps/community/controllers/upload.py
--- a/lib/galaxy/webapps/community/controllers/upload.py
+++ b/lib/galaxy/webapps/community/controllers/upload.py
@@ -3,9 +3,7 @@
from galaxy.model.orm import *
from galaxy.datatypes.checkers import *
from common import *
-# TODO: re-factor shed_util to eliminate the following restricted imports
-from galaxy.util.shed_util import get_configured_ui, get_repository_in_tool_shed, reset_tool_data_tables, handle_sample_tool_data_table_conf_file
-from galaxy.util.shed_util import update_repository
+from galaxy.util.shed_util_common import *
from galaxy import eggs
eggs.require('mercurial')
diff -r ad6c2f4b3433eab6ba577aee8fcd88266121798f -r 7a51b701af8825baaf4aeb68f422304434c01a10 lib/galaxy/webapps/community/controllers/workflow.py
--- a/lib/galaxy/webapps/community/controllers/workflow.py
+++ b/lib/galaxy/webapps/community/controllers/workflow.py
@@ -10,8 +10,7 @@
from galaxy.webapps.galaxy.controllers.workflow import attach_ordered_steps
from galaxy.model.orm import *
from common import *
-# TODO: re-factor shed_util to eliminate the following restricted imports
-from galaxy.util.shed_util import get_repository_in_tool_shed
+from galaxy.util.shed_util_common import *
from galaxy.tool_shed.encoding_util import *
class RepoInputDataModule( InputDataModule ):
diff -r ad6c2f4b3433eab6ba577aee8fcd88266121798f -r 7a51b701af8825baaf4aeb68f422304434c01a10 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -2,6 +2,7 @@
from admin import *
from galaxy.util.json import from_json_string, to_json_string
from galaxy.util.shed_util import *
+from galaxy.util.shed_util_common import *
from galaxy.tool_shed.encoding_util import *
from galaxy import eggs, tools
diff -r ad6c2f4b3433eab6ba577aee8fcd88266121798f -r 7a51b701af8825baaf4aeb68f422304434c01a10 templates/webapps/community/repository/common.mako
--- a/templates/webapps/community/repository/common.mako
+++ b/templates/webapps/community/repository/common.mako
@@ -77,7 +77,7 @@
<%def name="render_clone_str( repository )"><%
- from galaxy.util.shed_util import generate_clone_url_for_repository_in_tool_shed
+ from galaxy.util.shed_util_common import generate_clone_url_for_repository_in_tool_shed
clone_str = generate_clone_url_for_repository_in_tool_shed( trans, repository )
%>
hg clone <a href="${clone_str}">${clone_str}</a>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
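The import consolidation in this changeset replaces the long restricted import lists with a single wildcard, so every public module-level name in shed_util_common becomes available to each controller. A minimal sketch of the effect (the call values are illustrative, not from the commit):

    from galaxy.util.shed_util_common import *

    strip_path( 'some/dir/tool_conf.xml' )                      # -> 'tool_conf.xml'
    url_join( 'http://example.org/', '/repos/', 'owner/name' )  # -> 'http://example.org/repos/owner/name'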
commit/galaxy-central: jgoecks: Extend tool panel to handle new parameters that have been added since job was run. Fix whitespace as well. Thanks to Jim Johnson for the inspiration.
by Bitbucket 15 Nov '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/ad6c2f4b3433/
changeset: ad6c2f4b3433
user: jgoecks
date: 2012-11-15 20:37:20
summary: Extend tool panel to handle new parameters that have been added since job was run. Fix whitespace as well. Thanks to Jim Johnson for the inspiration.
affected #: 1 file
diff -r 4a581a24f07d20e17ce5fbe4c5ddb6715abbe335 -r ad6c2f4b3433eab6ba577aee8fcd88266121798f templates/show_params.mako
--- a/templates/show_params.mako
+++ b/templates/show_params.mako
@@ -11,26 +11,42 @@
</style><%def name="inputs_recursive( input_params, param_values, depth=1 )">
- %for input_index, input in enumerate( input_params.itervalues() ):
- %if input.type == "repeat":
- %for i in range( len(param_values[input.name]) ):
- ${ inputs_recursive(input.inputs, param_values[input.name][i], depth=depth+1) }
- %endfor
- %elif input.type == "conditional":
- <% current_case = param_values[input.name]['__current_case__'] %>
- <tr>
- ${ inputs_recursive_indent( text=input.test_param.label,depth=depth )}
- <!-- Get the value of the current Conditonal parameter -->
- <td>${input.cases[current_case].value}</td>
- </tr>
- ${ inputs_recursive(input.cases[current_case].inputs, param_values[input.name], depth=depth+1) }
- %elif getattr(input, "label", None):
- <tr>
- ${inputs_recursive_indent( text=input.label,depth=depth )}
- <td>${input.value_to_display_text(param_values[input.name], trans.app)}</td>
- </tr>
- %endif
- %endfor
+ %for input_index, input in enumerate( input_params.itervalues() ):
+ %if input.name in param_values:
+ %if input.type == "repeat":
+ %for i in range( len(param_values[input.name]) ):
+ ${ inputs_recursive(input.inputs, param_values[input.name][i], depth=depth+1) }
+ %endfor
+ %elif input.type == "conditional":
+ <% current_case = param_values[input.name]['__current_case__'] %>
+ <tr>
+ ${ inputs_recursive_indent( text=input.test_param.label, depth=depth )}
+ <!-- Get the value of the current Conditonal parameter -->
+ <td>${input.cases[current_case].value}</td>
+ </tr>
+ ${ inputs_recursive(input.cases[current_case].inputs, param_values[input.name], depth=depth+1) }
+ %elif getattr(input, "label", None):
+ <tr>
+ ${inputs_recursive_indent( text=input.label, depth=depth )}
+ <td>${input.value_to_display_text(param_values[input.name], trans.app)}</td>
+ </tr>
+ %endif
+ %else:
+ ## Parameter does not have a stored value.
+ <tr>
+ <%
+ # Get parameter label.
+ if input.type == "conditional":
+ label = input.test_param.label
+ else:
+ label = input.label
+ %>
+ ${inputs_recursive_indent( text=label, depth=depth )}
+ <td><em>not used (parameter was added after this job was run)</em></td>
+ </tr>
+ %endif
+
+ %endfor
</%def>
## function to add an indentation depending on the depth in a <tr>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
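The show_params.mako change above boils down to guarding each tool parameter on membership in the job's saved values before rendering it. A sketch of that logic in plain Python (render_param_rows, inputs, and param_values are stand-ins for the Mako context, not Galaxy API):

    def render_param_rows( inputs, param_values ):
        # inputs: parameter name -> tool input object; param_values: values stored when the job ran.
        rows = []
        for name, input in inputs.items():
            if name in param_values:
                rows.append( ( input.label, param_values[ name ] ) )
            else:
                # The parameter was added to the tool after this job ran, so it has no stored value.
                rows.append( ( input.label, 'not used (parameter was added after this job was run)' ) )
        return rows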
commit/galaxy-central: carlfeberhard: history panel: fix to packed template name
by Bitbucket 15 Nov '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/4a581a24f07d/
changeset: 4a581a24f07d
user: carlfeberhard
date: 2012-11-15 19:43:54
summary: history panel: fix to packed template name
affected #: 2 files
diff -r b0afeaf880fc9913abc1ddffab25b953f2359fea -r 4a581a24f07d20e17ce5fbe4c5ddb6715abbe335 static/scripts/packed/templates/compiled/template-hda-failedMetaData.js
--- a/static/scripts/packed/templates/compiled/template-hda-failedMetaData.js
+++ /dev/null
@@ -1,1 +0,0 @@
-(function(){var b=Handlebars.template,a=Handlebars.templates=Handlebars.templates||{};a["template-hda-failedMetadata"]=b(function(g,m,f,l,k){f=f||g.helpers;var c,i,o=this,h="function",n=f.blockHelperMissing,j=this.escapeExpression;function e(t,s){var q="",r,p;q+="\n";p=f.local;if(p){r=p.call(t,{hash:{},inverse:o.noop,fn:o.program(2,d,s)})}else{r=t.local;r=typeof r===h?r():r}if(!f.local){r=n.call(t,r,{hash:{},inverse:o.noop,fn:o.program(2,d,s)})}if(r||r===0){q+=r}q+='\nYou may be able to <a href="';r=t.urls;r=r==null||r===false?r:r.edit;r=typeof r===h?r():r;q+=j(r)+'" target="galaxy_main">set it manually or retry auto-detection</a>.\n';return q}function d(q,p){return"An error occurred setting the metadata for this dataset."}i=f.warningmessagesmall;if(i){c=i.call(m,{hash:{},inverse:o.noop,fn:o.program(1,e,k)})}else{c=m.warningmessagesmall;c=typeof c===h?c():c}if(!f.warningmessagesmall){c=n.call(m,c,{hash:{},inverse:o.noop,fn:o.program(1,e,k)})}if(c||c===0){return c}else{return""}})})();
\ No newline at end of file
diff -r b0afeaf880fc9913abc1ddffab25b953f2359fea -r 4a581a24f07d20e17ce5fbe4c5ddb6715abbe335 static/scripts/packed/templates/compiled/template-hda-failedMetadata.js
--- /dev/null
+++ b/static/scripts/packed/templates/compiled/template-hda-failedMetadata.js
@@ -0,0 +1,1 @@
+(function(){var b=Handlebars.template,a=Handlebars.templates=Handlebars.templates||{};a["template-hda-failedMetadata"]=b(function(g,m,f,l,k){f=f||g.helpers;var c,i,o=this,h="function",n=f.blockHelperMissing,j=this.escapeExpression;function e(t,s){var q="",r,p;q+="\n";p=f.local;if(p){r=p.call(t,{hash:{},inverse:o.noop,fn:o.program(2,d,s)})}else{r=t.local;r=typeof r===h?r():r}if(!f.local){r=n.call(t,r,{hash:{},inverse:o.noop,fn:o.program(2,d,s)})}if(r||r===0){q+=r}q+='\nYou may be able to <a href="';r=t.urls;r=r==null||r===false?r:r.edit;r=typeof r===h?r():r;q+=j(r)+'" target="galaxy_main">set it manually or retry auto-detection</a>.\n';return q}function d(q,p){return"An error occurred setting the metadata for this dataset."}i=f.warningmessagesmall;if(i){c=i.call(m,{hash:{},inverse:o.noop,fn:o.program(1,e,k)})}else{c=m.warningmessagesmall;c=typeof c===h?c():c}if(!f.warningmessagesmall){c=n.call(m,c,{hash:{},inverse:o.noop,fn:o.program(1,e,k)})}if(c||c===0){return c}else{return""}})})();
\ No newline at end of file
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/dae89d6c6e82/
changeset: dae89d6c6e82
user: carlfeberhard
date: 2012-11-15 19:11:21
summary: history panel: fix to template name
affected #: 1 file
diff -r 8e52e426487bf732d051e538ac4cd7d352aa5065 -r dae89d6c6e82a095cd07d0ffa124f76d659863bc static/scripts/templates/compiled/template-hda-failedMetadata.js
--- /dev/null
+++ b/static/scripts/templates/compiled/template-hda-failedMetadata.js
@@ -0,0 +1,33 @@
+(function() {
+ var template = Handlebars.template, templates = Handlebars.templates = Handlebars.templates || {};
+templates['template-hda-failedMetadata'] = template(function (Handlebars,depth0,helpers,partials,data) {
+ helpers = helpers || Handlebars.helpers;
+ var stack1, foundHelper, self=this, functionType="function", blockHelperMissing=helpers.blockHelperMissing, escapeExpression=this.escapeExpression;
+
+function program1(depth0,data) {
+
+ var buffer = "", stack1, foundHelper;
+ buffer += "\n";
+ foundHelper = helpers.local;
+ if (foundHelper) { stack1 = foundHelper.call(depth0, {hash:{},inverse:self.noop,fn:self.program(2, program2, data)}); }
+ else { stack1 = depth0.local; stack1 = typeof stack1 === functionType ? stack1() : stack1; }
+ if (!helpers.local) { stack1 = blockHelperMissing.call(depth0, stack1, {hash:{},inverse:self.noop,fn:self.program(2, program2, data)}); }
+ if(stack1 || stack1 === 0) { buffer += stack1; }
+ buffer += "\nYou may be able to <a href=\"";
+ stack1 = depth0.urls;
+ stack1 = stack1 == null || stack1 === false ? stack1 : stack1.edit;
+ stack1 = typeof stack1 === functionType ? stack1() : stack1;
+ buffer += escapeExpression(stack1) + "\" target=\"galaxy_main\">set it manually or retry auto-detection</a>.\n";
+ return buffer;}
+function program2(depth0,data) {
+
+
+ return "An error occurred setting the metadata for this dataset.";}
+
+ foundHelper = helpers.warningmessagesmall;
+ if (foundHelper) { stack1 = foundHelper.call(depth0, {hash:{},inverse:self.noop,fn:self.program(1, program1, data)}); }
+ else { stack1 = depth0.warningmessagesmall; stack1 = typeof stack1 === functionType ? stack1() : stack1; }
+ if (!helpers.warningmessagesmall) { stack1 = blockHelperMissing.call(depth0, stack1, {hash:{},inverse:self.noop,fn:self.program(1, program1, data)}); }
+ if(stack1 || stack1 === 0) { return stack1; }
+ else { return ''; }});
+})();
\ No newline at end of file
https://bitbucket.org/galaxy/galaxy-central/changeset/b0afeaf880fc/
changeset: b0afeaf880fc
user: carlfeberhard
date: 2012-11-15 19:11:56
summary: pack scripts
affected #: 4 files
diff -r dae89d6c6e82a095cd07d0ffa124f76d659863bc -r b0afeaf880fc9913abc1ddffab25b953f2359fea static/scripts/packed/mvc/dataset/hda-base.js
--- a/static/scripts/packed/mvc/dataset/hda-base.js
+++ b/static/scripts/packed/mvc/dataset/hda-base.js
@@ -1,1 +1,1 @@
-var HDABaseView=BaseView.extend(LoggableMixin).extend({tagName:"div",className:"historyItemContainer",initialize:function(a){this.log(this+".initialize:",a);this.defaultPrimaryActionButtonRenderers=[this._render_showParamsButton];if(!a.urlTemplates){throw ("HDAView needs urlTemplates on initialize")}this.urls=this._renderUrls(a.urlTemplates,this.model.toJSON());this.expanded=a.expanded||false;this.model.bind("change",this.render,this)},_renderUrls:function(d,a){var b=this,c={};_.each(d,function(e,f){if(_.isObject(e)){c[f]=b._renderUrls(e,a)}else{if(f==="meta_download"){c[f]=b._renderMetaDownloadUrls(e,a)}else{c[f]=_.template(e,a)}}});return c},_renderMetaDownloadUrls:function(b,a){return _.map(a.meta_files,function(c){return{url:_.template(b,{id:a.id,file_type:c.file_type}),file_type:c.file_type}})},render:function(){var b=this,e=this.model.get("id"),c=this.model.get("state"),a=$("<div/>").attr("id","historyItem-"+e),d=(this.$el.children().size()===0);this.$el.attr("id","historyItemContainer-"+e);a.addClass("historyItemWrapper").addClass("historyItem").addClass("historyItem-"+c);a.append(this._render_warnings());a.append(this._render_titleBar());this.body=$(this._render_body());a.append(this.body);make_popup_menus(a);a.find(".tooltip").tooltip({placement:"bottom"});this.$el.fadeOut("fast",function(){b.$el.children().remove();b.$el.append(a).fadeIn("fast",function(){b.log(b+" rendered:",b.$el);var f="rendered";if(d){f+=":initial"}else{if(b.model.inReadyState()){f+=":ready"}}b.trigger(f)})});return this},_render_warnings:function(){return $(jQuery.trim(HDABaseView.templates.messages(this.model.toJSON())))},_render_titleBar:function(){var a=$('<div class="historyItemTitleBar" style="overflow: hidden"></div>');a.append(this._render_titleButtons());a.append('<span class="state-icon"></span>');a.append(this._render_titleLink());return a},_render_titleButtons:function(){var a=$('<div class="historyItemButtons"></div>');a.append(this._render_displayButton());return a},_render_displayButton:function(){if((!this.model.inReadyState())||(this.model.get("state")===HistoryDatasetAssociation.STATES.ERROR)||(this.model.get("state")===HistoryDatasetAssociation.STATES.NOT_VIEWABLE)||(!this.model.get("accessible"))){this.displayButton=null;return null}var a={icon_class:"display",target:"galaxy_main"};if(this.model.get("purged")){a.enabled=false;a.title=_l("Cannot display datasets removed from disk")}else{a.title=_l("Display data in browser");a.href=this.urls.display}this.displayButton=new IconButtonView({model:new IconButton(a)});return this.displayButton.render().$el},_render_titleLink:function(){return $(jQuery.trim(HDABaseView.templates.titleLink(_.extend(this.model.toJSON(),{urls:this.urls}))))},_render_hdaSummary:function(){var a=_.extend(this.model.toJSON(),{urls:this.urls});return HDABaseView.templates.hdaSummary(a)},_render_primaryActionButtons:function(c){var a=this,b=$("<div/>").attr("id","primary-actions-"+this.model.get("id"));_.each(c,function(d){b.append(d.call(a))});return b},_render_downloadButton:function(){if(this.model.get("purged")||!this.model.hasData()){return null}var a=HDABaseView.templates.downloadLinks(_.extend(this.model.toJSON(),{urls:this.urls}));return $(a)},_render_showParamsButton:function(){this.showParamsButton=new IconButtonView({model:new IconButton({title:_l("View details"),href:this.urls.show_params,target:"galaxy_main",icon_class:"information"})});return this.showParamsButton.render().$el},_render_displayApps:function(){if(!this.model.hasData()){return null}var 
a=$("<div/>").addClass("display-apps");if(!_.isEmpty(this.model.get("display_types"))){a.append(HDABaseView.templates.displayApps({displayApps:this.model.get("display_types")}))}if(!_.isEmpty(this.model.get("display_apps"))){a.append(HDABaseView.templates.displayApps({displayApps:this.model.get("display_apps")}))}return a},_render_peek:function(){if(!this.model.get("peek")){return null}return $("<div/>").append($("<pre/>").attr("id","peek"+this.model.get("id")).addClass("peek").append(this.model.get("peek")))},_render_body:function(){var a=$("<div/>").attr("id","info-"+this.model.get("id")).addClass("historyItemBody").attr("style","display: block");switch(this.model.get("state")){case HistoryDatasetAssociation.STATES.NOT_VIEWABLE:this._render_body_not_viewable(a);break;case HistoryDatasetAssociation.STATES.UPLOAD:this._render_body_uploading(a);break;case HistoryDatasetAssociation.STATES.QUEUED:this._render_body_queued(a);break;case HistoryDatasetAssociation.STATES.RUNNING:this._render_body_running(a);break;case HistoryDatasetAssociation.STATES.ERROR:this._render_body_error(a);break;case HistoryDatasetAssociation.STATES.DISCARDED:this._render_body_discarded(a);break;case HistoryDatasetAssociation.STATES.SETTING_METADATA:this._render_body_setting_metadata(a);break;case HistoryDatasetAssociation.STATES.EMPTY:this._render_body_empty(a);break;case HistoryDatasetAssociation.STATES.FAILED_METADATA:this._render_body_failed_metadata(a);break;case HistoryDatasetAssociation.STATES.OK:this._render_body_ok(a);break;default:a.append($('<div>Error: unknown dataset state "'+state+'".</div>'))}a.append('<div style="clear: both"></div>');if(this.expanded){a.show()}else{a.hide()}return a},_render_body_not_viewable:function(a){a.append($("<div>"+_l("You do not have permission to view dataset")+".</div>"))},_render_body_uploading:function(a){a.append($("<div>"+_l("Dataset is uploading")+"</div>"))},_render_body_queued:function(a){a.append($("<div>"+_l("Job is waiting to run")+".</div>"));a.append(this._render_primaryActionButtons(this.defaultPrimaryActionButtonRenderers))},_render_body_running:function(a){a.append("<div>"+_l("Job is currently running")+".</div>");a.append(this._render_primaryActionButtons(this.defaultPrimaryActionButtonRenderers))},_render_body_error:function(a){if(!this.model.get("purged")){a.append($("<div>"+this.model.get("misc_blurb")+"</div>"))}a.append((_l("An error occurred running this job")+": <i>"+$.trim(this.model.get("misc_info"))+"</i>"));a.append(this._render_primaryActionButtons(this.defaultPrimaryActionButtonRenderers.concat([this._render_downloadButton])))},_render_body_discarded:function(a){a.append("<div>"+_l("The job creating this dataset was cancelled before completion")+".</div>");a.append(this._render_primaryActionButtons(this.defaultPrimaryActionButtonRenderers))},_render_body_setting_metadata:function(a){a.append($("<div>"+_l("Metadata is being auto-detected")+".</div>"))},_render_body_empty:function(a){a.append($("<div>"+_l("No data")+": 
<i>"+this.model.get("misc_blurb")+"</i></div>"));a.append(this._render_primaryActionButtons(this.defaultPrimaryActionButtonRenderers))},_render_body_failed_metadata:function(a){a.append($(HDABaseView.templates.failedMetadata(this.model.toJSON())));this._render_body_ok(a)},_render_body_ok:function(a){a.append(this._render_hdaSummary());if(this.model.isDeletedOrPurged()){a.append(this._render_primaryActionButtons([this._render_downloadButton,this._render_showParamsButton]));return}a.append(this._render_primaryActionButtons([this._render_downloadButton,this._render_showParamsButton]));a.append('<div class="clear"/>');a.append(this._render_displayApps());a.append(this._render_peek())},events:{"click .historyItemTitle":"toggleBodyVisibility"},toggleBodyVisibility:function(c,a){var b=this,d=this.$el.find(".historyItemBody");a=(a===undefined)?(!d.is(":visible")):(a);if(a){d.slideDown("fast",function(){b.trigger("body-expanded",b.model.get("id"))})}else{d.slideUp("fast",function(){b.trigger("body-collapsed",b.model.get("id"))})}},toString:function(){var a=(this.model)?(this.model+""):("(no model)");return"HDABaseView("+a+")"}});HDABaseView.templates={warningMsg:Handlebars.templates["template-warningmessagesmall"],messages:Handlebars.templates["template-hda-warning-messages"],titleLink:Handlebars.templates["template-hda-titleLink"],hdaSummary:Handlebars.templates["template-hda-hdaSummary"],downloadLinks:Handlebars.templates["template-hda-downloadLinks"],failedMetadata:Handlebars.templates["template-hda-failedMetaData"],displayApps:Handlebars.templates["template-hda-displayApps"]};
\ No newline at end of file
+var HDABaseView=BaseView.extend(LoggableMixin).extend({tagName:"div",className:"historyItemContainer",initialize:function(a){this.log(this+".initialize:",a);this.defaultPrimaryActionButtonRenderers=[this._render_showParamsButton];if(!a.urlTemplates){throw ("HDAView needs urlTemplates on initialize")}this.urls=this._renderUrls(a.urlTemplates,this.model.toJSON());this.expanded=a.expanded||false;this.model.bind("change",this.render,this)},_renderUrls:function(d,a){var b=this,c={};_.each(d,function(e,f){if(_.isObject(e)){c[f]=b._renderUrls(e,a)}else{if(f==="meta_download"){c[f]=b._renderMetaDownloadUrls(e,a)}else{c[f]=_.template(e,a)}}});return c},_renderMetaDownloadUrls:function(b,a){return _.map(a.meta_files,function(c){return{url:_.template(b,{id:a.id,file_type:c.file_type}),file_type:c.file_type}})},render:function(){var b=this,e=this.model.get("id"),c=this.model.get("state"),a=$("<div/>").attr("id","historyItem-"+e),d=(this.$el.children().size()===0);this.$el.attr("id","historyItemContainer-"+e);a.addClass("historyItemWrapper").addClass("historyItem").addClass("historyItem-"+c);a.append(this._render_warnings());a.append(this._render_titleBar());this.body=$(this._render_body());a.append(this.body);make_popup_menus(a);a.find(".tooltip").tooltip({placement:"bottom"});this.$el.fadeOut("fast",function(){b.$el.children().remove();b.$el.append(a).fadeIn("fast",function(){b.log(b+" rendered:",b.$el);var f="rendered";if(d){f+=":initial"}else{if(b.model.inReadyState()){f+=":ready"}}b.trigger(f)})});return this},_render_warnings:function(){return $(jQuery.trim(HDABaseView.templates.messages(this.model.toJSON())))},_render_titleBar:function(){var a=$('<div class="historyItemTitleBar" style="overflow: hidden"></div>');a.append(this._render_titleButtons());a.append('<span class="state-icon"></span>');a.append(this._render_titleLink());return a},_render_titleButtons:function(){var a=$('<div class="historyItemButtons"></div>');a.append(this._render_displayButton());return a},_render_displayButton:function(){if((!this.model.inReadyState())||(this.model.get("state")===HistoryDatasetAssociation.STATES.ERROR)||(this.model.get("state")===HistoryDatasetAssociation.STATES.NOT_VIEWABLE)||(!this.model.get("accessible"))){this.displayButton=null;return null}var a={icon_class:"display",target:"galaxy_main"};if(this.model.get("purged")){a.enabled=false;a.title=_l("Cannot display datasets removed from disk")}else{a.title=_l("Display data in browser");a.href=this.urls.display}this.displayButton=new IconButtonView({model:new IconButton(a)});return this.displayButton.render().$el},_render_titleLink:function(){return $(jQuery.trim(HDABaseView.templates.titleLink(_.extend(this.model.toJSON(),{urls:this.urls}))))},_render_hdaSummary:function(){var a=_.extend(this.model.toJSON(),{urls:this.urls});return HDABaseView.templates.hdaSummary(a)},_render_primaryActionButtons:function(c){var a=this,b=$("<div/>").attr("id","primary-actions-"+this.model.get("id"));_.each(c,function(d){b.append(d.call(a))});return b},_render_downloadButton:function(){if(this.model.get("purged")||!this.model.hasData()){return null}var a=HDABaseView.templates.downloadLinks(_.extend(this.model.toJSON(),{urls:this.urls}));return $(a)},_render_showParamsButton:function(){this.showParamsButton=new IconButtonView({model:new IconButton({title:_l("View details"),href:this.urls.show_params,target:"galaxy_main",icon_class:"information"})});return this.showParamsButton.render().$el},_render_displayApps:function(){if(!this.model.hasData()){return null}var 
a=$("<div/>").addClass("display-apps");if(!_.isEmpty(this.model.get("display_types"))){a.append(HDABaseView.templates.displayApps({displayApps:this.model.get("display_types")}))}if(!_.isEmpty(this.model.get("display_apps"))){a.append(HDABaseView.templates.displayApps({displayApps:this.model.get("display_apps")}))}return a},_render_peek:function(){if(!this.model.get("peek")){return null}return $("<div/>").append($("<pre/>").attr("id","peek"+this.model.get("id")).addClass("peek").append(this.model.get("peek")))},_render_body:function(){var a=$("<div/>").attr("id","info-"+this.model.get("id")).addClass("historyItemBody").attr("style","display: block");switch(this.model.get("state")){case HistoryDatasetAssociation.STATES.NOT_VIEWABLE:this._render_body_not_viewable(a);break;case HistoryDatasetAssociation.STATES.UPLOAD:this._render_body_uploading(a);break;case HistoryDatasetAssociation.STATES.QUEUED:this._render_body_queued(a);break;case HistoryDatasetAssociation.STATES.RUNNING:this._render_body_running(a);break;case HistoryDatasetAssociation.STATES.ERROR:this._render_body_error(a);break;case HistoryDatasetAssociation.STATES.DISCARDED:this._render_body_discarded(a);break;case HistoryDatasetAssociation.STATES.SETTING_METADATA:this._render_body_setting_metadata(a);break;case HistoryDatasetAssociation.STATES.EMPTY:this._render_body_empty(a);break;case HistoryDatasetAssociation.STATES.FAILED_METADATA:this._render_body_failed_metadata(a);break;case HistoryDatasetAssociation.STATES.OK:this._render_body_ok(a);break;default:a.append($('<div>Error: unknown dataset state "'+state+'".</div>'))}a.append('<div style="clear: both"></div>');if(this.expanded){a.show()}else{a.hide()}return a},_render_body_not_viewable:function(a){a.append($("<div>"+_l("You do not have permission to view dataset")+".</div>"))},_render_body_uploading:function(a){a.append($("<div>"+_l("Dataset is uploading")+"</div>"))},_render_body_queued:function(a){a.append($("<div>"+_l("Job is waiting to run")+".</div>"));a.append(this._render_primaryActionButtons(this.defaultPrimaryActionButtonRenderers))},_render_body_running:function(a){a.append("<div>"+_l("Job is currently running")+".</div>");a.append(this._render_primaryActionButtons(this.defaultPrimaryActionButtonRenderers))},_render_body_error:function(a){if(!this.model.get("purged")){a.append($("<div>"+this.model.get("misc_blurb")+"</div>"))}a.append((_l("An error occurred running this job")+": <i>"+$.trim(this.model.get("misc_info"))+"</i>"));a.append(this._render_primaryActionButtons(this.defaultPrimaryActionButtonRenderers.concat([this._render_downloadButton])))},_render_body_discarded:function(a){a.append("<div>"+_l("The job creating this dataset was cancelled before completion")+".</div>");a.append(this._render_primaryActionButtons(this.defaultPrimaryActionButtonRenderers))},_render_body_setting_metadata:function(a){a.append($("<div>"+_l("Metadata is being auto-detected")+".</div>"))},_render_body_empty:function(a){a.append($("<div>"+_l("No data")+": 
<i>"+this.model.get("misc_blurb")+"</i></div>"));a.append(this._render_primaryActionButtons(this.defaultPrimaryActionButtonRenderers))},_render_body_failed_metadata:function(a){a.append($(HDABaseView.templates.failedMetadata(this.model.toJSON())));this._render_body_ok(a)},_render_body_ok:function(a){a.append(this._render_hdaSummary());if(this.model.isDeletedOrPurged()){a.append(this._render_primaryActionButtons([this._render_downloadButton,this._render_showParamsButton]));return}a.append(this._render_primaryActionButtons([this._render_downloadButton,this._render_showParamsButton]));a.append('<div class="clear"/>');a.append(this._render_displayApps());a.append(this._render_peek())},events:{"click .historyItemTitle":"toggleBodyVisibility"},toggleBodyVisibility:function(c,a){var b=this,d=this.$el.find(".historyItemBody");a=(a===undefined)?(!d.is(":visible")):(a);if(a){d.slideDown("fast",function(){b.trigger("body-expanded",b.model.get("id"))})}else{d.slideUp("fast",function(){b.trigger("body-collapsed",b.model.get("id"))})}},toString:function(){var a=(this.model)?(this.model+""):("(no model)");return"HDABaseView("+a+")"}});HDABaseView.templates={warningMsg:Handlebars.templates["template-warningmessagesmall"],messages:Handlebars.templates["template-hda-warning-messages"],titleLink:Handlebars.templates["template-hda-titleLink"],hdaSummary:Handlebars.templates["template-hda-hdaSummary"],downloadLinks:Handlebars.templates["template-hda-downloadLinks"],failedMetadata:Handlebars.templates["template-hda-failedMetadata"],displayApps:Handlebars.templates["template-hda-displayApps"]};
\ No newline at end of file
diff -r dae89d6c6e82a095cd07d0ffa124f76d659863bc -r b0afeaf880fc9913abc1ddffab25b953f2359fea static/scripts/packed/mvc/history/history-panel.js
--- a/static/scripts/packed/mvc/history/history-panel.js
+++ b/static/scripts/packed/mvc/history/history-panel.js
@@ -1,1 +1,1 @@
-var HistoryPanel=BaseView.extend(LoggableMixin).extend({el:"body.historyPage",HDAView:HDAEditView,events:{"click #history-tag":"loadAndDisplayTags"},initialize:function(a){this.log(this+".initialize:",a);if(!a.urlTemplates){throw (this+" needs urlTemplates on initialize")}if(!a.urlTemplates.history){throw (this+" needs urlTemplates.history on initialize")}if(!a.urlTemplates.hda){throw (this+" needs urlTemplates.hda on initialize")}this.urlTemplates=a.urlTemplates.history;this.hdaUrlTemplates=a.urlTemplates.hda;this._setUpWebStorage(a.initiallyExpanded,a.show_deleted,a.show_hidden);this.model.bind("change:nice_size",this.updateHistoryDiskSize,this);this.model.hdas.bind("add",this.add,this);this.model.hdas.bind("reset",this.addAll,this);this.hdaViews={};this.urls={}},_setUpWebStorage:function(b,a,c){this.storage=new PersistantStorage("HistoryView."+this.model.get("id"),{expandedHdas:{},show_deleted:false,show_hidden:false});this.log("this.storage:",this.storage.get());if(b){this.storage.set("exandedHdas",b)}if((a===true)||(a===false)){this.storage.set("show_deleted",a)}if((c===true)||(c===false)){this.storage.set("show_hidden",c)}this.show_deleted=this.storage.get("show_deleted");this.show_hidden=this.storage.get("show_hidden");this.log("(init'd) this.storage:",this.storage.get())},add:function(a){},addAll:function(){this.render()},render:function(){var b=this,d=b.toString()+".set-up",c=$("<div/>"),a=this.model.toJSON(),e=(this.$el.children().size()===0);a.urls=this._renderUrls(a);c.append(HistoryPanel.templates.historyPanel(a));c.find(".tooltip").tooltip({placement:"bottom"});this._setUpActionButton(c.find("#history-action-popup"));if(!this.model.hdas.length||!this.renderItems(c.find("#"+this.model.get("id")+"-datasets"))){c.find("#emptyHistoryMessage").show()}$(b).queue(d,function(f){b.$el.fadeOut("fast",function(){f()})});$(b).queue(d,function(f){b.$el.html("");b.$el.append(c.children());b.$el.fadeIn("fast",function(){f()})});$(b).queue(d,function(f){this.log(b+" rendered:",b.$el);b._setUpBehaviours();if(e){b.trigger("rendered:initial")}else{b.trigger("rendered")}f()});$(b).dequeue(d);return this},_renderUrls:function(a){var b=this;b.urls={};_.each(this.urlTemplates,function(d,c){b.urls[c]=_.template(d,a)});return b.urls},_setUpActionButton:function(e){var c=this,d=(this.storage.get("show_deleted"))?("Hide deleted"):("Show deleted"),a=(this.storage.get("show_hidden"))?("Hide hidden"):("Show hidden"),b={};b[_l("refresh")]=function(){window.location.reload()};b[_l("collapse all")]=function(){c.hideAllHdaBodies()};b[_l(d)]=function(){c.toggleShowDeleted()};b[_l(a)]=function(){c.toggleShowHidden()};make_popupmenu(e,b)},renderItems:function(b){this.hdaViews={};var a=this,c=this.model.hdas.getVisible(this.storage.get("show_deleted"),this.storage.get("show_hidden"));_.each(c,function(f){var e=f.get("id"),d=a.storage.get("expandedHdas").get(e);a.hdaViews[e]=new a.HDAView({model:f,expanded:d,urlTemplates:a.hdaUrlTemplates});a._setUpHdaListeners(a.hdaViews[e]);b.prepend(a.hdaViews[e].render().$el)});return c.length},_setUpHdaListeners:function(b){var a=this;b.bind("body-expanded",function(c){a.storage.get("expandedHdas").set(c,true)});b.bind("body-collapsed",function(c){a.storage.get("expandedHdas").deleteKey(c)})},_setUpBehaviours:function(){if(!(this.model.get("user")&&this.model.get("user").email)){return}var a=this.$("#history-annotation-area");this.$("#history-annotate").click(function(){if(a.is(":hidden")){a.slideDown("fast")}else{a.slideUp("fast")}return 
false});async_save_text("history-name-container","history-name",this.urls.rename,"new_name",18);async_save_text("history-annotation-container","history-annotation",this.urls.annotate,"new_annotation",18,true,4)},updateHistoryDiskSize:function(){this.$el.find("#history-size").text(this.model.get("nice_size"))},showQuotaMessage:function(){var a=this.$el.find("#quota-message-container");if(a.is(":hidden")){a.slideDown("fast")}},hideQuotaMessage:function(){var a=this.$el.find("#quota-message-container");if(!a.is(":hidden")){a.slideUp("fast")}},toggleShowDeleted:function(){this.storage.set("show_deleted",!this.storage.get("show_deleted"));this.render()},toggleShowHidden:function(){this.storage.set("show_hidden",!this.storage.get("show_hidden"));this.render()},hideAllHdaBodies:function(){_.each(this.hdaViews,function(a){a.toggleBodyVisibility(null,false)});this.storage.set("expandedHdas",{})},loadAndDisplayTags:function(c){this.log(this+".loadAndDisplayTags",c);var d=this.$el.find("#history-tag-area"),b=d.find(".tag-elt");this.log("\t tagArea",d," tagElt",b);if(d.is(":hidden")){if(!jQuery.trim(b.html())){var a=this;$.ajax({url:a.urls.tag,error:function(){alert(_l("Tagging failed"))},success:function(e){b.html(e);b.find(".tooltip").tooltip();d.slideDown("fast")}})}else{d.slideDown("fast")}}else{d.slideUp("fast")}return false},toString:function(){var a=this.model.get("name")||"";return"HistoryPanel("+a+")"}});HistoryPanel.templates={historyPanel:Handlebars.templates["template-history-historyPanel"]};
\ No newline at end of file
+var HistoryPanel=BaseView.extend(LoggableMixin).extend({el:"body.historyPage",HDAView:HDAEditView,events:{"click #history-refresh":function(){window.location.reload()},"click #history-tag":"loadAndDisplayTags"},initialize:function(a){this.log(this+".initialize:",a);if(!a.urlTemplates){throw (this+" needs urlTemplates on initialize")}if(!a.urlTemplates.history){throw (this+" needs urlTemplates.history on initialize")}if(!a.urlTemplates.hda){throw (this+" needs urlTemplates.hda on initialize")}this.urlTemplates=a.urlTemplates.history;this.hdaUrlTemplates=a.urlTemplates.hda;this._setUpWebStorage(a.initiallyExpanded,a.show_deleted,a.show_hidden);this.model.bind("change:nice_size",this.updateHistoryDiskSize,this);this.model.hdas.bind("add",this.add,this);this.model.hdas.bind("reset",this.addAll,this);this.hdaViews={};this.urls={}},_setUpWebStorage:function(b,a,c){this.storage=new PersistantStorage("HistoryView."+this.model.get("id"),{expandedHdas:{},show_deleted:false,show_hidden:false});this.log("this.storage:",this.storage.get());if(b){this.storage.set("exandedHdas",b)}if((a===true)||(a===false)){this.storage.set("show_deleted",a)}if((c===true)||(c===false)){this.storage.set("show_hidden",c)}this.show_deleted=this.storage.get("show_deleted");this.show_hidden=this.storage.get("show_hidden");this.log("(init'd) this.storage:",this.storage.get())},add:function(a){},addAll:function(){this.render()},render:function(){var b=this,d=b.toString()+".set-up",c=$("<div/>"),a=this.model.toJSON(),e=(this.$el.children().size()===0);a.urls=this._renderUrls(a);c.append(HistoryPanel.templates.historyPanel(a));c.find(".tooltip").tooltip({placement:"bottom"});this._setUpActionButton(c.find("#history-action-popup"));if(!this.model.hdas.length||!this.renderItems(c.find("#"+this.model.get("id")+"-datasets"))){c.find("#emptyHistoryMessage").show()}$(b).queue(d,function(f){b.$el.fadeOut("fast",function(){f()})});$(b).queue(d,function(f){b.$el.html("");b.$el.append(c.children());b.$el.fadeIn("fast",function(){f()})});$(b).queue(d,function(f){this.log(b+" rendered:",b.$el);b._setUpBehaviours();if(e){b.trigger("rendered:initial")}else{b.trigger("rendered")}f()});$(b).dequeue(d);return this},_renderUrls:function(a){var b=this;b.urls={};_.each(this.urlTemplates,function(d,c){b.urls[c]=_.template(d,a)});return b.urls},_setUpActionButton:function(e){var c=this,d=(this.storage.get("show_deleted"))?("Hide deleted"):("Show deleted"),a=(this.storage.get("show_hidden"))?("Hide hidden"):("Show hidden"),b={};b[_l("collapse all")]=function(){c.hideAllHdaBodies()};b[_l(d)]=function(){c.toggleShowDeleted()};b[_l(a)]=function(){c.toggleShowHidden()};make_popupmenu(e,b)},renderItems:function(b){this.hdaViews={};var a=this,c=this.model.hdas.getVisible(this.storage.get("show_deleted"),this.storage.get("show_hidden"));_.each(c,function(f){var e=f.get("id"),d=a.storage.get("expandedHdas").get(e);a.hdaViews[e]=new a.HDAView({model:f,expanded:d,urlTemplates:a.hdaUrlTemplates});a._setUpHdaListeners(a.hdaViews[e]);b.prepend(a.hdaViews[e].render().$el)});return c.length},_setUpHdaListeners:function(b){var a=this;b.bind("body-expanded",function(c){a.storage.get("expandedHdas").set(c,true)});b.bind("body-collapsed",function(c){a.storage.get("expandedHdas").deleteKey(c)})},_setUpBehaviours:function(){if(!(this.model.get("user")&&this.model.get("user").email)){return}var a=this.$("#history-annotation-area");this.$("#history-annotate").click(function(){if(a.is(":hidden")){a.slideDown("fast")}else{a.slideUp("fast")}return 
false});async_save_text("history-name-container","history-name",this.urls.rename,"new_name",18);async_save_text("history-annotation-container","history-annotation",this.urls.annotate,"new_annotation",18,true,4)},updateHistoryDiskSize:function(){this.$el.find("#history-size").text(this.model.get("nice_size"))},showQuotaMessage:function(){var a=this.$el.find("#quota-message-container");if(a.is(":hidden")){a.slideDown("fast")}},hideQuotaMessage:function(){var a=this.$el.find("#quota-message-container");if(!a.is(":hidden")){a.slideUp("fast")}},toggleShowDeleted:function(){this.storage.set("show_deleted",!this.storage.get("show_deleted"));this.render()},toggleShowHidden:function(){this.storage.set("show_hidden",!this.storage.get("show_hidden"));this.render()},hideAllHdaBodies:function(){_.each(this.hdaViews,function(a){a.toggleBodyVisibility(null,false)});this.storage.set("expandedHdas",{})},loadAndDisplayTags:function(c){this.log(this+".loadAndDisplayTags",c);var d=this.$el.find("#history-tag-area"),b=d.find(".tag-elt");this.log("\t tagArea",d," tagElt",b);if(d.is(":hidden")){if(!jQuery.trim(b.html())){var a=this;$.ajax({url:a.urls.tag,error:function(){alert(_l("Tagging failed"))},success:function(e){b.html(e);b.find(".tooltip").tooltip();d.slideDown("fast")}})}else{d.slideDown("fast")}}else{d.slideUp("fast")}return false},toString:function(){var a=this.model.get("name")||"";return"HistoryPanel("+a+")"}});HistoryPanel.templates={historyPanel:Handlebars.templates["template-history-historyPanel"]};
\ No newline at end of file
diff -r dae89d6c6e82a095cd07d0ffa124f76d659863bc -r b0afeaf880fc9913abc1ddffab25b953f2359fea static/scripts/packed/templates/compiled/template-hda-failedMetaData.js
--- a/static/scripts/packed/templates/compiled/template-hda-failedMetaData.js
+++ b/static/scripts/packed/templates/compiled/template-hda-failedMetaData.js
@@ -1,1 +1,1 @@
-(function(){var b=Handlebars.template,a=Handlebars.templates=Handlebars.templates||{};a["template-hda-failedMetaData"]=b(function(g,m,f,l,k){f=f||g.helpers;var c,i,o=this,h="function",n=f.blockHelperMissing,j=this.escapeExpression;function e(t,s){var q="",r,p;q+="\n";p=f.local;if(p){r=p.call(t,{hash:{},inverse:o.noop,fn:o.program(2,d,s)})}else{r=t.local;r=typeof r===h?r():r}if(!f.local){r=n.call(t,r,{hash:{},inverse:o.noop,fn:o.program(2,d,s)})}if(r||r===0){q+=r}q+='\nYou may be able to <a href="';r=t.urls;r=r==null||r===false?r:r.edit;r=typeof r===h?r():r;q+=j(r)+'" target="galaxy_main">set it manually or retry auto-detection</a>.\n';return q}function d(q,p){return"An error occurred setting the metadata for this dataset."}i=f.warningmessagesmall;if(i){c=i.call(m,{hash:{},inverse:o.noop,fn:o.program(1,e,k)})}else{c=m.warningmessagesmall;c=typeof c===h?c():c}if(!f.warningmessagesmall){c=n.call(m,c,{hash:{},inverse:o.noop,fn:o.program(1,e,k)})}if(c||c===0){return c}else{return""}})})();
\ No newline at end of file
+(function(){var b=Handlebars.template,a=Handlebars.templates=Handlebars.templates||{};a["template-hda-failedMetadata"]=b(function(g,m,f,l,k){f=f||g.helpers;var c,i,o=this,h="function",n=f.blockHelperMissing,j=this.escapeExpression;function e(t,s){var q="",r,p;q+="\n";p=f.local;if(p){r=p.call(t,{hash:{},inverse:o.noop,fn:o.program(2,d,s)})}else{r=t.local;r=typeof r===h?r():r}if(!f.local){r=n.call(t,r,{hash:{},inverse:o.noop,fn:o.program(2,d,s)})}if(r||r===0){q+=r}q+='\nYou may be able to <a href="';r=t.urls;r=r==null||r===false?r:r.edit;r=typeof r===h?r():r;q+=j(r)+'" target="galaxy_main">set it manually or retry auto-detection</a>.\n';return q}function d(q,p){return"An error occurred setting the metadata for this dataset."}i=f.warningmessagesmall;if(i){c=i.call(m,{hash:{},inverse:o.noop,fn:o.program(1,e,k)})}else{c=m.warningmessagesmall;c=typeof c===h?c():c}if(!f.warningmessagesmall){c=n.call(m,c,{hash:{},inverse:o.noop,fn:o.program(1,e,k)})}if(c||c===0){return c}else{return""}})})();
\ No newline at end of file
diff -r dae89d6c6e82a095cd07d0ffa124f76d659863bc -r b0afeaf880fc9913abc1ddffab25b953f2359fea static/scripts/packed/templates/compiled/template-history-historyPanel.js
--- a/static/scripts/packed/templates/compiled/template-history-historyPanel.js
+++ b/static/scripts/packed/templates/compiled/template-history-historyPanel.js
@@ -1,1 +1,1 @@
-(function(){var b=Handlebars.template,a=Handlebars.templates=Handlebars.templates||{};a["template-history-historyPanel"]=b(function(k,A,y,q,I){y=y||k.helpers;var z="",n,m,v=this,e="function",c=y.blockHelperMissing,d=this.escapeExpression;function t(N,M){var K="",L,J;K+='\n <div id="history-name" class="tooltip editable-text"\n title="';J=y.local;if(J){L=J.call(N,{hash:{},inverse:v.noop,fn:v.program(2,s,M)})}else{L=N.local;L=typeof L===e?L():L}if(!y.local){L=c.call(N,L,{hash:{},inverse:v.noop,fn:v.program(2,s,M)})}if(L||L===0){K+=L}K+='">';J=y.name;if(J){L=J.call(N,{hash:{}})}else{L=N.name;L=typeof L===e?L():L}K+=d(L)+"</div>\n ";return K}function s(K,J){return"Click to rename history"}function r(N,M){var K="",L,J;K+='\n <div id="history-name" class="tooltip"\n title="';J=y.local;if(J){L=J.call(N,{hash:{},inverse:v.noop,fn:v.program(5,p,M)})}else{L=N.local;L=typeof L===e?L():L}if(!y.local){L=c.call(N,L,{hash:{},inverse:v.noop,fn:v.program(5,p,M)})}if(L||L===0){K+=L}K+='">';J=y.name;if(J){L=J.call(N,{hash:{}})}else{L=N.name;L=typeof L===e?L():L}K+=d(L)+"</div>\n ";return K}function p(K,J){return"You must be logged in to edit your history name"}function o(K,J){return"Click to see more actions for this history"}function j(N,M){var K="",L,J;K+='\n <div id="history-secondary-links" style="float: right;">\n <a id="history-tag" title="';J=y.local;if(J){L=J.call(N,{hash:{},inverse:v.noop,fn:v.program(10,H,M)})}else{L=N.local;L=typeof L===e?L():L}if(!y.local){L=c.call(N,L,{hash:{},inverse:v.noop,fn:v.program(10,H,M)})}if(L||L===0){K+=L}K+='"\n class="icon-button tags tooltip" target="galaxy_main" href="javascript:void(0)"></a>\n <a id="history-annotate" title="';J=y.local;if(J){L=J.call(N,{hash:{},inverse:v.noop,fn:v.program(12,G,M)})}else{L=N.local;L=typeof L===e?L():L}if(!y.local){L=c.call(N,L,{hash:{},inverse:v.noop,fn:v.program(12,G,M)})}if(L||L===0){K+=L}K+='"\n class="icon-button annotate tooltip" target="galaxy_main" href="javascript:void(0)"></a>\n </div>\n ';return K}function H(K,J){return"Edit history tags"}function G(K,J){return"Edit history annotation"}function F(N,M){var K="",L,J;K+="\n ";J=y.warningmessagesmall;if(J){L=J.call(N,{hash:{},inverse:v.noop,fn:v.program(15,E,M)})}else{L=N.warningmessagesmall;L=typeof L===e?L():L}if(!y.warningmessagesmall){L=c.call(N,L,{hash:{},inverse:v.noop,fn:v.program(15,E,M)})}if(L||L===0){K+=L}K+="\n ";return K}function E(M,L){var K,J;J=y.local;if(J){K=J.call(M,{hash:{},inverse:v.noop,fn:v.program(16,D,L)})}else{K=M.local;K=typeof K===e?K():K}if(!y.local){K=c.call(M,K,{hash:{},inverse:v.noop,fn:v.program(16,D,L)})}if(K||K===0){return K}else{return""}}function D(K,J){return"You are currently viewing a deleted history!"}function C(N,M){var K="",L,J;K+='\n <div id="history-tag-annotation">\n\n <div id="history-tag-area" style="display: none">\n <strong>';J=y.local;if(J){L=J.call(N,{hash:{},inverse:v.noop,fn:v.program(19,B,M)})}else{L=N.local;L=typeof L===e?L():L}if(!y.local){L=c.call(N,L,{hash:{},inverse:v.noop,fn:v.program(19,B,M)})}if(L||L===0){K+=L}K+=':</strong>\n <div class="tag-elt"></div>\n </div>\n\n <div id="history-annotation-area" style="display: none">\n <strong>';J=y.local;if(J){L=J.call(N,{hash:{},inverse:v.noop,fn:v.program(21,l,M)})}else{L=N.local;L=typeof L===e?L():L}if(!y.local){L=c.call(N,L,{hash:{},inverse:v.noop,fn:v.program(21,l,M)})}if(L||L===0){K+=L}K+=':</strong>\n <div id="history-annotation-container">\n <div id="history-annotation" class="tooltip editable-text"\n 
title="';J=y.local;if(J){L=J.call(N,{hash:{},inverse:v.noop,fn:v.program(23,i,M)})}else{L=N.local;L=typeof L===e?L():L}if(!y.local){L=c.call(N,L,{hash:{},inverse:v.noop,fn:v.program(23,i,M)})}if(L||L===0){K+=L}K+='">\n ';L=N.annotation;L=y["if"].call(N,L,{hash:{},inverse:v.program(27,g,M),fn:v.program(25,h,M)});if(L||L===0){K+=L}K+="\n </div>\n </div>\n </div>\n </div>\n ";return K}function B(K,J){return"Tags"}function l(K,J){return"Annotation"}function i(K,J){return"Click to edit annotation"}function h(N,M){var K="",L,J;K+="\n ";J=y.annotation;if(J){L=J.call(N,{hash:{}})}else{L=N.annotation;L=typeof L===e?L():L}K+=d(L)+"\n ";return K}function g(N,M){var K="",L,J;K+="\n <em>";J=y.local;if(J){L=J.call(N,{hash:{},inverse:v.noop,fn:v.program(28,f,M)})}else{L=N.local;L=typeof L===e?L():L}if(!y.local){L=c.call(N,L,{hash:{},inverse:v.noop,fn:v.program(28,f,M)})}if(L||L===0){K+=L}K+="</em>\n ";return K}function f(K,J){return"Describe or add notes to history"}function x(N,M){var K="",L,J;K+='\n <div id="message-container">\n <div class="';J=y.status;if(J){L=J.call(N,{hash:{}})}else{L=N.status;L=typeof L===e?L():L}K+=d(L)+'message">\n ';J=y.message;if(J){L=J.call(N,{hash:{}})}else{L=N.message;L=typeof L===e?L():L}K+=d(L)+"\n </div><br />\n </div>\n ";return K}function w(K,J){return"You are over your disk quota.\n Tool execution is on hold until your disk usage drops below your allocated quota."}function u(K,J){return"Your history is empty. Click 'Get Data' on the left pane to start"}z+='<div id="history-controls">\n <div id="history-title-area" class="historyLinks">\n\n ';z+='\n <div id="history-name-container" style="float: left;">\n ';z+="\n ";n=A.user;n=n==null||n===false?n:n.email;n=y["if"].call(A,n,{hash:{},inverse:v.program(4,r,I),fn:v.program(1,t,I)});if(n||n===0){z+=n}z+='\n </div>\n\n <a id="history-action-popup" class="tooltip" title="';m=y.local;if(m){n=m.call(A,{hash:{},inverse:v.noop,fn:v.program(7,o,I)})}else{n=A.local;n=typeof n===e?n():n}if(!y.local){n=c.call(A,n,{hash:{},inverse:v.noop,fn:v.program(7,o,I)})}if(n||n===0){z+=n}z+='"\n href="javascript:void(0);" style="float: right;">\n <span class="ficon cogs large"></span>\n </a>\n <div style="clear: both;"></div>\n </div>\n\n <div id="history-subtitle-area">\n <div id="history-size" style="float:left;">';m=y.nice_size;if(m){n=m.call(A,{hash:{}})}else{n=A.nice_size;n=typeof n===e?n():n}z+=d(n)+"</div>\n ";n=A.user;n=n==null||n===false?n:n.email;n=y["if"].call(A,n,{hash:{},inverse:v.noop,fn:v.program(9,j,I)});if(n||n===0){z+=n}z+='\n <div style="clear: both;"></div>\n </div>\n\n ';n=A.deleted;n=y["if"].call(A,n,{hash:{},inverse:v.noop,fn:v.program(14,F,I)});if(n||n===0){z+=n}z+="\n\n ";z+="\n ";z+="\n ";n=A.user;n=n==null||n===false?n:n.email;n=y["if"].call(A,n,{hash:{},inverse:v.noop,fn:v.program(18,C,I)});if(n||n===0){z+=n}z+="\n\n ";n=A.message;n=y["if"].call(A,n,{hash:{},inverse:v.noop,fn:v.program(30,x,I)});if(n||n===0){z+=n}z+='\n\n <div id="quota-message-container" style="display: none">\n <div id="quota-message" class="errormessage">\n ';m=y.local;if(m){n=m.call(A,{hash:{},inverse:v.noop,fn:v.program(32,w,I)})}else{n=A.local;n=typeof n===e?n():n}if(!y.local){n=c.call(A,n,{hash:{},inverse:v.noop,fn:v.program(32,w,I)})}if(n||n===0){z+=n}z+='\n </div>\n </div>\n</div>\n\n<div id="';m=y.id;if(m){n=m.call(A,{hash:{}})}else{n=A.id;n=typeof n===e?n():n}z+=d(n)+'-datasets" class="history-datasets-list"></div>\n\n<div class="infomessagesmall" id="emptyHistoryMessage" style="display: none;">\n 
';m=y.local;if(m){n=m.call(A,{hash:{},inverse:v.noop,fn:v.program(34,u,I)})}else{n=A.local;n=typeof n===e?n():n}if(!y.local){n=c.call(A,n,{hash:{},inverse:v.noop,fn:v.program(34,u,I)})}if(n||n===0){z+=n}z+="\n</div>";return z})})();
\ No newline at end of file
+(function(){var b=Handlebars.template,a=Handlebars.templates=Handlebars.templates||{};a["template-history-historyPanel"]=b(function(j,C,A,r,J){A=A||j.helpers;var B="",n,m,x=this,f="function",c=A.blockHelperMissing,e=this.escapeExpression;function u(O,N){var L="",M,K;L+='\n <div id="history-name" class="tooltip editable-text"\n title="';K=A.local;if(K){M=K.call(O,{hash:{},inverse:x.noop,fn:x.program(2,t,N)})}else{M=O.local;M=typeof M===f?M():M}if(!A.local){M=c.call(O,M,{hash:{},inverse:x.noop,fn:x.program(2,t,N)})}if(M||M===0){L+=M}L+='">';K=A.name;if(K){M=K.call(O,{hash:{}})}else{M=O.name;M=typeof M===f?M():M}L+=e(M)+"</div>\n ";return L}function t(L,K){return"Click to rename history"}function s(O,N){var L="",M,K;L+='\n <div id="history-name" class="tooltip"\n title="';K=A.local;if(K){M=K.call(O,{hash:{},inverse:x.noop,fn:x.program(5,q,N)})}else{M=O.local;M=typeof M===f?M():M}if(!A.local){M=c.call(O,M,{hash:{},inverse:x.noop,fn:x.program(5,q,N)})}if(M||M===0){L+=M}L+='">';K=A.name;if(K){M=K.call(O,{hash:{}})}else{M=O.name;M=typeof M===f?M():M}L+=e(M)+"</div>\n ";return L}function q(L,K){return"You must be logged in to edit your history name"}function p(O,N){var L="",M,K;L+='\n <a id="history-tag" title="';K=A.local;if(K){M=K.call(O,{hash:{},inverse:x.noop,fn:x.program(8,l,N)})}else{M=O.local;M=typeof M===f?M():M}if(!A.local){M=c.call(O,M,{hash:{},inverse:x.noop,fn:x.program(8,l,N)})}if(M||M===0){L+=M}L+='"\n class="icon-button tags tooltip" target="galaxy_main" href="javascript:void(0)"></a>\n <a id="history-annotate" title="';K=A.local;if(K){M=K.call(O,{hash:{},inverse:x.noop,fn:x.program(10,I,N)})}else{M=O.local;M=typeof M===f?M():M}if(!A.local){M=c.call(O,M,{hash:{},inverse:x.noop,fn:x.program(10,I,N)})}if(M||M===0){L+=M}L+='"\n class="icon-button annotate tooltip" target="galaxy_main" href="javascript:void(0)"></a>\n ';return L}function l(L,K){return"Edit history tags"}function I(L,K){return"Edit history annotation"}function H(L,K){return"Refresh this display"}function G(L,K){return"Click to see more actions for this history"}function F(O,N){var L="",M,K;L+="\n ";K=A.warningmessagesmall;if(K){M=K.call(O,{hash:{},inverse:x.noop,fn:x.program(17,E,N)})}else{M=O.warningmessagesmall;M=typeof M===f?M():M}if(!A.warningmessagesmall){M=c.call(O,M,{hash:{},inverse:x.noop,fn:x.program(17,E,N)})}if(M||M===0){L+=M}L+="\n ";return L}function E(N,M){var L,K;K=A.local;if(K){L=K.call(N,{hash:{},inverse:x.noop,fn:x.program(18,D,M)})}else{L=N.local;L=typeof L===f?L():L}if(!A.local){L=c.call(N,L,{hash:{},inverse:x.noop,fn:x.program(18,D,M)})}if(L||L===0){return L}else{return""}}function D(L,K){return"You are currently viewing a deleted history!"}function o(O,N){var L="",M,K;L+='\n <div id="history-tag-annotation">\n\n <div id="history-tag-area" style="display: none">\n <strong>';K=A.local;if(K){M=K.call(O,{hash:{},inverse:x.noop,fn:x.program(21,k,N)})}else{M=O.local;M=typeof M===f?M():M}if(!A.local){M=c.call(O,M,{hash:{},inverse:x.noop,fn:x.program(21,k,N)})}if(M||M===0){L+=M}L+=':</strong>\n <div class="tag-elt"></div>\n </div>\n\n <div id="history-annotation-area" style="display: none">\n <strong>';K=A.local;if(K){M=K.call(O,{hash:{},inverse:x.noop,fn:x.program(23,i,N)})}else{M=O.local;M=typeof M===f?M():M}if(!A.local){M=c.call(O,M,{hash:{},inverse:x.noop,fn:x.program(23,i,N)})}if(M||M===0){L+=M}L+=':</strong>\n <div id="history-annotation-container">\n <div id="history-annotation" class="tooltip editable-text"\n 
title="';K=A.local;if(K){M=K.call(O,{hash:{},inverse:x.noop,fn:x.program(25,h,N)})}else{M=O.local;M=typeof M===f?M():M}if(!A.local){M=c.call(O,M,{hash:{},inverse:x.noop,fn:x.program(25,h,N)})}if(M||M===0){L+=M}L+='">\n ';M=O.annotation;M=A["if"].call(O,M,{hash:{},inverse:x.program(29,d,N),fn:x.program(27,g,N)});if(M||M===0){L+=M}L+="\n </div>\n </div>\n </div>\n </div>\n ";return L}function k(L,K){return"Tags"}function i(L,K){return"Annotation"}function h(L,K){return"Click to edit annotation"}function g(O,N){var L="",M,K;L+="\n ";K=A.annotation;if(K){M=K.call(O,{hash:{}})}else{M=O.annotation;M=typeof M===f?M():M}L+=e(M)+"\n ";return L}function d(O,N){var L="",M,K;L+="\n <em>";K=A.local;if(K){M=K.call(O,{hash:{},inverse:x.noop,fn:x.program(30,z,N)})}else{M=O.local;M=typeof M===f?M():M}if(!A.local){M=c.call(O,M,{hash:{},inverse:x.noop,fn:x.program(30,z,N)})}if(M||M===0){L+=M}L+="</em>\n ";return L}function z(L,K){return"Describe or add notes to history"}function y(O,N){var L="",M,K;L+='\n <div id="message-container">\n <div class="';K=A.status;if(K){M=K.call(O,{hash:{}})}else{M=O.status;M=typeof M===f?M():M}L+=e(M)+'message">\n ';K=A.message;if(K){M=K.call(O,{hash:{}})}else{M=O.message;M=typeof M===f?M():M}L+=e(M)+"\n </div><br />\n </div>\n ";return L}function w(L,K){return"You are over your disk quota.\n Tool execution is on hold until your disk usage drops below your allocated quota."}function v(L,K){return"Your history is empty. Click 'Get Data' on the left pane to start"}B+='<div id="history-controls">\n <div id="history-title-area" class="historyLinks">\n\n ';B+='\n <div id="history-name-container">\n ';B+="\n ";n=C.user;n=n==null||n===false?n:n.email;n=A["if"].call(C,n,{hash:{},inverse:x.program(4,s,J),fn:x.program(1,u,J)});if(n||n===0){B+=n}B+='\n </div>\n </div>\n\n <div id="history-subtitle-area">\n <div id="history-size" style="float:left;">';m=A.nice_size;if(m){n=m.call(C,{hash:{}})}else{n=C.nice_size;n=typeof n===f?n():n}B+=e(n)+'</div>\n\n <div id="history-secondary-links" style="float: right;">\n ';n=C.user;n=n==null||n===false?n:n.email;n=A["if"].call(C,n,{hash:{},inverse:x.noop,fn:x.program(7,p,J)});if(n||n===0){B+=n}B+='\n <a id="history-refresh" class="tooltip" title="';m=A.local;if(m){n=m.call(C,{hash:{},inverse:x.noop,fn:x.program(12,H,J)})}else{n=C.local;n=typeof n===f?n():n}if(!A.local){n=c.call(C,n,{hash:{},inverse:x.noop,fn:x.program(12,H,J)})}if(n||n===0){B+=n}B+='"\n href="javascript:void(0);">\n <span class="ficon refresh large"></span>\n </a>\n <a id="history-action-popup" class="tooltip" title="';m=A.local;if(m){n=m.call(C,{hash:{},inverse:x.noop,fn:x.program(14,G,J)})}else{n=C.local;n=typeof n===f?n():n}if(!A.local){n=c.call(C,n,{hash:{},inverse:x.noop,fn:x.program(14,G,J)})}if(n||n===0){B+=n}B+='"\n href="javascript:void(0);">\n <span class="ficon cogs large"></span>\n </a>\n </div>\n <div style="clear: both;"></div>\n </div>\n\n ';n=C.deleted;n=A["if"].call(C,n,{hash:{},inverse:x.noop,fn:x.program(16,F,J)});if(n||n===0){B+=n}B+="\n\n ";B+="\n ";B+="\n ";n=C.user;n=n==null||n===false?n:n.email;n=A["if"].call(C,n,{hash:{},inverse:x.noop,fn:x.program(20,o,J)});if(n||n===0){B+=n}B+="\n\n ";n=C.message;n=A["if"].call(C,n,{hash:{},inverse:x.noop,fn:x.program(32,y,J)});if(n||n===0){B+=n}B+='\n\n <div id="quota-message-container" style="display: none">\n <div id="quota-message" class="errormessage">\n ';m=A.local;if(m){n=m.call(C,{hash:{},inverse:x.noop,fn:x.program(34,w,J)})}else{n=C.local;n=typeof 
n===f?n():n}if(!A.local){n=c.call(C,n,{hash:{},inverse:x.noop,fn:x.program(34,w,J)})}if(n||n===0){B+=n}B+='\n </div>\n </div>\n</div>\n\n<div id="';m=A.id;if(m){n=m.call(C,{hash:{}})}else{n=C.id;n=typeof n===f?n():n}B+=e(n)+'-datasets" class="history-datasets-list"></div>\n\n<div class="infomessagesmall" id="emptyHistoryMessage" style="display: none;">\n ';m=A.local;if(m){n=m.call(C,{hash:{},inverse:x.noop,fn:x.program(36,v,J)})}else{n=C.local;n=typeof n===f?n():n}if(!A.local){n=c.call(C,n,{hash:{},inverse:x.noop,fn:x.program(36,v,J)})}if(n||n===0){B+=n}B+="\n</div>";return B})})();
\ No newline at end of file
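The packed panel source above also shows the pattern the view uses for remembering per-history UI state. A minimal sketch of that pattern, assuming Galaxy's PersistantStorage helper behaves as the calls in the packed code suggest (a namespaced wrapper over browser storage with get/set/deleteKey); `model` and `hdaId` here are stand-ins for the panel's model and a dataset id:

    // per-history UI state, keyed by history id so it survives page reloads
    var storage = new PersistantStorage( 'HistoryView.' + model.get( 'id' ), {
        expandedHdas : {},      // which HDA bodies are expanded
        show_deleted : false,
        show_hidden  : false
    });
    // flags are read back on init and toggled from the panel's action menu
    storage.set( 'show_deleted', !storage.get( 'show_deleted' ) );
    // expansion state is tracked per dataset id
    storage.get( 'expandedHdas' ).set( hdaId, true );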
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: carlfeberhard: tool_runner.py, rerun: allow encoded ids; history panel: move refresh out of popup, rearrange icons.
by Bitbucket 15 Nov '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/8e52e426487b/
changeset: 8e52e426487b
user: carlfeberhard
date: 2012-11-15 19:08:57
summary: tool_runner.py, rerun: allow encoded ids; history panel: move refresh out of popup, rearrange icons.
affected #: 8 files
diff -r 0a1db986221074e72131afd6d16320357843c5f8 -r 8e52e426487bf732d051e538ac4cd7d352aa5065 lib/galaxy/webapps/galaxy/controllers/tool_runner.py
--- a/lib/galaxy/webapps/galaxy/controllers/tool_runner.py
+++ b/lib/galaxy/webapps/galaxy/controllers/tool_runner.py
@@ -117,8 +117,14 @@
error( "'id' parameter is required" );
try:
id = int( id )
+
except:
- error( "Invalid value for 'id' parameter" )
+ # it's not an un-encoded id, try to parse as encoded
+ try:
+ id = trans.security.decode_id( id )
+ except:
+ error( "Invalid value for 'id' parameter" )
+
# Get the dataset object
data = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( id )
#only allow rerunning if user is allowed access to the dataset.
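On the client side, the effect of this fallback is that rerun links no longer need a decoded integer id. A hedged sketch, assuming a rerun URL of the form /tool_runner/rerun?id=... (the exact route is not shown in this hunk) and a Backbone HDA model whose id attribute is the API's encoded id:

    // hda.get( 'id' ) returns the encoded id the API hands to the client;
    // after this changeset the controller decodes it server-side when
    // int() parsing fails, so the link can be built directly.
    var rerunUrl = '/tool_runner/rerun?id=' + hda.get( 'id' );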
diff -r 0a1db986221074e72131afd6d16320357843c5f8 -r 8e52e426487bf732d051e538ac4cd7d352aa5065 static/scripts/mvc/dataset/hda-base.js
--- a/static/scripts/mvc/dataset/hda-base.js
+++ b/static/scripts/mvc/dataset/hda-base.js
@@ -523,7 +523,7 @@
titleLink : Handlebars.templates[ 'template-hda-titleLink' ],
hdaSummary : Handlebars.templates[ 'template-hda-hdaSummary' ],
downloadLinks : Handlebars.templates[ 'template-hda-downloadLinks' ],
- failedMetadata : Handlebars.templates[ 'template-hda-failedMetaData' ],
+ failedMetadata : Handlebars.templates[ 'template-hda-failedMetadata' ],
displayApps : Handlebars.templates[ 'template-hda-displayApps' ]
};
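The one-character rename matters because Handlebars.templates is a plain object registry: a lookup under a mis-cased key silently returns undefined rather than raising. A minimal illustration (not Galaxy code) of the failure mode:

    var templates = {};   // stands in for Handlebars.templates
    templates[ 'template-hda-failedMetadata' ] = function(){
        return 'An error occurred setting the metadata for this dataset.';
    };
    templates[ 'template-hda-failedMetaData' ];    // undefined: stale, mis-cased key
    templates[ 'template-hda-failedMetadata' ]();  // resolves after the rename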
diff -r 0a1db986221074e72131afd6d16320357843c5f8 -r 8e52e426487bf732d051e538ac4cd7d352aa5065 static/scripts/mvc/history/history-panel.js
--- a/static/scripts/mvc/history/history-panel.js
+++ b/static/scripts/mvc/history/history-panel.js
@@ -127,6 +127,7 @@
/** event map
*/
events : {
+ 'click #history-refresh' : function(){ window.location.reload(); },
'click #history-tag' : 'loadAndDisplayTags'
},
@@ -313,7 +314,7 @@
show_deletedText = ( this.storage.get( 'show_deleted' ) )?( 'Hide deleted' ):( 'Show deleted' ),
show_hiddenText = ( this.storage.get( 'show_hidden' ) )?( 'Hide hidden' ):( 'Show hidden' ),
menuActions = {};
- menuActions[ _l( 'refresh' ) ] = function(){ window.location.reload(); };
+ //menuActions[ _l( 'refresh' ) ] = function(){ window.location.reload(); };
menuActions[ _l( 'collapse all' ) ] = function(){ historyPanel.hideAllHdaBodies(); };
menuActions[ _l( show_deletedText ) ] = function(){ historyPanel.toggleShowDeleted(); };
menuActions[ _l( show_hiddenText ) ] = function(){ historyPanel.toggleShowHidden(); };
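Taken together, the two hunks above move the refresh action out of the make_popupmenu action dict and into the view's delegated event map. A minimal sketch of the resulting pattern, assuming only Backbone is loaded:

    var PanelSketch = Backbone.View.extend({
        // delegated DOM events: the refresh anchor is handled here
        // instead of via a make_popupmenu entry
        events : {
            'click #history-refresh' : function(){ window.location.reload(); },
            'click #history-tag'     : 'loadAndDisplayTags'
        },
        loadAndDisplayTags : function(){
            // toggle the tag area; return false to swallow the click
            return false;
        }
    });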
diff -r 0a1db986221074e72131afd6d16320357843c5f8 -r 8e52e426487bf732d051e538ac4cd7d352aa5065 static/scripts/templates/compiled/template-hda-failedMetaData.js
--- a/static/scripts/templates/compiled/template-hda-failedMetaData.js
+++ /dev/null
@@ -1,33 +0,0 @@
-(function() {
- var template = Handlebars.template, templates = Handlebars.templates = Handlebars.templates || {};
-templates['template-hda-failedMetaData'] = template(function (Handlebars,depth0,helpers,partials,data) {
- helpers = helpers || Handlebars.helpers;
- var stack1, foundHelper, self=this, functionType="function", blockHelperMissing=helpers.blockHelperMissing, escapeExpression=this.escapeExpression;
-
-function program1(depth0,data) {
-
- var buffer = "", stack1, foundHelper;
- buffer += "\n";
- foundHelper = helpers.local;
- if (foundHelper) { stack1 = foundHelper.call(depth0, {hash:{},inverse:self.noop,fn:self.program(2, program2, data)}); }
- else { stack1 = depth0.local; stack1 = typeof stack1 === functionType ? stack1() : stack1; }
- if (!helpers.local) { stack1 = blockHelperMissing.call(depth0, stack1, {hash:{},inverse:self.noop,fn:self.program(2, program2, data)}); }
- if(stack1 || stack1 === 0) { buffer += stack1; }
- buffer += "\nYou may be able to <a href=\"";
- stack1 = depth0.urls;
- stack1 = stack1 == null || stack1 === false ? stack1 : stack1.edit;
- stack1 = typeof stack1 === functionType ? stack1() : stack1;
- buffer += escapeExpression(stack1) + "\" target=\"galaxy_main\">set it manually or retry auto-detection</a>.\n";
- return buffer;}
-function program2(depth0,data) {
-
-
- return "An error occurred setting the metadata for this dataset.";}
-
- foundHelper = helpers.warningmessagesmall;
- if (foundHelper) { stack1 = foundHelper.call(depth0, {hash:{},inverse:self.noop,fn:self.program(1, program1, data)}); }
- else { stack1 = depth0.warningmessagesmall; stack1 = typeof stack1 === functionType ? stack1() : stack1; }
- if (!helpers.warningmessagesmall) { stack1 = blockHelperMissing.call(depth0, stack1, {hash:{},inverse:self.noop,fn:self.program(1, program1, data)}); }
- if(stack1 || stack1 === 0) { return stack1; }
- else { return ''; }});
-})();
\ No newline at end of file
diff -r 0a1db986221074e72131afd6d16320357843c5f8 -r 8e52e426487bf732d051e538ac4cd7d352aa5065 static/scripts/templates/compiled/template-history-historyPanel.js
--- a/static/scripts/templates/compiled/template-history-historyPanel.js
+++ b/static/scripts/templates/compiled/template-history-historyPanel.js
@@ -46,104 +46,109 @@
function program7(depth0,data) {
-
- return "Click to see more actions for this history";}
-
-function program9(depth0,data) {
-
var buffer = "", stack1, foundHelper;
- buffer += "\n <div id=\"history-secondary-links\" style=\"float: right;\">\n <a id=\"history-tag\" title=\"";
+ buffer += "\n <a id=\"history-tag\" title=\"";
+ foundHelper = helpers.local;
+ if (foundHelper) { stack1 = foundHelper.call(depth0, {hash:{},inverse:self.noop,fn:self.program(8, program8, data)}); }
+ else { stack1 = depth0.local; stack1 = typeof stack1 === functionType ? stack1() : stack1; }
+ if (!helpers.local) { stack1 = blockHelperMissing.call(depth0, stack1, {hash:{},inverse:self.noop,fn:self.program(8, program8, data)}); }
+ if(stack1 || stack1 === 0) { buffer += stack1; }
+ buffer += "\"\n class=\"icon-button tags tooltip\" target=\"galaxy_main\" href=\"javascript:void(0)\"></a>\n <a id=\"history-annotate\" title=\"";
foundHelper = helpers.local;
if (foundHelper) { stack1 = foundHelper.call(depth0, {hash:{},inverse:self.noop,fn:self.program(10, program10, data)}); }
else { stack1 = depth0.local; stack1 = typeof stack1 === functionType ? stack1() : stack1; }
if (!helpers.local) { stack1 = blockHelperMissing.call(depth0, stack1, {hash:{},inverse:self.noop,fn:self.program(10, program10, data)}); }
if(stack1 || stack1 === 0) { buffer += stack1; }
- buffer += "\"\n class=\"icon-button tags tooltip\" target=\"galaxy_main\" href=\"javascript:void(0)\"></a>\n <a id=\"history-annotate\" title=\"";
- foundHelper = helpers.local;
- if (foundHelper) { stack1 = foundHelper.call(depth0, {hash:{},inverse:self.noop,fn:self.program(12, program12, data)}); }
- else { stack1 = depth0.local; stack1 = typeof stack1 === functionType ? stack1() : stack1; }
- if (!helpers.local) { stack1 = blockHelperMissing.call(depth0, stack1, {hash:{},inverse:self.noop,fn:self.program(12, program12, data)}); }
- if(stack1 || stack1 === 0) { buffer += stack1; }
- buffer += "\"\n class=\"icon-button annotate tooltip\" target=\"galaxy_main\" href=\"javascript:void(0)\"></a>\n </div>\n ";
+ buffer += "\"\n class=\"icon-button annotate tooltip\" target=\"galaxy_main\" href=\"javascript:void(0)\"></a>\n ";
return buffer;}
-function program10(depth0,data) {
+function program8(depth0,data) {
return "Edit history tags";}
-function program12(depth0,data) {
+function program10(depth0,data) {
return "Edit history annotation";}
+function program12(depth0,data) {
+
+
+ return "Refresh this display";}
+
function program14(depth0,data) {
+
+ return "Click to see more actions for this history";}
+
+function program16(depth0,data) {
+
var buffer = "", stack1, foundHelper;
buffer += "\n ";
foundHelper = helpers.warningmessagesmall;
- if (foundHelper) { stack1 = foundHelper.call(depth0, {hash:{},inverse:self.noop,fn:self.program(15, program15, data)}); }
+ if (foundHelper) { stack1 = foundHelper.call(depth0, {hash:{},inverse:self.noop,fn:self.program(17, program17, data)}); }
else { stack1 = depth0.warningmessagesmall; stack1 = typeof stack1 === functionType ? stack1() : stack1; }
- if (!helpers.warningmessagesmall) { stack1 = blockHelperMissing.call(depth0, stack1, {hash:{},inverse:self.noop,fn:self.program(15, program15, data)}); }
+ if (!helpers.warningmessagesmall) { stack1 = blockHelperMissing.call(depth0, stack1, {hash:{},inverse:self.noop,fn:self.program(17, program17, data)}); }
if(stack1 || stack1 === 0) { buffer += stack1; }
buffer += "\n ";
return buffer;}
-function program15(depth0,data) {
+function program17(depth0,data) {
var stack1, foundHelper;
foundHelper = helpers.local;
- if (foundHelper) { stack1 = foundHelper.call(depth0, {hash:{},inverse:self.noop,fn:self.program(16, program16, data)}); }
+ if (foundHelper) { stack1 = foundHelper.call(depth0, {hash:{},inverse:self.noop,fn:self.program(18, program18, data)}); }
else { stack1 = depth0.local; stack1 = typeof stack1 === functionType ? stack1() : stack1; }
- if (!helpers.local) { stack1 = blockHelperMissing.call(depth0, stack1, {hash:{},inverse:self.noop,fn:self.program(16, program16, data)}); }
+ if (!helpers.local) { stack1 = blockHelperMissing.call(depth0, stack1, {hash:{},inverse:self.noop,fn:self.program(18, program18, data)}); }
if(stack1 || stack1 === 0) { return stack1; }
else { return ''; }}
-function program16(depth0,data) {
+function program18(depth0,data) {
return "You are currently viewing a deleted history!";}
-function program18(depth0,data) {
+function program20(depth0,data) {
var buffer = "", stack1, foundHelper;
buffer += "\n <div id=\"history-tag-annotation\">\n\n <div id=\"history-tag-area\" style=\"display: none\">\n <strong>";
foundHelper = helpers.local;
- if (foundHelper) { stack1 = foundHelper.call(depth0, {hash:{},inverse:self.noop,fn:self.program(19, program19, data)}); }
- else { stack1 = depth0.local; stack1 = typeof stack1 === functionType ? stack1() : stack1; }
- if (!helpers.local) { stack1 = blockHelperMissing.call(depth0, stack1, {hash:{},inverse:self.noop,fn:self.program(19, program19, data)}); }
- if(stack1 || stack1 === 0) { buffer += stack1; }
- buffer += ":</strong>\n <div class=\"tag-elt\"></div>\n </div>\n\n <div id=\"history-annotation-area\" style=\"display: none\">\n <strong>";
- foundHelper = helpers.local;
if (foundHelper) { stack1 = foundHelper.call(depth0, {hash:{},inverse:self.noop,fn:self.program(21, program21, data)}); }
else { stack1 = depth0.local; stack1 = typeof stack1 === functionType ? stack1() : stack1; }
if (!helpers.local) { stack1 = blockHelperMissing.call(depth0, stack1, {hash:{},inverse:self.noop,fn:self.program(21, program21, data)}); }
if(stack1 || stack1 === 0) { buffer += stack1; }
- buffer += ":</strong>\n <div id=\"history-annotation-container\">\n <div id=\"history-annotation\" class=\"tooltip editable-text\"\n title=\"";
+ buffer += ":</strong>\n <div class=\"tag-elt\"></div>\n </div>\n\n <div id=\"history-annotation-area\" style=\"display: none\">\n <strong>";
foundHelper = helpers.local;
if (foundHelper) { stack1 = foundHelper.call(depth0, {hash:{},inverse:self.noop,fn:self.program(23, program23, data)}); }
else { stack1 = depth0.local; stack1 = typeof stack1 === functionType ? stack1() : stack1; }
if (!helpers.local) { stack1 = blockHelperMissing.call(depth0, stack1, {hash:{},inverse:self.noop,fn:self.program(23, program23, data)}); }
if(stack1 || stack1 === 0) { buffer += stack1; }
+ buffer += ":</strong>\n <div id=\"history-annotation-container\">\n <div id=\"history-annotation\" class=\"tooltip editable-text\"\n title=\"";
+ foundHelper = helpers.local;
+ if (foundHelper) { stack1 = foundHelper.call(depth0, {hash:{},inverse:self.noop,fn:self.program(25, program25, data)}); }
+ else { stack1 = depth0.local; stack1 = typeof stack1 === functionType ? stack1() : stack1; }
+ if (!helpers.local) { stack1 = blockHelperMissing.call(depth0, stack1, {hash:{},inverse:self.noop,fn:self.program(25, program25, data)}); }
+ if(stack1 || stack1 === 0) { buffer += stack1; }
buffer += "\">\n ";
stack1 = depth0.annotation;
- stack1 = helpers['if'].call(depth0, stack1, {hash:{},inverse:self.program(27, program27, data),fn:self.program(25, program25, data)});
+ stack1 = helpers['if'].call(depth0, stack1, {hash:{},inverse:self.program(29, program29, data),fn:self.program(27, program27, data)});
if(stack1 || stack1 === 0) { buffer += stack1; }
buffer += "\n </div>\n </div>\n </div>\n </div>\n ";
return buffer;}
-function program19(depth0,data) {
+function program21(depth0,data) {
return "Tags";}
-function program21(depth0,data) {
+function program23(depth0,data) {
return "Annotation";}
-function program23(depth0,data) {
+function program25(depth0,data) {
return "Click to edit annotation";}
-function program25(depth0,data) {
+function program27(depth0,data) {
var buffer = "", stack1, foundHelper;
buffer += "\n ";
@@ -153,23 +158,23 @@
buffer += escapeExpression(stack1) + "\n ";
return buffer;}
-function program27(depth0,data) {
+function program29(depth0,data) {
var buffer = "", stack1, foundHelper;
buffer += "\n <em>";
foundHelper = helpers.local;
- if (foundHelper) { stack1 = foundHelper.call(depth0, {hash:{},inverse:self.noop,fn:self.program(28, program28, data)}); }
+ if (foundHelper) { stack1 = foundHelper.call(depth0, {hash:{},inverse:self.noop,fn:self.program(30, program30, data)}); }
else { stack1 = depth0.local; stack1 = typeof stack1 === functionType ? stack1() : stack1; }
- if (!helpers.local) { stack1 = blockHelperMissing.call(depth0, stack1, {hash:{},inverse:self.noop,fn:self.program(28, program28, data)}); }
+ if (!helpers.local) { stack1 = blockHelperMissing.call(depth0, stack1, {hash:{},inverse:self.noop,fn:self.program(30, program30, data)}); }
if(stack1 || stack1 === 0) { buffer += stack1; }
buffer += "</em>\n ";
return buffer;}
-function program28(depth0,data) {
+function program30(depth0,data) {
return "Describe or add notes to history";}
-function program30(depth0,data) {
+function program32(depth0,data) {
var buffer = "", stack1, foundHelper;
buffer += "\n <div id=\"message-container\">\n <div class=\"";
@@ -183,58 +188,64 @@
buffer += escapeExpression(stack1) + "\n </div><br />\n </div>\n ";
return buffer;}
-function program32(depth0,data) {
+function program34(depth0,data) {
return "You are over your disk quota.\n Tool execution is on hold until your disk usage drops below your allocated quota.";}
-function program34(depth0,data) {
+function program36(depth0,data) {
return "Your history is empty. Click 'Get Data' on the left pane to start";}
buffer += "<div id=\"history-controls\">\n <div id=\"history-title-area\" class=\"historyLinks\">\n\n ";
- buffer += "\n <div id=\"history-name-container\" style=\"float: left;\">\n ";
+ buffer += "\n <div id=\"history-name-container\">\n ";
buffer += "\n ";
stack1 = depth0.user;
stack1 = stack1 == null || stack1 === false ? stack1 : stack1.email;
stack1 = helpers['if'].call(depth0, stack1, {hash:{},inverse:self.program(4, program4, data),fn:self.program(1, program1, data)});
if(stack1 || stack1 === 0) { buffer += stack1; }
- buffer += "\n </div>\n\n <a id=\"history-action-popup\" class=\"tooltip\" title=\"";
- foundHelper = helpers.local;
- if (foundHelper) { stack1 = foundHelper.call(depth0, {hash:{},inverse:self.noop,fn:self.program(7, program7, data)}); }
- else { stack1 = depth0.local; stack1 = typeof stack1 === functionType ? stack1() : stack1; }
- if (!helpers.local) { stack1 = blockHelperMissing.call(depth0, stack1, {hash:{},inverse:self.noop,fn:self.program(7, program7, data)}); }
- if(stack1 || stack1 === 0) { buffer += stack1; }
- buffer += "\"\n href=\"javascript:void(0);\" style=\"float: right;\">\n <span class=\"ficon cogs large\"></span>\n </a>\n <div style=\"clear: both;\"></div>\n </div>\n\n <div id=\"history-subtitle-area\">\n <div id=\"history-size\" style=\"float:left;\">";
+ buffer += "\n </div>\n </div>\n\n <div id=\"history-subtitle-area\">\n <div id=\"history-size\" style=\"float:left;\">";
foundHelper = helpers.nice_size;
if (foundHelper) { stack1 = foundHelper.call(depth0, {hash:{}}); }
else { stack1 = depth0.nice_size; stack1 = typeof stack1 === functionType ? stack1() : stack1; }
- buffer += escapeExpression(stack1) + "</div>\n ";
+ buffer += escapeExpression(stack1) + "</div>\n\n <div id=\"history-secondary-links\" style=\"float: right;\">\n ";
stack1 = depth0.user;
stack1 = stack1 == null || stack1 === false ? stack1 : stack1.email;
- stack1 = helpers['if'].call(depth0, stack1, {hash:{},inverse:self.noop,fn:self.program(9, program9, data)});
+ stack1 = helpers['if'].call(depth0, stack1, {hash:{},inverse:self.noop,fn:self.program(7, program7, data)});
if(stack1 || stack1 === 0) { buffer += stack1; }
- buffer += "\n <div style=\"clear: both;\"></div>\n </div>\n\n ";
+ buffer += "\n <a id=\"history-refresh\" class=\"tooltip\" title=\"";
+ foundHelper = helpers.local;
+ if (foundHelper) { stack1 = foundHelper.call(depth0, {hash:{},inverse:self.noop,fn:self.program(12, program12, data)}); }
+ else { stack1 = depth0.local; stack1 = typeof stack1 === functionType ? stack1() : stack1; }
+ if (!helpers.local) { stack1 = blockHelperMissing.call(depth0, stack1, {hash:{},inverse:self.noop,fn:self.program(12, program12, data)}); }
+ if(stack1 || stack1 === 0) { buffer += stack1; }
+ buffer += "\"\n href=\"javascript:void(0);\">\n <span class=\"ficon refresh large\"></span>\n </a>\n <a id=\"history-action-popup\" class=\"tooltip\" title=\"";
+ foundHelper = helpers.local;
+ if (foundHelper) { stack1 = foundHelper.call(depth0, {hash:{},inverse:self.noop,fn:self.program(14, program14, data)}); }
+ else { stack1 = depth0.local; stack1 = typeof stack1 === functionType ? stack1() : stack1; }
+ if (!helpers.local) { stack1 = blockHelperMissing.call(depth0, stack1, {hash:{},inverse:self.noop,fn:self.program(14, program14, data)}); }
+ if(stack1 || stack1 === 0) { buffer += stack1; }
+ buffer += "\"\n href=\"javascript:void(0);\">\n <span class=\"ficon cogs large\"></span>\n </a>\n </div>\n <div style=\"clear: both;\"></div>\n </div>\n\n ";
stack1 = depth0.deleted;
- stack1 = helpers['if'].call(depth0, stack1, {hash:{},inverse:self.noop,fn:self.program(14, program14, data)});
+ stack1 = helpers['if'].call(depth0, stack1, {hash:{},inverse:self.noop,fn:self.program(16, program16, data)});
if(stack1 || stack1 === 0) { buffer += stack1; }
buffer += "\n\n ";
buffer += "\n ";
buffer += "\n ";
stack1 = depth0.user;
stack1 = stack1 == null || stack1 === false ? stack1 : stack1.email;
- stack1 = helpers['if'].call(depth0, stack1, {hash:{},inverse:self.noop,fn:self.program(18, program18, data)});
+ stack1 = helpers['if'].call(depth0, stack1, {hash:{},inverse:self.noop,fn:self.program(20, program20, data)});
if(stack1 || stack1 === 0) { buffer += stack1; }
buffer += "\n\n ";
stack1 = depth0.message;
- stack1 = helpers['if'].call(depth0, stack1, {hash:{},inverse:self.noop,fn:self.program(30, program30, data)});
+ stack1 = helpers['if'].call(depth0, stack1, {hash:{},inverse:self.noop,fn:self.program(32, program32, data)});
if(stack1 || stack1 === 0) { buffer += stack1; }
buffer += "\n\n <div id=\"quota-message-container\" style=\"display: none\">\n <div id=\"quota-message\" class=\"errormessage\">\n ";
foundHelper = helpers.local;
- if (foundHelper) { stack1 = foundHelper.call(depth0, {hash:{},inverse:self.noop,fn:self.program(32, program32, data)}); }
+ if (foundHelper) { stack1 = foundHelper.call(depth0, {hash:{},inverse:self.noop,fn:self.program(34, program34, data)}); }
else { stack1 = depth0.local; stack1 = typeof stack1 === functionType ? stack1() : stack1; }
- if (!helpers.local) { stack1 = blockHelperMissing.call(depth0, stack1, {hash:{},inverse:self.noop,fn:self.program(32, program32, data)}); }
+ if (!helpers.local) { stack1 = blockHelperMissing.call(depth0, stack1, {hash:{},inverse:self.noop,fn:self.program(34, program34, data)}); }
if(stack1 || stack1 === 0) { buffer += stack1; }
buffer += "\n </div>\n </div>\n</div>\n\n<div id=\"";
foundHelper = helpers.id;
@@ -242,9 +253,9 @@
else { stack1 = depth0.id; stack1 = typeof stack1 === functionType ? stack1() : stack1; }
buffer += escapeExpression(stack1) + "-datasets\" class=\"history-datasets-list\"></div>\n\n<div class=\"infomessagesmall\" id=\"emptyHistoryMessage\" style=\"display: none;\">\n ";
foundHelper = helpers.local;
- if (foundHelper) { stack1 = foundHelper.call(depth0, {hash:{},inverse:self.noop,fn:self.program(34, program34, data)}); }
+ if (foundHelper) { stack1 = foundHelper.call(depth0, {hash:{},inverse:self.noop,fn:self.program(36, program36, data)}); }
else { stack1 = depth0.local; stack1 = typeof stack1 === functionType ? stack1() : stack1; }
- if (!helpers.local) { stack1 = blockHelperMissing.call(depth0, stack1, {hash:{},inverse:self.noop,fn:self.program(34, program34, data)}); }
+ if (!helpers.local) { stack1 = blockHelperMissing.call(depth0, stack1, {hash:{},inverse:self.noop,fn:self.program(36, program36, data)}); }
if(stack1 || stack1 === 0) { buffer += stack1; }
buffer += "\n</div>";
return buffer;});
diff -r 0a1db986221074e72131afd6d16320357843c5f8 -r 8e52e426487bf732d051e538ac4cd7d352aa5065 static/scripts/templates/hda-templates.html
--- a/static/scripts/templates/hda-templates.html
+++ b/static/scripts/templates/hda-templates.html
@@ -54,7 +54,7 @@
<!-- ---------------------------------------------------------------------- FAILED META WARNING -->
-<script type="text/template" class="template-hda" id="template-hda-failedMetaData">
+<script type="text/template" class="template-hda" id="template-hda-failedMetadata">
{{#warningmessagesmall}}
{{#local}}An error occurred setting the metadata for this dataset.{{/local}}
You may be able to <a href="{{ urls.edit }}" target="galaxy_main">set it manually or retry auto-detection</a>.
diff -r 0a1db986221074e72131afd6d16320357843c5f8 -r 8e52e426487bf732d051e538ac4cd7d352aa5065 static/scripts/templates/history-templates.html
--- a/static/scripts/templates/history-templates.html
+++ b/static/scripts/templates/history-templates.html
@@ -6,7 +6,7 @@
<div id="history-title-area" class="historyLinks">
{{! history name (if any) }}
- <div id="history-name-container" style="float: left;">
+ <div id="history-name-container">
{{! TODO: factor out conditional css }}
{{#if user.email}}
<div id="history-name" class="tooltip editable-text"
@@ -16,24 +16,27 @@
title="{{#local}}You must be logged in to edit your history name{{/local}}">{{name}}</div>
{{/if}}
</div>
-
- <a id="history-action-popup" class="tooltip" title="{{#local}}Click to see more actions for this history{{/local}}"
- href="javascript:void(0);" style="float: right;">
- <span class="ficon cogs large"></span>
- </a>
- <div style="clear: both;"></div></div><div id="history-subtitle-area"><div id="history-size" style="float:left;">{{nice_size}}</div>
- {{#if user.email}}
+
<div id="history-secondary-links" style="float: right;">
+ {{#if user.email}}
<a id="history-tag" title="{{#local}}Edit history tags{{/local}}"
class="icon-button tags tooltip" target="galaxy_main" href="javascript:void(0)"></a><a id="history-annotate" title="{{#local}}Edit history annotation{{/local}}"
class="icon-button annotate tooltip" target="galaxy_main" href="javascript:void(0)"></a>
+ {{/if}}
+ <a id="history-refresh" class="tooltip" title="{{#local}}Refresh this display{{/local}}"
+ href="javascript:void(0);">
+ <span class="ficon refresh large"></span>
+ </a>
+ <a id="history-action-popup" class="tooltip" title="{{#local}}Click to see more actions for this history{{/local}}"
+ href="javascript:void(0);">
+ <span class="ficon cogs large"></span>
+ </a></div>
- {{/if}}
<div style="clear: both;"></div></div>
diff -r 0a1db986221074e72131afd6d16320357843c5f8 -r 8e52e426487bf732d051e538ac4cd7d352aa5065 templates/root/alternate_history.mako
--- a/templates/root/alternate_history.mako
+++ b/templates/root/alternate_history.mako
@@ -440,6 +440,15 @@
#history-secondary-links {
}
+ /*why this is getting underlined is beyond me*/
+ #history-secondary-links #history-refresh {
+ text-decoration: none;
+ }
+ /*too tweaky*/
+ #history-annotate {
+ margin-right: 3px;
+ }
+
#history-tag-area, #history-annotation-area {
margin: 10px 0px 10px 0px;
}
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/f3c2243d9f6c/
changeset: f3c2243d9f6c
user: carlfeberhard
date: 2012-11-15 18:03:38
summary: Enabling client side history panel
affected #: 4 files
diff -r b8c331135aedd1754ae170f311c33e2da315189c -r f3c2243d9f6c215f1b2d2ed01409d24b6e317075 lib/galaxy/webapps/galaxy/controllers/root.py
--- a/lib/galaxy/webapps/galaxy/controllers/root.py
+++ b/lib/galaxy/webapps/galaxy/controllers/root.py
@@ -122,18 +122,17 @@
show_hidden = util.string_as_bool_or_none( show_hidden )
datasets = []
- history_panel_template = "root/history.mako"
+ history_panel_template = "root/alternate_history.mako"
- # history panel -> backbone (WIP - uncomment next to use)
- #USE_ALTERNATE = True
- if 'USE_ALTERNATE' in locals():
+ # keeping this switch here for a while - uncomment the next line to use the original mako history panel
+ #USE_ORIGINAL = True
+ if 'USE_ORIGINAL' in locals():
+ datasets = self.get_history_datasets( trans, history, show_deleted, show_hidden, show_purged )
+ history_panel_template = "root/history.mako"
+
+ else:
datasets = self.get_history_datasets( trans, history,
show_deleted=True, show_hidden=True, show_purged=True )
- #datasets = self.get_history_datasets( trans, history, show_deleted, show_hidden, show_purged )
- history_panel_template = "root/alternate_history.mako"
-
- else:
- datasets = self.get_history_datasets( trans, history, show_deleted, show_hidden, show_purged )
return trans.stream_template_mako( history_panel_template,
history = history,
diff -r b8c331135aedd1754ae170f311c33e2da315189c -r f3c2243d9f6c215f1b2d2ed01409d24b6e317075 static/scripts/templates/compiled/template-history-historyPanel.js
--- a/static/scripts/templates/compiled/template-history-historyPanel.js
+++ b/static/scripts/templates/compiled/template-history-historyPanel.js
@@ -47,7 +47,7 @@
function program7(depth0,data) {
- return "Click to see more actions";}
+ return "Click to see more actions for this history";}
function program9(depth0,data) {
diff -r b8c331135aedd1754ae170f311c33e2da315189c -r f3c2243d9f6c215f1b2d2ed01409d24b6e317075 static/scripts/templates/history-templates.html
--- a/static/scripts/templates/history-templates.html
+++ b/static/scripts/templates/history-templates.html
@@ -17,7 +17,7 @@
{{/if}}
</div>
- <a id="history-action-popup" class="tooltip" title="{{#local}}Click to see more actions{{/local}}"
+ <a id="history-action-popup" class="tooltip" title="{{#local}}Click to see more actions for this history{{/local}}"
href="javascript:void(0);" style="float: right;"><span class="ficon cogs large"></span></a>
diff -r b8c331135aedd1754ae170f311c33e2da315189c -r f3c2243d9f6c215f1b2d2ed01409d24b6e317075 templates/root/alternate_history.mako
--- a/templates/root/alternate_history.mako
+++ b/templates/root/alternate_history.mako
@@ -284,7 +284,6 @@
// global backbone models
top.Galaxy.currUser = top.Galaxy.currUser;
top.Galaxy.currHistoryPanel = top.Galaxy.currHistoryPanel;
- top.Galaxy.historyPanels = top.Galaxy.historyPanels || [];
top.Galaxy.paths = galaxy_paths;
@@ -327,6 +326,7 @@
// i don't like this history+user relationship, but user authentication changes views/behaviour
history.user = user;
+ // create the history panel
var historyPanel = new HistoryPanel({
model : new History( history, hdas ),
urlTemplates : galaxy_paths.attributes,
@@ -380,7 +380,6 @@
if( !Galaxy.currHistoryPanel ){ Galaxy.currHistoryPanel = historyPanel; }
- if( !( historyPanel in Galaxy.historyPanels ) ){ Galaxy.historyPanels.unshift( historyPanel ); }
return;
});
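For reference, the wiring this template performs reduces to constructing one panel from the bootstrapped data. A condensed sketch of the calls visible in this diff; history, hdas, and galaxy_paths are the template-provided bootstrap values, and the render call is assumed rather than shown in the hunk:

    // create the history panel from the bootstrapped history + HDA data
    var historyPanel = new HistoryPanel({
        model        : new History( history, hdas ),
        urlTemplates : galaxy_paths.attributes
    });
    historyPanel.render();
    // the first panel created becomes the page-global current panel
    if( !Galaxy.currHistoryPanel ){ Galaxy.currHistoryPanel = historyPanel; }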
https://bitbucket.org/galaxy/galaxy-central/changeset/0a1db9862210/
changeset: 0a1db9862210
user: carlfeberhard
date: 2012-11-15 18:04:17
summary: pack scripts
affected #: 1 file
diff -r f3c2243d9f6c215f1b2d2ed01409d24b6e317075 -r 0a1db986221074e72131afd6d16320357843c5f8 static/scripts/packed/templates/compiled/template-history-historyPanel.js
--- a/static/scripts/packed/templates/compiled/template-history-historyPanel.js
+++ b/static/scripts/packed/templates/compiled/template-history-historyPanel.js
@@ -1,1 +1,1 @@
-(function(){var b=Handlebars.template,a=Handlebars.templates=Handlebars.templates||{};a["template-history-historyPanel"]=b(function(k,A,y,q,I){y=y||k.helpers;var z="",n,m,v=this,e="function",c=y.blockHelperMissing,d=this.escapeExpression;function t(N,M){var K="",L,J;K+='\n <div id="history-name" class="tooltip editable-text"\n title="';J=y.local;if(J){L=J.call(N,{hash:{},inverse:v.noop,fn:v.program(2,s,M)})}else{L=N.local;L=typeof L===e?L():L}if(!y.local){L=c.call(N,L,{hash:{},inverse:v.noop,fn:v.program(2,s,M)})}if(L||L===0){K+=L}K+='">';J=y.name;if(J){L=J.call(N,{hash:{}})}else{L=N.name;L=typeof L===e?L():L}K+=d(L)+"</div>\n ";return K}function s(K,J){return"Click to rename history"}function r(N,M){var K="",L,J;K+='\n <div id="history-name" class="tooltip"\n title="';J=y.local;if(J){L=J.call(N,{hash:{},inverse:v.noop,fn:v.program(5,p,M)})}else{L=N.local;L=typeof L===e?L():L}if(!y.local){L=c.call(N,L,{hash:{},inverse:v.noop,fn:v.program(5,p,M)})}if(L||L===0){K+=L}K+='">';J=y.name;if(J){L=J.call(N,{hash:{}})}else{L=N.name;L=typeof L===e?L():L}K+=d(L)+"</div>\n ";return K}function p(K,J){return"You must be logged in to edit your history name"}function o(K,J){return"Click to see more actions"}function j(N,M){var K="",L,J;K+='\n <div id="history-secondary-links" style="float: right;">\n <a id="history-tag" title="';J=y.local;if(J){L=J.call(N,{hash:{},inverse:v.noop,fn:v.program(10,H,M)})}else{L=N.local;L=typeof L===e?L():L}if(!y.local){L=c.call(N,L,{hash:{},inverse:v.noop,fn:v.program(10,H,M)})}if(L||L===0){K+=L}K+='"\n class="icon-button tags tooltip" target="galaxy_main" href="javascript:void(0)"></a>\n <a id="history-annotate" title="';J=y.local;if(J){L=J.call(N,{hash:{},inverse:v.noop,fn:v.program(12,G,M)})}else{L=N.local;L=typeof L===e?L():L}if(!y.local){L=c.call(N,L,{hash:{},inverse:v.noop,fn:v.program(12,G,M)})}if(L||L===0){K+=L}K+='"\n class="icon-button annotate tooltip" target="galaxy_main" href="javascript:void(0)"></a>\n </div>\n ';return K}function H(K,J){return"Edit history tags"}function G(K,J){return"Edit history annotation"}function F(N,M){var K="",L,J;K+="\n ";J=y.warningmessagesmall;if(J){L=J.call(N,{hash:{},inverse:v.noop,fn:v.program(15,E,M)})}else{L=N.warningmessagesmall;L=typeof L===e?L():L}if(!y.warningmessagesmall){L=c.call(N,L,{hash:{},inverse:v.noop,fn:v.program(15,E,M)})}if(L||L===0){K+=L}K+="\n ";return K}function E(M,L){var K,J;J=y.local;if(J){K=J.call(M,{hash:{},inverse:v.noop,fn:v.program(16,D,L)})}else{K=M.local;K=typeof K===e?K():K}if(!y.local){K=c.call(M,K,{hash:{},inverse:v.noop,fn:v.program(16,D,L)})}if(K||K===0){return K}else{return""}}function D(K,J){return"You are currently viewing a deleted history!"}function C(N,M){var K="",L,J;K+='\n <div id="history-tag-annotation">\n\n <div id="history-tag-area" style="display: none">\n <strong>';J=y.local;if(J){L=J.call(N,{hash:{},inverse:v.noop,fn:v.program(19,B,M)})}else{L=N.local;L=typeof L===e?L():L}if(!y.local){L=c.call(N,L,{hash:{},inverse:v.noop,fn:v.program(19,B,M)})}if(L||L===0){K+=L}K+=':</strong>\n <div class="tag-elt"></div>\n </div>\n\n <div id="history-annotation-area" style="display: none">\n <strong>';J=y.local;if(J){L=J.call(N,{hash:{},inverse:v.noop,fn:v.program(21,l,M)})}else{L=N.local;L=typeof L===e?L():L}if(!y.local){L=c.call(N,L,{hash:{},inverse:v.noop,fn:v.program(21,l,M)})}if(L||L===0){K+=L}K+=':</strong>\n <div id="history-annotation-container">\n <div id="history-annotation" class="tooltip editable-text"\n 
title="';J=y.local;if(J){L=J.call(N,{hash:{},inverse:v.noop,fn:v.program(23,i,M)})}else{L=N.local;L=typeof L===e?L():L}if(!y.local){L=c.call(N,L,{hash:{},inverse:v.noop,fn:v.program(23,i,M)})}if(L||L===0){K+=L}K+='">\n ';L=N.annotation;L=y["if"].call(N,L,{hash:{},inverse:v.program(27,g,M),fn:v.program(25,h,M)});if(L||L===0){K+=L}K+="\n </div>\n </div>\n </div>\n </div>\n ";return K}function B(K,J){return"Tags"}function l(K,J){return"Annotation"}function i(K,J){return"Click to edit annotation"}function h(N,M){var K="",L,J;K+="\n ";J=y.annotation;if(J){L=J.call(N,{hash:{}})}else{L=N.annotation;L=typeof L===e?L():L}K+=d(L)+"\n ";return K}function g(N,M){var K="",L,J;K+="\n <em>";J=y.local;if(J){L=J.call(N,{hash:{},inverse:v.noop,fn:v.program(28,f,M)})}else{L=N.local;L=typeof L===e?L():L}if(!y.local){L=c.call(N,L,{hash:{},inverse:v.noop,fn:v.program(28,f,M)})}if(L||L===0){K+=L}K+="</em>\n ";return K}function f(K,J){return"Describe or add notes to history"}function x(N,M){var K="",L,J;K+='\n <div id="message-container">\n <div class="';J=y.status;if(J){L=J.call(N,{hash:{}})}else{L=N.status;L=typeof L===e?L():L}K+=d(L)+'message">\n ';J=y.message;if(J){L=J.call(N,{hash:{}})}else{L=N.message;L=typeof L===e?L():L}K+=d(L)+"\n </div><br />\n </div>\n ";return K}function w(K,J){return"You are over your disk quota.\n Tool execution is on hold until your disk usage drops below your allocated quota."}function u(K,J){return"Your history is empty. Click 'Get Data' on the left pane to start"}z+='<div id="history-controls">\n <div id="history-title-area" class="historyLinks">\n\n ';z+='\n <div id="history-name-container" style="float: left;">\n ';z+="\n ";n=A.user;n=n==null||n===false?n:n.email;n=y["if"].call(A,n,{hash:{},inverse:v.program(4,r,I),fn:v.program(1,t,I)});if(n||n===0){z+=n}z+='\n </div>\n\n <a id="history-action-popup" class="tooltip" title="';m=y.local;if(m){n=m.call(A,{hash:{},inverse:v.noop,fn:v.program(7,o,I)})}else{n=A.local;n=typeof n===e?n():n}if(!y.local){n=c.call(A,n,{hash:{},inverse:v.noop,fn:v.program(7,o,I)})}if(n||n===0){z+=n}z+='"\n href="javascript:void(0);" style="float: right;">\n <span class="ficon cogs large"></span>\n </a>\n <div style="clear: both;"></div>\n </div>\n\n <div id="history-subtitle-area">\n <div id="history-size" style="float:left;">';m=y.nice_size;if(m){n=m.call(A,{hash:{}})}else{n=A.nice_size;n=typeof n===e?n():n}z+=d(n)+"</div>\n ";n=A.user;n=n==null||n===false?n:n.email;n=y["if"].call(A,n,{hash:{},inverse:v.noop,fn:v.program(9,j,I)});if(n||n===0){z+=n}z+='\n <div style="clear: both;"></div>\n </div>\n\n ';n=A.deleted;n=y["if"].call(A,n,{hash:{},inverse:v.noop,fn:v.program(14,F,I)});if(n||n===0){z+=n}z+="\n\n ";z+="\n ";z+="\n ";n=A.user;n=n==null||n===false?n:n.email;n=y["if"].call(A,n,{hash:{},inverse:v.noop,fn:v.program(18,C,I)});if(n||n===0){z+=n}z+="\n\n ";n=A.message;n=y["if"].call(A,n,{hash:{},inverse:v.noop,fn:v.program(30,x,I)});if(n||n===0){z+=n}z+='\n\n <div id="quota-message-container" style="display: none">\n <div id="quota-message" class="errormessage">\n ';m=y.local;if(m){n=m.call(A,{hash:{},inverse:v.noop,fn:v.program(32,w,I)})}else{n=A.local;n=typeof n===e?n():n}if(!y.local){n=c.call(A,n,{hash:{},inverse:v.noop,fn:v.program(32,w,I)})}if(n||n===0){z+=n}z+='\n </div>\n </div>\n</div>\n\n<div id="';m=y.id;if(m){n=m.call(A,{hash:{}})}else{n=A.id;n=typeof n===e?n():n}z+=d(n)+'-datasets" class="history-datasets-list"></div>\n\n<div class="infomessagesmall" id="emptyHistoryMessage" style="display: none;">\n 
';m=y.local;if(m){n=m.call(A,{hash:{},inverse:v.noop,fn:v.program(34,u,I)})}else{n=A.local;n=typeof n===e?n():n}if(!y.local){n=c.call(A,n,{hash:{},inverse:v.noop,fn:v.program(34,u,I)})}if(n||n===0){z+=n}z+="\n</div>";return z})})();
\ No newline at end of file
+(function(){var b=Handlebars.template,a=Handlebars.templates=Handlebars.templates||{};a["template-history-historyPanel"]=b(function(k,A,y,q,I){y=y||k.helpers;var z="",n,m,v=this,e="function",c=y.blockHelperMissing,d=this.escapeExpression;function t(N,M){var K="",L,J;K+='\n <div id="history-name" class="tooltip editable-text"\n title="';J=y.local;if(J){L=J.call(N,{hash:{},inverse:v.noop,fn:v.program(2,s,M)})}else{L=N.local;L=typeof L===e?L():L}if(!y.local){L=c.call(N,L,{hash:{},inverse:v.noop,fn:v.program(2,s,M)})}if(L||L===0){K+=L}K+='">';J=y.name;if(J){L=J.call(N,{hash:{}})}else{L=N.name;L=typeof L===e?L():L}K+=d(L)+"</div>\n ";return K}function s(K,J){return"Click to rename history"}function r(N,M){var K="",L,J;K+='\n <div id="history-name" class="tooltip"\n title="';J=y.local;if(J){L=J.call(N,{hash:{},inverse:v.noop,fn:v.program(5,p,M)})}else{L=N.local;L=typeof L===e?L():L}if(!y.local){L=c.call(N,L,{hash:{},inverse:v.noop,fn:v.program(5,p,M)})}if(L||L===0){K+=L}K+='">';J=y.name;if(J){L=J.call(N,{hash:{}})}else{L=N.name;L=typeof L===e?L():L}K+=d(L)+"</div>\n ";return K}function p(K,J){return"You must be logged in to edit your history name"}function o(K,J){return"Click to see more actions for this history"}function j(N,M){var K="",L,J;K+='\n <div id="history-secondary-links" style="float: right;">\n <a id="history-tag" title="';J=y.local;if(J){L=J.call(N,{hash:{},inverse:v.noop,fn:v.program(10,H,M)})}else{L=N.local;L=typeof L===e?L():L}if(!y.local){L=c.call(N,L,{hash:{},inverse:v.noop,fn:v.program(10,H,M)})}if(L||L===0){K+=L}K+='"\n class="icon-button tags tooltip" target="galaxy_main" href="javascript:void(0)"></a>\n <a id="history-annotate" title="';J=y.local;if(J){L=J.call(N,{hash:{},inverse:v.noop,fn:v.program(12,G,M)})}else{L=N.local;L=typeof L===e?L():L}if(!y.local){L=c.call(N,L,{hash:{},inverse:v.noop,fn:v.program(12,G,M)})}if(L||L===0){K+=L}K+='"\n class="icon-button annotate tooltip" target="galaxy_main" href="javascript:void(0)"></a>\n </div>\n ';return K}function H(K,J){return"Edit history tags"}function G(K,J){return"Edit history annotation"}function F(N,M){var K="",L,J;K+="\n ";J=y.warningmessagesmall;if(J){L=J.call(N,{hash:{},inverse:v.noop,fn:v.program(15,E,M)})}else{L=N.warningmessagesmall;L=typeof L===e?L():L}if(!y.warningmessagesmall){L=c.call(N,L,{hash:{},inverse:v.noop,fn:v.program(15,E,M)})}if(L||L===0){K+=L}K+="\n ";return K}function E(M,L){var K,J;J=y.local;if(J){K=J.call(M,{hash:{},inverse:v.noop,fn:v.program(16,D,L)})}else{K=M.local;K=typeof K===e?K():K}if(!y.local){K=c.call(M,K,{hash:{},inverse:v.noop,fn:v.program(16,D,L)})}if(K||K===0){return K}else{return""}}function D(K,J){return"You are currently viewing a deleted history!"}function C(N,M){var K="",L,J;K+='\n <div id="history-tag-annotation">\n\n <div id="history-tag-area" style="display: none">\n <strong>';J=y.local;if(J){L=J.call(N,{hash:{},inverse:v.noop,fn:v.program(19,B,M)})}else{L=N.local;L=typeof L===e?L():L}if(!y.local){L=c.call(N,L,{hash:{},inverse:v.noop,fn:v.program(19,B,M)})}if(L||L===0){K+=L}K+=':</strong>\n <div class="tag-elt"></div>\n </div>\n\n <div id="history-annotation-area" style="display: none">\n <strong>';J=y.local;if(J){L=J.call(N,{hash:{},inverse:v.noop,fn:v.program(21,l,M)})}else{L=N.local;L=typeof L===e?L():L}if(!y.local){L=c.call(N,L,{hash:{},inverse:v.noop,fn:v.program(21,l,M)})}if(L||L===0){K+=L}K+=':</strong>\n <div id="history-annotation-container">\n <div id="history-annotation" class="tooltip editable-text"\n 
title="';J=y.local;if(J){L=J.call(N,{hash:{},inverse:v.noop,fn:v.program(23,i,M)})}else{L=N.local;L=typeof L===e?L():L}if(!y.local){L=c.call(N,L,{hash:{},inverse:v.noop,fn:v.program(23,i,M)})}if(L||L===0){K+=L}K+='">\n ';L=N.annotation;L=y["if"].call(N,L,{hash:{},inverse:v.program(27,g,M),fn:v.program(25,h,M)});if(L||L===0){K+=L}K+="\n </div>\n </div>\n </div>\n </div>\n ";return K}function B(K,J){return"Tags"}function l(K,J){return"Annotation"}function i(K,J){return"Click to edit annotation"}function h(N,M){var K="",L,J;K+="\n ";J=y.annotation;if(J){L=J.call(N,{hash:{}})}else{L=N.annotation;L=typeof L===e?L():L}K+=d(L)+"\n ";return K}function g(N,M){var K="",L,J;K+="\n <em>";J=y.local;if(J){L=J.call(N,{hash:{},inverse:v.noop,fn:v.program(28,f,M)})}else{L=N.local;L=typeof L===e?L():L}if(!y.local){L=c.call(N,L,{hash:{},inverse:v.noop,fn:v.program(28,f,M)})}if(L||L===0){K+=L}K+="</em>\n ";return K}function f(K,J){return"Describe or add notes to history"}function x(N,M){var K="",L,J;K+='\n <div id="message-container">\n <div class="';J=y.status;if(J){L=J.call(N,{hash:{}})}else{L=N.status;L=typeof L===e?L():L}K+=d(L)+'message">\n ';J=y.message;if(J){L=J.call(N,{hash:{}})}else{L=N.message;L=typeof L===e?L():L}K+=d(L)+"\n </div><br />\n </div>\n ";return K}function w(K,J){return"You are over your disk quota.\n Tool execution is on hold until your disk usage drops below your allocated quota."}function u(K,J){return"Your history is empty. Click 'Get Data' on the left pane to start"}z+='<div id="history-controls">\n <div id="history-title-area" class="historyLinks">\n\n ';z+='\n <div id="history-name-container" style="float: left;">\n ';z+="\n ";n=A.user;n=n==null||n===false?n:n.email;n=y["if"].call(A,n,{hash:{},inverse:v.program(4,r,I),fn:v.program(1,t,I)});if(n||n===0){z+=n}z+='\n </div>\n\n <a id="history-action-popup" class="tooltip" title="';m=y.local;if(m){n=m.call(A,{hash:{},inverse:v.noop,fn:v.program(7,o,I)})}else{n=A.local;n=typeof n===e?n():n}if(!y.local){n=c.call(A,n,{hash:{},inverse:v.noop,fn:v.program(7,o,I)})}if(n||n===0){z+=n}z+='"\n href="javascript:void(0);" style="float: right;">\n <span class="ficon cogs large"></span>\n </a>\n <div style="clear: both;"></div>\n </div>\n\n <div id="history-subtitle-area">\n <div id="history-size" style="float:left;">';m=y.nice_size;if(m){n=m.call(A,{hash:{}})}else{n=A.nice_size;n=typeof n===e?n():n}z+=d(n)+"</div>\n ";n=A.user;n=n==null||n===false?n:n.email;n=y["if"].call(A,n,{hash:{},inverse:v.noop,fn:v.program(9,j,I)});if(n||n===0){z+=n}z+='\n <div style="clear: both;"></div>\n </div>\n\n ';n=A.deleted;n=y["if"].call(A,n,{hash:{},inverse:v.noop,fn:v.program(14,F,I)});if(n||n===0){z+=n}z+="\n\n ";z+="\n ";z+="\n ";n=A.user;n=n==null||n===false?n:n.email;n=y["if"].call(A,n,{hash:{},inverse:v.noop,fn:v.program(18,C,I)});if(n||n===0){z+=n}z+="\n\n ";n=A.message;n=y["if"].call(A,n,{hash:{},inverse:v.noop,fn:v.program(30,x,I)});if(n||n===0){z+=n}z+='\n\n <div id="quota-message-container" style="display: none">\n <div id="quota-message" class="errormessage">\n ';m=y.local;if(m){n=m.call(A,{hash:{},inverse:v.noop,fn:v.program(32,w,I)})}else{n=A.local;n=typeof n===e?n():n}if(!y.local){n=c.call(A,n,{hash:{},inverse:v.noop,fn:v.program(32,w,I)})}if(n||n===0){z+=n}z+='\n </div>\n </div>\n</div>\n\n<div id="';m=y.id;if(m){n=m.call(A,{hash:{}})}else{n=A.id;n=typeof n===e?n():n}z+=d(n)+'-datasets" class="history-datasets-list"></div>\n\n<div class="infomessagesmall" id="emptyHistoryMessage" style="display: none;">\n 
';m=y.local;if(m){n=m.call(A,{hash:{},inverse:v.noop,fn:v.program(34,u,I)})}else{n=A.local;n=typeof n===e?n():n}if(!y.local){n=c.call(A,n,{hash:{},inverse:v.noop,fn:v.program(34,u,I)})}if(n||n===0){z+=n}z+="\n</div>";return z})})();
\ No newline at end of file
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: natefoo: Fix the task runner I broke in 73e05bc. Thanks John Chilton.
by Bitbucket 15 Nov '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/b8c331135aed/
changeset: b8c331135aed
user: natefoo
date: 2012-11-15 17:48:00
summary: Fix the task runner I broke in 73e05bc. Thanks John Chilton.
affected #: 1 file
diff -r 4e50648f6e5d015d253cb3c9e68dd731729b06f7 -r b8c331135aedd1754ae170f311c33e2da315189c lib/galaxy/jobs/mapper.py
--- a/lib/galaxy/jobs/mapper.py
+++ b/lib/galaxy/jobs/mapper.py
@@ -116,7 +116,7 @@
def __cache_job_runner_url( self, params ):
# If there's already a runner set in the Job object, don't overwrite from the tool
- if self.job_runner_name is not None:
+ if self.job_runner_name is not None and not self.job_runner_name.startswith('tasks'):
raw_job_runner_url = self.job_runner_name
else:
raw_job_runner_url = self.job_wrapper.tool.get_job_runner_url( params )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/2d1402caf21a/
changeset: 2d1402caf21a
user: carlfeberhard
date: 2012-11-15 17:25:18
summary: static/scripts/mvc: jsDocs on all. I've used jsdoc-toolkit via node with success on these; the Rhino version should work also.
affected #: 11 files
diff -r 94627de6252824193ff0ff659807d17e59e4f94f -r 2d1402caf21a072edfdcaca0b12f1d38b4421b6e static/scripts/mvc/base-mvc.js
--- a/static/scripts/mvc/base-mvc.js
+++ b/static/scripts/mvc/base-mvc.js
@@ -43,14 +43,19 @@
//==============================================================================
-/**
- * Adds logging capabilities to your Models/Views
- * can be used with plain browser console (or something more complex like an AJAX logger)
+/** @class Mixin to add logging capabilities to an object.
+ * Designed to allow switching an object's log output off/on at one central
+ * statement. Can be used with plain browser console (or something more
+ * complex like an AJAX logger).
+ * <br />NOTE: currently only uses the console.debug log function
+ * (as opposed to debug, error, warn, etc.)
+ * @name LoggableMixin
*
- * add to your models/views at the definition using chaining:
+ * @example
+ * // Add to your models/views at the definition using chaining:
* var MyModel = BaseModel.extend( LoggableMixin ).extend({ // ... });
*
- * or - more explicitly AFTER the definition:
+ * // or - more explicitly AFTER the definition:
* var MyModel = BaseModel.extend({
* logger : console
* // ...
@@ -58,12 +63,19 @@
* })
* _.extend( MyModel.prototype, LoggableMixin )
*
- * NOTE: currently only uses the console.debug log function (as opposed to debug, error, warn, etc.)
*/
-var LoggableMixin = {
+var LoggableMixin = /** @lends LoggableMixin# */{
+
+ /** The logging object whose log function will be used to output
+ * messages. Null will suppress all logging. Commonly set to console.
+ */
// replace null with console (if available) to see all logs
logger : null,
+ /** Output log messages/arguments to logger.
+ * @param {Arguments} ... (this function is variadic)
+ * @returns undefined if not this.logger
+ */
log : function(){
if( this.logger ){
return this.logger.log.apply( this.logger, arguments );
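A short usage sketch of the mixin documented above (BaseModel and LoggableMixin as defined in base-mvc.js; the model body is illustrative only):

    var MyModel = BaseModel.extend( LoggableMixin ).extend({
        logger : console,   // flip logging on at this one central point
        doWork : function(){
            this.log( this + '.doWork called' );   // forwarded to the logger
        }
    });
    new MyModel().doWork();   // logs; set logger : null to silence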
@@ -74,21 +86,29 @@
// =============================================================================
-/** Global string localization object (and global short form alias)
- * set with either:
- * GalaxyLocalization.setLocalizedString( original, localized )
- * GalaxyLocalization.setLocalizedString({ original1 : localized1, original2 : localized2 })
- * get with either:
- * GalaxyLocalization.localize( string )
- * _l( string )
+/** @class string localizer (and global short form alias)
+ *
+ * @example
+ * // set with either:
+ * GalaxyLocalization.setLocalizedString( original, localized )
+ * GalaxyLocalization.setLocalizedString({ original1 : localized1, original2 : localized2 })
+ * // get with either:
+ * GalaxyLocalization.localize( string )
+ * _l( string )
+ *
+ * @constructs
*/
//TODO: move to Galaxy.Localization (maybe galaxy.base.js)
var GalaxyLocalization = jQuery.extend( {}, {
+ /** shortened, alias reference to GalaxyLocalization.localize */
ALIAS_NAME : '_l',
+ /** map of available localized strings (english -> localized) */
localizedStrings : {},
- // Set a single English string -> localized string association, or set an entire map of those associations
- // Pass in either two strings (english, localized) or just an obj (map) of english : localized
+ /** Set a single English string -> localized string association, or set an entire map of those associations
+ * @param {String or Object} str_or_obj english (key) string or a map of english -> localized strings
+ * @param {String} localizedString the localized string if str_or_obj was a string
+ */
setLocalizedString : function( str_or_obj, localizedString ){
//console.debug( this + '.setLocalizedString:', str_or_obj, localizedString );
var self = this;
@@ -117,7 +137,10 @@
}
},
- // Attempt to get a localized string for strToLocalize. If not found, return the original strToLocalize
+ /** Attempt to get a localized string for strToLocalize. If not found, return the original strToLocalize.
+ * @param {String} strToLocalize the string to localize
+ * @returns either the localized string if found or strToLocalize if not found
+ */
localize : function( strToLocalize ){
//console.debug( this + '.localize:', strToLocalize );
@@ -132,6 +155,7 @@
return this.localizedStrings[ strToLocalize ] || strToLocalize;
},
+ /** String representation. */
toString : function(){ return 'GalaxyLocalization'; }
});
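Putting the documented API together, a brief sketch (the localized strings are made-up examples):

    GalaxyLocalization.setLocalizedString( 'Tags', 'Schlagworte' );
    GalaxyLocalization.setLocalizedString({ 'Annotation' : 'Anmerkung' });
    GalaxyLocalization.localize( 'Tags' );   // -> 'Schlagworte'
    _l( 'Annotation' );                      // short-form alias -> 'Anmerkung'
    _l( 'Unknown string' );                  // not found -> returned unchanged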
@@ -146,16 +170,17 @@
//==============================================================================
/**
- * @class PersistantStorage
- * persistant storage adapter to:
- * provide an easy interface to object based storage using method chaining
- * allow easy change of the storage engine used (h5's local storage?)
+ * @class persistant storage adapter.
+ * Provides an easy interface to object based storage using method chaining.
+ * Allows easy change of the storage engine used (h5's local storage?).
+ * @augments StorageRecursionHelper
*
* @param {String} storageKey : the key the storage engine will place the storage object under
* @param {Object} storageDefaults : [optional] initial object to set up storage with
*
- * @example :
- * HistoryPanel.storage = new PersistanStorage( HistoryPanel.toString(), { visibleItems, {} })
+ * @example
+ * // example of construction and use
+ * HistoryPanel.storage = new PersistantStorage( HistoryPanel.toString(), { visibleItems : {} });
* itemView.bind( 'toggleBodyVisibility', function( id, visible ){
* if( visible ){
* HistoryPanel.storage.get( 'visibleItems' ).set( id, true );
@@ -163,6 +188,7 @@
* HistoryPanel.storage.get( 'visibleItems' ).deleteKey( id );
* }
* });
+ * @constructor
*/
var PersistantStorage = function( storageKey, storageDefaults ){
if( !storageKey ){
@@ -176,16 +202,21 @@
STORAGE_ENGINE_SETTER = jQuery.jStorage.set,
STORAGE_ENGINE_KEY_DELETER = jQuery.jStorage.deleteKey;
- // recursion helper for method chaining access
- var StorageRecursionHelper = function( data, parent ){
+ /** Inner, recursive, private class for method chaining access.
+ * @name StorageRecursionHelper
+ * @constructor
+ */
+ function StorageRecursionHelper( data, parent ){
//console.debug( 'new StorageRecursionHelper. data:', data );
data = data || {};
parent = parent || null;
- return {
- // get a value from the storage obj named 'key',
- // if it's an object - return a new StorageRecursionHelper wrapped around it
- // if it's something simpler - return the value
- // if this isn't passed a key - return the data at this level of recursion
+
+ return /** @lends StorageRecursionHelper.prototype */{
+ /** get a value from the storage obj named 'key',
+ * if it's an object - return a new StorageRecursionHelper wrapped around it
+ * if it's something simpler - return the value
+ * if this isn't passed a key - return the data at this level of recursion
+ */
get : function( key ){
//console.debug( this + '.get', key );
if( key === undefined ){
@@ -197,32 +228,33 @@
}
return undefined;
},
+ /** get the underlying data based on this key */
// set a value on the current data - then pass up to top to save current entire object in storage
set : function( key, value ){
//TODO: add parameterless variation setting the data somehow
// ??: difficult bc of obj by ref, closure
//console.debug( this + '.set', key, value );
data[ key ] = value;
- this.save();
+ this._save();
return this;
},
// remove a key at this level - then save entire (as 'set' above)
deleteKey : function( key ){
//console.debug( this + '.deleteKey', key );
delete data[ key ];
- this.save();
+ this._save();
return this;
},
// pass up the recursion chain (see below for base case)
- save : function(){
+ _save : function(){
//console.debug( this + '.save', parent );
- return parent.save();
+ return parent._save();
},
toString : function(){
return ( 'StorageRecursionHelper(' + data + ')' );
}
};
- };
+ }
//??: more readable to make another class?
var returnedStorage = {};
@@ -238,17 +270,26 @@
// the object returned by this constructor will be a modified StorageRecursionHelper
returnedStorage = new StorageRecursionHelper( data );
- // the base case for save()'s upward recursion - save everything to storage
- returnedStorage.save = function( newData ){
- //console.debug( returnedStorage, '.save:', JSON.stringify( returnedStorage.get() ) );
- STORAGE_ENGINE_SETTER( storageKey, returnedStorage.get() );
- };
- // delete function to remove the base data object from the storageEngine
- returnedStorage.destroy = function(){
- //console.debug( returnedStorage, '.destroy:' );
- STORAGE_ENGINE_KEY_DELETER( storageKey );
- };
- returnedStorage.toString = function(){ return 'PersistantStorage(' + data + ')'; };
+
+ jQuery.extend( returnedStorage, /** @lends PersistantStorage.prototype */{
+ /** The base case for save()'s upward recursion - save everything to storage.
+ * @private
+ * @param {Any} newData data object to save to storage
+ */
+ _save : function( newData ){
+ //console.debug( returnedStorage, '._save:', JSON.stringify( returnedStorage.get() ) );
+ return STORAGE_ENGINE_SETTER( storageKey, returnedStorage.get() );
+ },
+ /** Delete function to remove the entire base data object from the storageEngine.
+ */
+ destroy : function(){
+ //console.debug( returnedStorage, '.destroy:' );
+ return STORAGE_ENGINE_KEY_DELETER( storageKey );
+ },
+ /** String representation.
+ */
+ toString : function(){ return 'PersistantStorage(' + data + ')'; }
+ });
return returnedStorage;
};
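A minimal sketch of the chaining interface described above, assuming jQuery.jStorage is available as the storage engine (per the diff):

    var storage = new PersistantStorage( 'HistoryPanel', { visibleItems : {} } );
    storage.get( 'visibleItems' ).set( 'hda-1', true );    // set() saves upward
    storage.get( 'visibleItems' ).get( 'hda-1' );          // -> true
    storage.get( 'visibleItems' ).deleteKey( 'hda-1' );    // also persists
    storage.destroy();    // remove the whole object from the engine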
diff -r 94627de6252824193ff0ff659807d17e59e4f94f -r 2d1402caf21a072edfdcaca0b12f1d38b4421b6e static/scripts/mvc/dataset/hda-base.js
--- a/static/scripts/mvc/dataset/hda-base.js
+++ b/static/scripts/mvc/dataset/hda-base.js
@@ -2,33 +2,47 @@
// "../mvc/base-mvc"
//], function(){
//==============================================================================
-/** read only view for HistoryDatasetAssociations
- *
+/** @class Read only view for HistoryDatasetAssociation.
+ * @name HDABaseView
+ *
+ * @augments BaseView
+ * @borrows LoggableMixin#logger as #logger
+ * @borrows LoggableMixin#log as #log
+ * @constructs
*/
-var HDABaseView = BaseView.extend( LoggableMixin ).extend({
- //??TODO: add alias in initialize this.hda = this.model?
- // view for HistoryDatasetAssociation model above
+var HDABaseView = BaseView.extend( LoggableMixin ).extend(
+/** @lends HDABaseView.prototype */{
- // uncomment this out see log messages
+ ///** logger used to record this.log messages, commonly set to console */
+ //// comment this out to suppress log output
//logger : console,
tagName : "div",
className : "historyItemContainer",
- // ................................................................................ SET UP
+ // ......................................................................... SET UP
+ /** Set up the view, cache url templates, bind listeners
+ * @param {Object} attributes
+ * @config {Object} urlTemplates nested object containing url templates for this view
+ * @throws 'needs urlTemplates' if urlTemplates isn't present
+ * @see Backbone.View#initialize
+ */
initialize : function( attributes ){
this.log( this + '.initialize:', attributes );
- // which buttons go in most states (ok/failed meta are more complicated)
+ /** list of rendering functions for the default, primary icon-buttons. */
this.defaultPrimaryActionButtonRenderers = [
this._render_showParamsButton
];
// render urlTemplates (gen. provided by GalaxyPaths) to urls
+ //TODO:?? render urls here or in render()?
if( !attributes.urlTemplates ){ throw( 'HDAView needs urlTemplates on initialize' ); }
- this.urls = this.renderUrls( attributes.urlTemplates, this.model.toJSON() );
+ /** web controller urls for functions relating to this hda. These
+ * are rendered from urlTemplates using the model data. */
+ this.urls = this._renderUrls( attributes.urlTemplates, this.model.toJSON() );
- // whether the body of this hda is expanded (shown)
+ /** is the body of this hda view expanded/not. */
this.expanded = attributes.expanded || false;
// re-render the entire view on any model change
@@ -38,22 +52,25 @@
//}, this );
},
- // urlTemplates is a map (or nested map) of underscore templates (currently, anyhoo)
- // use the templates to create the apropo urls for each action this ds could use
- renderUrls : function( urlTemplates, modelJson ){
+ /** render the urls for this hda using the model data and the url templates from initialize.
+ * @param {Object} urlTemplates a map (or nested map) of underscore templates (currently, anyhoo)
+ * @param {Object} modelJson data from the model
+ * @returns {Object} the templated urls
+ */
+ _renderUrls : function( urlTemplates, modelJson ){
var hdaView = this,
urls = {};
_.each( urlTemplates, function( urlTemplateOrObj, urlKey ){
// object == nested templates: recurse
if( _.isObject( urlTemplateOrObj ) ){
- urls[ urlKey ] = hdaView.renderUrls( urlTemplateOrObj, modelJson );
+ urls[ urlKey ] = hdaView._renderUrls( urlTemplateOrObj, modelJson );
// string == template:
} else {
// meta_down load is a special case (see renderMetaDownloadUrls)
//TODO: should be a better (gen.) way to handle this case
if( urlKey === 'meta_download' ){
- urls[ urlKey ] = hdaView.renderMetaDownloadUrls( urlTemplateOrObj, modelJson );
+ urls[ urlKey ] = hdaView._renderMetaDownloadUrls( urlTemplateOrObj, modelJson );
} else {
urls[ urlKey ] = _.template( urlTemplateOrObj, modelJson );
}
@@ -62,8 +79,13 @@
return urls;
},
- // there can be more than one meta_file to download, so return a list of url and file_type for each
- renderMetaDownloadUrls : function( urlTemplate, modelJson ){
+ /** there can be more than one meta_file (e.g. bam index) to download,
+ * so return a list of url and file_type for each
+ * @param {Object} urlTemplate underscore templates for meta download urls
+ * @param {Object} modelJson data from the model
+ * @returns {Object} url and filetype for each meta file
+ */
+ _renderMetaDownloadUrls : function( urlTemplate, modelJson ){
return _.map( modelJson.meta_files, function( meta_file ){
return {
url : _.template( urlTemplate, { id: modelJson.id, file_type: meta_file.file_type }),
@@ -72,8 +94,14 @@
});
},
- // ................................................................................ RENDER MAIN
- // events: rendered, rendered:ready, rendered:initial, rendered:ready:initial
+ // ......................................................................... RENDER MAIN
+ /** Render this HDA, set up ui.
+ * @fires rendered:ready when rendered and NO running HDAs
+ * @fires rendered when rendered and running HDAs
+ * @fires rendered:initial on first render with running HDAs
+ * @fires rendered:initial:ready when first rendered and NO running HDAs
+ * @returns {Object} this HDABaseView
+ */
render : function(){
var view = this,
id = this.model.get( 'id' ),
@@ -118,15 +146,19 @@
return this;
},
- // ................................................................................ RENDER WARNINGS
- // hda warnings including: is deleted, is purged, is hidden (including links to further actions (undelete, etc.))
+ // ................................................................................ RENDER titlebar
+ /** Render any hda warnings including: is deleted, is purged, is hidden.
+ * (including links to further actions (undelete, etc.))
+ * @returns {jQuery} rendered DOM
+ */
_render_warnings : function(){
// jQ errs on building dom with whitespace - if there are no messages, trim -> ''
return $( jQuery.trim( HDABaseView.templates.messages( this.model.toJSON() )));
},
- // ................................................................................ RENDER TITLEBAR
- // the part of an hda always shown (whether the body is expanded or not): title link, title buttons
+ /** Render the part of an hda always shown (whether the body is expanded or not): title link, title buttons.
+ * @returns {jQuery} rendered DOM
+ */
_render_titleBar : function(){
var titleBar = $( '<div class="historyItemTitleBar" style="overflow: hidden"></div>' );
titleBar.append( this._render_titleButtons() );
@@ -135,9 +167,9 @@
return titleBar;
},
- // ................................................................................ display, edit attr, delete
- // icon-button group for the common, most easily accessed actions
- //NOTE: these are generally displayed for almost every hda state (tho poss. disabled)
+ /** Render icon-button group for the common, most easily accessed actions.
+ * @returns {jQuery} rendered DOM
+ */
_render_titleButtons : function(){
// render the display, edit attr and delete icon-buttons
var buttonDiv = $( '<div class="historyItemButtons"></div>' );
@@ -145,7 +177,9 @@
return buttonDiv;
},
- // icon-button to display this hda in the galaxy main iframe
+ /** Render icon-button to display this hda in the galaxy main iframe.
+ * @returns {jQuery} rendered DOM
+ */
_render_displayButton : function(){
// don't show display if not in ready state, error'd, or not accessible
if( ( !this.model.inReadyState() )
@@ -175,23 +209,30 @@
return this.displayButton.render().$el;
},
- // ................................................................................ titleLink
- // render the hid and hda.name as a link (that will expand the body)
+ /** Render the hid and hda.name as a link (that will expand the body).
+ * @returns {jQuery} rendered DOM
+ */
_render_titleLink : function(){
return $( jQuery.trim( HDABaseView.templates.titleLink(
+ //TODO?? does this need urls?
_.extend( this.model.toJSON(), { urls: this.urls } )
)));
},
- // ................................................................................ RENDER BODY
- // render the data/metadata summary (format, size, misc info, etc.)
+ // ......................................................................... RENDER BODY
+ /** Render the data/metadata summary (format, size, misc info, etc.).
+ * @returns {jQuery} rendered DOM
+ */
_render_hdaSummary : function(){
var modelData = _.extend( this.model.toJSON(), { urls: this.urls } );
return HDABaseView.templates.hdaSummary( modelData );
},
- // ................................................................................ primary actions
- // render the icon-buttons gen. placed underneath the hda summary
+ // ......................................................................... primary actions
+ /** Render the icon-buttons gen. placed underneath the hda summary (e.g. download, show params, etc.)
+ * @param {Array} buttonRenderingFuncs array of rendering functions appending the results in order
+ * @returns {jQuery} rendered DOM
+ */
_render_primaryActionButtons : function( buttonRenderingFuncs ){
var view = this,
primaryActionButtons = $( '<div/>' ).attr( 'id', 'primary-actions-' + this.model.get( 'id' ) );
@@ -201,7 +242,9 @@
return primaryActionButtons;
},
- // icon-button/popupmenu to down the data (and/or the associated meta files (bai, etc.)) for this hda
+ /** Render icon-button/popupmenu to download the data (and/or the associated meta files (bai, etc.)) for this hda.
+ * @returns {jQuery} rendered DOM
+ */
_render_downloadButton : function(){
// don't show anything if the data's been purged
if( this.model.get( 'purged' ) || !this.model.hasData() ){ return null; }
@@ -215,7 +258,9 @@
return $( downloadLinkHTML );
},
- // icon-button to show the input and output (stdout/err) for the job that created this hda
+ /** Render icon-button to show the input and output (stdout/err) for the job that created this hda.
+ * @returns {jQuery} rendered DOM
+ */
_render_showParamsButton : function(){
// gen. safe to show in all cases
this.showParamsButton = new IconButtonView({ model : new IconButton({
@@ -227,8 +272,10 @@
return this.showParamsButton.render().$el;
},
- // ................................................................................ other elements
- // render links to external genome display applications (igb, gbrowse, etc.)
+ // ......................................................................... other elements
+ /** Render links to external genome display applications (igb, gbrowse, etc.).
+ * @returns {jQuery} rendered DOM
+ */
//TODO: not a fan of the style on these
_render_displayApps : function(){
if( !this.model.hasData() ){ return null; }
@@ -250,7 +297,9 @@
return displayAppsDiv;
},
- // render the data peek
+ /** Render the data peek.
+ * @returns {jQuery} rendered DOM
+ */
//TODO: curr. pre-formatted into table on the server side - may not be ideal/flexible
_render_peek : function(){
if( !this.model.get( 'peek' ) ){ return null; }
@@ -262,28 +311,99 @@
);
},
- // ................................................................................ state body renderers
- // _render_body fns for the various states
+ // ......................................................................... state body renderers
+ /** Render the (expanded) body of an HDA, dispatching to other functions based on the HDA state
+ * @returns {jQuery} rendered DOM
+ */
//TODO: only render these on expansion (or already expanded)
+ _render_body : function(){
+ //this.log( this + '_render_body' );
+
+ var body = $( '<div/>' )
+ .attr( 'id', 'info-' + this.model.get( 'id' ) )
+ .addClass( 'historyItemBody' )
+ .attr( 'style', 'display: block' );
+
+ //TODO: not a fan of this dispatch
+ switch( this.model.get( 'state' ) ){
+ case HistoryDatasetAssociation.STATES.NOT_VIEWABLE :
+ this._render_body_not_viewable( body );
+ break;
+ case HistoryDatasetAssociation.STATES.UPLOAD :
+ this._render_body_uploading( body );
+ break;
+ case HistoryDatasetAssociation.STATES.QUEUED :
+ this._render_body_queued( body );
+ break;
+ case HistoryDatasetAssociation.STATES.RUNNING :
+ this._render_body_running( body );
+ break;
+ case HistoryDatasetAssociation.STATES.ERROR :
+ this._render_body_error( body );
+ break;
+ case HistoryDatasetAssociation.STATES.DISCARDED :
+ this._render_body_discarded( body );
+ break;
+ case HistoryDatasetAssociation.STATES.SETTING_METADATA :
+ this._render_body_setting_metadata( body );
+ break;
+ case HistoryDatasetAssociation.STATES.EMPTY :
+ this._render_body_empty( body );
+ break;
+ case HistoryDatasetAssociation.STATES.FAILED_METADATA :
+ this._render_body_failed_metadata( body );
+ break;
+ case HistoryDatasetAssociation.STATES.OK :
+ this._render_body_ok( body );
+ break;
+ default:
+ //??: no body?
+ body.append( $( '<div>Error: unknown dataset state "' + this.model.get( 'state' ) + '".</div>' ) );
+ }
+ body.append( '<div style="clear: both"></div>' );
+
+ if( this.expanded ){
+ body.show();
+ } else {
+ body.hide();
+ }
+ return body;
+ },
+
+ /** Render inaccessible, not-owned by curr user.
+ * @param {jQuery} parent DOM to which to append this body
+ */
_render_body_not_viewable : function( parent ){
//TODO: revisit - still showing display, edit, delete (as common) - that CAN'T be right
parent.append( $( '<div>' + _l( 'You do not have permission to view dataset' ) + '.</div>' ) );
},
+ /** Render an HDA still being uploaded.
+ * @param {jQuery} parent DOM to which to append this body
+ */
_render_body_uploading : function( parent ){
parent.append( $( '<div>' + _l( 'Dataset is uploading' ) + '</div>' ) );
},
+ /** Render an HDA whose job is queued.
+ * @param {jQuery} parent DOM to which to append this body
+ */
_render_body_queued : function( parent ){
parent.append( $( '<div>' + _l( 'Job is waiting to run' ) + '.</div>' ) );
parent.append( this._render_primaryActionButtons( this.defaultPrimaryActionButtonRenderers ));
},
+ /** Render an HDA whose job is running.
+ * @param {jQuery} parent DOM to which to append this body
+ */
_render_body_running : function( parent ){
parent.append( '<div>' + _l( 'Job is currently running' ) + '.</div>' );
parent.append( this._render_primaryActionButtons( this.defaultPrimaryActionButtonRenderers ));
},
+ /** Render an HDA whose job has failed.
+ * @param {jQuery} parent DOM to which to append this body
+ */
_render_body_error : function( parent ){
if( !this.model.get( 'purged' ) ){
parent.append( $( '<div>' + this.model.get( 'misc_blurb' ) + '</div>' ) );
@@ -295,15 +415,24 @@
));
},
+ /** Render an HDA which was deleted during upload.
+ * @param {jQuery} parent DOM to which to append this body
+ */
_render_body_discarded : function( parent ){
parent.append( '<div>' + _l( 'The job creating this dataset was cancelled before completion' ) + '.</div>' );
parent.append( this._render_primaryActionButtons( this.defaultPrimaryActionButtonRenderers ));
},
+ /** Render an HDA where the metadata is still being determined.
+ * @param {jQuery} parent DOM to which to append this body
+ */
_render_body_setting_metadata : function( parent ){
parent.append( $( '<div>' + _l( 'Metadata is being auto-detected' ) + '.</div>' ) );
},
+ /** Render an empty/no data HDA.
+ * @param {jQuery} parent DOM to which to append this body
+ */
_render_body_empty : function( parent ){
//TODO: replace i with dataset-misc-info class
//?? why are we showing the file size when we know it's zero??
@@ -311,6 +440,9 @@
parent.append( this._render_primaryActionButtons( this.defaultPrimaryActionButtonRenderers ));
},
+ /** Render an HDA where the metadata wasn't produced correctly.
+ * @param {jQuery} parent DOM to which to append this body
+ */
_render_body_failed_metadata : function( parent ){
//TODO: the css for this box is broken (unlike the others)
// add a message box about the failure at the top of the body...
@@ -319,6 +451,9 @@
this._render_body_ok( parent );
},
+ /** Render an HDA that's done running and where everything worked.
+ * @param {jQuery} parent DOM to which to append this body
+ */
_render_body_ok : function( parent ){
// most common state renderer and the most complicated
parent.append( this._render_hdaSummary() );
@@ -344,67 +479,18 @@
parent.append( this._render_peek() );
},
- _render_body : function(){
- //this.log( this + '_render_body' );
-
- var body = $( '<div/>' )
- .attr( 'id', 'info-' + this.model.get( 'id' ) )
- .addClass( 'historyItemBody' )
- .attr( 'style', 'display: block' );
-
- //TODO: not a fan of this dispatch
- switch( this.model.get( 'state' ) ){
- case HistoryDatasetAssociation.STATES.NOT_VIEWABLE :
- this._render_body_not_viewable( body );
- break;
- case HistoryDatasetAssociation.STATES.UPLOAD :
- this._render_body_uploading( body );
- break;
- case HistoryDatasetAssociation.STATES.QUEUED :
- this._render_body_queued( body );
- break;
- case HistoryDatasetAssociation.STATES.RUNNING :
- this._render_body_running( body );
- break;
- case HistoryDatasetAssociation.STATES.ERROR :
- this._render_body_error( body );
- break;
- case HistoryDatasetAssociation.STATES.DISCARDED :
- this._render_body_discarded( body );
- break;
- case HistoryDatasetAssociation.STATES.SETTING_METADATA :
- this._render_body_setting_metadata( body );
- break;
- case HistoryDatasetAssociation.STATES.EMPTY :
- this._render_body_empty( body );
- break;
- case HistoryDatasetAssociation.STATES.FAILED_METADATA :
- this._render_body_failed_metadata( body );
- break;
- case HistoryDatasetAssociation.STATES.OK :
- this._render_body_ok( body );
- break;
- default:
- //??: no body?
- body.append( $( '<div>Error: unknown dataset state "' + state + '".</div>' ) );
- }
- body.append( '<div style="clear: both"></div>' );
-
- if( this.expanded ){
- body.show();
- } else {
- body.hide();
- }
- return body;
- },
-
- // ................................................................................ EVENTS
+ // ......................................................................... EVENTS
+ /** event map */
events : {
'click .historyItemTitle' : 'toggleBodyVisibility'
},
- // expand/collapse body
- // event: body-visible, body-hidden
+ /** Expand or collapse the body of this HDA view.
+ * @param {Event} event the event that triggered this (@link HDABaseView#events)
+ * @param {Boolean} expanded if true, expand; if false, collapse
+ * @fires body-expanded when a body has been expanded
+ * @fires body-collapsed when a body has been collapsed
+ */
toggleBodyVisibility : function( event, expanded ){
var hdaView = this,
$body = this.$el.find( '.historyItemBody' );
@@ -413,23 +499,23 @@
if( expanded ){
$body.slideDown( 'fast', function(){
- hdaView.trigger( 'body-visible', hdaView.model.get( 'id' ) );
+ hdaView.trigger( 'body-expanded', hdaView.model.get( 'id' ) );
});
} else {
$body.slideUp( 'fast', function(){
- hdaView.trigger( 'body-hidden', hdaView.model.get( 'id' ) );
+ hdaView.trigger( 'body-collapsed', hdaView.model.get( 'id' ) );
});
}
},
- // ................................................................................ UTILTIY
+ // ......................................................................... MISC
toString : function(){
var modelString = ( this.model )?( this.model + '' ):( '(no model)' );
return 'HDABaseView(' + modelString + ')';
}
});
-//------------------------------------------------------------------------------
+//------------------------------------------------------------------------------ TEMPLATES
HDABaseView.templates = {
warningMsg : Handlebars.templates[ 'template-warningmessagesmall' ],
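A sketch tying the renamed expansion events above to the storage example from base-mvc.js earlier in this changeset; hdaView stands in for an HDABaseView instance and the HistoryPanel.storage setup is assumed:

    // remember expansion state across re-renders
    hdaView.bind( 'body-expanded', function( id ){
        HistoryPanel.storage.get( 'visibleItems' ).set( id, true );
    });
    hdaView.bind( 'body-collapsed', function( id ){
        HistoryPanel.storage.get( 'visibleItems' ).deleteKey( id );
    });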
diff -r 94627de6252824193ff0ff659807d17e59e4f94f -r 2d1402caf21a072edfdcaca0b12f1d38b4421b6e static/scripts/mvc/dataset/hda-edit.js
--- a/static/scripts/mvc/dataset/hda-edit.js
+++ b/static/scripts/mvc/dataset/hda-edit.js
@@ -2,25 +2,42 @@
// "../mvc/base-mvc"
//], function(){
//==============================================================================
-/** editing view for HistoryDatasetAssociations
+/** @class Editing view for HistoryDatasetAssociation.
+ * @name HDAEditView
*
+ * @augments HDABaseView
+ * @borrows LoggableMixin#logger as #logger
+ * @borrows LoggableMixin#log as #log
+ * @constructs
*/
-var HDAEditView = HDABaseView.extend({
+var HDAEditView = HDABaseView.extend( LoggableMixin ).extend(
+/** @lends HDAEditView.prototype */{
- // ................................................................................ SET UP
+ // ......................................................................... SET UP
+ /** Set up the view, cache url templates, bind listeners.
+ * Overrides HDABaseView.initialize to change default actions (adding re-run).
+ * @param {Object} attributes
+ * @config {Object} urlTemplates nested object containing url templates for this view
+ * @throws 'needs urlTemplates' if urlTemplates isn't present
+ * @see HDABaseView#initialize
+ */
initialize : function( attributes ){
HDABaseView.prototype.initialize.call( this, attributes );
- // which buttons go in most states (ok/failed meta are more complicated)
- // HDAEdit gets the rerun button on almost all states
+ /** list of rendering functions for the default, primary icon-buttons. */
this.defaultPrimaryActionButtonRenderers = [
this._render_showParamsButton,
+ // HDAEdit gets the rerun button on almost all states
this._render_rerunButton
];
},
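The initialize override above is the pattern for widening the base view's button strip; condensed into a sketch (MyEditView is hypothetical):

    var MyEditView = HDABaseView.extend( LoggableMixin ).extend({
        initialize : function( attributes ){
            // run the base set up, then extend the default renderers
            HDABaseView.prototype.initialize.call( this, attributes );
            this.defaultPrimaryActionButtonRenderers = [
                this._render_showParamsButton,
                this._render_rerunButton   // editable HDAs get re-run
            ];
        }
    });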
- // ................................................................................ RENDER WARNINGS
- // hda warnings including: is deleted, is purged, is hidden (including links to further actions (undelete, etc.))
+ // ......................................................................... RENDER WARNINGS
+ /** Render any hda warnings including: is deleted, is purged, is hidden.
+ * Overrides _render_warnings to include links to further actions (undelete, etc.)).
+ * @returns {Object} the templated urls
+ * @see HDABaseView#_render_warnings
+ */
_render_warnings : function(){
// jQ errs on building dom with whitespace - if there are no messages, trim -> ''
return $( jQuery.trim( HDABaseView.templates.messages(
@@ -28,9 +45,12 @@
)));
},
- // ................................................................................ display, edit attr, delete
- // icon-button group for the common, most easily accessed actions
- //NOTE: these are generally displayed for almost every hda state (tho poss. disabled)
+ // ......................................................................... edit attr, delete
+ /** Render icon-button group for the common, most easily accessed actions.
+ * Overrides _render_titleButtons to include edit and delete buttons.
+ * @see HDABaseView#_render_titleButtons
+ * @returns {jQuery} rendered DOM
+ */
_render_titleButtons : function(){
// render the display, edit attr and delete icon-buttons
var buttonDiv = $( '<div class="historyItemButtons"></div>' );
@@ -40,7 +60,9 @@
return buttonDiv;
},
- // icon-button to edit the attributes (format, permissions, etc.) this hda
+ /** Render icon-button to edit the attributes (format, permissions, etc.) of this hda.
+ * @returns {jQuery} rendered DOM
+ */
_render_editButton : function(){
// don't show edit while uploading
//TODO??: error?
@@ -76,7 +98,9 @@
return this.editButton.render().$el;
},
- // icon-button to delete this hda
+ /** Render icon-button to delete this hda.
+ * @returns {jQuery} rendered DOM
+ */
_render_deleteButton : function(){
// don't show delete if...
//TODO??: not viewable/accessible are essentially the same (not viewable set from accessible)
@@ -103,8 +127,12 @@
return this.deleteButton.render().$el;
},
- // ................................................................................ RENDER BODY
- // render the data/metadata summary (format, size, misc info, etc.)
+ // ......................................................................... RENDER BODY
+ /** Render the data/metadata summary (format, size, misc info, etc.).
+ * Overrides _render_hdaSummary to include edit link in dbkey.
+ * @see HDABaseView#_render_hdaSummary
+ * @returns {jQuery} rendered DOM
+ */
_render_hdaSummary : function(){
var modelData = _.extend( this.model.toJSON(), { urls: this.urls } );
// if there's no dbkey and it's editable : pass a flag to the template to render a link to editing in the '?'
@@ -116,8 +144,10 @@
return HDABaseView.templates.hdaSummary( modelData );
},
- // ................................................................................ primary actions
- // icon-button to show the input and output (stdout/err) for the job that created this hda
+ // ......................................................................... primary actions
+ /** Render icon-button to report an error on this hda to the galaxy admin.
+ * @returns {jQuery} rendered DOM
+ */
_render_errButton : function(){
if( this.model.get( 'state' ) !== HistoryDatasetAssociation.STATES.ERROR ){
this.errButton = null;
@@ -133,7 +163,9 @@
return this.errButton.render().$el;
},
- // icon-button to re run the job that created this hda
+ /** Render icon-button to re-run the job that created this hda.
+ * @returns {jQuery} rendered DOM
+ */
_render_rerunButton : function(){
this.rerunButton = new IconButtonView({ model : new IconButton({
title : _l( 'Run this job again' ),
@@ -144,8 +176,10 @@
return this.rerunButton.render().$el;
},
- // build an icon-button or popupmenu based on the number of applicable visualizations
- // also map button/popup clicks to viz setup functions
+ /** Render an icon-button or popupmenu based on the number of applicable visualizations
+ * and map button/popup clicks to viz setup functions.
+ * @returns {jQuery} rendered DOM
+ */
_render_visualizationsButton : function(){
var dbkey = this.model.get( 'dbkey' ),
visualizations = this.model.get( 'visualizations' ),
@@ -179,6 +213,7 @@
// Add dbkey to params if it exists.
if( dbkey ){ params.dbkey = dbkey; }
+ /** @inner */
function create_viz_action( visualization ) {
switch( visualization ){
case 'trackster':
@@ -208,8 +243,11 @@
return $icon;
},
- // ................................................................................ secondary actions
- // secondary actions: currently tagging and annotation (if user is allowed)
+ // ......................................................................... secondary actions
+ /** Render secondary actions: currently tagging and annotation (if user is allowed).
+ * @param {Array} buttonRenderingFuncs array of rendering functions appending the results in order
+ * @returns {jQuery} rendered DOM
+ */
_render_secondaryActionButtons : function( buttonRenderingFuncs ){
// move to the right (same level as primary)
var secondaryActionButtons = $( '<div/>' ),
@@ -224,7 +262,9 @@
return secondaryActionButtons;
},
- // icon-button to load and display tagging html
+ /** Render icon-button to load and display tagging html.
+ * @returns {jQuery} rendered DOM
+ */
//TODO: these should be a sub-MV
_render_tagButton : function(){
//TODO: check for User
@@ -243,7 +283,9 @@
return this.tagButton.render().$el;
},
- // icon-button to load and display annotation html
+ /** Render icon-button to load and display annotation html.
+ * @returns {jQuery} rendered DOM
+ */
//TODO: these should be a sub-MV
_render_annotateButton : function(){
//TODO: check for User
@@ -261,10 +303,12 @@
return this.annotateButton.render().$el;
},
- // ................................................................................ other elements
+ // ......................................................................... other elements
+ /** Render area to display tags.
+ * @returns {jQuery} rendered DOM
+ */
//TODO: into sub-MV
//TODO: check for User
- // render the area used to load tag display
_render_tagArea : function(){
if( !this.urls.tags.set ){ return null; }
//TODO: move to mvc/tags.js
@@ -273,9 +317,11 @@
));
},
+ /** Render area to display annotation.
+ * @returns {jQuery} rendered DOM
+ */
//TODO: into sub-MV
//TODO: check for User
- // render the area used to load annotation display
_render_annotationArea : function(){
if( !this.urls.annotation.get ){ return null; }
//TODO: move to mvc/annotations.js
@@ -284,14 +330,23 @@
));
},
- // ................................................................................ state body renderers
+ // ......................................................................... state body renderers
+ /** Render an HDA whose job has failed.
+ * Overrides _render_body_error to prepend error report button to primary actions strip.
+ * @param {jQuery} parent DOM to which to append this body
+ * @see HDABaseView#_render_body_error
+ */
_render_body_error : function( parent ){
- // overridden to prepend error report button to primary actions strip
HDABaseView.prototype._render_body_error.call( this, parent );
var primaryActions = parent.find( '#primary-actions-' + this.model.get( 'id' ) );
primaryActions.prepend( this._render_errButton() );
},
+ /** Render an HDA that's done running and where everything worked.
+ * Overrides _render_body_ok to add tag/annotation functionality and additional primary actions
+ * @param {jQuery} parent DOM to which to append this body
+ * @see HDABaseView#_render_body_ok
+ */
_render_body_ok : function( parent ){
// most common state renderer and the most complicated
parent.append( this._render_hdaSummary() );
@@ -327,15 +382,17 @@
parent.append( this._render_peek() );
},
- // ................................................................................ EVENTS
+ // ......................................................................... EVENTS
+ /** event map */
events : {
'click .historyItemTitle' : 'toggleBodyVisibility',
'click a.icon-button.tags' : 'loadAndDisplayTags',
'click a.icon-button.annotate' : 'loadAndDisplayAnnotation'
},
- // ................................................................................ STATE CHANGES / MANIPULATION
- // find the tag area and, if initial: (via ajax) load the html for displaying them; otherwise, unhide/hide
+ // ......................................................................... STATE CHANGES / MANIPULATION
+ /** Find the tag area and, if initial: load the html (via ajax) for displaying them; otherwise, unhide/hide
+ */
//TODO: into sub-MV
loadAndDisplayTags : function( event ){
//BUG: broken with latest
@@ -369,7 +426,8 @@
return false;
},
- // find the annotation area and, if initial: (via ajax) load the html for displaying it; otherwise, unhide/hide
+ /** Find the annotation area and, if initial: load the html (via ajax) for displaying it; otherwise, unhide/hide
+ */
//TODO: into sub-MV
loadAndDisplayAnnotation : function( event ){
//TODO: this is a drop in from history.mako - should use MV as well
@@ -411,7 +469,8 @@
return false;
},
- // ................................................................................ UTILTIY
+ // ......................................................................... UTILITY
+ /** string rep */
toString : function(){
var modelString = ( this.model )?( this.model + '' ):( '(no model)' );
return 'HDAView(' + modelString + ')';
@@ -427,8 +486,12 @@
//==============================================================================
//TODO: these belong somewhere else
-//TODO: should be imported from scatterplot.js
-//TODO: OR abstracted to 'load this in the galaxy_main frame'
+/** Create scatterplot loading/set up function for use with the visualizations popupmenu.
+ * @param {String} url url (gen. 'visualizations') to which to append 'scatterplot' and params
+ * @param {Object} params parameters to convert to query string for splot page
+ * @returns function that loads the scatterplot
+ */
+//TODO: should be imported from scatterplot.js OR abstracted to 'load this in the galaxy_main frame'
function create_scatterplot_action_fn( url, params ){
action = function() {
var galaxy_main = $( window.parent.document ).find( 'iframe#galaxy_main' ),
@@ -442,7 +505,12 @@
}
// -----------------------------------------------------------------------------
-// Create trackster action function.
+/** Create trackster loading/set up function for use with the visualizations popupmenu.
+ * Shows modal dialog for load old/create new.
+ * @param {String} vis_url visualizations url (gen. 'visualizations')
+ * @param {Object} dataset_params parameters to pass to trackster in the query string.
+ * @returns function that displays modal, loads trackster
+ */
//TODO: should be imported from trackster.js
function create_trackster_action_fn(vis_url, dataset_params, dbkey) {
return function() {
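Both factories above follow the same closure pattern: capture the url and params, then return a zero-argument handler that a popupmenu can invoke later. A minimal sketch of that pattern, assuming only jQuery and a parent frame containing iframe#galaxy_main (the function name here is illustrative, not part of the changeset):

// sketch: a factory returning a popupmenu action that loads a visualization
// into the galaxy_main iframe (assumes jQuery and the parent frame exist)
function create_visualization_action_fn( url, params ){
    // the returned function closes over url/params, so the menu
    // can invoke it later with no arguments
    return function(){
        var galaxy_main = $( window.parent.document ).find( 'iframe#galaxy_main' );
        galaxy_main.attr( 'src', url + '?' + $.param( params ) );
    };
}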
diff -r 94627de6252824193ff0ff659807d17e59e4f94f -r 2d1402caf21a072edfdcaca0b12f1d38b4421b6e static/scripts/mvc/dataset/hda-model.js
--- a/static/scripts/mvc/dataset/hda-model.js
+++ b/static/scripts/mvc/dataset/hda-model.js
@@ -2,15 +2,23 @@
// "../mvc/base-mvc"
//], function(){
//==============================================================================
-/**
+/** @class (HDA) model for a Galaxy dataset
+ * related to a history.
+ * @name HistoryDatasetAssociation
*
+ * @augments BaseModel
+ * @borrows LoggableMixin#logger as #logger
+ * @borrows LoggableMixin#log as #log
+ * @constructs
*/
-var HistoryDatasetAssociation = BaseModel.extend( LoggableMixin ).extend({
- // a single HDA model
+var HistoryDatasetAssociation = BaseModel.extend( LoggableMixin ).extend(
+/** @lends HistoryDatasetAssociation.prototype */{
- // uncomment this out see log messages
+ ///** logger used to record this.log messages, commonly set to console */
+ //// comment this out to suppress log output
//logger : console,
+ /** default attributes for a model */
defaults : {
// ---these are part of an HDA proper:
@@ -46,18 +54,22 @@
accessible : false
},
- // fetch location of this history in the api
+ /** fetch location of this hda in the api */
+ urlRoot: 'api/histories/',
url : function(){
//TODO: get this via url router
return 'api/histories/' + this.get( 'history_id' ) + '/contents/' + this.get( 'id' );
},
- // (curr) only handles changing state of non-accessible hdas to STATES.NOT_VIEWABLE
+ /** Set up the model, determine if accessible, bind listeners
+ * @see Backbone.Model#initialize
+ */
//TODO:? use initialize (or validate) to check purged AND deleted -> purged XOR deleted
initialize : function(){
this.log( this + '.initialize', this.attributes );
this.log( '\tparent history_id: ' + this.get( 'history_id' ) );
+ // (curr) only handles changing state of non-accessible hdas to STATES.NOT_VIEWABLE
//!! this state is not in trans.app.model.Dataset.states - set it here -
//TODO: change to server side.
if( !this.get( 'accessible' ) ){
@@ -81,12 +93,18 @@
//});
},
+ /** Is this hda deleted or purged?
+ */
isDeletedOrPurged : function(){
return ( this.get( 'deleted' ) || this.get( 'purged' ) );
},
- // based on show_deleted, show_hidden (gen. from the container control), would this ds show in the list of ds's?
- //TODO: too many visibles
+ /** based on show_deleted, show_hidden (gen. from the container control),
+ * would this ds show in the list of ds's?
+ * @param {Boolean} show_deleted are we showing deleted hdas?
+ * @param {Boolean} show_hidden are we showing hidden hdas?
+ */
+ //TODO: too many 'visible's
isVisible : function( show_deleted, show_hidden ){
var isVisible = true;
if( ( !show_deleted )
@@ -100,7 +118,11 @@
return isVisible;
},
- // 'ready' states are states where no processing (for the ds) is left to do on the server
+ /** Is this HDA in a 'ready' state; where 'Ready' states are states where no
+ * processing (for the ds) is left to do on the server.
+ * Currently: NEW, OK, EMPTY, FAILED_METADATA, NOT_VIEWABLE, DISCARDED,
+ * and ERROR
+ */
inReadyState : function(){
var state = this.get( 'state' );
return (
@@ -114,12 +136,15 @@
);
},
- // convenience fn to match hda.has_data
+ /** Convenience function to match hda.has_data.
+ */
hasData : function(){
//TODO:?? is this equivalent to all possible hda.has_data calls?
return ( this.get( 'file_size' ) > 0 );
},
+ /** String representation
+ */
toString : function(){
var nameAndId = this.get( 'id' ) || '';
if( this.get( 'name' ) ){
@@ -130,48 +155,83 @@
});
//------------------------------------------------------------------------------
+/** Class level map of possible HDA states to their string equivalents.
+ * A port of galaxy.model.Dataset.states.
+ */
HistoryDatasetAssociation.STATES = {
+ // NOT ready states
+ /** is uploading and not ready */
UPLOAD : 'upload',
+ /** the job that will produce the dataset is queued in the runner */
QUEUED : 'queued',
+ /** the job that will produce the dataset is running */
RUNNING : 'running',
+ /** metadata for the dataset is being discovered/set */
SETTING_METADATA : 'setting_metadata',
+ // ready states
+ /** was created without a tool */
NEW : 'new',
+ /** has no data */
+ EMPTY : 'empty',
+ /** has successfully completed running */
OK : 'ok',
- EMPTY : 'empty',
+ /** metadata discovery/setting failed or errored (but otherwise ok) */
FAILED_METADATA : 'failed_metadata',
+ /** not accessible to the current user (i.e. due to permissions) */
NOT_VIEWABLE : 'noPermission', // not in trans.app.model.Dataset.states
+ /** deleted while uploading */
DISCARDED : 'discarded',
+ /** the tool producing this dataset failed */
ERROR : 'error'
};
//==============================================================================
-/**
+/** @class Backbone collection of (HDA) models
*
+ * @borrows LoggableMixin#logger as #logger
+ * @borrows LoggableMixin#log as #log
+ * @constructs
*/
-var HDACollection = Backbone.Collection.extend( LoggableMixin ).extend({
+var HDACollection = Backbone.Collection.extend( LoggableMixin ).extend(
+/** @lends HDACollection.prototype */{
model : HistoryDatasetAssociation,
- //logger : console,
+ ///** logger used to record this.log messages, commonly set to console */
+ //// comment this out to suppress log output
+ //logger : console,
+ /** Set up.
+ * @see Backbone.Collection#initialize
+ */
initialize : function(){
//this.bind( 'all', function( event ){
// this.log( this + '', arguments );
//});
},
- // return the ids of every hda in this collection
+ /** Get the ids of every hda in this collection
+ * @returns array of encoded ids
+ */
ids : function(){
return this.map( function( item ){ return item.id; });
},
- // return an HDA collection containing every 'shown' hda based on show_deleted/hidden
+ /** Get every 'shown' hda in this collection based on show_deleted/hidden
+ * @param {Boolean} show_deleted are we showing deleted hdas?
+ * @param {Boolean} show_hidden are we showing hidden hdas?
+ * @returns array of hda models
+ * @see HistoryDatasetAssociation#isVisible
+ */
getVisible : function( show_deleted, show_hidden ){
return this.filter( function( item ){ return item.isVisible( show_deleted, show_hidden ); });
},
- // get a map where <possible hda state> : [ <list of hda ids in that state> ]
+ /** For each possible hda state, get an array of all hda ids in that state
+ * @returns a map of states -> hda ids
+ * @see HistoryDatasetAssociation#STATES
+ */
getStateLists : function(){
var stateLists = {};
_.each( _.values( HistoryDatasetAssociation.STATES ), function( state ){
@@ -184,7 +244,10 @@
return stateLists;
},
- // returns the id of every hda still running (not in a ready state)
+ /** Get the id of every hda in this collection not in a 'ready' state (running).
+ * @returns an array of hda ids
+ * @see HistoryDatasetAssociation#inReadyState
+ */
running : function(){
var idList = [];
this.each( function( item ){
@@ -195,7 +258,10 @@
return idList;
},
- // update (fetch -> render) the hdas with the ids given
+ /** Update (fetch) the data of the hdas with the given ids.
+ * @param {String[]} ids an array of hda ids to update
+ * @see HistoryDatasetAssociation#fetch
+ */
update : function( ids ){
this.log( this + 'update:', ids );
@@ -208,6 +274,7 @@
});
},
+ /** String representation. */
toString : function(){
return ( 'HDACollection(' + this.ids().join(',') + ')' );
}
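Taken together, running() and update() support a simple polling loop: re-fetch only the hdas still processing, and stop once everything reaches a ready state. A hedged sketch against an HDACollection instance (the function name and the 4 second interval are illustrative, not from this changeset):

// sketch: poll an HDACollection, updating only the non-ready hdas
function pollRunningHdas( hdas ){
    var runningIds = hdas.running();    // ids of hdas not in a 'ready' state
    if( runningIds.length ){
        hdas.update( runningIds );      // fetch fresh data for just those hdas
        setTimeout( function(){ pollRunningHdas( hdas ); }, 4000 );
    }
}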
diff -r 94627de6252824193ff0ff659807d17e59e4f94f -r 2d1402caf21a072edfdcaca0b12f1d38b4421b6e static/scripts/mvc/history/history-model.js
--- a/static/scripts/mvc/history/history-model.js
+++ b/static/scripts/mvc/history/history-model.js
@@ -2,13 +2,21 @@
// "../mvc/base-mvc"
//], function(){
//==============================================================================
-/**
+/** @class Model for a Galaxy history resource - both a record of user
+ * tool use and a collection of the datasets those tools produced.
+ * @name History
*
+ * @augments BaseModel
+ * @borrows LoggableMixin#logger as #logger
+ * @borrows LoggableMixin#log as #log
+ * @constructs
*/
-var History = BaseModel.extend( LoggableMixin ).extend({
+var History = BaseModel.extend( LoggableMixin ).extend(
+/** @lends History.prototype */{
//TODO: bind change events from items and collection to this (itemLengths, states)
- // uncomment this out see log messages
+ ///** logger used to record this.log messages, commonly set to console */
+ //// comment this out to suppress log output
//logger : console,
// values from api (may need more)
@@ -27,15 +35,23 @@
message : null
},
+ //TODO: hardcoded
+ urlRoot: 'api/histories/',
+ /** url for fetch */
url : function(){
// api location of history resource
- //TODO: hardcoded
return 'api/histories/' + this.get( 'id' );
},
+ /** Set up the hdas collection
+ * @param {Object} initialSettings model data for this History
+ * @param {Object[]} initialHdas array of model data for this History's HDAs
+ * @see BaseModel#initialize
+ */
initialize : function( initialSettings, initialHdas ){
this.log( this + ".initialize:", initialSettings, initialHdas );
+ /** HDACollection of the HDAs contained in this history. */
this.hdas = new HDACollection();
// if we've got hdas passed in the constructor, load them and set up updates if needed
@@ -44,6 +60,7 @@
this.checkForUpdates();
}
+ // events
//this.on( 'change', function( currModel, changedList ){
// this.log( this + ' has changed:', currModel, changedList );
//});
@@ -53,7 +70,11 @@
//});
},
- // get data via the api (alternative to sending options,hdas to initialize)
+ /** get data via the api (alternative to sending options, hdas to initialize)
+ * @param {String} historyId encoded id
+ * @param {Function} success callback function called when loading completes
+ * @see BaseModel#initialize
+ */
//TODO: this needs work - move to more straightforward deferred
// events: loaded, loaded:user, loaded:hdas
loadFromApi : function( historyId, success ){
@@ -178,13 +199,22 @@
});
//==============================================================================
-/** A collection of histories (per user or admin)
- * (stub) currently unused
+/** @class A collection of histories (per user).
+ * (stub) currently unused.
+ * @name HistoryCollection
+ *
+ * @borrows LoggableMixin#logger as #logger
+ * @borrows LoggableMixin#log as #log
+ * @constructs
*/
-var HistoryCollection = Backbone.Collection.extend( LoggableMixin ).extend({
+var HistoryCollection = Backbone.Collection.extend( LoggableMixin ).extend(
+/** @lends HistoryCollection.prototype */{
model : History,
- urlRoot : 'api/histories',
- //logger : console
+ urlRoot : 'api/histories'
+
+ ///** logger used to record this.log messages, commonly set to console */
+ //// comment this out to suppress log output
+ //logger : console,
});
//==============================================================================
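For reference, the two-argument initialize above means a History can be built directly from bootstrapped page data, or populated lazily from the api. A sketch assuming the page provides historyJson and hdaJsonArray (both names hypothetical):

// sketch: construct a History from bootstrapped page data
var history = new History(
    historyJson,     // attributes for the history model itself
    hdaJsonArray     // loaded into history.hdas (an HDACollection)
);
// alternately, fetch everything via the api (fires loaded, loaded:user, loaded:hdas)
//history.loadFromApi( historyJson.id, function(){ /* e.g. render the panel here */ } );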
diff -r 94627de6252824193ff0ff659807d17e59e4f94f -r 2d1402caf21a072edfdcaca0b12f1d38b4421b6e static/scripts/mvc/history/history-panel.js
--- a/static/scripts/mvc/history/history-panel.js
+++ b/static/scripts/mvc/history/history-panel.js
@@ -102,22 +102,43 @@
sorting, re-shuffling
============================================================================= */
-/** view for the HDACollection (as per current right hand panel)
+/** @class View/Controller for the history model as used in the history
+ * panel (current right hand panel).
+ * @name HistoryPanel
*
+ * @augments BaseView
+ * @borrows LoggableMixin#logger as #logger
+ * @borrows LoggableMixin#log as #log
+ * @constructs
*/
-var HistoryPanel = BaseView.extend( LoggableMixin ).extend({
+var HistoryPanel = BaseView.extend( LoggableMixin ).extend(
+/** @lends HistoryPanel.prototype */{
- // uncomment this out see log messages
+ ///** logger used to record this.log messages, commonly set to console */
+ //// comment this out to suppress log output
//logger : console,
// direct attachment to existing element
el : 'body.historyPage',
+ /** which class to use for constructing the HDA views */
//HDAView : HDABaseView,
HDAView : HDAEditView,
- // init with the model, urlTemplates, set up storage, bind HDACollection events
- //NOTE: this will create or load PersistantStorage keyed under 'HistoryView.<id>'
- //pre: you'll need to pass in the urlTemplates (urlTemplates : { history : {...}, hda : {...} })
+ /** event map
+ */
+ events : {
+ 'click #history-tag' : 'loadAndDisplayTags'
+ },
+
+ // ......................................................................... SET UP
+ /** Set up the view, set up storage, bind listeners to HDACollection events
+ * @param {Object} attributes
+ * @config {Object} urlTemplates.history nested object containing url templates for this view
+ * @config {Object} urlTemplates.hda nested object containing url templates for HDAViews
+ * @throws 'needs urlTemplates' if urlTemplates.history or urlTemplates.hda aren't present
+ * @see PersistantStorage
+ * @see Backbone.View#initialize
+ */
initialize : function( attributes ){
this.log( this + '.initialize:', attributes );
@@ -128,81 +149,93 @@
if( !attributes.urlTemplates.history ){ throw( this + ' needs urlTemplates.history on initialize' ); }
if( !attributes.urlTemplates.hda ){ throw( this + ' needs urlTemplates.hda on initialize' ); }
this.urlTemplates = attributes.urlTemplates.history;
+ /** url templates used to render urls for each HDAView */
this.hdaUrlTemplates = attributes.urlTemplates.hda;
+ this._setUpWebStorage( attributes.initiallyExpanded, attributes.show_deleted, attributes.show_hidden );
+
+ // bind events from the model's hda collection
+ // don't need to re-render entire model on all changes, just render disk size when it changes
+ //this.model.bind( 'change', this.render, this );
+ this.model.bind( 'change:nice_size', this.updateHistoryDiskSize, this );
+ this.model.hdas.bind( 'add', this.add, this );
+ this.model.hdas.bind( 'reset', this.addAll, this );
+ //this.bind( 'all', function(){
+ // this.log( arguments );
+ //}, this );
+
+ // set up instance vars
+ /** map of hda model ids to hda views */
+ this.hdaViews = {};
+ /** map web controller urls for history related actions */
+ this.urls = {};
+ },
+
+ /** Set up client side storage. Currently PersistantStorage keyed under 'HistoryView.<id>'
+ * @param {Object} initiallyExpanded
+ * @param {Boolean} show_deleted whether to show deleted HDAs (overrides stored)
+ * @param {Boolean} show_hidden
+ * @see PersistantStorage
+ */
+ _setUpWebStorage : function( initiallyExpanded, show_deleted, show_hidden ){
+
// data that needs to be persistant over page refreshes
// (note the key function which uses the history id as well)
this.storage = new PersistantStorage( 'HistoryView.' + this.model.get( 'id' ), {
+ //TODO: initiallyExpanded only works on first load right now
expandedHdas : {},
show_deleted : false,
show_hidden : false
});
this.log( 'this.storage:', this.storage.get() );
- // get the show_deleted/hidden settings giving priority to values passed into initialize, but
+ // expandedHdas is a map of hda.ids -> a boolean representing whether that hda's body is expanded
+ // store any pre-expanded ids passed in
+ if( initiallyExpanded ){
+ this.storage.set( 'expandedHdas', initiallyExpanded );
+ }
+
+ // get the show_deleted/hidden settings giving priority to values passed in,
// using web storage otherwise
- this.log( 'show_deleted:', attributes.show_deleted, 'show_hidden', attributes.show_hidden );
+ //this.log( 'show_deleted:', show_deleted, 'show_hidden', show_hidden );
// if the page has specifically requested show_deleted/hidden, these will be either true or false
// (as opposed to undefined, null) - and we give priority to that setting
- if( ( attributes.show_deleted === true ) || ( attributes.show_deleted === false ) ){
+ if( ( show_deleted === true ) || ( show_deleted === false ) ){
// save them to web storage
- this.storage.set( 'show_deleted', attributes.show_deleted );
+ this.storage.set( 'show_deleted', show_deleted );
}
- if( ( attributes.show_hidden === true ) || ( attributes.show_hidden === false ) ){
- this.storage.set( 'show_hidden', attributes.show_hidden );
+ if( ( show_hidden === true ) || ( show_hidden === false ) ){
+ this.storage.set( 'show_hidden', show_hidden );
}
- // pull show_deleted/hidden from the web storage if the page hasn't specified whether to show_deleted/hidden,
+ // if the page hasn't specified whether to show_deleted/hidden, pull show_deleted/hidden from the web storage
this.show_deleted = this.storage.get( 'show_deleted' );
this.show_hidden = this.storage.get( 'show_hidden' );
- this.log( 'this.show_deleted:', this.show_deleted, 'show_hidden', this.show_hidden );
- this.log( '(now) this.storage:', this.storage.get() );
-
- // bind events from the model's hda collection
- //this.model.bind( 'change', this.render, this );
- this.model.bind( 'change:nice_size', this.updateHistoryDiskSize, this );
-
- this.model.hdas.bind( 'add', this.add, this );
- this.model.hdas.bind( 'reset', this.addAll, this );
- this.model.hdas.bind( 'all', this.all, this );
-
- //this.bind( 'all', function(){
- // this.log( arguments );
- //}, this );
-
- // set up instance vars
- this.hdaViews = {};
- this.urls = {};
+ //this.log( 'this.show_deleted:', this.show_deleted, 'show_hidden', this.show_hidden );
+ this.log( '(init\'d) this.storage:', this.storage.get() );
},
+ /** Add an hda to this history's collection
+ * @param {HistoryDatasetAssociation} hda hda to add to the collection
+ */
add : function( hda ){
//console.debug( 'add.' + this, hda );
//TODO
},
+ /** Event handler to respond when hdas are reset
+ */
addAll : function(){
//console.debug( 'addAll.' + this );
// re render when all hdas are reset
this.render();
},
- all : function( event ){
- //console.debug( 'allItemEvents.' + this, event );
- //...for which to do the debuggings
- },
-
- // render the urls for this view using urlTemplates and the model data
- renderUrls : function( modelJson ){
- var historyView = this;
-
- historyView.urls = {};
- _.each( this.urlTemplates, function( urlTemplate, urlKey ){
- historyView.urls[ urlKey ] = _.template( urlTemplate, modelJson );
- });
- return historyView.urls;
- },
-
- // render urls, historyView body, and hdas (if any are shown), fade out, swap, fade in, set up behaviours
- // events: rendered, rendered:initial
+ // ......................................................................... RENDERING
+ /** Render urls, historyPanel body, and hdas (if any are shown)
+ * @see Backbone.View#render
+ */
+ /** event rendered triggered when the panel rendering is complete */
+ /** event rendered:initial triggered when the FIRST panel rendering is complete */
render : function(){
var historyView = this,
setUpQueueName = historyView.toString() + '.set-up',
@@ -213,13 +246,13 @@
//console.debug( this + '.render, initialRender:', initialRender );
// render the urls and add them to the model json
- modelJson.urls = this.renderUrls( modelJson );
+ modelJson.urls = this._renderUrls( modelJson );
// render the main template, tooltips
//NOTE: this is done before the items, since item views should handle theirs themselves
newRender.append( HistoryPanel.templates.historyPanel( modelJson ) );
newRender.find( '.tooltip' ).tooltip({ placement: 'bottom' });
- this.setUpActionButton( newRender.find( '#history-action-popup' ) );
+ this._setUpActionButton( newRender.find( '#history-action-popup' ) );
// render hda views (if any and any shown (show_deleted/hidden)
//TODO: this seems too elaborate
@@ -244,7 +277,7 @@
this.log( historyView + ' rendered:', historyView.$el );
//TODO: ideally, these would be set up before the fade in (can't because of async save text)
- historyView.setUpBehaviours();
+ historyView._setUpBehaviours();
if( initialRender ){
historyView.trigger( 'rendered:initial' );
@@ -258,7 +291,24 @@
return this;
},
- setUpActionButton : function( $button ){
+ /** Render the urls for this view using urlTemplates and the model data
+ * @param {Object} modelJson data from the model used to fill templates
+ */
+ _renderUrls : function( modelJson ){
+ var historyView = this;
+
+ historyView.urls = {};
+ _.each( this.urlTemplates, function( urlTemplate, urlKey ){
+ historyView.urls[ urlKey ] = _.template( urlTemplate, modelJson );
+ });
+ return historyView.urls;
+ },
+
+ /** Set up history actions popup menu
+ * @param {jQuery} $button jQuery dom object to turn into the 'button' that activates the menu
+ * @see make_popupmenu (galaxy-base.js)
+ */
+ _setUpActionButton : function( $button ){
var historyPanel = this,
show_deletedText = ( this.storage.get( 'show_deleted' ) )?( 'Hide deleted' ):( 'Show deleted' ),
show_hiddenText = ( this.storage.get( 'show_hidden' ) )?( 'Hide hidden' ):( 'Show hidden' ),
@@ -270,7 +320,11 @@
make_popupmenu( $button, menuActions );
},
- // set up a view for each item to be shown, init with model and listeners, cache to map ( model.id : view )
+ /** Set up/render a view for each HDA to be shown, init with model and listeners.
+ * HDA views are cached to the map this.hdaViews (using the model.id as key).
+ * @param {jQuery} $whereTo what dom element to prepend the HDA views to
+ * @returns the number of visible hda views
+ */
renderItems : function( $whereTo ){
this.hdaViews = {};
var historyView = this,
@@ -290,7 +344,7 @@
expanded : expanded,
urlTemplates : historyView.hdaUrlTemplates
});
- historyView.setUpHdaListeners( historyView.hdaViews[ hdaId ] );
+ historyView._setUpHdaListeners( historyView.hdaViews[ hdaId ] );
// render it (NOTE: reverse order, newest on top (prepend))
//TODO: by default send a reverse order list (although this may be more efficient - it's more confusing)
@@ -299,21 +353,25 @@
return visibleHdas.length;
},
- // set up HistoryView->HDAView listeners
- setUpHdaListeners : function( hdaView ){
+ /** Set up HistoryPanel listeners for HDAView events. Currently binds:
+ * HDAView#body-visible, HDAView#body-hidden to store expanded states
+ * @param {HDAView} hdaView HDAView (base or edit) to listen to
+ */
+ _setUpHdaListeners : function( hdaView ){
var historyView = this;
- // use storage to maintain a list of hdas whose bodies are expanded
- hdaView.bind( 'body-visible', function( id ){
+ // maintain a list of hdas whose bodies are expanded
+ hdaView.bind( 'body-expanded', function( id ){
historyView.storage.get( 'expandedHdas' ).set( id, true );
});
- hdaView.bind( 'body-hidden', function( id ){
+ hdaView.bind( 'body-collapsed', function( id ){
historyView.storage.get( 'expandedHdas' ).deleteKey( id );
});
},
- // set up js/widget behaviours: tooltips,
+ /** Set up HistoryPanel js/widget behaviours
+ */
//TODO: these should be either sub-MVs, or handled by events
- setUpBehaviours : function(){
+ _setUpBehaviours : function(){
// anon users shouldn't have access to any of these
if( !( this.model.get( 'user' ) && this.model.get( 'user' ).email ) ){ return; }
@@ -337,40 +395,51 @@
this.urls.annotate, "new_annotation", 18, true, 4 );
},
- // update the history size display (curr. upper right of panel)
+ // ......................................................................... EVENTS
+ /** Update the history size display (curr. upper right of panel).
+ */
updateHistoryDiskSize : function(){
this.$el.find( '#history-size' ).text( this.model.get( 'nice_size' ) );
},
- events : {
- 'click #history-tag' : 'loadAndDisplayTags'
- },
-
+ /** Show the over quota message (which happens to be in the history panel).
+ */
//TODO: this seems more like a per user message than a history message; IOW, this doesn't belong here
- showQuotaMessage : function( userData ){
+ showQuotaMessage : function(){
var msg = this.$el.find( '#quota-message-container' );
//this.log( this + ' showing quota message:', msg, userData );
if( msg.is( ':hidden' ) ){ msg.slideDown( 'fast' ); }
},
+ /** Hide the over quota message (which happens to be in the history panel).
+ */
//TODO: this seems more like a per user message than a history message
- hideQuotaMessage : function( userData ){
+ hideQuotaMessage : function(){
var msg = this.$el.find( '#quota-message-container' );
//this.log( this + ' hiding quota message:', msg, userData );
if( !msg.is( ':hidden' ) ){ msg.slideUp( 'fast' ); }
},
- toggleShowDeleted : function( x, y, z ){
+ /** Handle the user toggling the deleted visibility by:
+ * (1) storing the new value in the persistant storage
+ * (2) re-rendering the history
+ */
+ toggleShowDeleted : function(){
this.storage.set( 'show_deleted', !this.storage.get( 'show_deleted' ) );
this.render();
},
+ /** Handle the user toggling the hidden visibility by:
+ * (1) storing the new value in the persistant storage
+ * (2) re-rendering the history
+ */
toggleShowHidden : function(){
this.storage.set( 'show_hidden', !this.storage.get( 'show_hidden' ) );
this.render();
},
- // collapse all hda bodies
+ /** Collapse all hda bodies and clear expandedHdas in the storage
+ */
hideAllHdaBodies : function(){
_.each( this.hdaViews, function( item ){
item.toggleBodyVisibility( null, false );
@@ -378,7 +447,8 @@
this.storage.set( 'expandedHdas', {} );
},
- // find the tag area and, if initial: (via ajax) load the html for displaying them; otherwise, unhide/hide
+ /** Find the tag area and, if initial: load the html (via ajax) for displaying them; otherwise, unhide/hide
+ */
//TODO: into sub-MV
loadAndDisplayTags : function( event ){
this.log( this + '.loadAndDisplayTags', event );
@@ -414,13 +484,16 @@
return false;
},
+ // ......................................................................... MISC
+ /** Return a string rep of the history
+ */
toString : function(){
var nameString = this.model.get( 'name' ) || '';
- return 'HistoryView(' + nameString + ')';
+ return 'HistoryPanel(' + nameString + ')';
}
});
-//------------------------------------------------------------------------------
+//------------------------------------------------------------------------------ TEMPLATES
HistoryPanel.templates = {
historyPanel : Handlebars.templates[ 'template-history-historyPanel' ]
};
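_setUpWebStorage above relies on the PersistantStorage helper from base-mvc.js, whose nested get() calls return recursion helpers with their own set/deleteKey. A minimal sketch of the panel's usage pattern (historyId and hdaId are placeholders):

// sketch: persist panel state under a history-specific key
var storage = new PersistantStorage( 'HistoryView.' + historyId, {
    expandedHdas : {}, show_deleted : false, show_hidden : false
});
// nested get() returns a helper wrapping the sub-object
storage.get( 'expandedHdas' ).set( hdaId, true );      // hda body expanded
storage.get( 'expandedHdas' ).deleteKey( hdaId );      // hda body collapsed
// toggle and persist a boolean flag
storage.set( 'show_deleted', !storage.get( 'show_deleted' ) );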
diff -r 94627de6252824193ff0ff659807d17e59e4f94f -r 2d1402caf21a072edfdcaca0b12f1d38b4421b6e static/scripts/mvc/user/user-model.js
--- a/static/scripts/mvc/user/user-model.js
+++ b/static/scripts/mvc/user/user-model.js
@@ -1,15 +1,37 @@
-var User = BaseModel.extend( LoggableMixin ).extend({
- //logger : console,
+/** @class Model for a Galaxy user (including anonymous users).
+ * @name User
+ *
+ * @augments BaseModel
+ * @borrows LoggableMixin#logger as #logger
+ * @borrows LoggableMixin#log as #log
+ * @constructs
+ */
+var User = BaseModel.extend( LoggableMixin ).extend(
+/** @lends User.prototype */{
+
+ ///** logger used to record this.log messages, commonly set to console */
+ //// comment this out to suppress log output
+ //logger : console,
+
+ /** API location for this resource */
urlRoot : 'api/users',
- defaults : {
+ /** Model defaults
+ * Note: don't detect anonymous users via the username, since it defaults to '(anonymous user)';
+ * a safer check is if( !user.get( 'email' ) ) -> anon user
+ */
+ defaults : /** @lends User.prototype */{
id : null,
username : '(' + _l( "anonymous user" ) + ')',
email : "",
total_disk_usage : 0,
- nice_total_disk_usage : "0 bytes"
+ nice_total_disk_usage : "0 bytes",
+ quota_percent : null
},
+ /** Set up and bind events
+ * @param {Object} data Initial model data.
+ */
initialize : function( data ){
this.log( 'User.initialize:', data );
@@ -17,12 +39,25 @@
this.on( 'change', function( model, data ){ this.log( this + ' has changed:', model, data.changes ); });
},
- // events: loaded
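+ /** Is this user anonymous (i.e. has no email)? */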
+ isAnonymous : function(){
+ return ( !this.get( 'email' ) );
+ },
+
+ /** Load a user with the API using an id.
+ * If getting an anonymous user or no access to a user id, pass the User.CURRENT_ID_STR
+ * (e.g. 'current') and the API will return the current transaction's user data.
+ * @param {String} idOrCurrent encoded user id or the User.CURRENT_ID_STR
+ * @param {Object} options hash to pass to Backbone.Model.fetch. Can contain success, error fns.
+ * @fires loaded when the model has been loaded from the API, passing the newModel and AJAX response.
+ */
loadFromApi : function( idOrCurrent, options ){
idOrCurrent = idOrCurrent || User.CURRENT_ID_STR;
+
options = options || {};
var model = this,
userFn = options.success;
+
+ /** @ignore */
options.success = function( newModel, response ){
model.trigger( 'loaded', newModel, response );
if( userFn ){ userFn( newModel, response ); }
@@ -35,6 +70,7 @@
return BaseModel.prototype.fetch.call( this, options );
},
+ /** string representation */
toString : function(){
var userInfo = [ this.get( 'username' ) ];
if( this.get( 'id' ) ){
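Since anonymous sessions have no stable user id, User.CURRENT_ID_STR is the usual entry point for loadFromApi. A usage sketch (the callback body is illustrative):

// sketch: load the current session's user, then branch on anonymity
var currUser = new User();
currUser.loadFromApi( User.CURRENT_ID_STR, {
    success : function( newModel, response ){
        // isAnonymous is true when the api returned no email for the user
        if( currUser.isAnonymous() ){ console.log( 'anonymous session' ); }
    }
});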
diff -r 94627de6252824193ff0ff659807d17e59e4f94f -r 2d1402caf21a072edfdcaca0b12f1d38b4421b6e static/scripts/mvc/user/user-quotameter.js
--- a/static/scripts/mvc/user/user-quotameter.js
+++ b/static/scripts/mvc/user/user-quotameter.js
@@ -1,17 +1,27 @@
-// strange view that spans two frames: renders to two separate elements based on a User's disk usage:
-// a quota/usage bar (curr. masthead), and
-// an over-quota message (curr. history panel)
+/** @class View to display a user's disk/storage usage
+ * either as a progress bar representing the percentage of a quota used
+ * or a simple text element displaying the human readable size used.
+ * @name UserQuotaMeter
+ *
+ * @augments BaseView
+ * @borrows LoggableMixin#logger as #logger
+ * @borrows LoggableMixin#log as #log
+ * @constructs
+ */
+var UserQuotaMeter = BaseView.extend( LoggableMixin ).extend(
+/** @lends UserQuotaMeter.prototype */{
+
+ ///** logger used to record this.log messages, commonly set to console */
+ //// comment this out to suppress log output
+ //logger : console,
-// for now, keep the view in the history panel (where the message is), but render ALSO to the masthead
-
-var UserQuotaMeter = BaseView.extend( LoggableMixin ).extend({
- //logger : console,
-
+ /** Defaults for optional settings passed to initialize */
options : {
warnAtPercent : 85,
errorAtPercent : 100
},
+ /** Set up, accept options, and bind events */
initialize : function( options ){
this.log( this + '.initialize:', options );
_.extend( this.options, options );
@@ -20,18 +30,29 @@
this.model.bind( 'change:quota_percent change:total_disk_usage', this.render, this );
},
+ /** Re-load user model data from the api */
update : function( options ){
this.log( this + ' updating user data...', options );
this.model.loadFromApi( this.model.get( 'id' ), options );
return this;
},
+ /** Is the user over their quota (if there is one)?
+ * @returns {Boolean} true if over quota, false if no quota or under quota
+ */
isOverQuota : function(){
return ( this.model.get( 'quota_percent' ) !== null
&& this.model.get( 'quota_percent' ) >= this.options.errorAtPercent );
},
- // events: quota:over, quota:under, quota:under:approaching, quota:under:ok
+ /** Render the meter when they have an applicable quota. Will render as a progress bar
+ * with their percentage of that quota in text over the bar.
+ * @fires quota:over when user is over quota (>= this.errorAtPercent)
+ * @fires quota:under when user is under quota
+ * @fires quota:under:approaching when user is >= this.warnAtPercent of their quota
+ * @fires quota:under:ok when user is below this.warnAtPercent
+ * @returns {jQuery} the rendered meter
+ */
_render_quota : function(){
var modelJson = this.model.toJSON(),
//prevPercent = this.model.previous( 'quota_percent' ),
@@ -63,12 +84,20 @@
return meter;
},
+ /** Render the meter when the user has NO applicable quota. Will render as text
+ * showing the human readable sum storage their data is using.
+ * @returns {jQuery} the rendered text
+ */
_render_usage : function(){
var usage = $( UserQuotaMeter.templates.usage( this.model.toJSON() ) );
this.log( this + '.rendering usage:', usage );
return usage;
},
+ /** Render either the quota percentage meter or the human readable disk usage
+ * depending on whether the user model has quota info (quota_percent === null -> no quota)
+ * @returns {Object} this UserQuotaMeter
+ */
render : function(){
//this.log( this + '.rendering' );
var meterHtml = null;
@@ -93,6 +122,9 @@
return 'UserQuotaMeter(' + this.model + ')';
}
});
+
+
+//------------------------------------------------------------------------------ TEMPLATES
UserQuotaMeter.templates = {
quota : Handlebars.templates[ 'template-user-quotaMeter-quota' ],
usage : Handlebars.templates[ 'template-user-quotaMeter-usage' ]
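The quota:* events fired from _render_quota are what decouple the meter from the history panel's over-quota message. A hedged sketch of that wiring (the instance names meter and panel are placeholders):

// sketch: route quota events from the meter to the panel's message methods
meter.bind( 'quota:over',  function(){ panel.showQuotaMessage(); } );
meter.bind( 'quota:under', function(){ panel.hideQuotaMessage(); } );
// after an action that may change disk usage, re-fetch the user data:
//meter.update();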
diff -r 94627de6252824193ff0ff659807d17e59e4f94f -r 2d1402caf21a072edfdcaca0b12f1d38b4421b6e static/scripts/templates/compiled/template-history-historyPanel.js
--- a/static/scripts/templates/compiled/template-history-historyPanel.js
+++ b/static/scripts/templates/compiled/template-history-historyPanel.js
@@ -193,7 +193,8 @@
return "Your history is empty. Click 'Get Data' on the left pane to start";}
- buffer += "\n<div id=\"history-controls\">\n <div id=\"history-title-area\" class=\"historyLinks\">\n\n <div id=\"history-name-container\" style=\"float: left;\">\n ";
+ buffer += "<div id=\"history-controls\">\n <div id=\"history-title-area\" class=\"historyLinks\">\n\n ";
+ buffer += "\n <div id=\"history-name-container\" style=\"float: left;\">\n ";
buffer += "\n ";
stack1 = depth0.user;
stack1 = stack1 == null || stack1 === false ? stack1 : stack1.email;
@@ -205,7 +206,7 @@
else { stack1 = depth0.local; stack1 = typeof stack1 === functionType ? stack1() : stack1; }
if (!helpers.local) { stack1 = blockHelperMissing.call(depth0, stack1, {hash:{},inverse:self.noop,fn:self.program(7, program7, data)}); }
if(stack1 || stack1 === 0) { buffer += stack1; }
- buffer += "\"\n href=\"javascript:void(0);\" style=\"float: right;\">\n <span class=\"ficon cog large\"></span>\n </a>\n <div style=\"clear: both;\"></div>\n </div>\n\n <div id=\"history-subtitle-area\">\n <div id=\"history-size\" style=\"float:left;\">";
+ buffer += "\"\n href=\"javascript:void(0);\" style=\"float: right;\">\n <span class=\"ficon cogs large\"></span>\n </a>\n <div style=\"clear: both;\"></div>\n </div>\n\n <div id=\"history-subtitle-area\">\n <div id=\"history-size\" style=\"float:left;\">";
foundHelper = helpers.nice_size;
if (foundHelper) { stack1 = foundHelper.call(depth0, {hash:{}}); }
else { stack1 = depth0.nice_size; stack1 = typeof stack1 === functionType ? stack1() : stack1; }
diff -r 94627de6252824193ff0ff659807d17e59e4f94f -r 2d1402caf21a072edfdcaca0b12f1d38b4421b6e static/scripts/templates/history-templates.html
--- a/static/scripts/templates/history-templates.html
+++ b/static/scripts/templates/history-templates.html
@@ -19,7 +19,7 @@
<a id="history-action-popup" class="tooltip" title="{{#local}}Click to see more actions{{/local}}"
href="javascript:void(0);" style="float: right;">
- <span class="ficon cog large"></span>
+ <span class="ficon cogs large"></span></a><div style="clear: both;"></div></div>
diff -r 94627de6252824193ff0ff659807d17e59e4f94f -r 2d1402caf21a072edfdcaca0b12f1d38b4421b6e templates/root/alternate_history.mako
--- a/templates/root/alternate_history.mako
+++ b/templates/root/alternate_history.mako
@@ -423,7 +423,6 @@
width: 90%;
margin: -2px 0px -3px -4px;
font-weight: bold;
- font-size: 110%;
color: black;
}
https://bitbucket.org/galaxy/galaxy-central/changeset/4e50648f6e5d/
changeset: 4e50648f6e5d
user: carlfeberhard
date: 2012-11-15 17:25:58
summary: pack scripts
affected #: 9 files
diff -r 2d1402caf21a072edfdcaca0b12f1d38b4421b6e -r 4e50648f6e5d015d253cb3c9e68dd731729b06f7 static/scripts/packed/mvc/base-mvc.js
--- a/static/scripts/packed/mvc/base-mvc.js
+++ b/static/scripts/packed/mvc/base-mvc.js
@@ -1,1 +1,1 @@
-var BaseModel=Backbone.RelationalModel.extend({defaults:{name:null,hidden:false},show:function(){this.set("hidden",false)},hide:function(){this.set("hidden",true)},is_visible:function(){return !this.attributes.hidden}});var BaseView=Backbone.View.extend({initialize:function(){this.model.on("change:hidden",this.update_visible,this);this.update_visible()},update_visible:function(){if(this.model.attributes.hidden){this.$el.hide()}else{this.$el.show()}}});var LoggableMixin={logger:null,log:function(){if(this.logger){return this.logger.log.apply(this.logger,arguments)}return undefined}};var GalaxyLocalization=jQuery.extend({},{ALIAS_NAME:"_l",localizedStrings:{},setLocalizedString:function(b,a){var c=this;var d=function(f,e){if(f!==e){c.localizedStrings[f]=e}};if(jQuery.type(b)==="string"){d(b,a)}else{if(jQuery.type(b)==="object"){jQuery.each(b,function(e,f){d(e,f)})}else{throw ("Localization.setLocalizedString needs either a string or object as the first argument, given: "+b)}}},localize:function(a){return this.localizedStrings[a]||a},toString:function(){return"GalaxyLocalization"}});window[GalaxyLocalization.ALIAS_NAME]=function(a){return GalaxyLocalization.localize(a)};var PersistantStorage=function(g,d){if(!g){throw ("PersistantStorage needs storageKey argument")}d=d||{};var b=jQuery.jStorage.get,c=jQuery.jStorage.set,a=jQuery.jStorage.deleteKey;var e=function(i,h){i=i||{};h=h||null;return{get:function(j){if(j===undefined){return i}else{if(i.hasOwnProperty(j)){return(jQuery.type(i[j])==="object")?(new e(i[j],this)):(i[j])}}return undefined},set:function(j,k){i[j]=k;this.save();return this},deleteKey:function(j){delete i[j];this.save();return this},save:function(){return h.save()},toString:function(){return("StorageRecursionHelper("+i+")")}}};var f={};data=b(g);if(data===null){data=jQuery.extend(true,{},d);c(g,data)}f=new e(data);f.save=function(h){c(g,f.get())};f.destroy=function(){a(g)};f.toString=function(){return"PersistantStorage("+data+")"};return f};
\ No newline at end of file
+var BaseModel=Backbone.RelationalModel.extend({defaults:{name:null,hidden:false},show:function(){this.set("hidden",false)},hide:function(){this.set("hidden",true)},is_visible:function(){return !this.attributes.hidden}});var BaseView=Backbone.View.extend({initialize:function(){this.model.on("change:hidden",this.update_visible,this);this.update_visible()},update_visible:function(){if(this.model.attributes.hidden){this.$el.hide()}else{this.$el.show()}}});var LoggableMixin={logger:null,log:function(){if(this.logger){return this.logger.log.apply(this.logger,arguments)}return undefined}};var GalaxyLocalization=jQuery.extend({},{ALIAS_NAME:"_l",localizedStrings:{},setLocalizedString:function(b,a){var c=this;var d=function(f,e){if(f!==e){c.localizedStrings[f]=e}};if(jQuery.type(b)==="string"){d(b,a)}else{if(jQuery.type(b)==="object"){jQuery.each(b,function(e,f){d(e,f)})}else{throw ("Localization.setLocalizedString needs either a string or object as the first argument, given: "+b)}}},localize:function(a){return this.localizedStrings[a]||a},toString:function(){return"GalaxyLocalization"}});window[GalaxyLocalization.ALIAS_NAME]=function(a){return GalaxyLocalization.localize(a)};var PersistantStorage=function(g,d){if(!g){throw ("PersistantStorage needs storageKey argument")}d=d||{};var b=jQuery.jStorage.get,c=jQuery.jStorage.set,a=jQuery.jStorage.deleteKey;function e(i,h){i=i||{};h=h||null;return{get:function(j){if(j===undefined){return i}else{if(i.hasOwnProperty(j)){return(jQuery.type(i[j])==="object")?(new e(i[j],this)):(i[j])}}return undefined},set:function(j,k){i[j]=k;this._save();return this},deleteKey:function(j){delete i[j];this._save();return this},_save:function(){return h._save()},toString:function(){return("StorageRecursionHelper("+i+")")}}}var f={};data=b(g);if(data===null){data=jQuery.extend(true,{},d);c(g,data)}f=new e(data);jQuery.extend(f,{_save:function(h){return c(g,f.get())},destroy:function(){return a(g)},toString:function(){return"PersistantStorage("+data+")"}});return f};
\ No newline at end of file
diff -r 2d1402caf21a072edfdcaca0b12f1d38b4421b6e -r 4e50648f6e5d015d253cb3c9e68dd731729b06f7 static/scripts/packed/mvc/dataset/hda-base.js
--- a/static/scripts/packed/mvc/dataset/hda-base.js
+++ b/static/scripts/packed/mvc/dataset/hda-base.js
@@ -1,1 +1,1 @@
-var HDABaseView=BaseView.extend(LoggableMixin).extend({tagName:"div",className:"historyItemContainer",initialize:function(a){this.log(this+".initialize:",a);this.defaultPrimaryActionButtonRenderers=[this._render_showParamsButton];if(!a.urlTemplates){throw ("HDAView needs urlTemplates on initialize")}this.urls=this.renderUrls(a.urlTemplates,this.model.toJSON());this.expanded=a.expanded||false;this.model.bind("change",this.render,this)},renderUrls:function(d,a){var b=this,c={};_.each(d,function(e,f){if(_.isObject(e)){c[f]=b.renderUrls(e,a)}else{if(f==="meta_download"){c[f]=b.renderMetaDownloadUrls(e,a)}else{c[f]=_.template(e,a)}}});return c},renderMetaDownloadUrls:function(b,a){return _.map(a.meta_files,function(c){return{url:_.template(b,{id:a.id,file_type:c.file_type}),file_type:c.file_type}})},render:function(){var b=this,e=this.model.get("id"),c=this.model.get("state"),a=$("<div/>").attr("id","historyItem-"+e),d=(this.$el.children().size()===0);this.$el.attr("id","historyItemContainer-"+e);a.addClass("historyItemWrapper").addClass("historyItem").addClass("historyItem-"+c);a.append(this._render_warnings());a.append(this._render_titleBar());this.body=$(this._render_body());a.append(this.body);make_popup_menus(a);a.find(".tooltip").tooltip({placement:"bottom"});this.$el.fadeOut("fast",function(){b.$el.children().remove();b.$el.append(a).fadeIn("fast",function(){b.log(b+" rendered:",b.$el);var f="rendered";if(d){f+=":initial"}else{if(b.model.inReadyState()){f+=":ready"}}b.trigger(f)})});return this},_render_warnings:function(){return $(jQuery.trim(HDABaseView.templates.messages(this.model.toJSON())))},_render_titleBar:function(){var a=$('<div class="historyItemTitleBar" style="overflow: hidden"></div>');a.append(this._render_titleButtons());a.append('<span class="state-icon"></span>');a.append(this._render_titleLink());return a},_render_titleButtons:function(){var a=$('<div class="historyItemButtons"></div>');a.append(this._render_displayButton());return a},_render_displayButton:function(){if((!this.model.inReadyState())||(this.model.get("state")===HistoryDatasetAssociation.STATES.ERROR)||(this.model.get("state")===HistoryDatasetAssociation.STATES.NOT_VIEWABLE)||(!this.model.get("accessible"))){this.displayButton=null;return null}var a={icon_class:"display",target:"galaxy_main"};if(this.model.get("purged")){a.enabled=false;a.title=_l("Cannot display datasets removed from disk")}else{a.title=_l("Display data in browser");a.href=this.urls.display}this.displayButton=new IconButtonView({model:new IconButton(a)});return this.displayButton.render().$el},_render_titleLink:function(){return $(jQuery.trim(HDABaseView.templates.titleLink(_.extend(this.model.toJSON(),{urls:this.urls}))))},_render_hdaSummary:function(){var a=_.extend(this.model.toJSON(),{urls:this.urls});return HDABaseView.templates.hdaSummary(a)},_render_primaryActionButtons:function(c){var a=this,b=$("<div/>").attr("id","primary-actions-"+this.model.get("id"));_.each(c,function(d){b.append(d.call(a))});return b},_render_downloadButton:function(){if(this.model.get("purged")||!this.model.hasData()){return null}var a=HDABaseView.templates.downloadLinks(_.extend(this.model.toJSON(),{urls:this.urls}));return $(a)},_render_showParamsButton:function(){this.showParamsButton=new IconButtonView({model:new IconButton({title:_l("View details"),href:this.urls.show_params,target:"galaxy_main",icon_class:"information"})});return this.showParamsButton.render().$el},_render_displayApps:function(){if(!this.model.hasData()){return null}var 
a=$("<div/>").addClass("display-apps");if(!_.isEmpty(this.model.get("display_types"))){a.append(HDABaseView.templates.displayApps({displayApps:this.model.get("display_types")}))}if(!_.isEmpty(this.model.get("display_apps"))){a.append(HDABaseView.templates.displayApps({displayApps:this.model.get("display_apps")}))}return a},_render_peek:function(){if(!this.model.get("peek")){return null}return $("<div/>").append($("<pre/>").attr("id","peek"+this.model.get("id")).addClass("peek").append(this.model.get("peek")))},_render_body_not_viewable:function(a){a.append($("<div>"+_l("You do not have permission to view dataset")+".</div>"))},_render_body_uploading:function(a){a.append($("<div>"+_l("Dataset is uploading")+"</div>"))},_render_body_queued:function(a){a.append($("<div>"+_l("Job is waiting to run")+".</div>"));a.append(this._render_primaryActionButtons(this.defaultPrimaryActionButtonRenderers))},_render_body_running:function(a){a.append("<div>"+_l("Job is currently running")+".</div>");a.append(this._render_primaryActionButtons(this.defaultPrimaryActionButtonRenderers))},_render_body_error:function(a){if(!this.model.get("purged")){a.append($("<div>"+this.model.get("misc_blurb")+"</div>"))}a.append((_l("An error occurred running this job")+": <i>"+$.trim(this.model.get("misc_info"))+"</i>"));a.append(this._render_primaryActionButtons(this.defaultPrimaryActionButtonRenderers.concat([this._render_downloadButton])))},_render_body_discarded:function(a){a.append("<div>"+_l("The job creating this dataset was cancelled before completion")+".</div>");a.append(this._render_primaryActionButtons(this.defaultPrimaryActionButtonRenderers))},_render_body_setting_metadata:function(a){a.append($("<div>"+_l("Metadata is being auto-detected")+".</div>"))},_render_body_empty:function(a){a.append($("<div>"+_l("No data")+": <i>"+this.model.get("misc_blurb")+"</i></div>"));a.append(this._render_primaryActionButtons(this.defaultPrimaryActionButtonRenderers))},_render_body_failed_metadata:function(a){a.append($(HDABaseView.templates.failedMetadata(this.model.toJSON())));this._render_body_ok(a)},_render_body_ok:function(a){a.append(this._render_hdaSummary());if(this.model.isDeletedOrPurged()){a.append(this._render_primaryActionButtons([this._render_downloadButton,this._render_showParamsButton]));return}a.append(this._render_primaryActionButtons([this._render_downloadButton,this._render_showParamsButton]));a.append('<div class="clear"/>');a.append(this._render_displayApps());a.append(this._render_peek())},_render_body:function(){var a=$("<div/>").attr("id","info-"+this.model.get("id")).addClass("historyItemBody").attr("style","display: block");switch(this.model.get("state")){case HistoryDatasetAssociation.STATES.NOT_VIEWABLE:this._render_body_not_viewable(a);break;case HistoryDatasetAssociation.STATES.UPLOAD:this._render_body_uploading(a);break;case HistoryDatasetAssociation.STATES.QUEUED:this._render_body_queued(a);break;case HistoryDatasetAssociation.STATES.RUNNING:this._render_body_running(a);break;case HistoryDatasetAssociation.STATES.ERROR:this._render_body_error(a);break;case HistoryDatasetAssociation.STATES.DISCARDED:this._render_body_discarded(a);break;case HistoryDatasetAssociation.STATES.SETTING_METADATA:this._render_body_setting_metadata(a);break;case HistoryDatasetAssociation.STATES.EMPTY:this._render_body_empty(a);break;case HistoryDatasetAssociation.STATES.FAILED_METADATA:this._render_body_failed_metadata(a);break;case 
HistoryDatasetAssociation.STATES.OK:this._render_body_ok(a);break;default:a.append($('<div>Error: unknown dataset state "'+state+'".</div>'))}a.append('<div style="clear: both"></div>');if(this.expanded){a.show()}else{a.hide()}return a},events:{"click .historyItemTitle":"toggleBodyVisibility"},toggleBodyVisibility:function(c,a){var b=this,d=this.$el.find(".historyItemBody");a=(a===undefined)?(!d.is(":visible")):(a);if(a){d.slideDown("fast",function(){b.trigger("body-visible",b.model.get("id"))})}else{d.slideUp("fast",function(){b.trigger("body-hidden",b.model.get("id"))})}},toString:function(){var a=(this.model)?(this.model+""):("(no model)");return"HDABaseView("+a+")"}});HDABaseView.templates={warningMsg:Handlebars.templates["template-warningmessagesmall"],messages:Handlebars.templates["template-hda-warning-messages"],titleLink:Handlebars.templates["template-hda-titleLink"],hdaSummary:Handlebars.templates["template-hda-hdaSummary"],downloadLinks:Handlebars.templates["template-hda-downloadLinks"],failedMetadata:Handlebars.templates["template-hda-failedMetaData"],displayApps:Handlebars.templates["template-hda-displayApps"]};
\ No newline at end of file
+var HDABaseView=BaseView.extend(LoggableMixin).extend({tagName:"div",className:"historyItemContainer",initialize:function(a){this.log(this+".initialize:",a);this.defaultPrimaryActionButtonRenderers=[this._render_showParamsButton];if(!a.urlTemplates){throw ("HDAView needs urlTemplates on initialize")}this.urls=this._renderUrls(a.urlTemplates,this.model.toJSON());this.expanded=a.expanded||false;this.model.bind("change",this.render,this)},_renderUrls:function(d,a){var b=this,c={};_.each(d,function(e,f){if(_.isObject(e)){c[f]=b._renderUrls(e,a)}else{if(f==="meta_download"){c[f]=b._renderMetaDownloadUrls(e,a)}else{c[f]=_.template(e,a)}}});return c},_renderMetaDownloadUrls:function(b,a){return _.map(a.meta_files,function(c){return{url:_.template(b,{id:a.id,file_type:c.file_type}),file_type:c.file_type}})},render:function(){var b=this,e=this.model.get("id"),c=this.model.get("state"),a=$("<div/>").attr("id","historyItem-"+e),d=(this.$el.children().size()===0);this.$el.attr("id","historyItemContainer-"+e);a.addClass("historyItemWrapper").addClass("historyItem").addClass("historyItem-"+c);a.append(this._render_warnings());a.append(this._render_titleBar());this.body=$(this._render_body());a.append(this.body);make_popup_menus(a);a.find(".tooltip").tooltip({placement:"bottom"});this.$el.fadeOut("fast",function(){b.$el.children().remove();b.$el.append(a).fadeIn("fast",function(){b.log(b+" rendered:",b.$el);var f="rendered";if(d){f+=":initial"}else{if(b.model.inReadyState()){f+=":ready"}}b.trigger(f)})});return this},_render_warnings:function(){return $(jQuery.trim(HDABaseView.templates.messages(this.model.toJSON())))},_render_titleBar:function(){var a=$('<div class="historyItemTitleBar" style="overflow: hidden"></div>');a.append(this._render_titleButtons());a.append('<span class="state-icon"></span>');a.append(this._render_titleLink());return a},_render_titleButtons:function(){var a=$('<div class="historyItemButtons"></div>');a.append(this._render_displayButton());return a},_render_displayButton:function(){if((!this.model.inReadyState())||(this.model.get("state")===HistoryDatasetAssociation.STATES.ERROR)||(this.model.get("state")===HistoryDatasetAssociation.STATES.NOT_VIEWABLE)||(!this.model.get("accessible"))){this.displayButton=null;return null}var a={icon_class:"display",target:"galaxy_main"};if(this.model.get("purged")){a.enabled=false;a.title=_l("Cannot display datasets removed from disk")}else{a.title=_l("Display data in browser");a.href=this.urls.display}this.displayButton=new IconButtonView({model:new IconButton(a)});return this.displayButton.render().$el},_render_titleLink:function(){return $(jQuery.trim(HDABaseView.templates.titleLink(_.extend(this.model.toJSON(),{urls:this.urls}))))},_render_hdaSummary:function(){var a=_.extend(this.model.toJSON(),{urls:this.urls});return HDABaseView.templates.hdaSummary(a)},_render_primaryActionButtons:function(c){var a=this,b=$("<div/>").attr("id","primary-actions-"+this.model.get("id"));_.each(c,function(d){b.append(d.call(a))});return b},_render_downloadButton:function(){if(this.model.get("purged")||!this.model.hasData()){return null}var a=HDABaseView.templates.downloadLinks(_.extend(this.model.toJSON(),{urls:this.urls}));return $(a)},_render_showParamsButton:function(){this.showParamsButton=new IconButtonView({model:new IconButton({title:_l("View details"),href:this.urls.show_params,target:"galaxy_main",icon_class:"information"})});return this.showParamsButton.render().$el},_render_displayApps:function(){if(!this.model.hasData()){return null}var 
a=$("<div/>").addClass("display-apps");if(!_.isEmpty(this.model.get("display_types"))){a.append(HDABaseView.templates.displayApps({displayApps:this.model.get("display_types")}))}if(!_.isEmpty(this.model.get("display_apps"))){a.append(HDABaseView.templates.displayApps({displayApps:this.model.get("display_apps")}))}return a},_render_peek:function(){if(!this.model.get("peek")){return null}return $("<div/>").append($("<pre/>").attr("id","peek"+this.model.get("id")).addClass("peek").append(this.model.get("peek")))},_render_body:function(){var a=$("<div/>").attr("id","info-"+this.model.get("id")).addClass("historyItemBody").attr("style","display: block");switch(this.model.get("state")){case HistoryDatasetAssociation.STATES.NOT_VIEWABLE:this._render_body_not_viewable(a);break;case HistoryDatasetAssociation.STATES.UPLOAD:this._render_body_uploading(a);break;case HistoryDatasetAssociation.STATES.QUEUED:this._render_body_queued(a);break;case HistoryDatasetAssociation.STATES.RUNNING:this._render_body_running(a);break;case HistoryDatasetAssociation.STATES.ERROR:this._render_body_error(a);break;case HistoryDatasetAssociation.STATES.DISCARDED:this._render_body_discarded(a);break;case HistoryDatasetAssociation.STATES.SETTING_METADATA:this._render_body_setting_metadata(a);break;case HistoryDatasetAssociation.STATES.EMPTY:this._render_body_empty(a);break;case HistoryDatasetAssociation.STATES.FAILED_METADATA:this._render_body_failed_metadata(a);break;case HistoryDatasetAssociation.STATES.OK:this._render_body_ok(a);break;default:a.append($('<div>Error: unknown dataset state "'+state+'".</div>'))}a.append('<div style="clear: both"></div>');if(this.expanded){a.show()}else{a.hide()}return a},_render_body_not_viewable:function(a){a.append($("<div>"+_l("You do not have permission to view dataset")+".</div>"))},_render_body_uploading:function(a){a.append($("<div>"+_l("Dataset is uploading")+"</div>"))},_render_body_queued:function(a){a.append($("<div>"+_l("Job is waiting to run")+".</div>"));a.append(this._render_primaryActionButtons(this.defaultPrimaryActionButtonRenderers))},_render_body_running:function(a){a.append("<div>"+_l("Job is currently running")+".</div>");a.append(this._render_primaryActionButtons(this.defaultPrimaryActionButtonRenderers))},_render_body_error:function(a){if(!this.model.get("purged")){a.append($("<div>"+this.model.get("misc_blurb")+"</div>"))}a.append((_l("An error occurred running this job")+": <i>"+$.trim(this.model.get("misc_info"))+"</i>"));a.append(this._render_primaryActionButtons(this.defaultPrimaryActionButtonRenderers.concat([this._render_downloadButton])))},_render_body_discarded:function(a){a.append("<div>"+_l("The job creating this dataset was cancelled before completion")+".</div>");a.append(this._render_primaryActionButtons(this.defaultPrimaryActionButtonRenderers))},_render_body_setting_metadata:function(a){a.append($("<div>"+_l("Metadata is being auto-detected")+".</div>"))},_render_body_empty:function(a){a.append($("<div>"+_l("No data")+": 
<i>"+this.model.get("misc_blurb")+"</i></div>"));a.append(this._render_primaryActionButtons(this.defaultPrimaryActionButtonRenderers))},_render_body_failed_metadata:function(a){a.append($(HDABaseView.templates.failedMetadata(this.model.toJSON())));this._render_body_ok(a)},_render_body_ok:function(a){a.append(this._render_hdaSummary());if(this.model.isDeletedOrPurged()){a.append(this._render_primaryActionButtons([this._render_downloadButton,this._render_showParamsButton]));return}a.append(this._render_primaryActionButtons([this._render_downloadButton,this._render_showParamsButton]));a.append('<div class="clear"/>');a.append(this._render_displayApps());a.append(this._render_peek())},events:{"click .historyItemTitle":"toggleBodyVisibility"},toggleBodyVisibility:function(c,a){var b=this,d=this.$el.find(".historyItemBody");a=(a===undefined)?(!d.is(":visible")):(a);if(a){d.slideDown("fast",function(){b.trigger("body-expanded",b.model.get("id"))})}else{d.slideUp("fast",function(){b.trigger("body-collapsed",b.model.get("id"))})}},toString:function(){var a=(this.model)?(this.model+""):("(no model)");return"HDABaseView("+a+")"}});HDABaseView.templates={warningMsg:Handlebars.templates["template-warningmessagesmall"],messages:Handlebars.templates["template-hda-warning-messages"],titleLink:Handlebars.templates["template-hda-titleLink"],hdaSummary:Handlebars.templates["template-hda-hdaSummary"],downloadLinks:Handlebars.templates["template-hda-downloadLinks"],failedMetadata:Handlebars.templates["template-hda-failedMetaData"],displayApps:Handlebars.templates["template-hda-displayApps"]};
\ No newline at end of file
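The packed hda-base.js above ends with _render_body, which dispatches to one helper per dataset state. An unpacked sketch of that dispatch follows; the STATES keys and helper names are taken from the diff, while the base class and helper bodies are stand-ins for brevity:

// Illustrative unpacked form of the per-state dispatch in HDABaseView._render_body.
// Real helper bodies build buttons, blurbs, peeks, etc.
var HDABaseView = Backbone.View.extend({
    _render_body_queued : function( body ){ body.append( '<div>Job is waiting to run.</div>' ); },
    _render_body_ok : function( body ){ body.append( '<div>(summary, action buttons, peek...)</div>' ); },
    _render_body : function(){
        var body = $( '<div/>' ).addClass( 'historyItemBody' );
        switch( this.model.get( 'state' ) ){
            case HistoryDatasetAssociation.STATES.QUEUED : this._render_body_queued( body ); break;
            case HistoryDatasetAssociation.STATES.OK : this._render_body_ok( body ); break;
            // ...one case per remaining state...
            default :
                body.append( '<div>Error: unknown dataset state "' + this.model.get( 'state' ) + '".</div>' );
        }
        return body;
    }
});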
diff -r 2d1402caf21a072edfdcaca0b12f1d38b4421b6e -r 4e50648f6e5d015d253cb3c9e68dd731729b06f7 static/scripts/packed/mvc/dataset/hda-edit.js
--- a/static/scripts/packed/mvc/dataset/hda-edit.js
+++ b/static/scripts/packed/mvc/dataset/hda-edit.js
@@ -1,1 +1,1 @@
-var HDAEditView=HDABaseView.extend({initialize:function(a){HDABaseView.prototype.initialize.call(this,a);this.defaultPrimaryActionButtonRenderers=[this._render_showParamsButton,this._render_rerunButton]},_render_warnings:function(){return $(jQuery.trim(HDABaseView.templates.messages(_.extend(this.model.toJSON(),{urls:this.urls}))))},_render_titleButtons:function(){var a=$('<div class="historyItemButtons"></div>');a.append(this._render_displayButton());a.append(this._render_editButton());a.append(this._render_deleteButton());return a},_render_editButton:function(){if((this.model.get("state")===HistoryDatasetAssociation.STATES.UPLOAD)||(this.model.get("state")===HistoryDatasetAssociation.STATES.ERROR)||(this.model.get("state")===HistoryDatasetAssociation.STATES.NOT_VIEWABLE)||(!this.model.get("accessible"))){this.editButton=null;return null}var c=this.model.get("purged"),a=this.model.get("deleted"),b={title:_l("Edit Attributes"),href:this.urls.edit,target:"galaxy_main",icon_class:"edit"};if(a||c){b.enabled=false;if(c){b.title=_l("Cannot edit attributes of datasets removed from disk")}else{if(a){b.title=_l("Undelete dataset to edit attributes")}}}this.editButton=new IconButtonView({model:new IconButton(b)});return this.editButton.render().$el},_render_deleteButton:function(){if((this.model.get("state")===HistoryDatasetAssociation.STATES.NOT_VIEWABLE)||(!this.model.get("accessible"))){this.deleteButton=null;return null}var a={title:_l("Delete"),href:this.urls["delete"],id:"historyItemDeleter-"+this.model.get("id"),icon_class:"delete"};if(this.model.get("deleted")||this.model.get("purged")){a={title:_l("Dataset is already deleted"),icon_class:"delete",enabled:false}}this.deleteButton=new IconButtonView({model:new IconButton(a)});return this.deleteButton.render().$el},_render_hdaSummary:function(){var a=_.extend(this.model.toJSON(),{urls:this.urls});if(this.model.get("metadata_dbkey")==="?"&&!this.model.isDeletedOrPurged()){_.extend(a,{dbkey_unknown_and_editable:true})}return HDABaseView.templates.hdaSummary(a)},_render_errButton:function(){if(this.model.get("state")!==HistoryDatasetAssociation.STATES.ERROR){this.errButton=null;return null}this.errButton=new IconButtonView({model:new IconButton({title:_l("View or report this error"),href:this.urls.report_error,target:"galaxy_main",icon_class:"bug"})});return this.errButton.render().$el},_render_rerunButton:function(){this.rerunButton=new IconButtonView({model:new IconButton({title:_l("Run this job again"),href:this.urls.rerun,target:"galaxy_main",icon_class:"arrow-circle"})});return this.rerunButton.render().$el},_render_visualizationsButton:function(){var c=this.model.get("dbkey"),a=this.model.get("visualizations"),f=this.urls.visualization,d={},g={dataset_id:this.model.get("id"),hda_ldda:"hda"};if(!(this.model.hasData())||!(a&&a.length)||!(f)){this.visualizationsButton=null;return null}this.visualizationsButton=new IconButtonView({model:new IconButton({title:_l("Visualize"),href:f,icon_class:"chart_curve"})});var b=this.visualizationsButton.render().$el;b.addClass("visualize-icon");if(c){g.dbkey=c}function e(h){switch(h){case"trackster":return create_trackster_action_fn(f,g,c);case"scatterplot":return create_scatterplot_action_fn(f,g);default:return function(){window.parent.location=f+"/"+h+"?"+$.param(g)}}}if(a.length===1){b.attr("title",a[0]);b.click(e(a[0]))}else{_.each(a,function(i){var h=i.charAt(0).toUpperCase()+i.slice(1);d[_l(h)]=e(i)});make_popupmenu(b,d)}return b},_render_secondaryActionButtons:function(b){var 
c=$("<div/>"),a=this;c.attr("style","float: right;").attr("id","secondary-actions-"+this.model.get("id"));_.each(b,function(d){c.append(d.call(a))});return c},_render_tagButton:function(){if(!(this.model.hasData())||(!this.urls.tags.get)){this.tagButton=null;return null}this.tagButton=new IconButtonView({model:new IconButton({title:_l("Edit dataset tags"),target:"galaxy_main",href:this.urls.tags.get,icon_class:"tags"})});return this.tagButton.render().$el},_render_annotateButton:function(){if(!(this.model.hasData())||(!this.urls.annotation.get)){this.annotateButton=null;return null}this.annotateButton=new IconButtonView({model:new IconButton({title:_l("Edit dataset annotation"),target:"galaxy_main",icon_class:"annotate"})});return this.annotateButton.render().$el},_render_tagArea:function(){if(!this.urls.tags.set){return null}return $(HDAEditView.templates.tagArea(_.extend(this.model.toJSON(),{urls:this.urls})))},_render_annotationArea:function(){if(!this.urls.annotation.get){return null}return $(HDAEditView.templates.annotationArea(_.extend(this.model.toJSON(),{urls:this.urls})))},_render_body_error:function(a){HDABaseView.prototype._render_body_error.call(this,a);var b=a.find("#primary-actions-"+this.model.get("id"));b.prepend(this._render_errButton())},_render_body_ok:function(a){a.append(this._render_hdaSummary());if(this.model.isDeletedOrPurged()){a.append(this._render_primaryActionButtons([this._render_downloadButton,this._render_showParamsButton,this._render_rerunButton]));return}a.append(this._render_primaryActionButtons([this._render_downloadButton,this._render_showParamsButton,this._render_rerunButton,this._render_visualizationsButton]));a.append(this._render_secondaryActionButtons([this._render_tagButton,this._render_annotateButton]));a.append('<div class="clear"/>');a.append(this._render_tagArea());a.append(this._render_annotationArea());a.append(this._render_displayApps());a.append(this._render_peek())},events:{"click .historyItemTitle":"toggleBodyVisibility","click a.icon-button.tags":"loadAndDisplayTags","click a.icon-button.annotate":"loadAndDisplayAnnotation"},loadAndDisplayTags:function(b){this.log(this+".loadAndDisplayTags",b);var c=this.$el.find(".tag-area"),a=c.find(".tag-elt");if(c.is(":hidden")){if(!jQuery.trim(a.html())){$.ajax({url:this.urls.tags.get,error:function(){alert(_l("Tagging failed"))},success:function(d){a.html(d);a.find(".tooltip").tooltip();c.slideDown("fast")}})}else{c.slideDown("fast")}}else{c.slideUp("fast")}return false},loadAndDisplayAnnotation:function(b){this.log(this+".loadAndDisplayAnnotation",b);var d=this.$el.find(".annotation-area"),c=d.find(".annotation-elt"),a=this.urls.annotation.set;if(d.is(":hidden")){if(!jQuery.trim(c.html())){$.ajax({url:this.urls.annotation.get,error:function(){alert(_l("Annotations failed"))},success:function(e){if(e===""){e="<em>"+_l("Describe or add notes to dataset")+"</em>"}c.html(e);d.find(".tooltip").tooltip();async_save_text(c.attr("id"),c.attr("id"),a,"new_annotation",18,true,4);d.slideDown("fast")}})}else{d.slideDown("fast")}}else{d.slideUp("fast")}return false},toString:function(){var a=(this.model)?(this.model+""):("(no model)");return"HDAView("+a+")"}});HDAEditView.templates={tagArea:Handlebars.templates["template-hda-tagArea"],annotationArea:Handlebars.templates["template-hda-annotationArea"]};function create_scatterplot_action_fn(a,b){action=function(){var d=$(window.parent.document).find("iframe#galaxy_main"),c=a+"/scatterplot?"+$.param(b);d.attr("src",c);$("div.popmenu-wrapper").remove();return 
false};return action}function create_trackster_action_fn(a,c,b){return function(){var d={};if(b){d.dbkey=b}$.ajax({url:a+"/list_tracks?f-"+$.param(d),dataType:"html",error:function(){alert(_l("Could not add this dataset to browser")+".")},success:function(e){var f=window.parent;f.show_modal(_l("View Data in a New or Saved Visualization"),"",{Cancel:function(){f.hide_modal()},"View in saved visualization":function(){f.show_modal(_l("Add Data to Saved Visualization"),e,{Cancel:function(){f.hide_modal()},"Add to visualization":function(){$(f.document).find("input[name=id]:checked").each(function(){var g=$(this).val();c.id=g;f.location=a+"/trackster?"+$.param(c)})}})},"View in new visualization":function(){f.location=a+"/trackster?"+$.param(c)}})}});return false}};
\ No newline at end of file
+var HDAEditView=HDABaseView.extend(LoggableMixin).extend({initialize:function(a){HDABaseView.prototype.initialize.call(this,a);this.defaultPrimaryActionButtonRenderers=[this._render_showParamsButton,this._render_rerunButton]},_render_warnings:function(){return $(jQuery.trim(HDABaseView.templates.messages(_.extend(this.model.toJSON(),{urls:this.urls}))))},_render_titleButtons:function(){var a=$('<div class="historyItemButtons"></div>');a.append(this._render_displayButton());a.append(this._render_editButton());a.append(this._render_deleteButton());return a},_render_editButton:function(){if((this.model.get("state")===HistoryDatasetAssociation.STATES.UPLOAD)||(this.model.get("state")===HistoryDatasetAssociation.STATES.ERROR)||(this.model.get("state")===HistoryDatasetAssociation.STATES.NOT_VIEWABLE)||(!this.model.get("accessible"))){this.editButton=null;return null}var c=this.model.get("purged"),a=this.model.get("deleted"),b={title:_l("Edit Attributes"),href:this.urls.edit,target:"galaxy_main",icon_class:"edit"};if(a||c){b.enabled=false;if(c){b.title=_l("Cannot edit attributes of datasets removed from disk")}else{if(a){b.title=_l("Undelete dataset to edit attributes")}}}this.editButton=new IconButtonView({model:new IconButton(b)});return this.editButton.render().$el},_render_deleteButton:function(){if((this.model.get("state")===HistoryDatasetAssociation.STATES.NOT_VIEWABLE)||(!this.model.get("accessible"))){this.deleteButton=null;return null}var a={title:_l("Delete"),href:this.urls["delete"],id:"historyItemDeleter-"+this.model.get("id"),icon_class:"delete"};if(this.model.get("deleted")||this.model.get("purged")){a={title:_l("Dataset is already deleted"),icon_class:"delete",enabled:false}}this.deleteButton=new IconButtonView({model:new IconButton(a)});return this.deleteButton.render().$el},_render_hdaSummary:function(){var a=_.extend(this.model.toJSON(),{urls:this.urls});if(this.model.get("metadata_dbkey")==="?"&&!this.model.isDeletedOrPurged()){_.extend(a,{dbkey_unknown_and_editable:true})}return HDABaseView.templates.hdaSummary(a)},_render_errButton:function(){if(this.model.get("state")!==HistoryDatasetAssociation.STATES.ERROR){this.errButton=null;return null}this.errButton=new IconButtonView({model:new IconButton({title:_l("View or report this error"),href:this.urls.report_error,target:"galaxy_main",icon_class:"bug"})});return this.errButton.render().$el},_render_rerunButton:function(){this.rerunButton=new IconButtonView({model:new IconButton({title:_l("Run this job again"),href:this.urls.rerun,target:"galaxy_main",icon_class:"arrow-circle"})});return this.rerunButton.render().$el},_render_visualizationsButton:function(){var c=this.model.get("dbkey"),a=this.model.get("visualizations"),f=this.urls.visualization,d={},g={dataset_id:this.model.get("id"),hda_ldda:"hda"};if(!(this.model.hasData())||!(a&&a.length)||!(f)){this.visualizationsButton=null;return null}this.visualizationsButton=new IconButtonView({model:new IconButton({title:_l("Visualize"),href:f,icon_class:"chart_curve"})});var b=this.visualizationsButton.render().$el;b.addClass("visualize-icon");if(c){g.dbkey=c}function e(h){switch(h){case"trackster":return create_trackster_action_fn(f,g,c);case"scatterplot":return create_scatterplot_action_fn(f,g);default:return function(){window.parent.location=f+"/"+h+"?"+$.param(g)}}}if(a.length===1){b.attr("title",a[0]);b.click(e(a[0]))}else{_.each(a,function(i){var h=i.charAt(0).toUpperCase()+i.slice(1);d[_l(h)]=e(i)});make_popupmenu(b,d)}return b},_render_secondaryActionButtons:function(b){var 
c=$("<div/>"),a=this;c.attr("style","float: right;").attr("id","secondary-actions-"+this.model.get("id"));_.each(b,function(d){c.append(d.call(a))});return c},_render_tagButton:function(){if(!(this.model.hasData())||(!this.urls.tags.get)){this.tagButton=null;return null}this.tagButton=new IconButtonView({model:new IconButton({title:_l("Edit dataset tags"),target:"galaxy_main",href:this.urls.tags.get,icon_class:"tags"})});return this.tagButton.render().$el},_render_annotateButton:function(){if(!(this.model.hasData())||(!this.urls.annotation.get)){this.annotateButton=null;return null}this.annotateButton=new IconButtonView({model:new IconButton({title:_l("Edit dataset annotation"),target:"galaxy_main",icon_class:"annotate"})});return this.annotateButton.render().$el},_render_tagArea:function(){if(!this.urls.tags.set){return null}return $(HDAEditView.templates.tagArea(_.extend(this.model.toJSON(),{urls:this.urls})))},_render_annotationArea:function(){if(!this.urls.annotation.get){return null}return $(HDAEditView.templates.annotationArea(_.extend(this.model.toJSON(),{urls:this.urls})))},_render_body_error:function(a){HDABaseView.prototype._render_body_error.call(this,a);var b=a.find("#primary-actions-"+this.model.get("id"));b.prepend(this._render_errButton())},_render_body_ok:function(a){a.append(this._render_hdaSummary());if(this.model.isDeletedOrPurged()){a.append(this._render_primaryActionButtons([this._render_downloadButton,this._render_showParamsButton,this._render_rerunButton]));return}a.append(this._render_primaryActionButtons([this._render_downloadButton,this._render_showParamsButton,this._render_rerunButton,this._render_visualizationsButton]));a.append(this._render_secondaryActionButtons([this._render_tagButton,this._render_annotateButton]));a.append('<div class="clear"/>');a.append(this._render_tagArea());a.append(this._render_annotationArea());a.append(this._render_displayApps());a.append(this._render_peek())},events:{"click .historyItemTitle":"toggleBodyVisibility","click a.icon-button.tags":"loadAndDisplayTags","click a.icon-button.annotate":"loadAndDisplayAnnotation"},loadAndDisplayTags:function(b){this.log(this+".loadAndDisplayTags",b);var c=this.$el.find(".tag-area"),a=c.find(".tag-elt");if(c.is(":hidden")){if(!jQuery.trim(a.html())){$.ajax({url:this.urls.tags.get,error:function(){alert(_l("Tagging failed"))},success:function(d){a.html(d);a.find(".tooltip").tooltip();c.slideDown("fast")}})}else{c.slideDown("fast")}}else{c.slideUp("fast")}return false},loadAndDisplayAnnotation:function(b){this.log(this+".loadAndDisplayAnnotation",b);var d=this.$el.find(".annotation-area"),c=d.find(".annotation-elt"),a=this.urls.annotation.set;if(d.is(":hidden")){if(!jQuery.trim(c.html())){$.ajax({url:this.urls.annotation.get,error:function(){alert(_l("Annotations failed"))},success:function(e){if(e===""){e="<em>"+_l("Describe or add notes to dataset")+"</em>"}c.html(e);d.find(".tooltip").tooltip();async_save_text(c.attr("id"),c.attr("id"),a,"new_annotation",18,true,4);d.slideDown("fast")}})}else{d.slideDown("fast")}}else{d.slideUp("fast")}return false},toString:function(){var a=(this.model)?(this.model+""):("(no model)");return"HDAView("+a+")"}});HDAEditView.templates={tagArea:Handlebars.templates["template-hda-tagArea"],annotationArea:Handlebars.templates["template-hda-annotationArea"]};function create_scatterplot_action_fn(a,b){var action=function(){var d=$(window.parent.document).find("iframe#galaxy_main"),c=a+"/scatterplot?"+$.param(b);d.attr("src",c);$("div.popmenu-wrapper").remove();return false};return action}function create_trackster_action_fn(a,c,b){return function(){var d={};if(b){d.dbkey=b}$.ajax({url:a+"/list_tracks?f-"+$.param(d),dataType:"html",error:function(){alert(_l("Could not add this dataset to browser")+".")},success:function(e){var f=window.parent;f.show_modal(_l("View Data in a New or Saved Visualization"),"",{Cancel:function(){f.hide_modal()},"View in saved visualization":function(){f.show_modal(_l("Add Data to Saved Visualization"),e,{Cancel:function(){f.hide_modal()},"Add to visualization":function(){$(f.document).find("input[name=id]:checked").each(function(){var g=$(this).val();c.id=g;f.location=a+"/trackster?"+$.param(c)})}})},"View in new visualization":function(){f.location=a+"/trackster?"+$.param(c)}})}});return false}};
\ No newline at end of file
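The functional change in packed hda-edit.js sits at the head of the line: HDAEditView is now built with a chained extend so that LoggableMixin's properties end up on the view. A minimal sketch of the idiom; the mixin body shown is an assumption, since its source file is not part of this diff:

// Chained extend as a Backbone mixin: the first call copies the mixin's
// properties onto an intermediate class, the second adds the view's own.
var LoggableMixin = {
    log : function(){
        if( this.logger ){ return this.logger.log.apply( this.logger, arguments ); }
    }
};
var HDAEditView = HDABaseView.extend( LoggableMixin ).extend({
    initialize : function( attributes ){
        HDABaseView.prototype.initialize.call( this, attributes );
        this.log( this + '.initialize' ); // provided by the mixin
    }
});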
diff -r 2d1402caf21a072edfdcaca0b12f1d38b4421b6e -r 4e50648f6e5d015d253cb3c9e68dd731729b06f7 static/scripts/packed/mvc/dataset/hda-model.js
--- a/static/scripts/packed/mvc/dataset/hda-model.js
+++ b/static/scripts/packed/mvc/dataset/hda-model.js
@@ -1,1 +1,1 @@
-var HistoryDatasetAssociation=BaseModel.extend(LoggableMixin).extend({defaults:{history_id:null,model_class:"HistoryDatasetAssociation",hid:0,id:null,name:"",state:"",data_type:null,file_size:0,meta_files:[],misc_blurb:"",misc_info:"",deleted:false,purged:false,visible:false,accessible:false},url:function(){return"api/histories/"+this.get("history_id")+"/contents/"+this.get("id")},initialize:function(){this.log(this+".initialize",this.attributes);this.log("\tparent history_id: "+this.get("history_id"));if(!this.get("accessible")){this.set("state",HistoryDatasetAssociation.STATES.NOT_VIEWABLE)}this.on("change:state",function(b,a){this.log(this+" has changed state:",b,a);if(this.inReadyState()){this.trigger("state:ready",this.get("id"),a,this.previous("state"),b)}})},isDeletedOrPurged:function(){return(this.get("deleted")||this.get("purged"))},isVisible:function(b,c){var a=true;if((!b)&&(this.get("deleted")||this.get("purged"))){a=false}if((!c)&&(!this.get("visible"))){a=false}return a},inReadyState:function(){var a=this.get("state");return((a===HistoryDatasetAssociation.STATES.NEW)||(a===HistoryDatasetAssociation.STATES.OK)||(a===HistoryDatasetAssociation.STATES.EMPTY)||(a===HistoryDatasetAssociation.STATES.FAILED_METADATA)||(a===HistoryDatasetAssociation.STATES.NOT_VIEWABLE)||(a===HistoryDatasetAssociation.STATES.DISCARDED)||(a===HistoryDatasetAssociation.STATES.ERROR))},hasData:function(){return(this.get("file_size")>0)},toString:function(){var a=this.get("id")||"";if(this.get("name")){a+=':"'+this.get("name")+'"'}return"HistoryDatasetAssociation("+a+")"}});HistoryDatasetAssociation.STATES={UPLOAD:"upload",QUEUED:"queued",RUNNING:"running",SETTING_METADATA:"setting_metadata",NEW:"new",OK:"ok",EMPTY:"empty",FAILED_METADATA:"failed_metadata",NOT_VIEWABLE:"noPermission",DISCARDED:"discarded",ERROR:"error"};var HDACollection=Backbone.Collection.extend(LoggableMixin).extend({model:HistoryDatasetAssociation,initialize:function(){},ids:function(){return this.map(function(a){return a.id})},getVisible:function(a,b){return this.filter(function(c){return c.isVisible(a,b)})},getStateLists:function(){var a={};_.each(_.values(HistoryDatasetAssociation.STATES),function(b){a[b]=[]});this.each(function(b){a[b.get("state")].push(b.get("id"))});return a},running:function(){var a=[];this.each(function(b){if(!b.inReadyState()){a.push(b.get("id"))}});return a},update:function(a){this.log(this+"update:",a);if(!(a&&a.length)){return}var b=this;_.each(a,function(e,c){var d=b.get(e);d.fetch()})},toString:function(){return("HDACollection("+this.ids().join(",")+")")}});
\ No newline at end of file
+var HistoryDatasetAssociation=BaseModel.extend(LoggableMixin).extend({defaults:{history_id:null,model_class:"HistoryDatasetAssociation",hid:0,id:null,name:"",state:"",data_type:null,file_size:0,meta_files:[],misc_blurb:"",misc_info:"",deleted:false,purged:false,visible:false,accessible:false},urlRoot:"api/histories/",url:function(){return"api/histories/"+this.get("history_id")+"/contents/"+this.get("id")},initialize:function(){this.log(this+".initialize",this.attributes);this.log("\tparent history_id: "+this.get("history_id"));if(!this.get("accessible")){this.set("state",HistoryDatasetAssociation.STATES.NOT_VIEWABLE)}this.on("change:state",function(b,a){this.log(this+" has changed state:",b,a);if(this.inReadyState()){this.trigger("state:ready",this.get("id"),a,this.previous("state"),b)}})},isDeletedOrPurged:function(){return(this.get("deleted")||this.get("purged"))},isVisible:function(b,c){var a=true;if((!b)&&(this.get("deleted")||this.get("purged"))){a=false}if((!c)&&(!this.get("visible"))){a=false}return a},inReadyState:function(){var a=this.get("state");return((a===HistoryDatasetAssociation.STATES.NEW)||(a===HistoryDatasetAssociation.STATES.OK)||(a===HistoryDatasetAssociation.STATES.EMPTY)||(a===HistoryDatasetAssociation.STATES.FAILED_METADATA)||(a===HistoryDatasetAssociation.STATES.NOT_VIEWABLE)||(a===HistoryDatasetAssociation.STATES.DISCARDED)||(a===HistoryDatasetAssociation.STATES.ERROR))},hasData:function(){return(this.get("file_size")>0)},toString:function(){var a=this.get("id")||"";if(this.get("name")){a+=':"'+this.get("name")+'"'}return"HistoryDatasetAssociation("+a+")"}});HistoryDatasetAssociation.STATES={UPLOAD:"upload",QUEUED:"queued",RUNNING:"running",SETTING_METADATA:"setting_metadata",NEW:"new",EMPTY:"empty",OK:"ok",FAILED_METADATA:"failed_metadata",NOT_VIEWABLE:"noPermission",DISCARDED:"discarded",ERROR:"error"};var HDACollection=Backbone.Collection.extend(LoggableMixin).extend({model:HistoryDatasetAssociation,initialize:function(){},ids:function(){return this.map(function(a){return a.id})},getVisible:function(a,b){return this.filter(function(c){return c.isVisible(a,b)})},getStateLists:function(){var a={};_.each(_.values(HistoryDatasetAssociation.STATES),function(b){a[b]=[]});this.each(function(b){a[b.get("state")].push(b.get("id"))});return a},running:function(){var a=[];this.each(function(b){if(!b.inReadyState()){a.push(b.get("id"))}});return a},update:function(a){this.log(this+"update:",a);if(!(a&&a.length)){return}var b=this;_.each(a,function(e,c){var d=b.get(e);d.fetch()})},toString:function(){return("HDACollection("+this.ids().join(",")+")")}});
\ No newline at end of file
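Two changes in packed hda-model.js: the HDA model gains urlRoot:"api/histories/" and the STATES literal now lists EMPTY before OK. Because the model also defines url(), Backbone uses that explicit function when building request paths; a sketch of the interaction, with the endpoint shape taken from the diff (the ids below are made up):

// Backbone prefers an explicit url() over urlRoot. HDAs live nested under a
// history, so the model assembles its contents URL by hand:
var HistoryDatasetAssociation = BaseModel.extend( LoggableMixin ).extend({
    urlRoot : 'api/histories/',
    url : function(){
        return 'api/histories/' + this.get( 'history_id' ) + '/contents/' + this.get( 'id' );
    }
});
// new HistoryDatasetAssociation({ history_id : 'abc123', id : 'def456' }).url()
//   -> 'api/histories/abc123/contents/def456'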
diff -r 2d1402caf21a072edfdcaca0b12f1d38b4421b6e -r 4e50648f6e5d015d253cb3c9e68dd731729b06f7 static/scripts/packed/mvc/history/history-model.js
--- a/static/scripts/packed/mvc/history/history-model.js
+++ b/static/scripts/packed/mvc/history/history-model.js
@@ -1,1 +1,1 @@
-var History=BaseModel.extend(LoggableMixin).extend({defaults:{id:"",name:"",state:"",diskSize:0,deleted:false,annotation:null,message:null},url:function(){return"api/histories/"+this.get("id")},initialize:function(a,b){this.log(this+".initialize:",a,b);this.hdas=new HDACollection();if(b&&b.length){this.hdas.reset(b);this.checkForUpdates()}},loadFromApi:function(a,c){var b=this;b.attributes.id=a;jQuery.when(jQuery.ajax("api/users/current"),b.fetch()).then(function(e,d){b.attributes.user=e[0];b.trigger("loaded:user",e[0]);b.trigger("loaded",d[0])}).then(function(){jQuery.ajax(b.url()+"/contents?"+jQuery.param({ids:b.hdaIdsFromStateIds().join(",")})).success(function(d){b.hdas.reset(d);b.checkForUpdates();b.trigger("loaded:hdas",d);if(c){callback(b)}})})},hdaIdsFromStateIds:function(){return _.reduce(_.values(this.get("state_ids")),function(b,a){return b.concat(a)})},checkForUpdates:function(a){if(this.hdas.running().length){this.stateUpdater()}else{this.trigger("ready")}return this},stateUpdater:function(){var c=this,a=this.get("state"),b=this.get("state_ids");jQuery.ajax("api/histories/"+this.get("id")).success(function(d){c.set(d);c.log("current history state:",c.get("state"),"(was)",a,"new size:",c.get("nice_size"));var e=[];_.each(_.keys(d.state_ids),function(g){var f=_.difference(d.state_ids[g],b[g]);e=e.concat(f)});if(e.length){c.hdas.update(e)}if((c.get("state")===HistoryDatasetAssociation.STATES.RUNNING)||(c.get("state")===HistoryDatasetAssociation.STATES.QUEUED)){setTimeout(function(){c.stateUpdater()},4000)}else{c.trigger("ready")}}).error(function(f,d,e){if(console&&console.warn){console.warn("Error getting history updates from the server:",f,d,e)}alert(_l("Error getting history updates from the server.")+"\n"+e)})},toString:function(){var a=(this.get("name"))?(","+this.get("name")):("");return"History("+this.get("id")+a+")"}});var HistoryCollection=Backbone.Collection.extend(LoggableMixin).extend({model:History,urlRoot:"api/histories",});
\ No newline at end of file
+var History=BaseModel.extend(LoggableMixin).extend({defaults:{id:"",name:"",state:"",diskSize:0,deleted:false,annotation:null,message:null},urlRoot:"api/histories/",url:function(){return"api/histories/"+this.get("id")},initialize:function(a,b){this.log(this+".initialize:",a,b);this.hdas=new HDACollection();if(b&&b.length){this.hdas.reset(b);this.checkForUpdates()}},loadFromApi:function(a,c){var b=this;b.attributes.id=a;jQuery.when(jQuery.ajax("api/users/current"),b.fetch()).then(function(e,d){b.attributes.user=e[0];b.trigger("loaded:user",e[0]);b.trigger("loaded",d[0])}).then(function(){jQuery.ajax(b.url()+"/contents?"+jQuery.param({ids:b.hdaIdsFromStateIds().join(",")})).success(function(d){b.hdas.reset(d);b.checkForUpdates();b.trigger("loaded:hdas",d);if(c){c(b)}})})},hdaIdsFromStateIds:function(){return _.reduce(_.values(this.get("state_ids")),function(b,a){return b.concat(a)})},checkForUpdates:function(a){if(this.hdas.running().length){this.stateUpdater()}else{this.trigger("ready")}return this},stateUpdater:function(){var c=this,a=this.get("state"),b=this.get("state_ids");jQuery.ajax("api/histories/"+this.get("id")).success(function(d){c.set(d);c.log("current history state:",c.get("state"),"(was)",a,"new size:",c.get("nice_size"));var e=[];_.each(_.keys(d.state_ids),function(g){var f=_.difference(d.state_ids[g],b[g]);e=e.concat(f)});if(e.length){c.hdas.update(e)}if((c.get("state")===HistoryDatasetAssociation.STATES.RUNNING)||(c.get("state")===HistoryDatasetAssociation.STATES.QUEUED)){setTimeout(function(){c.stateUpdater()},4000)}else{c.trigger("ready")}}).error(function(f,d,e){if(console&&console.warn){console.warn("Error getting history updates from the server:",f,d,e)}alert(_l("Error getting history updates from the server.")+"\n"+e)})},toString:function(){var a=(this.get("name"))?(","+this.get("name")):("");return"History("+this.get("id")+a+")"}});var HistoryCollection=Backbone.Collection.extend(LoggableMixin).extend({model:History,urlRoot:"api/histories"});
\ No newline at end of file
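Besides giving History a urlRoot, this hunk removes the trailing comma from the HistoryCollection literal. Older IE's JScript engine throws a syntax error on trailing commas in object literals, so the collection now ends without one:

// Before (breaks in old IE):
//   Backbone.Collection.extend(LoggableMixin).extend({ model : History, urlRoot : 'api/histories', });
// After:
var HistoryCollection = Backbone.Collection.extend( LoggableMixin ).extend({
    model : History,
    urlRoot : 'api/histories'
});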
diff -r 2d1402caf21a072edfdcaca0b12f1d38b4421b6e -r 4e50648f6e5d015d253cb3c9e68dd731729b06f7 static/scripts/packed/mvc/history/history-panel.js
--- a/static/scripts/packed/mvc/history/history-panel.js
+++ b/static/scripts/packed/mvc/history/history-panel.js
@@ -1,1 +1,1 @@
-var HistoryPanel=BaseView.extend(LoggableMixin).extend({el:"body.historyPage",HDAView:HDAEditView,initialize:function(a){this.log(this+".initialize:",a);if(!a.urlTemplates){throw (this+" needs urlTemplates on initialize")}if(!a.urlTemplates.history){throw (this+" needs urlTemplates.history on initialize")}if(!a.urlTemplates.hda){throw (this+" needs urlTemplates.hda on initialize")}this.urlTemplates=a.urlTemplates.history;this.hdaUrlTemplates=a.urlTemplates.hda;this.storage=new PersistantStorage("HistoryView."+this.model.get("id"),{expandedHdas:{},show_deleted:false,show_hidden:false});this.log("this.storage:",this.storage.get());this.log("show_deleted:",a.show_deleted,"show_hidden",a.show_hidden);if((a.show_deleted===true)||(a.show_deleted===false)){this.storage.set("show_deleted",a.show_deleted)}if((a.show_hidden===true)||(a.show_hidden===false)){this.storage.set("show_hidden",a.show_hidden)}this.show_deleted=this.storage.get("show_deleted");this.show_hidden=this.storage.get("show_hidden");this.log("this.show_deleted:",this.show_deleted,"show_hidden",this.show_hidden);this.log("(now) this.storage:",this.storage.get());this.model.bind("change:nice_size",this.updateHistoryDiskSize,this);this.model.hdas.bind("add",this.add,this);this.model.hdas.bind("reset",this.addAll,this);this.model.hdas.bind("all",this.all,this);this.hdaViews={};this.urls={}},add:function(a){},addAll:function(){this.render()},all:function(a){},renderUrls:function(a){var b=this;b.urls={};_.each(this.urlTemplates,function(d,c){b.urls[c]=_.template(d,a)});return b.urls},render:function(){var b=this,d=b.toString()+".set-up",c=$("<div/>"),a=this.model.toJSON(),e=(this.$el.children().size()===0);a.urls=this.renderUrls(a);c.append(HistoryPanel.templates.historyPanel(a));c.find(".tooltip").tooltip({placement:"bottom"});this.setUpActionButton(c.find("#history-action-popup"));if(!this.model.hdas.length||!this.renderItems(c.find("#"+this.model.get("id")+"-datasets"))){c.find("#emptyHistoryMessage").show()}$(b).queue(d,function(f){b.$el.fadeOut("fast",function(){f()})});$(b).queue(d,function(f){b.$el.html("");b.$el.append(c.children());b.$el.fadeIn("fast",function(){f()})});$(b).queue(d,function(f){this.log(b+" rendered:",b.$el);b.setUpBehaviours();if(e){b.trigger("rendered:initial")}else{b.trigger("rendered")}f()});$(b).dequeue(d);return this},setUpActionButton:function(e){var c=this,d=(this.storage.get("show_deleted"))?("Hide deleted"):("Show deleted"),a=(this.storage.get("show_hidden"))?("Hide hidden"):("Show hidden"),b={};b[_l("refresh")]=function(){window.location.reload()};b[_l("collapse all")]=function(){c.hideAllHdaBodies()};b[_l(d)]=function(){c.toggleShowDeleted()};b[_l(a)]=function(){c.toggleShowHidden()};make_popupmenu(e,b)},renderItems:function(b){this.hdaViews={};var a=this,c=this.model.hdas.getVisible(this.storage.get("show_deleted"),this.storage.get("show_hidden"));_.each(c,function(f){var e=f.get("id"),d=a.storage.get("expandedHdas").get(e);a.hdaViews[e]=new a.HDAView({model:f,expanded:d,urlTemplates:a.hdaUrlTemplates});a.setUpHdaListeners(a.hdaViews[e]);b.prepend(a.hdaViews[e].render().$el)});return c.length},setUpHdaListeners:function(b){var a=this;b.bind("body-visible",function(c){a.storage.get("expandedHdas").set(c,true)});b.bind("body-hidden",function(c){a.storage.get("expandedHdas").deleteKey(c)})},setUpBehaviours:function(){if(!(this.model.get("user")&&this.model.get("user").email)){return}var 
a=this.$("#history-annotation-area");this.$("#history-annotate").click(function(){if(a.is(":hidden")){a.slideDown("fast")}else{a.slideUp("fast")}return false});async_save_text("history-name-container","history-name",this.urls.rename,"new_name",18);async_save_text("history-annotation-container","history-annotation",this.urls.annotate,"new_annotation",18,true,4)},updateHistoryDiskSize:function(){this.$el.find("#history-size").text(this.model.get("nice_size"))},events:{"click #history-tag":"loadAndDisplayTags"},showQuotaMessage:function(a){var b=this.$el.find("#quota-message-container");if(b.is(":hidden")){b.slideDown("fast")}},hideQuotaMessage:function(a){var b=this.$el.find("#quota-message-container");if(!b.is(":hidden")){b.slideUp("fast")}},toggleShowDeleted:function(a,c,b){this.storage.set("show_deleted",!this.storage.get("show_deleted"));this.render()},toggleShowHidden:function(){this.storage.set("show_hidden",!this.storage.get("show_hidden"));this.render()},hideAllHdaBodies:function(){_.each(this.hdaViews,function(a){a.toggleBodyVisibility(null,false)});this.storage.set("expandedHdas",{})},loadAndDisplayTags:function(c){this.log(this+".loadAndDisplayTags",c);var d=this.$el.find("#history-tag-area"),b=d.find(".tag-elt");this.log("\t tagArea",d," tagElt",b);if(d.is(":hidden")){if(!jQuery.trim(b.html())){var a=this;$.ajax({url:a.urls.tag,error:function(){alert(_l("Tagging failed"))},success:function(e){b.html(e);b.find(".tooltip").tooltip();d.slideDown("fast")}})}else{d.slideDown("fast")}}else{d.slideUp("fast")}return false},toString:function(){var a=this.model.get("name")||"";return"HistoryView("+a+")"}});HistoryPanel.templates={historyPanel:Handlebars.templates["template-history-historyPanel"]};
\ No newline at end of file
+var HistoryPanel=BaseView.extend(LoggableMixin).extend({el:"body.historyPage",HDAView:HDAEditView,events:{"click #history-tag":"loadAndDisplayTags"},initialize:function(a){this.log(this+".initialize:",a);if(!a.urlTemplates){throw (this+" needs urlTemplates on initialize")}if(!a.urlTemplates.history){throw (this+" needs urlTemplates.history on initialize")}if(!a.urlTemplates.hda){throw (this+" needs urlTemplates.hda on initialize")}this.urlTemplates=a.urlTemplates.history;this.hdaUrlTemplates=a.urlTemplates.hda;this._setUpWebStorage(a.initiallyExpanded,a.show_deleted,a.show_hidden);this.model.bind("change:nice_size",this.updateHistoryDiskSize,this);this.model.hdas.bind("add",this.add,this);this.model.hdas.bind("reset",this.addAll,this);this.hdaViews={};this.urls={}},_setUpWebStorage:function(b,a,c){this.storage=new PersistantStorage("HistoryView."+this.model.get("id"),{expandedHdas:{},show_deleted:false,show_hidden:false});this.log("this.storage:",this.storage.get());if(b){this.storage.set("expandedHdas",b)}if((a===true)||(a===false)){this.storage.set("show_deleted",a)}if((c===true)||(c===false)){this.storage.set("show_hidden",c)}this.show_deleted=this.storage.get("show_deleted");this.show_hidden=this.storage.get("show_hidden");this.log("(init'd) this.storage:",this.storage.get())},add:function(a){},addAll:function(){this.render()},render:function(){var b=this,d=b.toString()+".set-up",c=$("<div/>"),a=this.model.toJSON(),e=(this.$el.children().size()===0);a.urls=this._renderUrls(a);c.append(HistoryPanel.templates.historyPanel(a));c.find(".tooltip").tooltip({placement:"bottom"});this._setUpActionButton(c.find("#history-action-popup"));if(!this.model.hdas.length||!this.renderItems(c.find("#"+this.model.get("id")+"-datasets"))){c.find("#emptyHistoryMessage").show()}$(b).queue(d,function(f){b.$el.fadeOut("fast",function(){f()})});$(b).queue(d,function(f){b.$el.html("");b.$el.append(c.children());b.$el.fadeIn("fast",function(){f()})});$(b).queue(d,function(f){this.log(b+" rendered:",b.$el);b._setUpBehaviours();if(e){b.trigger("rendered:initial")}else{b.trigger("rendered")}f()});$(b).dequeue(d);return this},_renderUrls:function(a){var b=this;b.urls={};_.each(this.urlTemplates,function(d,c){b.urls[c]=_.template(d,a)});return b.urls},_setUpActionButton:function(e){var c=this,d=(this.storage.get("show_deleted"))?("Hide deleted"):("Show deleted"),a=(this.storage.get("show_hidden"))?("Hide hidden"):("Show hidden"),b={};b[_l("refresh")]=function(){window.location.reload()};b[_l("collapse all")]=function(){c.hideAllHdaBodies()};b[_l(d)]=function(){c.toggleShowDeleted()};b[_l(a)]=function(){c.toggleShowHidden()};make_popupmenu(e,b)},renderItems:function(b){this.hdaViews={};var a=this,c=this.model.hdas.getVisible(this.storage.get("show_deleted"),this.storage.get("show_hidden"));_.each(c,function(f){var e=f.get("id"),d=a.storage.get("expandedHdas").get(e);a.hdaViews[e]=new a.HDAView({model:f,expanded:d,urlTemplates:a.hdaUrlTemplates});a._setUpHdaListeners(a.hdaViews[e]);b.prepend(a.hdaViews[e].render().$el)});return c.length},_setUpHdaListeners:function(b){var a=this;b.bind("body-expanded",function(c){a.storage.get("expandedHdas").set(c,true)});b.bind("body-collapsed",function(c){a.storage.get("expandedHdas").deleteKey(c)})},_setUpBehaviours:function(){if(!(this.model.get("user")&&this.model.get("user").email)){return}var a=this.$("#history-annotation-area");this.$("#history-annotate").click(function(){if(a.is(":hidden")){a.slideDown("fast")}else{a.slideUp("fast")}return false});async_save_text("history-name-container","history-name",this.urls.rename,"new_name",18);async_save_text("history-annotation-container","history-annotation",this.urls.annotate,"new_annotation",18,true,4)},updateHistoryDiskSize:function(){this.$el.find("#history-size").text(this.model.get("nice_size"))},showQuotaMessage:function(){var a=this.$el.find("#quota-message-container");if(a.is(":hidden")){a.slideDown("fast")}},hideQuotaMessage:function(){var a=this.$el.find("#quota-message-container");if(!a.is(":hidden")){a.slideUp("fast")}},toggleShowDeleted:function(){this.storage.set("show_deleted",!this.storage.get("show_deleted"));this.render()},toggleShowHidden:function(){this.storage.set("show_hidden",!this.storage.get("show_hidden"));this.render()},hideAllHdaBodies:function(){_.each(this.hdaViews,function(a){a.toggleBodyVisibility(null,false)});this.storage.set("expandedHdas",{})},loadAndDisplayTags:function(c){this.log(this+".loadAndDisplayTags",c);var d=this.$el.find("#history-tag-area"),b=d.find(".tag-elt");this.log("\t tagArea",d," tagElt",b);if(d.is(":hidden")){if(!jQuery.trim(b.html())){var a=this;$.ajax({url:a.urls.tag,error:function(){alert(_l("Tagging failed"))},success:function(e){b.html(e);b.find(".tooltip").tooltip();d.slideDown("fast")}})}else{d.slideDown("fast")}}else{d.slideUp("fast")}return false},toString:function(){var a=this.model.get("name")||"";return"HistoryPanel("+a+")"}});HistoryPanel.templates={historyPanel:Handlebars.templates["template-history-historyPanel"]};
\ No newline at end of file
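The history-panel.js rewrite is mostly a naming pass: storage setup moves into _setUpWebStorage, the internal helpers gain a leading underscore, the events hash moves to the top of the literal, and the panel now listens for the body-expanded/body-collapsed events triggered by HDABaseView.toggleBodyVisibility above. A readable sketch of the expanded-HDA bookkeeping; PersistantStorage's nested get/set/deleteKey API is inferred from the calls in this diff:

var HistoryPanel = BaseView.extend( LoggableMixin ).extend({
    // remember which HDA bodies are open so a later render can restore them
    _setUpHdaListeners : function( hdaView ){
        var panel = this;
        hdaView.bind( 'body-expanded', function( id ){
            panel.storage.get( 'expandedHdas' ).set( id, true );
        });
        hdaView.bind( 'body-collapsed', function( id ){
            panel.storage.get( 'expandedHdas' ).deleteKey( id );
        });
    }
});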
diff -r 2d1402caf21a072edfdcaca0b12f1d38b4421b6e -r 4e50648f6e5d015d253cb3c9e68dd731729b06f7 static/scripts/packed/mvc/user/user-model.js
--- a/static/scripts/packed/mvc/user/user-model.js
+++ b/static/scripts/packed/mvc/user/user-model.js
@@ -1,1 +1,1 @@
-var User=BaseModel.extend(LoggableMixin).extend({urlRoot:"api/users",defaults:{id:null,username:"("+_l("anonymous user")+")",email:"",total_disk_usage:0,nice_total_disk_usage:"0 bytes"},initialize:function(a){this.log("User.initialize:",a);this.on("loaded",function(b,c){this.log(this+" has loaded:",b,c)});this.on("change",function(b,c){this.log(this+" has changed:",b,c.changes)})},loadFromApi:function(d,b){d=d||User.CURRENT_ID_STR;b=b||{};var a=this,c=b.success;b.success=function(f,e){a.trigger("loaded",f,e);if(c){c(f,e)}};if(d===User.CURRENT_ID_STR){b.url=this.urlRoot+"/"+User.CURRENT_ID_STR}return BaseModel.prototype.fetch.call(this,b)},toString:function(){var a=[this.get("username")];if(this.get("id")){a.unshift(this.get("id"));a.push(this.get("email"))}return"User("+a.join(":")+")"}});User.CURRENT_ID_STR="current";User.getCurrentUserFromApi=function(b){var a=new User();a.loadFromApi(User.CURRENT_ID_STR,b);return a};var UserCollection=Backbone.Collection.extend(LoggableMixin).extend({model:User,urlRoot:"api/users"});
\ No newline at end of file
+var User=BaseModel.extend(LoggableMixin).extend({urlRoot:"api/users",defaults:{id:null,username:"("+_l("anonymous user")+")",email:"",total_disk_usage:0,nice_total_disk_usage:"0 bytes",quota_percent:null},initialize:function(a){this.log("User.initialize:",a);this.on("loaded",function(b,c){this.log(this+" has loaded:",b,c)});this.on("change",function(b,c){this.log(this+" has changed:",b,c.changes)})},isAnonymous:function(){return(!this.get("email"))},loadFromApi:function(d,b){d=d||User.CURRENT_ID_STR;b=b||{};var a=this,c=b.success;b.success=function(f,e){a.trigger("loaded",f,e);if(c){c(f,e)}};if(d===User.CURRENT_ID_STR){b.url=this.urlRoot+"/"+User.CURRENT_ID_STR}return BaseModel.prototype.fetch.call(this,b)},toString:function(){var a=[this.get("username")];if(this.get("id")){a.unshift(this.get("id"));a.push(this.get("email"))}return"User("+a.join(":")+")"}});User.CURRENT_ID_STR="current";User.getCurrentUserFromApi=function(b){var a=new User();a.loadFromApi(User.CURRENT_ID_STR,b);return a};var UserCollection=Backbone.Collection.extend(LoggableMixin).extend({model:User,urlRoot:"api/users"});
\ No newline at end of file
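user-model.js picks up a quota_percent default and an isAnonymous predicate that keys off the empty-string email default; a minimal sketch of the new pieces (example address is illustrative):

// Anonymous users have no email, so the '' default doubles as the flag.
var User = BaseModel.extend( LoggableMixin ).extend({
    defaults : { id : null, email : '', quota_percent : null },
    isAnonymous : function(){
        return ( !this.get( 'email' ) );
    }
});
// new User().isAnonymous()                                  -> true
// new User({ email : 'someone@example.org' }).isAnonymous() -> false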
diff -r 2d1402caf21a072edfdcaca0b12f1d38b4421b6e -r 4e50648f6e5d015d253cb3c9e68dd731729b06f7 static/scripts/packed/templates/compiled/template-history-historyPanel.js
--- a/static/scripts/packed/templates/compiled/template-history-historyPanel.js
+++ b/static/scripts/packed/templates/compiled/template-history-historyPanel.js
@@ -1,1 +1,1 @@
-(function(){var b=Handlebars.template,a=Handlebars.templates=Handlebars.templates||{};a["template-history-historyPanel"]=b(function(k,A,y,q,I){y=y||k.helpers;var z="",n,m,v=this,e="function",c=y.blockHelperMissing,d=this.escapeExpression;function t(N,M){var K="",L,J;K+='\n <div id="history-name" class="tooltip editable-text"\n title="';J=y.local;if(J){L=J.call(N,{hash:{},inverse:v.noop,fn:v.program(2,s,M)})}else{L=N.local;L=typeof L===e?L():L}if(!y.local){L=c.call(N,L,{hash:{},inverse:v.noop,fn:v.program(2,s,M)})}if(L||L===0){K+=L}K+='">';J=y.name;if(J){L=J.call(N,{hash:{}})}else{L=N.name;L=typeof L===e?L():L}K+=d(L)+"</div>\n ";return K}function s(K,J){return"Click to rename history"}function r(N,M){var K="",L,J;K+='\n <div id="history-name" class="tooltip"\n title="';J=y.local;if(J){L=J.call(N,{hash:{},inverse:v.noop,fn:v.program(5,p,M)})}else{L=N.local;L=typeof L===e?L():L}if(!y.local){L=c.call(N,L,{hash:{},inverse:v.noop,fn:v.program(5,p,M)})}if(L||L===0){K+=L}K+='">';J=y.name;if(J){L=J.call(N,{hash:{}})}else{L=N.name;L=typeof L===e?L():L}K+=d(L)+"</div>\n ";return K}function p(K,J){return"You must be logged in to edit your history name"}function o(K,J){return"Click to see more actions"}function j(N,M){var K="",L,J;K+='\n <div id="history-secondary-links" style="float: right;">\n <a id="history-tag" title="';J=y.local;if(J){L=J.call(N,{hash:{},inverse:v.noop,fn:v.program(10,H,M)})}else{L=N.local;L=typeof L===e?L():L}if(!y.local){L=c.call(N,L,{hash:{},inverse:v.noop,fn:v.program(10,H,M)})}if(L||L===0){K+=L}K+='"\n class="icon-button tags tooltip" target="galaxy_main" href="javascript:void(0)"></a>\n <a id="history-annotate" title="';J=y.local;if(J){L=J.call(N,{hash:{},inverse:v.noop,fn:v.program(12,G,M)})}else{L=N.local;L=typeof L===e?L():L}if(!y.local){L=c.call(N,L,{hash:{},inverse:v.noop,fn:v.program(12,G,M)})}if(L||L===0){K+=L}K+='"\n class="icon-button annotate tooltip" target="galaxy_main" href="javascript:void(0)"></a>\n </div>\n ';return K}function H(K,J){return"Edit history tags"}function G(K,J){return"Edit history annotation"}function F(N,M){var K="",L,J;K+="\n ";J=y.warningmessagesmall;if(J){L=J.call(N,{hash:{},inverse:v.noop,fn:v.program(15,E,M)})}else{L=N.warningmessagesmall;L=typeof L===e?L():L}if(!y.warningmessagesmall){L=c.call(N,L,{hash:{},inverse:v.noop,fn:v.program(15,E,M)})}if(L||L===0){K+=L}K+="\n ";return K}function E(M,L){var K,J;J=y.local;if(J){K=J.call(M,{hash:{},inverse:v.noop,fn:v.program(16,D,L)})}else{K=M.local;K=typeof K===e?K():K}if(!y.local){K=c.call(M,K,{hash:{},inverse:v.noop,fn:v.program(16,D,L)})}if(K||K===0){return K}else{return""}}function D(K,J){return"You are currently viewing a deleted history!"}function C(N,M){var K="",L,J;K+='\n <div id="history-tag-annotation">\n\n <div id="history-tag-area" style="display: none">\n <strong>';J=y.local;if(J){L=J.call(N,{hash:{},inverse:v.noop,fn:v.program(19,B,M)})}else{L=N.local;L=typeof L===e?L():L}if(!y.local){L=c.call(N,L,{hash:{},inverse:v.noop,fn:v.program(19,B,M)})}if(L||L===0){K+=L}K+=':</strong>\n <div class="tag-elt"></div>\n </div>\n\n <div id="history-annotation-area" style="display: none">\n <strong>';J=y.local;if(J){L=J.call(N,{hash:{},inverse:v.noop,fn:v.program(21,l,M)})}else{L=N.local;L=typeof L===e?L():L}if(!y.local){L=c.call(N,L,{hash:{},inverse:v.noop,fn:v.program(21,l,M)})}if(L||L===0){K+=L}K+=':</strong>\n <div id="history-annotation-container">\n <div id="history-annotation" class="tooltip editable-text"\n 
title="';J=y.local;if(J){L=J.call(N,{hash:{},inverse:v.noop,fn:v.program(23,i,M)})}else{L=N.local;L=typeof L===e?L():L}if(!y.local){L=c.call(N,L,{hash:{},inverse:v.noop,fn:v.program(23,i,M)})}if(L||L===0){K+=L}K+='">\n ';L=N.annotation;L=y["if"].call(N,L,{hash:{},inverse:v.program(27,g,M),fn:v.program(25,h,M)});if(L||L===0){K+=L}K+="\n </div>\n </div>\n </div>\n </div>\n ";return K}function B(K,J){return"Tags"}function l(K,J){return"Annotation"}function i(K,J){return"Click to edit annotation"}function h(N,M){var K="",L,J;K+="\n ";J=y.annotation;if(J){L=J.call(N,{hash:{}})}else{L=N.annotation;L=typeof L===e?L():L}K+=d(L)+"\n ";return K}function g(N,M){var K="",L,J;K+="\n <em>";J=y.local;if(J){L=J.call(N,{hash:{},inverse:v.noop,fn:v.program(28,f,M)})}else{L=N.local;L=typeof L===e?L():L}if(!y.local){L=c.call(N,L,{hash:{},inverse:v.noop,fn:v.program(28,f,M)})}if(L||L===0){K+=L}K+="</em>\n ";return K}function f(K,J){return"Describe or add notes to history"}function x(N,M){var K="",L,J;K+='\n <div id="message-container">\n <div class="';J=y.status;if(J){L=J.call(N,{hash:{}})}else{L=N.status;L=typeof L===e?L():L}K+=d(L)+'message">\n ';J=y.message;if(J){L=J.call(N,{hash:{}})}else{L=N.message;L=typeof L===e?L():L}K+=d(L)+"\n </div><br />\n </div>\n ";return K}function w(K,J){return"You are over your disk quota.\n Tool execution is on hold until your disk usage drops below your allocated quota."}function u(K,J){return"Your history is empty. Click 'Get Data' on the left pane to start"}z+='\n<div id="history-controls">\n <div id="history-title-area" class="historyLinks">\n\n <div id="history-name-container" style="float: left;">\n ';z+="\n ";n=A.user;n=n==null||n===false?n:n.email;n=y["if"].call(A,n,{hash:{},inverse:v.program(4,r,I),fn:v.program(1,t,I)});if(n||n===0){z+=n}z+='\n </div>\n\n <a id="history-action-popup" class="tooltip" title="';m=y.local;if(m){n=m.call(A,{hash:{},inverse:v.noop,fn:v.program(7,o,I)})}else{n=A.local;n=typeof n===e?n():n}if(!y.local){n=c.call(A,n,{hash:{},inverse:v.noop,fn:v.program(7,o,I)})}if(n||n===0){z+=n}z+='"\n href="javascript:void(0);" style="float: right;">\n <span class="ficon cog large"></span>\n </a>\n <div style="clear: both;"></div>\n </div>\n\n <div id="history-subtitle-area">\n <div id="history-size" style="float:left;">';m=y.nice_size;if(m){n=m.call(A,{hash:{}})}else{n=A.nice_size;n=typeof n===e?n():n}z+=d(n)+"</div>\n ";n=A.user;n=n==null||n===false?n:n.email;n=y["if"].call(A,n,{hash:{},inverse:v.noop,fn:v.program(9,j,I)});if(n||n===0){z+=n}z+='\n <div style="clear: both;"></div>\n </div>\n\n ';n=A.deleted;n=y["if"].call(A,n,{hash:{},inverse:v.noop,fn:v.program(14,F,I)});if(n||n===0){z+=n}z+="\n\n ";z+="\n ";z+="\n ";n=A.user;n=n==null||n===false?n:n.email;n=y["if"].call(A,n,{hash:{},inverse:v.noop,fn:v.program(18,C,I)});if(n||n===0){z+=n}z+="\n\n ";n=A.message;n=y["if"].call(A,n,{hash:{},inverse:v.noop,fn:v.program(30,x,I)});if(n||n===0){z+=n}z+='\n\n <div id="quota-message-container" style="display: none">\n <div id="quota-message" class="errormessage">\n ';m=y.local;if(m){n=m.call(A,{hash:{},inverse:v.noop,fn:v.program(32,w,I)})}else{n=A.local;n=typeof n===e?n():n}if(!y.local){n=c.call(A,n,{hash:{},inverse:v.noop,fn:v.program(32,w,I)})}if(n||n===0){z+=n}z+='\n </div>\n </div>\n</div>\n\n<div id="';m=y.id;if(m){n=m.call(A,{hash:{}})}else{n=A.id;n=typeof n===e?n():n}z+=d(n)+'-datasets" class="history-datasets-list"></div>\n\n<div class="infomessagesmall" id="emptyHistoryMessage" style="display: none;">\n 
';m=y.local;if(m){n=m.call(A,{hash:{},inverse:v.noop,fn:v.program(34,u,I)})}else{n=A.local;n=typeof n===e?n():n}if(!y.local){n=c.call(A,n,{hash:{},inverse:v.noop,fn:v.program(34,u,I)})}if(n||n===0){z+=n}z+="\n</div>";return z})})();
\ No newline at end of file
+(function(){var b=Handlebars.template,a=Handlebars.templates=Handlebars.templates||{};a["template-history-historyPanel"]=b(function(k,A,y,q,I){y=y||k.helpers;var z="",n,m,v=this,e="function",c=y.blockHelperMissing,d=this.escapeExpression;function t(N,M){var K="",L,J;K+='\n <div id="history-name" class="tooltip editable-text"\n title="';J=y.local;if(J){L=J.call(N,{hash:{},inverse:v.noop,fn:v.program(2,s,M)})}else{L=N.local;L=typeof L===e?L():L}if(!y.local){L=c.call(N,L,{hash:{},inverse:v.noop,fn:v.program(2,s,M)})}if(L||L===0){K+=L}K+='">';J=y.name;if(J){L=J.call(N,{hash:{}})}else{L=N.name;L=typeof L===e?L():L}K+=d(L)+"</div>\n ";return K}function s(K,J){return"Click to rename history"}function r(N,M){var K="",L,J;K+='\n <div id="history-name" class="tooltip"\n title="';J=y.local;if(J){L=J.call(N,{hash:{},inverse:v.noop,fn:v.program(5,p,M)})}else{L=N.local;L=typeof L===e?L():L}if(!y.local){L=c.call(N,L,{hash:{},inverse:v.noop,fn:v.program(5,p,M)})}if(L||L===0){K+=L}K+='">';J=y.name;if(J){L=J.call(N,{hash:{}})}else{L=N.name;L=typeof L===e?L():L}K+=d(L)+"</div>\n ";return K}function p(K,J){return"You must be logged in to edit your history name"}function o(K,J){return"Click to see more actions"}function j(N,M){var K="",L,J;K+='\n <div id="history-secondary-links" style="float: right;">\n <a id="history-tag" title="';J=y.local;if(J){L=J.call(N,{hash:{},inverse:v.noop,fn:v.program(10,H,M)})}else{L=N.local;L=typeof L===e?L():L}if(!y.local){L=c.call(N,L,{hash:{},inverse:v.noop,fn:v.program(10,H,M)})}if(L||L===0){K+=L}K+='"\n class="icon-button tags tooltip" target="galaxy_main" href="javascript:void(0)"></a>\n <a id="history-annotate" title="';J=y.local;if(J){L=J.call(N,{hash:{},inverse:v.noop,fn:v.program(12,G,M)})}else{L=N.local;L=typeof L===e?L():L}if(!y.local){L=c.call(N,L,{hash:{},inverse:v.noop,fn:v.program(12,G,M)})}if(L||L===0){K+=L}K+='"\n class="icon-button annotate tooltip" target="galaxy_main" href="javascript:void(0)"></a>\n </div>\n ';return K}function H(K,J){return"Edit history tags"}function G(K,J){return"Edit history annotation"}function F(N,M){var K="",L,J;K+="\n ";J=y.warningmessagesmall;if(J){L=J.call(N,{hash:{},inverse:v.noop,fn:v.program(15,E,M)})}else{L=N.warningmessagesmall;L=typeof L===e?L():L}if(!y.warningmessagesmall){L=c.call(N,L,{hash:{},inverse:v.noop,fn:v.program(15,E,M)})}if(L||L===0){K+=L}K+="\n ";return K}function E(M,L){var K,J;J=y.local;if(J){K=J.call(M,{hash:{},inverse:v.noop,fn:v.program(16,D,L)})}else{K=M.local;K=typeof K===e?K():K}if(!y.local){K=c.call(M,K,{hash:{},inverse:v.noop,fn:v.program(16,D,L)})}if(K||K===0){return K}else{return""}}function D(K,J){return"You are currently viewing a deleted history!"}function C(N,M){var K="",L,J;K+='\n <div id="history-tag-annotation">\n\n <div id="history-tag-area" style="display: none">\n <strong>';J=y.local;if(J){L=J.call(N,{hash:{},inverse:v.noop,fn:v.program(19,B,M)})}else{L=N.local;L=typeof L===e?L():L}if(!y.local){L=c.call(N,L,{hash:{},inverse:v.noop,fn:v.program(19,B,M)})}if(L||L===0){K+=L}K+=':</strong>\n <div class="tag-elt"></div>\n </div>\n\n <div id="history-annotation-area" style="display: none">\n <strong>';J=y.local;if(J){L=J.call(N,{hash:{},inverse:v.noop,fn:v.program(21,l,M)})}else{L=N.local;L=typeof L===e?L():L}if(!y.local){L=c.call(N,L,{hash:{},inverse:v.noop,fn:v.program(21,l,M)})}if(L||L===0){K+=L}K+=':</strong>\n <div id="history-annotation-container">\n <div id="history-annotation" class="tooltip editable-text"\n 
title="';J=y.local;if(J){L=J.call(N,{hash:{},inverse:v.noop,fn:v.program(23,i,M)})}else{L=N.local;L=typeof L===e?L():L}if(!y.local){L=c.call(N,L,{hash:{},inverse:v.noop,fn:v.program(23,i,M)})}if(L||L===0){K+=L}K+='">\n ';L=N.annotation;L=y["if"].call(N,L,{hash:{},inverse:v.program(27,g,M),fn:v.program(25,h,M)});if(L||L===0){K+=L}K+="\n </div>\n </div>\n </div>\n </div>\n ";return K}function B(K,J){return"Tags"}function l(K,J){return"Annotation"}function i(K,J){return"Click to edit annotation"}function h(N,M){var K="",L,J;K+="\n ";J=y.annotation;if(J){L=J.call(N,{hash:{}})}else{L=N.annotation;L=typeof L===e?L():L}K+=d(L)+"\n ";return K}function g(N,M){var K="",L,J;K+="\n <em>";J=y.local;if(J){L=J.call(N,{hash:{},inverse:v.noop,fn:v.program(28,f,M)})}else{L=N.local;L=typeof L===e?L():L}if(!y.local){L=c.call(N,L,{hash:{},inverse:v.noop,fn:v.program(28,f,M)})}if(L||L===0){K+=L}K+="</em>\n ";return K}function f(K,J){return"Describe or add notes to history"}function x(N,M){var K="",L,J;K+='\n <div id="message-container">\n <div class="';J=y.status;if(J){L=J.call(N,{hash:{}})}else{L=N.status;L=typeof L===e?L():L}K+=d(L)+'message">\n ';J=y.message;if(J){L=J.call(N,{hash:{}})}else{L=N.message;L=typeof L===e?L():L}K+=d(L)+"\n </div><br />\n </div>\n ";return K}function w(K,J){return"You are over your disk quota.\n Tool execution is on hold until your disk usage drops below your allocated quota."}function u(K,J){return"Your history is empty. Click 'Get Data' on the left pane to start"}z+='<div id="history-controls">\n <div id="history-title-area" class="historyLinks">\n\n ';z+='\n <div id="history-name-container" style="float: left;">\n ';z+="\n ";n=A.user;n=n==null||n===false?n:n.email;n=y["if"].call(A,n,{hash:{},inverse:v.program(4,r,I),fn:v.program(1,t,I)});if(n||n===0){z+=n}z+='\n </div>\n\n <a id="history-action-popup" class="tooltip" title="';m=y.local;if(m){n=m.call(A,{hash:{},inverse:v.noop,fn:v.program(7,o,I)})}else{n=A.local;n=typeof n===e?n():n}if(!y.local){n=c.call(A,n,{hash:{},inverse:v.noop,fn:v.program(7,o,I)})}if(n||n===0){z+=n}z+='"\n href="javascript:void(0);" style="float: right;">\n <span class="ficon cogs large"></span>\n </a>\n <div style="clear: both;"></div>\n </div>\n\n <div id="history-subtitle-area">\n <div id="history-size" style="float:left;">';m=y.nice_size;if(m){n=m.call(A,{hash:{}})}else{n=A.nice_size;n=typeof n===e?n():n}z+=d(n)+"</div>\n ";n=A.user;n=n==null||n===false?n:n.email;n=y["if"].call(A,n,{hash:{},inverse:v.noop,fn:v.program(9,j,I)});if(n||n===0){z+=n}z+='\n <div style="clear: both;"></div>\n </div>\n\n ';n=A.deleted;n=y["if"].call(A,n,{hash:{},inverse:v.noop,fn:v.program(14,F,I)});if(n||n===0){z+=n}z+="\n\n ";z+="\n ";z+="\n ";n=A.user;n=n==null||n===false?n:n.email;n=y["if"].call(A,n,{hash:{},inverse:v.noop,fn:v.program(18,C,I)});if(n||n===0){z+=n}z+="\n\n ";n=A.message;n=y["if"].call(A,n,{hash:{},inverse:v.noop,fn:v.program(30,x,I)});if(n||n===0){z+=n}z+='\n\n <div id="quota-message-container" style="display: none">\n <div id="quota-message" class="errormessage">\n ';m=y.local;if(m){n=m.call(A,{hash:{},inverse:v.noop,fn:v.program(32,w,I)})}else{n=A.local;n=typeof n===e?n():n}if(!y.local){n=c.call(A,n,{hash:{},inverse:v.noop,fn:v.program(32,w,I)})}if(n||n===0){z+=n}z+='\n </div>\n </div>\n</div>\n\n<div id="';m=y.id;if(m){n=m.call(A,{hash:{}})}else{n=A.id;n=typeof n===e?n():n}z+=d(n)+'-datasets" class="history-datasets-list"></div>\n\n<div class="infomessagesmall" id="emptyHistoryMessage" style="display: none;">\n 
';m=y.local;if(m){n=m.call(A,{hash:{},inverse:v.noop,fn:v.program(34,u,I)})}else{n=A.local;n=typeof n===e?n():n}if(!y.local){n=c.call(A,n,{hash:{},inverse:v.noop,fn:v.program(34,u,I)})}if(n||n===0){z+=n}z+="\n</div>";return z})})();
\ No newline at end of file
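The compiled template above is machine output from the Handlebars precompiler; the visible changes include a dropped leading newline and the action icon class moving from "cog" to "cogs". Precompiled templates are looked up and invoked as plain functions, as the views in this changeset do. A usage sketch with made-up context values (context keys taken from the template source):

// Rendering a precompiled Handlebars template.
var template = Handlebars.templates[ 'template-history-historyPanel' ];
var html = template({
    id        : 'abc123',
    name      : 'My history',
    nice_size : '42.5 MB',
    user      : { email : 'someone@example.org' }
});
$( 'body.historyPage' ).html( html );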
diff -r 2d1402caf21a072edfdcaca0b12f1d38b4421b6e -r 4e50648f6e5d015d253cb3c9e68dd731729b06f7 static/scripts/packed/viz/circster.js
--- a/static/scripts/packed/viz/circster.js
+++ b/static/scripts/packed/viz/circster.js
@@ -1,1 +1,1 @@
-define(["libs/underscore","libs/d3","viz/visualization"],function(g,l,i){var m=Backbone.Model.extend({is_visible:function(q,n){var o=q.getBoundingClientRect(),p=$("svg")[0].getBoundingClientRect();if(o.right<0||o.left>p.right||o.bottom<0||o.top>p.bottom){return false}return true}});var h={drawTicks:function(r,q,v,p,n){var u=r.append("g").selectAll("g").data(q).enter().append("g").selectAll("g").data(v).enter().append("g").attr("class","tick").attr("transform",function(w){return"rotate("+(w.angle*180/Math.PI-90)+")translate("+w.radius+",0)"});var t=[],s=[],o=function(w){return w.angle>Math.PI?"end":null};if(n){t=[0,0,0,-4];s=[4,0,"",".35em"];o=null}else{t=[1,0,4,0];s=[0,4,".35em",""]}u.append("line").attr("x1",t[0]).attr("y1",t[1]).attr("x2",t[2]).attr("y1",t[3]).style("stroke","#000");u.append("text").attr("x",s[0]).attr("y",s[1]).attr("dx",s[2]).attr("dy",s[3]).attr("text-anchor",o).attr("transform",p).text(function(w){return w.label})},formatNum:function(o,n){var q=null;if(o<1){q=o.toPrecision(n)}else{var p=Math.round(o.toPrecision(n));if(o<1000){q=p}else{if(o<1000000){q=Math.round((p/1000).toPrecision(3)).toFixed(0)+"K"}else{if(o<1000000000){q=Math.round((p/1000000).toPrecision(3)).toFixed(0)+"M"}}}}return q}};var c=Backbone.Model.extend({});var a=Backbone.View.extend({className:"circster",initialize:function(n){this.total_gap=n.total_gap;this.genome=n.genome;this.dataset_arc_height=n.dataset_arc_height;this.track_gap=10;this.label_arc_height=50;this.scale=1;this.circular_views=null;this.chords_views=null;this.model.get("tracks").on("add",this.add_track,this);this.model.get("tracks").on("remove",this.remove_track,this);this.get_circular_tracks()},get_circular_tracks:function(){return this.model.get("tracks").filter(function(n){return n.get("track_type")!=="DiagonalHeatmapTrack"})},get_chord_tracks:function(){return this.model.get("tracks").filter(function(n){return n.get("track_type")==="DiagonalHeatmapTrack"})},get_tracks_bounds:function(){var o=this.get_circular_tracks();dataset_arc_height=this.dataset_arc_height,min_dimension=Math.min(this.$el.width(),this.$el.height()),radius_start=min_dimension/2-o.length*(this.dataset_arc_height+this.track_gap)-(this.label_arc_height+this.track_gap),tracks_start_radii=l.range(radius_start,min_dimension/2,this.dataset_arc_height+this.track_gap);var n=this;return g.map(tracks_start_radii,function(p){return[p,p+n.dataset_arc_height]})},render:function(){var w=this,q=this.dataset_arc_height,n=w.$el.width(),v=w.$el.height(),s=this.get_circular_tracks(),p=this.get_chord_tracks(),r=this.get_tracks_bounds(),o=l.select(w.$el[0]).append("svg").attr("width",n).attr("height",v).attr("pointer-events","all").append("svg:g").call(l.behavior.zoom().on("zoom",function(){var x=l.event.scale;o.attr("transform","translate("+l.event.translate+") scale("+x+")");if(w.scale!==x){if(w.zoom_drag_timeout){clearTimeout(w.zoom_drag_timeout)}w.zoom_drag_timeout=setTimeout(function(){},400)}})).attr("transform","translate("+n/2+","+v/2+")").append("svg:g").attr("class","tracks");this.circular_views=s.map(function(y,z){var A=(y.get("track_type")==="LineTrack"?d:e),x=new A({el:o.append("g")[0],track:y,radius_bounds:r[z],genome:w.genome,total_gap:w.total_gap});x.render();return x});this.chords_views=p.map(function(y){var x=new j({el:o.append("g")[0],track:y,radius_bounds:r[0],genome:w.genome,total_gap:w.total_gap});x.render();return x});var u=this.circular_views[this.circular_views.length-1].radius_bounds[1],t=[u,u+this.label_arc_height];this.label_track_view=new 
b({el:o.append("g")[0],track:new c(),radius_bounds:t,genome:w.genome,total_gap:w.total_gap});this.label_track_view.render()},add_track:function(t){if(t.get("track_type")==="DiagonalHeatmapTrack"){var p=this.circular_views[0].radius_bounds,s=new j({el:l.select("g.tracks").append("g")[0],track:t,radius_bounds:p,genome:this.genome,total_gap:this.total_gap});s.render();this.chords_views.push(s)}else{var r=this.get_tracks_bounds();g.each(this.circular_views,function(v,w){v.update_radius_bounds(r[w])});g.each(this.chords_views,function(v){v.update_radius_bounds(r[0])});var q=this.circular_views.length,u=(t.get("track_type")==="LineTrack"?d:e),n=new u({el:l.select("g.tracks").append("g")[0],track:t,radius_bounds:r[q],genome:this.genome,total_gap:this.total_gap});n.render();this.circular_views.push(n);var o=r[r.length-1];o[1]=o[0];this.label_track_view.update_radius_bounds(o)}},remove_track:function(o,q,p){var n=this.circular_views[p.index];this.circular_views.splice(p.index,1);n.$el.remove();var r=this.get_tracks_bounds();g.each(this.circular_views,function(s,t){s.update_radius_bounds(r[t])})}});var k=Backbone.View.extend({tagName:"g",initialize:function(n){this.bg_stroke="ccc";this.loading_bg_fill="000";this.bg_fill="ccc";this.total_gap=n.total_gap;this.track=n.track;this.radius_bounds=n.radius_bounds;this.genome=n.genome;this.chroms_layout=this._chroms_layout();this.data_bounds=[];this.scale=1;this.parent_elt=l.select(this.$el[0])},get_fill_color:function(){var n=this.track.get("config").get_value("block_color");if(!n){n=this.track.get("config").get_value("color")}return n},render:function(){var r=this.parent_elt;if(!r){console.log("no parent elt")}var q=this.chroms_layout,t=l.svg.arc().innerRadius(this.radius_bounds[0]).outerRadius(this.radius_bounds[1]),n=r.selectAll("g").data(q).enter().append("svg:g"),p=n.append("path").attr("d",t).attr("class","chrom-background").style("stroke",this.bg_stroke).style("fill",this.loading_bg_fill);p.append("title").text(function(v){return v.data.chrom});var o=this,s=o.track.get("data_manager"),u=(s?s.data_is_ready():true);$.when(u).then(function(){$.when(o._render_data(r)).then(function(){p.style("fill",o.bg_fill);o.render_labels()})})},render_labels:function(){},update_radius_bounds:function(o){this.radius_bounds=o;var n=l.svg.arc().innerRadius(this.radius_bounds[0]).outerRadius(this.radius_bounds[1]);this.parent_elt.selectAll("g>path.chrom-background").transition().duration(1000).attr("d",n);this._transition_chrom_data();this._transition_labels()},update_scale:function(q){var p=this.scale;this.scale=q;if(q<=p){return}var o=this,n=new m();this.parent_elt.selectAll("path.chrom-data").filter(function(s,r){return n.is_visible(this)}).each(function(x,t){var w=l.select(this),s=w.attr("chrom"),v=o.genome.get_chrom_region(s),u=o.track.get("data_manager"),r;if(!u.can_get_more_detailed_data(v)){return}r=o.track.get("data_manager").get_more_detailed_data(v,"Coverage",0,q);$.when(r).then(function(A){w.remove();o._update_data_bounds();var z=g.find(o.chroms_layout,function(B){return B.data.chrom===s});var y=o.get_fill_color();o._render_chrom_data(o.parent_elt,z,A).style("stroke",y).style("fill",y)})});return o},_transition_chrom_data:function(){var o=this.track,q=this.chroms_layout,n=this.parent_elt.selectAll("g>path.chrom-data"),r=n[0].length;if(r>0){var p=this;$.when(o.get("data_manager").get_genome_wide_data(this.genome)).then(function(t){var s=g.reject(g.map(t,function(u,v){var w=null,x=p._get_path_function(q[v],u);if(x){w=x(u.data)}return w}),function(u){return 
u===null});n.each(function(v,u){l.select(this).transition().duration(1000).attr("d",s[u])})})}},_transition_labels:function(){},_update_data_bounds:function(){var n=this.data_bounds;this.data_bounds=this.get_data_bounds(this.track.get("data_manager").get_genome_wide_data(this.genome));if(this.data_bounds[0]<n[0]||this.data_bounds[1]>n[1]){this._transition_chrom_data()}},_render_data:function(q){var p=this,o=this.chroms_layout,n=this.track,r=$.Deferred();$.when(n.get("data_manager").get_genome_wide_data(this.genome)).then(function(t){p.data_bounds=p.get_data_bounds(t);layout_and_data=g.zip(o,t),chroms_data_layout=g.map(layout_and_data,function(u){var v=u[0],w=u[1];return p._render_chrom_data(q,v,w)});var s=p.get_fill_color();p.parent_elt.selectAll("path.chrom-data").style("stroke",s).style("fill",s);r.resolve(q)});return r},_render_chrom_data:function(n,o,p){},_get_path_function:function(o,n){},_chroms_layout:function(){var o=this.genome.get_chroms_info(),q=l.layout.pie().value(function(s){return s.len}).sort(null),r=q(o),n=this.total_gap/o.length,p=g.map(r,function(u,t){var s=u.endAngle-n;u.endAngle=(s>u.startAngle?s:u.startAngle);return u});return p}});var b=k.extend({initialize:function(n){k.prototype.initialize.call(this,n);this.innerRadius=this.radius_bounds[0];this.radius_bounds[0]=this.radius_bounds[1];this.bg_stroke="fff";this.bg_fill="fff";this.min_arc_len=0.08},_render_data:function(p){var o=this,n=p.selectAll("g");n.selectAll("path").attr("id",function(t){return"label-"+t.data.chrom});n.append("svg:text").filter(function(t){return t.endAngle-t.startAngle>o.min_arc_len}).attr("text-anchor","middle").append("svg:textPath").attr("xlink:href",function(t){return"#label-"+t.data.chrom}).attr("startOffset","25%").attr("font-weight","bold").text(function(t){return t.data.chrom});var q=function(v){var t=(v.endAngle-v.startAngle)/v.value,u=l.range(0,v.value,25000000).map(function(w,x){return{radius:o.innerRadius,angle:w*t+v.startAngle,label:x===0?0:(x%3?null:o.formatNum(w))}});if(u.length<4){u[u.length-1].label=o.formatNum(Math.round((u[u.length-1].angle-v.startAngle)/t))}return u};var s=function(t){return t.angle>Math.PI?"rotate(180)translate(-16)":null};var r=g.filter(this.chroms_layout,function(t){return t.endAngle-t.startAngle>o.min_arc_len});this.drawTicks(this.parent_elt,r,q,s)}});g.extend(b.prototype,h);var f=k.extend({_quantile:function(o,n){o.sort(l.ascending);return l.quantile(o,n)},_render_chrom_data:function(n,q,o){var r=this._get_path_function(q,o);if(!r){return null}var p=n.datum(o.data),s=p.append("path").attr("class","chrom-data").attr("chrom",q.data.chrom).attr("d",r);return s},_get_path_function:function(q,p){if(typeof p==="string"||!p.data||p.data.length===0){return null}var n=l.scale.linear().domain(this.data_bounds).range(this.radius_bounds).clamp(true);var r=l.scale.linear().domain([0,p.data.length]).range([q.startAngle,q.endAngle]);var o=l.svg.line.radial().interpolate("linear").radius(function(s){return n(s[1])}).angle(function(t,s){return r(s)});return l.svg.area.radial().interpolate(o.interpolate()).innerRadius(n(0)).outerRadius(o.radius()).angle(o.angle())},render_labels:function(){var n=this,q=function(){return"rotate(90)"};var p=g.filter(this.chroms_layout,function(r){return r.endAngle-r.startAngle>0.08}),o=g.filter(p,function(s,r){return r%3===0});this.drawTicks(this.parent_elt,o,this._data_bounds_ticks_fn(),q,true)},_transition_labels:function(){if(this.data_bounds.length===0){return}var o=this,q=g.filter(this.chroms_layout,function(r){return 
r.endAngle-r.startAngle>0.08}),p=g.filter(q,function(s,r){return r%3===0}),n=g.flatten(g.map(p,function(r){return o._data_bounds_ticks_fn()(r)}));this.parent_elt.selectAll("g.tick").data(n).transition().attr("transform",function(r){return"rotate("+(r.angle*180/Math.PI-90)+")translate("+r.radius+",0)"})},_data_bounds_ticks_fn:function(){var n=this;visibleChroms=0;return function(o){return[{radius:n.radius_bounds[0],angle:o.startAngle,label:n.formatNum(n.data_bounds[0])},{radius:n.radius_bounds[1],angle:o.startAngle,label:n.formatNum(n.data_bounds[1])}]}},get_data_bounds:function(n){}});g.extend(f.prototype,h);var e=f.extend({get_data_bounds:function(o){var n=g.map(o,function(p){if(typeof p==="string"||!p.max){return 0}return p.max});return[0,(n&&typeof n!=="string"?this._quantile(values,0.98):0)]}});var d=f.extend({get_data_bounds:function(o){var n=g.flatten(g.map(o,function(p){if(p){return g.map(p.data,function(q){return q[1]})}else{return 0}}));return[g.min(n),this._quantile(n,0.98)]}});var j=k.extend({render:function(){var n=this;$.when(n.track.get("data_manager").data_is_ready()).then(function(){$.when(n.track.get("data_manager").get_genome_wide_data(n.genome)).then(function(q){var p=[],o=n.genome.get_chroms_info();g.each(q,function(u,t){var r=o[t].chrom;var s=g.map(u.data,function(w){var v=n._get_region_angle(r,w[1]),x=n._get_region_angle(w[3],w[4]);return{source:{startAngle:v,endAngle:v+0.01},target:{startAngle:x,endAngle:x+0.01}}});p=p.concat(s)});n.parent_elt.append("g").attr("class","chord").selectAll("path").data(p).enter().append("path").style("fill",n.get_fill_color()).attr("d",l.svg.chord().radius(n.radius_bounds[0])).style("opacity",1)})})},update_radius_bounds:function(n){this.radius_bounds=n;this.parent_elt.selectAll("path").transition().attr("d",l.svg.chord().radius(this.radius_bounds[0]))},_get_region_angle:function(p,n){var o=g.find(this.chroms_layout,function(q){return q.data.chrom===p});return o.endAngle-((o.endAngle-o.startAngle)*(o.data.len-n)/o.data.len)}});return{CircsterView:a}});
\ No newline at end of file
+define(["libs/underscore","libs/d3","viz/visualization"],function(g,l,i){var m=Backbone.Model.extend({is_visible:function(q,n){var o=q.getBoundingClientRect(),p=$("svg")[0].getBoundingClientRect();if(o.right<0||o.left>p.right||o.bottom<0||o.top>p.bottom){return false}return true}});var h={drawTicks:function(r,q,v,p,n){var u=r.append("g").selectAll("g").data(q).enter().append("g").selectAll("g").data(v).enter().append("g").attr("class","tick").attr("transform",function(w){return"rotate("+(w.angle*180/Math.PI-90)+")translate("+w.radius+",0)"});var t=[],s=[],o=function(w){return w.angle>Math.PI?"end":null};if(n){t=[0,0,0,-4];s=[4,0,"",".35em"];o=null}else{t=[1,0,4,0];s=[0,4,".35em",""]}u.append("line").attr("x1",t[0]).attr("y1",t[1]).attr("x2",t[2]).attr("y1",t[3]).style("stroke","#000");u.append("text").attr("x",s[0]).attr("y",s[1]).attr("dx",s[2]).attr("dy",s[3]).attr("text-anchor",o).attr("transform",p).text(function(w){return w.label})},formatNum:function(o,n){if(n===undefined){n=2}var q=null;if(o<1){q=o.toPrecision(n)}else{var p=Math.round(o.toPrecision(n));if(o<1000){q=p}else{if(o<1000000){q=Math.round((p/1000).toPrecision(3)).toFixed(0)+"K"}else{if(o<1000000000){q=Math.round((p/1000000).toPrecision(3)).toFixed(0)+"M"}}}}return q}};var c=Backbone.Model.extend({});var a=Backbone.View.extend({className:"circster",initialize:function(n){this.total_gap=n.total_gap;this.genome=n.genome;this.dataset_arc_height=n.dataset_arc_height;this.track_gap=10;this.label_arc_height=50;this.scale=1;this.circular_views=null;this.chords_views=null;this.model.get("tracks").on("add",this.add_track,this);this.model.get("tracks").on("remove",this.remove_track,this);this.get_circular_tracks()},get_circular_tracks:function(){return this.model.get("tracks").filter(function(n){return n.get("track_type")!=="DiagonalHeatmapTrack"})},get_chord_tracks:function(){return this.model.get("tracks").filter(function(n){return n.get("track_type")==="DiagonalHeatmapTrack"})},get_tracks_bounds:function(){var o=this.get_circular_tracks();dataset_arc_height=this.dataset_arc_height,min_dimension=Math.min(this.$el.width(),this.$el.height()),radius_start=min_dimension/2-o.length*(this.dataset_arc_height+this.track_gap)-(this.label_arc_height+this.track_gap),tracks_start_radii=l.range(radius_start,min_dimension/2,this.dataset_arc_height+this.track_gap);var n=this;return g.map(tracks_start_radii,function(p){return[p,p+n.dataset_arc_height]})},render:function(){var w=this,q=this.dataset_arc_height,n=w.$el.width(),v=w.$el.height(),s=this.get_circular_tracks(),p=this.get_chord_tracks(),r=this.get_tracks_bounds(),o=l.select(w.$el[0]).append("svg").attr("width",n).attr("height",v).attr("pointer-events","all").append("svg:g").call(l.behavior.zoom().on("zoom",function(){var x=l.event.scale;o.attr("transform","translate("+l.event.translate+") scale("+x+")");if(w.scale!==x){if(w.zoom_drag_timeout){clearTimeout(w.zoom_drag_timeout)}w.zoom_drag_timeout=setTimeout(function(){},400)}})).attr("transform","translate("+n/2+","+v/2+")").append("svg:g").attr("class","tracks");this.circular_views=s.map(function(y,z){var A=(y.get("track_type")==="LineTrack"?d:e),x=new A({el:o.append("g")[0],track:y,radius_bounds:r[z],genome:w.genome,total_gap:w.total_gap});x.render();return x});this.chords_views=p.map(function(y){var x=new j({el:o.append("g")[0],track:y,radius_bounds:r[0],genome:w.genome,total_gap:w.total_gap});x.render();return x});var 
u=this.circular_views[this.circular_views.length-1].radius_bounds[1],t=[u,u+this.label_arc_height];this.label_track_view=new b({el:o.append("g")[0],track:new c(),radius_bounds:t,genome:w.genome,total_gap:w.total_gap});this.label_track_view.render()},add_track:function(t){if(t.get("track_type")==="DiagonalHeatmapTrack"){var p=this.circular_views[0].radius_bounds,s=new j({el:l.select("g.tracks").append("g")[0],track:t,radius_bounds:p,genome:this.genome,total_gap:this.total_gap});s.render();this.chords_views.push(s)}else{var r=this.get_tracks_bounds();g.each(this.circular_views,function(v,w){v.update_radius_bounds(r[w])});g.each(this.chords_views,function(v){v.update_radius_bounds(r[0])});var q=this.circular_views.length,u=(t.get("track_type")==="LineTrack"?d:e),n=new u({el:l.select("g.tracks").append("g")[0],track:t,radius_bounds:r[q],genome:this.genome,total_gap:this.total_gap});n.render();this.circular_views.push(n);var o=r[r.length-1];o[1]=o[0];this.label_track_view.update_radius_bounds(o)}},remove_track:function(o,q,p){var n=this.circular_views[p.index];this.circular_views.splice(p.index,1);n.$el.remove();var r=this.get_tracks_bounds();g.each(this.circular_views,function(s,t){s.update_radius_bounds(r[t])})}});var k=Backbone.View.extend({tagName:"g",initialize:function(n){this.bg_stroke="ccc";this.loading_bg_fill="000";this.bg_fill="ccc";this.total_gap=n.total_gap;this.track=n.track;this.radius_bounds=n.radius_bounds;this.genome=n.genome;this.chroms_layout=this._chroms_layout();this.data_bounds=[];this.scale=1;this.parent_elt=l.select(this.$el[0])},get_fill_color:function(){var n=this.track.get("config").get_value("block_color");if(!n){n=this.track.get("config").get_value("color")}return n},render:function(){var r=this.parent_elt;if(!r){console.log("no parent elt")}var q=this.chroms_layout,t=l.svg.arc().innerRadius(this.radius_bounds[0]).outerRadius(this.radius_bounds[1]),n=r.selectAll("g").data(q).enter().append("svg:g"),p=n.append("path").attr("d",t).attr("class","chrom-background").style("stroke",this.bg_stroke).style("fill",this.loading_bg_fill);p.append("title").text(function(v){return v.data.chrom});var o=this,s=o.track.get("data_manager"),u=(s?s.data_is_ready():true);$.when(u).then(function(){$.when(o._render_data(r)).then(function(){p.style("fill",o.bg_fill);o.render_labels()})})},render_labels:function(){},update_radius_bounds:function(o){this.radius_bounds=o;var n=l.svg.arc().innerRadius(this.radius_bounds[0]).outerRadius(this.radius_bounds[1]);this.parent_elt.selectAll("g>path.chrom-background").transition().duration(1000).attr("d",n);this._transition_chrom_data();this._transition_labels()},update_scale:function(q){var p=this.scale;this.scale=q;if(q<=p){return}var o=this,n=new m();this.parent_elt.selectAll("path.chrom-data").filter(function(s,r){return n.is_visible(this)}).each(function(x,t){var w=l.select(this),s=w.attr("chrom"),v=o.genome.get_chrom_region(s),u=o.track.get("data_manager"),r;if(!u.can_get_more_detailed_data(v)){return}r=o.track.get("data_manager").get_more_detailed_data(v,"Coverage",0,q);$.when(r).then(function(A){w.remove();o._update_data_bounds();var z=g.find(o.chroms_layout,function(B){return B.data.chrom===s});var y=o.get_fill_color();o._render_chrom_data(o.parent_elt,z,A).style("stroke",y).style("fill",y)})});return o},_transition_chrom_data:function(){var o=this.track,q=this.chroms_layout,n=this.parent_elt.selectAll("g>path.chrom-data"),r=n[0].length;if(r>0){var p=this;$.when(o.get("data_manager").get_genome_wide_data(this.genome)).then(function(t){var 
s=g.reject(g.map(t,function(u,v){var w=null,x=p._get_path_function(q[v],u);if(x){w=x(u.data)}return w}),function(u){return u===null});n.each(function(v,u){l.select(this).transition().duration(1000).attr("d",s[u])})})}},_transition_labels:function(){},_update_data_bounds:function(){var n=this.data_bounds;this.data_bounds=this.get_data_bounds(this.track.get("data_manager").get_genome_wide_data(this.genome));if(this.data_bounds[0]<n[0]||this.data_bounds[1]>n[1]){this._transition_chrom_data()}},_render_data:function(q){var p=this,o=this.chroms_layout,n=this.track,r=$.Deferred();$.when(n.get("data_manager").get_genome_wide_data(this.genome)).then(function(t){p.data_bounds=p.get_data_bounds(t);layout_and_data=g.zip(o,t),chroms_data_layout=g.map(layout_and_data,function(u){var v=u[0],w=u[1];return p._render_chrom_data(q,v,w)});var s=p.get_fill_color();p.parent_elt.selectAll("path.chrom-data").style("stroke",s).style("fill",s);r.resolve(q)});return r},_render_chrom_data:function(n,o,p){},_get_path_function:function(o,n){},_chroms_layout:function(){var o=this.genome.get_chroms_info(),q=l.layout.pie().value(function(s){return s.len}).sort(null),r=q(o),n=this.total_gap/o.length,p=g.map(r,function(u,t){var s=u.endAngle-n;u.endAngle=(s>u.startAngle?s:u.startAngle);return u});return p}});var b=k.extend({initialize:function(n){k.prototype.initialize.call(this,n);this.innerRadius=this.radius_bounds[0];this.radius_bounds[0]=this.radius_bounds[1];this.bg_stroke="fff";this.bg_fill="fff";this.min_arc_len=0.08},_render_data:function(p){var o=this,n=p.selectAll("g");n.selectAll("path").attr("id",function(t){return"label-"+t.data.chrom});n.append("svg:text").filter(function(t){return t.endAngle-t.startAngle>o.min_arc_len}).attr("text-anchor","middle").append("svg:textPath").attr("xlink:href",function(t){return"#label-"+t.data.chrom}).attr("startOffset","25%").attr("font-weight","bold").text(function(t){return t.data.chrom});var q=function(v){var t=(v.endAngle-v.startAngle)/v.value,u=l.range(0,v.value,25000000).map(function(w,x){return{radius:o.innerRadius,angle:w*t+v.startAngle,label:x===0?0:(x%3?null:o.formatNum(w))}});if(u.length<4){u[u.length-1].label=o.formatNum(Math.round((u[u.length-1].angle-v.startAngle)/t))}return u};var s=function(t){return t.angle>Math.PI?"rotate(180)translate(-16)":null};var r=g.filter(this.chroms_layout,function(t){return t.endAngle-t.startAngle>o.min_arc_len});this.drawTicks(this.parent_elt,r,q,s)}});g.extend(b.prototype,h);var f=k.extend({_quantile:function(o,n){o.sort(l.ascending);return l.quantile(o,n)},_render_chrom_data:function(n,q,o){var r=this._get_path_function(q,o);if(!r){return null}var p=n.datum(o.data),s=p.append("path").attr("class","chrom-data").attr("chrom",q.data.chrom).attr("d",r);return s},_get_path_function:function(q,p){if(typeof p==="string"||!p.data||p.data.length===0){return null}var n=l.scale.linear().domain(this.data_bounds).range(this.radius_bounds).clamp(true);var r=l.scale.linear().domain([0,p.data.length]).range([q.startAngle,q.endAngle]);var o=l.svg.line.radial().interpolate("linear").radius(function(s){return n(s[1])}).angle(function(t,s){return r(s)});return l.svg.area.radial().interpolate(o.interpolate()).innerRadius(n(0)).outerRadius(o.radius()).angle(o.angle())},render_labels:function(){var n=this,q=function(){return"rotate(90)"};var p=g.filter(this.chroms_layout,function(r){return r.endAngle-r.startAngle>0.08}),o=g.filter(p,function(s,r){return 
r%3===0});this.drawTicks(this.parent_elt,o,this._data_bounds_ticks_fn(),q,true)},_transition_labels:function(){if(this.data_bounds.length===0){return}var o=this,q=g.filter(this.chroms_layout,function(r){return r.endAngle-r.startAngle>0.08}),p=g.filter(q,function(s,r){return r%3===0}),n=g.flatten(g.map(p,function(r){return o._data_bounds_ticks_fn()(r)}));this.parent_elt.selectAll("g.tick").data(n).transition().attr("transform",function(r){return"rotate("+(r.angle*180/Math.PI-90)+")translate("+r.radius+",0)"})},_data_bounds_ticks_fn:function(){var n=this;visibleChroms=0;return function(o){return[{radius:n.radius_bounds[0],angle:o.startAngle,label:n.formatNum(n.data_bounds[0])},{radius:n.radius_bounds[1],angle:o.startAngle,label:n.formatNum(n.data_bounds[1])}]}},get_data_bounds:function(n){}});g.extend(f.prototype,h);var e=f.extend({get_data_bounds:function(o){var n=g.map(o,function(p){if(typeof p==="string"||!p.max){return 0}return p.max});return[0,(n&&typeof n!=="string"?this._quantile(n,0.98):0)]}});var d=f.extend({get_data_bounds:function(o){var n=g.flatten(g.map(o,function(p){if(p){return g.map(p.data,function(q){return q[1]})}else{return 0}}));return[g.min(n),this._quantile(n,0.98)]}});var j=k.extend({render:function(){var n=this;$.when(n.track.get("data_manager").data_is_ready()).then(function(){$.when(n.track.get("data_manager").get_genome_wide_data(n.genome)).then(function(q){var p=[],o=n.genome.get_chroms_info();g.each(q,function(u,t){var r=o[t].chrom;var s=g.map(u.data,function(w){var v=n._get_region_angle(r,w[1]),x=n._get_region_angle(w[3],w[4]);return{source:{startAngle:v,endAngle:v+0.01},target:{startAngle:x,endAngle:x+0.01}}});p=p.concat(s)});n.parent_elt.append("g").attr("class","chord").selectAll("path").data(p).enter().append("path").style("fill",n.get_fill_color()).attr("d",l.svg.chord().radius(n.radius_bounds[0])).style("opacity",1)})})},update_radius_bounds:function(n){this.radius_bounds=n;this.parent_elt.selectAll("path").transition().attr("d",l.svg.chord().radius(this.radius_bounds[0]))},_get_region_angle:function(p,n){var o=g.find(this.chroms_layout,function(q){return q.data.chrom===p});return o.endAngle-((o.endAngle-o.startAngle)*(o.data.len-n)/o.data.len)}});return{CircsterView:a}});
\ No newline at end of file
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: natefoo: Make 'paused' a terminal state so that the history no longer polls for updates when it contains paused datasets.
by Bitbucket 15 Nov '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/94627de62528/
changeset: 94627de62528
user: natefoo
date: 2012-11-15 17:21:13
summary: Make 'paused' a terminal state so that the history no longer polls for updates when it contains paused datasets.
affected #: 2 files
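For context, a minimal Python sketch of the polling loop this change short-circuits (the real check is the JavaScript TERMINAL_STATES list in templates/root/history.mako below; poll_history and get_dataset_states are hypothetical names used only for illustration):

import time

TERMINAL_STATES = set( [ "ok", "error", "empty", "deleted", "discarded",
                         "failed_metadata", "paused" ] )

def poll_history( get_dataset_states, interval=4 ):
    # Keep refreshing until every dataset is in a terminal state. Before
    # this commit, 'paused' was missing from the list, so a history that
    # contained paused datasets polled forever.
    while True:
        states = get_dataset_states()
        if all( state in TERMINAL_STATES for state in states ):
            break
        time.sleep( interval )

# Example: one refresh while running, then stop once everything is paused.
states = iter( [ [ 'running' ], [ 'paused' ] ] )
poll_history( lambda: next( states ), interval=0 )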
diff -r a478d3fb1d07fd929f22ba55e90ee96a81393cf3 -r 94627de6252824193ff0ff659807d17e59e4f94f lib/galaxy/jobs/handler.py
--- a/lib/galaxy/jobs/handler.py
+++ b/lib/galaxy/jobs/handler.py
@@ -195,7 +195,7 @@
job.state = model.Job.states.PAUSED
for dataset_assoc in job.output_datasets + job.output_library_datasets:
dataset_assoc.dataset.dataset.state = model.Dataset.states.PAUSED
- dataset_assoc.dataset.blurb = "Execution of this dataset's job is paused because you were over your disk quota at the time it was ready to run"
+ dataset_assoc.dataset.info = "Execution of this dataset's job is paused because you were over your disk quota at the time it was ready to run"
self.sa_session.add( dataset_assoc.dataset.dataset )
self.sa_session.add( job )
else:
diff -r a478d3fb1d07fd929f22ba55e90ee96a81393cf3 -r 94627de6252824193ff0ff659807d17e59e4f94f templates/root/history.mako
--- a/templates/root/history.mako
+++ b/templates/root/history.mako
@@ -30,7 +30,7 @@
<script type="text/javascript">
-<% TERMINAL_STATES = ["ok", "error", "empty", "deleted", "discarded", "failed_metadata"] %>
+<% TERMINAL_STATES = ["ok", "error", "empty", "deleted", "discarded", "failed_metadata", "paused"] %>
TERMINAL_STATES = ${ h.to_json_string(TERMINAL_STATES) };
// Tag handling.
@@ -708,4 +708,4 @@
</div></body>
-</html>
\ No newline at end of file
+</html>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: natefoo: When a job fails, pause any dependent jobs (and the dependent jobs' outputs).
by Bitbucket 15 Nov '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/a478d3fb1d07/
changeset: a478d3fb1d07
user: natefoo
date: 2012-11-15 17:11:23
summary: When a job fails, pause any dependent jobs (and the dependent jobs' outputs).
affected #: 5 files
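The cascade in miniature, as a sketch with stand-in classes (Dataset, Job, and the module-level pause() here are simplified placeholders; the real JobWrapper.pause() and the new dependent_jobs backref appear in the diff below):

class Dataset( object ):
    def __init__( self ):
        self.state = 'new'
        self.info = None
        self.dependent_jobs = []    # filled in via the new mapping backref

class Job( object ):
    def __init__( self, outputs ):
        self.state = 'new'
        self.output_datasets = outputs

def pause( job, message ):
    # Mirrors JobWrapper.pause(): only jobs that are still NEW get paused,
    # and each of their output datasets records why.
    if job.state == 'new':
        for dataset in job.output_datasets:
            dataset.state = 'paused'
            dataset.info = message
        job.state = 'paused'

def fail_dataset( dataset ):
    dataset.state = 'error'
    # Dependent jobs are paused rather than errored, so they can later be
    # resumed or remapped to new inputs.
    for dep_job in dataset.dependent_jobs:
        pause( dep_job, "Execution of this dataset's job is paused because "
                        "its input datasets are in an error state." )

errored_output = Dataset()
downstream = Job( outputs=[ Dataset() ] )
errored_output.dependent_jobs.append( downstream )
fail_dataset( errored_output )
assert downstream.state == 'paused'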
diff -r 0074aa873198dc48ae9e8e5f5b7c70a99dc0cf54 -r a478d3fb1d07fd929f22ba55e90ee96a81393cf3 lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py
+++ b/lib/galaxy/jobs/__init__.py
@@ -245,6 +245,9 @@
# Update (non-library) job output datasets through the object store
if dataset not in job.output_library_datasets:
self.app.object_store.update_from_file(dataset.dataset, create=True)
+ # Pause any dependent jobs (and those jobs' outputs)
+ for dep_job_assoc in dataset.dependent_jobs:
+ self.pause( dep_job_assoc.job, "Execution of this dataset's job is paused because its input datasets are in an error state." )
self.sa_session.add( dataset )
self.sa_session.flush()
job.state = job.states.ERROR
@@ -275,6 +278,19 @@
if self.app.config.cleanup_job == 'always' or (self.app.config.cleanup_job == 'onsuccess' and job.state == job.states.DELETED):
self.cleanup()
+ def pause( self, job=None, message=None ):
+ if job is None:
+ job = self.get_job()
+ if message is None:
+ message = "Execution of this dataset's job is paused"
+ if job.state == job.states.NEW:
+ for dataset_assoc in job.output_datasets + job.output_library_datasets:
+ dataset_assoc.dataset.dataset.state = dataset_assoc.dataset.dataset.states.PAUSED
+ dataset_assoc.dataset.info = message
+ self.sa_session.add( dataset_assoc.dataset )
+ job.state = job.states.PAUSED
+ self.sa_session.add( job )
+
def change_state( self, state, info = False ):
job = self.get_job()
self.sa_session.refresh( job )
@@ -437,6 +453,9 @@
log.debug( "setting dataset state to ERROR" )
# TODO: This is where the state is being set to error. Change it!
dataset_assoc.dataset.dataset.state = model.Dataset.states.ERROR
+ # Pause any dependent jobs (and those jobs' outputs)
+ for dep_job_assoc in dataset_assoc.dataset.dependent_jobs:
+ self.pause( dep_job_assoc.job, "Execution of this dataset's job is paused because its input datasets are in an error state." )
else:
dataset_assoc.dataset.dataset.state = model.Dataset.states.OK
# If any of the rest of the finish method below raises an
diff -r 0074aa873198dc48ae9e8e5f5b7c70a99dc0cf54 -r a478d3fb1d07fd929f22ba55e90ee96a81393cf3 lib/galaxy/jobs/handler.py
--- a/lib/galaxy/jobs/handler.py
+++ b/lib/galaxy/jobs/handler.py
@@ -193,6 +193,10 @@
elif job_state == JOB_USER_OVER_QUOTA:
log.info( "(%d) User (%s) is over quota: job paused" % ( job.id, job.user_id ) )
job.state = model.Job.states.PAUSED
+ for dataset_assoc in job.output_datasets + job.output_library_datasets:
+ dataset_assoc.dataset.dataset.state = model.Dataset.states.PAUSED
+ dataset_assoc.dataset.blurb = "Execution of this dataset's job is paused because you were over your disk quota at the time it was ready to run"
+ self.sa_session.add( dataset_assoc.dataset.dataset )
self.sa_session.add( job )
else:
log.error( "(%d) Job in unknown state '%s'" % ( job.id, job_state ) )
diff -r 0074aa873198dc48ae9e8e5f5b7c70a99dc0cf54 -r a478d3fb1d07fd929f22ba55e90ee96a81393cf3 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -870,6 +870,7 @@
EMPTY = 'empty',
ERROR = 'error',
DISCARDED = 'discarded',
+ PAUSED = 'paused',
SETTING_METADATA = 'setting_metadata',
FAILED_METADATA = 'failed_metadata' )
permitted_actions = get_permitted_actions( filter='DATASET' )
diff -r 0074aa873198dc48ae9e8e5f5b7c70a99dc0cf54 -r a478d3fb1d07fd929f22ba55e90ee96a81393cf3 lib/galaxy/model/mapping.py
--- a/lib/galaxy/model/mapping.py
+++ b/lib/galaxy/model/mapping.py
@@ -1571,13 +1571,13 @@
) )
assign_mapper( context, JobToInputDatasetAssociation, JobToInputDatasetAssociation.table,
- properties=dict( job=relation( Job ), dataset=relation( HistoryDatasetAssociation, lazy=False ) ) )
+ properties=dict( job=relation( Job ), dataset=relation( HistoryDatasetAssociation, lazy=False, backref="dependent_jobs" ) ) )
assign_mapper( context, JobToOutputDatasetAssociation, JobToOutputDatasetAssociation.table,
properties=dict( job=relation( Job ), dataset=relation( HistoryDatasetAssociation, lazy=False ) ) )
assign_mapper( context, JobToInputLibraryDatasetAssociation, JobToInputLibraryDatasetAssociation.table,
- properties=dict( job=relation( Job ), dataset=relation( LibraryDatasetDatasetAssociation, lazy=False ) ) )
+ properties=dict( job=relation( Job ), dataset=relation( LibraryDatasetDatasetAssociation, lazy=False, backref="dependent_jobs" ) ) )
assign_mapper( context, JobToOutputLibraryDatasetAssociation, JobToOutputLibraryDatasetAssociation.table,
properties=dict( job=relation( Job ), dataset=relation( LibraryDatasetDatasetAssociation, lazy=False ) ) )
diff -r 0074aa873198dc48ae9e8e5f5b7c70a99dc0cf54 -r a478d3fb1d07fd929f22ba55e90ee96a81393cf3 templates/root/history_common.mako
--- a/templates/root/history_common.mako
+++ b/templates/root/history_common.mako
@@ -153,7 +153,7 @@
</div>
%elif data_state == "paused":
<div>
- ${_('Job is currently paused. Check your quota and parent jobs for failure, use the history menu to resume.')}</div>
+ ${_('Job is currently paused:')} <i>${data.display_info().strip().rstrip('.')}.</i> ${_('Use the history menu to resume.')}</div><div><a href="${h.url_for( controller='dataset', action='show_params', dataset_id=dataset_id )}" target="galaxy_main" title='${_("View Details")}' class="icon-button information tooltip"></a>
%if for_editing:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: greg: Add missing attributes to the ToolDataTable class.
by Bitbucket 15 Nov '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/0074aa873198/
changeset: 0074aa873198
user: greg
date: 2012-11-15 15:35:25
summary: Add missing attributes to the ToolDataTable class.
affected #: 1 file
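How the new attributes are derived, sketched against a config element shaped the way the parsing code expects (the XML literal below is illustrative, not quoted from a real tool_data_table_conf.xml):

import os
import xml.etree.ElementTree as ElementTree

config_element = ElementTree.fromstring(
    '<table name="bwa_indexes" comment_char="#">'
    '<file path="tool-data/bwa_index.loc" />'
    '</table>' )

comment_char = config_element.get( 'comment_char' )
tool_data_file = None
for file_elem in config_element.findall( 'file' ):
    # There should only be one file element.
    if 'path' in file_elem.attrib:
        # Only the file name is kept, not the full path.
        tool_data_file = os.path.split( file_elem.get( 'path' ) )[ 1 ]
print( '%s %s' % ( comment_char, tool_data_file ) )    # -> # bwa_index.loc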
diff -r 340438c62171578078323d39da398d5053b69d0a -r 0074aa873198dc48ae9e8e5f5b7c70a99dc0cf54 lib/galaxy/tools/data/__init__.py
--- a/lib/galaxy/tools/data/__init__.py
+++ b/lib/galaxy/tools/data/__init__.py
@@ -118,6 +118,14 @@
class ToolDataTable( object ):
def __init__( self, config_element, tool_data_path ):
self.name = config_element.get( 'name' )
+ self.comment_char = config_element.get( 'comment_char' )
+ for file_elem in config_element.findall( 'file' ):
+ # There should only be one file_elem.
+ if 'path' in file_elem.attrib:
+ tool_data_file_path = file_elem.get( 'path' )
+ self.tool_data_file = os.path.split( tool_data_file_path )[1]
+ else:
+ self.tool_data_file = None
self.tool_data_path = tool_data_path
self.missing_index_file = None
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/340438c62171/
changeset: 340438c62171
user: jgoecks
date: 2012-11-15 00:50:20
summary: Small bug fixes for Circster.
affected #: 1 file
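Two fixes land here: formatNum() gains a default of two significant digits when sigDigits is omitted, and LineTrack's get_data_bounds() passes its computed max_data array to _quantile() instead of the undefined name 'values'. The formatting rule, restated as a Python sketch for readability (the canonical version is the JavaScript in the diff below):

def format_num( num, sig_digits=2 ):
    # Round to sig_digits significant figures, then abbreviate thousands
    # as 'K' and millions as 'M', e.g. 25000000 -> '25M'.
    if num < 1:
        return '%.*g' % ( sig_digits, num )
    rounded = int( round( float( '%.*g' % ( sig_digits, num ) ) ) )
    if num < 1000:
        return str( rounded )
    if num < 1000000:
        return '%dK' % round( rounded / 1000.0 )
    if num < 1000000000:
        return '%dM' % round( rounded / 1000000.0 )
    return None

assert format_num( 25000000 ) == '25M'
assert format_num( 1234 ) == '1K'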
diff -r cc7df5ca1d47dbbd98614c21589435f84c67f9f5 -r 340438c62171578078323d39da398d5053b69d0a static/scripts/viz/circster.js
--- a/static/scripts/viz/circster.js
+++ b/static/scripts/viz/circster.js
@@ -84,6 +84,11 @@
* Format number for display at a tick.
*/
formatNum: function(num, sigDigits) {
+ // Use default of 2 sig. digits.
+ if (sigDigits === undefined) {
+ sigDigits = 2;
+ }
+
var rval = null;
if (num < 1) {
rval = num.toPrecision(sigDigits);
@@ -866,7 +871,7 @@
if (typeof d === 'string' || !d.max) { return 0; }
return d.max;
});
- return [ 0, (max_data && typeof max_data !== 'string' ? this._quantile(values, 0.98) : 0) ];
+ return [ 0, (max_data && typeof max_data !== 'string' ? this._quantile(max_data, 0.98) : 0) ];
}
});
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: dannon: Introduction of the dataset "Paused" state and basic resume-all functionality for a history. This will need to be reimplemented in backbone, when that's committed and enabled.
by Bitbucket 14 Nov '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/cc7df5ca1d47/
changeset: cc7df5ca1d47
user: dannon
date: 2012-11-14 22:18:26
summary: Introduction of the dataset "Paused" state and basic resume-all functionality for a history. This will need to be reimplemented in backbone, when that's committed and enabled.
affected #: 7 files
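The resume path in miniature, using placeholder objects in place of Galaxy's mapped models (only resume_paused_jobs() below mirrors the committed method; everything else is scaffolding for the example):

class Job( object ):
    PAUSED, QUEUED = 'paused', 'queued'
    def __init__( self, state ):
        self.state = state
    def set_state( self, state ):
        self.state = state

class Dataset( object ):
    def __init__( self, creating_job ):
        self.creating_job = creating_job

class History( object ):
    def __init__( self, datasets ):
        self.datasets = datasets
    def resume_paused_jobs( self ):
        # Requeue only the jobs that are actually paused.
        for dataset in self.datasets:
            job = dataset.creating_job
            if job.state == Job.PAUSED:
                job.set_state( Job.QUEUED )

history = History( [ Dataset( Job( Job.PAUSED ) ), Dataset( Job( 'ok' ) ) ] )
history.resume_paused_jobs()
assert history.datasets[ 0 ].creating_job.state == Job.QUEUED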
diff -r 73e05bc14cf1478b5ff9d8e8fffdf28d701dd2cb -r cc7df5ca1d47dbbd98614c21589435f84c67f9f5 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -698,6 +698,11 @@
def unhide_datasets( self ):
for dataset in self.datasets:
dataset.mark_unhidden()
+ def resume_paused_jobs( self ):
+ for dataset in self.datasets:
+ job = dataset.creating_job
+ if job.state == Job.states.PAUSED:
+ job.set_state(Job.states.QUEUED)
def get_disk_size( self, nice_size=False ):
# unique datasets only
db_session = object_session( self )
diff -r 73e05bc14cf1478b5ff9d8e8fffdf28d701dd2cb -r cc7df5ca1d47dbbd98614c21589435f84c67f9f5 lib/galaxy/webapps/galaxy/controllers/history.py
--- a/lib/galaxy/webapps/galaxy/controllers/history.py
+++ b/lib/galaxy/webapps/galaxy/controllers/history.py
@@ -564,6 +564,20 @@
return trans.show_ok_message( "Your datasets have been unhidden.", refresh_frames=refresh_frames )
@web.expose
+ def resume_paused_jobs( self, trans, current=False, ids=None ):
+ """Resume paused jobs the active history -- this does not require a logged in user."""
+ if not ids and util.string_as_bool( current ):
+ histories = [ trans.get_history() ]
+ refresh_frames = ['history']
+ else:
+ raise NotImplementedError( "You can currently only resume all the datasets of the current history." )
+ for history in histories:
+ history.resume_paused_jobs()
+ trans.sa_session.add( history )
+ trans.sa_session.flush()
+ return trans.show_ok_message( "Your jobs have been resumed.", refresh_frames=refresh_frames )
+
+ @web.expose
@web.require_login( "rate items" )
@web.json
def rate_async( self, trans, id, rating ):
diff -r 73e05bc14cf1478b5ff9d8e8fffdf28d701dd2cb -r cc7df5ca1d47dbbd98614c21589435f84c67f9f5 static/june_2007_style/base.less
--- a/static/june_2007_style/base.less
+++ b/static/june_2007_style/base.less
@@ -1637,6 +1637,11 @@
opacity: .60;
}
+div.historyItem-paused {
+ // border-color: @history_paused_border;
+ background: @history_paused_bg;
+}
+
// Special case for showing the spinner but not changing the background
div.historyItemTitleBar.spinner .state-icon {
background: url(data_running.gif) 0 1px no-repeat !important;
diff -r 73e05bc14cf1478b5ff9d8e8fffdf28d701dd2cb -r cc7df5ca1d47dbbd98614c21589435f84c67f9f5 static/june_2007_style/blue/base.css
--- a/static/june_2007_style/blue/base.css
+++ b/static/june_2007_style/blue/base.css
@@ -851,6 +851,7 @@
div.historyItem-upload{background:#ccccff;}div.historyItem-upload .state-icon{background-image:url(data_upload.gif);}
div.historyItem-queued{background:#eeeeee;}
div.historyItem-noPermission{filter:alpha(opacity=60);-moz-opacity:.60;opacity:.60;}
+div.historyItem-paused{background:#d9edf7;}
div.historyItemTitleBar.spinner .state-icon{background:url(data_running.gif) 0 1px no-repeat !important;}
div.historyItemButtons{float:right;}
div.historyItemBody div{padding-top:2px;}
diff -r 73e05bc14cf1478b5ff9d8e8fffdf28d701dd2cb -r cc7df5ca1d47dbbd98614c21589435f84c67f9f5 static/june_2007_style/blue_colors.ini
--- a/static/june_2007_style/blue_colors.ini
+++ b/static/june_2007_style/blue_colors.ini
@@ -44,6 +44,8 @@
history_deleted_bg=#3399FF
history_error_border=#AA6666
history_error_bg=#FFCCCC
+history_paused_border=#6666AA
+history_paused_bg=#d9edf7
history_running_border=#AAAA66
history_running_bg=#FFFFCC
history_ok_border=#66AA66
diff -r 73e05bc14cf1478b5ff9d8e8fffdf28d701dd2cb -r cc7df5ca1d47dbbd98614c21589435f84c67f9f5 templates/root/history_common.mako
--- a/templates/root/history_common.mako
+++ b/templates/root/history_common.mako
@@ -127,7 +127,12 @@
%endif
%endif
</div>
- <span class="state-icon"></span>
+ ## Hack, do it in css
+ %if data_state == "paused":
+ <span class="ficon pause"></span>
+ %else:
+ <span class="state-icon"></span>
+ %endif
<span class="historyItemTitle">${hid}: ${data.display_name()}</span></div>
@@ -146,6 +151,15 @@
<a href="${h.url_for( controller='tool_runner', action='rerun', id=data.id )}" target="galaxy_main" title='${_("Run this job again")}' class="icon-button arrow-circle tooltip"></a>
%endif
</div>
+ %elif data_state == "paused":
+ <div>
+ ${_('Job is currently paused. Check your quota and parent jobs for failure, use the history menu to resume.')}</div>
+ <div>
+ <a href="${h.url_for( controller='dataset', action='show_params', dataset_id=dataset_id )}" target="galaxy_main" title='${_("View Details")}' class="icon-button information tooltip"></a>
+ %if for_editing:
+ <a href="${h.url_for( controller='tool_runner', action='rerun', id=data.id )}" target="galaxy_main" title='${_("Run this job again")}' class="icon-button arrow-circle tooltip"></a>
+ %endif
+ </div>
%elif data_state == "running":
<div>${_('Job is currently running')}</div><div>
diff -r 73e05bc14cf1478b5ff9d8e8fffdf28d701dd2cb -r cc7df5ca1d47dbbd98614c21589435f84c67f9f5 templates/root/index.mako
--- a/templates/root/index.mako
+++ b/templates/root/index.mako
@@ -37,6 +37,9 @@
"${_("Dataset Security")}": function() {
galaxy_main.location = "${h.url_for( controller='root', action='history_set_default_permissions' )}";
},
+ "${_("Resume Paused Jobs")}": function() {
+ galaxy_history.location = "${h.url_for( controller='history', action='resume_paused_jobs', current=True)}";
+ },
"${_("Show Deleted Datasets")}": function() {
galaxy_history.location = "${h.url_for( controller='root', action='history', show_deleted=True)}";
},
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: natefoo: Add more featureful job limiting and optimize the query for checking whether jobs are ready to run.
by Bitbucket 13 Nov '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/73e05bc14cf1/
changeset: 73e05bc14cf1
user: natefoo
date: 2012-11-13 21:00:55
summary: Add more featureful job limiting and optimize the query for checking whether
jobs are ready to run. Input dependency checks are now performed via SQL
rather than walking up the object chain. Limits on the number of jobs a user
can run can now be set across the entire instance and per job runner URL.
Quota checks at job runtime are only performed once, after limit checks. If a
user is over quota, jobs are moved to a "paused" state. Once the user is under
quota, jobs can be unpaused and continue to run (once this UI is added in
another commit, shortly). This obviates the need for quota checks on every
job, on every queue cycle.
When a job's input dataset errors, the job, and all jobs dependent upon it,
are no longer errored. It will then be possible to remap a job to new inputs
to allow execution to continue from the point of failure. The commit for that
is also coming shortly.
affected #: 7 files
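One subtlety in the new [galaxy:job_limits] section: ConfigParser treats the first colon in an option name as a key/value delimiter, so a line such as 'pbs:/// = pbs://pbs.example.org/ 4' arrives pre-split and has to be reassembled. A self-contained Python 2 sketch of that parsing (the example runner URL and limit are illustrative):

import ConfigParser
from StringIO import StringIO

ini = "[galaxy:job_limits]\npbs:/// = pbs://pbs.example.org/ 4\n"
parser = ConfigParser.ConfigParser()
parser.readfp( StringIO( ini ) )

job_limits = {}
for k, v in parser.items( 'galaxy:job_limits' ):
    # ConfigParser split at the first colon, yielding k='pbs' and
    # v='/// = pbs://pbs.example.org/ 4'. Rejoin the key, then split the
    # value into ( runner URL to match, job count limit ).
    more_k, v = v.split( '=', 1 )
    k = '%s:%s' % ( k, more_k.strip() )
    v = v.strip().rsplit( None, 1 )
    v[ 1 ] = int( v[ 1 ] )
    job_limits[ k ] = v

print( job_limits )    # {'pbs:///': ['pbs://pbs.example.org/', 4]}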
diff -r ed0738c6001654d5456dd36579b278cd10fcd00c -r 73e05bc14cf1478b5ff9d8e8fffdf28d701dd2cb lib/galaxy/config.py
--- a/lib/galaxy/config.py
+++ b/lib/galaxy/config.py
@@ -124,6 +124,9 @@
self.enable_beta_job_managers = string_as_bool( kwargs.get( 'enable_beta_job_managers', 'False' ) )
# Per-user Job concurrency limitations
self.user_job_limit = int( kwargs.get( 'user_job_limit', 0 ) )
+ # user_job_limit for backwards-compatibility
+ self.registered_user_job_limit = int( kwargs.get( 'registered_user_job_limit', self.user_job_limit ) )
+ self.anonymous_user_job_limit = int( kwargs.get( 'anonymous_user_job_limit', self.user_job_limit ) )
self.default_cluster_job_runner = kwargs.get( 'default_cluster_job_runner', 'local:///' )
self.pbs_application_server = kwargs.get('pbs_application_server', "" )
self.pbs_dataset_server = kwargs.get('pbs_dataset_server', "" )
@@ -216,6 +219,19 @@
self.job_manager = kwargs.get('job_manager', self.server_name).strip()
self.job_handlers = [ x.strip() for x in kwargs.get('job_handlers', self.server_name).split(',') ]
self.default_job_handlers = [ x.strip() for x in kwargs.get('default_job_handlers', ','.join( self.job_handlers ) ).split(',') ]
+ # parse the [galaxy:job_limits] section
+ self.job_limits = {}
+ try:
+ job_limits = global_conf_parser.items( 'galaxy:job_limits' )
+ for k, v in job_limits:
+ # ConfigParser considers the first colon to be the delimiter, undo this behavior
+ more_k, v = v.split('=', 1)
+ k = '%s:%s' % (k, more_k.strip())
+ v = v.strip().rsplit(None, 1)
+ v[1] = int(v[1])
+ self.job_limits[k] = v
+ except ConfigParser.NoSectionError:
+ pass
# Use database for IPC unless this is a standalone server (or multiple servers doing self dispatching in memory)
if self.track_jobs_in_database is None or self.track_jobs_in_database == "None":
self.track_jobs_in_database = True
diff -r ed0738c6001654d5456dd36579b278cd10fcd00c -r 73e05bc14cf1478b5ff9d8e8fffdf28d701dd2cb lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py
+++ b/lib/galaxy/jobs/__init__.py
@@ -81,7 +81,7 @@
self.tool_provided_job_metadata = None
# Wrapper holding the info required to restore and clean up from files used for setting metadata externally
self.external_output_metadata = metadata.JobExternalOutputMetadataWrapper( job )
- self.job_runner_mapper = JobRunnerMapper( self )
+ self.job_runner_mapper = JobRunnerMapper( self, job.job_runner_name )
self.params = None
if job.params:
self.params = from_json_string( job.params )
diff -r ed0738c6001654d5456dd36579b278cd10fcd00c -r 73e05bc14cf1478b5ff9d8e8fffdf28d701dd2cb lib/galaxy/jobs/handler.py
--- a/lib/galaxy/jobs/handler.py
+++ b/lib/galaxy/jobs/handler.py
@@ -8,7 +8,7 @@
import threading
from Queue import Queue, Empty
-from sqlalchemy.sql.expression import and_, or_
+from sqlalchemy.sql.expression import and_, or_, select, func
from galaxy import util, model
from galaxy.jobs import Sleeper, JobWrapper, TaskWrapper
@@ -16,7 +16,7 @@
log = logging.getLogger( __name__ )
# States for running a job. These are NOT the same as data states
-JOB_WAIT, JOB_ERROR, JOB_INPUT_ERROR, JOB_INPUT_DELETED, JOB_READY, JOB_DELETED, JOB_ADMIN_DELETED = 'wait', 'error', 'input_error', 'input_deleted', 'ready', 'deleted', 'admin_deleted'
+JOB_WAIT, JOB_ERROR, JOB_INPUT_ERROR, JOB_INPUT_DELETED, JOB_READY, JOB_DELETED, JOB_ADMIN_DELETED, JOB_USER_OVER_QUOTA = 'wait', 'error', 'input_error', 'input_deleted', 'ready', 'deleted', 'admin_deleted', 'user_over_quota'
class JobHandler( object ):
"""
@@ -126,9 +126,32 @@
# Clear the session so we get fresh states for job and all datasets
self.sa_session.expunge_all()
# Fetch all new jobs
- jobs_to_check = self.sa_session.query( model.Job ).enable_eagerloads( False ) \
- .filter( ( model.Job.state == model.Job.states.NEW ) \
- & ( model.Job.handler == self.app.config.server_name ) ).all()
+ hda_not_ready = self.sa_session.query(model.Job.id).enable_eagerloads(False) \
+ .join(model.JobToInputDatasetAssociation) \
+ .join(model.HistoryDatasetAssociation) \
+ .join(model.Dataset) \
+ .filter(and_((model.Job.state == model.Job.states.NEW),
+ or_((model.HistoryDatasetAssociation._state != None),
+ (model.HistoryDatasetAssociation.deleted == True ),
+ (model.Dataset.state != model.Dataset.states.OK ),
+ (model.Dataset.deleted == True)))).subquery()
+ ldda_not_ready = self.sa_session.query(model.Job.id).enable_eagerloads(False) \
+ .join(model.JobToInputLibraryDatasetAssociation) \
+ .join(model.LibraryDatasetDatasetAssociation) \
+ .join(model.Dataset) \
+ .filter(and_((model.Job.state == model.Job.states.NEW),
+ or_((model.LibraryDatasetDatasetAssociation._state != None),
+ (model.LibraryDatasetDatasetAssociation.deleted == True),
+ (model.Dataset.state != model.Dataset.states.OK),
+ (model.Dataset.deleted == True)))).subquery()
+ jobs_to_check = self.sa_session.query(model.Job).enable_eagerloads(False) \
+ .filter(and_((model.Job.state == model.Job.states.NEW),
+ (model.Job.handler == self.app.config.server_name),
+ ~model.Job.table.c.id.in_(hda_not_ready),
+ ~model.Job.table.c.id.in_(ldda_not_ready))) \
+ .order_by(model.Job.id).all()
+ # Ensure that we get new job counts on each iteration
+ self.__clear_user_job_count()
else:
# Get job objects and append to watch queue for any which were
# previously waiting
@@ -150,7 +173,8 @@
new_waiting_jobs = []
for job in jobs_to_check:
try:
- # Check the job's dependencies, requeue if they're not done
+ # Check the job's dependencies, requeue if they're not done.
+ # Some of these states will only happen when using the in-memory job queue
job_state = self.__check_if_ready_to_run( job )
if job_state == JOB_WAIT:
if not self.track_jobs_in_database:
@@ -166,6 +190,10 @@
log.info( "(%d) Job deleted by user while still queued" % job.id )
elif job_state == JOB_ADMIN_DELETED:
log.info( "(%d) Job deleted by admin while still queued" % job.id )
+ elif job_state == JOB_USER_OVER_QUOTA:
+ log.info( "(%d) User (%s) is over quota: job paused" % ( job.id, job.user_id ) )
+ job.state = model.Job.states.PAUSED
+ self.sa_session.add( job )
else:
log.error( "(%d) Job in unknown state '%s'" % ( job.id, job_state ) )
if not self.track_jobs_in_database:
@@ -174,6 +202,8 @@
log.exception( "failure running job %d" % job.id )
# Update the waiting list
self.waiting_jobs = new_waiting_jobs
+ # Flush, if we updated the state
+ self.sa_session.flush()
# Done with the session
self.sa_session.remove()
@@ -187,57 +217,88 @@
job can be dispatched. Otherwise, return JOB_WAIT indicating that input
datasets are still being prepared.
"""
- if job.state == model.Job.states.DELETED:
- return JOB_DELETED
- elif job.state == model.Job.states.ERROR:
- return JOB_ADMIN_DELETED
- elif self.app.config.enable_quotas:
+ # If tracking in the database, job.state is guaranteed to be NEW and the inputs are guaranteed to be OK
+ if not self.track_jobs_in_database:
+ if job.state == model.Job.states.DELETED:
+ return JOB_DELETED
+ elif job.state == model.Job.states.ERROR:
+ return JOB_ADMIN_DELETED
+ for dataset_assoc in job.input_datasets + job.input_library_datasets:
+ idata = dataset_assoc.dataset
+ if not idata:
+ continue
+ # don't run jobs for which the input dataset was deleted
+ if idata.deleted:
+ JobWrapper( job, self ).fail( "input data %s (file: %s) was deleted before the job started" % ( idata.hid, idata.file_name ) )
+ return JOB_INPUT_DELETED
+ # an error in the input data causes us to bail immediately
+ elif idata.state == idata.states.ERROR:
+ JobWrapper( job, self ).fail( "input data %s is in error state" % ( idata.hid ) )
+ return JOB_INPUT_ERROR
+ elif idata.state == idata.states.FAILED_METADATA:
+ JobWrapper( job, self ).fail( "input data %s failed to properly set metadata" % ( idata.hid ) )
+ return JOB_INPUT_ERROR
+ elif idata.state != idata.states.OK and not ( idata.state == idata.states.SETTING_METADATA and job.tool_id is not None and job.tool_id == self.app.datatypes_registry.set_external_metadata_tool.id ):
+ # need to requeue
+ return JOB_WAIT
+ state = self.__check_user_jobs( job )
+ if state == JOB_READY and self.app.config.enable_quotas:
quota = self.app.quota_agent.get_quota( job.user )
if quota is not None:
try:
usage = self.app.quota_agent.get_usage( user=job.user, history=job.history )
if usage > quota:
- return JOB_WAIT
+ return JOB_USER_OVER_QUOTA
except AssertionError, e:
pass # No history, should not happen with an anon user
- for dataset_assoc in job.input_datasets + job.input_library_datasets:
- idata = dataset_assoc.dataset
- if not idata:
- continue
- # don't run jobs for which the input dataset was deleted
- if idata.deleted:
- JobWrapper( job, self ).fail( "input data %s (file: %s) was deleted before the job started" % ( idata.hid, idata.file_name ) )
- return JOB_INPUT_DELETED
- # an error in the input data causes us to bail immediately
- elif idata.state == idata.states.ERROR:
- JobWrapper( job, self ).fail( "input data %s is in error state" % ( idata.hid ) )
- return JOB_INPUT_ERROR
- elif idata.state == idata.states.FAILED_METADATA:
- JobWrapper( job, self ).fail( "input data %s failed to properly set metadata" % ( idata.hid ) )
- return JOB_INPUT_ERROR
- elif idata.state != idata.states.OK and not ( idata.state == idata.states.SETTING_METADATA and job.tool_id is not None and job.tool_id == self.app.datatypes_registry.set_external_metadata_tool.id ):
- # need to requeue
- return JOB_WAIT
- return self.__check_user_jobs( job )
+ return state
+
+ def __clear_user_job_count( self ):
+ self.user_job_count = {}
+ self.user_job_count_per_runner = {}
def __check_user_jobs( self, job ):
- if not self.app.config.user_job_limit:
- return JOB_READY
if job.user:
- count = self.sa_session.query( model.Job ).enable_eagerloads( False ) \
- .filter( and_( model.Job.user_id == job.user.id,
- or_( model.Job.state == model.Job.states.RUNNING,
- model.Job.state == model.Job.states.QUEUED ) ) ).count()
+ # Check the hard limit first
+ if self.app.config.registered_user_job_limit:
+ # Cache the job count if necessary
+ if not self.user_job_count:
+ query = self.sa_session.execute(select([model.Job.table.c.user_id, func.count(model.Job.table.c.user_id)]) \
+ .where(and_(model.Job.table.c.state.in_((model.Job.states.QUEUED, model.Job.states.RUNNING)), (model.Job.table.c.user_id is not None))) \
+ .group_by(model.Job.table.c.user_id))
+ for row in query:
+ self.user_job_count[row[0]] = row[1]
+ if self.user_job_count.get(job.user_id, 0) >= self.app.config.registered_user_job_limit:
+ return JOB_WAIT
+ # If we pass the hard limit, also check the per-runner count
+ if job.job_runner_name in self.app.config.job_limits:
+ # Cache the job count if necessary
+ if job.job_runner_name not in self.user_job_count_per_runner:
+ self.user_job_count_per_runner[job.job_runner_name] = {}
+ query_url, limit = self.app.config.job_limits[job.job_runner_name]
+ base_query = select([model.Job.table.c.user_id, model.Job.table.c.job_runner_name, func.count(model.Job.table.c.user_id).label('job_count')]) \
+ .where(model.Job.table.c.state.in_((model.Job.states.QUEUED, model.Job.states.RUNNING))) \
+ .group_by(model.Job.table.c.user_id, model.Job.table.c.job_runner_name)
+ if '%' in query_url or '_' in query_url:
+ subq = base_query.having(model.Job.table.c.job_runner_name.like(query_url)).alias('subq')
+ query = self.sa_session.execute(select([subq.c.user_id, func.sum(subq.c.job_count).label('job_count')]).group_by(subq.c.user_id))
+ else:
+ query = self.sa_session.execute(base_query.having(model.Job.table.c.job_runner_name == query_url))
+ for row in query:
+ self.user_job_count_per_runner[job.job_runner_name][row['user_id']] = row['job_count']
+ if self.user_job_count_per_runner[job.job_runner_name].get(job.user_id, 0) >= self.app.config.job_limits[job.job_runner_name][1]:
+ return JOB_WAIT
elif job.galaxy_session:
- count = self.sa_session.query( model.Job ).enable_eagerloads( False ) \
- .filter( and_( model.Job.session_id == job.galaxy_session.id,
- or_( model.Job.state == model.Job.states.RUNNING,
- model.Job.state == model.Job.states.QUEUED ) ) ).count()
+ # Anonymous users only get the hard limit
+ if self.app.config.anonymous_user_job_limit:
+ count = self.sa_session.query( model.Job ).enable_eagerloads( False ) \
+ .filter( and_( model.Job.session_id == job.galaxy_session.id,
+ or_( model.Job.state == model.Job.states.RUNNING,
+ model.Job.state == model.Job.states.QUEUED ) ) ).count()
+ if count >= self.app.config.anonymous_user_job_limit:
+ return JOB_WAIT
else:
log.warning( 'Job %s is not associated with a user or session so job concurrency limit cannot be checked.' % job.id )
- return JOB_READY
- if count >= self.app.config.user_job_limit:
- return JOB_WAIT
return JOB_READY
def put( self, job_id, tool_id ):
diff -r ed0738c6001654d5456dd36579b278cd10fcd00c -r 73e05bc14cf1478b5ff9d8e8fffdf28d701dd2cb lib/galaxy/jobs/manager.py
--- a/lib/galaxy/jobs/manager.py
+++ b/lib/galaxy/jobs/manager.py
@@ -146,6 +146,7 @@
for job in jobs_to_check:
job.handler = self.__get_handler( job )
+ job.job_runner_name = self.__get_runner_url( job )
log.debug( "(%s) Job assigned to handler '%s'" % ( job.id, job.handler ) )
self.sa_session.add( job )
@@ -168,6 +169,14 @@
log.exception( "(%s) Caught exception attempting to get tool-specific job handler for tool '%s', selecting at random from available handlers instead:" % ( job.id, job.tool_id ) )
return random.choice( self.app.config.job_handlers )
+ def __get_runner_url( self, job ):
+ """This fetches the raw runner URL, and does not perform any computation e.g. for the dynamic runner"""
+ try:
+ return self.app.toolbox.tools_by_id.get( job.tool_id, None ).get_job_runner_url( job.params )
+ except Exception, e:
+ log.warning( 'Unable to determine job runner URL for job %s: %s' % (job.id, str(e)) )
+ return None
+
def put( self, job_id, tool ):
"""Add a job to the queue (by job identifier)"""
if not self.app.config.track_jobs_in_database:
diff -r ed0738c6001654d5456dd36579b278cd10fcd00c -r 73e05bc14cf1478b5ff9d8e8fffdf28d701dd2cb lib/galaxy/jobs/mapper.py
--- a/lib/galaxy/jobs/mapper.py
+++ b/lib/galaxy/jobs/mapper.py
@@ -14,8 +14,9 @@
(in the form of job_wrappers) to job runner url strings.
"""
- def __init__( self, job_wrapper ):
+ def __init__( self, job_wrapper, job_runner_name=None ):
self.job_wrapper = job_wrapper
+ self.job_runner_name = job_runner_name
self.rule_modules = self.__get_rule_modules( )
def __get_rule_modules( self ):
@@ -114,7 +115,11 @@
raise Exception( "Unhandled dynamic job runner type specified - %s" % expand_type )
def __cache_job_runner_url( self, params ):
- raw_job_runner_url = self.job_wrapper.tool.get_job_runner_url( params )
+ # If there's already a runner set in the Job object, don't overwrite from the tool
+ if self.job_runner_name is not None:
+ raw_job_runner_url = self.job_runner_name
+ else:
+ raw_job_runner_url = self.job_wrapper.tool.get_job_runner_url( params )
if raw_job_runner_url.startswith( DYNAMIC_RUNNER_PREFIX ):
job_runner_url = self.__expand_dynamic_job_runner_url( raw_job_runner_url[ len( DYNAMIC_RUNNER_PREFIX ) : ] )
else:
diff -r ed0738c6001654d5456dd36579b278cd10fcd00c -r 73e05bc14cf1478b5ff9d8e8fffdf28d701dd2cb lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -112,6 +112,7 @@
RUNNING = 'running',
OK = 'ok',
ERROR = 'error',
+ PAUSED = 'paused',
DELETED = 'deleted',
DELETED_NEW = 'deleted_new' )
# Please include an accessor (get/set pair) for any new columns/members.
diff -r ed0738c6001654d5456dd36579b278cd10fcd00c -r 73e05bc14cf1478b5ff9d8e8fffdf28d701dd2cb universe_wsgi.ini.sample
--- a/universe_wsgi.ini.sample
+++ b/universe_wsgi.ini.sample
@@ -659,12 +659,6 @@
# bytes). 0 for no limit.
#output_size_limit = 0
-# Jobs can be held back from submission to a runner if a user already has more
-# jobs queued or running than the number specified below. This prevents a
-# single user from stuffing the queue and preventing other users from being
-# able to run jobs.
-#user_job_limit = None
-
# Clustering Galaxy is not a straightforward process and requires some
# pre-configuration. See the wiki before attempting to set any of these
# options:
@@ -717,6 +711,36 @@
# Details" option in the history. Administrators can always see this.
#expose_dataset_path = False
+# -- Job Limiting
+
+# A hard limit on the total number of jobs a user can have running across all
+# configured job destinations can be configured here.
+#registered_user_job_limit = None
+#anonymous_user_job_limit = None
+
+# Additionally, jobs can be limited based on runner URLs (or matching of runner
+# URLs). Matching is via SQL's 'LIKE' operator, so the wildcard characters are
+# '_' and '%' (regex is not supported). Since the job runner code often
+# rewrites the URL once the job has been submitted to the cluster, you will
+# need to define how to match the runner URL stored in the database. When in
+# doubt, you can run a job and then examine the stored value of
+# 'job_runner_name' in the 'job' table of the database to see what you'll need
+# to match.
+#
+# For example, if default_cluster_job_runner is set to pbs:/// and the default
+# Torque cluster happens to be pbs.example.org, the job_runner_name is likely
+# to be stored as 'pbs://pbs.example.org/'. To limit the number of jobs a user
+# can run on this cluster to 4, use the following:
+#
+# pbs:/// = pbs://pbs.example.org/ 4
+#
+# An example that uses matching (if, for example, your runner URL contains
+# native options):
+#
+# drmaa:/// = drmaa://sge.example.org/% 4
+
+[galaxy:job_limits]
+
# ---- Per-Tool Job Management ----------------------------------------------
# Per-tool job handler and runner overrides. Parameters can be included to define multiple
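
To make the 'LIKE' matching described above concrete, here is a small, self-contained sketch of testing a configured pattern against stored job_runner_name values. The like_to_regex helper and the job_limits dict are illustrative stand-ins, not how Galaxy actually parses the [galaxy:job_limits] section:

    import re

    def like_to_regex( pattern ):
        # SQL LIKE: '%' matches any run of characters, '_' matches a single character.
        parts = []
        for ch in pattern:
            if ch == '%':
                parts.append( '.*' )
            elif ch == '_':
                parts.append( '.' )
            else:
                parts.append( re.escape( ch ) )
        return re.compile( '^%s$' % ''.join( parts ) )

    # Stand-in for one parsed [galaxy:job_limits] entry: raw URL -> ( pattern, limit ).
    job_limits = { 'drmaa:///': ( 'drmaa://sge.example.org/%', 4 ) }

    def over_limit( active_job_runner_names, raw_url ):
        if raw_url not in job_limits:
            return False
        pattern, limit = job_limits[ raw_url ]
        regex = like_to_regex( pattern )
        matching = [ name for name in active_job_runner_names if regex.match( name ) ]
        return len( matching ) >= limit

    stored = [ 'drmaa://sge.example.org/', 'drmaa://sge.example.org/-pe mpi 4',
               'pbs://pbs.example.org/' ]
    print over_limit( stored, 'drmaa:///' )  # False: 2 matches, limit is 4
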
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/ed0738c60016/
changeset: ed0738c60016
user: greg
date: 2012-11-13 20:36:40
summary: Relocate the tool shed's ShedCounter.
affected #: 3 files
diff -r 424d407c67f7ea4f830317b7ab033815415a5a3b -r ed0738c6001654d5456dd36579b278cd10fcd00c lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -1,7 +1,6 @@
import sys, os, tempfile, shutil, logging, string, urllib2
import galaxy.tools.data
from datetime import date, datetime, timedelta
-from time import strftime, gmtime
from galaxy import util
from galaxy.web import url_for
from galaxy.web.form_builder import SelectField
@@ -40,85 +39,6 @@
VALID_CHARS = set( string.letters + string.digits + "'\"-=_.()/+*^,:?!#[]%\\$@;{}" )
TOOL_SHED_ADMIN_CONTROLLER = 'TOOL_SHED_ADMIN_CONTROLLER'
-class ShedCounter( object ):
- def __init__( self, model ):
- # TODO: Enhance the ShedCounter to retrieve information from the db instead of displaying what's currently in memory.
- self.model = model
- self.generation_time = strftime( "%b %d, %Y", gmtime() )
- self.repositories = 0
- #self.new_repositories = 0
- self.deleted_repositories = 0
- self.invalid_tools = 0
- self.valid_tools = 0
- self.workflows = 0
- self.proprietary_datatypes = 0
- self.total_clones = 0
- self.generate_statistics()
- @property
- def sa_session( self ):
- """Returns a SQLAlchemy session"""
- return self.model.context
- def generate_statistics( self ):
- self.repositories = 0
- #self.new_repositories = 0
- self.deleted_repositories = 0
- self.invalid_tools = 0
- self.valid_tools = 0
- self.workflows = 0
- self.proprietary_datatypes = 0
- self.total_clones = 0
- for repository in self.sa_session.query( self.model.Repository ):
- self.repositories += 1
- self.total_clones += repository.times_downloaded
- is_deleted = repository.deleted
- #is_new = repository.is_new
- #if is_deleted and is_new:
- if is_deleted:
- self.deleted_repositories += 1
- # self.new_repositories += 1
- #elif is_deleted:
- # self.deleted_repositories += 1
- #elif is_new:
- # self.new_repositories += 1
- else:
- processed_guids = []
- processed_invalid_tool_configs = []
- processed_relative_workflow_paths = []
- processed_datatypes = []
- # A repository's metadata_revisions are those that ignore the value of the repository_metadata.downloadable column.
- for metadata_revision in repository.metadata_revisions:
- metadata = metadata_revision.metadata
- if 'tools' in metadata:
- tool_dicts = metadata[ 'tools' ]
- for tool_dict in tool_dicts:
- if 'guid' in tool_dict:
- guid = tool_dict[ 'guid' ]
- if guid not in processed_guids:
- self.valid_tools += 1
- processed_guids.append( guid )
- if 'invalid_tools' in metadata:
- invalid_tool_configs = metadata[ 'invalid_tools' ]
- for invalid_tool_config in invalid_tool_configs:
- if invalid_tool_config not in processed_invalid_tool_configs:
- self.invalid_tools += 1
- processed_invalid_tool_configs.append( invalid_tool_config )
- if 'datatypes' in metadata:
- datatypes = metadata[ 'datatypes' ]
- for datatypes_dict in datatypes:
- if 'extension' in datatypes_dict:
- extension = datatypes_dict[ 'extension' ]
- if extension not in processed_datatypes:
- self.proprietary_datatypes += 1
- processed_datatypes.append( extension )
- if 'workflows' in metadata:
- workflows = metadata[ 'workflows' ]
- for workflow_tup in workflows:
- relative_path, exported_workflow_dict = workflow_tup
- if relative_path not in processed_relative_workflow_paths:
- self.workflows += 1
- processed_relative_workflow_paths.append( relative_path )
- self.generation_time = strftime( "%b %d, %Y", gmtime() )
-
def add_to_shed_tool_config( app, shed_tool_conf_dict, elem_list ):
# A tool shed repository is being installed so change the shed_tool_conf file. Parse the config file to generate the entire list
# of config_elems instead of using the in-memory list since it will be a subset of the entire list if one or more repositories have
diff -r 424d407c67f7ea4f830317b7ab033815415a5a3b -r ed0738c6001654d5456dd36579b278cd10fcd00c lib/galaxy/webapps/community/model/mapping.py
--- a/lib/galaxy/webapps/community/model/mapping.py
+++ b/lib/galaxy/webapps/community/model/mapping.py
@@ -13,7 +13,7 @@
from galaxy.model.orm.ext.assignmapper import *
from galaxy.model.custom_types import *
from galaxy.util.bunch import Bunch
-from galaxy.util.shed_util import ShedCounter
+from galaxy.webapps.community.util.shed_statistics import *
from galaxy.webapps.community.util.hgweb_config import *
from galaxy.webapps.community.security import CommunityRBACAgent
diff -r 424d407c67f7ea4f830317b7ab033815415a5a3b -r ed0738c6001654d5456dd36579b278cd10fcd00c lib/galaxy/webapps/community/util/shed_statistics.py
--- /dev/null
+++ b/lib/galaxy/webapps/community/util/shed_statistics.py
@@ -0,0 +1,80 @@
+from time import strftime, gmtime
+
+class ShedCounter( object ):
+ def __init__( self, model ):
+ # TODO: Enhance the ShedCounter to retrieve information from the db instead of displaying what's currently in memory.
+ self.model = model
+ self.generation_time = strftime( "%b %d, %Y", gmtime() )
+ self.repositories = 0
+ #self.new_repositories = 0
+ self.deleted_repositories = 0
+ self.invalid_tools = 0
+ self.valid_tools = 0
+ self.workflows = 0
+ self.proprietary_datatypes = 0
+ self.total_clones = 0
+ self.generate_statistics()
+ @property
+ def sa_session( self ):
+ """Returns a SQLAlchemy session"""
+ return self.model.context
+ def generate_statistics( self ):
+ self.repositories = 0
+ #self.new_repositories = 0
+ self.deleted_repositories = 0
+ self.invalid_tools = 0
+ self.valid_tools = 0
+ self.workflows = 0
+ self.proprietary_datatypes = 0
+ self.total_clones = 0
+ for repository in self.sa_session.query( self.model.Repository ):
+ self.repositories += 1
+ self.total_clones += repository.times_downloaded
+ is_deleted = repository.deleted
+ #is_new = repository.is_new
+ #if is_deleted and is_new:
+ if is_deleted:
+ self.deleted_repositories += 1
+ # self.new_repositories += 1
+ #elif is_deleted:
+ # self.deleted_repositories += 1
+ #elif is_new:
+ # self.new_repositories += 1
+ else:
+ processed_guids = []
+ processed_invalid_tool_configs = []
+ processed_relative_workflow_paths = []
+ processed_datatypes = []
+ # A repository's metadata_revisions are those that ignore the value of the repository_metadata.downloadable column.
+ for metadata_revision in repository.metadata_revisions:
+ metadata = metadata_revision.metadata
+ if 'tools' in metadata:
+ tool_dicts = metadata[ 'tools' ]
+ for tool_dict in tool_dicts:
+ if 'guid' in tool_dict:
+ guid = tool_dict[ 'guid' ]
+ if guid not in processed_guids:
+ self.valid_tools += 1
+ processed_guids.append( guid )
+ if 'invalid_tools' in metadata:
+ invalid_tool_configs = metadata[ 'invalid_tools' ]
+ for invalid_tool_config in invalid_tool_configs:
+ if invalid_tool_config not in processed_invalid_tool_configs:
+ self.invalid_tools += 1
+ processed_invalid_tool_configs.append( invalid_tool_config )
+ if 'datatypes' in metadata:
+ datatypes = metadata[ 'datatypes' ]
+ for datatypes_dict in datatypes:
+ if 'extension' in datatypes_dict:
+ extension = datatypes_dict[ 'extension' ]
+ if extension not in processed_datatypes:
+ self.proprietary_datatypes += 1
+ processed_datatypes.append( extension )
+ if 'workflows' in metadata:
+ workflows = metadata[ 'workflows' ]
+ for workflow_tup in workflows:
+ relative_path, exported_workflow_dict = workflow_tup
+ if relative_path not in processed_relative_workflow_paths:
+ self.workflows += 1
+ processed_relative_workflow_paths.append( relative_path )
+ self.generation_time = strftime( "%b %d, %Y", gmtime() )
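
The TODO at the top of this class suggests computing the statistics in the database rather than iterating over every repository row in memory. A minimal sketch of that idea against a stand-in SQLite table; the table layout below is an assumption for illustration only, not the tool shed's actual schema:

    import sqlite3

    conn = sqlite3.connect( ':memory:' )
    conn.execute( "CREATE TABLE repository ( id INTEGER PRIMARY KEY, deleted INTEGER, times_downloaded INTEGER )" )
    conn.executemany( "INSERT INTO repository ( deleted, times_downloaded ) VALUES ( ?, ? )",
                      [ ( 0, 12 ), ( 1, 3 ), ( 0, 7 ) ] )
    # A single aggregate query replaces the per-row Python loop in generate_statistics().
    repositories, deleted_repositories, total_clones = conn.execute(
        """SELECT COUNT( * ),
                  SUM( CASE WHEN deleted THEN 1 ELSE 0 END ),
                  SUM( times_downloaded )
           FROM repository""" ).fetchone()
    print repositories, deleted_repositories, total_clones  # 3 1 22
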
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/424d407c67f7/
changeset: 424d407c67f7
user: inithello
date: 2012-11-13 19:56:59
summary: Fix for renaming repository.
affected #: 1 file
diff -r 447448028a2f7326fc452ee32055de5c58e822f6 -r 424d407c67f7ea4f830317b7ab033815415a5a3b lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -1676,8 +1676,7 @@
# Change the entry in the hgweb.config file for the repository.
old_lhs = "repos/%s/%s" % ( repository.user.username, repository.name )
new_lhs = "repos/%s/%s" % ( repository.user.username, repo_name )
- new_rhs = "%s\n" % repo_dir
- trans.app.hgweb_config_manager.change_entry( old_lhs, new_lhs, new_rhs )
+ trans.app.hgweb_config_manager.change_entry( old_lhs, new_lhs, repo_dir )
# Change the entry in the repository's hgrc file.
hgrc_file = os.path.join( repo_dir, '.hg', 'hgrc' )
self.__change_repository_name_in_hgrc_file( hgrc_file, repo_name )
commit/galaxy-central: greg: Add the framework components to enable creating a functional test framework for the tool shed.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/447448028a2f/
changeset: 447448028a2f
user: greg
date: 2012-11-13 19:49:09
summary: Add the framework components to enable creating a functional test framework for the tool shed.
affected #: 9 files
diff -r 3419a45c1f01790b3108d1726253fcb46feedbf4 -r 447448028a2f7326fc452ee32055de5c58e822f6 run_tool_shed_functional_tests.sh
--- /dev/null
+++ b/run_tool_shed_functional_tests.sh
@@ -0,0 +1,15 @@
+#!/bin/sh
+
+# A good place to look for nose info: http://somethingaboutorange.com/mrl/projects/nose/
+#rm -f ./test/tool_shed/run_functional_tests.log
+
+if [ ! $1 ]; then
+ python ./test/tool_shed/functional_tests.py -v --with-nosehtml --html-report-file ./test/tool_shed/run_functional_tests.html ./test/tool_shed/functional
+elif [ $1 = 'help' ]; then
+ echo "'run_tool_shed_functional_tests.sh' for running all the test scripts in the ./test/tool_shed/functional directory"
+ echo "'run_tool_shed_functional_tests.sh testscriptname' for running one test script named testscriptname in the ./test/tool_shed/functional directory"
+else
+ python ./test/tool_shed/functional_tests.py -v --with-nosehtml --html-report-file ./test/tool_shed/run_functional_tests.html $1
+fi
+
+echo "'sh run_tool_shed_functional_tests.sh help' for help"
diff -r 3419a45c1f01790b3108d1726253fcb46feedbf4 -r 447448028a2f7326fc452ee32055de5c58e822f6 test/tool_shed/__init__.py
--- /dev/null
+++ b/test/tool_shed/__init__.py
@@ -0,0 +1,1 @@
+"""Tool shed functional Tests"""
\ No newline at end of file
diff -r 3419a45c1f01790b3108d1726253fcb46feedbf4 -r 447448028a2f7326fc452ee32055de5c58e822f6 test/tool_shed/base/test_db_util.py
--- /dev/null
+++ b/test/tool_shed/base/test_db_util.py
@@ -0,0 +1,39 @@
+import galaxy.webapps.community.model as model
+from galaxy.model.orm import *
+from galaxy.webapps.community.model.mapping import context as sa_session
+from base.twilltestcase import *
+import sys
+
+def delete_obj( obj ):
+ sa_session.delete( obj )
+ sa_session.flush()
+def delete_user_roles( user ):
+ for ura in user.roles:
+ sa_session.delete( ura )
+ sa_session.flush()
+def flush( obj ):
+ sa_session.add( obj )
+ sa_session.flush()
+def get_default_user_permissions_by_role( role ):
+ return sa_session.query( model.DefaultUserPermissions ) \
+ .filter( model.DefaultUserPermissions.table.c.role_id == role.id ) \
+ .all()
+def get_default_user_permissions_by_user( user ):
+ return sa_session.query( model.DefaultUserPermissions ) \
+ .filter( model.DefaultUserPermissions.table.c.user_id==user.id ) \
+ .all()
+def get_private_role( user ):
+ for role in user.all_roles():
+ if role.name == user.email and role.description == 'Private Role for %s' % user.email:
+ return role
+ raise AssertionError( "Private role not found for user '%s'" % user.email )
+def get_user( email ):
+ return sa_session.query( model.User ) \
+ .filter( model.User.table.c.email==email ) \
+ .first()
+def mark_obj_deleted( obj ):
+ obj.deleted = True
+ sa_session.add( obj )
+ sa_session.flush()
+def refresh( obj ):
+ sa_session.refresh( obj )
diff -r 3419a45c1f01790b3108d1726253fcb46feedbf4 -r 447448028a2f7326fc452ee32055de5c58e822f6 test/tool_shed/base/twilltestcase.py
--- /dev/null
+++ b/test/tool_shed/base/twilltestcase.py
@@ -0,0 +1,20 @@
+from base.twilltestcase import *
+
+class ShedTwillTestCase( TwillTestCase ):
+ def setUp( self ):
+ # Security helper
+ self.security = security.SecurityHelper( id_secret='changethisinproductiontoo' )
+ self.history_id = None
+ self.host = os.environ.get( 'TOOL_SHED_TEST_HOST' )
+ self.port = os.environ.get( 'TOOL_SHED_TEST_PORT' )
+ self.url = "http://%s:%s" % ( self.host, self.port )
+ self.file_dir = os.environ.get( 'TOOL_SHED_TEST_FILE_DIR', None )
+ self.tool_shed_test_file = None
+ self.shed_tools_dict = {}
+ self.keepOutdir = os.environ.get( 'TOOL_SHED_TEST_SAVE', '' )
+ if self.keepOutdir > '':
+ try:
+ os.makedirs( self.keepOutdir )
+ except:
+ pass
+ self.home()
diff -r 3419a45c1f01790b3108d1726253fcb46feedbf4 -r 447448028a2f7326fc452ee32055de5c58e822f6 test/tool_shed/functional/test_0000_create_repository.py
--- /dev/null
+++ b/test/tool_shed/functional/test_0000_create_repository.py
@@ -0,0 +1,44 @@
+import tempfile, time, re, tempfile, os, shutil
+import galaxy.webapps.community.model
+from galaxy.util import parse_xml, string_as_bool
+from galaxy.util.shed_util import clean_tool_shed_url
+from galaxy.model.orm import *
+from tool_shed.base.twilltestcase import *
+from tool_shed.base.test_db_util import *
+
+admin_user = None
+admin_user_private_role = None
+admin_email = 'test@bx.psu.edu'
+admin_username = 'admin-user'
+
+class TestCreateRepository( ShedTwillTestCase ):
+
+ def test_0000_initiate_users( self ):
+ """Create necessary users and login as an admin user."""
+ self.logout()
+ self.login( email=admin_email, username=admin_username )
+ admin_user = get_user( admin_email )
+ assert admin_user is not None, 'Problem retrieving user with email %s from the database' % admin_email
+ admin_user_private_role = get_private_role( admin_user )
+ def test_0005_create_category( self ):
+ """Create a category"""
+ self.visit_url( '/admin/manage_categories?operation=create' )
+ try:
+ tc.fv( "1", "name", "Text Manipulation" )
+ tc.fv( "1", "description", "Tools for manipulating text" )
+ tc.submit( "create_category_button" )
+ except Exception, e:
+ errmsg = "Problem creating a category: %s" % str( e )
+ raise AssertionError( errmsg )
+ def test_0010_create_filter_repository( self ):
+ """Create a repository"""
+ self.visit_url( '/repository/create_repository' )
+ try:
+ tc.fv( "1", "name", "filter" )
+ tc.fv( "1", "description", "Galaxy's filter tool" )
+ tc.fv( "1", "long_description", "Long description of Galaxy's filter tool" )
+ tc.fv( "1", "category_id", "Text Manipulation" )
+ tc.submit( "create_repository_button" )
+ except Exception, e:
+ errmsg = "Problem creating a repository: %s" % str( e )
+ raise AssertionError( errmsg )
diff -r 3419a45c1f01790b3108d1726253fcb46feedbf4 -r 447448028a2f7326fc452ee32055de5c58e822f6 test/tool_shed/functional_tests.py
--- /dev/null
+++ b/test/tool_shed/functional_tests.py
@@ -0,0 +1,274 @@
+#!/usr/bin/env python
+
+import os, sys, shutil, tempfile, re
+
+# Assume we are run from the galaxy root directory, add lib to the python path
+cwd = os.getcwd()
+tool_shed_home_directory = os.path.join( cwd, 'test', 'tool_shed' )
+default_tool_shed_test_file_dir = os.path.join( tool_shed_home_directory, 'test_data' )
+new_path = [ os.path.join( cwd, "lib" ) ]
+new_path.extend( sys.path[1:] )
+sys.path = new_path
+
+from galaxy import eggs
+
+eggs.require( "nose" )
+eggs.require( "NoseHTML" )
+eggs.require( "NoseTestDiff" )
+eggs.require( "twill==0.9" )
+eggs.require( "Paste" )
+eggs.require( "PasteDeploy" )
+eggs.require( "Cheetah" )
+
+# This should not be required, but it is under certain conditions, thanks to this bug: http://code.google.com/p/python-nose/issues/detail?id=284
+eggs.require( "pysqlite" )
+
+import atexit, logging, os, os.path, sys, tempfile
+import twill, unittest, time
+import sys, threading, random
+import httplib, socket
+from paste import httpserver
+import galaxy.webapps.community.app
+from galaxy.webapps.community.app import UniverseApplication
+from galaxy.webapps.community import buildapp
+
+import nose.core
+import nose.config
+import nose.loader
+import nose.plugins.manager
+
+log = logging.getLogger( "tool_shed_functional_tests.py" )
+
+default_tool_shed_test_host = "localhost"
+default_tool_shed_test_port_min = 8000
+default_tool_shed_test_port_max = 9999
+default_tool_shed_locales = 'en'
+
+def run_tests( test_config ):
+ loader = nose.loader.TestLoader( config=test_config )
+ plug_loader = test_config.plugins.prepareTestLoader( loader )
+ if plug_loader is not None:
+ loader = plug_loader
+ tests = loader.loadTestsFromNames( test_config.testNames )
+ test_runner = nose.core.TextTestRunner( stream=test_config.stream,
+ verbosity=test_config.verbosity,
+ config=test_config )
+ plug_runner = test_config.plugins.prepareTestRunner( test_runner )
+ if plug_runner is not None:
+ test_runner = plug_runner
+ return test_runner.run( tests )
+
+def main():
+ # ---- Configuration ------------------------------------------------------
+ tool_shed_test_host = os.environ.get( 'TOOL_SHED_TEST_HOST', default_tool_shed_test_host )
+ tool_shed_test_port = os.environ.get( 'TOOL_SHED_TEST_PORT', None )
+ tool_shed_test_save = os.environ.get( 'TOOL_SHED_TEST_SAVE', None )
+ tool_path = os.environ.get( 'TOOL_SHED_TEST_TOOL_PATH', 'tools' )
+ start_server = 'TOOL_SHED_TEST_EXTERNAL' not in os.environ
+ if 'HTTP_ACCEPT_LANGUAGE' not in os.environ:
+ os.environ[ 'HTTP_ACCEPT_LANGUAGE' ] = default_tool_shed_locales
+ tool_shed_test_file_dir = os.environ.get( 'TOOL_SHED_TEST_FILE_DIR', default_tool_shed_test_file_dir )
+ if not os.path.isabs( tool_shed_test_file_dir ):
+ tool_shed_test_file_dir = os.path.abspath( tool_shed_test_file_dir )
+ ignore_files = ()
+ if os.path.exists( 'tool_data_table_conf.test.xml' ):
+ tool_data_table_config_path = 'tool_data_table_conf.test.xml'
+ else:
+ tool_data_table_config_path = 'tool_data_table_conf.xml'
+ shed_tool_data_table_config = 'shed_tool_data_table_conf.xml'
+ tool_dependency_dir = os.environ.get( 'TOOL_SHED_TOOL_DEPENDENCY_DIR', None )
+ use_distributed_object_store = os.environ.get( 'TOOL_SHED_USE_DISTRIBUTED_OBJECT_STORE', False )
+
+ if start_server:
+ psu_production = False
+ tool_shed_test_proxy_port = None
+ if 'TOOL_SHED_TEST_PSU_PRODUCTION' in os.environ:
+ if not tool_shed_test_port:
+ raise Exception( 'Set TOOL_SHED_TEST_PORT to the port to which the proxy server will proxy' )
+ tool_shed_test_proxy_port = os.environ.get( 'TOOL_SHED_TEST_PROXY_PORT', None )
+ if not tool_shed_test_proxy_port:
+ raise Exception( 'Set TOOL_SHED_TEST_PROXY_PORT to the port on which the proxy server is listening' )
+ base_file_path = os.environ.get( 'TOOL_SHED_TEST_BASE_FILE_PATH', None )
+ if not base_file_path:
+ raise Exception( 'Set TOOL_SHED_TEST_BASE_FILE_PATH to the directory which will contain the dataset files directory' )
+ base_new_file_path = os.environ.get( 'TOOL_SHED_TEST_BASE_NEW_FILE_PATH', None )
+ if not base_new_file_path:
+ raise Exception( 'Set TOOL_SHED_TEST_BASE_NEW_FILE_PATH to the directory which will contain the temporary directory' )
+ database_connection = os.environ.get( 'TOOL_SHED_TEST_DBURI', None )
+ if not database_connection:
+ raise Exception( 'Set TOOL_SHED_TEST_DBURI to the URI of the database to be used for tests' )
+ nginx_upload_store = os.environ.get( 'TOOL_SHED_TEST_NGINX_UPLOAD_STORE', None )
+ if not nginx_upload_store:
+ raise Exception( 'Set TOOL_SHED_TEST_NGINX_UPLOAD_STORE to the path where the nginx upload module places uploaded files' )
+ file_path = tempfile.mkdtemp( dir=base_file_path )
+ new_file_path = tempfile.mkdtemp( dir=base_new_file_path )
+ kwargs = dict( database_engine_option_pool_size = '10',
+ database_engine_option_max_overflow = '20',
+ database_engine_option_strategy = 'threadlocal',
+ static_enabled = 'False',
+ debug = 'False' )
+ psu_production = True
+ else:
+ if 'TOOL_SHED_TEST_DBPATH' in os.environ:
+ db_path = os.environ[ 'TOOL_SHED_TEST_DBPATH' ]
+ else:
+ tempdir = tempfile.mkdtemp()
+ db_path = os.path.join( tempdir, 'database' )
+ file_path = os.path.join( db_path, 'files' )
+ new_file_path = os.path.join( db_path, 'tmp' )
+ if 'TOOL_SHED_TEST_DBURI' in os.environ:
+ database_connection = os.environ[ 'TOOL_SHED_TEST_DBURI' ]
+ else:
+ database_connection = 'sqlite:///' + os.path.join( db_path, 'universe.sqlite' )
+ kwargs = {}
+ for dir in file_path, new_file_path:
+ try:
+ os.makedirs( dir )
+ except OSError:
+ pass
+
+ print "Database connection:", database_connection
+
+ # ---- Build Application --------------------------------------------------
+ app = None
+ if start_server:
+ global_conf = { '__file__' : 'community_wsgi.ini.sample' }
+ if psu_production:
+ global_conf = None
+ if not database_connection.startswith( 'sqlite://' ):
+ kwargs[ 'database_engine_option_max_overflow' ] = '20'
+ if tool_dependency_dir is not None:
+ kwargs[ 'tool_dependency_dir' ] = tool_dependency_dir
+ if use_distributed_object_store:
+ kwargs[ 'object_store' ] = 'distributed'
+ kwargs[ 'distributed_object_store_config_file' ] = 'distributed_object_store_conf.xml.sample'
+
+ app = UniverseApplication( job_queue_workers = 5,
+ id_secret = 'changethisinproductiontoo',
+ template_path = 'templates',
+ database_connection = database_connection,
+ database_engine_option_pool_size = '10',
+ file_path = file_path,
+ new_file_path = new_file_path,
+ tool_path=tool_path,
+ datatype_converters_config_file = 'datatype_converters_conf.xml.sample',
+ tool_parse_help = False,
+ tool_data_table_config_path = tool_data_table_config_path,
+ shed_tool_data_table_config = shed_tool_data_table_config,
+ log_destination = "stdout",
+ use_heartbeat = False,
+ allow_user_creation = True,
+ allow_user_deletion = True,
+ admin_users = 'test@bx.psu.edu',
+ global_conf = global_conf,
+ running_functional_tests = True,
+ hgweb_config_dir = new_file_path,
+ **kwargs )
+ log.info( "Embedded Universe application started" )
+
+ # ---- Run webserver ------------------------------------------------------
+ server = None
+ if start_server:
+ webapp = buildapp.app_factory( dict( database_file=database_connection ),
+ use_translogger=False,
+ static_enabled=False,
+ app=app )
+ if tool_shed_test_port is not None:
+ server = httpserver.serve( webapp, host=tool_shed_test_host, port=tool_shed_test_port, start_loop=False )
+ else:
+ random.seed()
+ for i in range( 0, 9 ):
+ try:
+ tool_shed_test_port = str( random.randint( default_tool_shed_test_port_min, default_tool_shed_test_port_max ) )
+ log.debug( "Attempting to serve app on randomly chosen port: %s" % tool_shed_test_port )
+ server = httpserver.serve( webapp, host=tool_shed_test_host, port=tool_shed_test_port, start_loop=False )
+ break
+ except socket.error, e:
+ if e[0] == 98:
+ continue
+ raise
+ else:
+ raise Exception( "Unable to open a port between %s and %s to start Galaxy server" % ( default_tool_shed_test_port_min, default_tool_shed_test_port_max ) )
+ if tool_shed_test_proxy_port:
+ os.environ[ 'TOOL_SHED_TEST_PORT' ] = tool_shed_test_proxy_port
+ else:
+ os.environ[ 'TOOL_SHED_TEST_PORT' ] = tool_shed_test_port
+ t = threading.Thread( target=server.serve_forever )
+ t.start()
+ # Test if the server is up
+ for i in range( 10 ):
+ # Directly test the app, not the proxy.
+ conn = httplib.HTTPConnection( tool_shed_test_host, tool_shed_test_port )
+ conn.request( "GET", "/" )
+ if conn.getresponse().status == 200:
+ break
+ time.sleep( 0.1 )
+ else:
+ raise Exception( "Test HTTP server did not return '200 OK' after 10 tries" )
+ # Test if the proxy server is up.
+ if psu_production:
+ # Directly test the app, not the proxy.
+ conn = httplib.HTTPConnection( tool_shed_test_host, tool_shed_test_proxy_port )
+ conn.request( "GET", "/" )
+ if not conn.getresponse().status == 200:
+ raise Exception( "Test HTTP proxy server did not return '200 OK'" )
+ log.info( "Embedded web server started" )
+ # We don't add the tests to the path until everything is up and running
+ new_path = [ os.path.join( cwd, 'test' ) ]
+ new_path.extend( sys.path[1:] )
+ sys.path = new_path
+ # ---- Find tests ---------------------------------------------------------
+ if tool_shed_test_proxy_port:
+ log.info( "Functional tests will be run against %s:%s" % ( tool_shed_test_host, tool_shed_test_proxy_port ) )
+ else:
+ log.info( "Functional tests will be run against %s:%s" % ( tool_shed_test_host, tool_shed_test_port ) )
+ success = False
+ try:
+ # What requires these? Handy for (eg) functional tests to save outputs?
+ if tool_shed_test_save:
+ os.environ[ 'TOOL_SHED_TEST_SAVE' ] = tool_shed_test_save
+ # Passed in through the script-set environment; will leave a copy of ALL test validate files.
+ os.environ[ 'TOOL_SHED_TEST_HOST' ] = tool_shed_test_host
+ if tool_shed_test_file_dir:
+ os.environ[ 'TOOL_SHED_TEST_FILE_DIR' ] = tool_shed_test_file_dir
+ test_config = nose.config.Config( env=os.environ, ignoreFiles=ignore_files, plugins=nose.plugins.manager.DefaultPluginManager() )
+ test_config.configure( sys.argv )
+ # Run the tests.
+ result = run_tests( test_config )
+ success = result.wasSuccessful()
+ except:
+ log.exception( "Failure running tests" )
+
+ log.info( "Shutting down" )
+ # ---- Tear down -----------------------------------------------------------
+ if server:
+ log.info( "Shutting down embedded web server" )
+ server.server_close()
+ server = None
+ log.info( "Embedded web server stopped" )
+ if app:
+ log.info( "Shutting down app" )
+ app.shutdown()
+ app = None
+ log.info( "Embedded Universe application stopped" )
+ try:
+ if os.path.exists( tempdir ) and 'TOOL_SHED_TEST_NO_CLEANUP' not in os.environ:
+ log.info( "Cleaning up temporary files in %s" % tempdir )
+ shutil.rmtree( tempdir )
+ except:
+ pass
+ if psu_production and 'TOOL_SHED_TEST_NO_CLEANUP' not in os.environ:
+ for dir in ( file_path, new_file_path ):
+ try:
+ if os.path.exists( dir ):
+ log.info( 'Cleaning up temporary files in %s' % dir )
+ shutil.rmtree( dir )
+ except:
+ pass
+ if success:
+ return 0
+ else:
+ return 1
+
+if __name__ == "__main__":
+ sys.exit( main() )
diff -r 3419a45c1f01790b3108d1726253fcb46feedbf4 -r 447448028a2f7326fc452ee32055de5c58e822f6 test/tool_shed/test_data/filtering_1.1.0.tar
Binary file test/tool_shed/test_data/filtering_1.1.0.tar has changed
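
For reference, a hypothetical driver showing how the environment variables read by functional_tests.py fit together when pointing the suite at an already-running tool shed; the host and port values below are placeholders:

    import os, subprocess

    # Setting TOOL_SHED_TEST_EXTERNAL skips starting the embedded server.
    os.environ[ 'TOOL_SHED_TEST_EXTERNAL' ] = '1'
    os.environ[ 'TOOL_SHED_TEST_HOST' ] = 'localhost'  # placeholder
    os.environ[ 'TOOL_SHED_TEST_PORT' ] = '9009'       # placeholder
    subprocess.call( [ 'python', './test/tool_shed/functional_tests.py', '-v',
                       './test/tool_shed/functional' ] )
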
commit/galaxy-central: greg: Create a new, empty hgweb.config file if one isn't found in the configured location.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/3419a45c1f01/
changeset: 3419a45c1f01
user: greg
date: 2012-11-13 19:45:03
summary: Create a new, empty hgweb.config file if one isn't found in the configured location.
affected #: 2 files
diff -r 4471a94e8fbff8d8c165c58f0f76867500953b82 -r 3419a45c1f01790b3108d1726253fcb46feedbf4 lib/galaxy/webapps/community/app.py
--- a/lib/galaxy/webapps/community/app.py
+++ b/lib/galaxy/webapps/community/app.py
@@ -49,5 +49,6 @@
# Let the HgwebConfigManager know where the hgweb.config file is located.
self.hgweb_config_manager = self.model.hgweb_config_manager
self.hgweb_config_manager.hgweb_config_dir = self.config.hgweb_config_dir
+ print >> sys.stderr, "Tool shed hgweb.config file is: ", self.hgweb_config_manager.hgweb_config
def shutdown( self ):
pass
diff -r 4471a94e8fbff8d8c165c58f0f76867500953b82 -r 3419a45c1f01790b3108d1726253fcb46feedbf4 lib/galaxy/webapps/community/util/hgweb_config.py
--- a/lib/galaxy/webapps/community/util/hgweb_config.py
+++ b/lib/galaxy/webapps/community/util/hgweb_config.py
@@ -4,6 +4,11 @@
log = logging.getLogger( __name__ )
+new_hgweb_config_template = """
+[paths]
+
+"""
+
class HgWebConfigManager( object ):
def __init__( self ):
self.hgweb_config_dir = None
@@ -41,7 +46,13 @@
def hgweb_config( self ):
hgweb_config = os.path.join( self.hgweb_config_dir, 'hgweb.config' )
if not os.path.exists( hgweb_config ):
- raise Exception( "Required file %s does not exist - check config setting for hgweb_config_dir." % hgweb_config )
+ # We used to raise an exception here...
+ # raise Exception( "Required file %s does not exist - check config setting for hgweb_config_dir." % hgweb_config )
+ # ...but now we just log the missing file and create a new empty one.
+ log.debug( "Required file %s does not exist, so creating a new, empty file. Check your config setting for hgweb_config_dir." % hgweb_config )
+ hgweb_config_file = open( hgweb_config, 'wb' )
+ hgweb_config_file.write( new_hgweb_config_template )
+ hgweb_config_file.close()
return os.path.abspath( hgweb_config )
def make_backup( self ):
# Make a backup of the hgweb.config file.
commit/galaxy-central: dannon: Modules.py: Fix sa_session.add() for PJAs, remove unused imports, whitespace cleanup.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/4471a94e8fbf/
changeset: 4471a94e8fbf
user: dannon
date: 2012-11-13 19:07:32
summary: Modules.py: Fix sa_session.add() for PJAs, remove unused imports, whitespace cleanup.
affected #: 1 file
diff -r 5013377e0bf7a656ea593098f1d1b38f3d6928c6 -r 4471a94e8fbff8d8c165c58f0f76867500953b82 lib/galaxy/workflow/modules.py
--- a/lib/galaxy/workflow/modules.py
+++ b/lib/galaxy/workflow/modules.py
@@ -3,7 +3,6 @@
from galaxy import web
from galaxy.tools.parameters import DataToolParameter, DummyDataset, RuntimeValue, check_param, visit_input_values
from galaxy.tools import DefaultToolState
-from galaxy.tools.parameters.grouping import Repeat, Conditional
from galaxy.util.bunch import Bunch
from galaxy.util.json import from_json_string, to_json_string
from galaxy.jobs.actions.post import ActionBox
@@ -13,12 +12,12 @@
log = logging.getLogger( __name__ )
class WorkflowModule( object ):
-
+
def __init__( self, trans ):
self.trans = trans
-
+
## ---- Creating modules from various representations ---------------------
-
+
@classmethod
def new( Class, trans, tool_id=None ):
"""
@@ -37,12 +36,12 @@
return Class( trans )
## ---- Saving in various forms ------------------------------------------
-
+
def save_to_step( self, step ):
step.type = self.type
-
+
## ---- General attributes -----------------------------------------------
-
+
def get_type( self ):
return self.type
def get_name( self ):
@@ -51,9 +50,9 @@
return None
def get_tooltip( self, static_path='' ):
return None
-
+
## ---- Configuration time -----------------------------------------------
-
+
def get_state( self ):
return None
def get_errors( self ):
@@ -66,16 +65,16 @@
pass
def get_config_form( self ):
raise TypeError( "Abstract method" )
-
+
def check_and_update_state( self ):
"""
If the state is not in sync with the current implementation of the
module, try to update. Returns a list of messages to be displayed
"""
pass
-
+
## ---- Run time ---------------------------------------------------------
-
+
def get_runtime_inputs( self ):
raise TypeError( "Abstract method" )
def get_runtime_state( self ):
@@ -86,7 +85,7 @@
raise TypeError( "Abstract method" )
def update_runtime_state( self, trans, state, values ):
raise TypeError( "Abstract method" )
-
+
def execute( self, trans, state ):
raise TypeError( "Abstract method" )
@@ -112,6 +111,7 @@
if step.tool_inputs and "name" in step.tool_inputs:
module.state['name'] = step.tool_inputs[ 'name' ]
return module
+
def save_to_step( self, step ):
step.type = self.type
step.tool_id = None
@@ -119,8 +119,10 @@
def get_data_inputs( self ):
return []
+
def get_data_outputs( self ):
return [ dict( name='output', extensions=['input'] ) ]
+
def get_config_form( self ):
form = web.FormBuilder( title=self.name ) \
.add_text( "name", "Name", value=self.state['name'] )
@@ -128,25 +130,29 @@
module=self, form=form )
def get_state( self, secure=True ):
return to_json_string( self.state )
-
+
def update_state( self, incoming ):
self.state['name'] = incoming.get( 'name', 'Input Dataset' )
-
+
def get_runtime_inputs( self, filter_set=['data'] ):
label = self.state.get( "name", "Input Dataset" )
return dict( input=DataToolParameter( None, Element( "param", name="input", label=label, multiple=True, type="data", format=', '.join(filter_set) ), self.trans ) )
+
def get_runtime_state( self ):
state = DefaultToolState()
state.inputs = dict( input=None )
return state
+
def encode_runtime_state( self, trans, state ):
fake_tool = Bunch( inputs = self.get_runtime_inputs() )
return state.encode( fake_tool, trans.app )
+
def decode_runtime_state( self, trans, string ):
fake_tool = Bunch( inputs = self.get_runtime_inputs() )
state = DefaultToolState()
state.decode( string, fake_tool, trans.app )
return state
+
def update_runtime_state( self, trans, state, values ):
errors = {}
for name, param in self.get_runtime_inputs().iteritems():
@@ -155,14 +161,14 @@
if error:
errors[ name ] = error
return errors
-
+
def execute( self, trans, state ):
return None, dict( output=state.inputs['input'])
-
+
class ToolModule( WorkflowModule ):
-
+
type = "tool"
-
+
def __init__( self, trans, tool_id ):
self.trans = trans
self.tool_id = tool_id
@@ -175,11 +181,13 @@
else:
self.errors = {}
self.errors[ tool_id ] = 'Tool unavailable'
+
@classmethod
def new( Class, trans, tool_id=None ):
module = Class( trans, tool_id )
module.state = module.tool.new_state( trans, all_pages=True )
return module
+
@classmethod
def from_dict( Class, trans, d, secure=True ):
tool_id = d[ 'tool_id' ]
@@ -191,6 +199,7 @@
module.post_job_actions = d.get( "post_job_actions", {} )
module.workflow_outputs = d.get( "workflow_outputs", [] )
return module
+
@classmethod
def from_workflow_step( Class, trans, step ):
tool_id = step.tool_id
@@ -215,12 +224,14 @@
module.post_job_actions = pjadict
return module
return None
+
@classmethod
def __get_tool_version( cls, trans, tool_id ):
# Return a ToolVersion if one exists for tool_id.
return trans.sa_session.query( trans.app.model.ToolVersion ) \
.filter( trans.app.model.ToolVersion.table.c.tool_id == tool_id ) \
.first()
+
def save_to_step( self, step ):
step.type = self.type
step.tool_id = self.tool_id
@@ -241,24 +252,31 @@
action_arguments = v['action_arguments']
else:
action_arguments = None
- n_p = PostJobAction(v['action_type'], step, output_name, action_arguments)
+ self.trans.sa_session.add(PostJobAction(v['action_type'], step, output_name, action_arguments))
+
def get_name( self ):
if self.tool:
return self.tool.name
return 'unavailable'
+
def get_tool_id( self ):
return self.tool_id
+
def get_tool_version( self ):
return self.tool.version
+
def get_state( self, secure=True ):
return self.state.encode( self.tool, self.trans.app, secure=secure )
+
def get_errors( self ):
return self.errors
+
def get_tooltip( self, static_path='' ):
if self.tool.help:
return self.tool.help.render( static_path=static_path )
else:
return None
+
def get_data_inputs( self ):
data_inputs = []
def callback( input, value, prefixed_name, prefixed_label ):
@@ -270,6 +288,7 @@
extensions=input.extensions ) )
visit_input_values( self.tool.inputs, self.state.inputs, callback )
return data_inputs
+
def get_data_outputs( self ):
data_outputs = []
data_inputs = None
@@ -293,20 +312,23 @@
formats.append( format )
data_outputs.append( dict( name=name, extensions=formats ) )
return data_outputs
+
def get_post_job_actions( self ):
return self.post_job_actions
+
def get_config_form( self ):
self.add_dummy_datasets()
- return self.trans.fill_template( "workflow/editor_tool_form.mako",
+ return self.trans.fill_template( "workflow/editor_tool_form.mako",
tool=self.tool, values=self.state.inputs, errors=( self.errors or {} ) )
- def update_state( self, incoming ):
+
+ def update_state( self, incoming ):
# Build a callback that handles setting an input to be required at
# runtime. We still process all other parameters the user might have
# set. We also need to make sure all datasets have a dummy value
# for dependencies to see
-
+
self.post_job_actions = ActionBox.handle_incoming(incoming)
-
+
make_runtime_key = incoming.get( 'make_runtime', None )
make_buildtime_key = incoming.get( 'make_buildtime', None )
def item_callback( trans, key, input, value, error, old_value, context ):
@@ -328,8 +350,10 @@
# Update state using incoming values
errors = self.tool.update_state( self.trans, self.tool.inputs, self.state.inputs, incoming, item_callback=item_callback )
self.errors = errors or None
+
def check_and_update_state( self ):
return self.tool.check_and_update_param_values( self.state.inputs, self.trans )
+
def add_dummy_datasets( self, connections=None):
if connections:
# Store connections by input name
@@ -348,11 +372,12 @@
else:
replacement = DummyDataset()
return replacement
- visit_input_values( self.tool.inputs, self.state.inputs, callback )
+ visit_input_values( self.tool.inputs, self.state.inputs, callback )
class WorkflowModuleFactory( object ):
def __init__( self, module_types ):
self.module_types = module_types
+
def new( self, trans, type, tool_id=None ):
"""
Return module for type and (optional) tool_id initialized with
@@ -360,18 +385,20 @@
"""
assert type in self.module_types
return self.module_types[type].new( trans, tool_id )
+
def from_dict( self, trans, d, **kwargs ):
"""
Return module initialized from the data in dictionary `d`.
"""
type = d['type']
assert type in self.module_types
- return self.module_types[type].from_dict( trans, d, **kwargs )
+ return self.module_types[type].from_dict( trans, d, **kwargs )
+
def from_workflow_step( self, trans, step ):
"""
Return module initialized from the WorkflowStep object `step`.
"""
type = step.type
return self.module_types[type].from_workflow_step( trans, step )
-
+
module_factory = WorkflowModuleFactory( dict( data_input=InputDataModule, tool=ToolModule ) )
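
The PostJobAction fix above matters because a newly constructed mapped object is not persisted unless it is added to the session. A stand-alone SQLAlchemy illustration of the difference; the toy model and in-memory engine are assumptions for this example, not Galaxy's actual mapping:

    from sqlalchemy import create_engine, Column, Integer, String
    from sqlalchemy.orm import sessionmaker
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    class PostJobAction( Base ):
        __tablename__ = 'post_job_action'
        id = Column( Integer, primary_key=True )
        action_type = Column( String )

    engine = create_engine( 'sqlite:///:memory:' )
    Base.metadata.create_all( engine )
    session = sessionmaker( bind=engine )()

    PostJobAction( action_type='RenameDatasetAction' )               # constructed but never added: lost
    session.add( PostJobAction( action_type='HideDatasetAction' ) )  # tracked by the session
    session.commit()
    print session.query( PostJobAction ).count()  # 1
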
commit/galaxy-central: dannon: Fix for rendering workflow tooltips when tool.help is nonexistent.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/5013377e0bf7/
changeset: 5013377e0bf7
user: dannon
date: 2012-11-13 18:26:55
summary: Fix for rendering workflow tooltips when tool.help is nonexistent.
affected #: 1 file
diff -r d3ac39a6f8d7a5f95ffabb4ba8f8a846b17ac15a -r 5013377e0bf7a656ea593098f1d1b38f3d6928c6 lib/galaxy/workflow/modules.py
--- a/lib/galaxy/workflow/modules.py
+++ b/lib/galaxy/workflow/modules.py
@@ -255,7 +255,10 @@
def get_errors( self ):
return self.errors
def get_tooltip( self, static_path='' ):
- return self.tool.help.render( static_path=static_path )
+ if self.tool.help:
+ return self.tool.help.render( static_path=static_path )
+ else:
+ return None
def get_data_inputs( self ):
data_inputs = []
def callback( input, value, prefixed_name, prefixed_label ):
commit/galaxy-central: greg: Add a new HgWebConfigManager to manage the tool shed's hgweb.config file.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/d3ac39a6f8d7/
changeset: d3ac39a6f8d7
user: greg
date: 2012-11-13 18:04:58
summary: Add a new HgWebConfigManager to manage the tool shed's hgweb.config file.
affected #: 8 files
diff -r 3024a86c0d0d21b1a85ff1b91516f34f416bcfcb -r d3ac39a6f8d7a5f95ffabb4ba8f8a846b17ac15a lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -118,6 +118,7 @@
self.workflows += 1
processed_relative_workflow_paths.append( relative_path )
self.generation_time = strftime( "%b %d, %Y", gmtime() )
+
def add_to_shed_tool_config( app, shed_tool_conf_dict, elem_list ):
# A tool shed repository is being installed so change the shed_tool_conf file. Parse the config file to generate the entire list
# of config_elems instead of using the in-memory list since it will be a subset of the entire list if one or more repositories have
@@ -1353,11 +1354,6 @@
ctx = get_changectx_for_changeset( repo, changeset_revision )
named_tmp_file = get_named_tmpfile_from_ctx( ctx, file_name, dir )
return named_tmp_file
-def get_hgweb_config( app ):
- hgweb_config = os.path.join( app.config.hgweb_config_dir, 'hgweb.config' )
- if not os.path.exists( hgweb_config ):
- raise Exception( "Required file %s does not exist - check config setting for hgweb_config_dir." % hgweb_config )
- return hgweb_config
def get_installed_tool_shed_repository( trans, id ):
"""Get a repository on the Galaxy side from the database via id"""
return trans.sa_session.query( trans.model.ToolShedRepository ).get( trans.security.decode_id( id ) )
diff -r 3024a86c0d0d21b1a85ff1b91516f34f416bcfcb -r d3ac39a6f8d7a5f95ffabb4ba8f8a846b17ac15a lib/galaxy/webapps/community/app.py
--- a/lib/galaxy/webapps/community/app.py
+++ b/lib/galaxy/webapps/community/app.py
@@ -46,5 +46,8 @@
# TODO: Add OpenID support
self.openid_providers = OpenIDProviders()
self.shed_counter = self.model.shed_counter
+ # Let the HgwebConfigManager know where the hgweb.config file is located.
+ self.hgweb_config_manager = self.model.hgweb_config_manager
+ self.hgweb_config_manager.hgweb_config_dir = self.config.hgweb_config_dir
def shutdown( self ):
pass
diff -r 3024a86c0d0d21b1a85ff1b91516f34f416bcfcb -r d3ac39a6f8d7a5f95ffabb4ba8f8a846b17ac15a lib/galaxy/webapps/community/controllers/hg.py
--- a/lib/galaxy/webapps/community/controllers/hg.py
+++ b/lib/galaxy/webapps/community/controllers/hg.py
@@ -1,7 +1,6 @@
import os, logging
from galaxy.web.base.controller import *
from galaxy.webapps.community.controllers.common import *
-from galaxy.util.shed_util import get_hgweb_config
from galaxy import eggs
eggs.require('mercurial')
@@ -18,9 +17,7 @@
# hg clone http://test@127.0.0.1:9009/repos/test/convert_characters1
hg_version = mercurial.__version__.version
cmd = kwd.get( 'cmd', None )
- hgweb_config = get_hgweb_config( trans.app )
- if not os.path.exists( hgweb_config ):
- raise Exception( "Required file %s does not exist." % str( hgweb_config ) )
+ hgweb_config = trans.app.hgweb_config_manager.hgweb_config
def make_web_app():
hgwebapp = hgwebdir( hgweb_config )
return hgwebapp
diff -r 3024a86c0d0d21b1a85ff1b91516f34f416bcfcb -r d3ac39a6f8d7a5f95ffabb4ba8f8a846b17ac15a lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -1,4 +1,4 @@
-import os, logging, tempfile, shutil
+import os, logging, tempfile, shutil, ConfigParser
from time import strftime
from datetime import date, datetime
from galaxy import util
@@ -11,7 +11,7 @@
from galaxy.model.orm import *
# TODO: re-factor shed_util to eliminate the following restricted imports
from galaxy.util.shed_util import create_repo_info_dict, generate_clone_url_for_repository_in_tool_shed, generate_message_for_invalid_tools
-from galaxy.util.shed_util import get_changectx_for_changeset, get_configured_ui, get_file_from_changeset_revision, get_hgweb_config
+from galaxy.util.shed_util import get_changectx_for_changeset, get_configured_ui, get_file_from_changeset_revision
from galaxy.util.shed_util import get_repository_file_contents, get_repository_in_tool_shed, get_repository_metadata_by_changeset_revision
from galaxy.util.shed_util import handle_sample_files_and_load_tool_from_disk, handle_sample_files_and_load_tool_from_tmp_config
from galaxy.util.shed_util import INITIAL_CHANGELOG_HASH, load_tool_from_config, NOT_TOOL_CONFIGS, open_repository_files_folder, remove_dir
@@ -530,26 +530,6 @@
repositories_i_own_grid = RepositoriesIOwnGrid()
deprecated_repositories_i_own_grid = DeprecatedRepositoriesIOwnGrid()
- def __add_hgweb_config_entry( self, trans, repository, repository_path ):
- # Add an entry in the hgweb.config file for a new repository. An entry looks something like:
- # repos/test/mira_assembler = database/community_files/000/repo_123.
- hgweb_config = get_hgweb_config( trans.app )
- if repository_path.startswith( './' ):
- repository_path = repository_path.replace( './', '', 1 )
- entry = "repos/%s/%s = %s" % ( repository.user.username, repository.name, repository_path )
- tmp_fd, tmp_fname = tempfile.mkstemp()
- if os.path.exists( hgweb_config ):
- # Make a backup of the hgweb.config file since we're going to be changing it.
- self.__make_hgweb_config_copy( trans, hgweb_config )
- new_hgweb_config = open( tmp_fname, 'wb' )
- for i, line in enumerate( open( hgweb_config ) ):
- new_hgweb_config.write( line )
- else:
- new_hgweb_config = open( tmp_fname, 'wb' )
- new_hgweb_config.write( '[paths]\n' )
- new_hgweb_config.write( "%s\n" % entry )
- new_hgweb_config.flush()
- shutil.move( tmp_fname, os.path.abspath( hgweb_config ) )
@web.expose
def browse_categories( self, trans, **kwd ):
# The request came from the tool shed.
@@ -824,25 +804,14 @@
selected_value=selected_value,
refresh_on_change=False,
multiple=True )
- def __change_hgweb_config_entry( self, trans, repository, old_repository_name, new_repository_name ):
- # Change an entry in the hgweb.config file for a repository. This only happens when
- # the owner changes the name of the repository. An entry looks something like:
- # repos/test/mira_assembler = database/community_files/000/repo_123.
- hgweb_config = get_hgweb_config( trans.app )
- # Make a backup of the hgweb.config file since we're going to be changing it.
- self.__make_hgweb_config_copy( trans, hgweb_config )
- repo_dir = repository.repo_path( trans.app )
- old_lhs = "repos/%s/%s" % ( repository.user.username, old_repository_name )
- new_entry = "repos/%s/%s = %s\n" % ( repository.user.username, new_repository_name, repo_dir )
- tmp_fd, tmp_fname = tempfile.mkstemp()
- new_hgweb_config = open( tmp_fname, 'wb' )
- for i, line in enumerate( open( hgweb_config ) ):
- if line.startswith( old_lhs ):
- new_hgweb_config.write( new_entry )
- else:
- new_hgweb_config.write( line )
- new_hgweb_config.flush()
- shutil.move( tmp_fname, os.path.abspath( hgweb_config ) )
+ def __change_repository_name_in_hgrc_file( self, hgrc_file, new_name ):
+ config = ConfigParser.ConfigParser()
+ config.read( hgrc_file )
+ config.set( 'web', 'name', new_name )
+ new_file = open( hgrc_file, 'wb' )
+ config.write( new_file )
+ new_file.close()
@web.expose
def check_for_updates( self, trans, **kwd ):
"""Handle a request from a local Galaxy instance."""
@@ -1001,8 +970,9 @@
os.makedirs( repository_path )
# Create the local repository
repo = hg.repository( get_configured_ui(), repository_path, create=True )
- # Add an entry in the hgweb.config file for the local repository, enabling calls to repository.repo_path( trans.app )
- self.__add_hgweb_config_entry( trans, repository, repository_path )
+ # Add an entry in the hgweb.config file for the local repository.
+ lhs = "repos/%s/%s" % ( repository.user.username, repository.name )
+ trans.app.hgweb_config_manager.add_entry( lhs, repository_path )
# Create a .hg/hgrc file for the local repository
self.__create_hgrc_file( trans, repository )
flush_needed = False
@@ -1616,13 +1586,6 @@
changeset_revision=changeset_revision,
message=message,
status='error' ) )
- def __make_hgweb_config_copy( self, trans, hgweb_config ):
- # Make a backup of the hgweb.config file
- today = date.today()
- backup_date = today.strftime( "%Y_%m_%d" )
- hgweb_config_backup_filename = 'hgweb.config_%s_backup' % backup_date
- hgweb_config_copy = os.path.join( trans.app.config.hgweb_config_dir, hgweb_config_backup_filename )
- shutil.copy( os.path.abspath( hgweb_config ), os.path.abspath( hgweb_config_copy ) )
def __make_same_length( self, list1, list2 ):
# If either list is 1 item, we'll append to it until its length is the same as the other.
if len( list1 ) == 1:
@@ -1710,7 +1673,14 @@
if message:
error = True
else:
- self.__change_hgweb_config_entry( trans, repository, repository.name, repo_name )
+ # Change the entry in the hgweb.config file for the repository.
+ old_lhs = "repos/%s/%s" % ( repository.user.username, repository.name )
+ new_lhs = "repos/%s/%s" % ( repository.user.username, repo_name )
+ new_rhs = "%s\n" % repo_dir
+ trans.app.hgweb_config_manager.change_entry( old_lhs, new_lhs, new_rhs )
+ # Change the entry in the repository's hgrc file.
+ hgrc_file = os.path.join( repo_dir, '.hg', 'hgrc' )
+ self.__change_repository_name_in_hgrc_file( hgrc_file, repo_name )
repository.name = repo_name
flush_needed = True
elif repository.times_downloaded != 0 and repo_name != repository.name:
diff -r 3024a86c0d0d21b1a85ff1b91516f34f416bcfcb -r d3ac39a6f8d7a5f95ffabb4ba8f8a846b17ac15a lib/galaxy/webapps/community/model/__init__.py
--- a/lib/galaxy/webapps/community/model/__init__.py
+++ b/lib/galaxy/webapps/community/model/__init__.py
@@ -6,7 +6,6 @@
"""
import os.path, os, errno, sys, codecs, operator, logging, tarfile, mimetypes, ConfigParser
from galaxy import util
-from galaxy.util.shed_util import get_hgweb_config
from galaxy.util.bunch import Bunch
from galaxy.util.hash_util import *
from galaxy.web.form_builder import *
@@ -110,12 +109,7 @@
MARKED_FOR_REMOVAL = 'r',
MARKED_FOR_ADDITION = 'a',
NOT_TRACKED = '?' )
- # Handle to the hgweb.config file on disk.
- hgweb_config_file = None
- # This repository's entry in the hgweb.config file on disk.
- hgweb_path = None
- def __init__( self, name=None, description=None, long_description=None, user_id=None, private=False, email_alerts=None, times_downloaded=0,
- deprecated=False ):
+ def __init__( self, name=None, description=None, long_description=None, user_id=None, private=False, email_alerts=None, times_downloaded=0, deprecated=False ):
self.name = name or "Unnamed repository"
self.description = description
self.long_description = long_description
@@ -124,27 +118,8 @@
self.email_alerts = email_alerts
self.times_downloaded = times_downloaded
self.deprecated = deprecated
- def get_hgweb_config_file( self, app ):
- if self.hgweb_config_file is None:
- self.hgweb_config_file = get_hgweb_config( app )
- return self.hgweb_config_file
- def get_hgweb_path( self, app ):
- # TODO: If possible, handle this using the mercurial api.
- if self.hgweb_path is None:
- lhs = os.path.join( "repos", self.user.username, self.name )
- config = ConfigParser.ConfigParser()
- config.read( self.get_hgweb_config_file( app ) )
- for option in config.options( "paths" ):
- if option == lhs:
- self.hgweb_path = config.get( "paths", option )
- break
- if self.hgweb_path is None:
- raise Exception( "Entry for repository %s missing in file %s." % ( lhs, hgweb_config ) )
- return self.hgweb_path
def repo_path( self, app ):
- # Repository locations on disk are stored in the hgweb.config file located in the directory defined by the config setting hgweb_config_dir.
- # An entry looks something like: repos/test/mira_assembler = database/community_files/000/repo_123
- return self.get_hgweb_path( app )
+ return app.hgweb_config_manager.get_entry( os.path.join( "repos", self.user.username, self.name ) )
def revision( self, app ):
repo = hg.repository( ui.ui(), self.repo_path( app ) )
tip_ctx = repo.changectx( repo.changelog.tip() )
diff -r 3024a86c0d0d21b1a85ff1b91516f34f416bcfcb -r d3ac39a6f8d7a5f95ffabb4ba8f8a846b17ac15a lib/galaxy/webapps/community/model/mapping.py
--- a/lib/galaxy/webapps/community/model/mapping.py
+++ b/lib/galaxy/webapps/community/model/mapping.py
@@ -14,6 +14,7 @@
from galaxy.model.custom_types import *
from galaxy.util.bunch import Bunch
from galaxy.util.shed_util import ShedCounter
+from galaxy.webapps.community.util.hgweb_config import *
from galaxy.webapps.community.security import CommunityRBACAgent
metadata = MetaData()
@@ -318,4 +319,5 @@
# Load local tool shed security policy
result.security_agent = CommunityRBACAgent( result )
result.shed_counter = ShedCounter( result )
+ result.hgweb_config_manager = HgWebConfigManager()
return result
diff -r 3024a86c0d0d21b1a85ff1b91516f34f416bcfcb -r d3ac39a6f8d7a5f95ffabb4ba8f8a846b17ac15a lib/galaxy/webapps/community/util/hgweb_config.py
--- /dev/null
+++ b/lib/galaxy/webapps/community/util/hgweb_config.py
@@ -0,0 +1,63 @@
+import sys, os, ConfigParser, logging, shutil
+from time import strftime
+from datetime import date
+
+log = logging.getLogger( __name__ )
+
+class HgWebConfigManager( object ):
+ def __init__( self ):
+ self.hgweb_config_dir = None
+ self.in_memory_config = None
+ def add_entry( self, lhs, rhs ):
+ """Add an entry in the hgweb.config file for a new repository."""
+ # Since we're changing the config, make sure the latest is loaded into memory.
+ self.read_config( force_read=True )
+ # An entry looks something like: repos/test/mira_assembler = database/community_files/000/repo_123.
+ if rhs.startswith( './' ):
+ rhs = rhs.replace( './', '', 1 )
+ self.make_backup()
+ # Add the new entry into memory.
+ self.in_memory_config.set( 'paths', lhs, rhs )
+ # Persist our in-memory configuration.
+ self.write_config()
+ def change_entry( self, old_lhs, new_lhs, new_rhs ):
+ """Change an entry in the hgweb.config file for a repository - this only happens when the owner changes the name of the repository."""
+ self.make_backup()
+ # Remove the old entry.
+ self.in_memory_config.remove_option( 'paths', old_lhs )
+ # Add the new entry.
+ self.in_memory_config.set( 'paths', new_lhs, new_rhs )
+ # Persist our in-memory configuration.
+ self.write_config()
+ def get_entry( self, lhs ):
+ """Return an entry in the hgweb.config file for a repository"""
+ self.read_config()
+ try:
+ entry = self.in_memory_config.get( 'paths', lhs )
+ except ConfigParser.NoOptionError:
+ raise Exception( "Entry for repository %s missing in file %s." % ( lhs, self.hgweb_config ) )
+ return entry
+ @property
+ def hgweb_config( self ):
+ hgweb_config = os.path.join( self.hgweb_config_dir, 'hgweb.config' )
+ if not os.path.exists( hgweb_config ):
+ raise Exception( "Required file %s does not exist - check config setting for hgweb_config_dir." % hgweb_config )
+ return os.path.abspath( hgweb_config )
+ def make_backup( self ):
+ # Make a backup of the hgweb.config file.
+ today = date.today()
+ backup_date = today.strftime( "%Y_%m_%d" )
+ hgweb_config_backup_filename = 'hgweb.config_%s_backup' % backup_date
+ hgweb_config_copy = os.path.join( self.hgweb_config_dir, hgweb_config_backup_filename )
+ shutil.copy( os.path.abspath( self.hgweb_config ), os.path.abspath( hgweb_config_copy ) )
+ def read_config( self, force_read=False ):
+ if force_read or self.in_memory_config is None:
+ config = ConfigParser.ConfigParser()
+ config.read( self.hgweb_config )
+ self.in_memory_config = config
+ def write_config( self ):
+ """Writing the in-memory configuration to the hgweb.config file on disk."""
+ config_file = open( self.hgweb_config, 'wb' )
+ self.in_memory_config.write( config_file )
+ config_file.close
+
\ No newline at end of file
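
A compact, self-contained sketch of the add/change/get entry pattern the new manager wraps, using ConfigParser directly; the temporary directory and repository paths are stand-ins for illustration:

    import ConfigParser, os, tempfile

    hgweb_config_dir = tempfile.mkdtemp()  # stand-in for the configured directory
    hgweb_config = os.path.join( hgweb_config_dir, 'hgweb.config' )
    open( hgweb_config, 'wb' ).write( '[paths]\n' )

    config = ConfigParser.ConfigParser()
    config.read( hgweb_config )
    # add_entry: repos/<username>/<name> = <location on disk>
    config.set( 'paths', 'repos/test/mira_assembler', 'database/community_files/000/repo_123' )
    # change_entry: remove the old lhs and set the new one when a repository is renamed.
    config.remove_option( 'paths', 'repos/test/mira_assembler' )
    config.set( 'paths', 'repos/test/assembler', 'database/community_files/000/repo_123' )
    config.write( open( hgweb_config, 'wb' ) )
    # get_entry
    config = ConfigParser.ConfigParser()
    config.read( hgweb_config )
    print config.get( 'paths', 'repos/test/assembler' )  # database/community_files/000/repo_123
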
commit/galaxy-central: greg: Move the important details to the beginning of the new tool shed repository alert email template, and include the repository name in the email subject.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/3024a86c0d0d/
changeset: 3024a86c0d0d
user: greg
date: 2012-11-12 22:39:49
summary: Move the important details to the beginning of the new tool shed repository alert email template, and include the repository name in the email subject.
affected #: 1 file
diff -r 74a7bc65f1ed8bde40425d1823ad8761b6cdcea8 -r 3024a86c0d0d21b1a85ff1b91516f34f416bcfcb lib/galaxy/webapps/community/controllers/common.py
--- a/lib/galaxy/webapps/community/controllers/common.py
+++ b/lib/galaxy/webapps/community/controllers/common.py
@@ -25,25 +25,23 @@
log = logging.getLogger( __name__ )
new_repo_email_alert_template = """
-GALAXY TOOL SHED NEW REPOSITORY ALERT
------------------------------------------------------------------------------
-You received this alert because you registered to receive email when
-new repositories were created in the Galaxy tool shed named "${host}".
------------------------------------------------------------------------------
+Revision: ${revision}
+Change description:
+${description}
Repository name: ${repository_name}
+Uploaded by: ${username}
Date content uploaded: ${display_date}
-Uploaded by: ${username}
-
-Revision: ${revision}
-Change description:
-${description}
${content_alert_str}
-----------------------------------------------------------------------------
This change alert was sent from the Galaxy tool shed hosted on the server
"${host}"
+-----------------------------------------------------------------------------
+You received this alert because you registered to receive email when
+new repositories were created in the Galaxy tool shed named "${host}".
+-----------------------------------------------------------------------------
"""
email_alert_template = """
@@ -497,7 +495,8 @@
admin_users = trans.app.config.get( "admin_users", "" ).split( "," )
frm = email_from
if new_repo_alert:
- subject = "New Galaxy tool shed repository alert"
+ subject = "Galaxy tool shed alert for new repository named %s" % str( repository.name )
+ subject = subject[ :80 ]
email_alerts = []
for user in trans.sa_session.query( trans.model.User ) \
.filter( and_( trans.model.User.table.c.deleted == False,