galaxy-commits
October 2012
- 1 participant
- 194 discussions
09 Oct '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/79bb2432133f/
changeset: 79bb2432133f
user: jgoecks
date: 2012-10-09 17:59:59
summary: Fix typo in datasets API controller.
affected #: 1 file
diff -r 04229b267b3e8dd112d3e66fd6826d2f66404424 -r 79bb2432133f9b18450e142fc6b80ddab364b57e lib/galaxy/webapps/galaxy/api/datasets.py
--- a/lib/galaxy/webapps/galaxy/api/datasets.py
+++ b/lib/galaxy/webapps/galaxy/api/datasets.py
@@ -46,7 +46,7 @@
elif data_type == 'data':
rval = self._data( trans, dataset, **kwd )
elif data_type == 'features':
- rval = self._search_features( trans, dataset, kwd.get( 'query ' ) )
+ rval = self._search_features( trans, dataset, kwd.get( 'query' ) )
elif data_type == 'raw_data':
rval = self._raw_data( trans, dataset, **kwd )
elif data_type == 'track_config':
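The whole fix above is the trailing space inside the key passed to kwd.get(): with the space, the lookup silently returned None rather than raising, so feature searches ran with an empty query. A minimal standalone sketch of that failure mode:

# dict.get with a mistyped key fails silently by returning None
# instead of raising a KeyError.
kwd = { 'query': 'chr1:1000-2000' }
print( kwd.get( 'query ' ) )  # None, note the trailing space
print( kwd.get( 'query' ) )   # 'chr1:1000-2000'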
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/04229b267b3e/
changeset: 04229b267b3e
user: dan
date: 2012-10-09 16:27:54
summary: Some cleanup for shed_util.py
affected #: 1 file
diff -r 429e895ee1b3bfb2fea1632d34a01d54faeac866 -r 04229b267b3e8dd112d3e66fd6826d2f66404424 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -387,8 +387,6 @@
else:
tmp_url = repository_clone_url
return tmp_url
-def tool_shed_from_repository_clone_url( repository_clone_url ):
- return clean_repository_clone_url( repository_clone_url ).split( 'repos' )[ 0 ].rstrip( '/' )
def clean_tool_shed_url( tool_shed_url ):
if tool_shed_url.find( ':' ) > 0:
# Eliminate the port, if any, since it will result in an invalid directory name.
@@ -1772,42 +1770,6 @@
config_elems.remove( config_elem )
# Persist the altered in-memory version of the tool config.
config_elems_to_xml_file( trans.app, config_elems, shed_tool_conf, tool_path )
-def update_in_shed_tool_config( app, repository ):
- # A tool shed repository is being updated so change the shed_tool_conf file. Parse the config file to generate the entire list
- # of config_elems instead of using the in-memory list.
- shed_conf_dict = repository.get_shed_config_dict( app )
- shed_tool_conf = shed_conf_dict[ 'config_filename' ]
- tool_path = shed_conf_dict[ 'tool_path' ]
-
- #hack for 'trans.app' used in lots of places. These places should just directly use app
- trans = util.bunch.Bunch()
- trans.app = app
-
- tool_panel_dict = generate_tool_panel_dict_from_shed_tool_conf_entries( trans, repository )
- repository_tools_tups = get_repository_tools_tups( app, repository.metadata )
- cleaned_repository_clone_url = clean_repository_clone_url( generate_clone_url( trans, repository ) )
- tool_shed = tool_shed_from_repository_clone_url( cleaned_repository_clone_url )
- owner = repository.owner
- if not owner:
- owner = get_repository_owner( cleaned_repository_clone_url )
- guid_to_tool_elem_dict = {}
- for tool_config_filename, guid, tool in repository_tools_tups:
- guid_to_tool_elem_dict[ guid ] = generate_tool_elem( tool_shed, repository.name, repository.changeset_revision, repository.owner or '', tool_config_filename, tool, None )
- config_elems = []
- tree = util.parse_xml( shed_tool_conf )
- root = tree.getroot()
- for elem in root:
- if elem.tag == 'section':
- for i, tool_elem in enumerate( elem ):
- guid = tool_elem.attrib.get( 'guid' )
- if guid in guid_to_tool_elem_dict:
- elem[i] = guid_to_tool_elem_dict[ guid ]
- elif elem.tag == 'tool':
- guid = elem.attrib.get( 'guid' )
- if guid in guid_to_tool_elem_dict:
- elem = guid_to_tool_elem_dict[ guid ]
- config_elems.append( elem )
- config_elems_to_xml_file( app, config_elems, shed_tool_conf, tool_path )
def remove_from_tool_panel( trans, repository, shed_tool_conf, uninstall ):
"""A tool shed repository is being deactivated or uninstalled so handle tool panel alterations accordingly."""
# Determine where the tools are currently defined in the tool panel and store this information so the tools can be displayed
@@ -2016,6 +1978,8 @@
elif c not in [ '\r' ]:
translated.append( '' )
return ''.join( translated )
+def tool_shed_from_repository_clone_url( repository_clone_url ):
+ return clean_repository_clone_url( repository_clone_url ).split( 'repos' )[ 0 ].rstrip( '/' )
def translate_string( raw_text, to_html=True ):
if raw_text:
if to_html:
@@ -2086,6 +2050,42 @@
sa_session.delete( tool_dependency )
sa_session.flush()
return new_tool_dependency
+def update_in_shed_tool_config( app, repository ):
+ # A tool shed repository is being updated so change the shed_tool_conf file. Parse the config file to generate the entire list
+ # of config_elems instead of using the in-memory list.
+ shed_conf_dict = repository.get_shed_config_dict( app )
+ shed_tool_conf = shed_conf_dict[ 'config_filename' ]
+ tool_path = shed_conf_dict[ 'tool_path' ]
+
+ #hack for 'trans.app' used in lots of places. These places should just directly use app
+ trans = util.bunch.Bunch()
+ trans.app = app
+
+ tool_panel_dict = generate_tool_panel_dict_from_shed_tool_conf_entries( trans, repository )
+ repository_tools_tups = get_repository_tools_tups( app, repository.metadata )
+ cleaned_repository_clone_url = clean_repository_clone_url( generate_clone_url( trans, repository ) )
+ tool_shed = tool_shed_from_repository_clone_url( cleaned_repository_clone_url )
+ owner = repository.owner
+ if not owner:
+ owner = get_repository_owner( cleaned_repository_clone_url )
+ guid_to_tool_elem_dict = {}
+ for tool_config_filename, guid, tool in repository_tools_tups:
+ guid_to_tool_elem_dict[ guid ] = generate_tool_elem( tool_shed, repository.name, repository.changeset_revision, repository.owner or '', tool_config_filename, tool, None )
+ config_elems = []
+ tree = util.parse_xml( shed_tool_conf )
+ root = tree.getroot()
+ for elem in root:
+ if elem.tag == 'section':
+ for i, tool_elem in enumerate( elem ):
+ guid = tool_elem.attrib.get( 'guid' )
+ if guid in guid_to_tool_elem_dict:
+ elem[i] = guid_to_tool_elem_dict[ guid ]
+ elif elem.tag == 'tool':
+ guid = elem.attrib.get( 'guid' )
+ if guid in guid_to_tool_elem_dict:
+ elem = guid_to_tool_elem_dict[ guid ]
+ config_elems.append( elem )
+ config_elems_to_xml_file( app, config_elems, shed_tool_conf, tool_path )
def update_repository( repo, ctx_rev=None ):
"""
Update the cloned repository to changeset_revision. It is critical that the installed repository is updated to the desired
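The cleanup above only relocates tool_shed_from_repository_clone_url within the module; its behavior is unchanged. A self-contained sketch of what it computes, assuming clean_repository_clone_url has already stripped the protocol from the clone URL:

# Derive the tool shed host from a cleaned repository clone URL by
# splitting on the 'repos' path component. The input value is illustrative.
def tool_shed_from_clone_url( cleaned_url ):
    return cleaned_url.split( 'repos' )[ 0 ].rstrip( '/' )

print( tool_shed_from_clone_url( 'toolshed.g2.bx.psu.edu/repos/dan/column_maker' ) )
# toolshed.g2.bx.psu.edu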
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
4 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/921a612db28a/
changeset: 921a612db28a
user: dan
date: 2012-10-09 16:24:58
summary: Enhance Galaxy's model.ToolShedRepository to handle shed_conf_dict easier.
affected #: 1 file
diff -r 413cf15e4065a9f8d559ca110e6e86b84f8a6620 -r 921a612db28a69a33dc1ea8bef5ed20ffa63bba2 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -2987,6 +2987,78 @@
return relative_path
return None
@property
+ def tool_shed_path_name( self ):
+ tool_shed_url = self.tool_shed
+ if tool_shed_url.find( ':' ) > 0:
+ # Eliminate the port, if any, since it will result in an invalid directory name.
+ tool_shed_url = tool_shed_url.split( ':' )[ 0 ]
+ return tool_shed_url.rstrip( '/' )
+ def get_tool_relative_path( self, app ):
+ shed_conf_dict = self.get_shed_config_dict( app )
+ tool_path = None
+ relative_path = None
+ if shed_conf_dict:
+ tool_path = shed_conf_dict[ 'tool_path' ]
+ relative_path = os.path.join( self.tool_shed_path_name, 'repos', self.owner, self.name, self.installed_changeset_revision )
+ return tool_path, relative_path
+ def get_shed_config_filename( self ):
+ shed_config_filename = None
+ if self.metadata:
+ shed_config_filename = self.metadata.get( 'shed_config_filename', shed_config_filename )
+ return shed_config_filename
+ def set_shed_config_filename( self, value ):
+ self.metadata[ 'shed_config_filename' ] = value
+ shed_config_filename = property( get_shed_config_filename, set_shed_config_filename )
+ def guess_shed_config( self, app, default=None ):
+ tool_ids = []
+ metadata = self.metadata or {}
+ for tool in metadata.get( 'tools', [] ):
+ tool_ids.append( tool.get( 'guid' ) )
+ for shed_tool_conf_dict in app.toolbox.shed_tool_confs:
+ name = shed_tool_conf_dict[ 'config_filename' ]
+ for elem in shed_tool_conf_dict[ 'config_elems' ]:
+ if elem.tag == 'tool':
+ for sub_elem in elem.findall( 'id' ):
+ tool_id = sub_elem.text.strip()
+ if tool_id in tool_ids:
+ self.shed_config_filename = name
+ return shed_tool_conf_dict
+ elif elem.tag == "section":
+ for tool_elem in elem.findall( 'tool' ):
+ for sub_elem in tool_elem.findall( 'id' ):
+ tool_id = sub_elem.text.strip()
+ if tool_id in tool_ids:
+ self.shed_config_filename = name
+ return shed_tool_conf_dict
+ if self.includes_datatypes:
+ #we need to search by filepaths here, which is less desirable
+ tool_shed_url = self.tool_shed
+ if tool_shed_url.find( ':' ) > 0:
+ # Eliminate the port, if any, since it will result in an invalid directory name.
+ tool_shed_url = tool_shed_url.split( ':' )[ 0 ]
+ tool_shed = tool_shed_url.rstrip( '/' )
+ for shed_tool_conf_dict in app.toolbox.shed_tool_confs:
+ tool_path = shed_tool_conf_dict[ 'tool_path' ]
+ relative_path = os.path.join( tool_path, tool_shed, 'repos', self.owner, self.name, self.installed_changeset_revision )
+ if os.path.exists( relative_path ):
+ self.shed_config_filename = shed_tool_conf_dict[ 'config_filename' ]
+ return shed_tool_conf_dict
+ #if self.dist_to_shed:
+ # #return ./migrated_tools.xml
+ return default
+ def get_shed_config_dict( self, app, default=None ):
+ """
+ Return the in-memory version of the shed_tool_conf file, which is stored in the config_elems entry
+ in the shed_tool_conf_dict.
+ """
+ if not self.shed_config_filename:
+ self.guess_shed_config( app, default=default )
+ if self.shed_config_filename:
+ for shed_tool_conf_dict in app.toolbox.shed_tool_confs:
+ if self.shed_config_filename == shed_tool_conf_dict[ 'config_filename' ]:
+ return shed_tool_conf_dict
+ return default
+ @property
def can_install( self ):
return self.status == self.installation_status.NEW
@property
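The get/set pair plus property() above is the core of this changeset: the shed config filename is persisted inside the repository's metadata dict but reads and writes like a plain attribute. A self-contained sketch of the same pattern, using a stand-in class rather than Galaxy's model:

# Stand-in for ToolShedRepository showing the metadata-backed property.
class RepoStub( object ):
    def __init__( self, metadata=None ):
        self.metadata = metadata or {}
    def get_shed_config_filename( self ):
        return self.metadata.get( 'shed_config_filename' )
    def set_shed_config_filename( self, value ):
        self.metadata[ 'shed_config_filename' ] = value
    shed_config_filename = property( get_shed_config_filename, set_shed_config_filename )

repo = RepoStub()
repo.shed_config_filename = 'shed_tool_conf.xml'
print( repo.metadata )  # {'shed_config_filename': 'shed_tool_conf.xml'}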
https://bitbucket.org/galaxy/galaxy-central/changeset/0263d37b08e6/
changeset: 0263d37b08e6
user: dan
date: 2012-10-09 16:24:58
summary: Add a helper method to the toolbox to provide a shed_config_dict based upon the provided filename.
affected #: 1 file
diff -r 921a612db28a69a33dc1ea8bef5ed20ffa63bba2 -r 0263d37b08e610e572b5f08940c19a6f19f0931d lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -150,6 +150,11 @@
tool_path=tool_path,
config_elems=config_elems )
self.shed_tool_confs.append( shed_tool_conf_dict )
+ def get_shed_config_dict_by_filename( self, filename, default=None ):
+ for shed_config_dict in self.shed_tool_confs:
+ if shed_config_dict[ 'config_filename' ] == filename:
+ return shed_config_dict
+ return default
def __add_tool_to_tool_panel( self, tool_id, panel_component, section=False ):
# See if a version of this tool is already loaded into the tool panel. The value of panel_component
# will be a ToolSection (if the value of section=True) or self.tool_panel (if section=False).
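The new helper is a plain linear scan over the registered shed tool config dicts, returning default on a miss. Sketched in isolation, with illustrative config values:

shed_tool_confs = [
    { 'config_filename': 'shed_tool_conf.xml', 'tool_path': '../shed_tools' },
    { 'config_filename': 'migrated_tools_conf.xml', 'tool_path': '../migrated_tools' },
]

def get_shed_config_dict_by_filename( filename, default=None ):
    for shed_config_dict in shed_tool_confs:
        if shed_config_dict[ 'config_filename' ] == filename:
            return shed_config_dict
    return default

print( get_shed_config_dict_by_filename( 'shed_tool_conf.xml' )[ 'tool_path' ] )
# ../shed_tools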
https://bitbucket.org/galaxy/galaxy-central/changeset/97eb5bde48da/
changeset: 97eb5bde48da
user: dan
date: 2012-10-09 16:24:59
summary: Enhance Galaxy's handling of Tool Shed Repositories to work with hierarchical relative paths.
affected #: 3 files
diff -r 0263d37b08e610e572b5f08940c19a6f19f0931d -r 97eb5bde48da4575e8de4fed64fe910a4df45dce lib/galaxy/tool_shed/install_manager.py
--- a/lib/galaxy/tool_shed/install_manager.py
+++ b/lib/galaxy/tool_shed/install_manager.py
@@ -33,9 +33,12 @@
root = tree.getroot()
self.tool_shed = clean_tool_shed_url( root.get( 'name' ) )
self.repository_owner = REPOSITORY_OWNER
+ index, self.shed_config_dict = get_shed_tool_conf_dict( app, self.migrated_tools_config )
for repository_elem in root:
self.install_repository( repository_elem, install_dependencies )
def get_guid( self, repository_clone_url, relative_install_dir, tool_config ):
+ if self.shed_config_dict.get( 'tool_path' ):
+ relative_install_dir = os.path.join( self.shed_config_dict['tool_path'], relative_install_dir )
found = False
for root, dirs, files in os.walk( relative_install_dir ):
if root.find( '.hg' ) < 0 and root.find( 'hgrc' ) < 0:
@@ -122,6 +125,10 @@
def handle_repository_contents( self, tool_shed_repository, repository_clone_url, relative_install_dir, repository_elem, install_dependencies ):
"""Generate the metadata for the installed tool shed repository, among other things."""
tool_panel_dict_for_display = odict()
+ if self.tool_path:
+ repo_install_dir = os.path.join( self.tool_path, relative_install_dir )
+ else:
+ repo_install_dir = relative_install_dir
for tool_elem in repository_elem:
# The tool_elem looks something like this: <tool id="EMBOSS: antigenic1" version="5.0.0" file="emboss_antigenic.xml" />
tool_config = tool_elem.get( 'file' )
@@ -135,6 +142,7 @@
metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( app=self.app,
repository=tool_shed_repository,
repository_clone_url=repository_clone_url,
+ shed_config_dict = self.shed_config_dict,
relative_install_dir=relative_install_dir,
repository_files_dir=None,
resetting_all_metadata_on_repository=False,
@@ -150,7 +158,7 @@
if 'tools' in metadata_dict:
sample_files = metadata_dict.get( 'sample_files', [] )
tool_index_sample_files = get_tool_index_sample_files( sample_files )
- copy_sample_files( self.app, tool_index_sample_files )
+ copy_sample_files( self.app, tool_index_sample_files, tool_path=self.tool_path )
sample_files_copied = [ s for s in tool_index_sample_files ]
repository_tools_tups = get_repository_tools_tups( self.app, metadata_dict )
if repository_tools_tups:
@@ -163,14 +171,14 @@
repository_tools_tups,
sample_files_copied )
# Copy remaining sample files included in the repository to the ~/tool-data directory of the local Galaxy instance.
- copy_sample_files( self.app, sample_files, sample_files_copied=sample_files_copied )
+ copy_sample_files( self.app, sample_files, tool_path=self.tool_path, sample_files_copied=sample_files_copied )
if install_dependencies and tool_dependencies and 'tool_dependencies' in metadata_dict:
# Install tool dependencies.
update_tool_shed_repository_status( self.app,
tool_shed_repository,
self.app.model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES )
# Get the tool_dependencies.xml file from disk.
- tool_dependencies_config = get_config_from_disk( 'tool_dependencies.xml', relative_install_dir )
+ tool_dependencies_config = get_config_from_disk( 'tool_dependencies.xml', repo_install_dir )
installed_tool_dependencies = handle_tool_dependencies( app=self.app,
tool_shed_repository=tool_shed_repository,
tool_dependencies_config=tool_dependencies_config,
@@ -195,10 +203,10 @@
self.app.sa_session.add( tool_shed_repository )
self.app.sa_session.flush()
work_dir = tempfile.mkdtemp()
- datatypes_config = get_config_from_disk( 'datatypes_conf.xml', relative_install_dir )
+ datatypes_config = get_config_from_disk( 'datatypes_conf.xml', repo_install_dir )
# Load proprietary data types required by tools. The value of override is not important here since the Galaxy server will be started
# after this installation completes.
- converter_path, display_path = alter_config_and_load_prorietary_datatypes( self.app, datatypes_config, relative_install_dir, override=False )
+ converter_path, display_path = alter_config_and_load_prorietary_datatypes( self.app, datatypes_config, repo_install_dir, override=False ) #repo_install_dir was relative_install_dir
if converter_path or display_path:
# Create a dictionary of tool shed repository related information.
repository_dict = create_repository_dict_for_proprietary_datatypes( tool_shed=self.tool_shed,
@@ -224,13 +232,15 @@
description = repository_elem.get( 'description' )
installed_changeset_revision = repository_elem.get( 'changeset_revision' )
# Install path is of the form: <tool path>/<tool shed>/repos/<repository owner>/<repository name>/<installed changeset revision>
- clone_dir = os.path.join( self.tool_path, self.tool_shed, 'repos', self.repository_owner, name, installed_changeset_revision )
+ relative_clone_dir = os.path.join( self.tool_shed, 'repos', self.repository_owner, name, installed_changeset_revision )
+ clone_dir = os.path.join( self.tool_path, relative_clone_dir )
if self.__isinstalled( clone_dir ):
print "Skipping automatic install of repository '", name, "' because it has already been installed in location ", clone_dir
else:
tool_shed_url = self.__get_url_from_tool_shed( self.tool_shed )
repository_clone_url = os.path.join( tool_shed_url, 'repos', self.repository_owner, name )
- relative_install_dir = os.path.join( clone_dir, name )
+ relative_install_dir = os.path.join( relative_clone_dir, name )
+ install_dir = os.path.join( clone_dir, name )
ctx_rev = get_ctx_rev( tool_shed_url, name, self.repository_owner, installed_changeset_revision )
print "Adding new row (or updating an existing row) for repository '%s' in the tool_shed_repository table." % name
tool_shed_repository = create_or_update_tool_shed_repository( app=self.app,
@@ -245,7 +255,7 @@
owner=self.repository_owner,
dist_to_shed=True )
update_tool_shed_repository_status( self.app, tool_shed_repository, self.app.model.ToolShedRepository.installation_status.CLONING )
- cloned_ok, error_message = clone_repository( repository_clone_url, os.path.abspath( relative_install_dir ), ctx_rev )
+ cloned_ok, error_message = clone_repository( repository_clone_url, os.path.abspath( install_dir ), ctx_rev )
if cloned_ok:
self.handle_repository_contents( tool_shed_repository=tool_shed_repository,
repository_clone_url=repository_clone_url,
diff -r 0263d37b08e610e572b5f08940c19a6f19f0931d -r 97eb5bde48da4575e8de4fed64fe910a4df45dce lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -387,6 +387,8 @@
else:
tmp_url = repository_clone_url
return tmp_url
+def tool_shed_from_repository_clone_url( repository_clone_url ):
+ return clean_repository_clone_url( repository_clone_url ).split( 'repos' )[ 0 ].rstrip( '/' )
def clean_tool_shed_url( tool_shed_url ):
if tool_shed_url.find( ':' ) > 0:
# Eliminate the port, if any, since it will result in an invalid directory name.
@@ -421,7 +423,7 @@
# Only create the .loc file if it does not yet exist. We don't overwrite it in case it contains stuff proprietary to the local instance.
if not os.path.exists( os.path.join( dest_path, copied_file ) ):
shutil.copy( full_source_path, os.path.join( dest_path, copied_file ) )
-def copy_sample_files( app, sample_files, sample_files_copied=None, dest_path=None ):
+def copy_sample_files( app, sample_files, tool_path=None, sample_files_copied=None, dest_path=None ):
"""
Copy all files to dest_path in the local Galaxy environment that have not already been copied. Those that have been copied
are contained in sample_files_copied. The default value for dest_path is ~/tool-data.
@@ -429,6 +431,8 @@
sample_files_copied = util.listify( sample_files_copied )
for filename in sample_files:
if filename not in sample_files_copied:
+ if tool_path:
+ filename=os.path.join( tool_path, filename )
copy_sample_file( app, filename, dest_path=dest_path )
def create_repo_info_dict( repository, owner, repository_clone_url, changeset_revision, ctx_rev, metadata ):
repo_info_dict = {}
@@ -504,6 +508,9 @@
def create_tool_dependency_objects( app, tool_shed_repository, relative_install_dir, set_status=True ):
# Create or update a ToolDependency for each entry in tool_dependencies_config. This method is called when installing a new tool_shed_repository.
tool_dependency_objects = []
+ shed_config_dict = tool_shed_repository.get_shed_config_dict( app )
+ if shed_config_dict.get( 'tool_path' ):
+ relative_install_dir = os.path.join( shed_config_dict.get( 'tool_path' ), relative_install_dir )
# Get the tool_dependencies.xml file from the repository.
tool_dependencies_config = get_config_from_disk( 'tool_dependencies.xml', relative_install_dir )
try:
@@ -601,7 +608,7 @@
else:
tool_dependencies_dict[ 'set_environment' ] = [ requirements_dict ]
return tool_dependencies_dict
-def generate_metadata_for_changeset_revision( app, repository, repository_clone_url, relative_install_dir=None, repository_files_dir=None,
+def generate_metadata_for_changeset_revision( app, repository, repository_clone_url, shed_config_dict={}, relative_install_dir=None, repository_files_dir=None,
resetting_all_metadata_on_repository=False, updating_installed_repository=False ):
"""
Generate metadata for a repository using it's files on disk. To generate metadata for changeset revisions older than the repository tip,
@@ -616,7 +623,7 @@
else:
original_repository_metadata = None
readme_file_names = get_readme_file_names( repository.name )
- metadata_dict = {}
+ metadata_dict = { 'shed_config_filename': shed_config_dict.get( 'config_filename' ) }
invalid_file_tups = []
invalid_tool_configs = []
tool_dependencies_config = None
@@ -637,6 +644,8 @@
work_dir = tempfile.mkdtemp()
# All other files are on disk in the repository's repo_path, which is the value of relative_install_dir.
files_dir = relative_install_dir
+ if shed_config_dict.get( 'tool_path' ):
+ files_dir = os.path.join( shed_config_dict['tool_path'], files_dir )
app.config.tool_data_path = work_dir
app.config.tool_data_table_config_path = work_dir
# Handle proprietary datatypes, if any.
@@ -645,6 +654,7 @@
metadata_dict = generate_datatypes_metadata( datatypes_config, metadata_dict )
# Get the relative path to all sample files included in the repository for storage in the repository's metadata.
sample_file_metadata_paths, sample_file_copy_paths = get_sample_files_from_disk( repository_files_dir=files_dir,
+ tool_path=shed_config_dict.get( 'tool_path' ),
relative_install_dir=relative_install_dir,
resetting_all_metadata_on_repository=resetting_all_metadata_on_repository )
if sample_file_metadata_paths:
@@ -674,6 +684,8 @@
relative_path_to_readme = os.path.join( relative_install_dir, stripped_path_to_readme )
else:
relative_path_to_readme = os.path.join( root, name )
+ if relative_install_dir and shed_config_dict.get( 'tool_path' ) and relative_path_to_readme.startswith( os.path.join( shed_config_dict.get( 'tool_path' ), relative_install_dir ) ):
+ relative_path_to_readme = relative_path_to_readme[ len( shed_config_dict.get( 'tool_path' ) ) + 1: ]
metadata_dict[ 'readme' ] = relative_path_to_readme
# See if we have a tool config.
elif name not in NOT_TOOL_CONFIGS and name.endswith( '.xml' ):
@@ -711,6 +723,8 @@
relative_path_to_tool_config = os.path.join( relative_install_dir, stripped_path_to_tool_config )
else:
relative_path_to_tool_config = os.path.join( root, name )
+ if relative_install_dir and shed_config_dict.get( 'tool_path' ) and relative_path_to_tool_config.startswith( os.path.join( shed_config_dict.get( 'tool_path' ), relative_install_dir ) ):
+ relative_path_to_tool_config = relative_path_to_tool_config[ len( shed_config_dict.get( 'tool_path' ) ) + 1: ]
metadata_dict = generate_tool_metadata( relative_path_to_tool_config, tool, repository_clone_url, metadata_dict )
else:
for tup in invalid_files_and_errors_tups:
@@ -844,13 +858,35 @@
else:
metadata_dict[ 'tools' ] = [ tool_dict ]
return metadata_dict
+def generate_tool_elem( tool_shed, repository_name, changeset_revision, owner, tool_file_path, tool, tool_section ):
+ if tool_section is not None:
+ tool_elem = SubElement( tool_section, 'tool' )
+ else:
+ tool_elem = Element( 'tool' )
+ tool_elem.attrib[ 'file' ] = tool_file_path
+ tool_elem.attrib[ 'guid' ] = tool.guid
+ tool_shed_elem = SubElement( tool_elem, 'tool_shed' )
+ tool_shed_elem.text = tool_shed
+ repository_name_elem = SubElement( tool_elem, 'repository_name' )
+ repository_name_elem.text = repository_name
+ repository_owner_elem = SubElement( tool_elem, 'repository_owner' )
+ repository_owner_elem.text = owner
+ changeset_revision_elem = SubElement( tool_elem, 'installed_changeset_revision' )
+ changeset_revision_elem.text = changeset_revision
+ id_elem = SubElement( tool_elem, 'id' )
+ id_elem.text = tool.id
+ version_elem = SubElement( tool_elem, 'version' )
+ version_elem.text = tool.version
+ return tool_elem
+
def generate_tool_panel_elem_list( repository_name, repository_clone_url, changeset_revision, tool_panel_dict, repository_tools_tups, owner='' ):
"""Generate a list of ElementTree Element objects for each section or tool."""
elem_list = []
tool_elem = None
- tmp_url = clean_repository_clone_url( repository_clone_url )
+ cleaned_repository_clone_url = clean_repository_clone_url( repository_clone_url )
if not owner:
- owner = get_repository_owner( tmp_url )
+ owner = get_repository_owner( cleaned_repository_clone_url )
+ tool_shed = cleaned_repository_clone_url.split( 'repos' )[ 0 ].rstrip( '/' )
for guid, tool_section_dicts in tool_panel_dict.items():
for tool_section_dict in tool_section_dicts:
tool_section = None
@@ -874,23 +910,9 @@
if tup_guid == guid:
break
if inside_section:
- tool_elem = SubElement( tool_section, 'tool' )
+ tool_elem = generate_tool_elem( tool_shed, repository_name, changeset_revision, owner, tool_file_path, tool, tool_section )
else:
- tool_elem = Element( 'tool' )
- tool_elem.attrib[ 'file' ] = tool_file_path
- tool_elem.attrib[ 'guid' ] = guid
- tool_shed_elem = SubElement( tool_elem, 'tool_shed' )
- tool_shed_elem.text = tmp_url.split( 'repos' )[ 0 ].rstrip( '/' )
- repository_name_elem = SubElement( tool_elem, 'repository_name' )
- repository_name_elem.text = repository_name
- repository_owner_elem = SubElement( tool_elem, 'repository_owner' )
- repository_owner_elem.text = owner
- changeset_revision_elem = SubElement( tool_elem, 'installed_changeset_revision' )
- changeset_revision_elem.text = changeset_revision
- id_elem = SubElement( tool_elem, 'id' )
- id_elem.text = tool.id
- version_elem = SubElement( tool_elem, 'version' )
- version_elem.text = tool.version
+ tool_elem = generate_tool_elem( tool_shed, repository_name, changeset_revision, owner, tool_file_path, tool, None )
if inside_section:
if section_in_elem_list:
elem_list[ index ] = tool_section
@@ -1253,18 +1275,21 @@
return get_repository_owner( tmp_url )
def get_repository_tools_tups( app, metadata_dict ):
repository_tools_tups = []
+ index, shed_conf_dict = get_shed_tool_conf_dict( app, metadata_dict.get( 'shed_config_filename' ) )
if 'tools' in metadata_dict:
for tool_dict in metadata_dict[ 'tools' ]:
- relative_path = tool_dict.get( 'tool_config', None )
+ load_relative_path = relative_path = tool_dict.get( 'tool_config', None )
+ if shed_conf_dict.get( 'tool_path' ):
+ load_relative_path = os.path.join( shed_conf_dict.get( 'tool_path' ), relative_path )
guid = tool_dict.get( 'guid', None )
if relative_path and guid:
- tool = app.toolbox.load_tool( os.path.abspath( relative_path ), guid=guid )
+ tool = app.toolbox.load_tool( os.path.abspath( load_relative_path ), guid=guid )
else:
tool = None
if tool:
repository_tools_tups.append( ( relative_path, guid, tool ) )
return repository_tools_tups
-def get_sample_files_from_disk( repository_files_dir, relative_install_dir=None, resetting_all_metadata_on_repository=False ):
+def get_sample_files_from_disk( repository_files_dir, tool_path = None, relative_install_dir=None, resetting_all_metadata_on_repository=False ):
if resetting_all_metadata_on_repository:
# Keep track of the location where the repository is temporarily cloned so that we can strip it when setting metadata.
work_dir = repository_files_dir
@@ -1287,6 +1312,9 @@
else:
relative_path_to_sample_file = os.path.join( root, name )
sample_file_copy_paths.append( relative_path_to_sample_file )
+ if tool_path and relative_install_dir:
+ if relative_path_to_sample_file.startswith( os.path.join( tool_path, relative_install_dir ) ):
+ relative_path_to_sample_file = relative_path_to_sample_file[ len( tool_path ) + 1 :]
sample_file_metadata_paths.append( relative_path_to_sample_file )
return sample_file_metadata_paths, sample_file_copy_paths
def get_shed_tool_conf_dict( app, shed_tool_conf ):
@@ -1327,46 +1355,16 @@
partial_install_dir = '%s/repos/%s/%s/%s' % ( tool_shed, repository.owner, repository.name, repository.installed_changeset_revision )
# Get the relative tool installation paths from each of the shed tool configs.
relative_install_dir = None
- for shed_tool_conf_dict in app.toolbox.shed_tool_confs:
- shed_tool_conf = shed_tool_conf_dict[ 'config_filename' ]
- if repository.dist_to_shed:
- # The repository is owned by devteam and contains tools migrated from the Galaxy distribution to the tool shed, so
- # the reserved tool panel config is migrated_tools_conf.xml, to which app.config.migrated_tools_config refers.
- if shed_tool_conf == app.config.migrated_tools_config:
- tool_path = shed_tool_conf_dict[ 'tool_path' ]
- relative_install_dir = os.path.join( tool_path, partial_install_dir )
- if tool_path and relative_install_dir:
- return shed_tool_conf, tool_path, relative_install_dir
- elif repository.uninstalled:
- # Since the repository is uninstalled we don't know what tool panel config was originally used to
- # define the tools in the repository, so we'll just make sure not to use the reserved migrated_tools_conf.xml.
- if shed_tool_conf != app.config.migrated_tools_config:
- tool_path = shed_tool_conf_dict[ 'tool_path' ]
- relative_install_dir = os.path.join( tool_path, partial_install_dir )
- if tool_path and relative_install_dir:
- return shed_tool_conf, tool_path, relative_install_dir
- else:
- if repository.includes_tools:
- metadata = repository.metadata
- for tool_dict in metadata[ 'tools' ]:
- # Parse the tool panel config to get the entire set of config_elems. # We'll check config_elems until we
- # find an element that matches one of the tools in the repository's metadata.
- tool_panel_config = shed_tool_conf_dict[ 'config_filename' ]
- tree = util.parse_xml( tool_panel_config )
- root = tree.getroot()
- tool_path, relative_install_dir = get_tool_path_install_dir( partial_install_dir,
- shed_tool_conf_dict,
- tool_dict,
- root )
- if tool_path and relative_install_dir:
- return shed_tool_conf, tool_path, relative_install_dir
- else:
- # Nothing will be loaded into the tool panel, so look for the installed repository on disk.
- tool_path = shed_tool_conf_dict[ 'tool_path' ]
- relative_install_dir = os.path.join( tool_path, partial_install_dir )
- if tool_path and relative_install_dir and os.path.isdir( relative_install_dir ):
- return shed_tool_conf, tool_path, relative_install_dir
- return None, None, None
+ shed_config_dict = repository.get_shed_config_dict( app )
+ if not shed_config_dict:
+ #just pick a semi-random shed config
+ for shed_config_dict in app.toolbox.shed_tool_confs:
+ if ( repository.dist_to_shed and shed_config_dict['config_filename'] == app.config.migrated_tools_config ) or ( not repository.dist_to_shed and shed_config_dict['config_filename'] != app.config.migrated_tools_config ):
+ break
+ shed_tool_conf = shed_config_dict[ 'config_filename' ]
+ tool_path = shed_config_dict[ 'tool_path' ]
+ relative_install_dir = partial_install_dir
+ return shed_tool_conf, tool_path, relative_install_dir
def get_tool_path_install_dir( partial_install_dir, shed_tool_conf_dict, tool_dict, config_elems ):
for elem in config_elems:
if elem.tag == 'tool':
@@ -1774,6 +1772,42 @@
config_elems.remove( config_elem )
# Persist the altered in-memory version of the tool config.
config_elems_to_xml_file( trans.app, config_elems, shed_tool_conf, tool_path )
+def update_in_shed_tool_config( app, repository ):
+ # A tool shed repository is being updated so change the shed_tool_conf file. Parse the config file to generate the entire list
+ # of config_elems instead of using the in-memory list.
+ shed_conf_dict = repository.get_shed_config_dict( app )
+ shed_tool_conf = shed_conf_dict[ 'config_filename' ]
+ tool_path = shed_conf_dict[ 'tool_path' ]
+
+ #hack for 'trans.app' used in lots of places. These places should just directly use app
+ trans = util.bunch.Bunch()
+ trans.app = app
+
+ tool_panel_dict = generate_tool_panel_dict_from_shed_tool_conf_entries( trans, repository )
+ repository_tools_tups = get_repository_tools_tups( app, repository.metadata )
+ cleaned_repository_clone_url = clean_repository_clone_url( generate_clone_url( trans, repository ) )
+ tool_shed = tool_shed_from_repository_clone_url( cleaned_repository_clone_url )
+ owner = repository.owner
+ if not owner:
+ owner = get_repository_owner( cleaned_repository_clone_url )
+ guid_to_tool_elem_dict = {}
+ for tool_config_filename, guid, tool in repository_tools_tups:
+ guid_to_tool_elem_dict[ guid ] = generate_tool_elem( tool_shed, repository.name, repository.changeset_revision, repository.owner or '', tool_config_filename, tool, None )
+ config_elems = []
+ tree = util.parse_xml( shed_tool_conf )
+ root = tree.getroot()
+ for elem in root:
+ if elem.tag == 'section':
+ for i, tool_elem in enumerate( elem ):
+ guid = tool_elem.attrib.get( 'guid' )
+ if guid in guid_to_tool_elem_dict:
+ elem[i] = guid_to_tool_elem_dict[ guid ]
+ elif elem.tag == 'tool':
+ guid = elem.attrib.get( 'guid' )
+ if guid in guid_to_tool_elem_dict:
+ elem = guid_to_tool_elem_dict[ guid ]
+ config_elems.append( elem )
+ config_elems_to_xml_file( app, config_elems, shed_tool_conf, tool_path )
def remove_from_tool_panel( trans, repository, shed_tool_conf, uninstall ):
"""A tool shed repository is being deactivated or uninstalled so handle tool panel alterations accordingly."""
# Determine where the tools are currently defined in the tool panel and store this information so the tools can be displayed
diff -r 0263d37b08e610e572b5f08940c19a6f19f0931d -r 97eb5bde48da4575e8de4fed64fe910a4df45dce lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -655,15 +655,17 @@
update_tool_shed_repository_status( trans.app, tool_shed_repository, trans.model.ToolShedRepository.installation_status.CLONING )
repo_info_tuple = repo_info_dict[ tool_shed_repository.name ]
description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, tool_dependencies = repo_info_tuple
- clone_dir = os.path.join( tool_path, self.generate_tool_path( repository_clone_url, tool_shed_repository.installed_changeset_revision ) )
- relative_install_dir = os.path.join( clone_dir, tool_shed_repository.name )
- cloned_ok, error_message = clone_repository( repository_clone_url, os.path.abspath( relative_install_dir ), ctx_rev )
+ relative_clone_dir = self.generate_tool_path( repository_clone_url, tool_shed_repository.installed_changeset_revision )
+ clone_dir = os.path.join( tool_path, relative_clone_dir )
+ relative_install_dir = os.path.join( relative_clone_dir, tool_shed_repository.name )
+ install_dir = os.path.join( tool_path, relative_install_dir )
+ cloned_ok, error_message = clone_repository( repository_clone_url, os.path.abspath( install_dir ), ctx_rev )
if cloned_ok:
if reinstalling:
# Since we're reinstalling the repository we need to find the latest changeset revision to which is can be updated.
current_changeset_revision, current_ctx_rev = get_update_to_changeset_revision_and_ctx_rev( trans, tool_shed_repository )
if current_ctx_rev != ctx_rev:
- repo = hg.repository( get_configured_ui(), path=os.path.abspath( relative_install_dir ) )
+ repo = hg.repository( get_configured_ui(), path=os.path.abspath( install_dir ) )
pull_repository( repo, repository_clone_url, current_changeset_revision )
update_repository( repo, ctx_rev=current_ctx_rev )
self.handle_repository_contents( trans,
@@ -704,7 +706,7 @@
tool_shed_repository,
trans.model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES )
# Get the tool_dependencies.xml file from the repository.
- tool_dependencies_config = get_config_from_disk( 'tool_dependencies.xml', relative_install_dir )
+ tool_dependencies_config = get_config_from_disk( 'tool_dependencies.xml', install_dir )#relative_install_dir )
installed_tool_dependencies = handle_tool_dependencies( app=trans.app,
tool_shed_repository=tool_shed_repository,
tool_dependencies_config=tool_dependencies_config,
@@ -736,6 +738,7 @@
metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( app=trans.app,
repository=tool_shed_repository,
repository_clone_url=repository_clone_url,
+ shed_config_dict = trans.app.toolbox.get_shed_config_dict_by_filename( shed_tool_conf ),
relative_install_dir=relative_install_dir,
repository_files_dir=None,
resetting_all_metadata_on_repository=False,
@@ -749,7 +752,7 @@
tool_panel_dict = generate_tool_panel_dict_for_new_install( metadata_dict[ 'tools' ], tool_section )
sample_files = metadata_dict.get( 'sample_files', [] )
tool_index_sample_files = get_tool_index_sample_files( sample_files )
- copy_sample_files( self.app, tool_index_sample_files )
+ copy_sample_files( self.app, tool_index_sample_files, tool_path=tool_path )
sample_files_copied = [ s for s in tool_index_sample_files ]
repository_tools_tups = get_repository_tools_tups( trans.app, metadata_dict )
if repository_tools_tups:
@@ -762,7 +765,7 @@
repository_tools_tups,
sample_files_copied )
# Copy remaining sample files included in the repository to the ~/tool-data directory of the local Galaxy instance.
- copy_sample_files( trans.app, sample_files, sample_files_copied=sample_files_copied )
+ copy_sample_files( trans.app, sample_files, tool_path=tool_path, sample_files_copied=sample_files_copied )
add_to_tool_panel( app=trans.app,
repository_name=tool_shed_repository.name,
repository_clone_url=repository_clone_url,
@@ -805,6 +808,8 @@
repository_id = kwd[ 'id' ]
operation = kwd.get( 'operation', None )
repository = get_repository( trans, repository_id )
+ if not repository:
+ return trans.show_error_message( 'Invalid repository specified.' )
if repository.status in [ trans.model.ToolShedRepository.installation_status.CLONING ]:
return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
action='monitor_repository_installation',
@@ -822,7 +827,7 @@
description = util.restore_text( params.get( 'description', repository.description ) )
shed_tool_conf, tool_path, relative_install_dir = get_tool_panel_config_tool_path_install_dir( trans.app, repository )
if relative_install_dir:
- repo_files_dir = os.path.abspath( os.path.join( relative_install_dir, repository.name ) )
+ repo_files_dir = os.path.abspath( os.path.join( tool_path, relative_install_dir, repository.name ) )
else:
repo_files_dir = None
if repository.in_error_state:
@@ -1447,20 +1452,27 @@
repository = get_repository( trans, id )
tool_shed_url = get_url_from_repository_tool_shed( trans.app, repository )
repository_clone_url = self.__generate_clone_url( trans, repository )
- relative_install_dir = repository.repo_path( trans.app )
+ tool_path, relative_install_dir = repository.get_tool_relative_path( trans.app )
if relative_install_dir:
+ original_metadata_dict = repository.metadata
metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( app=trans.app,
repository=repository,
repository_clone_url=repository_clone_url,
+ shed_config_dict = repository.get_shed_config_dict( trans.app ),
relative_install_dir=relative_install_dir,
repository_files_dir=None,
resetting_all_metadata_on_repository=False,
updating_installed_repository=False )
repository.metadata = metadata_dict
- trans.sa_session.add( repository )
- trans.sa_session.flush()
- message = 'Metadata has been reset on repository <b>%s</b>.' % repository.name
- status = 'done'
+ if metadata_dict != original_metadata_dict:
+ update_in_shed_tool_config( trans.app, repository )#def update_in_shed_tool_config( trans, shed_tool_conf_dict, elem_list ):
+ trans.sa_session.add( repository )
+ trans.sa_session.flush()
+ message = 'Metadata has been reset on repository <b>%s</b>.' % repository.name
+ status = 'done'
+ else:
+ message = 'Metadata did not need to be reset on repository <b>%s</b>.' % repository.name
+ status = 'done'
else:
message = 'Error locating installation directory for repository <b>%s</b>.' % repository.name
status = 'error'
@@ -1618,7 +1630,10 @@
else:
shed_tool_conf, tool_path, relative_install_dir = get_tool_panel_config_tool_path_install_dir( trans.app, repository )
if relative_install_dir:
- repo_files_dir = os.path.abspath( os.path.join( relative_install_dir, name ) )
+ if tool_path:
+ repo_files_dir = os.path.abspath( os.path.join( tool_path, relative_install_dir, name ) )
+ else:
+ repo_files_dir = os.path.abspath( os.path.join( relative_install_dir, name ) )
repo = hg.repository( get_configured_ui(), path=repo_files_dir )
repository_clone_url = os.path.join( tool_shed_url, 'repos', owner, name )
pull_repository( repo, repository_clone_url, latest_ctx_rev )
@@ -1628,6 +1643,7 @@
metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( app=trans.app,
repository=repository,
repository_clone_url=repository_clone_url,
+ shed_config_dict = repository.get_shed_config_dict( trans.app ),
relative_install_dir=relative_install_dir,
repository_files_dir=None,
resetting_all_metadata_on_repository=False,
@@ -1667,8 +1683,13 @@
status = params.get( 'status', 'done' )
repository = get_repository( trans, id )
metadata = repository.metadata
+ shed_config_dict = repository.get_shed_config_dict( trans.app )
+ tool_path = shed_config_dict.get( 'tool_path', None )
if metadata and 'readme' in metadata:
- f = open( metadata[ 'readme' ], 'r' )
+ readme_filename = metadata[ 'readme' ]
+ if tool_path:
+ readme_filename = os.path.join( tool_path, readme_filename )
+ f = open( readme_filename, 'r' )
raw_text = f.read()
f.close()
readme_text = translate_string( raw_text, to_html=True )
@@ -1691,6 +1712,7 @@
status = params.get( 'status', 'done' )
repository = get_repository( trans, repository_id )
repository_metadata = repository.metadata
+ shed_config_dict = repository.get_shed_config_dict( trans.app )
tool_metadata = {}
tool_lineage = []
tool = None
@@ -1698,7 +1720,10 @@
for tool_metadata_dict in repository_metadata[ 'tools' ]:
if tool_metadata_dict[ 'id' ] == tool_id:
tool_metadata = tool_metadata_dict
- tool = trans.app.toolbox.load_tool( os.path.abspath( tool_metadata[ 'tool_config' ] ), guid=tool_metadata[ 'guid' ] )
+ tool_config = tool_metadata[ 'tool_config' ]
+ if shed_config_dict and shed_config_dict.get( 'tool_path' ):
+ tool_config = os.path.join( shed_config_dict.get( 'tool_path' ), tool_config )
+ tool = trans.app.toolbox.load_tool( os.path.abspath( tool_config ), guid=tool_metadata[ 'guid' ] )
if tool:
tool_lineage = self.get_versions_of_tool( trans.app, tool.id )
break
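The recurring pattern throughout this changeset is that repository install paths stay relative in metadata and in the tool_shed_repository table, and the shed config's tool_path is joined in only at the point of disk access. A short sketch of that convention, with illustrative values:

import os

shed_config_dict = { 'config_filename': 'shed_tool_conf.xml', 'tool_path': '../shed_tools' }
relative_install_dir = 'toolshed.g2.bx.psu.edu/repos/dan/column_maker/abc123/column_maker'

# Join tool_path and the stored relative path only when touching disk.
if shed_config_dict.get( 'tool_path' ):
    repo_install_dir = os.path.join( shed_config_dict[ 'tool_path' ], relative_install_dir )
else:
    repo_install_dir = relative_install_dir

print( repo_install_dir )
# ../shed_tools/toolshed.g2.bx.psu.edu/repos/dan/column_maker/abc123/column_maker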
https://bitbucket.org/galaxy/galaxy-central/changeset/429e895ee1b3/
changeset: 429e895ee1b3
user: dan
date: 2012-10-09 16:24:59
summary: Enhance manage_repository.mako to handle null repository metadata.
affected #: 1 file
diff -r 97eb5bde48da4575e8de4fed64fe910a4df45dce -r 429e895ee1b3bfb2fea1632d34a01d54faeac866 templates/admin/tool_shed_repository/manage_repository.mako
--- a/templates/admin/tool_shed_repository/manage_repository.mako
+++ b/templates/admin/tool_shed_repository/manage_repository.mako
@@ -95,7 +95,7 @@
<div class="toolFormTitle">${repository.name}</div><div class="toolFormBody"><%
- metadata = repository.metadata
+ metadata = repository.metadata or {}
missing_tool_dependencies = repository.missing_tool_dependencies
installed_tool_dependencies = repository.installed_tool_dependencies
%>
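The one-line template fix relies on the `or {}` idiom: when metadata is None, an empty dict is substituted, so later membership tests and .get() calls cannot raise on a null value. For example:

metadata = None
metadata = metadata or {}
print( 'tools' in metadata )  # False, instead of TypeError on None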
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/40b252052087/
changeset: 40b252052087
user: chapmanb
date: 2012-10-04 21:31:16
summary: Correctly set history and handle output datasets for error cases in tool API. Allow specification of dataset name during uploads, exposing through API
affected #: 3 files
diff -r f3b183e756f9b209ef0904718ed547e04c74ab7a -r 40b252052087bc06fd1adc47f9633a496a7dd07c lib/galaxy/tools/parameters/grouping.py
--- a/lib/galaxy/tools/parameters/grouping.py
+++ b/lib/galaxy/tools/parameters/grouping.py
@@ -209,7 +209,7 @@
dataset_name = get_file_name( data_file['filename'] )
if not dataset_info:
dataset_info = 'uploaded file'
- return Bunch( type='file', path=data_file['local_filename'], name=get_file_name( data_file['filename'] ) )
+ return Bunch( type='file', path=data_file['local_filename'], name=dataset_name )
#return 'file', data_file['local_filename'], get_file_name( data_file.filename ), dataset_name, dataset_info
except:
# The uploaded file should've been persisted by the upload tool action
@@ -227,14 +227,13 @@
if line:
if not line.lower().startswith( 'http://' ) and not line.lower().startswith( 'ftp://' ) and not line.lower().startswith( 'https://' ):
continue # non-url line, ignore
- precreated_name = line
dataset_name = override_name
if not dataset_name:
dataset_name = line
dataset_info = override_info
if not dataset_info:
dataset_info = 'uploaded url'
- yield Bunch( type='url', path=line, name=precreated_name )
+ yield Bunch( type='url', path=line, name=dataset_name )
#yield ( 'url', line, precreated_name, dataset_name, dataset_info )
else:
dataset_name = dataset_info = precreated_name = 'Pasted Entry' #we need to differentiate between various url pastes here
diff -r f3b183e756f9b209ef0904718ed547e04c74ab7a -r 40b252052087bc06fd1adc47f9633a496a7dd07c lib/galaxy/webapps/galaxy/api/tools.py
--- a/lib/galaxy/webapps/galaxy/api/tools.py
+++ b/lib/galaxy/webapps/galaxy/api/tools.py
@@ -55,6 +55,17 @@
tool = trans.app.toolbox.get_tool( tool_id )
if not tool:
return { "message": { "type": "error", "text" : messages.NO_TOOL } }
+
+ # Set running history from payload parameters.
+ # History not set correctly as part of this API call for
+ # dataset upload.
+ history_id = payload.get("history_id", None)
+ if history_id:
+ target_history = trans.sa_session.query(trans.app.model.History).get(
+ trans.security.decode_id(history_id))
+ trans.galaxy_session.current_history = target_history
+ else:
+ target_history = None
# Set up inputs.
inputs = payload[ 'inputs' ]
@@ -62,10 +73,10 @@
inputs['runtool_btn'] = 'Execute'
# TODO: encode data ids and decode ids.
params = util.Params( inputs, sanitize = False )
- template, vars = tool.handle_input( trans, params.__dict__ )
-
+ template, vars = tool.handle_input( trans, params.__dict__, history=target_history)
+
# TODO: check for errors and ensure that output dataset(s) are available.
- output_datasets = vars[ 'out_data' ].values()
+ output_datasets = vars.get('out_data', {}).values()
rval = {
"outputs": []
}
diff -r f3b183e756f9b209ef0904718ed547e04c74ab7a -r 40b252052087bc06fd1adc47f9633a496a7dd07c tools/data_source/upload.xml
--- a/tools/data_source/upload.xml
+++ b/tools/data_source/upload.xml
@@ -41,6 +41,7 @@
<param name="space_to_tab" type="select" display="checkboxes" multiple="True" label="Convert spaces to tabs" help="Use this option if you are entering intervals by hand."><option value="Yes">Yes</option></param>
+ <param name="NAME" type="hidden" help="Name for dataset in upload"></param></upload_dataset><param name="dbkey" type="genomebuild" label="Genome" /><conditional name="files_metadata" title="Specify metadata" value_from="self:app.datatypes_registry.get_upload_metadata_params" value_ref="file_type" value_ref_in_group="False" />
https://bitbucket.org/galaxy/galaxy-central/changeset/413cf15e4065/
changeset: 413cf15e4065
user: jgoecks
date: 2012-10-09 05:44:02
summary: Merged in chapmanb/galaxy-central-apiupload (pull request #74)
affected #: 3 files
diff -r 8269f76312af60e356707bc660b6e9903e402106 -r 413cf15e4065a9f8d559ca110e6e86b84f8a6620 lib/galaxy/tools/parameters/grouping.py
--- a/lib/galaxy/tools/parameters/grouping.py
+++ b/lib/galaxy/tools/parameters/grouping.py
@@ -209,7 +209,7 @@
dataset_name = get_file_name( data_file['filename'] )
if not dataset_info:
dataset_info = 'uploaded file'
- return Bunch( type='file', path=data_file['local_filename'], name=get_file_name( data_file['filename'] ) )
+ return Bunch( type='file', path=data_file['local_filename'], name=dataset_name )
#return 'file', data_file['local_filename'], get_file_name( data_file.filename ), dataset_name, dataset_info
except:
# The uploaded file should've been persisted by the upload tool action
@@ -227,14 +227,13 @@
if line:
if not line.lower().startswith( 'http://' ) and not line.lower().startswith( 'ftp://' ) and not line.lower().startswith( 'https://' ):
continue # non-url line, ignore
- precreated_name = line
dataset_name = override_name
if not dataset_name:
dataset_name = line
dataset_info = override_info
if not dataset_info:
dataset_info = 'uploaded url'
- yield Bunch( type='url', path=line, name=precreated_name )
+ yield Bunch( type='url', path=line, name=dataset_name )
#yield ( 'url', line, precreated_name, dataset_name, dataset_info )
else:
dataset_name = dataset_info = precreated_name = 'Pasted Entry' #we need to differentiate between various url pastes here
diff -r 8269f76312af60e356707bc660b6e9903e402106 -r 413cf15e4065a9f8d559ca110e6e86b84f8a6620 lib/galaxy/webapps/galaxy/api/tools.py
--- a/lib/galaxy/webapps/galaxy/api/tools.py
+++ b/lib/galaxy/webapps/galaxy/api/tools.py
@@ -55,6 +55,17 @@
tool = trans.app.toolbox.get_tool( tool_id )
if not tool:
return { "message": { "type": "error", "text" : messages.NO_TOOL } }
+
+ # Set running history from payload parameters.
+ # History not set correctly as part of this API call for
+ # dataset upload.
+ history_id = payload.get("history_id", None)
+ if history_id:
+ target_history = trans.sa_session.query(trans.app.model.History).get(
+ trans.security.decode_id(history_id))
+ trans.galaxy_session.current_history = target_history
+ else:
+ target_history = None
# Set up inputs.
inputs = payload[ 'inputs' ]
@@ -62,10 +73,10 @@
inputs['runtool_btn'] = 'Execute'
# TODO: encode data ids and decode ids.
params = util.Params( inputs, sanitize = False )
- template, vars = tool.handle_input( trans, params.__dict__ )
-
+ template, vars = tool.handle_input( trans, params.__dict__, history=target_history)
+
# TODO: check for errors and ensure that output dataset(s) are available.
- output_datasets = vars[ 'out_data' ].values()
+ output_datasets = vars.get('out_data', {}).values()
rval = {
"outputs": []
}
diff -r 8269f76312af60e356707bc660b6e9903e402106 -r 413cf15e4065a9f8d559ca110e6e86b84f8a6620 tools/data_source/upload.xml
--- a/tools/data_source/upload.xml
+++ b/tools/data_source/upload.xml
@@ -41,6 +41,7 @@
<param name="space_to_tab" type="select" display="checkboxes" multiple="True" label="Convert spaces to tabs" help="Use this option if you are entering intervals by hand."><option value="Yes">Yes</option></param>
+ <param name="NAME" type="hidden" help="Name for dataset in upload"></param></upload_dataset><param name="dbkey" type="genomebuild" label="Genome" /><conditional name="files_metadata" title="Specify metadata" value_from="self:app.datatypes_registry.get_upload_metadata_params" value_ref="file_type" value_ref_in_group="False" />
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
commit/galaxy-central: jgoecks: Fixes that (a) simplify data providers framework and (b) make it possible to view different trees in the same nexus file.
by Bitbucket 08 Oct '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/8269f76312af/
changeset: 8269f76312af
user: jgoecks
date: 2012-10-09 05:33:03
summary: Fixes that (a) simplify data providers framework and (b) make it possible to view different trees in the same nexus file.
affected #: 7 files
diff -r 02fe49c3d251bd30114dcd616336b73b2e8d1ab2 -r 8269f76312af60e356707bc660b6e9903e402106 lib/galaxy/visualization/data_providers/genome.py
--- a/lib/galaxy/visualization/data_providers/genome.py
+++ b/lib/galaxy/visualization/data_providers/genome.py
@@ -520,7 +520,7 @@
rval.append( payload )
- return { 'data': rval, 'message': message }
+ return { 'data': rval, 'dataset_type': self.dataset_type, 'message': message }
def write_data_to_file( self, regions, filename ):
out = open( filename, "w" )
@@ -550,8 +550,6 @@
for large datasets.
"""
- dataset_type = 'interval_index'
-
def get_iterator( self, chrom=None, start=None, end=None ):
# Read first line in order to match chrom naming format.
line = source.readline()
@@ -696,8 +694,6 @@
for large datasets.
"""
- dataset_type = 'tabix'
-
def get_iterator( self, chrom, start, end ):
# Read first line in order to match chrom naming format.
line = source.readline()
@@ -1278,7 +1274,7 @@
results.append( payload )
- return { 'data': results, 'message': message }
+ return { 'data': results, 'dataset_type': self.dataset_type, 'message': message }
class GtfTabixDataProvider( TabixDataProvider ):
"""
diff -r 02fe49c3d251bd30114dcd616336b73b2e8d1ab2 -r 8269f76312af60e356707bc660b6e9903e402106 lib/galaxy/visualization/data_providers/phyloviz/__init__.py
--- a/lib/galaxy/visualization/data_providers/phyloviz/__init__.py
+++ b/lib/galaxy/visualization/data_providers/phyloviz/__init__.py
@@ -7,36 +7,37 @@
class PhylovizDataProvider( BaseDataProvider ):
+ dataset_type = "phylo"
+
def __init__( self, original_dataset=None ):
super( PhylovizDataProvider, self ).__init__( original_dataset=original_dataset )
- def get_data( self ):
- """returns [trees], meta
- Trees are actually an array of JsonDicts. It's usually one tree, except in the case of Nexus
+ def get_data( self, tree_index=0 ):
+ """
+ Returns trees.
+ Trees are actually an array of JsonDicts. It's usually one tree, except in the case of Nexus
"""
- jsonDicts, meta = [], {}
file_ext = self.original_dataset.datatype.file_ext
file_name = self.original_dataset.file_name
- try:
- if file_ext == "nhx": # parses newick files
- newickParser = Newick_Parser()
- jsonDicts, parseMsg = newickParser.parseFile( file_name )
- elif file_ext == "phyloxml": # parses phyloXML files
- phyloxmlParser = Phyloxml_Parser()
- jsonDicts, parseMsg = phyloxmlParser.parseFile( file_name )
- elif file_ext == "nex": # parses nexus files
- nexusParser = Nexus_Parser()
- jsonDicts, parseMsg = nexusParser.parseFile( file_name )
- meta["trees"] = parseMsg
- else:
- raise Exception("File type is not supported")
+ parseMsg = None
+ jsonDicts = []
+ rval = { 'dataset_type': self.dataset_type }
- meta["msg"] = parseMsg
+ if file_ext == "nhx": # parses newick files
+ newickParser = Newick_Parser()
+ jsonDicts, parseMsg = newickParser.parseFile( file_name )
+ elif file_ext == "phyloxml": # parses phyloXML files
+ phyloxmlParser = Phyloxml_Parser()
+ jsonDicts, parseMsg = phyloxmlParser.parseFile( file_name )
+ elif file_ext == "nex": # parses nexus files
+ nexusParser = Nexus_Parser()
+ jsonDicts, parseMsg = nexusParser.parseFile( file_name )
+ jsonDicts = jsonDicts[ int( tree_index ) ]
+ rval["trees"] = parseMsg
- except Exception, e:
- raise e
- jsonDicts, meta["msg"] = [], "Parse failed"
+ rval[ "data" ] = jsonDicts
+ rval[ "msg"] = parseMsg
+
+ return rval
- return jsonDicts, meta
-
diff -r 02fe49c3d251bd30114dcd616336b73b2e8d1ab2 -r 8269f76312af60e356707bc660b6e9903e402106 lib/galaxy/visualization/data_providers/registry.py
--- a/lib/galaxy/visualization/data_providers/registry.py
+++ b/lib/galaxy/visualization/data_providers/registry.py
@@ -1,5 +1,8 @@
from galaxy.visualization.data_providers.basic import ColumnDataProvider
from galaxy.visualization.data_providers.genome import *
+from galaxy.visualization.data_providers.phyloviz import PhylovizDataProvider
+from galaxy.datatypes.xml import Phyloxml
+from galaxy.datatypes.data import Newick, Nexus
class DataProviderRegistry( object ):
"""
@@ -45,6 +48,8 @@
data_provider_class = RawVcfDataProvider
elif isinstance( original_dataset.datatype, Tabular ):
data_provider_class = ColumnDataProvider
+ elif isinstance( original_dataset.datatype, ( Nexus, Newick, Phyloxml ) ):
+ data_provider_class = PhylovizDataProvider
data_provider = data_provider_class( original_dataset=original_dataset )
diff -r 02fe49c3d251bd30114dcd616336b73b2e8d1ab2 -r 8269f76312af60e356707bc660b6e9903e402106 lib/galaxy/webapps/galaxy/api/datasets.py
--- a/lib/galaxy/webapps/galaxy/api/datasets.py
+++ b/lib/galaxy/webapps/galaxy/api/datasets.py
@@ -185,19 +185,7 @@
return msg
# Return data.
- data = None
data_provider = trans.app.data_provider_registry.get_data_provider( trans, raw=True, original_dataset=dataset )
-
- if isinstance( data_provider, ColumnDataProvider ):
- data = data_provider.get_data( **kwargs )
-
- else:
- # Default to genomic data.
- # FIXME: need better way to set dataset_type.
- low, high = int( kwargs.get( 'low' ) ), int( kwargs.get( 'high' ) )
- data = data_provider.get_data( start=low, end=high, **kwargs )
- data[ 'dataset_type' ] = 'interval_index'
- data[ 'extra_info' ] = None
- if isinstance( dataset.datatype, Vcf ):
- data[ 'dataset_type' ] = 'tabix'
+ data = data_provider.get_data( **kwargs )
+
return data
diff -r 02fe49c3d251bd30114dcd616336b73b2e8d1ab2 -r 8269f76312af60e356707bc660b6e9903e402106 lib/galaxy/webapps/galaxy/controllers/visualization.py
--- a/lib/galaxy/webapps/galaxy/controllers/visualization.py
+++ b/lib/galaxy/webapps/galaxy/controllers/visualization.py
@@ -847,8 +847,7 @@
# Get data.
pd = PhylovizDataProvider( original_dataset=hda )
- json, config = pd.get_data()
- json = json[tree_index]
+ config = pd.get_data( tree_index=tree_index )
config["title"] = hda.display_name()
config["ext"] = hda.datatype.file_ext
@@ -857,7 +856,7 @@
config["saved_visualization"] = False
# Return viz.
- return trans.fill_template_mako( "visualization/phyloviz.mako", data = json, config=config )
+ return trans.fill_template_mako( "visualization/phyloviz.mako", data = config[ "data" ], config=config )
@web.json
def bookmarks_from_dataset( self, trans, hda_id=None, ldda_id=None ):
diff -r 02fe49c3d251bd30114dcd616336b73b2e8d1ab2 -r 8269f76312af60e356707bc660b6e9903e402106 static/scripts/viz/phyloviz.js
--- a/static/scripts/viz/phyloviz.js
+++ b/static/scripts/viz/phyloviz.js
@@ -1,4 +1,4 @@
-define(['libs/d3', 'viz/visualization'], function(d3, visualization_mod) {
+define(['libs/d3', 'viz/visualization', 'mvc/data'], function(d3, visualization_mod, data_mod) {
var UserMenuBase = Backbone.View.extend({
/**
@@ -181,6 +181,12 @@
nodeAttrChangedTime : 0
},
+ initialize: function(options) {
+ this.set("dataset", new data_mod.Dataset({
+ id: options.dataset_id
+ }));
+ },
+
root : {}, // Root has to be its own independent object because it is not part of the viz_config
toggle : function (d) {
@@ -255,7 +261,7 @@
},
success: function(res){
var viz_id = res.url.split("id=")[1].split("&")[0],
- viz_url = "/phyloviz/visualization?id=" + viz_id;
+ viz_url = "/visualization?id=" + viz_id;
window.history.pushState({}, "", viz_url + window.location.hash);
hide_modal();
}
@@ -662,11 +668,11 @@
* Primes the Ajax URL to load another Nexus tree
*/
var self = this,
- treeIndex = $("#phylovizNexSelector :selected").val(),
- dataset_id = self.phyloTree.get("dataset_id"),
- url = "phyloviz/getJsonData?dataset_id=" + dataset_id + "&treeIndex=" + String(treeIndex);
- $.getJSON(url, function(packedJson){
- window.initPhyloViz(packedJson.data, packedJson.config);
+ treeIndex = $("#phylovizNexSelector :selected").val();
+ $.getJSON(self.phyloTree.get("dataset").url(), { tree_index: treeIndex, data_type: 'raw_data' }, function(packedJson){
+ self.data = packedJson.data;
+ self.config = packedJson;
+ self.render();
});
}
});
diff -r 02fe49c3d251bd30114dcd616336b73b2e8d1ab2 -r 8269f76312af60e356707bc660b6e9903e402106 templates/visualization/phyloviz.mako
--- a/templates/visualization/phyloviz.mako
+++ b/templates/visualization/phyloviz.mako
@@ -165,7 +165,7 @@
// -- Render viz. --
phyloviz.render();
- }
+ };
$(function firstVizLoad(){ // calls when viz is loaded for the first time
var config = JSON.parse( '${ h.to_json_string( config )}');
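For context, a minimal standalone sketch of the pattern this commit moves to: each provider class declares its own dataset_type and includes it in the payload, so callers no longer fill it in with isinstance() checks. The classes below are stand-ins, not Galaxy's actual implementations.

    # Stand-in classes illustrating per-provider dataset_type in the payload.
    class BaseDataProvider( object ):
        dataset_type = None
        def get_data( self, **kwargs ):
            # Each provider reports its own type; callers pass the dict through.
            return { 'dataset_type': self.dataset_type, 'data': [], 'message': None }

    class TabixDataProvider( BaseDataProvider ):
        dataset_type = 'tabix'

    class PhylovizDataProvider( BaseDataProvider ):
        dataset_type = 'phylo'

    for provider in ( TabixDataProvider(), PhylovizDataProvider() ):
        print provider.get_data()[ 'dataset_type' ]    # 'tabix', then 'phylo'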
commit/galaxy-central: jgoecks: Add interpreter and absolute path logic to version tag. Thanks to Björn Grüning.
by Bitbucket 08 Oct '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/02fe49c3d251/
changeset: 02fe49c3d251
user: jgoecks
date: 2012-10-08 20:22:44
summary: Add interpreter and absolute path logic to version tag. Thanks to Björn Grüning.
affected #: 1 file
diff -r 724aaf15bcbe4989450af00c83ea8538c6b2f051 -r 02fe49c3d251bd30114dcd616336b73b2e8d1ab2 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -960,6 +960,12 @@
version_cmd = root.find("version_command")
if version_cmd is not None:
self.version_string_cmd = version_cmd.text
+ version_cmd_interpreter = version_cmd.get( "interpreter", None )
+ if version_cmd_interpreter:
+ executable = self.version_string_cmd.split()[0]
+ abs_executable = os.path.abspath(os.path.join(self.tool_dir, executable))
+ command_line = self.version_string_cmd.replace(executable, abs_executable, 1)
+ self.version_string_cmd = self.interpreter + " " + command_line
# Parallelism for tasks, read from tool config.
parallelism = root.find("parallelism")
if parallelism is not None and parallelism.get("method"):
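For context, a standalone sketch of the path logic the new attribute triggers, using made-up values for the tool directory and command; the real code reads these from the tool's XML and instance attributes.

    import os

    # Hypothetical inputs: a tool directory, a relative version command, and
    # the 'interpreter' attribute from the version_command tag.
    tool_dir = '/galaxy/tools/mytool'
    version_string_cmd = 'get_version.py --short'
    interpreter = 'python'

    # Resolve the executable to an absolute path under the tool directory,
    # then prefix the interpreter, mirroring the logic added above.
    executable = version_string_cmd.split()[0]
    abs_executable = os.path.abspath( os.path.join( tool_dir, executable ) )
    version_string_cmd = interpreter + ' ' + version_string_cmd.replace( executable, abs_executable, 1 )
    print version_string_cmd    # python /galaxy/tools/mytool/get_version.py --short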
commit/galaxy-central: carlfeberhard: history.js: add persistent storage, show previously opened datasets on page refresh; base-mvc.js: add PersistantStorage object adapter
by Bitbucket 08 Oct '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/724aaf15bcbe/
changeset: 724aaf15bcbe
user: carlfeberhard
date: 2012-10-08 19:27:11
summary: history.js: add persistent storage, show previously opened datasets on page refresh; base-mvc.js: add PersistantStorage object adapter
affected #: 3 files
diff -r c456d67423b6988de9f9777d9758901845d02de8 -r 724aaf15bcbe4989450af00c83ea8538c6b2f051 static/scripts/mvc/base-mvc.js
--- a/static/scripts/mvc/base-mvc.js
+++ b/static/scripts/mvc/base-mvc.js
@@ -41,6 +41,7 @@
}
});
+
//==============================================================================
/**
* Adds logging capabilities to your Models/Views
@@ -69,6 +70,7 @@
}
};
+
// =============================================================================
/** Global string localization object (and global short form alias)
* set with either:
@@ -78,8 +80,8 @@
* _l( original )
*/
//TODO: move to Galaxy.Localization (maybe galaxy.base.js)
-var GalaxyLocalization = jQuery.extend({}, {
- aliasName : '_l',
+var GalaxyLocalization = jQuery.extend( {}, {
+ ALIAS_NAME : '_l',
localizedStrings : {},
setLocalizedString : function( str_or_obj, localizedString ){
@@ -127,7 +129,7 @@
});
// global localization alias
-window[ GalaxyLocalization.aliasName ] = function( str ){ return GalaxyLocalization.localize( str ); };
+window[ GalaxyLocalization.ALIAS_NAME ] = function( str ){ return GalaxyLocalization.localize( str ); };
//TEST: setLocalizedString( string, string ), _l( string )
//TEST: setLocalizedString( hash ), _l( string )
@@ -135,203 +137,112 @@
//TEST: _l( non assigned string )
-
//==============================================================================
/**
- * Base class for template loaders:
- * The main interface is loader.getTemplates( templatesToLoad )
- * where templatesToLoad is in the form:
- * {
- * remoteTemplateFilename1: {
- * templateFunctionName1 : templateID1,
- * templateFunctionName2 : templateID2,
- * ...
- * },
- * remoteTemplateFilename2: {
- * templateFunctionName3 : templateID3,
- * templateFunctionName4 : templateID4,
- * ...
- * }
- * }
- * getTemplates will return a map of the templates in the form:
- * {
- * templateFunctionName1 : compiledTemplateFn1(),
- * templateFunctionName2 : compiledTemplateFn2(),
- * templateFunctionName3 : compiledTemplateFn3(),
- * ...
- * }
+ * @class PersistantStorage
+ * persistant storage adapter to:
+ * provide an easy interface to object based storage using method chaining
+ * allow easy change of the storage engine used (h5's local storage?)
*
- * Generally meant to be called for Backbone views, etc like this:
- * BackboneView.templates = CompiledTemplateLoader( templatesToLoad );
+ * @param {String} storageKey : the key the storage engine will place the storage object under
+ * @param {Object} storageDefaults : [optional] initial object to set up storage with
+ *
+ * @example :
+ * HistoryPanel.storage = new PersistanStorage( HistoryPanel.toString(), { visibleItems, {} })
+ * itemView.bind( 'toggleBodyVisibility', function( id, visible ){
+ * if( visible ){
+ * HistoryPanel.storage.get( 'visibleItems' ).set( id, true );
+ * } else {
+ * HistoryPanel.storage.get( 'visibleItems' ).deleteKey( id );
+ * }
+ * });
*/
-var TemplateLoader = _.extend( {}, LoggableMixin, {
- //TODO: incorporate caching of template functions (for use across objects)
- //TODO: only require and use 2 level (or some variation) map templatesToLoad for the remote loader
-
- // comment next line out to suppress logging
- //logger : console,
-
- //cachedTemplates : {},
-
- getTemplateLoadFn : function(){
- throw( "There is no templateLoadFn. Make sure you're using a subclass of TemplateLoader" );
- },
-
- // loop through templatesToLoad assuming it is a map in the form mentioned above
- getTemplates : function( templatesToLoad, forceReload ){
- forceReload = forceReload || false;
- this.log( this, 'getTemplates:', templatesToLoad, ', forceReload:', forceReload );
-
- //!TODO: cache templates here
- var templates = {},
- loader = this,
- templateLoadFn = this.getTemplateLoadFn();
-
- if( !templatesToLoad ){ return templates; }
- jQuery.each( templatesToLoad, function( templateFile, templateData ){
-
- //TODO: handle flatter map versions of templatesToLoad ({ name : id })
- jQuery.each( templateData, function( templateName, templateID ){
- loader.log( loader + ', templateFile:', templateFile,
- 'templateName:', templateName, ', templateID:', templateID );
- templates[ templateName ] = templateLoadFn.call( loader, templateFile, templateName, templateID );
- });
- });
- return templates;
+var PersistantStorage = function( storageKey, storageDefaults ){
+ if( !storageKey ){
+ throw( "PersistantStorage needs storageKey argument" );
}
-});
+ storageDefaults = storageDefaults || {};
+ // ~constants for the current engine
+ //TODO:?? this would be greatly simplified if we're IE9+ only (setters/getters)
+ var STORAGE_ENGINE_GETTER = jQuery.jStorage.get,
+ STORAGE_ENGINE_SETTER = jQuery.jStorage.set,
+ STORAGE_ENGINE_KEY_DELETER = jQuery.jStorage.deleteKey;
-//..............................................................................
-/** find the compiled template in Handlebars.templates by templateName
- * and return the entire, requested templates map
- */
-var CompiledTemplateLoader = _.extend( {}, TemplateLoader, {
- getTemplateLoadFn : function(){ return this.loadCompiledHandlebarsTemplate; },
-
- // override if new compiler
- loadCompiledHandlebarsTemplate : function( templateFile, templateName, templateID ){
- //pre: compiled templates should have been loaded with the mako helper h.templates
- // (although these could be dynamically loaded as well?)
- this.log( 'getInDomTemplates, templateFile:', templateFile,
- 'templateName:', templateName, ', templateID:', templateID );
-
- if( !Handlebars.templates || !Handlebars.templates[ templateID ] ){
- throw( 'Template not found: Handlebars.' + templateID
- + '. Check your h.templates() call in the mako file that rendered this page' );
- }
- this.log( 'found template function:', templateID );
- // really this is just a lookup
- return Handlebars.templates[ templateID ];
- }
-
- //TEST: Handlebars.full NOT runtime
- //TEST: no Handlebars
- //TEST: bad id
- //TEST: Handlebars.runtime, good id
-});
+ // recursion helper for method chaining access
+ var StorageRecursionHelper = function( data, parent ){
+ //console.debug( 'new StorageRecursionHelper. data:', data );
+ data = data || {};
+ parent = parent || null;
+ return {
+ // get a value from the storage obj named 'key',
+ // if it's an object - return a new StorageRecursionHelper wrapped around it
+ // if it's something simpler - return the value
+ // if this isn't passed a key - return the data at this level of recursion
+ get : function( key ){
+ //console.debug( this + '.get', key );
+ if( key === undefined ){
+ return data;
+ } else if( data.hasOwnProperty( key ) ){
+ return ( jQuery.type( data[ key ] ) === 'object' )?
+ ( new StorageRecursionHelper( data[ key ], this ) )
+ :( data[ key ] );
+ }
+ return undefined;
+ },
+ // set a value on the current data - then pass up to top to save current entire object in storage
+ set : function( key, value ){
+ //TODO: add parameterless variation setting the data somehow
+ // ??: difficult bc of obj by ref, closure
+ //console.debug( this + '.set', key, value );
+ data[ key ] = value;
+ this.save();
+ return this;
+ },
+ // remove a key at this level - then save entire (as 'set' above)
+ deleteKey : function( key ){
+ //console.debug( this + '.deleteKey', key );
+ delete data[ key ];
+ this.save();
+ return this;
+ },
+ // pass up the recursion chain (see below for base case)
+ save : function(){
+ //console.debug( this + '.save', parent );
+ return parent.save();
+ },
+ toString : function(){
+ return ( 'StorageRecursionHelper(' + data + ')' );
+ }
+ };
+ };
-//..............................................................................
-/** find the NON-compiled template templateID in the DOM, compile it (using Handlebars),
- * and return the entire, requested templates map
- * (Note: for use with Mako.include and multiple templates)
- */
-var InDomTemplateLoader = _.extend( {}, TemplateLoader, {
-
- // override or change if a new compiler (Underscore, etc.) is being used
- compileTemplate : function( templateText ){
- // we'll need the compiler
- if( !Handlebars || !Handlebars.compile ){
- throw( 'No Handlebars.compile found. You may only have Handlebars.runtime loaded.'
- + 'Include handlebars.full for this to work' );
- }
- // copy fn ref to this view under the templateName
- this.log( 'compiling template:', templateText );
- return Handlebars.compile( templateText );
- },
-
- findTemplateInDom : function( templateFile, templateName, templateID ){
- // assume the last is best
- return $( 'script#' + templateID ).last();
- },
-
- getTemplateLoadFn : function(){ return this.loadInDomTemplate; },
-
- loadInDomTemplate : function( templateFile, templateName, templateID ){
- this.log( 'getInDomTemplate, templateFile:', templateFile,
- 'templateName:', templateName, ', templateID:', templateID );
-
- // find it in the dom by the id and compile
- var template = this.findTemplateInDom( templateFile, templateName, templateID );
- if( !template || !template.length ){
- throw( 'Template not found within the DOM: ' + templateID
- + '. Check that this template has been included in the page' );
- }
- this.log( 'found template in dom:', template.html() );
- return this.compileTemplate( template.html() );
+ //??: more readable to make another class?
+ var returnedStorage = {};
+ // attempt to get starting data from engine...
+ data = STORAGE_ENGINE_GETTER( storageKey );
+
+ // ...if that fails, use the defaults (and store them)
+ if( data === null ){
+ //console.debug( 'no previous data. using defaults...' );
+ data = jQuery.extend( true, {}, storageDefaults );
+ STORAGE_ENGINE_SETTER( storageKey, data );
}
- //TEST: no compiler
- //TEST: good url, good id, in DOM
- //TEST: good url, good id, NOT in DOM
-});
+ // the object returned by this constructor will be a modified StorageRecursionHelper
+ returnedStorage = new StorageRecursionHelper( data );
+ // the base case for save()'s upward recursion - save everything to storage
+ returnedStorage.save = function( newData ){
+ //console.debug( returnedStorage, '.save:', JSON.stringify( returnedStorage.get() ) );
+ STORAGE_ENGINE_SETTER( storageKey, returnedStorage.get() );
+ };
+ // delete function to remove the base data object from the storageEngine
+ returnedStorage.destroy = function(){
+ //console.debug( returnedStorage, '.destroy:' );
+ STORAGE_ENGINE_KEY_DELETER( storageKey );
+ };
+ returnedStorage.toString = function(){ return 'PersistantStorage(' + data + ')'; };
+
+ return returnedStorage;
+};
-//..............................................................................
-/** HTTP GET the NON-compiled templates, append into the DOM, compile them,
- * and return the entire, requested templates map
- * (for use with dynamically loaded views)
- */
-var RemoteTemplateLoader = _.extend( {}, InDomTemplateLoader, {
- templateBaseURL : 'static/scripts/templates/',
-
- getTemplateLoadFn : function(){ return this.loadViaHttpGet; },
-
- loadViaHttpGet : function( templateFile, templateName, templateID ){
- var templateBaseURL = 'static/scripts/templates/';
- this.log( 'loadViaHttpGet, templateFile:', templateFile,
- 'templateName:', templateName, ', templateID:', templateID,
- 'templateBaseURL:', this.templateBaseURL );
-
- //??: possibly not the best pattern here...
- // try in-dom first (prevent loading the same templateFile for each of its templates)
- var template = null;
- try {
- template = this.loadInDomTemplate( templateFile, templateName, templateID );
-
- // if that didn't work, load the templateFile via GET,...
- } catch( exception ){
- this.log( 'getInDomTemplate exception:' + exception );
- // handle no compiler exception
- if( !Handlebars.compile ){ throw( exception ); }
- //TEST:
-
- this.log( "Couldn't locate template in DOM: " + templateID );
- var loader = this;
- var url = templateBaseURL + templateFile;
- //??: async : false may cause problems in the long run
- jQuery.ajax( url, {
- method : 'GET',
- async : false,
- success : function( data ){
- loader.log( templateFile + ' loaded via GET. Attempting compile...' );
- //...move the templateFile into the DOM and try that again
- $( 'body' ).append( data );
- template = loader.loadInDomTemplate( templateFile, templateName, templateID );
- },
- error : function( data, status, xhr ){
- throw( 'Failed to fetch ' + url + ':' + status );
- }
- });
- }
- if( !template ){
- throw( "Couldn't load or fetch template: " + templateID );
- }
- return template;
- }
-
- //TEST: no compiler
- //TEST: good url, good id, already local
- //TEST: good url, good id, remote load
- //TEST: good url, bad template id
- //TEST: bad url, error from ajax
-});
diff -r c456d67423b6988de9f9777d9758901845d02de8 -r 724aaf15bcbe4989450af00c83ea8538c6b2f051 static/scripts/mvc/history.js
--- a/static/scripts/mvc/history.js
+++ b/static/scripts/mvc/history.js
@@ -6,6 +6,8 @@
Backbone.js implementation of history panel
TODO:
+ currently, adding a dataset (via tool execute, etc.) creates a new dataset and refreshes the page
+
meta:
require.js
convert function comments to jsDoc style, complete comments
@@ -142,8 +144,9 @@
className : "historyItemContainer",
// ................................................................................ SET UP
- initialize : function(){
+ initialize : function( attributes ){
this.log( this + '.initialize:', this, this.model );
+ this.visible = attributes.visible;
},
// ................................................................................ RENDER MAIN
@@ -209,6 +212,7 @@
return buttonDiv;
},
+ //TODO: ?? the three title buttons render for err'd datasets: is this normal?
_render_displayButton : function(){
// don't show display while uploading
if( this.model.get( 'state' ) === HistoryItem.STATES.UPLOAD ){ return null; }
@@ -608,6 +612,11 @@
if( this.model.get( 'bodyIsShown' ) === false ){
body.hide();
}
+ if( this.visible ){
+ body.show();
+ } else {
+ body.hide();
+ }
return body;
},
@@ -693,14 +702,15 @@
return false;
},
- toggleBodyVisibility : function(){
- this.log( this + '.toggleBodyVisibility' );
- this.$el.find( '.historyItemBody' ).toggle();
+ toggleBodyVisibility : function( visible ){
+ var $body = this.$el.find( '.historyItemBody' );
+ $body.toggle();
+ this.trigger( 'toggleBodyVisibility', this.model.get( 'id' ), $body.is( ':visible' ) );
},
// ................................................................................ UTILTIY
toString : function(){
- var modelString = ( this.model )?( this.model + '' ):( '' );
+ var modelString = ( this.model )?( this.model + '' ):( '(no model)' );
return 'HistoryItemView(' + modelString + ')';
}
});
@@ -708,21 +718,18 @@
//------------------------------------------------------------------------------
//HistoryItemView.templates = InDomTemplateLoader.getTemplates({
-HistoryItemView.templates = CompiledTemplateLoader.getTemplates({
- 'common-templates.html' : {
- warningMsg : 'template-warningmessagesmall'
- },
- 'history-templates.html' : {
- messages : 'template-history-warning-messages',
- titleLink : 'template-history-titleLink',
- hdaSummary : 'template-history-hdaSummary',
- downloadLinks : 'template-history-downloadLinks',
- failedMetadata : 'template-history-failedMetaData',
- tagArea : 'template-history-tagArea',
- annotationArea : 'template-history-annotationArea',
- displayApps : 'template-history-displayApps'
- }
-});
+HistoryItemView.templates = {
+ warningMsg : Handlebars.templates[ 'template-warningmessagesmall' ],
+
+ messages : Handlebars.templates[ 'template-history-warning-messages' ],
+ titleLink : Handlebars.templates[ 'template-history-titleLink' ],
+ hdaSummary : Handlebars.templates[ 'template-history-hdaSummary' ],
+ downloadLinks : Handlebars.templates[ 'template-history-downloadLinks' ],
+ failedMetadata : Handlebars.templates[ 'template-history-failedMetaData' ],
+ tagArea : Handlebars.templates[ 'template-history-tagArea' ],
+ annotationArea : Handlebars.templates[ 'template-history-annotationArea' ],
+ displayApps : Handlebars.templates[ 'template-history-displayApps' ]
+};
//==============================================================================
var HistoryCollection = Backbone.Collection.extend({
@@ -867,22 +874,51 @@
});
//------------------------------------------------------------------------------
+// view for the HistoryCollection (as per current right hand panel)
+//var HistoryView = BaseView.extend( LoggableMixin ).extend( UsesStorageMixin ) .extend({
var HistoryView = BaseView.extend( LoggableMixin ).extend({
- // view for the HistoryCollection (as per current right hand panel)
// uncomment this out see log messages
//logger : console,
// direct attachment to existing element
el : 'body.historyPage',
-
+ //TODO: add id?
+
initialize : function(){
this.log( this + '.initialize:', this );
- this.itemViews = [];
- var parent = this;
+ // data that needs to be persistant over page refreshes
+ this.storage = new PersistantStorage(
+ 'HistoryView.' + this.model.get( 'id' ),
+ { visibleItems : {} }
+ );
+ // set up the individual history items/datasets
+ this.initializeItems();
+ },
+
+ initializeItems : function(){
+ this.itemViews = {};
+ var historyPanel = this;
this.model.items.each( function( item ){
- var itemView = new HistoryItemView({ model: item });
- parent.itemViews.push( itemView );
+ var itemId = item.get( 'id' ),
+ itemView = new HistoryItemView({
+ model: item, visible:
+ historyPanel.storage.get( 'visibleItems' ).get( itemId )
+ });
+ historyPanel.setUpItemListeners( itemView );
+ historyPanel.itemViews[ itemId ] = itemView;
+ });
+ },
+
+ setUpItemListeners : function( itemView ){
+ var HistoryPanel = this;
+ // use storage to maintain a list of items whose bodies are visible
+ itemView.bind( 'toggleBodyVisibility', function( id, visible ){
+ if( visible ){
+ HistoryPanel.storage.get( 'visibleItems' ).set( id, true );
+ } else {
+ HistoryPanel.storage.get( 'visibleItems' ).deleteKey( id );
+ }
});
},
@@ -907,8 +943,8 @@
var div = $( '<div/>' ),
view = this;
//NOTE!: render in reverse (newest on top) via prepend (instead of append)
- _.each( this.itemViews, function( itemView ){
- view.log( view + '.render_items:', itemView );
+ _.each( this.itemViews, function( itemView, viewId ){
+ view.log( view + '.render_items:', viewId, itemView );
div.prepend( itemView.render() );
});
return div;
@@ -919,12 +955,9 @@
return 'HistoryView(' + nameString + ')';
}
});
-//HistoryItemView.templates = InDomTemplateLoader.getTemplates({
-HistoryView.templates = CompiledTemplateLoader.getTemplates({
- 'history-templates.html' : {
- historyPanel : 'template-history-historyPanel'
- }
-});
+HistoryView.templates = {
+ historyPanel : Handlebars.templates[ 'template-history-historyPanel' ]
+};
diff -r c456d67423b6988de9f9777d9758901845d02de8 -r 724aaf15bcbe4989450af00c83ea8538c6b2f051 templates/root/alternate_history.mako
--- a/templates/root/alternate_history.mako
+++ b/templates/root/alternate_history.mako
@@ -344,7 +344,9 @@
${parent.javascripts()}
${h.js(
- "libs/jquery/jstorage", "libs/jquery/jquery.autocomplete", "galaxy.autocom_tagging",
+ "libs/jquery/jstorage",
+ "libs/jquery/jquery.autocomplete", "galaxy.autocom_tagging",
+ "libs/json2",
"mvc/base-mvc", "mvc/ui"
)}
@@ -395,10 +397,10 @@
if( pageData.hdaId ){
self.location = "#" + pageData.hdaId;
}
-
- glx_history = new History( pageData.history ).loadDatasetsAsHistoryItems( pageData.hdas );
- glx_history_view = new HistoryView({ model: glx_history });
+ var glx_history = new History( pageData.history ).loadDatasetsAsHistoryItems( pageData.hdas ),
+ glx_history_view = new HistoryView({ model: glx_history });
glx_history_view.render();
+ window.glx_history = glx_history; window.glx_history_view = glx_history_view;
return;
@@ -452,4 +454,4 @@
${_('Galaxy History')}
</%def>
-<body class="historyPage"></body>
\ No newline at end of file
+<body class="historyPage"></body>
commit/galaxy-central: greg: Categories method cleanup in the tool shed admin controller, and add the search feature to the Admin Repository grid in the tool shed.
by Bitbucket 08 Oct '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/c456d67423b6/
changeset: c456d67423b6
user: greg
date: 2012-10-08 17:02:16
summary: Categories method cleanup in the tool shed admin controller, and add the search feature to the Admin Repository grid in the tool shed.
affected #: 3 files
diff -r 04290e2842452d00b9a1e62a1fb54b8acfc0826f -r c456d67423b6988de9f9777d9758901845d02de8 lib/galaxy/webapps/community/controllers/admin.py
--- a/lib/galaxy/webapps/community/controllers/admin.py
+++ b/lib/galaxy/webapps/community/controllers/admin.py
@@ -18,8 +18,7 @@
log = logging.getLogger( __name__ )
class UserListGrid( grids.Grid ):
- # TODO: move this to an admin_common controller since it is virtually the same
- # in the galaxy webapp.
+ # TODO: move this to an admin_common controller since it is virtually the same in the galaxy webapp.
class UserLoginColumn( grids.TextColumn ):
def get_value( self, trans, grid, user ):
return user.email
@@ -61,7 +60,6 @@
return query
return query.filter( and_( model.Tool.table.c.user_id == model.User.table.c.id,
model.User.table.c.email == column_filter ) )
- # Grid definition
title = "Users"
model_class = model.User
template='/admin/user/grid.mako'
@@ -146,8 +144,6 @@
if role.users:
return len( role.users )
return 0
-
- # Grid definition
title = "Roles"
model_class = model.Role
template='/admin/dataset_security/role/grid.mako'
@@ -231,19 +227,14 @@
if group.members:
return len( group.members )
return 0
-
- # Grid definition
title = "Groups"
model_class = model.Group
template='/admin/dataset_security/group/grid.mako'
default_sort_key = "name"
columns = [
NameColumn( "Name",
- #key="name",
link=( lambda item: dict( operation="Manage users and roles", id=item.id ) ),
- attach_popup=True
- #filterable="advanced"
- ),
+ attach_popup=True ),
UsersColumn( "Users", attach_popup=False ),
RolesColumn( "Roles", attach_popup=False ),
StatusColumn( "Status", attach_popup=False ),
@@ -301,6 +292,30 @@
]
class AdminRepositoryListGrid( RepositoryListGrid ):
+ columns = [ RepositoryListGrid.NameColumn( "Name",
+ key="name",
+ link=( lambda item: dict( operation="view_or_manage_repository", id=item.id ) ),
+ attach_popup=True ),
+ RepositoryListGrid.DescriptionColumn( "Synopsis",
+ key="description",
+ attach_popup=False ),
+ RepositoryListGrid.MetadataRevisionColumn( "Metadata Revisions" ),
+ RepositoryListGrid.UserColumn( "Owner",
+ model_class=model.User,
+ link=( lambda item: dict( operation="repositories_by_user", id=item.id ) ),
+ attach_popup=False,
+ key="User.username" ),
+ RepositoryListGrid.EmailAlertsColumn( "Alert", attach_popup=False ),
+ # Columns that are valid for filtering but are not visible.
+ grids.DeletedColumn( "Deleted",
+ key="deleted",
+ visible=False,
+ filterable="advanced" ) ]
+ columns.append( grids.MulticolFilterColumn( "Search repository name, description",
+ cols_to_filter=[ columns[0], columns[1] ],
+ key="free-text-search",
+ visible=False,
+ filterable="standard" ) )
operations = [ operation for operation in RepositoryListGrid.operations ]
operations.append( grids.GridOperation( "Delete",
allow_multiple=False,
@@ -507,41 +522,26 @@
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
+ name = util.restore_text( params.get( 'name', '' ) ).strip()
+ description = util.restore_text( params.get( 'description', '' ) ).strip()
if params.get( 'create_category_button', False ):
- name = util.restore_text( params.name )
- description = util.restore_text( params.description )
- error = False
if not name or not description:
message = 'Enter a valid name and a description'
- error = True
- elif trans.sa_session.query( trans.app.model.Category ) \
- .filter( trans.app.model.Category.table.c.name==name ) \
- .first():
+ status = 'error'
+ elif get_category_by_name( trans, name ):
message = 'A category with that name already exists'
- error = True
- if error:
- return trans.fill_template( '/webapps/community/category/create_category.mako',
- name=name,
- description=description,
- message=message,
- status='error' )
+ status = 'error'
else:
# Create the category
category = trans.app.model.Category( name=name, description=description )
trans.sa_session.add( category )
+ trans.sa_session.flush()
message = "Category '%s' has been created" % category.name
- trans.sa_session.flush()
+ status = 'done'
trans.response.send_redirect( web.url_for( controller='admin',
action='manage_categories',
- message=util.sanitize_text( message ),
- status='done' ) )
- trans.response.send_redirect( web.url_for( controller='admin',
- action='create_category',
- message=util.sanitize_text( message ),
- status='error' ) )
- else:
- name = ''
- description = ''
+ message=message,
+ status=status ) )
return trans.fill_template( '/webapps/community/category/create_category.mako',
name=name,
description=description,
@@ -623,8 +623,7 @@
if not new_name:
message = 'Enter a valid name'
status = 'error'
- elif category.name != new_name and \
- trans.sa_session.query( trans.app.model.Category ).filter( trans.app.model.Category.table.c.name==new_name ).first():
+ elif category.name != new_name and get_category_by_name( trans, name ):
message = 'A category with that name already exists'
status = 'error'
else:
@@ -633,10 +632,11 @@
trans.sa_session.add( category )
trans.sa_session.flush()
message = "The information has been saved for category '%s'" % ( category.name )
+ status = 'done'
return trans.response.send_redirect( web.url_for( controller='admin',
action='manage_categories',
- message=util.sanitize_text( message ),
- status='done' ) )
+ message=message,
+ status=status ) )
return trans.fill_template( '/webapps/community/category/edit_category.mako',
category=category,
message=message,
@@ -649,20 +649,31 @@
# What we've done is rendered the search box for the RepositoryListGrid on the grid.mako
# template for the CategoryListGrid. See ~/templates/webapps/community/category/grid.mako.
# Since we are searching repositories and not categories, redirect to browse_repositories().
- return self.browse_repositories( trans, **kwd )
+ return trans.response.send_redirect( web.url_for( controller='admin',
+ action='browse_repositories',
+ **kwd ) )
if 'operation' in kwd:
operation = kwd['operation'].lower()
if operation == "create":
- return self.create_category( trans, **kwd )
+ return trans.response.send_redirect( web.url_for( controller='admin',
+ action='create_category',
+ **kwd ) )
elif operation == "delete":
- return self.mark_category_deleted( trans, **kwd )
+ return trans.response.send_redirect( web.url_for( controller='admin',
+ action='mark_category_deleted',
+ **kwd ) )
elif operation == "undelete":
- return self.undelete_category( trans, **kwd )
+ return trans.response.send_redirect( web.url_for( controller='admin',
+ action='undelete_category',
+ **kwd ) )
elif operation == "purge":
- return self.purge_category( trans, **kwd )
+ return trans.response.send_redirect( web.url_for( controller='admin',
+ action='purge_category',
+ **kwd ) )
elif operation == "edit":
- return self.edit_category( trans, **kwd )
- # Render the list view
+ return trans.response.send_redirect( web.url_for( controller='admin',
+ action='edit_category',
+ **kwd ) )
return self.manage_category_list_grid( trans, **kwd )
@web.expose
@web.require_admin
diff -r 04290e2842452d00b9a1e62a1fb54b8acfc0826f -r c456d67423b6988de9f9777d9758901845d02de8 lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -76,8 +76,6 @@
viewable_repositories += 1
return viewable_repositories
return 0
-
- # Grid definition
title = "Categories of valid repositories"
model_class = model.Category
template='/webapps/community/category/valid_grid.mako'
@@ -146,15 +144,6 @@
if column_filter == "All":
return query
return query.filter( model.Category.name == column_filter )
- class DeletedColumn( grids.DeletedColumn ):
- def get_accepted_filters( self ):
- """ Returns a list of accepted filters for this column. """
- accepted_filter_labels_and_vals = { "Active" : "False", "Deactivated or uninstalled" : "True", "All": "All" }
- accepted_filters = []
- for label, val in accepted_filter_labels_and_vals.items():
- args = { self.key: val }
- accepted_filters.append( grids.GridColumnFilter( label, args) )
- return accepted_filters
class UserColumn( grids.TextColumn ):
def get_value( self, trans, grid, repository ):
if repository.user:
@@ -179,8 +168,7 @@
columns = [
NameColumn( "Name",
key="name",
- link=( lambda item: dict( operation="view_or_manage_repository",
- id=item.id ) ),
+ link=( lambda item: dict( operation="view_or_manage_repository", id=item.id ) ),
attach_popup=True ),
DescriptionColumn( "Synopsis",
key="description",
@@ -207,10 +195,10 @@
model_class=model.Category,
key="Category.name",
visible=False ),
- DeletedColumn( "Status",
- key="deleted",
- visible=False,
- filterable="advanced" )
+ grids.DeletedColumn( "Deleted",
+ key="deleted",
+ visible=False,
+ filterable="advanced" )
]
columns.append( grids.MulticolFilterColumn( "Search repository name, description",
cols_to_filter=[ columns[0], columns[1] ],
diff -r 04290e2842452d00b9a1e62a1fb54b8acfc0826f -r c456d67423b6988de9f9777d9758901845d02de8 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -55,6 +55,15 @@
class ToolShedColumn( grids.TextColumn ):
def get_value( self, trans, grid, tool_shed_repository ):
return tool_shed_repository.tool_shed
+ class DeletedColumn( grids.DeletedColumn ):
+ def get_accepted_filters( self ):
+ """ Returns a list of accepted filters for this column. """
+ accepted_filter_labels_and_vals = { "Active" : "False", "Deactivated or uninstalled" : "True", "All": "All" }
+ accepted_filters = []
+ for label, val in accepted_filter_labels_and_vals.items():
+ args = { self.key: val }
+ accepted_filters.append( grids.GridColumnFilter( label, args) )
+ return accepted_filters
# Grid definition
title = "Installed tool shed repositories"
model_class = model.ToolShedRepository
@@ -74,10 +83,10 @@
filterable="advanced" ),
ToolShedColumn( "Tool shed" ),
# Columns that are valid for filtering but are not visible.
- grids.DeletedColumn( "Deleted",
- key="deleted",
- visible=False,
- filterable="advanced" )
+ DeletedColumn( "Status",
+ key="deleted",
+ visible=False,
+ filterable="advanced" )
]
columns.append( grids.MulticolFilterColumn( "Search repository name",
cols_to_filter=[ columns[0] ],
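For context, the free-text search wired up in both grids above ORs one term across the listed columns. A standalone sketch of that matching semantics with made-up rows (the real grids build a SQLAlchemy filter instead of scanning dicts):

    # Made-up rows standing in for repository records.
    rows = [ { 'name': 'fastq_trimmer', 'description': 'Trim reads' },
             { 'name': 'bam_filter',    'description': 'Filter fastq or bam' } ]
    cols_to_filter = [ 'name', 'description' ]
    term = 'fastq'
    # A row matches when any filtered column contains the term.
    matches = [ row for row in rows
                if any( term in row[ col ].lower() for col in cols_to_filter ) ]
    print len( matches )    # 2: both rows mention 'fastq' somewhere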
commit/galaxy-central: greg: Make the DeletedColumn in the Repositories grid a subclass so that the Advanced search labels clarify the status of the filtered repositories.
by Bitbucket 08 Oct '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/04290e284245/
changeset: 04290e284245
user: greg
date: 2012-10-08 16:30:43
summary: Make the DeletedColumn in the Repositories grid a subclass so that the Advanced search labels clarify the status of the filtered repositories.
affected #: 1 file
diff -r 685a17af92dfd6a2e3d7e3c9a3a4b119c78a6f96 -r 04290e2842452d00b9a1e62a1fb54b8acfc0826f lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -40,8 +40,6 @@
viewable_repositories += 1
return viewable_repositories
return 0
-
- # Grid definition
title = "Categories"
model_class = model.Category
template='/webapps/community/category/grid.mako'
@@ -148,6 +146,15 @@
if column_filter == "All":
return query
return query.filter( model.Category.name == column_filter )
+ class DeletedColumn( grids.DeletedColumn ):
+ def get_accepted_filters( self ):
+ """ Returns a list of accepted filters for this column. """
+ accepted_filter_labels_and_vals = { "Active" : "False", "Deactivated or uninstalled" : "True", "All": "All" }
+ accepted_filters = []
+ for label, val in accepted_filter_labels_and_vals.items():
+ args = { self.key: val }
+ accepted_filters.append( grids.GridColumnFilter( label, args) )
+ return accepted_filters
class UserColumn( grids.TextColumn ):
def get_value( self, trans, grid, repository ):
if repository.user:
@@ -200,10 +207,10 @@
model_class=model.Category,
key="Category.name",
visible=False ),
- grids.DeletedColumn( "Deleted",
- key="deleted",
- visible=False,
- filterable="advanced" )
+ DeletedColumn( "Status",
+ key="deleted",
+ visible=False,
+ filterable="advanced" )
]
columns.append( grids.MulticolFilterColumn( "Search repository name, description",
cols_to_filter=[ columns[0], columns[1] ],
@@ -2182,7 +2189,10 @@
cntrller = params.get( 'cntrller', 'repository' )
repository = get_repository( trans, id )
repository_metadata = get_repository_metadata_by_changeset_revision( trans, trans.security.encode_id( repository.id ), changeset_revision )
- metadata = repository_metadata.metadata
+ if repository_metadata:
+ metadata = repository_metadata.metadata
+ else:
+ metadata = None
if metadata and 'readme' in metadata:
readme_file = str( metadata[ 'readme' ] )
repo_files_dir = repository.repo_path
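For context, a standalone sketch of what the relabeled filters resolve to: each Advanced-search label maps to a value for the grid's 'deleted' key (grids.GridColumnFilter is stubbed as a plain tuple here).

    accepted_filter_labels_and_vals = { "Active": "False",
                                        "Deactivated or uninstalled": "True",
                                        "All": "All" }
    # Stub for grids.GridColumnFilter( label, args ): pair the label with args.
    accepted_filters = [ ( label, { 'deleted': val } )
                         for label, val in accepted_filter_labels_and_vals.items() ]
    for label, args in accepted_filters:
        print label, args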
commit/galaxy-central: natefoo: Add logical operators to the compute tool whitelist.
by Bitbucket 08 Oct '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/685a17af92df/
changeset: 685a17af92df
user: natefoo
date: 2012-10-08 16:28:29
summary: Add logical operators to the compute tool whitelist.
affected #: 1 file
diff -r 26dfa56403e1011445854560b3e9818180b3c8c2 -r 685a17af92dfd6a2e3d7e3c9a3a4b119c78a6f96 tools/stats/column_maker.py
--- a/tools/stats/column_maker.py
+++ b/tools/stats/column_maker.py
@@ -46,9 +46,10 @@
for key, value in mapped_str.items():
expr = expr.replace( key, value )
+operators = 'is|not|or|and'
builtin_and_math_functions = 'abs|all|any|bin|chr|cmp|complex|divmod|float|hex|int|len|long|max|min|oct|ord|pow|range|reversed|round|sorted|str|sum|type|unichr|unicode|log|exp|sqrt|ceil|floor'
string_and_list_methods = [ name for name in dir('') + dir([]) if not name.startswith('_') ]
-whitelist = "^([c0-9\+\-\*\/\(\)\.\'\"><=,: ]|%s|%s)*$" % (builtin_and_math_functions, '|'.join(string_and_list_methods))
+whitelist = "^([c0-9\+\-\*\/\(\)\.\'\"><=,:! ]|%s|%s|%s)*$" % (operators, builtin_and_math_functions, '|'.join(string_and_list_methods))
if not re.compile(whitelist).match(expr):
stop_err("Invalid expression")
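For context, the whitelist above can be exercised standalone; the expressions below are made-up examples showing an accepted column expression and a rejected one.

    import re

    operators = 'is|not|or|and'
    builtin_and_math_functions = 'abs|all|any|bin|chr|cmp|complex|divmod|float|hex|int|len|long|max|min|oct|ord|pow|range|reversed|round|sorted|str|sum|type|unichr|unicode|log|exp|sqrt|ceil|floor'
    string_and_list_methods = [ name for name in dir('') + dir([]) if not name.startswith('_') ]
    whitelist = "^([c0-9\+\-\*\/\(\)\.\'\"><=,:! ]|%s|%s|%s)*$" % (operators, builtin_and_math_functions, '|'.join(string_and_list_methods))

    for expr in ( 'c1 and c2 > 0', '__import__("os").system("ls")' ):
        # Only expressions made entirely of whitelisted tokens pass.
        print expr, '->', bool( re.compile( whitelist ).match( expr ) )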