1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/dc74a95853b6/
Changeset: dc74a95853b6
User: dan
Date: 2014-05-13 21:12:25
Summary: Allow tool_runner rerun to accept a job_id instead of a dataset_id.
Affected #: 1 file
diff -r f7de39159497fbcffb3f0ed785f26ecf56b40155 -r dc74a95853b6ebe05f811b78b9a6cfbe43211fc8 lib/galaxy/webapps/galaxy/controllers/tool_runner.py
--- a/lib/galaxy/webapps/galaxy/controllers/tool_runner.py
+++ b/lib/galaxy/webapps/galaxy/controllers/tool_runner.py
@@ -106,33 +106,41 @@
**vars )
@web.expose
- def rerun( self, trans, id=None, from_noframe=None, **kwd ):
+ def rerun( self, trans, id=None, from_noframe=None, job_id=None, **kwd ):
"""
Given a HistoryDatasetAssociation id, find the job and that created
the dataset, extract the parameters, and display the appropriate tool
form with parameters already filled in.
"""
- if not id:
- error( "'id' parameter is required" );
- try:
- id = int( id )
-
- except:
- # it's not an un-encoded id, try to parse as encoded
+ if job_id:
try:
- id = trans.security.decode_id( id )
+ job_id = trans.security.decode_id( job_id )
+ job = trans.sa_session.query( trans.app.model.Job ).get( job_id )
except:
- error( "Invalid value for 'id' parameter" )
-
- # Get the dataset object
- data = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( id )
- #only allow rerunning if user is allowed access to the dataset.
- if not ( trans.user_is_admin() or trans.app.security_agent.can_access_dataset( trans.get_current_user_roles(), data.dataset ) ):
- error( "You are not allowed to access this dataset" )
- # Get the associated job, if any.
- job = data.creating_job
- if not job:
- raise Exception("Failed to get job information for dataset hid %d" % data.hid)
+ error( "Invalid value for 'job_id' parameter" )
+ param_error_text = "Failed to get parameters for job id %d " % job_id
+ else:
+ if not id:
+ error( "'id' parameter is required" );
+ try:
+ id = int( id )
+ except:
+ # it's not an un-encoded id, try to parse as encoded
+ try:
+ id = trans.security.decode_id( id )
+ except:
+ error( "Invalid value for 'id' parameter" )
+
+ # Get the dataset object
+ data = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( id )
+ #only allow rerunning if user is allowed access to the dataset.
+ if not ( trans.user_is_admin() or trans.app.security_agent.can_access_dataset( trans.get_current_user_roles(), data.dataset ) ):
+ error( "You are not allowed to access this dataset" )
+ # Get the associated job, if any.
+ job = data.creating_job
+ if not job:
+ raise Exception("Failed to get job information for dataset hid %d" % data.hid)
+ param_error_text = "Failed to get parameters for dataset id %d " % data.id
# Get the tool object
tool_id = job.tool_id
tool_version = job.tool_version
@@ -172,7 +180,7 @@
try:
params_objects = job.get_param_values( trans.app, ignore_errors = True )
except:
- raise Exception( "Failed to get parameters for dataset id %d " % data.id )
+ raise Exception( param_error_text )
upgrade_messages = tool.check_and_update_param_values( params_objects, trans, update_values=False )
# Need to remap dataset parameters. Job parameters point to original
# dataset used; parameter should be the analygous dataset in the
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/f7de39159497/
Changeset: f7de39159497
User: dan
Date: 2014-05-13 20:34:57
Summary: More Data Manager UI tweaks.
Affected #: 1 file
diff -r 1c913c980053c4ce2dc6425cdf102298488242ec -r f7de39159497fbcffb3f0ed785f26ecf56b40155 templates/webapps/galaxy/data_manager/index.mako
--- a/templates/webapps/galaxy/data_manager/index.mako
+++ b/templates/webapps/galaxy/data_manager/index.mako
@@ -12,11 +12,11 @@
%if view_only:
<p>Not implemented</p>
%elif not data_managers.data_managers:
- ${render_msg( "You do not currently have any Data Managers installed.", "warning" ) }
+ ${ render_msg( 'You do not currently have any Data Managers installed. You can install some from a <a href="%s">ToolShed</a>.' % ( h.url_for( controller="admin_toolshed", action="browse_tool_sheds" ) ), "warning" ) }
%else:
- <p>Choose your data managing option from below.</p>
+ <p>Choose your data managing option from below. You may install additional Data Managers from a <a href="${ h.url_for( controller='admin_toolshed', action='browse_tool_sheds' ) }">ToolShed</a>.</p><ul>
- <li><strong>Run Data Manager Tools</strong>
+ <li><h3>Run Data Manager Tools</h3><div style="margin-left:1em"><ul>
%for data_manager_id, data_manager in sorted( data_managers.data_managers.iteritems(), key=lambda x:x[1].name ):
@@ -29,12 +29,12 @@
</div></li><p/>
- <li><strong>View Data Manager Jobs</strong>
+ <li><h3>View Data Manager Jobs</h3><div style="margin-left:1em"><ul>
%for data_manager_id, data_manager in sorted( data_managers.data_managers.iteritems(), key=lambda x:x[1].name ):
<li>
- <a href="${h.url_for( controller='data_manager', action='manage_data_manager', id=data_manager_id)}" target="galaxy_main"><strong>${ data_manager.name | h }</strong></a> - ${ data_manager.description | h }</a>
+ <a href="${ h.url_for( controller='data_manager', action='manage_data_manager', id=data_manager_id)}" target="galaxy_main"><strong>${ data_manager.name | h }</strong></a> - ${ data_manager.description | h }</a></li><p/>
%endfor
@@ -43,7 +43,7 @@
</li><p/><p/>
- <li><strong>View Tool Data Table Entries</strong>
+ <li><h3>View Tool Data Table Entries</h3><div style="margin-left:1em"><ul><% managed_table_names = data_managers.managed_data_tables.keys() %>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/e5b545b85b83/
Changeset: e5b545b85b83
User: dan
Date: 2014-05-13 19:01:47
Summary: Add the ability for Admins to reload tool data table entries from disk without restarting the Galaxy server. This does not look for new <table> entries or contained new <file>s within XML files, but will e.g. make any changes to a .loc file become live immediately without restart.
Affected #: 3 files
diff -r c74989f7cafe0605af12af8dc2303689fa674bde -r e5b545b85b836b6f1be76164543d7e4613f25d87 lib/galaxy/tools/data/__init__.py
--- a/lib/galaxy/tools/data/__init__.py
+++ b/lib/galaxy/tools/data/__init__.py
@@ -141,6 +141,18 @@
out.write( '</tables>\n' )
os.chmod( full_path, 0644 )
+ def reload_tables( self, table_names=None ):
+ tables = self.get_tables()
+ if not table_names:
+ table_names = tables.keys()
+ elif not isinstance( table_names, list ):
+ table_names = [ table_names ]
+ for table_name in table_names:
+ tables[ table_name ].reload_from_files()
+ log.debug( "Reloaded tool data table '%s' from files.", table_name )
+ return table_names
+
+
class ToolDataTable( object ):
@classmethod
@@ -160,9 +172,14 @@
# increment this variable any time a new entry is added, or when the table is totally reloaded
# This value has no external meaning, and does not represent an abstract version of the underlying data
self._loaded_content_version = 1
+ self._load_info = ( [ config_element, tool_data_path ], { 'from_shed_config':from_shed_config } )
+ self._merged_load_info = []
- def _update_version( self ):
- self._loaded_content_version += 1
+ def _update_version( self, version=None ):
+ if version is not None:
+ self._loaded_content_version = version
+ else:
+ self._loaded_content_version += 1
return self._loaded_content_version
def get_empty_field_by_name( self, name ):
@@ -187,6 +204,16 @@
def merge_tool_data_table( self, other_table, allow_duplicates=True, persist=False, persist_on_error=False, entry_source=None, **kwd ):
raise NotImplementedError( "Abstract method" )
+ def reload_from_files( self ):
+ new_version = self._update_version()
+ merged_info = self._merged_load_info
+ self.__init__( *self._load_info[0], **self._load_info[1] )
+ self._update_version( version=new_version )
+ for ( tool_data_table_class, load_info ) in merged_info:
+ self.merge_tool_data_table( tool_data_table_class( *load_info[0], **load_info[1] ), allow_duplicates=False )
+ return self._update_version()
+
+
class TabularToolDataTable( ToolDataTable ):
"""
Data stored in a tabular / separated value format on disk, allows multiple
@@ -278,6 +305,8 @@
for filename, info in other_table.filenames.iteritems():
if filename not in self.filenames:
self.filenames[ filename ] = info
+ #save info about table
+ self._merged_load_info.append( ( other_table.__class__, other_table._load_info ) )
#add data entries and return current data table version
return self.add_entries( other_table.data, allow_duplicates=allow_duplicates, persist=persist, persist_on_error=persist_on_error, entry_source=entry_source, **kwd )
diff -r c74989f7cafe0605af12af8dc2303689fa674bde -r e5b545b85b836b6f1be76164543d7e4613f25d87 lib/galaxy/webapps/galaxy/controllers/data_manager.py
--- a/lib/galaxy/webapps/galaxy/controllers/data_manager.py
+++ b/lib/galaxy/webapps/galaxy/controllers/data_manager.py
@@ -77,3 +77,33 @@
if data_table is None:
return trans.response.send_redirect( web.url_for( controller="data_manager", action="index", message="Invalid Data table (%s) was requested" % data_table_name, status="error" ) )
return trans.fill_template( "data_manager/manage_data_table.mako", data_table=data_table, view_only=not_is_admin, message=message, status=status )
+
+ @web.expose
+ @web.require_admin
+ def reload_tool_data_tables( self, trans, table_name=None, **kwd ):
+ if table_name and isinstance( table_name, basestring ):
+ table_name = table_name.split( "," )
+ # Reload the tool data tables
+ table_names = self.app.tool_data_tables.reload_tables( table_names=table_name )
+ redirect_url = None
+ if table_names:
+ status = 'done'
+ if len( table_names ) == 1:
+ message = "The data table '%s' has been reloaded." % table_names[0]
+ redirect_url = web.url_for( controller='data_manager',
+ action='manage_data_table',
+ table_name=table_names[0],
+ message=message,
+ status=status )
+ else:
+ message = "The data tables '%s' have been reloaded." % ', '.join( table_names )
+ else:
+ message = "No data tables have been reloaded."
+ status = 'error'
+ if redirect_url is None:
+ redirect_url = web.url_for( controller='admin',
+ action='view_tool_data_tables',
+ message=message,
+ status=status )
+ return trans.response.send_redirect( redirect_url )
+
diff -r c74989f7cafe0605af12af8dc2303689fa674bde -r e5b545b85b836b6f1be76164543d7e4613f25d87 templates/webapps/galaxy/data_manager/manage_data_table.mako
--- a/templates/webapps/galaxy/data_manager/manage_data_table.mako
+++ b/templates/webapps/galaxy/data_manager/manage_data_table.mako
@@ -15,6 +15,9 @@
<thead><tr><th colspan="${len (column_name_list) }" style="font-size: 120%;">
Data Manager: ${ data_table.name | h }
+ <a class="icon-btn" href="${ h.url_for( controller="data_manager", action="reload_tool_data_tables", table_name=data_table.name ) }" title="Reload ${data_table.name} tool data table" data-placement="bottom">
+ <span class="fa fa-refresh"></span>
+ </a></th></tr><tr>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/c74989f7cafe/
Changeset: c74989f7cafe
User: dan
Date: 2014-05-13 18:35:35
Summary: When viewing tool data table registry in admin interface, provide link to manage data table interface for each tool data table.
Affected #: 1 file
diff -r a4bdf116d532cf18560ed2a4f1b94e84d7d9cc8b -r c74989f7cafe0605af12af8dc2303689fa674bde templates/admin/view_data_tables_registry.mako
--- a/templates/admin/view_data_tables_registry.mako
+++ b/templates/admin/view_data_tables_registry.mako
@@ -26,7 +26,7 @@
%else:
<tr class="tr">
%endif
- <td>${data_table.name}</td>
+ <td><a href="${ h.url_for( controller="data_manager", action="manage_data_table", table_name=data_table.name ) }">${data_table.name}</a></td>
%for i, ( filename, file_dict ) in enumerate( data_table.filenames.iteritems() ):
%if i > 0:
<tr><td></td>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/e13610643216/
Changeset: e13610643216
User: jmchilton
Date: 2014-05-13 17:20:06
Summary: Remove no longer valid log message.
Affected #: 1 file
diff -r 953baebbd9d9f7757691b73eee3bc954570b2bbd -r e13610643216c4d59b41722516e9d4aa9b4466b8 lib/galaxy/jobs/runners/__init__.py
--- a/lib/galaxy/jobs/runners/__init__.py
+++ b/lib/galaxy/jobs/runners/__init__.py
@@ -210,7 +210,6 @@
destination = job_wrapper.get_output_destination( output_paths[ dataset.dataset_id ] )
if in_directory( source_file, job_working_directory ):
output_pairs.append( ( source_file, destination ) )
- log.debug( "Copying %s to %s as directed by from_work_dir" % ( source_file, destination ) )
else:
# Security violation.
log.exception( "from_work_dir specified a location not in the working directory: %s, %s" % ( source_file, job_wrapper.working_directory ) )
https://bitbucket.org/galaxy/galaxy-central/commits/a4bdf116d532/
Changeset: a4bdf116d532
User: jmchilton
Date: 2014-05-13 17:20:06
Summary: Optimize/correct LWR remote metadata generation for working directory outputs.
Previously I was attempting to mimic Galaxy's behavior of copying these files to a fixed output location after execution on the remote server before setting metadata. This was an un-needed copy since it appears to be easier to just send the metadata generation code the expected path in the working directory.
Affected #: 1 file
diff -r e13610643216c4d59b41722516e9d4aa9b4466b8 -r a4bdf116d532cf18560ed2a4f1b94e84d7d9cc8b lib/galaxy/jobs/runners/lwr.py
--- a/lib/galaxy/jobs/runners/lwr.py
+++ b/lib/galaxy/jobs/runners/lwr.py
@@ -172,7 +172,6 @@
job_wrapper.prepare( **prepare_kwds )
self.__prepare_input_files_locally(job_wrapper)
remote_metadata = LwrJobRunner.__remote_metadata( client )
- remote_work_dir_copy = LwrJobRunner.__remote_work_dir_copy( client )
dependency_resolution = LwrJobRunner.__dependency_resolution( client )
metadata_kwds = self.__build_metadata_configuration(client, job_wrapper, remote_metadata, remote_job_config)
remote_command_params = dict(
@@ -184,7 +183,7 @@
self,
job_wrapper=job_wrapper,
include_metadata=remote_metadata,
- include_work_dir_outputs=remote_work_dir_copy,
+ include_work_dir_outputs=False,
remote_command_params=remote_command_params,
)
except Exception:
@@ -358,13 +357,7 @@
self.client_manager.shutdown()
def __client_outputs( self, client, job_wrapper ):
- remote_work_dir_copy = LwrJobRunner.__remote_work_dir_copy( client )
- if not remote_work_dir_copy:
- work_dir_outputs = self.get_work_dir_outputs( job_wrapper )
- else:
- # They have already been copied over to look like regular outputs remotely,
- # no need to handle them differently here.
- work_dir_outputs = []
+ work_dir_outputs = self.get_work_dir_outputs( job_wrapper )
output_files = self.get_output_files( job_wrapper )
client_outputs = ClientOutputs(
working_directory=job_wrapper.working_directory,
@@ -400,16 +393,6 @@
return remote_metadata
@staticmethod
- def __remote_work_dir_copy( lwr_client ):
- # Right now remote metadata handling assumes from_work_dir outputs
- # have been copied over before it runs. So do that remotely. This is
- # not the default though because adding it to the command line is not
- # cross-platform (no cp on Windows) and it's un-needed work outside
- # the context of metadata settting (just as easy to download from
- # either place.)
- return LwrJobRunner.__remote_metadata( lwr_client )
-
- @staticmethod
def __use_remote_datatypes_conf( lwr_client ):
""" When setting remote metadata, use integrated datatypes from this
Galaxy instance or use the datatypes config configured via the remote
@@ -440,7 +423,21 @@
outputs_directory = remote_job_config['outputs_directory']
configs_directory = remote_job_config['configs_directory']
working_directory = remote_job_config['working_directory']
+ # For metadata calculation, we need to build a list of of output
+ # file objects with real path indicating location on Galaxy server
+ # and false path indicating location on compute server. Since the
+ # LWR disables from_work_dir copying as part of the job command
+ # line we need to take the list of output locations on the LWR
+ # server (produced by self.get_output_files(job_wrapper)) and for
+ # each work_dir output substitute the effective path on the LWR
+ # server relative to the remote working directory as the
+ # false_path to send the metadata command generation module.
+ work_dir_outputs = self.get_work_dir_outputs(job_wrapper, job_working_directory=working_directory)
outputs = [Bunch(false_path=os.path.join(outputs_directory, os.path.basename(path)), real_path=path) for path in self.get_output_files(job_wrapper)]
+ for output in outputs:
+ for lwr_workdir_path, real_path in work_dir_outputs:
+ if real_path == output.real_path:
+ output.false_path = lwr_workdir_path
metadata_kwds['output_fnames'] = outputs
metadata_kwds['compute_tmp_dir'] = working_directory
metadata_kwds['config_root'] = remote_galaxy_home
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/953baebbd9d9/
Changeset: 953baebbd9d9
User: jmchilton
Date: 2014-05-13 16:29:41
Summary: Ensure new_file_path exists before set_metadata.py generates metadata.
This directory is required to exist but usually unneeded because Galaxy creates this directory on startup. However, when setting metadata remotely with the LWR that remote Galaxy instance may have never been started and this directory may not exist.
Affected #: 2 files
diff -r f5f6cd1e938304094c9491da10824f93cbfb2d27 -r 953baebbd9d9f7757691b73eee3bc954570b2bbd lib/galaxy/config.py
--- a/lib/galaxy/config.py
+++ b/lib/galaxy/config.py
@@ -417,6 +417,16 @@
else:
return default
+ def ensure_tempdir( self ):
+ self._ensure_directory( self.new_file_path )
+
+ def _ensure_directory( self, path ):
+ if path not in [ None, False ] and not os.path.isdir( path ):
+ try:
+ os.makedirs( path )
+ except Exception, e:
+ raise ConfigurationError( "Unable to create missing directory: %s\n%s" % ( path, e ) )
+
def check( self ):
paths_to_check = [ self.root, self.tool_path, self.tool_data_path, self.template_path ]
# Check that required directories exist
@@ -440,11 +450,7 @@
self.whoosh_index_dir, \
self.object_store_cache_path, \
os.path.join( self.tool_data_path, 'shared', 'jars' ):
- if path not in [ None, False ] and not os.path.isdir( path ):
- try:
- os.makedirs( path )
- except Exception, e:
- raise ConfigurationError( "Unable to create missing directory: %s\n%s" % ( path, e ) )
+ self._ensure_directory( path )
# Check that required files exist
tool_configs = self.tool_configs
if self.migrated_tools_config not in tool_configs:
diff -r f5f6cd1e938304094c9491da10824f93cbfb2d27 -r 953baebbd9d9f7757691b73eee3bc954570b2bbd scripts/set_metadata.py
--- a/scripts/set_metadata.py
+++ b/scripts/set_metadata.py
@@ -66,6 +66,7 @@
log.debug("Did not load option %s from %s" % (option, config_file_name))
# config object is required by ObjectStore class so create it now
universe_config = config.Configuration(**conf_dict)
+ universe_config.ensure_tempdir()
object_store = build_object_store_from_config(universe_config)
galaxy.model.Dataset.object_store = object_store
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.