1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/f10bc351f2ca/
Changeset: f10bc351f2ca
User: inithello
Date: 2013-05-06 20:53:46
Summary: Limit when tests are explicitly removed from the tests dict.
Affected #: 1 file
diff -r 01cfd0c5af0fa5555697d14283404af2f5ef496c -r f10bc351f2ca97577a3a823dc4113f965164cbd5 test/install_and_test_tool_shed_repositories/functional_tests.py
--- a/test/install_and_test_tool_shed_repositories/functional_tests.py
+++ b/test/install_and_test_tool_shed_repositories/functional_tests.py
@@ -704,8 +704,6 @@
os.environ[ 'GALAXY_TOOL_SHED_TEST_FILE' ] = galaxy_shed_tools_dict
os.environ[ 'GALAXY_TEST_HOST' ] = galaxy_test_host
os.environ[ 'GALAXY_TEST_PORT' ] = galaxy_test_port
- # Explicitly clear tests from twill's test environment.
- remove_generated_tests( app )
# Set the module-level variable 'toolbox', so that test.functional.test_toolbox will generate the appropriate test methods.
test_toolbox.toolbox = app.toolbox
# Generate the test methods for this installed repository. We need to pass in True here, or it will look
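The hunk above prepares the test environment (GALAXY_TEST_HOST, GALAXY_TEST_PORT) and assigns the module-level toolbox so that test.functional.test_toolbox can generate per-tool test methods. A minimal, self-contained sketch of that "set a module-level variable, then generate tests" pattern; the class and helper below are illustrative stand-ins, not Galaxy's actual test harness:

    import os
    import unittest

    # Stand-in for the module-level variable the real test module reads.
    toolbox = None

    class ToolTestCase( unittest.TestCase ):
        """Empty container; test methods are attached dynamically."""
        pass

    def build_tests():
        """Attach one test method per tool in the module-level toolbox."""
        for tool_id in toolbox:
            def make_test( tid ):
                def test( self ):
                    # Placeholder assertion; the real harness drives the tool via twill.
                    self.assertTrue( tid )
                return test
            setattr( ToolTestCase, 'test_tool_%s' % tool_id, make_test( tool_id ) )

    if __name__ == '__main__':
        os.environ[ 'GALAXY_TEST_HOST' ] = 'localhost'
        os.environ[ 'GALAXY_TEST_PORT' ] = '8080'
        toolbox = [ 'filter1', 'sort1' ]
        build_tests()
        unittest.main()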
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/10a0ad2f0280/
Changeset: 10a0ad2f0280
User: dannon
Date: 2013-05-06 19:07:31
Summary: Remove set_metadata_externally from the universe sample.
Affected #: 1 file
diff -r b72a06cf0247ac266fb7b2fc204cef7fb4691cb0 -r 10a0ad2f0280cb044bde1e6424b7a7a93365d5ce universe_wsgi.ini.sample
--- a/universe_wsgi.ini.sample
+++ b/universe_wsgi.ini.sample
@@ -625,12 +625,6 @@
# running more than one Galaxy server using the same database.
#enable_job_recovery = True
-# Setting metadata on job outputs to in a separate process (or if using a
-# cluster, on the cluster). Thanks to Python's Global Interpreter Lock and the
-# hefty expense that setting metadata incurs, your Galaxy process may become
-# unresponsive when this operation occurs internally.
-#set_metadata_externally = False
-
# Although it is fairly reliable, setting metadata can occasionally fail. In
# these instances, you can choose to retry setting it internally or leave it in
# a failed state (since retrying internally may cause the Galaxy process to be
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
5 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/bb5771403508/
Changeset: bb5771403508
User: dannon
Date: 2013-05-05 17:54:28
Summary: Workflow controller: remove unused variables.
Affected #: 1 file
diff -r 02822f28dc93348264a18f9f30abfedb53b80728 -r bb5771403508fbd75829960f089e6f35267716cf lib/galaxy/webapps/galaxy/controllers/workflow.py
--- a/lib/galaxy/webapps/galaxy/controllers/workflow.py
+++ b/lib/galaxy/webapps/galaxy/controllers/workflow.py
@@ -134,7 +134,7 @@
@web.require_login( "use Galaxy workflows" )
def list_grid( self, trans, **kwargs ):
""" List user's stored workflows. """
- status = message = None
+ # status = message = None
if 'operation' in kwargs:
operation = kwargs['operation'].lower()
if operation == "rename":
@@ -462,7 +462,7 @@
return trans.show_error_message( "The specified workflow does not exist." )
# Rate workflow.
- stored_rating = self.rate_item( trans.sa_session, trans.get_user(), stored, rating )
+ self.rate_item( trans.sa_session, trans.get_user(), stored, rating )
return self.get_ave_item_rating_data( trans.sa_session, stored )
@@ -507,8 +507,6 @@
@web.require_login( "use Galaxy workflows" )
def gen_image( self, trans, id ):
stored = self.get_stored_workflow( trans, id, check_ownership=True )
- session = trans.sa_session
-
trans.response.set_content_type("image/svg+xml")
return self._workflow_to_svg_canvas( trans, stored ).standalone_xml()
@@ -942,7 +940,6 @@
"""
# Load encoded workflow from database
- user = trans.get_user()
id = trans.security.decode_id( id )
trans.workflow_building_mode = True
stored = trans.sa_session.query( model.StoredWorkflow ).get( id )
@@ -1034,6 +1031,7 @@
trans.response.headers["Content-Disposition"] = 'attachment; filename="Galaxy-Workflow-%s.ga"' % ( sname )
trans.response.set_content_type( 'application/galaxy-archive' )
return stored_dict
+
@web.expose
def import_workflow( self, trans, cntrller='workflow', **kwd ):
"""
@@ -1097,7 +1095,7 @@
else:
uploaded_file = file_data.file
uploaded_file_name = uploaded_file.name
- uploaded_file_filename = file_data.filename
+ # uploaded_file_filename = file_data.filename
if os.path.getsize( os.path.abspath( uploaded_file_name ) ) > 0:
# We're reading the file as text so we can re-use the existing code below.
# This may not be ideal...
@@ -1198,6 +1196,7 @@
status=status,
use_panels=True,
myexperiment_target_url=myexperiment_target_url )
+
@web.json
def get_datatypes( self, trans ):
ext_to_class_name = dict()
@@ -1335,7 +1334,8 @@
# Build the state for each step
errors = {}
has_upgrade_messages = False
- has_errors = False
+ # has_errors is never used
+ # has_errors = False
saved_history = None
if history_id is not None:
saved_history = trans.get_history();
@@ -1498,7 +1498,8 @@
step.state = step.module.state
# Error dict
if step.tool_errors:
- has_errors = True
+ # has_errors is never used.
+ # has_errors = True
errors[step.id] = step.tool_errors
else:
## Non-tool specific stuff?
@@ -1553,7 +1554,8 @@
# Build the state for each step
errors = {}
has_upgrade_messages = False
- has_errors = False
+ # has_errors is never used
+ # has_errors = False
if kwargs:
# If kwargs were provided, the states for each step should have
# been POSTed
@@ -1591,7 +1593,8 @@
step.state = step.module.state
# Error dict
if step.tool_errors:
- has_errors = True
+ # has_errors is never used
+ # has_errors = True
errors[step.id] = step.tool_errors
else:
## Non-tool specific stuff?
@@ -1825,7 +1828,7 @@
# Unpack and add post-job actions.
post_job_actions = step_dict.get( 'post_job_actions', {} )
for name, pja_dict in post_job_actions.items():
- pja = model.PostJobAction( pja_dict[ 'action_type' ],
+ model.PostJobAction( pja_dict[ 'action_type' ],
step, pja_dict[ 'output_name' ],
pja_dict[ 'action_arguments' ] )
# Second pass to deal with connections between steps
@@ -2064,7 +2067,6 @@
but track the associations.
"""
associations = []
- names_to_clean = []
# dbkey is pushed in by the framework
if 'dbkey' in values:
del values['dbkey']
https://bitbucket.org/galaxy/galaxy-central/commits/e3b8df9f9ecd/
Changeset: e3b8df9f9ecd
User: dannon
Date: 2013-05-05 18:12:19
Summary: Workflow controller - trim trailing whitespace, add spacing between classes/methods.
Affected #: 1 file
diff -r bb5771403508fbd75829960f089e6f35267716cf -r e3b8df9f9ecd003a9b2c0441987ce1b79b6179a4 lib/galaxy/webapps/galaxy/controllers/workflow.py
--- a/lib/galaxy/webapps/galaxy/controllers/workflow.py
+++ b/lib/galaxy/webapps/galaxy/controllers/workflow.py
@@ -37,8 +37,8 @@
from galaxy.workflow.modules import module_factory, ToolModule
+class StoredWorkflowListGrid( grids.Grid ):
-class StoredWorkflowListGrid( grids.Grid ):
class StepsColumn( grids.GridColumn ):
def get_value(self, trans, grid, workflow):
return len( workflow.latest_workflow.steps )
@@ -73,6 +73,7 @@
def apply_query_filter( self, trans, query, **kwargs ):
return query.filter_by( user=trans.user, deleted=False )
+
class StoredWorkflowAllPublishedGrid( grids.Grid ):
title = "Published Workflows"
model_class = model.StoredWorkflow
@@ -94,13 +95,16 @@
key="free-text-search", visible=False, filterable="standard" )
)
operations = []
+
def build_initial_query( self, trans, **kwargs ):
# Join so that searching stored_workflow.user makes sense.
return trans.sa_session.query( self.model_class ).join( model.User.table )
+
def apply_query_filter( self, trans, query, **kwargs ):
# A public workflow is published, has a slug, and is not deleted.
return query.filter( self.model_class.published==True ).filter( self.model_class.slug != None ).filter( self.model_class.deleted == False )
+
# Simple SGML parser to get all content in a single tag.
class SingleTagContentsParser( sgmllib.SGMLParser ):
@@ -119,6 +123,7 @@
if self.cur_tag == self.target_tag:
self.tag_content += text
+
class WorkflowController( BaseUIController, SharableMixin, UsesStoredWorkflowMixin, UsesAnnotations, UsesItemRatings ):
stored_list_grid = StoredWorkflowListGrid()
published_list_grid = StoredWorkflowAllPublishedGrid()
@@ -847,7 +852,7 @@
for input_name, conns in step.temp_input_connections.iteritems():
if conns:
conn_dicts = conns if isinstance(conns,list) else [conns]
- for conn_dict in conn_dicts:
+ for conn_dict in conn_dicts:
conn = model.WorkflowStepConnection()
conn.input_step = step
conn.input_name = input_name
@@ -883,6 +888,7 @@
"""
stored = self.get_stored_workflow( trans, id, check_ownership=False, check_accessible=True )
return trans.fill_template( "/workflow/export.mako", item=stored, use_panels=True )
+
@web.expose
@web.require_login( "use workflows" )
def import_from_myexp( self, trans, myexp_id, **kwd ):
@@ -932,6 +938,7 @@
return trans.show_warn_message( "Imported, but this workflow contains cycles. %s" % workflow_list_str )
else:
return trans.show_message( "Workflow '%s' imported. %s" % (workflow.name, workflow_list_str) )
+
@web.expose
@web.require_login( "use workflows" )
def export_to_myexp( self, trans, id, myexp_username, myexp_password ):
@@ -1767,7 +1774,7 @@
# tools. This should be removed at some point. Mirrored
# hack in _workflow_from_dict should never be removed so
# existing workflow exports continue to function.
- for input_name, input_conn in dict(input_conn_dict).iteritems():
+ for input_name, input_conn in dict(input_conn_dict).iteritems():
if len(input_conn) == 1:
input_conn_dict[input_name] = input_conn[0]
step_dict['input_connections'] = input_conn_dict
@@ -1776,6 +1783,7 @@
# Add to return value
data['steps'][step.order_index] = step_dict
return data
+
def _workflow_from_dict( self, trans, data, source=None, add_to_menu=False ):
"""
Creates a workflow from a dict. Created workflow is stored in the database and returned.
@@ -1869,16 +1877,13 @@
return stored, missing_tool_tups
def _workflow_to_svg_canvas( self, trans, stored ):
-
workflow = stored.latest_workflow
data = []
-
canvas = svgfig.canvas(style="stroke:black; fill:none; stroke-width:1px; stroke-linejoin:round; text-anchor:left")
text = svgfig.SVG("g")
connectors = svgfig.SVG("g")
boxes = svgfig.SVG("g")
svgfig.Text.defaults["font-size"] = "10px"
-
in_pos = {}
out_pos = {}
margin = 5
@@ -2021,6 +2026,7 @@
except CycleError:
return None
+
class FakeJob( object ):
"""
Fake job object for datasets that have no creating_job_associations,
https://bitbucket.org/galaxy/galaxy-central/commits/16fbd8c387a0/
Changeset: 16fbd8c387a0
User: dannon
Date: 2013-05-05 19:57:21
Summary: Add a docstring comment for the workflow controller's 'imp' method.
Affected #: 1 file
diff -r e3b8df9f9ecd003a9b2c0441987ce1b79b6179a4 -r 16fbd8c387a0cb7eb7607b6d37c6ea26fa9c6d50 lib/galaxy/webapps/galaxy/controllers/workflow.py
--- a/lib/galaxy/webapps/galaxy/controllers/workflow.py
+++ b/lib/galaxy/webapps/galaxy/controllers/workflow.py
@@ -356,6 +356,7 @@
@web.expose
@web.require_login( "to import a workflow", use_panels=True )
def imp( self, trans, id, **kwargs ):
+ """Imports a workflow shared by other users."""
# Set referer message.
referer = trans.request.referer
if referer is not "":
https://bitbucket.org/galaxy/galaxy-central/commits/c01b7cf7257a/
Changeset: c01b7cf7257a
User: dannon
Date: 2013-05-06 18:50:06
Summary: Metadata setting: Always set externally.
Affected #: 16 files
diff -r 16fbd8c387a0cb7eb7607b6d37c6ea26fa9c6d50 -r c01b7cf7257af79616730a73886711b6ecf0310a lib/galaxy/config.py
--- a/lib/galaxy/config.py
+++ b/lib/galaxy/config.py
@@ -86,7 +86,6 @@
self.galaxy_data_manager_data_path = kwargs.get( 'galaxy_data_manager_data_path', self.tool_data_path )
self.tool_secret = kwargs.get( "tool_secret", "" )
self.id_secret = kwargs.get( "id_secret", "USING THE DEFAULT IS NOT SECURE!" )
- self.set_metadata_externally = string_as_bool( kwargs.get( "set_metadata_externally", "False" ) )
self.retry_metadata_internally = string_as_bool( kwargs.get( "retry_metadata_internally", "True" ) )
self.use_remote_user = string_as_bool( kwargs.get( "use_remote_user", "False" ) )
self.remote_user_maildomain = kwargs.get( "remote_user_maildomain", None )
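The removed line above was the config-side consumer of the set_metadata_externally option; the surviving options in this block are still parsed with string_as_bool. A rough, standalone illustration of that parsing pattern (galaxy.util.string_as_bool behaves along these lines, but the helper and class here are simplified stand-ins):

    def string_as_bool( value ):
        """Treat common truthy strings ('true', 'yes', 'on') as True, everything else as False."""
        return str( value ).lower() in ( 'true', 'yes', 'on' )

    class MiniConfig( object ):
        """Simplified stand-in for the kwargs.get(...) pattern in lib/galaxy/config.py."""
        def __init__( self, **kwargs ):
            self.retry_metadata_internally = string_as_bool( kwargs.get( "retry_metadata_internally", "True" ) )
            self.id_secret = kwargs.get( "id_secret", "USING THE DEFAULT IS NOT SECURE!" )

    config = MiniConfig( retry_metadata_internally='False' )
    assert config.retry_metadata_internally is False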
diff -r 16fbd8c387a0cb7eb7607b6d37c6ea26fa9c6d50 -r c01b7cf7257af79616730a73886711b6ecf0310a lib/galaxy/datatypes/metadata.py
--- a/lib/galaxy/datatypes/metadata.py
+++ b/lib/galaxy/datatypes/metadata.py
@@ -28,8 +28,6 @@
STATEMENTS = "__galaxy_statements__" #this is the name of the property in a Datatype class where new metadata spec element Statements are stored
-DATABASE_CONNECTION_AVAILABLE = False #When False, certain metadata parameter types (see FileParameter) will behave differently
-
class Statement( object ):
"""
This class inserts its target into a list in the surrounding
@@ -442,19 +440,9 @@
return None
if isinstance( value, galaxy.model.MetadataFile ) or isinstance( value, MetadataTempFile ):
return value
- if DATABASE_CONNECTION_AVAILABLE:
- try:
- # FIXME: this query requires a monkey patch in assignmapper.py since
- # MetadataParameters do not have a handle to the sqlalchemy session
- # DBTODO this is problematic now.
- return galaxy.model.MetadataFile.get( value )
- except:
- #value was not a valid id
- return None
- else:
- mf = galaxy.model.MetadataFile()
- mf.id = value #we assume this is a valid id, since we cannot check it
- return mf
+ mf = galaxy.model.MetadataFile()
+ mf.id = value #we assume this is a valid id, since we cannot check it
+ return mf
def make_copy( self, value, target_context, source_context ):
value = self.wrap( value )
@@ -499,13 +487,13 @@
return value
def new_file( self, dataset = None, **kwds ):
- if DATABASE_CONNECTION_AVAILABLE:
+ if object_session( dataset ):
mf = galaxy.model.MetadataFile( name = self.spec.name, dataset = dataset, **kwds )
object_session( dataset ).add( mf )
object_session( dataset ).flush() #flush to assign id
return mf
else:
- #we need to make a tmp file that is accessable to the head node,
+ #we need to make a tmp file that is accessable to the head node,
#we will be copying its contents into the MetadataFile objects filename after restoring from JSON
#we do not include 'dataset' in the kwds passed, as from_JSON_value() will handle this for us
return MetadataTempFile( **kwds )
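The new_file() change above switches the branch condition from a process-wide flag to object_session( dataset ): with a live SQLAlchemy session, a persistent MetadataFile can be added and flushed to obtain an id; otherwise a temporary placeholder is used and resolved later from JSON. A simplified, non-SQLAlchemy sketch of that two-path idea (the classes below are illustrative stand-ins for galaxy.model.MetadataFile and MetadataTempFile):

    class MetadataFileStandIn( object ):
        """Persistent path: would be added to the session and flushed to get an id."""
        def __init__( self, name ):
            self.name = name
            self.id = None

    class MetadataTempFileStandIn( object ):
        """Fallback path: a temp file readable by the head node, resolved after the job."""
        def __init__( self, **kwds ):
            self.kwds = kwds

    def new_file( dataset=None, session=None, **kwds ):
        # Stand-in for object_session( dataset ): is the dataset attached to a session?
        if session is not None:
            mf = MetadataFileStandIn( name=kwds.get( 'name', 'metadata' ) )
            # session.add( mf ); session.flush()  # would assign mf.id in the real code
            return mf
        return MetadataTempFileStandIn( **kwds )

    print( type( new_file( dataset=object(), session=object() ) ).__name__ )  # MetadataFileStandIn
    print( type( new_file( dataset=object() ) ).__name__ )                    # MetadataTempFileStandIn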
diff -r 16fbd8c387a0cb7eb7607b6d37c6ea26fa9c6d50 -r c01b7cf7257af79616730a73886711b6ecf0310a lib/galaxy/datatypes/registry.py
--- a/lib/galaxy/datatypes/registry.py
+++ b/lib/galaxy/datatypes/registry.py
@@ -397,7 +397,7 @@
except KeyError:
builder = data.Text()
return builder
- def change_datatype(self, data, ext, set_meta = True ):
+ def change_datatype(self, data, ext):
data.extension = ext
# call init_meta and copy metadata from itself. The datatype
# being converted *to* will handle any metadata copying and
@@ -405,10 +405,6 @@
if data.has_data():
data.set_size()
data.init_meta( copy_from=data )
- if set_meta:
- #metadata is being set internally
- data.set_meta( overwrite = False )
- data.set_peek()
return data
def old_change_datatype(self, data, ext):
"""Creates and returns a new datatype based on an existing data and an extension"""
diff -r 16fbd8c387a0cb7eb7607b6d37c6ea26fa9c6d50 -r c01b7cf7257af79616730a73886711b6ecf0310a lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py
+++ b/lib/galaxy/jobs/__init__.py
@@ -953,9 +953,7 @@
#either use the metadata from originating output dataset, or call set_meta on the copies
#it would be quicker to just copy the metadata from the originating output dataset,
#but somewhat trickier (need to recurse up the copied_from tree), for now we'll call set_meta()
- if not self.app.config.set_metadata_externally or \
- ( not self.external_output_metadata.external_metadata_set_successfully( dataset, self.sa_session ) \
- and self.app.config.retry_metadata_internally ):
+ if ( not self.external_output_metadata.external_metadata_set_successfully( dataset, self.sa_session ) and self.app.config.retry_metadata_internally ):
dataset.datatype.set_meta( dataset, overwrite = False ) #call datatype.set_meta directly for the initial set_meta call during dataset creation
elif not self.external_output_metadata.external_metadata_set_successfully( dataset, self.sa_session ) and job.states.ERROR != final_job_state:
dataset._state = model.Dataset.states.FAILED_METADATA
@@ -1226,8 +1224,7 @@
try:
for fname in self.extra_filenames:
os.remove( fname )
- if self.app.config.set_metadata_externally:
- self.external_output_metadata.cleanup_external_metadata( self.sa_session )
+ self.external_output_metadata.cleanup_external_metadata( self.sa_session )
galaxy.tools.imp_exp.JobExportHistoryArchiveWrapper( self.job_id ).cleanup_after_job( self.sa_session )
galaxy.tools.imp_exp.JobImportHistoryArchiveWrapper( self.app, self.job_id ).cleanup_after_job()
galaxy.tools.genome_index.GenomeIndexToolWrapper( self.job_id ).postprocessing( self.sa_session, self.app )
diff -r 16fbd8c387a0cb7eb7607b6d37c6ea26fa9c6d50 -r c01b7cf7257af79616730a73886711b6ecf0310a lib/galaxy/jobs/deferred/data_transfer.py
--- a/lib/galaxy/jobs/deferred/data_transfer.py
+++ b/lib/galaxy/jobs/deferred/data_transfer.py
@@ -136,15 +136,11 @@
if name not in [ 'name', 'info', 'dbkey', 'base_name' ]:
if spec.get( 'default' ):
setattr( ldda.metadata, name, spec.unwrap( spec.get( 'default' ) ) )
- if self.app.config.set_metadata_externally:
- self.app.datatypes_registry.set_external_metadata_tool.tool_action.execute( self.app.datatypes_registry.set_external_metadata_tool,
- FakeTrans( self.app,
- history=sample.history,
- user=sample.request.user ),
- incoming = { 'input1':ldda } )
- else:
- ldda.set_meta()
- ldda.datatype.after_setting_metadata( ldda )
+ self.app.datatypes_registry.set_external_metadata_tool.tool_action.execute( self.app.datatypes_registry.set_external_metadata_tool,
+ FakeTrans( self.app,
+ history=sample.history,
+ user=sample.request.user ),
+ incoming = { 'input1':ldda } )
ldda.state = ldda.states.OK
# TODO: not sure if this flush is necessary
self.sa_session.add( ldda )
diff -r 16fbd8c387a0cb7eb7607b6d37c6ea26fa9c6d50 -r c01b7cf7257af79616730a73886711b6ecf0310a lib/galaxy/jobs/runners/__init__.py
--- a/lib/galaxy/jobs/runners/__init__.py
+++ b/lib/galaxy/jobs/runners/__init__.py
@@ -174,7 +174,7 @@
# Append metadata setting commands, we don't want to overwrite metadata
# that was copied over in init_meta(), as per established behavior
- if include_metadata and self.app.config.set_metadata_externally:
+ if include_metadata:
commands += "; cd %s; " % os.path.abspath( os.getcwd() )
commands += job_wrapper.setup_external_metadata(
exec_dir = os.path.abspath( os.getcwd() ),
diff -r 16fbd8c387a0cb7eb7607b6d37c6ea26fa9c6d50 -r c01b7cf7257af79616730a73886711b6ecf0310a lib/galaxy/jobs/runners/local.py
--- a/lib/galaxy/jobs/runners/local.py
+++ b/lib/galaxy/jobs/runners/local.py
@@ -83,14 +83,14 @@
stdout_file.close()
stderr_file.close()
log.debug('execution finished: %s' % command_line)
- except Exception, exc:
+ except Exception:
job_wrapper.fail( "failure running job", exception=True )
log.exception("failure running job %d" % job_wrapper.job_id)
return
#run the metadata setting script here
#this is terminate-able when output dataset/job is deleted
#so that long running set_meta()s can be canceled without having to reboot the server
- if job_wrapper.get_state() not in [ model.Job.states.ERROR, model.Job.states.DELETED ] and self.app.config.set_metadata_externally and job_wrapper.output_paths:
+ if job_wrapper.get_state() not in [ model.Job.states.ERROR, model.Job.states.DELETED ] and job_wrapper.output_paths:
external_metadata_script = job_wrapper.setup_external_metadata( output_fnames = job_wrapper.get_output_fnames(),
set_extension = True,
tmp_dir = job_wrapper.working_directory,
diff -r 16fbd8c387a0cb7eb7607b6d37c6ea26fa9c6d50 -r c01b7cf7257af79616730a73886711b6ecf0310a lib/galaxy/jobs/runners/lwr.py
--- a/lib/galaxy/jobs/runners/lwr.py
+++ b/lib/galaxy/jobs/runners/lwr.py
@@ -143,7 +143,7 @@
#run the metadata setting script here
#this is terminate-able when output dataset/job is deleted
#so that long running set_meta()s can be canceled without having to reboot the server
- if job_wrapper.get_state() not in [ model.Job.states.ERROR, model.Job.states.DELETED ] and self.app.config.set_metadata_externally and job_wrapper.output_paths:
+ if job_wrapper.get_state() not in [ model.Job.states.ERROR, model.Job.states.DELETED ] and job_wrapper.output_paths:
external_metadata_script = job_wrapper.setup_external_metadata( output_fnames = job_wrapper.get_output_fnames(),
set_extension = True,
kwds = { 'overwrite' : False } ) #we don't want to overwrite metadata that was copied over in init_meta(), as per established behavior
diff -r 16fbd8c387a0cb7eb7607b6d37c6ea26fa9c6d50 -r c01b7cf7257af79616730a73886711b6ecf0310a lib/galaxy/jobs/runners/tasks.py
--- a/lib/galaxy/jobs/runners/tasks.py
+++ b/lib/galaxy/jobs/runners/tasks.py
@@ -123,7 +123,7 @@
#run the metadata setting script here
#this is terminate-able when output dataset/job is deleted
#so that long running set_meta()s can be canceled without having to reboot the server
- if job_wrapper.get_state() not in [ model.Job.states.ERROR, model.Job.states.DELETED ] and self.app.config.set_metadata_externally and job_wrapper.output_paths:
+ if job_wrapper.get_state() not in [ model.Job.states.ERROR, model.Job.states.DELETED ] and job_wrapper.output_paths:
external_metadata_script = job_wrapper.setup_external_metadata( output_fnames = job_wrapper.get_output_fnames(),
set_extension = True,
kwds = { 'overwrite' : False } ) #we don't want to overwrite metadata that was copied over in init_meta(), as per established behavior
diff -r 16fbd8c387a0cb7eb7607b6d37c6ea26fa9c6d50 -r c01b7cf7257af79616730a73886711b6ecf0310a lib/galaxy/tools/imp_exp/__init__.py
--- a/lib/galaxy/tools/imp_exp/__init__.py
+++ b/lib/galaxy/tools/imp_exp/__init__.py
@@ -189,15 +189,10 @@
# Although metadata is set above, need to set metadata to recover BAI for BAMs.
if hda.extension == 'bam':
- if self.app.config.set_metadata_externally:
- self.app.datatypes_registry.set_external_metadata_tool.tool_action.execute_via_app(
- self.app.datatypes_registry.set_external_metadata_tool, self.app, jiha.job.session_id,
- new_history.id, jiha.job.user, incoming={ 'input1': hda }, overwrite=False
- )
- else:
- message = 'Attributes updated'
- hda.set_meta()
- hda.datatype.after_setting_metadata( hda )
+ self.app.datatypes_registry.set_external_metadata_tool.tool_action.execute_via_app(
+ self.app.datatypes_registry.set_external_metadata_tool, self.app, jiha.job.session_id,
+ new_history.id, jiha.job.user, incoming={ 'input1': hda }, overwrite=False
+ )
#
# Create jobs.
diff -r 16fbd8c387a0cb7eb7607b6d37c6ea26fa9c6d50 -r c01b7cf7257af79616730a73886711b6ecf0310a lib/galaxy/webapps/galaxy/api/tools.py
--- a/lib/galaxy/webapps/galaxy/api/tools.py
+++ b/lib/galaxy/webapps/galaxy/api/tools.py
@@ -365,15 +365,9 @@
# Set metadata.
# TODO: set meta internally if dataset is small enough?
- if trans.app.config.set_metadata_externally:
- trans.app.datatypes_registry.set_external_metadata_tool.tool_action.execute( trans.app.datatypes_registry.set_external_metadata_tool,
- trans, incoming = { 'input1':new_dataset },
- overwrite=False, job_params={ "source" : "trackster" } )
- else:
- message = 'Attributes updated'
- new_dataset.set_meta()
- new_dataset.datatype.after_setting_metadata( new_dataset )
-
+ trans.app.datatypes_registry.set_external_metadata_tool.tool_action.execute( trans.app.datatypes_registry.set_external_metadata_tool,
+ trans, incoming = { 'input1':new_dataset },
+ overwrite=False, job_params={ "source" : "trackster" } )
# Add HDA subset association.
subset_association = trans.app.model.HistoryDatasetAssociationSubset( hda=input_dataset, subset=new_dataset, location=regions_str )
trans.sa_session.add( subset_association )
diff -r 16fbd8c387a0cb7eb7607b6d37c6ea26fa9c6d50 -r c01b7cf7257af79616730a73886711b6ecf0310a lib/galaxy/webapps/galaxy/controllers/dataset.py
--- a/lib/galaxy/webapps/galaxy/controllers/dataset.py
+++ b/lib/galaxy/webapps/galaxy/controllers/dataset.py
@@ -379,10 +379,9 @@
message = "This dataset is currently being used as input or output. You cannot change datatype until the jobs have completed or you have canceled them."
error = True
else:
- trans.app.datatypes_registry.change_datatype( data, params.datatype, set_meta = not trans.app.config.set_metadata_externally )
+ trans.app.datatypes_registry.change_datatype( data, params.datatype )
trans.sa_session.flush()
- if trans.app.config.set_metadata_externally:
- trans.app.datatypes_registry.set_external_metadata_tool.tool_action.execute( trans.app.datatypes_registry.set_external_metadata_tool, trans, incoming = { 'input1':data }, overwrite = False ) #overwrite is False as per existing behavior
+ trans.app.datatypes_registry.set_external_metadata_tool.tool_action.execute( trans.app.datatypes_registry.set_external_metadata_tool, trans, incoming = { 'input1':data }, overwrite = False ) #overwrite is False as per existing behavior
message = "Changed the type of dataset '%s' to %s" % ( to_unicode( data.name ), params.datatype )
refresh_frames=['history']
else:
@@ -436,13 +435,8 @@
if name not in [ 'name', 'info', 'dbkey', 'base_name' ]:
if spec.get( 'default' ):
setattr( data.metadata, name, spec.unwrap( spec.get( 'default' ) ) )
- if trans.app.config.set_metadata_externally:
- message = 'Attributes have been queued to be updated'
- trans.app.datatypes_registry.set_external_metadata_tool.tool_action.execute( trans.app.datatypes_registry.set_external_metadata_tool, trans, incoming = { 'input1':data } )
- else:
- message = 'Attributes updated'
- data.set_meta()
- data.datatype.after_setting_metadata( data )
+ message = 'Attributes have been queued to be updated'
+ trans.app.datatypes_registry.set_external_metadata_tool.tool_action.execute( trans.app.datatypes_registry.set_external_metadata_tool, trans, incoming = { 'input1':data } )
trans.sa_session.flush()
refresh_frames=['history']
elif params.convert_data:
diff -r 16fbd8c387a0cb7eb7607b6d37c6ea26fa9c6d50 -r c01b7cf7257af79616730a73886711b6ecf0310a lib/galaxy/webapps/galaxy/controllers/user.py
--- a/lib/galaxy/webapps/galaxy/controllers/user.py
+++ b/lib/galaxy/webapps/galaxy/controllers/user.py
@@ -1339,9 +1339,6 @@
continue
# Get chrom count file.
- # NOTE: this conversion doesn't work well with set_metadata_externally=False
- # because the conversion occurs before metadata can be set; the
- # dataset is marked as deleted and a subsequent conversion is run.
chrom_count_dataset = len_dataset.get_converted_dataset( trans, "linecount" )
if not chrom_count_dataset or chrom_count_dataset.state != trans.app.model.Job.states.OK:
# No valid linecount dataset.
diff -r 16fbd8c387a0cb7eb7607b6d37c6ea26fa9c6d50 -r c01b7cf7257af79616730a73886711b6ecf0310a scripts/extract_dataset_part.py
--- a/scripts/extract_dataset_part.py
+++ b/scripts/extract_dataset_part.py
@@ -24,7 +24,6 @@
# This junk is here to prevent loading errors
import galaxy.model.mapping #need to load this before we unpickle, in order to setup properties assigned by the mappers
galaxy.model.Job() #this looks REAL stupid, but it is REQUIRED in order for SA to insert parameters into the classes defined by the mappers --> it appears that instantiating ANY mapper'ed class would suffice here
-galaxy.datatypes.metadata.DATABASE_CONNECTION_AVAILABLE = False #Let metadata know that there is no database connection, and to just assume object ids are valid
def __main__():
"""
diff -r 16fbd8c387a0cb7eb7607b6d37c6ea26fa9c6d50 -r c01b7cf7257af79616730a73886711b6ecf0310a scripts/functional_tests.py
--- a/scripts/functional_tests.py
+++ b/scripts/functional_tests.py
@@ -185,7 +185,6 @@
cluster_files_directory = cluster_files_directory,
job_working_directory = job_working_directory,
outputs_to_working_directory = 'True',
- set_metadata_externally = 'True',
static_enabled = 'False',
debug = 'False',
track_jobs_in_database = 'True',
diff -r 16fbd8c387a0cb7eb7607b6d37c6ea26fa9c6d50 -r c01b7cf7257af79616730a73886711b6ecf0310a scripts/set_metadata.py
--- a/scripts/set_metadata.py
+++ b/scripts/set_metadata.py
@@ -28,7 +28,6 @@
import simplejson
import galaxy.model.mapping #need to load this before we unpickle, in order to setup properties assigned by the mappers
galaxy.model.Job() #this looks REAL stupid, but it is REQUIRED in order for SA to insert parameters into the classes defined by the mappers --> it appears that instantiating ANY mapper'ed class would suffice here
-galaxy.datatypes.metadata.DATABASE_CONNECTION_AVAILABLE = False #Let metadata know that there is no database connection, and to just assume object ids are valid
from galaxy.util import stringify_dictionary_keys
from galaxy.util.json import from_json_string
from sqlalchemy.orm import clear_mappers
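Taken together, the runner changes in this changeset (local.py, lwr.py, tasks.py) reduce the decision to two conditions: the job must not be in an error or deleted state, and it must have output paths. A minimal sketch of that shared guard, using stand-in objects rather than Galaxy's real job wrapper:

    ERROR, DELETED, OK = 'error', 'deleted', 'ok'

    class FakeJobWrapper( object ):
        """Stand-in for Galaxy's JobWrapper; only the pieces the guard needs."""
        def __init__( self, state, output_paths ):
            self._state = state
            self.output_paths = output_paths
        def get_state( self ):
            return self._state
        def setup_external_metadata( self, **kwds ):
            # The real method builds the command line that runs scripts/set_metadata.py.
            return "cd /galaxy; python ./scripts/set_metadata.py ..."

    def external_metadata_command( job_wrapper ):
        # With set_metadata_externally gone, only job state and output paths are checked.
        if job_wrapper.get_state() not in [ ERROR, DELETED ] and job_wrapper.output_paths:
            return job_wrapper.setup_external_metadata( set_extension=True, kwds={ 'overwrite': False } )
        return None

    print( external_metadata_command( FakeJobWrapper( OK, [ '/tmp/dataset_1.dat' ] ) ) )       # command string
    print( external_metadata_command( FakeJobWrapper( DELETED, [ '/tmp/dataset_1.dat' ] ) ) )  # None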
https://bitbucket.org/galaxy/galaxy-central/commits/b72a06cf0247/
Changeset: b72a06cf0247
User: dannon
Date: 2013-05-06 18:53:32
Summary: Merge
Affected #: 4 files
Diff not available.
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/02822f28dc93/
Changeset: 02822f28dc93
User: dannon
Date: 2013-05-05 17:05:50
Summary: Temporarily disable database_connection in metadata.py until a better solution is found.
Affected #: 1 file
diff -r c823b1404fea2350d9c0b2d0ebc4965d7f9ffcb8 -r 02822f28dc93348264a18f9f30abfedb53b80728 lib/galaxy/datatypes/metadata.py
--- a/lib/galaxy/datatypes/metadata.py
+++ b/lib/galaxy/datatypes/metadata.py
@@ -28,7 +28,7 @@
STATEMENTS = "__galaxy_statements__" #this is the name of the property in a Datatype class where new metadata spec element Statements are stored
-DATABASE_CONNECTION_AVAILABLE = True #When False, certain metadata parameter types (see FileParameter) will behave differently
+DATABASE_CONNECTION_AVAILABLE = False #When False, certain metadata parameter types (see FileParameter) will behave differently
class Statement( object ):
"""
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/c823b1404fea/
Changeset: c823b1404fea
User: inithello
Date: 2013-05-04 15:07:50
Summary: Remove debug filtering.
Affected #: 1 file
diff -r d207aa915b9d89167858439df97bf385669b4a2d -r c823b1404fea2350d9c0b2d0ebc4965d7f9ffcb8 test/install_and_test_tool_shed_repositories/functional_tests.py
--- a/test/install_and_test_tool_shed_repositories/functional_tests.py
+++ b/test/install_and_test_tool_shed_repositories/functional_tests.py
@@ -572,14 +572,7 @@
# it will record the result of the tests, and if any failed, the traceback and captured output of the tool that was run.
# After all tests have completed, the repository is uninstalled, so that the previous test cases don't interfere with
# the next repository's functional tests.
- keep_skipping = True
for repository_info_dict in detailed_repository_list:
- if repository_info_dict[ 'name' ] == 'simsearch' and repository_info_dict[ 'changeset_revision' ] == '430f55978191':
- keep_skipping = False
- if keep_skipping:
- continue
- if repository_info_dict[ 'name' ] == 'sortmerna':
- continue
"""
Each repository_info_dict looks something like:
{
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/d207aa915b9d/
Changeset: d207aa915b9d
User: jgoecks
Date: 2013-05-04 00:02:57
Summary: When deleting a history and marking associated jobs deleted, first check that there is a job to mark.
Affected #: 1 file
diff -r e7feb8e93b6e862b8b450dc5a9636297cea28516 -r d207aa915b9d89167858439df97bf385669b4a2d lib/galaxy/webapps/galaxy/controllers/history.py
--- a/lib/galaxy/webapps/galaxy/controllers/history.py
+++ b/lib/galaxy/webapps/galaxy/controllers/history.py
@@ -563,11 +563,14 @@
self.sa_session.add( history )
self.sa_session.flush()
for hda in history.datasets:
- job = hda.creating_job_associations[0].job
- if job.history_id == history.id and job.state in [ trans.app.model.Job.states.QUEUED, trans.app.model.Job.states.RUNNING, trans.app.model.Job.states.NEW ]:
- # No need to check other outputs since the job's parent history is this history
- job.mark_deleted( trans.app.config.track_jobs_in_database )
- trans.app.job_manager.job_stop_queue.put( job.id )
+ # Not all datasets have jobs associated with them (e.g., datasets imported from libraries).
+ if hda.creating_job_associations:
+ # HDA has associated job, so try marking it deleted.
+ job = hda.creating_job_associations[0].job
+ if job.history_id == history.id and job.state in [ trans.app.model.Job.states.QUEUED, trans.app.model.Job.states.RUNNING, trans.app.model.Job.states.NEW ]:
+ # No need to check other outputs since the job's parent history is this history
+ job.mark_deleted( trans.app.config.track_jobs_in_database )
+ trans.app.job_manager.job_stop_queue.put( job.id )
# Regardless of whether it was previously deleted, we make a new history active
trans.new_history()
return trans.show_ok_message( "History deleted, a new history is active", refresh_frames=['history'] )
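The added guard matters because indexing creating_job_associations[0] on a dataset with no creating job (for example, one imported from a data library) raises an IndexError and aborts the whole deletion loop. A toy illustration of the failure mode the check avoids, with plain lists standing in for the association collections:

    hdas = [
        { 'name': 'mapped.bam', 'creating_job_associations': [ 'job-42' ] },
        { 'name': 'library_import.txt', 'creating_job_associations': [] },  # no creating job
    ]

    for hda in hdas:
        if hda[ 'creating_job_associations' ]:
            job = hda[ 'creating_job_associations' ][ 0 ]
            print( 'would mark %s deleted for %s' % ( job, hda[ 'name' ] ) )
        else:
            print( 'no job to mark for %s' % hda[ 'name' ] )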
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/b5ea0e100bda/
Changeset: b5ea0e100bda
User: inithello
Date: 2013-05-03 21:55:56
Summary: Rename install_repository_tools to install_tool_shed_repositories. Use shed_util_common.url_join to generate the API url.
Affected #: 1 file
diff -r 75e7383c5c794e02503dc03e0d5daf3694323b7d -r b5ea0e100bdaf47d53d11fa2beb39f4218fa8136 scripts/api/install_tool_shed_repositories.py
--- /dev/null
+++ b/scripts/api/install_tool_shed_repositories.py
@@ -0,0 +1,52 @@
+#!/usr/bin/env python
+"""
+Install a specified repository revision from a specified tool shed into Galaxy. This example demonstrates installation of a repository that contains
+valid tools, loading them into a section of the Galaxy tool panel or creating a new tool panel section.
+You can choose if tool dependencies or repository dependencies should be installed, use --repository-deps or --tool-deps.
+
+This example requires a tool panel config file (e.g., tool_conf.xml, shed_tool_conf.xml, etc) to contain a tool panel section like the following:
+
+<section id="from_test_tool_shed" name="From Test Tool Shed" version="">
+</section>
+
+Here is a working example of how to use this script to install a repository from the test tool shed.
+./install_tool_shed_repositories.py --api <api key> --local <galaxy base url> --url http://testtoolshed.g2.bx.psu.edu --name gregs_filter --owner greg --revision f28d5018f9cb --tool-deps
+"""
+
+import os
+import sys
+import argparse
+sys.path.insert( 0, os.path.dirname( __file__ ) )
+from common import submit
+
+def main( options ):
+ """Collect all user data and install the tools via the Galaxy API."""
+ data = {}
+ data[ 'tool_shed_url' ] = options.tool_shed_url
+ data[ 'name' ] = options.name
+ data[ 'owner' ] = options.owner
+ data[ 'changeset_revision' ] = options.changeset_revision
+ if options.tool_panel_section_id:
+ data[ 'tool_panel_section_id' ] = options.tool_panel_section_id
+ elif options.new_tool_panel_section_label:
+ data[ 'new_tool_panel_section_label' ] = options.new_tool_panel_section_label
+ if options.install_repository_dependencies:
+ data[ 'install_repository_dependencies' ] = options.install_repository_dependencies
+ if options.install_tool_dependencies:
+ data[ 'install_tool_dependencies' ] = options.install_tool_dependencies
+ submit( options.api, '%s%s' % ( options.local_url.strip('/'), '/api/tool_shed_repositories/new/install_repository_revision' ), data )
+
+if __name__ == '__main__':
+ parser = argparse.ArgumentParser( description='Installation of tool shed repositories via the Galaxy API.' )
+ parser.add_argument( "-u", "--url", dest="tool_shed_url", required=True, help="Tool Shed URL" )
+ parser.add_argument( "-a", "--api", dest="api", required=True, help="API Key" )
+ parser.add_argument( "-l", "--local", dest="local_url", required=True, help="URL of the galaxy instance." )
+ parser.add_argument( "-n", "--name", required=True, help="Repository name." )
+ parser.add_argument( "-o", "--owner", required=True, help="Repository owner." )
+ parser.add_argument( "-r", "--revision", dest="changeset_revision", required=True, help="Repository owner." )
+ parser.add_argument( "--panel-section-id", dest="tool_panel_section_id", help="Tool panel section id if you want to add your repository to an existing tool section." )
+ parser.add_argument( "--panel-section-name", dest="new_tool_panel_section_label", help="New tool panel section label. If specified a new tool section will be created." )
+ parser.add_argument( "--repository-deps", dest="install_repository_dependencies", action="store_true", default=False, help="Install repository dependencies. [False]")
+ parser.add_argument( "--tool-deps", dest="install_tool_dependencies", action="store_true", default=False, help="Install tool dependencies. [False]" )
+ options = parser.parse_args()
+ main( options )
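For reference, the payload and endpoint the script assembles before calling common.submit look roughly like the following; the Galaxy URL is a placeholder and the repository values are taken from the docstring's example:

    options = {
        'tool_shed_url': 'http://testtoolshed.g2.bx.psu.edu',
        'name': 'gregs_filter',
        'owner': 'greg',
        'changeset_revision': 'f28d5018f9cb',
        'install_tool_dependencies': True,
    }
    local_url = 'http://localhost:8080/'
    endpoint = '%s%s' % ( local_url.strip( '/' ), '/api/tool_shed_repositories/new/install_repository_revision' )
    data = dict( ( k, v ) for k, v in options.items() if v )
    print( endpoint )
    print( data )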
Repository URL: https://bitbucket.org/galaxy/galaxy-central/