galaxy-commits
3 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/7980b6390e48/
changeset: 7980b6390e48
user: afgane
date: 2012-08-14 02:04:25
summary: Add the ability for Galaxy's ObjectStore to use OpenStack's SWIFT object store as the backend data storage
affected #: 3 files
diff -r be81990d148a81bc2896c3c543d2ff70ad9c3c67 -r 7980b6390e48db3bb0ae2f385d259a994e64b3f0 lib/galaxy/config.py
--- a/lib/galaxy/config.py
+++ b/lib/galaxy/config.py
@@ -169,10 +169,14 @@
if self.nginx_upload_store:
self.nginx_upload_store = os.path.abspath( self.nginx_upload_store )
self.object_store = kwargs.get( 'object_store', 'disk' )
- self.aws_access_key = kwargs.get( 'aws_access_key', None )
- self.aws_secret_key = kwargs.get( 'aws_secret_key', None )
- self.s3_bucket = kwargs.get( 's3_bucket', None)
- self.use_reduced_redundancy = kwargs.get( 'use_reduced_redundancy', False )
+ self.os_access_key = kwargs.get( 'os_access_key', None )
+ self.os_secret_key = kwargs.get( 'os_secret_key', None )
+ self.os_bucket_name = kwargs.get( 'os_bucket_name', None )
+ self.os_host = kwargs.get( 'os_host', None )
+ self.os_port = kwargs.get( 'os_port', None )
+ self.os_is_secure = string_as_bool( kwargs.get( 'os_is_secure', True ) )
+ self.os_conn_path = kwargs.get( 'os_conn_path', '/' )
+ self.os_use_reduced_redundancy = kwargs.get( 'os_use_reduced_redundancy', False )
self.object_store_cache_size = float(kwargs.get( 'object_store_cache_size', -1 ))
self.distributed_object_store_config_file = kwargs.get( 'distributed_object_store_config_file', None )
# Parse global_conf and save the parser
diff -r be81990d148a81bc2896c3c543d2ff70ad9c3c67 -r 7980b6390e48db3bb0ae2f385d259a994e64b3f0 lib/galaxy/objectstore/__init__.py
--- a/lib/galaxy/objectstore/__init__.py
+++ b/lib/galaxy/objectstore/__init__.py
@@ -25,6 +25,7 @@
if sys.version_info >= (2, 6):
import multiprocessing
from galaxy.objectstore.s3_multipart_upload import multipart_upload
+ import boto
from boto.s3.key import Key
from boto.s3.connection import S3Connection
from boto.exception import S3ResponseError
@@ -377,9 +378,9 @@
super(S3ObjectStore, self).__init__()
self.config = config
self.staging_path = self.config.file_path
- self.s3_conn = S3Connection()
- self.bucket = self._get_bucket(self.config.s3_bucket)
- self.use_rr = self.config.use_reduced_redundancy
+ self.s3_conn = get_OS_connection(self.config)
+ self.bucket = self._get_bucket(self.config.os_bucket_name)
+ self.use_rr = self.config.os_use_reduced_redundancy
self.cache_size = self.config.object_store_cache_size
self.transfer_progress = 0
# Clean cache only if value is set in universe_wsgi.ini
@@ -468,7 +469,7 @@
for i in range(5):
try:
bucket = self.s3_conn.get_bucket(bucket_name)
- log.debug("Using S3 object store; got bucket '%s'" % bucket.name)
+ log.debug("Using cloud object store with bucket '%s'" % bucket.name)
return bucket
except S3ResponseError:
log.debug("Could not get bucket '%s', attempt %s/5" % (bucket_name, i+1))
@@ -843,7 +844,6 @@
def get_store_usage_percent(self):
return 0.0
-
class DistributedObjectStore(ObjectStore):
"""
ObjectStore that defers to a list of backends, for getting objects the
@@ -1009,14 +1009,14 @@
store = config.object_store
if store == 'disk':
return DiskObjectStore(config=config)
- elif store == 's3':
- os.environ['AWS_ACCESS_KEY_ID'] = config.aws_access_key
- os.environ['AWS_SECRET_ACCESS_KEY'] = config.aws_secret_key
+ elif store == 's3' or store == 'swift':
return S3ObjectStore(config=config)
elif store == 'distributed':
return DistributedObjectStore(config=config)
elif store == 'hierarchical':
return HierarchicalObjectStore()
+ else:
+ log.error("Unrecognized object store definition: {0}".format(store))
def convert_bytes(bytes):
""" A helper function used for pretty printing disk usage """
@@ -1039,3 +1039,26 @@
else:
size = '%.2fb' % bytes
return size
+
+def get_OS_connection(config):
+ """
+ Get a connection object for a cloud Object Store specified in the config.
+ Currently, this is a ``boto`` connection object.
+ """
+ log.debug("Getting a connection object for '{0}' object store".format(config.object_store))
+ a_key = config.os_access_key
+ s_key = config.os_secret_key
+ if config.object_store == 's3':
+ return S3Connection(a_key, s_key)
+ else:
+ # Establish the connection now
+ calling_format = boto.s3.connection.OrdinaryCallingFormat()
+ s3_conn = boto.connect_s3(aws_access_key_id=a_key,
+ aws_secret_access_key=s_key,
+ is_secure=config.os_is_secure,
+ host=config.os_host,
+ port=int(config.os_port),
+ calling_format=calling_format,
+ path=config.os_conn_path)
+ return s3_conn
+
diff -r be81990d148a81bc2896c3c543d2ff70ad9c3c67 -r 7980b6390e48db3bb0ae2f385d259a994e64b3f0 universe_wsgi.ini.sample
--- a/universe_wsgi.ini.sample
+++ b/universe_wsgi.ini.sample
@@ -481,16 +481,22 @@
# -- Beta features
-# Object store mode (valid options are: disk, s3, distributed, hierarchical)
+# Object store mode (valid options are: disk, s3, swift, distributed, hierarchical)
#object_store = disk
-#aws_access_key = <AWS access key>
-#aws_secret_key = <AWS secret key>
-#s3_bucket = <name of an existing S3 bucket>
-#use_reduced_redundancy = True
-
+#os_access_key = <your cloud object store access key>
+#os_secret_key = <your cloud object store secret key>
+#os_bucket_name = <name of an existing object store bucket or container>
+# If using 'swift' object store, you must specify the following connection properties
+#os_host = swift.rc.nectar.org.au
+#os_port = 8888
+#os_is_secure = False
+#os_conn_path = /
+# Reduced redundancy can be used only with the 's3' object store
+#os_use_reduced_redundancy = False
# Size (in GB) that the cache used by object store should be limited to.
-# If the value is not specified, the cache size will be limited only by the file
-# system size.
+# If the value is not specified, the cache size will be limited only by the
+# file system size. The file system location of the cache is considered the
+# configuration of the ``file_path`` directive defined above.
#object_store_cache_size = 100
# Configuration file for the distributed object store, if object_store =
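For reference, a minimal standalone sketch of what the new get_OS_connection() helper does in the 'swift' case, using the commented-out Nectar endpoint values from universe_wsgi.ini.sample above. The access/secret keys and the bucket name below are placeholders, not real values.

    import boto
    from boto.s3.connection import OrdinaryCallingFormat

    # Placeholder credentials -- substitute your own object store keys.
    a_key = 'YOUR_OS_ACCESS_KEY'
    s_key = 'YOUR_OS_SECRET_KEY'

    # Endpoint values mirror the sample defaults shown in the diff above.
    swift_conn = boto.connect_s3(aws_access_key_id=a_key,
                                 aws_secret_access_key=s_key,
                                 is_secure=False,
                                 host='swift.rc.nectar.org.au',
                                 port=8888,
                                 calling_format=OrdinaryCallingFormat(),
                                 path='/')
    bucket = swift_conn.get_bucket('my-galaxy-bucket')  # hypothetical bucket/container name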
https://bitbucket.org/galaxy/galaxy-central/changeset/7387d61dfd59/
changeset: 7387d61dfd59
user: afgane
date: 2012-08-16 01:50:00
summary: Handle AWS-specific config options for backward compatibility
affected #: 1 file
diff -r 7980b6390e48db3bb0ae2f385d259a994e64b3f0 -r 7387d61dfd59ca588bb84a57272beed1bb930abb lib/galaxy/config.py
--- a/lib/galaxy/config.py
+++ b/lib/galaxy/config.py
@@ -169,14 +169,21 @@
if self.nginx_upload_store:
self.nginx_upload_store = os.path.abspath( self.nginx_upload_store )
self.object_store = kwargs.get( 'object_store', 'disk' )
- self.os_access_key = kwargs.get( 'os_access_key', None )
- self.os_secret_key = kwargs.get( 'os_secret_key', None )
- self.os_bucket_name = kwargs.get( 'os_bucket_name', None )
+ # Handle AWS-specific config options for backward compatibility
+ if kwargs.get( 'aws_access_key', None) is not None:
+ self.os_access_key= kwargs.get( 'aws_access_key', None )
+ self.os_secret_key= kwargs.get( 'aws_secret_key', None )
+ self.os_bucket_name= kwargs.get( 's3_bucket', None )
+ self.os_use_reduced_redundancy = kwargs.get( 'use_reduced_redundancy', False )
+ else:
+ self.os_access_key = kwargs.get( 'os_access_key', None )
+ self.os_secret_key = kwargs.get( 'os_secret_key', None )
+ self.os_bucket_name = kwargs.get( 'os_bucket_name', None )
+ self.os_use_reduced_redundancy = kwargs.get( 'os_use_reduced_redundancy', False )
self.os_host = kwargs.get( 'os_host', None )
self.os_port = kwargs.get( 'os_port', None )
self.os_is_secure = string_as_bool( kwargs.get( 'os_is_secure', True ) )
self.os_conn_path = kwargs.get( 'os_conn_path', '/' )
- self.os_use_reduced_redundancy = kwargs.get( 'os_use_reduced_redundancy', False )
self.object_store_cache_size = float(kwargs.get( 'object_store_cache_size', -1 ))
self.distributed_object_store_config_file = kwargs.get( 'distributed_object_store_config_file', None )
# Parse global_conf and save the parser
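In practice this means an existing S3 deployment keeps working unchanged: if aws_access_key is set, the legacy aws_secret_key, s3_bucket and use_reduced_redundancy options are mapped onto the new os_* attributes; otherwise the os_* options are read directly. A minimal sketch of the same fallback pattern, using hypothetical option values:

    # Hypothetical kwargs parsed from an old-style universe_wsgi.ini.
    kwargs = {
        'aws_access_key': 'AKIAEXAMPLE',
        'aws_secret_key': 'example-secret',
        's3_bucket': 'my-existing-bucket',
        'use_reduced_redundancy': True,
    }

    if kwargs.get('aws_access_key', None) is not None:
        # Legacy AWS-style options win, for backward compatibility.
        os_access_key = kwargs.get('aws_access_key')
        os_secret_key = kwargs.get('aws_secret_key')
        os_bucket_name = kwargs.get('s3_bucket')
        os_use_reduced_redundancy = kwargs.get('use_reduced_redundancy', False)
    else:
        os_access_key = kwargs.get('os_access_key')
        os_secret_key = kwargs.get('os_secret_key')
        os_bucket_name = kwargs.get('os_bucket_name')
        os_use_reduced_redundancy = kwargs.get('os_use_reduced_redundancy', False)

    print(os_bucket_name)  # my-existing-bucket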
https://bitbucket.org/galaxy/galaxy-central/changeset/a42eae47ae87/
changeset: a42eae47ae87
user: afgane
date: 2012-08-16 01:57:05
summary: Merge
affected #: 24 files
diff -r 7387d61dfd59ca588bb84a57272beed1bb930abb -r a42eae47ae877073a077527463beb32e251d7035 lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py
+++ b/lib/galaxy/jobs/__init__.py
@@ -309,9 +309,10 @@
return self.fail( job.info )
# Check the tool's stdout, stderr, and exit code for errors, but only
- # if the job has not already been marked as having an error.
+ # if the job has not already been marked as having an error.
+ # The job's stdout and stderr will be set accordingly.
if job.states.ERROR != job.state:
- if ( self.check_tool_output( stdout, stderr, tool_exit_code ) ):
+ if ( self.check_tool_output( stdout, stderr, tool_exit_code, job )):
job.state = job.states.OK
else:
job.state = job.states.ERROR
@@ -335,7 +336,7 @@
log.warning( "finish(): %s not found, but %s is not empty, so it will be used instead" % ( dataset_path.false_path, dataset_path.real_path ) )
else:
return self.fail( "Job %s's output dataset(s) could not be read" % job.id )
- job_context = ExpressionContext( dict( stdout = stdout, stderr = stderr ) )
+ job_context = ExpressionContext( dict( stdout = job.stdout, stderr = job.stderr ) )
job_tool = self.app.toolbox.tools_by_id.get( job.tool_id, None )
@@ -430,12 +431,12 @@
# will now be seen by the user.
self.sa_session.flush()
# Save stdout and stderr
- if len( stdout ) > 32768:
+ if len( job.stdout ) > 32768:
log.error( "stdout for job %d is greater than 32K, only first part will be logged to database" % job.id )
- job.stdout = stdout[:32768]
- if len( stderr ) > 32768:
+ job.stdout = job.stdout[:32768]
+ if len( job.stderr ) > 32768:
log.error( "stderr for job %d is greater than 32K, only first part will be logged to database" % job.id )
- job.stderr = stderr[:32768]
+ job.stderr = job.stderr[:32768]
# custom post process setup
inp_data = dict( [ ( da.name, da.dataset ) for da in job.input_datasets ] )
out_data = dict( [ ( da.name, da.dataset ) for da in job.output_datasets ] )
@@ -457,7 +458,7 @@
# Call 'exec_after_process' hook
self.tool.call_hook( 'exec_after_process', self.queue.app, inp_data=inp_data,
out_data=out_data, param_dict=param_dict,
- tool=self.tool, stdout=stdout, stderr=stderr )
+ tool=self.tool, stdout=job.stdout, stderr=job.stderr )
job.command_line = self.command_line
bytes = 0
@@ -477,7 +478,7 @@
if self.app.config.cleanup_job == 'always' or ( not stderr and self.app.config.cleanup_job == 'onsuccess' ):
self.cleanup()
- def check_tool_output( self, stdout, stderr, tool_exit_code ):
+ def check_tool_output( self, stdout, stderr, tool_exit_code, job ):
"""
Check the output of a tool - given the stdout, stderr, and the tool's
exit code, return True if the tool exited successfully and False
@@ -487,8 +488,8 @@
Note that, if the tool did not define any exit code handling or
any stdio/stderr handling, then it reverts back to previous behavior:
if stderr contains anything, then False is returned.
+ Note that the job id is just for messages.
"""
- job = self.get_job()
err_msg = ""
# By default, the tool succeeded. This covers the case where the code
# has a bug but the tool was ok, and it lets a workflow continue.
@@ -497,10 +498,14 @@
try:
# Check exit codes and match regular expressions against stdout and
# stderr if this tool was configured to do so.
+ # If there is a regular expression for scanning stdout/stderr,
+ # then we assume that the tool writer overwrote the default
+ # behavior of just setting an error if there is *anything* on
+ # stderr.
if ( len( self.tool.stdio_regexes ) > 0 or
len( self.tool.stdio_exit_codes ) > 0 ):
- # We will check the exit code ranges in the order in which
- # they were specified. Each exit_code is a ToolStdioExitCode
+ # Check the exit code ranges in the order in which
+ # they were specified. Each exit_code is a StdioExitCode
# that includes an applicable range. If the exit code was in
# that range, then apply the error level and add in a message.
# If we've reached a fatal error rule, then stop.
@@ -508,24 +513,33 @@
for stdio_exit_code in self.tool.stdio_exit_codes:
if ( tool_exit_code >= stdio_exit_code.range_start and
tool_exit_code <= stdio_exit_code.range_end ):
- if None != stdio_exit_code.desc:
- err_msg += stdio_exit_code.desc
- # TODO: Find somewhere to stick the err_msg - possibly to
- # the source (stderr/stdout), possibly in a new db column.
+ # Tack on a generic description of the code
+ # plus a specific code description. For example,
+ # this might append "Job 42: Warning: Out of Memory\n".
+ # TODO: Find somewhere to stick the err_msg -
+ # possibly to the source (stderr/stdout), possibly
+ # in a new db column.
+ code_desc = stdio_exit_code.desc
+ if ( None == code_desc ):
+ code_desc = ""
+ tool_msg = ( "Job %s: %s: Exit code %d: %s" % (
+ job.get_id_tag(),
+ galaxy.tools.StdioErrorLevel.desc( tool_exit_code ),
+ tool_exit_code,
+ code_desc ) )
+ log.info( tool_msg )
+ stderr = err_msg + stderr
max_error_level = max( max_error_level,
stdio_exit_code.error_level )
- if max_error_level >= galaxy.tools.StdioErrorLevel.FATAL:
+ if ( max_error_level >=
+ galaxy.tools.StdioErrorLevel.FATAL ):
break
- # If there is a regular expression for scanning stdout/stderr,
- # then we assume that the tool writer overwrote the default
- # behavior of just setting an error if there is *anything* on
- # stderr.
if max_error_level < galaxy.tools.StdioErrorLevel.FATAL:
# We'll examine every regex. Each regex specifies whether
# it is to be run on stdout, stderr, or both. (It is
# possible for neither stdout nor stderr to be scanned,
- # but those won't be scanned.) We record the highest
+ # but those regexes won't be used.) We record the highest
# error level, which are currently "warning" and "fatal".
# If fatal, then we set the job's state to ERROR.
# If warning, then we still set the job's state to OK
@@ -539,19 +553,32 @@
# Repeat the stdout stuff for stderr.
# TODO: Collapse this into a single function.
if ( regex.stdout_match ):
- regex_match = re.search( regex.match, stdout )
+ regex_match = re.search( regex.match, stdout,
+ re.IGNORECASE )
if ( regex_match ):
- err_msg += self.regex_err_msg( regex_match, regex )
- max_error_level = max( max_error_level, regex.error_level )
- if max_error_level >= galaxy.tools.StdioErrorLevel.FATAL:
+ rexmsg = self.regex_err_msg( regex_match, regex)
+ log.info( "Job %s: %s"
+ % ( job.get_id_tag(), rexmsg ) )
+ stdout = rexmsg + "\n" + stdout
+ max_error_level = max( max_error_level,
+ regex.error_level )
+ if ( max_error_level >=
+ galaxy.tools.StdioErrorLevel.FATAL ):
break
- if ( regex.stderr_match ):
- regex_match = re.search( regex.match, stderr )
+
+ if ( regex.stderr_match ):
+ regex_match = re.search( regex.match, stderr,
+ re.IGNORECASE )
if ( regex_match ):
- err_msg += self.regex_err_msg( regex_match, regex )
- max_error_level = max( max_error_level,
+ rexmsg = self.regex_err_msg( regex_match, regex)
+ # DELETEME
+ log.info( "Job %s: %s"
+ % ( job.get_id_tag(), rexmsg ) )
+ stderr = rexmsg + "\n" + stderr
+ max_error_level = max( max_error_level,
regex.error_level )
- if max_error_level >= galaxy.tools.StdioErrorLevel.FATAL:
+ if ( max_error_level >=
+ galaxy.tools.StdioErrorLevel.FATAL ):
break
# If we encountered a fatal error, then we'll need to set the
@@ -565,17 +592,26 @@
# default to the previous behavior: when there's anything on stderr
# the job has an error, and the job is ok otherwise.
else:
- log.debug( "The tool did not define exit code or stdio handling; "
+ # TODO: Add in the tool and job id:
+ log.debug( "Tool did not define exit code or stdio handling; "
+ "checking stderr for success" )
if stderr:
success = False
else:
success = True
+
# On any exception, return True.
except:
+ tb = traceback.format_exc()
log.warning( "Tool check encountered unexpected exception; "
- + "assuming tool was successful" )
+ + "assuming tool was successful: " + tb )
success = True
+
+ # Store the modified stdout and stderr in the job:
+ if None != job:
+ job.stdout = stdout
+ job.stderr = stderr
+
return success
def regex_err_msg( self, match, regex ):
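The intent of the reworked check_tool_output() above is roughly: walk the tool's declared exit-code ranges and stdout/stderr regexes in order, record the highest error level seen, prepend a descriptive message to the captured output, and only treat the job as failed once a fatal level is reached. A simplified, self-contained sketch of that flow follows; the tuple-based exit_codes/regexes and the error-level constants are stand-ins for Galaxy's StdioExitCode, StdioRegex and StdioErrorLevel classes, not the real API.

    import re

    # Stand-in error levels, mirroring galaxy.tools.StdioErrorLevel.
    NO_ERROR, LOG, WARNING, FATAL = range(4)

    def check_output(exit_codes, regexes, stdout, stderr, tool_exit_code):
        """Return True if the tool run is considered successful."""
        max_error_level = NO_ERROR
        # Exit-code ranges are checked in the order they were declared.
        for range_start, range_end, level, desc in exit_codes:
            if range_start <= tool_exit_code <= range_end:
                stderr = "Exit code %d: %s\n%s" % (tool_exit_code, desc or "", stderr)
                max_error_level = max(max_error_level, level)
                if max_error_level >= FATAL:
                    break
        if max_error_level < FATAL:
            # Each regex declares whether it applies to stdout, stderr, or both.
            for pattern, on_stdout, on_stderr, level in regexes:
                if on_stdout and re.search(pattern, stdout, re.IGNORECASE):
                    max_error_level = max(max_error_level, level)
                if on_stderr and re.search(pattern, stderr, re.IGNORECASE):
                    max_error_level = max(max_error_level, level)
                if max_error_level >= FATAL:
                    break
        return max_error_level < FATAL

    # Exit code 1 falls inside a fatal range, so this run is reported as failed.
    print(check_output([(1, 255, FATAL, 'non-zero exit')], [], '', '', 1))  # False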
diff -r 7387d61dfd59ca588bb84a57272beed1bb930abb -r a42eae47ae877073a077527463beb32e251d7035 lib/galaxy/jobs/handler.py
--- a/lib/galaxy/jobs/handler.py
+++ b/lib/galaxy/jobs/handler.py
@@ -360,7 +360,7 @@
def __init__( self, app ):
self.app = app
self.job_runners = {}
- start_job_runners = ["local"]
+ start_job_runners = ["local", "lwr", "dynamic"]
if app.config.start_job_runners is not None:
start_job_runners.extend( [ x.strip() for x in util.listify( app.config.start_job_runners ) ] )
if app.config.use_tasked_jobs:
diff -r 7387d61dfd59ca588bb84a57272beed1bb930abb -r a42eae47ae877073a077527463beb32e251d7035 lib/galaxy/jobs/runners/drmaa.py
--- a/lib/galaxy/jobs/runners/drmaa.py
+++ b/lib/galaxy/jobs/runners/drmaa.py
@@ -339,6 +339,8 @@
# By default, the exit code is 0, which typically indicates success.
exit_code = 0
exit_code_str = "0"
+ # To ensure that files below are readable, ownership must be reclaimed first
+ drm_job_state.job_wrapper.reclaim_ownership()
while which_try < (self.app.config.retry_job_output_collection + 1):
try:
ofh = file(ofile, "r")
diff -r 7387d61dfd59ca588bb84a57272beed1bb930abb -r a42eae47ae877073a077527463beb32e251d7035 lib/galaxy/tool_shed/install_manager.py
--- a/lib/galaxy/tool_shed/install_manager.py
+++ b/lib/galaxy/tool_shed/install_manager.py
@@ -136,27 +136,22 @@
repository_clone_url=repository_clone_url,
relative_install_dir=relative_install_dir,
repository_files_dir=None,
- resetting_all_metadata_on_repository=False )
+ resetting_all_metadata_on_repository=False,
+ webapp='galaxy' )
tool_shed_repository.metadata = metadata_dict
self.app.sa_session.add( tool_shed_repository )
self.app.sa_session.flush()
if 'tool_dependencies' in metadata_dict:
# All tool_dependency objects must be created before the tools are processed even if no tool dependencies will be installed.
- tool_dependencies = create_tool_dependency_objects( self.app, tool_shed_repository, tool_shed_repository.installed_changeset_revision, set_status=True )
+ tool_dependencies = create_tool_dependency_objects( self.app, tool_shed_repository, relative_install_dir, set_status=True )
else:
tool_dependencies = None
if 'tools' in metadata_dict:
- work_dir = tempfile.mkdtemp()
repository_tools_tups = get_repository_tools_tups( self.app, metadata_dict )
if repository_tools_tups:
sample_files = metadata_dict.get( 'sample_files', [] )
# Handle missing data table entries for tool parameters that are dynamically generated select lists.
- repository_tools_tups = handle_missing_data_table_entry( self.app,
- tool_shed_repository,
- tool_shed_repository.installed_changeset_revision,
- self.tool_path,
- repository_tools_tups,
- work_dir )
+ repository_tools_tups = handle_missing_data_table_entry( self.app, relative_install_dir, self.tool_path, repository_tools_tups )
# Handle missing index files for tool parameters that are dynamically generated select lists.
repository_tools_tups, sample_files_copied = handle_missing_index_file( self.app, self.tool_path, sample_files, repository_tools_tups )
# Copy remaining sample files included in the repository to the ~/tool-data directory of the local Galaxy instance.
@@ -166,12 +161,8 @@
update_tool_shed_repository_status( self.app,
tool_shed_repository,
self.app.model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES )
- # Get the tool_dependencies.xml file from the repository.
- tool_dependencies_config = get_config_from_repository( self.app,
- 'tool_dependencies.xml',
- tool_shed_repository,
- tool_shed_repository.installed_changeset_revision,
- work_dir )
+ # Get the tool_dependencies.xml file from disk.
+ tool_dependencies_config = get_config_from_disk( 'tool_dependencies.xml', relative_install_dir )
installed_tool_dependencies = handle_tool_dependencies( app=self.app,
tool_shed_repository=tool_shed_repository,
tool_dependencies_config=tool_dependencies_config,
@@ -189,10 +180,6 @@
self.migrated_tools_config,
tool_panel_dict=tool_panel_dict_for_display,
new_install=True )
- try:
- shutil.rmtree( work_dir )
- except:
- pass
if 'datatypes' in metadata_dict:
tool_shed_repository.status = self.app.model.ToolShedRepository.installation_status.LOADING_PROPRIETARY_DATATYPES
if not tool_shed_repository.includes_datatypes:
@@ -200,11 +187,7 @@
self.app.sa_session.add( tool_shed_repository )
self.app.sa_session.flush()
work_dir = tempfile.mkdtemp()
- datatypes_config = get_config_from_repository( self.app,
- 'datatypes_conf.xml',
- tool_shed_repository,
- tool_shed_repository.installed_changeset_revision,
- work_dir )
+ datatypes_config = get_config_from_disk( 'datatypes_conf.xml', relative_install_dir )
# Load proprietary data types required by tools. The value of override is not important here since the Galaxy server will be started
# after this installation completes.
converter_path, display_path = alter_config_and_load_prorietary_datatypes( self.app, datatypes_config, relative_install_dir, override=False )
diff -r 7387d61dfd59ca588bb84a57272beed1bb930abb -r a42eae47ae877073a077527463beb32e251d7035 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -922,16 +922,11 @@
if self_id in self.app.config.tool_handlers:
self.job_handlers = self.app.config.tool_handlers[ self_id ]
# Set job runner(s). Each runner is a dict with 'url' and, optionally, 'params'.
- if self.app.config.start_job_runners is None:
- # Jobs are always local regardless of tool config if no additional
- # runners are started
- self.job_runners = [ { "url" : "local:///" } ]
- else:
- # Set job runner to the cluster default
- self.job_runners = [ { "url" : self.app.config.default_cluster_job_runner } ]
- # Set custom runner(s) if they're defined.
- if self_id in self.app.config.tool_runners:
- self.job_runners = self.app.config.tool_runners[ self_id ]
+ # Set job runner to the cluster default
+ self.job_runners = [ { "url" : self.app.config.default_cluster_job_runner } ]
+ # Set custom runner(s) if they're defined.
+ if self_id in self.app.config.tool_runners:
+ self.job_runners = self.app.config.tool_runners[ self_id ]
# Is this a 'hidden' tool (hidden in tool menu)
self.hidden = util.xml_text(root, "hidden")
if self.hidden: self.hidden = util.string_as_bool(self.hidden)
@@ -1316,6 +1311,8 @@
return_level = StdioErrorLevel.WARNING
elif ( re.search( "fatal", err_level, re.IGNORECASE ) ):
return_level = StdioErrorLevel.FATAL
+ else:
+ log.debug( "Error level %s did not match warning/fatal" % err_level )
except Exception, e:
log.error( "Exception in parse_error_level "
+ str(sys.exc_info() ) )
diff -r 7387d61dfd59ca588bb84a57272beed1bb930abb -r a42eae47ae877073a077527463beb32e251d7035 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -248,7 +248,7 @@
except:
pass
return converter_path, display_path
-def check_tool_input_params( app, repo_dir, tool_config_name, tool, sample_files ):
+def check_tool_input_params( app, repo_dir, tool_config_name, tool, sample_files, webapp='galaxy' ):
"""
Check all of the tool's input parameters, looking for any that are dynamically generated using external data files to make
sure the files exist.
@@ -291,8 +291,9 @@
correction_msg = "This file refers to a file named <b>%s</b>. " % str( index_file )
correction_msg += "Upload a file named <b>%s.sample</b> to the repository to correct this error." % str( index_file_name )
invalid_files_and_errors_tups.append( ( tool_config_name, correction_msg ) )
- # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
- reset_tool_data_tables( app )
+ if webapp == 'community':
+ # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
+ reset_tool_data_tables( app )
return invalid_files_and_errors_tups
def config_elems_to_xml_file( app, config_elems, config_filename, tool_path ):
# Persist the current in-memory list of config_elems to a file named by the value of config_filename.
@@ -427,16 +428,11 @@
sa_session.add( tool_shed_repository )
sa_session.flush()
return tool_shed_repository
-def create_tool_dependency_objects( app, tool_shed_repository, current_changeset_revision, set_status=True ):
+def create_tool_dependency_objects( app, tool_shed_repository, relative_install_dir, set_status=True ):
# Create or update a ToolDependency for each entry in tool_dependencies_config. This method is called when installing a new tool_shed_repository.
tool_dependency_objects = []
- work_dir = tempfile.mkdtemp()
# Get the tool_dependencies.xml file from the repository.
- tool_dependencies_config = get_config_from_repository( app,
- 'tool_dependencies.xml',
- tool_shed_repository,
- current_changeset_revision,
- work_dir )
+ tool_dependencies_config = get_config_from_disk( 'tool_dependencies.xml', relative_install_dir )
tree = ElementTree.parse( tool_dependencies_config )
root = tree.getroot()
ElementInclude.include( root )
@@ -454,10 +450,6 @@
status=app.model.ToolDependency.installation_status.NEVER_INSTALLED,
set_status=set_status )
tool_dependency_objects.append( tool_dependency )
- try:
- shutil.rmtree( work_dir )
- except:
- pass
return tool_dependency_objects
def generate_clone_url( trans, repository ):
"""Generate the URL for cloning a repository."""
@@ -541,14 +533,15 @@
if req_name==tool_dependency_name and req_version==tool_dependency_version and req_type==tool_dependency_type:
can_generate_dependency_metadata = True
break
- if not can_generate_dependency_metadata:
+ if requirements and not can_generate_dependency_metadata:
# We've discovered at least 1 combination of name, version and type that is not defined in the <requirement>
# tag for any tool in the repository.
break
if not can_generate_dependency_metadata:
break
return can_generate_dependency_metadata
-def generate_metadata_for_changeset_revision( app, repository_clone_url, relative_install_dir=None, repository_files_dir=None, resetting_all_metadata_on_repository=False ):
+def generate_metadata_for_changeset_revision( app, repository_clone_url, relative_install_dir=None, repository_files_dir=None,
+ resetting_all_metadata_on_repository=False, webapp='galaxy' ):
"""
Generate metadata for a repository using it's files on disk. To generate metadata for changeset revisions older than the repository tip,
the repository will have been cloned to a temporary location and updated to a specified changeset revision to access that changeset revision's
@@ -629,7 +622,7 @@
invalid_tool_configs.append( name )
invalid_file_tups.append( ( name, str( e ) ) )
if tool is not None:
- invalid_files_and_errors_tups = check_tool_input_params( app, files_dir, name, tool, sample_files )
+ invalid_files_and_errors_tups = check_tool_input_params( app, files_dir, name, tool, sample_files, webapp=webapp )
can_set_metadata = True
for tup in invalid_files_and_errors_tups:
if name in tup:
@@ -664,7 +657,7 @@
metadata_dict = generate_tool_dependency_metadata( tool_dependencies_config, metadata_dict )
if invalid_tool_configs:
metadata_dict [ 'invalid_tools' ] = invalid_tool_configs
- if resetting_all_metadata_on_repository:
+ if webapp == 'community' and resetting_all_metadata_on_repository:
# Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
reset_tool_data_tables( app )
# Reset the value of the app's tool_data_path to it's original value.
@@ -1255,7 +1248,7 @@
return shed_url
# The tool shed from which the repository was originally installed must no longer be configured in tool_sheds_conf.xml.
return None
-def handle_missing_data_table_entry( app, repository, changeset_revision, tool_path, repository_tools_tups, dir ):
+def handle_missing_data_table_entry( app, relative_install_dir, tool_path, repository_tools_tups ):
"""
Inspect each tool to see if any have input parameters that are dynamically generated select lists that require entries in the
tool_data_table_conf.xml file. This method is called only from Galaxy (not the tool shed) when a repository is being installed
@@ -1269,7 +1262,7 @@
break
if missing_data_table_entry:
# The repository must contain a tool_data_table_conf.xml.sample file that includes all required entries for all tools in the repository.
- sample_tool_data_table_conf = get_config_from_repository( app, 'tool_data_table_conf.xml.sample', repository, changeset_revision, dir )
+ sample_tool_data_table_conf = get_config_from_disk( 'tool_data_table_conf.xml.sample', relative_install_dir )
# Add entries to the ToolDataTableManager's in-memory data_tables dictionary as well as the list of data_table_elems and the list of
# data_table_elem_names.
error, correction_msg = handle_sample_tool_data_table_conf_file( app, sample_tool_data_table_conf, persist=True )
@@ -1394,14 +1387,8 @@
def load_installed_datatypes( app, repository, relative_install_dir, deactivate=False ):
# Load proprietary datatypes and return information needed for loading proprietary datatypes converters and display applications later.
metadata = repository.metadata
- work_dir = tempfile.mkdtemp()
repository_dict = None
- datatypes_config = get_config_from_repository( app,
- 'datatypes_conf.xml',
- repository,
- repository.changeset_revision,
- work_dir,
- install_dir=relative_install_dir )
+ datatypes_config = get_config_from_disk( 'datatypes_conf.xml', relative_install_dir )
if datatypes_config:
converter_path, display_path = alter_config_and_load_prorietary_datatypes( app, datatypes_config, relative_install_dir, deactivate=deactivate )
if converter_path or display_path:
@@ -1413,10 +1400,6 @@
tool_dicts=metadata.get( 'tools', [] ),
converter_path=converter_path,
display_path=display_path )
- try:
- shutil.rmtree( work_dir )
- except:
- pass
return repository_dict
def load_installed_display_applications( app, installed_repository_dict, deactivate=False ):
# Load or deactivate proprietary datatype display applications
diff -r 7387d61dfd59ca588bb84a57272beed1bb930abb -r a42eae47ae877073a077527463beb32e251d7035 lib/galaxy/visualization/tracks/data_providers.py
--- a/lib/galaxy/visualization/tracks/data_providers.py
+++ b/lib/galaxy/visualization/tracks/data_providers.py
@@ -947,55 +947,72 @@
return dict( data=dict( min=summary.min_val[0], max=summary.max_val[0], mean=mean, sd=sd ) )
- # The following seems not to work very well, for example it will only return one
- # data point if the tile is 1280px wide. Not sure what the intent is.
+ # Sample from region using approximately this many samples.
+ N = 1000
- # The first zoom level for BBI files is 640. If too much is requested, it will look at each block instead
- # of summaries. The calculation done is: zoom <> (end-start)/num_points/2.
- # Thus, the optimal number of points is (end-start)/num_points/2 = 640
- # num_points = (end-start) / 1280
- #num_points = (end-start) / 1280
- #if num_points < 1:
- # num_points = end - start
- #else:
- # num_points = min(num_points, 500)
+ def summarize_region( bbi, chrom, start, end, num_points ):
+ '''
+ Returns results from summarizing a region using num_points.
+ NOTE: num_points cannot be greater than end - start or BBI
+ will return None for all positions.
+ '''
+ result = []
- # For now, we'll do 1000 data points by default. However, the summaries
- # don't seem to work when a summary pixel corresponds to less than one
- # datapoint, so we prevent that.
+ # Get summary; this samples at intervals of length
+ # (end - start)/num_points -- i.e. drops any fractional component
+ # of interval length.
+ summary = bbi.summarize( chrom, start, end, num_points )
+ if summary:
+ #mean = summary.sum_data / summary.valid_count
+
+ ## Standard deviation by bin, not yet used
+ ## var = summary.sum_squares - mean
+ ## var /= minimum( valid_count - 1, 1 )
+ ## sd = sqrt( var )
+
+ pos = start
+ step_size = (end - start) / num_points
- # FIXME: need to choose the number of points to maximize coverage of the area.
- # It appears that BBI calculates points using intervals of
- # floor( num_points / end - start )
- # In some cases, this prevents sampling near the end of the interval,
- # especially when (a) the total interval is small ( < 20-30Kb) and (b) the
- # computed interval size has a large fraction, e.g. 14.7 or 35.8
- num_points = min( 1000, end - start )
+ for i in range( num_points ):
+ result.append( (pos, float_nan( summary.sum_data[i] / summary.valid_count[i] ) ) )
+ pos += step_size
- # HACK to address the FIXME above; should generalize.
- if end - start <= 2000:
- num_points = end - start
+ return result
- summary = bbi.summarize( chrom, start, end, num_points )
+ # Approach is different depending on region size.
+ if end - start < N:
+ # Get values for individual bases in region, including start and end.
+ # To do this, need to increase end to next base and request number of points.
+ num_points = end - start + 1
+ end += 1
+ else:
+ #
+ # The goal is to sample the region between start and end uniformly
+ # using ~N data points. The challenge is that the size of sampled
+ # intervals rarely is full bases, so sampling using N points will
+ # leave the end of the region unsampled due to remainders for each
+ # interval. To rectify this, a new N is calculated based on the
+ # step size that covers as much of the region as possible.
+ #
+ # However, this still leaves some of the region unsampled. This
+ # could be addressed by repeatedly sampling remainder using a
+ # smaller and smaller step_size, but that would require iteratively
+ # going to BBI, which could be time consuming.
+ #
+
+ # Start with N samples.
+ num_points = N
+ step_size = ( end - start ) / num_points
+ # Add additional points to sample in the remainder not covered by
+ # the initial N samples.
+ remainder_start = start + step_size * num_points
+ additional_points = ( end - remainder_start ) / step_size
+ num_points += additional_points
+
+ result = summarize_region( bbi, chrom, start, end, num_points )
+
+ # Cleanup and return.
f.close()
-
- result = []
-
- if summary:
- #mean = summary.sum_data / summary.valid_count
-
- ## Standard deviation by bin, not yet used
- ## var = summary.sum_squares - mean
- ## var /= minimum( valid_count - 1, 1 )
- ## sd = sqrt( var )
-
- pos = start
- step_size = (end - start) / num_points
-
- for i in range( num_points ):
- result.append( (pos, float_nan( summary.sum_data[i] / summary.valid_count[i] ) ) )
- pos += step_size
-
return { 'data': result }
class BigBedDataProvider( BBIDataProvider ):
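The sampling arithmetic in the rewritten provider deserves a worked example. For a large region the code starts with N = 1000 points, computes an integer step_size, then adds extra points to cover the tail that integer division would otherwise leave unsampled. With made-up region bounds (Python 2 integer division, as in the code above):

    N = 1000
    start, end = 10000, 133456          # hypothetical 123,456-base region

    num_points = N
    step_size = (end - start) / num_points            # 123456 / 1000 = 123
    remainder_start = start + step_size * num_points  # 133000; 456 bases still uncovered
    additional_points = (end - remainder_start) / step_size   # 456 / 123 = 3
    num_points += additional_points                   # 1003 points requested from BBI

    print(num_points, step_size)  # (1003, 123)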
diff -r 7387d61dfd59ca588bb84a57272beed1bb930abb -r a42eae47ae877073a077527463beb32e251d7035 lib/galaxy/web/controllers/admin_toolshed.py
--- a/lib/galaxy/web/controllers/admin_toolshed.py
+++ b/lib/galaxy/web/controllers/admin_toolshed.py
@@ -525,13 +525,8 @@
# Get the tool_shed_repository from one of the tool_dependencies.
message = ''
tool_shed_repository = tool_dependencies[ 0 ].tool_shed_repository
- work_dir = tempfile.mkdtemp()
# Get the tool_dependencies.xml file from the repository.
- tool_dependencies_config = get_config_from_repository( trans.app,
- 'tool_dependencies.xml',
- tool_shed_repository,
- tool_shed_repository.changeset_revision,
- work_dir )
+ tool_dependencies_config = get_config_from_disk( 'tool_dependencies.xml', tool_shed_repository.repo_path( trans.app ) )
installed_tool_dependencies = handle_tool_dependencies( app=trans.app,
tool_shed_repository=tool_shed_repository,
tool_dependencies_config=tool_dependencies_config,
@@ -539,10 +534,6 @@
for installed_tool_dependency in installed_tool_dependencies:
if installed_tool_dependency.status == trans.app.model.ToolDependency.installation_status.ERROR:
message += ' %s' % installed_tool_dependency.error_message
- try:
- shutil.rmtree( work_dir )
- except:
- pass
tool_dependency_ids = [ trans.security.encode_id( td.id ) for td in tool_dependencies ]
if message:
status = 'error'
@@ -663,11 +654,7 @@
tool_shed_repository,
trans.model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES )
# Get the tool_dependencies.xml file from the repository.
- tool_dependencies_config = get_config_from_repository( trans.app,
- 'tool_dependencies.xml',
- tool_shed_repository,
- tool_shed_repository.installed_changeset_revision,
- work_dir )
+ tool_dependencies_config = get_config_from_disk( 'tool_dependencies.xml', relative_install_dir )
installed_tool_dependencies = handle_tool_dependencies( app=trans.app,
tool_shed_repository=tool_shed_repository,
tool_dependencies_config=tool_dependencies_config,
@@ -691,24 +678,19 @@
repository_clone_url=repository_clone_url,
relative_install_dir=relative_install_dir,
repository_files_dir=None,
- resetting_all_metadata_on_repository=False )
+ resetting_all_metadata_on_repository=False,
+ webapp='galaxy' )
tool_shed_repository.metadata = metadata_dict
trans.sa_session.add( tool_shed_repository )
trans.sa_session.flush()
if 'tool_dependencies' in metadata_dict and not reinstalling:
- tool_dependencies = create_tool_dependency_objects( trans.app, tool_shed_repository, tool_shed_repository.installed_changeset_revision, set_status=True )
+ tool_dependencies = create_tool_dependency_objects( trans.app, tool_shed_repository, relative_install_dir, set_status=True )
if 'tools' in metadata_dict:
tool_panel_dict = generate_tool_panel_dict_for_new_install( metadata_dict[ 'tools' ], tool_section )
repository_tools_tups = get_repository_tools_tups( trans.app, metadata_dict )
if repository_tools_tups:
# Handle missing data table entries for tool parameters that are dynamically generated select lists.
- work_dir = tempfile.mkdtemp()
- repository_tools_tups = handle_missing_data_table_entry( trans.app,
- tool_shed_repository,
- tool_shed_repository.changeset_revision,
- tool_path,
- repository_tools_tups,
- work_dir )
+ repository_tools_tups = handle_missing_data_table_entry( trans.app, relative_install_dir, tool_path, repository_tools_tups )
# Handle missing index files for tool parameters that are dynamically generated select lists.
sample_files = metadata_dict.get( 'sample_files', [] )
repository_tools_tups, sample_files_copied = handle_missing_index_file( trans.app, tool_path, sample_files, repository_tools_tups )
@@ -723,22 +705,13 @@
shed_tool_conf=shed_tool_conf,
tool_panel_dict=tool_panel_dict,
new_install=True )
- try:
- shutil.rmtree( work_dir )
- except:
- pass
if 'datatypes' in metadata_dict:
tool_shed_repository.status = trans.model.ToolShedRepository.installation_status.LOADING_PROPRIETARY_DATATYPES
if not tool_shed_repository.includes_datatypes:
tool_shed_repository.includes_datatypes = True
trans.sa_session.add( tool_shed_repository )
trans.sa_session.flush()
- work_dir = tempfile.mkdtemp()
- datatypes_config = get_config_from_repository( trans.app,
- 'datatypes_conf.xml',
- tool_shed_repository,
- tool_shed_repository.changeset_revision,
- work_dir )
+ datatypes_config = get_config_from_disk( 'datatypes_conf.xml', relative_install_dir )
# Load data types required by tools.
converter_path, display_path = alter_config_and_load_prorietary_datatypes( trans.app, datatypes_config, relative_install_dir, override=False )
if converter_path or display_path:
@@ -756,10 +729,6 @@
if display_path:
# Load proprietary datatype display applications
trans.app.datatypes_registry.load_display_applications( installed_repository_dict=repository_dict )
- try:
- shutil.rmtree( work_dir )
- except:
- pass
@web.expose
@web.require_admin
def manage_repository( self, trans, **kwd ):
@@ -1507,7 +1476,7 @@
trans.sa_session.flush()
# Create tool_dependency records if necessary.
if 'tool_dependencies' in metadata_dict:
- tool_dependencies = create_tool_dependency_objects( trans.app, repository, repository.changeset_revision, set_status=False )
+ tool_dependencies = create_tool_dependency_objects( trans.app, repository, relative_install_dir, set_status=False )
message = "The installed repository named '%s' has been updated to change set revision '%s'. " % ( name, latest_changeset_revision )
# See if any tool dependencies can be installed.
shed_tool_conf, tool_path, relative_install_dir = get_tool_panel_config_tool_path_install_dir( trans.app, repository )
diff -r 7387d61dfd59ca588bb84a57272beed1bb930abb -r a42eae47ae877073a077527463beb32e251d7035 lib/galaxy/webapps/community/config.py
--- a/lib/galaxy/webapps/community/config.py
+++ b/lib/galaxy/webapps/community/config.py
@@ -87,6 +87,7 @@
self.server_name = ''
self.job_manager = ''
self.default_job_handlers = []
+ self.default_cluster_job_runner = 'local:///'
self.job_handlers = []
self.tool_handlers = []
self.tool_runners = []
diff -r 7387d61dfd59ca588bb84a57272beed1bb930abb -r a42eae47ae877073a077527463beb32e251d7035 lib/galaxy/webapps/community/controllers/admin.py
--- a/lib/galaxy/webapps/community/controllers/admin.py
+++ b/lib/galaxy/webapps/community/controllers/admin.py
@@ -696,9 +696,14 @@
owner = repository_name_owner_list[ 1 ]
repository = get_repository_by_name_and_owner( trans, name, owner )
try:
- reset_all_metadata_on_repository( trans, trans.security.encode_id( repository.id ) )
- log.debug( "Successfully reset metadata on repository %s" % repository.name )
- successful_count += 1
+ invalid_file_tups = reset_all_metadata_on_repository( trans, trans.security.encode_id( repository.id ) )
+ if invalid_file_tups:
+ message = generate_message_for_invalid_tools( invalid_file_tups, repository, None, as_html=False )
+ log.debug( message )
+ unsuccessful_count += 1
+ else:
+ log.debug( "Successfully reset metadata on repository %s" % repository.name )
+ successful_count += 1
except Exception, e:
log.debug( "Error attempting to reset metadata on repository '%s': %s" % ( repository.name, str( e ) ) )
unsuccessful_count += 1
diff -r 7387d61dfd59ca588bb84a57272beed1bb930abb -r a42eae47ae877073a077527463beb32e251d7035 lib/galaxy/webapps/community/controllers/common.py
--- a/lib/galaxy/webapps/community/controllers/common.py
+++ b/lib/galaxy/webapps/community/controllers/common.py
@@ -277,6 +277,41 @@
return '%s://%s%s/repos/%s/%s' % ( protocol, username, base, repository.user.username, repository.name )
else:
return '%s/repos/%s/%s' % ( base_url, repository.user.username, repository.name )
+def generate_message_for_invalid_tools( invalid_file_tups, repository, metadata_dict, as_html=True ):
+ if as_html:
+ new_line = '<br/>'
+ bold_start = '<b>'
+ bold_end = '</b>'
+ else:
+ new_line = '\n'
+ bold_start = ''
+ bold_end = ''
+ message = ''
+ if metadata_dict:
+ message += "Metadata was defined for some items in revision '%s'. " % str( repository.tip )
+ message += "Correct the following problems if necessary and reset metadata.%s" % new_line
+ else:
+ message += "Metadata cannot be defined for revision '%s' so this revision cannot be automatically " % str( repository.tip )
+ message += "installed into a local Galaxy instance. Correct the following problems and reset metadata.%s" % new_line
+ for itc_tup in invalid_file_tups:
+ tool_file, exception_msg = itc_tup
+ if exception_msg.find( 'No such file or directory' ) >= 0:
+ exception_items = exception_msg.split()
+ missing_file_items = exception_items[ 7 ].split( '/' )
+ missing_file = missing_file_items[ -1 ].rstrip( '\'' )
+ if missing_file.endswith( '.loc' ):
+ sample_ext = '%s.sample' % missing_file
+ else:
+ sample_ext = missing_file
+ correction_msg = "This file refers to a missing file %s%s%s. " % ( bold_start, str( missing_file ), bold_end )
+ correction_msg += "Upload a file named %s%s%s to the repository to correct this error." % ( bold_start, sample_ext, bold_end )
+ else:
+ if as_html:
+ correction_msg = exception_msg
+ else:
+ correction_msg = exception_msg.replace( '<br/>', new_line ).replace( '<b>', bold_start ).replace( '</b>', bold_end )
+ message += "%s%s%s - %s%s" % ( bold_start, tool_file, bold_end, correction_msg, new_line )
+ return message
def generate_tool_guid( trans, repository, tool ):
"""
Generate a guid for the received tool. The form of the guid is
@@ -795,7 +830,8 @@
repository_clone_url=repository_clone_url,
relative_install_dir=repo_dir,
repository_files_dir=work_dir,
- resetting_all_metadata_on_repository=True )
+ resetting_all_metadata_on_repository=True,
+ webapp='community' )
if current_metadata_dict:
if not metadata_changeset_revision and not metadata_dict:
# We're at the first change set in the change log.
@@ -853,6 +889,7 @@
clean_repository_metadata( trans, id, changeset_revisions )
# Set tool version information for all downloadable changeset revisions. Get the list of changeset revisions from the changelog.
reset_all_tool_versions( trans, id, repo )
+ return invalid_file_tups
def set_repository_metadata( trans, repository, content_alert_str='', **kwd ):
"""
Set metadata using the repository's current disk files, returning specific error messages (if any) to alert the repository owner that the changeset
@@ -885,7 +922,8 @@
repository_clone_url=repository_clone_url,
relative_install_dir=repo_dir,
repository_files_dir=None,
- resetting_all_metadata_on_repository=False )
+ resetting_all_metadata_on_repository=False,
+ webapp='community' )
if metadata_dict:
downloadable = is_downloadable( metadata_dict )
repository_metadata = None
@@ -929,27 +967,7 @@
message += "be defined so this revision cannot be automatically installed into a local Galaxy instance."
status = "error"
if invalid_file_tups:
- if metadata_dict:
- message += "Metadata was defined for some items in revision '%s'. " % str( repository.tip )
- message += "Correct the following problems if necessary and reset metadata.<br/>"
- else:
- message += "Metadata cannot be defined for revision '%s' so this revision cannot be automatically " % str( repository.tip )
- message += "installed into a local Galaxy instance. Correct the following problems and reset metadata.<br/>"
- for itc_tup in invalid_file_tups:
- tool_file, exception_msg = itc_tup
- if exception_msg.find( 'No such file or directory' ) >= 0:
- exception_items = exception_msg.split()
- missing_file_items = exception_items[ 7 ].split( '/' )
- missing_file = missing_file_items[ -1 ].rstrip( '\'' )
- if missing_file.endswith( '.loc' ):
- sample_ext = '%s.sample' % missing_file
- else:
- sample_ext = missing_file
- correction_msg = "This file refers to a missing file <b>%s</b>. " % str( missing_file )
- correction_msg += "Upload a file named <b>%s</b> to the repository to correct this error." % sample_ext
- else:
- correction_msg = exception_msg
- message += "<b>%s</b> - %s<br/>" % ( tool_file, correction_msg )
+ message = generate_message_for_invalid_tools( invalid_file_tups, repository, metadata_dict )
status = 'error'
return message, status
def set_repository_metadata_due_to_new_tip( trans, repository, content_alert_str=None, **kwd ):
diff -r 7387d61dfd59ca588bb84a57272beed1bb930abb -r a42eae47ae877073a077527463beb32e251d7035 lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -458,7 +458,10 @@
metadata = downloadable_revision.metadata
invalid_tools = metadata.get( 'invalid_tools', [] )
for invalid_tool_config in invalid_tools:
- invalid_tools_dict[ invalid_tool_config ] = ( repository.id, repository.name, downloadable_revision.changeset_revision )
+ invalid_tools_dict[ invalid_tool_config ] = ( repository.id,
+ repository.name,
+ repository.user.username,
+ downloadable_revision.changeset_revision )
else:
for repository in trans.sa_session.query( trans.model.Repository ) \
.filter( and_( trans.model.Repository.table.c.deleted == False,
@@ -468,7 +471,10 @@
metadata = downloadable_revision.metadata
invalid_tools = metadata.get( 'invalid_tools', [] )
for invalid_tool_config in invalid_tools:
- invalid_tools_dict[ invalid_tool_config ] = ( repository.id, repository.name, downloadable_revision.changeset_revision )
+ invalid_tools_dict[ invalid_tool_config ] = ( repository.id,
+ repository.name,
+ repository.user.username,
+ downloadable_revision.changeset_revision )
return trans.fill_template( '/webapps/community/repository/browse_invalid_tools.mako',
cntrller=cntrller,
invalid_tools_dict=invalid_tools_dict,
@@ -1373,6 +1379,7 @@
return trans.response.send_redirect( url )
@web.expose
def load_invalid_tool( self, trans, repository_id, tool_config, changeset_revision, **kwd ):
+ # FIXME: loading an invalid tool should display an appropriate message as to why the tool is invalid. This worked until recently.
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'error' )
@@ -1752,9 +1759,14 @@
status=status )
@web.expose
def reset_all_metadata( self, trans, id, **kwd ):
- reset_all_metadata_on_repository( trans, id, **kwd )
- message = "All repository metadata has been reset."
- status = 'done'
+ invalid_file_tups = reset_all_metadata_on_repository( trans, id, **kwd )
+ if invalid_file_tups:
+ repository = get_repository( trans, id )
+ message = generate_message_for_invalid_tools( invalid_file_tups, repository, None )
+ status = 'error'
+ else:
+ message = "All repository metadata has been reset."
+ status = 'done'
return trans.response.send_redirect( web.url_for( controller='repository',
action='manage_repository',
id=id,
diff -r 7387d61dfd59ca588bb84a57272beed1bb930abb -r a42eae47ae877073a077527463beb32e251d7035 run.sh
--- a/run.sh
+++ b/run.sh
@@ -36,6 +36,10 @@
fi
done
+if [ -n "$GALAXY_UNIVERSE_CONFIG_DIR" ]; then
+ python ./scripts/build_universe_config.py "$GALAXY_UNIVERSE_CONFIG_DIR"
+fi
+
# explicitly attempt to fetch eggs before running
FETCH_EGGS=1
for arg in "$@"; do
diff -r 7387d61dfd59ca588bb84a57272beed1bb930abb -r a42eae47ae877073a077527463beb32e251d7035 scripts/build_universe_config.py
--- /dev/null
+++ b/scripts/build_universe_config.py
@@ -0,0 +1,26 @@
+from ConfigParser import ConfigParser
+from os import listdir
+from os.path import join
+from re import match
+from sys import argv
+
+
+def merge():
+ "Merges all .ini files in a specified directory into ./universe.ini"
+ if len(argv) < 2:
+ message = "%s: Must specify directory to merge configuration files from." % argv[0]
+ raise Exception(message)
+ conf_directory = argv[1]
+ conf_files = [f for f in listdir(conf_directory) if match(r'.*\.ini', f)]
+ conf_files.sort()
+
+ parser = ConfigParser()
+ for conf_file in conf_files:
+ parser.read([join(conf_directory, conf_file)])
+ ## TODO: Expand environment variables here, that would
+ ## also make Galaxy much easier to configure.
+
+ parser.write(open("universe_wsgi.ini", 'w'))
+
+if __name__ == '__main__':
+ merge()
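For context on how this is used: run.sh (see the hunk above) invokes the script with $GALAXY_UNIVERSE_CONFIG_DIR, so every *.ini fragment in that directory is merged, in sorted filename order, into universe_wsgi.ini before Galaxy starts. Because the same ConfigParser reads each file in turn, a later fragment overrides an earlier one for the same option. A tiny illustration of that override behavior with in-memory fragments (the file names and option are hypothetical):

    from ConfigParser import ConfigParser
    from StringIO import StringIO

    parser = ConfigParser()
    # Simulate reading '10_base.ini' and then '20_site.ini' from the config dir.
    parser.readfp(StringIO("[app:main]\nobject_store = disk\n"))
    parser.readfp(StringIO("[app:main]\nobject_store = swift\n"))
    print(parser.get('app:main', 'object_store'))  # swift -- the later fragment wins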
diff -r 7387d61dfd59ca588bb84a57272beed1bb930abb -r a42eae47ae877073a077527463beb32e251d7035 scripts/db_shell.py
--- /dev/null
+++ b/scripts/db_shell.py
@@ -0,0 +1,85 @@
+# This script allows easy access to Galaxy's database layer via the
+# Galaxy models. For example:
+# % python -i scripts/db_shell.py
+# >>> new_user = User("admin@gmail.com")
+# >>> new_user.set_password
+# >>> sa_session.add(new_user)
+# >>> sa_session.commit()
+# >>> sa_session.query(User).all()
+#
+# You can also use this script as a library, for instance see https://gist.github.com/1979583q
+# TODO: This script overlaps a lot with manage_db.py and create_db.py,
+# these should maybe be refactored to remove duplication.
+import sys, os.path, logging
+
+new_path = [ os.path.join( os.getcwd(), "lib" ) ]
+new_path.extend( sys.path[1:] ) # remove scripts/ from the path
+sys.path = new_path
+
+from galaxy import eggs
+
+import pkg_resources
+pkg_resources.require( "sqlalchemy-migrate" )
+pkg_resources.require( "SQLAlchemy" )
+
+from ConfigParser import SafeConfigParser
+
+log = logging.getLogger( __name__ )
+
+if sys.argv[-1] in [ 'community' ]:
+ # Need to pop the last arg so the command line args will be correct
+ # for sqlalchemy-migrate
+ webapp = sys.argv.pop()
+ config_file = 'community_wsgi.ini'
+ repo = 'lib/galaxy/webapps/community/model/migrate'
+else:
+ # Poor man's optparse
+ config_file = 'universe_wsgi.ini'
+ if '-c' in sys.argv:
+ pos = sys.argv.index( '-c' )
+ sys.argv.pop(pos)
+ config_file = sys.argv.pop( pos )
+ if not os.path.exists( config_file ):
+ print "Galaxy config file does not exist (hint: use '-c config.ini' for non-standard locations): %s" % config_file
+ sys.exit( 1 )
+ repo = 'lib/galaxy/model/migrate'
+
+cp = SafeConfigParser()
+cp.read( config_file )
+
+if cp.has_option( "app:main", "database_connection" ):
+ db_url = cp.get( "app:main", "database_connection" )
+elif cp.has_option( "app:main", "database_file" ):
+ db_url = "sqlite:///%s?isolation_level=IMMEDIATE" % cp.get( "app:main", "database_file" )
+else:
+ db_url = "sqlite:///./database/universe.sqlite?isolation_level=IMMEDIATE"
+
+dialect_to_egg = {
+ "sqlite" : "pysqlite>=2",
+ "postgres" : "psycopg2",
+ "mysql" : "MySQL_python"
+}
+dialect = ( db_url.split( ':', 1 ) )[0]
+try:
+ egg = dialect_to_egg[dialect]
+ try:
+ pkg_resources.require( egg )
+ log.debug( "%s egg successfully loaded for %s dialect" % ( egg, dialect ) )
+ except:
+ # If the module is in the path elsewhere (i.e. non-egg), it'll still load.
+ log.warning( "%s egg not found, but an attempt will be made to use %s anyway" % ( egg, dialect ) )
+except KeyError:
+ # Let this go, it could possibly work with db's we don't support
+ log.error( "database_connection contains an unknown SQLAlchemy database dialect: %s" % dialect )
+
+# Setup DB scripting environment
+from sqlalchemy import *
+from sqlalchemy.orm import *
+from sqlalchemy.exc import *
+
+engine = create_engine(db_url, echo=True)
+db_session = scoped_session( sessionmaker( bind = engine ) )
+from galaxy.model.mapping import context as sa_session
+sa_session.bind = engine
+from galaxy.model import *
+
diff -r 7387d61dfd59ca588bb84a57272beed1bb930abb -r a42eae47ae877073a077527463beb32e251d7035 static/scripts/mvc/history.js
--- /dev/null
+++ b/static/scripts/mvc/history.js
@@ -0,0 +1,145 @@
+/*
+TODO:
+ as always: where does the model end and the view begin?
+ HistoryPanel
+ HistoryCollection: (collection of histories: 'Saved Histories')
+
+CASES:
+ logged-in/NOT
+*/
+//==============================================================================
+var HistoryItem = BaseModel.extend({
+ // a single history structure
+ // from: http://localhost:8080/api/histories/f2db41e1fa331b3e/contents/f2db41e1fa331…
+ /*
+ {
+ "data_type": "fastq",
+ "deleted": false,
+ "download_url": "/datasets/f2db41e1fa331b3e/display?to_ext=fastq",
+ "file_size": 226297533,
+ "genome_build": "?",
+ "id": "f2db41e1fa331b3e",
+ "metadata_data_lines": null,
+ "metadata_dbkey": "?",
+ "metadata_sequences": null,
+ "misc_blurb": "215.8 MB",
+ "misc_info": "uploaded fastq file",
+ "model_class": "HistoryDatasetAssociation",
+ "name": "LTCF-2-19_GTGAAA_L001_R1_001.fastq",
+ "state": "ok",
+ "visible": true
+ }
+ */
+
+ display : function(){},
+ edit_attr : function(){},
+ delete : function(){},
+ download : function(){},
+ details : function(){},
+ rerun : function(){},
+ tags : function(){},
+ annotations : function(){},
+ peek : function(){},
+});
+
+//..............................................................................
+var HistoryItemView = BaseView.extend({
+ // view for History model used in HistoryPanelView
+ tagName : "div",
+ className : "historyItemContainer",
+
+ icons : {
+ display : 'path to icon',
+ edit_attr : 'path to icon',
+ delete : 'path to icon',
+ download : 'path to icon',
+ details : 'path to icon',
+ rerun : 'path to icon',
+ tags : 'path to icon',
+ annotations : 'path to icon',
+ },
+
+ render : function(){
+ this.$el.append( 'div' )
+ },
+
+});
+
+
+
+//==============================================================================
+var History = Backbone.Collection.extend({
+ // a collection of HistoryItems
+
+ // from: http://localhost:8080/api/histories/f2db41e1fa331b3e
+ /*
+ {
+ "contents_url": "/api/histories/f2db41e1fa331b3e/contents",
+ "id": "f2db41e1fa331b3e",
+ "name": "one",
+ "state": "ok",
+ "state_details": {
+ "discarded": 0,
+ "empty": 0,
+ "error": 0,
+ "failed_metadata": 0,
+ "new": 0,
+ "ok": 4,
+ "queued": 0,
+ "running": 0,
+ "setting_metadata": 0,
+ "upload": 0
+ }
+ }
+ */
+
+ // from: http://localhost:8080/api/histories/f2db41e1fa331b3e/contents
+ // (most are replicated in HistoryItem)
+ /*
+ [
+ {
+ "id": "f2db41e1fa331b3e",
+ "name": "LTCF-2-19_GTGAAA_L001_R1_001.fastq",
+ "type": "file",
+ "url": "/api/histories/f2db41e1fa331b3e/contents/f2db41e1fa331b3e"
+ },
+ {
+ "id": "f597429621d6eb2b",
+ "name": "LTCF-2-19_GTGAAA_L001_R2_001.fastq",
+ "type": "file",
+ "url": "/api/histories/f2db41e1fa331b3e/contents/f597429621d6eb2b"
+ },
+ {
+ "id": "1cd8e2f6b131e891",
+ "name": "FASTQ Groomer on data 1",
+ "type": "file",
+ "url": "/api/histories/f2db41e1fa331b3e/contents/1cd8e2f6b131e891"
+ },
+ {
+ "id": "ebfb8f50c6abde6d",
+ "name": "FASTQ Groomer on data 2",
+ "type": "file",
+ "url": "/api/histories/f2db41e1fa331b3e/contents/ebfb8f50c6abde6d"
+ },
+ {
+ "id": "33b43b4e7093c91f",
+ "name": "Sa.04-02981.fasta",
+ "type": "file",
+ "url": "/api/histories/f2db41e1fa331b3e/contents/33b43b4e7093c91f"
+ }
+ ]
+ */
+});
+
+//..............................................................................
+var HistoryCollectionView = BaseView.extend({
+ // view for the HistoryCollection (as per current right hand panel)
+ tagName : "body",
+ className : "historyCollection",
+
+ render : function(){
+
+ },
+
+});
+
diff -r 7387d61dfd59ca588bb84a57272beed1bb930abb -r a42eae47ae877073a077527463beb32e251d7035 templates/base_panels.mako
--- a/templates/base_panels.mako
+++ b/templates/base_panels.mako
@@ -54,13 +54,17 @@
var galaxy_paths = new GalaxyPaths({
root_path: '${h.url_for( "/" )}',
image_path: '${h.url_for( "/static/images" )}',
+
tool_url: '${h.url_for( controller="/api/tools" )}',
+ history_url: '${h.url_for( controller="/api/histories" )}',
+
data_url: '${h.url_for( controller="/tracks", action="data" )}',
raw_data_url: '${h.url_for( controller="/tracks", action="raw_data" )}',
converted_datasets_state_url: '${h.url_for( controller="/tracks", action="converted_datasets_state" )}',
dataset_state_url: '${h.url_for( controller="/tracks", action="dataset_state" )}',
+ sweepster_url: '${h.url_for( controller="/tracks", action="sweepster" )}',
+
visualization_url: '${h.url_for( controller="/visualization", action="save" )}',
- sweepster_url: '${h.url_for( controller="/tracks", action="sweepster" )}'
});
</script></%def>
diff -r 7387d61dfd59ca588bb84a57272beed1bb930abb -r a42eae47ae877073a077527463beb32e251d7035 templates/webapps/community/repository/browse_invalid_tools.mako
--- a/templates/webapps/community/repository/browse_invalid_tools.mako
+++ b/templates/webapps/community/repository/browse_invalid_tools.mako
@@ -13,10 +13,11 @@
<tr><th>Tool config</th><th>Repository name</th>
+ <th>Repository owner</th><th>Changeset revision</th></tr>
%for invalid_tool_config, repository_tup in invalid_tools_dict.items():
- <% repository_id, repository_name, changeset_revision = repository_tup %>
+ <% repository_id, repository_name, repository_owner, changeset_revision = repository_tup %><tr><td><a class="view-info" href="${h.url_for( controller='repository', action='load_invalid_tool', repository_id=trans.security.encode_id( repository_id ), tool_config=invalid_tool_config, changeset_revision=changeset_revision, webapp=webapp )}">
@@ -24,6 +25,7 @@
</a></td><td>${repository_name}</td>
+ <td>${repository_owner}</td><td>${changeset_revision}</td></tr>
%endfor
diff -r 7387d61dfd59ca588bb84a57272beed1bb930abb -r a42eae47ae877073a077527463beb32e251d7035 test-data/bwa_wrapper_out2.sam
--- a/test-data/bwa_wrapper_out2.sam
+++ b/test-data/bwa_wrapper_out2.sam
@@ -1,30 +1,30 @@
-seq1 16 phiX174 322 25 36M * 0 0 GATATTTTAAAGGAGCGTGGATTACTATCTGAGTCC B&&I13A$G$*%$IIIIIII9(.+5$IIIIIII#II XT:A:U NM:i:2 X0:i:1 XM:i:2 XO:i:0 XG:i:0 MD:Z:2C8A24
-seq10 0 phiX174 4149 37 17M1D19M * 0 0 ATTCTTTCTTTTCGTATCAGGGCGTTGAGTTCGATA IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII XT:A:U NM:i:2 X0:i:1 XM:i:1 XO:i:1 XG:i:1 MD:Z:5G11^G19
-seq11 0 phiX174 4072 37 18M1D18M * 0 0 GCATTTCTACTCCTTCTCATCCCCAATGCTTGGCTT II#IIIIIII$5+.(9IIIIIII$%*$G$A31I&&B XT:A:U NM:i:2 X0:i:1 XM:i:1 XO:i:1 XG:i:1 MD:Z:12T5^A18
+seq1 16 phiX174 322 25 36M * 0 0 GATATTTTAAAGGAGCGTGGATTACTATCTGAGTCC B&&I13A$G$*%$IIIIIII9(.+5$IIIIIII#II XT:A:U NM:i:2 X0:i:1 X1:i:0 XM:i:2 XO:i:0 XG:i:0 MD:Z:2C8A24
+seq2 0 phiX174 141 37 36M * 0 0 ATTCGACCTATCCTTGCGCAGCTCGAGAAGCTCTTA IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII XT:A:U NM:i:0 X0:i:1 X1:i:0 XM:i:0 XO:i:0 XG:i:0 MD:Z:36
+seq3 0 phiX174 505 37 36M * 0 0 GTAACAAAGTTTGGATTGCTACTGACCGCTCTCGTG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII XT:A:U NM:i:0 X0:i:1 X1:i:0 XM:i:0 XO:i:0 XG:i:0 MD:Z:36
+seq4 4 * 0 0 * * 0 0 AGCCGCTCGTCTTTTATGTAGGTGGTCAACCATTTT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII
+seq5 0 phiX174 4985 25 36M * 0 0 CAGTTATATGGCTTTTGGTTTCTATGTGGCTTAATA IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII XT:A:U NM:i:2 X0:i:1 X1:i:0 XM:i:2 XO:i:0 XG:i:0 MD:Z:13G17A4
+seq6 0 phiX174 925 37 11M1D25M * 0 0 AGGCGCTCGTCTTGGTATGTAGGTGGTCAACAATTT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII XT:A:U NM:i:1 X0:i:1 X1:i:0 XM:i:0 XO:i:1 XG:i:1 MD:Z:11^T25
+seq7 0 phiX174 943 37 13M1I22M * 0 0 TGTAGGTGGTCAACCAATTTTAATTGCAGGGGCTTC IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII XT:A:U NM:i:1 X0:i:1 X1:i:0 XM:i:0 XO:i:1 XG:i:1 MD:Z:35
+seq8 4 * 0 0 * * 0 0 ACACCCGTCCTTTACGTCATGCGCTCTATTCTCTGG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII
+seq9 0 phiX174 2596 37 16M1I19M * 0 0 GCCGCTATTCAGGTTGTTTTCTGTTGGTGCTGATAT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII XT:A:U NM:i:2 X0:i:1 X1:i:0 XM:i:1 XO:i:1 XG:i:1 MD:Z:7A27
+seq10 0 phiX174 4149 37 17M1D19M * 0 0 ATTCTTTCTTTTCGTATCAGGGCGTTGAGTTCGATA IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII XT:A:U NM:i:2 X0:i:1 X1:i:0 XM:i:1 XO:i:1 XG:i:1 MD:Z:5G11^G19
+seq11 0 phiX174 4072 37 18M1D18M * 0 0 GCATTTCTACTCCTTCTCATCCCCAATGCTTGGCTT II#IIIIIII$5+.(9IIIIIII$%*$G$A31I&&B XT:A:U NM:i:2 X0:i:1 X1:i:0 XM:i:1 XO:i:1 XG:i:1 MD:Z:12T5^A18
seq12 4 * 0 0 * * 0 0 CGCGCTTCGATAAAAATGGGATTGGCGTTTCCAACC IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII
seq13 4 * 0 0 * * 0 0 ATTTCTACTCTTTCTCATCCCCAATCCTTGCCTTCC IIIIIIIIIIIIIIIIIIIIIAAIIIIIIIIIIIII
-seq14 0 phiX174 3998 37 21M1D15M * 0 0 CCCTTTTGAATGTCACGCTGATATTTTGACTTTGAG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII XT:A:U NM:i:2 X0:i:1 XM:i:1 XO:i:1 XG:i:1 MD:Z:5C15^T15
+seq14 0 phiX174 3998 37 21M1D15M * 0 0 CCCTTTTGAATGTCACGCTGATATTTTGACTTTGAG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII XT:A:U NM:i:2 X0:i:1 X1:i:0 XM:i:1 XO:i:1 XG:i:1 MD:Z:5C15^T15
seq15 4 * 0 0 * * 0 0 CCAACTTACCAAGGTGGGTTACGAAACGCGACGCCG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII
seq16 4 * 0 0 * * 0 0 TCAGGGTATTAAAAGAGATTATTTTTCTCCAGCCAC IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII
-seq17 0 phiX174 3034 37 19M1D17M * 0 0 GTGATGTGCTTGCTACCGAAACAATACTTTAGGCAT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII XT:A:U NM:i:2 X0:i:1 XM:i:1 XO:i:1 XG:i:1 MD:Z:19^T9G7
+seq17 0 phiX174 3034 37 19M1D17M * 0 0 GTGATGTGCTTGCTACCGAAACAATACTTTAGGCAT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII XT:A:U NM:i:2 X0:i:1 X1:i:0 XM:i:1 XO:i:1 XG:i:1 MD:Z:19^T9G7
seq18 4 * 0 0 * * 0 0 TCAATCCCCCATGCTTGGCCGTTCCATAAGCAGATG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII
seq19 4 * 0 0 * * 0 0 TTCCTGCGCTTAATGCTTGAGCGTCCTGGTGCTGAT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII
-seq2 0 phiX174 141 37 36M * 0 0 ATTCGACCTATCCTTGCGCAGCTCGAGAAGCTCTTA IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII XT:A:U NM:i:0 X0:i:1 XM:i:0 XO:i:0 XG:i:0 MD:Z:36
-seq20 0 phiX174 1082 37 36M * 0 0 CTTATTACCATTTCAACTACTCCGGTTATCGCTGGC IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII XT:A:U NM:i:0 X0:i:1 XM:i:0 XO:i:0 XG:i:0 MD:Z:36
-seq21 0 phiX174 1344 37 15M1D21M * 0 0 CTGATACCAATAAAACCCTAAGCATTTGGTTCAGGG II#IIIIIII$5+.(9IIIIIII$%*$G$A31I&&B XT:A:U NM:i:2 X0:i:1 XM:i:1 XO:i:1 XG:i:1 MD:Z:15^T13T7
+seq20 0 phiX174 1082 37 36M * 0 0 CTTATTACCATTTCAACTACTCCGGTTATCGCTGGC IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII XT:A:U NM:i:0 X0:i:1 X1:i:0 XM:i:0 XO:i:0 XG:i:0 MD:Z:36
+seq21 0 phiX174 1344 37 15M1D21M * 0 0 CTGATACCAATAAAACCCTAAGCATTTGGTTCAGGG II#IIIIIII$5+.(9IIIIIII$%*$G$A31I&&B XT:A:U NM:i:2 X0:i:1 X1:i:0 XM:i:1 XO:i:1 XG:i:1 MD:Z:15^T13T7
seq22 4 * 0 0 * * 0 0 AATCAAACTTACCAAGGGGTTACGACGCGACGCCGT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII
seq23 4 * 0 0 * * 0 0 TGTGCTTCCCCAACTTGATTTAATAACCCTATAGAC IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII
-seq24 0 phiX174 4084 37 17M1I18M * 0 0 TTTCTCAATCCCCAATGCCTTGGCTTCCCTAAGCAG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII XT:A:U NM:i:2 X0:i:1 XM:i:1 XO:i:1 XG:i:1 MD:Z:27A7
-seq25 0 phiX174 520 37 16M1I19M * 0 0 TTGCTACTGACCGCTCTTCGTGCTCGTTGCTGCGTT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII XT:A:U NM:i:2 X0:i:1 XM:i:1 XO:i:1 XG:i:1 MD:Z:26C8
-seq26 0 phiX174 1976 37 36M * 0 0 CCGCGTGAAATTTCTATGAAGGATGTTTTCCGTTCT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII XT:A:U NM:i:0 X0:i:1 XM:i:0 XO:i:0 XG:i:0 MD:Z:36
-seq27 0 phiX174 2598 37 20M1I15M * 0 0 CGCTAATCAAGTTGTTTCTGTTTGGTGCTGATATTG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII XT:A:U NM:i:2 X0:i:1 XM:i:1 XO:i:1 XG:i:1 MD:Z:9G25
+seq24 0 phiX174 4084 37 17M1I18M * 0 0 TTTCTCAATCCCCAATGCCTTGGCTTCCCTAAGCAG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII XT:A:U NM:i:2 X0:i:1 X1:i:0 XM:i:1 XO:i:1 XG:i:1 MD:Z:27A7
+seq25 0 phiX174 520 37 16M1I19M * 0 0 TTGCTACTGACCGCTCTTCGTGCTCGTTGCTGCGTT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII XT:A:U NM:i:2 X0:i:1 X1:i:0 XM:i:1 XO:i:1 XG:i:1 MD:Z:26C8
+seq26 0 phiX174 1976 37 36M * 0 0 CCGCGTGAAATTTCTATGAAGGATGTTTTCCGTTCT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII XT:A:U NM:i:0 X0:i:1 X1:i:0 XM:i:0 XO:i:0 XG:i:0 MD:Z:36
+seq27 0 phiX174 2598 37 20M1I15M * 0 0 CGCTAATCAAGTTGTTTCTGTTTGGTGCTGATATTG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII XT:A:U NM:i:2 X0:i:1 X1:i:0 XM:i:1 XO:i:1 XG:i:1 MD:Z:9G25
seq28 4 * 0 0 * * 0 0 AAAGAGATTATTTGTCGGTCCAGCCACTAAAGTGAG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII
seq29 4 * 0 0 * * 0 0 CAAATTAATGCGCGCTTCGATAATGATTGGGGTATC IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII
-seq3 0 phiX174 505 37 36M * 0 0 GTAACAAAGTTTGGATTGCTACTGACCGCTCTCGTG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII XT:A:U NM:i:0 X0:i:1 XM:i:0 XO:i:0 XG:i:0 MD:Z:36
-seq30 0 phiX174 4091 37 18M1I17M * 0 0 ATCCCCTATGCTTGGCTTACCATAAGCAGATGGATA IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII XT:A:U NM:i:2 X0:i:1 XM:i:1 XO:i:1 XG:i:1 MD:Z:6A28
-seq4 4 * 0 0 * * 0 0 AGCCGCTCGTCTTTTATGTAGGTGGTCAACCATTTT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII
-seq5 0 phiX174 4985 25 36M * 0 0 CAGTTATATGGCTTTTGGTTTCTATGTGGCTTAATA IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII XT:A:U NM:i:2 X0:i:1 XM:i:2 XO:i:0 XG:i:0 MD:Z:13G17A4
-seq6 0 phiX174 925 37 11M1D25M * 0 0 AGGCGCTCGTCTTGGTATGTAGGTGGTCAACAATTT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII XT:A:U NM:i:1 X0:i:1 XM:i:0 XO:i:1 XG:i:1 MD:Z:11^T25
-seq7 0 phiX174 943 37 13M1I22M * 0 0 TGTAGGTGGTCAACCAATTTTAATTGCAGGGGCTTC IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII XT:A:U NM:i:1 X0:i:1 XM:i:0 XO:i:1 XG:i:1 MD:Z:35
-seq8 4 * 0 0 * * 0 0 ACACCCGTCCTTTACGTCATGCGCTCTATTCTCTGG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII
-seq9 0 phiX174 2596 37 16M1I19M * 0 0 GCCGCTATTCAGGTTGTTTTCTGTTGGTGCTGATAT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII XT:A:U NM:i:2 X0:i:1 XM:i:1 XO:i:1 XG:i:1 MD:Z:7A27
+seq30 0 phiX174 4091 37 18M1I17M * 0 0 ATCCCCTATGCTTGGCTTACCATAAGCAGATGGATA IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII XT:A:U NM:i:2 X0:i:1 X1:i:0 XM:i:1 XO:i:1 XG:i:1 MD:Z:6A28
diff -r 7387d61dfd59ca588bb84a57272beed1bb930abb -r a42eae47ae877073a077527463beb32e251d7035 test-data/variant_detection/freebayes/freebayes_out_1.output_trace
--- a/test-data/variant_detection/freebayes/freebayes_out_1.output_trace
+++ /dev/null
@@ -1,8 +0,0 @@
-phiX174,1411,allele,phiX174,phiX174,A,60,100
-phiX174,1412,allele,phiX174,phiX174,G,60,100
-phiX174,1413,allele,phiX174,phiX174,C,60,100
-phiX174,1414,allele,phiX174,phiX174,G,60,100
-phiX174,1415,allele,phiX174,phiX174,C,60,100
-phiX174,1416,allele,phiX174,phiX174,C,60,100
-phiX174,1417,allele,phiX174,phiX174,G,60,100
-phiX174,1418,allele,phiX174,phiX174,T,60,100
diff -r 7387d61dfd59ca588bb84a57272beed1bb930abb -r a42eae47ae877073a077527463beb32e251d7035 test-data/variant_detection/freebayes/freebayes_out_1.vcf.contains
--- a/test-data/variant_detection/freebayes/freebayes_out_1.vcf.contains
+++ /dev/null
@@ -1,2 +0,0 @@
-#CHROM POS ID REF ALT QUAL FILTER INFO FORMAT A
-
diff -r 7387d61dfd59ca588bb84a57272beed1bb930abb -r a42eae47ae877073a077527463beb32e251d7035 test/base/twilltestcase.py
--- a/test/base/twilltestcase.py
+++ b/test/base/twilltestcase.py
@@ -694,11 +694,19 @@
if filename is not None:
local_name = self.get_filename( filename, shed_tool_id=shed_tool_id )
temp_name = self.makeTfname(fname = filename)
- file( temp_name, 'wb' ).write(data)
- if self.keepOutdir > '':
- ofn = os.path.join(self.keepOutdir,os.path.basename(local_name))
- shutil.copy(temp_name,ofn)
- log.debug('## GALAXY_TEST_SAVE=%s. saved %s' % (self.keepOutdir,ofn))
+ file( temp_name, 'wb' ).write( data )
+
+ # if the server's env has GALAXY_TEST_SAVE, save the output file to that dir
+ if self.keepOutdir:
+ ofn = os.path.join( self.keepOutdir, os.path.basename( local_name ) )
+ try:
+ shutil.copy( temp_name, ofn )
+ except Exception, exc:
+ error_log_msg = ( 'TwillTestCase could not save output file %s to %s: ' % ( temp_name, ofn ) )
+ error_log_msg += str( exc )
+ else:
+ log.debug('## GALAXY_TEST_SAVE=%s. saved %s' % ( self.keepOutdir, ofn ) )
+
try:
# have to nest try-except in try-finally to handle 2.4
try:
@@ -729,6 +737,7 @@
self.verify_extra_files_content( extra_files, elem.get( 'id' ), shed_tool_id=shed_tool_id )
except AssertionError, err:
errmsg = 'History item %s different than expected, difference (using %s):\n' % ( hid, compare )
+ errmsg += "( %s v. %s )\n" % ( local_name, temp_name )
errmsg += str( err )
raise AssertionError( errmsg )
finally:
diff -r 7387d61dfd59ca588bb84a57272beed1bb930abb -r a42eae47ae877073a077527463beb32e251d7035 tools/ngs_rna/tophat2_wrapper.xml
--- a/tools/ngs_rna/tophat2_wrapper.xml
+++ b/tools/ngs_rna/tophat2_wrapper.xml
@@ -296,6 +296,9 @@
</inputs><outputs>
+ <data format="tabular" name="fusions" label="${tool.name} on ${on_string}: fusions" from_work_dir="tophat_out/fusions.out">
+ <filter>(params['fusion_search']['do_search'] == 'Yes')</filter>
+ </data><data format="bed" name="insertions" label="${tool.name} on ${on_string}: insertions" from_work_dir="tophat_out/insertions.bed"><actions><conditional name="refGenomeSource.genomeSource">
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: jgoecks: Cleanup for previous commit, 565476ce4f03, mainly to further comment and simplify code and avoid going to index multiple times.
by Bitbucket 15 Aug '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/24c6e925acaf/
changeset: 24c6e925acaf
user: jgoecks
date: 2012-08-16 00:09:48
summary: Cleanup for previous commit, 565476ce4f03, mainly to further comment and simplify code and avoid going to index multiple times.
affected #: 1 file
diff -r 565476ce4f0301d23538d87aeef805edc099badf -r 24c6e925acaf6b0dc8b52126ed295dcec35de2a6 lib/galaxy/visualization/tracks/data_providers.py
--- a/lib/galaxy/visualization/tracks/data_providers.py
+++ b/lib/galaxy/visualization/tracks/data_providers.py
@@ -985,28 +985,31 @@
# To do this, need to increase end to next base and request number of points.
num_points = end - start + 1
end += 1
-
- result = summarize_region( bbi, chrom, start, end, num_points )
else:
#
# The goal is to sample the region between start and end uniformly
- # using N data points. The challenge is that the size of sampled
+ # using ~N data points. The challenge is that the size of sampled
# intervals rarely is full bases, so sampling using N points will
- # leave the end of the region unsampled. To rectify this, samples
- # beyond N are taken at the end of the interval.
+ # leave the end of the region unsampled due to remainders for each
+ # interval. To rectify this, a new N is calculated based on the
+ # step size that covers as much of the region as possible.
+ #
+ # However, this still leaves some of the region unsampled. This
+ # could be addressed by repeatedly sampling remainder using a
+ # smaller and smaller step_size, but that would require iteratively
+ # going to BBI, which could be time consuming.
#
- # Do initial summary.
+ # Start with N samples.
num_points = N
- result = summarize_region( bbi, chrom, start, end, num_points )
-
- # Do summary of remaining part of region.
step_size = ( end - start ) / num_points
- new_start = start + step_size * num_points
- new_num_points = min( ( end - new_start ) / step_size, end - start )
- if new_num_points is not 0:
- result.extend( summarize_region( bbi, chrom, new_start, end, new_num_points ) )
- #TODO: progressively reduce step_size to generate more datapoints.
+ # Add additional points to sample in the remainder not covered by
+ # the initial N samples.
+ remainder_start = start + step_size * num_points
+ additional_points = ( end - remainder_start ) / step_size
+ num_points += additional_points
+
+ result = summarize_region( bbi, chrom, start, end, num_points )
# Cleanup and return.
f.close()
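For readers following the arithmetic, a small stand-alone sketch of the new point calculation (integer division written as //, matching the Python 2 behaviour of the code above; nothing here touches BBI itself):

def plan_samples( start, end, N=1000 ):
    # First pass: N samples at an integer step size, which can leave a
    # remainder at the end of the region unsampled.
    num_points = N
    step_size = ( end - start ) // num_points
    # Ask for additional points so the request also covers that remainder.
    remainder_start = start + step_size * num_points
    additional_points = ( end - remainder_start ) // step_size
    return num_points + additional_points, step_size

print( plan_samples( 0, 10500 ) )   # (1050, 10): 1050 points of width 10 cover the region exactly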
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: jgoecks: Rewrite sampling code for BBI data provider to handle (a) boundary cases during base-level resolution and (b) remainder of region not sampled during first pass.
by Bitbucket 15 Aug '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/565476ce4f03/
changeset: 565476ce4f03
user: jgoecks
date: 2012-08-15 23:49:39
summary: Rewrite sampling code for BBI data provider to handle (a) boundary cases during base-level resolution and (b) remainder of region not sampled during first pass.
affected #: 1 file
diff -r 2531e085f2625b60135a6b4972f125e22a4fd354 -r 565476ce4f0301d23538d87aeef805edc099badf lib/galaxy/visualization/tracks/data_providers.py
--- a/lib/galaxy/visualization/tracks/data_providers.py
+++ b/lib/galaxy/visualization/tracks/data_providers.py
@@ -947,55 +947,69 @@
return dict( data=dict( min=summary.min_val[0], max=summary.max_val[0], mean=mean, sd=sd ) )
- # The following seems not to work very well, for example it will only return one
- # data point if the tile is 1280px wide. Not sure what the intent is.
+ # Sample from region using approximately this many samples.
+ N = 1000
- # The first zoom level for BBI files is 640. If too much is requested, it will look at each block instead
- # of summaries. The calculation done is: zoom <> (end-start)/num_points/2.
- # Thus, the optimal number of points is (end-start)/num_points/2 = 640
- # num_points = (end-start) / 1280
- #num_points = (end-start) / 1280
- #if num_points < 1:
- # num_points = end - start
- #else:
- # num_points = min(num_points, 500)
+ def summarize_region( bbi, chrom, start, end, num_points ):
+ '''
+ Returns results from summarizing a region using num_points.
+ NOTE: num_points cannot be greater than end - start or BBI
+ will return None for all positions.
+ '''
+ result = []
- # For now, we'll do 1000 data points by default. However, the summaries
- # don't seem to work when a summary pixel corresponds to less than one
- # datapoint, so we prevent that.
+ # Get summary; this samples at intervals of length
+ # (end - start)/num_points -- i.e. drops any fractional component
+ # of interval length.
+ summary = bbi.summarize( chrom, start, end, num_points )
+ if summary:
+ #mean = summary.sum_data / summary.valid_count
+
+ ## Standard deviation by bin, not yet used
+ ## var = summary.sum_squares - mean
+ ## var /= minimum( valid_count - 1, 1 )
+ ## sd = sqrt( var )
+
+ pos = start
+ step_size = (end - start) / num_points
- # FIXME: need to choose the number of points to maximize coverage of the area.
- # It appears that BBI calculates points using intervals of
- # floor( num_points / end - start )
- # In some cases, this prevents sampling near the end of the interval,
- # especially when (a) the total interval is small ( < 20-30Kb) and (b) the
- # computed interval size has a large fraction, e.g. 14.7 or 35.8
- num_points = min( 1000, end - start )
+ for i in range( num_points ):
+ result.append( (pos, float_nan( summary.sum_data[i] / summary.valid_count[i] ) ) )
+ pos += step_size
- # HACK to address the FIXME above; should generalize.
- if end - start <= 2000:
- num_points = end - start
+ return result
- summary = bbi.summarize( chrom, start, end, num_points )
+ # Approach is different depending on region size.
+ if end - start < N:
+ # Get values for individual bases in region, including start and end.
+ # To do this, need to increase end to next base and request number of points.
+ num_points = end - start + 1
+ end += 1
+
+ result = summarize_region( bbi, chrom, start, end, num_points )
+ else:
+ #
+ # The goal is to sample the region between start and end uniformly
+ # using N data points. The challenge is that the size of sampled
+ # intervals rarely is full bases, so sampling using N points will
+ # leave the end of the region unsampled. To rectify this, samples
+ # beyond N are taken at the end of the interval.
+ #
+
+ # Do initial summary.
+ num_points = N
+ result = summarize_region( bbi, chrom, start, end, num_points )
+
+ # Do summary of remaining part of region.
+ step_size = ( end - start ) / num_points
+ new_start = start + step_size * num_points
+ new_num_points = min( ( end - new_start ) / step_size, end - start )
+ if new_num_points is not 0:
+ result.extend( summarize_region( bbi, chrom, new_start, end, new_num_points ) )
+ #TODO: progressively reduce step_size to generate more datapoints.
+
+ # Cleanup and return.
f.close()
-
- result = []
-
- if summary:
- #mean = summary.sum_data / summary.valid_count
-
- ## Standard deviation by bin, not yet used
- ## var = summary.sum_squares - mean
- ## var /= minimum( valid_count - 1, 1 )
- ## sd = sqrt( var )
-
- pos = start
- step_size = (end - start) / num_points
-
- for i in range( num_points ):
- result.append( (pos, float_nan( summary.sum_data[i] / summary.valid_count[i] ) ) )
- pos += step_size
-
return { 'data': result }
class BigBedDataProvider( BBIDataProvider ):
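A tiny sketch of the branch that chooses between base-level and summary-level sampling, mirroring the names used in the hunk above (only the bookkeeping is shown; the bbi handle and summarize_region are omitted):

def choose_sampling( start, end, N=1000 ):
    if end - start < N:
        # Small region: one point per base, including the final base,
        # so end is bumped by one before summarizing.
        num_points = end - start + 1
        end += 1
    else:
        num_points = N
    return start, end, num_points

print( choose_sampling( 100, 150 ) )   # (100, 151, 51)   -> base-level resolution
print( choose_sampling( 0, 10500 ) )   # (0, 10500, 1000) -> ~N summary points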
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: greg: Improve error message handling when setting metadata on tool shed repositories. Display the repository owner in the table grid when displaying invalid tools in the tool shed.
by Bitbucket 15 Aug '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/2531e085f262/
changeset: 2531e085f262
user: greg
date: 2012-08-15 20:50:38
summary: Improve error message handling when setting metadata on tool shed repositories. Display the repository owner in the table grid when displaying invalid tools in the tool shed.
affected #: 4 files
diff -r b99cd3b7670169d6b2723aff95c458b5448af34d -r 2531e085f2625b60135a6b4972f125e22a4fd354 lib/galaxy/webapps/community/controllers/admin.py
--- a/lib/galaxy/webapps/community/controllers/admin.py
+++ b/lib/galaxy/webapps/community/controllers/admin.py
@@ -696,9 +696,14 @@
owner = repository_name_owner_list[ 1 ]
repository = get_repository_by_name_and_owner( trans, name, owner )
try:
- reset_all_metadata_on_repository( trans, trans.security.encode_id( repository.id ) )
- log.debug( "Successfully reset metadata on repository %s" % repository.name )
- successful_count += 1
+ invalid_file_tups = reset_all_metadata_on_repository( trans, trans.security.encode_id( repository.id ) )
+ if invalid_file_tups:
+ message = generate_message_for_invalid_tools( invalid_file_tups, repository, None, as_html=False )
+ log.debug( message )
+ unsuccessful_count += 1
+ else:
+ log.debug( "Successfully reset metadata on repository %s" % repository.name )
+ successful_count += 1
except Exception, e:
log.debug( "Error attempting to reset metadata on repository '%s': %s" % ( repository.name, str( e ) ) )
unsuccessful_count += 1
diff -r b99cd3b7670169d6b2723aff95c458b5448af34d -r 2531e085f2625b60135a6b4972f125e22a4fd354 lib/galaxy/webapps/community/controllers/common.py
--- a/lib/galaxy/webapps/community/controllers/common.py
+++ b/lib/galaxy/webapps/community/controllers/common.py
@@ -277,6 +277,41 @@
return '%s://%s%s/repos/%s/%s' % ( protocol, username, base, repository.user.username, repository.name )
else:
return '%s/repos/%s/%s' % ( base_url, repository.user.username, repository.name )
+def generate_message_for_invalid_tools( invalid_file_tups, repository, metadata_dict, as_html=True ):
+ if as_html:
+ new_line = '<br/>'
+ bold_start = '<b>'
+ bold_end = '</b>'
+ else:
+ new_line = '\n'
+ bold_start = ''
+ bold_end = ''
+ message = ''
+ if metadata_dict:
+ message += "Metadata was defined for some items in revision '%s'. " % str( repository.tip )
+ message += "Correct the following problems if necessary and reset metadata.%s" % new_line
+ else:
+ message += "Metadata cannot be defined for revision '%s' so this revision cannot be automatically " % str( repository.tip )
+ message += "installed into a local Galaxy instance. Correct the following problems and reset metadata.%s" % new_line
+ for itc_tup in invalid_file_tups:
+ tool_file, exception_msg = itc_tup
+ if exception_msg.find( 'No such file or directory' ) >= 0:
+ exception_items = exception_msg.split()
+ missing_file_items = exception_items[ 7 ].split( '/' )
+ missing_file = missing_file_items[ -1 ].rstrip( '\'' )
+ if missing_file.endswith( '.loc' ):
+ sample_ext = '%s.sample' % missing_file
+ else:
+ sample_ext = missing_file
+ correction_msg = "This file refers to a missing file %s%s%s. " % ( bold_start, str( missing_file ), bold_end )
+ correction_msg += "Upload a file named %s%s%s to the repository to correct this error." % ( bold_start, sample_ext, bold_end )
+ else:
+ if as_html:
+ correction_msg = exception_msg
+ else:
+ correction_msg = exception_msg.replace( '<br/>', new_line ).replace( '<b>', bold_start ).replace( '</b>', bold_end )
+ message += "%s%s%s - %s%s" % ( bold_start, tool_file, bold_end, correction_msg, new_line )
+ return message
def generate_tool_guid( trans, repository, tool ):
"""
Generate a guid for the received tool. The form of the guid is
@@ -854,6 +889,7 @@
clean_repository_metadata( trans, id, changeset_revisions )
# Set tool version information for all downloadable changeset revisions. Get the list of changeset revisions from the changelog.
reset_all_tool_versions( trans, id, repo )
+ return invalid_file_tups
def set_repository_metadata( trans, repository, content_alert_str='', **kwd ):
"""
Set metadata using the repository's current disk files, returning specific error messages (if any) to alert the repository owner that the changeset
@@ -931,27 +967,7 @@
message += "be defined so this revision cannot be automatically installed into a local Galaxy instance."
status = "error"
if invalid_file_tups:
- if metadata_dict:
- message += "Metadata was defined for some items in revision '%s'. " % str( repository.tip )
- message += "Correct the following problems if necessary and reset metadata.<br/>"
- else:
- message += "Metadata cannot be defined for revision '%s' so this revision cannot be automatically " % str( repository.tip )
- message += "installed into a local Galaxy instance. Correct the following problems and reset metadata.<br/>"
- for itc_tup in invalid_file_tups:
- tool_file, exception_msg = itc_tup
- if exception_msg.find( 'No such file or directory' ) >= 0:
- exception_items = exception_msg.split()
- missing_file_items = exception_items[ 7 ].split( '/' )
- missing_file = missing_file_items[ -1 ].rstrip( '\'' )
- if missing_file.endswith( '.loc' ):
- sample_ext = '%s.sample' % missing_file
- else:
- sample_ext = missing_file
- correction_msg = "This file refers to a missing file <b>%s</b>. " % str( missing_file )
- correction_msg += "Upload a file named <b>%s</b> to the repository to correct this error." % sample_ext
- else:
- correction_msg = exception_msg
- message += "<b>%s</b> - %s<br/>" % ( tool_file, correction_msg )
+ message = generate_message_for_invalid_tools( invalid_file_tups, repository, metadata_dict )
status = 'error'
return message, status
def set_repository_metadata_due_to_new_tip( trans, repository, content_alert_str=None, **kwd ):
diff -r b99cd3b7670169d6b2723aff95c458b5448af34d -r 2531e085f2625b60135a6b4972f125e22a4fd354 lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -458,7 +458,10 @@
metadata = downloadable_revision.metadata
invalid_tools = metadata.get( 'invalid_tools', [] )
for invalid_tool_config in invalid_tools:
- invalid_tools_dict[ invalid_tool_config ] = ( repository.id, repository.name, downloadable_revision.changeset_revision )
+ invalid_tools_dict[ invalid_tool_config ] = ( repository.id,
+ repository.name,
+ repository.user.username,
+ downloadable_revision.changeset_revision )
else:
for repository in trans.sa_session.query( trans.model.Repository ) \
.filter( and_( trans.model.Repository.table.c.deleted == False,
@@ -468,7 +471,10 @@
metadata = downloadable_revision.metadata
invalid_tools = metadata.get( 'invalid_tools', [] )
for invalid_tool_config in invalid_tools:
- invalid_tools_dict[ invalid_tool_config ] = ( repository.id, repository.name, downloadable_revision.changeset_revision )
+ invalid_tools_dict[ invalid_tool_config ] = ( repository.id,
+ repository.name,
+ repository.user.username,
+ downloadable_revision.changeset_revision )
return trans.fill_template( '/webapps/community/repository/browse_invalid_tools.mako',
cntrller=cntrller,
invalid_tools_dict=invalid_tools_dict,
@@ -1373,6 +1379,7 @@
return trans.response.send_redirect( url )
@web.expose
def load_invalid_tool( self, trans, repository_id, tool_config, changeset_revision, **kwd ):
+ # FIXME: loading an invalid tool should display an appropriate message as to why the tool is invalid. This worked until recently.
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'error' )
@@ -1752,9 +1759,14 @@
status=status )
@web.expose
def reset_all_metadata( self, trans, id, **kwd ):
- reset_all_metadata_on_repository( trans, id, **kwd )
- message = "All repository metadata has been reset."
- status = 'done'
+ invalid_file_tups = reset_all_metadata_on_repository( trans, id, **kwd )
+ if invalid_file_tups:
+ repository = get_repository( trans, id )
+ message = generate_message_for_invalid_tools( invalid_file_tups, repository, None )
+ status = 'error'
+ else:
+ message = "All repository metadata has been reset."
+ status = 'done'
return trans.response.send_redirect( web.url_for( controller='repository',
action='manage_repository',
id=id,
diff -r b99cd3b7670169d6b2723aff95c458b5448af34d -r 2531e085f2625b60135a6b4972f125e22a4fd354 templates/webapps/community/repository/browse_invalid_tools.mako
--- a/templates/webapps/community/repository/browse_invalid_tools.mako
+++ b/templates/webapps/community/repository/browse_invalid_tools.mako
@@ -13,10 +13,11 @@
<tr><th>Tool config</th><th>Repository name</th>
+ <th>Repository owner</th><th>Changeset revision</th></tr>
%for invalid_tool_config, repository_tup in invalid_tools_dict.items():
- <% repository_id, repository_name, changeset_revision = repository_tup %>
+ <% repository_id, repository_name, repository_owner, changeset_revision = repository_tup %><tr><td><a class="view-info" href="${h.url_for( controller='repository', action='load_invalid_tool', repository_id=trans.security.encode_id( repository_id ), tool_config=invalid_tool_config, changeset_revision=changeset_revision, webapp=webapp )}">
@@ -24,6 +25,7 @@
</a></td><td>${repository_name}</td>
+ <td>${repository_owner}</td><td>${changeset_revision}</td></tr>
%endfor
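The missing-file branch of generate_message_for_invalid_tools recovers the offending filename from the exception text; a minimal stand-alone sketch of just that parsing step (the exception string below is invented for illustration and simply puts the quoted path in the eighth whitespace-separated token, which is what the indexing assumes):

exception_msg = "[Errno 2] No such file or directory: '/tmp/repo/tool-data/blastdb.loc'"
if exception_msg.find( 'No such file or directory' ) >= 0:
    exception_items = exception_msg.split()
    missing_file_items = exception_items[ 7 ].split( '/' )
    missing_file = missing_file_items[ -1 ].rstrip( "'" )
    if missing_file.endswith( '.loc' ):
        sample_ext = '%s.sample' % missing_file
    else:
        sample_ext = missing_file
    print( "%s -> %s" % ( missing_file, sample_ext ) )   # blastdb.loc -> blastdb.loc.sample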
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: greg: Fix for setting tool dependency metadata where at least one tool in the repository does not include a <requirements> tag set.
by Bitbucket 15 Aug '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/b99cd3b76701/
changeset: b99cd3b76701
user: greg
date: 2012-08-15 17:55:17
summary: Fix for setting tool dependency metadata where at least one tool in the repository does not include a <requirements> tag set.
affected #: 2 files
diff -r 01ed2f462dd7709876458b031d786d277d1f72f3 -r b99cd3b7670169d6b2723aff95c458b5448af34d lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -533,7 +533,7 @@
if req_name==tool_dependency_name and req_version==tool_dependency_version and req_type==tool_dependency_type:
can_generate_dependency_metadata = True
break
- if not can_generate_dependency_metadata:
+ if requirements and not can_generate_dependency_metadata:
# We've discovered at least 1 combination of name, version and type that is not defined in the <requirement>
# tag for any tool in the repository.
break
diff -r 01ed2f462dd7709876458b031d786d277d1f72f3 -r b99cd3b7670169d6b2723aff95c458b5448af34d lib/galaxy/webapps/community/config.py
--- a/lib/galaxy/webapps/community/config.py
+++ b/lib/galaxy/webapps/community/config.py
@@ -87,6 +87,7 @@
self.server_name = ''
self.job_manager = ''
self.default_job_handlers = []
+ self.default_cluster_job_runner = 'local:///'
self.job_handlers = []
self.tool_handlers = []
self.tool_runners = []
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: carlfeberhard: small fix to twilltestcase when GALAXY_TEST_SAVE fails
by Bitbucket 15 Aug '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/01ed2f462dd7/
changeset: 01ed2f462dd7
user: carlfeberhard
date: 2012-08-15 17:13:13
summary: small fix to twilltestcase when GALAXY_TEST_SAVE fails
affected #: 1 file
diff -r 7566c91c1ec65f90c52f173ff1a5442789028f93 -r 01ed2f462dd7709876458b031d786d277d1f72f3 test/base/twilltestcase.py
--- a/test/base/twilltestcase.py
+++ b/test/base/twilltestcase.py
@@ -694,11 +694,19 @@
if filename is not None:
local_name = self.get_filename( filename, shed_tool_id=shed_tool_id )
temp_name = self.makeTfname(fname = filename)
- file( temp_name, 'wb' ).write(data)
- if self.keepOutdir > '':
- ofn = os.path.join(self.keepOutdir,os.path.basename(local_name))
- shutil.copy(temp_name,ofn)
- log.debug('## GALAXY_TEST_SAVE=%s. saved %s' % (self.keepOutdir,ofn))
+ file( temp_name, 'wb' ).write( data )
+
+ # if the server's env has GALAXY_TEST_SAVE, save the output file to that dir
+ if self.keepOutdir:
+ ofn = os.path.join( self.keepOutdir, os.path.basename( local_name ) )
+ try:
+ shutil.copy( temp_name, ofn )
+ except Exception, exc:
+ error_log_msg = ( 'TwillTestCase could not save output file %s to %s: ' % ( temp_name, ofn ) )
+ error_log_msg += str( exc )
+ else:
+ log.debug('## GALAXY_TEST_SAVE=%s. saved %s' % ( self.keepOutdir, ofn ) )
+
try:
# have to nest try-except in try-finally to handle 2.4
try:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
3 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/16726c4310a7/
changeset: 16726c4310a7
user: jmchilton
date: 2012-05-04 07:12:43
summary: Augment run.sh to search for the optional environment variable
GALAXY_UNIVERSE_CONFIG_DIR. If this is set, before Galaxy is launched
the directory specified by the environment variable is searched for
ini config files. These files are merged into a single configuration
structure and written back out as universe.ini.
This should allow for a more modular configuration of Galaxy. The
files are merged in lexiographic order and later files can override
properties specified ealier on. If a consistent naming scheme is used
this can be really useful:
As an example, one might specify the files as such:
000_galaxy_defaults.ini
000_default_sqlite_config.ini
100_institution_specific.ini
500_job_runner.ini
500_webapp_0.ini
500_webapp_1.ini
500_instance_specific.ini
500_available_postgres.ini
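A small sketch of that override behaviour with ConfigParser, assuming only two of the files above exist and both set the same option (database_connection is used purely as an example):

from ConfigParser import ConfigParser   # Python 2, as in the script this changeset adds

parser = ConfigParser()
# Files are read in sorted (lexicographic) order; a value read later
# replaces the earlier one, so 500_* files win over 000_* files.
for conf_file in sorted( [ '500_available_postgres.ini', '000_default_sqlite_config.ini' ] ):
    parser.read( [ conf_file ] )
# If both files define database_connection in [app:main], the merged
# output ends up with the value from 500_available_postgres.ini.
parser.write( open( 'universe.ini', 'w' ) )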
affected #: 2 files
diff -r 1eb32dbab2c1a0bc308eb4d3a069fa55ff6ecb83 -r 16726c4310a71be907ebdcbf850dcd7ce72797b4 run.sh
--- a/run.sh
+++ b/run.sh
@@ -33,6 +33,10 @@
fi
done
+if [ -n "$GALAXY_UNIVERSE_CONFIG_DIR" ]; then
+ python ./scripts/build_universe_config.py "$GALAXY_UNIVERSE_CONFIG_DIR"
+fi
+
# explicitly attempt to fetch eggs before running
FETCH_EGGS=1
for arg in "$@"; do
diff -r 1eb32dbab2c1a0bc308eb4d3a069fa55ff6ecb83 -r 16726c4310a71be907ebdcbf850dcd7ce72797b4 scripts/build_universe_config.py
--- /dev/null
+++ b/scripts/build_universe_config.py
@@ -0,0 +1,26 @@
+from ConfigParser import ConfigParser
+from os import listdir
+from os.path import join
+from re import match
+from sys import argv
+
+
+def merge():
+ "Merges all .ini files in a specified directory into ./universe.ini"
+ if len(argv) < 2:
+ message = "%s: Must specify directory to merge configuration files from." % argv[0]
+ raise Exception(message)
+ conf_directory = argv[1]
+ conf_files = [f for f in listdir(conf_directory) if match(r'.*\.ini', f)]
+ conf_files.sort()
+
+ parser = ConfigParser()
+ for conf_file in conf_files:
+ parser.read([join(conf_directory, conf_file)])
+ ## TODO: Expand environment variables here, that would
+ ## also make Galaxy much easier to configure.
+
+ parser.write(open("universe.ini", 'w'))
+
+if __name__ == '__main__':
+ merge()
https://bitbucket.org/galaxy/galaxy-central/changeset/6f4b97fd803e/
changeset: 6f4b97fd803e
user: jmchilton
date: 2012-05-24 14:47:14
summary: Fix a typo.
affected #: 1 file
diff -r 16726c4310a71be907ebdcbf850dcd7ce72797b4 -r 6f4b97fd803e6ab97192d8454bc71bff145e5695 scripts/build_universe_config.py
--- a/scripts/build_universe_config.py
+++ b/scripts/build_universe_config.py
@@ -20,7 +20,7 @@
## TODO: Expand environment variables here, that would
## also make Galaxy much easier to configure.
- parser.write(open("universe.ini", 'w'))
+ parser.write(open("universe_wsgi.ini", 'w'))
if __name__ == '__main__':
merge()
https://bitbucket.org/galaxy/galaxy-central/changeset/7566c91c1ec6/
changeset: 7566c91c1ec6
user: natefoo
date: 2012-08-15 16:57:14
summary: Merged in jmchilton/galaxy-central-configuration-directory (pull request #44)
affected #: 2 files
diff -r b2fd558879b99de125f4554ba5e15b726bd8431c -r 7566c91c1ec65f90c52f173ff1a5442789028f93 run.sh
--- a/run.sh
+++ b/run.sh
@@ -36,6 +36,10 @@
fi
done
+if [ -n "$GALAXY_UNIVERSE_CONFIG_DIR" ]; then
+ python ./scripts/build_universe_config.py "$GALAXY_UNIVERSE_CONFIG_DIR"
+fi
+
# explicitly attempt to fetch eggs before running
FETCH_EGGS=1
for arg in "$@"; do
diff -r b2fd558879b99de125f4554ba5e15b726bd8431c -r 7566c91c1ec65f90c52f173ff1a5442789028f93 scripts/build_universe_config.py
--- /dev/null
+++ b/scripts/build_universe_config.py
@@ -0,0 +1,26 @@
+from ConfigParser import ConfigParser
+from os import listdir
+from os.path import join
+from re import match
+from sys import argv
+
+
+def merge():
+ "Merges all .ini files in a specified directory into ./universe.ini"
+ if len(argv) < 2:
+ message = "%s: Must specify directory to merge configuration files from." % argv[0]
+ raise Exception(message)
+ conf_directory = argv[1]
+ conf_files = [f for f in listdir(conf_directory) if match(r'.*\.ini', f)]
+ conf_files.sort()
+
+ parser = ConfigParser()
+ for conf_file in conf_files:
+ parser.read([join(conf_directory, conf_file)])
+ ## TODO: Expand environment variables here, that would
+ ## also make Galaxy much easier to configure.
+
+ parser.write(open("universe_wsgi.ini", 'w'))
+
+if __name__ == '__main__':
+ merge()
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
3 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/70b29b2bf906/
changeset: 70b29b2bf906
user: jmchilton
date: 2012-03-05 19:11:53
summary: Adding db_shell.py that can be used to simplify writing command line
scripts against the Galaxy model/database layer or to make use of
Galaxy models in an interactive shell.
affected #: 2 files
diff -r e58a87c91bc4bb471b9b6477ef638dc41c4aa4b9 -r 70b29b2bf906dce0dbf8775a7a18e3d6b5403ea4 scripts/db_shell.py
--- /dev/null
+++ b/scripts/db_shell.py
@@ -0,0 +1,86 @@
+# This script allows easy access to Galaxy's database layer via the
+# Galaxy models. For example:
+# % python -i scripts/db_shell.py
+# >>> new_user = User("admin(a)gmail.com")
+# >>> new_user.set_password
+# >>> sa_session.add(new_user)
+# >>> sa_session.commit()
+# >>> sa_session.query(User).all()
+#
+# You can also use this script as a library, for instance see https://gist.github.com/1979583q
+# TODO: This script overlaps a lot with manage_db.py and create_db.py,
+# these should maybe be refactored to remove duplication.
+import sys, os.path, logging
+
+new_path = [ os.path.join( os.getcwd(), "lib" ) ]
+new_path.extend( sys.path[1:] ) # remove scripts/ from the path
+sys.path = new_path
+
+from galaxy import eggs
+
+import pkg_resources
+pkg_resources.require( "sqlalchemy-migrate" )
+pkg_resources.require( "SQLAlchemy" )
+
+from ConfigParser import SafeConfigParser
+
+log = logging.getLogger( __name__ )
+
+if sys.argv[-1] in [ 'community' ]:
+ # Need to pop the last arg so the command line args will be correct
+ # for sqlalchemy-migrate
+ webapp = sys.argv.pop()
+ config_file = 'community_wsgi.ini'
+ repo = 'lib/galaxy/webapps/community/model/migrate'
+else:
+ # Poor man's optparse
+ config_file = 'universe_wsgi.ini'
+ if '-c' in sys.argv:
+ pos = sys.argv.index( '-c' )
+ sys.argv.pop(pos)
+ config_file = sys.argv.pop( pos )
+ if not os.path.exists( config_file ):
+ print "Galaxy config file does not exist (hint: use '-c config.ini' for non-standard locations): %s" % config_file
+ sys.exit( 1 )
+ repo = 'lib/galaxy/model/migrate'
+
+cp = SafeConfigParser()
+cp.read( config_file )
+
+if cp.has_option( "app:main", "database_connection" ):
+ db_url = cp.get( "app:main", "database_connection" )
+elif cp.has_option( "app:main", "database_file" ):
+ db_url = "sqlite:///%s?isolation_level=IMMEDIATE" % cp.get( "app:main", "database_file" )
+else:
+ db_url = "sqlite:///./database/universe.sqlite?isolation_level=IMMEDIATE"
+
+dialect_to_egg = {
+ "sqlite" : "pysqlite>=2",
+ "postgres" : "psycopg2",
+ "mysql" : "MySQL_python"
+}
+dialect = ( db_url.split( ':', 1 ) )[0]
+try:
+ egg = dialect_to_egg[dialect]
+ try:
+ pkg_resources.require( egg )
+ log.debug( "%s egg successfully loaded for %s dialect" % ( egg, dialect ) )
+ except:
+ # If the module is in the path elsewhere (i.e. non-egg), it'll still load.
+ log.warning( "%s egg not found, but an attempt will be made to use %s anyway" % ( egg, dialect ) )
+except KeyError:
+ # Let this go, it could possibly work with db's we don't support
+ log.error( "database_connection contains an unknown SQLAlchemy database dialect: %s" % dialect )
+
+# Setup DB scripting environment
+from sqlalchemy import *
+from sqlalchemy.orm import *
+from sqlalchemy.exc import *
+
+os.environ['PYTHONINSPECT'] = 'True'
+engine = create_engine(db_url, echo=True)
+db_session = scoped_session( sessionmaker( bind = engine ) )
+from galaxy.model.mapping import context as sa_session
+sa_session.bind = engine
+from galaxy.model import *
+
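The header comment also mentions using the script as a library; a minimal sketch of what a throwaway report script might look like under that reading (the file name count_users.py and the execfile() approach are assumptions for illustration, not part of the changeset):

# count_users.py -- run from the Galaxy root so lib/ and the config file resolve
execfile( 'scripts/db_shell.py' )            # binds engine, sa_session and the galaxy.model classes
print( sa_session.query( User ).count() )    # plain SQLAlchemy against the mapped User class

# invoked as, e.g.:
# % python count_users.py -c universe_wsgi.ini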
https://bitbucket.org/galaxy/galaxy-central/changeset/2eebd7dca84b/
changeset: 2eebd7dca84b
user: jmchilton
date: 2012-03-05 22:23:28
summary: Actually we don't want to set that environment variable, as that makes
python behave like -i on every execution.
affected #: 1 file
diff -r 70b29b2bf906dce0dbf8775a7a18e3d6b5403ea4 -r 2eebd7dca84be8b34b99b9bbb16ee8f75352986c scripts/db_shell.py
--- a/scripts/db_shell.py
+++ b/scripts/db_shell.py
@@ -77,7 +77,6 @@
from sqlalchemy.orm import *
from sqlalchemy.exc import *
-os.environ['PYTHONINSPECT'] = 'True'
engine = create_engine(db_url, echo=True)
db_session = scoped_session( sessionmaker( bind = engine ) )
from galaxy.model.mapping import context as sa_session
https://bitbucket.org/galaxy/galaxy-central/changeset/b2fd558879b9/
changeset: b2fd558879b9
user: natefoo
date: 2012-08-15 16:54:50
summary: Merged in jmchilton/galaxy-central-preseeding (pull request #38)
affected #: 2 files
diff -r a6436bc768699a48f2d434c376ffeaf767ca71be -r b2fd558879b99de125f4554ba5e15b726bd8431c scripts/db_shell.py
--- /dev/null
+++ b/scripts/db_shell.py
@@ -0,0 +1,85 @@
+# This script allows easy access to Galaxy's database layer via the
+# Galaxy models. For example:
+# % python -i scripts/db_shell.py
+# >>> new_user = User("admin(a)gmail.com")
+# >>> new_user.set_password
+# >>> sa_session.add(new_user)
+# >>> sa_session.commit()
+# >>> sa_session.query(User).all()
+#
+# You can also use this script as a library, for instance see https://gist.github.com/1979583q
+# TODO: This script overlaps a lot with manage_db.py and create_db.py,
+# these should maybe be refactored to remove duplication.
+import sys, os.path, logging
+
+new_path = [ os.path.join( os.getcwd(), "lib" ) ]
+new_path.extend( sys.path[1:] ) # remove scripts/ from the path
+sys.path = new_path
+
+from galaxy import eggs
+
+import pkg_resources
+pkg_resources.require( "sqlalchemy-migrate" )
+pkg_resources.require( "SQLAlchemy" )
+
+from ConfigParser import SafeConfigParser
+
+log = logging.getLogger( __name__ )
+
+if sys.argv[-1] in [ 'community' ]:
+ # Need to pop the last arg so the command line args will be correct
+ # for sqlalchemy-migrate
+ webapp = sys.argv.pop()
+ config_file = 'community_wsgi.ini'
+ repo = 'lib/galaxy/webapps/community/model/migrate'
+else:
+ # Poor man's optparse
+ config_file = 'universe_wsgi.ini'
+ if '-c' in sys.argv:
+ pos = sys.argv.index( '-c' )
+ sys.argv.pop(pos)
+ config_file = sys.argv.pop( pos )
+ if not os.path.exists( config_file ):
+ print "Galaxy config file does not exist (hint: use '-c config.ini' for non-standard locations): %s" % config_file
+ sys.exit( 1 )
+ repo = 'lib/galaxy/model/migrate'
+
+cp = SafeConfigParser()
+cp.read( config_file )
+
+if cp.has_option( "app:main", "database_connection" ):
+ db_url = cp.get( "app:main", "database_connection" )
+elif cp.has_option( "app:main", "database_file" ):
+ db_url = "sqlite:///%s?isolation_level=IMMEDIATE" % cp.get( "app:main", "database_file" )
+else:
+ db_url = "sqlite:///./database/universe.sqlite?isolation_level=IMMEDIATE"
+
+dialect_to_egg = {
+ "sqlite" : "pysqlite>=2",
+ "postgres" : "psycopg2",
+ "mysql" : "MySQL_python"
+}
+dialect = ( db_url.split( ':', 1 ) )[0]
+try:
+ egg = dialect_to_egg[dialect]
+ try:
+ pkg_resources.require( egg )
+ log.debug( "%s egg successfully loaded for %s dialect" % ( egg, dialect ) )
+ except:
+ # If the module is in the path elsewhere (i.e. non-egg), it'll still load.
+ log.warning( "%s egg not found, but an attempt will be made to use %s anyway" % ( egg, dialect ) )
+except KeyError:
+ # Let this go, it could possibly work with db's we don't support
+ log.error( "database_connection contains an unknown SQLAlchemy database dialect: %s" % dialect )
+
+# Setup DB scripting environment
+from sqlalchemy import *
+from sqlalchemy.orm import *
+from sqlalchemy.exc import *
+
+engine = create_engine(db_url, echo=True)
+db_session = scoped_session( sessionmaker( bind = engine ) )
+from galaxy.model.mapping import context as sa_session
+sa_session.bind = engine
+from galaxy.model import *
+
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: greg: Eliminate the freebayes-specific functional test data from the distribution since it is now included in the freebayes repository in the tool shed.
by Bitbucket 15 Aug '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/a6436bc76869/
changeset: a6436bc76869
user: greg
date: 2012-08-15 16:52:46
summary: Eliminate the freebayes-specific functional test data from the distribution since it is now included in the freebayes repository in the tool shed.
affected #: 2 files
diff -r 1ce9b9138a59da9cce64cb795e55154a70d51cb1 -r a6436bc768699a48f2d434c376ffeaf767ca71be test-data/variant_detection/freebayes/freebayes_out_1.output_trace
--- a/test-data/variant_detection/freebayes/freebayes_out_1.output_trace
+++ /dev/null
@@ -1,8 +0,0 @@
-phiX174,1411,allele,phiX174,phiX174,A,60,100
-phiX174,1412,allele,phiX174,phiX174,G,60,100
-phiX174,1413,allele,phiX174,phiX174,C,60,100
-phiX174,1414,allele,phiX174,phiX174,G,60,100
-phiX174,1415,allele,phiX174,phiX174,C,60,100
-phiX174,1416,allele,phiX174,phiX174,C,60,100
-phiX174,1417,allele,phiX174,phiX174,G,60,100
-phiX174,1418,allele,phiX174,phiX174,T,60,100
diff -r 1ce9b9138a59da9cce64cb795e55154a70d51cb1 -r a6436bc768699a48f2d434c376ffeaf767ca71be test-data/variant_detection/freebayes/freebayes_out_1.vcf.contains
--- a/test-data/variant_detection/freebayes/freebayes_out_1.vcf.contains
+++ /dev/null
@@ -1,2 +0,0 @@
-#CHROM POS ID REF ALT QUAL FILTER INFO FORMAT A
-
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: jmchilton: Enable the lwr & dynamic job runners automatically. Neither require
by Bitbucket 15 Aug '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/1ce9b9138a59/
changeset: 1ce9b9138a59
user: jmchilton
date: 2012-08-15 06:38:24
summary: Enable the lwr & dynamic job runners automatically. Neither require
any special libraries and deployers must assign tools to use them
before they have any effect on Galaxy (i.e. no subtle/implicit
behaviors), so I don't anticipate any downside to doing this. Also
clean up logic in lib/galaxy/jobs/handler.py a little to accommodate
this.
affected #: 2 files
diff -r f557b7b05fdd701cbf99ee04f311bcadb1ae29c4 -r 1ce9b9138a59da9cce64cb795e55154a70d51cb1 lib/galaxy/jobs/handler.py
--- a/lib/galaxy/jobs/handler.py
+++ b/lib/galaxy/jobs/handler.py
@@ -360,7 +360,7 @@
def __init__( self, app ):
self.app = app
self.job_runners = {}
- start_job_runners = ["local"]
+ start_job_runners = ["local", "lwr", "dynamic"]
if app.config.start_job_runners is not None:
start_job_runners.extend( [ x.strip() for x in util.listify( app.config.start_job_runners ) ] )
if app.config.use_tasked_jobs:
diff -r f557b7b05fdd701cbf99ee04f311bcadb1ae29c4 -r 1ce9b9138a59da9cce64cb795e55154a70d51cb1 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -922,16 +922,11 @@
if self_id in self.app.config.tool_handlers:
self.job_handlers = self.app.config.tool_handlers[ self_id ]
# Set job runner(s). Each runner is a dict with 'url' and, optionally, 'params'.
- if self.app.config.start_job_runners is None:
- # Jobs are always local regardless of tool config if no additional
- # runners are started
- self.job_runners = [ { "url" : "local:///" } ]
- else:
- # Set job runner to the cluster default
- self.job_runners = [ { "url" : self.app.config.default_cluster_job_runner } ]
- # Set custom runner(s) if they're defined.
- if self_id in self.app.config.tool_runners:
- self.job_runners = self.app.config.tool_runners[ self_id ]
+ # Set job runner to the cluster default
+ self.job_runners = [ { "url" : self.app.config.default_cluster_job_runner } ]
+ # Set custom runner(s) if they're defined.
+ if self_id in self.app.config.tool_runners:
+ self.job_runners = self.app.config.tool_runners[ self_id ]
# Is this a 'hidden' tool (hidden in tool menu)
self.hidden = util.xml_text(root, "hidden")
if self.hidden: self.hidden = util.string_as_bool(self.hidden)
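After this change, per-tool runner resolution reduces to a cluster default plus an optional per-tool override; a small stand-alone sketch of that lookup (the dictionaries stand in for the real config objects and the runner URLs are illustrative):

def runners_for_tool( tool_id, default_cluster_job_runner, tool_runners ):
    # Every tool starts on the cluster default...
    job_runners = [ { 'url' : default_cluster_job_runner } ]
    # ...and only tools explicitly listed in tool_runners are reassigned.
    if tool_id in tool_runners:
        job_runners = tool_runners[ tool_id ]
    return job_runners

tool_runners = { 'bwa_wrapper' : [ { 'url' : 'lwr://example' } ] }        # illustrative mapping
print( runners_for_tool( 'bwa_wrapper', 'local:///', tool_runners ) )    # [{'url': 'lwr://example'}]
print( runners_for_tool( 'upload1', 'local:///', tool_runners ) )        # [{'url': 'local:///'}]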
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
commit/galaxy-central: scot...@gatech.edu: Reworked some stdout/stderr parsing to make errors and warnings more apparent.
by Bitbucket 14 Aug '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/f557b7b05fdd/
changeset: f557b7b05fdd
user: scot...(a)gatech.edu
date: 2012-08-15 00:58:58
summary: Reworked some stdout/stderr parsing to make errors and warnings more apparent.
affected #: 2 files
diff -r a7a0a5962648eb68a27ed9e460d1b4ea012c2302 -r f557b7b05fdd701cbf99ee04f311bcadb1ae29c4 lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py
+++ b/lib/galaxy/jobs/__init__.py
@@ -309,9 +309,10 @@
return self.fail( job.info )
# Check the tool's stdout, stderr, and exit code for errors, but only
- # if the job has not already been marked as having an error.
+ # if the job has not already been marked as having an error.
+ # The job's stdout and stderr will be set accordingly.
if job.states.ERROR != job.state:
- if ( self.check_tool_output( stdout, stderr, tool_exit_code ) ):
+ if ( self.check_tool_output( stdout, stderr, tool_exit_code, job )):
job.state = job.states.OK
else:
job.state = job.states.ERROR
@@ -335,7 +336,7 @@
log.warning( "finish(): %s not found, but %s is not empty, so it will be used instead" % ( dataset_path.false_path, dataset_path.real_path ) )
else:
return self.fail( "Job %s's output dataset(s) could not be read" % job.id )
- job_context = ExpressionContext( dict( stdout = stdout, stderr = stderr ) )
+ job_context = ExpressionContext( dict( stdout = job.stdout, stderr = job.stderr ) )
job_tool = self.app.toolbox.tools_by_id.get( job.tool_id, None )
@@ -430,12 +431,12 @@
# will now be seen by the user.
self.sa_session.flush()
# Save stdout and stderr
- if len( stdout ) > 32768:
+ if len( job.stdout ) > 32768:
log.error( "stdout for job %d is greater than 32K, only first part will be logged to database" % job.id )
- job.stdout = stdout[:32768]
- if len( stderr ) > 32768:
+ job.stdout = job.stdout[:32768]
+ if len( job.stderr ) > 32768:
log.error( "stderr for job %d is greater than 32K, only first part will be logged to database" % job.id )
- job.stderr = stderr[:32768]
+ job.stderr = job.stderr[:32768]
# custom post process setup
inp_data = dict( [ ( da.name, da.dataset ) for da in job.input_datasets ] )
out_data = dict( [ ( da.name, da.dataset ) for da in job.output_datasets ] )
@@ -457,7 +458,7 @@
# Call 'exec_after_process' hook
self.tool.call_hook( 'exec_after_process', self.queue.app, inp_data=inp_data,
out_data=out_data, param_dict=param_dict,
- tool=self.tool, stdout=stdout, stderr=stderr )
+ tool=self.tool, stdout=job.stdout, stderr=job.stderr )
job.command_line = self.command_line
bytes = 0
@@ -477,7 +478,7 @@
if self.app.config.cleanup_job == 'always' or ( not stderr and self.app.config.cleanup_job == 'onsuccess' ):
self.cleanup()
- def check_tool_output( self, stdout, stderr, tool_exit_code ):
+ def check_tool_output( self, stdout, stderr, tool_exit_code, job ):
"""
Check the output of a tool - given the stdout, stderr, and the tool's
exit code, return True if the tool exited succesfully and False
@@ -487,8 +488,8 @@
Note that, if the tool did not define any exit code handling or
any stdio/stderr handling, then it reverts back to previous behavior:
if stderr contains anything, then False is returned.
+ Note that the job id is just for messages.
"""
- job = self.get_job()
err_msg = ""
# By default, the tool succeeded. This covers the case where the code
# has a bug but the tool was ok, and it lets a workflow continue.
@@ -497,10 +498,14 @@
try:
# Check exit codes and match regular expressions against stdout and
# stderr if this tool was configured to do so.
+ # If there is a regular expression for scanning stdout/stderr,
+ # then we assume that the tool writer overwrote the default
+ # behavior of just setting an error if there is *anything* on
+ # stderr.
if ( len( self.tool.stdio_regexes ) > 0 or
len( self.tool.stdio_exit_codes ) > 0 ):
- # We will check the exit code ranges in the order in which
- # they were specified. Each exit_code is a ToolStdioExitCode
+ # Check the exit code ranges in the order in which
+ # they were specified. Each exit_code is a StdioExitCode
# that includes an applicable range. If the exit code was in
# that range, then apply the error level and add in a message.
# If we've reached a fatal error rule, then stop.
@@ -508,24 +513,33 @@
for stdio_exit_code in self.tool.stdio_exit_codes:
if ( tool_exit_code >= stdio_exit_code.range_start and
tool_exit_code <= stdio_exit_code.range_end ):
- if None != stdio_exit_code.desc:
- err_msg += stdio_exit_code.desc
- # TODO: Find somewhere to stick the err_msg - possibly to
- # the source (stderr/stdout), possibly in a new db column.
+ # Tack on a generic description of the code
+ # plus a specific code description. For example,
+ # this might append "Job 42: Warning: Out of Memory\n".
+ # TODO: Find somewhere to stick the err_msg -
+ # possibly to the source (stderr/stdout), possibly
+ # in a new db column.
+ code_desc = stdio_exit_code.desc
+ if ( None == code_desc ):
+ code_desc = ""
+ tool_msg = ( "Job %s: %s: Exit code %d: %s" % (
+ job.get_id_tag(),
+ galaxy.tools.StdioErrorLevel.desc( tool_exit_code ),
+ tool_exit_code,
+ code_desc ) )
+ log.info( tool_msg )
+ stderr = err_msg + stderr
max_error_level = max( max_error_level,
stdio_exit_code.error_level )
- if max_error_level >= galaxy.tools.StdioErrorLevel.FATAL:
+ if ( max_error_level >=
+ galaxy.tools.StdioErrorLevel.FATAL ):
break
- # If there is a regular expression for scanning stdout/stderr,
- # then we assume that the tool writer overwrote the default
- # behavior of just setting an error if there is *anything* on
- # stderr.
if max_error_level < galaxy.tools.StdioErrorLevel.FATAL:
# We'll examine every regex. Each regex specifies whether
# it is to be run on stdout, stderr, or both. (It is
# possible for neither stdout nor stderr to be scanned,
- # but those won't be scanned.) We record the highest
+ # but those regexes won't be used.) We record the highest
# error level, which are currently "warning" and "fatal".
# If fatal, then we set the job's state to ERROR.
# If warning, then we still set the job's state to OK
@@ -539,19 +553,32 @@
# Repeat the stdout stuff for stderr.
# TODO: Collapse this into a single function.
if ( regex.stdout_match ):
- regex_match = re.search( regex.match, stdout )
+ regex_match = re.search( regex.match, stdout,
+ re.IGNORECASE )
if ( regex_match ):
- err_msg += self.regex_err_msg( regex_match, regex )
- max_error_level = max( max_error_level, regex.error_level )
- if max_error_level >= galaxy.tools.StdioErrorLevel.FATAL:
+ rexmsg = self.regex_err_msg( regex_match, regex)
+ log.info( "Job %s: %s"
+ % ( job.get_id_tag(), rexmsg ) )
+ stdout = rexmsg + "\n" + stdout
+ max_error_level = max( max_error_level,
+ regex.error_level )
+ if ( max_error_level >=
+ galaxy.tools.StdioErrorLevel.FATAL ):
break
- if ( regex.stderr_match ):
- regex_match = re.search( regex.match, stderr )
+
+ if ( regex.stderr_match ):
+ regex_match = re.search( regex.match, stderr,
+ re.IGNORECASE )
if ( regex_match ):
- err_msg += self.regex_err_msg( regex_match, regex )
- max_error_level = max( max_error_level,
+ rexmsg = self.regex_err_msg( regex_match, regex)
+ # DELETEME
+ log.info( "Job %s: %s"
+ % ( job.get_id_tag(), rexmsg ) )
+ stderr = rexmsg + "\n" + stderr
+ max_error_level = max( max_error_level,
regex.error_level )
- if max_error_level >= galaxy.tools.StdioErrorLevel.FATAL:
+ if ( max_error_level >=
+ galaxy.tools.StdioErrorLevel.FATAL ):
break
# If we encountered a fatal error, then we'll need to set the
@@ -565,17 +592,26 @@
# default to the previous behavior: when there's anything on stderr
# the job has an error, and the job is ok otherwise.
else:
- log.debug( "The tool did not define exit code or stdio handling; "
+ # TODO: Add in the tool and job id:
+ log.debug( "Tool did not define exit code or stdio handling; "
+ "checking stderr for success" )
if stderr:
success = False
else:
success = True
+
# On any exception, return True.
except:
+ tb = traceback.format_exc()
log.warning( "Tool check encountered unexpected exception; "
- + "assuming tool was successful" )
+ + "assuming tool was successful: " + tb )
success = True
+
+ # Store the modified stdout and stderr in the job:
+ if None != job:
+ job.stdout = stdout
+ job.stderr = stderr
+
return success
def regex_err_msg( self, match, regex ):
diff -r a7a0a5962648eb68a27ed9e460d1b4ea012c2302 -r f557b7b05fdd701cbf99ee04f311bcadb1ae29c4 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -1316,6 +1316,8 @@
return_level = StdioErrorLevel.WARNING
elif ( re.search( "fatal", err_level, re.IGNORECASE ) ):
return_level = StdioErrorLevel.FATAL
+ else:
+ log.debug( "Error level %s did not match warning/fatal" % err_level )
except Exception, e:
log.error( "Exception in parse_error_level "
+ str(sys.exc_info() ) )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/a7a0a5962648/
changeset: a7a0a5962648
user: greg
date: 2012-08-14 22:24:54
summary: Fix for recently introduced bug that breaks installation of tool shed repositories into a local Galaxy instance if the repository includes tools that use the ToolDataTableManager. Also, move processing of sample files and other config files included in installed tool shed repositories to disk rather than memory.
affected #: 4 files
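The recurring change in this diff is that get_config_from_repository( ..., work_dir ), which cloned the repository into a temporary directory, is replaced by get_config_from_disk( filename, relative_install_dir ), which simply looks the file up under the installed repository. A rough sketch of such a lookup, as a hypothetical helper rather than the exact shed_util code:

    import os

    def get_config_from_disk(config_filename, relative_install_dir):
        # Walk the installed repository directory, skipping Mercurial metadata,
        # and return the first file whose name matches the requested config.
        for root, dirs, files in os.walk(relative_install_dir):
            if '.hg' in dirs:
                dirs.remove('.hg')
            for name in files:
                if name == config_filename:
                    return os.path.join(root, name)
        return None

    # e.g. tool_dependencies_config = get_config_from_disk('tool_dependencies.xml', relative_install_dir)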
diff -r b4f52783c54ac175d306d5064ef5ab7bd5149ca9 -r a7a0a5962648eb68a27ed9e460d1b4ea012c2302 lib/galaxy/tool_shed/install_manager.py
--- a/lib/galaxy/tool_shed/install_manager.py
+++ b/lib/galaxy/tool_shed/install_manager.py
@@ -136,27 +136,22 @@
repository_clone_url=repository_clone_url,
relative_install_dir=relative_install_dir,
repository_files_dir=None,
- resetting_all_metadata_on_repository=False )
+ resetting_all_metadata_on_repository=False,
+ webapp='galaxy' )
tool_shed_repository.metadata = metadata_dict
self.app.sa_session.add( tool_shed_repository )
self.app.sa_session.flush()
if 'tool_dependencies' in metadata_dict:
# All tool_dependency objects must be created before the tools are processed even if no tool dependencies will be installed.
- tool_dependencies = create_tool_dependency_objects( self.app, tool_shed_repository, tool_shed_repository.installed_changeset_revision, set_status=True )
+ tool_dependencies = create_tool_dependency_objects( self.app, tool_shed_repository, relative_install_dir, set_status=True )
else:
tool_dependencies = None
if 'tools' in metadata_dict:
- work_dir = tempfile.mkdtemp()
repository_tools_tups = get_repository_tools_tups( self.app, metadata_dict )
if repository_tools_tups:
sample_files = metadata_dict.get( 'sample_files', [] )
# Handle missing data table entries for tool parameters that are dynamically generated select lists.
- repository_tools_tups = handle_missing_data_table_entry( self.app,
- tool_shed_repository,
- tool_shed_repository.installed_changeset_revision,
- self.tool_path,
- repository_tools_tups,
- work_dir )
+ repository_tools_tups = handle_missing_data_table_entry( self.app, relative_install_dir, self.tool_path, repository_tools_tups )
# Handle missing index files for tool parameters that are dynamically generated select lists.
repository_tools_tups, sample_files_copied = handle_missing_index_file( self.app, self.tool_path, sample_files, repository_tools_tups )
# Copy remaining sample files included in the repository to the ~/tool-data directory of the local Galaxy instance.
@@ -166,12 +161,8 @@
update_tool_shed_repository_status( self.app,
tool_shed_repository,
self.app.model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES )
- # Get the tool_dependencies.xml file from the repository.
- tool_dependencies_config = get_config_from_repository( self.app,
- 'tool_dependencies.xml',
- tool_shed_repository,
- tool_shed_repository.installed_changeset_revision,
- work_dir )
+ # Get the tool_dependencies.xml file from disk.
+ tool_dependencies_config = get_config_from_disk( 'tool_dependencies.xml', relative_install_dir )
installed_tool_dependencies = handle_tool_dependencies( app=self.app,
tool_shed_repository=tool_shed_repository,
tool_dependencies_config=tool_dependencies_config,
@@ -189,10 +180,6 @@
self.migrated_tools_config,
tool_panel_dict=tool_panel_dict_for_display,
new_install=True )
- try:
- shutil.rmtree( work_dir )
- except:
- pass
if 'datatypes' in metadata_dict:
tool_shed_repository.status = self.app.model.ToolShedRepository.installation_status.LOADING_PROPRIETARY_DATATYPES
if not tool_shed_repository.includes_datatypes:
@@ -200,11 +187,7 @@
self.app.sa_session.add( tool_shed_repository )
self.app.sa_session.flush()
work_dir = tempfile.mkdtemp()
- datatypes_config = get_config_from_repository( self.app,
- 'datatypes_conf.xml',
- tool_shed_repository,
- tool_shed_repository.installed_changeset_revision,
- work_dir )
+ datatypes_config = get_config_from_disk( 'datatypes_conf.xml', relative_install_dir )
# Load proprietary data types required by tools. The value of override is not important here since the Galaxy server will be started
# after this installation completes.
converter_path, display_path = alter_config_and_load_prorietary_datatypes( self.app, datatypes_config, relative_install_dir, override=False )
diff -r b4f52783c54ac175d306d5064ef5ab7bd5149ca9 -r a7a0a5962648eb68a27ed9e460d1b4ea012c2302 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -248,7 +248,7 @@
except:
pass
return converter_path, display_path
-def check_tool_input_params( app, repo_dir, tool_config_name, tool, sample_files ):
+def check_tool_input_params( app, repo_dir, tool_config_name, tool, sample_files, webapp='galaxy' ):
"""
Check all of the tool's input parameters, looking for any that are dynamically generated using external data files to make
sure the files exist.
@@ -291,8 +291,9 @@
correction_msg = "This file refers to a file named <b>%s</b>. " % str( index_file )
correction_msg += "Upload a file named <b>%s.sample</b> to the repository to correct this error." % str( index_file_name )
invalid_files_and_errors_tups.append( ( tool_config_name, correction_msg ) )
- # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
- reset_tool_data_tables( app )
+ if webapp == 'community':
+ # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
+ reset_tool_data_tables( app )
return invalid_files_and_errors_tups
def config_elems_to_xml_file( app, config_elems, config_filename, tool_path ):
# Persist the current in-memory list of config_elems to a file named by the value of config_filename.
@@ -427,16 +428,11 @@
sa_session.add( tool_shed_repository )
sa_session.flush()
return tool_shed_repository
-def create_tool_dependency_objects( app, tool_shed_repository, current_changeset_revision, set_status=True ):
+def create_tool_dependency_objects( app, tool_shed_repository, relative_install_dir, set_status=True ):
# Create or update a ToolDependency for each entry in tool_dependencies_config. This method is called when installing a new tool_shed_repository.
tool_dependency_objects = []
- work_dir = tempfile.mkdtemp()
# Get the tool_dependencies.xml file from the repository.
- tool_dependencies_config = get_config_from_repository( app,
- 'tool_dependencies.xml',
- tool_shed_repository,
- current_changeset_revision,
- work_dir )
+ tool_dependencies_config = get_config_from_disk( 'tool_dependencies.xml', relative_install_dir )
tree = ElementTree.parse( tool_dependencies_config )
root = tree.getroot()
ElementInclude.include( root )
@@ -454,10 +450,6 @@
status=app.model.ToolDependency.installation_status.NEVER_INSTALLED,
set_status=set_status )
tool_dependency_objects.append( tool_dependency )
- try:
- shutil.rmtree( work_dir )
- except:
- pass
return tool_dependency_objects
def generate_clone_url( trans, repository ):
"""Generate the URL for cloning a repository."""
@@ -548,7 +540,8 @@
if not can_generate_dependency_metadata:
break
return can_generate_dependency_metadata
-def generate_metadata_for_changeset_revision( app, repository_clone_url, relative_install_dir=None, repository_files_dir=None, resetting_all_metadata_on_repository=False ):
+def generate_metadata_for_changeset_revision( app, repository_clone_url, relative_install_dir=None, repository_files_dir=None,
+ resetting_all_metadata_on_repository=False, webapp='galaxy' ):
"""
Generate metadata for a repository using it's files on disk. To generate metadata for changeset revisions older than the repository tip,
the repository will have been cloned to a temporary location and updated to a specified changeset revision to access that changeset revision's
@@ -629,7 +622,7 @@
invalid_tool_configs.append( name )
invalid_file_tups.append( ( name, str( e ) ) )
if tool is not None:
- invalid_files_and_errors_tups = check_tool_input_params( app, files_dir, name, tool, sample_files )
+ invalid_files_and_errors_tups = check_tool_input_params( app, files_dir, name, tool, sample_files, webapp=webapp )
can_set_metadata = True
for tup in invalid_files_and_errors_tups:
if name in tup:
@@ -664,7 +657,7 @@
metadata_dict = generate_tool_dependency_metadata( tool_dependencies_config, metadata_dict )
if invalid_tool_configs:
metadata_dict [ 'invalid_tools' ] = invalid_tool_configs
- if resetting_all_metadata_on_repository:
+ if webapp == 'community' and resetting_all_metadata_on_repository:
# Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
reset_tool_data_tables( app )
# Reset the value of the app's tool_data_path to it's original value.
@@ -1255,7 +1248,7 @@
return shed_url
# The tool shed from which the repository was originally installed must no longer be configured in tool_sheds_conf.xml.
return None
-def handle_missing_data_table_entry( app, repository, changeset_revision, tool_path, repository_tools_tups, dir ):
+def handle_missing_data_table_entry( app, relative_install_dir, tool_path, repository_tools_tups ):
"""
Inspect each tool to see if any have input parameters that are dynamically generated select lists that require entries in the
tool_data_table_conf.xml file. This method is called only from Galaxy (not the tool shed) when a repository is being installed
@@ -1269,7 +1262,7 @@
break
if missing_data_table_entry:
# The repository must contain a tool_data_table_conf.xml.sample file that includes all required entries for all tools in the repository.
- sample_tool_data_table_conf = get_config_from_repository( app, 'tool_data_table_conf.xml.sample', repository, changeset_revision, dir )
+ sample_tool_data_table_conf = get_config_from_disk( 'tool_data_table_conf.xml.sample', relative_install_dir )
# Add entries to the ToolDataTableManager's in-memory data_tables dictionary as well as the list of data_table_elems and the list of
# data_table_elem_names.
error, correction_msg = handle_sample_tool_data_table_conf_file( app, sample_tool_data_table_conf, persist=True )
@@ -1394,14 +1387,8 @@
def load_installed_datatypes( app, repository, relative_install_dir, deactivate=False ):
# Load proprietary datatypes and return information needed for loading proprietary datatypes converters and display applications later.
metadata = repository.metadata
- work_dir = tempfile.mkdtemp()
repository_dict = None
- datatypes_config = get_config_from_repository( app,
- 'datatypes_conf.xml',
- repository,
- repository.changeset_revision,
- work_dir,
- install_dir=relative_install_dir )
+ datatypes_config = get_config_from_disk( 'datatypes_conf.xml', relative_install_dir )
if datatypes_config:
converter_path, display_path = alter_config_and_load_prorietary_datatypes( app, datatypes_config, relative_install_dir, deactivate=deactivate )
if converter_path or display_path:
@@ -1413,10 +1400,6 @@
tool_dicts=metadata.get( 'tools', [] ),
converter_path=converter_path,
display_path=display_path )
- try:
- shutil.rmtree( work_dir )
- except:
- pass
return repository_dict
def load_installed_display_applications( app, installed_repository_dict, deactivate=False ):
# Load or deactivate proprietary datatype display applications
diff -r b4f52783c54ac175d306d5064ef5ab7bd5149ca9 -r a7a0a5962648eb68a27ed9e460d1b4ea012c2302 lib/galaxy/web/controllers/admin_toolshed.py
--- a/lib/galaxy/web/controllers/admin_toolshed.py
+++ b/lib/galaxy/web/controllers/admin_toolshed.py
@@ -525,13 +525,8 @@
# Get the tool_shed_repository from one of the tool_dependencies.
message = ''
tool_shed_repository = tool_dependencies[ 0 ].tool_shed_repository
- work_dir = tempfile.mkdtemp()
# Get the tool_dependencies.xml file from the repository.
- tool_dependencies_config = get_config_from_repository( trans.app,
- 'tool_dependencies.xml',
- tool_shed_repository,
- tool_shed_repository.changeset_revision,
- work_dir )
+ tool_dependencies_config = get_config_from_disk( 'tool_dependencies.xml', tool_shed_repository.repo_path( trans.app ) )
installed_tool_dependencies = handle_tool_dependencies( app=trans.app,
tool_shed_repository=tool_shed_repository,
tool_dependencies_config=tool_dependencies_config,
@@ -539,10 +534,6 @@
for installed_tool_dependency in installed_tool_dependencies:
if installed_tool_dependency.status == trans.app.model.ToolDependency.installation_status.ERROR:
message += ' %s' % installed_tool_dependency.error_message
- try:
- shutil.rmtree( work_dir )
- except:
- pass
tool_dependency_ids = [ trans.security.encode_id( td.id ) for td in tool_dependencies ]
if message:
status = 'error'
@@ -663,11 +654,7 @@
tool_shed_repository,
trans.model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES )
# Get the tool_dependencies.xml file from the repository.
- tool_dependencies_config = get_config_from_repository( trans.app,
- 'tool_dependencies.xml',
- tool_shed_repository,
- tool_shed_repository.installed_changeset_revision,
- work_dir )
+ tool_dependencies_config = get_config_from_disk( 'tool_dependencies.xml', relative_install_dir )
installed_tool_dependencies = handle_tool_dependencies( app=trans.app,
tool_shed_repository=tool_shed_repository,
tool_dependencies_config=tool_dependencies_config,
@@ -691,24 +678,19 @@
repository_clone_url=repository_clone_url,
relative_install_dir=relative_install_dir,
repository_files_dir=None,
- resetting_all_metadata_on_repository=False )
+ resetting_all_metadata_on_repository=False,
+ webapp='galaxy' )
tool_shed_repository.metadata = metadata_dict
trans.sa_session.add( tool_shed_repository )
trans.sa_session.flush()
if 'tool_dependencies' in metadata_dict and not reinstalling:
- tool_dependencies = create_tool_dependency_objects( trans.app, tool_shed_repository, tool_shed_repository.installed_changeset_revision, set_status=True )
+ tool_dependencies = create_tool_dependency_objects( trans.app, tool_shed_repository, relative_install_dir, set_status=True )
if 'tools' in metadata_dict:
tool_panel_dict = generate_tool_panel_dict_for_new_install( metadata_dict[ 'tools' ], tool_section )
repository_tools_tups = get_repository_tools_tups( trans.app, metadata_dict )
if repository_tools_tups:
# Handle missing data table entries for tool parameters that are dynamically generated select lists.
- work_dir = tempfile.mkdtemp()
- repository_tools_tups = handle_missing_data_table_entry( trans.app,
- tool_shed_repository,
- tool_shed_repository.changeset_revision,
- tool_path,
- repository_tools_tups,
- work_dir )
+ repository_tools_tups = handle_missing_data_table_entry( trans.app, relative_install_dir, tool_path, repository_tools_tups )
# Handle missing index files for tool parameters that are dynamically generated select lists.
sample_files = metadata_dict.get( 'sample_files', [] )
repository_tools_tups, sample_files_copied = handle_missing_index_file( trans.app, tool_path, sample_files, repository_tools_tups )
@@ -723,22 +705,13 @@
shed_tool_conf=shed_tool_conf,
tool_panel_dict=tool_panel_dict,
new_install=True )
- try:
- shutil.rmtree( work_dir )
- except:
- pass
if 'datatypes' in metadata_dict:
tool_shed_repository.status = trans.model.ToolShedRepository.installation_status.LOADING_PROPRIETARY_DATATYPES
if not tool_shed_repository.includes_datatypes:
tool_shed_repository.includes_datatypes = True
trans.sa_session.add( tool_shed_repository )
trans.sa_session.flush()
- work_dir = tempfile.mkdtemp()
- datatypes_config = get_config_from_repository( trans.app,
- 'datatypes_conf.xml',
- tool_shed_repository,
- tool_shed_repository.changeset_revision,
- work_dir )
+ datatypes_config = get_config_from_disk( 'datatypes_conf.xml', relative_install_dir )
# Load data types required by tools.
converter_path, display_path = alter_config_and_load_prorietary_datatypes( trans.app, datatypes_config, relative_install_dir, override=False )
if converter_path or display_path:
@@ -756,10 +729,6 @@
if display_path:
# Load proprietary datatype display applications
trans.app.datatypes_registry.load_display_applications( installed_repository_dict=repository_dict )
- try:
- shutil.rmtree( work_dir )
- except:
- pass
@web.expose
@web.require_admin
def manage_repository( self, trans, **kwd ):
@@ -1507,7 +1476,7 @@
trans.sa_session.flush()
# Create tool_dependency records if necessary.
if 'tool_dependencies' in metadata_dict:
- tool_dependencies = create_tool_dependency_objects( trans.app, repository, repository.changeset_revision, set_status=False )
+ tool_dependencies = create_tool_dependency_objects( trans.app, repository, relative_install_dir, set_status=False )
message = "The installed repository named '%s' has been updated to change set revision '%s'. " % ( name, latest_changeset_revision )
# See if any tool dependencies can be installed.
shed_tool_conf, tool_path, relative_install_dir = get_tool_panel_config_tool_path_install_dir( trans.app, repository )
diff -r b4f52783c54ac175d306d5064ef5ab7bd5149ca9 -r a7a0a5962648eb68a27ed9e460d1b4ea012c2302 lib/galaxy/webapps/community/controllers/common.py
--- a/lib/galaxy/webapps/community/controllers/common.py
+++ b/lib/galaxy/webapps/community/controllers/common.py
@@ -795,7 +795,8 @@
repository_clone_url=repository_clone_url,
relative_install_dir=repo_dir,
repository_files_dir=work_dir,
- resetting_all_metadata_on_repository=True )
+ resetting_all_metadata_on_repository=True,
+ webapp='community' )
if current_metadata_dict:
if not metadata_changeset_revision and not metadata_dict:
# We're at the first change set in the change log.
@@ -885,7 +886,8 @@
repository_clone_url=repository_clone_url,
relative_install_dir=repo_dir,
repository_files_dir=None,
- resetting_all_metadata_on_repository=False )
+ resetting_all_metadata_on_repository=False,
+ webapp='community' )
if metadata_dict:
downloadable = is_downloadable( metadata_dict )
repository_metadata = None
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.

commit/galaxy-central: natefoo: Reclaim ownership of job's working dir prior to reading stdout/stderr/ec files to avoid permissions problems.
by Bitbucket 14 Aug '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/b4f52783c54a/
changeset: b4f52783c54a
user: natefoo
date: 2012-08-14 21:22:29
summary: Reclaim ownership of job's working dir prior to reading stdout/stderr/ec files to avoid permissions problems.
affected #: 1 file
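When jobs run as the real user, the working directory belongs to that user until ownership is handed back, so the exit-code/stdout/stderr files written there may not be readable by the Galaxy process. A hedged sketch of the collection step this changeset adjusts (collect_output, reclaim_ownership() usage, and the retry count are stand-ins for the wrapper's actual behavior):

    import time

    def collect_output(job_wrapper, ofile, efile, retries=4, delay=1):
        # Ownership must be reclaimed first so the files below are readable.
        job_wrapper.reclaim_ownership()
        for attempt in range(retries):
            try:
                with open(ofile) as ofh, open(efile) as efh:
                    return ofh.read(), efh.read()
            except (IOError, OSError):
                time.sleep(delay)  # the files may not yet be visible or flushed
        raise RuntimeError("could not read job output files after %d attempts" % retries)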
diff -r 0ef82aa1c39cfb192ed6d1c0a1ed3a1a9e103485 -r b4f52783c54ac175d306d5064ef5ab7bd5149ca9 lib/galaxy/jobs/runners/drmaa.py
--- a/lib/galaxy/jobs/runners/drmaa.py
+++ b/lib/galaxy/jobs/runners/drmaa.py
@@ -339,6 +339,8 @@
# By default, the exit code is 0, which typically indicates success.
exit_code = 0
exit_code_str = "0"
+ # To ensure that files below are readable, ownership must be reclaimed first
+ drm_job_state.job_wrapper.reclaim_ownership()
while which_try < (self.app.config.retry_job_output_collection + 1):
try:
ofh = file(ofile, "r")
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.

commit/galaxy-central: carlfeberhard: tool-data change to fix bwa_wrapper.test_tool_000001; backbone history.js start
by Bitbucket 14 Aug '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/0ef82aa1c39c/
changeset: 0ef82aa1c39c
user: carlfeberhard
date: 2012-08-14 19:15:48
summary: tool-data change to fix bwa_wrapper.test_tool_000001; backbone history.js start
affected #: 4 files
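The new Backbone models below are shaped after the JSON returned by the histories API; the endpoints quoted in the file's comments can be inspected directly, for example with a short script like this (the base URL and history id are placeholders taken from those comments):

    import json
    try:
        from urllib.request import urlopen   # Python 3
    except ImportError:
        from urllib2 import urlopen          # Python 2

    base = "http://localhost:8080/api/histories"   # placeholder Galaxy instance
    history_id = "f2db41e1fa331b3e"                # id used in the history.js comments
    for item in json.load(urlopen("%s/%s/contents" % (base, history_id))):
        print("%s %s" % (item["id"], item["name"]))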
diff -r 40ad570cf570d4f2f2dcbf87c5eb3c212efe6f1c -r 0ef82aa1c39cfb192ed6d1c0a1ed3a1a9e103485 static/scripts/mvc/history.js
--- /dev/null
+++ b/static/scripts/mvc/history.js
@@ -0,0 +1,145 @@
+/*
+TODO:
+ as always: where does the model end and the view begin?
+ HistoryPanel
+ HistoryCollection: (collection of histories: 'Saved Histories')
+
+CASES:
+ logged-in/NOT
+*/
+//==============================================================================
+var HistoryItem = BaseModel.extend({
+ // a single history structure
+ // from: http://localhost:8080/api/histories/f2db41e1fa331b3e/contents/f2db41e1fa331…
+ /*
+ {
+ "data_type": "fastq",
+ "deleted": false,
+ "download_url": "/datasets/f2db41e1fa331b3e/display?to_ext=fastq",
+ "file_size": 226297533,
+ "genome_build": "?",
+ "id": "f2db41e1fa331b3e",
+ "metadata_data_lines": null,
+ "metadata_dbkey": "?",
+ "metadata_sequences": null,
+ "misc_blurb": "215.8 MB",
+ "misc_info": "uploaded fastq file",
+ "model_class": "HistoryDatasetAssociation",
+ "name": "LTCF-2-19_GTGAAA_L001_R1_001.fastq",
+ "state": "ok",
+ "visible": true
+ }
+ */
+
+ display : function(){},
+ edit_attr : function(){},
+ delete : function(){},
+ download : function(){},
+ details : function(){},
+ rerun : function(){},
+ tags : function(){},
+ annotations : function(){},
+ peek : function(){},
+});
+
+//..............................................................................
+var HistoryItemView = BaseView.extend({
+ // view for History model used in HistoryPanelView
+ tagName : "div",
+ className : "historyItemContainer",
+
+ icons : {
+ display : 'path to icon',
+ edit_attr : 'path to icon',
+ delete : 'path to icon',
+ download : 'path to icon',
+ details : 'path to icon',
+ rerun : 'path to icon',
+ tags : 'path to icon',
+ annotations : 'path to icon',
+ },
+
+ render : function(){
+ this.$el.append( 'div' )
+ },
+
+});
+
+
+
+//==============================================================================
+var History = Backbone.Collection.extend({
+ // a collection of HistoryItems
+
+ // from: http://localhost:8080/api/histories/f2db41e1fa331b3e
+ /*
+ {
+ "contents_url": "/api/histories/f2db41e1fa331b3e/contents",
+ "id": "f2db41e1fa331b3e",
+ "name": "one",
+ "state": "ok",
+ "state_details": {
+ "discarded": 0,
+ "empty": 0,
+ "error": 0,
+ "failed_metadata": 0,
+ "new": 0,
+ "ok": 4,
+ "queued": 0,
+ "running": 0,
+ "setting_metadata": 0,
+ "upload": 0
+ }
+ }
+ */
+
+ // from: http://localhost:8080/api/histories/f2db41e1fa331b3e/contents
+ // (most are replicated in HistoryItem)
+ /*
+ [
+ {
+ "id": "f2db41e1fa331b3e",
+ "name": "LTCF-2-19_GTGAAA_L001_R1_001.fastq",
+ "type": "file",
+ "url": "/api/histories/f2db41e1fa331b3e/contents/f2db41e1fa331b3e"
+ },
+ {
+ "id": "f597429621d6eb2b",
+ "name": "LTCF-2-19_GTGAAA_L001_R2_001.fastq",
+ "type": "file",
+ "url": "/api/histories/f2db41e1fa331b3e/contents/f597429621d6eb2b"
+ },
+ {
+ "id": "1cd8e2f6b131e891",
+ "name": "FASTQ Groomer on data 1",
+ "type": "file",
+ "url": "/api/histories/f2db41e1fa331b3e/contents/1cd8e2f6b131e891"
+ },
+ {
+ "id": "ebfb8f50c6abde6d",
+ "name": "FASTQ Groomer on data 2",
+ "type": "file",
+ "url": "/api/histories/f2db41e1fa331b3e/contents/ebfb8f50c6abde6d"
+ },
+ {
+ "id": "33b43b4e7093c91f",
+ "name": "Sa.04-02981.fasta",
+ "type": "file",
+ "url": "/api/histories/f2db41e1fa331b3e/contents/33b43b4e7093c91f"
+ }
+ ]
+ */
+});
+
+//..............................................................................
+var HistoryCollectionView = BaseView.extend({
+ // view for the HistoryCollection (as per current right hand panel)
+ tagName : "body",
+ className : "historyCollection",
+
+ render : function(){
+
+ },
+
+});
+
diff -r 40ad570cf570d4f2f2dcbf87c5eb3c212efe6f1c -r 0ef82aa1c39cfb192ed6d1c0a1ed3a1a9e103485 templates/base_panels.mako
--- a/templates/base_panels.mako
+++ b/templates/base_panels.mako
@@ -54,13 +54,17 @@
var galaxy_paths = new GalaxyPaths({
root_path: '${h.url_for( "/" )}',
image_path: '${h.url_for( "/static/images" )}',
+
tool_url: '${h.url_for( controller="/api/tools" )}',
+ history_url: '${h.url_for( controller="/api/histories" )}',
+
data_url: '${h.url_for( controller="/tracks", action="data" )}',
raw_data_url: '${h.url_for( controller="/tracks", action="raw_data" )}',
converted_datasets_state_url: '${h.url_for( controller="/tracks", action="converted_datasets_state" )}',
dataset_state_url: '${h.url_for( controller="/tracks", action="dataset_state" )}',
+ sweepster_url: '${h.url_for( controller="/tracks", action="sweepster" )}',
+
visualization_url: '${h.url_for( controller="/visualization", action="save" )}',
- sweepster_url: '${h.url_for( controller="/tracks", action="sweepster" )}'
});
</script></%def>
diff -r 40ad570cf570d4f2f2dcbf87c5eb3c212efe6f1c -r 0ef82aa1c39cfb192ed6d1c0a1ed3a1a9e103485 test-data/bwa_wrapper_out2.sam
--- a/test-data/bwa_wrapper_out2.sam
+++ b/test-data/bwa_wrapper_out2.sam
@@ -1,30 +1,30 @@
-seq1 16 phiX174 322 25 36M * 0 0 GATATTTTAAAGGAGCGTGGATTACTATCTGAGTCC B&&I13A$G$*%$IIIIIII9(.+5$IIIIIII#II XT:A:U NM:i:2 X0:i:1 XM:i:2 XO:i:0 XG:i:0 MD:Z:2C8A24
-seq10 0 phiX174 4149 37 17M1D19M * 0 0 ATTCTTTCTTTTCGTATCAGGGCGTTGAGTTCGATA IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII XT:A:U NM:i:2 X0:i:1 XM:i:1 XO:i:1 XG:i:1 MD:Z:5G11^G19
-seq11 0 phiX174 4072 37 18M1D18M * 0 0 GCATTTCTACTCCTTCTCATCCCCAATGCTTGGCTT II#IIIIIII$5+.(9IIIIIII$%*$G$A31I&&B XT:A:U NM:i:2 X0:i:1 XM:i:1 XO:i:1 XG:i:1 MD:Z:12T5^A18
+seq1 16 phiX174 322 25 36M * 0 0 GATATTTTAAAGGAGCGTGGATTACTATCTGAGTCC B&&I13A$G$*%$IIIIIII9(.+5$IIIIIII#II XT:A:U NM:i:2 X0:i:1 X1:i:0 XM:i:2 XO:i:0 XG:i:0 MD:Z:2C8A24
+seq2 0 phiX174 141 37 36M * 0 0 ATTCGACCTATCCTTGCGCAGCTCGAGAAGCTCTTA IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII XT:A:U NM:i:0 X0:i:1 X1:i:0 XM:i:0 XO:i:0 XG:i:0 MD:Z:36
+seq3 0 phiX174 505 37 36M * 0 0 GTAACAAAGTTTGGATTGCTACTGACCGCTCTCGTG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII XT:A:U NM:i:0 X0:i:1 X1:i:0 XM:i:0 XO:i:0 XG:i:0 MD:Z:36
+seq4 4 * 0 0 * * 0 0 AGCCGCTCGTCTTTTATGTAGGTGGTCAACCATTTT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII
+seq5 0 phiX174 4985 25 36M * 0 0 CAGTTATATGGCTTTTGGTTTCTATGTGGCTTAATA IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII XT:A:U NM:i:2 X0:i:1 X1:i:0 XM:i:2 XO:i:0 XG:i:0 MD:Z:13G17A4
+seq6 0 phiX174 925 37 11M1D25M * 0 0 AGGCGCTCGTCTTGGTATGTAGGTGGTCAACAATTT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII XT:A:U NM:i:1 X0:i:1 X1:i:0 XM:i:0 XO:i:1 XG:i:1 MD:Z:11^T25
+seq7 0 phiX174 943 37 13M1I22M * 0 0 TGTAGGTGGTCAACCAATTTTAATTGCAGGGGCTTC IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII XT:A:U NM:i:1 X0:i:1 X1:i:0 XM:i:0 XO:i:1 XG:i:1 MD:Z:35
+seq8 4 * 0 0 * * 0 0 ACACCCGTCCTTTACGTCATGCGCTCTATTCTCTGG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII
+seq9 0 phiX174 2596 37 16M1I19M * 0 0 GCCGCTATTCAGGTTGTTTTCTGTTGGTGCTGATAT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII XT:A:U NM:i:2 X0:i:1 X1:i:0 XM:i:1 XO:i:1 XG:i:1 MD:Z:7A27
+seq10 0 phiX174 4149 37 17M1D19M * 0 0 ATTCTTTCTTTTCGTATCAGGGCGTTGAGTTCGATA IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII XT:A:U NM:i:2 X0:i:1 X1:i:0 XM:i:1 XO:i:1 XG:i:1 MD:Z:5G11^G19
+seq11 0 phiX174 4072 37 18M1D18M * 0 0 GCATTTCTACTCCTTCTCATCCCCAATGCTTGGCTT II#IIIIIII$5+.(9IIIIIII$%*$G$A31I&&B XT:A:U NM:i:2 X0:i:1 X1:i:0 XM:i:1 XO:i:1 XG:i:1 MD:Z:12T5^A18
seq12 4 * 0 0 * * 0 0 CGCGCTTCGATAAAAATGGGATTGGCGTTTCCAACC IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII
seq13 4 * 0 0 * * 0 0 ATTTCTACTCTTTCTCATCCCCAATCCTTGCCTTCC IIIIIIIIIIIIIIIIIIIIIAAIIIIIIIIIIIII
-seq14 0 phiX174 3998 37 21M1D15M * 0 0 CCCTTTTGAATGTCACGCTGATATTTTGACTTTGAG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII XT:A:U NM:i:2 X0:i:1 XM:i:1 XO:i:1 XG:i:1 MD:Z:5C15^T15
+seq14 0 phiX174 3998 37 21M1D15M * 0 0 CCCTTTTGAATGTCACGCTGATATTTTGACTTTGAG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII XT:A:U NM:i:2 X0:i:1 X1:i:0 XM:i:1 XO:i:1 XG:i:1 MD:Z:5C15^T15
seq15 4 * 0 0 * * 0 0 CCAACTTACCAAGGTGGGTTACGAAACGCGACGCCG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII
seq16 4 * 0 0 * * 0 0 TCAGGGTATTAAAAGAGATTATTTTTCTCCAGCCAC IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII
-seq17 0 phiX174 3034 37 19M1D17M * 0 0 GTGATGTGCTTGCTACCGAAACAATACTTTAGGCAT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII XT:A:U NM:i:2 X0:i:1 XM:i:1 XO:i:1 XG:i:1 MD:Z:19^T9G7
+seq17 0 phiX174 3034 37 19M1D17M * 0 0 GTGATGTGCTTGCTACCGAAACAATACTTTAGGCAT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII XT:A:U NM:i:2 X0:i:1 X1:i:0 XM:i:1 XO:i:1 XG:i:1 MD:Z:19^T9G7
seq18 4 * 0 0 * * 0 0 TCAATCCCCCATGCTTGGCCGTTCCATAAGCAGATG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII
seq19 4 * 0 0 * * 0 0 TTCCTGCGCTTAATGCTTGAGCGTCCTGGTGCTGAT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII
-seq2 0 phiX174 141 37 36M * 0 0 ATTCGACCTATCCTTGCGCAGCTCGAGAAGCTCTTA IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII XT:A:U NM:i:0 X0:i:1 XM:i:0 XO:i:0 XG:i:0 MD:Z:36
-seq20 0 phiX174 1082 37 36M * 0 0 CTTATTACCATTTCAACTACTCCGGTTATCGCTGGC IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII XT:A:U NM:i:0 X0:i:1 XM:i:0 XO:i:0 XG:i:0 MD:Z:36
-seq21 0 phiX174 1344 37 15M1D21M * 0 0 CTGATACCAATAAAACCCTAAGCATTTGGTTCAGGG II#IIIIIII$5+.(9IIIIIII$%*$G$A31I&&B XT:A:U NM:i:2 X0:i:1 XM:i:1 XO:i:1 XG:i:1 MD:Z:15^T13T7
+seq20 0 phiX174 1082 37 36M * 0 0 CTTATTACCATTTCAACTACTCCGGTTATCGCTGGC IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII XT:A:U NM:i:0 X0:i:1 X1:i:0 XM:i:0 XO:i:0 XG:i:0 MD:Z:36
+seq21 0 phiX174 1344 37 15M1D21M * 0 0 CTGATACCAATAAAACCCTAAGCATTTGGTTCAGGG II#IIIIIII$5+.(9IIIIIII$%*$G$A31I&&B XT:A:U NM:i:2 X0:i:1 X1:i:0 XM:i:1 XO:i:1 XG:i:1 MD:Z:15^T13T7
seq22 4 * 0 0 * * 0 0 AATCAAACTTACCAAGGGGTTACGACGCGACGCCGT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII
seq23 4 * 0 0 * * 0 0 TGTGCTTCCCCAACTTGATTTAATAACCCTATAGAC IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII
-seq24 0 phiX174 4084 37 17M1I18M * 0 0 TTTCTCAATCCCCAATGCCTTGGCTTCCCTAAGCAG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII XT:A:U NM:i:2 X0:i:1 XM:i:1 XO:i:1 XG:i:1 MD:Z:27A7
-seq25 0 phiX174 520 37 16M1I19M * 0 0 TTGCTACTGACCGCTCTTCGTGCTCGTTGCTGCGTT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII XT:A:U NM:i:2 X0:i:1 XM:i:1 XO:i:1 XG:i:1 MD:Z:26C8
-seq26 0 phiX174 1976 37 36M * 0 0 CCGCGTGAAATTTCTATGAAGGATGTTTTCCGTTCT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII XT:A:U NM:i:0 X0:i:1 XM:i:0 XO:i:0 XG:i:0 MD:Z:36
-seq27 0 phiX174 2598 37 20M1I15M * 0 0 CGCTAATCAAGTTGTTTCTGTTTGGTGCTGATATTG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII XT:A:U NM:i:2 X0:i:1 XM:i:1 XO:i:1 XG:i:1 MD:Z:9G25
+seq24 0 phiX174 4084 37 17M1I18M * 0 0 TTTCTCAATCCCCAATGCCTTGGCTTCCCTAAGCAG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII XT:A:U NM:i:2 X0:i:1 X1:i:0 XM:i:1 XO:i:1 XG:i:1 MD:Z:27A7
+seq25 0 phiX174 520 37 16M1I19M * 0 0 TTGCTACTGACCGCTCTTCGTGCTCGTTGCTGCGTT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII XT:A:U NM:i:2 X0:i:1 X1:i:0 XM:i:1 XO:i:1 XG:i:1 MD:Z:26C8
+seq26 0 phiX174 1976 37 36M * 0 0 CCGCGTGAAATTTCTATGAAGGATGTTTTCCGTTCT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII XT:A:U NM:i:0 X0:i:1 X1:i:0 XM:i:0 XO:i:0 XG:i:0 MD:Z:36
+seq27 0 phiX174 2598 37 20M1I15M * 0 0 CGCTAATCAAGTTGTTTCTGTTTGGTGCTGATATTG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII XT:A:U NM:i:2 X0:i:1 X1:i:0 XM:i:1 XO:i:1 XG:i:1 MD:Z:9G25
seq28 4 * 0 0 * * 0 0 AAAGAGATTATTTGTCGGTCCAGCCACTAAAGTGAG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII
seq29 4 * 0 0 * * 0 0 CAAATTAATGCGCGCTTCGATAATGATTGGGGTATC IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII
-seq3 0 phiX174 505 37 36M * 0 0 GTAACAAAGTTTGGATTGCTACTGACCGCTCTCGTG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII XT:A:U NM:i:0 X0:i:1 XM:i:0 XO:i:0 XG:i:0 MD:Z:36
-seq30 0 phiX174 4091 37 18M1I17M * 0 0 ATCCCCTATGCTTGGCTTACCATAAGCAGATGGATA IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII XT:A:U NM:i:2 X0:i:1 XM:i:1 XO:i:1 XG:i:1 MD:Z:6A28
-seq4 4 * 0 0 * * 0 0 AGCCGCTCGTCTTTTATGTAGGTGGTCAACCATTTT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII
-seq5 0 phiX174 4985 25 36M * 0 0 CAGTTATATGGCTTTTGGTTTCTATGTGGCTTAATA IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII XT:A:U NM:i:2 X0:i:1 XM:i:2 XO:i:0 XG:i:0 MD:Z:13G17A4
-seq6 0 phiX174 925 37 11M1D25M * 0 0 AGGCGCTCGTCTTGGTATGTAGGTGGTCAACAATTT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII XT:A:U NM:i:1 X0:i:1 XM:i:0 XO:i:1 XG:i:1 MD:Z:11^T25
-seq7 0 phiX174 943 37 13M1I22M * 0 0 TGTAGGTGGTCAACCAATTTTAATTGCAGGGGCTTC IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII XT:A:U NM:i:1 X0:i:1 XM:i:0 XO:i:1 XG:i:1 MD:Z:35
-seq8 4 * 0 0 * * 0 0 ACACCCGTCCTTTACGTCATGCGCTCTATTCTCTGG IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII
-seq9 0 phiX174 2596 37 16M1I19M * 0 0 GCCGCTATTCAGGTTGTTTTCTGTTGGTGCTGATAT IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII XT:A:U NM:i:2 X0:i:1 XM:i:1 XO:i:1 XG:i:1 MD:Z:7A27
+seq30 0 phiX174 4091 37 18M1I17M * 0 0 ATCCCCTATGCTTGGCTTACCATAAGCAGATGGATA IIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIIII XT:A:U NM:i:2 X0:i:1 X1:i:0 XM:i:1 XO:i:1 XG:i:1 MD:Z:6A28
diff -r 40ad570cf570d4f2f2dcbf87c5eb3c212efe6f1c -r 0ef82aa1c39cfb192ed6d1c0a1ed3a1a9e103485 test/base/twilltestcase.py
--- a/test/base/twilltestcase.py
+++ b/test/base/twilltestcase.py
@@ -729,6 +729,7 @@
self.verify_extra_files_content( extra_files, elem.get( 'id' ), shed_tool_id=shed_tool_id )
except AssertionError, err:
errmsg = 'History item %s different than expected, difference (using %s):\n' % ( hid, compare )
+ errmsg += "( %s v. %s )\n" % ( local_name, temp_name )
errmsg += str( err )
raise AssertionError( errmsg )
finally:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.

14 Aug '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/40ad570cf570/
changeset: 40ad570cf570
user: jgoecks
date: 2012-08-14 18:17:01
summary: Include fusions output in Tophat2 wrapper.
affected #: 1 file
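The new <data> output is guarded by a <filter> tag whose body is a Python expression evaluated against the tool's parameters, so the fusions file is only collected when fusion searching was turned on. A simplified illustration of how such a filter expression behaves (the evaluation function here is a stand-in, not Galaxy's actual machinery):

    def output_is_wanted(filter_expr, params):
        # A Galaxy-style output filter holds a Python expression; a falsy or
        # failing expression means the output dataset is skipped (permissive here).
        try:
            return bool(eval(filter_expr, {"__builtins__": {}}, {"params": params}))
        except Exception:
            return True

    params = {"fusion_search": {"do_search": "Yes"}}
    print(output_is_wanted("(params['fusion_search']['do_search'] == 'Yes')", params))  # True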
diff -r be81990d148a81bc2896c3c543d2ff70ad9c3c67 -r 40ad570cf570d4f2f2dcbf87c5eb3c212efe6f1c tools/ngs_rna/tophat2_wrapper.xml
--- a/tools/ngs_rna/tophat2_wrapper.xml
+++ b/tools/ngs_rna/tophat2_wrapper.xml
@@ -296,6 +296,9 @@
</inputs><outputs>
+ <data format="tabular" name="fusions" label="${tool.name} on ${on_string}: fusions" from_work_dir="tophat_out/fusions.out">
+ <filter>(params['fusion_search']['do_search'] == 'Yes')</filter>
+ </data><data format="bed" name="insertions" label="${tool.name} on ${on_string}: insertions" from_work_dir="tophat_out/insertions.bed"><actions><conditional name="refGenomeSource.genomeSource">
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.

commit/galaxy-central: greg: Enable reversing the list of tool version ids for tools included in tool shed repositories installed into a local Galaxy instance.
by Bitbucket 13 Aug '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/be81990d148a/
changeset: be81990d148a
user: greg
date: 2012-08-13 22:41:47
summary: Enable reversing the list of tool version ids for tools included in tool shed repositories installed into a local Galaxy instance.
affected #: 2 files
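The reverse branch added to get_version_ids builds the newest-first list by repeatedly inserting at index 0; an equivalent way to express the same result is shown below (ToolVersion here is a stand-in for the model class, not Galaxy's actual implementation):

    from collections import namedtuple

    ToolVersion = namedtuple("ToolVersion", "tool_id")   # stand-in for the model class

    def get_version_ids(tool_versions, reverse=False):
        """Return tool ids in lineage order, or newest-first when reverse=True."""
        ids = [tv.tool_id for tv in tool_versions]
        return list(reversed(ids)) if reverse else ids

    lineage = [ToolVersion("bwa/0.1"), ToolVersion("bwa/0.2"), ToolVersion("bwa/0.3")]
    print(get_version_ids(lineage, reverse=True))   # ['bwa/0.3', 'bwa/0.2', 'bwa/0.1']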
diff -r 0bd2cc07fdd896ca0bdbc6d23f7ac077218f007a -r be81990d148a81bc2896c3c543d2ff70ad9c3c67 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -3050,7 +3050,12 @@
tool_versions.append( self )
__descendants( app, self )
return tool_versions
- def get_version_ids( self, app ):
+ def get_version_ids( self, app, reverse=False ):
+ if reverse:
+ version_ids = []
+ for tool_version in self.get_versions( app ):
+ version_ids.insert( 0, tool_version.tool_id )
+ return version_ids
return [ tool_version.tool_id for tool_version in self.get_versions( app ) ]
class ToolVersionAssociation( object ):
diff -r 0bd2cc07fdd896ca0bdbc6d23f7ac077218f007a -r be81990d148a81bc2896c3c543d2ff70ad9c3c67 lib/galaxy/web/controllers/admin_toolshed.py
--- a/lib/galaxy/web/controllers/admin_toolshed.py
+++ b/lib/galaxy/web/controllers/admin_toolshed.py
@@ -499,7 +499,7 @@
return get_repository_file_contents( file_path )
def get_versions_of_tool( self, app, guid ):
tool_version = get_tool_version( app, guid )
- return tool_version.get_version_ids( app )
+ return tool_version.get_version_ids( app, reverse=True )
@web.expose
@web.require_admin
def initiate_repository_installation( self, trans, shed_repository_ids, encoded_kwd, reinstalling=False ):
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.

13 Aug '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/0bd2cc07fdd8/
changeset: 0bd2cc07fdd8
user: greg
date: 2012-08-13 20:54:07
summary: Fixes for setting metadata on tool shed repositories; also add tool lineage information to the view tool metadata page in the tool shed as well as for tool shed repositories installed into a local Galaxy instance.
affected #: 10 files
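A central detail in this changeset is that, when all metadata is being reset, the repository is cloned to a temporary directory, so tool config and sample file paths discovered there have to be re-rooted under the repository's installed location before they are stored in metadata. A small sketch of that path adjustment (the function name and example paths are illustrative, and lstrip('/') stands in for the startswith('/') check in the diff):

    import os

    def to_relative_config_path(full_path, work_dir, relative_install_dir):
        # Strip the temporary clone prefix, then re-root the remainder under
        # the installed repository directory.
        stripped = full_path.replace(work_dir, '').lstrip('/')
        return os.path.join(relative_install_dir, stripped)

    print(to_relative_config_path(
        '/tmp/tmpXYZ/tools/bwa_wrapper.xml',                          # path inside the temp clone
        '/tmp/tmpXYZ',                                                # temporary work_dir
        'shed_tools/toolshed.example.org/repos/devteam/bwa/abc123'))  # installed location (made up)
    # -> shed_tools/toolshed.example.org/repos/devteam/bwa/abc123/tools/bwa_wrapper.xml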
diff -r 713ee9f0ae6a005d8a283427c496c27f9f83c43b -r 0bd2cc07fdd896ca0bdbc6d23f7ac077218f007a lib/galaxy/tool_shed/install_manager.py
--- a/lib/galaxy/tool_shed/install_manager.py
+++ b/lib/galaxy/tool_shed/install_manager.py
@@ -132,7 +132,11 @@
tool_panel_dict_for_tool_config = generate_tool_panel_dict_for_tool_config( guid, tool_config, tool_sections=tool_sections )
for k, v in tool_panel_dict_for_tool_config.items():
tool_panel_dict_for_display[ k ] = v
- metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( self.app, relative_install_dir, repository_clone_url )
+ metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( app=self.app,
+ repository_clone_url=repository_clone_url,
+ relative_install_dir=relative_install_dir,
+ repository_files_dir=None,
+ resetting_all_metadata_on_repository=False )
tool_shed_repository.metadata = metadata_dict
self.app.sa_session.add( tool_shed_repository )
self.app.sa_session.flush()
diff -r 713ee9f0ae6a005d8a283427c496c27f9f83c43b -r 0bd2cc07fdd896ca0bdbc6d23f7ac077218f007a lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -324,7 +324,7 @@
# Eliminate the port, if any, since it will result in an invalid directory name.
return tool_shed_url.split( ':' )[ 0 ]
return tool_shed_url.rstrip( '/' )
-def clone_repository( repository_clone_url, repository_file_dir, ctx_rev ):
+def clone_repository( repository_clone_url, repository_file_dir, ctx_rev ):
"""Clone the repository up to the specified changeset_revision. No subsequent revisions will be present in the cloned repository."""
commands.clone( get_configured_ui(),
str( repository_clone_url ),
@@ -548,24 +548,55 @@
if not can_generate_dependency_metadata:
break
return can_generate_dependency_metadata
-def generate_metadata_for_changeset_revision( app, repository_files_dir, repository_clone_url ):
+def generate_metadata_for_changeset_revision( app, repository_clone_url, relative_install_dir=None, repository_files_dir=None, resetting_all_metadata_on_repository=False ):
"""
Generate metadata for a repository using it's files on disk. To generate metadata for changeset revisions older than the repository tip,
the repository will have been cloned to a temporary location and updated to a specified changeset revision to access that changeset revision's
- disk files, so the value of repository_files_dir will not always be repository.repo_path (it could be a temporary directory containing a clone).
+ disk files, so the value of repository_files_dir will not always be repository.repo_path (it could be an absolute path to a temporary directory
+ containing a clone). If it is an absolute path, the value of relative_install_dir must contain repository.repo_path.
"""
metadata_dict = {}
invalid_file_tups = []
invalid_tool_configs = []
tool_dependencies_config = None
- datatypes_config = get_config_from_disk( 'datatypes_conf.xml', repository_files_dir )
+ original_tool_data_path = app.config.tool_data_path
+ if resetting_all_metadata_on_repository:
+ if not relative_install_dir:
+ raise Exception( "The value of repository.repo_path must be sent when resetting all metadata on a repository." )
+ # Keep track of the location where the repository is temporarily cloned so that we can strip the path when setting metadata. The value of
+ # repository_files_dir is the full path to the temporary directory to which the repository was cloned.
+ work_dir = repository_files_dir
+ files_dir = repository_files_dir
+ # Since we're working from a temporary directory, we can safely copy sample files included in the repository to the repository root.
+ app.config.tool_data_path = repository_files_dir
+ else:
+ # Use a temporary working directory to copy all sample files.
+ work_dir = tempfile.mkdtemp()
+ # All other files are on disk in the repository's repo_path, which is the value of relative_install_dir.
+ files_dir = relative_install_dir
+ app.config.tool_data_path = work_dir
+ # Handle proprietary datatypes, if any.
+ datatypes_config = get_config_from_disk( 'datatypes_conf.xml', files_dir )
if datatypes_config:
metadata_dict = generate_datatypes_metadata( datatypes_config, metadata_dict )
- sample_files = get_sample_files_from_disk( repository_files_dir )
+ # Get the relative path to all sample files included in the repository for storage in the repository's metadata.
+ sample_files = get_sample_files_from_disk( repository_files_dir=files_dir,
+ relative_install_dir=relative_install_dir,
+ resetting_all_metadata_on_repository=resetting_all_metadata_on_repository )
if sample_files:
metadata_dict[ 'sample_files' ] = sample_files
- # Find all tool configs and exported workflows.
- for root, dirs, files in os.walk( repository_files_dir ):
+ # Copy all sample files included in the repository to a single directory location so we can load tools that depend on them.
+ for sample_file in sample_files:
+ copy_sample_file( app, sample_file, dest_path=work_dir )
+ # If the list of sample files includes a tool_data_table_conf.xml.sample file, load its table elements into memory.
+ relative_path, filename = os.path.split( sample_file )
+ if filename == 'tool_data_table_conf.xml.sample':
+ new_table_elems = app.tool_data_tables.add_new_entries_from_config_file( config_filename=sample_file,
+ tool_data_path=app.config.tool_data_path,
+ tool_data_table_config_path=app.config.tool_data_table_config_path,
+ persist=False )
+ # Find all tool configs and exported workflows and add them to the repository's metadata.
+ for root, dirs, files in os.walk( files_dir ):
if root.find( '.hg' ) < 0 and root.find( 'hgrc' ) < 0:
if '.hg' in dirs:
dirs.remove( '.hg' )
@@ -586,11 +617,19 @@
if is_tool:
try:
tool = app.toolbox.load_tool( full_path )
+ except KeyError, e:
+ tool = None
+ invalid_tool_configs.append( name )
+ error_message = 'This file requires an entry for "%s" in the tool_data_table_conf.xml file. Upload a file ' % str( e )
+ error_message += 'named tool_data_table_conf.xml.sample to the repository that includes the required entry to correct '
+ error_message += 'this error. '
+ invalid_file_tups.append( ( name, error_message ) )
except Exception, e:
tool = None
invalid_tool_configs.append( name )
+ invalid_file_tups.append( ( name, str( e ) ) )
if tool is not None:
- invalid_files_and_errors_tups = check_tool_input_params( app, repository_files_dir, name, tool, sample_files )
+ invalid_files_and_errors_tups = check_tool_input_params( app, files_dir, name, tool, sample_files )
can_set_metadata = True
for tup in invalid_files_and_errors_tups:
if name in tup:
@@ -598,7 +637,15 @@
invalid_tool_configs.append( name )
break
if can_set_metadata:
- metadata_dict = generate_tool_metadata( os.path.join( root, name ), tool, repository_clone_url, metadata_dict )
+ if resetting_all_metadata_on_repository:
+ full_path_to_tool_config = os.path.join( root, name )
+ stripped_path_to_tool_config = full_path_to_tool_config.replace( work_dir, '' )
+ if stripped_path_to_tool_config.startswith( '/' ):
+ stripped_path_to_tool_config = stripped_path_to_tool_config[ 1: ]
+ relative_path_to_tool_config = os.path.join( relative_install_dir, stripped_path_to_tool_config )
+ else:
+ relative_path_to_tool_config = os.path.join( root, name )
+ metadata_dict = generate_tool_metadata( relative_path_to_tool_config, tool, repository_clone_url, metadata_dict )
else:
invalid_file_tups.extend( invalid_files_and_errors_tups )
# Find all exported workflows
@@ -612,11 +659,16 @@
metadata_dict = generate_workflow_metadata( relative_path, exported_workflow_dict, metadata_dict )
if 'tools' in metadata_dict:
# This step must be done after metadata for tools has been defined.
- tool_dependencies_config = get_config_from_disk( 'tool_dependencies.xml', repository_files_dir )
+ tool_dependencies_config = get_config_from_disk( 'tool_dependencies.xml', files_dir )
if tool_dependencies_config:
metadata_dict = generate_tool_dependency_metadata( tool_dependencies_config, metadata_dict )
if invalid_tool_configs:
metadata_dict [ 'invalid_tools' ] = invalid_tool_configs
+ if resetting_all_metadata_on_repository:
+ # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
+ reset_tool_data_tables( app )
+ # Reset the value of the app's tool_data_path to it's original value.
+ app.config.tool_data_path = original_tool_data_path
return metadata_dict, invalid_file_tups
def generate_package_dependency_metadata( elem, tool_dependencies_dict ):
"""The value of package_name must match the value of the "package" type in the tool config's <requirements> tag set."""
@@ -1028,13 +1080,24 @@
if tool:
repository_tools_tups.append( ( relative_path, guid, tool ) )
return repository_tools_tups
-def get_sample_files_from_disk( relative_install_dir ):
+def get_sample_files_from_disk( repository_files_dir, relative_install_dir=None, resetting_all_metadata_on_repository=False ):
+ if resetting_all_metadata_on_repository:
+ # Keep track of the location where the repository is temporarily cloned so that we can strip it when setting metadata.
+ work_dir = repository_files_dir
sample_files = []
- for root, dirs, files in os.walk( relative_install_dir ):
+ for root, dirs, files in os.walk( repository_files_dir ):
if root.find( '.hg' ) < 0:
for name in files:
if name.endswith( '.sample' ):
- sample_files.append( os.path.join( root, name ) )
+ if resetting_all_metadata_on_repository:
+ full_path_to_sample_file = os.path.join( root, name )
+ stripped_path_to_sample_file = full_path_to_sample_file.replace( work_dir, '' )
+ if stripped_path_to_sample_file.startswith( '/' ):
+ stripped_path_to_sample_file = stripped_path_to_sample_file[ 1: ]
+ relative_path_to_sample_file = os.path.join( relative_install_dir, stripped_path_to_sample_file )
+ else:
+ relative_path_to_sample_file = os.path.join( root, name )
+ sample_files.append( relative_path_to_sample_file )
return sample_files
def get_shed_tool_conf_dict( app, shed_tool_conf ):
"""
diff -r 713ee9f0ae6a005d8a283427c496c27f9f83c43b -r 0bd2cc07fdd896ca0bdbc6d23f7ac077218f007a lib/galaxy/web/controllers/admin_toolshed.py
--- a/lib/galaxy/web/controllers/admin_toolshed.py
+++ b/lib/galaxy/web/controllers/admin_toolshed.py
@@ -497,6 +497,9 @@
trans.response.headers['Pragma'] = 'no-cache'
trans.response.headers['Expires'] = '0'
return get_repository_file_contents( file_path )
+ def get_versions_of_tool( self, app, guid ):
+ tool_version = get_tool_version( app, guid )
+ return tool_version.get_version_ids( app )
@web.expose
@web.require_admin
def initiate_repository_installation( self, trans, shed_repository_ids, encoded_kwd, reinstalling=False ):
@@ -684,7 +687,11 @@
Generate the metadata for the installed tool shed repository, among other things. This method is called from Galaxy (never the tool shed)
when an admin is installing a new repository or reinstalling an uninstalled repository.
"""
- metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( trans.app, relative_install_dir, repository_clone_url )
+ metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( app=trans.app,
+ repository_clone_url=repository_clone_url,
+ relative_install_dir=relative_install_dir,
+ repository_files_dir=None,
+ resetting_all_metadata_on_repository=False )
tool_shed_repository.metadata = metadata_dict
trans.sa_session.add( tool_shed_repository )
trans.sa_session.flush()
@@ -779,7 +786,14 @@
message = "The repository information has been updated."
elif params.get( 'set_metadata_button', False ):
repository_clone_url = generate_clone_url( trans, repository )
- metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( trans.app, relative_install_dir, repository_clone_url )
+ # TODO: Fix this by setting up a temporary work_dir - there is currently no way for an admin to reset metadata on an
+ # installed tool shed repository because the manage_repository template checks the value of can_reset_metadata, which is
+ # currently always False.
+ metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( app=trans.app,
+ repository_clone_url=repository_clone_url,
+ relative_install_dir=None,
+ repository_files_dir=repository_files_dir,
+ resetting_all_metadata_on_repository=True )
if metadata_dict:
repository.metadata = metadata_dict
trans.sa_session.add( repository )
@@ -1479,7 +1493,11 @@
update_repository( repo, latest_ctx_rev )
# Update the repository metadata.
tool_shed = clean_tool_shed_url( tool_shed_url )
- metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( trans.app, relative_install_dir, repository_clone_url )
+ metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( app=trans.app,
+ repository_clone_url=repository_clone_url,
+ relative_install_dir=relative_install_dir,
+ repository_files_dir=None,
+ resetting_all_metadata_on_repository=False )
repository.metadata = metadata_dict
# Update the repository changeset_revision in the database.
repository.changeset_revision = latest_changeset_revision
@@ -1515,17 +1533,21 @@
webapp = get_webapp( trans, **kwd )
repository = get_repository( trans, repository_id )
metadata = {}
+ tool_lineage = []
tool = None
if 'tools' in repository.metadata:
for tool_metadata_dict in repository.metadata[ 'tools' ]:
if tool_metadata_dict[ 'id' ] == tool_id:
metadata = tool_metadata_dict
tool = trans.app.toolbox.load_tool( os.path.abspath( metadata[ 'tool_config' ] ), guid=metadata[ 'guid' ] )
+ if tool:
+ tool_lineage = self.get_versions_of_tool( trans.app, tool.id )
break
return trans.fill_template( "/admin/tool_shed_repository/view_tool_metadata.mako",
repository=repository,
tool=tool,
metadata=metadata,
+ tool_lineage=tool_lineage,
message=message,
status=status )
def __generate_clone_url( self, trans, repository ):
diff -r 713ee9f0ae6a005d8a283427c496c27f9f83c43b -r 0bd2cc07fdd896ca0bdbc6d23f7ac077218f007a lib/galaxy/webapps/community/controllers/common.py
--- a/lib/galaxy/webapps/community/controllers/common.py
+++ b/lib/galaxy/webapps/community/controllers/common.py
@@ -105,34 +105,6 @@
trans.sa_session.flush()
return item_rating
-def add_repository_metadata_tool_versions( trans, id, changeset_revisions ):
- # If a repository includes tools, build a dictionary of { 'tool id' : 'parent tool id' } pairs for each tool in each changeset revision.
- for index, changeset_revision in enumerate( changeset_revisions ):
- tool_versions_dict = {}
- repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
- if repository_metadata:
- metadata = repository_metadata.metadata
- if metadata:
- tool_dicts = metadata.get( 'tools', [] )
- if index == 0:
- # The first changset_revision is a special case because it will have no ancestor changeset_revisions in which to match tools.
- # The parent tool id for tools in the first changeset_revision will be the "old_id" in the tool config.
- for tool_dict in tool_dicts:
- tool_versions_dict[ tool_dict[ 'guid' ] ] = tool_dict[ 'id' ]
- else:
- for tool_dict in tool_dicts:
- # We have at least 2 changeset revisions to compare tool guids and tool ids.
- parent_id = get_parent_id( trans,
- id,
- tool_dict[ 'id' ],
- tool_dict[ 'version' ],
- tool_dict[ 'guid' ],
- changeset_revisions[ 0:index ] )
- tool_versions_dict[ tool_dict[ 'guid' ] ] = parent_id
- if tool_versions_dict:
- repository_metadata.tool_versions = tool_versions_dict
- trans.sa_session.add( repository_metadata )
- trans.sa_session.flush()
def changeset_is_malicious( trans, id, changeset_revision, **kwd ):
"""Check the malicious flag in repository metadata for a specified change set"""
repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
@@ -155,7 +127,6 @@
# We sometimes see multiple records with the same changeset revision value - no idea how this happens. We'll assume we can delete the older
# records, so we'll order by update_time descending and delete records that have the same changeset_revision we come across later..
changeset_revisions_checked = []
- cleaned_changeset_revisions = []
for repository_metadata in trans.sa_session.query( trans.model.RepositoryMetadata ) \
.filter( trans.model.RepositoryMetadata.table.c.repository_id == trans.security.decode_id( id ) ) \
.order_by( trans.model.RepositoryMetadata.table.c.changeset_revision,
@@ -165,9 +136,6 @@
if can_delete:
trans.sa_session.delete( repository_metadata )
trans.sa_session.flush()
- else:
- cleaned_changeset_revisions.append( changeset_revision )
- return cleaned_changeset_revisions
def compare_changeset_revisions( ancestor_changeset_revision, ancestor_metadata_dict, current_changeset_revision, current_metadata_dict ):
# The metadata associated with ancestor_changeset_revision is ancestor_metadata_dict. This changeset_revision is an ancestor of
# current_changeset_revision which is associated with current_metadata_dict. A new repository_metadata record will be created only
@@ -599,7 +567,7 @@
is a valid (downloadable) changeset revision. The tool config will be located in the repository manifest between the received valid changeset
revision and the first changeset revision in the repository, searching backwards.
"""
- def load_from_tmp_config( ctx, ctx_file, work_dir ):
+ def load_from_tmp_config( toolbox, ctx, ctx_file, work_dir ):
tool = None
message = ''
tmp_tool_config = get_named_tmpfile_from_ctx( ctx, ctx_file, work_dir )
@@ -614,10 +582,12 @@
if tmp_code_file_name:
tmp_code_files.append( tmp_code_file_name )
try:
- tool = load_tool( trans, tmp_tool_config )
+ tool = toolbox.load_tool( tmp_tool_config )
+ except KeyError, e:
+ message = '<b>%s</b> - This file requires an entry for %s in the tool_data_table_conf.xml file. ' % ( tool_config_filename, str( e ) )
+ message += 'Upload a file named tool_data_table_conf.xml.sample to the repository that includes the required entry to correct this error. '
except Exception, e:
- tool = None
- message = "Error loading tool: %s. " % str( e )
+ message = 'Error loading tool: %s. ' % str( e )
for tmp_code_file in tmp_code_files:
try:
os.unlink( tmp_code_file )
@@ -634,16 +604,15 @@
repo_files_dir = repository.repo_path
repo = hg.repository( get_configured_ui(), repo_files_dir )
ctx = get_changectx_for_changeset( repo, changeset_revision )
- tool = None
message = ''
work_dir = tempfile.mkdtemp()
sample_files, deleted_sample_files = get_list_of_copied_sample_files( repo, ctx, dir=work_dir )
if sample_files:
trans.app.config.tool_data_path = work_dir
- # Load entries into the tool_data_tables if the tool requires them.
- tool_data_table_config = copy_file_from_manifest( repo, ctx, 'tool_data_table_conf.xml.sample', work_dir )
- if tool_data_table_config:
- error, correction_msg = handle_sample_tool_data_table_conf_file( trans.app, tool_data_table_config )
+ if 'tool_data_table_conf.xml.sample' in sample_files:
+ # Load entries into the tool_data_tables if the tool requires them.
+ tool_data_table_config = os.path.join( work_dir, 'tool_data_table_conf.xml' )
+ error, correction_msg = handle_sample_tool_data_table_conf_file( trans.app, tool_data_table_config )
found = False
# Get the latest revision of the tool config from the repository manifest up to the value of changeset_revision.
for changeset in reversed_upper_bounded_changelog( repo, changeset_revision ):
@@ -655,16 +624,16 @@
found = True
break
if found:
- tool, message = load_from_tmp_config( manifest_ctx, ctx_file, work_dir )
+ tool, message = load_from_tmp_config( trans.app.toolbox, manifest_ctx, ctx_file, work_dir )
break
- # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
- reset_tool_data_tables( trans.app )
try:
shutil.rmtree( work_dir )
except:
pass
if sample_files:
trans.app.config.tool_data_path = original_tool_data_path
+ # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
+ reset_tool_data_tables( trans.app )
return tool, message
def load_tool_from_tmp_directory( trans, repo, repo_dir, ctx, filename, dir ):
is_tool_config = False
@@ -763,6 +732,41 @@
# The received metadata_dict includes no metadata for workflows, so a new repository_metadata table record is not needed.
return False
def reset_all_metadata_on_repository( trans, id, **kwd ):
+ def reset_all_tool_versions( trans, id, repo ):
+ changeset_revisions = []
+ for changeset in repo.changelog:
+ changeset_revision = str( repo.changectx( changeset ) )
+ repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
+ if repository_metadata:
+ metadata = repository_metadata.metadata
+ if metadata:
+ if metadata.get( 'tools', None ):
+ changeset_revisions.append( changeset_revision )
+ # The list of changeset_revisions is now filtered to contain only those that are downloadable and contain tools.
+ # If a repository includes tools, build a dictionary of { 'tool id' : 'parent tool id' } pairs for each tool in each changeset revision.
+ for index, changeset_revision in enumerate( changeset_revisions ):
+ tool_versions_dict = {}
+ repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
+ metadata = repository_metadata.metadata
+ tool_dicts = metadata[ 'tools' ]
+ if index == 0:
+ # The first changeset_revision is a special case because it will have no ancestor changeset_revisions in which to match tools.
+ # The parent tool id for tools in the first changeset_revision will be the "old_id" in the tool config.
+ for tool_dict in tool_dicts:
+ tool_versions_dict[ tool_dict[ 'guid' ] ] = tool_dict[ 'id' ]
+ else:
+ for tool_dict in tool_dicts:
+ parent_id = get_parent_id( trans,
+ id,
+ tool_dict[ 'id' ],
+ tool_dict[ 'version' ],
+ tool_dict[ 'guid' ],
+ changeset_revisions[ 0:index ] )
+ tool_versions_dict[ tool_dict[ 'guid' ] ] = parent_id
+ if tool_versions_dict:
+ repository_metadata.tool_versions = tool_versions_dict
+ trans.sa_session.add( repository_metadata )
+ trans.sa_session.flush()
params = util.Params( kwd )
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'done' )
@@ -784,10 +788,14 @@
work_dir = tempfile.mkdtemp()
current_changeset_revision = str( repo.changectx( changeset ) )
ctx = repo.changectx( changeset )
- print "Cloning repository revision: ", str( ctx.rev() )
+ log.debug( "Cloning repository revision: %s", str( ctx.rev() ) )
clone_repository( repository_clone_url, work_dir, str( ctx.rev() ) )
- print "Generating metadata for changset revision: ", str( ctx.rev() )
- current_metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( trans.app, work_dir, repository_clone_url )
+ log.debug( "Generating metadata for changeset revision: %s", str( ctx.rev() ) )
+ current_metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( app=trans.app,
+ repository_clone_url=repository_clone_url,
+ relative_install_dir=repo_dir,
+ repository_files_dir=work_dir,
+ resetting_all_metadata_on_repository=True )
if current_metadata_dict:
if not metadata_changeset_revision and not metadata_dict:
# We're at the first change set in the change log.
@@ -842,27 +850,49 @@
except:
pass
# Delete all repository_metadata records for this repository that do not have a changeset_revision value in changeset_revisions.
- cleaned_changeset_revisions = clean_repository_metadata( trans, id, changeset_revisions )
- # Set tool version information for all downloadable changeset revisions.
- add_repository_metadata_tool_versions( trans, id, cleaned_changeset_revisions )
+ clean_repository_metadata( trans, id, changeset_revisions )
+ # Set tool version information for all downloadable changeset revisions. Get the list of changeset revisions from the changelog.
+ reset_all_tool_versions( trans, id, repo )
def set_repository_metadata( trans, repository, content_alert_str='', **kwd ):
"""
Set metadata using the repository's current disk files, returning specific error messages (if any) to alert the repository owner that the changeset
has problems.
"""
+ def add_tool_versions( trans, id, repository_metadata, changeset_revisions ):
+ # Build a dictionary of { 'tool id' : 'parent tool id' } pairs for each tool in repository_metadata.
+ metadata = repository_metadata.metadata
+ tool_versions_dict = {}
+ for tool_dict in metadata.get( 'tools', [] ):
+ # We have at least 2 changeset revisions to compare tool guids and tool ids.
+ parent_id = get_parent_id( trans,
+ id,
+ tool_dict[ 'id' ],
+ tool_dict[ 'version' ],
+ tool_dict[ 'guid' ],
+ changeset_revisions )
+ tool_versions_dict[ tool_dict[ 'guid' ] ] = parent_id
+ if tool_versions_dict:
+ repository_metadata.tool_versions = tool_versions_dict
+ trans.sa_session.add( repository_metadata )
+ trans.sa_session.flush()
message = ''
status = 'done'
- repository_clone_url = generate_clone_url( trans, trans.security.encode_id( repository.id ) )
+ encoded_id = trans.security.encode_id( repository.id )
+ repository_clone_url = generate_clone_url( trans, encoded_id )
repo_dir = repository.repo_path
repo = hg.repository( get_configured_ui(), repo_dir )
- metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( trans.app, repo_dir, repository_clone_url )
+ metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( app=trans.app,
+ repository_clone_url=repository_clone_url,
+ relative_install_dir=repo_dir,
+ repository_files_dir=None,
+ resetting_all_metadata_on_repository=False )
if metadata_dict:
downloadable = is_downloadable( metadata_dict )
repository_metadata = None
if new_tool_metadata_required( trans, repository, metadata_dict ) or new_workflow_metadata_required( trans, repository, metadata_dict ):
# Create a new repository_metadata table row.
repository_metadata = create_or_update_repository_metadata( trans,
- trans.security.encode_id( repository.id ),
+ encoded_id,
repository,
repository.tip,
metadata_dict )
@@ -882,21 +912,18 @@
else:
# There are no tools in the repository, and we're setting metadata on the repository tip.
repository_metadata = create_or_update_repository_metadata( trans,
- trans.security.encode_id( repository.id ),
+ encoded_id,
repository,
repository.tip,
metadata_dict )
if 'tools' in metadata_dict and repository_metadata and status != 'error':
# Set tool versions on the new downloadable change set. The order of the list of changesets is critical, so we use the repo's changelog.
- downloadable_changeset_revisions = [ rm.changeset_revision for rm in repository.downloadable_revisions ]
changeset_revisions = []
for changeset in repo.changelog:
changeset_revision = str( repo.changectx( changeset ) )
- if changeset_revision in downloadable_changeset_revisions:
+ if get_repository_metadata_by_changeset_revision( trans, encoded_id, changeset_revision ):
changeset_revisions.append( changeset_revision )
- # Now append the latest changeset_revision we just updated above.
- changeset_revisions.append( repository_metadata.changeset_revision )
- add_repository_metadata_tool_versions( trans, trans.security.encode_id( repository.id ), changeset_revisions )
+ add_tool_versions( trans, encoded_id, repository_metadata, changeset_revisions )
elif len( repo ) == 1 and not invalid_file_tups:
message = "Revision '%s' includes no tools, datatypes or exported workflows for which metadata can " % str( repository.tip )
message += "be defined so this revision cannot be automatically installed into a local Galaxy instance."
diff -r 713ee9f0ae6a005d8a283427c496c27f9f83c43b -r 0bd2cc07fdd896ca0bdbc6d23f7ac077218f007a lib/galaxy/webapps/community/controllers/hg.py
--- a/lib/galaxy/webapps/community/controllers/hg.py
+++ b/lib/galaxy/webapps/community/controllers/hg.py
@@ -6,7 +6,6 @@
from galaxy import eggs
eggs.require('mercurial')
import mercurial.__version__
-from mercurial import hg, ui, commands
from mercurial.hgweb.hgwebdir_mod import hgwebdir
from mercurial.hgweb.request import wsgiapplication
@@ -20,33 +19,24 @@
hg_version = mercurial.__version__.version
cmd = kwd.get( 'cmd', None )
wsgi_app = wsgiapplication( make_web_app )
- # In mercurial version 2.2.3, section 15.2. Command changes includes a new feature: pushkey: add hooks for pushkey/listkeys (see
- # http://mercurial.selenic.com/wiki/WhatsNew#Mercurial_2.2.3_.282012-07-01.29) Older versions require checking for 'listkeys'.
- push_from_command_line = ( hg_version < '2.2.3' and cmd == 'listkeys' ) or ( hg_version >= '2.2.3' and cmd == 'pushkey' )
- if push_from_command_line:
+ if hg_version >= '2.2.3' and cmd == 'pushkey':
# When doing an "hg push" from the command line, the following commands, in order, will be retrieved from environ, depending
- # upon the mercurial version being used. There is a weakness if the mercurial version < '2.2.3' because several commands include
- # listkeys, so repository metadata will be set, but only for the files currently on disk, so doing so is not too expensive.
- # If mercurial version < '2.2.3:
- # capabilities -> batch -> branchmap -> unbundle -> listkeys
- # If mercurial version >= '2.2.3':
- # capabilities -> batch -> branchmap -> unbundle -> listkeys -> pushkey
+ # upon the mercurial version being used. In mercurial version 2.2.3, section 15.2. Command changes includes a new feature:
+ # pushkey: add hooks for pushkey/listkeys (see http://mercurial.selenic.com/wiki/WhatsNew#Mercurial_2.2.3_.282012-07-01.29)
+ # We require version 2.2.3 since the pushkey hook was added in that version.
+ # If mercurial version >= '2.2.3': capabilities -> batch -> branchmap -> unbundle -> listkeys -> pushkey
path_info = kwd.get( 'path_info', None )
if path_info:
owner, name = path_info.split( '/' )
repository = get_repository_by_name_and_owner( trans, name, owner )
if repository:
- if hg_version < '2.2.3':
- # We're forced to update the repository so the disk files include the changes in the push. This is handled in the
- # pushkey hook in mercurial version 2.2.3 and newer.
- repo = hg.repository( ui.ui(), repository.repo_path )
- update_repository( repo )
- # Set metadata using the repository files on disk.
- error_message, status = set_repository_metadata( trans, repository )
- if status not in [ 'ok' ] and error_message:
- log.debug( "Error resetting metadata on repository '%s': %s" % ( str( repository.name ), str( error_message ) ) )
- elif status in [ 'ok' ] and error_message:
- log.debug( "Successfully reset metadata on repository %s, but encountered problem: %s" % ( str( repository.name ), str( error_message ) ) )
+ if hg_version >= '2.2.3':
+ # Set metadata using the repository files on disk.
+ error_message, status = set_repository_metadata( trans, repository )
+ if status not in [ 'ok' ] and error_message:
+ log.debug( "Error resetting metadata on repository '%s': %s" % ( str( repository.name ), str( error_message ) ) )
+ elif status in [ 'ok' ] and error_message:
+ log.debug( "Successfully reset metadata on repository %s, but encountered problem: %s" % ( str( repository.name ), str( error_message ) ) )
return wsgi_app
def make_web_app():
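One note on the pushkey check above: hg_version is compared as a string, which is fine for the versions involved here, but lexicographic comparison would misorder something like '2.10' against '2.2.3'. A hedged alternative, purely for illustration and not part of this changeset, is to compare numeric tuples:

    def version_tuple( version_str ):
        # Turn '2.2.3' into (2, 2, 3) so comparisons are numeric rather than lexicographic.
        parts = []
        for piece in version_str.split( '.' ):
            digits = ''.join( ch for ch in piece if ch.isdigit() )
            parts.append( int( digits ) if digits else 0 )
        return tuple( parts )

    if version_tuple( hg_version ) >= version_tuple( '2.2.3' ) and cmd == 'pushkey':
        # proceed exactly as in the handler above
        pass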
diff -r 713ee9f0ae6a005d8a283427c496c27f9f83c43b -r 0bd2cc07fdd896ca0bdbc6d23f7ac077218f007a lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -10,7 +10,8 @@
from galaxy.util.json import from_json_string, to_json_string
from galaxy.model.orm import *
from galaxy.util.shed_util import create_repo_info_dict, get_changectx_for_changeset, get_configured_ui, get_repository_file_contents, NOT_TOOL_CONFIGS
-from galaxy.util.shed_util import open_repository_files_folder, reversed_lower_upper_bounded_changelog, strip_path, to_html_escaped, update_repository
+from galaxy.util.shed_util import open_repository_files_folder, reversed_lower_upper_bounded_changelog, reversed_upper_bounded_changelog, strip_path
+from galaxy.util.shed_util import to_html_escaped, update_repository
from galaxy.tool_shed.encoding_util import *
from common import *
@@ -1268,6 +1269,36 @@
update_dict[ 'changeset_revision' ] = str( latest_changeset_revision )
update_dict[ 'ctx_rev' ] = str( update_to_ctx.rev() )
return tool_shed_encode( update_dict )
+ def get_versions_of_tool( self, trans, repository, repository_metadata, guid ):
+ """Return the tool lineage in descendant order for the received guid contained in the received repository_metadata.tool_versions."""
+ encoded_id = trans.security.encode_id( repository.id )
+ repo_dir = repository.repo_path
+ repo = hg.repository( get_configured_ui(), repo_dir )
+ # Initialize the tool lineage
+ tool_guid_lineage = [ guid ]
+ # Get all ancestor guids of the received guid.
+ current_child_guid = guid
+ for changeset in reversed_upper_bounded_changelog( repo, repository_metadata.changeset_revision ):
+ ctx = repo.changectx( changeset )
+ rm = get_repository_metadata_by_changeset_revision( trans, encoded_id, str( ctx ) )
+ if rm:
+ parent_guid = rm.tool_versions.get( current_child_guid, None )
+ if parent_guid:
+ tool_guid_lineage.append( parent_guid )
+ current_child_guid = parent_guid
+ # Get all descendant guids of the received guid.
+ current_parent_guid = guid
+ for changeset in reversed_lower_upper_bounded_changelog( repo, repository_metadata.changeset_revision, repository.tip ):
+ ctx = repo.changectx( changeset )
+ rm = get_repository_metadata_by_changeset_revision( trans, encoded_id, str( ctx ) )
+ if rm:
+ tool_versions = rm.tool_versions
+ for child_guid, parent_guid in tool_versions.items():
+ if parent_guid == current_parent_guid:
+ tool_guid_lineage.insert( 0, child_guid )
+ current_parent_guid = child_guid
+ break
+ return tool_guid_lineage
@web.expose
def help( self, trans, **kwd ):
params = util.Params( kwd )
@@ -1348,23 +1379,12 @@
webapp = get_webapp( trans, **kwd )
repository_clone_url = generate_clone_url( trans, repository_id )
repository = get_repository( trans, repository_id )
- repo_dir = repository.repo_path
- repo = hg.repository( get_configured_ui(), repo_dir )
- ctx = get_changectx_for_changeset( repo, changeset_revision )
- invalid_message = ''
- metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( trans.app, repo_dir, repository_clone_url )
- for invalid_file_tup in invalid_file_tups:
- invalid_tool_config, invalid_msg = invalid_file_tup
- invalid_tool_config_name = strip_path( invalid_tool_config )
- if tool_config == invalid_tool_config_name:
- invalid_message = invalid_msg
- break
tool, error_message = load_tool_from_changeset_revision( trans, repository_id, changeset_revision, tool_config )
tool_state = self.__new_state( trans )
is_malicious = changeset_is_malicious( trans, repository_id, repository.tip )
try:
- if invalid_message:
- message = invalid_message
+ if error_message:
+ message = error_message
return trans.fill_template( "/webapps/community/repository/tool_form.mako",
repository=repository,
changeset_revision=changeset_revision,
@@ -2165,20 +2185,25 @@
status = params.get( 'status', 'done' )
webapp = get_webapp( trans, **kwd )
repository = get_repository( trans, repository_id )
- metadata = {}
+ tool_metadata_dict = {}
+ tool_lineage = []
tool = None
+ guid = None
revision_label = get_revision_label( trans, repository, changeset_revision )
- repository_metadata = get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision ).metadata
- if 'tools' in repository_metadata:
- for tool_metadata_dict in repository_metadata[ 'tools' ]:
+ repository_metadata = get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision )
+ metadata = repository_metadata.metadata
+ if 'tools' in metadata:
+ for tool_metadata_dict in metadata[ 'tools' ]:
if tool_metadata_dict[ 'id' ] == tool_id:
- metadata = tool_metadata_dict
+ guid = tool_metadata_dict[ 'guid' ]
try:
# We may be attempting to load a tool that no longer exists in the repository tip.
- tool = load_tool( trans, os.path.abspath( metadata[ 'tool_config' ] ) )
+ tool = load_tool( trans, os.path.abspath( tool_metadata_dict[ 'tool_config' ] ) )
except:
tool = None
break
+ if guid:
+ tool_lineage = self.get_versions_of_tool( trans, repository, repository_metadata, guid )
is_malicious = changeset_is_malicious( trans, repository_id, repository.tip )
changeset_revision_select_field = build_changeset_revision_select_field( trans,
repository,
@@ -2188,7 +2213,8 @@
return trans.fill_template( "/webapps/community/repository/view_tool_metadata.mako",
repository=repository,
tool=tool,
- metadata=metadata,
+ tool_metadata_dict=tool_metadata_dict,
+ tool_lineage=tool_lineage,
changeset_revision=changeset_revision,
revision_label=revision_label,
changeset_revision_select_field=changeset_revision_select_field,
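To make the lineage walk above concrete: each repository_metadata record stores tool_versions as a { child guid : parent guid } dictionary, and get_versions_of_tool follows those links backwards for ancestors and forwards for descendants. A toy sketch, with invented placeholder guids, of the ancestor half of that walk:

    tool_versions = {
        'toolshed/repos/owner/repo/filter/1.1.0': 'toolshed/repos/owner/repo/filter/1.0.0',
        'toolshed/repos/owner/repo/filter/1.2.0': 'toolshed/repos/owner/repo/filter/1.1.0',
    }

    def ancestors( guid, tool_versions ):
        # Follow child -> parent links until a guid with no recorded parent is reached.
        lineage = [ guid ]
        while guid in tool_versions:
            guid = tool_versions[ guid ]
            lineage.append( guid )
        return lineage

    print ancestors( 'toolshed/repos/owner/repo/filter/1.2.0', tool_versions )
    # prints the three guids ordered newest to oldest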
diff -r 713ee9f0ae6a005d8a283427c496c27f9f83c43b -r 0bd2cc07fdd896ca0bdbc6d23f7ac077218f007a lib/galaxy/webapps/community/framework/middleware/hg.py
--- a/lib/galaxy/webapps/community/framework/middleware/hg.py
+++ b/lib/galaxy/webapps/community/framework/middleware/hg.py
@@ -57,9 +57,9 @@
connection.execute( "update repository set times_downloaded = %d where user_id = %d and name = '%s'" % ( times_downloaded, user_id, name.lower() ) )
connection.close()
if cmd == 'unbundle':
- # This is an hg push from the command line. When doing this, the following 7 commands, in order,
- # will be retrieved from environ (see the docs at http://mercurial.selenic.com/wiki/WireProtocol)
- # between -> capabilities -> heads -> branchmap -> unbundle -> unbundle -> listkeys
+ # This is an hg push from the command line. When doing this, the following commands, in order,
+ # will be retrieved from environ (see the docs at http://mercurial.selenic.com/wiki/WireProtocol)
+ # If mercurial version >= '2.2.3': capabilities -> batch -> branchmap -> unbundle -> listkeys -> pushkey
#
# The mercurial API unbundle() ( i.e., hg push ) method ultimately requires authorization.
# We'll force password entry every time a change set is pushed.
diff -r 713ee9f0ae6a005d8a283427c496c27f9f83c43b -r 0bd2cc07fdd896ca0bdbc6d23f7ac077218f007a lib/galaxy/webapps/community/model/__init__.py
--- a/lib/galaxy/webapps/community/model/__init__.py
+++ b/lib/galaxy/webapps/community/model/__init__.py
@@ -200,7 +200,7 @@
self.repository = repository
self.category = category
-class Tag ( object ):
+class Tag( object ):
def __init__( self, id=None, type=None, parent_id=None, name=None ):
self.id = id
self.type = type
@@ -209,7 +209,7 @@
def __str__ ( self ):
return "Tag(id=%s, type=%i, parent_id=%s, name=%s)" % ( self.id, self.type, self.parent_id, self.name )
-class ItemTagAssociation ( object ):
+class ItemTagAssociation( object ):
def __init__( self, id=None, user=None, item_id=None, tag_id=None, user_tname=None, value=None ):
self.id = id
self.user = user
diff -r 713ee9f0ae6a005d8a283427c496c27f9f83c43b -r 0bd2cc07fdd896ca0bdbc6d23f7ac077218f007a templates/admin/tool_shed_repository/view_tool_metadata.mako
--- a/templates/admin/tool_shed_repository/view_tool_metadata.mako
+++ b/templates/admin/tool_shed_repository/view_tool_metadata.mako
@@ -26,6 +26,11 @@
<div class="toolFormTitle">${metadata[ 'name' ]} tool metadata</div><div class="toolFormBody"><div class="form-row">
+ <table width="100%">
+ <tr bgcolor="#D8D8D8" width="100%"><td><b>Miscellaneous</td></tr>
+ </table>
+ </div>
+ <div class="form-row"><label>Name:</label>
${metadata[ 'name' ]}
<div style="clear: both"></div>
@@ -65,33 +70,35 @@
<div style="clear: both"></div></div>
%endif
- %if tool:
- <div class="form-row">
- <label>Command:</label>
- <pre>${tool.command}</pre>
- <div style="clear: both"></div>
- </div>
- <div class="form-row">
- <label>Interpreter:</label>
- ${tool.interpreter}
- <div style="clear: both"></div>
- </div>
- <div class="form-row">
- <label>Is multi-byte:</label>
- ${tool.is_multi_byte}
- <div style="clear: both"></div>
- </div>
- <div class="form-row">
- <label>Forces a history refresh:</label>
- ${tool.force_history_refresh}
- <div style="clear: both"></div>
- </div>
- <div class="form-row">
- <label>Parallelism:</label>
- ${tool.parallelism}
- <div style="clear: both"></div>
- </div>
- %endif
+ <div class="form-row">
+ <table width="100%">
+ <tr bgcolor="#D8D8D8" width="100%"><td><b>Version lineage of this tool (guids ordered most recent to oldest)</td></tr>
+ </table>
+ </div>
+ <div class="form-row">
+ %if tool_lineage:
+ <table class="grid">
+ %for guid in tool_lineage:
+ <tr>
+ <td>
+ %if guid == metadata[ 'guid' ]:
+ ${guid} <b>(this tool)</b>
+ %else:
+ ${guid}
+ %endif
+ </td>
+ </tr>
+ %endfor
+ </table>
+ %else:
+ No tool versions are defined for this tool so it is critical that you <b>Set tool versions</b> from the <b>Manage repository</b> page.
+ %endif
+ </div>
+ <div class="form-row">
+ <table width="100%">
+ <tr bgcolor="#D8D8D8" width="100%"><td><b>Requirements (dependencies defined in the <requirements> tag set)</td></tr>
+ </table>
+ </div><%
if 'requirements' in metadata:
requirements = metadata[ 'requirements' ]
@@ -122,7 +129,48 @@
</table><div style="clear: both"></div></div>
+ %else:
+ <div class="form-row">
+ No requirements defined
+ </div>
%endif
+ %if tool:
+ <div class="form-row">
+ <table width="100%">
+ <tr bgcolor="#D8D8D8" width="100%"><td><b>Additional information about this tool</td></tr>
+ </table>
+ </div>
+ <div class="form-row">
+ <label>Command:</label>
+ <pre>${tool.command}</pre>
+ <div style="clear: both"></div>
+ </div>
+ <div class="form-row">
+ <label>Interpreter:</label>
+ ${tool.interpreter}
+ <div style="clear: both"></div>
+ </div>
+ <div class="form-row">
+ <label>Is multi-byte:</label>
+ ${tool.is_multi_byte}
+ <div style="clear: both"></div>
+ </div>
+ <div class="form-row">
+ <label>Forces a history refresh:</label>
+ ${tool.force_history_refresh}
+ <div style="clear: both"></div>
+ </div>
+ <div class="form-row">
+ <label>Parallelism:</label>
+ ${tool.parallelism}
+ <div style="clear: both"></div>
+ </div>
+ %endif
+ <div class="form-row">
+ <table width="100%">
+ <tr bgcolor="#D8D8D8" width="100%"><td><b>Functional tests</td></tr>
+ </table>
+ </div><%
if 'tests' in metadata:
tests = metadata[ 'tests' ]
@@ -166,6 +214,10 @@
%endfor
</table></div>
+ %else:
+ <div class="form-row">
+ No functional tests defined
+ </div>
%endif
</div></div>
diff -r 713ee9f0ae6a005d8a283427c496c27f9f83c43b -r 0bd2cc07fdd896ca0bdbc6d23f7ac077218f007a templates/webapps/community/repository/view_tool_metadata.mako
--- a/templates/webapps/community/repository/view_tool_metadata.mako
+++ b/templates/webapps/community/repository/view_tool_metadata.mako
@@ -85,31 +85,14 @@
<div class="toolForm"><div class="toolFormTitle">Repository revision</div><div class="toolFormBody">
- %if len( changeset_revision_select_field.options ) > 1:
- <form name="change_revision" id="change_revision" action="${h.url_for( controller='repository', action='view_tool_metadata', repository_id=trans.security.encode_id( repository.id ), tool_id=metadata[ 'id' ], webapp=webapp )}" method="post" >
- <div class="form-row">
- <%
- if changeset_revision == repository.tip:
- tip_str = 'repository tip'
- else:
- tip_str = ''
- %>
- ${changeset_revision_select_field.get_html()} <i>${tip_str}</i>
- <div class="toolParamHelp" style="clear: both;">
- Select a revision to inspect and download versions of tools from this repository.
- </div>
- </div>
- </form>
- %else:
- <div class="form-row">
- <label>Revision:</label>
- %if can_view_change_log:
- <a href="${h.url_for( controller='repository', action='view_changelog', id=trans.app.security.encode_id( repository.id ) )}">${revision_label}</a>
- %else:
- ${revision_label}
- %endif
- </div>
- %endif
+ <div class="form-row">
+ <label>Revision:</label>
+ %if can_view_change_log:
+ <a href="${h.url_for( controller='repository', action='view_changelog', id=trans.app.security.encode_id( repository.id ) )}">${revision_label}</a>
+ %else:
+ ${revision_label}
+ %endif
+ </div></div></div><p/>
@@ -127,47 +110,89 @@
<b>Repository name:</b><br/>
${repository.name}
%endif
-%if metadata:
+%if tool_metadata_dict:
<p/><div class="toolForm">
- <div class="toolFormTitle">${metadata[ 'name' ]} tool metadata</div>
+ <div class="toolFormTitle">${tool_metadata_dict[ 'name' ]} tool metadata</div><div class="toolFormBody"><div class="form-row">
+ <table width="100%">
+ <tr bgcolor="#D8D8D8" width="100%"><td><b>Miscellaneous</td></tr>
+ </table>
+ </div>
+ <div class="form-row"><label>Name:</label>
- <a href="${h.url_for( controller='repository', action='display_tool', repository_id=trans.security.encode_id( repository.id ), tool_config=metadata[ 'tool_config' ], changeset_revision=changeset_revision, webapp=webapp )}">${metadata[ 'name' ]}</a>
+ <a href="${h.url_for( controller='repository', action='display_tool', repository_id=trans.security.encode_id( repository.id ), tool_config=tool_metadata_dict[ 'tool_config' ], changeset_revision=changeset_revision, webapp=webapp )}">${tool_metadata_dict[ 'name' ]}</a><div style="clear: both"></div></div>
- %if 'description' in metadata:
+ %if 'description' in tool_metadata_dict:
<div class="form-row"><label>Description:</label>
- ${metadata[ 'description' ]}
+ ${tool_metadata_dict[ 'description' ]}
<div style="clear: both"></div></div>
%endif
- %if 'id' in metadata:
+ %if 'id' in tool_metadata_dict:
<div class="form-row"><label>Id:</label>
- ${metadata[ 'id' ]}
+ ${tool_metadata_dict[ 'id' ]}
<div style="clear: both"></div></div>
%endif
- %if 'guid' in metadata:
+ %if 'guid' in tool_metadata_dict:
<div class="form-row"><label>Guid:</label>
- ${metadata[ 'guid' ]}
+ ${tool_metadata_dict[ 'guid' ]}
<div style="clear: both"></div></div>
%endif
- %if 'version' in metadata:
+ %if 'version' in tool_metadata_dict:
<div class="form-row"><label>Version:</label>
- ${metadata[ 'version' ]}
+ ${tool_metadata_dict[ 'version' ]}
<div style="clear: both"></div></div>
%endif
+ %if 'version_string_cmd' in tool_metadata_dict:
+ <div class="form-row">
+ <label>Version command string:</label>
+ ${tool_metadata_dict[ 'version_string_cmd' ]}
+ <div style="clear: both"></div>
+ </div>
+ %endif
+ <div class="form-row">
+ <table width="100%">
+ <tr bgcolor="#D8D8D8" width="100%"><td><b>Version lineage of this tool (guids ordered most recent to oldest)</td></tr>
+ </table>
+ </div>
+ <div class="form-row">
+ %if tool_lineage:
+ <table class="grid">
+ %for guid in tool_lineage:
+ <tr>
+ <td>
+ %if guid == tool_metadata_dict[ 'guid' ]:
+ ${guid} <b>(this tool)</b>
+ %else:
+ ${guid}
+ %endif
+ </td>
+ </tr>
+ %endfor
+ </table>
+ %else:
+ No tool versions are defined for this tool so it is critical that you <b>Reset all repository metadata</b> from the
+ <b>Manage repository</b> page.
+ %endif
+ </div>
+ <div class="form-row">
+ <table width="100%">
+ <tr bgcolor="#D8D8D8" width="100%"><td><b>Requirements (dependencies defined in the <requirements> tag set)</td></tr>
+ </table>
+ </div><%
- if 'requirements' in metadata:
- requirements = metadata[ 'requirements' ]
+ if 'requirements' in tool_metadata_dict:
+ requirements = tool_metadata_dict[ 'requirements' ]
else:
requirements = None
%>
@@ -195,16 +220,18 @@
</table><div style="clear: both"></div></div>
- %endif
- %if 'version_string_cmd' in metadata:
+ %else:
<div class="form-row">
- <label>Version command string:</label>
- ${metadata[ 'version_string_cmd' ]}
- <div style="clear: both"></div>
+ No requirements defined
</div>
%endif
%if tool:
<div class="form-row">
+ <table width="100%">
+ <tr bgcolor="#D8D8D8" width="100%"><td><b>Additional information about this tool</td></tr>
+ </table>
+ </div>
+ <div class="form-row"><label>Command:</label><pre>${tool.command}</pre><div style="clear: both"></div>
@@ -230,9 +257,14 @@
<div style="clear: both"></div></div>
%endif
+ <div class="form-row">
+ <table width="100%">
+ <tr bgcolor="#D8D8D8" width="100%"><td><b>Functional tests</td></tr>
+ </table>
+ </div><%
- if 'tests' in metadata:
- tests = metadata[ 'tests' ]
+ if 'tests' in tool_metadata_dict:
+ tests = tool_metadata_dict[ 'tests' ]
else:
tests = None
%>
@@ -273,6 +305,10 @@
%endfor
</table></div>
+ %else:
+ <div class="form-row">
+ No functional tests defined
+ </div>
%endif
</div></div>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
4 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/d960803cbacc/
changeset: d960803cbacc
user: jmchilton
date: 2012-08-09 22:15:23
summary: JJ's initial work on extending the API to allow operations on groups.
affected #: 5 files
diff -r 1cb2fdf2c7cf22d74ec46e6877609f7056372d7c -r d960803cbacc5cdeea67ffa9a26010adba1120d6 lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -544,7 +544,10 @@
else:
return False
-class Group( object ):
+class Group( object, APIItem ):
+ api_collection_visible_keys = ( 'id', 'name' )
+ api_element_visible_keys = ( 'id', 'name' )
+
def __init__( self, name = None ):
self.name = name
self.deleted = False
diff -r 1cb2fdf2c7cf22d74ec46e6877609f7056372d7c -r d960803cbacc5cdeea67ffa9a26010adba1120d6 lib/galaxy/web/api/group_roles.py
--- /dev/null
+++ b/lib/galaxy/web/api/group_roles.py
@@ -0,0 +1,124 @@
+"""
+API operations on Group objects.
+"""
+import logging
+from galaxy.web.base.controller import BaseAPIController, url_for
+from galaxy import web
+
+log = logging.getLogger( __name__ )
+
+class GroupRolesAPIController( BaseAPIController ):
+
+ @web.expose_api
+ @web.require_admin
+ def index( self, trans, group_id, **kwd ):
+ """
+ GET /api/groups/{encoded_group_id}/roles
+ Displays a collection (list) of the roles in a group.
+ """
+ decoded_group_id = trans.security.decode_id( group_id )
+ try:
+ group = trans.sa_session.query( trans.app.model.Group ).get( decoded_group_id )
+ except:
+ group = None
+ if not group:
+ trans.response.status = 400
+ return "Invalid group id ( %s ) specified." % str( group_id )
+ rval = []
+ try:
+ for gra in group.roles:
+ role = gra.role
+ encoded_id = trans.security.encode_id( role.id )
+ rval.append( dict( id = encoded_id,
+ name = role.name,
+ url = url_for( 'group_role', group_id=group_id, id=encoded_id, ) ) )
+ except Exception, e:
+ rval = "Error in group API at listing roles"
+ log.error( rval + ": %s" % str(e) )
+ trans.response.status = 500
+ return rval
+
+ @web.expose_api
+ @web.require_admin
+ def show( self, trans, id, group_id, **kwd ):
+ """
+ GET /api/groups/{encoded_group_id}/roles/{encoded_role_id}
+ Displays information about a group role.
+ """
+ role_id = id
+ decoded_group_id = trans.security.decode_id( group_id )
+ decoded_role_id = trans.security.decode_id( role_id )
+ item = None
+ try:
+ group = trans.sa_session.query( trans.app.model.Group ).get( decoded_group_id )
+ role = trans.sa_session.query( trans.app.model.Role ).get( decoded_role_id )
+ for gra in group.roles:
+ if gra.role == role:
+ item = dict( id = role_id,
+ name = role.name,
+ url = url_for( 'group_role', group_id=group_id, id=role_id) ) # TODO Fix This
+ if not item:
+ item = "role %s not in group %s" % (role.name,group.name)
+ except Exception, e:
+ item = "Error in group_role API group %s role %s" % (group.name, role.name)
+ log.error(item + ": %s" % str(e))
+ return item
+
+ @web.expose_api
+ @web.require_admin
+ def update( self, trans, id, group_id, **kwd ):
+ """
+ PUT /api/groups/{encoded_group_id}/roles/{encoded_role_id}
+ Adds a role to a group
+ """
+ role_id = id
+ decoded_group_id = trans.security.decode_id( group_id )
+ decoded_role_id = trans.security.decode_id( role_id )
+ item = None
+ try:
+ group = trans.sa_session.query( trans.app.model.Group ).get( decoded_group_id )
+ role = trans.sa_session.query( trans.app.model.Role ).get( decoded_role_id )
+ for gra in group.roles:
+ if gra.role == role:
+ item = dict( id = role_id,
+ name = role.name,
+ url = url_for( 'group_role', group_id=group_id, id=role_id) )
+ if not item:
+ gra = trans.app.model.GroupRoleAssociation( group, role )
+ # Add GroupRoleAssociation
+ trans.sa_session.add( gra )
+ trans.sa_session.flush()
+ item = dict( id = role_id,
+ name = role.name,
+ url = url_for( 'group_role', group_id=group_id, id=role_id) )
+ except Exception, e:
+ item = "Error in group_role API Adding role %s to group %s" % (role.name,group.name)
+ log.error(item + ": %s" % str(e))
+ return item
+
+ @web.expose_api
+ @web.require_admin
+ def delete( self, trans, id, group_id, **kwd ):
+ """
+ DELETE /api/groups/{encoded_group_id}/roles/{encoded_role_id}
+ Removes a role from a group
+ """
+ role_id = id
+ decoded_group_id = trans.security.decode_id( group_id )
+ decoded_role_id = trans.security.decode_id( role_id )
+ try:
+ group = trans.sa_session.query( trans.app.model.Group ).get( decoded_group_id )
+ role = trans.sa_session.query( trans.app.model.Role ).get( decoded_role_id )
+ for gra in group.roles:
+ if gra.role == role:
+ trans.sa_session.delete( gra )
+ trans.sa_session.flush()
+ item = dict( id = role_id,
+ name = role.name,
+ url = url_for( 'group_role', group_id=group_id, id=role_id) )
+ if not item:
+ item = "role %s not in group %s" % (role.name,group.name)
+ except Exception, e:
+ item = "Error in group_role API Removing role %s from group %s" % (role.name,group.name)
+ log.error(item + ": %s" % str(e))
+ return item
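As a usage sketch for the endpoints documented above (not part of the changeset; the host, the encoded group id and the 'key' authentication parameter are placeholders), listing the roles in a group looks like this:

    import json, urllib2

    url = 'http://localhost:8080/api/groups/f2db41e1fa331b3e/roles?key=YOUR_ADMIN_API_KEY'
    roles = json.loads( urllib2.urlopen( url ).read() )
    for role in roles:
        print role[ 'id' ], role[ 'name' ], role[ 'url' ]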
diff -r 1cb2fdf2c7cf22d74ec46e6877609f7056372d7c -r d960803cbacc5cdeea67ffa9a26010adba1120d6 lib/galaxy/web/api/group_users.py
--- /dev/null
+++ b/lib/galaxy/web/api/group_users.py
@@ -0,0 +1,124 @@
+"""
+API operations on Group objects.
+"""
+import logging
+from galaxy.web.base.controller import BaseAPIController, url_for
+from galaxy import web
+
+log = logging.getLogger( __name__ )
+
+class GroupUsersAPIController( BaseAPIController ):
+
+ @web.expose_api
+ @web.require_admin
+ def index( self, trans, group_id, **kwd ):
+ """
+ GET /api/groups/{encoded_group_id}/users
+ Displays a collection (list) of the users in a group.
+ """
+ decoded_group_id = trans.security.decode_id( group_id )
+ try:
+ group = trans.sa_session.query( trans.app.model.Group ).get( decoded_group_id )
+ except:
+ group = None
+ if not group:
+ trans.response.status = 400
+ return "Invalid group id ( %s ) specified." % str( group_id )
+ rval = []
+ try:
+ for uga in group.users:
+ user = uga.user
+ encoded_id = trans.security.encode_id( user.id )
+ rval.append( dict( id = encoded_id,
+ email = user.email,
+ url = url_for( 'group_user', group_id=group_id, id=encoded_id, ) ) )
+ except Exception, e:
+ rval = "Error in group API at listing users"
+ log.error( rval + ": %s" % str(e) )
+ trans.response.status = 500
+ return rval
+
+ @web.expose_api
+ @web.require_admin
+ def show( self, trans, id, group_id, **kwd ):
+ """
+ GET /api/groups/{encoded_group_id}/users/{encoded_user_id}
+ Displays information about a group user.
+ """
+ user_id = id
+ decoded_group_id = trans.security.decode_id( group_id )
+ decoded_user_id = trans.security.decode_id( user_id )
+ item = None
+ try:
+ group = trans.sa_session.query( trans.app.model.Group ).get( decoded_group_id )
+ user = trans.sa_session.query( trans.app.model.User ).get( decoded_user_id )
+ for uga in group.users:
+ if uga.user == user:
+ item = dict( id = user_id,
+ email = user.email,
+ url = url_for( 'group_user', group_id=group_id, id=user_id) ) # TODO Fix This
+ if not item:
+ item = "user %s not in group %s" % (user.email,group.name)
+ except Exception, e:
+ item = "Error in group_user API group %s user %s" % (group.name, user.email)
+ log.error(item + ": %s" % str(e))
+ return item
+
+ @web.expose_api
+ @web.require_admin
+ def update( self, trans, id, group_id, **kwd ):
+ """
+ PUT /api/groups/{encoded_group_id}/users/{encoded_user_id}
+ Adds a user to a group
+ """
+ user_id = id
+ decoded_group_id = trans.security.decode_id( group_id )
+ decoded_user_id = trans.security.decode_id( user_id )
+ item = None
+ try:
+ group = trans.sa_session.query( trans.app.model.Group ).get( decoded_group_id )
+ user = trans.sa_session.query( trans.app.model.User ).get( decoded_user_id )
+ for uga in group.users:
+ if uga.user == user:
+ item = dict( id = user_id,
+ email = user.email,
+ url = url_for( 'group_user', group_id=group_id, id=user_id) )
+ if not item:
+ uga = trans.app.model.UserGroupAssociation( user, group )
+ # Add UserGroupAssociations
+ trans.sa_session.add( uga )
+ trans.sa_session.flush()
+ item = dict( id = user_id,
+ email = user.email,
+ url = url_for( 'group_user', group_id=group_id, id=user_id) )
+ except Exception, e:
+ item = "Error in group_user API Adding user %s to group %s" % (user.email,group.name)
+ log.error(item + ": %s" % str(e))
+ return item
+
+ @web.expose_api
+ @web.require_admin
+ def delete( self, trans, id, group_id, **kwd ):
+ """
+ DELETE /api/groups/{encoded_group_id}/users/{encoded_user_id}
+ Removes a user from a group
+ """
+ user_id = id
+ decoded_group_id = trans.security.decode_id( group_id )
+ decoded_user_id = trans.security.decode_id( user_id )
+ try:
+ group = trans.sa_session.query( trans.app.model.Group ).get( decoded_group_id )
+ user = trans.sa_session.query( trans.app.model.User ).get( decoded_user_id )
+ for uga in group.users:
+ if uga.user == user:
+ trans.sa_session.delete( uga )
+ trans.sa_session.flush()
+ item = dict( id = user_id,
+ email = user.email,
+ url = url_for( 'group_user', group_id=group_id, id=user_id) )
+ if not item:
+ item = "user %s not in group %s" % (user.email,group.name)
+ except Exception, e:
+ item = "Error in group_user API Removing user %s from group %s" % (user.email,group.name)
+ log.error(item + ": %s" % str(e))
+ return item
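Similarly, a hedged sketch of adding a user to a group through the update() endpoint above. urllib2 has no PUT helper, so the method is overridden; the host, the encoded ids and the 'key' parameter are placeholders.

    import urllib2

    url = 'http://localhost:8080/api/groups/f2db41e1fa331b3e/users/2d9035b3fc152403?key=YOUR_ADMIN_API_KEY'
    request = urllib2.Request( url, data='{}', headers={ 'Content-Type': 'application/json' } )
    request.get_method = lambda: 'PUT'   # force PUT; urllib2 defaults to POST when data is set
    print urllib2.urlopen( request ).read()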
diff -r 1cb2fdf2c7cf22d74ec46e6877609f7056372d7c -r d960803cbacc5cdeea67ffa9a26010adba1120d6 lib/galaxy/web/api/groups.py
--- /dev/null
+++ b/lib/galaxy/web/api/groups.py
@@ -0,0 +1,128 @@
+"""
+API operations on Group objects.
+"""
+import logging
+from galaxy.web.base.controller import BaseAPIController, url_for
+from galaxy import web
+
+
+log = logging.getLogger( __name__ )
+
+
+class GroupAPIController( BaseAPIController ):
+
+ @web.expose_api
+ @web.require_admin
+ def index( self, trans, **kwd ):
+ """
+ GET /api/groups
+ Displays a collection (list) of groups.
+ """
+ rval = []
+ for group in trans.sa_session.query( trans.app.model.Group ).filter( trans.app.model.Group.table.c.deleted == False ):
+ if trans.user_is_admin():
+ item = group.get_api_value( value_mapper={ 'id': trans.security.encode_id } )
+ encoded_id = trans.security.encode_id( group.id )
+ item['url'] = url_for( 'group', id=encoded_id )
+ rval.append( item )
+ return rval
+
+ @web.expose_api
+ def create( self, trans, payload, **kwd ):
+ """
+ POST /api/groups
+ Creates a new group.
+ """
+ log.info("groups payload%s\n" % (payload))
+ if not trans.user_is_admin():
+ trans.response.status = 403
+ return "You are not authorized to create a new group."
+ name = payload.get( 'name', None )
+ if not name:
+ trans.response.status = 400
+ return "Enter a valid name"
+ if trans.sa_session.query( trans.app.model.Group ).filter( trans.app.model.Group.table.c.name==name ).first():
+ trans.response.status = 400
+ return "A group with that name already exists"
+
+ group = trans.app.model.Group( name=name )
+ trans.sa_session.add( group )
+ user_ids = payload.get( 'user_ids', [] )
+ for i in user_ids:
+ log.info("user_id: %s\n" % (i ))
+ log.info("%s %s\n" % (i, trans.security.decode_id( i ) ))
+ users = [ trans.sa_session.query( trans.model.User ).get( trans.security.decode_id( i ) ) for i in user_ids ]
+ role_ids = payload.get( 'role_ids', [] )
+ roles = [ trans.sa_session.query( trans.model.Role ).get( trans.security.decode_id( i ) ) for i in role_ids ]
+ trans.app.security_agent.set_entity_group_associations( groups=[ group ], roles=roles, users=users )
+ """
+ # Create the UserGroupAssociations
+ for user in users:
+ trans.app.security_agent.associate_user_group( user, group )
+ # Create the GroupRoleAssociations
+ for role in roles:
+ trans.app.security_agent.associate_group_role( group, role )
+ """
+ trans.sa_session.flush()
+ encoded_id = trans.security.encode_id( group.id )
+ item = group.get_api_value( view='element', value_mapper={ 'id': trans.security.encode_id } )
+ item['url'] = url_for( 'group', id=encoded_id )
+ return [ item ]
+
+ @web.expose_api
+ @web.require_admin
+ def show( self, trans, id, **kwd ):
+ """
+ GET /api/groups/{encoded_group_id}
+ Displays information about a group.
+ """
+ group_id = id
+ try:
+ decoded_group_id = trans.security.decode_id( group_id )
+ except TypeError:
+ trans.response.status = 400
+ return "Malformed group id ( %s ) specified, unable to decode." % str( group_id )
+ try:
+ group = trans.sa_session.query( trans.app.model.Group ).get( decoded_group_id )
+ except:
+ group = None
+ if not group:
+ trans.response.status = 400
+ return "Invalid group id ( %s ) specified." % str( group_id )
+ item = group.get_api_value( view='element', value_mapper={ 'id': trans.security.encode_id } )
+ item['url'] = url_for( 'group', id=group_id )
+ item['users_url'] = url_for( 'group_users', group_id=group_id )
+ item['roles_url'] = url_for( 'group_roles', group_id=group_id )
+ return item
+
+ @web.expose_api
+ @web.require_admin
+ def update( self, trans, id, payload, **kwd ):
+ """
+ PUT /api/groups/{encoded_group_id}
+ Modifies a group.
+ """
+ group_id = id
+ try:
+ decoded_group_id = trans.security.decode_id( group_id )
+ except TypeError:
+ trans.response.status = 400
+ return "Malformed group id ( %s ) specified, unable to decode." % str( group_id )
+ try:
+ group = trans.sa_session.query( trans.app.model.Group ).get( decoded_group_id )
+ except:
+ group = None
+ if not group:
+ trans.response.status = 400
+ return "Invalid group id ( %s ) specified." % str( group_id )
+ name = payload.get( 'name', None )
+ if name:
+ group.name = name
+ trans.sa_session.add(group)
+ user_ids = payload.get( 'user_ids', [] )
+ users = [ trans.sa_session.query( trans.model.User ).get( trans.security.decode_id( i ) ) for i in user_ids ]
+ role_ids = payload.get( 'role_ids', [] )
+ roles = [ trans.sa_session.query( trans.model.Role ).get( trans.security.decode_id( i ) ) for i in role_ids ]
+ trans.app.security_agent.set_entity_group_associations( groups=[ group ], roles=roles, users=users,delete_existing_assocs=False )
+ trans.sa_session.flush()
+
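A hedged sketch of exercising create() above: the payload keys ('name', 'user_ids', 'role_ids') mirror what the controller reads, while the host, the encoded ids and the 'key' parameter are placeholders.

    import json, urllib2

    payload = { 'name': 'sequencing-core',
                'user_ids': [ '2d9035b3fc152403' ],
                'role_ids': [] }
    url = 'http://localhost:8080/api/groups?key=YOUR_ADMIN_API_KEY'
    request = urllib2.Request( url, data=json.dumps( payload ),
                               headers={ 'Content-Type': 'application/json' } )
    created = json.loads( urllib2.urlopen( request ).read() )
    # Per create() above, the response is a one-element list, e.g.
    # [ { 'id': '...', 'name': 'sequencing-core', 'url': '/api/groups/...' } ]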
diff -r 1cb2fdf2c7cf22d74ec46e6877609f7056372d7c -r d960803cbacc5cdeea67ffa9a26010adba1120d6 lib/galaxy/web/buildapp.py
--- a/lib/galaxy/web/buildapp.py
+++ b/lib/galaxy/web/buildapp.py
@@ -122,6 +122,18 @@
'permissions',
path_prefix='/api/libraries/:library_id',
parent_resources=dict( member_name='library', collection_name='libraries' ) )
+ webapp.api_mapper.resource( 'user',
+ 'users',
+ controller='group_users',
+ name_prefix='group_',
+ path_prefix='/api/groups/:group_id',
+ parent_resources=dict( member_name='group', collection_name='groups' ) )
+ webapp.api_mapper.resource( 'role',
+ 'roles',
+ controller='group_roles',
+ name_prefix='group_',
+ path_prefix='/api/groups/:group_id',
+ parent_resources=dict( member_name='group', collection_name='groups' ) )
webapp.api_mapper.resource( 'dataset', 'datasets', path_prefix='/api' )
webapp.api_mapper.resource_with_deleted( 'library', 'libraries', path_prefix='/api' )
webapp.api_mapper.resource( 'sample', 'samples', path_prefix='/api' )
@@ -129,6 +141,7 @@
webapp.api_mapper.resource( 'form', 'forms', path_prefix='/api' )
webapp.api_mapper.resource( 'request_type', 'request_types', path_prefix='/api' )
webapp.api_mapper.resource( 'role', 'roles', path_prefix='/api' )
+ webapp.api_mapper.resource( 'group', 'groups', path_prefix='/api' )
webapp.api_mapper.resource_with_deleted( 'quota', 'quotas', path_prefix='/api' )
webapp.api_mapper.resource( 'tool', 'tools', path_prefix='/api' )
webapp.api_mapper.resource_with_deleted( 'user', 'users', path_prefix='/api' )
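For reference, the nested resource mappings above should produce routes along these lines (derived from the mapper calls and the controller docstrings, not an exhaustive dump of the route table):

    /api/groups                                               GET (index), POST (create)
    /api/groups/{encoded_group_id}                            GET (show), PUT (update)
    /api/groups/{encoded_group_id}/users                      GET (index)
    /api/groups/{encoded_group_id}/users/{encoded_user_id}    GET (show), PUT (update), DELETE (delete)
    /api/groups/{encoded_group_id}/roles                      GET (index)
    /api/groups/{encoded_group_id}/roles/{encoded_role_id}    GET (show), PUT (update), DELETE (delete)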
https://bitbucket.org/galaxy/galaxy-central/changeset/761c58466d97/
changeset: 761c58466d97
user: jmchilton
date: 2012-08-09 22:18:00
summary: Merge.
affected #: 19 files
diff -r d960803cbacc5cdeea67ffa9a26010adba1120d6 -r 761c58466d97ba0d29af670daabc256aacf4dbb8 buildbot_setup.sh
--- a/buildbot_setup.sh
+++ b/buildbot_setup.sh
@@ -68,8 +68,13 @@
datatypes_conf.xml.sample
universe_wsgi.ini.sample
tool_data_table_conf.xml.sample
+migrated_tools_conf.xml.sample
+tool-data/shared/ensembl/builds.txt.sample
+tool-data/shared/igv/igv_build_sites.txt.sample
+tool-data/shared/ncbi/builds.txt.sample
+tool-data/shared/rviewer/rviewer_build_sites.txt.sample
tool-data/shared/ucsc/builds.txt.sample
-migrated_tools_conf.xml.sample
+tool-data/shared/ucsc/publicbuilds.txt.sample
"
DIRS="
diff -r d960803cbacc5cdeea67ffa9a26010adba1120d6 -r 761c58466d97ba0d29af670daabc256aacf4dbb8 contrib/multiproccess.sh
--- a/contrib/multiproccess.sh
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/bin/bash
-
-# copy this script to the top level galaxy directory and modify the following
-# for your environment
-
-web_server_names=(web{0..2}) # server names: web0 web1 web2
-runner_server_names=(runner0) # server name: runner0
-
-web_config='universe_wsgi.webapp.ini'
-runner_config='universe_wsgi.runner.ini'
-
-# actually do the requested action
-
-if [ -z "$1" ]; then
- echo "usage: multiprocess.sh <--daemon|--stop-daemon>"
- exit 1
-fi
-
-for server_name in ${web_server_names[@]}; do
- echo "[$server_name]"
- python ./scripts/paster.py serve $web_config --server-name=$server_name --pid-file=$server_name.pid --log-file=$server_name.log $@
-done
-for server_name in ${runner_server_names[@]}; do
- echo "[$server_name]"
- python ./scripts/paster.py serve $runner_config --server-name=$server_name --pid-file=$server_name.pid --log-file=$server_name.log $@
-done
diff -r d960803cbacc5cdeea67ffa9a26010adba1120d6 -r 761c58466d97ba0d29af670daabc256aacf4dbb8 lib/galaxy/tool_shed/install_manager.py
--- a/lib/galaxy/tool_shed/install_manager.py
+++ b/lib/galaxy/tool_shed/install_manager.py
@@ -2,7 +2,7 @@
Manage automatic installation of tools configured in the xxx.xml files in ~/scripts/migrate_tools (e.g., 0002_tools.xml).
All of the tools were at some point included in the Galaxy distribution, but are now hosted in the main Galaxy tool shed.
"""
-import urllib2
+import urllib2, tempfile
from galaxy.tools import ToolSection
from galaxy.util.json import from_json_string, to_json_string
from galaxy.util.shed_util import *
@@ -132,7 +132,7 @@
tool_panel_dict_for_tool_config = generate_tool_panel_dict_for_tool_config( guid, tool_config, tool_sections=tool_sections )
for k, v in tool_panel_dict_for_tool_config.items():
tool_panel_dict_for_display[ k ] = v
- metadata_dict = generate_metadata_using_disk_files( self.toolbox, relative_install_dir, repository_clone_url )
+ metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( self.app, relative_install_dir, repository_clone_url )
tool_shed_repository.metadata = metadata_dict
self.app.sa_session.add( tool_shed_repository )
self.app.sa_session.flush()
@@ -142,7 +142,7 @@
else:
tool_dependencies = None
if 'tools' in metadata_dict:
- work_dir = make_tmp_directory()
+ work_dir = tempfile.mkdtemp()
repository_tools_tups = get_repository_tools_tups( self.app, metadata_dict )
if repository_tools_tups:
sample_files = metadata_dict.get( 'sample_files', [] )
@@ -195,7 +195,7 @@
tool_shed_repository.includes_datatypes = True
self.app.sa_session.add( tool_shed_repository )
self.app.sa_session.flush()
- work_dir = make_tmp_directory()
+ work_dir = tempfile.mkdtemp()
datatypes_config = get_config_from_repository( self.app,
'datatypes_conf.xml',
tool_shed_repository,
diff -r d960803cbacc5cdeea67ffa9a26010adba1120d6 -r 761c58466d97ba0d29af670daabc256aacf4dbb8 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -4,6 +4,7 @@
import pkg_resources
pkg_resources.require( "simplejson" )
+pkg_resources.require( "Mako" )
import logging, os, string, sys, tempfile, glob, shutil, types, urllib, subprocess, random, math, traceback
import simplejson
@@ -2344,7 +2345,7 @@
command_line = command_line.replace( "\n", " " ).replace( "\r", " " ).strip()
except Exception, e:
# Modify exception message to be more clear
- #e.args = ( 'Error substituting into command line. Params: %r, Command: %s' % ( param_dict, self.command ) )
+ #e.args = ( 'Error substituting into command line. Params: %r, Command: %s' % ( param_dict, self.command ), )
raise
if self.interpreter:
# TODO: path munging for cluster/dataset server relocatability
@@ -2441,7 +2442,7 @@
if code:
return code( *args, **kwargs )
except Exception, e:
- e.args = ( "Error in '%s' hook '%s', original message: %s" % ( self.name, hook_name, e.args[0] ) )
+ e.args = ( "Error in '%s' hook '%s', original message: %s" % ( self.name, hook_name, e.args[0] ), )
raise
def exec_before_job( self, app, inp_data, out_data, param_dict={} ):
pass
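The trailing comma added above is the whole fix: without it the parenthesized expression is just a string, so e.args ends up being iterated character by character instead of holding a single message. A tiny illustration, not taken from the changeset:

    without_comma = ( "Error in 'filter' hook 'exec_after_process', original message: boom" )
    with_comma    = ( "Error in 'filter' hook 'exec_after_process', original message: boom", )
    print type( without_comma )   # <type 'str'>
    print type( with_comma )      # <type 'tuple'>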
diff -r d960803cbacc5cdeea67ffa9a26010adba1120d6 -r 761c58466d97ba0d29af670daabc256aacf4dbb8 lib/galaxy/util/shed_util.py
--- a/lib/galaxy/util/shed_util.py
+++ b/lib/galaxy/util/shed_util.py
@@ -3,6 +3,7 @@
from datetime import date, datetime, timedelta
from time import strftime, gmtime
from galaxy import util
+from galaxy.tools import parameters
from galaxy.datatypes.checkers import *
from galaxy.util.json import *
from galaxy.tools.search import ToolBoxSearch
@@ -247,6 +248,52 @@
except:
pass
return converter_path, display_path
+def check_tool_input_params( app, repo_dir, tool_config_name, tool, sample_files ):
+ """
+ Check all of the tool's input parameters, looking for any that are dynamically generated using external data files to make
+ sure the files exist.
+ """
+ invalid_files_and_errors_tups = []
+ correction_msg = ''
+ for input_param in tool.input_params:
+ if isinstance( input_param, parameters.basic.SelectToolParameter ) and input_param.is_dynamic:
+ # If the tool refers to .loc files or requires an entry in the tool_data_table_conf.xml, make sure all requirements exist.
+ options = input_param.dynamic_options or input_param.options
+ if options:
+ if options.tool_data_table or options.missing_tool_data_table_name:
+ # Make sure the repository contains a tool_data_table_conf.xml.sample file.
+ sample_tool_data_table_conf = get_config_from_disk( 'tool_data_table_conf.xml.sample', repo_dir )
+ if sample_tool_data_table_conf:
+ error, correction_msg = handle_sample_tool_data_table_conf_file( app, sample_tool_data_table_conf )
+ if error:
+ invalid_files_and_errors_tups.append( ( 'tool_data_table_conf.xml.sample', correction_msg ) )
+ else:
+ options.missing_tool_data_table_name = None
+ else:
+ correction_msg = "This file requires an entry in the tool_data_table_conf.xml file. Upload a file named tool_data_table_conf.xml.sample "
+ correction_msg += "to the repository that includes the required entry to correct this error.<br/>"
+ invalid_files_and_errors_tups.append( ( tool_config_name, correction_msg ) )
+ if options.index_file or options.missing_index_file:
+ # Make sure the repository contains the required xxx.loc.sample file.
+ index_file = options.index_file or options.missing_index_file
+ index_file_name = strip_path( index_file )
+ sample_found = False
+ for sample_file in sample_files:
+ sample_file_name = strip_path( sample_file )
+ if sample_file_name == '%s.sample' % index_file_name:
+ options.index_file = index_file_name
+ options.missing_index_file = None
+ if options.tool_data_table:
+ options.tool_data_table.missing_index_file = None
+ sample_found = True
+ break
+ if not sample_found:
+ correction_msg = "This file refers to a file named <b>%s</b>. " % str( index_file )
+ correction_msg += "Upload a file named <b>%s.sample</b> to the repository to correct this error." % str( index_file_name )
+ invalid_files_and_errors_tups.append( ( tool_config_name, correction_msg ) )
+ # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
+ reset_tool_data_tables( app )
+ return invalid_files_and_errors_tups
def config_elems_to_xml_file( app, config_elems, config_filename, tool_path ):
# Persist the current in-memory list of config_elems to a file named by the value of config_filename.
fd, filename = tempfile.mkstemp()
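
Note: the new check_tool_input_params() above returns a list of ( tool_config_name, correction_msg ) tuples instead of mutating a shared invalid_files list, and it matches a dynamic parameter's index file against the repository's *.sample files by comparing stripped file names. A standalone sketch of that matching rule, modeling strip_path with os.path.basename (the file names below are hypothetical):

    import os

    def find_matching_sample( index_file, sample_files ):
        # An index file such as "bwa_index.loc" is satisfied by a repository
        # file named "bwa_index.loc.sample", regardless of directory.
        index_file_name = os.path.basename( index_file )
        for sample_file in sample_files:
            if os.path.basename( sample_file ) == '%s.sample' % index_file_name:
                return sample_file
        return None

    print( find_matching_sample( 'tool-data/bwa_index.loc', [ 'bwa_index.loc.sample', 'README' ] ) )
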
@@ -383,7 +430,7 @@
def create_tool_dependency_objects( app, tool_shed_repository, current_changeset_revision, set_status=True ):
# Create or update a ToolDependency for each entry in tool_dependencies_config. This method is called when installing a new tool_shed_repository.
tool_dependency_objects = []
- work_dir = make_tmp_directory()
+ work_dir = tempfile.mkdtemp()
# Get the tool_dependencies.xml file from the repository.
tool_dependencies_config = get_config_from_repository( app,
'tool_dependencies.xml',
@@ -501,6 +548,76 @@
if not can_generate_dependency_metadata:
break
return can_generate_dependency_metadata
+def generate_metadata_for_changeset_revision( app, repository_files_dir, repository_clone_url ):
+ """
+ Generate metadata for a repository using its files on disk. To generate metadata for changeset revisions older than the repository tip,
+ the repository will have been cloned to a temporary location and updated to a specified changeset revision to access that changeset revision's
+ disk files, so the value of repository_files_dir will not always be repository.repo_path (it could be a temporary directory containing a clone).
+ """
+ metadata_dict = {}
+ invalid_file_tups = []
+ invalid_tool_configs = []
+ tool_dependencies_config = None
+ datatypes_config = get_config_from_disk( 'datatypes_conf.xml', repository_files_dir )
+ if datatypes_config:
+ metadata_dict = generate_datatypes_metadata( datatypes_config, metadata_dict )
+ sample_files = get_sample_files_from_disk( repository_files_dir )
+ if sample_files:
+ metadata_dict[ 'sample_files' ] = sample_files
+ # Find all tool configs and exported workflows.
+ for root, dirs, files in os.walk( repository_files_dir ):
+ if root.find( '.hg' ) < 0 and root.find( 'hgrc' ) < 0:
+ if '.hg' in dirs:
+ dirs.remove( '.hg' )
+ for name in files:
+ # Find all tool configs.
+ if name not in NOT_TOOL_CONFIGS and name.endswith( '.xml' ):
+ full_path = os.path.abspath( os.path.join( root, name ) )
+ if not ( check_binary( full_path ) or check_image( full_path ) or check_gzip( full_path )[ 0 ]
+ or check_bz2( full_path )[ 0 ] or check_zip( full_path ) ):
+ try:
+ # Make sure we're looking at a tool config and not a display application config or something else.
+ element_tree = util.parse_xml( full_path )
+ element_tree_root = element_tree.getroot()
+ is_tool = element_tree_root.tag == 'tool'
+ except Exception, e:
+ print "Error parsing %s", full_path, ", exception: ", str( e )
+ is_tool = False
+ if is_tool:
+ try:
+ tool = app.toolbox.load_tool( full_path )
+ except Exception, e:
+ tool = None
+ invalid_tool_configs.append( name )
+ if tool is not None:
+ invalid_files_and_errors_tups = check_tool_input_params( app, repository_files_dir, name, tool, sample_files )
+ can_set_metadata = True
+ for tup in invalid_files_and_errors_tups:
+ if name in tup:
+ can_set_metadata = False
+ invalid_tool_configs.append( name )
+ break
+ if can_set_metadata:
+ metadata_dict = generate_tool_metadata( os.path.join( root, name ), tool, repository_clone_url, metadata_dict )
+ else:
+ invalid_file_tups.extend( invalid_files_and_errors_tups )
+ # Find all exported workflows
+ elif name.endswith( '.ga' ):
+ relative_path = os.path.join( root, name )
+ fp = open( relative_path, 'rb' )
+ workflow_text = fp.read()
+ fp.close()
+ exported_workflow_dict = from_json_string( workflow_text )
+ if 'a_galaxy_workflow' in exported_workflow_dict and exported_workflow_dict[ 'a_galaxy_workflow' ] == 'true':
+ metadata_dict = generate_workflow_metadata( relative_path, exported_workflow_dict, metadata_dict )
+ if 'tools' in metadata_dict:
+ # This step must be done after metadata for tools has been defined.
+ tool_dependencies_config = get_config_from_disk( 'tool_dependencies.xml', repository_files_dir )
+ if tool_dependencies_config:
+ metadata_dict = generate_tool_dependency_metadata( tool_dependencies_config, metadata_dict )
+ if invalid_tool_configs:
+ metadata_dict [ 'invalid_tools' ] = invalid_tool_configs
+ return metadata_dict, invalid_file_tups
def generate_package_dependency_metadata( elem, tool_dependencies_dict ):
"""The value of package_name must match the value of the "package" type in the tool config's <requirements> tag set."""
requirements_dict = {}
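
Note: generate_metadata_for_changeset_revision() above walks the working directory, prunes Mercurial's .hg directory so it is never descended into, and returns a ( metadata_dict, invalid_file_tups ) pair that every caller now unpacks. A simplified, standalone sketch of the traversal pattern (the directory and suffixes are only illustrative):

    import os

    def walk_repository_files( repository_files_dir ):
        for root, dirs, files in os.walk( repository_files_dir ):
            # Prune .hg in place so os.walk never enters Mercurial internals.
            if '.hg' in dirs:
                dirs.remove( '.hg' )
            for name in files:
                # Tool configs end in .xml, exported workflows in .ga.
                if name.endswith( '.xml' ) or name.endswith( '.ga' ):
                    yield os.path.join( root, name )

    for path in walk_repository_files( '.' ):
        print( path )
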
@@ -517,58 +634,6 @@
if requirements_dict:
tool_dependencies_dict[ dependency_key ] = requirements_dict
return tool_dependencies_dict
-def generate_metadata_using_disk_files( toolbox, relative_install_dir, repository_clone_url ):
- """Generate metadata using only the repository files on disk - files are not retrieved from the repository manifest."""
- metadata_dict = {}
- tool_dependencies_config = None
- datatypes_config = get_config_from_disk( 'datatypes_conf.xml', relative_install_dir )
- if datatypes_config:
- metadata_dict = generate_datatypes_metadata( datatypes_config, metadata_dict )
- sample_files = get_sample_files_from_disk( relative_install_dir )
- if sample_files:
- metadata_dict[ 'sample_files' ] = sample_files
- # Find all tool configs and exported workflows.
- for root, dirs, files in os.walk( relative_install_dir ):
- if root.find( '.hg' ) < 0 and root.find( 'hgrc' ) < 0:
- if '.hg' in dirs:
- dirs.remove( '.hg' )
- for name in files:
- # Find all tool configs.
- if name not in NOT_TOOL_CONFIGS and name.endswith( '.xml' ):
- full_path = os.path.abspath( os.path.join( root, name ) )
- if not ( check_binary( full_path ) or check_image( full_path ) or check_gzip( full_path )[ 0 ]
- or check_bz2( full_path )[ 0 ] or check_zip( full_path ) ):
- try:
- # Make sure we're looking at a tool config and not a display application config or something else.
- element_tree = util.parse_xml( full_path )
- element_tree_root = element_tree.getroot()
- is_tool = element_tree_root.tag == 'tool'
- except Exception, e:
- log.debug( "Error parsing %s, exception: %s" % ( full_path, str( e ) ) )
- is_tool = False
- if is_tool:
- try:
- tool = toolbox.load_tool( full_path )
- except Exception, e:
- tool = None
- if tool is not None:
- tool_config = os.path.join( root, name )
- metadata_dict = generate_tool_metadata( tool_config, tool, repository_clone_url, metadata_dict )
- # Find all exported workflows
- elif name.endswith( '.ga' ):
- relative_path = os.path.join( root, name )
- fp = open( relative_path, 'rb' )
- workflow_text = fp.read()
- fp.close()
- exported_workflow_dict = from_json_string( workflow_text )
- if 'a_galaxy_workflow' in exported_workflow_dict and exported_workflow_dict[ 'a_galaxy_workflow' ] == 'true':
- metadata_dict = generate_workflow_metadata( relative_path, exported_workflow_dict, metadata_dict )
- if 'tools' in metadata_dict:
- # This step must be done after metadata for tools has been defined.
- tool_dependencies_config = get_config_from_disk( 'tool_dependencies.xml', relative_install_dir )
- if tool_dependencies_config:
- metadata_dict = generate_tool_dependency_metadata( tool_dependencies_config, metadata_dict )
- return metadata_dict
def generate_tool_guid( repository_clone_url, tool ):
"""
Generate a guid for the installed tool. It is critical that this guid matches the guid for
@@ -1266,7 +1331,7 @@
def load_installed_datatypes( app, repository, relative_install_dir, deactivate=False ):
# Load proprietary datatypes and return information needed for loading proprietary datatypes converters and display applications later.
metadata = repository.metadata
- work_dir = make_tmp_directory()
+ work_dir = tempfile.mkdtemp()
repository_dict = None
datatypes_config = get_config_from_repository( app,
'datatypes_conf.xml',
@@ -1293,17 +1358,6 @@
def load_installed_display_applications( app, installed_repository_dict, deactivate=False ):
# Load or deactivate proprietary datatype display applications
app.datatypes_registry.load_display_applications( installed_repository_dict=installed_repository_dict, deactivate=deactivate )
-def make_tmp_directory():
- tmp_dir = os.getenv( 'TMPDIR', '' )
- if tmp_dir:
- tmp_dir = tmp_dir.strip()
- else:
- home_dir = os.getenv( 'HOME' )
- tmp_dir = os.path.join( home_dir, 'tmp' )
- work_dir = os.path.join( tmp_dir, 'work_tmp' )
- if not os.path.exists( work_dir ):
- os.makedirs( work_dir )
- return work_dir
def open_repository_files_folder( trans, folder_path ):
try:
files_list = get_repository_files( trans, folder_path )
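
Note: throughout this changeset the custom make_tmp_directory() helper (removed above) is replaced by tempfile.mkdtemp(). The old helper reused a single $TMPDIR/work_tmp (or ~/tmp/work_tmp) directory, while mkdtemp() creates a fresh, uniquely named directory on every call, so concurrent operations cannot collide. A minimal sketch of the replacement pattern; the try/finally cleanup is illustrative only, as the changeset itself does not show how the work directories are removed:

    import shutil
    import tempfile

    work_dir = tempfile.mkdtemp()
    try:
        # ... clone the repository or write config files into work_dir ...
        pass
    finally:
        # Each call creates a new directory, so the caller should remove it.
        shutil.rmtree( work_dir, ignore_errors=True )
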
diff -r d960803cbacc5cdeea67ffa9a26010adba1120d6 -r 761c58466d97ba0d29af670daabc256aacf4dbb8 lib/galaxy/web/controllers/admin_toolshed.py
--- a/lib/galaxy/web/controllers/admin_toolshed.py
+++ b/lib/galaxy/web/controllers/admin_toolshed.py
@@ -1,4 +1,4 @@
-import urllib2
+import urllib2, tempfile
from galaxy.web.controllers.admin import *
from galaxy.util.json import from_json_string, to_json_string
from galaxy.util.shed_util import *
@@ -522,7 +522,7 @@
# Get the tool_shed_repository from one of the tool_dependencies.
message = ''
tool_shed_repository = tool_dependencies[ 0 ].tool_shed_repository
- work_dir = make_tmp_directory()
+ work_dir = tempfile.mkdtemp()
# Get the tool_dependencies.xml file from the repository.
tool_dependencies_config = get_config_from_repository( trans.app,
'tool_dependencies.xml',
@@ -654,7 +654,7 @@
message += "from the installed repository's <b>Repository Actions</b> menu. "
status = 'error'
if install_tool_dependencies and tool_shed_repository.tool_dependencies and 'tool_dependencies' in metadata:
- work_dir = make_tmp_directory()
+ work_dir = tempfile.mkdtemp()
# Install tool dependencies.
update_tool_shed_repository_status( trans.app,
tool_shed_repository,
@@ -684,7 +684,7 @@
Generate the metadata for the installed tool shed repository, among other things. This method is called from Galaxy (never the tool shed)
when an admin is installing a new repository or reinstalling an uninstalled repository.
"""
- metadata_dict = generate_metadata_using_disk_files( trans.app.toolbox, relative_install_dir, repository_clone_url )
+ metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( trans.app, relative_install_dir, repository_clone_url )
tool_shed_repository.metadata = metadata_dict
trans.sa_session.add( tool_shed_repository )
trans.sa_session.flush()
@@ -695,7 +695,7 @@
repository_tools_tups = get_repository_tools_tups( trans.app, metadata_dict )
if repository_tools_tups:
# Handle missing data table entries for tool parameters that are dynamically generated select lists.
- work_dir = make_tmp_directory()
+ work_dir = tempfile.mkdtemp()
repository_tools_tups = handle_missing_data_table_entry( trans.app,
tool_shed_repository,
tool_shed_repository.changeset_revision,
@@ -726,7 +726,7 @@
tool_shed_repository.includes_datatypes = True
trans.sa_session.add( tool_shed_repository )
trans.sa_session.flush()
- work_dir = make_tmp_directory()
+ work_dir = tempfile.mkdtemp()
datatypes_config = get_config_from_repository( trans.app,
'datatypes_conf.xml',
tool_shed_repository,
@@ -779,7 +779,7 @@
message = "The repository information has been updated."
elif params.get( 'set_metadata_button', False ):
repository_clone_url = generate_clone_url( trans, repository )
- metadata_dict = generate_metadata_using_disk_files( trans.app.toolbox, relative_install_dir, repository_clone_url )
+ metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( trans.app, relative_install_dir, repository_clone_url )
if metadata_dict:
repository.metadata = metadata_dict
trans.sa_session.add( repository )
@@ -1479,7 +1479,7 @@
update_repository( repo, latest_ctx_rev )
# Update the repository metadata.
tool_shed = clean_tool_shed_url( tool_shed_url )
- metadata_dict = generate_metadata_using_disk_files( trans.app.toolbox, relative_install_dir, repository_clone_url )
+ metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( trans.app, relative_install_dir, repository_clone_url )
repository.metadata = metadata_dict
# Update the repository changeset_revision in the database.
repository.changeset_revision = latest_changeset_revision
diff -r d960803cbacc5cdeea67ffa9a26010adba1120d6 -r 761c58466d97ba0d29af670daabc256aacf4dbb8 lib/galaxy/web/controllers/data_admin.py
--- a/lib/galaxy/web/controllers/data_admin.py
+++ b/lib/galaxy/web/controllers/data_admin.py
@@ -30,25 +30,28 @@
@web.expose
@web.require_admin
def manage_data( self, trans, **kwd ):
- genomes = dict()
if trans.app.config.get_bool( 'enable_beta_job_managers', False ) == False:
- return trans.fill_template( '/admin/data_admin/betajob.mako' )
- for line in trans.app.tool_data_tables.data_tables[ 'all_fasta' ].data:
- defstate = dict( state='Generate', style=self.jobstyles[ 'new' ] )
- indexers = dict( bowtie_indexes=defstate, bowtie2_indexes=defstate, bwa_indexes=defstate, perm_base_indexes=defstate, srma_indexes=defstate, sam_fa_indexes=defstate )
- dbkey = line[0]
- name = line[2]
- indexers[ 'name' ] = name
- indexers[ 'fapath' ] = line[3]
- genomes[ dbkey ] = indexers
- for table in [ 'bowtie_indexes', 'bowtie2_indexes', 'bwa_indexes', 'srma_indexes' ]:
- for line in trans.app.tool_data_tables.data_tables[ table ].data:
- dbkey = line[0]
- genomes[ dbkey ][ table ] = dict( state='Generated', style=self.jobstyles[ 'done' ] )
- for line in trans.app.tool_data_tables.data_tables[ 'sam_fa_indexes' ].data:
- genomes[ line[1] ][ 'sam_fa_indexes' ] = dict( state='Generated', style=self.jobstyles[ 'done' ] )
- for line in trans.app.tool_data_tables.data_tables[ 'perm_base_indexes' ].data:
- genomes[ line[1].split(':')[0] ][ 'perm_base_indexes' ] = dict( state='Generated', style=self.jobstyles[ 'done' ] )
+ return trans.fill_template( '/admin/data_admin/generic_error.mako', message='This feature requires that enable_beta_job_managers be set to True in your Galaxy configuration.' )
+ if 'all_fasta' not in trans.app.tool_data_tables.data_tables:
+ return trans.fill_template( '/admin/data_admin/generic_error.mako', message='The local data manager requires that an all_fasta entry exists in your tool_data_table_conf.xml.' )
+ indextable = {}
+ dbkeys = []
+ labels = { 'bowtie_indexes': 'Bowtie', 'bowtie2_indexes': 'Bowtie 2', 'bwa_indexes': 'BWA', 'srma_indexes': 'Picard', 'sam_fa_indexes': 'SAM', 'perm_base_indexes': 'PerM' }
+ tablenames = { 'Bowtie': 'bowtie_indexes', 'Bowtie 2': 'bowtie2_indexes', 'BWA': 'bwa_indexes', 'Picard': 'srma_indexes', 'SAM': 'sam_fa_indexes', 'PerM': 'perm_base_indexes' }
+ indexfuncs = dict( bowtie_indexes='bowtie', bowtie2_indexes='bowtie2', bwa_indexes='bwa', srma_indexes='picard', sam_fa_indexes='sam', perm_base_indexes='perm' )
+ for genome in trans.app.tool_data_tables.data_tables[ 'all_fasta' ].data:
+ dbkey = genome[0]
+ dbkeys.append( dbkey )
+ indextable[ dbkey ] = dict( indexes=dict(), name=genome[2], path=genome[3] )
+ for genome in indextable:
+ for label in labels:
+ indextable[ genome ][ 'indexes' ][ label ] = 'Generate'
+ if label not in trans.app.tool_data_tables.data_tables:
+ indextable[ genome ][ 'indexes' ][ label ] = 'Disabled'
+ else:
+ for row in trans.app.tool_data_tables.data_tables[ label ].data:
+ if genome in row or row[0].startswith( genome ):
+ indextable[ genome ][ 'indexes' ][ label ] = 'Generated'
jobgrid = []
sa_session = trans.app.model.context.current
jobs = sa_session.query( model.GenomeIndexToolData ).order_by( model.GenomeIndexToolData.created_time.desc() ).filter_by( user_id=trans.get_user().id ).group_by( model.GenomeIndexToolData.deferred ).limit( 20 ).all()
@@ -65,7 +68,8 @@
jobtype = 'index'
indexers = ', '.join( params['indexes'] )
jobgrid.append( dict( jobtype=jobtype, indexers=indexers, rowclass=state, deferred=job.deferred.id, state=state, intname=job.deferred.params[ 'intname' ], dbkey=job.deferred.params[ 'dbkey' ] ) )
- return trans.fill_template( '/admin/data_admin/local_data.mako', jobgrid=jobgrid, genomes=genomes )
+ styles = dict( Generate=self.jobstyles['new'], Generated=self.jobstyles['ok'], Disabled=self.jobstyles['error'] )
+ return trans.fill_template( '/admin/data_admin/local_data.mako', jobgrid=jobgrid, indextable=indextable, labels=labels, dbkeys=dbkeys, styles=styles, indexfuncs=indexfuncs )
@web.expose
@web.require_admin
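
Note: the rewritten manage_data() above replaces the hard-coded per-indexer columns with an indextable keyed by dbkey, marking each index type Generate, Generated, or Disabled depending on whether its tool data table is configured and already contains the genome. A standalone sketch of that structure using hypothetical table contents:

    # Hypothetical stand-ins for trans.app.tool_data_tables entries.
    all_fasta = [ ( 'hg19', 'hg19', 'Human (hg19)', '/data/hg19.fa' ) ]
    labels = { 'bowtie_indexes': 'Bowtie', 'bwa_indexes': 'BWA' }
    available_tables = { 'bowtie_indexes': [ ( 'hg19', 'hg19', 'Human (hg19)', '/idx/hg19' ) ] }

    indextable = {}
    for genome in all_fasta:
        dbkey = genome[ 0 ]
        indextable[ dbkey ] = dict( indexes={}, name=genome[ 2 ], path=genome[ 3 ] )
        for label in labels:
            if label not in available_tables:
                state = 'Disabled'    # the data table is not configured at all
            elif any( dbkey in row or row[ 0 ].startswith( dbkey ) for row in available_tables[ label ] ):
                state = 'Generated'   # an index row for this genome already exists
            else:
                state = 'Generate'    # offer to build the index
            indextable[ dbkey ][ 'indexes' ][ label ] = state

    print( indextable )
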
diff -r d960803cbacc5cdeea67ffa9a26010adba1120d6 -r 761c58466d97ba0d29af670daabc256aacf4dbb8 lib/galaxy/web/controllers/workflow.py
--- a/lib/galaxy/web/controllers/workflow.py
+++ b/lib/galaxy/web/controllers/workflow.py
@@ -655,7 +655,7 @@
'name': module.get_name(),
'tool_id': module.get_tool_id(),
'tool_state': module.get_state(),
- 'tooltip': module.get_tooltip(),
+ 'tooltip': module.get_tooltip( static_path=url_for( '/static' ) ),
'data_inputs': module.get_data_inputs(),
'data_outputs': module.get_data_outputs(),
'form_html': module.get_config_form(),
@@ -732,7 +732,7 @@
'tool_id': module.get_tool_id(),
'name': module.get_name(),
'tool_state': module.get_state(),
- 'tooltip': module.get_tooltip(),
+ 'tooltip': module.get_tooltip( static_path=url_for( '/static' ) ),
'tool_errors': module.get_errors(),
'data_inputs': module.get_data_inputs(),
'data_outputs': module.get_data_outputs(),
diff -r d960803cbacc5cdeea67ffa9a26010adba1120d6 -r 761c58466d97ba0d29af670daabc256aacf4dbb8 lib/galaxy/webapps/community/controllers/common.py
--- a/lib/galaxy/webapps/community/controllers/common.py
+++ b/lib/galaxy/webapps/community/controllers/common.py
@@ -1,15 +1,13 @@
-import os, string, socket, logging, simplejson, binascii
+import os, string, socket, logging, simplejson, binascii, tempfile
from time import strftime
from datetime import *
from galaxy.datatypes.checkers import *
from galaxy.tools import *
from galaxy.util.json import from_json_string, to_json_string
from galaxy.util.hash_util import *
-from galaxy.util.shed_util import clone_repository, copy_sample_file, generate_datatypes_metadata, generate_tool_dependency_metadata, generate_tool_metadata
-from galaxy.util.shed_util import generate_workflow_metadata, get_changectx_for_changeset, get_config, get_config_from_disk, get_configured_ui
-from galaxy.util.shed_util import get_named_tmpfile_from_ctx, get_sample_files_from_disk, handle_sample_tool_data_table_conf_file, INITIAL_CHANGELOG_HASH
-from galaxy.util.shed_util import make_tmp_directory, NOT_TOOL_CONFIGS, reset_tool_data_tables, reversed_upper_bounded_changelog, strip_path, to_html_escaped
-from galaxy.util.shed_util import to_html_str, update_repository
+from galaxy.util.shed_util import clone_repository, generate_metadata_for_changeset_revision, get_changectx_for_changeset, get_config_from_disk
+from galaxy.util.shed_util import get_configured_ui, get_named_tmpfile_from_ctx, handle_sample_tool_data_table_conf_file, INITIAL_CHANGELOG_HASH
+from galaxy.util.shed_util import reset_tool_data_tables, reversed_upper_bounded_changelog, strip_path
from galaxy.web.base.controller import *
from galaxy.webapps.community import model
from galaxy.model.orm import *
@@ -107,11 +105,8 @@
trans.sa_session.flush()
return item_rating
-## ---- Utility methods -------------------------------------------------------
-
def add_repository_metadata_tool_versions( trans, id, changeset_revisions ):
- # If a repository includes tools, build a dictionary of { 'tool id' : 'parent tool id' }
- # pairs for each tool in each changeset revision.
+ # If a repository includes tools, build a dictionary of { 'tool id' : 'parent tool id' } pairs for each tool in each changeset revision.
for index, changeset_revision in enumerate( changeset_revisions ):
tool_versions_dict = {}
repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
@@ -120,57 +115,24 @@
if metadata:
tool_dicts = metadata.get( 'tools', [] )
if index == 0:
- # The first changset_revision is a special case because it will have no ancestor
- # changeset_revisions in which to match tools. The parent tool id for tools in
- # the first changeset_revision will be the "old_id" in the tool config.
+ # The first changeset_revision is a special case because it will have no ancestor changeset_revisions in which to match tools.
+ # The parent tool id for tools in the first changeset_revision will be the "old_id" in the tool config.
for tool_dict in tool_dicts:
tool_versions_dict[ tool_dict[ 'guid' ] ] = tool_dict[ 'id' ]
else:
for tool_dict in tool_dicts:
# We have at least 2 changeset revisions to compare tool guids and tool ids.
- parent_id = get_parent_id( trans, id, tool_dict[ 'id' ], tool_dict[ 'version' ], tool_dict[ 'guid' ], changeset_revisions[ 0:index ] )
+ parent_id = get_parent_id( trans,
+ id,
+ tool_dict[ 'id' ],
+ tool_dict[ 'version' ],
+ tool_dict[ 'guid' ],
+ changeset_revisions[ 0:index ] )
tool_versions_dict[ tool_dict[ 'guid' ] ] = parent_id
if tool_versions_dict:
repository_metadata.tool_versions = tool_versions_dict
trans.sa_session.add( repository_metadata )
trans.sa_session.flush()
-def build_changeset_revision_select_field( trans, repository, selected_value=None, add_id_to_name=True ):
- """Build a SelectField whose options are the changeset_rev strings of all downloadable revisions of the received repository."""
- repo = hg.repository( get_configured_ui(), repository.repo_path )
- options = []
- changeset_tups = []
- refresh_on_change_values = []
- for repository_metadata in repository.downloadable_revisions:
- changeset_revision = repository_metadata.changeset_revision
- ctx = get_changectx_for_changeset( repo, changeset_revision )
- if ctx:
- rev = '%04d' % ctx.rev()
- label = "%s:%s" % ( str( ctx.rev() ), changeset_revision )
- else:
- rev = '-1'
- label = "-1:%s" % changeset_revision
- changeset_tups.append( ( rev, label, changeset_revision ) )
- refresh_on_change_values.append( changeset_revision )
- # Sort options by the revision label. Even though the downloadable_revisions query sorts by update_time,
- # the changeset revisions may not be sorted correctly because setting metadata over time will reset update_time.
- for changeset_tup in sorted( changeset_tups ):
- # Display the latest revision first.
- options.insert( 0, ( changeset_tup[1], changeset_tup[2] ) )
- if add_id_to_name:
- name = 'changeset_revision_%d' % repository.id
- else:
- name = 'changeset_revision'
- select_field = SelectField( name=name,
- refresh_on_change=True,
- refresh_on_change_values=refresh_on_change_values )
- for option_tup in options:
- selected = selected_value and option_tup[1] == selected_value
- select_field.add_option( option_tup[0], option_tup[1], selected=selected )
- return select_field
-def changeset_is_downloadable( metadata_dict ):
- # A RepositoryMetadata record will be created if metadata_dict includes only invalid stuff like 'invalid_tools', but in this case
- # it won't be downloadable.
- return 'datatypes' in metadata_dict or 'tools' in metadata_dict or 'workflows' in metadata_dict
def changeset_is_malicious( trans, id, changeset_revision, **kwd ):
"""Check the malicious flag in repository metadata for a specified change set"""
repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
@@ -188,60 +150,12 @@
if user_email in admin_users:
return True
return False
-def check_tool_input_params( trans, repo_dir, tool_config, tool, sample_files, invalid_files ):
- """
- Check all of the tool's input parameters, looking for any that are dynamically generated using external data files to make
- sure the files exist.
- """
- can_set_metadata = True
- correction_msg = ''
- for input_param in tool.input_params:
- if isinstance( input_param, tools.parameters.basic.SelectToolParameter ) and input_param.is_dynamic:
- # If the tool refers to .loc files or requires an entry in the tool_data_table_conf.xml, make sure all requirements exist.
- options = input_param.dynamic_options or input_param.options
- if options:
- if options.tool_data_table or options.missing_tool_data_table_name:
- # Make sure the repository contains a tool_data_table_conf.xml.sample file.
- sample_tool_data_table_conf = get_config_from_disk( 'tool_data_table_conf.xml.sample', repo_dir )
- if sample_tool_data_table_conf:
- error, correction_msg = handle_sample_tool_data_table_conf_file( trans, sample_tool_data_table_conf )
- if error:
- can_set_metadata = False
- invalid_files.append( ( 'tool_data_table_conf.xml.sample', correction_msg ) )
- else:
- options.missing_tool_data_table_name = None
- else:
- can_set_metadata = False
- correction_msg = "This file requires an entry in the tool_data_table_conf.xml file. Upload a file named tool_data_table_conf.xml.sample "
- correction_msg += "to the repository that includes the required entry to correct this error.<br/>"
- invalid_files.append( ( tool_config, correction_msg ) )
- if options.index_file or options.missing_index_file:
- # Make sure the repository contains the required xxx.loc.sample file.
- index_file = options.index_file or options.missing_index_file
- index_file_name = strip_path( index_file )
- sample_found = False
- for sample_file in sample_files:
- sample_file_name = strip_path( sample_file )
- if sample_file_name == '%s.sample' % index_file_name:
- options.index_file = index_file_name
- options.missing_index_file = None
- if options.tool_data_table:
- options.tool_data_table.missing_index_file = None
- sample_found = True
- break
- if not sample_found:
- can_set_metadata = False
- correction_msg = "This file refers to a file named <b>%s</b>. " % str( index_file )
- correction_msg += "Upload a file named <b>%s.sample</b> to the repository to correct this error." % str( index_file_name )
- invalid_files.append( ( tool_config, correction_msg ) )
- # Reset the tool_data_tables by loading the empty tool_data_table_conf.xml file.
- reset_tool_data_tables( trans.app )
- return can_set_metadata, invalid_files
def clean_repository_metadata( trans, id, changeset_revisions ):
# Delete all repository_metadata records associated with the repository that have a changeset_revision that is not in changeset_revisions.
# We sometimes see multiple records with the same changeset revision value - no idea how this happens. We'll assume we can delete the older
# records, so we'll order by update_time descending and delete records that have the same changeset_revision we come across later..
- changeset_revisions_checked = []
+ changeset_revisions_checked = []
+ cleaned_changeset_revisions = []
for repository_metadata in trans.sa_session.query( trans.model.RepositoryMetadata ) \
.filter( trans.model.RepositoryMetadata.table.c.repository_id == trans.security.decode_id( id ) ) \
.order_by( trans.model.RepositoryMetadata.table.c.changeset_revision,
@@ -251,6 +165,9 @@
if can_delete:
trans.sa_session.delete( repository_metadata )
trans.sa_session.flush()
+ else:
+ cleaned_changeset_revisions.append( changeset_revision )
+ return cleaned_changeset_revisions
def compare_changeset_revisions( ancestor_changeset_revision, ancestor_metadata_dict, current_changeset_revision, current_metadata_dict ):
# The metadata associated with ancestor_changeset_revision is ancestor_metadata_dict. This changeset_revision is an ancestor of
# current_changeset_revision which is associated with current_metadata_dict. A new repository_metadata record will be created only
@@ -369,14 +286,19 @@
return file_path
return None
def create_or_update_repository_metadata( trans, id, repository, changeset_revision, metadata_dict ):
+ downloadable = is_downloadable( metadata_dict )
repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
if repository_metadata:
repository_metadata.metadata = metadata_dict
+ repository_metadata.downloadable = downloadable
else:
- repository_metadata = trans.model.RepositoryMetadata( repository.id, changeset_revision, metadata_dict )
- repository_metadata.downloadable = changeset_is_downloadable( metadata_dict )
+ repository_metadata = trans.model.RepositoryMetadata( repository_id=repository.id,
+ changeset_revision=changeset_revision,
+ metadata=metadata_dict,
+ downloadable=downloadable )
trans.sa_session.add( repository_metadata )
trans.sa_session.flush()
+ return repository_metadata
def generate_clone_url( trans, repository_id ):
"""Generate the URL for cloning a repository."""
repository = get_repository( trans, repository_id )
@@ -387,69 +309,6 @@
return '%s://%s%s/repos/%s/%s' % ( protocol, username, base, repository.user.username, repository.name )
else:
return '%s/repos/%s/%s' % ( base_url, repository.user.username, repository.name )
-def generate_metadata_for_changeset_revision( trans, repository_files_dir, repository_clone_url ):
- """
- Generate metadata for a repository using it's files on disk. To generate metadata for changeset revisions older than the repository tip,
- the repository will have been cloned to a temporary location and updated to a specified changeset revision to access that changeset revision's
- disk files, so the value of repository_files_dir will not always be repository.repo_path (it could be a temporary directory containing a clone).
- """
- metadata_dict = {}
- invalid_files = []
- invalid_tool_configs = []
- tool_dependencies_config = None
- datatypes_config = get_config_from_disk( 'datatypes_conf.xml', repository_files_dir )
- if datatypes_config:
- metadata_dict = generate_datatypes_metadata( datatypes_config, metadata_dict )
- sample_files = get_sample_files_from_disk( repository_files_dir )
- if sample_files:
- metadata_dict[ 'sample_files' ] = sample_files
- # Find all tool configs and exported workflows.
- for root, dirs, files in os.walk( repository_files_dir ):
- if root.find( '.hg' ) < 0 and root.find( 'hgrc' ) < 0:
- if '.hg' in dirs:
- dirs.remove( '.hg' )
- for name in files:
- # Find all tool configs.
- if name not in NOT_TOOL_CONFIGS and name.endswith( '.xml' ):
- full_path = os.path.abspath( os.path.join( root, name ) )
- if not ( check_binary( full_path ) or check_image( full_path ) or check_gzip( full_path )[ 0 ]
- or check_bz2( full_path )[ 0 ] or check_zip( full_path ) ):
- try:
- # Make sure we're looking at a tool config and not a display application config or something else.
- element_tree = util.parse_xml( full_path )
- element_tree_root = element_tree.getroot()
- is_tool = element_tree_root.tag == 'tool'
- except Exception, e:
- print "Error parsing %s", full_path, ", exception: ", str( e )
- is_tool = False
- if is_tool:
- try:
- tool = trans.app.toolbox.load_tool( full_path )
- tool_config = os.path.join( root, name )
- except Exception, e:
- tool = None
- invalid_tool_configs.append( name )
- if tool is not None:
- can_set_metadata, invalid_files = check_tool_input_params( trans, repository_files_dir, tool_config, tool, sample_files, invalid_files )
- if can_set_metadata:
- metadata_dict = generate_tool_metadata( tool_config, tool, repository_clone_url, metadata_dict )
- # Find all exported workflows
- elif name.endswith( '.ga' ):
- relative_path = os.path.join( root, name )
- fp = open( relative_path, 'rb' )
- workflow_text = fp.read()
- fp.close()
- exported_workflow_dict = from_json_string( workflow_text )
- if 'a_galaxy_workflow' in exported_workflow_dict and exported_workflow_dict[ 'a_galaxy_workflow' ] == 'true':
- metadata_dict = generate_workflow_metadata( relative_path, exported_workflow_dict, metadata_dict )
- if 'tools' in metadata_dict:
- # This step must be done after metadata for tools has been defined.
- tool_dependencies_config = get_config_from_disk( 'tool_dependencies.xml', repository_files_dir )
- if tool_dependencies_config:
- metadata_dict = generate_tool_dependency_metadata( tool_dependencies_config, metadata_dict )
- if invalid_tool_configs:
- metadata_dict [ 'invalid_tools' ] = invalid_tool_configs
- return metadata_dict, invalid_files
def generate_tool_guid( trans, repository, tool ):
"""
Generate a guid for the received tool. The form of the guid is
@@ -588,10 +447,23 @@
.first()
def get_repository_metadata_by_changeset_revision( trans, id, changeset_revision ):
"""Get metadata for a specified repository change set from the database"""
- return trans.sa_session.query( trans.model.RepositoryMetadata ) \
- .filter( and_( trans.model.RepositoryMetadata.table.c.repository_id == trans.security.decode_id( id ),
- trans.model.RepositoryMetadata.table.c.changeset_revision == changeset_revision ) ) \
- .first()
+ # Make sure there are no duplicate records, and return the single unique record for the changeset_revision. Duplicate records were somehow
+ # created in the past. This may or may not be resolved, so when it is confirmed that the cause of duplicate records has been corrected, tweak
+ # this method accordingly.
+ all_metadata_records = trans.sa_session.query( trans.model.RepositoryMetadata ) \
+ .filter( and_( trans.model.RepositoryMetadata.table.c.repository_id == trans.security.decode_id( id ),
+ trans.model.RepositoryMetadata.table.c.changeset_revision == changeset_revision ) ) \
+ .order_by( trans.model.RepositoryMetadata.table.c.update_time.desc() ) \
+ .all()
+ if len( all_metadata_records ) > 1:
+ # Delete all records older than the last one updated.
+ for repository_metadata in all_metadata_records[ 1: ]:
+ trans.sa_session.delete( repository_metadata )
+ trans.sa_session.flush()
+ return all_metadata_records[ 0 ]
+ elif all_metadata_records:
+ return all_metadata_records[ 0 ]
+ return None
def get_repository_metadata_by_id( trans, id ):
"""Get repository metadata from the database"""
return trans.sa_session.query( trans.model.RepositoryMetadata ).get( trans.security.decode_id( id ) )
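
Note: get_repository_metadata_by_changeset_revision() now tolerates the duplicate rows described in the comment above by ordering matches newest-first and deleting every record after the first. A standalone illustration of that rule, using plain dictionaries in place of RepositoryMetadata rows:

    records = [
        { 'changeset_revision': 'abc123', 'update_time': 1 },
        { 'changeset_revision': 'abc123', 'update_time': 3 },
        { 'changeset_revision': 'abc123', 'update_time': 2 },
    ]
    # Order by update_time descending and keep the most recently updated record.
    records.sort( key=lambda record: record[ 'update_time' ], reverse=True )
    keep, stale = records[ 0 ], records[ 1: ]
    # In the controller the stale records are deleted via the SQLAlchemy session.
    print( keep[ 'update_time' ], len( stale ) )
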
@@ -700,6 +572,8 @@
util.send_mail( frm, to, subject, body, trans.app.config )
except Exception, e:
log.exception( "An error occurred sending a tool shed repository update alert by email." )
+def is_downloadable( metadata_dict ):
+ return 'datatypes' in metadata_dict or 'tools' in metadata_dict or 'workflows' in metadata_dict
def load_tool( trans, config_file ):
"""Load a single tool from the file named by `config_file` and return an instance of `Tool`."""
# Parse XML configuration file and get the root element
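
Note: is_downloadable() above replaces changeset_is_downloadable() from the block removed earlier in this file; a revision is downloadable only when its metadata describes datatypes, tools, or workflows, so metadata containing nothing but invalid_tools is kept but not installable. A trivial standalone check (example values are hypothetical):

    def is_downloadable( metadata_dict ):
        return 'datatypes' in metadata_dict or 'tools' in metadata_dict or 'workflows' in metadata_dict

    print( is_downloadable( { 'invalid_tools': [ 'broken_tool.xml' ] } ) )   # False
    print( is_downloadable( { 'tools': [ { 'id': 'example_tool' } ] } ) )    # True
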
@@ -762,7 +636,7 @@
ctx = get_changectx_for_changeset( repo, changeset_revision )
tool = None
message = ''
- work_dir = make_tmp_directory()
+ work_dir = tempfile.mkdtemp()
sample_files, deleted_sample_files = get_list_of_copied_sample_files( repo, ctx, dir=work_dir )
if sample_files:
trans.app.config.tool_data_path = work_dir
@@ -913,7 +787,7 @@
print "Cloning repository revision: ", str( ctx.rev() )
clone_repository( repository_clone_url, work_dir, str( ctx.rev() ) )
print "Generating metadata for changset revision: ", str( ctx.rev() )
- current_metadata_dict, invalid_files = generate_metadata_for_changeset_revision( trans, work_dir, repository_clone_url )
+ current_metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( trans.app, work_dir, repository_clone_url )
if current_metadata_dict:
if not metadata_changeset_revision and not metadata_dict:
# We're at the first change set in the change log.
@@ -935,7 +809,7 @@
elif comparison == 'not equal and not subset':
metadata_changeset_revision = ancestor_changeset_revision
metadata_dict = ancestor_metadata_dict
- create_or_update_repository_metadata( trans, id, repository, metadata_changeset_revision, metadata_dict )
+ repository_metadata = create_or_update_repository_metadata( trans, id, repository, metadata_changeset_revision, metadata_dict )
changeset_revisions.append( metadata_changeset_revision )
ancestor_changeset_revision = current_changeset_revision
ancestor_metadata_dict = current_metadata_dict
@@ -947,7 +821,7 @@
metadata_changeset_revision = current_changeset_revision
metadata_dict = current_metadata_dict
# We're at the end of the change log.
- create_or_update_repository_metadata( trans, id, repository, metadata_changeset_revision, metadata_dict )
+ repository_metadata = create_or_update_repository_metadata( trans, id, repository, metadata_changeset_revision, metadata_dict )
changeset_revisions.append( metadata_changeset_revision )
ancestor_changeset_revision = None
ancestor_metadata_dict = None
@@ -958,7 +832,7 @@
metadata_dict = ancestor_metadata_dict
if not ctx.children():
# We're at the end of the change log.
- create_or_update_repository_metadata( trans, id, repository, metadata_changeset_revision, metadata_dict )
+ repository_metadata = create_or_update_repository_metadata( trans, id, repository, metadata_changeset_revision, metadata_dict )
changeset_revisions.append( metadata_changeset_revision )
ancestor_changeset_revision = None
ancestor_metadata_dict = None
@@ -968,8 +842,9 @@
except:
pass
# Delete all repository_metadata records for this repository that do not have a changeset_revision value in changeset_revisions.
- clean_repository_metadata( trans, id, changeset_revisions )
- add_repository_metadata_tool_versions( trans, id, changeset_revisions )
+ cleaned_changeset_revisions = clean_repository_metadata( trans, id, changeset_revisions )
+ # Set tool version information for all downloadable changeset revisions.
+ add_repository_metadata_tool_versions( trans, id, cleaned_changeset_revisions )
def set_repository_metadata( trans, repository, content_alert_str='', **kwd ):
"""
Set metadata using the repository's current disk files, returning specific error messages (if any) to alert the repository owner that the changeset
@@ -980,46 +855,60 @@
repository_clone_url = generate_clone_url( trans, trans.security.encode_id( repository.id ) )
repo_dir = repository.repo_path
repo = hg.repository( get_configured_ui(), repo_dir )
- metadata_dict, invalid_files = generate_metadata_for_changeset_revision( trans, repo_dir, repository_clone_url )
+ metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( trans.app, repo_dir, repository_clone_url )
if metadata_dict:
+ downloadable = is_downloadable( metadata_dict )
+ repository_metadata = None
if new_tool_metadata_required( trans, repository, metadata_dict ) or new_workflow_metadata_required( trans, repository, metadata_dict ):
# Create a new repository_metadata table row.
- repository_metadata = trans.model.RepositoryMetadata( repository.id, repository.tip, metadata_dict )
- trans.sa_session.add( repository_metadata )
- try:
- trans.sa_session.flush()
- # If this is the first record stored for this repository, see if we need to send any email alerts.
- if len( repository.downloadable_revisions ) == 1:
- handle_email_alerts( trans, repository, content_alert_str='', new_repo_alert=True, admin_only=False )
- except TypeError, e:
- message = "Unable to save metadata for this repository, exception: %s" % str( e )
- status = 'error'
+ repository_metadata = create_or_update_repository_metadata( trans,
+ trans.security.encode_id( repository.id ),
+ repository,
+ repository.tip,
+ metadata_dict )
+ # If this is the first record stored for this repository, see if we need to send any email alerts.
+ if len( repository.downloadable_revisions ) == 1:
+ handle_email_alerts( trans, repository, content_alert_str='', new_repo_alert=True, admin_only=False )
else:
repository_metadata = get_latest_repository_metadata( trans, repository.id )
if repository_metadata:
+ downloadable = is_downloadable( metadata_dict )
# Update the last saved repository_metadata table row.
repository_metadata.changeset_revision = repository.tip
repository_metadata.metadata = metadata_dict
- repository_metadata.downloadable = changeset_is_downloadable( metadata_dict )
+ repository_metadata.downloadable = downloadable
trans.sa_session.add( repository_metadata )
trans.sa_session.flush()
else:
# There are no tools in the repository, and we're setting metadata on the repository tip.
- repository_metadata = trans.model.RepositoryMetadata( repository.id, repository.tip, metadata_dict )
- trans.sa_session.add( repository_metadata )
- trans.sa_session.flush()
- elif len( repo ) == 1 and not invalid_files:
+ repository_metadata = create_or_update_repository_metadata( trans,
+ trans.security.encode_id( repository.id ),
+ repository,
+ repository.tip,
+ metadata_dict )
+ if 'tools' in metadata_dict and repository_metadata and status != 'error':
+ # Set tool versions on the new downloadable change set. The order of the list of changesets is critical, so we use the repo's changelog.
+ downloadable_changeset_revisions = [ rm.changeset_revision for rm in repository.downloadable_revisions ]
+ changeset_revisions = []
+ for changeset in repo.changelog:
+ changeset_revision = str( repo.changectx( changeset ) )
+ if changeset_revision in downloadable_changeset_revisions:
+ changeset_revisions.append( changeset_revision )
+ # Now append the latest changeset_revision we just updated above.
+ changeset_revisions.append( repository_metadata.changeset_revision )
+ add_repository_metadata_tool_versions( trans, trans.security.encode_id( repository.id ), changeset_revisions )
+ elif len( repo ) == 1 and not invalid_file_tups:
message = "Revision '%s' includes no tools, datatypes or exported workflows for which metadata can " % str( repository.tip )
message += "be defined so this revision cannot be automatically installed into a local Galaxy instance."
status = "error"
- if invalid_files:
+ if invalid_file_tups:
if metadata_dict:
message += "Metadata was defined for some items in revision '%s'. " % str( repository.tip )
message += "Correct the following problems if necessary and reset metadata.<br/>"
else:
message += "Metadata cannot be defined for revision '%s' so this revision cannot be automatically " % str( repository.tip )
message += "installed into a local Galaxy instance. Correct the following problems and reset metadata.<br/>"
- for itc_tup in invalid_files:
+ for itc_tup in invalid_file_tups:
tool_file, exception_msg = itc_tup
if exception_msg.find( 'No such file or directory' ) >= 0:
exception_items = exception_msg.split()
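
Note: in set_repository_metadata() above, tool versions are now assigned using changesets in changelog order rather than in whatever order the downloadable revisions are stored, because parent tool ids are resolved against earlier revisions. A standalone sketch of that ordering rule with hypothetical revision ids:

    changelog = [ 'rev0', 'rev1', 'rev2', 'rev3', 'rev4' ]   # oldest first
    downloadable = set( [ 'rev3', 'rev1' ] )
    # Walk the changelog so the result preserves history order, not set order.
    ordered = [ rev for rev in changelog if rev in downloadable ]
    print( ordered )   # ['rev1', 'rev3']
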
diff -r d960803cbacc5cdeea67ffa9a26010adba1120d6 -r 761c58466d97ba0d29af670daabc256aacf4dbb8 lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -9,8 +9,8 @@
from galaxy.web.framework.helpers import time_ago, iff, grids
from galaxy.util.json import from_json_string, to_json_string
from galaxy.model.orm import *
-from galaxy.util.shed_util import create_repo_info_dict, get_changectx_for_changeset, get_configured_ui, get_repository_file_contents, make_tmp_directory, NOT_TOOL_CONFIGS
-from galaxy.util.shed_util import open_repository_files_folder, reversed_lower_upper_bounded_changelog, strip_path
+from galaxy.util.shed_util import create_repo_info_dict, get_changectx_for_changeset, get_configured_ui, get_repository_file_contents, NOT_TOOL_CONFIGS
+from galaxy.util.shed_util import open_repository_files_folder, reversed_lower_upper_bounded_changelog, strip_path, to_html_escaped, update_repository
from galaxy.tool_shed.encoding_util import *
from common import *
@@ -113,7 +113,7 @@
grids.GridColumn.__init__( self, col_name )
def get_value( self, trans, grid, repository ):
"""Display a SelectField whose options are the changeset_revision strings of all downloadable_revisions of this repository."""
- select_field = build_changeset_revision_select_field( trans, repository )
+ select_field = build_changeset_revision_select_field( trans, repository, downloadable_only=False )
if len( select_field.options ) > 1:
return select_field.get_html()
return repository.revision
@@ -268,7 +268,7 @@
grids.GridColumn.__init__( self, col_name )
def get_value( self, trans, grid, repository ):
"""Display a SelectField whose options are the changeset_revision strings of all download-able revisions of this repository."""
- select_field = build_changeset_revision_select_field( trans, repository )
+ select_field = build_changeset_revision_select_field( trans, repository, downloadable_only=True )
if len( select_field.options ) > 1:
return select_field.get_html()
return repository.revision
@@ -1346,19 +1346,14 @@
message = util.restore_text( params.get( 'message', '' ) )
status = params.get( 'status', 'error' )
webapp = get_webapp( trans, **kwd )
+ repository_clone_url = generate_clone_url( trans, repository_id )
repository = get_repository( trans, repository_id )
repo_dir = repository.repo_path
repo = hg.repository( get_configured_ui(), repo_dir )
ctx = get_changectx_for_changeset( repo, changeset_revision )
invalid_message = ''
- metadata_dict, invalid_files, deleted_sample_files = generate_metadata_for_changeset_revision( trans,
- repo,
- repository_id,
- ctx,
- changeset_revision,
- repo_dir,
- updating_tip=changeset_revision==repository.tip )
- for invalid_file_tup in invalid_files:
+ metadata_dict, invalid_file_tups = generate_metadata_for_changeset_revision( trans.app, repo_dir, repository_clone_url )
+ for invalid_file_tup in invalid_file_tups:
invalid_tool_config, invalid_msg = invalid_file_tup
invalid_tool_config_name = strip_path( invalid_tool_config )
if tool_config == invalid_tool_config_name:
@@ -1554,7 +1549,8 @@
changeset_revision_select_field = build_changeset_revision_select_field( trans,
repository,
selected_value=changeset_revision,
- add_id_to_name=False )
+ add_id_to_name=False,
+ downloadable_only=False )
revision_label = get_revision_label( trans, repository, changeset_revision )
repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
if repository_metadata:
@@ -1657,7 +1653,8 @@
changeset_revision_select_field = build_changeset_revision_select_field( trans,
repository,
selected_value=changeset_revision,
- add_id_to_name=False )
+ add_id_to_name=False,
+ downloadable_only=False )
return trans.fill_template( '/webapps/community/repository/preview_tools_in_changeset.mako',
repository=repository,
repository_metadata_id=repository_metadata_id,
@@ -2128,7 +2125,8 @@
changeset_revision_select_field = build_changeset_revision_select_field( trans,
repository,
selected_value=changeset_revision,
- add_id_to_name=False )
+ add_id_to_name=False,
+ downloadable_only=False )
revision_label = get_revision_label( trans, repository, changeset_revision )
repository_metadata = get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
if repository_metadata:
@@ -2185,7 +2183,8 @@
changeset_revision_select_field = build_changeset_revision_select_field( trans,
repository,
selected_value=changeset_revision,
- add_id_to_name=False )
+ add_id_to_name=False,
+ downloadable_only=False )
return trans.fill_template( "/webapps/community/repository/view_tool_metadata.mako",
repository=repository,
tool=tool,
@@ -2197,3 +2196,42 @@
webapp=webapp,
message=message,
status=status )
+
+# ----- Utility methods -----
+def build_changeset_revision_select_field( trans, repository, selected_value=None, add_id_to_name=True, downloadable_only=False ):
+ """Build a SelectField whose options are the changeset_rev strings of all downloadable revisions of the received repository."""
+ repo = hg.repository( get_configured_ui(), repository.repo_path )
+ options = []
+ changeset_tups = []
+ refresh_on_change_values = []
+ if downloadable_only:
+ repository_metadata_revisions = repository.downloadable_revisions
+ else:
+ repository_metadata_revisions = repository.metadata_revisions
+ for repository_metadata in repository_metadata_revisions:
+ changeset_revision = repository_metadata.changeset_revision
+ ctx = get_changectx_for_changeset( repo, changeset_revision )
+ if ctx:
+ rev = '%04d' % ctx.rev()
+ label = "%s:%s" % ( str( ctx.rev() ), changeset_revision )
+ else:
+ rev = '-1'
+ label = "-1:%s" % changeset_revision
+ changeset_tups.append( ( rev, label, changeset_revision ) )
+ refresh_on_change_values.append( changeset_revision )
+ # Sort options by the revision label. Even though the downloadable_revisions query sorts by update_time,
+ # the changeset revisions may not be sorted correctly because setting metadata over time will reset update_time.
+ for changeset_tup in sorted( changeset_tups ):
+ # Display the latest revision first.
+ options.insert( 0, ( changeset_tup[1], changeset_tup[2] ) )
+ if add_id_to_name:
+ name = 'changeset_revision_%d' % repository.id
+ else:
+ name = 'changeset_revision'
+ select_field = SelectField( name=name,
+ refresh_on_change=True,
+ refresh_on_change_values=refresh_on_change_values )
+ for option_tup in options:
+ selected = selected_value and option_tup[1] == selected_value
+ select_field.add_option( option_tup[0], option_tup[1], selected=selected )
+ return select_field
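
Note: build_changeset_revision_select_field() has moved here from common.py and gained a downloadable_only flag that chooses between repository.downloadable_revisions and repository.metadata_revisions. Its newest-first option ordering relies on the zero-padded '%04d' revision labels so a plain string sort behaves numerically; a standalone sketch with hypothetical revisions:

    changeset_tups = [
        ( '0002', '2:bbb111', 'bbb111' ),
        ( '0010', '10:ccc222', 'ccc222' ),
        ( '0001', '1:aaa000', 'aaa000' ),
    ]
    options = []
    # Zero padding makes the string sort match numeric revision order, and
    # inserting at index 0 puts the latest revision first in the select list.
    for rev, label, changeset_revision in sorted( changeset_tups ):
        options.insert( 0, ( label, changeset_revision ) )
    print( options )   # the '10:ccc222' option comes first
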
diff -r d960803cbacc5cdeea67ffa9a26010adba1120d6 -r 761c58466d97ba0d29af670daabc256aacf4dbb8 lib/galaxy/webapps/community/model/__init__.py
--- a/lib/galaxy/webapps/community/model/__init__.py
+++ b/lib/galaxy/webapps/community/model/__init__.py
@@ -166,12 +166,13 @@
fp.close()
class RepositoryMetadata( object ):
- def __init__( self, repository_id=None, changeset_revision=None, metadata=None, tool_versions=None, malicious=False ):
+ def __init__( self, repository_id=None, changeset_revision=None, metadata=None, tool_versions=None, malicious=False, downloadable=False ):
self.repository_id = repository_id
self.changeset_revision = changeset_revision
self.metadata = metadata or dict()
self.tool_versions = tool_versions or dict()
self.malicious = malicious
+ self.downloadable = downloadable
class ItemRatingAssociation( object ):
def __init__( self, id=None, user=None, item=None, rating=0, comment='' ):
diff -r d960803cbacc5cdeea67ffa9a26010adba1120d6 -r 761c58466d97ba0d29af670daabc256aacf4dbb8 lib/galaxy/workflow/modules.py
--- a/lib/galaxy/workflow/modules.py
+++ b/lib/galaxy/workflow/modules.py
@@ -49,7 +49,7 @@
return self.name
def get_tool_id( self ):
return None
- def get_tooltip( self ):
+ def get_tooltip( self, static_path='' ):
return None
## ---- Configuration time -----------------------------------------------
@@ -258,8 +258,8 @@
return self.state.encode( self.tool, self.trans.app, secure=secure )
def get_errors( self ):
return self.errors
- def get_tooltip( self ):
- return self.tool.help
+ def get_tooltip( self, static_path='' ):
+ return self.tool.help.render( static_path=static_path )
def get_data_inputs( self ):
data_inputs = []
def callback( input, value, prefixed_name, prefixed_label ):
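
Note: the get_tooltip() change above (and the matching tool_form.mako change below) exists because tool help is parsed as a Mako template, so it must be rendered with a static_path before it can be displayed; this is also why Mako is now required earlier in this changeset in lib/galaxy/tools/__init__.py. A minimal standalone sketch with a hypothetical help string:

    from mako.template import Template

    help_template = Template( "See the example image: <img src='${static_path}/images/example.png'/>" )
    print( help_template.render( static_path='/static' ) )
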
diff -r d960803cbacc5cdeea67ffa9a26010adba1120d6 -r 761c58466d97ba0d29af670daabc256aacf4dbb8 templates/admin/data_admin/betajob.mako
--- a/templates/admin/data_admin/betajob.mako
+++ /dev/null
@@ -1,35 +0,0 @@
-<%inherit file="/base.mako"/>
-<%namespace file="/message.mako" import="render_msg" />
-<%namespace file="/library/common/common.mako" import="common_javascripts" />
-
-<%!
- def inherit(context):
- if context.get('use_panels'):
- return '/webapps/galaxy/base_panels.mako'
- else:
- return '/base.mako'
-%>
-<%inherit file="${inherit(context)}"/>
-
-<%def name="init()">
-<%
- self.has_left_panel=False
- self.has_right_panel=False
- self.message_box_visible=False
- self.active_view="user"
- self.overlay_visible=False
- self.has_accessible_datasets = False
-%>
-</%def>
-<%def name="stylesheets()">
- ${parent.stylesheets()}
- ${h.css( "autocomplete_tagging" )}
-</%def>
-<%def name="javascripts()">
- ${parent.javascripts()}
- ${h.js("jquery.autocomplete", "autocomplete_tagging" )}
-</%def>
-##
-## Override methods from base.mako and base_panels.mako
-##
-<p class="panel-error-message">This feature requires that enable_beta_job_managers be set to True in your Galaxy configuration.</p>
\ No newline at end of file
diff -r d960803cbacc5cdeea67ffa9a26010adba1120d6 -r 761c58466d97ba0d29af670daabc256aacf4dbb8 templates/admin/data_admin/generic_error.mako
--- /dev/null
+++ b/templates/admin/data_admin/generic_error.mako
@@ -0,0 +1,35 @@
+<%inherit file="/base.mako"/>
+<%namespace file="/message.mako" import="render_msg" />
+<%namespace file="/library/common/common.mako" import="common_javascripts" />
+
+<%!
+ def inherit(context):
+ if context.get('use_panels'):
+ return '/webapps/galaxy/base_panels.mako'
+ else:
+ return '/base.mako'
+%>
+<%inherit file="${inherit(context)}"/>
+
+<%def name="init()">
+<%
+ self.has_left_panel=False
+ self.has_right_panel=False
+ self.message_box_visible=False
+ self.active_view="user"
+ self.overlay_visible=False
+ self.has_accessible_datasets = False
+%>
+</%def>
+<%def name="stylesheets()">
+ ${parent.stylesheets()}
+ ${h.css( "autocomplete_tagging" )}
+</%def>
+<%def name="javascripts()">
+ ${parent.javascripts()}
+ ${h.js("jquery.autocomplete", "autocomplete_tagging" )}
+</%def>
+##
+## Override methods from base.mako and base_panels.mako
+##
+<p class="panel-error-message">${message}</p>
\ No newline at end of file
diff -r d960803cbacc5cdeea67ffa9a26010adba1120d6 -r 761c58466d97ba0d29af670daabc256aacf4dbb8 templates/admin/data_admin/local_data.mako
--- a/templates/admin/data_admin/local_data.mako
+++ b/templates/admin/data_admin/local_data.mako
@@ -44,6 +44,7 @@
td, th { padding-left: 10px; padding-right: 10px; }
td.state-color-new { text-decoration: underline; }
td.panel-done-message { background-image: none; padding: 0px 10px 0px 10px; }
+ td.panel-error-message { background-image: none; padding: 0px 10px 0px 10px; }
</style><div class="toolForm">
%if message:
@@ -52,19 +53,23 @@
<div class="toolFormTitle">Currently tracked builds <a class="action-button" href="${h.url_for( controller='data_admin', action='add_genome' )}">Add new</a></div><div class="toolFormBody"><h2>Locally cached data:</h2>
- <h3>NOTE: Indexers queued here will not be reflected in the table until Galaxy is restarted.</h3>
+ <h3>NOTE: Indexes generated here will not be reflected in the table until Galaxy is restarted.</h3><table id="locfiles">
- <tr><th>Database ID</th><th>Name</th><th>Bowtie</th><th>Bowtie 2</th><th>BWA</th><th>Sam</th><th>Picard</th><th>PerM</th></tr>
- %for dbkey in sorted(genomes.keys()):
+ <tr>
+ <th>DB Key</th>
+ <th>Name</th>
+ %for label in labels:
+ <th>${labels[label]}</th>
+ %endfor
+ </tr>
+ %for dbkey in sorted(dbkeys):
<tr><td>${dbkey}</td>
- <td>${genomes[dbkey]['name']}</td>
- <td id="${dbkey}-bowtie" class="indexcell ${genomes[dbkey]['bowtie_indexes']['style']}" data-fapath="${genomes[dbkey]['fapath']}" data-longname="${genomes[dbkey]['name']}" data-index="bowtie" data-dbkey="${dbkey}">${genomes[dbkey]['bowtie_indexes']['state']}</td>
- <td id="${dbkey}-bowtie2" class="indexcell ${genomes[dbkey]['bowtie2_indexes']['style']}" data-fapath="${genomes[dbkey]['fapath']}" data-longname="${genomes[dbkey]['name']}" data-index="bowtie2" data-dbkey="${dbkey}">${genomes[dbkey]['bowtie2_indexes']['state']}</td>
- <td id="${dbkey}-bwa" class="indexcell ${genomes[dbkey]['bwa_indexes']['style']}" data-fapath="${genomes[dbkey]['fapath']}" data-longname="${genomes[dbkey]['name']}" data-index="bwa" data-dbkey="${dbkey}">${genomes[dbkey]['bwa_indexes']['state']}</td>
- <td id="${dbkey}-sam" class="indexcell ${genomes[dbkey]['sam_fa_indexes']['style']}" data-fapath="${genomes[dbkey]['fapath']}" data-longname="${genomes[dbkey]['name']}" data-index="sam" data-dbkey="${dbkey}">${genomes[dbkey]['sam_fa_indexes']['state']}</td>
- <td id="${dbkey}-picard" class="indexcell ${genomes[dbkey]['srma_indexes']['style']}" data-fapath="${genomes[dbkey]['fapath']}" data-longname="${genomes[dbkey]['name']}" data-index="picard" data-dbkey="${dbkey}">${genomes[dbkey]['srma_indexes']['state']}</td>
- <td id="${dbkey}-perm" class="indexcell ${genomes[dbkey]['perm_base_indexes']['style']}" data-fapath="${genomes[dbkey]['fapath']}" data-longname="${genomes[dbkey]['name']}" data-index="perm" data-dbkey="${dbkey}">${genomes[dbkey]['perm_base_indexes']['state']}</td>
+ <td>${indextable[dbkey]['name']}</td>
+ %for label in labels:
+ <td id="${dbkey}-${indexfuncs[label]}" class="indexcell ${styles[indextable[dbkey]['indexes'][label]]}" data-fapath="${indextable[dbkey]['path']}" data-longname="${indextable[dbkey]['name']}" data-index="${indexfuncs[label]}" data-dbkey="${dbkey}">${indextable[dbkey]['indexes'][label]}</td>
+ %endfor
+
</tr>
%endfor
</table>
@@ -124,6 +129,7 @@
jsondata["name"] = $('#job-' + jobid).attr('data-name');
jsondata["dbkey"] = $('#job-' + jobid).attr('data-dbkey');
jsondata["indexes"] = $('#job-' + jobid).attr('data-indexes');
+ tdid = jq(jsondata["dbkey"] + '-' + jsondata["indexes"]);
newhtml = makeNewHTML(jsondata);
$('#job-' + jobid).replaceWith(newhtml);
if ($.inArray(jsondata["status"], finalstates) == -1) {
@@ -133,7 +139,7 @@
});
}
if (jsondata["status"] == 'done' || jsondata["status"] == 'ok') {
- elem = $('#' + jsondata["dbkey"] + '-' + jsondata["indexes"]);
+ elem = $(tdid);
elem.html('Generated');
elem.attr('class', 'indexcell panel-done-message');
}
@@ -156,5 +162,8 @@
}
});
});
-
+
+ function jq(id) {
+ return '#' + id.replace(/(:|\.)/g,'\\$1');
+ }
</script>
\ No newline at end of file
diff -r d960803cbacc5cdeea67ffa9a26010adba1120d6 -r 761c58466d97ba0d29af670daabc256aacf4dbb8 templates/webapps/community/admin/reset_metadata_on_selected_repositories.mako
--- a/templates/webapps/community/admin/reset_metadata_on_selected_repositories.mako
+++ b/templates/webapps/community/admin/reset_metadata_on_selected_repositories.mako
@@ -43,14 +43,17 @@
${render_msg( message, status )}
%endif
+<div class="warningmessage">
+ Resetting metadata may take a while because this process clones each change set in each selected repository's change log to a temporary location on disk.
+ Wait until this page redirects after clicking the <b>Reset metadata on selected repositories</b> button, as doing anything else will not be helpful. Watch
+ the tool shed paster log to pass the time if necessary.
+</div>
+
<div class="toolForm"><div class="toolFormTitle">Reset all metadata on each selected repository</div><form name="reset_metadata_on_selected_repositories" id="reset_metadata_on_selected_repositories" action="${h.url_for( controller='admin', action='reset_metadata_on_selected_repositories' )}" method="post" ><div class="form-row">
- Check each repository for which you want to reset metadata. Repository names are followed by owners in parentheses. Resetting metadata
- may take a while because this process clones each change set in each selected repository's change log to a temporary location on disk.
- Wait until this page redirects after clicking the <b>Reset metadata on selected repositories</b> button, as doing anything else will not
- be helpful. Watch the tool shed paster log to pass the time if necessary.
+ Check each repository for which you want to reset metadata. Repository names are followed by owners in parentheses.
</div><div style="clear: both"></div><div class="form-row">
diff -r d960803cbacc5cdeea67ffa9a26010adba1120d6 -r 761c58466d97ba0d29af670daabc256aacf4dbb8 templates/webapps/community/repository/tool_form.mako
--- a/templates/webapps/community/repository/tool_form.mako
+++ b/templates/webapps/community/repository/tool_form.mako
@@ -186,12 +186,15 @@
<div class="toolHelp"><div class="toolHelpBody"><%
+ tool_help = tool.help
+ # Help is Mako template, so render using current static path.
+ tool_help = tool_help.render( static_path=h.url_for( '/static' ) )
# Convert to unicode to display non-ascii characters.
- if type( tool.help ) is not unicode:
- tool.help = unicode( tool.help, 'utf-8')
+ if type( tool_help ) is not unicode:
+ tool_help = unicode( tool_help, 'utf-8')
%>
- ${tool.help}
- </div>
+ ${tool_help}
+ </div></div>
%endif
%else:
diff -r d960803cbacc5cdeea67ffa9a26010adba1120d6 -r 761c58466d97ba0d29af670daabc256aacf4dbb8 test/functional/test_library_templates.py
--- a/test/functional/test_library_templates.py
+++ b/test/functional/test_library_templates.py
@@ -398,7 +398,7 @@
folder_id=self.security.encode_id( folder3.id ),
upload_option='import_from_history',
hda_ids=self.security.encode_id( hda.id ),
- strings_displayed=[ '<input type="hidden" name="%s" value="Option1"/>' % select_field_name ] )
+ strings_displayed=[ '<select name="%s" last_selected_value="Option1">' % select_field_name ] )
ldda = get_latest_ldda_by_name( filename )
assert ldda is not None, 'Problem retrieving LibraryDatasetDatasetAssociation ldda from the database'
self.browse_library( cntrller='library_admin',
diff -r d960803cbacc5cdeea67ffa9a26010adba1120d6 -r 761c58466d97ba0d29af670daabc256aacf4dbb8 tools/new_operations/operation_filter.py
--- a/tools/new_operations/operation_filter.py
+++ b/tools/new_operations/operation_filter.py
@@ -3,6 +3,9 @@
from galaxy import eggs
from galaxy import jobs
from galaxy.tools.parameters import DataToolParameter
+
+from galaxy.jobs.handler import JOB_ERROR
+
# Older py compatibility
try:
set()
@@ -63,8 +66,8 @@
raise Exception( stderr )
except Exception, exc:
- data.blurb = jobs.JOB_ERROR
- data.state = jobs.JOB_ERROR
+ data.blurb = JOB_ERROR
+ data.state = JOB_ERROR
## def exec_after_process(app, inp_data, out_data, param_dict, tool=None, stdout=None, stderr=None):
## pass
https://bitbucket.org/galaxy/galaxy-central/changeset/713ee9f0ae6a/
changeset: 713ee9f0ae6a
user: dannon
date: 2012-08-13 19:22:51
summary: Merge pull request.
affected #: 5 files
diff -r fbefa03698959fc15d06d74ad9a2cf24f6001e63 -r 713ee9f0ae6a005d8a283427c496c27f9f83c43b lib/galaxy/model/__init__.py
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -544,7 +544,10 @@
else:
return False
-class Group( object ):
+class Group( object, APIItem ):
+ api_collection_visible_keys = ( 'id', 'name' )
+ api_element_visible_keys = ( 'id', 'name' )
+
def __init__( self, name = None ):
self.name = name
self.deleted = False
diff -r fbefa03698959fc15d06d74ad9a2cf24f6001e63 -r 713ee9f0ae6a005d8a283427c496c27f9f83c43b lib/galaxy/web/api/group_roles.py
--- /dev/null
+++ b/lib/galaxy/web/api/group_roles.py
@@ -0,0 +1,124 @@
+"""
+API operations on Group objects.
+"""
+import logging
+from galaxy.web.base.controller import BaseAPIController, url_for
+from galaxy import web
+
+log = logging.getLogger( __name__ )
+
+class GroupRolesAPIController( BaseAPIController ):
+
+ @web.expose_api
+ @web.require_admin
+ def index( self, trans, group_id, **kwd ):
+ """
+ GET /api/groups/{encoded_group_id}/roles
+ Displays a collection (list) of groups.
+ """
+ decoded_group_id = trans.security.decode_id( group_id )
+ try:
+ group = trans.sa_session.query( trans.app.model.Group ).get( decoded_group_id )
+ except:
+ group = None
+ if not group:
+ trans.response.status = 400
+ return "Invalid group id ( %s ) specified." % str( group_id )
+ rval = []
+ try:
+ for gra in group.roles:
+ role = gra.role
+ encoded_id = trans.security.encode_id( role.id )
+ rval.append( dict( id = encoded_id,
+ name = role.name,
+ url = url_for( 'group_role', group_id=group_id, id=encoded_id, ) ) )
+ except Exception, e:
+ rval = "Error in group API at listing roles"
+ log.error( rval + ": %s" % str(e) )
+ trans.response.status = 500
+ return rval
+
+ @web.expose_api
+ @web.require_admin
+ def show( self, trans, id, group_id, **kwd ):
+ """
+ GET /api/groups/{encoded_group_id}/roles/{encoded_role_id}
+ Displays information about a group role.
+ """
+ role_id = id
+ decoded_group_id = trans.security.decode_id( group_id )
+ decoded_role_id = trans.security.decode_id( role_id )
+ item = None
+ try:
+ group = trans.sa_session.query( trans.app.model.Group ).get( decoded_group_id )
+ role = trans.sa_session.query( trans.app.model.Role ).get( decoded_role_id )
+ for gra in group.roles:
+ if gra.role == role:
+ item = dict( id = role_id,
+ name = role.name,
+ url = url_for( 'group_role', group_id=group_id, id=role_id) ) # TODO Fix This
+ if not item:
+ item = "role %s not in group %s" % (role.name,group.name)
+ except Exception, e:
+ item = "Error in group_role API group %s role %s" % (group.name, role.name)
+ log.error(item + ": %s" % str(e))
+ return item
+
+ @web.expose_api
+ @web.require_admin
+ def update( self, trans, id, group_id, **kwd ):
+ """
+ PUT /api/groups/{encoded_group_id}/roles/{encoded_role_id}
+ Adds a role to a group
+ """
+ role_id = id
+ decoded_group_id = trans.security.decode_id( group_id )
+ decoded_role_id = trans.security.decode_id( role_id )
+ item = None
+ try:
+ group = trans.sa_session.query( trans.app.model.Group ).get( decoded_group_id )
+ role = trans.sa_session.query( trans.app.model.Role ).get( decoded_role_id )
+ for gra in group.roles:
+ if gra.role == role:
+ item = dict( id = role_id,
+ name = role.name,
+ url = url_for( 'group_role', group_id=group_id, id=role_id) )
+ if not item:
+ gra = trans.app.model.GroupRoleAssociation( group, role )
+ # Add GroupRoleAssociation
+ trans.sa_session.add( gra )
+ trans.sa_session.flush()
+ item = dict( id = role_id,
+ name = role.name,
+ url = url_for( 'group_role', group_id=group_id, id=role_id) )
+ except Exception, e:
+ item = "Error in group_role API Adding role %s to group %s" % (role.name,group.name)
+ log.error(item + ": %s" % str(e))
+ return item
+
+ @web.expose_api
+ @web.require_admin
+ def delete( self, trans, id, group_id, **kwd ):
+ """
+ DELETE /api/groups/{encoded_group_id}/roles/{encoded_role_id}
+ Removes a role from a group
+ """
+ role_id = id
+ decoded_group_id = trans.security.decode_id( group_id )
+ decoded_role_id = trans.security.decode_id( role_id )
+ try:
+ group = trans.sa_session.query( trans.app.model.Group ).get( decoded_group_id )
+ role = trans.sa_session.query( trans.app.model.Role ).get( decoded_role_id )
+ for gra in group.roles:
+ if gra.role == role:
+ trans.sa_session.delete( gra )
+ trans.sa_session.flush()
+ item = dict( id = role_id,
+ name = role.name,
+ url = url_for( 'group_role', group_id=group_id, id=role_id) )
+ if not item:
+ item = "role %s not in group %s" % (role.name,group.name)
+ except Exception, e:
+ item = "Error in group_role API Removing role %s from group %s" % (role.name,group.name)
+ log.error(item + ": %s" % str(e))
+ return item
diff -r fbefa03698959fc15d06d74ad9a2cf24f6001e63 -r 713ee9f0ae6a005d8a283427c496c27f9f83c43b lib/galaxy/web/api/group_users.py
--- /dev/null
+++ b/lib/galaxy/web/api/group_users.py
@@ -0,0 +1,124 @@
+"""
+API operations on Group objects.
+"""
+import logging
+from galaxy.web.base.controller import BaseAPIController, url_for
+from galaxy import web
+
+log = logging.getLogger( __name__ )
+
+class GroupUsersAPIController( BaseAPIController ):
+
+ @web.expose_api
+ @web.require_admin
+ def index( self, trans, group_id, **kwd ):
+ """
+ GET /api/groups/{encoded_group_id}/users
+ Displays a collection (list) of groups.
+ """
+ decoded_group_id = trans.security.decode_id( group_id )
+ try:
+ group = trans.sa_session.query( trans.app.model.Group ).get( decoded_group_id )
+ except:
+ group = None
+ if not group:
+ trans.response.status = 400
+ return "Invalid group id ( %s ) specified." % str( group_id )
+ rval = []
+ try:
+ for uga in group.users:
+ user = uga.user
+ encoded_id = trans.security.encode_id( user.id )
+ rval.append( dict( id = encoded_id,
+ email = user.email,
+ url = url_for( 'group_user', group_id=group_id, id=encoded_id, ) ) )
+ except Exception, e:
+ rval = "Error in group API at listing users"
+ log.error( rval + ": %s" % str(e) )
+ trans.response.status = 500
+ return rval
+
+ @web.expose_api
+ @web.require_admin
+ def show( self, trans, id, group_id, **kwd ):
+ """
+ GET /api/groups/{encoded_group_id}/users/{encoded_user_id}
+ Displays information about a group user.
+ """
+ user_id = id
+ decoded_group_id = trans.security.decode_id( group_id )
+ decoded_user_id = trans.security.decode_id( user_id )
+ item = None
+ try:
+ group = trans.sa_session.query( trans.app.model.Group ).get( decoded_group_id )
+ user = trans.sa_session.query( trans.app.model.User ).get( decoded_user_id )
+ for uga in group.users:
+ if uga.user == user:
+ item = dict( id = user_id,
+ email = user.email,
+ url = url_for( 'group_user', group_id=group_id, id=user_id) ) # TODO Fix This
+ if not item:
+ item = "user %s not in group %s" % (user.email,group.name)
+ except Exception, e:
+ item = "Error in group_user API group %s user %s" % (group.name, user.email)
+ log.error(item + ": %s" % str(e))
+ return item
+
+ @web.expose_api
+ @web.require_admin
+ def update( self, trans, id, group_id, **kwd ):
+ """
+ PUT /api/groups/{encoded_group_id}/users/{encoded_user_id}
+ Adds a user to a group
+ """
+ user_id = id
+ decoded_group_id = trans.security.decode_id( group_id )
+ decoded_user_id = trans.security.decode_id( user_id )
+ item = None
+ try:
+ group = trans.sa_session.query( trans.app.model.Group ).get( decoded_group_id )
+ user = trans.sa_session.query( trans.app.model.User ).get( decoded_user_id )
+ for uga in group.users:
+ if uga.user == user:
+ item = dict( id = user_id,
+ email = user.email,
+ url = url_for( 'group_user', group_id=group_id, id=user_id) )
+ if not item:
+ uga = trans.app.model.UserGroupAssociation( user, group )
+ # Add UserGroupAssociations
+ trans.sa_session.add( uga )
+ trans.sa_session.flush()
+ item = dict( id = user_id,
+ email = user.email,
+ url = url_for( 'group_user', group_id=group_id, id=user_id) )
+ except Exception, e:
+ item = "Error in group_user API Adding user %s to group %s" % (user.email,group.name)
+ log.error(item + ": %s" % str(e))
+ return item
+
+ @web.expose_api
+ @web.require_admin
+ def delete( self, trans, id, group_id, **kwd ):
+ """
+ DELETE /api/groups/{encoded_group_id}/users/{encoded_user_id}
+ Removes a user from a group
+ """
+ user_id = id
+ decoded_group_id = trans.security.decode_id( group_id )
+ decoded_user_id = trans.security.decode_id( user_id )
+ try:
+ group = trans.sa_session.query( trans.app.model.Group ).get( decoded_group_id )
+ user = trans.sa_session.query( trans.app.model.User ).get( decoded_user_id )
+ for uga in group.users:
+ if uga.user == user:
+ trans.sa_session.delete( uga )
+ trans.sa_session.flush()
+ item = dict( id = user_id,
+ email = user.email,
+ url = url_for( 'group_user', group_id=group_id, id=user_id) )
+ if not item:
+ item = "user %s not in group %s" % (user.email,group.name)
+ except Exception, e:
+ item = "Error in group_user API Removing user %s from group %s" % (user.email,group.name)
+ log.error(item + ": %s" % str(e))
+ return item
diff -r fbefa03698959fc15d06d74ad9a2cf24f6001e63 -r 713ee9f0ae6a005d8a283427c496c27f9f83c43b lib/galaxy/web/api/groups.py
--- /dev/null
+++ b/lib/galaxy/web/api/groups.py
@@ -0,0 +1,128 @@
+"""
+API operations on Group objects.
+"""
+import logging
+from galaxy.web.base.controller import BaseAPIController, url_for
+from galaxy import web
+
+
+log = logging.getLogger( __name__ )
+
+
+class GroupAPIController( BaseAPIController ):
+
+ @web.expose_api
+ @web.require_admin
+ def index( self, trans, **kwd ):
+ """
+ GET /api/groups
+ Displays a collection (list) of groups.
+ """
+ rval = []
+ for group in trans.sa_session.query( trans.app.model.Group ).filter( trans.app.model.Group.table.c.deleted == False ):
+ if trans.user_is_admin():
+ item = group.get_api_value( value_mapper={ 'id': trans.security.encode_id } )
+ encoded_id = trans.security.encode_id( group.id )
+ item['url'] = url_for( 'group', id=encoded_id )
+ rval.append( item )
+ return rval
+
+ @web.expose_api
+ def create( self, trans, payload, **kwd ):
+ """
+ POST /api/groups
+ Creates a new group.
+ """
+ log.info("groups payload%s\n" % (payload))
+ if not trans.user_is_admin():
+ trans.response.status = 403
+ return "You are not authorized to create a new group."
+ name = payload.get( 'name', None )
+ if not name:
+ trans.response.status = 400
+ return "Enter a valid name"
+ if trans.sa_session.query( trans.app.model.Group ).filter( trans.app.model.Group.table.c.name==name ).first():
+ trans.response.status = 400
+ return "A group with that name already exists"
+
+ group = trans.app.model.Group( name=name )
+ trans.sa_session.add( group )
+ user_ids = payload.get( 'user_ids', [] )
+ for i in user_ids:
+ log.info("user_id: %s\n" % (i ))
+ log.info("%s %s\n" % (i, trans.security.decode_id( i ) ))
+ users = [ trans.sa_session.query( trans.model.User ).get( trans.security.decode_id( i ) ) for i in user_ids ]
+ role_ids = payload.get( 'role_ids', [] )
+ roles = [ trans.sa_session.query( trans.model.Role ).get( trans.security.decode_id( i ) ) for i in role_ids ]
+ trans.app.security_agent.set_entity_group_associations( groups=[ group ], roles=roles, users=users )
+ """
+ # Create the UserGroupAssociations
+ for user in users:
+ trans.app.security_agent.associate_user_group( user, group )
+ # Create the GroupRoleAssociations
+ for role in roles:
+ trans.app.security_agent.associate_group_role( group, role )
+ """
+ trans.sa_session.flush()
+ encoded_id = trans.security.encode_id( group.id )
+ item = group.get_api_value( view='element', value_mapper={ 'id': trans.security.encode_id } )
+ item['url'] = url_for( 'group', id=encoded_id )
+ return [ item ]
+
+ @web.expose_api
+ @web.require_admin
+ def show( self, trans, id, **kwd ):
+ """
+ GET /api/groups/{encoded_group_id}
+ Displays information about a group.
+ """
+ group_id = id
+ try:
+ decoded_group_id = trans.security.decode_id( group_id )
+ except TypeError:
+ trans.response.status = 400
+ return "Malformed group id ( %s ) specified, unable to decode." % str( group_id )
+ try:
+ group = trans.sa_session.query( trans.app.model.Group ).get( decoded_group_id )
+ except:
+ group = None
+ if not group:
+ trans.response.status = 400
+ return "Invalid group id ( %s ) specified." % str( group_id )
+ item = group.get_api_value( view='element', value_mapper={ 'id': trans.security.encode_id } )
+ item['url'] = url_for( 'group', id=group_id )
+ item['users_url'] = url_for( 'group_users', group_id=group_id )
+ item['roles_url'] = url_for( 'group_roles', group_id=group_id )
+ return item
+
+ @web.expose_api
+ @web.require_admin
+ def update( self, trans, id, payload, **kwd ):
+ """
+ PUT /api/groups/{encoded_group_id}
+ Modifies a group.
+ """
+ group_id = id
+ try:
+ decoded_group_id = trans.security.decode_id( group_id )
+ except TypeError:
+ trans.response.status = 400
+ return "Malformed group id ( %s ) specified, unable to decode." % str( group_id )
+ try:
+ group = trans.sa_session.query( trans.app.model.Group ).get( decoded_group_id )
+ except:
+ group = None
+ if not group:
+ trans.response.status = 400
+ return "Invalid group id ( %s ) specified." % str( group_id )
+ name = payload.get( 'name', None )
+ if name:
+ group.name = name
+ trans.sa_session.add(group)
+ user_ids = payload.get( 'user_ids', [] )
+ users = [ trans.sa_session.query( trans.model.User ).get( trans.security.decode_id( i ) ) for i in user_ids ]
+ role_ids = payload.get( 'role_ids', [] )
+ roles = [ trans.sa_session.query( trans.model.Role ).get( trans.security.decode_id( i ) ) for i in role_ids ]
+ trans.app.security_agent.set_entity_group_associations( groups=[ group ], roles=roles, users=users,delete_existing_assocs=False )
+ trans.sa_session.flush()
+
diff -r fbefa03698959fc15d06d74ad9a2cf24f6001e63 -r 713ee9f0ae6a005d8a283427c496c27f9f83c43b lib/galaxy/web/buildapp.py
--- a/lib/galaxy/web/buildapp.py
+++ b/lib/galaxy/web/buildapp.py
@@ -122,6 +122,18 @@
'permissions',
path_prefix='/api/libraries/:library_id',
parent_resources=dict( member_name='library', collection_name='libraries' ) )
+ webapp.api_mapper.resource( 'user',
+ 'users',
+ controller='group_users',
+ name_prefix='group_',
+ path_prefix='/api/groups/:group_id',
+ parent_resources=dict( member_name='group', collection_name='groups' ) )
+ webapp.api_mapper.resource( 'role',
+ 'roles',
+ controller='group_roles',
+ name_prefix='group_',
+ path_prefix='/api/groups/:group_id',
+ parent_resources=dict( member_name='group', collection_name='groups' ) )
webapp.api_mapper.resource( 'dataset', 'datasets', path_prefix='/api' )
webapp.api_mapper.resource_with_deleted( 'library', 'libraries', path_prefix='/api' )
webapp.api_mapper.resource( 'sample', 'samples', path_prefix='/api' )
@@ -129,6 +141,7 @@
webapp.api_mapper.resource( 'form', 'forms', path_prefix='/api' )
webapp.api_mapper.resource( 'request_type', 'request_types', path_prefix='/api' )
webapp.api_mapper.resource( 'role', 'roles', path_prefix='/api' )
+ webapp.api_mapper.resource( 'group', 'groups', path_prefix='/api' )
webapp.api_mapper.resource_with_deleted( 'quota', 'quotas', path_prefix='/api' )
webapp.api_mapper.resource( 'tool', 'tools', path_prefix='/api' )
webapp.api_mapper.resource_with_deleted( 'user', 'users', path_prefix='/api' )
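For reference, the new group endpoints registered above can be exercised with any HTTP client. The following is a minimal sketch only, assuming a Galaxy instance at http://localhost:8080 and an admin API key supplied via the 'key' request parameter; the URL and key value are placeholders, not part of the commit:

import json
import urllib
import urllib2

GALAXY_URL = 'http://localhost:8080'      # hypothetical local instance
ADMIN_API_KEY = 'replace-with-admin-key'  # hypothetical admin API key

def api_get(path):
    # GET helper; authenticates with the 'key' parameter.
    url = '%s%s?%s' % (GALAXY_URL, path, urllib.urlencode({'key': ADMIN_API_KEY}))
    return json.loads(urllib2.urlopen(url).read())

def api_post(path, payload):
    # POST helper; sends the payload as a JSON body.
    url = '%s%s?%s' % (GALAXY_URL, path, urllib.urlencode({'key': ADMIN_API_KEY}))
    request = urllib2.Request(url, data=json.dumps(payload),
                              headers={'Content-Type': 'application/json'})
    return json.loads(urllib2.urlopen(request).read())

# GET /api/groups -- list non-deleted groups (admin only).
groups = api_get('/api/groups')

# POST /api/groups -- create a group; create() returns a one-element list.
created = api_post('/api/groups', {'name': 'example-group',
                                   'user_ids': [], 'role_ids': []})
group_id = created[0]['id']

# GET /api/groups/{encoded_group_id}/users -- list users in the group.
users = api_get('/api/groups/%s/users' % group_id)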
https://bitbucket.org/galaxy/galaxy-central/changeset/966a9b393b85/
changeset: 966a9b393b85
branch: add_requirement_tags_for_plink
user: dannon
date: 2012-08-13 19:23:54
summary: Branch close
affected #: 0 files
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/afbff82a32e9/
changeset: afbff82a32e9
user: dannon
date: 2012-08-13 18:59:17
summary: Fix convert_newlines and sep2tabs to return '0' lines in the event of an empty file, instead of blowing up.
affected #: 1 file
diff -r bb76489b554107f534df7e0f829e60308fea719a -r afbff82a32e98e9261b52c5415377c0ee5ecee1d lib/galaxy/datatypes/sniff.py
--- a/lib/galaxy/datatypes/sniff.py
+++ b/lib/galaxy/datatypes/sniff.py
@@ -91,15 +91,16 @@
"""
fd, temp_name = tempfile.mkstemp()
fp = os.fdopen( fd, "wt" )
- for i, line in enumerate( file( fname, "U" ) ):
+ i = 0
+ for i, line in enumerate( file( fname, "U" ), 1 ):
fp.write( "%s\n" % line.rstrip( "\r\n" ) )
fp.close()
if in_place:
shutil.move( temp_name, fname )
# Return number of lines in file.
- return ( i + 1, None )
+ return ( i, None )
else:
- return ( i + 1, temp_name )
+ return ( i, temp_name )
def sep2tabs( fname, in_place=True, patt="\\s+" ):
"""
@@ -115,7 +116,8 @@
regexp = re.compile( patt )
fd, temp_name = tempfile.mkstemp()
fp = os.fdopen( fd, "wt" )
- for i, line in enumerate( file( fname ) ):
+ i = 0
+ for i, line in enumerate( file( fname ), 1):
line = line.rstrip( '\r\n' )
elems = regexp.split( line )
fp.write( "%s\n" % '\t'.join( elems ) )
@@ -123,9 +125,9 @@
if in_place:
shutil.move( temp_name, fname )
# Return number of lines in file.
- return ( i + 1, None )
+ return ( i, None )
else:
- return ( i + 1, temp_name )
+ return ( i, temp_name )
def convert_newlines_sep2tabs( fname, in_place=True, patt="\\s+" ):
"""
https://bitbucket.org/galaxy/galaxy-central/changeset/fbefa0369895/
changeset: fbefa0369895
user: dannon
date: 2012-08-13 19:00:09
summary: Whitespace cleanup.
affected #: 1 file
diff -r afbff82a32e98e9261b52c5415377c0ee5ecee1d -r fbefa03698959fc15d06d74ad9a2cf24f6001e63 lib/galaxy/datatypes/sniff.py
--- a/lib/galaxy/datatypes/sniff.py
+++ b/lib/galaxy/datatypes/sniff.py
@@ -8,7 +8,7 @@
from galaxy.datatypes.binary import unsniffable_binary_formats
log = logging.getLogger(__name__)
-
+
def get_test_fname(fname):
"""Returns test data filename"""
path, name = os.path.split(__file__)
@@ -79,7 +79,7 @@
def convert_newlines( fname, in_place=True ):
"""
- Converts in place a file from universal line endings
+ Converts in place a file from universal line endings
to Posix line endings.
>>> fname = get_test_fname('temp.txt')
@@ -159,7 +159,7 @@
def get_headers( fname, sep, count=60, is_multi_byte=False ):
"""
Returns a list with the first 'count' lines split by 'sep'
-
+
>>> fname = get_test_fname('complete.bed')
>>> get_headers(fname,'\\t')
[['chr7', '127475281', '127491632', 'NM_000230', '0', '+', '127486022', '127488767', '0', '3', '29,172,3225,', '0,10713,13126,'], ['chr7', '127486011', '127488900', 'D49487', '0', '+', '127486022', '127488767', '0', '2', '155,490,', '0,2399']]
@@ -175,12 +175,12 @@
if idx == count:
break
return headers
-
+
def is_column_based( fname, sep='\t', skip=0, is_multi_byte=False ):
"""
- Checks whether the file is column based with respect to a separator
+ Checks whether the file is column based with respect to a separator
(defaults to tab separator).
-
+
>>> fname = get_test_fname('test.gff')
>>> is_column_based(fname)
True
@@ -288,7 +288,7 @@
"""
Some classes may not have a sniff function, which is ok. In fact, the
Tabular and Text classes are 2 examples of classes that should never have
- a sniff function. Since these classes are default classes, they contain
+ a sniff function. Since these classes are default classes, they contain
few rules to filter out data of other formats, so they should be called
from this function after all other datatypes in sniff_order have not been
successfully discovered.
@@ -333,7 +333,7 @@
is_compressed = check_compressed_function( filename )
if is_compressed:
break #found compression type
- if is_compressed:
+ if is_compressed:
if ext in AUTO_DETECT_EXTENSIONS:
check_exts = COMPRESSION_DATATYPES[ compressed_type ]
elif ext in COMPRESSED_EXTENSIONS:
@@ -347,7 +347,7 @@
keep_compressed = True
is_valid = True
break
-
+
if not is_compressed:
is_valid = True
elif not keep_compressed:
@@ -373,13 +373,13 @@
def handle_uploaded_dataset_file( filename, datatypes_registry, ext = 'auto', is_multi_byte = False ):
is_valid, ext = handle_compressed_file( filename, datatypes_registry, ext = ext )
-
+
if not is_valid:
raise InappropriateDatasetContentError, 'The compressed uploaded file contains inappropriate content.'
-
+
if ext in AUTO_DETECT_EXTENSIONS:
ext = guess_ext( filename, sniff_order = datatypes_registry.sniff_order, is_multi_byte=is_multi_byte )
-
+
if check_binary( filename ):
if ext not in unsniffable_binary_formats and not datatypes_registry.get_datatype_by_extension( ext ).sniff( filename ):
raise InappropriateDatasetContentError, 'The binary uploaded file contains inappropriate content.'
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/05c643a133ec/
changeset: 05c643a133ec
user: fubar
date: 2012-08-11 01:36:29
summary: Unicode/raw for re.sub string in rgFastQC.py
affected #: 1 file
diff -r 0a800e817e84abba99331e784f622dd3ce54e900 -r 05c643a133ec9421c252647fb1eedc96f2940da5 tools/rgenetics/rgFastQC.py
--- a/tools/rgenetics/rgFastQC.py
+++ b/tools/rgenetics/rgFastQC.py
@@ -56,7 +56,7 @@
cl.append('-c %s' % self.opts.contaminants)
# patch suggested by bwlang https://bitbucket.org/galaxy/galaxy-central/pull-request/30
# use a symlink in a temporary directory so that the FastQC report reflects the history input file name
- fastqinfilename = re.sub('[^a-zA-Z0-9_\-\.]', '_', os.path.basename(self.opts.inputfilename))
+ fastqinfilename = re.sub(ru'[^a-zA-Z0-9_\-\.]', '_', os.path.basename(self.opts.inputfilename))
link_name = os.path.join(self.opts.outputdir, fastqinfilename)
os.symlink(self.opts.input, link_name)
cl.append(link_name)
https://bitbucket.org/galaxy/galaxy-central/changeset/bb76489b5541/
changeset: bb76489b5541
user: fubar
date: 2012-08-11 01:42:48
summary: Added unicode and raw flags to the re.sub string - not tested with unicode name yet..
affected #: 1 file
diff -r 05c643a133ec9421c252647fb1eedc96f2940da5 -r bb76489b554107f534df7e0f829e60308fea719a tools/rgenetics/rgFastQC.py
--- a/tools/rgenetics/rgFastQC.py
+++ b/tools/rgenetics/rgFastQC.py
@@ -56,7 +56,7 @@
cl.append('-c %s' % self.opts.contaminants)
# patch suggested by bwlang https://bitbucket.org/galaxy/galaxy-central/pull-request/30
# use a symlink in a temporary directory so that the FastQC report reflects the history input file name
- fastqinfilename = re.sub(ru'[^a-zA-Z0-9_\-\.]', '_', os.path.basename(self.opts.inputfilename))
+ fastqinfilename = re.sub(ur'[^a-zA-Z0-9_\-\.]', '_', os.path.basename(self.opts.inputfilename))
link_name = os.path.join(self.opts.outputdir, fastqinfilename)
os.symlink(self.opts.input, link_name)
cl.append(link_name)
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/48560a3547e2/
changeset: 48560a3547e2
user: fubar
date: 2012-08-11 01:21:45
summary: Add - and . to the re.sub exclusion list for the user-supplied filename at Scott's suggestion, and use _ as a substitute for
all other characters, so
'cd \/; rm -rf'
becomes
'cd_____rm_-rf'.
The FastQC wrapper should now be reasonably hard to pervert, but user filenames will be at least vaguely recognizable...
affected #: 1 file
diff -r 18462a6cbf46566307eaa4fd33c8891b5979d699 -r 48560a3547e2bc8691f16d525fac117befe58829 tools/rgenetics/rgFastQC.py
--- a/tools/rgenetics/rgFastQC.py
+++ b/tools/rgenetics/rgFastQC.py
@@ -1,4 +1,7 @@
"""
+# added sanitizer for user supplied name
+# removed shell and make cl a sequence for Popen call
+# ross lazarus August 10 2012 in response to anon insecurity report
wrapper for fastqc
called as
@@ -53,7 +56,7 @@
cl.append('-c %s' % self.opts.contaminants)
# patch suggested by bwlang https://bitbucket.org/galaxy/galaxy-central/pull-request/30
# use a symlink in a temporary directory so that the FastQC report reflects the history input file name
- fastqinfilename = re.sub('[^a-zA-Z0-9_]+', '', os.path.basename(self.opts.inputfilename))
+ fastqinfilename = re.sub('[^a-zA-Z0-9_\-\.]', '_', os.path.basename(self.opts.inputfilename))
link_name = os.path.join(self.opts.outputdir, fastqinfilename)
os.symlink(self.opts.input, link_name)
cl.append(link_name)
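To see what the new sanitizer does, the substitution can be tried on its own: every character outside [a-zA-Z0-9_.-] in the user-supplied name is replaced with '_', so shell metacharacters cannot reach the symlink name. A small standalone check:

import re

def sanitize(name):
    # Same pattern as the change above: keep letters, digits, '_', '-', '.',
    # and replace everything else with '_'.
    return re.sub('[^a-zA-Z0-9_\-\.]', '_', name)

print sanitize('cd \/; rm -rf')   # -> cd_____rm_-rf
print sanitize('sample.1.fastq')  # dots and dashes survive unchanged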
https://bitbucket.org/galaxy/galaxy-central/changeset/0a800e817e84/
changeset: 0a800e817e84
user: fubar
date: 2012-08-11 01:22:57
summary: Branch merge
affected #: 9 files
diff -r 48560a3547e2bc8691f16d525fac117befe58829 -r 0a800e817e84abba99331e784f622dd3ce54e900 lib/galaxy/jobs/runners/__init__.py
--- a/lib/galaxy/jobs/runners/__init__.py
+++ b/lib/galaxy/jobs/runners/__init__.py
@@ -40,7 +40,18 @@
if job_wrapper.dependency_shell_commands:
commands = "; ".join( job_wrapper.dependency_shell_commands + [ commands ] )
- # Append commands to copy job outputs based on from_work_dir attribute.
+ # -- Append commands to copy job outputs based on from_work_dir attribute. --
+
+ # Set up dict of dataset id --> output path; output path can be real or
+ # false depending on outputs_to_working_directory
+ output_paths = {}
+ for dataset_path in job_wrapper.get_output_fnames():
+ path = dataset_path.real_path
+ if self.app.config.outputs_to_working_directory:
+ path = dataset_path.false_path
+ output_paths[ dataset_path.dataset_id ] = path
+
+ # Walk job's output associations to find and use from_work_dir attributes.
job = job_wrapper.get_job()
job_tool = self.app.toolbox.tools_by_id.get( job.tool_id, None )
for dataset_assoc in job.output_datasets + job.output_library_datasets:
@@ -53,12 +64,13 @@
# Copy from working dir to HDA.
# TODO: move instead of copy to save time?
source_file = os.path.join( os.path.abspath( job_wrapper.working_directory ), hda_tool_output.from_work_dir )
+ destination = output_paths[ dataset.dataset_id ]
if in_directory( source_file, job_wrapper.working_directory ):
try:
- commands += "; cp %s %s" % ( source_file, dataset.file_name )
- log.debug( "Copying %s to %s as directed by from_work_dir" % ( source_file, dataset.file_name ) )
+ commands += "; cp %s %s" % ( source_file, destination )
+ log.debug( "Copying %s to %s as directed by from_work_dir" % ( source_file, destination ) )
except ( IOError, OSError ):
- log.debug( "Could not copy %s to %s as directed by from_work_dir" % ( source_file, dataset.file_name ) )
+ log.debug( "Could not copy %s to %s as directed by from_work_dir" % ( source_file, destination ) )
else:
# Security violation.
log.exception( "from_work_dir specified a location not in the working directory: %s, %s" % ( source_file, job_wrapper.working_directory ) )
diff -r 48560a3547e2bc8691f16d525fac117befe58829 -r 0a800e817e84abba99331e784f622dd3ce54e900 lib/galaxy/jobs/runners/lwr.py
--- a/lib/galaxy/jobs/runners/lwr.py
+++ b/lib/galaxy/jobs/runners/lwr.py
@@ -112,16 +112,30 @@
class Client(object):
"""
"""
- def __init__(self, remote_host, job_id):
+ """
+ """
+ def __init__(self, remote_host, job_id, private_key=None):
if not remote_host.endswith("/"):
remote_host = remote_host + "/"
+ ## If we don't have an explicit private_key defined, check for
+ ## one embedded in the URL. A URL of the form
+ ## https://moo@cow:8913 will try to contact https://cow:8913
+ ## with a private key of moo
+ private_key_format = "https?://(.*)@.*/?"
+ private_key_match= re.match(private_key_format, remote_host)
+ if not private_key and private_key_match:
+ private_key = private_key_match.group(1)
+ remote_host = remote_host.replace("%s@" % private_key, '', 1)
self.remote_host = remote_host
self.job_id = job_id
+ self.private_key = private_key
def url_open(self, request, data):
return urllib2.urlopen(request, data)
def __build_url(self, command, args):
+ if self.private_key:
+ args["private_key"] = self.private_key
data = urllib.urlencode(args)
url = self.remote_host + command + "?" + data
return url
diff -r 48560a3547e2bc8691f16d525fac117befe58829 -r 0a800e817e84abba99331e784f622dd3ce54e900 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -1083,7 +1083,9 @@
help_pages = self.help.findall( "page" )
help_header = self.help.text
try:
- self.help = Template( util.rst_to_html(self.help.text) )
+ self.help = Template( util.rst_to_html(self.help.text), input_encoding='utf-8',
+ output_encoding='utf-8', default_filters=[ 'decode.utf8' ],
+ encoding_errors='replace' )
except:
log.exception( "error in help for tool %s" % self.name )
# Multiple help page case
@@ -1093,7 +1095,10 @@
help_footer = help_footer + help_page.tail
# Each page has to rendered all-together because of backreferences allowed by rst
try:
- self.help_by_page = [ Template( util.rst_to_html( help_header + x + help_footer ) )
+ self.help_by_page = [ Template( util.rst_to_html( help_header + x + help_footer,
+ input_encoding='utf-8', output_encoding='utf-8',
+ default_filters=[ 'decode.utf8' ],
+ encoding_errors='replace' ) )
for x in self.help_by_page ]
except:
log.exception( "error in multi-page help for tool %s" % self.name )
diff -r 48560a3547e2bc8691f16d525fac117befe58829 -r 0a800e817e84abba99331e784f622dd3ce54e900 tools/rgenetics/rgCaCo.xml
--- a/tools/rgenetics/rgCaCo.xml
+++ b/tools/rgenetics/rgCaCo.xml
@@ -1,4 +1,5 @@
<tool id="rgCaCo1" name="Case Control:">
+ <requirements><requirement type="package">plink</requirement></requirements><description>for unrelated subjects</description><command interpreter="python">
rgCaCo.py '$i.extra_files_path/$i.metadata.base_name' "$title" '$out_file1' '$logf' '$logf.files_path' '$gffout'
diff -r 48560a3547e2bc8691f16d525fac117befe58829 -r 0a800e817e84abba99331e784f622dd3ce54e900 tools/rgenetics/rgClean.xml
--- a/tools/rgenetics/rgClean.xml
+++ b/tools/rgenetics/rgClean.xml
@@ -1,4 +1,5 @@
<tool id="rgClean1" name="Clean genotypes:">
+ <requirements><requirement type="package">plink</requirement></requirements><description>filter markers, subjects</description><command interpreter="python">
diff -r 48560a3547e2bc8691f16d525fac117befe58829 -r 0a800e817e84abba99331e784f622dd3ce54e900 tools/rgenetics/rgGLM.xml
--- a/tools/rgenetics/rgGLM.xml
+++ b/tools/rgenetics/rgGLM.xml
@@ -1,4 +1,5 @@
<tool id="rgGLM1" name="Linear Models:" version="0.2">
+ <requirements><requirement type="package">plink</requirement></requirements><description>for genotype data</description><code file="rgGLM_code.py"/><command interpreter="python">
diff -r 48560a3547e2bc8691f16d525fac117befe58829 -r 0a800e817e84abba99331e784f622dd3ce54e900 tools/rgenetics/rgLDIndep.xml
--- a/tools/rgenetics/rgLDIndep.xml
+++ b/tools/rgenetics/rgLDIndep.xml
@@ -1,4 +1,5 @@
<tool id="rgLDIndep1" name="LD Independent:">
+ <requirements><requirement type="package">plink</requirement></requirements><code file="rgLDIndep_code.py"/><description>filter high LD pairs - decrease redundancy</description>
diff -r 48560a3547e2bc8691f16d525fac117befe58829 -r 0a800e817e84abba99331e784f622dd3ce54e900 tools/rgenetics/rgQC.xml
--- a/tools/rgenetics/rgQC.xml
+++ b/tools/rgenetics/rgQC.xml
@@ -1,5 +1,6 @@
<tool id="rgQC1" name="QC reports:">
-
+ <requirements><requirement type="package">plink</requirement></requirements>
+
<description>Marker and Subject measures</description><command interpreter="python">
diff -r 48560a3547e2bc8691f16d525fac117befe58829 -r 0a800e817e84abba99331e784f622dd3ce54e900 tools/rgenetics/rgTDT.xml
--- a/tools/rgenetics/rgTDT.xml
+++ b/tools/rgenetics/rgTDT.xml
@@ -1,4 +1,6 @@
<tool id="rgTDT1" name="Transmission Distortion:">
+ <requirements><requirement type="package">plink</requirement></requirements>
+
<description>for family data</description><command interpreter="python">
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/b612d9dcca9f/
changeset: b612d9dcca9f
user: jmchilton
date: 2012-08-10 00:25:35
summary: Implement a mechanism to secure access to lwr. If enabled on the
server, a private token must be passed along with all
interactions. This token may be specified as part of the job runner
url as such:
wintool = lwr://https://token@host:8913/
This token will not protect the client against a rogue server that has
exploited, say, a DNS spoofing vulnerability. It is only meant as a
mechanism for the lwr to ensure the client is authorized. This feature
should be used in conjunction with SSL to prevent a "man-in-the-middle"
from intercepting this token.
affected #: 1 file
diff -r 3b5db939aebe61addd6f0b8d0c14267cf16c4144 -r b612d9dcca9fd75f7dc9592cd64e385d3300e0bb lib/galaxy/jobs/runners/lwr.py
--- a/lib/galaxy/jobs/runners/lwr.py
+++ b/lib/galaxy/jobs/runners/lwr.py
@@ -112,16 +112,30 @@
class Client(object):
"""
"""
- def __init__(self, remote_host, job_id):
+ """
+ """
+ def __init__(self, remote_host, job_id, private_key=None):
if not remote_host.endswith("/"):
remote_host = remote_host + "/"
+ ## If we don't have an explicit private_key defined, check for
+ ## one embedded in the URL. A URL of the form
+ ## https://moo@cow:8913 will try to contact https://cow:8913
+ ## with a private key of moo
+ private_key_format = "https?://(.*)@.*/?"
+ private_key_match= re.match(private_key_format, remote_host)
+ if not private_key and private_key_match:
+ private_key = private_key_match.group(1)
+ remote_host = remote_host.replace("%s@" % private_key, '', 1)
self.remote_host = remote_host
self.job_id = job_id
+ self.private_key = private_key
def url_open(self, request, data):
return urllib2.urlopen(request, data)
def __build_url(self, command, args):
+ if self.private_key:
+ args["private_key"] = self.private_key
data = urllib.urlencode(args)
url = self.remote_host + command + "?" + data
return url
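The token-extraction logic in the constructor above can be read in isolation as follows; this is just a sketch of the same regular-expression handling, with a hypothetical host and token:

import re

def split_private_key(remote_host, private_key=None):
    # Mirror of the constructor logic: if no explicit key is given, pull a
    # 'token@' prefix out of the URL and strip it from the host.
    if not remote_host.endswith('/'):
        remote_host += '/'
    match = re.match('https?://(.*)@.*/?', remote_host)
    if not private_key and match:
        private_key = match.group(1)
        remote_host = remote_host.replace('%s@' % private_key, '', 1)
    return remote_host, private_key

print split_private_key('https://token@host:8913/')
# -> ('https://host:8913/', 'token')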
https://bitbucket.org/galaxy/galaxy-central/changeset/b24c67866f49/
changeset: b24c67866f49
user: natefoo
date: 2012-08-10 20:35:44
summary: Merged in jmchilton/galaxy-central-lwr-enhancement-1 (pull request #57)
affected #: 1 file
diff -r 4b0d6a9ecef858e507df4fa04fd8d8c37d542958 -r b24c67866f4942fddf50be60aa66b39f34643a55 lib/galaxy/jobs/runners/lwr.py
--- a/lib/galaxy/jobs/runners/lwr.py
+++ b/lib/galaxy/jobs/runners/lwr.py
@@ -112,16 +112,30 @@
class Client(object):
"""
"""
- def __init__(self, remote_host, job_id):
+ """
+ """
+ def __init__(self, remote_host, job_id, private_key=None):
if not remote_host.endswith("/"):
remote_host = remote_host + "/"
+ ## If we don't have an explicit private_key defined, check for
+ ## one embedded in the URL. A URL of the form
+ ## https://moo@cow:8913 will try to contact https://cow:8913
+ ## with a private key of moo
+ private_key_format = "https?://(.*)@.*/?"
+ private_key_match= re.match(private_key_format, remote_host)
+ if not private_key and private_key_match:
+ private_key = private_key_match.group(1)
+ remote_host = remote_host.replace("%s@" % private_key, '', 1)
self.remote_host = remote_host
self.job_id = job_id
+ self.private_key = private_key
def url_open(self, request, data):
return urllib2.urlopen(request, data)
def __build_url(self, command, args):
+ if self.private_key:
+ args["private_key"] = self.private_key
data = urllib.urlencode(args)
url = self.remote_host + command + "?" + data
return url
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/06b31a61793c/
changeset: 06b31a61793c
branch: add_requirement_tags_for_plink
user: jmchilton
date: 2012-02-06 22:43:05
summary: Adding plink requirement tag for various rgenetics tools requiring
plink.
affected #: 6 files
diff -r 74b6319b38b4b3876d0b81ef1296bb5afc729cc1 -r 06b31a61793c6e3b8ab39676ded73f6af95212f7 tools/rgenetics/rgCaCo.xml
--- a/tools/rgenetics/rgCaCo.xml
+++ b/tools/rgenetics/rgCaCo.xml
@@ -1,4 +1,5 @@
<tool id="rgCaCo1" name="Case Control:">
+ <requirements><requirement type="package">plink</requirement></requirements><description>for unrelated subjects</description><command interpreter="python">
rgCaCo.py '$i.extra_files_path/$i.metadata.base_name' "$title" '$out_file1' '$logf' '$logf.files_path' '$gffout'
diff -r 74b6319b38b4b3876d0b81ef1296bb5afc729cc1 -r 06b31a61793c6e3b8ab39676ded73f6af95212f7 tools/rgenetics/rgClean.xml
--- a/tools/rgenetics/rgClean.xml
+++ b/tools/rgenetics/rgClean.xml
@@ -1,4 +1,5 @@
<tool id="rgClean1" name="Clean genotypes:">
+ <requirements><requirement type="package">plink</requirement></requirements><description>filter markers, subjects</description><command interpreter="python">
diff -r 74b6319b38b4b3876d0b81ef1296bb5afc729cc1 -r 06b31a61793c6e3b8ab39676ded73f6af95212f7 tools/rgenetics/rgGLM.xml
--- a/tools/rgenetics/rgGLM.xml
+++ b/tools/rgenetics/rgGLM.xml
@@ -1,4 +1,5 @@
<tool id="rgGLM1" name="Linear Models:" version="0.2">
+ <requirements><requirement type="package">plink</requirement></requirements><description>for genotype data</description><code file="rgGLM_code.py"/><command interpreter="python">
diff -r 74b6319b38b4b3876d0b81ef1296bb5afc729cc1 -r 06b31a61793c6e3b8ab39676ded73f6af95212f7 tools/rgenetics/rgLDIndep.xml
--- a/tools/rgenetics/rgLDIndep.xml
+++ b/tools/rgenetics/rgLDIndep.xml
@@ -1,4 +1,5 @@
<tool id="rgLDIndep1" name="LD Independent:">
+ <requirements><requirement type="package">plink</requirement></requirements><code file="rgLDIndep_code.py"/><description>filter high LD pairs - decrease redundancy</description>
diff -r 74b6319b38b4b3876d0b81ef1296bb5afc729cc1 -r 06b31a61793c6e3b8ab39676ded73f6af95212f7 tools/rgenetics/rgQC.xml
--- a/tools/rgenetics/rgQC.xml
+++ b/tools/rgenetics/rgQC.xml
@@ -1,5 +1,6 @@
<tool id="rgQC1" name="QC reports:">
-
+ <requirements><requirement type="package">plink</requirement></requirements>
+
<description>Marker and Subject measures</description><command interpreter="python">
diff -r 74b6319b38b4b3876d0b81ef1296bb5afc729cc1 -r 06b31a61793c6e3b8ab39676ded73f6af95212f7 tools/rgenetics/rgTDT.xml
--- a/tools/rgenetics/rgTDT.xml
+++ b/tools/rgenetics/rgTDT.xml
@@ -1,4 +1,6 @@
<tool id="rgTDT1" name="Transmission Distortion:">
+ <requirements><requirement type="package">plink</requirement></requirements>
+
<description>for family data</description><command interpreter="python">
https://bitbucket.org/galaxy/galaxy-central/changeset/4b0d6a9ecef8/
changeset: 4b0d6a9ecef8
user: natefoo
date: 2012-08-10 20:33:59
summary: Merged in jmchilton/umn-galaxy-central/add_requirement_tags_for_plink (pull request #35)
affected #: 6 files
diff -r 35c524274296c33320190a017bc316b57cb4f759 -r 4b0d6a9ecef858e507df4fa04fd8d8c37d542958 tools/rgenetics/rgCaCo.xml
--- a/tools/rgenetics/rgCaCo.xml
+++ b/tools/rgenetics/rgCaCo.xml
@@ -1,4 +1,5 @@
<tool id="rgCaCo1" name="Case Control:">
+ <requirements><requirement type="package">plink</requirement></requirements><description>for unrelated subjects</description><command interpreter="python">
rgCaCo.py '$i.extra_files_path/$i.metadata.base_name' "$title" '$out_file1' '$logf' '$logf.files_path' '$gffout'
diff -r 35c524274296c33320190a017bc316b57cb4f759 -r 4b0d6a9ecef858e507df4fa04fd8d8c37d542958 tools/rgenetics/rgClean.xml
--- a/tools/rgenetics/rgClean.xml
+++ b/tools/rgenetics/rgClean.xml
@@ -1,4 +1,5 @@
<tool id="rgClean1" name="Clean genotypes:">
+ <requirements><requirement type="package">plink</requirement></requirements><description>filter markers, subjects</description><command interpreter="python">
diff -r 35c524274296c33320190a017bc316b57cb4f759 -r 4b0d6a9ecef858e507df4fa04fd8d8c37d542958 tools/rgenetics/rgGLM.xml
--- a/tools/rgenetics/rgGLM.xml
+++ b/tools/rgenetics/rgGLM.xml
@@ -1,4 +1,5 @@
<tool id="rgGLM1" name="Linear Models:" version="0.2">
+ <requirements><requirement type="package">plink</requirement></requirements><description>for genotype data</description><code file="rgGLM_code.py"/><command interpreter="python">
diff -r 35c524274296c33320190a017bc316b57cb4f759 -r 4b0d6a9ecef858e507df4fa04fd8d8c37d542958 tools/rgenetics/rgLDIndep.xml
--- a/tools/rgenetics/rgLDIndep.xml
+++ b/tools/rgenetics/rgLDIndep.xml
@@ -1,4 +1,5 @@
<tool id="rgLDIndep1" name="LD Independent:">
+ <requirements><requirement type="package">plink</requirement></requirements><code file="rgLDIndep_code.py"/><description>filter high LD pairs - decrease redundancy</description>
diff -r 35c524274296c33320190a017bc316b57cb4f759 -r 4b0d6a9ecef858e507df4fa04fd8d8c37d542958 tools/rgenetics/rgQC.xml
--- a/tools/rgenetics/rgQC.xml
+++ b/tools/rgenetics/rgQC.xml
@@ -1,5 +1,6 @@
<tool id="rgQC1" name="QC reports:">
-
+ <requirements><requirement type="package">plink</requirement></requirements>
+
<description>Marker and Subject measures</description><command interpreter="python">
diff -r 35c524274296c33320190a017bc316b57cb4f759 -r 4b0d6a9ecef858e507df4fa04fd8d8c37d542958 tools/rgenetics/rgTDT.xml
--- a/tools/rgenetics/rgTDT.xml
+++ b/tools/rgenetics/rgTDT.xml
@@ -1,4 +1,6 @@
<tool id="rgTDT1" name="Transmission Distortion:">
+ <requirements><requirement type="package">plink</requirement></requirements>
+
<description>for family data</description><command interpreter="python">
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.

commit/galaxy-central: jgoecks: Explicitly specify encodings for tool help templates to avoid unicode/ascii confusion.
by Bitbucket 10 Aug '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/35c524274296/
changeset: 35c524274296
user: jgoecks
date: 2012-08-10 18:26:43
summary: Explicitly specify encodings for tool help templates to avoid unicode/ascii confusion.
affected #: 1 file
diff -r 7ac62afbcdf2b983e002b79e52753f9cf142db60 -r 35c524274296c33320190a017bc316b57cb4f759 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -1083,7 +1083,9 @@
help_pages = self.help.findall( "page" )
help_header = self.help.text
try:
- self.help = Template( util.rst_to_html(self.help.text) )
+ self.help = Template( util.rst_to_html(self.help.text), input_encoding='utf-8',
+ output_encoding='utf-8', default_filters=[ 'decode.utf8' ],
+ encoding_errors='replace' )
except:
log.exception( "error in help for tool %s" % self.name )
# Multiple help page case
@@ -1093,7 +1095,10 @@
help_footer = help_footer + help_page.tail
# Each page has to rendered all-together because of backreferences allowed by rst
try:
- self.help_by_page = [ Template( util.rst_to_html( help_header + x + help_footer ) )
+ self.help_by_page = [ Template( util.rst_to_html( help_header + x + help_footer,
+ input_encoding='utf-8', output_encoding='utf-8',
+ default_filters=[ 'decode.utf8' ],
+ encoding_errors='replace' ) )
for x in self.help_by_page ]
except:
log.exception( "error in multi-page help for tool %s" % self.name )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.

commit/galaxy-central: jgoecks: Make from_work_dir attribute work when outputs_to_working_directory is True.
by Bitbucket 10 Aug '12
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/7ac62afbcdf2/
changeset: 7ac62afbcdf2
user: jgoecks
date: 2012-08-10 15:58:59
summary: Make from_work_dir attribute work when outputs_to_working_directory is True.
affected #: 1 file
diff -r 18462a6cbf46566307eaa4fd33c8891b5979d699 -r 7ac62afbcdf2b983e002b79e52753f9cf142db60 lib/galaxy/jobs/runners/__init__.py
--- a/lib/galaxy/jobs/runners/__init__.py
+++ b/lib/galaxy/jobs/runners/__init__.py
@@ -40,7 +40,18 @@
if job_wrapper.dependency_shell_commands:
commands = "; ".join( job_wrapper.dependency_shell_commands + [ commands ] )
- # Append commands to copy job outputs based on from_work_dir attribute.
+ # -- Append commands to copy job outputs based on from_work_dir attribute. --
+
+ # Set up dict of dataset id --> output path; output path can be real or
+ # false depending on outputs_to_working_directory
+ output_paths = {}
+ for dataset_path in job_wrapper.get_output_fnames():
+ path = dataset_path.real_path
+ if self.app.config.outputs_to_working_directory:
+ path = dataset_path.false_path
+ output_paths[ dataset_path.dataset_id ] = path
+
+ # Walk job's output associations to find and use from_work_dir attributes.
job = job_wrapper.get_job()
job_tool = self.app.toolbox.tools_by_id.get( job.tool_id, None )
for dataset_assoc in job.output_datasets + job.output_library_datasets:
@@ -53,12 +64,13 @@
# Copy from working dir to HDA.
# TODO: move instead of copy to save time?
source_file = os.path.join( os.path.abspath( job_wrapper.working_directory ), hda_tool_output.from_work_dir )
+ destination = output_paths[ dataset.dataset_id ]
if in_directory( source_file, job_wrapper.working_directory ):
try:
- commands += "; cp %s %s" % ( source_file, dataset.file_name )
- log.debug( "Copying %s to %s as directed by from_work_dir" % ( source_file, dataset.file_name ) )
+ commands += "; cp %s %s" % ( source_file, destination )
+ log.debug( "Copying %s to %s as directed by from_work_dir" % ( source_file, destination ) )
except ( IOError, OSError ):
- log.debug( "Could not copy %s to %s as directed by from_work_dir" % ( source_file, dataset.file_name ) )
+ log.debug( "Could not copy %s to %s as directed by from_work_dir" % ( source_file, destination ) )
else:
# Security violation.
log.exception( "from_work_dir specified a location not in the working directory: %s, %s" % ( source_file, job_wrapper.working_directory ) )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.