1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/94a58c2fbf39/
changeset: 94a58c2fbf39
user: dan
date: 2011-11-15 18:11:09
summary: External display applications will now return HTTP not found for non-existent parameters/datasets when requested.
affected #: 1 file
diff -r 04acee047c2c585730e9a8266de81d25d5c25d7f -r 94a58c2fbf39ed76218d4af0ac30cb286b61541c lib/galaxy/web/controllers/dataset.py
--- a/lib/galaxy/web/controllers/dataset.py
+++ b/lib/galaxy/web/controllers/dataset.py
@@ -806,7 +806,11 @@
#in case some display app wants all files to be in the same 'directory',
#data can be forced to param, but not the other way (no filename for other direction)
#get param name from url param name
- action_param = display_link.get_param_name_by_url( action_param )
+ try:
+ action_param = display_link.get_param_name_by_url( action_param )
+ except ValueError, e:
+ log.debug( e )
+ return paste.httpexceptions.HTTPNotFound( str( e ) )
value = display_link.get_param_value( action_param )
assert value, "An invalid parameter name was provided: %s" % action_param
assert value.parameter.viewable, "This parameter is not viewable."
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/04acee047c2c/
changeset: 04acee047c2c
user: greg
date: 2011-11-15 17:51:05
summary: Use mercurial's purge extension to remove untracked repository files and empty directories, and fix a bug related to deleting repository files.
affected #: 2 files
diff -r bdf334e6017658b0864ae38552d103018d956caa -r 04acee047c2c585730e9a8266de81d25d5c25d7f lib/galaxy/webapps/community/controllers/common.py
--- a/lib/galaxy/webapps/community/controllers/common.py
+++ b/lib/galaxy/webapps/community/controllers/common.py
@@ -589,36 +589,36 @@
elif status_and_file_name.startswith( 'M' ) or status_and_file_name.startswith( 'A' ) or status_and_file_name.startswith( 'R' ):
files_to_commit.append( os.path.abspath( os.path.join( repo_dir, status_and_file_name.split()[1] ) ) )
# We may have files on disk in the repo directory that aren't being tracked, so they must be removed.
- cmd = 'hg status'
- tmp_name = tempfile.NamedTemporaryFile().name
- tmp_stdout = open( tmp_name, 'wb' )
+ # We'll use mercurial's purge extension to do this. Using this extension requires the following entry
+ # in the repository's hgrc file which was not required for some time, so we'll add it if it's missing.
+ # [extensions]
+ # hgext.purge=
+ lines = repo.opener( 'hgrc', 'rb' ).readlines()
+ if not '[extensions]\n' in lines:
+ # No extensions have been added at all, so just append to the file.
+ fp = repo.opener( 'hgrc', 'a' )
+ fp.write( '[extensions]\n' )
+ fp.write( 'hgext.purge=\n' )
+ fp.close()
+ elif not 'hgext.purge=\n' in lines:
+        # The file includes an [extensions] section, but we need to add the
+ # purge extension.
+ fp = repo.opener( 'hgrc', 'wb' )
+ for line in lines:
+ if line.startswith( '[extensions]' ):
+ fp.write( line )
+ fp.write( 'hgext.purge=\n' )
+ else:
+ fp.write( line )
+ fp.close()
+ cmd = 'hg purge'
os.chdir( repo_dir )
- proc = subprocess.Popen( args=cmd, shell=True, stdout=tmp_stdout.fileno() )
- returncode = proc.wait()
+ proc = subprocess.Popen( args=cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT )
+ return_code = proc.wait()
os.chdir( current_working_dir )
- tmp_stdout.close()
- if returncode == 0:
- for i, line in enumerate( open( tmp_name ) ):
- if line.startswith( '?' ) or line.startswith( 'I' ):
- files_to_remove_from_disk.append( os.path.abspath( os.path.join( repo_dir, line.split()[1] ) ) )
- elif line.startswith( 'M' ) or line.startswith( 'A' ) or line.startswith( 'R' ):
- files_to_commit.append( os.path.abspath( os.path.join( repo_dir, line.split()[1] ) ) )
- for full_path in files_to_remove_from_disk:
- # We'll remove all files that are not tracked or ignored.
- if os.path.isdir( full_path ):
- try:
- os.rmdir( full_path )
- except OSError, e:
- # The directory is not empty
- pass
- elif os.path.isfile( full_path ):
- os.remove( full_path )
- dir = os.path.split( full_path )[0]
- try:
- os.rmdir( dir )
- except OSError, e:
- # The directory is not empty
- pass
+ if return_code != 0:
+ output = proc.stdout.read( 32768 )
+ log.debug( 'hg purge failed in repository directory %s, reason: %s' % ( repo_dir, output ) )
if files_to_commit:
if not commit_message:
commit_message = 'Committed changes to: %s' % ', '.join( files_to_commit )
diff -r bdf334e6017658b0864ae38552d103018d956caa -r 04acee047c2c585730e9a8266de81d25d5c25d7f lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -1042,13 +1042,11 @@
def __create_hgrc_file( self, repository ):
# At this point, an entry for the repository is required to be in the hgweb.config
# file so we can call repository.repo_path.
- # Create a .hg/hgrc file that looks something like this:
- # [web]
- # allow_push = test
- # name = convert_characters1
- # push_ssl = False
# Since we support both http and https, we set push_ssl to False to override
# the default (which is True) in the mercurial api.
+ # The hg purge extension purges all files and directories not being tracked by
+ # mercurial in the current repository. It'll remove unknown files and empty
+ # directories. This is used in the update_for_browsing() method.
repo = hg.repository( get_configured_ui(), path=repository.repo_path )
fp = repo.opener( 'hgrc', 'wb' )
fp.write( '[paths]\n' )
@@ -1058,6 +1056,8 @@
fp.write( 'allow_push = %s\n' % repository.user.username )
fp.write( 'name = %s\n' % repository.name )
fp.write( 'push_ssl = false\n' )
+ fp.write( '[extensions]\n' )
+ fp.write( 'hgext.purge=' )
fp.close()
@web.expose
def browse_repository( self, trans, id, **kwd ):
@@ -1150,7 +1150,7 @@
tip = repository.tip
for selected_file in selected_files_to_delete:
try:
- commands.remove( repo.ui, repo, repo_file, force=True )
+ commands.remove( repo.ui, repo, selected_file, force=True )
except Exception, e:
# I never have a problem with commands.remove on a Mac, but in the test/production
# tool shed environment, it throws an exception whenever I delete all files from a
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/bdf334e60176/
changeset: bdf334e60176
user: fubar
date: 2011-11-15 17:09:50
summary: Bump FastQC version in preparation for updating executable version
affected #: 1 file
diff -r 9ad75ecd32daa6d97f20bf6ff9db354e7052d5b7 -r bdf334e6017658b0864ae38552d103018d956caa tools/rgenetics/rgFastQC.xml
--- a/tools/rgenetics/rgFastQC.xml
+++ b/tools/rgenetics/rgFastQC.xml
@@ -1,4 +1,4 @@
-<tool name="Fastqc: Fastqc QC" id="fastqc" version="0.1">
+<tool name="Fastqc: Fastqc QC" id="fastqc" version="0.2"><description>using FastQC from Babraham</description><command interpreter="python">
rgFastQC.py -i $input_file -d $html_file.files_path -o $html_file -n "$out_prefix" -f $input_file.ext -e ${GALAXY_DATA_INDEX_DIR}/shared/jars/FastQC/fastqc
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/9ad75ecd32da/
changeset: 9ad75ecd32da
user: greg
date: 2011-11-14 22:59:26
summary: Revert file content checks when uploading to a tool shed repository as they're currently too restrictive.
affected #: 1 file
diff -r f640c7bd6ffc4904996859e06157a5f67671f978 -r 9ad75ecd32daa6d97f20bf6ff9db354e7052d5b7 lib/galaxy/webapps/community/controllers/upload.py
--- a/lib/galaxy/webapps/community/controllers/upload.py
+++ b/lib/galaxy/webapps/community/controllers/upload.py
@@ -101,32 +101,34 @@
full_path = os.path.abspath( os.path.join( repo_dir, upload_point, uploaded_file_filename ) )
else:
full_path = os.path.abspath( os.path.join( repo_dir, uploaded_file_filename ) )
- ok, message = self.__check_file_content( uploaded_file_name )
- if ok:
- # Move the uploaded file to the load_point within the repository hierarchy.
- shutil.move( uploaded_file_name, full_path )
- commands.add( repo.ui, repo, full_path )
- try:
- commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message )
- except Exception, e:
- # I never have a problem with commands.commit on a Mac, but in the test/production
- # tool shed environment, it occasionally throws a "TypeError: array item must be char"
- # exception. If this happens, we'll try the following.
- repo.dirstate.write()
- repo.commit( user=trans.user.username, text=commit_message )
- if full_path.endswith( 'tool_data_table_conf.xml.sample' ):
- # Handle the special case where a tool_data_table_conf.xml.sample
- # file is being uploaded by parsing the file and adding new entries
- # to the in-memory trans.app.tool_data_tables dictionary as well as
- # appending them to the shed's tool_data_table_conf.xml file on disk.
- error, error_message = handle_sample_tool_data_table_conf_file( trans, full_path )
- if error:
- message = '%s<br/>%s' % ( message, error_message )
- if full_path.endswith( '.loc.sample' ):
- # Handle the special case where a xxx.loc.sample file is
- # being uploaded by copying it to ~/tool-data/xxx.loc.
- copy_sample_loc_file( trans, full_path )
- handle_email_alerts( trans, repository )
+ # TODO: enhance this method to set a flag and alert an admin to review content since
+ # the hard checks are too restrictive.
+ #ok, message = self.__check_file_content( uploaded_file_name )
+ #if ok:
+ # Move the uploaded file to the load_point within the repository hierarchy.
+ shutil.move( uploaded_file_name, full_path )
+ commands.add( repo.ui, repo, full_path )
+ try:
+ commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message )
+ except Exception, e:
+ # I never have a problem with commands.commit on a Mac, but in the test/production
+ # tool shed environment, it occasionally throws a "TypeError: array item must be char"
+ # exception. If this happens, we'll try the following.
+ repo.dirstate.write()
+ repo.commit( user=trans.user.username, text=commit_message )
+ if full_path.endswith( 'tool_data_table_conf.xml.sample' ):
+ # Handle the special case where a tool_data_table_conf.xml.sample
+ # file is being uploaded by parsing the file and adding new entries
+ # to the in-memory trans.app.tool_data_tables dictionary as well as
+ # appending them to the shed's tool_data_table_conf.xml file on disk.
+ error, error_message = handle_sample_tool_data_table_conf_file( trans, full_path )
+ if error:
+ message = '%s<br/>%s' % ( message, error_message )
+ if full_path.endswith( '.loc.sample' ):
+ # Handle the special case where a xxx.loc.sample file is
+ # being uploaded by copying it to ~/tool-data/xxx.loc.
+ copy_sample_loc_file( trans, full_path )
+ handle_email_alerts( trans, repository )
if ok:
# Update the repository files for browsing.
update_for_browsing( trans, repository, current_working_dir, commit_message=commit_message )
@@ -192,6 +194,9 @@
tar.extractall( path=full_path )
tar.close()
uploaded_file.close()
+ """
+ # TODO: enhance this method to set a flag and alert an admin to review content since
+ # the hard checks are too restrictive.
for filename_in_archive in filenames_in_archive:
if os.path.isfile( filename_in_archive ):
ok, message = self.__check_file_content( filename_in_archive )
@@ -200,6 +205,7 @@
current_working_dir = os.getcwd()
update_for_browsing( trans, repository, current_working_dir )
return False, message, []
+ """
if remove_repo_files_not_in_tar and not repository.is_new:
# We have a repository that is not new (it contains files), so discover
# those files that are in the repository, but not in the uploaded archive.
@@ -343,13 +349,15 @@
return False, message
return True, ''
def __check_file_content( self, file_path ):
+ return True, ''
message = ''
ok = True
+ head, tail = os.path.split( file_path )
if check_html( file_path ):
- message = 'Files containing HTML content cannot be uploaded to a Galaxy tool shed.'
+ message = 'The file <b>%s</b> contains HTML content which cannot be uploaded to a Galaxy tool shed.' % str( tail )
ok = False
elif check_image( file_path ):
# For now we won't allow images to be uploaded.
- message = 'Files containing images cannot be uploaded to a Galaxy tool shed.'
+ message = 'The file <b>%s</b> contains image content that cannot be uploaded to a Galaxy tool shed.' % str( tail )
ok = False
return ok, message
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/f640c7bd6ffc/
changeset: f640c7bd6ffc
user: greg
date: 2011-11-14 22:19:56
summary: Add baseline support for handling data types included in tool shed repositories, add the ability to upload files to tool shed repositories by entering a URL in the upload form, provide much better file content checking when uploading either single files or archives to a tool shed repository (neither html content nor images are currently allowed), enhance error messaging when tool config parameter tags are not functionally correct.
affected #: 11 files
diff -r 128f167ce12bd5723c5a8124d1d7ea692daf240f -r f640c7bd6ffc4904996859e06157a5f67671f978 lib/galaxy/tools/parameters/basic.py
--- a/lib/galaxy/tools/parameters/basic.py
+++ b/lib/galaxy/tools/parameters/basic.py
@@ -164,9 +164,14 @@
@classmethod
def build( cls, tool, param ):
"""Factory method to create parameter of correct type"""
+ param_name = param.get( "name" )
+ if not param_name:
+ raise ValueError( "Tool parameters require a 'name'" )
param_type = param.get("type")
- if not param_type or param_type not in parameter_types:
- raise ValueError( "Unknown tool parameter type '%s'" % param_type )
+ if not param_type:
+ raise ValueError( "Tool parameter '%s' requires a 'type'" % ( param_name ) )
+ elif param_type not in parameter_types:
+ raise ValueError( "Tool parameter '%s' uses an unknown type '%s'" % ( param_name, param_type ) )
else:
return parameter_types[param_type]( tool, param )
diff -r 128f167ce12bd5723c5a8124d1d7ea692daf240f -r f640c7bd6ffc4904996859e06157a5f67671f978 lib/galaxy/web/base/controller.py
--- a/lib/galaxy/web/base/controller.py
+++ b/lib/galaxy/web/base/controller.py
@@ -6,6 +6,7 @@
from time import strftime
from galaxy import config, tools, web, util
from galaxy.util.hash_util import *
+from galaxy.util.json import json_fix
from galaxy.web import error, form, url_for
from galaxy.model.orm import *
from galaxy.workflow.modules import *
diff -r 128f167ce12bd5723c5a8124d1d7ea692daf240f -r f640c7bd6ffc4904996859e06157a5f67671f978 lib/galaxy/web/controllers/admin.py
--- a/lib/galaxy/web/controllers/admin.py
+++ b/lib/galaxy/web/controllers/admin.py
@@ -4,12 +4,14 @@
from galaxy.web.framework.helpers import time_ago, iff, grids
from galaxy.tools.search import ToolBoxSearch
from galaxy.tools import ToolSection, json_fix
+from galaxy.util import inflector
import logging
log = logging.getLogger( __name__ )
from galaxy.actions.admin import AdminActions
from galaxy.web.params import QuotaParamParser
from galaxy.exceptions import *
+import galaxy.datatypes.registry
class UserListGrid( grids.Grid ):
class EmailColumn( grids.TextColumn ):
@@ -871,6 +873,10 @@
os.chdir( current_working_dir )
tmp_stderr.close()
if returncode == 0:
+ # Load data types required by tools.
+ # TODO: uncomment the following when we're ready...
+ #self.__load_datatypes( trans, repo_files_dir )
+ # Load tools and tool data files required by them.
sample_files, repository_tools_tups = self.__get_repository_tools_and_sample_files( trans, tool_path, repo_files_dir )
if repository_tools_tups:
# Handle missing data table entries for tool parameters that are dynamically generated select lists.
@@ -920,8 +926,9 @@
status = 'error'
if installed_repository_names:
installed_repository_names.sort()
- message += 'These %d repositories were installed and all tools were loaded into tool panel section <b>%s</b>:<br/>' % \
- ( len( installed_repository_names ), tool_section.name )
+ num_repositories_installed = len( installed_repository_names )
+ message += 'Installed %d %s and all tools were loaded into tool panel section <b>%s</b>:<br/>Installed repositories: ' % \
+ ( num_repositories_installed, inflector.cond_plural( num_repositories_installed, 'repository' ), tool_section.name )
for i, repo_name in enumerate( installed_repository_names ):
if i == len( installed_repository_names ) -1:
message += '%s.<br/>' % repo_name
@@ -1171,6 +1178,45 @@
error = tmp_stderr.read()
tmp_stderr.close()
log.debug( 'Problem installing dependencies for tool "%s"\n%s' % ( repository_tool.name, error ) )
+ def __load_datatypes( self, trans, repo_files_dir ):
+ # Find datatypes_conf.xml if it exists.
+ datatypes_config = None
+ for root, dirs, files in os.walk( repo_files_dir ):
+ if root.find( '.hg' ) < 0:
+ for name in files:
+ if name == 'datatypes_conf.xml':
+ datatypes_config = os.path.abspath( os.path.join( root, name ) )
+ break
+ if datatypes_config:
+ # Parse datatypes_config.
+ tree = ElementTree.parse( datatypes_config )
+ root = tree.getroot()
+ ElementInclude.include( root )
+ datatype_files = root.find( 'datatype_files' )
+ for elem in datatype_files.findall( 'datatype_file' ):
+ datatype_file_name = elem.get( 'name', None )
+ if datatype_file_name:
+ # Find the file in the installed repository.
+ relative_path = None
+ for root, dirs, files in os.walk( repo_files_dir ):
+ if root.find( '.hg' ) < 0:
+ for name in files:
+ if name == datatype_file_name:
+ relative_path = os.path.join( root, name )
+ break
+ relative_head, relative_tail = os.path.split( relative_path )
+ # TODO: get the import_module by parsing the <registration><datatype> tags
+ if datatype_file_name.find( '.' ) > 0:
+ import_module = datatype_file_name.split( '.' )[ 0 ]
+ else:
+ import_module = datatype_file_name
+ try:
+ sys.path.insert( 0, relative_head )
+ module = __import__( import_module )
+ sys.path.pop( 0 )
+ except Exception, e:
+ log.debug( "Execption importing datatypes code file included in installed repository: %s" % str( e ) )
+ trans.app.datatypes_registry = galaxy.datatypes.registry.Registry( trans.app.config.root, datatypes_config )
def __get_repository_tools_and_sample_files( self, trans, tool_path, repo_files_dir ):
# The sample_files list contains all files whose name ends in .sample
sample_files = []
diff -r 128f167ce12bd5723c5a8124d1d7ea692daf240f -r f640c7bd6ffc4904996859e06157a5f67671f978 lib/galaxy/webapps/community/controllers/common.py
--- a/lib/galaxy/webapps/community/controllers/common.py
+++ b/lib/galaxy/webapps/community/controllers/common.py
@@ -124,34 +124,6 @@
.filter( trans.model.RepositoryMetadata.table.c.repository_id == trans.security.decode_id( id ) ) \
.order_by( trans.model.RepositoryMetadata.table.c.id.desc() ) \
.first()
-def generate_workflow_metadata( trans, id, changeset_revision, exported_workflow_dict, metadata_dict ):
- """
- Update the received metadata_dict with changes that have been applied
- to the received exported_workflow_dict. Store everything in the database.
- """
- if 'workflows' in metadata_dict:
- metadata_dict[ 'workflows' ].append( exported_workflow_dict )
- else:
- metadata_dict[ 'workflows' ] = [ exported_workflow_dict ]
- return metadata_dict
-def new_workflow_metadata_required( trans, id, metadata_dict ):
- """
- Currently everything about an exported workflow except the name is hard-coded, so there's
- no real way to differentiate versions of exported workflows. If this changes at some future
- time, this method should be enhanced accordingly.
- """
- if 'workflows' in metadata_dict:
- repository_metadata = get_latest_repository_metadata( trans, id )
- if repository_metadata:
- if repository_metadata.metadata:
- # The repository has metadata, so update the workflows value - no new record is needed.
- return False
- else:
- # There is no saved repository metadata, so we need to create a new repository_metadata table record.
- return True
- # The received metadata_dict includes no metadata for workflows, so a new repository_metadata table
- # record is not needed.
- return False
def generate_clone_url( trans, repository_id ):
repository = get_repository( trans, repository_id )
protocol, base = trans.request.base.split( '://' )
@@ -313,6 +285,62 @@
# The received metadata_dict includes no metadata for tools, so a new repository_metadata table
# record is not needed.
return False
+def generate_workflow_metadata( trans, id, changeset_revision, exported_workflow_dict, metadata_dict ):
+ """
+ Update the received metadata_dict with changes that have been applied
+ to the received exported_workflow_dict. Store everything in the database.
+ """
+ if 'workflows' in metadata_dict:
+ metadata_dict[ 'workflows' ].append( exported_workflow_dict )
+ else:
+ metadata_dict[ 'workflows' ] = [ exported_workflow_dict ]
+ return metadata_dict
+def new_workflow_metadata_required( trans, id, metadata_dict ):
+ """
+ Currently everything about an exported workflow except the name is hard-coded, so there's
+ no real way to differentiate versions of exported workflows. If this changes at some future
+ time, this method should be enhanced accordingly.
+ """
+ if 'workflows' in metadata_dict:
+ repository_metadata = get_latest_repository_metadata( trans, id )
+ if repository_metadata:
+ if repository_metadata.metadata:
+ # The repository has metadata, so update the workflows value - no new record is needed.
+ return False
+ else:
+ # There is no saved repository metadata, so we need to create a new repository_metadata table record.
+ return True
+ # The received metadata_dict includes no metadata for workflows, so a new repository_metadata table
+ # record is not needed.
+ return False
+def generate_datatypes_metadata( trans, id, changeset_revision, datatypes_config, metadata_dict ):
+ """
+ Update the received metadata_dict with changes that have been applied
+ to the received datatypes_config.
+ """
+ # Parse datatypes_config.
+ tree = ElementTree.parse( datatypes_config )
+ root = tree.getroot()
+ ElementInclude.include( root )
+ repository_datatype_code_files = []
+ datatype_files = root.find( 'datatype_files' )
+ if datatype_files:
+ for elem in datatype_files.findall( 'datatype_file' ):
+ name = elem.get( 'name', None )
+ repository_datatype_code_files.append( name )
+ metadata_dict[ 'datatype_files' ] = repository_datatype_code_files
+ datatypes = []
+ registration = root.find( 'registration' )
+ if registration:
+ for elem in registration.findall( 'datatype' ):
+ extension = elem.get( 'extension', None )
+ dtype = elem.get( 'type', None )
+ mimetype = elem.get( 'mimetype', None )
+ datatypes.append( dict( extension=extension,
+ dtype=dtype,
+ mimetype=mimetype ) )
+ metadata_dict[ 'datatypes' ] = datatypes
+ return metadata_dict
def set_repository_metadata( trans, id, changeset_revision, **kwd ):
"""Set repository metadata"""
message = ''
@@ -322,28 +350,34 @@
repo = hg.repository( get_configured_ui(), repo_dir )
invalid_files = []
sample_files = []
+ datatypes_config = None
ctx = get_changectx_for_changeset( trans, repo, changeset_revision )
if ctx is not None:
metadata_dict = {}
if changeset_revision == repository.tip:
- # Find all special .sample files first.
+ # Find datatypes_conf.xml if it exists.
+ for root, dirs, files in os.walk( repo_dir ):
+ if root.find( '.hg' ) < 0:
+ for name in files:
+ if name == 'datatypes_conf.xml':
+ datatypes_config = os.path.abspath( os.path.join( root, name ) )
+ break
+ if datatypes_config:
+ metadata_dict = generate_datatypes_metadata( trans, id, changeset_revision, datatypes_config, metadata_dict )
+ # Find all special .sample files.
for root, dirs, files in os.walk( repo_dir ):
if root.find( '.hg' ) < 0:
for name in files:
if name.endswith( '.sample' ):
sample_files.append( os.path.abspath( os.path.join( root, name ) ) )
+ # Find all tool configs and exported workflows.
for root, dirs, files in os.walk( repo_dir ):
if root.find( '.hg' ) < 0 and root.find( 'hgrc' ) < 0:
if '.hg' in dirs:
- # Don't visit .hg directories - should be impossible since we don't
- # allow uploaded archives that contain .hg dirs, but just in case...
dirs.remove( '.hg' )
- if 'hgrc' in files:
- # Don't include hgrc files in commit.
- files.remove( 'hgrc' )
for name in files:
# Find all tool configs.
- if name.endswith( '.xml' ):
+ if name != 'datatypes_conf.xml' and name.endswith( '.xml' ):
try:
full_path = os.path.abspath( os.path.join( root, name ) )
tool = load_tool( trans, full_path )
@@ -373,11 +407,13 @@
# Find all special .sample files first.
for filename in ctx:
if filename.endswith( '.sample' ):
- sample_files.append( os.path.abspath( os.path.join( root, filename ) ) )
+ sample_files.append( os.path.abspath( filename ) )
# Get all tool config file names from the hgweb url, something like:
# /repos/test/convert_chars1/file/e58dcf0026c7/convert_characters.xml
for filename in ctx:
- # Find all tool configs - should not have to update metadata for workflows for now.
+ # Find all tool configs - we do not have to update metadata for workflows or datatypes in anything
+ # but repository tips (handled above) since at the time this code was written, no workflows or
+            # datatypes_conf.xml files exist in tool shed repositories, so they can only be added in future tips.
if filename.endswith( '.xml' ):
fctx = ctx[ filename ]
# Write the contents of the old tool config to a temporary file.
@@ -532,25 +568,41 @@
# The following will delete the disk copy of only the files in the repository.
#os.system( 'hg update -r null > /dev/null 2>&1' )
repo.ui.pushbuffer()
+ files_to_remove_from_disk = []
+ files_to_commit = []
commands.status( repo.ui, repo, all=True )
status_and_file_names = repo.ui.popbuffer().strip().split( "\n" )
- # status_and_file_names looks something like:
- # ['? README', '? tmap_tool/tmap-0.0.9.tar.gz', '? dna_filtering.py', 'C filtering.py', 'C filtering.xml']
- # The codes used to show the status of files are:
- # M = modified
- # A = added
- # R = removed
- # C = clean
- # ! = deleted, but still tracked
- # ? = not tracked
- # I = ignored
- files_to_remove_from_disk = []
- files_to_commit = []
- for status_and_file_name in status_and_file_names:
- if status_and_file_name.startswith( '?' ) or status_and_file_name.startswith( 'I' ):
- files_to_remove_from_disk.append( os.path.abspath( os.path.join( repo_dir, status_and_file_name.split()[1] ) ) )
- elif status_and_file_name.startswith( 'M' ) or status_and_file_name.startswith( 'A' ) or status_and_file_name.startswith( 'R' ):
- files_to_commit.append( os.path.abspath( os.path.join( repo_dir, status_and_file_name.split()[1] ) ) )
+ if status_and_file_names and status_and_file_names[ 0 ] not in [ '' ]:
+ # status_and_file_names looks something like:
+ # ['? README', '? tmap_tool/tmap-0.0.9.tar.gz', '? dna_filtering.py', 'C filtering.py', 'C filtering.xml']
+ # The codes used to show the status of files are:
+ # M = modified
+ # A = added
+ # R = removed
+ # C = clean
+ # ! = deleted, but still tracked
+ # ? = not tracked
+ # I = ignored
+ for status_and_file_name in status_and_file_names:
+ if status_and_file_name.startswith( '?' ) or status_and_file_name.startswith( 'I' ):
+ files_to_remove_from_disk.append( os.path.abspath( os.path.join( repo_dir, status_and_file_name.split()[1] ) ) )
+ elif status_and_file_name.startswith( 'M' ) or status_and_file_name.startswith( 'A' ) or status_and_file_name.startswith( 'R' ):
+ files_to_commit.append( os.path.abspath( os.path.join( repo_dir, status_and_file_name.split()[1] ) ) )
+ # We may have files on disk in the repo directory that aren't being tracked, so they must be removed.
+ cmd = 'hg status'
+ tmp_name = tempfile.NamedTemporaryFile().name
+ tmp_stdout = open( tmp_name, 'wb' )
+ os.chdir( repo_dir )
+ proc = subprocess.Popen( args=cmd, shell=True, stdout=tmp_stdout.fileno() )
+ returncode = proc.wait()
+ os.chdir( current_working_dir )
+ tmp_stdout.close()
+ if returncode == 0:
+ for i, line in enumerate( open( tmp_name ) ):
+ if line.startswith( '?' ) or line.startswith( 'I' ):
+ files_to_remove_from_disk.append( os.path.abspath( os.path.join( repo_dir, line.split()[1] ) ) )
+ elif line.startswith( 'M' ) or line.startswith( 'A' ) or line.startswith( 'R' ):
+ files_to_commit.append( os.path.abspath( os.path.join( repo_dir, line.split()[1] ) ) )
for full_path in files_to_remove_from_disk:
# We'll remove all files that are not tracked or ignored.
if os.path.isdir( full_path ):
diff -r 128f167ce12bd5723c5a8124d1d7ea692daf240f -r f640c7bd6ffc4904996859e06157a5f67671f978 lib/galaxy/webapps/community/controllers/repository.py
--- a/lib/galaxy/webapps/community/controllers/repository.py
+++ b/lib/galaxy/webapps/community/controllers/repository.py
@@ -1007,17 +1007,19 @@
if repository_path.startswith( './' ):
repository_path = repository_path.replace( './', '', 1 )
entry = "repos/%s/%s = %s" % ( repository.user.username, repository.name, repository_path.lstrip( './' ) )
+ tmp_fd, tmp_fname = tempfile.mkstemp()
if os.path.exists( hgweb_config ):
# Make a backup of the hgweb.config file since we're going to be changing it.
self.__make_hgweb_config_copy( trans, hgweb_config )
- tmp_fname = tempfile.NamedTemporaryFile()
+ new_hgweb_config = open( tmp_fname, 'wb' )
for i, line in enumerate( open( hgweb_config ) ):
- tmp_fname.write( line )
+ new_hgweb_config.write( line )
else:
- tmp_fname.write( '[paths]\n' )
- tmp_fname.write( "%s\n" % entry )
- tmp_fname.flush()
- shutil.move( tmp_fname.name, os.path.abspath( hgweb_config ) )
+ new_hgweb_config = open( tmp_fname, 'wb' )
+ new_hgweb_config.write( '[paths]\n' )
+ new_hgweb_config.write( "%s\n" % entry )
+ new_hgweb_config.flush()
+ shutil.move( tmp_fname, os.path.abspath( hgweb_config ) )
def __change_hgweb_config_entry( self, trans, repository, old_repository_name, new_repository_name ):
# Change an entry in the hgweb.config file for a repository. This only happens when
# the owner changes the name of the repository. An entry looks something like:
@@ -1028,14 +1030,15 @@
repo_dir = repository.repo_path
old_lhs = "repos/%s/%s" % ( repository.user.username, old_repository_name )
new_entry = "repos/%s/%s = %s\n" % ( repository.user.username, new_repository_name, repo_dir )
- tmp_fname = tempfile.NamedTemporaryFile()
+ tmp_fd, tmp_fname = tempfile.mkstemp()
+ new_hgweb_config = open( tmp_fname, 'wb' )
for i, line in enumerate( open( hgweb_config ) ):
if line.startswith( old_lhs ):
- tmp_fname.write( new_entry )
+ new_hgweb_config.write( new_entry )
else:
- tmp_fname.write( line )
- tmp_fname.flush()
- shutil.move( tmp_fname.name, os.path.abspath( hgweb_config ) )
+ new_hgweb_config.write( line )
+ new_hgweb_config.flush()
+ shutil.move( tmp_fname, os.path.abspath( hgweb_config ) )
def __create_hgrc_file( self, repository ):
# At this point, an entry for the repository is required to be in the hgweb.config
# file so we can call repository.repo_path.
diff -r 128f167ce12bd5723c5a8124d1d7ea692daf240f -r f640c7bd6ffc4904996859e06157a5f67671f978 lib/galaxy/webapps/community/controllers/upload.py
--- a/lib/galaxy/webapps/community/controllers/upload.py
+++ b/lib/galaxy/webapps/community/controllers/upload.py
@@ -1,4 +1,4 @@
-import sys, os, shutil, logging, tarfile, tempfile
+import sys, os, shutil, logging, tarfile, tempfile, urllib
from galaxy.web.base.controller import *
from galaxy.model.orm import *
from galaxy.datatypes.checkers import *
@@ -11,9 +11,6 @@
SUCCESS, INFO, WARNING, ERROR = "done", "info", "warning", "error"
CHUNK_SIZE = 2**20 # 1Mb
-class UploadError( Exception ):
- pass
-
class UploadController( BaseUIController ):
@web.expose
@web.require_login( 'upload', use_panels=True, webapp='community' )
@@ -32,20 +29,40 @@
remove_repo_files_not_in_tar = util.string_as_bool( params.get( 'remove_repo_files_not_in_tar', 'true' ) )
uploaded_file = None
upload_point = self.__get_upload_point( repository, **kwd )
- # Get the current repository tip.
tip = repository.tip
+ file_data = params.get( 'file_data', '' )
+ url = params.get( 'url', '' )
if params.get( 'upload_button', False ):
current_working_dir = os.getcwd()
- file_data = params.get( 'file_data', '' )
- if file_data == '':
+ if file_data == '' and url == '':
message = 'No files were entered on the upload form.'
status = 'error'
uploaded_file = None
+ elif url:
+ valid_url = True
+ try:
+ stream = urllib.urlopen( url )
+ except Exception, e:
+ valid_url = False
+ message = 'Error uploading file via http: %s' % str( e )
+ status = 'error'
+ uploaded_file = None
+ if valid_url:
+ fd, uploaded_file_name = tempfile.mkstemp()
+ uploaded_file = open( uploaded_file_name, 'wb' )
+ while 1:
+ chunk = stream.read( CHUNK_SIZE )
+ if not chunk:
+ break
+ uploaded_file.write( chunk )
+ uploaded_file.flush()
+ uploaded_file_filename = url.split( '/' )[ -1 ]
+ isempty = os.path.getsize( os.path.abspath( uploaded_file_name ) ) == 0
elif file_data not in ( '', None ):
uploaded_file = file_data.file
uploaded_file_name = uploaded_file.name
uploaded_file_filename = file_data.filename
- isempty = os.path.getsize( os.path.abspath( uploaded_file_name ) ) == 0
+ isempty = os.path.getsize( os.path.abspath( uploaded_file_name ) ) == 0
if uploaded_file:
isgzip = False
isbz2 = False
@@ -84,30 +101,32 @@
full_path = os.path.abspath( os.path.join( repo_dir, upload_point, uploaded_file_filename ) )
else:
full_path = os.path.abspath( os.path.join( repo_dir, uploaded_file_filename ) )
- # Move the uploaded file to the load_point within the repository hierarchy.
- shutil.move( uploaded_file_name, full_path )
- commands.add( repo.ui, repo, full_path )
- try:
- commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message )
- except Exception, e:
- # I never have a problem with commands.commit on a Mac, but in the test/production
- # tool shed environment, it occasionally throws a "TypeError: array item must be char"
- # exception. If this happens, we'll try the following.
- repo.dirstate.write()
- repo.commit( user=trans.user.username, text=commit_message )
- if full_path.endswith( 'tool_data_table_conf.xml.sample' ):
- # Handle the special case where a tool_data_table_conf.xml.sample
- # file is being uploaded by parsing the file and adding new entries
- # to the in-memory trans.app.tool_data_tables dictionary as well as
- # appending them to the shed's tool_data_table_conf.xml file on disk.
- error, error_message = handle_sample_tool_data_table_conf_file( trans, full_path )
- if error:
- message = '%s<br/>%s' % ( message, error_message )
- if full_path.endswith( '.loc.sample' ):
- # Handle the special case where a xxx.loc.sample file is
- # being uploaded by copying it to ~/tool-data/xxx.loc.
- copy_sample_loc_file( trans, full_path )
- handle_email_alerts( trans, repository )
+ ok, message = self.__check_file_content( uploaded_file_name )
+ if ok:
+ # Move the uploaded file to the load_point within the repository hierarchy.
+ shutil.move( uploaded_file_name, full_path )
+ commands.add( repo.ui, repo, full_path )
+ try:
+ commands.commit( repo.ui, repo, full_path, user=trans.user.username, message=commit_message )
+ except Exception, e:
+ # I never have a problem with commands.commit on a Mac, but in the test/production
+ # tool shed environment, it occasionally throws a "TypeError: array item must be char"
+ # exception. If this happens, we'll try the following.
+ repo.dirstate.write()
+ repo.commit( user=trans.user.username, text=commit_message )
+ if full_path.endswith( 'tool_data_table_conf.xml.sample' ):
+ # Handle the special case where a tool_data_table_conf.xml.sample
+ # file is being uploaded by parsing the file and adding new entries
+ # to the in-memory trans.app.tool_data_tables dictionary as well as
+ # appending them to the shed's tool_data_table_conf.xml file on disk.
+ error, error_message = handle_sample_tool_data_table_conf_file( trans, full_path )
+ if error:
+ message = '%s<br/>%s' % ( message, error_message )
+ if full_path.endswith( '.loc.sample' ):
+ # Handle the special case where a xxx.loc.sample file is
+ # being uploaded by copying it to ~/tool-data/xxx.loc.
+ copy_sample_loc_file( trans, full_path )
+ handle_email_alerts( trans, repository )
if ok:
# Update the repository files for browsing.
update_for_browsing( trans, repository, current_working_dir, commit_message=commit_message )
@@ -146,6 +165,7 @@
selected_categories = [ trans.security.decode_id( id ) for id in category_ids ]
return trans.fill_template( '/webapps/community/repository/upload.mako',
repository=repository,
+ url=url,
commit_message=commit_message,
uncompress_file=uncompress_file,
remove_repo_files_not_in_tar=remove_repo_files_not_in_tar,
@@ -172,6 +192,14 @@
tar.extractall( path=full_path )
tar.close()
uploaded_file.close()
+ for filename_in_archive in filenames_in_archive:
+ if os.path.isfile( filename_in_archive ):
+ ok, message = self.__check_file_content( filename_in_archive )
+ if not ok:
+ # Refresh the repository files for browsing.
+ current_working_dir = os.getcwd()
+ update_for_browsing( trans, repository, current_working_dir )
+ return False, message, []
if remove_repo_files_not_in_tar and not repository.is_new:
# We have a repository that is not new (it contains files), so discover
# those files that are in the repository, but not in the uploaded archive.
@@ -314,4 +342,14 @@
message = "Uploaded archives cannot contain hgrc files."
return False, message
return True, ''
-
+ def __check_file_content( self, file_path ):
+ message = ''
+ ok = True
+ if check_html( file_path ):
+ message = 'Files containing HTML content cannot be uploaded to a Galaxy tool shed.'
+ ok = False
+ elif check_image( file_path ):
+ # For now we won't allow images to be uploaded.
+ message = 'Files containing images cannot be uploaded to a Galaxy tool shed.'
+ ok = False
+ return ok, message
diff -r 128f167ce12bd5723c5a8124d1d7ea692daf240f -r f640c7bd6ffc4904996859e06157a5f67671f978 templates/webapps/community/repository/common.mako
--- a/templates/webapps/community/repository/common.mako
+++ b/templates/webapps/community/repository/common.mako
@@ -83,7 +83,7 @@
hg clone <a href="${clone_str}">${clone_str}</a></%def>
-<%def name="render_repository_tools_and_workflows( repository_metadata_id, metadata, can_set_metadata=False, webapp='community' )">
+<%def name="render_repository_items( repository_metadata_id, metadata, can_set_metadata=False, webapp='community' )"><% from galaxy.webapps.community.controllers.common import encode, decode %>
%if metadata or can_set_metadata:
<p/>
@@ -195,6 +195,45 @@
</div><div style="clear: both"></div>
%endif
+ %if 'datatypes' in metadata:
+ <div class="form-row">
+ <table width="100%">
+ <tr bgcolor="#D8D8D8" width="100%">
+ <td><b>Data types</b></td>
+ </tr>
+ </table>
+ </div>
+ <div style="clear: both"></div>
+ <div class="form-row">
+ <% datatypes_dicts = metadata[ 'datatypes' ] %>
+ <table class="grid">
+ <tr>
+ <td><b>extension</b></td>
+ <td><b>dtype</b></td>
+ <td><b>mimetype</b></td>
+ </tr>
+ %for datatypes_dict in datatypes_dicts:
+ <%
+ extension = datatypes_dict[ 'extension' ]
+ dtype = datatypes_dict[ 'dtype' ]
+ mimetype = datatypes_dict[ 'mimetype' ]
+ %>
+ <tr>
+ <td>${extension}</td>
+ <td>${dtype}</td>
+ <td>
+ %if mimetype:
+ ${mimetype}
+ %else:
+
+ %endif
+ </td>
+ </tr>
+ %endfor
+ </table>
+ </div>
+ <div style="clear: both"></div>
+ %endif
%endif
%if can_set_metadata:
<form name="set_metadata" action="${h.url_for( controller='repository', action='set_metadata', id=trans.security.encode_id( repository.id ), ctx_str=changeset_revision )}" method="post">
diff -r 128f167ce12bd5723c5a8124d1d7ea692daf240f -r f640c7bd6ffc4904996859e06157a5f67671f978 templates/webapps/community/repository/manage_repository.mako
--- a/templates/webapps/community/repository/manage_repository.mako
+++ b/templates/webapps/community/repository/manage_repository.mako
@@ -184,7 +184,7 @@
</form></div></div>
-${render_repository_tools_and_workflows( repository_metadata_id, metadata, can_set_metadata=True )}
+${render_repository_items( repository_metadata_id, metadata, can_set_metadata=True )}
<p/><div class="toolForm"><div class="toolFormTitle">Manage categories</div>
diff -r 128f167ce12bd5723c5a8124d1d7ea692daf240f -r f640c7bd6ffc4904996859e06157a5f67671f978 templates/webapps/community/repository/preview_tools_in_changeset.mako
--- a/templates/webapps/community/repository/preview_tools_in_changeset.mako
+++ b/templates/webapps/community/repository/preview_tools_in_changeset.mako
@@ -104,4 +104,4 @@
</div></div><p/>
-${render_repository_tools_and_workflows( repository_metadata_id, metadata, webapp=webapp )}
+${render_repository_items( repository_metadata_id, metadata, webapp=webapp )}
diff -r 128f167ce12bd5723c5a8124d1d7ea692daf240f -r f640c7bd6ffc4904996859e06157a5f67671f978 templates/webapps/community/repository/upload.mako
--- a/templates/webapps/community/repository/upload.mako
+++ b/templates/webapps/community/repository/upload.mako
@@ -73,7 +73,16 @@
</div><div style="clear: both"></div></div>
-
+ <div class="form-row">
+ <label>Url:</label>
+ <div class="form-row-input">
+ <input name="url" type="textfield" value="${url}" size="40"/>
+ </div>
+ <div class="toolParamHelp" style="clear: both;">
+ Enter a URL to upload your files via http.
+ </div>
+ <div style="clear: both"></div>
+ </div><div class="form-row"><%
if uncompress_file:
diff -r 128f167ce12bd5723c5a8124d1d7ea692daf240f -r f640c7bd6ffc4904996859e06157a5f67671f978 templates/webapps/community/repository/view_repository.mako
--- a/templates/webapps/community/repository/view_repository.mako
+++ b/templates/webapps/community/repository/view_repository.mako
@@ -186,7 +186,7 @@
%endif
</div></div>
-${render_repository_tools_and_workflows( repository_metadata_id, metadata, webapp=webapp )}
+${render_repository_items( repository_metadata_id, metadata, webapp=webapp )}
%if repository.categories:
<p/><div class="toolForm">
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this message because you have the notification service enabled and are
the addressed recipient of this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/changeset/128f167ce12b/
changeset: 128f167ce12b
user: dan
date: 2011-11-14 19:26:29
summary: Tool help updates.
affected #: 4 files
diff -r 4a47e724738420ce883ffb27b31e280c93ceefcd -r 128f167ce12bd5723c5a8124d1d7ea692daf240f tools/filters/secure_hash_message_digest.xml
--- a/tools/filters/secure_hash_message_digest.xml
+++ b/tools/filters/secure_hash_message_digest.xml
@@ -35,5 +35,11 @@
This tool outputs Secure Hashes / Message Digests of a dataset using the user selected algorithms.
+------
+
+**Citation**
+
+If you use this tool in Galaxy, please cite Blankenberg D, et al. *In preparation.*
+
</help></tool>
diff -r 4a47e724738420ce883ffb27b31e280c93ceefcd -r 128f167ce12bd5723c5a8124d1d7ea692daf240f tools/filters/wc_gnu.xml
--- a/tools/filters/wc_gnu.xml
+++ b/tools/filters/wc_gnu.xml
@@ -62,5 +62,11 @@
#lines words characters
7499 41376 624971
+------
+
+**Citation**
+
+If you use this tool in Galaxy, please cite Blankenberg D, et al. *In preparation.*
+
</help></tool>
diff -r 4a47e724738420ce883ffb27b31e280c93ceefcd -r 128f167ce12bd5723c5a8124d1d7ea692daf240f tools/peak_calling/ccat_wrapper.xml
--- a/tools/peak_calling/ccat_wrapper.xml
+++ b/tools/peak_calling/ccat_wrapper.xml
@@ -112,7 +112,7 @@
<output name="output_top_file" file="peakcalling_ccat/3.0/ccat_test_top_out_1.interval.sorted.re_match" compare="re_match" sort="true" /><output name="output_log_file" file="peakcalling_ccat/3.0/ccat_test_log_out_1.txt" /></test>
- <!-- Test below gives different results on different architectures,
+ <!-- Test below gives different answers on different architectures,
e.g.: x86_64 GNU/Linux gave an extra line (additional peak called) when compared to the version running on 10.6.0 Darwin i386
slidingWinSize was fixed to be 1000, default as per readme.txt
-->
@@ -140,6 +140,8 @@
**Citation**
+For the underlying tool, please cite `Xu H, Handoko L, Wei X, Ye C, Sheng J, Wei CL, Lin F, Sung WK. A signal-noise model for significance analysis of ChIP-seq with negative control. Bioinformatics. 2010 May 1;26(9):1199-204. <http://www.ncbi.nlm.nih.gov/pubmed/20371496>`_
+
If you use this tool in Galaxy, please cite Blankenberg D, et al. *In preparation.*
</help>
diff -r 4a47e724738420ce883ffb27b31e280c93ceefcd -r 128f167ce12bd5723c5a8124d1d7ea692daf240f tools/peak_calling/macs_wrapper.xml
--- a/tools/peak_calling/macs_wrapper.xml
+++ b/tools/peak_calling/macs_wrapper.xml
@@ -231,6 +231,8 @@
**Citation**
+For the underlying tool, please cite `Zhang Y, Liu T, Meyer CA, Eeckhoute J, Johnson DS, Bernstein BE, Nusbaum C, Myers RM, Brown M, Li W, Liu XS. Model-based analysis of ChIP-Seq (MACS). Genome Biol. 2008;9(9):R137. <http://www.ncbi.nlm.nih.gov/pubmed/18798982>`_
+
If you use this tool in Galaxy, please cite Blankenberg D, et al. *In preparation.*
</help>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this message because you have the notification service enabled and are
the addressed recipient of this email.