1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/4e6216506b6f/
Changeset: 4e6216506b6f
User: dan
Date: 2015-01-27 19:52:55+00:00
Summary: Fix for re-displaying user registration form when registering as an email of an admin_user that is already declared in galaxy.ini (cntrller r dmb). NB: Automatic refresh of masthead after success did not seem to be occurring.
Affected #: 1 file
diff -r df3b9b974104f827e8e50e7996a396ffdf2bc5bd -r 4e6216506b6f480742d94b0ec0b6206c0e8ebc3d templates/user/register.mako
--- a/templates/user/register.mako
+++ b/templates/user/register.mako
@@ -17,7 +17,7 @@
## An admin user may be creating a new user account, in which case we want to display the registration form.
## But if the current user is not an admin user, then don't display the registration form.
-%if trans.user_is_admin() or not trans.user:
+%if ( cntrller=='admin' and trans.user_is_admin() ) or not trans.user:
${render_registration_form()}
%if trans.app.config.get( 'terms_url', None ) is not None:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/df3b9b974104/
Changeset: df3b9b974104
User: jmchilton
Date: 2015-01-27 18:48:01+00:00
Summary: Add comments to config/galaxy.ini.sample about recent workflow changes.
Thanks to Philip Mabon for letting me know these were not included along with the original changes.
Affected #: 1 file
diff -r 5f59c9e4d36d796b40a4817c38b66f5def94dc2d -r df3b9b974104f827e8e50e7996a396ffdf2bc5bd config/galaxy.ini.sample
--- a/config/galaxy.ini.sample
+++ b/config/galaxy.ini.sample
@@ -764,6 +764,25 @@
# Enable Galaxy to communicate directly with a sequencer
#enable_sequencer_communication = False
+
+# Enable beta workflow modules that should not yet be considered part of Galaxy's
+# stable API.
+#enable_beta_workflow_modules = False
+
+# Force usage of Galaxy's beta workflow scheduler under certain circumstances -
+# this workflow scheduling forces Galaxy to schedule workflows in the background
+# so initial submission of the workflows is significantly sped up. This does
+# however force the user to refresh their history manually to see newly scheduled
+# steps (for "normal" workflows - steps are still scheduled far in advance of
+# them being queued and scheduling here doesn't refer to actual cluster job
+# scheduling).
+# Workflows containing more than the specified number of steps will always use
+# the Galaxy's beta workflow scheduling.
+#force_beta_workflow_scheduled_min_steps=250
+# Switch to using Galaxy's beta workflow scheduling for all workflows involving
+# collections.
+#force_beta_workflow_scheduled_for_collections=False
+
# Enable authentication via OpenID. Allows users to log in to their Galaxy
# account by authenticating with an OpenID provider.
#enable_openid = False
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
2 new commits in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/a439922a4956/
Changeset: a439922a4956
User: dannon
Date: 2015-01-26 21:25:19+00:00
Summary: Allow cancelling of upload jobs. This won't cancel the actual upload, but it'll kill (or prevent) the processing job if it exists. This also allows one to clean up 'zombie' upload jobs that never leave 'upload' state via the admin panel.
Affected #: 1 file
diff -r 0733c87123cbc2f7009a125c5e57d630bb8296a6 -r a439922a4956cdef237925540f4a70922ae8a74c templates/admin/jobs.mako
--- a/templates/admin/jobs.mako
+++ b/templates/admin/jobs.mako
@@ -55,11 +55,7 @@
</tr>
%for job in jobs:
<td>
- %if job.state == 'upload':
-
- %else:
- <input type="checkbox" name="stop" value="${job.id}"/>
- %endif
+ <input type="checkbox" name="stop" value="${job.id}"/></td><td>${job.id}</td>
%if job.history and job.history.user:
https://bitbucket.org/galaxy/galaxy-central/commits/1096077cabbd/
Changeset: 1096077cabbd
User: dannon
Date: 2015-01-26 21:25:29+00:00
Summary: Merge.
Affected #: 2 files
diff -r a439922a4956cdef237925540f4a70922ae8a74c -r 1096077cabbd4046b7da9ce45cecf1151b7ebc75 config/tool_data_table_conf.xml.sample
--- a/config/tool_data_table_conf.xml.sample
+++ b/config/tool_data_table_conf.xml.sample
@@ -60,4 +60,9 @@
<columns>value, dbkey, name, path</columns><file path="tool-data/mosaik_index.loc" /></table>
+ <!-- Locations of indexes in the 2bit format -->
+ <table name="twobit" comment_char="#">
+ <columns>value, path</columns>
+ <file path="tool-data/twobit.loc" />
+ </table></tables>
diff -r a439922a4956cdef237925540f4a70922ae8a74c -r 1096077cabbd4046b7da9ce45cecf1151b7ebc75 lib/galaxy/visualization/genomes.py
--- a/lib/galaxy/visualization/genomes.py
+++ b/lib/galaxy/visualization/genomes.py
@@ -179,33 +179,59 @@
"""
def __init__( self, app ):
+ self.app = app
# Create list of genomes from app.genome_builds
self.genomes = {}
- for key, description in app.genome_builds.get_genome_build_names():
+ # Store internal versions of data tables for twobit and __dbkey__
+ self._table_versions = { 'twobit': None, '__dbkeys__': None }
+ self.reload_genomes()
+
+ def reload_genomes( self ):
+ self.genomes = {}
+ # Store table versions for later
+ for table_name in self._table_versions.keys():
+ table = self.app.tool_data_tables.get( table_name, None )
+ if table is not None:
+ self._table_versions[ table_name ] = table._loaded_content_version
+
+ twobit_table = self.app.tool_data_tables.get( 'twobit', None )
+ twobit_fields = {}
+ if twobit_table is None:
+ # Add genome data (twobit files) to genomes, directly from twobit.loc
+ try:
+ for line in open( os.path.join( self.app.config.tool_data_path, "twobit.loc" ) ):
+ if line.startswith("#"): continue
+ val = line.split()
+ if len( val ) == 2:
+ key, path = val
+ twobit_fields[ key ] = path
+ except IOError, e:
+ # Thrown if twobit.loc does not exist.
+ log.exception( "Error reading twobit.loc: %s", e )
+ for key, description in self.app.genome_builds.get_genome_build_names():
self.genomes[ key ] = Genome( key, description )
+ # Add len files to genomes.
+ self.genomes[ key ].len_file = self.app.genome_builds.get_chrom_info( key )[0]
+ if self.genomes[ key ].len_file:
+ if not os.path.exists( self.genomes[ key ].len_file ):
+ self.genomes[ key ].len_file = None
+ # Add genome data (twobit files) to genomes.
+ if twobit_table is not None:
+ self.genomes[ key ].twobit_file = twobit_table.get_entry( 'value', key, 'path', default=None )
+ elif key in twobit_fields:
+ self.genomes[ key ].twobit_file = twobit_fields[ key ]
+
- # Add len files to genomes.
- len_files = glob.glob( os.path.join( app.config.len_file_path, "*.len" ) )
- for f in len_files:
- key = os.path.split( f )[1].split( ".len" )[0]
- if key in self.genomes:
- self.genomes[ key ].len_file = f
-
- # Add genome data (twobit files) to genomes.
- try:
- for line in open( os.path.join( app.config.tool_data_path, "twobit.loc" ) ):
- if line.startswith("#"): continue
- val = line.split()
- if len( val ) == 2:
- key, path = val
- if key in self.genomes:
- self.genomes[ key ].twobit_file = path
- except IOError, e:
- # Thrown if twobit.loc does not exist.
- log.exception( str( e ) )
+ def check_and_reload( self ):
+ # Check if tables have been modified, if so reload
+ for table_name, table_version in self._table_versions.iteritems():
+ table = self.app.tool_data_tables.get( table_name, None )
+ if table is not None and not table.is_current_version( table_version ):
+ return self.reload_genomes()
def get_build( self, dbkey ):
""" Returns build for the given key. """
+ self.check_and_reload()
rval = None
if dbkey in self.genomes:
rval = self.genomes[ dbkey ]
@@ -214,6 +240,7 @@
def get_dbkeys( self, trans, chrom_info=False, **kwd ):
""" Returns all known dbkeys. If chrom_info is True, only dbkeys with
chromosome lengths are returned. """
+ self.check_and_reload()
dbkeys = []
# Add user's custom keys to dbkeys.
@@ -241,7 +268,7 @@
Returns a naturally sorted list of chroms/contigs for a given dbkey.
Use either chrom or low to specify the starting chrom in the return list.
"""
-
+ self.check_and_reload()
# If there is no dbkey owner, default to current user.
dbkey_owner, dbkey = decode_dbkey( dbkey )
if dbkey_owner:
@@ -303,6 +330,7 @@
Returns true if there is reference data for the specified dbkey. If dbkey is custom,
dbkey_owner is needed to determine if there is reference data.
"""
+ self.check_and_reload()
# Look for key in built-in builds.
if dbkey in self.genomes and self.genomes[ dbkey ].twobit_file:
# There is built-in reference data.
@@ -323,7 +351,7 @@
"""
Return reference data for a build.
"""
-
+ self.check_and_reload()
# If there is no dbkey owner, default to current user.
dbkey_owner, dbkey = decode_dbkey( dbkey )
if dbkey_owner:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/1ab223c3860e/
Changeset: 1ab223c3860e
User: dan
Date: 2015-01-26 20:11:06+00:00
Summary: Load genomes list from data tables for visualizations. Based off of Pull Request: https://bitbucket.org/galaxy/galaxy-central/pull-request/601/load-genomes-l…
Affected #: 2 files
diff -r 0733c87123cbc2f7009a125c5e57d630bb8296a6 -r 1ab223c3860e533785527074f36e2d1e16a596f0 config/tool_data_table_conf.xml.sample
--- a/config/tool_data_table_conf.xml.sample
+++ b/config/tool_data_table_conf.xml.sample
@@ -60,4 +60,9 @@
<columns>value, dbkey, name, path</columns><file path="tool-data/mosaik_index.loc" /></table>
+ <!-- Locations of indexes in the 2bit format -->
+ <table name="twobit" comment_char="#">
+ <columns>value, path</columns>
+ <file path="tool-data/twobit.loc" />
+ </table></tables>
diff -r 0733c87123cbc2f7009a125c5e57d630bb8296a6 -r 1ab223c3860e533785527074f36e2d1e16a596f0 lib/galaxy/visualization/genomes.py
--- a/lib/galaxy/visualization/genomes.py
+++ b/lib/galaxy/visualization/genomes.py
@@ -179,33 +179,59 @@
"""
def __init__( self, app ):
+ self.app = app
# Create list of genomes from app.genome_builds
self.genomes = {}
- for key, description in app.genome_builds.get_genome_build_names():
+ # Store internal versions of data tables for twobit and __dbkey__
+ self._table_versions = { 'twobit': None, '__dbkeys__': None }
+ self.reload_genomes()
+
+ def reload_genomes( self ):
+ self.genomes = {}
+ # Store table versions for later
+ for table_name in self._table_versions.keys():
+ table = self.app.tool_data_tables.get( table_name, None )
+ if table is not None:
+ self._table_versions[ table_name ] = table._loaded_content_version
+
+ twobit_table = self.app.tool_data_tables.get( 'twobit', None )
+ twobit_fields = {}
+ if twobit_table is None:
+ # Add genome data (twobit files) to genomes, directly from twobit.loc
+ try:
+ for line in open( os.path.join( self.app.config.tool_data_path, "twobit.loc" ) ):
+ if line.startswith("#"): continue
+ val = line.split()
+ if len( val ) == 2:
+ key, path = val
+ twobit_fields[ key ] = path
+ except IOError, e:
+ # Thrown if twobit.loc does not exist.
+ log.exception( "Error reading twobit.loc: %s", e )
+ for key, description in self.app.genome_builds.get_genome_build_names():
self.genomes[ key ] = Genome( key, description )
+ # Add len files to genomes.
+ self.genomes[ key ].len_file = self.app.genome_builds.get_chrom_info( key )[0]
+ if self.genomes[ key ].len_file:
+ if not os.path.exists( self.genomes[ key ].len_file ):
+ self.genomes[ key ].len_file = None
+ # Add genome data (twobit files) to genomes.
+ if twobit_table is not None:
+ self.genomes[ key ].twobit_file = twobit_table.get_entry( 'value', key, 'path', default=None )
+ elif key in twobit_fields:
+ self.genomes[ key ].twobit_file = twobit_fields[ key ]
+
- # Add len files to genomes.
- len_files = glob.glob( os.path.join( app.config.len_file_path, "*.len" ) )
- for f in len_files:
- key = os.path.split( f )[1].split( ".len" )[0]
- if key in self.genomes:
- self.genomes[ key ].len_file = f
-
- # Add genome data (twobit files) to genomes.
- try:
- for line in open( os.path.join( app.config.tool_data_path, "twobit.loc" ) ):
- if line.startswith("#"): continue
- val = line.split()
- if len( val ) == 2:
- key, path = val
- if key in self.genomes:
- self.genomes[ key ].twobit_file = path
- except IOError, e:
- # Thrown if twobit.loc does not exist.
- log.exception( str( e ) )
+ def check_and_reload( self ):
+ # Check if tables have been modified, if so reload
+ for table_name, table_version in self._table_versions.iteritems():
+ table = self.app.tool_data_tables.get( table_name, None )
+ if table is not None and not table.is_current_version( table_version ):
+ return self.reload_genomes()
def get_build( self, dbkey ):
""" Returns build for the given key. """
+ self.check_and_reload()
rval = None
if dbkey in self.genomes:
rval = self.genomes[ dbkey ]
@@ -214,6 +240,7 @@
def get_dbkeys( self, trans, chrom_info=False, **kwd ):
""" Returns all known dbkeys. If chrom_info is True, only dbkeys with
chromosome lengths are returned. """
+ self.check_and_reload()
dbkeys = []
# Add user's custom keys to dbkeys.
@@ -241,7 +268,7 @@
Returns a naturally sorted list of chroms/contigs for a given dbkey.
Use either chrom or low to specify the starting chrom in the return list.
"""
-
+ self.check_and_reload()
# If there is no dbkey owner, default to current user.
dbkey_owner, dbkey = decode_dbkey( dbkey )
if dbkey_owner:
@@ -303,6 +330,7 @@
Returns true if there is reference data for the specified dbkey. If dbkey is custom,
dbkey_owner is needed to determine if there is reference data.
"""
+ self.check_and_reload()
# Look for key in built-in builds.
if dbkey in self.genomes and self.genomes[ dbkey ].twobit_file:
# There is built-in reference data.
@@ -323,7 +351,7 @@
"""
Return reference data for a build.
"""
-
+ self.check_and_reload()
# If there is no dbkey owner, default to current user.
dbkey_owner, dbkey = decode_dbkey( dbkey )
if dbkey_owner:
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/418633a36608/
Changeset: 418633a36608
User: dannon
Date: 2015-01-26 15:43:02+00:00
Summary: One more path permission increase for admins under remote_user.
Affected #: 1 file
diff -r 39f3f0586c48f81452e3a7a6af1f5e97c4ef8ed0 -r 418633a366083e171f2a3583b591d81a7b333c41 lib/galaxy/web/framework/middleware/remoteuser.py
--- a/lib/galaxy/web/framework/middleware/remoteuser.py
+++ b/lib/galaxy/web/framework/middleware/remoteuser.py
@@ -116,6 +116,8 @@
pass # Admin users may be impersonating, allow logout.
elif path_info.startswith( '/user/manage_user_info' ) and environ[ self.remote_user_header ] in self.admin_users:
pass # Admin users need to be able to change user information
+ elif path_info.startswith( '/user/edit_info' ) and environ[ self.remote_user_header ] in self.admin_users:
+ pass # Admin users need to be able to change user information
elif path_info.startswith( '/user/api_keys' ):
pass # api keys can be managed when remote_user is in use
elif path_info.startswith( '/user/edit_username' ):
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.