galaxy-commits
Threads by month
- ----- 2024 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2023 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2022 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2021 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2020 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2019 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2018 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2017 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2016 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2015 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2014 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2013 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2012 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2011 -----
- December
- November
- October
- September
- August
- July
- June
- May
- April
- March
- February
- January
- ----- 2010 -----
- December
- November
- October
- September
- August
- July
- June
- May
October 2013
- 1 participants
- 226 discussions
commit/galaxy-central: jmchilton: Refactor job status checking logic into its own module.
by commits-noreply@bitbucket.org 10 Oct '13
by commits-noreply@bitbucket.org 10 Oct '13
10 Oct '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/c78f3f687fbc/
Changeset: c78f3f687fbc
User: jmchilton
Date: 2013-10-10 07:17:17
Summary: Refactor job status checking logic into its own module.
Add unit tests for some basic behaviors.
Affected #: 5 files
diff -r ba45d14a2c8fe72554390872deb5f4ffdd66170f -r c78f3f687fbc8592d32a67738e57d847c00ce0c6 lib/galaxy/jobs/__init__.py
--- a/lib/galaxy/jobs/__init__.py
+++ b/lib/galaxy/jobs/__init__.py
@@ -25,6 +25,7 @@
from galaxy.util.bunch import Bunch
from galaxy.util.expressions import ExpressionContext
from galaxy.util.json import from_json_string
+from .output_checker import check_output
log = logging.getLogger( __name__ )
@@ -1079,158 +1080,7 @@
self.cleanup()
def check_tool_output( self, stdout, stderr, tool_exit_code, job ):
- """
- Check the output of a tool - given the stdout, stderr, and the tool's
- exit code, return True if the tool exited successfully and False
- otherwise. No exceptions should be thrown. If this code encounters
- an exception, it returns True so that the workflow can continue;
- otherwise, a bug in this code could halt workflow progress.
- Note that, if the tool did not define any exit code handling or
- any stdio/stderr handling, then it reverts back to previous behavior:
- if stderr contains anything, then False is returned.
- Note that the job id is just for messages.
- """
- # By default, the tool succeeded. This covers the case where the code
- # has a bug but the tool was ok, and it lets a workflow continue.
- success = True
-
- try:
- # Check exit codes and match regular expressions against stdout and
- # stderr if this tool was configured to do so.
- # If there is a regular expression for scanning stdout/stderr,
- # then we assume that the tool writer overwrote the default
- # behavior of just setting an error if there is *anything* on
- # stderr.
- if ( len( self.tool.stdio_regexes ) > 0 or
- len( self.tool.stdio_exit_codes ) > 0 ):
- # Check the exit code ranges in the order in which
- # they were specified. Each exit_code is a StdioExitCode
- # that includes an applicable range. If the exit code was in
- # that range, then apply the error level and add a message.
- # If we've reached a fatal error rule, then stop.
- max_error_level = galaxy.tools.StdioErrorLevel.NO_ERROR
- if tool_exit_code != None:
- for stdio_exit_code in self.tool.stdio_exit_codes:
- if ( tool_exit_code >= stdio_exit_code.range_start and
- tool_exit_code <= stdio_exit_code.range_end ):
- # Tack on a generic description of the code
- # plus a specific code description. For example,
- # this might prepend "Job 42: Warning (Out of Memory)\n".
- code_desc = stdio_exit_code.desc
- if ( None == code_desc ):
- code_desc = ""
- tool_msg = ( "%s: Exit code %d (%s)" % (
- galaxy.tools.StdioErrorLevel.desc( stdio_exit_code.error_level ),
- tool_exit_code,
- code_desc ) )
- log.info( "Job %s: %s" % (job.get_id_tag(), tool_msg) )
- stderr = tool_msg + "\n" + stderr
- max_error_level = max( max_error_level,
- stdio_exit_code.error_level )
- if ( max_error_level >=
- galaxy.tools.StdioErrorLevel.FATAL ):
- break
-
- if max_error_level < galaxy.tools.StdioErrorLevel.FATAL:
- # We'll examine every regex. Each regex specifies whether
- # it is to be run on stdout, stderr, or both. (It is
- # possible for neither stdout nor stderr to be scanned,
- # but those regexes won't be used.) We record the highest
- # error level, which are currently "warning" and "fatal".
- # If fatal, then we set the job's state to ERROR.
- # If warning, then we still set the job's state to OK
- # but include a message. We'll do this if we haven't seen
- # a fatal error yet
- for regex in self.tool.stdio_regexes:
- # If ( this regex should be matched against stdout )
- # - Run the regex's match pattern against stdout
- # - If it matched, then determine the error level.
- # o If it was fatal, then we're done - break.
- # Repeat the stdout stuff for stderr.
- # TODO: Collapse this into a single function.
- if ( regex.stdout_match ):
- regex_match = re.search( regex.match, stdout,
- re.IGNORECASE )
- if ( regex_match ):
- rexmsg = self.regex_err_msg( regex_match, regex)
- log.info( "Job %s: %s"
- % ( job.get_id_tag(), rexmsg ) )
- stdout = rexmsg + "\n" + stdout
- max_error_level = max( max_error_level,
- regex.error_level )
- if ( max_error_level >=
- galaxy.tools.StdioErrorLevel.FATAL ):
- break
-
- if ( regex.stderr_match ):
- regex_match = re.search( regex.match, stderr,
- re.IGNORECASE )
- if ( regex_match ):
- rexmsg = self.regex_err_msg( regex_match, regex)
- log.info( "Job %s: %s"
- % ( job.get_id_tag(), rexmsg ) )
- stderr = rexmsg + "\n" + stderr
- max_error_level = max( max_error_level,
- regex.error_level )
- if ( max_error_level >=
- galaxy.tools.StdioErrorLevel.FATAL ):
- break
-
- # If we encountered a fatal error, then we'll need to set the
- # job state accordingly. Otherwise the job is ok:
- if max_error_level >= galaxy.tools.StdioErrorLevel.FATAL:
- success = False
- else:
- success = True
-
- # When there are no regular expressions and no exit codes to check,
- # default to the previous behavior: when there's anything on stderr
- # the job has an error, and the job is ok otherwise.
- else:
- # TODO: Add in the tool and job id:
- # log.debug( "Tool did not define exit code or stdio handling; "
- # + "checking stderr for success" )
- if stderr:
- success = False
- else:
- success = True
-
- # On any exception, return True.
- except:
- tb = traceback.format_exc()
- log.warning( "Tool check encountered unexpected exception; "
- + "assuming tool was successful: " + tb )
- success = True
-
- # Store the modified stdout and stderr in the job:
- if None != job:
- job.stdout = stdout
- job.stderr = stderr
-
- return success
-
- def regex_err_msg( self, match, regex ):
- """
- Return a message about the match on tool output using the given
- ToolStdioRegex regex object. The regex_match is a MatchObject
- that will contain the string matched on.
- """
- # Get the description for the error level:
- err_msg = galaxy.tools.StdioErrorLevel.desc( regex.error_level ) + ": "
- # If there's a description for the regular expression, then use it.
- # Otherwise, we'll take the first 256 characters of the match.
- if None != regex.desc:
- err_msg += regex.desc
- else:
- mstart = match.start()
- mend = match.end()
- err_msg += "Matched on "
- # TODO: Move the constant 256 somewhere else besides here.
- if mend - mstart > 256:
- err_msg += match.string[ mstart : mstart+256 ] + "..."
- else:
- err_msg += match.string[ mstart: mend ]
- return err_msg
+ return check_output( self.tool, stdout, stderr, tool_exit_code, job )
def cleanup( self ):
# remove temporary files
diff -r ba45d14a2c8fe72554390872deb5f4ffdd66170f -r c78f3f687fbc8592d32a67738e57d847c00ce0c6 lib/galaxy/jobs/error_level.py
--- /dev/null
+++ b/lib/galaxy/jobs/error_level.py
@@ -0,0 +1,25 @@
+
+
+# These determine stdio-based error levels from matching on regular expressions
+# and exit codes. They are meant to be used comparatively, such as showing
+# that warning < fatal. This is really meant to just be an enum.
+class StdioErrorLevel( object ):
+ NO_ERROR = 0
+ LOG = 1
+ WARNING = 2
+ FATAL = 3
+ MAX = 3
+ descs = {
+ NO_ERROR: 'No error',
+ LOG: 'Log',
+ WARNING: 'Warning',
+ FATAL: 'Fatal error',
+ }
+
+ @staticmethod
+ def desc( error_level ):
+ err_msg = "Unknown error"
+ if ( error_level > 0 and
+ error_level <= StdioErrorLevel.MAX ):
+ err_msg = StdioErrorLevel.descs[ error_level ]
+ return err_msg
diff -r ba45d14a2c8fe72554390872deb5f4ffdd66170f -r c78f3f687fbc8592d32a67738e57d847c00ce0c6 lib/galaxy/jobs/output_checker.py
--- /dev/null
+++ b/lib/galaxy/jobs/output_checker.py
@@ -0,0 +1,164 @@
+import re
+from .error_level import StdioErrorLevel
+import traceback
+
+from logging import getLogger
+log = getLogger( __name__ )
+
+
+def check_output( tool, stdout, stderr, tool_exit_code, job ):
+ """
+ Check the output of a tool - given the stdout, stderr, and the tool's
+ exit code, return True if the tool exited successfully and False
+ otherwise. No exceptions should be thrown. If this code encounters
+ an exception, it returns True so that the workflow can continue;
+ otherwise, a bug in this code could halt workflow progress.
+
+ Note that, if the tool did not define any exit code handling or
+ any stdio/stderr handling, then it reverts back to previous behavior:
+ if stderr contains anything, then False is returned.
+
+ Note that the job id is just for messages.
+ """
+ # By default, the tool succeeded. This covers the case where the code
+ # has a bug but the tool was ok, and it lets a workflow continue.
+ success = True
+
+ try:
+ # Check exit codes and match regular expressions against stdout and
+ # stderr if this tool was configured to do so.
+ # If there is a regular expression for scanning stdout/stderr,
+ # then we assume that the tool writer overwrote the default
+ # behavior of just setting an error if there is *anything* on
+ # stderr.
+ if ( len( tool.stdio_regexes ) > 0 or
+ len( tool.stdio_exit_codes ) > 0 ):
+ # Check the exit code ranges in the order in which
+ # they were specified. Each exit_code is a StdioExitCode
+ # that includes an applicable range. If the exit code was in
+ # that range, then apply the error level and add a message.
+ # If we've reached a fatal error rule, then stop.
+ max_error_level = StdioErrorLevel.NO_ERROR
+ if tool_exit_code != None:
+ for stdio_exit_code in tool.stdio_exit_codes:
+ if ( tool_exit_code >= stdio_exit_code.range_start and
+ tool_exit_code <= stdio_exit_code.range_end ):
+ # Tack on a generic description of the code
+ # plus a specific code description. For example,
+ # this might prepend "Job 42: Warning (Out of Memory)\n".
+ code_desc = stdio_exit_code.desc
+ if ( None == code_desc ):
+ code_desc = ""
+ tool_msg = ( "%s: Exit code %d (%s)" % (
+ StdioErrorLevel.desc( stdio_exit_code.error_level ),
+ tool_exit_code,
+ code_desc ) )
+ log.info( "Job %s: %s" % (job.get_id_tag(), tool_msg) )
+ stderr = tool_msg + "\n" + stderr
+ max_error_level = max( max_error_level,
+ stdio_exit_code.error_level )
+ if ( max_error_level >=
+ StdioErrorLevel.FATAL ):
+ break
+
+ if max_error_level < StdioErrorLevel.FATAL:
+ # We'll examine every regex. Each regex specifies whether
+ # it is to be run on stdout, stderr, or both. (It is
+ # possible for neither stdout nor stderr to be scanned,
+ # but those regexes won't be used.) We record the highest
+ # error level, which are currently "warning" and "fatal".
+ # If fatal, then we set the job's state to ERROR.
+ # If warning, then we still set the job's state to OK
+ # but include a message. We'll do this if we haven't seen
+ # a fatal error yet
+ for regex in tool.stdio_regexes:
+ # If ( this regex should be matched against stdout )
+ # - Run the regex's match pattern against stdout
+ # - If it matched, then determine the error level.
+ # o If it was fatal, then we're done - break.
+ # Repeat the stdout stuff for stderr.
+ # TODO: Collapse this into a single function.
+ if ( regex.stdout_match ):
+ regex_match = re.search( regex.match, stdout,
+ re.IGNORECASE )
+ if ( regex_match ):
+ rexmsg = __regex_err_msg( regex_match, regex)
+ log.info( "Job %s: %s"
+ % ( job.get_id_tag(), rexmsg ) )
+ stdout = rexmsg + "\n" + stdout
+ max_error_level = max( max_error_level,
+ regex.error_level )
+ if ( max_error_level >=
+ StdioErrorLevel.FATAL ):
+ break
+
+ if ( regex.stderr_match ):
+ regex_match = re.search( regex.match, stderr,
+ re.IGNORECASE )
+ if ( regex_match ):
+ rexmsg = __regex_err_msg( regex_match, regex)
+ log.info( "Job %s: %s"
+ % ( job.get_id_tag(), rexmsg ) )
+ stderr = rexmsg + "\n" + stderr
+ max_error_level = max( max_error_level,
+ regex.error_level )
+ if ( max_error_level >=
+ StdioErrorLevel.FATAL ):
+ break
+
+ # If we encountered a fatal error, then we'll need to set the
+ # job state accordingly. Otherwise the job is ok:
+ if max_error_level >= StdioErrorLevel.FATAL:
+ success = False
+ else:
+ success = True
+
+ # When there are no regular expressions and no exit codes to check,
+ # default to the previous behavior: when there's anything on stderr
+ # the job has an error, and the job is ok otherwise.
+ else:
+ # TODO: Add in the tool and job id:
+ # log.debug( "Tool did not define exit code or stdio handling; "
+ # + "checking stderr for success" )
+ if stderr:
+ success = False
+ else:
+ success = True
+
+ # On any exception, return True.
+ except:
+ tb = traceback.format_exc()
+ log.warning( "Tool check encountered unexpected exception; "
+ + "assuming tool was successful: " + tb )
+ success = True
+
+ # Store the modified stdout and stderr in the job:
+ if None != job:
+ job.stdout = stdout
+ job.stderr = stderr
+
+ return success
+
+
+def __regex_err_msg( match, regex ):
+ """
+ Return a message about the match on tool output using the given
+ ToolStdioRegex regex object. The regex_match is a MatchObject
+ that will contain the string matched on.
+ """
+ # Get the description for the error level:
+ err_msg = StdioErrorLevel.desc( regex.error_level ) + ": "
+ # If there's a description for the regular expression, then use it.
+ # Otherwise, we'll take the first 256 characters of the match.
+ if None != regex.desc:
+ err_msg += regex.desc
+ else:
+ mstart = match.start()
+ mend = match.end()
+ err_msg += "Matched on "
+ # TODO: Move the constant 256 somewhere else besides here.
+ if mend - mstart > 256:
+ err_msg += match.string[ mstart : mstart + 256 ] + "..."
+ else:
+ err_msg += match.string[ mstart: mend ]
+ return err_msg
diff -r ba45d14a2c8fe72554390872deb5f4ffdd66170f -r c78f3f687fbc8592d32a67738e57d847c00ce0c6 lib/galaxy/tools/__init__.py
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -34,6 +34,7 @@
from sqlalchemy import and_
from galaxy import jobs, model
+from galaxy.jobs.error_level import StdioErrorLevel
from galaxy.datatypes.metadata import JobExternalOutputMetadataWrapper
from galaxy.jobs import ParallelismInfo
from galaxy.tools.actions import DefaultToolAction
@@ -64,33 +65,11 @@
from tool_shed.util import shed_util_common
from .loader import load_tool, template_macro_params
+
log = logging.getLogger( __name__ )
WORKFLOW_PARAMETER_REGULAR_EXPRESSION = re.compile( '''\$\{.+?\}''' )
-# These determine stdio-based error levels from matching on regular expressions
-# and exit codes. They are meant to be used comparatively, such as showing
-# that warning < fatal. This is really meant to just be an enum.
-class StdioErrorLevel( object ):
- NO_ERROR = 0
- LOG = 1
- WARNING = 2
- FATAL = 3
- MAX = 3
- descs = {
- NO_ERROR : 'No error',
- LOG: 'Log',
- WARNING : 'Warning',
- FATAL : 'Fatal error'
- }
- @staticmethod
- def desc( error_level ):
- err_msg = "Unknown error"
- if ( error_level > 0 and
- error_level <= StdioErrorLevel.MAX ):
- err_msg = StdioErrorLevel.descs[ error_level ]
- return err_msg
-
class ToolNotFoundException( Exception ):
pass
diff -r ba45d14a2c8fe72554390872deb5f4ffdd66170f -r c78f3f687fbc8592d32a67738e57d847c00ce0c6 test/unit/test_job_output_checker.py
--- /dev/null
+++ b/test/unit/test_job_output_checker.py
@@ -0,0 +1,62 @@
+from unittest import TestCase
+from galaxy.util.bunch import Bunch
+from galaxy.jobs.output_checker import check_output
+from galaxy.jobs.error_level import StdioErrorLevel
+
+
+class OutputCheckerTestCase( TestCase ):
+
+ def setUp( self ):
+ self.tool = Bunch(
+ stdio_regexes=[],
+ stdio_exit_codes=[],
+ )
+ self.job = Bunch(
+ stdout=None,
+ stderr=None,
+ get_id_tag=lambda: "test_id",
+ )
+ self.stdout = ''
+ self.stderr = ''
+ self.tool_exit_code = None
+
+ def test_default_no_stderr_success( self ):
+ self.__assertSuccessful()
+
+ def test_default_stderr_failure( self ):
+ self.stderr = 'foo'
+ self.__assertNotSuccessful()
+
+ def test_exit_code_error( self ):
+ mock_exit_code = Bunch( range_start=1, range_end=1, error_level=StdioErrorLevel.FATAL, desc=None )
+ self.tool.stdio_exit_codes.append( mock_exit_code )
+ self.tool_exit_code = 1
+ self.__assertNotSuccessful()
+
+ def test_exit_code_success( self ):
+ mock_exit_code = Bunch( range_start=1, range_end=1, error_level=StdioErrorLevel.FATAL, desc=None )
+ self.tool.stdio_exit_codes.append( mock_exit_code )
+ self.tool_exit_code = 0
+ self.__assertSuccessful()
+
+ def test_problematic_strings( self ):
+ problematic_str = '\x80abc'
+ regex_rule = Bunch( match=r'.abc', stdout_match=False, stderr_match=True, error_level=StdioErrorLevel.FATAL, desc=None )
+ self.tool.stdio_regexes = [ regex_rule ]
+ self.stderr = problematic_str
+ self.__assertNotSuccessful()
+
+ problematic_str = '\x80abc'
+ regex_rule = Bunch( match=r'.abcd', stdout_match=False, stderr_match=True, error_level=StdioErrorLevel.FATAL, desc=None )
+ self.tool.stdio_regexes = [ regex_rule ]
+ self.stderr = problematic_str
+ self.__assertSuccessful()
+
+ def __assertSuccessful( self ):
+ self.assertTrue( self.__check_output() )
+
+ def __assertNotSuccessful( self ):
+ self.assertFalse( self.__check_output() )
+
+ def __check_output( self ):
+ return check_output( self.tool, self.stdout, self.stderr, self.tool_exit_code, self.job )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: martenson: user activation flow tweaked, link at the masthead message providing activation resending
by commits-noreply@bitbucket.org 09 Oct '13
by commits-noreply@bitbucket.org 09 Oct '13
09 Oct '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/ba45d14a2c8f/
Changeset: ba45d14a2c8f
User: martenson
Date: 2013-10-09 21:12:36
Summary: user activation flow tweaked, link at the masthead message providing activation resending
Affected #: 3 files
diff -r 9b738973adafffa45605e2c27ecb769ea21e8d94 -r ba45d14a2c8fe72554390872deb5f4ffdd66170f lib/galaxy/webapps/galaxy/controllers/user.py
--- a/lib/galaxy/webapps/galaxy/controllers/user.py
+++ b/lib/galaxy/webapps/galaxy/controllers/user.py
@@ -153,11 +153,11 @@
if not trans.user.active and trans.app.config.user_activation_on: # Account activation is ON and the user is INACTIVE.
if ( trans.app.config.activation_grace_period != 0 ): # grace period is ON
if self.is_outside_grace_period( trans, trans.user.create_time ): # User is outside the grace period. Login is disabled and he will have the activation email resent.
- message = self.resend_verification_email( trans, trans.user.email )
+ message, status = self.resend_verification_email( trans, trans.user.email, trans.user.username )
else: # User is within the grace period, let him log in.
pass
else: # Grace period is off. Login is disabled and user will have the activation email resent.
- message = self.resend_verification_email( trans, trans.user.email )
+ message, status = self.resend_verification_email( trans, trans.user.email, trans.user.username )
elif not user_openid.user or user_openid.user == trans.user:
if openid_provider_obj.id:
user_openid.provider = openid_provider_obj.id
@@ -484,6 +484,7 @@
status = kwd.get( 'status', 'error' )
email = kwd.get( 'email', '' )
password = kwd.get( 'password', '' )
+ username = kwd.get( 'username', '' )
redirect = kwd.get( 'redirect', trans.request.referer ).strip()
success = False
user = trans.sa_session.query( trans.app.model.User ).filter( trans.app.model.User.table.c.email==email ).first()
@@ -502,11 +503,11 @@
elif trans.app.config.user_activation_on and not user.active: # activation is ON and the user is INACTIVE
if ( trans.app.config.activation_grace_period != 0 ): # grace period is ON
if self.is_outside_grace_period( trans, user.create_time ): # User is outside the grace period. Login is disabled and he will have the activation email resent.
- message = self.resend_verification_email( trans, email )
+ message, status = self.resend_verification_email( trans, email, username )
else: # User is within the grace period, let him log in.
message, success, status = self.proceed_login( trans, user, redirect )
else: # Grace period is off. Login is disabled and user will have the activation email resent.
- message = self.resend_verification_email( trans, email )
+ message, status = self.resend_verification_email( trans, email, username )
else: # activation is OFF
message, success, status = self.proceed_login( trans, user, redirect )
return ( message, status, user, success )
@@ -526,19 +527,36 @@
success = True
status = 'done'
return message, success, status
-
- def resend_verification_email( self, trans, email ):
+
+ @web.expose
+ def resend_verification ( self, trans ):
"""
- Function resends the verification email in case user wants to log in with an inactive account.
+ Exposed function for use outside of the class. E.g. when the user clicks on the resend link in the masthead.
"""
- is_activation_sent = self.send_verification_email( trans, email )
+ message, status = self.resend_verification_email( trans, None, None )
+ if status == 'done':
+ return trans.show_ok_message( message )
+ else:
+ return trans.show_error_message( message )
+
+ def resend_verification_email( self, trans, email, username ):
+ """
+ Function resends the verification email in case user wants to log in with an inactive account or he clicks the resend link.
+ """
+ if email is None: # User is coming from outside registration form, load email from trans
+ email = trans.user.email
+ if username is None: # User is coming from outside registration form, load email from trans
+ username = trans.user.username
+ is_activation_sent = self.send_verification_email( trans, email, username)
if is_activation_sent:
- message = 'This account has not been activated yet. The activation link has been sent again. Please check your email address %s.<br>' % email
+ message = 'This account has not been activated yet. The activation link has been sent again. Please check your email address <b>%s</b> including the spam/trash folder.<br><a target="_top" href="%s">Return to the home page</a>.' % ( email, url_for( '/' ) )
+ status = 'error'
else:
- message = 'This account has not been activated yet but we are unable to send the activation link. Please contact your local Galaxy administrator.'
+ message = 'This account has not been activated yet but we are unable to send the activation link. Please contact your local Galaxy administrator.<br><a target="_top" href="%s">Return to the home page</a>.' % url_for( '/' )
+ status = 'error'
if trans.app.config.error_email_to is not None:
- message += ' Contact: %s' % trans.app.config.error_email_to
- return message
+ message += '<br>Error contact: %s' % trans.app.config.error_email_to
+ return message, status
def is_outside_grace_period ( self, trans, create_time ):
"""
@@ -729,9 +747,9 @@
success = False
else:
if trans.webapp.name == 'galaxy' and trans.app.config.user_activation_on:
- is_activation_sent = self.send_verification_email( trans, email )
+ is_activation_sent = self.send_verification_email( trans, email, username )
if is_activation_sent:
- message = 'Now logged in as %s.<br>Verification email has been sent to your email address. Please verify it by clicking the activation link in the email.<br><a target="_top" href="%s">Return to the home page.</a>' % ( user.email, url_for( '/' ) )
+ message = 'Now logged in as %s.<br>Verification email has been sent to your email address. Please verify it by clicking the activation link in the email.<br>Please check your spam/trash folder in case you cannot find the message.<br><a target="_top" href="%s">Return to the home page.</a>' % ( user.email, url_for( '/' ) )
success = True
else:
message = 'Unable to send activation email, please contact your local Galaxy administrator.'
@@ -743,10 +761,12 @@
success = True
return ( message, status, user, success )
- def send_verification_email( self, trans, email ):
+ def send_verification_email( self, trans, email, username ):
"""
Send the verification email containing the activation link to the user's email.
"""
+ if username is None:
+ username = trans.user.username
activation_link = self.prepare_activation_link( trans, email )
body = ("Hello %s,\n\n"
@@ -755,7 +775,7 @@
"By clicking on the above link and opening a Galaxy account you are also confirming that you have read and agreed to Galaxy's Terms and Conditions for use of this service (%s). This includes a quota limit of one account per user. Attempts to subvert this limit by creating multiple accounts or through any other method may result in termination of all associated accounts and data.\n\n"
"Please contact us if you need help with your account at: %s. You can also browse resources available at: %s. \n\n"
"More about the Galaxy Project can be found at galaxyproject.org\n\n"
- "Your Galaxy Team" % ( trans.user.username, email, datetime.utcnow().strftime( "%D" ), trans.request.host, activation_link,trans.app.config.terms_url, trans.app.config.error_email_to, trans.app.config.instance_resource_url ))
+ "Your Galaxy Team" % ( username, email, datetime.utcnow().strftime( "%D" ), trans.request.host, activation_link,trans.app.config.terms_url, trans.app.config.error_email_to, trans.app.config.instance_resource_url ))
to = email
frm = trans.app.config.activation_email
subject = 'Galaxy Account Activation'
diff -r 9b738973adafffa45605e2c27ecb769ea21e8d94 -r ba45d14a2c8fe72554390872deb5f4ffdd66170f lib/galaxy/webapps/tool_shed/config.py
--- a/lib/galaxy/webapps/tool_shed/config.py
+++ b/lib/galaxy/webapps/tool_shed/config.py
@@ -66,6 +66,7 @@
self.activation_grace_period = kwargs.get( 'activation_grace_period', None )
self.inactivity_box_content = kwargs.get( 'inactivity_box_content', None )
self.registration_warning_message = kwargs.get( 'registration_warning_message', None )
+ self.terms_url = kwargs.get( 'terms_url', None )
self.blacklist_location = kwargs.get( 'blacklist_file', None )
self.blacklist_content = None
self.remote_user_maildomain = kwargs.get( "remote_user_maildomain", None )
diff -r 9b738973adafffa45605e2c27ecb769ea21e8d94 -r ba45d14a2c8fe72554390872deb5f4ffdd66170f templates/base/base_panels.mako
--- a/templates/base/base_panels.mako
+++ b/templates/base/base_panels.mako
@@ -312,7 +312,7 @@
</div>
%if self.show_inactivity_warning:
<div id="inactivebox" class="panel-warning-message">
- ${app.config.inactivity_box_content}
+ ${app.config.inactivity_box_content} <a href="${h.url_for( controller='user', action='resend_verification' )}">Resend verification.</a></div>
%endif
${self.overlay(visible=self.overlay_visible)}
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: jmchilton: Fix stdio return code handling that has been broken for a few releases.
by commits-noreply@bitbucket.org 09 Oct '13
by commits-noreply@bitbucket.org 09 Oct '13
09 Oct '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/9b738973adaf/
Changeset: 9b738973adaf
User: jmchilton
Date: 2013-10-09 19:48:26
Summary: Fix stdio return code handling that has been broken for a few releases.
Affected #: 1 file
diff -r 50f5157b063f5567fa2cea820d85bb4abd233f91 -r 9b738973adafffa45605e2c27ecb769ea21e8d94 lib/galaxy/jobs/runners/__init__.py
--- a/lib/galaxy/jobs/runners/__init__.py
+++ b/lib/galaxy/jobs/runners/__init__.py
@@ -167,16 +167,28 @@
if job_wrapper.dependency_shell_commands:
commands = "; ".join( job_wrapper.dependency_shell_commands + [ commands ] )
+ # Copying work dir outputs or setting metadata will mask return code of
+ # tool command. If these are used capture the return code and ensure
+ # the last thing that happens is an exit with return code.
+ capture_return_code_command = "; return_code=$?"
+ captured_return_code = False
+
# Append commands to copy job outputs based on from_work_dir attribute.
if include_work_dir_outputs:
work_dir_outputs = self.get_work_dir_outputs( job_wrapper )
if work_dir_outputs:
+ if not captured_return_code:
+ commands += capture_return_code_command
+ captured_return_code = True
commands += "; " + "; ".join( [ "if [ -f %s ] ; then cp %s %s ; fi" %
( source_file, source_file, destination ) for ( source_file, destination ) in work_dir_outputs ] )
# Append metadata setting commands, we don't want to overwrite metadata
# that was copied over in init_meta(), as per established behavior
if include_metadata and job_wrapper.requires_setting_metadata:
+ if not captured_return_code:
+ commands += capture_return_code_command
+ captured_return_code = True
commands += "; cd %s; " % os.path.abspath( os.getcwd() )
commands += job_wrapper.setup_external_metadata(
exec_dir = os.path.abspath( os.getcwd() ),
@@ -185,6 +197,11 @@
output_fnames = job_wrapper.get_output_fnames(),
set_extension = False,
kwds = { 'overwrite' : False } )
+
+
+ if captured_return_code:
+ commands += '; sh -c "exit $return_code"'
+
return commands
def get_work_dir_outputs( self, job_wrapper ):
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: greg: Code cleanup for defining and building repository dependency relationships when installing tool shed repositories into Galaxy and fixes for handling Galaxy tool panel section selection when installing repository dependencies.
by commits-noreply@bitbucket.org 09 Oct '13
by commits-noreply@bitbucket.org 09 Oct '13
09 Oct '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/50f5157b063f/
Changeset: 50f5157b063f
User: greg
Date: 2013-10-09 19:33:31
Summary: Code cleanup for defining and building repository dependency relationships when installing tool shed repositories into Galaxy and fixes for handling Galaxy tool panel section selection when installing repository dependencies.
Affected #: 8 files
diff -r 53d53422782c2b1837959d6192e81c9886e2d4eb -r 50f5157b063f5567fa2cea820d85bb4abd233f91 lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py
--- a/lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py
+++ b/lib/galaxy/webapps/galaxy/api/tool_shed_repositories.py
@@ -286,7 +286,6 @@
installation_dict = dict( install_repository_dependencies=install_repository_dependencies,
new_tool_panel_section=new_tool_panel_section,
no_changes_checked=False,
- reinstalling=False,
repo_info_dicts=repo_info_dicts,
tool_panel_section=tool_panel_section,
tool_path=tool_path,
diff -r 53d53422782c2b1837959d6192e81c9886e2d4eb -r 50f5157b063f5567fa2cea820d85bb4abd233f91 lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
--- a/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
+++ b/lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py
@@ -854,7 +854,6 @@
installation_dict = dict( install_repository_dependencies=install_repository_dependencies,
new_tool_panel_section=new_tool_panel_section,
no_changes_checked=False,
- reinstalling=False,
repo_info_dicts=repo_info_dicts,
tool_panel_section=tool_panel_section,
tool_path=tool_path,
@@ -1099,7 +1098,6 @@
tool_path=tool_path,
tool_shed_url=tool_shed_url,
repo_info_dicts=repo_info_dicts,
- reinstalling=True,
install_repository_dependencies=install_repository_dependencies,
no_changes_checked=no_changes_checked,
tool_panel_section=tool_panel_section,
diff -r 53d53422782c2b1837959d6192e81c9886e2d4eb -r 50f5157b063f5567fa2cea820d85bb4abd233f91 lib/galaxy/webapps/tool_shed/model/__init__.py
--- a/lib/galaxy/webapps/tool_shed/model/__init__.py
+++ b/lib/galaxy/webapps/tool_shed/model/__init__.py
@@ -154,6 +154,10 @@
self.times_downloaded = times_downloaded
self.deprecated = deprecated
+ def allow_push( self, app ):
+ repo = hg.repository( ui.ui(), self.repo_path( app ) )
+ return repo.ui.config( 'web', 'allow_push' )
+
def can_change_type( self, app ):
# Allow changing the type only if the repository has no contents, has never been installed, or has never been changed from
# the default type.
@@ -172,12 +176,6 @@
return True
return False
- def to_dict( self, view='collection', value_mapper=None ):
- rval = super( Repository, self ).to_dict( view=view, value_mapper=value_mapper )
- if 'user_id' in rval:
- rval[ 'owner' ] = self.user.username
- return rval
-
def get_changesets_for_setting_metadata( self, app ):
type_class = self.get_type_class( app )
return type_class.get_changesets_for_setting_metadata( app, self )
@@ -185,6 +183,11 @@
def get_type_class( self, app ):
return app.repository_types_registry.get_class_by_label( self.type )
+ def is_new( self, app ):
+ repo = hg.repository( ui.ui(), self.repo_path( app ) )
+ tip_ctx = repo.changectx( repo.changelog.tip() )
+ return tip_ctx.rev() < 0
+
def repo_path( self, app ):
return app.hgweb_config_manager.get_entry( os.path.join( "repos", self.user.username, self.name ) )
@@ -193,19 +196,6 @@
tip_ctx = repo.changectx( repo.changelog.tip() )
return "%s:%s" % ( str( tip_ctx.rev() ), str( repo.changectx( repo.changelog.tip() ) ) )
- def tip( self, app ):
- repo = hg.repository( ui.ui(), self.repo_path( app ) )
- return str( repo.changectx( repo.changelog.tip() ) )
-
- def is_new( self, app ):
- repo = hg.repository( ui.ui(), self.repo_path( app ) )
- tip_ctx = repo.changectx( repo.changelog.tip() )
- return tip_ctx.rev() < 0
-
- def allow_push( self, app ):
- repo = hg.repository( ui.ui(), self.repo_path( app ) )
- return repo.ui.config( 'web', 'allow_push' )
-
def set_allow_push( self, app, usernames, remove_auth='' ):
allow_push = util.listify( self.allow_push( app ) )
if remove_auth:
@@ -227,6 +217,16 @@
fp.write( line )
fp.close()
+ def tip( self, app ):
+ repo = hg.repository( ui.ui(), self.repo_path( app ) )
+ return str( repo.changectx( repo.changelog.tip() ) )
+
+ def to_dict( self, view='collection', value_mapper=None ):
+ rval = super( Repository, self ).to_dict( view=view, value_mapper=value_mapper )
+ if 'user_id' in rval:
+ rval[ 'owner' ] = self.user.username
+ return rval
+
class RepositoryMetadata( object, Dictifiable ):
dict_collection_visible_keys = ( 'id', 'repository_id', 'changeset_revision', 'malicious', 'downloadable', 'has_repository_dependencies', 'includes_datatypes',
diff -r 53d53422782c2b1837959d6192e81c9886e2d4eb -r 50f5157b063f5567fa2cea820d85bb4abd233f91 lib/tool_shed/galaxy_install/repository_util.py
--- a/lib/tool_shed/galaxy_install/repository_util.py
+++ b/lib/tool_shed/galaxy_install/repository_util.py
@@ -430,7 +430,6 @@
install_repository_dependencies = installation_dict[ 'install_repository_dependencies' ]
new_tool_panel_section = installation_dict[ 'new_tool_panel_section' ]
no_changes_checked = installation_dict[ 'no_changes_checked' ]
- reinstalling = installation_dict[ 'reinstalling' ]
repo_info_dicts = installation_dict[ 'repo_info_dicts' ]
tool_panel_section = installation_dict[ 'tool_panel_section' ]
tool_path = installation_dict[ 'tool_path' ]
@@ -440,7 +439,6 @@
tool_path=tool_path,
tool_shed_url=tool_shed_url,
repo_info_dicts=repo_info_dicts,
- reinstalling=reinstalling,
install_repository_dependencies=install_repository_dependencies,
no_changes_checked=no_changes_checked,
tool_panel_section=tool_panel_section,
diff -r 53d53422782c2b1837959d6192e81c9886e2d4eb -r 50f5157b063f5567fa2cea820d85bb4abd233f91 lib/tool_shed/util/metadata_util.py
--- a/lib/tool_shed/util/metadata_util.py
+++ b/lib/tool_shed/util/metadata_util.py
@@ -1240,10 +1240,14 @@
return repository_dependency_tup, is_valid, error_message
repo = hg.repository( suc.get_configured_ui(), repository.repo_path( app ) )
# The received changeset_revision may be None since defining it in the dependency definition is optional. If this is the case,
- # the default will be to set it's value to the repository dependency tip revision.
- if changeset_revision is None:
- changeset_revision = str( repo.changectx( repo.changelog.tip() ) )
+ # the default will be to set its value to the repository dependency tip revision. This probably occurs only when handling
+ # circular dependency definitions.
+ tip_ctx = repo.changectx( repo.changelog.tip() )
+ # Make sure the repo.changelog includes at least 1 revision.
+ if changeset_revision is None and tip_ctx.rev() >= 0:
+ changeset_revision = str( tip_ctx )
repository_dependency_tup = [ toolshed, name, owner, changeset_revision, prior_installation_required, str( only_if_compiling_contained_td ) ]
+ return repository_dependency_tup, is_valid, error_message
else:
# Find the specified changeset revision in the repository's changelog to see if it's valid.
found = False
diff -r 53d53422782c2b1837959d6192e81c9886e2d4eb -r 50f5157b063f5567fa2cea820d85bb4abd233f91 lib/tool_shed/util/repository_dependency_util.py
--- a/lib/tool_shed/util/repository_dependency_util.py
+++ b/lib/tool_shed/util/repository_dependency_util.py
@@ -95,155 +95,116 @@
return False
return True
-def create_repository_dependency_objects( trans, tool_path, tool_shed_url, repo_info_dicts, reinstalling=False, install_repository_dependencies=False,
+def create_repository_dependency_objects( trans, tool_path, tool_shed_url, repo_info_dicts, install_repository_dependencies=False,
no_changes_checked=False, tool_panel_section=None, new_tool_panel_section=None ):
"""
Discover all repository dependencies and make sure all tool_shed_repository and associated repository_dependency records exist as well as
- the dependency relationships between installed repositories. This method is called when new repositories are being installed into a Galaxy
- instance and when uninstalled repositories are being reinstalled.
+ the dependency relationships between installed repositories. This method is called when uninstalled repositories are being reinstalled.
+ If the user elected to install repository dependencies, all items in the all_repo_info_dicts list will be processed. However, if repository
+ dependencies are not to be installed, only those items contained in the received repo_info_dicts list will be processed.
"""
- # The following list will be maintained within this method to contain all created or updated tool shed repositories, including repository dependencies
- # that may not be installed.
+ # The following list will be maintained within this method to contain all created or updated tool shed repositories, including repository
+ # dependencies that may not be installed.
all_created_or_updated_tool_shed_repositories = []
- # There will be a one-to-one mapping between items in 3 lists: created_or_updated_tool_shed_repositories, tool_panel_section_keys and filtered_repo_info_dicts.
- # The 3 lists will filter out repository dependencies that are not to be installed.
+ # There will be a one-to-one mapping between items in 3 lists: created_or_updated_tool_shed_repositories, tool_panel_section_keys and
+ # filtered_repo_info_dicts. The 3 lists will filter out repository dependencies that are not to be installed.
created_or_updated_tool_shed_repositories = []
tool_panel_section_keys = []
- # Repositories will be filtered (e.g., if already installed, if elected to not be installed, etc), so filter the associated repo_info_dicts accordingly.
+ # Repositories will be filtered (e.g., if already installed, if elected to not be installed, etc), so filter the associated repo_info_dicts
+ # accordingly.
filtered_repo_info_dicts = []
- # Discover all repository dependencies and retrieve information for installing them. Even if the user elected to not install repository dependencies we have
- # to make sure all repository dependency objects exist so that the appropriate repository dependency relationships can be built.
+ # Discover all repository dependencies and retrieve information for installing them. Even if the user elected to not install repository
+ # dependencies we have to make sure all repository dependency objects exist so that the appropriate repository dependency relationships can
+ # be built.
all_required_repo_info_dict = common_install_util.get_required_repo_info_dicts( trans, tool_shed_url, repo_info_dicts )
all_repo_info_dicts = all_required_repo_info_dict.get( 'all_repo_info_dicts', [] )
if not all_repo_info_dicts:
# No repository dependencies were discovered so process the received repositories.
all_repo_info_dicts = [ rid for rid in repo_info_dicts ]
for repo_info_dict in all_repo_info_dicts:
- for name, repo_info_tuple in repo_info_dict.items():
- description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \
- suc.get_repo_info_tuple_contents( repo_info_tuple )
- # Make sure the repository was not already installed.
- installed_tool_shed_repository, installed_changeset_revision = suc.repository_was_previously_installed( trans, tool_shed_url, name, repo_info_tuple )
- if installed_tool_shed_repository:
- tool_section, new_tool_panel_section, tool_panel_section_key = tool_util.handle_tool_panel_selection( trans=trans,
- metadata=installed_tool_shed_repository.metadata,
- no_changes_checked=no_changes_checked,
- tool_panel_section=tool_panel_section,
- new_tool_panel_section=new_tool_panel_section )
- if reinstalling or install_repository_dependencies:
- # If the user elected to install repository dependencies, all items in the all_repo_info_dicts list will be processed. However, if
- # repository dependencies are not to be installed, only those items contained in the received repo_info_dicts list will be processed.
- if is_in_repo_info_dicts( repo_info_dict, repo_info_dicts ) or install_repository_dependencies:
- if installed_tool_shed_repository.status in [ trans.model.ToolShedRepository.installation_status.ERROR,
- trans.model.ToolShedRepository.installation_status.NEW,
- trans.model.ToolShedRepository.installation_status.UNINSTALLED ]:
+ # If the user elected to install repository dependencies, all items in the all_repo_info_dicts list will be processed. However, if
+ # repository dependencies are not to be installed, only those items contained in the received repo_info_dicts list will be processed
+ # but the all_repo_info_dicts list will be used to create all defined repository dependency relationships.
+ if is_in_repo_info_dicts( repo_info_dict, repo_info_dicts ) or install_repository_dependencies:
+ for name, repo_info_tuple in repo_info_dict.items():
+ can_update_db_record = False
+ description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \
+ suc.get_repo_info_tuple_contents( repo_info_tuple )
+ # See if the repository has an existing record in the database.
+ repository_db_record, installed_changeset_revision = \
+ suc.repository_was_previously_installed( trans, tool_shed_url, name, repo_info_tuple )
+ if repository_db_record:
+ if repository_db_record.status in [ trans.model.ToolShedRepository.installation_status.INSTALLED,
+ trans.model.ToolShedRepository.installation_status.CLONING,
+ trans.model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS,
+ trans.model.ToolShedRepository.installation_status.INSTALLING_REPOSITORY_DEPENDENCIES,
+ trans.model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES,
+ trans.model.ToolShedRepository.installation_status.LOADING_PROPRIETARY_DATATYPES ]:
+ log.debug( "Skipping installation of tool_shed_repository '%s' because it's installation status is '%s'." % \
+ ( str( repository_db_record.name ), str( repository_db_record.status ) ) )
+ else:
+ if repository_db_record.status in [ trans.model.ToolShedRepository.installation_status.ERROR,
+ trans.model.ToolShedRepository.installation_status.NEW,
+ trans.model.ToolShedRepository.installation_status.UNINSTALLED ]:
# The current tool shed repository is not currently installed, so we can update its record in the database.
- can_update = True
- name = installed_tool_shed_repository.name
- description = installed_tool_shed_repository.description
- installed_changeset_revision = installed_tool_shed_repository.installed_changeset_revision
- metadata_dict = installed_tool_shed_repository.metadata
- dist_to_shed = installed_tool_shed_repository.dist_to_shed
- elif installed_tool_shed_repository.status in [ trans.model.ToolShedRepository.installation_status.DEACTIVATED ]:
+ name = repository_db_record.name
+ installed_changeset_revision = repository_db_record.installed_changeset_revision
+ metadata_dict = repository_db_record.metadata
+ dist_to_shed = repository_db_record.dist_to_shed
+ can_update_db_record = True
+ elif repository_db_record.status in [ trans.model.ToolShedRepository.installation_status.DEACTIVATED ]:
# The current tool shed repository is deactivated, so updating its database record is not necessary - just activate it.
- log.debug( "Reactivating deactivated tool_shed_repository '%s'." % str( installed_tool_shed_repository.name ) )
- common_install_util.activate_repository( trans, installed_tool_shed_repository )
- can_update = False
- else:
- # The tool shed repository currently being processed is already installed or is in the process of being installed, so it's record
- # in the database cannot be updated.
- if installed_tool_shed_repository.status in [ trans.model.ToolShedRepository.installation_status.INSTALLED ]:
- log.debug( "Skipping installation of tool_shed_repository '%s' because it is already installed." % \
- str( installed_tool_shed_repository.name ) )
- else:
- log.debug( "Skipping installation of tool_shed_repository '%s' because it's installation status is '%s'." % \
- ( str( installed_tool_shed_repository.name ), str( installed_tool_shed_repository.status ) ) )
- can_update = False
+ log.debug( "Reactivating deactivated tool_shed_repository '%s'." % str( repository_db_record.name ) )
+ common_install_util.activate_repository( trans, repository_db_record )
+ # No additional updates to the database record are necessary.
+ can_update_db_record = False
+ elif repository_db_record.status not in [ trans.model.ToolShedRepository.installation_status.NEW ]:
+ # Set changeset_revision here so suc.create_or_update_tool_shed_repository will find the previously installed
+ # and uninstalled repository instead of creating a new record.
+ changeset_revision = repository_db_record.installed_changeset_revision
+ suc.reset_previously_installed_repository( trans, repository_db_record )
+ can_update_db_record = True
+ else:
+ # No record exists in the database for the repository currently being processed.
+ installed_changeset_revision = changeset_revision
+ metadata_dict = {}
+ dist_to_shed = False
+ can_update_db_record = True
+ if can_update_db_record:
+ # The database record for the tool shed repository currently being processed can be updated. Get the repository metadata
+ # to see where it was previously located in the tool panel.
+ if repository_db_record and repository_db_record.metadata:
+ tool_section, new_tool_panel_section, tool_panel_section_key = \
+ tool_util.handle_tool_panel_selection( trans=trans,
+ metadata=repository_db_record.metadata,
+ no_changes_checked=no_changes_checked,
+ tool_panel_section=tool_panel_section,
+ new_tool_panel_section=new_tool_panel_section )
else:
- # This block will be reached only if reinstalling is True, install_repository_dependencies is False and is_in_repo_info_dicts is False.
- # The tool shed repository currently being processed must be a repository dependency that the user elected to not install, so it's
- # record in the database cannot be updated.
- debug_msg = "Skipping installation of tool_shed_repository '%s' because it is likely a " % str( installed_tool_shed_repository.name )
- debug_msg += "repository dependency that was elected to not be installed."
- log.debug( debug_msg )
- can_update = False
- else:
- # This block will be reached only if reinstalling is False and install_repository_dependencies is False. This implies that the tool shed
- # repository currently being processed has already been installed.
- if installed_tool_shed_repository.status in [ trans.model.ToolShedRepository.installation_status.INSTALLED ]:
- # Since the repository currently being processed is already in the INSTALLED state, skip it and process the next repository in the
- # list if there is one.
- log.debug( "Skipping installation of tool_shed_repository '%s' because it's installation status is '%s'." % \
- ( str( installed_tool_shed_repository.name ), str( installed_tool_shed_repository.status ) ) )
- can_update = False
- else:
- # The repository currently being processed is in some state other than INSTALLED, so reset it for installation.
- debug_msg = "Resetting tool_shed_repository '%s' for installation.\n" % str( installed_tool_shed_repository.name )
- debug_msg += "The current state of the tool_shed_repository is:\n"
- debug_msg += "deleted: %s\n" % str( installed_tool_shed_repository.deleted )
- debug_msg += "tool_shed_status: %s\n" % str( installed_tool_shed_repository.tool_shed_status )
- debug_msg += "uninstalled: %s\n" % str( installed_tool_shed_repository.uninstalled )
- debug_msg += "status: %s\n" % str( installed_tool_shed_repository.status )
- debug_msg += "error_message: %s\n" % str( installed_tool_shed_repository.error_message )
- log.debug( debug_msg )
- suc.reset_previously_installed_repository( trans, installed_tool_shed_repository )
- can_update = True
- reinstalling = True
- # Set changeset_revision here so suc.create_or_update_tool_shed_repository will find the previously installed
- # and uninstalled repository instead of creating a new record.
- changeset_revision = installed_tool_shed_repository.installed_changeset_revision
- else:
- # A tool shed repository is being installed into a Galaxy instance for the first time, or we're attempting to install it or reinstall it resulted
- # in an error. In the latter case, the repository record in the database has no metadata and it's status has been set to 'New'. In either case,
- # the repository's database record may be updated.
- can_update = True
- installed_changeset_revision = changeset_revision
- metadata_dict = {}
- dist_to_shed = False
- if can_update:
- # The database record for the tool shed repository currently being processed can be updated.
- if reinstalling or install_repository_dependencies:
- # Get the repository metadata to see where it was previously located in the tool panel.
- if installed_tool_shed_repository:
- # The tool shed repository status is one of 'New', 'Uninstalled', or 'Error'.
- tool_section, new_tool_panel_section, tool_panel_section_key = tool_util.handle_tool_panel_selection( trans=trans,
- metadata=installed_tool_shed_repository.metadata,
- no_changes_checked=no_changes_checked,
- tool_panel_section=tool_panel_section,
- new_tool_panel_section=new_tool_panel_section )
- else:
- # We're installing a new tool shed repository that does not yet have a database record. This repository is a repository dependency
- # of a different repository being installed.
+ # We're installing a new tool shed repository that does not yet have a database record.
tool_panel_section_key, tool_section = tool_util.handle_tool_panel_section( trans,
tool_panel_section=tool_panel_section,
new_tool_panel_section=new_tool_panel_section )
-
- else:
- # We're installing a new tool shed repository that does not yet have a database record.
- tool_panel_section_key, tool_section = tool_util.handle_tool_panel_section( trans,
- tool_panel_section=tool_panel_section,
- new_tool_panel_section=new_tool_panel_section )
- tool_shed_repository = suc.create_or_update_tool_shed_repository( app=trans.app,
- name=name,
- description=description,
- installed_changeset_revision=changeset_revision,
- ctx_rev=ctx_rev,
- repository_clone_url=repository_clone_url,
- metadata_dict={},
- status=trans.model.ToolShedRepository.installation_status.NEW,
- current_changeset_revision=changeset_revision,
- owner=repository_owner,
- dist_to_shed=False )
- if tool_shed_repository not in all_created_or_updated_tool_shed_repositories:
- # Add the processed tool shed repository to the list of all processed repositories maintained within this method.
- all_created_or_updated_tool_shed_repositories.append( tool_shed_repository )
- # Only append the tool shed repository to the list of created_or_updated_tool_shed_repositories if it is supposed to be installed.
- if install_repository_dependencies or is_in_repo_info_dicts( repo_info_dict, repo_info_dicts ):
- if tool_shed_repository not in created_or_updated_tool_shed_repositories:
- # Keep the one-to-one mapping between items in 3 lists.
- created_or_updated_tool_shed_repositories.append( tool_shed_repository )
- tool_panel_section_keys.append( tool_panel_section_key )
- filtered_repo_info_dicts.append( repo_info_dict )
+ tool_shed_repository = suc.create_or_update_tool_shed_repository( app=trans.app,
+ name=name,
+ description=description,
+ installed_changeset_revision=changeset_revision,
+ ctx_rev=ctx_rev,
+ repository_clone_url=repository_clone_url,
+ metadata_dict={},
+ status=trans.model.ToolShedRepository.installation_status.NEW,
+ current_changeset_revision=changeset_revision,
+ owner=repository_owner,
+ dist_to_shed=False )
+ if tool_shed_repository not in all_created_or_updated_tool_shed_repositories:
+ all_created_or_updated_tool_shed_repositories.append( tool_shed_repository )
+ # Only append the tool shed repository to the list of created_or_updated_tool_shed_repositories if it is supposed to be installed.
+ if install_repository_dependencies or is_in_repo_info_dicts( repo_info_dict, repo_info_dicts ):
+ if tool_shed_repository not in created_or_updated_tool_shed_repositories:
+ # Keep the one-to-one mapping between items in 3 lists.
+ created_or_updated_tool_shed_repositories.append( tool_shed_repository )
+ tool_panel_section_keys.append( tool_panel_section_key )
+ filtered_repo_info_dicts.append( repo_info_dict )
# Build repository dependency relationships even if the user chose to not install repository dependencies.
build_repository_dependency_relationships( trans, all_repo_info_dicts, all_created_or_updated_tool_shed_repositories )
return created_or_updated_tool_shed_repositories, tool_panel_section_keys, all_repo_info_dicts, filtered_repo_info_dicts
diff -r 53d53422782c2b1837959d6192e81c9886e2d4eb -r 50f5157b063f5567fa2cea820d85bb4abd233f91 lib/tool_shed/util/shed_util_common.py
--- a/lib/tool_shed/util/shed_util_common.py
+++ b/lib/tool_shed/util/shed_util_common.py
@@ -1467,9 +1467,17 @@
def reset_previously_installed_repository( trans, repository ):
"""
- Reset the atrributes of a tool_shed_repository that was previsouly installed. The repository will be in some state other than with a
- status of INSTALLED, so all atributes will be set to the default NEW state. This will enable the repository to be freshly installed.
+ Reset the attributes of a tool_shed_repository that was previously installed. The repository will be in some state other than INSTALLED,
+ so all attributes will be set to the default NEW state. This will enable the repository to be freshly installed.
"""
+ debug_msg = "Resetting tool_shed_repository '%s' for installation.\n" % str( repository.name )
+ debug_msg += "The current state of the tool_shed_repository is:\n"
+ debug_msg += "deleted: %s\n" % str( repository.deleted )
+ debug_msg += "tool_shed_status: %s\n" % str( repository.tool_shed_status )
+ debug_msg += "uninstalled: %s\n" % str( repository.uninstalled )
+ debug_msg += "status: %s\n" % str( repository.status )
+ debug_msg += "error_message: %s\n" % str( repository.error_message )
+ log.debug( debug_msg )
repository.deleted = False
repository.tool_shed_status = None
repository.uninstalled = False
diff -r 53d53422782c2b1837959d6192e81c9886e2d4eb -r 50f5157b063f5567fa2cea820d85bb4abd233f91 lib/tool_shed/util/tool_util.py
--- a/lib/tool_shed/util/tool_util.py
+++ b/lib/tool_shed/util/tool_util.py
@@ -745,7 +745,8 @@
else:
elems.append( elem )
else:
- log.debug( "The '%s' data table file was not found, but was expected to be copied from '%s' during repository installation.", tool_data_table_conf_filename, TOOL_DATA_TABLE_FILE_SAMPLE_NAME )
+ log.debug( "The '%s' data table file was not found, but was expected to be copied from '%s' during repository installation.",
+ tool_data_table_conf_filename, TOOL_DATA_TABLE_FILE_SAMPLE_NAME )
for elem in elems:
if elem.tag == 'table':
for file_elem in elem.findall( 'file' ):
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: dan: Fix and enhance admin/view_tool_data_tables.
by commits-noreply@bitbucket.org 09 Oct '13
by commits-noreply@bitbucket.org 09 Oct '13
09 Oct '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/53d53422782c/
Changeset: 53d53422782c
User: dan
Date: 2013-10-09 18:01:54
Summary: Fix and enhance admin/view_tool_data_tables.
Affected #: 2 files
diff -r affd8fe24fc149b354fa3b603d205f4539e0804d -r 53d53422782c2b1837959d6192e81c9886e2d4eb lib/galaxy/tools/data/__init__.py
--- a/lib/galaxy/tools/data/__init__.py
+++ b/lib/galaxy/tools/data/__init__.py
@@ -43,6 +43,9 @@
except KeyError:
return default
+ def get_tables( self ):
+ return self.data_tables
+
def load_from_config_file( self, config_filename, tool_data_path, from_shed_config=False ):
"""
This method is called under 3 conditions:
diff -r affd8fe24fc149b354fa3b603d205f4539e0804d -r 53d53422782c2b1837959d6192e81c9886e2d4eb templates/admin/view_data_tables_registry.mako
--- a/templates/admin/view_data_tables_registry.mako
+++ b/templates/admin/view_data_tables_registry.mako
@@ -7,34 +7,39 @@
<%
ctr = 0
- data_tables = trans.app.tool_data_tables
- sorted_data_table_elem_names = sorted( trans.app.tool_data_tables.data_table_elem_names )
+ sorted_data_tables = sorted( trans.app.tool_data_tables.get_tables().items() )
%><div class="toolForm">
- <div class="toolFormTitle">Current data table registry contains ${len( sorted_data_table_elem_names )} data tables</div>
+ <div class="toolFormTitle">Current data table registry contains ${len( sorted_data_tables )} data tables</div><div class="toolFormBody"><table class="manage-table colored" border="0" cellspacing="0" cellpadding="0" width="100%"><tr><th bgcolor="#D8D8D8">Name</th>
+ <th bgcolor="#D8D8D8">Filename</th><th bgcolor="#D8D8D8">Tool data path</th><th bgcolor="#D8D8D8">Missing index file</th></tr>
- %for data_table_elem_name in sorted_data_table_elem_names:
- <% data_table = data_tables[ data_table_elem_name ] %>
+ %for data_table_elem_name, data_table in sorted_data_tables:
%if ctr % 2 == 1:
<tr class="odd_row">
%else:
<tr class="tr">
%endif
<td>${data_table.name}</td>
- <td>${data_table.tool_data_path}</td>
- <td>
- %if data_table.missing_index_file:
- ${data_table.missing_index_file}
+ %for i, ( filename, file_dict ) in enumerate( data_table.filenames.iteritems() ):
+ %if i > 0:
+ <tr><td></td>
%endif
- </td>
- </tr>
+ <td>${ filename | h }</td>
+ <td>${ file_dict.get( 'tool_data_path' ) | h }</td>
+ <td>
+ %if not file_dict.get( 'found' ):
+ missing
+ %endif
+ </td>
+ </tr>
+ %endfor
<% ctr += 1 %>
%endfor
</table>
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: dan: Fix for lib/galaxy/datatypes/converters/fastq_to_fqtoc.py
by commits-noreply@bitbucket.org 09 Oct '13
by commits-noreply@bitbucket.org 09 Oct '13
09 Oct '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/affd8fe24fc1/
Changeset: affd8fe24fc1
User: dan
Date: 2013-10-09 16:52:36
Summary: Fix for lib/galaxy/datatypes/converters/fastq_to_fqtoc.py
Affected #: 1 file
diff -r 9741409d89322b3da25e13be8a65a062a688f823 -r affd8fe24fc149b354fa3b603d205f4539e0804d lib/galaxy/datatypes/converters/fastq_to_fqtoc.py
--- a/lib/galaxy/datatypes/converters/fastq_to_fqtoc.py
+++ b/lib/galaxy/datatypes/converters/fastq_to_fqtoc.py
@@ -28,7 +28,7 @@
lines_per_chunk = 4*sequences
chunk_begin = 0
- in_file = open(input_name)
+ in_file = open(input_fname)
out_file.write('{"sections" : [');
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: dan: Remove references to g2.trac.bx.psu.edu. Several references still exist in rgenetics converter tools and rgenetics test-data.
by commits-noreply@bitbucket.org 09 Oct '13
by commits-noreply@bitbucket.org 09 Oct '13
09 Oct '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/9741409d8932/
Changeset: 9741409d8932
User: dan
Date: 2013-10-09 16:50:05
Summary: Remove references to g2.trac.bx.psu.edu. Several references still exist in rgenetics converter tools and rgenetics test-data.
Affected #: 3 files
diff -r 8af9e0ac6d2a5fba8e69bd7fb5ae980d7ba1614a -r 9741409d89322b3da25e13be8a65a062a688f823 lib/galaxy/config.py
--- a/lib/galaxy/config.py
+++ b/lib/galaxy/config.py
@@ -192,7 +192,7 @@
self.message_box_content = kwargs.get( 'message_box_content', None )
self.message_box_class = kwargs.get( 'message_box_class', 'info' )
self.support_url = kwargs.get( 'support_url', 'http://wiki.g2.bx.psu.edu/Support' )
- self.wiki_url = kwargs.get( 'wiki_url', 'http://g2.trac.bx.psu.edu/' )
+ self.wiki_url = kwargs.get( 'wiki_url', 'http://wiki.galaxyproject.org/' )
self.blog_url = kwargs.get( 'blog_url', None )
self.screencasts_url = kwargs.get( 'screencasts_url', None )
self.library_import_dir = kwargs.get( 'library_import_dir', None )
diff -r 8af9e0ac6d2a5fba8e69bd7fb5ae980d7ba1614a -r 9741409d89322b3da25e13be8a65a062a688f823 templates/webapps/galaxy/admin/center.mako
--- a/templates/webapps/galaxy/admin/center.mako
+++ b/templates/webapps/galaxy/admin/center.mako
@@ -106,7 +106,7 @@
<p><strong>Data Security and Data Libraries</strong></p><p/><strong>Security</strong> - Data security in Galaxy is a new feature, so familiarize yourself with the details which can be found
- here or in our <a href="http://g2.trac.bx.psu.edu/wiki/SecurityFeatures" target="_blank">data security page</a>. The data security
+ here or in our <a href="http://wiki.galaxyproject.org/Learn/Security%20Features" target="_blank">data security page</a>. The data security
process incorporates users, groups and roles, and enables the application of certain permissions on datasets, specifically "access"
and "manage permissions". By default, the "manage permissions" permission is associated with the dataset owner's private role, and
the "access" permission is not set, making the dataset public. With these default permissions, users should not see any difference
diff -r 8af9e0ac6d2a5fba8e69bd7fb5ae980d7ba1614a -r 9741409d89322b3da25e13be8a65a062a688f823 universe_wsgi.ini.sample
--- a/universe_wsgi.ini.sample
+++ b/universe_wsgi.ini.sample
@@ -368,7 +368,7 @@
#logo_url = /
# The URL linked by the "Galaxy Wiki" link in the "Help" menu.
-#wiki_url = http://wiki.g2.bx.psu.edu/
+#wiki_url = http://wiki.galaxyproject.org/
# The URL linked by the "Support" link in the "Help" menu.
#support_url = http://wiki.g2.bx.psu.edu/Support
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: Dave Bouvier: Prevent duplicate lines from being written to an env.sh file when installing a tool dependency or re-running a tool migration script with tool dependencies specified to be installed.
by commits-noreply@bitbucket.org 08 Oct '13
by commits-noreply@bitbucket.org 08 Oct '13
08 Oct '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/8af9e0ac6d2a/
Changeset: 8af9e0ac6d2a
User: Dave Bouvier
Date: 2013-10-08 22:48:05
Summary: Prevent duplicate lines from being written to an env.sh file when installing a tool dependency or re-running a tool migration script with tool dependencies specified to be installed.
Affected #: 3 files
diff -r b61927f340f30a636204ec560522acf9b76fd262 -r 8af9e0ac6d2a5fba8e69bd7fb5ae980d7ba1614a lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py
--- a/lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/fabric_util.py
@@ -19,12 +19,41 @@
from fabric.api import lcd
from fabric.api import local
from fabric.api import settings
+from fabric.api import hide
log = logging.getLogger( __name__ )
INSTALLATION_LOG = 'INSTALLATION.log'
VIRTUALENV_URL = 'https://pypi.python.org/packages/source/v/virtualenv/virtualenv-1.9.1.tar.gz'
+def add_to_env_shell_file( text, env_shell_file ):
+ '''
+ Append a line to a file, if this line does not already exist in the file. Creates the file and sets the mode to
+ executable if the file does not exist. Equivalent to a local version of fabric.contrib.files.append.
+ '''
+ if not os.path.exists( env_shell_file ):
+ local( 'touch %s' % env_shell_file )
+ # Explicitly set env.sh executable.
+ with settings( hide( 'everything' ), warn_only=True ):
+ local( 'chmod +x %s' % env_shell_file )
+ return_code = 0
+ # Convert the contents to a list, in order to support adding one or more lines to env.sh.
+ if isinstance( text, basestring ):
+ text = [ text ]
+ for line in text:
+ # Build a regex to search for the relevant line in env.sh.
+ regex = td_common_util.egrep_escape( line )
+ # If the line exists, egrep will return a success.
+ with settings( hide( 'everything' ), warn_only=True ):
+ egrep_cmd = 'egrep "^%s$" %s' % ( regex, env_shell_file )
+ contains_line = local( egrep_cmd ).succeeded
+ if contains_line:
+ continue
+ # If not, then append the line, escaping any single quotes in the shell command.
+ line = line.replace( "'", r"'\\''" )
+ return_code = local( "echo '%s' >> %s" % ( line, env_shell_file ) ).return_code
+ return return_code
+
def check_fabric_version():
version = env.version
if int( version.split( "." )[ 0 ] ) < 1:
@@ -301,8 +330,8 @@
for env_var_dict in env_var_dicts:
# Check for the presence of the $ENV[] key string and populate it if possible.
env_var_dict = handle_environment_variables( app, tool_dependency, install_dir, env_var_dict, cmds )
- env_command = td_common_util.create_or_update_env_shell_file( install_dir, env_var_dict )
- return_code = handle_command( app, tool_dependency, install_dir, env_command )
+ env_entry, env_file = td_common_util.create_or_update_env_shell_file( install_dir, env_var_dict )
+ return_code = add_to_env_shell_file( env_entry, env_file )
if return_code:
return
elif action_type == 'set_environment_for_install':
@@ -344,13 +373,13 @@
log.error( "virtualenv's site-packages directory '%s' does not exist", output.stdout )
return
modify_env_command_dict = dict( name="PYTHONPATH", action="prepend_to", value=output.stdout )
- modify_env_command = td_common_util.create_or_update_env_shell_file( install_dir, modify_env_command_dict )
- return_code = handle_command( app, tool_dependency, install_dir, modify_env_command )
+ env_entry, env_file = td_common_util.create_or_update_env_shell_file( install_dir, env_var_dict )
+ return_code = add_to_env_shell_file( env_entry, env_file )
if return_code:
return
modify_env_command_dict = dict( name="PATH", action="prepend_to", value=os.path.join( venv_directory, "bin" ) )
- modify_env_command = td_common_util.create_or_update_env_shell_file( install_dir, modify_env_command_dict )
- return_code = handle_command( app, tool_dependency, install_dir, modify_env_command )
+ env_entry, env_file = td_common_util.create_or_update_env_shell_file( install_dir, modify_env_command_dict )
+ return_code = add_to_env_shell_file( env_entry, env_file )
if return_code:
return
elif action_type == 'shell_command':
diff -r b61927f340f30a636204ec560522acf9b76fd262 -r 8af9e0ac6d2a5fba8e69bd7fb5ae980d7ba1614a lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
--- a/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/install_util.py
@@ -820,10 +820,10 @@
type='set_environment',
status=app.model.ToolDependency.installation_status.INSTALLING,
set_status=True )
- cmd = td_common_util.create_or_update_env_shell_file( install_dir, env_var_dict )
+ env_entry, env_file = td_common_util.create_or_update_env_shell_file( install_dir, env_var_dict )
if env_var_version == '1.0':
# Handle setting environment variables using a fabric method.
- fabric_util.handle_command( app, tool_dependency, install_dir, cmd )
+ fabric_util.add_to_env_shell_file( env_entry, env_file )
sa_session.refresh( tool_dependency )
if tool_dependency.status != app.model.ToolDependency.installation_status.ERROR:
tool_dependency.status = app.model.ToolDependency.installation_status.INSTALLED
diff -r b61927f340f30a636204ec560522acf9b76fd262 -r 8af9e0ac6d2a5fba8e69bd7fb5ae980d7ba1614a lib/tool_shed/galaxy_install/tool_dependencies/td_common_util.py
--- a/lib/tool_shed/galaxy_install/tool_dependencies/td_common_util.py
+++ b/lib/tool_shed/galaxy_install/tool_dependencies/td_common_util.py
@@ -1,5 +1,6 @@
import logging
import os
+import re
import shutil
import sys
import tarfile
@@ -170,26 +171,9 @@
changed_value = '%s' % env_var_value
elif env_var_action == 'append_to':
changed_value = '$%s:%s' % ( env_var_name, env_var_value )
- line = "%s=%s; export %s" % (env_var_name, changed_value, env_var_name)
- return create_or_update_env_shell_file_with_command(install_dir, line)
-
-
-def create_or_update_env_shell_file_with_command( install_dir, command ):
- """
- Return a shell expression which when executed will create or update
- a Galaxy env.sh dependency file in the specified install_dir containing
- the supplied command.
- """
- env_shell_file_path = '%s/env.sh' % install_dir
- if os.path.exists( env_shell_file_path ):
- write_action = '>>'
- else:
- write_action = '>'
- cmd = "echo %s %s %s;chmod +x %s" % ( __shellquote(command),
- write_action,
- __shellquote(env_shell_file_path),
- __shellquote(env_shell_file_path))
- return cmd
+ line = "%s=%s; export %s" % ( env_var_name, changed_value, env_var_name )
+ env_shell_file_path = os.path.join( install_dir, 'env.sh' )
+ return line, env_shell_file_path
def download_binary( url, work_dir ):
'''
@@ -199,6 +183,17 @@
dir = url_download( work_dir, downloaded_filename, url, extract=False )
return downloaded_filename
+def egrep_escape( text ):
+ """Escape ``text`` to allow literal matching using egrep."""
+ regex = re.escape( text )
+ # Seems like double escaping is needed for \
+ regex = regex.replace( '\\\\', '\\\\\\' )
+ # Triple-escaping seems to be required for $ signs
+ regex = regex.replace( r'\$', r'\\\$' )
+ # Whereas single quotes should not be escaped
+ regex = regex.replace( r"\'", "'" )
+ return regex
+
def format_traceback():
ex_type, ex, tb = sys.exc_info()
return ''.join( traceback.format_tb( tb ) )
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
commit/galaxy-central: jgoecks: Fixes for 82b1789: (a) incorporate default values correctly and (b) use Backbone-style gets to access config values.
by commits-noreply@bitbucket.org 08 Oct '13
by commits-noreply@bitbucket.org 08 Oct '13
08 Oct '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/b61927f340f3/
Changeset: b61927f340f3
User: jgoecks
Date: 2013-10-08 18:25:10
Summary: Fixes for 82b1789: (a) incorporate default values correctly and (b) use Backbone-style gets to access config values.
Affected #: 1 file
diff -r 5d5729d40b4f394dc6884d22daa39e3696842b80 -r b61927f340f30a636204ec560522acf9b76fd262 static/scripts/viz/trackster/tracks.js
--- a/static/scripts/viz/trackster/tracks.js
+++ b/static/scripts/viz/trackster/tracks.js
@@ -222,7 +222,7 @@
],
saved_values: obj_dict.prefs,
onchange: function() {
- this.track.set_name(this.track.config.values.name);
+ this.track.set_name(this.track.config.get('values').name);
}
});
this.prefs = this.config.get('values');
@@ -1136,7 +1136,7 @@
},
get_base_color: function(base) {
- return this.config.values[ base.toLowerCase() + '_color' ] || this.config.values[ 'n_color' ];
+ return this.config.get('values')[ base.toLowerCase() + '_color' ] || this.config.get('values')[ 'n_color' ];
}
});
@@ -1893,16 +1893,29 @@
*/
var Config = Backbone.Model.extend({
initialize: function(options) {
- // values is a simple param_key -- value dictionary shared with drawables.
- this.set('values', {});
+ // values is a simple param_key-to-value dictionary used to store
+ // param values.
+ var values = {};
+
+ // Set default values.
+ _.each(options.params, function(p) {
+ values[p.key] = p.default_value;
+ });
// Restore saved values.
if (options.saved_values) {
- this.restore_values(options.saved_values);
+ _.each( this.get('params'), function(p) {
+ if (p.key in options.saved_values) {
+ values[p.key] = options.saved_values[p.key];
+ }
+ });
}
+
+ this.set('values', values);
+
+ // HACK: to make onchange work as written, attach track at the
+ // top level of model and call onchange on custom event.
if (options.onchange) {
- // HACK: to make onchange work as written, attach track at the
- // top level of model and call onchange on custom event.
this.track = options.track;
this.on('change:values', options.onchange, this);
}
@@ -1955,18 +1968,7 @@
else {
return false;
}
- },
-
- /**
- * Restore config values from a dictionary.
- */
- restore_values: function( values ) {
- var self = this;
- _.each( this.get('params'), function(param) {
- self.set_param_value(param.key, values[param.key] || param.default_value);
- });
- },
-
+ }
});
var ConfigView = Backbone.View.extend({
@@ -2563,7 +2565,7 @@
track.tile_cache.clear();
in_drag = false;
if (!in_handle) { drag_control.hide(); }
- track.config.values.height = track.visible_height_px;
+ track.config.get('values').height = track.visible_height_px;
track.changed();
}).appendTo(track.container_div);
},
@@ -2575,8 +2577,8 @@
// Set modes, init mode.
this.display_modes = new_modes;
this.mode = (init_mode ? init_mode :
- (this.config && this.config.values['mode'] ?
- this.config.values['mode'] : this.display_modes[0])
+ (this.config && this.config.get('values')['mode'] ?
+ this.config.get('values')['mode'] : this.display_modes[0])
);
this.action_icons.mode_icon.attr("title", "Set display mode (now: " + this.mode + ")");
@@ -2926,7 +2928,7 @@
var track = this;
// TODO: is it necessary to store the mode in two places (.mode and track_config)?
track.mode = new_mode;
- track.config.values['mode'] = new_mode;
+ track.config.get('values')['mode'] = new_mode;
// FIXME: find a better way to get Auto data w/o clearing cache; using mode in the
// data manager would work if Auto data were checked for compatibility when a specific
// mode is chosen.
@@ -3955,7 +3957,7 @@
extend(FeatureTrack.prototype, Drawable.prototype, TiledTrack.prototype, {
set_painter_from_config: function() {
- if ( this.config.values['connector_style'] === 'arcs' ) {
+ if ( this.config.get('values')['connector_style'] === 'arcs' ) {
this.painter = painters.ArcLinkedFeaturePainter;
} else {
this.painter = painters.LinkedFeaturePainter;
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0
08 Oct '13
1 new commit in galaxy-central:
https://bitbucket.org/galaxy/galaxy-central/commits/5d5729d40b4f/
Changeset: 5d5729d40b4f
User: guerler
Date: 2013-10-08 18:12:48
Summary: Update modal and upload
Affected #: 2 files
diff -r 125a7cafb851d9b70195466389acf08dd9b98fd6 -r 5d5729d40b4f394dc6884d22daa39e3696842b80 static/scripts/galaxy.modal.js
--- a/static/scripts/galaxy.modal.js
+++ b/static/scripts/galaxy.modal.js
@@ -12,9 +12,10 @@
elMain: '#everything',
// defaults inputs
- optionsDefaults: {
- title : "galaxy-modal",
- body : "No content available."
+ optionsDefault: {
+ title : "galaxy-modal",
+ body : "",
+ backdrop : true
},
// initialize
@@ -30,8 +31,16 @@
this.initialize(options);
// fix height
- var body = (this.$el).find('.modal-body');
- body.css('max-height', $(document).height() / 2);
+ this.$body.css('max-height', $(document).height() / 2);
+
+ // set max-height so that modal does not exceed window size and is in middle of page.
+ /*/ TODO: this could perhaps be handled better using CSS.
+ this.$body.css( "max-height",
+ $(window).height() -
+ this.$footer.outerHeight() -
+ this.$header.outerHeight() -
+ parseInt( this.$dialog.css( "padding-top" ), 10 ) -
+ parseInt( this.$dialog.css( "padding-bottom" ), 10 ));*/
// show
if (this.visible)
@@ -39,7 +48,7 @@
else
this.$el.fadeIn('fast');
- // set visibility flag
+ // set flag
this.visible = true;
},
@@ -47,8 +56,8 @@
hide: function(){
// fade out
this.$el.fadeOut('fast');
-
- // set visibility flag
+
+ // set flag
this.visible = false;
},
@@ -56,11 +65,11 @@
create: function(options) {
// configure options
options = _.defaults(options, this.optionsDefault);
-
+
// check for progress bar request
if (options.body == 'progress')
- options.body = '<div class="progress progress-striped active"><div class="progress-bar progress-bar-info" style="width:100%"></div></div>';
-
+ options.body = $('<div class="progress progress-striped active"><div class="progress-bar progress-bar-info" style="width:100%"></div></div>');
+
// remove former element
if (this.$el)
this.$el.remove();
@@ -69,45 +78,54 @@
this.setElement(this.template(options.title));
// link elements
- var body = (this.$el).find('.modal-body');
- var footer = (this.$el).find('.modal-footer');
- var buttons = (this.$el).find('.buttons');
+ this.$body = (this.$el).find('.modal-body');
+ this.$footer = (this.$el).find('.modal-footer');
+ this.$buttons = (this.$el).find('.buttons');
+ this.$backdrop = (this.$el).find('.modal-backdrop');
// append body
- body.append($(options.body));
-
+ this.$body.html(options.body);
+
// fix height if available
if (options.height)
- body.css('height', options.height);
+ this.$body.css('height', options.height);
+ // fix min-width so that modal cannot shrink considerably if new content is loaded.
+ this.$body.css('min-width', this.$body.width());
+
+ // configure background
+ if (!options.backdrop)
+ this.$backdrop.removeClass('in');
+
// append buttons
if (options.buttons) {
// link functions
+ var self = this;
$.each(options.buttons, function(name, value) {
- buttons.append($('<button id="' + String(name).toLowerCase() + '"></button>').text(name).click(value)).append(" ");
+ self.$buttons.append($('<button id="' + String(name).toLowerCase() + '"></button>').text(name).click(value)).append(" ");
});
} else
// hide footer
- footer.hide();
+ this.$footer.hide();
// append to main element
$(this.elMain).append($(this.el));
},
// enable buttons
- enable: function(name) {
- $(this.el).find('#' + String(name).toLowerCase()).prop('disabled', false);
+ enableButton: function(name) {
+ this.$buttons.find('#' + String(name).toLowerCase()).prop('disabled', false);
},
// disable buttons
- disable: function(name) {
- $(this.el).find('#' + String(name).toLowerCase()).prop('disabled', true);
+ disableButton: function(name) {
+ this.$buttons.find('#' + String(name).toLowerCase()).prop('disabled', true);
},
// returns scroll top for body element
scrollTop: function()
{
- return $(this.el).find('.modal-body').scrollTop();
+ return this.$body.scrollTop();
},
/*
@@ -116,12 +134,13 @@
// fill regular modal template
template: function(title) {
- return '<div class="modal in">' +
- '<div class="modal-backdrop in" style="z-index: -1;"></div>' +
+ return '<div class="modal">' +
+ '<div class="modal-backdrop fade in" style="z-index: -1;"></div>' +
'<div class="modal-dialog">' +
'<div class="modal-content">' +
'<div class="modal-header">' +
- '<span><h3 class="title">' + title + '</h3></span>' +
+ '<button type="button" class="close" style="display: none;">×</button>' +
+ '<h4 class="title">' + title + '</h4>' +
'</div>' +
'<div class="modal-body"></div>' +
'<div class="modal-footer">' +
diff -r 125a7cafb851d9b70195466389acf08dd9b98fd6 -r 5d5729d40b4f394dc6884d22daa39e3696842b80 static/scripts/galaxy.upload.js
--- a/static/scripts/galaxy.upload.js
+++ b/static/scripts/galaxy.upload.js
@@ -426,21 +426,21 @@
// update reset button
if (this.counter.running == 0 && this.counter.announce + this.counter.success + this.counter.error > 0)
- this.modal.enable('Reset');
+ this.modal.enableButton('Reset');
else
- this.modal.disable('Reset');
+ this.modal.disableButton('Reset');
// update upload button
if (this.counter.running == 0 && this.counter.announce > 0)
- this.modal.enable('Upload');
+ this.modal.enableButton('Upload');
else
- this.modal.disable('Upload');
+ this.modal.disableButton('Upload');
// select upload button
if (this.counter.running == 0)
- this.modal.enable('Select');
+ this.modal.enableButton('Select');
else
- this.modal.disable('Select');
+ this.modal.disableButton('Select');
},
// load html template
Repository URL: https://bitbucket.org/galaxy/galaxy-central/
--
This is a commit notification from bitbucket.org. You are receiving
this because you have the service enabled, addressing the recipient of
this email.
1
0