Commit f5e563d7 authored by Thomas Uram's avatar Thomas Uram

Migrate to Python 2+3 exception handling

parent 223d86d1
......@@ -46,7 +46,7 @@ class JobStatusReceiver(MessageReceiver.MessageReceiver):
# get the subjob for this message
try:
subjob = ArgoSubJob.objects.get(job_id=statusMsg.job_id)
except Exception,e:
except Exception as e:
logger.error(' exception received while retreiving ArgoSubJob with id = ' + str(statusMsg.job_id) + ': ' + str(e))
# acknoledge message
channel.basic_ack(method_frame.delivery_tag)
......@@ -59,7 +59,7 @@ class JobStatusReceiver(MessageReceiver.MessageReceiver):
# get the argo job for this subjob
try:
argojob = ArgoJob.objects.get(job_id=subjob.job_id)
except Exception,e:
except Exception as e:
logger.error(' exception received while retrieving ArgoJob with id = ' + str(subjob.job_id + ': ' + str(e)))
# acknoledge message
channel.basic_ack(method_frame.delivery_tag)
......@@ -127,7 +127,7 @@ class JobStatusReceiver(MessageReceiver.MessageReceiver):
self.process_queue.put(QueueMessage.QueueMessage(argojob.job_id,
QueueMessage.JobStatusReceiverMessageNoBody))
except Exception,e:
except Exception as e:
logger.exception("Error consuming status message: " + str(e))
self.process_queue.put(QueueMessage.QueueMessage(0,
QueueMessage.JobStatusReceiverFailed))
......
......@@ -41,7 +41,7 @@ class UserJobReceiver(MessageReceiver.MessageReceiver):
# convert body text to ArgoUserJob
try:
userjob = Serializer.deserialize(body)
except Exception,e:
except Exception as e:
logger.error(' received exception while deserializing message to create ArgoUserJob, \nexception message: ' + str(e) + '\n message body: \n' + body + ' \n cannot continue with this job, ignoring it and moving on.')
# acknoledge message
channel.basic_ack(method_frame.delivery_tag)
......@@ -53,7 +53,7 @@ class UserJobReceiver(MessageReceiver.MessageReceiver):
db_backend = load_backend(connections.databases[DEFAULT_DB_ALIAS]['ENGINE'])
db_conn = db_backend.DatabaseWrapper(connections.databases[DEFAULT_DB_ALIAS], db_connection_id)
connections[db_connection_id] = db_conn
except Exception,e:
except Exception as e:
logger.error(' received exception while creating DB connection, exception message: ' + str(e) + ' \n job id: ' + str(userjob['user_id']) + ' job user: ' + userjob['username'] + ' job description: ' + userjob['description'] + '\n cannot continue with this job, moving on.')
# acknoledge message
channel.basic_ack(method_frame.delivery_tag)
......@@ -115,7 +115,7 @@ class UserJobReceiver(MessageReceiver.MessageReceiver):
argojob.subjob_pk_list = Serializer.serialize(subjob_pks)
argojob.save()
self.process_queue.put(QueueMessage.QueueMessage(argojob.pk,0,'new job received'))
except Exception,e:
except Exception as e:
message = 'received an exception while parsing the incomping user job. Exception: ' + str(e) + '; userjob id = ' + str(userjob['user_id']) + '; job_id = ' + str(argojob.job_id) + '; job_name = ' + userjob['name']
logger.error(message)
......
......@@ -62,7 +62,7 @@ def submit_subjob(job):
except SubJobIndexOutOfRange:
message = 'All Subjobs Completed'
job.state = SUBJOBS_COMPLETED.name
except Exception,e:
except Exception as e:
message = ('Exception received while submitting subjob to '
+ subjob.site + ' for job pk=' + str(job.pk) + ' argo_id='
+ str(job.job_id) + ': ' + str(e))
......@@ -91,7 +91,7 @@ def stage_in(job):
try:
transfer.stage_in(job.input_url + '/',job.working_directory + '/')
job.state = STAGED_IN.name
except Exception,e:
except Exception as e:
message = 'Exception received during stage_in: ' + str(e)
logger.exception(message)
job.state = STAGE_IN_FAILED.name
......@@ -110,7 +110,7 @@ def stage_out(job):
try:
transfer.stage_out(str(job.working_directory) + '/', str(job.output_url) + '/')
job.state = STAGED_OUT.name
except Exception,e:
except Exception as e:
message = 'Exception received during stage_out: ' + str(e)
logger.exception(message)
job.state = STAGE_OUT_FAILED.name
......@@ -155,7 +155,7 @@ def send_status_message(job,message=None):
subject = 'ARGO Job Status Report',
body = body,
)
except Exception,e:
except Exception as e:
logger.exception('exception received while trying to send status email. Exception: ' + str(e))
# if job has an argo job status routing key, send a message there
......@@ -361,7 +361,7 @@ class ArgoJob(models.Model):
try:
shutil.rmtree(self.working_directory)
logger.info('removed job path: ' + str(self.working_directory))
except Exception,e:
except Exception as e:
logger.error('Error trying to remove argo job path: ' + str(self.working_directory) + ' Exception: ' + str(e))
# delete subjobs
......@@ -373,7 +373,7 @@ class ArgoJob(models.Model):
# call base class delete function
try:
super(ArgoJob,self).delete()
except Exception,e:
except Exception as e:
logger.error('pk='+str(self.pk) + ' Received exception during "delete": ' + str(e))
# must do this to force django to create a DB table for ARGO independent of the one created for Balsam
......
......@@ -31,7 +31,7 @@ class BalsamJobReceiver(MessageReceiver.MessageReceiver):
try:
job = models.BalsamJob()
job.deserialize(body)
except Exception,e:
except Exception as e:
logger.exception('error deserializing incoming job. body = ' + body + ' not conitnuing with this job.')
channel.basic_ack(method_frame.delivery_tag)
return
......@@ -43,7 +43,7 @@ class BalsamJobReceiver(MessageReceiver.MessageReceiver):
db_backend = utils.load_backend(connections.databases[DEFAULT_DB_ALIAS]['ENGINE'])
db_conn = db_backend.DatabaseWrapper(connections.databases[DEFAULT_DB_ALIAS], db_connection_id)
connections[db_connection_id] = db_conn
except Exception,e:
except Exception as e:
logger.exception(' received exception while creating DB connection, exception message: ')
# acknoledge message
channel.basic_ack(method_frame.delivery_tag)
......
......@@ -18,7 +18,7 @@ class BalsamJobStatus:
self.job_id = job.job_id
try:
self.serialized_job = job.serialize()
except Exception,e:
except Exception as e:
logger.exception('serialize failed: ' + str(job.__dict__))
raise SerializeFailed('Received exception while serializing BalsamJob')
......@@ -27,7 +27,7 @@ class BalsamJobStatus:
try:
job.deserialize(self.serialized_job)
return job
except Exception,e:
except Exception as e:
logger.exception('deserialize failed: ' + str(self.serialized_job))
raise DeserializeFailed('Received exception while deserializing BalsamJob')
return None
......@@ -35,7 +35,7 @@ class BalsamJobStatus:
def serialize(self):
try:
return Serializer.serialize(self.__dict__)
except Exception,e:
except Exception as e:
logger.exception('serialize failed: ' + str(self.__dict__))
raise SerializeFailed('Received exception while serializing BalsamJobStatus: ' + str(e))
......@@ -43,6 +43,6 @@ class BalsamJobStatus:
try:
self.__dict__ = Serializer.deserialize(text)
self.job_id = int(str(self.job_id))
except Exception,e:
except Exception as e:
logger.exception('deserialize failed')
raise DeserializeFailed('Received exception while deserializing BalsamJobStatus: ' + str(e))
......@@ -4,7 +4,7 @@ logger = logging.getLogger(__name__)
def write_checksum(exe,filename = None):
try:
p = subprocess.Popen(['openssl','md5',exe],stdout=subprocess.PIPE,stderr=subprocess.PIPE)
except Exception,e:
except Exception as e:
logger.error('ERROR running MD5 checksum on executable: ' + exe + ', exception: ' + str(e))
return None
stdout,stderr = p.communicate()
......
......@@ -109,7 +109,7 @@ def get_job_status(job):
cmd = settings.BALSAM_SCHEDULER_STATUS_EXE + ' -j ' + str(job.scheduler_id) + ' -o state -n'
try:
p = subprocess.Popen(shlex.split(cmd),stdout=subprocess.PIPE,stderr=subprocess.STDOUT)
except Exception,e:
except Exception as e:
logger.error(' exception while trying get job status for job id = ' + job.originating_source_id + ': ' + str(e))
raise
......
......@@ -86,7 +86,7 @@ class ArgoUserJob:
tmp = copy.deepcopy(self)
tmp.subjobs = serial_subjobs
return Serializer.serialize(tmp.__dict__)
except Exception,e:
except Exception as e:
raise Exception('Exception received while serializing ArgoUserJob: ' + str(e))
@staticmethod
......@@ -100,7 +100,7 @@ class ArgoUserJob:
tmp = ArgoUserSubJob.deserialize(subjob)
subjobs.append(tmp)
tmp.subjobs = subjobs
except Exception,e:
except Exception as e:
raise Exception('Exception received while deserializing ArgoUserJob: ' + str(e))
# convert unicode strings to strings
......
......@@ -25,7 +25,7 @@ class TransitionJob(multiprocessing.Process):
db_backend = utils.load_backend(connections.databases[DEFAULT_DB_ALIAS]['ENGINE'])
db_conn = db_backend.DatabaseWrapper(connections.databases[DEFAULT_DB_ALIAS], db_connection_id)
connections[db_connection_id] = db_conn
except Exception,e:
except Exception as e:
self.queue.put(QueueMessage.QueueMessage(self.entry_pk,QueueMessage.TransitionDbConnectionFailed,
'Failed to get local connection to DB. Exception: ' + str(e)))
return
......@@ -35,7 +35,7 @@ class TransitionJob(multiprocessing.Process):
# retreive job from DB
try:
job = self.job_base_class.objects.get(pk=self.entry_pk)
except Exception,e:
except Exception as e:
self.queue.put(QueueMessage.QueueMessage(self.entry_pk,QueueMessage.TransitionDbRetrieveFailed,
'Failed to retrieve job id ' + str(self.entry_pk) + ' from DB for base_class ' + str(self.job_base_class.__name__) + '. Exception: ' + str(e)))
return
......@@ -50,7 +50,7 @@ class TransitionJob(multiprocessing.Process):
else:
logger.debug(' pk='+str(job.pk) + ' state='+job.state + ' transition_function is None')
logger.debug(' pk='+str(job.pk) + ' state='+job.state + ' transition_function=' + str(self.transition_function.__name__) + ' completed')
except Exception,e:
except Exception as e:
message = 'Transition function, '
if self.transition_function is None:
message += 'None'
......
......@@ -12,6 +12,6 @@ def run_subprocess(cmd,ignore_nonzero_return=False):
if p.returncode != 0 and not ignore_nonzero_return:
raise SubprocessNonzeroReturnCode(stdout)
return stdout
except Exception,e:
except Exception as e:
logger.exception('exception received')
raise SubprocessFailed(str(e))
\ No newline at end of file
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment