is there a pythonic way to try something up to a maximum number of times? [duplicate]
How about:
# Retry the query up to 3 times, stopping early on the first success.
conn = MySQLdb.connect(host, user, password, database)
cursor = conn.cursor()

attempts = 0
while attempts < 3:
    try:
        cursor.execute(query)
        rows = cursor.fetchall()
        for row in rows:
            # do something with the data
            pass  # placeholder so the example is syntactically valid
        break  # success -- stop retrying
    except MySQLdb.Error as e:  # "as" form works on Python 2.6+ and 3.x
        attempts += 1
        # print() call form is valid on both Python 2 and 3
        print("MySQL Error %d: %s" % (e.args[0], e.args[1]))
else:
    # while/else runs only when the loop exhausted without break,
    # i.e. every attempt failed -- the original fell through silently
    print("Giving up after %d failed attempts" % attempts)
Building on Dana's answer, you might want to do this as a decorator:
def retry(howmany):
    """Decorator factory: call the wrapped function up to *howmany* times.

    The wrapped function is retried until it returns without raising.
    If every attempt raises, the last exception is re-raised instead of
    silently returning None (the original behavior hid all failures).

    :param howmany: maximum number of attempts (must be >= 1 for the
        exception re-raise path to make sense).
    """
    def tryIt(func):
        @functools.wraps(func)  # keep the wrapped function's name/docstring
        def f(*args, **kwargs):  # original took no args, so it could only
                                 # wrap zero-argument functions
            last_exc = None
            for _ in range(howmany):
                try:
                    return func(*args, **kwargs)
                except Exception as exc:  # narrowed from bare "except:",
                                          # which also swallowed SystemExit
                                          # and KeyboardInterrupt
                    last_exc = exc
            # all attempts failed -- surface the failure to the caller
            raise last_exc
        return f
    return tryIt
Then...
@retry(5)
def the_db_func():
# [...]
Enhanced version that uses the `decorator` module:
import functools
import time

import decorator
def retry(howmany, *exception_types, **kwargs):
    """Retry decorator built on the third-party ``decorator`` module.

    :param howmany: maximum number of attempts.
    :param exception_types: exception classes to catch; defaults to
        ``Exception`` when none are given.
    :param timeout: keyword-only; seconds to sleep between failed
        attempts (default 0.0, i.e. no delay).
    :raises: the last caught exception when every attempt fails
        (the original silently returned None in that case).
    """
    timeout = kwargs.get('timeout', 0.0)  # seconds

    @decorator.decorator
    def tryIt(func, *fargs, **fkwargs):
        last_exc = None
        # range() is valid on both Python 2 and 3 (original used xrange)
        for attempt in range(howmany):
            try:
                return func(*fargs, **fkwargs)
            except (exception_types or Exception) as e:
                last_exc = e
                # don't sleep after the final failed attempt; a falsy
                # timeout (0.0 or None) skips sleeping entirely
                if timeout and attempt < howmany - 1:
                    time.sleep(timeout)
        if last_exc is not None:
            raise last_exc
    return tryIt
Then...
@retry(5, MySQLdb.Error, timeout=0.5)
def the_db_func():
# [...]
To install the `decorator` module:
$ easy_install decorator
UPDATE: there is a better maintained fork of the retrying library called tenacity, which supports more features and is in general more flexible.
The API changes slightly:
# tenacity examples -- stop condition: give up after 7 attempts.
@retry(stop=stop_after_attempt(7))
def stop_after_7_attempts():
    print("Stopping after 7 attempts")

# Fixed wait: pause 2 seconds between attempts.
@retry(wait=wait_fixed(2))
def wait_2_s():
    print("Wait 2 second between retries")

# Exponential backoff, clamped between 4 and 10 seconds.
@retry(wait=wait_exponential(multiplier=1, min=4, max=10))
def wait_exponential_1000():
    print("Wait 2^x * 1000 milliseconds between each retry,")
    print("up to 10 seconds, then 10 seconds afterwards")
Yes, there is the retrying library, which has a decorator that implements several kinds of retrying logic that you can combine:
Some examples:
# retrying-library examples -- stop after a fixed number of attempts.
@retry(stop_max_attempt_number=7)
def stop_after_7_attempts():
    print("Stopping after 7 attempts")

# Fixed wait, expressed in milliseconds in this library.
@retry(wait_fixed=2000)
def wait_2_s():
    print("Wait 2 second between retries")

# Exponential backoff with a cap, both in milliseconds.
@retry(wait_exponential_multiplier=1000, wait_exponential_max=10000)
def wait_exponential_1000():
    print("Wait 2^x * 1000 milliseconds between each retry,")
    print("up to 10 seconds, then 10 seconds afterwards")
# Retry the query up to 3 times; break out of the loop on first success.
conn = MySQLdb.connect(host, user, password, database)
cursor = conn.cursor()

for i in range(3):
    try:
        cursor.execute(query)
        rows = cursor.fetchall()
        for row in rows:
            # do something with the data
            pass  # placeholder so the example is syntactically valid
        break  # success -- stop retrying
    except MySQLdb.Error as e:  # "as" form works on Python 2.6+ and 3.x
        # print() call form is valid on both Python 2 and 3
        print("MySQL Error %d: %s" % (e.args[0], e.args[1]))
else:
    # for/else runs only when no break occurred, i.e. all 3 attempts
    # failed -- the original fell through silently
    print("Giving up after 3 failed attempts")
I'd refactor it like so:
def callee(cursor):
    """Execute the query on *cursor* and process each resulting row.

    NOTE(review): ``query`` is read from the enclosing/global scope --
    confirm it is defined before this is called.

    :param cursor: an open MySQLdb cursor.
    :raises MySQLdb.Error: propagated to the caller, which owns retries.
    """
    cursor.execute(query)
    rows = cursor.fetchall()
    for row in rows:
        # do something with the data
        pass  # placeholder so the example is syntactically valid
def caller(attempt_count=3, wait_interval=20):
    """Run :func:`callee` with retries on MySQL errors.

    :param attempt_count: how many times to try before giving up.
    :param wait_interval: seconds to sleep between failed attempts.
    """
    conn = MySQLdb.connect(host, user, password, database)
    try:
        cursor = conn.cursor()
        for attempt_number in range(attempt_count):
            try:
                callee(cursor)
            except MySQLdb.Error as e:  # "as" works on Python 2.6+ and 3.x
                # logging.warning: warn() is a deprecated alias
                logging.warning("MySQL Error %d: %s", e.args[0], e.args[1])
                # don't sleep after the final attempt -- the original
                # always slept, even when it was about to give up
                if attempt_number < attempt_count - 1:
                    time.sleep(wait_interval)
            else:
                break  # success -- stop retrying
    finally:
        conn.close()  # the original leaked the connection
Factoring out the callee
function seems to break up the functionality so that it's easy to see the business logic without getting bogged down in the retry code.