Edited to add benchmark below.
You can wrap a generator with a lock. For example,
import threading

class LockedIterator(object):
    def __init__(self, it):
        self.lock = threading.Lock()
        self.it = it.__iter__()

    def __iter__(self): return self

    def next(self):
        # Only one thread at a time may advance the underlying iterator.
        self.lock.acquire()
        try:
            return self.it.next()
        finally:
            self.lock.release()
gen = (x*2 for x in [1, 2, 3, 4])
g2 = LockedIterator(gen)
print list(g2)   # [2, 4, 6, 8]
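For context on why the lock is needed at all: in CPython, if two threads call next() on the same generator at the same moment, the late caller gets "ValueError: generator already executing". A rough stress test (my own sketch, not part of the code above) that may trip it intermittently:

import threading

def gen():
    for x in range(100000):
        yield x*2

shared = gen()

def hammer():
    try:
        for item in shared:   # both threads advance the same generator
            pass
    except ValueError, e:
        # Typically "generator already executing" when the calls collide.
        print e

threads = [threading.Thread(target=hammer) for _ in range(2)]
for t in threads: t.start()
for t in threads: t.join()

Wrapping the generator in LockedIterator serializes the next() calls, so that collision can't happen.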
Locking takes 50 ms on my system; Queue takes 350 ms. Queue is useful when you really do have a queue, for example when incoming HTTP requests need to be queued for processing by worker threads. (That doesn't fit the Python iterator model: once an iterator runs out of items, it's done.) If you really do have an iterator, then LockedIterator is a faster and simpler way to make it thread-safe.
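To sketch that queue scenario (the handler and the request strings are made up for illustration): the queue keeps accepting items after the workers have started, and you shut the workers down with a sentinel, which is exactly the open-ended behaviour an iterator can't express.

import threading
from Queue import Queue

requests = Queue()

def handle_request(req):
    print "handling", req   # placeholder for real work

def worker():
    while True:
        req = requests.get()
        if req is None:            # sentinel: no more work for this thread
            requests.task_done()
            break
        handle_request(req)
        requests.task_done()

for _ in range(4):
    threading.Thread(target=worker).start()

# The producer can keep feeding the queue as long as requests keep arriving.
for req in ["GET /", "GET /about", "POST /login"]:
    requests.put(req)

# Shutdown: one sentinel per worker, then wait for the queue to drain.
for _ in range(4):
    requests.put(None)
requests.join()

Here is the benchmark behind the timings above: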
from datetime import datetime
import threading

num_worker_threads = 4

class LockedIterator(object):
    def __init__(self, it):
        self.lock = threading.Lock()
        self.it = it.__iter__()

    def __iter__(self): return self

    def next(self):
        self.lock.acquire()
        try:
            return self.it.next()
        finally:
            self.lock.release()

def test_locked(it):
    it = LockedIterator(it)

    # Each worker drains the shared, locked iterator.
    def worker():
        try:
            for i in it:
                pass
        except Exception, e:
            print e
            raise

    threads = []
    for i in range(num_worker_threads):
        t = threading.Thread(target=worker)
        threads.append(t)
        t.start()

    for t in threads:
        t.join()

def test_queue(it):
    from Queue import Queue

    # Workers block on the queue; the main thread feeds it below.
    def worker():
        try:
            while True:
                item = q.get()
                q.task_done()
        except Exception, e:
            print e
            raise

    q = Queue()
    for i in range(num_worker_threads):
        t = threading.Thread(target=worker)
        t.setDaemon(True)
        t.start()

    for item in it:
        q.put(item)
    q.join()

start_time = datetime.now()

it = [x*2 for x in range(1, 10000)]
test_locked(it)
#test_queue(it)

end_time = datetime.now()
took = end_time - start_time
print "took %.01f ms" % ((took.seconds + took.microseconds/1000000.0)*1000)