I'm not an expert on this by any means, but it looks like the standard way to maintain session state with urllib2 is to create a custom opener instance for each session. That looks like this:
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor())
You then use that opener to perform whatever authentication the site requires, and all of the session state (the cookies) stays inside the opener object itself. That opener can then be passed as an argument to each of the parallel requests.
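For instance, here is a minimal sketch of the pattern, with made-up example.com endpoints and form fields standing in for a real site:

import urllib
import urllib2

# one opener per session; the HTTPCookieProcessor holds the cookie jar
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor())

# the login response sets session cookies, which land in the opener's jar
opener.open('http://example.com/login',
            urllib.urlencode({'user': 'me', 'password': 'secret'}))

# later requests through the same opener automatically send those cookies
page = opener.open('http://example.com/private').read()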
Here is an example script that logs in to secondlife.com for multiple users in parallel, and then makes multiple page requests for each user, also in parallel. The login procedure for this particular site is tricky because it involves capturing a CSRF token from the first request before you can log in with the second; for that reason, the login function is somewhat messy. The principle should be the same, though, for whatever site you're interested in.
import eventlet
from eventlet.green import urllib2
import re

login_url = 'https://secure-web28.secondlife.com/my/account/login.php?lang=en&type=second-life-member&nextpage=/my/index.php?lang=en'

pool = eventlet.GreenPool(10)

def fetch_title(opener, url):
    # fetch a page through the session's opener and pull out its <title>
    match = re.search(r'<title>(.*)</title>', opener.open(url).read())
    if match:
        return match.group(1)
    else:
        return "no title"

def login(login_url, fullname, password):
    # each session gets its own opener; its cookie jar holds the session state
    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor())
    login_page = opener.open(login_url).read()
    # the site embeds a CSRF token in the login form, so capture it first
    csrf_token = re.search(r'<input type="hidden" name="CSRFToken" value="(.*)"/>', login_page).group(1)
    username, lastname = fullname.split()
    auth = "CSRFToken=%s&form[type]=second-life-member&form[nextpage]=/my/index.php?lang=en&form[persistent]=Y&form[form_action]=Log%%20In&form[form_lang]=en&form[username]=%s&form[lastname]=%s&form[password]=%s&submit=Submit" % (
        csrf_token, username, lastname, password)
    # POSTing the form logs us in; the session cookie now lives in the opener
    logged_in = opener.open(login_url, auth).read()
    return opener

def login_and_fetch(login_url, fullname, password, page_urls):
    opener = login(login_url, fullname, password)
    # note that this deliberately uses the global pool
    pile = eventlet.GreenPile(pool)
    for url in page_urls:
        pile.spawn(fetch_title, opener, url)
    return pile

login_urls = [login_url] * 2
usernames = [...]  # fill in real "First Last" names here
passwords = [...]  # and the matching passwords
page_urls = [['https://secure-web28.secondlife.com/my/account/?lang=en-US',
              'https://secure-web28.secondlife.com/my/community/events/index.php?lang=en-US']] * 2

for user_iter in pool.imap(login_and_fetch, login_urls, usernames, passwords, page_urls):
    for title in user_iter:
        print "got title", title