Saturday, July 4, 2009

Simple File Cache Between Python Processes

Here is a simple way to share a cache of pickled data between multiple Python processes, using a lock file on disk to coordinate refreshes so only one process rebuilds the cache at a time.

#!/usr/bin/env python

from __future__ import with_statement

import cPickle as pickle
import os
import sys
import time

CACHE_TTL = 300 # seconds; how long cached data stays valid
LOCK_TTL = 60 # seconds; a lock file older than this is treated as stale (crashed writer)
DATA_FILE = '/tmp/pickle_file' # pickled cache shared between processes
LOCK_FILE = '/tmp/pickle_lock' # presence of this file marks a cache rebuild in progress

def get_data():
    """Return the cached data dict, rebuilding the on-disk cache when it
    is missing or expired.

    Coordination between processes is done with LOCK_FILE: if it exists
    and is younger than LOCK_TTL, another process is rebuilding the
    cache and this process must not write.  A lock older than LOCK_TTL
    is assumed to belong to a crashed process and is removed.

    Returns:
        dict with at least an 'expire' key (a Unix timestamp), as
        produced by refresh_data().
    """
    locked = False
    if os.path.exists(LOCK_FILE):
        # Use the named st_mtime attribute instead of the magic stat
        # tuple index; time.time() is the direct equivalent of
        # time.mktime(time.localtime()).
        if os.stat(LOCK_FILE).st_mtime + LOCK_TTL < time.time():
            os.remove(LOCK_FILE)  # break the stale lock
        else:
            locked = True
    if not os.path.exists(DATA_FILE):
        if locked:
            # Another process is already building the cache.  Serve a
            # fresh in-memory copy rather than clobbering its lock file
            # and racing it on DATA_FILE (the original code wrote and
            # removed the lock here even while locked).
            return refresh_data()
        return _rebuild_cache()
    # Pickle files must be opened in binary mode; 'with' guarantees the
    # handle is closed even if unpickling raises.
    with open(DATA_FILE, 'rb') as data_file_obj:
        data_store = pickle.load(data_file_obj)
    # .get() tolerates a malformed cache that lacks the 'expire' key
    # (treated as already expired) instead of raising KeyError.
    if not locked and time.time() > data_store.get('expire', 0):
        data_store = _rebuild_cache()
    return data_store


def _rebuild_cache():
    """Refresh the data and write it to DATA_FILE under LOCK_FILE.

    Creates the lock, writes the pickled cache, and guarantees the lock
    is removed even if refreshing or writing fails.  Returns the fresh
    data dict.
    """
    with open(LOCK_FILE, 'w'):
        pass  # the lock is the file's existence; no content needed
    try:
        data_store = refresh_data()
        with open(DATA_FILE, 'wb') as data_file_obj:
            pickle.dump(data_store, data_file_obj)
    finally:
        os.remove(LOCK_FILE)
    return data_store

def refresh_data():
    """Build and return a fresh data dictionary.

    Only needs to return a dictionary with an 'expire' key set to the
    Unix timestamp after which the cache is considered stale; everything
    else is payload.
    """
    data_store = {}
    # os.urandom() never blocks.  The original readline() on /dev/random
    # could hang arbitrarily long: /dev/random blocks when the entropy
    # pool is low, and readline() waits for a newline byte to appear in
    # a stream of random bytes.
    data_store['k1'] = os.urandom(16)
    # time.time() is equivalent to time.mktime(time.localtime()).
    data_store['expire'] = time.time() + CACHE_TTL
    return data_store


print get_data()

No comments:

Post a Comment