Python实现的最近最少使用算法
本文实例讲述了Python实现的最近最少使用算法。分享给大家供大家参考。具体如下:
# lrucache.py -- a simple LRU (Least-Recently-Used) cache class
# Copyright 2004 Evan Prodromou <evan@bad.dynu.ca>
# Licensed under the Academic Free License 2.1
# Licensed for ftputil under the revised BSD license
# with permission by the author, Evan Prodromou. Many
# thanks, Evan! :-)
#
# The original file is available at
# http://pypi.python.org/pypi/lrucache/0.2.
# arch-tag: LRU cache main module
"""asimpleLRU(Least-Recently-Used)cachemodule
ThismoduleprovidesverysimpleLRU(Least-Recently-Used)cache
functionality.
An*in-memorycache*isusefulforstoringtheresultsofan
'expe\nsive'process(onethattakesalotoftimeorresources)for
laterre-use.Typicalexamplesareaccessingdatafromthefilesystem,
adatabase,oranetworklocation.Ifyouknowyou'llneedtore-read
thedataagain,itcanhelptokeepitinacache.
You*can*useaPythondictionaryasacacheforsomepurposes.
However,iftheresultsyou'recachingarelarge,oryouhavealotof
possibleresults,thiscanbeimpracticalmemory-wise.
An*LRUcache*,ontheotherhand,onlykeeps_some_oftheresultsin
memory,whichkeepsyoufromoverusingresources.Thecacheisbounded
byamaximumsize;ifyoutrytoaddmorevaluestothecache,itwill
automaticallydiscardthevaluesthatyouhaven'treadorwrittento
inthelongesttime.Inotherwords,theleast-recently-useditemsare
discarded.[1]_
..[1]:'Discarded'heremeans'removedfromthecache'.
"""
from__future__importgenerators
importtime
fromheapqimportheappush,heappop,heapify
#thesuffixafterthehyphendenotesmodificationsbythe
#ftputilprojectwithrespecttotheoriginalversion
__version__="0.2-1"
__all__=['CacheKeyError','LRUCache','DEFAULT_SIZE']
__docformat__='reStructuredTexten'
DEFAULT_SIZE=16
"""DefaultsizeofanewLRUCacheobject,ifno'size'argumentisgiven."""
classCacheKeyError(KeyError):
"""Errorraisedwhencacherequestsfail
Whenacacherecordisaccessedwhichnolongerexists(orneverdid),
thiserrorisraised.Toavoidit,youmaywanttocheckfortheexistence
ofacacherecordbeforereadingordeletingit."""
pass
class LRUCache(object):
    """Least-Recently-Used (LRU) cache.

    Instances of this class provide a least-recently-used (LRU) cache.
    They emulate a Python mapping type. You can use an LRU cache more
    or less like a Python dictionary, with the exception that objects
    you put into the cache may be discarded before you take them out.

    Some example usage::

        cache = LRUCache(32)  # new cache
        cache['foo'] = get_file_contents('foo')  # or whatever

        if 'foo' in cache:  # if it's still in cache...
            # use cached version
            contents = cache['foo']
        else:
            # recalculate
            contents = get_file_contents('foo')
            # store in cache for next time
            cache['foo'] = contents

        print(cache.size)  # maximum size
        print(len(cache))  # 0 <= len(cache) <= cache.size

        cache.size = 10  # auto-shrink on size assignment

        for i in range(50):  # note: larger than cache size
            cache[i] = i

        if 0 not in cache:
            print('Zero was discarded.')
        if 42 in cache:
            del cache[42]  # manual deletion

        for j in cache:  # iterate (in LRU order)
            print(j, cache[j])  # iterator produces keys, not values
    """

    class __Node(object):
        """Record of a cached value. Not for public consumption."""

        def __init__(self, key, obj, timestamp, sort_key):
            object.__init__(self)
            self.key = key
            self.obj = obj
            self.atime = timestamp   # last access time
            self.mtime = self.atime  # last modification time
            self._sort_key = sort_key

        # Python 3 fix: heapq orders items with "<", and the original
        # Python 2 __cmp__/cmp pair no longer exists, so define __lt__.
        def __lt__(self, other):
            return self._sort_key < other._sort_key

        def __repr__(self):
            return "<%s %s => %s (%s)>" % \
                   (self.__class__, self.key, self.obj,
                    time.asctime(time.localtime(self.atime)))

    def __init__(self, size=None):
        """Initialize an empty cache.

        size: maximum number of entries kept; must be a positive int.
        A value of None (the default) means DEFAULT_SIZE.

        Raises TypeError for a non-int size and ValueError for a
        non-positive one.
        """
        if size is None:
            size = DEFAULT_SIZE
        # Check the type first: under Python 3, comparing a non-number
        # with "<=" (the original's first check) would itself raise a
        # bare TypeError and mask the intended error value. The strict
        # type() check (rather than isinstance) mirrors the original,
        # which also rejected bool.
        if type(size) is not int:
            raise TypeError(size)
        if size <= 0:
            raise ValueError(size)
        object.__init__(self)
        self.__heap = []   # min-heap of __Node, ordered by _sort_key (LRU first)
        self.__dict = {}   # key -> __Node, for O(1) lookup
        # Maximum size of the cache. If more than 'size' elements are
        # added to the cache, the least-recently-used ones will be
        # discarded. Assigning triggers auto-shrink via __setattr__.
        self.size = size
        self.__counter = 0

    def _sort_key(self):
        """Return a new integer value upon every call.

        Cache nodes need a monotonically increasing time indicator.
        time.time() and time.clock() don't guarantee this in a
        platform-independent way.
        """
        self.__counter += 1
        return self.__counter

    def __len__(self):
        return len(self.__heap)

    def __contains__(self, key):
        # Python 3 fix: dict.has_key was removed; use the "in" operator.
        return key in self.__dict

    def __setitem__(self, key, obj):
        """Store obj under key, evicting LRU entries if the cache is full."""
        if key in self.__dict:
            node = self.__dict[key]
            # update node object in-place
            node.obj = obj
            node.atime = time.time()
            node.mtime = node.atime
            node._sort_key = self._sort_key()
            heapify(self.__heap)
        else:
            # size may have been reset, so we loop
            while len(self.__heap) >= self.size:
                lru = heappop(self.__heap)
                del self.__dict[lru.key]
            node = self.__Node(key, obj, time.time(), self._sort_key())
            self.__dict[key] = node
            heappush(self.__heap, node)

    def __getitem__(self, key):
        """Return the cached value for key; raise CacheKeyError on a miss.

        A successful read refreshes the entry's access time, making it
        the most recently used.
        """
        if key not in self.__dict:
            raise CacheKeyError(key)
        node = self.__dict[key]
        # update node object in-place, then restore heap order
        node.atime = time.time()
        node._sort_key = self._sort_key()
        heapify(self.__heap)
        return node.obj

    def __delitem__(self, key):
        """Remove the record for key; raise CacheKeyError if absent.

        Returns the removed value (a quirk kept from the original;
        Python ignores __delitem__ return values).
        """
        if key not in self.__dict:
            raise CacheKeyError(key)
        node = self.__dict[key]
        del self.__dict[key]
        self.__heap.remove(node)
        heapify(self.__heap)
        return node.obj

    def __iter__(self):
        """Yield keys in LRU order (least recently used first).

        Iterates over a copy of the heap so that concurrent reads and
        writes don't disturb iteration.
        """
        copy = self.__heap[:]
        while len(copy) > 0:
            node = heappop(copy)
            yield node.key
        # Python 3 fix: the original ended with "raise StopIteration",
        # which PEP 479 turns into RuntimeError; a generator must simply
        # return to finish.

    def __setattr__(self, name, value):
        object.__setattr__(self, name, value)
        # automagically shrink heap on resize
        if name == 'size':
            while len(self.__heap) > value:
                lru = heappop(self.__heap)
                del self.__dict[lru.key]

    def __repr__(self):
        return "<%s (%d elements)>" % (str(self.__class__), len(self.__heap))

    def mtime(self, key):
        """Return the last modification time for the cache record with key.

        May be useful for cache instances where the stored values can get
        'stale', such as caching file or network resource contents.
        Raises CacheKeyError if key is not cached.
        """
        if key not in self.__dict:
            raise CacheKeyError(key)
        node = self.__dict[key]
        return node.mtime
if__name__=="__main__":
cache=LRUCache(25)
printcache
foriinrange(50):
cache[i]=str(i)
printcache
if46incache:
print"46incache"
delcache[46]
printcache
cache.size=10
printcache
cache[46]='46'
printcache
printlen(cache)
forcincache:
printc
printcache
printcache.mtime(46)
forcincache:
printc
希望本文所述对大家的Python程序设计有所帮助。