Caching in Python with lru_cache


Python tip:

Improve the performance of a function with the lru_cache decorator from functools:

@functools.lru_cache(maxsize=128, typed=False)
  • maxsize: maximum number of calls to keep in the cache; once it's full, the least recently used entries are discarded
  • typed: when True, arguments of different types are cached separately (e.g., 3 and 3.0 are treated as distinct calls)
  • the function's args and kwargs must be hashable
  • ideal when you repeatedly call an expensive function with the same arguments
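A decorated function also exposes cache_info() and cache_clear() for inspecting and resetting the cache. Here's a minimal sketch (the fib function is just an illustrative example, not part of the benchmark below):

from functools import lru_cache


@lru_cache(maxsize=128)
def fib(n):
    # naive recursive Fibonacci, memoized by lru_cache
    return n if n < 2 else fib(n - 1) + fib(n - 2)


fib(10)
print(fib.cache_info())
# => CacheInfo(hits=8, misses=11, maxsize=128, currsize=11)

fib.cache_clear()  # empty the cache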

Example:

import timeit
from functools import lru_cache

import requests


@lru_cache(maxsize=10, typed=False)
def open_web_page(website_url):
    # repeat calls with the same URL are served from the cache instead of re-fetching
    return requests.get(website_url)


# benchmark snippet: four sequential requests with no caching
without_cache = """
import requests

def open_web_page(website_url):
    return requests.get(website_url)

urls = [
    'https://testdriven.io',
    'https://testdriven.io',
    'https://google.com',
    'https://google.com',
]

for url in urls:
    open_web_page(url)
"""

# same four requests, but repeated URLs are served from the cache
with_cache = """
from functools import lru_cache

import requests

@lru_cache(maxsize=10, typed=False)
def open_web_page(website_url):
    return requests.get(website_url)

urls = [
    'https://testdriven.io',
    'https://testdriven.io',
    'https://google.com',
    'https://google.com',
]

for url in urls:
    open_web_page(url)
"""


print(timeit.timeit(without_cache, number=5))
# => 7.195018381

print(timeit.timeit(with_cache, number=5))
# => 3.6599477370000004
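You can also inspect the cache directly. Calling the module-level open_web_page defined above and then checking cache_info() shows how many calls hit the cache (the numbers below are from an illustrative run; yours depend on the calls you make):

urls = [
    'https://testdriven.io',
    'https://testdriven.io',
    'https://google.com',
]

for url in urls:
    open_web_page(url)

print(open_web_page.cache_info())
# => CacheInfo(hits=1, misses=2, maxsize=10, currsize=2)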