Added database cache benchmark #81

Open

wants to merge 3 commits into main
Changes from 1 commit
73 changes: 73 additions & 0 deletions benchmarks/cache_benchmarks/database_cache/benchmark.py
@@ -0,0 +1,73 @@
import random

from django.core.cache import caches
from django.core.management import call_command

from ...utils import bench_setup


class DatabaseCacheBackend:
Hisham-Pak marked this conversation as resolved.

Member

Do you think we could add coverage for some of the other backends too? The benchmarks would be the same, it's just a different setting? 🤔

Author

Can we do this in a separate pull request?

    def setup(self):
        bench_setup()
        call_command("createcachetable", verbosity=0)
        random.seed(0)

        self.cache = caches["db"]
        self.int_key = "int_key"
        self.cache.set(self.int_key, 0)

    def time_add(self):
        for _ in range(100):
            self.cache.add(self.random_key(), self.random_binary())

    def time_get(self):
        for _ in range(100):
            self.cache.get(self.random_key())

    def time_set(self):
        for _ in range(100):
            self.cache.set(self.random_key(), self.random_binary())

    def time_get_or_set(self):
        for _ in range(100):
            self.cache.get_or_set(self.random_key(), self.random_binary())

    def time_touch(self):
        for _ in range(100):
            self.cache.touch(self.random_key())

    def time_delete(self):
        for _ in range(100):
            self.cache.delete(self.random_key())

    def time_get_many(self):
        for _ in range(100):
            self.cache.get_many([self.random_key() for x in range(100)])

    def time_set_many(self):
        for _ in range(100):
            self.cache.set_many(
                {self.random_key(): self.random_binary() for x in range(100)}
            )

    def time_delete_many(self):
        for _ in range(100):
            self.cache.delete_many([self.random_key() for x in range(100)])

    def time_clear(self):
        for _ in range(100):
            self.cache.clear()

    def time_incr(self):
        for _ in range(100):
            self.cache.incr(self.int_key)

    def time_decr(self):
        for _ in range(100):
            self.cache.decr(self.int_key)

    def random_key(self):
        return "key_{}".format(random.randint(1, 500))

    def random_binary(self):
        # Despite the name, this returns a pseudo-random integer cache value (1024**1 is just 1024).
        return random.randint(1, 1024**1) * random.randint(0, 255)
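The review thread above asks whether the same benchmarks could cover other cache backends with only a settings change. A minimal sketch of that idea, not part of this PR: the shared benchmark body moves into a base class and each backend subclass only overrides the cache alias. The CacheBackendBase and LocMemCacheBackend names and the "locmem" alias are assumptions, and only time_set is shown for brevity.

import random

from django.core.cache import caches
from django.core.management import call_command

from ...utils import bench_setup


class CacheBackendBase:
    # Subclasses choose the alias; it must exist in CACHES in benchmarks/settings.py.
    cache_alias = "db"

    def setup(self):
        bench_setup()
        if self.cache_alias == "db":
            # The cache table is only needed for the database backend.
            call_command("createcachetable", verbosity=0)
        random.seed(0)
        self.cache = caches[self.cache_alias]

    def time_set(self):
        for _ in range(100):
            self.cache.set("key_{}".format(random.randint(1, 500)), 0)


class LocMemCacheBackend(CacheBackendBase):
    # Hypothetical: assumes a "locmem" alias backed by LocMemCache is added to CACHES.
    cache_alias = "locmem"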
8 changes: 8 additions & 0 deletions benchmarks/settings.py
@@ -9,6 +9,13 @@
"default": {"ENGINE": "django.db.backends.sqlite3", "NAME": ":memory:"},
}

CACHES = {
    "db": {
        "BACKEND": "django.core.cache.backends.db.DatabaseCache",
        "LOCATION": "cache_table",
    },
}

INSTALLED_APPS = [
"django.contrib.auth",
"django.contrib.contenttypes",
@@ -54,6 +61,7 @@
"benchmarks.query_benchmarks.query_select_related",
"benchmarks.req_resp_benchmarks.default_middleware",
"benchmarks.req_resp_benchmarks.http_methods",
"benchmarks.cache_benchmarks.database_cache",
]

SECRET_KEY = "NOT REALLY SECRET"
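If more backends were added along the lines discussed above, the CACHES setting would grow one alias per backend while the benchmark code stays identical. A hedged sketch of what benchmarks/settings.py could then contain; the "locmem" alias is an assumption and is not part of this diff.

CACHES = {
    "db": {
        "BACKEND": "django.core.cache.backends.db.DatabaseCache",
        "LOCATION": "cache_table",
    },
    # Assumed extra alias for an in-memory backend benchmark.
    "locmem": {
        "BACKEND": "django.core.cache.backends.locmem.LocMemCache",
    },
}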
192 changes: 192 additions & 0 deletions results/benchmarks.json
@@ -1,4 +1,196 @@
{
"cache_benchmarks.database_cache.benchmark.DatabaseCacheBackend.time_add": {
"code": "class DatabaseCacheBackend:\n def time_add(self):\n for _ in range(100):\n self.cache.add(self.random_key(), self.random_binary())\n\n def setup(self):\n bench_setup()\n call_command(\"createcachetable\", verbosity=0)\n random.seed(0)\n \n self.cache = caches[\"db\"]\n self.int_key = \"int_key\"\n self.cache.set(self.int_key, 0)",
"min_run_count": 2,
"name": "cache_benchmarks.database_cache.benchmark.DatabaseCacheBackend.time_add",
"number": 0,
"param_names": [],
"params": [],
"repeat": 0,
"rounds": 2,
"sample_time": 0.01,
"timeout": 60.0,
"type": "time",
"unit": "seconds",
"version": "068ce1f47f80990f0eeb45965d11260da8e178e70c678241a275c2eee908b680",
"warmup_time": -1
},
"cache_benchmarks.database_cache.benchmark.DatabaseCacheBackend.time_clear": {
"code": "class DatabaseCacheBackend:\n def time_clear(self):\n for _ in range(100):\n self.cache.clear()\n\n def setup(self):\n bench_setup()\n call_command(\"createcachetable\", verbosity=0)\n random.seed(0)\n \n self.cache = caches[\"db\"]\n self.int_key = \"int_key\"\n self.cache.set(self.int_key, 0)",
"min_run_count": 2,
"name": "cache_benchmarks.database_cache.benchmark.DatabaseCacheBackend.time_clear",
"number": 0,
"param_names": [],
"params": [],
"repeat": 0,
"rounds": 2,
"sample_time": 0.01,
"timeout": 60.0,
"type": "time",
"unit": "seconds",
"version": "949122620be7c68cbd7249e8ef151cd794a7f7e82f2d703ec2d65a3f7b72ab72",
"warmup_time": -1
},
"cache_benchmarks.database_cache.benchmark.DatabaseCacheBackend.time_decr": {
"code": "class DatabaseCacheBackend:\n def time_decr(self):\n for _ in range(100):\n self.cache.decr(self.int_key)\n\n def setup(self):\n bench_setup()\n call_command(\"createcachetable\", verbosity=0)\n random.seed(0)\n \n self.cache = caches[\"db\"]\n self.int_key = \"int_key\"\n self.cache.set(self.int_key, 0)",
"min_run_count": 2,
"name": "cache_benchmarks.database_cache.benchmark.DatabaseCacheBackend.time_decr",
"number": 0,
"param_names": [],
"params": [],
"repeat": 0,
"rounds": 2,
"sample_time": 0.01,
"timeout": 60.0,
"type": "time",
"unit": "seconds",
"version": "9c8dfd36603e0b69964f151c3ab330c5653af7069031b664d016925319e39eed",
"warmup_time": -1
},
"cache_benchmarks.database_cache.benchmark.DatabaseCacheBackend.time_delete": {
"code": "class DatabaseCacheBackend:\n def time_delete(self):\n for _ in range(100):\n self.cache.delete(self.random_key())\n\n def setup(self):\n bench_setup()\n call_command(\"createcachetable\", verbosity=0)\n random.seed(0)\n \n self.cache = caches[\"db\"]\n self.int_key = \"int_key\"\n self.cache.set(self.int_key, 0)",
"min_run_count": 2,
"name": "cache_benchmarks.database_cache.benchmark.DatabaseCacheBackend.time_delete",
"number": 0,
"param_names": [],
"params": [],
"repeat": 0,
"rounds": 2,
"sample_time": 0.01,
"timeout": 60.0,
"type": "time",
"unit": "seconds",
"version": "07f0a8ee98f405b3c9cdacfa3fbd7d647457be48ca7714da8e56372d37e88fbc",
"warmup_time": -1
},
"cache_benchmarks.database_cache.benchmark.DatabaseCacheBackend.time_delete_many": {
"code": "class DatabaseCacheBackend:\n def time_delete_many(self):\n for _ in range(100):\n self.cache.delete_many([self.random_key() for x in range(100)])\n\n def setup(self):\n bench_setup()\n call_command(\"createcachetable\", verbosity=0)\n random.seed(0)\n \n self.cache = caches[\"db\"]\n self.int_key = \"int_key\"\n self.cache.set(self.int_key, 0)",
"min_run_count": 2,
"name": "cache_benchmarks.database_cache.benchmark.DatabaseCacheBackend.time_delete_many",
"number": 0,
"param_names": [],
"params": [],
"repeat": 0,
"rounds": 2,
"sample_time": 0.01,
"timeout": 60.0,
"type": "time",
"unit": "seconds",
"version": "a04f9fcb13aee07e2d95a68d20ac814d4065def9efef59c0472fd293ac63256f",
"warmup_time": -1
},
"cache_benchmarks.database_cache.benchmark.DatabaseCacheBackend.time_get": {
"code": "class DatabaseCacheBackend:\n def time_get(self):\n for _ in range(100):\n self.cache.get(self.random_key())\n\n def setup(self):\n bench_setup()\n call_command(\"createcachetable\", verbosity=0)\n random.seed(0)\n \n self.cache = caches[\"db\"]\n self.int_key = \"int_key\"\n self.cache.set(self.int_key, 0)",
"min_run_count": 2,
"name": "cache_benchmarks.database_cache.benchmark.DatabaseCacheBackend.time_get",
"number": 0,
"param_names": [],
"params": [],
"repeat": 0,
"rounds": 2,
"sample_time": 0.01,
"timeout": 60.0,
"type": "time",
"unit": "seconds",
"version": "52a8f75efff5681f972451f3e3af030e973d587ed7c15aaac9ceedd955958e9f",
"warmup_time": -1
},
"cache_benchmarks.database_cache.benchmark.DatabaseCacheBackend.time_get_many": {
"code": "class DatabaseCacheBackend:\n def time_get_many(self):\n for _ in range(100):\n self.cache.get_many([self.random_key() for x in range(100)])\n\n def setup(self):\n bench_setup()\n call_command(\"createcachetable\", verbosity=0)\n random.seed(0)\n \n self.cache = caches[\"db\"]\n self.int_key = \"int_key\"\n self.cache.set(self.int_key, 0)",
"min_run_count": 2,
"name": "cache_benchmarks.database_cache.benchmark.DatabaseCacheBackend.time_get_many",
"number": 0,
"param_names": [],
"params": [],
"repeat": 0,
"rounds": 2,
"sample_time": 0.01,
"timeout": 60.0,
"type": "time",
"unit": "seconds",
"version": "7dd0ec68c7105e52259b3898aeb8335e13e48b6b4fe72e7613b922b5378f156c",
"warmup_time": -1
},
"cache_benchmarks.database_cache.benchmark.DatabaseCacheBackend.time_get_or_set": {
"code": "class DatabaseCacheBackend:\n def time_get_or_set(self):\n for _ in range(100):\n self.cache.get_or_set(self.random_key(), self.random_binary())\n\n def setup(self):\n bench_setup()\n call_command(\"createcachetable\", verbosity=0)\n random.seed(0)\n \n self.cache = caches[\"db\"]\n self.int_key = \"int_key\"\n self.cache.set(self.int_key, 0)",
"min_run_count": 2,
"name": "cache_benchmarks.database_cache.benchmark.DatabaseCacheBackend.time_get_or_set",
"number": 0,
"param_names": [],
"params": [],
"repeat": 0,
"rounds": 2,
"sample_time": 0.01,
"timeout": 60.0,
"type": "time",
"unit": "seconds",
"version": "e745c4556a6e6d8467ee9d92ffcdd35e46538fa3d3b7f41262fac3741ce408a0",
"warmup_time": -1
},
"cache_benchmarks.database_cache.benchmark.DatabaseCacheBackend.time_incr": {
"code": "class DatabaseCacheBackend:\n def time_incr(self):\n for _ in range(100):\n self.cache.incr(self.int_key)\n\n def setup(self):\n bench_setup()\n call_command(\"createcachetable\", verbosity=0)\n random.seed(0)\n \n self.cache = caches[\"db\"]\n self.int_key = \"int_key\"\n self.cache.set(self.int_key, 0)",
"min_run_count": 2,
"name": "cache_benchmarks.database_cache.benchmark.DatabaseCacheBackend.time_incr",
"number": 0,
"param_names": [],
"params": [],
"repeat": 0,
"rounds": 2,
"sample_time": 0.01,
"timeout": 60.0,
"type": "time",
"unit": "seconds",
"version": "2528a8b9f20cd63f856f2433c945a86b1ed47d80067ed274d0658c82a294b55e",
"warmup_time": -1
},
"cache_benchmarks.database_cache.benchmark.DatabaseCacheBackend.time_set": {
"code": "class DatabaseCacheBackend:\n def time_set(self):\n for _ in range(100):\n self.cache.set(self.random_key(), self.random_binary())\n\n def setup(self):\n bench_setup()\n call_command(\"createcachetable\", verbosity=0)\n random.seed(0)\n \n self.cache = caches[\"db\"]\n self.int_key = \"int_key\"\n self.cache.set(self.int_key, 0)",
"min_run_count": 2,
"name": "cache_benchmarks.database_cache.benchmark.DatabaseCacheBackend.time_set",
"number": 0,
"param_names": [],
"params": [],
"repeat": 0,
"rounds": 2,
"sample_time": 0.01,
"timeout": 60.0,
"type": "time",
"unit": "seconds",
"version": "531e6b038e1a3c1d508e8384158bcd6a5a8f0fb4e0882f745d4c1857f9bc50aa",
"warmup_time": -1
},
"cache_benchmarks.database_cache.benchmark.DatabaseCacheBackend.time_set_many": {
"code": "class DatabaseCacheBackend:\n def time_set_many(self):\n for _ in range(100):\n self.cache.set_many(\n {self.random_key(): self.random_binary() for x in range(100)}\n )\n\n def setup(self):\n bench_setup()\n call_command(\"createcachetable\", verbosity=0)\n random.seed(0)\n \n self.cache = caches[\"db\"]\n self.int_key = \"int_key\"\n self.cache.set(self.int_key, 0)",
"min_run_count": 2,
"name": "cache_benchmarks.database_cache.benchmark.DatabaseCacheBackend.time_set_many",
"number": 0,
"param_names": [],
"params": [],
"repeat": 0,
"rounds": 2,
"sample_time": 0.01,
"timeout": 60.0,
"type": "time",
"unit": "seconds",
"version": "0e00525996147e94dd7c38af71224e7f7cae740572cb1b9c1127cde1b9be5259",
"warmup_time": -1
},
"cache_benchmarks.database_cache.benchmark.DatabaseCacheBackend.time_touch": {
"code": "class DatabaseCacheBackend:\n def time_touch(self):\n for _ in range(100):\n self.cache.touch(self.random_key())\n\n def setup(self):\n bench_setup()\n call_command(\"createcachetable\", verbosity=0)\n random.seed(0)\n \n self.cache = caches[\"db\"]\n self.int_key = \"int_key\"\n self.cache.set(self.int_key, 0)",
"min_run_count": 2,
"name": "cache_benchmarks.database_cache.benchmark.DatabaseCacheBackend.time_touch",
"number": 0,
"param_names": [],
"params": [],
"repeat": 0,
"rounds": 2,
"sample_time": 0.01,
"timeout": 60.0,
"type": "time",
"unit": "seconds",
"version": "1f38b4b59244617c19320bbf8226e74165bba613d89fb314cfec928a8b55dd96",
"warmup_time": -1
},
"data_struct_benchmarks.multi_value_dict.benchmark.MultiValueDictBench.time_multi_value_dict": {
"code": "class MultiValueDictBench:\n def time_multi_value_dict(self):\n for i in range(1000):\n case_dict = MultiValueDict(self.case)\n \n case_dict[\"a\"]\n case_dict[\"b\"]\n case_dict[\"c\"]\n \n case_dict.update(self.update)\n copy.copy(case_dict)\n copy.deepcopy(case_dict)\n \n case_dict.items()\n case_dict.lists()\n for i in case_dict:\n i\n \n case_dict[\"a\"] = \"A\"\n case_dict[\"b\"] = \"B\"\n case_dict[\"c\"] = \"C\"\n\n def setup(self):\n bench_setup()\n self.case = {\"a\": [\"a\"], \"b\": [\"a\", \"b\"], \"c\": [\"a\", \"b\", \"c\"]}\n self.update = {\"a\": [\"a\"], \"b\": [\"a\", \"b\"], \"c\": [\"a\", \"b\", \"c\"]}",
"min_run_count": 2,
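The entries above in results/benchmarks.json are machine-generated benchmark metadata (name, code snapshot, timing parameters). A small hedged helper, not part of the PR, for confirming that the new database cache entries are registered; it assumes it is run from the repository root.

import json

with open("results/benchmarks.json") as fh:
    benchmarks = json.load(fh)

# Top-level keys are benchmark names (plus any housekeeping keys such as a
# top-level "version"), so filter by the module prefix added in this PR.
new_entries = sorted(
    name for name in benchmarks
    if name.startswith("cache_benchmarks.database_cache")
)
print(len(new_entries), "database cache benchmarks registered:")
for name in new_entries:
    print(" -", name)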