-
Notifications
You must be signed in to change notification settings - Fork 0
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Merge pull request #15 from ProducerMatt/benching
A ton of performance-oriented work
- Loading branch information
Showing
35 changed files
with
1,613 additions
and
527 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -33,3 +33,5 @@ erl_crash.dump | |
/.env | ||
|
||
/priv | ||
|
||
/eflame |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,6 @@ | ||
#!/bin/sh
# Runs a Benchee .exs benchmark script under the "bench" Mix environment.
# Usage: ./run_bench.sh path/to/benchmark.exs
#
# Fix: the original used bash-only `[[ ]]` under a /bin/sh shebang, which
# breaks on POSIX shells (dash, ash); `[ ]` is the portable test builtin.
if [ -z "$1" ]; then
  echo "Needs a .exs script"
  exit 1
fi
# Quote "$1" so paths containing spaces or glob characters are passed intact.
MIX_ENV="bench" mix run "$1"
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,36 @@ | ||
TYPE CHECKING MAKES PROCESSING TWICE AS SLOW | ||
##### With input 20 fast modules, 32 messages, 4 divisions ##### | ||
Name ips average deviation median 99th % | ||
Typechecking 0.29 3.42 s ±3.95% 3.34 s 3.64 s | ||
Typecheck + no struct checks 0.47 2.12 s ±7.62% 2.02 s 2.46 s | ||
No typechecking + struct checks 0.60 1.66 s ±4.60% 1.62 s 1.84 s | ||
No typechecking + no struct checks 9.56 104.56 ms ±2.29% 104.08 ms 116.36 ms | ||
|
||
MNESIA CONCURRENCY MAKES LITTLE DIFFERENCE | ||
##### With input 20 fast modules, 512 messages, 8 divisions ##### | ||
Name ips average deviation median 99th % | ||
with concurrency 1.19 840.46 ms ±1.91% 835.33 ms 914.21 ms | ||
without concurrency 1.18 845.20 ms ±2.58% 836.34 ms 923.01 ms | ||
|
||
MNESIA COMPRESSION FOR INTERACTIONS IS PRETTY GOOD | ||
with compression: | ||
Elixir.Stampede.Tables.Interactions: with 41984 records occupying 23285913 words of mem | ||
without compression: | ||
Elixir.Stampede.Tables.Interactions: with 41984 records occupying 61680281 words of mem | ||
|
||
OVERHEAD FOR SENDING CFG+MSG TO NEW THREAD ISN'T A PROBLEM | ||
##### With input 20 fast modules, 512 messages, 8 divisions ##### | ||
Name ips average deviation median 99th % | ||
Avoiding sending data through threads 1.19 839.05 ms ±2.00% 835.25 ms 948.81 ms | ||
Copying cfg and msg to new thread 1.19 841.37 ms ±1.47% 838.30 ms 891.12 ms | ||
Combine query/response into one func 1.18 844.46 ms ±1.87% 837.42 ms 905.90 ms | ||
|
||
FOR PLUGIN MULTITHREADING, NO DIFFERENCE FOUND BETWEEN DIRECT Task.Async AND Task.Supervisor AND PartitionSupervisor | ||
(no stats) | ||
|
||
SYMBOLIC TRACEBACK LOGGING SAVES MEMORY | ||
IOLIST: 9728 records occupying 5428632 words of mem | ||
averaging 558 words per record | ||
|
||
SYMBOLIC: 10752 records occupying 2264216 words of mem | ||
averaging 210 words per record |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,103 @@ | ||
alias Stampede, as: S

# 2nd place in the benchmark: regex scan returning the captured substrings.
# `_len` is unused here (the chunk length is baked into `premade_regex`);
# it stays in the signature so all candidates share the same call shape.
text_chunk = fn text, _len, max_pieces, premade_regex ->
  # Fix: `trim:` is not a Regex.scan/3 option (it belongs to Regex.split/3)
  # and was silently ignored, so it is dropped; behavior is unchanged.
  # Each match is a one-element capture list, hence the `hd/1` unwrap.
  premade_regex
  |> Regex.scan(text, capture: :all_but_first)
  |> Enum.take(max_pieces)
  |> Enum.map(&hd/1)
end
|
||
# 1st place in the benchmark: regex scan returning {offset, byte_length}
# index pairs, then extracting each chunk with binary_part/3 so no
# intermediate capture sublists are materialized.
text_chunk_binary_part = fn text, _len, max_pieces, premade_regex ->
  # Fix: `trim:` is not a Regex.scan/3 option and was silently ignored;
  # dropped with no behavior change. `_len` is unused (length is encoded
  # in `premade_regex`) but kept so all candidates share a call shape.
  premade_regex
  |> Regex.scan(text, capture: :all_but_first, return: :index)
  |> Enum.take(max_pieces)
  |> Enum.map(fn [{start, size}] -> binary_part(text, start, size) end)
end
|
||
defmodule T do
  @moduledoc false
  # Benchmark candidate ("very distant last place"): manual grapheme-based
  # chunking via String.slice, accumulating in reverse and flipping once.

  @doc """
  Splits `text` into up to `max_pieces` chunks of `len` graphemes each.
  Returns the chunks in order; a final short chunk keeps the remainder.
  """
  def text_chunk_iter(text, len, max_pieces) when max_pieces > 0 do
    text
    |> do_text_chunk_iter(len, max_pieces, String.length(text))
    |> Enum.reverse()
  end

  def do_text_chunk_iter(text, len, max_pieces, txt_length, acc \\ [])

  # Piece budget exhausted: stop, discarding any remaining text.
  def do_text_chunk_iter(_text, _len, 0, _txt_length, acc), do: acc

  # Remainder fits in one chunk. Fix: `<=` (was `<`) so an exact-fit input
  # terminates here instead of recursing and appending a trailing "" chunk.
  def do_text_chunk_iter(text, len, max_pieces, txt_length, acc)
      when max_pieces > 0 and txt_length <= len,
      do: [text | acc]

  def do_text_chunk_iter(text, len, max_pieces, txt_length, acc)
      when max_pieces > 0 do
    # Fix: the original sliced `0..len` — an inclusive range of len + 1
    # graphemes — so the boundary grapheme was duplicated into the next
    # chunk. String.slice/3 with a count takes exactly `len` graphemes.
    # (The original's unreachable `if len > txt_length` branch is removed:
    # the clause above already handles txt_length <= len.)
    chunk = String.slice(text, 0, len)
    rest = String.slice(text, len, txt_length - len)

    do_text_chunk_iter(rest, len, max_pieces - 1, txt_length - len, [chunk | acc])
  end
end
|
||
# Chunk length and piece cap shared by every benchmark candidate.
split_size = 1999
max_pieces = 10

# Simulates downstream per-chunk work (10 ms each) and asserts every chunk
# is a binary, so a candidate producing non-binaries fails loudly. Returns
# the chunks in reverse order (prepend-accumulate, never reversed — the
# result is discarded by the benchmark, only the work matters).
fake_work = fn chunks ->
  Enum.reduce(chunks, [], fn elem, lst ->
    Process.sleep(10)

    # Fix: `unless` is soft-deprecated (Elixir >= 1.18); `if not` is the
    # recommended equivalent. The raise message is unchanged.
    if not is_binary(elem) do
      raise "bad split"
    end

    [elem | lst]
  end)
end
|
||
# Benchmark inputs sized relative to split_size: below one chunk, a few
# chunks, more chunks than max_pieces allows, and a pathologically large one.
inputs = %{
  "small message" => S.random_string_weak(div(split_size, 4)),
  "medium message" => S.random_string_weak(split_size * 4),
  "large message" => S.random_string_weak(split_size * (max_pieces + div(max_pieces, 3))),
  "malicious message" => S.random_string_weak(9_999_999)
}

# Pre-compiled shared regex: one capture of up to split_size graphemes
# per match ("u" = unicode, "s" = dotall).
reg = Regex.compile!("(.{1,#{split_size}})", "us")

# Compare the three chunking strategies, feeding each result through the
# same simulated downstream workload.
Benchee.run(
  %{
    "regex scan" => fn txt ->
      fake_work.(text_chunk.(txt, split_size, max_pieces, reg))
    end,
    "regex index referencing" => fn txt ->
      fake_work.(text_chunk_binary_part.(txt, split_size, max_pieces, reg))
    end,
    "manual substrings" => fn txt ->
      fake_work.(T.text_chunk_iter(txt, split_size, max_pieces))
    end
  },
  inputs: inputs,
  time: 20,
  memory_time: 3,
  pre_check: true
)
Oops, something went wrong.