Merge pull request #276 from martinsumner/mas-i275-logging
Mas i275 logging
martinsumner authored May 11, 2019
2 parents 9ab401c + aedd515 commit 984a2ab
Showing 8 changed files with 59 additions and 44 deletions.
2 changes: 1 addition & 1 deletion src/leveled.app.src
@@ -1,7 +1,7 @@
 {application, leveled,
   [
     {description, "Key Value store based on LSM-Tree and designed for larger values"},
-    {vsn, "0.9.13"},
+    {vsn, "0.9.14"},
     {registered, []},
     {applications, [
         kernel,
6 changes: 3 additions & 3 deletions src/leveled_bookie.erl
@@ -109,13 +109,13 @@
 -define(CACHE_SIZE_JITTER, 25).
 -define(JOURNAL_SIZE_JITTER, 20).
 -define(ABSOLUTEMAX_JOURNALSIZE, 4000000000).
--define(LONG_RUNNING, 200000).
-    % An individual task taking > 200ms gets a specific log
+-define(LONG_RUNNING, 1000000).
+    % An individual task taking > 1s gets a specific log
 -define(COMPRESSION_METHOD, lz4).
 -define(COMPRESSION_POINT, on_receipt).
 -define(LOG_LEVEL, info).
 -define(TIMING_SAMPLESIZE, 100).
--define(TIMING_SAMPLECOUNTDOWN, 10000).
+-define(TIMING_SAMPLECOUNTDOWN, 50000).
 -define(DUMMY, dummy). % Dummy key used for mput operations
 -define(MAX_KEYCHECK_FREQUENCY, 100).
 -define(MIN_KEYCHECK_FREQUENCY, 1).
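The ?LONG_RUNNING threshold is expressed in microseconds, so the new value of 1000000 matches the updated comment (one second rather than 200ms). As background, a minimal sketch of how a microsecond threshold like this can gate a slow-task log is shown below; the module, function names and io:format output are illustrative assumptions, not leveled's actual logging path:

-module(long_running_sketch).
-export([time_task/2]).

-define(LONG_RUNNING, 1000000). % 1s, expressed in microseconds

%% Run TaskFun, and emit a line only if it took longer than the threshold.
time_task(TaskName, TaskFun) ->
    SW = os:timestamp(),
    Result = TaskFun(),
    case timer:now_diff(os:timestamp(), SW) of
        US when US > ?LONG_RUNNING ->
            io:format("long_running task=~w duration_us=~w~n", [TaskName, US]);
        _ ->
            ok
    end,
    Result.

For example, time_task(slow_fold, fun() -> timer:sleep(1500) end) would log, whereas a sub-second task would not.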
2 changes: 1 addition & 1 deletion src/leveled_cdb.erl
@@ -128,7 +128,7 @@
 -define(WRITE_OPS, [binary, raw, read, write]).
 -define(PENDING_ROLL_WAIT, 30).
 -define(DELETE_TIMEOUT, 10000).
--define(TIMING_SAMPLECOUNTDOWN, 1000).
+-define(TIMING_SAMPLECOUNTDOWN, 5000).
 -define(TIMING_SAMPLESIZE, 100).
 -define(MAX_OBJECT_SIZE, 1000000000).
     % 1GB but really should be much smaller than this
10 changes: 7 additions & 3 deletions src/leveled_inker.erl
@@ -733,10 +733,14 @@ handle_cast({clerk_complete, ManifestSnippet, FilesToDelete}, State) ->
                            pending_removals=FilesToDelete,
                            compaction_pending=false}};
 handle_cast({release_snapshot, Snapshot}, State) ->
-    Rs = lists:keydelete(Snapshot, 1, State#state.registered_snapshots),
     leveled_log:log("I0003", [Snapshot]),
-    leveled_log:log("I0004", [length(Rs)]),
-    {noreply, State#state{registered_snapshots=Rs}};
+    case lists:keydelete(Snapshot, 1, State#state.registered_snapshots) of
+        [] ->
+            {noreply, State#state{registered_snapshots=[]}};
+        Rs ->
+            leveled_log:log("I0004", [length(Rs)]),
+            {noreply, State#state{registered_snapshots=Rs}}
+    end;
 handle_cast({log_level, LogLevel}, State) ->
     INC = State#state.clerk,
     ok = leveled_iclerk:clerk_loglevel(INC, LogLevel),
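The reworked clause means I0004 ("Remaining number of journal snapshots is ~w") is no longer logged when the released snapshot was the last one registered. lists:keydelete/3 returns the list with the first tuple whose Nth element matches removed, so an empty result is the natural signal that nothing remains. A shell illustration (the snapshot tuples here are made-up placeholders, not the real registered_snapshots shape):

1> Registered = [{snap_a, 100}, {snap_b, 120}].
[{snap_a,100},{snap_b,120}]
2> lists:keydelete(snap_a, 1, Registered).
[{snap_b,120}]
3> lists:keydelete(snap_b, 1, [{snap_b, 120}]).
[]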
10 changes: 5 additions & 5 deletions src/leveled_log.erl
@@ -56,7 +56,7 @@
{"B0008",
{info, "Bucket list finds no more results"}},
{"B0009",
{info, "Bucket list finds Bucket ~w"}},
{debug, "Bucket list finds Bucket ~w"}},
{"B0011",
{warn, "Call to destroy the store and so all files to be removed"}},
{"B0013",
@@ -191,7 +191,7 @@
{"PC006",
{debug, "Work prompted but none needed"}},
{"PC007",
{info, "Clerk prompting Penciller regarding manifest change"}},
{debug, "Clerk prompting Penciller regarding manifest change"}},
{"PC008",
{info, "Merge from level ~w to merge into ~w files below"}},
{"PC009",
@@ -252,7 +252,7 @@
++ "pmem_fetch=~w merge_lists=~w build_slots=~w "
++ "build_summary=~w read_switch=~w"}},
{"SST12",
{info, "SST Timings for sample_count=~w"
{info, "SST Timings at level=~w for sample_count=~w"
++ " at timing points index_query_time=~w"
++ " lookup_cache_time=~w slot_index_time=~w "
++ " fetch_cache_time=~w slot_fetch_time=~w"
@@ -269,9 +269,9 @@
{info, "Unexpected failure to fetch value for Key=~w SQN=~w "
++ "with reason ~w"}},
{"I0002",
{info, "Journal snapshot ~w registered at SQN ~w"}},
{debug, "Journal snapshot ~w registered at SQN ~w"}},
{"I0003",
{info, "Journal snapshot ~w released"}},
{debug, "Journal snapshot ~w released"}},
{"I0004",
{info, "Remaining number of journal snapshots is ~w"}},
{"I0005",
10 changes: 6 additions & 4 deletions src/leveled_penciller.erl
@@ -234,7 +234,7 @@
 -define(PROMPT_WAIT_ONL0, 5).
 -define(WORKQUEUE_BACKLOG_TOLERANCE, 4).
 -define(COIN_SIDECOUNT, 5).
--define(SLOW_FETCH, 100000).
+-define(SLOW_FETCH, 500000). % Log a very slow fetch - longer than 500ms
 -define(ITERATOR_SCANWIDTH, 4).
 -define(TIMING_SAMPLECOUNTDOWN, 10000).
 -define(TIMING_SAMPLESIZE, 100).
@@ -1142,9 +1142,10 @@ start_from_file(PCLopts) ->
     %% Open manifest
     Manifest0 = leveled_pmanifest:open_manifest(RootPath),
     OpenFun =
-        fun(FN) ->
+        fun(FN, Level) ->
             {ok, Pid, {_FK, _LK}, Bloom} =
-                leveled_sst:sst_open(sst_rootpath(RootPath), FN, OptsSST),
+                leveled_sst:sst_open(sst_rootpath(RootPath),
+                                        FN, OptsSST, Level),
             {Pid, Bloom}
         end,
     SQNFun = fun leveled_sst:sst_getmaxsequencenumber/1,
@@ -1161,7 +1162,8 @@
leveled_log:log("P0015", [L0FN]),
L0Open = leveled_sst:sst_open(sst_rootpath(RootPath),
L0FN,
OptsSST),
OptsSST,
0),
{ok, L0Pid, {L0StartKey, L0EndKey}, Bloom} = L0Open,
L0SQN = leveled_sst:sst_getmaxsequencenumber(L0Pid),
L0Entry = #manifest_entry{start_key = L0StartKey,
4 changes: 2 additions & 2 deletions src/leveled_pmanifest.erl
@@ -557,7 +557,7 @@ load_level(LevelIdx, Level, LoadFun, SQNFun) ->
     HigherLevelLoadFun =
         fun(ME, {L_Out, L_MaxSQN, FileList, BloomL}) ->
             FN = ME#manifest_entry.filename,
-            {P, Bloom} = LoadFun(FN),
+            {P, Bloom} = LoadFun(FN, LevelIdx),
             SQN = SQNFun(P),
             {[ME#manifest_entry{owner=P}|L_Out],
                 max(SQN, L_MaxSQN),
@@ -567,7 +567,7 @@ load_level(LevelIdx, Level, LoadFun, SQNFun) ->
     LowerLevelLoadFun =
         fun({EK, ME}, {L_Out, L_MaxSQN, FileList, BloomL}) ->
             FN = ME#manifest_entry.filename,
-            {P, Bloom} = LoadFun(FN),
+            {P, Bloom} = LoadFun(FN, LevelIdx),
             SQN = SQNFun(P),
             {[{EK, ME#manifest_entry{owner=P}}|L_Out],
                 max(SQN, L_MaxSQN),
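Both load funs now receive the level index alongside the filename, which is how the Level supplied by the penciller's OpenFun (previous file) reaches leveled_sst:sst_open/4. The funs are applied inside a fold whose accumulator also tracks the highest SQN seen across the level's files; a compact sketch of that accumulator pattern under those assumptions (names are illustrative, not the module's own):

-module(manifest_load_sketch).
-export([load_files/4]).

%% Illustrative fold: open every file at a level via a LoadFun/2 of the new
%% arity, collecting the owning pids and the maximum sequence number seen.
load_files(Filenames, LevelIdx, LoadFun, SQNFun) ->
    FoldFun =
        fun(FN, {Owners, MaxSQN}) ->
            {Pid, _Bloom} = LoadFun(FN, LevelIdx),
            {[Pid | Owners], max(SQNFun(Pid), MaxSQN)}
        end,
    lists:foldl(FoldFun, {[], 0}, Filenames).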
59 changes: 34 additions & 25 deletions src/leveled_sst.erl
@@ -84,7 +84,7 @@
 -define(DELETE_TIMEOUT, 10000).
 -define(TREE_TYPE, idxt).
 -define(TREE_SIZE, 4).
--define(TIMING_SAMPLECOUNTDOWN, 10000).
+-define(TIMING_SAMPLECOUNTDOWN, 20000).
 -define(TIMING_SAMPLESIZE, 100).
 -define(CACHE_SIZE, 32).
 -define(BLOCK_LENGTHS_LENGTH, 20).
@@ -113,7 +113,7 @@
 -export([sst_new/6,
             sst_new/8,
             sst_newlevelzero/7,
-            sst_open/3,
+            sst_open/4,
             sst_get/2,
             sst_get/3,
             sst_expandpointer/5,
@@ -192,7 +192,8 @@
                 starting_pid :: pid()|undefined,
                 fetch_cache = array:new([{size, ?CACHE_SIZE}]),
                 new_slots :: list()|undefined,
-                deferred_startup_tuple :: tuple()|undefined}).
+                deferred_startup_tuple :: tuple()|undefined,
+                level :: non_neg_integer()|undefined}).
 
 -record(sst_timings,
         {sample_count = 0 :: integer(),
@@ -224,7 +225,7 @@
 %%% API
 %%%============================================================================
 
--spec sst_open(string(), string(), sst_options())
+-spec sst_open(string(), string(), sst_options(), non_neg_integer())
             -> {ok, pid(),
                 {leveled_codec:ledger_key(), leveled_codec:ledger_key()},
                 binary()}.
@@ -236,10 +237,11 @@
 %% term order.
 %%
 %% The filename should include the file extension.
-sst_open(RootPath, Filename, OptsSST) ->
+sst_open(RootPath, Filename, OptsSST, Level) ->
     {ok, Pid} = gen_fsm:start_link(?MODULE, [], []),
     case gen_fsm:sync_send_event(Pid,
-                                    {sst_open, RootPath, Filename, OptsSST},
+                                    {sst_open,
+                                        RootPath, Filename, OptsSST, Level},
                                     infinity) of
         {ok, {SK, EK}, Bloom} ->
             {ok, Pid, {SK, EK}, Bloom}
@@ -456,8 +458,8 @@ sst_checkready(Pid) ->
 %% This simply prompts a GC on the PID now (as this may now be a long-lived
 %% file, so don't want all the startup state to be held on memory - want to
 %% proactively drop it
-sst_switchlevels(Pid, _NewLevel) ->
-    gen_fsm:send_event(Pid, switch_levels).
+sst_switchlevels(Pid, NewLevel) ->
+    gen_fsm:send_event(Pid, {switch_levels, NewLevel}).
 
 -spec sst_close(pid()) -> ok.
 %% @doc
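The switch_levels event now carries the destination level, so as well as prompting a garbage collect the file process records the level it now sits at (which feeds the SST12 timing log further down). A caller holding the file's pid would prompt the move with something like the line below (the SSTPid variable and target level 3 are hypothetical):

ok = leveled_sst:sst_switchlevels(SSTPid, 3).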
@@ -481,15 +483,15 @@ sst_printtimings(Pid) ->
 init([]) ->
     {ok, starting, #state{}}.
 
-starting({sst_open, RootPath, Filename, OptsSST}, _From, State) ->
+starting({sst_open, RootPath, Filename, OptsSST, Level}, _From, State) ->
     leveled_log:save(OptsSST#sst_options.log_options),
     {UpdState, Bloom} =
         read_file(Filename, State#state{root_path=RootPath}),
     Summary = UpdState#state.summary,
     {reply,
         {ok, {Summary#summary.first_key, Summary#summary.last_key}, Bloom},
         reader,
-        UpdState};
+        UpdState#state{level = Level}};
 starting({sst_new,
             RootPath, Filename, Level,
             {SlotList, FirstKey}, MaxSQN,
@@ -519,14 +521,15 @@ starting({sst_new,
         {ok, {Summary#summary.first_key, Summary#summary.last_key}, Bloom},
         reader,
         UpdState#state{blockindex_cache = BlockIndex,
-                        starting_pid = StartingPID}};
+                        starting_pid = StartingPID,
+                        level = Level}};
 starting({sst_newlevelzero, RootPath, Filename,
             Penciller, MaxSQN,
             OptsSST, IdxModDate}, _From, State) ->
     DeferredStartupTuple =
         {RootPath, Filename, Penciller, MaxSQN, OptsSST, IdxModDate},
     {reply, ok, starting,
-        State#state{deferred_startup_tuple = DeferredStartupTuple}};
+        State#state{deferred_startup_tuple = DeferredStartupTuple, level = 0}};
 starting(close, _From, State) ->
     % No file should have been created, so nothing to close.
     {stop, normal, ok, State}.
@@ -630,7 +633,9 @@ reader({get_kv, LedgerKey, Hash}, _From, State) ->
         fetch(LedgerKey, Hash, State, State#state.timings),
 
     {UpdTimings0, CountDown} =
-        update_statetimings(UpdTimings, State#state.timings_countdown),
+        update_statetimings(UpdTimings,
+                            State#state.timings_countdown,
+                            State#state.level),
 
     {reply, Result, reader, UpdState#state{timings = UpdTimings0,
                                             timings_countdown = CountDown}};
@@ -689,7 +694,7 @@ reader(get_maxsequencenumber, _From, State) ->
     Summary = State#state.summary,
     {reply, Summary#summary.max_sqn, reader, State};
 reader(print_timings, _From, State) ->
-    log_timings(State#state.timings),
+    log_timings(State#state.timings, State#state.level),
     {reply, ok, reader, State};
 reader({set_for_delete, Penciller}, _From, State) ->
     leveled_log:log("SST06", [State#state.filename]),
@@ -711,9 +716,9 @@ reader(close, _From, State) ->
     ok = file:close(State#state.handle),
     {stop, normal, ok, State}.
 
-reader(switch_levels, State) ->
+reader({switch_levels, NewLevel}, State) ->
     erlang:garbage_collect(self()),
-    {next_state, reader, State}.
+    {next_state, reader, State#state{level = NewLevel}}.
 
 
 delete_pending({get_kv, LedgerKey, Hash}, _From, State) ->
@@ -2479,7 +2484,7 @@ log_buildtimings(Timings, LI) ->
                                 element(2, LI)]).
 
 
--spec update_statetimings(sst_timings(), integer())
+-spec update_statetimings(sst_timings(), integer(), non_neg_integer())
                                             -> {sst_timings(), integer()}.
 %% @doc
 %%
@@ -2490,23 +2495,27 @@ log_buildtimings(Timings, LI) ->
 %%
 %% Outside of sample windows the timings object should be set to the atom
 %% no_timing. no_timing is a valid state for the cdb_timings type.
-update_statetimings(no_timing, 0) ->
+update_statetimings(no_timing, 0, _Level) ->
     {#sst_timings{}, 0};
-update_statetimings(Timings, 0) ->
+update_statetimings(Timings, 0, Level) ->
     case Timings#sst_timings.sample_count of
         SC when SC >= ?TIMING_SAMPLESIZE ->
-            log_timings(Timings),
-            {no_timing, leveled_rand:uniform(2 * ?TIMING_SAMPLECOUNTDOWN)};
+            log_timings(Timings, Level),
+            % If file at lower level wait longer before taking another
+            % sample
+            {no_timing,
+                leveled_rand:uniform(2 * ?TIMING_SAMPLECOUNTDOWN)};
         _SC ->
             {Timings, 0}
     end;
-update_statetimings(no_timing, N) ->
+update_statetimings(no_timing, N, _Level) ->
     {no_timing, N - 1}.
 
-log_timings(no_timing) ->
+log_timings(no_timing, _Level) ->
     ok;
-log_timings(Timings) ->
-    leveled_log:log("SST12", [Timings#sst_timings.sample_count,
+log_timings(Timings, Level) ->
+    leveled_log:log("SST12", [Level,
+                                Timings#sst_timings.sample_count,
                                 Timings#sst_timings.index_query_time,
                                 Timings#sst_timings.lookup_cache_time,
                                 Timings#sst_timings.slot_index_time,
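Taken together, the SST12 log now carries the file's level as its first substitution, and update_statetimings/3 drives a sampling cycle: a countdown of get_kv calls runs down to zero, timings are accumulated until ?TIMING_SAMPLESIZE samples exist, the sample is logged with the level, and a fresh countdown is drawn uniformly from 1 to 2 * ?TIMING_SAMPLECOUNTDOWN. A simplified, self-contained sketch of that cycle (a two-state term stands in for the real record, and rand:uniform for leveled_rand:uniform):

-module(sst_sampling_sketch).
-export([next_sample_state/1]).

-define(TIMING_SAMPLECOUNTDOWN, 20000).
-define(TIMING_SAMPLESIZE, 100).

%% State is either {countdown, N} or {sampling, SampleCount}.
next_sample_state({countdown, 0}) ->
    % countdown exhausted - start a sample window
    {sampling, 0};
next_sample_state({countdown, N}) ->
    {countdown, N - 1};
next_sample_state({sampling, SC}) when SC >= ?TIMING_SAMPLESIZE ->
    % the sample would be logged here (as SST12), then a new random countdown begins
    {countdown, rand:uniform(2 * ?TIMING_SAMPLECOUNTDOWN)};
next_sample_state({sampling, SC}) ->
    {sampling, SC + 1}.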
Expand Down
