11 changes: 6 additions & 5 deletions doc/operation-and-maintenance/MongooseIM-metrics.md
@@ -50,16 +50,17 @@ All metrics are divided into the following groups:

<h3>`histogram`</h3>

A histogram collects values and groups them in buckets.
A histogram collects values and exposes the number of observations and their sum (as counters), along with the 50th, 75th, 90th, 95th, 99th, and 99.9th percentiles with 1% accuracy.

**Example:**
```
# TYPE xmpp_element_in_byte_size histogram
# TYPE xmpp_element_in_byte_size summary
# HELP xmpp_element_in_byte_size Event: xmpp_element_in, Metric: byte_size
xmpp_element_in_byte_size_bucket{connection_type="c2s",host_type="localhost",le="1"} 0
xmpp_element_in_byte_size_count{connection_type="c2s",host_type="localhost"} 0
xmpp_element_in_byte_size_sum{connection_type="c2s",host_type="localhost"} 0
xmpp_element_in_byte_size{connection_type="c2s",host_type="localhost",quantile="0.5"} 0
...
xmpp_element_in_byte_size_bucket{connection_type="c2s",host_type="localhost",le="1073741824"} 0
xmpp_element_in_byte_size_bucket{connection_type="c2s",host_type="localhost",le="+Inf"} 0
xmpp_element_in_byte_size{connection_type="c2s",host_type="localhost",quantile="0.999"} 0
```
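
Here "1% accuracy" means relative accuracy of the reported quantile values. Assuming the DDSketch-style guarantee of the underlying `ddskerl` sketch (an assumption based on the newly added dependency, not spelled out in this PR), the reported value $\hat{x}_q$ for the true q-th percentile $x_q$ satisfies

$$|\hat{x}_q - x_q| \le 0.01\, x_q$$

so, for example, a reported 99th percentile of 10 000 µs corresponds to a true value roughly between 9 900 and 10 100 µs.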

=== "Exometer"
4 changes: 2 additions & 2 deletions rebar.config
@@ -56,8 +56,8 @@
{exometer_report_statsd, {git, "https://github.com/esl/exometer_report_statsd.git", {branch, "master"}}},
{syslogger, "0.3.0"},
{flatlog, "0.1.2"},
{prometheus, "5.0.0"},
{prometheus_cowboy, "0.1.9"},
{prometheus, "6.1.1"},
{prometheus_cowboy, "0.2.0"},

%%% Stateless libraries
{opuntia, "1.1.2"},
30 changes: 15 additions & 15 deletions rebar.lock
@@ -1,5 +1,5 @@
{"1.2.0",
[{<<"accept">>,{pkg,<<"accept">>,<<"0.3.6">>},2},
[{<<"accept">>,{pkg,<<"accept">>,<<"0.3.7">>},2},
{<<"amqp_client">>,{pkg,<<"amqp_client">>,<<"4.2.1">>},0},
{<<"backoff">>,{pkg,<<"backoff">>,<<"1.1.6">>},1},
{<<"base16">>,{pkg,<<"base16">>,<<"2.0.1">>},1},
@@ -19,6 +19,7 @@
{<<"credentials_obfuscation">>,
{pkg,<<"credentials_obfuscation">>,<<"3.5.0">>},
1},
{<<"ddskerl">>,{pkg,<<"ddskerl">>,<<"0.4.2">>},1},
{<<"eini">>,{pkg,<<"eini">>,<<"1.2.9">>},1},
{<<"eodbc">>,{pkg,<<"eodbc">>,<<"0.2.0">>},0},
{<<"epgsql">>,{pkg,<<"epgsql">>,<<"4.7.1">>},0},
@@ -87,10 +88,9 @@
0},
{<<"parse_trans">>,{pkg,<<"parse_trans">>,<<"3.4.0">>},1},
{<<"pooler">>,{pkg,<<"pooler">>,<<"1.5.3">>},1},
{<<"prometheus">>,{pkg,<<"prometheus">>,<<"5.0.0">>},0},
{<<"prometheus_cowboy">>,{pkg,<<"prometheus_cowboy">>,<<"0.1.9">>},0},
{<<"prometheus_httpd">>,{pkg,<<"prometheus_httpd">>,<<"2.1.13">>},1},
{<<"quantile_estimator">>,{pkg,<<"quantile_estimator">>,<<"1.0.2">>},1},
{<<"prometheus">>,{pkg,<<"prometheus">>,<<"6.1.1">>},0},
{<<"prometheus_cowboy">>,{pkg,<<"prometheus_cowboy">>,<<"0.2.0">>},0},
{<<"prometheus_httpd">>,{pkg,<<"prometheus_httpd">>,<<"2.1.15">>},1},
{<<"quickrand">>,{pkg,<<"quickrand">>,<<"2.0.7">>},1},
{<<"rabbit_common">>,{pkg,<<"rabbit_common">>,<<"4.2.1">>},1},
{<<"ranch">>,{pkg,<<"ranch">>,<<"2.2.0">>},0},
@@ -118,7 +118,7 @@
{<<"worker_pool">>,{pkg,<<"worker_pool">>,<<"6.4.0">>},0}]}.
[
{pkg_hash,[
{<<"accept">>, <<"AD44AC7D704BF70EF8FB2E313EF5B978F9D1330BDDAC64509E93AFDA13281215">>},
{<<"accept">>, <<"CD6E34A2D7E28CA38B2D3CB233734CA0C221EFBC1F171F91FEC5F162CC2D18DA">>},
{<<"amqp_client">>, <<"CFF0CC13186E57457DC5745F1B3A4127C6857717CB8F5920DC457C84D0AD00A2">>},
{<<"backoff">>, <<"83B72ED2108BA1EE8F7D1C22E0B4A00CFE3593A67DBC792799E8CCE9F42F796B">>},
{<<"base16">>, <<"F0549F732E03BE8124ED0D19FD5EE52146CC8BE24C48CBC3F23AB44B157F11A2">>},
@@ -132,6 +132,7 @@
{<<"cowlib">>, <<"623791C56C1CC9DF54A71A9C55147A401549917F00A2E48A6AE12B812C586CED">>},
{<<"cpool">>, <<"76222AA1DAC0F8089707167BD69D221EDA63DB65B8BD67DBF6E133075392EEDC">>},
{<<"credentials_obfuscation">>, <<"61E282ADFB4439486B3994FAAEC69543C7EE6CC7E70C6340E8853FD9DEAF8219">>},
{<<"ddskerl">>, <<"A51A90BE9AC9B36A94017670BED479C623B10CA9D4BDA1EDF3A0E48CAEEADA2A">>},
{<<"eini">>, <<"FCC3CBD49BBDD9A1D9735C7365DAFFCD84481CCE81E6CB80537883AA44AC4895">>},
{<<"eodbc">>, <<"81AC713AE657FC4F67FEA210AE6D4575DDD7003C96953363908EA35E80F33F22">>},
{<<"epgsql">>, <<"D4E47CAE46C18C8AFA88E34D59A9B4BAE16368D7CE1EB3DA24FA755EB28393EB">>},
@@ -164,10 +165,9 @@
{<<"p1_utils">>, <<"67B0C4AC9FA3BA3EF563B31AA111B0A004439A37FAC85E027F1C3617E1C7EC6C">>},
{<<"parse_trans">>, <<"BB87AC362A03CA674EBB7D9D498F45C03256ADED7214C9101F7035EF44B798C7">>},
{<<"pooler">>, <<"898CD1FA301FC42D4A8ED598CE139B71CA85B54C16AB161152B5CC5FBDCFA1A8">>},
{<<"prometheus">>, <<"8A37A3216D8DB019D19068602669C9819C099120F8E39994DD1BD3A3F5553376">>},
{<<"prometheus_cowboy">>, <<"D9D5B300516A61ED5AE31391F8EEEEB202230081D32A1813F2D78772B6F274E1">>},
{<<"prometheus_httpd">>, <<"F086390B4E4E3F41112889B745BAC53D26437B6139496E6700C2508858F5985B">>},
{<<"quantile_estimator">>, <<"ECD281D40110FDD9BA62685531E4435E0839A52FD1058DA5564F1763E4642EF7">>},
{<<"prometheus">>, <<"3C9D8E2D4FCF948450550693F6A82FAE013D3665BA10AA55955B64C2875FADB3">>},
{<<"prometheus_cowboy">>, <<"526F75D9850A9125496F78BCEECCA0F237BC7B403C976D44508543AE5967DAD9">>},
{<<"prometheus_httpd">>, <<"8F767D819A5D36275EAB9264AFF40D87279151646776069BF69FBDBBD562BD75">>},
{<<"quickrand">>, <<"D2BD76676A446E6A058D678444B7FDA1387B813710D1AF6D6E29BB92186C8820">>},
{<<"rabbit_common">>, <<"1D64E391E12116B76B1425EB96B7552DE51F0301093EBA669B5334F4759CC1E8">>},
{<<"ranch">>, <<"25528F82BC8D7C6152C57666CA99EC716510FE0925CB188172F41CE93117B1B0">>},
@@ -186,7 +186,7 @@
{<<"uuid">>, <<"B2078D2CC814F53AFA52D36C91E08962C7E7373585C623F4C0EA6DFB04B2AF94">>},
{<<"worker_pool">>, <<"0347B805A8E5804B5676A9885FB3B9B6C1627099C449C3C67C0E8E6AF79E9AA6">>}]},
{pkg_hash_ext,[
{<<"accept">>, <<"A5167FA1AE90315C3F1DD189446312F8A55D00EFA357E9C569BDA47736B874C3">>},
{<<"accept">>, <<"CA69388943F5DAD2E7232A5478F16086E3C872F48E32B88B378E1885A59F5649">>},
{<<"amqp_client">>, <<"8AE00B055A58500E0557F73D9C0FFE257487131E603F7F84FE72CBFAAF03838A">>},
{<<"backoff">>, <<"CF0CFFF8995FB20562F822E5CC47D8CCF664C5ECDC26A684CBE85C225F9D7C39">>},
{<<"base16">>, <<"06EA2D48343282E712160BA89F692B471DB8B36ABE8394F3445FF9032251D772">>},
@@ -200,6 +200,7 @@
{<<"cowlib">>, <<"0AF652D1550C8411C3B58EED7A035A7FB088C0B86AFF6BC504B0BC3B7F791AA2">>},
{<<"cpool">>, <<"430E18DF4A9D584EB1ED0D196A87CC02E878AF5B4888BFDC9B65F86A96480E30">>},
{<<"credentials_obfuscation">>, <<"843ADBE3246861CE0F1A0FA3222F384834EB31DEFD8D6B9CBA7AFD2977C957BC">>},
{<<"ddskerl">>, <<"63F907373D7E548151D584D4DA8A38928FD26EC9477B94C0FFAAD87D7CB69FE7">>},
{<<"eini">>, <<"DA64AE8DB7C2F502E6F20CDF44CD3D9BE364412B87FF49FEBF282540F673DFCB">>},
{<<"eodbc">>, <<"ACF72A61349AFD535306F036B8A17C45FA095E039F7C0211E671CCCBD709A8C8">>},
{<<"epgsql">>, <<"B6D86B7DC42C8555B1D4E20880E5099D6D6D053148000E188E548F98E4E01836">>},
@@ -232,10 +233,9 @@
{<<"p1_utils">>, <<"D0379E8C1156B98BD64F8129C1DE022FCCA4F2FDB7486CE73BF0ED2C3376B04C">>},
{<<"parse_trans">>, <<"F99E368830BEA44552224E37E04943A54874F08B8590485DE8D13832B63A2DC3">>},
{<<"pooler">>, <<"058D85C5081289B90E97E4DDDBC3BB5A3B4A19A728AB3BC88C689EFCC36A07C7">>},
{<<"prometheus">>, <<"80D29564A5DC4490B53FD225D752B65FB0DBEBA41497F96D62223338127C5659">>},
{<<"prometheus_cowboy">>, <<"5F71C039DEB9E9FF9DD6366BC74C907A463872B85286E619EFF0BDA15111695A">>},
{<<"prometheus_httpd">>, <<"9B5A44D1F6FBB3C3FE6F85F06DAFE680AD9FFD591EC65A10BB51DFF0FBBE45D2">>},
{<<"quantile_estimator">>, <<"DB404793D6384995A1AC6DD973E2CEE5BE9FCC128765BDBA53D87C564E296B64">>},
{<<"prometheus">>, <<"C3ECC7A35676948089E28FF4383CCB85DCEF447F0982C01E5630A084008C456A">>},
{<<"prometheus_cowboy">>, <<"2C7EB12F4B970D91E3B47BAAD0F138F6ADC34E53EEB0AE18068FF0AFAB441B24">>},
{<<"prometheus_httpd">>, <<"67736D000745184D5013C58A63E947821AB90CB9320BC2E6AE5D3061C6FFE039">>},
{<<"quickrand">>, <<"B8ACBF89A224BC217C3070CA8BEBC6EB236DBE7F9767993B274084EA044D35F0">>},
{<<"rabbit_common">>, <<"FF509B07E639B1784898C28031E5204FEA14260172E4FC339F94405586037E40">>},
{<<"ranch">>, <<"FA0B99A1780C80218A4197A59EA8D3BDAE32FBFF7E88527D7D8A4787EFF4F8E7">>},
2 changes: 1 addition & 1 deletion src/instrument/mongoose_instrument_exometer.erl
@@ -153,7 +153,7 @@ handle_metric_event(EventName, Labels, MetricName, MetricType, Measurements) ->
ok
end.

-spec update_metric(exometer:name(), spiral | histogram, integer()) -> ok.
-spec update_metric(exometer:name(), mongoose_instrument:metric_type(), integer()) -> ok.
update_metric(Name, gauge, Value) when is_integer(Value) ->
ok = exometer:update(Name, Value);
update_metric(Name, counter, Value) when is_integer(Value) ->
21 changes: 9 additions & 12 deletions src/instrument/mongoose_instrument_prometheus.erl
@@ -57,7 +57,13 @@ declare_metric(MetricSpec, counter) ->
declare_metric(MetricSpec, spiral) ->
prometheus_counter:declare(MetricSpec);
declare_metric(MetricSpec, histogram) ->
prometheus_histogram:declare([{buckets, histogram_buckets()} | MetricSpec]).
prometheus_quantile_summary:declare([
{quantiles, [0.5, 0.75, 0.90, 0.95, 0.99, 0.999]},
{error, 0.01},
%% Measuring in µs suffices for actions lasting up to a day (with 1% accuracy).
%% Measuring in bytes suffices for sizes up to 81 GB (with 1% accuracy).
{bound, 1260}
| MetricSpec]).

-spec reset_metric(name(), [mongoose_instrument:label_value()],
mongoose_instrument:metric_type()) -> boolean().
@@ -68,7 +74,7 @@
reset_metric(Name, LabelValues, spiral) ->
prometheus_counter:remove(Name, LabelValues);
reset_metric(Name, LabelValues, histogram) ->
prometheus_histogram:remove(Name, LabelValues).
prometheus_quantile_summary:remove(Name, LabelValues).

-spec initialize_metric(name(), [mongoose_instrument:label_value()],
mongoose_instrument:metric_type()) -> ok.
@@ -92,15 +98,6 @@ metric_spec(EventName, LabelKeys, MetricName) ->
{duration_unit, false} % prevent unwanted implicit conversions, e.g. seconds -> microseconds
].

-spec histogram_buckets() -> [integer()].
histogram_buckets() ->
histogram_buckets([], 1 bsl 30). % ~1.07 * 10^9

histogram_buckets(AccBuckets, Val) when Val > 0 ->
histogram_buckets([Val | AccBuckets], Val bsr 1);
histogram_buckets(AccBuckets, _Val) ->
AccBuckets.

-spec handle_metric_event(mongoose_instrument:event_name(), [mongoose_instrument:label_value()],
mongoose_instrument:metric_name(), mongoose_instrument:metric_type(),
mongoose_instrument:measurements()) -> ok.
@@ -139,4 +136,4 @@ update_metric(Name, Labels, counter, Value) when is_integer(Value) ->
update_metric(Name, Labels, spiral, Value) when is_integer(Value), Value >= 0 ->
ok = prometheus_counter:inc(Name, Labels, Value);
update_metric(Name, Labels, histogram, Value) when is_integer(Value) ->
ok = prometheus_histogram:observe(Name, Labels, Value).
ok = prometheus_quantile_summary:observe(Name, Labels, Value).
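
The `{bound, 1260}` comments above can be sanity-checked with a quick calculation. Assuming `ddskerl` (the dependency added in `rebar.lock`) keeps DDSketch-style exponential buckets, each bucket spans a factor of `Gamma = (1 + E) / (1 - E)` for relative error `E`, so `N` buckets starting at 1 cover values up to roughly `Gamma^N`. A minimal sketch of that check (module and function names are illustrative, not part of the PR):

```erlang
%% summary_bound_check: rough coverage estimate for a relative-error sketch,
%% assuming DDSketch-style exponential buckets (the scheme ddskerl is based on).
-module(summary_bound_check).
-export([max_value/2]).

%% max_value(Error, Buckets) -> the largest value trackable with ~Error
%% relative accuracy using Buckets buckets, if the smallest tracked value is 1.
-spec max_value(float(), pos_integer()) -> float().
max_value(Error, Buckets) ->
    Gamma = (1 + Error) / (1 - Error),
    math:pow(Gamma, Buckets).
```

In the shell, `summary_bound_check:max_value(0.01, 1260)` returns roughly `8.8e10`, which is just above one day in microseconds (`8.64e10`) and about 81 GiB in bytes, consistent with the comments in `declare_metric/2`.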
61 changes: 50 additions & 11 deletions test/mongoose_instrument_metrics_SUITE.erl
@@ -10,6 +10,24 @@
-define(HOST_TYPE, <<"localhost">>).
-define(HOST_TYPE2, <<"test type">>).

-define(assertRelEqual(Expect, Actual, Tolerance),
(fun() ->
__Exp = (Expect),
__Act = (Actual),
__Tol = (Tolerance),
case __Exp == 0 of
true -> ?assertEqual(__Exp, __Act);
false ->
__RelErr = abs(__Exp - __Act) / __Exp,
__Check = case __RelErr < __Tol of
true -> ok;
false -> {fail, [{expected, __Exp}, {actual, __Act},
{rel_error, __RelErr}, {tolerance, __Tol}]}
end,
?assertEqual(ok, __Check)
end
end)()).

-import(mongoose_instrument_exometer, [exometer_metric_name/3]).
%% Setup and teardown

@@ -30,6 +48,7 @@ groups() ->
prometheus_counter_cannot_be_decreased,
prometheus_counter_is_updated_separately_for_different_labels,
prometheus_histogram_is_created_and_updated,
prometheus_histogram_is_calculated_correctly,
prometheus_histogram_is_updated_separately_for_different_labels,
multiple_prometheus_metrics_are_updated]},
{exometer, [parallel], [exometer_skips_non_metric_event,
@@ -184,13 +203,33 @@ prometheus_histogram_is_created_and_updated(Config) ->
ok = mongoose_instrument:set_up(Event, ?LABELS, #{metrics => #{time => histogram}}),

%% Prometheus histogram shows no value if there is no data
?assertEqual(undefined, prometheus_histogram:value(Metric, [?HOST_TYPE])),
?assertEqual(undefined, prometheus_quantile_summary:value(Metric, [?HOST_TYPE])),
ok = mongoose_instrument:execute(Event, ?LABELS, #{time => 1}),
?assertMatch({[1, 0|_], 1}, prometheus_histogram:value(Metric, [?HOST_TYPE])),
?assertMatch({1, 1, _}, prometheus_quantile_summary:value(Metric, [?HOST_TYPE])),
ok = mongoose_instrument:execute(Event, ?LABELS, #{time => 1}),
?assertMatch({[2, 0|_], 2}, prometheus_histogram:value(Metric, [?HOST_TYPE])),
?assertMatch({2, 2, _}, prometheus_quantile_summary:value(Metric, [?HOST_TYPE])),
ok = mongoose_instrument:execute(Event, ?LABELS, #{time => 2}),
?assertMatch({[2, 1|_], 4}, prometheus_histogram:value(Metric, [?HOST_TYPE])).
?assertMatch({3, 4, _}, prometheus_quantile_summary:value(Metric, [?HOST_TYPE])).

prometheus_histogram_is_calculated_correctly(Config) ->
Event = ?config(event, Config),
Metric = prom_name(Event, time),
ok = mongoose_instrument:set_up(Event, ?LABELS, #{metrics => #{time => histogram}}),

%% The Prometheus quantile summary shows no value if there is no data
?assertEqual(undefined, prometheus_quantile_summary:value(Metric, [?HOST_TYPE])),
Values = [1, 2, 2, 2, 2, 2, 2, 9, 9, 10],
lists:foreach(fun(V) -> ok = mongoose_instrument:execute(Event, ?LABELS, #{time => V}) end, Values),
Sum = lists:sum(Values),
Length = length(Values),
?assertMatch({Length, Sum, _}, prometheus_quantile_summary:value(Metric, [?HOST_TYPE])),
%% Check some quantiles
{_, _, Quantiles} = prometheus_quantile_summary:value(Metric, [?HOST_TYPE]),
Tolerance = 0.01,
?assertRelEqual(2, proplists:get_value(0.5, Quantiles), Tolerance),
?assertRelEqual(9, proplists:get_value(0.9, Quantiles), Tolerance),
?assertRelEqual(10, proplists:get_value(0.95, Quantiles), Tolerance),
?assertRelEqual(10, proplists:get_value(0.99, Quantiles), Tolerance).

prometheus_histogram_is_updated_separately_for_different_labels(Config) ->
Event = ?config(event, Config),
@@ -199,8 +238,8 @@ prometheus_histogram_is_updated_separately_for_different_labels(Config) ->
ok = mongoose_instrument:set_up(Event, ?LABELS2, #{metrics => #{time => histogram}}),
ok = mongoose_instrument:execute(Event, ?LABELS, #{time => 1}),
ok = mongoose_instrument:execute(Event, ?LABELS2, #{time => 2}),
?assertMatch({[1, 0|_], 1}, prometheus_histogram:value(Metric, [?HOST_TYPE])),
?assertMatch({[0, 1|_], 2}, prometheus_histogram:value(Metric, [?HOST_TYPE2])).
?assertMatch({1, 1, _}, prometheus_quantile_summary:value(Metric, [?HOST_TYPE])),
?assertMatch({1, 2, _}, prometheus_quantile_summary:value(Metric, [?HOST_TYPE2])).

multiple_prometheus_metrics_are_updated(Config) ->
Event = ?config(event, Config),
@@ -211,18 +250,18 @@ multiple_prometheus_metrics_are_updated(Config) ->
%% Update both metrics
ok = mongoose_instrument:execute(Event, ?LABELS, #{count => 1, time => 2}),
?assertEqual(1, prometheus_counter:value(Counter, [?HOST_TYPE])),
HistogramValue = prometheus_histogram:value(Histogram, [?HOST_TYPE]),
?assertMatch({[0, 1|_], 2}, HistogramValue),
HistogramValue = prometheus_quantile_summary:value(Histogram, [?HOST_TYPE]),
?assertMatch({1, 2, _}, HistogramValue),

%% Update only one metric
ok = mongoose_instrument:execute(Event, ?LABELS, #{count => 2}),
?assertEqual(3, prometheus_counter:value(Counter, [?HOST_TYPE])),
?assertEqual(HistogramValue, prometheus_histogram:value(Histogram, [?HOST_TYPE])),
?assertEqual(HistogramValue, prometheus_quantile_summary:value(Histogram, [?HOST_TYPE])),

%% No update
ok = mongoose_instrument:execute(Event, ?LABELS, #{something => irrelevant}),
?assertEqual(3, prometheus_counter:value(Counter, [?HOST_TYPE])),
?assertEqual(HistogramValue, prometheus_histogram:value(Histogram, [?HOST_TYPE])).
?assertEqual(HistogramValue, prometheus_quantile_summary:value(Histogram, [?HOST_TYPE])).

exometer_skips_non_metric_event(Config) ->
Event = ?config(event, Config),
@@ -386,7 +425,7 @@ prometheus_and_exometer_metrics_are_updated(Config) ->
?assertEqual({ok, [{count, 1}]}, exometer:get_value([?HOST_TYPE, Event, count], count)),
?assertEqual({ok, [{mean, 2}]}, exometer:get_value([?HOST_TYPE, Event, time], mean)),
?assertEqual(1, prometheus_counter:value(prom_name(Event, count), [?HOST_TYPE])),
?assertMatch({[0, 1|_], 2}, prometheus_histogram:value(prom_name(Event, time), [?HOST_TYPE])).
?assertMatch({1, 2, _}, prometheus_quantile_summary:value(prom_name(Event, time), [?HOST_TYPE])).

%% Helpers

34 changes: 5 additions & 29 deletions test/prometheus_endpoint_SUITE.erl
@@ -54,7 +54,7 @@ test_metrics(Event, Labels) ->
check_counter(Event, requests, Labels, 1, Scraped), % 'spiral' is a Prometheus counter
check_gauge(Event, sessions, Labels, 5, Scraped), % 'counter' is a Prometheus gauge
check_gauge(Event, seconds, Labels, 10, Scraped),
check_histogram(Event, time, Labels, #{count => 1, sum => 2, bucket_num => 32}, Scraped).
check_histogram(Event, time, Labels, #{count => 1, sum => 2}, Scraped).

%% Checks for the parsed metrics

Expand All @@ -71,38 +71,14 @@ check_gauge(Event, Metric, Labels, ExpValue, Scraped) ->
?assertEqual({Labels, ExpValue}, Value).

check_histogram(Event, Metric, Labels, ExpValues, Scraped) ->
#{count := ExpCount, sum := ExpSum, bucket_num := ExpBucketNum} = ExpValues,
[Type, Help] = get_metric([Event, Metric], Scraped),
?assertEqual({<<"TYPE">>, <<"histogram">>}, Type),
#{count := ExpCount, sum := ExpSum} = ExpValues,
[Type, Help | _] = get_metric([Event, Metric], Scraped),
?assertEqual({<<"TYPE">>, <<"summary">>}, Type),
?assertEqual({<<"HELP">>, help(Event, Metric)}, Help),
[Count] = get_metric([Event, Metric, count], Scraped),
?assertEqual({Labels, ExpCount}, Count),
[Sum] = get_metric([Event, Metric, sum], Scraped),
?assertEqual({Labels, ExpSum}, Sum),
Buckets = get_metric([Event, Metric, bucket], Scraped),
?assertEqual(ExpBucketNum, length(Buckets)),
check_buckets(ExpCount, Labels, Buckets).

%% Check that the histogram buckets have growing thresholds and counts,
%% and that the last bucket has the expected total count (because they are cumulative).
check_buckets(ExpCount, Labels, Buckets) ->
InitState = #{labels => Labels, last_count => 0, last_threshold => 0},
#{final_count := LastCount} = lists:foldl(fun check_bucket/2, InitState, Buckets),
?assertEqual(ExpCount, LastCount).

check_bucket({Labels, Count}, State) ->
#{labels := BaseLabels, last_count := LastCount, last_threshold := LastThreshold} = State,
{ThresholdBin, Labels1} = maps:take(le, Labels),
?assertEqual(Labels1, BaseLabels),
?assert(Count >= LastCount),
case ThresholdBin of
<<"+Inf">> ->
#{final_count => Count};
_ ->
Threshold = binary_to_integer(ThresholdBin),
?assert(Threshold > LastThreshold),
State#{last_count => Count, last_threshold => Threshold}
end.
?assertEqual({Labels, ExpSum}, Sum).

help(Event, Metric) ->
<<"Event: ", (atom_to_binary(Event))/binary, ", Metric: ", (atom_to_binary(Metric))/binary>>.