Merge branch 'stable'
commit 8cbffc8561
@@ -1,12 +0,0 @@
--include_lib("eunit/include/eunit.hrl").
--include_lib("amqp_client/include/amqp_client.hrl").
-
--define(OK, 200).
--define(CREATED, 201).
--define(NO_CONTENT, 204).
--define(SEE_OTHER, 303).
--define(BAD_REQUEST, 400).
--define(NOT_AUTHORISED, 401).
-%%-define(NOT_FOUND, 404). Defined for AMQP by amqp_client.hrl (as 404)
-%% httpc seems to get racy when using HTTP 1.1
--define(HTTPC_OPTS, [{version, "HTTP/1.0"}, {autoredirect, false}]).
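Illustration only, not part of this commit: the removed ?HTTPC_OPTS macro is meant for the HTTPOptions argument of httpc:request/4, forcing HTTP/1.0 and leaving redirects to the caller. The endpoint and credentials below are placeholders.

%% Sketch of typical usage of the removed macros; URL and guest:guest are illustrative.
get_overview() ->
    {ok, _} = application:ensure_all_started(inets),
    Auth = {"authorization", "Basic " ++ base64:encode_to_string("guest:guest")},
    {ok, {{_, ?OK, _}, _Headers, Body}} =
        httpc:request(get, {"http://localhost:15672/api/overview", [Auth]},
                      ?HTTPC_OPTS, []),
    Body.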
@@ -671,10 +671,12 @@ or:
 <pre>{"password_hash":"2lmoth8l4H0DViLaK9Fxi6l9ds8=", "tags":"administrator"}</pre>
 The <code>tags</code> key is mandatory. Either
 <code>password</code> or <code>password_hash</code>
-must be set. Setting <code>password_hash</code> to "" will ensure the
+must be set. Setting <code>password_hash</code> to <code>""</code> will ensure the
 user cannot use a password to log in. <code>tags</code> is a
 comma-separated list of tags for the user. Currently recognised tags
-are "administrator", "monitoring" and "management".
+are <code>administrator</code>, <code>monitoring</code> and <code>management</code>.
+<code>password_hash</code> must be generated using the algorithm described
+<a href="http://rabbitmq.com/passwords.html#password-generation">here</a>.
 </td>
 </tr>
 <tr>
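Purely as an illustration of the request documented above (not part of this commit; host, port, user name and credentials are placeholders), the same JSON body can be PUT to the users endpoint from Erlang:

%% Hypothetical sketch: create a hash-only administrator via the HTTP API.
put_user_example() ->
    {ok, _} = application:ensure_all_started(inets),
    URL  = "http://localhost:15672/api/users/myuser",
    Body = "{\"password_hash\":\"2lmoth8l4H0DViLaK9Fxi6l9ds8=\", \"tags\":\"administrator\"}",
    Auth = {"authorization", "Basic " ++ base64:encode_to_string("guest:guest")},
    {ok, {{_, Code, _}, _, _}} =
        httpc:request(put, {URL, [Auth], "application/json", Body}, [], []),
    Code.  %% expect 201 (Created) for a new user or 204 (No Content) for an update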
@@ -492,24 +492,21 @@ format_rate(node_persister_stats,
             {TIR, TIB, TIA, TIWC, TIWB, TIWAT, TIS, TISAT, TISC,
              TISEAT, TIRC, TMRTC, TMDTC, TMSRC, TMSWC, TQIJWC, TQIWC, TQIRC,
              TIO, TIOAT},
-            {RIR, RIB, RIA, RIWC, RIWB, RIWAT, RIS, RISAT, RISC,
-             RISEAT, RIRC, RMRTC, RMDTC, RMSRC, RMSWC, RQIJWC, RQIWC, RQIRC,
-             RIO, RIOAT},
+            {RIR, RIB, _RIA, RIWC, RIWB, _RIWAT, RIS, _RISAT, RISC,
+             _RISEAT, RIRC, RMRTC, RMDTC, RMSRC, RMSWC, RQIJWC, RQIWC, RQIRC,
+             RIO, _RIOAT},
             {SIR, SIB, SIA, SIWC, SIWB, SIWAT, SIS, SISAT, SISC,
              SISEAT, SIRC, SMRTC, SMDTC, SMSRC, SMSWC, SQIJWC, SQIWC, SQIRC,
              SIO, SIOAT},
-            {STIR, STIB, STIA, STIWC, STIWB, STIWAT, STIS, STISAT, STISC,
-             STISEAT, STIRC, STMRTC, STMDTC, STMSRC, STMSWC, STQIJWC, STQIWC, STQIRC,
-             STIO, STIOAT}, Length) ->
+            {STIR, STIB, _STIA, STIWC, STIWB, _STIWAT, STIS, _STISAT, STISC,
+             _STISEAT, STIRC, STMRTC, STMDTC, STMSRC, STMSWC, STQIJWC, STQIWC, STQIRC,
+             STIO, _STIOAT}, Length) ->
     %% Calculates average times for read/write/sync/seek from the
     %% accumulated time and count
     %% io_<op>_avg_time is the average operation time for the life of the node
     %% io_<op>_avg_time_details/rate is the average operation time during the
     %% last time unit calculated (thus similar to an instant rate)
-
-
-    %% TODO avg_time

     [
      {io_read_count, TIR},
      {io_read_count_details, [{rate, RIR},
@@ -518,8 +515,8 @@ format_rate(node_persister_stats,
      {io_read_bytes_details, [{rate, RIB},
                               {samples, SIB}] ++ average(SIB, STIB, Length)},
      {io_read_avg_time, avg_time(TIA, TIR)},
-     {io_read_avg_time_details, [{rate, avg_time(RIA, RIR)},
-                                 {samples, SIA}] ++ average(SIA, STIA, Length)},
+     {io_read_avg_time_details, [{samples, unit_samples(SIA, SIR)}] ++
+      avg_time_details(avg_time(TIA, TIR))},
      {io_write_count, TIWC},
      {io_write_count_details, [{rate, RIWC},
                                {samples, SIWC}] ++ average(SIWC, STIWC, Length)},
@@ -527,20 +524,20 @@ format_rate(node_persister_stats,
      {io_write_bytes_details, [{rate, RIWB},
                                {samples, SIWB}] ++ average(SIWB, STIWB, Length)},
      {io_write_avg_time, avg_time(TIWAT, TIWC)},
-     {io_write_avg_time_details, [{rate, avg_time(RIWAT, RIWC)},
-                                  {samples, SIWAT}] ++ average(SIWAT, STIWAT, Length)},
+     {io_write_avg_time_details, [{samples, unit_samples(SIWAT, SIWC)}] ++
+      avg_time_details(avg_time(TIWAT, TIWC))},
      {io_sync_count, TIS},
      {io_sync_count_details, [{rate, RIS},
                               {samples, SIS}] ++ average(SIS, STIS, Length)},
      {io_sync_avg_time, avg_time(TISAT, TIS)},
-     {io_sync_avg_time_details, [{rate, avg_time(RISAT, RIS)},
-                                 {samples, SISAT}] ++ average(SISAT, STISAT, Length)},
+     {io_sync_avg_time_details, [{samples, unit_samples(SISAT, SIS)}] ++
+      avg_time_details(avg_time(TISAT, TIS))},
      {io_seek_count, TISC},
      {io_seek_count_details, [{rate, RISC},
                               {samples, SISC}] ++ average(SISC, STISC, Length)},
      {io_seek_avg_time, avg_time(TISEAT, TISC)},
-     {io_seek_avg_time_details, [{rate, avg_time(RISEAT, RISC)},
-                                 {samples, SISEAT}] ++ average(SISEAT, STISEAT, Length)},
+     {io_seek_avg_time_details, [{samples, unit_samples(SISEAT, SISC)}] ++
+      avg_time_details(avg_time(TISEAT, TISC))},
      {io_reopen_count, TIRC},
      {io_reopen_count_details, [{rate, RIRC},
                                 {samples, SIRC}] ++ average(SIRC, STIRC, Length)},
@@ -569,8 +566,8 @@ format_rate(node_persister_stats,
      {io_file_handle_open_attempt_count_details, [{rate, RIO},
                                                   {samples, SIO}] ++ average(SIO, STIO, Length)},
      {io_file_handle_open_attempt_avg_time, avg_time(TIOAT, TIO)},
-     {io_file_handle_open_attempt_avg_time_details, [{rate, avg_time(RIOAT, RIO)},
-                                                     {samples, SIOAT}] ++ average(SIOAT, STIOAT, Length)}
+     {io_file_handle_open_attempt_avg_time_details,
+      [{samples, unit_samples(SIOAT, SIO)}] ++ avg_time_details(avg_time(TIOAT, TIO))}
     ];
 format_rate(node_node_coarse_stats, {TS, TR}, {RS, RR}, {SS, SR}, {STS, STR}, Length) ->
     [
@@ -582,6 +579,10 @@ format_rate(node_node_coarse_stats, {TS, TR}, {RS, RR}, {SS, SR}, {STS, STR}, Le
                              {samples, SR}] ++ average(SR, STR, Length)}
     ].

+avg_time_details(Avg) ->
+    %% Rates don't make sense here, populate it with the average.
+    [{rate, Avg}, {avg_rate, Avg}, {avg, Avg}].
+
 average(_Samples, _Total, Length) when Length =< 1->
     [];
 average(Samples, Total, Length) ->
@@ -662,5 +663,13 @@ avg_time(_Total, Count) when Count == 0;
 avg_time(Total, Count) ->
     (Total / Count) / ?MICRO_TO_MILLI.

+unit_samples(Total, Count) ->
+    lists:zipwith(fun(T, C) ->
+                          TS = proplists:get_value(timestamp, T),
+                          Sample = avg_time(proplists:get_value(sample, T),
+                                            proplists:get_value(sample, C)),
+                          [{sample, Sample}, {timestamp, TS}]
+                  end, Total, Count).
+
 empty(Table, Def) ->
     rabbit_mgmt_data:empty(Table, Def).
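For illustration only (not part of the diff; the numbers are made up and ?MICRO_TO_MILLI is assumed to be 1000), the three helpers compose like this in the io_*_avg_time_details entries above:

%% Sketch: 6000 µs of accumulated read time over 4 reads gives a 1.5 ms
%% life-of-node average; avg_time_details/1 mirrors it into rate/avg_rate/avg,
%% and unit_samples/2 converts each accumulated-time sample into a per-sample average.
io_read_avg_time_example() ->
    SIA = [[{sample, 3000}, {timestamp, 1}]],   %% accumulated read time samples (µs)
    SIR = [[{sample, 2},    {timestamp, 1}]],   %% read count samples
    [{samples, [[{sample, 1.5}, {timestamp, 1}]]},
     {rate, 1.5}, {avg_rate, 1.5}, {avg, 1.5}] =
        [{samples, unit_samples(SIA, SIR)}] ++ avg_time_details(avg_time(6000, 4)),
    ok.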
@@ -20,7 +20,7 @@
 -include_lib("eunit/include/eunit.hrl").
 -include_lib("rabbit_common/include/rabbit_core_metrics.hrl").
 -include_lib("rabbitmq_management_agent/include/rabbit_mgmt_metrics.hrl").
--include("rabbit_mgmt_test.hrl").
+-include_lib("rabbitmq_ct_helpers/include/rabbit_mgmt_test.hrl").

 -import(rabbit_ct_broker_helpers, [get_node_config/3, restart_node/2]).
 -import(rabbit_mgmt_test_util, [http_get/2, http_put/4, http_delete/3]).
@@ -20,7 +20,7 @@
 -include_lib("proper/include/proper.hrl").
 -include_lib("rabbit_common/include/rabbit_core_metrics.hrl").
 -include_lib("rabbitmq_management_agent/include/rabbit_mgmt_metrics.hrl").
--include("rabbit_mgmt_test.hrl").
+-include_lib("rabbitmq_ct_helpers/include/rabbit_mgmt_test.hrl").

 -import(rabbit_ct_broker_helpers, [get_node_config/3]).
 -import(rabbit_mgmt_test_util, [http_get/2, http_get_from_node/3]).
@@ -18,7 +18,7 @@

 -include_lib("common_test/include/ct.hrl").
 -include_lib("eunit/include/eunit.hrl").
--include("rabbit_mgmt_test.hrl").
+-include_lib("rabbitmq_ct_helpers/include/rabbit_mgmt_test.hrl").

 -import(rabbit_ct_client_helpers, [close_connection/1, close_channel/1,
                                    open_unmanaged_connection/1]).
@@ -340,11 +340,18 @@ check_total(Results, Totals, _Samples, Table) ->
                  end
          end, Expected).

+is_avg_time_details(Detail) ->
+    match == re:run(atom_to_list(Detail), "avg_time_details$", [{capture, none}]).
+
 check_samples(Results, _Totals, Samples, Table) ->
     Details = details(Table),
     %% Lookup list for the position of the key in the stats tuple
     Pairs = lists:zip(Details, lists:seq(1, length(Details))),

+    NonAvgTimeDetails = lists:filter(fun(D) ->
+                                             not is_avg_time_details(D)
+                                     end, Details),
+
     %% Check that all samples in the results match one of the samples in the inputs
     lists:all(fun(Detail) ->
                       RSamples = get_from_detail(samples, Detail, Results),
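As a quick illustration (not part of the change), the regex above keys purely off the detail name's suffix, so the new avg-time details are matched and plain counter details are not:

%% Sketch: expected behaviour of the new predicate.
is_avg_time_details_example() ->
    true  = is_avg_time_details(io_read_avg_time_details),
    false = is_avg_time_details(io_read_count_details),
    ok.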
@@ -359,7 +366,7 @@ check_samples(Results, _Totals, Samples, Table) ->
                                          Samples)
                           end
                   end, RSamples)
-              end, Details)
+              end, NonAvgTimeDetails)
     %% ensure that not all samples are 0
         andalso lists:all(fun(Detail) ->
                               RSamples = get_from_detail(samples, Detail, Results),
@@ -371,6 +378,14 @@ check_samples(Results, _Totals, Samples, Table) ->
 check_avg_rate(Results, _Totals, _Samples, Table) ->
     Details = details(Table),

+    NonAvgTimeDetails = lists:filter(fun(D) ->
+                                             not is_avg_time_details(D)
+                                     end, Details),
+
+    AvgTimeDetails = lists:filter(fun(D) ->
+                                          is_avg_time_details(D)
+                                  end, Details),
+
     lists:all(fun(Detail) ->
                       AvgRate = get_from_detail(avg_rate, Detail, Results),
                       Samples = get_from_detail(samples, Detail, Results),
@@ -379,17 +394,37 @@ check_avg_rate(Results, _Totals, _Samples, Table) ->
                       S1 = proplists:get_value(sample, lists:last(Samples)),
                       T1 = proplists:get_value(timestamp, lists:last(Samples)),
                       AvgRate == ((S2 - S1) * 1000 / (T2 - T1))
-              end, Details).
+              end, NonAvgTimeDetails) andalso
+        lists:all(fun(Detail) ->
+                          Avg = get_from_detail(avg_rate, Detail, Results),
+                          Samples = get_from_detail(samples, Detail, Results),
+                          First = proplists:get_value(sample, hd(Samples)),
+                          Avg == First
+                  end, AvgTimeDetails).

 check_avg(Results, _Totals, _Samples, Table) ->
     Details = details(Table),

+    NonAvgTimeDetails = lists:filter(fun(D) ->
+                                             not is_avg_time_details(D)
+                                     end, Details),
+
+    AvgTimeDetails = lists:filter(fun(D) ->
+                                          is_avg_time_details(D)
+                                  end, Details),
+
     lists:all(fun(Detail) ->
                       Avg = get_from_detail(avg, Detail, Results),
                       Samples = get_from_detail(samples, Detail, Results),
                       Sum = lists:sum([proplists:get_value(sample, S) || S <- Samples]),
                       Avg == (Sum / length(Samples))
-              end, Details).
+              end, NonAvgTimeDetails) andalso
+        lists:all(fun(Detail) ->
+                          Avg = get_from_detail(avg, Detail, Results),
+                          Samples = get_from_detail(samples, Detail, Results),
+                          First = proplists:get_value(sample, hd(Samples)),
+                          Avg == First
+                  end, AvgTimeDetails).

 get_from_detail(Tag, Detail, Results) ->
     proplists:get_value(Tag, proplists:get_value(Detail, Results), []).
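For the ordinary details the assertion above is the usual two-point rate, while for the *_avg_time_details entries the reported avg_rate/avg must simply equal the first sample. A made-up example of the rate arithmetic (not from the suite; sample values and timestamps are illustrative):

%% Sketch: 10 at t = 1000 ms and 30 at t = 3000 ms gives an expected rate of
%% (30 - 10) * 1000 / (3000 - 1000) = 10.0 per second.
avg_rate_check_example() ->
    Samples = [[{sample, 30}, {timestamp, 3000}],
               [{sample, 10}, {timestamp, 1000}]],
    S2 = proplists:get_value(sample, hd(Samples)),
    T2 = proplists:get_value(timestamp, hd(Samples)),
    S1 = proplists:get_value(sample, lists:last(Samples)),
    T1 = proplists:get_value(timestamp, lists:last(Samples)),
    10.0 = (S2 - S1) * 1000 / (T2 - T1),
    ok.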
@@ -21,7 +21,7 @@
 -include_lib("rabbitmq_management_agent/include/rabbit_mgmt_records.hrl").
 -include_lib("rabbitmq_management_agent/include/rabbit_mgmt_metrics.hrl").
 -include("rabbit_mgmt.hrl").
--include("rabbit_mgmt_test.hrl").
+-include_lib("rabbitmq_ct_helpers/include/rabbit_mgmt_test.hrl").
 -import(rabbit_mgmt_test_util, [assert_list/2,
                                 reset_management_settings/1]).