1 |
|
%%%---------------------------------------------------------------------- |
2 |
|
%%% File : mod_caps.erl |
3 |
|
%%% Author : Magnus Henoch <henoch@dtek.chalmers.se> |
4 |
|
%%% Purpose : Request and cache Entity Capabilities (XEP-0115) |
5 |
|
%%% Created : 7 Oct 2006 by Magnus Henoch <henoch@dtek.chalmers.se> |
6 |
|
%%% |
7 |
|
%%% |
8 |
|
%%% ejabberd, Copyright (C) 2002-2015 ProcessOne |
9 |
|
%%% |
10 |
|
%%% This program is free software; you can redistribute it and/or |
11 |
|
%%% modify it under the terms of the GNU General Public License as |
12 |
|
%%% published by the Free Software Foundation; either version 2 of the |
13 |
|
%%% License, or (at your option) any later version. |
14 |
|
%%% |
15 |
|
%%% This program is distributed in the hope that it will be useful, |
16 |
|
%%% but WITHOUT ANY WARRANTY; without even the implied warranty of |
17 |
|
%%% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU |
18 |
|
%%% General Public License for more details. |
19 |
|
%%% |
20 |
|
%%% You should have received a copy of the GNU General Public License along |
21 |
|
%%% with this program; if not, write to the Free Software Foundation, Inc., |
22 |
|
%%% 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. |
23 |
|
%%% |
24 |
|
%%% 2009, improvements from ProcessOne to support correct PEP handling |
25 |
|
%%% through s2s, use less memory, and speedup global caps handling |
26 |
|
%%%---------------------------------------------------------------------- |
27 |
|
|
28 |
|
-module(mod_caps).

-author('henoch@dtek.chalmers.se').

-xep([{xep, 115}, {version, "1.6.0"}]).

-behaviour(gen_server).
-behaviour(gen_mod).
-behaviour(mongoose_module_metrics).

%% Hook handlers and public API
-export([read_caps/1, caps_stream_features/3,
         disco_local_features/3, disco_local_identity/3, disco_info/3]).

%% gen_mod callbacks
-export([start/2, start_link/2, stop/1, config_spec/0, supported_features/0]).

%% gen_server callbacks
-export([init/1, handle_info/2, handle_call/3,
         handle_cast/2, terminate/2, code_change/3]).

%% c2s/PEP hook handlers
-export([user_send_presence/3,
         user_receive_presence/3,
         get_pep_recipients/3,
         filter_pep_recipient/3]).

%% for test cases
-export([delete_caps/1, make_disco_hash/2]).
-ignore_xref([delete_caps/1, make_disco_hash/2, read_caps/1, start_link/2]).

-include("mongoose.hrl").
-include("mongoose_config_spec.hrl").

-include("jlib.hrl").

%% Base name for the per-host-type registered gen_server process.
-define(PROCNAME, ejabberd_mod_caps).

%% Seconds before a pending/failed disco#info request for a node pair
%% may be retried (see feature_request/5).
-define(BAD_HASH_LIFETIME, 600).

%% Parsed representation of a presence <c/> capabilities element.
-record(caps,
        {
          node    = <<>> :: binary(),   % 'node' attribute (client software URI)
          version = <<>> :: binary(),   % 'ver' attribute (disco hash)
          hash    = <<>> :: binary(),   % 'hash' attribute (algorithm name)
          exts    = []   :: [binary()]  % legacy 'ext' tokens (pre-1.4 XEP-0115)
        }).

-type caps() :: #caps{}.
%% Per-session map from contact JID to its last announced caps.
-type caps_resources() :: gb_trees:tree(jid:simple_jid(), caps()).

-export_type([caps/0]).

-type features() :: [binary()].
%% Either a known feature list, or a timestamp (integer) marking a
%% pending/failed disco request for that node pair.
-type maybe_pending_features() :: features() | pos_integer().
%% {CapsNode, SubNode} — cache/Mnesia key for a feature set.
-type node_pair() :: {binary(), binary()}.

%% Mnesia record backing the caps_features cache table.
-record(caps_features,
        {
          node_pair = {<<>>, <<>>} :: node_pair(),
          features  = []           :: maybe_pending_features()
        }).

-record(state, {host_type :: mongooseim:host_type()}).

-type state() :: #state{}.
92 |
|
|
93 |
|
%% @doc Start the module's gen_server, registered locally under a name
%% derived from the host type and ?PROCNAME.
-spec start_link(mongooseim:host_type(), gen_mod:module_opts()) -> any().
start_link(HostType, Opts) ->
    Name = gen_mod:get_module_proc(HostType, ?PROCNAME),
    gen_server:start_link({local, Name}, ?MODULE, [HostType, Opts], []).
98 |
|
|
99 |
|
%% @doc gen_mod callback: add this module's worker to the main supervisor.
-spec start(mongooseim:host_type(), gen_mod:module_opts()) -> any().
start(HostType, Opts) ->
    Name = gen_mod:get_module_proc(HostType, ?PROCNAME),
    Spec = {Name,
            {?MODULE, start_link, [HostType, Opts]},
            transient, 1000, worker, [?MODULE]},
    ejabberd_sup:start_child(Spec).
105 |
|
|
106 |
|
%% @doc gen_mod callback: stop the worker and remove it from the supervisor.
-spec stop(mongooseim:host_type()) -> any().
stop(HostType) ->
    Name = gen_mod:get_module_proc(HostType, ?PROCNAME),
    gen_server:call(Name, stop),
    ejabberd_sup:stop_child(Name).
111 |
|
|
112 |
|
%% @doc TOML configuration schema: cache size (entries) and cache entry
%% lifetime (seconds; default is 24 hours).
-spec config_spec() -> mongoose_config_spec:config_section().
config_spec() ->
    Items = #{<<"cache_size">> => #option{type = integer,
                                          validate = positive},
              <<"cache_life_time">> => #option{type = integer,
                                               validate = positive}},
    Defaults = #{<<"cache_size">> => 1000,
                 <<"cache_life_time">> => timer:hours(24) div 1000},
    #section{items = Items, defaults = Defaults}.
123 |
|
|
124 |
:-( |
%% @doc gen_mod callback: this module works with dynamically added domains.
-spec supported_features() -> [atom()].
supported_features() ->
    [dynamic_domains].
125 |
|
|
126 |
|
%% @doc Like get_features/2, but maps the 'unknown' result to an empty list.
-spec get_features_list(mongooseim:host_type(), nothing | caps()) -> features().
get_features_list(HostType, Caps) ->
    case get_features(HostType, Caps) of
        unknown -> [];
        Known -> Known
    end.
132 |
|
|
133 |
|
-spec get_features(mongooseim:host_type(), nothing | caps()) -> unknown | features().
%% No <c/> element seen for this entity: no features to report.
get_features(_HostType, nothing) -> [];
get_features(HostType, #caps{node = Node, version = Version, exts = Exts}) ->
    %% Look up each sub-node (the hash 'ver' plus any legacy 'ext' tokens)
    %% in the caps cache and concatenate all known feature lists.
    SubNodes = [Version | Exts],
    lists:foldl(fun (SubNode, Acc) ->
                        NodePair = {Node, SubNode},
                        case cache_tab:lookup(caps_features, NodePair,
                                              caps_read_fun(HostType, NodePair))
                            of
                          {ok, Features} when is_list(Features) ->
                              Features ++ Acc;
                          %% First sub-node and nothing cached yet -> 'unknown'.
                          %% NOTE(review): if a later sub-node IS cached, its
                          %% features are appended onto the atom 'unknown',
                          %% producing an improper list — presumably sub-nodes
                          %% of one node are cached together so this does not
                          %% occur in practice; confirm against callers.
                          _ when Acc == [] ->
                              unknown;
                          _ ->
                              Acc
                        end
                end,
                [], SubNodes).
151 |
|
|
152 |
|
-spec read_caps([exml:element()]) -> nothing | caps().
%% Extract the XEP-0115 <c/> capabilities element from a presence child
%% list; returns 'nothing' if no caps element is present.
read_caps(Els) -> read_caps(Els, nothing).

read_caps([#xmlel{name = <<"c">>, attrs = Attrs} | Tail], Result) ->
    case xml:get_attr_s(<<"xmlns">>, Attrs) of
        ?NS_CAPS ->
            %% A caps element: parse its attributes; a later <c/> in the
            %% list would overwrite this result.
            Node = xml:get_attr_s(<<"node">>, Attrs),
            Version = xml:get_attr_s(<<"ver">>, Attrs),
            Hash = xml:get_attr_s(<<"hash">>, Attrs),
            %% 'ext' is the legacy (pre-hash) space-separated extension list.
            Exts = mongoose_bin:tokens(xml:get_attr_s(<<"ext">>, Attrs), <<" ">>),
            read_caps(Tail, #caps{node = Node, hash = Hash, version = Version, exts = Exts});
        _ -> read_caps(Tail, Result)
    end;
read_caps([#xmlel{name = <<"x">>, attrs = Attrs} | Tail], Result) ->
    case xml:get_attr_s(<<"xmlns">>, Attrs) of
        %% MUC user presence: caps handling does not apply, stop with 'nothing'.
        ?NS_MUC_USER -> nothing;
        _ -> read_caps(Tail, Result)
    end;
read_caps([_ | Tail], Result) ->
    read_caps(Tail, Result);
read_caps([], Result) -> Result.
173 |
|
|
174 |
|
%% @doc Hook handler: unpack the sent presence stanza from the accumulator
%% and delegate to user_send_presence/4.
-spec user_send_presence(Acc, Params, Extra) -> {ok, Acc} when
      Acc :: mongoose_acc:t(),
      Params :: map(),
      Extra :: map().
user_send_presence(Acc, _Params, _Extra) ->
    {From, To, Packet} = mongoose_acc:packet(Acc),
    NewAcc = user_send_presence(Acc, From, To, Packet),
    {ok, NewAcc}.
181 |
|
|
182 |
|
%% @doc Process only presence a user sends to their own bare JID
%% (broadcast presence); everything else passes through untouched.
-spec user_send_presence(mongoose_acc:t(), jid:jid(), jid:jid(), exml:element()) -> mongoose_acc:t().
user_send_presence(Acc,
                   #jid{luser = User, lserver = LServer} = From,
                   #jid{luser = User, lserver = LServer, lresource = <<>>},
                   #xmlel{attrs = Attrs, children = Children}) ->
    PresenceType = xml:get_attr_s(<<"type">>, Attrs),
    handle_presence(Acc, LServer, From, PresenceType, Children);
user_send_presence(Acc, _From, _To, _Packet) ->
    Acc.
191 |
|
|
192 |
|
-spec caps_stream_features(Acc, Params, Extra) -> {ok, Acc} when
      Acc :: [exml:element()],
      Params :: #{lserver := jid:lserver()},
      Extra :: #{host_type := mongooseim:host_type()}.
%% Hook handler (c2s/s2s stream features): advertise this server's own
%% entity capabilities as a <c/> element with a SHA-1 disco hash.
caps_stream_features(Acc, #{lserver := LServer}, #{host_type := HostType}) ->
    NewAcc = case make_my_disco_hash(HostType, LServer) of
                 %% No local disco features available — advertise nothing.
                 <<>> ->
                     Acc;
                 Hash ->
                     [#xmlel{name = <<"c">>,
                             attrs = [{<<"xmlns">>, ?NS_CAPS}, {<<"hash">>, <<"sha-1">>},
                                      {<<"node">>, ?MONGOOSE_URI}, {<<"ver">>, Hash}],
                             children = []}
                      | Acc]
             end,
    {ok, NewAcc}.
208 |
|
|
209 |
|
%% @doc Hook handler: a disco query addressed to our own caps node
%% (MongooseIM URI "#" hash) is answered as if addressed to the bare node.
-spec disco_local_features(mongoose_disco:feature_acc(),
                           map(),
                           map()) -> {ok, mongoose_disco:feature_acc()}.
disco_local_features(#{node := Node} = Acc, _Params, _Extra) ->
    case is_valid_node(Node) of
        true -> {ok, Acc#{node := <<>>}};
        false -> {ok, Acc}
    end.
218 |
|
|
219 |
|
%% @doc Hook handler: map a query for our caps node onto the bare node,
%% so identities are served for the hashed node as well.
-spec disco_local_identity(Acc, Params, Extra) -> {ok, Acc} when
      Acc :: mongoose_disco:identity_acc(),
      Params :: map(),
      Extra :: gen_hook:extra().
disco_local_identity(#{node := Node} = Acc, _Params, _Extra) ->
    case is_valid_node(Node) of
        true -> {ok, Acc#{node := <<>>}};
        false -> {ok, Acc}
    end.
229 |
|
|
230 |
|
%% @doc Hook handler: map a disco#info query for our caps node onto the
%% bare node, same as for features and identities.
-spec disco_info(Acc, Params, Extra) -> {ok, Acc} when
      Acc :: mongoose_disco:identity_acc(),
      Params :: map(),
      Extra :: gen_hook:extra().
disco_info(#{node := Node} = Acc, _Params, _Extra) ->
    case is_valid_node(Node) of
        true -> {ok, Acc#{node := <<>>}};
        false -> {ok, Acc}
    end.
240 |
|
|
241 |
|
-spec handle_presence(mongoose_acc:t(), jid:lserver(), jid:jid(), binary(), [exml:element()]) ->
          mongoose_acc:t().
%% For available presence, parse the <c/> element and (if present) make
%% sure the advertised feature sets are known, requesting them if needed.
handle_presence(Acc, LServer, From, Type, Elements) when Type =:= <<>>;
                                                         Type =:= <<"available">> ->
    case read_caps(Elements) of
        nothing ->
            Acc;
        #caps{version = Version, exts = Exts} = Caps ->
            %% Resolve the hash version plus all legacy 'ext' sub-nodes.
            feature_request(Acc, LServer, From, Caps, [Version | Exts])
    end;
%% Any other presence type (unavailable, subscribe, ...) is ignored here.
handle_presence(Acc, _LServer, _From, _Type, _Elements) ->
    Acc.
253 |
|
|
254 |
|
-spec user_receive_presence(mongoose_acc:t(), mongoose_c2s_hooks:params(), gen_hook:extra()) ->
          mongoose_c2s_hooks:result().
%% Hook handler for presence received by a local c2s session: maintains the
%% per-session gb_tree of contact JID -> caps, inserting on available
%% presence from subscribed contacts and deleting on unavailable/error.
user_receive_presence(Acc0, #{c2s_data := C2SData}, _Extra) ->
    {From, To, #xmlel{attrs = Attrs, children = Els} = Packet} = mongoose_acc:packet(Acc0),
    ?LOG_DEBUG(#{what => user_receive_presence,
                 to => jid:to_binary(To), from => jid:to_binary(From),
                 exml_packet => Packet, c2s_state => C2SData}),
    Type = xml:get_attr_s(<<"type">>, Attrs),
    #jid{lserver = LServer} = mongoose_c2s:get_jid(C2SData),
    %% Only presence from remote (non-local) domains needs caps resolution
    %% here; local senders were already processed in user_send_presence.
    Acc = case mongoose_domain_api:get_host_type(From#jid.lserver) of
              {error, not_found} ->
                  handle_presence(Acc0, LServer, From, Type, Els);
              {ok, _} ->
                  Acc0 %% it was already handled in 'user_send_presence'
          end,
    case mongoose_c2s:get_mod_state(C2SData, mod_presence) of
        {ok, Presences} ->
            Subscription = get_subscription(From, Presences),
            %% Track caps only for contacts whose presence we receive
            %% (subscription 'both' or 'to') and only for available presence.
            Insert = (Type == <<>> orelse Type == <<"available">>)
                and (Subscription == both orelse Subscription == to),
            Delete = Type == <<"unavailable">> orelse Type == <<"error">>,
            case Insert orelse Delete of
                true ->
                    LFrom = jid:to_lower(From),
                    %% Lazily create the per-session caps tree on first use.
                    Rs = case mongoose_c2s:get_mod_state(C2SData, ?MODULE) of
                             {ok, Rs1} -> Rs1;
                             {error, not_found} -> gb_trees:empty()
                         end,
                    Caps = read_caps(Els),
                    NewRs = case Caps of
                                %% Available presence without a <c/> element:
                                %% keep whatever we already know.
                                nothing when Insert == true ->
                                    Rs;
                                _ when Insert == true ->
                                    ?LOG_DEBUG(#{what => caps_set_caps,
                                                 caps => Caps,
                                                 to => jid:to_binary(To),
                                                 from => jid:to_binary(From),
                                                 exml_packet => Packet,
                                                 c2s_state => C2SData}),
                                    upsert_caps(LFrom, Caps, Rs);
                                %% Delete branch: contact went offline/errored.
                                _ ->
                                    gb_trees:delete_any(LFrom, Rs)
                            end,
                    {ok, mongoose_c2s_acc:to_acc(Acc, state_mod, {?MODULE, NewRs})};
                false ->
                    {ok, Acc}
            end;
        {error, not_found} ->
            {ok, Acc}
    end.
304 |
|
|
305 |
|
%% @doc Classify our subscription relation with From as both/from/to/none,
%% based on the session's mod_presence state.
get_subscription(From, Presences) ->
    Bare = jid:to_bare(From),
    TheySeeMe = mod_presence:is_subscribed_to_my_presence(From, Bare, Presences),
    ISeeThem = mod_presence:am_i_subscribed_to_presence(From, Bare, Presences),
    if
        TheySeeMe andalso ISeeThem -> both;
        TheySeeMe -> from;
        ISeeThem -> to;
        true -> none
    end.
315 |
|
|
316 |
|
%% @doc Insert or replace the caps entry for a contact JID; a no-op when
%% the stored caps are already identical.
-spec upsert_caps(jid:simple_jid(), caps(), caps_resources()) -> caps_resources().
upsert_caps(LFrom, Caps, Rs) ->
    case gb_trees:lookup(LFrom, Rs) of
        none -> gb_trees:insert(LFrom, Caps, Rs);
        {value, Caps} -> Rs;                      % unchanged entry
        {value, _Stale} -> gb_trees:update(LFrom, Caps, Rs)
    end.
325 |
|
|
326 |
|
%% @doc Hook handler: keep only those PEP recipients whose cached caps
%% advertise the given feature (per this session's caps tree).
-spec get_pep_recipients(Acc, Params, Extra) -> {ok, Acc} when
      Acc :: [jid:simple_jid()],
      Params :: #{c2s_data := mongoose_c2s:data(), feature := binary()},
      Extra :: map().
get_pep_recipients(InAcc, #{c2s_data := C2SData, feature := Feature}, _Extra) ->
    HostType = mongoose_c2s:get_host_type(C2SData),
    case mongoose_c2s:get_mod_state(C2SData, ?MODULE) of
        {ok, Rs} ->
            {ok, filter_recipients_by_caps(HostType, InAcc, Feature, Rs)};
        _ ->
            {ok, InAcc}
    end;
get_pep_recipients(Acc, _Params, _Extra) ->
    {ok, Acc}.
339 |
|
|
340 |
|
%% @doc Prepend to InAcc every JID in Rs whose caps include Feature.
-spec filter_recipients_by_caps(mongooseim:host_type(), Acc, binary(), caps_resources()) -> Acc
          when Acc :: [jid:simple_jid()].
filter_recipients_by_caps(HostType, InAcc, Feature, Rs) ->
    Keep = fun(USR, Caps, Acc) ->
                   Supported = lists:member(Feature, get_features_list(HostType, Caps)),
                   case Supported of
                       true -> [USR | Acc];
                       false -> Acc
                   end
           end,
    gb_trees_fold(Keep, InAcc, Rs).
350 |
|
|
351 |
|
-spec filter_pep_recipient(Acc, Params, Extra) -> {ok | stop, Acc} when
      Acc :: boolean(),
      Params :: #{c2s_data := mongoose_c2s:data(), feature := binary(), to := jid:jid()},
      Extra :: gen_hook:extra().
%% Hook handler: decide whether to drop a PEP notification for To.
%% Returns {stop, Drop} when this session has a caps tree: drop unless the
%% recipient's cached caps advertise the feature (unknown JID -> drop).
filter_pep_recipient(InAcc, #{c2s_data := C2SData, feature := Feature, to := To}, _) ->
    case mongoose_c2s:get_mod_state(C2SData, ?MODULE) of
        {ok, Rs} ->
            ?LOG_DEBUG(#{what => caps_lookup, text => <<"Look for CAPS for To jid">>,
                         acc => InAcc, c2s_state => C2SData, caps_resources => Rs}),
            LTo = jid:to_lower(To),
            case gb_trees:lookup(LTo, Rs) of
                {value, Caps} ->
                    HostType = mongoose_c2s:get_host_type(C2SData),
                    %% Drop if the feature is not advertised by the recipient.
                    Drop = not lists:member(Feature, get_features_list(HostType, Caps)),
                    {stop, Drop};
                none ->
                    %% No caps known for this JID: drop the notification.
                    {stop, true}
            end;
        %% No caps state for this session: leave the decision to others.
        _ -> {ok, InAcc}
    end.
371 |
|
|
372 |
|
%% Prepare the Mnesia backend: if the caps_features table exists with an
%% unexpected storage type, drop it so it can be recreated correctly.
init_db(mnesia) ->
    case catch mnesia:table_info(caps_features, storage_type) of
        %% Table does not exist yet — nothing to clean up.
        {'EXIT', _} ->
            ok;
        %% Already in the expected storage type.
        disc_only_copies ->
            ok;
        %% Wrong storage type: drop and recreate below.
        _ ->
            mnesia:delete_table(caps_features)
    end,
    %% local_content: every node keeps its own copy; contents are a cache.
    mongoose_mnesia:create_table(caps_features,
                                 [{disc_only_copies, [node()]},
                                  {local_content, true},
                                  {attributes, record_info(fields, caps_features)}]).
385 |
|
|
386 |
|
-spec init(list()) -> {ok, state()}.
%% gen_server init: set up storage, the in-memory cache and hook handlers.
init([HostType, #{cache_size := MaxSize, cache_life_time := LifeTime}]) ->
    init_db(db_type(HostType)),
    cache_tab:new(caps_features, [{max_size, MaxSize}, {life_time, LifeTime}]),
    gen_hook:add_handlers(hooks(HostType)),
    {ok, #state{host_type = HostType}}.
392 |
|
|
393 |
|
%% @doc gen_server call handler: only 'stop' is supported; anything else
%% is rejected with {error, badarg}.
-spec handle_call(term(), any(), state()) ->
          {stop, normal, ok, state()} | {reply, {error, any()}, state()}.
handle_call(stop, _From, State) ->
    {stop, normal, ok, State};
handle_call(_Request, _From, State) ->
    {reply, {error, badarg}, State}.
399 |
|
|
400 |
|
%% @doc gen_server cast handler: no casts are expected; ignore them all.
-spec handle_cast(any(), state()) -> {noreply, state()}.
handle_cast(_Msg, State) ->
    {noreply, State}.
402 |
|
|
403 |
|
%% @doc gen_server info handler: drain unexpected messages silently.
-spec handle_info(any(), state()) -> {noreply, state()}.
handle_info(_Info, State) ->
    {noreply, State}.
405 |
|
|
406 |
|
-spec terminate(any(), state()) -> ok.
%% gen_server terminate: unregister the hook handlers installed in init/1.
terminate(_Reason, #state{host_type = HostType}) ->
    gen_hook:delete_handlers(hooks(HostType)).
409 |
|
|
410 |
|
%% @doc Hook registrations for this host type, consumed by
%% gen_hook:add_handlers/1 and gen_hook:delete_handlers/1.
%% Fix: disco_local_features used to appear twice in this list (first and
%% last entry, identical priority), registering the handler twice so it
%% ran twice per disco dispatch; each hook is now listed exactly once.
hooks(HostType) ->
    [{disco_local_features, HostType, fun ?MODULE:disco_local_features/3, #{}, 1},
     {get_pep_recipients, HostType, fun ?MODULE:get_pep_recipients/3, #{}, 75},
     {filter_pep_recipient, HostType, fun ?MODULE:filter_pep_recipient/3, #{}, 75},
     {user_send_presence, HostType, fun ?MODULE:user_send_presence/3, #{}, 75},
     {user_receive_presence, HostType, fun ?MODULE:user_receive_presence/3, #{}, 1},
     {c2s_stream_features, HostType, fun ?MODULE:caps_stream_features/3, #{}, 75},
     {s2s_stream_features, HostType, fun ?MODULE:caps_stream_features/3, #{}, 75},
     {disco_local_identity, HostType, fun ?MODULE:disco_local_identity/3, #{}, 1},
     {disco_info, HostType, fun ?MODULE:disco_info/3, #{}, 1}].
422 |
|
|
423 |
|
%% @doc gen_server code-change callback: state layout is unchanged.
-spec code_change(any(), state(), any()) -> {ok, state()}.
code_change(_OldVsn, State, _Extra) ->
    {ok, State}.
425 |
|
|
426 |
|
-spec feature_request(mongoose_acc:t(), jid:lserver(), jid:jid(), caps(), [binary()]) ->
          mongoose_acc:t().
%% Walk the caps sub-nodes; for each one not yet cached, send a disco#info
%% IQ to the announcing entity and continue from its (async) response.
%% When all sub-nodes are known, fire the caps_recognised hook.
feature_request(Acc, LServer, From, Caps, [SubNode | Tail] = SubNodes) ->
    Node = Caps#caps.node,
    NodePair = {Node, SubNode},
    HostType = mongoose_acc:host_type(Acc),
    case cache_tab:lookup(caps_features, NodePair, caps_read_fun(HostType, NodePair)) of
        %% Feature list already cached — move on to the next sub-node.
        {ok, Fs} when is_list(Fs) ->
            feature_request(Acc, LServer, From, Caps, Tail);
        Other ->
            %% An integer cache entry is the timestamp of a previous request;
            %% only re-request after ?BAD_HASH_LIFETIME seconds have passed.
            NeedRequest = case Other of
                              {ok, TS} -> os:system_time(second) >= TS + (?BAD_HASH_LIFETIME);
                              _ -> true
                          end,
            %% Continuation invoked by ejabberd_local with the IQ reply.
            F = fun (_From, _To, _Acc1, timeout) ->
                        %% IQ request timed out, skip this node
                        feature_request(Acc, LServer, From, Caps, Tail);
                    (_From, _To, Acc1, IQReply) ->
                        feature_response(Acc1, IQReply, LServer, From, Caps, SubNodes)
                end,
            case NeedRequest of
                true ->
                    %% disco#info query for "Node#SubNode".
                    IQ = #iq{type = get, xmlns = ?NS_DISCO_INFO,
                             sub_el =
                                 [#xmlel{name = <<"query">>,
                                         attrs =
                                             [{<<"xmlns">>, ?NS_DISCO_INFO},
                                              {<<"node">>,
                                               <<Node/binary, "#",
                                                 SubNode/binary>>}],
                                         children = []}]},
                    %% Mark the pair as pending with the current timestamp so
                    %% concurrent presences do not trigger duplicate requests.
                    cache_tab:insert(caps_features, NodePair, os:system_time(second),
                                     caps_write_fun(HostType, NodePair, os:system_time(second))),
                    ejabberd_local:route_iq(jid:make_noprep(<<>>, LServer, <<>>), From, Acc, IQ, F),
                    Acc;
                false -> feature_request(Acc, LServer, From, Caps, Tail)
            end
    end;
feature_request(Acc, _LServer, From, Caps, []) ->
    %% feature_request is never executed with empty SubNodes list
    %% so if we end up here, it means the caps are known
    HostType = mongoose_acc:host_type(Acc),
    mongoose_hooks:caps_recognised(Acc, From, self(), get_features_list(HostType, Caps)).
469 |
|
|
470 |
|
-spec feature_response(mongoose_acc:t(), jlib:iq(), jid:lserver(), jid:jid(), caps(), [binary()]) ->
          mongoose_acc:t().
%% Handle a disco#info reply for the head sub-node: on a hash-verified
%% result, cache the advertised features; then continue with the rest.
feature_response(Acc, #iq{type = result, sub_el = [#xmlel{children = Els}]},
                 LServer, From, Caps, [SubNode | SubNodes]) ->
    HostType = mongoose_acc:host_type(Acc),
    NodePair = {Caps#caps.node, SubNode},
    case check_hash(Caps, Els) of
        true ->
            %% Collect the 'var' of every <feature/> child element.
            Features = lists:flatmap(fun (#xmlel{name = <<"feature">>,
                                                 attrs = FAttrs}) ->
                                             [xml:get_attr_s(<<"var">>, FAttrs)];
                                         (_) -> []
                                     end,
                                     Els),
            cache_tab:insert(caps_features, NodePair,
                             Features,
                             caps_write_fun(HostType, NodePair, Features));
        %% Hash mismatch: do not cache; the pending timestamp written in
        %% feature_request/5 throttles re-requests.
        false -> ok
    end,
    feature_request(Acc, LServer, From, Caps, SubNodes);
%% Error or malformed reply: skip this sub-node and continue.
feature_response(Acc, _IQResult, LServer, From, Caps, [_SubNode | SubNodes]) ->
    feature_request(Acc, LServer, From, Caps, SubNodes).
492 |
|
|
493 |
|
%% @doc Build the cache-miss read callback for cache_tab: reads the node
%% pair's features (or pending timestamp) from the backing store.
-spec caps_read_fun(mongooseim:host_type(), node_pair()) ->
          fun(() -> {ok, maybe_pending_features()} | error).
caps_read_fun(HostType, NodePair) ->
    caps_read_fun(HostType, NodePair, db_type(HostType)).

caps_read_fun(_HostType, NodePair, mnesia) ->
    fun() ->
            case mnesia:dirty_read({caps_features, NodePair}) of
                [#caps_features{features = Features}] -> {ok, Features};
                _ -> error
            end
    end.
506 |
|
|
507 |
|
%% @doc Build the write-through callback for cache_tab: persists the node
%% pair's features (or pending timestamp) to the backing store.
-spec caps_write_fun(mongooseim:host_type(), node_pair(), maybe_pending_features()) ->
          fun(() -> ok).
caps_write_fun(HostType, NodePair, Features) ->
    caps_write_fun(HostType, NodePair, Features, db_type(HostType)).

caps_write_fun(_HostType, NodePair, Features, mnesia) ->
    fun() ->
            Record = #caps_features{node_pair = NodePair, features = Features},
            mnesia:dirty_write(Record)
    end.
518 |
|
|
519 |
|
%% @doc Remove a node pair from both the cache and the backing store
%% (exported for tests).
-spec delete_caps(node_pair()) -> ok.
delete_caps(NodePair) ->
    cache_tab:delete(caps_features, NodePair, caps_delete_fun(NodePair)).
522 |
|
|
523 |
|
%% @doc Build the delete callback for cache_tab: removes the node pair's
%% record from Mnesia.
-spec caps_delete_fun(node_pair()) -> fun(() -> ok).
caps_delete_fun(NodePair) ->
    fun() -> mnesia:dirty_delete(caps_features, NodePair) end.
528 |
|
|
529 |
|
-spec make_my_disco_hash(mongooseim:host_type(), jid:lserver()) -> binary().
%% Compute this server's own XEP-0115 'ver' hash (SHA-1, base64) from its
%% local disco identities, info forms and features; <<>> if none exist.
make_my_disco_hash(HostType, LServer) ->
    JID = jid:make(<<>>, LServer, <<>>),
    case mongoose_disco:get_local_features(HostType, JID, JID, <<>>, <<>>) of
        empty ->
            <<>>;
        {result, FeaturesXML} ->
            IdentityXML = mongoose_disco:get_local_identity(HostType, JID, JID, <<>>, <<>>),
            InfoXML = mongoose_disco:get_info(HostType, mod_disco, <<>>, <<>>),
            make_disco_hash(IdentityXML ++ InfoXML ++ FeaturesXML, sha1)
    end.
540 |
|
|
541 |
|
%% @doc Compute the XEP-0115 verification string: concatenate the sorted
%% identities, features and extended-info forms, hash with the requested
%% algorithm, and base64-encode the digest.
-spec make_disco_hash([exml:element()], HashAlgorithm :: atom()) -> binary().
make_disco_hash(DiscoEls, Algo) ->
    Concat = list_to_binary([concat_identities(DiscoEls),
                             concat_features(DiscoEls), concat_info(DiscoEls)]),
    Digest = case Algo of
                 md5 -> erlang:md5(Concat);
                 sha1 -> crypto:hash(sha, Concat);
                 sha224 -> crypto:hash(sha224, Concat);
                 sha256 -> crypto:hash(sha256, Concat);
                 sha384 -> crypto:hash(sha384, Concat);
                 sha512 -> crypto:hash(sha512, Concat)
             end,
    jlib:encode_base64(Digest).
553 |
|
|
554 |
|
%% @doc Verify the advertised 'ver' value against a hash recomputed from
%% the disco#info children. Unknown hash algorithms are accepted as-is
%% (legacy, pre-1.4 XEP-0115 behaviour).
check_hash(#caps{hash = HashName, version = Version}, Els) ->
    case hash_name_to_algo(HashName) of
        unknown -> true;
        Algo -> Version == make_disco_hash(Els, Algo)
    end.

%% Map the XEP-0115 'hash' attribute value to our internal algorithm atom.
hash_name_to_algo(<<"md5">>) -> md5;
hash_name_to_algo(<<"sha-1">>) -> sha1;
hash_name_to_algo(<<"sha-224">>) -> sha224;
hash_name_to_algo(<<"sha-256">>) -> sha256;
hash_name_to_algo(<<"sha-384">>) -> sha384;
hash_name_to_algo(<<"sha-512">>) -> sha512;
hash_name_to_algo(_) -> unknown.
570 |
|
|
571 |
|
%% @doc Sorted, deduplicated "var<" chunks for every <feature/> element,
%% per the XEP-0115 verification-string algorithm.
concat_features(Els) ->
    Chunks = lists:flatmap(fun(#xmlel{name = <<"feature">>, attrs = Attrs}) ->
                                   [[xml:get_attr_s(<<"var">>, Attrs), $<]];
                              (_) ->
                                   []
                           end, Els),
    lists:usort(Chunks).
579 |
|
|
580 |
|
%% @doc Sorted "category/type/xml:lang/name<" chunks for every
%% <identity/> element, per the XEP-0115 verification-string algorithm.
concat_identities(Els) ->
    Chunks = lists:flatmap(fun(#xmlel{name = <<"identity">>, attrs = Attrs}) ->
                                   [[xml:get_attr_s(<<"category">>, Attrs),
                                     $/, xml:get_attr_s(<<"type">>, Attrs),
                                     $/,
                                     xml:get_attr_s(<<"xml:lang">>, Attrs),
                                     $/, xml:get_attr_s(<<"name">>, Attrs),
                                     $<]];
                              (_) ->
                                   []
                           end, Els),
    lists:sort(Chunks).
593 |
|
|
594 |
|
%% @doc Sorted chunks of extended service discovery (data form) info,
%% per the XEP-0115 verification-string algorithm.
concat_info(Els) ->
    PerElement = [concat_xdata_fields(mongoose_data_forms:parse_form(El)) || El <- Els],
    lists:sort(lists:append(PerElement)).
599 |
|
|
600 |
|
%% @doc Serialize a parsed 'result'-type data form into the XEP-0115
%% chunk format: "FORM_TYPE<" followed by sorted "var<value<...<" fields
%% (values sorted within each field). Anything else contributes nothing.
concat_xdata_fields(#{type := <<"result">>, kvs := KVs, ns := NS}) ->
    Fields = [[[Part, $<] || Part <- [Var | lists:sort(Values)]]
              || {Var, Values} <- maps:to_list(KVs)],
    [[NS, $<, lists:sort(Fields)]];
concat_xdata_fields(_) ->
    [].
608 |
|
|
609 |
|
%% @doc Fold F(Key, Val, Acc) over all entries of a gb_tree in key order.
gb_trees_fold(F, Acc0, Tree) ->
    gb_trees_fold_iter(F, Acc0, gb_trees:iterator(Tree)).

%% Tail-recursive worker driving the gb_trees iterator.
gb_trees_fold_iter(F, Acc, Iter) ->
    case gb_trees:next(Iter) of
        none ->
            Acc;
        {Key, Val, NextIter} ->
            gb_trees_fold_iter(F, F(Key, Val, Acc), NextIter)
    end.
620 |
|
|
621 |
|
%% True iff Node is our own caps node, i.e. the MongooseIM URI optionally
%% followed by "#<hash>" (as advertised in caps_stream_features/3).
is_valid_node(Node) ->
    case mongoose_bin:tokens(Node, <<"#">>) of
        [?MONGOOSE_URI|_] ->
            true;
        _ ->
            false
    end.
628 |
|
|
629 |
|
%% @doc Storage backend used for the caps_features table; only Mnesia is
%% currently implemented, regardless of host type.
-spec db_type(mongooseim:host_type()) -> mnesia.
db_type(_HostType) ->
    mnesia.