1 |
|
%%%---------------------------------------------------------------------- |
2 |
|
%%% File : mod_caps.erl |
3 |
|
%%% Author : Magnus Henoch <henoch@dtek.chalmers.se> |
4 |
|
%%% Purpose : Request and cache Entity Capabilities (XEP-0115) |
5 |
|
%%% Created : 7 Oct 2006 by Magnus Henoch <henoch@dtek.chalmers.se> |
6 |
|
%%% |
7 |
|
%%% |
8 |
|
%%% ejabberd, Copyright (C) 2002-2015 ProcessOne |
9 |
|
%%% |
10 |
|
%%% This program is free software; you can redistribute it and/or |
11 |
|
%%% modify it under the terms of the GNU General Public License as |
12 |
|
%%% published by the Free Software Foundation; either version 2 of the |
13 |
|
%%% License, or (at your option) any later version. |
14 |
|
%%% |
15 |
|
%%% This program is distributed in the hope that it will be useful, |
16 |
|
%%% but WITHOUT ANY WARRANTY; without even the implied warranty of |
17 |
|
%%% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU |
18 |
|
%%% General Public License for more details. |
19 |
|
%%% |
20 |
|
%%% You should have received a copy of the GNU General Public License along |
21 |
|
%%% with this program; if not, write to the Free Software Foundation, Inc., |
22 |
|
%%% 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. |
23 |
|
%%% |
24 |
|
%%% 2009, improvements from ProcessOne to support correct PEP handling |
25 |
|
%%% through s2s, use less memory, and speedup global caps handling |
26 |
|
%%%---------------------------------------------------------------------- |
27 |
|
|
28 |
|
-module(mod_caps). |
29 |
|
|
30 |
|
-author('henoch@dtek.chalmers.se'). |
31 |
|
|
32 |
|
-xep([{xep, 115}, {version, "1.5"}]). |
33 |
|
|
34 |
|
-behaviour(gen_server). |
35 |
|
-behaviour(gen_mod). |
36 |
|
-behaviour(mongoose_module_metrics). |
37 |
|
|
38 |
|
-export([read_caps/1, caps_stream_features/3, |
39 |
|
disco_local_features/1, disco_local_identity/1, disco_info/1]). |
40 |
|
|
41 |
|
%% gen_mod callbacks |
42 |
|
-export([start/2, start_link/2, stop/1, config_spec/0, supported_features/0]). |
43 |
|
|
44 |
|
%% gen_server callbacks |
45 |
|
-export([init/1, handle_info/2, handle_call/3, |
46 |
|
handle_cast/2, terminate/2, code_change/3]). |
47 |
|
|
48 |
|
-export([user_send_packet/4, user_receive_packet/5, |
49 |
|
c2s_presence_in/4, c2s_filter_packet/5, |
50 |
|
c2s_broadcast_recipients/5]). |
51 |
|
|
52 |
|
%% for test cases |
53 |
|
-export([delete_caps/1, make_disco_hash/2]). |
54 |
|
|
55 |
|
-ignore_xref([c2s_broadcast_recipients/5, c2s_filter_packet/5, c2s_presence_in/4, |
56 |
|
caps_stream_features/3, delete_caps/1, disco_info/1, disco_local_features/1, |
57 |
|
disco_local_identity/1, make_disco_hash/2, read_caps/1, start_link/2, |
58 |
|
user_receive_packet/5, user_send_packet/4]). |
59 |
|
|
60 |
|
-include("mongoose.hrl"). |
61 |
|
-include("mongoose_config_spec.hrl"). |
62 |
|
|
63 |
|
-include("jlib.hrl"). |
64 |
|
|
65 |
|
-define(PROCNAME, ejabberd_mod_caps). |
66 |
|
|
67 |
|
-define(BAD_HASH_LIFETIME, 600). |
68 |
|
|
69 |
|
-record(caps, |
70 |
|
{ |
71 |
|
node = <<>> :: binary(), |
72 |
|
version = <<>> :: binary(), |
73 |
|
hash = <<>> :: binary(), |
74 |
|
exts = [] :: [binary()] |
75 |
|
}). |
76 |
|
|
77 |
|
-type caps() :: #caps{}. |
78 |
|
-type caps_resources() :: gb_trees:tree(jid:simple_jid(), caps()). |
79 |
|
|
80 |
|
-export_type([caps/0]). |
81 |
|
|
82 |
|
-type features() :: [binary()]. |
83 |
|
-type maybe_pending_features() :: features() | pos_integer(). |
84 |
|
-type node_pair() :: {binary(), binary()}. |
85 |
|
|
86 |
|
-record(caps_features, |
87 |
|
{ |
88 |
|
node_pair = {<<>>, <<>>} :: node_pair(), |
89 |
|
features = [] :: maybe_pending_features() |
90 |
|
}). |
91 |
|
|
92 |
|
-record(state, {host_type :: mongooseim:host_type()}). |
93 |
|
|
94 |
|
-type state() :: #state{}. |
95 |
|
|
96 |
|
%% @doc Start the per-host-type caps gen_server, registered under a name
%% derived from the host type.
-spec start_link(mongooseim:host_type(), list()) -> any().
start_link(HostType, Opts) ->
    Name = gen_mod:get_module_proc(HostType, ?PROCNAME),
    gen_server:start_link({local, Name}, ?MODULE, [HostType, Opts], []).
101 |
|
|
102 |
|
%% @doc gen_mod callback: attach the per-host-type worker to ejabberd_sup.
-spec start(mongooseim:host_type(), list()) -> any().
start(HostType, Opts) ->
    Name = gen_mod:get_module_proc(HostType, ?PROCNAME),
    Spec = {Name,
            {?MODULE, start_link, [HostType, Opts]},
            transient, 1000, worker, [?MODULE]},
    ejabberd_sup:start_child(Spec).
108 |
|
|
109 |
|
%% @doc gen_mod callback: stop the worker gracefully, then remove its
%% child spec from the supervisor.
-spec stop(mongooseim:host_type()) -> any().
stop(HostType) ->
    Name = gen_mod:get_module_proc(HostType, ?PROCNAME),
    gen_server:call(Name, stop),
    ejabberd_sup:stop_child(Name).
114 |
|
|
115 |
|
%% @doc TOML configuration schema: both cache knobs are optional positive
%% integers (size in entries, life time in seconds).
-spec config_spec() -> mongoose_config_spec:config_section().
config_spec() ->
    PosInt = #option{type = integer, validate = positive},
    #section{items = #{<<"cache_size">> => PosInt,
                       <<"cache_life_time">> => PosInt}}.
124 |
|
|
125 |
:-( |
%% @doc This module can be used with dynamically added domains.
supported_features() ->
    [dynamic_domains].
126 |
|
|
127 |
|
%% @doc Same as get_features/2, but maps the 'unknown' marker to an empty list.
-spec get_features_list(mongooseim:host_type(), nothing | caps()) -> features().
get_features_list(HostType, Caps) ->
    Fs = get_features(HostType, Caps),
    case Fs of
        unknown -> [];
        _ -> Fs
    end.
133 |
|
|
134 |
|
%% @doc Collect the cached feature lists for all sub-nodes (version + exts)
%% of a caps record.  Returns 'unknown' when nothing could be resolved.
-spec get_features(mongooseim:host_type(), nothing | caps()) -> unknown | features().
get_features(_HostType, nothing) -> [];
get_features(HostType, #caps{node = Node, version = Version, exts = Exts}) ->
    %% NOTE(review): if the first sub-node lookup misses, Acc becomes the atom
    %% 'unknown'; a later cache hit would then do Features ++ unknown, producing
    %% an improper list.  Appears latent in practice — confirm before changing.
    Collect =
        fun(SubNode, Acc) ->
                NodePair = {Node, SubNode},
                Cached = cache_tab:lookup(caps_features, NodePair,
                                          caps_read_fun(HostType, NodePair)),
                case Cached of
                    {ok, Features} when is_list(Features) ->
                        Features ++ Acc;
                    _ when Acc == [] ->
                        unknown;
                    _ ->
                        Acc
                end
        end,
    lists:foldl(Collect, [], [Version | Exts]).
152 |
|
|
153 |
|
%% @doc Extract the last XEP-0115 <c/> element from a list of presence
%% children.  Returns 'nothing' when no caps element is found, or when the
%% stanza turns out to be a MUC user presence (skipped on purpose).
-spec read_caps([exml:element()]) -> nothing | caps().
read_caps(Els) -> read_caps(Els, nothing).

read_caps([El | Rest], Acc) ->
    case El of
        #xmlel{name = <<"c">>, attrs = Attrs} ->
            case xml:get_attr_s(<<"xmlns">>, Attrs) of
                ?NS_CAPS ->
                    Exts = mongoose_bin:tokens(xml:get_attr_s(<<"ext">>, Attrs), <<" ">>),
                    Caps = #caps{node = xml:get_attr_s(<<"node">>, Attrs),
                                 version = xml:get_attr_s(<<"ver">>, Attrs),
                                 hash = xml:get_attr_s(<<"hash">>, Attrs),
                                 exts = Exts},
                    read_caps(Rest, Caps);
                _ ->
                    read_caps(Rest, Acc)
            end;
        #xmlel{name = <<"x">>, attrs = Attrs} ->
            case xml:get_attr_s(<<"xmlns">>, Attrs) of
                ?NS_MUC_USER -> nothing;
                _ -> read_caps(Rest, Acc)
            end;
        _ ->
            read_caps(Rest, Acc)
    end;
read_caps([], Acc) -> Acc.
174 |
|
|
175 |
|
%% @doc Hook handler: when a user sends presence to its own bare JID, inspect
%% it for advertised entity capabilities; all other packets pass through.
-spec user_send_packet(mongoose_acc:t(), jid:jid(), jid:jid(), exml:element()) -> mongoose_acc:t().
user_send_packet(Acc,
                 #jid{luser = User, lserver = LServer} = From,
                 #jid{luser = User, lserver = LServer, lresource = <<>>},
                 #xmlel{name = <<"presence">>, attrs = Attrs, children = Children}) ->
    Type = xml:get_attr_s(<<"type">>, Attrs),
    handle_presence(Acc, LServer, From, Type, Children);
user_send_packet(Acc, _From, _To, _Packet) ->
    Acc.
184 |
|
|
185 |
|
%% @doc Hook handler: process caps carried in an incoming presence, but only
%% when the sender's domain is not hosted locally — locally-originated
%% presence was already handled in user_send_packet.
-spec user_receive_packet(mongoose_acc:t(), jid:jid(), jid:jid(), jid:jid(), exml:element()) ->
          mongoose_acc:t().
user_receive_packet(Acc, #jid{lserver = LServer}, From, _To,
                    #xmlel{name = <<"presence">>, attrs = Attrs, children = Children}) ->
    case mongoose_domain_api:get_host_type(From#jid.lserver) of
        {ok, _} ->
            %% it was already handled in 'user_send_packet'
            Acc;
        {error, not_found} ->
            Type = xml:get_attr_s(<<"type">>, Attrs),
            handle_presence(Acc, LServer, From, Type, Children)
    end;
user_receive_packet(Acc, _JID, _From, _To, _Packet) ->
    Acc.
198 |
|
|
199 |
|
%% @doc For available presence, look for a caps element and start the feature
%% discovery/caching flow; any other presence type passes through untouched.
-spec handle_presence(mongoose_acc:t(), jid:lserver(), jid:jid(), binary(), [exml:element()]) ->
          mongoose_acc:t().
handle_presence(Acc, LServer, From, Type, Elements) when Type =:= <<>>;
                                                         Type =:= <<"available">> ->
    case read_caps(Elements) of
        #caps{version = Version, exts = Exts} = Caps ->
            feature_request(Acc, LServer, From, Caps, [Version | Exts]);
        nothing ->
            Acc
    end;
handle_presence(Acc, _LServer, _From, _Type, _Elements) ->
    Acc.
211 |
|
|
212 |
|
%% @doc Hook handler: advertise this server's own caps hash as a stream
%% feature (used for both c2s and s2s streams).
-spec caps_stream_features([exml:element()], mongooseim:host_type(), jid:lserver()) ->
          [exml:element()].
caps_stream_features(Acc, HostType, LServer) ->
    case make_my_disco_hash(HostType, LServer) of
        <<>> ->
            Acc;
        Hash ->
            C = #xmlel{name = <<"c">>,
                       attrs = [{<<"xmlns">>, ?NS_CAPS},
                                {<<"hash">>, <<"sha-1">>},
                                {<<"node">>, ?MONGOOSE_URI},
                                {<<"ver">>, Hash}],
                       children = []},
            [C | Acc]
    end.
225 |
|
|
226 |
|
%% @doc Hook handler: a disco#info query on our own caps node is answered as
%% if it targeted the bare server (empty node).
-spec disco_local_features(mongoose_disco:feature_acc()) -> mongoose_disco:feature_acc().
disco_local_features(#{node := Node} = Acc) ->
    case is_valid_node(Node) of
        true -> Acc#{node := <<>>};
        false -> Acc
    end.
232 |
|
|
233 |
|
%% @doc Hook handler: identity queries on our own caps node are redirected to
%% the bare server node.
-spec disco_local_identity(mongoose_disco:identity_acc()) -> mongoose_disco:identity_acc().
disco_local_identity(#{node := Node} = Acc) ->
    case is_valid_node(Node) of
        true -> Acc#{node := <<>>};
        false -> Acc
    end.
239 |
|
|
240 |
|
%% @doc Hook handler: extended-info queries on our own caps node are
%% redirected to the bare server node.
-spec disco_info(mongoose_disco:info_acc()) -> mongoose_disco:info_acc().
disco_info(#{node := Node} = Acc) ->
    case is_valid_node(Node) of
        true -> Acc#{node := <<>>};
        false -> Acc
    end.
246 |
|
|
247 |
|
%% @doc Hook handler: maintain the per-session table of caps advertised by
%% contacts.  Available presence from a contact we are subscribed to inserts
%% or refreshes the entry; unavailable/error presence removes it.
-spec c2s_presence_in(ejabberd_c2s:state(), jid:jid(), jid:jid(), exml:element()) ->
          ejabberd_c2s:state().
c2s_presence_in(C2SState, From, To, #xmlel{attrs = Attrs, children = Els} = Packet) ->
    ?LOG_DEBUG(#{what => caps_c2s_presence_in,
                 to => jid:to_binary(To), from => jid:to_binary(From),
                 exml_packet => Packet, c2s_state => C2SState}),
    Type = xml:get_attr_s(<<"type">>, Attrs),
    Subscription = ejabberd_c2s:get_subscription(From, C2SState),
    Insert = (Type == <<>> orelse Type == <<"available">>)
        and (Subscription == both orelse Subscription == to),
    Delete = Type == <<"unavailable">> orelse Type == <<"error">>,
    case Insert orelse Delete of
        false ->
            C2SState;
        true ->
            LFrom = jid:to_lower(From),
            Rs = case ejabberd_c2s:get_aux_field(caps_resources, C2SState) of
                     {ok, Tree} -> Tree;
                     error -> gb_trees:empty()
                 end,
            Caps = read_caps(Els),
            NewRs = case Caps of
                        nothing when Insert == true ->
                            %% available presence without caps: keep table as-is
                            Rs;
                        _ when Insert == true ->
                            ?LOG_DEBUG(#{what => caps_set_caps, caps => Caps,
                                         to => jid:to_binary(To),
                                         from => jid:to_binary(From),
                                         exml_packet => Packet,
                                         c2s_state => C2SState}),
                            upsert_caps(LFrom, Caps, Rs);
                        _ ->
                            gb_trees:delete_any(LFrom, Rs)
                    end,
            ejabberd_c2s:set_aux_field(caps_resources, NewRs, C2SState)
    end.
281 |
|
|
282 |
|
%% @doc Insert or refresh the caps stored for a session JID; no-op when the
%% stored value already equals the new one.
-spec upsert_caps(jid:simple_jid(), caps(), caps_resources()) -> caps_resources().
upsert_caps(LFrom, Caps, Rs) ->
    case gb_trees:lookup(LFrom, Rs) of
        none -> gb_trees:insert(LFrom, Caps, Rs);
        {value, Caps} -> Rs;
        {value, _Old} -> gb_trees:update(LFrom, Caps, Rs)
    end.
291 |
|
|
292 |
|
%% @doc Hook handler: decide whether a PEP message should be dropped for a
%% given recipient, based on the features cached for that session JID.
-spec c2s_filter_packet(Acc, ejabberd_c2s:state(), {atom(), binary()},
                        jid:jid(), exml:element()) -> Acc
              when Acc :: boolean().
c2s_filter_packet(InAcc, C2SState, {pep_message, Feature}, To, _Packet) ->
    case ejabberd_c2s:get_aux_field(caps_resources, C2SState) of
        {ok, Rs} ->
            ?LOG_DEBUG(#{what => caps_lookup, text => <<"Look for CAPS for To jid">>,
                         acc => InAcc, c2s_state => C2SState, caps_resources => Rs}),
            case gb_trees:lookup(jid:to_lower(To), Rs) of
                {value, Caps} ->
                    HostType = ejabberd_c2s_state:host_type(C2SState),
                    Known = get_features_list(HostType, Caps),
                    %% drop unless the recipient advertised this feature
                    {stop, not lists:member(Feature, Known)};
                none ->
                    {stop, true}
            end;
        _ ->
            InAcc
    end;
c2s_filter_packet(Acc, _, _, _, _) -> Acc.
312 |
|
|
313 |
|
%% @doc Hook handler: narrow a PEP broadcast to those recipients whose cached
%% caps include the message's feature.
-spec c2s_broadcast_recipients(Acc, ejabberd_c2s:state(), {atom(), binary()},
                               jid:jid(), exml:element()) -> Acc
              when Acc :: [jid:simple_jid()].
c2s_broadcast_recipients(InAcc, C2SState, {pep_message, Feature}, _From, _Packet) ->
    HostType = ejabberd_c2s_state:host_type(C2SState),
    case ejabberd_c2s:get_aux_field(caps_resources, C2SState) of
        {ok, Rs} -> filter_recipients_by_caps(HostType, InAcc, Feature, Rs);
        _ -> InAcc
    end;
c2s_broadcast_recipients(Acc, _, _, _, _) -> Acc.
324 |
|
|
325 |
|
%% @doc Prepend to the accumulator every JID in the caps tree whose cached
%% feature list includes Feature.
%% Spec fix: the feature argument is the bare binary feature name — the only
%% caller (c2s_broadcast_recipients) extracts it from {pep_message, Feature}
%% before calling — not a {atom(), binary()} pair as previously declared.
-spec filter_recipients_by_caps(mongooseim:host_type(), Acc,
                                binary(), caps_resources()) -> Acc
              when Acc :: [jid:simple_jid()].
filter_recipients_by_caps(HostType, InAcc, Feature, Rs) ->
    Keep = fun(USR, Caps, Acc) ->
                   case lists:member(Feature, get_features_list(HostType, Caps)) of
                       true -> [USR | Acc];
                       false -> Acc
                   end
           end,
    gb_trees_fold(Keep, InAcc, Rs).
336 |
|
|
337 |
|
%% @doc Ensure the caps_features Mnesia table exists as disc_only_copies.
%% A pre-existing table with a different storage type is dropped first so the
%% create call below can recreate it with the expected layout.
%% Fix: replaces old-style `catch Expr` (which conflates throws/errors/exits
%% and loses the stacktrace) with a scoped try/catch in a named helper.
init_db(mnesia) ->
    case table_storage_type(caps_features) of
        undefined ->
            ok;                   %% table does not exist yet
        disc_only_copies ->
            ok;                   %% already in the expected storage type
        _Other ->
            mnesia:delete_table(caps_features)
    end,
    mnesia:create_table(caps_features,
                        [{disc_only_copies, [node()]},
                         {local_content, true},
                         {attributes, record_info(fields, caps_features)}]),
    mnesia:add_table_copy(caps_features, node(), disc_only_copies).

%% Storage type of Tab, or 'undefined' when the table does not exist
%% (mnesia:table_info exits for unknown tables).
table_storage_type(Tab) ->
    try
        mnesia:table_info(Tab, storage_type)
    catch
        _:_ -> undefined
    end.
353 |
|
|
354 |
|
%% @doc gen_server callback: prepare the storage backend, create the
%% in-memory caps cache and register all hook handlers.
-spec init(list()) -> {ok, state()}.
init([HostType, Opts]) ->
    init_db(db_type(HostType)),
    MaxSize = gen_mod:get_opt(cache_size, Opts, 1000),
    %% default life time: 24 hours expressed in seconds
    LifeTime = gen_mod:get_opt(cache_life_time, Opts, timer:hours(24) div 1000),
    cache_tab:new(caps_features, [{max_size, MaxSize}, {life_time, LifeTime}]),
    ejabberd_hooks:add(hooks(HostType)),
    {ok, #state{host_type = HostType}}.
362 |
|
|
363 |
|
%% @doc gen_server callback: only 'stop' is supported; any other request is
%% refused with {error, badarg}.
-spec handle_call(term(), any(), state()) ->
          {stop, normal, ok, state()} | {reply, {error, any()}, state()}.
handle_call(stop, _From, State) ->
    {stop, normal, ok, State};
handle_call(_Req, _From, State) ->
    {reply, {error, badarg}, State}.
369 |
|
|
370 |
|
%% @doc gen_server callback: casts are ignored.
-spec handle_cast(any(), state()) -> {noreply, state()}.
handle_cast(_Msg, State) ->
    {noreply, State}.
372 |
|
|
373 |
|
%% @doc gen_server callback: stray messages are dropped without effect.
-spec handle_info(any(), state()) -> {noreply, state()}.
handle_info(_Info, State) ->
    {noreply, State}.
375 |
|
|
376 |
|
%% @doc gen_server callback: detach every hook handler for this host type.
-spec terminate(any(), state()) -> ok.
terminate(_Reason, #state{host_type = HostType}) ->
    ejabberd_hooks:delete(hooks(HostType)).
379 |
|
|
380 |
|
%% @doc Hook registrations for this module on the given host type,
%% expressed as {hook name, callback function, priority} triples.
hooks(HostType) ->
    Specs = [{c2s_presence_in, c2s_presence_in, 75},
             {c2s_filter_packet, c2s_filter_packet, 75},
             {c2s_broadcast_recipients, c2s_broadcast_recipients, 75},
             {user_send_packet, user_send_packet, 75},
             {user_receive_packet, user_receive_packet, 75},
             {c2s_stream_features, caps_stream_features, 75},
             {s2s_stream_features, caps_stream_features, 75},
             {disco_local_features, disco_local_features, 1},
             {disco_local_identity, disco_local_identity, 1},
             {disco_info, disco_info, 1}],
    [{Hook, HostType, ?MODULE, Fun, Prio} || {Hook, Fun, Prio} <- Specs].
391 |
|
|
392 |
|
%% @doc gen_server callback: no state migration is needed between versions.
-spec code_change(any(), state(), any()) -> {ok, state()}.
code_change(_OldVsn, State, _Extra) ->
    {ok, State}.
394 |
|
|
395 |
|
%% @doc Walk the caps sub-nodes; for each one whose feature list is not yet
%% cached, send a disco#info IQ to the entity and cache a pending timestamp.
%% When every sub-node is known, fire the caps_recognised hook.
%% Fix: the pending timestamp was produced by two separate calls to
%% os:system_time(second), so the value handed to the cache and the value
%% handed to the backend write fun could differ by a second; it is now
%% computed once.
-spec feature_request(mongoose_acc:t(), jid:lserver(), jid:jid(), caps(), [binary()]) ->
          mongoose_acc:t().
feature_request(Acc, LServer, From, Caps, [SubNode | Tail] = SubNodes) ->
    Node = Caps#caps.node,
    NodePair = {Node, SubNode},
    HostType = mongoose_acc:host_type(Acc),
    case cache_tab:lookup(caps_features, NodePair, caps_read_fun(HostType, NodePair)) of
        {ok, Fs} when is_list(Fs) ->
            %% already cached: continue with the remaining sub-nodes
            feature_request(Acc, LServer, From, Caps, Tail);
        Other ->
            %% a cached integer is the timestamp of a previous (still
            %% unanswered or failed) request; retry only after it expires
            NeedRequest = case Other of
                              {ok, TS} -> os:system_time(second) >= TS + (?BAD_HASH_LIFETIME);
                              _ -> true
                          end,
            F = fun (_From, _To, Acc1, IQReply) ->
                        feature_response(Acc1, IQReply, LServer, From, Caps, SubNodes)
                end,
            case NeedRequest of
                true ->
                    IQ = #iq{type = get, xmlns = ?NS_DISCO_INFO,
                             sub_el =
                                 [#xmlel{name = <<"query">>,
                                         attrs =
                                             [{<<"xmlns">>, ?NS_DISCO_INFO},
                                              {<<"node">>,
                                               <<Node/binary, "#",
                                                 SubNode/binary>>}],
                                         children = []}]},
                    Now = os:system_time(second),
                    cache_tab:insert(caps_features, NodePair, Now,
                                     caps_write_fun(HostType, NodePair, Now)),
                    ejabberd_local:route_iq(jid:make_noprep(<<>>, LServer, <<>>), From, Acc, IQ, F),
                    Acc;
                false -> feature_request(Acc, LServer, From, Caps, Tail)
            end
    end;
feature_request(Acc, _LServer, From, Caps, []) ->
    %% feature_request is never executed with empty SubNodes list
    %% so if we end up here, it means the caps are known
    HostType = mongoose_acc:host_type(Acc),
    mongoose_hooks:caps_recognised(Acc, From, self(), get_features_list(HostType, Caps)).
435 |
|
|
436 |
|
%% @doc Handle the disco#info reply for one caps sub-node.  On a result whose
%% hash verifies, cache the advertised features; otherwise keep the pending
%% timestamp.  Either way, continue with the remaining sub-nodes.
-spec feature_response(mongoose_acc:t(), jlib:iq(), jid:lserver(), jid:jid(), caps(), [binary()]) ->
          mongoose_acc:t().
feature_response(Acc, #iq{type = result, sub_el = [#xmlel{children = Els}]},
                 LServer, From, Caps, [SubNode | SubNodes]) ->
    HostType = mongoose_acc:host_type(Acc),
    NodePair = {Caps#caps.node, SubNode},
    case check_hash(Caps, Els) of
        true ->
            %% collect the var of every <feature/> child; other children
            %% contribute nothing
            Features = [xml:get_attr_s(<<"var">>, FAttrs)
                        || #xmlel{name = <<"feature">>, attrs = FAttrs} <- Els],
            cache_tab:insert(caps_features, NodePair, Features,
                             caps_write_fun(HostType, NodePair, Features));
        false ->
            ok
    end,
    feature_request(Acc, LServer, From, Caps, SubNodes);
feature_response(Acc, _IQResult, LServer, From, Caps, [_SubNode | SubNodes]) ->
    feature_request(Acc, LServer, From, Caps, SubNodes).
458 |
|
|
459 |
|
%% @doc Build the zero-arity reader closure used by cache_tab on cache miss.
-spec caps_read_fun(mongooseim:host_type(), node_pair()) ->
          fun(() -> {ok, maybe_pending_features()} | error).
caps_read_fun(HostType, Node) ->
    caps_read_fun(HostType, Node, db_type(HostType)).

caps_read_fun(_HostType, Node, mnesia) ->
    fun() ->
            case mnesia:dirty_read({caps_features, Node}) of
                [#caps_features{features = Features}] -> {ok, Features};
                _ -> error
            end
    end.
472 |
|
|
473 |
|
%% @doc Build the zero-arity writer closure used by cache_tab on insert.
-spec caps_write_fun(mongooseim:host_type(), node_pair(), maybe_pending_features()) ->
          fun(() -> ok).
caps_write_fun(HostType, Node, Features) ->
    caps_write_fun(HostType, Node, Features, db_type(HostType)).

caps_write_fun(_HostType, Node, Features, mnesia) ->
    fun() ->
            mnesia:dirty_write(#caps_features{node_pair = Node,
                                              features = Features})
    end.
484 |
|
|
485 |
|
%% @doc Remove one node pair from the caps cache and its backend table.
-spec delete_caps(node_pair()) -> ok.
delete_caps(Node) ->
    cache_tab:delete(caps_features, Node, caps_delete_fun(Node)).

%% Backend delete callback handed to cache_tab.
-spec caps_delete_fun(node_pair()) -> fun(() -> ok).
caps_delete_fun(Node) ->
    fun() -> mnesia:dirty_delete(caps_features, Node) end.
494 |
|
|
495 |
|
%% @doc Compute this server's own XEP-0115 verification string from its local
%% disco identities, extended-info forms and features; <<>> when the server
%% reports no features.
-spec make_my_disco_hash(mongooseim:host_type(), jid:lserver()) -> binary().
make_my_disco_hash(HostType, LServer) ->
    JID = jid:make(<<>>, LServer, <<>>),
    case mongoose_disco:get_local_features(HostType, JID, JID, <<>>, <<>>) of
        {result, FeaturesXML} ->
            IdentityXML = mongoose_disco:get_local_identity(HostType, JID, JID, <<>>, <<>>),
            InfoXML = mongoose_disco:get_info(HostType, undefined, <<>>, <<>>),
            make_disco_hash(IdentityXML ++ InfoXML ++ FeaturesXML, sha1);
        empty ->
            <<>>
    end.
506 |
|
|
507 |
|
%% @doc XEP-0115 'ver' computation: concatenate sorted identities, features
%% and xdata forms, digest with the chosen algorithm, base64-encode.
-spec make_disco_hash([exml:element()], HashAlgorithm :: atom()) -> binary().
make_disco_hash(DiscoEls, Algo) ->
    Concat = list_to_binary([concat_identities(DiscoEls),
                             concat_features(DiscoEls),
                             concat_info(DiscoEls)]),
    Digest = case Algo of
                 md5 -> erlang:md5(Concat);
                 sha1 -> crypto:hash(sha, Concat);
                 sha224 -> crypto:hash(sha224, Concat);
                 sha256 -> crypto:hash(sha256, Concat);
                 sha384 -> crypto:hash(sha384, Concat);
                 sha512 -> crypto:hash(sha512, Concat)
             end,
    jlib:encode_base64(Digest).
519 |
|
|
520 |
|
%% @doc Verify the advertised 'ver' against a hash recomputed from the disco
%% reply.  Unrecognised hash names are accepted unchecked, mirroring the
%% original fall-through clause.
check_hash(Caps, Els) ->
    case hash_algo(Caps#caps.hash) of
        undefined -> true;
        Algo -> Caps#caps.version == make_disco_hash(Els, Algo)
    end.

%% Map the XEP-0115 'hash' attribute to our internal algorithm tag.
hash_algo(<<"md5">>) -> md5;
hash_algo(<<"sha-1">>) -> sha1;
hash_algo(<<"sha-224">>) -> sha224;
hash_algo(<<"sha-256">>) -> sha256;
hash_algo(<<"sha-384">>) -> sha384;
hash_algo(<<"sha-512">>) -> sha512;
hash_algo(_) -> undefined.
536 |
|
|
537 |
|
%% Sorted, deduplicated "var<" fragments for every <feature/> element;
%% non-feature children contribute nothing.
concat_features(Els) ->
    Vars = [[xml:get_attr_s(<<"var">>, Attrs), $<]
            || #xmlel{name = <<"feature">>, attrs = Attrs} <- Els],
    lists:usort(Vars).
545 |
|
|
546 |
|
%% Sorted "category/type/lang/name<" fragments for every <identity/> element;
%% other children contribute nothing.
concat_identities(Els) ->
    Ids = [[xml:get_attr_s(<<"category">>, Attrs),
            $/, xml:get_attr_s(<<"type">>, Attrs),
            $/, xml:get_attr_s(<<"xml:lang">>, Attrs),
            $/, xml:get_attr_s(<<"name">>, Attrs),
            $<]
           || #xmlel{name = <<"identity">>, attrs = Attrs} <- Els],
    lists:sort(Ids).
559 |
|
|
560 |
|
%% Sorted serialisations of every extended-info form: <x/> elements in the
%% jabber:x:data namespace with type "result"; everything else is skipped.
concat_info(Els) ->
    Forms = lists:flatmap(
              fun(#xmlel{name = <<"x">>, attrs = Attrs, children = Fields}) ->
                      XmlNs = xml:get_attr_s(<<"xmlns">>, Attrs),
                      Type = xml:get_attr_s(<<"type">>, Attrs),
                      case {XmlNs, Type} of
                          {?NS_XDATA, <<"result">>} -> [concat_xdata_fields(Fields)];
                          _ -> []
                      end;
                 (_) -> []
              end, Els),
    lists:sort(Forms).
573 |
|
|
574 |
|
%% Serialise one xdata form: the FORM_TYPE value comes first, then each
%% remaining var with its sorted values, fields ordered by var name.
%% Fields without a var attribute are skipped.
concat_xdata_fields(Fields) ->
    Collect =
        fun(#xmlel{name = <<"field">>, children = Els} = FieldEl,
            {FormType0, VarFields} = Acc) ->
                case exml_query:attr(FieldEl, <<"var">>, <<"">>) of
                    <<"">> ->
                        Acc;
                    <<"FORM_TYPE">> ->
                        {exml_query:path(FieldEl, [{element, <<"value">>}, cdata]),
                         VarFields};
                    Var ->
                        NewField = [[Var, $<], extract_values_sorted_cdatas(Els)],
                        {FormType0, [NewField | VarFields]}
                end;
           (_, Acc) ->
                Acc
        end,
    {FormType, Res} = lists:foldl(Collect, {<<"">>, []}, Fields),
    [FormType, $<, lists:sort(Res)].
591 |
|
|
592 |
|
%% Sorted "cdata<" fragments for the <value/> children of one field.
extract_values_sorted_cdatas(Els) ->
    lists:sort(lists:flatmap(fun extract_value_cdata/1, Els)).

%% One "cdata<" fragment per <value/> element; anything else yields nothing.
extract_value_cdata(#xmlel{name = <<"value">>} = ValueEl) ->
    [[exml_query:cdata(ValueEl), $<]];
extract_value_cdata(_) ->
    [].
599 |
|
|
600 |
|
%% Fold F(Key, Value, AccIn) over every entry of a gb_tree, in key order.
gb_trees_fold(F, Acc, Tree) ->
    gb_trees_fold_iter(F, Acc, gb_trees:iterator(Tree)).

gb_trees_fold_iter(F, Acc, Iter) ->
    case gb_trees:next(Iter) of
        {Key, Val, NextIter} ->
            gb_trees_fold_iter(F, F(Key, Val, Acc), NextIter);
        none ->
            Acc
    end.
611 |
|
|
612 |
|
%% @doc True when Node is our own caps node, i.e. the part before the '#'
%% separator equals the MongooseIM URI.
is_valid_node(Node) ->
    case mongoose_bin:tokens(Node, <<"#">>) of
        [?MONGOOSE_URI | _] -> true;
        _ -> false
    end.
619 |
|
|
620 |
|
%% @doc Storage backend for cached caps features; only Mnesia is implemented.
db_type(_HostType) ->
    mnesia.