Diffstat (limited to 'core')
-rw-r--r--   core/certmanager.lua   |  2
-rw-r--r--   core/moduleapi.lua     | 83
-rw-r--r--   core/portmanager.lua   | 55
-rw-r--r--   core/rostermanager.lua | 28
-rw-r--r--   core/s2smanager.lua    |  3
-rw-r--r--   core/statsmanager.lua  |  1
6 files changed, 163 insertions(+), 9 deletions(-)
diff --git a/core/certmanager.lua b/core/certmanager.lua
index 5282a6f5..63f314f8 100644
--- a/core/certmanager.lua
+++ b/core/certmanager.lua
@@ -106,7 +106,7 @@ local core_defaults = {
capath = "/etc/ssl/certs";
depth = 9;
protocol = "tlsv1+";
- verify = (ssl_x509 and { "peer", "client_once", }) or "none";
+ verify = "none";
options = {
cipher_server_preference = luasec_has.options.cipher_server_preference;
no_ticket = luasec_has.options.no_ticket;
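
With this change the core default no longer requests a client certificate; services that rely on certificate authentication are expected to request verification explicitly. A minimal config sketch, assuming the usual Prosody Lua config format (the verify flags are standard LuaSec values, taken from the removed line):

    -- In the Prosody config file: restore the old behaviour where wanted.
    ssl = {
        verify = { "peer", "client_once" }; -- request and verify the peer's certificate
    }
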
diff --git a/core/moduleapi.lua b/core/moduleapi.lua
index 10f9f04d..c6193cfd 100644
--- a/core/moduleapi.lua
+++ b/core/moduleapi.lua
@@ -14,13 +14,16 @@ local pluginloader = require "util.pluginloader";
local timer = require "util.timer";
local resolve_relative_path = require"util.paths".resolve_relative_path;
local st = require "util.stanza";
+local cache = require "util.cache";
+local errutil = require "util.error";
+local promise = require "util.promise";
local t_insert, t_remove, t_concat = table.insert, table.remove, table.concat;
local error, setmetatable, type = error, setmetatable, type;
local ipairs, pairs, select = ipairs, pairs, select;
local tonumber, tostring = tonumber, tostring;
local require = require;
-local pack = table.pack or function(...) return {n=select("#",...), ...}; end -- table.pack is only in 5.2
+local pack = table.pack or require "util.table".pack; -- table.pack is only in 5.2
local unpack = table.unpack or unpack; --luacheck: ignore 113 -- renamed in 5.2
local prosody = prosody;
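
The Lua 5.1 fallback for table.pack moves into util.table; judging by the removed inline version, it presumably behaves like Lua 5.2's table.pack:

    -- Presumed behaviour of util.table.pack, matching the removed inline
    -- fallback: collect varargs and record their count in "n".
    local function pack(...)
        return { n = select("#", ...), ... };
    end

    local p = pack("a", nil, "c");
    -- p.n == 3, so embedded nils survive when iterating i = 1, p.n
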
@@ -361,6 +364,84 @@ function api:send(stanza, origin)
return core_post_stanza(origin or hosts[self.host], stanza);
end
+function api:send_iq(stanza, origin, timeout)
+ local iq_cache = self._iq_cache;
+ if not iq_cache then
+ iq_cache = cache.new(256, function (_, iq)
+ iq.reject(errutil.new({
+ type = "wait", condition = "resource-constraint",
+ text = "evicted from iq tracking cache"
+ }));
+ self:unhook(iq.result_event, iq.result_handler);
+ self:unhook(iq.error_event, iq.error_handler);
+ end);
+ self._iq_cache = iq_cache;
+ end
+ return promise.new(function (resolve, reject)
+ local event_type;
+ if stanza.attr.from == self.host then
+ event_type = "host";
+ else -- assume bare since we can't hook full jids
+ event_type = "bare";
+ end
+ local result_event = "iq-result/"..event_type.."/"..stanza.attr.id;
+ local error_event = "iq-error/"..event_type.."/"..stanza.attr.id;
+ local cache_key = event_type.."/"..stanza.attr.id;
+
+ local function result_handler(event)
+ if event.stanza.attr.from == stanza.attr.to then
+ resolve(event);
+ return true;
+ end
+ end
+
+ local function error_handler(event)
+ if event.stanza.attr.from == stanza.attr.to then
+ reject(errutil.from_stanza(event.stanza), event);
+ return true;
+ end
+ end
+
+ if iq_cache:get(cache_key) then
+ reject(errutil.new({
+ type = "modify", condition = "conflict",
+ text = "iq stanza id attribute already used",
+ }));
+ return;
+ end
+
+ self:hook(result_event, result_handler);
+ self:hook(error_event, error_handler);
+
+ local timeout_handle = self:add_timer(timeout or 120, function ()
+ reject(errutil.new({
+ type = "wait", condition = "remote-server-timeout",
+ text = "IQ stanza timed out",
+ }));
+ self:unhook(result_event, result_handler);
+ self:unhook(error_event, error_handler);
+ iq_cache:set(cache_key, nil);
+ end);
+
+ local ok = iq_cache:set(cache_key, {
+ reject = reject, resolve = resolve,
+ timeout_handle = timeout_handle,
+ result_event = result_event, error_event = error_event,
+ result_handler = result_handler, error_handler = error_handler;
+ });
+
+ if not ok then
+ reject(errutil.new({
+ type = "wait", condition = "internal-server-error",
+ text = "Could not store IQ tracking data"
+ }));
+ return;
+ end
+
+ self:send(stanza, origin);
+ end);
+end
+
function api:broadcast(jids, stanza, iter)
for jid in (iter or it.values)(jids) do
local new_stanza = st.clone(stanza);
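
The new module API method tracks an outgoing iq stanza and returns a util.promise that resolves with the result event or rejects with a util.error object. A hypothetical usage sketch from inside a module (the ping payload and target JID are illustrative; promise method names follow util.promise):

    local st = require "util.stanza";
    local new_id = require "util.id".short;

    local ping = st.iq({ type = "get", id = new_id(), from = module.host, to = "other.example" })
        :tag("ping", { xmlns = "urn:xmpp:ping" });

    module:send_iq(ping, nil, 30) -- 30 second timeout instead of the default 120
        :next(function (event)
            module:log("info", "Got pong from %s", event.stanza.attr.from);
        end, function (err)
            -- err is a util.error object: err.type, err.condition, err.text
            module:log("warn", "Ping failed: %s", err.text or err.condition);
        end);
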
diff --git a/core/portmanager.lua b/core/portmanager.lua
index 1ed37da0..5aef07d7 100644
--- a/core/portmanager.lua
+++ b/core/portmanager.lua
@@ -10,6 +10,7 @@ local set = require "util.set";
local table = table;
local setmetatable, rawset, rawget = setmetatable, rawset, rawget;
local type, tonumber, tostring, ipairs = type, tonumber, tostring, ipairs;
+local pairs = pairs;
local prosody = prosody;
local fire_event = prosody.events.fire_event;
@@ -95,7 +96,7 @@ local function activate(service_name)
}
bind_ports = set.new(type(bind_ports) ~= "table" and { bind_ports } or bind_ports );
- local mode, ssl = listener.default_mode or default_mode;
+ local mode = listener.default_mode or default_mode;
local hooked_ports = {};
for interface in bind_interfaces do
@@ -107,12 +108,12 @@ local function activate(service_name)
log("error", "Multiple services configured to listen on the same port ([%s]:%d): %s, %s", interface, port,
active_services:search(nil, interface, port)[1][1].service.name or "<unnamed>", service_name or "<unnamed>");
else
- local err;
+ local ssl, cfg, err;
-- Create SSL context for this service/port
if service_info.encryption == "ssl" then
local global_ssl_config = config.get("*", "ssl") or {};
local prefix_ssl_config = config.get("*", config_prefix.."ssl") or global_ssl_config;
- ssl, err = certmanager.create_context(service_info.name.." port "..port, "server",
+ ssl, err, cfg = certmanager.create_context(service_info.name.." port "..port, "server",
prefix_ssl_config[interface],
prefix_ssl_config[port],
prefix_ssl_config,
@@ -126,7 +127,12 @@ local function activate(service_name)
end
if not err then
-- Start listening on interface+port
- local handler, err = server.addserver(interface, port_number, listener, mode, ssl);
+ local handler, err = server.listen(interface, port_number, listener, {
+ read_size = mode,
+ tls_ctx = ssl,
+ tls_direct = service_info.encryption == "ssl";
+ sni_hosts = {},
+ });
if not handler then
log("error", "Failed to open server port %d on %s, %s", port_number, interface,
error_to_friendly_message(service_name, port_number, err));
@@ -136,6 +142,7 @@ local function activate(service_name)
active_services:add(service_name, interface, port_number, {
server = handler;
service = service_info;
+ tls_cfg = cfg;
});
end
end
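
The old positional server.addserver() call becomes an options table, and the context config is kept alongside the handler for later SNI updates. A rough mapping, with option semantics assumed from the names used in this diff:

    -- server.addserver(interface, port_number, listener, mode, ssl) maps to:
    server.listen(interface, port_number, listener, {
        read_size = mode;       -- former positional "mode" argument
        tls_ctx = ssl;          -- former positional "ssl" argument
        tls_direct = service_info.encryption == "ssl"; -- TLS from byte one, not StartTLS
        sni_hosts = {};         -- per-host contexts, populated later by add_sni_host()
    });
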
@@ -221,15 +228,55 @@ end
-- Event handlers
+local function add_sni_host(host, service)
+ -- local global_ssl_config = config.get(host, "ssl") or {};
+ for name, interface, port, n, active_service --luacheck: ignore 213
+ in active_services:iter(service, nil, nil, nil) do
+ if active_service.server.hosts and active_service.tls_cfg then
+ -- local config_prefix = (active_service.config_prefix or name).."_";
+ -- if config_prefix == "_" then
+ -- config_prefix = "";
+ -- end
+ -- local prefix_ssl_config = config.get(host, config_prefix.."ssl") or global_ssl_config;
+ -- FIXME only global 'ssl' settings are mixed in here
+ -- TODO per host and per service settings should be merged in,
+ -- without overriding the per-host certificate
+ local ssl, err, cfg = certmanager.create_context(host, "server");
+ if ssl then
+ active_service.server.hosts[host] = ssl;
+ if not active_service.tls_cfg.certificate then
+ active_service.server.tls_ctx = ssl;
+ active_service.tls_cfg = cfg;
+ end
+ else
+ log("error", "err = %q", err);
+ end
+ end
+ end
+end
+
prosody.events.add_handler("item-added/net-provider", function (event)
local item = event.item;
register_service(item.name, item);
+ for host in pairs(prosody.hosts) do
+ add_sni_host(host, item.name);
+ end
end);
prosody.events.add_handler("item-removed/net-provider", function (event)
local item = event.item;
unregister_service(item.name, item);
end);
+prosody.events.add_handler("host-activated", add_sni_host);
+prosody.events.add_handler("host-deactivated", function (host)
+ for name, interface, port, n, active_service --luacheck: ignore 213
+ in active_services:iter(nil, nil, nil, nil) do
+ if active_service.tls_cfg then
+ active_service.server.hosts[host] = nil;
+ end
+ end
+end);
+
return {
activate = activate;
deactivate = deactivate;
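
A hypothetical end state after add_sni_host() has run for two hosts (host names and context variables are placeholders; the server.hosts and server.tls_ctx fields follow the names used above):

    local ctx_a = certmanager.create_context("example.com", "server");
    local ctx_b = certmanager.create_context("example.net", "server");
    active_service.server.hosts["example.com"] = ctx_a;
    active_service.server.hosts["example.net"] = ctx_b;
    -- A client whose TLS ClientHello carries SNI "example.com" is served ctx_a;
    -- connections with no matching name fall back to active_service.server.tls_ctx.
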
diff --git a/core/rostermanager.lua b/core/rostermanager.lua
index 61b08002..d551a1b1 100644
--- a/core/rostermanager.lua
+++ b/core/rostermanager.lua
@@ -12,6 +12,7 @@
local log = require "util.logger".init("rostermanager");
local new_id = require "util.id".short;
+local new_cache = require "util.cache".new;
local pairs = pairs;
local tostring = tostring;
@@ -111,6 +112,23 @@ local function load_roster(username, host)
else -- Attempt to load roster for non-loaded user
log("debug", "load_roster: loading for offline user: %s", jid);
end
+ local roster_cache = hosts[host] and hosts[host].roster_cache;
+ if not roster_cache then
+ if hosts[host] then
+ roster_cache = new_cache(1024);
+ hosts[host].roster_cache = roster_cache;
+ end
+ else
+ roster = roster_cache:get(jid);
+ if roster then
+ log("debug", "load_roster: cache hit");
+ roster_cache:set(jid, roster);
+ if user then user.roster = roster; end
+ return roster;
+ else
+ log("debug", "load_roster: cache miss, loading from storage");
+ end
+ end
local roster_store = storagemanager.open(host, "roster", "keyval");
local data, err = roster_store:get(username);
roster = data or {};
@@ -134,6 +152,10 @@ local function load_roster(username, host)
if not err then
hosts[host].events.fire_event("roster-load", { username = username, host = host, roster = roster });
end
+ if roster_cache and not user then
+ log("debug", "load_roster: caching loaded roster");
+ roster_cache:set(jid, roster);
+ end
return roster, err;
end
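
The set() immediately after a cache hit looks redundant but is how recency is refreshed; a small sketch of the util.cache semantics this relies on (assumed: get() does not reorder entries, set() moves an entry to the front of an LRU list):

    local cache = require "util.cache";
    local c = cache.new(2); -- capacity of two entries
    c:set("a@host", {});
    c:set("b@host", {});
    local r = c:get("a@host"); -- a hit, but get() does not refresh recency
    c:set("a@host", r);        -- so re-set it, marking it most recently used
    c:set("c@host", {});       -- evicts "b@host", now the least recently used entry
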
@@ -263,15 +285,15 @@ end
function is_contact_pending_in(username, host, jid)
local roster = load_roster(username, host);
- return roster[false].pending[jid];
+ return roster[false].pending[jid] ~= nil;
end
-local function set_contact_pending_in(username, host, jid)
+local function set_contact_pending_in(username, host, jid, stanza)
local roster = load_roster(username, host);
local item = roster[jid];
if item and (item.subscription == "from" or item.subscription == "both") then
return; -- false
end
- roster[false].pending[jid] = true;
+ roster[false].pending[jid] = st.is_stanza(stanza) and st.preserialize(stanza) or true;
return save_roster(username, host, roster, jid);
end
function is_contact_pending_out(username, host, jid)
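
Storing the full stanza instead of a bare true lets the server later replay the original subscription request; st.preserialize turns a live stanza into a plain table suitable for storage. An illustrative sketch (JIDs are placeholders):

    local presence = st.presence({ type = "subscribe",
        from = "juliet@capulet.example", to = "romeo@montague.example" });
    local stored = st.preserialize(presence);
    -- "stored" is a plain table (name, attr, child tags; no metatable), which the
    -- keyval roster store can serialize. Because the stored value may now be a
    -- table rather than true, is_contact_pending_in() above checks ~= nil.
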
diff --git a/core/s2smanager.lua b/core/s2smanager.lua
index 58269c49..0ba5e7c6 100644
--- a/core/s2smanager.lua
+++ b/core/s2smanager.lua
@@ -50,6 +50,9 @@ local resting_session = { -- Resting, not dead
close = function (session)
session.log("debug", "Attempt to close already-closed session");
end;
+ reset_stream = function (session)
+ session.log("debug", "Attempt to reset stream of already-closed session");
+ end;
filter = function (type, data) return data; end; --luacheck: ignore 212/type
}; resting_session.__index = resting_session;
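
resting_session serves as an __index metatable of harmless stubs for destroyed sessions, so late callers log instead of crashing on a nil method; the new reset_stream stub extends that set. A sketch of the pattern (applying the metatable to a retired session is assumed from the __index line above):

    setmetatable(session, resting_session);
    session:close();        -- logs "Attempt to close already-closed session"
    session:reset_stream(); -- logs "Attempt to reset stream of already-closed session"
    local out = session.filter("stanzas/out", "<ping/>"); -- passes data through unchanged
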
diff --git a/core/statsmanager.lua b/core/statsmanager.lua
index 237b1dd5..50798ad0 100644
--- a/core/statsmanager.lua
+++ b/core/statsmanager.lua
@@ -97,6 +97,7 @@ if stats then
end
timer.add_task(stats_interval, collect);
prosody.events.add_handler("server-started", function () collect() end, -1);
+ prosody.events.add_handler("server-stopped", function () collect() end, -1);
else
log("debug", "Statistics enabled using %s provider, collection is disabled", stats_provider_name);
end