Diffstat (limited to 'plugins')
-rw-r--r--  plugins/mod_admin_shell.lua          | 119
-rw-r--r--  plugins/mod_admin_socket.lua         |   7
-rw-r--r--  plugins/mod_authz_internal.lua       |   6
-rw-r--r--  plugins/mod_cloud_notify.lua         | 653
-rw-r--r--  plugins/mod_cron.lua                 |   4
-rw-r--r--  plugins/mod_flags.lua                | 157
-rw-r--r--  plugins/mod_invites.lua              |  36
-rw-r--r--  plugins/mod_pep.lua                  |   3
-rw-r--r--  plugins/mod_pubsub/commands.lib.lua  | 239
-rw-r--r--  plugins/mod_pubsub/mod_pubsub.lua    |  48
-rw-r--r--  plugins/mod_roster.lua               | 174
-rw-r--r--  plugins/mod_storage_sql.lua          |   4
12 files changed, 1370 insertions(+), 80 deletions(-)
diff --git a/plugins/mod_admin_shell.lua b/plugins/mod_admin_shell.lua
index 0b8d3c43..974ed8d9 100644
--- a/plugins/mod_admin_shell.lua
+++ b/plugins/mod_admin_shell.lua
@@ -25,6 +25,8 @@ local _G = _G;
local prosody = _G.prosody;
local unpack = table.unpack;
+local cache = require "prosody.util.cache";
+local new_short_id = require "prosody.util.id".short;
local iterators = require "prosody.util.iterators";
local keys, values = iterators.keys, iterators.values;
local jid_bare, jid_split, jid_join, jid_resource, jid_compare = import("prosody.util.jid", "bare", "prepped_split", "join", "resource", "compare");
@@ -170,6 +172,47 @@ local function send_repl_output(session, line, attr)
return session.send(st.stanza("repl-output", attr):text(tostring(line)));
end
+local function request_repl_input(session, input_type)
+ if input_type ~= "password" then
+ return promise.reject("internal error - unsupported input type "..tostring(input_type));
+ end
+ local pending_inputs = session.pending_inputs;
+ if not pending_inputs then
+ pending_inputs = cache.new(5, function (input_id, input_promise) --luacheck: ignore 212/input_id
+ input_promise.reject();
+ end);
+ session.pending_inputs = pending_inputs;
+ end
+
+ local input_id = new_short_id();
+ local p = promise.new(function (resolve, reject)
+ pending_inputs:set(input_id, { resolve = resolve, reject = reject });
+ end):finally(function ()
+ pending_inputs:set(input_id, nil);
+ end);
+ session.send(st.stanza("repl-request-input", { type = input_type, id = input_id }));
+ return p;
+end
+
+module:hook("admin-disconnected", function (event)
+ local pending_inputs = event.session.pending_inputs;
+ if not pending_inputs then return; end
+ for input_promise in pending_inputs:values() do
+ input_promise.reject();
+ end
+end);
+
+module:hook("admin/repl-requested-input", function (event)
+ local input_id = event.stanza.attr.id;
+ local input_promise = event.origin.pending_inputs:get(input_id);
+ if not input_promise then
+ event.origin.send(st.stanza("repl-result", { type = "error" }):text("Internal error - unexpected input"));
+ return true;
+ end
+ input_promise.resolve(event.stanza:get_text());
+ return true;
+end);
+
function console:new_session(admin_session)
local session = {
send = function (t)
@@ -185,6 +228,9 @@ function console:new_session(admin_session)
write = function (t)
return send_repl_output(admin_session, t, { eol = "0" });
end;
+ request_input = function (input_type)
+ return request_repl_input(admin_session, input_type);
+ end;
serialize = tostring;
disconnect = function () admin_session:close(); end;
is_connected = function ()
@@ -266,25 +312,33 @@ local function handle_line(event)
end
end
- local taskok, message = chunk();
+ local function send_result(taskok, message)
+ if not message then
+ if type(taskok) ~= "string" and useglobalenv then
+ taskok = session.serialize(taskok);
+ end
+ result:text("Result: "..tostring(taskok));
+ elseif (not taskok) and message then
+ result.attr.type = "error";
+ result:text("Error: "..tostring(message));
+ else
+ result:text("OK: "..tostring(message));
+ end
- if promise.is_promise(taskok) then
- taskok, message = async.wait_for(taskok);
+ event.origin.send(result);
end
- if not message then
- if type(taskok) ~= "string" and useglobalenv then
- taskok = session.serialize(taskok);
- end
- result:text("Result: "..tostring(taskok));
- elseif (not taskok) and message then
- result.attr.type = "error";
- result:text("Error: "..tostring(message));
+ local taskok, message = chunk();
+
+ if promise.is_promise(taskok) then
+ taskok:next(function (resolved_message)
+ send_result(true, resolved_message);
+ end, function (rejected_message)
+ send_result(nil, rejected_message);
+ end);
else
- result:text("OK: "..tostring(message));
+ send_result(taskok, message);
end
-
- event.origin.send(result);
end
module:hook("admin/repl-input", function (event)
@@ -1670,12 +1724,13 @@ function def_env.user:create(jid, password, role)
role = module:get_option_string("default_provisioned_role", "prosody:member");
end
- local ok, err = um.create_user_with_role(username, password, host, role);
- if not ok then
- return nil, "Could not create user: "..err;
- end
-
- return true, ("Created %s with role '%s'"):format(jid, role);
+ return promise.resolve(password or self.session.request_input("password")):next(function (password_)
+ local ok, err = um.create_user_with_role(username, password_, host, role);
+ if not ok then
+ return promise.reject("Could not create user: "..err);
+ end
+ return ("Created %s with role '%s'"):format(jid, role);
+ end);
end
describe_command [[user:disable(jid) - Disable the specified user account, preventing login]]
@@ -1734,12 +1789,15 @@ function def_env.user:password(jid, password)
elseif not um.user_exists(username, host) then
return nil, "No such user";
end
- local ok, err = um.set_password(username, password, host, nil);
- if ok then
- return true, "User password changed";
- else
- return nil, "Could not change password for user: "..err;
- end
+
+ return promise.resolve(password or self.session.request_input("password")):next(function (password_)
+ local ok, err = um.set_password(username, password_, host, nil);
+ if ok then
+ return "User password changed";
+ else
+ return promise.reject("Could not change password for user: "..err);
+ end
+ end);
end
describe_command [[user:roles(jid, host) - Show current roles for an user]]
@@ -2431,12 +2489,15 @@ describe_command [[stats:show(pattern) - Show internal statistics, optionally fi
-- Undocumented currently, you can append :histogram() or :cfgraph() to stats:show() for rendered graphs.
function def_env.stats:show(name_filter)
local statsman = require "prosody.core.statsmanager"
+ local metric_registry = statsman.get_metric_registry();
+ if not metric_registry then
+ return nil, [[Statistics disabled. Try `statistics = "internal"` in the global section of the config file and restart.]];
+ end
local collect = statsman.collect
if collect then
-- force collection if in manual mode
collect()
end
- local metric_registry = statsman.get_metric_registry();
local displayed_stats = new_stats_context(self);
for family_name, metric_family in iterators.sorted_pairs(metric_registry:get_metric_families()) do
if not name_filter or family_name:match(name_filter) then
@@ -2481,7 +2542,7 @@ local host_commands = {};
local function new_item_handlers(command_host)
local function on_command_added(event)
local command = event.item;
- local mod_name = command._provided_by and ("mod_"..command._provided_by) or "<unknown module>";
+ local mod_name = event.source and ("mod_"..event.source.name) or "<unknown module>";
if not schema.validate(command_metadata_schema, command) or type(command.handler) ~= "function" then
module:log("warn", "Ignoring command added by %s: missing or invalid data", mod_name);
return;
@@ -2568,7 +2629,7 @@ local function new_item_handlers(command_host)
module = command._provided_by;
};
- module:log("debug", "Shell command added by mod_%s: %s:%s()", mod_name, command.section, command.name);
+ module:log("debug", "Shell command added by %s: %s:%s()", mod_name, command.section, command.name);
end
local function on_command_removed(event)
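
The request_repl_input()/session.request_input() plumbing added above lets shell commands prompt the operator for input and return promises instead of blocking, with the promise-aware handle_line() reporting resolution as "Result:"/"OK:" lines and rejection as "Error:" lines. For illustration only (not part of this changeset), a minimal sketch of a hypothetical command inside mod_admin_shell following the same pattern as user:password(); the "example" section and command name are invented:

def_env.example = {};

describe_command [[example:confirm_secret(jid) - Prompt for a secret and report its length]]
function def_env.example:confirm_secret(jid) -- luacheck: ignore 212/jid
	-- request_input() returns a promise; handle_line() above reports resolution
	-- as "OK: ..." and rejection as "Error: ..." without blocking the session.
	return self.session.request_input("password"):next(function (secret)
		if secret == "" then
			return promise.reject("No secret entered");
		end
		return ("Received %d characters"):format(#secret);
	end);
end
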
diff --git a/plugins/mod_admin_socket.lua b/plugins/mod_admin_socket.lua
index ad6aa5d7..b7b6d5f5 100644
--- a/plugins/mod_admin_socket.lua
+++ b/plugins/mod_admin_socket.lua
@@ -54,7 +54,12 @@ end);
local conn, sock;
-local listeners = adminstream.server(sessions, fire_admin_event).listeners;
+local admin_server = adminstream.server(sessions, fire_admin_event);
+local listeners = admin_server.listeners;
+
+module:hook_object_event(admin_server.events, "disconnected", function (event)
+ return module:fire_event("admin-disconnected", event);
+end);
local function accept_connection()
module:log("debug", "accepting...");
diff --git a/plugins/mod_authz_internal.lua b/plugins/mod_authz_internal.lua
index 07091a04..7a06c904 100644
--- a/plugins/mod_authz_internal.lua
+++ b/plugins/mod_authz_internal.lua
@@ -8,7 +8,7 @@ local roles = require "prosody.util.roles";
local config_global_admin_jids = module:context("*"):get_option_set("admins", {}) / normalize;
local config_admin_jids = module:get_option_inherited_set("admins", {}) / normalize;
local host = module.host;
-local host_suffix = host:gsub("^[^%.]+%.", "");
+local host_suffix = module:get_option_string("parent_host", (host:gsub("^[^%.]+%.", "")));
local hosts = prosody.hosts;
local is_anon_host = module:get_option_string("authentication") == "anonymous";
@@ -18,8 +18,8 @@ local is_component = hosts[host].type == "component";
local host_user_role, server_user_role, public_user_role;
if is_component then
host_user_role = module:get_option_string("host_user_role", "prosody:registered");
- server_user_role = module:get_option_string("server_user_role");
- public_user_role = module:get_option_string("public_user_role");
+ server_user_role = module:get_option_string("server_user_role", "prosody:guest");
+ public_user_role = module:get_option_string("public_user_role", "prosody:guest");
end
local role_store = module:open_store("account_roles");
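
The parent_host option introduced above overrides the derived parent domain (the host with its first label removed) that mod_authz_internal uses to decide which accounts count as "host users" of a component, and server_user_role/public_user_role now default to "prosody:guest" on components. A hypothetical configuration sketch (host names invented) for a component whose logical parent is not simply its own name minus one label:

-- Hypothetical config
Component "groups.chat.example.net" "muc"
	parent_host = "example.net"         -- accounts on example.net are treated as host users
	host_user_role = "prosody:registered"
	server_user_role = "prosody:guest"  -- now the default for components
	public_user_role = "prosody:guest"  -- now the default for components
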
diff --git a/plugins/mod_cloud_notify.lua b/plugins/mod_cloud_notify.lua
new file mode 100644
index 00000000..987be84f
--- /dev/null
+++ b/plugins/mod_cloud_notify.lua
@@ -0,0 +1,653 @@
+-- XEP-0357: Push (aka: My mobile OS vendor won't let me have persistent TCP connections)
+-- Copyright (C) 2015-2016 Kim Alvefur
+-- Copyright (C) 2017-2019 Thilo Molitor
+--
+-- This file is MIT/X11 licensed.
+
+local os_time = os.time;
+local st = require"util.stanza";
+local jid = require"util.jid";
+local dataform = require"util.dataforms".new;
+local hashes = require"util.hashes";
+local random = require"util.random";
+local cache = require"util.cache";
+local watchdog = require "util.watchdog";
+
+local xmlns_push = "urn:xmpp:push:0";
+
+-- configuration
+local include_body = module:get_option_boolean("push_notification_with_body", false);
+local include_sender = module:get_option_boolean("push_notification_with_sender", false);
+local max_push_errors = module:get_option_number("push_max_errors", 16);
+local max_push_devices = module:get_option_number("push_max_devices", 5);
+local dummy_body = module:get_option_string("push_notification_important_body", "New Message!");
+local extended_hibernation_timeout = module:get_option_number("push_max_hibernation_timeout", 72*3600); -- use the same timeout as ejabberd
+
+local host_sessions = prosody.hosts[module.host].sessions;
+local push_errors = module:shared("push_errors");
+local id2node = {};
+local id2identifier = {};
+
+-- For keeping state across reloads while caching reads
+-- This uses util.cache for caching the most recent devices and removing all old devices when max_push_devices is reached
+local push_store = (function()
+ local store = module:open_store();
+ local push_services = {};
+ local api = {};
+ --luacheck: ignore 212/self
+ function api:get(user)
+ if not push_services[user] then
+ local loaded, err = store:get(user);
+ if not loaded and err then
+ module:log("warn", "Error reading push notification storage for user '%s': %s", user, tostring(err));
+ push_services[user] = cache.new(max_push_devices):table();
+ return push_services[user], false;
+ end
+ if loaded then
+ push_services[user] = cache.new(max_push_devices):table();
+ -- copy over plain table loaded from disk into our cache
+ for k, v in pairs(loaded) do push_services[user][k] = v; end
+ else
+ push_services[user] = cache.new(max_push_devices):table();
+ end
+ end
+ return push_services[user], true;
+ end
+ function api:flush_to_disk(user)
+ local plain_table = {};
+ for k, v in pairs(push_services[user]) do plain_table[k] = v; end
+ local ok, err = store:set(user, plain_table);
+ if not ok then
+ module:log("error", "Error writing push notification storage for user '%s': %s", user, tostring(err));
+ return false;
+ end
+ return true;
+ end
+ function api:set_identifier(user, push_identifier, data)
+ local services = self:get(user);
+ services[push_identifier] = data;
+ end
+ return api;
+end)();
+
+
+-- Forward declarations, as both functions need to reference each other
+local handle_push_success, handle_push_error;
+
+function handle_push_error(event)
+ local stanza = event.stanza;
+ local error_type, condition, error_text = stanza:get_error();
+ local node = id2node[stanza.attr.id];
+ local identifier = id2identifier[stanza.attr.id];
+ if node == nil then
+ module:log("warn", "Received push error with unrecognised id: %s", stanza.attr.id);
+ return false; -- unknown stanza? Ignore for now!
+ end
+ local from = stanza.attr.from;
+ local user_push_services = push_store:get(node);
+ local found, changed = false, false;
+
+ for push_identifier, _ in pairs(user_push_services) do
+ if push_identifier == identifier then
+ found = true;
+ if user_push_services[push_identifier] and user_push_services[push_identifier].jid == from and error_type ~= "wait" then
+ push_errors[push_identifier] = push_errors[push_identifier] + 1;
+ module:log("info", "Got error <%s:%s:%s> for identifier '%s': "
+ .."error count for this identifier is now at %s", error_type, condition, error_text or "", push_identifier,
+ tostring(push_errors[push_identifier]));
+ if push_errors[push_identifier] >= max_push_errors then
+ module:log("warn", "Disabling push notifications for identifier '%s'", push_identifier);
+ -- remove push settings from sessions
+ if host_sessions[node] then
+ for _, session in pairs(host_sessions[node].sessions) do
+ if session.push_identifier == push_identifier then
+ session.push_identifier = nil;
+ session.push_settings = nil;
+ session.first_hibernated_push = nil;
+ -- check for prosody 0.12 mod_smacks
+ if session.hibernating_watchdog and session.original_smacks_callback and session.original_smacks_timeout then
+ -- restore old smacks watchdog
+ session.hibernating_watchdog:cancel();
+ session.hibernating_watchdog = watchdog.new(session.original_smacks_timeout, session.original_smacks_callback);
+ end
+ end
+ end
+ end
+ -- save changed global config
+ changed = true;
+ user_push_services[push_identifier] = nil
+ push_errors[push_identifier] = nil;
+ -- unhook iq handlers for this identifier (if possible)
+ module:unhook("iq-error/host/"..stanza.attr.id, handle_push_error);
+ module:unhook("iq-result/host/"..stanza.attr.id, handle_push_success);
+ id2node[stanza.attr.id] = nil;
+ id2identifier[stanza.attr.id] = nil;
+ end
+ elseif user_push_services[push_identifier] and user_push_services[push_identifier].jid == from and error_type == "wait" then
+ module:log("debug", "Got error <%s:%s:%s> for identifier '%s': "
+ .."NOT increasing error count for this identifier", error_type, condition, error_text or "", push_identifier);
+ else
+ module:log("debug", "Unhandled push error <%s:%s:%s> from %s for identifier '%s'",
+ error_type, condition, error_text or "", from, push_identifier
+ );
+ end
+ end
+ end
+ if changed then
+ push_store:flush_to_disk(node);
+ elseif not found then
+ module:log("warn", "Unable to find matching registration for push error <%s:%s:%s> from %s", error_type, condition, error_text or "", from);
+ end
+ return true;
+end
+
+function handle_push_success(event)
+ local stanza = event.stanza;
+ local node = id2node[stanza.attr.id];
+ local identifier = id2identifier[stanza.attr.id];
+ if node == nil then return false; end -- unknown stanza? Ignore for now!
+ local from = stanza.attr.from;
+ local user_push_services = push_store:get(node);
+
+ for push_identifier, _ in pairs(user_push_services) do
+ if push_identifier == identifier then
+ if user_push_services[push_identifier] and user_push_services[push_identifier].jid == from and push_errors[push_identifier] > 0 then
+ push_errors[push_identifier] = 0;
+ -- unhook iq handlers for this identifier (if possible)
+ module:unhook("iq-error/host/"..stanza.attr.id, handle_push_error);
+ module:unhook("iq-result/host/"..stanza.attr.id, handle_push_success);
+ id2node[stanza.attr.id] = nil;
+ id2identifier[stanza.attr.id] = nil;
+ module:log("debug", "Push succeeded, error count for identifier '%s' is now at %s again",
+ push_identifier, tostring(push_errors[push_identifier])
+ );
+ end
+ end
+ end
+ return true;
+end
+
+-- http://xmpp.org/extensions/xep-0357.html#disco
+local function account_dico_info(event)
+ (event.reply or event.stanza):tag("feature", {var=xmlns_push}):up();
+end
+module:hook("account-disco-info", account_dico_info);
+
+-- http://xmpp.org/extensions/xep-0357.html#enabling
+local function push_enable(event)
+ local origin, stanza = event.origin, event.stanza;
+ local enable = stanza.tags[1];
+ origin.log("debug", "Attempting to enable push notifications");
+ -- MUST contain a 'jid' attribute of the XMPP Push Service being enabled
+ local push_jid = enable.attr.jid;
+ -- SHOULD contain a 'node' attribute
+ local push_node = enable.attr.node;
+ -- CAN contain a 'include_payload' attribute
+ local include_payload = enable.attr.include_payload;
+ if not push_jid then
+ origin.log("debug", "Push notification enable request missing the 'jid' field");
+ origin.send(st.error_reply(stanza, "modify", "bad-request", "Missing jid"));
+ return true;
+ end
+ if push_jid == stanza.attr.from then
+ origin.log("debug", "Push notification enable request 'jid' field identical to our own");
+ origin.send(st.error_reply(stanza, "modify", "bad-request", "JID must be different from ours"));
+ return true;
+ end
+ local publish_options = enable:get_child("x", "jabber:x:data");
+ if not publish_options then
+ -- Could be intentional
+ origin.log("debug", "No publish options in request");
+ end
+ local push_identifier = push_jid .. "<" .. (push_node or "");
+ local push_service = {
+ jid = push_jid;
+ node = push_node;
+ include_payload = include_payload;
+ options = publish_options and st.preserialize(publish_options);
+ timestamp = os_time();
+ client_id = origin.client_id;
+ resource = not origin.client_id and origin.resource or nil;
+ language = stanza.attr["xml:lang"];
+ };
+ local allow_registration = module:fire_event("cloud_notify/registration", {
+ origin = origin, stanza = stanza, push_info = push_service;
+ });
+ if allow_registration == false then
+ return true; -- Assume error reply already sent
+ end
+ push_store:set_identifier(origin.username, push_identifier, push_service);
+ local ok = push_store:flush_to_disk(origin.username);
+ if not ok then
+ origin.send(st.error_reply(stanza, "wait", "internal-server-error"));
+ else
+ origin.push_identifier = push_identifier;
+ origin.push_settings = push_service;
+ origin.first_hibernated_push = nil;
+ origin.log("info", "Push notifications enabled for %s (%s)", tostring(stanza.attr.from), tostring(origin.push_identifier));
+ origin.send(st.reply(stanza));
+ end
+ return true;
+end
+module:hook("iq-set/self/"..xmlns_push..":enable", push_enable);
+
+-- http://xmpp.org/extensions/xep-0357.html#disabling
+local function push_disable(event)
+ local origin, stanza = event.origin, event.stanza;
+ local push_jid = stanza.tags[1].attr.jid; -- MUST include a 'jid' attribute
+ local push_node = stanza.tags[1].attr.node; -- A 'node' attribute MAY be included
+ if not push_jid then
+ origin.send(st.error_reply(stanza, "modify", "bad-request", "Missing jid"));
+ return true;
+ end
+ local user_push_services = push_store:get(origin.username);
+ for key, push_info in pairs(user_push_services) do
+ if push_info.jid == push_jid and (not push_node or push_info.node == push_node) then
+ origin.log("info", "Push notifications disabled (%s)", tostring(key));
+ if origin.push_identifier == key then
+ origin.push_identifier = nil;
+ origin.push_settings = nil;
+ origin.first_hibernated_push = nil;
+ -- check for prosody 0.12 mod_smacks
+ if origin.hibernating_watchdog and origin.original_smacks_callback and origin.original_smacks_timeout then
+ -- restore old smacks watchdog
+ origin.hibernating_watchdog:cancel();
+ origin.hibernating_watchdog = watchdog.new(origin.original_smacks_timeout, origin.original_smacks_callback);
+ end
+ end
+ user_push_services[key] = nil;
+ push_errors[key] = nil;
+ for stanza_id, identifier in pairs(id2identifier) do
+ if identifier == key then
+ module:unhook("iq-error/host/"..stanza_id, handle_push_error);
+ module:unhook("iq-result/host/"..stanza_id, handle_push_success);
+ id2node[stanza_id] = nil;
+ id2identifier[stanza_id] = nil;
+ end
+ end
+ end
+ end
+ local ok = push_store:flush_to_disk(origin.username);
+ if not ok then
+ origin.send(st.error_reply(stanza, "wait", "internal-server-error"));
+ else
+ origin.send(st.reply(stanza));
+ end
+ return true;
+end
+module:hook("iq-set/self/"..xmlns_push..":disable", push_disable);
+
+-- urgent stanzas should be delivered without delay
+local function is_urgent(stanza)
+ -- TODO
+ if stanza.name == "message" then
+ if stanza:get_child("propose", "urn:xmpp:jingle-message:0") then
+ return true, "jingle call";
+ end
+ end
+end
+
+-- is this a high-priority push? (needed for iOS apps not using VoIP pushes)
+local function is_important(stanza)
+ local st_name = stanza and stanza.name or nil;
+ if not st_name then return false; end -- nonzas are never important here
+ if st_name == "presence" then
+ return false; -- same for presences
+ elseif st_name == "message" then
+ -- unpack carbon copied message stanzas
+ local carbon = stanza:find("{urn:xmpp:carbons:2}/{urn:xmpp:forward:0}/{jabber:client}message");
+ local stanza_direction = carbon and stanza:child_with_name("sent") and "out" or "in";
+ if carbon then stanza = carbon; end
+ local st_type = stanza.attr.type;
+
+ -- headline messages are never important
+ if st_type == "headline" then return false; end
+
+ -- carbon copied outgoing messages are not important
+ if carbon and stanza_direction == "out" then return false; end
+
+ -- We can't check for body contents in encrypted messages, so let's treat them as important
+ -- Some clients don't even set a body or an empty body for encrypted messages
+
+ -- check omemo https://xmpp.org/extensions/inbox/omemo.html
+ if stanza:get_child("encrypted", "eu.siacs.conversations.axolotl") or stanza:get_child("encrypted", "urn:xmpp:omemo:0") then return true; end
+
+ -- check xep27 pgp https://xmpp.org/extensions/xep-0027.html
+ if stanza:get_child("x", "jabber:x:encrypted") then return true; end
+
+ -- check xep373 pgp (OX) https://xmpp.org/extensions/xep-0373.html
+ if stanza:get_child("openpgp", "urn:xmpp:openpgp:0") then return true; end
+
+ -- XEP-0353: Jingle Message Initiation (incoming call request)
+ if stanza:get_child("propose", "urn:xmpp:jingle-message:0") then return true; end
+
+ local body = stanza:get_child_text("body");
+
+ -- groupchat subjects are not important here
+ if st_type == "groupchat" and stanza:get_child_text("subject") then
+ return false;
+ end
+
+ -- empty bodies are not important
+ return body ~= nil and body ~= "";
+ end
+ return false; -- this stanza didn't match any of the above cases --> it is not important either
+end
+
+local push_form = dataform {
+ { name = "FORM_TYPE"; type = "hidden"; value = "urn:xmpp:push:summary"; };
+ { name = "message-count"; type = "text-single"; };
+ { name = "pending-subscription-count"; type = "text-single"; };
+ { name = "last-message-sender"; type = "jid-single"; };
+ { name = "last-message-body"; type = "text-single"; };
+};
+
+-- http://xmpp.org/extensions/xep-0357.html#publishing
+local function handle_notify_request(stanza, node, user_push_services, log_push_decline)
+ local pushes = 0;
+ if not #user_push_services then return pushes end
+
+ for push_identifier, push_info in pairs(user_push_services) do
+ local send_push = true; -- only send push to this node when not already done for this stanza or if no stanza is given at all
+ if stanza then
+ if not stanza._push_notify then stanza._push_notify = {}; end
+ if stanza._push_notify[push_identifier] then
+ if log_push_decline then
+ module:log("debug", "Already sent push notification for %s@%s to %s (%s)", node, module.host, push_info.jid, tostring(push_info.node));
+ end
+ send_push = false;
+ end
+ stanza._push_notify[push_identifier] = true;
+ end
+
+ if send_push then
+ -- construct push stanza
+ local stanza_id = hashes.sha256(random.bytes(8), true);
+ local push_notification_payload = st.stanza("notification", { xmlns = xmlns_push });
+ local form_data = {
+ -- hardcode to 1 because other numbers are just meaningless (the XEP does not specify *what exactly* to count)
+ ["message-count"] = "1";
+ };
+ if stanza and include_sender then
+ form_data["last-message-sender"] = stanza.attr.from;
+ end
+ if stanza and include_body then
+ form_data["last-message-body"] = stanza:get_child_text("body");
+ elseif stanza and dummy_body and is_important(stanza) then
+ form_data["last-message-body"] = tostring(dummy_body);
+ end
+
+ push_notification_payload:add_child(push_form:form(form_data));
+
+ local push_publish = st.iq({ to = push_info.jid, from = module.host, type = "set", id = stanza_id })
+ :tag("pubsub", { xmlns = "http://jabber.org/protocol/pubsub" })
+ :tag("publish", { node = push_info.node })
+ :tag("item")
+ :add_child(push_notification_payload)
+ :up()
+ :up();
+
+ if push_info.options then
+ push_publish:tag("publish-options"):add_child(st.deserialize(push_info.options));
+ end
+ -- send out push
+ module:log("debug", "Sending %s push notification for %s@%s to %s (%s)",
+ form_data["last-message-body"] and "important" or "unimportant",
+ node, module.host, push_info.jid, tostring(push_info.node)
+ );
+ -- module:log("debug", "PUSH STANZA: %s", tostring(push_publish));
+ local push_event = {
+ notification_stanza = push_publish;
+ notification_payload = push_notification_payload;
+ original_stanza = stanza;
+ username = node;
+ push_info = push_info;
+ push_summary = form_data;
+ important = not not form_data["last-message-body"];
+ };
+
+ if module:fire_event("cloud_notify/push", push_event) then
+ module:log("debug", "Push was blocked by event handler: %s", push_event.reason or "Unknown reason");
+ else
+ -- handle push errors for this node
+ if push_errors[push_identifier] == nil then
+ push_errors[push_identifier] = 0;
+ end
+ module:hook("iq-error/host/"..stanza_id, handle_push_error);
+ module:hook("iq-result/host/"..stanza_id, handle_push_success);
+ id2node[stanza_id] = node;
+ id2identifier[stanza_id] = push_identifier;
+ module:send(push_publish);
+ pushes = pushes + 1;
+ end
+ end
+ end
+ return pushes;
+end
+
+-- small helper function to extract relevant push settings
+local function get_push_settings(stanza, session)
+ local to = stanza.attr.to;
+ local node = to and jid.split(to) or session.username;
+ local user_push_services = push_store:get(node);
+ return node, user_push_services;
+end
+
+-- publish on offline message
+module:hook("message/offline/handle", function(event)
+ local node, user_push_services = get_push_settings(event.stanza, event.origin);
+ module:log("debug", "Invoking cloud handle_notify_request() for offline stanza");
+ handle_notify_request(event.stanza, node, user_push_services, true);
+end, 1);
+
+-- publish on bare groupchat
+-- this picks up MUC messages when there are no devices connected
+module:hook("message/bare/groupchat", function(event)
+ module:log("debug", "Invoking cloud handle_notify_request() for bare groupchat stanza");
+ local node, user_push_services = get_push_settings(event.stanza, event.origin);
+ handle_notify_request(event.stanza, node, user_push_services, true);
+end, 1);
+
+
+local function process_stanza_queue(queue, session, queue_type)
+ if not session.push_identifier then return; end
+ local user_push_services = {[session.push_identifier] = session.push_settings};
+ local notified = { unimportant = false; important = false }
+ for i=1, #queue do
+ local stanza = queue[i];
+ -- fast ignore of already pushed stanzas
+ if stanza and not (stanza._push_notify and stanza._push_notify[session.push_identifier]) then
+ local node = get_push_settings(stanza, session);
+ local stanza_type = "unimportant";
+ if dummy_body and is_important(stanza) then stanza_type = "important"; end
+ if not notified[stanza_type] then -- only notify if we didn't try to push for this stanza type already
+ -- session.log("debug", "Invoking cloud handle_notify_request() for smacks queued stanza: %d", i);
+ if handle_notify_request(stanza, node, user_push_services, false) ~= 0 then
+ if session.hibernating and not session.first_hibernated_push then
+ -- if we treat all pushes equally (no dummy body configured), OR if important stanzas are treated
+ -- differently (pushed with the last-message-body field set to a dummy string) and this message was
+ -- important, then record the time of the first push in the session for the smacks module, which will
+ -- extend its hibernation timeout based on the value of session.first_hibernated_push
+ if not dummy_body or (dummy_body and is_important(stanza)) then
+ session.first_hibernated_push = os_time();
+ -- check for prosody 0.12 mod_smacks
+ if session.hibernating_watchdog and session.original_smacks_callback and session.original_smacks_timeout then
+ -- restore old smacks watchdog (--> the start of our original timeout will be delayed until first push)
+ session.hibernating_watchdog:cancel();
+ session.hibernating_watchdog = watchdog.new(session.original_smacks_timeout, session.original_smacks_callback);
+ end
+ end
+ end
+ session.log("debug", "Cloud handle_notify_request() > 0, not notifying for other %s queued stanzas of type %s", queue_type, stanza_type);
+ notified[stanza_type] = true
+ end
+ end
+ end
+ if notified.unimportant and notified.important then break; end -- stop processing the queue if all push types are exhausted
+ end
+end
+
+-- publish on unacked smacks message (use timer to send out push for all stanzas submitted in a row only once)
+local function process_stanza(session, stanza)
+ if session.push_identifier then
+ session.log("debug", "adding new stanza to push_queue");
+ if not session.push_queue then session.push_queue = {}; end
+ local queue = session.push_queue;
+ queue[#queue+1] = st.clone(stanza);
+ if not session.awaiting_push_timer then -- timer not already running --> start new timer
+ session.log("debug", "Invoking cloud handle_notify_request() for newly smacks queued stanza (in a moment)");
+ session.awaiting_push_timer = module:add_timer(1.0, function ()
+ session.log("debug", "Invoking cloud handle_notify_request() for newly smacks queued stanzas (now in timer)");
+ process_stanza_queue(session.push_queue, session, "push");
+ session.push_queue = {}; -- clean up queue after push
+ session.awaiting_push_timer = nil;
+ end);
+ end
+ end
+ return stanza;
+end
+
+local function process_smacks_stanza(event)
+ local session = event.origin;
+ local stanza = event.stanza;
+ if not session.push_identifier then
+ session.log("debug", "NOT invoking cloud handle_notify_request() for newly smacks queued stanza (session.push_identifier is not set: %s)",
+ session.push_identifier
+ );
+ else
+ process_stanza(session, stanza)
+ end
+end
+
+-- smacks hibernation is started
+local function hibernate_session(event)
+ local session = event.origin;
+ local queue = event.queue;
+ session.first_hibernated_push = nil;
+ if session.push_identifier and session.hibernating_watchdog then -- check for prosody 0.12 mod_smacks
+ -- save old watchdog callback and timeout
+ session.original_smacks_callback = session.hibernating_watchdog.callback;
+ session.original_smacks_timeout = session.hibernating_watchdog.timeout;
+ -- cancel old watchdog and create a new watchdog with extended timeout
+ session.hibernating_watchdog:cancel();
+ session.hibernating_watchdog = watchdog.new(extended_hibernation_timeout, function()
+ session.log("debug", "Push-extended smacks watchdog triggered");
+ if session.original_smacks_callback then
+ session.log("debug", "Calling original smacks watchdog handler");
+ session.original_smacks_callback();
+ end
+ end);
+ end
+ -- process unacked stanzas
+ process_stanza_queue(queue, session, "smacks");
+end
+
+-- smacks hibernation is ended
+local function restore_session(event)
+ local session = event.resumed;
+ if session then -- older smacks module versions send only the "intermediate" session in event.session and no session.resumed one
+ if session.awaiting_push_timer then
+ session.awaiting_push_timer:stop();
+ session.awaiting_push_timer = nil;
+ end
+ session.first_hibernated_push = nil;
+ -- the extended smacks watchdog will be canceled by the smacks module, no need to do anything here
+ end
+end
+
+-- smacks ack is delayed
+local function ack_delayed(event)
+ local session = event.origin;
+ local queue = event.queue;
+ local stanza = event.stanza;
+ if not session.push_identifier then return; end
+ if stanza then process_stanza(session, stanza); return; end -- don't iterate through smacks queue if we know which stanza triggered this
+ for i=1, #queue do
+ local queued_stanza = queue[i];
+ -- process unacked stanzas (handle_notify_request() will only send push requests for new stanzas)
+ process_stanza(session, queued_stanza);
+ end
+end
+
+-- archive message added
+local function archive_message_added(event)
+ -- event is: { origin = origin, stanza = stanza, for_user = store_user, id = id }
+ -- only notify for new mam messages when at least one device is online
+ if not event.for_user or not host_sessions[event.for_user] then return; end
+ local stanza = event.stanza;
+ local user_session = host_sessions[event.for_user].sessions;
+ local to = stanza.attr.to;
+ to = to and jid.split(to) or event.origin.username;
+
+ -- only notify if the stanza destination is the mam user we store it for
+ if event.for_user == to then
+ local user_push_services = push_store:get(to);
+
+ -- Urgent stanzas are time-sensitive (e.g. calls) and should
+ -- be pushed immediately to avoid getting stuck in the smacks
+ -- queue in case of dead connections, for example
+ local is_urgent_stanza, urgent_reason = is_urgent(event.stanza);
+
+ local notify_push_services;
+ if is_urgent_stanza then
+ module:log("debug", "Urgent push for %s (%s)", to, urgent_reason);
+ notify_push_services = user_push_services;
+ else
+ -- only notify nodes with no active sessions (smacks is counted as active and handled separately)
+ notify_push_services = {};
+ for identifier, push_info in pairs(user_push_services) do
+ local identifier_found = nil;
+ for _, session in pairs(user_session) do
+ if session.push_identifier == identifier then
+ identifier_found = session;
+ break;
+ end
+ end
+ if identifier_found then
+ identifier_found.log("debug", "Not cloud notifying '%s' of new MAM stanza (session still alive)", identifier);
+ else
+ notify_push_services[identifier] = push_info;
+ end
+ end
+ end
+
+ handle_notify_request(event.stanza, to, notify_push_services, true);
+ end
+end
+
+module:hook("smacks-hibernation-start", hibernate_session);
+module:hook("smacks-hibernation-end", restore_session);
+module:hook("smacks-ack-delayed", ack_delayed);
+module:hook("smacks-hibernation-stanza-queued", process_smacks_stanza);
+module:hook("archive-message-added", archive_message_added);
+
+local function send_ping(event)
+ local user = event.user;
+ local push_services = event.push_services or push_store:get(user);
+ module:log("debug", "Handling event 'cloud-notify-ping' for user '%s'", user);
+ local retval = handle_notify_request(nil, user, push_services, true);
+ module:log("debug", "handle_notify_request() returned %s", tostring(retval));
+end
+-- can be used by other modules to ping one or more (or all) push endpoints
+module:hook("cloud-notify-ping", send_ping);
+
+module:log("info", "Module loaded");
+function module.unload()
+ module:log("info", "Unloading module");
+ -- cleanup some settings, reloading this module can cause process_smacks_stanza() to stop working otherwise
+ for user, _ in pairs(host_sessions) do
+ for _, session in pairs(host_sessions[user].sessions) do
+ if session.awaiting_push_timer then session.awaiting_push_timer:stop(); end
+ session.awaiting_push_timer = nil;
+ session.push_queue = nil;
+ session.first_hibernated_push = nil;
+ -- check for prosody 0.12 mod_smacks
+ if session.hibernating_watchdog and session.original_smacks_callback and session.original_smacks_timeout then
+ -- restore old smacks watchdog
+ session.hibernating_watchdog:cancel();
+ session.hibernating_watchdog = watchdog.new(session.original_smacks_timeout, session.original_smacks_callback);
+ end
+ end
+ end
+ module:log("info", "Module unloaded");
+end
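
mod_cloud_notify fires events that other modules can hook: returning false from "cloud_notify/registration" rejects an enable request, returning a truthy value from "cloud_notify/push" blocks an outgoing push (optionally setting event.reason), and firing "cloud-notify-ping" asks the module to ping a user's push services. A minimal sketch (not part of this changeset) of a hypothetical companion module that drops unimportant pushes to one particular push service; the JID and filtering rule are invented:

-- Hypothetical companion module
local blocked_push_jid = "push.legacy.example.net"; -- invented example JID

module:depends("cloud_notify");

module:hook("cloud_notify/push", function (event)
	if event.push_info.jid == blocked_push_jid and not event.important then
		event.reason = "unimportant push to legacy service";
		return true; -- a truthy return blocks this push
	end
end);

Other modules can likewise trigger a ping for a user with module:fire_event("cloud-notify-ping", { user = username }).
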
diff --git a/plugins/mod_cron.lua b/plugins/mod_cron.lua
index 29c1aa93..67b68514 100644
--- a/plugins/mod_cron.lua
+++ b/plugins/mod_cron.lua
@@ -8,6 +8,10 @@ local cron_spread_factor = module:get_option_number("cron_spread_factor", 0);
local active_hosts = {}
+if prosody.process_type == "prosodyctl" then
+ return; -- Yes, it happens...
+end
+
function module.add_host(host_module)
local last_run_times = host_module:open_store("cron", "map");
diff --git a/plugins/mod_flags.lua b/plugins/mod_flags.lua
new file mode 100644
index 00000000..694b608b
--- /dev/null
+++ b/plugins/mod_flags.lua
@@ -0,0 +1,157 @@
+local jid_node = require "prosody.util.jid".node;
+
+local flags = module:open_store("account_flags", "keyval+");
+
+-- API
+
+function add_flag(username, flag, comment)
+ local flag_data = {
+ when = os.time();
+ comment = comment;
+ };
+
+ local ok, err = flags:set_key(username, flag, flag_data);
+ if not ok then
+ return nil, err;
+ end
+
+ module:fire_event("user-flag-added/"..flag, {
+ user = username;
+ flag = flag;
+ data = flag_data;
+ });
+
+ return true;
+end
+
+function remove_flag(username, flag)
+ local ok, err = flags:set_key(username, flag, nil);
+ if not ok then
+ return nil, err;
+ end
+
+ module:fire_event("user-flag-removed/"..flag, {
+ user = username;
+ flag = flag;
+ });
+
+ return true;
+end
+
+function has_flag(username, flag) -- luacheck: ignore 131/has_flag
+ local ok, err = flags:get_key(username, flag);
+ if not ok and err then
+ error("Failed to check flags for user: "..err);
+ end
+ return not not ok;
+end
+
+function get_flag_info(username, flag) -- luacheck: ignore 131/get_flag_info
+ return flags:get_key(username, flag);
+end
+
+-- Shell commands
+
+local function get_username(jid)
+ return (assert(jid_node(jid), "please supply a valid user JID"));
+end
+
+module:add_item("shell-command", {
+ section = "flags";
+ section_desc = "View and manage flags on user accounts";
+ name = "list";
+ desc = "List flags for the given user account";
+ args = {
+ { name = "jid", type = "string" };
+ };
+ host_selector = "jid";
+ handler = function(self, jid) --luacheck: ignore 212/self
+ local c = 0;
+
+ local user_flags, err = flags:get(get_username(jid));
+
+ if not user_flags and err then
+ return false, "Unable to list flags: "..err;
+ end
+
+ if user_flags then
+ local print = self.session.print;
+
+ for flag_name, flag_data in pairs(user_flags) do
+ print(flag_name, os.date("%Y-%m-%d %R", flag_data.when), flag_data.comment);
+ c = c + 1;
+ end
+ end
+
+ return true, ("%d flags listed"):format(c);
+ end;
+});
+
+module:add_item("shell-command", {
+ section = "flags";
+ section_desc = "View and manage flags on user accounts";
+ name = "add";
+ desc = "Add a flag to the given user account, with optional comment";
+ args = {
+ { name = "jid", type = "string" };
+ { name = "flag", type = "string" };
+ { name = "comment", type = "string" };
+ };
+ host_selector = "jid";
+ handler = function(self, jid, flag, comment) --luacheck: ignore 212/self
+ local username = get_username(jid);
+
+ local ok, err = add_flag(username, flag, comment);
+ if not ok then
+ return false, "Failed to add flag: "..err;
+ end
+
+ return true, "Flag added";
+ end;
+});
+
+module:add_item("shell-command", {
+ section = "flags";
+ section_desc = "View and manage flags on user accounts";
+ name = "remove";
+ desc = "Remove a flag from the given user account";
+ args = {
+ { name = "jid", type = "string" };
+ { name = "flag", type = "string" };
+ };
+ host_selector = "jid";
+ handler = function(self, jid, flag) --luacheck: ignore 212/self
+ local username = get_username(jid);
+
+ local ok, err = remove_flag(username, flag);
+ if not ok then
+ return false, "Failed to remove flag: "..err;
+ end
+
+ return true, "Flag removed";
+ end;
+});
+
+module:add_item("shell-command", {
+ section = "flags";
+ section_desc = "View and manage flags on user accounts";
+ name = "find";
+ desc = "Find all user accounts with a given flag on the specified host";
+ args = {
+ { name = "host", type = "string" };
+ { name = "flag", type = "string" };
+ };
+ host_selector = "host";
+ handler = function(self, host, flag) --luacheck: ignore 212/self 212/host
+ local users_with_flag = flags:get_key_from_all(flag);
+
+ local print = self.session.print;
+ local c = 0;
+ for user, flag_data in pairs(users_with_flag) do
+ print(user, os.date("%Y-%m-%d %R", flag_data.when), flag_data.comment);
+ c = c + 1;
+ end
+
+ return true, ("%d accounts listed"):format(c);
+ end;
+});
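
mod_flags fires "user-flag-added/<flag>" and "user-flag-removed/<flag>" events whenever a flag changes. A minimal sketch (not part of this changeset) of a hypothetical module reacting to an invented "quarantine" flag:

-- Hypothetical module: log whenever the (invented) "quarantine" flag is set or cleared
module:depends("flags");

module:hook("user-flag-added/quarantine", function (event)
	module:log("info", "User %s was quarantined (%s)", event.user, event.data.comment or "no comment");
end);

module:hook("user-flag-removed/quarantine", function (event)
	module:log("info", "User %s is no longer quarantined", event.user);
end);

From the admin shell, the same flag could be set with flags:add("alice@example.net", "quarantine", "spam reports") and cleared with flags:remove("alice@example.net", "quarantine") (JID and comment invented).
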
diff --git a/plugins/mod_invites.lua b/plugins/mod_invites.lua
index 5ee9430a..1dfc8804 100644
--- a/plugins/mod_invites.lua
+++ b/plugins/mod_invites.lua
@@ -6,6 +6,14 @@ local jid_split = require "prosody.util.jid".split;
local argparse = require "prosody.util.argparse";
local human_io = require "prosody.util.human.io";
+local url_escape = require "util.http".urlencode;
+local render_url = require "util.interpolation".new("%b{}", url_escape, {
+ urlescape = url_escape;
+ noscheme = function (urlstring)
+ return (urlstring:gsub("^[^:]+:", ""));
+ end;
+});
+
local default_ttl = module:get_option_period("invite_expiry", "1 week");
local token_storage;
@@ -202,6 +210,34 @@ function use(token) --luacheck: ignore 131/use
return invite and invite:use();
end
+-- Point at e.g. a deployment of https://github.com/modernxmpp/easy-xmpp-invitation
+-- This URL must always be absolute, as it is shared standalone
+local invite_url_template = module:get_option_string("invites_page");
+local invites_page_supports = module:get_option_set("invites_page_supports", { "account", "contact", "account-and-contact" });
+
+local function add_landing_url(invite)
+ if not invite_url_template or invite.landing_page then return; end
+
+ -- Determine whether this type of invitation is supported by the landing page
+ local invite_type;
+ if invite.type == "register" then
+ invite_type = "account";
+ elseif invite.type == "roster" then
+ if invite.allow_registration then
+ invite_type = "account-and-contact";
+ else
+ invite_type = "contact-only";
+ end
+ end
+ if not invites_page_supports:contains(invite_type) then
+ return; -- Invitation type unsupported
+ end
+
+ invite.landing_page = render_url(invite_url_template, { host = module.host, invite = invite });
+end
+
+module:hook("invite-created", add_landing_url, -1);
+
--- shell command
module:add_item("shell-command", {
section = "invite";
diff --git a/plugins/mod_pep.lua b/plugins/mod_pep.lua
index 33eee2ec..b0dfe423 100644
--- a/plugins/mod_pep.lua
+++ b/plugins/mod_pep.lua
@@ -531,3 +531,6 @@ module:hook_global("user-deleted", function(event)
recipients[username] = nil;
end);
+module:require("mod_pubsub/commands").add_commands(function (service_jid)
+ return get_pep_service((jid_split(service_jid)));
+end);
diff --git a/plugins/mod_pubsub/commands.lib.lua b/plugins/mod_pubsub/commands.lib.lua
new file mode 100644
index 00000000..d07b226f
--- /dev/null
+++ b/plugins/mod_pubsub/commands.lib.lua
@@ -0,0 +1,239 @@
+local it = require "prosody.util.iterators";
+local st = require "prosody.util.stanza";
+
+local pubsub_lib = module:require("mod_pubsub/pubsub");
+
+local function add_commands(get_service)
+ module:add_item("shell-command", {
+ section = "pubsub";
+ section_desc = "Manage publish/subscribe nodes";
+ name = "list_nodes";
+ desc = "List nodes on a pubsub service";
+ args = {
+ { name = "service_jid", type = "string" };
+ };
+ host_selector = "service_jid";
+
+ handler = function (self, service_jid) --luacheck: ignore 212/self
+ -- luacheck: ignore 431/service
+ local service = get_service(service_jid);
+ local nodes = select(2, assert(service:get_nodes(true)));
+ local count = 0;
+ for node_name in pairs(nodes) do
+ count = count + 1;
+ self.session.print(node_name);
+ end
+ return true, ("%d nodes"):format(count);
+ end;
+ });
+
+ module:add_item("shell-command", {
+ section = "pubsub";
+ section_desc = "Manage publish/subscribe nodes";
+ name = "list_items";
+ desc = "List items on a pubsub node";
+ args = {
+ { name = "service_jid", type = "string" };
+ { name = "node_name", type = "string" };
+ };
+ host_selector = "service_jid";
+
+ handler = function (self, service_jid, node_name) --luacheck: ignore 212/self
+ -- luacheck: ignore 431/service
+ local service = get_service(service_jid);
+ local items = select(2, assert(service:get_items(node_name, true)));
+
+ local count = 0;
+ for item_name in pairs(items) do
+ count = count + 1;
+ self.session.print(item_name);
+ end
+ return true, ("%d items"):format(count);
+ end;
+ });
+
+ module:add_item("shell-command", {
+ section = "pubsub";
+ section_desc = "Manage publish/subscribe nodes";
+ name = "get_item";
+ desc = "Show item content on a pubsub node";
+ args = {
+ { name = "service_jid", type = "string" };
+ { name = "node_name", type = "string" };
+ { name = "item_name", type = "string" };
+ };
+ host_selector = "service_jid";
+
+ handler = function (self, service_jid, node_name, item_name) --luacheck: ignore 212/self
+ -- luacheck: ignore 431/service
+ local service = get_service(service_jid);
+ local items = select(2, assert(service:get_items(node_name, true)));
+
+ if not items[item_name] then
+ return false, "Item not found";
+ end
+
+ self.session.print(items[item_name]);
+
+ return true;
+ end;
+ });
+
+ module:add_item("shell-command", {
+ section = "pubsub";
+ section_desc = "Manage publish/subscribe nodes";
+ name = "get_node_config";
+ desc = "Get the current configuration for a node";
+ args = {
+ { name = "service_jid", type = "string" };
+ { name = "node_name", type = "string" };
+ { name = "option_name", type = "string" };
+ };
+ host_selector = "service_jid";
+
+ handler = function (self, service_jid, node_name, option_name) --luacheck: ignore 212/self
+ -- luacheck: ignore 431/service
+ local service = get_service(service_jid);
+ local config = select(2, assert(service:get_node_config(node_name, true)));
+
+ local config_form = pubsub_lib.node_config_form:form(config, "submit");
+
+ local count = 0;
+ if option_name then
+ count = 1;
+ local field = config_form:get_child_with_attr("field", nil, "var", option_name);
+ if not field then
+ return false, "option not found";
+ end
+ self.session.print(field:get_child_text("value"));
+ else
+ local opts = {};
+ for field in config_form:childtags("field") do
+ opts[field.attr.var] = field:get_child_text("value");
+ end
+ for k, v in it.sorted_pairs(opts) do
+ count = count + 1;
+ self.session.print(k, v);
+ end
+ end
+
+ return true, ("Showing %d config options"):format(count);
+ end;
+ });
+
+ module:add_item("shell-command", {
+ section = "pubsub";
+ section_desc = "Manage publish/subscribe nodes";
+ name = "set_node_config_option";
+ desc = "Set a config option on a pubsub node";
+ args = {
+ { name = "service_jid", type = "string" };
+ { name = "node_name", type = "string" };
+ { name = "option_name", type = "string" };
+ { name = "option_value", type = "string" };
+ };
+ host_selector = "service_jid";
+
+ handler = function (self, service_jid, node_name, option_name, option_value) --luacheck: ignore 212/self
+ -- luacheck: ignore 431/service
+ local service = get_service(service_jid);
+ local config = select(2, assert(service:get_node_config(node_name, true)));
+
+ local new_config_form = st.stanza("x", { xmlns = "jabber:x:data" })
+ :tag("field", { var = option_name })
+ :text_tag("value", option_value)
+ :up();
+
+ local new_config = pubsub_lib.node_config_form:data(new_config_form, config);
+
+ assert(service:set_node_config(node_name, true, new_config));
+
+ local applied_config = select(2, assert(service:get_node_config(node_name, true)));
+
+ local applied_config_form = pubsub_lib.node_config_form:form(applied_config, "submit");
+ local applied_field = applied_config_form:get_child_with_attr("field", nil, "var", option_name);
+ if not applied_field then
+ return false, "Unknown config field: "..option_name;
+ end
+ return true, "Applied config: "..applied_field:get_child_text("value");
+ end;
+ });
+
+ module:add_item("shell-command", {
+ section = "pubsub";
+ section_desc = "Manage publish/subscribe nodes";
+ name = "delete_item";
+ desc = "Delete a single item from a node";
+ args = {
+ { name = "service_jid", type = "string" };
+ { name = "node_name", type = "string" };
+ { name = "item_name", type = "string" };
+ };
+ host_selector = "service_jid";
+
+ handler = function (self, service_jid, node_name, item_name) --luacheck: ignore 212/self
+ -- luacheck: ignore 431/service
+ local service = get_service(service_jid);
+ return assert(service:retract(node_name, true, item_name));
+ end;
+ });
+
+ module:add_item("shell-command", {
+ section = "pubsub";
+ section_desc = "Manage publish/subscribe nodes";
+ name = "delete_all_items";
+ desc = "Delete all items from a node";
+ args = {
+ { name = "service_jid", type = "string" };
+ { name = "node_name", type = "string" };
+ { name = "notify_subscribers", type = "string" };
+ };
+ host_selector = "service_jid";
+
+ handler = function (self, service_jid, node_name, notify_subscribers) --luacheck: ignore 212/self
+ -- luacheck: ignore 431/service
+ local service = get_service(service_jid);
+ return assert(service:purge(node_name, true, notify_subscribers == "true"));
+ end;
+ });
+
+ module:add_item("shell-command", {
+ section = "pubsub";
+ section_desc = "Manage publish/subscribe nodes";
+ name = "create_node";
+ desc = "Create a new node";
+ args = {
+ { name = "service_jid", type = "string" };
+ { name = "node_name", type = "string" };
+ };
+ host_selector = "service_jid";
+
+ handler = function (self, service_jid, node_name) --luacheck: ignore 212/self
+ -- luacheck: ignore 431/service
+ local service = get_service(service_jid);
+ return assert(service:create(node_name, true));
+ end;
+ });
+
+ module:add_item("shell-command", {
+ section = "pubsub";
+ section_desc = "Manage publish/subscribe nodes";
+ name = "delete_node";
+ desc = "Delete a node entirely";
+ args = {
+ { name = "service_jid", type = "string" };
+ { name = "node_name", type = "string" };
+ };
+ host_selector = "service_jid";
+
+ handler = function (self, service_jid, node_name) --luacheck: ignore 212/self
+ -- luacheck: ignore 431/service
+ local service = get_service(service_jid);
+ return assert(service:delete(node_name, true));
+ end;
+ });
+end
+
+return {
+ add_commands = add_commands;
+}
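
Once this library is loaded by mod_pubsub or mod_pep, the commands appear in the admin shell. A usage sketch with an invented service JID, node name and value; the option name is assumed to be the raw data form field var (e.g. "pubsub#max_items"), since the commands match on the field's var attribute:

-- Hypothetical admin shell session
pubsub:create_node("pubsub.example.net", "news")
pubsub:get_node_config("pubsub.example.net", "news", "pubsub#max_items")
pubsub:set_node_config_option("pubsub.example.net", "news", "pubsub#max_items", "20")
pubsub:list_items("pubsub.example.net", "news")
pubsub:delete_node("pubsub.example.net", "news")
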
diff --git a/plugins/mod_pubsub/mod_pubsub.lua b/plugins/mod_pubsub/mod_pubsub.lua
index c17d9e63..5a590893 100644
--- a/plugins/mod_pubsub/mod_pubsub.lua
+++ b/plugins/mod_pubsub/mod_pubsub.lua
@@ -184,8 +184,11 @@ module:hook("host-disco-items", function (event)
if not ok then
return;
end
- for node, node_obj in pairs(ret) do
- reply:tag("item", { jid = module.host, node = node, name = node_obj.config.title }):up();
+ for node in pairs(ret) do
+ local ok, meta = service:get_node_metadata(node, stanza.attr.from);
+ if ok then
+ reply:tag("item", { jid = module.host, node = node, name = meta.title }):up();
+ end
end
end);
@@ -205,7 +208,7 @@ local function get_affiliation(jid, _, action)
-- Only one affiliation is allowed to create nodes by default
return "owner";
end
- if module:may(":service-admin", bare_jid) then
+ if module:could(":service-admin", bare_jid) then
return admin_aff;
end
end
@@ -274,41 +277,4 @@ local function get_service(service_jid)
return assert(assert(prosody.hosts[service_jid], "Unknown pubsub service").modules.pubsub, "Not a pubsub service").service;
end
-module:add_item("shell-command", {
- section = "pubsub";
- section_desc = "Manage publish/subscribe nodes";
- name = "create_node";
- desc = "Create a node with the specified name";
- args = {
- { name = "service_jid", type = "string" };
- { name = "node_name", type = "string" };
- };
- host_selector = "service_jid";
-
- handler = function (self, service_jid, node_name) --luacheck: ignore 212/self
- return get_service(service_jid):create(node_name, true);
- end;
-});
-
-module:add_item("shell-command", {
- section = "pubsub";
- section_desc = "Manage publish/subscribe nodes";
- name = "list_nodes";
- desc = "List nodes on a pubsub service";
- args = {
- { name = "service_jid", type = "string" };
- };
- host_selector = "service_jid";
-
- handler = function (self, service_jid) --luacheck: ignore 212/self
- -- luacheck: ignore 431/service
- local service = get_service(service_jid);
- local nodes = select(2, assert(service:get_nodes(true)));
- local count = 0;
- for node_name in pairs(nodes) do
- count = count + 1;
- self.session.print(node_name);
- end
- return true, ("%d nodes"):format(count);
- end;
-});
+module:require("commands").add_commands(get_service);
diff --git a/plugins/mod_roster.lua b/plugins/mod_roster.lua
index 82016d27..5ffdfe1a 100644
--- a/plugins/mod_roster.lua
+++ b/plugins/mod_roster.lua
@@ -15,10 +15,11 @@ local jid_prep = require "prosody.util.jid".prep;
local tonumber = tonumber;
local pairs = pairs;
-local rm_load_roster = require "prosody.core.rostermanager".load_roster;
-local rm_remove_from_roster = require "prosody.core.rostermanager".remove_from_roster;
-local rm_add_to_roster = require "prosody.core.rostermanager".add_to_roster;
-local rm_roster_push = require "prosody.core.rostermanager".roster_push;
+local rostermanager = require "prosody.core.rostermanager";
+local rm_load_roster = rostermanager.load_roster;
+local rm_remove_from_roster = rostermanager.remove_from_roster;
+local rm_add_to_roster = rostermanager.add_to_roster;
+local rm_roster_push = rostermanager.roster_push;
module:add_feature("jabber:iq:roster");
@@ -147,3 +148,168 @@ module:hook_global("user-deleted", function(event)
end
end
end, 300);
+
+-- API/commands
+
+-- Make a *one-way* subscription. User will see when contact is online,
+-- contact will not see when user is online.
+function subscribe(user_jid, contact_jid)
+ local user_username, user_host = jid_split(user_jid);
+ local contact_username, contact_host = jid_split(contact_jid);
+
+ -- Update user's roster to say subscription request is pending. Bare hosts (e.g. components) don't have rosters.
+ if user_username ~= nil then
+ rostermanager.set_contact_pending_out(user_username, user_host, contact_jid);
+ end
+
+ if prosody.hosts[contact_host] then -- Sending to a local host?
+ -- Update contact's roster to say subscription request is pending...
+ rostermanager.set_contact_pending_in(contact_username, contact_host, user_jid);
+ -- Update contact's roster to say subscription request approved...
+ rostermanager.subscribed(contact_username, contact_host, user_jid);
+ -- Update user's roster to say subscription request approved. Bare hosts (e.g. components) don't have rosters.
+ if user_username ~= nil then
+ rostermanager.process_inbound_subscription_approval(user_username, user_host, contact_jid);
+ end
+ else
+ -- Send a subscription request
+ local sub_request = st.presence({ from = user_jid, to = contact_jid, type = "subscribe" });
+ module:send(sub_request);
+ end
+
+ return true;
+end
+
+-- Make a mutual subscription between jid1 and jid2. Each JID will see
+-- when the other one is online.
+function subscribe_both(jid1, jid2)
+ local ok1, err1 = subscribe(jid1, jid2);
+ local ok2, err2 = subscribe(jid2, jid1);
+ return ok1 and ok2, err1 or err2;
+end
+
+-- Unsubscribes user from contact (not contact from user, if subscribed).
+function unsubscribe(user_jid, contact_jid)
+ local user_username, user_host = jid_split(user_jid);
+ local contact_username, contact_host = jid_split(contact_jid);
+
+ -- Update user's roster to say subscription is cancelled...
+ rostermanager.unsubscribe(user_username, user_host, contact_jid);
+ if prosody.hosts[contact_host] then -- Local host?
+ -- Update contact's roster to say subscription is cancelled...
+ rostermanager.unsubscribed(contact_username, contact_host, user_jid);
+ end
+ return true;
+end
+
+-- Cancel any subscription in either direction.
+function unsubscribe_both(jid1, jid2)
+ local ok1 = unsubscribe(jid1, jid2);
+ local ok2 = unsubscribe(jid2, jid1);
+ return ok1 and ok2;
+end
+
+module:add_item("shell-command", {
+ section = "roster";
+ section_desc = "View and manage user rosters (contact lists)";
+ name = "show";
+ desc = "Show a user's current roster";
+ args = {
+ { name = "jid", type = "string" };
+ { name = "sub", type = "string" };
+ };
+ host_selector = "jid";
+ handler = function(self, jid, sub) --luacheck: ignore 212/self
+ local print = self.session.print;
+ local it = require "prosody.util.iterators";
+
+ local roster = assert(rm_load_roster(jid_split(jid)));
+
+ local function sort_func(a, b)
+ if type(a) == "string" and type(b) == "string" then
+ return a < b;
+ else
+ return a == false;
+ end
+ end
+
+ local count = 0;
+ if sub == "pending" then
+ local pending_subs = roster[false].pending or {};
+ for pending_jid in it.sorted_pairs(pending_subs) do
+ print(pending_jid);
+ end
+ else
+ for contact, item in it.sorted_pairs(roster, sort_func) do
+ if contact and (not sub or sub == item.subscription) then
+ count = count + 1;
+ print(contact, ("sub=%s\task=%s"):format(item.subscription or "none", item.ask or "none"));
+ end
+ end
+ end
+
+ return true, ("Showing %d entries"):format(count);
+ end;
+});
+
+module:add_item("shell-command", {
+ section = "roster";
+ section_desc = "View and manage user rosters (contact lists)";
+ name = "subscribe";
+ desc = "Subscribe a user to another JID";
+ args = {
+ { name = "jid", type = "string" };
+ { name = "contact", type = "string" };
+ };
+ host_selector = "jid";
+ handler = function(self, jid, contact) --luacheck: ignore 212/self
+ return subscribe(jid, contact);
+ end;
+});
+
+module:add_item("shell-command", {
+ section = "roster";
+ section_desc = "View and manage user rosters (contact lists)";
+ name = "subscribe_both";
+ desc = "Subscribe a user and a contact JID to each other";
+ args = {
+ { name = "jid", type = "string" };
+ { name = "contact", type = "string" };
+ };
+ host_selector = "jid";
+ handler = function(self, jid, contact) --luacheck: ignore 212/self
+ return subscribe_both(jid, contact);
+ end;
+});
+
+
+module:add_item("shell-command", {
+ section = "roster";
+ section_desc = "View and manage user rosters (contact lists)";
+ name = "unsubscribe";
+ desc = "Unsubscribe a user from another JID";
+ args = {
+ { name = "jid", type = "string" };
+ { name = "contact", type = "string" };
+ };
+ host_selector = "jid";
+ handler = function(self, jid, contact) --luacheck: ignore 212/self
+ return unsubscribe(jid, contact);
+ end;
+});
+
+module:add_item("shell-command", {
+ section = "roster";
+ section_desc = "View and manage user rosters (contact lists)";
+ name = "unsubscribe_both";
+ desc = "Unubscribe a user and a contact JID from each other";
+ args = {
+ { name = "jid", type = "string" };
+ { name = "contact", type = "string" };
+ };
+ host_selector = "jid";
+ handler = function(self, jid, contact) --luacheck: ignore 212/self
+ return unsubscribe_both(jid, contact);
+ end;
+});
+
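
The new roster commands are likewise available from the admin shell. A usage sketch (JIDs invented):

-- Hypothetical admin shell session
roster:show("alice@example.net")                               -- full roster with sub/ask state
roster:show("alice@example.net", "pending")                    -- only pending inbound subscription requests
roster:subscribe_both("alice@example.net", "bob@example.net")  -- mutual subscription
roster:unsubscribe("alice@example.net", "old@example.org")     -- one-way unsubscribe
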
diff --git a/plugins/mod_storage_sql.lua b/plugins/mod_storage_sql.lua
index 3f606160..f24f11fc 100644
--- a/plugins/mod_storage_sql.lua
+++ b/plugins/mod_storage_sql.lua
@@ -14,11 +14,11 @@ local t_concat = table.concat;
local have_dbisql, dbisql = pcall(require, "prosody.util.sql");
local have_sqlite, sqlite = pcall(require, "prosody.util.sqlite3");
if not have_dbisql then
- module:log("debug", "Could not load LuaDBI, error was: %s", dbisql)
+ module:log("debug", "Could not load LuaDBI: %s", dbisql)
dbisql = nil;
end
if not have_sqlite then
- module:log("debug", "Could not load LuaSQLite3, error was: %s", sqlite)
+ module:log("debug", "Could not load LuaSQLite3: %s", sqlite)
sqlite = nil;
end
if not (have_dbisql or have_sqlite) then