Implement ACL runtime enforcement and management API
This commit is contained in:
250
lib/parrhesia/api/acl.ex
Normal file
250
lib/parrhesia/api/acl.ex
Normal file
@@ -0,0 +1,250 @@
|
||||
defmodule Parrhesia.API.ACL do
  @moduledoc """
  Public ACL API and rule matching for protected sync traffic.

  Rules are persisted through the configured `Parrhesia.Storage` ACL
  adapter.  `check/3` enforces rules only for subjects that touch the
  operator-configured "protected filters"; all other traffic is allowed.
  """

  alias Parrhesia.API.RequestContext
  alias Parrhesia.Protocol.Filter
  alias Parrhesia.Storage

  @doc """
  Persists an ACL rule via the storage adapter.

  Returns `:ok` on success; storage errors are returned unchanged.
  """
  @spec grant(map(), keyword()) :: :ok | {:error, term()}
  def grant(rule, _opts \\ []) do
    with {:ok, _stored_rule} <- Storage.acl().put_rule(%{}, normalize_rule(rule)) do
      :ok
    end
  end

  @doc """
  Deletes rules matching the given selector (by id or exact fields,
  depending on the adapter).
  """
  @spec revoke(map(), keyword()) :: :ok | {:error, term()}
  def revoke(rule, _opts \\ []) do
    Storage.acl().delete_rule(%{}, normalize_delete_selector(rule))
  end

  @doc """
  Lists stored rules; supports `:principal_type`, `:principal` and
  `:capability` filter options.
  """
  @spec list(keyword()) :: {:ok, [map()]} | {:error, term()}
  def list(opts \\ []) do
    Storage.acl().list_rules(%{}, normalize_list_opts(opts))
  end

  @doc """
  Authorizes `subject` for `capability` (`:sync_read` takes a filter map,
  `:sync_write` takes an event map).

  Subjects outside the configured protected filters are always `:ok`;
  protected subjects require an authenticated pubkey covered by a stored
  rule.  Pass the caller's `%RequestContext{}` via `opts[:context]`.
  """
  @spec check(atom(), map(), keyword()) :: :ok | {:error, term()}
  def check(capability, subject, opts \\ [])

  def check(capability, subject, opts)
      when capability in [:sync_read, :sync_write] and is_map(subject) do
    context = Keyword.get(opts, :context, %RequestContext{})

    with {:ok, normalized_capability} <- normalize_capability(capability),
         {:ok, normalized_context} <- normalize_context(context),
         {:ok, protected_filters} <- protected_filters() do
      if protected_subject?(normalized_capability, subject, protected_filters) do
        authorize_subject(normalized_capability, subject, normalized_context)
      else
        # Traffic that touches no protected filter needs no rule.
        :ok
      end
    end
  end

  def check(_capability, _subject, _opts), do: {:error, :invalid_acl_capability}

  @doc """
  True when `filter` overlaps any configured protected filter.

  An invalid protected-filter configuration yields `false` (fails open).
  """
  @spec protected_read?(map()) :: boolean()
  def protected_read?(filter) when is_map(filter) do
    case protected_filters() do
      {:ok, protected_filters} ->
        protected_subject?(:sync_read, filter, protected_filters)

      {:error, _reason} ->
        false
    end
  end

  def protected_read?(_filter), do: false

  @doc """
  True when `event` matches any configured protected filter.

  An invalid protected-filter configuration yields `false` (fails open).
  """
  @spec protected_write?(map()) :: boolean()
  def protected_write?(event) when is_map(event) do
    case protected_filters() do
      {:ok, protected_filters} ->
        protected_subject?(:sync_write, event, protected_filters)

      {:error, _reason} ->
        false
    end
  end

  def protected_write?(_event), do: false

  # Requires authentication, then grants access if any authenticated
  # pubkey holds a rule covering the subject.
  defp authorize_subject(capability, subject, %RequestContext{} = context) do
    if MapSet.size(context.authenticated_pubkeys) == 0 do
      {:error, :auth_required}
    else
      capability
      |> list_rules_for_capability()
      |> authorize_against_rules(capability, context.authenticated_pubkeys, subject)
    end
  end

  defp list_rules_for_capability(capability) do
    Storage.acl().list_rules(%{}, principal_type: :pubkey, capability: capability)
  end

  defp authorize_against_rules({:ok, rules}, capability, authenticated_pubkeys, subject) do
    if Enum.any?(authenticated_pubkeys, &principal_authorized?(&1, subject, rules)) do
      :ok
    else
      {:error, denial_reason(capability)}
    end
  end

  # A storage failure denies the request with the storage error.
  defp authorize_against_rules({:error, reason}, _capability, _authenticated_pubkeys, _subject),
    do: {:error, reason}

  defp principal_authorized?(authenticated_pubkey, subject, rules) do
    Enum.any?(rules, fn rule ->
      rule.principal == authenticated_pubkey and
        rule_covers_subject?(rule.capability, rule.match, subject)
    end)
  end

  # Reads: the requested filter must be a subset of the rule's match.
  defp rule_covers_subject?(:sync_read, rule_match, filter),
    do: filter_within_rule?(filter, rule_match)

  # Writes: the event itself must match the rule's filter.
  defp rule_covers_subject?(:sync_write, rule_match, event),
    do: Filter.matches_filter?(event, rule_match)

  defp protected_subject?(:sync_read, filter, protected_filters) do
    Enum.any?(protected_filters, &filters_overlap?(filter, &1))
  end

  defp protected_subject?(:sync_write, event, protected_filters) do
    Enum.any?(protected_filters, &Filter.matches_filter?(event, &1))
  end

  # Conservative overlap test between two filters: every shared
  # constraint key must be compatible and the since/until windows must
  # intersect.  An absent constraint on either side counts as unbounded.
  defp filters_overlap?(left, right) when is_map(left) and is_map(right) do
    comparable_keys =
      left
      |> comparable_filter_keys(right)
      # limit/search never narrow the match set; since/until are handled
      # separately by filter_ranges_overlap?/2 below.
      |> Enum.reject(&(&1 in ["limit", "search", "since", "until"]))

    Enum.all?(
      comparable_keys,
      &filter_constraint_compatible?(Map.get(left, &1), Map.get(right, &1), &1)
    ) and
      filter_ranges_overlap?(left, right)
  end

  # An absent constraint matches everything.
  defp filter_constraint_compatible?(nil, _right, _key), do: true
  defp filter_constraint_compatible?(_left, nil, _key), do: true

  # List constraints are compatible when they share at least one value.
  defp filter_constraint_compatible?(left, right, _key) when is_list(left) and is_list(right) do
    MapSet.disjoint?(MapSet.new(left), MapSet.new(right)) == false
  end

  # Scalar constraints must be equal to be compatible.
  defp filter_constraint_compatible?(left, right, _key), do: left == right

  # True when every non-window constraint in the rule is satisfied (as a
  # subset) by the requested filter, and the requested time window sits
  # inside the rule's window.
  defp filter_within_rule?(filter, rule_match) when is_map(filter) and is_map(rule_match) do
    Enum.reject(rule_match, fn {key, _value} -> key in ["since", "until", "limit", "search"] end)
    |> Enum.all?(fn {key, rule_value} ->
      requested_value = Map.get(filter, key)
      requested_constraint_within_rule?(requested_value, rule_value, key)
    end) and filter_range_within_rule?(filter, rule_match)
  end

  # A filter that omits a rule-constrained key asks for MORE than the
  # rule allows, so it is not within the rule.
  defp requested_constraint_within_rule?(nil, _rule_value, _key), do: false

  defp requested_constraint_within_rule?(requested_values, rule_values, _key)
       when is_list(requested_values) and is_list(rule_values) do
    requested_values
    |> MapSet.new()
    |> MapSet.subset?(MapSet.new(rule_values))
  end

  defp requested_constraint_within_rule?(requested_value, rule_value, _key),
    do: requested_value == rule_value

  defp denial_reason(:sync_read), do: :sync_read_not_allowed
  defp denial_reason(:sync_write), do: :sync_write_not_allowed

  defp normalize_context(%RequestContext{} = context), do: {:ok, normalize_pubkeys(context)}
  defp normalize_context(_context), do: {:error, :invalid_context}

  # Lowercases all authenticated pubkeys so comparisons against stored
  # (lowercased) rule principals are case-insensitive.
  defp normalize_pubkeys(%RequestContext{} = context) do
    normalized_pubkeys =
      context.authenticated_pubkeys
      |> Enum.map(&String.downcase/1)
      |> MapSet.new()

    %RequestContext{context | authenticated_pubkeys: normalized_pubkeys}
  end

  # Non-map input is passed on as an empty map; the storage adapter is
  # responsible for rejecting it as an invalid rule/selector.
  defp normalize_rule(rule) when is_map(rule), do: rule
  defp normalize_rule(_rule), do: %{}

  defp normalize_delete_selector(selector) when is_map(selector), do: selector
  defp normalize_delete_selector(_selector), do: %{}

  defp normalize_list_opts(opts) do
    []
    |> maybe_put_opt(:principal_type, Keyword.get(opts, :principal_type))
    |> maybe_put_opt(:principal, normalize_list_principal(Keyword.get(opts, :principal)))
    |> maybe_put_opt(:capability, Keyword.get(opts, :capability))
  end

  defp normalize_list_principal(nil), do: nil

  defp normalize_list_principal(principal) when is_binary(principal),
    do: String.downcase(principal)

  defp normalize_list_principal(principal), do: principal

  defp maybe_put_opt(opts, _key, nil), do: opts
  defp maybe_put_opt(opts, key, value), do: Keyword.put(opts, key, value)

  defp normalize_capability(capability) do
    case capability do
      :sync_read -> {:ok, :sync_read}
      :sync_write -> {:ok, :sync_write}
      _other -> {:error, :invalid_acl_capability}
    end
  end

  # Reads the operator-configured protected filters and validates each
  # one; any invalid entry invalidates the whole configuration.
  defp protected_filters do
    filters =
      :parrhesia
      |> Application.get_env(:acl, [])
      |> Keyword.get(:protected_filters, [])

    if is_list(filters) and
         Enum.all?(filters, &(match?(%{}, &1) and Filter.validate_filter(&1) == :ok)) do
      {:ok, filters}
    else
      {:error, :invalid_protected_filters}
    end
  end

  defp comparable_filter_keys(left, right) do
    Map.keys(left)
    |> Kernel.++(Map.keys(right))
    |> Enum.uniq()
  end

  # Windows overlap when the later of the two lower bounds does not
  # exceed the earlier of the two upper bounds.
  defp filter_ranges_overlap?(left, right) do
    since = max(boundary_value(left, "since", :lower), boundary_value(right, "since", :lower))
    until = min(boundary_value(left, "until", :upper), boundary_value(right, "until", :upper))
    since <= until
  end

  # The requested window must be bounded at least as tightly as the
  # rule's window on each side the rule constrains.
  defp filter_range_within_rule?(filter, rule_match) do
    requested_since = Map.get(filter, "since")
    requested_until = Map.get(filter, "until")
    rule_since = Map.get(rule_match, "since")
    rule_until = Map.get(rule_match, "until")

    lower_ok? =
      is_nil(rule_since) or (is_integer(requested_since) and requested_since >= rule_since)

    upper_ok? =
      is_nil(rule_until) or (is_integer(requested_until) and requested_until <= rule_until)

    lower_ok? and upper_ok?
  end

  # Missing bounds are treated as 0 / max signed 64-bit integer.
  defp boundary_value(filter, key, :lower), do: Map.get(filter, key, 0)
  defp boundary_value(filter, key, :upper), do: Map.get(filter, key, 9_223_372_036_854_775_807)
end
|
||||
84
lib/parrhesia/api/admin.ex
Normal file
84
lib/parrhesia/api/admin.ex
Normal file
@@ -0,0 +1,84 @@
|
||||
defmodule Parrhesia.API.Admin do
  @moduledoc """
  Public management API facade.
  """

  alias Parrhesia.API.ACL
  alias Parrhesia.Storage

  @acl_methods ~w(acl_grant acl_revoke acl_list)

  @doc """
  Executes a management method by name, routing ACL methods locally and
  forwarding everything else to the storage admin adapter.
  """
  @spec execute(String.t() | atom(), map(), keyword()) :: {:ok, map()} | {:error, term()}
  def execute(method, params, opts \\ [])

  def execute(method, params, _opts) when is_map(params) do
    method
    |> method_name()
    |> dispatch(params)
  end

  def execute(method, _params, _opts), do: {:error, {:unsupported_method, method_name(method)}}

  @doc "Fetches storage-level statistics."
  @spec stats(keyword()) :: {:ok, map()} | {:error, term()}
  def stats(_opts \\ []), do: Storage.admin().execute(%{}, :stats, %{})

  @doc "Basic liveness check."
  @spec health(keyword()) :: {:ok, map()} | {:error, term()}
  def health(_opts \\ []), do: {:ok, %{"status" => "ok"}}

  @doc "Lists audit log entries from the storage admin adapter."
  @spec list_audit_logs(keyword()) :: {:ok, [map()]} | {:error, term()}
  def list_audit_logs(opts \\ []), do: Storage.admin().list_audit_logs(%{}, opts)

  # One clause per locally handled method; anything else goes to storage.
  defp dispatch("acl_grant", params) do
    case ACL.grant(params) do
      :ok -> {:ok, %{"ok" => true}}
      other -> other
    end
  end

  defp dispatch("acl_revoke", params) do
    case ACL.revoke(params) do
      :ok -> {:ok, %{"ok" => true}}
      other -> other
    end
  end

  defp dispatch("acl_list", params) do
    case ACL.list(list_opts(params)) do
      {:ok, rules} -> {:ok, %{"rules" => rules}}
      other -> other
    end
  end

  defp dispatch("supportedmethods", _params), do: {:ok, %{"methods" => supported_methods()}}

  defp dispatch(other_method, params), do: Storage.admin().execute(%{}, other_method, params)

  # Builds keyword opts for ACL.list/1 from whichever of the known
  # params are present (atom or string keys).
  defp list_opts(params) do
    Enum.reduce([:principal_type, :principal, :capability], [], fn key, acc ->
      case param(params, key) do
        nil -> acc
        value -> Keyword.put(acc, key, value)
      end
    end)
  end

  # Union of storage-reported methods and the locally handled ACL ones.
  defp supported_methods do
    storage_methods =
      case Storage.admin().execute(%{}, :supportedmethods, %{}) do
        {:ok, methods} when is_list(methods) -> methods
        {:ok, %{"methods" => methods}} when is_list(methods) -> methods
        _other -> []
      end

    Enum.sort(Enum.uniq(storage_methods ++ @acl_methods))
  end

  # Looks a key up under its atom form first, then its string form.
  defp param(params, key), do: Map.get(params, key) || Map.get(params, Atom.to_string(key))

  defp method_name(method) when is_atom(method), do: Atom.to_string(method)
  defp method_name(method) when is_binary(method), do: method
  defp method_name(method), do: inspect(method)
end
|
||||
28
lib/parrhesia/api/request_context.ex
Normal file
28
lib/parrhesia/api/request_context.ex
Normal file
@@ -0,0 +1,28 @@
|
||||
defmodule Parrhesia.API.RequestContext do
  @moduledoc """
  Shared request context used across API and policy surfaces.

  Carries the set of authenticated pubkeys plus caller/transport details
  and a free-form metadata map.
  """

  defstruct authenticated_pubkeys: MapSet.new(),
            actor: nil,
            caller: :local,
            remote_ip: nil,
            subscription_id: nil,
            peer_id: nil,
            metadata: %{}

  @type t :: %__MODULE__{
          authenticated_pubkeys: MapSet.t(String.t()),
          actor: term(),
          caller: atom(),
          remote_ip: String.t() | nil,
          subscription_id: String.t() | nil,
          peer_id: String.t() | nil,
          metadata: map()
        }

  @doc """
  Merges `metadata` into the context's metadata map; keys in `metadata`
  win over existing keys.
  """
  @spec put_metadata(t(), map()) :: t()
  def put_metadata(%__MODULE__{metadata: existing} = context, metadata) when is_map(metadata) do
    %{context | metadata: Map.merge(existing, metadata)}
  end
end
|
||||
68
lib/parrhesia/policy/connection_policy.ex
Normal file
68
lib/parrhesia/policy/connection_policy.ex
Normal file
@@ -0,0 +1,68 @@
|
||||
defmodule Parrhesia.Policy.ConnectionPolicy do
  @moduledoc """
  Connection/session-level policy checks shared by websocket and management entrypoints.
  """

  alias Parrhesia.Storage

  @doc """
  Rejects the connection when the remote IP is on the moderation block
  list.  Addresses that cannot be normalized are allowed.
  """
  @spec authorize_remote_ip(tuple() | String.t() | nil) :: :ok | {:error, :ip_blocked}
  def authorize_remote_ip(remote_ip) do
    remote_ip
    |> normalize_ip()
    |> check_ip_block()
  end

  @doc """
  When an allowlist is configured, the given pubkey must be on it.
  Without an allowlist every pubkey is allowed.
  """
  @spec authorize_authenticated_pubkey(String.t()) :: :ok | {:error, :pubkey_not_allowed}
  def authorize_authenticated_pubkey(pubkey) when is_binary(pubkey) do
    cond do
      not allowlist_active?() -> :ok
      pubkey_on_allowlist?(pubkey) -> :ok
      true -> {:error, :pubkey_not_allowed}
    end
  end

  @doc """
  When an allowlist is configured, at least one authenticated pubkey
  must be on it; an empty set is treated as unauthenticated.
  """
  @spec authorize_authenticated_pubkeys(MapSet.t(String.t())) ::
          :ok | {:error, :auth_required | :pubkey_not_allowed}
  def authorize_authenticated_pubkeys(authenticated_pubkeys) do
    if allowlist_active?() do
      check_pubkeys_against_allowlist(authenticated_pubkeys)
    else
      :ok
    end
  end

  # Unknown IPs pass; known IPs are checked against moderation storage.
  defp check_ip_block(nil), do: :ok

  defp check_ip_block(ip) do
    case Storage.moderation().ip_blocked?(%{}, ip) do
      {:ok, true} -> {:error, :ip_blocked}
      _other -> :ok
    end
  end

  defp check_pubkeys_against_allowlist(pubkeys) do
    cond do
      MapSet.size(pubkeys) == 0 ->
        {:error, :auth_required}

      Enum.any?(pubkeys, &(authorize_authenticated_pubkey(&1) == :ok)) ->
        :ok

      true ->
        {:error, :pubkey_not_allowed}
    end
  end

  defp pubkey_on_allowlist?(pubkey) do
    match?({:ok, true}, Storage.moderation().pubkey_allowed?(%{}, pubkey))
  end

  # The allowlist is only enforced when at least one entry exists.
  defp allowlist_active? do
    match?({:ok, true}, Storage.moderation().has_allowed_pubkeys?(%{}))
  end

  # Accepts nil, an already-formatted string, or an :inet 4/8-tuple;
  # everything else normalizes to nil (treated as "unknown").
  defp normalize_ip(nil), do: nil
  defp normalize_ip(remote_ip) when is_binary(remote_ip), do: remote_ip

  defp normalize_ip(remote_ip) when is_tuple(remote_ip) and tuple_size(remote_ip) in [4, 8] do
    remote_ip |> :inet.ntoa() |> to_string()
  end

  defp normalize_ip(_remote_ip), do: nil
end
|
||||
@@ -3,11 +3,17 @@ defmodule Parrhesia.Policy.EventPolicy do
|
||||
Write/read policy checks for relay operations.
|
||||
"""
|
||||
|
||||
alias Parrhesia.API.ACL
|
||||
alias Parrhesia.API.RequestContext
|
||||
alias Parrhesia.Policy.ConnectionPolicy
|
||||
alias Parrhesia.Storage
|
||||
|
||||
@type policy_error ::
|
||||
:auth_required
|
||||
| :pubkey_not_allowed
|
||||
| :restricted_giftwrap
|
||||
| :sync_read_not_allowed
|
||||
| :sync_write_not_allowed
|
||||
| :marmot_group_h_tag_required
|
||||
| :marmot_group_h_values_exceeded
|
||||
| :marmot_group_filter_window_too_wide
|
||||
@@ -33,15 +39,31 @@ defmodule Parrhesia.Policy.EventPolicy do
|
||||
|
||||
@spec authorize_read([map()], MapSet.t(String.t())) :: :ok | {:error, policy_error()}
|
||||
def authorize_read(filters, authenticated_pubkeys) when is_list(filters) do
|
||||
authorize_read(filters, authenticated_pubkeys, request_context(authenticated_pubkeys))
|
||||
end
|
||||
|
||||
@spec authorize_read([map()], MapSet.t(String.t()), RequestContext.t()) ::
|
||||
:ok | {:error, policy_error()}
|
||||
def authorize_read(filters, authenticated_pubkeys, %RequestContext{} = context)
|
||||
when is_list(filters) do
|
||||
auth_required? = config_bool([:policies, :auth_required_for_reads], false)
|
||||
|
||||
cond do
|
||||
match?(
|
||||
{:error, _reason},
|
||||
ConnectionPolicy.authorize_authenticated_pubkeys(authenticated_pubkeys)
|
||||
) ->
|
||||
ConnectionPolicy.authorize_authenticated_pubkeys(authenticated_pubkeys)
|
||||
|
||||
auth_required? and MapSet.size(authenticated_pubkeys) == 0 ->
|
||||
{:error, :auth_required}
|
||||
|
||||
giftwrap_restricted?(filters, authenticated_pubkeys) ->
|
||||
{:error, :restricted_giftwrap}
|
||||
|
||||
match?({:error, _reason}, authorize_sync_reads(filters, context)) ->
|
||||
authorize_sync_reads(filters, context)
|
||||
|
||||
true ->
|
||||
enforce_marmot_group_read_guardrails(filters)
|
||||
end
|
||||
@@ -49,8 +71,17 @@ defmodule Parrhesia.Policy.EventPolicy do
|
||||
|
||||
@spec authorize_write(map(), MapSet.t(String.t())) :: :ok | {:error, policy_error()}
|
||||
def authorize_write(event, authenticated_pubkeys) when is_map(event) do
|
||||
authorize_write(event, authenticated_pubkeys, request_context(authenticated_pubkeys))
|
||||
end
|
||||
|
||||
@spec authorize_write(map(), MapSet.t(String.t()), RequestContext.t()) ::
|
||||
:ok | {:error, policy_error()}
|
||||
def authorize_write(event, authenticated_pubkeys, %RequestContext{} = context)
|
||||
when is_map(event) do
|
||||
checks = [
|
||||
fn -> ConnectionPolicy.authorize_authenticated_pubkeys(authenticated_pubkeys) end,
|
||||
fn -> maybe_require_auth_for_write(authenticated_pubkeys) end,
|
||||
fn -> authorize_sync_write(event, context) end,
|
||||
fn -> reject_if_pubkey_banned(event) end,
|
||||
fn -> reject_if_event_banned(event) end,
|
||||
fn -> enforce_pow(event) end,
|
||||
@@ -69,10 +100,17 @@ defmodule Parrhesia.Policy.EventPolicy do
|
||||
|
||||
@spec error_message(policy_error()) :: String.t()
|
||||
def error_message(:auth_required), do: "auth-required: authentication required"
|
||||
def error_message(:pubkey_not_allowed), do: "restricted: authenticated pubkey is not allowed"
|
||||
|
||||
def error_message(:restricted_giftwrap),
|
||||
do: "restricted: giftwrap access requires recipient authentication"
|
||||
|
||||
def error_message(:sync_read_not_allowed),
|
||||
do: "restricted: sync read not allowed for authenticated pubkey"
|
||||
|
||||
def error_message(:sync_write_not_allowed),
|
||||
do: "restricted: sync write not allowed for authenticated pubkey"
|
||||
|
||||
def error_message(:marmot_group_h_tag_required),
|
||||
do: "restricted: kind 445 queries must include a #h tag"
|
||||
|
||||
@@ -143,6 +181,19 @@ defmodule Parrhesia.Policy.EventPolicy do
|
||||
end
|
||||
end
|
||||
|
||||
# Runs every filter through the sync-read ACL check and stops at the
# first denial; returns :ok only when all filters are allowed.
defp authorize_sync_reads(filters, %RequestContext{} = context) do
  Enum.find_value(filters, :ok, fn filter ->
    case ACL.check(:sync_read, filter, context: context) do
      :ok -> nil
      {:error, _reason} = error -> error
    end
  end)
end
|
||||
|
||||
# Delegates the write decision for a single event to the sync-write ACL.
defp authorize_sync_write(event, %RequestContext{} = context),
  do: ACL.check(:sync_write, event, context: context)
|
||||
|
||||
defp giftwrap_restricted?(filters, authenticated_pubkeys) do
|
||||
if MapSet.size(authenticated_pubkeys) == 0 do
|
||||
any_filter_targets_giftwrap?(filters)
|
||||
@@ -672,4 +723,8 @@ defmodule Parrhesia.Policy.EventPolicy do
|
||||
default
|
||||
end
|
||||
end
|
||||
|
||||
# Wraps a pubkey set in a bare request context for the 2-arity entrypoints.
defp request_context(pubkeys), do: %RequestContext{authenticated_pubkeys: pubkeys}
|
||||
end
|
||||
|
||||
@@ -8,6 +8,7 @@ defmodule Parrhesia.Storage do
|
||||
|
||||
@default_modules [
|
||||
events: Parrhesia.Storage.Adapters.Postgres.Events,
|
||||
acl: Parrhesia.Storage.Adapters.Postgres.ACL,
|
||||
moderation: Parrhesia.Storage.Adapters.Postgres.Moderation,
|
||||
groups: Parrhesia.Storage.Adapters.Postgres.Groups,
|
||||
admin: Parrhesia.Storage.Adapters.Postgres.Admin
|
||||
@@ -19,6 +20,9 @@ defmodule Parrhesia.Storage do
|
||||
@spec moderation() :: module()
|
||||
def moderation, do: fetch_module!(:moderation, Parrhesia.Storage.Moderation)
|
||||
|
||||
@spec acl() :: module()
# Resolves the module implementing ACL storage.
# NOTE(review): fetch_module!/2 is defined elsewhere in this module —
# presumably it reads the configured adapter and falls back per
# @default_modules; confirm the role of the second argument there.
def acl, do: fetch_module!(:acl, Parrhesia.Storage.ACL)
|
||||
|
||||
@spec groups() :: module()
|
||||
def groups, do: fetch_module!(:groups, Parrhesia.Storage.Groups)
|
||||
|
||||
|
||||
14
lib/parrhesia/storage/acl.ex
Normal file
14
lib/parrhesia/storage/acl.ex
Normal file
@@ -0,0 +1,14 @@
|
||||
defmodule Parrhesia.Storage.ACL do
  @moduledoc """
  Storage callbacks for persisted ACL rules.

  Implemented by the Memory and Postgres adapters, which persist rules of
  the shape `%{principal_type:, principal:, capability:, match:}`.
  """

  # Adapter-specific call context; call sites currently pass an empty map.
  @type context :: map()
  # A stored ACL rule map.
  @type rule :: map()
  # Listing filters (e.g. :principal_type, :principal, :capability).
  @type opts :: keyword()
  @type reason :: term()

  # Inserts a rule (adapters deduplicate identical rules).
  @callback put_rule(context(), rule()) :: {:ok, rule()} | {:error, reason()}
  # Deletes rules matching a selector (by :id or by exact rule fields).
  @callback delete_rule(context(), map()) :: :ok | {:error, reason()}
  # Lists rules, optionally narrowed by opts.
  @callback list_rules(context(), opts()) :: {:ok, [rule()]} | {:error, reason()}
end
|
||||
157
lib/parrhesia/storage/adapters/memory/acl.ex
Normal file
157
lib/parrhesia/storage/adapters/memory/acl.ex
Normal file
@@ -0,0 +1,157 @@
|
||||
defmodule Parrhesia.Storage.Adapters.Memory.ACL do
  @moduledoc """
  In-memory prototype adapter for `Parrhesia.Storage.ACL`.

  Rules live in the shared Memory.Store state under `:acl_rules`
  (newest-first list) with ids drawn from `:next_acl_rule_id`.
  """

  alias Parrhesia.Storage.Adapters.Memory.Store

  @behaviour Parrhesia.Storage.ACL

  @impl true
  # Normalizes the rule, then inserts it atomically unless an identical
  # rule already exists (in which case the stored copy is returned).
  def put_rule(_context, rule) when is_map(rule) do
    with {:ok, normalized_rule} <- normalize_rule(rule) do
      Store.get_and_update(fn state -> put_rule_in_state(state, normalized_rule) end)
    end
  end

  def put_rule(_context, _rule), do: {:error, :invalid_acl_rule}

  @impl true
  # Deletes by id or by exact normalized rule fields.  Returns :ok even
  # when no rule matched the selector.
  def delete_rule(_context, selector) when is_map(selector) do
    case normalize_delete_selector(selector) do
      {:ok, {:id, id}} ->
        Store.update(fn state ->
          %{state | acl_rules: Enum.reject(state.acl_rules, &(&1.id == id))}
        end)

        :ok

      {:ok, {:exact, rule}} ->
        Store.update(fn state ->
          %{state | acl_rules: Enum.reject(state.acl_rules, &same_rule?(&1, rule))}
        end)

        :ok

      {:error, reason} ->
        {:error, reason}
    end
  end

  def delete_rule(_context, _selector), do: {:error, :invalid_acl_rule}

  @impl true
  # Lists rules in insertion order (store keeps newest first, hence the
  # reverse), filtered by any combination of the three known opts.
  def list_rules(_context, opts) when is_list(opts) do
    rules =
      Store.get(fn state -> Enum.reverse(state.acl_rules) end)
      |> Enum.filter(fn rule ->
        matches_principal_type?(rule, Keyword.get(opts, :principal_type)) and
          matches_principal?(rule, Keyword.get(opts, :principal)) and
          matches_capability?(rule, Keyword.get(opts, :capability))
      end)

    {:ok, rules}
  end

  def list_rules(_context, _opts), do: {:error, :invalid_opts}

  # Deduplicating insert: identical rules reuse the stored copy; new
  # rules get the next monotonically increasing id.
  defp put_rule_in_state(state, normalized_rule) do
    case Enum.find(state.acl_rules, &same_rule?(&1, normalized_rule)) do
      nil ->
        next_id = state.next_acl_rule_id
        persisted_rule = Map.put(normalized_rule, :id, next_id)

        {{:ok, persisted_rule},
         %{
           state
           | acl_rules: [persisted_rule | state.acl_rules],
             next_acl_rule_id: next_id + 1
         }}

      existing_rule ->
        {{:ok, existing_rule}, state}
    end
  end

  # nil filter values mean "no constraint".
  defp matches_principal_type?(_rule, nil), do: true
  defp matches_principal_type?(rule, principal_type), do: rule.principal_type == principal_type

  defp matches_principal?(_rule, nil), do: true
  defp matches_principal?(rule, principal), do: rule.principal == principal

  defp matches_capability?(_rule, nil), do: true
  defp matches_capability?(rule, capability), do: rule.capability == capability

  # Rule identity ignores :id — two rules are "the same" when all
  # business fields agree.
  defp same_rule?(left, right) do
    left.principal_type == right.principal_type and
      left.principal == right.principal and
      left.capability == right.capability and
      left.match == right.match
  end

  defp normalize_delete_selector(%{"id" => id}), do: normalize_delete_selector(%{id: id})

  defp normalize_delete_selector(%{id: id}) when is_integer(id) and id > 0,
    do: {:ok, {:id, id}}

  # Anything that is not an id selector must normalize to a full rule,
  # which is then deleted by exact match.
  defp normalize_delete_selector(selector) do
    case normalize_rule(selector) do
      {:ok, rule} -> {:ok, {:exact, rule}}
      {:error, reason} -> {:error, reason}
    end
  end

  # Accepts atom or string keys/values and produces the canonical shape:
  # %{principal_type:, principal:, capability:, match:}.
  defp normalize_rule(rule) when is_map(rule) do
    with {:ok, principal_type} <- normalize_principal_type(fetch(rule, :principal_type)),
         {:ok, principal} <- normalize_principal(fetch(rule, :principal)),
         {:ok, capability} <- normalize_capability(fetch(rule, :capability)),
         {:ok, match} <- normalize_match(fetch(rule, :match)) do
      {:ok,
       %{
         principal_type: principal_type,
         principal: principal,
         capability: capability,
         match: match
       }}
    end
  end

  defp normalize_rule(_rule), do: {:error, :invalid_acl_rule}

  defp normalize_principal_type(:pubkey), do: {:ok, :pubkey}
  defp normalize_principal_type("pubkey"), do: {:ok, :pubkey}
  defp normalize_principal_type(_value), do: {:error, :invalid_acl_principal_type}

  # Principals are 64-byte binaries (presumably hex-encoded pubkeys),
  # stored lowercased for case-insensitive comparison.
  defp normalize_principal(value) when is_binary(value) and byte_size(value) == 64,
    do: {:ok, String.downcase(value)}

  defp normalize_principal(_value), do: {:error, :invalid_acl_principal}

  defp normalize_capability(:sync_read), do: {:ok, :sync_read}
  defp normalize_capability(:sync_write), do: {:ok, :sync_write}
  defp normalize_capability("sync_read"), do: {:ok, :sync_read}
  defp normalize_capability("sync_write"), do: {:ok, :sync_write}
  defp normalize_capability(_value), do: {:error, :invalid_acl_capability}

  # Stringifies atom keys in the match filter; entries with non-atom,
  # non-binary keys are silently dropped.
  defp normalize_match(match) when is_map(match) do
    normalized_match =
      Enum.reduce(match, %{}, fn
        {key, values}, acc when is_binary(key) ->
          Map.put(acc, key, values)

        {key, values}, acc when is_atom(key) ->
          Map.put(acc, Atom.to_string(key), values)

        _entry, acc ->
          acc
      end)

    {:ok, normalized_match}
  end

  defp normalize_match(_match), do: {:error, :invalid_acl_match}

  # Looks a field up under its atom key first, then its string key.
  defp fetch(map, key) do
    Map.get(map, key) || Map.get(map, Atom.to_string(key))
  end
end
|
||||
@@ -33,6 +33,11 @@ defmodule Parrhesia.Storage.Adapters.Memory.Moderation do
|
||||
{:ok, Store.get(fn state -> MapSet.member?(state.allowed_pubkeys, pubkey) end)}
|
||||
end
|
||||
|
||||
@impl true
# True when at least one pubkey is on the allowlist; callers (e.g.
# ConnectionPolicy.allowlist_active?/0) use this to decide whether
# allowlist enforcement is active at all.
def has_allowed_pubkeys?(_context) do
  {:ok, Store.get(fn state -> MapSet.size(state.allowed_pubkeys) > 0 end)}
end
|
||||
|
||||
@impl true
|
||||
def ban_event(_context, event_id), do: update_ban_set(:events, event_id, :add)
|
||||
|
||||
|
||||
@@ -10,6 +10,8 @@ defmodule Parrhesia.Storage.Adapters.Memory.Store do
|
||||
deleted: MapSet.new(),
|
||||
bans: %{pubkeys: MapSet.new(), events: MapSet.new(), ips: MapSet.new()},
|
||||
allowed_pubkeys: MapSet.new(),
|
||||
acl_rules: [],
|
||||
next_acl_rule_id: 1,
|
||||
groups: %{},
|
||||
roles: %{},
|
||||
audit_logs: []
|
||||
|
||||
273
lib/parrhesia/storage/adapters/postgres/acl.ex
Normal file
273
lib/parrhesia/storage/adapters/postgres/acl.ex
Normal file
@@ -0,0 +1,273 @@
|
||||
defmodule Parrhesia.Storage.Adapters.Postgres.ACL do
|
||||
@moduledoc """
|
||||
PostgreSQL-backed implementation for `Parrhesia.Storage.ACL`.
|
||||
"""
|
||||
|
||||
import Ecto.Query
|
||||
|
||||
alias Parrhesia.Repo
|
||||
|
||||
@behaviour Parrhesia.Storage.ACL
|
||||
|
||||
@impl true
|
||||
def put_rule(_context, rule) when is_map(rule) do
|
||||
with {:ok, normalized_rule} <- normalize_rule(rule) do
|
||||
normalized_rule
|
||||
|> find_matching_rule()
|
||||
|> maybe_insert_rule(normalized_rule)
|
||||
end
|
||||
end
|
||||
|
||||
def put_rule(_context, _rule), do: {:error, :invalid_acl_rule}
|
||||
|
||||
defp maybe_insert_rule(nil, normalized_rule), do: insert_rule(normalized_rule)
|
||||
defp maybe_insert_rule(existing_rule, _normalized_rule), do: {:ok, existing_rule}
|
||||
|
||||
@impl true
|
||||
def delete_rule(_context, selector) when is_map(selector) do
|
||||
case normalize_delete_selector(selector) do
|
||||
{:ok, {:id, id}} ->
|
||||
query = from(rule in "acl_rules", where: rule.id == ^id)
|
||||
{_deleted, _result} = Repo.delete_all(query)
|
||||
:ok
|
||||
|
||||
{:ok, {:exact, rule}} ->
|
||||
query =
|
||||
from(stored_rule in "acl_rules",
|
||||
where:
|
||||
stored_rule.principal_type == ^rule.principal_type and
|
||||
stored_rule.principal == ^rule.principal and
|
||||
stored_rule.capability == ^rule.capability and
|
||||
stored_rule.match == ^rule.match
|
||||
)
|
||||
|
||||
{_deleted, _result} = Repo.delete_all(query)
|
||||
:ok
|
||||
|
||||
{:error, reason} ->
|
||||
{:error, reason}
|
||||
end
|
||||
end
|
||||
|
||||
def delete_rule(_context, _selector), do: {:error, :invalid_acl_rule}
|
||||
|
||||
@impl true
# Lists stored ACL rules in a deterministic order, optionally narrowed by
# the :principal_type, :principal (hex or raw bytes) and :capability options.
# Always returns {:ok, rules}, each rule in the external shape produced by
# normalize_persisted_rule/1 (atoms for type/capability, hex principal).
def list_rules(_context, opts) when is_list(opts) do
  query =
    from(rule in "acl_rules",
      order_by: [
        asc: rule.principal_type,
        asc: rule.principal,
        asc: rule.capability,
        asc: rule.id
      ],
      select: %{
        id: rule.id,
        principal_type: rule.principal_type,
        principal: rule.principal,
        capability: rule.capability,
        match: rule.match,
        inserted_at: rule.inserted_at
      }
    )
    |> maybe_filter_principal_type(Keyword.get(opts, :principal_type))
    |> maybe_filter_principal(Keyword.get(opts, :principal))
    |> maybe_filter_capability(Keyword.get(opts, :capability))

  {:ok, Enum.map(Repo.all(query), &normalize_persisted_rule/1)}
end

# Options must be a keyword list.
def list_rules(_context, _opts), do: {:error, :invalid_opts}
|
||||
|
||||
# Narrows the listing query by principal_type when one is supplied.
# Atoms are stringified to match the stored column; nil or any unusable
# value leaves the query untouched so listing still succeeds.
defp maybe_filter_principal_type(query, principal_type) do
  cond do
    is_nil(principal_type) ->
      query

    is_atom(principal_type) ->
      maybe_filter_principal_type(query, Atom.to_string(principal_type))

    is_binary(principal_type) ->
      where(query, [rule], rule.principal_type == ^principal_type)

    true ->
      query
  end
end
|
||||
|
||||
# Narrows the listing query by principal. The value may be 32 raw bytes or
# 64 hex characters; a value that fails to decode yields a query that
# matches nothing (where false) rather than an error. nil and non-binary
# values leave the query untouched.
defp maybe_filter_principal(query, principal) when is_binary(principal) do
  case decode_hex_or_binary(principal, 32, :invalid_acl_principal) do
    {:ok, decoded_principal} -> where(query, [rule], rule.principal == ^decoded_principal)
    {:error, _reason} -> where(query, [rule], false)
  end
end

defp maybe_filter_principal(query, _principal), do: query
|
||||
|
||||
# Narrows the listing query by capability when one is supplied. Atom values
# are stringified to match the stored column; nil or any unusable value
# leaves the query untouched.
defp maybe_filter_capability(query, capability) do
  cond do
    is_nil(capability) ->
      query

    is_atom(capability) ->
      maybe_filter_capability(query, Atom.to_string(capability))

    is_binary(capability) ->
      where(query, [rule], rule.capability == ^capability)

    true ->
      query
  end
end
|
||||
|
||||
# Looks up a stored rule identical to `normalized_rule` on all four
# identifying columns (principal_type, principal, capability, match).
# Returns the rule in the external shape (normalize_persisted_rule/1) or
# nil when no such rule exists; put_rule/2 uses this to keep grants
# idempotent.
defp find_matching_rule(normalized_rule) do
  query =
    from(stored_rule in "acl_rules",
      where:
        stored_rule.principal_type == ^normalized_rule.principal_type and
          stored_rule.principal == ^normalized_rule.principal and
          stored_rule.capability == ^normalized_rule.capability and
          stored_rule.match == ^normalized_rule.match,
      limit: 1,
      select: %{
        id: stored_rule.id,
        principal_type: stored_rule.principal_type,
        principal: stored_rule.principal,
        capability: stored_rule.capability,
        match: stored_rule.match,
        inserted_at: stored_rule.inserted_at
      }
    )

  case Repo.one(query) do
    nil -> nil
    stored_rule -> normalize_persisted_rule(stored_rule)
  end
end
|
||||
|
||||
# Persists a normalized rule with a schemaless insert_all, reading back the
# generated id and timestamp. NOTE(review): `returning:` needs a
# returning-capable adapter (PostgreSQL) — consistent with this adapter.
# Returns {:ok, rule} in the external shape, or
# {:error, :acl_rule_insert_failed} when the insert did not affect exactly
# one row.
defp insert_rule(normalized_rule) do
  now = DateTime.utc_now() |> DateTime.truncate(:microsecond)

  row = %{
    principal_type: normalized_rule.principal_type,
    principal: normalized_rule.principal,
    capability: normalized_rule.capability,
    match: normalized_rule.match,
    inserted_at: now
  }

  case Repo.insert_all("acl_rules", [row], returning: [:id, :inserted_at]) do
    {1, [inserted_row]} ->
      # Merge the DB-generated columns over the row we sent before
      # converting to the external shape.
      {:ok, normalize_persisted_rule(Map.merge(row, Map.new(inserted_row)))}

    _other ->
      {:error, :acl_rule_insert_failed}
  end
end
|
||||
|
||||
# Converts a stored row into the externally visible rule shape:
# principal back to lowercase hex, type and capability back to atoms,
# match map cleaned up; id and inserted_at pass through unchanged.
defp normalize_persisted_rule(rule) do
  rule
  |> Map.take([:id, :inserted_at])
  |> Map.merge(%{
    principal_type: normalize_principal_type(rule.principal_type),
    principal: Base.encode16(rule.principal, case: :lower),
    capability: normalize_capability(rule.capability),
    match: normalize_match(rule.match)
  })
end
|
||||
|
||||
# Normalizes a delete selector: a positive integer id (atom or string key)
# wins; anything else must normalize as a full rule and is matched exactly.
defp normalize_delete_selector(%{"id" => id}), do: normalize_delete_selector(%{id: id})

defp normalize_delete_selector(%{id: id}) when is_integer(id) and id > 0,
  do: {:ok, {:id, id}}

defp normalize_delete_selector(selector) do
  # normalize_rule/1 errors fall through unchanged.
  with {:ok, normalized_rule} <- normalize_rule(selector) do
    {:ok, {:exact, normalized_rule}}
  end
end
|
||||
|
||||
# Validates and canonicalizes a user-supplied rule map (atom or string
# keys): principal_type and capability to their stored string forms, the
# principal to raw 32 bytes, the match map to string keys with de-duplicated
# list values. Returns {:ok, rule} or the first {:error, reason} hit.
defp normalize_rule(rule) when is_map(rule) do
  with {:ok, type} <- normalize_principal_type_value(fetch(rule, :principal_type)),
       {:ok, principal_bytes} <-
         decode_hex_or_binary(fetch(rule, :principal), 32, :invalid_acl_principal),
       {:ok, capability} <- normalize_capability_value(fetch(rule, :capability)),
       {:ok, match} <- normalize_match_value(fetch(rule, :match)) do
    {:ok, %{principal_type: type, principal: principal_bytes, capability: capability, match: match}}
  end
end

# Anything that is not a map cannot be a rule.
defp normalize_rule(_rule), do: {:error, :invalid_acl_rule}
|
||||
|
||||
# Maps the stored principal_type string back to its atom; unknown values
# pass through untouched.
defp normalize_principal_type(principal_type) do
  case principal_type do
    "pubkey" -> :pubkey
    other -> other
  end
end
|
||||
|
||||
# Maps the stored capability string back to its atom; unknown values pass
# through untouched.
defp normalize_capability(capability) do
  case capability do
    "sync_read" -> :sync_read
    "sync_write" -> :sync_write
    other -> other
  end
end
|
||||
|
||||
# Canonicalizes an input principal_type (atom or string) to the stored
# string form; only "pubkey" is a valid type today.
defp normalize_principal_type_value(value) when value in [:pubkey, "pubkey"], do: {:ok, "pubkey"}
defp normalize_principal_type_value(_principal_type), do: {:error, :invalid_acl_principal_type}
|
||||
|
||||
# Canonicalizes an input capability (atom or string) to the stored string
# form; only the sync_read / sync_write pair is valid.
defp normalize_capability_value(value) when value in [:sync_read, "sync_read"],
  do: {:ok, "sync_read"}

defp normalize_capability_value(value) when value in [:sync_write, "sync_write"],
  do: {:ok, "sync_write"}

defp normalize_capability_value(_capability), do: {:error, :invalid_acl_capability}
|
||||
|
||||
# Accepts a match map whose keys may be atoms or strings. Keys are
# stringified, entries with any other key type are dropped, and the result
# is run through normalize_match/1 (which also de-duplicates list values).
# Non-map input is rejected.
defp normalize_match_value(match) when is_map(match) do
  stringified =
    for {key, values} <- match, is_binary(key) or is_atom(key), into: %{} do
      if is_binary(key), do: {key, values}, else: {Atom.to_string(key), values}
    end

  {:ok, normalize_match(stringified)}
end

defp normalize_match_value(_match), do: {:error, :invalid_acl_match}
|
||||
|
||||
# Cleans a match map: keeps only string-keyed entries, de-duplicates list
# values, and leaves scalar values as-is. Anything that is not a map
# collapses to the empty (match-everything) map.
defp normalize_match(match) when is_map(match) do
  for {key, value} <- match, is_binary(key), into: %{} do
    if is_list(value), do: {key, Enum.uniq(value)}, else: {key, value}
  end
end

defp normalize_match(_match), do: %{}
|
||||
|
||||
# Reads `key` from a map that may use atom or string keys, preferring the
# atom key. Note: `||` semantics are preserved — a falsy (false/nil) atom
# value falls through to the string key.
defp fetch(map, key) do
  atom_value = Map.get(map, key)
  if atom_value, do: atom_value, else: Map.get(map, Atom.to_string(key))
end
|
||||
|
||||
# Accepts a value that is either exactly `expected_bytes` raw bytes (taken
# as-is) or exactly twice that many hex characters (decoded, mixed case
# allowed). Everything else — wrong length, bad hex, non-binary — yields
# {:error, reason} with the caller-supplied reason.
defp decode_hex_or_binary(value, expected_bytes, reason) when is_binary(value) do
  cond do
    byte_size(value) == expected_bytes ->
      {:ok, value}

    byte_size(value) == expected_bytes * 2 ->
      case Base.decode16(value, case: :mixed) do
        {:ok, decoded} -> {:ok, decoded}
        :error -> {:error, reason}
      end

    true ->
      {:error, reason}
  end
end

defp decode_hex_or_binary(_value, _expected_bytes, reason), do: {:error, reason}
|
||||
end
|
||||
@@ -20,6 +20,7 @@ defmodule Parrhesia.Storage.Adapters.Postgres.Admin do
|
||||
case method_name do
|
||||
"ping" -> {:ok, %{"status" => "ok"}}
|
||||
"stats" -> {:ok, relay_stats()}
|
||||
"supportedmethods" -> {:ok, %{"methods" => supported_methods()}}
|
||||
"list_audit_logs" -> list_audit_logs(%{}, audit_list_opts(params))
|
||||
_other -> execute_moderation_method(moderation, method_name, params)
|
||||
end
|
||||
@@ -84,15 +85,36 @@ defmodule Parrhesia.Storage.Adapters.Postgres.Admin do
|
||||
# Aggregates row counts for the admin "stats" method, including the ACL
# rule count added with ACL management support.
defp relay_stats do
  count = fn table, field -> Repo.aggregate(table, :count, field) end

  %{
    "events" => count.("events", :id),
    "banned_pubkeys" => count.("banned_pubkeys", :pubkey),
    "allowed_pubkeys" => count.("allowed_pubkeys", :pubkey),
    "blocked_ips" => count.("blocked_ips", :ip),
    "acl_rules" => count.("acl_rules", :id)
  }
end
|
||||
|
||||
# Alphabetical list of admin methods advertised by "supportedmethods".
defp supported_methods do
  ~w(
    allow_pubkey
    ban_event
    ban_pubkey
    block_ip
    disallow_pubkey
    list_audit_logs
    ping
    stats
    supportedmethods
    unban_event
    unban_pubkey
    unblock_ip
  )
end
|
||||
|
||||
defp execute_moderation_method(moderation, "ban_pubkey", params),
|
||||
do: execute_pubkey_method(fn ctx, value -> moderation.ban_pubkey(ctx, value) end, params)
|
||||
|
||||
|
||||
@@ -67,6 +67,11 @@ defmodule Parrhesia.Storage.Adapters.Postgres.Moderation do
|
||||
end
|
||||
end
|
||||
|
||||
@impl true
# Reports whether at least one allowed-pubkey entry exists, via the
# cached/DB-backed scope check. Always returns {:ok, boolean}.
def has_allowed_pubkeys?(_context) do
  {:ok, scope_populated?(:allowed_pubkeys)}
end
|
||||
|
||||
@impl true
|
||||
def ban_event(_context, event_id) do
|
||||
with {:ok, normalized_event_id} <- normalize_hex_or_binary(event_id, 32, :invalid_event_id),
|
||||
@@ -163,6 +168,24 @@ defmodule Parrhesia.Storage.Adapters.Postgres.Moderation do
|
||||
end
|
||||
end
|
||||
|
||||
# True when the given moderation scope (e.g. :allowed_pubkeys) has at least
# one member. Prefers the ETS moderation cache when enabled and its table is
# up: the scope is lazily loaded, then `{:member, scope, _} => true` entries
# are counted. Falls back to a LIMIT-1 database probe when the cache is
# disabled or the table is :undefined.
defp scope_populated?(scope) do
  {table, field} = cache_scope_source!(scope)

  if moderation_cache_enabled?() do
    case cache_table_ref() do
      :undefined ->
        # Cache enabled but table not (yet) available — go to the DB.
        scope_populated_db?(table, field)

      cache_table ->
        ensure_cache_scope_loaded(scope, cache_table)

        # Match-spec count of cached members for this scope only.
        :ets.select_count(cache_table, [{{{:member, scope, :_}, true}, [], [true]}]) > 0
    end
  else
    scope_populated_db?(table, field)
  end
end
|
||||
|
||||
defp ensure_cache_scope_loaded(scope, table) do
|
||||
loaded_key = cache_loaded_key(scope)
|
||||
|
||||
@@ -246,6 +269,16 @@ defmodule Parrhesia.Storage.Adapters.Postgres.Moderation do
|
||||
Repo.one(query) == 1
|
||||
end
|
||||
|
||||
# Database fallback: true when `table` has at least one row, probed by
# selecting a single `field` value with LIMIT 1.
# NOTE(review): a row whose `field` is NULL would read as "empty" — fine as
# long as these are key columns; confirm if nullable fields are ever used.
defp scope_populated_db?(table, field) do
  query =
    from(record in table,
      select: field(record, ^field),
      limit: 1
    )

  not is_nil(Repo.one(query))
end
|
||||
|
||||
defp normalize_hex_or_binary(value, expected_bytes, _reason)
|
||||
when is_binary(value) and byte_size(value) == expected_bytes,
|
||||
do: {:ok, value}
|
||||
|
||||
@@ -16,6 +16,7 @@ defmodule Parrhesia.Storage.Moderation do
|
||||
@callback allow_pubkey(context(), pubkey()) :: :ok | {:error, reason()}
|
||||
@callback disallow_pubkey(context(), pubkey()) :: :ok | {:error, reason()}
|
||||
@callback pubkey_allowed?(context(), pubkey()) :: {:ok, boolean()} | {:error, reason()}
|
||||
@callback has_allowed_pubkeys?(context()) :: {:ok, boolean()} | {:error, reason()}
|
||||
|
||||
@callback ban_event(context(), event_id()) :: :ok | {:error, reason()}
|
||||
@callback unban_event(context(), event_id()) :: :ok | {:error, reason()}
|
||||
|
||||
@@ -21,6 +21,9 @@ defmodule Parrhesia.TestSupport.PermissiveModeration do
|
||||
@impl true
|
||||
def pubkey_allowed?(_context, _pubkey), do: {:ok, true}
|
||||
|
||||
@impl true
# Permissive test double: report an empty allow-list so nothing is gated.
def has_allowed_pubkeys?(_context), do: {:ok, false}
|
||||
|
||||
@impl true
|
||||
def ban_event(_context, _event_id), do: :ok
|
||||
|
||||
|
||||
@@ -5,10 +5,12 @@ defmodule Parrhesia.Web.Connection do
|
||||
|
||||
@behaviour WebSock
|
||||
|
||||
alias Parrhesia.API.RequestContext
|
||||
alias Parrhesia.Auth.Challenges
|
||||
alias Parrhesia.Fanout.MultiNode
|
||||
alias Parrhesia.Groups.Flow
|
||||
alias Parrhesia.Negentropy.Sessions
|
||||
alias Parrhesia.Policy.ConnectionPolicy
|
||||
alias Parrhesia.Policy.EventPolicy
|
||||
alias Parrhesia.Protocol
|
||||
alias Parrhesia.Protocol.Filter
|
||||
@@ -49,6 +51,7 @@ defmodule Parrhesia.Web.Connection do
|
||||
auth_challenges: Challenges,
|
||||
auth_challenge: nil,
|
||||
relay_url: nil,
|
||||
remote_ip: nil,
|
||||
negentropy_sessions: Sessions,
|
||||
outbound_queue: :queue.new(),
|
||||
outbound_queue_size: 0,
|
||||
@@ -79,6 +82,7 @@ defmodule Parrhesia.Web.Connection do
|
||||
auth_challenges: GenServer.server() | nil,
|
||||
auth_challenge: String.t() | nil,
|
||||
relay_url: String.t() | nil,
|
||||
remote_ip: String.t() | nil,
|
||||
negentropy_sessions: GenServer.server() | nil,
|
||||
outbound_queue: :queue.queue({String.t(), map()}),
|
||||
outbound_queue_size: non_neg_integer(),
|
||||
@@ -105,6 +109,7 @@ defmodule Parrhesia.Web.Connection do
|
||||
auth_challenges: auth_challenges,
|
||||
auth_challenge: maybe_issue_auth_challenge(auth_challenges),
|
||||
relay_url: relay_url(opts),
|
||||
remote_ip: remote_ip(opts),
|
||||
negentropy_sessions: negentropy_sessions(opts),
|
||||
max_outbound_queue: max_outbound_queue(opts),
|
||||
outbound_overflow_strategy: outbound_overflow_strategy(opts),
|
||||
@@ -230,7 +235,12 @@ defmodule Parrhesia.Web.Connection do
|
||||
result =
|
||||
with :ok <- validate_event_payload_size(event, next_state.max_event_bytes),
|
||||
:ok <- Protocol.validate_event(event),
|
||||
:ok <- EventPolicy.authorize_write(event, next_state.authenticated_pubkeys),
|
||||
:ok <-
|
||||
EventPolicy.authorize_write(
|
||||
event,
|
||||
next_state.authenticated_pubkeys,
|
||||
request_context(next_state)
|
||||
),
|
||||
:ok <- maybe_process_group_event(event),
|
||||
{:ok, _result, message} <- persist_event(event) do
|
||||
{:ok, message}
|
||||
@@ -286,7 +296,12 @@ defmodule Parrhesia.Web.Connection do
|
||||
started_at = System.monotonic_time()
|
||||
|
||||
with :ok <- Filter.validate_filters(filters),
|
||||
:ok <- EventPolicy.authorize_read(filters, state.authenticated_pubkeys),
|
||||
:ok <-
|
||||
EventPolicy.authorize_read(
|
||||
filters,
|
||||
state.authenticated_pubkeys,
|
||||
request_context(state, subscription_id)
|
||||
),
|
||||
{:ok, next_state} <- upsert_subscription(state, subscription_id, filters),
|
||||
:ok <- maybe_upsert_index_subscription(next_state, subscription_id, filters),
|
||||
{:ok, events} <- query_initial_events(filters, state.authenticated_pubkeys) do
|
||||
@@ -306,9 +321,19 @@ defmodule Parrhesia.Web.Connection do
|
||||
{:error, :auth_required} ->
|
||||
restricted_close(state, subscription_id, EventPolicy.error_message(:auth_required))
|
||||
|
||||
{:error, :pubkey_not_allowed} ->
|
||||
restricted_close(state, subscription_id, EventPolicy.error_message(:pubkey_not_allowed))
|
||||
|
||||
{:error, :restricted_giftwrap} ->
|
||||
restricted_close(state, subscription_id, EventPolicy.error_message(:restricted_giftwrap))
|
||||
|
||||
{:error, :sync_read_not_allowed} ->
|
||||
restricted_close(
|
||||
state,
|
||||
subscription_id,
|
||||
EventPolicy.error_message(:sync_read_not_allowed)
|
||||
)
|
||||
|
||||
{:error, :marmot_group_h_tag_required} ->
|
||||
restricted_close(
|
||||
state,
|
||||
@@ -374,7 +399,12 @@ defmodule Parrhesia.Web.Connection do
|
||||
started_at = System.monotonic_time()
|
||||
|
||||
with :ok <- Filter.validate_filters(filters),
|
||||
:ok <- EventPolicy.authorize_read(filters, state.authenticated_pubkeys),
|
||||
:ok <-
|
||||
EventPolicy.authorize_read(
|
||||
filters,
|
||||
state.authenticated_pubkeys,
|
||||
request_context(state, subscription_id)
|
||||
),
|
||||
{:ok, count} <- count_events(filters, state.authenticated_pubkeys),
|
||||
{:ok, payload} <- build_count_payload(filters, count, options) do
|
||||
Telemetry.emit(
|
||||
@@ -389,6 +419,13 @@ defmodule Parrhesia.Web.Connection do
|
||||
{:error, :auth_required} ->
|
||||
restricted_count_notice(state, subscription_id, EventPolicy.error_message(:auth_required))
|
||||
|
||||
{:error, :pubkey_not_allowed} ->
|
||||
restricted_count_notice(
|
||||
state,
|
||||
subscription_id,
|
||||
EventPolicy.error_message(:pubkey_not_allowed)
|
||||
)
|
||||
|
||||
{:error, :restricted_giftwrap} ->
|
||||
restricted_count_notice(
|
||||
state,
|
||||
@@ -396,6 +433,13 @@ defmodule Parrhesia.Web.Connection do
|
||||
EventPolicy.error_message(:restricted_giftwrap)
|
||||
)
|
||||
|
||||
{:error, :sync_read_not_allowed} ->
|
||||
restricted_count_notice(
|
||||
state,
|
||||
subscription_id,
|
||||
EventPolicy.error_message(:sync_read_not_allowed)
|
||||
)
|
||||
|
||||
{:error, :marmot_group_h_tag_required} ->
|
||||
restricted_count_notice(
|
||||
state,
|
||||
@@ -428,7 +472,8 @@ defmodule Parrhesia.Web.Connection do
|
||||
|
||||
with :ok <- Protocol.validate_event(auth_event),
|
||||
:ok <- validate_auth_event(state, auth_event),
|
||||
:ok <- validate_auth_challenge(state, auth_event) do
|
||||
:ok <- validate_auth_challenge(state, auth_event),
|
||||
:ok <- authorize_authenticated_pubkey(auth_event) do
|
||||
pubkey = Map.get(auth_event, "pubkey")
|
||||
|
||||
next_state =
|
||||
@@ -449,7 +494,12 @@ defmodule Parrhesia.Web.Connection do
|
||||
|
||||
defp handle_neg_open(%__MODULE__{} = state, subscription_id, filter, message) do
|
||||
with :ok <- Filter.validate_filters([filter]),
|
||||
:ok <- EventPolicy.authorize_read([filter], state.authenticated_pubkeys),
|
||||
:ok <-
|
||||
EventPolicy.authorize_read(
|
||||
[filter],
|
||||
state.authenticated_pubkeys,
|
||||
request_context(state, subscription_id)
|
||||
),
|
||||
{:ok, response_message} <-
|
||||
maybe_open_negentropy(state, subscription_id, filter, message) do
|
||||
response =
|
||||
@@ -545,7 +595,9 @@ defmodule Parrhesia.Web.Connection do
|
||||
defp error_message_for_ingest_failure(reason)
|
||||
when reason in [
|
||||
:auth_required,
|
||||
:pubkey_not_allowed,
|
||||
:restricted_giftwrap,
|
||||
:sync_write_not_allowed,
|
||||
:protected_event_requires_auth,
|
||||
:protected_event_pubkey_mismatch,
|
||||
:pow_below_minimum,
|
||||
@@ -702,7 +754,9 @@ defmodule Parrhesia.Web.Connection do
|
||||
:invalid_search,
|
||||
:invalid_tag_filter,
|
||||
:auth_required,
|
||||
:pubkey_not_allowed,
|
||||
:restricted_giftwrap,
|
||||
:sync_read_not_allowed,
|
||||
:marmot_group_h_tag_required,
|
||||
:marmot_group_h_values_exceeded,
|
||||
:marmot_group_filter_window_too_wide
|
||||
@@ -829,6 +883,7 @@ defmodule Parrhesia.Web.Connection do
|
||||
defp auth_error_message(:auth_event_too_old), do: "invalid: AUTH event is too old"
|
||||
defp auth_error_message(:challenge_mismatch), do: "invalid: AUTH challenge mismatch"
|
||||
defp auth_error_message(:missing_challenge), do: "invalid: AUTH challenge unavailable"
|
||||
defp auth_error_message(:pubkey_not_allowed), do: EventPolicy.error_message(:pubkey_not_allowed)
|
||||
defp auth_error_message(reason) when is_binary(reason), do: reason
|
||||
defp auth_error_message(reason), do: "invalid: #{inspect(reason)}"
|
||||
|
||||
@@ -1422,6 +1477,23 @@ defmodule Parrhesia.Web.Connection do
|
||||
|> normalize_relay_url()
|
||||
end
|
||||
|
||||
# Reads :remote_ip from keyword-list or map opts and normalizes it; any
# other opts shape yields nil.
defp remote_ip(opts) when is_list(opts),
  do: normalize_remote_ip(Keyword.get(opts, :remote_ip))

defp remote_ip(opts) when is_map(opts),
  do: normalize_remote_ip(Map.get(opts, :remote_ip))

defp remote_ip(_opts), do: nil

# Only a non-empty string counts as a usable remote IP.
defp normalize_remote_ip(remote_ip) do
  if is_binary(remote_ip) and remote_ip != "", do: remote_ip, else: nil
end
|
||||
|
||||
defp max_frame_bytes(opts) when is_list(opts) do
|
||||
opts
|
||||
|> Keyword.get(:max_frame_bytes)
|
||||
@@ -1542,6 +1614,21 @@ defmodule Parrhesia.Web.Connection do
|
||||
|> Keyword.get(:auth_max_age_seconds, @default_auth_max_age_seconds)
|
||||
end
|
||||
|
||||
# Builds the per-request context passed to the ACL-aware policy checks
# (authorize_read/authorize_write): the pubkeys authenticated on this
# socket, the websocket caller tag, the client IP captured at upgrade, and
# — for REQ/COUNT/NEG-OPEN — the subscription id (nil otherwise).
defp request_context(%__MODULE__{} = state, subscription_id \\ nil) do
  %RequestContext{
    authenticated_pubkeys: state.authenticated_pubkeys,
    caller: :websocket,
    remote_ip: state.remote_ip,
    subscription_id: subscription_id
  }
end
|
||||
|
||||
# Asks ConnectionPolicy whether this pubkey may authenticate on the
# connection; an AUTH event without a binary "pubkey" field is invalid.
defp authorize_authenticated_pubkey(%{"pubkey" => pubkey}) when is_binary(pubkey) do
  ConnectionPolicy.authorize_authenticated_pubkey(pubkey)
end

defp authorize_authenticated_pubkey(_auth_event), do: {:error, :invalid_event}
|
||||
|
||||
defp maybe_allow_event_ingest(
|
||||
%__MODULE__{
|
||||
event_ingest_window_started_at_ms: window_started_at_ms,
|
||||
|
||||
@@ -5,8 +5,8 @@ defmodule Parrhesia.Web.Management do
|
||||
|
||||
import Plug.Conn
|
||||
|
||||
alias Parrhesia.API.Admin
|
||||
alias Parrhesia.Auth.Nip98
|
||||
alias Parrhesia.Storage
|
||||
|
||||
@spec handle(Plug.Conn.t()) :: Plug.Conn.t()
|
||||
def handle(conn) do
|
||||
@@ -59,11 +59,11 @@ defmodule Parrhesia.Web.Management do
|
||||
defp parse_payload(_payload), do: {:error, :invalid_payload}
|
||||
|
||||
defp execute_method(payload) do
|
||||
Storage.admin().execute(%{}, payload.method, payload.params)
|
||||
Admin.execute(payload.method, payload.params)
|
||||
end
|
||||
|
||||
defp append_audit_log(auth_event, payload, result) do
|
||||
Storage.admin().append_audit_log(%{}, %{
|
||||
Parrhesia.Storage.admin().append_audit_log(%{}, %{
|
||||
method: payload.method,
|
||||
actor_pubkey: Map.get(auth_event, "pubkey"),
|
||||
params: payload.params,
|
||||
|
||||
@@ -3,6 +3,7 @@ defmodule Parrhesia.Web.Router do
|
||||
|
||||
use Plug.Router
|
||||
|
||||
alias Parrhesia.Policy.ConnectionPolicy
|
||||
alias Parrhesia.Web.Management
|
||||
alias Parrhesia.Web.Metrics
|
||||
alias Parrhesia.Web.Readiness
|
||||
@@ -38,25 +39,34 @@ defmodule Parrhesia.Web.Router do
|
||||
end
|
||||
|
||||
post "/management" do
|
||||
Management.handle(conn)
|
||||
case ConnectionPolicy.authorize_remote_ip(conn.remote_ip) do
|
||||
:ok -> Management.handle(conn)
|
||||
{:error, :ip_blocked} -> send_resp(conn, 403, "forbidden")
|
||||
end
|
||||
end
|
||||
|
||||
get "/relay" do
|
||||
if accepts_nip11?(conn) do
|
||||
body = JSON.encode!(RelayInfo.document())
|
||||
case ConnectionPolicy.authorize_remote_ip(conn.remote_ip) do
|
||||
:ok ->
|
||||
if accepts_nip11?(conn) do
|
||||
body = JSON.encode!(RelayInfo.document())
|
||||
|
||||
conn
|
||||
|> put_resp_content_type("application/nostr+json")
|
||||
|> send_resp(200, body)
|
||||
else
|
||||
conn
|
||||
|> WebSockAdapter.upgrade(
|
||||
Parrhesia.Web.Connection,
|
||||
%{relay_url: relay_url(conn)},
|
||||
timeout: 60_000,
|
||||
max_frame_size: max_frame_bytes()
|
||||
)
|
||||
|> halt()
|
||||
conn
|
||||
|> put_resp_content_type("application/nostr+json")
|
||||
|> send_resp(200, body)
|
||||
else
|
||||
conn
|
||||
|> WebSockAdapter.upgrade(
|
||||
Parrhesia.Web.Connection,
|
||||
%{relay_url: relay_url(conn), remote_ip: remote_ip(conn)},
|
||||
timeout: 60_000,
|
||||
max_frame_size: max_frame_bytes()
|
||||
)
|
||||
|> halt()
|
||||
end
|
||||
|
||||
{:error, :ip_blocked} ->
|
||||
send_resp(conn, 403, "forbidden")
|
||||
end
|
||||
end
|
||||
|
||||
@@ -90,4 +100,12 @@ defmodule Parrhesia.Web.Router do
|
||||
# Upper bound for a single websocket frame, configurable under
# [:limits, :max_frame_bytes]; defaults to 1 MiB.
defp max_frame_bytes do
  Parrhesia.Config.get([:limits, :max_frame_bytes], 1_048_576)
end
|
||||
|
||||
# Formats the Plug connection's remote address (IPv4 4-tuple or IPv6
# 8-tuple) as a string; anything else yields nil.
defp remote_ip(conn) do
  case conn.remote_ip do
    address when is_tuple(address) and tuple_size(address) in [4, 8] ->
      address |> :inet.ntoa() |> to_string()

    _other ->
      nil
  end
end
|
||||
end
|
||||
|
||||
Reference in New Issue
Block a user