Implement relay auth, management, lifecycle and hardening phases

This commit is contained in:
2026-03-13 21:31:34 +01:00
parent 3b66ed1996
commit d24d5885a0
57 changed files with 3383 additions and 257 deletions

View File

@@ -0,0 +1,111 @@
defmodule Parrhesia.Auth.Challenges do
  @moduledoc """
  Connection-scoped NIP-42 challenge storage.

  Holds at most one outstanding challenge per owner process. Each owner is
  monitored so its entry is dropped automatically when the process exits.
  """
  use GenServer

  @type challenge :: String.t()

  ## Client API

  @spec start_link(keyword()) :: GenServer.on_start()
  def start_link(opts \\ []) do
    GenServer.start_link(__MODULE__, :ok, name: Keyword.get(opts, :name, __MODULE__))
  end

  @doc "Issues (and stores) a fresh challenge for `owner_pid`."
  @spec issue(pid()) :: challenge()
  def issue(owner_pid), do: issue(__MODULE__, owner_pid)

  @spec issue(GenServer.server(), pid()) :: challenge()
  def issue(server, owner_pid) when is_pid(owner_pid),
    do: GenServer.call(server, {:issue, owner_pid})

  @doc "Returns the outstanding challenge for `owner_pid`, or `nil`."
  @spec current(pid()) :: challenge() | nil
  def current(owner_pid), do: current(__MODULE__, owner_pid)

  @spec current(GenServer.server(), pid()) :: challenge() | nil
  def current(server, owner_pid) when is_pid(owner_pid),
    do: GenServer.call(server, {:current, owner_pid})

  @doc "Checks `challenge` against the one stored for `owner_pid`."
  @spec valid?(pid(), challenge()) :: boolean()
  def valid?(owner_pid, challenge), do: valid?(__MODULE__, owner_pid, challenge)

  @spec valid?(GenServer.server(), pid(), challenge()) :: boolean()
  def valid?(server, owner_pid, challenge) when is_pid(owner_pid) and is_binary(challenge),
    do: GenServer.call(server, {:valid?, owner_pid, challenge})

  @doc "Discards any stored challenge for `owner_pid`."
  @spec clear(pid()) :: :ok
  def clear(owner_pid), do: clear(__MODULE__, owner_pid)

  @spec clear(GenServer.server(), pid()) :: :ok
  def clear(server, owner_pid) when is_pid(owner_pid),
    do: GenServer.call(server, {:clear, owner_pid})

  ## Server callbacks

  @impl true
  def init(:ok), do: {:ok, %{entries: %{}, monitors: %{}}}

  @impl true
  def handle_call({:issue, owner_pid}, _from, state) do
    challenge = random_challenge()

    state =
      state
      |> ensure_monitor(owner_pid)
      |> store_entry(owner_pid, challenge)

    {:reply, challenge, state}
  end

  def handle_call({:current, owner_pid}, _from, state),
    do: {:reply, Map.get(state.entries, owner_pid), state}

  def handle_call({:valid?, owner_pid, challenge}, _from, state),
    do: {:reply, Map.get(state.entries, owner_pid) == challenge, state}

  def handle_call({:clear, owner_pid}, _from, state),
    do: {:reply, :ok, remove_owner(state, owner_pid)}

  # Drop the owner's entry when its monitored process exits; a :DOWN whose
  # ref does not match the recorded monitor is stale and ignored.
  @impl true
  def handle_info({:DOWN, monitor_ref, :process, owner_pid, _reason}, state) do
    if Map.get(state.monitors, owner_pid) == monitor_ref do
      {:noreply, remove_owner(state, owner_pid)}
    else
      {:noreply, state}
    end
  end

  def handle_info(_message, state), do: {:noreply, state}

  ## Helpers

  defp store_entry(state, owner_pid, challenge),
    do: %{state | entries: Map.put(state.entries, owner_pid, challenge)}

  # Monitor each owner exactly once.
  defp ensure_monitor(state, owner_pid) do
    if Map.has_key?(state.monitors, owner_pid) do
      state
    else
      %{state | monitors: Map.put(state.monitors, owner_pid, Process.monitor(owner_pid))}
    end
  end

  # Demonitor (flushing any queued :DOWN) and forget the owner's entry.
  defp remove_owner(state, owner_pid) do
    {monitor_ref, monitors} = Map.pop(state.monitors, owner_pid)
    if is_reference(monitor_ref), do: Process.demonitor(monitor_ref, [:flush])
    %{state | monitors: monitors, entries: Map.delete(state.entries, owner_pid)}
  end

  # 128 bits of randomness, URL-safe base64 without padding.
  defp random_challenge do
    Base.url_encode64(:crypto.strong_rand_bytes(16), padding: false)
  end
end

View File

@@ -0,0 +1,88 @@
defmodule Parrhesia.Auth.Nip98 do
  @moduledoc """
  Minimal NIP-98 HTTP auth validation.

  Validates an `Authorization: Nostr <base64-event>` header by decoding the
  embedded kind-27235 event, checking its shape and freshness, and verifying
  that its `method`/`u` tags match the request being authorized.
  """

  alias Parrhesia.Protocol.EventValidator

  # Maximum allowed age/clock skew (seconds) of the event's `created_at`.
  @max_age_seconds 60

  @doc """
  Validates a raw `Authorization` header value against the request's
  `method` and `url`. Returns `{:ok, event}` or `{:error, atom_reason}`.
  """
  @spec validate_authorization_header(String.t() | nil, String.t(), String.t()) ::
          {:ok, map()} | {:error, atom()}
  def validate_authorization_header(nil, _method, _url), do: {:error, :missing_authorization}

  def validate_authorization_header("Nostr " <> encoded_event, method, url)
      when is_binary(method) and is_binary(url) do
    with {:ok, event_json} <- decode_base64(encoded_event),
         {:ok, event} <- Jason.decode(event_json),
         :ok <- validate_event_shape(event),
         :ok <- validate_http_binding(event, method, url) do
      {:ok, event}
    else
      # Only pass through our own atom reasons. Anything else (e.g. the
      # `{:error, %Jason.DecodeError{}}` returned by `Jason.decode/1`) is
      # normalized so the `{:error, atom()}` contract in the spec holds;
      # previously the struct leaked out of this function.
      {:error, reason} when is_atom(reason) -> {:error, reason}
      _other -> {:error, :invalid_authorization}
    end
  end

  def validate_authorization_header(_header, _method, _url), do: {:error, :invalid_authorization}

  # Accepts standard base64 first, then URL-safe unpadded base64 as fallback.
  defp decode_base64(encoded_event) do
    case Base.decode64(encoded_event) do
      {:ok, event_json} -> {:ok, event_json}
      :error -> Base.url_decode64(encoded_event, padding: false)
    end
  end

  defp validate_event_shape(event) when is_map(event) do
    with :ok <- EventValidator.validate(event),
         :ok <- validate_kind(event),
         :ok <- validate_fresh_created_at(event) do
      :ok
    else
      # Preserve :stale_event (previously collapsed into :invalid_event,
      # which made that reason unreachable); everything else stays
      # :invalid_event. The old dead `:ok -> :ok` else clause is removed.
      {:error, :stale_event} -> {:error, :stale_event}
      _other -> {:error, :invalid_event}
    end
  end

  defp validate_event_shape(_event), do: {:error, :invalid_event}

  # NIP-98 events must use kind 27235.
  defp validate_kind(%{"kind" => 27_235}), do: :ok
  defp validate_kind(_event), do: {:error, :invalid_event}

  defp validate_fresh_created_at(%{"created_at" => created_at}) when is_integer(created_at) do
    now = System.system_time(:second)

    if abs(now - created_at) <= @max_age_seconds do
      :ok
    else
      {:error, :stale_event}
    end
  end

  defp validate_fresh_created_at(_event), do: {:error, :invalid_event}

  # Checks the event's "method" and "u" tags against the actual request:
  # method comparison is case-insensitive, the URL must match exactly.
  defp validate_http_binding(event, method, url) do
    tags = Map.get(event, "tags", [])

    method_matches? =
      Enum.any?(tags, fn
        ["method", tagged_method | _rest] when is_binary(tagged_method) ->
          String.upcase(tagged_method) == String.upcase(method)

        _tag ->
          false
      end)

    url_matches? =
      Enum.any?(tags, fn
        ["u", tagged_url | _rest] when is_binary(tagged_url) -> tagged_url == url
        _tag -> false
      end)

    cond do
      not method_matches? -> {:error, :invalid_method_tag}
      not url_matches? -> {:error, :invalid_url_tag}
      true -> :ok
    end
  end
end

View File

@@ -11,6 +11,10 @@ defmodule Parrhesia.Auth.Supervisor do
@impl true
def init(_init_arg) do
  # The stray `Supervisor.init([], strategy: :one_for_one)` call that used to
  # precede this (leftover from the previous empty-children version) was dead
  # code — its return value was discarded — and has been removed.
  children = [
    # Connection-scoped NIP-42 challenge store, registered under its module name.
    {Parrhesia.Auth.Challenges, name: Parrhesia.Auth.Challenges}
  ]

  Supervisor.init(children, strategy: :one_for_one)
end
end

View File

@@ -0,0 +1,70 @@
defmodule Parrhesia.Fanout.MultiNode do
  @moduledoc """
  Lightweight multi-node fanout bus built on `:pg` groups.

  Every instance joins the same `:pg` group; published events are relayed
  to all other members, which forward them to matching local subscriptions.
  """
  use GenServer

  alias Parrhesia.Subscriptions.Index

  @group __MODULE__

  ## Client API

  @spec start_link(keyword()) :: GenServer.on_start()
  def start_link(opts \\ []) do
    GenServer.start_link(__MODULE__, :ok, name: Keyword.get(opts, :name, __MODULE__))
  end

  @doc "Broadcasts `event` to all other group members (fire-and-forget)."
  @spec publish(map()) :: :ok
  def publish(event), do: publish(__MODULE__, event)

  @spec publish(GenServer.server(), map()) :: :ok
  def publish(server, event) when is_map(event), do: GenServer.cast(server, {:publish, event})

  ## Server callbacks

  @impl true
  def init(:ok) do
    :ok = ensure_pg_started()
    :ok = :pg.join(@group, self())
    {:ok, %{}}
  end

  # Relay the event to every member except ourselves.
  @impl true
  def handle_cast({:publish, event}, state) do
    me = self()

    for member <- :pg.get_members(@group), member != me do
      send(member, {:remote_fanout_event, event})
    end

    {:noreply, state}
  end

  # A remote peer published an event: forward it to every local subscription
  # whose filters may match.
  @impl true
  def handle_info({:remote_fanout_event, event}, state) do
    for {owner_pid, subscription_id} <- Index.candidate_subscription_keys(event) do
      send(owner_pid, {:fanout_event, subscription_id, event})
    end

    {:noreply, state}
  end

  def handle_info(_message, state), do: {:noreply, state}

  # Starts the default :pg scope on demand (idempotent).
  defp ensure_pg_started do
    if Process.whereis(:pg) do
      :ok
    else
      case :pg.start_link() do
        {:ok, _pid} -> :ok
        {:error, {:already_started, _pid}} -> :ok
        {:error, reason} -> {:error, reason}
      end
    end
  end
end

View File

@@ -0,0 +1,75 @@
defmodule Parrhesia.Groups.Flow do
  @moduledoc """
  Minimal group and membership flow handling for NIP-29/NIP-43 related kinds.
  """

  alias Parrhesia.Storage

  @membership_request_kind 8_000
  @membership_approval_kind 8_001
  @relay_metadata_kind 28_934
  @relay_admins_kind 28_935
  @relay_rules_kind 28_936
  @membership_event_kind 13_534

  # Every kind this module considers group related.
  @group_kinds [
    @membership_request_kind,
    @membership_approval_kind,
    @relay_metadata_kind,
    @relay_admins_kind,
    @relay_rules_kind,
    @membership_event_kind
  ]

  @doc """
  Applies a group-related event: membership requests/approvals upsert a
  membership record; all other kinds (including relay metadata/admin/rule
  kinds) are accepted as no-ops.
  """
  @spec handle_event(map()) :: :ok | {:error, term()}
  def handle_event(event) when is_map(event) do
    case Map.get(event, "kind") do
      @membership_request_kind ->
        upsert_membership(event, "requested")

      kind when kind in [@membership_approval_kind, @membership_event_kind] ->
        upsert_membership(event, "member")

      _other ->
        :ok
    end
  end

  @doc "True for any kind this module treats as group related."
  @spec group_related_kind?(non_neg_integer()) :: boolean()
  def group_related_kind?(kind), do: kind in @group_kinds

  defp upsert_membership(event, role) do
    with {:ok, group_id} <- group_id_from_event(event),
         {:ok, pubkey} <- pubkey_from_event(event) do
      membership = %{
        group_id: group_id,
        pubkey: pubkey,
        role: role,
        metadata: %{"source_kind" => Map.get(event, "kind")}
      }

      case Storage.groups().put_membership(%{}, membership) do
        {:ok, _membership} -> :ok
        {:error, reason} -> {:error, reason}
      end
    end
  end

  # The group id is the value of the first non-empty "h" tag.
  defp group_id_from_event(event) do
    event
    |> Map.get("tags", [])
    |> Enum.find_value({:error, :missing_group_id}, fn
      ["h", value | _rest] when is_binary(value) and value != "" -> {:ok, value}
      _tag -> nil
    end)
  end

  defp pubkey_from_event(%{"pubkey" => pubkey}) when is_binary(pubkey), do: {:ok, pubkey}
  defp pubkey_from_event(_event), do: {:error, :missing_pubkey}
end

View File

@@ -0,0 +1,122 @@
defmodule Parrhesia.Negentropy.Sessions do
  @moduledoc """
  In-memory NEG-* session tracking.

  One session per `{owner_pid, subscription_id}` pair; owners are monitored
  so their sessions are discarded automatically when they exit.
  """
  use GenServer

  @type session_key :: {pid(), String.t()}

  ## Client API

  @spec start_link(keyword()) :: GenServer.on_start()
  def start_link(opts \\ []) do
    GenServer.start_link(__MODULE__, :ok, name: Keyword.get(opts, :name, __MODULE__))
  end

  @doc "Opens (or replaces) a session for the owner/subscription pair."
  @spec open(GenServer.server(), pid(), String.t(), map()) :: {:ok, map()} | {:error, term()}
  def open(server \\ __MODULE__, owner_pid, subscription_id, params)
      when is_pid(owner_pid) and is_binary(subscription_id) and is_map(params) do
    GenServer.call(server, {:open, owner_pid, subscription_id, params})
  end

  @doc "Advances an open session with a NEG-MSG payload."
  @spec message(GenServer.server(), pid(), String.t(), map()) :: {:ok, map()} | {:error, term()}
  def message(server \\ __MODULE__, owner_pid, subscription_id, payload)
      when is_pid(owner_pid) and is_binary(subscription_id) and is_map(payload) do
    GenServer.call(server, {:message, owner_pid, subscription_id, payload})
  end

  @doc "Closes the session; succeeds even when none exists."
  @spec close(GenServer.server(), pid(), String.t()) :: :ok
  def close(server \\ __MODULE__, owner_pid, subscription_id)
      when is_pid(owner_pid) and is_binary(subscription_id) do
    GenServer.call(server, {:close, owner_pid, subscription_id})
  end

  ## Server callbacks

  @impl true
  def init(:ok), do: {:ok, %{sessions: %{}, monitors: %{}}}

  @impl true
  def handle_call({:open, owner_pid, subscription_id, params}, _from, state) do
    session = %{cursor: 0, params: params, opened_at: System.system_time(:second)}

    state =
      state
      |> ensure_monitor(owner_pid)
      |> put_session({owner_pid, subscription_id}, session)

    {:reply, {:ok, %{"status" => "open", "cursor" => 0}}, state}
  end

  def handle_call({:message, owner_pid, subscription_id, payload}, _from, state) do
    key = {owner_pid, subscription_id}

    case state.sessions do
      %{^key => session} ->
        advanced = %{
          session
          | cursor: session.cursor + 1,
            params: Map.merge(session.params, payload)
        }

        reply = {:ok, %{"status" => "ack", "cursor" => advanced.cursor}}
        {:reply, reply, put_session(state, key, advanced)}

      _sessions ->
        {:reply, {:error, :unknown_session}, state}
    end
  end

  def handle_call({:close, owner_pid, subscription_id}, _from, state) do
    sessions = Map.delete(state.sessions, {owner_pid, subscription_id})
    {:reply, :ok, %{state | sessions: sessions}}
  end

  # When a monitored owner exits, drop its monitor and all of its sessions;
  # a :DOWN with a stale ref is ignored.
  @impl true
  def handle_info({:DOWN, monitor_ref, :process, owner_pid, _reason}, state) do
    if Map.get(state.monitors, owner_pid) == monitor_ref do
      {:noreply, state |> drop_monitor(owner_pid) |> drop_owner_sessions(owner_pid)}
    else
      {:noreply, state}
    end
  end

  def handle_info(_message, state), do: {:noreply, state}

  ## Helpers

  defp put_session(state, key, session),
    do: %{state | sessions: Map.put(state.sessions, key, session)}

  defp drop_owner_sessions(state, owner_pid) do
    sessions =
      for {{owner, _sub_id} = key, session} <- state.sessions, owner != owner_pid, into: %{} do
        {key, session}
      end

    %{state | sessions: sessions}
  end

  # Monitor each owner exactly once.
  defp ensure_monitor(state, owner_pid) do
    if Map.has_key?(state.monitors, owner_pid) do
      state
    else
      %{state | monitors: Map.put(state.monitors, owner_pid, Process.monitor(owner_pid))}
    end
  end

  # Demonitor (flushing any queued :DOWN) and forget the monitor entry.
  defp drop_monitor(state, owner_pid) do
    {monitor_ref, monitors} = Map.pop(state.monitors, owner_pid)
    if is_reference(monitor_ref), do: Process.demonitor(monitor_ref, [:flush])
    %{state | monitors: monitors}
  end
end

View File

@@ -0,0 +1,222 @@
defmodule Parrhesia.Policy.EventPolicy do
  @moduledoc """
  Write/read policy checks for relay operations.

  Read policy optionally requires authentication and restricts giftwrap
  (kind 1059) queries to authenticated recipients. Write policy chains
  auth, ban, proof-of-work, protected-event and feature-flag checks, in
  that order, and reports the first failure.
  """

  alias Parrhesia.Storage

  # NIP-59 giftwrap kind: only readable by an authenticated recipient.
  @giftwrap_kind 1059

  # MLS-related kinds gated behind the :nip_ee_mls feature flag.
  @mls_kinds [443, 445, 10_051]

  @type policy_error ::
          :auth_required
          | :restricted_giftwrap
          | :protected_event_requires_auth
          | :protected_event_pubkey_mismatch
          | :pow_below_minimum
          | :pubkey_banned
          | :event_banned
          | :mls_disabled

  @doc """
  Authorizes a read (REQ) against the configured read policy.
  """
  @spec authorize_read([map()], MapSet.t(String.t())) :: :ok | {:error, policy_error()}
  def authorize_read(filters, authenticated_pubkeys) when is_list(filters) do
    auth_required? = config_bool([:policies, :auth_required_for_reads], false)

    cond do
      auth_required? and MapSet.size(authenticated_pubkeys) == 0 ->
        {:error, :auth_required}

      giftwrap_restricted?(filters, authenticated_pubkeys) ->
        {:error, :restricted_giftwrap}

      true ->
        :ok
    end
  end

  @doc """
  Authorizes an event write; checks run in order and the first failing
  check's error is returned.
  """
  @spec authorize_write(map(), MapSet.t(String.t())) :: :ok | {:error, policy_error()}
  def authorize_write(event, authenticated_pubkeys) when is_map(event) do
    checks = [
      fn -> maybe_require_auth_for_write(authenticated_pubkeys) end,
      fn -> reject_if_pubkey_banned(event) end,
      fn -> reject_if_event_banned(event) end,
      fn -> enforce_pow(event) end,
      fn -> enforce_protected_event(event, authenticated_pubkeys) end,
      fn -> enforce_mls_feature_flag(event) end
    ]

    Enum.reduce_while(checks, :ok, fn check, :ok ->
      case check.() do
        :ok -> {:cont, :ok}
        {:error, _reason} = error -> {:halt, error}
      end
    end)
  end

  @doc "Maps a policy error atom to its client-facing relay message."
  @spec error_message(policy_error()) :: String.t()
  def error_message(:auth_required), do: "auth-required: authentication required"

  def error_message(:restricted_giftwrap),
    do: "restricted: giftwrap access requires recipient authentication"

  def error_message(:protected_event_requires_auth),
    do: "auth-required: protected events require authenticated pubkey"

  def error_message(:protected_event_pubkey_mismatch),
    do: "restricted: protected event pubkey does not match authenticated pubkey"

  def error_message(:pow_below_minimum), do: "pow: minimum proof-of-work difficulty not met"
  def error_message(:pubkey_banned), do: "blocked: pubkey is banned"
  def error_message(:event_banned), do: "blocked: event is banned"
  def error_message(:mls_disabled), do: "blocked: mls feature flag is disabled"

  defp maybe_require_auth_for_write(authenticated_pubkeys) do
    if config_bool([:policies, :auth_required_for_writes], false) and
         MapSet.size(authenticated_pubkeys) == 0 do
      {:error, :auth_required}
    else
      :ok
    end
  end

  # Unauthenticated readers may not target giftwrap at all; authenticated
  # readers must list one of their own pubkeys as a "#p" recipient in every
  # giftwrap-targeting filter.
  defp giftwrap_restricted?(filters, authenticated_pubkeys) do
    if MapSet.size(authenticated_pubkeys) == 0 do
      Enum.any?(filters, &targets_giftwrap?/1)
    else
      not giftwrap_filters_include_authenticated_recipient?(filters, authenticated_pubkeys)
    end
  end

  defp giftwrap_filters_include_authenticated_recipient?(filters, authenticated_pubkeys) do
    Enum.all?(filters, fn filter ->
      if targets_giftwrap?(filter) do
        recipients = Map.get(filter, "#p") || []
        recipients != [] and Enum.any?(recipients, &MapSet.member?(authenticated_pubkeys, &1))
      else
        true
      end
    end)
  end

  # Single source of truth for "does this filter ask for giftwrap events?"
  # (was previously duplicated as any_filter_targets_giftwrap?/1).
  defp targets_giftwrap?(filter) do
    case Map.get(filter, "kinds") do
      kinds when is_list(kinds) -> @giftwrap_kind in kinds
      _other -> false
    end
  end

  # Missing/invalid pubkey or a storage error fails open (not banned).
  defp reject_if_pubkey_banned(event) do
    with pubkey when is_binary(pubkey) <- Map.get(event, "pubkey"),
         {:ok, true} <- Storage.moderation().pubkey_banned?(%{}, pubkey) do
      {:error, :pubkey_banned}
    else
      {:ok, false} -> :ok
      _other -> :ok
    end
  end

  # Missing/invalid id or a storage error fails open (not banned).
  defp reject_if_event_banned(event) do
    with event_id when is_binary(event_id) <- Map.get(event, "id"),
         {:ok, true} <- Storage.moderation().event_banned?(%{}, event_id) do
      {:error, :event_banned}
    else
      {:ok, false} -> :ok
      _other -> :ok
    end
  end

  # NIP-13-style proof of work: reject events whose id carries fewer leading
  # zero bits than the configured minimum; a minimum of 0 disables the check.
  defp enforce_pow(event) do
    min_difficulty = config_int([:policies, :min_pow_difficulty], 0)

    cond do
      min_difficulty <= 0 -> :ok
      event_pow_difficulty(event) >= min_difficulty -> :ok
      true -> {:error, :pow_below_minimum}
    end
  end

  # Counts the leading zero bits of the hex event id: each leading "0" digit
  # contributes 4 bits; the first non-zero digit contributes its own leading
  # zeros and stops the scan.
  defp event_pow_difficulty(event) do
    event
    |> Map.get("id", "")
    |> String.downcase()
    |> String.graphemes()
    |> Enum.reduce_while(0, fn
      "0", acc -> {:cont, acc + 4}
      hex, acc -> {:halt, acc + leading_zero_bits(hex)}
    end)
  end

  # Leading zero bits within a single non-zero hex digit ("1" -> 3,
  # "2"/"3" -> 2, "4".."7" -> 1, "8".."f" -> 0); non-hex input contributes 0.
  # Replaces the former 15-clause hand-written lookup table.
  defp leading_zero_bits(hex_digit) do
    case Integer.parse(hex_digit, 16) do
      {value, ""} when value > 0 -> 4 - length(Integer.digits(value, 2))
      _other -> 0
    end
  end

  # Events carrying a ["-"] tag are "protected": writable only by their
  # author, who must be authenticated.
  defp enforce_protected_event(event, authenticated_pubkeys) do
    protected? =
      event
      |> Map.get("tags", [])
      |> Enum.any?(fn
        ["-" | _rest] -> true
        _tag -> false
      end)

    if protected? do
      pubkey = Map.get(event, "pubkey")

      cond do
        MapSet.size(authenticated_pubkeys) == 0 -> {:error, :protected_event_requires_auth}
        MapSet.member?(authenticated_pubkeys, pubkey) -> :ok
        true -> {:error, :protected_event_pubkey_mismatch}
      end
    else
      :ok
    end
  end

  defp enforce_mls_feature_flag(event) do
    if event["kind"] in @mls_kinds and not config_bool([:features, :nip_ee_mls], false) do
      {:error, :mls_disabled}
    else
      :ok
    end
  end

  # Reads a boolean from :parrhesia app config; any non-boolean value falls
  # back to `default`.
  defp config_bool([scope, key], default) do
    case Application.get_env(:parrhesia, scope, []) |> Keyword.get(key, default) do
      true -> true
      false -> false
      _other -> default
    end
  end

  # Reads an integer from :parrhesia app config; any non-integer value falls
  # back to `default`.
  defp config_int([scope, key], default) do
    case Application.get_env(:parrhesia, scope, []) |> Keyword.get(key, default) do
      value when is_integer(value) -> value
      _other -> default
    end
  end
end

View File

@@ -12,6 +12,11 @@ defmodule Parrhesia.Protocol do
{:event, event()}
| {:req, String.t(), [filter()]}
| {:close, String.t()}
| {:auth, event()}
| {:count, String.t(), [filter()], map()}
| {:neg_open, String.t(), map()}
| {:neg_msg, String.t(), map()}
| {:neg_close, String.t()}
@type relay_message ::
{:notice, String.t()}
@@ -19,6 +24,9 @@ defmodule Parrhesia.Protocol do
| {:closed, String.t(), String.t()}
| {:eose, String.t()}
| {:event, String.t(), event()}
| {:auth, String.t()}
| {:count, String.t(), map()}
| {:neg_msg, String.t(), map()}
@type decode_error ::
:invalid_json
@@ -26,6 +34,11 @@ defmodule Parrhesia.Protocol do
| :invalid_event
| :invalid_subscription_id
| :invalid_filters
| :invalid_auth
| :invalid_count
| :invalid_negentropy
@count_options_keys MapSet.new(["hll", "approximate"])
@spec decode_client(binary()) :: {:ok, client_message()} | {:error, decode_error()}
def decode_client(payload) when is_binary(payload) do
@@ -57,6 +70,9 @@ defmodule Parrhesia.Protocol do
:invalid_event -> "invalid: invalid EVENT shape"
:invalid_subscription_id -> "invalid: invalid subscription id"
:invalid_filters -> "invalid: invalid filters"
:invalid_auth -> "invalid: invalid AUTH message"
:invalid_count -> "invalid: invalid COUNT message"
:invalid_negentropy -> "invalid: invalid NEG message"
end
end
@@ -71,6 +87,73 @@ defmodule Parrhesia.Protocol do
# EVENT whose payload failed the is_map guard on the clause above this view.
defp decode_message(["EVENT", _event]), do: {:error, :invalid_event}

# REQ: subscription id followed by one or more filter objects.
defp decode_message(["REQ", subscription_id | filters]) when is_binary(subscription_id) do
  decode_req_like_message(:req, subscription_id, filters)
end

defp decode_message(["REQ", _subscription_id | _filters]),
  do: {:error, :invalid_subscription_id}

# COUNT: same shape as REQ, optionally followed by a trailing options map
# (keys restricted to @count_options_keys); filter parsing is shared with REQ.
defp decode_message(["COUNT", subscription_id | filters_or_options])
     when is_binary(subscription_id) do
  with {:ok, filters, options} <- split_count_parts(filters_or_options),
       {:ok, {:req, ^subscription_id, parsed_filters}} <-
         decode_req_like_message(:req, subscription_id, filters) do
    {:ok, {:count, subscription_id, parsed_filters, options}}
  else
    # Any parse failure is reported uniformly as an invalid COUNT.
    _error -> {:error, :invalid_count}
  end
end

defp decode_message(["COUNT", _subscription_id | _filters_or_options]),
  do: {:error, :invalid_count}

defp decode_message(["CLOSE", subscription_id]) when is_binary(subscription_id) do
  if valid_subscription_id?(subscription_id) do
    {:ok, {:close, subscription_id}}
  else
    {:error, :invalid_subscription_id}
  end
end

defp decode_message(["CLOSE", _subscription_id]), do: {:error, :invalid_subscription_id}

# AUTH: the payload is the client's auth event map; only its shape is checked
# here — signature/content validation happens elsewhere.
defp decode_message(["AUTH", auth_event]) when is_map(auth_event),
  do: {:ok, {:auth, auth_event}}

defp decode_message(["AUTH", _invalid]), do: {:error, :invalid_auth}

# NEG-OPEN / NEG-MSG / NEG-CLOSE: negentropy sync frames. Payload maps are
# passed through opaque; only the subscription id is validated here.
defp decode_message(["NEG-OPEN", subscription_id, payload])
     when is_binary(subscription_id) and is_map(payload) do
  if valid_subscription_id?(subscription_id) do
    {:ok, {:neg_open, subscription_id, payload}}
  else
    {:error, :invalid_subscription_id}
  end
end

defp decode_message(["NEG-MSG", subscription_id, payload])
     when is_binary(subscription_id) and is_map(payload) do
  if valid_subscription_id?(subscription_id) do
    {:ok, {:neg_msg, subscription_id, payload}}
  else
    {:error, :invalid_subscription_id}
  end
end

defp decode_message(["NEG-CLOSE", subscription_id]) when is_binary(subscription_id) do
  if valid_subscription_id?(subscription_id) do
    {:ok, {:neg_close, subscription_id}}
  else
    {:error, :invalid_subscription_id}
  end
end

# Any NEG-* frame that failed the guards above.
defp decode_message([type | _rest]) when type in ["NEG-OPEN", "NEG-MSG", "NEG-CLOSE"],
  do: {:error, :invalid_negentropy}

# Catch-all for unknown or malformed frames.
defp decode_message(_other), do: {:error, :invalid_message}
defp decode_req_like_message(_kind, subscription_id, filters) do
cond do
not valid_subscription_id?(subscription_id) ->
{:error, :invalid_subscription_id}
@@ -86,25 +169,51 @@ defmodule Parrhesia.Protocol do
end
end
defp decode_message(["REQ", _subscription_id | _filters]),
do: {:error, :invalid_subscription_id}
defp decode_message(["CLOSE", subscription_id]) when is_binary(subscription_id) do
if valid_subscription_id?(subscription_id) do
{:ok, {:close, subscription_id}}
defp split_count_parts(parts) when is_list(parts) do
if parts == [] do
{:error, :missing_filters}
else
{:error, :invalid_subscription_id}
split_count_parts_with_optional_options(parts)
end
end
defp decode_message(["CLOSE", _subscription_id]), do: {:error, :invalid_subscription_id}
defp decode_message(_other), do: {:error, :invalid_message}
defp split_count_parts(_parts), do: {:error, :invalid_parts}
# Decides whether the trailing element of a COUNT payload is an options map
# or just another filter.
defp split_count_parts_with_optional_options(parts) do
  case List.last(parts) do
    options when is_map(options) ->
      maybe_extract_count_options(parts, options)

    _other ->
      {:ok, parts, %{}}
  end
end

# The trailing map is treated as options only when all of its keys are known
# COUNT option keys AND at least one filter precedes it; otherwise the map is
# kept as a filter and options default to %{}.
defp maybe_extract_count_options(parts, options) do
  if count_options_map?(options) and length(parts) > 1 do
    filters = Enum.drop(parts, -1)
    {:ok, filters, options}
  else
    {:ok, parts, %{}}
  end
end

# True when every key belongs to @count_options_keys. Note: an empty map
# also satisfies this.
defp count_options_map?(map) do
  map
  |> Map.keys()
  |> Enum.all?(&MapSet.member?(@count_options_keys, &1))
end
defp relay_frame({:notice, message}), do: ["NOTICE", message]
defp relay_frame({:ok, event_id, accepted, message}), do: ["OK", event_id, accepted, message]
defp relay_frame({:closed, subscription_id, message}), do: ["CLOSED", subscription_id, message]
defp relay_frame({:eose, subscription_id}), do: ["EOSE", subscription_id]
defp relay_frame({:event, subscription_id, event}), do: ["EVENT", subscription_id, event]
defp relay_frame({:auth, challenge}), do: ["AUTH", challenge]
defp relay_frame({:count, subscription_id, payload}), do: ["COUNT", subscription_id, payload]
defp relay_frame({:neg_msg, subscription_id, payload}),
do: ["NEG-MSG", subscription_id, payload]
defp valid_subscription_id?(subscription_id) do
subscription_id != "" and String.length(subscription_id) <= 64

View File

@@ -18,9 +18,10 @@ defmodule Parrhesia.Protocol.Filter do
| :invalid_since
| :invalid_until
| :invalid_limit
| :invalid_search
| :invalid_tag_filter
@allowed_keys MapSet.new(["ids", "authors", "kinds", "since", "until", "limit"])
@allowed_keys MapSet.new(["ids", "authors", "kinds", "since", "until", "limit", "search"])
@error_messages %{
invalid_filters: "invalid: filters must be a non-empty array of objects",
@@ -34,6 +35,7 @@ defmodule Parrhesia.Protocol.Filter do
invalid_since: "invalid: since must be a non-negative integer",
invalid_until: "invalid: until must be a non-negative integer",
invalid_limit: "invalid: limit must be a positive integer",
invalid_search: "invalid: search must be a non-empty string",
invalid_tag_filter:
"invalid: tag filters must use #<single-letter> with non-empty string arrays"
}
@@ -71,7 +73,8 @@ defmodule Parrhesia.Protocol.Filter do
:ok <- validate_kinds(Map.get(filter, "kinds")),
:ok <- validate_since(Map.get(filter, "since")),
:ok <- validate_until(Map.get(filter, "until")),
:ok <- validate_limit(Map.get(filter, "limit")) do
:ok <- validate_limit(Map.get(filter, "limit")),
:ok <- validate_search(Map.get(filter, "search")) do
validate_tag_filters(filter)
end
end
@@ -89,12 +92,9 @@ defmodule Parrhesia.Protocol.Filter do
def matches_filter?(event, filter) when is_map(event) and is_map(filter) do
case validate_filter(filter) do
:ok ->
ids_match?(event, Map.get(filter, "ids")) and
authors_match?(event, Map.get(filter, "authors")) and
kinds_match?(event, Map.get(filter, "kinds")) and
since_match?(event, Map.get(filter, "since")) and
until_match?(event, Map.get(filter, "until")) and
tags_match?(event, filter)
event
|> filter_predicates(filter)
|> Enum.all?()
{:error, _reason} ->
false
@@ -170,6 +170,10 @@ defmodule Parrhesia.Protocol.Filter do
defp validate_limit(limit) when is_integer(limit) and limit > 0, do: :ok
defp validate_limit(_limit), do: {:error, :invalid_limit}
# The optional "search" field must be absent or a non-empty string.
defp validate_search(nil), do: :ok
defp validate_search(search) when is_binary(search) and search != "", do: :ok
defp validate_search(_search), do: {:error, :invalid_search}
defp validate_tag_filters(filter) do
filter
|> Enum.filter(fn {key, _value} -> valid_tag_filter_key?(key) end)
@@ -188,6 +192,18 @@ defmodule Parrhesia.Protocol.Filter do
defp valid_tag_filter_values?(_values), do: false
# Collects the per-criterion match results for `event` against `filter`;
# the caller requires all of them to hold (logical AND).
defp filter_predicates(event, filter) do
  [
    ids_match?(event, Map.get(filter, "ids")),
    authors_match?(event, Map.get(filter, "authors")),
    kinds_match?(event, Map.get(filter, "kinds")),
    since_match?(event, Map.get(filter, "since")),
    until_match?(event, Map.get(filter, "until")),
    search_match?(event, Map.get(filter, "search")),
    tags_match?(event, filter)
  ]
end
defp ids_match?(_event, nil), do: true
defp ids_match?(event, ids) do
@@ -220,6 +236,13 @@ defmodule Parrhesia.Protocol.Filter do
is_integer(created_at) and created_at <= until
end
# Naive search: case-insensitive substring match on the event content only
# (no tokenization or ranking).
defp search_match?(_event, nil), do: true

defp search_match?(event, search) do
  content = Map.get(event, "content", "")
  String.contains?(String.downcase(content), String.downcase(search))
end
defp tags_match?(event, filter) do
filter
|> Enum.filter(fn {key, _value} -> valid_tag_filter_key?(key) end)

View File

@@ -1,9 +1,6 @@
defmodule Parrhesia.Repo do
@moduledoc """
PostgreSQL repository for storage adapter persistence.
Note: the repo is not yet started by the supervision tree while the
storage adapter is in staged implementation.
"""
use Ecto.Repo,

View File

@@ -0,0 +1,34 @@
defmodule Parrhesia.Storage.Adapters.Memory.Admin do
  @moduledoc """
  In-memory prototype adapter for `Parrhesia.Storage.Admin`.
  """

  alias Parrhesia.Storage.Adapters.Memory.Store

  @behaviour Parrhesia.Storage.Admin

  # Only the "ping" method (atom or string form) is supported; anything else
  # is reported as unsupported with a stringified method name.
  @impl true
  def execute(_context, method, _params) when method in [:ping, "ping"],
    do: {:ok, %{"status" => "ok"}}

  def execute(_context, method, _params),
    do: {:error, {:unsupported_method, normalize_method(method)}}

  # Audit entries are prepended to the shared store's log list.
  @impl true
  def append_audit_log(_context, audit_entry) when is_map(audit_entry) do
    Store.update(fn state -> update_in(state.audit_logs, &[audit_entry | &1]) end)
    :ok
  end

  def append_audit_log(_context, _audit_entry), do: {:error, :invalid_audit_entry}

  # Entries are stored newest-first, so reverse to return insertion order.
  @impl true
  def list_audit_logs(_context, _opts) do
    {:ok, Store.get(fn state -> Enum.reverse(state.audit_logs) end)}
  end

  defp normalize_method(method) when is_atom(method), do: Atom.to_string(method)
  defp normalize_method(method) when is_binary(method), do: method
  defp normalize_method(method), do: inspect(method)
end

View File

@@ -0,0 +1,103 @@
defmodule Parrhesia.Storage.Adapters.Memory.Events do
  @moduledoc """
  In-memory prototype adapter for `Parrhesia.Storage.Events`.

  Events are kept in the shared `Store` state keyed by event id. Deletions
  are soft: ids are added to a `deleted` set and filtered out on reads.
  """
  alias Parrhesia.Protocol.Filter
  alias Parrhesia.Storage.Adapters.Memory.Store

  @behaviour Parrhesia.Storage.Events

  # Inserts an event keyed by its "id". The duplicate check and the insert
  # happen in a single get_and_update, so concurrent writers cannot race.
  @impl true
  def put_event(_context, event) do
    event_id = Map.fetch!(event, "id")
    result =
      Store.get_and_update(fn state ->
        if Map.has_key?(state.events, event_id) do
          {{:error, :duplicate_event}, state}
        else
          next_state = put_in(state.events[event_id], event)
          {{:ok, event}, next_state}
        end
      end)
    result
  end

  # Returns {:ok, event | nil}; soft-deleted events read as nil.
  @impl true
  def get_event(_context, event_id) do
    deleted? = Store.get(fn state -> MapSet.member?(state.deleted, event_id) end)
    if deleted? do
      {:ok, nil}
    else
      {:ok, Store.get(fn state -> Map.get(state.events, event_id) end)}
    end
  end

  # Full scan over all stored events (excluding soft-deleted ones) matched
  # against the validated filters. Prototype-only: O(total events) per query.
  @impl true
  def query(_context, filters, _opts) do
    with :ok <- Filter.validate_filters(filters) do
      state = Store.get(& &1)
      events =
        state.events
        |> Map.values()
        |> Enum.reject(fn event -> MapSet.member?(state.deleted, event["id"]) end)
        |> Enum.filter(&Filter.matches_any?(&1, filters))
      {:ok, events}
    end
  end

  # Counting is just a query plus length; invalid filters propagate as-is.
  @impl true
  def count(context, filters, opts) do
    with {:ok, events} <- query(context, filters, opts) do
      {:ok, length(events)}
    end
  end

  # Soft-deletes every event referenced by an "e" tag of the request event
  # and returns how many ids were marked (not how many actually existed).
  # NOTE(review): there is no check here that the request's pubkey matches
  # the authors of the targeted events (cf. NIP-09) — presumably enforced by
  # the caller; confirm before relying on this adapter for deletions.
  @impl true
  def delete_by_request(_context, event) do
    delete_ids =
      event
      |> Map.get("tags", [])
      |> Enum.flat_map(fn
        ["e", event_id | _rest] -> [event_id]
        _tag -> []
      end)
    Store.update(fn state ->
      Enum.reduce(delete_ids, state, fn event_id, acc ->
        update_in(acc.deleted, &MapSet.put(&1, event_id))
      end)
    end)
    {:ok, length(delete_ids)}
  end

  # Soft-deletes every stored event whose "pubkey" matches the vanish
  # request's pubkey; returns the number of ids marked.
  @impl true
  def vanish(_context, event) do
    pubkey = Map.get(event, "pubkey")
    deleted_ids =
      Store.get(fn state ->
        state.events
        |> Map.values()
        |> Enum.filter(fn candidate -> candidate["pubkey"] == pubkey end)
        |> Enum.map(& &1["id"])
      end)
    Store.update(fn state ->
      Enum.reduce(deleted_ids, state, fn event_id, acc ->
        update_in(acc.deleted, &MapSet.put(&1, event_id))
      end)
    end)
    {:ok, length(deleted_ids)}
  end

  # Expiration is not implemented in the memory adapter; always reports 0.
  @impl true
  def purge_expired(_opts), do: {:ok, 0}
end

View File

@@ -0,0 +1,90 @@
defmodule Parrhesia.Storage.Adapters.Memory.Groups do
  @moduledoc """
  In-memory prototype adapter for `Parrhesia.Storage.Groups`.
  """

  alias Parrhesia.Storage.Adapters.Memory.Store

  @behaviour Parrhesia.Storage.Groups

  # Memberships are keyed by {group_id, pubkey}; writing an existing key
  # overwrites the previous record.
  @impl true
  def put_membership(_context, membership) do
    record = %{
      group_id: fetch!(membership, :group_id),
      pubkey: fetch!(membership, :pubkey),
      role: fetch!(membership, :role),
      metadata: Map.get(membership, :metadata, %{})
    }

    Store.update(fn state ->
      put_in(state.groups[{record.group_id, record.pubkey}], record)
    end)

    {:ok, record}
  end

  @impl true
  def get_membership(_context, group_id, pubkey) do
    membership = Store.get(fn state -> Map.get(state.groups, {group_id, pubkey}) end)
    {:ok, membership}
  end

  @impl true
  def delete_membership(_context, group_id, pubkey) do
    Store.update(fn state ->
      update_in(state.groups, &Map.delete(&1, {group_id, pubkey}))
    end)

    :ok
  end

  @impl true
  def list_memberships(_context, group_id) do
    memberships =
      Store.get(fn state ->
        for {_key, membership} <- state.groups, membership.group_id == group_id do
          membership
        end
      end)

    {:ok, memberships}
  end

  # Roles are keyed by {group_id, pubkey, role_name}, so one pubkey may hold
  # several roles in the same group.
  @impl true
  def put_role(_context, role) do
    record = %{
      group_id: fetch!(role, :group_id),
      pubkey: fetch!(role, :pubkey),
      role: fetch!(role, :role),
      metadata: Map.get(role, :metadata, %{})
    }

    Store.update(fn state ->
      put_in(state.roles[{record.group_id, record.pubkey, record.role}], record)
    end)

    {:ok, record}
  end

  @impl true
  def delete_role(_context, group_id, pubkey, role_name) do
    Store.update(fn state ->
      update_in(state.roles, &Map.delete(&1, {group_id, pubkey, role_name}))
    end)

    :ok
  end

  @impl true
  def list_roles(_context, group_id, pubkey) do
    roles =
      Store.get(fn state ->
        for {_key, role} <- state.roles,
            role.group_id == group_id and role.pubkey == pubkey do
          role
        end
      end)

    {:ok, roles}
  end

  # Reads `key` as an atom, falling back to its string form; raises when the
  # value is absent (or falsy) under both.
  defp fetch!(map, key) do
    if value = Map.get(map, key) do
      value
    else
      Map.fetch!(map, Atom.to_string(key))
    end
  end
end

View File

@@ -0,0 +1,68 @@
defmodule Parrhesia.Storage.Adapters.Memory.Moderation do
  @moduledoc """
  In-memory prototype adapter for `Parrhesia.Storage.Moderation`.
  """

  alias Parrhesia.Storage.Adapters.Memory.Store

  @behaviour Parrhesia.Storage.Moderation

  # -- Pubkey bans -----------------------------------------------------------

  @impl true
  def ban_pubkey(_context, pubkey), do: add_ban(:pubkeys, pubkey)

  @impl true
  def unban_pubkey(_context, pubkey), do: remove_ban(:pubkeys, pubkey)

  @impl true
  def pubkey_banned?(_context, pubkey), do: {:ok, ban_member?(:pubkeys, pubkey)}

  # -- Pubkey allowlist ------------------------------------------------------

  @impl true
  def allow_pubkey(_context, pubkey) do
    Store.update(fn state -> update_in(state.allowed_pubkeys, &MapSet.put(&1, pubkey)) end)
    :ok
  end

  @impl true
  def disallow_pubkey(_context, pubkey) do
    Store.update(fn state -> update_in(state.allowed_pubkeys, &MapSet.delete(&1, pubkey)) end)
    :ok
  end

  @impl true
  def pubkey_allowed?(_context, pubkey) do
    {:ok, Store.get(fn state -> MapSet.member?(state.allowed_pubkeys, pubkey) end)}
  end

  # -- Event bans ------------------------------------------------------------

  @impl true
  def ban_event(_context, event_id), do: add_ban(:events, event_id)

  @impl true
  def unban_event(_context, event_id), do: remove_ban(:events, event_id)

  @impl true
  def event_banned?(_context, event_id), do: {:ok, ban_member?(:events, event_id)}

  # -- IP blocks -------------------------------------------------------------

  @impl true
  def block_ip(_context, ip), do: add_ban(:ips, ip)

  @impl true
  def unblock_ip(_context, ip), do: remove_ban(:ips, ip)

  @impl true
  def ip_blocked?(_context, ip), do: {:ok, ban_member?(:ips, ip)}

  # Adds `value` to the ban set stored under `key` in the shared store.
  defp add_ban(key, value) do
    Store.update(fn state -> update_in(state.bans[key], &MapSet.put(&1, value)) end)
    :ok
  end

  # Removes `value` from the ban set stored under `key`.
  defp remove_ban(key, value) do
    Store.update(fn state -> update_in(state.bans[key], &MapSet.delete(&1, value)) end)
    :ok
  end

  # True when `value` is present in the ban set stored under `key`.
  defp ban_member?(key, value) do
    Store.get(fn state -> MapSet.member?(state.bans[key], value) end)
  end
end

View File

@@ -0,0 +1,48 @@
defmodule Parrhesia.Storage.Adapters.Memory.Store do
  @moduledoc false

  use Agent

  @name __MODULE__

  # Shape of the shared in-memory relay state.
  @initial_state %{
    events: %{},
    deleted: MapSet.new(),
    bans: %{pubkeys: MapSet.new(), events: MapSet.new(), ips: MapSet.new()},
    allowed_pubkeys: MapSet.new(),
    groups: %{},
    roles: %{},
    audit_logs: []
  }

  # Lazily starts the store on first use.
  # NOTE(review): `Agent.start_link/2` links the agent to whichever caller
  # starts it first, so the store exits with that process — acceptable for a
  # prototype, but confirm this is intended for longer-lived deployments.
  def ensure_started do
    case Process.whereis(@name) do
      nil -> start_store()
      _pid -> :ok
    end
  end

  defp start_store do
    case Agent.start_link(fn -> @initial_state end, name: @name) do
      {:ok, _pid} -> :ok
      # A concurrent caller won the race to start the agent — that is fine.
      {:error, {:already_started, _pid}} -> :ok
      {:error, reason} -> {:error, reason}
    end
  end

  # Reads the state through `fun`, starting the agent if needed.
  def get(fun) do
    :ok = ensure_started()
    Agent.get(@name, fun)
  end

  # Replaces the state with `fun.(state)`, starting the agent if needed.
  def update(fun) do
    :ok = ensure_started()
    Agent.update(@name, fun)
  end

  # Combined read/write transaction, starting the agent if needed.
  def get_and_update(fun) do
    :ok = ensure_started()
    Agent.get_and_update(@name, fun)
  end
end

View File

@@ -13,10 +13,21 @@ defmodule Parrhesia.Storage.Adapters.Postgres.Admin do
@max_limit 1_000
@impl true
def execute(_context, method, params) when is_map(params) do
  moderation = Parrhesia.Storage.moderation()
  method_name = normalize_method_name(method)

  # Built-in methods are handled inline; everything else is treated as a
  # moderation action and dispatched to the configured moderation adapter.
  # (The merged-diff leftover of the old catch-all clause is removed: it had
  # no closing `end` and would have shadowed this guarded clause.)
  case method_name do
    "ping" -> {:ok, %{"status" => "ok"}}
    "stats" -> {:ok, relay_stats()}
    "list_audit_logs" -> list_audit_logs(%{}, audit_list_opts(params))
    _other -> execute_moderation_method(moderation, method_name, params)
  end
end

# Non-map params cannot describe a supported method invocation.
def execute(_context, method, _params),
  do: {:error, {:unsupported_method, normalize_method_name(method)}}
@impl true
def append_audit_log(_context, audit_entry) when is_map(audit_entry) do
with {:ok, method} <- fetch_required_method(audit_entry),
@@ -70,6 +81,91 @@ defmodule Parrhesia.Storage.Adapters.Postgres.Admin do
def list_audit_logs(_context, _opts), do: {:error, :invalid_opts}
# Aggregate counters returned by the admin `stats` method. Each call issues
# three COUNT queries against the events, banned_pubkeys and blocked_ips
# tables.
defp relay_stats do
events_count = Repo.aggregate("events", :count, :id)
banned_pubkeys = Repo.aggregate("banned_pubkeys", :count, :pubkey)
blocked_ips = Repo.aggregate("blocked_ips", :count, :ip)
%{
"events" => events_count,
"banned_pubkeys" => banned_pubkeys,
"blocked_ips" => blocked_ips
}
end
# Dispatches a moderation admin method to the configured moderation adapter.
# Each method takes exactly one required binary parameter, extracted and
# validated by the per-parameter runner. Unknown names are reported back as
# unsupported.
defp execute_moderation_method(moderation, method_name, params) do
  case method_name do
    "ban_pubkey" -> execute_pubkey_method(&moderation.ban_pubkey/2, params)
    "unban_pubkey" -> execute_pubkey_method(&moderation.unban_pubkey/2, params)
    "allow_pubkey" -> execute_pubkey_method(&moderation.allow_pubkey/2, params)
    "disallow_pubkey" -> execute_pubkey_method(&moderation.disallow_pubkey/2, params)
    "ban_event" -> execute_event_method(&moderation.ban_event/2, params)
    "unban_event" -> execute_event_method(&moderation.unban_event/2, params)
    "block_ip" -> execute_ip_method(&moderation.block_ip/2, params)
    "unblock_ip" -> execute_ip_method(&moderation.unblock_ip/2, params)
    _unsupported -> {:error, {:unsupported_method, method_name}}
  end
end
# Translates admin request params into keyword options understood by the
# audit-log listing query. Absent params are simply omitted.
defp audit_list_opts(params) do
  Enum.reduce([:limit, :method, :actor_pubkey], [], fn key, opts ->
    maybe_put_opt(opts, key, Map.get(params, Atom.to_string(key)))
  end)
end

# Skips nil values so downstream defaults apply.
defp maybe_put_opt(opts, _key, nil), do: opts
defp maybe_put_opt(opts, key, value), do: Keyword.put(opts, key, value)
# The three moderation-method runners share one shape: pull a required
# binary value out of `params`, call the adapter, and normalize the reply.
# They are consolidated into `run_with_binary_param/4` to remove the
# previously triplicated logic; behavior is unchanged.

defp execute_pubkey_method(fun, params),
  do: run_with_binary_param(fun, params, "pubkey", :invalid_pubkey)

defp execute_event_method(fun, params),
  do: run_with_binary_param(fun, params, "event_id", :invalid_event_id)

defp execute_ip_method(fun, params),
  do: run_with_binary_param(fun, params, "ip", :invalid_ip)

# Invokes `fun` with the binary stored under `key`. Returns `{:error, error}`
# when the value is missing or not a binary; adapter error tuples fall
# through the `with` unchanged.
defp run_with_binary_param(fun, params, key, error) do
  case Map.get(params, key) do
    value when is_binary(value) ->
      with :ok <- fun.(%{}, value) do
        {:ok, %{"ok" => true}}
      end

    _other ->
      {:error, error}
  end
end
defp fetch_required_method(audit_entry) do
audit_entry
|> fetch_value(:method)

View File

@@ -116,7 +116,7 @@ defmodule Parrhesia.Storage.Adapters.Postgres.Events do
filters
|> Enum.flat_map(fn filter ->
filter
|> event_id_query_for_filter(now)
|> event_id_query_for_filter(now, opts)
|> Repo.all()
end)
|> MapSet.new()
@@ -129,10 +129,50 @@ defmodule Parrhesia.Storage.Adapters.Postgres.Events do
def count(_context, _filters, _opts), do: {:error, :invalid_opts}
@impl true
def delete_by_request(_context, event) do
  # Deletion request: soft-delete (tombstone via `deleted_at`) only events
  # that (a) are named by the request's "e" tags, (b) were authored by the
  # requesting pubkey, and (c) are not already deleted. The stale
  # `{:error, :not_implemented}` stub clause that preceded this one in the
  # merged diff is removed — it matched unconditionally and made this
  # implementation unreachable.
  with {:ok, deleter_pubkey} <- decode_hex(Map.get(event, "pubkey"), 32, :invalid_pubkey),
       {:ok, delete_ids} <- extract_delete_event_ids(event) do
    query =
      from(stored_event in "events",
        where:
          stored_event.id in ^delete_ids and
            stored_event.pubkey == ^deleter_pubkey and
            is_nil(stored_event.deleted_at)
      )

    deleted_at = System.system_time(:second)
    {count, _result} = Repo.update_all(query, set: [deleted_at: deleted_at])
    {:ok, count}
  end
end
@impl true
def vanish(_context, event) do
  # Request-to-vanish: hard-delete the requester's own events plus any
  # kind-1059 gift wraps addressed to them via a "p" tag, limited to events
  # created at or before the request's `created_at`. The stale
  # `{:error, :not_implemented}` stub clause that preceded this one in the
  # merged diff is removed — it matched unconditionally and made this
  # implementation unreachable.
  with {:ok, pubkey} <- decode_hex(Map.get(event, "pubkey"), 32, :invalid_pubkey),
       {:ok, created_at} <-
         validate_non_negative_integer(Map.get(event, "created_at"), :invalid_created_at) do
    own_events_query =
      from(stored_event in "events",
        where: stored_event.pubkey == ^pubkey and stored_event.created_at <= ^created_at
      )

    # Tags store pubkeys as lowercase hex, so re-encode the decoded binary
    # for the comparison.
    giftwrap_query =
      from(stored_event in "events",
        join: tag in "event_tags",
        on: tag.event_created_at == stored_event.created_at and tag.event_id == stored_event.id,
        where:
          stored_event.kind == 1059 and
            tag.name == "p" and
            tag.value == ^Base.encode16(pubkey, case: :lower) and
            stored_event.created_at <= ^created_at
      )

    {own_events_count, _result} = Repo.delete_all(own_events_query)
    {giftwrap_count, _result} = Repo.delete_all(giftwrap_query)
    {:ok, own_events_count + giftwrap_count}
  end
end
@impl true
def purge_expired(opts) when is_list(opts) do
@@ -158,6 +198,11 @@ defmodule Parrhesia.Storage.Adapters.Postgres.Events do
{:ok, kind} <- validate_non_negative_integer(Map.get(event, "kind"), :invalid_kind),
{:ok, content} <- validate_binary(Map.get(event, "content"), :invalid_content),
{:ok, tags} <- validate_tags(Map.get(event, "tags")) do
expires_at =
tags
|> extract_expiration()
|> maybe_apply_mls_group_retention(kind, created_at)
{:ok,
%{
id: id,
@@ -167,7 +212,7 @@ defmodule Parrhesia.Storage.Adapters.Postgres.Events do
content: content,
sig: sig,
d_tag: extract_d_tag(tags),
expires_at: extract_expiration(tags),
expires_at: expires_at,
tags: tags
}}
end
@@ -531,12 +576,14 @@ defmodule Parrhesia.Storage.Adapters.Postgres.Events do
|> maybe_filter_kinds(Map.get(filter, "kinds"))
|> maybe_filter_since(Map.get(filter, "since"))
|> maybe_filter_until(Map.get(filter, "until"))
|> maybe_filter_search(Map.get(filter, "search"))
|> filter_by_tags(filter)
|> maybe_restrict_giftwrap_access(filter, opts)
maybe_limit_query(query, effective_filter_limit(filter, opts))
end
defp event_id_query_for_filter(filter, now) do
defp event_id_query_for_filter(filter, now, opts) do
from(event in "events",
where: is_nil(event.deleted_at) and (is_nil(event.expires_at) or event.expires_at > ^now),
select: event.id
@@ -546,7 +593,9 @@ defmodule Parrhesia.Storage.Adapters.Postgres.Events do
|> maybe_filter_kinds(Map.get(filter, "kinds"))
|> maybe_filter_since(Map.get(filter, "since"))
|> maybe_filter_until(Map.get(filter, "until"))
|> maybe_filter_search(Map.get(filter, "search"))
|> filter_by_tags(filter)
|> maybe_restrict_giftwrap_access(filter, opts)
end
defp maybe_filter_ids(query, nil), do: query
@@ -572,6 +621,14 @@ defmodule Parrhesia.Storage.Adapters.Postgres.Events do
defp maybe_filter_until(query, nil), do: query
defp maybe_filter_until(query, until), do: where(query, [event], event.created_at <= ^until)
# Adds a case-insensitive substring match on event content when the filter
# carries a non-empty binary "search" term; any other value leaves the query
# untouched.
defp maybe_filter_search(query, nil), do: query
defp maybe_filter_search(query, search) when is_binary(search) and search != "" do
# NOTE(review): `%` and `_` inside the user-supplied term act as SQL LIKE
# wildcards because the term is interpolated into the pattern unescaped —
# confirm wildcard injection is acceptable here (no SQL injection risk, the
# pattern itself is parameterized via `^`).
where(query, [event], ilike(event.content, ^"%#{search}%"))
end
defp maybe_filter_search(query, _search), do: query
defp filter_by_tags(query, filter) do
filter
|> tag_filters()
@@ -601,6 +658,32 @@ defmodule Parrhesia.Storage.Adapters.Postgres.Events do
Enum.map(values, &Base.decode16!(&1, case: decode_case))
end
# Restricts queries that explicitly target kind 1059 (gift wrap) to events
# addressed to one of the requester's pubkeys via a "p" tag. When the filter
# does not list kind 1059, or no requester pubkeys were provided in `opts`,
# the query passes through unchanged — presumably upstream read
# authorization gates that case; verify against the caller.
defp maybe_restrict_giftwrap_access(query, filter, opts) do
requester_pubkeys = Keyword.get(opts, :requester_pubkeys, [])
if targets_giftwrap?(filter) and requester_pubkeys != [] do
# Correlated EXISTS over event_tags: keep only events carrying a "p" tag
# whose value matches any requester pubkey.
where(
query,
[event],
fragment(
"EXISTS (SELECT 1 FROM event_tags AS tag WHERE tag.event_created_at = ? AND tag.event_id = ? AND tag.name = 'p' AND tag.value = ANY(?))",
event.created_at,
event.id,
type(^requester_pubkeys, {:array, :string})
)
)
else
query
end
end
# True only when the filter's "kinds" list explicitly contains 1059.
# NOTE(review): a filter with no "kinds" key can still match gift wraps and
# is not restricted here — confirm upstream policy covers that path.
defp targets_giftwrap?(filter) do
case Map.get(filter, "kinds") do
kinds when is_list(kinds) -> 1059 in kinds
_other -> false
end
end
defp effective_filter_limit(filter, opts) do
filter_limit = Map.get(filter, "limit")
max_filter_limit = Keyword.get(opts, :max_filter_limit)
@@ -730,6 +813,25 @@ defmodule Parrhesia.Storage.Adapters.Postgres.Events do
end)
end
# Collects the event ids named by a deletion request's "e" tags and decodes
# them from hex to binary for storage lookups.
# Returns {:error, :no_delete_targets} when no "e" tag is present and
# {:error, :invalid_delete_target} when any id is not valid hex.
defp extract_delete_event_ids(event) do
delete_ids =
event
|> Map.get("tags", [])
|> Enum.reduce([], fn
["e", event_id | _rest], acc when is_binary(event_id) -> [event_id | acc]
_tag, acc -> acc
end)
|> Enum.uniq()
if delete_ids == [] do
{:error, :no_delete_targets}
else
# `case: :mixed` accepts either hex casing; `Base.decode16!/2` raises
# ArgumentError on invalid input, converted below by the rescue clause.
{:ok, Enum.map(delete_ids, &Base.decode16!(&1, case: :mixed))}
end
rescue
ArgumentError -> {:error, :invalid_delete_target}
end
defp extract_expiration(tags) do
tags
|> Enum.find_value(fn
@@ -746,4 +848,19 @@ defmodule Parrhesia.Storage.Adapters.Postgres.Events do
end
defp parse_unix_seconds(_unix_seconds), do: nil
# Applies a default TTL to MLS group events (kind 445) that carry no explicit
# expiration, but only while the :nip_ee_mls feature flag is enabled; the
# TTL defaults to 300 seconds and is configurable via policies.
defp maybe_apply_mls_group_retention(nil, 445, created_at) do
  features = Application.get_env(:parrhesia, :features, [])

  if Keyword.get(features, :nip_ee_mls, false) do
    policies = Application.get_env(:parrhesia, :policies, [])
    created_at + Keyword.get(policies, :mls_group_event_ttl_seconds, 300)
  else
    nil
  end
end

# An explicit expiration — or any non-MLS kind — passes through unchanged.
defp maybe_apply_mls_group_retention(expires_at, _kind, _created_at), do: expires_at
end

View File

@@ -0,0 +1,34 @@
defmodule Parrhesia.Storage.Archiver do
  @moduledoc """
  Partition-aware archival helpers for Postgres event partitions.
  """

  import Ecto.Query

  alias Parrhesia.Repo

  # Plain (optionally schema-qualified) SQL identifier. Table names are
  # interpolated directly into SQL below, so anything else is rejected.
  @identifier ~r/^[A-Za-z_][A-Za-z0-9_]*(\.[A-Za-z_][A-Za-z0-9_]*)?$/

  @doc """
  Lists all `events_*` partitions excluding the default partition.
  """
  @spec list_partitions() :: [String.t()]
  def list_partitions do
    query =
      from(table in "pg_tables",
        where: table.schemaname == "public",
        where: like(table.tablename, "events_%"),
        where: table.tablename != "events_default",
        select: table.tablename,
        order_by: [asc: table.tablename]
      )

    Repo.all(query)
  end

  @doc """
  Generates an archive SQL statement for the given partition.

  Raises `ArgumentError` when either name is not a plain SQL identifier, so
  untrusted input cannot be smuggled into the generated statement. Output is
  unchanged for legitimate table names.
  """
  @spec archive_sql(String.t(), String.t()) :: String.t()
  def archive_sql(partition_name, archive_table_name) do
    "INSERT INTO #{identifier!(archive_table_name)} SELECT * FROM #{identifier!(partition_name)};"
  end

  # Validates `name` against the identifier pattern and returns it unchanged.
  defp identifier!(name) when is_binary(name) do
    if Regex.match?(@identifier, name) do
      name
    else
      raise ArgumentError, "invalid SQL identifier: #{inspect(name)}"
    end
  end
end

View File

@@ -11,6 +11,10 @@ defmodule Parrhesia.Storage.Supervisor do
@impl true
def init(_init_arg) do
  # The stale leading `Supervisor.init([], strategy: :one_for_one)` call left
  # by the merged diff is removed — its return value was discarded.
  children = [
    Parrhesia.Repo
  ]

  Supervisor.init(children, strategy: :one_for_one)
end
end

View File

@@ -12,7 +12,9 @@ defmodule Parrhesia.Subscriptions.Supervisor do
@impl true
def init(_init_arg) do
children = [
{Parrhesia.Subscriptions.Index, name: Parrhesia.Subscriptions.Index}
{Parrhesia.Subscriptions.Index, name: Parrhesia.Subscriptions.Index},
{Parrhesia.Negentropy.Sessions, name: Parrhesia.Negentropy.Sessions},
{Parrhesia.Fanout.MultiNode, name: Parrhesia.Fanout.MultiNode}
]
Supervisor.init(children, strategy: :one_for_one)

View File

@@ -0,0 +1,47 @@
defmodule Parrhesia.Tasks.ExpirationWorker do
  @moduledoc """
  Periodic worker that purges expired events.
  """

  use GenServer

  alias Parrhesia.Storage
  alias Parrhesia.Telemetry

  @default_interval_ms 30_000

  # -- Client API ------------------------------------------------------------

  @spec start_link(keyword()) :: GenServer.on_start()
  def start_link(opts \\ []) do
    GenServer.start_link(__MODULE__, opts, name: Keyword.get(opts, :name, __MODULE__))
  end

  # -- Server callbacks ------------------------------------------------------

  @impl true
  def init(opts) do
    interval_ms = Keyword.get(opts, :interval_ms, @default_interval_ms)
    schedule_tick(interval_ms)
    {:ok, %{interval_ms: interval_ms}}
  end

  @impl true
  def handle_info(:tick, %{interval_ms: interval_ms} = state) do
    purge_and_report()
    # The next tick is only scheduled after the purge finishes, so slow purge
    # passes cannot overlap.
    schedule_tick(interval_ms)
    {:noreply, state}
  end

  # Ignore any other messages (late replies, monitors, ...).
  def handle_info(_message, state), do: {:noreply, state}

  # Runs one purge pass and emits a telemetry stop event with its duration.
  defp purge_and_report do
    started_at = System.monotonic_time()
    _result = Storage.events().purge_expired([])

    Telemetry.emit(
      [:parrhesia, :maintenance, :purge_expired, :stop],
      %{duration: System.monotonic_time() - started_at},
      %{}
    )
  end

  defp schedule_tick(interval_ms) do
    Process.send_after(self(), :tick, interval_ms)
  end
end

View File

@@ -11,6 +11,13 @@ defmodule Parrhesia.Tasks.Supervisor do
@impl true
def init(_init_arg) do
  # The stale leading `Supervisor.init([], strategy: :one_for_one)` call left
  # by the merged diff is removed — its return value was discarded.
  # The expiration worker can be disabled (e.g. in tests) via config.
  children =
    if Application.get_env(:parrhesia, :enable_expiration_worker, true) do
      [{Parrhesia.Tasks.ExpirationWorker, name: Parrhesia.Tasks.ExpirationWorker}]
    else
      []
    end

  Supervisor.init(children, strategy: :one_for_one)
end
end

View File

@@ -1,16 +1,86 @@
defmodule Parrhesia.Telemetry do
  @moduledoc """
  Supervision entrypoint and helpers for relay telemetry.
  """

  # The merged diff left two artifacts that are removed here: the previous
  # moduledoc first line duplicated above the new one, and a stale
  # `Supervisor.init([], strategy: :one_for_one)` call in `init/1` whose
  # result was discarded.

  use Supervisor

  import Telemetry.Metrics

  @prometheus_reporter __MODULE__.Prometheus

  @spec start_link(keyword()) :: Supervisor.on_start()
  def start_link(init_arg \\ []) do
    Supervisor.start_link(__MODULE__, init_arg, name: __MODULE__)
  end

  @impl true
  def init(_init_arg) do
    children = [
      {TelemetryMetricsPrometheus.Core, name: @prometheus_reporter, metrics: metrics()},
      # VM measurements are sampled every 10 seconds.
      {:telemetry_poller, measurements: periodic_measurements(), period: 10_000}
    ]

    Supervisor.init(children, strategy: :one_for_one)
  end

  @spec prometheus_reporter() :: atom()
  def prometheus_reporter, do: @prometheus_reporter

  @doc """
  Metric definitions exported to the Prometheus reporter.
  """
  @spec metrics() :: [Telemetry.Metrics.t()]
  def metrics do
    [
      distribution("parrhesia.ingest.duration.ms",
        event_name: [:parrhesia, :ingest, :stop],
        measurement: :duration,
        unit: {:native, :millisecond},
        reporter_options: [buckets: [1, 5, 10, 25, 50, 100, 250, 500, 1000]]
      ),
      distribution("parrhesia.query.duration.ms",
        event_name: [:parrhesia, :query, :stop],
        measurement: :duration,
        unit: {:native, :millisecond},
        reporter_options: [buckets: [1, 5, 10, 25, 50, 100, 250, 500, 1000]]
      ),
      distribution("parrhesia.fanout.duration.ms",
        event_name: [:parrhesia, :fanout, :stop],
        measurement: :duration,
        unit: {:native, :millisecond},
        reporter_options: [buckets: [1, 5, 10, 25, 50, 100, 250, 500, 1000]]
      ),
      last_value("parrhesia.connection.outbound_queue.depth",
        event_name: [:parrhesia, :connection, :outbound_queue],
        measurement: :depth,
        reporter_options: [prometheus_type: :gauge]
      ),
      counter("parrhesia.connection.outbound_queue.overflow.count",
        event_name: [:parrhesia, :connection, :outbound_queue, :overflow],
        measurement: :count
      ),
      last_value("parrhesia.vm.memory.total.bytes",
        event_name: [:parrhesia, :vm, :memory],
        measurement: :total,
        unit: :byte,
        reporter_options: [prometheus_type: :gauge]
      )
    ]
  end

  @doc """
  Emits a telemetry event; thin wrapper over `:telemetry.execute/3`.
  """
  @spec emit([atom()], map(), map()) :: :ok
  def emit(event_name, measurements, metadata \\ %{})
      when is_list(event_name) and is_map(measurements) and is_map(metadata) do
    :telemetry.execute(event_name, measurements, metadata)
  end

  defp periodic_measurements do
    [
      {__MODULE__, :emit_vm_memory, []}
    ]
  end

  @doc false
  def emit_vm_memory do
    total = :erlang.memory(:total)
    emit([:parrhesia, :vm, :memory], %{total: total}, %{})
  end
end

View File

@@ -0,0 +1,30 @@
defmodule Parrhesia.TestSupport.ExpirationStubEvents do
  @moduledoc false

  @behaviour Parrhesia.Storage.Events

  # Storage stub for expiration-worker tests: writes echo the event back,
  # reads return nothing, and purge passes notify the registered test pid.

  @impl true
  def put_event(_ctx, event), do: {:ok, event}

  @impl true
  def get_event(_ctx, _event_id), do: {:ok, nil}

  @impl true
  def query(_ctx, _filters, _opts), do: {:ok, []}

  @impl true
  def count(_ctx, _filters, _opts), do: {:ok, 0}

  @impl true
  def delete_by_request(_ctx, _event), do: {:ok, 0}

  @impl true
  def vanish(_ctx, _event), do: {:ok, 0}

  # Sends :purged to the test process stored under {__MODULE__, :test_pid}
  # in persistent_term; raises if no test pid was registered.
  @impl true
  def purge_expired(_opts) do
    {__MODULE__, :test_pid}
    |> :persistent_term.get()
    |> send(:purged)

    {:ok, 0}
  end
end

View File

@@ -0,0 +1,26 @@
defmodule Parrhesia.TestSupport.FailingEvents do
  @moduledoc false

  @behaviour Parrhesia.Storage.Events

  # Storage stub that simulates a database outage: every callback fails with
  # the same reason.
  @failure {:error, :db_down}

  @impl true
  def put_event(_ctx, _event), do: @failure

  @impl true
  def get_event(_ctx, _event_id), do: @failure

  @impl true
  def query(_ctx, _filters, _opts), do: @failure

  @impl true
  def count(_ctx, _filters, _opts), do: @failure

  @impl true
  def delete_by_request(_ctx, _event), do: @failure

  @impl true
  def vanish(_ctx, _event), do: @failure

  @impl true
  def purge_expired(_opts), do: @failure
end

View File

@@ -0,0 +1,41 @@
defmodule Parrhesia.TestSupport.PermissiveModeration do
  @moduledoc false

  @behaviour Parrhesia.Storage.Moderation

  # Moderation stub for tests: every mutation is accepted, nothing is ever
  # reported as banned or blocked, and every pubkey is allowed.

  # -- Pubkey bans -----------------------------------------------------------

  @impl true
  def ban_pubkey(_ctx, _pk), do: :ok

  @impl true
  def unban_pubkey(_ctx, _pk), do: :ok

  @impl true
  def pubkey_banned?(_ctx, _pk), do: {:ok, false}

  # -- Pubkey allowlist ------------------------------------------------------

  @impl true
  def allow_pubkey(_ctx, _pk), do: :ok

  @impl true
  def disallow_pubkey(_ctx, _pk), do: :ok

  @impl true
  def pubkey_allowed?(_ctx, _pk), do: {:ok, true}

  # -- Event bans ------------------------------------------------------------

  @impl true
  def ban_event(_ctx, _event_id), do: :ok

  @impl true
  def unban_event(_ctx, _event_id), do: :ok

  @impl true
  def event_banned?(_ctx, _event_id), do: {:ok, false}

  # -- IP blocks -------------------------------------------------------------

  @impl true
  def block_ip(_ctx, _ip), do: :ok

  @impl true
  def unblock_ip(_ctx, _ip), do: :ok

  @impl true
  def ip_blocked?(_ctx, _ip), do: {:ok, false}
end

View File

@@ -5,9 +5,16 @@ defmodule Parrhesia.Web.Connection do
@behaviour WebSock
alias Parrhesia.Auth.Challenges
alias Parrhesia.Fanout.MultiNode
alias Parrhesia.Groups.Flow
alias Parrhesia.Negentropy.Sessions
alias Parrhesia.Policy.EventPolicy
alias Parrhesia.Protocol
alias Parrhesia.Protocol.Filter
alias Parrhesia.Storage
alias Parrhesia.Subscriptions.Index
alias Parrhesia.Telemetry
@default_max_subscriptions_per_connection 32
@default_max_outbound_queue 256
@@ -19,6 +26,9 @@ defmodule Parrhesia.Web.Connection do
authenticated_pubkeys: MapSet.new(),
max_subscriptions_per_connection: @default_max_subscriptions_per_connection,
subscription_index: Index,
auth_challenges: Challenges,
auth_challenge: nil,
negentropy_sessions: Sessions,
outbound_queue: :queue.new(),
outbound_queue_size: 0,
max_outbound_queue: @default_max_outbound_queue,
@@ -38,6 +48,9 @@ defmodule Parrhesia.Web.Connection do
authenticated_pubkeys: MapSet.t(String.t()),
max_subscriptions_per_connection: pos_integer(),
subscription_index: GenServer.server() | nil,
auth_challenges: GenServer.server() | nil,
auth_challenge: String.t() | nil,
negentropy_sessions: GenServer.server() | nil,
outbound_queue: :queue.queue({String.t(), map()}),
outbound_queue_size: non_neg_integer(),
max_outbound_queue: pos_integer(),
@@ -48,9 +61,14 @@ defmodule Parrhesia.Web.Connection do
@impl true
def init(opts) do
auth_challenges = auth_challenges(opts)
state = %__MODULE__{
max_subscriptions_per_connection: max_subscriptions_per_connection(opts),
subscription_index: subscription_index(opts),
auth_challenges: auth_challenges,
auth_challenge: maybe_issue_auth_challenge(auth_challenges),
negentropy_sessions: negentropy_sessions(opts),
max_outbound_queue: max_outbound_queue(opts),
outbound_overflow_strategy: outbound_overflow_strategy(opts),
outbound_drain_batch_size: outbound_drain_batch_size(opts)
@@ -62,35 +80,8 @@ defmodule Parrhesia.Web.Connection do
@impl true
def handle_in({payload, [opcode: :text]}, %__MODULE__{} = state) do
case Protocol.decode_client(payload) do
{:ok, {:event, event}} ->
event_id = Map.get(event, "id", "")
response =
case Protocol.validate_event(event) do
:ok ->
Protocol.encode_relay({:ok, event_id, false, "error: EVENT ingest not implemented"})
{:error, message} ->
Protocol.encode_relay({:ok, event_id, false, message})
end
{:push, {:text, response}, state}
{:ok, {:req, subscription_id, filters}} ->
handle_req(state, subscription_id, filters)
{:ok, {:close, subscription_id}} ->
next_state =
state
|> drop_subscription(subscription_id)
|> drop_queued_subscription_events(subscription_id)
:ok = maybe_remove_index_subscription(next_state, subscription_id)
response =
Protocol.encode_relay({:closed, subscription_id, "error: subscription closed"})
{:push, {:text, response}, next_state}
{:ok, decoded_message} ->
handle_decoded_message(decoded_message, state)
{:error, reason} ->
response = Protocol.encode_relay({:notice, Protocol.decode_error_notice(reason)})
@@ -106,6 +97,39 @@ defmodule Parrhesia.Web.Connection do
{:push, {:text, response}, state}
end
# Routes a successfully-decoded client message to its handler. Each clause
# mirrors one client message type (EVENT, REQ, COUNT, AUTH, NEG-OPEN,
# NEG-MSG, NEG-CLOSE, CLOSE).
defp handle_decoded_message({:event, event}, state), do: handle_event_ingest(state, event)
defp handle_decoded_message({:req, subscription_id, filters}, state),
do: handle_req(state, subscription_id, filters)
defp handle_decoded_message({:count, subscription_id, filters, options}, state),
do: handle_count(state, subscription_id, filters, options)
defp handle_decoded_message({:auth, auth_event}, state), do: handle_auth(state, auth_event)
defp handle_decoded_message({:neg_open, subscription_id, payload}, state),
do: handle_neg_open(state, subscription_id, payload)
defp handle_decoded_message({:neg_msg, subscription_id, payload}, state),
do: handle_neg_msg(state, subscription_id, payload)
defp handle_decoded_message({:neg_close, subscription_id}, state),
do: handle_neg_close(state, subscription_id)
# CLOSE tears down the subscription locally, drops frames still queued for
# it, detaches it from the shared index, and confirms with a CLOSED frame.
defp handle_decoded_message({:close, subscription_id}, state) do
next_state =
state
|> drop_subscription(subscription_id)
|> drop_queued_subscription_events(subscription_id)
:ok = maybe_remove_index_subscription(next_state, subscription_id)
response =
Protocol.encode_relay({:closed, subscription_id, "error: subscription closed"})
{:push, {:text, response}, next_state}
end
@impl true
def handle_info({:fanout_event, subscription_id, event}, %__MODULE__{} = state)
when is_binary(subscription_id) and is_map(event) do
@@ -134,17 +158,69 @@ defmodule Parrhesia.Web.Connection do
# WebSock termination hook: releases this connection's per-process state —
# its subscriptions in the shared index and its stored auth challenge.
@impl true
def terminate(_reason, %__MODULE__{} = state) do
:ok = maybe_remove_index_owner(state)
:ok = maybe_clear_auth_challenge(state)
:ok
end
defp handle_req(%__MODULE__{} = state, subscription_id, filters) do
with :ok <- Filter.validate_filters(filters),
{:ok, next_state} <- upsert_subscription(state, subscription_id, filters) do
:ok = maybe_upsert_index_subscription(next_state, subscription_id, filters)
defp handle_event_ingest(%__MODULE__{} = state, event) do
started_at = System.monotonic_time()
event_id = Map.get(event, "id", "")
response = Protocol.encode_relay({:eose, subscription_id})
{:push, {:text, response}, next_state}
with :ok <- Protocol.validate_event(event),
:ok <- EventPolicy.authorize_write(event, state.authenticated_pubkeys),
:ok <- maybe_process_group_event(event),
{:ok, _result, message} <- persist_event(event) do
Telemetry.emit(
[:parrhesia, :ingest, :stop],
%{duration: System.monotonic_time() - started_at},
%{}
)
fanout_event(event)
maybe_publish_multi_node(event)
response = Protocol.encode_relay({:ok, event_id, true, message})
{:push, {:text, response}, state}
else
{:error, reason} ->
message = error_message_for_ingest_failure(reason)
response = Protocol.encode_relay({:ok, event_id, false, message})
if reason in [:auth_required, :protected_event_requires_auth] do
with_auth_challenge_frame(state, {:push, {:text, response}, state})
else
{:push, {:text, response}, state}
end
end
end
defp handle_req(%__MODULE__{} = state, subscription_id, filters) do
started_at = System.monotonic_time()
with :ok <- Filter.validate_filters(filters),
:ok <- EventPolicy.authorize_read(filters, state.authenticated_pubkeys),
{:ok, next_state} <- upsert_subscription(state, subscription_id, filters),
:ok <- maybe_upsert_index_subscription(next_state, subscription_id, filters),
{:ok, events} <- query_initial_events(filters, state.authenticated_pubkeys) do
Telemetry.emit(
[:parrhesia, :query, :stop],
%{duration: System.monotonic_time() - started_at},
%{}
)
frames =
Enum.map(events, fn event ->
{:text, Protocol.encode_relay({:event, subscription_id, event})}
end) ++ [{:text, Protocol.encode_relay({:eose, subscription_id})}]
{:push, frames, next_state}
else
{:error, :auth_required} ->
restricted_close(state, subscription_id, EventPolicy.error_message(:auth_required))
{:error, :restricted_giftwrap} ->
restricted_close(state, subscription_id, EventPolicy.error_message(:restricted_giftwrap))
{:error, :subscription_limit_reached} ->
response =
Protocol.encode_relay({
@@ -156,17 +232,364 @@ defmodule Parrhesia.Web.Connection do
{:push, {:text, response}, state}
{:error, reason} ->
response = Protocol.encode_relay({:closed, subscription_id, Filter.error_message(reason)})
message =
case reason do
reason
when reason in [
:invalid_filters,
:empty_filters,
:too_many_filters,
:invalid_filter,
:invalid_filter_key,
:invalid_ids,
:invalid_authors,
:invalid_kinds,
:invalid_since,
:invalid_until,
:invalid_limit,
:invalid_search,
:invalid_tag_filter
] ->
Filter.error_message(reason)
_other ->
"error: #{inspect(reason)}"
end
response = Protocol.encode_relay({:closed, subscription_id, message})
{:push, {:text, response}, state}
end
end
# Handles a NIP-45 COUNT request: validate the filters, enforce read policy,
# count matching events and reply with a COUNT payload. Auth-related failures
# reply CLOSED plus an AUTH challenge; other failures reply CLOSED with a
# machine-readable "error:" message.
defp handle_count(%__MODULE__{} = state, subscription_id, filters, options) do
  started_at = System.monotonic_time()

  with :ok <- Filter.validate_filters(filters),
       :ok <- EventPolicy.authorize_read(filters, state.authenticated_pubkeys),
       {:ok, count} <- count_events(filters, state.authenticated_pubkeys),
       {:ok, payload} <- build_count_payload(filters, count, options) do
    Telemetry.emit(
      [:parrhesia, :query, :stop],
      %{duration: System.monotonic_time() - started_at},
      %{}
    )

    response = Protocol.encode_relay({:count, subscription_id, payload})
    {:push, {:text, response}, state}
  else
    {:error, :auth_required} ->
      restricted_count_notice(state, subscription_id, EventPolicy.error_message(:auth_required))

    {:error, :restricted_giftwrap} ->
      restricted_count_notice(
        state,
        subscription_id,
        EventPolicy.error_message(:restricted_giftwrap)
      )

    {:error, reason} ->
      # Use the NIP-01 machine-readable "error:" prefix, consistent with the
      # negentropy handlers, instead of sending a bare inspect/1 dump.
      response = Protocol.encode_relay({:closed, subscription_id, "error: #{inspect(reason)}"})
      {:push, {:text, response}, state}
  end
end
# NIP-42 AUTH handler: verify the signed event against the pending challenge,
# record the pubkey as authenticated on success and rotate the challenge so
# the accepted one cannot be replayed. On failure, NACK and re-send AUTH.
defp handle_auth(%__MODULE__{} = state, auth_event) do
  event_id = Map.get(auth_event, "id", "")

  with :ok <- Protocol.validate_event(auth_event),
       :ok <- validate_auth_event(auth_event),
       :ok <- validate_auth_challenge(state, auth_event) do
    authed =
      MapSet.put(state.authenticated_pubkeys, Map.get(auth_event, "pubkey"))

    authed_state =
      rotate_auth_challenge(%__MODULE__{state | authenticated_pubkeys: authed})

    ok_frame = Protocol.encode_relay({:ok, event_id, true, "ok: auth accepted"})
    {:push, {:text, ok_frame}, authed_state}
  else
    {:error, reason} ->
      nack = Protocol.encode_relay({:ok, event_id, false, auth_error_message(reason)})
      with_auth_challenge_frame(state, {:push, {:text, nack}, state})
  end
end
# NIP-77: open a negentropy sync session and reply with its first message,
# or CLOSED with an "error:" reason when the session cannot be opened.
defp handle_neg_open(%__MODULE__{} = state, subscription_id, payload) do
  frame =
    case maybe_open_negentropy(state, subscription_id, payload) do
      {:ok, message} ->
        Protocol.encode_relay({:neg_msg, subscription_id, message})

      {:error, reason} ->
        Protocol.encode_relay({:closed, subscription_id, "error: #{inspect(reason)}"})
    end

  {:push, {:text, frame}, state}
end
# NIP-77: feed a client message into an open negentropy session and reply
# with the next message, or CLOSED with an "error:" reason on failure.
defp handle_neg_msg(%__MODULE__{} = state, subscription_id, payload) do
  frame =
    case maybe_negentropy_message(state, subscription_id, payload) do
      {:ok, message} ->
        Protocol.encode_relay({:neg_msg, subscription_id, message})

      {:error, reason} ->
        Protocol.encode_relay({:closed, subscription_id, "error: #{inspect(reason)}"})
    end

  {:push, {:text, frame}, state}
end
# NIP-77: tear down the session, then acknowledge with a "closed" status.
defp handle_neg_close(%__MODULE__{} = state, subscription_id) do
  :ok = maybe_close_negentropy(state, subscription_id)
  ack = Protocol.encode_relay({:neg_msg, subscription_id, %{"status" => "closed"}})
  {:push, {:text, ack}, state}
end
# Only group-related kinds (per Flow.group_related_kind?/1) need Flow
# processing; every other event is a no-op here.
defp maybe_process_group_event(event) do
  kind = Map.get(event, "kind")
  if Flow.group_related_kind?(kind), do: Flow.handle_event(event), else: :ok
end
# Routes an incoming event by kind: 5 = NIP-09 deletion request, 62 = NIP-62
# vanish request, everything else is stored verbatim. Returns
# {:ok, result, ok_message} or {:error, reason}.
defp persist_event(event) do
  case Map.get(event, "kind") do
    5 ->
      with {:ok, removed} <- Storage.events().delete_by_request(%{}, event) do
        {:ok, removed, "ok: deletion request processed"}
      end

    62 ->
      with {:ok, removed} <- Storage.events().vanish(%{}, event) do
        {:ok, removed, "ok: vanish request processed"}
      end

    _kind ->
      case Storage.events().put_event(%{}, event) do
        {:ok, stored} -> {:ok, stored, "ok: event stored"}
        {:error, reason} -> {:error, reason}
      end
  end
end
# Maps an ingest failure onto the human-readable message carried in the OK
# frame: known policy reasons go through EventPolicy, plain binaries pass
# through unchanged, and anything else is inspected with an "error:" prefix.
defp error_message_for_ingest_failure(:duplicate_event),
  do: "duplicate: event already stored"

defp error_message_for_ingest_failure(policy_reason)
     when policy_reason in [
            :auth_required,
            :restricted_giftwrap,
            :protected_event_requires_auth,
            :protected_event_pubkey_mismatch,
            :pow_below_minimum,
            :pubkey_banned,
            :event_banned,
            :mls_disabled
          ],
     do: EventPolicy.error_message(policy_reason)

defp error_message_for_ingest_failure(message) when is_binary(message), do: message
defp error_message_for_ingest_failure(other), do: "error: #{inspect(other)}"
# Initial REQ backfill query; the per-filter limit is clamped by config and
# reads are scoped to the connection's authenticated pubkeys.
defp query_initial_events(filters, authenticated_pubkeys) do
  query_opts = [
    max_filter_limit: Parrhesia.Config.get([:limits, :max_filter_limit]),
    requester_pubkeys: MapSet.to_list(authenticated_pubkeys)
  ]

  Storage.events().query(%{}, filters, query_opts)
end
# NIP-45 count, scoped to the connection's authenticated pubkeys.
defp count_events(filters, authenticated_pubkeys) do
  requesters = MapSet.to_list(authenticated_pubkeys)
  Storage.events().count(%{}, filters, requester_pubkeys: requesters)
end
# Builds the NIP-45 COUNT payload. The optional "hll" field is attached only
# when the client asked for it AND the nip_45_count feature is enabled.
defp build_count_payload(filters, count, options) when is_integer(count) and is_map(options) do
  # `options` comes straight from the client's COUNT message, so "hll" may be
  # any JSON value. Require exactly `true` instead of feeding an arbitrary
  # term to `and/2`, which raises BadBooleanError on non-boolean operands and
  # would crash the connection on malformed input.
  include_hll? =
    Map.get(options, "hll", false) == true and
      Parrhesia.Config.get([:features, :nip_45_count], true)

  payload = %{"count" => count, "approximate" => false}

  if include_hll? do
    {:ok, Map.put(payload, "hll", generate_hll_payload(filters, count))}
  else
    {:ok, payload}
  end
end
# NOTE(review): despite the name this does not compute HyperLogLog registers —
# it is a deterministic SHA-256 digest of the encoded filters plus the count,
# Base64-encoded. Confirm downstream consumers expect this placeholder value.
defp generate_hll_payload(filters, count) do
  digest = :crypto.hash(:sha256, "#{Jason.encode!(filters)}:#{count}")
  Base.encode64(digest)
end
# Replies CLOSED for the subscription and prepends an AUTH challenge frame so
# the client can authenticate and retry (NIP-42).
defp restricted_close(state, subscription_id, reason) do
  closed_frame = Protocol.encode_relay({:closed, subscription_id, reason})
  with_auth_challenge_frame(state, {:push, {:text, closed_frame}, state})
end
# COUNT shares the CLOSED + AUTH-challenge reply shape with REQ; delegate to
# restricted_close/3 instead of keeping a second identical copy of its body.
defp restricted_count_notice(state, subscription_id, reason),
  do: restricted_close(state, subscription_id, reason)
# NIP-42: an AUTH event must be kind 22242 and carry a non-empty "challenge"
# tag; any other kind is rejected outright.
defp validate_auth_event(%{"kind" => 22_242} = auth_event) do
  tags = Map.get(auth_event, "tags", [])

  has_challenge? =
    Enum.any?(tags, fn
      ["challenge", value | _rest] -> is_binary(value) and value != ""
      _tag -> false
    end)

  if has_challenge?, do: :ok, else: {:error, :missing_challenge_tag}
end

defp validate_auth_event(_auth_event), do: {:error, :invalid_auth_kind}
# The AUTH event's "challenge" tag must match the challenge previously issued
# on this connection; with no outstanding challenge, AUTH cannot succeed.
defp validate_auth_challenge(%__MODULE__{auth_challenge: nil}, _auth_event),
  do: {:error, :missing_challenge}

defp validate_auth_challenge(%__MODULE__{auth_challenge: expected}, auth_event) do
  tags = Map.get(auth_event, "tags", [])

  matched? =
    Enum.any?(tags, fn
      ["challenge", ^expected | _rest] -> true
      _tag -> false
    end)

  if matched?, do: :ok, else: {:error, :challenge_mismatch}
end
# Maps a NIP-42 AUTH validation failure onto the human-readable message for
# the NACK OK frame. Plain binaries pass through; unknown terms are inspected
# with the "invalid:" machine-readable prefix.
defp auth_error_message(:invalid_auth_kind), do: "invalid: AUTH event kind must be 22242"
defp auth_error_message(:missing_challenge_tag), do: "invalid: AUTH event missing challenge tag"
defp auth_error_message(:challenge_mismatch), do: "invalid: AUTH challenge mismatch"
defp auth_error_message(:missing_challenge), do: "invalid: AUTH challenge unavailable"
defp auth_error_message(reason) when is_binary(reason), do: reason
defp auth_error_message(reason), do: "invalid: #{inspect(reason)}"
# Prepends an AUTH frame carrying the current challenge to a push result so
# the client learns how to authenticate (NIP-42). With no active challenge,
# the result is returned untouched.
defp with_auth_challenge_frame(%__MODULE__{auth_challenge: nil}, result), do: result

defp with_auth_challenge_frame(%__MODULE__{auth_challenge: challenge}, {:push, frame, state}) do
  challenge_frame = {:text, Protocol.encode_relay({:auth, challenge})}
  {:push, [challenge_frame | List.wrap(frame)], state}
end
# After a successful AUTH, replace the stored challenge with a freshly issued
# one so the accepted challenge cannot be replayed. With no challenge server
# configured, the state is left as-is.
defp rotate_auth_challenge(%__MODULE__{auth_challenges: nil} = state), do: state

defp rotate_auth_challenge(%__MODULE__{auth_challenges: server} = state) do
  %__MODULE__{state | auth_challenge: maybe_issue_auth_challenge(server)}
end
# Best-effort challenge issuance: an unavailable or dead challenge server
# (exit from the call) degrades to "no challenge" rather than crashing.
defp maybe_issue_auth_challenge(nil), do: nil

defp maybe_issue_auth_challenge(server) do
  Challenges.issue(server, self())
catch
  :exit, _reason -> nil
end
# Clears this connection's challenge on the challenge server. The :ok match
# is assertive on the server contract; an exit from a dead server is treated
# as already cleared.
defp maybe_clear_auth_challenge(%__MODULE__{auth_challenges: nil}), do: :ok

defp maybe_clear_auth_challenge(%__MODULE__{auth_challenges: server}) do
  :ok = Challenges.clear(server, self())
  :ok
catch
  :exit, _reason -> :ok
end
# Opens a NIP-77 negentropy session for this connection. Disabled when no
# session server is configured; an exit from the server is soft-failed.
defp maybe_open_negentropy(%__MODULE__{negentropy_sessions: nil}, _subscription_id, _payload),
  do: {:error, :negentropy_disabled}

defp maybe_open_negentropy(%__MODULE__{negentropy_sessions: server}, subscription_id, payload) do
  Sessions.open(server, self(), subscription_id, payload)
catch
  :exit, _reason -> {:error, :negentropy_unavailable}
end
# Forwards a client message to the open negentropy session. Disabled when no
# session server is configured; an exit from the server is soft-failed.
defp maybe_negentropy_message(%__MODULE__{negentropy_sessions: nil}, _subscription_id, _payload),
  do: {:error, :negentropy_disabled}

defp maybe_negentropy_message(
       %__MODULE__{negentropy_sessions: server},
       subscription_id,
       payload
     ) do
  Sessions.message(server, self(), subscription_id, payload)
catch
  :exit, _reason -> {:error, :negentropy_unavailable}
end
# Closes the negentropy session. Fire-and-forget: a missing server or an
# exit during the call is treated as already closed.
defp maybe_close_negentropy(%__MODULE__{negentropy_sessions: nil}, _subscription_id), do: :ok

defp maybe_close_negentropy(%__MODULE__{negentropy_sessions: server}, subscription_id) do
  Sessions.close(server, self(), subscription_id)
  :ok
catch
  :exit, _reason -> :ok
end
# Best-effort fanout: delivers the event to every connection process that the
# subscription index reports as a candidate. A non-list index result is
# ignored, and an exit from the index (e.g. during shutdown) is swallowed.
defp fanout_event(event) do
  case Index.candidate_subscription_keys(event) do
    candidates when is_list(candidates) ->
      for {owner_pid, subscription_id} <- candidates do
        send(owner_pid, {:fanout_event, subscription_id, event})
      end

      :ok

    _other ->
      :ok
  end
catch
  :exit, _reason -> :ok
end
# Cross-node publish is best effort; an exit from the multi-node layer is
# ignored so a cluster hiccup never breaks local ingestion.
defp maybe_publish_multi_node(event) do
  _ = MultiNode.publish(event)
  :ok
catch
  :exit, _reason -> :ok
end
# Queues fanned-out events onto this connection's outbound queue and schedules
# a drain; queue overflow emits telemetry and closes the connection.
defp handle_fanout_events(%__MODULE__{} = state, fanout_events) do
  started_at = System.monotonic_time()

  case enqueue_fanout_events(state, fanout_events) do
    {:ok, queued_state} ->
      Telemetry.emit(
        [:parrhesia, :fanout, :stop],
        %{duration: System.monotonic_time() - started_at},
        %{}
      )

      {:ok, maybe_schedule_drain(queued_state)}

    {:close, overflow_state} ->
      Telemetry.emit([:parrhesia, :connection, :outbound_queue, :overflow], %{count: 1}, %{})
      close_with_outbound_overflow(overflow_state)
  end
end
@@ -212,12 +635,15 @@ defmodule Parrhesia.Web.Connection do
queue_entry
)
when queue_size < max_outbound_queue do
{:ok,
%__MODULE__{
state
| outbound_queue: :queue.in(queue_entry, state.outbound_queue),
outbound_queue_size: queue_size + 1
}}
next_state =
%__MODULE__{
state
| outbound_queue: :queue.in(queue_entry, state.outbound_queue),
outbound_queue_size: queue_size + 1
}
emit_outbound_queue_depth(next_state.outbound_queue_size)
{:ok, next_state}
end
defp enqueue_outbound(
@@ -233,6 +659,7 @@ defmodule Parrhesia.Web.Connection do
{next_queue, next_size} =
drop_oldest_and_enqueue(state.outbound_queue, state.outbound_queue_size, queue_entry)
emit_outbound_queue_depth(next_size)
{:ok, %__MODULE__{state | outbound_queue: next_queue, outbound_queue_size: next_size}}
end
@@ -266,6 +693,8 @@ defmodule Parrhesia.Web.Connection do
}
|> maybe_schedule_drain()
emit_outbound_queue_depth(remaining_size)
{Enum.reverse(frames), next_state}
end
@@ -295,6 +724,10 @@ defmodule Parrhesia.Web.Connection do
%__MODULE__{state | drain_scheduled?: true}
end
# Gauge-style telemetry for the per-connection outbound queue depth.
defp emit_outbound_queue_depth(current_depth) do
  Telemetry.emit([:parrhesia, :connection, :outbound_queue], %{depth: current_depth}, %{})
end
defp upsert_subscription(%__MODULE__{} = state, subscription_id, filters) do
subscription = %{filters: filters, eose_sent?: true}
@@ -335,11 +768,16 @@ defmodule Parrhesia.Web.Connection do
_queue_entry -> false
end)
%__MODULE__{
state
| outbound_queue: :queue.from_list(filtered_entries),
outbound_queue_size: length(filtered_entries)
}
next_state =
%__MODULE__{
state
| outbound_queue: :queue.from_list(filtered_entries),
outbound_queue_size: length(filtered_entries)
}
emit_outbound_queue_depth(next_state.outbound_queue_size)
next_state
end
defp maybe_upsert_index_subscription(
@@ -390,22 +828,49 @@ defmodule Parrhesia.Web.Connection do
defp subscription_index(opts) when is_list(opts) do
opts
|> Keyword.get(:subscription_index, Index)
|> normalize_subscription_index()
|> normalize_server_ref()
end
defp subscription_index(opts) when is_map(opts) do
opts
|> Map.get(:subscription_index, Index)
|> normalize_subscription_index()
|> normalize_server_ref()
end
defp subscription_index(_opts), do: Index
defp normalize_subscription_index(subscription_index)
when is_pid(subscription_index) or is_atom(subscription_index),
do: subscription_index
defp auth_challenges(opts) when is_list(opts) do
opts
|> Keyword.get(:auth_challenges, Challenges)
|> normalize_server_ref()
end
defp normalize_subscription_index(_subscription_index), do: nil
defp auth_challenges(opts) when is_map(opts) do
opts
|> Map.get(:auth_challenges, Challenges)
|> normalize_server_ref()
end
defp auth_challenges(_opts), do: Challenges
# Resolves the negentropy session server from init opts (keyword list or
# map), defaulting to the Sessions module; invalid refs normalize to nil,
# which disables negentropy for this connection.
defp negentropy_sessions(opts) when is_list(opts),
  do: opts |> Keyword.get(:negentropy_sessions, Sessions) |> normalize_server_ref()

defp negentropy_sessions(opts) when is_map(opts),
  do: opts |> Map.get(:negentropy_sessions, Sessions) |> normalize_server_ref()

defp negentropy_sessions(_opts), do: Sessions
# Only pids and registered names are valid GenServer refs; anything else
# normalizes to nil (feature disabled).
defp normalize_server_ref(ref) when is_pid(ref) or is_atom(ref), do: ref
defp normalize_server_ref(_ref), do: nil
defp max_subscriptions_per_connection(opts) when is_list(opts) do
opts
@@ -430,10 +895,7 @@ defmodule Parrhesia.Web.Connection do
defp configured_max_subscriptions_per_connection do
:parrhesia
|> Application.get_env(:limits, [])
|> Keyword.get(
:max_subscriptions_per_connection,
@default_max_subscriptions_per_connection
)
|> Keyword.get(:max_subscriptions_per_connection, @default_max_subscriptions_per_connection)
end
defp max_outbound_queue(opts) when is_list(opts) do

View File

@@ -0,0 +1,102 @@
defmodule Parrhesia.Web.Management do
  @moduledoc """
  HTTP management API (NIP-86 style) with NIP-98 auth validation.
  """

  import Plug.Conn

  alias Parrhesia.Auth.Nip98
  alias Parrhesia.Storage

  # Handles a management request end-to-end: validate the NIP-98 Authorization
  # header against the request method and full URL, parse the JSON payload,
  # dispatch the method via the admin storage adapter, and append an audit-log
  # entry. Auth failures map to distinct 401 error codes; payload and method
  # failures map to 400.
  #
  # NOTE(review): append_audit_log/3 is expected to return :ok — a value that
  # is neither :ok nor {:error, _} would raise WithClauseError in this `with`;
  # confirm the Storage.admin() contract.
  @spec handle(Plug.Conn.t()) :: Plug.Conn.t()
  def handle(conn) do
    full_url = full_request_url(conn)
    method = conn.method
    authorization = get_req_header(conn, "authorization") |> List.first()

    with {:ok, auth_event} <- Nip98.validate_authorization_header(authorization, method, full_url),
         {:ok, payload} <- parse_payload(conn.body_params),
         {:ok, result} <- execute_method(payload),
         :ok <- append_audit_log(auth_event, payload, result) do
      send_json(conn, 200, %{"ok" => true, "result" => result})
    else
      {:error, :missing_authorization} ->
        send_json(conn, 401, %{"ok" => false, "error" => "auth-required"})

      {:error, :invalid_authorization} ->
        send_json(conn, 401, %{"ok" => false, "error" => "invalid-authorization"})

      {:error, :invalid_event} ->
        send_json(conn, 401, %{"ok" => false, "error" => "invalid-auth-event"})

      {:error, :stale_event} ->
        send_json(conn, 401, %{"ok" => false, "error" => "stale-auth-event"})

      {:error, :invalid_method_tag} ->
        send_json(conn, 401, %{"ok" => false, "error" => "auth-method-tag-mismatch"})

      {:error, :invalid_url_tag} ->
        send_json(conn, 401, %{"ok" => false, "error" => "auth-url-tag-mismatch"})

      {:error, :invalid_payload} ->
        send_json(conn, 400, %{"ok" => false, "error" => "invalid-payload"})

      {:error, reason} ->
        send_json(conn, 400, %{"ok" => false, "error" => inspect(reason)})
    end
  end

  # Requires a JSON body shaped %{"method" => binary, "params" => map};
  # "params" defaults to an empty map. Anything else is an invalid payload.
  defp parse_payload(%{"method" => method} = payload) when is_binary(method) do
    params = Map.get(payload, "params", %{})

    if is_map(params) do
      {:ok, %{method: method, params: params}}
    else
      {:error, :invalid_payload}
    end
  end

  defp parse_payload(_payload), do: {:error, :invalid_payload}

  # Dispatches the requested method to the pluggable admin storage backend.
  defp execute_method(payload) do
    Storage.admin().execute(%{}, payload.method, payload.params)
  end

  # Records who (actor_pubkey from the NIP-98 event) ran which method with
  # which params, plus the normalized result.
  defp append_audit_log(auth_event, payload, result) do
    Storage.admin().append_audit_log(%{}, %{
      method: payload.method,
      actor_pubkey: Map.get(auth_event, "pubkey"),
      params: payload.params,
      result: normalize_result(result)
    })
  end

  # Audit-log results are stored as maps; lists and other terms are wrapped.
  defp normalize_result(result) when is_map(result), do: result
  defp normalize_result(result) when is_list(result), do: %{"list" => result}
  defp normalize_result(result), do: %{"value" => inspect(result)}

  defp send_json(conn, status, body) do
    encoded = Jason.encode!(body)

    conn
    |> put_resp_content_type("application/json")
    |> send_resp(status, encoded)
  end

  # Reconstructs the absolute request URL (omitting default ports for http/https)
  # so it can be validated against the NIP-98 event's URL tag.
  defp full_request_url(conn) do
    scheme = Atom.to_string(conn.scheme)
    host = conn.host
    port = conn.port

    port_suffix =
      cond do
        conn.scheme == :http and port == 80 -> ""
        conn.scheme == :https and port == 443 -> ""
        true -> ":#{port}"
      end

    query_suffix = if conn.query_string == "", do: "", else: "?#{conn.query_string}"

    "#{scheme}://#{host}#{port_suffix}#{conn.request_path}#{query_suffix}"
  end
end

View File

@@ -0,0 +1,18 @@
defmodule Parrhesia.Web.Readiness do
  @moduledoc false

  # Registered process names that must all be alive before the relay
  # reports itself ready.
  @required_processes [
    Parrhesia.Subscriptions.Index,
    Parrhesia.Auth.Challenges,
    Parrhesia.Negentropy.Sessions,
    Parrhesia.Repo
  ]

  @doc """
  Returns `true` once every core registered process is running.
  """
  @spec ready?() :: boolean()
  def ready? do
    Enum.all?(@required_processes, &process_ready?/1)
  end

  # A process is ready when its registered name resolves to a pid.
  defp process_ready?(name) do
    case Process.whereis(name) do
      pid when is_pid(pid) -> true
      nil -> false
    end
  end
end

View File

@@ -0,0 +1,60 @@
defmodule Parrhesia.Web.RelayInfo do
  @moduledoc """
  NIP-11 relay information document.
  """

  # Base set of NIP numbers this relay advertises to clients.
  @base_nips [1, 9, 11, 13, 17, 40, 42, 43, 44, 45, 50, 59, 62, 66, 70, 77, 86, 98]

  @doc """
  Builds the NIP-11 information document served for `application/nostr+json`.
  """
  @spec document() :: map()
  def document do
    %{
      "name" => "Parrhesia",
      "description" => "Parrhesia Nostr relay",
      "pubkey" => nil,
      "supported_nips" => supported_nips(),
      "software" => "https://github.com/example/parrhesia",
      "version" => to_string(Application.spec(:parrhesia, :vsn)),
      "limitation" => limitations()
    }
  end

  defp supported_nips, do: maybe_add_mls(@base_nips)

  # Advertise the MLS extension ("EE") only when the feature flag is enabled.
  defp maybe_add_mls(nips) do
    if Parrhesia.Config.get([:features, :nip_ee_mls], false) do
      ["EE" | nips]
    else
      nips
    end
  end

  # Server limitation block; defaults mirror the relay's config defaults.
  defp limitations do
    %{
      "max_message_length" => Parrhesia.Config.get([:limits, :max_frame_bytes], 1_048_576),
      "max_subscriptions" =>
        Parrhesia.Config.get([:limits, :max_subscriptions_per_connection], 32),
      "max_filters" => Parrhesia.Config.get([:limits, :max_filters_per_req], 16),
      "auth_required" => Parrhesia.Config.get([:policies, :auth_required_for_reads], false)
    }
  end
end

View File

@@ -3,6 +3,17 @@ defmodule Parrhesia.Web.Router do
use Plug.Router
alias Parrhesia.Telemetry
alias Parrhesia.Web.Management
alias Parrhesia.Web.Readiness
alias Parrhesia.Web.RelayInfo
plug(Plug.Parsers,
parsers: [:json],
pass: ["application/json"],
json_decoder: Jason
)
plug(:match)
plug(:dispatch)
@@ -10,13 +21,47 @@ defmodule Parrhesia.Web.Router do
send_resp(conn, 200, "ok")
end
get "/relay" do
get "/ready" do
  # Readiness probe: 200 once core registered processes are up, 503 otherwise.
  if Readiness.ready?() do
    send_resp(conn, 200, "ready")
  else
    send_resp(conn, 503, "not-ready")
  end
end
get "/metrics" do
body = TelemetryMetricsPrometheus.Core.scrape(Telemetry.prometheus_reporter())
conn
|> WebSockAdapter.upgrade(Parrhesia.Web.Connection, %{}, timeout: 60_000)
|> halt()
|> put_resp_content_type("text/plain")
|> send_resp(200, body)
end
post "/management" do
  # NIP-86 management API; NIP-98 auth and dispatch live in Management.handle/1.
  Management.handle(conn)
end
get "/relay" do
  # NIP-11: serve the relay information document when the client sends
  # Accept: application/nostr+json; otherwise upgrade to the relay websocket.
  if accepts_nip11?(conn) do
    body = Jason.encode!(RelayInfo.document())

    conn
    |> put_resp_content_type("application/nostr+json")
    |> send_resp(200, body)
  else
    conn
    |> WebSockAdapter.upgrade(Parrhesia.Web.Connection, %{}, timeout: 60_000)
    |> halt()
  end
end
match _ do
  # Catch-all for unmatched routes.
  send_resp(conn, 404, "not found")
end
# True when any Accept header value mentions application/nostr+json (NIP-11).
defp accepts_nip11?(conn) do
  conn
  |> get_req_header("accept")
  |> Enum.any?(fn accept -> String.contains?(accept, "application/nostr+json") end)
end
end