feat: NIP-77 negentropy sync

This commit is contained in:
2026-03-16 16:00:15 +01:00
parent 4c2c93deb3
commit 39dbc069a7
22 changed files with 1194 additions and 101 deletions

View File

@@ -0,0 +1,42 @@
defmodule Parrhesia.Negentropy.EngineTest do
  use ExUnit.Case, async: true

  alias Parrhesia.Negentropy.Engine
  alias Parrhesia.Negentropy.Message

  test "returns exact id list for small mismatched ranges" do
    items = [
      %{created_at: 10, id: <<1::size(256)>>},
      %{created_at: 11, id: <<2::size(256)>>}
    ]

    # An empty client snapshot against a small server set collapses into a
    # single id-list range that covers everything up to infinity.
    assert {:ok, response} = Engine.answer(items, Engine.initial_message([]))

    assert {:ok, [%{mode: :id_list, payload: ids, upper_bound: :infinity}]} =
             Message.decode(response)

    assert ids == Enum.map(items, & &1.id)
  end

  test "splits large mismatched fingerprint ranges" do
    shared = for idx <- 1..4, do: %{created_at: 100 + idx, id: <<idx::size(256)>>}
    extra = %{created_at: 200, id: <<99::size(256)>>}

    # A threshold of 1 forces the engine to split the mismatch into multiple
    # ranges instead of answering with one flat id list.
    request = Engine.initial_message(shared, id_list_threshold: 1)

    assert {:ok, response} = Engine.answer(shared ++ [extra], request, id_list_threshold: 1)
    assert {:ok, ranges} = Message.decode(response)

    for range <- ranges, do: assert(range.mode in [:fingerprint, :id_list])
    assert length(ranges) >= 2
  end

  test "downgrades unsupported versions" do
    # An unsupported version byte (0x62) is answered with the supported
    # version marker (0x61) so the client can retry at that version.
    assert {:ok, <<0x61>>} = Engine.answer([], <<0x62>>)
  end
end

View File

@@ -0,0 +1,28 @@
defmodule Parrhesia.Negentropy.MessageTest do
  use ExUnit.Case, async: true

  alias Parrhesia.Negentropy.Message

  test "encodes and decodes mixed range messages" do
    id_a = <<1::size(256)>>
    id_b = <<2::size(256)>>

    # Both items share a created_at, so split_bound must discriminate on id.
    midpoint =
      Message.split_bound(%{created_at: 10, id: id_a}, %{created_at: 10, id: id_b})

    ranges = [
      %{upper_bound: midpoint, mode: :fingerprint, payload: <<0::size(128)>>},
      %{upper_bound: {11, Message.zero_id()}, mode: :id_list, payload: [id_b]},
      %{upper_bound: :infinity, mode: :skip, payload: nil}
    ]

    assert {:ok, decoded} = ranges |> Message.encode() |> Message.decode()

    # A trailing skip-to-infinity range carries no information, so the codec
    # may drop it on the round trip; everything else survives intact.
    assert decoded ==
             Enum.reject(ranges, &(&1.mode == :skip and &1.upper_bound == :infinity))
  end

  test "rejects malformed bounds and payloads" do
    assert {:error, :invalid_message} = Message.decode(<<0x61, 0x00, 0x01, 0x02>>)
  end
end

View File

@@ -1,19 +1,64 @@
defmodule Parrhesia.Negentropy.SessionsTest do
use ExUnit.Case, async: true
use ExUnit.Case, async: false
alias Ecto.Adapters.SQL.Sandbox
alias Parrhesia.Negentropy.Engine
alias Parrhesia.Negentropy.Message
alias Parrhesia.Negentropy.Sessions
alias Parrhesia.Protocol.EventValidator
alias Parrhesia.Repo
alias Parrhesia.Storage.Adapters.Postgres.Events
test "opens, advances and closes sessions" do
setup_all do
if is_nil(Process.whereis(Repo)) do
start_supervised!(Repo)
end
Sandbox.mode(Repo, :manual)
:ok
end
setup do
:ok = Sandbox.checkout(Repo)
:ok
end
test "opens, responds, advances and closes sessions" do
server = start_supervised!({Sessions, name: nil})
Sandbox.allow(Repo, self(), server)
assert {:ok, %{"status" => "open", "cursor" => 0}} =
Sessions.open(server, self(), "sub-neg", %{"cursor" => 0})
first =
persist_event(%{
"created_at" => 1_700_100_000,
"content" => "neg-1"
})
assert {:ok, %{"status" => "ack", "cursor" => 1}} =
Sessions.message(server, self(), "sub-neg", %{"delta" => "abc"})
second =
persist_event(%{
"created_at" => 1_700_100_001,
"content" => "neg-2"
})
initial_message = Engine.initial_message([])
assert {:ok, response_message} =
Sessions.open(server, self(), "sub-neg", %{"kinds" => [1]}, initial_message)
assert {:ok, [%{mode: :id_list, payload: ids, upper_bound: :infinity}]} =
Message.decode(response_message)
assert ids == [
Base.decode16!(first["id"], case: :mixed),
Base.decode16!(second["id"], case: :mixed)
]
{:ok, refs} = Events.query_event_refs(%{}, [%{"kinds" => [1]}], [])
matching_message = Engine.initial_message(refs)
assert {:ok, <<0x61>>} = Sessions.message(server, self(), "sub-neg", matching_message)
assert :ok = Sessions.close(server, self(), "sub-neg")
assert {:error, :unknown_session} = Sessions.message(server, self(), "sub-neg", %{})
assert {:error, :unknown_session} = Sessions.message(server, self(), "sub-neg", <<0x61>>)
end
test "rejects oversized NEG payloads" do
@@ -28,8 +73,16 @@ defmodule Parrhesia.Negentropy.SessionsTest do
sweep_interval_seconds: 60}
)
Sandbox.allow(Repo, self(), server)
assert {:error, :payload_too_large} =
Sessions.open(server, self(), "sub-neg", %{"delta" => String.duplicate("a", 256)})
Sessions.open(
server,
self(),
"sub-neg",
%{"kinds" => [1]},
String.duplicate(<<0x61>>, 128)
)
end
test "enforces per-owner session limits" do
@@ -44,10 +97,60 @@ defmodule Parrhesia.Negentropy.SessionsTest do
sweep_interval_seconds: 60}
)
assert {:ok, %{"status" => "open", "cursor" => 0}} =
Sessions.open(server, self(), "sub-1", %{})
Sandbox.allow(Repo, self(), server)
assert {:ok, _response} =
Sessions.open(server, self(), "sub-1", %{"kinds" => [1]}, Engine.initial_message([]))
assert {:error, :owner_session_limit_reached} =
Sessions.open(server, self(), "sub-2", %{})
Sessions.open(server, self(), "sub-2", %{"kinds" => [1]}, Engine.initial_message([]))
end
test "blocks queries larger than the configured session snapshot limit" do
  session_opts = [
    name: nil,
    max_payload_bytes: 1024,
    max_sessions_per_owner: 8,
    max_total_sessions: 16,
    max_idle_seconds: 60,
    sweep_interval_seconds: 60,
    max_items_per_session: 1
  ]

  sessions = start_supervised!({Sessions, session_opts})
  Sandbox.allow(Repo, self(), sessions)

  # Two stored events exceed the max_items_per_session: 1 snapshot cap.
  persist_event(%{"created_at" => 1_700_200_000, "content" => "first"})
  persist_event(%{"created_at" => 1_700_200_001, "content" => "second"})

  assert {:error, :query_too_big} =
           Sessions.open(
             sessions,
             self(),
             "sub-neg",
             %{"kinds" => [1]},
             Engine.initial_message([])
           )
end
# Builds an event from `overrides`, writes it through the events adapter,
# and returns the stored copy for use in assertions.
defp persist_event(overrides) do
  assert {:ok, stored} = Events.put_event(%{}, build_event(overrides))
  stored
end
# Merges `overrides` into a minimal kind-1 event skeleton and stamps the
# computed "id" so validators accept it.
defp build_event(overrides) do
  %{
    "pubkey" => String.duplicate("1", 64),
    "created_at" => System.system_time(:second),
    "kind" => 1,
    "tags" => [],
    "content" => "negentropy-test",
    "sig" => String.duplicate("2", 128)
  }
  |> Map.merge(overrides)
  |> then(fn event -> Map.put(event, "id", EventValidator.compute_id(event)) end)
end
end

View File

@@ -41,11 +41,13 @@ defmodule Parrhesia.ProtocolTest do
assert {:ok, {:auth, ^auth_event}} =
Protocol.decode_client(JSON.encode!(["AUTH", auth_event]))
assert {:ok, {:neg_open, "sub-neg", %{"cursor" => 0}}} =
Protocol.decode_client(JSON.encode!(["NEG-OPEN", "sub-neg", %{"cursor" => 0}]))
assert {:ok, {:neg_open, "sub-neg", %{"kinds" => [1]}, <<0x61>>}} =
Protocol.decode_client(
JSON.encode!(["NEG-OPEN", "sub-neg", %{"kinds" => [1]}, "61"])
)
assert {:ok, {:neg_msg, "sub-neg", %{"delta" => "abc"}}} =
Protocol.decode_client(JSON.encode!(["NEG-MSG", "sub-neg", %{"delta" => "abc"}]))
assert {:ok, {:neg_msg, "sub-neg", <<0x61, 0x00>>}} =
Protocol.decode_client(JSON.encode!(["NEG-MSG", "sub-neg", "6100"]))
assert {:ok, {:neg_close, "sub-neg"}} =
Protocol.decode_client(JSON.encode!(["NEG-CLOSE", "sub-neg"]))
@@ -90,6 +92,12 @@ defmodule Parrhesia.ProtocolTest do
count_frame = Protocol.encode_relay({:count, "sub-1", %{"count" => 1}})
assert JSON.decode!(count_frame) == ["COUNT", "sub-1", %{"count" => 1}]
neg_message_frame = Protocol.encode_relay({:neg_msg, "sub-neg", "61"})
assert JSON.decode!(neg_message_frame) == ["NEG-MSG", "sub-neg", "61"]
neg_error_frame = Protocol.encode_relay({:neg_err, "sub-neg", "closed: too slow"})
assert JSON.decode!(neg_error_frame) == ["NEG-ERR", "sub-neg", "closed: too slow"]
end
defp valid_event do

View File

@@ -8,12 +8,23 @@ defmodule Parrhesia.Storage.Adapters.Memory.AdapterTest do
test "memory adapter supports basic behavior contract operations" do
event_id = String.duplicate("a", 64)
event = %{"id" => event_id, "pubkey" => "pk", "kind" => 1, "tags" => [], "content" => "hello"}
event = %{
"id" => event_id,
"pubkey" => "pk",
"created_at" => 1_700_000_000,
"kind" => 1,
"tags" => [],
"content" => "hello"
}
assert {:ok, _event} = Events.put_event(%{}, event)
assert {:ok, [result]} = Events.query(%{}, [%{"ids" => [event_id]}], [])
assert result["id"] == event_id
assert {:ok, [%{created_at: 1_700_000_000, id: <<_::size(256)>>}]} =
Events.query_event_refs(%{}, [%{"ids" => [event_id]}], [])
assert :ok = Moderation.ban_pubkey(%{}, "pk")
assert {:ok, true} = Moderation.pubkey_banned?(%{}, "pk")

View File

@@ -106,6 +106,37 @@ defmodule Parrhesia.Storage.Adapters.Postgres.EventsQueryCountTest do
assert Enum.map(results, & &1["id"]) == [newest["id"], tie_winner_id]
end
test "query_event_refs/3 returns sorted lightweight refs for negentropy" do
  author = String.duplicate("9", 64)

  # Insert the newer event first so the assertion below actually proves
  # ascending created_at ordering rather than insertion order.
  newer =
    persist_event(%{
      "pubkey" => author,
      "created_at" => 1_700_000_510,
      "kind" => 1,
      "content" => "later"
    })

  older =
    persist_event(%{
      "pubkey" => author,
      "created_at" => 1_700_000_500,
      "kind" => 1,
      "content" => "earlier"
    })

  assert {:ok, refs} =
           Events.query_event_refs(%{}, [%{"authors" => [author], "kinds" => [1]}], [])

  expected =
    for event <- [older, newer] do
      %{created_at: event["created_at"], id: Base.decode16!(event["id"], case: :mixed)}
    end

  assert refs == expected
end
test "count/3 ORs filters, deduplicates matches and respects tag filters" do
now = 1_700_001_000
target_pubkey = String.duplicate("f", 64)

View File

@@ -3,7 +3,16 @@ defmodule Parrhesia.Storage.BehaviourContractsTest do
test "events behavior exposes expected callbacks" do
assert callback_names(Parrhesia.Storage.Events) ==
[:count, :delete_by_request, :get_event, :purge_expired, :put_event, :query, :vanish]
[
:count,
:delete_by_request,
:get_event,
:purge_expired,
:put_event,
:query,
:query_event_refs,
:vanish
]
end
test "moderation behavior exposes expected callbacks" do

View File

@@ -2,6 +2,8 @@ defmodule Parrhesia.Web.ConnectionTest do
use ExUnit.Case, async: false
alias Ecto.Adapters.SQL.Sandbox
alias Parrhesia.Negentropy.Engine
alias Parrhesia.Negentropy.Message
alias Parrhesia.Protocol.EventValidator
alias Parrhesia.Repo
alias Parrhesia.Web.Connection
@@ -435,23 +437,109 @@ defmodule Parrhesia.Web.ConnectionTest do
]
end
test "NEG sessions open and close" do
state = connection_state()
test "NEG sessions open, return reconciliation payloads and close silently" do
negentropy_sessions =
start_supervised!(
{Parrhesia.Negentropy.Sessions,
name: nil,
max_payload_bytes: 1024,
max_sessions_per_owner: 8,
max_total_sessions: 16,
max_idle_seconds: 60,
sweep_interval_seconds: 60}
)
open_payload = JSON.encode!(["NEG-OPEN", "neg-1", %{"cursor" => 0}])
Sandbox.allow(Repo, self(), negentropy_sessions)
state = connection_state(negentropy_sessions: negentropy_sessions)
first =
valid_event(%{
"created_at" => 1_700_300_000,
"content" => "neg-a"
})
second =
valid_event(%{
"created_at" => 1_700_300_001,
"content" => "neg-b"
})
assert {:push, {:text, _response}, _next_state} =
Connection.handle_in({JSON.encode!(["EVENT", first]), [opcode: :text]}, state)
assert {:push, {:text, _response}, _next_state} =
Connection.handle_in({JSON.encode!(["EVENT", second]), [opcode: :text]}, state)
open_payload =
JSON.encode!([
"NEG-OPEN",
"neg-1",
%{"kinds" => [1]},
Base.encode16(Engine.initial_message([]), case: :lower)
])
assert {:push, {:text, open_response}, _next_state} =
Connection.handle_in({open_payload, [opcode: :text]}, state)
assert ["NEG-MSG", "neg-1", %{"status" => "open", "cursor" => 0}] =
JSON.decode!(open_response)
assert ["NEG-MSG", "neg-1", response_hex] = JSON.decode!(open_response)
assert {:ok, [%{mode: :id_list, payload: ids, upper_bound: :infinity}]} =
response_hex |> Base.decode16!(case: :mixed) |> Message.decode()
assert ids == [
Base.decode16!(first["id"], case: :mixed),
Base.decode16!(second["id"], case: :mixed)
]
close_payload = JSON.encode!(["NEG-CLOSE", "neg-1"])
assert {:push, {:text, close_response}, _next_state} =
assert {:ok, _next_state} =
Connection.handle_in({close_payload, [opcode: :text]}, state)
end
assert JSON.decode!(close_response) == ["NEG-MSG", "neg-1", %{"status" => "closed"}]
test "NEG sessions return NEG-ERR for oversized snapshots" do
  sessions =
    start_supervised!(
      {Parrhesia.Negentropy.Sessions,
       name: nil,
       max_payload_bytes: 1024,
       max_sessions_per_owner: 8,
       max_total_sessions: 16,
       max_idle_seconds: 60,
       sweep_interval_seconds: 60,
       max_items_per_session: 1}
    )

  Sandbox.allow(Repo, self(), sessions)
  state = connection_state(negentropy_sessions: sessions)

  # Store two events so the snapshot exceeds max_items_per_session: 1.
  stored = [
    valid_event(%{"created_at" => 1_700_301_000, "content" => "neg-big-a"}),
    valid_event(%{"created_at" => 1_700_301_001, "content" => "neg-big-b"})
  ]

  for event <- stored do
    assert {:push, {:text, _response}, _next_state} =
             Connection.handle_in({JSON.encode!(["EVENT", event]), [opcode: :text]}, state)
  end

  open_frame =
    JSON.encode!([
      "NEG-OPEN",
      "neg-oversized",
      %{"kinds" => [1]},
      Base.encode16(Engine.initial_message([]), case: :lower)
    ])

  assert {:push, {:text, response}, _next_state} =
           Connection.handle_in({open_frame, [opcode: :text]}, state)

  assert JSON.decode!(response) == [
           "NEG-ERR",
           "neg-oversized",
           "blocked: negentropy query is too big"
         ]
end
test "CLOSE removes subscription and replies with CLOSED" do