test: expand protocol property-based coverage

This commit is contained in:
2026-03-20 02:32:41 +01:00
parent a15856bdac
commit 8b5231fa0d
2 changed files with 146 additions and 8 deletions

View File

@@ -0,0 +1,77 @@
defmodule Parrhesia.Protocol.EventValidatorPropertyTest do
  @moduledoc false
  # Property-based coverage for `Parrhesia.Protocol.EventValidator.compute_id/1`.
  # NOTE(review): assumes compute_id/1 hashes the canonical NIP-01 serialization
  # (SHA-256 → 32 bytes → 64 lowercase hex chars) — confirm against the validator.

  use ExUnit.Case, async: true
  use ExUnitProperties

  alias Parrhesia.Protocol.EventValidator

  property "compute_id always returns lowercase 64-char hex" do
    check all(event <- event_payload()) do
      id = EventValidator.compute_id(event)

      # 64 hex characters encode a 32-byte digest.
      assert byte_size(id) == 64
      # Decoding with `case: :lower` also proves every character is lowercase hex.
      assert {:ok, _decoded} = Base.decode16(id, case: :lower)
    end
  end

  property "compute_id depends only on the canonical NIP-01 tuple fields" do
    check all(
            event <- event_payload(),
            replacement_id <- hex64(),
            replacement_sig <- hex128(),
            extra_value <- StreamData.string(:alphanumeric)
          ) do
      original = EventValidator.compute_id(event)

      # "id", "sig", and unknown keys are not part of the canonical tuple,
      # so replacing them must not change the computed id.
      mutated_event =
        event
        |> Map.put("id", replacement_id)
        |> Map.put("sig", replacement_sig)
        |> Map.put("extra_field", extra_value)

      assert EventValidator.compute_id(mutated_event) == original
    end
  end

  # Generates a full event map with every NIP-01 field populated.
  defp event_payload do
    gen all(
          pubkey <- hex64(),
          created_at <- StreamData.non_negative_integer(),
          kind <- StreamData.integer(0..65_535),
          tags <- tags(),
          content <- StreamData.string(:printable, max_length: 256),
          sig <- hex128(),
          id <- hex64()
        ) do
      %{
        "id" => id,
        "pubkey" => pubkey,
        "created_at" => created_at,
        "kind" => kind,
        "tags" => tags,
        "content" => content,
        "sig" => sig
      }
    end
  end

  # Up to 8 tags, each a non-empty list of 1..4 short printable strings.
  defp tags do
    StreamData.list_of(tag(), max_length: 8)
  end

  defp tag do
    StreamData.list_of(StreamData.string(:printable, min_length: 1, max_length: 32),
      min_length: 1,
      max_length: 4
    )
  end

  # Lowercase hex strings of 64 and 128 characters (32- and 64-byte values),
  # matching the shapes of NIP-01 pubkeys/ids and Schnorr signatures.
  # Consolidated into hex/1 so the encoding pipeline exists in one place.
  defp hex64, do: hex(32)

  defp hex128, do: hex(64)

  defp hex(byte_count) do
    StreamData.binary(length: byte_count)
    |> StreamData.map(&Base.encode16(&1, case: :lower))
  end
end

View File

@@ -10,22 +10,83 @@ defmodule Parrhesia.Protocol.FilterPropertyTest do
candidate_authors <- list_of(hex64(), min_length: 1, max_length: 5),
created_at <- StreamData.non_negative_integer()
) do
event = %{
"pubkey" => author,
"kind" => 1,
"created_at" => created_at,
"tags" => [],
"content" => ""
}
event = base_event(author, created_at)
filter = %{"authors" => candidate_authors}
assert Filter.matches_filter?(event, filter) == author in candidate_authors
end
end
# "since" is an inclusive lower bound and "until" an inclusive upper bound
# on the event's created_at timestamp.
property "since and until filters follow timestamp boundaries" do
  check all(
          pubkey <- hex64(),
          timestamp <- StreamData.non_negative_integer(),
          lower_bound <- StreamData.non_negative_integer(),
          upper_bound <- StreamData.non_negative_integer()
        ) do
    event = base_event(pubkey, timestamp)

    assert Filter.matches_filter?(event, %{"since" => lower_bound}) == (timestamp >= lower_bound)
    assert Filter.matches_filter?(event, %{"until" => upper_bound}) == (timestamp <= upper_bound)
  end
end
# A "#e" filter matches when at least one of its listed values equals the
# event's "e" tag value; a list of guaranteed-absent values must not match.
property "tag filters match when any configured value is present on the event" do
  check all(
          pubkey <- hex64(),
          timestamp <- StreamData.non_negative_integer(),
          tagged_value <- short_string(),
          other_values <- list_of(short_string(), min_length: 1, max_length: 5)
        ) do
    # Event carrying exactly one "e" tag with the generated value.
    event = Map.put(base_event(pubkey, timestamp), "tags", [["e", tagged_value]])

    hit_filter = %{"#e" => Enum.uniq([tagged_value | other_values])}

    # Prefixing guarantees none of these values can collide with tagged_value.
    miss_filter = %{"#e" => Enum.map(other_values, fn value -> "nomatch:" <> value end)}

    assert Filter.matches_filter?(event, hit_filter)
    refute Filter.matches_filter?(event, miss_filter)
  end
end
# Non-string values inside a "#e" filter list are invalid and must never match.
property "invalid tag filters are rejected during matching" do
  check all(
          pubkey <- hex64(),
          timestamp <- StreamData.non_negative_integer(),
          bad_value <- invalid_tag_filter_value()
        ) do
    refute Filter.matches_filter?(base_event(pubkey, timestamp), %{"#e" => [bad_value]})
  end
end
# Minimal kind-1 event skeleton shared by every property in this module:
# fixed kind/tags/content merged with the generated author and timestamp.
defp base_event(author, created_at) do
  Map.merge(
    %{"kind" => 1, "tags" => [], "content" => ""},
    %{"pubkey" => author, "created_at" => created_at}
  )
end
# Generator for 32 random bytes rendered as a 64-character lowercase hex string.
defp hex64 do
  StreamData.map(StreamData.binary(length: 32), fn bytes ->
    Base.encode16(bytes, case: :lower)
  end)
end
# Generator for short non-empty alphanumeric strings used as tag values.
defp short_string do
  StreamData.string(:alphanumeric, min_length: 1, max_length: 16)
end
# Generator for values that are not strings and are therefore invalid
# inside a tag-filter list: integers, booleans, small maps, and lists.
defp invalid_tag_filter_value do
  [
    StreamData.integer(),
    StreamData.boolean(),
    StreamData.map_of(StreamData.string(:alphanumeric), StreamData.integer(), max_length: 2),
    StreamData.list_of(StreamData.integer(), max_length: 2)
  ]
  |> StreamData.one_of()
end
end