Merge branch 'nsfw-api-mrf' into 'develop'

NSFW API Policy

See merge request pleroma/pleroma!3471

Commit e93ae96e13

changelog.d/add-nsfw-mrf.add (new file, 1 line)
@@ -0,0 +1 @@
Add NSFW-detecting MRF
@@ -415,6 +415,13 @@ config :pleroma, :mrf_object_age,
  threshold: 604_800,
  actions: [:delist, :strip_followers]

config :pleroma, :mrf_nsfw_api,
  url: "http://127.0.0.1:5000/",
  threshold: 0.7,
  mark_sensitive: true,
  unlist: false,
  reject: false

config :pleroma, :mrf_follow_bot, follower_nickname: nil

config :pleroma, :mrf_inline_quote, template: "<bdi>RT:</bdi> {url}"
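Note: the hunk above only sets the policy's options; the policy itself still has to be enabled in the MRF pipeline. A minimal sketch, assuming the usual `:mrf` policy list (not part of this merge request):

    config :pleroma, :mrf,
      policies: [Pleroma.Web.ActivityPub.MRF.NsfwApiPolicy]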
installation/nsfw-api.service (new file, 15 lines)
@@ -0,0 +1,15 @@
[Unit]
Description=NSFW API
After=docker.service
Requires=docker.service

[Service]
TimeoutStartSec=0
Restart=always
ExecStartPre=-/usr/bin/docker stop %n
ExecStartPre=-/usr/bin/docker rm %n
ExecStartPre=/usr/bin/docker pull eugencepoi/nsfw_api:latest
ExecStart=/usr/bin/docker run --rm -p 127.0.0.1:5000:5000/tcp --env PORT=5000 --name %n eugencepoi/nsfw_api:latest

[Install]
WantedBy=multi-user.target
lib/pleroma/web/activity_pub/mrf/nsfw_api_policy.ex (new file, 265 lines)
@@ -0,0 +1,265 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Web.ActivityPub.MRF.NsfwApiPolicy do
  @moduledoc """
  Hide, delete, or mark NSFW content as sensitive, using artificial intelligence.

  Requires an NSFW API server, configured like so:

      config :pleroma, :mrf_nsfw_api,
        url: "http://127.0.0.1:5000/",
        threshold: 0.7,
        mark_sensitive: true,
        unlist: false,
        reject: false

  The NSFW API server must implement an HTTP endpoint like this:

      curl http://localhost:5000/?url=https://fedi.com/images/001.jpg

  returning a response like this:

      {"score": 0.314}

  The score ranges from 0 to 1, with `1` being definitely NSFW.

  A suitable API server is available at https://github.com/EugenCepoi/nsfw_api.
  You can run it with Docker in a one-liner:

      docker run -it -p 127.0.0.1:5000:5000/tcp --env PORT=5000 eugencepoi/nsfw_api:latest

  Options:

  - `url`: Base URL of the API server. Default: "http://127.0.0.1:5000/"
  - `threshold`: Lowest score to take action on. Default: `0.7`
  - `mark_sensitive`: Mark all detected NSFW content as sensitive? Default: `true`
  - `unlist`: Unlist all detected NSFW content? Default: `false`
  - `reject`: Reject all detected NSFW content (takes precedence)? Default: `false`
  """
  alias Pleroma.Config
  alias Pleroma.Constants
  alias Pleroma.HTTP
  alias Pleroma.User

  require Logger
  require Pleroma.Constants

  @behaviour Pleroma.Web.ActivityPub.MRF.Policy
  @policy :mrf_nsfw_api

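  # Builds the query URL for the external API,
  # e.g. "http://127.0.0.1:5000/?url=https://example.com/image.jpg".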
  def build_request_url(url) do
    Config.get([@policy, :url])
    |> URI.parse()
    |> fix_path()
    |> Map.put(:query, "url=#{url}")
    |> URI.to_string()
  end

  def parse_url(url) do
    request = build_request_url(url)

    with {:ok, %Tesla.Env{body: body}} <- HTTP.get(request) do
      Jason.decode(body)
    else
      error ->
        Logger.warn("""
        [NsfwApiPolicy]: The API server failed. Skipping.
        #{inspect(error)}
        """)

        error
    end
  end

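  # Scores a single URL against the configured threshold; API failures and
  # unrecognized URL shapes are treated as SFW.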
  def check_url_nsfw(url) when is_binary(url) do
    threshold = Config.get([@policy, :threshold])

    case parse_url(url) do
      {:ok, %{"score" => score}} when score >= threshold ->
        {:nsfw, %{url: url, score: score, threshold: threshold}}

      {:ok, %{"score" => score}} ->
        {:sfw, %{url: url, score: score, threshold: threshold}}

      _ ->
        {:sfw, %{url: url, score: nil, threshold: threshold}}
    end
  end

  def check_url_nsfw(%{"href" => url}) when is_binary(url) do
    check_url_nsfw(url)
  end

  def check_url_nsfw(url) do
    threshold = Config.get([@policy, :threshold])
    {:sfw, %{url: url, score: nil, threshold: threshold}}
  end

  def check_attachment_nsfw(%{"url" => urls} = attachment) when is_list(urls) do
    if Enum.all?(urls, &match?({:sfw, _}, check_url_nsfw(&1))) do
      {:sfw, attachment}
    else
      {:nsfw, attachment}
    end
  end

  def check_attachment_nsfw(%{"url" => url} = attachment) when is_binary(url) do
    case check_url_nsfw(url) do
      {:sfw, _} -> {:sfw, attachment}
      {:nsfw, _} -> {:nsfw, attachment}
    end
  end

  def check_attachment_nsfw(attachment), do: {:sfw, attachment}

  def check_object_nsfw(%{"attachment" => attachments} = object) when is_list(attachments) do
    if Enum.all?(attachments, &match?({:sfw, _}, check_attachment_nsfw(&1))) do
      {:sfw, object}
    else
      {:nsfw, object}
    end
  end

  def check_object_nsfw(%{"object" => %{} = child_object} = object) do
    case check_object_nsfw(child_object) do
      {:sfw, _} -> {:sfw, object}
      {:nsfw, _} -> {:nsfw, object}
    end
  end

  def check_object_nsfw(object), do: {:sfw, object}

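  # MRF callback: SFW objects pass through unchanged; NSFW objects get the
  # configured actions applied (reject, unlist, mark sensitive).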
  @impl true
  def filter(object) do
    with {:sfw, object} <- check_object_nsfw(object) do
      {:ok, object}
    else
      {:nsfw, _data} -> handle_nsfw(object)
      _ -> {:reject, "NSFW: Attachment rejected"}
    end
  end

  defp handle_nsfw(object) do
    if Config.get([@policy, :reject]) do
      {:reject, object}
    else
      {:ok,
       object
       |> maybe_unlist()
       |> maybe_mark_sensitive()}
    end
  end

  defp maybe_unlist(object) do
    if Config.get([@policy, :unlist]) do
      unlist(object)
    else
      object
    end
  end

  defp maybe_mark_sensitive(object) do
    if Config.get([@policy, :mark_sensitive]) do
      mark_sensitive(object)
    else
      object
    end
  end

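  # Makes the post unlisted rather than public: the public collection moves
  # from "to" into "cc", and the author's followers are addressed instead.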
  def unlist(%{"to" => to, "cc" => cc, "actor" => actor} = object) do
    with %User{} = user <- User.get_cached_by_ap_id(actor) do
      to =
        [user.follower_address | to]
        |> List.delete(Constants.as_public())
        |> Enum.uniq()

      cc =
        [Constants.as_public() | cc]
        |> List.delete(user.follower_address)
        |> Enum.uniq()

      object
      |> Map.put("to", to)
      |> Map.put("cc", cc)
    else
      _ -> raise "[NsfwApiPolicy]: Could not find user #{actor}"
    end
  end

  def mark_sensitive(%{"object" => child_object} = object) when is_map(child_object) do
    Map.put(object, "object", mark_sensitive(child_object))
  end

  def mark_sensitive(object) when is_map(object) do
    tags = (object["tag"] || []) ++ ["nsfw"]

    object
    |> Map.put("tag", tags)
    |> Map.put("sensitive", true)
  end

  # Hackney needs a trailing slash
  defp fix_path(%URI{path: path} = uri) when is_binary(path) do
    path = String.trim_trailing(path, "/") <> "/"
    Map.put(uri, :path, path)
  end

  defp fix_path(%URI{path: nil} = uri), do: Map.put(uri, :path, "/")

  @impl true
  def describe do
    options = %{
      threshold: Config.get([@policy, :threshold]),
      mark_sensitive: Config.get([@policy, :mark_sensitive]),
      unlist: Config.get([@policy, :unlist]),
      reject: Config.get([@policy, :reject])
    }

    {:ok, %{@policy => options}}
  end

  @impl true
  def config_description do
    %{
      key: @policy,
      related_policy: to_string(__MODULE__),
      label: "NSFW API Policy",
      description:
        "Hide, delete, or mark NSFW content as sensitive, using artificial intelligence. Requires running an external API server.",
      children: [
        %{
          key: :url,
          type: :string,
          description: "Base URL of the API server.",
          suggestions: ["http://127.0.0.1:5000/"]
        },
        %{
          key: :threshold,
          type: :float,
          description: "Lowest score to take action on. Between 0 and 1.",
          suggestions: [0.7]
        },
        %{
          key: :mark_sensitive,
          type: :boolean,
          description: "Mark all detected NSFW content as sensitive?",
          suggestions: [true]
        },
        %{
          key: :unlist,
          type: :boolean,
          description: "Unlist all detected NSFW content?",
          suggestions: [false]
        },
        %{
          key: :reject,
          type: :boolean,
          description: "Reject all detected NSFW content (takes precedence)?",
          suggestions: [false]
        }
      ]
    }
  end
end
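Taken together, a minimal sketch of what the policy does end to end with the default options above (mark_sensitive on, unlist and reject off), assuming a hypothetical image URL that the API scores above the threshold:

    object = %{
      "type" => "Create",
      "object" => %{"attachment" => [%{"url" => "https://example.com/image.jpg"}]}
    }

    # Sketch only: requires the NSFW API server from the moduledoc to be running.
    {:ok, filtered} = Pleroma.Web.ActivityPub.MRF.NsfwApiPolicy.filter(object)
    filtered["object"]["sensitive"]  #=> true
    filtered["object"]["tag"]        #=> ["nsfw"]

The test suite below exercises the same behaviour against a mocked API.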
test/pleroma/web/activity_pub/mrf/nsfw_api_policy_test.exs (new file, 267 lines)
@@ -0,0 +1,267 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Web.ActivityPub.MRF.NsfwApiPolicyTest do
  use Pleroma.DataCase

  import ExUnit.CaptureLog
  import Pleroma.Factory

  alias Pleroma.Constants
  alias Pleroma.Web.ActivityPub.MRF.NsfwApiPolicy

  require Pleroma.Constants

  @policy :mrf_nsfw_api

  @sfw_url "https://kittens.co/kitty.gif"
  @nsfw_url "https://b00bies.com/nsfw.jpg"
  @timeout_url "http://time.out/i.jpg"

  setup_all do
    clear_config(@policy,
      url: "http://127.0.0.1:5000/",
      threshold: 0.7,
      mark_sensitive: true,
      unlist: false,
      reject: false
    )
  end

  setup do
    Tesla.Mock.mock(fn
      # NSFW URL
      %{method: :get, url: "http://127.0.0.1:5000/?url=#{@nsfw_url}"} ->
        %Tesla.Env{status: 200, body: ~s({"score":0.99772077798843384,"url":"#{@nsfw_url}"})}

      # SFW URL
      %{method: :get, url: "http://127.0.0.1:5000/?url=#{@sfw_url}"} ->
        %Tesla.Env{status: 200, body: ~s({"score":0.00011714912398019806,"url":"#{@sfw_url}"})}

      # Timeout URL
      %{method: :get, url: "http://127.0.0.1:5000/?url=#{@timeout_url}"} ->
        {:error, :timeout}

      # Fallback URL
      %{method: :get, url: "http://127.0.0.1:5000/?url=" <> url} ->
        body =
          ~s({"error_code":500,"error_reason":"[Errno -2] Name or service not known","url":"#{url}"})

        %Tesla.Env{status: 500, body: body}
    end)

    :ok
  end

describe "build_request_url/1" do
|
||||
test "it works" do
|
||||
expected = "http://127.0.0.1:5000/?url=https://b00bies.com/nsfw.jpg"
|
||||
assert NsfwApiPolicy.build_request_url(@nsfw_url) == expected
|
||||
end
|
||||
|
||||
test "it adds a trailing slash" do
|
||||
clear_config([@policy, :url], "http://localhost:5000")
|
||||
|
||||
expected = "http://localhost:5000/?url=https://b00bies.com/nsfw.jpg"
|
||||
assert NsfwApiPolicy.build_request_url(@nsfw_url) == expected
|
||||
end
|
||||
|
||||
test "it adds a trailing slash preserving the path" do
|
||||
clear_config([@policy, :url], "http://localhost:5000/nsfw_api")
|
||||
|
||||
expected = "http://localhost:5000/nsfw_api/?url=https://b00bies.com/nsfw.jpg"
|
||||
assert NsfwApiPolicy.build_request_url(@nsfw_url) == expected
|
||||
end
|
||||
end
|
||||
|
||||
describe "parse_url/1" do
|
||||
test "returns decoded JSON from the API server" do
|
||||
expected = %{"score" => 0.99772077798843384, "url" => @nsfw_url}
|
||||
assert NsfwApiPolicy.parse_url(@nsfw_url) == {:ok, expected}
|
||||
end
|
||||
|
||||
test "warns when the API server fails" do
|
||||
expected = "[NsfwApiPolicy]: The API server failed. Skipping."
|
||||
assert capture_log(fn -> NsfwApiPolicy.parse_url(@timeout_url) end) =~ expected
|
||||
end
|
||||
|
||||
test "returns {:error, _} tuple when the API server fails" do
|
||||
capture_log(fn ->
|
||||
assert {:error, _} = NsfwApiPolicy.parse_url(@timeout_url)
|
||||
end)
|
||||
end
|
||||
end
|
||||
|
||||
describe "check_url_nsfw/1" do
|
||||
test "returns {:nsfw, _} tuple" do
|
||||
expected = {:nsfw, %{url: @nsfw_url, score: 0.99772077798843384, threshold: 0.7}}
|
||||
assert NsfwApiPolicy.check_url_nsfw(@nsfw_url) == expected
|
||||
end
|
||||
|
||||
test "returns {:sfw, _} tuple" do
|
||||
expected = {:sfw, %{url: @sfw_url, score: 0.00011714912398019806, threshold: 0.7}}
|
||||
assert NsfwApiPolicy.check_url_nsfw(@sfw_url) == expected
|
||||
end
|
||||
|
||||
test "returns {:sfw, _} on failure" do
|
||||
expected = {:sfw, %{url: @timeout_url, score: nil, threshold: 0.7}}
|
||||
|
||||
capture_log(fn ->
|
||||
assert NsfwApiPolicy.check_url_nsfw(@timeout_url) == expected
|
||||
end)
|
||||
end
|
||||
|
||||
test "works with map URL" do
|
||||
expected = {:nsfw, %{url: @nsfw_url, score: 0.99772077798843384, threshold: 0.7}}
|
||||
assert NsfwApiPolicy.check_url_nsfw(%{"href" => @nsfw_url}) == expected
|
||||
end
|
||||
end
|
||||
|
||||
describe "check_attachment_nsfw/1" do
|
||||
test "returns {:nsfw, _} if any items are NSFW" do
|
||||
attachment = %{"url" => [%{"href" => @nsfw_url}, @nsfw_url, @sfw_url]}
|
||||
assert NsfwApiPolicy.check_attachment_nsfw(attachment) == {:nsfw, attachment}
|
||||
end
|
||||
|
||||
test "returns {:sfw, _} if all items are SFW" do
|
||||
attachment = %{"url" => [%{"href" => @sfw_url}, @sfw_url, @sfw_url]}
|
||||
assert NsfwApiPolicy.check_attachment_nsfw(attachment) == {:sfw, attachment}
|
||||
end
|
||||
|
||||
test "works with binary URL" do
|
||||
attachment = %{"url" => @nsfw_url}
|
||||
assert NsfwApiPolicy.check_attachment_nsfw(attachment) == {:nsfw, attachment}
|
||||
end
|
||||
end
|
||||
|
||||
describe "check_object_nsfw/1" do
|
||||
test "returns {:nsfw, _} if any items are NSFW" do
|
||||
object = %{"attachment" => [%{"url" => [%{"href" => @nsfw_url}, @sfw_url]}]}
|
||||
assert NsfwApiPolicy.check_object_nsfw(object) == {:nsfw, object}
|
||||
end
|
||||
|
||||
test "returns {:sfw, _} if all items are SFW" do
|
||||
object = %{"attachment" => [%{"url" => [%{"href" => @sfw_url}, @sfw_url]}]}
|
||||
assert NsfwApiPolicy.check_object_nsfw(object) == {:sfw, object}
|
||||
end
|
||||
|
||||
test "works with embedded object" do
|
||||
object = %{"object" => %{"attachment" => [%{"url" => [%{"href" => @nsfw_url}, @sfw_url]}]}}
|
||||
assert NsfwApiPolicy.check_object_nsfw(object) == {:nsfw, object}
|
||||
end
|
||||
end
|
||||
|
||||
describe "unlist/1" do
|
||||
test "unlist addressing" do
|
||||
user = insert(:user)
|
||||
|
||||
object = %{
|
||||
"to" => [Constants.as_public()],
|
||||
"cc" => [user.follower_address, "https://hello.world/users/alex"],
|
||||
"actor" => user.ap_id
|
||||
}
|
||||
|
||||
expected = %{
|
||||
"to" => [user.follower_address],
|
||||
"cc" => [Constants.as_public(), "https://hello.world/users/alex"],
|
||||
"actor" => user.ap_id
|
||||
}
|
||||
|
||||
assert NsfwApiPolicy.unlist(object) == expected
|
||||
end
|
||||
|
||||
test "raise if user isn't found" do
|
||||
object = %{
|
||||
"to" => [Constants.as_public()],
|
||||
"cc" => [],
|
||||
"actor" => "https://hello.world/users/alex"
|
||||
}
|
||||
|
||||
assert_raise(RuntimeError, fn ->
|
||||
NsfwApiPolicy.unlist(object)
|
||||
end)
|
||||
end
|
||||
end
|
||||
|
||||
describe "mark_sensitive/1" do
|
||||
test "adds nsfw tag and marks sensitive" do
|
||||
object = %{"tag" => ["yolo"]}
|
||||
expected = %{"tag" => ["yolo", "nsfw"], "sensitive" => true}
|
||||
assert NsfwApiPolicy.mark_sensitive(object) == expected
|
||||
end
|
||||
|
||||
test "works with embedded object" do
|
||||
object = %{"object" => %{"tag" => ["yolo"]}}
|
||||
expected = %{"object" => %{"tag" => ["yolo", "nsfw"], "sensitive" => true}}
|
||||
assert NsfwApiPolicy.mark_sensitive(object) == expected
|
||||
end
|
||||
end
|
||||
|
||||
describe "filter/1" do
|
||||
setup do
|
||||
user = insert(:user)
|
||||
|
||||
nsfw_object = %{
|
||||
"to" => [Constants.as_public()],
|
||||
"cc" => [user.follower_address],
|
||||
"actor" => user.ap_id,
|
||||
"attachment" => [%{"url" => @nsfw_url}]
|
||||
}
|
||||
|
||||
sfw_object = %{
|
||||
"to" => [Constants.as_public()],
|
||||
"cc" => [user.follower_address],
|
||||
"actor" => user.ap_id,
|
||||
"attachment" => [%{"url" => @sfw_url}]
|
||||
}
|
||||
|
||||
%{user: user, nsfw_object: nsfw_object, sfw_object: sfw_object}
|
||||
end
|
||||
|
||||
test "passes SFW object through", %{sfw_object: object} do
|
||||
{:ok, _} = NsfwApiPolicy.filter(object)
|
||||
end
|
||||
|
||||
test "passes NSFW object through when actions are disabled", %{nsfw_object: object} do
|
||||
clear_config([@policy, :mark_sensitive], false)
|
||||
clear_config([@policy, :unlist], false)
|
||||
clear_config([@policy, :reject], false)
|
||||
{:ok, _} = NsfwApiPolicy.filter(object)
|
||||
end
|
||||
|
||||
test "passes NSFW object through when :threshold is 1", %{nsfw_object: object} do
|
||||
clear_config([@policy, :reject], true)
|
||||
clear_config([@policy, :threshold], 1)
|
||||
{:ok, _} = NsfwApiPolicy.filter(object)
|
||||
end
|
||||
|
||||
test "rejects SFW object through when :threshold is 0", %{sfw_object: object} do
|
||||
clear_config([@policy, :reject], true)
|
||||
clear_config([@policy, :threshold], 0)
|
||||
{:reject, _} = NsfwApiPolicy.filter(object)
|
||||
end
|
||||
|
||||
test "rejects NSFW when :reject is enabled", %{nsfw_object: object} do
|
||||
clear_config([@policy, :reject], true)
|
||||
{:reject, _} = NsfwApiPolicy.filter(object)
|
||||
end
|
||||
|
||||
test "passes NSFW through when :reject is disabled", %{nsfw_object: object} do
|
||||
clear_config([@policy, :reject], false)
|
||||
{:ok, _} = NsfwApiPolicy.filter(object)
|
||||
end
|
||||
|
||||
test "unlists NSFW when :unlist is enabled", %{user: user, nsfw_object: object} do
|
||||
clear_config([@policy, :unlist], true)
|
||||
{:ok, object} = NsfwApiPolicy.filter(object)
|
||||
assert object["to"] == [user.follower_address]
|
||||
end
|
||||
|
||||
test "passes NSFW through when :unlist is disabled", %{nsfw_object: object} do
|
||||
clear_config([@policy, :unlist], false)
|
||||
{:ok, object} = NsfwApiPolicy.filter(object)
|
||||
assert object["to"] == [Constants.as_public()]
|
||||
end
|
||||
end
|
||||
end
|