# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.ObjectTest do
  use Pleroma.DataCase
  use Oban.Testing, repo: Pleroma.Repo

  import ExUnit.CaptureLog
  import Mox
  import Pleroma.Factory
  import Tesla.Mock

  alias Pleroma.Activity
  alias Pleroma.Hashtag
  alias Pleroma.Object
  alias Pleroma.Repo
  alias Pleroma.Tests.ObanHelpers
  alias Pleroma.UnstubbedConfigMock, as: ConfigMock
  alias Pleroma.Web.CommonAPI

  setup do
    mock(fn env -> apply(HttpRequestMock, :request, [env]) end)

    ConfigMock |> stub_with(Pleroma.Test.StaticConfig)

    :ok
  end

  test "returns an object by its AP id" do
    object = insert(:note)
    found_object = Object.get_by_ap_id(object.data["id"])

    assert object == found_object
  end

  describe "generic changeset" do
    test "it ensures uniqueness of the id" do
      object = insert(:note)
      cs = Object.change(%Object{}, %{data: %{id: object.data["id"]}})
      assert cs.valid?

      {:error, _result} = Repo.insert(cs)
    end
  end

  describe "deletion function" do
    test "deletes an object" do
      object = insert(:note)
      found_object = Object.get_by_ap_id(object.data["id"])

      assert object == found_object

      Object.delete(found_object)

      found_object = Object.get_by_ap_id(object.data["id"])

      refute object == found_object

      assert found_object.data["type"] == "Tombstone"
    end

    test "ensures cache is cleared for the object" do
      object = insert(:note)
      cached_object = Object.get_cached_by_ap_id(object.data["id"])

      assert object == cached_object

      Cachex.put(:web_resp_cache, URI.parse(object.data["id"]).path, "cofe")

      Object.delete(cached_object)

      {:ok, nil} = Cachex.get(:object_cache, "object:#{object.data["id"]}")
      {:ok, nil} = Cachex.get(:web_resp_cache, URI.parse(object.data["id"]).path)

      cached_object = Object.get_cached_by_ap_id(object.data["id"])

      refute object == cached_object

      assert cached_object.data["type"] == "Tombstone"
    end
  end

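  # The "delete attachments" cases below exercise attachment cleanup on object
  # deletion: deleting a note enqueues Pleroma.Workers.AttachmentsCleanupWorker,
  # and the [:instance, :cleanup_attachments] setting decides whether the worker
  # removes the attachment Object and the uploaded file or leaves them in place.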
  describe "delete attachments" do
    setup do: clear_config([Pleroma.Upload])
    setup do: clear_config([:instance, :cleanup_attachments])

    test "Disabled via config" do
      clear_config([Pleroma.Upload, :uploader], Pleroma.Uploaders.Local)
      clear_config([:instance, :cleanup_attachments], false)

      file = %Plug.Upload{
        content_type: "image/jpeg",
        path: Path.absname("test/fixtures/image.jpg"),
        filename: "an_image.jpg"
      }

      user = insert(:user)

      {:ok, %Object{} = attachment} =
        Pleroma.Web.ActivityPub.ActivityPub.upload(file, actor: user.ap_id)

      %{data: %{"attachment" => [%{"url" => [%{"href" => href}]}]}} =
        note = insert(:note, %{user: user, data: %{"attachment" => [attachment.data]}})

      uploads_dir = Pleroma.Config.get!([Pleroma.Uploaders.Local, :uploads])

      path = href |> Path.dirname() |> Path.basename()

      assert {:ok, ["an_image.jpg"]} == File.ls("#{uploads_dir}/#{path}")

      Object.delete(note)

      ObanHelpers.perform(all_enqueued(worker: Pleroma.Workers.AttachmentsCleanupWorker))

      assert Object.get_by_id(note.id).data["deleted"]
      refute Object.get_by_id(attachment.id) == nil

      assert {:ok, ["an_image.jpg"]} == File.ls("#{uploads_dir}/#{path}")
    end

    test "in subdirectories" do
      clear_config([Pleroma.Upload, :uploader], Pleroma.Uploaders.Local)
      clear_config([:instance, :cleanup_attachments], true)

      file = %Plug.Upload{
        content_type: "image/jpeg",
        path: Path.absname("test/fixtures/image.jpg"),
        filename: "an_image.jpg"
      }

      user = insert(:user)

      {:ok, %Object{} = attachment} =
        Pleroma.Web.ActivityPub.ActivityPub.upload(file, actor: user.ap_id)

      %{data: %{"attachment" => [%{"url" => [%{"href" => href}]}]}} =
        note = insert(:note, %{user: user, data: %{"attachment" => [attachment.data]}})

      uploads_dir = Pleroma.Config.get!([Pleroma.Uploaders.Local, :uploads])

      path = href |> Path.dirname() |> Path.basename()

      assert {:ok, ["an_image.jpg"]} == File.ls("#{uploads_dir}/#{path}")

      Object.delete(note)

      ObanHelpers.perform(all_enqueued(worker: Pleroma.Workers.AttachmentsCleanupWorker))

      assert Object.get_by_id(note.id).data["deleted"]
      assert Object.get_by_id(attachment.id) == nil

      assert {:ok, []} == File.ls("#{uploads_dir}/#{path}")
    end

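    # With Pleroma.Upload.Filter.Dedupe in the filter chain the stored file is
    # renamed so identical uploads share one copy and is not kept in a
    # per-upload subdirectory, which is why this test tracks Path.basename(href)
    # directly inside uploads_dir rather than a subdirectory listing.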
    test "with dedupe enabled" do
      clear_config([Pleroma.Upload, :uploader], Pleroma.Uploaders.Local)
      clear_config([Pleroma.Upload, :filters], [Pleroma.Upload.Filter.Dedupe])
      clear_config([:instance, :cleanup_attachments], true)

      uploads_dir = Pleroma.Config.get!([Pleroma.Uploaders.Local, :uploads])

      File.mkdir_p!(uploads_dir)

      file = %Plug.Upload{
        content_type: "image/jpeg",
        path: Path.absname("test/fixtures/image.jpg"),
        filename: "an_image.jpg"
      }

      user = insert(:user)

      {:ok, %Object{} = attachment} =
        Pleroma.Web.ActivityPub.ActivityPub.upload(file, actor: user.ap_id)

      %{data: %{"attachment" => [%{"url" => [%{"href" => href}]}]}} =
        note = insert(:note, %{user: user, data: %{"attachment" => [attachment.data]}})

      filename = Path.basename(href)

      assert {:ok, files} = File.ls(uploads_dir)
      assert filename in files

      Object.delete(note)

      ObanHelpers.perform(all_enqueued(worker: Pleroma.Workers.AttachmentsCleanupWorker))

      assert Object.get_by_id(note.id).data["deleted"]
      assert Object.get_by_id(attachment.id) == nil
      assert {:ok, files} = File.ls(uploads_dir)
      refute filename in files
    end

    test "with objects that have legacy data.url attribute" do
      clear_config([Pleroma.Upload, :uploader], Pleroma.Uploaders.Local)
      clear_config([:instance, :cleanup_attachments], true)

      file = %Plug.Upload{
        content_type: "image/jpeg",
        path: Path.absname("test/fixtures/image.jpg"),
        filename: "an_image.jpg"
      }

      user = insert(:user)

      {:ok, %Object{} = attachment} =
        Pleroma.Web.ActivityPub.ActivityPub.upload(file, actor: user.ap_id)

      {:ok, %Object{}} = Object.create(%{url: "https://google.com", actor: user.ap_id})

      %{data: %{"attachment" => [%{"url" => [%{"href" => href}]}]}} =
        note = insert(:note, %{user: user, data: %{"attachment" => [attachment.data]}})

      uploads_dir = Pleroma.Config.get!([Pleroma.Uploaders.Local, :uploads])

      path = href |> Path.dirname() |> Path.basename()

      assert {:ok, ["an_image.jpg"]} == File.ls("#{uploads_dir}/#{path}")

      Object.delete(note)

      ObanHelpers.perform(all_enqueued(worker: Pleroma.Workers.AttachmentsCleanupWorker))

      assert Object.get_by_id(note.id).data["deleted"]
      assert Object.get_by_id(attachment.id) == nil

      assert {:ok, []} == File.ls("#{uploads_dir}/#{path}")
    end

    test "With custom base_url" do
      clear_config([Pleroma.Upload, :uploader], Pleroma.Uploaders.Local)
      clear_config([Pleroma.Upload, :base_url], "https://sub.domain.tld/dir/")
      clear_config([:instance, :cleanup_attachments], true)

      file = %Plug.Upload{
        content_type: "image/jpeg",
        path: Path.absname("test/fixtures/image.jpg"),
        filename: "an_image.jpg"
      }

      user = insert(:user)

      {:ok, %Object{} = attachment} =
        Pleroma.Web.ActivityPub.ActivityPub.upload(file, actor: user.ap_id)

      %{data: %{"attachment" => [%{"url" => [%{"href" => href}]}]}} =
        note = insert(:note, %{user: user, data: %{"attachment" => [attachment.data]}})

      uploads_dir = Pleroma.Config.get!([Pleroma.Uploaders.Local, :uploads])

      path = href |> Path.dirname() |> Path.basename()

      assert {:ok, ["an_image.jpg"]} == File.ls("#{uploads_dir}/#{path}")

      Object.delete(note)

      ObanHelpers.perform(all_enqueued(worker: Pleroma.Workers.AttachmentsCleanupWorker))

      assert Object.get_by_id(note.id).data["deleted"]
      assert Object.get_by_id(attachment.id) == nil

      assert {:ok, []} == File.ls("#{uploads_dir}/#{path}")
    end
  end

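  # Object.normalize/2 only performs a remote fetch when fetch: true is passed;
  # otherwise unknown AP ids simply resolve to nil, as the cases below show.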
  describe "normalizer" do
    @url "http://mastodon.example.org/@admin/99541947525187367"
    test "does not fetch unknown objects by default" do
      assert nil == Object.normalize(@url)
    end

    test "fetches unknown objects when fetch is explicitly true" do
      %Object{} = object = Object.normalize(@url, fetch: true)

      assert object.data["url"] == @url
    end

    test "does not fetch unknown objects when fetch is false" do
      assert is_nil(
               Object.normalize(@url,
                 fetch: false
               )
             )
    end
  end

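  # get_by_id_and_maybe_refetch/2 re-fetches the remote copy only when more time
  # than :interval has passed since the last fetch. The tests below use
  # interval: -1 to force a refetch and interval: 100 to skip it, and rely on
  # the mock_modified helper from setup to swap in a modified remote response.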
  describe "get_by_id_and_maybe_refetch" do
    setup do
      mock(fn
        %{method: :get, url: "https://patch.cx/objects/9a172665-2bc5-452d-8428-2361d4c33b1d"} ->
          %Tesla.Env{
            status: 200,
            body: File.read!("test/fixtures/tesla_mock/poll_original.json"),
            headers: HttpRequestMock.activitypub_object_headers()
          }

        env ->
          apply(HttpRequestMock, :request, [env])
      end)

      mock_modified = fn resp ->
        mock(fn
          %{method: :get, url: "https://patch.cx/objects/9a172665-2bc5-452d-8428-2361d4c33b1d"} ->
            resp

          env ->
            apply(HttpRequestMock, :request, [env])
        end)
      end

      on_exit(fn -> mock(fn env -> apply(HttpRequestMock, :request, [env]) end) end)

      [mock_modified: mock_modified]
    end

    test "refetches if the time since the last refetch is greater than the interval", %{
      mock_modified: mock_modified
    } do
      %Object{} =
        object =
        Object.normalize("https://patch.cx/objects/9a172665-2bc5-452d-8428-2361d4c33b1d",
          fetch: true
        )

      Object.set_cache(object)

      assert Enum.at(object.data["oneOf"], 0)["replies"]["totalItems"] == 4
      assert Enum.at(object.data["oneOf"], 1)["replies"]["totalItems"] == 0

      mock_modified.(%Tesla.Env{
        status: 200,
        body: File.read!("test/fixtures/tesla_mock/poll_modified.json"),
        headers: HttpRequestMock.activitypub_object_headers()
      })

      updated_object = Object.get_by_id_and_maybe_refetch(object.id, interval: -1)
      object_in_cache = Object.get_cached_by_ap_id(object.data["id"])
      assert updated_object == object_in_cache
      assert Enum.at(updated_object.data["oneOf"], 0)["replies"]["totalItems"] == 8
      assert Enum.at(updated_object.data["oneOf"], 1)["replies"]["totalItems"] == 3
    end

    test "returns the old object if refetch fails", %{mock_modified: mock_modified} do
      %Object{} =
        object =
        Object.normalize("https://patch.cx/objects/9a172665-2bc5-452d-8428-2361d4c33b1d",
          fetch: true
        )

      Object.set_cache(object)

      assert Enum.at(object.data["oneOf"], 0)["replies"]["totalItems"] == 4
      assert Enum.at(object.data["oneOf"], 1)["replies"]["totalItems"] == 0

      assert capture_log(fn ->
               mock_modified.(%Tesla.Env{status: 404, body: ""})

               updated_object = Object.get_by_id_and_maybe_refetch(object.id, interval: -1)
               object_in_cache = Object.get_cached_by_ap_id(object.data["id"])
               assert updated_object == object_in_cache
               assert Enum.at(updated_object.data["oneOf"], 0)["replies"]["totalItems"] == 4
               assert Enum.at(updated_object.data["oneOf"], 1)["replies"]["totalItems"] == 0
             end) =~
               "[error] Couldn't refresh https://patch.cx/objects/9a172665-2bc5-452d-8428-2361d4c33b1d"
    end

    test "does not refetch if the time since the last refetch is less than the interval", %{
      mock_modified: mock_modified
    } do
      %Object{} =
        object =
        Object.normalize("https://patch.cx/objects/9a172665-2bc5-452d-8428-2361d4c33b1d",
          fetch: true
        )

      Object.set_cache(object)

      assert Enum.at(object.data["oneOf"], 0)["replies"]["totalItems"] == 4
      assert Enum.at(object.data["oneOf"], 1)["replies"]["totalItems"] == 0

      mock_modified.(%Tesla.Env{
        status: 200,
        body: File.read!("test/fixtures/tesla_mock/poll_modified.json"),
        headers: HttpRequestMock.activitypub_object_headers()
      })

      updated_object = Object.get_by_id_and_maybe_refetch(object.id, interval: 100)
      object_in_cache = Object.get_cached_by_ap_id(object.data["id"])
      assert updated_object == object_in_cache
      assert Enum.at(updated_object.data["oneOf"], 0)["replies"]["totalItems"] == 4
      assert Enum.at(updated_object.data["oneOf"], 1)["replies"]["totalItems"] == 0
    end

    test "preserves internal fields on refetch", %{mock_modified: mock_modified} do
      %Object{} =
        object =
        Object.normalize("https://patch.cx/objects/9a172665-2bc5-452d-8428-2361d4c33b1d",
          fetch: true
        )

      Object.set_cache(object)

      assert Enum.at(object.data["oneOf"], 0)["replies"]["totalItems"] == 4
      assert Enum.at(object.data["oneOf"], 1)["replies"]["totalItems"] == 0

      user = insert(:user)
      activity = Activity.get_create_by_object_ap_id(object.data["id"])
      {:ok, activity} = CommonAPI.favorite(user, activity.id)
      object = Object.get_by_ap_id(activity.data["object"])

      assert object.data["like_count"] == 1

      mock_modified.(%Tesla.Env{
        status: 200,
        body: File.read!("test/fixtures/tesla_mock/poll_modified.json"),
        headers: HttpRequestMock.activitypub_object_headers()
      })

      updated_object = Object.get_by_id_and_maybe_refetch(object.id, interval: -1)
      object_in_cache = Object.get_cached_by_ap_id(object.data["id"])
      assert updated_object == object_in_cache
      assert Enum.at(updated_object.data["oneOf"], 0)["replies"]["totalItems"] == 8
      assert Enum.at(updated_object.data["oneOf"], 1)["replies"]["totalItems"] == 3

      assert updated_object.data["like_count"] == 1
    end
  end

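  # Hashtag records are derived from the object's data["tag"] entries, so
  # updating the data via Object.update_data/2 is expected to keep the
  # :hashtags association in sync, which the test below verifies.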
  describe ":hashtags association" do
    test "Hashtag records are created with Object record and updated on its change" do
      user = insert(:user)

      {:ok, %{object: object}} =
        CommonAPI.post(user, %{status: "some text #hashtag1 #hashtag2 ..."})

      assert [%Hashtag{name: "hashtag1"}, %Hashtag{name: "hashtag2"}] =
               Enum.sort_by(object.hashtags, & &1.name)

      {:ok, object} = Object.update_data(object, %{"tag" => []})

      assert [] = object.hashtags

      object = Object.get_by_id(object.id) |> Repo.preload(:hashtags)
      assert [] = object.hashtags

      {:ok, object} = Object.update_data(object, %{"tag" => ["abc", "def"]})

      assert [%Hashtag{name: "abc"}, %Hashtag{name: "def"}] =
               Enum.sort_by(object.hashtags, & &1.name)
    end
  end

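  # get_emoji_reactions/1 normalizes stored reactions to the current 3-tuple
  # shape [emoji, actors, emoji_url]; the legacy 2-tuple and map formats come
  # back with a nil URL, as asserted below.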
  describe "get_emoji_reactions/1" do
    test "3-tuple current format" do
      object = %Object{
        data: %{
          "reactions" => [
            ["x", ["https://some/user"], "https://some/emoji"]
          ]
        }
      }

      assert Object.get_emoji_reactions(object) == object.data["reactions"]
    end

    test "2-tuple legacy format" do
      object = %Object{
        data: %{
          "reactions" => [
            ["x", ["https://some/user"]]
          ]
        }
      }

      assert Object.get_emoji_reactions(object) == [["x", ["https://some/user"], nil]]
    end

    test "Map format" do
      object = %Object{
        data: %{
          "reactions" => %{
            "x" => ["https://some/user"]
          }
        }
      }

      assert Object.get_emoji_reactions(object) == [["x", ["https://some/user"], nil]]
    end
  end
end