Merge branch 'develop' of git.pleroma.social:pleroma/pleroma into seanking/pleroma-fix_install_fe_bug

commit f26b580e80

CHANGELOG.md
@@ -3,6 +3,17 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
 
+## unreleased-patch - ???
+
+### Added
+- Rich media failure tracking (along with `:failure_backoff` option)
+
+### Fixed
+- Mastodon API: Search parameter `following` now correctly returns the followings rather than the followers
+- Mastodon API: Timelines hanging for (`number of posts with links * rich media timeout`) in the worst case.
+  Reduced to just rich media timeout.
+- Password resets no longer processed for deactivated accounts
+
 ## [2.1.0] - 2020-08-28
 
 ### Changed
@@ -412,6 +412,7 @@ config :pleroma, :rich_media,
     Pleroma.Web.RichMedia.Parsers.TwitterCard,
     Pleroma.Web.RichMedia.Parsers.OEmbed
   ],
+  failure_backoff: 60_000,
   ttl_setters: [Pleroma.Web.RichMedia.Parser.TTL.AwsSignedUrl]
 
 config :pleroma, :media_proxy,
@@ -740,19 +741,23 @@ config :pleroma, :connections_pool,
 config :pleroma, :pools,
   federation: [
     size: 50,
-    max_waiting: 10
+    max_waiting: 10,
+    timeout: 10_000
   ],
   media: [
     size: 50,
-    max_waiting: 10
+    max_waiting: 10,
+    timeout: 10_000
   ],
   upload: [
     size: 25,
-    max_waiting: 5
+    max_waiting: 5,
+    timeout: 15_000
   ],
   default: [
     size: 10,
-    max_waiting: 2
+    max_waiting: 2,
+    timeout: 5_000
   ]
 
 config :pleroma, :hackney_pools,
@@ -2385,6 +2385,13 @@ config :pleroma, :config_description, [
           suggestions: [
             Pleroma.Web.RichMedia.Parser.TTL.AwsSignedUrl
           ]
+        },
+        %{
+          key: :failure_backoff,
+          type: :integer,
+          description:
+            "Amount of milliseconds after request failure, during which the request will not be retried.",
+          suggestions: [60_000]
         }
       ]
     },
@@ -361,6 +361,7 @@ config :pleroma, Pleroma.Web.MediaProxy.Invalidation.Http,
 * `ignore_hosts`: list of hosts which will be ignored by the metadata parser. For example `["accounts.google.com", "xss.website"]`, defaults to `[]`.
 * `ignore_tld`: list TLDs (top-level domains) which will ignore for parse metadata. default is ["local", "localdomain", "lan"].
 * `parsers`: list of Rich Media parsers.
+* `failure_backoff`: Amount of milliseconds after request failure, during which the request will not be retried.
 
 ## HTTP server
 
@@ -124,7 +124,9 @@ defmodule Mix.Tasks.Pleroma.Frontend do
     url = String.replace(frontend_info["build_url"], "${ref}", frontend_info["ref"])
 
     with {:ok, %{status: 200, body: zip_body}} <-
-           Pleroma.HTTP.get(url, [], timeout: 120_000, recv_timeout: 120_000) do
+           Pleroma.HTTP.get(url, [],
+             adapter: [pool: :media, timeout: 120_000, recv_timeout: 120_000]
+           ) do
       unzip(zip_body, dest)
     else
       e -> {:error, e}
@@ -22,6 +22,7 @@ defmodule Pleroma.Application do
   def repository, do: @repository
 
   def user_agent do
+    if Process.whereis(Pleroma.Web.Endpoint) do
       case Config.get([:http, :user_agent], :default) do
         :default ->
           info = "#{Pleroma.Web.base_url()} <#{Config.get([:instance, :email], "")}>"
@@ -30,6 +31,10 @@ defmodule Pleroma.Application do
         custom ->
           custom
       end
+    else
+      # fallback, if endpoint is not started yet
+      "Pleroma Data Loader"
+    end
   end
 
   # See http://elixir-lang.org/docs/stable/elixir/Application.html
@@ -83,6 +83,11 @@ defmodule Pleroma.Gun.ConnectionPool.Worker do
     end)
 
     {ref, state} = pop_in(state.client_monitors[client_pid])
+    # DOWN message can receive right after `remove_client` call and cause worker to terminate
+    state =
+      if is_nil(ref) do
+        state
+      else
         Process.demonitor(ref)
 
         timer =
@@ -93,7 +98,10 @@ defmodule Pleroma.Gun.ConnectionPool.Worker do
             nil
           end
 
-    {:reply, :ok, %{state | timer: timer}, :hibernate}
+        %{state | timer: timer}
+      end
+
+    {:reply, :ok, state, :hibernate}
   end
 
   @impl true
@@ -103,16 +111,21 @@ defmodule Pleroma.Gun.ConnectionPool.Worker do
     {:stop, :normal, state}
   end
 
+  @impl true
+  def handle_info({:gun_up, _pid, _protocol}, state) do
+    {:noreply, state, :hibernate}
+  end
+
   # Gracefully shutdown if the connection got closed without any streams left
   @impl true
   def handle_info({:gun_down, _pid, _protocol, _reason, []}, state) do
     {:stop, :normal, state}
   end
 
-  # Otherwise, shutdown with an error
+  # Otherwise, wait for retry
   @impl true
-  def handle_info({:gun_down, _pid, _protocol, _reason, _killed_streams} = down_message, state) do
-    {:stop, {:error, down_message}, state}
+  def handle_info({:gun_down, _pid, _protocol, _reason, _killed_streams}, state) do
+    {:noreply, state, :hibernate}
   end
 
   @impl true
@@ -109,8 +109,9 @@ defmodule Pleroma.HTML do
       result =
         content
        |> Floki.parse_fragment!()
-        |> Floki.filter_out("a.mention,a.hashtag,a.attachment,a[rel~=\"tag\"]")
-        |> Floki.attribute("a", "href")
+        |> Floki.find("a:not(.mention,.hashtag,.attachment,[rel~=\"tag\"])")
+        |> Enum.take(1)
+        |> Floki.attribute("href")
        |> Enum.at(0)
 
       {:commit, {:ok, result}}
@@ -11,7 +11,6 @@ defmodule Pleroma.HTTP.AdapterHelper do
   @type proxy_type() :: :socks4 | :socks5
   @type host() :: charlist() | :inet.ip_address()
 
-  alias Pleroma.Config
   alias Pleroma.HTTP.AdapterHelper
   require Logger
 
@@ -44,27 +43,13 @@ defmodule Pleroma.HTTP.AdapterHelper do
   @spec options(URI.t(), keyword()) :: keyword()
   def options(%URI{} = uri, opts \\ []) do
     @defaults
-    |> put_timeout()
     |> Keyword.merge(opts)
     |> adapter_helper().options(uri)
   end
 
-  # For Hackney, this is the time a connection can stay idle in the pool.
-  # For Gun, this is the timeout to receive a message from Gun.
-  defp put_timeout(opts) do
-    {config_key, default} =
-      if adapter() == Tesla.Adapter.Gun do
-        {:pools, Config.get([:pools, :default, :timeout], 5_000)}
-      else
-        {:hackney_pools, 10_000}
-      end
-
-    timeout = Config.get([config_key, opts[:pool], :timeout], default)
-
-    Keyword.merge(opts, timeout: timeout)
-  end
+  @spec get_conn(URI.t(), keyword()) :: {:ok, keyword()} | {:error, atom()}
   def get_conn(uri, opts), do: adapter_helper().get_conn(uri, opts)
 
   defp adapter, do: Application.get_env(:tesla, :adapter)
 
   defp adapter_helper do
@@ -5,6 +5,7 @@
 defmodule Pleroma.HTTP.AdapterHelper.Gun do
   @behaviour Pleroma.HTTP.AdapterHelper
 
+  alias Pleroma.Config
   alias Pleroma.Gun.ConnectionPool
   alias Pleroma.HTTP.AdapterHelper
 
@@ -14,31 +15,46 @@ defmodule Pleroma.HTTP.AdapterHelper.Gun do
     connect_timeout: 5_000,
     domain_lookup_timeout: 5_000,
     tls_handshake_timeout: 5_000,
-    retry: 0,
+    retry: 1,
     retry_timeout: 1000,
     await_up_timeout: 5_000
   ]
 
+  @type pool() :: :federation | :upload | :media | :default
+
   @spec options(keyword(), URI.t()) :: keyword()
   def options(incoming_opts \\ [], %URI{} = uri) do
     proxy =
-      Pleroma.Config.get([:http, :proxy_url])
+      [:http, :proxy_url]
+      |> Config.get()
       |> AdapterHelper.format_proxy()
 
-    config_opts = Pleroma.Config.get([:http, :adapter], [])
+    config_opts = Config.get([:http, :adapter], [])
 
     @defaults
     |> Keyword.merge(config_opts)
     |> add_scheme_opts(uri)
     |> AdapterHelper.maybe_add_proxy(proxy)
     |> Keyword.merge(incoming_opts)
+    |> put_timeout()
   end
 
   defp add_scheme_opts(opts, %{scheme: "http"}), do: opts
 
   defp add_scheme_opts(opts, %{scheme: "https"}) do
-    opts
-    |> Keyword.put(:certificates_verification, true)
+    Keyword.put(opts, :certificates_verification, true)
   end
 
+  defp put_timeout(opts) do
+    # this is the timeout to receive a message from Gun
+    Keyword.put_new(opts, :timeout, pool_timeout(opts[:pool]))
+  end
+
+  @spec pool_timeout(pool()) :: non_neg_integer()
+  def pool_timeout(pool) do
+    default = Config.get([:pools, :default, :timeout], 5_000)
+
+    Config.get([:pools, pool, :timeout], default)
+  end
+
   @spec get_conn(URI.t(), keyword()) :: {:ok, keyword()} | {:error, atom()}
@@ -51,11 +67,11 @@ defmodule Pleroma.HTTP.AdapterHelper.Gun do
 
   @prefix Pleroma.Gun.ConnectionPool
   def limiter_setup do
-    wait = Pleroma.Config.get([:connections_pool, :connection_acquisition_wait])
-    retries = Pleroma.Config.get([:connections_pool, :connection_acquisition_retries])
+    wait = Config.get([:connections_pool, :connection_acquisition_wait])
+    retries = Config.get([:connections_pool, :connection_acquisition_retries])
 
     :pools
-    |> Pleroma.Config.get([])
+    |> Config.get([])
     |> Enum.each(fn {name, opts} ->
       max_running = Keyword.get(opts, :size, 50)
       max_waiting = Keyword.get(opts, :max_waiting, 10)
@@ -69,7 +85,6 @@ defmodule Pleroma.HTTP.AdapterHelper.Gun do
       case result do
         :ok -> :ok
         {:error, :existing} -> :ok
-        e -> raise e
       end
     end)
 
@@ -11,6 +11,8 @@ defmodule Pleroma.HTTP.ExAws do
 
   @impl true
   def request(method, url, body \\ "", headers \\ [], http_opts \\ []) do
+    http_opts = Keyword.put_new(http_opts, :adapter, pool: :upload)
+
     case HTTP.request(method, url, body, headers, http_opts) do
       {:ok, env} ->
         {:ok, %{status_code: env.status, headers: env.headers, body: env.body}}
@@ -11,6 +11,8 @@ defmodule Pleroma.HTTP.Tzdata do
 
   @impl true
   def get(url, headers, options) do
+    options = Keyword.put_new(options, :adapter, pool: :default)
+
     with {:ok, %Tesla.Env{} = env} <- HTTP.get(url, headers, options) do
       {:ok, {env.status, env.headers, env.body}}
     end
@@ -18,6 +20,8 @@ defmodule Pleroma.HTTP.Tzdata do
 
   @impl true
   def head(url, headers, options) do
+    options = Keyword.put_new(options, :adapter, pool: :default)
+
     with {:ok, %Tesla.Env{} = env} <- HTTP.head(url, headers, options) do
       {:ok, {env.status, env.headers}}
     end
@@ -150,7 +150,9 @@ defmodule Pleroma.Instances.Instance do
   defp scrape_favicon(%URI{} = instance_uri) do
     try do
       with {:ok, %Tesla.Env{body: html}} <-
-             Pleroma.HTTP.get(to_string(instance_uri), [{:Accept, "text/html"}]),
+             Pleroma.HTTP.get(to_string(instance_uri), [{"accept", "text/html"}],
+               adapter: [pool: :media]
+             ),
            favicon_rel <-
              html
              |> Floki.parse_document!()
@@ -164,12 +164,12 @@ defmodule Pleroma.Object.Fetcher do
         date: date
       })
 
-    [{"signature", signature}]
+    {"signature", signature}
   end
 
   defp sign_fetch(headers, id, date) do
     if Pleroma.Config.get([:activitypub, :sign_object_fetches]) do
-      headers ++ make_signature(id, date)
+      [make_signature(id, date) | headers]
     else
       headers
     end
@@ -177,7 +177,7 @@ defmodule Pleroma.Object.Fetcher do
 
   defp maybe_date_fetch(headers, date) do
     if Pleroma.Config.get([:activitypub, :sign_object_fetches]) do
-      headers ++ [{"date", date}]
+      [{"date", date} | headers]
     else
       headers
     end
@@ -46,12 +46,23 @@ defmodule Pleroma.Uploaders.S3 do
 
     op =
       if streaming do
+        op =
           upload.tempfile
           |> ExAws.S3.Upload.stream_file()
           |> ExAws.S3.upload(bucket, s3_name, [
             {:acl, :public_read},
             {:content_type, upload.content_type}
           ])
+
+        if Application.get_env(:tesla, :adapter) == Tesla.Adapter.Gun do
+          # set s3 upload timeout to respect :upload pool timeout
+          # timeout should be slightly larger, so s3 can retry upload on fail
+          timeout = Pleroma.HTTP.AdapterHelper.Gun.pool_timeout(:upload) + 1_000
+          opts = Keyword.put(op.opts, :timeout, timeout)
+          Map.put(op, :opts, opts)
+        else
+          op
+        end
       else
         {:ok, file_data} = File.read(upload.tempfile)
 
@@ -116,7 +116,7 @@ defmodule Pleroma.User.Search do
   end
 
   defp base_query(_user, false), do: User
-  defp base_query(user, true), do: User.get_followers_query(user)
+  defp base_query(user, true), do: User.get_friends_query(user)
 
   defp filter_invisible_users(query) do
     from(q in query, where: q.invisible == false)
@@ -114,7 +114,7 @@ defmodule Pleroma.Web.ApiSpec.ListOperation do
       description: "Add accounts to the given list.",
       operationId: "ListController.add_to_list",
       parameters: [id_param()],
-      requestBody: add_remove_accounts_request(),
+      requestBody: add_remove_accounts_request(true),
       security: [%{"oAuth" => ["write:lists"]}],
       responses: %{
         200 => Operation.response("Empty object", "application/json", %Schema{type: :object})
@@ -127,8 +127,16 @@ defmodule Pleroma.Web.ApiSpec.ListOperation do
       tags: ["Lists"],
       summary: "Remove accounts from list",
       operationId: "ListController.remove_from_list",
-      parameters: [id_param()],
-      requestBody: add_remove_accounts_request(),
+      parameters: [
+        id_param(),
+        Operation.parameter(
+          :account_ids,
+          :query,
+          %Schema{type: :array, items: %Schema{type: :string}},
+          "Array of account IDs"
+        )
+      ],
+      requestBody: add_remove_accounts_request(false),
       security: [%{"oAuth" => ["write:lists"]}],
       responses: %{
         200 => Operation.response("Empty object", "application/json", %Schema{type: :object})
@@ -171,7 +179,7 @@ defmodule Pleroma.Web.ApiSpec.ListOperation do
     )
   end
 
-  defp add_remove_accounts_request do
+  defp add_remove_accounts_request(required) when is_boolean(required) do
     request_body(
       "Parameters",
       %Schema{
@@ -180,9 +188,9 @@ defmodule Pleroma.Web.ApiSpec.ListOperation do
         properties: %{
          account_ids: %Schema{type: :array, description: "Array of account IDs", items: FlakeID}
         },
-        required: [:account_ids]
+        required: required && [:account_ids]
       },
-      required: true
+      required: required
     )
   end
 end
@@ -59,17 +59,11 @@ defmodule Pleroma.Web.MastodonAPI.AuthController do
   def password_reset(conn, params) do
     nickname_or_email = params["email"] || params["nickname"]
 
-    with {:ok, _} <- TwitterAPI.password_reset(nickname_or_email) do
+    TwitterAPI.password_reset(nickname_or_email)
+
     conn
     |> put_status(:no_content)
     |> json("")
-    else
-      {:error, "unknown user"} ->
-        send_resp(conn, :not_found, "")
-
-      {:error, _} ->
-        send_resp(conn, :bad_request, "")
-    end
   end
 
   defp local_mastodon_root_path(conn) do
@@ -74,7 +74,7 @@ defmodule Pleroma.Web.MastodonAPI.ListController do
 
   # DELETE /api/v1/lists/:id/accounts
   def remove_from_list(
-        %{assigns: %{list: list}, body_params: %{account_ids: account_ids}} = conn,
+        %{assigns: %{list: list}, params: %{account_ids: account_ids}} = conn,
         _
       ) do
     Enum.each(account_ids, fn account_id ->
@@ -86,6 +86,10 @@ defmodule Pleroma.Web.MastodonAPI.ListController do
     json(conn, %{})
   end
 
+  def remove_from_list(%{body_params: params} = conn, _) do
+    remove_from_list(%{conn | params: params}, %{})
+  end
+
   defp list_by_id_and_user(%{assigns: %{user: user}, params: %{id: id}} = conn, _) do
     case Pleroma.List.get(id, user) do
       %Pleroma.List{} = list -> assign(conn, :list, list)
@@ -23,6 +23,17 @@ defmodule Pleroma.Web.MastodonAPI.StatusView do
 
   import Pleroma.Web.ActivityPub.Visibility, only: [get_visibility: 1, visible_for_user?: 2]
 
+  # This is a naive way to do this, just spawning a process per activity
+  # to fetch the preview. However it should be fine considering
+  # pagination is restricted to 40 activities at a time
+  defp fetch_rich_media_for_activities(activities) do
+    Enum.each(activities, fn activity ->
+      spawn(fn ->
+        Pleroma.Web.RichMedia.Helpers.fetch_data_for_activity(activity)
+      end)
+    end)
+  end
+
   # TODO: Add cached version.
   defp get_replied_to_activities([]), do: %{}
 
@@ -80,6 +91,11 @@ defmodule Pleroma.Web.MastodonAPI.StatusView do
 
     # To do: check AdminAPIControllerTest on the reasons behind nil activities in the list
     activities = Enum.filter(opts.activities, & &1)
+
+    # Start fetching rich media before doing anything else, so that later calls to get the cards
+    # only block for timeout in the worst case, as opposed to
+    # length(activities_with_links) * timeout
+    fetch_rich_media_for_activities(activities)
     replied_to_activities = get_replied_to_activities(activities)
 
     parent_activities =
@@ -96,6 +96,6 @@ defmodule Pleroma.Web.RichMedia.Helpers do
         @rich_media_options
       end
 
-    Pleroma.HTTP.get(url, headers, options)
+    Pleroma.HTTP.get(url, headers, adapter: options)
   end
 end
@@ -3,6 +3,8 @@
 # SPDX-License-Identifier: AGPL-3.0-only
 
 defmodule Pleroma.Web.RichMedia.Parser do
+  require Logger
+
   defp parsers do
     Pleroma.Config.get([:rich_media, :parsers])
   end
@@ -10,17 +12,29 @@ defmodule Pleroma.Web.RichMedia.Parser do
   def parse(nil), do: {:error, "No URL provided"}
 
   if Pleroma.Config.get(:env) == :test do
+    @spec parse(String.t()) :: {:ok, map()} | {:error, any()}
     def parse(url), do: parse_url(url)
   else
+    @spec parse(String.t()) :: {:ok, map()} | {:error, any()}
     def parse(url) do
-      try do
-        Cachex.fetch!(:rich_media_cache, url, fn _ ->
-          {:commit, parse_url(url)}
-        end)
-        |> set_ttl_based_on_image(url)
-      rescue
-        e ->
-          {:error, "Cachex error: #{inspect(e)}"}
+      with {:ok, data} <- get_cached_or_parse(url),
+           {:ok, _} <- set_ttl_based_on_image(data, url) do
+        {:ok, data}
+      else
+        error ->
+          Logger.error(fn -> "Rich media error: #{inspect(error)}" end)
+      end
+    end
+
+    defp get_cached_or_parse(url) do
+      case Cachex.fetch!(:rich_media_cache, url, fn _ -> {:commit, parse_url(url)} end) do
+        {:ok, _data} = res ->
+          res
+
+        {:error, _} = e ->
+          ttl = Pleroma.Config.get([:rich_media, :failure_backoff], 60_000)
+          Cachex.expire(:rich_media_cache, url, ttl)
+          e
       end
     end
   end
@@ -47,19 +61,26 @@ defmodule Pleroma.Web.RichMedia.Parser do
       config :pleroma, :rich_media,
         ttl_setters: [MyModule]
   """
-  def set_ttl_based_on_image({:ok, data}, url) do
-    with {:ok, nil} <- Cachex.ttl(:rich_media_cache, url),
-         ttl when is_number(ttl) <- get_ttl_from_image(data, url) do
-      Cachex.expire_at(:rich_media_cache, url, ttl * 1000)
-      {:ok, data}
-    else
+  @spec set_ttl_based_on_image(map(), String.t()) ::
+          {:ok, Integer.t() | :noop} | {:error, :no_key}
+  def set_ttl_based_on_image(data, url) do
+    case get_ttl_from_image(data, url) do
+      {:ok, ttl} when is_number(ttl) ->
+        ttl = ttl * 1000
+
+        case Cachex.expire_at(:rich_media_cache, url, ttl) do
+          {:ok, true} -> {:ok, ttl}
+          {:ok, false} -> {:error, :no_key}
+        end
+
       _ ->
-        {:ok, data}
+        {:ok, :noop}
     end
   end
 
   defp get_ttl_from_image(data, url) do
-    Pleroma.Config.get([:rich_media, :ttl_setters])
+    [:rich_media, :ttl_setters]
+    |> Pleroma.Config.get()
     |> Enum.reduce({:ok, nil}, fn
       module, {:ok, _ttl} ->
         module.ttl(data, url)
@@ -70,23 +91,16 @@ defmodule Pleroma.Web.RichMedia.Parser do
   end
 
   defp parse_url(url) do
-    try do
-      {:ok, %Tesla.Env{body: html}} = Pleroma.Web.RichMedia.Helpers.rich_media_get(url)
-
+    with {:ok, %Tesla.Env{body: html}} <- Pleroma.Web.RichMedia.Helpers.rich_media_get(url),
+         {:ok, html} <- Floki.parse_document(html) do
       html
-      |> parse_html()
       |> maybe_parse()
       |> Map.put("url", url)
       |> clean_parsed_data()
       |> check_parsed_data()
-    rescue
-      e ->
-        {:error, "Parsing error: #{inspect(e)} #{inspect(__STACKTRACE__)}"}
     end
   end
 
-  defp parse_html(html), do: Floki.parse_document!(html)
-
   defp maybe_parse(html) do
     Enum.reduce_while(parsers(), %{}, fn parser, acc ->
       case parser.parse(html, acc) do
@@ -10,20 +10,15 @@ defmodule Pleroma.Web.RichMedia.Parser.TTL.AwsSignedUrl do
       |> parse_query_params()
       |> format_query_params()
      |> get_expiration_timestamp()
+    else
+      {:error, "Not aws signed url #{inspect(image)}"}
     end
   end
 
-  defp is_aws_signed_url(""), do: nil
-  defp is_aws_signed_url(nil), do: nil
-
-  defp is_aws_signed_url(image) when is_binary(image) do
+  defp is_aws_signed_url(image) when is_binary(image) and image != "" do
     %URI{host: host, query: query} = URI.parse(image)
 
-    if String.contains?(host, "amazonaws.com") and String.contains?(query, "X-Amz-Expires") do
-      image
-    else
-      nil
-    end
+    String.contains?(host, "amazonaws.com") and String.contains?(query, "X-Amz-Expires")
   end
 
   defp is_aws_signed_url(_), do: nil
@@ -46,6 +41,6 @@ defmodule Pleroma.Web.RichMedia.Parser.TTL.AwsSignedUrl do
       |> Map.get("X-Amz-Date")
       |> Timex.parse("{ISO:Basic:Z}")
 
-    Timex.to_unix(date) + String.to_integer(Map.get(params, "X-Amz-Expires"))
+    {:ok, Timex.to_unix(date) + String.to_integer(Map.get(params, "X-Amz-Expires"))}
   end
 end
@@ -72,7 +72,7 @@ defmodule Pleroma.Web.TwitterAPI.TwitterAPI do
 
   def password_reset(nickname_or_email) do
     with true <- is_binary(nickname_or_email),
-         %User{local: true, email: email} = user when is_binary(email) <-
+         %User{local: true, email: email, deactivated: false} = user when is_binary(email) <-
           User.get_by_nickname_or_email(nickname_or_email),
         {:ok, token_record} <- Pleroma.PasswordResetToken.create_token(user) do
      user
@@ -81,17 +81,8 @@ defmodule Pleroma.Web.TwitterAPI.TwitterAPI do
 
       {:ok, :enqueued}
     else
-      false ->
-        {:error, "bad user identifier"}
-
-      %User{local: true, email: nil} ->
+      _ ->
         {:ok, :noop}
-
-      %User{local: false} ->
-        {:error, "remote user"}
-
-      nil ->
-        {:error, "unknown user"}
     end
   end
 
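Since the `else` branches are collapsed, every non-eligible lookup (unknown, remote, deactivated, or e-mail-less user) now resolves identically; a sketch of the resulting contract, assuming a running instance:

```elixir
# Anything that is not a local, active user with an e-mail address
# falls into the single `_ ->` clause and returns {:ok, :noop}
{:ok, :noop} = Pleroma.Web.TwitterAPI.TwitterAPI.password_reset("no-such-nickname")
```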
@@ -136,12 +136,12 @@ defmodule Pleroma.Web.WebFinger do
 
   def find_lrdd_template(domain) do
     with {:ok, %{status: status, body: body}} when status in 200..299 <-
-           HTTP.get("http://#{domain}/.well-known/host-meta", []) do
+           HTTP.get("http://#{domain}/.well-known/host-meta") do
       get_template_from_xml(body)
     else
       _ ->
         with {:ok, %{body: body, status: status}} when status in 200..299 <-
-               HTTP.get("https://#{domain}/.well-known/host-meta", []) do
+               HTTP.get("https://#{domain}/.well-known/host-meta") do
           get_template_from_xml(body)
         else
           e -> {:error, "Can't find LRDD template: #{inspect(e)}"}
@@ -1350,11 +1350,11 @@ defmodule HttpRequestMock do
     {:ok, %Tesla.Env{status: 200, body: File.read!("test/fixtures/relay/relay.json")}}
   end
 
-  def get("http://localhost:4001/", _, "", Accept: "text/html") do
+  def get("http://localhost:4001/", _, "", [{"accept", "text/html"}]) do
     {:ok, %Tesla.Env{status: 200, body: File.read!("test/fixtures/tesla_mock/7369654.html")}}
   end
 
-  def get("https://osada.macgirvin.com/", _, "", Accept: "text/html") do
+  def get("https://osada.macgirvin.com/", _, "", [{"accept", "text/html"}]) do
     {:ok,
      %Tesla.Env{
        status: 200,
@@ -109,22 +109,22 @@ defmodule Pleroma.UserSearchTest do
              Enum.map(User.search("doe", resolve: false, for_user: u1), & &1.id) == []
   end
 
-  test "finds followers of user by partial name" do
-    u1 = insert(:user)
-    u2 = insert(:user, %{name: "Jimi"})
-    follower_jimi = insert(:user, %{name: "Jimi Hendrix"})
-    follower_lizz = insert(:user, %{name: "Lizz Wright"})
-    friend = insert(:user, %{name: "Jimi"})
+  test "finds followings of user by partial name" do
+    lizz = insert(:user, %{name: "Lizz"})
+    jimi = insert(:user, %{name: "Jimi"})
+    following_lizz = insert(:user, %{name: "Jimi Hendrix"})
+    following_jimi = insert(:user, %{name: "Lizz Wright"})
+    follower_lizz = insert(:user, %{name: "Jimi"})
 
-    {:ok, follower_jimi} = User.follow(follower_jimi, u1)
-    {:ok, _follower_lizz} = User.follow(follower_lizz, u2)
-    {:ok, u1} = User.follow(u1, friend)
+    {:ok, lizz} = User.follow(lizz, following_lizz)
+    {:ok, _jimi} = User.follow(jimi, following_jimi)
+    {:ok, _follower_lizz} = User.follow(follower_lizz, lizz)
 
-    assert Enum.map(User.search("jimi", following: true, for_user: u1), & &1.id) == [
-             follower_jimi.id
+    assert Enum.map(User.search("jimi", following: true, for_user: lizz), & &1.id) == [
+             following_lizz.id
           ]
 
-    assert User.search("lizz", following: true, for_user: u1) == []
+    assert User.search("lizz", following: true, for_user: lizz) == []
   end
 
   test "find local and remote users for authenticated users" do
@@ -122,17 +122,27 @@ defmodule Pleroma.Web.MastodonAPI.AuthControllerTest do
       {:ok, user: user}
     end
 
-    test "it returns 404 when user is not found", %{conn: conn, user: user} do
+    test "it returns 204 when user is not found", %{conn: conn, user: user} do
       conn = post(conn, "/auth/password?email=nonexisting_#{user.email}")
-      assert conn.status == 404
-      assert conn.resp_body == ""
+
+      assert conn
+             |> json_response(:no_content)
     end
 
-    test "it returns 400 when user is not local", %{conn: conn, user: user} do
+    test "it returns 204 when user is not local", %{conn: conn, user: user} do
       {:ok, user} = Repo.update(Ecto.Changeset.change(user, local: false))
       conn = post(conn, "/auth/password?email=#{user.email}")
-      assert conn.status == 400
-      assert conn.resp_body == ""
+
+      assert conn
+             |> json_response(:no_content)
+    end
+
+    test "it returns 204 when user is deactivated", %{conn: conn, user: user} do
+      {:ok, user} = Repo.update(Ecto.Changeset.change(user, deactivated: true, local: true))
+      conn = post(conn, "/auth/password?email=#{user.email}")
+
+      assert conn
+             |> json_response(:no_content)
     end
   end
 
@@ -67,7 +67,7 @@ defmodule Pleroma.Web.MastodonAPI.ListControllerTest do
     assert following == [other_user.follower_address]
   end
 
-  test "removing users from a list" do
+  test "removing users from a list, body params" do
     %{user: user, conn: conn} = oauth_access(["write:lists"])
     other_user = insert(:user)
     third_user = insert(:user)
@@ -85,6 +85,24 @@ defmodule Pleroma.Web.MastodonAPI.ListControllerTest do
     assert following == [third_user.follower_address]
   end
 
+  test "removing users from a list, query params" do
+    %{user: user, conn: conn} = oauth_access(["write:lists"])
+    other_user = insert(:user)
+    third_user = insert(:user)
+    {:ok, list} = Pleroma.List.create("name", user)
+    {:ok, list} = Pleroma.List.follow(list, other_user)
+    {:ok, list} = Pleroma.List.follow(list, third_user)
+
+    assert %{} ==
+             conn
+             |> put_req_header("content-type", "application/json")
+             |> delete("/api/v1/lists/#{list.id}/accounts?account_ids[]=#{other_user.id}")
+             |> json_response_and_validate_schema(:ok)
+
+    %Pleroma.List{following: following} = Pleroma.List.get(list.id, user)
+    assert following == [third_user.follower_address]
+  end
+
   test "listing users in a list" do
     %{user: user, conn: conn} = oauth_access(["read:lists"])
     other_user = insert(:user)
@@ -21,7 +21,7 @@ defmodule Pleroma.Web.RichMedia.TTL.AwsSignedUrlTest do
     expire_time =
       Timex.parse!(timestamp, "{ISO:Basic:Z}") |> Timex.to_unix() |> Kernel.+(valid_till)
 
-    assert expire_time == Pleroma.Web.RichMedia.Parser.TTL.AwsSignedUrl.ttl(metadata, url)
+    assert {:ok, expire_time} == Pleroma.Web.RichMedia.Parser.TTL.AwsSignedUrl.ttl(metadata, url)
   end
 
   test "s3 signed url is parsed and correct ttl is set for rich media" do
@@ -55,7 +55,7 @@ defmodule Pleroma.Web.RichMedia.TTL.AwsSignedUrlTest do
 
     Cachex.put(:rich_media_cache, url, metadata)
 
-    Pleroma.Web.RichMedia.Parser.set_ttl_based_on_image({:ok, metadata}, url)
+    Pleroma.Web.RichMedia.Parser.set_ttl_based_on_image(metadata, url)
 
     {:ok, cache_ttl} = Cachex.ttl(:rich_media_cache, url)
 
@@ -5,6 +5,8 @@
 defmodule Pleroma.Web.RichMedia.ParserTest do
   use ExUnit.Case, async: true
 
+  alias Pleroma.Web.RichMedia.Parser
+
   setup do
     Tesla.Mock.mock(fn
       %{
@@ -48,23 +50,29 @@ defmodule Pleroma.Web.RichMedia.ParserTest do
 
       %{method: :get, url: "http://example.com/empty"} ->
         %Tesla.Env{status: 200, body: "hello"}
+
+      %{method: :get, url: "http://example.com/malformed"} ->
+        %Tesla.Env{status: 200, body: File.read!("test/fixtures/rich_media/malformed-data.html")}
+
+      %{method: :get, url: "http://example.com/error"} ->
+        {:error, :overload}
     end)
 
     :ok
   end
 
   test "returns error when no metadata present" do
-    assert {:error, _} = Pleroma.Web.RichMedia.Parser.parse("http://example.com/empty")
+    assert {:error, _} = Parser.parse("http://example.com/empty")
   end
 
   test "doesn't just add a title" do
-    assert Pleroma.Web.RichMedia.Parser.parse("http://example.com/non-ogp") ==
+    assert Parser.parse("http://example.com/non-ogp") ==
             {:error,
              "Found metadata was invalid or incomplete: %{\"url\" => \"http://example.com/non-ogp\"}"}
   end
 
   test "parses ogp" do
-    assert Pleroma.Web.RichMedia.Parser.parse("http://example.com/ogp") ==
+    assert Parser.parse("http://example.com/ogp") ==
             {:ok,
              %{
                "image" => "http://ia.media-imdb.com/images/rock.jpg",
@@ -77,7 +85,7 @@ defmodule Pleroma.Web.RichMedia.ParserTest do
   end
 
   test "falls back to <title> when ogp:title is missing" do
-    assert Pleroma.Web.RichMedia.Parser.parse("http://example.com/ogp-missing-title") ==
+    assert Parser.parse("http://example.com/ogp-missing-title") ==
             {:ok,
              %{
                "image" => "http://ia.media-imdb.com/images/rock.jpg",
@@ -90,7 +98,7 @@ defmodule Pleroma.Web.RichMedia.ParserTest do
   end
 
   test "parses twitter card" do
-    assert Pleroma.Web.RichMedia.Parser.parse("http://example.com/twitter-card") ==
+    assert Parser.parse("http://example.com/twitter-card") ==
             {:ok,
              %{
                "card" => "summary",
@@ -103,7 +111,7 @@ defmodule Pleroma.Web.RichMedia.ParserTest do
   end
 
   test "parses OEmbed" do
-    assert Pleroma.Web.RichMedia.Parser.parse("http://example.com/oembed") ==
+    assert Parser.parse("http://example.com/oembed") ==
             {:ok,
              %{
                "author_name" => "bees",
@@ -132,6 +140,10 @@ defmodule Pleroma.Web.RichMedia.ParserTest do
   end
 
   test "rejects invalid OGP data" do
-    assert {:error, _} = Pleroma.Web.RichMedia.Parser.parse("http://example.com/malformed")
+    assert {:error, _} = Parser.parse("http://example.com/malformed")
+  end
+
+  test "returns error if getting page was not successful" do
+    assert {:error, :overload} = Parser.parse("http://example.com/error")
   end
 end