MediaHelper: cache failed URLs for 15 minutes to prevent excessive retries

Mark Felder 2024-03-08 10:32:15 -05:00
parent f775a1931b
commit 741f22bfe0
3 changed files with 33 additions and 18 deletions


@@ -0,0 +1 @@
+Framegrabs with ffmpeg now execute with a 5-second timeout, and the URLs of failed attempts are cached with a 15-minute TTL to prevent excessive retries.
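The entry describes a negative cache: a URL that fails once is skipped for the next 15 minutes instead of being refetched on every request. A minimal sketch of the pattern, assuming a Cachex cache named :failed_media_helper_url_cache started with a 15-minute default TTL (the module and function names here are illustrative, not Pleroma's):

    defmodule FailureCacheSketch do
      # Illustrative wrapper; Pleroma inlines this logic in MediaHelper below.
      @cache :failed_media_helper_url_cache

      # Run fun.(url) unless the URL already failed within the TTL window.
      def run_unless_recently_failed(url, fun) do
        case Cachex.exists?(@cache, url) do
          {:ok, true} ->
            # A failure is still cached; skip the retry until the entry expires.
            {:error, :recently_failed}

          {:ok, false} ->
            case fun.(url) do
              {:ok, _} = ok ->
                ok

              {:error, _} = error ->
                # Record the failure; the entry expires via the cache's default TTL.
                Cachex.put(@cache, url, nil)
                error
            end
        end
      end
    end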

lib/pleroma/application.ex

@@ -156,6 +156,7 @@ defmodule Pleroma.Application do
       build_cachex("web_resp", limit: 2500),
       build_cachex("emoji_packs", expiration: emoji_packs_expiration(), limit: 10),
       build_cachex("failed_proxy_url", limit: 2500),
+      build_cachex("failed_media_helper_url", default_ttl: :timer.minutes(15), limit: 2_500),
       build_cachex("banned_urls", default_ttl: :timer.hours(24 * 30), limit: 5_000),
       build_cachex("chat_message_id_idempotency_key",
         expiration: chat_message_id_idempotency_key_expiration(),

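For context, build_cachex/2 is Pleroma's private helper that wraps each cache in a supervisor child spec. Paraphrased from Pleroma.Application (a sketch, not the verbatim definition):

    # Note the "_cache" suffix: the "failed_media_helper_url" entry above is
    # supervised as the Cachex cache named :failed_media_helper_url_cache.
    defp build_cachex(type, opts) do
      %{
        id: String.to_atom("cachex_" <> type),
        start: {Cachex, :start_link, [String.to_atom(type <> "_cache"), opts]},
        type: :worker
      }
    end

Here default_ttl: :timer.minutes(15) makes every entry expire after 15 minutes, and limit: 2_500 caps how many failed URLs are retained.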
lib/pleroma/helpers/media_helper.ex

@@ -12,6 +12,8 @@ defmodule Pleroma.Helpers.MediaHelper do
   require Logger
 
+  @cachex Pleroma.Config.get([:cachex, :provider], Cachex)
+
   def missing_dependencies do
     Enum.reduce([ffmpeg: "ffmpeg"], [], fn {sym, executable}, acc ->
       if Pleroma.Utils.command_available?(executable) do
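The @cachex module attribute resolves the cache implementation through configuration, with Cachex itself as the default, so the test suite can substitute a mock without touching call sites. A sketch of the override (the mock module name is an assumption, not necessarily Pleroma's):

    # config/test.exs
    config :pleroma, :cachex, provider: MockCachex

Every cache call below then goes through @cachex.exists?/2 and @cachex.put/3, which a mock can intercept and assert on.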
@@ -43,11 +45,13 @@ defmodule Pleroma.Helpers.MediaHelper do
   @spec video_framegrab(String.t()) :: {:ok, binary()} | {:error, any()}
   def video_framegrab(url) do
     with executable when is_binary(executable) <- System.find_executable("ffmpeg"),
+         {:ok, false} <- @cachex.exists?(:failed_media_helper_url_cache, url),
          {:ok, env} <- HTTP.get(url, [], pool: :media),
          {:ok, pid} <- StringIO.open(env.body) do
       body_stream = IO.binstream(pid, 1)
 
-      result =
+      task =
         Task.async(fn ->
           Exile.stream!(
             [
               executable,
@@ -64,8 +68,17 @@
             stderr: :disable
           )
           |> Enum.into(<<>>)
         end)
 
+      case Task.yield(task, 5_000) do
+        nil ->
+          Task.shutdown(task)
+          @cachex.put(:failed_media_helper_url_cache, url, nil)
+          {:error, {:ffmpeg, :timeout}}
+
+        {:ok, result} ->
+          {:ok, result}
+      end
     else
       nil -> {:error, {:ffmpeg, :command_not_found}}
+      {:ok, true} -> {:error, {:ffmpeg, :timeout}}
       {:error, _} = error -> error
     end
   end
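The timeout handling above is the Task.yield/Task.shutdown idiom from the Elixir standard library documentation. In isolation it looks like this (grab_frame/0 is a stand-in for the Exile pipeline):

    task = Task.async(fn -> grab_frame() end)

    # Task.shutdown/1 returns {:ok, reply} if the reply arrived during
    # shutdown, so a result that lands just after the deadline is not lost;
    # nil means the work truly timed out.
    case Task.yield(task, 5_000) || Task.shutdown(task) do
      {:ok, result} -> {:ok, result}
      nil -> {:error, :timeout}
    end

The commit instead calls Task.shutdown/1 inside the nil branch, discarding any late frame and caching the URL as failed for the next 15 minutes.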