change the structure of image ttl parser
This commit is contained in: parent 18234cc44e, commit de9906ad56
@@ -4,7 +4,7 @@
   Richmedia are cached without the ttl but the rich media may have image which can expire, like aws signed url.
   In such cases the old image url (expired) is returned from the media cache.

-  So to avoid such situation we can define a moddule that will set ttl based no image.
+  So to avoid such situation we can define a moddule that will set ttl based on image.

   The module must have a `run` function and it should be registered in the config.
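For the registration step mentioned above: the later hunks read the setter list via `Pleroma.Config.get([:rich_media, :ttl_setters])`, so a minimal sketch of the matching config entry (assuming the standard Pleroma `config :pleroma, :group` layout; `MyModule` is the placeholder name from the doc example) looks like:

    # config/config.exs, hypothetical entry registering a TTL setter
    config :pleroma, :rich_media,
      ttl_setters: [MyModule]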
@@ -35,17 +35,17 @@ def parse(url) do
   @doc """
   Set the rich media cache based on the expiration time of image.

-  Define a module that has `run` function
+  Adopt behaviour `Pleroma.Web.RichMedia.Parser.TTL`

   ## Example

       defmodule MyModule do
-        def run(data, url) do
+        @behaviour Pleroma.Web.RichMedia.Parser.TTL
+        def ttl(data, url) do
           image_url = Map.get(data, :image)
           # do some parsing in the url and get the ttl of the image
-          # ttl is unix time
-          ttl = parse_ttl_from_url(image_url)
-          Cachex.expire_at(:rich_media_cache, url, ttl * 1000)
+          # and return ttl is unix time
+          parse_ttl_from_url(image_url)
         end
       end
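The doc example leaves `parse_ttl_from_url/1` undefined. A hedged sketch of how such a setter could be filled in, assuming (purely for illustration) that the image URL carries its expiry as an `expires` query parameter holding a unix timestamp in seconds:

    defmodule MyModule do
      @behaviour Pleroma.Web.RichMedia.Parser.TTL

      def ttl(data, _url) do
        data
        |> Map.get(:image)
        |> parse_ttl_from_url()
      end

      # Hypothetical helper: the image URL is assumed to carry its expiry as an
      # `expires` query parameter holding a unix timestamp in seconds,
      # e.g. "https://example.com/img.png?expires=1563500030"
      defp parse_ttl_from_url(image_url) do
        %URI{query: query} = URI.parse(image_url)

        query
        |> URI.decode_query()
        |> Map.fetch!("expires")
        |> String.to_integer()
      end
    end

Returning the bare unix timestamp mirrors the AwsSignedUrl setter further down in this diff; `set_ttl_based_on_image/2` then multiplies it by 1000 before handing it to `Cachex.expire_at/3`.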
@@ -55,22 +55,26 @@ def run(data, url) do
       ttl_setters: [MyModule]
   """
   def set_ttl_based_on_image({:ok, data}, url) do
-    case Cachex.ttl(:rich_media_cache, url) do
-      {:ok, nil} ->
-        modules = Pleroma.Config.get([:rich_media, :ttl_setters])
-
-        if Enum.count(modules) > 0 do
-          Enum.each(modules, & &1.run(data, url))
-        end
-
-        {:ok, data}
-
+    with {:ok, nil} <- Cachex.ttl(:rich_media_cache, url) do
+      ttl = get_ttl_from_image(data, url)
+      Cachex.expire_at(:rich_media_cache, url, ttl * 1000)
+      {:ok, data}
+    else
       _ ->
         {:ok, data}
     end
   end

   def set_ttl_based_on_image(data, _url), do: data

+  defp get_ttl_from_image(data, url) do
+    Pleroma.Config.get([:rich_media, :ttl_setters])
+    |> Enum.reduce({:ok, nil}, fn
+      module, {:ok, _ttl} ->
+        module.ttl(data, url)
+
+      _, error ->
+        error
+    end)
+  end
+
   defp parse_url(url) do
     try do
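To make the `Enum.reduce/3` above concrete: the accumulator starts at `{:ok, nil}`, a setter is only invoked while the accumulator still matches `{:ok, _}`, and any other value (an error tuple, or a bare ttl once a setter has produced one) is carried through untouched. A standalone sketch with hypothetical setter results standing in for the `module.ttl(data, url)` calls:

    setters = [
      fn -> {:error, "Not aws signed url"} end,
      fn -> 1_563_500_030 end
    ]

    Enum.reduce(setters, {:ok, nil}, fn
      setter, {:ok, _ttl} -> setter.()
      _setter, other -> other
    end)
    # => {:error, "Not aws signed url"}; the second setter is never invoked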
@@ -1,5 +1,8 @@
 defmodule Pleroma.Web.RichMedia.Parser.TTL.AwsSignedUrl do
-  def run(data, url) do
+  @behaviour Pleroma.Web.RichMedia.Parser.TTL
+
+  @impl Pleroma.Web.RichMedia.Parser.TTL
+  def ttl(data, _url) do
     image = Map.get(data, :image)

     if is_aws_signed_url(image) do

@@ -7,7 +10,6 @@ def run(data, url) do
       |> parse_query_params()
       |> format_query_params()
       |> get_expiration_timestamp()
-      |> set_ttl(url)
     end
   end

@@ -47,8 +49,4 @@ defp get_expiration_timestamp(params) when is_map(params) do

     Timex.to_unix(date) + String.to_integer(Map.get(params, "X-Amz-Expires"))
   end
-
-  defp set_ttl(ttl, url) do
-    Cachex.expire_at(:rich_media_cache, url, ttl * 1000)
-  end
 end
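The body of `get_expiration_timestamp/1` is only partially visible in this hunk. A hedged sketch of the whole private function, under the assumption that `date` is the parsed `X-Amz-Date` parameter in the same `{ISO:Basic:Z}` form the test below constructs:

    defp get_expiration_timestamp(params) when is_map(params) do
      {:ok, date} =
        params
        |> Map.get("X-Amz-Date")
        |> Timex.parse("{ISO:Basic:Z}")

      Timex.to_unix(date) + String.to_integer(Map.get(params, "X-Amz-Expires"))
    end

Worked through with hypothetical numbers: an `X-Amz-Date` that parses to unix time 1_563_500_000 plus an `X-Amz-Expires` of "30" yields 1_563_500_030, the absolute expiry in unix seconds.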
New file: lib/pleroma/web/rich_media/parsers/ttl/ttl.ex (3 lines added)
@@ -0,0 +1,3 @@
+defmodule Pleroma.Web.RichMedia.Parser.TTL do
+  @callback ttl(Map.t(), String.t()) :: {:ok, Integer.t()} | {:error, String.t()}
+end
@@ -5,7 +5,7 @@
 defmodule Pleroma.Web.RichMedia.TTL.AwsSignedUrlTest do
   use ExUnit.Case, async: true

-  test "amazon signed url is parsed and correct ttl is set for rich media" do
+  test "s3 signed url is parsed correct for expiration time" do
     url = "https://pleroma.social/amz"

     {:ok, timestamp} =
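The `{:ok, timestamp} =` binding continues outside this hunk; as the second test below shows, it is produced with `Timex.format("{ISO:Basic:Z}")`, i.e. the basic ISO 8601 form AWS uses for `X-Amz-Date`. A quick illustration (the concrete datetime is arbitrary):

    {:ok, timestamp} = Timex.format(~U[2019-07-16 16:38:42Z], "{ISO:Basic:Z}")
    # timestamp == "20190716T163842Z"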
@@ -16,22 +16,66 @@ test "amazon signed url is parsed and correct ttl is set for rich media" do
     # in seconds
     valid_till = 30

-    data = %{
-      image:
-        "https://pleroma.s3.ap-southeast-1.amazonaws.com/sachin%20%281%29%20_a%20-%25%2Aasdasd%20BNN%20bnnn%20.png?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=AKIAIBLWWK6RGDQXDLJQ%2F20190716%2Fap-southeast-1%2Fs3%2Faws4_request&X-Amz-Date=#{
-          timestamp
-        }&X-Amz-Expires=#{valid_till}&X-Amz-Signature=04ffd6b98634f4b1bbabc62e0fac4879093cd54a6eed24fe8eb38e8369526bbf&X-Amz-SignedHeaders=host",
-      locale: "en_US",
-      site_name: "Pleroma",
-      title: "PLeroma",
-      url: url
-    }
+    metadata = construct_metadata(timestamp, valid_till, url)
+
+    expire_time =
+      Timex.parse!(timestamp, "{ISO:Basic:Z}") |> Timex.to_unix() |> Kernel.+(valid_till)
+
+    assert expire_time == Pleroma.Web.RichMedia.Parser.TTL.AwsSignedUrl.ttl(metadata, url)
+  end
+
+  test "s3 signed url is parsed and correct ttl is set for rich media" do
+    url = "https://pleroma.social/amz"
+
+    {:ok, timestamp} =
+      Timex.now()
+      |> DateTime.truncate(:second)
+      |> Timex.format("{ISO:Basic:Z}")
+
+    # in seconds
+    valid_till = 30
+
+    metadata = construct_metadata(timestamp, valid_till, url)
+
+    body = """
+    <meta name="twitter:card" content="Pleroma" />
+    <meta name="twitter:site" content="Pleroma" />
+    <meta name="twitter:title" content="Pleroma" />
+    <meta name="twitter:description" content="Pleroma" />
+    <meta name="twitter:image" content="#{Map.get(metadata, :image)}" />
+    """
+
+    Tesla.Mock.mock(fn
+      %{
+        method: :get,
+        url: "https://pleroma.social/amz"
+      } ->
+        %Tesla.Env{status: 200, body: body}
+    end)

-    Cachex.put(:rich_media_cache, url, data)
-    assert {:ok, _} = Pleroma.Web.RichMedia.Parser.TTL.AwsSignedUrl.run(data, url)
+    Cachex.put(:rich_media_cache, url, metadata)
+
+    Pleroma.Web.RichMedia.Parser.set_ttl_based_on_image({:ok, metadata}, url)
+
     {:ok, cache_ttl} = Cachex.ttl(:rich_media_cache, url)

     # as there is delay in setting and pulling the data from cache we ignore 1 second
     assert_in_delta(valid_till * 1000, cache_ttl, 1000)
   end
+
+  defp construct_s3_url(timestamp, valid_till) do
+    "https://pleroma.s3.ap-southeast-1.amazonaws.com/sachin%20%281%29%20_a%20-%25%2Aasdasd%20BNN%20bnnn%20.png?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Credential=AKIAIBLWWK6RGDQXDLJQ%2F20190716%2Fap-southeast-1%2Fs3%2Faws4_request&X-Amz-Date=#{
+      timestamp
+    }&X-Amz-Expires=#{valid_till}&X-Amz-Signature=04ffd6b98634f4b1bbabc62e0fac4879093cd54a6eed24fe8eb38e8369526bbf&X-Amz-SignedHeaders=host"
+  end
+
+  defp construct_metadata(timestamp, valid_till, url) do
+    %{
+      image: construct_s3_url(timestamp, valid_till),
+      site: "Pleroma",
+      title: "Pleroma",
+      description: "Pleroma",
+      url: url
+    }
+  end
 end
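On the units in the final assertion, a small sketch (assuming the `:rich_media_cache` cache is already started, as it is in the application's supervision tree): `Cachex.expire_at/3` takes an absolute unix timestamp in milliseconds, and `Cachex.ttl/2` reports the remaining lifetime in milliseconds, which is why the test compares against `valid_till * 1000` with a one-second delta.

    url = "https://pleroma.social/amz"
    # hypothetical expiry 30 seconds from now, as a unix timestamp in seconds
    expire_at = System.os_time(:second) + 30

    Cachex.put(:rich_media_cache, url, %{})
    Cachex.expire_at(:rich_media_cache, url, expire_at * 1000)

    {:ok, remaining_ms} = Cachex.ttl(:rich_media_cache, url)
    # remaining_ms is roughly 30_000, i.e. the lifetime left in milliseconds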