# Portions of this file are derived from Pleroma:
# Copyright © 2017-2018 Pleroma Authors <https://pleroma.social>
# SPDX-License-Identifier: AGPL-3.0-only
# Upstream: https://git.pleroma.social/pleroma/pleroma/blob/develop/lib/pleroma/web/activity_pub/transmogrifier.ex

defmodule Mobilizon.Service.ActivityPub.Transmogrifier do
  @moduledoc """
  A module to handle conversion from the internal representation to wire ActivityPub and back.
  """
  alias Mobilizon.Actors
  alias Mobilizon.Actors.{Actor, Follower}
  alias Mobilizon.Events
  alias Mobilizon.Events.{Event, Comment, Participant}
  alias Mobilizon.Service.ActivityPub
  alias Mobilizon.Service.ActivityPub.Utils
  alias Mobilizon.Service.ActivityPub.Visibility

  require Logger

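  # `get_actor/1` extracts the actor URL from an incoming activity, whatever shape the
  # "actor" field takes: a bare IRI, a list, an embedded object, or a missing value with
  # an "attributedTo" fallback.
  #
  # Illustrative sketch (the payloads below are assumptions, not fixtures from this codebase):
  #
  #     get_actor(%{"actor" => "https://framapiaf.org/users/admin"})
  #     #=> "https://framapiaf.org/users/admin"
  #
  #     get_actor(%{"actor" => [%{"type" => "Person", "id" => "https://social.example/u/alice"}]})
  #     #=> "https://social.example/u/alice"
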
  def get_actor(%{"actor" => actor}) when is_binary(actor) do
    actor
  end

  def get_actor(%{"actor" => actor}) when is_list(actor) do
    if is_binary(Enum.at(actor, 0)) do
      Enum.at(actor, 0)
    else
      Enum.find(actor, fn %{"type" => type} -> type in ["Person", "Service", "Application"] end)
      |> Map.get("id")
    end
  end

  def get_actor(%{"actor" => %{"id" => id}}) when is_bitstring(id) do
    id
  end

  def get_actor(%{"actor" => nil, "attributedTo" => actor}) when not is_nil(actor) do
    get_actor(%{"actor" => actor})
  end

  @doc """
  Modifies an incoming AP object (Mastodon format) to our internal format.
  """
  def fix_object(object) do
    object
    |> Map.put("actor", object["attributedTo"])
    |> fix_attachments
    |> fix_in_reply_to
    # |> fix_tag
  end

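  # Illustrative sketch of what `fix_object/1` does to an incoming object (the input map
  # is an assumption, not a fixture from this codebase):
  #
  #     fix_object(%{"type" => "Note", "attributedTo" => "https://social.example/u/alice"})
  #     # => a map where "actor" is set to "https://social.example/u/alice", and the
  #     #    attachments and inReplyTo fields are normalized to our internal format.
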
  def fix_in_reply_to(%{"inReplyTo" => in_reply_to} = object)
      when not is_nil(in_reply_to) and is_bitstring(in_reply_to) do
    in_reply_to |> do_fix_in_reply_to(object)
  end

  def fix_in_reply_to(%{"inReplyTo" => in_reply_to} = object)
      when not is_nil(in_reply_to) and is_map(in_reply_to) do
    if is_bitstring(in_reply_to["id"]) do
      in_reply_to["id"] |> do_fix_in_reply_to(object)
    end
  end

  def fix_in_reply_to(%{"inReplyTo" => in_reply_to} = object)
      when not is_nil(in_reply_to) and is_list(in_reply_to) do
    if is_bitstring(Enum.at(in_reply_to, 0)) do
      in_reply_to |> Enum.at(0) |> do_fix_in_reply_to(object)
    end
  end

  def fix_in_reply_to(%{"inReplyTo" => in_reply_to} = object)
      when not is_nil(in_reply_to) do
    Logger.warn("inReplyTo ID seems incorrect: #{inspect(in_reply_to)}")
    do_fix_in_reply_to("", object)
  end

  def fix_in_reply_to(object), do: object

  def do_fix_in_reply_to(in_reply_to_id, object) do
    case fetch_obj_helper(in_reply_to_id) do
      {:ok, replied_object} ->
        object
        |> Map.put("inReplyTo", replied_object.url)

      {:error, {:error, :not_supported}} ->
        Logger.info("Object reply origin doesn't have a supported type")
        object

      e ->
        Logger.warn("Couldn't fetch #{in_reply_to_id} #{inspect(e)}")
        object
    end
  end

  def fix_attachments(object) do
    attachments =
      (object["attachment"] || [])
      |> Enum.map(fn data ->
        url = [%{"type" => "Link", "mediaType" => data["mediaType"], "href" => data["url"]}]
        Map.put(data, "url", url)
      end)

    object
    |> Map.put("attachment", attachments)
  end

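  # Illustrative sketch: an incoming attachment with a plain "url" string gets its "url"
  # rewritten as a list of Link objects (the input below is an assumption):
  #
  #     fix_attachments(%{"attachment" => [%{"mediaType" => "image/png", "url" => "https://social.example/media/1.png"}]})
  #     # => %{"attachment" => [%{"mediaType" => "image/png",
  #     #      "url" => [%{"type" => "Link", "mediaType" => "image/png", "href" => "https://social.example/media/1.png"}]}]}
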
  def fix_tag(object) do
    tags =
      (object["tag"] || [])
      |> Enum.filter(fn data -> data["type"] == "Hashtag" and data["name"] end)
      |> Enum.map(fn data -> String.slice(data["name"], 1..-1) end)

    combined = (object["tag"] || []) ++ tags

    object
    |> Map.put("tag", combined)
  end

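  # Illustrative sketch of `fix_tag/1` (currently commented out in `fix_object/1`): the
  # bare names of Hashtag objects are appended alongside the original tag maps. The input
  # below is an assumption, not a fixture from this codebase:
  #
  #     fix_tag(%{"tag" => [%{"type" => "Hashtag", "name" => "#mobilizon"}]})
  #     # => %{"tag" => [%{"type" => "Hashtag", "name" => "#mobilizon"}, "mobilizon"]}
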
  def handle_incoming(%{"id" => nil}), do: :error
  def handle_incoming(%{"id" => ""}), do: :error

  def handle_incoming(%{"type" => "Flag"} = data) do
    with params <- Mobilizon.Service.ActivityPub.Converters.Flag.as_to_model(data) do
      params = %{
        reporter_url: params["reporter"].url,
        reported_actor_url: params["reported"].url,
        comments_url: params["comments"] |> Enum.map(& &1.url),
        content: params["content"] || "",
        additional: %{
          "cc" => [params["reported"].url]
        }
      }

      ActivityPub.flag(params)
    end
  end

  def handle_incoming(%{"type" => "Create", "object" => %{"type" => "Note"} = object} = data) do
    Logger.info("Handle incoming to create notes")

    with {:ok, %Actor{} = actor} <- Actors.get_or_fetch_by_url(data["actor"]) do
      Logger.debug("found actor")
      Logger.debug(inspect(actor))

      params = %{
        to: data["to"],
        object: object |> fix_object,
        actor: actor,
        local: false,
        published: data["published"],
        additional:
          Map.take(data, [
            "cc",
            "id"
          ])
      }

      ActivityPub.create(params)
    end
  end

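  # Illustrative sketch of the minimal wire shape this `Create`/`Note` handler expects
  # (the field values below are assumptions, not fixtures from this codebase):
  #
  #     handle_incoming(%{
  #       "type" => "Create",
  #       "id" => "https://social.example/activities/1",
  #       "actor" => "https://social.example/u/alice",
  #       "to" => ["https://www.w3.org/ns/activitystreams#Public"],
  #       "object" => %{
  #         "type" => "Note",
  #         "id" => "https://social.example/notes/1",
  #         "attributedTo" => "https://social.example/u/alice",
  #         "content" => "Hello"
  #       }
  #     })
  #     # The result is whatever `ActivityPub.create/1` returns for these params.
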
  def handle_incoming(%{"type" => "Create", "object" => %{"type" => "Event"} = object} = data) do
    Logger.info("Handle incoming to create event")

    with {:ok, %Actor{} = actor} <- Actors.get_or_fetch_by_url(data["actor"]) do
      Logger.debug("found actor")
      Logger.debug(inspect(actor))

      params = %{
        to: data["to"],
        object: object |> fix_object,
        actor: actor,
        local: false,
        published: data["published"],
        additional:
          Map.take(data, [
            "cc",
            "id"
          ])
      }

      ActivityPub.create(params)
    end
  end

  def handle_incoming(
        %{"type" => "Follow", "object" => followed, "actor" => follower, "id" => id} = _data
      ) do
    with {:ok, %Actor{} = followed} <- Actors.get_or_fetch_by_url(followed, true),
         {:ok, %Actor{} = follower} <- Actors.get_or_fetch_by_url(follower),
         {:ok, activity, object} <- ActivityPub.follow(follower, followed, id, false) do
      {:ok, activity, object}
    else
      e ->
        Logger.warn("Unable to handle Follow activity #{inspect(e)}")
        :error
    end
  end

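  # The Accept/Reject handlers below wrap the result of the `do_handle_incoming_*` helpers
  # in a `{:object_not_found, _}` tuple: each helper returns `nil` when the accepted or
  # rejected object isn't of the kind it handles, so `||` tries them in turn, and the
  # tagged tuple lets the `with` distinguish "no helper matched" from a genuine error
  # returned by a helper.
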
  def handle_incoming(
        %{
          "type" => "Accept",
          "object" => accepted_object,
          "actor" => _actor,
          "id" => id
        } = data
      ) do
    with actor_url <- get_actor(data),
         {:ok, %Actor{} = actor} <- Actors.get_or_fetch_by_url(actor_url),
         {:object_not_found, {:ok, activity, object}} <-
           {:object_not_found,
            do_handle_incoming_accept_following(accepted_object, actor) ||
              do_handle_incoming_accept_join(accepted_object, actor)} do
      {:ok, activity, object}
    else
      {:object_not_found, nil} ->
        Logger.warn(
          "Unable to process Accept activity #{inspect(id)}. Object #{inspect(accepted_object)} wasn't found."
        )

        :error

      e ->
        Logger.warn(
          "Unable to process Accept activity #{inspect(id)} for object #{inspect(accepted_object)}: only returned #{
            inspect(e)
          }"
        )

        :error
    end
  end

  def handle_incoming(
        %{"type" => "Reject", "object" => rejected_object, "actor" => _actor, "id" => id} = data
      ) do
    with actor_url <- get_actor(data),
         {:ok, %Actor{} = actor} <- Actors.get_or_fetch_by_url(actor_url),
         {:object_not_found, {:ok, activity, object}} <-
           {:object_not_found,
            do_handle_incoming_reject_following(rejected_object, actor) ||
              do_handle_incoming_reject_join(rejected_object, actor)} do
      {:ok, activity, object}
    else
      {:object_not_found, nil} ->
        Logger.warn(
          "Unable to process Reject activity #{inspect(id)}. Object #{inspect(rejected_object)} wasn't found."
        )

        :error

      e ->
        Logger.warn(
          "Unable to process Reject activity #{inspect(id)} for object #{inspect(rejected_object)}: only returned #{
            inspect(e)
          }"
        )

        :error
    end
  end

  #
  # def handle_incoming(
  #       %{"type" => "Like", "object" => object_id, "actor" => actor, "id" => id} = data
  #     ) do
  #   with %User{} = actor <- User.get_or_fetch_by_ap_id(actor),
  #        {:ok, object} <-
  #          fetch_obj_helper(object_id) || ActivityPub.fetch_object_from_id(object_id),
  #        {:ok, activity, object} <- ActivityPub.like(actor, object, id, false) do
  #     {:ok, activity}
  #   else
  #     _e -> :error
  #   end
  # end
  # #

  def handle_incoming(
        %{"type" => "Announce", "object" => object_id, "actor" => _actor, "id" => id} = data
      ) do
    with actor <- get_actor(data),
         {:ok, %Actor{} = actor} <- Actors.get_or_fetch_by_url(actor),
         {:ok, object} <- fetch_obj_helper_as_activity_streams(object_id),
         public <- Visibility.is_public?(data),
         {:ok, activity, object} <- ActivityPub.announce(actor, object, id, false, public) do
      {:ok, activity, object}
    else
      e ->
        Logger.debug(inspect(e))
        :error
    end
  end

  def handle_incoming(
        %{"type" => "Update", "object" => %{"type" => object_type} = object, "actor" => _actor_id} =
          data
      )
      when object_type in ["Person", "Application", "Service", "Organization"] do
    case Actors.get_actor_by_url(object["id"]) do
      {:ok, %Actor{url: url}} ->
        {:ok, new_actor_data} = ActivityPub.actor_data_from_actor_object(object)

        Actors.insert_or_update_actor(new_actor_data)

        ActivityPub.update(%{
          local: false,
          to: data["to"] || [],
          cc: data["cc"] || [],
          object: object,
          actor: url
        })

      e ->
        Logger.debug(inspect(e))
        :error
    end
  end

  def handle_incoming(
        %{
          "type" => "Undo",
          "object" => %{
            "type" => "Announce",
            "object" => object_id,
            "id" => cancelled_activity_id
          },
          "actor" => _actor,
          "id" => id
        } = data
      ) do
    with actor <- get_actor(data),
         {:ok, %Actor{} = actor} <- Actors.get_or_fetch_by_url(actor),
         {:ok, object} <- fetch_obj_helper_as_activity_streams(object_id),
         {:ok, activity, object} <-
           ActivityPub.unannounce(actor, object, id, cancelled_activity_id, false) do
      {:ok, activity, object}
    else
      _e -> :error
    end
  end

  def handle_incoming(
        %{
          "type" => "Undo",
          "object" => %{"type" => "Follow", "object" => followed},
          "actor" => follower,
          "id" => id
        } = _data
      ) do
    with {:ok, %Actor{domain: nil} = followed} <- Actors.get_actor_by_url(followed),
         {:ok, %Actor{} = follower} <- Actors.get_actor_by_url(follower),
         {:ok, activity, object} <- ActivityPub.unfollow(follower, followed, id, false) do
      {:ok, activity, object}
    else
      e ->
        Logger.debug(inspect(e))
        :error
    end
  end

  # TODO: We presently assume that any actor on the same origin domain as the object being
  # deleted has the rights to delete that object. A better way to validate whether or not
  # the object should be deleted is to refetch the object URI, which should return either
  # an error or a tombstone. This would allow us to verify that a deletion actually took
  # place.
  def handle_incoming(
        %{"type" => "Delete", "object" => object, "actor" => _actor, "id" => _id} = data
      ) do
    object_id = Utils.get_url(object)

    with actor <- get_actor(data),
         {:ok, %Actor{url: _actor_url}} <- Actors.get_actor_by_url(actor),
         {:ok, object} <- fetch_obj_helper(object_id),
         # TODO: Validate that the Delete indeed comes from the right domain (see above)
         # :ok <- contain_origin(actor_url, object.data),
         {:ok, activity, object} <- ActivityPub.delete(object, false) do
      {:ok, activity, object}
    else
      e ->
        Logger.debug(inspect(e))
        :error
    end
  end

  def handle_incoming(
        %{"type" => "Join", "object" => object, "actor" => _actor, "id" => _id} = data
      ) do
    with actor <- get_actor(data),
         {:ok, %Actor{url: _actor_url} = actor} <- Actors.get_actor_by_url(actor),
         {:ok, object} <- fetch_obj_helper(object),
         {:ok, activity, object} <- ActivityPub.join(object, actor, false) do
      {:ok, activity, object}
    else
      e ->
        Logger.debug(inspect(e))
        :error
    end
  end

  def handle_incoming(
        %{"type" => "Leave", "object" => object, "actor" => actor, "id" => _id} = data
      ) do
    with actor <- get_actor(data),
         {:ok, %Actor{} = actor} <- Actors.get_actor_by_url(actor),
         {:ok, object} <- fetch_obj_helper(object),
         {:ok, activity, object} <- ActivityPub.leave(object, actor, false) do
      {:ok, activity, object}
    else
      {:only_organizer, true} ->
        Logger.warn(
          "Actor #{inspect(actor)} tried to leave event #{inspect(object)} but they were the only organizer, so we didn't detach them"
        )

        :error

      e ->
        Logger.error(inspect(e))
        :error
    end
  end

  #
  # # TODO
  # # Accept
  # # Undo
  #
  # def handle_incoming(
  #       %{
  #         "type" => "Undo",
  #         "object" => %{"type" => "Like", "object" => object_id},
  #         "actor" => _actor,
  #         "id" => id
  #       } = data
  #     ) do
  #   with actor <- get_actor(data),
  #        %Actor{} = actor <- Actors.get_or_fetch_by_url(actor),
  #        {:ok, object} <- fetch_obj_helper(object_id) || fetch_obj_helper(object_id),
  #        {:ok, activity, _, _} <- ActivityPub.unlike(actor, object, id, false) do
  #     {:ok, activity}
  #   else
  #     _e -> :error
  #   end
  # end

  def handle_incoming(_) do
    Logger.info("Handling something not supported")
    {:error, :not_supported}
  end

  @doc """
  Handle incoming `Accept` activities wrapping a `Follow` activity
  """
  def do_handle_incoming_accept_following(follow_object, %Actor{} = actor) do
    with {:follow,
          {:ok,
           %Follower{approved: false, actor: follower, id: follow_id, target_actor: followed} =
             follow}} <-
           {:follow, get_follow(follow_object)},
         {:same_actor, true} <- {:same_actor, actor.id == followed.id},
         {:ok, activity, _} <-
           ActivityPub.accept(
             %{
               to: [follower.url],
               actor: actor.url,
               object: follow_object,
               local: false
             },
             "#{MobilizonWeb.Endpoint.url()}/accept/follow/#{follow_id}"
           ),
         {:ok, %Follower{approved: true}} <- Actors.update_follower(follow, %{"approved" => true}) do
      {:ok, activity, follow}
    else
      {:follow, _} ->
        Logger.debug(
          "Tried to handle an Accept activity but it doesn't contain a Follow activity"
        )

        nil

      {:same_actor} ->
        {:error, "Actor who accepted the follow wasn't the target. Quite odd."}

      {:ok, %Follower{approved: true} = _follow} ->
        {:error, "Follow already accepted"}
    end
  end

  @doc """
  Handle incoming `Reject` activities wrapping a `Follow` activity
  """
  def do_handle_incoming_reject_following(follow_object, %Actor{} = actor) do
    with {:follow,
          {:ok,
           %Follower{approved: false, actor: follower, id: follow_id, target_actor: followed} =
             follow}} <-
           {:follow, get_follow(follow_object)},
         {:same_actor, true} <- {:same_actor, actor.id == followed.id},
         {:ok, activity, _} <-
           ActivityPub.reject(
             %{
               to: [follower.url],
               actor: actor.url,
               object: follow_object,
               local: false
             },
             "#{MobilizonWeb.Endpoint.url()}/reject/follow/#{follow_id}"
           ),
         {:ok, %Follower{}} <- Actors.delete_follower(follow) do
      {:ok, activity, follow}
    else
      {:follow, _} ->
        Logger.debug(
          "Tried to handle a Reject activity but it doesn't contain a Follow activity"
        )

        nil

      {:same_actor} ->
        {:error, "Actor who rejected the follow wasn't the target. Quite odd."}

      {:ok, %Follower{approved: true} = _follow} ->
        {:error, "Follow already accepted"}
    end
  end

  @doc """
  Handle incoming `Accept` activities wrapping a `Join` activity on an event
  """
  def do_handle_incoming_accept_join(join_object, %Actor{} = actor_accepting) do
    with {:join_event,
          {:ok,
           %Participant{role: :not_approved, actor: actor, id: join_id, event: event} =
             participant}} <-
           {:join_event, get_participant(join_object)},
         # TODO: The actor that accepts the Join activity may be another one than the event organizer?
         # Or maybe for groups it's the group that sends the Accept activity
         {:same_actor, true} <- {:same_actor, actor_accepting.id == event.organizer_actor_id},
         {:ok, activity, _} <-
           ActivityPub.accept(
             %{
               to: [actor.url],
               actor: actor_accepting.url,
               object: join_object,
               local: false
             },
             "#{MobilizonWeb.Endpoint.url()}/accept/join/#{join_id}"
           ),
         {:ok, %Participant{role: :participant}} <-
           Events.update_participant(participant, %{"role" => :participant}) do
      {:ok, activity, participant}
    else
      {:join_event, {:ok, %Participant{role: :participant}}} ->
        Logger.debug(
          "Tried to handle an Accept activity on a Join activity with an event object but the participant is already validated"
        )

        nil

      {:join_event, _err} ->
        Logger.debug(
          "Tried to handle an Accept activity but it doesn't contain a Join activity on an event"
        )

        nil

      {:same_actor} ->
        {:error, "Actor who accepted the join wasn't the event organizer. Quite odd."}

      {:ok, %Participant{role: :participant} = _follow} ->
        {:error, "Participant"}
    end
  end

  @doc """
  Handle incoming `Reject` activities wrapping a `Join` activity on an event
  """
  def do_handle_incoming_reject_join(join_object, %Actor{} = actor_accepting) do
    with {:join_event,
          {:ok,
           %Participant{role: :not_approved, actor: actor, id: join_id, event: event} =
             participant}} <-
           {:join_event, get_participant(join_object)},
         # TODO: The actor that rejects the Join activity may be another one than the event organizer?
         # Or maybe for groups it's the group that sends the Reject activity
         {:same_actor, true} <- {:same_actor, actor_accepting.id == event.organizer_actor_id},
         {:ok, activity, _} <-
           ActivityPub.reject(
             %{
               to: [actor.url],
               actor: actor_accepting.url,
               object: join_object,
               local: false
             },
             "#{MobilizonWeb.Endpoint.url()}/reject/join/#{join_id}"
           ),
         {:ok, %Participant{}} <-
           Events.delete_participant(participant) do
      {:ok, activity, participant}
    else
      {:join_event, {:ok, %Participant{role: :participant}}} ->
        Logger.debug(
          "Tried to handle a Reject activity on a Join activity with an event object but the participant is already validated"
        )

        nil

      {:join_event, _err} ->
        Logger.debug(
          "Tried to handle a Reject activity but it doesn't contain a Join activity on an event"
        )

        nil

      {:same_actor} ->
        {:error, "Actor who rejected the join wasn't the event organizer. Quite odd."}

      {:ok, %Participant{role: :participant} = _follow} ->
        {:error, "Participant"}
    end
  end

  # TODO: Add do_handle_incoming_accept_join/1 on Groups

  defp get_follow(follow_object) do
    with follow_object_id when not is_nil(follow_object_id) <- Utils.get_url(follow_object),
         {:not_found, %Follower{} = follow} <-
           {:not_found, Actors.get_follow_by_url(follow_object_id)} do
      {:ok, follow}
    else
      {:not_found, _err} ->
        {:error, "Follow URL not found"}

      _ ->
        {:error, "ActivityPub ID not found in Accept Follow object"}
    end
  end

  defp get_participant(join_object) do
    with join_object_id when not is_nil(join_object_id) <- Utils.get_url(join_object),
         {:not_found, %Participant{} = participant} <-
           {:not_found, Events.get_participant_by_url(join_object_id)} do
      {:ok, participant}
    else
      {:not_found, _err} ->
        {:error, "Participant URL not found"}

      _ ->
        {:error, "ActivityPub ID not found in Accept Join object"}
    end
  end

  def set_reply_to_uri(%{"inReplyTo" => in_reply_to} = object) do
    with false <- String.starts_with?(in_reply_to, "http"),
         {:ok, replied_to_object} <- fetch_obj_helper(in_reply_to) do
      Map.put(object, "inReplyTo", replied_to_object["external_url"] || in_reply_to)
    else
      _e -> object
    end
  end

  def set_reply_to_uri(obj), do: obj

  #
  # # Prepares the object of an outgoing create activity.

  def prepare_object(object) do
    object
    # |> set_sensitive
    |> add_hashtags
    |> add_mention_tags
    # |> add_emoji_tags
    |> add_attributed_to
    # |> prepare_attachments
    |> set_reply_to_uri
  end

  @doc """
  Prepares outgoing data: internal -> Mastodon (wire ActivityPub) format.
  """
  def prepare_outgoing(%{"type" => "Create", "object" => %{"type" => "Note"} = object} = data) do
    Logger.debug("Prepare outgoing for a note creation")

    object =
      object
      |> prepare_object

    data =
      data
      |> Map.put("object", object)
      |> Map.merge(Utils.make_json_ld_header())

    Logger.debug("Finished prepare outgoing for a note creation")

    {:ok, data}
  end

  def prepare_outgoing(%{"type" => _type} = data) do
    data =
      data
      |> Map.merge(Utils.make_json_ld_header())

    {:ok, data}
  end

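  # Illustrative sketch of `prepare_outgoing/1` on a bare activity (the input map is an
  # assumption, not a fixture from this codebase): the JSON-LD header from
  # `Utils.make_json_ld_header/0` is merged in, so the result carries an "@context" key.
  #
  #     {:ok, data} = prepare_outgoing(%{"type" => "Accept", "actor" => "https://mobilizon.example/@org"})
  #     Map.has_key?(data, "@context")
  #     #=> true
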
  # def prepare_outgoing(%Event{} = event) do
  #   event =
  #     event
  #     |> Map.from_struct()
  #     |> Map.drop([:__meta__])
  #     |> Map.put(:"@context", "https://www.w3.org/ns/activitystreams")
  #     |> prepare_object
  #   {:ok, event}
  # end

  # def prepare_outgoing(%Comment{} = comment) do
  #   comment =
  #     comment
  #     |> Map.from_struct()
  #     |> Map.drop([:__meta__])
  #     |> Map.put(:"@context", "https://www.w3.org/ns/activitystreams")
  #     |> prepare_object
  #   {:ok, comment}
  # end

  #
  # def maybe_fix_object_url(data) do
  #   if is_binary(data["object"]) and not String.starts_with?(data["object"], "http") do
  #     case ActivityPub.fetch_object_from_id(data["object"]) do
  #       {:ok, relative_object} ->
  #         if relative_object.data["external_url"] do
  #           data =
  #             data
  #             |> Map.put("object", relative_object.data["external_url"])
  #         else
  #           data
  #         end
  #
  #       e ->
  #         Logger.error("Couldn't fetch #{data["object"]} #{inspect(e)}")
  #         data
  #     end
  #   else
  #     data
  #   end
  # end
  #

  def add_hashtags(object) do
    tags =
      (object["tag"] || [])
      |> Enum.map(fn tag ->
        %{
          "href" => MobilizonWeb.Endpoint.url() <> "/tags/#{tag}",
          "name" => "##{tag}",
          "type" => "Hashtag"
        }
      end)

    object
    |> Map.put("tag", tags)
  end

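  # Illustrative sketch of `add_hashtags/1`: plain tag names are expanded into Hashtag
  # objects pointing at the local /tags/ route (the input below is an assumption):
  #
  #     add_hashtags(%{"tag" => ["mobilizon"]})
  #     # => %{"tag" => [%{"type" => "Hashtag", "name" => "#mobilizon",
  #     #      "href" => MobilizonWeb.Endpoint.url() <> "/tags/mobilizon"}]}
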
  def add_mention_tags(object) do
    recipients =
      (object["to"] ++ (object["cc"] || [])) -- ["https://www.w3.org/ns/activitystreams#Public"]

    mentions =
      recipients
      |> Enum.filter(& &1)
      |> Enum.map(fn url ->
        case Actors.get_actor_by_url(url) do
          {:ok, actor} -> actor
          _ -> nil
        end
      end)
      |> Enum.filter(& &1)
      |> Enum.map(fn actor ->
        %{"type" => "Mention", "href" => actor.url, "name" => "@#{actor.preferred_username}"}
      end)

    tags = object["tag"] || []

    object
    |> Map.put("tag", tags ++ mentions)
  end

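  # Illustrative sketch of `add_mention_tags/1`: every non-public recipient that resolves
  # to a known actor is appended as a Mention tag (the recipient URL and the "alice"
  # username below are assumptions, not fixtures from this codebase):
  #
  #     add_mention_tags(%{"to" => ["https://social.example/u/alice"], "cc" => [], "tag" => []})
  #     # => %{"to" => [...], "cc" => [], "tag" => [%{"type" => "Mention",
  #     #      "href" => "https://social.example/u/alice", "name" => "@alice"}]}
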
  #
  # # TODO: we should probably send mtime instead of unix epoch time for updated
  # def add_emoji_tags(object) do
  #   tags = object["tag"] || []
  #   emoji = object["emoji"] || []
  #
  #   out =
  #     emoji
  #     |> Enum.map(fn {name, url} ->
  #       %{
  #         "icon" => %{"url" => url, "type" => "Image"},
  #         "name" => ":" <> name <> ":",
  #         "type" => "Emoji",
  #         "updated" => "1970-01-01T00:00:00Z",
  #         "id" => url
  #       }
  #     end)
  #
  #   object
  #   |> Map.put("tag", tags ++ out)
  # end
  #

  #
  # def set_sensitive(object) do
  #   tags = object["tag"] || []
  #   Map.put(object, "sensitive", "nsfw" in tags)
  # end
  #

  def add_attributed_to(object) do
    attributed_to = object["attributedTo"] || object["actor"]

    object |> Map.put("attributedTo", attributed_to)
  end

  #
  # def prepare_attachments(object) do
  #   attachments =
  #     (object["attachment"] || [])
  #     |> Enum.map(fn data ->
  #       [%{"mediaType" => media_type, "href" => href} | _] = data["url"]
  #       %{"url" => href, "mediaType" => media_type, "name" => data["name"], "type" => "Document"}
  #     end)
  #
  #   object
  #   |> Map.put("attachment", attachments)
  # end

  @spec fetch_obj_helper(map() | String.t()) ::
          {:ok, Event.t() | Comment.t() | Actor.t()} | {:error, any()}
  def fetch_obj_helper(object) do
    Logger.debug("Fetching object #{inspect(object)}")

    case object |> Utils.get_url() |> ActivityPub.fetch_object_from_url() do
      {:ok, object} ->
        {:ok, object}

      err ->
        Logger.info("Error while fetching #{inspect(object)}")
        {:error, err}
    end
  end

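  # Illustrative sketch (the URL below is an assumption, not a fixture from this codebase):
  #
  #     fetch_obj_helper("https://social.example/notes/1")
  #     #=> {:ok, model} when the object can be fetched, or {:error, reason} otherwise
  #
  # `fetch_obj_helper_as_activity_streams/1` below does the same fetch and then converts
  # the model back to its ActivityStreams representation.
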
  def fetch_obj_helper_as_activity_streams(object) do
    with {:ok, object} <- fetch_obj_helper(object) do
      {:ok, Mobilizon.Service.ActivityPub.Convertible.model_to_as(object)}
    end
  end
end