2018-12-24 13:34:45 +01:00
|
|
|
|
# Portions of this file are derived from Pleroma:
|
|
|
|
|
# Copyright © 2017-2018 Pleroma Authors <https://pleroma.social>
|
|
|
|
|
# SPDX-License-Identifier: AGPL-3.0-only
|
2018-12-27 11:24:04 +01:00
|
|
|
|
# Upstream: https://git.pleroma.social/pleroma/pleroma/blob/develop/lib/pleroma/web/activity_pub/activity_pub.ex
|
2018-12-24 13:34:45 +01:00
|
|
|
|
|
2020-01-22 02:14:42 +01:00
|
|
|
|
defmodule Mobilizon.Federation.ActivityPub do
|
2018-06-14 18:15:27 +02:00
|
|
|
|
@moduledoc """
|
2020-01-22 02:14:42 +01:00
|
|
|
|
The ActivityPub context.
|
2018-06-14 18:15:27 +02:00
|
|
|
|
"""
|
|
|
|
|
|
2020-01-22 02:14:42 +01:00
|
|
|
|
import Mobilizon.Federation.ActivityPub.Utils
|
2018-05-17 11:32:23 +02:00
|
|
|
|
|
2020-01-28 20:15:59 +01:00
|
|
|
|
alias Mobilizon.{Actors, Config, Events, Reports, Share, Users}
|
2019-08-14 17:45:11 +02:00
|
|
|
|
alias Mobilizon.Actors.{Actor, Follower}
|
2019-09-22 16:26:23 +02:00
|
|
|
|
alias Mobilizon.Events.{Comment, Event, Participant}
|
2019-11-15 18:36:47 +01:00
|
|
|
|
alias Mobilizon.Reports.Report
|
|
|
|
|
alias Mobilizon.Tombstone
|
2020-01-22 02:14:42 +01:00
|
|
|
|
|
|
|
|
|
alias Mobilizon.Federation.ActivityPub.{
|
|
|
|
|
Activity,
|
|
|
|
|
Audience,
|
|
|
|
|
Federator,
|
|
|
|
|
Relay,
|
|
|
|
|
Transmogrifier,
|
2020-01-22 22:40:40 +01:00
|
|
|
|
Visibility
|
2020-01-22 02:14:42 +01:00
|
|
|
|
}
|
|
|
|
|
|
2020-01-22 22:40:40 +01:00
|
|
|
|
alias Mobilizon.Federation.ActivityStream.{Converter, Convertible}
|
|
|
|
|
alias Mobilizon.Federation.ActivityStream.Converter.Utils, as: ConverterUtils
|
2020-01-22 02:14:42 +01:00
|
|
|
|
alias Mobilizon.Federation.HTTPSignatures.Signature
|
2020-01-22 22:40:40 +01:00
|
|
|
|
alias Mobilizon.Federation.WebFinger
|
2020-01-22 02:14:42 +01:00
|
|
|
|
|
2020-01-26 21:11:16 +01:00
|
|
|
|
alias Mobilizon.GraphQL.API.Utils, as: APIUtils
|
|
|
|
|
|
2020-01-28 19:18:33 +01:00
|
|
|
|
alias Mobilizon.Web.Endpoint
|
2020-01-26 21:36:50 +01:00
|
|
|
|
alias Mobilizon.Web.Email.{Admin, Mailer}
|
2018-05-17 11:32:23 +02:00
|
|
|
|
|
2018-11-12 09:05:31 +01:00
|
|
|
|
require Logger
|
2018-05-17 11:32:23 +02:00
|
|
|
|
|
2018-11-12 18:17:53 +01:00
|
|
|
|
@doc """
Wraps an object into an activity
"""
@spec create_activity(map, boolean) :: {:ok, Activity.t()}
def create_activity(map, local \\ true) when is_map(map) do
  # `lazy_put_activity_defaults/1` always returns a map, so the original
  # single-clause `with` could never fall through — a plain rebinding says
  # the same thing without implying a failure path.
  map = lazy_put_activity_defaults(map)

  {:ok,
   %Activity{
     data: map,
     local: local,
     actor: map["actor"],
     recipients: get_recipients(map)
   }}
end
|
|
|
|
|
|
2018-11-12 18:17:53 +01:00
|
|
|
|
@doc """
Fetch an object from an URL, from our local database of events and comments, then eventually remote
"""
# TODO: Make database calls parallel
@spec fetch_object_from_url(String.t()) :: {:ok, %Event{}} | {:ok, %Comment{}} | {:error, any()}
def fetch_object_from_url(url) do
  Logger.info("Fetching object from url #{url}")

  # Signed fetch: date + HTTP signature headers so strict remote instances
  # accept the request.
  date = Signature.generate_date_header()

  headers =
    [{:Accept, "application/activity+json"}]
    |> maybe_date_fetch(date)
    |> sign_fetch(url, date)

  Logger.debug("Fetch headers: #{inspect(headers)}")

  # Clause order matters: local event, then local comment, then local/remote
  # actor are checked BEFORE any HTTP request is made. Each local hit is
  # short-circuited through the `else` branch below.
  with {:not_http, true} <- {:not_http, String.starts_with?(url, "http")},
       {:existing_event, nil} <- {:existing_event, Events.get_event_by_url(url)},
       {:existing_comment, nil} <- {:existing_comment, Events.get_comment_from_url(url)},
       {:existing_actor, {:error, _err}} <-
         {:existing_actor, get_or_fetch_actor_by_url(url)},
       {:ok, %{body: body, status_code: code}} when code in 200..299 <-
         HTTPoison.get(
           url,
           headers,
           follow_redirect: true,
           timeout: 10_000,
           recv_timeout: 20_000
         ),
       {:ok, data} <- Jason.decode(body),
       # Refuse objects whose id doesn't match the origin we fetched from.
       {:origin_check, true} <- {:origin_check, origin_check?(url, data)},
       # Wrap the fetched object in a synthetic `Create` so it goes through
       # the same incoming pipeline as federated activities.
       params <- %{
         "type" => "Create",
         "to" => data["to"],
         "cc" => data["cc"],
         "actor" => data["attributedTo"],
         "object" => data
       },
       {:ok, _activity, %{url: object_url} = _object} <- Transmogrifier.handle_incoming(params) do
    # The object was persisted by handle_incoming/1; reload it with preloads.
    case data["type"] do
      "Event" ->
        {:ok, Events.get_public_event_by_url_with_preload!(object_url)}

      "Note" ->
        {:ok, Events.get_comment_from_url_with_preload!(object_url)}

      "Actor" ->
        {:ok, Actors.get_actor_by_url!(object_url, true)}

      other ->
        {:error, other}
    end
  else
    # Local copies found before fetching: return them directly.
    {:existing_event, %Event{url: event_url}} ->
      {:ok, Events.get_public_event_by_url_with_preload!(event_url)}

    {:existing_comment, %Comment{url: comment_url}} ->
      {:ok, Events.get_comment_from_url_with_preload!(comment_url)}

    {:existing_actor, {:ok, %Actor{url: actor_url}}} ->
      {:ok, Actors.get_actor_by_url!(actor_url, true)}

    {:origin_check, false} ->
      Logger.warn("Object origin check failed")
      {:error, "Object origin check failed"}

    e ->
      {:error, e}
  end
end
|
|
|
|
|
|
2019-09-09 00:52:49 +02:00
|
|
|
|
@doc """
Getting an actor from url, eventually creating it if we don't have it locally or if it needs an update
"""
@spec get_or_fetch_actor_by_url(String.t(), boolean) :: {:ok, Actor.t()} | {:error, String.t()}
def get_or_fetch_actor_by_url(url, preload \\ false)

# The ActivityPub "Public" pseudo-collection is mapped to our relay actor.
def get_or_fetch_actor_by_url("https://www.w3.org/ns/activitystreams#Public", _preload) do
  with %Actor{url: url} <- Relay.get_actor() do
    get_or_fetch_actor_by_url(url)
  end
end

def get_or_fetch_actor_by_url(url, preload) do
  # Serve the cached actor unless it is stale (needs_update?/1 decides);
  # any other outcome falls through to a remote (re)fetch.
  with {:ok, %Actor{} = cached_actor} <- Actors.get_actor_by_url(url, preload),
       false <- Actors.needs_update?(cached_actor) do
    {:ok, cached_actor}
  else
    _ ->
      # For tests, see https://github.com/jjh42/mock#not-supported---mocking-internal-function-calls and Mobilizon.Federation.ActivityPubTest
      case __MODULE__.make_actor_from_url(url, preload) do
        {:ok, %Actor{} = actor} ->
          {:ok, actor}

        err ->
          Logger.warn("Could not fetch by AP id")
          Logger.debug(inspect(err))
          {:error, "Could not fetch by AP id"}
      end
  end
end
|
|
|
|
|
|
2019-02-22 16:11:57 +01:00
|
|
|
|
@doc """
Create an activity of type `Create`

  * Creates the object, which returns AS data
  * Wraps ActivityStreams data into a `Create` activity
  * Creates an `Mobilizon.Federation.ActivityPub.Activity` from this
  * Federates (asynchronously) the activity
  * Returns the activity
"""
@spec create(atom(), map(), boolean, map()) :: {:ok, Activity.t(), struct()} | any()
def create(type, args, local \\ false, additional \\ %{}) do
  Logger.debug("creating an activity")
  Logger.debug(inspect(args))

  # Refuse to recreate an object whose URL was previously tombstoned.
  with {:tombstone, nil} <- {:tombstone, check_for_tombstones(args)},
       # Dispatch on the entity type; each branch returns {:ok, entity, as_data}.
       {:ok, entity, create_data} <-
         (case type do
            :event -> create_event(args, additional)
            :comment -> create_comment(args, additional)
            :group -> create_group(args, additional)
          end),
       {:ok, activity} <- create_activity(create_data, local),
       :ok <- maybe_federate(activity) do
    {:ok, activity, entity}
  else
    err ->
      Logger.error("Something went wrong while creating an activity")
      Logger.debug(inspect(err))
      err
  end
end
|
|
|
|
|
|
2019-10-25 17:43:37 +02:00
|
|
|
|
@doc """
Create an activity of type `Update`

  * Updates the object, which returns AS data
  * Wraps ActivityStreams data into a `Update` activity
  * Creates an `Mobilizon.Federation.ActivityPub.Activity` from this
  * Federates (asynchronously) the activity
  * Returns the activity
"""
@spec update(atom(), struct(), map(), boolean, map()) :: {:ok, Activity.t(), struct()} | any()
def update(type, old_entity, args, local \\ false, additional \\ %{}) do
  Logger.debug("updating an activity")
  Logger.debug(inspect(args))

  # Dispatch on the entity type; each branch returns {:ok, entity, as_data}.
  with {:ok, entity, update_data} <-
         (case type do
            :event -> update_event(old_entity, args, additional)
            :actor -> update_actor(old_entity, args, additional)
          end),
       {:ok, activity} <- create_activity(update_data, local),
       :ok <- maybe_federate(activity) do
    {:ok, activity, entity}
  else
    err ->
      Logger.error("Something went wrong while creating an activity")
      Logger.debug(inspect(err))
      err
  end
end
|
|
|
|
|
|
2019-12-03 11:29:51 +01:00
|
|
|
|
@doc """
Create and federate an `Accept` activity for a `:join` or `:follow` request.
"""
def accept(type, entity, local \\ true, additional \\ %{}) do
  Logger.debug("We're accepting something")

  # Assertive match: any other `type` or a failed accept_* call crashes here
  # on purpose rather than silently continuing.
  {:ok, entity, update_data} =
    case type do
      :join -> accept_join(entity, additional)
      :follow -> accept_follow(entity, additional)
    end

  with {:ok, activity} <- create_activity(update_data, local),
       :ok <- maybe_federate(activity) do
    {:ok, activity, entity}
  else
    err ->
      Logger.error("Something went wrong while creating an activity")
      Logger.debug(inspect(err))
      err
  end
end
|
|
|
|
|
|
2019-12-03 11:29:51 +01:00
|
|
|
|
@doc """
Create and federate a `Reject` activity for a `:join` or `:follow` request.
"""
def reject(type, entity, local \\ true, additional \\ %{}) do
  # Assertive match: an unknown `type` or a failed reject_* call crashes here.
  {:ok, entity, update_data} =
    case type do
      :join -> reject_join(entity, additional)
      :follow -> reject_follow(entity, additional)
    end

  with {:ok, activity} <- create_activity(update_data, local),
       :ok <- maybe_federate(activity) do
    {:ok, activity, entity}
  else
    err ->
      Logger.error("Something went wrong while creating an activity")
      Logger.debug(inspect(err))
      err
  end
end
|
|
|
|
|
|
2019-07-30 16:40:59 +02:00
|
|
|
|
@doc """
Announce (share/boost) a public object on behalf of `actor`.

Records a `Share` row, then wraps the object in an `Announce` activity and
federates it.
"""
def announce(
      %Actor{} = actor,
      object,
      activity_id \\ nil,
      local \\ true,
      public \\ true
    ) do
  # Only public objects may be announced.
  with true <- Visibility.is_public?(object),
       {:ok, %Actor{id: object_owner_actor_id}} <- Actors.get_actor_by_url(object["actor"]),
       {:ok, %Share{} = _share} <- Share.create(object["id"], actor.id, object_owner_actor_id),
       announce_data <- make_announce_data(actor, object, activity_id, public),
       {:ok, activity} <- create_activity(announce_data, local),
       :ok <- maybe_federate(activity) do
    {:ok, activity, object}
  else
    # NOTE(review): a non-public object makes this return `{:error, false}` —
    # callers apparently only check for the error tag; confirm before changing.
    error ->
      {:error, error}
  end
end
|
2018-12-14 17:41:55 +01:00
|
|
|
|
|
2019-07-30 16:40:59 +02:00
|
|
|
|
@doc """
Undo a previous `Announce` of `object` by `actor`.

Rebuilds the original announce activity, wraps it in an `Undo` and federates
it. On any failure the object is returned as-is (best effort).
"""
def unannounce(
      %Actor{} = actor,
      object,
      activity_id \\ nil,
      cancelled_activity_id \\ nil,
      local \\ true
    ) do
  # The announce being cancelled is reconstructed rather than looked up.
  with announce_activity <- make_announce_data(actor, object, cancelled_activity_id),
       unannounce_data <- make_unannounce_data(actor, announce_activity, activity_id),
       {:ok, unannounce_activity} <- create_activity(unannounce_data, local),
       :ok <- maybe_federate(unannounce_activity) do
    {:ok, unannounce_activity, object}
  else
    # Deliberate best-effort: failing to unannounce still returns the object.
    _e -> {:ok, object}
  end
end
|
2018-12-14 17:41:55 +01:00
|
|
|
|
|
2019-02-22 16:11:57 +01:00
|
|
|
|
@doc """
Make an actor follow another
"""
def follow(%Actor{} = follower, %Actor{} = followed, activity_id \\ nil, local \\ true) do
  # `follower` is rebound from Actor to the created Follower record.
  with {:ok, %Follower{} = follower} <-
         Actors.follow(followed, follower, activity_id, false),
       follower_as_data <- Convertible.model_to_as(follower),
       {:ok, activity} <- create_activity(follower_as_data, local),
       :ok <- maybe_federate(activity) do
    {:ok, activity, follower}
  else
    # Only these two errors are handled; anything else crashes (let it crash).
    {:error, err, msg} when err in [:already_following, :suspended] ->
      {:error, msg}
  end
end
|
|
|
|
|
|
2019-02-22 16:11:57 +01:00
|
|
|
|
@doc """
Make an actor unfollow another
"""
@spec unfollow(Actor.t(), Actor.t(), String.t(), boolean()) :: {:ok, map()} | any()
def unfollow(%Actor{} = follower, %Actor{} = followed, activity_id \\ nil, local \\ true) do
  with {:ok, %Follower{id: follow_id} = follow} <- Actors.unfollow(followed, follower),
       # We recreate the follow activity
       follow_as_data <-
         Convertible.model_to_as(%{follow | actor: follower, target_actor: followed}),
       {:ok, follow_activity} <- create_activity(follow_as_data, local),
       activity_unfollow_id <-
         activity_id || "#{Endpoint.url()}/unfollow/#{follow_id}/activity",
       # The Undo wraps the recreated follow activity.
       unfollow_data <-
         make_unfollow_data(follower, followed, follow_activity, activity_unfollow_id),
       {:ok, activity} <- create_activity(unfollow_data, local),
       :ok <- maybe_federate(activity) do
    {:ok, activity, follow}
  else
    err ->
      Logger.debug("Error while unfollowing an actor #{inspect(err)}")
      err
  end
end
|
|
|
|
|
|
2018-08-24 11:34:00 +02:00
|
|
|
|
def delete(object, local \\ true)

@spec delete(Event.t(), boolean) :: {:ok, Activity.t(), Event.t()}
def delete(%Event{url: url, organizer_actor: actor} = event, local) do
  # Delete activity addressed to the organizer's followers and the public
  # collection.
  data = %{
    "type" => "Delete",
    "actor" => actor.url,
    "object" => url,
    "to" => [actor.url <> "/followers", "https://www.w3.org/ns/activitystreams#Public"],
    "id" => url <> "/delete"
  }

  with audience <-
         Audience.calculate_to_and_cc_from_mentions(event),
       {:ok, %Event{} = event} <- Events.delete_event(event),
       # Evict the cached AS representation of the event.
       {:ok, true} <- Cachex.del(:activity_pub, "event_#{event.uuid}"),
       # Tombstone the URL so the object cannot be recreated later.
       {:ok, %Tombstone{} = _tombstone} <-
         Tombstone.create_tombstone(%{uri: event.url, actor_id: actor.id}),
       Share.delete_all_by_uri(event.url),
       :ok <- check_for_actor_key_rotation(actor),
       {:ok, activity} <- create_activity(Map.merge(data, audience), local),
       :ok <- maybe_federate(activity) do
    {:ok, activity, event}
  end
end
|
|
|
|
|
|
2019-12-03 11:29:51 +01:00
|
|
|
|
@spec delete(Comment.t(), boolean) :: {:ok, Activity.t(), Comment.t()}
def delete(%Comment{url: url, actor: actor} = comment, local) do
  # Delete activity addressed to the author's followers and the public
  # collection.
  data = %{
    "type" => "Delete",
    "actor" => actor.url,
    "object" => url,
    "id" => url <> "/delete",
    "to" => [actor.url <> "/followers", "https://www.w3.org/ns/activitystreams#Public"]
  }

  with audience <-
         Audience.calculate_to_and_cc_from_mentions(comment),
       {:ok, %Comment{} = comment} <- Events.delete_comment(comment),
       # Evict the cached AS representation of the comment.
       {:ok, true} <- Cachex.del(:activity_pub, "comment_#{comment.uuid}"),
       # Tombstone the URL so the object cannot be recreated later.
       {:ok, %Tombstone{} = _tombstone} <-
         Tombstone.create_tombstone(%{uri: comment.url, actor_id: actor.id}),
       Share.delete_all_by_uri(comment.url),
       :ok <- check_for_actor_key_rotation(actor),
       {:ok, activity} <- create_activity(Map.merge(data, audience), local),
       :ok <- maybe_federate(activity) do
    {:ok, activity, comment}
  end
end
|
|
|
|
|
|
2018-12-14 17:41:55 +01:00
|
|
|
|
def delete(%Actor{url: url} = actor, local) do
  # Actor self-deletion: the actor is both subject and object of the Delete.
  data = %{
    "type" => "Delete",
    "actor" => url,
    "object" => url,
    "id" => url <> "/delete",
    "to" => [url <> "/followers", "https://www.w3.org/ns/activitystreams#Public"]
  }

  # Actual deletion runs asynchronously through an Oban job.
  with {:ok, %Oban.Job{}} <- Actors.delete_actor(actor),
       {:ok, activity} <- create_activity(data, local),
       :ok <- maybe_federate(activity) do
    {:ok, activity, actor}
  end
end
|
|
|
|
|
|
2019-11-15 18:36:47 +01:00
|
|
|
|
@doc """
Create a report (`Flag` activity), notify every moderator by email and
optionally federate it.
"""
def flag(args, local \\ false, _additional \\ %{}) do
  with {:build_args, args} <- {:build_args, prepare_args_for_report(args)},
       {:create_report, {:ok, %Report{} = report}} <-
         {:create_report, Reports.create_report(args)},
       report_as_data <- Convertible.model_to_as(report),
       # Local reports are also addressed to the reported actor's instance.
       cc <- if(local, do: [report.reported.url], else: []),
       report_as_data <- Map.merge(report_as_data, %{"to" => [], "cc" => cc}),
       {:ok, activity} <- create_activity(report_as_data, local),
       :ok <- maybe_federate(activity) do
    # Email every moderator about the new report.
    Enum.each(Users.list_moderators(), fn moderator ->
      moderator
      |> Admin.report(report)
      |> Mailer.deliver_later()
    end)

    {:ok, activity, report}
  else
    err ->
      Logger.error("Something went wrong while creating an activity")
      Logger.debug(inspect(err))
      err
  end
end
|
|
|
|
|
|
2019-12-03 11:29:51 +01:00
|
|
|
|
def join(object, actor, local \\ true, additional \\ %{})

@doc false
# Make `actor` join `event`: creates a Participant, wraps it in a `Join`
# activity and federates it. When the event auto-approves participants,
# the join is immediately accepted.
def join(%Event{} = event, %Actor{} = actor, local, additional) do
  # TODO Refactor me for federation
  with {:maximum_attendee_capacity, true} <-
         {:maximum_attendee_capacity, check_attendee_capacity(event)},
       # Role comes from additional[:metadata][:role], defaulting to the
       # event's configured default participant role.
       role <-
         additional
         |> Map.get(:metadata, %{})
         |> Map.get(:role, Mobilizon.Events.get_default_participant_role(event)),
       {:ok, %Participant{} = participant} <-
         Mobilizon.Events.create_participant(%{
           role: role,
           event_id: event.id,
           actor_id: actor.id,
           url: Map.get(additional, :url),
           # Strip HTML and trim any join message before persisting it.
           metadata:
             additional
             |> Map.get(:metadata, %{})
             |> Map.update(:message, nil, &String.trim(HtmlSanitizeEx.strip_tags(&1)))
         }),
       join_data <- Convertible.model_to_as(participant),
       audience <-
         Audience.calculate_to_and_cc_from_mentions(participant),
       {:ok, activity} <- create_activity(Map.merge(join_data, audience), local),
       :ok <- maybe_federate(activity) do
    # Auto-accept when the local event's default role is plain participant
    # and no elevated role was requested.
    if event.local && Mobilizon.Events.get_default_participant_role(event) === :participant &&
         role == :participant do
      accept(
        :join,
        participant,
        true,
        %{"actor" => event.organizer_actor.url}
      )
    else
      {:ok, activity, participant}
    end
  end
end
|
|
|
|
|
|
|
|
|
|
# TODO: Implement me
# Joining a group is not supported yet; always fails.
def join(%Actor{type: :Group} = _group, %Actor{} = _actor, _local, _additional) do
  :error
end
|
|
|
|
|
|
2019-12-03 11:29:51 +01:00
|
|
|
|
# Returns true when the event still has room for one more participant.
# A missing/nil maximum_attendee_capacity means "unlimited" (encoded as 0).
@spec check_attendee_capacity(Event.t()) :: boolean
defp check_attendee_capacity(%Event{options: options} = event) do
  # The original wrapped this infallible binding in a single-clause `with`,
  # which implied a failure path that cannot happen; a plain binding is clearer.
  maximum_attendee_capacity = Map.get(options, :maximum_attendee_capacity) || 0

  maximum_attendee_capacity == 0 ||
    Mobilizon.Events.count_participant_participants(event.id) < maximum_attendee_capacity
end
|
|
|
|
|
|
2019-12-20 13:04:34 +01:00
|
|
|
|
def leave(object, actor, local \\ true, additional \\ %{})

# TODO: If we want to use this for exclusion we need to have an extra field
# for the actor that excluded the participant
# Make `actor` leave `event`: deletes the Participant record, then wraps the
# departure in a `Leave` activity and federates it. Fails (falls through)
# when the actor is the event's only organizer.
def leave(
      %Event{id: event_id, url: event_url} = _event,
      %Actor{id: actor_id, url: actor_url} = _actor,
      local,
      additional
    ) do
  with {:only_organizer, false} <-
         {:only_organizer, Participant.is_not_only_organizer(event_id, actor_id)},
       {:ok, %Participant{} = participant} <-
         Mobilizon.Events.get_participant(
           event_id,
           actor_id,
           Map.get(additional, :metadata, %{})
         ),
       {:ok, %Participant{} = participant} <-
         Events.delete_participant(participant),
       leave_data <- %{
         "type" => "Leave",
         # If it's an exclusion it should be something else
         "actor" => actor_url,
         "object" => event_url,
         "id" => "#{Endpoint.url()}/leave/event/#{participant.id}"
       },
       audience <-
         Audience.calculate_to_and_cc_from_mentions(participant),
       {:ok, activity} <- create_activity(Map.merge(leave_data, audience), local),
       :ok <- maybe_federate(activity) do
    {:ok, activity, participant}
  end
end
|
|
|
|
|
|
2018-11-12 18:17:53 +01:00
|
|
|
|
@doc """
Create an actor locally by its URL (AP ID)
"""
@spec make_actor_from_url(String.t(), boolean()) :: {:ok, %Actor{}} | {:error, any()}
def make_actor_from_url(url, preload \\ false) do
  case fetch_and_prepare_actor_from_url(url) do
    # Insert or refresh the actor from the fetched AS data.
    {:ok, data} ->
      Actors.upsert_actor(data, preload)

    # Request returned 410
    {:error, :actor_deleted} ->
      Logger.info("Actor was deleted")
      {:error, :actor_deleted}

    e ->
      Logger.warn("Failed to make actor from url")
      {:error, e}
  end
end
|
|
|
|
|
|
2018-11-12 18:17:53 +01:00
|
|
|
|
@doc """
Find an actor in our local database or call WebFinger to find what's its AP ID is and then fetch it
"""
@spec find_or_make_actor_from_nickname(String.t(), atom() | nil) :: tuple()
def find_or_make_actor_from_nickname(nickname, type \\ nil) do
  # Local lookup first; fall back to a WebFinger-based remote fetch.
  if local_actor = Actors.get_actor_by_name(nickname, type) do
    {:ok, local_actor}
  else
    make_actor_from_nickname(nickname)
  end
end
|
|
|
|
|
|
2018-11-23 15:03:53 +01:00
|
|
|
|
# Convenience wrapper: find or make a `:Person` actor by nickname.
@spec find_or_make_person_from_nickname(String.t()) :: tuple()
def find_or_make_person_from_nickname(nick), do: find_or_make_actor_from_nickname(nick, :Person)

# Convenience wrapper: find or make a `:Group` actor by nickname.
@spec find_or_make_group_from_nickname(String.t()) :: tuple()
def find_or_make_group_from_nickname(nick), do: find_or_make_actor_from_nickname(nick, :Group)
|
|
|
|
|
|
2018-11-12 18:17:53 +01:00
|
|
|
|
@doc """
Create an actor inside our database from username, using WebFinger to find out its AP ID and then fetch it
"""
@spec make_actor_from_nickname(String.t()) :: {:ok, %Actor{}} | {:error, any()}
def make_actor_from_nickname(nickname) do
  # WebFinger resolution must yield a non-nil ActivityPub URL before we fetch.
  with {:ok, %{"url" => ap_url}} when not is_nil(ap_url) <- WebFinger.finger(nickname) do
    make_actor_from_url(ap_url)
  else
    _ -> {:error, "No ActivityPub URL found in WebFinger"}
  end
end
|
|
|
|
|
|
2019-12-03 11:29:51 +01:00
|
|
|
|
# True only for activities whose AS data is of type "Create".
@spec is_create_activity?(Activity.t()) :: boolean
defp is_create_activity?(activity),
  do: match?(%Activity{data: %{"type" => "Create"}}, activity)
|
|
|
|
|
|
2019-02-22 16:11:57 +01:00
|
|
|
|
@doc """
Publish an activity to all appropriated audiences inboxes
"""
@spec publish(Actor.t(), Activity.t()) :: :ok
def publish(actor, activity) do
  Logger.debug("Publishing an activity")
  Logger.debug(inspect(activity))

  public = Visibility.is_public?(activity)
  Logger.debug("is public ? #{public}")

  # Public Create activities are additionally pushed through the relay,
  # when the instance allows relaying.
  if public && is_create_activity?(activity) && Config.get([:instance, :allow_relay]) do
    Logger.info(fn -> "Relaying #{activity.data["id"]} out" end)

    Relay.publish(activity)
  end

  # Followers are only included when the activity addresses the actor's
  # followers collection.
  followers =
    if actor.followers_url in activity.recipients do
      Actors.list_external_followers_for_actor(actor)
    else
      []
    end

  # Deduplicate on shared inbox so each remote server gets one delivery.
  remote_inboxes =
    (remote_actors(activity) ++ followers)
    |> Enum.map(fn follower -> follower.shared_inbox_url end)
    |> Enum.uniq()

  # Assertive match: preparing outgoing data is expected to succeed.
  {:ok, data} = Transmogrifier.prepare_outgoing(activity.data)
  json = Jason.encode!(data)
  Logger.debug(fn -> "Remote inboxes are : #{inspect(remote_inboxes)}" end)

  # Deliveries are queued per inbox and performed asynchronously.
  Enum.each(remote_inboxes, fn inbox ->
    Federator.enqueue(:publish_single_ap, %{
      inbox: inbox,
      json: json,
      actor: actor,
      id: activity.data["id"]
    })
  end)
end
|
|
|
|
|
|
2019-02-22 16:11:57 +01:00
|
|
|
|
@doc """
Publish an activity to a specific inbox
"""
def publish_one(%{inbox: inbox, json: json, actor: actor, id: id}) do
  Logger.info("Federating #{id} to #{inbox}")
  %URI{host: host, path: path} = URI.parse(inbox)

  # Body digest + date are both part of the signed header set below.
  digest = Signature.build_digest(json)
  date = Signature.generate_date_header()

  # request_target = Signature.generate_request_target("POST", path)

  # HTTP Signature over (request-target), host, content-length, digest, date.
  signature =
    Signature.sign(actor, %{
      "(request-target)": "post #{path}",
      host: host,
      "content-length": byte_size(json),
      digest: digest,
      date: date
    })

  HTTPoison.post(
    inbox,
    json,
    [
      {"Content-Type", "application/activity+json"},
      {"signature", signature},
      {"digest", digest},
      {"date", date}
    ],
    hackney: [pool: :default]
  )
end
|
|
|
|
|
|
2019-10-15 21:18:03 +02:00
|
|
|
|
# Fetching a remote actor's information through its AP ID
@spec fetch_and_prepare_actor_from_url(String.t()) :: {:ok, struct()} | {:error, atom()} | any()
defp fetch_and_prepare_actor_from_url(url) do
  Logger.debug("Fetching and preparing actor from url")
  Logger.debug(inspect(url))

  # NOTE: HTTPoison.get!/3 returns a bare %HTTPoison.Response{} (and raises on
  # transport errors), so the `else` clauses below must match bare responses.
  # The previous `{:ok, %HTTPoison.Response{status_code: 410}}` pattern could
  # never match and 410s fell through to the generic error clause.
  res =
    with %HTTPoison.Response{status_code: 200, body: body} <-
           HTTPoison.get!(url, [Accept: "application/activity+json"], follow_redirect: true),
         :ok <- Logger.debug("response okay, now decoding json"),
         {:ok, data} <- Jason.decode(body) do
      Logger.debug("Got activity+json response at actor's endpoint, now converting data")
      Converter.Actor.as_to_model_data(data)
    else
      # Actor is gone, probably deleted
      %HTTPoison.Response{status_code: 410} ->
        Logger.info("Response HTTP 410")
        {:error, :actor_deleted}

      e ->
        Logger.warn("Could not decode actor at fetch #{url}, #{inspect(e)}")
        {:error, e}
    end

  res
end
|
|
|
|
|
|
2018-11-12 18:17:53 +01:00
|
|
|
|
@doc """
Return all public activities (events & comments) for an actor
"""
@spec fetch_public_activities_for_actor(Actor.t(), integer(), integer()) :: map()
def fetch_public_activities_for_actor(%Actor{} = actor, page \\ 1, limit \\ 10) do
  {:ok, events, total_events} = Events.list_public_events_for_actor(actor, page, limit)
  {:ok, comments, total_comments} = Events.list_public_comments_for_actor(actor, page, limit)

  # Wrap each model into an Activity struct; events first, then comments
  activities =
    Enum.map(events, &event_to_activity/1) ++ Enum.map(comments, &comment_to_activity/1)

  %{elements: activities, total: total_events + total_comments}
end
|
|
|
|
|
|
2018-11-12 23:30:47 +01:00
|
|
|
|
# Create an activity from an event
@spec event_to_activity(%Event{}, boolean()) :: Activity.t()
defp event_to_activity(%Event{} = event, local \\ true) do
  # Events listed here are public, so address the AS public collection
  %Activity{
    local: local,
    data: Converter.Event.model_to_as(event),
    actor: event.organizer_actor.url,
    recipients: ["https://www.w3.org/ns/activitystreams#Public"]
  }
end
|
2018-05-30 14:27:21 +02:00
|
|
|
|
|
2018-11-12 23:30:47 +01:00
|
|
|
|
# Create an activity from a comment
@spec comment_to_activity(%Comment{}, boolean()) :: Activity.t()
defp comment_to_activity(%Comment{} = comment, local \\ true) do
  # Comments listed here are public, so address the AS public collection
  %Activity{
    local: local,
    data: Converter.Comment.model_to_as(comment),
    actor: comment.actor.url,
    recipients: ["https://www.w3.org/ns/activitystreams#Public"]
  }
end
|
|
|
|
|
|
2019-10-25 17:43:37 +02:00
|
|
|
|
# Get recipients for an activity or object
# Concatenates the "to" and "cc" audiences; missing keys count as empty.
@spec get_recipients(map()) :: list()
defp get_recipients(data) do
  Enum.concat(Map.get(data, "to", []), Map.get(data, "cc", []))
end
|
|
|
|
|
|
|
|
|
|
# Build the Create activity data for a new event.
# Returns {:ok, event, create_data}, or passes through the Events error.
@spec create_event(map(), map()) :: {:ok, map()}
defp create_event(args, additional) do
  args = prepare_args_for_event(args)

  with {:ok, %Event{} = event} <- Events.create_event(args) do
    event_as_data = Convertible.model_to_as(event)
    audience = Audience.calculate_to_and_cc_from_mentions(event)
    create_data = make_create_data(event_as_data, Map.merge(audience, additional))

    {:ok, event, create_data}
  end
end
|
|
|
|
|
|
|
|
|
|
# Build the Create activity data for a new comment.
# Returns {:ok, comment, create_data}, or passes through the Events error.
@spec create_comment(map(), map()) :: {:ok, map()}
defp create_comment(args, additional) do
  args = prepare_args_for_comment(args)

  with {:ok, %Comment{} = comment} <- Events.create_comment(args) do
    comment_as_data = Convertible.model_to_as(comment)
    audience = Audience.calculate_to_and_cc_from_mentions(comment)
    create_data = make_create_data(comment_as_data, Map.merge(audience, additional))

    {:ok, comment, create_data}
  end
end
|
|
|
|
|
|
|
|
|
|
# Build the Create activity data for a new group.
# Groups are always announced publicly, with no cc audience.
@spec create_group(map(), map()) :: {:ok, map()}
defp create_group(args, additional) do
  args = prepare_args_for_group(args)

  with {:ok, %Actor{type: :Group} = group} <- Actors.create_group(args) do
    group_as_data = Convertible.model_to_as(group)
    audience = %{"to" => ["https://www.w3.org/ns/activitystreams#Public"], "cc" => []}
    create_data = make_create_data(group_as_data, Map.merge(audience, additional))

    {:ok, group, create_data}
  end
end
|
|
|
|
|
|
2019-11-15 18:36:47 +01:00
|
|
|
|
# Look up a tombstone for anything carrying a :url; nil when there is no URL.
@spec check_for_tombstones(map()) :: Tombstone.t() | nil
defp check_for_tombstones(entity) do
  case entity do
    %{url: url} -> Tombstone.find_tombstone(url)
    _ -> nil
  end
end
|
|
|
|
|
|
2020-01-23 00:55:07 +01:00
|
|
|
|
# Update an event and build the matching Update activity data.
# Also invalidates the cached AP representation of the event.
@spec update_event(Event.t(), map(), map()) :: {:ok, Event.t(), Activity.t()} | any()
defp update_event(%Event{} = old_event, args, additional) do
  args = prepare_args_for_event(args)

  with {:ok, %Event{} = new_event} <- Events.update_event(old_event, args),
       {:ok, true} <- Cachex.del(:activity_pub, "event_#{new_event.uuid}") do
    event_as_data = Convertible.model_to_as(new_event)
    audience = Audience.calculate_to_and_cc_from_mentions(new_event)
    update_data = make_update_data(event_as_data, Map.merge(audience, additional))

    {:ok, new_event, update_data}
  else
    err ->
      Logger.error("Something went wrong while creating an update activity")
      Logger.debug(inspect(err))
      err
  end
end
|
|
|
|
|
|
2020-01-23 00:55:07 +01:00
|
|
|
|
# Update an actor and build the matching Update activity data.
# Also invalidates the cached AP representation of the actor.
@spec update_actor(Actor.t(), map, map) :: {:ok, Actor.t(), Activity.t()} | any
defp update_actor(%Actor{} = old_actor, args, additional) do
  with {:ok, %Actor{} = new_actor} <- Actors.update_actor(old_actor, args),
       {:ok, true} <- Cachex.del(:activity_pub, "actor_#{new_actor.preferred_username}") do
    actor_as_data = Convertible.model_to_as(new_actor)
    audience = Audience.calculate_to_and_cc_from_mentions(new_actor)
    # The Update is attributed to the actor's previous URL
    additional = Map.merge(additional, %{"actor" => old_actor.url})
    update_data = make_update_data(actor_as_data, Map.merge(audience, additional))

    {:ok, new_actor, update_data}
  end
end
|
|
|
|
|
|
2020-01-23 00:55:07 +01:00
|
|
|
|
# Mark a follow as approved and build the matching Accept activity data.
# Addressed directly to the follower; the accepting actor is the follow target.
@spec accept_follow(Follower.t(), map) :: {:ok, Follower.t(), Activity.t()} | any
defp accept_follow(%Follower{} = follower, additional) do
  with {:ok, %Follower{} = follower} <- Actors.update_follower(follower, %{approved: true}) do
    follower_as_data = Convertible.model_to_as(follower)

    update_data =
      make_accept_join_data(
        follower_as_data,
        Map.merge(additional, %{
          "id" => "#{Endpoint.url()}/accept/follow/#{follower.id}",
          "to" => [follower.actor.url],
          "cc" => [],
          "actor" => follower.target_actor.url
        })
      )

    {:ok, follower, update_data}
  else
    err ->
      # Previously logged a copy-pasted "update activity" message
      Logger.error("Something went wrong while creating an accept follow activity")
      Logger.debug(inspect(err))
      err
  end
end
|
|
|
|
|
|
2020-01-23 00:55:07 +01:00
|
|
|
|
# Promote a participant's role and build the matching Accept activity data.
# Also notifies GraphQL subscribers of the participation change.
@spec accept_join(Participant.t(), map) :: {:ok, Participant.t(), Activity.t()} | any
defp accept_join(%Participant{} = participant, additional) do
  with {:ok, %Participant{} = participant} <-
         Events.update_participant(participant, %{role: :participant}) do
    Absinthe.Subscription.publish(Endpoint, participant.actor,
      event_person_participation_changed: participant.actor.id
    )

    participant_as_data = Convertible.model_to_as(participant)
    audience = Audience.calculate_to_and_cc_from_mentions(participant)

    update_data =
      make_accept_join_data(
        participant_as_data,
        Map.merge(Map.merge(audience, additional), %{
          "id" => "#{Endpoint.url()}/accept/join/#{participant.id}"
        })
      )

    {:ok, participant, update_data}
  else
    err ->
      # Previously logged a copy-pasted "update activity" message
      Logger.error("Something went wrong while creating an accept join activity")
      Logger.debug(inspect(err))
      err
  end
end
|
|
|
|
|
|
2020-01-23 00:55:07 +01:00
|
|
|
|
# Mark a participation as rejected and build the matching Reject activity data.
# Also notifies GraphQL subscribers of the participation change.
@spec reject_join(Participant.t(), map()) :: {:ok, Participant.t(), Activity.t()} | any()
defp reject_join(%Participant{} = participant, additional) do
  with {:ok, %Participant{} = participant} <-
         Events.update_participant(participant, %{role: :rejected}) do
    Absinthe.Subscription.publish(Endpoint, participant.actor,
      event_person_participation_changed: participant.actor.id
    )

    participant_as_data = Convertible.model_to_as(participant)

    audience =
      participant
      |> Audience.calculate_to_and_cc_from_mentions()
      |> Map.merge(additional)

    update_data =
      %{"type" => "Reject", "object" => participant_as_data}
      |> Map.merge(audience)
      |> Map.merge(%{"id" => "#{Endpoint.url()}/reject/join/#{participant.id}"})

    {:ok, participant, update_data}
  else
    err ->
      # Previously logged a copy-pasted "update activity" message
      Logger.error("Something went wrong while creating a reject join activity")
      Logger.debug(inspect(err))
      err
  end
end
|
|
|
|
|
|
2020-01-23 00:55:07 +01:00
|
|
|
|
# Delete the pending follow and build the matching Reject activity data.
# Addressed directly to the (rejected) follower actor.
@spec reject_follow(Follower.t(), map()) :: {:ok, Follower.t(), Activity.t()} | any()
defp reject_follow(%Follower{} = follower, additional) do
  with {:ok, %Follower{} = follower} <- Actors.delete_follower(follower) do
    follower_as_data = Convertible.model_to_as(follower)

    audience =
      follower.actor |> Audience.calculate_to_and_cc_from_mentions() |> Map.merge(additional)

    reject_data = %{
      "to" => [follower.actor.url],
      "type" => "Reject",
      "actor" => follower.target_actor.url,
      "object" => follower_as_data
    }

    # reject_data wins over audience keys on conflict, as before
    update_data =
      audience
      |> Map.merge(reject_data)
      |> Map.merge(%{"id" => "#{Endpoint.url()}/reject/follow/#{follower.id}"})

    {:ok, follower, update_data}
  else
    err ->
      # Previously logged a copy-pasted "update activity" message
      Logger.error("Something went wrong while creating a reject follow activity")
      Logger.debug(inspect(err))
      err
  end
end
|
|
|
|
|
|
2019-10-25 17:43:37 +02:00
|
|
|
|
# Prepare and sanitize arguments for events.
# Runs three independent normalization passes, then converts raw tags.
defp prepare_args_for_event(args) do
  args
  |> sanitize_event_title()
  |> process_event_description()
  |> constrain_anonymous_participation()
  |> Map.update(:tags, [], &ConverterUtils.fetch_tags/1)
end

# If title is not set: we are not updating it
defp sanitize_event_title(args) do
  if Map.has_key?(args, :title) && !is_nil(args.title),
    do: Map.update(args, :title, "", &String.trim(HtmlSanitizeEx.strip_tags(&1))),
    else: args
end

# If we've been given a description (we might not get one if updating)
# sanitize it, HTML it, and extract tags & mentions from it
defp process_event_description(args) do
  if Map.has_key?(args, :description) && !is_nil(args.description) do
    {description, mentions, tags} =
      APIUtils.make_content_html(
        String.trim(args.description),
        Map.get(args, :tags, []),
        "text/html"
      )

    mentions = ConverterUtils.fetch_mentions(Map.get(args, :mentions, []) ++ mentions)

    Map.merge(args, %{description: description, mentions: mentions, tags: tags})
  else
    args
  end
end

# Check that we can only allow anonymous participation if our instance allows it
defp constrain_anonymous_participation(args) do
  {_, options} =
    Map.get_and_update(
      Map.get(args, :options, %{anonymous_participation: false}),
      :anonymous_participation,
      fn value ->
        {value, value && Mobilizon.Config.anonymous_participation?()}
      end
    )

  Map.put(args, :options, options)
end
|
|
|
|
|
|
|
|
|
|
# Prepare and sanitize arguments for comments.
#
# Resolves the replied-to comment and the parent event, defaults visibility to
# :public, converts the raw text to HTML while extracting mentions, and fills
# in the threading fields (in_reply_to_comment_id / origin_comment_id).
# Most `<-` clauses here always match and act as plain binds; only the
# make_content_html clause could short-circuit if it ever returned a non-triple.
defp prepare_args_for_comment(args) do
  with in_reply_to_comment <-
         args |> Map.get(:in_reply_to_comment_id) |> Events.get_comment_with_preload(),
       event <- args |> Map.get(:event_id) |> handle_event_for_comment(),
       # Keep an explicitly provided visibility, otherwise default to :public
       args <- Map.update(args, :visibility, :public, & &1),
       {text, mentions, tags} <-
         APIUtils.make_content_html(
           args |> Map.get(:text, "") |> String.trim(),
           # Can't put additional tags on a comment
           [],
           "text/html"
         ),
       tags <- ConverterUtils.fetch_tags(tags),
       # Merge caller-supplied mentions with those extracted from the text
       mentions <- Map.get(args, :mentions, []) ++ ConverterUtils.fetch_mentions(mentions),
       args <-
         Map.merge(args, %{
           actor_id: Map.get(args, :actor_id),
           text: text,
           mentions: mentions,
           tags: tags,
           event: event,
           in_reply_to_comment: in_reply_to_comment,
           # Threading: direct parent id, and the root of the whole thread
           in_reply_to_comment_id:
             if(is_nil(in_reply_to_comment), do: nil, else: Map.get(in_reply_to_comment, :id)),
           origin_comment_id:
             if(is_nil(in_reply_to_comment),
               do: nil,
               else: Comment.get_thread_id(in_reply_to_comment)
             )
         }) do
    args
  end
end
|
|
|
|
|
|
2019-12-03 11:29:51 +01:00
|
|
|
|
# Resolve the event a comment belongs to; nil id means no attached event.
@spec handle_event_for_comment(String.t() | integer() | nil) :: Event.t() | nil
defp handle_event_for_comment(nil), do: nil

defp handle_event_for_comment(event_id) do
  with {:ok, %Event{} = event} <- Events.get_event_with_preload(event_id) do
    event
  else
    {:error, :event_not_found} -> nil
  end
end
|
|
|
|
|
|
2019-10-25 17:43:37 +02:00
|
|
|
|
# Prepare and sanitize arguments for groups: strip markup from the username
# and render the summary to HTML (mentions/tags in summaries are discarded).
defp prepare_args_for_group(args) do
  with preferred_username <-
         args |> Map.get(:preferred_username) |> HtmlSanitizeEx.strip_tags() |> String.trim(),
       # Already trimmed here; the previous second String.trim/1 was redundant
       summary <- args |> Map.get(:summary, "") |> String.trim(),
       {summary, _mentions, _tags} <-
         APIUtils.make_content_html(summary, [], "text/html") do
    %{args | preferred_username: preferred_username, summary: summary}
  end
end
|
2019-11-15 18:36:47 +01:00
|
|
|
|
|
|
|
|
|
# Prepare and sanitize arguments for reports.
#
# Loads the reporter and reported actors (raising if either id is unknown),
# strips markup from the report content, and gathers the reported comments.
defp prepare_args_for_report(args) do
  with {:reporter, %Actor{} = reporter_actor} <-
         {:reporter, Actors.get_actor!(args.reporter_id)},
       {:reported, %Actor{} = reported_actor} <-
         {:reported, Actors.get_actor!(args.reported_id)},
       content <- HtmlSanitizeEx.strip_tags(args.content),
       # NOTE(review): the value is named `event` and stored under :event, but
       # it is fetched with Events.get_comment/1 from :event_id — looks
       # suspicious; confirm whether this should be Events.get_event/1.
       event <- Events.get_comment(Map.get(args, :event_id)),
       {:get_report_comments, comments} <-
         {:get_report_comments,
          Events.list_comments_by_actor_and_ids(
            reported_actor.id,
            Map.get(args, :comments_ids, [])
          )} do
    Map.merge(args, %{
      reporter: reporter_actor,
      reported: reported_actor,
      content: content,
      event: event,
      comments: comments
    })
  end
end
|
2020-02-14 17:56:36 +01:00
|
|
|
|
|
|
|
|
|
# Schedule a key rotation for the actor when one is due; always returns :ok.
defp check_for_actor_key_rotation(%Actor{} = actor) do
  rotation_delay = Application.get_env(:mobilizon, :activitypub)[:actor_key_rotation_delay]

  if Actors.should_rotate_actor_key(actor) do
    Actors.schedule_key_rotation(actor, rotation_delay)
  end

  :ok
end
|
2018-05-17 11:32:23 +02:00
|
|
|
|
end
|