# Portions of this file are derived from Pleroma:
# Copyright © 2017-2018 Pleroma Authors <https://pleroma.social>
# SPDX-License-Identifier: AGPL-3.0-only
# Upstream: https://git.pleroma.social/pleroma/pleroma/blob/develop/lib/pleroma/web/activity_pub/activity_pub.ex

defmodule Mobilizon.Service.ActivityPub do
  @moduledoc """
  # ActivityPub

  Every ActivityPub-related method lives in this module.
  """

  alias Mobilizon.Events
  alias Mobilizon.Events.{Event, Category, Comment}
  alias Mobilizon.Service.ActivityPub.Transmogrifier
  alias Mobilizon.Service.WebFinger
  alias Mobilizon.Activity
  alias Mobilizon.Actors
  alias Mobilizon.Actors.Actor
  alias Mobilizon.Actors.Follower
  alias Mobilizon.Service.Federator
  alias Mobilizon.Service.HTTPSignatures

  require Logger

  import Mobilizon.Service.ActivityPub.Utils

  @doc """
  Get recipients for an activity or object
  """
  @spec get_recipients(map()) :: list()
  def get_recipients(data) do
    (data["to"] || []) ++ (data["cc"] || [])
  end
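
  # Illustrative example (hypothetical URL): recipients are simply the merged
  # "to" and "cc" fields of the ActivityStreams data.
  #
  #   get_recipients(%{"to" => ["https://example.test/@alice"], "cc" => []})
  #   #=> ["https://example.test/@alice"]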

  @doc """
  Wraps an object into an activity

  TODO: Rename me
  """
  @spec insert(map(), boolean()) :: {:ok, %Activity{}} | {:error, any()}
  def insert(map, local \\ true) when is_map(map) do
    with map <- lazy_put_activity_defaults(map),
         :ok <- insert_full_object(map) do
      object_id =
        cond do
          is_map(map["object"]) ->
            map["object"]["id"]

          is_binary(map["object"]) ->
            map["id"]
        end

      map = if local, do: Map.put(map, "id", "#{object_id}/activity"), else: map

      activity = %Activity{
        data: map,
        local: local,
        actor: map["actor"],
        recipients: get_recipients(map)
      }

      # Notification.create_notifications(activity)
      # stream_out(activity)
      {:ok, activity}
    else
      %Activity{} = activity -> {:ok, activity}
      error -> {:error, error}
    end
  end
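
  # A minimal sketch of what the id rewriting above does for a local activity
  # (the URL is hypothetical):
  #
  #   insert(%{"object" => %{"id" => "https://example.test/events/1"}, ...})
  #   # gives the wrapping activity the id "https://example.test/events/1/activity"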

  @doc """
  Fetch an object from a URL: first from our local database of events and comments, then from the remote instance if we don't know it yet
  """
  @spec fetch_object_from_url(String.t()) :: {:ok, %Event{}} | {:ok, %Comment{}} | {:error, any()}
  def fetch_object_from_url(url) do
    Logger.info("Fetching object from url #{url}")

    with true <- String.starts_with?(url, "http"),
         nil <- Events.get_event_by_url(url),
         nil <- Events.get_comment_from_url(url),
         {:ok, %{body: body, status_code: code}} when code in 200..299 <-
           HTTPoison.get(
             url,
             [Accept: "application/activity+json"],
             follow_redirect: true,
             timeout: 10_000,
             recv_timeout: 20_000
           ),
         {:ok, data} <- Jason.decode(body),
         params <- %{
           "type" => "Create",
           "to" => data["to"],
           "cc" => data["cc"],
           "actor" => data["attributedTo"],
           "object" => data
         },
         {:ok, activity} <- Transmogrifier.handle_incoming(params) do
      case data["type"] do
        "Event" ->
          {:ok, Events.get_event_by_url!(activity.data["object"]["id"])}

        "Note" ->
          {:ok, Events.get_comment_full_from_url!(activity.data["object"]["id"])}

        other ->
          {:error, other}
      end
    else
      %Event{url: event_url} -> {:ok, Events.get_event_by_url!(event_url)}
      %Comment{url: comment_url} -> {:ok, Events.get_comment_full_from_url!(comment_url)}
      e -> {:error, e}
    end
  end
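
  # Illustrative call (hypothetical URL): a remote Event is fetched, handled as an
  # incoming "Create" activity, and then returned from our own database.
  #
  #   {:ok, %Event{}} = fetch_object_from_url("https://remote.test/events/42")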

  def create(%{to: to, actor: actor, object: object} = params) do
    Logger.debug("creating an activity")
    Logger.debug(inspect(params))
    Logger.debug(inspect(object))

    additional = params[:additional] || %{}
    # only accept false as false value
    local = !(params[:local] == false)
    published = params[:published]

    with create_data <-
           make_create_data(
             %{to: to, actor: actor, published: published, object: object},
             additional
           ),
         :ok <- Logger.debug(inspect(create_data)),
         {:ok, activity} <- insert(create_data, local),
         :ok <- maybe_federate(activity) do
      # {:ok, actor} <- Actors.increase_event_count(actor) do
      {:ok, activity}
    else
      err ->
        Logger.error("Something went wrong while creating an activity")
        Logger.error(inspect(err))
        err
    end
  end
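
  # Minimal sketch of the params create/1 expects (values are hypothetical; the actor
  # is whatever make_create_data/2 accepts):
  #
  #   create(%{
  #     to: ["https://www.w3.org/ns/activitystreams#Public"],
  #     actor: actor_url,
  #     object: %{"type" => "Event", "id" => "https://example.test/events/1"},
  #     local: true
  #   })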

  def accept(%{to: to, actor: actor, object: object} = params) do
    # only accept false as false value
    local = !(params[:local] == false)

    with data <- %{"to" => to, "type" => "Accept", "actor" => actor, "object" => object},
         {:ok, activity} <- insert(data, local),
         :ok <- maybe_federate(activity) do
      {:ok, activity}
    end
  end

  def update(%{to: to, cc: cc, actor: actor, object: object} = params) do
    # only accept false as false value
    local = !(params[:local] == false)

    with data <- %{
           "to" => to,
           "cc" => cc,
           "type" => "Update",
           "actor" => actor,
           "object" => object
         },
         {:ok, activity} <- insert(data, local),
         :ok <- maybe_federate(activity) do
      {:ok, activity}
    end
  end

  # TODO: This is weird, maybe we shouldn't check here if we can make the activity.
  # def like(
  #       %Actor{url: url} = actor,
  #       object,
  #       activity_id \\ nil,
  #       local \\ true
  #     ) do
  #   with nil <- get_existing_like(url, object),
  #        like_data <- make_like_data(user, object, activity_id),
  #        {:ok, activity} <- insert(like_data, local),
  #        {:ok, object} <- add_like_to_object(activity, object),
  #        :ok <- maybe_federate(activity) do
  #     {:ok, activity, object}
  #   else
  #     %Activity{} = activity -> {:ok, activity, object}
  #     error -> {:error, error}
  #   end
  # end

  # def unlike(
  #       %User{} = actor,
  #       %Object{} = object,
  #       activity_id \\ nil,
  #       local \\ true
  #     ) do
  #   with %Activity{} = like_activity <- get_existing_like(actor.ap_id, object),
  #        unlike_data <- make_unlike_data(actor, like_activity, activity_id),
  #        {:ok, unlike_activity} <- insert(unlike_data, local),
  #        {:ok, _activity} <- Repo.delete(like_activity),
  #        {:ok, object} <- remove_like_from_object(like_activity, object),
  #        :ok <- maybe_federate(unlike_activity) do
  #     {:ok, unlike_activity, like_activity, object}
  #   else
  #     _e -> {:ok, object}
  #   end
  # end

  # def announce(
  #       %Actor{} = actor,
  #       object,
  #       activity_id \\ nil,
  #       local \\ true
  #     ) do
  #   # with true <- is_public?(object),
  #   with announce_data <- make_announce_data(actor, object, activity_id),
  #        {:ok, activity} <- insert(announce_data, local),
  #        # {:ok, object} <- add_announce_to_object(activity, object),
  #        :ok <- maybe_federate(activity) do
  #     {:ok, activity, object}
  #   else
  #     error -> {:error, error}
  #   end
  # end

  # def unannounce(
  #       %Actor{} = actor,
  #       object,
  #       activity_id \\ nil,
  #       local \\ true
  #     ) do
  #   with %Activity{} = announce_activity <- get_existing_announce(actor.ap_id, object),
  #        unannounce_data <- make_unannounce_data(actor, announce_activity, activity_id),
  #        {:ok, unannounce_activity} <- insert(unannounce_data, local),
  #        :ok <- maybe_federate(unannounce_activity),
  #        {:ok, _activity} <- Repo.delete(announce_activity),
  #        {:ok, object} <- remove_announce_from_object(announce_activity, object) do
  #     {:ok, unannounce_activity, object}
  #   else
  #     _e -> {:ok, object}
  #   end
  # end

  def follow(%Actor{} = follower, %Actor{} = followed, activity_id \\ nil, local \\ true) do
    with {:ok, %Follower{} = follow} <- Actor.follow(followed, follower, true),
         activity_follow_id <- activity_id || Follower.url(follow),
         data <- make_follow_data(followed, follower, activity_follow_id),
         {:ok, activity} <- insert(data, local),
         :ok <- maybe_federate(activity) do
      {:ok, activity}
    else
      {err, _} when err in [:already_following, :suspended] ->
        {:error, err}
    end
  end
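
  # Illustrative call (actors are hypothetical): records the follow locally, then
  # wraps it in a "Follow" activity and federates it when appropriate.
  #
  #   {:ok, activity} = follow(follower_actor, followed_actor)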

  @spec unfollow(Actor.t(), Actor.t(), String.t(), boolean()) :: {:ok, map()} | any()
  def unfollow(%Actor{} = followed, %Actor{} = follower, activity_id \\ nil, local \\ true) do
    with {:ok, %Follower{id: follow_id}} <- Actor.unfollow(followed, follower),
         # We recreate the follow activity
         data <- make_follow_data(followed, follower, follow_id),
         {:ok, follow_activity} <- insert(data, local),
         unfollow_data <- make_unfollow_data(follower, followed, follow_activity, activity_id),
         {:ok, activity} <- insert(unfollow_data, local),
         :ok <- maybe_federate(activity) do
      {:ok, activity}
    else
      err ->
        Logger.error(inspect(err))
        err
    end
  end

  def delete(object, local \\ true)

  def delete(%Event{url: url, organizer_actor: actor} = event, local) do
    data = %{
      "type" => "Delete",
      "actor" => actor.url,
      "object" => url,
      "to" => [actor.url <> "/followers", "https://www.w3.org/ns/activitystreams#Public"]
    }

    with Events.delete_event(event),
         {:ok, activity} <- insert(data, local),
         :ok <- maybe_federate(activity) do
      {:ok, activity}
    end
  end

  def delete(%Comment{url: url, actor: actor} = comment, local) do
    data = %{
      "type" => "Delete",
      "actor" => actor.url,
      "object" => url,
      "to" => [actor.url <> "/followers", "https://www.w3.org/ns/activitystreams#Public"]
    }

    with Events.delete_comment(comment),
         {:ok, activity} <- insert(data, local),
         :ok <- maybe_federate(activity) do
      {:ok, activity}
    end
  end

  def delete(%Actor{url: url} = actor, local) do
    data = %{
      "type" => "Delete",
      "actor" => url,
      "object" => url,
      "to" => [url <> "/followers", "https://www.w3.org/ns/activitystreams#Public"]
    }

    with Actors.delete_actor(actor),
         {:ok, activity} <- insert(data, local),
         :ok <- maybe_federate(activity) do
      {:ok, activity}
    end
  end
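
  # Sketch of the Delete activity the clause above produces (hypothetical actor URL):
  #
  #   %{
  #     "type" => "Delete",
  #     "actor" => "https://example.test/@alice",
  #     "object" => "https://example.test/@alice",
  #     "to" => ["https://example.test/@alice/followers",
  #              "https://www.w3.org/ns/activitystreams#Public"]
  #   }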

  @doc """
  Create an actor locally from its URL (AP ID)
  """
  @spec make_actor_from_url(String.t(), boolean()) :: {:ok, %Actor{}} | {:error, any()}
  def make_actor_from_url(url, preload \\ false) do
    with {:ok, data} <- fetch_and_prepare_actor_from_url(url) do
      Actors.insert_or_update_actor(data, preload)
    else
      # Request returned 410
      {:error, :actor_deleted} ->
        {:error, :actor_deleted}

      e ->
        Logger.error("Failed to make actor from url")
        Logger.error(inspect(e))
        {:error, e}
    end
  end

  @doc """
  Find an actor in our local database, or use WebFinger to discover its AP ID and then fetch it
  """
  @spec find_or_make_actor_from_nickname(String.t(), atom() | nil) :: tuple()
  def find_or_make_actor_from_nickname(nickname, type \\ nil) do
    with %Actor{} = actor <- Actors.get_actor_by_name(nickname, type) do
      {:ok, actor}
    else
      nil -> make_actor_from_nickname(nickname)
    end
  end

  @spec find_or_make_person_from_nickname(String.t()) :: tuple()
  def find_or_make_person_from_nickname(nick), do: find_or_make_actor_from_nickname(nick, :Person)

  @spec find_or_make_group_from_nickname(String.t()) :: tuple()
  def find_or_make_group_from_nickname(nick), do: find_or_make_actor_from_nickname(nick, :Group)

  @doc """
  Create an actor in our database from a username, using WebFinger to discover its AP ID and then fetching it
  """
  @spec make_actor_from_nickname(String.t()) :: {:ok, %Actor{}} | {:error, any()}
  def make_actor_from_nickname(nickname) do
    with {:ok, %{"url" => url}} when not is_nil(url) <- WebFinger.finger(nickname) do
      make_actor_from_url(url)
    else
      _e -> {:error, "No ActivityPub URL found in WebFinger"}
    end
  end
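
  # Illustrative call (hypothetical handle): WebFinger resolves the handle to an AP ID,
  # which is then fetched with make_actor_from_url/2.
  #
  #   {:ok, %Actor{}} = make_actor_from_nickname("alice@remote.test")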

  def publish(actor, activity) do
    Logger.debug("Publishing an activity")

    followers =
      if actor.followers_url in activity.recipients do
        Actor.get_followers(actor) |> Enum.filter(fn follower -> is_nil(follower.domain) end)
      else
        []
      end

    remote_inboxes =
      (remote_actors(activity) ++ followers)
      |> Enum.map(fn follower -> follower.shared_inbox_url end)
      |> Enum.uniq()

    {:ok, data} = Transmogrifier.prepare_outgoing(activity.data)
    json = Jason.encode!(data)
    Logger.debug("Remote inboxes are: #{inspect(remote_inboxes)}")

    Enum.each(remote_inboxes, fn inbox ->
      Federator.enqueue(:publish_single_ap, %{
        inbox: inbox,
        json: json,
        actor: actor,
        id: activity.data["id"]
      })
    end)
  end

  def publish_one(%{inbox: inbox, json: json, actor: actor, id: id}) do
    Logger.info("Federating #{id} to #{inbox}")
    %URI{host: host, path: path} = URI.parse(inbox)

    digest = HTTPSignatures.build_digest(json)
    date = HTTPSignatures.generate_date_header()
    request_target = HTTPSignatures.generate_request_target("POST", path)

    signature =
      HTTPSignatures.sign(actor, %{
        host: host,
        "content-length": byte_size(json),
        "(request-target)": request_target,
        digest: digest,
        date: date
      })

    HTTPoison.post(
      inbox,
      json,
      [
        {"Content-Type", "application/activity+json"},
        {"signature", signature},
        {"digest", digest},
        {"date", date}
      ],
      hackney: [pool: :default]
    )
  end
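
  # Sketch of the resulting HTTP POST (header values are hypothetical): the activity JSON
  # is sent with an HTTP Signature covering (request-target), host, content-length, digest
  # and date, as built above.
  #
  #   POST /inbox HTTP/1.1
  #   Content-Type: application/activity+json
  #   Signature: keyId="https://example.test/@alice#main-key",headers="(request-target) ...",...
  #   Digest: SHA-256=...
  #   Date: Sat, 01 Dec 2018 12:00:00 GMT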

  # Fetch a remote actor's information through its AP ID
  @spec fetch_and_prepare_actor_from_url(String.t()) :: {:ok, struct()} | {:error, atom()} | any()
  defp fetch_and_prepare_actor_from_url(url) do
    Logger.debug("Fetching and preparing actor from url")

    with {:ok, %HTTPoison.Response{status_code: 200, body: body}} <-
           HTTPoison.get(url, [Accept: "application/activity+json"], follow_redirect: true),
         {:ok, data} <- Jason.decode(body) do
      actor_data_from_actor_object(data)
    else
      # Actor is gone, probably deleted
      {:ok, %HTTPoison.Response{status_code: 410}} ->
        {:error, :actor_deleted}

      e ->
        Logger.error("Could not decode actor at fetch #{url}, #{inspect(e)}")
        e
    end
  end

  @doc """
  Build an actor data map from an ActivityPub actor object
  """
  @spec actor_data_from_actor_object(map()) :: {:ok, map()}
  def actor_data_from_actor_object(data) when is_map(data) do
    actor_data = %{
      url: data["id"],
      avatar_url: data["icon"]["url"],
      banner_url: data["image"]["url"],
      name: data["name"],
      preferred_username: data["preferredUsername"],
      summary: data["summary"],
      keys: data["publicKey"]["publicKeyPem"],
      inbox_url: data["inbox"],
      outbox_url: data["outbox"],
      following_url: data["following"],
      followers_url: data["followers"],
      shared_inbox_url: data["endpoints"]["sharedInbox"],
      domain: URI.parse(data["id"]).host,
      manually_approves_followers: data["manuallyApprovesFollowers"],
      type: data["type"]
    }

    {:ok, actor_data}
  end
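
  # Minimal sketch of the mapping (an abridged, hypothetical ActivityPub actor document):
  #
  #   actor_data_from_actor_object(%{
  #     "id" => "https://example.test/@alice",
  #     "preferredUsername" => "alice",
  #     "inbox" => "https://example.test/@alice/inbox",
  #     "publicKey" => %{"publicKeyPem" => "-----BEGIN PUBLIC KEY-----..."}
  #   })
  #   #=> {:ok, %{url: "https://example.test/@alice", preferred_username: "alice", ...}}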

  @doc """
  Return all public activities (events & comments) for an actor
  """
  @spec fetch_public_activities_for_actor(Actor.t(), integer(), integer()) :: {list(), integer()}
  def fetch_public_activities_for_actor(actor, page \\ nil, limit \\ nil)

  def fetch_public_activities_for_actor(%Actor{} = actor, page, limit) do
    case actor.type do
      :Person ->
        {:ok, events, total_events} = Events.get_events_for_actor(actor, page, limit)
        {:ok, comments, total_comments} = Events.get_comments_for_actor(actor, page, limit)

        event_activities = Enum.map(events, &event_to_activity/1)

        comment_activities = Enum.map(comments, &comment_to_activity/1)

        activities = event_activities ++ comment_activities

        {activities, total_events + total_comments}

      :Service ->
        bot = Actors.get_bot_by_actor(actor)

        case bot.type do
          "ics" ->
            {:ok, %HTTPoison.Response{body: body} = _resp} = HTTPoison.get(bot.source)

            ical_events =
              body
              |> ExIcal.parse()
              |> ExIcal.by_range(DateTime.utc_now(), DateTime.utc_now() |> Timex.shift(years: 1))

            activities =
              ical_events
              |> Enum.chunk_every(limit)
              |> Enum.at(page - 1)
              |> Enum.map(fn event ->
                {:ok, activity} = ical_event_to_activity(event, actor, bot.source)
                activity
              end)

            {activities, length(ical_events)}
        end
    end
  end
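
  # Usage sketch (actor and counts are hypothetical): for a :Person actor the second
  # element of the tuple is the total number of public events and comments, not the
  # size of the returned page.
  #
  #   {activities, total} = fetch_public_activities_for_actor(actor, 1, 10)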

  # Create an activity from an event
  @spec event_to_activity(%Event{}, boolean()) :: Activity.t()
  defp event_to_activity(%Event{} = event, local \\ true) do
    %Activity{
      recipients: ["https://www.w3.org/ns/activitystreams#Public"],
      actor: event.organizer_actor.url,
      data: event |> make_event_data,
      local: local
    }
  end

  # Create an activity from a comment
  @spec comment_to_activity(%Comment{}, boolean()) :: Activity.t()
  def comment_to_activity(%Comment{} = comment, local \\ true) do
    %Activity{
      recipients: ["https://www.w3.org/ns/activitystreams#Public"],
      actor: comment.actor.url,
      data: comment |> make_comment_data,
      local: local
    }
  end

  defp ical_event_to_activity(%ExIcal.Event{} = ical_event, %Actor{} = actor, _source) do
    # Logger.debug(inspect ical_event)
    # TODO: refactor me!
    # TODO: there should also be some form of cache to make this more efficient
    category =
      if is_nil(ical_event.categories) do
        nil
      else
        ical_category = ical_event.categories |> hd() |> String.downcase()

        case ical_category |> Events.get_category_by_title() do
          nil ->
            case Events.create_category(%{"title" => ical_category}) do
              {:ok, %Category{} = category} -> category
              _ -> nil
            end

          category ->
            category
        end
      end

    {:ok, event} =
      Events.create_event(%{
        begins_on: ical_event.start,
        ends_on: ical_event.end,
        inserted_at: ical_event.stamp,
        updated_at: ical_event.stamp,
        description: ical_event.description |> sanitize_ical_event_strings,
        title: ical_event.summary |> sanitize_ical_event_strings,
        organizer_actor: actor,
        category: category
      })

    event_to_activity(event, false)
  end

  defp sanitize_ical_event_strings(string) when is_binary(string) do
    string
    |> String.replace(~s"\r\n", "")
    |> String.replace(~s"\\,", ",")
  end

  defp sanitize_ical_event_strings(nil) do
    nil
  end

  def is_public?(activity) do
    "https://www.w3.org/ns/activitystreams#Public" in (activity.data["to"] ++
                                                         (activity.data["cc"] || []))
  end
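
  # Illustrative check (hypothetical data): an activity addressed to the public
  # collection in either "to" or "cc" is considered public.
  #
  #   is_public?(%Activity{data: %{"to" => ["https://www.w3.org/ns/activitystreams#Public"], "cc" => []}})
  #   #=> true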
end