2018-12-24 13:34:45 +01:00
|
|
|
# Portions of this file are derived from Pleroma:
|
|
|
|
# Copyright © 2017-2018 Pleroma Authors <https://pleroma.social>
|
|
|
|
# SPDX-License-Identifier: AGPL-3.0-only
|
2018-12-27 11:24:04 +01:00
|
|
|
# Upstream: https://git.pleroma.social/pleroma/pleroma/blob/develop/lib/pleroma/web/federator/federator.ex
|
2018-12-24 13:34:45 +01:00
|
|
|
|
2020-01-22 02:14:42 +01:00
|
|
|
defmodule Mobilizon.Federation.ActivityPub.Federator do
|
2018-06-14 18:15:27 +02:00
|
|
|
@moduledoc """
|
|
|
|
Handle federated activities
|
|
|
|
"""
|
|
|
|
|
2018-05-17 11:32:23 +02:00
|
|
|
use GenServer
|
2019-09-13 01:35:03 +02:00
|
|
|
|
2020-06-15 11:13:20 +02:00
|
|
|
alias Mobilizon.Actors.Actor
|
2020-01-22 02:14:42 +01:00
|
|
|
alias Mobilizon.Federation.ActivityPub
|
|
|
|
alias Mobilizon.Federation.ActivityPub.{Activity, Transmogrifier}
|
2019-09-13 01:35:03 +02:00
|
|
|
|
2018-05-17 11:32:23 +02:00
|
|
|
require Logger

# Upper bound on the number of concurrently running federation jobs
# per direction (incoming / outgoing); enforced in maybe_start_job/2.
@max_jobs 20
|
|
|
|
|
|
|
|
# GenServer callback: the initial state built in start_link/1 is used as-is.
def init(args), do: {:ok, args}
|
|
|
|
|
2019-09-16 01:13:20 +02:00
|
|
|
# Starts the federator under the name of this module.
#
# State shape: one `{running_jobs, queue}` pair per direction, where
# `running_jobs` is an Erlang `:sets` set of monitor refs and `queue` is a
# priority-sorted list of `%{item: {type, payload}, priority: p}` maps.
#
# Fix: the previous version also spawned an anonymous process that slept
# for one minute and then exited without doing anything — dead leftover
# code with no observable effect, removed.
def start_link(_) do
  GenServer.start_link(
    __MODULE__,
    %{
      in: {:sets.new(), []},
      out: {:sets.new(), []}
    },
    name: __MODULE__
  )
end
|
|
|
|
|
|
|
|
# Publishes an activity we authored to the rest of the fediverse.
#
# Resolves the activity's actor, then hands delivery off to
# `ActivityPub.publish/2`. If the actor cannot be fetched the `with`
# falls through and returns the error tuple as-is.
#
# Fix: `Logger.debug(inspect(activity))` evaluated `inspect/1` eagerly even
# when debug logging is disabled; wrapped in a zero-arity fun for lazy
# evaluation, consistent with the other debug calls in this module.
def handle(:publish, activity) do
  Logger.debug(fn -> inspect(activity) end)
  Logger.debug(fn -> "Running publish for #{activity.data["id"]}" end)

  with {:ok, %Actor{} = actor} <- ActivityPub.get_or_fetch_actor_by_url(activity.data["actor"]) do
    Logger.info(fn -> "Sending #{activity.data["id"]} out via AP" end)

    ActivityPub.publish(actor, activity)
  end
end
|
|
|
|
|
|
|
|
# Handles a raw incoming ActivityPub document.
#
# Delegates to `Transmogrifier.handle_incoming/1`:
#   * `{:ok, activity, _data}` — newly processed; returns `{:ok, activity}`
#   * `%Activity{}`            — already known; logged and dropped
#   * anything else            — unhandled; logged and dropped
#
# Fix: `inspect/1` and `Jason.encode!/1` inside the debug calls were
# evaluated eagerly even when debug logging is disabled (encoding the whole
# params map on every unhandled activity); wrapped in zero-arity funs for
# lazy evaluation, consistent with the rest of this module.
def handle(:incoming_ap_doc, params) do
  Logger.info("Handling incoming AP activity")
  Logger.debug(fn -> inspect(params) end)

  case Transmogrifier.handle_incoming(params) do
    {:ok, activity, _data} ->
      {:ok, activity}

    %Activity{} ->
      Logger.info("Already had #{params["id"]}")

    e ->
      # Just drop those for now
      Logger.error("Unhandled activity")
      Logger.debug(fn -> inspect(e) end)
      Logger.debug(fn -> Jason.encode!(params) end)
  end
end
|
|
|
|
|
|
|
|
# Delivers one already-prepared activity to a single remote recipient by
# delegating to `ActivityPub.publish_one/1`.
# NOTE(review): `params` is passed through opaquely — presumably the
# per-inbox delivery payload built by the publish step; confirm its shape
# against `ActivityPub.publish_one/1`.
def handle(:publish_single_ap, params) do
  ActivityPub.publish_one(params)
end
|
|
|
|
|
|
|
|
# Catch-all clause: any task type not matched above is logged (lazily,
# only when debug logging is enabled) and rejected with an error tuple.
def handle(type, _payload) do
  Logger.debug(fn -> "Unknown task: #{type}" end)

  {:error, "Don't know what to do with this"}
end
|
|
|
|
|
|
|
|
# Queues a federation task for asynchronous processing.
#
# In the :test environment the task is executed synchronously so tests can
# observe its result; otherwise it is cast to the federator process.
# NOTE(review): calling `Mix.env/0` at runtime is an anti-pattern — Mix is
# not available in OTP releases; consider moving this switch to application
# config. Left as-is here to preserve behavior.
def enqueue(type, payload, priority \\ 1) do
  Logger.debug("enqueue something with type #{inspect(type)}")

  case Mix.env() do
    :test -> handle(type, payload)
    _ -> GenServer.cast(__MODULE__, {:enqueue, type, payload, priority})
  end
end
|
|
|
|
|
|
|
|
# Starts at most one queued task, provided there is spare capacity
# (fewer than @max_jobs tasks running) and work waiting in the queue.
#
# The started task is run under `Task.start/1` and tracked by monitor
# reference in the `running_jobs` set, so completion arrives as a
# `:DOWN` message (see handle_info/2). Returns the updated
# `{running_jobs, queue}` pair, unchanged when nothing was started.
def maybe_start_job(running_jobs, queue) do
  cond do
    queue == [] or :sets.size(running_jobs) >= @max_jobs ->
      {running_jobs, queue}

    true ->
      {{type, payload}, remaining} = queue_pop(queue)
      {:ok, pid} = Task.start(fn -> handle(type, payload) end)
      monitor_ref = Process.monitor(pid)
      {:sets.add_element(monitor_ref, running_jobs), remaining}
  end
end
|
|
|
|
|
|
|
|
# Enqueues an incoming-direction task and starts it immediately if there
# is capacity. Note the given priority is currently ignored: everything is
# enqueued at priority 1.
def handle_cast({:enqueue, type, payload, _priority}, state)
    when type in [:incoming_doc, :incoming_ap_doc] do
  %{in: {running, queue}, out: out_pair} = state

  queue = enqueue_sorted(queue, {type, payload}, 1)
  {running, queue} = maybe_start_job(running, queue)

  {:noreply, %{in: {running, queue}, out: out_pair}}
end
|
|
|
|
|
|
|
|
# Enqueues any other (outgoing-direction) task and starts it immediately
# if there is capacity. As with the incoming clause, the given priority is
# currently ignored: everything is enqueued at priority 1.
def handle_cast({:enqueue, type, payload, _priority}, state) do
  %{in: in_pair, out: {running, queue}} = state

  queue = enqueue_sorted(queue, {type, payload}, 1)
  {running, queue} = maybe_start_job(running, queue)

  {:noreply, %{in: in_pair, out: {running, queue}}}
end
|
|
|
|
|
|
|
|
# Catch-all: log any unrecognized cast (lazily) and leave state untouched.
def handle_cast(message, state) do
  Logger.debug(fn ->
    "Unknown: #{inspect(message)}, #{inspect(state)}"
  end)

  {:noreply, state}
end
|
|
|
|
|
|
|
|
# A monitored task finished (normally or not): retire its monitor ref from
# both directions' running-job sets — the ref can only be in one of them,
# and :sets.del_element/2 is a no-op on the other — then try to start the
# next queued task in each direction.
def handle_info({:DOWN, ref, :process, _pid, _reason}, state) do
  %{in: {in_jobs, in_queue}, out: {out_jobs, out_queue}} = state

  {in_jobs, in_queue} = maybe_start_job(:sets.del_element(ref, in_jobs), in_queue)
  {out_jobs, out_queue} = maybe_start_job(:sets.del_element(ref, out_jobs), out_queue)

  {:noreply, %{in: {in_jobs, in_queue}, out: {out_jobs, out_queue}}}
end
|
|
|
|
|
|
|
|
# Inserts `element` into `queue` keeping it sorted by ascending priority.
#
# Fix: the previous version prepended the new entry and relied on the
# stable sort, which left the NEWEST element first among equal priorities
# (LIFO). Since every caller enqueues at priority 1, the queue was
# effectively a stack and fresh work starved older work. Appending before
# the stable sort preserves FIFO order among equal priorities.
def enqueue_sorted(queue, element, priority) do
  Enum.sort_by(
    queue ++ [%{item: element, priority: priority}],
    fn %{priority: priority} -> priority end
  )
end
|
|
|
|
|
|
|
|
# Pops the head of the priority queue, returning `{item, rest}`.
# Deliberately has no clause for `[]` — callers (maybe_start_job/2) check
# for a non-empty queue first, so an empty pop is a bug and should crash.
def queue_pop([head | rest]) do
  %{item: element} = head
  {element, rest}
end
|
|
|
|
end
|