# NOTE(review): lines below were web-viewer residue from a scrape of an archived
# repository (archived 2020-02-06); original file:
# pleroma/lib/pleroma/web/ostatus/ostatus.ex (396 lines, 12 KiB, Elixir).
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Web.OStatus do
  import Pleroma.Web.XML
  require Logger

  alias Pleroma.Activity
  alias Pleroma.HTTP
  alias Pleroma.Object
  alias Pleroma.User
  alias Pleroma.Web
  alias Pleroma.Web.ActivityPub.ActivityPub
  alias Pleroma.Web.ActivityPub.Transmogrifier
  alias Pleroma.Web.ActivityPub.Visibility
  alias Pleroma.Web.OStatus.DeleteHandler
  alias Pleroma.Web.OStatus.FollowHandler
  alias Pleroma.Web.OStatus.NoteHandler
  alias Pleroma.Web.OStatus.UnfollowHandler
  alias Pleroma.Web.WebFinger
  alias Pleroma.Web.Websub
def is_representable?(%Activity{} = activity) do
object = Object.normalize(activity)
cond do
is_nil(object) ->
false
Visibility.is_public?(activity) && object.data["type"] == "Note" ->
true
true ->
false
end
end
2019-09-12 02:59:34 -04:00
def feed_path(user), do: "#{user.ap_id}/feed.atom"
2017-04-18 12:41:51 -04:00
2019-09-12 02:59:34 -04:00
def pubsub_path(user), do: "#{Web.base_url()}/push/hub/#{user.nickname}"
2017-04-18 12:41:51 -04:00
2019-09-12 02:59:34 -04:00
def salmon_path(user), do: "#{user.ap_id}/salmon"
2019-09-12 02:59:34 -04:00
def remote_follow_path, do: "#{Web.base_url()}/ostatus_subscribe?acct={uri}"
def handle_incoming(xml_string, options \\ []) do
2017-08-04 10:57:38 -04:00
with doc when doc != :error <- parse_document(xml_string) do
with {:ok, actor_user} <- find_make_or_update_actor(doc),
do: Pleroma.Instances.set_reachable(actor_user.ap_id)
2017-08-04 10:57:38 -04:00
entries = :xmerl_xpath.string('//entry', doc)
2018-03-30 09:01:53 -04:00
activities =
Enum.map(entries, fn entry ->
{:xmlObj, :string, object_type} =
:xmerl_xpath.string('string(/entry/activity:object-type[1])', entry)
{:xmlObj, :string, verb} = :xmerl_xpath.string('string(/entry/activity:verb[1])', entry)
Logger.debug("Handling #{verb}")
try do
case verb do
'http://activitystrea.ms/schema/1.0/delete' ->
with {:ok, activity} <- DeleteHandler.handle_delete(entry, doc), do: activity
'http://activitystrea.ms/schema/1.0/follow' ->
with {:ok, activity} <- FollowHandler.handle(entry, doc), do: activity
2018-05-21 04:36:20 -04:00
'http://activitystrea.ms/schema/1.0/unfollow' ->
with {:ok, activity} <- UnfollowHandler.handle(entry, doc), do: activity
2018-03-30 09:01:53 -04:00
'http://activitystrea.ms/schema/1.0/share' ->
with {:ok, activity, retweeted_activity} <- handle_share(entry, doc),
do: [activity, retweeted_activity]
'http://activitystrea.ms/schema/1.0/favorite' ->
with {:ok, activity, favorited_activity} <- handle_favorite(entry, doc),
do: [activity, favorited_activity]
_ ->
case object_type do
'http://activitystrea.ms/schema/1.0/note' ->
with {:ok, activity} <- NoteHandler.handle_note(entry, doc, options),
do: activity
2018-03-30 09:01:53 -04:00
'http://activitystrea.ms/schema/1.0/comment' ->
with {:ok, activity} <- NoteHandler.handle_note(entry, doc, options),
do: activity
2018-03-30 09:01:53 -04:00
_ ->
Logger.error("Couldn't parse incoming document")
nil
end
end
rescue
e ->
Logger.error("Error occured while handling activity")
Logger.error(xml_string)
Logger.error(inspect(e))
nil
2017-08-04 10:57:38 -04:00
end
2018-03-30 09:01:53 -04:00
end)
|> Enum.filter(& &1)
2017-08-04 10:57:38 -04:00
{:ok, activities}
else
_e -> {:error, []}
end
end
2017-05-07 14:05:03 -04:00
def make_share(entry, doc, retweeted_activity) do
with {:ok, actor} <- find_make_or_update_actor(doc),
%Object{} = object <- Object.normalize(retweeted_activity),
2017-05-07 14:05:03 -04:00
id when not is_nil(id) <- string_from_xpath("/entry/id", entry),
{:ok, activity, _object} = ActivityPub.announce(actor, object, id, false) do
2017-05-04 12:42:29 -04:00
{:ok, activity}
end
end
def handle_share(entry, doc) do
2017-05-19 10:08:46 -04:00
with {:ok, retweeted_activity} <- get_or_build_object(entry),
2017-05-04 12:42:29 -04:00
{:ok, activity} <- make_share(entry, doc, retweeted_activity) do
{:ok, activity, retweeted_activity}
else
e -> {:error, e}
end
end
2017-05-07 14:05:03 -04:00
def make_favorite(entry, doc, favorited_activity) do
with {:ok, actor} <- find_make_or_update_actor(doc),
%Object{} = object <- Object.normalize(favorited_activity),
2017-05-07 14:05:03 -04:00
id when not is_nil(id) <- string_from_xpath("/entry/id", entry),
{:ok, activity, _object} = ActivityPub.like(actor, object, id, false) do
{:ok, activity}
end
end
2017-05-19 10:08:46 -04:00
def get_or_build_object(entry) do
with {:ok, activity} <- get_or_try_fetching(entry) do
{:ok, activity}
else
_e ->
with [object] <- :xmerl_xpath.string('/entry/activity:object', entry) do
2017-05-20 07:35:22 -04:00
NoteHandler.handle_note(object, object)
2017-05-19 10:08:46 -04:00
end
end
end
def get_or_try_fetching(entry) do
2017-05-11 11:59:11 -04:00
Logger.debug("Trying to get entry from db")
2018-03-30 09:01:53 -04:00
with id when not is_nil(id) <- string_from_xpath("//activity:object[1]/id", entry),
%Activity{} = activity <- Activity.get_create_by_object_ap_id_with_object(id) do
{:ok, activity}
2018-03-30 09:01:53 -04:00
else
_ ->
2017-05-11 03:34:11 -04:00
Logger.debug("Couldn't get, will try to fetch")
2018-03-30 09:01:53 -04:00
with href when not is_nil(href) <-
string_from_xpath("//activity:object[1]/link[@type=\"text/html\"]/@href", entry),
2017-08-04 10:57:38 -04:00
{:ok, [favorited_activity]} <- fetch_activity_from_url(href) do
{:ok, favorited_activity}
2018-03-30 09:01:53 -04:00
else
e -> Logger.debug("Couldn't find href: #{inspect(e)}")
end
end
end
def handle_favorite(entry, doc) do
with {:ok, favorited_activity} <- get_or_try_fetching(entry),
{:ok, activity} <- make_favorite(entry, doc, favorited_activity) do
{:ok, activity, favorited_activity}
else
e -> {:error, e}
end
end
def get_attachments(entry) do
:xmerl_xpath.string('/entry/link[@rel="enclosure"]', entry)
2018-03-30 09:01:53 -04:00
|> Enum.map(fn enclosure ->
with href when not is_nil(href) <- string_from_xpath("/link/@href", enclosure),
type when not is_nil(type) <- string_from_xpath("/link/@type", enclosure) do
%{
"type" => "Attachment",
2018-03-30 09:01:53 -04:00
"url" => [
%{
"type" => "Link",
"mediaType" => type,
"href" => href
}
]
}
end
end)
2018-03-30 09:01:53 -04:00
|> Enum.filter(& &1)
end
2017-05-20 07:35:22 -04:00
@doc """
2017-10-31 12:30:46 -04:00
Gets the content from a an entry.
2017-05-20 07:35:22 -04:00
"""
def get_content(entry) do
2017-10-31 12:30:46 -04:00
string_from_xpath("//content", entry)
end
2017-10-31 12:30:46 -04:00
@doc """
Get the cw that mastodon uses.
"""
def get_cw(entry) do
2019-09-12 02:59:34 -04:00
case string_from_xpath("/*/summary", entry) do
cw when not is_nil(cw) -> cw
_ -> nil
end
end
2017-05-18 09:58:18 -04:00
def get_tags(entry) do
:xmerl_xpath.string('//category', entry)
2018-03-30 09:01:53 -04:00
|> Enum.map(fn category -> string_from_xpath("/category/@term", category) end)
|> Enum.filter(& &1)
2017-11-01 04:33:29 -04:00
|> Enum.map(&String.downcase/1)
2017-05-18 09:58:18 -04:00
end
def maybe_update(doc, user) do
2019-09-12 02:59:34 -04:00
case string_from_xpath("//author[1]/ap_enabled", doc) do
"true" ->
Transmogrifier.upgrade_user_from_ap_id(user.ap_id)
_ ->
maybe_update_ostatus(doc, user)
2018-02-24 07:06:53 -05:00
end
end
2018-03-30 09:01:53 -04:00
2018-02-24 07:06:53 -05:00
def maybe_update_ostatus(doc, user) do
2019-09-12 02:59:34 -04:00
old_data = Map.take(user, [:bio, :avatar, :name])
with false <- user.local,
avatar <- make_avatar_object(doc),
2017-06-26 11:00:58 -04:00
bio <- string_from_xpath("//author[1]/summary", doc),
2017-09-05 05:40:34 -04:00
name <- string_from_xpath("//author[1]/poco:displayName", doc),
2018-03-30 09:01:53 -04:00
new_data <- %{
avatar: avatar || old_data.avatar,
name: name || old_data.name,
bio: bio || old_data.bio
2018-03-30 09:01:53 -04:00
},
false <- new_data == old_data do
change = Ecto.Changeset.change(user, new_data)
User.update_and_set_cache(change)
2018-03-30 09:01:53 -04:00
else
_ ->
{:ok, user}
end
end
def find_make_or_update_actor(doc) do
uri = string_from_xpath("//author/uri[1]", doc)
2018-03-30 09:01:53 -04:00
with {:ok, %User{} = user} <- find_or_make_user(uri),
{:ap_enabled, false} <- {:ap_enabled, User.ap_enabled?(user)} do
maybe_update(doc, user)
else
{:ap_enabled, true} ->
{:error, :invalid_protocol}
_ ->
{:error, :unknown_user}
end
end
2019-09-12 02:59:34 -04:00
@spec find_or_make_user(String.t()) :: {:ok, User.t()}
2017-04-29 13:06:01 -04:00
def find_or_make_user(uri) do
2019-09-12 02:59:34 -04:00
case User.get_by_ap_id(uri) do
%User{} = user -> {:ok, user}
_ -> make_user(uri)
end
end
2019-09-12 02:59:34 -04:00
@spec make_user(String.t(), boolean()) :: {:ok, User.t()} | {:error, any()}
def make_user(uri, update \\ false) do
2017-04-29 13:06:01 -04:00
with {:ok, info} <- gather_user_info(uri) do
with false <- update,
2019-09-12 02:59:34 -04:00
%User{} = user <- User.get_cached_by_ap_id(info["uri"]) do
{:ok, user}
2018-03-30 09:01:53 -04:00
else
2019-09-12 02:59:34 -04:00
_e -> User.insert_or_update_user(build_user_data(info))
end
2017-04-29 13:06:01 -04:00
end
end
2019-09-12 02:59:34 -04:00
defp build_user_data(info) do
%{
name: info["name"],
nickname: info["nickname"] <> "@" <> info["host"],
ap_id: info["uri"],
info: info,
avatar: info["avatar"],
bio: info["bio"]
}
end
# TODO: Just takes the first one for now.
2017-09-16 07:24:15 -04:00
def make_avatar_object(author_doc, rel \\ "avatar") do
href = string_from_xpath("//author[1]/link[@rel=\"#{rel}\"]/@href", author_doc)
type = string_from_xpath("//author[1]/link[@rel=\"#{rel}\"]/@type", author_doc)
if href do
%{
"type" => "Image",
2019-09-12 02:59:34 -04:00
"url" => [%{"type" => "Link", "mediaType" => type, "href" => href}]
}
else
nil
end
2017-04-18 12:41:51 -04:00
end
2017-04-29 11:51:59 -04:00
2019-09-12 02:59:34 -04:00
@spec gather_user_info(String.t()) :: {:ok, map()} | {:error, any()}
2017-04-29 11:51:59 -04:00
def gather_user_info(username) do
with {:ok, webfinger_data} <- WebFinger.finger(username),
{:ok, feed_data} <- Websub.gather_feed_data(webfinger_data["topic"]) do
2019-09-12 02:59:34 -04:00
data =
webfinger_data
|> Map.merge(feed_data)
|> Map.put("fqn", username)
{:ok, data}
2018-03-30 09:01:53 -04:00
else
e ->
Logger.debug(fn -> "Couldn't gather info for #{username}" end)
{:error, e}
2017-04-29 11:51:59 -04:00
end
end
# Regex-based 'parsing' so we don't have to pull in a full html parser
# It's a hack anyway. Maybe revisit this in the future
@mastodon_regex ~r/<link href='(.*)' rel='alternate' type='application\/atom\+xml'>/
@gs_regex ~r/<link title=.* href="(.*)" type="application\/atom\+xml" rel="alternate">/
2017-05-05 14:15:26 -04:00
@gs_classic_regex ~r/<link rel="alternate" href="(.*)" type="application\/atom\+xml" title=.*>/
def get_atom_url(body) do
cond do
Regex.match?(@mastodon_regex, body) ->
[[_, match]] = Regex.scan(@mastodon_regex, body)
{:ok, match}
2018-03-30 09:01:53 -04:00
Regex.match?(@gs_regex, body) ->
[[_, match]] = Regex.scan(@gs_regex, body)
{:ok, match}
2018-03-30 09:01:53 -04:00
2017-05-05 14:15:26 -04:00
Regex.match?(@gs_classic_regex, body) ->
[[_, match]] = Regex.scan(@gs_classic_regex, body)
{:ok, match}
2018-03-30 09:01:53 -04:00
true ->
2018-03-19 13:44:25 -04:00
Logger.debug(fn -> "Couldn't find Atom link in #{inspect(body)}" end)
{:error, "Couldn't find the Atom link"}
end
end
def fetch_activity_from_atom_url(url, options \\ []) do
2018-03-19 05:28:28 -04:00
with true <- String.starts_with?(url, "http"),
2018-12-02 09:08:36 -05:00
{:ok, %{body: body, status: code}} when code in 200..299 <-
2019-09-12 02:59:34 -04:00
HTTP.get(url, [{:Accept, "application/atom+xml"}]) do
2017-08-04 10:57:38 -04:00
Logger.debug("Got document from #{url}, handling...")
handle_incoming(body, options)
2018-02-23 09:00:19 -05:00
else
e ->
Logger.debug("Couldn't get #{url}: #{inspect(e)}")
e
2017-08-04 10:57:38 -04:00
end
end
def fetch_activity_from_html_url(url, options \\ []) do
2017-05-11 03:13:14 -04:00
Logger.debug("Trying to fetch #{url}")
2018-03-30 09:01:53 -04:00
2018-03-19 05:28:28 -04:00
with true <- String.starts_with?(url, "http"),
2019-05-25 00:24:21 -04:00
{:ok, %{body: body}} <- HTTP.get(url, []),
2017-08-04 10:57:38 -04:00
{:ok, atom_url} <- get_atom_url(body) do
fetch_activity_from_atom_url(atom_url, options)
2018-02-23 09:00:19 -05:00
else
e ->
Logger.debug("Couldn't get #{url}: #{inspect(e)}")
e
end
2017-04-18 12:41:51 -04:00
end
2017-08-04 10:57:38 -04:00
def fetch_activity_from_url(url, options \\ []) do
with {:ok, [_ | _] = activities} <- fetch_activity_from_atom_url(url, options) do
2018-12-10 01:39:57 -05:00
{:ok, activities}
else
_e -> fetch_activity_from_html_url(url, options)
2017-08-04 10:57:38 -04:00
end
2018-12-10 01:39:57 -05:00
rescue
e ->
Logger.debug("Couldn't get #{url}: #{inspect(e)}")
{:error, "Couldn't get #{url}: #{inspect(e)}"}
2017-08-04 10:57:38 -04:00
end
2017-04-18 12:41:51 -04:00
end