Merge branch 'develop' of gitlab.com:fediverse.space/fediverse.space into develop
commit acfa38505f
@@ -22,15 +22,14 @@ test-frontend:
     changes:
       - frontend/*
 
-backend-sobelow:
+test-backend:
   stage: test
   image: elixir:1.9
+  variables:
+    MIX_ENV: test
   only:
     changes:
       - backend/*
-  except:
-    - develop
-    - master
   before_script:
     - cd backend
   script:
@@ -38,6 +37,7 @@ backend-sobelow:
     - mix local.rebar --force
     - mix deps.get
     - mix deps.compile
+    - mix credo --strict
     - mix sobelow --config
   cache:
     paths:
@@ -1,4 +1,7 @@
 defmodule Backend.Api do
+  @moduledoc """
+  Functions used in the API controllers. Most of these simply return data from the database.
+  """
   alias Backend.{Edge, Instance, Repo}
   import Backend.Util
   import Ecto.Query
@@ -67,6 +70,7 @@ defmodule Backend.Api do
   end
 
   @spec list_edges() :: [Edge.t()]
+  # credo:disable-for-next-line Credo.Check.Refactor.CyclomaticComplexity
   def list_edges(domain \\ nil) do
     user_threshold = get_config(:personal_instance_threshold)
 
@@ -22,7 +22,7 @@ defmodule Backend.Application do
       # Start the endpoint when the application starts
       BackendWeb.Endpoint,
       # Crawler children
-      :hackney_pool.child_spec(:crawler, timeout: 15000, max_connections: crawl_worker_count),
+      :hackney_pool.child_spec(:crawler, timeout: 15_000, max_connections: crawl_worker_count),
       Supervisor.child_spec(
         {Task,
          fn ->
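The `:crawler` hackney pool configured here is the same pool the HTTP helpers in Backend.Util (further down in this commit) opt into. A minimal sketch of a request that goes through the pool, mirroring those helpers (the URL and User-Agent string are placeholders):

    # Connections are drawn from the shared :crawler pool, so concurrent crawls
    # are capped by its max_connections (crawl_worker_count above).
    HTTPoison.get("https://social.example/api/v1/instance",
      [{"User-Agent", "fediverse.space crawler"}],
      hackney: [pool: :crawler],
      recv_timeout: 15_000,
      timeout: 15_000
    )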
@@ -1,4 +1,7 @@
 defmodule Backend.Auth do
+  @moduledoc """
+  Functions related to authentication.
+  """
   alias Phoenix.Token
   import Backend.Util
 
@@ -12,6 +15,6 @@ defmodule Backend.Auth do
 
   def verify_token(token) do
     # tokens are valid for 12 hours
-    Token.verify(BackendWeb.Endpoint, @salt, token, max_age: 43200)
+    Token.verify(BackendWeb.Endpoint, @salt, token, max_age: 43_200)
   end
 end
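`verify/4` is the read half of Phoenix.Token's sign/verify pair. A minimal round-trip sketch (the salt string and payload are placeholders; `max_age: 43_200` is the 12-hour window from the code above):

    token = Phoenix.Token.sign(BackendWeb.Endpoint, "login salt", "admin@social.example")

    case Phoenix.Token.verify(BackendWeb.Endpoint, "login salt", token, max_age: 43_200) do
      {:ok, data} -> data                  # signed within the last 12 hours
      {:error, :expired} -> :expired       # older than max_age
      {:error, :invalid} -> :invalid       # tampered with, or wrong salt
    end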
@@ -1,4 +1,7 @@
 defmodule Backend.Crawl do
+  @moduledoc """
+  Stores aggregate data about a single crawl (i.e. not individual statuses, but the number of statuses seen etc.)
+  """
   use Ecto.Schema
   import Ecto.Changeset
 
@@ -1,4 +1,8 @@
 defmodule Backend.CrawlInteraction do
+  @moduledoc """
+  Model for tracking interactions between instances. Stores the source and target instance, as well as the number
+  of mentions seen in the given crawl.
+  """
   use Ecto.Schema
   import Ecto.Changeset
 
@@ -4,11 +4,13 @@ defmodule Backend.Crawler do
   """
 
   alias __MODULE__
-  alias Backend.Crawler.Crawlers.{Friendica, GnuSocial, Mastodon, Misskey, Nodeinfo}
+  alias Backend.{Crawl, CrawlInteraction, Instance, InstancePeer, MostRecentCrawl, Repo}
   alias Backend.Crawler.ApiCrawler
-  alias Backend.{Crawl, CrawlInteraction, MostRecentCrawl, Repo, Instance, InstancePeer}
+  alias Backend.Crawler.Crawlers.{Friendica, GnuSocial, Mastodon, Misskey, Nodeinfo}
 
   import Ecto.Query
   import Backend.Util
 
   require Logger
 
   defstruct [
@@ -1,4 +1,9 @@
 defmodule Backend.Crawler.Crawlers.Friendica do
+  @moduledoc """
+  A crawler for Friendica servers.
+  These don't expose a public list of statuses. This crawler combines nodeinfo data with the /statistics.json endpoint
+  in Friendica, and gets a list of peers from /poco/@server.
+  """
   alias Backend.Crawler.ApiCrawler
   import Backend.Crawler.Util
   import Backend.Util
@@ -21,7 +26,8 @@ defmodule Backend.Crawler.Crawlers.Friendica do
   @impl ApiCrawler
   def allows_crawling?(domain) do
     [
-      "/statistics.json"
+      "/statistics.json",
+      "/poco/@server"
     ]
     |> Enum.map(fn endpoint -> "https://#{domain}#{endpoint}" end)
     |> urls_are_crawlable?()
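Concretely, the updated list means both endpoints are now checked against the server's robots.txt. For a hypothetical domain, the Enum.map step expands to:

    ["/statistics.json", "/poco/@server"]
    |> Enum.map(fn endpoint -> "https://friendica.example#{endpoint}" end)
    # => ["https://friendica.example/statistics.json",
    #     "https://friendica.example/poco/@server"]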
@@ -1,4 +1,7 @@
 defmodule Backend.Crawler.Crawlers.GnuSocial do
+  @moduledoc """
+  Crawler for GNU Social servers.
+  """
   alias Backend.Crawler.ApiCrawler
   alias Backend.Crawler.Crawlers.Nodeinfo
   import Backend.Crawler.Util
@@ -1,4 +1,7 @@
 defmodule Backend.Crawler.Crawlers.Mastodon do
+  @moduledoc """
+  Crawler for the Mastodon API (used by Mastodon, its forks like Gab or Glitch, and Pleroma).
+  """
   require Logger
   import Backend.Crawler.Util
   import Backend.Util
@@ -1,4 +1,7 @@
 defmodule Backend.Crawler.Crawlers.Misskey do
+  @moduledoc """
+  Crawler for Misskey servers.
+  """
   alias Backend.Crawler.ApiCrawler
 
   @behaviour ApiCrawler
@@ -1,9 +1,4 @@
 defmodule Backend.Crawler.Crawlers.Nodeinfo do
-  alias Backend.Crawler.ApiCrawler
-  require Logger
-  import Backend.Util
-  import Backend.Crawler.Util
-
   @moduledoc """
   This module is slightly different from the other crawlers.
   It doesn't implement the ApiCrawler spec because it isn't run as a self-contained crawler.
@@ -12,6 +7,10 @@ defmodule Backend.Crawler.Crawlers.Nodeinfo do
   This is to get the user count. Some servers don't publish this in other places (e.g. GNU Social, PeerTube) so we need
   nodeinfo to know whether it's a personal instance or not.
   """
+  alias Backend.Crawler.ApiCrawler
+  require Logger
+  import Backend.Util
+  import Backend.Crawler.Util
 
   defstruct [
     :description,
@@ -1,15 +1,15 @@
 defmodule Backend.Crawler.StaleInstanceManager do
+  @moduledoc """
+  This module regularly finds stale instances (i.e. instances that haven't been updated for longer than the crawl
+  interval) and adds them to the job queue. It runs once a minute.
+  """
+
   use GenServer
   alias Backend.{Instance, Repo}
   import Ecto.Query
   import Backend.Util
   require Logger
 
-  @moduledoc """
-  This module regularly finds stale instances (i.e. instances that haven't been updated for longer than the crawl
-  interval) and adds them to the job queue. It runs once a minute.
-  """
-
   def start_link(_opts) do
     GenServer.start_link(__MODULE__, [], name: __MODULE__)
   end
@@ -44,11 +44,11 @@ defmodule Backend.Crawler.StaleInstanceManager do
     {:noreply, state}
   end
 
-  defp schedule_add() do
+  defp schedule_add do
     Process.send_after(self(), :queue_stale_domains, 60_000)
   end
 
-  defp queue_stale_domains() do
+  defp queue_stale_domains do
     now = get_now()
 
     stale_domains =
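Dropping the parentheses on zero-arity definitions, like the underscored numeric literals elsewhere in this commit, matches credo's default readability checks (presumably Credo.Check.Readability.ParenthesesOnZeroArityDefs and Credo.Check.Readability.LargeNumbers; the diff itself doesn't name them). The underscore is purely visual:

    iex> 15_000 == 15000
    true
    iex> 43_200 == 12 * 60 * 60   # the token max_age above, i.e. 12 hours
    true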
@@ -1,4 +1,5 @@
 defmodule Backend.Crawler.Util do
+  @moduledoc false
   require Logger
   alias Backend.{Instance, Repo}
   import Backend.Util
@@ -1,4 +1,5 @@
 defmodule Backend.Edge do
+  @moduledoc false
   use Ecto.Schema
   import Ecto.Changeset
 
@@ -1,4 +1,5 @@
 defmodule Backend.Elasticsearch.Cluster do
+  @moduledoc false
   use Elasticsearch.Cluster, otp_app: :backend
 
   def init(config) do
@@ -1,4 +1,5 @@
 defmodule Backend.Elasticsearch.Store do
+  @moduledoc false
   @behaviour Elasticsearch.Store
 
   alias Backend.Repo
@@ -1,4 +1,8 @@
 defmodule Backend.Instance do
+  @moduledoc """
+  Model for storing everything related to an instance: not only the data from crawls, but also statistics, the time
+  of the next scheduled crawl, X and Y coordinates on the graph, and so on.
+  """
   use Ecto.Schema
   import Ecto.Changeset
 
@@ -1,4 +1,8 @@
 defmodule Backend.InstancePeer do
+  @moduledoc """
+  Model for tracking which other instances a given instance knows of
+  (the data returned from /api/v1/instance/peers from Mastodon, for example)
+  """
   use Ecto.Schema
   import Ecto.Changeset
 
@@ -1,4 +1,8 @@
 defmodule Backend.MostRecentCrawl do
+  @moduledoc """
+  Model for fast access to the most recent crawl ID for a given domain.
+  You could also just look this up in the crawls table, but that table gets very large so this is much faster.
+  """
   use Ecto.Schema
   import Ecto.Changeset
 
@@ -1,4 +1,8 @@
 defmodule Backend.Release do
+  @moduledoc """
+  Functions related to releases. Can be run against the compiled binary with e.g.
+  `/bin/backend eval "Backend.Release.migrate()"`
+  """
   @app :backend
   @start_apps [
     :crypto,
@@ -31,7 +35,7 @@ defmodule Backend.Release do
     {:ok, _, _} = Ecto.Migrator.with_repo(repo, &Ecto.Migrator.run(&1, :down, to: version))
   end
 
-  def build_elasticsearch_indexes() do
+  def build_elasticsearch_indexes do
     start_services()
     IO.puts("Building indexes...")
     Enum.each(@indexes, &Elasticsearch.Index.hot_swap(Backend.Elasticsearch.Cluster, &1))
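Following the invocation pattern quoted in the moduledoc, the index build would presumably be run against the compiled release the same way (the first command is taken from the moduledoc; the second is an extrapolation):

    /bin/backend eval "Backend.Release.migrate()"
    /bin/backend eval "Backend.Release.build_elasticsearch_indexes()"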
@@ -5,9 +5,12 @@ defmodule Backend.Scheduler do
 
   use Quantum.Scheduler, otp_app: :backend
 
-  alias Backend.{Crawl, Edge, CrawlInteraction, Instance, Repo}
+  alias Backend.{Crawl, CrawlInteraction, Edge, Instance, Repo}
+  alias Backend.Mailer.AdminEmail
 
   import Backend.Util
   import Ecto.Query
 
   require Logger
 
   @doc """
@@ -34,7 +37,7 @@ defmodule Backend.Scheduler do
   Calculates every instance's "insularity score" -- that is, the percentage of mentions that are among users on the
   instance, rather than at other instances.
   """
-  def generate_insularity_scores() do
+  def generate_insularity_scores do
     now = get_now()
 
     crawls_subquery =
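The hunk cuts off before the query itself, but the docstring pins down the arithmetic; a toy illustration with invented numbers:

    # An instance whose users produced 80 mentions, 60 of them aimed at accounts
    # on the same instance, has an insularity score of:
    60 / 80
    # => 0.75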
@@ -79,7 +82,7 @@ defmodule Backend.Scheduler do
   @doc """
   This function calculates the average number of statuses per hour over the last month.
   """
-  def generate_status_rate() do
+  def generate_status_rate do
     now = get_now()
     # We want the earliest sucessful crawl so that we can exclude it from the statistics.
     # This is because the first crawl goes up to one month into the past -- this would mess up the counts!
@@ -138,7 +141,7 @@ defmodule Backend.Scheduler do
   It calculates the strength of edges between nodes. Self-edges are not generated.
   Edges are only generated if both instances have been succesfully crawled.
   """
-  def generate_edges() do
+  def generate_edges do
     now = get_now()
 
     crawls_subquery =
@@ -177,32 +180,7 @@ defmodule Backend.Scheduler do
 
     edges =
       interactions
-      # Get a map of %{{source, target} => {total_mention_count, total_statuses_seen}}
-      |> Enum.reduce(%{}, fn
-        %{
-          source_domain: source_domain,
-          target_domain: target_domain,
-          mentions: mentions,
-          source_statuses_seen: source_statuses_seen,
-          target_statuses_seen: target_statuses_seen
-        },
-        acc ->
-          key = get_interaction_key(source_domain, target_domain)
-
-          # target_statuses_seen might be nil if that instance was never crawled. default to 0.
-          target_statuses_seen =
-            case target_statuses_seen do
-              nil -> 0
-              _ -> target_statuses_seen
-            end
-
-          statuses_seen = source_statuses_seen + target_statuses_seen
-
-          Map.update(acc, key, {mentions, statuses_seen}, fn {curr_mentions,
-                                                              curr_statuses_seen} ->
-            {curr_mentions + mentions, curr_statuses_seen}
-          end)
-      end)
+      |> reduce_mention_count()
       |> Enum.map(fn {{source_domain, target_domain}, {mention_count, statuses_seen}} ->
         %{
           source_domain: source_domain,
@@ -224,7 +202,7 @@ defmodule Backend.Scheduler do
   This function checks to see if a lot of instances on the same base domain have been created recently. If so,
   notifies the server admin over SMS.
   """
-  def check_for_spam_instances() do
+  def check_for_spam_instances do
    hour_range = 3
 
    count_subquery =
@@ -264,9 +242,38 @@ defmodule Backend.Scheduler do
 
       Logger.info(message)
       send_admin_sms(message)
-      Backend.Mailer.AdminEmail.send("Potential spam", message)
+      AdminEmail.send("Potential spam", message)
     else
       Logger.debug("Did not find potential spam instances.")
     end
   end
 
+  # Takes a list of Interactions
+  # Returns a map of %{{source, target} => {total_mention_count, total_statuses_seen}}
+  defp reduce_mention_count(interactions) do
+    Enum.reduce(interactions, %{}, fn
+      %{
+        source_domain: source_domain,
+        target_domain: target_domain,
+        mentions: mentions,
+        source_statuses_seen: source_statuses_seen,
+        target_statuses_seen: target_statuses_seen
+      },
+      acc ->
+        key = get_interaction_key(source_domain, target_domain)
+
+        # target_statuses_seen might be nil if that instance was never crawled. default to 0.
+        target_statuses_seen =
+          case target_statuses_seen do
+            nil -> 0
+            _ -> target_statuses_seen
+          end
+
+        statuses_seen = source_statuses_seen + target_statuses_seen
+
+        Map.update(acc, key, {mentions, statuses_seen}, fn {curr_mentions, curr_statuses_seen} ->
+          {curr_mentions + mentions, curr_statuses_seen}
+        end)
+    end)
+  end
 end
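A toy run of the extracted helper (domains invented; the exact key shape comes from get_interaction_key/2 in Backend.Util, assumed here to be the canonical pair of domains):

    interactions = [
      %{source_domain: "a.example", target_domain: "b.example",
        mentions: 3, source_statuses_seen: 100, target_statuses_seen: nil},
      %{source_domain: "a.example", target_domain: "b.example",
        mentions: 2, source_statuses_seen: 100, target_statuses_seen: 50}
    ]

    reduce_mention_count(interactions)
    # => %{{"a.example", "b.example"} => {5, 100}}
    # Mentions accumulate (3 + 2); statuses_seen keeps the first row's sum, where
    # the nil target_statuses_seen was defaulted to 0 (100 + 0).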
@@ -1,4 +1,5 @@
 defmodule Backend.Util do
+  @moduledoc false
   import Ecto.Query
   require Logger
   alias Backend.{Crawl, MostRecentCrawl, Repo}
@@ -53,7 +54,7 @@ defmodule Backend.Util do
   @doc """
   Gets the current UTC time as a NaiveDateTime in a format that can be inserted into the database.
   """
-  def get_now() do
+  def get_now do
     NaiveDateTime.truncate(NaiveDateTime.utc_now(), :second)
   end
 
@@ -160,8 +161,8 @@ defmodule Backend.Util do
   def get_and_decode(url) do
     case HTTPoison.get(url, [{"User-Agent", get_config(:user_agent)}],
            hackney: [pool: :crawler],
-           recv_timeout: 15000,
-           timeout: 15000
+           recv_timeout: 15_000,
+           timeout: 15_000
          ) do
       {:ok, %{status_code: 200, body: body}} -> Jason.decode(body)
       {:ok, _} -> {:error, %HTTPoison.Error{reason: "Non-200 response"}}
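The two visible branches give the helper's contract: a Jason-decoded body on a 200 response, an error tuple otherwise. A hedged usage sketch (the URL is a placeholder):

    case get_and_decode("https://social.example/nodeinfo/2.0") do
      {:ok, decoded} -> Map.get(decoded, "software")  # decoded JSON map
      {:error, _reason} -> nil                        # non-200, transport error, or bad JSON
    end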
@@ -185,8 +186,8 @@ defmodule Backend.Util do
   def post_and_decode(url, body \\ "") do
     case HTTPoison.post(url, body, [{"User-Agent", get_config(:user_agent)}],
            hackney: [pool: :crawler],
-           recv_timeout: 15000,
-           timeout: 15000
+           recv_timeout: 15_000,
+           timeout: 15_000
          ) do
       {:ok, %{status_code: 200, body: response_body}} -> Jason.decode(response_body)
       {:ok, _} -> {:error, %HTTPoison.Error{reason: "Non-200 response"}}
@@ -1,7 +1,6 @@
 defmodule BackendWeb.AdminController do
+  alias Backend.{Api, Auth, Instance}
   use BackendWeb, :controller
-  alias Backend.{Auth, Api, Instance}
-  require Logger
 
   action_fallback BackendWeb.FallbackController
 
@@ -10,8 +10,6 @@ defmodule BackendWeb.AdminLoginController do
   choose one or the other by POSTing back.
   """
   def show(conn, %{"id" => domain}) do
-    # TODO: this should really be handled in a more async manner
-    # TODO: this assumes mastodon/pleroma API
     cleaned_domain = clean_domain(domain)
 
     instance_data = get_and_decode!("https://#{cleaned_domain}/api/v1/instance")
@@ -24,6 +22,7 @@ defmodule BackendWeb.AdminLoginController do
 
     instance_data = get_and_decode!("https://#{cleaned_domain}/api/v1/instance")
 
+    # credo:disable-for-lines:16 Credo.Check.Refactor.CondStatements
     error =
       cond do
         type == "email" ->
@@ -8,57 +8,66 @@ defmodule BackendWeb.InstanceView do
 
     cond do
       instance.user_count < user_threshold and not instance.opt_in ->
-        %{
-          name: instance.domain,
-          status: "personal instance"
-        }
+        render_personal_instance(instance)
 
       instance.crawl_error == "robots.txt" ->
-        %{
-          name: instance.domain,
-          status: instance.crawl_error
-        }
+        render_domain_and_error(instance)
 
       instance.crawl_error != nil and instance.type == nil ->
-        %{
-          name: instance.domain,
-          status: instance.crawl_error
-        }
+        render_domain_and_error(instance)
 
       true ->
-        last_updated = max_datetime(crawl.inserted_at, instance.updated_at)
-
-        filtered_peers =
-          instance.peers
-          |> Enum.filter(fn peer -> not peer.opt_out end)
-
-        statuses_per_user_per_day =
-          if instance.statuses_per_day != nil and instance.user_count != nil and
-               instance.user_count > 0 do
-            instance.statuses_per_day / instance.user_count
-          else
-            nil
-          end
-
-        %{
-          name: instance.domain,
-          description: instance.description,
-          version: instance.version,
-          userCount: instance.user_count,
-          insularity: instance.insularity,
-          statusCount: instance.status_count,
-          domainCount: length(instance.peers),
-          peers: render_many(filtered_peers, InstanceView, "instance.json"),
-          lastUpdated: last_updated,
-          status: "success",
-          type: instance.type,
-          statusesPerDay: instance.statuses_per_day,
-          statusesPerUserPerDay: statuses_per_user_per_day
-        }
+        render_instance(instance, crawl)
     end
   end
 
   def render("instance.json", %{instance: instance}) do
     %{name: instance.domain}
   end
+
+  defp render_personal_instance(instance) do
+    %{
+      name: instance.domain,
+      status: "personal instance"
+    }
+  end
+
+  defp render_domain_and_error(instance) do
+    %{
+      name: instance.domain,
+      status: instance.crawl_error
+    }
+  end
+
+  defp render_instance(instance, crawl) do
+    last_updated = max_datetime(crawl.inserted_at, instance.updated_at)
+
+    filtered_peers =
+      instance.peers
+      |> Enum.filter(fn peer -> not peer.opt_out end)
+
+    statuses_per_user_per_day =
+      if instance.statuses_per_day != nil and instance.user_count != nil and
+           instance.user_count > 0 do
+        instance.statuses_per_day / instance.user_count
+      else
+        nil
+      end
+
+    %{
+      name: instance.domain,
+      description: instance.description,
+      version: instance.version,
+      userCount: instance.user_count,
+      insularity: instance.insularity,
+      statusCount: instance.status_count,
+      domainCount: length(instance.peers),
+      peers: render_many(filtered_peers, InstanceView, "instance.json"),
+      lastUpdated: last_updated,
+      status: "success",
+      type: instance.type,
+      statusesPerDay: instance.statuses_per_day,
+      statusesPerUserPerDay: statuses_per_user_per_day
+    }
+  end
 end
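The refactor is behavior-preserving: each cond branch now delegates to a named helper. For example, the two error branches collapse into render_domain_and_error/1, which only reads two fields (toy map, hypothetical domain):

    render_domain_and_error(%{domain: "social.example", crawl_error: "robots.txt"})
    # => %{name: "social.example", status: "robots.txt"}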
@@ -1,4 +1,7 @@
 defmodule Backend.Mailer.AdminEmail do
+  @moduledoc """
+  Module for sending emails to the server administrator.
+  """
   import Swoosh.Email
   import Backend.Util
   require Logger
@@ -1,3 +1,4 @@
 defmodule Backend.Mailer do
+  @moduledoc false
   use Swoosh.Mailer, otp_app: :backend
 end
@@ -1,4 +1,7 @@
 defmodule Backend.Mailer.UserEmail do
+  @moduledoc """
+  Module for sending emails to users.
+  """
   import Swoosh.Email
   import Backend.{Auth, Util}
   require Logger
@@ -1,13 +0,0 @@
-defmodule Mix.Tasks.Crawl do
-  alias Backend.Crawler
-  use Mix.Task
-
-  @shortdoc "Crawl a given instance."
-
-  def run(domain) do
-    Mix.Task.run("app.start")
-    # Application.ensure_all_started(:timex)
-    # Mix.Task.run("loadconfig")
-    Crawler.run(domain)
-  end
-end
@@ -62,7 +62,8 @@ defmodule Backend.MixProject do
       {:swoosh, "~> 0.23.3"},
       {:ex_twilio, "~> 0.7.0"},
       {:elasticsearch, "~> 1.0"},
-      {:appsignal, "~> 1.10.1"}
+      {:appsignal, "~> 1.10.1"},
+      {:credo, "~> 1.1", only: [:dev, :test], runtime: false}
     ]
   end
 
@@ -2,12 +2,14 @@
   "appsignal": {:hex, :appsignal, "1.10.11", "5df2546d6ea15e392a4384b175ebc1bb33f4ccf8fe9872c11542d3ae2043ff88", [:make, :mix], [{:decorator, "~> 1.2.3", [hex: :decorator, repo: "hexpm", optional: false]}, {:hackney, "~> 1.6", [hex: :hackney, repo: "hexpm", optional: false]}, {:phoenix, ">= 1.2.0", [hex: :phoenix, repo: "hexpm", optional: true]}, {:plug, ">= 1.1.0", [hex: :plug, repo: "hexpm", optional: true]}, {:poison, ">= 1.3.0", [hex: :poison, repo: "hexpm", optional: false]}], "hexpm"},
   "artificery": {:hex, :artificery, "0.4.2", "3ded6e29e13113af52811c72f414d1e88f711410cac1b619ab3a2666bbd7efd4", [:mix], [], "hexpm"},
   "base64url": {:hex, :base64url, "0.0.1", "36a90125f5948e3afd7be97662a1504b934dd5dac78451ca6e9abf85a10286be", [:rebar], [], "hexpm"},
+  "bunt": {:hex, :bunt, "0.2.0", "951c6e801e8b1d2cbe58ebbd3e616a869061ddadcc4863d0a2182541acae9a38", [:mix], [], "hexpm"},
   "certifi": {:hex, :certifi, "2.5.1", "867ce347f7c7d78563450a18a6a28a8090331e77fa02380b4a21962a65d36ee5", [:rebar3], [{:parse_trans, "~>3.3", [hex: :parse_trans, repo: "hexpm", optional: false]}], "hexpm"},
   "combine": {:hex, :combine, "0.10.0", "eff8224eeb56498a2af13011d142c5e7997a80c8f5b97c499f84c841032e429f", [:mix], [], "hexpm"},
   "connection": {:hex, :connection, "1.0.4", "a1cae72211f0eef17705aaededacac3eb30e6625b04a6117c1b2db6ace7d5976", [:mix], [], "hexpm"},
   "corsica": {:hex, :corsica, "1.1.2", "5ad8b9dcbeeda4762d78a57c0c8c2f88e1eef8741508517c98cb79e0db1f107d", [:mix], [{:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm"},
   "cowboy": {:hex, :cowboy, "2.6.3", "99aa50e94e685557cad82e704457336a453d4abcb77839ad22dbe71f311fcc06", [:rebar3], [{:cowlib, "~> 2.7.3", [hex: :cowlib, repo: "hexpm", optional: false]}, {:ranch, "~> 1.7.1", [hex: :ranch, repo: "hexpm", optional: false]}], "hexpm"},
   "cowlib": {:hex, :cowlib, "2.7.3", "a7ffcd0917e6d50b4d5fb28e9e2085a0ceb3c97dea310505f7460ff5ed764ce9", [:rebar3], [], "hexpm"},
+  "credo": {:hex, :credo, "1.1.3", "bf31887b8914a4b7e1810ae2b5aab7c657698abbf4cca6a2335a094d57995168", [:mix], [{:bunt, "~> 0.2.0", [hex: :bunt, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm"},
   "crontab": {:hex, :crontab, "1.1.7", "b9219f0bdc8678b94143655a8f229716c5810c0636a4489f98c0956137e53985", [:mix], [{:ecto, "~> 1.0 or ~> 2.0 or ~> 3.0", [hex: :ecto, repo: "hexpm", optional: true]}], "hexpm"},
   "db_connection": {:hex, :db_connection, "2.1.0", "122e2f62c4906bf2e49554f1e64db5030c19229aa40935f33088e7d543aa79d0", [:mix], [{:connection, "~> 1.0.2", [hex: :connection, repo: "hexpm", optional: false]}], "hexpm"},
   "decimal": {:hex, :decimal, "1.8.0", "ca462e0d885f09a1c5a342dbd7c1dcf27ea63548c65a65e67334f4b61803822e", [:mix], [], "hexpm"},
@@ -14,6 +14,7 @@ defmodule BackendWeb.ChannelCase do
   """
 
   use ExUnit.CaseTemplate
+  alias Ecto.Adapters.SQL.Sandbox
 
   using do
     quote do
@@ -26,10 +27,10 @@ defmodule BackendWeb.ChannelCase do
   end
 
   setup tags do
-    :ok = Ecto.Adapters.SQL.Sandbox.checkout(Backend.Repo)
+    :ok = Sandbox.checkout(Backend.Repo)
 
     unless tags[:async] do
-      Ecto.Adapters.SQL.Sandbox.mode(Backend.Repo, {:shared, self()})
+      Sandbox.mode(Backend.Repo, {:shared, self()})
     end
 
     :ok
@@ -14,6 +14,7 @@ defmodule BackendWeb.ConnCase do
   """
 
   use ExUnit.CaseTemplate
+  alias Ecto.Adapters.SQL.Sandbox
 
   using do
     quote do
@@ -27,10 +28,10 @@ defmodule BackendWeb.ConnCase do
   end
 
   setup tags do
-    :ok = Ecto.Adapters.SQL.Sandbox.checkout(Backend.Repo)
+    :ok = Sandbox.checkout(Backend.Repo)
 
     unless tags[:async] do
-      Ecto.Adapters.SQL.Sandbox.mode(Backend.Repo, {:shared, self()})
+      Sandbox.mode(Backend.Repo, {:shared, self()})
     end
 
     {:ok, conn: Phoenix.ConnTest.build_conn()}
@@ -13,6 +13,7 @@ defmodule Backend.DataCase do
   """
 
   use ExUnit.CaseTemplate
+  alias Ecto.Adapters.SQL.Sandbox
 
   using do
     quote do
@@ -26,10 +27,10 @@ defmodule Backend.DataCase do
   end
 
   setup tags do
-    :ok = Ecto.Adapters.SQL.Sandbox.checkout(Backend.Repo)
+    :ok = Sandbox.checkout(Backend.Repo)
 
     unless tags[:async] do
-      Ecto.Adapters.SQL.Sandbox.mode(Backend.Repo, {:shared, self()})
+      Sandbox.mode(Backend.Repo, {:shared, self()})
     end
 
     :ok