improved edges

Tao Bojlén 2019-08-27 13:50:16 +00:00
parent 4d333dd14c
commit 693cf2b2d9
31 changed files with 622 additions and 166 deletions

View File

@@ -63,7 +63,9 @@ config :backend, :crawler,
   crawl_workers: 20,
   blacklist: [
     "gab.best",
-    "4chan.icu"
+    "4chan.icu",
+    "pleroma.site",
+    "pleroma.online"
   ],
   user_agent: "fediverse.space crawler",
   admin_phone: System.get_env("ADMIN_PHONE"),

View File

@@ -62,8 +62,4 @@ config :backend, :crawler,
   personal_instance_threshold: 5,
   crawl_interval_mins: 60,
   crawl_workers: 10,
-  blacklist: [
-    "gab.best",
-    "4chan.icu"
-  ],
   frontend_domain: "localhost:3000"

View File

@@ -6,6 +6,13 @@ defmodule Backend.Api do
   import Backend.Util
   import Ecto.Query

+  @spec get_instances(Integer.t() | nil) :: Scrivener.Page.t()
+  def get_instances(page \\ nil) do
+    Instance
+    |> where([i], not is_nil(i.type))
+    |> Repo.paginate(page: page)
+  end
+
   @spec get_instance(String.t()) :: Instance.t() | nil
   def get_instance(domain) do
     Instance
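For context, `Repo.paginate/2` comes from Scrivener (see the `use Scrivener, page_size: 20` change to Backend.Repo further down) and returns a `%Scrivener.Page{}` struct. A rough sketch of the shape the controller later destructures, with illustrative values:

%Scrivener.Page{
  entries: [],        # list of %Backend.Instance{} structs on this page
  page_number: 1,
  page_size: 20,
  total_entries: 0,
  total_pages: 1
}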

View File

@@ -26,25 +26,39 @@ defmodule Backend.Crawler.ApiCrawler do
     :peers,
     :interactions,
     :statuses_seen,
-    :instance_type
+    :instance_type,
+    :blocked_domains
   ]

   @type t() :: %__MODULE__{
-          version: String.t(),
-          description: String.t(),
+          version: String.t() | nil,
+          description: String.t() | nil,
           user_count: integer | nil,
           status_count: integer | nil,
           peers: [String.t()],
           interactions: instance_interactions,
           statuses_seen: integer,
-          instance_type: instance_type
+          instance_type: instance_type | nil,
+          blocked_domains: [String.t()]
         }

+  @empty_result %{
+    version: nil,
+    description: nil,
+    user_count: nil,
+    status_count: nil,
+    peers: [],
+    interactions: %{},
+    statuses_seen: 0,
+    instance_type: nil,
+    blocked_domains: []
+  }
+
   @doc """
   Check whether the instance at the given domain is of the type that this ApiCrawler implements.
   Arguments are the instance domain and the nodeinfo results.
   """
-  @callback is_instance_type?(String.t(), Nodeinfo.t()) :: boolean()
+  @callback is_instance_type?(String.t(), ApiCrawler.t()) :: boolean()

   @doc """
   Check whether the instance allows crawling according to its robots.txt or otherwise.
@@ -56,4 +70,11 @@ defmodule Backend.Crawler.ApiCrawler do
   Takes two arguments: the domain to crawl and the existing results (from nodeinfo).
   """
   @callback crawl(String.t(), Nodeinfo.t()) :: t()
+
+  @doc """
+  Returns the default, empty state
+  """
+  def get_default do
+    @empty_result
+  end
 end
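The idea is that every crawler now starts from the same empty result and merges in whatever it actually found, so missing fields stay nil/empty instead of being re-declared in each module. A minimal sketch of the pattern (hypothetical crawler, not code from this commit):

ApiCrawler.get_default()
|> Map.merge(%{
  instance_type: :example,
  user_count: 42,
  blocked_domains: ["spam.example"]
})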

View File

@@ -4,7 +4,17 @@ defmodule Backend.Crawler do
   """

   alias __MODULE__
-  alias Backend.{Crawl, CrawlInteraction, Instance, InstancePeer, MostRecentCrawl, Repo}
+
+  alias Backend.{
+    Crawl,
+    CrawlInteraction,
+    FederationRestriction,
+    Instance,
+    InstancePeer,
+    MostRecentCrawl,
+    Repo
+  }
+
   alias Backend.Crawler.ApiCrawler
   alias Backend.Crawler.Crawlers.{Friendica, GnuSocial, Mastodon, Misskey, Nodeinfo}
@@ -75,14 +85,24 @@ defmodule Backend.Crawler do
   # a) it should always be run first
   # b) it passes the results on to the next crawlers (e.g. user_count)
   defp crawl(%Crawler{api_crawlers: [Nodeinfo | remaining_crawlers], domain: domain} = state) do
-    with true <- Nodeinfo.allows_crawling?(domain), {:ok, nodeinfo} <- Nodeinfo.crawl(domain) do
-      Logger.debug("Found nodeinfo for #{domain}.")
-      result = Map.merge(nodeinfo, %{peers: [], interactions: %{}, statuses_seen: 0})
-      crawl(%Crawler{state | result: result, found_api?: true, api_crawlers: remaining_crawlers})
-    else
-      _ ->
+    if Nodeinfo.allows_crawling?(domain) do
+      nodeinfo = Nodeinfo.crawl(domain, nil)
+
+      if nodeinfo != nil do
+        Logger.debug("Found nodeinfo for #{domain}.")
+
+        crawl(%Crawler{
+          state
+          | result: nodeinfo,
+            found_api?: true,
+            api_crawlers: remaining_crawlers
+        })
+      else
         Logger.debug("Did not find nodeinfo for #{domain}.")
         crawl(%Crawler{state | api_crawlers: remaining_crawlers})
+      end
+    else
+      crawl(%Crawler{state | api_crawlers: remaining_crawlers, allows_crawling?: false})
     end
   end
@@ -165,7 +185,7 @@ defmodule Backend.Crawler do
     Elasticsearch.put_document!(Backend.Elasticsearch.Cluster, instance, "instances/_doc")

-    # Save details of a new crawl
+    ## Save details of a new crawl ##
     curr_crawl =
       Repo.insert!(%Crawl{
         instance_domain: domain,
@@ -196,18 +216,22 @@ defmodule Backend.Crawler do
       |> list_union(result.peers)
       |> Enum.filter(fn domain -> domain != nil and not is_blacklisted?(domain) end)
       |> Enum.map(&clean_domain(&1))
+      |> Enum.filter(fn peer_domain ->
+        if is_valid_domain?(peer_domain) do
+          true
+        else
+          Logger.info("Found invalid peer domain from #{domain}: #{peer_domain}")
+          false
+        end
+      end)

-    if not Enum.all?(peers_domains, &is_valid_domain?(&1)) do
-      invalid_peers = Enum.filter(peers_domains, fn d -> not is_valid_domain?(d) end)
-      raise "#{domain} has invalid peers: #{Enum.join(invalid_peers, ", ")}"
-    end
-
-    peers =
+    new_instances =
       peers_domains
+      |> list_union(result.blocked_domains)
       |> Enum.map(&%{domain: &1, inserted_at: now, updated_at: now, next_crawl: now})

     Instance
-    |> Repo.insert_all(peers, on_conflict: :nothing, conflict_target: :domain)
+    |> Repo.insert_all(new_instances, on_conflict: :nothing, conflict_target: :domain)

     Repo.transaction(fn ->
       ## Save peer relationships ##
@@ -249,6 +273,56 @@ defmodule Backend.Crawler do
       |> Repo.insert_all(new_instance_peers)
     end)

+    ## Save federation restrictions ##
+    Repo.transaction(fn ->
+      current_restrictions =
+        FederationRestriction
+        |> select([fr], {fr.target_domain, fr.type})
+        |> where(source_domain: ^domain)
+        |> Repo.all()
+
+      wanted_restrictions_set =
+        result.blocked_domains
+        |> Enum.map(&{&1, "reject"})
+        |> MapSet.new()
+
+      current_restrictions_set = MapSet.new(current_restrictions)
+
+      # Delete the ones we don't want
+      restrictions_to_delete =
+        current_restrictions_set
+        |> MapSet.difference(wanted_restrictions_set)
+        |> MapSet.to_list()
+        |> Enum.map(fn {target_domain, _type} -> target_domain end)
+
+      if length(restrictions_to_delete) > 0 do
+        FederationRestriction
+        |> where(
+          [fr],
+          fr.source_domain == ^domain and fr.target_domain in ^restrictions_to_delete
+        )
+        |> Repo.delete_all()
+      end
+
+      # Save the new ones
+      new_restrictions =
+        wanted_restrictions_set
+        |> MapSet.difference(current_restrictions_set)
+        |> MapSet.to_list()
+        |> Enum.map(fn {target_domain, type} ->
+          %{
+            source_domain: domain,
+            target_domain: target_domain,
+            type: type,
+            inserted_at: now,
+            updated_at: now
+          }
+        end)
+
+      FederationRestriction
+      |> Repo.insert_all(new_restrictions)
+    end)
+
     ## Save interactions ##
     interactions =
       result.interactions
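The restriction sync above is a two-way set difference: rows present in the database but no longer reported get deleted, and newly reported ones get inserted. A toy illustration with made-up domains:

current = MapSet.new([{"a.example", "reject"}, {"b.example", "reject"}])
wanted = MapSet.new([{"b.example", "reject"}, {"c.example", "reject"}])

MapSet.difference(current, wanted) |> MapSet.to_list()
# => [{"a.example", "reject"}]  (rows to delete)
MapSet.difference(wanted, current) |> MapSet.to_list()
# => [{"c.example", "reject"}]  (rows to insert)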

View File

@@ -62,12 +62,11 @@ defmodule Backend.Crawler.Crawlers.Friendica do
       end)

     if details |> Map.get(:user_count, 0) |> is_above_user_threshold?() do
-      Map.merge(
-        %{peers: peers, interactions: %{}, statuses_seen: 0, instance_type: :friendica},
-        Map.take(details, [:description, :version, :user_count, :status_count])
-      )
+      ApiCrawler.get_default()
+      |> Map.merge(%{peers: peers, instance_type: :friendica})
+      |> Map.merge(Map.take(details, [:description, :version, :user_count, :status_count]))
     else
-      nodeinfo_result
+      Map.merge(ApiCrawler.get_default(), nodeinfo_result)
     end
   end

View File

@@ -3,7 +3,6 @@ defmodule Backend.Crawler.Crawlers.GnuSocial do
   Crawler for GNU Social servers.
   """
   alias Backend.Crawler.ApiCrawler
-  alias Backend.Crawler.Crawlers.Nodeinfo
   import Backend.Crawler.Util
   import Backend.Util
   require Logger
@@ -32,17 +31,17 @@ defmodule Backend.Crawler.Crawlers.GnuSocial do
   end

   @impl ApiCrawler
-  def crawl(domain, nodeinfo_result) do
-    if nodeinfo_result == nil or
-         nodeinfo_result |> Map.get(:user_count) |> is_above_user_threshold?() do
-      crawl_large_instance(domain, nodeinfo_result)
+  def crawl(domain, nodeinfo) do
+    if nodeinfo == nil or
+         nodeinfo |> Map.get(:user_count) |> is_above_user_threshold?() do
+      Map.merge(crawl_large_instance(domain), nodeinfo)
     else
-      nodeinfo_result
+      Map.merge(ApiCrawler.get_default(), nodeinfo)
     end
   end

-  @spec crawl_large_instance(String.t(), Nodeinfo.t()) :: ApiCrawler.t()
-  defp crawl_large_instance(domain, nodeinfo_result) do
+  @spec crawl_large_instance(String.t()) :: ApiCrawler.t()
+  defp crawl_large_instance(domain) do
     status_datetime_threshold =
       NaiveDateTime.utc_now()
       |> NaiveDateTime.add(get_config(:status_age_limit_days) * 24 * 3600 * -1, :second)
@@ -52,24 +51,14 @@ defmodule Backend.Crawler.Crawlers.GnuSocial do
     {interactions, statuses_seen} = get_interactions(domain, min_timestamp)

-    if nodeinfo_result != nil do
-      Map.merge(nodeinfo_result, %{
-        interactions: interactions,
-        statuses_seen: statuses_seen,
-        peers: []
-      })
-    else
+    Map.merge(
+      ApiCrawler.get_default(),
       %{
-        version: nil,
-        description: nil,
-        user_count: nil,
-        status_count: nil,
-        peers: [],
         interactions: interactions,
         statuses_seen: statuses_seen,
         instance_type: :gnusocial
       }
-    end
+    )
   end

   @spec get_interactions(

View File

@@ -34,26 +34,19 @@ defmodule Backend.Crawler.Crawlers.Mastodon do
   end

   @impl ApiCrawler
-  def crawl(domain, _current_result) do
+  def crawl(domain, nodeinfo) do
     instance = get_and_decode!("https://#{domain}/api/v1/instance")
     user_count = get_in(instance, ["stats", "user_count"])

     if is_above_user_threshold?(user_count) or has_opted_in?(domain) do
-      crawl_large_instance(domain, instance)
+      Map.merge(nodeinfo, crawl_large_instance(domain, instance))
     else
-      Map.merge(
-        Map.take(instance["stats"], ["user_count"])
-        |> convert_keys_to_atoms(),
-        %{
-          instance_type: get_instance_type(instance),
-          peers: [],
-          interactions: %{},
-          statuses_seen: 0,
-          description: nil,
-          version: nil,
-          status_count: nil
-        }
-      )
+      ApiCrawler.get_default()
+      |> Map.merge(nodeinfo)
+      |> Map.merge(%{
+        instance_type: get_instance_type(instance),
+        user_count: get_in(instance, ["stats", "user_count"])
+      })
     end
   end

View File

@@ -35,22 +35,18 @@ defmodule Backend.Crawler.Crawlers.Misskey do
   end

   @impl ApiCrawler
-  def crawl(domain, _result) do
+  def crawl(domain, nodeinfo) do
     with {:ok, %{"originalUsersCount" => user_count, "originalNotesCount" => status_count}} <-
            post_and_decode("https://#{domain}/api/stats") do
       if is_above_user_threshold?(user_count) or has_opted_in?(domain) do
-        crawl_large_instance(domain, user_count, status_count)
+        Map.merge(nodeinfo, crawl_large_instance(domain, user_count, status_count))
       else
-        %{
-          instance_type: :misskey,
-          version: nil,
-          description: nil,
+        ApiCrawler.get_default()
+        |> Map.merge(nodeinfo)
+        |> Map.merge(%{
           user_count: user_count,
-          status_count: nil,
-          peers: [],
-          interactions: %{},
-          statuses_seen: 0
-        }
+          type: :misskey
+        })
       end
     end
   end

View File

@@ -1,34 +1,16 @@
 defmodule Backend.Crawler.Crawlers.Nodeinfo do
   @moduledoc """
-  This module is slightly different from the other crawlers.
-  It doesn't implement the ApiCrawler spec because it isn't run as a self-contained crawler.
-  Instead, it's run before all the other crawlers.
-
-  This is to get the user count. Some servers don't publish this in other places (e.g. GNU Social, PeerTube) so we need
-  nodeinfo to know whether it's a personal instance or not.
+  This module is slightly different from the other crawlers. It's run before all the others and its
+  result is included in theirs.
   """
   alias Backend.Crawler.ApiCrawler
   require Logger
   import Backend.Util
   import Backend.Crawler.Util

-  defstruct [
-    :description,
-    :user_count,
-    :status_count,
-    :instance_type,
-    :version
-  ]
-
-  @type t() :: %__MODULE__{
-          description: String.t(),
-          user_count: integer,
-          status_count: integer,
-          instance_type: ApiCrawler.instance_type(),
-          version: String.t()
-        }
-
-  @spec allows_crawling?(String.t()) :: boolean()
+  @behaviour ApiCrawler
+
+  @impl ApiCrawler
   def allows_crawling?(domain) do
     [
       ".well-known/nodeinfo"
@@ -37,13 +19,19 @@ defmodule Backend.Crawler.Crawlers.Nodeinfo do
     |> urls_are_crawlable?()
   end

-  @spec crawl(String.t()) :: {:ok, t()} | {:error, nil}
-  def crawl(domain) do
+  @impl ApiCrawler
+  def is_instance_type?(_domain, _nodeinfo) do
+    # This crawler is used slightly differently from the others -- we always check for nodeinfo.
+    true
+  end
+
+  @impl ApiCrawler
+  def crawl(domain, _curr_result) do
     with {:ok, nodeinfo_url} <- get_nodeinfo_url(domain),
          {:ok, nodeinfo} <- get_nodeinfo(nodeinfo_url) do
-      {:ok, nodeinfo}
+      nodeinfo
     else
-      _other -> {:error, nil}
+      _other -> ApiCrawler.get_default()
     end
   end
@@ -65,8 +53,7 @@ defmodule Backend.Crawler.Crawlers.Nodeinfo do
     |> Map.get("href")
   end

-  @spec get_nodeinfo(String.t()) ::
-          {:ok, t()} | {:error, Jason.DecodeError.t() | HTTPoison.Error.t()}
+  @spec get_nodeinfo(String.t()) :: ApiCrawler.t()
   defp get_nodeinfo(nodeinfo_url) do
     case get_and_decode(nodeinfo_url) do
       {:ok, nodeinfo} -> {:ok, process_nodeinfo(nodeinfo)}
@@ -74,7 +61,7 @@ defmodule Backend.Crawler.Crawlers.Nodeinfo do
     end
   end

-  @spec process_nodeinfo(any()) :: t()
+  @spec process_nodeinfo(any()) :: ApiCrawler.t()
   defp process_nodeinfo(nodeinfo) do
     user_count = get_in(nodeinfo, ["usage", "users", "total"])
@@ -90,21 +77,33 @@ defmodule Backend.Crawler.Crawlers.Nodeinfo do
       type = nodeinfo |> get_in(["software", "name"]) |> String.downcase() |> String.to_atom()

-      %__MODULE__{
-        description: description,
-        user_count: user_count,
-        status_count: get_in(nodeinfo, ["usage", "localPosts"]),
-        instance_type: type,
-        version: get_in(nodeinfo, ["software", "version"])
-      }
+      Map.merge(
+        ApiCrawler.get_default(),
+        %{
+          description: description,
+          user_count: user_count,
+          status_count: get_in(nodeinfo, ["usage", "localPosts"]),
+          instance_type: type,
+          version: get_in(nodeinfo, ["software", "version"]),
+          blocked_domains:
+            get_in(nodeinfo, ["metadata", "federation", "mrf_simple", "reject"])
+            |> (fn b ->
+                  if b == nil do
+                    []
+                  else
+                    b
+                  end
+                end).()
+            |> Enum.map(&clean_domain(&1))
+        }
+      )
     else
-      %{
-        description: nil,
-        user_count: user_count,
-        status_count: nil,
-        instance_type: nil,
-        version: nil
-      }
+      Map.merge(
+        ApiCrawler.get_default(),
+        %{
+          user_count: user_count
+        }
+      )
     end
   end
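For reference, a sketch of the kind of nodeinfo fragment process_nodeinfo/1 reads blocked domains from (Pleroma-style metadata; shape assumed, values hypothetical):

nodeinfo = %{
  "software" => %{"name" => "pleroma", "version" => "1.0.7"},
  "usage" => %{"users" => %{"total" => 120}, "localPosts" => 4520},
  "metadata" => %{
    "federation" => %{"mrf_simple" => %{"reject" => ["spam.example", "abuse.example"]}}
  }
}

get_in(nodeinfo, ["metadata", "federation", "mrf_simple", "reject"])
# => ["spam.example", "abuse.example"]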

View File

@@ -0,0 +1,28 @@
defmodule Backend.FederationRestriction do
@moduledoc false
use Ecto.Schema
import Ecto.Changeset
schema "federation_restrictions" do
belongs_to :source, Backend.Instance,
references: :domain,
type: :string,
foreign_key: :source_domain
belongs_to :target, Backend.Instance,
references: :domain,
type: :string,
foreign_key: :target_domain
field :type, :string
timestamps()
end
@doc false
def changeset(federation_restriction, attrs) do
federation_restriction
|> cast(attrs, [:source, :target, :type])
|> validate_required([:source, :target, :type])
end
end
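With this schema in place, restrictions can be queried from either direction. For example, a hypothetical Ecto query (not part of this commit) listing every instance that rejects a given domain:

import Ecto.Query

alias Backend.{FederationRestriction, Repo}

FederationRestriction
|> where([fr], fr.target_domain == ^"example.social" and fr.type == "reject")
|> select([fr], fr.source_domain)
|> Repo.all()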

View File

@@ -4,7 +4,7 @@ defmodule Backend.Repo do
     adapter: Ecto.Adapters.Postgres,
     timeout: 25_000

-  use Paginator
+  use Scrivener, page_size: 20

   def init(_type, config) do
     {:ok, Keyword.put(config, :url, System.get_env("DATABASE_URL"))}

View File

@@ -3,10 +3,9 @@ defmodule Backend.Scheduler do
   This module runs recurring tasks.
   """

-  use Appsignal.Instrumentation.Decorators
   use Quantum.Scheduler, otp_app: :backend

-  alias Backend.{Crawl, CrawlInteraction, Edge, Instance, Repo}
+  alias Backend.{Crawl, CrawlInteraction, Edge, FederationRestriction, Instance, Repo}
   alias Backend.Mailer.AdminEmail

   import Backend.Util
@@ -21,7 +20,6 @@
   `unit` must singular, e.g. "second", "minute", "hour", "month", "year", etc...
   """
   @spec prune_crawls(integer, String.t()) :: any
-  @decorate transaction()
   def prune_crawls(amount, unit) do
     {deleted_num, _} =
       Crawl
@@ -39,7 +37,6 @@
   Calculates every instance's "insularity score" -- that is, the percentage of mentions that are among users on the
   instance, rather than at other instances.
   """
-  @decorate transaction()
   def generate_insularity_scores do
     now = get_now()
@@ -85,7 +82,6 @@
   @doc """
   This function calculates the average number of statuses per hour over the last month.
   """
-  @decorate transaction()
   def generate_status_rate do
     now = get_now()
     # We want the earliest sucessful crawl so that we can exclude it from the statistics.
@@ -143,9 +139,11 @@
   @doc """
   This function aggregates statistics from the interactions in the database.
   It calculates the strength of edges between nodes. Self-edges are not generated.
-  Edges are only generated if both instances have been succesfully crawled.
+  Edges are only generated if
+  * both instances have been succesfully crawled
+  * neither of the instances have blocked each other
+  * there are interactions in each direction
   """
-  @decorate transaction()
   def generate_edges do
     now = get_now()
@@ -177,15 +175,30 @@
       })
       |> Repo.all(timeout: :infinity)

+    federation_blocks =
+      FederationRestriction
+      |> select([fr], {fr.source_domain, fr.target_domain})
+      |> where([fr], fr.type == "reject")
+      |> Repo.all()
+      |> MapSet.new()
+
     # Get edges and their weights
     Repo.transaction(
       fn ->
         Edge
         |> Repo.delete_all(timeout: :infinity)

-        edges =
+        mentions =
           interactions
-          |> reduce_mention_count()
+          |> reduce_mention_count(federation_blocks)
+
+        # Filter down to mentions where there are interactions in both directions
+        filtered_mentions =
+          mentions
+          |> Enum.filter(&has_opposite_mention?(&1, mentions))
+
+        edges =
+          filtered_mentions
           |> Enum.map(fn {{source_domain, target_domain}, {mention_count, statuses_seen}} ->
             %{
               source_domain: source_domain,
@@ -207,7 +220,6 @@
   This function checks to see if a lot of instances on the same base domain have been created recently. If so,
   notifies the server admin over SMS.
   """
-  @decorate transaction()
   def check_for_spam_instances do
     hour_range = 3
@@ -254,10 +266,9 @@
     end
   end

-  # Takes a list of Interactions
+  # Takes a list of Interactions and a MapSet of blocks in the form {source_domain, target_domain}
   # Returns a map of %{{source, target} => {total_mention_count, total_statuses_seen}}
-  @decorate transaction_event()
-  defp reduce_mention_count(interactions) do
+  defp reduce_mention_count(interactions, federation_blocks) do
     Enum.reduce(interactions, %{}, fn
       %{
         source_domain: source_domain,
@@ -278,9 +289,46 @@
         statuses_seen = source_statuses_seen + target_statuses_seen

-        Map.update(acc, key, {mentions, statuses_seen}, fn {curr_mentions, curr_statuses_seen} ->
-          {curr_mentions + mentions, curr_statuses_seen}
-        end)
+        maybe_update_map(
+          acc,
+          key,
+          source_domain,
+          target_domain,
+          mentions,
+          statuses_seen,
+          federation_blocks
+        )
     end)
   end
+
+  defp maybe_update_map(
+         acc,
+         key,
+         source_domain,
+         target_domain,
+         mentions,
+         statuses_seen,
+         federation_blocks
+       ) do
+    if not MapSet.member?(federation_blocks, {source_domain, target_domain}) and
+         not MapSet.member?(federation_blocks, {target_domain, source_domain}) do
+      Map.update(acc, key, {mentions, statuses_seen}, fn {curr_mentions, curr_statuses_seen} ->
+        {curr_mentions + mentions, curr_statuses_seen}
+      end)
+    else
+      # Blocked in either direction: return the accumulator unchanged instead of nil,
+      # so the surrounding Enum.reduce keeps a valid map.
+      acc
+    end
+  end
+
+  defp has_opposite_mention?(mention, all_mentions) do
+    {{source_domain, target_domain}, {mention_count, _statuses_seen}} = mention
+    other_direction_key = {target_domain, source_domain}
+
+    if mention_count > 0 and Map.has_key?(all_mentions, other_direction_key) do
+      {other_direction_mentions, _other_statuses_seen} = Map.get(all_mentions, other_direction_key)
+
+      other_direction_mentions > 0
+    else
+      false
+    end
+  end
 end
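Concretely, with the made-up mention counts below no a.example <-> b.example edge is generated: the reverse direction has zero mentions, and a "reject" restriction in either direction would likewise drop the pair before it reaches the edge list.

mentions = %{
  {"a.example", "b.example"} => {3, 100},
  {"b.example", "a.example"} => {0, 80}
}
# has_opposite_mention?({{"a.example", "b.example"}, {3, 100}}, mentions) returns false,
# so the pair is filtered out of filtered_mentions.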

View File

@@ -128,6 +128,7 @@ defmodule Backend.Util do
     end
   end

+  @spec clean_domain(String.t()) :: String.t()
   def clean_domain(domain) do
     cleaned =
       domain
@@ -136,7 +137,7 @@
       |> String.trim()
       |> String.downcase()

-    Regex.replace(~r/:\d+/, cleaned, "")
+    Regex.replace(~r/(:\d+|\.)$/, cleaned, "")
   end

   def get_account(username, domain) do
@@ -209,6 +210,6 @@
   @spec is_valid_domain?(String.t()) :: boolean
   def is_valid_domain?(domain) do
-    Regex.match?(~r/^[\w\.\-_]+$/, domain)
+    Regex.match?(~r/^[\pL\d\.\-_]+\.[a-zA-Z]+$/, domain)
   end
 end
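A quick illustration of the tightened helpers with hypothetical inputs: cleaning now also strips a trailing dot (and only strips a port when it sits at the end), and validation requires at least one dot followed by an alphabetic TLD.

Regex.replace(~r/(:\d+|\.)$/, "mastodon.social.", "")  # => "mastodon.social"
Regex.replace(~r/(:\d+|\.)$/, "example.com:8080", "")  # => "example.com"
Regex.match?(~r/^[\pL\d\.\-_]+\.[a-zA-Z]+$/, "social.example")  # => true
Regex.match?(~r/^[\pL\d\.\-_]+\.[a-zA-Z]+$/, "localhost")       # => false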

View File

@@ -1,9 +1,30 @@
 defmodule BackendWeb.InstanceController do
   use BackendWeb, :controller

+  alias Backend.Api
   alias Graph.Cache

   action_fallback(BackendWeb.FallbackController)

+  def index(conn, params) do
+    page = Map.get(params, "page")
+
+    %{
+      entries: instances,
+      total_pages: total_pages,
+      page_number: page_number,
+      total_entries: total_entries,
+      page_size: page_size
+    } = Api.get_instances(page)
+
+    render(conn, "index.json",
+      instances: instances,
+      total_pages: total_pages,
+      page_number: page_number,
+      total_entries: total_entries,
+      page_size: page_size
+    )
+  end
+
   def show(conn, %{"id" => domain}) do
     instance = Cache.get_instance_with_peers(domain)

View File

@@ -8,7 +8,7 @@ defmodule BackendWeb.Router do
   scope "/api", BackendWeb do
     pipe_through(:api)

-    resources("/instances", InstanceController, only: [:show])
+    resources("/instances", InstanceController, only: [:index, :show])
     resources("/graph", GraphController, only: [:index, :show])
     resources("/search", SearchController, only: [:index])

View File

@@ -3,6 +3,40 @@ defmodule BackendWeb.InstanceView do
   alias BackendWeb.InstanceView
   import Backend.Util

+  def render("index.json", %{
+        instances: instances,
+        total_pages: total_pages,
+        page_number: page_number,
+        total_entries: total_entries,
+        page_size: page_size
+      }) do
+    %{
+      instances: render_many(instances, InstanceView, "index_instance.json"),
+      pageNumber: page_number,
+      totalPages: total_pages,
+      totalEntries: total_entries,
+      pageSize: page_size
+    }
+  end
+
+  @doc """
+  Used when rendering the index of all instances (the different from show.json is primarily that it does not
+  include peers).
+  """
+  def render("index_instance.json", %{instance: instance}) do
+    %{
+      name: instance.domain,
+      description: instance.description,
+      version: instance.version,
+      userCount: instance.user_count,
+      insularity: instance.insularity,
+      statusCount: instance.status_count,
+      type: instance.type,
+      statusesPerDay: instance.statuses_per_day,
+      statusesPerUserPerDay: get_statuses_per_user_per_day(instance)
+    }
+  end
+
   def render("show.json", %{instance: instance, crawl: crawl}) do
     user_threshold = get_config(:personal_instance_threshold)
@@ -21,7 +55,7 @@
     end
   end

-  def render("instance.json", %{instance: instance}) do
+  def render("peer.json", %{instance: instance}) do
     %{name: instance.domain}
   end
@@ -46,14 +80,6 @@
       instance.peers
       |> Enum.filter(fn peer -> not peer.opt_out end)

-    statuses_per_user_per_day =
-      if instance.statuses_per_day != nil and instance.user_count != nil and
-           instance.user_count > 0 do
-        instance.statuses_per_day / instance.user_count
-      else
-        nil
-      end
-
     %{
       name: instance.domain,
       description: instance.description,
@@ -62,12 +88,21 @@
       insularity: instance.insularity,
       statusCount: instance.status_count,
       domainCount: length(instance.peers),
-      peers: render_many(filtered_peers, InstanceView, "instance.json"),
+      peers: render_many(filtered_peers, InstanceView, "peer.json"),
       lastUpdated: last_updated,
       status: "success",
       type: instance.type,
       statusesPerDay: instance.statuses_per_day,
-      statusesPerUserPerDay: statuses_per_user_per_day
+      statusesPerUserPerDay: get_statuses_per_user_per_day(instance)
     }
   end
+
+  defp get_statuses_per_user_per_day(instance) do
+    if instance.statuses_per_day != nil and instance.user_count != nil and
+         instance.user_count > 0 do
+      instance.statuses_per_day / instance.user_count
+    else
+      nil
+    end
+  end
 end

View File

@@ -56,7 +56,6 @@ defmodule Backend.MixProject do
       {:corsica, "~> 1.1.2"},
       {:sobelow, "~> 0.8", only: [:dev, :test]},
      {:gollum, "~> 0.3.2"},
-      {:paginator, "~> 0.6.0"},
       {:public_suffix, "~> 0.6.0"},
       {:idna, "~> 5.1.2", override: true},
       {:swoosh, "~> 0.23.3"},
@@ -66,7 +65,8 @@
       {:credo, "~> 1.1", only: [:dev, :test], runtime: false},
       {:nebulex, "~> 1.1"},
       {:hunter, "~> 0.5.1"},
-      {:poison, "~> 4.0", override: true}
+      {:poison, "~> 4.0", override: true},
+      {:scrivener_ecto, "~> 2.2"}
     ]
   end

View File

@@ -50,6 +50,8 @@
   "public_suffix": {:hex, :public_suffix, "0.6.0", "100cfe86f13f9f6f0cf67e743b1b83c78dd1223a2c422fa03ebf4adff514cbc3", [:mix], [{:idna, ">= 1.2.0 and < 6.0.0", [hex: :idna, repo: "hexpm", optional: false]}], "hexpm"},
   "quantum": {:hex, :quantum, "2.3.4", "72a0e8855e2adc101459eac8454787cb74ab4169de6ca50f670e72142d4960e9", [:mix], [{:calendar, "~> 0.17", [hex: :calendar, repo: "hexpm", optional: true]}, {:crontab, "~> 1.1", [hex: :crontab, repo: "hexpm", optional: false]}, {:gen_stage, "~> 0.12", [hex: :gen_stage, repo: "hexpm", optional: false]}, {:swarm, "~> 3.3", [hex: :swarm, repo: "hexpm", optional: false]}, {:timex, "~> 3.1", [hex: :timex, repo: "hexpm", optional: true]}], "hexpm"},
   "ranch": {:hex, :ranch, "1.7.1", "6b1fab51b49196860b733a49c07604465a47bdb78aa10c1c16a3d199f7f8c881", [:rebar3], [], "hexpm"},
+  "scrivener": {:hex, :scrivener, "2.7.0", "fa94cdea21fad0649921d8066b1833d18d296217bfdf4a5389a2f45ee857b773", [:mix], [], "hexpm"},
+  "scrivener_ecto": {:hex, :scrivener_ecto, "2.2.0", "53d5f1ba28f35f17891cf526ee102f8f225b7024d1cdaf8984875467158c9c5e", [:mix], [{:ecto, "~> 3.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:scrivener, "~> 2.4", [hex: :scrivener, repo: "hexpm", optional: false]}], "hexpm"},
   "shards": {:hex, :shards, "0.6.0", "678d292ad74a4598a872930f9b12251f43e97f6050287f1fb712fbfd3d282f75", [:make, :rebar3], [], "hexpm"},
   "sobelow": {:hex, :sobelow, "0.8.0", "a3ec73e546dfde19f14818e5000c418e3f305d9edb070e79dd391de0ae1cd1ea", [:mix], [], "hexpm"},
   "ssl_verify_fun": {:hex, :ssl_verify_fun, "1.1.4", "f0eafff810d2041e93f915ef59899c923f4568f4585904d010387ed74988e77b", [:make, :mix, :rebar3], [], "hexpm"},

View File

@@ -0,0 +1,22 @@
defmodule Backend.Repo.Migrations.CreateFederationRestrictions do
use Ecto.Migration
def change do
create table(:federation_restrictions) do
add :source_domain,
references(:instances, column: :domain, type: :string, on_delete: :delete_all),
null: false
add :target_domain,
references(:instances, column: :domain, type: :string, on_delete: :delete_all),
null: false
add :type, :string, null: false
timestamps()
end
create index(:federation_restrictions, [:source_domain])
create index(:federation_restrictions, [:target_domain])
end
end

View File

@@ -5,13 +5,21 @@ import { Classes } from "@blueprintjs/core";
 import { ConnectedRouter } from "connected-react-router";
 import { Route } from "react-router-dom";
 import { Nav } from "./components/organisms/";
-import { AboutScreen, AdminScreen, GraphScreen, LoginScreen, VerifyLoginScreen } from "./components/screens/";
+import {
+  AboutScreen,
+  AdminScreen,
+  GraphScreen,
+  LoginScreen,
+  TableScreen,
+  VerifyLoginScreen
+} from "./components/screens/";
 import { history } from "./index";

 const AppRouter: React.FC = () => (
   <ConnectedRouter history={history}>
     <div className={`${Classes.DARK} App`}>
       <Nav />
+      <Route path="/instances" exact={true} component={TableScreen} />
       <Route path="/about" exact={true} component={AboutScreen} />
       <Route path="/admin/login" exact={true} component={LoginScreen} />
       <Route path="/admin/verify" exact={true} component={VerifyLoginScreen} />

View File

@@ -111,7 +111,7 @@ const mapStateToProps = (state: IAppState) => {
   const match = domainMatchSelector(state);
   return {
     currentInstanceName: match && match.params.domain,
-    graphLoadError: state.data.error,
+    graphLoadError: state.data.graphLoadError,
     graphResponse: state.data.graphResponse,
     hoveringOverResult: state.search.hoveringOverResult,
     isLoadingGraph: state.data.isLoadingGraph,

View File

@@ -0,0 +1,127 @@
import { Button, ButtonGroup, Code, HTMLTable, Intent, NonIdealState, Spinner } from "@blueprintjs/core";
import { push } from "connected-react-router";
import { range } from "lodash";
import * as numeral from "numeral";
import React from "react";
import { connect } from "react-redux";
import { Dispatch } from "redux";
import styled from "styled-components";
import { loadInstanceList } from "../../redux/actions";
import { IAppState, IInstanceListResponse } from "../../redux/types";
import { InstanceType } from "../atoms";
import { ErrorState } from "../molecules";
const StyledTable = styled(HTMLTable)`
width: 100%;
`;
const PaginationContainer = styled.div`
margin-top: 20px;
display: flex;
flex-direction: column;
flex: 1;
align-items: center;
`;
interface IInstanceTableProps {
loadError: boolean;
instancesResponse?: IInstanceListResponse;
isLoading: boolean;
fetchInstances: (page?: number) => void;
navigate: (path: string) => void;
}
class InstanceTable extends React.PureComponent<IInstanceTableProps> {
public componentDidMount() {
const { isLoading, instancesResponse, loadError } = this.props;
if (!isLoading && !instancesResponse && !loadError) {
this.props.fetchInstances();
}
}
public render() {
const { isLoading, instancesResponse, loadError } = this.props;
if (loadError) {
return <ErrorState />;
} else if (isLoading || !instancesResponse) {
return <NonIdealState icon={<Spinner />} />;
}
const { instances, pageNumber, totalPages, totalEntries, pageSize } = instancesResponse!;
return (
<>
<StyledTable striped={true} bordered={true} interactive={true}>
<thead>
<tr>
<th>Instance</th>
<th>Server type</th>
<th>Version</th>
<th>Users</th>
<th>Statuses</th>
<th>Insularity</th>
</tr>
</thead>
<tbody>
{instances.map(i => (
<tr key={i.name} onClick={this.goToInstanceFactory(i.name)}>
<td>{i.name}</td>
<td>{i.type && <InstanceType type={i.type} />}</td>
<td>{i.version && <Code>{i.version}</Code>}</td>
<td>{i.userCount}</td>
<td>{i.statusCount}</td>
<td>{i.insularity && numeral.default(i.insularity).format("0.0%")}</td>
</tr>
))}
</tbody>
</StyledTable>
<PaginationContainer>
<p>
Showing {(pageNumber - 1) * pageSize + 1}-{Math.min(pageNumber * pageSize, totalEntries)} of {totalEntries}{" "}
known instances
</p>
<ButtonGroup>
{range(totalPages).map(n => {
const isCurrentPage = pageNumber === n + 1;
return (
<Button
key={n}
onClick={this.loadPageFactory(n + 1)}
disabled={isCurrentPage}
intent={isCurrentPage ? Intent.PRIMARY : undefined}
>
{n + 1}
</Button>
);
})}
</ButtonGroup>
</PaginationContainer>
</>
);
}
private loadPageFactory = (page: number) => () => {
this.props.fetchInstances(page);
};
private goToInstanceFactory = (domain: string) => () => {
this.props.navigate(`/instance/${domain}`);
};
}
const mapStateToProps = (state: IAppState) => {
return {
instancesResponse: state.data.instancesResponse,
isLoading: state.data.isLoadingInstanceList,
loadError: state.data.instanceListLoadError
};
};
const mapDispatchToProps = (dispatch: Dispatch) => ({
fetchInstances: (page?: number) => dispatch(loadInstanceList(page) as any),
navigate: (path: string) => dispatch(push(path))
});
export default connect(
mapStateToProps,
mapDispatchToProps
)(InstanceTable);

View File

@@ -11,7 +11,7 @@ interface INavState {
   aboutIsOpen: boolean;
 }

-const linkIsActive = (currMatch: match<IInstanceDomainPath>, location: Location) => {
+const graphIsActive = (currMatch: match<IInstanceDomainPath>, location: Location) => {
   return location.pathname === "/" || location.pathname.startsWith("/instance/");
 };
@@ -31,10 +31,17 @@ class Nav extends React.Component<{}, INavState> {
             to="/"
             className={`${Classes.BUTTON} ${Classes.MINIMAL} bp3-icon-${IconNames.GLOBE_NETWORK}`}
             activeClassName={Classes.INTENT_PRIMARY}
-            isActive={linkIsActive as any}
+            isActive={graphIsActive as any}
           >
             Home
           </NavLink>
+          <NavLink
+            to="/instances"
+            className={`${Classes.BUTTON} ${Classes.MINIMAL} bp3-icon-${IconNames.TH}`}
+            activeClassName={Classes.INTENT_PRIMARY}
+          >
+            Instances
+          </NavLink>
           <NavLink
             to="/about"
             className={`${Classes.BUTTON} ${Classes.MINIMAL} bp3-icon-${IconNames.INFO_SIGN}`}

View File

@@ -2,3 +2,4 @@ export { default as Graph } from "./Graph";
 export { default as Nav } from "./Nav";
 export { default as SidebarContainer } from "./SidebarContainer";
 export { default as SearchFilters } from "./SearchFilters";
+export { default as InstanceTable } from "./InstanceTable";

View File

@@ -98,7 +98,7 @@ const mapStateToProps = (state: IAppState) => {
   const match = domainMatchSelector(state);
   return {
     currentInstanceName: match && match.params.domain,
-    graphLoadError: state.data.error,
+    graphLoadError: state.data.graphLoadError,
     pathname: state.router.location.pathname
   };
 };

View File

@@ -0,0 +1,17 @@
import { H1 } from "@blueprintjs/core";
import React from "react";
import { Page } from "../atoms";
import { InstanceTable } from "../organisms";
class TableScreen extends React.PureComponent {
public render() {
return (
<Page>
<H1>{"Instances"}</H1>
<InstanceTable />
</Page>
);
}
}
export default TableScreen;

View File

@@ -5,3 +5,4 @@ export { default as InstanceScreen } from "./InstanceScreen";
 export { default as AdminScreen } from "./AdminScreen";
 export { default as LoginScreen } from "./LoginScreen";
 export { default as VerifyLoginScreen } from "./VerifyLoginScreen";
+export { default as TableScreen } from "./TableScreen";

View File

@@ -48,6 +48,18 @@ const graphLoadFailed = () => {
   };
 };

+// Instance list
+const requestInstanceList = () => ({
+  type: ActionType.REQUEST_INSTANCES
+});
+const receiveInstanceList = (instances: IInstanceDetails[]) => ({
+  payload: instances,
+  type: ActionType.RECEIVE_INSTANCES
+});
+const instanceListLoadFailed = () => ({
+  type: ActionType.INSTANCE_LIST_LOAD_ERROR
+});
+
 // Search
 const requestSearchResult = (query: string, filters: ISearchFilter[]) => {
   return {
@@ -138,3 +150,17 @@ export const fetchGraph = () => {
       .catch(() => dispatch(graphLoadFailed()));
   };
 };
+
+export const loadInstanceList = (page?: number) => {
+  return (dispatch: Dispatch) => {
+    dispatch(requestInstanceList());
+    let params = "";
+    if (!!page) {
+      params += `page=${page}`;
+    }
+    const path = !!params ? `instances?${params}` : "instances";
+    return getFromApi(path)
+      .then(instancesListResponse => dispatch(receiveInstanceList(instancesListResponse)))
+      .catch(() => dispatch(instanceListLoadFailed()));
+  };
+};

View File

@@ -5,9 +5,11 @@ import { combineReducers } from "redux";
 import { History } from "history";
 import { ActionType, IAction, ICurrentInstanceState, IDataState, ISearchState } from "./types";

-const initialDataState = {
-  error: false,
-  isLoadingGraph: false
+const initialDataState: IDataState = {
+  graphLoadError: false,
+  instanceListLoadError: false,
+  isLoadingGraph: false,
+  isLoadingInstanceList: false
 };
 const data = (state: IDataState = initialDataState, action: IAction): IDataState => {
   switch (action.type) {
@@ -26,9 +28,28 @@ const data = (state: IDataState = initialDataState, action: IAction): IDataState
     case ActionType.GRAPH_LOAD_ERROR:
       return {
         ...state,
-        error: true,
+        graphLoadError: true,
         isLoadingGraph: false
       };
+    case ActionType.REQUEST_INSTANCES:
+      return {
+        ...state,
+        instanceListLoadError: false,
+        instancesResponse: undefined,
+        isLoadingInstanceList: true
+      };
+    case ActionType.RECEIVE_INSTANCES:
+      return {
+        ...state,
+        instancesResponse: action.payload,
+        isLoadingInstanceList: false
+      };
+    case ActionType.INSTANCE_LIST_LOAD_ERROR:
+      return {
+        ...state,
+        instanceListLoadError: true,
+        isLoadingInstanceList: false
+      };
     default:
       return state;
   }

View File

@@ -10,6 +10,10 @@ export enum ActionType {
   REQUEST_GRAPH = "REQUEST_GRAPH",
   RECEIVE_GRAPH = "RECEIVE_GRAPH",
   GRAPH_LOAD_ERROR = "GRAPH_LOAD_ERROR",
+  // Instance list
+  REQUEST_INSTANCES = "REQUEST_INSTANCES",
+  RECEIVE_INSTANCES = "RECEIVE_INSTANCES",
+  INSTANCE_LIST_LOAD_ERROR = "INSTANCE_LIST_LOAD_ERROR",
   // Nav
   DESELECT_INSTANCE = "DESELECT_INSTANCE",
   // Search
@@ -26,7 +30,7 @@ export interface IAction {
   payload: any;
 }

-export interface IInstance {
+export interface IPeer {
   name: string;
 }
@@ -45,7 +49,7 @@ export interface IInstanceDetails {
   insularity?: number;
   statusCount?: number;
   domainCount?: number;
-  peers?: IInstance[];
+  peers?: IPeer[];
   lastUpdated?: string;
   status: string;
   type?: string;
@@ -93,6 +97,14 @@ export interface ISearchResponse {
   next: string | null;
 }

+export interface IInstanceListResponse {
+  pageNumber: number;
+  totalPages: number;
+  totalEntries: number;
+  pageSize: number;
+  instances: IInstanceDetails[];
+}
+
 // Redux state

 // The current instance name is stored in the URL. See state -> router -> location
@@ -104,8 +116,11 @@ export interface ICurrentInstanceState {
 export interface IDataState {
   graphResponse?: IGraphResponse;
+  instancesResponse?: IInstanceListResponse;
   isLoadingGraph: boolean;
-  error: boolean;
+  isLoadingInstanceList: boolean;
+  graphLoadError: boolean;
+  instanceListLoadError: boolean;
 }

 export interface ISearchState {