add caching to expensive queries
This commit is contained in:
parent
d195cc302d
commit
67baa0ec04
|
@ -9,6 +9,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
|
|||
|
||||
### Added
|
||||
|
||||
- Add caching to graph + instance endpoints to better handle traffic spikes.
|
||||
|
||||
### Changed
|
||||
|
||||
### Deprecated
|
||||
|
|
|
@ -34,10 +34,15 @@ config :logger, :console,
|
|||
config :phoenix, :json_library, Jason
|
||||
|
||||
config :gollum,
  # 24 hrs between robots.txt refreshes
  refresh_secs: 86_400,
  # only re-fetch when a cached entry is actually used
  lazy_refresh: true,
  user_agent: "fediverse.space crawler"

# Nebulex local cache backing Graph.Cache (see backend/lib/graph/cache.ex).
config :backend, Graph.Cache,
  # 1 hour between garbage collections of expired entries (seconds)
  gc_interval: 3600

# Twilio credentials come from the environment; unset vars yield nil.
config :ex_twilio,
  account_sid: System.get_env("TWILIO_ACCOUNT_SID"),
  auth_token: System.get_env("TWILIO_AUTH_TOKEN")
|
||||
|
|
|
@ -33,7 +33,8 @@ defmodule Backend.Application do
|
|||
),
|
||||
Supervisor.child_spec({Task, fn -> HTTPoison.start() end}, id: :start_httpoison),
|
||||
Backend.Scheduler,
|
||||
Backend.Elasticsearch.Cluster
|
||||
Backend.Elasticsearch.Cluster,
|
||||
Graph.Cache
|
||||
]
|
||||
|
||||
children =
|
||||
|
|
|
@ -1,19 +1,16 @@
|
|||
defmodule BackendWeb.GraphController do
  @moduledoc """
  Serves the federation graph (nodes + edges) as JSON, reading through
  `Graph.Cache` so traffic spikes do not hit the database on every request.
  """
  use BackendWeb, :controller

  alias Graph.Cache

  action_fallback BackendWeb.FallbackController

  # GET /graph — the whole graph.
  def index(conn, _params) do
    # Read through the cache. The pre-cache version also ran
    # Api.list_nodes()/Api.list_edges() here and then immediately overwrote
    # the results with the cached map — redundant, uncached DB work that
    # defeated the purpose of the cache. Those calls are removed.
    %{nodes: nodes, edges: edges} = Cache.get_graph()
    render(conn, "index.json", nodes: nodes, edges: edges)
  end

  # GET /graph/:id — the neighbourhood of a single domain.
  def show(conn, %{"id" => domain}) do
    %{nodes: nodes, edges: edges} = Cache.get_graph(domain)
    render(conn, "index.json", nodes: nodes, edges: edges)
  end
end
|
||||
|
|
|
@ -2,17 +2,17 @@ defmodule BackendWeb.InstanceController do
|
|||
use BackendWeb, :controller
|
||||
|
||||
import Backend.Util
|
||||
alias Backend.Api
|
||||
alias Graph.Cache
|
||||
|
||||
action_fallback(BackendWeb.FallbackController)
|
||||
|
||||
def show(conn, %{"id" => domain}) do
|
||||
instance = Api.get_instance_with_peers(domain)
|
||||
instance = Cache.get_instance_with_peers(domain)
|
||||
|
||||
if instance == nil or instance.opt_out == true do
|
||||
send_resp(conn, 404, "Not found")
|
||||
else
|
||||
last_crawl = get_last_crawl(domain)
|
||||
last_crawl = Cache.get_last_crawl(domain)
|
||||
render(conn, "show.json", instance: instance, crawl: last_crawl)
|
||||
end
|
||||
end
|
||||
|
|
93
backend/lib/graph/cache.ex
Normal file
93
backend/lib/graph/cache.ex
Normal file
|
@ -0,0 +1,93 @@
|
|||
defmodule Graph.Cache do
  @moduledoc """
  Read-through cache (Nebulex, local adapter) in front of the expensive
  graph / instance / most-recent-crawl queries, to soften traffic spikes.

  Hit/miss counts are reported to AppSignal under
  `graph_cache`, `instance_cache` and `most_recent_crawl_cache`.
  """
  use Nebulex.Cache,
    otp_app: :backend,
    adapter: Nebulex.Adapters.Local

  alias Backend.{Api, Crawl, Edge, Instance, MostRecentCrawl, Repo}
  alias __MODULE__
  require Logger
  import Ecto.Query

  @doc """
  Returns `%{nodes: ..., edges: ...}` for the whole fediverse, or — when
  `domain` is given — for that domain's neighbourhood. Cached for 10 minutes.
  """
  @spec get_graph(String.t() | nil) :: %{
          nodes: [Instance.t()],
          edges: [Edge.t()]
        }
  def get_graph(domain \\ nil) do
    key = if domain == nil, do: "graph", else: "graph_" <> domain

    fetch_cached(key, "graph_cache", "Graph cache", 600, fn ->
      %{nodes: Api.list_nodes(domain), edges: Api.list_edges(domain)}
    end)
  end

  @doc """
  Returns the instance for `domain` with its peers preloaded.
  Cached for one minute.
  """
  @spec get_instance_with_peers(String.t()) :: Instance.t()
  def get_instance_with_peers(domain) do
    fetch_cached("instance_" <> domain, "instance_cache", "Instance cache", 60, fn ->
      Api.get_instance_with_peers(domain)
    end)
  end

  @doc """
  Returns the most recent `Crawl` for `domain`, or `nil` if it has never
  been crawled. Cached for one minute.
  """
  @spec get_last_crawl(String.t()) :: Crawl.t() | nil
  def get_last_crawl(domain) do
    key = "most_recent_crawl_" <> domain

    fetch_cached(key, "most_recent_crawl_cache", "Most recent crawl cache", 60, fn ->
      # Built only on a cache miss — the original constructed this subquery
      # on every call, including hits where it was never used.
      most_recent_crawl_subquery =
        MostRecentCrawl
        |> select([mrc], %{most_recent_id: mrc.crawl_id})
        |> where([mrc], mrc.instance_domain == ^domain)

      Crawl
      |> join(:inner, [c], mrc in subquery(most_recent_crawl_subquery),
        on: c.id == mrc.most_recent_id
      )
      |> Repo.one()
    end)
  end

  # Cache-aside helper shared by the public functions. On a miss it computes
  # the value, stores it under `key` with `ttl` (seconds), and returns the
  # value explicitly (the original's last branch relied on Cache.set/3's
  # return value, which is fragile across Nebulex versions).
  # NOTE(review): Nebulex 1.x does not store `nil`, so a nil result (e.g. a
  # never-crawled domain) is recomputed on every call — same as the original.
  defp fetch_cached(key, metric, log_name, ttl, compute) do
    case Cache.get(key) do
      nil ->
        Appsignal.increment_counter(metric <> ".misses", 1)
        Logger.debug(log_name <> ": miss")
        value = compute.()
        Cache.set(key, value, ttl: ttl)
        value

      data ->
        Appsignal.increment_counter(metric <> ".hits", 1)
        Logger.debug(log_name <> ": hit")
        data
    end
  end
end
|
|
@ -63,7 +63,8 @@ defmodule Backend.MixProject do
|
|||
{:ex_twilio, "~> 0.7.0"},
|
||||
{:elasticsearch, "~> 1.0"},
|
||||
{:appsignal, "~> 1.10.1"},
|
||||
{:credo, "~> 1.1", only: [:dev, :test], runtime: false}
|
||||
{:credo, "~> 1.1", only: [:dev, :test], runtime: false},
|
||||
{:nebulex, "~> 1.1"}
|
||||
]
|
||||
end
|
||||
|
||||
|
|
|
@ -35,6 +35,7 @@
|
|||
"metrics": {:hex, :metrics, "1.0.1", "25f094dea2cda98213cecc3aeff09e940299d950904393b2a29d191c346a8486", [:rebar3], [], "hexpm"},
|
||||
"mime": {:hex, :mime, "1.3.1", "30ce04ab3175b6ad0bdce0035cba77bba68b813d523d1aac73d9781b4d193cf8", [:mix], [], "hexpm"},
|
||||
"mimerl": {:hex, :mimerl, "1.2.0", "67e2d3f571088d5cfd3e550c383094b47159f3eee8ffa08e64106cdf5e981be3", [:rebar3], [], "hexpm"},
|
||||
"nebulex": {:hex, :nebulex, "1.1.0", "be45cc3a2b7d01eb7da05747d38072d336187d05796ad9ef2d9dad9be430f915", [:mix], [{:shards, "~> 0.6", [hex: :shards, repo: "hexpm", optional: false]}], "hexpm"},
|
||||
"paginator": {:hex, :paginator, "0.6.0", "bc2c01abdd98281ff39b6a7439cf540091122a7927bdaabc167c61d4508f9cbb", [:mix], [{:ecto, "~> 3.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:ecto_sql, "~> 3.0", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:postgrex, "~> 0.13", [hex: :postgrex, repo: "hexpm", optional: true]}], "hexpm"},
|
||||
"parse_trans": {:hex, :parse_trans, "3.3.0", "09765507a3c7590a784615cfd421d101aec25098d50b89d7aa1d66646bc571c1", [:rebar3], [], "hexpm"},
|
||||
"phoenix": {:hex, :phoenix, "1.4.9", "746d098e10741c334d88143d3c94cab1756435f94387a63441792e66ec0ee974", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:phoenix_pubsub, "~> 1.1", [hex: :phoenix_pubsub, repo: "hexpm", optional: false]}, {:plug, "~> 1.8.1 or ~> 1.9", [hex: :plug, repo: "hexpm", optional: false]}, {:plug_cowboy, "~> 1.0 or ~> 2.0", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm"},
|
||||
|
@ -48,6 +49,7 @@
|
|||
"public_suffix": {:hex, :public_suffix, "0.6.0", "100cfe86f13f9f6f0cf67e743b1b83c78dd1223a2c422fa03ebf4adff514cbc3", [:mix], [{:idna, ">= 1.2.0 and < 6.0.0", [hex: :idna, repo: "hexpm", optional: false]}], "hexpm"},
|
||||
"quantum": {:hex, :quantum, "2.3.4", "72a0e8855e2adc101459eac8454787cb74ab4169de6ca50f670e72142d4960e9", [:mix], [{:calendar, "~> 0.17", [hex: :calendar, repo: "hexpm", optional: true]}, {:crontab, "~> 1.1", [hex: :crontab, repo: "hexpm", optional: false]}, {:gen_stage, "~> 0.12", [hex: :gen_stage, repo: "hexpm", optional: false]}, {:swarm, "~> 3.3", [hex: :swarm, repo: "hexpm", optional: false]}, {:timex, "~> 3.1", [hex: :timex, repo: "hexpm", optional: true]}], "hexpm"},
|
||||
"ranch": {:hex, :ranch, "1.7.1", "6b1fab51b49196860b733a49c07604465a47bdb78aa10c1c16a3d199f7f8c881", [:rebar3], [], "hexpm"},
|
||||
"shards": {:hex, :shards, "0.6.0", "678d292ad74a4598a872930f9b12251f43e97f6050287f1fb712fbfd3d282f75", [:make, :rebar3], [], "hexpm"},
|
||||
"sobelow": {:hex, :sobelow, "0.8.0", "a3ec73e546dfde19f14818e5000c418e3f305d9edb070e79dd391de0ae1cd1ea", [:mix], [], "hexpm"},
|
||||
"ssl_verify_fun": {:hex, :ssl_verify_fun, "1.1.4", "f0eafff810d2041e93f915ef59899c923f4568f4585904d010387ed74988e77b", [:make, :mix, :rebar3], [], "hexpm"},
|
||||
"swarm": {:hex, :swarm, "3.4.0", "64f8b30055d74640d2186c66354b33b999438692a91be275bb89cdc7e401f448", [:mix], [{:gen_state_machine, "~> 2.0", [hex: :gen_state_machine, repo: "hexpm", optional: false]}, {:libring, "~> 1.0", [hex: :libring, repo: "hexpm", optional: false]}], "hexpm"},
|
||||
|
|
Loading…
Reference in a new issue