index.community/backend/config/config.exs

# This file is responsible for configuring your application
# and its dependencies with the aid of the Config module.
#
# This configuration file is loaded before any dependency and
# is restricted to this project.

# General application configuration
import Config

config :backend,
  ecto_repos: [Backend.Repo]

# Configures the endpoint
config :backend, BackendWeb.Endpoint,
  url: [host: "localhost"],
  secret_key_base: "XL4NKGBN9lZMrQbMEI1KJOlwAt8S7younVJl90TdAgzmwyapr3g7BRYSNYvX0sZ9",
  render_errors: [view: BackendWeb.ErrorView, accepts: ~w(json)],
  pubsub: [name: Backend.PubSub, adapter: Phoenix.PubSub.PG2],
  instrumenters: [Appsignal.Phoenix.Instrumenter]
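
# NOTE: the secret_key_base above is committed to the repository, so it is only
# suitable for local development; the environment-specific config imported at
# the bottom of this file presumably overrides it for production.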
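
# queue_target is a DBConnection setting, in milliseconds: how long queries may
# sit in the connection pool's queue before the pool starts shedding load.
# 5_000 ms is far above the 50 ms default, so slow bursts of crawl writes are
# tolerated rather than raising pool errors.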
config :backend, Backend.Repo, queue_target: 5000

instances_config_path =
  if System.get_env("MIX_ENV") == "prod",
    do: "lib/backend-2.6.0/priv/elasticsearch/instances.json",
    else: "priv/elasticsearch/instances.json"
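
# In a production release, priv/ lives under lib/<app>-<version>/, hence the
# hard-coded "backend-2.6.0" prefix above (which has to be kept in sync with
# the release version). A version-independent sketch, assuming the file ships
# in the app's priv directory and the path is resolved at runtime:
#
#     instances_config_path =
#       Application.app_dir(:backend, "priv/elasticsearch/instances.json")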

config :backend, Backend.Elasticsearch.Cluster,
  url: "http://localhost:9200",
  api: Elasticsearch.API.HTTP,
  json_library: Jason,
  indexes: %{
    instances: %{
      settings: instances_config_path,
      store: Backend.Elasticsearch.Store,
      sources: [Backend.Instance],
      bulk_page_size: 1000,
      bulk_wait_interval: 1_000
    }
  }
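
# Assuming this uses the `elasticsearch` Hex package (the Elasticsearch.API.HTTP
# and cluster/index layout above match it), the instances index could be
# (re)built from the command line with something like:
#
#     mix elasticsearch.build instances --cluster Backend.Elasticsearch.Cluster
#
# bulk_page_size and bulk_wait_interval then control how many documents go into
# each bulk request and the pause (in ms) between requests.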

# Configures Elixir's Logger
config :logger, :console,
  format: "$time $metadata[$level] $message\n",
  metadata: [:request_id]

# Use Jason for JSON parsing in Phoenix
config :phoenix, :json_library, Jason

config :gollum,
  refresh_secs: 86_400,
  lazy_refresh: true,
  user_agent: "fediverse.space crawler"
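
# Gollum caches robots.txt files, presumably so the crawler can honour them;
# with lazy_refresh a cached entry is only refetched (once refresh_secs, here
# 24 hours, has passed) when it is next looked up, and fetches identify
# themselves with the crawler's user agent.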

config :ex_twilio,
  account_sid: System.get_env("TWILIO_ACCOUNT_SID"),
  auth_token: System.get_env("TWILIO_AUTH_TOKEN")

config :backend, Backend.Mailer,
  adapter: Swoosh.Adapters.Sendgrid,
  api_key: System.get_env("SENDGRID_API_KEY")
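
# NOTE: config.exs is evaluated when the config is compiled (at build time for
# a release), so these System.get_env/1 calls pick up TWILIO_* and
# SENDGRID_API_KEY from the build environment rather than from the running node.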

config :backend, :crawler,
  status_age_limit_days: 28,
  status_count_limit: 5000,
  personal_instance_threshold: 10,
  crawl_interval_mins: 60,
  crawl_workers: 20,
  blacklist: [
    "gab.best",
    "4chan.icu"
  ],
  user_agent: "fediverse.space crawler",
  admin_phone: System.get_env("ADMIN_PHONE"),
  twilio_phone: System.get_env("TWILIO_PHONE"),
  admin_email: System.get_env("ADMIN_EMAIL")
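
# These values are presumably read in the crawler through the standard config
# API, along the lines of (illustrative only):
#
#     crawler_config = Application.get_env(:backend, :crawler)
#     Keyword.get(crawler_config, :crawl_interval_mins)  #=> 60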

config :backend, Backend.Scheduler,
  jobs: [
    # At midnight every day
    {"@daily", {Backend.Scheduler, :prune_crawls, [1, "month"]}},
    # 00.15 daily
    {"15 0 * * *", {Backend.Scheduler, :generate_edges, []}},
    # 00.30 every night
    {"30 0 * * *", {Backend.Scheduler, :generate_insularity_scores, []}},
    # 00.45 every night
    {"45 0 * * *", {Backend.Scheduler, :generate_status_rate, []}},
    # Every 3 hours
    {"0 */3 * * *", {Backend.Scheduler, :check_for_spam_instances, []}}
  ]
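
# The :jobs format above (a cron expression paired with a {module, function,
# args} tuple) matches the Quantum scheduler, so Backend.Scheduler is presumably
# a Quantum instance that runs these maintenance tasks on the given schedule.
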
# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{Mix.env()}.exs"