update ES index on crawl

This commit is contained in:
Tao Bror Bojlén 2019-08-02 19:03:21 +03:00
parent 8d1b975990
commit 09708e74ab
No known key found for this signature in database
GPG key ID: C6EC7AAB905F9E6F
3 changed files with 20 additions and 10 deletions

View file

@ -34,6 +34,9 @@ Though dockerized, backend development is easiest if you have the following inst
- `docker-compose build` - `docker-compose build`
- `docker-compose up -d phoenix` - `docker-compose up -d phoenix`
- if you don't specify `phoenix`, it'll also start `gephi` which should only be run as a regular one-off job - if you don't specify `phoenix`, it'll also start `gephi` which should only be run as a regular one-off job
- Create the elasticsearch index:
- `iex -S mix app.start`
- `Elasticsearch.Index.hot_swap(Backend.Elasticsearch.Cluster, :instances)`
### Frontend ### Frontend
@ -96,6 +99,9 @@ SHELL=/bin/bash
10. (Optional) Set up caching with something like [dokku-nginx-cache](https://github.com/Aluxian/dokku-nginx-cache) 10. (Optional) Set up caching with something like [dokku-nginx-cache](https://github.com/Aluxian/dokku-nginx-cache)
Before the app starts running, make sure that the Elasticsearch index already exists -- otherwise the app will create a plain index named
`instances`. That name should belong to the alias instead, so if an index claims it you won't be able to hot-swap when you reindex in the future.
## Acknowledgements ## Acknowledgements
[![NLnet logo](/nlnet-logo.png)](https://nlnet.nl/project/fediverse_space/) [![NLnet logo](/nlnet-logo.png)](https://nlnet.nl/project/fediverse_space/)

View file

@ -23,7 +23,7 @@ config :backend, Backend.Repo, queue_target: 5000
instances_config_path = instances_config_path =
if System.get_env("MIX_ENV") == "prod", if System.get_env("MIX_ENV") == "prod",
do: "lib/backend-2.2.0/priv/elasticsearch/instances.json", do: "lib/backend-2.2.0/priv/elasticsearch/instances.json",
else: "instances.json" else: "priv/elasticsearch/instances.json"
config :backend, Backend.Elasticsearch.Cluster, config :backend, Backend.Elasticsearch.Cluster,
url: "http://localhost:9200", url: "http://localhost:9200",

View file

@ -111,22 +111,26 @@ defmodule Backend.Crawler do
end end
## Update the instance we crawled ## ## Update the instance we crawled ##
instance = %Instance{
domain: domain,
description: result.description,
version: result.version,
user_count: result.user_count,
status_count: result.status_count,
type: instance_type,
base_domain: get_base_domain(domain)
}
Repo.insert!( Repo.insert!(
%Instance{ instance,
domain: domain,
description: result.description,
version: result.version,
user_count: result.user_count,
status_count: result.status_count,
type: instance_type,
base_domain: get_base_domain(domain)
},
on_conflict: on_conflict:
{:replace, {:replace,
[:description, :version, :user_count, :status_count, :type, :base_domain, :updated_at]}, [:description, :version, :user_count, :status_count, :type, :base_domain, :updated_at]},
conflict_target: :domain conflict_target: :domain
) )
Elasticsearch.put_document(Backend.Elasticsearch.Cluster, instance, "instances/_doc")
# Save details of a new crawl # Save details of a new crawl
curr_crawl = curr_crawl =
Repo.insert!(%Crawl{ Repo.insert!(%Crawl{