fix some unsuccessful crawls being saved without error

This commit is contained in:
Tao Bror Bojlén 2019-10-01 16:05:09 +01:00
parent 8c83e5fcf9
commit 637278ad74
No known key found for this signature in database
GPG Key ID: C6EC7AAB905F9E6F
2 changed files with 13 additions and 2 deletions

View File

@ -18,6 +18,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
### Fixed
- Display plain Pleroma version rather than the Mastodon-compatible string
- Fixed some unsuccessful crawls being saved without their errors
### Security

View File

@ -134,7 +134,13 @@ defmodule Backend.Crawler do
end
end
# Save the state (after crawling) to the database.
## Save the state (after crawling) to the database. ##
# If we didn't get a server type, the crawl wasn't successful.
# A crawl that produced no server type did not succeed; persist it as an error.
defp save(%Crawler{result: %{type: nil}} = state), do: save_error(state)
defp save(%Crawler{
domain: domain,
result: result,
@ -345,7 +351,11 @@ defmodule Backend.Crawler do
Appsignal.increment_counter("crawler.success", 1)
end
defp save(%{domain: domain, error: error, allows_crawling?: allows_crawling}) do
# Catch-all clause: any state not matched above is treated as a failed crawl.
defp save(state), do: save_error(state)
defp save_error(%{domain: domain, error: error, allows_crawling?: allows_crawling}) do
now = get_now()
error =