don't log crawl times with appsignal

Tao Bror Bojlén 2019-07-27 13:57:07 +03:00
parent 235952b072
commit 74fc30e8a5
No known key found for this signature in database
GPG Key ID: C6EC7AAB905F9E6F
1 changed file with 0 additions and 6 deletions
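For context, the six removed lines are what hooked Backend.Crawler into AppSignal's decorator-based instrumentation. A minimal sketch of that pattern is shown below (a generic illustration based on the calls visible in the diff; the module and function names MyApp.Worker, run/1 and do_step/1 are hypothetical, not taken from this repository):

    defmodule MyApp.Worker do
      # Pulls in the @decorate macro from the appsignal package.
      use Appsignal.Instrumentation.Decorators

      # Records each call to run/1 as an AppSignal transaction,
      # including its duration.
      @decorate transaction()
      def run(arg) do
        do_step(arg)
      end

      # Records this helper as a timed event inside the surrounding transaction.
      @decorate transaction_event()
      defp do_step(arg) do
        arg
      end
    end

With these decorators deleted, the crawler's functions run unchanged, but their timings are no longer reported to AppSignal, which is what the commit message describes.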


@@ -10,7 +10,6 @@ defmodule Backend.Crawler do
import Ecto.Query
import Backend.Util
require Logger
-use Appsignal.Instrumentation.Decorators
defstruct [
# the instance domain (a string)
@@ -32,7 +31,6 @@ defmodule Backend.Crawler do
error: String.t() | nil
}
-@decorate transaction()
def run(domain) do
Logger.debug("Starting crawl of #{domain}")
@@ -60,13 +58,11 @@ defmodule Backend.Crawler do
# Recursive function to check whether `domain` has an API that the head of the api_crawlers list can read.
# If so, crawls it. If not, continues with the tail of the api_crawlers list.
-@decorate transaction_event()
defp crawl(%Crawler{api_crawlers: [], domain: domain} = state) do
Logger.debug("Found no compatible API for #{domain}")
Map.put(state, :found_api?, false)
end
-@decorate transaction_event()
defp crawl(%Crawler{domain: domain, api_crawlers: [curr | remaining_crawlers]} = state) do
if curr.is_instance_type?(domain) do
Logger.debug("Found #{curr} instance")
@@ -97,7 +93,6 @@
end
# Save the state (after crawling) to the database.
-@decorate transaction_event()
defp save(%Crawler{
domain: domain,
result: result,
@@ -214,7 +209,6 @@
|> Repo.insert_all(interactions)
end
-@decorate transaction_event()
defp save(%{domain: domain, error: error, allows_crawling?: allows_crawling}) do
error =
cond do