From c9afd4e8afe6a719b5d930c9baa2699442849fd8 Mon Sep 17 00:00:00 2001
From: Maximilian Möhring
Date: Wed, 15 May 2019 16:58:56 +0200
Subject: Improves structured logging. (crawler)

---
 crawler/scrape.go | 9 +++++----
 1 file changed, 5 insertions(+), 4 deletions(-)

(limited to 'crawler/scrape.go')

diff --git a/crawler/scrape.go b/crawler/scrape.go
index f46b651..9d0d0f2 100644
--- a/crawler/scrape.go
+++ b/crawler/scrape.go
@@ -4,7 +4,6 @@ import (
 	"time"
 
 	"github.com/gocolly/colly"
-	log "github.com/sirupsen/logrus"
 )
 
 func (app *App) Scrape(shops []Shop) {
@@ -49,11 +48,13 @@ func (app *App) ScrapeShop(shop Shop, wait chan bool) {
 
 	err = app.save_offer(W)
 	if err != nil {
-		Warn(err, "Saving offers failed. Shop: "+shop.Name)
+		shop.error_msg = err.Error()
+		shop.Warn("Saving offers failed.")
 	}
 	err = app.remove_expired(W, shop)
 	if err != nil {
-		Warn(err, "Removing expired offers failed. Shop: "+shop.Name)
+		shop.error_msg = err.Error()
+		shop.Warn("Removing expired offers failed.")
 	}
 
 	wait <- true
@@ -81,7 +82,7 @@ func (app *App) ScrapeHTML(shop Shop) []Angebot {
 	case "Drankdozijn":
 		return app.ScrapeDrankdozijn(shop)
 	default:
-		log.Println(shop.Name + ": No Crawler")
+		shop.Warn("No Crawler")
 	}
 
 	return []Angebot{}
--
cgit v1.2.3
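
Note: the hunks above call a shop.Warn helper and set a shop.error_msg field that are defined outside this patch. As a rough sketch only (the Shop struct, the error_msg field layout, and the logrus-based Warn method below are assumptions, not taken from this commit), one plausible shape for such a helper that emits structured log entries keyed by shop name is:

// Minimal sketch, assuming Shop lives in the same package and wraps logrus.
// Not part of this patch; field and method shapes are illustrative guesses.
package main

import (
	"errors"

	log "github.com/sirupsen/logrus"
)

// Shop holds per-shop crawl state; error_msg carries the last error text
// so Warn can attach it as a structured field.
type Shop struct {
	Name      string
	error_msg string
}

// Warn logs msg with the shop name and last error as separate structured
// fields instead of concatenating them into the message string.
func (s Shop) Warn(msg string) {
	log.WithFields(log.Fields{
		"shop":  s.Name,
		"error": s.error_msg,
	}).Warn(msg)
}

func main() {
	shop := Shop{Name: "Drankdozijn"}
	if err := errors.New("connection reset"); err != nil {
		shop.error_msg = err.Error()
		shop.Warn("Saving offers failed.")
	}
}

Whatever the actual implementation looks like, the visible effect of the patch is that the shop name and error text no longer get concatenated into the log message (as in the removed Warn(err, "Saving offers failed. Shop: "+shop.Name) calls) but travel as separate, filterable fields, which is what "structured logging" refers to here.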