path: root/crawler/scrape.go
blob: 6ef9fcf56d04736197e5543e3e32645440d0fa0b
package main

import (
	log "github.com/Sirupsen/logrus"
)

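// ScrapeHTML starts one scrape goroutine per shop and blocks until every
// goroutine has signalled completion on the wait channel.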
func (app *App) ScrapeHTML(shops []Shop) {
	wait := make(chan bool)
	count := 0

	for _, shop := range shops {
		go app.Scrape(shop, wait)
		count++
	}

	// Wait until all goroutines have finished.
	for i := 0; i < count; i++ {
		<-wait
	}
}

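// Scrape collects the offers of a single shop inside a database transaction:
// scraped offers are saved and expired ones removed; if either step fails the
// transaction is rolled back. Completion is signalled on the wait channel.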
func (app *App) Scrape(shop Shop, wait chan bool) {
	var W []Angebot
	var err error
	txFailed := false

	app.Tx, err = app.DB.Beginx()
	if err != nil {
		Fatal(err, "scrape.go: Starting transaction failed. Shop: "+shop.Name)
	}

	// Retry up to three times if scraping returns no offers.
	for i := 0; i < 3; i++ {
		W = app.ScrapeShop(shop)

		W = sanitize_offer(W, shop)

		if len(W) >= 1 {
			break
		}
	}

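	// Persist the scraped offers and remove expired ones inside the transaction.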
	err = app.save_offer(W)
	if err != nil {
		txFailed = true
		Warn(err, "Saving offers failed. Shop: "+shop.Name)
	}
	err = app.remove_expired(W, shop)
	if err != nil {
		txFailed = true
		Warn(err, "Removing expired offers failed. Shop: "+shop.Name)
	}

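	// Roll back if any step failed, otherwise commit.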
	if txFailed {
		err = app.Tx.Rollback()
		if err != nil {
			Fatal(err, "scrape.go: Rollback transaction failed")
		}
	} else {
		err = app.Tx.Commit()
		if err != nil {
			Fatal(err, "scrape.go: Committing transaction failed")
		}
	}

	wait <- true
}

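// ScrapeShop dispatches to the crawler for the given shop and returns the
// scraped offers, or an empty slice if no crawler is implemented.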
func (app *App) ScrapeShop(shop Shop) []Angebot {
	switch shop.Name {
	case "Bottleworld":
		return app.ScrapeBottleWord(shop)
	case "MC Whisky":
		return app.ScrapeMCWhisky(shop)
	case "Rum & Co":
		return app.ScrapeRumundCo(shop)
	case "Whic":
		return app.ScrapeWhic(shop)
	case "Whisky.de":
		return app.ScrapeWhiskyde(shop)
	case "Whiskysite.nl":
		return app.ScrapeWhiskysitenl(shop)
	case "Whisky World":
		return app.ScrapeWhiskyworld(shop)
	case "Whiskyzone":
		return app.ScrapeWhiskyzone(shop)
	default:
		log.Println(shop.Name + ": No Crawler")
	}

	return []Angebot{}
}