-rw-r--r--	crawler/init.go	 1
-rw-r--r--	crawler/main.go	19
2 files changed, 20 insertions, 0 deletions
diff --git a/crawler/init.go b/crawler/init.go
index a4fcb21..f23dd9b 100644
--- a/crawler/init.go
+++ b/crawler/init.go
@@ -17,6 +17,7 @@ func init() {
 	verbose := flag.BoolP("verbose", "v", false, "same as --debug")
 	silent := flag.BoolP("silent", "s", false, "suppress outputs except warnings")
 	loglevel_f := flag.StringP("loglevel", "l", "Warn", `sets log level, can be "Warn", "Info" or "Debug"`)
+	flag.Bool("shops", false, `list all crawlable shops`)
 	flag.Parse()
 
 	loglevel := strings.ToLower(*loglevel_f)
diff --git a/crawler/main.go b/crawler/main.go
index 034c588..bfcd647 100644
--- a/crawler/main.go
+++ b/crawler/main.go
@@ -10,6 +10,7 @@ import (
 
 	log "github.com/Sirupsen/logrus"
 	"github.com/jmoiron/sqlx"
+	flag "github.com/spf13/pflag"
 )
 
 type App struct {
@@ -65,6 +66,24 @@ func main() {
 		Fatal(err, "Getting shops failed")
 	}
 
+	// prints all crawlable shops
+	if "true" == flag.Lookup("shops").Value.String() {
+		log.SetLevel(log.InfoLevel)
+		for _, shop := range shops {
+			log.WithFields(
+				log.Fields{
+					"shop_id":        shop.Id,
+					"shop_url":       shop.Url,
+					"shop_short_url": shop.Short_url,
+					"shop_logo_url":  shop.Logo_url,
+					"shipping costs": shop.Shipping_costs,
+					"free shipping":  shop.Free_shipping,
+				},
+			).Info(shop.Name)
+		}
+		return
+	}
+
 	// reruns sanitizing functions over database
 	if app.Config.FixDatabase {
 		err := app.fix_db()
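
Note: the sketch below is a minimal, self-contained illustration of the same pattern — register a boolean pflag, check it after flag.Parse(), and emit one structured logrus entry per shop before exiting. The Shop struct and the sample slice are placeholders invented for illustration; in the crawler the shops come from the database via sqlx, and the flag is registered in init.go, which is why the patch reads it back through flag.Lookup instead of keeping the returned pointer.

	package main

	import (
		log "github.com/Sirupsen/logrus"
		flag "github.com/spf13/pflag"
	)

	// Shop mirrors the fields logged in the patch; the concrete type in the
	// crawler lives elsewhere and may differ (assumption for illustration).
	type Shop struct {
		Id             int
		Name           string
		Url            string
		Short_url      string
		Logo_url       string
		Shipping_costs float64
		Free_shipping  bool
	}

	func main() {
		// When registration and lookup happen in the same scope, keeping the
		// *bool returned by flag.Bool avoids the string comparison against
		// flag.Lookup("shops").Value.String() used in the patch.
		listShops := flag.Bool("shops", false, "list all crawlable shops")
		flag.Parse()

		// Stand-in data; the crawler loads shops from its database instead.
		shops := []Shop{
			{Id: 1, Name: "Example Shop", Url: "https://example.com",
				Short_url: "example.com", Shipping_costs: 4.95},
		}

		// Invoked as "crawler --shops", this prints one Info line per shop
		// with its metadata as structured fields, then returns.
		if *listShops {
			log.SetLevel(log.InfoLevel)
			for _, shop := range shops {
				log.WithFields(log.Fields{
					"shop_id":        shop.Id,
					"shop_url":       shop.Url,
					"shop_short_url": shop.Short_url,
					"shop_logo_url":  shop.Logo_url,
					"shipping_costs": shop.Shipping_costs,
					"free_shipping":  shop.Free_shipping,
				}).Info(shop.Name)
			}
			return
		}
	}

Design note: comparing the looked-up value against the string "true" works for a bool flag, but holding on to the pointer returned at registration time (or passing it from init.go to main) avoids the string round-trip and a nil dereference if the flag name is ever mistyped in Lookup.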
