| author | horus | 2018-06-18 15:54:53 +0200 |
|---|---|---|
| committer | horus | 2018-06-18 15:54:53 +0200 |
| commit | 01e0cbe79f37b4be2fc82d31c71042b5ce4d699a (patch) | |
| tree | bb179b5c5c6349a69853c3781236b6056b7e7ea6 /crawler/init.go | |
| parent | 88a2628258eb5ea79736338637ab8b5b83680c92 (diff) | |
| parent | 8114b7b17b723a5fe0fee24470e255faf587332e (diff) | |
| download | alkobote-01e0cbe79f37b4be2fc82d31c71042b5ce4d699a.tar.gz | |
Merge branch 'master' of /home/horus/app/fk_angebote
Diffstat (limited to 'crawler/init.go')
| -rw-r--r-- | crawler/init.go | 13 |
1 file changed, 13 insertions, 0 deletions
```diff
diff --git a/crawler/init.go b/crawler/init.go
index 60f7e47..668df2d 100644
--- a/crawler/init.go
+++ b/crawler/init.go
@@ -23,6 +23,9 @@ func init() {
 	loglevel_f := flag.StringP("loglevel", "l", "Warn", `sets log level, can be "Warn", "Info" or "Debug"`)
 	flag.Bool("list-shops", false, `lists all crawlable shops`)
 	shopids_f := flag.StringP("restrict-shops", "r", "", `comma separated list of shop ids, crawls only these`)
+	user_agent_f := flag.StringP("user-agent", "u", "", "sets user agent")
+	delay_f := flag.Int("delay", 0, "toggles random delay between crawls")
+	ignore_robots_f := flag.Bool("ignore-robots-txt", true, "ignores robots.txt")
 
 	flag.Parse()
 	loglevel := strings.ToLower(*loglevel_f)
@@ -41,6 +44,16 @@ func init() {
 	_conf.parseConfig(*configFile)
 
+	if *user_agent_f != "" {
+		_conf.UserAgent = *user_agent_f
+	}
+	if *delay_f != 0 {
+		_conf.Delay = *delay_f
+	}
+	if !*ignore_robots_f {
+		_conf.IgnoreRobotsTXT = *ignore_robots_f
+	}
+
 	if _conf.Debug && !*silent {
 		log.SetLevel(log.DebugLevel)
 	}
```
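The `flag.StringP` call suggests a pflag-style flag package; the hunks define three new flags and let them override values loaded from the config file. Below is a minimal, self-contained sketch of that flag-override pattern, assuming `github.com/spf13/pflag` and a hypothetical `config` struct standing in for the real `_conf` defined elsewhere in the repository.

```go
package main

import (
	"fmt"

	flag "github.com/spf13/pflag"
)

// config mirrors only the fields touched by the diff; the struct itself is
// hypothetical -- the real _conf type lives elsewhere in the crawler package.
type config struct {
	UserAgent       string
	Delay           int
	IgnoreRobotsTXT bool
}

func main() {
	// Flag definitions analogous to the ones added in crawler/init.go.
	userAgent := flag.StringP("user-agent", "u", "", "sets user agent")
	delay := flag.Int("delay", 0, "toggles random delay between crawls")
	ignoreRobots := flag.Bool("ignore-robots-txt", true, "ignores robots.txt")
	flag.Parse()

	// Placeholder values standing in for what parseConfig would load from
	// the config file; flags override them only when set to a non-default.
	conf := config{UserAgent: "example-agent/1.0", Delay: 5, IgnoreRobotsTXT: true}

	if *userAgent != "" {
		conf.UserAgent = *userAgent
	}
	if *delay != 0 {
		conf.Delay = *delay
	}
	if !*ignoreRobots {
		conf.IgnoreRobotsTXT = *ignoreRobots
	}

	fmt.Printf("%+v\n", conf)
}
```

Note that because `ignore-robots-txt` defaults to true, the override branch only fires when the user explicitly passes `--ignore-robots-txt=false`; a config-file value of true cannot be re-enabled from the command line under this pattern.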
