From fe18a79dd2f7115b30d440a025a7f7f72486dd09 Mon Sep 17 00:00:00 2001
From: James Mills
Date: Tue, 3 Apr 2018 14:32:43 -0700
Subject: Add support for robots.txt user agent control from web crawlers
 (#10)

---
 cmd/gopherproxy/main.go | 10 +++++++---
 1 file changed, 7 insertions(+), 3 deletions(-)

diff --git a/cmd/gopherproxy/main.go b/cmd/gopherproxy/main.go
index 9a7d1f9..84bde30 100644
--- a/cmd/gopherproxy/main.go
+++ b/cmd/gopherproxy/main.go
@@ -8,12 +8,16 @@ import (
 )
 
 var (
-	bind = flag.String("bind", "0.0.0.0:8000", "[int]:port to bind to")
-	uri  = flag.String("uri", "floodgap.com", ":[port] to proxy to")
+	// TODO: Allow config file and environment vars
+	// (opt -> env -> config -> default)
+	bind       = flag.String("bind", "0.0.0.0:8000", "[int]:port to bind to")
+	robotsfile = flag.String("robots-file", "robots.txt", "robots.txt file")
+	uri        = flag.String("uri", "floodgap.com", ":[port] to proxy to")
 )
 
 func main() {
 	flag.Parse()
 
-	log.Fatal(gopherproxy.ListenAndServe(*bind, *uri))
+	// Use a config struct
+	log.Fatal(gopherproxy.ListenAndServe(*bind, *robotsfile, *uri))
 }
--
cgit v1.2.3-70-g09d2
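
The patch above only threads the new -robots-file flag through to gopherproxy.ListenAndServe; the serving side is not shown here. A minimal sketch of how such a flag might be honored follows, assuming the proxy reads the file at startup and serves it verbatim at /robots.txt so crawlers can be told which paths to avoid. This is illustrative only, not gopherproxy's actual implementation; the handler and file names are assumptions.

// Hypothetical sketch: serve a robots.txt file to web crawlers.
package main

import (
	"io/ioutil"
	"log"
	"net/http"
)

func main() {
	// Read the robots.txt file once at startup; if it is missing,
	// /robots.txt returns 404 instead of the server failing outright.
	robots, err := ioutil.ReadFile("robots.txt")
	if err != nil {
		log.Printf("robots.txt not readable: %s", err)
	}

	http.HandleFunc("/robots.txt", func(w http.ResponseWriter, r *http.Request) {
		if robots == nil {
			http.NotFound(w, r)
			return
		}
		w.Header().Set("Content-Type", "text/plain")
		w.Write(robots)
	})

	log.Fatal(http.ListenAndServe("0.0.0.0:8000", nil))
}

Reading the file once at startup keeps the handler allocation-free per request; a proxy that expects the file to change at runtime would re-read it or watch it instead.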