diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index c7ef9be..433508e 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -31,5 +31,5 @@ jobs: env: API_USER: ${{ secrets.API_USER }} API_PASSWORD: ${{ secrets.API_PASSWORD }} - CRONCERT_API: ${{ secrets.CRONCERT_API }} + EVENT_API: ${{ secrets.EVENT_API }} diff --git a/README.md b/README.md index 54748ea..039cb57 100644 --- a/README.md +++ b/README.md @@ -27,6 +27,10 @@ Currently an event has the following fields: Have a look at the configuration file `config.yml` for details about how to configure the crawler for a specific website. +## Run the crawler + +The crawler can be executed with `go run main.go` to crawl all configured locations and print the results. To run a single crawler, add the flag `-single`. To write the events to the event API, add the environment variables `API_USER`, `API_PASSWORD` and `EVENT_API`, and add the flag `-store` to the go command. +## Regular execution through Github Actions The crawler is regularely being executed through Github Actions and its crawled data consequently written to the event api described below. diff --git a/main.go b/main.go index c3820cd..6875d59 100644 --- a/main.go +++ b/main.go @@ -293,7 +293,7 @@ func extractStringRegex(rc *RegexConfig, s string) (string, error) { } func writeEventsToAPI(c Crawler) { - apiUrl := os.Getenv("EVENT_API") + apiUrl := os.Getenv("EVENT_API") client := &http.Client{ Timeout: time.Second * 10, }