Handle errors better in updater/scraper/poller.

A. Svensson 2016-04-21 14:54:02 +02:00
parent a91ba8a5dd
commit 014b42cbb3
3 changed files with 51 additions and 32 deletions

View File

@@ -57,8 +57,7 @@ func poll_players(host string, timeout int) (int, error) {
 	return int(players), nil
 }
 
-// TODO: return errors!
-func PollServers(servers []ServerConfig, timeout int) []*RawServerData {
+func PollServers(servers []ServerConfig, timeout int) ([]*RawServerData, error) {
 	var wg sync.WaitGroup
 	var tmp []*RawServerData
 	for _, s := range servers {
@@ -66,7 +65,7 @@ func PollServers(servers []ServerConfig, timeout int) []*RawServerData {
 		go func(s ServerConfig) {
 			defer wg.Done()
 			players, err := poll_players(s.GameUrl, timeout)
-			if err != nil {
+			if log_error(err) {
				return
 			}
 			gameurl := fmt.Sprintf("byond://%s", s.GameUrl)
@@ -74,5 +73,5 @@ func PollServers(servers []ServerConfig, timeout int) []*RawServerData {
 		}(s)
 	}
 	wg.Wait()
-	return tmp
+	return tmp, nil
 }
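The new code in all three files leans on a log_error helper that does not appear in this diff. Judging only by its call sites (`if log_error(err) { return }` and `if !log_error(err) { ... }`), it presumably logs a non-nil error and reports whether one occurred. A minimal sketch under that assumption, not the repository's actual implementation:

package main

import (
	"errors"
	"fmt"
	"log"
)

// log_error logs err when it is non-nil and reports whether an error
// occurred, so callers can write `if log_error(err) { return }`.
// Sketch only: the real helper is not shown in this commit.
func log_error(err error) bool {
	if err != nil {
		log.Println(err)
		return true
	}
	return false
}

func main() {
	if log_error(errors.New("poll failed")) {
		fmt.Println("skipping server")
	}
}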

View File

@@ -19,48 +19,62 @@ var (
 	RE_PLAYERS = regexp.MustCompile(`Logged in: (\d+) player`)
 )
 
-// TODO: return errors!
-func ScrapePage() []*RawServerData {
-	data := download_data()
-	return parse_data(data)
+func ScrapePage() ([]*RawServerData, error) {
+	data, err := download_data()
+	if err != nil {
+		return nil, err
+	}
+	tmp, err := parse_data(data)
+	if err != nil {
+		return nil, err
+	}
+	return tmp, nil
 }
 
-func download_data() *goquery.Document {
+func download_data() (*goquery.Document, error) {
 	var r io.Reader
 	if IsDebugging() {
 		fmt.Println("Scraper data source: ./dump.html")
 		f, err := os.Open("./tmp/dump.html")
-		log_error(err)
+		if err != nil {
+			return nil, err
+		}
 		defer f.Close()
 		r = charmap.Windows1252.NewDecoder().Reader(f)
 	} else {
 		client := &http.Client{
 			Timeout: time.Duration(1) * time.Minute,
 		}
-		resp, e := client.Get("http://www.byond.com/games/exadv1/spacestation13")
-		log_error(e)
+		resp, err := client.Get("http://www.byond.com/games/exadv1/spacestation13")
+		if err != nil {
+			return nil, err
+		}
 		defer resp.Body.Close()
 		// Yep, Byond serve's it's pages with Windows-1252 encoding...
 		r = charmap.Windows1252.NewDecoder().Reader(resp.Body)
 	}
-	doc, e := goquery.NewDocumentFromReader(r)
-	log_error(e)
-	return doc
+	doc, err := goquery.NewDocumentFromReader(r)
+	if err != nil {
+		return nil, err
+	}
+	return doc, nil
 }
 
-func parse_data(data *goquery.Document) []*RawServerData {
+func parse_data(data *goquery.Document) ([]*RawServerData, error) {
 	var servers []*RawServerData
 	data.Find(".live_game_entry").Each(func(i int, s *goquery.Selection) {
-		tmp := parse_server_data(s)
-		if tmp != nil {
-			servers = append(servers, tmp)
+		tmp, err := parse_server_data(s)
+		if !log_error(err) {
+			if tmp != nil {
+				servers = append(servers, tmp)
+			}
 		}
 	})
-	return servers
+	return servers, nil
 }
 
-func parse_server_data(raw *goquery.Selection) *RawServerData {
+func parse_server_data(raw *goquery.Selection) (*RawServerData, error) {
 	s := raw.Find(".live_game_status")
 	t := s.Find("b").First()
@@ -72,7 +86,7 @@ func parse_server_data(raw *goquery.Selection) *RawServerData {
 	title = strings.Replace(title, "\n", "", -1)
 	if len(title) < 1 {
 		// Yes, someone has made a public server without a server name at least once
-		return nil
+		return nil, fmt.Errorf("Empty name for server")
 	}
 
 	game_url := s.Find("span.smaller").Find("nobr").Text()
@@ -90,9 +104,11 @@ func parse_server_data(raw *goquery.Selection) *RawServerData {
 	// than 2 there's multiple matches, which is fishy...
 	if len(ret) == 2 {
 		p, err := strconv.ParseInt(ret[1], 10, 0)
-		log_error(err)
+		if err != nil {
+			return nil, err
+		}
 		players = int(p)
 	}
 
-	return &RawServerData{title, game_url, site_url, players, Now()}
+	return &RawServerData{title, game_url, site_url, players, Now()}, nil
 }
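For reference, the parsing step the last hunk now guards: RE_PLAYERS (defined at the top of this file) captures the player count from the hub page's status text, and a failed strconv.ParseInt is now returned instead of merely logged. A standalone sketch of that step; the status string here is made up for illustration, the real input comes from the scraped page:

package main

import (
	"fmt"
	"regexp"
	"strconv"
)

var RE_PLAYERS = regexp.MustCompile(`Logged in: (\d+) player`)

func main() {
	// Example status text; in the scraper this comes from the BYOND hub page.
	status := "Logged in: 37 players"

	players := 0
	ret := RE_PLAYERS.FindStringSubmatch(status)
	// len(ret) == 2 means exactly one match plus its capture group;
	// anything else is treated as "no reliable player count".
	if len(ret) == 2 {
		p, err := strconv.ParseInt(ret[1], 10, 0)
		if err != nil {
			fmt.Println("bad player count:", err)
			return
		}
		players = int(p)
	}
	fmt.Println("players:", players)
}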

View File

@@ -23,23 +23,27 @@ func (i *Instance) UpdateServers() {
 	tx := i.DB.NewTransaction()
 
 	config, err := LoadConfig(SERVERS_CONFIG)
-	if err != nil {
-		fmt.Printf("Unable to load servers to poll: %s\n", err) // TODO
-	} else {
+	if !log_error(err) {
 		if i.Debug {
 			fmt.Println("\nPolling servers...")
 		}
-		for _, s := range PollServers(config.PollServers, config.Timeout) {
-			i.update_server(tx, s)
+		polled, err := PollServers(config.PollServers, config.Timeout)
+		if !log_error(err) {
+			for _, s := range polled {
+				i.update_server(tx, s)
+			}
 		}
 	}
 
 	if i.Debug {
 		fmt.Println("\nScraping servers...")
 	}
-	for _, s := range ScrapePage() {
-		i.update_server(tx, s)
+	scraped, err := ScrapePage()
+	if !log_error(err) {
+		for _, s := range scraped {
+			i.update_server(tx, s)
+		}
 	}
 	if i.Debug {
 		fmt.Println("\nUpdating inactive servers...")