2023-04-16 15:14:44 +00:00
|
|
|
package main
|
|
|
|
|
|
|
|
import (
|
|
|
|
"encoding/json"
|
2023-04-19 23:00:25 +00:00
|
|
|
"errors"
|
2023-04-16 15:14:44 +00:00
|
|
|
"fmt"
|
|
|
|
"log"
|
|
|
|
"net/http"
|
2023-04-19 23:00:25 +00:00
|
|
|
"regexp"
|
|
|
|
"strconv"
|
2023-04-16 15:14:44 +00:00
|
|
|
)
|
|
|
|
|
|
|
|
// ItemID is the numeric identifier of a Hacker News item.
type ItemID = int
|
|
|
|
|
|
|
|
// Story is a Hacker News story item as returned by the Firebase API.
// Field names map onto the item schema at https://github.com/HackerNews/API;
// decoding relies on encoding/json's case-insensitive field matching
// (e.g. JSON key "by" -> field By), so no struct tags are needed.
type Story struct {
	ID          ItemID
	By          string // username of author
	Time        int    // UNIX timestamp
	Descendants int    // number of comments
	Kids        []ItemID
	Score       int
	Title       string
	Url         string
}
|
|
|
|
|
2023-04-16 17:51:17 +00:00
|
|
|
// Package-level configuration, populated once in init.
var (
	// HackerNewsUrl is the base URL of the Hacker News website.
	HackerNewsUrl string
	// HackerNewsFirebaseUrl is the base URL of the official Firebase-backed API.
	HackerNewsFirebaseUrl string
	// HackerNewsLinkRegexp matches a Hacker News item link and captures the
	// numeric item ID in group 1.
	HackerNewsLinkRegexp *regexp.Regexp
)
|
|
|
|
|
|
|
|
// init populates the package-level base URLs and compiles the item-link
// pattern once at startup.
func init() {
	HackerNewsUrl = "https://news.ycombinator.com"
	HackerNewsFirebaseUrl = "https://hacker-news.firebaseio.com/v0"
	// The scheme is optional and the pattern is unanchored, so item links
	// embedded in surrounding text still match; group 1 captures the item ID.
	HackerNewsLinkRegexp = regexp.MustCompile(`(?:https?:\/\/)?news\.ycombinator\.com\/item\?id=([0-9]+)`)
}
|
|
|
|
|
2023-04-16 17:56:42 +00:00
|
|
|
func FetchHackerNewsTopStories() []Story {
|
2023-04-16 15:14:44 +00:00
|
|
|
// API docs: https://github.com/HackerNews/API
|
|
|
|
|
2023-04-16 17:51:17 +00:00
|
|
|
url := fmt.Sprintf("%s/topstories.json", HackerNewsFirebaseUrl)
|
2023-04-16 15:14:44 +00:00
|
|
|
resp, err := http.Get(url)
|
|
|
|
if err != nil {
|
|
|
|
log.Fatal("Error fetching top stories:", err)
|
|
|
|
}
|
|
|
|
defer resp.Body.Close()
|
|
|
|
log.Printf("GET %s %d\n", url, resp.StatusCode)
|
|
|
|
|
|
|
|
var ids []int
|
|
|
|
err = json.NewDecoder(resp.Body).Decode(&ids)
|
|
|
|
if err != nil {
|
|
|
|
log.Fatal("Error decoding top stories JSON:", err)
|
|
|
|
}
|
|
|
|
|
|
|
|
// we are only interested in the first page of top stories
|
|
|
|
const limit = 30
|
|
|
|
ids = ids[:limit]
|
|
|
|
|
|
|
|
var stories [limit]Story
|
|
|
|
for i, id := range ids {
|
2023-04-16 17:56:42 +00:00
|
|
|
story := FetchStoryById(id)
|
2023-04-16 15:14:44 +00:00
|
|
|
stories[i] = story
|
|
|
|
}
|
|
|
|
|
|
|
|
// Can't return [30]Story as []Story so we copy the array
|
|
|
|
return stories[:]
|
|
|
|
}
|
|
|
|
|
2023-04-16 17:56:42 +00:00
|
|
|
func FetchStoryById(id ItemID) Story {
|
2023-04-16 15:14:44 +00:00
|
|
|
url := fmt.Sprintf("https://hacker-news.firebaseio.com/v0/item/%d.json", id)
|
|
|
|
resp, err := http.Get(url)
|
|
|
|
if err != nil {
|
|
|
|
log.Fatal("Error fetching story:", err)
|
|
|
|
}
|
|
|
|
defer resp.Body.Close()
|
|
|
|
log.Printf("GET %s %d\n", url, resp.StatusCode)
|
|
|
|
|
|
|
|
var story Story
|
|
|
|
err = json.NewDecoder(resp.Body).Decode(&story)
|
|
|
|
if err != nil {
|
|
|
|
log.Fatal("Error decoding story JSON:", err)
|
|
|
|
}
|
|
|
|
|
|
|
|
return story
|
|
|
|
}
|
2023-04-16 19:05:10 +00:00
|
|
|
|
2023-04-19 23:00:25 +00:00
|
|
|
func ParseHackerNewsLink(link string) (ItemID, error) {
|
|
|
|
match := HackerNewsLinkRegexp.FindStringSubmatch(link)
|
|
|
|
if len(match) == 0 {
|
|
|
|
return -1, errors.New("input is not a hacker news link")
|
|
|
|
}
|
|
|
|
id, err := strconv.Atoi(match[1])
|
|
|
|
if err != nil {
|
|
|
|
// this should never happen
|
|
|
|
panic(err)
|
|
|
|
}
|
|
|
|
return id, nil
|
|
|
|
}
|
|
|
|
|
2023-04-16 19:05:10 +00:00
|
|
|
func HackerNewsUserLink(user string) string {
|
|
|
|
return fmt.Sprintf("%s/user?id=%s", HackerNewsUrl, user)
|
|
|
|
}
|
|
|
|
|
|
|
|
func HackerNewsItemLink(id int) string {
|
|
|
|
return fmt.Sprintf("%s/item?id=%d", HackerNewsUrl, id)
|
|
|
|
}
|