mirror of https://github.com/velour/catbase.git
hn: use api
parent 958a454271
commit af9fc12038
@@ -0,0 +1,44 @@
+package hn
+
+import (
+	"encoding/json"
+	"fmt"
+	"net/http"
+	"path"
+)
+
+const BASE = "https://hacker-news.firebaseio.com/v0/"
+
+func get(url string) (*http.Response, error) {
+	c := &http.Client{}
+	req, _ := http.NewRequest("GET", url, nil)
+	req.Header.Set("User-Agent", "catbase/1.0")
+	return c.Do(req)
+}
+
+func GetItem(id int) (Item, error) {
+	u := path.Join(BASE, "item", fmt.Sprintf("%d.json", id))
+	resp, err := get(u)
+	if err != nil {
+		return Item{}, err
+	}
+	dec := json.NewDecoder(resp.Body)
+	i := Item{}
+	if err := dec.Decode(&i); err != nil {
+		return Item{}, err
+	}
+	return i, nil
+}
+
+type Items []Item
+
+func (is Items) Titles() string {
+	out := ""
+	for i, v := range is {
+		if i > 0 {
+			out += ", "
+		}
+		out += fmt.Sprintf("<%s|%s>", v.URL, v.Title)
+	}
+	return out
+}
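A minimal sketch of how the new Items.Titles helper behaves, with hand-built items; the titles and URLs below are placeholders, not data from the commit:

	package main

	import (
		"fmt"

		"github.com/velour/catbase/plugins/newsbid/webshit/hn"
	)

	func main() {
		// Hand-built items; the titles and URLs are placeholders.
		stories := hn.Items{
			{Title: "First story", URL: "https://example.com/a"},
			{Title: "Second story", URL: "https://example.com/b"},
		}
		// Titles joins entries as "<url|title>" pairs separated by ", ", e.g.
		// <https://example.com/a|First story>, <https://example.com/b|Second story>
		fmt.Println(stories.Titles())
	}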
@@ -0,0 +1,19 @@
+package hn
+
+type Item struct {
+	ID          int    `json:"id"`
+	Deleted     bool   `json:"deleted"`
+	Type        string `json:"type"`
+	By          string `json:"by"`
+	Time        int    `json:"time"`
+	Text        string `json:"text"`
+	Dead        bool   `json:"dead"`
+	Parent      int    `json:"parent"`
+	Poll        []int  `json:"poll"` // check this
+	Kids        []int  `json:"kids"`
+	URL         string `json:"url"`
+	Score       int    `json:"score"`
+	Title       string `json:"title"`
+	Parts       []int  `json:"parts"`
+	Descendants int    `json:"descendants"`
+}
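The struct tags map the lowercase field names of a v0 API response onto Item; a small decoding sketch, using a made-up payload purely for illustration:

	package main

	import (
		"encoding/json"
		"fmt"

		"github.com/velour/catbase/plugins/newsbid/webshit/hn"
	)

	func main() {
		// Made-up payload in the shape the v0 API returns for a story.
		raw := []byte(`{"id": 1, "type": "story", "by": "someone", "title": "An example", "url": "https://example.com", "score": 42, "descendants": 7}`)
		var it hn.Item
		if err := json.Unmarshal(raw, &it); err != nil {
			panic(err)
		}
		fmt.Printf("%s (%d points, %d comments)\n", it.Title, it.Score, it.Descendants)
	}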
@@ -4,12 +4,13 @@ import (
 	"bytes"
 	"fmt"
 	"math"
-	"net/http"
 	"net/url"
 	"strconv"
 	"strings"
 	"time"
 
+	"github.com/velour/catbase/plugins/newsbid/webshit/hn"
+
 	"github.com/gocolly/colly"
 
 	hacknews "github.com/PaulRosset/go-hacknews"
@@ -36,30 +37,14 @@ type Webshit struct {
 	config Config
 }
 
-type Story struct {
-	Title string
-	URL   string
-}
-
-type Stories []Story
-
-func (s Stories) Titles() string {
-	out := ""
-	for i, v := range s {
-		if i > 0 {
-			out += ", "
-		}
-		out += fmt.Sprintf("<%s|%s>", v.URL, v.Title)
-	}
-	return out
-}
-
 type Bid struct {
 	ID    int
 	User  string
 	Title string
 	URL   string
 	Bid   int
+	PlacedScore    int
+	ProcessedScore int
 	Placed    int64
 	Processed int64
 }
@@ -77,8 +62,8 @@ type Balance struct {
 type WeeklyResult struct {
 	User string
 	Won  int
-	WinningArticles Stories
-	LosingArticles  Stories
+	WinningArticles hn.Items
+	LosingArticles  hn.Items
 	Score int
 }
 
@@ -100,7 +85,9 @@ func (w *Webshit) setup() {
 		title string,
 		url string,
 		bid integer,
-		placed integer
+		placed_score integer,
+		processed_score integer,
+		placed integer,
 		processed integer
 	)`)
 	w.db.MustExec(`create table if not exists webshit_balances (
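Since `create table if not exists` does not add the new placed_score and processed_score columns to an already existing webshit_bids table, an installation with old data would need a one-off migration. A hedged sketch of what that could look like, not part of this commit and assuming the plugin's sqlx handle:

	package webshit

	import "github.com/jmoiron/sqlx"

	// migrateBids is a hypothetical helper, not part of this commit. sqlite's
	// ALTER TABLE fails if the column already exists, so errors are ignored.
	func migrateBids(db *sqlx.DB) {
		for _, stmt := range []string{
			`alter table webshit_bids add column placed_score integer`,
			`alter table webshit_bids add column processed_score integer`,
		} {
			db.Exec(stmt)
		}
	}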
@@ -127,7 +114,7 @@ func (w *Webshit) Check() ([]WeeklyResult, error) {
 		return nil, fmt.Errorf("there are no bids against the current ngate post")
 	}
 
-	storyMap := map[string]Story{}
+	storyMap := map[string]hn.Item{}
 	for _, s := range stories {
 		u, err := url.Parse(s.URL)
 		if err != nil {
@@ -160,7 +147,7 @@ func (w *Webshit) Check() ([]WeeklyResult, error) {
 	return wr, nil
 }
 
-func (w *Webshit) checkBids(bids []Bid, storyMap map[string]Story) []WeeklyResult {
+func (w *Webshit) checkBids(bids []Bid, storyMap map[string]hn.Item) []WeeklyResult {
 
 	var wins []Bid
 	total, totalWinning := 0.0, 0.0
@@ -188,7 +175,7 @@ func (w *Webshit) checkBids(bids []Bid, storyMap map[string]Story) []WeeklyResult {
 			rec.WinningArticles = append(rec.WinningArticles, s)
 			totalWinning += float64(b.Bid)
 		} else {
-			rec.LosingArticles = append(rec.LosingArticles, Story{Title: b.Title, URL: b.URL})
+			rec.LosingArticles = append(rec.LosingArticles, hn.Item{Title: b.Title, URL: b.URL})
 		}
 		total += float64(b.Bid)
 		wr[b.User] = rec
@@ -246,7 +233,7 @@ func scrapeScoreAndComments(url string) (int, int, error) {
 }
 
 // GetHeadlines will return the current possible news headlines for bidding
-func (w *Webshit) GetHeadlines() ([]Story, error) {
+func (w *Webshit) GetHeadlines() (hn.Items, error) {
 	news := hacknews.Initializer{Story: w.config.HNFeed, NbPosts: w.config.HNLimit}
 	ids, err := news.GetCodesStory()
 	if err != nil {
@@ -256,9 +243,9 @@ func (w *Webshit) GetHeadlines() ([]Story, error) {
 	if err != nil {
 		return nil, err
 	}
-	var stories []Story
+	var stories hn.Items
 	for _, p := range posts {
-		stories = append(stories, Story{
+		stories = append(stories, hn.Item{
 			Title: p.Title,
 			URL:   p.Url,
 		})
@@ -267,7 +254,7 @@ func (w *Webshit) GetHeadlines() ([]Story, error) {
 }
 
 // GetWeekly will return the headlines in the last webshit weekly report
-func (w *Webshit) GetWeekly() ([]Story, *time.Time, error) {
+func (w *Webshit) GetWeekly() (hn.Items, *time.Time, error) {
 	fp := gofeed.NewParser()
 	feed, err := fp.ParseURL("http://n-gate.com/hackernews/index.rss")
 	if err != nil {
@@ -285,9 +272,9 @@ func (w *Webshit) GetWeekly() ([]Story, *time.Time, error) {
 		return nil, nil, err
 	}
 
-	var items []Story
+	var items hn.Items
 	doc.Find(".storylink").Each(func(i int, s *goquery.Selection) {
-		story := Story{
+		story := hn.Item{
 			Title: s.Find("a").Text(),
 			URL:   s.SiblingsFiltered(".small").First().Find("a").AttrOr("href", ""),
 		}
@@ -382,36 +369,16 @@ func (w *Webshit) Bid(user string, amount int, URL string) (Bid, error) {
 }
 
 // getStoryByURL scrapes the URL for a title
-func (w *Webshit) getStoryByURL(URL string) (Story, error) {
+func (w *Webshit) getStoryByURL(URL string) (hn.Item, error) {
 	u, err := url.Parse(URL)
 	if err != nil {
-		return Story{}, err
+		return hn.Item{}, err
 	}
 	if u.Host != "news.ycombinator.com" {
-		return Story{}, fmt.Errorf("expected HN link")
+		return hn.Item{}, fmt.Errorf("expected HN link")
 	}
-	res, err := http.Get(URL)
-	if err != nil {
-		return Story{}, err
-	}
-	defer res.Body.Close()
-	if res.StatusCode != 200 {
-		return Story{}, fmt.Errorf("bad response code: %d", res.StatusCode)
-	}
-
-	// Load the HTML document
-	doc, err := goquery.NewDocumentFromReader(res.Body)
-	if err != nil {
-		return Story{}, err
-	}
-
-	// Find the review items
-	title := doc.Find("title").Text()
-	title = strings.ReplaceAll(title, " | Hacker News", "")
-	return Story{
-		Title: title,
-		URL:   URL,
-	}, nil
+	id, _ := strconv.Atoi(u.Query().Get("id"))
+	return hn.GetItem(id)
 }
 
 func (w *Webshit) updateScores(results []WeeklyResult) error {
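Taken together, the change swaps HTML scraping for the Firebase API: getStoryByURL now only checks that the link is an HN item page, pulls the id query parameter, and lets hn.GetItem fetch the item. A standalone sketch of that flow, assuming the hn package above; the link is a placeholder:

	package main

	import (
		"fmt"
		"log"
		"net/url"
		"strconv"

		"github.com/velour/catbase/plugins/newsbid/webshit/hn"
	)

	func main() {
		// A hypothetical bid link of the form getStoryByURL accepts.
		u, err := url.Parse("https://news.ycombinator.com/item?id=8863")
		if err != nil || u.Host != "news.ycombinator.com" {
			log.Fatal("expected HN link")
		}
		// The item id comes from the query string; the API supplies the rest.
		id, _ := strconv.Atoi(u.Query().Get("id"))
		item, err := hn.GetItem(id)
		if err != nil {
			log.Fatal(err)
		}
		fmt.Printf("<%s|%s> scored %d\n", item.URL, item.Title, item.Score)
	}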