master
Chakib Benziane 5 years ago
commit a948d4bf69

16
.gitignore vendored

@ -0,0 +1,16 @@
# ---> Go
# Test binary, build with `go test -c`
*.test
# Output of the go coverage tool, specifically when used with LiteIDE
*.out
*.log
# Binary
hugobot
# Sqlite
*.sqlite-*

@ -0,0 +1,52 @@
# Build stage: compile hugobot inside the official golang alpine image.
FROM golang:1.11-alpine as builder
MAINTAINER Chakib <contact@bitcointechweekly.com>

# Copy source
COPY . /go/src/hugobot

# install dependencies and build
RUN apk add --no-cache --upgrade \
    ca-certificates \
    git \
    openssh \
    make \
    alpine-sdk

# `make install` places the hugobot binary in /go/bin.
RUN cd /go/src/hugobot \
    && make install

################################
#### FINAL IMAGE
###############################
# Runtime stage: minimal alpine with only the runtime tools.
FROM alpine as final

ENV WEBSITE_PATH=/website
ENV HUGOBOT_DB_PATH=/db

RUN apk add --no-cache --upgrade \
    ca-certificates \
    bash \
    sqlite \
    jq

# Binary produced by the builder stage.
COPY --from=builder /go/bin/hugobot /bin/

RUN mkdir -p ${HUGOBOT_DB_PATH}
RUN mkdir -p ${WEBSITE_PATH}

# Persist the sqlite database across container restarts.
VOLUME ${HUGOBOT_DB_PATH}

# Expose API ports
EXPOSE 8734

# copy entrypoint
COPY "docker-entrypoint.sh" /entry
ENTRYPOINT ["/entry"]

# Default command: run the scheduler + API server.
CMD ["hugobot", "server"]

@ -0,0 +1,9 @@
# sqlite-web container: a browser UI for inspecting the hugobot database.
FROM coleifer/sqlite

# Build requirements are only needed to compile the pip packages and are
# removed afterwards to keep the image small.
RUN apk add --no-cache --virtual .build-reqs build-base gcc make \
    && pip install --no-cache-dir cython \
    && pip install --no-cache-dir flask peewee sqlite-web \
    && apk del .build-reqs

EXPOSE 8080

# The database directory is mounted here (see docker-compose.yml).
VOLUME /db
WORKDIR /db

# -x selects the database file; -P reads the UI password from
# SQLITE_WEB_PASSWORD.
CMD sqlite_web -H 0.0.0.0 -x $SQLITE_DATABASE -P

@ -0,0 +1,22 @@
# Build configuration for the hugobot binary.
TARGET=hugobot
GOINSTALL := GO111MODULE=on go install -v
GOBUILD := GO111MODULE=on go build -v
PKG := hugobot

.PHONY: all build install

# Default target.
all: build

# Compile the binary into ./$(TARGET).
build:
	$(GOBUILD) -o $(TARGET)

# Install into $GOPATH/bin (used by the Docker build).
install:
	$(GOINSTALL)

@ -0,0 +1,47 @@
**MIRRORED FROM**: https://git.sp4ke.com/sp4ke/hugobot
# HUGOBOT
*hugobot* is an automated content fetch and aggregation bot for [Hugo][hugo] data
driven websites. It has the following features:
## Data fetch
- Add feeds to the bot in the `feeds` sqlite table
- Currently handles these types of feeds: `RSS`, `Github Releases`, `Newsletters`
- Define your own feed types by implementing the `JobHandler` interface (see
`handlers/handlers.go`).
- Hugobot automatically fetches new posts from the feeds you defined
- It runs periodically to download new posts in the defined feeds.
- Everything is saved in an sqlite database
- The scheduler can handle any number of tasks and uses leveldb for
caching/resuming jobs.
## Hugo export
- Data is automatically exported to the configured Hugo website path.
- It can export `markdown` files or `json/toml` data files
- All fields in the exported files can be customized
- You can define custom output formats by using the `FormatHandler` interface.
## API
- *hugobot* also includes a webserver API that can be used with Hugo [Data
Driven Mode][data-driven].
- WIP: Insert and query data
- An example usage is the automated generation of Bitcoin addresses for new
articles on [bitcointechweekly.com][btw-btc]
## Sqliteweb interface
- See Docker files
[data-driven]:https://gohugo.io/templates/data-templates/#data-driven-content
[btw-btc]:https://bitcointechweekly.com/btc/3Jv15g4G5LDnBJPDh1e2ja8NPnADzMxhVh
[hugo]:https://gohugo.io

@ -0,0 +1,71 @@
package main
import (
"io"
"os"
"strconv"
"git.sp4ke.com/sp4ke/hugobot/v3/feeds"
"git.sp4ke.com/sp4ke/hugobot/v3/config"
"git.sp4ke.com/sp4ke/hugobot/v3/bitcoin"
gum "git.sp4ke.com/sp4ke/gum.git"
"github.com/gin-gonic/gin"
)
var (
	// apiLogFile receives a copy of gin's request log (.api.log).
	apiLogFile *os.File
)

// API is the HTTP server unit wrapping the gin router.
type API struct {
	router *gin.Engine
}
// Run registers the HTTP routes, serves the API on the configured port
// in the background, and blocks until the unit manager asks for a stop.
func (api *API) Run(m gum.UnitManager) {
	// Feed management endpoints.
	ctrl := &feeds.FeedCtrl{}
	fg := api.router.Group("/feeds")
	fg.POST("/", ctrl.Create)
	fg.DELETE("/:id", ctrl.Delete)
	fg.GET("/", ctrl.List) // list all feeds
	//fg.Get("/:id", ctrl.GetById) // get one feed

	// Bitcoin address endpoint.
	api.router.Group("/btc").GET("/address", bitcoin.GetAddressCtrl)

	// Serve in the background; a listen failure is fatal.
	go func() {
		if err := api.router.Run(":" + strconv.Itoa(config.C.ApiPort)); err != nil {
			panic(err)
		}
	}()

	// Block until asked to stop, then shut down and acknowledge.
	<-m.ShouldStop()
	api.Shutdown()
	m.Done()
}
func (api *API) Shutdown() {}
// NewApi creates the API instance backed by a default gin router. Router
// logs go to both .api.log and stdout. If the log file cannot be created,
// logging falls back to stdout alone -- the original put a nil *os.File
// into the MultiWriter, which would panic on the first write.
func NewApi() *API {
	var err error
	apiLogFile, err = os.Create(".api.log")
	if err != nil {
		gin.DefaultWriter = os.Stdout
	} else {
		gin.DefaultWriter = io.MultiWriter(apiLogFile, os.Stdout)
	}
	return &API{
		router: gin.Default(),
	}
}

@ -0,0 +1,188 @@
package bitcoin
import (
"database/sql"
"log"
"net/http"
"git.sp4ke.com/sp4ke/hugobot/v3/db"
"github.com/gin-gonic/gin"
sqlite3 "github.com/mattn/go-sqlite3"
)
var DB = db.DB
const (
DBBTCAddressesSchema = `CREATE TABLE IF NOT EXISTS btc_addresses (
addr_id INTEGER PRIMARY KEY,
address TEXT NOT NULL UNIQUE,
address_position INTEGER NOT NULL DEFAULT 0,
linked_article_title TEXT DEFAULT '',
linked_article_id TEXT NOT NULL DEFAULT '',
used INTEGER NOT NULL DEFAULT 0,
synced INTEGER NOT NULL DEFAULT 0
)`
QueryUnusedAddress = `SELECT * FROM btc_addresses WHERE used = 0 LIMIT 1 `
UpdateAddressQuery = `UPDATE btc_addresses
SET linked_article_id = ?,
linked_article_title = ?,
used = ?
WHERE addr_id = ?
`
)
// BTCAddress is a row of the btc_addresses table: a pre-generated bitcoin
// address that can be linked to a single article.
type BTCAddress struct {
	ID      int64  `db:"addr_id"`
	Address string `db:"address"`
	// Position of the address in the pre-generated list -- presumably the
	// wallet derivation index; TODO confirm with the address generator.
	AddrPosition       int64  `db:"address_position"`
	LinkedArticleTitle string `db:"linked_article_title"`
	LinkedArticleID    string `db:"linked_article_id"`
	Used               bool   `db:"used"`   // assigned to an article
	Synced             bool   `db:"synced"` // exported to the website
}
// SetSynced marks the address as exported to the website and persists the
// flag in the database.
func (a *BTCAddress) SetSynced() error {
	a.Synced = true
	_, err := DB.Handle.NamedExec(
		`UPDATE btc_addresses SET synced = :synced WHERE addr_id = :addr_id`,
		a,
	)
	return err
}
// GetAddressByPos returns the address stored at the given position.
func GetAddressByPos(pos int) (*BTCAddress, error) {
	addr := &BTCAddress{}
	err := DB.Handle.Get(addr,
		"SELECT * FROM btc_addresses WHERE address_position = ?",
		pos,
	)
	if err != nil {
		return nil, err
	}
	return addr, nil
}

// GetAddressByArticleID returns the address linked to the given article.
func GetAddressByArticleID(artId string) (*BTCAddress, error) {
	addr := &BTCAddress{}
	err := DB.Handle.Get(addr,
		"SELECT * FROM btc_addresses WHERE linked_article_id = ?",
		artId,
	)
	if err != nil {
		return nil, err
	}
	return addr, nil
}

// GetAllUsedUnsyncedAddresses returns all addresses that are assigned to
// an article but have not been exported yet.
func GetAllUsedUnsyncedAddresses() ([]*BTCAddress, error) {
	var result []*BTCAddress
	err := DB.Handle.Select(&result,
		"SELECT * FROM btc_addresses WHERE used = 1 AND synced = 0",
	)
	if err != nil {
		return nil, err
	}
	return result, nil
}

// GetNextUnused returns one address that has not been assigned yet.
func GetNextUnused() (*BTCAddress, error) {
	addr := &BTCAddress{}
	if err := DB.Handle.Get(addr, QueryUnusedAddress); err != nil {
		return nil, err
	}
	return addr, nil
}
// storeAddressAssignment persists the article-linkage fields of addr.
func storeAddressAssignment(addr *BTCAddress) error {
	_, err := DB.Handle.Exec(UpdateAddressQuery,
		addr.LinkedArticleID,
		addr.LinkedArticleTitle,
		addr.Used,
		addr.ID,
	)
	return err
}

// GetAddressForArticle returns the bitcoin address assigned to the given
// article, assigning the next unused address when the article has none
// yet. When the article already has an address but the title changed, the
// stored title is refreshed.
func GetAddressForArticle(artId string, artTitle string) (*BTCAddress, error) {
	// Check if article already has an assigned address
	addr, err := GetAddressByArticleID(artId)
	sqliteErr, isSqliteErr := err.(sqlite3.Error)
	if (isSqliteErr && sqliteErr.Code != sqlite3.ErrNotFound) ||
		(err != nil && !isSqliteErr && err != sql.ErrNoRows) {
		// Fix: the original logged the literal string "err" instead of
		// the error value.
		log.Println(err)
		return nil, err
	}
	if err == nil {
		// Article already has an address; refresh the title if changed.
		if artTitle != addr.LinkedArticleTitle {
			addr.LinkedArticleTitle = artTitle
			if err := storeAddressAssignment(addr); err != nil {
				return nil, err
			}
		}
		return addr, nil
	}
	// No address yet: take the next unused one and assign it.
	addr, err = GetNextUnused()
	if err != nil {
		return nil, err
	}
	addr.LinkedArticleID = artId
	addr.LinkedArticleTitle = artTitle
	addr.Used = true
	// Store newly assigned address
	if err := storeAddressAssignment(addr); err != nil {
		return nil, err
	}
	return addr, nil
}
// GetAddressCtrl is the gin handler returning (and assigning on demand)
// the bitcoin address for the article given in the query string.
func GetAddressCtrl(c *gin.Context) {
	addr, err := GetAddressForArticle(c.Query("articleId"), c.Query("articleTitle"))
	if err != nil {
		c.JSON(http.StatusBadRequest, gin.H{
			"status": http.StatusBadRequest,
			"error":  err.Error(),
		})
		c.Abort()
		return
	}
	c.JSON(http.StatusOK, gin.H{
		"status": http.StatusOK,
		"addr":   addr.Address,
	})
}
// init creates the btc_addresses table if missing; failure is fatal since
// the package cannot operate without its schema.
func init() {
	_, err := DB.Handle.Exec(DBBTCAddressesSchema)
	if err != nil {
		log.Fatal(err)
	}
}

@ -0,0 +1,85 @@
package main
import (
"git.sp4ke.com/sp4ke/hugobot/v3/export"
"git.sp4ke.com/sp4ke/hugobot/v3/feeds"
"git.sp4ke.com/sp4ke/hugobot/v3/static"
"log"
cli "gopkg.in/urfave/cli.v1"
)
// startServerCmd runs the scheduler and the web API.
var startServerCmd = cli.Command{
	Name:    "server",
	Aliases: []string{"s"},
	Usage:   "Run server",
	Action:  startServer,
}

// exportCmdGrp groups all export-related subcommands.
var exportCmdGrp = cli.Command{
	Name:    "export",
	Aliases: []string{"e"},
	Usage:   "Export to hugo",
	Subcommands: []cli.Command{
		exportPostsCmd,
		exportWeeksCmd,
		exportBTCAddressesCmd,
	},
}

// exportBTCAddressesCmd exports assigned bitcoin addresses.
var exportBTCAddressesCmd = cli.Command{
	Name:   "btc",
	Usage:  "export bitcoin addresses",
	Action: exportAddresses,
}

// exportWeeksCmd exports week pages.
var exportWeeksCmd = cli.Command{
	Name:   "weeks",
	Usage:  "export weeks",
	Action: exportWeeks,
}

// exportPostsCmd exports feed posts and static data.
var exportPostsCmd = cli.Command{
	Name:   "posts",
	Usage:  "Export posts to hugo",
	Action: exportPosts,
}
// startServer is the action of the `server` command; server() (defined
// elsewhere in this package) starts the scheduler and the API.
func startServer(c *cli.Context) {
	server()
}
// exportPosts exports every feed's posts to the Hugo website, then the
// static data files. Any failure aborts the process.
func exportPosts(c *cli.Context) {
	exporter := export.NewHugoExporter()
	allFeeds, err := feeds.ListFeeds()
	if err != nil {
		log.Fatal(err)
	}
	for _, feed := range allFeeds {
		exporter.Export(*feed)
	}
	// Export static data
	if err := static.HugoExportData(); err != nil {
		log.Fatal(err)
	}
}
// exportWeeks is the cli action behind `export weeks`.
func exportWeeks(c *cli.Context) {
	if err := export.ExportWeeks(); err != nil {
		log.Fatal(err)
	}
}

// exportAddresses is the cli action behind `export btc`.
func exportAddresses(c *cli.Context) {
	if err := export.ExportBTCAddresses(); err != nil {
		log.Fatal(err)
	}
}

@ -0,0 +1 @@
api-port = 8734

@ -0,0 +1,58 @@
package config
import (
"log"
"path"
"github.com/fatih/structs"
)
const (
	// BTCQRCodesDir is the subdirectory (under the bitcoin content path)
	// where address QR code images are written.
	BTCQRCodesDir = "qrcodes"
)

// Config holds all runtime settings; fields are set by name through
// RegisterConf.
type Config struct {
	WebsitePath               string // root of the target Hugo website
	GithubAccessToken         string
	RelBitcoinAddrContentPath string // bitcoin content dir, relative to the website
	ApiPort                   int
}

var (
	// C is the global configuration instance, allocated in init.
	C *Config
)
// HugoData returns the Hugo `data` directory of the configured website.
func HugoData() string {
	return path.Join(C.WebsitePath, "data")
}

// HugoContent returns the Hugo `content` directory of the configured
// website.
func HugoContent() string {
	return path.Join(C.WebsitePath, "content")
}

// RelBitcoinAddrContentPath returns the absolute path of the bitcoin
// address content directory (C.RelBitcoinAddrContentPath is relative to
// the website root).
func RelBitcoinAddrContentPath() string {
	return path.Join(C.WebsitePath, C.RelBitcoinAddrContentPath)
}
// RegisterConf stores a configuration value in the global Config by field
// name. Unknown option names are silently ignored so extra keys in the
// config file do not break startup.
func RegisterConf(conf string, val interface{}) error {
	log.Printf("Setting %#v to %#v", conf, val)
	field, ok := structs.New(C).FieldOk(conf)
	if !ok {
		// Option not declared in the Config struct: ignore it.
		return nil
	}
	return field.Set(val)
}
// init allocates the global configuration before any RegisterConf call.
func init() {
	C = new(Config)
}

@ -0,0 +1,72 @@
package db
import (
"fmt"
"log"
"net/url"
"os"
"path/filepath"
"time"
"github.com/jmoiron/sqlx"
_ "github.com/mattn/go-sqlite3"
)
const (
	// Database file name.
	DBName = "hugobot.sqlite"
	// Enforced on every new connection.
	DBPragma = ` PRAGMA foreign_keys = ON; `
	// Env var pointing to the directory holding the db file.
	DBBasePathEnv = "HUGOBOT_DB_PATH"
)

var (
	// DSN options appended to the sqlite connection string.
	DBOptions = map[string]string{
		"_journal_mode": "WAL",
	}
	// DB is the global database handle, opened in init.
	DB *Database
)

// Database wraps the shared sqlx handle.
type Database struct {
	Handle *sqlx.DB
}
// Open opens (or creates) the sqlite database inside the directory given
// by the HUGOBOT_DB_PATH environment variable (default: current dir),
// applying the DSN options and PRAGMAs. A failure to open is fatal.
func (d *Database) Open() error {
	opts := &url.Values{}
	for k, v := range DBOptions {
		opts.Set(k, v)
	}

	// Resolve the base directory for the db file.
	base, ok := os.LookupEnv(DBBasePathEnv)
	if !ok {
		base = "."
	}
	dbPath := filepath.Join(base, DBName)

	dsn := fmt.Sprintf("file:%s?%s", dbPath, opts.Encode())
	log.Printf("Opening sqlite db %s\n", dsn)

	handle, err := sqlx.Open("sqlite3", dsn)
	if err != nil {
		log.Fatal(err)
	}
	d.Handle = handle

	// Execute Pragmas
	d.Handle.MustExec(DBPragma)
	return nil
}
// AutoIncr groups the common id/created columns embedded by row types.
type AutoIncr struct {
	ID      int64     `json:"id"`
	Created time.Time `json:"created"`
}

// init opens the global database as a package side effect; Open aborts
// the process on failure, so its return value is not checked here.
func init() {
	DB = &Database{}
	DB.Open()
}

@ -0,0 +1,42 @@
version: "2.2"

volumes:
  js-deps:
  build:
  # Shared sqlite database, mounted by both services.
  sqlite-db:

services:
  # The hugobot scheduler + API server.
  bot:
    image: hugobot/hugobot
    build: .
    volumes:
      # Replace path_to_website with the real Hugo site path.
      - path_to_website:/website
      - $PWD:/hugobot
      - sqlite-db:/db
    environment:
      - BUILD_DIR=/build
    restart: on-failure
    ports:
      # hugobot API port.
      - "8734:8734"
    working_dir: /hugobot

  # Browser UI for inspecting the sqlite database.
  sqlite-web:
    image: hugobot/sqlite-web
    build:
      context: .
      dockerfile: ./Dockerfile-sqliteweb
    ports:
      - "8080"
    volumes:
      - sqlite-db:/db
    environment:
      - SQLITE_DATABASE=hugobot.sqlite
      - SQLITE_WEB_PASSWORD=hugobot

@ -0,0 +1,15 @@
#!/bin/bash
# Container entrypoint: sanity-check the mounted volumes, then exec the
# requested command (default: `hugobot server`, see the Dockerfile CMD).

set -e

# An empty db dir is allowed -- a fresh database will be created.
if [[ -z "$(ls -A "$HUGOBOT_DB_PATH")" ]];then
	echo "WARNING !! $HUGOBOT_DB_PATH is empty, creating new database !"
fi

# The website must be mounted; refuse to start without it.
if [[ -z "$(ls -A "$WEBSITE_PATH")" ]];then
	echo "you need to mount the website path !"
	exit 1
fi

exec "$@"

@ -0,0 +1,58 @@
package encoder
import (
"encoding/json"
"fmt"
"io"
"github.com/BurntSushi/toml"
)
// Supported encoding types.
const (
	JSON = iota
	TOML
)

// Encoder is the minimal interface shared by the json and toml encoders.
type Encoder interface {
	Encode(v interface{}) error
}

// ExportEncoder wraps an Encoder and adds Hugo front-matter delimiters
// around TOML output.
type ExportEncoder struct {
	encoder Encoder
	w       io.Writer
	eType   int
}
// Encode writes v through the underlying encoder. TOML output is wrapped
// in Hugo front-matter delimiters (+++).
func (ee *ExportEncoder) Encode(v interface{}) error {
	isTOML := ee.eType == TOML
	if isTOML {
		fmt.Fprintf(ee.w, "+++\n")
	}
	err := ee.encoder.Encode(v)
	if isTOML {
		fmt.Fprintf(ee.w, "+++\n")
	}
	return err
}
// NewExportEncoder returns an ExportEncoder writing to w with the given
// encoding type (JSON or TOML).
func NewExportEncoder(w io.Writer, encType int) *ExportEncoder {
	ee := &ExportEncoder{w: w, eType: encType}
	switch encType {
	case JSON:
		ee.encoder = json.NewEncoder(w)
	case TOML:
		ee.encoder = toml.NewEncoder(w)
	}
	return ee
}

@ -0,0 +1,65 @@
package export
import (
"git.sp4ke.com/sp4ke/hugobot/v3/bitcoin"
"git.sp4ke.com/sp4ke/hugobot/v3/config"
"git.sp4ke.com/sp4ke/hugobot/v3/encoder"
"log"
"os"
"path/filepath"
qrcode "github.com/skip2/go-qrcode"
)
// ExportBTCAddresses exports every used-but-unsynced bitcoin address as a
// QR code image plus a markdown page under the website's bitcoin content
// directory, then marks each address as synced.
func ExportBTCAddresses() error {
	addrs, err := bitcoin.GetAllUsedUnsyncedAddresses()
	if err != nil {
		return err
	}
	for _, a := range addrs {
		if err := exportBTCAddress(a); err != nil {
			return err
		}
	}
	return nil
}

// exportBTCAddress writes the QR code and the markdown page for a single
// address and flags it as synced.
func exportBTCAddress(a *bitcoin.BTCAddress) error {
	log.Println("exporting ", a)
	// First export the QR code image.
	qrFileName := a.Address + ".png"
	qrCodePath := filepath.Join(config.RelBitcoinAddrContentPath(),
		config.BTCQRCodesDir, qrFileName)
	if err := qrcode.WriteFile(a.Address, qrcode.Medium, 580, qrCodePath); err != nil {
		return err
	}
	// Then the address page (TOML front matter only).
	filePath := filepath.Join(config.RelBitcoinAddrContentPath(), a.Address+".md")
	data := map[string]interface{}{
		"linked_article_id": a.LinkedArticleID,
	}
	addressPage, err := os.Create(filePath)
	if err != nil {
		return err
	}
	// Close the handle before returning (the original leaked it) and
	// propagate encode errors (the original ignored them).
	defer addressPage.Close()
	tomlExporter := encoder.NewExportEncoder(addressPage, encoder.TOML)
	if err := tomlExporter.Encode(data); err != nil {
		return err
	}
	// Set synced
	return a.SetSynced()
}

@ -0,0 +1,17 @@
package export
import (
"git.sp4ke.com/sp4ke/hugobot/v3/feeds"
"git.sp4ke.com/sp4ke/hugobot/v3/posts"
"strings"
)
// BulletinExport adds the bulletin sub-type (e.g. "bulletin/releases" ->
// "releases") to the export map for feeds under the bulletin section.
// A section equal to exactly "bulletin" (no sub-part) is left untouched;
// the original indexed bulletinInfo[1] unconditionally and would panic.
func BulletinExport(exp Map, feed feeds.Feed, post posts.Post) error {
	parts := strings.Split(feed.Section, "/")
	if parts[0] == "bulletin" && len(parts) > 1 {
		exp["bulletin_type"] = parts[1]
	}
	return nil
}

@ -0,0 +1,214 @@
package export
import (
"fmt"
"log"
"os"
"path/filepath"
"strings"
"time"
"git.sp4ke.com/sp4ke/hugobot/v3/config"
"git.sp4ke.com/sp4ke/hugobot/v3/encoder"
"git.sp4ke.com/sp4ke/hugobot/v3/feeds"
"git.sp4ke.com/sp4ke/hugobot/v3/filters"
"git.sp4ke.com/sp4ke/hugobot/v3/posts"
"git.sp4ke.com/sp4ke/hugobot/v3/types"
"git.sp4ke.com/sp4ke/hugobot/v3/utils"
)
// Registered mapper chains, run on every exported post/feed.
var PostMappers []PostMapper
var FeedMappers []FeedMapper

// Map is a generic export payload.
type Map map[string]interface{}

// PostMapper mutates the export map built for a single post.
type PostMapper func(Map, feeds.Feed, posts.Post) error

// FeedMapper mutates the export map built for a feed.
type FeedMapper func(Map, feeds.Feed) error

// Exported version of a post
type PostExport struct {
	ID        int64     `json:"id"`
	Title     string    `json:"title"`
	Link      string    `json:"link"`
	Published time.Time `json:"published"`
	Content   string    `json:"content"`
}

// PostMap indexes per-post export maps by post id.
type PostMap map[int64]Map

// FeedExport is the JSON shape of an exported feed data file.
type FeedExport struct {
	Name       string           `json:"name"`
	Section    string           `json:"section"`
	Categories types.StringList `json:"categories"`
	Posts      PostMap          `json:"posts"`
}
// HugoExporter exports feeds and their posts into the Hugo website tree.
type HugoExporter struct{}

// Handle implements the feed job handler by running a full export.
func (he HugoExporter) Handle(feed feeds.Feed) error {
	return he.export(feed)
}
// export writes the feed with all its posts as a JSON data file under the
// Hugo data directory and, when feed.ExportPosts is set, also writes one
// markdown page (TOML front matter) per post under the Hugo content dir.
func (he HugoExporter) export(feed feeds.Feed) error {
	log.Printf("Exporting %s to %s", feed.Name, config.HugoData())
	feedPosts, err := posts.GetPostsByFeedId(feed.FeedID)
	if err != nil {
		return err
	}
	if len(feedPosts) == 0 {
		log.Printf("nothing to export")
		return nil
	}
	// Run filters on posts
	for _, p := range feedPosts {
		filters.RunPostFilterHooks(feed, p)
	}
	if err := he.exportFeedData(feed, feedPosts); err != nil {
		return err
	}
	// Handle feeds which export posts individually as hugo posts
	// (like bulletin).
	if feed.ExportPosts {
		for _, p := range feedPosts {
			if err := he.exportPostPage(feed, p); err != nil {
				return err
			}
		}
	}
	return nil
}

// exportFeedData writes <data>/<section>/<feed>.json containing the feed
// metadata and all its posts.
func (he HugoExporter) exportFeedData(feed feeds.Feed, feedPosts []*posts.Post) error {
	// Dir and filename
	dirPath := filepath.Join(config.HugoData(), feed.Section)
	cleanFeedName := strings.Replace(feed.Name, "/", "-", -1)
	filePath := filepath.Join(dirPath, cleanFeedName+".json")
	if err := utils.Mkdir(dirPath); err != nil {
		return err
	}
	feedExp := Map{
		"name":       feed.Name,
		"section":    feed.Section,
		"categories": feed.Categories,
	}
	runFeedMappers(feedExp, feed)
	postsMap := make(PostMap)
	for _, p := range feedPosts {
		exp := Map{
			"id":        p.PostID,
			"title":     p.Title,
			"link":      p.Link,
			"published": p.Published,
			"updated":   p.Updated,
			//"content": p.Content,
		}
		runPostMappers(exp, feed, *p)
		postsMap[p.PostID] = exp
	}
	feedExp["posts"] = postsMap
	// Check the Create error before deferring Close (the original
	// deferred on a possibly nil file), and propagate encode errors.
	outputFile, err := os.Create(filePath)
	if err != nil {
		return err
	}
	defer outputFile.Close()
	return encoder.NewExportEncoder(outputFile, encoder.JSON).Encode(feedExp)
}

// exportPostPage writes a single post as a TOML-front-matter markdown
// file under <content>/<section>/. Extracted into its own function so
// each file handle is closed per post instead of accumulating deferred
// closes inside the loop (the original deferred in a loop).
func (he HugoExporter) exportPostPage(feed feeds.Feed, p *posts.Post) error {
	exp := map[string]interface{}{
		"id":           p.PostID,
		"title":        p.Title,
		"name":         feed.Name,
		"author":       p.Author,
		"description":  p.PostDescription,
		"externalLink": feed.UseExternalLink,
		"display_name": feed.DisplayName,
		"publishdate":  p.Published,
		"date":         p.Updated,
		"issuedate":    utils.NextThursday(p.Updated),
		"use_data":     true,
		"slug":         p.ShortID,
		"link":         p.Link,
		// Content is written in the post
		"content":    p.Content,
		"categories": feed.Categories,
		"tags":       strings.Split(p.Tags, ","),
	}
	if feed.Publications != "" {
		exp["publications"] = strings.Split(feed.Publications, ",")
	}
	runPostMappers(exp, feed, *p)
	dirPath := filepath.Join(config.HugoContent(), feed.Section)
	cleanFeedName := strings.Replace(feed.Name, "/", "-", -1)
	fileName := fmt.Sprintf("%s-%s.md", cleanFeedName, p.ShortID)
	filePath := filepath.Join(dirPath, fileName)
	outputFile, err := os.Create(filePath)
	if err != nil {
		return err
	}
	defer outputFile.Close()
	return encoder.NewExportEncoder(outputFile, encoder.TOML).Encode(exp)
}
// Export runs a full export of the feed and aborts the whole process on
// failure. Intended to be launched from a goroutine.
func (he HugoExporter) Export(feed feeds.Feed) {
	if err := he.export(feed); err != nil {
		log.Fatal(err)
	}
}
// NewHugoExporter makes sure the Hugo data directory exists and returns a
// ready-to-use exporter; failure to create the directory is fatal.
func NewHugoExporter() HugoExporter {
	if err := utils.Mkdir(config.HugoData()); err != nil {
		log.Fatal(err)
	}
	return HugoExporter{}
}
// runPostMappers applies every registered post mapper to the export map;
// mapper errors are logged but do not stop the chain.
func runPostMappers(e Map, f feeds.Feed, p posts.Post) {
	for _, mapper := range PostMappers {
		if err := mapper(e, f, p); err != nil {
			log.Print(err)
		}
	}
}

// runFeedMappers applies every registered feed mapper to the export map;
// mapper errors are logged but do not stop the chain.
func runFeedMappers(e Map, f feeds.Feed) {
	for _, mapper := range FeedMappers {
		if err := mapper(e, f); err != nil {
			log.Print(err)
		}
	}
}
// RegisterPostMapper adds a mapper to the chain applied to every exported
// post.
func RegisterPostMapper(mapper PostMapper) {
	PostMappers = append(PostMappers, mapper)
}

// RegisterFeedMapper adds a mapper to the chain applied to every exported
// feed.
func RegisterFeedMapper(mapper FeedMapper) {
	FeedMappers = append(FeedMappers, mapper)
}

// init registers the built-in section-specific post mappers.
func init() {
	RegisterPostMapper(BulletinExport)
	RegisterPostMapper(NewsletterPostLayout)
	RegisterPostMapper(RFCExport)
	RegisterPostMapper(ReleaseExport)
}

@ -0,0 +1,18 @@
package export
import (
"git.sp4ke.com/sp4ke/hugobot/v3/feeds"
"git.sp4ke.com/sp4ke/hugobot/v3/posts"
"path"
"github.com/gobuffalo/flect"
)
// NewsletterPostLayout sets the Hugo layout for newsletter posts to the
// singularized base name of the section ("newsletters" -> "newsletter").
func NewsletterPostLayout(exp Map, feed feeds.Feed, post posts.Post) error {
	if feed.Section == "bulletin/newsletters" {
		exp["layout"] = path.Base(flect.Singularize(feed.Section))
	}
	return nil
}

@ -0,0 +1,14 @@
package export
import (
"git.sp4ke.com/sp4ke/hugobot/v3/feeds"
"git.sp4ke.com/sp4ke/hugobot/v3/posts"
)
// ReleaseExport copies the raw release JSON payload into the export map
// for feeds in the bulletin/releases section.
func ReleaseExport(exp Map, feed feeds.Feed, post posts.Post) error {
	if feed.Section != "bulletin/releases" {
		return nil
	}
	exp["data"] = post.JsonData
	return nil
}

@ -0,0 +1,15 @@
package export
import (
"git.sp4ke.com/sp4ke/hugobot/v3/feeds"
"git.sp4ke.com/sp4ke/hugobot/v3/posts"
)
// TODO: This happened in the main export file
// RFCExport copies the raw RFC JSON payload into the export map for feeds
// in the bulletin/rfc section.
func RFCExport(exp Map, feed feeds.Feed, post posts.Post) error {
	if feed.Section != "bulletin/rfc" {
		return nil
	}
	exp["data"] = post.JsonData
	return nil
}

@ -0,0 +1,56 @@
// Export all weeks to the weeks content directory
package export
import (
"git.sp4ke.com/sp4ke/hugobot/v3/config"
"git.sp4ke.com/sp4ke/hugobot/v3/encoder"
"git.sp4ke.com/sp4ke/hugobot/v3/utils"
"os"
"path/filepath"
"time"
)
const (
	// FirstWeek is the date of the first bulletin week.
	FirstWeek = "2017-12-07"
)

var (
	// WeeksContentDir is the Hugo content subdirectory for week pages.
	WeeksContentDir = "weeks"
)

// WeekData is the TOML front matter written for each week page.
type WeekData struct {
	Title string
	Date  time.Time
}
// ExportWeeks writes one markdown page (TOML front matter only) for every
// Thursday from FirstWeek until today into the weeks content directory.
func ExportWeeks() error {
	firstWeek, err := time.Parse("2006-01-02", FirstWeek)
	if err != nil {
		return err
	}
	for _, week := range utils.GetAllThursdays(firstWeek, time.Now()) {
		if err := exportWeek(week); err != nil {
			return err
		}
	}
	return nil
}

// exportWeek writes the page for a single week. The file is closed before
// returning (the original leaked one handle per week) and encode errors
// are propagated (the original ignored them).
func exportWeek(week time.Time) error {
	weekName := week.Format("2006-01-02")
	weekFile, err := os.Create(filepath.Join(config.HugoContent(),
		WeeksContentDir,
		weekName+".md"))
	if err != nil {
		return err
	}
	defer weekFile.Close()
	data := WeekData{
		Title: weekName,
		Date:  week,
	}
	return encoder.NewExportEncoder(weekFile, encoder.TOML).Encode(data)
}

@ -0,0 +1,112 @@
package main
import (
"git.sp4ke.com/sp4ke/hugobot/v3/feeds"
"git.sp4ke.com/sp4ke/hugobot/v3/handlers"
"git.sp4ke.com/sp4ke/hugobot/v3/posts"
"fmt"
"log"
"time"
cli "gopkg.in/urfave/cli.v1"
)
// fetchCmd fetches posts for the selected feeds without exporting them.
var fetchCmd = cli.Command{
	Name:    "fetch",
	Aliases: []string{"f"},
	Usage:   "Fetch data from feed",
	Flags: []cli.Flag{
		cli.StringFlag{
			Name:  "since",
			Usage: "Fetch data since `TIME`, defaults to last refresh time",
		},
	},
	Action: fetchFeeds,
}

// feedsCmdGroup lists feeds by default and hosts the fetch subcommand.
var feedsCmdGroup = cli.Command{
	Name:  "feeds",
	Usage: "Feeds related commands. default: list feeds",
	Flags: []cli.Flag{
		cli.IntFlag{
			Name:  "id,i",
			Value: 0,
			Usage: "Feeds `id`",
		},
	},
	Subcommands: []cli.Command{
		fetchCmd,
	},
	Action: listFeeds,
}
// fetchFeeds fetches posts for the selected feeds (all of them, or one
// chosen with the parent --id flag) and prints a summary. With --since,
// posts are fetched from the given time instead of the feed's last
// refresh time.
//
// Fix: the original shadowed err inside the --since branch (t, err :=),
// so fetch errors from that branch were silently dropped; fetch errors
// are now checked through a dedicated variable.
func fetchFeeds(c *cli.Context) {
	fList, err := getFeeds(c.Parent())
	if err != nil {
		log.Fatal(err)
	}
	for _, f := range fList {
		handler := handlers.GetFormatHandler(*f)
		var result []*posts.Post
		var fetchErr error
		if c.IsSet("since") {
			// Parse time
			t, err := time.Parse(time.UnixDate, c.String("since"))
			if err != nil {
				log.Fatal(err)
			}
			result, fetchErr = handler.FetchSince(f.Url, t)
		} else {
			result, fetchErr = handler.FetchSince(f.Url, f.LastRefresh)
		}
		if fetchErr != nil {
			log.Fatal(fetchErr)
		}
		for _, post := range result {
			log.Printf("%s (updated: %s)", post.Title, post.Updated)
		}
		log.Println("Total: ", len(result))
	}
}
// listFeeds prints every feed, or only the one selected with --id.
func listFeeds(c *cli.Context) {
	fList, err := getFeeds(c)
	if err != nil {
		log.Fatal(err)
	}
	for _, feed := range fList {
		fmt.Println(feed)
	}
}
// getFeeds resolves the feeds selected on the command line: the single
// feed named by --id when set, otherwise all feeds.
func getFeeds(c *cli.Context) ([]*feeds.Feed, error) {
	if c.IsSet("id") {
		feed, err := feeds.GetById(c.Int64("id"))
		if err != nil {
			return nil, err
		}
		return []*feeds.Feed{feed}, nil
	}
	return feeds.ListFeeds()
}

@ -0,0 +1,117 @@
package feeds
import (
"git.sp4ke.com/sp4ke/hugobot/v3/types"
"log"
"net/http"
"strconv"
"github.com/gin-gonic/gin"
sqlite3 "github.com/mattn/go-sqlite3"
)
const (
	// MsgOK is the success message returned by the API.
	MsgOK = "OK"
)

var (
	// ErrNotInt is the message returned when a path id is not an integer.
	ErrNotInt = "expected int"
)

// FeedCtrl groups the HTTP handlers for the /feeds routes.
type FeedCtrl struct{}
// Create handles POST /feeds/: binds the JSON form, stores the new feed
// and reports the outcome (406 on bad form or write failure).
func (ctrl FeedCtrl) Create(c *gin.Context) {
	var form FeedForm
	if err := c.ShouldBindJSON(&form); err != nil {
		c.JSON(http.StatusNotAcceptable, gin.H{
			"status":  http.StatusNotAcceptable,
			"message": "invalid form",
			"form":    form,
		})
		c.Abort()
		return
	}
	feed := &Feed{
		Name:       form.Name,
		Url:        form.Url,
		Format:     form.Format,
		Section:    form.Section,
		Categories: types.StringList(form.Categories),
	}
	if err := feed.Write(); err != nil {
		log.Println(err)
		c.JSON(http.StatusNotAcceptable,
			gin.H{"status": http.StatusNotAcceptable, "error": err.Error()})
		c.Abort()
		return
	}
	c.JSON(http.StatusOK, gin.H{"status": http.StatusOK, "message": MsgOK})
}
// List handles GET /feeds/ and returns every feed.
func (ctrl FeedCtrl) List(c *gin.Context) {
	allFeeds, err := ListFeeds()
	if err != nil {
		c.JSON(http.StatusNotAcceptable, gin.H{
			"error":  err.Error(),
			"status": http.StatusNotAcceptable,
		})
		c.Abort()
		return
	}
	c.JSON(http.StatusOK, gin.H{"status": http.StatusOK, "result": allFeeds})
}
// Delete handles DELETE /feeds/:id. Responds 406 on a non-integer id,
// 404 when the feed does not exist, 500 on database errors.
func (ctrl FeedCtrl) Delete(c *gin.Context) {
	id, err := strconv.Atoi(c.Param("id"))
	if err != nil {
		c.JSON(http.StatusNotAcceptable, gin.H{
			"error":  ErrNotInt,
			"status": http.StatusNotAcceptable,
		})
		c.Abort()
		return
	}
	if err := DeleteById(id); err != nil {
		if sqlErr, isSqlErr := err.(sqlite3.Error); isSqlErr {
			c.JSON(http.StatusInternalServerError,
				gin.H{
					"error":  sqlErr.Error(),
					"status": http.StatusInternalServerError,
				})
		} else {
			status := http.StatusInternalServerError
			if err == ErrDoesNotExist {
				status = http.StatusNotFound
			}
			c.JSON(status,
				gin.H{"error": err.Error(), "status": status})
		}
		c.Abort()
		return
	}
	c.JSON(http.StatusOK, gin.H{"status": http.StatusOK, "message": MsgOK})
}

@ -0,0 +1,262 @@
package feeds
import (
"git.sp4ke.com/sp4ke/hugobot/v3/db"
"git.sp4ke.com/sp4ke/hugobot/v3/types"
"errors"
"log"
"time"
sqlite3 "github.com/mattn/go-sqlite3"
)
//sqlite> SELECT feeds.name, url, feed_formats.name AS format_name from feeds JOIN feed_formats ON feeds.format = feed_formats.id;
//
var DB = db.DB
const (
DBFeedSchema = `CREATE TABLE IF NOT EXISTS feeds (
feed_id INTEGER PRIMARY KEY,
name TEXT NOT NULL UNIQUE,
display_name TEXT DEFAULT '',
publications TEXT DEFAULT '',
section TEXT DEFAULT '',
categories TEXT DEFAULT '',
description TEXT DEFAULT '',
url TEXT NOT NULL,
export_posts INTEGER DEFAULT 0,
last_refresh timestamp DEFAULT -1,
created timestamp DEFAULT (strftime('%s')),
interval INTEGER DEFAULT 60,
format INTEGER NOT NULL DEFAULT 0,
serial_run INTEGER DEFAULT 0,
use_external_link INTEGER DEFAULT 0,
FOREIGN KEY (format) REFERENCES feed_formats(id)
)`
DBFeedFormatsSchema = `CREATE TABLE IF NOT EXISTS feed_formats (
id INTEGER PRIMARY KEY,
format_name TEXT NOT NULL UNIQUE
)`
)
const (
QDeleteFeedById = `DELETE FROM feeds WHERE feed_id = ?`
QGetFeed = `SELECT * FROM feeds WHERE feed_id = ?`
QGetFeedByName = `SELECT * FROM feeds WHERE name = ?`
QGetFeedByURL = `SELECT * FROM feeds WHERE url = ?`
QListFeeds = `SELECT
feeds.feed_id,
feeds.name,
feeds.display_name,
feeds.publications,
feeds.section,
feeds.categories,
feeds.description,
feeds.url,
feeds.last_refresh,
feeds.created,
feeds.format,
feeds.serial_run,
feeds.use_external_link,
feeds.interval,
feeds.export_posts,
feed_formats.format_name
FROM feeds
JOIN feed_formats ON feeds.format = feed_formats.id`
)
var (
	// Sentinel errors returned by the feed CRUD helpers.
	ErrDoesNotExist  = errors.New("does not exist")
	ErrAlreadyExists = errors.New("already exists")
)

// FeedFormat enumerates the supported feed source formats.
type FeedFormat int

// Feed Formats
const (
	FormatRSS FeedFormat = iota
	FormatHTML
	FormatJSON
	FormatTweet
	FormatRFC
	FormatGHRelease
)

// FeedFormats maps each format to the name stored in feed_formats.
var FeedFormats = map[FeedFormat]string{
	FormatRSS:       "RSS",
	FormatHTML:      "HTML",
	FormatJSON:      "JSON",
	FormatTweet:     "TWEET",
	FormatRFC:       "RFC",
	FormatGHRelease: "GithubRelease",
}
// Feed is a row of the feeds table, joined with its format name.
type Feed struct {
	FeedID       int64            `json:"id" db:"feed_id"`
	Name         string           `json:"name" db:"name"`
	Section      string           `json:"section,omitempty"`
	Categories   types.StringList `json:"categories,omitempty"`
	Description  string           `json:"description"`
	Url          string           `json:"url"`
	Format       FeedFormat       `json:"-"`
	FormatString string           `json:"format" db:"format_name"`
	LastRefresh  time.Time        `db:"last_refresh" json:"last_refresh"` // timestamp time.Unix()
	Created      time.Time        `json:"created"`
	DisplayName  string           `db:"display_name"`
	Publications string           `json:"-"`

	// This feed's posts should also be exported individually
	ExportPosts bool `json:"export_posts" db:"export_posts"`

	// Time in seconds between each polling job on the news feed
	Interval float64 `json:"refresh_interval"`

	Serial bool `json:"serial" db:"serial_run"` // Jobs for this feed should run in series

	// Items which only contain summaries and redirect to external content
	// like publications and newsletters
	UseExternalLink bool `json:"use_external_link" db:"use_external_link"`
}
// Write inserts the feed; returns ErrAlreadyExists when a feed with the
// same unique name is already stored.
func (f *Feed) Write() error {
	const query = `INSERT INTO feeds
	(name, section, categories, url, format)
	VALUES(:name, :section, :categories, :url, :format)`
	_, err := DB.Handle.NamedExec(query, f)
	if sqlErr, ok := err.(sqlite3.Error); ok && sqlErr.Code == sqlite3.ErrConstraint {
		return ErrAlreadyExists
	}
	return err
}
// UpdateRefreshTime records t as the feed's last refresh time, both on
// the struct and in the database.
func (f *Feed) UpdateRefreshTime(t time.Time) error {
	f.LastRefresh = t
	_, err := DB.Handle.Exec(
		`UPDATE feeds SET last_refresh = ? WHERE feed_id = ?`,
		f.LastRefresh, f.FeedID,
	)
	return err
}
// getFeed runs a single-row feed query and fills in the format name from
// the FeedFormats table.
func getFeed(query string, arg interface{}) (*Feed, error) {
	feed := &Feed{}
	if err := DB.Handle.Get(feed, query, arg); err != nil {
		return nil, err
	}
	feed.FormatString = FeedFormats[feed.Format]
	return feed, nil
}

// GetById returns the feed with the given primary key.
func GetById(id int64) (*Feed, error) {
	return getFeed(QGetFeed, id)
}

// GetByName returns the feed with the given unique name.
func GetByName(name string) (*Feed, error) {
	return getFeed(QGetFeedByName, name)
}

// GetByURL returns the feed with the given url.
func GetByURL(url string) (*Feed, error) {
	return getFeed(QGetFeedByURL, url)
}

// ListFeeds returns all feeds joined with their format names.
func ListFeeds() ([]*Feed, error) {
	var feeds []*Feed
	if err := DB.Handle.Select(&feeds, QListFeeds); err != nil {
		return nil, err
	}
	return feeds, nil
}
// DeleteById removes a feed row; returns ErrDoesNotExist when no feed
// with that id can be loaded first.
func DeleteById(id int) error {
	var existing Feed
	if err := DB.Handle.Get(&existing, QGetFeed, id); err != nil {
		return ErrDoesNotExist
	}
	_, err := DB.Handle.Exec(QDeleteFeedById, id)
	return err
}
// ShouldRefresh reports the seconds elapsed since the feed's last refresh
// and whether that delta has reached the feed's polling interval.
func (feed *Feed) ShouldRefresh() (float64, bool) {
	elapsed := time.Since(feed.LastRefresh).Seconds()
	return elapsed, elapsed >= feed.Interval
}
// init creates the feeds tables and populates the feed_formats lookup
// table. Re-inserting existing formats on every startup is expected to
// hit constraint violations, which are ignored; any other insert error
// is fatal. (The original condition was inverted: it panicked on the
// expected unique violation and silently ignored all other sqlite
// errors.)
func init() {
	if _, err := DB.Handle.Exec(DBFeedSchema); err != nil {
		log.Fatal(err)
	}
	if _, err := DB.Handle.Exec(DBFeedFormatsSchema); err != nil {
		log.Fatal(err)
	}
	// Populate feed formats
	query := `INSERT INTO feed_formats (id, format_name) VALUES (?, ?)`
	for k, v := range FeedFormats {
		_, err := DB.Handle.Exec(query, k, v)
		if err == nil {
			continue
		}
		sqlErr, ok := err.(sqlite3.Error)
		if !ok || sqlErr.Code != sqlite3.ErrConstraint {
			// Not a duplicate-row constraint: a real failure.
			log.Panic(err)
		}
	}
}

@ -0,0 +1,9 @@
package feeds
// FeedForm is the JSON payload accepted by the feed creation endpoint.
type FeedForm struct {
	Name       string     `form:"name" binding:"required"`
	Url        string     `form:"url" binding:"required"`
	Format     FeedFormat `form:"format"`
	Categories []string   `form:"categories"`
	Section    string     `form:"section"`
}

@ -0,0 +1,26 @@
package filters
import (
"git.sp4ke.com/sp4ke/hugobot/v3/feeds"
"git.sp4ke.com/sp4ke/hugobot/v3/posts"
"log"
)
// FilterHook inspects or mutates a fetched post before it is exported.
type FilterHook func(feed feeds.Feed, post *posts.Post) error

var (
	// PostFilters is the chain of hooks run on every fetched post.
	PostFilters []FilterHook
)

// RegisterPostFilterHook appends a hook to the post filter chain.
func RegisterPostFilterHook(hook FilterHook) {
	PostFilters = append(PostFilters, hook)
}
// RunPostFilterHooks applies every registered filter to the post; a
// failing filter aborts the whole process.
func RunPostFilterHooks(feed feeds.Feed, post *posts.Post) {
	for _, hook := range PostFilters {
		if err := hook(feed, post); err != nil {
			log.Fatal(err)
		}
	}
}

@ -0,0 +1,65 @@
package filters
import (
"git.sp4ke.com/sp4ke/hugobot/v3/feeds"
"git.sp4ke.com/sp4ke/hugobot/v3/posts"
"strings"
"github.com/PuerkitoBio/goquery"
)
const (
	// CSS selector of MailChimp's hidden preview-text element.
	PreviewTextSel = ".mcnPreviewText"
)

var (
	// Elements stripped from MailChimp newsletter HTML before export.
	RemoveSelectors = []string{"style", ".footerContainer", "#awesomewrap", "#templatePreheader", "img", "head"}
)
// mailChimpFilter cleans MailChimp newsletter HTML: removes boilerplate
// elements and clears a description that merely duplicates the content.
func mailChimpFilter(feed feeds.Feed, post *posts.Post) error {
	// Nothing to do when description and content are both empty.
	if post.Content == "" && post.PostDescription == post.Content {
		return nil
	}
	// A description identical to the content adds nothing: drop it.
	if post.PostDescription == post.Content {
		post.PostDescription = ""
	}
	doc, err := goquery.NewDocumentFromReader(strings.NewReader(post.Content))
	if err != nil {
		return err
	}
	doc.Find(strings.Join(RemoveSelectors, ",")).Remove()
	post.Content, err = doc.Html()
	return err
}
// extractPreviewText fills an empty post description with MailChimp's
// hidden preview text, when present in the content HTML.
func extractPreviewText(feed feeds.Feed, post *posts.Post) error {
	if post.PostDescription != "" {
		// A description is already set: keep it.
		return nil
	}
	doc, err := goquery.NewDocumentFromReader(strings.NewReader(post.Content))
	if err != nil {
		return err
	}
	post.PostDescription = doc.Find(PreviewTextSel).Text()
	return nil
}
// init wires the MailChimp cleanup filters into the post filter chain.
func init() {
	RegisterPostFilterHook(mailChimpFilter)
	RegisterPostFilterHook(extractPreviewText)
}

@ -0,0 +1,21 @@
package github
import (
"git.sp4ke.com/sp4ke/hugobot/v3/config"
"context"
"github.com/google/go-github/github"
"golang.org/x/oauth2"
)
// Auth returns a github client authenticated with the configured
// personal access token.
func Auth(ctx context.Context) *github.Client {
	src := oauth2.StaticTokenSource(
		&oauth2.Token{AccessToken: config.C.GithubAccessToken},
	)
	return github.NewClient(oauth2.NewClient(ctx, src))
}

@ -0,0 +1,75 @@
package github
import (
"log"
"net/url"
"strings"
"time"
"github.com/google/go-github/github"
)
// ParseOwnerRepo extracts the owner and repository name from a GitHub
// URL such as "https://github.com/owner/repo[/...]".
//
// It panics when the URL cannot be parsed or when its path does not
// contain at least owner and repository segments; callers are expected
// to supply well-formed feed URLs.
func ParseOwnerRepo(Url string) (owner, repo string) {
	parsed, err := url.Parse(Url)
	if err != nil {
		panic(err)
	}
	parts := strings.Split(strings.TrimPrefix(parsed.Path, "/"), "/")
	if len(parts) < 2 {
		// Previously this was an anonymous index-out-of-range panic;
		// fail with an actionable message instead.
		panic("github: URL has no owner/repo path: " + Url)
	}
	owner = parts[0]
	repo = parts[1]
	return owner, repo
}
// RespMiddleware logs the remaining GitHub API rate budget for resp
// and warns loudly once the rate limit is hit. A nil response is a
// no-op.
func RespMiddleware(resp *github.Response) {
	if resp == nil {
		return
	}
	log.Printf("Rate remaining: %d/%d (reset: %s)", resp.Rate.Remaining, resp.Rate.Limit, resp.Rate.Reset)
	if _, limited := github.CheckResponse(resp.Response).(*github.RateLimitError); limited {
		log.Printf("HIT RATE LIMIT !!!")
	}
}
// Release is the flattened payload stored for a GitHub release (or a
// tag-only pseudo-release when IsTagOnly-style handling applies
// upstream) of a watched repository.
type Release struct {
	Version     string    `json:"version"`
	ID          int64     `json:"ID"`
	Date        time.Time `json:"date"`
	Description string    `json:"description"`
	URL         string    `json:"url"`
	ShortURL    string    `json:"short_url"`
	HtmlURL     string    `json:"html_url"`
	Name        string    `json:"name"`
	TagName     string    `json:"tag_name"`
}

// PR is the flattened payload stored for a GitHub pull request.
type PR struct {
	Title    string    `json:"title"`
	URL      string    `json:"url"`
	HtmlURL  string    `json:"html_url"`
	Number   int       `json:"number"`
	Date     time.Time `json:"date"`
	IssueURL string    `json:"issue_url"`
	Body     string    `json:"body"`
}

// Issue is the flattened payload stored for a GitHub issue or pull
// request (IsPR). Merged/MergedAt are only meaningful for PRs; IsUpdate
// marks an open issue that was edited after creation.
type Issue struct {
	Title    string    `json:"title"`
	URL      string    `json:"url"`
	ShortURL string    `json:"short_url"`
	Number   int       `json:"number"`
	State    string    `json:"state"`
	Updated  time.Time `json:"updated"`
	Created  time.Time `json:"created"`
	Comments int       `json:"comments"`
	HtmlURL  string    `json:"html_url"`
	Open     bool      `json:"opened_issue"`
	IsPR     bool      `json:"is_pr"`
	Merged   bool      `json:"merged"` // only for PRs
	MergedAt time.Time `json:"merged_at"`
	IsUpdate bool      `json:"is_update"`
}

@ -0,0 +1,35 @@
module git.sp4ke.com/sp4ke/hugobot/v3
go 1.12
require (
git.sp4ke.com/sp4ke/gum.git v0.0.0-20190304130815-31be968b7b17
github.com/BurntSushi/toml v0.3.1
github.com/PuerkitoBio/goquery v1.5.0
github.com/beeker1121/goque v2.0.1+incompatible
github.com/fatih/structs v1.1.0
github.com/gin-contrib/sse v0.0.0-20190301062529-5545eab6dad3 // indirect
github.com/gin-gonic/gin v1.3.0
github.com/gobuffalo/flect v0.1.1
github.com/gofrs/uuid v3.2.0+incompatible
github.com/google/go-github v17.0.0+incompatible
github.com/google/go-querystring v1.0.0 // indirect
github.com/jmoiron/sqlx v1.2.0
github.com/json-iterator/go v1.1.6 // indirect
github.com/mattn/go-isatty v0.0.7 // indirect
github.com/mattn/go-sqlite3 v1.10.0
github.com/mmcdole/gofeed v1.0.0-beta2
github.com/mmcdole/goxpp v0.0.0-20181012175147-0068e33feabf // indirect
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
github.com/modern-go/reflect2 v1.0.1 // indirect
github.com/onsi/ginkgo v1.8.0 // indirect
github.com/skip2/go-qrcode v0.0.0-20190110000554-dc11ecdae0a9
github.com/syndtr/goleveldb v1.0.0
github.com/teris-io/shortid v0.0.0-20171029131806-771a37caa5cf
github.com/ugorji/go v1.1.4 // indirect
github.com/urfave/cli v1.20.0
golang.org/x/oauth2 v0.0.0-20190402181905-9f3314589c9a
gopkg.in/go-playground/assert.v1 v1.2.1 // indirect
gopkg.in/go-playground/validator.v8 v8.18.2 // indirect
gopkg.in/urfave/cli.v1 v1.20.0
)

110
go.sum

@ -0,0 +1,110 @@
cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
git.sp4ke.com/sp4ke/gum.git v0.0.0-20190304130815-31be968b7b17 h1:rpKl7uNionkSHPXPP/kmdrMYCyqJ7VE4dta8VCJGyf8=
git.sp4ke.com/sp4ke/gum.git v0.0.0-20190304130815-31be968b7b17/go.mod h1:Kp1clomdxSOGlb1aSkYTd7tOoJq4ww/oVTkcjfkpPBo=
github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ=
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
github.com/PuerkitoBio/goquery v1.5.0 h1:uGvmFXOA73IKluu/F84Xd1tt/z07GYm8X49XKHP7EJk=
github.com/PuerkitoBio/goquery v1.5.0/go.mod h1:qD2PgZ9lccMbQlc7eEOjaeRlFQON7xY8kdmcsrnKqMg=
github.com/andybalholm/cascadia v1.0.0 h1:hOCXnnZ5A+3eVDX8pvgl4kofXv2ELss0bKcqRySc45o=
github.com/andybalholm/cascadia v1.0.0/go.mod h1:GsXiBklL0woXo1j/WYWtSYYC4ouU9PqHO0sqidkEA4Y=
github.com/beeker1121/goque v2.0.1+incompatible h1:5nJHPMqQLxUvGFc8m/NW2QzxKyc0zICmqs/JUsmEjwE=
github.com/beeker1121/goque v2.0.1+incompatible/go.mod h1:L6dOWBhDOnxUVQsb0wkLve0VCnt2xJW/MI8pdRX4ANw=
github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/fatih/structs v1.1.0 h1:Q7juDM0QtcnhCpeyLGQKyg4TOIghuNXrkL32pHAUMxo=
github.com/fatih/structs v1.1.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M=
github.com/fsnotify/fsnotify v1.4.7 h1:IXs+QLmnXW2CcXuY+8Mzv/fWEsPGWxqefPtCP5CnV9I=
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
github.com/gin-contrib/sse v0.0.0-20190301062529-5545eab6dad3 h1:t8FVkw33L+wilf2QiWkw0UV77qRpcH/JHPKGpKa2E8g=
github.com/gin-contrib/sse v0.0.0-20190301062529-5545eab6dad3/go.mod h1:VJ0WA2NBN22VlZ2dKZQPAPnyWw5XTlK1KymzLKsr59s=
github.com/gin-gonic/gin v1.3.0 h1:kCmZyPklC0gVdL728E6Aj20uYBJV93nj/TkwBTKhFbs=
github.com/gin-gonic/gin v1.3.0/go.mod h1:7cKuhb5qV2ggCFctp2fJQ+ErvciLZrIeoOSOm6mUr7Y=
github.com/go-sql-driver/mysql v1.4.0 h1:7LxgVwFb2hIQtMm87NdgAVfXjnt4OePseqT1tKx+opk=
github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w=
github.com/gobuffalo/flect v0.1.1 h1:GTZJjJufv9FxgRs1+0Soo3wj+Md3kTUmTER/YE4uINA=
github.com/gobuffalo/flect v0.1.1/go.mod h1:8JCgGVbRjJhVgD6399mQr4fx5rRfGKVzFjbj6RE/9UI=
github.com/gofrs/uuid v3.2.0+incompatible h1:y12jRkkFxsd7GpqdSZ+/KCs/fJbqpEXSGd4+jfEaewE=
github.com/gofrs/uuid v3.2.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM=
github.com/golang/protobuf v1.2.0 h1:P3YflyNX/ehuJFLhxviNdFxQPkGK5cDcApsge1SqnvM=
github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db h1:woRePGFeVFfLKN/pOkfl+p/TAqKOfFu+7KPlMVpok/w=
github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
github.com/google/go-github v17.0.0+incompatible h1:N0LgJ1j65A7kfXrZnUDaYCs/Sf4rEjNlfyDHW9dolSY=
github.com/google/go-github v17.0.0+incompatible/go.mod h1:zLgOLi98H3fifZn+44m+umXrS52loVEgC2AApnigrVQ=
github.com/google/go-querystring v1.0.0 h1:Xkwi/a1rcvNg1PPYe5vI8GbeBY/jrVuDX5ASuANWTrk=
github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck=
github.com/hpcloud/tail v1.0.0 h1:nfCOvKYfkgYP8hkirhJocXT2+zOD8yUNjXaWfTlyFKI=
github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
github.com/jmoiron/sqlx v1.2.0 h1:41Ip0zITnmWNR/vHV+S4m+VoUivnWY5E4OJfLZjCJMA=
github.com/jmoiron/sqlx v1.2.0/go.mod h1:1FEQNm3xlJgrMD+FBdI9+xvCksHtbpVBBw5dYhBSsks=
github.com/json-iterator/go v1.1.6 h1:MrUvLMLTMxbqFJ9kzlvat/rYZqZnW3u4wkLzWTaFwKs=
github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU=
github.com/lib/pq v1.0.0 h1:X5PMW56eZitiTeO7tKzZxFCSpbFZJtkMMooicw2us9A=
github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/mattn/go-isatty v0.0.7 h1:UvyT9uN+3r7yLEYSlJsbQGdsaB/a0DlgWP3pql6iwOc=
github.com/mattn/go-isatty v0.0.7/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
github.com/mattn/go-sqlite3 v1.9.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc=
github.com/mattn/go-sqlite3 v1.10.0 h1:jbhqpg7tQe4SupckyijYiy0mJJ/pRyHvXf7JdWK860o=
github.com/mattn/go-sqlite3 v1.10.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc=
github.com/mmcdole/gofeed v1.0.0-beta2 h1:CjQ0ADhAwNSb08zknAkGOEYqr8zfZKfrzgk9BxpWP2E=
github.com/mmcdole/gofeed v1.0.0-beta2/go.mod h1:/BF9JneEL2/flujm8XHoxUcghdTV6vvb3xx/vKyChFU=
github.com/mmcdole/goxpp v0.0.0-20181012175147-0068e33feabf h1:sWGE2v+hO0Nd4yFU/S/mDBM5plIU8v/Qhfz41hkDIAI=
github.com/mmcdole/goxpp v0.0.0-20181012175147-0068e33feabf/go.mod h1:pasqhqstspkosTneA62Nc+2p9SOBBYAPbnmRRWPQ0V8=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/reflect2 v1.0.1 h1:9f412s+6RmYXLWZSEzVVgPGK7C2PphHj5RJrvfx9AWI=
github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/ginkgo v1.7.0 h1:WSHQ+IS43OoUrWtD1/bbclrwK8TTH5hzp+umCiuxHgs=
github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/ginkgo v1.8.0 h1:VkHVNpR4iVnU8XQR6DBm8BqYjN7CRzw+xKUbVVbbW9w=
github.com/onsi/ginkgo v1.8.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/gomega v1.4.3 h1:RE1xgDvH7imwFD45h+u2SgIfERHlS2yNG4DObb5BSKU=
github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/skip2/go-qrcode v0.0.0-20190110000554-dc11ecdae0a9 h1:lpEzuenPuO1XNTeikEmvqYFcU37GVLl8SRNblzyvGBE=
github.com/skip2/go-qrcode v0.0.0-20190110000554-dc11ecdae0a9/go.mod h1:PLPIyL7ikehBD1OAjmKKiOEhbvWyHGaNDjquXMcYABo=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/syndtr/goleveldb v1.0.0 h1:fBdIW9lB4Iz0n9khmH8w27SJ3QEJ7+IgjPEwGSZiFdE=
github.com/syndtr/goleveldb v1.0.0/go.mod h1:ZVVdQEZoIme9iO1Ch2Jdy24qqXrMMOU6lpPAyBWyWuQ=
github.com/teris-io/shortid v0.0.0-20171029131806-771a37caa5cf h1:Z2X3Os7oRzpdJ75iPqWZc0HeJWFYNCvKsfpQwFpRNTA=
github.com/teris-io/shortid v0.0.0-20171029131806-771a37caa5cf/go.mod h1:M8agBzgqHIhgj7wEn9/0hJUZcrvt9VY+Ln+S1I5Mha0=
github.com/ugorji/go v1.1.4 h1:j4s+tAvLfL3bZyefP2SEWmhBzmuIlH/eqNuPdFPgngw=
github.com/ugorji/go v1.1.4/go.mod h1:uQMGLiO92mf5W77hV/PUCpI3pbzQx3CRekS0kk+RGrc=
github.com/urfave/cli v1.20.0 h1:fDqGv3UG/4jbVl/QkFwEdddtEDjh/5Ov6X+0B/3bPaw=
github.com/urfave/cli v1.20.0/go.mod h1:70zkFmudgCuE/ngEzBv17Jvp/497gISqfk5gWijbERA=
golang.org/x/net v0.0.0-20180218175443-cbe0f9307d01/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e h1:bRhVy7zSSasaqNksaRZiA5EEI+Ei4I1nO5Jh72wfHlg=
golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/oauth2 v0.0.0-20190402181905-9f3314589c9a h1:tImsplftrFpALCYumobsd0K86vlAs/eXGFms2txfJfA=
golang.org/x/oauth2 v0.0.0-20190402181905-9f3314589c9a/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4 h1:YUO/7uOKsKeq9UokNS62b8FYywz3ker1l1vDZRCRefw=
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223 h1:DH4skfRX4EBpamg7iV4ZlCpblAHI6s6TDM39bFZumv8=
golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
google.golang.org/appengine v1.4.0 h1:/wp5JvzpHIxhs/dumFmF7BXTf3Z+dd4uXta4kVyO508=
google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/fsnotify.v1 v1.4.7 h1:xOHLXZwVvI9hhs+cLKq5+I5onOuwQLhQwiu63xxlHs4=
gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
gopkg.in/go-playground/assert.v1 v1.2.1 h1:xoYuJVE7KT85PYWrN730RguIQO0ePzVRfFMXadIrXTM=
gopkg.in/go-playground/assert.v1 v1.2.1/go.mod h1:9RXL0bg/zibRAgZUYszZSwO/z8Y/a8bDuhia5mkpMnE=
gopkg.in/go-playground/validator.v8 v8.18.2 h1:lFB4DoMU6B626w8ny76MV7VX6W2VHct2GVOI3xgiMrQ=
gopkg.in/go-playground/validator.v8 v8.18.2/go.mod h1:RX2a/7Ha8BgOhfk7j780h4/u/RRjR0eouCJSH80/M2Y=
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ=
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
gopkg.in/urfave/cli.v1 v1.20.0 h1:NdAVW6RYxDif9DhDHaAortIu956m2c0v+09AZBPTbE0=
gopkg.in/urfave/cli.v1 v1.20.0/go.mod h1:vuBzUtMdQeixQj8LVd+/98pzhxNGQoyuPBlsXHOQNO0=
gopkg.in/yaml.v2 v2.2.1 h1:mUhvW9EsL+naU5Q3cakzfE91YhliOondGd6ZrsDBHQE=
gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=

@ -0,0 +1,36 @@
package handlers
import (
"git.sp4ke.com/sp4ke/hugobot/v3/feeds"
"git.sp4ke.com/sp4ke/hugobot/v3/posts"
"log"
"time"
)
// JobHandler processes one feed end to end (fetch and store).
type JobHandler interface {
	// Main handling function
	Handle(feeds.Feed) error
}

// FormatHandler is a JobHandler that can additionally fetch only the
// posts published after a given time from a feed URL.
type FormatHandler interface {
	FetchSince(url string, time time.Time) ([]*posts.Post, error)
	JobHandler // Also implements a job handler
}
// GetFormatHandler returns the FormatHandler matching feed.Format.
// For an unknown format it logs a warning and returns nil, so callers
// must be prepared for a nil result.
func GetFormatHandler(feed feeds.Feed) FormatHandler {
	switch feed.Format {
	case feeds.FormatRSS:
		return NewRSSHandler()
	case feeds.FormatRFC:
		return NewRFCHandler()
	case feeds.FormatGHRelease:
		return NewGHReleaseHandler()
	default:
		log.Printf("WARNING: No format handler for %s", feed.FormatString)
		return nil
	}
}

@ -0,0 +1,200 @@
package handlers
import (
"git.sp4ke.com/sp4ke/hugobot/v3/feeds"
"git.sp4ke.com/sp4ke/hugobot/v3/github"
"git.sp4ke.com/sp4ke/hugobot/v3/posts"
"git.sp4ke.com/sp4ke/hugobot/v3/utils"
"context"
"errors"
"log"
"time"
githubApi "github.com/google/go-github/github"
)
const (
	// NoReleaseForProjectTreshold — NOTE(review): not referenced in this
	// file; presumably a cutoff for projects without releases. Confirm
	// usage before removing. (Name keeps the original "Treshold" typo to
	// avoid breaking external references.)
	NoReleaseForProjectTreshold = 10
)

// GHRelease is the JSON payload stored for one GitHub release, or for
// one tag when the repository has no releases (IsTagOnly).
type GHRelease struct {
	ProjectID int64     `json:"project_id"`
	ReleaseID int64     `json:"release_id"`
	TagID     string    `json:"tag_id"`
	Name      string    `json:"name"`
	IsTagOnly bool      `json:"is_tag_only"`
	Date      time.Time `json:"commit_date"`
	TarBall   string    `json:"tar_ball"`
	Link      string    `json:"link"`
	Owner     string    `json:"owner"`
	Repo      string    `json:"repo"`
}

// GHReleaseHandler fetches GitHub releases/tags through an
// authenticated API client (see NewGHReleaseHandler).
type GHReleaseHandler struct {
	ctx      context.Context
	ghClient *githubApi.Client
}
// Handle fetches every release (or tag) published since the feed's
// last refresh and writes each resulting post to the database.
func (handler GHReleaseHandler) Handle(feed feeds.Feed) error {
	newPosts, err := handler.FetchSince(feed.Url, feed.LastRefresh)
	if err != nil {
		return err
	}
	if newPosts == nil {
		log.Printf("No new posts in feed <%s>", feed.Name)
	}
	for _, post := range newPosts {
		tagOnly, ok := post.JsonData["is_tag_only"].(bool)
		if !ok {
			return errors.New("could not convert is_tag_only to bool")
		}
		// Tag-only entries have no numeric release id usable as a
		// stable short id.
		var writeErr error
		if tagOnly {
			writeErr = post.Write(feed.FeedID)
		} else {
			writeErr = post.WriteWithShortId(feed.FeedID, post.JsonData["release_id"])
		}
		if writeErr != nil {
			return writeErr
		}
	}
	return nil
}
// FetchSince returns one post per release of the GitHub repository at
// url created after `after`. When the repository has no releases at
// all, its tags are used instead (marked IsTagOnly).
//
// NOTE(review): only the first 100 releases are listed (no pagination),
// while tags are fully paginated — confirm this asymmetry is intended.
func (handler GHReleaseHandler) FetchSince(url string,
	after time.Time) ([]*posts.Post, error) {
	var results []*posts.Post

	log.Printf("Fetching GH release %s since %v", url, after)

	owner, repo := github.ParseOwnerRepo(url)

	// Project lookup also provides the numeric project id stored with
	// each release.
	project, resp, err := handler.ghClient.Repositories.Get(handler.ctx, owner, repo)
	if resp == nil {
		return nil, errors.New("No response")
	}
	github.RespMiddleware(resp)
	if err != nil {
		return nil, err
	}

	// Handle releases first, if project has no releases use tags instead
	listReleasesOptions := &githubApi.ListOptions{
		PerPage: 100,
	}
	releases, resp, err := handler.ghClient.Repositories.ListReleases(
		handler.ctx, owner, repo, listReleasesOptions,
	)
	github.RespMiddleware(resp)
	if err != nil {
		return nil, err
	}

	// If no releases use tags
	if len(releases) <= 0 {
		log.Println("no releases, using tags")
		var allTags []*githubApi.RepositoryTag

		// Handle tags first: page through the full tag list.
		listTagOptions := &githubApi.ListOptions{PerPage: 100}
		for {
			tags, resp, err := handler.ghClient.Repositories.ListTags(
				handler.ctx, owner, repo, listTagOptions,
			)
			if err != nil {
				return nil, err
			}
			allTags = append(allTags, tags...)
			if resp.NextPage == 0 {
				break
			}
			listTagOptions.Page = resp.NextPage
		}

		for _, tag := range allTags {
			//var release *githubApi.RepositoryRelease
			//
			// One extra API call per tag to obtain the commit date.
			commit, resp, err := handler.ghClient.Repositories.GetCommit(
				handler.ctx, owner, repo, tag.GetCommit().GetSHA(),
			)
			github.RespMiddleware(resp)
			if err != nil {
				return nil, err
			}
			// Stop at the first tag older than the cutoff — assumes the
			// API returns tags newest-first (TODO confirm).
			if commit.GetCommit().GetCommitter().GetDate().Before(after) {
				break
			}
			ghRelease := GHRelease{
				ProjectID: project.GetID(),
				TagID:     tag.GetName(),
				IsTagOnly: true,
				Date:      commit.GetCommit().GetCommitter().GetDate(),
				TarBall:   tag.GetTarballURL(),
				Owner:     owner,
				Repo:      repo,
			}
			post := &posts.Post{}
			post.Title = tag.GetName()
			post.Link = ghRelease.TarBall
			post.Published = ghRelease.Date
			post.Updated = post.Published
			post.JsonData = utils.StructToJsonMap(ghRelease)
			post.Author = commit.GetAuthor().GetName()

			results = append(results, post)
		}
	} else {
		for _, release := range releases {
			ghRelease := GHRelease{
				ProjectID: project.GetID(),
				ReleaseID: release.GetID(),
				Name:      release.GetName(),
				TagID:     release.GetTagName(),
				IsTagOnly: false,
				Date:      release.GetCreatedAt().Time,
				TarBall:   release.GetTarballURL(),
				Owner:     owner,
				Repo:      repo,
			}
			post := &posts.Post{}
			post.Title = release.GetName()
			post.Link = release.GetHTMLURL()
			post.Published = release.GetPublishedAt().Time
			post.Updated = release.GetPublishedAt().Time
			post.JsonData = utils.StructToJsonMap(ghRelease)
			post.Author = release.GetAuthor().GetName()
			post.Content = release.GetBody()

			results = append(results, post)
		}
	}
	return results, nil
}
// NewGHReleaseHandler builds a GitHub release handler backed by an
// authenticated API client and a background context.
func NewGHReleaseHandler() FormatHandler {
	ctx := context.Background()
	return GHReleaseHandler{
		ctx:      ctx,
		ghClient: github.Auth(ctx),
	}
}

@ -0,0 +1,226 @@
package handlers
import (
"git.sp4ke.com/sp4ke/hugobot/v3/feeds"
"git.sp4ke.com/sp4ke/hugobot/v3/github"
"git.sp4ke.com/sp4ke/hugobot/v3/posts"
"git.sp4ke.com/sp4ke/hugobot/v3/static"
"git.sp4ke.com/sp4ke/hugobot/v3/utils"
"context"
"fmt"
"log"
"regexp"
"strconv"
"strings"
"time"
githubApi "github.com/google/go-github/github"
)
var (
	// Regexes extracting the RFC number from an issue/PR title, e.g.
	// "bip-123", "BOLT 4", "slips 44" (matched case-insensitively by
	// lowercasing the title first in GetRFCNumber).
	ReBIP  = regexp.MustCompile(`bips?[\s-]*(?P<bipId>[0-9]+)`)
	ReBOLT = regexp.MustCompile(`bolt?[\s-]*(?P<boltId>[0-9]+)`)
	ReSLIP = regexp.MustCompile(`slips?[\s-]*(?P<slipId>[0-9]+)`)
)

const (
	// Canonical document links; the %04d verb zero-pads the RFC number.
	BIPLink  = "https://github.com/bitcoin/bips/blob/master/bip-%04d.mediawiki"
	SLIPLink = "https://github.com/satoshilabs/slips/blob/master/slip-%04d.md"
)

const (
	// GitHub numeric repository ids of the upstream RFC repositories —
	// NOTE(review): hard-coded; confirm they still match the repos.
	BOLT = 73738971
	BIP  = 14531737
	SLIP = 50844973
)

// RFCTypes maps an RFC repository id to its family name.
var RFCTypes = map[int64]string{
	BIP:  "bip",
	BOLT: "bolt",
	SLIP: "slip",
}

// RFCUpdate is the JSON payload stored for one BIP/BOLT/SLIP change,
// built from the underlying GitHub issue or pull request.
type RFCUpdate struct {
	RFCID     int64         `json:"rfcid"`
	RFCType   string        `json:"rfc_type"`   //bip bolt slip
	RFCNumber int           `json:"rfc_number"` // (bip/bolt/slip id)
	Issue     *github.Issue `json:"issue"`
	RFCLink   string        `json:"rfc_link"`
}

// RFCHandler fetches RFC updates through an authenticated GitHub API
// client (see NewRFCHandler).
type RFCHandler struct {
	ctx      context.Context
	ghClient *githubApi.Client
}
// Handle fetches RFC (BIP/BOLT/SLIP) updates published since the
// feed's last refresh and writes each one to the database.
func (handler RFCHandler) Handle(feed feeds.Feed) error {
	updates, err := handler.FetchSince(feed.Url, feed.LastRefresh)
	if err != nil {
		return err
	}
	if updates == nil {
		log.Printf("No new posts in feed <%s>", feed.Name)
	}
	// RFCs are backed by github issues, so the issue id serves as the
	// unique short id in the local sqlite db.
	for _, post := range updates {
		if err := post.WriteWithShortId(feed.FeedID, post.JsonData["rfcid"]); err != nil {
			return err
		}
	}
	return nil
}
// FetchSince collects every issue and pull request of the RFC
// repository at url updated after `after`, and converts each into a
// post describing the BIP/BOLT/SLIP change.
func (handler RFCHandler) FetchSince(url string, after time.Time) ([]*posts.Post, error) {
	var results []*posts.Post

	log.Printf("Fetching RFC %s since %v", url, after)

	owner, repo := github.ParseOwnerRepo(url)

	// The repository id selects the RFC family via RFCTypes below.
	project, resp, err := handler.ghClient.Repositories.Get(handler.ctx, owner, repo)
	github.RespMiddleware(resp)
	if err != nil {
		return nil, err
	}

	//All Issues (paginated; the API's Since filter bounds the range)
	var allIssues []*githubApi.Issue
	listIssueOptions := &githubApi.IssueListByRepoOptions{
		Since:       after,
		State:       "all",
		ListOptions: githubApi.ListOptions{PerPage: 100},
	}
	for {
		issues, resp, err := handler.ghClient.Issues.ListByRepo(
			handler.ctx, owner, repo, listIssueOptions)
		if err != nil {
			return nil, err
		}
		allIssues = append(allIssues, issues...)
		if resp.NextPage == 0 {
			break
		}
		listIssueOptions.Page = resp.NextPage
	}

	for iIndex, issue := range allIssues {
		var pr *githubApi.PullRequest

		// base rfc object
		rfc := RFCUpdate{
			RFCID:   issue.GetID(),
			RFCType: RFCTypes[*project.ID],
			Issue: &github.Issue{
				Title:    issue.GetTitle(),
				URL:      issue.GetURL(),
				Number:   issue.GetNumber(),
				State:    issue.GetState(),
				Updated:  issue.GetUpdatedAt(),
				Created:  issue.GetCreatedAt(),
				Comments: issue.GetComments(),
				HtmlURL:  issue.GetHTMLURL(),
			},
		}

		if issue.IsPullRequest() {
			log.Printf("parsing %s. Progress %d/%d\n", url, iIndex+1, len(allIssues))
			// One extra API call per PR to read its merge status.
			pr, resp, err = handler.ghClient.PullRequests.Get(
				handler.ctx, owner, repo, issue.GetNumber(),
			)
			//github.RespMiddleware(resp)
			if err != nil {
				return nil, err
			}
			rfc.Issue.IsPR = issue.IsPullRequest()
			rfc.Issue.Merged = *pr.Merged
			if rfc.Issue.Merged {
				rfc.Issue.MergedAt = *pr.MergedAt
			}
		}

		// If is open and is not new (update) mark as update
		// NOTE(review): time.Time compared with != rather than !Equal —
		// relies on identical wall clock and location in both stamps;
		// confirm this is safe for API-decoded times.
		if rfc.Issue.Created != rfc.Issue.Updated &&
			!rfc.Issue.Merged &&
			rfc.Issue.State == "open" {
			rfc.Issue.IsUpdate = true
		}

		rfc.RFCNumber, err = GetRFCNumber(issue.GetTitle())
		if err != nil {
			return nil, err
		}

		// Link the post to the canonical document when a number was
		// found in the title (-1 means none).
		if rfc.RFCNumber != -1 {
			switch rfc.RFCType {
			case RFCTypes[BIP]:
				rfc.RFCLink = fmt.Sprintf(BIPLink, rfc.RFCNumber)
			case RFCTypes[SLIP]:
				rfc.RFCLink = fmt.Sprintf(SLIPLink, rfc.RFCNumber)
			case RFCTypes[BOLT]:
				rfc.RFCLink = static.BoltMap[rfc.RFCNumber]
			}
		}

		post := &posts.Post{}
		post.Title = rfc.Issue.Title
		post.Link = rfc.Issue.URL
		post.Published = rfc.Issue.Created
		post.Updated = rfc.Issue.Updated
		post.JsonData = utils.StructToJsonMap(rfc)

		results = append(results, post)
	}
	return results, nil
}
// NewRFCHandler builds an RFC handler backed by an authenticated
// GitHub API client and a background context.
func NewRFCHandler() FormatHandler {
	ctx := context.Background()
	return RFCHandler{
		ctx:      ctx,
		ghClient: github.Auth(ctx),
	}
}
// GetRFCNumber extracts the BIP/BOLT/SLIP number referenced in an
// issue title (e.g. "bip-123: ..." yields 123). It returns -1 with a
// nil error when no reference is found, and -1 with the parse error if
// the captured digits cannot be converted.
func GetRFCNumber(title string) (int, error) {
	lowered := strings.ToLower(title)
	for _, re := range []*regexp.Regexp{ReBIP, ReBOLT, ReSLIP} {
		matches := re.FindStringSubmatch(lowered)
		if matches == nil {
			continue
		}
		n, err := strconv.Atoi(matches[1])
		if err != nil {
			return -1, err
		}
		return n, nil
	}
	return -1, nil
}

@ -0,0 +1,118 @@
package handlers
import (
"git.sp4ke.com/sp4ke/hugobot/v3/export"
"git.sp4ke.com/sp4ke/hugobot/v3/feeds"
"git.sp4ke.com/sp4ke/hugobot/v3/posts"
"log"
"strings"
"time"
"github.com/fatih/structs"
"github.com/mmcdole/gofeed"
)
// RSSHandler fetches and stores posts from RSS/Atom feeds.
type RSSHandler struct {
	// rssFeed caches the last parsed feed. NOTE(review): FetchSince has
	// a value receiver, so this assignment never escapes the call —
	// confirm whether the field is still needed.
	rssFeed *gofeed.Feed
}
// Handle fetches all posts published since the feed's last refresh and
// persists each one to the database.
func (handler RSSHandler) Handle(feed feeds.Feed) error {
	fetched, err := handler.FetchSince(feed.Url, feed.LastRefresh)
	if err != nil {
		return err
	}
	if fetched == nil {
		log.Printf("No new posts in feed <%s>", feed.Name)
	}
	// Write posts to DB
	for _, post := range fetched {
		if err := post.Write(feed.FeedID); err != nil {
			return err
		}
	}
	return nil
}
// FetchSince downloads and parses the RSS feed at url and returns one
// post per item published strictly after `after`.
//
// Items without a parsed publication date are skipped with a log line
// (previously they caused a nil-pointer dereference on
// item.PublishedParsed).
func (handler RSSHandler) FetchSince(url string, after time.Time) ([]*posts.Post, error) {
	log.Printf("Fetching RSS since %v", after)

	feed, err := gofeed.NewParser().ParseURL(url)
	if err != nil {
		return nil, err
	}

	var fetchedPosts []*posts.Post
	for _, item := range feed.Items {
		if item.PublishedParsed == nil {
			// BUG FIX: guard before dereferencing; the old code only
			// checked for nil after already calling After() on it.
			log.Printf("skipping item with no parsed publish date: %s", item.Link)
			continue
		}
		if !item.PublishedParsed.After(after) {
			continue
		}

		post := &posts.Post{}
		if item.Author != nil {
			post.Author = item.Author.Name
		}
		post.Title = item.Title

		// Some feeds carry the real content in the description field;
		// normalize so post.Content is always the filled one.
		if len(item.Content) == 0 && len(item.Description) > 0 {
			post.Content = item.Description
		} else if item.Content == item.Description {
			// If content is same as description keep only content.
			post.Content = item.Content
			post.PostDescription = ""
		} else {
			post.Content = item.Content
			post.PostDescription = item.Description
		}

		post.Link = item.Link
		if item.UpdatedParsed != nil {
			post.Updated = *item.UpdatedParsed
		} else {
			post.Updated = *item.PublishedParsed
		}
		post.Published = *item.PublishedParsed
		post.Tags = strings.Join(item.Categories, ",")

		// Content/description already live on the post; avoid storing
		// them a second time inside the raw JSON blob.
		item.Content = ""
		item.Description = ""
		post.JsonData = structs.Map(item)

		fetchedPosts = append(fetchedPosts, post)
	}
	return fetchedPosts, nil
}
// NewRSSHandler returns a stateless RSS format handler.
func NewRSSHandler() FormatHandler {
	return RSSHandler{}
}
// RSSExportMapper adds the post's update time to the hugo export map,
// but only for feeds in RSS format.
func RSSExportMapper(exp export.Map, feed feeds.Feed, post posts.Post) error {
	if feed.Format != feeds.FormatRSS {
		return nil
	}
	exp["updated"] = post.Updated
	return nil
}
// init registers the RSS export mapper so hugo exports include the
// "updated" field for RSS-based feeds.
func init() {
	export.RegisterPostMapper(RSSExportMapper)
}

@ -0,0 +1,328 @@
package main
import (
"git.sp4ke.com/sp4ke/hugobot/v3/export"
"git.sp4ke.com/sp4ke/hugobot/v3/feeds"
"git.sp4ke.com/sp4ke/hugobot/v3/handlers"
"git.sp4ke.com/sp4ke/hugobot/v3/posts"
"git.sp4ke.com/sp4ke/hugobot/v3/utils"
"bytes"
"encoding/gob"
"encoding/json"
"fmt"
"log"
"time"
"github.com/beeker1121/goque"
"github.com/gofrs/uuid"
"github.com/syndtr/goleveldb/leveldb"
)
// JobStatus is the lifecycle state of a Job.
type JobStatus int

// JobType discriminates what a Job does (fetch vs export).
type JobType int

const (
	JobStatusNew JobStatus = iota
	JobStatusQueued
	JobStatusDone
	JobStatusFailed
)

const (
	JobTypeFetch JobType = iota
	JobTypeExport
)

var (
	// JobTypeMap maps job types to their display names.
	JobTypeMap = map[JobType]string{
		JobTypeFetch:  "fetch",
		JobTypeExport: "export",
	}

	// JobStatusMap maps job statuses to their display names.
	JobStatusMap = map[JobStatus]string{
		JobStatusNew:    "new",
		JobStatusQueued: "queued",
		JobStatusDone:   "done",
		JobStatusFailed: "failed",
	}
)

// String implements fmt.Stringer; unknown statuses yield "".
func (js JobStatus) String() string {
	return JobStatusMap[js]
}

// Prioritizer exposes a job's queue priority.
type Prioritizer interface {
	// Return job priority
	GetPriority() uint8
}

// Represents a Job to be done on a feed
// It could be any of: Poll, Fetch, Store
// Should implement Poller
type Job struct {
	ID       uuid.UUID
	Feed     *feeds.Feed
	Status   JobStatus
	Data     []*posts.Post // fetched posts payload, if any
	Priority uint8
	JobType  JobType
	Serial   bool  // Should be run in a serial manner
	Err      error // last error recorded by Failed
	Prioritizer
}

// Handler is anything that can execute itself as a job.
type Handler interface {
	Handle()
}
// GoRoutine method
func (job *Job) Handle() {
var err error
if job.JobType == JobTypeFetch {
handler := handlers.GetFormatHandler(*job.Feed)
err = handler.Handle(*job.Feed)
} else if job.JobType == JobTypeExport {
handler := export.NewHugoExporter()
err = handler.Handle(*job.Feed)
}
if err != nil {
job.Failed(err)
return
}
//log.Println("Done for job type ", job.JobType)
job.Done()
}
// Failed marks the job as failed with err, stamps the feed's refresh
// time, and notifies the scheduler. A failing refresh-time update is
// fatal for the whole process.
func (job *Job) Failed(err error) {
	if uerr := job.Feed.UpdateRefreshTime(time.Now()); uerr != nil {
		log.Fatal(uerr)
	}
	job.Status = JobStatusFailed
	job.Err = err
	NotifyScheduler(job)
}
// Done marks the job successful, stamps the feed's refresh time, and
// notifies the scheduler. A failing refresh-time update is fatal.
func (job *Job) Done() {
	//TODO: only update refresh time after actual fetching
	if err := job.Feed.UpdateRefreshTime(time.Now()); err != nil {
		log.Fatal(err)
	}
	job.Status = JobStatusDone
	NotifyScheduler(job)
}
// GetPriority returns the job's queue priority (implements Prioritizer).
func (job *Job) GetPriority() uint8 {
	return job.Priority
}
// String renders a human-readable JSON summary of the job for logging.
// It returns "" (after logging) when marshalling fails.
func (job *Job) String() string {
	exp := map[string]interface{}{
		"jobId":    job.ID,
		"feed":     job.Feed.Name,
		"priority": job.Priority,
		"jobType":  JobTypeMap[job.JobType],
		"serial":   job.Serial,
		"err":      job.Err,
	}
	b, err := json.MarshalIndent(exp, "", " ")
	if err != nil {
		log.Printf("error printing job %s\n", err)
		return ""
	}
	// BUG FIX: the JSON was previously passed to fmt.Sprintf AS the
	// format string, so any '%' in a feed name corrupted the output
	// (go vet flags this). Format it as a value instead.
	return fmt.Sprintf("%s", b)
}
// JobFromBytes decodes a gob-encoded Job; it is the inverse of
// Job.ToBytes.
func JobFromBytes(value []byte) (*Job, error) {
	job := &Job{}
	if err := gob.NewDecoder(bytes.NewBuffer(value)).Decode(job); err != nil {
		return nil, err
	}
	return job, nil
}
// ToBytes gob-encodes the job into a byte slice suitable for storage
// in the queue backends; decode with JobFromBytes.
func (job *Job) ToBytes() ([]byte, error) {
	var buf bytes.Buffer
	if err := gob.NewEncoder(&buf).Encode(job); err != nil {
		return nil, err
	}
	return buf.Bytes(), nil
}
// NewFetchJob creates a fetch-type job for feed with the given queue
// priority; the Serial flag is inherited from the feed.
func NewFetchJob(feed *feeds.Feed, priority uint8) (*Job, error) {
	id, err := uuid.NewV4()
	if err != nil {
		return nil, err
	}
	return &Job{
		ID:       id,
		Feed:     feed,
		Status:   JobStatusNew,
		JobType:  JobTypeFetch,
		Priority: priority,
		Serial:   feed.Serial,
	}, nil
}
// NewExportJob creates an export-type job for feed with the given
// queue priority.
func NewExportJob(feed *feeds.Feed, priority uint8) (*Job, error) {
	id, err := uuid.NewV4()
	if err != nil {
		return nil, err
	}
	return &Job{
		ID:       id,
		Feed:     feed,
		Status:   JobStatusNew,
		Priority: priority,
		JobType:  JobTypeExport,
	}, nil
}
// Queuer abstracts the job queue backend.
// NOTE(review): JobPool's Enqueue and Drop signatures differ from this
// interface (Enqueue returns only error; Drop returns nothing) —
// confirm the intended contract.
type Queuer interface {
	Enqueue(job *Job) (*Job, error)
	Dequeue() (*Job, error)
	Close() error
	Drop() error // Close and delete all jobs
	Length() uint64
	//Peek() (*Job, error)
	//PeekByID(id uint64) (*Job, error)
	// Returns item located at given offset starting from head
	// of queue without removing it
	//PeekByOffset(offset uint64) (*Job, error)
}
// Represents the queue of fetching todo jobs: a persistent priority
// queue of gob-encoded jobs plus a leveldb map marking feeds that
// already have a queued job (duplicate suppression, see MarkUniqJob).
type JobPool struct {
	// Actual jobs queue
	Q *goque.PriorityQueue
	// Handle queuing mechanics
	Queuer
	// maxJobs is not referenced in this file — NOTE(review): confirm use.
	maxJobs int
	// feedJobMap maps feed id -> encoded job for dedup.
	feedJobMap *leveldb.DB
}
// Close closes the underlying priority queue and the feed→job map,
// returning the first error encountered (the map is always closed).
func (jp *JobPool) Close() error {
	// Previously the queue's Close error was silently dropped.
	qErr := jp.Q.Close()
	mapErr := jp.feedJobMap.Close()
	if qErr != nil {
		return qErr
	}
	return mapErr
}
// Dequeue pops the highest-priority job from the queue and decodes it.
func (jp *JobPool) Dequeue() (*Job, error) {
	item, err := jp.Q.Dequeue()
	if err != nil {
		return nil, err
	}
	j := &Job{}
	// BUG FIX: the decode error was previously ignored, which could
	// hand callers a zero-valued job on corrupt queue data.
	if err := item.ToObject(j); err != nil {
		return nil, err
	}
	//TODO: feedJobMap cleanup happens when the job is done
	//(DeleteMarkedJob), not here.
	return j, nil
}
// DeleteMarkedJob removes the feed→job marker for job's feed, allowing
// a new job for that feed to be enqueued again.
func (jp *JobPool) DeleteMarkedJob(job *Job) error {
	return jp.feedJobMap.Delete(utils.IntToBytes(job.Feed.FeedID), nil)
}
// Mark a job in feedJobMap to avoid duplicates: stores the encoded job
// keyed by its feed id so the scheduler can detect an already-queued
// feed.
func (jp *JobPool) MarkUniqJob(job *Job) error {
	data, err := job.ToBytes()
	if err != nil {
		return err
	}
	return jp.feedJobMap.Put(utils.IntToBytes(job.Feed.FeedID), data, nil)
}
// Enqueue marks the job as queued and stores it in the priority queue.
func (jp *JobPool) Enqueue(job *Job) error {
	// Update job status
	job.Status = JobStatusQueued
	// The returned item is not needed here; the old code re-decoded it
	// into a throwaway Job and ignored the decode error — dead code,
	// removed.
	_, err := jp.Q.EnqueueObject(job.GetPriority(), job)
	return err
}
// Drop closes the queue and deletes its stored jobs.
// NOTE(review): the Queuer interface declares Drop() error, but this
// method returns nothing and discards any result of Q.Drop() — confirm
// the intended contract.
func (jp *JobPool) Drop() {
	jp.Q.Drop()
}
// Length reports the number of jobs currently in the priority queue.
func (jp *JobPool) Length() uint64 {
	return jp.Q.Length()
}
// Peek returns the highest-priority job without removing it from the
// queue.
func (jp *JobPool) Peek() (*Job, error) {
	item, err := jp.Q.Peek()
	if err != nil {
		return nil, err
	}
	j := &Job{}
	// BUG FIX: the decode error was previously ignored.
	if err := item.ToObject(j); err != nil {
		return nil, err
	}
	return j, nil
}

@ -0,0 +1,29 @@
package logging
import (
"log"
"os"
)
const (
	// logFileName is the on-disk log file path (unused while the
	// file-logging code in init below stays commented out).
	logFileName = ".log"
)
var (
	// logFile would hold the opened log file; it is never assigned
	// anywhere in this file, so it stays nil.
	logFile *os.File
)

// Close closes the package log file.
// NOTE(review): logFile is nil (see init), so this returns the nil
// *os.File Close error — confirm whether file logging should be
// re-enabled or this call removed.
func Close() error {
	return logFile.Close()
}

// init configures the global logger to write to stdout only. The
// commented-out code is the previous file+stdout multi-writer setup.
func init() {
	//var err error
	//logFile, err = os.OpenFile(logFileName, os.O_RDWR|os.O_CREATE|os.O_APPEND, 0600)
	//if err != nil {
	//log.Fatal(err)
	//}
	//log.SetOutput(io.MultiWriter(logFile, os.Stdout))
	log.SetOutput(os.Stdout)
}

@ -0,0 +1,96 @@
package main
import (
"log"
"os"
"git.sp4ke.com/sp4ke/hugobot/v3/config"
"git.sp4ke.com/sp4ke/hugobot/v3/logging"
"github.com/gobuffalo/flect"
altsrc "github.com/urfave/cli/altsrc"
cli "gopkg.in/urfave/cli.v1"
)
// tearDown releases process-wide resources (database handle and log
// file) before exit; Close errors are ignored here.
func tearDown() {
	DB.Handle.Close()
	logging.Close()
}
// main wires up the hugobot CLI: global flags (overridable from a TOML
// config file via altsrc), config registration in app.Before, and the
// server/export/feeds subcommands.
func main() {
	defer tearDown()

	app := cli.NewApp()
	app.Name = "hugobot"
	app.Version = "1.0"

	flags := []cli.Flag{
		altsrc.NewStringFlag(cli.StringFlag{
			Name:   "website-path",
			Usage:  "`PATH` to hugo project",
			EnvVar: "WEBSITE_PATH",
		}),
		altsrc.NewStringFlag(cli.StringFlag{
			Name:   "github-access-token",
			Usage:  "Github API Access Token",
			EnvVar: "GH_ACCESS_TOKEN",
		}),
		altsrc.NewStringFlag(cli.StringFlag{
			Name:  "rel-bitcoin-addr-content-path",
			Usage: "path to bitcoin data relative to hugo path",
		}),
		altsrc.NewIntFlag(cli.IntFlag{
			Name:  "api-port",
			Usage: "default bot api port",
		}),
		cli.StringFlag{
			Name:  "config",
			Value: "config.toml",
			Usage: "TOML config `FILE` path",
		},
	}

	// Load the TOML source, then mirror every global flag value into
	// the config package, keyed by the Pascalized flag name.
	app.Before = func(c *cli.Context) error {
		err := altsrc.InitInputSourceWithContext(flags,
			altsrc.NewTomlSourceFromFlagFunc("config"))(c)
		if err != nil {
			return err
		}
		for _, conf := range c.GlobalFlagNames() {
			// find corresponding flag
			for _, flag := range flags {
				if flag.GetName() != conf {
					continue
				}
				// Plain flags are values while altsrc-wrapped flags are
				// pointers; both shapes must be matched.
				switch flag.(type) {
				case cli.StringFlag:
					err = config.RegisterConf(flect.Pascalize(conf), c.GlobalString(conf))
				case *altsrc.StringFlag:
					err = config.RegisterConf(flect.Pascalize(conf), c.GlobalString(conf))
				case cli.IntFlag:
					err = config.RegisterConf(flect.Pascalize(conf), c.GlobalInt(conf))
				case *altsrc.IntFlag:
					err = config.RegisterConf(flect.Pascalize(conf), c.GlobalInt(conf))
				}
			}
		}
		return err
	}

	app.Flags = flags
	app.Commands = []cli.Command{
		startServerCmd,
		exportCmdGrp,
		feedsCmdGroup,
	}

	if err := app.Run(os.Args); err != nil {
		// BUG FIX: log.Fatal calls os.Exit, which skips deferred
		// functions — release resources explicitly before exiting.
		tearDown()
		log.Fatal(err)
	}
}

@ -0,0 +1,36 @@
package main
import (
"git.sp4ke.com/sp4ke/hugobot/v3/handlers"
"fmt"
"os"
"testing"
"time"
)
const (
	// Live RSS feeds used by the fetch tests (network access required).
	rssTestFeed  = "https://bitcointechweekly.com/index.xml"
	rssTestFeed2 = "https://bitcoinops.org/feed.xml"
)
// TestFetch fetches posts from a live RSS feed since a fixed date and dumps
// each post's content to <index>.html (requires network access).
func TestFetch(t *testing.T) {
	handler := handlers.NewRSSHandler()
	when, _ := time.Parse("Jan 2006", "Jun 2018")
	res, err := handler.FetchSince(rssTestFeed2, when)
	if err != nil {
		// Fatal: res is unusable if the fetch failed.
		t.Fatal(err)
	}
	for i, post := range res {
		f, err := os.Create(fmt.Sprintf("%d.html", i))
		if err != nil {
			// Don't write to a nil file handle (original kept going).
			t.Error(err)
			continue
		}
		if _, err := f.WriteString(post.Content); err != nil {
			t.Error(err)
		}
		// Close per iteration: a defer here would pile up file handles
		// until the whole test returns.
		if err := f.Close(); err != nil {
			t.Error(err)
		}
	}
}

@ -0,0 +1,176 @@
package posts
import (
"git.sp4ke.com/sp4ke/hugobot/v3/db"
"git.sp4ke.com/sp4ke/hugobot/v3/feeds"
"git.sp4ke.com/sp4ke/hugobot/v3/types"
"git.sp4ke.com/sp4ke/hugobot/v3/utils"
"errors"
"fmt"
"log"
"strconv"
"time"
sqlite3 "github.com/mattn/go-sqlite3"
)
const (
	// DBPostsSchema creates the posts table. short_id is the unique public
	// identifier; feed_id links each post to its source feed.
	DBPostsSchema = `CREATE TABLE IF NOT EXISTS posts (
		post_id INTEGER PRIMARY KEY,
		feed_id INTEGER NOT NULL,
		title TEXT DEFAULT '',
		description TEXT DEFAULT '',
		link TEXT NOT NULL,
		updated timestamp NOT NULL,
		published timestamp NOT NULL,
		author TEXT DEFAULT '',
		content TEXT DEFAULT '',
		tags TEXT DEFAULT '',
		json_data BLOB DEFAULT '',
		short_id TEXT UNIQUE,
		FOREIGN KEY (feed_id) REFERENCES feeds(feed_id)
	)`
)
var (
	// Sentinel errors for post lookups and inserts.
	ErrDoesNotExist  = errors.New("does not exist")
	ErrAlreadyExists = errors.New("already exists")
)

// DB is the shared database handle used by the posts package.
var DB = db.DB
// Post is a single fetched entry, stored in the posts table and joined with
// its source feed when listed.
type Post struct {
	// Fixed tag typo: was `josn:"id"`, which encoding/json silently
	// ignored (the field serialized as "PostID").
	PostID          int64         `json:"id" db:"post_id"`
	Title           string        `json:"title"`
	PostDescription string        `json:"description" db:"post_description"`
	Link            string        `json:"link"`
	Updated         time.Time     `json:"updated"`
	Published       time.Time     `json:"published"`
	Author          string        `json:"author"`
	Content         string        `json:"content"`
	Tags            string        `json:"tags"`
	ShortID         string        `json:"short_id" db:"short_id"`
	JsonData        types.JsonMap `json:"data" db:"json_data"`

	// Embedded source feed, populated by the JOIN in ListPosts.
	feeds.Feed
}
// WriteWithShortId writes the post for feedId using the caller-provided
// short id, which may be an int, int64 or string.
func (post *Post) WriteWithShortId(feedId int64, shortId interface{}) error {
	var shortid string
	switch v := shortId.(type) {
	case int:
		shortid = strconv.Itoa(v)
	case int64:
		// FormatInt avoids truncation on 32-bit platforms, where
		// strconv.Itoa(int(v)) would wrap large values.
		shortid = strconv.FormatInt(v, 10)
	case string:
		shortid = v
	default:
		return fmt.Errorf("Cannot convert %v to string", shortId)
	}
	return write(post, feedId, shortid)
}
// Write persists the post for feedId, generating a fresh short id for it.
func (post *Post) Write(feedId int64) error {
	sid, err := utils.GetSIDGenerator().Generate()
	if err != nil {
		return err
	}
	return write(post, feedId, sid)
}
// write inserts (or replaces) a post row for feedId, keyed by shortId.
// Constraint violations are reported with the post title for context.
func write(post *Post, feedId int64, shortId string) error {
	const query = `INSERT OR REPLACE INTO posts (
		feed_id,
		title,
		description,
		link,
		updated,
		published,
		author,
		content,
		json_data,
		short_id,
		tags
	)
	VALUES(
		?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?
	)
	`
	_, err := DB.Handle.Exec(query,
		feedId,
		post.Title,
		post.PostDescription,
		post.Link,
		post.Updated,
		post.Published,
		post.Author,
		post.Content,
		post.JsonData,
		shortId,
		post.Tags,
	)
	if err == nil {
		return nil
	}
	if sqlErr, ok := err.(sqlite3.Error); ok && sqlErr.Code == sqlite3.ErrConstraint {
		return fmt.Errorf("%+v --- %s ", sqlErr, post.Title)
	}
	return err
}
// ListPosts returns every stored post joined with its source feed row.
func ListPosts() ([]Post, error) {
	const query = `SELECT * FROM posts JOIN feeds ON posts.feed_id = feeds.feed_id`
	var result []Post
	if err := DB.Handle.Select(&result, query); err != nil {
		return nil, err
	}
	return result, nil
}
// GetPostsByFeedId returns all posts belonging to the given feed.
// The description column is aliased to match the PostDescription db tag.
func GetPostsByFeedId(feedId int64) ([]*Post, error) {
	const query = `SELECT
	post_id,
	feed_id,
	title,
	description AS post_description,
	link,
	updated,
	published,
	author,
	content,
	tags,
	json_data,
	short_id
	FROM posts WHERE feed_id = ?`
	var result []*Post
	if err := DB.Handle.Select(&result, query, feedId); err != nil {
		return nil, err
	}
	return result, nil
}
// init creates the posts table if it does not exist yet.
func init() {
	_, err := DB.Handle.Exec(DBPostsSchema)
	if err != nil {
		log.Fatal(err)
	}
}

@ -0,0 +1,28 @@
package main
import (
"git.sp4ke.com/sp4ke/hugobot/v3/posts"
"log"
"os"
"testing"
)
// TestGetPosts lists all stored posts and logs each title with its feed name.
func TestGetPosts(t *testing.T) {
	all, err := posts.ListPosts()
	if err != nil {
		t.Error(err)
	}
	log.Println(all)
	for _, p := range all {
		t.Logf("%s <---- %s", p.Title, p.Feed.Name)
	}
}
// TestMain runs the test suite and closes the shared DB handle afterwards.
// The handle must be closed *before* os.Exit: os.Exit skips deferred calls,
// so the original `defer DB.Handle.Close()` never ran.
func TestMain(m *testing.M) {
	code := m.Run()
	DB.Handle.Close()
	os.Exit(code)
}

@ -0,0 +1,379 @@
package main
import (
"git.sp4ke.com/sp4ke/hugobot/v3/export"
"git.sp4ke.com/sp4ke/hugobot/v3/feeds"
"git.sp4ke.com/sp4ke/hugobot/v3/static"
"git.sp4ke.com/sp4ke/hugobot/v3/utils"
"errors"
"log"
"math"
"path"
"time"
gum "git.sp4ke.com/sp4ke/gum.git"
"github.com/syndtr/goleveldb/leveldb"
"github.com/beeker1121/goque"
)
const (
	// StaticDataExportInterval is how often static site data is exported.
	StaticDataExportInterval = 3600 * time.Second
	// JobSchedulerInterval is the scheduler heartbeat period.
	JobSchedulerInterval = 60 * time.Second
	// QDataDir is the on-disk location of the queue and feed-job map.
	QDataDir = "./.data"
	// MaxQueueJobs is the pool size that triggers a panic shutdown.
	MaxQueueJobs = 100
	// MaxSerialJob is the buffer size of the serial-jobs channel.
	MaxSerialJob = 100

	// Used in JobPool to avoid duplicate jobs for the same feed
	MapFeedJobFile = "map_feed_job"
)
var (
	// SourcePriorityRange is the input scale for MapPriority: seconds since
	// last refresh, capped at two weeks.
	SourcePriorityRange = Range{Min: 0, Max: 336 * time.Hour.Seconds()} // 2 weeks
	// TargetPriorityRange is the output scale (fits in a uint8).
	TargetPriorityRange = Range{Min: 0, Max: math.MaxUint8}
	// SchedulerUpdates carries job status notifications to Run
	// (see NotifyScheduler).
	SchedulerUpdates = make(chan *Job)
)
// Range is a simple numeric interval used for priority rescaling.
type Range struct {
	Min float64
	Max float64
}

// Val returns the width of the interval (Max - Min).
func (r Range) Val() float64 {
	return r.Max - r.Min
}
// Scheduler is the JobPool scheduler. It periodically schedules new jobs,
// dispatches them, and reacts to job status updates.
type Scheduler struct {
	jobs       *JobPool  // pending jobs (priority queue + per-feed dedup map)
	jobUpdates chan *Job // status notifications from running jobs
	serialJobs chan *Job // jobs that must run one at a time
}
// serialRun executes jobs one at a time as they arrive on inputJobs.
// Ranging over the channel (instead of a bare receive in an infinite loop)
// terminates cleanly if the channel is ever closed, rather than spinning on
// nil jobs.
func serialRun(inputJobs <-chan *Job) {
	for j := range inputJobs {
		log.Printf("serial run %v", j)
		j.Handle()
	}
}
// Run is the scheduler main loop. On each heartbeat it refreshes and
// dispatches jobs; on job updates it cleans up and chains follow-up jobs;
// on the export tick it writes static site data; on manager stop it shuts
// down cleanly.
func (s *Scheduler) Run(m gum.UnitManager) {
	go serialRun(s.serialJobs) // These jobs run in series
	jobTimer := time.NewTicker(JobSchedulerInterval)
	staticExportTimer := time.NewTicker(StaticDataExportInterval)
	for {
		select {
		case <-jobTimer.C:
			log.Println("job heartbeat !")
			j, _ := s.jobs.Peek()
			if j != nil {
				log.Printf("peeking job: %s\n", j)
			}
			// If max pool jobs reached clean the pool
			if s.jobs.Length() >= MaxQueueJobs {
				s.jobs.Drop()
				s.panicAndShutdown(m)
				return
			}
			// Enqueue jobs for feeds due a refresh, then hand every
			// queued job to a worker.
			s.updateJobs()
			// Spawn job works
			s.dispatchJobs()
		case job := <-s.jobUpdates:
			log.Printf("job update recieved: %s", JobStatusMap[job.Status])
			switch job.Status {
			case JobStatusDone:
				log.Println("Job is done, removing from feedJobMap. New jobs for this feed can be added now.")
				// Remove job from feedJobMap
				err := s.jobs.DeleteMarkedJob(job)
				if err != nil {
					log.Fatal(err)
				}
				// Create export job for successful fetch jobs
				if job.JobType == JobTypeFetch {
					log.Println("Creating an export job")
					expJob, err := NewExportJob(job.Feed, 0)
					if err != nil {
						log.Fatal(err)
					}
					//log.Printf("export job: %+v", expJob)
					log.Printf("export job: %s\n", expJob)
					err = s.jobs.Enqueue(expJob)
					if err != nil {
						log.Fatal(err)
					}
				}
			case JobStatusFailed:
				//TODO: Store all failed jobs somewhere
				// Unmark the feed so a future heartbeat can retry it.
				err := s.jobs.DeleteMarkedJob(job)
				if err != nil {
					log.Fatal(err)
				}
				log.Printf("Job %s failed with error: %s", job.Feed.Name, job.Err)
			}
		case <-staticExportTimer.C:
			log.Println("-------- export tick --------")
			log.Println("Exporting static data ...")
			err := static.HugoExportData()
			if err != nil {
				log.Fatal(err)
			}
			log.Println("Exporting weeks ...")
			err = export.ExportWeeks()
			if err != nil {
				log.Fatal(err)
			}
			log.Println("Exporting btc ...")
			err = export.ExportBTCAddresses()
			if err != nil {
				log.Fatal(err)
			}
		case <-m.ShouldStop():
			s.Shutdown()
			m.Done()
		}
	}
}
// panicAndShutdown reports a fatal scheduler condition (job queue overflow)
// to the unit manager, then shuts the scheduler down.
func (s *Scheduler) panicAndShutdown(m gum.UnitManager) {
	//err := s.jobs.Drop()
	//if err != nil {
	//log.Fatal(err)
	//}
	//TODO
	m.Panic(errors.New("max job queue exceeded"))
	s.Shutdown()
	m.Done()
}
// Shutdown flushes every in-flight job (tracked in feedJobMap) back into the
// persistent queue, clears the map, and closes the queue.
func (s *Scheduler) Shutdown() {
	// Flush ongoing jobs back to job queue
	iter := s.jobs.feedJobMap.NewIterator(nil, nil)
	// Deletions are deferred until iteration finishes: mutating leveldb
	// while iterating is avoided by collecting keys first.
	var markedDelete [][]byte
	for iter.Next() {
		key := iter.Key()
		log.Printf("Putting job %s back to queue", key)
		value := iter.Value()
		//log.Println("value ", value)
		job, err := JobFromBytes(value)
		if err != nil {
			log.Fatal(err)
		}
		err = s.jobs.Enqueue(job)
		if err != nil {
			log.Fatal(err)
		}
		markedDelete = append(markedDelete, key)
	}
	iter.Release()
	err := iter.Error()
	if err != nil {
		log.Fatal(err)
	}
	for _, k := range markedDelete {
		err := s.jobs.feedJobMap.Delete(k, nil)
		if err != nil {
			log.Fatal(err)
		}
	}
	// Close jobpool queue
	err = s.jobs.Close()
	if err != nil {
		panic(err)
	}
}
// dispatchJobs dispatches all jobs in the job pool to task workers: serial
// jobs go to the serial worker channel, everything else gets its own
// goroutine.
func (s *Scheduler) dispatchJobs() {
	//log.Println("dispatching ...")
	// Snapshot the length first — the queue shrinks as jobs are dequeued.
	jobsLength := int(s.jobs.Length())
	for i := 0; i < jobsLength; i++ {
		log.Printf("Dequeing %d/%d", i, s.jobs.Length())
		j, err := s.jobs.Dequeue()
		if err != nil {
			log.Fatal(err)
		}
		log.Println("Dispatching ", j)
		if j.Serial {
			s.serialJobs <- j
		} else {
			go j.Handle()
		}
	}
}
// updateJobs gets all available feeds and creates a new fetch Job for every
// feed whose time since last refresh exceeds its refresh interval, skipping
// feeds that already have a pending job in feedJobMap.
func (s *Scheduler) updateJobs() {
	//
	// Get all feeds
	//
	// For each feed compare Now() vs last refresh
	// If now() - last_refresh >= refresh interval -> create job
	feeds, err := feeds.ListFeeds()
	if err != nil {
		log.Fatal(err)
	}
	//log.Printf("updating jobs for %d feeds\n", len(feeds))
	// Check all jobs
	for _, f := range feeds {
		//log.Printf("checking feed %s: %s\n", f.Name, f.Url)
		//log.Printf("Seconds since last refresh %f", time.Since(f.LastRefresh.Time).Seconds())
		//log.Printf("Refresh interval %f", f.Interval)
		if delta, ok := f.ShouldRefresh(); ok {
			log.Printf("Refreshing %s -- %f seconds since last.", f.Name, delta)
			// If there is already a job with this feed id skip and return an empty job
			feedId := utils.IntToBytes(f.FeedID)
			_, err := s.jobs.feedJobMap.Get(feedId, nil)
			if err == nil {
				log.Println("Job already exists for feed")
			} else if err != leveldb.ErrNotFound {
				// Any error other than "not found" is unexpected.
				log.Fatal(err)
			} else {
				// Priority is based on the delta time since last refresh
				// bigger delta == higher priority
				// Convert priority to smaller range priority
				// We use original range of `0 - 1 month` in seconds
				// Target range is uint8 `0 - 255`
				prio := MapPriority(delta)
				job, err := NewFetchJob(f, prio)
				if err != nil {
					panic(err)
				}
				err = s.jobs.Enqueue(job)
				if err != nil {
					panic(err)
				}
				// Mark the feed as having a pending job so duplicates
				// are not enqueued before this one completes.
				err = s.jobs.MarkUniqJob(job)
				if err != nil {
					panic(err)
				}
			}
		}
	}
	jLen := s.jobs.Length()
	if jLen > 0 {
		log.Printf("jobs length = %+v\n", jLen)
	}
}
// NewScheduler builds the scheduler work unit: a persistent priority queue
// for jobs plus a leveldb map used to deduplicate jobs per feed, restoring
// any jobs that were in flight when the process last stopped.
func NewScheduler() gum.WorkUnit {
	// Priority queue for jobs
	q, err := goque.OpenPriorityQueue(QDataDir, goque.DESC)
	if err != nil {
		panic(err)
	}

	// map[FEED_ID][JOB]
	// Used to avoid duplicate jobs in the queue for the same feed
	feedJobMapDB, err := leveldb.OpenFile(path.Join(QDataDir, MapFeedJobFile), nil)
	if err != nil {
		panic(err)
	}

	jobPool := &JobPool{
		Q:          q,
		maxJobs:    MaxQueueJobs,
		feedJobMap: feedJobMapDB,
	}

	// Restore all ongoing jobs in feedJobMap to the pool. The map entries
	// are deliberately kept: they still mark each feed as "job pending" and
	// are removed via DeleteMarkedJob when the job completes. (The original
	// collected keys into a markedDelete slice that was never used — dead
	// code, removed.)
	iter := feedJobMapDB.NewIterator(nil, nil)
	for iter.Next() {
		job, err := JobFromBytes(iter.Value())
		if err != nil {
			log.Fatal(err)
		}
		log.Printf("Restoring uncomplete job %s back to job queue", job.ID)
		if err := jobPool.Enqueue(job); err != nil {
			log.Fatal(err)
		}
	}
	iter.Release()
	if err := iter.Error(); err != nil {
		log.Fatal(err)
	}

	return &Scheduler{
		jobs:       jobPool,
		jobUpdates: SchedulerUpdates,
		serialJobs: make(chan *Job, MaxSerialJob),
	}
}
// NotifyScheduler sends a job status update to the scheduler loop.
// Blocks until the scheduler receives it (SchedulerUpdates is unbuffered).
func NotifyScheduler(job *Job) {
	SchedulerUpdates <- job
}
// MapPriority linearly rescales val (seconds since last refresh, expected in
// SourcePriorityRange) onto TargetPriorityRange (0-255). The result is
// clamped first: converting an out-of-range float64 to uint8 is not defined
// by the Go spec, so deltas beyond two weeks could yield arbitrary values.
func MapPriority(val float64) uint8 {
	scaled := (((val - SourcePriorityRange.Min) * TargetPriorityRange.Val()) /
		SourcePriorityRange.Val()) + TargetPriorityRange.Min
	if scaled < TargetPriorityRange.Min {
		scaled = TargetPriorityRange.Min
	}
	if scaled > TargetPriorityRange.Max {
		scaled = TargetPriorityRange.Max
	}
	return uint8(scaled)
}

@ -0,0 +1,62 @@
package main
import (
"git.sp4ke.com/sp4ke/hugobot/v3/db"
"log"
"os"
"syscall"
"time"
gum "git.sp4ke.com/sp4ke/gum.git"
)
var (
	// DB is the shared database handle.
	DB = db.DB
	// quit signals shutdown.
	// NOTE(review): never initialized with make — sending on this nil
	// channel (in shutdown) blocks forever; confirm intended.
	quit chan bool
)
// shutdown watches the signal channel and closes the DB on interrupt.
// NOTE(review): the select's default branch blocks on `range c`, so the
// ticker case is effectively unreachable after the first iteration; also
// `quit` is nil at package level, so the send blocks forever. This function
// does not appear to be called from server() — confirm whether it is dead
// code.
func shutdown(c <-chan os.Signal) {
	ticker := time.NewTicker(JobSchedulerInterval)
	for {
		select {
		case <-ticker.C:
			log.Println("shutdown goroutine")
		default:
			for sig := range c {
				switch sig {
				case os.Interrupt:
					log.Println("shutting down ... ")
					DB.Handle.Close()
					quit <- true
				}
			}
		}
	}
}
// server wires the long-lived units (job scheduler and API) into a gum
// manager, registers shutdown signals, and blocks until the manager quits.
func server() {
	manager := gum.NewManager()
	manager.ShutdownOn(syscall.SIGINT)
	manager.ShutdownOn(syscall.SIGTERM)

	// Jobs scheduler
	scheduler := NewScheduler()
	manager.AddUnit(scheduler)

	// API
	api := NewApi()
	manager.AddUnit(api)

	go manager.Run()
	<-manager.Quit
}

@ -0,0 +1,29 @@
package static
// BoltMap maps a BOLT (Lightning Network spec) number to its upstream
// markdown URL. Number 6 is deliberately absent here — presumably removed
// from the upstream spec; confirm against the lightning-rfc repository.
var BoltMap = map[int]string{
	0:  "https://github.com/lightningnetwork/lightning-rfc/blob/master/00-introduction.md",
	1:  "https://github.com/lightningnetwork/lightning-rfc/blob/master/01-messaging.md",
	2:  "https://github.com/lightningnetwork/lightning-rfc/blob/master/02-peer-protocol.md",
	3:  "https://github.com/lightningnetwork/lightning-rfc/blob/master/03-transactions.md",
	4:  "https://github.com/lightningnetwork/lightning-rfc/blob/master/04-onion-routing.md",
	5:  "https://github.com/lightningnetwork/lightning-rfc/blob/master/05-onchain.md",
	7:  "https://github.com/lightningnetwork/lightning-rfc/blob/master/07-routing-gossip.md",
	8:  "https://github.com/lightningnetwork/lightning-rfc/blob/master/08-transport.md",
	9:  "https://github.com/lightningnetwork/lightning-rfc/blob/master/09-features.md",
	10: "https://github.com/lightningnetwork/lightning-rfc/blob/master/10-dns-bootstrap.md",
	11: "https://github.com/lightningnetwork/lightning-rfc/blob/master/11-payment-encoding.md",
}
// BoltNames maps a BOLT number to a human-readable name; keys mirror
// BoltMap (number 6 absent there too).
var BoltNames = map[int]string{
	0:  "introduction",
	1:  "messaging",
	2:  "peer protocol",
	3:  "transactions",
	4:  "onion routing",
	5:  "onchain",
	7:  "routing gossip",
	8:  "transport",
	9:  "features",
	10: "dns bootstrap",
	11: "payment encoding",
}

@ -0,0 +1,32 @@
package static
import (
"git.sp4ke.com/sp4ke/hugobot/v3/config"
"encoding/json"
"os"
"path/filepath"
)
// data lists every static dataset exported by HugoExportData, keyed by the
// output file basename (key ".json" is appended).
var data = map[string]interface{}{
	"bolts": map[string]interface{}{
		"names": BoltNames,
	},
}
// HugoExportData JSON-encodes each entry of `data` into
// <HugoData>/<key>.json.
func HugoExportData() error {
	dirPath := filepath.Join(config.HugoData())
	for k, v := range data {
		filePath := filepath.Join(dirPath, k+".json")
		outputFile, err := os.Create(filePath)
		if err != nil {
			// Original deferred Close before this check (safe on a nil
			// *os.File, but pointless) and stacked defers across the loop.
			return err
		}
		encErr := json.NewEncoder(outputFile).Encode(v)
		// Close inside the loop so handles don't pile up until return,
		// and report encode errors instead of silently dropping them.
		closeErr := outputFile.Close()
		if encErr != nil {
			return encErr
		}
		if closeErr != nil {
			return closeErr
		}
	}
	return nil
}

@ -0,0 +1,38 @@
package types
import (
"bytes"
"database/sql/driver"
"encoding/json"
"errors"
)
// JsonMap is a map[string]interface{} that round-trips through SQL as a
// JSON blob.
type JsonMap map[string]interface{}

// Scan implements sql.Scanner, decoding a JSON value from the database.
// Generalized to accept string as well as []byte, since drivers may return
// TEXT columns as either.
func (m *JsonMap) Scan(src interface{}) error {
	var raw []byte
	switch v := src.(type) {
	case []byte:
		raw = v
	case string:
		raw = []byte(v)
	default:
		return errors.New("not []byte")
	}
	jsonDecoder := json.NewDecoder(bytes.NewBuffer(raw))
	if err := jsonDecoder.Decode(m); err != nil {
		return err
	}
	return nil
}

// Value implements driver.Valuer, encoding the map as a JSON blob.
func (m JsonMap) Value() (driver.Value, error) {
	val, err := json.Marshal(m)
	if err != nil {
		return driver.Value(nil), err
	}
	return driver.Value(val), nil
}

@ -0,0 +1,28 @@
package types
import (
"database/sql/driver"
"errors"
"strings"
)
// StringList stores a []string in SQL as one comma-separated string.
type StringList []string

// Scan implements sql.Scanner, splitting a comma-separated TEXT value.
func (sl *StringList) Scan(src interface{}) error {
	switch v := src.(type) {
	case string:
		*sl = strings.Split(v, ",")
		return nil
	case []byte:
		*sl = strings.Split(string(v), ",")
		return nil
	}
	return errors.New("Could not scan to []string")
}

// Value implements driver.Valuer, joining the list with commas.
func (sl StringList) Value() (driver.Value, error) {
	return driver.Value(strings.Join(sl, ",")), nil
}

@ -0,0 +1,9 @@
package utils
import "encoding/binary"
func IntToBytes(x int64) []byte {
buf := make([]byte, 4)
binary.PutVarint(buf, x)
return buf
}

@ -0,0 +1,18 @@
package utils
import (
"git.sp4ke.com/sp4ke/hugobot/v3/types"
"github.com/fatih/structs"
)
// StructToJsonMap converts a struct into a JsonMap, using each exported
// field's `json` tag as the key and the field's current value.
// NOTE(review): fields without a json tag land under the empty key "" —
// confirm all callers pass fully-tagged structs.
func StructToJsonMap(in interface{}) types.JsonMap {
	out := make(types.JsonMap)
	s := structs.New(in)
	for _, f := range s.Fields() {
		out[f.Tag("json")] = f.Value()
	}
	return out
}

@ -0,0 +1,16 @@
package utils
import (
"os"
"path/filepath"
)
const (
	// MkdirMode is the permission mask applied to created directories.
	MkdirMode = 0770
)

// Mkdir joins the given path segments and creates the resulting directory
// along with any missing parents.
func Mkdir(path ...string) error {
	return os.MkdirAll(filepath.Join(path...), MkdirMode)
}

@ -0,0 +1,14 @@
package utils
import (
"encoding/json"
"fmt"
)
// PrettyPrint writes v to stdout as indented JSON. Nothing is printed when
// marshalling fails; the error is returned instead.
// (Rewrote the named-result/naked-return form as explicit returns.)
func PrettyPrint(v interface{}) error {
	b, err := json.MarshalIndent(v, "", " ")
	if err != nil {
		return err
	}
	fmt.Println(string(b))
	return nil
}

@ -0,0 +1,25 @@
package utils
import (
"github.com/teris-io/shortid"
)
const (
	// Seed for the short-id generator; fixed so generation is reproducible
	// across runs.
	Seed = 322124
)

var (
	// sid is the process-wide short-id generator, created in init.
	sid *shortid.Shortid
)
// GetSIDGenerator returns the package-wide short-id generator.
func GetSIDGenerator() *shortid.Shortid {
	return sid
}
// init creates the shared short-id generator (worker 1, default alphabet,
// fixed seed) and aborts the process if that fails.
func init() {
	var err error
	sid, err = shortid.New(1, shortid.DefaultABC, Seed)
	if err != nil {
		panic(err)
	}
}

@ -0,0 +1,49 @@
package utils
import (
"log"
"time"
)
func NextThursday(t time.Time) time.Time {
weekday := t.Weekday()
t, err := time.Parse("2006-01-02", t.Format("2006-01-02"))
if err != nil {
log.Println(err)
}
nextThursday := t
if weekday < 4 {
nextThursday = t.AddDate(0, 0, int(4-weekday))
} else if weekday > 4 {
nextThursday = t.AddDate(0, 0, int((7-weekday)+4))
}
return nextThursday
}
// GetAllThursdays returns every Thursday from the first Thursday on or
// after `from` through the first Thursday on or after `to`, inclusive.
func GetAllThursdays(from time.Time, to time.Time) []time.Time {
	var weeks []time.Time
	//log.Printf("Parsing from %s", from)
	cursor := NextThursday(from)
	last := NextThursday(to)
	//log.Printf("First thursday is %s", firstWeek)
	for cursor.Before(last) {
		weeks = append(weeks, cursor)
		cursor = cursor.AddDate(0, 0, 7)
	}
	// The cursor lands exactly on the final Thursday (it advances in
	// 7-day steps from one Thursday to another); include it.
	if !cursor.Before(last) &&
		cursor.Weekday() == time.Thursday {
		weeks = append(weeks, cursor)
	}
	return weeks
}

@ -0,0 +1,26 @@
package utils
import (
"testing"
"time"
)
// TestGetAllThursdays checks that the returned range starts at the first
// Thursday after the fixed start date and ends at the Thursday after now.
func TestGetAllThursdays(t *testing.T) {
	tt, err := time.Parse("2006-01-02", "2017-12-07")
	if err != nil {
		t.Error(err)
	}
	dates := GetAllThursdays(tt, time.Now())
	if dates[0] != NextThursday(tt) {
		t.Error("starting date")
	}
	t.Log(NextThursday(time.Now()))
	t.Log(dates[len(dates)-1])
	if dates[len(dates)-1] != NextThursday(time.Now()) {
		t.Error("end date")
	}
}
Loading…
Cancel
Save