gosuki/firefox.go

package main

import (
	"path"
	"time"
)

var Firefox = BrowserPaths{
	BookmarkFile: "places.sqlite",
	BookmarkDir:  "/home/spike/.mozilla/firefox/p1rrgord.default/",
}
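
// Well-known root folder IDs in the moz_bookmarks table of places.sqlite:
// 1 is the places root, 4 is the tags root and 6 is the mobile bookmarks root.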
const (
	MozPlacesRootID       = 1
	MozPlacesTagsRootID   = 4
	MozPlacesMobileRootID = 6
)

type FFBrowser struct {
	BaseBrowser // embedding
	places      *DB
}

type FFTag struct {
	id    int
	title string
}
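
// NewFFBrowser creates a Firefox browser instance backed by a read-only
// (InitRO) handle on the places.sqlite database.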
func NewFFBrowser() IBrowser {
	browser := &FFBrowser{}

	browser.name = "firefox"
	browser.bType = TFirefox
	browser.baseDir = Firefox.BookmarkDir
	browser.bkFile = Firefox.BookmarkFile
	browser.Stats = &ParserStats{}
	browser.NodeTree = &Node{Name: "root", Parent: nil, Type: "root"}

	// Initialize `places.sqlite`
	bookmarkPath := path.Join(browser.baseDir, browser.bkFile)
	browser.places = DB{}.New("Places", bookmarkPath)
	browser.places.InitRO()

	// Buffer shared across jobs
	browser.InitBuffer()

	browser.SetupWatcher()

	/*
	 * Run the debouncer to avoid duplicate running of jobs
	 * when a batch of events is received
	 */
	//browser.eventsChan = make(chan fsnotify.Event, EventsChanLen)
	//go debouncer(3000*time.Millisecond, browser.eventsChan, browser)

	return browser
}
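
// Shutdown closes the base browser resources and the places database handle.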
func (bw *FFBrowser) Shutdown() {
	log.Debugf("<%s> shutting down ... ", bw.name)

	err := bw.BaseBrowser.Close()
	if err != nil {
		log.Critical(err)
	}

	err = bw.places.Close()
	if err != nil {
		log.Critical(err)
	}
}
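
// Watch is not wired up for Firefox yet; it always returns false for now.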
func (bw *FFBrowser) Watch() bool {
	log.Debugf("<%s> TODO ... ", bw.name)

	//if !bw.isWatching {
	//go WatcherThread(bw)
	//bw.isWatching = true
	//return true
	//}
	//return false

	return false
}

func (bw *FFBrowser) Load() {
	bw.BaseBrowser.Load()
	bw.Run()
}
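
// getFFBookmarks loads bookmarks from places.sqlite in a single pass: each row
// joins a bookmarked URL (moz_places) with one of its tag folders (moz_bookmarks
// entries whose parent is the tags root), so a URL appears once per tag.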
func getFFBookmarks(bw *FFBrowser) {
	QGetBookmarks := `WITH bookmarks AS
	(SELECT moz_places.url AS url,
		moz_places.description AS desc,
		moz_places.title AS urlTitle,
		moz_bookmarks.parent AS tagId
	FROM moz_places LEFT OUTER JOIN moz_bookmarks
	ON moz_places.id = moz_bookmarks.fk
	WHERE moz_bookmarks.parent
	IN (SELECT id FROM moz_bookmarks WHERE parent = ?))
	SELECT url, IFNULL(urlTitle, ''), IFNULL(desc, ''),
		tagId, moz_bookmarks.title AS tagTitle
	FROM bookmarks LEFT OUTER JOIN moz_bookmarks
	ON tagId = moz_bookmarks.id
	ORDER BY url`

	//QGetTags := "SELECT id,title from moz_bookmarks WHERE parent = %d"

	rows, err := bw.places.Handle.Query(QGetBookmarks, MozPlacesTagsRootID)
	if err != nil {
		log.Error(err)
		return
	}
	defer rows.Close()

	tagMap := make(map[int]*Node)
	urlMap := make(map[string]*Node)

	// Rebuild the node tree
	rootNode := bw.NodeTree

	/*
	 * This pass is used only for fetching bookmarks from Firefox.
	 * Checking against the URLIndex should not be done here.
	 */
	for rows.Next() {
		var url, title, tagTitle, desc string
		var tagId int

		err = rows.Scan(&url, &title, &desc, &tagId, &tagTitle)
		//log.Debugf("%s|%s|%s|%d|%s", url, title, desc, tagId, tagTitle)
		if err != nil {
			log.Error(err)
			continue
		}
		/*
		 * If this is the first time we see this tag,
		 * add it to the tagMap and create its node.
		 */
		tagNode, tagNodeExists := tagMap[tagId]
		if !tagNodeExists {
			// Add the tag as a node
			tagNode = new(Node)
			tagNode.Type = "tag"
			tagNode.Name = tagTitle
			tagNode.Parent = rootNode
			rootNode.Children = append(rootNode.Children, tagNode)
			tagMap[tagId] = tagNode
			bw.Stats.currentNodeCount++
		}

		// Add the url to the tag
		urlNode, urlNodeExists := urlMap[url]
		if !urlNodeExists {
			urlNode = new(Node)
			urlNode.Type = "url"
			urlNode.URL = url
			urlNode.Name = title
			urlNode.Desc = desc
			urlMap[url] = urlNode
		}

		// Add the tag to the urlnode tags
		urlNode.Tags = append(urlNode.Tags, tagNode.Name)

		// Set the tag as parent of the urlnode
		urlNode.Parent = tagMap[tagId]

		// Add the urlnode as a child of the tag node
		tagMap[tagId].Children = append(tagMap[tagId].Children, urlNode)

		bw.Stats.currentUrlCount++
		bw.Stats.currentNodeCount++
	}

	/*
	 * Build tags for each url then check against the URLIndex
	 * for changes.
	 */

	// Check if the url is already in the index. TODO: should be done in a new pass
	//iVal, found := bw.URLIndex.Get(urlNode.URL)

	/*
	 * The fields where tags may change are hashed together
	 * to detect changes in future parses.
	 * To handle tag changes we need to get all parent nodes
	 * (tags) for this url then hash their concatenation.
	 */
	//nameHash := xxhash.ChecksumString64(urlNode.Name)
}
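
// Run parses the Firefox bookmarks into the node tree, then syncs the result
// to the buffer database and on to the cache.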
func (bw *FFBrowser) Run() {
	log.Debugf("<%s> start bookmark parsing", bw.name)

	// TODO: Handle folders

	// Parse bookmarks to a flat tree (for compatibility with tree system)
	start := time.Now()
	getFFBookmarks(bw)
	bw.Stats.lastParseTime = time.Since(start)

	// Finished parsing
	//go PrintTree(bw.NodeTree) // debugging
	log.Debugf("<%s> parsed %d bookmarks and %d nodes", bw.name,
		bw.Stats.currentUrlCount, bw.Stats.currentNodeCount)
	log.Debugf("<%s> parsed tree in %s", bw.name, bw.Stats.lastParseTime)
	bw.ResetStats()

	syncTreeToBuffer(bw.NodeTree, bw.BufferDB)

	// Implement incremental sync by doing INSERTs
	bw.BufferDB.SyncTo(CacheDB)
}