2018-06-08 16:27:33 +00:00
|
|
|
package main
|
|
|
|
|
2018-06-14 00:30:18 +00:00
|
|
|
import (
|
2018-06-14 14:42:54 +00:00
|
|
|
"path"
|
2018-10-25 16:09:03 +00:00
|
|
|
"time"
|
2018-06-14 00:30:18 +00:00
|
|
|
)
|
|
|
|
|
2018-06-08 16:27:33 +00:00
|
|
|
var Firefox = BrowserPaths{
|
|
|
|
"places.sqlite",
|
|
|
|
"/home/spike/.mozilla/firefox/p1rrgord.default/",
|
|
|
|
}
|
|
|
|
|
2018-10-23 20:14:54 +00:00
|
|
|
// Well-known root folder ids in Firefox's moz_bookmarks table
// (places.sqlite).
const (
	// MozPlacesRootID is presumably the top-level bookmarks root
	// (named per the Firefox places schema — confirm against a live db).
	MozPlacesRootID = 1

	// MozPlacesTagsRootID is the "tags" root folder: its direct children
	// are tag folders, whose children are the tagged bookmarks. Used as
	// the parent filter in getFFBookmarks' query.
	MozPlacesTagsRootID = 4

	// MozPlacesMobileRootID is presumably the "mobile bookmarks" root —
	// not referenced in this file's visible code.
	MozPlacesMobileRootID = 6
)
|
|
|
|
|
2018-06-08 16:27:33 +00:00
|
|
|
// FFBrowser implements IBrowser for the Firefox browser.
type FFBrowser struct {
	BaseBrowser // embedding: shared browser state (name, paths, stats, tree, buffer)

	// _places is the handle to Firefox's places.sqlite database, opened
	// read-only in Run().
	// NOTE(review): underscore-prefixed names are unidiomatic Go; a rename
	// to "places" would need coordinated changes across the package.
	_places *DB
}
|
|
|
|
|
|
|
|
// FFTag represents a tag row (id, title) from Firefox's moz_bookmarks table.
// NOTE(review): not referenced by the code visible in this file — possibly
// used elsewhere in the package or a leftover; confirm before removing.
type FFTag struct {
	id    int
	title string
}
|
|
|
|
|
|
|
|
func NewFFBrowser() IBrowser {
|
|
|
|
browser := &FFBrowser{}
|
|
|
|
browser.name = "firefox"
|
|
|
|
browser.bType = TFirefox
|
|
|
|
browser.baseDir = Firefox.BookmarkDir
|
|
|
|
browser.bkFile = Firefox.BookmarkFile
|
|
|
|
browser.Stats = &ParserStats{}
|
2018-10-25 16:09:03 +00:00
|
|
|
browser.NodeTree = &Node{Name: "root", Parent: nil, Type: "root"}
|
2018-06-08 16:27:33 +00:00
|
|
|
|
|
|
|
// Across jobs buffer
|
|
|
|
browser.InitBuffer()
|
|
|
|
|
|
|
|
browser.SetupWatcher()
|
|
|
|
|
2018-10-23 20:14:54 +00:00
|
|
|
/*
|
|
|
|
*Run debouncer to avoid duplicate running of jobs
|
|
|
|
*when a batch of events is received
|
|
|
|
*/
|
|
|
|
|
|
|
|
//browser.eventsChan = make(chan fsnotify.Event, EventsChanLen)
|
|
|
|
//go debouncer(3000*time.Millisecond, browser.eventsChan, browser)
|
2018-06-14 00:30:18 +00:00
|
|
|
|
2018-06-08 16:27:33 +00:00
|
|
|
return browser
|
|
|
|
}
|
|
|
|
|
|
|
|
func (bw *FFBrowser) Watch() bool {
|
|
|
|
|
2018-10-23 20:14:54 +00:00
|
|
|
log.Debugf("<%s> NOT IMPLEMENTED! ", bw.name)
|
|
|
|
//if !bw.isWatching {
|
|
|
|
//go WatcherThread(bw)
|
|
|
|
//bw.isWatching = true
|
|
|
|
//return true
|
|
|
|
//}
|
2018-06-08 16:27:33 +00:00
|
|
|
|
2018-10-23 20:14:54 +00:00
|
|
|
//return false
|
2018-06-08 16:27:33 +00:00
|
|
|
return false
|
|
|
|
}
|
|
|
|
|
|
|
|
// Load performs the generic base-browser loading, then immediately runs a
// full parse of the Firefox bookmarks (see Run).
func (bw *FFBrowser) Load() {
	bw.BaseBrowser.Load()
	bw.Run()
}
|
|
|
|
|
2018-10-24 16:16:42 +00:00
|
|
|
func getFFBookmarks(bw *FFBrowser) {
|
|
|
|
|
|
|
|
QGetBookmarks := `WITH bookmarks AS
|
|
|
|
|
|
|
|
(SELECT moz_places.url AS url,
|
|
|
|
moz_places.description as desc,
|
|
|
|
moz_places.title as urlTitle,
|
|
|
|
moz_bookmarks.parent AS tagId
|
|
|
|
FROM moz_places LEFT OUTER JOIN moz_bookmarks
|
|
|
|
ON moz_places.id = moz_bookmarks.fk
|
|
|
|
WHERE moz_bookmarks.parent
|
|
|
|
IN (SELECT id FROM moz_bookmarks WHERE parent = ? ))
|
|
|
|
|
|
|
|
SELECT url, IFNULL(urlTitle, ''), IFNULL(desc,''),
|
|
|
|
tagId, moz_bookmarks.title AS tagTitle
|
|
|
|
|
|
|
|
FROM bookmarks LEFT OUTER JOIN moz_bookmarks
|
|
|
|
ON tagId = moz_bookmarks.id
|
|
|
|
ORDER BY url`
|
|
|
|
|
|
|
|
//QGetTags := "SELECT id,title from moz_bookmarks WHERE parent = %d"
|
2018-10-23 20:14:54 +00:00
|
|
|
|
2018-10-24 16:16:42 +00:00
|
|
|
rows, err := bw._places.Handle.Query(QGetBookmarks, MozPlacesTagsRootID)
|
2018-10-23 20:14:54 +00:00
|
|
|
logPanic(err)
|
|
|
|
|
2018-10-24 16:16:42 +00:00
|
|
|
tagMap := make(map[int]*Node)
|
2018-10-26 01:04:26 +00:00
|
|
|
urlMap := make(map[string]*Node)
|
2018-10-24 16:16:42 +00:00
|
|
|
|
|
|
|
// Rebuild node tree
|
|
|
|
rootNode := bw.NodeTree
|
|
|
|
|
2018-10-26 01:04:26 +00:00
|
|
|
/*
|
|
|
|
*This pass is used only for fetching bookmarks from firefox.
|
|
|
|
*Checking against the URLIndex should not be done here
|
|
|
|
*/
|
2018-10-23 20:14:54 +00:00
|
|
|
for rows.Next() {
|
2018-10-24 16:16:42 +00:00
|
|
|
var url, title, tagTitle, desc string
|
|
|
|
var tagId int
|
|
|
|
err = rows.Scan(&url, &title, &desc, &tagId, &tagTitle)
|
2018-10-26 01:04:26 +00:00
|
|
|
//log.Debugf("%s|%s|%s|%d|%s", url, title, desc, tagId, tagTitle)
|
2018-10-23 20:14:54 +00:00
|
|
|
logPanic(err)
|
2018-10-24 16:16:42 +00:00
|
|
|
|
|
|
|
/*
|
|
|
|
* If this is the first time we see this tag
|
|
|
|
* add it to the tagMap and create its node
|
|
|
|
*/
|
|
|
|
tagNode, tagNodeExists := tagMap[tagId]
|
|
|
|
if !tagNodeExists {
|
|
|
|
// Add the tag as a node
|
|
|
|
tagNode = new(Node)
|
|
|
|
tagNode.Type = "tag"
|
|
|
|
tagNode.Name = tagTitle
|
|
|
|
tagNode.Parent = rootNode
|
|
|
|
rootNode.Children = append(rootNode.Children, tagNode)
|
|
|
|
tagMap[tagId] = tagNode
|
|
|
|
bw.Stats.currentNodeCount++
|
|
|
|
}
|
|
|
|
|
|
|
|
// Add the url to the tag
|
2018-10-26 01:04:26 +00:00
|
|
|
urlNode, urlNodeExists := urlMap[url]
|
|
|
|
if !urlNodeExists {
|
|
|
|
urlNode = new(Node)
|
|
|
|
urlNode.Type = "url"
|
|
|
|
urlNode.URL = url
|
|
|
|
urlNode.Name = title
|
|
|
|
urlNode.Desc = desc
|
|
|
|
urlMap[url] = urlNode
|
|
|
|
}
|
2018-10-24 16:16:42 +00:00
|
|
|
|
2018-10-26 01:04:26 +00:00
|
|
|
// Add tag to urlnode tags
|
|
|
|
urlNode.Tags = append(urlNode.Tags, tagNode.Name)
|
|
|
|
log.Debug(urlNode.Tags)
|
2018-10-24 16:16:42 +00:00
|
|
|
|
2018-10-26 01:04:26 +00:00
|
|
|
// Set tag as parent to urlnode
|
|
|
|
urlNode.Parent = tagMap[tagId]
|
2018-10-24 16:16:42 +00:00
|
|
|
|
2018-10-26 01:04:26 +00:00
|
|
|
// Add urlnode as child to tag node
|
|
|
|
tagMap[tagId].Children = append(tagMap[tagId].Children, urlNode)
|
2018-10-24 16:16:42 +00:00
|
|
|
|
|
|
|
bw.Stats.currentUrlCount++
|
|
|
|
bw.Stats.currentNodeCount++
|
2018-10-23 20:14:54 +00:00
|
|
|
}
|
2018-10-24 16:16:42 +00:00
|
|
|
|
2018-10-26 01:04:26 +00:00
|
|
|
/*
|
|
|
|
*Build tags for each url then check against URLIndex
|
|
|
|
*for changes
|
|
|
|
*/
|
|
|
|
|
|
|
|
// Check if url already in index TODO: should be done in new pass
|
|
|
|
//iVal, found := bw.URLIndex.Get(urlNode.URL)
|
|
|
|
|
|
|
|
/*
|
|
|
|
* The fields where tags may change are hashed together
|
|
|
|
* to detect changes in futre parses
|
|
|
|
* To handle tag changes we need to get all parent nodes
|
|
|
|
* (tags) for this url then hash their concatenation
|
|
|
|
*/
|
|
|
|
|
|
|
|
//nameHash := xxhash.ChecksumString64(urlNode.Name)
|
|
|
|
// TODO: No guarantee we finished gathering tags !!
|
2018-10-24 16:16:42 +00:00
|
|
|
|
2018-10-23 20:14:54 +00:00
|
|
|
}
|
|
|
|
|
2018-06-08 16:27:33 +00:00
|
|
|
// Run opens the Firefox places.sqlite database read-only, parses all tagged
// bookmarks into bw.NodeTree, then syncs the tree to the in-memory buffer
// and copies the buffer into the cache database.
func (bw *FFBrowser) Run() {

	log.Debugf("<%s> start bookmark parsing", bw.name)

	// TODO: Node tree is not used for now as the folder
	// parsing is not implemented
	// Rebuild node tree
	// bw.NodeTree = &Node{Name: "root", Parent: nil}

	// Open firefox sqlite db
	// NOTE(review): path.Join is for slash paths; filepath.Join is the
	// correct choice for OS paths (identical on Linux, so harmless here).
	bookmarkPath := path.Join(bw.baseDir, bw.bkFile)
	placesDB := DB{}.New("Places", bookmarkPath)
	placesDB.InitRO()
	defer placesDB.Close()

	// NOTE(review): this handle is closed by the defer above when Run
	// returns, so any use of bw._places after Run would hit a closed DB.
	bw._places = placesDB

	// Parse bookmarks to a flat tree (for compatibility with tree system)
	start := time.Now()
	getFFBookmarks(bw)
	bw.Stats.lastParseTime = time.Since(start)

	// Finished parsing
	//go PrintTree(bw.NodeTree) // debugging
	log.Debugf("<%s> parsed %d bookmarks and %d nodes", bw.name, bw.Stats.currentUrlCount, bw.Stats.currentNodeCount)
	log.Debugf("<%s> parsed tree in %s", bw.name, bw.Stats.lastParseTime)

	bw.ResetStats()

	// Push the freshly parsed tree into the buffer database.
	syncTreeToBuffer(bw.NodeTree, bw.BufferDB)

	// Implement incremental sync by doing INSERTs
	bw.BufferDB.CopyTo(CacheDB)
}
|