mirror of https://github.com/mickael-kerjean/filestash.git (synced 2025-11-02 11:57:04 +08:00)
chore (plg_search_sqlitefts): remove unused code
@@ -45,7 +45,7 @@ var SEARCH_EXCLUSION = func() []string {
 	f.Id = "folder_exclusion"
 	f.Name = "folder_exclusion"
 	f.Type = "text"
-	f.Description = "Exclude folders during the exploration phase"
+	f.Description = "Exclude some specific folder from the crawl / index"
 	f.Placeholder = "Default: node_modules,bower_components,.cache,.npm,.git"
 	f.Default = "node_modules,bower_components,.cache,.npm,.git"
 	return f
@@ -138,7 +138,7 @@ var INDEXING_EXT = func() string {
 	f.Id = "indexer_ext"
 	f.Name = "indexer_ext"
 	f.Type = "text"
-	f.Description = "File extension we want to see indexed"
+	f.Description = "Extensions that will be handled by the full text search engine"
 	f.Placeholder = "Default: org,txt,docx,pdf,md,form"
 	f.Default = "org,txt,docx,pdf,md,form"
 	return f
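Note on the two settings above: both defaults are plain comma-separated lists, so the []string that SEARCH_EXCLUSION() returns is presumably just the configured value split on commas. A minimal sketch of that parsing under that assumption (parseExclusion is a hypothetical helper, not filestash's API):

package main

import (
	"fmt"
	"strings"
)

// parseExclusion turns a comma-separated setting such as
// "node_modules,bower_components,.cache,.npm,.git" into a slice of
// folder names, dropping empty entries and stray whitespace.
func parseExclusion(value string) []string {
	var out []string
	for _, folder := range strings.Split(value, ",") {
		if folder = strings.TrimSpace(folder); folder != "" {
			out = append(out, folder)
		}
	}
	return out
}

func main() {
	fmt.Println(parseExclusion("node_modules,bower_components,.cache,.npm,.git"))
	// Output: [node_modules bower_components .cache .npm .git]
}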
@@ -26,7 +26,6 @@ type Crawler struct {
 	Backend  IBackend
 	State    indexer.Index
 	mu       sync.Mutex
-	lastHash string
 }
 
 func NewCrawler(id string, b IBackend) (Crawler, error) {
@@ -2,10 +2,7 @@ package plg_search_sqlitefts
 
 import (
 	"container/heap"
-	"encoding/base64"
-	"hash/fnv"
 	"path/filepath"
-	"strconv"
 	"strings"
 	"time"
 
@@ -30,36 +27,7 @@ func (this *Crawler) Discover(tx indexer.Manager) bool {
 		this.CurrentPhase = ""
 		return true
 	}
-	if len(files) == 0 {
-		return true
-	}
-
-	// We don't want our indexer to go wild and diverge over time. As such we need to detect those edge cases: aka
-	// recursive folder structure. Our detection is relying on a Hash of []os.FileInfo
-	hashFiles := func() string {
-		var step int = len(files) / 50
-		if step == 0 {
-			step = 1
-		}
-		hasher := fnv.New32()
-		hasher.Write([]byte(strconv.Itoa(len(files))))
-		for i := 0; i < len(files); i = i + step {
-			hasher.Write([]byte(files[i].Name()))
-		}
-		return base64.StdEncoding.EncodeToString(hasher.Sum(nil))
-	}()
-	if hashFiles == this.lastHash {
-		return true
-	}
-	this.lastHash = ""
-	for i := 0; i < this.FoldersUnknown.Len(); i++ {
-		if this.FoldersUnknown[i].Hash == hashFiles && filepath.Base(doc.Path) != filepath.Base(this.FoldersUnknown[i].Path) {
-			this.lastHash = hashFiles
-			return true
-		}
-	}
-
 	// Insert the newly found data within our index
 	excluded := SEARCH_EXCLUSION()
 	for i := range files {
 		f := files[i]
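For the record, the block deleted above was the crawler's guard against recursive folder structures: it fingerprinted each directory listing by hashing the listing's length plus at most ~50 sampled file names with FNV-32, and bailed out when the fingerprint matched a folder already queued. A standalone sketch of that sampling-hash technique (hashListing is an illustrative stand-in for the original closure, not filestash code):

package main

import (
	"encoding/base64"
	"fmt"
	"hash/fnv"
	"strconv"
)

// hashListing fingerprints a directory listing. Sampling every len/50-th
// name keeps the cost bounded on huge folders while still making two
// identical listings (the symptom of a recursive structure) collide.
func hashListing(names []string) string {
	step := len(names) / 50
	if step == 0 {
		step = 1
	}
	hasher := fnv.New32()
	hasher.Write([]byte(strconv.Itoa(len(names))))
	for i := 0; i < len(names); i += step {
		hasher.Write([]byte(names[i]))
	}
	return base64.StdEncoding.EncodeToString(hasher.Sum(nil))
}

func main() {
	a := hashListing([]string{"a.txt", "b.txt", "sub"})
	b := hashListing([]string{"a.txt", "b.txt", "sub"})
	fmt.Println(a == b) // true: identical listings share a fingerprint
}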
@@ -105,7 +73,6 @@ func (this *Crawler) Discover(tx indexer.Manager) bool {
 				Path:    p,
 				Size:    f.Size(),
 				ModTime: f.ModTime(),
-				Hash:    hashFiles,
 			})
 		}
 	} else {