A privacy-first, self-hosted, fully open source personal knowledge management software, written in typescript and golang. (PERSONAL FORK)
1// SiYuan - Refactor your thinking
2// Copyright (c) 2020-present, b3log.org
3//
4// This program is free software: you can redistribute it and/or modify
5// it under the terms of the GNU Affero General Public License as published by
6// the Free Software Foundation, either version 3 of the License, or
7// (at your option) any later version.
8//
9// This program is distributed in the hope that it will be useful,
10// but WITHOUT ANY WARRANTY; without even the implied warranty of
11// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12// GNU Affero General Public License for more details.
13//
14// You should have received a copy of the GNU Affero General Public License
15// along with this program. If not, see <https://www.gnu.org/licenses/>.
16
17package model
18
19import (
20 "encoding/json"
21 "errors"
22 "fmt"
23 "io/fs"
24 "math"
25 "os"
26 "path"
27 "path/filepath"
28 "sort"
29 "strconv"
30 "strings"
31 "time"
32
33 "github.com/88250/gulu"
34 "github.com/88250/lute"
35 "github.com/88250/lute/ast"
36 "github.com/88250/lute/parse"
37 "github.com/88250/lute/render"
38 "github.com/siyuan-note/dataparser"
39 "github.com/siyuan-note/eventbus"
40 "github.com/siyuan-note/filelock"
41 "github.com/siyuan-note/logging"
42 "github.com/siyuan-note/siyuan/kernel/cache"
43 "github.com/siyuan-note/siyuan/kernel/conf"
44 "github.com/siyuan-note/siyuan/kernel/search"
45 "github.com/siyuan-note/siyuan/kernel/sql"
46 "github.com/siyuan-note/siyuan/kernel/task"
47 "github.com/siyuan-note/siyuan/kernel/treenode"
48 "github.com/siyuan-note/siyuan/kernel/util"
49)
50
// historyTicker drives periodic automatic history generation; its interval
// is adjusted at runtime via ChangeHistoryTick.
var historyTicker = time.NewTicker(time.Minute * 10)
52
53func AutoGenerateFileHistory() {
54 ChangeHistoryTick(Conf.Editor.GenerateHistoryInterval)
55 for {
56 <-historyTicker.C
57 task.AppendTask(task.HistoryGenerateFile, generateFileHistory)
58 }
59}
60
61func generateFileHistory() {
62 defer logging.Recover()
63
64 if 1 > Conf.Editor.GenerateHistoryInterval {
65 return
66 }
67
68 FlushTxQueue()
69
70 // 生成文档历史
71 for _, box := range Conf.GetOpenedBoxes() {
72 box.generateDocHistory0()
73 }
74
75 // 生成资源文件历史
76 generateAssetsHistory()
77
78 historyDir := util.HistoryDir
79 clearOutdatedHistoryDir(historyDir)
80
81 // 以下部分是老版本的历史数据,不再保留
82 for _, box := range Conf.GetBoxes() {
83 historyDir = filepath.Join(util.DataDir, box.ID, ".siyuan", "history")
84 os.RemoveAll(historyDir)
85 }
86 historyDir = filepath.Join(util.DataDir, "assets", ".siyuan", "history")
87 os.RemoveAll(historyDir)
88 historyDir = filepath.Join(util.DataDir, ".siyuan", "history")
89 os.RemoveAll(historyDir)
90}
91
92func ChangeHistoryTick(minutes int) {
93 if 0 >= minutes {
94 minutes = 3600
95 }
96 historyTicker.Reset(time.Minute * time.Duration(minutes))
97}
98
99func ClearWorkspaceHistory() (err error) {
100 historyDir := util.HistoryDir
101 if gulu.File.IsDir(historyDir) {
102 if err = os.RemoveAll(historyDir); err != nil {
103 logging.LogErrorf("remove workspace history dir [%s] failed: %s", historyDir, err)
104 return
105 }
106 logging.LogInfof("removed workspace history dir [%s]", historyDir)
107 }
108
109 sql.InitHistoryDatabase(true)
110
111 // 以下部分是老版本的清理逻辑,暂时保留
112
113 notebooks, err := ListNotebooks()
114 if err != nil {
115 return
116 }
117
118 for _, notebook := range notebooks {
119 boxID := notebook.ID
120 historyDir := filepath.Join(util.DataDir, boxID, ".siyuan", "history")
121 if !gulu.File.IsDir(historyDir) {
122 continue
123 }
124
125 if err = os.RemoveAll(historyDir); err != nil {
126 logging.LogErrorf("remove notebook history dir [%s] failed: %s", historyDir, err)
127 return
128 }
129 logging.LogInfof("removed notebook history dir [%s]", historyDir)
130 }
131
132 historyDir = filepath.Join(util.DataDir, ".siyuan", "history")
133 if gulu.File.IsDir(historyDir) {
134 if err = os.RemoveAll(historyDir); err != nil {
135 logging.LogErrorf("remove data history dir [%s] failed: %s", historyDir, err)
136 return
137 }
138 logging.LogInfof("removed data history dir [%s]", historyDir)
139 }
140 historyDir = filepath.Join(util.DataDir, "assets", ".siyuan", "history")
141 if gulu.File.IsDir(historyDir) {
142 if err = os.RemoveAll(historyDir); err != nil {
143 logging.LogErrorf("remove assets history dir [%s] failed: %s", historyDir, err)
144 return
145 }
146 logging.LogInfof("removed assets history dir [%s]", historyDir)
147 }
148 return
149}
150
// GetDocHistoryContent loads the document history snapshot at historyPath and
// returns its rendered content for the history preview UI.
//
// id and rootID both carry the snapshot document's root block ID. isLargeDoc
// reports whether the snapshot is 1 MiB or larger; large documents are
// rendered as formatted markdown text instead of block DOM, and keyword
// highlighting is skipped for them. When highlight is true and keywords are
// present, matches are wrapped with search marks in the returned DOM.
func GetDocHistoryContent(historyPath, keyword string, highlight bool) (id, rootID, content string, isLargeDoc bool, err error) {
	if !util.IsAbsPathInWorkspace(historyPath) {
		msg := "Path [" + historyPath + "] is not in workspace"
		logging.LogErrorf(msg)
		err = errors.New(msg)
		return
	}

	if !gulu.File.IsExist(historyPath) {
		logging.LogWarnf("doc history [%s] not exist", historyPath)
		return
	}

	data, err := filelock.ReadFile(historyPath)
	if err != nil {
		logging.LogErrorf("read file [%s] failed: %s", historyPath, err)
		return
	}
	// 1 MiB or more counts as a large document.
	isLargeDoc = 1024*1024*1 <= len(data)

	luteEngine := NewLute()
	historyTree, err := dataparser.ParseJSONWithoutFix(data, luteEngine.ParseOptions)
	if err != nil {
		logging.LogErrorf("parse tree from file [%s] failed: %s", historyPath, err)
		return
	}
	id = historyTree.Root.ID
	rootID = historyTree.Root.ID

	if !isLargeDoc {
		renderTree := &parse.Tree{Root: &ast.Node{Type: ast.NodeDocument}}
		keyword = strings.Join(strings.Split(keyword, " "), search.TermSep)
		keywords := search.SplitKeyword(keyword)

		var unlinks []*ast.Node
		ast.Walk(historyTree.Root, func(n *ast.Node, entering bool) ast.WalkStatus {
			if !entering {
				return ast.WalkContinue
			}

			// Ignore the content block fold state when browsing history https://github.com/siyuan-note/siyuan/issues/5778
			n.RemoveIALAttr("heading-fold")
			n.RemoveIALAttr("fold")

			if highlight && 0 < len(keywords) {
				if markReplaceSpan(n, &unlinks, keywords, search.MarkDataType, luteEngine) {
					return ast.WalkContinue
				}
			}
			return ast.WalkContinue
		})

		for _, unlink := range unlinks {
			unlink.Unlink()
		}

		// Re-parent all top-level nodes into a fresh document tree for rendering.
		var appends []*ast.Node
		for n := historyTree.Root.FirstChild; nil != n; n = n.Next {
			appends = append(appends, n)
		}
		for _, n := range appends {
			renderTree.Root.AppendChild(n)
		}

		historyTree = renderTree
	}

	// Disallow editing of doc history content https://github.com/siyuan-note/siyuan/issues/6580
	luteEngine.RenderOptions.ProtyleContenteditable = false
	if isLargeDoc {
		util.PushMsg(Conf.Language(36), 5000)
		formatRenderer := render.NewFormatRenderer(historyTree, luteEngine.RenderOptions)
		content = gulu.Str.FromBytes(formatRenderer.Render())
	} else {
		content = luteEngine.Tree2BlockDOM(historyTree, luteEngine.RenderOptions)
	}
	return
}
229
230func RollbackDocHistory(boxID, historyPath string) (err error) {
231 if !gulu.File.IsExist(historyPath) {
232 logging.LogWarnf("doc history [%s] not exist", historyPath)
233 return
234 }
235
236 FlushTxQueue()
237
238 srcPath := historyPath
239 var destPath, parentHPath string
240 rootID := util.GetTreeID(historyPath)
241 workingDoc := treenode.GetBlockTree(rootID)
242 if nil != workingDoc && "d" == workingDoc.Type {
243 if err = filelock.Remove(filepath.Join(util.DataDir, boxID, workingDoc.Path)); err != nil {
244 return
245 }
246 }
247
248 destPath, parentHPath, err = getRollbackDockPath(boxID, historyPath, workingDoc)
249 if err != nil {
250 return
251 }
252
253 var avIDs []string
254 tree, _ := loadTree(srcPath, util.NewLute())
255 if nil != tree {
256 historyDir := strings.TrimPrefix(historyPath, util.HistoryDir+string(os.PathSeparator))
257 if strings.Contains(historyDir, string(os.PathSeparator)) {
258 historyDir = historyDir[:strings.Index(historyDir, string(os.PathSeparator))]
259 }
260 historyDir = filepath.Join(util.HistoryDir, historyDir)
261
262 avNodes := tree.Root.ChildrenByType(ast.NodeAttributeView)
263 for _, avNode := range avNodes {
264 srcAvPath := filepath.Join(historyDir, "storage", "av", avNode.AttributeViewID+".json")
265 destAvPath := filepath.Join(util.DataDir, "storage", "av", avNode.AttributeViewID+".json")
266 if gulu.File.IsExist(destAvPath) {
267 if copyErr := filelock.CopyNewtimes(srcAvPath, destAvPath); nil != copyErr {
268 logging.LogErrorf("copy av [%s] failed: %s", srcAvPath, copyErr)
269 }
270 }
271
272 avIDs = append(avIDs, avNode.AttributeViewID)
273 }
274 }
275 avIDs = gulu.Str.RemoveDuplicatedElem(avIDs)
276
277 tree.Box = boxID
278 tree.Path = filepath.ToSlash(strings.TrimPrefix(destPath, util.DataDir+string(os.PathSeparator)+boxID))
279 tree.HPath = parentHPath + "/" + tree.Root.IALAttr("title")
280
281 // 重置重复的块 ID https://github.com/siyuan-note/siyuan/issues/14358
282 if nil != workingDoc {
283 treenode.RemoveBlockTreesByRootID(rootID)
284 }
285 nodes := map[string]*ast.Node{}
286 ast.Walk(tree.Root, func(n *ast.Node, entering bool) ast.WalkStatus {
287 if !entering || !n.IsBlock() {
288 return ast.WalkContinue
289 }
290
291 nodes[n.ID] = n
292 return ast.WalkContinue
293 })
294 var ids []string
295 for nodeID, _ := range nodes {
296 ids = append(ids, nodeID)
297 }
298 idMap := treenode.ExistBlockTrees(ids)
299 var duplicatedIDs []string
300 for nodeID, exist := range idMap {
301 if exist {
302 duplicatedIDs = append(duplicatedIDs, nodeID)
303 }
304 }
305 for _, nodeID := range duplicatedIDs {
306 node := nodes[nodeID]
307 treenode.ResetNodeID(node)
308 if ast.NodeDocument == node.Type {
309 tree.ID = node.ID
310 tree.Path = tree.Path[:strings.LastIndex(tree.Path, "/")] + "/" + node.ID + ".sy"
311 }
312 }
313
314 // 仅重新索引该文档,不进行全量索引
315 // Reindex only the current document after rolling back the document https://github.com/siyuan-note/siyuan/issues/12320
316 sql.RemoveTreeQueue(rootID)
317 if writeErr := indexWriteTreeIndexQueue(tree); nil != writeErr {
318 return
319 }
320 ReloadFiletree()
321 ReloadProtyle(rootID)
322 util.PushMsg(Conf.Language(102), 3000)
323
324 IncSync()
325
326 // 刷新属性视图
327 for _, avID := range avIDs {
328 ReloadAttrView(avID)
329 }
330
331 go func() {
332 sql.FlushQueue()
333
334 tree, _ = LoadTreeByBlockID(rootID)
335 if nil == tree {
336 return
337 }
338
339 ReloadProtyle(rootID)
340
341 // 刷新页签名
342 refText := getNodeRefText(tree.Root)
343 evt := util.NewCmdResult("rename", 0, util.PushModeBroadcast)
344 evt.Data = map[string]interface{}{
345 "box": boxID,
346 "id": tree.Root.ID,
347 "path": tree.Path,
348 "title": tree.Root.IALAttr("title"),
349 "refText": refText,
350 }
351 util.PushEvent(evt)
352
353 // 收集引用的定义块 ID
354 refDefIDs := getRefDefIDs(tree.Root)
355 // 推送定义节点引用计数
356 for _, defID := range refDefIDs {
357 task.AppendAsyncTaskWithDelay(task.SetDefRefCount, util.SQLFlushInterval, refreshRefCount, defID)
358 }
359 }()
360 return nil
361}
362
363func getRollbackDockPath(boxID, historyPath string, workingDoc *treenode.BlockTree) (destPath, parentHPath string, err error) {
364 var parentID string
365 baseName := filepath.Base(historyPath)
366 var parentWorkingDoc *treenode.BlockTree
367 if nil != workingDoc {
368 parentID = path.Base(path.Dir(workingDoc.Path))
369 parentWorkingDoc = treenode.GetBlockTree(parentID)
370 } else {
371 parentID = filepath.Base(filepath.Dir(historyPath))
372 parentWorkingDoc = treenode.GetBlockTree(parentID)
373 }
374
375 if nil != parentWorkingDoc {
376 // 父路径如果是文档,则恢复到父路径下
377 parentDir := strings.TrimSuffix(parentWorkingDoc.Path, ".sy")
378 parentDir = filepath.Join(util.DataDir, boxID, parentDir)
379 if err = os.MkdirAll(parentDir, 0755); err != nil {
380 return
381 }
382 destPath = filepath.Join(parentDir, baseName)
383 parentHPath = parentWorkingDoc.HPath
384 } else {
385 // 父路径如果不是文档,则恢复到笔记本根路径下
386 destPath = filepath.Join(util.DataDir, boxID, baseName)
387 }
388 return
389}
390
391func RollbackAssetsHistory(historyPath string) (err error) {
392 historyPath = filepath.Join(util.WorkspaceDir, historyPath)
393 if !gulu.File.IsExist(historyPath) {
394 logging.LogWarnf("assets history [%s] not exist", historyPath)
395 return
396 }
397
398 from := historyPath
399 to := filepath.Join(util.DataDir, "assets", filepath.Base(historyPath))
400
401 if err = filelock.CopyNewtimes(from, to); err != nil {
402 logging.LogErrorf("copy file [%s] to [%s] failed: %s", from, to, err)
403 return
404 }
405 IncSync()
406 util.PushMsg(Conf.Language(102), 3000)
407 return nil
408}
409
410func RollbackNotebookHistory(historyPath string) (err error) {
411 if !gulu.File.IsExist(historyPath) {
412 logging.LogWarnf("notebook history [%s] not exist", historyPath)
413 return
414 }
415
416 from := historyPath
417 to := filepath.Join(util.DataDir, filepath.Base(historyPath))
418
419 if err = filelock.CopyNewtimes(from, to); err != nil {
420 logging.LogErrorf("copy file [%s] to [%s] failed: %s", from, to, err)
421 return
422 }
423
424 FullReindex()
425 IncSync()
426 return nil
427}
428
// History is one batch of history items generated at the same point in time.
type History struct {
	HCreated string         `json:"hCreated"` // human-readable creation time
	Items    []*HistoryItem `json:"items"`    // the files captured in this batch
}
433
// HistoryItem describes a single file inside a history snapshot.
type HistoryItem struct {
	Title    string `json:"title"`    // document title or asset file name
	Path     string `json:"path"`     // path of the snapshot copy
	Op       string `json:"op"`       // operation that produced the snapshot
	Notebook string `json:"notebook"` // only used for doc history
}
440
// fileHistoryPageSize is the page size used by history full-text search.
const fileHistoryPageSize = 32
442
443func FullTextSearchHistory(query, box, op string, typ, page int) (ret []string, pageCount, totalCount int) {
444 query = util.RemoveInvalid(query)
445 if "" != query && HistoryTypeDocID != typ {
446 query = stringQuery(query)
447 }
448
449 offset := (page - 1) * fileHistoryPageSize
450
451 table := "histories_fts_case_insensitive"
452 stmt := "SELECT DISTINCT created FROM " + table + " WHERE "
453 stmt += buildSearchHistoryQueryFilter(query, op, box, table, typ)
454 countStmt := strings.ReplaceAll(stmt, "SELECT DISTINCT created", "SELECT COUNT(DISTINCT created) AS total")
455 stmt += " ORDER BY created DESC LIMIT " + strconv.Itoa(fileHistoryPageSize) + " OFFSET " + strconv.Itoa(offset)
456 result, err := sql.QueryHistory(stmt)
457 if err != nil {
458 return
459 }
460 for _, row := range result {
461 ret = append(ret, row["created"].(string))
462 }
463 result, err = sql.QueryHistory(countStmt)
464 if err != nil {
465 return
466 }
467 if 1 > len(ret) {
468 ret = []string{}
469 }
470 if 1 > len(result) {
471 return
472 }
473 totalCount = int(result[0]["total"].(int64))
474 pageCount = int(math.Ceil(float64(totalCount) / float64(fileHistoryPageSize)))
475 return
476}
477
478func FullTextSearchHistoryItems(created, query, box, op string, typ int) (ret []*HistoryItem) {
479 query = util.RemoveInvalid(query)
480 if "" != query && HistoryTypeDocID != typ {
481 query = stringQuery(query)
482 }
483
484 table := "histories_fts_case_insensitive"
485 stmt := "SELECT * FROM " + table + " WHERE "
486 stmt += buildSearchHistoryQueryFilter(query, op, box, table, typ)
487
488 _, parseErr := strconv.Atoi(created)
489 if nil != parseErr {
490 ret = []*HistoryItem{}
491 return
492 }
493
494 stmt += " AND created = '" + created + "' ORDER BY created DESC LIMIT " + fmt.Sprintf("%d", fileHistoryPageSize)
495 sqlHistories := sql.SelectHistoriesRawStmt(stmt)
496 ret = fromSQLHistories(sqlHistories)
497 return
498}
499
// buildSearchHistoryQueryFilter assembles the WHERE clause shared by the
// history full-text searches.
//
// query must already be sanitized by the caller (stringQuery for FTS types,
// util.RemoveInvalid for HistoryTypeDocID). op filters by operation unless it
// is "all". box narrows doc searches to one notebook; any value that is not a
// node ID pattern falls back to "%" (all notebooks), which keeps the LIKE
// interpolation below safe. Entries older than the configured retention
// window are always excluded.
func buildSearchHistoryQueryFilter(query, op, box, table string, typ int) (stmt string) {
	if "" != query {
		switch typ {
		case HistoryTypeDocName:
			stmt += table + " MATCH '{title}:(" + query + ")'"
		case HistoryTypeDoc:
			stmt += table + " MATCH '{title content}:(" + query + ")'"
		case HistoryTypeDocID:
			stmt += " id = '" + query + "'"
		case HistoryTypeAsset:
			stmt += table + " MATCH '{title content}:(" + query + ")'"
		}
	} else {
		stmt += "1=1"
	}
	if "all" != op {
		stmt += " AND op = '" + op + "'"
	}

	// Constrain box to a node ID pattern or the wildcard.
	if "%" != box && !ast.IsNodeIDPattern(box) {
		box = "%"
	}

	if HistoryTypeDocName == typ || HistoryTypeDoc == typ || HistoryTypeDocID == typ {
		if HistoryTypeDocName == typ || HistoryTypeDoc == typ {
			stmt += " AND path LIKE '%/" + box + "/%' AND path LIKE '%.sy'"
		}
	} else if HistoryTypeAsset == typ {
		stmt += " AND path LIKE '%/assets/%'"
	}

	// Exclude entries beyond the history retention window.
	ago := time.Now().Add(-24 * time.Hour * time.Duration(Conf.Editor.HistoryRetentionDays))
	stmt += " AND CAST(created AS INTEGER) > " + fmt.Sprintf("%d", ago.Unix()) + ""
	return
}
535
536func GetNotebookHistory() (ret []*History, err error) {
537 ret = []*History{}
538
539 historyDir := util.HistoryDir
540 if !gulu.File.IsDir(historyDir) {
541 return
542 }
543
544 historyNotebookConfs, err := filepath.Glob(historyDir + "/*-delete/*/.siyuan/conf.json")
545 if err != nil {
546 logging.LogErrorf("read dir [%s] failed: %s", historyDir, err)
547 return
548 }
549 sort.Slice(historyNotebookConfs, func(i, j int) bool {
550 iTimeDir := filepath.Base(filepath.Dir(filepath.Dir(filepath.Dir(historyNotebookConfs[i]))))
551 jTimeDir := filepath.Base(filepath.Dir(filepath.Dir(filepath.Dir(historyNotebookConfs[j]))))
552 return iTimeDir > jTimeDir
553 })
554
555 for _, historyNotebookConf := range historyNotebookConfs {
556 timeDir := filepath.Base(filepath.Dir(filepath.Dir(filepath.Dir(historyNotebookConf))))
557 t := timeDir[:strings.LastIndex(timeDir, "-")]
558 if ti, parseErr := time.Parse("2006-01-02-150405", t); nil == parseErr {
559 t = ti.Format("2006-01-02 15:04:05")
560 }
561
562 var c conf.BoxConf
563 data, readErr := os.ReadFile(historyNotebookConf)
564 if nil != readErr {
565 logging.LogErrorf("read notebook conf [%s] failed: %s", historyNotebookConf, readErr)
566 continue
567 }
568 if err = json.Unmarshal(data, &c); err != nil {
569 logging.LogErrorf("parse notebook conf [%s] failed: %s", historyNotebookConf, err)
570 continue
571 }
572
573 ret = append(ret, &History{
574 HCreated: t,
575 Items: []*HistoryItem{{
576 Title: c.Name,
577 Path: filepath.Dir(filepath.Dir(historyNotebookConf)),
578 Op: HistoryOpDelete,
579 }},
580 })
581 }
582
583 sort.Slice(ret, func(i, j int) bool {
584 return ret[i].HCreated > ret[j].HCreated
585 })
586 return
587}
588
589func generateAssetsHistory() {
590 assets := recentModifiedAssets()
591 if 1 > len(assets) {
592 return
593 }
594
595 historyDir, err := GetHistoryDir(HistoryOpUpdate)
596 if err != nil {
597 logging.LogErrorf("get history dir failed: %s", err)
598 return
599 }
600
601 for _, file := range assets {
602 historyPath := filepath.Join(historyDir, "assets", strings.TrimPrefix(file, filepath.Join(util.DataDir, "assets")))
603 if err = os.MkdirAll(filepath.Dir(historyPath), 0755); err != nil {
604 logging.LogErrorf("generate history failed: %s", err)
605 return
606 }
607
608 if err = filelock.Copy(file, historyPath); err != nil {
609 logging.LogErrorf("copy file [%s] to [%s] failed: %s", file, historyPath, err)
610 return
611 }
612 }
613
614 indexHistoryDir(filepath.Base(historyDir), util.NewLute())
615 return
616}
617
618func (box *Box) generateDocHistory0() {
619 files := box.recentModifiedDocs()
620 if 1 > len(files) {
621 return
622 }
623
624 historyDir, err := GetHistoryDir(HistoryOpUpdate)
625 if err != nil {
626 logging.LogErrorf("get history dir failed: %s", err)
627 return
628 }
629
630 luteEngine := util.NewLute()
631 for _, file := range files {
632 historyPath := filepath.Join(historyDir, box.ID, strings.TrimPrefix(file, filepath.Join(util.DataDir, box.ID)))
633 if err = os.MkdirAll(filepath.Dir(historyPath), 0755); err != nil {
634 logging.LogErrorf("generate history failed: %s", err)
635 return
636 }
637
638 var data []byte
639 if data, err = filelock.ReadFile(file); err != nil {
640 logging.LogErrorf("generate history failed: %s", err)
641 return
642 }
643
644 if err = gulu.File.WriteFileSafer(historyPath, data, 0644); err != nil {
645 logging.LogErrorf("generate history failed: %s", err)
646 return
647 }
648
649 if strings.HasSuffix(file, ".sy") {
650 tree, loadErr := loadTree(file, luteEngine)
651 if nil != loadErr {
652 logging.LogErrorf("load tree [%s] failed: %s", file, loadErr)
653 } else {
654 generateAvHistory(tree, historyDir)
655 }
656 }
657 }
658
659 indexHistoryDir(filepath.Base(historyDir), util.NewLute())
660 return
661}
662
663func clearOutdatedHistoryDir(historyDir string) {
664 if !gulu.File.IsExist(historyDir) {
665 return
666 }
667
668 dirs, err := os.ReadDir(historyDir)
669 if err != nil {
670 logging.LogErrorf("clear history [%s] failed: %s", historyDir, err)
671 return
672 }
673
674 now := time.Now()
675 ago := now.Add(-24 * time.Hour * time.Duration(Conf.Editor.HistoryRetentionDays)).Unix()
676 var removes []string
677 for _, dir := range dirs {
678 dirInfo, err := dir.Info()
679 if err != nil {
680 logging.LogErrorf("read history dir [%s] failed: %s", dir.Name(), err)
681 continue
682 }
683 if dirInfo.ModTime().Unix() < ago {
684 removes = append(removes, filepath.Join(historyDir, dir.Name()))
685 }
686 }
687 for _, dir := range removes {
688 if err = os.RemoveAll(dir); err != nil {
689 logging.LogWarnf("remove history dir [%s] failed: %s", dir, err)
690 continue
691 }
692 //logging.LogInfof("auto removed history dir [%s]", dir)
693 }
694
695 // 清理历史库
696 sql.DeleteOutdatedHistories(ago)
697}
698
// boxLatestHistoryTime records, per notebook ID, the last time doc history
// generation ran for that notebook.
var boxLatestHistoryTime = map[string]time.Time{}
700
// recentModifiedDocs walks the notebook's data directory and returns the
// absolute paths of files modified since the notebook's last recorded history
// generation time, skipping hidden/ignored entries.
func (box *Box) recentModifiedDocs() (ret []string) {
	latestHistoryTime := boxLatestHistoryTime[box.ID]
	filelock.Walk(filepath.Join(util.DataDir, box.ID), func(path string, d fs.DirEntry, err error) error {
		// Ignore walk errors and nil entries.
		if nil != err || nil == d {
			return nil
		}
		if isSkipFile(d.Name()) {
			if d.IsDir() {
				return filepath.SkipDir
			}
			return nil
		}

		if d.IsDir() {
			return nil
		}

		info, err := d.Info()
		if nil != err {
			return err
		}

		if info.ModTime().After(latestHistoryTime) {
			ret = append(ret, path)
		}
		return nil
	})
	// NOTE(review): presumably advances boxLatestHistoryTime for this box —
	// confirm against Box.UpdateHistoryGenerated.
	box.UpdateHistoryGenerated()
	return
}
731
// assetsLatestHistoryTime is the Unix timestamp of the last asset history
// generation run; only assets updated after it are snapshotted again.
var assetsLatestHistoryTime = time.Now().Unix()
733
734func recentModifiedAssets() (ret []string) {
735 assets := cache.GetAssets()
736 for _, asset := range assets {
737 if asset.Updated > assetsLatestHistoryTime {
738 absPath := filepath.Join(util.DataDir, asset.Path)
739 if filelock.IsHidden(absPath) {
740 continue
741 }
742 ret = append(ret, absPath)
743 }
744 }
745 assetsLatestHistoryTime = time.Now().Unix()
746 return
747}
748
// History operation types; each snapshot directory name carries one of these
// as its suffix.
const (
	HistoryOpClean   = "clean"
	HistoryOpUpdate  = "update"
	HistoryOpDelete  = "delete"
	HistoryOpFormat  = "format"
	HistoryOpSync    = "sync"
	HistoryOpReplace = "replace"
	HistoryOpOutline = "outline"
)
758
759func generateOpTypeHistory(tree *parse.Tree, opType string) {
760 historyDir, err := GetHistoryDir(opType)
761 if err != nil {
762 logging.LogErrorf("get history dir failed: %s", err)
763 return
764 }
765
766 historyPath := filepath.Join(historyDir, tree.Box, tree.Path)
767 if err = os.MkdirAll(filepath.Dir(historyPath), 0755); err != nil {
768 logging.LogErrorf("generate history failed: %s", err)
769 return
770 }
771
772 var data []byte
773 if data, err = filelock.ReadFile(filepath.Join(util.DataDir, tree.Box, tree.Path)); err != nil {
774 logging.LogErrorf("generate history failed: %s", err)
775 return
776 }
777
778 if err = gulu.File.WriteFileSafer(historyPath, data, 0644); err != nil {
779 logging.LogErrorf("generate history failed: %s", err)
780 return
781 }
782
783 generateAvHistory(tree, historyDir)
784
785 indexHistoryDir(filepath.Base(historyDir), util.NewLute())
786}
787
788func generateAvHistory(tree *parse.Tree, historyDir string) {
789 avNodes := tree.Root.ChildrenByType(ast.NodeAttributeView)
790 for _, avNode := range avNodes {
791 srcAvPath := filepath.Join(util.DataDir, "storage", "av", avNode.AttributeViewID+".json")
792 destAvPath := filepath.Join(historyDir, "storage", "av", avNode.AttributeViewID+".json")
793 if copyErr := filelock.Copy(srcAvPath, destAvPath); nil != copyErr {
794 logging.LogErrorf("copy av [%s] failed: %s", srcAvPath, copyErr)
795 }
796 }
797}
798
// GetHistoryDir creates (if needed) and returns a new history snapshot
// directory for the given operation suffix, stamped with the current time.
func GetHistoryDir(suffix string) (ret string, err error) {
	return getHistoryDir(suffix, time.Now())
}
802
803func getHistoryDir(suffix string, t time.Time) (ret string, err error) {
804 ret = filepath.Join(util.HistoryDir, t.Format("2006-01-02-150405")+"-"+suffix)
805 if err = os.MkdirAll(ret, 0755); err != nil {
806 logging.LogErrorf("make history dir failed: %s", err)
807 return
808 }
809 return
810}
811
812func ReindexHistory() {
813 task.AppendTask(task.HistoryDatabaseIndexFull, fullReindexHistory)
814 return
815}
816
817func fullReindexHistory() {
818 historyDirs, err := os.ReadDir(util.HistoryDir)
819 if err != nil {
820 logging.LogErrorf("read history dir [%s] failed: %s", util.HistoryDir, err)
821 return
822 }
823
824 util.PushMsg(Conf.Language(192), 7*1000)
825 sql.InitHistoryDatabase(true)
826 lutEngine := util.NewLute()
827 for _, historyDir := range historyDirs {
828 if !historyDir.IsDir() {
829 continue
830 }
831
832 name := historyDir.Name()
833 indexHistoryDir(name, lutEngine)
834 }
835 return
836}
837
// validOps lists the operation suffixes a history directory name may carry.
var validOps = []string{HistoryOpClean, HistoryOpUpdate, HistoryOpDelete, HistoryOpFormat, HistoryOpSync, HistoryOpReplace, HistoryOpOutline}
839
// History search types used by the full-text history searches.
const (
	HistoryTypeDocName = 0 // Search docs by doc name
	HistoryTypeDoc     = 1 // Search docs by doc name and content
	HistoryTypeAsset   = 2 // Search assets
	HistoryTypeDocID   = 3 // Search docs by doc id
)
846
847func indexHistoryDir(name string, luteEngine *lute.Lute) {
848 defer logging.Recover()
849
850 op := name[strings.LastIndex(name, "-")+1:]
851 if !gulu.Str.Contains(op, validOps) {
852 logging.LogWarnf("invalid history op [%s]", op)
853 return
854 }
855 t := name[:strings.LastIndex(name, "-")]
856 tt, parseErr := time.ParseInLocation("2006-01-02-150405", t, time.Local)
857 if nil != parseErr {
858 logging.LogWarnf("parse history dir time [%s] failed: %s", t, parseErr)
859 return
860 }
861 created := fmt.Sprintf("%d", tt.Unix())
862
863 entryPath := filepath.Join(util.HistoryDir, name)
864 var docs, assets []string
865 filelock.Walk(entryPath, func(path string, d fs.DirEntry, err error) error {
866 if strings.HasSuffix(d.Name(), ".sy") {
867 docs = append(docs, path)
868 } else if strings.Contains(path, "assets"+string(os.PathSeparator)) {
869 assets = append(assets, path)
870 }
871 return nil
872 })
873
874 var histories []*sql.History
875 for _, doc := range docs {
876 tree, loadErr := loadTree(doc, luteEngine)
877 if nil != loadErr {
878 logging.LogErrorf("load tree [%s] failed: %s", doc, loadErr)
879 continue
880 }
881
882 title := tree.Root.IALAttr("title")
883 if "" == title {
884 title = Conf.language(16)
885 }
886 content := tree.Root.Content()
887 p := strings.TrimPrefix(doc, util.HistoryDir)
888 p = filepath.ToSlash(p[1:])
889 histories = append(histories, &sql.History{
890 ID: tree.Root.ID,
891 Type: HistoryTypeDoc,
892 Op: op,
893 Title: title,
894 Content: content,
895 Path: p,
896 Created: created,
897 })
898 }
899
900 for _, asset := range assets {
901 p := strings.TrimPrefix(asset, util.HistoryDir)
902 p = filepath.ToSlash(p[1:])
903 _, id := util.LastID(p)
904 if !ast.IsNodeIDPattern(id) {
905 id = ""
906 }
907 histories = append(histories, &sql.History{
908 ID: id,
909 Type: HistoryTypeAsset,
910 Op: op,
911 Title: filepath.Base(asset),
912 Path: p,
913 Created: created,
914 })
915 }
916
917 sql.IndexHistoriesQueue(histories)
918 return
919}
920
921func fromSQLHistories(sqlHistories []*sql.History) (ret []*HistoryItem) {
922 if 1 > len(sqlHistories) {
923 ret = []*HistoryItem{}
924 return
925 }
926
927 for _, sqlHistory := range sqlHistories {
928 item := &HistoryItem{
929 Title: sqlHistory.Title,
930 Path: filepath.Join(util.HistoryDir, sqlHistory.Path),
931 Op: sqlHistory.Op,
932 }
933 if HistoryTypeAsset == sqlHistory.Type {
934 item.Path = filepath.ToSlash(strings.TrimPrefix(item.Path, util.WorkspaceDir))
935 } else {
936 parts := strings.Split(sqlHistory.Path, "/")
937 if 2 <= len(parts) {
938 item.Notebook = parts[1]
939 } else {
940 logging.LogWarnf("invalid doc history path [%s]", item.Path)
941 }
942 }
943 ret = append(ret, item)
944 }
945 return
946}
947
// init wires up history-related event subscriptions at package load time.
func init() {
	subscribeSQLHistoryEvents()
}
951
// subscribeSQLHistoryEvents re-indexes the history database whenever a
// rebuild is requested on the event bus.
func subscribeSQLHistoryEvents() {
	eventbus.Subscribe(util.EvtSQLHistoryRebuild, func() {
		ReindexHistory()
	})
}