Squashed commit of the following:
commit ac82511ddd17d5472ae3cfea9bbad9754f5a4d62
Author: Simon Cambier <simon.cambier@protonmail.com>
Date: Sat Oct 22 08:23:42 2022 +0200
    Screw that cache, seriously.

commit 8ba40d1be73daaaffea09e07bc56c339266db9b6
Author: Simon Cambier <simon.cambier@protonmail.com>
Date: Fri Oct 21 22:36:48 2022 +0200
    Stuff

commit 27b8fd7dc809be9714a109d3a458eb1276a47e2e
Author: Simon Cambier <simon.cambier@protonmail.com>
Date: Fri Oct 21 22:22:20 2022 +0200
    Moved files

commit fb1349c914907e586e103ca54fb04b9ddd45ef5d
Author: Simon Cambier <simon.cambier@protonmail.com>
Date: Thu Oct 20 22:25:29 2022 +0200
    Removed duplicate code

commit e7371138e60cbe4155cfd4fb44e3ee1d2e3ee088
Author: Simon Cambier <simon.cambier@protonmail.com>
Date: Thu Oct 20 21:50:09 2022 +0200
    Moved a bunch of files

commit 2ee1b2a0e799d4b41ab3a444d8cc44dfff5b5623
Author: Simon Cambier <simon.cambier@protonmail.com>
Date: Thu Oct 20 21:32:21 2022 +0200
    Removed useless code

commit 76c530dfb9adbad1bbe9079de2330fe43a044249
Author: Simon Cambier <simon.cambier@protonmail.com>
Date: Thu Oct 20 20:44:11 2022 +0200
    Split file reading and indexing
src/file-loader.ts (new file, 92 lines)
@@ -0,0 +1,92 @@
import { cacheManager } from './cache-manager'
import {
  extractHeadingsFromCache,
  getAliasesFromMetadata,
  getTagsFromMetadata,
  isFilePlaintext,
  removeDiacritics,
} from './tools/utils'
import * as NotesIndex from './notes-index'
import type { TFile } from 'obsidian'
import type { IndexedDocument } from './globals'
import { pdfManager } from './pdf/pdf-manager'
import { getNonExistingNotes } from './tools/notes'

/**
 * Return all plaintext files as IndexedDocuments
 */
export async function getPlainTextFiles(): Promise<IndexedDocument[]> {
  const allFiles = app.vault.getFiles().filter(f => isFilePlaintext(f.path))
  const data: IndexedDocument[] = []
  for (const file of allFiles) {
    const doc = await fileToIndexedDocument(file)
    data.push(doc)
    await cacheManager.updateDocument(file.path, doc)
  }
  return data
}

/**
 * Return all PDF files as IndexedDocuments.
 * If a PDF isn't cached yet, it is read from disk and added to the IndexedDB cache.
 */
export async function getPDFFiles(): Promise<IndexedDocument[]> {
  const allFiles = app.vault.getFiles().filter(f => f.path.endsWith('.pdf'))
  const data: IndexedDocument[] = []

  const promises = []
  for (const file of allFiles) {
    promises.push(
      NotesIndex.processQueue(async () => {
        const doc = await fileToIndexedDocument(file)
        await cacheManager.updateDocument(file.path, doc)
        data.push(doc)
      })
    )
  }
  await Promise.all(promises)
  return data
}

/**
 * Convert a file into an IndexedDocument.
 * Will use the cache if possible.
 * @param file
 */
export async function fileToIndexedDocument(
  file: TFile
): Promise<IndexedDocument> {
  let content: string
  if (isFilePlaintext(file.path)) {
    content = await app.vault.cachedRead(file)
  } else if (file.path.endsWith('.pdf')) {
    content = await pdfManager.getPdfText(file)
  } else {
    throw new Error('Invalid file: ' + file.path)
  }

  content = removeDiacritics(content)
  const metadata = app.metadataCache.getFileCache(file)

  // Look for links that lead to non-existing files,
  // and add them to the index.
  if (metadata) {
    const nonExisting = getNonExistingNotes(file, metadata)
    for (const name of nonExisting.filter(o => !cacheManager.getDocument(o))) {
      NotesIndex.addNonExistingToIndex(name, file.path)
    }
  }

  return {
    basename: removeDiacritics(file.basename),
    content,
    path: file.path,
    mtime: file.stat.mtime,

    tags: getTagsFromMetadata(metadata),
    aliases: getAliasesFromMetadata(metadata).join(' '),
    headings1: metadata ? extractHeadingsFromCache(metadata, 1).join(' ') : '',
    headings2: metadata ? extractHeadingsFromCache(metadata, 2).join(' ') : '',
    headings3: metadata ? extractHeadingsFromCache(metadata, 3).join(' ') : '',
  }
}
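
For context, a minimal sketch of how the two loaders introduced in this commit might be consumed by the indexing side. The loadAllDocuments wrapper and its placement are illustrative assumptions, not part of this commit; only getPlainTextFiles, getPDFFiles, and IndexedDocument come from the code above.

// Illustrative usage only, not part of this commit.
import { getPlainTextFiles, getPDFFiles } from './file-loader'
import type { IndexedDocument } from './globals'

// Hypothetical helper: gather every supported file as IndexedDocuments,
// ready to be handed to the search index.
export async function loadAllDocuments(): Promise<IndexedDocument[]> {
  // Plaintext notes are read one by one; PDFs are already throttled through
  // NotesIndex.processQueue inside getPDFFiles.
  const [notes, pdfs] = await Promise.all([getPlainTextFiles(), getPDFFiles()])
  return [...notes, ...pdfs]
}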