Merge branch 'develop'
@@ -6,7 +6,7 @@ describe('The Query class', () => {

   it('should correctly parse string queries', () => {
     // Act
-    const query = new Query(stringQuery)
+    const query = new Query(stringQuery, { ignoreDiacritics: true })

     // Assert
     const segments = query.query.text
@@ -25,7 +25,7 @@ describe('The Query class', () => {

   it('should not exclude words when there is no space before', () => {
     // Act
-    const query = new Query('foo bar-baz')
+    const query = new Query('foo bar-baz', { ignoreDiacritics: true })

     // Assert
     expect(query.query.exclude.text).toHaveLength(0)
@@ -34,7 +34,7 @@ describe('The Query class', () => {
   describe('.getExactTerms()', () => {
     it('should return an array of strings containing "exact" values', () => {
       // Act
-      const query = new Query(stringQuery)
+      const query = new Query(stringQuery, { ignoreDiacritics: true })

       // Assert
       expect(query.getExactTerms()).toEqual(['lorem ipsum', 'sit amet'])

@@ -1,10 +1,5 @@
|
||||
import { Notice, TFile } from 'obsidian'
|
||||
import {
|
||||
type DocumentRef,
|
||||
getTextExtractor,
|
||||
type IndexedDocument,
|
||||
} from './globals'
|
||||
import { database } from './database'
|
||||
import { TFile } from 'obsidian'
|
||||
import type { IndexedDocument } from './globals'
|
||||
import {
|
||||
extractHeadingsFromCache,
|
||||
getAliasesFromMetadata,
|
||||
@@ -12,167 +7,17 @@ import {
|
||||
isFileCanvas,
|
||||
isFileFromDataloomPlugin,
|
||||
isFileImage,
|
||||
isFilePDF,
|
||||
isFileOffice,
|
||||
isFilePlaintext,
|
||||
isFilenameIndexable,
|
||||
isFilePDF,
|
||||
logDebug,
|
||||
makeMD5,
|
||||
removeDiacritics,
|
||||
stripMarkdownCharacters,
|
||||
} from './tools/utils'
|
||||
import type { CanvasData } from 'obsidian/canvas'
|
||||
import type { AsPlainObject } from 'minisearch'
|
||||
import type MiniSearch from 'minisearch'
|
||||
import { settings } from './settings'
|
||||
import { getObsidianApp } from './stores/obsidian-app'
|
||||
import type OmnisearchPlugin from './main'
|
||||
import { getNonExistingNotes } from './tools/notes'
|
||||
|
||||
const app = getObsidianApp()
|
||||
|
||||
/**
|
||||
* This function is responsible for extracting the text from a file and
|
||||
* returning it as an `IndexedDocument` object.
|
||||
* @param path
|
||||
*/
|
||||
async function getAndMapIndexedDocument(
|
||||
path: string
|
||||
): Promise<IndexedDocument> {
|
||||
const file = app.vault.getAbstractFileByPath(path)
|
||||
if (!file) throw new Error(`Invalid file path: "${path}"`)
|
||||
if (!(file instanceof TFile)) throw new Error(`Not a TFile: "${path}"`)
|
||||
let content: string | null = null
|
||||
|
||||
const extractor = getTextExtractor()
|
||||
|
||||
// ** Plain text **
|
||||
// Just read the file content
|
||||
if (isFilePlaintext(path)) {
|
||||
content = await app.vault.cachedRead(file)
|
||||
}
|
||||
|
||||
// ** Canvas **
|
||||
// Extract the text fields from the json
|
||||
else if (isFileCanvas(path)) {
|
||||
const canvas = JSON.parse(await app.vault.cachedRead(file)) as CanvasData
|
||||
let texts: string[] = []
|
||||
// Concatenate text from the canvas fields
|
||||
for (const node of canvas.nodes) {
|
||||
if (node.type === 'text') {
|
||||
texts.push(node.text)
|
||||
} else if (node.type === 'file') {
|
||||
texts.push(node.file)
|
||||
}
|
||||
}
|
||||
for (const edge of canvas.edges.filter(e => !!e.label)) {
|
||||
texts.push(edge.label!)
|
||||
}
|
||||
content = texts.join('\r\n')
|
||||
}
|
||||
|
||||
// ** Dataloom plugin **
|
||||
else if (isFileFromDataloomPlugin(path)) {
|
||||
try {
|
||||
const data = JSON.parse(await app.vault.cachedRead(file))
|
||||
// data is a json object, we recursively iterate the keys
|
||||
// and concatenate the values if the key is "markdown"
|
||||
const texts: string[] = []
|
||||
const iterate = (obj: any) => {
|
||||
for (const key in obj) {
|
||||
if (typeof obj[key] === 'object') {
|
||||
iterate(obj[key])
|
||||
} else if (key === 'content') {
|
||||
texts.push(obj[key])
|
||||
}
|
||||
}
|
||||
}
|
||||
iterate(data)
|
||||
content = texts.join('\r\n')
|
||||
} catch (e) {
|
||||
console.error('Omnisearch: Error while parsing Dataloom file', path)
|
||||
console.error(e)
|
||||
}
|
||||
}
|
||||
|
||||
// ** Image **
|
||||
else if (
|
||||
isFileImage(path) &&
|
||||
settings.imagesIndexing &&
|
||||
extractor?.canFileBeExtracted(path)
|
||||
) {
|
||||
content = await extractor.extractText(file)
|
||||
}
|
||||
// ** PDF **
|
||||
else if (
|
||||
isFilePDF(path) &&
|
||||
settings.PDFIndexing &&
|
||||
extractor?.canFileBeExtracted(path)
|
||||
) {
|
||||
content = await extractor.extractText(file)
|
||||
}
|
||||
|
||||
// ** Office document **
|
||||
else if (
|
||||
isFileOffice(path) &&
|
||||
settings.officeIndexing &&
|
||||
extractor?.canFileBeExtracted(path)
|
||||
) {
|
||||
content = await extractor.extractText(file)
|
||||
}
|
||||
|
||||
// ** Unsupported files **
|
||||
else if (isFilenameIndexable(path)) {
|
||||
content = file.path
|
||||
}
|
||||
|
||||
if (content === null || content === undefined) {
|
||||
// This shouldn't happen
|
||||
console.warn(`Omnisearch: ${content} content for file`, file.path)
|
||||
content = ''
|
||||
}
|
||||
const metadata = app.metadataCache.getFileCache(file)
|
||||
|
||||
// Look for links that lead to non-existing files,
|
||||
// and add them to the index.
|
||||
if (metadata) {
|
||||
// // FIXME: https://github.com/scambier/obsidian-omnisearch/issues/129
|
||||
// const nonExisting = getNonExistingNotes(file, metadata)
|
||||
// for (const name of nonExisting.filter(
|
||||
// o => !cacheManager.getLiveDocument(o)
|
||||
// )) {
|
||||
// NotesIndex.addNonExistingToIndex(name, file.path)
|
||||
// }
|
||||
|
||||
// EXCALIDRAW
|
||||
// Remove the json code
|
||||
if (metadata.frontmatter?.['excalidraw-plugin']) {
|
||||
const comments =
|
||||
metadata.sections?.filter(s => s.type === 'comment') ?? []
|
||||
for (const { start, end } of comments.map(c => c.position)) {
|
||||
content =
|
||||
content.substring(0, start.offset - 1) + content.substring(end.offset)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const tags = getTagsFromMetadata(metadata)
|
||||
return {
|
||||
basename: file.basename,
|
||||
content,
|
||||
/** Content without diacritics and markdown chars */
|
||||
cleanedContent: stripMarkdownCharacters(removeDiacritics(content)),
|
||||
path: file.path,
|
||||
mtime: file.stat.mtime,
|
||||
|
||||
tags: tags,
|
||||
unmarkedTags: tags.map(t => t.replace('#', '')),
|
||||
aliases: getAliasesFromMetadata(metadata).join(''),
|
||||
headings1: metadata ? extractHeadingsFromCache(metadata, 1).join(' ') : '',
|
||||
headings2: metadata ? extractHeadingsFromCache(metadata, 2).join(' ') : '',
|
||||
headings3: metadata ? extractHeadingsFromCache(metadata, 3).join(' ') : '',
|
||||
}
|
||||
}
|
||||
|
||||
class CacheManager {
|
||||
export class CacheManager {
|
||||
/**
|
||||
* Show an empty input field next time the user opens Omnisearch modal
|
||||
*/
|
||||
@@ -184,13 +29,15 @@ class CacheManager {
|
||||
*/
|
||||
private documents: Map<string, IndexedDocument> = new Map()
|
||||
|
||||
constructor(private plugin: OmnisearchPlugin) {}
|
||||
|
||||
/**
|
||||
* Set or update the live cache with the content of the given file.
|
||||
* @param path
|
||||
*/
|
||||
public async addToLiveCache(path: string): Promise<void> {
|
||||
try {
|
||||
const doc = await getAndMapIndexedDocument(path)
|
||||
const doc = await this.getAndMapIndexedDocument(path)
|
||||
if (!doc.path) {
|
||||
console.error(
|
||||
`Missing .path field in IndexedDocument "${doc.basename}", skipping`
|
||||
@@ -224,6 +71,7 @@ class CacheManager {
|
||||
return
|
||||
}
|
||||
this.nextQueryIsEmpty = false
|
||||
const database = this.plugin.database
|
||||
let history = await database.searchHistory.toArray()
|
||||
history = history.filter(s => s.query !== query).reverse()
|
||||
history.unshift({ query })
|
||||
@@ -236,7 +84,7 @@ class CacheManager {
|
||||
* @returns The search history, in reverse chronological order
|
||||
*/
|
||||
public async getSearchHistory(): Promise<ReadonlyArray<string>> {
|
||||
const data = (await database.searchHistory.toArray())
|
||||
const data = (await this.plugin.database.searchHistory.toArray())
|
||||
.reverse()
|
||||
.map(o => o.query)
|
||||
if (this.nextQueryIsEmpty) {
|
||||
@@ -245,55 +93,156 @@ class CacheManager {
|
||||
return data
|
||||
}
|
||||
|
||||
//#region Minisearch
|
||||
/**
|
||||
* This function is responsible for extracting the text from a file and
|
||||
* returning it as an `IndexedDocument` object.
|
||||
* @param path
|
||||
*/
|
||||
private async getAndMapIndexedDocument(
|
||||
path: string
|
||||
): Promise<IndexedDocument> {
|
||||
const app = this.plugin.app
|
||||
const file = app.vault.getAbstractFileByPath(path)
|
||||
if (!file) throw new Error(`Invalid file path: "${path}"`)
|
||||
if (!(file instanceof TFile)) throw new Error(`Not a TFile: "${path}"`)
|
||||
let content: string | null = null
|
||||
|
||||
public getDocumentsChecksum(documents: IndexedDocument[]): string {
|
||||
return makeMD5(
|
||||
JSON.stringify(
|
||||
documents.sort((a, b) => {
|
||||
if (a.path < b.path) {
|
||||
return -1
|
||||
} else if (a.path > b.path) {
|
||||
return 1
|
||||
const extractor = this.plugin.getTextExtractor()
|
||||
|
||||
// ** Plain text **
|
||||
// Just read the file content
|
||||
if (this.plugin.notesIndexer.isFilePlaintext(path)) {
|
||||
content = await app.vault.cachedRead(file)
|
||||
}
|
||||
|
||||
// ** Canvas **
|
||||
// Extract the text fields from the json
|
||||
else if (isFileCanvas(path)) {
|
||||
const canvas = JSON.parse(await app.vault.cachedRead(file)) as CanvasData
|
||||
let texts: string[] = []
|
||||
// Concatenate text from the canvas fields
|
||||
for (const node of canvas.nodes) {
|
||||
if (node.type === 'text') {
|
||||
texts.push(node.text)
|
||||
} else if (node.type === 'file') {
|
||||
texts.push(node.file)
|
||||
}
|
||||
}
|
||||
for (const edge of canvas.edges.filter(e => !!e.label)) {
|
||||
texts.push(edge.label!)
|
||||
}
|
||||
content = texts.join('\r\n')
|
||||
}
|
||||
|
||||
// ** Dataloom plugin **
|
||||
else if (isFileFromDataloomPlugin(path)) {
|
||||
try {
|
||||
const data = JSON.parse(await app.vault.cachedRead(file))
|
||||
// data is a json object, we recursively iterate the keys
|
||||
// and concatenate the values if the key is "markdown"
|
||||
const texts: string[] = []
|
||||
const iterate = (obj: any) => {
|
||||
for (const key in obj) {
|
||||
if (typeof obj[key] === 'object') {
|
||||
iterate(obj[key])
|
||||
} else if (key === 'content') {
|
||||
texts.push(obj[key])
|
||||
}
|
||||
}
|
||||
return 0
|
||||
})
|
||||
)
|
||||
)
|
||||
}
|
||||
}
|
||||
iterate(data)
|
||||
content = texts.join('\r\n')
|
||||
} catch (e) {
|
||||
console.error('Omnisearch: Error while parsing Dataloom file', path)
|
||||
console.error(e)
|
||||
}
|
||||
}
|
||||
|
||||
public async getMinisearchCache(): Promise<{
|
||||
paths: DocumentRef[]
|
||||
data: AsPlainObject
|
||||
} | null> {
|
||||
try {
|
||||
const cachedIndex = (await database.minisearch.toArray())[0]
|
||||
return cachedIndex
|
||||
} catch (e) {
|
||||
new Notice(
|
||||
'Omnisearch - Cache missing or invalid. Some freezes may occur while Omnisearch indexes your vault.'
|
||||
)
|
||||
console.error('Omnisearch - Error while loading Minisearch cache')
|
||||
console.error(e)
|
||||
return null
|
||||
// ** Image **
|
||||
else if (
|
||||
isFileImage(path) &&
|
||||
this.plugin.settings.imagesIndexing &&
|
||||
extractor?.canFileBeExtracted(path)
|
||||
) {
|
||||
content = await extractor.extractText(file)
|
||||
}
|
||||
// ** PDF **
|
||||
else if (
|
||||
isFilePDF(path) &&
|
||||
this.plugin.settings.PDFIndexing &&
|
||||
extractor?.canFileBeExtracted(path)
|
||||
) {
|
||||
content = await extractor.extractText(file)
|
||||
}
|
||||
|
||||
// ** Office document **
|
||||
else if (
|
||||
isFileOffice(path) &&
|
||||
this.plugin.settings.officeIndexing &&
|
||||
extractor?.canFileBeExtracted(path)
|
||||
) {
|
||||
content = await extractor.extractText(file)
|
||||
}
|
||||
|
||||
// ** Unsupported files **
|
||||
else if (this.plugin.notesIndexer.isFilenameIndexable(path)) {
|
||||
content = file.path
|
||||
}
|
||||
|
||||
if (content === null || content === undefined) {
|
||||
// This shouldn't happen
|
||||
console.warn(`Omnisearch: ${content} content for file`, file.path)
|
||||
content = ''
|
||||
}
|
||||
const metadata = app.metadataCache.getFileCache(file)
|
||||
|
||||
// Look for links that lead to non-existing files,
|
||||
// and add them to the index.
|
||||
if (metadata) {
|
||||
const nonExisting = getNonExistingNotes(this.plugin.app, file, metadata)
|
||||
for (const name of nonExisting.filter(o => !this.documents.has(o))) {
|
||||
const doc =
|
||||
this.plugin.notesIndexer.generateIndexableNonexistingDocument(
|
||||
name,
|
||||
file.path
|
||||
)
|
||||
// TODO: index non-existing note
|
||||
}
|
||||
|
||||
// EXCALIDRAW
|
||||
// Remove the json code
|
||||
if (metadata.frontmatter?.['excalidraw-plugin']) {
|
||||
const comments =
|
||||
metadata.sections?.filter(s => s.type === 'comment') ?? []
|
||||
for (const { start, end } of comments.map(c => c.position)) {
|
||||
content =
|
||||
content.substring(0, start.offset - 1) +
|
||||
content.substring(end.offset)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const tags = getTagsFromMetadata(metadata)
|
||||
return {
|
||||
basename: file.basename,
|
||||
content,
|
||||
/** Content without diacritics and markdown chars */
|
||||
cleanedContent: stripMarkdownCharacters(removeDiacritics(content)),
|
||||
path: file.path,
|
||||
mtime: file.stat.mtime,
|
||||
|
||||
tags: tags,
|
||||
unmarkedTags: tags.map(t => t.replace('#', '')),
|
||||
aliases: getAliasesFromMetadata(metadata).join(''),
|
||||
headings1: metadata
|
||||
? extractHeadingsFromCache(metadata, 1).join(' ')
|
||||
: '',
|
||||
headings2: metadata
|
||||
? extractHeadingsFromCache(metadata, 2).join(' ')
|
||||
: '',
|
||||
headings3: metadata
|
||||
? extractHeadingsFromCache(metadata, 3).join(' ')
|
||||
: '',
|
||||
}
|
||||
}
|
||||
|
||||
public async writeMinisearchCache(
|
||||
minisearch: MiniSearch,
|
||||
indexed: Map<string, number>
|
||||
): Promise<void> {
|
||||
const paths = Array.from(indexed).map(([k, v]) => ({ path: k, mtime: v }))
|
||||
await database.minisearch.clear()
|
||||
await database.minisearch.add({
|
||||
date: new Date().toISOString(),
|
||||
paths,
|
||||
data: minisearch.toJSON(),
|
||||
})
|
||||
console.log('Omnisearch - Search cache written')
|
||||
}
|
||||
|
||||
//#endregion Minisearch
|
||||
}
|
||||
|
||||
export const cacheManager = new CacheManager()
|
||||
|
||||
@@ -2,10 +2,11 @@
|
||||
import { debounce } from 'obsidian'
|
||||
import { toggleInputComposition } from 'src/globals'
|
||||
import { createEventDispatcher, tick } from 'svelte'
|
||||
import { cacheManager } from '../cache-manager'
|
||||
import type OmnisearchPlugin from '../main'
|
||||
|
||||
export let initialValue = ''
|
||||
export let placeholder = ''
|
||||
export let plugin: OmnisearchPlugin
|
||||
let initialSet = false
|
||||
let value = ''
|
||||
let elInput: HTMLInputElement
|
||||
@@ -39,7 +40,7 @@
|
||||
const debouncedOnInput = debounce(() => {
|
||||
// If typing a query and not executing it,
|
||||
// the next time we open the modal, the search field will be empty
|
||||
cacheManager.addToSearchHistory('')
|
||||
plugin.cacheManager.addToSearchHistory('')
|
||||
dispatch('input', value)
|
||||
}, 300)
|
||||
</script>
|
||||
@@ -50,13 +51,13 @@
|
||||
bind:this="{elInput}"
|
||||
bind:value="{value}"
|
||||
class="prompt-input"
|
||||
use:selectInput
|
||||
on:compositionend="{_ => toggleInputComposition(false)}"
|
||||
on:compositionstart="{_ => toggleInputComposition(true)}"
|
||||
on:input="{debouncedOnInput}"
|
||||
placeholder="{placeholder}"
|
||||
spellcheck="false"
|
||||
type="text" />
|
||||
type="text"
|
||||
use:selectInput />
|
||||
</div>
|
||||
<slot />
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -9,7 +9,7 @@
|
||||
} from 'src/globals'
|
||||
import { getCtrlKeyLabel, loopIndex } from 'src/tools/utils'
|
||||
import { onDestroy, onMount, tick } from 'svelte'
|
||||
import { MarkdownView, App, Platform } from 'obsidian'
|
||||
import { MarkdownView, Platform } from 'obsidian'
|
||||
import ModalContainer from './ModalContainer.svelte'
|
||||
import {
|
||||
OmnisearchInFileModal,
|
||||
@@ -18,14 +18,13 @@
|
||||
import ResultItemInFile from './ResultItemInFile.svelte'
|
||||
import { Query } from 'src/search/query'
|
||||
import { openNote } from 'src/tools/notes'
|
||||
import { searchEngine } from 'src/search/omnisearch'
|
||||
import { stringsToRegex } from 'src/tools/text-processing'
|
||||
import type OmnisearchPlugin from '../main'
|
||||
|
||||
export let plugin: OmnisearchPlugin
|
||||
export let modal: OmnisearchInFileModal
|
||||
export let parent: OmnisearchVaultModal | null = null
|
||||
export let singleFilePath = ''
|
||||
export let previousQuery: string | undefined
|
||||
export let app: App
|
||||
|
||||
let searchQuery: string
|
||||
let groupedOffsets: number[] = []
|
||||
@@ -51,10 +50,12 @@
|
||||
|
||||
$: (async () => {
|
||||
if (searchQuery) {
|
||||
query = new Query(searchQuery)
|
||||
query = new Query(searchQuery, {
|
||||
ignoreDiacritics: plugin.settings.ignoreDiacritics,
|
||||
})
|
||||
note =
|
||||
(
|
||||
await searchEngine.getSuggestions(query, {
|
||||
await plugin.searchEngine.getSuggestions(query, {
|
||||
singleFilePath,
|
||||
})
|
||||
)[0] ?? null
|
||||
@@ -131,12 +132,12 @@
|
||||
if (parent) parent.close()
|
||||
|
||||
// Open (or switch focus to) the note
|
||||
const reg = stringsToRegex(note.foundWords)
|
||||
const reg = plugin.textProcessor.stringsToRegex(note.foundWords)
|
||||
reg.exec(note.content)
|
||||
await openNote(note, reg.lastIndex, newTab)
|
||||
await openNote(plugin.app, note, reg.lastIndex, newTab)
|
||||
|
||||
// Move cursor to the match
|
||||
const view = app.workspace.getActiveViewOfType(MarkdownView)
|
||||
const view = plugin.app.workspace.getActiveViewOfType(MarkdownView)
|
||||
if (!view) {
|
||||
// Not an editable document, so no cursor to place
|
||||
return
|
||||
@@ -155,12 +156,13 @@
|
||||
}
|
||||
|
||||
function switchToVaultModal(): void {
|
||||
new OmnisearchVaultModal(app, searchQuery ?? previousQuery).open()
|
||||
new OmnisearchVaultModal(plugin, searchQuery ?? previousQuery).open()
|
||||
modal.close()
|
||||
}
|
||||
</script>
|
||||
|
||||
<InputSearch
|
||||
plugin="{plugin}"
|
||||
on:input="{e => (searchQuery = e.detail)}"
|
||||
placeholder="Omnisearch - File"
|
||||
initialValue="{previousQuery}">
|
||||
@@ -175,6 +177,7 @@
|
||||
{#if groupedOffsets.length && note}
|
||||
{#each groupedOffsets as offset, i}
|
||||
<ResultItemInFile
|
||||
{plugin}
|
||||
offset="{offset}"
|
||||
note="{note}"
|
||||
index="{i}"
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
<script lang="ts">
|
||||
import { App, MarkdownView, Notice, Platform, TFile } from 'obsidian'
|
||||
import { MarkdownView, Notice, Platform, TFile } from 'obsidian'
|
||||
import { onDestroy, onMount, tick } from 'svelte'
|
||||
import InputSearch from './InputSearch.svelte'
|
||||
import ModalContainer from './ModalContainer.svelte'
|
||||
@@ -24,16 +24,13 @@
|
||||
} from 'src/components/modals'
|
||||
import ResultItemVault from './ResultItemVault.svelte'
|
||||
import { Query } from 'src/search/query'
|
||||
import { settings } from '../settings'
|
||||
import * as NotesIndex from '../notes-index'
|
||||
import { cacheManager } from '../cache-manager'
|
||||
import { searchEngine } from 'src/search/omnisearch'
|
||||
import { cancelable, CancelablePromise } from 'cancelable-promise'
|
||||
import { debounce } from 'lodash-es'
|
||||
import type OmnisearchPlugin from '../main'
|
||||
|
||||
export let modal: OmnisearchVaultModal
|
||||
export let previousQuery: string | undefined
|
||||
export let app: App
|
||||
export let plugin: OmnisearchPlugin
|
||||
|
||||
let selectedIndex = 0
|
||||
let historySearchIndex = 0
|
||||
@@ -51,7 +48,7 @@
|
||||
|
||||
$: selectedNote = resultNotes[selectedIndex]
|
||||
$: searchQuery = searchQuery ?? previousQuery
|
||||
$: if (settings.openInNewPane) {
|
||||
$: if (plugin.settings.openInNewPane) {
|
||||
openInNewPaneKey = '↵'
|
||||
openInCurrentPaneKey = getCtrlKeyLabel() + ' ↵'
|
||||
createInNewPaneKey = 'shift ↵'
|
||||
@@ -103,7 +100,7 @@
|
||||
eventBus.on('vault', Action.PrevSearchHistory, prevSearchHistory)
|
||||
eventBus.on('vault', Action.NextSearchHistory, nextSearchHistory)
|
||||
eventBus.on('vault', Action.OpenInNewLeaf, openNoteInNewLeaf)
|
||||
await NotesIndex.refreshIndex()
|
||||
await plugin.notesIndexer.refreshIndex()
|
||||
await updateResultsDebounced()
|
||||
})
|
||||
|
||||
@@ -113,7 +110,9 @@
|
||||
|
||||
async function prevSearchHistory() {
|
||||
// Filter out the empty string, if it's there
|
||||
const history = (await cacheManager.getSearchHistory()).filter(s => s)
|
||||
const history = (await plugin.cacheManager.getSearchHistory()).filter(
|
||||
s => s
|
||||
)
|
||||
if (++historySearchIndex >= history.length) {
|
||||
historySearchIndex = 0
|
||||
}
|
||||
@@ -122,7 +121,9 @@
|
||||
}
|
||||
|
||||
async function nextSearchHistory() {
|
||||
const history = (await cacheManager.getSearchHistory()).filter(s => s)
|
||||
const history = (await plugin.cacheManager.getSearchHistory()).filter(
|
||||
s => s
|
||||
)
|
||||
if (--historySearchIndex < 0) {
|
||||
historySearchIndex = history.length ? history.length - 1 : 0
|
||||
}
|
||||
@@ -138,10 +139,12 @@
|
||||
cancelableQuery.cancel()
|
||||
cancelableQuery = null
|
||||
}
|
||||
query = new Query(searchQuery)
|
||||
query = new Query(searchQuery, {
|
||||
ignoreDiacritics: plugin.settings.ignoreDiacritics,
|
||||
})
|
||||
cancelableQuery = cancelable(
|
||||
new Promise(resolve => {
|
||||
resolve(searchEngine.getSuggestions(query))
|
||||
resolve(plugin.searchEngine.getSuggestions(query))
|
||||
})
|
||||
)
|
||||
resultNotes = await cancelableQuery
|
||||
@@ -188,7 +191,7 @@
|
||||
|
||||
function saveCurrentQuery() {
|
||||
if (searchQuery) {
|
||||
cacheManager.addToSearchHistory(searchQuery)
|
||||
plugin.cacheManager.addToSearchHistory(searchQuery)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -199,7 +202,7 @@
|
||||
) {
|
||||
saveCurrentQuery()
|
||||
const offset = note.matches?.[0]?.offset ?? 0
|
||||
openNote(note, offset, newPane, newLeaf)
|
||||
openNote(plugin.app, note, offset, newPane, newLeaf)
|
||||
}
|
||||
|
||||
async function onClickCreateNote(_e: MouseEvent) {
|
||||
@@ -211,7 +214,7 @@
|
||||
}): Promise<void> {
|
||||
if (searchQuery) {
|
||||
try {
|
||||
await createNote(searchQuery, opt?.newLeaf)
|
||||
await createNote(plugin.app, searchQuery, opt?.newLeaf)
|
||||
} catch (e) {
|
||||
new Notice((e as Error).message)
|
||||
return
|
||||
@@ -222,11 +225,11 @@
|
||||
|
||||
function insertLink(): void {
|
||||
if (!selectedNote) return
|
||||
const file = app.vault
|
||||
const file = plugin.app.vault
|
||||
.getMarkdownFiles()
|
||||
.find(f => f.path === selectedNote.path)
|
||||
const active = app.workspace.getActiveFile()
|
||||
const view = app.workspace.getActiveViewOfType(MarkdownView)
|
||||
const active = plugin.app.workspace.getActiveFile()
|
||||
const view = plugin.app.workspace.getActiveViewOfType(MarkdownView)
|
||||
if (!view?.editor) {
|
||||
new Notice('Omnisearch - Error - No active editor', 3000)
|
||||
return
|
||||
@@ -235,7 +238,7 @@
|
||||
// Generate link
|
||||
let link: string
|
||||
if (file && active) {
|
||||
link = app.fileManager.generateMarkdownLink(file, active.path)
|
||||
link = plugin.app.fileManager.generateMarkdownLink(file, active.path)
|
||||
} else {
|
||||
link = `[[${selectedNote.basename}.${getExtension(selectedNote.path)}]]`
|
||||
}
|
||||
@@ -249,7 +252,7 @@
|
||||
modal.close()
|
||||
}
|
||||
|
||||
function switchToInFileModal(): void {
|
||||
function switchToInFileModal(): void {
|
||||
// Do nothing if the selectedNote is a PDF,
|
||||
// or if there is 0 match (e.g indexing in progress)
|
||||
if (
|
||||
@@ -264,15 +267,15 @@
|
||||
|
||||
if (selectedNote) {
|
||||
// Open in-file modal for selected search result
|
||||
const file = app.vault.getAbstractFileByPath(selectedNote.path)
|
||||
const file = plugin.app.vault.getAbstractFileByPath(selectedNote.path)
|
||||
if (file && file instanceof TFile) {
|
||||
new OmnisearchInFileModal(app, file, searchQuery).open()
|
||||
new OmnisearchInFileModal(plugin, file, searchQuery).open()
|
||||
}
|
||||
} else {
|
||||
// Open in-file modal for active file
|
||||
const view = app.workspace.getActiveViewOfType(MarkdownView)
|
||||
const view = plugin.app.workspace.getActiveViewOfType(MarkdownView)
|
||||
if (view?.file) {
|
||||
new OmnisearchInFileModal(app, view.file, searchQuery).open()
|
||||
new OmnisearchInFileModal(plugin, view.file, searchQuery).open()
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -295,11 +298,12 @@
|
||||
|
||||
<InputSearch
|
||||
bind:this="{refInput}"
|
||||
plugin="{plugin}"
|
||||
initialValue="{searchQuery}"
|
||||
on:input="{e => (searchQuery = e.detail)}"
|
||||
placeholder="Omnisearch - Vault">
|
||||
<div class="omnisearch-input-container__buttons">
|
||||
{#if settings.showCreateButton}
|
||||
{#if plugin.settings.showCreateButton}
|
||||
<button on:click="{onClickCreateNote}">Create note</button>
|
||||
{/if}
|
||||
{#if Platform.isMobile}
|
||||
@@ -317,7 +321,7 @@
|
||||
<ModalContainer>
|
||||
{#each resultNotes as result, i}
|
||||
<ResultItemVault
|
||||
app="{app}"
|
||||
{plugin}
|
||||
selected="{i === selectedIndex}"
|
||||
note="{result}"
|
||||
on:mousemove="{_ => (selectedIndex = i)}"
|
||||
@@ -329,7 +333,7 @@
|
||||
<div style="text-align: center;">
|
||||
{#if !resultNotes.length && searchQuery && !searching}
|
||||
We found 0 results for your search here.
|
||||
{#if settings.simpleSearch && searchQuery
|
||||
{#if plugin.settings.simpleSearch && searchQuery
|
||||
.split(SPACE_OR_PUNCTUATION)
|
||||
.some(w => w.length < 3)}
|
||||
<br />
|
||||
|
||||
@@ -1,24 +1,24 @@
|
||||
<script lang="ts">
|
||||
import { makeExcerpt, highlightText } from 'src/tools/text-processing'
|
||||
import type { ResultNote } from '../globals'
|
||||
import ResultItemContainer from './ResultItemContainer.svelte'
|
||||
import { cloneDeep } from 'lodash-es'
|
||||
import type OmnisearchPlugin from '../main'
|
||||
|
||||
export let plugin: OmnisearchPlugin
|
||||
export let offset: number
|
||||
export let note: ResultNote
|
||||
export let index = 0
|
||||
export let selected = false
|
||||
|
||||
$: cleanedContent = makeExcerpt(note?.content ?? '', offset)
|
||||
$: cleanedContent = plugin.textProcessor.makeExcerpt(note?.content ?? '', offset)
|
||||
</script>
|
||||
|
||||
<ResultItemContainer
|
||||
id="{index.toString()}"
|
||||
selected="{selected}"
|
||||
on:mousemove
|
||||
on:auxclick
|
||||
on:click
|
||||
on:auxclick>
|
||||
on:mousemove
|
||||
selected="{selected}">
|
||||
<div class="omnisearch-result__body">
|
||||
{@html highlightText(cleanedContent, note.matches)}
|
||||
{@html plugin.textProcessor.highlightText(cleanedContent, note.matches)}
|
||||
</div>
|
||||
</ResultItemContainer>
|
||||
|
||||
@@ -9,18 +9,12 @@
|
||||
pathWithoutFilename,
|
||||
} from '../tools/utils'
|
||||
import ResultItemContainer from './ResultItemContainer.svelte'
|
||||
import { TFile, setIcon, App } from 'obsidian'
|
||||
import { cloneDeep } from 'lodash-es'
|
||||
import {
|
||||
stringsToRegex,
|
||||
getMatches,
|
||||
makeExcerpt,
|
||||
highlightText,
|
||||
} from 'src/tools/text-processing'
|
||||
import { TFile, setIcon } from 'obsidian'
|
||||
import type OmnisearchPlugin from '../main'
|
||||
|
||||
export let selected = false
|
||||
export let note: ResultNote
|
||||
export let app: App
|
||||
export let plugin: OmnisearchPlugin
|
||||
|
||||
let imagePath: string | null = null
|
||||
let title = ''
|
||||
@@ -31,16 +25,15 @@
|
||||
$: {
|
||||
imagePath = null
|
||||
if (isFileImage(note.path)) {
|
||||
const file = app.vault.getAbstractFileByPath(note.path)
|
||||
const file = plugin.app.vault.getAbstractFileByPath(note.path)
|
||||
if (file instanceof TFile) {
|
||||
imagePath = app.vault.getResourcePath(file)
|
||||
imagePath = plugin.app.vault.getResourcePath(file)
|
||||
}
|
||||
}
|
||||
}
|
||||
$: reg = stringsToRegex(note.foundWords)
|
||||
$: matchesTitle = getMatches(title, reg)
|
||||
$: matchesNotePath = getMatches(notePath, reg)
|
||||
$: cleanedContent = makeExcerpt(note.content, note.matches[0]?.offset ?? -1)
|
||||
$: matchesTitle = plugin.textProcessor.getMatches(title, note.foundWords)
|
||||
$: matchesNotePath = plugin.textProcessor.getMatches(notePath, note.foundWords)
|
||||
$: cleanedContent = plugin.textProcessor.makeExcerpt(note.content, note.matches[0]?.offset ?? -1)
|
||||
$: glyph = false //cacheManager.getLiveDocument(note.path)?.doesNotExist
|
||||
$: {
|
||||
title = note.basename
|
||||
@@ -63,15 +56,15 @@
|
||||
<ResultItemContainer
|
||||
glyph="{glyph}"
|
||||
id="{note.path}"
|
||||
on:click
|
||||
on:auxclick
|
||||
on:click
|
||||
on:mousemove
|
||||
selected="{selected}">
|
||||
<div>
|
||||
<div class="omnisearch-result__title-container">
|
||||
<span class="omnisearch-result__title">
|
||||
<span bind:this="{elFilePathIcon}"></span>
|
||||
<span>{@html highlightText(title, matchesTitle)}</span>
|
||||
<span>{@html plugin.textProcessor.highlightText(title, matchesTitle)}</span>
|
||||
<span class="omnisearch-result__extension">
|
||||
.{getExtension(note.path)}
|
||||
</span>
|
||||
@@ -91,14 +84,14 @@
|
||||
{#if notePath}
|
||||
<div class="omnisearch-result__folder-path">
|
||||
<span bind:this="{elFolderPathIcon}"></span>
|
||||
<span>{@html highlightText(notePath, matchesNotePath)}</span>
|
||||
<span>{@html plugin.textProcessor.highlightText(notePath, matchesNotePath)}</span>
|
||||
</div>
|
||||
{/if}
|
||||
|
||||
<div style="display: flex; flex-direction: row;">
|
||||
{#if $showExcerpt}
|
||||
<div class="omnisearch-result__body">
|
||||
{@html highlightText(cleanedContent, note.matches)}
|
||||
{@html plugin.textProcessor.highlightText(cleanedContent, note.matches)}
|
||||
</div>
|
||||
{/if}
|
||||
|
||||
|
||||
@@ -1,14 +1,14 @@
|
||||
import { App, MarkdownView, Modal, TFile } from 'obsidian'
|
||||
import { MarkdownView, Modal, TFile } from 'obsidian'
|
||||
import type { Modifier } from 'obsidian'
|
||||
import ModalVault from './ModalVault.svelte'
|
||||
import ModalInFile from './ModalInFile.svelte'
|
||||
import { Action, eventBus, EventNames, isInputComposition } from '../globals'
|
||||
import { settings } from '../settings'
|
||||
import { cacheManager } from 'src/cache-manager'
|
||||
import type OmnisearchPlugin from 'src/main'
|
||||
|
||||
abstract class OmnisearchModal extends Modal {
|
||||
protected constructor(app: App) {
|
||||
super(app)
|
||||
protected constructor(plugin: OmnisearchPlugin) {
|
||||
super(plugin.app)
|
||||
const settings = plugin.settings
|
||||
|
||||
// Remove all the default modal's children
|
||||
// so that we can more easily customize it
|
||||
@@ -152,26 +152,28 @@ abstract class OmnisearchModal extends Modal {
|
||||
export class OmnisearchVaultModal extends OmnisearchModal {
|
||||
/**
|
||||
* Instantiate the Omnisearch vault modal
|
||||
* @param app
|
||||
* @param plugin
|
||||
* @param query The query to pre-fill the search field with
|
||||
*/
|
||||
constructor(app: App, query?: string) {
|
||||
super(app)
|
||||
constructor(plugin: OmnisearchPlugin, query?: string) {
|
||||
super(plugin)
|
||||
|
||||
// Selected text in the editor
|
||||
const selectedText = app.workspace
|
||||
const selectedText = plugin.app.workspace
|
||||
.getActiveViewOfType(MarkdownView)
|
||||
?.editor.getSelection()
|
||||
|
||||
cacheManager.getSearchHistory().then(history => {
|
||||
plugin.cacheManager.getSearchHistory().then(history => {
|
||||
// Previously searched query (if enabled in settings)
|
||||
const previous = settings.showPreviousQueryResults ? history[0] : null
|
||||
const previous = plugin.settings.showPreviousQueryResults
|
||||
? history[0]
|
||||
: null
|
||||
|
||||
// Instantiate and display the Svelte component
|
||||
const cmp = new ModalVault({
|
||||
target: this.modalEl,
|
||||
props: {
|
||||
app,
|
||||
plugin,
|
||||
modal: this,
|
||||
previousQuery: query || selectedText || previous || '',
|
||||
},
|
||||
@@ -187,17 +189,17 @@ export class OmnisearchVaultModal extends OmnisearchModal {
|
||||
|
||||
export class OmnisearchInFileModal extends OmnisearchModal {
|
||||
constructor(
|
||||
app: App,
|
||||
plugin: OmnisearchPlugin,
|
||||
file: TFile,
|
||||
searchQuery: string = '',
|
||||
parent?: OmnisearchModal
|
||||
) {
|
||||
super(app)
|
||||
super(plugin)
|
||||
|
||||
const cmp = new ModalInFile({
|
||||
target: this.modalEl,
|
||||
props: {
|
||||
app,
|
||||
plugin,
|
||||
modal: this,
|
||||
singleFilePath: file.path,
|
||||
parent: parent,
|
||||
|
||||
@@ -1,15 +1,12 @@
|
||||
import Dexie from 'dexie'
|
||||
import type MiniSearch from 'minisearch'
|
||||
import type { AsPlainObject } from 'minisearch'
|
||||
import type { DocumentRef } from './globals'
|
||||
import { Notice } from 'obsidian'
|
||||
import { getObsidianApp } from './stores/obsidian-app'
|
||||
import type OmnisearchPlugin from './main'
|
||||
|
||||
export class OmnisearchCache extends Dexie {
|
||||
export class Database extends Dexie {
|
||||
public static readonly dbVersion = 8
|
||||
public static readonly dbName = 'omnisearch/cache/' + getObsidianApp().appId
|
||||
|
||||
private static instance: OmnisearchCache
|
||||
|
||||
searchHistory!: Dexie.Table<{ id?: number; query: string }, number>
|
||||
minisearch!: Dexie.Table<
|
||||
{
|
||||
@@ -20,26 +17,62 @@ export class OmnisearchCache extends Dexie {
|
||||
string
|
||||
>
|
||||
|
||||
private constructor() {
|
||||
super(OmnisearchCache.dbName)
|
||||
constructor(private plugin: OmnisearchPlugin) {
|
||||
super(Database.getDbName(plugin.app.appId))
|
||||
// Database structure
|
||||
this.version(OmnisearchCache.dbVersion).stores({
|
||||
this.version(Database.dbVersion).stores({
|
||||
searchHistory: '++id',
|
||||
minisearch: 'date',
|
||||
})
|
||||
}
|
||||
|
||||
private static getDbName(appId: string) {
|
||||
return 'omnisearch/cache/' + appId
|
||||
}
|
||||
|
||||
//#endregion Table declarations
|
||||
|
||||
public async getMinisearchCache(): Promise<{
|
||||
paths: DocumentRef[]
|
||||
data: AsPlainObject
|
||||
} | null> {
|
||||
try {
|
||||
const cachedIndex = (await this.plugin.database.minisearch.toArray())[0]
|
||||
return cachedIndex
|
||||
} catch (e) {
|
||||
new Notice(
|
||||
'Omnisearch - Cache missing or invalid. Some freezes may occur while Omnisearch indexes your vault.'
|
||||
)
|
||||
console.error('Omnisearch - Error while loading Minisearch cache')
|
||||
console.error(e)
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
public async writeMinisearchCache(
|
||||
minisearch: MiniSearch,
|
||||
indexed: Map<string, number>
|
||||
): Promise<void> {
|
||||
const paths = Array.from(indexed).map(([k, v]) => ({ path: k, mtime: v }))
|
||||
const database = this.plugin.database
|
||||
await database.minisearch.clear()
|
||||
await database.minisearch.add({
|
||||
date: new Date().toISOString(),
|
||||
paths,
|
||||
data: minisearch.toJSON(),
|
||||
})
|
||||
console.log('Omnisearch - Search cache written')
|
||||
}
|
||||
|
||||
/**
|
||||
* Deletes Omnisearch databases that have an older version than the current one
|
||||
*/
|
||||
public static async clearOldDatabases(): Promise<void> {
|
||||
public async clearOldDatabases(): Promise<void> {
|
||||
const toDelete = (await indexedDB.databases()).filter(
|
||||
db =>
|
||||
db.name === OmnisearchCache.dbName &&
|
||||
db.name === Database.getDbName(this.plugin.app.appId) &&
|
||||
// version multiplied by 10 https://github.com/dexie/Dexie.js/issues/59
|
||||
db.version !== OmnisearchCache.dbVersion * 10
|
||||
db.version !== Database.dbVersion * 10
|
||||
)
|
||||
if (toDelete.length) {
|
||||
console.log('Omnisearch - Those IndexedDb databases will be deleted:')
|
||||
@@ -51,17 +84,8 @@ export class OmnisearchCache extends Dexie {
|
||||
}
|
||||
}
|
||||
|
||||
public static getInstance() {
|
||||
if (!OmnisearchCache.instance) {
|
||||
OmnisearchCache.instance = new OmnisearchCache()
|
||||
}
|
||||
return OmnisearchCache.instance
|
||||
}
|
||||
|
||||
public async clearCache() {
|
||||
new Notice('Omnisearch - Cache cleared. Please restart Obsidian.')
|
||||
await this.minisearch.clear()
|
||||
}
|
||||
}
|
||||
|
||||
export const database = OmnisearchCache.getInstance()
|
||||
|
||||
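In short, the `OmnisearchCache.getInstance()` singleton above is removed, and src/main.ts (later in this diff) owns the database as a per-plugin instance. A minimal sketch of the resulting wiring (illustrative, not lines from this diff; the helper method name is assumed):

import { Plugin } from 'obsidian'
import { Database } from './database'

export default class OmnisearchPlugin extends Plugin {
  // The plugin now owns its Database instance (previously a module-level singleton).
  public readonly database = new Database(this)

  // Hypothetical helper, for illustration only: consumers reach the cache through the plugin.
  private async logCachedPaths(): Promise<void> {
    const cache = await this.database.getMinisearchCache()
    console.log(`Omnisearch - ${cache?.paths.length ?? 0} cached paths`)
  }
}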
@@ -1,9 +1,6 @@
|
||||
import { EventBus } from './tools/event-bus'
|
||||
import { writable } from 'svelte/store'
|
||||
import { settings } from './settings'
|
||||
import type { TFile } from 'obsidian'
|
||||
import { Platform } from 'obsidian'
|
||||
import { getObsidianApp } from './stores/obsidian-app'
|
||||
|
||||
export const regexLineSplit = /\r?\n|\r|((\.|\?|!)( |\r?\n|\r))/g
|
||||
export const regexYaml = /^---\s*\n(.*?)\n?^---\s?/ms
|
||||
@@ -14,9 +11,6 @@ export const regexExtensions = /(?:^|\s)\.(\w+)/g
|
||||
export const excerptBefore = 100
|
||||
export const excerptAfter = 300
|
||||
|
||||
export const highlightClass = `suggestion-highlight omnisearch-highlight ${
|
||||
settings.highlight ? 'omnisearch-default-highlight' : ''
|
||||
}`
|
||||
export const K_DISABLE_OMNISEARCH = 'omnisearch-disabled'
|
||||
|
||||
export const eventBus = new EventBus()
|
||||
@@ -97,31 +91,11 @@ export function isInputComposition(): boolean {
|
||||
return inComposition
|
||||
}
|
||||
|
||||
/**
|
||||
* Plugin dependency - Chs Patch for Chinese word segmentation
|
||||
* @returns
|
||||
*/
|
||||
export function getChsSegmenter(): any | undefined {
|
||||
return (getObsidianApp() as any).plugins.plugins['cm-chs-patch']
|
||||
}
|
||||
|
||||
export type TextExtractorApi = {
|
||||
extractText: (file: TFile) => Promise<string>
|
||||
canFileBeExtracted: (filePath: string) => boolean
|
||||
}
|
||||
|
||||
/**
|
||||
* Plugin dependency - Text Extractor
|
||||
* @returns
|
||||
*/
|
||||
export function getTextExtractor(): TextExtractorApi | undefined {
|
||||
return (getObsidianApp() as any).plugins?.plugins?.['text-extractor']?.api
|
||||
}
|
||||
|
||||
export function isCacheEnabled(): boolean {
|
||||
return !Platform.isIosApp && settings.useCache
|
||||
}
|
||||
|
||||
export const SEPARATORS =
|
||||
/[|\t\n\r\^"= -#%-*,.`\/<>:;?@[-\]_{}\u00A0\u00A1\u00A7\u00AB\u00B6\u00B7\u00BB\u00BF\u037E\u0387\u055A-\u055F\u0589\u058A\u05BE\u05C0\u05C3\u05C6\u05F3\u05F4\u0609\u060A\u060C\u060D\u061B\u061E\u061F\u066A-\u066D\u06D4\u0700-\u070D\u07F7-\u07F9\u0830-\u083E\u085E\u0964\u0965\u0970\u09FD\u0A76\u0AF0\u0C77\u0C84\u0DF4\u0E4F\u0E5A\u0E5B\u0F04-\u0F12\u0F14\u0F3A-\u0F3D\u0F85\u0FD0-\u0FD4\u0FD9\u0FDA\u104A-\u104F\u10FB\u1360-\u1368\u1400\u166E\u1680\u169B\u169C\u16EB-\u16ED\u1735\u1736\u17D4-\u17D6\u17D8-\u17DA\u1800-\u180A\u1944\u1945\u1A1E\u1A1F\u1AA0-\u1AA6\u1AA8-\u1AAD\u1B5A-\u1B60\u1BFC-\u1BFF\u1C3B-\u1C3F\u1C7E\u1C7F\u1CC0-\u1CC7\u1CD3\u2000-\u200A\u2010-\u2029\u202F-\u2043\u2045-\u2051\u2053-\u205F\u207D\u207E\u208D\u208E\u2308-\u230B\u2329\u232A\u2768-\u2775\u27C5\u27C6\u27E6-\u27EF\u2983-\u2998\u29D8-\u29DB\u29FC\u29FD\u2CF9-\u2CFC\u2CFE\u2CFF\u2D70\u2E00-\u2E2E\u2E30-\u2E4F\u3000-\u3003\u3008-\u3011\u3014-\u301F\u3030\u303D\u30A0\u30FB\uA4FE\uA4FF\uA60D-\uA60F\uA673\uA67E\uA6F2-\uA6F7\uA874-\uA877\uA8CE\uA8CF\uA8F8-\uA8FA\uA8FC\uA92E\uA92F\uA95F\uA9C1-\uA9CD\uA9DE\uA9DF\uAA5C-\uAA5F\uAADE\uAADF\uAAF0\uAAF1\uABEB\uFD3E\uFD3F\uFE10-\uFE19\uFE30-\uFE52\uFE54-\uFE61\uFE63\uFE68\uFE6A\uFE6B\uFF01-\uFF03\uFF05-\uFF0A\uFF0C-\uFF0F\uFF1A\uFF1B\uFF1F\uFF20\uFF3B-\uFF3D\uFF3F\uFF5B\uFF5D\uFF5F-\uFF65]/
|
||||
.toString()
|
||||
|
||||
src/main.ts
@@ -4,10 +4,12 @@ import {
|
||||
OmnisearchVaultModal,
|
||||
} from './components/modals'
|
||||
import {
|
||||
getDefaultSettings,
|
||||
isCacheEnabled,
|
||||
isPluginDisabled,
|
||||
loadSettings,
|
||||
type OmnisearchSettings,
|
||||
saveSettings,
|
||||
settings,
|
||||
SettingsTab,
|
||||
showExcerpt,
|
||||
} from './settings'
|
||||
@@ -16,46 +18,57 @@ import {
|
||||
EventNames,
|
||||
indexingStep,
|
||||
IndexingStepType,
|
||||
isCacheEnabled,
|
||||
type TextExtractorApi,
|
||||
} from './globals'
|
||||
import api, { notifyOnIndexed } from './tools/api'
|
||||
import { isFileIndexable, logDebug } from './tools/utils'
|
||||
import { OmnisearchCache, database } from './database'
|
||||
import * as NotesIndex from './notes-index'
|
||||
import { searchEngine } from './search/omnisearch'
|
||||
import { cacheManager } from './cache-manager'
|
||||
import { setObsidianApp } from './stores/obsidian-app'
|
||||
import { notifyOnIndexed, registerAPI } from './tools/api'
|
||||
import { Database } from './database'
|
||||
import { SearchEngine } from './search/search-engine'
|
||||
import { CacheManager } from './cache-manager'
|
||||
import { logDebug } from './tools/utils'
|
||||
import { NotesIndexer } from './notes-indexer'
|
||||
import { TextProcessor } from './tools/text-processing'
|
||||
|
||||
export default class OmnisearchPlugin extends Plugin {
|
||||
// FIXME: fix the type
|
||||
public apiHttpServer: null | any = null
|
||||
public settings: OmnisearchSettings = getDefaultSettings(this.app)
|
||||
|
||||
// FIXME: merge cache and cacheManager, or find other names
|
||||
public readonly cacheManager: CacheManager
|
||||
public readonly database = new Database(this)
|
||||
|
||||
public readonly notesIndexer = new NotesIndexer(this)
|
||||
public readonly textProcessor = new TextProcessor(this)
|
||||
public readonly searchEngine = new SearchEngine(this)
|
||||
|
||||
private ribbonButton?: HTMLElement
|
||||
|
||||
constructor(app: App, manifest: PluginManifest) {
|
||||
super(app, manifest)
|
||||
setObsidianApp(this.app)
|
||||
this.cacheManager = new CacheManager(this)
|
||||
}
|
||||
|
||||
async onload(): Promise<void> {
|
||||
await loadSettings(this)
|
||||
this.settings = await loadSettings(this)
|
||||
this.addSettingTab(new SettingsTab(this))
|
||||
|
||||
if (!Platform.isMobile) {
|
||||
import('./tools/api-server').then(
|
||||
m => (this.apiHttpServer = m.getServer())
|
||||
m => (this.apiHttpServer = m.getServer(this))
|
||||
)
|
||||
}
|
||||
|
||||
if (isPluginDisabled()) {
|
||||
if (isPluginDisabled(this.app)) {
|
||||
console.log('Omnisearch - Plugin disabled')
|
||||
return
|
||||
}
|
||||
|
||||
await cleanOldCacheFiles(this.app)
|
||||
await OmnisearchCache.clearOldDatabases()
|
||||
await this.database.clearOldDatabases()
|
||||
|
||||
registerAPI(this)
|
||||
|
||||
const settings = this.settings
|
||||
if (settings.ribbonIcon) {
|
||||
this.addRibbonButton()
|
||||
}
|
||||
@@ -71,7 +84,7 @@ export default class OmnisearchPlugin extends Plugin {
|
||||
id: 'show-modal',
|
||||
name: 'Vault search',
|
||||
callback: () => {
|
||||
new OmnisearchVaultModal(this.app).open()
|
||||
new OmnisearchVaultModal(this).open()
|
||||
},
|
||||
})
|
||||
|
||||
@@ -80,16 +93,18 @@ export default class OmnisearchPlugin extends Plugin {
|
||||
name: 'In-file search',
|
||||
editorCallback: (_editor, view) => {
|
||||
if (view.file) {
|
||||
new OmnisearchInFileModal(this.app, view.file).open()
|
||||
new OmnisearchInFileModal(this, view.file).open()
|
||||
}
|
||||
},
|
||||
})
|
||||
|
||||
const searchEngine = this.searchEngine
|
||||
|
||||
this.app.workspace.onLayoutReady(async () => {
|
||||
// Listeners to keep the search index up-to-date
|
||||
this.registerEvent(
|
||||
this.app.vault.on('create', file => {
|
||||
if (isFileIndexable(file.path)) {
|
||||
if (this.notesIndexer.isFileIndexable(file.path)) {
|
||||
logDebug('Indexing new file', file.path)
|
||||
// await cacheManager.addToLiveCache(file.path)
|
||||
searchEngine.addFromPaths([file.path])
|
||||
@@ -99,25 +114,25 @@ export default class OmnisearchPlugin extends Plugin {
|
||||
this.registerEvent(
|
||||
this.app.vault.on('delete', file => {
|
||||
logDebug('Removing file', file.path)
|
||||
cacheManager.removeFromLiveCache(file.path)
|
||||
this.cacheManager.removeFromLiveCache(file.path)
|
||||
searchEngine.removeFromPaths([file.path])
|
||||
})
|
||||
)
|
||||
this.registerEvent(
|
||||
this.app.vault.on('modify', async file => {
|
||||
if (isFileIndexable(file.path)) {
|
||||
if (this.notesIndexer.isFileIndexable(file.path)) {
|
||||
logDebug('Updating file', file.path)
|
||||
await cacheManager.addToLiveCache(file.path)
|
||||
NotesIndex.markNoteForReindex(file)
|
||||
await this.cacheManager.addToLiveCache(file.path)
|
||||
this.notesIndexer.flagNoteForReindex(file)
|
||||
}
|
||||
})
|
||||
)
|
||||
this.registerEvent(
|
||||
this.app.vault.on('rename', async (file, oldPath) => {
|
||||
if (isFileIndexable(file.path)) {
|
||||
if (this.notesIndexer.isFileIndexable(file.path)) {
|
||||
logDebug('Renaming file', file.path)
|
||||
cacheManager.removeFromLiveCache(oldPath)
|
||||
await cacheManager.addToLiveCache(file.path)
|
||||
this.cacheManager.removeFromLiveCache(oldPath)
|
||||
await this.cacheManager.addToLiveCache(file.path)
|
||||
searchEngine.removeFromPaths([oldPath])
|
||||
await searchEngine.addFromPaths([file.path])
|
||||
}
|
||||
@@ -142,8 +157,8 @@ export default class OmnisearchPlugin extends Plugin {
|
||||
// })
|
||||
// new Notice(welcome, 20_000)
|
||||
// }
|
||||
settings.welcomeMessage = code
|
||||
await this.saveData(settings)
|
||||
this.settings.welcomeMessage = code
|
||||
await this.saveData(this.settings)
|
||||
}
|
||||
|
||||
async onunload(): Promise<void> {
|
||||
@@ -152,14 +167,14 @@ export default class OmnisearchPlugin extends Plugin {
|
||||
|
||||
// Clear cache when disabling Omnisearch
|
||||
if (process.env.NODE_ENV === 'production') {
|
||||
await database.clearCache()
|
||||
await this.database.clearCache()
|
||||
}
|
||||
this.apiHttpServer.close()
|
||||
}
|
||||
|
||||
addRibbonButton(): void {
|
||||
this.ribbonButton = this.addRibbonIcon('search', 'Omnisearch', _evt => {
|
||||
new OmnisearchVaultModal(this.app).open()
|
||||
new OmnisearchVaultModal(this).open()
|
||||
})
|
||||
}
|
||||
|
||||
@@ -169,10 +184,28 @@ export default class OmnisearchPlugin extends Plugin {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Plugin dependency - Chs Patch for Chinese word segmentation
|
||||
* @returns
|
||||
*/
|
||||
public getChsSegmenter(): any | undefined {
|
||||
return (this.app as any).plugins.plugins['cm-chs-patch']
|
||||
}
|
||||
|
||||
/**
|
||||
* Plugin dependency - Text Extractor
|
||||
* @returns
|
||||
*/
|
||||
public getTextExtractor(): TextExtractorApi | undefined {
|
||||
return (this.app as any).plugins?.plugins?.['text-extractor']?.api
|
||||
}
|
||||
|
||||
private async populateIndex(): Promise<void> {
|
||||
console.time('Omnisearch - Indexing total time')
|
||||
indexingStep.set(IndexingStepType.ReadingFiles)
|
||||
const files = this.app.vault.getFiles().filter(f => isFileIndexable(f.path))
|
||||
const files = this.app.vault
|
||||
.getFiles()
|
||||
.filter(f => this.notesIndexer.isFileIndexable(f.path))
|
||||
console.log(`Omnisearch - ${files.length} files total`)
|
||||
console.log(
|
||||
`Omnisearch - Cache is ${isCacheEnabled() ? 'enabled' : 'disabled'}`
|
||||
@@ -180,6 +213,7 @@ export default class OmnisearchPlugin extends Plugin {
|
||||
// Map documents in the background
|
||||
// Promise.all(files.map(f => cacheManager.addToLiveCache(f.path)))
|
||||
|
||||
const searchEngine = this.searchEngine
|
||||
if (isCacheEnabled()) {
|
||||
console.time('Omnisearch - Loading index from cache')
|
||||
indexingStep.set(IndexingStepType.LoadingCache)
|
||||
@@ -223,14 +257,14 @@ export default class OmnisearchPlugin extends Plugin {
|
||||
indexingStep.set(IndexingStepType.WritingCache)
|
||||
|
||||
// Disable settings.useCache while writing the cache, in case it freezes
|
||||
settings.useCache = false
|
||||
this.settings.useCache = false
|
||||
await saveSettings(this)
|
||||
|
||||
// Write the cache
|
||||
await searchEngine.writeToCache()
|
||||
|
||||
// Re-enable settings.caching
|
||||
settings.useCache = true
|
||||
this.settings.useCache = true
|
||||
await saveSettings(this)
|
||||
}
|
||||
|
||||
@@ -264,16 +298,3 @@ async function cleanOldCacheFiles(app: App) {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function registerAPI(plugin: OmnisearchPlugin): void {
|
||||
// Url scheme for obsidian://omnisearch?query=foobar
|
||||
plugin.registerObsidianProtocolHandler('omnisearch', params => {
|
||||
new OmnisearchVaultModal(plugin.app, params.query).open()
|
||||
})
|
||||
|
||||
// Public api
|
||||
// @ts-ignore
|
||||
globalThis['omnisearch'] = api
|
||||
// Deprecated
|
||||
;(plugin.app as any).plugins.plugins.omnisearch.api = api
|
||||
}
|
||||
|
||||
@@ -1,50 +0,0 @@
|
||||
import type { TAbstractFile } from 'obsidian'
|
||||
import { searchEngine } from './search/omnisearch'
|
||||
|
||||
// /**
|
||||
// * Index a non-existing note.
|
||||
// * Useful to find internal links that lead (yet) to nowhere
|
||||
// * @param name
|
||||
// * @param parent The note referencing the
|
||||
// */
|
||||
// export function addNonExistingToIndex(name: string, parent: string): void {
|
||||
// name = removeAnchors(name)
|
||||
// const filename = name + (name.endsWith('.md') ? '' : '.md')
|
||||
//
|
||||
// const note: IndexedDocument = {
|
||||
// path: filename,
|
||||
// basename: name,
|
||||
// mtime: 0,
|
||||
//
|
||||
// content: '',
|
||||
// tags: [],
|
||||
// aliases: '',
|
||||
// headings1: '',
|
||||
// headings2: '',
|
||||
// headings3: '',
|
||||
//
|
||||
// doesNotExist: true,
|
||||
// parent,
|
||||
// }
|
||||
// // searchEngine.addDocuments([note])
|
||||
// }
|
||||
|
||||
const notesToReindex = new Set<TAbstractFile>()
|
||||
|
||||
/**
|
||||
* Updated notes are not reindexed immediately for performance reasons.
|
||||
* They're added to a list, and reindex is done the next time we open Omnisearch.
|
||||
*/
|
||||
export function markNoteForReindex(note: TAbstractFile): void {
|
||||
notesToReindex.add(note)
|
||||
}
|
||||
|
||||
export async function refreshIndex(): Promise<void> {
|
||||
const paths = [...notesToReindex].map(n => n.path)
|
||||
if (paths.length) {
|
||||
searchEngine.removeFromPaths(paths)
|
||||
await searchEngine.addFromPaths(paths)
|
||||
notesToReindex.clear()
|
||||
// console.log(`Omnisearch - Reindexed ${paths.length} file(s)`)
|
||||
}
|
||||
}
|
||||
src/notes-indexer.ts (new file)
@@ -0,0 +1,106 @@
|
||||
import type { TAbstractFile } from 'obsidian'
|
||||
import type OmnisearchPlugin from './main'
|
||||
import { removeAnchors } from './tools/notes'
|
||||
import type { IndexedDocument } from './globals'
|
||||
import {
|
||||
isFileCanvas,
|
||||
isFileFromDataloomPlugin,
|
||||
isFileImage,
|
||||
isFilePDF,
|
||||
} from './tools/utils'
|
||||
|
||||
export class NotesIndexer {
|
||||
private notesToReindex = new Set<TAbstractFile>()
|
||||
|
||||
constructor(private plugin: OmnisearchPlugin) {}
|
||||
|
||||
/**
|
||||
* Updated notes are not reindexed immediately for performance reasons.
|
||||
* They're added to a list, and reindex is done the next time we open Omnisearch.
|
||||
*/
|
||||
public flagNoteForReindex(note: TAbstractFile): void {
|
||||
this.notesToReindex.add(note)
|
||||
}
|
||||
|
||||
public async refreshIndex(): Promise<void> {
|
||||
const paths = [...this.notesToReindex].map(n => n.path)
|
||||
if (paths.length) {
|
||||
this.plugin.searchEngine.removeFromPaths(paths)
|
||||
await this.plugin.searchEngine.addFromPaths(paths)
|
||||
this.notesToReindex.clear()
|
||||
}
|
||||
}
|
||||
|
||||
public isFileIndexable(path: string): boolean {
|
||||
return this.isFilenameIndexable(path) || this.isContentIndexable(path)
|
||||
}
|
||||
|
||||
public isContentIndexable(path: string): boolean {
|
||||
const settings = this.plugin.settings
|
||||
const hasTextExtractor = !!this.plugin.getTextExtractor()
|
||||
const canIndexPDF = hasTextExtractor && settings.PDFIndexing
|
||||
const canIndexImages = hasTextExtractor && settings.imagesIndexing
|
||||
return (
|
||||
this.isFilePlaintext(path) ||
|
||||
isFileCanvas(path) ||
|
||||
isFileFromDataloomPlugin(path) ||
|
||||
(canIndexPDF && isFilePDF(path)) ||
|
||||
(canIndexImages && isFileImage(path))
|
||||
)
|
||||
}
|
||||
|
||||
public isFilenameIndexable(path: string): boolean {
|
||||
return (
|
||||
this.canIndexUnsupportedFiles() ||
|
||||
this.isFilePlaintext(path) ||
|
||||
isFileCanvas(path) ||
|
||||
isFileFromDataloomPlugin(path)
|
||||
)
|
||||
}
|
||||
|
||||
public canIndexUnsupportedFiles(): boolean {
|
||||
return (
|
||||
this.plugin.settings.unsupportedFilesIndexing === 'yes' ||
|
||||
(this.plugin.settings.unsupportedFilesIndexing === 'default' &&
|
||||
!!this.plugin.app.vault.getConfig('showUnsupportedFiles'))
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Index a non-existing note.
|
||||
* Useful to find internal links that lead (yet) to nowhere
|
||||
* @param name
|
||||
* @param parent The note referencing the
|
||||
*/
|
||||
public generateIndexableNonexistingDocument(
|
||||
name: string,
|
||||
parent: string
|
||||
): IndexedDocument {
|
||||
name = removeAnchors(name)
|
||||
const filename = name + (name.endsWith('.md') ? '' : '.md')
|
||||
|
||||
return {
|
||||
path: filename,
|
||||
basename: name,
|
||||
mtime: 0,
|
||||
|
||||
content: '',
|
||||
cleanedContent: '',
|
||||
tags: [],
|
||||
unmarkedTags: [],
|
||||
aliases: '',
|
||||
headings1: '',
|
||||
headings2: '',
|
||||
headings3: '',
|
||||
|
||||
doesNotExist: true,
|
||||
parent,
|
||||
}
|
||||
}
|
||||
|
||||
public isFilePlaintext(path: string): boolean {
|
||||
return [...this.plugin.settings.indexedFileTypes, 'md'].some(t =>
|
||||
path.endsWith(`.${t}`)
|
||||
)
|
||||
}
|
||||
}
|
||||
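For orientation: the vault-event listeners and populateIndex() in src/main.ts now route all indexability checks through this NotesIndexer class. A hedged sketch of that call pattern (the helper name below is illustrative, not from the diff):

import type OmnisearchPlugin from './main'

// Illustrative helper: gather indexable paths and hand them to the search engine,
// mirroring populateIndex() and the 'create'/'modify' vault listeners.
async function indexAllFiles(plugin: OmnisearchPlugin): Promise<void> {
  const paths = plugin.app.vault
    .getFiles()
    .filter(f => plugin.notesIndexer.isFileIndexable(f.path))
    .map(f => f.path)
  await plugin.searchEngine.addFromPaths(paths)
}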
@@ -1,4 +1,3 @@
-import { settings } from '../settings'
 import { removeDiacritics } from '../tools/utils'
 import { parse } from 'search-query-parser'

@@ -14,8 +13,8 @@ export class Query {
   }
   #inQuotes: string[]

-  constructor(text = '') {
-    if (settings.ignoreDiacritics) {
+  constructor(text = '', options: { ignoreDiacritics: boolean }) {
+    if (options.ignoreDiacritics) {
       text = removeDiacritics(text)
     }
     const parsed = parse(text.toLowerCase(), {

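Query no longer reads the settings store itself; every call site in this diff passes the flag in explicitly. A minimal usage sketch under that reading (`buildQuery` is an illustrative name, not part of the commit):

import { Query } from 'src/search/query'
import type OmnisearchPlugin from 'src/main'

// Illustrative wrapper showing the new constructor contract.
function buildQuery(plugin: OmnisearchPlugin, searchQuery: string): Query {
  return new Query(searchQuery, {
    ignoreDiacritics: plugin.settings.ignoreDiacritics,
  })
}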
@@ -1,72 +1,36 @@
import MiniSearch, { type Options, type SearchResult } from 'minisearch'
import type { DocumentRef, IndexedDocument, ResultNote } from '../globals'

import { settings } from '../settings'
import { chunkArray, logDebug, removeDiacritics } from '../tools/utils'
import { Notice } from 'obsidian'
import type { Query } from './query'
import { cacheManager } from '../cache-manager'
import { sortBy } from 'lodash-es'
import { getMatches, stringsToRegex } from 'src/tools/text-processing'
import { tokenizeForIndexing, tokenizeForSearch } from './tokenizer'
import { getObsidianApp } from '../stores/obsidian-app'
import type OmnisearchPlugin from '../main'
import { Tokenizer } from './tokenizer'

export class Omnisearch {

  app = getObsidianApp()

  public static readonly options: Options<IndexedDocument> = {
    tokenize: tokenizeForIndexing,
    extractField: (doc, fieldName) => {
      if (fieldName === 'directory') {
        // return path without the filename
        const parts = doc.path.split('/')
        parts.pop()
        return parts.join('/')
      }
      return (doc as any)[fieldName]
    },
    processTerm: (term: string) =>
      (settings.ignoreDiacritics ? removeDiacritics(term) : term).toLowerCase(),
    idField: 'path',
    fields: [
      'basename',
      // Different from `path`, since `path` is the unique index and needs to include the filename
      'directory',
      'aliases',
      'content',
      'headings1',
      'headings2',
      'headings3',
    ],
    storeFields: ['tags'],
    logger(_level, _message, code) {
      if (code === 'version_conflict') {
        new Notice(
          'Omnisearch - Your index cache may be incorrect or corrupted. If this message keeps appearing, go to Settings to clear the cache.',
          5000
        )
      }
    },
  }
export class SearchEngine {
  private tokenizer: Tokenizer
  private minisearch: MiniSearch
  /** Map<path, mtime> */
  private indexedDocuments: Map<string, number> = new Map()
  // private previousResults: SearchResult[] = []
  // private previousQuery: Query | null = null

  constructor() {
    this.minisearch = new MiniSearch(Omnisearch.options)
  constructor(protected plugin: OmnisearchPlugin) {
    this.tokenizer = new Tokenizer(plugin)
    this.minisearch = new MiniSearch(this.getOptions())
  }

  /**
   * Return true if the cache is valid
   */
  async loadCache(): Promise<boolean> {
    const cache = await cacheManager.getMinisearchCache()
    const cache = await this.plugin.database.getMinisearchCache()
    if (cache) {
      // console.log('Omnisearch - Cache', cache)
      this.minisearch = MiniSearch.loadJS(cache.data, Omnisearch.options)
      this.minisearch = await MiniSearch.loadJSAsync(
        cache.data,
        this.getOptions()
      )
      this.indexedDocuments = new Map(cache.paths.map(o => [o.path, o.mtime]))
      return true
    }
@@ -107,7 +71,9 @@ export class Omnisearch {
    logDebug('Adding files', paths)
    let documents = (
      await Promise.all(
        paths.map(async path => await cacheManager.getDocument(path))
        paths.map(
          async path => await this.plugin.cacheManager.getDocument(path)
        )
      )
    ).filter(d => !!d?.path)
    logDebug('Sorting documents to first index markdown')
@@ -154,6 +120,7 @@ export class Omnisearch {
    query: Query,
    options: { prefixLength: number; singleFilePath?: string }
  ): Promise<SearchResult[]> {
    const settings = this.plugin.settings
    if (query.isEmpty()) {
      // this.previousResults = []
      // this.previousQuery = null
@@ -176,7 +143,7 @@ export class Omnisearch {
        break
      }

    const searchTokens = tokenizeForSearch(query.segmentsToStr())
    const searchTokens = this.tokenizer.tokenizeForSearch(query.segmentsToStr())
    logDebug(JSON.stringify(searchTokens, null, 1))
    let results = this.minisearch.search(searchTokens, {
      prefix: term => term.length >= options.prefixLength,
@@ -248,16 +215,16 @@ export class Omnisearch {
      results = results.filter(
        result =>
          !(
            this.app.metadataCache.isUserIgnored &&
            this.app.metadataCache.isUserIgnored(result.id)
            this.plugin.app.metadataCache.isUserIgnored &&
            this.plugin.app.metadataCache.isUserIgnored(result.id)
          )
      )
    } else {
      // Just downrank them
      results.forEach(result => {
        if (
          this.app.metadataCache.isUserIgnored &&
          this.app.metadataCache.isUserIgnored(result.id)
          this.plugin.app.metadataCache.isUserIgnored &&
          this.plugin.app.metadataCache.isUserIgnored(result.id)
        ) {
          result.score /= 10
        }
@@ -297,7 +264,7 @@ export class Omnisearch {
      }

      // Boost custom properties
      const metadata = this.app.metadataCache.getCache(path)
      const metadata = this.plugin.app.metadataCache.getCache(path)
      if (metadata) {
        for (const { name, weight } of settings.weightCustomProperties) {
          const values = metadata?.frontmatter?.[name]
@@ -323,7 +290,9 @@ export class Omnisearch {
    if (results.length) logDebug('First result:', results[0])

    const documents = await Promise.all(
      results.map(async result => await cacheManager.getDocument(result.id))
      results.map(
        async result => await this.plugin.cacheManager.getDocument(result.id)
      )
    )

    // If the search query contains quotes, filter out results that don't have the exact match
@@ -379,7 +348,7 @@ export class Omnisearch {
  ): Promise<ResultNote[]> {
    // Get the raw results
    let results: SearchResult[]
    if (settings.simpleSearch) {
    if (this.plugin.settings.simpleSearch) {
      results = await this.search(query, {
        prefixLength: 3,
        singleFilePath: options?.singleFilePath,
@@ -392,7 +361,9 @@ export class Omnisearch {
    }

    const documents = await Promise.all(
      results.map(async result => await cacheManager.getDocument(result.id))
      results.map(
        async result => await this.plugin.cacheManager.getDocument(result.id)
      )
    )

    // Map the raw results to get usable suggestions
@@ -425,9 +396,9 @@ export class Omnisearch {
    logDebug('Matching tokens:', foundWords)

    logDebug('Getting matches locations...')
    const matches = getMatches(
    const matches = this.plugin.textProcessor.getMatches(
      note.content,
      stringsToRegex(foundWords),
      foundWords,
      query
    )
    logDebug(`Matches for ${note.basename}`, matches)
@@ -443,11 +414,49 @@ export class Omnisearch {
  }

  public async writeToCache(): Promise<void> {
    await cacheManager.writeMinisearchCache(
    await this.plugin.database.writeMinisearchCache(
      this.minisearch,
      this.indexedDocuments
    )
  }
}

export const searchEngine = new Omnisearch()
  private getOptions(): Options<IndexedDocument> {
    return {
      tokenize: this.tokenizer.tokenizeForIndexing.bind(this.tokenizer),
      extractField: (doc, fieldName) => {
        if (fieldName === 'directory') {
          // return path without the filename
          const parts = doc.path.split('/')
          parts.pop()
          return parts.join('/')
        }
        return (doc as any)[fieldName]
      },
      processTerm: (term: string) =>
        (this.plugin.settings.ignoreDiacritics
          ? removeDiacritics(term)
          : term
        ).toLowerCase(),
      idField: 'path',
      fields: [
        'basename',
        // Different from `path`, since `path` is the unique index and needs to include the filename
        'directory',
        'aliases',
        'content',
        'headings1',
        'headings2',
        'headings3',
      ],
      storeFields: ['tags'],
      logger(_level, _message, code) {
        if (code === 'version_conflict') {
          new Notice(
            'Omnisearch - Your index cache may be incorrect or corrupted. If this message keeps appearing, go to Settings to clear the cache.',
            5000
          )
        }
      },
    }
  }
}
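// Editor's note: a hedged sketch of how the reworked SearchEngine above would be driven from
// the plugin's onload(). Only the constructor and loadCache() shown in this diff are used;
// the fallback branch is an assumption about what happens when no cache exists.
const engine = new SearchEngine(plugin)
if (!(await engine.loadCache())) {
  // no (or invalid) MiniSearch cache: the plugin would have to re-index the vault here
}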
@@ -1,93 +1,96 @@
import type { QueryCombination } from 'minisearch'
import {
  BRACKETS_AND_SPACE,
  SPACE_OR_PUNCTUATION,
  chsRegex,
  getChsSegmenter,
} from 'src/globals'
import { settings } from 'src/settings'
import { BRACKETS_AND_SPACE, chsRegex, SPACE_OR_PUNCTUATION } from 'src/globals'
import { logDebug, splitCamelCase, splitHyphens } from 'src/tools/utils'
import type OmnisearchPlugin from '../main'

const markdownLinkExtractor = require('markdown-link-extractor')

function tokenizeWords(text: string, { skipChs = false } = {}): string[] {
  const tokens = text.split(BRACKETS_AND_SPACE)
  if (skipChs) return tokens
  return tokenizeChsWord(tokens)
}
export class Tokenizer {
  constructor(private plugin: OmnisearchPlugin) {}

function tokenizeTokens(text: string, { skipChs = false } = {}): string[] {
  const tokens = text.split(SPACE_OR_PUNCTUATION)
  if (skipChs) return tokens
  return tokenizeChsWord(tokens)
}
  /**
   * Tokenization for indexing will possibly return more tokens than the original text.
   * This is because we combine different methods of tokenization to get the best results.
   * @param text
   * @returns
   */
  public tokenizeForIndexing(text: string): string[] {
    const words = this.tokenizeWords(text)
    let urls: string[] = []
    if (this.plugin.settings.tokenizeUrls) {
      try {
        urls = markdownLinkExtractor(text)
      } catch (e) {
        logDebug('Error extracting urls', e)
      }
    }

function tokenizeChsWord(tokens: string[]): string[] {
  const segmenter = getChsSegmenter()
  if (!segmenter) return tokens
  return tokens.flatMap(word =>
    chsRegex.test(word) ? segmenter.cut(word, { search: true }) : [word]
  )
}
    let tokens = this.tokenizeTokens(text, { skipChs: true })

/**
 * Tokenization for indexing will possibly return more tokens than the original text.
 * This is because we combine different methods of tokenization to get the best results.
 * @param text
 * @returns
 */
export function tokenizeForIndexing(text: string): string[] {
  const words = tokenizeWords(text)
  let urls: string[] = []
  if (settings.tokenizeUrls) {
    try {
      urls = markdownLinkExtractor(text)
    } catch (e) {
      logDebug('Error extracting urls', e)
    // Split hyphenated tokens
    tokens = [...tokens, ...tokens.flatMap(splitHyphens)]

    // Split camelCase tokens into "camel" and "case"
    tokens = [...tokens, ...tokens.flatMap(splitCamelCase)]

    // Add whole words (aka "not tokens")
    tokens = [...tokens, ...words]

    // Add urls
    if (urls.length) {
      tokens = [...tokens, ...urls]
    }

    // Remove duplicates
    tokens = [...new Set(tokens)]

    return tokens
  }

  /**
   * Search tokenization will use the same tokenization methods as indexing,
   * but will combine each group with "OR" operators
   * @param text
   * @returns
   */
  public tokenizeForSearch(text: string): QueryCombination {
    // Extract urls and remove them from the query
    const urls: string[] = markdownLinkExtractor(text)
    text = urls.reduce((acc, url) => acc.replace(url, ''), text)

    const tokens = [...this.tokenizeTokens(text), ...urls].filter(Boolean)

    return {
      combineWith: 'OR',
      queries: [
        { combineWith: 'AND', queries: tokens },
        {
          combineWith: 'AND',
          queries: this.tokenizeWords(text).filter(Boolean),
        },
        { combineWith: 'AND', queries: tokens.flatMap(splitHyphens) },
        { combineWith: 'AND', queries: tokens.flatMap(splitCamelCase) },
      ],
    }
  }

  let tokens = tokenizeTokens(text, { skipChs: true })

  // Split hyphenated tokens
  tokens = [...tokens, ...tokens.flatMap(splitHyphens)]

  // Split camelCase tokens into "camel" and "case"
  tokens = [...tokens, ...tokens.flatMap(splitCamelCase)]

  // Add whole words (aka "not tokens")
  tokens = [...tokens, ...words]

  // Add urls
  if (urls.length) {
    tokens = [...tokens, ...urls]
  private tokenizeWords(text: string, { skipChs = false } = {}): string[] {
    const tokens = text.split(BRACKETS_AND_SPACE)
    if (skipChs) return tokens
    return this.tokenizeChsWord(tokens)
  }

  // Remove duplicates
  tokens = [...new Set(tokens)]
  private tokenizeTokens(text: string, { skipChs = false } = {}): string[] {
    const tokens = text.split(SPACE_OR_PUNCTUATION)
    if (skipChs) return tokens
    return this.tokenizeChsWord(tokens)
  }

  return tokens
}

/**
 * Search tokenization will use the same tokenization methods as indexing,
 * but will combine each group with "OR" operators
 * @param text
 * @returns
 */
export function tokenizeForSearch(text: string): QueryCombination {
  // Extract urls and remove them from the query
  const urls: string[] = markdownLinkExtractor(text)
  text = urls.reduce((acc, url) => acc.replace(url, ''), text)

  const tokens = [...tokenizeTokens(text), ...urls].filter(Boolean)

  return {
    combineWith: 'OR',
    queries: [
      { combineWith: 'AND', queries: tokens },
      { combineWith: 'AND', queries: tokenizeWords(text).filter(Boolean) },
      { combineWith: 'AND', queries: tokens.flatMap(splitHyphens) },
      { combineWith: 'AND', queries: tokens.flatMap(splitCamelCase) },
    ],
  private tokenizeChsWord(tokens: string[]): string[] {
    const segmenter = this.plugin.getChsSegmenter()
    if (!segmenter) return tokens
    return tokens.flatMap(word =>
      chsRegex.test(word) ? segmenter.cut(word, { search: true }) : [word]
    )
  }
}
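// Editor's note: a rough usage sketch of the class-based tokenizer above. `plugin` is assumed
// to be the OmnisearchPlugin instance that owns it; the input strings are invented.
const tokenizer = new Tokenizer(plugin)
const indexTokens = tokenizer.tokenizeForIndexing('See [[My CamelCase-note]] for details')
const searchQuery = tokenizer.tokenizeForSearch('camel case note')
// searchQuery is a MiniSearch QueryCombination: several AND groups combined with OR, so a
// document only needs to match one of the tokenization strategies (plain tokens, whole words,
// hyphen-split, camelCase-split) to be returned.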
src/settings.ts
@@ -1,5 +1,6 @@
// noinspection CssUnresolvedCustomProperty
import {
  App,
  Notice,
  Platform,
  Plugin,
@@ -8,14 +9,9 @@ import {
  SliderComponent,
} from 'obsidian'
import { writable } from 'svelte/store'
import { database } from './database'
import {
  K_DISABLE_OMNISEARCH,
  getTextExtractor,
  isCacheEnabled,
} from './globals'
import { K_DISABLE_OMNISEARCH } from './globals'
import type OmnisearchPlugin from './main'
import { getObsidianApp } from './stores/obsidian-app'
import { enablePrintDebug } from "./tools/utils";

interface WeightingSettings {
  weightBasename: number
@@ -71,6 +67,8 @@ export interface OmnisearchSettings extends WeightingSettings {
  httpApiEnabled: boolean
  httpApiPort: string
  httpApiNotice: boolean

  DANGER_httpHost: string | null
}

/**
@@ -95,6 +93,8 @@ export class SettingsTab extends PluginSettingTab {

  display(): void {
    const { containerEl } = this
    const database = this.plugin.database
    const textExtractor = this.plugin.getTextExtractor()
    containerEl.empty()

    if (this.app.loadLocalStorage(K_DISABLE_OMNISEARCH) == '1') {
@@ -117,7 +117,7 @@ export class SettingsTab extends PluginSettingTab {
    const indexingDesc = new DocumentFragment()
    indexingDesc.createSpan({}, span => {
      span.innerHTML = `⚠️ <span style="color: var(--text-accent)">Changing indexing settings will clear the cache, and requires a restart of Obsidian.</span><br/><br/>`
      if (getTextExtractor()) {
      if (textExtractor) {
        span.innerHTML += `
        👍 You have installed <a href="https://github.com/scambier/obsidian-text-extractor">Text Extractor</a>, Omnisearch can use it to index PDFs and images contents.
        <br />Text extraction only works on desktop, but the cache can be synchronized with your mobile device.`
@@ -138,7 +138,7 @@ export class SettingsTab extends PluginSettingTab {
    })
    new Setting(containerEl)
      .setName(
        `PDFs content indexing ${getTextExtractor() ? '' : '⚠️ Disabled'}`
        `PDFs content indexing ${textExtractor ? '' : '⚠️ Disabled'}`
      )
      .setDesc(indexPDFsDesc)
      .addToggle(toggle =>
@@ -148,7 +148,7 @@ export class SettingsTab extends PluginSettingTab {
          await saveSettings(this.plugin)
        })
      )
      .setDisabled(!getTextExtractor())
      .setDisabled(!textExtractor)

    // Images Indexing
    const indexImagesDesc = new DocumentFragment()
@@ -156,7 +156,7 @@ export class SettingsTab extends PluginSettingTab {
      span.innerHTML = `Omnisearch will use Text Extractor to OCR your images and index their content.`
    })
    new Setting(containerEl)
      .setName(`Images OCR indexing ${getTextExtractor() ? '' : '⚠️ Disabled'}`)
      .setName(`Images OCR indexing ${textExtractor ? '' : '⚠️ Disabled'}`)
      .setDesc(indexImagesDesc)
      .addToggle(toggle =>
        toggle.setValue(settings.imagesIndexing).onChange(async v => {
@@ -165,7 +165,7 @@ export class SettingsTab extends PluginSettingTab {
          await saveSettings(this.plugin)
        })
      )
      .setDisabled(!getTextExtractor())
      .setDisabled(!textExtractor)

    // Office Documents Indexing
    const indexOfficesDesc = new DocumentFragment()
@@ -174,7 +174,7 @@ export class SettingsTab extends PluginSettingTab {
    })
    new Setting(containerEl)
      .setName(
        `Documents content indexing ${getTextExtractor() ? '' : '⚠️ Disabled'}`
        `Documents content indexing ${textExtractor ? '' : '⚠️ Disabled'}`
      )
      .setDesc(indexOfficesDesc)
      .addToggle(toggle =>
@@ -184,7 +184,7 @@ export class SettingsTab extends PluginSettingTab {
          await saveSettings(this.plugin)
        })
      )
      .setDisabled(!getTextExtractor())
      .setDisabled(!textExtractor)

    // Index filenames of unsupported files
    const indexUnsupportedDesc = new DocumentFragment()
@@ -475,42 +475,43 @@ export class SettingsTab extends PluginSettingTab {

    //#region Results Weighting

    const defaultSettings = getDefaultSettings(this.app)

    new Setting(containerEl).setName('Results weighting').setHeading()

    new Setting(containerEl)
      .setName(
        `File name & declared aliases (default: ${DEFAULT_SETTINGS.weightBasename})`
        `File name & declared aliases (default: ${defaultSettings.weightBasename})`
      )
      .addSlider(cb => this.weightSlider(cb, 'weightBasename'))

    new Setting(containerEl)
      .setName(`File directory (default: ${DEFAULT_SETTINGS.weightDirectory})`)
      .setName(`File directory (default: ${defaultSettings.weightDirectory})`)
      .addSlider(cb => this.weightSlider(cb, 'weightDirectory'))

    new Setting(containerEl)
      .setName(`Headings level 1 (default: ${DEFAULT_SETTINGS.weightH1})`)
      .setName(`Headings level 1 (default: ${defaultSettings.weightH1})`)
      .addSlider(cb => this.weightSlider(cb, 'weightH1'))

    new Setting(containerEl)
      .setName(`Headings level 2 (default: ${DEFAULT_SETTINGS.weightH2})`)
      .setName(`Headings level 2 (default: ${defaultSettings.weightH2})`)
      .addSlider(cb => this.weightSlider(cb, 'weightH2'))

    new Setting(containerEl)
      .setName(`Headings level 3 (default: ${DEFAULT_SETTINGS.weightH3})`)
      .setName(`Headings level 3 (default: ${defaultSettings.weightH3})`)
      .addSlider(cb => this.weightSlider(cb, 'weightH3'))

    new Setting(containerEl)
      .setName(
        `Tags (default: ${DEFAULT_SETTINGS.weightUnmarkedTags})`
      )
      .setName(`Tags (default: ${defaultSettings.weightUnmarkedTags})`)
      .addSlider(cb => this.weightSlider(cb, 'weightUnmarkedTags'))

    //#region Specific tags

    new Setting(containerEl)
      .setName('Header properties fields')
      .setDesc('You can set custom weights for values of header properties (e.g. "keywords").')

      .setDesc(
        'You can set custom weights for values of header properties (e.g. "keywords").'
      )

    for (let i = 0; i < settings.weightCustomProperties.length; i++) {
      const item = settings.weightCustomProperties[i]
@@ -547,14 +548,13 @@ export class SettingsTab extends PluginSettingTab {
    }

    // Add a new custom tag
    new Setting(containerEl)
      .addButton(btn => {
        btn.setButtonText('Add a new property')
        btn.onClick(cb => {
          settings.weightCustomProperties.push({ name: '', weight: 1 })
          this.display()
        })
    new Setting(containerEl).addButton(btn => {
      btn.setButtonText('Add a new property')
      btn.onClick(_cb => {
        settings.weightCustomProperties.push({ name: '', weight: 1 })
        this.display()
      })
    })

    //#endregion Specific tags

@@ -631,6 +631,7 @@ export class SettingsTab extends PluginSettingTab {
      .addToggle(toggle =>
        toggle.setValue(settings.verboseLogging).onChange(async v => {
          settings.verboseLogging = v
          enablePrintDebug(v)
          await saveSettings(this.plugin)
        })
      )
@@ -670,7 +671,7 @@ export class SettingsTab extends PluginSettingTab {
      .setName('Disable on this device')
      .setDesc(disableDesc)
      .addToggle(toggle =>
        toggle.setValue(isPluginDisabled()).onChange(async v => {
        toggle.setValue(isPluginDisabled(this.app)).onChange(async v => {
          if (v) {
            this.app.saveLocalStorage(K_DISABLE_OMNISEARCH, '1')
          } else {
@@ -712,67 +713,83 @@ export class SettingsTab extends PluginSettingTab {
  }
}

const app = getObsidianApp()
export function getDefaultSettings(app: App): OmnisearchSettings {
  return {
    useCache: true,
    hideExcluded: false,
    downrankedFoldersFilters: [] as string[],
    ignoreDiacritics: true,
    indexedFileTypes: [] as string[],
    PDFIndexing: false,
    officeIndexing: false,
    imagesIndexing: false,
    unsupportedFilesIndexing: 'default',
    splitCamelCase: false,
    openInNewPane: false,
    vimLikeNavigationShortcut: app.vault.getConfig('vimMode') as boolean,

export const DEFAULT_SETTINGS: OmnisearchSettings = {
  useCache: true,
  hideExcluded: false,
  downrankedFoldersFilters: [] as string[],
  ignoreDiacritics: true,
  indexedFileTypes: [] as string[],
  PDFIndexing: false,
  officeIndexing: false,
  imagesIndexing: false,
  unsupportedFilesIndexing: 'default',
  splitCamelCase: false,
  openInNewPane: false,
  vimLikeNavigationShortcut: app.vault.getConfig('vimMode') as boolean,
  ribbonIcon: true,
  showExcerpt: true,
  renderLineReturnInExcerpts: true,
  showCreateButton: false,
  highlight: true,
  showPreviousQueryResults: true,
  simpleSearch: false,
  tokenizeUrls: false,
  fuzziness: '1',

  ribbonIcon: true,
  showExcerpt: true,
  renderLineReturnInExcerpts: true,
  showCreateButton: false,
  highlight: true,
  showPreviousQueryResults: true,
  simpleSearch: false,
  tokenizeUrls: false,
  fuzziness: '1',
  weightBasename: 3,
  weightDirectory: 2,
  weightH1: 1.5,
  weightH2: 1.3,
  weightH3: 1.1,
  weightUnmarkedTags: 1.1,
  weightCustomProperties: [] as { name: string; weight: number }[],

  weightBasename: 3,
  weightDirectory: 2,
  weightH1: 1.5,
  weightH2: 1.3,
  weightH3: 1.1,
  weightUnmarkedTags: 1.1,
  weightCustomProperties: [] as { name: string; weight: number }[],
  httpApiEnabled: false,
  httpApiPort: '51361',
  httpApiNotice: true,

  httpApiEnabled: false,
  httpApiPort: '51361',
  httpApiNotice: true,
  welcomeMessage: '',
  verboseLogging: false,

  welcomeMessage: '',
  verboseLogging: false,
} as const
    DANGER_httpHost: null,
  }
}

export let settings = Object.assign({}, DEFAULT_SETTINGS) as OmnisearchSettings
let settings: OmnisearchSettings

export async function loadSettings(plugin: Plugin): Promise<void> {
  settings = Object.assign({}, DEFAULT_SETTINGS, await plugin.loadData())
// /**
//  * @deprecated
//  */
// export function getSettings(): OmnisearchSettings {
//   if (!settings) {
//     settings = Object.assign({}, getDefaultSettings()) as OmnisearchSettings
//   }
//   return settings
// }

export async function loadSettings(
  plugin: Plugin
): Promise<OmnisearchSettings> {
  settings = Object.assign(
    {},
    getDefaultSettings(plugin.app),
    await plugin.loadData()
  )
  showExcerpt.set(settings.showExcerpt)
  enablePrintDebug(settings.verboseLogging)
  return settings
}

export async function saveSettings(plugin: Plugin): Promise<void> {
  await plugin.saveData(settings)
}

export function isPluginDisabled(): boolean {
export function isPluginDisabled(app: App): boolean {
  return app.loadLocalStorage(K_DISABLE_OMNISEARCH) === '1'
}

export function canIndexUnsupportedFiles(): boolean {
  return (
    settings.unsupportedFilesIndexing === 'yes' ||
    (settings.unsupportedFilesIndexing === 'default' &&
      !!app.vault.getConfig('showUnsupportedFiles'))
  )
export function isCacheEnabled(): boolean {
  return !Platform.isIosApp && settings.useCache
}
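// Editor's note: an illustrative sketch of the new settings lifecycle implied by the hunks
// above — defaults are now computed per App instead of at module load time. `this` is assumed
// to be the OmnisearchPlugin inside onload().
const settings = await loadSettings(this) // merges getDefaultSettings(this.app) with saved data
if (isPluginDisabled(this.app)) {
  return // the helper now takes the App explicitly instead of a module-level singleton
}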
@@ -1,19 +0,0 @@
import type { App } from 'obsidian'

let obsidianApp: App | null = null

export function setObsidianApp(app: App) {
  obsidianApp = app
}

/**
 * Helper function to get the Obsidian app instance.
 */
export function getObsidianApp() {
  if (!obsidianApp) {
    // throw new Error('Obsidian app not set')
    // console.trace('Obsidian app not set')
    return app // FIXME: please.
  }
  return obsidianApp as App
}
@@ -1,10 +1,11 @@
import * as http from 'http'
import * as url from 'url'
import api from './api'
import { Notice } from 'obsidian'
import { settings } from 'src/settings'
import type OmnisearchPlugin from '../main'
import { getApi } from './api'

export function getServer() {
export function getServer(plugin: OmnisearchPlugin) {
  const api = getApi(plugin)
  const server = http.createServer(async function (req, res) {
    res.setHeader('Access-Control-Allow-Origin', '*')
    res.setHeader(
@@ -43,11 +44,14 @@ export function getServer() {
      server.listen(
        {
          port: parseInt(port),
          host: 'localhost',
          host: plugin.settings.DANGER_httpHost ?? 'localhost',
        },
        () => {
          console.log(`Omnisearch - Started HTTP server on port ${port}`)
          if (settings.httpApiNotice) {
          if (plugin.settings.DANGER_httpHost && plugin.settings.DANGER_httpHost !== 'localhost') {
            new Notice(`Omnisearch - Started non-localhost HTTP server at ${plugin.settings.DANGER_httpHost}:${port}`, 120_000)
          }
          else if (plugin.settings.httpApiNotice) {
            new Notice(`Omnisearch - Started HTTP server on port ${port}`)
          }
        }
@@ -63,7 +67,7 @@ export function getServer() {
    close() {
      server.close()
      console.log(`Omnisearch - Terminated HTTP server`)
      if (settings.httpApiEnabled && settings.httpApiNotice) {
      if (plugin.settings.httpApiEnabled && plugin.settings.httpApiNotice) {
        new Notice(`Omnisearch - Terminated HTTP server`)
      }
    },
@@ -1,9 +1,7 @@
import type { ResultNote } from '../globals'
import { Query } from '../search/query'
import { searchEngine } from '../search/omnisearch'
import { makeExcerpt } from './text-processing'
import { refreshIndex } from '../notes-index'
import { getObsidianApp } from '../stores/obsidian-app'
import type OmnisearchPlugin from '../main'
import { OmnisearchVaultModal } from '../components/modals'

type ResultNoteApi = {
  score: number
@@ -20,8 +18,6 @@ export type SearchMatchApi = {
  offset: number
}

const app = getObsidianApp()

let notified = false

/**
@@ -29,15 +25,21 @@ let notified = false
 */
let onIndexedCallbacks: Array<() => void> = []

function mapResults(results: ResultNote[]): ResultNoteApi[] {
function mapResults(
  plugin: OmnisearchPlugin,
  results: ResultNote[]
): ResultNoteApi[] {
  return results.map(result => {
    const { score, path, basename, foundWords, matches, content } = result

    const excerpt = makeExcerpt(content, matches[0]?.offset ?? -1)
    const excerpt = plugin.textProcessor.makeExcerpt(
      content,
      matches[0]?.offset ?? -1
    )

    const res: ResultNoteApi = {
      score,
      vault: app.vault.getName(),
      vault: plugin.app.vault.getName(),
      path,
      basename,
      foundWords,
@@ -54,27 +56,52 @@ function mapResults(results: ResultNote[]): ResultNoteApi[] {
  })
}

async function search(q: string): Promise<ResultNoteApi[]> {
  const query = new Query(q)
  const raw = await searchEngine.getSuggestions(query)
  return mapResults(raw)
}

function registerOnIndexed(cb: () => void): void {
  onIndexedCallbacks.push(cb)
  // Immediately call the callback if the indexing is already done
  if (notified) {
    cb()
  }
}

function unregisterOnIndexed(cb: () => void): void {
  onIndexedCallbacks = onIndexedCallbacks.filter(o => o !== cb)
}

export function notifyOnIndexed(): void {
  notified = true
  onIndexedCallbacks.forEach(cb => cb())
}

export default { search, registerOnIndexed, unregisterOnIndexed, refreshIndex }
let registed = false

export function registerAPI(plugin: OmnisearchPlugin): void {
  if (registed) {
    return
  }
  registed = true

  // Url scheme for obsidian://omnisearch?query=foobar
  plugin.registerObsidianProtocolHandler('omnisearch', params => {
    new OmnisearchVaultModal(plugin, params.query).open()
  })

  const api = getApi(plugin)

  // Public api
  // @ts-ignore
  globalThis['omnisearch'] = api
  // Deprecated
  ;(plugin.app as any).plugins.plugins.omnisearch.api = api
}

export function getApi(plugin: OmnisearchPlugin) {
  return {
    async search(q: string): Promise<ResultNoteApi[]> {
      const query = new Query(q, {
        ignoreDiacritics: plugin.settings.ignoreDiacritics,
      })
      const raw = await plugin.searchEngine.getSuggestions(query)
      return mapResults(plugin, raw)
    },
    registerOnIndexed(cb: () => void): void {
      onIndexedCallbacks.push(cb)
      // Immediately call the callback if the indexing is already done
      if (notified) {
        cb()
      }
    },
    unregisterOnIndexed(cb: () => void): void {
      onIndexedCallbacks = onIndexedCallbacks.filter(o => o !== cb)
    },
    refreshIndex: plugin.notesIndexer.refreshIndex,
  }
}
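// Editor's note: a hedged example of calling the public API registered above on globalThis.
// Only members visible in this diff (search, registerOnIndexed, and the score/vault/path/
// basename/foundWords result fields) are used; the query string is invented.
const omnisearchApi = (globalThis as any).omnisearch
const results = await omnisearchApi.search('lorem ipsum')
for (const r of results) {
  console.log(r.score, r.vault, r.path, r.basename, r.foundWords)
}
omnisearchApi.registerOnIndexed(() => console.log('Omnisearch index is ready'))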
@@ -1,10 +1,8 @@
import { type CachedMetadata, MarkdownView, TFile } from 'obsidian'
import { type App, type CachedMetadata, MarkdownView, TFile } from 'obsidian'
import type { ResultNote } from '../globals'
import { getObsidianApp } from '../stores/obsidian-app'

const app = getObsidianApp()

export async function openNote(
  app: App,
  item: ResultNote,
  offset = 0,
  newPane = false,
@@ -47,7 +45,11 @@ export async function openNote(
  })
}

export async function createNote(name: string, newLeaf = false): Promise<void> {
export async function createNote(
  app: App,
  name: string,
  newLeaf = false
): Promise<void> {
  try {
    let pathPrefix: string
    switch (app.vault.getConfig('newFileLocation')) {
@@ -77,6 +79,7 @@ export async function createNote(name: string, newLeaf = false): Promise<void> {
 * @returns
 */
export function getNonExistingNotes(
  app: App,
  file: TFile,
  metadata: CachedMetadata
): string[] {
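// Editor's note: a minimal sketch of the updated helpers above, which now take the App as
// their first argument instead of reading a module-level singleton. Only parameters shown in
// this diff are passed; `resultNote` is assumed to be a ResultNote coming from a search.
await openNote(plugin.app, resultNote)
await createNote(plugin.app, 'My new note', true)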
@@ -1,83 +1,206 @@
import {
  highlightClass,
  type SearchMatch,
  regexLineSplit,
  regexYaml,
  regexStripQuotes,
  excerptAfter,
  excerptBefore,
} from 'src/globals'
import { settings } from 'src/settings'
import { excerptAfter, excerptBefore, type SearchMatch } from 'src/globals'
import { removeDiacritics, warnDebug } from './utils'
import type { Query } from 'src/search/query'
import { Notice } from 'obsidian'
import { escapeRegExp } from 'lodash-es'
import type OmnisearchPlugin from '../main'

/**
 * Wraps the matches in the text with a <span> element and a highlight class
 * @param text
 * @param matches
 * @returns The html string with the matches highlighted
 */
export function highlightText(text: string, matches: SearchMatch[]): string {
  if (!matches.length) {
    return text
  }
  try {
    // Text to highlight
    const smartMatches = new RegExp(
      matches
        .map(
          // This regex will match the word (with \b word boundary)
          // \b doesn't detect non-alphabetical character's word boundary, so we need to escape it
          matchItem => {
            const escaped = escapeRegExp(matchItem.match)
            return `\\b${escaped}\\b${
              !/[a-zA-Z]/.test(matchItem.match) ? `|${escaped}` : ''
            }`
          }
        )
        .join('|'),
      'giu'
    )
export class TextProcessor {
  constructor(private plugin: OmnisearchPlugin) {}

    // Replacer function that will highlight the matches
    const replacer = (match: string) => {
      const matchInfo = matches.find(info =>
        match.match(
          new RegExp(
            `\\b${escapeRegExp(info.match)}\\b${
              !/[a-zA-Z]/.test(info.match) ? `|${escapeRegExp(info.match)}` : ''
            }`,
            'giu'
          )
        )
      )
      if (matchInfo) {
        return `<span class="${highlightClass}">${match}</span>`
      }
      return match
  /**
   * Wraps the matches in the text with a <span> element and a highlight class
   * @param text
   * @param matches
   * @returns The html string with the matches highlighted
   */
  public highlightText(text: string, matches: SearchMatch[]): string {
    const highlightClass = `suggestion-highlight omnisearch-highlight ${
      this.plugin.settings.highlight ? 'omnisearch-default-highlight' : ''
    }`

    if (!matches.length) {
      return text
    }

    // Effectively highlight the text
    let newText = text.replace(smartMatches, replacer)

    // If the text didn't change (= nothing to highlight), re-run the regex but just replace the matches without the word boundary
    if (newText === text) {
      const dumbMatches = new RegExp(
        matches.map(matchItem => escapeRegExp(matchItem.match)).join('|'),
    try {
      // Text to highlight
      const smartMatches = new RegExp(
        matches
          .map(
            // This regex will match the word (with \b word boundary)
            // \b doesn't detect non-alphabetical character's word boundary, so we need to escape it
            matchItem => {
              const escaped = escapeRegExp(matchItem.match)
              return `\\b${escaped}\\b${
                !/[a-zA-Z]/.test(matchItem.match) ? `|${escaped}` : ''
              }`
            }
          )
          .join('|'),
        'giu'
      )
      newText = text.replace(dumbMatches, replacer)

      // Replacer function that will highlight the matches
      const replacer = (match: string) => {
        const matchInfo = matches.find(info =>
          match.match(
            new RegExp(
              `\\b${escapeRegExp(info.match)}\\b${
                !/[a-zA-Z]/.test(info.match)
                  ? `|${escapeRegExp(info.match)}`
                  : ''
              }`,
              'giu'
            )
          )
        )
        if (matchInfo) {
          return `<span class="${highlightClass}">${match}</span>`
        }
        return match
      }

      // Effectively highlight the text
      let newText = text.replace(smartMatches, replacer)

      // If the text didn't change (= nothing to highlight), re-run the regex but just replace the matches without the word boundary
      if (newText === text) {
        const dumbMatches = new RegExp(
          matches.map(matchItem => escapeRegExp(matchItem.match)).join('|'),
          'giu'
        )
        newText = text.replace(dumbMatches, replacer)
      }
      return newText
    } catch (e) {
      console.error('Omnisearch - Error in highlightText()', e)
      return text
    }
  }
  escapeHTML(html: string): string {
    return html
      .replaceAll('&', '&amp;')
      .replaceAll('<', '&lt;')
      .replaceAll('>', '&gt;')
      .replaceAll('"', '&quot;')
      .replaceAll("'", '&#039;')
  }

  /**
   * Converts a list of strings to a list of words, using the \b word boundary.
   * Used to find excerpts in a note body, or select which words to highlight.
   */
  public stringsToRegex(strings: string[]): RegExp {
    if (!strings.length) return /^$/g

    // sort strings by decreasing length, so that longer strings are matched first
    strings.sort((a, b) => b.length - a.length)

    const joined = `(${strings
      .map(s => `\\b${escapeRegExp(s)}\\b|${escapeRegExp(s)}`)
      .join('|')})`

    return new RegExp(`${joined}`, 'gui')
  }

  /**
   * Returns an array of matches in the text, using the provided regex
   * @param text
   * @param reg
   * @param query
   */
  public getMatches(text: string, words: string[], query?: Query): SearchMatch[] {
    const reg = this.stringsToRegex(words)
    const originalText = text
    // text = text.toLowerCase().replace(new RegExp(SEPARATORS, 'gu'), ' ')
    if (this.plugin.settings.ignoreDiacritics) {
      text = removeDiacritics(text)
    }
    const startTime = new Date().getTime()
    let match: RegExpExecArray | null = null
    let matches: SearchMatch[] = []
    let count = 0
    while ((match = reg.exec(text)) !== null) {
      // Avoid infinite loops, stop looking after 100 matches or if we're taking too much time
      if (++count >= 100 || new Date().getTime() - startTime > 50) {
        warnDebug('Stopped getMatches at', count, 'results')
        break
      }
      const matchStartIndex = match.index
      const matchEndIndex = matchStartIndex + match[0].length
      const originalMatch = originalText
        .substring(matchStartIndex, matchEndIndex)
        .trim()
      if (originalMatch && match.index >= 0) {
        matches.push({ match: originalMatch, offset: match.index })
      }
    }

    // If the query is more than 1 token and can be found "as is" in the text, put this match first
    if (
      query &&
      (query.query.text.length > 1 || query.getExactTerms().length > 0)
    ) {
      const best = text.indexOf(query.getBestStringForExcerpt())
      if (best > -1 && matches.find(m => m.offset === best)) {
        matches.unshift({
          offset: best,
          match: query.getBestStringForExcerpt(),
        })
      }
    }
    return matches
  }

  public makeExcerpt(content: string, offset: number): string {
    const settings = this.plugin.settings
    try {
      const pos = offset ?? -1
      const from = Math.max(0, pos - excerptBefore)
      const to = Math.min(content.length, pos + excerptAfter)
      if (pos > -1) {
        content =
          (from > 0 ? '…' : '') +
          content.slice(from, to).trim() +
          (to < content.length - 1 ? '…' : '')
      } else {
        content = content.slice(0, excerptAfter)
      }
      if (settings.renderLineReturnInExcerpts) {
        const lineReturn = new RegExp(/(?:\r\n|\r|\n)/g)
        // Remove multiple line returns
        content = content
          .split(lineReturn)
          .filter(l => l)
          .join('\n')

        const last = content.lastIndexOf('\n', pos - from)

        if (last > 0) {
          content = content.slice(last)
        }
      }

      content = escapeHTML(content)

      if (settings.renderLineReturnInExcerpts) {
        content = content.trim().replaceAll('\n', '<br>')
      }

      return content
    } catch (e) {
      new Notice(
        'Omnisearch - Error while creating excerpt, see developer console'
      )
      console.error(`Omnisearch - Error while creating excerpt`)
      console.error(e)
      return ''
    }
    return newText
  } catch (e) {
    console.error('Omnisearch - Error in highlightText()', e)
    return text
  }
}

export function escapeHTML(html: string): string {
function escapeHTML(html: string): string {
  return html
    .replaceAll('&', '&amp;')
    .replaceAll('<', '&lt;')
@@ -86,143 +209,3 @@ export function escapeHTML(html: string): string {
    .replaceAll("'", '&#039;')
}

export function splitLines(text: string): string[] {
  return text.split(regexLineSplit).filter(l => !!l && l.length > 2)
}

export function removeFrontMatter(text: string): string {
  // Regex to recognize YAML Front Matter (at beginning of file, 3 hyphens, then any character, including newlines, then 3 hyphens).
  return text.replace(regexYaml, '')
}

/**
 * Converts a list of strings to a list of words, using the \b word boundary.
 * Used to find excerpts in a note body, or select which words to highlight.
 */
export function stringsToRegex(strings: string[]): RegExp {
  if (!strings.length) return /^$/g

  // sort strings by decreasing length, so that longer strings are matched first
  strings.sort((a, b) => b.length - a.length)

  const joined = `(${strings
    .map(s => `\\b${escapeRegExp(s)}\\b|${escapeRegExp(s)}`)
    .join('|')})`

  return new RegExp(`${joined}`, 'gui')
}

/**
 * Returns an array of matches in the text, using the provided regex
 * @param text
 * @param reg
 * @param query
 */
export function getMatches(
  text: string,
  reg: RegExp,
  query?: Query
): SearchMatch[] {
  const originalText = text
  // text = text.toLowerCase().replace(new RegExp(SEPARATORS, 'gu'), ' ')
  if (settings.ignoreDiacritics) {
    text = removeDiacritics(text)
  }
  const startTime = new Date().getTime()
  let match: RegExpExecArray | null = null
  let matches: SearchMatch[] = []
  let count = 0
  while ((match = reg.exec(text)) !== null) {
    // Avoid infinite loops, stop looking after 100 matches or if we're taking too much time
    if (++count >= 100 || new Date().getTime() - startTime > 50) {
      warnDebug('Stopped getMatches at', count, 'results')
      break
    }
    const matchStartIndex = match.index
    const matchEndIndex = matchStartIndex + match[0].length
    const originalMatch = originalText
      .substring(matchStartIndex, matchEndIndex)
      .trim()
    if (originalMatch && match.index >= 0) {
      matches.push({ match: originalMatch, offset: match.index })
    }
  }

  // If the query is more than 1 token and can be found "as is" in the text, put this match first
  if (
    query &&
    (query.query.text.length > 1 || query.getExactTerms().length > 0)
  ) {
    const best = text.indexOf(query.getBestStringForExcerpt())
    if (best > -1 && matches.find(m => m.offset === best)) {
      matches.unshift({
        offset: best,
        match: query.getBestStringForExcerpt(),
      })
    }
  }
  return matches
}

export function makeExcerpt(content: string, offset: number): string {
  try {
    const pos = offset ?? -1
    const from = Math.max(0, pos - excerptBefore)
    const to = Math.min(content.length, pos + excerptAfter)
    if (pos > -1) {
      content =
        (from > 0 ? '…' : '') +
        content.slice(from, to).trim() +
        (to < content.length - 1 ? '…' : '')
    } else {
      content = content.slice(0, excerptAfter)
    }
    if (settings.renderLineReturnInExcerpts) {
      const lineReturn = new RegExp(/(?:\r\n|\r|\n)/g)
      // Remove multiple line returns
      content = content
        .split(lineReturn)
        .filter(l => l)
        .join('\n')

      const last = content.lastIndexOf('\n', pos - from)

      if (last > 0) {
        content = content.slice(last)
      }
    }

    content = escapeHTML(content)

    if (settings.renderLineReturnInExcerpts) {
      content = content.trim().replaceAll('\n', '<br>')
    }

    return content
  } catch (e) {
    new Notice(
      'Omnisearch - Error while creating excerpt, see developer console'
    )
    console.error(`Omnisearch - Error while creating excerpt`)
    console.error(e)
    return ''
  }
}

/**
 * splits a string in words or "expressions in quotes"
 * @param str
 * @returns
 */
export function splitQuotes(str: string): string[] {
  return (
    str
      .match(/"(.*?)"/g)
      ?.map(s => s.replace(/"/g, ''))
      .filter(q => !!q) ?? []
  )
}

export function stripSurroundingQuotes(str: string): string {
  return str.replace(regexStripQuotes, '')
}
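// Editor's note: a small illustration of the word-boundary regex produced by stringsToRegex()
// above. Terms are sorted longest-first and each is tried with \b boundaries and then without,
// so punctuation-adjacent terms still match. The inputs are invented.
const reg = stringsToRegex(['foo', 'bar-baz'])
// roughly /(\bbar-baz\b|bar-baz|\bfoo\b|foo)/gui
'foo and bar-baz'.match(reg) // -> ['foo', 'bar-baz']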
@@ -4,8 +4,7 @@ import {
  parseFrontMatterAliases,
  Platform,
} from 'obsidian'
import { getTextExtractor, isSearchMatch, type SearchMatch } from '../globals'
import { canIndexUnsupportedFiles, settings } from '../settings'
import { isSearchMatch, type SearchMatch } from '../globals'
import { type BinaryLike, createHash } from 'crypto'
import { md5 } from 'pure-md5'

@@ -135,32 +134,6 @@ export function getCtrlKeyLabel(): 'ctrl' | '⌘' {
  return Platform.isMacOS ? '⌘' : 'ctrl'
}

export function isContentIndexable(path: string): boolean {
  const hasTextExtractor = !!getTextExtractor()
  const canIndexPDF = hasTextExtractor && settings.PDFIndexing
  const canIndexImages = hasTextExtractor && settings.imagesIndexing
  return (
    isFilePlaintext(path) ||
    isFileCanvas(path) ||
    isFileFromDataloomPlugin(path) ||
    (canIndexPDF && isFilePDF(path)) ||
    (canIndexImages && isFileImage(path))
  )
}

export function isFilenameIndexable(path: string): boolean {
  return (
    canIndexUnsupportedFiles() ||
    isFilePlaintext(path) ||
    isFileCanvas(path) ||
    isFileFromDataloomPlugin(path)
  )
}

export function isFileIndexable(path: string): boolean {
  return isFilenameIndexable(path) || isContentIndexable(path)
}

export function isFileImage(path: string): boolean {
  const ext = getExtension(path)
  return ext === 'png' || ext === 'jpg' || ext === 'jpeg' || ext === 'webp'
@@ -175,10 +148,6 @@ export function isFileOffice(path: string): boolean {
  return ext === 'docx' || ext === 'xlsx'
}

export function isFilePlaintext(path: string): boolean {
  return [...settings.indexedFileTypes, 'md'].some(t => path.endsWith(`.${t}`))
}

export function isFileCanvas(path: string): boolean {
  return path.endsWith('.canvas')
}
@@ -250,8 +219,13 @@ export function warnDebug(...args: any[]): void {
  printDebug(console.warn, ...args)
}

let printDebugEnabled = false
export function enablePrintDebug(enable: boolean): void {
  printDebugEnabled = enable
}

function printDebug(fn: (...args: any[]) => any, ...args: any[]): void {
  if (settings.verboseLogging) {
  if (printDebugEnabled) {
    const t = new Date()
    const ts = `${t.getMinutes()}:${t.getSeconds()}:${t.getMilliseconds()}`
    fn(...['Omnisearch -', ts + ' -', ...args])