Squashed commit of the following:
commit ac82511ddd17d5472ae3cfea9bbad9754f5a4d62
Author: Simon Cambier <simon.cambier@protonmail.com>
Date:   Sat Oct 22 08:23:42 2022 +0200

    Screw that cache, seriously.

commit 8ba40d1be73daaaffea09e07bc56c339266db9b6
Author: Simon Cambier <simon.cambier@protonmail.com>
Date:   Fri Oct 21 22:36:48 2022 +0200

    Stuff

commit 27b8fd7dc809be9714a109d3a458eb1276a47e2e
Author: Simon Cambier <simon.cambier@protonmail.com>
Date:   Fri Oct 21 22:22:20 2022 +0200

    Moved files

commit fb1349c914907e586e103ca54fb04b9ddd45ef5d
Author: Simon Cambier <simon.cambier@protonmail.com>
Date:   Thu Oct 20 22:25:29 2022 +0200

    Removed duplicate code

commit e7371138e60cbe4155cfd4fb44e3ee1d2e3ee088
Author: Simon Cambier <simon.cambier@protonmail.com>
Date:   Thu Oct 20 21:50:09 2022 +0200

    Moved a bunch of files

commit 2ee1b2a0e799d4b41ab3a444d8cc44dfff5b5623
Author: Simon Cambier <simon.cambier@protonmail.com>
Date:   Thu Oct 20 21:32:21 2022 +0200

    Removed useless code

commit 76c530dfb9adbad1bbe9079de2330fe43a044249
Author: Simon Cambier <simon.cambier@protonmail.com>
Date:   Thu Oct 20 20:44:11 2022 +0200

    Split file reading and indexing
37	src/tools/api.ts	Normal file
@@ -0,0 +1,37 @@
import type { ResultNote, SearchMatch } from '../globals'
import { Query } from '../search/query'
import * as Search from '../search/search'

type ResultNoteApi = {
  score: number
  path: string
  basename: string
  foundWords: string[]
  matches: SearchMatch[]
}

function mapResults(results: ResultNote[]): ResultNoteApi[] {
  return results.map(result => {
    const { score, path, basename, foundWords, matches } = result
    return {
      score,
      path,
      basename,
      foundWords,
      matches: matches.map(match => {
        return {
          match: match.match,
          offset: match.offset,
        }
      }),
    }
  })
}

async function search(q: string): Promise<ResultNoteApi[]> {
  const query = new Query(q)
  const raw = await Search.getSuggestions(query)
  return mapResults(raw)
}

export default { search }
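For reference, a minimal sketch of how this default export could be consumed elsewhere in the plugin; the import path, the query string, and the consumer function are illustrative assumptions, not part of this commit.

// Hypothetical consumer of the api module above; import path is an assumption.
import api from 'src/tools/api'

async function logSearchResults(): Promise<void> {
  // search() builds a Query, runs Search.getSuggestions(), and maps the raw
  // results to plain ResultNoteApi objects
  const results = await api.search('my query')
  for (const r of results) {
    console.log(`${r.basename} (score ${r.score}): ${r.matches.length} matches`)
  }
}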
66	src/tools/event-bus.ts	Normal file
@@ -0,0 +1,66 @@
export type EventBusCallback = (...args: any[]) => any

export class EventBus {
  private handlers: Map<string, EventBusCallback> = new Map()
  private disabled: string[] = []

  /**
   * Adds a subscription for `event`, for the specified `context`.
   * If a subscription for the same event in the same context already exists, this will overwrite it.
   * @param context
   * @param event
   * @param callback
   */
  public on(context: string, event: string, callback: EventBusCallback): void {
    if (context.includes('@') || event.includes('@')) {
      throw new Error('Invalid context/event name - Cannot contain @')
    }
    this.handlers.set(`${context}@${event}`, callback)
  }

  /**
   * Removes the subscription for an `event` in the `context`.
   * If `event` is left empty, removes all subscriptions.
   * @param context
   * @param event
   */
  public off(context: string, event?: string): void {
    if (event) {
      this.handlers.delete(`${context}@${event}`)
    } else {
      for (const [key] of this.handlers.entries()) {
        if (key.startsWith(`${context}@`)) {
          this.handlers.delete(key)
        }
      }
    }
  }

  /**
   * Disables a `context`. Does not remove subscriptions, but all events for related listeners will be ignored.
   * @param context
   */
  public disable(context: string): void {
    this.enable(context)
    this.disabled.push(context)
  }

  /**
   * Re-enables a `context`.
   * @param context
   */
  public enable(context: string): void {
    this.disabled = this.disabled.filter(v => v !== context)
  }

  public emit(event: string, ...args: any[]): void {
    const entries = [...this.handlers.entries()].filter(
      ([k, _]) => !this.disabled.includes(k.split('@')[0])
    )
    for (const [key, handler] of entries) {
      if (key.endsWith(`@${event}`)) {
        handler(...args)
      }
    }
  }
}
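A short usage sketch of the EventBus API above; the context and event names, the payload, and the import path are made up for illustration.

// Hypothetical usage of the EventBus class above.
import { EventBus } from 'src/tools/event-bus'

const bus = new EventBus()

// Subscriptions are keyed as `context@event`, hence the '@' restriction in on()
bus.on('results-panel', 'refresh', (count: number) => {
  console.log(`Refreshing with ${count} results`)
})

bus.emit('refresh', 10) // handler runs
bus.disable('results-panel') // context is muted, but subscriptions are kept
bus.emit('refresh', 20) // ignored while disabled
bus.enable('results-panel') // handler will run again on the next emit
bus.off('results-panel') // drops every subscription for this context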
100	src/tools/notes.ts	Normal file
@@ -0,0 +1,100 @@
import { type CachedMetadata, MarkdownView, TFile } from 'obsidian'
import { stringsToRegex } from './utils'
import type { ResultNote } from '../globals'

export async function openNote(
  item: ResultNote,
  newPane = false
): Promise<void> {
  const reg = stringsToRegex(item.foundWords)
  reg.exec(item.content)
  const offset = reg.lastIndex

  // Check if the note is already open,
  // to avoid opening it twice if the first one is pinned
  let alreadyOpenAndPinned = false
  app.workspace.iterateAllLeaves(leaf => {
    if (leaf.view instanceof MarkdownView) {
      if (
        !newPane &&
        leaf.getViewState().state?.file === item.path &&
        leaf.getViewState()?.pinned
      ) {
        app.workspace.setActiveLeaf(leaf, { focus: true })
        alreadyOpenAndPinned = true
      }
    }
  })

  if (!alreadyOpenAndPinned) {
    // Open the note normally
    await app.workspace.openLinkText(item.path, '', newPane)
  }

  const view = app.workspace.getActiveViewOfType(MarkdownView)
  if (!view) {
    // Not an editable document, so no cursor to place
    // throw new Error('OmniSearch - No active MarkdownView')
    return
  }
  const pos = view.editor.offsetToPos(offset)
  pos.ch = 0

  view.editor.setCursor(pos)
  view.editor.scrollIntoView({
    from: { line: pos.line - 10, ch: 0 },
    to: { line: pos.line + 10, ch: 0 },
  })
}

export async function createNote(name: string, newLeaf = false): Promise<void> {
  try {
    let pathPrefix: string
    switch (app.vault.getConfig('newFileLocation')) {
      case 'current':
        pathPrefix = (app.workspace.getActiveFile()?.parent.path ?? '') + '/'
        break
      case 'folder':
        pathPrefix = app.vault.getConfig('newFileFolderPath') + '/'
        break
      default: // 'root'
        pathPrefix = ''
        break
    }
    await app.workspace.openLinkText(`${pathPrefix}${name}.md`, '', newLeaf)
  } catch (e) {
    ;(e as any).message =
      'OmniSearch - Could not create note: ' + (e as any).message
    console.error(e)
    throw e
  }
}

/**
 * For a given file, returns a list of links leading to notes that don't exist
 * @param file
 * @param metadata
 * @returns
 */
export function getNonExistingNotes(
  file: TFile,
  metadata: CachedMetadata
): string[] {
  return (metadata.links ?? [])
    .map(l => {
      const path = removeAnchors(l.link)
      return app.metadataCache.getFirstLinkpathDest(path, file.path)
        ? ''
        : l.link
    })
    .filter(l => !!l)
}

/**
 * Removes anchors and headings
 * @param name
 * @returns
 */
export function removeAnchors(name: string): string {
  return name.split(/[\^#]+/)[0]
}
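To illustrate the behavior of removeAnchors() above, a couple of hypothetical link targets and the values the function would return; the note paths and import path are made up.

// Hypothetical inputs for removeAnchors(); paths are illustrative.
import { removeAnchors } from 'src/tools/notes'

removeAnchors('Daily/2022-10-22#Tasks')   // 'Daily/2022-10-22' (heading reference stripped)
removeAnchors('Projects/Omnisearch^3fa2') // 'Projects/Omnisearch' (block reference stripped)
removeAnchors('Plain note')               // 'Plain note' (unchanged)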
203	src/tools/utils.ts	Normal file
@@ -0,0 +1,203 @@
import { type CachedMetadata, Platform } from 'obsidian'
import type { SearchMatch } from '../globals'
import {
  excerptAfter,
  excerptBefore,
  highlightClass,
  isSearchMatch,
  regexLineSplit,
  regexStripQuotes,
  regexYaml,
} from '../globals'
import { settings } from '../settings'
import { type BinaryLike, createHash } from 'crypto'
import { md5 } from 'pure-md5'

export function highlighter(str: string): string {
  return `<span class="${highlightClass}">${str}</span>`
}

export function escapeHTML(html: string): string {
  return html
    .replaceAll('&', '&amp;')
    .replaceAll('<', '&lt;')
    .replaceAll('>', '&gt;')
    .replaceAll('"', '&quot;')
    .replaceAll("'", '&#39;')
}

export function splitLines(text: string): string[] {
  return text.split(regexLineSplit).filter(l => !!l && l.length > 2)
}

export function removeFrontMatter(text: string): string {
  // Regex to recognize YAML front matter (at the beginning of the file: 3 hyphens, then any characters, including newlines, then 3 hyphens).
  return text.replace(regexYaml, '')
}

export function wait(ms: number): Promise<void> {
  return new Promise(resolve => {
    setTimeout(resolve, ms)
  })
}

// https://stackoverflow.com/a/3561711
export function escapeRegex(str: string): string {
  return str.replace(/[-/\\^$*+?.()|[\]{}]/g, '\\$&')
}

/**
 * Returns the positions of all occurrences of `regex` inside of `text`
 * https://stackoverflow.com/a/58828841
 * @param text
 * @param regex
 * @returns
 */
export function getAllIndices(text: string, regex: RegExp): SearchMatch[] {
  return [...text.matchAll(regex)]
    .map(o => ({ match: o[0], offset: o.index }))
    .filter(isSearchMatch)
}

export function stringsToRegex(strings: string[]): RegExp {
  if (!strings.length) return /^$/g
  return new RegExp(strings.map(s => `(${escapeRegex(s)})`).join('|'), 'gi')
}

export function extractHeadingsFromCache(
  cache: CachedMetadata,
  level: number
): string[] {
  return (
    cache.headings?.filter(h => h.level === level).map(h => h.heading) ?? []
  )
}

export function loopIndex(index: number, nbItems: number): number {
  return (index + nbItems) % nbItems
}

export function makeExcerpt(content: string, offset: number): string {
  const pos = offset ?? -1
  if (pos > -1) {
    const from = Math.max(0, pos - excerptBefore)
    const to = Math.min(content.length, pos + excerptAfter)
    content =
      (from > 0 ? '…' : '') +
      content.slice(from, to).trim() +
      (to < content.length - 1 ? '…' : '')
  }
  return escapeHTML(content)
}

/**
 * Splits a string into words or "expressions in quotes"
 * @param str
 * @returns
 */
export function splitQuotes(str: string): string[] {
  return (
    str
      .match(/"(.*?)"/g)
      ?.map(s => s.replace(/"/g, ''))
      .filter(q => !!q) ?? []
  )
}

export function stripSurroundingQuotes(str: string): string {
  return str.replace(regexStripQuotes, '')
}

function mapAsync<T, U>(
  array: T[],
  callbackfn: (value: T, index: number, array: T[]) => Promise<U>
): Promise<U[]> {
  return Promise.all(array.map(callbackfn))
}

/**
 * https://stackoverflow.com/a/53508547
 * @param array
 * @param callbackfn
 * @returns
 */
export async function filterAsync<T>(
  array: T[],
  callbackfn: (value: T, index: number, array: T[]) => Promise<boolean>
): Promise<T[]> {
  const filterMap = await mapAsync(array, callbackfn)
  return array.filter((value, index) => filterMap[index])
}

/**
 * A simple function to strip bold and italic markdown chars from a string
 * @param text
 * @returns
 */
export function stripMarkdownCharacters(text: string): string {
  return text.replace(/(\*|_)+(.+?)(\*|_)+/g, (match, p1, p2) => p2)
}

export function getAliasesFromMetadata(
  metadata: CachedMetadata | null
): string[] {
  const arrOrString = metadata?.frontmatter?.aliases ?? []
  return (
    Array.isArray(arrOrString) ? arrOrString : arrOrString.toString().split(',')
  )
    .map(s => (s ? s.trim() : s))
    .filter(s => !!s)
}

export function getTagsFromMetadata(metadata: CachedMetadata | null): string[] {
  const arrOrString = metadata?.frontmatter?.tags ?? []
  const fromFrontMatter = (
    Array.isArray(arrOrString) ? arrOrString : arrOrString.split(',')
  )
    .map(s => (s ? s.trim() : s))
    .filter(s => !!s)
  const fromBody = (metadata?.tags ?? []).map(t => t.tag)

  return [...fromFrontMatter, ...fromBody].map(t =>
    t[0] !== '#' ? '#' + t : t
  )
}

/**
 * https://stackoverflow.com/a/37511463
 */
export function removeDiacritics(str: string): string {
  return str.normalize('NFD').replace(/\p{Diacritic}/gu, '')
}

export function getCtrlKeyLabel(): 'ctrl' | '⌘' {
  return Platform.isMacOS ? '⌘' : 'ctrl'
}

export function isFileIndexable(path: string): boolean {
  return (
    (settings.PDFIndexing && path.endsWith('.pdf')) || isFilePlaintext(path)
  )
}

export function isFilePlaintext(path: string): boolean {
  return getPlaintextExtensions().some(t => path.endsWith(`.${t}`))
}

export function getPlaintextExtensions(): string[] {
  return [...settings.indexedFileTypes, 'md']
}

export function getExtension(path: string): string {
  const split = path.split('.')
  return split[split.length - 1]
}

export function makeMD5(data: BinaryLike): string {
  if (Platform.isMobileApp) {
    // A node-less implementation, but since we're not hashing the same data
    // (arrayBuffer vs stringified array) the hash will be different
    return md5(data.toString())
  }
  return createHash('md5').update(data).digest('hex')
}
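As a rough sketch of how the word-matching helpers above fit together: the sample text, the expected offsets, and the import path are illustrative assumptions, not part of this commit.

// Hypothetical composition of stringsToRegex(), getAllIndices(), and makeExcerpt().
import { getAllIndices, makeExcerpt, stringsToRegex } from 'src/tools/utils'

const content = 'Obsidian stores notes as plain Markdown files.'

// One escaped alternation group per word, case-insensitive and global
const reg = stringsToRegex(['notes', 'markdown'])

// [{ match: 'notes', offset: 16 }, { match: 'Markdown', offset: 31 }]
const matches = getAllIndices(content, reg)

// HTML-escaped snippet of `content` around the first match, bounded by
// excerptBefore/excerptAfter from globals
const excerpt = makeExcerpt(content, matches[0]?.offset ?? -1)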