diff --git a/src/database.ts b/src/database.ts index 4fe1c5b..bd29700 100644 --- a/src/database.ts +++ b/src/database.ts @@ -60,7 +60,7 @@ export class Database extends Dexie { paths, data: minisearchJson, }) - console.log('Omnisearch - Search cache written') + console.debug('Omnisearch - Search cache written') } /** @@ -74,7 +74,7 @@ export class Database extends Dexie { db.version !== Database.dbVersion * 10 ) if (toDelete.length) { - console.log('Omnisearch - Those IndexedDb databases will be deleted:') + console.debug('Omnisearch - Those IndexedDb databases will be deleted:') for (const db of toDelete) { if (db.name) { indexedDB.deleteDatabase(db.name) diff --git a/src/main.ts b/src/main.ts index dbaeb9e..8f9828b 100644 --- a/src/main.ts +++ b/src/main.ts @@ -32,11 +32,11 @@ import { notifyOnIndexed, registerAPI } from './tools/api' import { Database } from './database' import { SearchEngine } from './search/search-engine' import { DocumentsRepository } from './repositories/documents-repository' -import { logDebug } from './tools/utils' +import { logVerbose } from './tools/utils' import { NotesIndexer } from './notes-indexer' import { TextProcessor } from './tools/text-processing' import { EmbedsRepository } from './repositories/embeds-repository' -import { SearchHistory } from "./search/search-history"; +import { SearchHistory } from './search/search-history' export default class OmnisearchPlugin extends Plugin { // FIXME: fix the type @@ -71,7 +71,7 @@ export default class OmnisearchPlugin extends Plugin { } if (isPluginDisabled(this.app)) { - console.log('Omnisearch - Plugin disabled') + console.debug('Plugin disabled') return } @@ -120,7 +120,7 @@ export default class OmnisearchPlugin extends Plugin { file instanceof TFile && this.notesIndexer.isFileIndexable(file.path) ) { - logDebug('Indexing new file', file.path) + logVerbose('Indexing new file', file.path) searchEngine.addFromPaths([file.path]) this.embedsRepository.refreshEmbedsForNote(file.path) } @@ 
-128,7 +128,7 @@ export default class OmnisearchPlugin extends Plugin { ) this.registerEvent( this.app.vault.on('delete', file => { - logDebug('Removing file', file.path) + logVerbose('Removing file', file.path) this.documentsRepository.removeDocument(file.path) searchEngine.removeFromPaths([file.path]) this.embedsRepository.removeFile(file.path) @@ -145,7 +145,7 @@ export default class OmnisearchPlugin extends Plugin { this.registerEvent( this.app.vault.on('rename', async (file, oldPath) => { if (this.notesIndexer.isFileIndexable(file.path)) { - logDebug('Renaming file', file.path) + logVerbose('Renaming file', file.path) this.documentsRepository.removeDocument(oldPath) await this.documentsRepository.addDocument(file.path) @@ -237,25 +237,23 @@ export default class OmnisearchPlugin extends Plugin { } private async populateIndex(): Promise<void> { - console.time('Omnisearch - Indexing total time') + console.time('Indexing total time') indexingStep.set(IndexingStepType.ReadingFiles) const files = this.app.vault .getFiles() .filter(f => this.notesIndexer.isFileIndexable(f.path)) - console.log(`Omnisearch - ${files.length} files total`) - console.log( - `Omnisearch - Cache is ${isCacheEnabled() ? 'enabled' : 'disabled'}` - ) + console.debug(`${files.length} files total`) + console.debug(`Cache is ${isCacheEnabled() ? 
'enabled' : 'disabled'}`) // Map documents in the background // Promise.all(files.map(f => cacheManager.addToLiveCache(f.path))) const searchEngine = this.searchEngine if (isCacheEnabled()) { - console.time('Omnisearch - Loading index from cache') + console.time('Loading index from cache') indexingStep.set(IndexingStepType.LoadingCache) const hasCache = await searchEngine.loadCache() if (hasCache) { - console.timeEnd('Omnisearch - Loading index from cache') + console.timeEnd('Loading index from cache') } } @@ -265,22 +263,20 @@ export default class OmnisearchPlugin extends Plugin { if (isCacheEnabled()) { if (diff.toAdd.length) { - console.log( - 'Omnisearch - Total number of files to add/update: ' + - diff.toAdd.length + console.debug( + 'Total number of files to add/update: ' + diff.toAdd.length ) } if (diff.toRemove.length) { - console.log( - 'Omnisearch - Total number of files to remove: ' + - diff.toRemove.length + console.debug( + 'Total number of files to remove: ' + diff.toRemove.length ) } } if (diff.toAdd.length >= 1000 && isCacheEnabled()) { new Notice( - `Omnisearch - ${diff.toAdd.length} files need to be indexed. Obsidian may experience stutters and freezes during the process`, + `${diff.toAdd.length} files need to be indexed. 
Obsidian may experience stutters and freezes during the process`, 10_000 ) } @@ -310,9 +306,9 @@ } } - console.timeEnd('Omnisearch - Indexing total time') + console.timeEnd('Indexing total time') if (diff.toAdd.length >= 1000 && isCacheEnabled()) { - new Notice(`Omnisearch - Your files have been indexed.`) + new Notice(`Your files have been indexed.`) } indexingStep.set(IndexingStepType.Done) notifyOnIndexed() diff --git a/src/notes-indexer.ts b/src/notes-indexer.ts index 769ed9e..04cb74e 100644 --- a/src/notes-indexer.ts +++ b/src/notes-indexer.ts @@ -7,7 +7,7 @@ import { isFileFromDataloom, isFileImage, isFilePDF, - logDebug, + logVerbose, } from './tools/utils' export class NotesIndexer { @@ -25,7 +25,7 @@ public async refreshIndex(): Promise<void> { for (const file of this.notesToReindex) { - logDebug('Updating file', file.path) + logVerbose('Updating file', file.path) await this.plugin.documentsRepository.addDocument(file.path) } diff --git a/src/repositories/documents-repository.ts b/src/repositories/documents-repository.ts index d064697..7aac7a1 100644 --- a/src/repositories/documents-repository.ts +++ b/src/repositories/documents-repository.ts @@ -9,7 +9,7 @@ import { isFileImage, isFileOffice, isFilePDF, - logDebug, + logVerbose, removeDiacritics, stripMarkdownCharacters, } from '../tools/utils' @@ -65,7 +65,7 @@ if (this.documents.has(path)) { return this.documents.get(path)! } - logDebug('Generating IndexedDocument from', path) + logVerbose('Generating IndexedDocument from', path) await this.addDocument(path) return this.documents.get(path)! 
} diff --git a/src/repositories/embeds-repository.ts b/src/repositories/embeds-repository.ts index 4b6a9cd..f42fdb1 100644 --- a/src/repositories/embeds-repository.ts +++ b/src/repositories/embeds-repository.ts @@ -1,6 +1,6 @@ import { getLinkpath, Notice } from 'obsidian' import type OmnisearchPlugin from '../main' -import { logDebug } from '../tools/utils' +import { logVerbose } from '../tools/utils' export class EmbedsRepository { /** Map */ @@ -55,7 +55,7 @@ } public async writeToCache(): Promise<void> { - logDebug('Writing embeds to cache') + logVerbose('Writing embeds to cache') const database = this.plugin.database const data: { embedded: string; referencedBy: string[] }[] = [] for (const [path, embedsList] of this.embeds) { @@ -69,10 +69,10 @@ try { const database = this.plugin.database if (!database.embeds) { - logDebug('No embeds in cache') + logVerbose('No embeds in cache') return } - logDebug('Loading embeds from cache') + logVerbose('Loading embeds from cache') const embedsArr = await database.embeds.toArray() for (const { embedded: path, referencedBy: embeds } of embedsArr) { for (const embed of embeds) { diff --git a/src/search/search-engine.ts b/src/search/search-engine.ts index f648d37..ec123a5 100644 --- a/src/search/search-engine.ts +++ b/src/search/search-engine.ts @@ -5,7 +5,7 @@ import MiniSearch, { } from 'minisearch' import type { DocumentRef, IndexedDocument, ResultNote } from '../globals' -import { chunkArray, logDebug, removeDiacritics } from '../tools/utils' +import { chunkArray, logVerbose, removeDiacritics } from '../tools/utils' import { Notice } from 'obsidian' import type { Query } from './query' import { sortBy } from 'lodash-es' @@ -55,13 +55,12 @@ } { const docsMap = new Map(docs.map(d => [d.path, d.mtime])) - // console.log(this.indexedDocuments) const toAdd = docs.filter( d => !this.indexedDocuments.has(d.path) || 
this.indexedDocuments.get(d.path) !== d.mtime ) - // console.log(toAdd) + const toRemove = [...this.indexedDocuments] .filter( ([path, mtime]) => !docsMap.has(path) || docsMap.get(path) !== mtime ) @@ -75,7 +74,7 @@ * @param paths */ public async addFromPaths(paths: string[]): Promise<void> { - logDebug('Adding files', paths) + logVerbose('Adding files', paths) let documents = ( await Promise.all( paths.map( ) ) ).filter(d => !!d?.path) - logDebug('Sorting documents to first index markdown') + logVerbose('Sorting documents to first index markdown') // Index markdown files first documents = sortBy(documents, d => (d.path.endsWith('.md') ? 0 : 1)) @@ -95,7 +94,7 @@ // Split the documents in smaller chunks to add them to minisearch const chunkedDocs = chunkArray(documents, 500) for (const docs of chunkedDocs) { - logDebug('Indexing into search engine', docs) + logVerbose('Indexing into search engine', docs) // Update the list of indexed docs docs.forEach(doc => this.indexedDocuments.set(doc.path, doc.mtime)) @@ -134,8 +133,8 @@ return [] } - logDebug('=== New search ===') - logDebug('Starting search for', query) + logVerbose('=== New search ===') + logVerbose('Starting search for', query) let fuzziness: number switch (settings.fuzziness) { @@ -151,7 +150,7 @@ } const searchTokens = this.tokenizer.tokenizeForSearch(query.segmentsToStr()) - logDebug(JSON.stringify(searchTokens, null, 1)) + logVerbose(JSON.stringify(searchTokens, null, 1)) let results = this.minisearch.search(searchTokens, { prefix: term => term.length >= options.prefixLength, // length <= 3: no fuzziness @@ -174,7 +173,7 @@ tokenize: text => [text], }) - logDebug(`Found ${results.length} results`, results) + logVerbose(`Found ${results.length} results`, results) // Filter query results to only keep files that match 
query.query.ext (if any) if (query.query.ext?.length) { @@ -212,7 +211,7 @@ export class SearchEngine { return results.filter(r => r.id === options.singleFilePath) } - logDebug( + logVerbose( 'searching with downranked folders', settings.downrankedFoldersFilters ) @@ -252,7 +251,7 @@ export class SearchEngine { // we don't want the filter to match the folder sources, e.g. // it needs to match a whole folder name if (path === filter || path.startsWith(filter + '/')) { - logDebug('searching with downranked folders in path: ', path) + logVerbose('searching with downranked folders in path: ', path) downrankingFolder = true } } @@ -277,7 +276,7 @@ export class SearchEngine { for (const { name, weight } of settings.weightCustomProperties) { const values = metadata?.frontmatter?.[name] if (values && result.terms.some(t => values.includes(t))) { - logDebug(`Boosting field "${name}" x${weight} for ${path}`) + logVerbose(`Boosting field "${name}" x${weight} for ${path}`) result.score *= weight } } @@ -290,14 +289,14 @@ export class SearchEngine { } } } - logDebug('Sorting and limiting results') + logVerbose('Sorting and limiting results') // Sort results and keep the 50 best results = results.sort((a, b) => b.score - a.score).slice(0, 50) - logDebug('Filtered results:', results) + logVerbose('Filtered results:', results) - if (results.length) logDebug('First result:', results[0]) + if (results.length) logVerbose('First result:', results[0]) const documents = await Promise.all( results.map( @@ -308,7 +307,7 @@ export class SearchEngine { // If the search query contains quotes, filter out results that don't have the exact match const exactTerms = query.getExactTerms() if (exactTerms.length) { - logDebug('Filtering with quoted terms: ', exactTerms) + logVerbose('Filtering with quoted terms: ', exactTerms) results = results.filter(r => { const document = documents.find(d => d.path === r.id) const title = document?.path.toLowerCase() ?? 
'' @@ -327,7 +326,7 @@ export class SearchEngine { // If the search query contains exclude terms, filter out results that have them const exclusions = query.query.exclude.text if (exclusions.length) { - logDebug('Filtering with exclusions') + logVerbose('Filtering with exclusions') results = results.filter(r => { const content = ( documents.find(d => d.path === r.id)?.content ?? '' @@ -336,7 +335,7 @@ export class SearchEngine { }) } - logDebug('Deduping') + logVerbose('Deduping') // FIXME: // Dedupe results - clutch for https://github.com/scambier/obsidian-omnisearch/issues/129 results = results.filter( @@ -410,7 +409,7 @@ export class SearchEngine { // Map the raw results to get usable suggestions const resultNotes = results.map(result => { - logDebug('Locating matches for', result.id) + logVerbose('Locating matches for', result.id) let note = documents.find(d => d.path === result.id) if (!note) { // throw new Error(`Omnisearch - Note "${result.id}" not indexed`) @@ -435,15 +434,15 @@ export class SearchEngine { // Tags, starting with # ...query.getTags(), ] - logDebug('Matching tokens:', foundWords) + logVerbose('Matching tokens:', foundWords) - logDebug('Getting matches locations...') + logVerbose('Getting matches locations...') const matches = this.plugin.textProcessor.getMatches( note.content, foundWords, query ) - logDebug(`Matches for note "${note.path}"`, matches) + logVerbose(`Matches for note "${note.path}"`, matches) const resultNote: ResultNote = { score: result.score, foundWords, @@ -454,7 +453,7 @@ export class SearchEngine { return resultNote }) - logDebug('Suggestions:', resultNotes) + logVerbose('Suggestions:', resultNotes) return resultNotes } diff --git a/src/search/tokenizer.ts b/src/search/tokenizer.ts index 77d681f..6149070 100644 --- a/src/search/tokenizer.ts +++ b/src/search/tokenizer.ts @@ -1,6 +1,6 @@ import type { QueryCombination } from 'minisearch' import { BRACKETS_AND_SPACE, chsRegex, SPACE_OR_PUNCTUATION } from '../globals' -import 
{ logDebug, splitCamelCase, splitHyphens } from '../tools/utils' +import { logVerbose, splitCamelCase, splitHyphens } from '../tools/utils' import type OmnisearchPlugin from '../main' const markdownLinkExtractor = require('markdown-link-extractor') @@ -22,7 +22,7 @@ export class Tokenizer { try { urls = markdownLinkExtractor(text) } catch (e) { - logDebug('Error extracting urls', e) + logVerbose('Error extracting urls', e) } } diff --git a/src/settings.ts b/src/settings.ts index 6004151..2368fdc 100644 --- a/src/settings.ts +++ b/src/settings.ts @@ -11,7 +11,7 @@ import { import { writable } from 'svelte/store' import { K_DISABLE_OMNISEARCH } from './globals' import type OmnisearchPlugin from './main' -import { enablePrintDebug } from './tools/utils' +import { enableVerboseLogging } from './tools/utils' import { debounce } from 'lodash-es' interface WeightingSettings { @@ -687,7 +687,7 @@ export class SettingsTab extends PluginSettingTab { .addToggle(toggle => toggle.setValue(settings.verboseLogging).onChange(async v => { settings.verboseLogging = v - enablePrintDebug(v) + enableVerboseLogging(v) await saveSettings(this.plugin) }) ) @@ -869,7 +869,7 @@ export async function loadSettings( await plugin.loadData() ) showExcerpt.set(settings.showExcerpt) - enablePrintDebug(settings.verboseLogging) + enableVerboseLogging(settings.verboseLogging) return settings } diff --git a/src/tools/api-server.ts b/src/tools/api-server.ts index 53c172c..3873cbb 100644 --- a/src/tools/api-server.ts +++ b/src/tools/api-server.ts @@ -40,7 +40,7 @@ export function getServer(plugin: OmnisearchPlugin) { return { listen(port: string) { - console.log(`Omnisearch - Starting HTTP server on port ${port}`) + console.debug(`Omnisearch - Starting HTTP server on port ${port}`) server.listen( { port: parseInt(port), diff --git a/src/tools/icon-utils.ts b/src/tools/icon-utils.ts index be37f5d..82c619b 100644 --- a/src/tools/icon-utils.ts +++ b/src/tools/icon-utils.ts @@ -5,7 +5,7 @@ import { 
isFilePDF, isFileCanvas, isFileExcalidraw, - warnDebug, + warnVerbose, } from './utils' import { escapeHTML } from './text-processing' @@ -38,7 +38,7 @@ export async function loadIconData(plugin: OmnisearchPlugin): Promise { } return iconData } catch (e) { - warnDebug('Failed to read data.json:', e) + warnVerbose('Failed to read data.json:', e) return {} } } @@ -74,7 +74,7 @@ export async function initializeIconPacks( } } } catch (e) { - warnDebug('Failed to list icon packs:', e) + warnVerbose('Failed to list icon packs:', e) } } @@ -142,7 +142,7 @@ export async function loadIconSVG( const iconPackName = prefixToIconPack[prefix] if (!iconPackName) { - warnDebug(`No icon pack found for prefix: ${prefix}`) + warnVerbose(`No icon pack found for prefix: ${prefix}`) return null } @@ -153,12 +153,12 @@ export async function loadIconSVG( if (iconEl) { return iconEl.outerHTML } else { - warnDebug(`Lucide icon not found: ${dashedName}`) + warnVerbose(`Lucide icon not found: ${dashedName}`) return null } } else { if (!iconsPath) { - warnDebug('Icons path is not set. Cannot load icon SVG.') + warnVerbose('Icons path is not set. 
Cannot load icon SVG.') return null } const iconPath = `${plugin.app.vault.configDir}/${iconsPath}/${iconPackName}/${name}.svg` @@ -166,7 +166,7 @@ export async function loadIconSVG( const svgContent = await plugin.app.vault.adapter.read(iconPath) return svgContent } catch (e) { - warnDebug(`Failed to load icon SVG for ${iconName} at ${iconPath}:`, e) + warnVerbose(`Failed to load icon SVG for ${iconName} at ${iconPath}:`, e) return null } } diff --git a/src/tools/text-processing.ts b/src/tools/text-processing.ts index 351817e..5a72c41 100644 --- a/src/tools/text-processing.ts +++ b/src/tools/text-processing.ts @@ -1,5 +1,5 @@ import { excerptAfter, excerptBefore, type SearchMatch } from '../globals' -import { removeDiacritics, warnDebug } from './utils' +import { removeDiacritics, warnVerbose } from './utils' import type { Query } from '../search/query' import { Notice } from 'obsidian' import { escapeRegExp } from 'lodash-es' @@ -115,7 +115,7 @@ export class TextProcessor { while ((match = reg.exec(text)) !== null) { // Avoid infinite loops, stop looking after 100 matches or if we're taking too much time if (++count >= 100 || new Date().getTime() - startTime > 50) { - warnDebug('Stopped getMatches at', count, 'results') + warnVerbose('Stopped getMatches at', count, 'results') break } const matchStartIndex = match.index diff --git a/src/tools/utils.ts b/src/tools/utils.ts index 439962f..9b74292 100644 --- a/src/tools/utils.ts +++ b/src/tools/utils.ts @@ -230,23 +230,21 @@ export function splitHyphens(text: string): string[] { return text.split('-').filter(t => t) } -export function logDebug(...args: any[]): void { - printDebug(console.log, ...args) +export function logVerbose(...args: any[]): void { + printVerbose(console.debug, ...args) } -export function warnDebug(...args: any[]): void { - printDebug(console.warn, ...args) +export function warnVerbose(...args: any[]): void { + printVerbose(console.warn, ...args) } -let printDebugEnabled = false -export function 
enablePrintDebug(enable: boolean): void { - printDebugEnabled = enable +let verboseLoggingEnabled = false +export function enableVerboseLogging(enable: boolean): void { + verboseLoggingEnabled = enable } -function printDebug(fn: (...args: any[]) => any, ...args: any[]): void { - if (printDebugEnabled) { - const t = new Date() - const ts = `${t.getMinutes()}:${t.getSeconds()}:${t.getMilliseconds()}` - fn(...['Omnisearch -', ts + ' -', ...args]) +function printVerbose(fn: (...args: any[]) => any, ...args: any[]): void { + if (verboseLoggingEnabled) { + fn(...args) } }