Update validateWikilinks, resolveWikilink, and getBacklinks methods to use IVaultAdapter and IMetadataCacheAdapter instead of direct App access. - Implemented inline link suggestion finding using vault adapter - Implemented backlinks retrieval using metadata cache adapter - Added helper methods: findLinkSuggestions, extractSnippet, escapeRegex - App parameter still required for waypoint methods (not in scope for this task)
1177 lines
29 KiB
TypeScript
1177 lines
29 KiB
TypeScript
import { App, TFile, TFolder } from 'obsidian';
|
|
import { CallToolResult, FileMetadata, DirectoryMetadata, VaultInfo, SearchResult, SearchMatch, StatResult, ExistsResult, ListResult, FileMetadataWithFrontmatter, FrontmatterSummary, WaypointSearchResult, FolderWaypointResult, FolderNoteResult, ValidateWikilinksResult, ResolveWikilinkResult, BacklinksResult } from '../types/mcp-types';
|
|
import { PathUtils } from '../utils/path-utils';
|
|
import { ErrorMessages } from '../utils/error-messages';
|
|
import { GlobUtils } from '../utils/glob-utils';
|
|
import { SearchUtils } from '../utils/search-utils';
|
|
import { WaypointUtils } from '../utils/waypoint-utils';
|
|
import { LinkUtils } from '../utils/link-utils';
|
|
import { IVaultAdapter, IMetadataCacheAdapter } from '../adapters/interfaces';
|
|
|
|
export class VaultTools {
|
|
constructor(
|
|
private vault: IVaultAdapter,
|
|
private metadata: IMetadataCacheAdapter,
|
|
private app: App // Still needed for waypoint methods (searchWaypoints, getFolderWaypoint, isFolderNote)
|
|
) {}
|
|
|
|
async getVaultInfo(): Promise<CallToolResult> {
|
|
try {
|
|
const allFiles = this.vault.getMarkdownFiles();
|
|
const totalNotes = allFiles.length;
|
|
|
|
// Calculate total size
|
|
let totalSize = 0;
|
|
for (const file of allFiles) {
|
|
const stat = this.vault.stat(file);
|
|
if (stat) {
|
|
totalSize += stat.size;
|
|
}
|
|
}
|
|
|
|
const info = {
|
|
totalNotes,
|
|
totalSize,
|
|
sizeFormatted: this.formatBytes(totalSize)
|
|
};
|
|
|
|
return {
|
|
content: [{
|
|
type: "text",
|
|
text: JSON.stringify(info, null, 2)
|
|
}]
|
|
};
|
|
} catch (error) {
|
|
return {
|
|
content: [{
|
|
type: "text",
|
|
text: `Get vault info error: ${(error as Error).message}`
|
|
}],
|
|
isError: true
|
|
};
|
|
}
|
|
}
|
|
|
|
private formatBytes(bytes: number): string {
|
|
if (bytes === 0) return '0 Bytes';
|
|
const k = 1024;
|
|
const sizes = ['Bytes', 'KB', 'MB', 'GB'];
|
|
const i = Math.floor(Math.log(bytes) / Math.log(k));
|
|
return Math.round((bytes / Math.pow(k, i)) * 100) / 100 + ' ' + sizes[i];
|
|
}
|
|
|
|
async listNotes(path?: string): Promise<CallToolResult> {
|
|
let items: Array<FileMetadata | DirectoryMetadata> = [];
|
|
|
|
// Normalize root path: undefined, empty string "", or "." all mean root
|
|
const isRootPath = !path || path === '' || path === '.';
|
|
|
|
let targetFolder: TFolder;
|
|
|
|
if (isRootPath) {
|
|
// Get the root folder using adapter
|
|
targetFolder = this.vault.getRoot();
|
|
} else {
|
|
// Validate non-root path
|
|
if (!PathUtils.isValidVaultPath(path)) {
|
|
return {
|
|
content: [{ type: "text", text: ErrorMessages.invalidPath(path) }],
|
|
isError: true
|
|
};
|
|
}
|
|
|
|
// Normalize the path
|
|
const normalizedPath = PathUtils.normalizePath(path);
|
|
|
|
// Get folder using adapter
|
|
const folderObj = this.vault.getAbstractFileByPath(normalizedPath);
|
|
|
|
if (!folderObj) {
|
|
return {
|
|
content: [{ type: "text", text: ErrorMessages.folderNotFound(normalizedPath) }],
|
|
isError: true
|
|
};
|
|
}
|
|
|
|
// Check if it's a folder
|
|
if (!(folderObj instanceof TFolder)) {
|
|
return {
|
|
content: [{ type: "text", text: ErrorMessages.notAFolder(normalizedPath) }],
|
|
isError: true
|
|
};
|
|
}
|
|
|
|
targetFolder = folderObj;
|
|
}
|
|
|
|
// Iterate over direct children of the folder
|
|
for (const item of targetFolder.children) {
|
|
// Skip the vault root itself
|
|
if (item.path === '' || item.path === '/' || (item instanceof TFolder && item.isRoot())) {
|
|
continue;
|
|
}
|
|
|
|
if (item instanceof TFile) {
|
|
items.push(this.createFileMetadata(item));
|
|
} else if (item instanceof TFolder) {
|
|
items.push(this.createDirectoryMetadata(item));
|
|
}
|
|
}
|
|
|
|
// Sort: directories first, then files, alphabetically within each group
|
|
// Use case-insensitive comparison for stable, consistent ordering
|
|
items.sort((a, b) => {
|
|
if (a.kind !== b.kind) {
|
|
return a.kind === 'directory' ? -1 : 1;
|
|
}
|
|
// Case-insensitive alphabetical sort within each group
|
|
return a.name.toLowerCase().localeCompare(b.name.toLowerCase());
|
|
});
|
|
|
|
return {
|
|
content: [{
|
|
type: "text",
|
|
text: JSON.stringify(items, null, 2)
|
|
}]
|
|
};
|
|
}
|
|
|
|
// Phase 4: Enhanced List Operations
|
|
async list(options: {
|
|
path?: string;
|
|
recursive?: boolean;
|
|
includes?: string[];
|
|
excludes?: string[];
|
|
only?: 'files' | 'directories' | 'any';
|
|
limit?: number;
|
|
cursor?: string;
|
|
withFrontmatterSummary?: boolean;
|
|
}): Promise<CallToolResult> {
|
|
const {
|
|
path,
|
|
recursive = false,
|
|
includes,
|
|
excludes,
|
|
only = 'any',
|
|
limit,
|
|
cursor,
|
|
withFrontmatterSummary = false
|
|
} = options;
|
|
|
|
let items: Array<FileMetadataWithFrontmatter | DirectoryMetadata> = [];
|
|
|
|
// Normalize root path: undefined, empty string "", or "." all mean root
|
|
const isRootPath = !path || path === '' || path === '.';
|
|
|
|
let targetFolder: TFolder;
|
|
|
|
if (isRootPath) {
|
|
// Get the root folder using adapter
|
|
targetFolder = this.vault.getRoot();
|
|
} else {
|
|
// Validate non-root path
|
|
if (!PathUtils.isValidVaultPath(path)) {
|
|
return {
|
|
content: [{ type: "text", text: ErrorMessages.invalidPath(path) }],
|
|
isError: true
|
|
};
|
|
}
|
|
|
|
// Normalize the path
|
|
const normalizedPath = PathUtils.normalizePath(path);
|
|
|
|
// Get folder using adapter
|
|
const folderObj = this.vault.getAbstractFileByPath(normalizedPath);
|
|
|
|
if (!folderObj) {
|
|
return {
|
|
content: [{ type: "text", text: ErrorMessages.folderNotFound(normalizedPath) }],
|
|
isError: true
|
|
};
|
|
}
|
|
|
|
// Check if it's a folder
|
|
if (!(folderObj instanceof TFolder)) {
|
|
return {
|
|
content: [{ type: "text", text: ErrorMessages.notAFolder(normalizedPath) }],
|
|
isError: true
|
|
};
|
|
}
|
|
|
|
targetFolder = folderObj;
|
|
}
|
|
|
|
// Collect items based on recursive flag
|
|
await this.collectItems(targetFolder, items, recursive, includes, excludes, only, withFrontmatterSummary);
|
|
|
|
// Sort: directories first, then files, alphabetically within each group
|
|
items.sort((a, b) => {
|
|
if (a.kind !== b.kind) {
|
|
return a.kind === 'directory' ? -1 : 1;
|
|
}
|
|
return a.name.toLowerCase().localeCompare(b.name.toLowerCase());
|
|
});
|
|
|
|
// Handle cursor-based pagination
|
|
let startIndex = 0;
|
|
if (cursor) {
|
|
// Cursor is the path of the last item from the previous page
|
|
const cursorIndex = items.findIndex(item => item.path === cursor);
|
|
if (cursorIndex !== -1) {
|
|
startIndex = cursorIndex + 1;
|
|
}
|
|
}
|
|
|
|
// Apply limit and pagination
|
|
const totalCount = items.length;
|
|
let paginatedItems = items.slice(startIndex);
|
|
let hasMore = false;
|
|
let nextCursor: string | undefined;
|
|
|
|
if (limit && limit > 0 && paginatedItems.length > limit) {
|
|
paginatedItems = paginatedItems.slice(0, limit);
|
|
hasMore = true;
|
|
// Set cursor to the path of the last item in this page
|
|
nextCursor = paginatedItems[paginatedItems.length - 1].path;
|
|
}
|
|
|
|
const result: ListResult = {
|
|
items: paginatedItems,
|
|
totalCount: totalCount,
|
|
hasMore: hasMore,
|
|
nextCursor: nextCursor
|
|
};
|
|
|
|
return {
|
|
content: [{
|
|
type: "text",
|
|
text: JSON.stringify(result, null, 2)
|
|
}]
|
|
};
|
|
}
|
|
|
|
/**
|
|
* Helper method to recursively collect items from a folder
|
|
*/
|
|
private async collectItems(
|
|
folder: TFolder,
|
|
items: Array<FileMetadataWithFrontmatter | DirectoryMetadata>,
|
|
recursive: boolean,
|
|
includes?: string[],
|
|
excludes?: string[],
|
|
only?: 'files' | 'directories' | 'any',
|
|
withFrontmatterSummary?: boolean
|
|
): Promise<void> {
|
|
for (const item of folder.children) {
|
|
// Skip the vault root itself
|
|
if (item.path === '' || item.path === '/' || (item instanceof TFolder && item.isRoot())) {
|
|
continue;
|
|
}
|
|
|
|
// Apply glob filtering
|
|
if (!GlobUtils.shouldInclude(item.path, includes, excludes)) {
|
|
continue;
|
|
}
|
|
|
|
// Apply type filtering and add items
|
|
if (item instanceof TFile) {
|
|
if (only !== 'directories') {
|
|
const fileMetadata = await this.createFileMetadataWithFrontmatter(item, withFrontmatterSummary || false);
|
|
items.push(fileMetadata);
|
|
}
|
|
} else if (item instanceof TFolder) {
|
|
if (only !== 'files') {
|
|
items.push(this.createDirectoryMetadata(item));
|
|
}
|
|
|
|
// Recursively collect from subfolders if needed
|
|
if (recursive) {
|
|
await this.collectItems(item, items, recursive, includes, excludes, only, withFrontmatterSummary);
|
|
}
|
|
}
|
|
}
|
|
}
|
|
|
|
private async createFileMetadataWithFrontmatter(
|
|
file: TFile,
|
|
withFrontmatterSummary: boolean
|
|
): Promise<FileMetadataWithFrontmatter> {
|
|
const baseMetadata = this.createFileMetadata(file);
|
|
|
|
if (!withFrontmatterSummary || file.extension !== 'md') {
|
|
return baseMetadata;
|
|
}
|
|
|
|
// Extract frontmatter without reading full content
|
|
try {
|
|
const cache = this.metadata.getFileCache(file);
|
|
if (cache?.frontmatter) {
|
|
const summary: FrontmatterSummary = {};
|
|
|
|
// Extract common frontmatter fields
|
|
if (cache.frontmatter.title) {
|
|
summary.title = cache.frontmatter.title;
|
|
}
|
|
if (cache.frontmatter.tags) {
|
|
// Tags can be string or array
|
|
if (Array.isArray(cache.frontmatter.tags)) {
|
|
summary.tags = cache.frontmatter.tags;
|
|
} else if (typeof cache.frontmatter.tags === 'string') {
|
|
summary.tags = [cache.frontmatter.tags];
|
|
}
|
|
}
|
|
if (cache.frontmatter.aliases) {
|
|
// Aliases can be string or array
|
|
if (Array.isArray(cache.frontmatter.aliases)) {
|
|
summary.aliases = cache.frontmatter.aliases;
|
|
} else if (typeof cache.frontmatter.aliases === 'string') {
|
|
summary.aliases = [cache.frontmatter.aliases];
|
|
}
|
|
}
|
|
|
|
// Include all other frontmatter fields
|
|
for (const key in cache.frontmatter) {
|
|
if (key !== 'title' && key !== 'tags' && key !== 'aliases' && key !== 'position') {
|
|
summary[key] = cache.frontmatter[key];
|
|
}
|
|
}
|
|
|
|
return {
|
|
...baseMetadata,
|
|
frontmatterSummary: summary
|
|
};
|
|
}
|
|
} catch (error) {
|
|
// If frontmatter extraction fails, just return base metadata
|
|
console.error(`Failed to extract frontmatter for ${file.path}:`, error);
|
|
}
|
|
|
|
return baseMetadata;
|
|
}
|
|
|
|
private createFileMetadata(file: TFile): FileMetadata {
|
|
return {
|
|
kind: "file",
|
|
name: file.name,
|
|
path: file.path,
|
|
extension: file.extension,
|
|
size: file.stat.size,
|
|
modified: file.stat.mtime,
|
|
created: file.stat.ctime
|
|
};
|
|
}
|
|
|
|
private createDirectoryMetadata(folder: TFolder): DirectoryMetadata {
|
|
// Count direct children
|
|
const childrenCount = folder.children.length;
|
|
|
|
// Try to get modified time from filesystem if available
|
|
// Note: Obsidian's TFolder doesn't have a stat property in the official API
|
|
// We try to access it anyway in case it's populated at runtime
|
|
// In most cases, this will be 0 for directories
|
|
let modified = 0;
|
|
try {
|
|
if ((folder as any).stat && typeof (folder as any).stat.mtime === 'number') {
|
|
modified = (folder as any).stat.mtime;
|
|
}
|
|
} catch (error) {
|
|
// Silently fail - modified will remain 0
|
|
}
|
|
|
|
return {
|
|
kind: "directory",
|
|
name: folder.name,
|
|
path: folder.path,
|
|
childrenCount: childrenCount,
|
|
modified: modified
|
|
};
|
|
}
|
|
|
|
// Phase 3: Discovery Endpoints
|
|
async stat(path: string): Promise<CallToolResult> {
|
|
// Validate path
|
|
if (!PathUtils.isValidVaultPath(path)) {
|
|
return {
|
|
content: [{ type: "text", text: ErrorMessages.invalidPath(path) }],
|
|
isError: true
|
|
};
|
|
}
|
|
|
|
// Normalize the path
|
|
const normalizedPath = PathUtils.normalizePath(path);
|
|
|
|
// Get file or folder using adapter
|
|
const item = this.vault.getAbstractFileByPath(normalizedPath);
|
|
|
|
if (!item) {
|
|
// Path doesn't exist
|
|
const result: StatResult = {
|
|
path: normalizedPath,
|
|
exists: false
|
|
};
|
|
return {
|
|
content: [{
|
|
type: "text",
|
|
text: JSON.stringify(result, null, 2)
|
|
}]
|
|
};
|
|
}
|
|
|
|
// Check if it's a file
|
|
if (item instanceof TFile) {
|
|
const result: StatResult = {
|
|
path: normalizedPath,
|
|
exists: true,
|
|
kind: "file",
|
|
metadata: this.createFileMetadata(item)
|
|
};
|
|
return {
|
|
content: [{
|
|
type: "text",
|
|
text: JSON.stringify(result, null, 2)
|
|
}]
|
|
};
|
|
}
|
|
|
|
// Check if it's a folder
|
|
if (item instanceof TFolder) {
|
|
const result: StatResult = {
|
|
path: normalizedPath,
|
|
exists: true,
|
|
kind: "directory",
|
|
metadata: this.createDirectoryMetadata(item)
|
|
};
|
|
return {
|
|
content: [{
|
|
type: "text",
|
|
text: JSON.stringify(result, null, 2)
|
|
}]
|
|
};
|
|
}
|
|
|
|
// Path doesn't exist (shouldn't reach here)
|
|
const result: StatResult = {
|
|
path: normalizedPath,
|
|
exists: false
|
|
};
|
|
return {
|
|
content: [{
|
|
type: "text",
|
|
text: JSON.stringify(result, null, 2)
|
|
}]
|
|
};
|
|
}
|
|
|
|
async exists(path: string): Promise<CallToolResult> {
|
|
// Validate path
|
|
if (!PathUtils.isValidVaultPath(path)) {
|
|
return {
|
|
content: [{ type: "text", text: ErrorMessages.invalidPath(path) }],
|
|
isError: true
|
|
};
|
|
}
|
|
|
|
// Normalize the path
|
|
const normalizedPath = PathUtils.normalizePath(path);
|
|
|
|
// Get file or folder using adapter
|
|
const item = this.vault.getAbstractFileByPath(normalizedPath);
|
|
|
|
if (!item) {
|
|
// Path doesn't exist
|
|
const result: ExistsResult = {
|
|
path: normalizedPath,
|
|
exists: false
|
|
};
|
|
return {
|
|
content: [{
|
|
type: "text",
|
|
text: JSON.stringify(result, null, 2)
|
|
}]
|
|
};
|
|
}
|
|
|
|
// Check if it's a file
|
|
if (item instanceof TFile) {
|
|
const result: ExistsResult = {
|
|
path: normalizedPath,
|
|
exists: true,
|
|
kind: "file"
|
|
};
|
|
return {
|
|
content: [{
|
|
type: "text",
|
|
text: JSON.stringify(result, null, 2)
|
|
}]
|
|
};
|
|
}
|
|
|
|
// Check if it's a folder
|
|
if (item instanceof TFolder) {
|
|
const result: ExistsResult = {
|
|
path: normalizedPath,
|
|
exists: true,
|
|
kind: "directory"
|
|
};
|
|
return {
|
|
content: [{
|
|
type: "text",
|
|
text: JSON.stringify(result, null, 2)
|
|
}]
|
|
};
|
|
}
|
|
|
|
// Path doesn't exist (shouldn't reach here)
|
|
const result: ExistsResult = {
|
|
path: normalizedPath,
|
|
exists: false
|
|
};
|
|
return {
|
|
content: [{
|
|
type: "text",
|
|
text: JSON.stringify(result, null, 2)
|
|
}]
|
|
};
|
|
}
|
|
|
|
  // Phase 6: Powerful Search
  /**
   * Full-text search over the vault's markdown files.
   *
   * Supports literal or regex queries, optional case sensitivity, folder and
   * glob scoping, match snippets with highlight ranges, and a hard cap on the
   * number of matches collected.
   *
   * @param options.query - search text or regex source
   * @param options.isRegex - treat query as a regex (default false)
   * @param options.caseSensitive - default false
   * @param options.includes / options.excludes - glob filters on file paths
   * @param options.folder - restrict search to this folder subtree
   * @param options.returnSnippets - truncate long lines around the match (default true)
   * @param options.snippetLength - max snippet characters (default 100)
   * @param options.maxResults - stop after this many matches (default 100)
   * @returns SearchResult JSON, or an isError result for invalid patterns
   */
  async search(options: {
    query: string;
    isRegex?: boolean;
    caseSensitive?: boolean;
    includes?: string[];
    excludes?: string[];
    folder?: string;
    returnSnippets?: boolean;
    snippetLength?: number;
    maxResults?: number;
  }): Promise<CallToolResult> {
    const {
      query,
      isRegex = false,
      caseSensitive = false,
      includes,
      excludes,
      folder,
      returnSnippets = true,
      snippetLength = 100,
      maxResults = 100
    } = options;

    try {
      // Compile search pattern (always with the 'g' flag so exec() can walk
      // multiple matches per line).
      let searchPattern: RegExp;
      try {
        if (isRegex) {
          const flags = caseSensitive ? 'g' : 'gi';
          searchPattern = new RegExp(query, flags);
        } else {
          // Escape special regex characters for literal search
          const escapedQuery = query.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
          const flags = caseSensitive ? 'g' : 'gi';
          searchPattern = new RegExp(escapedQuery, flags);
        }
      } catch (error) {
        // User-supplied regex failed to compile.
        return {
          content: [{
            type: "text",
            text: `Invalid regex pattern: ${(error as Error).message}`
          }],
          isError: true
        };
      }

      // Get files to search using adapter
      let files = this.vault.getMarkdownFiles();

      // Filter by folder if specified (prefix match on "folder/")
      if (folder) {
        const folderPath = folder.endsWith('/') ? folder : folder + '/';
        files = files.filter(file =>
          file.path.startsWith(folderPath) || file.path === folder
        );
      }

      // Apply glob filtering
      if (includes || excludes) {
        files = files.filter(file =>
          GlobUtils.shouldInclude(file.path, includes, excludes)
        );
      }

      const matches: SearchMatch[] = [];
      const filesWithMatches = new Set<string>();
      let filesSearched = 0;

      // Search through files
      for (const file of files) {
        if (matches.length >= maxResults) {
          break;
        }

        filesSearched++;

        try {
          const content = await this.vault.read(file);
          const lines = content.split('\n');

          // Search in content, line by line
          for (let lineIndex = 0; lineIndex < lines.length; lineIndex++) {
            if (matches.length >= maxResults) {
              break;
            }

            const line = lines[lineIndex];

            // Reset regex lastIndex for global patterns — the 'g' flag makes
            // exec() stateful, so each line must start scanning from 0.
            searchPattern.lastIndex = 0;

            let match: RegExpExecArray | null;
            while ((match = searchPattern.exec(line)) !== null) {
              if (matches.length >= maxResults) {
                break;
              }

              const columnIndex = match.index;
              const matchText = match[0];

              // Extract snippet with context. By default the snippet is the
              // whole line and matchStart the absolute column; both are
              // adjusted below only when the line needs truncation.
              let snippet = line;
              let snippetStart = 0;
              let matchStart = columnIndex;

              if (returnSnippets && line.length > snippetLength) {
                // Calculate snippet boundaries: a window of snippetLength
                // characters roughly centered on the match.
                const halfSnippet = Math.floor(snippetLength / 2);
                snippetStart = Math.max(0, columnIndex - halfSnippet);
                const snippetEnd = Math.min(line.length, snippetStart + snippetLength);

                // Adjust if we're at the end of the line so the window still
                // spans a full snippetLength characters.
                if (snippetEnd === line.length && line.length > snippetLength) {
                  snippetStart = Math.max(0, line.length - snippetLength);
                }

                snippet = line.substring(snippetStart, snippetEnd);
                // matchStart becomes relative to the truncated snippet.
                matchStart = columnIndex - snippetStart;
              }

              matches.push({
                path: file.path,
                line: lineIndex + 1, // 1-indexed
                column: columnIndex + 1, // 1-indexed
                snippet: snippet,
                matchRanges: [{
                  start: matchStart,
                  end: matchStart + matchText.length
                }]
              });

              filesWithMatches.add(file.path);

              // Prevent infinite loop for zero-width matches: exec() would
              // otherwise keep matching at the same position forever.
              if (match[0].length === 0) {
                searchPattern.lastIndex++;
              }
            }
          }
        } catch (error) {
          // Skip files that can't be read; log and continue with the rest.
          console.error(`Failed to search file ${file.path}:`, error);
        }
      }

      const result: SearchResult = {
        query,
        isRegex,
        matches,
        totalMatches: matches.length,
        filesSearched,
        filesWithMatches: filesWithMatches.size
      };

      return {
        content: [{
          type: "text",
          text: JSON.stringify(result, null, 2)
        }]
      };
    } catch (error) {
      return {
        content: [{
          type: "text",
          text: `Search error: ${(error as Error).message}`
        }],
        isError: true
      };
    }
  }
|
|
|
|
async searchWaypoints(folder?: string): Promise<CallToolResult> {
|
|
try {
|
|
const waypoints = await SearchUtils.searchWaypoints(this.app, folder);
|
|
|
|
const result: WaypointSearchResult = {
|
|
waypoints,
|
|
totalWaypoints: waypoints.length,
|
|
filesSearched: this.app.vault.getMarkdownFiles().filter(file => {
|
|
if (!folder) return true;
|
|
const folderPath = folder.endsWith('/') ? folder : folder + '/';
|
|
return file.path.startsWith(folderPath) || file.path === folder;
|
|
}).length
|
|
};
|
|
|
|
return {
|
|
content: [{
|
|
type: "text",
|
|
text: JSON.stringify(result, null, 2)
|
|
}]
|
|
};
|
|
} catch (error) {
|
|
return {
|
|
content: [{
|
|
type: "text",
|
|
text: `Waypoint search error: ${(error as Error).message}`
|
|
}],
|
|
isError: true
|
|
};
|
|
}
|
|
}
|
|
|
|
async getFolderWaypoint(path: string): Promise<CallToolResult> {
|
|
try {
|
|
// Normalize and validate path
|
|
const normalizedPath = PathUtils.normalizePath(path);
|
|
|
|
// Resolve file
|
|
const file = PathUtils.resolveFile(this.app, normalizedPath);
|
|
if (!file) {
|
|
return {
|
|
content: [{
|
|
type: "text",
|
|
text: ErrorMessages.fileNotFound(normalizedPath)
|
|
}],
|
|
isError: true
|
|
};
|
|
}
|
|
|
|
// Read file content
|
|
const content = await this.app.vault.read(file);
|
|
|
|
// Extract waypoint block
|
|
const waypointBlock = WaypointUtils.extractWaypointBlock(content);
|
|
|
|
const result: FolderWaypointResult = {
|
|
path: file.path,
|
|
hasWaypoint: waypointBlock.hasWaypoint,
|
|
waypointRange: waypointBlock.waypointRange,
|
|
links: waypointBlock.links,
|
|
rawContent: waypointBlock.rawContent
|
|
};
|
|
|
|
return {
|
|
content: [{
|
|
type: "text",
|
|
text: JSON.stringify(result, null, 2)
|
|
}]
|
|
};
|
|
} catch (error) {
|
|
return {
|
|
content: [{
|
|
type: "text",
|
|
text: `Get folder waypoint error: ${(error as Error).message}`
|
|
}],
|
|
isError: true
|
|
};
|
|
}
|
|
}
|
|
|
|
async isFolderNote(path: string): Promise<CallToolResult> {
|
|
try {
|
|
// Normalize and validate path
|
|
const normalizedPath = PathUtils.normalizePath(path);
|
|
|
|
// Resolve file
|
|
const file = PathUtils.resolveFile(this.app, normalizedPath);
|
|
if (!file) {
|
|
return {
|
|
content: [{
|
|
type: "text",
|
|
text: ErrorMessages.fileNotFound(normalizedPath)
|
|
}],
|
|
isError: true
|
|
};
|
|
}
|
|
|
|
// Check if it's a folder note
|
|
const folderNoteInfo = await WaypointUtils.isFolderNote(this.app, file);
|
|
|
|
const result: FolderNoteResult = {
|
|
path: file.path,
|
|
isFolderNote: folderNoteInfo.isFolderNote,
|
|
reason: folderNoteInfo.reason,
|
|
folderPath: folderNoteInfo.folderPath
|
|
};
|
|
|
|
return {
|
|
content: [{
|
|
type: "text",
|
|
text: JSON.stringify(result, null, 2)
|
|
}]
|
|
};
|
|
} catch (error) {
|
|
return {
|
|
content: [{
|
|
type: "text",
|
|
text: `Is folder note error: ${(error as Error).message}`
|
|
}],
|
|
isError: true
|
|
};
|
|
}
|
|
}
|
|
|
|
/**
|
|
* Validate all wikilinks in a note
|
|
* Reports resolved and unresolved links with suggestions
|
|
*/
|
|
async validateWikilinks(path: string): Promise<CallToolResult> {
|
|
try {
|
|
// Normalize and validate path
|
|
const normalizedPath = PathUtils.normalizePath(path);
|
|
|
|
// Get file using adapter
|
|
const file = this.vault.getAbstractFileByPath(normalizedPath);
|
|
if (!file || !(file instanceof TFile)) {
|
|
return {
|
|
content: [{
|
|
type: "text",
|
|
text: ErrorMessages.fileNotFound(normalizedPath)
|
|
}],
|
|
isError: true
|
|
};
|
|
}
|
|
|
|
// Read file content
|
|
const content = await this.vault.read(file);
|
|
|
|
// Parse wikilinks
|
|
const wikilinks = LinkUtils.parseWikilinks(content);
|
|
|
|
const resolvedLinks: any[] = [];
|
|
const unresolvedLinks: any[] = [];
|
|
|
|
for (const link of wikilinks) {
|
|
const resolvedFile = this.metadata.getFirstLinkpathDest(link.target, normalizedPath);
|
|
|
|
if (resolvedFile) {
|
|
resolvedLinks.push({
|
|
text: link.raw,
|
|
target: resolvedFile.path,
|
|
alias: link.alias
|
|
});
|
|
} else {
|
|
// Find suggestions (need to implement locally)
|
|
const suggestions = this.findLinkSuggestions(link.target);
|
|
unresolvedLinks.push({
|
|
text: link.raw,
|
|
line: link.line,
|
|
suggestions
|
|
});
|
|
}
|
|
}
|
|
|
|
const result: ValidateWikilinksResult = {
|
|
path: normalizedPath,
|
|
totalLinks: resolvedLinks.length + unresolvedLinks.length,
|
|
resolvedLinks,
|
|
unresolvedLinks
|
|
};
|
|
|
|
return {
|
|
content: [{
|
|
type: "text",
|
|
text: JSON.stringify(result, null, 2)
|
|
}]
|
|
};
|
|
} catch (error) {
|
|
return {
|
|
content: [{
|
|
type: "text",
|
|
text: `Validate wikilinks error: ${(error as Error).message}`
|
|
}],
|
|
isError: true
|
|
};
|
|
}
|
|
}
|
|
|
|
/**
|
|
* Find potential matches for an unresolved link
|
|
*/
|
|
private findLinkSuggestions(linkText: string, maxSuggestions: number = 5): string[] {
|
|
const allFiles = this.vault.getMarkdownFiles();
|
|
const suggestions: Array<{ path: string; score: number }> = [];
|
|
|
|
// Remove heading/block references for matching
|
|
const cleanLinkText = linkText.split('#')[0].split('^')[0].toLowerCase();
|
|
|
|
for (const file of allFiles) {
|
|
const fileName = file.basename.toLowerCase();
|
|
const filePath = file.path.toLowerCase();
|
|
|
|
// Calculate similarity score
|
|
let score = 0;
|
|
|
|
// Exact basename match (highest priority)
|
|
if (fileName === cleanLinkText) {
|
|
score = 1000;
|
|
}
|
|
// Basename contains link text
|
|
else if (fileName.includes(cleanLinkText)) {
|
|
score = 500 + (cleanLinkText.length / fileName.length) * 100;
|
|
}
|
|
// Path contains link text
|
|
else if (filePath.includes(cleanLinkText)) {
|
|
score = 250 + (cleanLinkText.length / filePath.length) * 100;
|
|
}
|
|
// Levenshtein-like: count matching characters
|
|
else {
|
|
let matchCount = 0;
|
|
for (const char of cleanLinkText) {
|
|
if (fileName.includes(char)) {
|
|
matchCount++;
|
|
}
|
|
}
|
|
score = (matchCount / cleanLinkText.length) * 100;
|
|
}
|
|
|
|
if (score > 0) {
|
|
suggestions.push({ path: file.path, score });
|
|
}
|
|
}
|
|
|
|
// Sort by score (descending) and return top N
|
|
suggestions.sort((a, b) => b.score - a.score);
|
|
return suggestions.slice(0, maxSuggestions).map(s => s.path);
|
|
}
|
|
|
|
/**
|
|
* Resolve a single wikilink from a source note
|
|
* Returns the target path if resolvable, or suggestions if not
|
|
*/
|
|
async resolveWikilink(sourcePath: string, linkText: string): Promise<CallToolResult> {
|
|
try {
|
|
// Normalize and validate source path
|
|
const normalizedPath = PathUtils.normalizePath(sourcePath);
|
|
|
|
// Get source file using adapter
|
|
const file = this.vault.getAbstractFileByPath(normalizedPath);
|
|
if (!file || !(file instanceof TFile)) {
|
|
return {
|
|
content: [{
|
|
type: "text",
|
|
text: ErrorMessages.fileNotFound(normalizedPath)
|
|
}],
|
|
isError: true
|
|
};
|
|
}
|
|
|
|
// Try to resolve the link using metadata cache adapter
|
|
const resolvedFile = this.metadata.getFirstLinkpathDest(linkText, normalizedPath);
|
|
|
|
const result: ResolveWikilinkResult = {
|
|
sourcePath: normalizedPath,
|
|
linkText,
|
|
resolved: resolvedFile !== null,
|
|
targetPath: resolvedFile?.path
|
|
};
|
|
|
|
// If not resolved, provide suggestions
|
|
if (!resolvedFile) {
|
|
result.suggestions = this.findLinkSuggestions(linkText);
|
|
}
|
|
|
|
return {
|
|
content: [{
|
|
type: "text",
|
|
text: JSON.stringify(result, null, 2)
|
|
}]
|
|
};
|
|
} catch (error) {
|
|
return {
|
|
content: [{
|
|
type: "text",
|
|
text: `Resolve wikilink error: ${(error as Error).message}`
|
|
}],
|
|
isError: true
|
|
};
|
|
}
|
|
}
|
|
|
|
/**
|
|
* Get all backlinks to a note
|
|
* Optionally includes unlinked mentions
|
|
*/
|
|
async getBacklinks(
|
|
path: string,
|
|
includeUnlinked: boolean = false,
|
|
includeSnippets: boolean = true
|
|
): Promise<CallToolResult> {
|
|
try {
|
|
// Normalize and validate path
|
|
const normalizedPath = PathUtils.normalizePath(path);
|
|
|
|
// Get target file using adapter
|
|
const targetFile = this.vault.getAbstractFileByPath(normalizedPath);
|
|
if (!targetFile || !(targetFile instanceof TFile)) {
|
|
return {
|
|
content: [{
|
|
type: "text",
|
|
text: ErrorMessages.fileNotFound(normalizedPath)
|
|
}],
|
|
isError: true
|
|
};
|
|
}
|
|
|
|
// Get target file's basename for matching
|
|
const targetBasename = targetFile.basename;
|
|
|
|
// Get all backlinks from MetadataCache using resolvedLinks
|
|
const resolvedLinks = this.metadata.resolvedLinks;
|
|
const backlinks: any[] = [];
|
|
|
|
// Find all files that link to our target
|
|
for (const [sourcePath, links] of Object.entries(resolvedLinks)) {
|
|
// Check if this source file links to our target
|
|
if (!links[normalizedPath]) {
|
|
continue;
|
|
}
|
|
|
|
const sourceFile = this.vault.getAbstractFileByPath(sourcePath);
|
|
if (!(sourceFile instanceof TFile)) {
|
|
continue;
|
|
}
|
|
|
|
// Read the source file to find link occurrences
|
|
const content = await this.vault.read(sourceFile);
|
|
const lines = content.split('\n');
|
|
const occurrences: any[] = [];
|
|
|
|
// Parse wikilinks in the source file to find references to target
|
|
const wikilinks = LinkUtils.parseWikilinks(content);
|
|
|
|
for (const link of wikilinks) {
|
|
// Resolve this link to see if it points to our target
|
|
const resolvedFile = this.metadata.getFirstLinkpathDest(link.target, sourcePath);
|
|
|
|
if (resolvedFile && resolvedFile.path === normalizedPath) {
|
|
const snippet = includeSnippets ? this.extractSnippet(lines, link.line - 1, 100) : '';
|
|
occurrences.push({
|
|
line: link.line,
|
|
snippet
|
|
});
|
|
}
|
|
}
|
|
|
|
if (occurrences.length > 0) {
|
|
backlinks.push({
|
|
sourcePath,
|
|
type: 'linked',
|
|
occurrences
|
|
});
|
|
}
|
|
}
|
|
|
|
// Process unlinked mentions if requested
|
|
if (includeUnlinked) {
|
|
const allFiles = this.vault.getMarkdownFiles();
|
|
|
|
// Build a set of files that already have linked backlinks
|
|
const linkedSourcePaths = new Set(backlinks.map(b => b.sourcePath));
|
|
|
|
for (const file of allFiles) {
|
|
// Skip if already in linked backlinks
|
|
if (linkedSourcePaths.has(file.path)) {
|
|
continue;
|
|
}
|
|
|
|
// Skip the target file itself
|
|
if (file.path === normalizedPath) {
|
|
continue;
|
|
}
|
|
|
|
const content = await this.vault.read(file);
|
|
const lines = content.split('\n');
|
|
const occurrences: any[] = [];
|
|
|
|
// Search for unlinked mentions of the target basename
|
|
for (let i = 0; i < lines.length; i++) {
|
|
const line = lines[i];
|
|
|
|
// Use word boundary regex to find whole word matches
|
|
const regex = new RegExp(`\\b${this.escapeRegex(targetBasename)}\\b`, 'gi');
|
|
|
|
if (regex.test(line)) {
|
|
const snippet = includeSnippets ? this.extractSnippet(lines, i, 100) : '';
|
|
occurrences.push({
|
|
line: i + 1, // 1-indexed
|
|
snippet
|
|
});
|
|
}
|
|
}
|
|
|
|
if (occurrences.length > 0) {
|
|
backlinks.push({
|
|
sourcePath: file.path,
|
|
type: 'unlinked',
|
|
occurrences
|
|
});
|
|
}
|
|
}
|
|
}
|
|
|
|
const result: BacklinksResult = {
|
|
path: normalizedPath,
|
|
backlinks,
|
|
totalBacklinks: backlinks.length
|
|
};
|
|
|
|
return {
|
|
content: [{
|
|
type: "text",
|
|
text: JSON.stringify(result, null, 2)
|
|
}]
|
|
};
|
|
} catch (error) {
|
|
return {
|
|
content: [{
|
|
type: "text",
|
|
text: `Get backlinks error: ${(error as Error).message}`
|
|
}],
|
|
isError: true
|
|
};
|
|
}
|
|
}
|
|
|
|
/**
|
|
* Extract a snippet of text around a specific line
|
|
*/
|
|
private extractSnippet(lines: string[], lineIndex: number, maxLength: number): string {
|
|
const line = lines[lineIndex] || '';
|
|
|
|
// If line is short enough, return it as-is
|
|
if (line.length <= maxLength) {
|
|
return line;
|
|
}
|
|
|
|
// Truncate and add ellipsis
|
|
const half = Math.floor(maxLength / 2);
|
|
return line.substring(0, half) + '...' + line.substring(line.length - half);
|
|
}
|
|
|
|
  /**
   * Escape special regex characters so an arbitrary string can be embedded
   * in a RegExp source as a literal match (used for basename matching in
   * getBacklinks).
   */
  private escapeRegex(str: string): string {
    return str.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
  }
|
|
}
|