diff --git a/src/tokenize/markWord.ts b/src/tokenize/markWord.ts
index 8f07918..4f8dc75 100644
--- a/src/tokenize/markWord.ts
+++ b/src/tokenize/markWord.ts
@@ -1,11 +1,16 @@
 import {flatMap} from 'lodash';
 import {TokenizeEnhancer, TokenPath} from './interface';
-import {leafOf, replace} from './utils';
+import {leafOf, replace, isTextNode} from './utils';
 
 function markInPaths(word: string, name: string, replacement: string) {
-    return (paths: TokenPath[]) => flatMap(
-        paths,
-        path => {
+    return (paths: TokenPath[]) =>
+        flatMap(paths, path => {
+            const leafNode = path.at(-1);
+
+            if (leafNode && !isTextNode(leafNode)) {
+                return [path];
+            }
+
             const leaf = leafOf(path);
 
             if (!leaf.value.includes(word)) {
@@ -29,7 +34,7 @@ function markInPaths(word: string, name: string, replacement: string) {
                 []
             );
         }
-    );
+        );
 }
 
 export default function markWord(word: string, name: string, replacement = word): TokenizeEnhancer {
diff --git a/src/tokenize/utils.ts b/src/tokenize/utils.ts
index a2f8a17..659978e 100644
--- a/src/tokenize/utils.ts
+++ b/src/tokenize/utils.ts
@@ -12,7 +12,7 @@
 export function wrap(path: TokenPath, parent: ProcessingNode): TokenPath {
     return [parent, ...clone(path)];
 }
 
-function isTextNode(node: ProcessingNode): node is TextNode {
+export function isTextNode(node: ProcessingNode): node is TextNode {
     return node.type === 'text';
 }