2 changes: 2 additions & 0 deletions src/core/render/compiler.js
@@ -15,6 +15,7 @@ import { taskListCompiler } from './compiler/taskList.js';
import { taskListItemCompiler } from './compiler/taskListItem.js';
import { linkCompiler } from './compiler/link.js';
import { compileMedia } from './compiler/media.js';
import { tableCellCompiler } from './compiler/tableCell.js';

const cachedLinks = {};

@@ -176,6 +177,7 @@ export class Compiler {
origin.image = imageCompiler({ renderer, contentBase, router });
origin.list = taskListCompiler({ renderer });
origin.listitem = taskListItemCompiler({ renderer });
origin.tablecell = tableCellCompiler({ renderer, compiler: this });

renderer.origin = origin;

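A brief aside on the registration pattern used here (a minimal sketch with stand-in objects, not docsify's actual modules): tableCellCompiler is an arrow function whose body is an assignment expression, so it installs the override on the passed renderer and returns that same function, which is what origin.tablecell then stores.

    // Minimal sketch of the pattern; `renderer` and `origin` are stand-ins, not docsify's real instances.
    const renderer = {};
    const origin = {};

    // Arrow body is an assignment expression: it mutates `renderer` and returns the assigned function.
    const tableCellCompiler = ({ renderer }) =>
      (renderer.tablecell = function (token) {
        /* ... */
      });

    origin.tablecell = tableCellCompiler({ renderer });

    console.log(origin.tablecell === renderer.tablecell); // true — both point at the same override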
15 changes: 15 additions & 0 deletions src/core/render/compiler/tableCell.js
@@ -0,0 +1,15 @@
export const tableCellCompiler = ({ renderer }) =>
(renderer.tablecell = function (token) {
let content;

if (token.embedTokens && token.embedTokens.length > 0) {
content = this.parser.parse(token.embedTokens);
} else {
content = this.parser.parseInline(token.tokens);
}

const type = token.header ? 'th' : 'td';
const tag = token.align ? `<${type} align="${token.align}">` : `<${type}>`;

return tag + content + `</${type}>\n`;
});
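To see what the new compiler produces, here is a hedged, self-contained sketch that stubs out the parser instead of using marked's real one; the token shapes mirror what a tablecell renderer receives, and all values are made up:

    // Illustrative sketch only: stubbed parser, made-up tokens; path is relative to the repo root.
    import { tableCellCompiler } from './src/core/render/compiler/tableCell.js';

    const renderer = {};
    tableCellCompiler({ renderer }); // installs renderer.tablecell

    // Stubbed parser: inline parsing for ordinary cells, block parsing for embedded content.
    renderer.parser = {
      parseInline: tokens => tokens.map(t => t.raw).join(''),
      parse: tokens => tokens.map(t => t.raw).join(''),
    };

    const plainCell = { header: false, align: 'center', tokens: [{ raw: 'hello' }] };
    console.log(renderer.tablecell(plainCell));
    // -> <td align="center">hello</td>

    const embeddedCell = {
      header: false,
      align: null,
      tokens: [{ raw: "[notes](notes.md ':include')" }],
      embedTokens: [{ raw: '<p>embedded notes</p>' }], // attached by prerenderEmbed (see embed.js below)
    };
    console.log(renderer.tablecell(embeddedCell));
    // -> <td><p>embedded notes</p></td>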
88 changes: 63 additions & 25 deletions src/core/render/embed.js
@@ -98,7 +98,14 @@ function walkFetchEmbed({ embedTokens, compile, fetch }, cb) {
}
}

cb({ token: currentToken, embedToken });
cb({
token: currentToken,
embedToken,
rowIndex: currentToken.rowIndex,
cellIndex: currentToken.cellIndex,
tokenRef: currentToken.tokenRef,
});

if (++count >= embedTokens.length) {
cb({});
}
@@ -126,51 +133,82 @@ export function prerenderEmbed({ compiler, raw = '', fetch }, done) {
const linkRE = compile.Lexer.rules.inline.normal.link;
const links = tokens.links;

const linkMatcher = new RegExp(linkRE.source, 'g');

tokens.forEach((token, index) => {
if (token.type === 'paragraph') {
token.text = token.text.replace(
new RegExp(linkRE.source, 'g'),
linkMatcher,
(src, filename, href, title) => {
const embed = compiler.compileEmbed(href, title);

if (embed) {
embedTokens.push({
index,
tokenRef: token,
embed,
});
}

return src;
},
);
} else if (token.type === 'table') {
token.rows.forEach((row, rowIndex) => {
row.forEach((cell, cellIndex) => {
cell.text = cell.text.replace(
linkMatcher,
(src, filename, href, title) => {
const embed = compiler.compileEmbed(href, title);
if (embed) {
embedTokens.push({
index,
tokenRef: token,
rowIndex,
cellIndex,
embed,
});
}
return src;
},
);
});
});
}
});

// keep track of which tokens have been embedded so far
// so that we know where to insert the embedded tokens as they
// are returned
const moves = [];
walkFetchEmbed({ compile, embedTokens, fetch }, ({ embedToken, token }) => {
if (token) {
// iterate through the array of previously inserted tokens
// to determine where the current embedded tokens should be inserted
let index = token.index;
moves.forEach(pos => {
if (index > pos.start) {
index += pos.length;
}
});
walkFetchEmbed(
{ compile, embedTokens, fetch },
({ embedToken, token, rowIndex, cellIndex, tokenRef }) => {
if (token) {
if (typeof rowIndex === 'number' && typeof cellIndex === 'number') {
const cell = tokenRef.rows[rowIndex][cellIndex];

cell.embedTokens = embedToken;
} else {
// iterate through the array of previously inserted tokens
// to determine where the current embedded tokens should be inserted
let index = token.index;
moves.forEach(pos => {
if (index > pos.start) {
index += pos.length;
}
});

Object.assign(links, embedToken.links);
Object.assign(links, embedToken.links);

tokens = tokens
.slice(0, index)
.concat(embedToken, tokens.slice(index + 1));
moves.push({ start: index, length: embedToken.length - 1 });
} else {
cached[raw] = tokens.concat();
tokens.links = cached[raw].links = links;
done(tokens);
}
});
tokens = tokens
.slice(0, index)
.concat(embedToken, tokens.slice(index + 1));
moves.push({ start: index, length: embedToken.length - 1 });
}
} else {
cached[raw] = tokens.concat();
tokens.links = cached[raw].links = links;
done(tokens);
}
},
);
}
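For orientation, a hedged sketch of the bookkeeping the new branches maintain. All literal values and the { type, url } shape of the embed object below are illustrative assumptions, not output from a real run: a paragraph embed is still spliced into the top-level token stream at its recorded index, while a table-cell embed records rowIndex/cellIndex and a tokenRef so walkFetchEmbed's callback can hand them back, and the fetched tokens are attached to the cell as embedTokens for the tablecell compiler above to render.

    // Illustrative sketch only: made-up tokens and an assumed { type, url } shape for `embed`.
    const paragraphToken = { type: 'paragraph', text: "[notes](_media/notes.md ':include')" };
    const tableToken = {
      type: 'table',
      rows: [[{ text: "[setup](_media/install.md ':include')" }, { text: 'plain cell' }]],
    };

    const embedTokens = [
      // Paragraph embed: once fetched, the compiled tokens replace the paragraph token at `index`.
      { index: 2, tokenRef: paragraphToken, embed: { type: 'markdown', url: '_media/notes.md' } },
      // Table-cell embed: once fetched, the compiled tokens are attached to
      // tokenRef.rows[rowIndex][cellIndex].embedTokens and the table token stays in place.
      {
        index: 5,
        tokenRef: tableToken,
        rowIndex: 0,
        cellIndex: 0,
        embed: { type: 'markdown', url: '_media/install.md' },
      },
    ];

    console.log(embedTokens.length); // 2 entries: one paragraph target, one table-cell target

Keeping the table token in place and patching the individual cell, rather than splicing like the paragraph path does, presumably avoids flattening multi-token embedded content into the table's rows structure.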