import { parseURI } from 'lbry-redux';
import visit from 'unist-util-visit';
// Scheme prefix shared by every lbry link.
const protocol = 'lbry://';

// remark locator: index of the next candidate lbry url inside `text`.
const locateURI = (text, start) => text.indexOf(protocol, start);

// remark locator: index of the next candidate channel mention inside `text`.
const locateMention = (text, start) => text.indexOf('@', start);

// Build a valid mdast "link" node for a lbry uri.
//   text      – link label shown to the reader.
//   uri       – target, with or without the lbry:// prefix (added when missing).
//   autoEmbed – emitted as the `data-preview` attribute so the renderer
//               knows whether to inline-embed the target.
const createURI = (text, uri, autoEmbed = false) => {
  const url = uri.startsWith(protocol) ? uri : `${protocol}${uri}`;
  return {
    type: 'link',
    url,
    data: {
      // Custom attribute picked up by the HTML renderer.
      hProperties: { 'data-preview': autoEmbed },
    },
    children: [{ type: 'text', value: text }],
  };
};
|
|
|
|
|
// Try to turn a regex match into an eaten markdown link node.
// Returns the result of `eat(...)` on success, or undefined when the match
// is absent or `parseURI` rejects the text (remark treats undefined as
// "no token here").
const validateURI = (match, eat) => {
  if (!match) {
    return;
  }

  const text = match[0];

  try {
    const uri = parseURI(text);
    const isBareChannel = uri.isChannel && !uri.path;

    if (isBareChannel) {
      // Bare channel: label with the claim name, no auto-embed.
      return eat(text)(createURI(uri.claimName, text, false));
    }

    // Full uri: keep the raw text as the label and auto-embed it.
    return eat(text)(createURI(text, text, true));
  } catch (err) {
    // Invalid uri — deliberately swallowed so the text stays plain.
  }
};
|
// remark inline tokenizer: turn a leading "@name" channel mention into a
// markdown link node.
// NOTE(review): `silent` is accepted but ignored — remark convention is to
// return true without consuming in silent mode; confirm before relying on it.
function tokenizeMention(eat, value, silent) {
  const mentionPattern = /^@+[a-zA-Z0-9-#:/]+/;
  return validateURI(mentionPattern.exec(value), eat);
}
|
|
|
|
|
|
|
|
// remark inline tokenizer: turn a leading "lbry://..." url into a markdown
// link node.
// NOTE(review): `silent` is accepted but ignored — remark convention is to
// return true without consuming in silent mode; confirm before relying on it.
function tokenizeURI(eat, value, silent) {
  const uriPattern = /^(lbry:\/\/)+[a-zA-Z0-9-@#:/]+/;
  return validateURI(uriPattern.exec(value), eat);
}
|
|
|
|
|
// Configure the lbry url tokenizer: where to look for candidates, and the
// contexts (lists, links, blocks) in which it must not run.
Object.assign(tokenizeURI, {
  locator: locateURI,
  notInList: true,
  notInLink: true,
  notInBlock: true,
});

// Same configuration for the channel-mention tokenizer.
Object.assign(tokenizeMention, {
  locator: locateMention,
  notInList: true,
  notInLink: true,
  notInBlock: true,
});
|
|
|
|
|
// unist visitor: mark plain lbry links inside paragraphs for auto-embed by
// attaching a `data-preview` hProperty (unless node data already exists).
const visitor = (node, index, parent) => {
  const isParagraphLink = node.type === 'link' && parent && parent.type === 'paragraph';
  if (!isParagraphLink) {
    return;
  }

  try {
    const url = parseURI(node.url);

    // Bare channel links are left alone; anything else gets auto-embedded.
    const isBareChannel = url.isChannel && !url.path;
    if (!isBareChannel && !node.data) {
      node.data = {
        // Custom attribute picked up by the HTML renderer.
        hProperties: { 'data-preview': true },
      };
    }
  } catch (err) {
    // Not a valid lbry uri — leave the node untouched.
  }
};
|
|
|
|
|
|
|
|
// Transformer: walk every "link" node in the tree with `visitor`.
function transform(tree) {
  visit(tree, ['link'], visitor);
}
|
|
|
|
|
|
|
|
// Remark plugin ("attacher") whose transformer tags standalone lbry links
// for preview embedding.
// NOTE(review): "formated" is a typo ("formatted"), but the name is part of
// the public export surface — keep it for compatibility with callers.
export const formatedLinks = () => transform;
|
|
|
|
|
// Main module — remark plugin that registers the lbry inline tokenizers.
// Must be invoked with `this` bound to the remark processor (remark's
// `.use()` does this), since it patches `this.Parser`'s prototype.
export function inlineLinks() {
  const { inlineTokenizers, inlineMethods } = this.Parser.prototype;

  // Register the tokenizers under their method names.
  inlineTokenizers.uri = tokenizeURI;
  inlineTokenizers.mention = tokenizeMention;

  // Run both just before the built-in `text` tokenizer
  // (resulting order: ..., uri, mention, text).
  for (const name of ['uri', 'mention']) {
    inlineMethods.splice(inlineMethods.indexOf('text'), 0, name);
  }
}
|