Info diagram to show the version of Mermaid; bug fixes for the gantt chart

Author: Knut Sveidqvist, 2019-06-15 14:29:26 +02:00
Commit: 5667694652 (parent: 4cb0b9174b)
12 changed files with 874 additions and 11 deletions

dist/info.html (vendored, new file, 41 additions)

@@ -0,0 +1,41 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<title>Mermaid Quick Test Page</title>
<link rel="icon" type="image/png" href="data:image/png;base64,iVBORw0KGgo=">
</head>
<body>
<div class="mermaid">info
showInfo
</div>
<script src="./mermaid.js"></script>
<script>
mermaid.initialize({
theme: 'forest',
// themeCSS: '.node rect { fill: red; }',
logLevel: 1,
flowchart: { curve: 'linear' },
gantt: { axisFormat: '%m/%d/%Y' },
sequence: { actorMargin: 50 },
// sequenceDiagram: { actorMargin: 300 } // deprecated
});
</script>
<script>
function ganttTestClick(a, b, c){
console.log("a:", a)
console.log("b:", b)
console.log("c:", c)
}
function testClick(nodeId) {
console.log("clicked", nodeId)
var originalBgColor = document.querySelector('body').style.backgroundColor
document.querySelector('body').style.backgroundColor = 'yellow'
setTimeout(function() {
document.querySelector('body').style.backgroundColor = originalBgColor
}, 100)
}
</script>
</body>
</html>
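
The page relies on mermaid's default startOnLoad behaviour to pick up the .mermaid div. For comparison, and as a sketch only (not part of the commit), the same rendering can be triggered on demand from a script block; mermaid.init and the startOnLoad flag are existing APIs, while the listener and selector here are illustrative.

// Illustrative only: render .mermaid elements explicitly instead of at page load
mermaid.startOnLoad = false
mermaid.initialize({ theme: 'forest' })
document.addEventListener('DOMContentLoaded', function () {
  mermaid.init(undefined, document.querySelectorAll('.mermaid'))
})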

Binary image file added (6.7 KiB), not shown here.

e2e/spec/info.spec.js (new file, 15 additions)

@@ -0,0 +1,15 @@
/* eslint-env jest */
import { imgSnapshotTest } from '../helpers/util.js'
const { toMatchImageSnapshot } = require('jest-image-snapshot')
expect.extend({ toMatchImageSnapshot })
describe('Info diagram', () => {
it('should render a simple info diagram', async () => {
await imgSnapshotTest(page, `
info
showInfo
`,
{})
})
})
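
The imgSnapshotTest helper itself is not part of this commit. In general terms, an image-snapshot test with Puppeteer plus jest-image-snapshot follows the pattern sketched below; the page URL, element handling and function name are assumptions, not the project's actual e2e/helpers/util.js.

/* General pattern only, not the real helper: render in the browser page, screenshot, compare */
const imageSnapshotPattern = async (page, definition, options = {}) => {
  await page.goto('http://localhost:9000/e2e.html') // assumed test page that loads mermaid
  await page.evaluate((def, opts) => {
    const container = document.createElement('div')
    container.className = 'mermaid'
    container.textContent = def
    document.body.appendChild(container)
    window.mermaid.initialize(opts)
    window.mermaid.init(undefined, container) // the mermaid global comes from the loaded page
  }, definition, options)
  const image = await page.screenshot({ fullPage: true })
  expect(image).toMatchImageSnapshot() // provided by jest-image-snapshot via expect.extend
}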

ganttRenderer.js (gantt renderer, task class fix)

@@ -165,24 +165,22 @@ export const draw = function (text, id) {
}
}
let taskClass = classStr
let taskClass = ''
if (d.active) {
if (d.crit) {
taskClass += ' activeCrit'
} else {
taskClass = 'active'
taskClass = ' active'
}
} else if (d.done) {
if (d.crit) {
taskClass = ' doneCrit'
} else {
taskClass = 'done'
taskClass = ' done'
}
}
if (d.crit) {
if (taskClass.length > 0) {
taskClass += 'crit'
} else {
if (d.crit) {
taskClass += ' crit'
}
}
@@ -191,11 +189,13 @@ export const draw = function (text, id) {
}
if (d.milestone) {
taskClass = ' milestone' + taskClass
taskClass = ' milestone ' + taskClass
}
taskClass += secNum
taskClass += ' ' + classStr
return res + taskClass
})
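
Reading the hunks above, the pre-fix code started taskClass from classStr but then overwrote it with strings like 'active' that lack a leading space, so the user-supplied classes were lost and the result fused into class names like 'taskactive0'; the fix keeps every fragment space prefixed and re-appends classStr at the end. Condensing the updated logic into a standalone function makes this easier to see; the function name and sample task below are illustrative only, not part of the commit.

// Illustrative sketch of the updated class-string logic from the hunks above
function buildTaskClass (d, secNum, classStr) {
  const res = 'task'
  let taskClass = ''
  if (d.active) {
    taskClass = d.crit ? ' activeCrit' : ' active'
  } else if (d.done) {
    taskClass = d.crit ? ' doneCrit' : ' done'
  } else if (d.crit) {
    taskClass += ' crit'
  }
  // (unchanged lines between the two hunks are elided here)
  if (d.milestone) {
    taskClass = ' milestone ' + taskClass
  }
  taskClass += secNum
  taskClass += ' ' + classStr
  return res + taskClass
}

// An active, critical task in section 0 with a user class now yields
// 'task activeCrit0 myClass': space separated, so every CSS rule still applies
console.log(buildTaskClass({ active: true, crit: true }, 0, 'myClass'))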

Info parser spec (new file)

@@ -0,0 +1,15 @@
/* eslint-env jasmine */
describe('when parsing an info graph it', function () {
var ex
beforeEach(function () {
ex = require('./parser/info').parser
ex.yy = require('./infoDb')
})
it('should handle an info definition', function () {
var str = `info
showInfo`
ex.parse(str)
})
})

infoDb.js (new file)

@@ -0,0 +1,36 @@
/**
* Created by knut on 15-01-14.
*/
import { logger } from '../../logger'
var message = ''
var info = false
export const setMessage = txt => {
logger.debug('Setting message to: ' + txt)
message = txt
}
export const getMessage = () => {
return message
}
export const setInfo = inf => {
info = inf
}
export const getInfo = () => {
return info
}
// export const parseError = (err, hash) => {
// global.mermaidAPI.parseError(err, hash)
// }
export default {
setMessage,
getMessage,
setInfo,
getInfo
// parseError
}
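
A quick illustration of the round trip through this module; the import path assumes a caller sitting next to infoDb.js, and the message text is made up.

// Illustrative usage only: exercises the new info DB in isolation
import infoDb from './infoDb'

infoDb.setInfo(true)
infoDb.setMessage('hello from the info diagram')
console.log(infoDb.getInfo()) // true
console.log(infoDb.getMessage()) // 'hello from the info diagram'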

infoRenderer.js (new file)

@@ -0,0 +1,57 @@
/**
* Created by knut on 14-12-11.
*/
import * as d3 from 'd3'
import db from './infoDb'
import infoParser from './parser/info.js'
import { logger } from '../../logger'
const conf = {
}
export const setConf = function (cnf) {
const keys = Object.keys(cnf)
keys.forEach(function (key) {
conf[key] = cnf[key]
})
}
/**
* Draws an info picture in the tag with id: id, based on the graph definition in txt.
* @param txt
* @param id
* @param ver version string shown in the diagram
*/
export const draw = (txt, id, ver) => {
try {
const parser = infoParser.parser
parser.yy = db
logger.debug('Rendering info diagram\n' + txt)
// Parse the graph definition
parser.parse(txt)
logger.debug('Parsed info diagram')
// Draw the version text into the diagram's svg element
const svg = d3.select('#' + id)
const g = svg.append('g')
g.append('text') // version label
.attr('x', 100)
.attr('y', 40)
.attr('class', 'version')
.attr('font-size', '32px')
.style('text-anchor', 'middle')
.text('v ' + ver)
svg.attr('height', 100)
svg.attr('width', 400)
// svg.attr('viewBox', '0 0 300 150');
} catch (e) {
logger.error('Error while rendering info diagram')
logger.error(e.message)
}
}
export default {
setConf,
draw
}
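
A sketch of how this renderer is driven; mermaidAPI does the equivalent later in this commit. The svg id and the version string below are made up, and an svg element with that id is assumed to exist in the DOM.

// Illustrative only: assumes <svg id="info-svg"></svg> is already in the page
import infoRenderer from './infoRenderer'

infoRenderer.setConf({})
// draws 'v 8.1.0'-style text into the svg; '8.1.0' stands in for pkg.version
infoRenderer.draw('info\n  showInfo', 'info-svg', '8.1.0')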

Info diagram grammar, jison source for parser/info.js (new file)

@@ -0,0 +1,48 @@
/** mermaid
* http://knsv.github.io/mermaid/
* (c) 2015 Knut Sveidqvist
* MIT license.
*/
%lex
%options case-insensitive
%{
// Pre-lexer code can go here
%}
%%
"info" return 'info' ;
[\s\n\r]+ return 'NL' ;
[\s]+ return 'space';
"showInfo" return 'showInfo';
<<EOF>> return 'EOF' ;
. return 'TXT' ;
/lex
%start start
%% /* language grammar */
start
// %{ : info document 'EOF' { return yy; } }
: info document 'EOF' { return yy; }
;
document
: /* empty */
| document line
;
line
: statement { }
| 'NL'
;
statement
: showInfo { yy.setInfo(true); }
;
%%
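
Wiring the generated parser to the DB above shows what a successful parse does: the statement rule fires yy.setInfo(true). The snippet mirrors the spec's setup; the trailing getInfo check is an illustrative addition.

// Illustrative only: drive the generated parser with the info DB as its yy context
const parser = require('./parser/info').parser
parser.yy = require('./infoDb')

parser.parse('info\n  showInfo\n')
console.log(parser.yy.getInfo()) // true, set by the `statement: showInfo` action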

parser/info.js, generated by jison (new file)

@@ -0,0 +1,629 @@
/* parser generated by jison 0.4.18 */
/*
Returns a Parser object of the following structure:
Parser: {
yy: {}
}
Parser.prototype: {
yy: {},
trace: function(),
symbols_: {associative list: name ==> number},
terminals_: {associative list: number ==> name},
productions_: [...],
performAction: function anonymous(yytext, yyleng, yylineno, yy, yystate, $$, _$),
table: [...],
defaultActions: {...},
parseError: function(str, hash),
parse: function(input),
lexer: {
EOF: 1,
parseError: function(str, hash),
setInput: function(input),
input: function(),
unput: function(str),
more: function(),
less: function(n),
pastInput: function(),
upcomingInput: function(),
showPosition: function(),
test_match: function(regex_match_array, rule_index),
next: function(),
lex: function(),
begin: function(condition),
popState: function(),
_currentRules: function(),
topState: function(),
pushState: function(condition),
options: {
ranges: boolean (optional: true ==> token location info will include a .range[] member)
flex: boolean (optional: true ==> flex-like lexing behaviour where the rules are tested exhaustively to find the longest match)
backtrack_lexer: boolean (optional: true ==> lexer regexes are tested in order and for each matching regex the action code is invoked; the lexer terminates the scan when a token is returned by the action code)
},
performAction: function(yy, yy_, $avoiding_name_collisions, YY_START),
rules: [...],
conditions: {associative list: name ==> set},
}
}
token location info (@$, _$, etc.): {
first_line: n,
last_line: n,
first_column: n,
last_column: n,
range: [start_number, end_number] (where the numbers are indexes into the input string, regular zero-based)
}
the parseError function receives a 'hash' object with these members for lexer and parser errors: {
text: (matched text)
token: (the produced terminal token, if any)
line: (yylineno)
}
while parser (grammar) errors will also provide these members, i.e. parser errors deliver a superset of attributes: {
loc: (yylloc)
expected: (string describing the set of expected tokens)
recoverable: (boolean: TRUE when the parser has a error recovery rule available for this particular error)
}
*/
var parser = (function(){
var o=function(k,v,o,l){for(o=o||{},l=k.length;l--;o[k[l]]=v);return o},$V0=[6,9,10,12];
var parser = {trace: function trace () { },
yy: {},
symbols_: {"error":2,"start":3,"info":4,"document":5,"EOF":6,"line":7,"statement":8,"NL":9,"showInfo":10,"message":11,"say":12,"TXT":13,"$accept":0,"$end":1},
terminals_: {2:"error",4:"info",6:"EOF",9:"NL",10:"showInfo",12:"say",13:"TXT"},
productions_: [0,[3,3],[5,0],[5,2],[7,1],[7,1],[8,1],[8,1],[11,2]],
performAction: function anonymous(yytext, yyleng, yylineno, yy, yystate /* action[1] */, $$ /* vstack */, _$ /* lstack */) {
/* this == yyval */
var $0 = $$.length - 1;
switch (yystate) {
case 1:
return yy;
break;
case 4:
break;
case 6:
yy.setInfo(true);
break;
case 7:
yy.setMessage($$[$0]);
break;
case 8:
this.$ = $$[$0-1].substring(1).trim().replace(/\\n/gm, "\n");
break;
}
},
table: [{3:1,4:[1,2]},{1:[3]},o($V0,[2,2],{5:3}),{6:[1,4],7:5,8:6,9:[1,7],10:[1,8],11:9,12:[1,10]},{1:[2,1]},o($V0,[2,3]),o($V0,[2,4]),o($V0,[2,5]),o($V0,[2,6]),o($V0,[2,7]),{13:[1,11]},o($V0,[2,8])],
defaultActions: {4:[2,1]},
parseError: function parseError (str, hash) {
if (hash.recoverable) {
this.trace(str);
} else {
var error = new Error(str);
error.hash = hash;
throw error;
}
},
parse: function parse(input) {
var self = this, stack = [0], tstack = [], vstack = [null], lstack = [], table = this.table, yytext = '', yylineno = 0, yyleng = 0, recovering = 0, TERROR = 2, EOF = 1;
var args = lstack.slice.call(arguments, 1);
var lexer = Object.create(this.lexer);
var sharedState = { yy: {} };
for (var k in this.yy) {
if (Object.prototype.hasOwnProperty.call(this.yy, k)) {
sharedState.yy[k] = this.yy[k];
}
}
lexer.setInput(input, sharedState.yy);
sharedState.yy.lexer = lexer;
sharedState.yy.parser = this;
if (typeof lexer.yylloc == 'undefined') {
lexer.yylloc = {};
}
var yyloc = lexer.yylloc;
lstack.push(yyloc);
var ranges = lexer.options && lexer.options.ranges;
if (typeof sharedState.yy.parseError === 'function') {
this.parseError = sharedState.yy.parseError;
} else {
this.parseError = Object.getPrototypeOf(this).parseError;
}
function popStack(n) {
stack.length = stack.length - 2 * n;
vstack.length = vstack.length - n;
lstack.length = lstack.length - n;
}
function lex() {
var token;
token = tstack.pop() || lexer.lex() || EOF;
if (typeof token !== 'number') {
if (token instanceof Array) {
tstack = token;
token = tstack.pop();
}
token = self.symbols_[token] || token;
}
return token;
}
var symbol, preErrorSymbol, state, action, a, r, yyval = {}, p, len, newState, expected;
while (true) {
state = stack[stack.length - 1];
if (this.defaultActions[state]) {
action = this.defaultActions[state];
} else {
if (symbol === null || typeof symbol == 'undefined') {
symbol = lex();
}
action = table[state] && table[state][symbol];
}
if (typeof action === 'undefined' || !action.length || !action[0]) {
var errStr = '';
expected = [];
for (p in table[state]) {
if (this.terminals_[p] && p > TERROR) {
expected.push('\'' + this.terminals_[p] + '\'');
}
}
if (lexer.showPosition) {
errStr = 'Parse error on line ' + (yylineno + 1) + ':\n' + lexer.showPosition() + '\nExpecting ' + expected.join(', ') + ', got \'' + (this.terminals_[symbol] || symbol) + '\'';
} else {
errStr = 'Parse error on line ' + (yylineno + 1) + ': Unexpected ' + (symbol == EOF ? 'end of input' : '\'' + (this.terminals_[symbol] || symbol) + '\'');
}
this.parseError(errStr, {
text: lexer.match,
token: this.terminals_[symbol] || symbol,
line: lexer.yylineno,
loc: yyloc,
expected: expected
});
}
if (action[0] instanceof Array && action.length > 1) {
throw new Error('Parse Error: multiple actions possible at state: ' + state + ', token: ' + symbol);
}
switch (action[0]) {
case 1:
stack.push(symbol);
vstack.push(lexer.yytext);
lstack.push(lexer.yylloc);
stack.push(action[1]);
symbol = null;
if (!preErrorSymbol) {
yyleng = lexer.yyleng;
yytext = lexer.yytext;
yylineno = lexer.yylineno;
yyloc = lexer.yylloc;
if (recovering > 0) {
recovering--;
}
} else {
symbol = preErrorSymbol;
preErrorSymbol = null;
}
break;
case 2:
len = this.productions_[action[1]][1];
yyval.$ = vstack[vstack.length - len];
yyval._$ = {
first_line: lstack[lstack.length - (len || 1)].first_line,
last_line: lstack[lstack.length - 1].last_line,
first_column: lstack[lstack.length - (len || 1)].first_column,
last_column: lstack[lstack.length - 1].last_column
};
if (ranges) {
yyval._$.range = [
lstack[lstack.length - (len || 1)].range[0],
lstack[lstack.length - 1].range[1]
];
}
r = this.performAction.apply(yyval, [
yytext,
yyleng,
yylineno,
sharedState.yy,
action[1],
vstack,
lstack
].concat(args));
if (typeof r !== 'undefined') {
return r;
}
if (len) {
stack = stack.slice(0, -1 * len * 2);
vstack = vstack.slice(0, -1 * len);
lstack = lstack.slice(0, -1 * len);
}
stack.push(this.productions_[action[1]][0]);
vstack.push(yyval.$);
lstack.push(yyval._$);
newState = table[stack[stack.length - 2]][stack[stack.length - 1]];
stack.push(newState);
break;
case 3:
return true;
}
}
return true;
}};
/* generated by jison-lex 0.3.4 */
var lexer = (function(){
var lexer = ({
EOF:1,
parseError:function parseError(str, hash) {
if (this.yy.parser) {
this.yy.parser.parseError(str, hash);
} else {
throw new Error(str);
}
},
// resets the lexer, sets new input
setInput:function (input, yy) {
this.yy = yy || this.yy || {};
this._input = input;
this._more = this._backtrack = this.done = false;
this.yylineno = this.yyleng = 0;
this.yytext = this.matched = this.match = '';
this.conditionStack = ['INITIAL'];
this.yylloc = {
first_line: 1,
first_column: 0,
last_line: 1,
last_column: 0
};
if (this.options.ranges) {
this.yylloc.range = [0,0];
}
this.offset = 0;
return this;
},
// consumes and returns one char from the input
input:function () {
var ch = this._input[0];
this.yytext += ch;
this.yyleng++;
this.offset++;
this.match += ch;
this.matched += ch;
var lines = ch.match(/(?:\r\n?|\n).*/g);
if (lines) {
this.yylineno++;
this.yylloc.last_line++;
} else {
this.yylloc.last_column++;
}
if (this.options.ranges) {
this.yylloc.range[1]++;
}
this._input = this._input.slice(1);
return ch;
},
// unshifts one char (or a string) into the input
unput:function (ch) {
var len = ch.length;
var lines = ch.split(/(?:\r\n?|\n)/g);
this._input = ch + this._input;
this.yytext = this.yytext.substr(0, this.yytext.length - len);
//this.yyleng -= len;
this.offset -= len;
var oldLines = this.match.split(/(?:\r\n?|\n)/g);
this.match = this.match.substr(0, this.match.length - 1);
this.matched = this.matched.substr(0, this.matched.length - 1);
if (lines.length - 1) {
this.yylineno -= lines.length - 1;
}
var r = this.yylloc.range;
this.yylloc = {
first_line: this.yylloc.first_line,
last_line: this.yylineno + 1,
first_column: this.yylloc.first_column,
last_column: lines ?
(lines.length === oldLines.length ? this.yylloc.first_column : 0)
+ oldLines[oldLines.length - lines.length].length - lines[0].length :
this.yylloc.first_column - len
};
if (this.options.ranges) {
this.yylloc.range = [r[0], r[0] + this.yyleng - len];
}
this.yyleng = this.yytext.length;
return this;
},
// When called from action, caches matched text and appends it on next action
more:function () {
this._more = true;
return this;
},
// When called from action, signals the lexer that this rule fails to match the input, so the next matching rule (regex) should be tested instead.
reject:function () {
if (this.options.backtrack_lexer) {
this._backtrack = true;
} else {
return this.parseError('Lexical error on line ' + (this.yylineno + 1) + '. You can only invoke reject() in the lexer when the lexer is of the backtracking persuasion (options.backtrack_lexer = true).\n' + this.showPosition(), {
text: "",
token: null,
line: this.yylineno
});
}
return this;
},
// retain first n characters of the match
less:function (n) {
this.unput(this.match.slice(n));
},
// displays already matched input, i.e. for error messages
pastInput:function () {
var past = this.matched.substr(0, this.matched.length - this.match.length);
return (past.length > 20 ? '...':'') + past.substr(-20).replace(/\n/g, "");
},
// displays upcoming input, i.e. for error messages
upcomingInput:function () {
var next = this.match;
if (next.length < 20) {
next += this._input.substr(0, 20-next.length);
}
return (next.substr(0,20) + (next.length > 20 ? '...' : '')).replace(/\n/g, "");
},
// displays the character position where the lexing error occurred, i.e. for error messages
showPosition:function () {
var pre = this.pastInput();
var c = new Array(pre.length + 1).join("-");
return pre + this.upcomingInput() + "\n" + c + "^";
},
// test the lexed token: return FALSE when not a match, otherwise return token
test_match:function(match, indexed_rule) {
var token,
lines,
backup;
if (this.options.backtrack_lexer) {
// save context
backup = {
yylineno: this.yylineno,
yylloc: {
first_line: this.yylloc.first_line,
last_line: this.last_line,
first_column: this.yylloc.first_column,
last_column: this.yylloc.last_column
},
yytext: this.yytext,
match: this.match,
matches: this.matches,
matched: this.matched,
yyleng: this.yyleng,
offset: this.offset,
_more: this._more,
_input: this._input,
yy: this.yy,
conditionStack: this.conditionStack.slice(0),
done: this.done
};
if (this.options.ranges) {
backup.yylloc.range = this.yylloc.range.slice(0);
}
}
lines = match[0].match(/(?:\r\n?|\n).*/g);
if (lines) {
this.yylineno += lines.length;
}
this.yylloc = {
first_line: this.yylloc.last_line,
last_line: this.yylineno + 1,
first_column: this.yylloc.last_column,
last_column: lines ?
lines[lines.length - 1].length - lines[lines.length - 1].match(/\r?\n?/)[0].length :
this.yylloc.last_column + match[0].length
};
this.yytext += match[0];
this.match += match[0];
this.matches = match;
this.yyleng = this.yytext.length;
if (this.options.ranges) {
this.yylloc.range = [this.offset, this.offset += this.yyleng];
}
this._more = false;
this._backtrack = false;
this._input = this._input.slice(match[0].length);
this.matched += match[0];
token = this.performAction.call(this, this.yy, this, indexed_rule, this.conditionStack[this.conditionStack.length - 1]);
if (this.done && this._input) {
this.done = false;
}
if (token) {
return token;
} else if (this._backtrack) {
// recover context
for (var k in backup) {
this[k] = backup[k];
}
return false; // rule action called reject() implying the next rule should be tested instead.
}
return false;
},
// return next match in input
next:function () {
if (this.done) {
return this.EOF;
}
if (!this._input) {
this.done = true;
}
var token,
match,
tempMatch,
index;
if (!this._more) {
this.yytext = '';
this.match = '';
}
var rules = this._currentRules();
for (var i = 0; i < rules.length; i++) {
tempMatch = this._input.match(this.rules[rules[i]]);
if (tempMatch && (!match || tempMatch[0].length > match[0].length)) {
match = tempMatch;
index = i;
if (this.options.backtrack_lexer) {
token = this.test_match(tempMatch, rules[i]);
if (token !== false) {
return token;
} else if (this._backtrack) {
match = false;
continue; // rule action called reject() implying a rule MISmatch.
} else {
// else: this is a lexer rule which consumes input without producing a token (e.g. whitespace)
return false;
}
} else if (!this.options.flex) {
break;
}
}
}
if (match) {
token = this.test_match(match, rules[index]);
if (token !== false) {
return token;
}
// else: this is a lexer rule which consumes input without producing a token (e.g. whitespace)
return false;
}
if (this._input === "") {
return this.EOF;
} else {
return this.parseError('Lexical error on line ' + (this.yylineno + 1) + '. Unrecognized text.\n' + this.showPosition(), {
text: "",
token: null,
line: this.yylineno
});
}
},
// return next match that has a token
lex:function lex () {
var r = this.next();
if (r) {
return r;
} else {
return this.lex();
}
},
// activates a new lexer condition state (pushes the new lexer condition state onto the condition stack)
begin:function begin (condition) {
this.conditionStack.push(condition);
},
// pop the previously active lexer condition state off the condition stack
popState:function popState () {
var n = this.conditionStack.length - 1;
if (n > 0) {
return this.conditionStack.pop();
} else {
return this.conditionStack[0];
}
},
// produce the lexer rule set which is active for the currently active lexer condition state
_currentRules:function _currentRules () {
if (this.conditionStack.length && this.conditionStack[this.conditionStack.length - 1]) {
return this.conditions[this.conditionStack[this.conditionStack.length - 1]].rules;
} else {
return this.conditions["INITIAL"].rules;
}
},
// return the currently active lexer condition state; when an index argument is provided it produces the N-th previous condition state, if available
topState:function topState (n) {
n = this.conditionStack.length - 1 - Math.abs(n || 0);
if (n >= 0) {
return this.conditionStack[n];
} else {
return "INITIAL";
}
},
// alias for begin(condition)
pushState:function pushState (condition) {
this.begin(condition);
},
// return the number of states currently on the stack
stateStackSize:function stateStackSize() {
return this.conditionStack.length;
},
options: {"case-insensitive":true},
performAction: function anonymous(yy,yy_,$avoiding_name_collisions,YY_START) {
// Pre-lexer code can go here
var YYSTATE=YY_START;
switch($avoiding_name_collisions) {
case 0:return 4 ;
break;
case 1:return 9 ;
break;
case 2:return 'space'
break;
case 3:return 10;
break;
case 4:return 12 ;
break;
case 5:return 6 ;
break;
case 6:return 13 ;
break;
}
},
rules: [/^(?:info\b)/i,/^(?:[\s\n\r]+)/i,/^(?:[\s]+)/i,/^(?:showInfo\b)/i,/^(?:say\b)/i,/^(?:$)/i,/^(?:.)/i],
conditions: {"INITIAL":{"rules":[0,1,2,3,4,5,6],"inclusive":true}}
});
return lexer;
})();
parser.lexer = lexer;
function Parser () {
this.yy = {};
}
Parser.prototype = parser;parser.Parser = Parser;
return new Parser;
})();
if (typeof require !== 'undefined' && typeof exports !== 'undefined') {
exports.parser = parser;
exports.Parser = parser.Parser;
exports.parse = function () { return parser.parse.apply(parser, arguments); };
exports.main = function commonjsMain (args) {
if (!args[1]) {
console.log('Usage: '+args[0]+' FILE');
process.exit(1);
}
var source = require('fs').readFileSync(require('path').normalize(args[1]), "utf8");
return exports.parser.parse(source);
};
if (typeof module !== 'undefined' && require.main === module) {
exports.main(process.argv.slice(1));
}
}

mermaid.js

@@ -104,7 +104,7 @@ const init = function () {
}
const initialize = function (config) {
logger.debug('Initializing mermaid')
logger.debug('Initializing mermaid ')
if (typeof config.mermaid !== 'undefined') {
if (typeof config.mermaid.startOnLoad !== 'undefined') {
mermaid.startOnLoad = config.mermaid.startOnLoad

mermaidAPI.js

@@ -13,6 +13,7 @@
*/
import * as d3 from 'd3'
import scope from 'scope-css'
import pkg from '../package.json'
import { logger, setLogLevel } from './logger'
import utils from './utils'
@@ -31,6 +32,9 @@ import classDb from './diagrams/class/classDb'
import gitGraphRenderer from './diagrams/git/gitGraphRenderer'
import gitGraphParser from './diagrams/git/parser/gitGraph'
import gitGraphAst from './diagrams/git/gitGraphAst'
import infoRenderer from './diagrams/info/infoRenderer'
import infoParser from './diagrams/info/parser/info'
import infoDb from './diagrams/info/infoDb'
const themes = {}
for (const themeName of ['default', 'forest', 'dark', 'neutral']) {
@@ -236,6 +240,7 @@ function parse (text) {
const graphType = utils.detectType(text)
let parser
logger.debug('Type ' + graphType)
switch (graphType) {
case 'git':
parser = gitGraphParser
@@ -257,6 +262,11 @@ function parse (text) {
parser = classParser
parser.parser.yy = classDb
break
case 'info':
logger.debug('info info info')
parser = infoParser
parser.parser.yy = infoDb
break
}
parser.parser.yy.parseError = (str, hash) => {
@@ -429,6 +439,11 @@ const render = function (id, txt, cb, container) {
classRenderer.setConf(config.class)
classRenderer.draw(txt, id)
break
case 'info':
config.class.arrowMarkerAbsolute = config.arrowMarkerAbsolute
infoRenderer.setConf(config.class)
infoRenderer.draw(txt, id, pkg.version)
break
}
d3.select(`[id="${id}"]`).selectAll('foreignobject > *').attr('xmlns', 'http://www.w3.org/1999/xhtml')
@@ -482,7 +497,7 @@ const setConf = function (cnf) {
}
function initialize (options) {
logger.debug('Initializing mermaidAPI')
logger.debug('Initializing mermaidAPI ', pkg.version)
// Update default config with options supplied at initialization
if (typeof options === 'object') {
setConf(options)
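
With the new cases wired in, rendering an info diagram through the API looks roughly like the snippet below; the element id and callback body are illustrative, and the callback is assumed to receive the rendered SVG markup, matching the render(id, txt, cb, container) signature above.

// Illustrative only: exercises the render(id, txt, cb) path extended above
import mermaidAPI from './mermaidAPI'

mermaidAPI.initialize({ theme: 'forest' })
mermaidAPI.render('info-diagram', 'info\n  showInfo', svgCode => {
  // the SVG contains the version text drawn by infoRenderer
  document.getElementById('output').innerHTML = svgCode // #output is a made-up target element
})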

utils.js

@@ -1,4 +1,5 @@
import * as d3 from 'd3'
import { logger } from './logger'
/**
* @function detectType
@@ -19,6 +20,7 @@ import * as d3 from 'd3'
*/
export const detectType = function (text) {
text = text.replace(/^\s*%%.*\n/g, '\n')
logger.debug('Detecting diagram type based on the text ' + text)
if (text.match(/^\s*sequenceDiagram/)) {
return 'sequence'
}
@@ -34,6 +36,11 @@ export const detectType = function (text) {
if (text.match(/^\s*gitGraph/)) {
return 'git'
}
if (text.match(/^\s*info/)) {
return 'info'
}
return 'flowchart'
}
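
The new branch routes info definitions to the info parser and renderer. For illustration, assuming detectType is imported from this module:

// Illustrative only
import { detectType } from './utils'

console.log(detectType('info\n  showInfo')) // 'info'
console.log(detectType('graph TD\nA-->B')) // 'flowchart', the fallback return value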