Draft fix for issue #304

Knut Sveidqvist 2016-03-23 20:34:44 +01:00
parent 1ad9e75a86
commit bda1aaa5db
15 changed files with 18348 additions and 16632 deletions

Diffs for the regenerated vendored build files in dist/ are suppressed (file too large or lines too long):

8876  dist/mermaid.js (vendored)
  39  dist/mermaid.min.js (vendored)
8874  dist/mermaid.slim.js (vendored)
8208  dist/mermaidAPI.js (vendored)
8206  dist/mermaidAPI.slim.js (vendored)

View File

@ -6,7 +6,7 @@ var fs = require('fs')
, semver = require('semver')
, path = require('path')
var PHANTOM_VERSION = "^1.9.0"
var PHANTOM_VERSION = "^2.1.0"
var info = chalk.blue.bold
, note = chalk.green.bold
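
The bumped PHANTOM_VERSION is the range the CLI compares the locally installed PhantomJS against. A minimal sketch of such a guard, assuming the phantomjs package exposes a version field and that semver.satisfies is the check used (both are assumptions for illustration, not shown in this hunk):

var chalk = require('chalk')
  , semver = require('semver')
  , phantomjs = require('phantomjs')

var PHANTOM_VERSION = "^2.1.0"

// Warn and bail out if the resolved PhantomJS does not satisfy the required range.
if (!semver.satisfies(phantomjs.version, PHANTOM_VERSION)) {
  console.log(chalk.red.bold('mermaid requires phantomjs ' + PHANTOM_VERSION
    + ', but found ' + phantomjs.version))
  process.exit(1)
}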

View File

@ -25,13 +25,10 @@ function processMermaid(files, _options, _next) {
, options.width
];
files.forEach(function(file) {
phantomArgs.push(file)
})
console.log('phantomArgs');
console.log(JSON.stringify(phantomArgs));
//
mkdirp(outputDir, function(err) {
if (err) {
throw err
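
For context, a sketch of the assumed hand-off to PhantomJS (child_process.execFile and the phantomjs path export are illustrative assumptions, not part of this diff). The important detail is that the values collected in phantomArgs travel as positional arguments, so their order must line up with the system.args indexes read in the PhantomJS-side script below:

var execFile = require('child_process').execFile
  , phantomPath = require('phantomjs').path

// Assuming phantomArgs begins with the path to the PhantomJS-side script:
// inside PhantomJS 2.x that script path becomes system.args[0], which is why
// every option index in the next file is shifted up by one.
execFile(phantomPath, phantomArgs, function (err, stdout, stderr) {
  if (err) {
    throw err
  }
  console.log(stdout)
})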

View File

@ -29,32 +29,27 @@ var system = require('system')
, webpage = require('webpage')
console.log('phantom.args');
console.log(phantom.args.length);
console.log(JSON.stringify(phantom.args));
var page = webpage.create()
, files = phantom.args.slice(8, phantom.args.length)
, width = phantom.args[7]
, files = system.args.slice(9, system.args.length)
, width = system.args[8]
if(typeof width === 'undefined'){
if(typeof width === 'undefined' || width==='undefined'){
width = 1200;
}
var options = {
outputDir: phantom.args[0]
, png: phantom.args[1] === 'true' ? true : false
, svg: phantom.args[2] === 'true' ? true : false
, css: phantom.args[3] !== '' ? phantom.args[3] : '* { margin: 0; padding: 0; }'
, sequenceConfig: phantom.args[4]
, ganttConfig: phantom.args[5]
, verbose: phantom.args[6] === 'true' ? true : false
outputDir: system.args[1]
, png: system.args[2] === 'true' ? true : false
, svg: system.args[3] === 'true' ? true : false
, css: system.args[4] !== '' ? system.args[4] : '* { margin: 0; padding: 0; }'
, sequenceConfig: system.args[5]
, ganttConfig: system.args[6]
, verbose: system.args[7] === 'true' ? true : false
, width: width
}
, log = logger(options.verbose)
// If no css is supplied make sure a fixed width is given to the gantt renderer
if(phantom.args[3] !== ''){
if(system.args[3] !== ''){
if(typeof options.ganttConfig === 'undefined'){
options.ganttConfig = {};
}
@ -73,9 +68,6 @@ page.content = [
, '</html>'
].join('\n')
//console.log('page.content');
//console.log(JSON.stringify(page.content));
//console.log(phantom.args[3]);
page.injectJs('../dist/mermaid.js')
page.onConsoleMessage = function(msg, lineNum, sourceId) {
@ -92,7 +84,7 @@ files.forEach(function(file) {
, svgContent
, allElements;
console.log('ready to execute png: ' + filename + '.png')
console.log('ready to execute png: ' + filename + '.png ')
// the JS executed in this statement is sandboxed, even though it doesn't
// look like it. we need to serialize then unserialize the svgContent that's
@ -103,6 +95,7 @@ files.forEach(function(file) {
sequenceConfig : options.sequenceConfig,
confWidth : options.width
})
oDOM = oParser.parseFromString(svgContent, "text/xml")
resolveSVGElement(oDOM.firstChild)
@ -123,7 +116,7 @@ files.forEach(function(file) {
page.render(options.outputDir + fs.separator + filename + '.png')
console.log('saved png: ' + filename + '.png')
}
console.log('After png save: ' + filename + '.png')
if (options.svg) {
var serialize = new XMLSerializer();
fs.write(
@ -247,6 +240,7 @@ function executeInPage(data) {
el.className = 'mermaid'
elContent = document.createTextNode(contents)
el.appendChild(elContent)
//el.innerText = '<b>hello</b>\uD800' //contents;
document.body.appendChild(el)
@ -254,6 +248,7 @@ function executeInPage(data) {
sequenceDiagram:{useMaxWidth:false},
flowchart:{useMaxWidth:false}
});
//console.log('after initialize',sequenceConfig);
if(typeof sequenceConfig !== undefined && sequenceConfig !== 'undefined'){
//sc = document.createElement("script")
@ -266,6 +261,7 @@ function executeInPage(data) {
});
}
//console.log('after initialize 2');
if(typeof ganttConfig !== undefined && ganttConfig !== 'undefined'){
sc = document.createElement("script")
scContent = document.createTextNode('mermaid.ganttConfig = JSON.parse(' + JSON.stringify(ganttConfig) + ');')
@ -280,9 +276,6 @@ function executeInPage(data) {
document.body.appendChild(sc)
}
console.log('Generated head');
console.log(document.head.innerHTML);
//console.log(document.body.innerHTML);
mermaid.init();
svg = document.querySelector('svg')
@ -291,6 +284,7 @@ function executeInPage(data) {
width = boundingBox.width * 1.5; // adding the scale factor for consistency with output in chrome browser
height = boundingBox.height * 1.5; // adding the scale factor for consistency with output in chrome browser
var scalefactor = confWidth/(width-8);
// resizing the body to fit the svg
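
The hunk above is the heart of the fix for #304: PhantomJS 2.x removed the deprecated phantom.args array, so the script now reads its arguments from the system module, whose args array includes the script path at index 0 and therefore shifts every option index up by one. Because every entry of system.args is a string, the width check also has to catch the literal text 'undefined'. A minimal before/after sketch (option names taken from the hunk, everything else illustrative):

var system = require('system')

// PhantomJS 1.x (old): phantom.args held only the user-supplied arguments.
//   var outputDir = phantom.args[0]
//     , width = phantom.args[7]

// PhantomJS 2.x (new): system.args[0] is the script path, so every index moves up by one.
var outputDir = system.args[1]
  , width = system.args[8]

// system.args values are always strings, so a missing width can arrive as the
// text "undefined" rather than the undefined value.
if (typeof width === 'undefined' || width === 'undefined') {
  width = 1200
}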

View File

@ -1,629 +0,0 @@
/* parser generated by jison 0.4.11 */
/*
Returns a Parser object of the following structure:
Parser: {
yy: {}
}
Parser.prototype: {
yy: {},
trace: function(),
symbols_: {associative list: name ==> number},
terminals_: {associative list: number ==> name},
productions_: [...],
performAction: function anonymous(yytext, yyleng, yylineno, yy, yystate, $$, _$),
table: [...],
defaultActions: {...},
parseError: function(str, hash),
parse: function(input),
lexer: {
EOF: 1,
parseError: function(str, hash),
setInput: function(input),
input: function(),
unput: function(str),
more: function(),
less: function(n),
pastInput: function(),
upcomingInput: function(),
showPosition: function(),
test_match: function(regex_match_array, rule_index),
next: function(),
lex: function(),
begin: function(condition),
popState: function(),
_currentRules: function(),
topState: function(),
pushState: function(condition),
options: {
ranges: boolean (optional: true ==> token location info will include a .range[] member)
flex: boolean (optional: true ==> flex-like lexing behaviour where the rules are tested exhaustively to find the longest match)
backtrack_lexer: boolean (optional: true ==> lexer regexes are tested in order and for each matching regex the action code is invoked; the lexer terminates the scan when a token is returned by the action code)
},
performAction: function(yy, yy_, $avoiding_name_collisions, YY_START),
rules: [...],
conditions: {associative list: name ==> set},
}
}
token location info (@$, _$, etc.): {
first_line: n,
last_line: n,
first_column: n,
last_column: n,
range: [start_number, end_number] (where the numbers are indexes into the input string, regular zero-based)
}
the parseError function receives a 'hash' object with these members for lexer and parser errors: {
text: (matched text)
token: (the produced terminal token, if any)
line: (yylineno)
}
while parser (grammar) errors will also provide these members, i.e. parser errors deliver a superset of attributes: {
loc: (yylloc)
expected: (string describing the set of expected tokens)
recoverable: (boolean: TRUE when the parser has a error recovery rule available for this particular error)
}
*/
var ebnf = (function(){
var parser = {trace: function trace() { },
yy: {},
symbols_: {"error":2,"production":3,"handle":4,"EOF":5,"handle_list":6,"|":7,"expression_suffix":8,"expression":9,"suffix":10,"ALIAS":11,"symbol":12,"(":13,")":14,"*":15,"?":16,"+":17,"$accept":0,"$end":1},
terminals_: {2:"error",5:"EOF",7:"|",11:"ALIAS",12:"symbol",13:"(",14:")",15:"*",16:"?",17:"+"},
productions_: [0,[3,2],[6,1],[6,3],[4,0],[4,2],[8,3],[8,2],[9,1],[9,3],[10,0],[10,1],[10,1],[10,1]],
performAction: function anonymous(yytext, yyleng, yylineno, yy, yystate /* action[1] */, $$ /* vstack */, _$ /* lstack */) {
/* this == yyval */
var $0 = $$.length - 1;
switch (yystate) {
case 1: return $$[$0-1];
break;
case 2: this.$ = [$$[$0]];
break;
case 3: $$[$0-2].push($$[$0]);
break;
case 4: this.$ = [];
break;
case 5: $$[$0-1].push($$[$0]);
break;
case 6: this.$ = ['xalias', $$[$0-1], $$[$0-2], $$[$0]];
break;
case 7: if ($$[$0]) this.$ = [$$[$0], $$[$0-1]]; else this.$ = $$[$0-1];
break;
case 8: this.$ = ['symbol', $$[$0]];
break;
case 9: this.$ = ['()', $$[$0-1]];
break;
}
},
table: [{3:1,4:2,5:[2,4],12:[2,4],13:[2,4]},{1:[3]},{5:[1,3],8:4,9:5,12:[1,6],13:[1,7]},{1:[2,1]},{5:[2,5],7:[2,5],12:[2,5],13:[2,5],14:[2,5]},{5:[2,10],7:[2,10],10:8,11:[2,10],12:[2,10],13:[2,10],14:[2,10],15:[1,9],16:[1,10],17:[1,11]},{5:[2,8],7:[2,8],11:[2,8],12:[2,8],13:[2,8],14:[2,8],15:[2,8],16:[2,8],17:[2,8]},{4:13,6:12,7:[2,4],12:[2,4],13:[2,4],14:[2,4]},{5:[2,7],7:[2,7],11:[1,14],12:[2,7],13:[2,7],14:[2,7]},{5:[2,11],7:[2,11],11:[2,11],12:[2,11],13:[2,11],14:[2,11]},{5:[2,12],7:[2,12],11:[2,12],12:[2,12],13:[2,12],14:[2,12]},{5:[2,13],7:[2,13],11:[2,13],12:[2,13],13:[2,13],14:[2,13]},{7:[1,16],14:[1,15]},{7:[2,2],8:4,9:5,12:[1,6],13:[1,7],14:[2,2]},{5:[2,6],7:[2,6],12:[2,6],13:[2,6],14:[2,6]},{5:[2,9],7:[2,9],11:[2,9],12:[2,9],13:[2,9],14:[2,9],15:[2,9],16:[2,9],17:[2,9]},{4:17,7:[2,4],12:[2,4],13:[2,4],14:[2,4]},{7:[2,3],8:4,9:5,12:[1,6],13:[1,7],14:[2,3]}],
defaultActions: {3:[2,1]},
parseError: function parseError(str, hash) {
if (hash.recoverable) {
this.trace(str);
} else {
throw new Error(str);
}
},
parse: function parse(input) {
var self = this, stack = [0], vstack = [null], lstack = [], table = this.table, yytext = '', yylineno = 0, yyleng = 0, recovering = 0, TERROR = 2, EOF = 1;
var args = lstack.slice.call(arguments, 1);
this.lexer.setInput(input);
this.lexer.yy = this.yy;
this.yy.lexer = this.lexer;
this.yy.parser = this;
if (typeof this.lexer.yylloc == 'undefined') {
this.lexer.yylloc = {};
}
var yyloc = this.lexer.yylloc;
lstack.push(yyloc);
var ranges = this.lexer.options && this.lexer.options.ranges;
if (typeof this.yy.parseError === 'function') {
this.parseError = this.yy.parseError;
} else {
this.parseError = Object.getPrototypeOf(this).parseError;
}
function popStack(n) {
stack.length = stack.length - 2 * n;
vstack.length = vstack.length - n;
lstack.length = lstack.length - n;
}
function lex() {
var token;
token = self.lexer.lex() || EOF;
if (typeof token !== 'number') {
token = self.symbols_[token] || token;
}
return token;
}
var symbol, preErrorSymbol, state, action, a, r, yyval = {}, p, len, newState, expected;
while (true) {
state = stack[stack.length - 1];
if (this.defaultActions[state]) {
action = this.defaultActions[state];
} else {
if (symbol === null || typeof symbol == 'undefined') {
symbol = lex();
}
action = table[state] && table[state][symbol];
}
if (typeof action === 'undefined' || !action.length || !action[0]) {
var errStr = '';
expected = [];
for (p in table[state]) {
if (this.terminals_[p] && p > TERROR) {
expected.push('\'' + this.terminals_[p] + '\'');
}
}
if (this.lexer.showPosition) {
errStr = 'Parse error on line ' + (yylineno + 1) + ':\n' + this.lexer.showPosition() + '\nExpecting ' + expected.join(', ') + ', got \'' + (this.terminals_[symbol] || symbol) + '\'';
} else {
errStr = 'Parse error on line ' + (yylineno + 1) + ': Unexpected ' + (symbol == EOF ? 'end of input' : '\'' + (this.terminals_[symbol] || symbol) + '\'');
}
this.parseError(errStr, {
text: this.lexer.match,
token: this.terminals_[symbol] || symbol,
line: this.lexer.yylineno,
loc: yyloc,
expected: expected
});
}
if (action[0] instanceof Array && action.length > 1) {
throw new Error('Parse Error: multiple actions possible at state: ' + state + ', token: ' + symbol);
}
switch (action[0]) {
case 1:
stack.push(symbol);
vstack.push(this.lexer.yytext);
lstack.push(this.lexer.yylloc);
stack.push(action[1]);
symbol = null;
if (!preErrorSymbol) {
yyleng = this.lexer.yyleng;
yytext = this.lexer.yytext;
yylineno = this.lexer.yylineno;
yyloc = this.lexer.yylloc;
if (recovering > 0) {
recovering--;
}
} else {
symbol = preErrorSymbol;
preErrorSymbol = null;
}
break;
case 2:
len = this.productions_[action[1]][1];
yyval.$ = vstack[vstack.length - len];
yyval._$ = {
first_line: lstack[lstack.length - (len || 1)].first_line,
last_line: lstack[lstack.length - 1].last_line,
first_column: lstack[lstack.length - (len || 1)].first_column,
last_column: lstack[lstack.length - 1].last_column
};
if (ranges) {
yyval._$.range = [
lstack[lstack.length - (len || 1)].range[0],
lstack[lstack.length - 1].range[1]
];
}
r = this.performAction.apply(yyval, [
yytext,
yyleng,
yylineno,
this.yy,
action[1],
vstack,
lstack
].concat(args));
if (typeof r !== 'undefined') {
return r;
}
if (len) {
stack = stack.slice(0, -1 * len * 2);
vstack = vstack.slice(0, -1 * len);
lstack = lstack.slice(0, -1 * len);
}
stack.push(this.productions_[action[1]][0]);
vstack.push(yyval.$);
lstack.push(yyval._$);
newState = table[stack[stack.length - 2]][stack[stack.length - 1]];
stack.push(newState);
break;
case 3:
return true;
}
}
return true;
}};
/* generated by jison-lex 0.2.1 */
var lexer = (function(){
var lexer = {
EOF:1,
parseError:function parseError(str, hash) {
if (this.yy.parser) {
this.yy.parser.parseError(str, hash);
} else {
throw new Error(str);
}
},
// resets the lexer, sets new input
setInput:function (input) {
this._input = input;
this._more = this._backtrack = this.done = false;
this.yylineno = this.yyleng = 0;
this.yytext = this.matched = this.match = '';
this.conditionStack = ['INITIAL'];
this.yylloc = {
first_line: 1,
first_column: 0,
last_line: 1,
last_column: 0
};
if (this.options.ranges) {
this.yylloc.range = [0,0];
}
this.offset = 0;
return this;
},
// consumes and returns one char from the input
input:function () {
var ch = this._input[0];
this.yytext += ch;
this.yyleng++;
this.offset++;
this.match += ch;
this.matched += ch;
var lines = ch.match(/(?:\r\n?|\n).*/g);
if (lines) {
this.yylineno++;
this.yylloc.last_line++;
} else {
this.yylloc.last_column++;
}
if (this.options.ranges) {
this.yylloc.range[1]++;
}
this._input = this._input.slice(1);
return ch;
},
// unshifts one char (or a string) into the input
unput:function (ch) {
var len = ch.length;
var lines = ch.split(/(?:\r\n?|\n)/g);
this._input = ch + this._input;
this.yytext = this.yytext.substr(0, this.yytext.length - len - 1);
//this.yyleng -= len;
this.offset -= len;
var oldLines = this.match.split(/(?:\r\n?|\n)/g);
this.match = this.match.substr(0, this.match.length - 1);
this.matched = this.matched.substr(0, this.matched.length - 1);
if (lines.length - 1) {
this.yylineno -= lines.length - 1;
}
var r = this.yylloc.range;
this.yylloc = {
first_line: this.yylloc.first_line,
last_line: this.yylineno + 1,
first_column: this.yylloc.first_column,
last_column: lines ?
(lines.length === oldLines.length ? this.yylloc.first_column : 0)
+ oldLines[oldLines.length - lines.length].length - lines[0].length :
this.yylloc.first_column - len
};
if (this.options.ranges) {
this.yylloc.range = [r[0], r[0] + this.yyleng - len];
}
this.yyleng = this.yytext.length;
return this;
},
// When called from action, caches matched text and appends it on next action
more:function () {
this._more = true;
return this;
},
// When called from action, signals the lexer that this rule fails to match the input, so the next matching rule (regex) should be tested instead.
reject:function () {
if (this.options.backtrack_lexer) {
this._backtrack = true;
} else {
return this.parseError('Lexical error on line ' + (this.yylineno + 1) + '. You can only invoke reject() in the lexer when the lexer is of the backtracking persuasion (options.backtrack_lexer = true).\n' + this.showPosition(), {
text: "",
token: null,
line: this.yylineno
});
}
return this;
},
// retain first n characters of the match
less:function (n) {
this.unput(this.match.slice(n));
},
// displays already matched input, i.e. for error messages
pastInput:function () {
var past = this.matched.substr(0, this.matched.length - this.match.length);
return (past.length > 20 ? '...':'') + past.substr(-20).replace(/\n/g, "");
},
// displays upcoming input, i.e. for error messages
upcomingInput:function () {
var next = this.match;
if (next.length < 20) {
next += this._input.substr(0, 20-next.length);
}
return (next.substr(0,20) + (next.length > 20 ? '...' : '')).replace(/\n/g, "");
},
// displays the character position where the lexing error occurred, i.e. for error messages
showPosition:function () {
var pre = this.pastInput();
var c = new Array(pre.length + 1).join("-");
return pre + this.upcomingInput() + "\n" + c + "^";
},
// test the lexed token: return FALSE when not a match, otherwise return token
test_match:function (match, indexed_rule) {
var token,
lines,
backup;
if (this.options.backtrack_lexer) {
// save context
backup = {
yylineno: this.yylineno,
yylloc: {
first_line: this.yylloc.first_line,
last_line: this.last_line,
first_column: this.yylloc.first_column,
last_column: this.yylloc.last_column
},
yytext: this.yytext,
match: this.match,
matches: this.matches,
matched: this.matched,
yyleng: this.yyleng,
offset: this.offset,
_more: this._more,
_input: this._input,
yy: this.yy,
conditionStack: this.conditionStack.slice(0),
done: this.done
};
if (this.options.ranges) {
backup.yylloc.range = this.yylloc.range.slice(0);
}
}
lines = match[0].match(/(?:\r\n?|\n).*/g);
if (lines) {
this.yylineno += lines.length;
}
this.yylloc = {
first_line: this.yylloc.last_line,
last_line: this.yylineno + 1,
first_column: this.yylloc.last_column,
last_column: lines ?
lines[lines.length - 1].length - lines[lines.length - 1].match(/\r?\n?/)[0].length :
this.yylloc.last_column + match[0].length
};
this.yytext += match[0];
this.match += match[0];
this.matches = match;
this.yyleng = this.yytext.length;
if (this.options.ranges) {
this.yylloc.range = [this.offset, this.offset += this.yyleng];
}
this._more = false;
this._backtrack = false;
this._input = this._input.slice(match[0].length);
this.matched += match[0];
token = this.performAction.call(this, this.yy, this, indexed_rule, this.conditionStack[this.conditionStack.length - 1]);
if (this.done && this._input) {
this.done = false;
}
if (token) {
return token;
} else if (this._backtrack) {
// recover context
for (var k in backup) {
this[k] = backup[k];
}
return false; // rule action called reject() implying the next rule should be tested instead.
}
return false;
},
// return next match in input
next:function () {
if (this.done) {
return this.EOF;
}
if (!this._input) {
this.done = true;
}
var token,
match,
tempMatch,
index;
if (!this._more) {
this.yytext = '';
this.match = '';
}
var rules = this._currentRules();
for (var i = 0; i < rules.length; i++) {
tempMatch = this._input.match(this.rules[rules[i]]);
if (tempMatch && (!match || tempMatch[0].length > match[0].length)) {
match = tempMatch;
index = i;
if (this.options.backtrack_lexer) {
token = this.test_match(tempMatch, rules[i]);
if (token !== false) {
return token;
} else if (this._backtrack) {
match = false;
continue; // rule action called reject() implying a rule MISmatch.
} else {
// else: this is a lexer rule which consumes input without producing a token (e.g. whitespace)
return false;
}
} else if (!this.options.flex) {
break;
}
}
}
if (match) {
token = this.test_match(match, rules[index]);
if (token !== false) {
return token;
}
// else: this is a lexer rule which consumes input without producing a token (e.g. whitespace)
return false;
}
if (this._input === "") {
return this.EOF;
} else {
return this.parseError('Lexical error on line ' + (this.yylineno + 1) + '. Unrecognized text.\n' + this.showPosition(), {
text: "",
token: null,
line: this.yylineno
});
}
},
// return next match that has a token
lex:function lex() {
var r = this.next();
if (r) {
return r;
} else {
return this.lex();
}
},
// activates a new lexer condition state (pushes the new lexer condition state onto the condition stack)
begin:function begin(condition) {
this.conditionStack.push(condition);
},
// pop the previously active lexer condition state off the condition stack
popState:function popState() {
var n = this.conditionStack.length - 1;
if (n > 0) {
return this.conditionStack.pop();
} else {
return this.conditionStack[0];
}
},
// produce the lexer rule set which is active for the currently active lexer condition state
_currentRules:function _currentRules() {
if (this.conditionStack.length && this.conditionStack[this.conditionStack.length - 1]) {
return this.conditions[this.conditionStack[this.conditionStack.length - 1]].rules;
} else {
return this.conditions["INITIAL"].rules;
}
},
// return the currently active lexer condition state; when an index argument is provided it produces the N-th previous condition state, if available
topState:function topState(n) {
n = this.conditionStack.length - 1 - Math.abs(n || 0);
if (n >= 0) {
return this.conditionStack[n];
} else {
return "INITIAL";
}
},
// alias for begin(condition)
pushState:function pushState(condition) {
this.begin(condition);
},
// return the number of states currently on the stack
stateStackSize:function stateStackSize() {
return this.conditionStack.length;
},
options: {},
performAction: function anonymous(yy,yy_,$avoiding_name_collisions,YY_START) {
var YYSTATE=YY_START;
switch($avoiding_name_collisions) {
case 0:/* skip whitespace */
break;
case 1:return 12;
break;
case 2:yy_.yytext = yy_.yytext.substr(1, yy_.yyleng-2); return 11;
break;
case 3:return 12;
break;
case 4:return 12;
break;
case 5:return 'bar';
break;
case 6:return 13;
break;
case 7:return 14;
break;
case 8:return 15;
break;
case 9:return 16;
break;
case 10:return 7;
break;
case 11:return 17;
break;
case 12:return 5;
break;
}
},
rules: [/^(?:\s+)/,/^(?:([a-zA-Z][a-zA-Z0-9_-]*))/,/^(?:\[([a-zA-Z][a-zA-Z0-9_-]*)\])/,/^(?:'[^']*')/,/^(?:\.)/,/^(?:bar\b)/,/^(?:\()/,/^(?:\))/,/^(?:\*)/,/^(?:\?)/,/^(?:\|)/,/^(?:\+)/,/^(?:$)/],
conditions: {"INITIAL":{"rules":[0,1,2,3,4,5,6,7,8,9,10,11,12],"inclusive":true}}
};
return lexer;
})();
parser.lexer = lexer;
function Parser () {
this.yy = {};
}
Parser.prototype = parser;parser.Parser = Parser;
return new Parser;
})();
if (typeof require !== 'undefined' && typeof exports !== 'undefined') {
exports.parser = ebnf;
exports.Parser = ebnf.Parser;
exports.parse = function () { return ebnf.parse.apply(ebnf, arguments); };
exports.main = function commonjsMain(args) {
if (!args[1]) {
console.log('Usage: '+args[0]+' FILE');
process.exit(1);
}
var source = require('fs').readFileSync(require('path').normalize(args[1]), "utf8");
return exports.parser.parse(source);
};
if (typeof module !== 'undefined' && require.main === module) {
exports.main(process.argv.slice(1));
}
}

View File

@ -107,7 +107,7 @@
"marked": "^0.3.2",
"mock-browser": "^0.91.34",
"path": "^0.4.9",
"phantomjs": "^1.9.18",
"phantomjs": "^2.1.3",
"proxyquire": "^1.7.3",
"proxyquire-universal": "^1.0.8",
"proxyquireify": "^3.0.0",

View File

@ -152,7 +152,7 @@ describe('when cloning CSS ', function () {
var svg = generateSVG();
addStyleToDocument();
utils.cloneCssStyles(svg, {});
expect(stylesToArray(svg)).toEqual(['.node { stroke: #eeeeee; }', '.node-square { stroke: #bbbbbb; }']);
expect(stylesToArray(svg)).toEqual(['.node { stroke: #eeeeee;}', '.node-square { stroke: #bbbbbb;}']);
});
it('should handle multiple stylesheets in document with classes in SVG', function () {
@ -160,7 +160,7 @@ describe('when cloning CSS ', function () {
addStyleToDocument();
addSecondStyleToDocument();
utils.cloneCssStyles(svg, {});
expect(stylesToArray(svg)).toEqual(['.node { stroke: #eeeeee; }', '.node-square { stroke: #bbbbbb; }', '.node-square { stroke: #bbeebb; }']);
expect(stylesToArray(svg)).toEqual(['.node { stroke: #eeeeee;}', '.node-square { stroke: #bbbbbb;}', '.node-square { stroke: #bbeebb;}']);
});
it('should handle multiple stylesheets + ignore styles in other mermaid SVG', function () {
@ -169,14 +169,14 @@ describe('when cloning CSS ', function () {
addSecondStyleToDocument();
addMermaidSVGwithStyleToDocument();
utils.cloneCssStyles(svg, {});
expect(stylesToArray(svg)).toEqual(['.node { stroke: #eeeeee; }', '.node-square { stroke: #bbbbbb; }', '.node-square { stroke: #bbeebb; }']);
expect(stylesToArray(svg)).toEqual(['.node { stroke: #eeeeee;}', '.node-square { stroke: #bbbbbb;}', '.node-square { stroke: #bbeebb;}']);
});
it('should handle a default class together with stylesheet in document with classes in SVG', function () {
var svg = generateSVG();
addStyleToDocument();
utils.cloneCssStyles(svg, {'default': {'styles': ['stroke:#ffffff', 'stroke-width:1.5px']}});
expect(stylesToArray(svg)).toEqual(['#mermaid-01 .node>rect { stroke:#ffffff; stroke-width:1.5px; }', '.node { stroke: #eeeeee; }', '.node-square { stroke: #bbbbbb; }']);
expect(stylesToArray(svg)).toEqual(['#mermaid-01 .node>rect { stroke:#ffffff; stroke-width:1.5px; }', '.node { stroke: #eeeeee;}', '.node-square { stroke: #bbbbbb;}']);
});
it('should handle a default class together with stylesheet in document and classDefs', function () {
@ -188,8 +188,8 @@ describe('when cloning CSS ', function () {
'node-circle': {'styles': ['fill:#444444', 'stroke:#111111']}
});
expect(stylesToArray(svg)).toEqual(['#mermaid-01 .node>rect { stroke:#ffffff; stroke-width:1.5px; }',
'.node { stroke: #eeeeee; }',
'.node-square { stroke: #bbbbbb; }',
'.node { stroke: #eeeeee;}',
'.node-square { stroke: #bbbbbb;}',
'#mermaid-01 .node-square>rect, .node-square>polygon, .node-square>ellipse { fill:#eeeeee; stroke:#aaaaaa; }',
'#mermaid-01 .node-circle>rect, .node-circle>polygon, .node-circle>ellipse { fill:#444444; stroke:#111111; }'
]);
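
The only change in these expectations is the whitespace before the closing brace: the cloned styles are now expected as '…;}' instead of '…; }'. That matches how the CSS serializer in the test environment emits rule.cssText; assuming cloneCssStyles copies rules from document.styleSheets (an assumption about its implementation, not shown here), the serialized form can be inspected like this:

var style = document.createElement('style')
style.innerHTML = '.node { stroke: #eeeeee; }'
document.head.appendChild(style)

var sheet = document.styleSheets[document.styleSheets.length - 1]
// The exact text depends on the CSS engine in use; the updated expectations
// above correspond to a serializer that drops the space before the closing brace.
console.log(sheet.cssRules[0].cssText)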

View File

@ -6,7 +6,7 @@
<script src="../../dist/mermaid.js"></script>
<script>
var config = {
startOnLoad:true,
startOnLoad:false,
callback:function(id){
console.log(id,' rendered');
},
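
With startOnLoad set to false the page no longer renders its diagrams automatically, so it has to trigger rendering itself. A minimal sketch using the mermaid calls already visible elsewhere in this diff (exactly when the truncated example file kicks this off is an assumption):

<script>
  mermaid.initialize(config)
  // Rendering is now an explicit step; the callback above fires per rendered diagram.
  window.onload = function () {
    mermaid.init()
  }
</script>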