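// CodeMirror mode for the indentation-based Sass syntax,
// registered below under the MIME type "text/x-sass".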
CodeMirror.defineMode("sass", function(config) {
  // Build an anchored alternation regexp from a list of token patterns.
  var tokenRegexp = function(words){
    return new RegExp("^(" + words.join("|") + ")");
  };
  var tags = ["&", "a","abbr","acronym","address","applet","area","article","aside","audio","b","base","basefont","bdi","bdo","big","blockquote","body","br","button","canvas","caption","cite","code","col","colgroup","command","datalist","dd","del","details","dfn","dir","div","dl","dt","em","embed","fieldset","figcaption","figure","font","footer","form","frame","frameset","h1","h2","h3","h4","h5","h6","head","header","hgroup","hr","html","i","iframe","img","input","ins","keygen","kbd","label","legend","li","link","map","mark","menu","meta","meter","nav","noframes","noscript","object","ol","optgroup","option","output","p","param","pre","progress","q","rp","rt","ruby","s","samp","script","section","select","small","source","span","strike","strong","style","sub","summary","sup","table","tbody","td","textarea","tfoot","th","thead","time","title","tr","track","tt","u","ul","var","video","wbr"];
  var keywords = ["true", "false", "null", "auto"];
  // The trailing \b keeps identifiers such as "autoflow" from matching "auto".
  var keywordsRegexp = new RegExp("^(" + keywords.join("|") + ")\\b");
  // Two-character operators precede their one-character prefixes so that the
  // first-match alternation does not consume "==" as "=".
  var operators = ["\\(", "\\)", "==", ">=", "<=", "\\!=", "=", ">", "<", "\\+", "-", "/", "\\*", "%", "and", "or", "not"];
  var opRegexp = tokenRegexp(operators);
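  // For example, tokenRegexp(["and", "or"]) yields /^(and|or)/, which
  // stream.match() then applies at the current stream position.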
  // Linear scan over the tag list; true when val is a known html tag name.
  function htmlTag(val){
    for(var i = 0; i < tags.length; i++){
      if(val === tags[i]){
        return true;
      }
    }
    return false;
  }
  var pseudoElements = [':first-line', ':hover', ':first-letter', ':active', ':visited', ':before', ':after', ':link', ':focus', ':first-child', ':lang'];
  // One word boundary after the whole group; joining on "\\b|" had left the
  // final entry unguarded, so ":lang" also matched the prefix of longer names.
  var pseudoElementsRegexp = new RegExp("^(" + pseudoElements.join("|") + ")\\b");
  // Tokenizes the argument of url(...); installed by tokenBase on "url(".
  var urlTokens = function(stream, state){
    var ch = stream.peek();
    if (ch === ")"){
      stream.next();
      state.tokenizer = tokenBase;
      return "operator";
    }else if (ch === "("){
      stream.next();
      stream.eatSpace();
      return "operator";
    }else if (ch === "'" || ch === '"'){
      state.tokenizer = buildStringTokenizer(stream.next());
      return "string";
    }else{
      // Unquoted url: read up to the closing paren without consuming it
      // (greedy = false), so the ")" branch above can emit the operator.
      state.tokenizer = buildStringTokenizer(")", false);
      return "string";
    }
  };
  var multilineComment = function(stream, state) {
    if (stream.skipTo("*/")){
      // Consume the "*/" terminator and return to the base tokenizer.
      stream.next();
      stream.next();
      state.tokenizer = tokenBase;
    }else {
      // No terminator on this line; the whole remainder is comment.
      stream.skipToEnd();
    }
    return "comment";
  };
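  // The tokenizers cooperate through state.tokenizer: whichever function it
  // points at handles the next token, which is how multi-line constructs
  // (comments, strings, url(...), #{...}) survive across line boundaries.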
  // Returns a tokenizer for a string delimited by `quote`. When greedy is
  // false the closing delimiter is left in the stream (used for unquoted urls).
  var buildStringTokenizer = function(quote, greedy){
    if(greedy == null){ greedy = true; }
    function stringTokenizer(stream, state){
      var nextChar = stream.next();
      var peekChar = stream.peek();
      var previousChar = stream.string.charAt(stream.pos - 2);
      // The string ends when an unescaped quote is next or was just consumed.
      var endingString = ((nextChar !== "\\" && peekChar === quote) || (nextChar === quote && previousChar !== "\\"));
      if (endingString){
        if (nextChar !== quote && greedy) { stream.next(); }
        state.tokenizer = tokenBase;
        return "string";
      }else if (nextChar === "#" && peekChar === "{"){
        // #{...} interpolation embedded in the string.
        state.tokenizer = buildInterpolationTokenizer(stringTokenizer);
        stream.next();
        return "operator";
      }else {
        return "string";
      }
    }
    return stringTokenizer;
  };
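  // For example, in `content: "a#{$b}c"` the string tokenizer emits "a",
  // hands $b to the interpolation tokenizer below, then resumes for "c".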
  // Tokenizes the body of a #{...} interpolation with the base rules, then
  // hands control back to whichever tokenizer the interpolation interrupted.
  var buildInterpolationTokenizer = function(currentTokenizer){
    return function(stream, state){
      if (stream.peek() === "}"){
        stream.next();
        state.tokenizer = currentTokenizer;
        return "operator";
      }else{
        return tokenBase(stream, state);
      }
    };
  };
  // Open a new scope one indentUnit deeper, at most once per line.
  var indent = function(state){
    if (state.indentCount === 0){
      state.indentCount++;
      var lastScopeOffset = state.scopes[0].offset;
      var currentOffset = lastScopeOffset + config.indentUnit;
      state.scopes.unshift({ offset: currentOffset });
    }
  };
  // Close the innermost scope, always keeping the top-level one.
  var dedent = function(state){
    if (state.scopes.length === 1) { return; }
    state.scopes.shift();
  };
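  // state.scopes is a stack of { offset } records, innermost first. Selectors
  // and block directives push a scope; tokenLexer (below) pops every scope
  // deeper than the indentation of the current line.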
  var tokenBase = function(stream, state) {
    var ch = stream.peek();
    // Single-line comment
    if (stream.match('//')) {
      stream.skipToEnd();
      return "comment";
    }
    // Multi-line comment
    if (stream.match('/*')){
      state.tokenizer = multilineComment;
      return state.tokenizer(stream, state);
    }
    // Interpolation
    if (stream.match('#{')){
      state.tokenizer = buildInterpolationTokenizer(tokenBase);
      return "operator";
    }
if (ch === "."){ | |
stream.next(); | |
// Match class selectors | |
if (stream.match(/^[\w-]+/)){ | |
indent(state); | |
return "atom"; | |
}else if (stream.peek() === "#"){ | |
indent(state); | |
return "atom"; | |
}else{ | |
return "operator"; | |
} | |
} | |
if (ch === "#"){ | |
stream.next(); | |
// Hex numbers | |
if (stream.match(/[0-9a-fA-F]{6}|[0-9a-fA-F]{3}/)){ | |
return "number"; | |
} | |
// ID selectors | |
if (stream.match(/^[\w-]+/)){ | |
indent(state); | |
return "atom"; | |
} | |
if (stream.peek() === "#"){ | |
indent(state); | |
return "atom"; | |
} | |
} | |
    // Numbers
    if (stream.match(/^-?[0-9\.]+/)){
      return "number";
    }
    // Units
    if (stream.match(/^(px|em|in)\b/)){
      return "unit";
    }
    if (stream.match(keywordsRegexp)){
      return "keyword";
    }
    if (stream.match(/^url/) && stream.peek() === "("){
      state.tokenizer = urlTokens;
      return "atom";
    }
    // Variables: "$name:" is a definition, a bare "$name" a reference.
    if (ch === "$"){
      stream.next();
      stream.eatWhile(/[\w-]/);
      if (stream.peek() === ":"){
        stream.next();
        return "variable-2";
      }else{
        return "variable-3";
      }
    }
    // Flags such as !important, plus the legacy !variable syntax.
    if (ch === "!"){
      stream.next();
      if (stream.match(/^[\w]+/)){
        return "keyword";
      }
      return "operator";
    }
if (ch === "="){ | |
stream.next(); | |
// Match shortcut mixin definition | |
if (stream.match(/^[\w-]+/)){ | |
indent(state); | |
return "meta"; | |
}else { | |
return "operator"; | |
} | |
} | |
if (ch === "+"){ | |
stream.next(); | |
// Match shortcut mixin definition | |
if (stream.match(/^[\w-]+/)){ | |
return "variable-3"; | |
}else { | |
return "operator"; | |
} | |
} | |
    // Directives that open an indented block
    if (stream.match(/^@(else if|if|media|else|for|each|while|mixin|function)/)){
      indent(state);
      return "meta";
    }
    // Other directives
    if (ch === "@"){
      stream.next();
      stream.eatWhile(/[\w-]/);
      return "meta";
    }
    // Strings
    if (ch === '"' || ch === "'"){
      stream.next();
      state.tokenizer = buildStringTokenizer(ch);
      return "string";
    }
    // Pseudo-element selectors
    if (stream.match(pseudoElementsRegexp)){
      return "keyword";
    }
    // Atoms
    if (stream.eatWhile(/[\w-&]/)){
      var current = stream.current();
      // A trailing colon marks a property definition...
      if (stream.peek() === ":"){
        // ...unless this is an html tag with a pseudo selector, as in
        // "a:hover", which stays an atom (the lookahead does not consume).
        if (htmlTag(current) && stream.match(pseudoElementsRegexp, false)){
          return "atom";
        }else{
          stream.next();
          return "property";
        }
      }
      return "atom";
    }
    if (stream.match(opRegexp)){
      return "operator";
    }
    // Nothing matched: consume one character and flag it as an error.
    stream.next();
    return "error";
  };
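  // tokenLexer wraps the active tokenizer: it resets the per-line indent
  // budget at the start of each line, dedents after @return, indents after
  // bare html-tag selectors, and prunes scopes deeper than the current token.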
  var tokenLexer = function(stream, state) {
    if (stream.sol()){
      state.indentCount = 0;
    }
    var style = state.tokenizer(stream, state);
    var current = stream.current();
    if (current === "@return"){
      dedent(state);
    }
    if (style === "atom" && htmlTag(current)){
      indent(state);
    }
    if (style !== "error"){
      var startOfToken = stream.pos - current.length;
      var withCurrentIndent = startOfToken + (config.indentUnit * state.indentCount);
      // Keep only the scopes no deeper than the current indentation.
      var newScopes = [];
      for (var i = 0; i < state.scopes.length; i++){
        var scope = state.scopes[i];
        if (scope.offset <= withCurrentIndent){
          newScopes.push(scope);
        }
      }
      state.scopes = newScopes;
    }
    return style;
  };
  return {
    startState: function() {
      return {
        tokenizer: tokenBase,
        scopes: [{offset: 0, type: 'sass'}],
        indentCount: 0,
        definedVars: [],
        definedMixins: []
      };
    },
    token: function(stream, state) {
      var style = tokenLexer(stream, state);
      state.lastToken = { style: style, content: stream.current() };
      return style;
    },
    indent: function(state) {
      // Suggest the offset of the innermost open scope.
      return state.scopes[0].offset;
    }
  };
});
CodeMirror.defineMIME("text/x-sass", "sass");
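// Usage sketch (illustrative, not part of the mode): the element id and the
// option values below are assumptions for demonstration only.
//
//   var editor = CodeMirror.fromTextArea(document.getElementById("code"), {
//     mode: "text/x-sass",   // the MIME type registered above
//     indentUnit: 2,         // feeds the scope offsets used by indent()
//     lineNumbers: true
//   });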