diff options
Diffstat (limited to 'Source/WebInspectorUI/UserInterface/External/CodeMirror/sass.js')
-rw-r--r-- | Source/WebInspectorUI/UserInterface/External/CodeMirror/sass.js | 428 |
1 file changed, 256 insertions, 172 deletions
diff --git a/Source/WebInspectorUI/UserInterface/External/CodeMirror/sass.js b/Source/WebInspectorUI/UserInterface/External/CodeMirror/sass.js index 9c9a0dae0..6973ece29 100644 --- a/Source/WebInspectorUI/UserInterface/External/CodeMirror/sass.js +++ b/Source/WebInspectorUI/UserInterface/External/CodeMirror/sass.js @@ -1,300 +1,379 @@ +// CodeMirror, copyright (c) by Marijn Haverbeke and others +// Distributed under an MIT license: http://codemirror.net/LICENSE + +(function(mod) { + if (typeof exports == "object" && typeof module == "object") // CommonJS + mod(require("../../lib/codemirror")); + else if (typeof define == "function" && define.amd) // AMD + define(["../../lib/codemirror"], mod); + else // Plain browser env + mod(CodeMirror); +})(function(CodeMirror) { +"use strict"; + CodeMirror.defineMode("sass", function(config) { - var tokenRegexp = function(words){ + function tokenRegexp(words) { return new RegExp("^" + words.join("|")); - }; + } var keywords = ["true", "false", "null", "auto"]; var keywordsRegexp = new RegExp("^" + keywords.join("|")); - var operators = ["\\(", "\\)", "=", ">", "<", "==", ">=", "<=", "\\+", "-", "\\!=", "/", "\\*", "%", "and", "or", "not"]; + var operators = ["\\(", "\\)", "=", ">", "<", "==", ">=", "<=", "\\+", "-", + "\\!=", "/", "\\*", "%", "and", "or", "not", ";","\\{","\\}",":"]; var opRegexp = tokenRegexp(operators); - var pseudoElementsRegexp = /^::?[\w\-]+/; + var pseudoElementsRegexp = /^::?[a-zA-Z_][\w\-]*/; - var urlTokens = function(stream, state){ + function urlTokens(stream, state) { var ch = stream.peek(); - if (ch === ")"){ + if (ch === ")") { stream.next(); state.tokenizer = tokenBase; return "operator"; - }else if (ch === "("){ + } else if (ch === "(") { stream.next(); stream.eatSpace(); return "operator"; - }else if (ch === "'" || ch === '"'){ + } else if (ch === "'" || ch === '"') { state.tokenizer = buildStringTokenizer(stream.next()); return "string"; - }else{ + } else { state.tokenizer = 
buildStringTokenizer(")", false); return "string"; } - }; - var multilineComment = function(stream, state) { - if (stream.skipTo("*/")){ - stream.next(); - stream.next(); - state.tokenizer = tokenBase; - }else { - stream.next(); - } + } + function comment(indentation, multiLine) { + return function(stream, state) { + if (stream.sol() && stream.indentation() <= indentation) { + state.tokenizer = tokenBase; + return tokenBase(stream, state); + } - return "comment"; - }; + if (multiLine && stream.skipTo("*/")) { + stream.next(); + stream.next(); + state.tokenizer = tokenBase; + } else { + stream.skipToEnd(); + } - var buildStringTokenizer = function(quote, greedy){ - if(greedy == null){ greedy = true; } + return "comment"; + }; + } - function stringTokenizer(stream, state){ + function buildStringTokenizer(quote, greedy) { + if (greedy == null) { greedy = true; } + + function stringTokenizer(stream, state) { var nextChar = stream.next(); var peekChar = stream.peek(); var previousChar = stream.string.charAt(stream.pos-2); var endingString = ((nextChar !== "\\" && peekChar === quote) || (nextChar === quote && previousChar !== "\\")); - /* - console.log("previousChar: " + previousChar); - console.log("nextChar: " + nextChar); - console.log("peekChar: " + peekChar); - console.log("ending: " + endingString); - */ - - if (endingString){ + if (endingString) { if (nextChar !== quote && greedy) { stream.next(); } state.tokenizer = tokenBase; return "string"; - }else if (nextChar === "#" && peekChar === "{"){ + } else if (nextChar === "#" && peekChar === "{") { state.tokenizer = buildInterpolationTokenizer(stringTokenizer); stream.next(); return "operator"; - }else { + } else { return "string"; } } return stringTokenizer; - }; + } - var buildInterpolationTokenizer = function(currentTokenizer){ - return function(stream, state){ - if (stream.peek() === "}"){ + function buildInterpolationTokenizer(currentTokenizer) { + return function(stream, state) { + if (stream.peek() === "}") { 
stream.next(); state.tokenizer = currentTokenizer; return "operator"; - }else{ + } else { return tokenBase(stream, state); } }; - }; + } - var indent = function(state){ - if (state.indentCount == 0){ + function indent(state) { + if (state.indentCount == 0) { state.indentCount++; var lastScopeOffset = state.scopes[0].offset; var currentOffset = lastScopeOffset + config.indentUnit; state.scopes.unshift({ offset:currentOffset }); } - }; + } - var dedent = function(state){ - if (state.scopes.length == 1) { return; } + function dedent(state) { + if (state.scopes.length == 1) return; state.scopes.shift(); - }; + } - var tokenBase = function(stream, state) { + function tokenBase(stream, state) { var ch = stream.peek(); - // Single line Comment - if (stream.match('//')) { - stream.skipToEnd(); - return "comment"; + // Comment + if (stream.match("/*")) { + state.tokenizer = comment(stream.indentation(), true); + return state.tokenizer(stream, state); } - - // Multiline Comment - if (stream.match('/*')){ - state.tokenizer = multilineComment; + if (stream.match("//")) { + state.tokenizer = comment(stream.indentation(), false); return state.tokenizer(stream, state); } // Interpolation - if (stream.match('#{')){ - state.tokenizer = buildInterpolationTokenizer(tokenBase); + if (stream.match("#{")) { + state.tokenizer = buildInterpolationTokenizer(tokenBase); return "operator"; } - if (ch === "."){ + // Strings + if (ch === '"' || ch === "'") { stream.next(); + state.tokenizer = buildStringTokenizer(ch); + return "string"; + } - // Match class selectors - if (stream.match(/^[\w-]+/)){ - indent(state); - return "atom"; - }else if (stream.peek() === "#"){ - indent(state); - return "atom"; - }else{ - return "operator"; + if(!state.cursorHalf){// state.cursorHalf === 0 + // first half i.e. 
before : for key-value pairs + // including selectors + + if (ch === ".") { + stream.next(); + if (stream.match(/^[\w-]+/)) { + indent(state); + return "atom"; + } else if (stream.peek() === "#") { + indent(state); + return "atom"; + } } - } - if (ch === "#"){ - stream.next(); + if (ch === "#") { + stream.next(); + // ID selectors + if (stream.match(/^[\w-]+/)) { + indent(state); + return "atom"; + } + if (stream.peek() === "#") { + indent(state); + return "atom"; + } + } - // Hex numbers - if (stream.match(/[0-9a-fA-F]{6}|[0-9a-fA-F]{3}/)){ - return "number"; + // Variables + if (ch === "$") { + stream.next(); + stream.eatWhile(/[\w-]/); + return "variable-2"; } - // ID selectors - if (stream.match(/^[\w-]+/)){ - indent(state); + // Numbers + if (stream.match(/^-?[0-9\.]+/)) + return "number"; + + // Units + if (stream.match(/^(px|em|in)\b/)) + return "unit"; + + if (stream.match(keywordsRegexp)) + return "keyword"; + + if (stream.match(/^url/) && stream.peek() === "(") { + state.tokenizer = urlTokens; return "atom"; } - if (stream.peek() === "#"){ - indent(state); - return "atom"; + if (ch === "=") { + // Match shortcut mixin definition + if (stream.match(/^=[\w-]+/)) { + indent(state); + return "meta"; + } } - } - // Numbers - if (stream.match(/^-?[0-9\.]+/)){ - return "number"; - } + if (ch === "+") { + // Match shortcut mixin definition + if (stream.match(/^\+[\w-]+/)){ + return "variable-3"; + } + } - // Units - if (stream.match(/^(px|em|in)\b/)){ - return "unit"; - } + if(ch === "@"){ + if(stream.match(/@extend/)){ + if(!stream.match(/\s*[\w]/)) + dedent(state); + } + } - if (stream.match(keywordsRegexp)){ - return "keyword"; - } - if (stream.match(/^url/) && stream.peek() === "("){ - state.tokenizer = urlTokens; - return "atom"; - } + // Indent Directives + if (stream.match(/^@(else if|if|media|else|for|each|while|mixin|function)/)) { + indent(state); + return "meta"; + } - // Variables - if (ch === "$"){ - stream.next(); - stream.eatWhile(/[\w-]/); + // 
Other Directives + if (ch === "@") { + stream.next(); + stream.eatWhile(/[\w-]/); + return "meta"; + } + + if (stream.eatWhile(/[\w-]/)){ + if(stream.match(/ *: *[\w-\+\$#!\("']/,false)){ + return "property"; + } + else if(stream.match(/ *:/,false)){ + indent(state); + state.cursorHalf = 1; + return "atom"; + } + else if(stream.match(/ *,/,false)){ + return "atom"; + } + else{ + indent(state); + return "atom"; + } + } - if (stream.peek() === ":"){ + if(ch === ":"){ + if (stream.match(pseudoElementsRegexp)){ // could be a pseudo-element + return "keyword"; + } stream.next(); - return "variable-2"; - }else{ - return "variable-3"; + state.cursorHalf=1; + return "operator"; } - } - if (ch === "!"){ - stream.next(); + } // cursorHalf===0 ends here + else{ - if (stream.match(/^[\w]+/)){ - return "keyword"; + if (ch === "#") { + stream.next(); + // Hex numbers + if (stream.match(/[0-9a-fA-F]{6}|[0-9a-fA-F]{3}/)){ + if(!stream.peek()){ + state.cursorHalf = 0; + } + return "number"; + } } - return "operator"; - } + // Numbers + if (stream.match(/^-?[0-9\.]+/)){ + if(!stream.peek()){ + state.cursorHalf = 0; + } + return "number"; + } - if (ch === "="){ - stream.next(); + // Units + if (stream.match(/^(px|em|in)\b/)){ + if(!stream.peek()){ + state.cursorHalf = 0; + } + return "unit"; + } - // Match shortcut mixin definition - if (stream.match(/^[\w-]+/)){ - indent(state); - return "meta"; - }else { - return "operator"; + if (stream.match(keywordsRegexp)){ + if(!stream.peek()){ + state.cursorHalf = 0; + } + return "keyword"; } - } - if (ch === "+"){ - stream.next(); + if (stream.match(/^url/) && stream.peek() === "(") { + state.tokenizer = urlTokens; + if(!stream.peek()){ + state.cursorHalf = 0; + } + return "atom"; + } - // Match shortcut mixin definition - if (stream.match(/^[\w-]+/)){ + // Variables + if (ch === "$") { + stream.next(); + stream.eatWhile(/[\w-]/); + if(!stream.peek()){ + state.cursorHalf = 0; + } return "variable-3"; - }else { - return "operator"; } - } - // 
Indent Directives - if (stream.match(/^@(else if|if|media|else|for|each|while|mixin|function)/)){ - indent(state); - return "meta"; - } + // bang character for !important, !default, etc. + if (ch === "!") { + stream.next(); + if(!stream.peek()){ + state.cursorHalf = 0; + } + return stream.match(/^[\w]+/) ? "keyword": "operator"; + } - // Other Directives - if (ch === "@"){ - stream.next(); - stream.eatWhile(/[\w-]/); - return "meta"; - } + if (stream.match(opRegexp)){ + if(!stream.peek()){ + state.cursorHalf = 0; + } + return "operator"; + } - // Strings - if (ch === '"' || ch === "'"){ - stream.next(); - state.tokenizer = buildStringTokenizer(ch); - return "string"; - } + // attributes + if (stream.eatWhile(/[\w-]/)) { + if(!stream.peek()){ + state.cursorHalf = 0; + } + return "attribute"; + } - // Pseudo element selectors - if (ch == ':' && stream.match(pseudoElementsRegexp)){ - return "keyword"; - } + //stream.eatSpace(); + if(!stream.peek()){ + state.cursorHalf = 0; + return null; + } - // atoms - if (stream.eatWhile(/[\w-&]/)){ - // matches a property definition - if (stream.peek() === ":" && !stream.match(pseudoElementsRegexp, false)) - return "property"; - else - return "atom"; - } + } // else ends here - if (stream.match(opRegexp)){ + if (stream.match(opRegexp)) return "operator"; - } // If we haven't returned by now, we move 1 character // and return an error stream.next(); return null; - }; + } - var tokenLexer = function(stream, state) { - if (stream.sol()){ - state.indentCount = 0; - } + function tokenLexer(stream, state) { + if (stream.sol()) state.indentCount = 0; var style = state.tokenizer(stream, state); var current = stream.current(); - if (current === "@return"){ + if (current === "@return" || current === "}"){ dedent(state); } - if (style === "atom"){ - indent(state); - } - - if (style !== null){ + if (style !== null) { var startOfToken = stream.pos - current.length; + var withCurrentIndent = startOfToken + (config.indentUnit * 
state.indentCount); var newScopes = []; - for (var i = 0; i < state.scopes.length; i++){ + for (var i = 0; i < state.scopes.length; i++) { var scope = state.scopes[i]; - if (scope.offset <= withCurrentIndent){ + if (scope.offset <= withCurrentIndent) newScopes.push(scope); - } } state.scopes = newScopes; @@ -302,13 +381,16 @@ CodeMirror.defineMode("sass", function(config) { return style; - }; + } return { startState: function() { return { tokenizer: tokenBase, - scopes: [{offset: 0, type: 'sass'}], + scopes: [{offset: 0, type: "sass"}], + indentCount: 0, + cursorHalf: 0, // cursor half tells us if cursor lies after (1) + // or before (0) colon (well... more or less) definedVars: [], definedMixins: [] }; @@ -328,3 +410,5 @@ CodeMirror.defineMode("sass", function(config) { }); CodeMirror.defineMIME("text/x-sass", "sass"); + +}); |