- CodeMirror.defineMode("sass", function(config) {
- var tokenRegexp = function(words){
- return new RegExp("^" + words.join("|"));
- };
- var keywords = ["true", "false", "null", "auto"];
- var keywordsRegexp = new RegExp("^" + keywords.join("|"));
- var operators = ["\\(", "\\)", "=", ">", "<", "==", ">=", "<=", "\\+", "-", "\\!=", "/", "\\*", "%", "and", "or", "not"];
- var opRegexp = tokenRegexp(operators);
- var pseudoElementsRegexp = /^::?[\w\-]+/;
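- // Tokenizer for url(...) values: styles the parentheses and hands quoted or unquoted contents to a string tokenizer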
- var urlTokens = function(stream, state){
- var ch = stream.peek();
- if (ch === ")"){
- stream.next();
- state.tokenizer = tokenBase;
- return "operator";
- }else if (ch === "("){
- stream.next();
- stream.eatSpace();
- return "operator";
- }else if (ch === "'" || ch === '"'){
- state.tokenizer = buildStringTokenizer(stream.next());
- return "string";
- }else{
- state.tokenizer = buildStringTokenizer(")", false);
- return "string";
- }
- };
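- // Tokenizer used inside /* ... */ comments; returns control to tokenBase once the closing */ is consumed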
- var multilineComment = function(stream, state) {
- if (stream.skipTo("*/")){
- stream.next();
- stream.next();
- state.tokenizer = tokenBase;
- }else {
- stream.next();
- }
- return "comment";
- };
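- // Builds a tokenizer for string content delimited by `quote`; when greedy is false the closing delimiter is left for the next tokenizer (used for unquoted url() contents)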
- var buildStringTokenizer = function(quote, greedy){
- if(greedy == null){ greedy = true; }
- function stringTokenizer(stream, state){
- var nextChar = stream.next();
- var peekChar = stream.peek();
- var previousChar = stream.string.charAt(stream.pos-2);
- var endingString = ((nextChar !== "\\" && peekChar === quote) || (nextChar === quote && previousChar !== "\\"));
- if (endingString){
- if (nextChar !== quote && greedy) { stream.next(); }
- state.tokenizer = tokenBase;
- return "string";
- }else if (nextChar === "#" && peekChar === "{"){
- state.tokenizer = buildInterpolationTokenizer(stringTokenizer);
- stream.next();
- return "operator";
- }else {
- return "string";
- }
- }
- return stringTokenizer;
- };
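- // Builds a tokenizer for #{...} interpolation, delegating to tokenBase until the closing brace, then restoring the previous tokenizer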
- var buildInterpolationTokenizer = function(currentTokenizer){
- return function(stream, state){
- if (stream.peek() === "}"){
- stream.next();
- state.tokenizer = currentTokenizer;
- return "operator";
- }else{
- return tokenBase(stream, state);
- }
- };
- };
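- // Opens a new scope one indentUnit deeper than the innermost one, at most once per line (indentCount is reset at the start of each line)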
- var indent = function(state){
- if (state.indentCount == 0){
- state.indentCount++;
- var lastScopeOffset = state.scopes[0].offset;
- var currentOffset = lastScopeOffset + config.indentUnit;
- state.scopes.unshift({ offset:currentOffset });
- }
- };
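- // Closes the innermost scope, never dropping the top-level one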
- var dedent = function(state){
- if (state.scopes.length == 1) { return; }
- state.scopes.shift();
- };
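- // Default tokenizer: classifies selectors, numbers, units, variables, directives, strings and operators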
- var tokenBase = function(stream, state) {
- var ch = stream.peek();
- // Single line Comment
- if (stream.match('//')) {
- stream.skipToEnd();
- return "comment";
- }
- // Multiline Comment
- if (stream.match('/*')){
- state.tokenizer = multilineComment;
- return state.tokenizer(stream, state);
- }
- // Interpolation
- if (stream.match('#{')){
- state.tokenizer = buildInterpolationTokenizer(tokenBase);
- return "operator";
- }
- if (ch === "."){
- stream.next();
- // Match class selectors
- if (stream.match(/^[\w-]+/)){
- indent(state);
- return "atom";
- }else if (stream.peek() === "#"){
- indent(state);
- return "atom";
- }else{
- return "operator";
- }
- }
- if (ch === "#"){
- stream.next();
- // Hex numbers
- if (stream.match(/[0-9a-fA-F]{6}|[0-9a-fA-F]{3}/)){
- return "number";
- }
- // ID selectors
- if (stream.match(/^[\w-]+/)){
- indent(state);
- return "atom";
- }
- if (stream.peek() === "#"){
- indent(state);
- return "atom";
- }
- }
- // Numbers
- if (stream.match(/^-?[0-9\.]+/)){
- return "number";
- }
- // Units
- if (stream.match(/^(px|em|in)\b/)){
- return "unit";
- }
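- // Keywords (true, false, null, auto)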
- if (stream.match(keywordsRegexp)){
- return "keyword";
- }
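- // url(...) values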
- if (stream.match(/^url/) && stream.peek() === "("){
- state.tokenizer = urlTokens;
- return "atom";
- }
- // Variables
- if (ch === "$"){
- stream.next();
- stream.eatWhile(/[\w-]/);
- if (stream.peek() === ":"){
- stream.next();
- return "variable-2";
- }else{
- return "variable-3";
- }
- }
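- // Flags such as !important and !default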
- if (ch === "!"){
- stream.next();
- if (stream.match(/^[\w]+/)){
- return "keyword";
- }
- return "operator";
- }
- if (ch === "="){
- stream.next();
- // Match shortcut mixin definition
- if (stream.match(/^[\w-]+/)){
- indent(state);
- return "meta";
- }else {
- return "operator";
- }
- }
- if (ch === "+"){
- stream.next();
- // Match shortcut mixin inclusion (+name)
- if (stream.match(/^[\w-]+/)){
- return "variable-3";
- }else {
- return "operator";
- }
- }
- // Indent Directives
- if (stream.match(/^@(else if|if|media|else|for|each|while|mixin|function)/)){
- indent(state);
- return "meta";
- }
- // Other Directives
- if (ch === "@"){
- stream.next();
- stream.eatWhile(/[\w-]/);
- return "meta";
- }
- // Strings
- if (ch === '"' || ch === "'"){
- stream.next();
- state.tokenizer = buildStringTokenizer(ch);
- return "string";
- }
- // Pseudo element selectors
- if (ch == ':' && stream.match(pseudoElementsRegexp)){
- return "keyword";
- }
- // atoms
- if (stream.eatWhile(/[\w-&]/)){
- // matches a property definition
- if (stream.peek() === ":" && !stream.match(pseudoElementsRegexp, false))
- return "property";
- else
- return "atom";
- }
- if (stream.match(opRegexp)){
- return "operator";
- }
- // If we haven't returned by now, consume one character
- // and return null so the stream still advances
- stream.next();
- return null;
- };
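- // Drives the active tokenizer: resets the per-line indent counter at the start of each line, dedents after @return, indents after atoms, and prunes scopes deeper than the current indentation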
- var tokenLexer = function(stream, state) {
- if (stream.sol()){
- state.indentCount = 0;
- }
- var style = state.tokenizer(stream, state);
- var current = stream.current();
- if (current === "@return"){
- dedent(state);
- }
- if (style === "atom"){
- indent(state);
- }
- if (style !== null){
- var startOfToken = stream.pos - current.length;
- var withCurrentIndent = startOfToken + (config.indentUnit * state.indentCount);
- var newScopes = [];
- for (var i = 0; i < state.scopes.length; i++){
- var scope = state.scopes[i];
- if (scope.offset <= withCurrentIndent){
- newScopes.push(scope);
- }
- }
- state.scopes = newScopes;
- }
- return style;
- };
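- // The mode definition handed to CodeMirror: initial state, the token function, and indentation based on the innermost scope offset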
- return {
- startState: function() {
- return {
- tokenizer: tokenBase,
- scopes: [{offset: 0, type: 'sass'}],
- definedVars: [],
- definedMixins: []
- };
- },
- token: function(stream, state) {
- var style = tokenLexer(stream, state);
- state.lastToken = { style: style, content: stream.current() };
- return style;
- },
- indent: function(state) {
- return state.scopes[0].offset;
- }
- };
- });
- CodeMirror.defineMIME("text/x-sass", "sass");
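- // Example usage (assuming CodeMirror is loaded on the page): var editor = CodeMirror(document.body, { mode: "text/x-sass" });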