sass.js

// CodeMirror, copyright (c) by Marijn Haverbeke and others
// Distributed under an MIT license: http://codemirror.net/LICENSE

(function(mod) {
  if (typeof exports == "object" && typeof module == "object") // CommonJS
    mod(require("../../lib/codemirror"));
  else if (typeof define == "function" && define.amd) // AMD
    define(["../../lib/codemirror"], mod);
  else // Plain browser env
    mod(CodeMirror);
})(function(CodeMirror) {
"use strict";
CodeMirror.defineMode("sass", function(config) {
  function tokenRegexp(words) {
    return new RegExp("^" + words.join("|"));
  }

  var keywords = ["true", "false", "null", "auto"];
  var keywordsRegexp = new RegExp("^" + keywords.join("|"));

  var operators = ["\\(", "\\)", "=", ">", "<", "==", ">=", "<=", "\\+", "-", "\\!=", "/", "\\*", "%", "and", "or", "not"];
  var opRegexp = tokenRegexp(operators);

  var pseudoElementsRegexp = /^::?[\w\-]+/;
  // Tokenizer used inside url(...): the parentheses are styled as operators
  // and everything between them as a (possibly unquoted) string.
  function urlTokens(stream, state) {
    var ch = stream.peek();

    if (ch === ")") {
      stream.next();
      state.tokenizer = tokenBase;
      return "operator";
    } else if (ch === "(") {
      stream.next();
      stream.eatSpace();

      return "operator";
    } else if (ch === "'" || ch === '"') {
      state.tokenizer = buildStringTokenizer(stream.next());
      return "string";
    } else {
      state.tokenizer = buildStringTokenizer(")", false);
      return "string";
    }
  }
  // Returns a tokenizer for a comment that started at `indentation`. The
  // comment continues until a line that is not indented deeper than where it
  // started; multi-line (/* */) comments also end at a closing "*/".
  function comment(indentation, multiLine) {
    return function(stream, state) {
      if (stream.sol() && stream.indentation() <= indentation) {
        state.tokenizer = tokenBase;
        return tokenBase(stream, state);
      }

      if (multiLine && stream.skipTo("*/")) {
        stream.next();
        stream.next();
        state.tokenizer = tokenBase;
      } else {
        stream.next();
      }

      return "comment";
    };
  }
  // Returns a tokenizer for a string delimited by `quote`. When `greedy` is
  // true (the default) the closing quote is consumed as part of the string;
  // a "#{" sequence switches to the interpolation tokenizer.
  function buildStringTokenizer(quote, greedy) {
    if (greedy == null) { greedy = true; }

    function stringTokenizer(stream, state) {
      var nextChar = stream.next();
      var peekChar = stream.peek();
      var previousChar = stream.string.charAt(stream.pos - 2);

      var endingString = ((nextChar !== "\\" && peekChar === quote) || (nextChar === quote && previousChar !== "\\"));

      if (endingString) {
        if (nextChar !== quote && greedy) { stream.next(); }
        state.tokenizer = tokenBase;
        return "string";
      } else if (nextChar === "#" && peekChar === "{") {
        state.tokenizer = buildInterpolationTokenizer(stringTokenizer);
        stream.next();
        return "operator";
      } else {
        return "string";
      }
    }

    return stringTokenizer;
  }
  // Returns a tokenizer for the inside of a #{...} interpolation: tokens are
  // handed to tokenBase until the closing "}", then `currentTokenizer` is
  // restored.
  function buildInterpolationTokenizer(currentTokenizer) {
    return function(stream, state) {
      if (stream.peek() === "}") {
        stream.next();
        state.tokenizer = currentTokenizer;
        return "operator";
      } else {
        return tokenBase(stream, state);
      }
    };
  }
  // Push a new scope one indentUnit deeper, at most once per line.
  function indent(state) {
    if (state.indentCount == 0) {
      state.indentCount++;
      var lastScopeOffset = state.scopes[0].offset;
      var currentOffset = lastScopeOffset + config.indentUnit;
      state.scopes.unshift({ offset: currentOffset });
    }
  }

  // Drop the innermost scope, keeping at least the top-level one.
  function dedent(state) {
    if (state.scopes.length == 1) return;

    state.scopes.shift();
  }
  function tokenBase(stream, state) {
    var ch = stream.peek();

    // Comment
    if (stream.match("/*")) {
      state.tokenizer = comment(stream.indentation(), true);
      return state.tokenizer(stream, state);
    }
    if (stream.match("//")) {
      state.tokenizer = comment(stream.indentation(), false);
      return state.tokenizer(stream, state);
    }

    // Interpolation
    if (stream.match("#{")) {
      state.tokenizer = buildInterpolationTokenizer(tokenBase);
      return "operator";
    }

    if (ch === ".") {
      stream.next();

      // Match class selectors
      if (stream.match(/^[\w-]+/)) {
        indent(state);
        return "atom";
      } else if (stream.peek() === "#") {
        indent(state);
        return "atom";
      } else {
        return "operator";
      }
    }

    if (ch === "#") {
      stream.next();

      // Hex numbers
      if (stream.match(/[0-9a-fA-F]{6}|[0-9a-fA-F]{3}/))
        return "number";

      // ID selectors
      if (stream.match(/^[\w-]+/)) {
        indent(state);
        return "atom";
      }

      if (stream.peek() === "#") {
        indent(state);
        return "atom";
      }
    }

    // Numbers
    if (stream.match(/^-?[0-9\.]+/))
      return "number";

    // Units
    if (stream.match(/^(px|em|in)\b/))
      return "unit";

    if (stream.match(keywordsRegexp))
      return "keyword";

    if (stream.match(/^url/) && stream.peek() === "(") {
      state.tokenizer = urlTokens;
      return "atom";
    }

    // Variables
    if (ch === "$") {
      stream.next();
      stream.eatWhile(/[\w-]/);

      if (stream.peek() === ":") {
        stream.next();
        return "variable-2";
      } else {
        return "variable-3";
      }
    }

    if (ch === "!") {
      stream.next();
      return stream.match(/^[\w]+/) ? "keyword": "operator";
    }

    if (ch === "=") {
      stream.next();

      // Match shortcut mixin definition
      if (stream.match(/^[\w-]+/)) {
        indent(state);
        return "meta";
      } else {
        return "operator";
      }
    }

    if (ch === "+") {
      stream.next();

      // Match shortcut mixin definition
      if (stream.match(/^[\w-]+/))
        return "variable-3";
      else
        return "operator";
    }

    // Indent Directives
    if (stream.match(/^@(else if|if|media|else|for|each|while|mixin|function)/)) {
      indent(state);
      return "meta";
    }

    // Other Directives
    if (ch === "@") {
      stream.next();
      stream.eatWhile(/[\w-]/);
      return "meta";
    }

    // Strings
    if (ch === '"' || ch === "'") {
      stream.next();
      state.tokenizer = buildStringTokenizer(ch);
      return "string";
    }

    // Pseudo element selectors
    if (ch == ":" && stream.match(pseudoElementsRegexp))
      return "keyword";

    // atoms
    if (stream.eatWhile(/[\w-&]/)) {
      // matches a property definition
      if (stream.peek() === ":" && !stream.match(pseudoElementsRegexp, false))
        return "property";
      else
        return "atom";
    }

    if (stream.match(opRegexp))
      return "operator";

    // If we haven't returned by now, we move 1 character
    // and return an error
    stream.next();
    return null;
  }
  // Wraps the current tokenizer and maintains the indentation scopes: dedent
  // on @return, indent after atoms (selectors), and prune any scope that is
  // deeper than the indentation of the current token.
  function tokenLexer(stream, state) {
    if (stream.sol()) state.indentCount = 0;

    var style = state.tokenizer(stream, state);
    var current = stream.current();

    if (current === "@return")
      dedent(state);

    if (style === "atom")
      indent(state);

    if (style !== null) {
      var startOfToken = stream.pos - current.length;
      var withCurrentIndent = startOfToken + (config.indentUnit * state.indentCount);

      var newScopes = [];

      for (var i = 0; i < state.scopes.length; i++) {
        var scope = state.scopes[i];

        if (scope.offset <= withCurrentIndent)
          newScopes.push(scope);
      }

      state.scopes = newScopes;
    }

    return style;
  }
  return {
    startState: function() {
      return {
        tokenizer: tokenBase,
        scopes: [{offset: 0, type: "sass"}],
        indentCount: 0,
        definedVars: [],
        definedMixins: []
      };
    },
    token: function(stream, state) {
      var style = tokenLexer(stream, state);

      state.lastToken = { style: style, content: stream.current() };

      return style;
    },

    indent: function(state) {
      return state.scopes[0].offset;
    }
  };
});

CodeMirror.defineMIME("text/x-sass", "sass");

});
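
// Usage sketch (not part of the original file): a minimal way this mode might
// be wired into a page once codemirror.js and sass.js are loaded. The
// textarea id "code" is an assumption for illustration only.
//
//   var editor = CodeMirror.fromTextArea(document.getElementById("code"), {
//     mode: "text/x-sass", // resolved through the defineMIME call above
//     indentUnit: 2,       // feeds config.indentUnit used by indent()
//     lineNumbers: true
//   });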