// sass.js — CodeMirror mode for Sass (indented syntax).
// (Header reconstructed from a garbled file-listing paste.)
  1. CodeMirror.defineMode("sass", function(config) {
  2. var tokenRegexp = function(words){
  3. return new RegExp("^" + words.join("|"));
  4. };
  5. var keywords = ["true", "false", "null", "auto"];
  6. var keywordsRegexp = new RegExp("^" + keywords.join("|"));
  7. var operators = ["\\(", "\\)", "=", ">", "<", "==", ">=", "<=", "\\+", "-", "\\!=", "/", "\\*", "%", "and", "or", "not"];
  8. var opRegexp = tokenRegexp(operators);
  9. var pseudoElementsRegexp = /^::?[\w\-]+/;
  // Tokenizer active inside url(...): consumes the parentheses and the
  // URL body, which may be a quoted string or a bare (unquoted) value.
  var urlTokens = function(stream, state){
    var next = stream.peek();
    if (next === "("){
      stream.next();
      stream.eatSpace();
      return "operator";
    }
    if (next === ")"){
      // End of url(...) — hand control back to the base tokenizer.
      stream.next();
      state.tokenizer = tokenBase;
      return "operator";
    }
    if (next === "'" || next === '"'){
      state.tokenizer = buildStringTokenizer(stream.next());
      return "string";
    }
    // Bare url: treat everything up to ")" as a string, leaving the
    // closing paren in place (greedy = false).
    state.tokenizer = buildStringTokenizer(")", false);
    return "string";
  };
  // Tokenizer active inside a /* ... */ comment: look for the terminator
  // on the current line; otherwise advance one character and stay in
  // comment mode.
  var multilineComment = function(stream, state) {
    if (!stream.skipTo("*/")) {
      stream.next();
      return "comment";
    }
    // Consume the "*" and "/" and restore the base tokenizer.
    stream.next();
    stream.next();
    state.tokenizer = tokenBase;
    return "comment";
  };
  // Build a tokenizer for a string delimited by `quote`.
  // When `greedy` (default true) the closing quote is consumed as part of
  // the string token; url(...) passes greedy = false so the ")" delimiter
  // is left for the url tokenizer.  Backslash-escaped quotes do not end
  // the string, and #{ switches into interpolation mode.
  var buildStringTokenizer = function(quote, greedy){
    if (greedy === undefined || greedy === null) { greedy = true; }
    function stringTokenizer(stream, state){
      var cur = stream.next();
      var ahead = stream.peek();
      // Character preceding `cur` on the line — used to detect \" escapes.
      var before = stream.string.charAt(stream.pos - 2);
      var atEnd = (cur !== "\\" && ahead === quote) ||
                  (cur === quote && before !== "\\");
      if (atEnd){
        // In greedy mode swallow the closing quote as well.
        if (cur !== quote && greedy) { stream.next(); }
        state.tokenizer = tokenBase;
        return "string";
      }
      if (cur === "#" && ahead === "{"){
        // Enter #{...}; resume this string tokenizer afterwards.
        state.tokenizer = buildInterpolationTokenizer(stringTokenizer);
        stream.next();
        return "operator";
      }
      return "string";
    }
    return stringTokenizer;
  };
  // Build a tokenizer for #{...} interpolation: delegate to the base
  // tokenizer until the closing "}", then restore `currentTokenizer`.
  var buildInterpolationTokenizer = function(currentTokenizer){
    return function(stream, state){
      if (stream.peek() !== "}"){
        return tokenBase(stream, state);
      }
      stream.next();
      state.tokenizer = currentTokenizer;
      return "operator";
    };
  };
  // Push a new indentation scope one indentUnit deeper than the current
  // innermost scope — at most once per line (indentCount is reset to 0
  // at the start of every line by tokenLexer).
  var indent = function(state){
    if (state.indentCount !== 0) { return; }
    state.indentCount++;
    var newOffset = state.scopes[0].offset + config.indentUnit;
    state.scopes.unshift({ offset: newOffset });
  };
  // Pop the innermost indentation scope; the outermost scope is never
  // removed, so scopes[0] always exists.
  var dedent = function(state){
    if (state.scopes.length > 1) { state.scopes.shift(); }
  };
  // Main tokenizer: inspects the stream at the current position and
  // returns a CodeMirror style string (or null for unrecognized input).
  // NOTE: the checks below are strictly order-dependent — earlier
  // matches deliberately shadow later ones.
  var tokenBase = function(stream, state) {
    var ch = stream.peek();
    // Single line Comment: "//" runs to end of line.
    if (stream.match('//')) {
      stream.skipToEnd();
      return "comment";
    }
    // Multiline Comment: switch to the dedicated tokenizer until "*/".
    if (stream.match('/*')){
      state.tokenizer = multilineComment;
      return state.tokenizer(stream, state);
    }
    // Interpolation #{...}: tokenize the contents with tokenBase itself.
    if (stream.match('#{')){
      state.tokenizer = buildInterpolationTokenizer(tokenBase);
      return "operator";
    }
    if (ch === "."){
      stream.next();
      // Match class selectors, e.g. ".foo" — opens a new indent scope.
      if (stream.match(/^[\w-]+/)){
        indent(state);
        return "atom";
      }else if (stream.peek() === "#"){
        // ".#{...}" — interpolated class selector.
        indent(state);
        return "atom";
      }else{
        return "operator";
      }
    }
    if (ch === "#"){
      stream.next();
      // Hex color values, e.g. #fff or #ffffff.
      // (Anchoring is implicit: CodeMirror's stream.match rejects
      // regex matches that do not start at the current position.)
      if (stream.match(/[0-9a-fA-F]{6}|[0-9a-fA-F]{3}/)){
        return "number";
      }
      // ID selectors, e.g. "#header" — opens a new indent scope.
      if (stream.match(/^[\w-]+/)){
        indent(state);
        return "atom";
      }
      // "##{...}" — interpolated id selector.
      if (stream.peek() === "#"){
        indent(state);
        return "atom";
      }
    }
    // Numbers, with optional sign and decimal point.
    if (stream.match(/^-?[0-9\.]+/)){
      return "number";
    }
    // Units (only px/em/in are recognized here).
    if (stream.match(/^(px|em|in)\b/)){
      return "unit";
    }
    // Value keywords: true/false/null/auto.
    if (stream.match(keywordsRegexp)){
      return "keyword";
    }
    // "url(" — switch to the url tokenizer for the argument.
    if (stream.match(/^url/) && stream.peek() === "("){
      state.tokenizer = urlTokens;
      return "atom";
    }
    // Variables: "$name:" is a definition, bare "$name" is a use.
    if (ch === "$"){
      stream.next();
      stream.eatWhile(/[\w-]/);
      if (stream.peek() === ":"){
        stream.next();
        return "variable-2";
      }else{
        return "variable-3";
      }
    }
    // "!important", "!default", etc.; a lone "!" is an operator.
    if (ch === "!"){
      stream.next();
      if (stream.match(/^[\w]+/)){
        return "keyword";
      }
      return "operator";
    }
    if (ch === "="){
      stream.next();
      // Match shortcut mixin definition ("=name"); opens an indent scope.
      if (stream.match(/^[\w-]+/)){
        indent(state);
        return "meta";
      }else {
        return "operator";
      }
    }
    if (ch === "+"){
      stream.next();
      // Match shortcut mixin inclusion ("+name").
      if (stream.match(/^[\w-]+/)){
        return "variable-3";
      }else {
        return "operator";
      }
    }
    // Indent Directives: block-opening @-rules open a new indent scope.
    if (stream.match(/^@(else if|if|media|else|for|each|while|mixin|function)/)){
      indent(state);
      return "meta";
    }
    // Other Directives: any remaining "@name".
    if (ch === "@"){
      stream.next();
      stream.eatWhile(/[\w-]/);
      return "meta";
    }
    // Strings: delegate to a string tokenizer keyed on the quote char.
    if (ch === '"' || ch === "'"){
      stream.next();
      state.tokenizer = buildStringTokenizer(ch);
      return "string";
    }
    // Pseudo element selectors, e.g. ":hover", "::before".
    if (ch == ':' && stream.match(pseudoElementsRegexp)){
      return "keyword";
    }
    // atoms: identifiers (also "&" parent references).
    if (stream.eatWhile(/[\w-&]/)){
      // matches a property definition ("name:"), unless the colon starts
      // a pseudo selector ("a:hover") — peeked without consuming.
      if (stream.peek() === ":" && !stream.match(pseudoElementsRegexp, false))
        return "property";
      else
        return "atom";
    }
    // Operators and symbolic punctuation.
    if (stream.match(opRegexp)){
      return "operator";
    }
    // If we haven't returned by now, we move 1 character
    // and return an error (null = unstyled).
    stream.next();
    return null;
  };
  // Wraps the current tokenizer with indentation bookkeeping:
  // resets the per-line indent counter, applies @return dedenting, and
  // prunes indentation scopes deeper than the current token's column.
  var tokenLexer = function(stream, state) {
    if (stream.sol()){
      // Each line may open at most one new scope (see indent()).
      state.indentCount = 0;
    }
    var style = state.tokenizer(stream, state);
    var current = stream.current();
    // "@return" ends a function body — close its scope.
    if (current === "@return"){
      dedent(state);
    }
    // Selectors (styled "atom") open a nested scope.
    if (style === "atom"){
      indent(state);
    }
    if (style !== null){
      var startOfToken = stream.pos - current.length;
      // Column of the token plus any scope opened on this line.
      var withCurrentIndent = startOfToken + (config.indentUnit * state.indentCount);
      // Drop scopes indented deeper than this token — outdenting closes them.
      var newScopes = [];
      for (var i = 0; i < state.scopes.length; i++){
        var scope = state.scopes[i];
        if (scope.offset <= withCurrentIndent){
          newScopes.push(scope);
        }
      }
      state.scopes = newScopes;
    }
    return style;
  };
  // The CodeMirror mode object.
  return {
    startState: function() {
      return {
        tokenizer: tokenBase,              // active sub-tokenizer
        scopes: [{offset: 0, type: 'sass'}], // indentation scope stack, innermost first
        definedVars: [],
        definedMixins: []
      };
    },
    token: function(stream, state) {
      var style = tokenLexer(stream, state);
      // Remember the last token (style + text) for consumers of the state.
      state.lastToken = { style: style, content: stream.current() };
      return style;
    },
    indent: function(state) {
      // Suggested indentation is the innermost scope's offset.
      return state.scopes[0].offset;
    }
  };
});
CodeMirror.defineMIME("text/x-sass", "sass");