							- CodeMirror.defineMode("python", function(conf) {
 
    var ERRORCLASS = 'error';

    // Build a regexp matching any of the given words at a word boundary,
    // e.g. wordRegexp(['and', 'or']) yields /^((and)|(or))\b/.
    function wordRegexp(words) {
        return new RegExp("^((" + words.join(")|(") + "))\\b");
    }

    var singleOperators = new RegExp("^[\\+\\-\\*/%&|\\^~<>!]");
    var singleDelimiters = new RegExp('^[\\(\\)\\[\\]\\{\\}@,:`=;\\.]');
    var doubleOperators = new RegExp("^((==)|(!=)|(<=)|(>=)|(<>)|(<<)|(>>)|(//)|(\\*\\*))");
    var doubleDelimiters = new RegExp("^((\\+=)|(\\-=)|(\\*=)|(%=)|(/=)|(&=)|(\\|=)|(\\^=))");
    var tripleDelimiters = new RegExp("^((//=)|(>>=)|(<<=)|(\\*\\*=))");
    var identifiers = new RegExp("^[_A-Za-z][_A-Za-z0-9]*");
    var wordOperators = wordRegexp(['and', 'or', 'not', 'is', 'in']);
 
    var commonkeywords = ['as', 'assert', 'break', 'class', 'continue',
                          'def', 'del', 'elif', 'else', 'except', 'finally',
                          'for', 'from', 'global', 'if', 'import',
                          'lambda', 'pass', 'raise', 'return',
                          'try', 'while', 'with', 'yield'];
    var commontypes = ['bool', 'classmethod', 'complex', 'dict', 'enumerate',
                       'float', 'frozenset', 'int', 'list', 'object',
                       'property', 'reversed', 'set', 'slice', 'staticmethod',
                       'str', 'super', 'tuple', 'type'];
    // Keywords and built-ins that the mode only highlights for one Python version.
    var py2 = {'types': ['basestring', 'buffer', 'file', 'long', 'unicode',
                         'xrange'],
               'keywords': ['exec', 'print']};
    var py3 = {'types': ['bytearray', 'bytes', 'filter', 'map', 'memoryview',
                         'open', 'range', 'zip'],
               'keywords': ['nonlocal']};
 
    // Pick the keyword/type set and the legal string prefixes for the
    // configured Python version (conf.mode.version; defaults to Python 2).
    var stringPrefixes;
    if (!!conf.mode.version && parseInt(conf.mode.version, 10) === 3) {
        commonkeywords = commonkeywords.concat(py3.keywords);
        commontypes = commontypes.concat(py3.types);
        stringPrefixes = new RegExp("^(([rb]|(br))?('{3}|\"{3}|['\"]))", "i");
    } else {
        commonkeywords = commonkeywords.concat(py2.keywords);
        commontypes = commontypes.concat(py2.types);
        stringPrefixes = new RegExp("^(([rub]|(ur)|(br))?('{3}|\"{3}|['\"]))", "i");
    }
    var keywords = wordRegexp(commonkeywords);
    var types = wordRegexp(commontypes);

    var indentInfo = null;
 
    // tokenizers
    function tokenBase(stream, state) {
        // Handle scope changes
        if (stream.sol()) {
            var scopeOffset = state.scopes[0].offset;
            if (stream.eatSpace()) {
                var lineOffset = stream.indentation();
                if (lineOffset > scopeOffset) {
                    indentInfo = 'indent';
                } else if (lineOffset < scopeOffset) {
                    indentInfo = 'dedent';
                }
                return null;
            } else {
                if (scopeOffset > 0) {
                    dedent(stream, state);
                }
            }
        }
        if (stream.eatSpace()) {
            return null;
        }

        var ch = stream.peek();

        // Handle Comments
        if (ch === '#') {
            stream.skipToEnd();
            return 'comment';
        }

        // Handle Number Literals
        if (stream.match(/^[0-9\.]/, false)) {
            var floatLiteral = false;
            // Floats
            if (stream.match(/^\d*\.\d+(e[\+\-]?\d+)?/i)) { floatLiteral = true; }
            if (stream.match(/^\d+\.\d*/)) { floatLiteral = true; }
            if (stream.match(/^\.\d+/)) { floatLiteral = true; }
            if (floatLiteral) {
                // Float literals may be "imaginary"
                stream.eat(/J/i);
                return 'number';
            }
            // Integers
            var intLiteral = false;
            // Hex
            if (stream.match(/^0x[0-9a-f]+/i)) { intLiteral = true; }
            // Binary
            if (stream.match(/^0b[01]+/i)) { intLiteral = true; }
            // Octal
            if (stream.match(/^0o[0-7]+/i)) { intLiteral = true; }
            // Decimal
            if (stream.match(/^[1-9]\d*(e[\+\-]?\d+)?/)) {
                // Decimal literals may be "imaginary"
                stream.eat(/J/i);
                // TODO - Can you have imaginary longs?
                intLiteral = true;
            }
            // Zero by itself with no other piece of number.
            if (stream.match(/^0(?![\dx])/i)) { intLiteral = true; }
            if (intLiteral) {
                // Integer literals may be "long"
                stream.eat(/L/i);
                return 'number';
            }
        }

        // Handle Strings
        if (stream.match(stringPrefixes)) {
            state.tokenize = tokenStringFactory(stream.current());
            return state.tokenize(stream, state);
        }

        // Handle operators and Delimiters
        if (stream.match(tripleDelimiters) || stream.match(doubleDelimiters)) {
            return null;
        }
        if (stream.match(doubleOperators)
            || stream.match(singleOperators)
            || stream.match(wordOperators)) {
            return 'operator';
        }
        if (stream.match(singleDelimiters)) {
            return null;
        }

        if (stream.match(types)) {
            return 'builtin';
        }

        if (stream.match(keywords)) {
            return 'keyword';
        }

        if (stream.match(identifiers)) {
            return 'variable';
        }

        // Handle non-detected items
        stream.next();
        return ERRORCLASS;
    }
 
    // Return a tokenizer for a string opened with the given delimiter
    // (any r/u/b prefix is stripped off first).
    function tokenStringFactory(delimiter) {
        while ('rub'.indexOf(delimiter[0].toLowerCase()) >= 0) {
            delimiter = delimiter.substr(1);
        }
        var delim_re = new RegExp(delimiter);
        var singleline = delimiter.length == 1;
        var OUTCLASS = 'string';

        return function tokenString(stream, state) {
            while (!stream.eol()) {
                stream.eatWhile(/[^'"\\]/);
                if (stream.eat('\\')) {
                    stream.next();
                    if (singleline && stream.eol()) {
                        return OUTCLASS;
                    }
                } else if (stream.match(delim_re)) {
                    state.tokenize = tokenBase;
                    return OUTCLASS;
                } else {
                    stream.eat(/['"]/);
                }
            }
            if (singleline) {
                if (conf.mode.singleLineStringErrors) {
                    OUTCLASS = ERRORCLASS;
                } else {
                    state.tokenize = tokenBase;
                }
            }
            return OUTCLASS;
        };
    }
 
    // Push a new scope: a 'py' scope indents one unit past the innermost
    // 'py' scope, while a bracket scope aligns with the column after the bracket.
    function indent(stream, state, type) {
        type = type || 'py';
        var indentUnit = 0;
        if (type === 'py') {
            for (var i = 0; i < state.scopes.length; ++i) {
                if (state.scopes[i].type === 'py') {
                    indentUnit = state.scopes[i].offset + conf.indentUnit;
                    break;
                }
            }
        } else {
            indentUnit = stream.column() + stream.current().length;
        }
        state.scopes.unshift({
            offset: indentUnit,
            type: type
        });
    }

    // Pop scopes back to the current indentation; returns true when the
    // indentation does not match any open scope (an indentation error).
    function dedent(stream, state) {
        if (state.scopes.length == 1) return;
        if (state.scopes[0].type === 'py') {
            var _indent = stream.indentation();
            var _indent_index = -1;
            for (var i = 0; i < state.scopes.length; ++i) {
                if (_indent === state.scopes[i].offset) {
                    _indent_index = i;
                    break;
                }
            }
            if (_indent_index === -1) {
                return true;
            }
            while (state.scopes[0].offset !== _indent) {
                state.scopes.shift();
            }
            return false;
        } else {
            state.scopes.shift();
            return false;
        }
    }
 
    function tokenLexer(stream, state) {
        indentInfo = null;
        var style = state.tokenize(stream, state);
        var current = stream.current();

        // Handle '.' connected identifiers
        if (current === '.') {
            style = state.tokenize(stream, state);
            current = stream.current();
            if (style === 'variable') {
                return 'variable';
            } else {
                return ERRORCLASS;
            }
        }

        // Handle decorators
        if (current === '@') {
            style = state.tokenize(stream, state);
            current = stream.current();
            if (style === 'variable'
                || current === '@staticmethod'
                || current === '@classmethod') {
                return 'meta';
            } else {
                return ERRORCLASS;
            }
        }

        // Handle scope changes.
        if (current === 'pass' || current === 'return') {
            state.dedent += 1;
        }
        if ((current === ':' && !state.lambda && state.scopes[0].type == 'py')
            || indentInfo === 'indent') {
            indent(stream, state);
        }
        var delimiter_index = '[({'.indexOf(current);
        if (delimiter_index !== -1) {
            indent(stream, state, '])}'.slice(delimiter_index, delimiter_index+1));
        }
        if (indentInfo === 'dedent') {
            if (dedent(stream, state)) {
                return ERRORCLASS;
            }
        }
        delimiter_index = '])}'.indexOf(current);
        if (delimiter_index !== -1) {
            if (dedent(stream, state)) {
                return ERRORCLASS;
            }
        }
        if (state.dedent > 0 && stream.eol() && state.scopes[0].type == 'py') {
            if (state.scopes.length > 1) state.scopes.shift();
            state.dedent -= 1;
        }

        return style;
    }
 
    var external = {
        startState: function(basecolumn) {
            return {
                tokenize: tokenBase,
                scopes: [{offset: basecolumn || 0, type: 'py'}],
                lastToken: null,
                lambda: false,
                dedent: 0
            };
        },

        token: function(stream, state) {
            var style = tokenLexer(stream, state);

            state.lastToken = {style: style, content: stream.current()};

            // Clear the lambda flag at the end of the line (the flag lives on
            // state, not on the stream).
            if (stream.eol() && state.lambda) {
                state.lambda = false;
            }

            return style;
        },

        indent: function(state, textAfter) {
            if (state.tokenize != tokenBase) {
                return 0;
            }

            return state.scopes[0].offset;
        }

    };
    return external;
});

CodeMirror.defineMIME("text/x-python", "python");
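
// Usage sketch (illustrative, not part of the mode itself): the mode reads
// 'version' and 'singleLineStringErrors' from its mode configuration via
// conf.mode above. The element id "code" below is a hypothetical example.
//
//   var editor = CodeMirror.fromTextArea(document.getElementById("code"), {
//       mode: {
//           name: "python",
//           version: 3,                    // use Python 3 keywords and built-ins
//           singleLineStringErrors: false  // unterminated strings are not errors
//       }
//   });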
 
 