OLD | NEW |
(Empty) | |
| 1 CodeMirror.defineMode("python", function(conf, parserConf) { |
| 2 var ERRORCLASS = 'error'; |
| 3 |
| 4 function wordRegexp(words) { |
| 5 return new RegExp("^((" + words.join(")|(") + "))\\b"); |
| 6 } |
| 7 |
|   8 var singleOperators = parserConf.singleOperators || new RegExp("^[\\+\\-\\*/%&|\\^~<>!]"); |
|   9 var singleDelimiters = parserConf.singleDelimiters || new RegExp('^[\\(\\)\\[\\]\\{\\}@,:`=;\\.]'); |
|  10 var doubleOperators = parserConf.doubleOperators || new RegExp("^((==)|(!=)|(<=)|(>=)|(<>)|(<<)|(>>)|(//)|(\\*\\*))"); |
|  11 var doubleDelimiters = parserConf.doubleDelimiters || new RegExp("^((\\+=)|(\\-=)|(\\*=)|(%=)|(/=)|(&=)|(\\|=)|(\\^=))"); |
|  12 var tripleDelimiters = parserConf.tripleDelimiters || new RegExp("^((//=)|(>>=)|(<<=)|(\\*\\*=))"); |
|  13 var identifiers = parserConf.identifiers || new RegExp("^[_A-Za-z][_A-Za-z0-9]*"); |
| 14 |
| 15 var wordOperators = wordRegexp(['and', 'or', 'not', 'is', 'in']); |
| 16 var commonkeywords = ['as', 'assert', 'break', 'class', 'continue', |
| 17 'def', 'del', 'elif', 'else', 'except', 'finally', |
| 18 'for', 'from', 'global', 'if', 'import', |
| 19 'lambda', 'pass', 'raise', 'return', |
| 20 'try', 'while', 'with', 'yield']; |
|  21 var commonBuiltins = ['abs', 'all', 'any', 'bin', 'bool', 'bytearray', 'callable', 'chr', |
|  22 'classmethod', 'compile', 'complex', 'delattr', 'dict', 'dir', 'divmod', |
|  23 'enumerate', 'eval', 'filter', 'float', 'format', 'frozenset', |
|  24 'getattr', 'globals', 'hasattr', 'hash', 'help', 'hex', 'id', |
|  25 'input', 'int', 'isinstance', 'issubclass', 'iter', 'len', |
|  26 'list', 'locals', 'map', 'max', 'memoryview', 'min', 'next', |
|  27 'object', 'oct', 'open', 'ord', 'pow', 'property', 'range', |
|  28 'repr', 'reversed', 'round', 'set', 'setattr', 'slice', |
|  29 'sorted', 'staticmethod', 'str', 'sum', 'super', 'tuple', |
| 30 'type', 'vars', 'zip', '__import__', 'NotImplemented', |
| 31 'Ellipsis', '__debug__']; |
|  32 var py2 = {'builtins': ['apply', 'basestring', 'buffer', 'cmp', 'coerce', 'execfile', |
|  33 'file', 'intern', 'long', 'raw_input', 'reduce', 'reload', |
|  34 'unichr', 'unicode', 'xrange', 'False', 'True', 'None'], |
| 35 'keywords': ['exec', 'print']}; |
| 36 var py3 = {'builtins': ['ascii', 'bytes', 'exec', 'print'], |
| 37 'keywords': ['nonlocal', 'False', 'True', 'None']}; |
| 38 |
| 39 if (!!parserConf.version && parseInt(parserConf.version, 10) === 3) { |
| 40 commonkeywords = commonkeywords.concat(py3.keywords); |
| 41 commonBuiltins = commonBuiltins.concat(py3.builtins); |
|  42 var stringPrefixes = new RegExp("^(([rb]|(br))?('{3}|\"{3}|['\"]))", "i"); |
| 43 } else { |
| 44 commonkeywords = commonkeywords.concat(py2.keywords); |
| 45 commonBuiltins = commonBuiltins.concat(py2.builtins); |
|  46 var stringPrefixes = new RegExp("^(([rub]|(ur)|(br))?('{3}|\"{3}|['\"]))", "i"); |
| 47 } |
| 48 var keywords = wordRegexp(commonkeywords); |
| 49 var builtins = wordRegexp(commonBuiltins); |
| 50 |
| 51 var indentInfo = null; |
| 52 |
| 53 // tokenizers |
| 54 function tokenBase(stream, state) { |
| 55 // Handle scope changes |
| 56 if (stream.sol()) { |
| 57 var scopeOffset = state.scopes[0].offset; |
| 58 if (stream.eatSpace()) { |
| 59 var lineOffset = stream.indentation(); |
| 60 if (lineOffset > scopeOffset) { |
| 61 indentInfo = 'indent'; |
| 62 } else if (lineOffset < scopeOffset) { |
| 63 indentInfo = 'dedent'; |
| 64 } |
| 65 return null; |
| 66 } else { |
| 67 if (scopeOffset > 0) { |
| 68 dedent(stream, state); |
| 69 } |
| 70 } |
| 71 } |
| 72 if (stream.eatSpace()) { |
| 73 return null; |
| 74 } |
| 75 |
| 76 var ch = stream.peek(); |
| 77 |
| 78 // Handle Comments |
| 79 if (ch === '#') { |
| 80 stream.skipToEnd(); |
| 81 return 'comment'; |
| 82 } |
| 83 |
| 84 // Handle Number Literals |
| 85 if (stream.match(/^[0-9\.]/, false)) { |
| 86 var floatLiteral = false; |
| 87 // Floats |
|  88 if (stream.match(/^\d*\.\d+(e[\+\-]?\d+)?/i)) { floatLiteral = true; } |
| 89 if (stream.match(/^\d+\.\d*/)) { floatLiteral = true; } |
| 90 if (stream.match(/^\.\d+/)) { floatLiteral = true; } |
| 91 if (floatLiteral) { |
| 92 // Float literals may be "imaginary" |
| 93 stream.eat(/J/i); |
| 94 return 'number'; |
| 95 } |
| 96 // Integers |
| 97 var intLiteral = false; |
| 98 // Hex |
| 99 if (stream.match(/^0x[0-9a-f]+/i)) { intLiteral = true; } |
| 100 // Binary |
| 101 if (stream.match(/^0b[01]+/i)) { intLiteral = true; } |
| 102 // Octal |
| 103 if (stream.match(/^0o[0-7]+/i)) { intLiteral = true; } |
| 104 // Decimal |
| 105 if (stream.match(/^[1-9]\d*(e[\+\-]?\d+)?/)) { |
| 106 // Decimal literals may be "imaginary" |
| 107 stream.eat(/J/i); |
| 108 // TODO - Can you have imaginary longs? |
| 109 intLiteral = true; |
| 110 } |
| 111 // Zero by itself with no other piece of number. |
| 112 if (stream.match(/^0(?![\dx])/i)) { intLiteral = true; } |
| 113 if (intLiteral) { |
| 114 // Integer literals may be "long" |
| 115 stream.eat(/L/i); |
| 116 return 'number'; |
| 117 } |
| 118 } |
| 119 |
| 120 // Handle Strings |
| 121 if (stream.match(stringPrefixes)) { |
| 122 state.tokenize = tokenStringFactory(stream.current()); |
| 123 return state.tokenize(stream, state); |
| 124 } |
| 125 |
| 126 // Handle operators and Delimiters |
| 127 if (stream.match(tripleDelimiters) || stream.match(doubleDelimiters)) { |
| 128 return null; |
| 129 } |
| 130 if (stream.match(doubleOperators) |
| 131 || stream.match(singleOperators) |
| 132 || stream.match(wordOperators)) { |
| 133 return 'operator'; |
| 134 } |
| 135 if (stream.match(singleDelimiters)) { |
| 136 return null; |
| 137 } |
| 138 |
| 139 if (stream.match(keywords)) { |
| 140 return 'keyword'; |
| 141 } |
| 142 |
| 143 if (stream.match(builtins)) { |
| 144 return 'builtin'; |
| 145 } |
| 146 |
| 147 if (stream.match(identifiers)) { |
| 148 return 'variable'; |
| 149 } |
| 150 |
| 151 // Handle non-detected items |
| 152 stream.next(); |
| 153 return ERRORCLASS; |
| 154 } |
| 155 |
| 156 function tokenStringFactory(delimiter) { |
| 157 while ('rub'.indexOf(delimiter.charAt(0).toLowerCase()) >= 0) { |
| 158 delimiter = delimiter.substr(1); |
| 159 } |
| 160 var singleline = delimiter.length == 1; |
| 161 var OUTCLASS = 'string'; |
| 162 |
| 163 function tokenString(stream, state) { |
| 164 while (!stream.eol()) { |
| 165 stream.eatWhile(/[^'"\\]/); |
| 166 if (stream.eat('\\')) { |
| 167 stream.next(); |
| 168 if (singleline && stream.eol()) { |
| 169 return OUTCLASS; |
| 170 } |
| 171 } else if (stream.match(delimiter)) { |
| 172 state.tokenize = tokenBase; |
| 173 return OUTCLASS; |
| 174 } else { |
| 175 stream.eat(/['"]/); |
| 176 } |
| 177 } |
| 178 if (singleline) { |
| 179 if (parserConf.singleLineStringErrors) { |
| 180 return ERRORCLASS; |
| 181 } else { |
| 182 state.tokenize = tokenBase; |
| 183 } |
| 184 } |
| 185 return OUTCLASS; |
| 186 } |
| 187 tokenString.isString = true; |
| 188 return tokenString; |
| 189 } |
| 190 |
| 191 function indent(stream, state, type) { |
| 192 type = type || 'py'; |
| 193 var indentUnit = 0; |
| 194 if (type === 'py') { |
| 195 if (state.scopes[0].type !== 'py') { |
| 196 state.scopes[0].offset = stream.indentation(); |
| 197 return; |
| 198 } |
| 199 for (var i = 0; i < state.scopes.length; ++i) { |
| 200 if (state.scopes[i].type === 'py') { |
| 201 indentUnit = state.scopes[i].offset + conf.indentUnit; |
| 202 break; |
| 203 } |
| 204 } |
| 205 } else { |
| 206 indentUnit = stream.column() + stream.current().length; |
| 207 } |
| 208 state.scopes.unshift({ |
| 209 offset: indentUnit, |
| 210 type: type |
| 211 }); |
| 212 } |
| 213 |
| 214 function dedent(stream, state, type) { |
| 215 type = type || 'py'; |
| 216 if (state.scopes.length == 1) return; |
| 217 if (state.scopes[0].type === 'py') { |
| 218 var _indent = stream.indentation(); |
| 219 var _indent_index = -1; |
| 220 for (var i = 0; i < state.scopes.length; ++i) { |
| 221 if (_indent === state.scopes[i].offset) { |
| 222 _indent_index = i; |
| 223 break; |
| 224 } |
| 225 } |
| 226 if (_indent_index === -1) { |
| 227 return true; |
| 228 } |
| 229 while (state.scopes[0].offset !== _indent) { |
| 230 state.scopes.shift(); |
| 231 } |
| 232 return false; |
| 233 } else { |
| 234 if (type === 'py') { |
| 235 state.scopes[0].offset = stream.indentation(); |
| 236 return false; |
| 237 } else { |
| 238 if (state.scopes[0].type != type) { |
| 239 return true; |
| 240 } |
| 241 state.scopes.shift(); |
| 242 return false; |
| 243 } |
| 244 } |
| 245 } |
| 246 |
| 247 function tokenLexer(stream, state) { |
| 248 indentInfo = null; |
| 249 var style = state.tokenize(stream, state); |
| 250 var current = stream.current(); |
| 251 |
| 252 // Handle '.' connected identifiers |
| 253 if (current === '.') { |
| 254 style = stream.match(identifiers, false) ? null : ERRORCLASS; |
| 255 if (style === null && state.lastToken === 'meta') { |
| 256 // Apply 'meta' style to '.' connected identifiers when |
| 257 // appropriate. |
| 258 style = 'meta'; |
| 259 } |
| 260 return style; |
| 261 } |
| 262 |
| 263 // Handle decorators |
| 264 if (current === '@') { |
| 265 return stream.match(identifiers, false) ? 'meta' : ERRORCLASS; |
| 266 } |
| 267 |
| 268 if ((style === 'variable' || style === 'builtin') |
| 269 && state.lastToken === 'meta') { |
| 270 style = 'meta'; |
| 271 } |
| 272 |
| 273 // Handle scope changes. |
| 274 if (current === 'pass' || current === 'return') { |
| 275 state.dedent += 1; |
| 276 } |
| 277 if (current === 'lambda') state.lambda = true; |
| 278 if ((current === ':' && !state.lambda && state.scopes[0].type == 'py') |
| 279 || indentInfo === 'indent') { |
| 280 indent(stream, state); |
| 281 } |
| 282 var delimiter_index = '[({'.indexOf(current); |
| 283 if (delimiter_index !== -1) { |
| 284 indent(stream, state, '])}'.slice(delimiter_index, delimiter_index+1)); |
| 285 } |
| 286 if (indentInfo === 'dedent') { |
| 287 if (dedent(stream, state)) { |
| 288 return ERRORCLASS; |
| 289 } |
| 290 } |
| 291 delimiter_index = '])}'.indexOf(current); |
| 292 if (delimiter_index !== -1) { |
| 293 if (dedent(stream, state, current)) { |
| 294 return ERRORCLASS; |
| 295 } |
| 296 } |
| 297 if (state.dedent > 0 && stream.eol() && state.scopes[0].type == 'py') { |
| 298 if (state.scopes.length > 1) state.scopes.shift(); |
| 299 state.dedent -= 1; |
| 300 } |
| 301 |
| 302 return style; |
| 303 } |
| 304 |
| 305 var external = { |
| 306 startState: function(basecolumn) { |
| 307 return { |
| 308 tokenize: tokenBase, |
| 309 scopes: [{offset:basecolumn || 0, type:'py'}], |
| 310 lastToken: null, |
| 311 lambda: false, |
| 312 dedent: 0 |
| 313 }; |
| 314 }, |
| 315 |
| 316 token: function(stream, state) { |
| 317 var style = tokenLexer(stream, state); |
| 318 |
| 319 state.lastToken = style; |
| 320 |
| 321 if (stream.eol() && state.lambda) { |
| 322 state.lambda = false; |
| 323 } |
| 324 |
| 325 return style; |
| 326 }, |
| 327 |
| 328 indent: function(state) { |
| 329 if (state.tokenize != tokenBase) { |
| 330 return state.tokenize.isString ? CodeMirror.Pass : 0; |
| 331 } |
| 332 |
| 333 return state.scopes[0].offset; |
| 334 }, |
| 335 |
| 336 lineComment: "#" |
| 337 }; |
| 338 return external; |
| 339 }); |
| 340 |
| 341 CodeMirror.defineMIME("text/x-python", "python"); |
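
For context, a minimal usage sketch of how the options this mode reads from `parserConf` might be supplied by editor code. The `#code` textarea id and the chosen option values are illustrative assumptions; `version` and `singleLineStringErrors` correspond to the checks above, `indentUnit` is read through `conf`, and `"text/x-python"` is the MIME string registered by `defineMIME`.

    // Hypothetical setup: a mode spec given as an object is passed to the mode
    // factory as parserConf; editor options (conf) supply indentUnit.
    var editor = CodeMirror.fromTextArea(document.getElementById("code"), {
      mode: {
        name: "python",                 // the mode defined above
        version: 3,                     // selects the py3 keyword/builtin sets
        singleLineStringErrors: false   // flag checked in tokenStringFactory
      },
      indentUnit: 4                     // used by indent() via conf.indentUnit
    });

    // Or rely on the MIME registration and the mode's defaults:
    // var editor2 = CodeMirror.fromTextArea(otherTextarea, {mode: "text/x-python"});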