| 1 /** |
| 2 * Link to the project's GitHub page: |
| 3 * https://github.com/pickhardt/coffeescript-codemirror-mode |
| 4 */ |
| 5 CodeMirror.defineMode('coffeescript', function(conf) { |
| 6 var ERRORCLASS = 'error'; |
| 7 |
| 8 function wordRegexp(words) { |
| 9 return new RegExp("^((" + words.join(")|(") + "))\\b"); |
| 10 } |
| 11 |
| 12 var singleOperators = new RegExp("^[\\+\\-\\*/%&|\\^~<>!\?]"); |
| 13 var singleDelimiters = new RegExp('^[\\(\\)\\[\\]\\{\\},:`=;\\.]'); |
| 14 var doubleOperators = new RegExp("^((\->)|(\=>)|(\\+\\+)|(\\+\\=)|(\\-\\-)|(
\\-\\=)|(\\*\\*)|(\\*\\=)|(\\/\\/)|(\\/\\=)|(==)|(!=)|(<=)|(>=)|(<>)|(<<)|(>>)|(
//))"); |
| 15 var doubleDelimiters = new RegExp("^((\\.\\.)|(\\+=)|(\\-=)|(\\*=)|(%=)|(/=)
|(&=)|(\\|=)|(\\^=))"); |
| 16 var tripleDelimiters = new RegExp("^((\\.\\.\\.)|(//=)|(>>=)|(<<=)|(\\*\\*=)
)"); |
| 17 var identifiers = new RegExp("^[_A-Za-z$][_A-Za-z$0-9]*"); |
| 18 var properties = new RegExp("^(@|this\.)[_A-Za-z$][_A-Za-z$0-9]*"); |
| 19 |
| 20 var wordOperators = wordRegexp(['and', 'or', 'not', |
| 21 'is', 'isnt', 'in', |
| 22 'instanceof', 'typeof']); |
| 23 var indentKeywords = ['for', 'while', 'loop', 'if', 'unless', 'else', |
| 24 'switch', 'try', 'catch', 'finally', 'class']; |
| 25 var commonKeywords = ['break', 'by', 'continue', 'debugger', 'delete', |
| 26 'do', 'in', 'of', 'new', 'return', 'then', |
| 27 'this', 'throw', 'when', 'until']; |
| 28 |
| 29 var keywords = wordRegexp(indentKeywords.concat(commonKeywords)); |
| 30 |
| 31 indentKeywords = wordRegexp(indentKeywords); |
| 32 |
| 33 |
| 34 var stringPrefixes = new RegExp("^('{3}|\"{3}|['\"])"); |
| 35 var regexPrefixes = new RegExp("^(/{3}|/)"); |
| 36 var commonConstants = ['Infinity', 'NaN', 'undefined', 'null', 'true', 'fals
e', 'on', 'off', 'yes', 'no']; |
| 37 var constants = wordRegexp(commonConstants); |
| 38 |
| 39 // Tokenizers |
    // Core tokenizer: returns a style name for the next token, plus the
    // synthetic 'indent'/'dedent' styles that tokenLexer converts into
    // scope-stack changes. Delegates strings/regexes/block comments to
    // sub-tokenizers via state.tokenize.
    function tokenBase(stream, state) {
        // Handle scope changes
        if (stream.sol()) {
            var scopeOffset = state.scopes[0].offset;
            if (stream.eatSpace()) {
                var lineOffset = stream.indentation();
                if (lineOffset > scopeOffset) {
                    return 'indent';
                } else if (lineOffset < scopeOffset) {
                    return 'dedent';
                }
                return null;
            } else {
                // Line starts at column 0: close any open indentation scopes.
                if (scopeOffset > 0) {
                    dedent(stream, state);
                }
            }
        }
        if (stream.eatSpace()) {
            return null;
        }

        var ch = stream.peek();

        // Handle docco title comment (single line)
        if (stream.match("####")) {
            stream.skipToEnd();
            return 'comment';
        }

        // Handle multi line comments
        if (stream.match("###")) {
            state.tokenize = longComment;
            return state.tokenize(stream, state);
        }

        // Single line comment
        if (ch === '#') {
            stream.skipToEnd();
            return 'comment';
        }

        // Handle number literals. The cheap lookahead (second arg false =
        // don't consume) gates the more expensive per-form matches below.
        if (stream.match(/^-?[0-9\.]/, false)) {
            var floatLiteral = false;
            // Floats
            if (stream.match(/^-?\d*\.\d+(e[\+\-]?\d+)?/i)) {
                floatLiteral = true;
            }
            if (stream.match(/^-?\d+\.\d*/)) {
                floatLiteral = true;
            }
            if (stream.match(/^-?\.\d+/)) {
                floatLiteral = true;
            }

            if (floatLiteral) {
                // prevent from getting extra . on 1..
                if (stream.peek() == "."){
                    stream.backUp(1);
                }
                return 'number';
            }
            // Integers
            var intLiteral = false;
            // Hex
            if (stream.match(/^-?0x[0-9a-f]+/i)) {
                intLiteral = true;
            }
            // Decimal
            if (stream.match(/^-?[1-9]\d*(e[\+\-]?\d+)?/)) {
                intLiteral = true;
            }
            // Zero by itself with no other piece of number.
            if (stream.match(/^-?0(?![\dx])/i)) {
                intLiteral = true;
            }
            if (intLiteral) {
                return 'number';
            }
        }

        // Handle strings
        if (stream.match(stringPrefixes)) {
            state.tokenize = tokenFactory(stream.current(), 'string');
            return state.tokenize(stream, state);
        }
        // Handle regex literals
        if (stream.match(regexPrefixes)) {
            if (stream.current() != '/' || stream.match(/^.*\//, false)) { // prevent highlight of division
                state.tokenize = tokenFactory(stream.current(), 'string-2');
                return state.tokenize(stream, state);
            } else {
                stream.backUp(1);
            }
        }

        // Handle operators and delimiters. Longest-first so '...' is not
        // consumed as '..' + '.', etc.
        if (stream.match(tripleDelimiters) || stream.match(doubleDelimiters)) {
            return 'punctuation';
        }
        if (stream.match(doubleOperators)
            || stream.match(singleOperators)
            || stream.match(wordOperators)) {
            return 'operator';
        }
        if (stream.match(singleDelimiters)) {
            return 'punctuation';
        }

        if (stream.match(constants)) {
            return 'atom';
        }

        if (stream.match(keywords)) {
            return 'keyword';
        }

        // NOTE(review): identifiers are tried before properties, so the
        // properties branch can only ever match tokens starting with '@'
        // ('this' is consumed as a keyword above) — confirm this ordering
        // is intended.
        if (stream.match(identifiers)) {
            return 'variable';
        }

        if (stream.match(properties)) {
            return 'property';
        }

        // Handle non-detected items
        stream.next();
        return ERRORCLASS;
    }
| 170 |
    // Returns a tokenizer for a string or regex literal opened with
    // `delimiter` ("'", '"', "'''", '"""', '/' or '///'), styling its
    // contents as `outclass` until the matching closing delimiter.
    function tokenFactory(delimiter, outclass) {
        // One-character delimiters terminate at end of line unless the line
        // ends in a backslash; triple delimiters may span lines.
        var singleline = delimiter.length == 1;
        return function(stream, state) {
            while (!stream.eol()) {
                stream.eatWhile(/[^'"\/\\]/);
                if (stream.eat('\\')) {
                    // Skip the escaped character; a trailing backslash keeps
                    // a single-line literal open onto the next line.
                    stream.next();
                    if (singleline && stream.eol()) {
                        return outclass;
                    }
                } else if (stream.match(delimiter)) {
                    state.tokenize = tokenBase;
                    return outclass;
                } else {
                    // A quote or slash that is not our delimiter: consume it.
                    stream.eat(/['"\/]/);
                }
            }
            if (singleline) {
                // Unterminated single-line literal at end of line.
                if (conf.mode.singleLineStringErrors) {
                    // NOTE(review): this rebinds the closure's `outclass`, so
                    // the error style persists for this tokenizer instance on
                    // subsequent calls — presumably intentional; verify.
                    outclass = ERRORCLASS;
                } else {
                    state.tokenize = tokenBase;
                }
            }
            return outclass;
        };
    }
| 198 |
| 199 function longComment(stream, state) { |
| 200 while (!stream.eol()) { |
| 201 stream.eatWhile(/[^#]/); |
| 202 if (stream.match("###")) { |
| 203 state.tokenize = tokenBase; |
| 204 break; |
| 205 } |
| 206 stream.eatWhile("#"); |
| 207 } |
| 208 return "comment"; |
| 209 } |
| 210 |
| 211 function indent(stream, state, type) { |
| 212 type = type || 'coffee'; |
| 213 var indentUnit = 0; |
| 214 if (type === 'coffee') { |
| 215 for (var i = 0; i < state.scopes.length; i++) { |
| 216 if (state.scopes[i].type === 'coffee') { |
| 217 indentUnit = state.scopes[i].offset + conf.indentUnit; |
| 218 break; |
| 219 } |
| 220 } |
| 221 } else { |
| 222 indentUnit = stream.column() + stream.current().length; |
| 223 } |
| 224 state.scopes.unshift({ |
| 225 offset: indentUnit, |
| 226 type: type |
| 227 }); |
| 228 } |
| 229 |
| 230 function dedent(stream, state) { |
| 231 if (state.scopes.length == 1) return; |
| 232 if (state.scopes[0].type === 'coffee') { |
| 233 var _indent = stream.indentation(); |
| 234 var _indent_index = -1; |
| 235 for (var i = 0; i < state.scopes.length; ++i) { |
| 236 if (_indent === state.scopes[i].offset) { |
| 237 _indent_index = i; |
| 238 break; |
| 239 } |
| 240 } |
| 241 if (_indent_index === -1) { |
| 242 return true; |
| 243 } |
| 244 while (state.scopes[0].offset !== _indent) { |
| 245 state.scopes.shift(); |
| 246 } |
| 247 return false; |
| 248 } else { |
| 249 state.scopes.shift(); |
| 250 return false; |
| 251 } |
| 252 } |
| 253 |
    // Drives the current tokenizer (state.tokenize) and converts the
    // resulting token into scope-stack updates: indents after block
    // keywords/arrows, bracket scopes, and dedents.
    function tokenLexer(stream, state) {
        var style = state.tokenize(stream, state);
        var current = stream.current();

        // Handle '.' connected identifiers
        if (current === '.') {
            style = state.tokenize(stream, state);
            current = stream.current();
            if (style === 'variable') {
                return 'variable';
            } else {
                return ERRORCLASS;
            }
        }

        // Handle scope changes.
        if (current === 'return') {
            // Deferred: the matching dedent happens at end of line below.
            state.dedent += 1;
        }
        // A trailing '->'/'=>' opens a function body scope (peek() === ''
        // means nothing follows on this line); 'indent' is the synthetic
        // style tokenBase emits for a deeper-indented line.
        if (((current === '->' || current === '=>') &&
                  !state.lambda &&
                  state.scopes[0].type == 'coffee' &&
                  stream.peek() === '')
               || style === 'indent') {
            indent(stream, state);
        }
        var delimiter_index = '[({'.indexOf(current);
        if (delimiter_index !== -1) {
            // Open bracket: push a scope typed by the matching closer.
            indent(stream, state, '])}'.slice(delimiter_index, delimiter_index+1));
        }
        if (indentKeywords.exec(current)){
            indent(stream, state);
        }
        if (current == 'then'){
            // 'then' introduces a one-line body, so undo the keyword indent.
            dedent(stream, state);
        }


        if (style === 'dedent') {
            // A truthy dedent() means the line's indentation matches no
            // enclosing scope.
            if (dedent(stream, state)) {
                return ERRORCLASS;
            }
        }
        delimiter_index = '])}'.indexOf(current);
        if (delimiter_index !== -1) {
            if (dedent(stream, state)) {
                return ERRORCLASS;
            }
        }
        // Pending 'return' dedents are applied once the line is exhausted.
        if (state.dedent > 0 && stream.eol() && state.scopes[0].type == 'coffee') {
            if (state.scopes.length > 1) state.scopes.shift();
            state.dedent -= 1;
        }

        return style;
    }
| 310 |
| 311 var external = { |
| 312 startState: function(basecolumn) { |
| 313 return { |
| 314 tokenize: tokenBase, |
| 315 scopes: [{offset:basecolumn || 0, type:'coffee'}], |
| 316 lastToken: null, |
| 317 lambda: false, |
| 318 dedent: 0 |
| 319 }; |
| 320 }, |
| 321 |
| 322 token: function(stream, state) { |
| 323 var style = tokenLexer(stream, state); |
| 324 |
| 325 state.lastToken = {style:style, content: stream.current()}; |
| 326 |
| 327 if (stream.eol() && stream.lambda) { |
| 328 state.lambda = false; |
| 329 } |
| 330 |
| 331 return style; |
| 332 }, |
| 333 |
| 334 indent: function(state) { |
| 335 if (state.tokenize != tokenBase) { |
| 336 return 0; |
| 337 } |
| 338 |
| 339 return state.scopes[0].offset; |
| 340 }, |
| 341 |
| 342 lineComment: "#" |
| 343 }; |
| 344 return external; |
| 345 }); |
| 346 |
// Register the mode under its conventional MIME type so editors can select
// it by content type as well as by name.
CodeMirror.defineMIME('text/x-coffeescript', 'coffeescript');