/**
 * Link to the project's GitHub page:
 * https://github.com/pickhardt/coffeescript-codemirror-mode
 */
CodeMirror.defineMode('coffeescript', function(conf) {
6
var ERRORCLASS = 'error';
8
function wordRegexp(words) {
9
return new RegExp("^((" + words.join(")|(") + "))\\b");
12
var singleOperators = new RegExp("^[\\+\\-\\*/%&|\\^~<>!\?]");
13
var singleDelimiters = new RegExp('^[\\(\\)\\[\\]\\{\\}@,:`=;\\.]');
14
var doubleOperators = new RegExp("^((\->)|(\=>)|(\\+\\+)|(\\+\\=)|(\\-\\-)|(\\-\\=)|(\\*\\*)|(\\*\\=)|(\\/\\/)|(\\/\\=)|(==)|(!=)|(<=)|(>=)|(<>)|(<<)|(>>)|(//))");
15
var doubleDelimiters = new RegExp("^((\\.\\.)|(\\+=)|(\\-=)|(\\*=)|(%=)|(/=)|(&=)|(\\|=)|(\\^=))");
16
var tripleDelimiters = new RegExp("^((\\.\\.\\.)|(//=)|(>>=)|(<<=)|(\\*\\*=))");
17
var identifiers = new RegExp("^[_A-Za-z$][_A-Za-z$0-9]*");
19
var wordOperators = wordRegexp(['and', 'or', 'not',
21
'instanceof', 'typeof']);
22
var indentKeywords = ['for', 'while', 'loop', 'if', 'unless', 'else',
23
'switch', 'try', 'catch', 'finally', 'class'];
24
var commonKeywords = ['break', 'by', 'continue', 'debugger', 'delete',
25
'do', 'in', 'of', 'new', 'return', 'then',
26
'this', 'throw', 'when', 'until'];
28
var keywords = wordRegexp(indentKeywords.concat(commonKeywords));
30
indentKeywords = wordRegexp(indentKeywords);
33
var stringPrefixes = new RegExp("^('{3}|\"{3}|['\"])");
34
var regexPrefixes = new RegExp("^(/{3}|/)");
35
var commonConstants = ['Infinity', 'NaN', 'undefined', 'null', 'true', 'false', 'on', 'off', 'yes', 'no'];
36
var constants = wordRegexp(commonConstants);
39
// Main tokenizer: classifies the next token on the stream and reports
// 'indent'/'dedent' pseudo-styles at line starts so tokenLexer can
// maintain the scope stack. NOTE(review): the start-of-line guard and
// all branch returns were missing from this mangled copy and are
// reconstructed from the upstream mode source — verify against it.
function tokenBase(stream, state) {
    // Handle scope changes
    if (stream.sol()) {
        var scopeOffset = state.scopes[0].offset;
        if (stream.eatSpace()) {
            var lineOffset = stream.indentation();
            if (lineOffset > scopeOffset) {
                return 'indent';
            } else if (lineOffset < scopeOffset) {
                return 'dedent';
            }
            return null;
        } else {
            if (scopeOffset > 0) {
                dedent(stream, state);
            }
        }
    }
    if (stream.eatSpace()) {
        return null;
    }

    var ch = stream.peek();

    // Handle multi line comments
    if (stream.match("###")) {
        state.tokenize = longComment;
        return state.tokenize(stream, state);
    }

    // Single line comment
    if (ch === '#') {
        stream.skipToEnd();
        return 'comment';
    }

    // Handle number literals
    if (stream.match(/^-?[0-9\.]/, false)) {
        var floatLiteral = false;
        // Floats
        if (stream.match(/^-?\d*\.\d+(e[\+\-]?\d+)?/i)) {
            floatLiteral = true;
        }
        if (stream.match(/^-?\d+\.\d*/)) {
            floatLiteral = true;
        }
        if (stream.match(/^-?\.\d+/)) {
            floatLiteral = true;
        }
        if (floatLiteral) {
            // prevent from getting extra . on 1..
            if (stream.peek() == "."){
                stream.backUp(1);
            }
            return 'number';
        }
        // Integers
        var intLiteral = false;
        // Hex
        if (stream.match(/^-?0x[0-9a-f]+/i)) {
            intLiteral = true;
        }
        // Decimal
        if (stream.match(/^-?[1-9]\d*(e[\+\-]?\d+)?/)) {
            intLiteral = true;
        }
        // Zero by itself with no other piece of number.
        if (stream.match(/^-?0(?![\dx])/i)) {
            intLiteral = true;
        }
        if (intLiteral) {
            return 'number';
        }
    }

    // Handle strings: the matched opener ('/''' /""" /' /") is handed to
    // tokenFactory, which takes over tokenizing until the closer.
    if (stream.match(stringPrefixes)) {
        state.tokenize = tokenFactory(stream.current(), 'string');
        return state.tokenize(stream, state);
    }
    // Handle regex literals
    if (stream.match(regexPrefixes)) {
        if (stream.current() != '/' || stream.match(/^.*\//, false)) { // prevent highlight of division
            state.tokenize = tokenFactory(stream.current(), 'string-2');
            return state.tokenize(stream, state);
        } else {
            stream.backUp(1);
        }
    }

    // Handle operators and delimiters
    if (stream.match(tripleDelimiters) || stream.match(doubleDelimiters)) {
        return 'punctuation';
    }
    if (stream.match(doubleOperators)
        || stream.match(singleOperators)
        || stream.match(wordOperators)) {
        return 'operator';
    }
    if (stream.match(singleDelimiters)) {
        return 'punctuation';
    }

    if (stream.match(constants)) {
        return 'atom';
    }

    if (stream.match(keywords)) {
        return 'keyword';
    }

    if (stream.match(identifiers)) {
        return 'variable';
    }

    // Handle non-detected items
    stream.next();
    return ERRORCLASS;
}
// Build a string/regex tokenizer closed over its opening delimiter.
// Single-character delimiters must close on the same line (unless the
// host mode allows multi-line strings); triple delimiters may span lines.
// Returns a tokenize function that restores tokenBase once the matching
// delimiter is consumed.
function tokenFactory(delimiter, outclass) {
    var singleline = delimiter.length == 1;
    return function tokenString(stream, state) {
        while (!stream.eol()) {
            stream.eatWhile(/[^'"\/\\]/);
            if (stream.eat('\\')) {
                // Escaped character: swallow it; an escape at end of a
                // single-line string keeps the string class for this line.
                stream.next();
                if (singleline && stream.eol()) {
                    return outclass;
                }
            } else if (stream.match(delimiter)) {
                state.tokenize = tokenBase;
                return outclass;
            } else {
                stream.eat(/['"\/]/);
            }
        }
        // Line ended without a closing delimiter.
        if (singleline) {
            if (conf.mode.singleLineStringErrors) {
                outclass = ERRORCLASS;
            } else {
                state.tokenize = tokenBase;
            }
        }
        return outclass;
    };
}
// Tokenizer for ### block comments: consume until the closing ###
// (which hands control back to tokenBase) or end of line, styling
// everything as a comment either way.
function longComment(stream, state) {
    while (!stream.eol()) {
        stream.eatWhile(/[^#]/);
        if (stream.match("###")) {
            state.tokenize = tokenBase;
            break;
        }
        // Lone '#' characters inside the block comment.
        stream.eatWhile("#");
    }
    return "comment";
}
// Push a new scope. For 'coffee' (whitespace) scopes the new offset is
// the innermost coffee scope's offset plus the editor's indent unit;
// for bracket scopes (type is the expected closing delimiter) the offset
// is the column just past the opening bracket.
function indent(stream, state, type) {
    type = type || 'coffee';
    var indentUnit = 0;
    if (type === 'coffee') {
        for (var i = 0; i < state.scopes.length; i++) {
            if (state.scopes[i].type === 'coffee') {
                indentUnit = state.scopes[i].offset + conf.indentUnit;
                break;
            }
        }
    } else {
        indentUnit = stream.column() + stream.current().length;
    }
    state.scopes.unshift({
        offset: indentUnit,
        type: type
    });
}
// Pop scopes to match the current indentation (coffee scopes) or pop a
// single bracket scope. Returns true on a dedent error (indentation
// matches no open scope), a falsy value otherwise.
function dedent(stream, state) {
    // Never pop the outermost scope.
    if (state.scopes.length == 1) return;
    if (state.scopes[0].type === 'coffee') {
        var _indent = stream.indentation();
        var _indent_index = -1;
        for (var i = 0; i < state.scopes.length; ++i) {
            if (_indent === state.scopes[i].offset) {
                _indent_index = i;
                break;
            }
        }
        if (_indent_index === -1) {
            // No scope at this indentation: report an error.
            return true;
        }
        while (state.scopes[0].offset !== _indent) {
            state.scopes.shift();
        }
        return false;
    } else {
        // Bracket scope: closing delimiter pops exactly one scope.
        state.scopes.shift();
        return false;
    }
}
// Drives the tokenizer and applies scope bookkeeping: dotted property
// access, '@' properties, and indent/dedent triggered by keywords,
// arrows, brackets, and the 'indent'/'dedent' pseudo-styles from
// tokenBase. NOTE(review): the missing return/branch lines are
// reconstructed from the upstream mode source — verify against it.
function tokenLexer(stream, state) {
    var style = state.tokenize(stream, state);
    var current = stream.current();

    // Handle '.' connected identifiers
    if (current === '.') {
        style = state.tokenize(stream, state);
        current = stream.current();
        if (style === 'variable') {
            return 'variable';
        } else {
            return ERRORCLASS;
        }
    }

    // Handle '@' properties.
    if (current === '@') {
        stream.eat('@');
        return 'keyword';
    }

    // Handle scope changes.
    if (current === 'return') {
        state.dedent += 1;
    }
    if (((current === '->' || current === '=>') &&
              !state.lambda &&
              state.scopes[0].type == 'coffee' &&
              stream.peek() === '')
           || style === 'indent') {
        indent(stream, state);
    }
    var delimiter_index = '[({'.indexOf(current);
    if (delimiter_index !== -1) {
        indent(stream, state, '])}'.slice(delimiter_index, delimiter_index+1));
    }
    if (indentKeywords.exec(current)){
        indent(stream, state);
    }
    if (current == 'then'){
        dedent(stream, state);
    }

    if (style === 'dedent') {
        if (dedent(stream, state)) {
            return ERRORCLASS;
        }
    }
    delimiter_index = '])}'.indexOf(current);
    if (delimiter_index !== -1) {
        if (dedent(stream, state)) {
            return ERRORCLASS;
        }
    }
    // A pending 'return' dedent takes effect at end of line.
    if (state.dedent > 0 && stream.eol() && state.scopes[0].type == 'coffee') {
        if (state.scopes.length > 1) state.scopes.shift();
        state.dedent -= 1;
    }

    return style;
}
startState: function(basecolumn) {
310
scopes: [{offset:basecolumn || 0, type:'coffee'}],
317
token: function(stream, state) {
318
var style = tokenLexer(stream, state);
320
state.lastToken = {style:style, content: stream.current()};
322
if (stream.eol() && stream.lambda) {
323
state.lambda = false;
329
indent: function(state, textAfter) {
330
if (state.tokenize != tokenBase) {
334
return state.scopes[0].offset;
341
// Register the standard MIME type for CoffeeScript with this mode.
CodeMirror.defineMIME('text/x-coffeescript', 'coffeescript');