You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

391 lines
11 KiB

  1. // CodeMirror, copyright (c) by Marijn Haverbeke and others
  2. // Distributed under an MIT license: http://codemirror.net/LICENSE
// Universal module definition: register the mode under CommonJS or AMD,
// or attach directly to the global CodeMirror in a plain browser setup.
(function(mod) {
  if (typeof exports == "object" && typeof module == "object") // CommonJS
    mod(require("../../lib/codemirror"));
  else if (typeof define == "function" && define.amd) // AMD
    define(["../../lib/codemirror"], mod);
  else // Plain browser env
    mod(CodeMirror);
})(function(CodeMirror) {
"use strict";

// Syntax-highlighting mode for the Crystal programming language.
CodeMirror.defineMode("crystal", function(config) {
  13. function wordRegExp(words, end) {
  14. return new RegExp((end ? "" : "^") + "(?:" + words.join("|") + ")" + (end ? "$" : "\\b"));
  15. }
  16. function chain(tokenize, stream, state) {
  17. state.tokenize.push(tokenize);
  18. return tokenize(stream, state);
  19. }
// Binary operators; tokenBase may append '=' for op-assign forms (+=, <<= ...).
var operators = /^(?:[-+/%|&^]|\*\*?|[<>]{2})/;
// Comparison, matching and logical operators.
var conditionalOperators = /^(?:[=!]~|===|<=>|[<>=!]=?|[|&]{2}|~)/;
// Index accessor method names: [] []? []=.
var indexingOperators = /^(?:\[\][?=]?)/;
// Remaining punctuation operators: . .. ... -> ? :.
var anotherOperators = /^(?:\.(?:\.{2})?|->|[?:])/;
// Lower-case (or non-ASCII) identifiers: locals and method names.
var idents = /^[a-z_\u009F-\uFFFF][a-zA-Z0-9_\u009F-\uFFFF]*/;
// Upper-case identifiers: constants and type names.
var types = /^[A-Z_\u009F-\uFFFF][a-zA-Z0-9_\u009F-\uFFFF]*/;
// Reserved words of the language.
var keywords = wordRegExp([
  "abstract", "alias", "as", "asm", "begin", "break", "case", "class", "def", "do",
  "else", "elsif", "end", "ensure", "enum", "extend", "for", "fun", "if", "ifdef",
  "include", "instance_sizeof", "lib", "macro", "module", "next", "of", "out", "pointerof",
  "private", "protected", "rescue", "return", "require", "sizeof", "struct",
  "super", "then", "type", "typeof", "union", "unless", "until", "when", "while", "with",
  "yield", "__DIR__", "__FILE__", "__LINE__"
]);
// Keyword-like literals highlighted as atoms.
var atomWords = wordRegExp(["true", "false", "nil", "self"]);
// Keywords that open a block and increase the indentation level.
var indentKeywordsArray = [
  "def", "fun", "macro",
  "class", "module", "struct", "lib", "enum", "union",
  "if", "unless", "case", "while", "until", "begin", "then",
  "do",
  "for", "ifdef"
];
var indentKeywords = wordRegExp(indentKeywordsArray);
// Keywords that close (or continue) a block and decrease indentation.
var dedentKeywordsArray = [
  "end",
  "else", "elsif",
  "rescue", "ensure"
];
var dedentKeywords = wordRegExp(dedentKeywordsArray);
// Closing punctuation that should dedent the line it begins.
var dedentPunctualsArray = ["\\)", "\\}", "\\]"];
var dedentPunctuals = new RegExp("^(?:" + dedentPunctualsArray.join("|") + ")$");
// After these keywords a dedicated tokenizer highlights the following name.
// (The tokenFollow*/tokenMacroDef function declarations below are hoisted,
// so referencing them here is safe.)
var nextTokenizer = {
  "def": tokenFollowIdent, "fun": tokenFollowIdent, "macro": tokenMacroDef,
  "class": tokenFollowType, "module": tokenFollowType, "struct": tokenFollowType,
  "lib": tokenFollowType, "enum": tokenFollowType, "union": tokenFollowType
};
// Opening bracket -> matching closing bracket (used by %-literals and nesting).
var matching = {"[": "]", "{": "}", "(": ")", "<": ">"};
  57. function tokenBase(stream, state) {
  58. if (stream.eatSpace()) {
  59. return null;
  60. }
  61. // Macros
  62. if (state.lastToken != "\\" && stream.match("{%", false)) {
  63. return chain(tokenMacro("%", "%"), stream, state);
  64. }
  65. if (state.lastToken != "\\" && stream.match("{{", false)) {
  66. return chain(tokenMacro("{", "}"), stream, state);
  67. }
  68. // Comments
  69. if (stream.peek() == "#") {
  70. stream.skipToEnd();
  71. return "comment";
  72. }
  73. // Variables and keywords
  74. var matched;
  75. if (stream.match(idents)) {
  76. stream.eat(/[?!]/);
  77. matched = stream.current();
  78. if (stream.eat(":")) {
  79. return "atom";
  80. } else if (state.lastToken == ".") {
  81. return "property";
  82. } else if (keywords.test(matched)) {
  83. if (state.lastToken != "abstract" && indentKeywords.test(matched)) {
  84. if (!(matched == "fun" && state.blocks.indexOf("lib") >= 0)) {
  85. state.blocks.push(matched);
  86. state.currentIndent += 1;
  87. }
  88. } else if (dedentKeywords.test(matched)) {
  89. state.blocks.pop();
  90. state.currentIndent -= 1;
  91. }
  92. if (nextTokenizer.hasOwnProperty(matched)) {
  93. state.tokenize.push(nextTokenizer[matched]);
  94. }
  95. return "keyword";
  96. } else if (atomWords.test(matched)) {
  97. return "atom";
  98. }
  99. return "variable";
  100. }
  101. // Class variables and instance variables
  102. // or attributes
  103. if (stream.eat("@")) {
  104. if (stream.peek() == "[") {
  105. return chain(tokenNest("[", "]", "meta"), stream, state);
  106. }
  107. stream.eat("@");
  108. stream.match(idents) || stream.match(types);
  109. return "variable-2";
  110. }
  111. // Global variables
  112. if (stream.eat("$")) {
  113. stream.eat(/[0-9]+|\?/) || stream.match(idents) || stream.match(types);
  114. return "variable-3";
  115. }
  116. // Constants and types
  117. if (stream.match(types)) {
  118. return "tag";
  119. }
  120. // Symbols or ':' operator
  121. if (stream.eat(":")) {
  122. if (stream.eat("\"")) {
  123. return chain(tokenQuote("\"", "atom", false), stream, state);
  124. } else if (stream.match(idents) || stream.match(types) ||
  125. stream.match(operators) || stream.match(conditionalOperators) || stream.match(indexingOperators)) {
  126. return "atom";
  127. }
  128. stream.eat(":");
  129. return "operator";
  130. }
  131. // Strings
  132. if (stream.eat("\"")) {
  133. return chain(tokenQuote("\"", "string", true), stream, state);
  134. }
  135. // Strings or regexps or macro variables or '%' operator
  136. if (stream.peek() == "%") {
  137. var style = "string";
  138. var embed = true;
  139. var delim;
  140. if (stream.match("%r")) {
  141. // Regexps
  142. style = "string-2";
  143. delim = stream.next();
  144. } else if (stream.match("%w")) {
  145. embed = false;
  146. delim = stream.next();
  147. } else {
  148. if(delim = stream.match(/^%([^\w\s=])/)) {
  149. delim = delim[1];
  150. } else if (stream.match(/^%[a-zA-Z0-9_\u009F-\uFFFF]*/)) {
  151. // Macro variables
  152. return "meta";
  153. } else {
  154. // '%' operator
  155. return "operator";
  156. }
  157. }
  158. if (matching.hasOwnProperty(delim)) {
  159. delim = matching[delim];
  160. }
  161. return chain(tokenQuote(delim, style, embed), stream, state);
  162. }
  163. // Characters
  164. if (stream.eat("'")) {
  165. stream.match(/^(?:[^']|\\(?:[befnrtv0'"]|[0-7]{3}|u(?:[0-9a-fA-F]{4}|\{[0-9a-fA-F]{1,6}\})))/);
  166. stream.eat("'");
  167. return "atom";
  168. }
  169. // Numbers
  170. if (stream.eat("0")) {
  171. if (stream.eat("x")) {
  172. stream.match(/^[0-9a-fA-F]+/);
  173. } else if (stream.eat("o")) {
  174. stream.match(/^[0-7]+/);
  175. } else if (stream.eat("b")) {
  176. stream.match(/^[01]+/);
  177. }
  178. return "number";
  179. }
  180. if (stream.eat(/\d/)) {
  181. stream.match(/^\d*(?:\.\d+)?(?:[eE][+-]?\d+)?/);
  182. return "number";
  183. }
  184. // Operators
  185. if (stream.match(operators)) {
  186. stream.eat("="); // Operators can follow assign symbol.
  187. return "operator";
  188. }
  189. if (stream.match(conditionalOperators) || stream.match(anotherOperators)) {
  190. return "operator";
  191. }
  192. // Parens and braces
  193. if (matched = stream.match(/[({[]/, false)) {
  194. matched = matched[0];
  195. return chain(tokenNest(matched, matching[matched], null), stream, state);
  196. }
  197. // Escapes
  198. if (stream.eat("\\")) {
  199. stream.next();
  200. return "meta";
  201. }
  202. stream.next();
  203. return null;
  204. }
  205. function tokenNest(begin, end, style, started) {
  206. return function (stream, state) {
  207. if (!started && stream.match(begin)) {
  208. state.tokenize[state.tokenize.length - 1] = tokenNest(begin, end, style, true);
  209. state.currentIndent += 1;
  210. return style;
  211. }
  212. var nextStyle = tokenBase(stream, state);
  213. if (stream.current() === end) {
  214. state.tokenize.pop();
  215. state.currentIndent -= 1;
  216. nextStyle = style;
  217. }
  218. return nextStyle;
  219. };
  220. }
  221. function tokenMacro(begin, end, started) {
  222. return function (stream, state) {
  223. if (!started && stream.match("{" + begin)) {
  224. state.currentIndent += 1;
  225. state.tokenize[state.tokenize.length - 1] = tokenMacro(begin, end, true);
  226. return "meta";
  227. }
  228. if (stream.match(end + "}")) {
  229. state.currentIndent -= 1;
  230. state.tokenize.pop();
  231. return "meta";
  232. }
  233. return tokenBase(stream, state);
  234. };
  235. }
  236. function tokenMacroDef(stream, state) {
  237. if (stream.eatSpace()) {
  238. return null;
  239. }
  240. var matched;
  241. if (matched = stream.match(idents)) {
  242. if (matched == "def") {
  243. return "keyword";
  244. }
  245. stream.eat(/[?!]/);
  246. }
  247. state.tokenize.pop();
  248. return "def";
  249. }
  250. function tokenFollowIdent(stream, state) {
  251. if (stream.eatSpace()) {
  252. return null;
  253. }
  254. if (stream.match(idents)) {
  255. stream.eat(/[!?]/);
  256. } else {
  257. stream.match(operators) || stream.match(conditionalOperators) || stream.match(indexingOperators);
  258. }
  259. state.tokenize.pop();
  260. return "def";
  261. }
  262. function tokenFollowType(stream, state) {
  263. if (stream.eatSpace()) {
  264. return null;
  265. }
  266. stream.match(types);
  267. state.tokenize.pop();
  268. return "def";
  269. }
  270. function tokenQuote(end, style, embed) {
  271. return function (stream, state) {
  272. var escaped = false;
  273. while (stream.peek()) {
  274. if (!escaped) {
  275. if (stream.match("{%", false)) {
  276. state.tokenize.push(tokenMacro("%", "%"));
  277. return style;
  278. }
  279. if (stream.match("{{", false)) {
  280. state.tokenize.push(tokenMacro("{", "}"));
  281. return style;
  282. }
  283. if (embed && stream.match("#{", false)) {
  284. state.tokenize.push(tokenNest("#{", "}", "meta"));
  285. return style;
  286. }
  287. var ch = stream.next();
  288. if (ch == end) {
  289. state.tokenize.pop();
  290. return style;
  291. }
  292. escaped = ch == "\\";
  293. } else {
  294. stream.next();
  295. escaped = false;
  296. }
  297. }
  298. return style;
  299. };
  300. }
// The mode object handed back to CodeMirror.
return {
  // Fresh per-document tokenizer state.
  startState: function () {
    return {
      tokenize: [tokenBase], // tokenizer stack; the last entry is active
      currentIndent: 0, // block-nesting depth, in indent units
      lastToken: null, // text of the previous significant token
      blocks: [] // stack of currently open block keywords
    };
  },
  // Delegate to the tokenizer on top of the stack, and remember the token
  // text (comments excluded) so tokenBase can inspect the previous token.
  token: function (stream, state) {
    var style = state.tokenize[state.tokenize.length - 1](stream, state);
    var token = stream.current();
    if (style && style != "comment") {
      state.lastToken = token;
    }
    return style;
  },
  // Indentation hint: strip surrounding macro delimiters, then dedent one
  // unit when the line starts with a dedenting keyword or closing bracket.
  indent: function (state, textAfter) {
    textAfter = textAfter.replace(/^\s*(?:\{%)?\s*|\s*(?:%\})?\s*$/g, "");
    if (dedentKeywords.test(textAfter) || dedentPunctuals.test(textAfter)) {
      return config.indentUnit * (state.currentIndent - 1);
    }
    return config.indentUnit * state.currentIndent;
  },
  fold: "indent",
  // Re-indent the line as soon as a dedenting keyword/bracket is typed.
  electricInput: wordRegExp(dedentPunctualsArray.concat(dedentKeywordsArray), true),
  lineComment: '#'
};
});

// Associate the Crystal MIME type with this mode.
CodeMirror.defineMIME("text/x-crystal", "crystal");
});