版博士V2.0程序
Вы не можете выбрать более 25 тем Темы должны начинаться с буквы или цифры, могут содержать дефисы(-) и должны содержать не более 35 символов.
 
 
 
 

1395 строки
52 KiB

  1. /*!
  2. * message-compiler v9.3.0-beta.17
  3. * (c) 2023 kazuya kawaguchi
  4. * Released under the MIT License.
  5. */
  6. /**
  7. * Original Utilities
  8. * written by kazuya kawaguchi
  9. */
  10. const RE_ARGS = /\{([0-9a-zA-Z]+)\}/g;
  11. /* eslint-disable */
  12. function format(message, ...args) {
  13. if (args.length === 1 && isObject(args[0])) {
  14. args = args[0];
  15. }
  16. if (!args || !args.hasOwnProperty) {
  17. args = {};
  18. }
  19. return message.replace(RE_ARGS, (match, identifier) => {
  20. return args.hasOwnProperty(identifier) ? args[identifier] : '';
  21. });
  22. }
  23. const assign = Object.assign;
  24. const isString = (val) => typeof val === 'string';
  25. // eslint-disable-next-line @typescript-eslint/no-explicit-any
  26. const isObject = (val) => val !== null && typeof val === 'object';
/**
 * Numeric error codes emitted by the tokenizer and parser.
 * Each code has a matching message template in `errorMessages` below.
 */
const CompileErrorCodes = {
    // tokenizer error codes
    EXPECTED_TOKEN: 1,
    INVALID_TOKEN_IN_PLACEHOLDER: 2,
    UNTERMINATED_SINGLE_QUOTE_IN_PLACEHOLDER: 3,
    UNKNOWN_ESCAPE_SEQUENCE: 4,
    INVALID_UNICODE_ESCAPE_SEQUENCE: 5,
    UNBALANCED_CLOSING_BRACE: 6,
    UNTERMINATED_CLOSING_BRACE: 7,
    EMPTY_PLACEHOLDER: 8,
    NOT_ALLOW_NEST_PLACEHOLDER: 9,
    INVALID_LINKED_FORMAT: 10,
    // parser error codes
    MUST_HAVE_MESSAGES_IN_PLURAL: 11,
    UNEXPECTED_EMPTY_LINKED_MODIFIER: 12,
    UNEXPECTED_EMPTY_LINKED_KEY: 13,
    UNEXPECTED_LEXICAL_ANALYSIS: 14,
    // Special value for higher-order compilers to pick up the last code
    // to avoid collision of error codes. This should always be kept as the last
    // item.
    __EXTEND_POINT__: 15
};
/** @internal */
// Human-readable templates keyed by `CompileErrorCodes`; `{0}` slots are
// filled via `format` when `createCompileError` builds the error message.
const errorMessages = {
    // tokenizer error messages
    [CompileErrorCodes.EXPECTED_TOKEN]: `Expected token: '{0}'`,
    [CompileErrorCodes.INVALID_TOKEN_IN_PLACEHOLDER]: `Invalid token in placeholder: '{0}'`,
    [CompileErrorCodes.UNTERMINATED_SINGLE_QUOTE_IN_PLACEHOLDER]: `Unterminated single quote in placeholder`,
    [CompileErrorCodes.UNKNOWN_ESCAPE_SEQUENCE]: `Unknown escape sequence: \\{0}`,
    [CompileErrorCodes.INVALID_UNICODE_ESCAPE_SEQUENCE]: `Invalid unicode escape sequence: {0}`,
    [CompileErrorCodes.UNBALANCED_CLOSING_BRACE]: `Unbalanced closing brace`,
    [CompileErrorCodes.UNTERMINATED_CLOSING_BRACE]: `Unterminated closing brace`,
    [CompileErrorCodes.EMPTY_PLACEHOLDER]: `Empty placeholder`,
    [CompileErrorCodes.NOT_ALLOW_NEST_PLACEHOLDER]: `Not allowed nest placeholder`,
    [CompileErrorCodes.INVALID_LINKED_FORMAT]: `Invalid linked format`,
    // parser error messages
    [CompileErrorCodes.MUST_HAVE_MESSAGES_IN_PLURAL]: `Plural must have messages`,
    [CompileErrorCodes.UNEXPECTED_EMPTY_LINKED_MODIFIER]: `Unexpected empty linked modifier`,
    [CompileErrorCodes.UNEXPECTED_EMPTY_LINKED_KEY]: `Unexpected empty linked key`,
    [CompileErrorCodes.UNEXPECTED_LEXICAL_ANALYSIS]: `Unexpected lexical analysis in token: '{0}'`
};
  68. function createCompileError(code, loc, options = {}) {
  69. const { domain, messages, args } = options;
  70. const msg = format((messages || errorMessages)[code] || '', ...(args || []))
  71. ;
  72. const error = new SyntaxError(String(msg));
  73. error.code = code;
  74. if (loc) {
  75. error.location = loc;
  76. }
  77. error.domain = domain;
  78. return error;
  79. }
/** @internal */
// Default `onError` handler: abort compilation by rethrowing the error.
function defaultOnError(error) {
    throw error;
}
  84. const RE_HTML_TAG = /<\/?[\w\s="/.':;#-\/]+>/;
  85. const detectHtmlTag = (source) => RE_HTML_TAG.test(source);
  86. const LocationStub = {
  87. start: { line: 1, column: 1, offset: 0 },
  88. end: { line: 1, column: 1, offset: 0 }
  89. };
  90. function createPosition(line, column, offset) {
  91. return { line, column, offset };
  92. }
  93. function createLocation(start, end, source) {
  94. const loc = { start, end };
  95. if (source != null) {
  96. loc.source = source;
  97. }
  98. return loc;
  99. }
// Character constants used by the scanner / tokenizer.
const CHAR_SP = ' ';
const CHAR_CR = '\r';
const CHAR_LF = '\n';
const CHAR_LS = String.fromCharCode(0x2028); // LINE SEPARATOR
const CHAR_PS = String.fromCharCode(0x2029); // PARAGRAPH SEPARATOR
/**
 * Create a character scanner over `str` with line/column tracking and
 * arbitrary lookahead. `charAt` normalizes CRLF, LS, and PS to LF, so
 * consumers only ever see '\n' as a line terminator. Reading past the end
 * of input yields `undefined` (the tokenizer's EOF sentinel).
 */
function createScanner(str) {
    const _buf = str;
    let _index = 0;        // absolute offset of the current character
    let _line = 1;         // 1-based line of the current character
    let _column = 1;       // 1-based column of the current character
    let _peekOffset = 0;   // lookahead distance relative to _index
    // Line-end predicates over raw buffer positions.
    const isCRLF = (index) => _buf[index] === CHAR_CR && _buf[index + 1] === CHAR_LF;
    const isLF = (index) => _buf[index] === CHAR_LF;
    const isPS = (index) => _buf[index] === CHAR_PS;
    const isLS = (index) => _buf[index] === CHAR_LS;
    const isLineEnd = (index) => isCRLF(index) || isLF(index) || isPS(index) || isLS(index);
    // Accessors for the current scanner state.
    const index = () => _index;
    const line = () => _line;
    const column = () => _column;
    const peekOffset = () => _peekOffset;
    // Read the character at `offset`, folding every line terminator to LF.
    const charAt = (offset) => isCRLF(offset) || isPS(offset) || isLS(offset) ? CHAR_LF : _buf[offset];
    const currentChar = () => charAt(_index);
    const currentPeek = () => charAt(_index + _peekOffset);
    // Consume one character, updating line/column, and return the NEXT raw
    // character. A CRLF pair is consumed as a single logical newline
    // (hence the extra _index++). Any pending peek is discarded.
    function next() {
        _peekOffset = 0;
        if (isLineEnd(_index)) {
            _line++;
            _column = 0;
        }
        if (isCRLF(_index)) {
            _index++;
        }
        _index++;
        _column++;
        return _buf[_index];
    }
    // Advance the lookahead cursor by one character (two raw positions for
    // CRLF) and return the raw character now under it.
    function peek() {
        if (isCRLF(_index + _peekOffset)) {
            _peekOffset++;
        }
        _peekOffset++;
        return _buf[_index + _peekOffset];
    }
    // Rewind the scanner to the start of the input.
    function reset() {
        _index = 0;
        _line = 1;
        _column = 1;
        _peekOffset = 0;
    }
    // Reset (or reposition) only the lookahead cursor.
    function resetPeek(offset = 0) {
        _peekOffset = offset;
    }
    // Consume characters until the main cursor catches up with the
    // lookahead cursor, keeping line/column bookkeeping via next().
    function skipToPeek() {
        const target = _index + _peekOffset;
        // eslint-disable-next-line no-unmodified-loop-condition
        while (target !== _index) {
            next();
        }
        _peekOffset = 0;
    }
    return {
        index,
        line,
        column,
        peekOffset,
        charAt,
        currentChar,
        currentPeek,
        next,
        peek,
        reset,
        resetPeek,
        skipToPeek
    };
}
// Sentinel the scanner yields once input is exhausted (out-of-range string index).
const EOF = undefined;
// Quote character delimiting literal tokens inside placeholders, e.g. {'text'}.
const LITERAL_DELIMITER = "'";
// `domain` tag attached to compile errors emitted by the tokenizer.
const ERROR_DOMAIN$1 = 'tokenizer';
/**
 * Create a tokenizer over a message `source`.
 *
 * Returns `{ nextToken, currentOffset, currentPosition, context }`;
 * `nextToken()` produces tokens of the message format: text, `{named}`,
 * `{0}` list, `{'literal'}` placeholders, `|` plural separators, `%`
 * modulo-prefixed placeholders, and `@.modifier:key` linked messages.
 * Options: `location` (attach `loc` to tokens unless `false`) and
 * `onError` (invoked with a compile error; tokenization continues).
 */
function createTokenizer(source, options = {}) {
    const location = options.location !== false;
    const _scnr = createScanner(source);
    const currentOffset = () => _scnr.index();
    const currentPosition = () => createPosition(_scnr.line(), _scnr.column(), _scnr.index());
    const _initLoc = currentPosition();
    const _initOffset = currentOffset();
    // Mutable tokenizer state: current/last token bookkeeping plus the
    // placeholder nesting depth and linked-message mode flag.
    const _context = {
        currentType: 14 /* TokenTypes.EOF */,
        offset: _initOffset,
        startLoc: _initLoc,
        endLoc: _initLoc,
        lastType: 14 /* TokenTypes.EOF */,
        lastOffset: _initOffset,
        lastStartLoc: _initLoc,
        lastEndLoc: _initLoc,
        braceNest: 0,
        inLinked: false,
        text: ''
    };
    const context = () => _context;
    const { onError } = options;
    // Report a compile error at `pos` (shifted by `offset` columns) through
    // the user-supplied onError callback, if any.
    function emitError(code, pos, offset, ...args) {
        const ctx = context();
        pos.column += offset;
        pos.offset += offset;
        if (onError) {
            const loc = createLocation(ctx.startLoc, pos);
            const err = createCompileError(code, loc, {
                domain: ERROR_DOMAIN$1,
                args
            });
            onError(err);
        }
    }
    // Finalize a token of `type` (optionally carrying `value`), stamping the
    // end location and recording the type on the context.
    function getToken(context, type, value) {
        context.endLoc = currentPosition();
        context.currentType = type;
        const token = { type };
        if (location) {
            token.loc = createLocation(context.startLoc, context.endLoc);
        }
        if (value != null) {
            token.value = value;
        }
        return token;
    }
    const getEndToken = (context) => getToken(context, 14 /* TokenTypes.EOF */);
    // Consume `ch` if it is the current character; otherwise emit
    // EXPECTED_TOKEN and return ''.
    function eat(scnr, ch) {
        if (scnr.currentChar() === ch) {
            scnr.next();
            return ch;
        }
        else {
            emitError(CompileErrorCodes.EXPECTED_TOKEN, currentPosition(), 0, ch);
            return '';
        }
    }
    // Advance the peek cursor over spaces/newlines; returns the skipped run.
    function peekSpaces(scnr) {
        let buf = '';
        while (scnr.currentPeek() === CHAR_SP || scnr.currentPeek() === CHAR_LF) {
            buf += scnr.currentPeek();
            scnr.peek();
        }
        return buf;
    }
    // Consume (not just peek over) spaces/newlines; returns the skipped run.
    function skipSpaces(scnr) {
        const buf = peekSpaces(scnr);
        scnr.skipToPeek();
        return buf;
    }
    // [a-zA-Z_] — first character of a named identifier.
    function isIdentifierStart(ch) {
        if (ch === EOF) {
            return false;
        }
        const cc = ch.charCodeAt(0);
        return ((cc >= 97 && cc <= 122) || // a-z
            (cc >= 65 && cc <= 90) || // A-Z
            cc === 95 // _
        );
    }
    // [0-9] — first character of a list index.
    function isNumberStart(ch) {
        if (ch === EOF) {
            return false;
        }
        const cc = ch.charCodeAt(0);
        return cc >= 48 && cc <= 57; // 0-9
    }
    // The lookahead predicates below peek past whitespace, decide, and then
    // resetPeek() so the scanner position is untouched.
    // After '{': does a named identifier follow?
    function isNamedIdentifierStart(scnr, context) {
        const { currentType } = context;
        if (currentType !== 2 /* TokenTypes.BraceLeft */) {
            return false;
        }
        peekSpaces(scnr);
        const ret = isIdentifierStart(scnr.currentPeek());
        scnr.resetPeek();
        return ret;
    }
    // After '{': does a (possibly negative) list index follow?
    function isListIdentifierStart(scnr, context) {
        const { currentType } = context;
        if (currentType !== 2 /* TokenTypes.BraceLeft */) {
            return false;
        }
        peekSpaces(scnr);
        const ch = scnr.currentPeek() === '-' ? scnr.peek() : scnr.currentPeek();
        const ret = isNumberStart(ch);
        scnr.resetPeek();
        return ret;
    }
    // After '{': does a single-quoted literal follow?
    function isLiteralStart(scnr, context) {
        const { currentType } = context;
        if (currentType !== 2 /* TokenTypes.BraceLeft */) {
            return false;
        }
        peekSpaces(scnr);
        const ret = scnr.currentPeek() === LITERAL_DELIMITER;
        scnr.resetPeek();
        return ret;
    }
    // After '@': does a '.' (modifier introducer) follow?
    function isLinkedDotStart(scnr, context) {
        const { currentType } = context;
        if (currentType !== 8 /* TokenTypes.LinkedAlias */) {
            return false;
        }
        peekSpaces(scnr);
        const ret = scnr.currentPeek() === "." /* TokenChars.LinkedDot */;
        scnr.resetPeek();
        return ret;
    }
    // After '@.': does a modifier identifier follow?
    function isLinkedModifierStart(scnr, context) {
        const { currentType } = context;
        if (currentType !== 9 /* TokenTypes.LinkedDot */) {
            return false;
        }
        peekSpaces(scnr);
        const ret = isIdentifierStart(scnr.currentPeek());
        scnr.resetPeek();
        return ret;
    }
    // After '@' or '@.mod': does the ':' key delimiter follow?
    function isLinkedDelimiterStart(scnr, context) {
        const { currentType } = context;
        if (!(currentType === 8 /* TokenTypes.LinkedAlias */ ||
            currentType === 12 /* TokenTypes.LinkedModifier */)) {
            return false;
        }
        peekSpaces(scnr);
        const ret = scnr.currentPeek() === ":" /* TokenChars.LinkedDelimiter */;
        scnr.resetPeek();
        return ret;
    }
    // After '@...:': does a linked key (bare identifier or placeholder) follow?
    function isLinkedReferStart(scnr, context) {
        const { currentType } = context;
        if (currentType !== 10 /* TokenTypes.LinkedDelimiter */) {
            return false;
        }
        const fn = () => {
            const ch = scnr.currentPeek();
            if (ch === "{" /* TokenChars.BraceLeft */) {
                return isIdentifierStart(scnr.peek());
            }
            else if (ch === "@" /* TokenChars.LinkedAlias */ ||
                ch === "%" /* TokenChars.Modulo */ ||
                ch === "|" /* TokenChars.Pipe */ ||
                ch === ":" /* TokenChars.LinkedDelimiter */ ||
                ch === "." /* TokenChars.LinkedDot */ ||
                ch === CHAR_SP ||
                !ch) {
                return false;
            }
            else if (ch === CHAR_LF) {
                scnr.peek();
                return fn();
            }
            else {
                // other characters
                return isIdentifierStart(ch);
            }
        };
        const ret = fn();
        scnr.resetPeek();
        return ret;
    }
    // Does a '|' plural separator follow (ignoring leading whitespace)?
    function isPluralStart(scnr) {
        peekSpaces(scnr);
        const ret = scnr.currentPeek() === "|" /* TokenChars.Pipe */;
        scnr.resetPeek();
        return ret;
    }
    // Detect a '%{' sequence ahead, and whether whitespace precedes it.
    function detectModuloStart(scnr) {
        const spaces = peekSpaces(scnr);
        const ret = scnr.currentPeek() === "%" /* TokenChars.Modulo */ &&
            scnr.peek() === "{" /* TokenChars.BraceLeft */;
        scnr.resetPeek();
        return {
            isModulo: ret,
            hasSpace: spaces.length > 0
        };
    }
    // Decide whether what follows should be consumed as plain text, walking
    // the lookahead until a special character settles the question.
    function isTextStart(scnr, reset = true) {
        const fn = (hasSpace = false, prev = '', detectModulo = false) => {
            const ch = scnr.currentPeek();
            if (ch === "{" /* TokenChars.BraceLeft */) {
                return prev === "%" /* TokenChars.Modulo */ ? false : hasSpace;
            }
            else if (ch === "@" /* TokenChars.LinkedAlias */ || !ch) {
                return prev === "%" /* TokenChars.Modulo */ ? true : hasSpace;
            }
            else if (ch === "%" /* TokenChars.Modulo */) {
                scnr.peek();
                return fn(hasSpace, "%" /* TokenChars.Modulo */, true);
            }
            else if (ch === "|" /* TokenChars.Pipe */) {
                return prev === "%" /* TokenChars.Modulo */ || detectModulo
                    ? true
                    : !(prev === CHAR_SP || prev === CHAR_LF);
            }
            else if (ch === CHAR_SP) {
                scnr.peek();
                return fn(true, CHAR_SP, detectModulo);
            }
            else if (ch === CHAR_LF) {
                scnr.peek();
                return fn(true, CHAR_LF, detectModulo);
            }
            else {
                return true;
            }
        };
        const ret = fn();
        reset && scnr.resetPeek();
        return ret;
    }
    // Consume and return the current char when `fn(ch)` accepts it;
    // EOF at end of input, null on rejection.
    function takeChar(scnr, fn) {
        const ch = scnr.currentChar();
        if (ch === EOF) {
            return EOF;
        }
        if (fn(ch)) {
            scnr.next();
            return ch;
        }
        return null;
    }
    // Consume one identifier character: [a-zA-Z0-9_$].
    function takeIdentifierChar(scnr) {
        const closure = (ch) => {
            const cc = ch.charCodeAt(0);
            return ((cc >= 97 && cc <= 122) || // a-z
                (cc >= 65 && cc <= 90) || // A-Z
                (cc >= 48 && cc <= 57) || // 0-9
                cc === 95 || // _
                cc === 36 // $
            );
        };
        return takeChar(scnr, closure);
    }
    // Consume one decimal digit.
    function takeDigit(scnr) {
        const closure = (ch) => {
            const cc = ch.charCodeAt(0);
            return cc >= 48 && cc <= 57; // 0-9
        };
        return takeChar(scnr, closure);
    }
    // Consume one hexadecimal digit.
    function takeHexDigit(scnr) {
        const closure = (ch) => {
            const cc = ch.charCodeAt(0);
            return ((cc >= 48 && cc <= 57) || // 0-9
                (cc >= 65 && cc <= 70) || // A-F
                (cc >= 97 && cc <= 102)); // a-f
        };
        return takeChar(scnr, closure);
    }
    // Consume a (possibly empty) run of decimal digits.
    function getDigits(scnr) {
        let ch = '';
        let num = '';
        while ((ch = takeDigit(scnr))) {
            num += ch;
        }
        return num;
    }
    // Consume a '%' (emitting EXPECTED_TOKEN if absent) and return it.
    function readModulo(scnr) {
        skipSpaces(scnr);
        const ch = scnr.currentChar();
        if (ch !== "%" /* TokenChars.Modulo */) {
            emitError(CompileErrorCodes.EXPECTED_TOKEN, currentPosition(), 0, ch);
        }
        scnr.next();
        return "%" /* TokenChars.Modulo */;
    }
    // Consume plain text up to the next special character ({ } @ | or a
    // '%'/whitespace run that isTextStart says does not belong to the text).
    function readText(scnr) {
        let buf = '';
        while (true) {
            const ch = scnr.currentChar();
            if (ch === "{" /* TokenChars.BraceLeft */ ||
                ch === "}" /* TokenChars.BraceRight */ ||
                ch === "@" /* TokenChars.LinkedAlias */ ||
                ch === "|" /* TokenChars.Pipe */ ||
                !ch) {
                break;
            }
            else if (ch === "%" /* TokenChars.Modulo */) {
                if (isTextStart(scnr)) {
                    buf += ch;
                    scnr.next();
                }
                else {
                    break;
                }
            }
            else if (ch === CHAR_SP || ch === CHAR_LF) {
                if (isTextStart(scnr)) {
                    buf += ch;
                    scnr.next();
                }
                else if (isPluralStart(scnr)) {
                    break;
                }
                else {
                    buf += ch;
                    scnr.next();
                }
            }
            else {
                buf += ch;
                scnr.next();
            }
        }
        return buf;
    }
    // Consume a named placeholder identifier; EOF before '}' is an error.
    function readNamedIdentifier(scnr) {
        skipSpaces(scnr);
        let ch = '';
        let name = '';
        while ((ch = takeIdentifierChar(scnr))) {
            name += ch;
        }
        if (scnr.currentChar() === EOF) {
            emitError(CompileErrorCodes.UNTERMINATED_CLOSING_BRACE, currentPosition(), 0);
        }
        return name;
    }
    // Consume a list placeholder index, allowing a leading '-'.
    function readListIdentifier(scnr) {
        skipSpaces(scnr);
        let value = '';
        if (scnr.currentChar() === '-') {
            scnr.next();
            value += `-${getDigits(scnr)}`;
        }
        else {
            value += getDigits(scnr);
        }
        if (scnr.currentChar() === EOF) {
            emitError(CompileErrorCodes.UNTERMINATED_CLOSING_BRACE, currentPosition(), 0);
        }
        return value;
    }
    // Consume a single-quoted literal, handling escape sequences and
    // reporting an unterminated quote at newline/EOF.
    function readLiteral(scnr) {
        skipSpaces(scnr);
        eat(scnr, `\'`);
        let ch = '';
        let literal = '';
        const fn = (x) => x !== LITERAL_DELIMITER && x !== CHAR_LF;
        while ((ch = takeChar(scnr, fn))) {
            if (ch === '\\') {
                literal += readEscapeSequence(scnr);
            }
            else {
                literal += ch;
            }
        }
        const current = scnr.currentChar();
        if (current === CHAR_LF || current === EOF) {
            emitError(CompileErrorCodes.UNTERMINATED_SINGLE_QUOTE_IN_PLACEHOLDER, currentPosition(), 0);
            // TODO: Is it correct really?
            if (current === CHAR_LF) {
                scnr.next();
                eat(scnr, `\'`);
            }
            return literal;
        }
        eat(scnr, `\'`);
        return literal;
    }
    // Consume the character after '\\' in a literal: \\, \', \uHHHH, \UHHHHHH.
    // The escape is kept in escaped form; the parser decodes it later.
    function readEscapeSequence(scnr) {
        const ch = scnr.currentChar();
        switch (ch) {
            case '\\':
            case `\'`:
                scnr.next();
                return `\\${ch}`;
            case 'u':
                return readUnicodeEscapeSequence(scnr, ch, 4);
            case 'U':
                return readUnicodeEscapeSequence(scnr, ch, 6);
            default:
                emitError(CompileErrorCodes.UNKNOWN_ESCAPE_SEQUENCE, currentPosition(), 0, ch);
                return '';
        }
    }
    // Consume exactly `digits` hex digits after \u or \U, reporting any
    // non-hex character as an invalid unicode escape.
    function readUnicodeEscapeSequence(scnr, unicode, digits) {
        eat(scnr, unicode);
        let sequence = '';
        for (let i = 0; i < digits; i++) {
            const ch = takeHexDigit(scnr);
            if (!ch) {
                emitError(CompileErrorCodes.INVALID_UNICODE_ESCAPE_SEQUENCE, currentPosition(), 0, `\\${unicode}${sequence}${scnr.currentChar()}`);
                break;
            }
            sequence += ch;
        }
        return `\\${unicode}${sequence}`;
    }
    // Consume the invalid token inside a placeholder (up to brace/whitespace)
    // so tokenization can continue after an error.
    function readInvalidIdentifier(scnr) {
        skipSpaces(scnr);
        let ch = '';
        let identifiers = '';
        const closure = (ch) => ch !== "{" /* TokenChars.BraceLeft */ &&
            ch !== "}" /* TokenChars.BraceRight */ &&
            ch !== CHAR_SP &&
            ch !== CHAR_LF;
        while ((ch = takeChar(scnr, closure))) {
            identifiers += ch;
        }
        return identifiers;
    }
    // Consume a linked modifier name (identifier characters).
    function readLinkedModifier(scnr) {
        let ch = '';
        let name = '';
        while ((ch = takeIdentifierChar(scnr))) {
            name += ch;
        }
        return name;
    }
    // Consume a linked key, stopping at special characters or a space;
    // newlines are folded into the key.
    function readLinkedRefer(scnr) {
        const fn = (detect = false, buf) => {
            const ch = scnr.currentChar();
            if (ch === "{" /* TokenChars.BraceLeft */ ||
                ch === "%" /* TokenChars.Modulo */ ||
                ch === "@" /* TokenChars.LinkedAlias */ ||
                ch === "|" /* TokenChars.Pipe */ ||
                !ch) {
                return buf;
            }
            else if (ch === CHAR_SP) {
                return buf;
            }
            else if (ch === CHAR_LF) {
                buf += ch;
                scnr.next();
                return fn(detect, buf);
            }
            else {
                buf += ch;
                scnr.next();
                return fn(true, buf);
            }
        };
        return fn(false, '');
    }
    // Consume a '|' plural separator along with surrounding whitespace.
    function readPlural(scnr) {
        skipSpaces(scnr);
        const plural = eat(scnr, "|" /* TokenChars.Pipe */);
        skipSpaces(scnr);
        return plural;
    }
    // TODO: We need refactoring of token parsing ...
    // Read the next token while inside a `{ ... }` placeholder.
    function readTokenInPlaceholder(scnr, context) {
        let token = null;
        const ch = scnr.currentChar();
        switch (ch) {
            case "{" /* TokenChars.BraceLeft */:
                if (context.braceNest >= 1) {
                    emitError(CompileErrorCodes.NOT_ALLOW_NEST_PLACEHOLDER, currentPosition(), 0);
                }
                scnr.next();
                token = getToken(context, 2 /* TokenTypes.BraceLeft */, "{" /* TokenChars.BraceLeft */);
                skipSpaces(scnr);
                context.braceNest++;
                return token;
            case "}" /* TokenChars.BraceRight */:
                if (context.braceNest > 0 &&
                    context.currentType === 2 /* TokenTypes.BraceLeft */) {
                    emitError(CompileErrorCodes.EMPTY_PLACEHOLDER, currentPosition(), 0);
                }
                scnr.next();
                token = getToken(context, 3 /* TokenTypes.BraceRight */, "}" /* TokenChars.BraceRight */);
                context.braceNest--;
                context.braceNest > 0 && skipSpaces(scnr);
                if (context.inLinked && context.braceNest === 0) {
                    context.inLinked = false;
                }
                return token;
            case "@" /* TokenChars.LinkedAlias */:
                if (context.braceNest > 0) {
                    emitError(CompileErrorCodes.UNTERMINATED_CLOSING_BRACE, currentPosition(), 0);
                }
                token = readTokenInLinked(scnr, context) || getEndToken(context);
                context.braceNest = 0;
                return token;
            default:
                let validNamedIdentifier = true;
                let validListIdentifier = true;
                let validLiteral = true;
                if (isPluralStart(scnr)) {
                    if (context.braceNest > 0) {
                        emitError(CompileErrorCodes.UNTERMINATED_CLOSING_BRACE, currentPosition(), 0);
                    }
                    token = getToken(context, 1 /* TokenTypes.Pipe */, readPlural(scnr));
                    // reset
                    context.braceNest = 0;
                    context.inLinked = false;
                    return token;
                }
                if (context.braceNest > 0 &&
                    (context.currentType === 5 /* TokenTypes.Named */ ||
                        context.currentType === 6 /* TokenTypes.List */ ||
                        context.currentType === 7 /* TokenTypes.Literal */)) {
                    emitError(CompileErrorCodes.UNTERMINATED_CLOSING_BRACE, currentPosition(), 0);
                    context.braceNest = 0;
                    return readToken(scnr, context);
                }
                if ((validNamedIdentifier = isNamedIdentifierStart(scnr, context))) {
                    token = getToken(context, 5 /* TokenTypes.Named */, readNamedIdentifier(scnr));
                    skipSpaces(scnr);
                    return token;
                }
                if ((validListIdentifier = isListIdentifierStart(scnr, context))) {
                    token = getToken(context, 6 /* TokenTypes.List */, readListIdentifier(scnr));
                    skipSpaces(scnr);
                    return token;
                }
                if ((validLiteral = isLiteralStart(scnr, context))) {
                    token = getToken(context, 7 /* TokenTypes.Literal */, readLiteral(scnr));
                    skipSpaces(scnr);
                    return token;
                }
                if (!validNamedIdentifier && !validListIdentifier && !validLiteral) {
                    // TODO: we should be re-designed invalid cases, when we will extend message syntax near the future ...
                    token = getToken(context, 13 /* TokenTypes.InvalidPlace */, readInvalidIdentifier(scnr));
                    emitError(CompileErrorCodes.INVALID_TOKEN_IN_PLACEHOLDER, currentPosition(), 0, token.value);
                    skipSpaces(scnr);
                    return token;
                }
                break;
        }
        return token;
    }
    // TODO: We need refactoring of token parsing ...
    // Read the next token while inside a linked message (`@.mod:key`).
    function readTokenInLinked(scnr, context) {
        const { currentType } = context;
        let token = null;
        const ch = scnr.currentChar();
        if ((currentType === 8 /* TokenTypes.LinkedAlias */ ||
            currentType === 9 /* TokenTypes.LinkedDot */ ||
            currentType === 12 /* TokenTypes.LinkedModifier */ ||
            currentType === 10 /* TokenTypes.LinkedDelimiter */) &&
            (ch === CHAR_LF || ch === CHAR_SP)) {
            emitError(CompileErrorCodes.INVALID_LINKED_FORMAT, currentPosition(), 0);
        }
        switch (ch) {
            case "@" /* TokenChars.LinkedAlias */:
                scnr.next();
                token = getToken(context, 8 /* TokenTypes.LinkedAlias */, "@" /* TokenChars.LinkedAlias */);
                context.inLinked = true;
                return token;
            case "." /* TokenChars.LinkedDot */:
                skipSpaces(scnr);
                scnr.next();
                return getToken(context, 9 /* TokenTypes.LinkedDot */, "." /* TokenChars.LinkedDot */);
            case ":" /* TokenChars.LinkedDelimiter */:
                skipSpaces(scnr);
                scnr.next();
                return getToken(context, 10 /* TokenTypes.LinkedDelimiter */, ":" /* TokenChars.LinkedDelimiter */);
            default:
                if (isPluralStart(scnr)) {
                    token = getToken(context, 1 /* TokenTypes.Pipe */, readPlural(scnr));
                    // reset
                    context.braceNest = 0;
                    context.inLinked = false;
                    return token;
                }
                if (isLinkedDotStart(scnr, context) ||
                    isLinkedDelimiterStart(scnr, context)) {
                    skipSpaces(scnr);
                    return readTokenInLinked(scnr, context);
                }
                if (isLinkedModifierStart(scnr, context)) {
                    skipSpaces(scnr);
                    return getToken(context, 12 /* TokenTypes.LinkedModifier */, readLinkedModifier(scnr));
                }
                if (isLinkedReferStart(scnr, context)) {
                    skipSpaces(scnr);
                    if (ch === "{" /* TokenChars.BraceLeft */) {
                        // scan the placeholder
                        return readTokenInPlaceholder(scnr, context) || token;
                    }
                    else {
                        return getToken(context, 11 /* TokenTypes.LinkedKey */, readLinkedRefer(scnr));
                    }
                }
                if (currentType === 8 /* TokenTypes.LinkedAlias */) {
                    emitError(CompileErrorCodes.INVALID_LINKED_FORMAT, currentPosition(), 0);
                }
                context.braceNest = 0;
                context.inLinked = false;
                return readToken(scnr, context);
        }
    }
    // TODO: We need refactoring of token parsing ...
    // Top-level token dispatch: delegates to the placeholder/linked readers
    // based on context, otherwise reads text/plural/modulo tokens.
    function readToken(scnr, context) {
        let token = { type: 14 /* TokenTypes.EOF */ };
        if (context.braceNest > 0) {
            return readTokenInPlaceholder(scnr, context) || getEndToken(context);
        }
        if (context.inLinked) {
            return readTokenInLinked(scnr, context) || getEndToken(context);
        }
        const ch = scnr.currentChar();
        switch (ch) {
            case "{" /* TokenChars.BraceLeft */:
                return readTokenInPlaceholder(scnr, context) || getEndToken(context);
            case "}" /* TokenChars.BraceRight */:
                emitError(CompileErrorCodes.UNBALANCED_CLOSING_BRACE, currentPosition(), 0);
                scnr.next();
                return getToken(context, 3 /* TokenTypes.BraceRight */, "}" /* TokenChars.BraceRight */);
            case "@" /* TokenChars.LinkedAlias */:
                return readTokenInLinked(scnr, context) || getEndToken(context);
            default:
                if (isPluralStart(scnr)) {
                    token = getToken(context, 1 /* TokenTypes.Pipe */, readPlural(scnr));
                    // reset
                    context.braceNest = 0;
                    context.inLinked = false;
                    return token;
                }
                const { isModulo, hasSpace } = detectModuloStart(scnr);
                if (isModulo) {
                    return hasSpace
                        ? getToken(context, 0 /* TokenTypes.Text */, readText(scnr))
                        : getToken(context, 4 /* TokenTypes.Modulo */, readModulo(scnr));
                }
                if (isTextStart(scnr)) {
                    return getToken(context, 0 /* TokenTypes.Text */, readText(scnr));
                }
                break;
        }
        return token;
    }
    // Public: produce the next token, rolling current-token state into the
    // `last*` fields before scanning.
    function nextToken() {
        const { currentType, offset, startLoc, endLoc } = _context;
        _context.lastType = currentType;
        _context.lastOffset = offset;
        _context.lastStartLoc = startLoc;
        _context.lastEndLoc = endLoc;
        _context.offset = currentOffset();
        _context.startLoc = currentPosition();
        if (_scnr.currentChar() === EOF) {
            return getToken(_context, 14 /* TokenTypes.EOF */);
        }
        return readToken(_scnr, _context);
    }
    return {
        nextToken,
        currentOffset,
        currentPosition,
        context
    };
}
  845. const ERROR_DOMAIN = 'parser';
  846. // Backslash backslash, backslash quote, uHHHH, UHHHHHH.
  847. const KNOWN_ESCAPES = /(?:\\\\|\\'|\\u([0-9a-fA-F]{4})|\\U([0-9a-fA-F]{6}))/g;
  848. function fromEscapeSequence(match, codePoint4, codePoint6) {
  849. switch (match) {
  850. case `\\\\`:
  851. return `\\`;
  852. case `\\\'`:
  853. return `\'`;
  854. default: {
  855. const codePoint = parseInt(codePoint4 || codePoint6, 16);
  856. if (codePoint <= 0xd7ff || codePoint >= 0xe000) {
  857. return String.fromCodePoint(codePoint);
  858. }
  859. // invalid ...
  860. // Replace them with U+FFFD REPLACEMENT CHARACTER.
  861. return '�';
  862. }
  863. }
  864. }
  865. function createParser(options = {}) {
  866. const location = options.location !== false;
  867. const { onError } = options;
  868. function emitError(tokenzer, code, start, offset, ...args) {
  869. const end = tokenzer.currentPosition();
  870. end.offset += offset;
  871. end.column += offset;
  872. if (onError) {
  873. const loc = createLocation(start, end);
  874. const err = createCompileError(code, loc, {
  875. domain: ERROR_DOMAIN,
  876. args
  877. });
  878. onError(err);
  879. }
  880. }
  881. function startNode(type, offset, loc) {
  882. const node = {
  883. type,
  884. start: offset,
  885. end: offset
  886. };
  887. if (location) {
  888. node.loc = { start: loc, end: loc };
  889. }
  890. return node;
  891. }
  892. function endNode(node, offset, pos, type) {
  893. node.end = offset;
  894. if (type) {
  895. node.type = type;
  896. }
  897. if (location && node.loc) {
  898. node.loc.end = pos;
  899. }
  900. }
  901. function parseText(tokenizer, value) {
  902. const context = tokenizer.context();
  903. const node = startNode(3 /* NodeTypes.Text */, context.offset, context.startLoc);
  904. node.value = value;
  905. endNode(node, tokenizer.currentOffset(), tokenizer.currentPosition());
  906. return node;
  907. }
  908. function parseList(tokenizer, index) {
  909. const context = tokenizer.context();
  910. const { lastOffset: offset, lastStartLoc: loc } = context; // get brace left loc
  911. const node = startNode(5 /* NodeTypes.List */, offset, loc);
  912. node.index = parseInt(index, 10);
  913. tokenizer.nextToken(); // skip brach right
  914. endNode(node, tokenizer.currentOffset(), tokenizer.currentPosition());
  915. return node;
  916. }
  917. function parseNamed(tokenizer, key) {
  918. const context = tokenizer.context();
  919. const { lastOffset: offset, lastStartLoc: loc } = context; // get brace left loc
  920. const node = startNode(4 /* NodeTypes.Named */, offset, loc);
  921. node.key = key;
  922. tokenizer.nextToken(); // skip brach right
  923. endNode(node, tokenizer.currentOffset(), tokenizer.currentPosition());
  924. return node;
  925. }
  926. function parseLiteral(tokenizer, value) {
  927. const context = tokenizer.context();
  928. const { lastOffset: offset, lastStartLoc: loc } = context; // get brace left loc
  929. const node = startNode(9 /* NodeTypes.Literal */, offset, loc);
  930. node.value = value.replace(KNOWN_ESCAPES, fromEscapeSequence);
  931. tokenizer.nextToken(); // skip brach right
  932. endNode(node, tokenizer.currentOffset(), tokenizer.currentPosition());
  933. return node;
  934. }
// Parse the modifier segment of a linked message (the part after `@.`,
// e.g. `upper` in `@.upper:key`). Returns the modifier node and, when the
// modifier token was missing, the unexpected token so the caller can
// re-dispatch it instead of losing it.
function parseLinkedModifier(tokenizer) {
    const token = tokenizer.nextToken();
    const context = tokenizer.context();
    // The node is anchored at the linked dot that preceded this token.
    const { lastOffset: offset, lastStartLoc: loc } = context; // get linked dot loc
    const node = startNode(8 /* NodeTypes.LinkedModifier */, offset, loc);
    if (token.type !== 12 /* TokenTypes.LinkedModifier */) {
        // empty modifier: report the error but recover with an empty-valued
        // node, handing the unconsumed token back to the caller.
        emitError(tokenizer, CompileErrorCodes.UNEXPECTED_EMPTY_LINKED_MODIFIER, context.lastStartLoc, 0);
        node.value = '';
        endNode(node, offset, loc);
        return {
            nextConsumeToken: token,
            node
        };
    }
    // check token: a modifier token without a value indicates a lexer anomaly
    if (token.value == null) {
        emitError(tokenizer, CompileErrorCodes.UNEXPECTED_LEXICAL_ANALYSIS, context.lastStartLoc, 0, getTokenCaption(token));
    }
    node.value = token.value || '';
    endNode(node, tokenizer.currentOffset(), tokenizer.currentPosition());
    return {
        node
    };
}
  960. function parseLinkedKey(tokenizer, value) {
  961. const context = tokenizer.context();
  962. const node = startNode(7 /* NodeTypes.LinkedKey */, context.offset, context.startLoc);
  963. node.value = value;
  964. endNode(node, tokenizer.currentOffset(), tokenizer.currentPosition());
  965. return node;
  966. }
// Parse a linked message reference: `@:key`, `@.modifier:key`,
// `@:{'literal'}` and similar forms. Returns the linked node and, when
// error recovery left a token unconsumed, hands it back via
// `nextConsumeToken` for the caller to re-dispatch.
function parseLinked(tokenizer) {
    const context = tokenizer.context();
    const linkedNode = startNode(6 /* NodeTypes.Linked */, context.offset, context.startLoc);
    let token = tokenizer.nextToken();
    // Optional modifier: `@.modifier:key`
    if (token.type === 9 /* TokenTypes.LinkedDot */) {
        const parsed = parseLinkedModifier(tokenizer);
        linkedNode.modifier = parsed.node;
        token = parsed.nextConsumeToken || tokenizer.nextToken();
    }
    // assert check token: a `:` delimiter must follow the alias/modifier
    if (token.type !== 10 /* TokenTypes.LinkedDelimiter */) {
        emitError(tokenizer, CompileErrorCodes.UNEXPECTED_LEXICAL_ANALYSIS, context.lastStartLoc, 0, getTokenCaption(token));
    }
    token = tokenizer.nextToken();
    // skip brace left: the key may be wrapped, e.g. `@:{key}` forms
    if (token.type === 2 /* TokenTypes.BraceLeft */) {
        token = tokenizer.nextToken();
    }
    // The key may be a plain linked key, a named or list placeholder, or a
    // literal; each branch validates that the token actually carried a value.
    switch (token.type) {
        case 11 /* TokenTypes.LinkedKey */:
            if (token.value == null) {
                emitError(tokenizer, CompileErrorCodes.UNEXPECTED_LEXICAL_ANALYSIS, context.lastStartLoc, 0, getTokenCaption(token));
            }
            linkedNode.key = parseLinkedKey(tokenizer, token.value || '');
            break;
        case 5 /* TokenTypes.Named */:
            if (token.value == null) {
                emitError(tokenizer, CompileErrorCodes.UNEXPECTED_LEXICAL_ANALYSIS, context.lastStartLoc, 0, getTokenCaption(token));
            }
            linkedNode.key = parseNamed(tokenizer, token.value || '');
            break;
        case 6 /* TokenTypes.List */:
            if (token.value == null) {
                emitError(tokenizer, CompileErrorCodes.UNEXPECTED_LEXICAL_ANALYSIS, context.lastStartLoc, 0, getTokenCaption(token));
            }
            linkedNode.key = parseList(tokenizer, token.value || '');
            break;
        case 7 /* TokenTypes.Literal */:
            if (token.value == null) {
                emitError(tokenizer, CompileErrorCodes.UNEXPECTED_LEXICAL_ANALYSIS, context.lastStartLoc, 0, getTokenCaption(token));
            }
            linkedNode.key = parseLiteral(tokenizer, token.value || '');
            break;
        default:
            // empty key: recover with an empty LinkedKey node anchored at the
            // current position, returning the offending token to the caller.
            emitError(tokenizer, CompileErrorCodes.UNEXPECTED_EMPTY_LINKED_KEY, context.lastStartLoc, 0);
            const nextContext = tokenizer.context();
            const emptyLinkedKeyNode = startNode(7 /* NodeTypes.LinkedKey */, nextContext.offset, nextContext.startLoc);
            emptyLinkedKeyNode.value = '';
            endNode(emptyLinkedKeyNode, nextContext.offset, nextContext.startLoc);
            linkedNode.key = emptyLinkedKeyNode;
            endNode(linkedNode, nextContext.offset, nextContext.startLoc);
            return {
                nextConsumeToken: token,
                node: linkedNode
            };
    }
    endNode(linkedNode, tokenizer.currentOffset(), tokenizer.currentPosition());
    return {
        node: linkedNode
    };
}
// Parse a single message: a run of text chunks, placeholders ({name}, {0},
// {'literal'}) and linked references, terminated by EOF or a plural
// separator `|`.
function parseMessage(tokenizer) {
    const context = tokenizer.context();
    // When resuming after a `|`, the message starts at the tokenizer's
    // current position; otherwise it starts at the context's recorded start.
    const startOffset = context.currentType === 1 /* TokenTypes.Pipe */
        ? tokenizer.currentOffset()
        : context.offset;
    const startLoc = context.currentType === 1 /* TokenTypes.Pipe */
        ? context.endLoc
        : context.startLoc;
    const node = startNode(2 /* NodeTypes.Message */, startOffset, startLoc);
    node.items = [];
    let nextToken = null;
    do {
        // A sub-parser (parseLinked) may hand back an already-read token;
        // re-dispatch it here before pulling a fresh one.
        const token = nextToken || tokenizer.nextToken();
        nextToken = null;
        switch (token.type) {
            case 0 /* TokenTypes.Text */:
                if (token.value == null) {
                    emitError(tokenizer, CompileErrorCodes.UNEXPECTED_LEXICAL_ANALYSIS, context.lastStartLoc, 0, getTokenCaption(token));
                }
                node.items.push(parseText(tokenizer, token.value || ''));
                break;
            case 6 /* TokenTypes.List */:
                if (token.value == null) {
                    emitError(tokenizer, CompileErrorCodes.UNEXPECTED_LEXICAL_ANALYSIS, context.lastStartLoc, 0, getTokenCaption(token));
                }
                node.items.push(parseList(tokenizer, token.value || ''));
                break;
            case 5 /* TokenTypes.Named */:
                if (token.value == null) {
                    emitError(tokenizer, CompileErrorCodes.UNEXPECTED_LEXICAL_ANALYSIS, context.lastStartLoc, 0, getTokenCaption(token));
                }
                node.items.push(parseNamed(tokenizer, token.value || ''));
                break;
            case 7 /* TokenTypes.Literal */:
                if (token.value == null) {
                    emitError(tokenizer, CompileErrorCodes.UNEXPECTED_LEXICAL_ANALYSIS, context.lastStartLoc, 0, getTokenCaption(token));
                }
                node.items.push(parseLiteral(tokenizer, token.value || ''));
                break;
            case 8 /* TokenTypes.LinkedAlias */:
                const parsed = parseLinked(tokenizer);
                node.items.push(parsed.node);
                nextToken = parsed.nextConsumeToken || null;
                break;
        }
    } while (context.currentType !== 14 /* TokenTypes.EOF */ &&
        context.currentType !== 1 /* TokenTypes.Pipe */);
    // adjust message node loc: when stopped at a `|`, the message ends just
    // before the separator rather than at the tokenizer's current position.
    const endOffset = context.currentType === 1 /* TokenTypes.Pipe */
        ? context.lastOffset
        : tokenizer.currentOffset();
    const endLoc = context.currentType === 1 /* TokenTypes.Pipe */
        ? context.lastEndLoc
        : tokenizer.currentPosition();
    endNode(node, endOffset, endLoc);
    return node;
}
  1086. function parsePlural(tokenizer, offset, loc, msgNode) {
  1087. const context = tokenizer.context();
  1088. let hasEmptyMessage = msgNode.items.length === 0;
  1089. const node = startNode(1 /* NodeTypes.Plural */, offset, loc);
  1090. node.cases = [];
  1091. node.cases.push(msgNode);
  1092. do {
  1093. const msg = parseMessage(tokenizer);
  1094. if (!hasEmptyMessage) {
  1095. hasEmptyMessage = msg.items.length === 0;
  1096. }
  1097. node.cases.push(msg);
  1098. } while (context.currentType !== 14 /* TokenTypes.EOF */);
  1099. if (hasEmptyMessage) {
  1100. emitError(tokenizer, CompileErrorCodes.MUST_HAVE_MESSAGES_IN_PLURAL, loc, 0);
  1101. }
  1102. endNode(node, tokenizer.currentOffset(), tokenizer.currentPosition());
  1103. return node;
  1104. }
  1105. function parseResource(tokenizer) {
  1106. const context = tokenizer.context();
  1107. const { offset, startLoc } = context;
  1108. const msgNode = parseMessage(tokenizer);
  1109. if (context.currentType === 14 /* TokenTypes.EOF */) {
  1110. return msgNode;
  1111. }
  1112. else {
  1113. return parsePlural(tokenizer, offset, startLoc, msgNode);
  1114. }
  1115. }
  1116. function parse(source) {
  1117. const tokenizer = createTokenizer(source, assign({}, options));
  1118. const context = tokenizer.context();
  1119. const node = startNode(0 /* NodeTypes.Resource */, context.offset, context.startLoc);
  1120. if (location && node.loc) {
  1121. node.loc.source = source;
  1122. }
  1123. node.body = parseResource(tokenizer);
  1124. // assert whether achieved to EOF
  1125. if (context.currentType !== 14 /* TokenTypes.EOF */) {
  1126. emitError(tokenizer, CompileErrorCodes.UNEXPECTED_LEXICAL_ANALYSIS, context.lastStartLoc, 0, source[context.offset] || '');
  1127. }
  1128. endNode(node, tokenizer.currentOffset(), tokenizer.currentPosition());
  1129. return node;
  1130. }
  1131. return { parse };
  1132. }
  1133. function getTokenCaption(token) {
  1134. if (token.type === 14 /* TokenTypes.EOF */) {
  1135. return 'EOF';
  1136. }
  1137. const name = (token.value || '').replace(/\r?\n/gu, '\\n');
  1138. return name.length > 10 ? name.slice(0, 9) + '…' : name;
  1139. }
  1140. function createTransformer(ast, options = {} // eslint-disable-line
  1141. ) {
  1142. const _context = {
  1143. ast,
  1144. helpers: new Set()
  1145. };
  1146. const context = () => _context;
  1147. const helper = (name) => {
  1148. _context.helpers.add(name);
  1149. return name;
  1150. };
  1151. return { context, helper };
  1152. }
  1153. function traverseNodes(nodes, transformer) {
  1154. for (let i = 0; i < nodes.length; i++) {
  1155. traverseNode(nodes[i], transformer);
  1156. }
  1157. }
  1158. function traverseNode(node, transformer) {
  1159. // TODO: if we need pre-hook of transform, should be implemented to here
  1160. switch (node.type) {
  1161. case 1 /* NodeTypes.Plural */:
  1162. traverseNodes(node.cases, transformer);
  1163. transformer.helper("plural" /* HelperNameMap.PLURAL */);
  1164. break;
  1165. case 2 /* NodeTypes.Message */:
  1166. traverseNodes(node.items, transformer);
  1167. break;
  1168. case 6 /* NodeTypes.Linked */:
  1169. const linked = node;
  1170. traverseNode(linked.key, transformer);
  1171. transformer.helper("linked" /* HelperNameMap.LINKED */);
  1172. transformer.helper("type" /* HelperNameMap.TYPE */);
  1173. break;
  1174. case 5 /* NodeTypes.List */:
  1175. transformer.helper("interpolate" /* HelperNameMap.INTERPOLATE */);
  1176. transformer.helper("list" /* HelperNameMap.LIST */);
  1177. break;
  1178. case 4 /* NodeTypes.Named */:
  1179. transformer.helper("interpolate" /* HelperNameMap.INTERPOLATE */);
  1180. transformer.helper("named" /* HelperNameMap.NAMED */);
  1181. break;
  1182. }
  1183. // TODO: if we need post-hook of transform, should be implemented to here
  1184. }
  1185. // transform AST
  1186. function transform(ast, options = {} // eslint-disable-line
  1187. ) {
  1188. const transformer = createTransformer(ast);
  1189. transformer.helper("normalize" /* HelperNameMap.NORMALIZE */);
  1190. // traverse
  1191. ast.body && traverseNode(ast.body, transformer);
  1192. // set meta information
  1193. const context = transformer.context();
  1194. ast.helpers = Array.from(context.helpers);
  1195. }
  1196. function createCodeGenerator(ast, options) {
  1197. const { sourceMap, filename, breakLineCode, needIndent: _needIndent } = options;
  1198. const _context = {
  1199. source: ast.loc.source,
  1200. filename,
  1201. code: '',
  1202. column: 1,
  1203. line: 1,
  1204. offset: 0,
  1205. map: undefined,
  1206. breakLineCode,
  1207. needIndent: _needIndent,
  1208. indentLevel: 0
  1209. };
  1210. const context = () => _context;
  1211. function push(code, node) {
  1212. _context.code += code;
  1213. }
  1214. function _newline(n, withBreakLine = true) {
  1215. const _breakLineCode = withBreakLine ? breakLineCode : '';
  1216. push(_needIndent ? _breakLineCode + ` `.repeat(n) : _breakLineCode);
  1217. }
  1218. function indent(withNewLine = true) {
  1219. const level = ++_context.indentLevel;
  1220. withNewLine && _newline(level);
  1221. }
  1222. function deindent(withNewLine = true) {
  1223. const level = --_context.indentLevel;
  1224. withNewLine && _newline(level);
  1225. }
  1226. function newline() {
  1227. _newline(_context.indentLevel);
  1228. }
  1229. const helper = (key) => `_${key}`;
  1230. const needIndent = () => _context.needIndent;
  1231. return {
  1232. context,
  1233. push,
  1234. indent,
  1235. deindent,
  1236. newline,
  1237. helper,
  1238. needIndent
  1239. };
  1240. }
  1241. function generateLinkedNode(generator, node) {
  1242. const { helper } = generator;
  1243. generator.push(`${helper("linked" /* HelperNameMap.LINKED */)}(`);
  1244. generateNode(generator, node.key);
  1245. if (node.modifier) {
  1246. generator.push(`, `);
  1247. generateNode(generator, node.modifier);
  1248. generator.push(`, _type`);
  1249. }
  1250. else {
  1251. generator.push(`, undefined, _type`);
  1252. }
  1253. generator.push(`)`);
  1254. }
  1255. function generateMessageNode(generator, node) {
  1256. const { helper, needIndent } = generator;
  1257. generator.push(`${helper("normalize" /* HelperNameMap.NORMALIZE */)}([`);
  1258. generator.indent(needIndent());
  1259. const length = node.items.length;
  1260. for (let i = 0; i < length; i++) {
  1261. generateNode(generator, node.items[i]);
  1262. if (i === length - 1) {
  1263. break;
  1264. }
  1265. generator.push(', ');
  1266. }
  1267. generator.deindent(needIndent());
  1268. generator.push('])');
  1269. }
  1270. function generatePluralNode(generator, node) {
  1271. const { helper, needIndent } = generator;
  1272. if (node.cases.length > 1) {
  1273. generator.push(`${helper("plural" /* HelperNameMap.PLURAL */)}([`);
  1274. generator.indent(needIndent());
  1275. const length = node.cases.length;
  1276. for (let i = 0; i < length; i++) {
  1277. generateNode(generator, node.cases[i]);
  1278. if (i === length - 1) {
  1279. break;
  1280. }
  1281. generator.push(', ');
  1282. }
  1283. generator.deindent(needIndent());
  1284. generator.push(`])`);
  1285. }
  1286. }
  1287. function generateResource(generator, node) {
  1288. if (node.body) {
  1289. generateNode(generator, node.body);
  1290. }
  1291. else {
  1292. generator.push('null');
  1293. }
  1294. }
  1295. function generateNode(generator, node) {
  1296. const { helper } = generator;
  1297. switch (node.type) {
  1298. case 0 /* NodeTypes.Resource */:
  1299. generateResource(generator, node);
  1300. break;
  1301. case 1 /* NodeTypes.Plural */:
  1302. generatePluralNode(generator, node);
  1303. break;
  1304. case 2 /* NodeTypes.Message */:
  1305. generateMessageNode(generator, node);
  1306. break;
  1307. case 6 /* NodeTypes.Linked */:
  1308. generateLinkedNode(generator, node);
  1309. break;
  1310. case 8 /* NodeTypes.LinkedModifier */:
  1311. generator.push(JSON.stringify(node.value), node);
  1312. break;
  1313. case 7 /* NodeTypes.LinkedKey */:
  1314. generator.push(JSON.stringify(node.value), node);
  1315. break;
  1316. case 5 /* NodeTypes.List */:
  1317. generator.push(`${helper("interpolate" /* HelperNameMap.INTERPOLATE */)}(${helper("list" /* HelperNameMap.LIST */)}(${node.index}))`, node);
  1318. break;
  1319. case 4 /* NodeTypes.Named */:
  1320. generator.push(`${helper("interpolate" /* HelperNameMap.INTERPOLATE */)}(${helper("named" /* HelperNameMap.NAMED */)}(${JSON.stringify(node.key)}))`, node);
  1321. break;
  1322. case 9 /* NodeTypes.Literal */:
  1323. generator.push(JSON.stringify(node.value), node);
  1324. break;
  1325. case 3 /* NodeTypes.Text */:
  1326. generator.push(JSON.stringify(node.value), node);
  1327. break;
  1328. default:
  1329. {
  1330. throw new Error(`unhandled codegen node type: ${node.type}`);
  1331. }
  1332. }
  1333. }
  1334. // generate code from AST
  1335. const generate = (ast, options = {} // eslint-disable-line
  1336. ) => {
  1337. const mode = isString(options.mode) ? options.mode : 'normal';
  1338. const filename = isString(options.filename)
  1339. ? options.filename
  1340. : 'message.intl';
  1341. const sourceMap = !!options.sourceMap;
  1342. // prettier-ignore
  1343. const breakLineCode = options.breakLineCode != null
  1344. ? options.breakLineCode
  1345. : mode === 'arrow'
  1346. ? ';'
  1347. : '\n';
  1348. const needIndent = options.needIndent ? options.needIndent : mode !== 'arrow';
  1349. const helpers = ast.helpers || [];
  1350. const generator = createCodeGenerator(ast, {
  1351. mode,
  1352. filename,
  1353. sourceMap,
  1354. breakLineCode,
  1355. needIndent
  1356. });
  1357. generator.push(mode === 'normal' ? `function __msg__ (ctx) {` : `(ctx) => {`);
  1358. generator.indent(needIndent);
  1359. if (helpers.length > 0) {
  1360. generator.push(`const { ${helpers.map(s => `${s}: _${s}`).join(', ')} } = ctx`);
  1361. generator.newline();
  1362. }
  1363. generator.push(`return `);
  1364. generateNode(generator, ast);
  1365. generator.deindent(needIndent);
  1366. generator.push(`}`);
  1367. const { code, map } = generator.context();
  1368. return {
  1369. ast,
  1370. code,
  1371. map: map ? map.toJSON() : undefined // eslint-disable-line @typescript-eslint/no-explicit-any
  1372. };
  1373. };
  1374. function baseCompile(source, options = {}) {
  1375. const assignedOptions = assign({}, options);
  1376. // parse source codes
  1377. const parser = createParser(assignedOptions);
  1378. const ast = parser.parse(source);
  1379. // transform ASTs
  1380. transform(ast, assignedOptions);
  1381. // generate javascript codes
  1382. return generate(ast, assignedOptions);
  1383. }
  1384. export { CompileErrorCodes, ERROR_DOMAIN, LocationStub, baseCompile, createCompileError, createLocation, createParser, createPosition, defaultOnError, detectHtmlTag, errorMessages };