版博士V2.0程序
Вы не можете выбрать более 25 тем Темы должны начинаться с буквы или цифры, могут содержать дефисы(-) и должны содержать не более 35 символов.
 
 
 
 

1444 строки
54 KiB

  1. /*!
  2. * message-compiler v9.3.0-beta.17
  3. * (c) 2023 kazuya kawaguchi
  4. * Released under the MIT License.
  5. */
  6. 'use strict';
  7. var shared = require('@intlify/shared');
  8. var sourceMap = require('source-map');
// Error codes shared by the tokenizer and the parser.
// The numeric values are part of the public error contract (they appear on
// `error.code` of thrown SyntaxErrors), so they must stay stable.
const CompileErrorCodes = {
    // tokenizer error codes
    EXPECTED_TOKEN: 1,
    INVALID_TOKEN_IN_PLACEHOLDER: 2,
    UNTERMINATED_SINGLE_QUOTE_IN_PLACEHOLDER: 3,
    UNKNOWN_ESCAPE_SEQUENCE: 4,
    INVALID_UNICODE_ESCAPE_SEQUENCE: 5,
    UNBALANCED_CLOSING_BRACE: 6,
    UNTERMINATED_CLOSING_BRACE: 7,
    EMPTY_PLACEHOLDER: 8,
    NOT_ALLOW_NEST_PLACEHOLDER: 9,
    INVALID_LINKED_FORMAT: 10,
    // parser error codes
    MUST_HAVE_MESSAGES_IN_PLURAL: 11,
    UNEXPECTED_EMPTY_LINKED_MODIFIER: 12,
    UNEXPECTED_EMPTY_LINKED_KEY: 13,
    UNEXPECTED_LEXICAL_ANALYSIS: 14,
    // Special value for higher-order compilers to pick up the last code
    // to avoid collision of error codes. This should always be kept as the last
    // item.
    __EXTEND_POINT__: 15
};
/** @internal */
// Default human-readable message for each CompileErrorCodes entry.
// `{0}`-style placeholders are interpolated by `shared.format` inside
// createCompileError; callers may override this table via `options.messages`.
const errorMessages = {
    // tokenizer error messages
    [CompileErrorCodes.EXPECTED_TOKEN]: `Expected token: '{0}'`,
    [CompileErrorCodes.INVALID_TOKEN_IN_PLACEHOLDER]: `Invalid token in placeholder: '{0}'`,
    [CompileErrorCodes.UNTERMINATED_SINGLE_QUOTE_IN_PLACEHOLDER]: `Unterminated single quote in placeholder`,
    [CompileErrorCodes.UNKNOWN_ESCAPE_SEQUENCE]: `Unknown escape sequence: \\{0}`,
    [CompileErrorCodes.INVALID_UNICODE_ESCAPE_SEQUENCE]: `Invalid unicode escape sequence: {0}`,
    [CompileErrorCodes.UNBALANCED_CLOSING_BRACE]: `Unbalanced closing brace`,
    [CompileErrorCodes.UNTERMINATED_CLOSING_BRACE]: `Unterminated closing brace`,
    [CompileErrorCodes.EMPTY_PLACEHOLDER]: `Empty placeholder`,
    [CompileErrorCodes.NOT_ALLOW_NEST_PLACEHOLDER]: `Not allowed nest placeholder`,
    [CompileErrorCodes.INVALID_LINKED_FORMAT]: `Invalid linked format`,
    // parser error messages
    [CompileErrorCodes.MUST_HAVE_MESSAGES_IN_PLURAL]: `Plural must have messages`,
    [CompileErrorCodes.UNEXPECTED_EMPTY_LINKED_MODIFIER]: `Unexpected empty linked modifier`,
    [CompileErrorCodes.UNEXPECTED_EMPTY_LINKED_KEY]: `Unexpected empty linked key`,
    [CompileErrorCodes.UNEXPECTED_LEXICAL_ANALYSIS]: `Unexpected lexical analysis in token: '{0}'`
};
  50. function createCompileError(code, loc, options = {}) {
  51. const { domain, messages, args } = options;
  52. const msg = shared.format((messages || errorMessages)[code] || '', ...(args || []))
  53. ;
  54. const error = new SyntaxError(String(msg));
  55. error.code = code;
  56. if (loc) {
  57. error.location = loc;
  58. }
  59. error.domain = domain;
  60. return error;
  61. }
  62. /** @internal */
  63. function defaultOnError(error) {
  64. throw error;
  65. }
  66. const RE_HTML_TAG = /<\/?[\w\s="/.':;#-\/]+>/;
  67. const detectHtmlTag = (source) => RE_HTML_TAG.test(source);
// Zero-width fallback location (line/column are 1-based, offset is 0-based)
// used when no real source location information is available.
const LocationStub = {
    start: { line: 1, column: 1, offset: 0 },
    end: { line: 1, column: 1, offset: 0 }
};
  72. function createPosition(line, column, offset) {
  73. return { line, column, offset };
  74. }
  75. function createLocation(start, end, source) {
  76. const loc = { start, end };
  77. if (source != null) {
  78. loc.source = source;
  79. }
  80. return loc;
  81. }
  82. const CHAR_SP = ' ';
  83. const CHAR_CR = '\r';
  84. const CHAR_LF = '\n';
  85. const CHAR_LS = String.fromCharCode(0x2028);
  86. const CHAR_PS = String.fromCharCode(0x2029);
  87. function createScanner(str) {
  88. const _buf = str;
  89. let _index = 0;
  90. let _line = 1;
  91. let _column = 1;
  92. let _peekOffset = 0;
  93. const isCRLF = (index) => _buf[index] === CHAR_CR && _buf[index + 1] === CHAR_LF;
  94. const isLF = (index) => _buf[index] === CHAR_LF;
  95. const isPS = (index) => _buf[index] === CHAR_PS;
  96. const isLS = (index) => _buf[index] === CHAR_LS;
  97. const isLineEnd = (index) => isCRLF(index) || isLF(index) || isPS(index) || isLS(index);
  98. const index = () => _index;
  99. const line = () => _line;
  100. const column = () => _column;
  101. const peekOffset = () => _peekOffset;
  102. const charAt = (offset) => isCRLF(offset) || isPS(offset) || isLS(offset) ? CHAR_LF : _buf[offset];
  103. const currentChar = () => charAt(_index);
  104. const currentPeek = () => charAt(_index + _peekOffset);
  105. function next() {
  106. _peekOffset = 0;
  107. if (isLineEnd(_index)) {
  108. _line++;
  109. _column = 0;
  110. }
  111. if (isCRLF(_index)) {
  112. _index++;
  113. }
  114. _index++;
  115. _column++;
  116. return _buf[_index];
  117. }
  118. function peek() {
  119. if (isCRLF(_index + _peekOffset)) {
  120. _peekOffset++;
  121. }
  122. _peekOffset++;
  123. return _buf[_index + _peekOffset];
  124. }
  125. function reset() {
  126. _index = 0;
  127. _line = 1;
  128. _column = 1;
  129. _peekOffset = 0;
  130. }
  131. function resetPeek(offset = 0) {
  132. _peekOffset = offset;
  133. }
  134. function skipToPeek() {
  135. const target = _index + _peekOffset;
  136. // eslint-disable-next-line no-unmodified-loop-condition
  137. while (target !== _index) {
  138. next();
  139. }
  140. _peekOffset = 0;
  141. }
  142. return {
  143. index,
  144. line,
  145. column,
  146. peekOffset,
  147. charAt,
  148. currentChar,
  149. currentPeek,
  150. next,
  151. peek,
  152. reset,
  153. resetPeek,
  154. skipToPeek
  155. };
  156. }
// End-of-input sentinel: the scanner yields `undefined` past the buffer end.
const EOF = undefined;
// Quote character delimiting literal placeholders, e.g. {'@'}.
const LITERAL_DELIMITER = "'";
// Error domain tag attached to tokenizer-emitted compile errors.
const ERROR_DOMAIN$1 = 'tokenizer';
/**
 * Create a tokenizer over a message `source`.
 *
 * The tokenizer is a stateful scanner that emits tokens for the message
 * syntax: plain text, `{named}` / `{0}` / `{'literal'}` placeholders,
 * `|`-separated plurals, `%{...}` (Ruby-on-Rails style) modulo placeholders,
 * and `@.modifier:key` linked messages. State such as `braceNest` and
 * `inLinked` is threaded through `_context` between `nextToken()` calls.
 *
 * @param source - message source text
 * @param options - `{ location?: boolean, onError?: (err) => void }`;
 *   `location` (default true) attaches `loc` info to every token
 * @returns `{ nextToken, currentOffset, currentPosition, context }`
 */
function createTokenizer(source, options = {}) {
    const location = options.location !== false;
    const _scnr = createScanner(source);
    const currentOffset = () => _scnr.index();
    const currentPosition = () => createPosition(_scnr.line(), _scnr.column(), _scnr.index());
    const _initLoc = currentPosition();
    const _initOffset = currentOffset();
    // Mutable tokenization state shared across nextToken() calls.
    const _context = {
        currentType: 14 /* TokenTypes.EOF */,
        offset: _initOffset,
        startLoc: _initLoc,
        endLoc: _initLoc,
        lastType: 14 /* TokenTypes.EOF */,
        lastOffset: _initOffset,
        lastStartLoc: _initLoc,
        lastEndLoc: _initLoc,
        braceNest: 0, // placeholder `{` nesting depth (only 1 is legal)
        inLinked: false, // true while scanning an @-linked message
        text: ''
    };
    const context = () => _context;
    const { onError } = options;
    // Report a tokenizer error at `pos` (shifted by `offset` columns); only
    // forwarded when the caller installed an onError handler.
    function emitError(code, pos, offset, ...args) {
        const ctx = context();
        pos.column += offset;
        pos.offset += offset;
        if (onError) {
            const loc = createLocation(ctx.startLoc, pos);
            const err = createCompileError(code, loc, {
                domain: ERROR_DOMAIN$1,
                args
            });
            onError(err);
        }
    }
    // Finish the current token: stamp its end location on the context and
    // build the token record. NOTE: the `context` parameter shadows the outer
    // context() accessor on purpose.
    function getToken(context, type, value) {
        context.endLoc = currentPosition();
        context.currentType = type;
        const token = { type };
        if (location) {
            token.loc = createLocation(context.startLoc, context.endLoc);
        }
        if (value != null) {
            token.value = value;
        }
        return token;
    }
    const getEndToken = (context) => getToken(context, 14 /* TokenTypes.EOF */);
    // Consume `ch` if it is the current character; otherwise emit
    // EXPECTED_TOKEN and return '' without consuming.
    function eat(scnr, ch) {
        if (scnr.currentChar() === ch) {
            scnr.next();
            return ch;
        }
        else {
            emitError(CompileErrorCodes.EXPECTED_TOKEN, currentPosition(), 0, ch);
            return '';
        }
    }
    // Look ahead over spaces/newlines without consuming; returns them as a string.
    function peekSpaces(scnr) {
        let buf = '';
        while (scnr.currentPeek() === CHAR_SP || scnr.currentPeek() === CHAR_LF) {
            buf += scnr.currentPeek();
            scnr.peek();
        }
        return buf;
    }
    // Consume spaces/newlines; returns what was skipped.
    function skipSpaces(scnr) {
        const buf = peekSpaces(scnr);
        scnr.skipToPeek();
        return buf;
    }
    // ASCII letter or underscore — valid first char of a named identifier.
    function isIdentifierStart(ch) {
        if (ch === EOF) {
            return false;
        }
        const cc = ch.charCodeAt(0);
        return ((cc >= 97 && cc <= 122) || // a-z
            (cc >= 65 && cc <= 90) || // A-Z
            cc === 95 // _
        );
    }
    // ASCII digit — valid first char of a list (positional) index.
    function isNumberStart(ch) {
        if (ch === EOF) {
            return false;
        }
        const cc = ch.charCodeAt(0);
        return cc >= 48 && cc <= 57; // 0-9
    }
    // After `{`: does a named placeholder key start here? (pure lookahead)
    function isNamedIdentifierStart(scnr, context) {
        const { currentType } = context;
        if (currentType !== 2 /* TokenTypes.BraceLeft */) {
            return false;
        }
        peekSpaces(scnr);
        const ret = isIdentifierStart(scnr.currentPeek());
        scnr.resetPeek();
        return ret;
    }
    // After `{`: does a (possibly negative) list index start here?
    function isListIdentifierStart(scnr, context) {
        const { currentType } = context;
        if (currentType !== 2 /* TokenTypes.BraceLeft */) {
            return false;
        }
        peekSpaces(scnr);
        const ch = scnr.currentPeek() === '-' ? scnr.peek() : scnr.currentPeek();
        const ret = isNumberStart(ch);
        scnr.resetPeek();
        return ret;
    }
    // After `{`: does a single-quoted literal start here?
    function isLiteralStart(scnr, context) {
        const { currentType } = context;
        if (currentType !== 2 /* TokenTypes.BraceLeft */) {
            return false;
        }
        peekSpaces(scnr);
        const ret = scnr.currentPeek() === LITERAL_DELIMITER;
        scnr.resetPeek();
        return ret;
    }
    // After `@`: is the next non-space char the linked-modifier dot?
    function isLinkedDotStart(scnr, context) {
        const { currentType } = context;
        if (currentType !== 8 /* TokenTypes.LinkedAlias */) {
            return false;
        }
        peekSpaces(scnr);
        const ret = scnr.currentPeek() === "." /* TokenChars.LinkedDot */;
        scnr.resetPeek();
        return ret;
    }
    // After `@.`: does a modifier identifier start here?
    function isLinkedModifierStart(scnr, context) {
        const { currentType } = context;
        if (currentType !== 9 /* TokenTypes.LinkedDot */) {
            return false;
        }
        peekSpaces(scnr);
        const ret = isIdentifierStart(scnr.currentPeek());
        scnr.resetPeek();
        return ret;
    }
    // After `@` or `@.modifier`: is the next non-space char the `:` delimiter?
    function isLinkedDelimiterStart(scnr, context) {
        const { currentType } = context;
        if (!(currentType === 8 /* TokenTypes.LinkedAlias */ ||
            currentType === 12 /* TokenTypes.LinkedModifier */)) {
            return false;
        }
        peekSpaces(scnr);
        const ret = scnr.currentPeek() === ":" /* TokenChars.LinkedDelimiter */;
        scnr.resetPeek();
        return ret;
    }
    // After `@...:`: does a linked key (bare identifier or `{...}` placeholder)
    // start here? Skips newlines during lookahead.
    function isLinkedReferStart(scnr, context) {
        const { currentType } = context;
        if (currentType !== 10 /* TokenTypes.LinkedDelimiter */) {
            return false;
        }
        const fn = () => {
            const ch = scnr.currentPeek();
            if (ch === "{" /* TokenChars.BraceLeft */) {
                return isIdentifierStart(scnr.peek());
            }
            else if (ch === "@" /* TokenChars.LinkedAlias */ ||
                ch === "%" /* TokenChars.Modulo */ ||
                ch === "|" /* TokenChars.Pipe */ ||
                ch === ":" /* TokenChars.LinkedDelimiter */ ||
                ch === "." /* TokenChars.LinkedDot */ ||
                ch === CHAR_SP ||
                !ch) {
                return false;
            }
            else if (ch === CHAR_LF) {
                scnr.peek();
                return fn();
            }
            else {
                // other characters
                return isIdentifierStart(ch);
            }
        };
        const ret = fn();
        scnr.resetPeek();
        return ret;
    }
    // Is the next non-space char a plural separator `|`?
    function isPluralStart(scnr) {
        peekSpaces(scnr);
        const ret = scnr.currentPeek() === "|" /* TokenChars.Pipe */;
        scnr.resetPeek();
        return ret;
    }
    // Detect a `%{` modulo-placeholder start; also reports whether spaces
    // precede it (a preceding space turns the `%` into plain text).
    function detectModuloStart(scnr) {
        const spaces = peekSpaces(scnr);
        const ret = scnr.currentPeek() === "%" /* TokenChars.Modulo */ &&
            scnr.peek() === "{" /* TokenChars.BraceLeft */;
        scnr.resetPeek();
        return {
            isModulo: ret,
            hasSpace: spaces.length > 0
        };
    }
    // Decide whether plain text starts at the lookahead position, tracking
    // whether spaces were seen and whether a `%` (modulo) was encountered.
    // When `reset` is false the peek offset is deliberately left advanced.
    function isTextStart(scnr, reset = true) {
        const fn = (hasSpace = false, prev = '', detectModulo = false) => {
            const ch = scnr.currentPeek();
            if (ch === "{" /* TokenChars.BraceLeft */) {
                return prev === "%" /* TokenChars.Modulo */ ? false : hasSpace;
            }
            else if (ch === "@" /* TokenChars.LinkedAlias */ || !ch) {
                return prev === "%" /* TokenChars.Modulo */ ? true : hasSpace;
            }
            else if (ch === "%" /* TokenChars.Modulo */) {
                scnr.peek();
                return fn(hasSpace, "%" /* TokenChars.Modulo */, true);
            }
            else if (ch === "|" /* TokenChars.Pipe */) {
                return prev === "%" /* TokenChars.Modulo */ || detectModulo
                    ? true
                    : !(prev === CHAR_SP || prev === CHAR_LF);
            }
            else if (ch === CHAR_SP) {
                scnr.peek();
                return fn(true, CHAR_SP, detectModulo);
            }
            else if (ch === CHAR_LF) {
                scnr.peek();
                return fn(true, CHAR_LF, detectModulo);
            }
            else {
                return true;
            }
        };
        const ret = fn();
        reset && scnr.resetPeek();
        return ret;
    }
    // Consume and return the current char if predicate `fn` accepts it;
    // returns EOF at end of input, null on a rejected char (not consumed).
    function takeChar(scnr, fn) {
        const ch = scnr.currentChar();
        if (ch === EOF) {
            return EOF;
        }
        if (fn(ch)) {
            scnr.next();
            return ch;
        }
        return null;
    }
    // Consume one identifier character [A-Za-z0-9_$], or null/EOF.
    function takeIdentifierChar(scnr) {
        const closure = (ch) => {
            const cc = ch.charCodeAt(0);
            return ((cc >= 97 && cc <= 122) || // a-z
                (cc >= 65 && cc <= 90) || // A-Z
                (cc >= 48 && cc <= 57) || // 0-9
                cc === 95 || // _
                cc === 36 // $
            );
        };
        return takeChar(scnr, closure);
    }
    // Consume one decimal digit, or null/EOF.
    function takeDigit(scnr) {
        const closure = (ch) => {
            const cc = ch.charCodeAt(0);
            return cc >= 48 && cc <= 57; // 0-9
        };
        return takeChar(scnr, closure);
    }
    // Consume one hexadecimal digit, or null/EOF.
    function takeHexDigit(scnr) {
        const closure = (ch) => {
            const cc = ch.charCodeAt(0);
            return ((cc >= 48 && cc <= 57) || // 0-9
                (cc >= 65 && cc <= 70) || // A-F
                (cc >= 97 && cc <= 102)); // a-f
        };
        return takeChar(scnr, closure);
    }
    // Consume a maximal run of decimal digits and return them as a string.
    function getDigits(scnr) {
        let ch = '';
        let num = '';
        while ((ch = takeDigit(scnr))) {
            num += ch;
        }
        return num;
    }
    // Consume a `%` (emitting EXPECTED_TOKEN if absent) and return "%".
    function readModulo(scnr) {
        skipSpaces(scnr);
        const ch = scnr.currentChar();
        if (ch !== "%" /* TokenChars.Modulo */) {
            emitError(CompileErrorCodes.EXPECTED_TOKEN, currentPosition(), 0, ch);
        }
        scnr.next();
        return "%" /* TokenChars.Modulo */;
    }
    // Consume plain text up to the next syntax character ({ } @ |), EOF, or a
    // `%`/space/newline that begins a non-text construct.
    function readText(scnr) {
        let buf = '';
        while (true) {
            const ch = scnr.currentChar();
            if (ch === "{" /* TokenChars.BraceLeft */ ||
                ch === "}" /* TokenChars.BraceRight */ ||
                ch === "@" /* TokenChars.LinkedAlias */ ||
                ch === "|" /* TokenChars.Pipe */ ||
                !ch) {
                break;
            }
            else if (ch === "%" /* TokenChars.Modulo */) {
                if (isTextStart(scnr)) {
                    buf += ch;
                    scnr.next();
                }
                else {
                    break;
                }
            }
            else if (ch === CHAR_SP || ch === CHAR_LF) {
                if (isTextStart(scnr)) {
                    buf += ch;
                    scnr.next();
                }
                else if (isPluralStart(scnr)) {
                    break;
                }
                else {
                    buf += ch;
                    scnr.next();
                }
            }
            else {
                buf += ch;
                scnr.next();
            }
        }
        return buf;
    }
    // Consume a named placeholder key; emits UNTERMINATED_CLOSING_BRACE when
    // input ends before the closing `}`.
    function readNamedIdentifier(scnr) {
        skipSpaces(scnr);
        let ch = '';
        let name = '';
        while ((ch = takeIdentifierChar(scnr))) {
            name += ch;
        }
        if (scnr.currentChar() === EOF) {
            emitError(CompileErrorCodes.UNTERMINATED_CLOSING_BRACE, currentPosition(), 0);
        }
        return name;
    }
    // Consume a (possibly negative) list index as a string.
    function readListIdentifier(scnr) {
        skipSpaces(scnr);
        let value = '';
        if (scnr.currentChar() === '-') {
            scnr.next();
            value += `-${getDigits(scnr)}`;
        }
        else {
            value += getDigits(scnr);
        }
        if (scnr.currentChar() === EOF) {
            emitError(CompileErrorCodes.UNTERMINATED_CLOSING_BRACE, currentPosition(), 0);
        }
        return value;
    }
    // Consume a single-quoted literal, handling backslash escapes and
    // reporting an unterminated quote at newline/EOF.
    function readLiteral(scnr) {
        skipSpaces(scnr);
        eat(scnr, `\'`);
        let ch = '';
        let literal = '';
        const fn = (x) => x !== LITERAL_DELIMITER && x !== CHAR_LF;
        while ((ch = takeChar(scnr, fn))) {
            if (ch === '\\') {
                literal += readEscapeSequence(scnr);
            }
            else {
                literal += ch;
            }
        }
        const current = scnr.currentChar();
        if (current === CHAR_LF || current === EOF) {
            emitError(CompileErrorCodes.UNTERMINATED_SINGLE_QUOTE_IN_PLACEHOLDER, currentPosition(), 0);
            // TODO: Is it correct really?
            if (current === CHAR_LF) {
                scnr.next();
                eat(scnr, `\'`);
            }
            return literal;
        }
        eat(scnr, `\'`);
        return literal;
    }
    // Consume the char after a backslash: \\, \', \uHHHH or \UHHHHHH; emits
    // UNKNOWN_ESCAPE_SEQUENCE otherwise. Returns the raw escape text.
    function readEscapeSequence(scnr) {
        const ch = scnr.currentChar();
        switch (ch) {
            case '\\':
            case `\'`:
                scnr.next();
                return `\\${ch}`;
            case 'u':
                return readUnicodeEscapeSequence(scnr, ch, 4);
            case 'U':
                return readUnicodeEscapeSequence(scnr, ch, 6);
            default:
                emitError(CompileErrorCodes.UNKNOWN_ESCAPE_SEQUENCE, currentPosition(), 0, ch);
                return '';
        }
    }
    // Consume `digits` hex digits after \u or \U, reporting an invalid
    // sequence if any digit is missing. Returns the raw escape text.
    function readUnicodeEscapeSequence(scnr, unicode, digits) {
        eat(scnr, unicode);
        let sequence = '';
        for (let i = 0; i < digits; i++) {
            const ch = takeHexDigit(scnr);
            if (!ch) {
                emitError(CompileErrorCodes.INVALID_UNICODE_ESCAPE_SEQUENCE, currentPosition(), 0, `\\${unicode}${sequence}${scnr.currentChar()}`);
                break;
            }
            sequence += ch;
        }
        return `\\${unicode}${sequence}`;
    }
    // Consume the invalid run inside a placeholder (up to brace/space/newline)
    // so tokenization can recover; returned for the error message.
    function readInvalidIdentifier(scnr) {
        skipSpaces(scnr);
        let ch = '';
        let identifiers = '';
        const closure = (ch) => ch !== "{" /* TokenChars.BraceLeft */ &&
            ch !== "}" /* TokenChars.BraceRight */ &&
            ch !== CHAR_SP &&
            ch !== CHAR_LF;
        while ((ch = takeChar(scnr, closure))) {
            identifiers += ch;
        }
        return identifiers;
    }
    // Consume a linked modifier identifier (after `@.`).
    function readLinkedModifier(scnr) {
        let ch = '';
        let name = '';
        while ((ch = takeIdentifierChar(scnr))) {
            name += ch;
        }
        return name;
    }
    // Consume a linked key (after `@...:`), stopping at syntax chars, space
    // or EOF; newlines are included in the key text.
    function readLinkedRefer(scnr) {
        const fn = (detect = false, buf) => {
            const ch = scnr.currentChar();
            if (ch === "{" /* TokenChars.BraceLeft */ ||
                ch === "%" /* TokenChars.Modulo */ ||
                ch === "@" /* TokenChars.LinkedAlias */ ||
                ch === "|" /* TokenChars.Pipe */ ||
                !ch) {
                return buf;
            }
            else if (ch === CHAR_SP) {
                return buf;
            }
            else if (ch === CHAR_LF) {
                buf += ch;
                scnr.next();
                return fn(detect, buf);
            }
            else {
                buf += ch;
                scnr.next();
                return fn(true, buf);
            }
        };
        return fn(false, '');
    }
    // Consume a `|` plural separator together with the whitespace around it.
    function readPlural(scnr) {
        skipSpaces(scnr);
        const plural = eat(scnr, "|" /* TokenChars.Pipe */);
        skipSpaces(scnr);
        return plural;
    }
    // TODO: We need refactoring of token parsing ...
    // Scan the next token while inside a `{...}` placeholder, handling brace
    // nesting errors, plural resets, and named/list/literal keys.
    function readTokenInPlaceholder(scnr, context) {
        let token = null;
        const ch = scnr.currentChar();
        switch (ch) {
            case "{" /* TokenChars.BraceLeft */:
                if (context.braceNest >= 1) {
                    emitError(CompileErrorCodes.NOT_ALLOW_NEST_PLACEHOLDER, currentPosition(), 0);
                }
                scnr.next();
                token = getToken(context, 2 /* TokenTypes.BraceLeft */, "{" /* TokenChars.BraceLeft */);
                skipSpaces(scnr);
                context.braceNest++;
                return token;
            case "}" /* TokenChars.BraceRight */:
                if (context.braceNest > 0 &&
                    context.currentType === 2 /* TokenTypes.BraceLeft */) {
                    emitError(CompileErrorCodes.EMPTY_PLACEHOLDER, currentPosition(), 0);
                }
                scnr.next();
                token = getToken(context, 3 /* TokenTypes.BraceRight */, "}" /* TokenChars.BraceRight */);
                context.braceNest--;
                context.braceNest > 0 && skipSpaces(scnr);
                if (context.inLinked && context.braceNest === 0) {
                    context.inLinked = false;
                }
                return token;
            case "@" /* TokenChars.LinkedAlias */:
                if (context.braceNest > 0) {
                    emitError(CompileErrorCodes.UNTERMINATED_CLOSING_BRACE, currentPosition(), 0);
                }
                token = readTokenInLinked(scnr, context) || getEndToken(context);
                context.braceNest = 0;
                return token;
            default:
                let validNamedIdentifier = true;
                let validListIdentifier = true;
                let validLiteral = true;
                if (isPluralStart(scnr)) {
                    if (context.braceNest > 0) {
                        emitError(CompileErrorCodes.UNTERMINATED_CLOSING_BRACE, currentPosition(), 0);
                    }
                    token = getToken(context, 1 /* TokenTypes.Pipe */, readPlural(scnr));
                    // reset
                    context.braceNest = 0;
                    context.inLinked = false;
                    return token;
                }
                if (context.braceNest > 0 &&
                    (context.currentType === 5 /* TokenTypes.Named */ ||
                        context.currentType === 6 /* TokenTypes.List */ ||
                        context.currentType === 7 /* TokenTypes.Literal */)) {
                    emitError(CompileErrorCodes.UNTERMINATED_CLOSING_BRACE, currentPosition(), 0);
                    context.braceNest = 0;
                    return readToken(scnr, context);
                }
                if ((validNamedIdentifier = isNamedIdentifierStart(scnr, context))) {
                    token = getToken(context, 5 /* TokenTypes.Named */, readNamedIdentifier(scnr));
                    skipSpaces(scnr);
                    return token;
                }
                if ((validListIdentifier = isListIdentifierStart(scnr, context))) {
                    token = getToken(context, 6 /* TokenTypes.List */, readListIdentifier(scnr));
                    skipSpaces(scnr);
                    return token;
                }
                if ((validLiteral = isLiteralStart(scnr, context))) {
                    token = getToken(context, 7 /* TokenTypes.Literal */, readLiteral(scnr));
                    skipSpaces(scnr);
                    return token;
                }
                if (!validNamedIdentifier && !validListIdentifier && !validLiteral) {
                    // TODO: we should be re-designed invalid cases, when we will extend message syntax near the future ...
                    token = getToken(context, 13 /* TokenTypes.InvalidPlace */, readInvalidIdentifier(scnr));
                    emitError(CompileErrorCodes.INVALID_TOKEN_IN_PLACEHOLDER, currentPosition(), 0, token.value);
                    skipSpaces(scnr);
                    return token;
                }
                break;
        }
        return token;
    }
    // TODO: We need refactoring of token parsing ...
    // Scan the next token while inside an `@`-linked message (alias, dot,
    // modifier, delimiter, key), recovering to normal scanning on bad input.
    function readTokenInLinked(scnr, context) {
        const { currentType } = context;
        let token = null;
        const ch = scnr.currentChar();
        if ((currentType === 8 /* TokenTypes.LinkedAlias */ ||
            currentType === 9 /* TokenTypes.LinkedDot */ ||
            currentType === 12 /* TokenTypes.LinkedModifier */ ||
            currentType === 10 /* TokenTypes.LinkedDelimiter */) &&
            (ch === CHAR_LF || ch === CHAR_SP)) {
            emitError(CompileErrorCodes.INVALID_LINKED_FORMAT, currentPosition(), 0);
        }
        switch (ch) {
            case "@" /* TokenChars.LinkedAlias */:
                scnr.next();
                token = getToken(context, 8 /* TokenTypes.LinkedAlias */, "@" /* TokenChars.LinkedAlias */);
                context.inLinked = true;
                return token;
            case "." /* TokenChars.LinkedDot */:
                skipSpaces(scnr);
                scnr.next();
                return getToken(context, 9 /* TokenTypes.LinkedDot */, "." /* TokenChars.LinkedDot */);
            case ":" /* TokenChars.LinkedDelimiter */:
                skipSpaces(scnr);
                scnr.next();
                return getToken(context, 10 /* TokenTypes.LinkedDelimiter */, ":" /* TokenChars.LinkedDelimiter */);
            default:
                if (isPluralStart(scnr)) {
                    token = getToken(context, 1 /* TokenTypes.Pipe */, readPlural(scnr));
                    // reset
                    context.braceNest = 0;
                    context.inLinked = false;
                    return token;
                }
                if (isLinkedDotStart(scnr, context) ||
                    isLinkedDelimiterStart(scnr, context)) {
                    skipSpaces(scnr);
                    return readTokenInLinked(scnr, context);
                }
                if (isLinkedModifierStart(scnr, context)) {
                    skipSpaces(scnr);
                    return getToken(context, 12 /* TokenTypes.LinkedModifier */, readLinkedModifier(scnr));
                }
                if (isLinkedReferStart(scnr, context)) {
                    skipSpaces(scnr);
                    if (ch === "{" /* TokenChars.BraceLeft */) {
                        // scan the placeholder
                        return readTokenInPlaceholder(scnr, context) || token;
                    }
                    else {
                        return getToken(context, 11 /* TokenTypes.LinkedKey */, readLinkedRefer(scnr));
                    }
                }
                if (currentType === 8 /* TokenTypes.LinkedAlias */) {
                    emitError(CompileErrorCodes.INVALID_LINKED_FORMAT, currentPosition(), 0);
                }
                context.braceNest = 0;
                context.inLinked = false;
                return readToken(scnr, context);
        }
    }
    // TODO: We need refactoring of token parsing ...
    // Top-level token dispatch: delegates to placeholder/linked scanners based
    // on context state, otherwise scans plural bars, modulo, or plain text.
    function readToken(scnr, context) {
        let token = { type: 14 /* TokenTypes.EOF */ };
        if (context.braceNest > 0) {
            return readTokenInPlaceholder(scnr, context) || getEndToken(context);
        }
        if (context.inLinked) {
            return readTokenInLinked(scnr, context) || getEndToken(context);
        }
        const ch = scnr.currentChar();
        switch (ch) {
            case "{" /* TokenChars.BraceLeft */:
                return readTokenInPlaceholder(scnr, context) || getEndToken(context);
            case "}" /* TokenChars.BraceRight */:
                emitError(CompileErrorCodes.UNBALANCED_CLOSING_BRACE, currentPosition(), 0);
                scnr.next();
                return getToken(context, 3 /* TokenTypes.BraceRight */, "}" /* TokenChars.BraceRight */);
            case "@" /* TokenChars.LinkedAlias */:
                return readTokenInLinked(scnr, context) || getEndToken(context);
            default:
                if (isPluralStart(scnr)) {
                    token = getToken(context, 1 /* TokenTypes.Pipe */, readPlural(scnr));
                    // reset
                    context.braceNest = 0;
                    context.inLinked = false;
                    return token;
                }
                const { isModulo, hasSpace } = detectModuloStart(scnr);
                if (isModulo) {
                    // `%{` directly: modulo token; with leading space: plain text
                    return hasSpace
                        ? getToken(context, 0 /* TokenTypes.Text */, readText(scnr))
                        : getToken(context, 4 /* TokenTypes.Modulo */, readModulo(scnr));
                }
                if (isTextStart(scnr)) {
                    return getToken(context, 0 /* TokenTypes.Text */, readText(scnr));
                }
                break;
        }
        return token;
    }
    // Advance to and return the next token, rolling the current token state
    // into the `last*` context fields first.
    function nextToken() {
        const { currentType, offset, startLoc, endLoc } = _context;
        _context.lastType = currentType;
        _context.lastOffset = offset;
        _context.lastStartLoc = startLoc;
        _context.lastEndLoc = endLoc;
        _context.offset = currentOffset();
        _context.startLoc = currentPosition();
        if (_scnr.currentChar() === EOF) {
            return getToken(_context, 14 /* TokenTypes.EOF */);
        }
        return readToken(_scnr, _context);
    }
    return {
        nextToken,
        currentOffset,
        currentPosition,
        context
    };
}
// Error domain tag attached to parser-emitted compile errors.
const ERROR_DOMAIN = 'parser';
// Backslash backslash, backslash quote, uHHHH, UHHHHHH.
// Global regex consumed by String.prototype.replace with fromEscapeSequence.
const KNOWN_ESCAPES = /(?:\\\\|\\'|\\u([0-9a-fA-F]{4})|\\U([0-9a-fA-F]{6}))/g;
  830. function fromEscapeSequence(match, codePoint4, codePoint6) {
  831. switch (match) {
  832. case `\\\\`:
  833. return `\\`;
  834. case `\\\'`:
  835. return `\'`;
  836. default: {
  837. const codePoint = parseInt(codePoint4 || codePoint6, 16);
  838. if (codePoint <= 0xd7ff || codePoint >= 0xe000) {
  839. return String.fromCodePoint(codePoint);
  840. }
  841. // invalid ...
  842. // Replace them with U+FFFD REPLACEMENT CHARACTER.
  843. return '�';
  844. }
  845. }
  846. }
  847. function createParser(options = {}) {
  848. const location = options.location !== false;
  849. const { onError } = options;
  850. function emitError(tokenzer, code, start, offset, ...args) {
  851. const end = tokenzer.currentPosition();
  852. end.offset += offset;
  853. end.column += offset;
  854. if (onError) {
  855. const loc = createLocation(start, end);
  856. const err = createCompileError(code, loc, {
  857. domain: ERROR_DOMAIN,
  858. args
  859. });
  860. onError(err);
  861. }
  862. }
  863. function startNode(type, offset, loc) {
  864. const node = {
  865. type,
  866. start: offset,
  867. end: offset
  868. };
  869. if (location) {
  870. node.loc = { start: loc, end: loc };
  871. }
  872. return node;
  873. }
  874. function endNode(node, offset, pos, type) {
  875. node.end = offset;
  876. if (type) {
  877. node.type = type;
  878. }
  879. if (location && node.loc) {
  880. node.loc.end = pos;
  881. }
  882. }
  883. function parseText(tokenizer, value) {
  884. const context = tokenizer.context();
  885. const node = startNode(3 /* NodeTypes.Text */, context.offset, context.startLoc);
  886. node.value = value;
  887. endNode(node, tokenizer.currentOffset(), tokenizer.currentPosition());
  888. return node;
  889. }
  890. function parseList(tokenizer, index) {
  891. const context = tokenizer.context();
  892. const { lastOffset: offset, lastStartLoc: loc } = context; // get brace left loc
  893. const node = startNode(5 /* NodeTypes.List */, offset, loc);
  894. node.index = parseInt(index, 10);
  895. tokenizer.nextToken(); // skip brach right
  896. endNode(node, tokenizer.currentOffset(), tokenizer.currentPosition());
  897. return node;
  898. }
  899. function parseNamed(tokenizer, key) {
  900. const context = tokenizer.context();
  901. const { lastOffset: offset, lastStartLoc: loc } = context; // get brace left loc
  902. const node = startNode(4 /* NodeTypes.Named */, offset, loc);
  903. node.key = key;
  904. tokenizer.nextToken(); // skip brach right
  905. endNode(node, tokenizer.currentOffset(), tokenizer.currentPosition());
  906. return node;
  907. }
  908. function parseLiteral(tokenizer, value) {
  909. const context = tokenizer.context();
  910. const { lastOffset: offset, lastStartLoc: loc } = context; // get brace left loc
  911. const node = startNode(9 /* NodeTypes.Literal */, offset, loc);
  912. node.value = value.replace(KNOWN_ESCAPES, fromEscapeSequence);
  913. tokenizer.nextToken(); // skip brach right
  914. endNode(node, tokenizer.currentOffset(), tokenizer.currentPosition());
  915. return node;
  916. }
  917. function parseLinkedModifier(tokenizer) {
  918. const token = tokenizer.nextToken();
  919. const context = tokenizer.context();
  920. const { lastOffset: offset, lastStartLoc: loc } = context; // get linked dot loc
  921. const node = startNode(8 /* NodeTypes.LinkedModifier */, offset, loc);
  922. if (token.type !== 12 /* TokenTypes.LinkedModifier */) {
  923. // empty modifier
  924. emitError(tokenizer, CompileErrorCodes.UNEXPECTED_EMPTY_LINKED_MODIFIER, context.lastStartLoc, 0);
  925. node.value = '';
  926. endNode(node, offset, loc);
  927. return {
  928. nextConsumeToken: token,
  929. node
  930. };
  931. }
  932. // check token
  933. if (token.value == null) {
  934. emitError(tokenizer, CompileErrorCodes.UNEXPECTED_LEXICAL_ANALYSIS, context.lastStartLoc, 0, getTokenCaption(token));
  935. }
  936. node.value = token.value || '';
  937. endNode(node, tokenizer.currentOffset(), tokenizer.currentPosition());
  938. return {
  939. node
  940. };
  941. }
  942. function parseLinkedKey(tokenizer, value) {
  943. const context = tokenizer.context();
  944. const node = startNode(7 /* NodeTypes.LinkedKey */, context.offset, context.startLoc);
  945. node.value = value;
  946. endNode(node, tokenizer.currentOffset(), tokenizer.currentPosition());
  947. return node;
  948. }
/**
 * Parse a linked message reference (`@:key`, `@.modifier:key`, `@:{'literal'}`,
 * `@:{named}` ...) into a Linked node with an optional `modifier` and a `key`.
 * Returns `{ node, nextConsumeToken? }` — `nextConsumeToken` is a token that was
 * read ahead but not consumed, which the caller must process next.
 */
function parseLinked(tokenizer) {
    const context = tokenizer.context();
    const linkedNode = startNode(6 /* NodeTypes.Linked */, context.offset, context.startLoc);
    let token = tokenizer.nextToken();
    // optional `.modifier` directly after the `@`
    if (token.type === 9 /* TokenTypes.LinkedDot */) {
        const parsed = parseLinkedModifier(tokenizer);
        linkedNode.modifier = parsed.node;
        // the modifier parser may hand back a token it could not consume
        token = parsed.nextConsumeToken || tokenizer.nextToken();
    }
    // assert check token: a `:` delimiter must follow the alias/modifier
    if (token.type !== 10 /* TokenTypes.LinkedDelimiter */) {
        emitError(tokenizer, CompileErrorCodes.UNEXPECTED_LEXICAL_ANALYSIS, context.lastStartLoc, 0, getTokenCaption(token));
    }
    token = tokenizer.nextToken();
    // skip brace left (the `@:{...}` form)
    if (token.type === 2 /* TokenTypes.BraceLeft */) {
        token = tokenizer.nextToken();
    }
    // the linked key may be a bare key or a named / list / literal placeholder
    switch (token.type) {
        case 11 /* TokenTypes.LinkedKey */:
            if (token.value == null) {
                emitError(tokenizer, CompileErrorCodes.UNEXPECTED_LEXICAL_ANALYSIS, context.lastStartLoc, 0, getTokenCaption(token));
            }
            linkedNode.key = parseLinkedKey(tokenizer, token.value || '');
            break;
        case 5 /* TokenTypes.Named */:
            if (token.value == null) {
                emitError(tokenizer, CompileErrorCodes.UNEXPECTED_LEXICAL_ANALYSIS, context.lastStartLoc, 0, getTokenCaption(token));
            }
            linkedNode.key = parseNamed(tokenizer, token.value || '');
            break;
        case 6 /* TokenTypes.List */:
            if (token.value == null) {
                emitError(tokenizer, CompileErrorCodes.UNEXPECTED_LEXICAL_ANALYSIS, context.lastStartLoc, 0, getTokenCaption(token));
            }
            linkedNode.key = parseList(tokenizer, token.value || '');
            break;
        case 7 /* TokenTypes.Literal */:
            if (token.value == null) {
                emitError(tokenizer, CompileErrorCodes.UNEXPECTED_LEXICAL_ANALYSIS, context.lastStartLoc, 0, getTokenCaption(token));
            }
            linkedNode.key = parseLiteral(tokenizer, token.value || '');
            break;
        default:
            // empty key: report it and synthesize a zero-width LinkedKey node
            emitError(tokenizer, CompileErrorCodes.UNEXPECTED_EMPTY_LINKED_KEY, context.lastStartLoc, 0);
            const nextContext = tokenizer.context();
            const emptyLinkedKeyNode = startNode(7 /* NodeTypes.LinkedKey */, nextContext.offset, nextContext.startLoc);
            emptyLinkedKeyNode.value = '';
            endNode(emptyLinkedKeyNode, nextContext.offset, nextContext.startLoc);
            linkedNode.key = emptyLinkedKeyNode;
            endNode(linkedNode, nextContext.offset, nextContext.startLoc);
            // hand the unconsumed token back to the caller
            return {
                nextConsumeToken: token,
                node: linkedNode
            };
    }
    endNode(linkedNode, tokenizer.currentOffset(), tokenizer.currentPosition());
    return {
        node: linkedNode
    };
}
/**
 * Parse one message — a run of text, placeholders and linked references —
 * into a Message node. Parsing stops at EOF or at a `|` plural separator;
 * `parsePlural` calls this repeatedly for each plural case.
 */
function parseMessage(tokenizer) {
    const context = tokenizer.context();
    // when resuming right after a `|`, the message starts at the current
    // token; otherwise at the position recorded in the tokenizer context
    const startOffset = context.currentType === 1 /* TokenTypes.Pipe */
        ? tokenizer.currentOffset()
        : context.offset;
    const startLoc = context.currentType === 1 /* TokenTypes.Pipe */
        ? context.endLoc
        : context.startLoc;
    const node = startNode(2 /* NodeTypes.Message */, startOffset, startLoc);
    node.items = [];
    let nextToken = null;
    do {
        // a child parser (parseLinked) may return a token it read but did
        // not consume; process that one before pulling a fresh token
        const token = nextToken || tokenizer.nextToken();
        nextToken = null;
        switch (token.type) {
            case 0 /* TokenTypes.Text */:
                if (token.value == null) {
                    emitError(tokenizer, CompileErrorCodes.UNEXPECTED_LEXICAL_ANALYSIS, context.lastStartLoc, 0, getTokenCaption(token));
                }
                node.items.push(parseText(tokenizer, token.value || ''));
                break;
            case 6 /* TokenTypes.List */:
                if (token.value == null) {
                    emitError(tokenizer, CompileErrorCodes.UNEXPECTED_LEXICAL_ANALYSIS, context.lastStartLoc, 0, getTokenCaption(token));
                }
                node.items.push(parseList(tokenizer, token.value || ''));
                break;
            case 5 /* TokenTypes.Named */:
                if (token.value == null) {
                    emitError(tokenizer, CompileErrorCodes.UNEXPECTED_LEXICAL_ANALYSIS, context.lastStartLoc, 0, getTokenCaption(token));
                }
                node.items.push(parseNamed(tokenizer, token.value || ''));
                break;
            case 7 /* TokenTypes.Literal */:
                if (token.value == null) {
                    emitError(tokenizer, CompileErrorCodes.UNEXPECTED_LEXICAL_ANALYSIS, context.lastStartLoc, 0, getTokenCaption(token));
                }
                node.items.push(parseLiteral(tokenizer, token.value || ''));
                break;
            case 8 /* TokenTypes.LinkedAlias */:
                const parsed = parseLinked(tokenizer);
                node.items.push(parsed.node);
                nextToken = parsed.nextConsumeToken || null;
                break;
        }
    } while (context.currentType !== 14 /* TokenTypes.EOF */ &&
        context.currentType !== 1 /* TokenTypes.Pipe */);
    // adjust message node loc: when stopped at a `|`, the message ends at
    // the last token before the pipe, not at the current tokenizer position
    const endOffset = context.currentType === 1 /* TokenTypes.Pipe */
        ? context.lastOffset
        : tokenizer.currentOffset();
    const endLoc = context.currentType === 1 /* TokenTypes.Pipe */
        ? context.lastEndLoc
        : tokenizer.currentPosition();
    endNode(node, endOffset, endLoc);
    return node;
}
  1068. function parsePlural(tokenizer, offset, loc, msgNode) {
  1069. const context = tokenizer.context();
  1070. let hasEmptyMessage = msgNode.items.length === 0;
  1071. const node = startNode(1 /* NodeTypes.Plural */, offset, loc);
  1072. node.cases = [];
  1073. node.cases.push(msgNode);
  1074. do {
  1075. const msg = parseMessage(tokenizer);
  1076. if (!hasEmptyMessage) {
  1077. hasEmptyMessage = msg.items.length === 0;
  1078. }
  1079. node.cases.push(msg);
  1080. } while (context.currentType !== 14 /* TokenTypes.EOF */);
  1081. if (hasEmptyMessage) {
  1082. emitError(tokenizer, CompileErrorCodes.MUST_HAVE_MESSAGES_IN_PLURAL, loc, 0);
  1083. }
  1084. endNode(node, tokenizer.currentOffset(), tokenizer.currentPosition());
  1085. return node;
  1086. }
  1087. function parseResource(tokenizer) {
  1088. const context = tokenizer.context();
  1089. const { offset, startLoc } = context;
  1090. const msgNode = parseMessage(tokenizer);
  1091. if (context.currentType === 14 /* TokenTypes.EOF */) {
  1092. return msgNode;
  1093. }
  1094. else {
  1095. return parsePlural(tokenizer, offset, startLoc, msgNode);
  1096. }
  1097. }
  1098. function parse(source) {
  1099. const tokenizer = createTokenizer(source, shared.assign({}, options));
  1100. const context = tokenizer.context();
  1101. const node = startNode(0 /* NodeTypes.Resource */, context.offset, context.startLoc);
  1102. if (location && node.loc) {
  1103. node.loc.source = source;
  1104. }
  1105. node.body = parseResource(tokenizer);
  1106. // assert whether achieved to EOF
  1107. if (context.currentType !== 14 /* TokenTypes.EOF */) {
  1108. emitError(tokenizer, CompileErrorCodes.UNEXPECTED_LEXICAL_ANALYSIS, context.lastStartLoc, 0, source[context.offset] || '');
  1109. }
  1110. endNode(node, tokenizer.currentOffset(), tokenizer.currentPosition());
  1111. return node;
  1112. }
  1113. return { parse };
  1114. }
  1115. function getTokenCaption(token) {
  1116. if (token.type === 14 /* TokenTypes.EOF */) {
  1117. return 'EOF';
  1118. }
  1119. const name = (token.value || '').replace(/\r?\n/gu, '\\n');
  1120. return name.length > 10 ? name.slice(0, 9) + '…' : name;
  1121. }
  1122. function createTransformer(ast, options = {} // eslint-disable-line
  1123. ) {
  1124. const _context = {
  1125. ast,
  1126. helpers: new Set()
  1127. };
  1128. const context = () => _context;
  1129. const helper = (name) => {
  1130. _context.helpers.add(name);
  1131. return name;
  1132. };
  1133. return { context, helper };
  1134. }
  1135. function traverseNodes(nodes, transformer) {
  1136. for (let i = 0; i < nodes.length; i++) {
  1137. traverseNode(nodes[i], transformer);
  1138. }
  1139. }
  1140. function traverseNode(node, transformer) {
  1141. // TODO: if we need pre-hook of transform, should be implemented to here
  1142. switch (node.type) {
  1143. case 1 /* NodeTypes.Plural */:
  1144. traverseNodes(node.cases, transformer);
  1145. transformer.helper("plural" /* HelperNameMap.PLURAL */);
  1146. break;
  1147. case 2 /* NodeTypes.Message */:
  1148. traverseNodes(node.items, transformer);
  1149. break;
  1150. case 6 /* NodeTypes.Linked */:
  1151. const linked = node;
  1152. traverseNode(linked.key, transformer);
  1153. transformer.helper("linked" /* HelperNameMap.LINKED */);
  1154. transformer.helper("type" /* HelperNameMap.TYPE */);
  1155. break;
  1156. case 5 /* NodeTypes.List */:
  1157. transformer.helper("interpolate" /* HelperNameMap.INTERPOLATE */);
  1158. transformer.helper("list" /* HelperNameMap.LIST */);
  1159. break;
  1160. case 4 /* NodeTypes.Named */:
  1161. transformer.helper("interpolate" /* HelperNameMap.INTERPOLATE */);
  1162. transformer.helper("named" /* HelperNameMap.NAMED */);
  1163. break;
  1164. }
  1165. // TODO: if we need post-hook of transform, should be implemented to here
  1166. }
  1167. // transform AST
  1168. function transform(ast, options = {} // eslint-disable-line
  1169. ) {
  1170. const transformer = createTransformer(ast);
  1171. transformer.helper("normalize" /* HelperNameMap.NORMALIZE */);
  1172. // traverse
  1173. ast.body && traverseNode(ast.body, transformer);
  1174. // set meta information
  1175. const context = transformer.context();
  1176. ast.helpers = Array.from(context.helpers);
  1177. }
/**
 * Create a code generator over the given AST. Tracks the emitted code
 * string, the current generated line/column/offset, the indent level and —
 * when `options.sourceMap` is set — a source-map generator.
 */
function createCodeGenerator(ast, options) {
    // the `sourceMap` option is renamed to avoid shadowing the `source-map`
    // module binding imported at the top of the file
    const { sourceMap: sourceMap$1, filename, breakLineCode, needIndent: _needIndent } = options;
    const _context = {
        source: ast.loc.source,
        filename,
        code: '',
        column: 1,
        line: 1,
        offset: 0,
        map: undefined,
        breakLineCode,
        needIndent: _needIndent,
        indentLevel: 0
    };
    const context = () => _context;
    // Append `code` to the output. When mapping, record a segment for `node`
    // (if it carries a real location) and advance the generated position
    // over the appended text.
    function push(code, node) {
        _context.code += code;
        if (_context.map) {
            if (node && node.loc && node.loc !== LocationStub) {
                addMapping(node.loc.start, getMappingName(node));
            }
            advancePositionWithSource(_context, code);
        }
    }
    // Emit an (optional) line break followed by `n` levels of indentation;
    // indentation is suppressed entirely when `needIndent` is off.
    function _newline(n, withBreakLine = true) {
        const _breakLineCode = withBreakLine ? breakLineCode : '';
        push(_needIndent ? _breakLineCode + ` `.repeat(n) : _breakLineCode);
    }
    function indent(withNewLine = true) {
        const level = ++_context.indentLevel;
        withNewLine && _newline(level);
    }
    function deindent(withNewLine = true) {
        const level = --_context.indentLevel;
        withNewLine && _newline(level);
    }
    function newline() {
        _newline(_context.indentLevel);
    }
    // runtime helpers are referenced with a `_` prefix (see `generate`, which
    // destructures them as `{ name: _name } = ctx`)
    const helper = (key) => `_${key}`;
    const needIndent = () => _context.needIndent;
    // Add a single original→generated mapping. The `source-map` package uses
    // 0-based columns, while this generator tracks 1-based ones.
    function addMapping(loc, name) {
        _context.map.addMapping({
            name,
            source: _context.filename,
            original: {
                line: loc.line,
                column: loc.column - 1
            },
            generated: {
                line: _context.line,
                column: _context.column - 1
            }
        });
    }
    if (sourceMap$1) {
        _context.map = new sourceMap.SourceMapGenerator();
        _context.map.setSourceContent(filename, _context.source);
    }
    return {
        context,
        push,
        indent,
        deindent,
        newline,
        helper,
        needIndent
    };
}
  1247. function generateLinkedNode(generator, node) {
  1248. const { helper } = generator;
  1249. generator.push(`${helper("linked" /* HelperNameMap.LINKED */)}(`);
  1250. generateNode(generator, node.key);
  1251. if (node.modifier) {
  1252. generator.push(`, `);
  1253. generateNode(generator, node.modifier);
  1254. generator.push(`, _type`);
  1255. }
  1256. else {
  1257. generator.push(`, undefined, _type`);
  1258. }
  1259. generator.push(`)`);
  1260. }
  1261. function generateMessageNode(generator, node) {
  1262. const { helper, needIndent } = generator;
  1263. generator.push(`${helper("normalize" /* HelperNameMap.NORMALIZE */)}([`);
  1264. generator.indent(needIndent());
  1265. const length = node.items.length;
  1266. for (let i = 0; i < length; i++) {
  1267. generateNode(generator, node.items[i]);
  1268. if (i === length - 1) {
  1269. break;
  1270. }
  1271. generator.push(', ');
  1272. }
  1273. generator.deindent(needIndent());
  1274. generator.push('])');
  1275. }
  1276. function generatePluralNode(generator, node) {
  1277. const { helper, needIndent } = generator;
  1278. if (node.cases.length > 1) {
  1279. generator.push(`${helper("plural" /* HelperNameMap.PLURAL */)}([`);
  1280. generator.indent(needIndent());
  1281. const length = node.cases.length;
  1282. for (let i = 0; i < length; i++) {
  1283. generateNode(generator, node.cases[i]);
  1284. if (i === length - 1) {
  1285. break;
  1286. }
  1287. generator.push(', ');
  1288. }
  1289. generator.deindent(needIndent());
  1290. generator.push(`])`);
  1291. }
  1292. }
  1293. function generateResource(generator, node) {
  1294. if (node.body) {
  1295. generateNode(generator, node.body);
  1296. }
  1297. else {
  1298. generator.push('null');
  1299. }
  1300. }
  1301. function generateNode(generator, node) {
  1302. const { helper } = generator;
  1303. switch (node.type) {
  1304. case 0 /* NodeTypes.Resource */:
  1305. generateResource(generator, node);
  1306. break;
  1307. case 1 /* NodeTypes.Plural */:
  1308. generatePluralNode(generator, node);
  1309. break;
  1310. case 2 /* NodeTypes.Message */:
  1311. generateMessageNode(generator, node);
  1312. break;
  1313. case 6 /* NodeTypes.Linked */:
  1314. generateLinkedNode(generator, node);
  1315. break;
  1316. case 8 /* NodeTypes.LinkedModifier */:
  1317. generator.push(JSON.stringify(node.value), node);
  1318. break;
  1319. case 7 /* NodeTypes.LinkedKey */:
  1320. generator.push(JSON.stringify(node.value), node);
  1321. break;
  1322. case 5 /* NodeTypes.List */:
  1323. generator.push(`${helper("interpolate" /* HelperNameMap.INTERPOLATE */)}(${helper("list" /* HelperNameMap.LIST */)}(${node.index}))`, node);
  1324. break;
  1325. case 4 /* NodeTypes.Named */:
  1326. generator.push(`${helper("interpolate" /* HelperNameMap.INTERPOLATE */)}(${helper("named" /* HelperNameMap.NAMED */)}(${JSON.stringify(node.key)}))`, node);
  1327. break;
  1328. case 9 /* NodeTypes.Literal */:
  1329. generator.push(JSON.stringify(node.value), node);
  1330. break;
  1331. case 3 /* NodeTypes.Text */:
  1332. generator.push(JSON.stringify(node.value), node);
  1333. break;
  1334. default:
  1335. {
  1336. throw new Error(`unhandled codegen node type: ${node.type}`);
  1337. }
  1338. }
  1339. }
// generate code from AST
/**
 * Generate a message-function source string from the transformed AST.
 * Returns `{ ast, code, map? }`. `mode` is 'normal' (a named function on
 * multiple lines) or 'arrow' (an arrow function joined with `;`).
 */
const generate = (ast, options = {} // eslint-disable-line
) => {
    const mode = shared.isString(options.mode) ? options.mode : 'normal';
    const filename = shared.isString(options.filename)
        ? options.filename
        : 'message.intl';
    // NOTE(review): this local boolean shadows the `source-map` module
    // binding within this function scope — intentional in the compiled output
    const sourceMap = !!options.sourceMap;
    // prettier-ignore
    const breakLineCode = options.breakLineCode != null
        ? options.breakLineCode
        : mode === 'arrow'
            ? ';'
            : '\n';
    const needIndent = options.needIndent ? options.needIndent : mode !== 'arrow';
    // helper names collected by `transform`
    const helpers = ast.helpers || [];
    const generator = createCodeGenerator(ast, {
        mode,
        filename,
        sourceMap,
        breakLineCode,
        needIndent
    });
    generator.push(mode === 'normal' ? `function __msg__ (ctx) {` : `(ctx) => {`);
    generator.indent(needIndent);
    // destructure each helper off the runtime context as a `_name` local
    if (helpers.length > 0) {
        generator.push(`const { ${helpers.map(s => `${s}: _${s}`).join(', ')} } = ctx`);
        generator.newline();
    }
    generator.push(`return `);
    generateNode(generator, ast);
    generator.deindent(needIndent);
    generator.push(`}`);
    const { code, map } = generator.context();
    return {
        ast,
        code,
        map: map ? map.toJSON() : undefined // eslint-disable-line @typescript-eslint/no-explicit-any
    };
};
  1380. function getMappingName(node) {
  1381. switch (node.type) {
  1382. case 3 /* NodeTypes.Text */:
  1383. case 9 /* NodeTypes.Literal */:
  1384. case 8 /* NodeTypes.LinkedModifier */:
  1385. case 7 /* NodeTypes.LinkedKey */:
  1386. return node.value;
  1387. case 5 /* NodeTypes.List */:
  1388. return node.index.toString();
  1389. case 4 /* NodeTypes.Named */:
  1390. return node.key;
  1391. default:
  1392. return undefined;
  1393. }
  1394. }
  1395. function advancePositionWithSource(pos, source, numberOfCharacters = source.length) {
  1396. let linesCount = 0;
  1397. let lastNewLinePos = -1;
  1398. for (let i = 0; i < numberOfCharacters; i++) {
  1399. if (source.charCodeAt(i) === 10 /* newline char code */) {
  1400. linesCount++;
  1401. lastNewLinePos = i;
  1402. }
  1403. }
  1404. pos.offset += numberOfCharacters;
  1405. pos.line += linesCount;
  1406. pos.column =
  1407. lastNewLinePos === -1
  1408. ? pos.column + numberOfCharacters
  1409. : numberOfCharacters - lastNewLinePos;
  1410. return pos;
  1411. }
  1412. function baseCompile(source, options = {}) {
  1413. const assignedOptions = shared.assign({}, options);
  1414. // parse source codes
  1415. const parser = createParser(assignedOptions);
  1416. const ast = parser.parse(source);
  1417. // transform ASTs
  1418. transform(ast, assignedOptions);
  1419. // generate javascript codes
  1420. return generate(ast, assignedOptions);
  1421. }
// public API surface of the message compiler (CommonJS exports)
exports.CompileErrorCodes = CompileErrorCodes;
exports.ERROR_DOMAIN = ERROR_DOMAIN;
exports.LocationStub = LocationStub;
exports.baseCompile = baseCompile;
exports.createCompileError = createCompileError;
exports.createLocation = createLocation;
exports.createParser = createParser;
exports.createPosition = createPosition;
exports.defaultOnError = defaultOnError;
exports.detectHtmlTag = detectHtmlTag;
exports.errorMessages = errorMessages;