版博士V2.0程序
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.
 
 
 
 

1376 lines
52 KiB

  1. /*!
  2. * message-compiler v9.3.0-beta.17
  3. * (c) 2023 kazuya kawaguchi
  4. * Released under the MIT License.
  5. */
  6. import { format, assign, isString } from '@intlify/shared';
/**
 * Numeric compile-error codes shared by the tokenizer (1-10) and the
 * parser (11-14). Each code is attached to the thrown SyntaxError as
 * `error.code` and used to look up a message in `errorMessages`.
 */
const CompileErrorCodes = {
  // tokenizer error codes
  EXPECTED_TOKEN: 1,
  INVALID_TOKEN_IN_PLACEHOLDER: 2,
  UNTERMINATED_SINGLE_QUOTE_IN_PLACEHOLDER: 3,
  UNKNOWN_ESCAPE_SEQUENCE: 4,
  INVALID_UNICODE_ESCAPE_SEQUENCE: 5,
  UNBALANCED_CLOSING_BRACE: 6,
  UNTERMINATED_CLOSING_BRACE: 7,
  EMPTY_PLACEHOLDER: 8,
  NOT_ALLOW_NEST_PLACEHOLDER: 9,
  INVALID_LINKED_FORMAT: 10,
  // parser error codes
  MUST_HAVE_MESSAGES_IN_PLURAL: 11,
  UNEXPECTED_EMPTY_LINKED_MODIFIER: 12,
  UNEXPECTED_EMPTY_LINKED_KEY: 13,
  UNEXPECTED_LEXICAL_ANALYSIS: 14,
  // Special value for higher-order compilers to pick up the last code
  // to avoid collision of error codes. This should always be kept as the last
  // item.
  __EXTEND_POINT__: 15
};
/** @internal */
// Development-build message templates keyed by CompileErrorCodes.
// `{0}` placeholders are interpolated by `format(...)` inside
// `createCompileError`; production builds skip this table entirely.
const errorMessages = {
  // tokenizer error messages
  [CompileErrorCodes.EXPECTED_TOKEN]: `Expected token: '{0}'`,
  [CompileErrorCodes.INVALID_TOKEN_IN_PLACEHOLDER]: `Invalid token in placeholder: '{0}'`,
  [CompileErrorCodes.UNTERMINATED_SINGLE_QUOTE_IN_PLACEHOLDER]: `Unterminated single quote in placeholder`,
  [CompileErrorCodes.UNKNOWN_ESCAPE_SEQUENCE]: `Unknown escape sequence: \\{0}`,
  [CompileErrorCodes.INVALID_UNICODE_ESCAPE_SEQUENCE]: `Invalid unicode escape sequence: {0}`,
  [CompileErrorCodes.UNBALANCED_CLOSING_BRACE]: `Unbalanced closing brace`,
  [CompileErrorCodes.UNTERMINATED_CLOSING_BRACE]: `Unterminated closing brace`,
  [CompileErrorCodes.EMPTY_PLACEHOLDER]: `Empty placeholder`,
  [CompileErrorCodes.NOT_ALLOW_NEST_PLACEHOLDER]: `Not allowed nest placeholder`,
  [CompileErrorCodes.INVALID_LINKED_FORMAT]: `Invalid linked format`,
  // parser error messages
  [CompileErrorCodes.MUST_HAVE_MESSAGES_IN_PLURAL]: `Plural must have messages`,
  [CompileErrorCodes.UNEXPECTED_EMPTY_LINKED_MODIFIER]: `Unexpected empty linked modifier`,
  [CompileErrorCodes.UNEXPECTED_EMPTY_LINKED_KEY]: `Unexpected empty linked key`,
  [CompileErrorCodes.UNEXPECTED_LEXICAL_ANALYSIS]: `Unexpected lexical analysis in token: '{0}'`
};
  48. function createCompileError(code, loc, options = {}) {
  49. const { domain, messages, args } = options;
  50. const msg = (process.env.NODE_ENV !== 'production')
  51. ? format((messages || errorMessages)[code] || '', ...(args || []))
  52. : code;
  53. const error = new SyntaxError(String(msg));
  54. error.code = code;
  55. if (loc) {
  56. error.location = loc;
  57. }
  58. error.domain = domain;
  59. return error;
  60. }
/** @internal */
// Default `onError` handler: fail fast by rethrowing the compile error
// to the caller when no custom handler was supplied.
function defaultOnError(error) {
  throw error;
}
  65. const RE_HTML_TAG = /<\/?[\w\s="/.':;#-\/]+>/;
  66. const detectHtmlTag = (source) => RE_HTML_TAG.test(source);
// Placeholder location for when no real source location is available:
// both endpoints sit at line 1, column 1, offset 0.
const LocationStub = {
  start: { line: 1, column: 1, offset: 0 },
  end: { line: 1, column: 1, offset: 0 }
};
  71. function createPosition(line, column, offset) {
  72. return { line, column, offset };
  73. }
  74. function createLocation(start, end, source) {
  75. const loc = { start, end };
  76. if (source != null) {
  77. loc.source = source;
  78. }
  79. return loc;
  80. }
  81. const CHAR_SP = ' ';
  82. const CHAR_CR = '\r';
  83. const CHAR_LF = '\n';
  84. const CHAR_LS = String.fromCharCode(0x2028);
  85. const CHAR_PS = String.fromCharCode(0x2029);
  86. function createScanner(str) {
  87. const _buf = str;
  88. let _index = 0;
  89. let _line = 1;
  90. let _column = 1;
  91. let _peekOffset = 0;
  92. const isCRLF = (index) => _buf[index] === CHAR_CR && _buf[index + 1] === CHAR_LF;
  93. const isLF = (index) => _buf[index] === CHAR_LF;
  94. const isPS = (index) => _buf[index] === CHAR_PS;
  95. const isLS = (index) => _buf[index] === CHAR_LS;
  96. const isLineEnd = (index) => isCRLF(index) || isLF(index) || isPS(index) || isLS(index);
  97. const index = () => _index;
  98. const line = () => _line;
  99. const column = () => _column;
  100. const peekOffset = () => _peekOffset;
  101. const charAt = (offset) => isCRLF(offset) || isPS(offset) || isLS(offset) ? CHAR_LF : _buf[offset];
  102. const currentChar = () => charAt(_index);
  103. const currentPeek = () => charAt(_index + _peekOffset);
  104. function next() {
  105. _peekOffset = 0;
  106. if (isLineEnd(_index)) {
  107. _line++;
  108. _column = 0;
  109. }
  110. if (isCRLF(_index)) {
  111. _index++;
  112. }
  113. _index++;
  114. _column++;
  115. return _buf[_index];
  116. }
  117. function peek() {
  118. if (isCRLF(_index + _peekOffset)) {
  119. _peekOffset++;
  120. }
  121. _peekOffset++;
  122. return _buf[_index + _peekOffset];
  123. }
  124. function reset() {
  125. _index = 0;
  126. _line = 1;
  127. _column = 1;
  128. _peekOffset = 0;
  129. }
  130. function resetPeek(offset = 0) {
  131. _peekOffset = offset;
  132. }
  133. function skipToPeek() {
  134. const target = _index + _peekOffset;
  135. // eslint-disable-next-line no-unmodified-loop-condition
  136. while (target !== _index) {
  137. next();
  138. }
  139. _peekOffset = 0;
  140. }
  141. return {
  142. index,
  143. line,
  144. column,
  145. peekOffset,
  146. charAt,
  147. currentChar,
  148. currentPeek,
  149. next,
  150. peek,
  151. reset,
  152. resetPeek,
  153. skipToPeek
  154. };
  155. }
// Reading past the end of the scanner buffer yields `undefined`, so it
// doubles as the EOF sentinel throughout the tokenizer.
const EOF = undefined;
// Single quote delimits literal values inside placeholders.
const LITERAL_DELIMITER = "'";
// Error-reporting domain for errors emitted by the tokenizer.
const ERROR_DOMAIN$1 = 'tokenizer';
/**
 * Create a tokenizer over a message-format `source` string.
 *
 * Returns `{ nextToken, currentOffset, currentPosition, context }`;
 * repeated `nextToken()` calls yield `{ type, value?, loc? }` tokens
 * until an EOF token (type 14). Errors are reported through
 * `options.onError` (never thrown here); `options.location === false`
 * disables `loc` on tokens.
 */
function createTokenizer(source, options = {}) {
  const location = options.location !== false;
  const _scnr = createScanner(source);
  const currentOffset = () => _scnr.index();
  const currentPosition = () => createPosition(_scnr.line(), _scnr.column(), _scnr.index());
  const _initLoc = currentPosition();
  const _initOffset = currentOffset();
  // Mutable tokenize state: current/last token type and locations, the
  // placeholder brace nesting depth, and whether we are inside a linked
  // (`@...`) message.
  const _context = {
    currentType: 14 /* TokenTypes.EOF */,
    offset: _initOffset,
    startLoc: _initLoc,
    endLoc: _initLoc,
    lastType: 14 /* TokenTypes.EOF */,
    lastOffset: _initOffset,
    lastStartLoc: _initLoc,
    lastEndLoc: _initLoc,
    braceNest: 0,
    inLinked: false,
    text: ''
  };
  const context = () => _context;
  const { onError } = options;
  // Report a tokenizer error at `pos` (shifted by `offset` columns);
  // only invoked when the caller supplied an `onError` handler.
  function emitError(code, pos, offset, ...args) {
    const ctx = context();
    pos.column += offset;
    pos.offset += offset;
    if (onError) {
      const loc = createLocation(ctx.startLoc, pos);
      const err = createCompileError(code, loc, {
        domain: ERROR_DOMAIN$1,
        args
      });
      onError(err);
    }
  }
  // Materialize a token of `type` ending at the current position,
  // updating the context's current type and end location.
  function getToken(context, type, value) {
    context.endLoc = currentPosition();
    context.currentType = type;
    const token = { type };
    if (location) {
      token.loc = createLocation(context.startLoc, context.endLoc);
    }
    if (value != null) {
      token.value = value;
    }
    return token;
  }
  const getEndToken = (context) => getToken(context, 14 /* TokenTypes.EOF */);
  // Consume `ch` if it is the current character; otherwise emit
  // EXPECTED_TOKEN and return ''.
  function eat(scnr, ch) {
    if (scnr.currentChar() === ch) {
      scnr.next();
      return ch;
    }
    else {
      emitError(CompileErrorCodes.EXPECTED_TOKEN, currentPosition(), 0, ch);
      return '';
    }
  }
  // Advance the peek cursor over spaces/newlines, returning what was peeked.
  function peekSpaces(scnr) {
    let buf = '';
    while (scnr.currentPeek() === CHAR_SP || scnr.currentPeek() === CHAR_LF) {
      buf += scnr.currentPeek();
      scnr.peek();
    }
    return buf;
  }
  // Consume spaces/newlines, returning the skipped text.
  function skipSpaces(scnr) {
    const buf = peekSpaces(scnr);
    scnr.skipToPeek();
    return buf;
  }
  // Identifier first character: [a-zA-Z_].
  function isIdentifierStart(ch) {
    if (ch === EOF) {
      return false;
    }
    const cc = ch.charCodeAt(0);
    return ((cc >= 97 && cc <= 122) || // a-z
      (cc >= 65 && cc <= 90) || // A-Z
      cc === 95 // _
    );
  }
  // Decimal digit check: [0-9].
  function isNumberStart(ch) {
    if (ch === EOF) {
      return false;
    }
    const cc = ch.charCodeAt(0);
    return cc >= 48 && cc <= 57; // 0-9
  }
  // The is*Start predicates below only peek (they always resetPeek)
  // and gate on the previous token type held in `context.currentType`.
  function isNamedIdentifierStart(scnr, context) {
    const { currentType } = context;
    if (currentType !== 2 /* TokenTypes.BraceLeft */) {
      return false;
    }
    peekSpaces(scnr);
    const ret = isIdentifierStart(scnr.currentPeek());
    scnr.resetPeek();
    return ret;
  }
  function isListIdentifierStart(scnr, context) {
    const { currentType } = context;
    if (currentType !== 2 /* TokenTypes.BraceLeft */) {
      return false;
    }
    peekSpaces(scnr);
    // allow an optional leading minus sign before the digits
    const ch = scnr.currentPeek() === '-' ? scnr.peek() : scnr.currentPeek();
    const ret = isNumberStart(ch);
    scnr.resetPeek();
    return ret;
  }
  function isLiteralStart(scnr, context) {
    const { currentType } = context;
    if (currentType !== 2 /* TokenTypes.BraceLeft */) {
      return false;
    }
    peekSpaces(scnr);
    const ret = scnr.currentPeek() === LITERAL_DELIMITER;
    scnr.resetPeek();
    return ret;
  }
  function isLinkedDotStart(scnr, context) {
    const { currentType } = context;
    if (currentType !== 8 /* TokenTypes.LinkedAlias */) {
      return false;
    }
    peekSpaces(scnr);
    const ret = scnr.currentPeek() === "." /* TokenChars.LinkedDot */;
    scnr.resetPeek();
    return ret;
  }
  function isLinkedModifierStart(scnr, context) {
    const { currentType } = context;
    if (currentType !== 9 /* TokenTypes.LinkedDot */) {
      return false;
    }
    peekSpaces(scnr);
    const ret = isIdentifierStart(scnr.currentPeek());
    scnr.resetPeek();
    return ret;
  }
  function isLinkedDelimiterStart(scnr, context) {
    const { currentType } = context;
    if (!(currentType === 8 /* TokenTypes.LinkedAlias */ ||
      currentType === 12 /* TokenTypes.LinkedModifier */)) {
      return false;
    }
    peekSpaces(scnr);
    const ret = scnr.currentPeek() === ":" /* TokenChars.LinkedDelimiter */;
    scnr.resetPeek();
    return ret;
  }
  // After `@:` — decide whether what follows can begin a linked key
  // (either `{placeholder}` or a plain identifier, skipping newlines).
  function isLinkedReferStart(scnr, context) {
    const { currentType } = context;
    if (currentType !== 10 /* TokenTypes.LinkedDelimiter */) {
      return false;
    }
    const fn = () => {
      const ch = scnr.currentPeek();
      if (ch === "{" /* TokenChars.BraceLeft */) {
        return isIdentifierStart(scnr.peek());
      }
      else if (ch === "@" /* TokenChars.LinkedAlias */ ||
        ch === "%" /* TokenChars.Modulo */ ||
        ch === "|" /* TokenChars.Pipe */ ||
        ch === ":" /* TokenChars.LinkedDelimiter */ ||
        ch === "." /* TokenChars.LinkedDot */ ||
        ch === CHAR_SP ||
        !ch) {
        return false;
      }
      else if (ch === CHAR_LF) {
        scnr.peek();
        return fn();
      }
      else {
        // other characters
        return isIdentifierStart(ch);
      }
    };
    const ret = fn();
    scnr.resetPeek();
    return ret;
  }
  function isPluralStart(scnr) {
    peekSpaces(scnr);
    const ret = scnr.currentPeek() === "|" /* TokenChars.Pipe */;
    scnr.resetPeek();
    return ret;
  }
  // Look ahead for a `%{` modulo-prefixed placeholder, also reporting
  // whether whitespace preceded it (which demotes it to plain text).
  function detectModuloStart(scnr) {
    const spaces = peekSpaces(scnr);
    const ret = scnr.currentPeek() === "%" /* TokenChars.Modulo */ &&
      scnr.peek() === "{" /* TokenChars.BraceLeft */;
    scnr.resetPeek();
    return {
      isModulo: ret,
      hasSpace: spaces.length > 0
    };
  }
  // Decide whether the upcoming characters should be consumed as plain
  // text, tracking preceding whitespace and `%` so that `%{`, `@` and
  // `|` boundaries are classified consistently with readText.
  function isTextStart(scnr, reset = true) {
    const fn = (hasSpace = false, prev = '', detectModulo = false) => {
      const ch = scnr.currentPeek();
      if (ch === "{" /* TokenChars.BraceLeft */) {
        return prev === "%" /* TokenChars.Modulo */ ? false : hasSpace;
      }
      else if (ch === "@" /* TokenChars.LinkedAlias */ || !ch) {
        return prev === "%" /* TokenChars.Modulo */ ? true : hasSpace;
      }
      else if (ch === "%" /* TokenChars.Modulo */) {
        scnr.peek();
        return fn(hasSpace, "%" /* TokenChars.Modulo */, true);
      }
      else if (ch === "|" /* TokenChars.Pipe */) {
        return prev === "%" /* TokenChars.Modulo */ || detectModulo
          ? true
          : !(prev === CHAR_SP || prev === CHAR_LF);
      }
      else if (ch === CHAR_SP) {
        scnr.peek();
        return fn(true, CHAR_SP, detectModulo);
      }
      else if (ch === CHAR_LF) {
        scnr.peek();
        return fn(true, CHAR_LF, detectModulo);
      }
      else {
        return true;
      }
    };
    const ret = fn();
    reset && scnr.resetPeek();
    return ret;
  }
  // Consume and return the current character when `fn` accepts it;
  // return EOF at end of input, or null on rejection (nothing consumed).
  function takeChar(scnr, fn) {
    const ch = scnr.currentChar();
    if (ch === EOF) {
      return EOF;
    }
    if (fn(ch)) {
      scnr.next();
      return ch;
    }
    return null;
  }
  // Identifier continuation character: [a-zA-Z0-9_$].
  function takeIdentifierChar(scnr) {
    const closure = (ch) => {
      const cc = ch.charCodeAt(0);
      return ((cc >= 97 && cc <= 122) || // a-z
        (cc >= 65 && cc <= 90) || // A-Z
        (cc >= 48 && cc <= 57) || // 0-9
        cc === 95 || // _
        cc === 36 // $
      );
    };
    return takeChar(scnr, closure);
  }
  function takeDigit(scnr) {
    const closure = (ch) => {
      const cc = ch.charCodeAt(0);
      return cc >= 48 && cc <= 57; // 0-9
    };
    return takeChar(scnr, closure);
  }
  function takeHexDigit(scnr) {
    const closure = (ch) => {
      const cc = ch.charCodeAt(0);
      return ((cc >= 48 && cc <= 57) || // 0-9
        (cc >= 65 && cc <= 70) || // A-F
        (cc >= 97 && cc <= 102)); // a-f
    };
    return takeChar(scnr, closure);
  }
  // Consume a run of decimal digits and return them as a string.
  function getDigits(scnr) {
    let ch = '';
    let num = '';
    while ((ch = takeDigit(scnr))) {
      num += ch;
    }
    return num;
  }
  // Consume the `%` of a `%{...}` placeholder prefix.
  function readModulo(scnr) {
    skipSpaces(scnr);
    const ch = scnr.currentChar();
    if (ch !== "%" /* TokenChars.Modulo */) {
      emitError(CompileErrorCodes.EXPECTED_TOKEN, currentPosition(), 0, ch);
    }
    scnr.next();
    return "%" /* TokenChars.Modulo */;
  }
  // Consume plain text up to the next syntactic boundary
  // ({ } @ | EOF, or a `%`/whitespace that begins something else).
  function readText(scnr) {
    let buf = '';
    while (true) {
      const ch = scnr.currentChar();
      if (ch === "{" /* TokenChars.BraceLeft */ ||
        ch === "}" /* TokenChars.BraceRight */ ||
        ch === "@" /* TokenChars.LinkedAlias */ ||
        ch === "|" /* TokenChars.Pipe */ ||
        !ch) {
        break;
      }
      else if (ch === "%" /* TokenChars.Modulo */) {
        if (isTextStart(scnr)) {
          buf += ch;
          scnr.next();
        }
        else {
          break;
        }
      }
      else if (ch === CHAR_SP || ch === CHAR_LF) {
        if (isTextStart(scnr)) {
          buf += ch;
          scnr.next();
        }
        else if (isPluralStart(scnr)) {
          break;
        }
        else {
          buf += ch;
          scnr.next();
        }
      }
      else {
        buf += ch;
        scnr.next();
      }
    }
    return buf;
  }
  // Read a named-placeholder identifier, e.g. the `name` in `{name}`.
  function readNamedIdentifier(scnr) {
    skipSpaces(scnr);
    let ch = '';
    let name = '';
    while ((ch = takeIdentifierChar(scnr))) {
      name += ch;
    }
    if (scnr.currentChar() === EOF) {
      emitError(CompileErrorCodes.UNTERMINATED_CLOSING_BRACE, currentPosition(), 0);
    }
    return name;
  }
  // Read a (possibly negative) list index, e.g. the `0` in `{0}`.
  function readListIdentifier(scnr) {
    skipSpaces(scnr);
    let value = '';
    if (scnr.currentChar() === '-') {
      scnr.next();
      value += `-${getDigits(scnr)}`;
    }
    else {
      value += getDigits(scnr);
    }
    if (scnr.currentChar() === EOF) {
      emitError(CompileErrorCodes.UNTERMINATED_CLOSING_BRACE, currentPosition(), 0);
    }
    return value;
  }
  // Read a single-quoted literal inside a placeholder, decoding escape
  // sequences; quotes cannot span a line break.
  function readLiteral(scnr) {
    skipSpaces(scnr);
    eat(scnr, `\'`);
    let ch = '';
    let literal = '';
    const fn = (x) => x !== LITERAL_DELIMITER && x !== CHAR_LF;
    while ((ch = takeChar(scnr, fn))) {
      if (ch === '\\') {
        literal += readEscapeSequence(scnr);
      }
      else {
        literal += ch;
      }
    }
    const current = scnr.currentChar();
    if (current === CHAR_LF || current === EOF) {
      emitError(CompileErrorCodes.UNTERMINATED_SINGLE_QUOTE_IN_PLACEHOLDER, currentPosition(), 0);
      // TODO: Is it correct really?
      if (current === CHAR_LF) {
        scnr.next();
        eat(scnr, `\'`);
      }
      return literal;
    }
    eat(scnr, `\'`);
    return literal;
  }
  // Read one escape after a backslash: \\ \' \uHHHH \UHHHHHH.
  // The raw escape text is kept; the parser decodes it later.
  function readEscapeSequence(scnr) {
    const ch = scnr.currentChar();
    switch (ch) {
      case '\\':
      case `\'`:
        scnr.next();
        return `\\${ch}`;
      case 'u':
        return readUnicodeEscapeSequence(scnr, ch, 4);
      case 'U':
        return readUnicodeEscapeSequence(scnr, ch, 6);
      default:
        emitError(CompileErrorCodes.UNKNOWN_ESCAPE_SEQUENCE, currentPosition(), 0, ch);
        return '';
    }
  }
  // Read exactly `digits` hex digits after `\u`/`\U`, emitting an error
  // (and stopping early) on the first non-hex character.
  function readUnicodeEscapeSequence(scnr, unicode, digits) {
    eat(scnr, unicode);
    let sequence = '';
    for (let i = 0; i < digits; i++) {
      const ch = takeHexDigit(scnr);
      if (!ch) {
        emitError(CompileErrorCodes.INVALID_UNICODE_ESCAPE_SEQUENCE, currentPosition(), 0, `\\${unicode}${sequence}${scnr.currentChar()}`);
        break;
      }
      sequence += ch;
    }
    return `\\${unicode}${sequence}`;
  }
  // Consume an unrecognized placeholder body (for error reporting).
  function readInvalidIdentifier(scnr) {
    skipSpaces(scnr);
    let ch = '';
    let identifiers = '';
    const closure = (ch) => ch !== "{" /* TokenChars.BraceLeft */ &&
      ch !== "}" /* TokenChars.BraceRight */ &&
      ch !== CHAR_SP &&
      ch !== CHAR_LF;
    while ((ch = takeChar(scnr, closure))) {
      identifiers += ch;
    }
    return identifiers;
  }
  // Read the modifier name after `@.`, e.g. `upper` in `@.upper:key`.
  function readLinkedModifier(scnr) {
    let ch = '';
    let name = '';
    while ((ch = takeIdentifierChar(scnr))) {
      name += ch;
    }
    return name;
  }
  // Read a linked key after `@:` up to a delimiter character or space;
  // newlines are absorbed into the key text.
  function readLinkedRefer(scnr) {
    const fn = (detect = false, buf) => {
      const ch = scnr.currentChar();
      if (ch === "{" /* TokenChars.BraceLeft */ ||
        ch === "%" /* TokenChars.Modulo */ ||
        ch === "@" /* TokenChars.LinkedAlias */ ||
        ch === "|" /* TokenChars.Pipe */ ||
        !ch) {
        return buf;
      }
      else if (ch === CHAR_SP) {
        return buf;
      }
      else if (ch === CHAR_LF) {
        buf += ch;
        scnr.next();
        return fn(detect, buf);
      }
      else {
        buf += ch;
        scnr.next();
        return fn(true, buf);
      }
    };
    return fn(false, '');
  }
  // Consume a plural separator `|` together with surrounding whitespace.
  function readPlural(scnr) {
    skipSpaces(scnr);
    const plural = eat(scnr, "|" /* TokenChars.Pipe */);
    skipSpaces(scnr);
    return plural;
  }
  // TODO: We need refactoring of token parsing ...
  // Tokenize inside a `{...}` placeholder; maintains `context.braceNest`
  // and recovers from nesting/unterminated-brace errors.
  function readTokenInPlaceholder(scnr, context) {
    let token = null;
    const ch = scnr.currentChar();
    switch (ch) {
      case "{" /* TokenChars.BraceLeft */:
        if (context.braceNest >= 1) {
          emitError(CompileErrorCodes.NOT_ALLOW_NEST_PLACEHOLDER, currentPosition(), 0);
        }
        scnr.next();
        token = getToken(context, 2 /* TokenTypes.BraceLeft */, "{" /* TokenChars.BraceLeft */);
        skipSpaces(scnr);
        context.braceNest++;
        return token;
      case "}" /* TokenChars.BraceRight */:
        if (context.braceNest > 0 &&
          context.currentType === 2 /* TokenTypes.BraceLeft */) {
          emitError(CompileErrorCodes.EMPTY_PLACEHOLDER, currentPosition(), 0);
        }
        scnr.next();
        token = getToken(context, 3 /* TokenTypes.BraceRight */, "}" /* TokenChars.BraceRight */);
        context.braceNest--;
        context.braceNest > 0 && skipSpaces(scnr);
        if (context.inLinked && context.braceNest === 0) {
          context.inLinked = false;
        }
        return token;
      case "@" /* TokenChars.LinkedAlias */:
        if (context.braceNest > 0) {
          emitError(CompileErrorCodes.UNTERMINATED_CLOSING_BRACE, currentPosition(), 0);
        }
        token = readTokenInLinked(scnr, context) || getEndToken(context);
        context.braceNest = 0;
        return token;
      default:
        let validNamedIdentifier = true;
        let validListIdentifier = true;
        let validLiteral = true;
        if (isPluralStart(scnr)) {
          if (context.braceNest > 0) {
            emitError(CompileErrorCodes.UNTERMINATED_CLOSING_BRACE, currentPosition(), 0);
          }
          token = getToken(context, 1 /* TokenTypes.Pipe */, readPlural(scnr));
          // reset
          context.braceNest = 0;
          context.inLinked = false;
          return token;
        }
        if (context.braceNest > 0 &&
          (context.currentType === 5 /* TokenTypes.Named */ ||
            context.currentType === 6 /* TokenTypes.List */ ||
            context.currentType === 7 /* TokenTypes.Literal */)) {
          emitError(CompileErrorCodes.UNTERMINATED_CLOSING_BRACE, currentPosition(), 0);
          context.braceNest = 0;
          return readToken(scnr, context);
        }
        if ((validNamedIdentifier = isNamedIdentifierStart(scnr, context))) {
          token = getToken(context, 5 /* TokenTypes.Named */, readNamedIdentifier(scnr));
          skipSpaces(scnr);
          return token;
        }
        if ((validListIdentifier = isListIdentifierStart(scnr, context))) {
          token = getToken(context, 6 /* TokenTypes.List */, readListIdentifier(scnr));
          skipSpaces(scnr);
          return token;
        }
        if ((validLiteral = isLiteralStart(scnr, context))) {
          token = getToken(context, 7 /* TokenTypes.Literal */, readLiteral(scnr));
          skipSpaces(scnr);
          return token;
        }
        if (!validNamedIdentifier && !validListIdentifier && !validLiteral) {
          // TODO: we should be re-designed invalid cases, when we will extend message syntax near the future ...
          token = getToken(context, 13 /* TokenTypes.InvalidPlace */, readInvalidIdentifier(scnr));
          emitError(CompileErrorCodes.INVALID_TOKEN_IN_PLACEHOLDER, currentPosition(), 0, token.value);
          skipSpaces(scnr);
          return token;
        }
        break;
    }
    return token;
  }
  // TODO: We need refactoring of token parsing ...
  // Tokenize inside a linked message (`@.modifier:key` / `@:{key}`).
  function readTokenInLinked(scnr, context) {
    const { currentType } = context;
    let token = null;
    const ch = scnr.currentChar();
    if ((currentType === 8 /* TokenTypes.LinkedAlias */ ||
      currentType === 9 /* TokenTypes.LinkedDot */ ||
      currentType === 12 /* TokenTypes.LinkedModifier */ ||
      currentType === 10 /* TokenTypes.LinkedDelimiter */) &&
      (ch === CHAR_LF || ch === CHAR_SP)) {
      emitError(CompileErrorCodes.INVALID_LINKED_FORMAT, currentPosition(), 0);
    }
    switch (ch) {
      case "@" /* TokenChars.LinkedAlias */:
        scnr.next();
        token = getToken(context, 8 /* TokenTypes.LinkedAlias */, "@" /* TokenChars.LinkedAlias */);
        context.inLinked = true;
        return token;
      case "." /* TokenChars.LinkedDot */:
        skipSpaces(scnr);
        scnr.next();
        return getToken(context, 9 /* TokenTypes.LinkedDot */, "." /* TokenChars.LinkedDot */);
      case ":" /* TokenChars.LinkedDelimiter */:
        skipSpaces(scnr);
        scnr.next();
        return getToken(context, 10 /* TokenTypes.LinkedDelimiter */, ":" /* TokenChars.LinkedDelimiter */);
      default:
        if (isPluralStart(scnr)) {
          token = getToken(context, 1 /* TokenTypes.Pipe */, readPlural(scnr));
          // reset
          context.braceNest = 0;
          context.inLinked = false;
          return token;
        }
        if (isLinkedDotStart(scnr, context) ||
          isLinkedDelimiterStart(scnr, context)) {
          skipSpaces(scnr);
          return readTokenInLinked(scnr, context);
        }
        if (isLinkedModifierStart(scnr, context)) {
          skipSpaces(scnr);
          return getToken(context, 12 /* TokenTypes.LinkedModifier */, readLinkedModifier(scnr));
        }
        if (isLinkedReferStart(scnr, context)) {
          skipSpaces(scnr);
          if (ch === "{" /* TokenChars.BraceLeft */) {
            // scan the placeholder
            return readTokenInPlaceholder(scnr, context) || token;
          }
          else {
            return getToken(context, 11 /* TokenTypes.LinkedKey */, readLinkedRefer(scnr));
          }
        }
        if (currentType === 8 /* TokenTypes.LinkedAlias */) {
          emitError(CompileErrorCodes.INVALID_LINKED_FORMAT, currentPosition(), 0);
        }
        // fall back to top-level tokenizing
        context.braceNest = 0;
        context.inLinked = false;
        return readToken(scnr, context);
    }
  }
  // TODO: We need refactoring of token parsing ...
  // Top-level token dispatch: delegates to placeholder/linked readers
  // depending on the current mode, otherwise reads text/plural/modulo.
  function readToken(scnr, context) {
    let token = { type: 14 /* TokenTypes.EOF */ };
    if (context.braceNest > 0) {
      return readTokenInPlaceholder(scnr, context) || getEndToken(context);
    }
    if (context.inLinked) {
      return readTokenInLinked(scnr, context) || getEndToken(context);
    }
    const ch = scnr.currentChar();
    switch (ch) {
      case "{" /* TokenChars.BraceLeft */:
        return readTokenInPlaceholder(scnr, context) || getEndToken(context);
      case "}" /* TokenChars.BraceRight */:
        emitError(CompileErrorCodes.UNBALANCED_CLOSING_BRACE, currentPosition(), 0);
        scnr.next();
        return getToken(context, 3 /* TokenTypes.BraceRight */, "}" /* TokenChars.BraceRight */);
      case "@" /* TokenChars.LinkedAlias */:
        return readTokenInLinked(scnr, context) || getEndToken(context);
      default:
        if (isPluralStart(scnr)) {
          token = getToken(context, 1 /* TokenTypes.Pipe */, readPlural(scnr));
          // reset
          context.braceNest = 0;
          context.inLinked = false;
          return token;
        }
        const { isModulo, hasSpace } = detectModuloStart(scnr);
        if (isModulo) {
          // `%{` preceded by whitespace is treated as plain text
          return hasSpace
            ? getToken(context, 0 /* TokenTypes.Text */, readText(scnr))
            : getToken(context, 4 /* TokenTypes.Modulo */, readModulo(scnr));
        }
        if (isTextStart(scnr)) {
          return getToken(context, 0 /* TokenTypes.Text */, readText(scnr));
        }
        break;
    }
    return token;
  }
  // Public entry point: roll the current token state into the `last*`
  // fields, then read the next token (or EOF).
  function nextToken() {
    const { currentType, offset, startLoc, endLoc } = _context;
    _context.lastType = currentType;
    _context.lastOffset = offset;
    _context.lastStartLoc = startLoc;
    _context.lastEndLoc = endLoc;
    _context.offset = currentOffset();
    _context.startLoc = currentPosition();
    if (_scnr.currentChar() === EOF) {
      return getToken(_context, 14 /* TokenTypes.EOF */);
    }
    return readToken(_scnr, _context);
  }
  return {
    nextToken,
    currentOffset,
    currentPosition,
    context
  };
}
// Error-reporting domain for errors emitted by the parser.
const ERROR_DOMAIN = 'parser';
// Backslash backslash, backslash quote, uHHHH, UHHHHHH.
// Used via String#replace to decode escapes kept raw by the tokenizer.
const KNOWN_ESCAPES = /(?:\\\\|\\'|\\u([0-9a-fA-F]{4})|\\U([0-9a-fA-F]{6}))/g;
  829. function fromEscapeSequence(match, codePoint4, codePoint6) {
  830. switch (match) {
  831. case `\\\\`:
  832. return `\\`;
  833. case `\\\'`:
  834. return `\'`;
  835. default: {
  836. const codePoint = parseInt(codePoint4 || codePoint6, 16);
  837. if (codePoint <= 0xd7ff || codePoint >= 0xe000) {
  838. return String.fromCodePoint(codePoint);
  839. }
  840. // invalid ...
  841. // Replace them with U+FFFD REPLACEMENT CHARACTER.
  842. return '�';
  843. }
  844. }
  845. }
  846. function createParser(options = {}) {
  847. const location = options.location !== false;
  848. const { onError } = options;
  849. function emitError(tokenzer, code, start, offset, ...args) {
  850. const end = tokenzer.currentPosition();
  851. end.offset += offset;
  852. end.column += offset;
  853. if (onError) {
  854. const loc = createLocation(start, end);
  855. const err = createCompileError(code, loc, {
  856. domain: ERROR_DOMAIN,
  857. args
  858. });
  859. onError(err);
  860. }
  861. }
  862. function startNode(type, offset, loc) {
  863. const node = {
  864. type,
  865. start: offset,
  866. end: offset
  867. };
  868. if (location) {
  869. node.loc = { start: loc, end: loc };
  870. }
  871. return node;
  872. }
  873. function endNode(node, offset, pos, type) {
  874. node.end = offset;
  875. if (type) {
  876. node.type = type;
  877. }
  878. if (location && node.loc) {
  879. node.loc.end = pos;
  880. }
  881. }
  882. function parseText(tokenizer, value) {
  883. const context = tokenizer.context();
  884. const node = startNode(3 /* NodeTypes.Text */, context.offset, context.startLoc);
  885. node.value = value;
  886. endNode(node, tokenizer.currentOffset(), tokenizer.currentPosition());
  887. return node;
  888. }
  889. function parseList(tokenizer, index) {
  890. const context = tokenizer.context();
  891. const { lastOffset: offset, lastStartLoc: loc } = context; // get brace left loc
  892. const node = startNode(5 /* NodeTypes.List */, offset, loc);
  893. node.index = parseInt(index, 10);
  894. tokenizer.nextToken(); // skip brach right
  895. endNode(node, tokenizer.currentOffset(), tokenizer.currentPosition());
  896. return node;
  897. }
  898. function parseNamed(tokenizer, key) {
  899. const context = tokenizer.context();
  900. const { lastOffset: offset, lastStartLoc: loc } = context; // get brace left loc
  901. const node = startNode(4 /* NodeTypes.Named */, offset, loc);
  902. node.key = key;
  903. tokenizer.nextToken(); // skip brach right
  904. endNode(node, tokenizer.currentOffset(), tokenizer.currentPosition());
  905. return node;
  906. }
  907. function parseLiteral(tokenizer, value) {
  908. const context = tokenizer.context();
  909. const { lastOffset: offset, lastStartLoc: loc } = context; // get brace left loc
  910. const node = startNode(9 /* NodeTypes.Literal */, offset, loc);
  911. node.value = value.replace(KNOWN_ESCAPES, fromEscapeSequence);
  912. tokenizer.nextToken(); // skip brach right
  913. endNode(node, tokenizer.currentOffset(), tokenizer.currentPosition());
  914. return node;
  915. }
  916. function parseLinkedModifier(tokenizer) {
  917. const token = tokenizer.nextToken();
  918. const context = tokenizer.context();
  919. const { lastOffset: offset, lastStartLoc: loc } = context; // get linked dot loc
  920. const node = startNode(8 /* NodeTypes.LinkedModifier */, offset, loc);
  921. if (token.type !== 12 /* TokenTypes.LinkedModifier */) {
  922. // empty modifier
  923. emitError(tokenizer, CompileErrorCodes.UNEXPECTED_EMPTY_LINKED_MODIFIER, context.lastStartLoc, 0);
  924. node.value = '';
  925. endNode(node, offset, loc);
  926. return {
  927. nextConsumeToken: token,
  928. node
  929. };
  930. }
  931. // check token
  932. if (token.value == null) {
  933. emitError(tokenizer, CompileErrorCodes.UNEXPECTED_LEXICAL_ANALYSIS, context.lastStartLoc, 0, getTokenCaption(token));
  934. }
  935. node.value = token.value || '';
  936. endNode(node, tokenizer.currentOffset(), tokenizer.currentPosition());
  937. return {
  938. node
  939. };
  940. }
  941. function parseLinkedKey(tokenizer, value) {
  942. const context = tokenizer.context();
  943. const node = startNode(7 /* NodeTypes.LinkedKey */, context.offset, context.startLoc);
  944. node.value = value;
  945. endNode(node, tokenizer.currentOffset(), tokenizer.currentPosition());
  946. return node;
  947. }
  948. function parseLinked(tokenizer) {
  949. const context = tokenizer.context();
  950. const linkedNode = startNode(6 /* NodeTypes.Linked */, context.offset, context.startLoc);
  951. let token = tokenizer.nextToken();
  952. if (token.type === 9 /* TokenTypes.LinkedDot */) {
  953. const parsed = parseLinkedModifier(tokenizer);
  954. linkedNode.modifier = parsed.node;
  955. token = parsed.nextConsumeToken || tokenizer.nextToken();
  956. }
  957. // asset check token
  958. if (token.type !== 10 /* TokenTypes.LinkedDelimiter */) {
  959. emitError(tokenizer, CompileErrorCodes.UNEXPECTED_LEXICAL_ANALYSIS, context.lastStartLoc, 0, getTokenCaption(token));
  960. }
  961. token = tokenizer.nextToken();
  962. // skip brace left
  963. if (token.type === 2 /* TokenTypes.BraceLeft */) {
  964. token = tokenizer.nextToken();
  965. }
  966. switch (token.type) {
  967. case 11 /* TokenTypes.LinkedKey */:
  968. if (token.value == null) {
  969. emitError(tokenizer, CompileErrorCodes.UNEXPECTED_LEXICAL_ANALYSIS, context.lastStartLoc, 0, getTokenCaption(token));
  970. }
  971. linkedNode.key = parseLinkedKey(tokenizer, token.value || '');
  972. break;
  973. case 5 /* TokenTypes.Named */:
  974. if (token.value == null) {
  975. emitError(tokenizer, CompileErrorCodes.UNEXPECTED_LEXICAL_ANALYSIS, context.lastStartLoc, 0, getTokenCaption(token));
  976. }
  977. linkedNode.key = parseNamed(tokenizer, token.value || '');
  978. break;
  979. case 6 /* TokenTypes.List */:
  980. if (token.value == null) {
  981. emitError(tokenizer, CompileErrorCodes.UNEXPECTED_LEXICAL_ANALYSIS, context.lastStartLoc, 0, getTokenCaption(token));
  982. }
  983. linkedNode.key = parseList(tokenizer, token.value || '');
  984. break;
  985. case 7 /* TokenTypes.Literal */:
  986. if (token.value == null) {
  987. emitError(tokenizer, CompileErrorCodes.UNEXPECTED_LEXICAL_ANALYSIS, context.lastStartLoc, 0, getTokenCaption(token));
  988. }
  989. linkedNode.key = parseLiteral(tokenizer, token.value || '');
  990. break;
  991. default:
  992. // empty key
  993. emitError(tokenizer, CompileErrorCodes.UNEXPECTED_EMPTY_LINKED_KEY, context.lastStartLoc, 0);
  994. const nextContext = tokenizer.context();
  995. const emptyLinkedKeyNode = startNode(7 /* NodeTypes.LinkedKey */, nextContext.offset, nextContext.startLoc);
  996. emptyLinkedKeyNode.value = '';
  997. endNode(emptyLinkedKeyNode, nextContext.offset, nextContext.startLoc);
  998. linkedNode.key = emptyLinkedKeyNode;
  999. endNode(linkedNode, nextContext.offset, nextContext.startLoc);
  1000. return {
  1001. nextConsumeToken: token,
  1002. node: linkedNode
  1003. };
  1004. }
  1005. endNode(linkedNode, tokenizer.currentOffset(), tokenizer.currentPosition());
  1006. return {
  1007. node: linkedNode
  1008. };
  1009. }
  1010. function parseMessage(tokenizer) {
  1011. const context = tokenizer.context();
  1012. const startOffset = context.currentType === 1 /* TokenTypes.Pipe */
  1013. ? tokenizer.currentOffset()
  1014. : context.offset;
  1015. const startLoc = context.currentType === 1 /* TokenTypes.Pipe */
  1016. ? context.endLoc
  1017. : context.startLoc;
  1018. const node = startNode(2 /* NodeTypes.Message */, startOffset, startLoc);
  1019. node.items = [];
  1020. let nextToken = null;
  1021. do {
  1022. const token = nextToken || tokenizer.nextToken();
  1023. nextToken = null;
  1024. switch (token.type) {
  1025. case 0 /* TokenTypes.Text */:
  1026. if (token.value == null) {
  1027. emitError(tokenizer, CompileErrorCodes.UNEXPECTED_LEXICAL_ANALYSIS, context.lastStartLoc, 0, getTokenCaption(token));
  1028. }
  1029. node.items.push(parseText(tokenizer, token.value || ''));
  1030. break;
  1031. case 6 /* TokenTypes.List */:
  1032. if (token.value == null) {
  1033. emitError(tokenizer, CompileErrorCodes.UNEXPECTED_LEXICAL_ANALYSIS, context.lastStartLoc, 0, getTokenCaption(token));
  1034. }
  1035. node.items.push(parseList(tokenizer, token.value || ''));
  1036. break;
  1037. case 5 /* TokenTypes.Named */:
  1038. if (token.value == null) {
  1039. emitError(tokenizer, CompileErrorCodes.UNEXPECTED_LEXICAL_ANALYSIS, context.lastStartLoc, 0, getTokenCaption(token));
  1040. }
  1041. node.items.push(parseNamed(tokenizer, token.value || ''));
  1042. break;
  1043. case 7 /* TokenTypes.Literal */:
  1044. if (token.value == null) {
  1045. emitError(tokenizer, CompileErrorCodes.UNEXPECTED_LEXICAL_ANALYSIS, context.lastStartLoc, 0, getTokenCaption(token));
  1046. }
  1047. node.items.push(parseLiteral(tokenizer, token.value || ''));
  1048. break;
  1049. case 8 /* TokenTypes.LinkedAlias */:
  1050. const parsed = parseLinked(tokenizer);
  1051. node.items.push(parsed.node);
  1052. nextToken = parsed.nextConsumeToken || null;
  1053. break;
  1054. }
  1055. } while (context.currentType !== 14 /* TokenTypes.EOF */ &&
  1056. context.currentType !== 1 /* TokenTypes.Pipe */);
  1057. // adjust message node loc
  1058. const endOffset = context.currentType === 1 /* TokenTypes.Pipe */
  1059. ? context.lastOffset
  1060. : tokenizer.currentOffset();
  1061. const endLoc = context.currentType === 1 /* TokenTypes.Pipe */
  1062. ? context.lastEndLoc
  1063. : tokenizer.currentPosition();
  1064. endNode(node, endOffset, endLoc);
  1065. return node;
  1066. }
  1067. function parsePlural(tokenizer, offset, loc, msgNode) {
  1068. const context = tokenizer.context();
  1069. let hasEmptyMessage = msgNode.items.length === 0;
  1070. const node = startNode(1 /* NodeTypes.Plural */, offset, loc);
  1071. node.cases = [];
  1072. node.cases.push(msgNode);
  1073. do {
  1074. const msg = parseMessage(tokenizer);
  1075. if (!hasEmptyMessage) {
  1076. hasEmptyMessage = msg.items.length === 0;
  1077. }
  1078. node.cases.push(msg);
  1079. } while (context.currentType !== 14 /* TokenTypes.EOF */);
  1080. if (hasEmptyMessage) {
  1081. emitError(tokenizer, CompileErrorCodes.MUST_HAVE_MESSAGES_IN_PLURAL, loc, 0);
  1082. }
  1083. endNode(node, tokenizer.currentOffset(), tokenizer.currentPosition());
  1084. return node;
  1085. }
  1086. function parseResource(tokenizer) {
  1087. const context = tokenizer.context();
  1088. const { offset, startLoc } = context;
  1089. const msgNode = parseMessage(tokenizer);
  1090. if (context.currentType === 14 /* TokenTypes.EOF */) {
  1091. return msgNode;
  1092. }
  1093. else {
  1094. return parsePlural(tokenizer, offset, startLoc, msgNode);
  1095. }
  1096. }
  1097. function parse(source) {
  1098. const tokenizer = createTokenizer(source, assign({}, options));
  1099. const context = tokenizer.context();
  1100. const node = startNode(0 /* NodeTypes.Resource */, context.offset, context.startLoc);
  1101. if (location && node.loc) {
  1102. node.loc.source = source;
  1103. }
  1104. node.body = parseResource(tokenizer);
  1105. // assert whether achieved to EOF
  1106. if (context.currentType !== 14 /* TokenTypes.EOF */) {
  1107. emitError(tokenizer, CompileErrorCodes.UNEXPECTED_LEXICAL_ANALYSIS, context.lastStartLoc, 0, source[context.offset] || '');
  1108. }
  1109. endNode(node, tokenizer.currentOffset(), tokenizer.currentPosition());
  1110. return node;
  1111. }
  1112. return { parse };
  1113. }
  1114. function getTokenCaption(token) {
  1115. if (token.type === 14 /* TokenTypes.EOF */) {
  1116. return 'EOF';
  1117. }
  1118. const name = (token.value || '').replace(/\r?\n/gu, '\\n');
  1119. return name.length > 10 ? name.slice(0, 9) + '…' : name;
  1120. }
  1121. function createTransformer(ast, options = {} // eslint-disable-line
  1122. ) {
  1123. const _context = {
  1124. ast,
  1125. helpers: new Set()
  1126. };
  1127. const context = () => _context;
  1128. const helper = (name) => {
  1129. _context.helpers.add(name);
  1130. return name;
  1131. };
  1132. return { context, helper };
  1133. }
  1134. function traverseNodes(nodes, transformer) {
  1135. for (let i = 0; i < nodes.length; i++) {
  1136. traverseNode(nodes[i], transformer);
  1137. }
  1138. }
  1139. function traverseNode(node, transformer) {
  1140. // TODO: if we need pre-hook of transform, should be implemented to here
  1141. switch (node.type) {
  1142. case 1 /* NodeTypes.Plural */:
  1143. traverseNodes(node.cases, transformer);
  1144. transformer.helper("plural" /* HelperNameMap.PLURAL */);
  1145. break;
  1146. case 2 /* NodeTypes.Message */:
  1147. traverseNodes(node.items, transformer);
  1148. break;
  1149. case 6 /* NodeTypes.Linked */:
  1150. const linked = node;
  1151. traverseNode(linked.key, transformer);
  1152. transformer.helper("linked" /* HelperNameMap.LINKED */);
  1153. transformer.helper("type" /* HelperNameMap.TYPE */);
  1154. break;
  1155. case 5 /* NodeTypes.List */:
  1156. transformer.helper("interpolate" /* HelperNameMap.INTERPOLATE */);
  1157. transformer.helper("list" /* HelperNameMap.LIST */);
  1158. break;
  1159. case 4 /* NodeTypes.Named */:
  1160. transformer.helper("interpolate" /* HelperNameMap.INTERPOLATE */);
  1161. transformer.helper("named" /* HelperNameMap.NAMED */);
  1162. break;
  1163. }
  1164. // TODO: if we need post-hook of transform, should be implemented to here
  1165. }
  1166. // transform AST
  1167. function transform(ast, options = {} // eslint-disable-line
  1168. ) {
  1169. const transformer = createTransformer(ast);
  1170. transformer.helper("normalize" /* HelperNameMap.NORMALIZE */);
  1171. // traverse
  1172. ast.body && traverseNode(ast.body, transformer);
  1173. // set meta information
  1174. const context = transformer.context();
  1175. ast.helpers = Array.from(context.helpers);
  1176. }
  1177. function createCodeGenerator(ast, options) {
  1178. const { sourceMap, filename, breakLineCode, needIndent: _needIndent } = options;
  1179. const _context = {
  1180. source: ast.loc.source,
  1181. filename,
  1182. code: '',
  1183. column: 1,
  1184. line: 1,
  1185. offset: 0,
  1186. map: undefined,
  1187. breakLineCode,
  1188. needIndent: _needIndent,
  1189. indentLevel: 0
  1190. };
  1191. const context = () => _context;
  1192. function push(code, node) {
  1193. _context.code += code;
  1194. }
  1195. function _newline(n, withBreakLine = true) {
  1196. const _breakLineCode = withBreakLine ? breakLineCode : '';
  1197. push(_needIndent ? _breakLineCode + ` `.repeat(n) : _breakLineCode);
  1198. }
  1199. function indent(withNewLine = true) {
  1200. const level = ++_context.indentLevel;
  1201. withNewLine && _newline(level);
  1202. }
  1203. function deindent(withNewLine = true) {
  1204. const level = --_context.indentLevel;
  1205. withNewLine && _newline(level);
  1206. }
  1207. function newline() {
  1208. _newline(_context.indentLevel);
  1209. }
  1210. const helper = (key) => `_${key}`;
  1211. const needIndent = () => _context.needIndent;
  1212. return {
  1213. context,
  1214. push,
  1215. indent,
  1216. deindent,
  1217. newline,
  1218. helper,
  1219. needIndent
  1220. };
  1221. }
  1222. function generateLinkedNode(generator, node) {
  1223. const { helper } = generator;
  1224. generator.push(`${helper("linked" /* HelperNameMap.LINKED */)}(`);
  1225. generateNode(generator, node.key);
  1226. if (node.modifier) {
  1227. generator.push(`, `);
  1228. generateNode(generator, node.modifier);
  1229. generator.push(`, _type`);
  1230. }
  1231. else {
  1232. generator.push(`, undefined, _type`);
  1233. }
  1234. generator.push(`)`);
  1235. }
  1236. function generateMessageNode(generator, node) {
  1237. const { helper, needIndent } = generator;
  1238. generator.push(`${helper("normalize" /* HelperNameMap.NORMALIZE */)}([`);
  1239. generator.indent(needIndent());
  1240. const length = node.items.length;
  1241. for (let i = 0; i < length; i++) {
  1242. generateNode(generator, node.items[i]);
  1243. if (i === length - 1) {
  1244. break;
  1245. }
  1246. generator.push(', ');
  1247. }
  1248. generator.deindent(needIndent());
  1249. generator.push('])');
  1250. }
  1251. function generatePluralNode(generator, node) {
  1252. const { helper, needIndent } = generator;
  1253. if (node.cases.length > 1) {
  1254. generator.push(`${helper("plural" /* HelperNameMap.PLURAL */)}([`);
  1255. generator.indent(needIndent());
  1256. const length = node.cases.length;
  1257. for (let i = 0; i < length; i++) {
  1258. generateNode(generator, node.cases[i]);
  1259. if (i === length - 1) {
  1260. break;
  1261. }
  1262. generator.push(', ');
  1263. }
  1264. generator.deindent(needIndent());
  1265. generator.push(`])`);
  1266. }
  1267. }
  1268. function generateResource(generator, node) {
  1269. if (node.body) {
  1270. generateNode(generator, node.body);
  1271. }
  1272. else {
  1273. generator.push('null');
  1274. }
  1275. }
  1276. function generateNode(generator, node) {
  1277. const { helper } = generator;
  1278. switch (node.type) {
  1279. case 0 /* NodeTypes.Resource */:
  1280. generateResource(generator, node);
  1281. break;
  1282. case 1 /* NodeTypes.Plural */:
  1283. generatePluralNode(generator, node);
  1284. break;
  1285. case 2 /* NodeTypes.Message */:
  1286. generateMessageNode(generator, node);
  1287. break;
  1288. case 6 /* NodeTypes.Linked */:
  1289. generateLinkedNode(generator, node);
  1290. break;
  1291. case 8 /* NodeTypes.LinkedModifier */:
  1292. generator.push(JSON.stringify(node.value), node);
  1293. break;
  1294. case 7 /* NodeTypes.LinkedKey */:
  1295. generator.push(JSON.stringify(node.value), node);
  1296. break;
  1297. case 5 /* NodeTypes.List */:
  1298. generator.push(`${helper("interpolate" /* HelperNameMap.INTERPOLATE */)}(${helper("list" /* HelperNameMap.LIST */)}(${node.index}))`, node);
  1299. break;
  1300. case 4 /* NodeTypes.Named */:
  1301. generator.push(`${helper("interpolate" /* HelperNameMap.INTERPOLATE */)}(${helper("named" /* HelperNameMap.NAMED */)}(${JSON.stringify(node.key)}))`, node);
  1302. break;
  1303. case 9 /* NodeTypes.Literal */:
  1304. generator.push(JSON.stringify(node.value), node);
  1305. break;
  1306. case 3 /* NodeTypes.Text */:
  1307. generator.push(JSON.stringify(node.value), node);
  1308. break;
  1309. default:
  1310. if ((process.env.NODE_ENV !== 'production')) {
  1311. throw new Error(`unhandled codegen node type: ${node.type}`);
  1312. }
  1313. }
  1314. }
  1315. // generate code from AST
  1316. const generate = (ast, options = {} // eslint-disable-line
  1317. ) => {
  1318. const mode = isString(options.mode) ? options.mode : 'normal';
  1319. const filename = isString(options.filename)
  1320. ? options.filename
  1321. : 'message.intl';
  1322. const sourceMap = !!options.sourceMap;
  1323. // prettier-ignore
  1324. const breakLineCode = options.breakLineCode != null
  1325. ? options.breakLineCode
  1326. : mode === 'arrow'
  1327. ? ';'
  1328. : '\n';
  1329. const needIndent = options.needIndent ? options.needIndent : mode !== 'arrow';
  1330. const helpers = ast.helpers || [];
  1331. const generator = createCodeGenerator(ast, {
  1332. mode,
  1333. filename,
  1334. sourceMap,
  1335. breakLineCode,
  1336. needIndent
  1337. });
  1338. generator.push(mode === 'normal' ? `function __msg__ (ctx) {` : `(ctx) => {`);
  1339. generator.indent(needIndent);
  1340. if (helpers.length > 0) {
  1341. generator.push(`const { ${helpers.map(s => `${s}: _${s}`).join(', ')} } = ctx`);
  1342. generator.newline();
  1343. }
  1344. generator.push(`return `);
  1345. generateNode(generator, ast);
  1346. generator.deindent(needIndent);
  1347. generator.push(`}`);
  1348. const { code, map } = generator.context();
  1349. return {
  1350. ast,
  1351. code,
  1352. map: map ? map.toJSON() : undefined // eslint-disable-line @typescript-eslint/no-explicit-any
  1353. };
  1354. };
  1355. function baseCompile(source, options = {}) {
  1356. const assignedOptions = assign({}, options);
  1357. // parse source codes
  1358. const parser = createParser(assignedOptions);
  1359. const ast = parser.parse(source);
  1360. // transform ASTs
  1361. transform(ast, assignedOptions);
  1362. // generate javascript codes
  1363. return generate(ast, assignedOptions);
  1364. }
  1365. export { CompileErrorCodes, ERROR_DOMAIN, LocationStub, baseCompile, createCompileError, createLocation, createParser, createPosition, defaultOnError, detectHtmlTag, errorMessages };