git.ipfire.org Git - thirdparty/vuejs/router.git/commitdiff
feat: handle optional custom re
author Eduardo San Martin Morote <posva13@gmail.com>
Wed, 11 Dec 2019 08:42:16 +0000 (09:42 +0100)
committer Eduardo San Martin Morote <posva13@gmail.com>
Wed, 18 Dec 2019 09:26:15 +0000 (10:26 +0100)
__tests__/matcher/path-parser.spec.ts
src/matcher/tokenizer.ts

index 1576ce3d0df81d0b90fb31ff1c5d432717282947..866c6ce0f095e927faeac278a7cd0f75f38e2603 100644 (file)
@@ -52,6 +52,96 @@ describe('Path parser', () => {
       ])
     })
 
+    it('param custom re followed by static', () => {
+      expect(tokenizePath('/:id(\\d+)hey')).toEqual([
+        [
+          {
+            type: TokenType.Param,
+            value: 'id',
+            regexp: '\\d+',
+            repeatable: false,
+            optional: false,
+          },
+          {
+            type: TokenType.Static,
+            value: 'hey',
+          },
+        ],
+      ])
+    })
+
+    it('param custom re followed by new segment', () => {
+      expect(tokenizePath('/:id(\\d+)/new')).toEqual([
+        [
+          {
+            type: TokenType.Param,
+            value: 'id',
+            regexp: '\\d+',
+            repeatable: false,
+            optional: false,
+          },
+        ],
+        [
+          {
+            type: TokenType.Static,
+            value: 'new',
+          },
+        ],
+      ])
+    })
+
+    it('param custom re?', () => {
+      expect(tokenizePath('/:id(\\d+)?')).toEqual([
+        [
+          {
+            type: TokenType.Param,
+            value: 'id',
+            regexp: '\\d+',
+            repeatable: false,
+            optional: true,
+          },
+        ],
+      ])
+    })
+
+    it('param custom re? followed by static', () => {
+      expect(tokenizePath('/:id(\\d+)?hey')).toEqual([
+        [
+          {
+            type: TokenType.Param,
+            value: 'id',
+            regexp: '\\d+',
+            repeatable: false,
+            optional: true,
+          },
+          {
+            type: TokenType.Static,
+            value: 'hey',
+          },
+        ],
+      ])
+    })
+
+    it('param custom re? followed by new segment', () => {
+      expect(tokenizePath('/:id(\\d+)?/new')).toEqual([
+        [
+          {
+            type: TokenType.Param,
+            value: 'id',
+            regexp: '\\d+',
+            repeatable: false,
+            optional: true,
+          },
+        ],
+        [
+          {
+            type: TokenType.Static,
+            value: 'new',
+          },
+        ],
+      ])
+    })
+
     it('param single?', () => {
       expect(tokenizePath('/:id?')).toEqual([
         [
index dee2dd93aafbfc238c96ad1a404df5cb1809f4a0..d5fba2f4dfadc8a6584060d7d8b113f2dc8e01da 100644 (file)
@@ -7,6 +7,7 @@ const enum TokenizerState {
   Static,
   Param,
   ParamRegExp, // custom re for a param
+  ParamRegExpEnd, // check if there is any ? + *
   EscapeNext,
 }
 
@@ -72,7 +73,8 @@ export function tokenizePath(path: string): Array<Token[]> {
       })
     } else if (
       state === TokenizerState.Param ||
-      state === TokenizerState.ParamRegExp
+      state === TokenizerState.ParamRegExp ||
+      state === TokenizerState.ParamRegExpEnd
     ) {
       if (segment.length > 1 && (char === '*' || char === '+'))
         crash(
@@ -136,21 +138,26 @@ export function tokenizePath(path: string): Array<Token[]> {
           consumeBuffer()
           state = TokenizerState.Static
           // go back one character if we were not modifying
-          if (char !== '*' && char !== '?' && char !== '+') {
-            i--
-          }
+          if (char !== '*' && char !== '?' && char !== '+') i--
         }
         break
 
       case TokenizerState.ParamRegExp:
         if (char === ')') {
-          consumeBuffer()
-          state = TokenizerState.Static
+          state = TokenizerState.ParamRegExpEnd
         } else {
           customRe += char
         }
         break
 
+      case TokenizerState.ParamRegExpEnd:
+        // same as finalizing a param
+        consumeBuffer()
+        state = TokenizerState.Static
+        // go back one character if we were not modifying
+        if (char !== '*' && char !== '?' && char !== '+') i--
+        break
+
       default:
         crash('Unkwnonw state')
         break
@@ -202,6 +209,15 @@ export function tokensToParser(segments: Array<Token[]>): PathParser {
           optional: token.optional,
         })
         const re = token.regexp ? token.regexp : BASE_PARAM_PATTERN
+        if (re !== BASE_PARAM_PATTERN) {
+          try {
+            new RegExp(re)
+          } catch (err) {
+            throw new Error(
+              `Invalid custom RegExp for param "${token.value}": ` + err.message
+            )
+          }
+        }
         pattern += token.repeatable ? `((?:${re})(?:/(?:${re}))*)` : `(${re})`
         if (token.optional) pattern += '?'
       }