git.ipfire.org Git - thirdparty/vuejs/router.git/commitdiff
feat: custom regexp, tokensToParser
author Eduardo San Martin Morote <posva13@gmail.com>
Tue, 10 Dec 2019 22:01:51 +0000 (23:01 +0100)
committer Eduardo San Martin Morote <posva13@gmail.com>
Wed, 18 Dec 2019 09:26:15 +0000 (10:26 +0100)
__tests__/matcher/path-parser.spec.ts
src/matcher/tokenizer.ts

index 18d35abd2f19a7d6bf38d427ff96ad7d841631bd..2145eb67ae08b31df8e5f0ad024b82f721be11d3 100644 (file)
@@ -30,6 +30,21 @@ describe('Path parser', () => {
           {
             type: TokenType.Param,
             value: 'id',
+            regexp: '',
+            repeatable: false,
+            optional: false,
+          },
+        ],
+      ])
+    })
+
+    it('param custom re', () => {
+      expect(tokenizePath('/:id(\\d+)')).toEqual([
+        [
+          {
+            type: TokenType.Param,
+            value: 'id',
+            regexp: '\\d+',
             repeatable: false,
             optional: false,
           },
@@ -43,6 +58,7 @@ describe('Path parser', () => {
           {
             type: TokenType.Param,
             value: 'id',
+            regexp: '',
             repeatable: false,
             optional: true,
           },
@@ -51,7 +67,7 @@ describe('Path parser', () => {
     })
 
     it('param single+', () => {
-      expect(tokenizePath('/:id+')).toEqual([
+      expect(tokenizePath('/:id+')).toMatchObject([
         [
           {
             type: TokenType.Param,
@@ -64,7 +80,7 @@ describe('Path parser', () => {
     })
 
     it('param single*', () => {
-      expect(tokenizePath('/:id*')).toEqual([
+      expect(tokenizePath('/:id*')).toMatchObject([
         [
           {
             type: TokenType.Param,
@@ -77,7 +93,7 @@ describe('Path parser', () => {
     })
 
     it('param multiple', () => {
-      expect(tokenizePath('/:id/:other')).toEqual([
+      expect(tokenizePath('/:id/:other')).toMatchObject([
         [
           {
             type: TokenType.Param,
@@ -97,8 +113,33 @@ describe('Path parser', () => {
       ])
     })
 
+    it('param multiple together', () => {
+      expect(tokenizePath('/:id:other:more')).toMatchObject([
+        [
+          {
+            type: TokenType.Param,
+            value: 'id',
+            repeatable: false,
+            optional: false,
+          },
+          {
+            type: TokenType.Param,
+            value: 'other',
+            repeatable: false,
+            optional: false,
+          },
+          {
+            type: TokenType.Param,
+            value: 'more',
+            repeatable: false,
+            optional: false,
+          },
+        ],
+      ])
+    })
+
     it('param with static in between', () => {
-      expect(tokenizePath('/:id-:other')).toEqual([
+      expect(tokenizePath('/:id-:other')).toMatchObject([
         [
           {
             type: TokenType.Param,
@@ -121,7 +162,7 @@ describe('Path parser', () => {
     })
 
     it('param with static beginning', () => {
-      expect(tokenizePath('/hey-:id')).toEqual([
+      expect(tokenizePath('/hey-:id')).toMatchObject([
         [
           {
             type: TokenType.Static,
@@ -138,7 +179,7 @@ describe('Path parser', () => {
     })
 
     it('param with static end', () => {
-      expect(tokenizePath('/:id-end')).toEqual([
+      expect(tokenizePath('/:id-end')).toMatchObject([
         [
           {
             type: TokenType.Param,
@@ -162,16 +203,30 @@ describe('Path parser', () => {
     ) {
       const pathParser = tokensToRegExp(...args)
       expect(expectedRe).toBe(
-        pathParser.re.toString().replace(/(:?^\/|\\|\/$)/g, '')
+        pathParser.re
+          .toString()
+          .replace(/(:?^\/|\/$)/g, '')
+          .replace(/\\\//g, '/')
       )
     }
 
-    it('static', () => {
+    it('empty path', () => {
+      matchRegExp('^/$', [[]])
+    })
+
+    it('static single', () => {
       matchRegExp('^/home$', [[{ type: TokenType.Static, value: 'home' }]])
     })
 
-    it('param simple', () => {
-      matchRegExp('^/([^/]+)$', [
+    it('static multiple', () => {
+      matchRegExp('^/home/other$', [
+        [{ type: TokenType.Static, value: 'home' }],
+        [{ type: TokenType.Static, value: 'other' }],
+      ])
+    })
+
+    it('param single', () => {
+      matchRegExp('^/([^/]+?)$', [
         [
           {
             type: TokenType.Param,
@@ -182,5 +237,69 @@ describe('Path parser', () => {
         ],
       ])
     })
+
+    it('param multiple', () => {
+      matchRegExp('^/([^/]+?)/([^/]+?)$', [
+        [
+          {
+            type: TokenType.Param,
+            value: 'id',
+            repeatable: false,
+            optional: false,
+          },
+        ],
+        [
+          {
+            type: TokenType.Param,
+            value: 'two',
+            repeatable: false,
+            optional: false,
+          },
+        ],
+      ])
+    })
+
+    it('param*', () => {
+      matchRegExp('^/((?:\\d+)(?:/(?:\\d+))*)?$', [
+        [
+          {
+            type: TokenType.Param,
+            value: 'id',
+            regexp: '\\d+',
+            repeatable: true,
+            optional: true,
+          },
+        ],
+      ])
+    })
+
+    it('param?', () => {
+      matchRegExp('^/(\\d+)?$', [
+        [
+          {
+            type: TokenType.Param,
+            value: 'id',
+            regexp: '\\d+',
+            repeatable: false,
+            optional: true,
+          },
+        ],
+      ])
+    })
+
+    it('param+', () => {
+      matchRegExp('^/((?:\\d+)(?:/(?:\\d+))*)$', [
+        [
+          {
+            type: TokenType.Param,
+            value: 'id',
+            regexp: '\\d+',
+            repeatable: true,
+            optional: false,
+          },
+        ],
+      ])
+    })
+    // end of describe
   })
 })
index 08f40f07fbfd58f0dd7d9cf1840a5446b3157395..dcad9167be366b5469f520a86c55c01d928a4a33 100644 (file)
@@ -6,6 +6,7 @@ export const enum TokenType {
 const enum TokenizerState {
   Static,
   Param,
+  ParamRegExp, // custom re for a param
   EscapeNext,
 }
 
@@ -16,13 +17,7 @@ interface TokenStatic {
 
 interface TokenParam {
   type: TokenType.Param
-  regex?: string
-  value: string
-}
-
-interface TokenParam {
-  type: TokenType.Param
-  regex?: string
+  regexp?: string
   value: string
   optional: boolean
   repeatable: boolean
@@ -64,6 +59,8 @@ export function tokenizePath(path: string): Array<Token[]> {
   let char: string
   // buffer of the value read
   let buffer: string = ''
+  // custom regexp for a param
+  let customRe: string = ''
 
   function consumeBuffer() {
     if (!buffer) return
@@ -73,10 +70,14 @@ export function tokenizePath(path: string): Array<Token[]> {
         type: TokenType.Static,
         value: buffer,
       })
-    } else if (state === TokenizerState.Param) {
+    } else if (
+      state === TokenizerState.Param ||
+      state === TokenizerState.ParamRegExp
+    ) {
       segment.push({
         type: TokenType.Param,
         value: buffer,
+        regexp: customRe,
         repeatable: char === '*' || char === '+',
         optional: char === '*' || char === '?',
       })
@@ -93,7 +94,7 @@ export function tokenizePath(path: string): Array<Token[]> {
   while (i < path.length) {
     char = path[i++]
 
-    if (char === '\\') {
+    if (char === '\\' && state !== TokenizerState.ParamRegExp) {
       previousState = state
       state = TokenizerState.EscapeNext
       continue
@@ -123,7 +124,8 @@ export function tokenizePath(path: string): Array<Token[]> {
 
       case TokenizerState.Param:
         if (char === '(') {
-          // TODO: start custom regex
+          state = TokenizerState.ParamRegExp
+          customRe = ''
         } else if (VALID_PARAM_RE.test(char)) {
           addCharToBuffer()
         } else {
@@ -134,12 +136,24 @@ export function tokenizePath(path: string): Array<Token[]> {
         }
         break
 
+      case TokenizerState.ParamRegExp:
+        if (char === ')') {
+          consumeBuffer()
+          state = TokenizerState.Static
+        } else {
+          customRe += char
+        }
+        break
+
       default:
         crash('Unkwnonw state')
         break
     }
   }
 
+  if (state === TokenizerState.ParamRegExp)
+    crash(`Unfinished custom RegExp for param "${buffer}"`)
+
   consumeBuffer()
   finalizeSegment()
 
@@ -152,6 +166,8 @@ interface PathParser {
   keys: string[]
 }
 
+const BASE_PARAM_PATTERN = '[^/]+?'
+
 export function tokensToRegExp(segments: Array<Token[]>): PathParser {
   let score = 0
   let pattern = '^'
@@ -165,8 +181,9 @@ export function tokensToRegExp(segments: Array<Token[]>): PathParser {
         pattern += token.value
       } else if (token.type === TokenType.Param) {
         keys.push(token.value)
-        pattern += `([^/]+)`
-        // TODO: repeatable and others
+        const re = token.regexp ? token.regexp : BASE_PARAM_PATTERN
+        pattern += token.repeatable ? `((?:${re})(?:/(?:${re}))*)` : `(${re})`
+        if (token.optional) pattern += '?'
       }
     }
   }