From: Eduardo San Martin Morote
Date: Tue, 10 Dec 2019 22:01:51 +0000 (+0100)
Subject: feat: custom regexp, tokensToParser
X-Git-Tag: v4.0.0-alpha.0~157
X-Git-Url: http://git.ipfire.org/cgi-bin/gitweb.cgi?a=commitdiff_plain;h=5187ff138425f184761f05be491a36b7d429d47b;p=thirdparty%2Fvuejs%2Frouter.git

feat: custom regexp, tokensToParser
---

diff --git a/__tests__/matcher/path-parser.spec.ts b/__tests__/matcher/path-parser.spec.ts
index 18d35abd..2145eb67 100644
--- a/__tests__/matcher/path-parser.spec.ts
+++ b/__tests__/matcher/path-parser.spec.ts
@@ -30,6 +30,21 @@ describe('Path parser', () => {
           {
             type: TokenType.Param,
             value: 'id',
+            regexp: '',
+            repeatable: false,
+            optional: false,
+          },
+        ],
+      ])
+    })
+
+    it('param custom re', () => {
+      expect(tokenizePath('/:id(\\d+)')).toEqual([
+        [
+          {
+            type: TokenType.Param,
+            value: 'id',
+            regexp: '\\d+',
             repeatable: false,
             optional: false,
           },
@@ -43,6 +58,7 @@
           {
             type: TokenType.Param,
             value: 'id',
+            regexp: '',
             repeatable: false,
             optional: true,
           },
@@ -51,7 +67,7 @@
     })
 
     it('param single+', () => {
-      expect(tokenizePath('/:id+')).toEqual([
+      expect(tokenizePath('/:id+')).toMatchObject([
         [
           {
             type: TokenType.Param,
@@ -64,7 +80,7 @@
     })
 
     it('param single*', () => {
-      expect(tokenizePath('/:id*')).toEqual([
+      expect(tokenizePath('/:id*')).toMatchObject([
         [
           {
             type: TokenType.Param,
@@ -77,7 +93,7 @@
     })
 
     it('param multiple', () => {
-      expect(tokenizePath('/:id/:other')).toEqual([
+      expect(tokenizePath('/:id/:other')).toMatchObject([
         [
           {
             type: TokenType.Param,
@@ -97,8 +113,33 @@
       ])
     })
 
+    it('param multiple together', () => {
+      expect(tokenizePath('/:id:other:more')).toMatchObject([
+        [
+          {
+            type: TokenType.Param,
+            value: 'id',
+            repeatable: false,
+            optional: false,
+          },
+          {
+            type: TokenType.Param,
+            value: 'other',
+            repeatable: false,
+            optional: false,
+          },
+          {
+            type: TokenType.Param,
+            value: 'more',
+            repeatable: false,
+            optional: false,
+          },
+        ],
+      ])
+    })
+
     it('param with static in between', () => {
-      expect(tokenizePath('/:id-:other')).toEqual([
+      expect(tokenizePath('/:id-:other')).toMatchObject([
         [
           {
             type: TokenType.Param,
@@ -121,7 +162,7 @@
     })
 
     it('param with static beginning', () => {
-      expect(tokenizePath('/hey-:id')).toEqual([
+      expect(tokenizePath('/hey-:id')).toMatchObject([
         [
           {
             type: TokenType.Static,
@@ -138,7 +179,7 @@
     })
 
     it('param with static end', () => {
-      expect(tokenizePath('/:id-end')).toEqual([
+      expect(tokenizePath('/:id-end')).toMatchObject([
         [
           {
             type: TokenType.Param,
@@ -162,16 +203,30 @@
     ) {
       const pathParser = tokensToRegExp(...args)
       expect(expectedRe).toBe(
-        pathParser.re.toString().replace(/(:?^\/|\\|\/$)/g, '')
+        pathParser.re
+          .toString()
+          .replace(/(:?^\/|\/$)/g, '')
+          .replace(/\\\//g, '/')
       )
     }
 
-    it('static', () => {
+    it('static single', () => {
+      matchRegExp('^/$', [[]])
+    })
+
+    it('static single', () => {
       matchRegExp('^/home$', [[{ type: TokenType.Static, value: 'home' }]])
     })
 
-    it('param simple', () => {
-      matchRegExp('^/([^/]+)$', [
+    it('static multiple', () => {
+      matchRegExp('^/home/other$', [
+        [{ type: TokenType.Static, value: 'home' }],
+        [{ type: TokenType.Static, value: 'other' }],
+      ])
+    })
+
+    it('param single', () => {
+      matchRegExp('^/([^/]+?)$', [
         [
           {
             type: TokenType.Param,
@@ -182,5 +237,69 @@
         ],
       ])
     })
+
+    it('param multiple', () => {
+      matchRegExp('^/([^/]+?)/([^/]+?)$', [
+        [
+          {
+            type: TokenType.Param,
+            value: 'id',
+            repeatable: false,
+            optional: false,
+          },
+        ],
+        [
+          {
+            type: TokenType.Param,
+            value: 'two',
+            repeatable: false,
+            optional: false,
+          },
+        ],
+      ])
+    })
+
+    it('param*', () => {
+      matchRegExp('^/((?:\\d+)(?:/(?:\\d+))*)?$', [
+        [
+          {
+            type: TokenType.Param,
+            value: 'id',
+            regexp: '\\d+',
+            repeatable: true,
+            optional: true,
+          },
+        ],
+      ])
+    })
+
+    it('param?', () => {
+      matchRegExp('^/(\\d+)?$', [
+        [
+          {
+            type: TokenType.Param,
+            value: 'id',
+            regexp: '\\d+',
+            repeatable: false,
+            optional: true,
+          },
+        ],
+      ])
+    })
+
+    it('param+', () => {
+      matchRegExp('^/((?:\\d+)(?:/(?:\\d+))*)$', [
+        [
+          {
+            type: TokenType.Param,
+            value: 'id',
+            regexp: '\\d+',
+            repeatable: true,
+            optional: false,
+          },
+        ],
+      ])
+    })
+    // end of describe
   })
 })
diff --git a/src/matcher/tokenizer.ts b/src/matcher/tokenizer.ts
index 08f40f07..dcad9167 100644
--- a/src/matcher/tokenizer.ts
+++ b/src/matcher/tokenizer.ts
@@ -6,6 +6,7 @@ export const enum TokenType {
 const enum TokenizerState {
   Static,
   Param,
+  ParamRegExp, // custom re for a param
   EscapeNext,
 }
 
@@ -16,13 +17,7 @@ interface TokenStatic {
 interface TokenParam {
   type: TokenType.Param
-  regex?: string
-  value: string
-}
-
-interface TokenParam {
-  type: TokenType.Param
-  regex?: string
+  regexp?: string
   value: string
   optional: boolean
   repeatable: boolean
 }
@@ -64,6 +59,8 @@ export function tokenizePath(path: string): Array<Token[]> {
   let char: string
   // buffer of the value read
   let buffer: string = ''
+  // custom regexp for a param
+  let customRe: string = ''
 
   function consumeBuffer() {
     if (!buffer) return
@@ -73,10 +70,14 @@ export function tokenizePath(path: string): Array<Token[]> {
         type: TokenType.Static,
         value: buffer,
       })
-    } else if (state === TokenizerState.Param) {
+    } else if (
+      state === TokenizerState.Param ||
+      state === TokenizerState.ParamRegExp
+    ) {
       segment.push({
         type: TokenType.Param,
         value: buffer,
+        regexp: customRe,
         repeatable: char === '*' || char === '+',
         optional: char === '*' || char === '?',
       })
@@ -93,7 +94,7 @@ export function tokenizePath(path: string): Array<Token[]> {
   while (i < path.length) {
     char = path[i++]
 
-    if (char === '\\') {
+    if (char === '\\' && state !== TokenizerState.ParamRegExp) {
       previousState = state
       state = TokenizerState.EscapeNext
       continue
@@ -123,7 +124,8 @@ export function tokenizePath(path: string): Array<Token[]> {
 
       case TokenizerState.Param:
         if (char === '(') {
-          // TODO: start custom regex
+          state = TokenizerState.ParamRegExp
+          customRe = ''
         } else if (VALID_PARAM_RE.test(char)) {
           addCharToBuffer()
         } else {
@@ -134,12 +136,24 @@ export function tokenizePath(path: string): Array<Token[]> {
         }
         break
 
+      case TokenizerState.ParamRegExp:
+        if (char === ')') {
+          consumeBuffer()
+          state = TokenizerState.Static
+        } else {
+          customRe += char
+        }
+        break
+
       default:
         crash('Unkwnonw state')
         break
     }
   }
 
+  if (state === TokenizerState.ParamRegExp)
+    crash(`Unfinished custom RegExp for param "${buffer}"`)
+
   consumeBuffer()
   finalizeSegment()
 
@@ -152,6 +166,8 @@ interface PathParser {
   keys: string[]
 }
 
+const BASE_PARAM_PATTERN = '[^/]+?'
+
 export function tokensToRegExp(segments: Array<Token[]>): PathParser {
   let score = 0
   let pattern = '^'
@@ -165,8 +181,9 @@ export function tokensToRegExp(segments: Array<Token[]>): PathParser {
         pattern += token.value
       } else if (token.type === TokenType.Param) {
         keys.push(token.value)
-        pattern += `([^/]+)`
-        // TODO: repeatable and others
+        const re = token.regexp ? token.regexp : BASE_PARAM_PATTERN
+        pattern += token.repeatable ? `((?:${re})(?:/(?:${re}))*)` : `(${re})`
+        if (token.optional) pattern += '?'
       }
     }
   }
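
Usage sketch (not part of the patch): the snippet below exercises the two functions this commit touches, mirroring the assertions in the tests above. The import path and the commented result shapes are assumptions for illustration only.

    import { tokenizePath, tokensToRegExp } from './src/matcher/tokenizer' // path assumed

    // '/:id(\d+)' uses the new custom-regexp syntax: the characters between
    // the parentheses are stored on the param token as `regexp`.
    const segments = tokenizePath('/users/:id(\\d+)')
    // roughly:
    // [
    //   [{ type: TokenType.Static, value: 'users' }],
    //   [{ type: TokenType.Param, value: 'id', regexp: '\\d+', repeatable: false, optional: false }],
    // ]

    const { re, keys } = tokensToRegExp(segments)
    console.log(keys) // ['id']
    console.log('/users/42'.match(re)?.[1]) // '42' -- the pattern is roughly ^/users/(\d+)$

When a param carries a custom regexp it is inserted into the pattern verbatim; otherwise the tokenizer stores regexp: '' and tokensToRegExp falls back to BASE_PARAM_PATTERN ('[^/]+?') because the empty string is falsy. Repeatable params (+ / *) are wrapped in ((?:re)(?:/(?:re))*) and optional ones get a trailing '?'.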