import {
tokenizePath,
TokenType,
- tokensToRegExp,
+ tokensToParser,
} from '../../src/matcher/tokenizer'
describe('Path parser', () => {
})
})
- describe('tokensToRegexp', () => {
+ describe('tokensToParser', () => {
function matchRegExp(
expectedRe: string,
- ...args: Parameters<typeof tokensToRegExp>
+ ...args: Parameters<typeof tokensToParser>
) {
- const pathParser = tokensToRegExp(...args)
+ const pathParser = tokensToParser(...args)
expect(expectedRe).toBe(
pathParser.re
.toString()
)
}
// end of describe
})
+
+ describe('parsing urls', () => {
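+ // builds a parser from `path` and asserts that parsing `pathToTest` produces `params`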
+ function matchParams(
+ path: string,
+ pathToTest: string,
+ params: ReturnType<ReturnType<typeof tokensToParser>['parse']>
+ ) {
+ const pathParser = tokensToParser(tokenizePath(path))
+
+ expect(pathParser.parse(pathToTest)).toEqual(params)
+ }
+
+ it('returns null if no match', () => {
+ matchParams('/home', '/', null)
+ })
+
+ it('returns an empty object with no keys', () => {
+ matchParams('/home', '/home', {})
+ })
+
+ it('param single', () => {
+ matchParams('/:id', '/a', { id: 'a' })
+ })
+
+ it('param combined', () => {
+ matchParams('/hey:a', '/heyedu', {
+ a: 'edu',
+ })
+ })
+
+ it('param multiple', () => {
+ matchParams('/:a-:b-:c', '/one-two-three', {
+ a: 'one',
+ b: 'two',
+ c: 'three',
+ })
+ })
+
+ it('param optional', () => {
+ matchParams('/:a?', '/', {
+ a: '',
+ })
+ matchParams('/:a*', '/', {
+ a: '',
+ })
+ })
+
+ it('param repeatable', () => {
+ matchParams('/:a+', '/one/two', {
+ a: ['one', 'two'],
+ })
+ matchParams('/:a*', '/one/two', {
+ a: ['one', 'two'],
+ })
+ })
+
+ // end of parsing urls
+ })
})
state === TokenizerState.Param ||
state === TokenizerState.ParamRegExp
) {
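+ // a param followed by + or * must be the only token in its segment: its matches are joined/split with '/'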
+ if (segment.length > 1 && (char === '*' || char === '+'))
+ crash(
+ `A repeatable param (${buffer}) must be alone in its segment. eg: '/:ids+'.`
+ )
segment.push({
type: TokenType.Param,
value: buffer,
consumeBuffer()
state = TokenizerState.Static
// go back one character if the current char was not a modifier
- if (char !== '*' && char !== '?' && char !== '+') i--
+ if (char !== '*' && char !== '?' && char !== '+') {
+ i--
+ }
}
break
return tokens
}
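+// params extracted from a matched path; repeatable params produce an array of strings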
+type Params = Record<string, string | string[]>
+
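+// description of a param captured while building the RegExp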
+interface ParamKey {
+ name: string
+ repeatable: boolean
+ optional: boolean
+}
+
interface PathParser {
re: RegExp
score: number
- keys: string[]
+ keys: ParamKey[]
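+ // returns the params of a matching path, or null if `path` does not match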
+ parse(path: string): Params | null
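+ // generates a path from a params object (counterpart of parse)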
+ stringify(params: Params): string
}
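+
+// Illustrative usage (a sketch mirroring the tests above; '/users/:id' is an arbitrary example path):
+// const parser = tokensToParser(tokenizePath('/users/:id'))
+// parser.parse('/users/2') // { id: '2' }
+// parser.parse('/home') // null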
const BASE_PARAM_PATTERN = '[^/]+?'
-export function tokensToRegExp(segments: Array<Token[]>): PathParser {
+export function tokensToParser(segments: Array<Token[]>): PathParser {
let score = 0
let pattern = '^'
- const keys: string[] = []
+ const keys: ParamKey[] = []
for (const segment of segments) {
pattern += '/'
if (token.type === TokenType.Static) {
pattern += token.value
} else if (token.type === TokenType.Param) {
- keys.push(token.value)
+ keys.push({
+ name: token.value,
+ repeatable: token.repeatable,
+ optional: token.optional,
+ })
const re = token.regexp ? token.regexp : BASE_PARAM_PATTERN
pattern += token.repeatable ? `((?:${re})(?:/(?:${re}))*)` : `(${re})`
if (token.optional) pattern += '?'
pattern += '$'
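+ // compile the final pattern once; the same RegExp is exposed on the parser and reused by parse()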
+ const re = new RegExp(pattern)
+
+ function parse(path: string): Params | null {
+ const match = path.match(re)
+ const params: Params = {}
+
+ if (!match) return null
+
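+ // capture group i of the RegExp corresponds to keys[i - 1]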
+ for (let i = 1; i < match.length; i++) {
+ const value: string = match[i] || ''
+ const key = keys[i - 1]
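+ // a repeatable param captures its segments joined by '/', so split them back into an array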
+ params[key.name] = value && key.repeatable ? value.split('/') : value
+ }
+
+ return params
+ }
+
+ function stringify(params: Params): string {
+ let path = ''
+ // TODO: implement
+
+ return path
+ }
+
return {
- re: new RegExp(pattern),
+ re,
score,
keys,
+ parse,
+ stringify,
}
}