/* src/parser/Tokenizer.cc — thirdparty/squid.git (merged from trunk, r13515) */
1 #include "squid.h"
2 #include "parser/Tokenizer.h"
3
4 #include <cerrno>
5 #if HAVE_CTYPE_H
6 #include <ctype.h>
7 #endif
8 #if HAVE_STDINT_H
9 #include <stdint.h>
10 #endif
11 #ifndef INT64_MIN
12 /* Native 64 bit system without strtoll() */
13 #if defined(LONG_MIN) && (SIZEOF_LONG == 8)
14 #define INT64_MIN LONG_MIN
15 #else
16 /* 32 bit system */
17 #define INT64_MIN (-9223372036854775807LL-1LL)
18 #endif
19 #endif
20
21 #ifndef INT64_MAX
22 /* Native 64 bit system without strtoll() */
23 #if defined(LONG_MAX) && (SIZEOF_LONG == 8)
24 #define INT64_MAX LONG_MAX
25 #else
26 /* 32 bit system */
27 #define INT64_MAX 9223372036854775807LL
28 #endif
29 #endif
30
31 bool
32 Parser::Tokenizer::token(SBuf &returnedToken, const CharacterSet &delimiters)
33 {
34 const SBuf savebuf(buf_);
35 skip(delimiters);
36 const SBuf::size_type tokenLen = buf_.findFirstOf(delimiters); // not found = npos => consume to end
37 if (tokenLen == SBuf::npos && !delimiters['\0']) {
38 // no delimiter found, nor is NUL/EOS/npos acceptible as one
39 buf_ = savebuf;
40 return false;
41 }
42 const SBuf retval = buf_.consume(tokenLen);
43 skip(delimiters);
44 returnedToken = retval;
45 return true;
46 }
47
48 bool
49 Parser::Tokenizer::prefix(SBuf &returnedToken, const CharacterSet &tokenChars, const SBuf::size_type limit)
50 {
51 const SBuf::size_type prefixLen = buf_.substr(0,limit).findFirstNotOf(tokenChars);
52 if (prefixLen == 0)
53 return false;
54 returnedToken = buf_.consume(prefixLen);
55 return true;
56 }
57
58 bool
59 Parser::Tokenizer::skip(const CharacterSet &tokenChars)
60 {
61 const SBuf::size_type prefixLen = buf_.findFirstNotOf(tokenChars);
62 if (prefixLen == 0)
63 return false;
64 buf_.consume(prefixLen);
65 return true;
66 }
67
68 bool
69 Parser::Tokenizer::skip(const SBuf &tokenToSkip)
70 {
71 if (buf_.startsWith(tokenToSkip)) {
72 buf_.consume(tokenToSkip.length());
73 return true;
74 }
75 return false;
76 }
77
78 bool
79 Parser::Tokenizer::skip(const char tokenChar)
80 {
81 if (buf_[0] == tokenChar) {
82 buf_.consume(1);
83 return true;
84 }
85 return false;
86 }
87
88 /* reworked from compat/strtoll.c */
89 bool
90 Parser::Tokenizer::int64(int64_t & result, int base)
91 {
92 if (buf_.isEmpty())
93 return false;
94
95 //fixme: account for buf_.size()
96 bool neg = false;
97 const char *s = buf_.rawContent();
98 const char *end = buf_.rawContent() + buf_.length();
99
100 if (*s == '-') {
101 neg = true;
102 ++s;
103 } else if (*s == '+') {
104 ++s;
105 }
106 if (s >= end) return false;
107 if (( base == 0 || base == 16) && *s == '0' && (s+1 <= end ) &&
108 tolower(*(s+1)) == 'x') {
109 s += 2;
110 base = 16;
111 }
112 if (base == 0) {
113 if ( *s == '0') {
114 base = 8;
115 ++s;
116 } else {
117 base = 10;
118 }
119 }
120 if (s >= end) return false;
121
122 uint64_t cutoff;
123
124 cutoff = neg ? -static_cast<uint64_t>(INT64_MIN) : INT64_MAX;
125 const int cutlim = cutoff % static_cast<int64_t>(base);
126 cutoff /= static_cast<uint64_t>(base);
127
128 int any = 0, c;
129 int64_t acc = 0;
130 for (c = *s++; s <= end; c = *s++) {
131 if (xisdigit(c)) {
132 c -= '0';
133 } else if (xisalpha(c)) {
134 c -= xisupper(c) ? 'A' - 10 : 'a' - 10;
135 } else {
136 break;
137 }
138 if (c >= base)
139 break;
140 if (any < 0 || static_cast<uint64_t>(acc) > cutoff || (static_cast<uint64_t>(acc) == cutoff && c > cutlim))
141 any = -1;
142 else {
143 any = 1;
144 acc *= base;
145 acc += c;
146 }
147 }
148
149 if (any == 0) // nothing was parsed
150 return false;
151 if (any < 0) {
152 acc = neg ? INT64_MIN : INT64_MAX;
153 errno = ERANGE;
154 return false;
155 } else if (neg)
156 acc = -acc;
157
158 result = acc;
159 buf_.consume(s - buf_.rawContent() -1);
160 return true;
161 }