src/parser/BinaryTokenizer.cc
/*
 * Copyright (C) 1996-2016 The Squid Software Foundation and contributors
 *
 * Squid software is distributed under GPLv2+ license and includes
 * contributions from numerous individuals and organizations.
 * Please see the COPYING and CONTRIBUTORS files for details.
 */

/* DEBUG: section 24 SBuf */

#include "squid.h"
#include "parser/BinaryTokenizer.h"

Parser::BinaryTokenizer::BinaryTokenizer(): BinaryTokenizer(SBuf())
{
}

Parser::BinaryTokenizer::BinaryTokenizer(const SBuf &data, const bool expectMore):
    context(nullptr),
    data_(data),
    parsed_(0),
    syncPoint_(0),
    expectMore_(expectMore)
{
}

static inline
std::ostream &
operator <<(std::ostream &os, const Parser::BinaryTokenizerContext *context)
{
    if (context)
        os << context->parent << context->name;
    return os;
}

/// debugging helper that prints a "standard" debugs() trailer
#define BinaryTokenizer_tail(size, start) \
    " occupying " << (size) << " bytes @" << (start) << " in " << this << \
    (expectMore_ ? ';' : '.');

/// logs and throws if fewer than size octets remain; no other side effects
void
Parser::BinaryTokenizer::want(uint64_t size, const char *description) const
{
    if (parsed_ + size > data_.length()) {
        debugs(24, 5, (parsed_ + size - data_.length()) << " more bytes for " <<
               context << description << BinaryTokenizer_tail(size, parsed_));
        Must(expectMore_); // throw an error on premature input termination
        throw InsufficientInput();
    }
}
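
// Caller-side sketch (illustrative; not part of this translation unit): how a
// consumer of this class is expected to react to the exception thrown above
// when more input may still arrive. The buffer `accumulated` and the helper
// `readMore()` are hypothetical names; the exact exception type is the one
// declared in parser/BinaryTokenizer.h.
//
//     Parser::BinaryTokenizer tk(accumulated, true /* expectMore */);
//     try {
//         const uint16_t type = tk.uint16("message.type");
//         // ... parse the rest of the message ...
//     } catch (const InsufficientInput &) {
//         accumulated.append(readMore()); // obtain more octets, then
//         tk.reset(accumulated, true);    // re-parse from the beginning
//     }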

void
Parser::BinaryTokenizer::got(uint64_t size, const char *description) const
{
    debugs(24, 7, context << description <<
           BinaryTokenizer_tail(size, parsed_ - size));
}

/// debugging helper for parsed number fields
void
Parser::BinaryTokenizer::got(uint32_t value, uint64_t size, const char *description) const
{
    debugs(24, 7, context << description << '=' << value <<
           BinaryTokenizer_tail(size, parsed_ - size));
}

/// debugging helper for parsed areas/blobs
void
Parser::BinaryTokenizer::got(const SBuf &value, uint64_t size, const char *description) const
{
    debugs(24, 7, context << description << '=' <<
           Raw(nullptr, value.rawContent(), value.length()).hex() <<
           BinaryTokenizer_tail(size, parsed_ - size));
}

/// debugging helper for skipped fields
void
Parser::BinaryTokenizer::skipped(uint64_t size, const char *description) const
{
    debugs(24, 7, context << description << BinaryTokenizer_tail(size, parsed_ - size));
}

/// Returns the next ready-for-shift byte, adjusting the number of parsed bytes.
/// The larger 32-bit return type helps callers shift/merge octets into numbers.
/// This internal method does not perform out-of-bounds checks.
uint32_t
Parser::BinaryTokenizer::octet()
{
    // While char may be signed, we view data characters as unsigned,
    // which helps to arrive at the right 32-bit return value.
    return static_cast<uint8_t>(data_[parsed_++]);
}
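
// Why the cast above matters (illustrative sketch): where plain char is
// signed, widening a raw 0x80 byte without first going through uint8_t would
// sign-extend and corrupt the shift/merge arithmetic in uint16()/uint24()/
// uint32() below.
//
//     const char c = '\x80';
//     const uint32_t bad = static_cast<uint32_t>(c);  // 0xFFFFFF80 on such platforms
//     const uint32_t good = static_cast<uint8_t>(c);  // 0x00000080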

void
Parser::BinaryTokenizer::reset(const SBuf &data, const bool expectMore)
{
    *this = BinaryTokenizer(data, expectMore);
}

void
Parser::BinaryTokenizer::rollback()
{
    parsed_ = syncPoint_;
}

void
Parser::BinaryTokenizer::commit()
{
    syncPoint_ = parsed_;
}

bool
Parser::BinaryTokenizer::atEnd() const
{
    return parsed_ >= data_.length();
}
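
// Incremental-parsing sketch (illustrative only): commit() after each fully
// parsed record so that rollback() can return to the last good position when a
// later record turns out to be incomplete. parseOneRecord() stands for a
// hypothetical helper built from the field accessors defined below.
//
//     Parser::BinaryTokenizer tk(buffered, true /* expectMore */);
//     try {
//         while (!tk.atEnd()) {
//             parseOneRecord(tk); // uint8()/uint16()/area()/... calls
//             tk.commit();        // remember the end of the parsed record
//         }
//     } catch (const InsufficientInput &) {
//         tk.rollback(); // forget the partially parsed record
//         // wait for more input, then retry
//     }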

uint8_t
Parser::BinaryTokenizer::uint8(const char *description)
{
    want(1, description);
    const uint8_t result = octet();
    got(result, 1, description);
    return result;
}

uint16_t
Parser::BinaryTokenizer::uint16(const char *description)
{
    want(2, description);
    const uint16_t result = (octet() << 8) | octet();
    got(result, 2, description);
    return result;
}

uint32_t
Parser::BinaryTokenizer::uint24(const char *description)
{
    want(3, description);
    const uint32_t result = (octet() << 16) | (octet() << 8) | octet();
    got(result, 3, description);
    return result;
}

uint32_t
Parser::BinaryTokenizer::uint32(const char *description)
{
    want(4, description);
    const uint32_t result = (octet() << 24) | (octet() << 16) | (octet() << 8) | octet();
    got(result, 4, description);
    return result;
}
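
// Byte-order note (worked example): the multi-octet readers above merge octets
// most significant first, i.e. they assume network (big-endian) byte order.
// Given the octets 0x01 0x02 0x03 0x04 at the current parsing position:
//
//     tk.uint16("x"); // yields 0x0102
//     tk.uint32("y"); // yields 0x01020304 (if read from that same position instead)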

SBuf
Parser::BinaryTokenizer::area(uint64_t size, const char *description)
{
    want(size, description);
    const SBuf result = data_.substr(parsed_, size);
    parsed_ += size;
    got(result, size, description);
    return result;
}

void
Parser::BinaryTokenizer::skip(uint64_t size, const char *description)
{
    want(size, description);
    parsed_ += size;
    skipped(size, description);
}

/*
 * BinaryTokenizer::pstringN() implementations below reduce debugging noise by
 * not parsing empty areas and not summarizing parsing via context.success().
 */

SBuf
Parser::BinaryTokenizer::pstring8(const char *description)
{
    BinaryTokenizerContext pstring(*this, description);
    if (const uint8_t length = uint8(".length"))
        return area(length, ".octets");
    return SBuf();
}

SBuf
Parser::BinaryTokenizer::pstring16(const char *description)
{
    BinaryTokenizerContext pstring(*this, description);
    if (const uint16_t length = uint16(".length"))
        return area(length, ".octets");
    return SBuf();
}

SBuf
Parser::BinaryTokenizer::pstring24(const char *description)
{
    BinaryTokenizerContext pstring(*this, description);
    if (const uint32_t length = uint24(".length"))
        return area(length, ".octets");
    return SBuf();
}
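
// Overall usage sketch (illustrative only; the record layout and field names
// are hypothetical): parsing a record with a 1-octet type, a 16-bit
// length-prefixed body, and two padding octets the caller does not need.
//
//     Parser::BinaryTokenizer tk(rawRecord);
//     Parser::BinaryTokenizerContext record(tk, "record");
//     const uint8_t type = tk.uint8(".type");
//     const SBuf body = tk.pstring16(".body"); // parses .length and .octets
//     tk.skip(2, ".padding");
//     record.success(); // summarize successful parsing in the debug log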