// Classifies keyword (i.e. gets id for keyword).
TokenId classify_keyword (const std::string &str);
- // Builds a token from the input queue.
- TokenPtr build_token ();
-
std::tuple<std::string, int, bool> parse_in_decimal ();
std::pair<std::string, int> parse_in_exponent_part ();
std::pair<PrimitiveCoreType, int> parse_in_type_suffix ();
// Peeks the current token.
const_TokenPtr peek_token () { return peek_token (0); }
+ // Builds a token from the input queue.
+ TokenPtr build_token ();
+
// Advances current token to n + 1 tokens ahead of current position.
void skip_token (int n);
// Skips the current token.
return ProcMacro::Span::make_span (location.gcc_location (), 0);
}
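+// Converts a ProcMacro::Span into a Location (only the start offset is kept).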
+static Location
+convert (ProcMacro::Span span)
+{
+ return Location (span.start);
+}
+
static void
handle_suffix (ProcMacro::TokenStream &ts, const const_TokenPtr &token,
ProcMacro::LitKind kind)
if (ident.is_raw)
value = "r#" + value;
- // TODO: Inject span -> for now spans are not stored in Ident, once changed
- // the span should be injected in the built token below.
Lexer lexer (value);
- result.push_back (lexer.peek_token ());
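+ // Lex the identifier text and attach the identifier's span to the built token.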
+ auto token = lexer.build_token ();
+ token->set_locus (convert (ident.span));
+ result.push_back (token);
}
/**
std::vector<const_TokenPtr> &result)
{
auto lookup = suffixes.lookup (literal.suffix.to_string ());
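+ // Convert the literal's span once and reuse it for every literal kind below.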
+ auto loc = convert (literal.span);
auto suffix
= suffixes.is_iter_ok (lookup) ? lookup->second : CORETYPE_UNKNOWN;
- // FIXME: Add spans instead of empty locations
{
case ProcMacro::BYTE:
result.push_back (
- Token::make_byte_char (Location (), literal.text.to_string ()[0]));
+ Token::make_byte_char (loc, literal.text.to_string ()[0]));
break;
case ProcMacro::CHAR:
- result.push_back (
- Token::make_char (Location (), literal.text.to_string ()[0]));
+ result.push_back (Token::make_char (loc, literal.text.to_string ()[0]));
break;
case ProcMacro::INTEGER:
result.push_back (
- Token::make_int (Location (), literal.text.to_string (), suffix));
+ Token::make_int (loc, literal.text.to_string (), suffix));
break;
case ProcMacro::FLOAT:
result.push_back (
- Token::make_float (Location (), literal.text.to_string (), suffix));
+ Token::make_float (loc, literal.text.to_string (), suffix));
break;
case ProcMacro::STR:
- result.push_back (
- Token::make_string (Location (), literal.text.to_string ()));
+ result.push_back (Token::make_string (loc, literal.text.to_string ()));
break;
case ProcMacro::BYTE_STR:
result.push_back (
- Token::make_byte_string (Location (), literal.text.to_string ()));
+ Token::make_byte_string (loc, literal.text.to_string ()));
break;
// FIXME: Handle raw string
case ProcMacro::STR_RAW:
// TODO: UTF-8 string
std::string whole (acc.begin (), acc.end ());
auto lexer = Lexer (whole);
- result.push_back (lexer.peek_token ());
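+ // Lex the accumulated punctuation characters and attach the punct's span.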
+ auto token = lexer.build_token ();
+ token->set_locus (convert (punct.span));
+ result.push_back (token);
acc.clear ();
}
}
static void
from_group (const ProcMacro::Group &g, std::vector<const_TokenPtr> &result)
{
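+ // The group's span is reused for both the opening and closing delimiter tokens.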
+ auto loc = convert (g.span);
switch (g.delimiter)
{
case ProcMacro::PARENTHESIS:
- result.push_back (Token::make (LEFT_PAREN, Location ()));
+ result.push_back (Token::make (LEFT_PAREN, loc));
from_tokenstream (g.stream, result);
- result.push_back (Token::make (RIGHT_PAREN, Location ()));
+ result.push_back (Token::make (RIGHT_PAREN, loc));
break;
case ProcMacro::BRACE:
- result.push_back (Token::make (LEFT_CURLY, Location ()));
+ result.push_back (Token::make (LEFT_CURLY, loc));
from_tokenstream (g.stream, result);
- result.push_back (Token::make (RIGHT_CURLY, Location ()));
+ result.push_back (Token::make (RIGHT_CURLY, loc));
break;
case ProcMacro::BRACKET:
- result.push_back (Token::make (LEFT_SQUARE, Location ()));
+ result.push_back (Token::make (LEFT_SQUARE, loc));
from_tokenstream (g.stream, result);
- result.push_back (Token::make (RIGHT_SQUARE, Location ()));
+ result.push_back (Token::make (RIGHT_SQUARE, loc));
break;
case ProcMacro::NONE:
from_tokenstream (g.stream, result);