
Lexer+Reader: Don't tokenize comments

master
Riyyi, 1 year ago
parent commit 453ca1f796
  1. src/lexer.cpp (12 changed lines)
  2. src/lexer.h (1 changed line)
  3. src/reader.cpp (13 changed lines)

src/lexer.cpp (12 changed lines)

@@ -256,9 +256,6 @@ bool Lexer::consumeValue()
 bool Lexer::consumeComment()
 {
-	size_t column = m_column;
-	std::string comment;
 	ignore(); // ;
 	static std::unordered_set<char> exit = {
@@ -275,18 +272,9 @@ bool Lexer::consumeComment()
 			break;
 		}
-		comment += character;
 		ignore();
 	}
-	// Trim comment
-	comment.erase(comment.begin(),
-	              std::find_if(comment.begin(), comment.end(), [](char c) { return !std::isspace(c); }));
-	comment.erase(std::find_if(comment.rbegin(), comment.rend(), [](char c) { return !std::isspace(c); }).base(),
-	              comment.end());
-	m_tokens.push_back({ Token::Type::Comment, m_line, column, comment });
 	return true;
 }
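
For context, a minimal sketch of what consumeComment() presumably looks like after this commit: the lexer still skips past the comment characters, but it no longer collects, trims, or pushes them as a token. This is a class-member fragment, not a standalone program; the loop shape, the use of peek(), and the exact contents of the exit set are assumptions based on the context lines above, not part of the diff.

	// Sketch only (assumptions noted above): consume a ';' comment by
	// skipping characters until a terminator, without producing a token.
	bool Lexer::consumeComment()
	{
		ignore(); // ;

		static std::unordered_set<char> exit = {
			'\r', '\n', '\0', // assumed terminators
		};

		char character = 0;
		while (true) {
			character = peek();
			if (exit.find(character) != exit.end()) {
				break;
			}
			ignore();
		}

		return true;
	}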

src/lexer.h (1 changed line)

@@ -34,7 +34,6 @@ struct Token {
 		String, // "foobar"
 		Keyword, // :keyword
 		Value, // number, "nil", "true", "false", symbol
-		Comment, // ;
 		Error,
 	};

src/reader.cpp (13 changed lines)

@@ -45,14 +45,7 @@ void Reader::read()
 	// Check for multiple expressions
 	if (!isEOF()) {
-		Token::Type type = peek().type;
-		switch (type) {
-		case Token::Type::Comment:
-			break;
-		default:
-			Error::the().add("more than one sexp in input");
-			break;
-		};
+		Error::the().add("more than one sexp in input");
 	}
 }
@@ -108,10 +101,6 @@ ValuePtr Reader::readImpl()
 	case Token::Type::Keyword: // :keyword
 		return readKeyword();
 		break;
-	case Token::Type::Comment: // ;
-		ignore();
-		return nullptr;
-		break;
 	case Token::Type::Value: // true, false, nil
 		return readValue();
 		break;
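
The reader changes follow from the lexer.h hunk: once Comment is removed from Token::Type, the two Comment cases above would no longer compile, and because the lexer now discards comments itself, readImpl() no longer needs to return nullptr for them. A rough sketch of the affected part of the switch after this commit; the switch variable name is an assumption, and only the two context cases visible in the diff are shown:

	// Sketch only: the Comment case is gone; the reader now only ever sees
	// tokens that belong to an expression.
	switch (peek().type) { // switch variable is an assumption
	case Token::Type::Keyword: // :keyword
		return readKeyword();
		break;
	case Token::Type::Value: // true, false, nil
		return readValue();
		break;
	// ... remaining cases unchanged
	}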
