Mirror of https://github.com/YosysHQ/yosys
Fix two parsing bugs that were causing private regression tests to fail.
These were introduced by 0a6d9f4.

1) While inside a paren "(", don't error on a newline.
2) Don't parse an extra token when parsing vector ranges; let the caller parse the next token as necessary.
This commit is contained in:
parent 23f59e0196
commit 4610889d27

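For context, here is a hypothetical Liberty fragment of the shape these two fixes target. The failing regression tests are private, so every group and attribute name below is invented; the fragment is written as a C++ raw string because such text would normally reach Yosys::LibertyParser through an std::istream.

// Hypothetical input only -- the real regression tests are not public.
// It combines the two constructs this commit is about:
//   1) newlines between the arguments of a parenthesized list
//   2) vector ranges such as name[A] and name[A:B]
#include <string>

static const std::string example_lib = R"lib(
library (example) {
  define (my_attr,            /* arguments split across lines: the  */
          cell,               /* parser now skips the newlines      */
          string);            /* instead of reporting an error      */
  cell (buf) {
    bundle (d) {
      members (d[3], q[7:0]);     /* vector ranges in a paren list  */
    }
    my_attr : data[3:0];          /* vector range in a value        */
  }
}
)lib";

// Usage sketch, assuming the current interface (LibertyParser takes an
// std::istream and exposes the resulting tree as `ast`):
//
//   std::istringstream f(example_lib);
//   Yosys::LibertyParser parser(f);
//   // parser.ast now holds the tree; the ranges themselves are still
//   // discarded, per the FIXME kept in the first hunk below.
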
@@ -360,7 +360,7 @@ void LibertyParser::report_unexpected_token(int tok)
 
 // FIXME: the AST needs to be extended to store
 // these vector ranges.
-int LibertyParser::parse_vector_range(int tok)
+void LibertyParser::parse_vector_range(int tok)
 {
 	// parse vector range [A] or [A:B]
 	std::string arg;

@@ -397,7 +397,6 @@ int LibertyParser::parse_vector_range(int tok)
 	{
 		error("Expected ']' on array range.");
 	}
-	return lexer(arg);
 }
 
 LibertyAst *LibertyParser::parse()

@@ -437,8 +436,10 @@ LibertyAst *LibertyParser::parse()
 			tok = lexer(ast->value);
 			if (tok == 'v') {
 				tok = lexer(str);
-				if (tok == '[')
-					tok = parse_vector_range(tok);
+				if (tok == '[') {
+					parse_vector_range(tok);
+					tok = lexer(str);
+				}
 			}
 			while (tok == '+' || tok == '-' || tok == '*' || tok == '/' || tok == '!') {
 				ast->value += tok;

@@ -471,9 +472,11 @@ LibertyAst *LibertyParser::parse()
 
 			if (tok == '[')
 			{
-				tok = parse_vector_range(tok);
+				parse_vector_range(tok);
 				continue;
 			}
+			if (tok == 'n')
+				continue;
 			if (tok != 'v') {
 				report_unexpected_token(tok);
 			}

@@ -106,7 +106,7 @@ namespace Yosys
 		int lexer(std::string &str);
 
 		void report_unexpected_token(int tok);
-		int parse_vector_range(int tok);
+		void parse_vector_range(int tok);
 		LibertyAst *parse();
 		void error() const;
 		void error(const std::string &str) const;

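Taken together, the first four hunks change parse_vector_range() from a helper that consumed and returned the token after the closing ']' into one that stops at the ']', so callers lex ahead only when they actually need to (as the updated call sites in parse() show). Below is a standalone sketch of that calling convention; it is toy code with an invented mini-lexer, not the yosys sources.

#include <cctype>
#include <iostream>
#include <sstream>
#include <string>

// Toy lexer: 'v' for identifiers/numbers, the character itself for
// punctuation, 0 at end of input. Stands in for LibertyParser::lexer().
static int toy_lexer(std::istream &f, std::string &str)
{
    int ch = f.get();
    while (ch == ' ' || ch == '\t')
        ch = f.get();
    if (ch == EOF)
        return 0;
    if (isalnum(ch)) {
        str.clear();
        while (isalnum(ch)) {
            str += char(ch);
            ch = f.get();
        }
        if (ch != EOF)
            f.unget();
        return 'v';
    }
    return ch;
}

// Fixed convention: consume "[A]" or "[A:B]" (the '[' was already read
// by the caller) and stop right after the ']' -- no extra token is
// pulled, so nothing is returned.
static void parse_vector_range(std::istream &f)
{
    std::string arg;
    int tok = toy_lexer(f, arg);        // index or first bound
    tok = toy_lexer(f, arg);            // ':' or ']'
    if (tok == ':') {
        tok = toy_lexer(f, arg);        // second bound
        tok = toy_lexer(f, arg);        // ']'
    }
    if (tok != ']')
        std::cerr << "Expected ']' on array range.\n";
}

int main()
{
    std::istringstream f("data[3:0] ;");
    std::string str;
    int tok = toy_lexer(f, str);        // 'v' ("data")
    tok = toy_lexer(f, str);            // '['
    if (tok == '[') {
        parse_vector_range(f);          // stops right after ']'
        tok = toy_lexer(f, str);        // caller's own lookahead: ';'
    }
    std::cout << "token after the range: " << char(tok) << "\n";
    return 0;
}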