From 22053041bdb8f203bdd36629de707c4b7885d46f Mon Sep 17 00:00:00 2001 From: jorenchik Date: Sat, 5 Oct 2024 17:52:22 +0300 Subject: [PATCH] ditched the sections and identifiers / added cooldown inplace of identifier --- memorybase/all_types.mdem | 8 +-- src/cpp/include/lexer.h | 9 ++-- src/cpp/include/parser.h | 6 +-- src/cpp/transpiler/lexer.cpp | 45 +++------------- src/cpp/transpiler/parser.cpp | 96 ++++++++++------------------------- 5 files changed, 42 insertions(+), 122 deletions(-) diff --git a/memorybase/all_types.mdem b/memorybase/all_types.mdem index 3347abc..5fabe09 100644 --- a/memorybase/all_types.mdem +++ b/memorybase/all_types.mdem @@ -1,9 +1,9 @@ -- Arrange these events in the order they occurred > +- [2.52] Arrange these events in the order they occurred > -^ The Fall of the Roman Empire -^ The Renaissance -^ The Industrial Revolution -- [planet_characteristics] Match Planets to Their Characteristics > +- [3.22] Match Planets to Their Characteristics > - Earth: - Contains Life - Mars: @@ -32,12 +32,12 @@ - What is the capital of Latvia? > - Rīga -- [cap_est] What is the capital of Estonia? > +- What is the capital of Estonia? 
> - Tallin -- [scient_method_order] Place the following steps of the scientific method in the correct order > +- Place the following steps of the scientific method in the correct order > - Ask a Question - Form a Hypothesis -^ Conduct an Experiment diff --git a/src/cpp/include/lexer.h b/src/cpp/include/lexer.h index 94443bd..7f1ddc4 100644 --- a/src/cpp/include/lexer.h +++ b/src/cpp/include/lexer.h @@ -12,12 +12,9 @@ enum class TokenType { ElementDashStart, ElementPlusStart, ElementOrderModifier, - Identifier, - IdentifierStart, - IdentifierEnd, - SectionIdentifierStart, - SectionStart, - SectionEnd, + Cooldown, + CooldownStart, + CooldownEnd, SOF, EndOfFile }; diff --git a/src/cpp/include/parser.h b/src/cpp/include/parser.h index 2f2c224..9437775 100644 --- a/src/cpp/include/parser.h +++ b/src/cpp/include/parser.h @@ -8,9 +8,9 @@ struct Question { - std::string ID; - std::string QuestionText; - std::string Section; + double Cooldown = 0; + std::string QuestionText; + std::string Section; virtual std::string ToString() const = 0; virtual ~Question() = default; diff --git a/src/cpp/transpiler/lexer.cpp b/src/cpp/transpiler/lexer.cpp index b210bf6..b47477f 100644 --- a/src/cpp/transpiler/lexer.cpp +++ b/src/cpp/transpiler/lexer.cpp @@ -140,7 +140,7 @@ Result> TokenizeMdem(const std::string& fileRunes) { switch (c) { case '[': makeTokenWithTokenBuffer( - TokenType::IdentifierStart, + TokenType::CooldownStart, 1, TokenType::TextFragment ); @@ -159,45 +159,15 @@ Result> TokenizeMdem(const std::string& fileRunes) { }; } makeTokenWithTokenBuffer( - TokenType::IdentifierEnd, + TokenType::CooldownEnd, 1, - TokenType::Identifier + TokenType::Cooldown ); previousRow = row; previousColumn = column; textStarted = false; identifierStarted = false; break; - case '#': - makeTokenWithTokenBuffer( - TokenType::SectionIdentifierStart, - 1, - TokenType::TextFragment - ); - previousRow = row; - previousColumn = column; - textStarted = false; - break; - case '{': - makeTokenWithTokenBuffer(
- TokenType::SectionStart, - 1, - TokenType::Identifier - ); - previousRow = row; - previousColumn = column; - textStarted = false; - break; - case '}': - makeTokenWithTokenBuffer( - TokenType::SectionEnd, - 1, - TokenType::TextFragment - ); - previousRow = row; - previousColumn = column; - textStarted = false; - break; case '-': makeTokenWithTokenBuffer( TokenType::ElementDashStart, @@ -288,12 +258,9 @@ std::string Token::ToString(const TokenType* ttype) { case TokenType::ElementDashStart: return "dash element start"; case TokenType::ElementOrderModifier: return "order element modifier"; case TokenType::ElementPlusStart: return "plus element start"; - case TokenType::Identifier: return "identifier"; - case TokenType::IdentifierStart: return "start of identifier"; - case TokenType::IdentifierEnd: return "end of identifier"; - case TokenType::SectionIdentifierStart: return "section identifier start"; - case TokenType::SectionStart: return "start of section"; - case TokenType::SectionEnd: return "end of section"; + case TokenType::Cooldown: return "cooldown"; + case TokenType::CooldownStart: return "start of cooldown"; + case TokenType::CooldownEnd: return "end of cooldown"; case TokenType::EndOfFile: return "end of file"; default: return "unrecognized token"; } diff --git a/src/cpp/transpiler/parser.cpp b/src/cpp/transpiler/parser.cpp index 4465846..f472d3a 100644 --- a/src/cpp/transpiler/parser.cpp +++ b/src/cpp/transpiler/parser.cpp @@ -35,7 +35,7 @@ std::string MultiElementQuestion::ToString() const { return std::format( "\nsection:{}\nid:{}\n{}\n{}", Section, - ID, + Cooldown, QuestionText, ss.str() ); @@ -53,7 +53,7 @@ std::string GroupQuestion::ToString() const { return std::format( "\nsection:{}\nid:{}\n{}\n{}", Section, - ID, + Cooldown, QuestionText, ss.str() ); @@ -74,10 +74,7 @@ std::map> parserAutomata() { TokenType::ElementDashStart, TokenType::ElementPlusStart, TokenType::MatchGroupEnd, - TokenType::SectionIdentifierStart, - TokenType::SectionStart, 
TokenType::EndOfFile, - TokenType::SectionEnd }; automata[TokenType::MatchGroupEnd] = { TokenType::ElementDashStart @@ -87,7 +84,7 @@ std::map> parserAutomata() { TokenType::ElementPlusStart }; automata[TokenType::ElementDashStart] = { - TokenType::IdentifierStart, + TokenType::CooldownStart, TokenType::TextFragment, TokenType::ElementOrderModifier }; @@ -97,32 +94,17 @@ std::map> parserAutomata() { automata[TokenType::ElementPlusStart] = { TokenType::TextFragment }; - automata[TokenType::Identifier] = { - TokenType::IdentifierEnd, - TokenType::SectionStart + automata[TokenType::Cooldown] = { + TokenType::CooldownEnd, }; - automata[TokenType::IdentifierStart] = { - TokenType::Identifier + automata[TokenType::CooldownStart] = { + TokenType::Cooldown }; - automata[TokenType::IdentifierEnd] = { + automata[TokenType::CooldownEnd] = { TokenType::TextFragment }; - automata[TokenType::SectionIdentifierStart] = { - TokenType::Identifier - }; - automata[TokenType::SectionStart] = { - TokenType::ElementDashStart, - TokenType::SectionIdentifierStart, - TokenType::EndOfFile - }; - automata[TokenType::SectionEnd] = { - TokenType::SectionIdentifierStart, - TokenType::ElementDashStart, - TokenType::EndOfFile - }; automata[TokenType::SOF] = { TokenType::ElementDashStart, - TokenType::SectionIdentifierStart, TokenType::EndOfFile }; automata[TokenType::EndOfFile] = {}; @@ -182,7 +164,8 @@ Result> ParseQuestions(const std::vector& tokens) while (i < tokens.size()) { if (tokens[i].tokenType == TokenType::ElementDashStart) { - std::string id, questionText; + double cooldown; + std::string questionText; std::vector questionElements; bool isOrderQuestion = false; bool isGroupQuestion = false; @@ -201,12 +184,21 @@ Result> ParseQuestions(const std::vector& tokens) tokens[i + 1].column }; } - if (isInBounds(i + 1) && tokens[i + 1].tokenType == TokenType::IdentifierStart) { - id = tokens[i + 2].content; + if (isInBounds(i + 1) && tokens[i + 1].tokenType == TokenType::CooldownStart) { + try 
{ + cooldown = std::stod(tokens[i + 2].content); + } catch (const std::exception&) { + return { + questions, + "error parsing cooldown", + tokens[i + 1].row, + tokens[i + 1].column + }; + } questionText = tokens[i + 4].content; i += 6; } else { - id = ""; + cooldown = 0; questionText = tokens[i + 1].content; i += 3; } @@ -214,22 +206,14 @@ Result> ParseQuestions(const std::vector& tokens) // Parse elements of a question. while (isInBounds(i)) { - // Handle other constructs. - if (tokens[i].tokenType == TokenType::SectionIdentifierStart) { - break; - } - if (tokens[i].tokenType == TokenType::SectionEnd) { - break; - } - // Check question end. if (isInBounds(i + 3) && tokens[i].tokenType == TokenType::ElementDashStart) { // Distance to the possible question end. size_t offset; if (tokens[i + 1].tokenType == TokenType::ElementOrderModifier) { - offset = tokens[i + 2].tokenType == TokenType::IdentifierStart ? 6 : 3; + offset = tokens[i + 2].tokenType == TokenType::CooldownStart ? 6 : 3; } else { - offset = tokens[i + 1].tokenType == TokenType::IdentifierStart ? 5 : 2; + offset = tokens[i + 1].tokenType == TokenType::CooldownStart ?
5 : 2; } if (isInBounds(i + offset) && tokens[i + offset].tokenType == TokenType::QuestionEnd) { break; @@ -315,7 +299,7 @@ Result> ParseQuestions(const std::vector& tokens) if (questionElements.size() > 0) { if (isGroupQuestion) { auto *question = new GroupQuestion(); - question->ID = id; + question->Cooldown = cooldown; question->QuestionText = questionText; question->Section = section; int32_t k = -1; @@ -342,7 +326,7 @@ Result> ParseQuestions(const std::vector& tokens) } } else { auto *question = new MultiElementQuestion(); - question->ID = id; + question->Cooldown = cooldown; question->QuestionText = cleanContent(questionText); question->Section = section; for (const auto& elem : questionElements) { @@ -364,34 +348,6 @@ Result> ParseQuestions(const std::vector& tokens) } } } - } else if (tokens[i].tokenType == TokenType::SectionIdentifierStart) { - if (section.length() > 0) { - return { - questions, - std::format("Section \"{}\" is not closed, cannot open other section", section), - tokens[i].row, - tokens[i].column - }; - } - section = tokens[i + 1].content; - i += 3; - if (debug) { - std::cout << "Started section: " << section << "\n"; - } - } else if (tokens[i].tokenType == TokenType::SectionEnd) { - if (section.length() <= 0) { - return { - questions, - std::format("Cannot close section, no section was opened", section), - tokens[i].row, - tokens[i].column - }; - } - section.clear(); - i += 1; - if (debug) { - std::cout << "Section ended.\n"; - } } else if (tokens[i].tokenType == TokenType::EndOfFile) { if (debug) { std::cout << "File terminated: EndOfFile\n";