ditched the sections and identifiers / added cooldown in place of

identifier
This commit is contained in:
jorenchik
2024-10-05 17:52:22 +03:00
parent 14ee1e91dc
commit 22053041bd
5 changed files with 42 additions and 122 deletions

View File

@@ -1,9 +1,9 @@
- Arrange these events in the order they occurred > - [2.52] Arrange these events in the order they occurred >
-^ The Fall of the Roman Empire -^ The Fall of the Roman Empire
-^ The Renaissance -^ The Renaissance
-^ The Industrial Revolution -^ The Industrial Revolution
- [planet_characteristics] Match Planets to Their Characteristics > - [3.22] Match Planets to Their Characteristics >
- Earth: - Earth:
- Contains Life - Contains Life
- Mars: - Mars:
@@ -32,12 +32,12 @@
- What is the capital of Latvia? > - What is the capital of Latvia? >
- Rīga - Rīga
- [cap_est] What is the capital of Estonia? > - What is the capital of Estonia? >
- Tallin - Tallin
- [scient_method_order] Place the following steps of the scientific method in the correct order > - Place the following steps of the scientific method in the correct order >
- Ask a Question - Ask a Question
- Form a Hypothesis - Form a Hypothesis
-^ Conduct an Experiment -^ Conduct an Experiment

View File

@@ -12,12 +12,9 @@ enum class TokenType {
ElementDashStart, ElementDashStart,
ElementPlusStart, ElementPlusStart,
ElementOrderModifier, ElementOrderModifier,
Identifier, Cooldown,
IdentifierStart, CooldownStart,
IdentifierEnd, CooldownEnd,
SectionIdentifierStart,
SectionStart,
SectionEnd,
SOF, SOF,
EndOfFile EndOfFile
}; };

View File

@@ -8,7 +8,7 @@
struct Question { struct Question {
std::string ID; double Cooldown;
std::string QuestionText; std::string QuestionText;
std::string Section; std::string Section;

View File

@@ -140,7 +140,7 @@ Result<std::vector<Token>> TokenizeMdem(const std::string& fileRunes) {
switch (c) { switch (c) {
case '[': case '[':
makeTokenWithTokenBuffer( makeTokenWithTokenBuffer(
TokenType::IdentifierStart, TokenType::CooldownStart,
1, 1,
TokenType::TextFragment TokenType::TextFragment
); );
@@ -159,45 +159,15 @@ Result<std::vector<Token>> TokenizeMdem(const std::string& fileRunes) {
}; };
} }
makeTokenWithTokenBuffer( makeTokenWithTokenBuffer(
TokenType::IdentifierEnd, TokenType::CooldownEnd,
1, 1,
TokenType::Identifier TokenType::Cooldown
); );
previousRow = row; previousRow = row;
previousColumn = column; previousColumn = column;
textStarted = false; textStarted = false;
identifierStarted = false; identifierStarted = false;
break; break;
case '#':
makeTokenWithTokenBuffer(
TokenType::SectionIdentifierStart,
1,
TokenType::TextFragment
);
previousRow = row;
previousColumn = column;
textStarted = false;
break;
case '{':
makeTokenWithTokenBuffer(
TokenType::SectionStart,
1,
TokenType::Identifier
);
previousRow = row;
previousColumn = column;
textStarted = false;
break;
case '}':
makeTokenWithTokenBuffer(
TokenType::SectionEnd,
1,
TokenType::TextFragment
);
previousRow = row;
previousColumn = column;
textStarted = false;
break;
case '-': case '-':
makeTokenWithTokenBuffer( makeTokenWithTokenBuffer(
TokenType::ElementDashStart, TokenType::ElementDashStart,
@@ -288,12 +258,9 @@ std::string Token::ToString(const TokenType* ttype) {
case TokenType::ElementDashStart: return "dash element start"; case TokenType::ElementDashStart: return "dash element start";
case TokenType::ElementOrderModifier: return "order element modifier"; case TokenType::ElementOrderModifier: return "order element modifier";
case TokenType::ElementPlusStart: return "plus element start"; case TokenType::ElementPlusStart: return "plus element start";
case TokenType::Identifier: return "identifier"; case TokenType::Cooldown: return "cooldown";
case TokenType::IdentifierStart: return "start of identifier"; case TokenType::CooldownStart: return "start of cooldown";
case TokenType::IdentifierEnd: return "end of identifier"; case TokenType::CooldownEnd: return "end of cooldown";
case TokenType::SectionIdentifierStart: return "section identifier start";
case TokenType::SectionStart: return "start of section";
case TokenType::SectionEnd: return "end of section";
case TokenType::EndOfFile: return "end of file"; case TokenType::EndOfFile: return "end of file";
default: return "unrecognized token"; default: return "unrecognized token";
} }

View File

@@ -35,7 +35,7 @@ std::string MultiElementQuestion::ToString() const {
return std::format( return std::format(
"<Multiple element>\nsection:{}\nid:{}\n{}\n{}", "<Multiple element>\nsection:{}\nid:{}\n{}\n{}",
Section, Section,
ID, Cooldown,
QuestionText, QuestionText,
ss.str() ss.str()
); );
@@ -53,7 +53,7 @@ std::string GroupQuestion::ToString() const {
return std::format( return std::format(
"<GroupQuestion>\nsection:{}\nid:{}\n{}\n{}", "<GroupQuestion>\nsection:{}\nid:{}\n{}\n{}",
Section, Section,
ID, Cooldown,
QuestionText, QuestionText,
ss.str() ss.str()
); );
@@ -74,10 +74,7 @@ std::map<TokenType, std::vector<TokenType>> parserAutomata() {
TokenType::ElementDashStart, TokenType::ElementDashStart,
TokenType::ElementPlusStart, TokenType::ElementPlusStart,
TokenType::MatchGroupEnd, TokenType::MatchGroupEnd,
TokenType::SectionIdentifierStart,
TokenType::SectionStart,
TokenType::EndOfFile, TokenType::EndOfFile,
TokenType::SectionEnd
}; };
automata[TokenType::MatchGroupEnd] = { automata[TokenType::MatchGroupEnd] = {
TokenType::ElementDashStart TokenType::ElementDashStart
@@ -87,7 +84,7 @@ std::map<TokenType, std::vector<TokenType>> parserAutomata() {
TokenType::ElementPlusStart TokenType::ElementPlusStart
}; };
automata[TokenType::ElementDashStart] = { automata[TokenType::ElementDashStart] = {
TokenType::IdentifierStart, TokenType::CooldownStart,
TokenType::TextFragment, TokenType::TextFragment,
TokenType::ElementOrderModifier TokenType::ElementOrderModifier
}; };
@@ -97,32 +94,17 @@ std::map<TokenType, std::vector<TokenType>> parserAutomata() {
automata[TokenType::ElementPlusStart] = { automata[TokenType::ElementPlusStart] = {
TokenType::TextFragment TokenType::TextFragment
}; };
automata[TokenType::Identifier] = { automata[TokenType::Cooldown] = {
TokenType::IdentifierEnd, TokenType::CooldownEnd,
TokenType::SectionStart
}; };
automata[TokenType::IdentifierStart] = { automata[TokenType::CooldownStart] = {
TokenType::Identifier TokenType::Cooldown
}; };
automata[TokenType::IdentifierEnd] = { automata[TokenType::CooldownEnd] = {
TokenType::TextFragment TokenType::TextFragment
}; };
automata[TokenType::SectionIdentifierStart] = {
TokenType::Identifier
};
automata[TokenType::SectionStart] = {
TokenType::ElementDashStart,
TokenType::SectionIdentifierStart,
TokenType::EndOfFile
};
automata[TokenType::SectionEnd] = {
TokenType::SectionIdentifierStart,
TokenType::ElementDashStart,
TokenType::EndOfFile
};
automata[TokenType::SOF] = { automata[TokenType::SOF] = {
TokenType::ElementDashStart, TokenType::ElementDashStart,
TokenType::SectionIdentifierStart,
TokenType::EndOfFile TokenType::EndOfFile
}; };
automata[TokenType::EndOfFile] = {}; automata[TokenType::EndOfFile] = {};
@@ -182,7 +164,8 @@ Result<std::vector<Question*>> ParseQuestions(const std::vector<Token>& tokens)
while (i < tokens.size()) { while (i < tokens.size()) {
if (tokens[i].tokenType == TokenType::ElementDashStart) { if (tokens[i].tokenType == TokenType::ElementDashStart) {
std::string id, questionText; double cooldown;
std::string questionText;
std::vector<QuestionElement> questionElements; std::vector<QuestionElement> questionElements;
bool isOrderQuestion = false; bool isOrderQuestion = false;
bool isGroupQuestion = false; bool isGroupQuestion = false;
@@ -201,12 +184,21 @@ Result<std::vector<Question*>> ParseQuestions(const std::vector<Token>& tokens)
tokens[i + 1].column tokens[i + 1].column
}; };
} }
if (isInBounds(i + 1) && tokens[i + 1].tokenType == TokenType::IdentifierStart) { if (isInBounds(i + 1) && tokens[i + 1].tokenType == TokenType::CooldownStart) {
id = tokens[i + 2].content; try {
cooldown = std::stod(tokens[i + 2].content);
} catch (std::exception e) {
return {
questions,
"error parsing cooldown",
tokens[i + 1].row,
tokens[i + 1].column
};
}
questionText = tokens[i + 4].content; questionText = tokens[i + 4].content;
i += 6; i += 6;
} else { } else {
id = ""; cooldown = 0;
questionText = tokens[i + 1].content; questionText = tokens[i + 1].content;
i += 3; i += 3;
} }
@@ -214,22 +206,14 @@ Result<std::vector<Question*>> ParseQuestions(const std::vector<Token>& tokens)
// Parse elements of a question. // Parse elements of a question.
while (isInBounds(i)) { while (isInBounds(i)) {
// Handle other constructs.
if (tokens[i].tokenType == TokenType::SectionIdentifierStart) {
break;
}
if (tokens[i].tokenType == TokenType::SectionEnd) {
break;
}
// Check question end. // Check question end.
if (isInBounds(i + 3) && tokens[i].tokenType == TokenType::ElementDashStart) { if (isInBounds(i + 3) && tokens[i].tokenType == TokenType::ElementDashStart) {
// Distance to the possible question end. // Distance to the possible question end.
size_t offset; size_t offset;
if (tokens[i + 1].tokenType == TokenType::ElementOrderModifier) { if (tokens[i + 1].tokenType == TokenType::ElementOrderModifier) {
offset = tokens[i + 2].tokenType == TokenType::IdentifierStart ? 6 : 3; offset = tokens[i + 2].tokenType == TokenType::CooldownStart ? 6 : 3;
} else { } else {
offset = tokens[i + 1].tokenType == TokenType::IdentifierStart ? 5 : 2; offset = tokens[i + 1].tokenType == TokenType::CooldownStart ? 5 : 2;
} }
if (isInBounds(i + offset) && tokens[i + offset].tokenType == TokenType::QuestionEnd) { if (isInBounds(i + offset) && tokens[i + offset].tokenType == TokenType::QuestionEnd) {
break; break;
@@ -315,7 +299,7 @@ Result<std::vector<Question*>> ParseQuestions(const std::vector<Token>& tokens)
if (questionElements.size() > 0) { if (questionElements.size() > 0) {
if (isGroupQuestion) { if (isGroupQuestion) {
auto *question = new GroupQuestion(); auto *question = new GroupQuestion();
question->ID = id; question->Cooldown = cooldown;
question->QuestionText = questionText; question->QuestionText = questionText;
question->Section = section; question->Section = section;
int32_t k = -1; int32_t k = -1;
@@ -342,7 +326,7 @@ Result<std::vector<Question*>> ParseQuestions(const std::vector<Token>& tokens)
} }
} else { } else {
auto *question = new MultiElementQuestion(); auto *question = new MultiElementQuestion();
question->ID = id; question->Cooldown = cooldown;
question->QuestionText = cleanContent(questionText); question->QuestionText = cleanContent(questionText);
question->Section = section; question->Section = section;
for (const auto& elem : questionElements) { for (const auto& elem : questionElements) {
@@ -364,34 +348,6 @@ Result<std::vector<Question*>> ParseQuestions(const std::vector<Token>& tokens)
} }
} }
} }
} else if (tokens[i].tokenType == TokenType::SectionIdentifierStart) {
if (section.length() > 0) {
return {
questions,
std::format("Section \"{}\" is not closed, cannot open other section", section),
tokens[i].row,
tokens[i].column
};
}
section = tokens[i + 1].content;
i += 3;
if (debug) {
std::cout << "Started section: " << section << "\n";
}
} else if (tokens[i].tokenType == TokenType::SectionEnd) {
if (section.length() <= 0) {
return {
questions,
std::format("Cannot close section, no section was opened", section),
tokens[i].row,
tokens[i].column
};
}
section.clear();
i += 1;
if (debug) {
std::cout << "Section ended.\n";
}
} else if (tokens[i].tokenType == TokenType::EndOfFile) { } else if (tokens[i].tokenType == TokenType::EndOfFile) {
if (debug) { if (debug) {
std::cout << "File terminated: EndOfFile\n"; std::cout << "File terminated: EndOfFile\n";