Ditched the sections and identifiers; added cooldown in place of the

identifier
This commit is contained in:
jorenchik
2024-10-05 17:52:22 +03:00
parent 14ee1e91dc
commit 22053041bd
5 changed files with 42 additions and 122 deletions

View File

@@ -12,12 +12,9 @@ enum class TokenType {
ElementDashStart,
ElementPlusStart,
ElementOrderModifier,
Identifier,
IdentifierStart,
IdentifierEnd,
SectionIdentifierStart,
SectionStart,
SectionEnd,
Cooldown,
CooldownStart,
CooldownEnd,
SOF,
EndOfFile
};

View File

@@ -8,9 +8,9 @@
struct Question {
std::string ID;
std::string QuestionText;
std::string Section;
double Cooldown;
std::string QuestionText;
std::string Section;
virtual std::string ToString() const = 0;
virtual ~Question() = default;

View File

@@ -140,7 +140,7 @@ Result<std::vector<Token>> TokenizeMdem(const std::string& fileRunes) {
switch (c) {
case '[':
makeTokenWithTokenBuffer(
TokenType::IdentifierStart,
TokenType::CooldownStart,
1,
TokenType::TextFragment
);
@@ -159,45 +159,15 @@ Result<std::vector<Token>> TokenizeMdem(const std::string& fileRunes) {
};
}
makeTokenWithTokenBuffer(
TokenType::IdentifierEnd,
TokenType::CooldownEnd,
1,
TokenType::Identifier
TokenType::Cooldown
);
previousRow = row;
previousColumn = column;
textStarted = false;
identifierStarted = false;
break;
case '#':
makeTokenWithTokenBuffer(
TokenType::SectionIdentifierStart,
1,
TokenType::TextFragment
);
previousRow = row;
previousColumn = column;
textStarted = false;
break;
case '{':
makeTokenWithTokenBuffer(
TokenType::SectionStart,
1,
TokenType::Identifier
);
previousRow = row;
previousColumn = column;
textStarted = false;
break;
case '}':
makeTokenWithTokenBuffer(
TokenType::SectionEnd,
1,
TokenType::TextFragment
);
previousRow = row;
previousColumn = column;
textStarted = false;
break;
case '-':
makeTokenWithTokenBuffer(
TokenType::ElementDashStart,
@@ -288,12 +258,9 @@ std::string Token::ToString(const TokenType* ttype) {
case TokenType::ElementDashStart: return "dash element start";
case TokenType::ElementOrderModifier: return "order element modifier";
case TokenType::ElementPlusStart: return "plus element start";
case TokenType::Identifier: return "identifier";
case TokenType::IdentifierStart: return "start of identifier";
case TokenType::IdentifierEnd: return "end of identifier";
case TokenType::SectionIdentifierStart: return "section identifier start";
case TokenType::SectionStart: return "start of section";
case TokenType::SectionEnd: return "end of section";
case TokenType::Cooldown: return "cooldown";
case TokenType::CooldownStart: return "start of cooldown";
case TokenType::CooldownEnd: return "end of cooldown";
case TokenType::EndOfFile: return "end of file";
default: return "unrecognized token";
}

View File

@@ -35,7 +35,7 @@ std::string MultiElementQuestion::ToString() const {
return std::format(
"<Multiple element>\nsection:{}\nid:{}\n{}\n{}",
Section,
ID,
Cooldown,
QuestionText,
ss.str()
);
@@ -53,7 +53,7 @@ std::string GroupQuestion::ToString() const {
return std::format(
"<GroupQuestion>\nsection:{}\nid:{}\n{}\n{}",
Section,
ID,
Cooldown,
QuestionText,
ss.str()
);
@@ -74,10 +74,7 @@ std::map<TokenType, std::vector<TokenType>> parserAutomata() {
TokenType::ElementDashStart,
TokenType::ElementPlusStart,
TokenType::MatchGroupEnd,
TokenType::SectionIdentifierStart,
TokenType::SectionStart,
TokenType::EndOfFile,
TokenType::SectionEnd
};
automata[TokenType::MatchGroupEnd] = {
TokenType::ElementDashStart
@@ -87,7 +84,7 @@ std::map<TokenType, std::vector<TokenType>> parserAutomata() {
TokenType::ElementPlusStart
};
automata[TokenType::ElementDashStart] = {
TokenType::IdentifierStart,
TokenType::CooldownStart,
TokenType::TextFragment,
TokenType::ElementOrderModifier
};
@@ -97,32 +94,17 @@ std::map<TokenType, std::vector<TokenType>> parserAutomata() {
automata[TokenType::ElementPlusStart] = {
TokenType::TextFragment
};
automata[TokenType::Identifier] = {
TokenType::IdentifierEnd,
TokenType::SectionStart
automata[TokenType::Cooldown] = {
TokenType::CooldownEnd,
};
automata[TokenType::IdentifierStart] = {
TokenType::Identifier
automata[TokenType::CooldownStart] = {
TokenType::Cooldown
};
automata[TokenType::IdentifierEnd] = {
automata[TokenType::CooldownEnd] = {
TokenType::TextFragment
};
automata[TokenType::SectionIdentifierStart] = {
TokenType::Identifier
};
automata[TokenType::SectionStart] = {
TokenType::ElementDashStart,
TokenType::SectionIdentifierStart,
TokenType::EndOfFile
};
automata[TokenType::SectionEnd] = {
TokenType::SectionIdentifierStart,
TokenType::ElementDashStart,
TokenType::EndOfFile
};
automata[TokenType::SOF] = {
TokenType::ElementDashStart,
TokenType::SectionIdentifierStart,
TokenType::EndOfFile
};
automata[TokenType::EndOfFile] = {};
@@ -182,7 +164,8 @@ Result<std::vector<Question*>> ParseQuestions(const std::vector<Token>& tokens)
while (i < tokens.size()) {
if (tokens[i].tokenType == TokenType::ElementDashStart) {
std::string id, questionText;
double cooldown;
std::string questionText;
std::vector<QuestionElement> questionElements;
bool isOrderQuestion = false;
bool isGroupQuestion = false;
@@ -201,12 +184,21 @@ Result<std::vector<Question*>> ParseQuestions(const std::vector<Token>& tokens)
tokens[i + 1].column
};
}
if (isInBounds(i + 1) && tokens[i + 1].tokenType == TokenType::IdentifierStart) {
id = tokens[i + 2].content;
if (isInBounds(i + 1) && tokens[i + 1].tokenType == TokenType::CooldownStart) {
try {
cooldown = std::stod(tokens[i + 2].content);
} catch (std::exception e) {
return {
questions,
"error parsing cooldown",
tokens[i + 1].row,
tokens[i + 1].column
};
}
questionText = tokens[i + 4].content;
i += 6;
} else {
id = "";
cooldown = 0;
questionText = tokens[i + 1].content;
i += 3;
}
@@ -214,22 +206,14 @@ Result<std::vector<Question*>> ParseQuestions(const std::vector<Token>& tokens)
// Parse elements of a question.
while (isInBounds(i)) {
// Handle other constructs.
if (tokens[i].tokenType == TokenType::SectionIdentifierStart) {
break;
}
if (tokens[i].tokenType == TokenType::SectionEnd) {
break;
}
// Check question end.
if (isInBounds(i + 3) && tokens[i].tokenType == TokenType::ElementDashStart) {
// Distance to the possible question end.
size_t offset;
if (tokens[i + 1].tokenType == TokenType::ElementOrderModifier) {
offset = tokens[i + 2].tokenType == TokenType::IdentifierStart ? 6 : 3;
offset = tokens[i + 2].tokenType == TokenType::CooldownStart ? 6 : 3;
} else {
offset = tokens[i + 1].tokenType == TokenType::IdentifierStart ? 5 : 2;
offset = tokens[i + 1].tokenType == TokenType::CooldownStart ? 5 : 2;
}
if (isInBounds(i + offset) && tokens[i + offset].tokenType == TokenType::QuestionEnd) {
break;
@@ -315,7 +299,7 @@ Result<std::vector<Question*>> ParseQuestions(const std::vector<Token>& tokens)
if (questionElements.size() > 0) {
if (isGroupQuestion) {
auto *question = new GroupQuestion();
question->ID = id;
question->Cooldown = cooldown;
question->QuestionText = questionText;
question->Section = section;
int32_t k = -1;
@@ -342,7 +326,7 @@ Result<std::vector<Question*>> ParseQuestions(const std::vector<Token>& tokens)
}
} else {
auto *question = new MultiElementQuestion();
question->ID = id;
question->Cooldown = cooldown;
question->QuestionText = cleanContent(questionText);
question->Section = section;
for (const auto& elem : questionElements) {
@@ -364,34 +348,6 @@ Result<std::vector<Question*>> ParseQuestions(const std::vector<Token>& tokens)
}
}
}
} else if (tokens[i].tokenType == TokenType::SectionIdentifierStart) {
if (section.length() > 0) {
return {
questions,
std::format("Section \"{}\" is not closed, cannot open other section", section),
tokens[i].row,
tokens[i].column
};
}
section = tokens[i + 1].content;
i += 3;
if (debug) {
std::cout << "Started section: " << section << "\n";
}
} else if (tokens[i].tokenType == TokenType::SectionEnd) {
if (section.length() <= 0) {
return {
questions,
std::format("Cannot close section, no section was opened", section),
tokens[i].row,
tokens[i].column
};
}
section.clear();
i += 1;
if (debug) {
std::cout << "Section ended.\n";
}
} else if (tokens[i].tokenType == TokenType::EndOfFile) {
if (debug) {
std::cout << "File terminated: EndOfFile\n";