From e726489619e231c06a981bc6b00c6e410bbc7af0 Mon Sep 17 00:00:00 2001 From: jorenchik Date: Sun, 4 Aug 2024 09:36:44 +0300 Subject: [PATCH] Removed grammar errors from lexer and unused structures/vars --- src/compiler/compiler.go | 151 +++++++++++++++------------------------ 1 file changed, 58 insertions(+), 93 deletions(-) diff --git a/src/compiler/compiler.go b/src/compiler/compiler.go index ac8dbbc..7e8a79e 100644 --- a/src/compiler/compiler.go +++ b/src/compiler/compiler.go @@ -1,7 +1,5 @@ package main -// TODO previous token start and end - import ( "fmt" "log" @@ -11,6 +9,11 @@ import ( var tokens []Token var buffer []rune +var row int32 = 1 +var column int32 = 1 +var previousRow int32 = -1 +var previousColumn int32 = -1 +var textStarted bool = false type TokenType int const ( @@ -23,6 +26,7 @@ const ( IdentifierEnd SectionStart SectionEnd + SOF EOF ) @@ -33,55 +37,12 @@ type Token struct { column int32; } -type Flashcard struct { - question string; - answer string; -} - -func toString (ttype *TokenType) string { - switch *ttype { - case TextFragment: - return "TextFragment" - case QuestionEnd: - return "QuestionEnd" - case ElementDashStart: - return "ElementDashStart" - case ElementPlusStart: - return "ElementPlusStart" - case Identifier: - return "Identifier" - case IdentifierStart: - return "IdentifierStart" - case IdentifierEnd: - return "IdentifierEnd" - case SectionStart: - return "SectionStart" - case SectionEnd: - return "SectionEnd" - case EOF: - return "EndOfFile" - default: - return "NOT_DEFINED" - } -} - type LexingErr struct { message string; row int32; column int32; } -func (e LexingErr) Error() string { - return fmt.Sprintf("%d:%d - %s", e.row, e.column, e.message) -} - -var row int32 = 1 -var column int32 = 1 -var previous_row int32 = -1 -var previous_col int32 = -1 -var can_have_text bool = false -var textStarted bool = false - func makePostTextToken(ttype TokenType, tokenLen int32) { if (len(strings.Trim(string(buffer), " \n")) - 1 > 0) { 
textFragment := []rune{} @@ -94,8 +55,8 @@ func makePostTextToken(ttype TokenType, tokenLen int32) { Token{ tokenType: TextFragment, content: string(textFragment), - row: int32(previous_row), - column: int32(previous_col), + row: int32(previousRow), + column: int32(previousColumn), }, ) } @@ -108,8 +69,8 @@ func makePostTextToken(ttype TokenType, tokenLen int32) { column: int32(column), }, ) - previous_row = row - previous_col = column + previousRow = row + previousColumn = column buffer = []rune{} } @@ -118,7 +79,6 @@ func tokenize(runes []rune) error { buffer = []rune{} for i := 0; i < len(runes); i++ { - // TODO previous token start and end c := runes[i] if (c == '\n') { @@ -128,10 +88,10 @@ func tokenize(runes []rune) error { buffer = append(buffer, c) if !textStarted { if c == '\n' { - previous_row += 1 - previous_col = 1 + previousRow += 1 + previousColumn = 1 } else if (c == ' ') { - previous_col += 1 + previousColumn += 1 } else { textStarted = true } @@ -142,23 +102,15 @@ func tokenize(runes []rune) error { lastTwo := buffer[len(trimmedBuffer) - 1:] switch string(lastTwo) { case "|>": - if (len(trimmedBuffer) - 2 > 0 && !can_have_text) { - return LexingErr{"Text cannot be here", previous_row, previous_col} - } makePostTextToken(SectionStart, 2) - previous_row = row - previous_col = column - can_have_text = true + previousRow = row + previousColumn = column textStarted = false continue case "<|": - if (len(trimmedBuffer) - 2 > 0 && !can_have_text) { - return LexingErr{"Text cannot be here", previous_row, previous_col} - } makePostTextToken(SectionEnd, 2) - previous_row = row - previous_col = column - can_have_text = false + previousRow = row + previousColumn = column textStarted = false continue } @@ -166,9 +118,6 @@ func tokenize(runes []rune) error { switch c { case ']': - if (len(trimmedBuffer) - 1 > 0 && !can_have_text) { - return LexingErr{"Text cannot be here", row, column} - } tokens = append( tokens, Token{ @@ -190,11 +139,10 @@ func tokenize(runes 
[]rune) error { Token{ tokenType: Identifier, content: string(textFragment), - row: int32(previous_row), - column: int32(previous_col), + row: int32(previousRow), + column: int32(previousColumn), }, ) - can_have_text = true } tokens = append( tokens, Token{ @@ -205,36 +153,24 @@ func tokenize(runes []rune) error { column: int32(column), }, ) - previous_row = row - previous_col = column - fmt.Printf("--> %d:%d\n", row, column) + previousRow = row + previousColumn = column textStarted = false buffer = []rune{} case '+': - if (len(trimmedBuffer) - 1 > 0 && !can_have_text) { - return LexingErr{"Text cannot be here", previous_row, previous_col} - } makePostTextToken(ElementPlusStart, 1) - previous_row = row - previous_col = column - can_have_text = true + previousRow = row + previousColumn = column textStarted = false case '-': - if (len(trimmedBuffer) - 1 > 0 && !can_have_text) { - return LexingErr{"Text cannot be here", previous_row, previous_col} - } makePostTextToken(ElementDashStart, 1) - previous_row = row - previous_col = column - can_have_text = true + previousRow = row + previousColumn = column textStarted = false case '>': - if (len(trimmedBuffer) - 1 > 0 && !can_have_text) { - return LexingErr{"Text cannot be here", previous_row, previous_col} - } makePostTextToken(QuestionEnd, 1) - previous_row = row - previous_col = column - can_have_text = false + previousRow = row + previousColumn = column } column += 1 } @@ -242,8 +178,6 @@ func tokenize(runes []rune) error { return nil } -var fileRunes []rune - func main() { log.Println("Compilation started") @@ -277,3 +211,34 @@ func main() { log.Println("Compilation completed") } + +func toString (ttype *TokenType) string { + switch *ttype { + case TextFragment: + return "TextFragment" + case QuestionEnd: + return "QuestionEnd" + case ElementDashStart: + return "ElementDashStart" + case ElementPlusStart: + return "ElementPlusStart" + case Identifier: + return "Identifier" + case IdentifierStart: + return "IdentifierStart" + 
case IdentifierEnd: + return "IdentifierEnd" + case SectionStart: + return "SectionStart" + case SectionEnd: + return "SectionEnd" + case EOF: + return "EndOfFile" + default: + return "NOT_DEFINED" + } +} + +func (e LexingErr) Error() string { + return fmt.Sprintf("%d:%d - %s", e.row, e.column, e.message) +}