From 42e4d93ae2440134814cc97b832d51a4eb812edb Mon Sep 17 00:00:00 2001 From: jorenchik Date: Sat, 7 Sep 2024 13:37:07 +0300 Subject: [PATCH] output and error handling --- src/compiler/.gitignore | 1 + src/compiler/compiler.go | 31 +++++++------- src/compiler/lexer/lexer.go | 8 ++++ src/compiler/parser/parser.go | 79 ++++++++++++++++++++++------------- 4 files changed, 75 insertions(+), 44 deletions(-) create mode 100644 src/compiler/.gitignore diff --git a/src/compiler/.gitignore b/src/compiler/.gitignore new file mode 100644 index 0000000..86a7c8e --- /dev/null +++ b/src/compiler/.gitignore @@ -0,0 +1 @@ +compiler diff --git a/src/compiler/compiler.go b/src/compiler/compiler.go index d0d59f7..ddee883 100644 --- a/src/compiler/compiler.go +++ b/src/compiler/compiler.go @@ -1,37 +1,38 @@ package main import ( - "fmt" - "log" + "fmt" "os" "github.com/jorenchik/mdemory/src/compiler/parser" + "github.com/jorenchik/mdemory/src/compiler/lexer" ) func main() { - log.Println("Compilation started") + fmt.Println("Compilation started...") file, err := os.ReadFile( "/home/jorenchik/Code/mdemory/src/compiler/input.mdem", ) if (err != nil) { - log.Fatalf( - "Cannot open the input file: %s", + fmt.Printf( + "Cannot open the input file: %s\n", err.Error(), ) return } fileContents := string(file) - questions, err := parser.ParseQuestions(fileContents) - if (err != nil) { - log.Fatal(err.Error()) - } - if (true) { - log.Println("Parser output:") - for _, element := range questions { - fmt.Printf("%s", element.ToString()) - } + tokens, err := lexer.TokenizeMdem([]rune(fileContents)) + if err != nil { + fmt.Println(err.Error()) + return } - log.Println("Compilation completed") + _, err = parser.ParseQuestions(tokens) + if (err != nil) { + fmt.Println(err.Error()) + return + } + + fmt.Println("Compilation completed") } diff --git a/src/compiler/lexer/lexer.go b/src/compiler/lexer/lexer.go index 8fb8ee6..51f4a1d 100644 --- a/src/compiler/lexer/lexer.go +++ b/src/compiler/lexer/lexer.go 
@@ -164,6 +164,14 @@ func TokenizeMdem(fileRunes []rune) ( []Token, error ) { // EmitEOF makeTokenWithTokenBuffer(EOF, 0, TextFragment) + + if true { + fmt.Printf("Lexer output:\n") + for _, el := range tokens { + fmt.Print(el.ToString()) + } + fmt.Printf("End Lexer output\n") + } return tokens, nil } diff --git a/src/compiler/parser/parser.go b/src/compiler/parser/parser.go index cd1b7e3..54d865c 100644 --- a/src/compiler/parser/parser.go +++ b/src/compiler/parser/parser.go @@ -2,9 +2,9 @@ package parser import ( "fmt" - "github.com/jorenchik/mdemory/src/compiler/lexer" - "log" "strings" + + "github.com/jorenchik/mdemory/src/compiler/lexer" ) type Question interface { @@ -32,7 +32,7 @@ type MultipleChoiceQuestion struct { func (question SingleAnswerQuestion) ToString() string { return fmt.Sprintf( - "%20s: section: %-10s id: %-10s %-30s: %-30s\n", + "%20s: section: %-10s id: %-10s %-30s: %-30s", "", question.Section, question.ID, @@ -44,7 +44,7 @@ func (question SingleAnswerQuestion) ToString() string { func (question MultipleChoiceQuestion) ToString() string { acc := "" acc += fmt.Sprintf( - "%20s: section: %-10s id: %-10s %-30s\n", + "%20s: section: %-10s id: %-10s %-30s", "", question.section, question.id, @@ -98,16 +98,19 @@ func parserAutomata() map[lexer.TokenType][]lexer.TokenType { lexer.SectionEnd, } automata[lexer.QuestionEnd] = []lexer.TokenType{ - lexer.ElementDashStart, lexer.ElementPlusStart, + lexer.ElementDashStart, + lexer.ElementPlusStart, } automata[lexer.ElementDashStart] = []lexer.TokenType{ - lexer.IdentifierStart, lexer.TextFragment, + lexer.IdentifierStart, + lexer.TextFragment, } automata[lexer.ElementPlusStart] = []lexer.TokenType{ lexer.TextFragment, } automata[lexer.Identifier] = []lexer.TokenType{ - lexer.IdentifierEnd, lexer.SectionStart, + lexer.IdentifierEnd, + lexer.SectionStart, } automata[lexer.IdentifierStart] = []lexer.TokenType{ lexer.Identifier, @@ -119,13 +122,19 @@ func parserAutomata() map[lexer.TokenType][]lexer.TokenType 
{ lexer.Identifier, } automata[lexer.SectionStart] = []lexer.TokenType{ - lexer.ElementDashStart, lexer.SectionIdentifierStart, lexer.EOF, + lexer.ElementDashStart, + lexer.SectionIdentifierStart, + lexer.EOF, } automata[lexer.SectionEnd] = []lexer.TokenType{ - lexer.SectionIdentifierStart, lexer.ElementDashStart, lexer.EOF, + lexer.SectionIdentifierStart, + lexer.ElementDashStart, + lexer.EOF, } automata[lexer.SOF] = []lexer.TokenType{ - lexer.ElementDashStart, lexer.SectionIdentifierStart, lexer.EOF, + lexer.ElementDashStart, + lexer.SectionIdentifierStart, + lexer.EOF, } automata[lexer.EOF] = []lexer.TokenType{} return automata @@ -151,26 +160,20 @@ func ValidateGrammar(tokens []lexer.Token) error { return nil } -func ParseQuestions(fileContents string) ([]Question, error) { - tokens, err := lexer.TokenizeMdem([]rune(fileContents)) - if err != nil { - return nil, err - } - if true { - log.Println("Lexer output:") - for _, el := range tokens { - fmt.Print(el.ToString()) - } - } +var DEBUG bool = true - err = ValidateGrammar(tokens) +func ParseQuestions(tokens []lexer.Token) ([]Question, error) { + err := ValidateGrammar(tokens) if err != nil { - log.Fatal(err.Error()) + return nil, err } questions := []Question{} section := "" i := 0 + if DEBUG { + fmt.Printf("Parser output:\n") + } for { if i >= len(tokens) { break @@ -212,6 +215,13 @@ func ParseQuestions(fileContents string) ([]Question, error) { tokens[i + offset].TokenType == lexer.QuestionEnd { break } + if offset == 5 && tokens[i + 5].TokenType != lexer.QuestionEnd { + return nil, CompilerErr{ + message: "Cannot have an identifier here", + row: tokens[i].Row, + column: tokens[i].Column, + } + } } if (i + 2 >= len(tokens)) { break; @@ -243,6 +253,9 @@ func ParseQuestions(fileContents string) ([]Question, error) { } question.choices = choices questions = append(questions, question) + if DEBUG { + fmt.Printf("%s", question.ToString()) + } } else if len(questionElements) == 1 { question := SingleAnswerQuestion{ 
ID: id, @@ -253,22 +266,30 @@ func ParseQuestions(fileContents string) ([]Question, error) { question.Section = section } questions = append(questions, question) + if DEBUG { + fmt.Printf("%s\n", question.ToString()) + } } } else if tokens[i].TokenType == lexer.SectionIdentifierStart { - section = tokens[i+1].Content + section = tokens[i + 1].Content i += 3 + if DEBUG { + fmt.Printf("Started section: %s\n", section) + } } else if tokens[i].TokenType == lexer.SectionEnd { section = "" i += 1 + if DEBUG { + fmt.Printf("Section ended: %s\n", section) + } } else if tokens[i].TokenType == lexer.EOF { + if DEBUG { + fmt.Printf("File terminated: EOF\n") + } break } else { - log.Fatalf( - "Not handled: %s", - lexer.ToString(&tokens[i].TokenType), - ) return nil, CompilerErr{ - message: "", + message: "Unexpected token: %s", row: tokens[i].Row, column: tokens[i].Column, }