Fix grammar errors in the lexer and remove unused structures/variables

This commit is contained in:
jorenchik
2024-08-04 09:36:44 +03:00
parent d78b48ecb3
commit e726489619

View File

@@ -1,7 +1,5 @@
package main package main
// TODO previous token start and end
import ( import (
"fmt" "fmt"
"log" "log"
@@ -11,6 +9,11 @@ import (
var tokens []Token var tokens []Token
var buffer []rune var buffer []rune
var row int32 = 1
var column int32 = 1
var previousRow int32 = -1
var previousColumn int32 = -1
var textStarted bool = false
type TokenType int type TokenType int
const ( const (
@@ -23,6 +26,7 @@ const (
IdentifierEnd IdentifierEnd
SectionStart SectionStart
SectionEnd SectionEnd
SOF
EOF EOF
) )
@@ -33,55 +37,12 @@ type Token struct {
column int32; column int32;
} }
type Flashcard struct {
question string;
answer string;
}
func toString (ttype *TokenType) string {
switch *ttype {
case TextFragment:
return "TextFragment"
case QuestionEnd:
return "QuestionEnd"
case ElementDashStart:
return "ElementDashStart"
case ElementPlusStart:
return "ElementPlusStart"
case Identifier:
return "Identifier"
case IdentifierStart:
return "IdentifierStart"
case IdentifierEnd:
return "IdentifierEnd"
case SectionStart:
return "SectionStart"
case SectionEnd:
return "SectionEnd"
case EOF:
return "EndOfFile"
default:
return "NOT_DEFINED"
}
}
type LexingErr struct { type LexingErr struct {
message string; message string;
row int32; row int32;
column int32; column int32;
} }
func (e LexingErr) Error() string {
return fmt.Sprintf("%d:%d - %s", e.row, e.column, e.message)
}
var row int32 = 1
var column int32 = 1
var previous_row int32 = -1
var previous_col int32 = -1
var can_have_text bool = false
var textStarted bool = false
func makePostTextToken(ttype TokenType, tokenLen int32) { func makePostTextToken(ttype TokenType, tokenLen int32) {
if (len(strings.Trim(string(buffer), " \n")) - 1 > 0) { if (len(strings.Trim(string(buffer), " \n")) - 1 > 0) {
textFragment := []rune{} textFragment := []rune{}
@@ -94,8 +55,8 @@ func makePostTextToken(ttype TokenType, tokenLen int32) {
Token{ Token{
tokenType: TextFragment, tokenType: TextFragment,
content: string(textFragment), content: string(textFragment),
row: int32(previous_row), row: int32(previousRow),
column: int32(previous_col), column: int32(previousColumn),
}, },
) )
} }
@@ -108,8 +69,8 @@ func makePostTextToken(ttype TokenType, tokenLen int32) {
column: int32(column), column: int32(column),
}, },
) )
previous_row = row previousRow = row
previous_col = column previousColumn = column
buffer = []rune{} buffer = []rune{}
} }
@@ -118,7 +79,6 @@ func tokenize(runes []rune) error {
buffer = []rune{} buffer = []rune{}
for i := 0; i < len(runes); i++ { for i := 0; i < len(runes); i++ {
// TODO previous token start and end
c := runes[i] c := runes[i]
if (c == '\n') { if (c == '\n') {
@@ -128,10 +88,10 @@ func tokenize(runes []rune) error {
buffer = append(buffer, c) buffer = append(buffer, c)
if !textStarted { if !textStarted {
if c == '\n' { if c == '\n' {
previous_row += 1 previousRow += 1
previous_col = 1 previousColumn = 1
} else if (c == ' ') { } else if (c == ' ') {
previous_col += 1 previousColumn += 1
} else { } else {
textStarted = true textStarted = true
} }
@@ -142,23 +102,15 @@ func tokenize(runes []rune) error {
lastTwo := buffer[len(trimmedBuffer) - 1:] lastTwo := buffer[len(trimmedBuffer) - 1:]
switch string(lastTwo) { switch string(lastTwo) {
case "|>": case "|>":
if (len(trimmedBuffer) - 2 > 0 && !can_have_text) {
return LexingErr{"Text cannot be here", previous_row, previous_col}
}
makePostTextToken(SectionStart, 2) makePostTextToken(SectionStart, 2)
previous_row = row previousRow = row
previous_col = column previousColumn = column
can_have_text = true
textStarted = false textStarted = false
continue continue
case "<|": case "<|":
if (len(trimmedBuffer) - 2 > 0 && !can_have_text) {
return LexingErr{"Text cannot be here", previous_row, previous_col}
}
makePostTextToken(SectionEnd, 2) makePostTextToken(SectionEnd, 2)
previous_row = row previousRow = row
previous_col = column previousColumn = column
can_have_text = false
textStarted = false textStarted = false
continue continue
} }
@@ -166,9 +118,6 @@ func tokenize(runes []rune) error {
switch c { switch c {
case ']': case ']':
if (len(trimmedBuffer) - 1 > 0 && !can_have_text) {
return LexingErr{"Text cannot be here", row, column}
}
tokens = append( tokens = append(
tokens, tokens,
Token{ Token{
@@ -190,11 +139,10 @@ func tokenize(runes []rune) error {
Token{ Token{
tokenType: Identifier, tokenType: Identifier,
content: string(textFragment), content: string(textFragment),
row: int32(previous_row), row: int32(previousRow),
column: int32(previous_col), column: int32(previousColumn),
}, },
) )
can_have_text = true
} }
tokens = append( tokens = append(
tokens, tokens,
@@ -205,36 +153,24 @@ func tokenize(runes []rune) error {
column: int32(column), column: int32(column),
}, },
) )
previous_row = row previousRow = row
previous_col = column previousColumn = column
fmt.Printf("--> %d:%d\n", row, column)
textStarted = false textStarted = false
buffer = []rune{} buffer = []rune{}
case '+': case '+':
if (len(trimmedBuffer) - 1 > 0 && !can_have_text) {
return LexingErr{"Text cannot be here", previous_row, previous_col}
}
makePostTextToken(ElementPlusStart, 1) makePostTextToken(ElementPlusStart, 1)
previous_row = row previousRow = row
previous_col = column previousColumn = column
can_have_text = true
textStarted = false textStarted = false
case '-': case '-':
if (len(trimmedBuffer) - 1 > 0 && !can_have_text) {
return LexingErr{"Text cannot be here", previous_row, previous_col} }
makePostTextToken(ElementDashStart, 1) makePostTextToken(ElementDashStart, 1)
previous_row = row previousRow = row
previous_col = column previousColumn = column
can_have_text = true
textStarted = false textStarted = false
case '>': case '>':
if (len(trimmedBuffer) - 1 > 0 && !can_have_text) {
return LexingErr{"Text cannot be here", previous_row, previous_col}
}
makePostTextToken(QuestionEnd, 1) makePostTextToken(QuestionEnd, 1)
previous_row = row previousRow = row
previous_col = column previousColumn = column
can_have_text = false
} }
column += 1 column += 1
} }
@@ -242,8 +178,6 @@ func tokenize(runes []rune) error {
return nil return nil
} }
var fileRunes []rune
func main() { func main() {
log.Println("Compilation started") log.Println("Compilation started")
@@ -277,3 +211,34 @@ func main() {
log.Println("Compilation completed") log.Println("Compilation completed")
} }
// toString returns the human-readable name of a token type for
// logging/debugging, or "NOT_DEFINED" for any value without a case.
//
// NOTE(review): taking *TokenType is unnecessary for an int-based enum
// (a value receiver-style parameter would do), but the pointer signature
// is kept unchanged for caller compatibility.
func toString(ttype *TokenType) string {
	switch *ttype {
	case TextFragment:
		return "TextFragment"
	case QuestionEnd:
		return "QuestionEnd"
	case ElementDashStart:
		return "ElementDashStart"
	case ElementPlusStart:
		return "ElementPlusStart"
	case Identifier:
		return "Identifier"
	case IdentifierStart:
		return "IdentifierStart"
	case IdentifierEnd:
		return "IdentifierEnd"
	case SectionStart:
		return "SectionStart"
	case SectionEnd:
		return "SectionEnd"
	// Bug fix: SOF was added to the TokenType const block in this commit
	// but had no case here, so it printed as "NOT_DEFINED".
	case SOF:
		return "StartOfFile"
	case EOF:
		return "EndOfFile"
	default:
		return "NOT_DEFINED"
	}
}
// Error satisfies the built-in error interface, rendering the lexing
// error as "row:column - message".
func (e LexingErr) Error() string {
	formatted := fmt.Sprintf("%d:%d - %s", e.row, e.column, e.message)
	return formatted
}