1
0

removing useless buffer, changing token type to int constants

This commit is contained in:
2021-03-04 17:48:40 +01:00
parent 13a990bb4a
commit caea5f7c25
2 changed files with 18 additions and 14 deletions

View File

@ -5,14 +5,12 @@ import (
"strings" "strings"
) )
// Lexer embeds a temporary buffer to store "content" // Lexer embeds an array of tokens to retrieve them later for building URLs
// (url, description, tags) and an array of tokens
type Lexer struct { type Lexer struct {
Buf string
Tokens []Token Tokens []Token
} }
func (l *Lexer) AddToken(t string, s string) { func (l *Lexer) AddToken(t int, s string) {
l.Tokens = append(l.Tokens, Token{t, s}) l.Tokens = append(l.Tokens, Token{t, s})
} }
@ -40,11 +38,11 @@ func (l *Lexer) ProcessSimpleLink(s string) {
for w := range ss { for w := range ss {
if w == 0 { if w == 0 {
url := strings.TrimSpace(ss[w]) url := strings.TrimSpace(ss[w])
l.AddToken("URL", url) l.AddToken(URL, url)
} else { } else {
if ss[w] != "" && ss[w] != " " { if ss[w] != "" && ss[w] != " " {
tag := strings.ReplaceAll(ss[w], ":", "") tag := strings.ReplaceAll(ss[w], ":", "")
l.AddToken("TAG", tag) l.AddToken(TAG, tag)
} }
} }
} }
@ -60,17 +58,17 @@ func (l Lexer) Process(s string) []Token {
re := regexp.MustCompile(`(?:\[\[)(?P<url>\S+)(?:\]\[)(?P<desc>.+)(?:\]\])(?P<tags>.+)?`) re := regexp.MustCompile(`(?:\[\[)(?P<url>\S+)(?:\]\[)(?P<desc>.+)(?:\]\])(?P<tags>.+)?`)
matches := re.FindStringSubmatch(s) matches := re.FindStringSubmatch(s)
if len(matches) > 1 { if len(matches) > 1 {
l.AddToken("URL", strings.TrimSpace(matches[1])) l.AddToken(URL, strings.TrimSpace(matches[1]))
} }
if len(matches) > 2 { if len(matches) > 2 {
l.AddToken("DESC", strings.TrimSpace(matches[2])) l.AddToken(DESCRIPTION, strings.TrimSpace(matches[2]))
} }
if len(matches) > 3 { if len(matches) > 3 {
tags := strings.Split(matches[3], " ") tags := strings.Split(matches[3], " ")
for t := range tags { for t := range tags {
if tags[t] != "" && tags[t] != " " { if tags[t] != "" && tags[t] != " " {
tag := strings.ReplaceAll(tags[t], ":", "") tag := strings.ReplaceAll(tags[t], ":", "")
l.AddToken("TAG", strings.TrimSpace(tag)) l.AddToken(TAG, strings.TrimSpace(tag))
} }
} }
} }

View File

@ -4,28 +4,34 @@ import (
"fmt" "fmt"
) )
const (
URL = iota
TAG
DESCRIPTION
)
type Token struct { type Token struct {
Type string Type int
Value string Value string
} }
func (t Token) String() string { func (t Token) String() string {
return fmt.Sprintf("%s : '%s'", t.Type, t.Value) return fmt.Sprintf("%v : '%s'", t.Type, t.Value)
} }
func Parse(t []Token) Feed { func Parse(t []Token) Feed {
var f Feed var f Feed
for i := range t { for i := range t {
token := t[i] token := t[i]
if token.Type == "URL" { if token.Type == URL {
f.URL = token.Value f.URL = token.Value
} }
if token.Type == "DESC" { if token.Type == DESCRIPTION {
f.Description = token.Value f.Description = token.Value
} }
if token.Type == "TAG" { if token.Type == TAG {
f.Tags = append(f.Tags, token.Value) f.Tags = append(f.Tags, token.Value)
} }
} }