1
0

more tests for good boy points, split entities into files

This commit is contained in:
2021-03-04 17:30:38 +01:00
parent f2347b8801
commit 13a990bb4a
6 changed files with 83 additions and 53 deletions

View File

@ -1,5 +1,5 @@
build: build:
go build -o bin/org2nb main.go go build -o bin/org2nb main.go feed.go lexer.go token.go
install: install:
cp bin/org2nb ~/.local/bin/ cp bin/org2nb ~/.local/bin/

27
feed.go Normal file
View File

@ -0,0 +1,27 @@
package main
import (
"fmt"
"strings"
)
// Feed is one subscription entry parsed from an org-mode link:
// the target URL plus an optional description and a list of tags.
type Feed struct {
	URL         string
	Description string
	Tags        []string
}

// String renders the feed as a single output line: the URL, followed by
// the tags joined with spaces, followed by " # description" when a
// description is present. Leading/trailing whitespace is trimmed.
func (f Feed) String() string {
	joined := strings.TrimSpace(strings.Join(f.Tags, " "))
	line := fmt.Sprintf("%s %s", f.URL, joined)
	if f.Description != "" {
		line = fmt.Sprintf("%s # %s", strings.TrimSpace(line), f.Description)
	}
	return strings.TrimSpace(line)
}

View File

@ -1,41 +1,10 @@
package main package main
import ( import (
"fmt"
"regexp" "regexp"
"strings" "strings"
) )
type Feed struct {
URL string
Description string
Tags []string
}
// Return the final feed string, depending on either the link has a description, tags or not
func (f Feed) String() string {
var ff string
var tags string = strings.TrimSpace(strings.Join(f.Tags, " "))
ff = fmt.Sprintf("%s %s", f.URL, tags)
if f.Description != "" {
ff = fmt.Sprintf("%s # %s", strings.TrimSpace(ff), f.Description)
}
return strings.TrimSpace(ff)
}
type Token struct {
Type string
Value string
}
func (t Token) String() string {
return fmt.Sprintf("%s : '%s'", t.Type, t.Value)
}
// Lexer embbed a temporary buffer to store "content" // Lexer embbed a temporary buffer to store "content"
// (url, description, tags) and an array of tokens // (url, description, tags) and an array of tokens
type Lexer struct { type Lexer struct {
@ -108,23 +77,3 @@ func (l Lexer) Process(s string) []Token {
return l.Tokens return l.Tokens
} }
func Parse(t []Token) Feed {
var f Feed
for i := range t {
token := t[i]
if token.Type == "URL" {
f.URL = token.Value
}
if token.Type == "DESC" {
f.Description = token.Value
}
if token.Type == "TAG" {
f.Tags = append(f.Tags, token.Value)
}
}
return f
}

View File

@ -51,7 +51,7 @@ func main() {
tokens := lexer.Process(scanner.Text()) tokens := lexer.Process(scanner.Text())
feed := Parse(tokens).String() feed := Parse(tokens).String()
file.WriteString(feed) file.WriteString(feed + "\n")
} }
file.Close() file.Close()

View File

@ -45,6 +45,16 @@ func TestLinkDescTag(t *testing.T) {
} }
} }
// TestLinkDescManyTagsSpaces verifies that a link with a description and
// several space-separated org tags renders with all tags before the
// "# description" suffix.
func TestLinkDescManyTagsSpaces(t *testing.T) {
	message := "** [[https://pleroma.social/announcements/feed.xml][Pleroma Social]] :software: :social: :cofe:"
	expected := "https://pleroma.social/announcements/feed.xml software social cofe # Pleroma Social"
	if result := LexerTestWrapper(message, expected); result != expected {
		LexerTestWrapperFail(expected, result)
		t.Fail()
	}
}
func TestLinkDescNoTag(t *testing.T) { func TestLinkDescNoTag(t *testing.T) {
var message string = "** [[https://pleroma.social/announcements/feed.xml][Pleroma Social]]" var message string = "** [[https://pleroma.social/announcements/feed.xml][Pleroma Social]]"
@ -55,3 +65,13 @@ func TestLinkDescNoTag(t *testing.T) {
t.Fail() t.Fail()
} }
} }
// TestLinkDescWithSymbols verifies that symbols (brackets, asterisks)
// inside the description survive lexing and end up verbatim after the
// "#" separator.
func TestLinkDescWithSymbols(t *testing.T) {
	message := "** [[https://pleroma.social/announcements/feed.xml][Pleroma Social [*Very Cool*]]] :software:"
	expected := "https://pleroma.social/announcements/feed.xml software # Pleroma Social [*Very Cool*]"
	if result := LexerTestWrapper(message, expected); result != expected {
		LexerTestWrapperFail(expected, result)
		t.Fail()
	}
}

34
token.go Normal file
View File

@ -0,0 +1,34 @@
package main
import (
"fmt"
)
// Token is a single lexed unit: a type discriminator ("URL", "DESC" or
// "TAG") and the literal text it covers.
type Token struct {
	Type  string
	Value string
}

// String formats the token as `TYPE : 'value'`, useful for debugging
// the lexer output.
func (t Token) String() string {
	return fmt.Sprintf("%s : '%s'", t.Type, t.Value)
}

// Parse assembles a Feed from a token stream: the last URL token and
// the last DESC token seen win, and every TAG token is appended to the
// tag list in order. Unknown token types are ignored.
func Parse(t []Token) Feed {
	var f Feed
	// Idiomatic range + switch instead of indexing and an if-chain.
	for _, token := range t {
		switch token.Type {
		case "URL":
			f.URL = token.Value
		case "DESC":
			f.Description = token.Value
		case "TAG":
			f.Tags = append(f.Tags, token.Value)
		}
	}
	return f
}