Text to tokens Go library.
package main
import (
"fmt"
"strings"
"github.com/wmentor/tokens"
)
func main() {
txt := "Hello, my little friend!"
tokenizer := tokens.New(strings.NewReader(txt))
for {
tok, err := range tokenizer.Token()
if err != nil { // io.EOF
break
}
fmt.Println(tok)
}
}
Result:
hello
,
my
little
friend
!
Case sensitive mode:
package main
import (
"fmt"
"strings"
"github.com/wmentor/tokens"
)
func main() {
txt := "Hello, my little friend!"
tokenizer := tokens.New(strings.NewReader(txt), tokens.WithCaseSensitive())
for {
tok, err := range tokenizer.Token()
if err != nil { // io.EOF
break
}
fmt.Println(tok)
}
}
Result:
Hello
,
my
little
friend
!