Commit

Refactor Lexer, Parser, Token into packages (#427)
* refactor: lexer, parser, token packages

* refactor: fix tests

* refactor: remove stutter in lexer.NewLexer and parser.NewParser

* refactor: remove dot imports
maaslalani authored Jan 5, 2024
1 parent 7dd24a2 commit 2a1d303
Showing 18 changed files with 1,145 additions and 1,116 deletions.
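The commit message bullets summarize the shape of the refactor: the lexer, parser, and token types move out of package main into dedicated packages, and the constructors are renamed to drop the package-name stutter (lexer.NewLexer becomes lexer.New, parser.NewParser becomes parser.New). The sketch below shows roughly what a call site looks like after this commit; the tape contents and the printed fields are illustrative only and are not taken from the repository.

package main

import (
	"fmt"

	"github.com/charmbracelet/vhs/lexer"
	"github.com/charmbracelet/vhs/parser"
)

func main() {
	tape := `Type "echo hello"
Enter
Sleep 1s`

	// Previously, inside package main: l := NewLexer(tape); p := NewParser(l).
	l := lexer.New(tape)
	p := parser.New(l)

	cmds := p.Parse()
	if errs := p.Errors(); len(errs) > 0 {
		for _, e := range errs {
			fmt.Println(e) // parser.Error carries the offending token and a message.
		}
		return
	}

	for _, cmd := range cmds {
		fmt.Println(cmd.Type, cmd.Options) // Fields visible in the evaluator diff below.
	}
}
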
command.go (227 changes: 86 additions & 141 deletions)

Large diffs are not rendered by default.

command_test.go (6 changes: 4 additions & 2 deletions)
@@ -3,12 +3,14 @@ package main
import (
"reflect"
"testing"

"github.com/charmbracelet/vhs/parser"
)

func TestCommand(t *testing.T) {
const numberOfCommands = 27
if len(CommandTypes) != numberOfCommands {
t.Errorf("Expected %d commands, got %d", numberOfCommands, len(CommandTypes))
if len(parser.CommandTypes) != numberOfCommands {
t.Errorf("Expected %d commands, got %d", numberOfCommands, len(parser.CommandTypes))
}

const numberOfCommandFuncs = 27
draw.go (14 changes: 0 additions & 14 deletions)
@@ -205,20 +205,6 @@ func MakeBorderRadiusMask(width, height, radius int, targetpng string) {
}
}

// Check if a given windowbar type is valid
func isValidWindowBar(windowbar string) bool {
switch windowbar {
case
"",
"Colorful",
"ColorfulRight",
"Rings",
"RingsRight":
return true
}
return false
}

// Make a window bar and save it to a file
func MakeWindowBar(termWidth, termHeight int, opts StyleOptions, file string) {
var err error
error.go (35 changes: 6 additions & 29 deletions)
@@ -4,48 +4,25 @@ import (
"fmt"
"io"
"strings"

"github.com/charmbracelet/vhs/parser"
)

// InvalidSyntaxError is returned when the parser encounters one or more errors.
type InvalidSyntaxError struct {
Errors []ParserError
Errors []parser.Error
}

func (e InvalidSyntaxError) Error() string {
return fmt.Sprintf("parser: %d error(s)", len(e.Errors))
}

// ParserError represents an error with parsing a tape file.
// It tracks the token causing the error and a human readable error message.
type ParserError struct {
Token Token
Msg string
}

// NewError returns a new ParserError with the given token and message.
func NewError(token Token, msg string) ParserError {
return ParserError{
Token: token,
Msg: msg,
}
}

// ErrorColumnOffset is the number of columns that an error should be printed
// to the left to account for the line number.
const ErrorColumnOffset = 5

// String returns a human readable error message printing the token line number
// and message.
func (e ParserError) String() string {
return fmt.Sprintf("%2d:%-2d │ %s", e.Token.Line, e.Token.Column, e.Msg)
}

func (e ParserError) Error() string {
return e.String()
}

// Underline returns a string of ^ characters which helps underline the problematic token
// in a ParserError.
// in a parser.Error.
func Underline(n int) string {
return ErrorStyle.Render(strings.Repeat("^", n))
}
@@ -55,7 +32,7 @@ func LineNumber(line int) string {
return LineNumberStyle.Render(fmt.Sprintf(" %2d │ ", line))
}

func printParserError(out io.Writer, tape string, err ParserError) {
func printError(out io.Writer, tape string, err parser.Error) {
lines := strings.Split(tape, "\n")

fmt.Fprint(out, LineNumber(err.Token.Line))
@@ -70,7 +47,7 @@ func printErrors(out io.Writer, tape string, errs []error) {
switch err := err.(type) {
case InvalidSyntaxError:
for _, v := range err.Errors {
printParserError(out, tape, v)
printError(out, tape, v)
}
fmt.Fprintln(out, ErrorStyle.Render(err.Error()))

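The ParserError type, its NewError constructor, and its String/Error methods are deleted from error.go here, and the surviving code now refers to parser.Error. (The ErrorColumnOffset constant is also removed; its new home is not visible in this diff.) The definition presumably moved into the new parser package in roughly the following form; this is a sketch reconstructed from the deleted lines above, not a copy of the new parser source.

package parser

import (
	"fmt"

	"github.com/charmbracelet/vhs/token"
)

// Error represents an error with parsing a tape file.
// It tracks the token causing the error and a human readable error message.
type Error struct {
	Token token.Token
	Msg   string
}

// NewError returns a new Error with the given token and message.
func NewError(tok token.Token, msg string) Error {
	return Error{Token: tok, Msg: msg}
}

// String returns a human readable error message printing the token line number
// and message.
func (e Error) String() string {
	return fmt.Sprintf("%2d:%-2d │ %s", e.Token.Line, e.Token.Column, e.Msg)
}

func (e Error) Error() string {
	return e.String()
}
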
evaluator.go (36 changes: 20 additions & 16 deletions)
@@ -6,6 +6,10 @@ import (
"io"
"log"
"os"

"github.com/charmbracelet/vhs/lexer"
"github.com/charmbracelet/vhs/parser"
"github.com/charmbracelet/vhs/token"
)

// EvaluatorOption is a function that can be used to modify the VHS instance.
@@ -14,8 +18,8 @@ type EvaluatorOption func(*VHS)
// Evaluate takes as input a tape string, an output writer, and an output file
// and evaluates all the commands within the tape string and produces a GIF.
func Evaluate(ctx context.Context, tape string, out io.Writer, opts ...EvaluatorOption) []error {
l := NewLexer(tape)
p := NewParser(l)
l := lexer.New(tape)
p := parser.New(l)

cmds := p.Parse()
errs := p.Errors()
@@ -25,8 +29,8 @@ func Evaluate(ctx context.Context, tape string, out io.Writer, opts ...Evaluator

v := New()
for _, cmd := range cmds {
if cmd.Type == SET && cmd.Options == "Shell" {
cmd.Execute(&v)
if cmd.Type == token.SET && cmd.Options == "Shell" {
Execute(cmd, &v)
}
}

@@ -39,10 +43,10 @@ func Evaluate(ctx context.Context, tape string, out io.Writer, opts ...Evaluator
// Run Output and Set commands as they only modify options on the VHS instance.
var offset int
for i, cmd := range cmds {
if cmd.Type == SET || cmd.Type == OUTPUT || cmd.Type == REQUIRE {
fmt.Fprintln(out, cmd.Highlight(false))
if cmd.Type == token.SET || cmd.Type == token.OUTPUT || cmd.Type == token.REQUIRE {
fmt.Fprintln(out, Highlight(cmd, false))
if cmd.Options != "Shell" {
cmd.Execute(&v)
Execute(cmd, &v)
}
} else {
offset = i
@@ -77,14 +81,14 @@ func Evaluate(ctx context.Context, tape string, out io.Writer, opts ...Evaluator
// If the first command (after Settings and Outputs) is a Hide command, we can
// begin executing the commands before we start recording to avoid capturing
// any unwanted frames.
if cmds[offset].Type == HIDE {
if cmds[offset].Type == token.HIDE {
for i, cmd := range cmds[offset:] {
if cmd.Type == SHOW {
if cmd.Type == token.SHOW {
offset += i
break
}
fmt.Fprintln(out, cmd.Highlight(true))
cmd.Execute(&v)
fmt.Fprintln(out, Highlight(cmd, true))
Execute(cmd, &v)
}
}

@@ -130,13 +134,13 @@ func Evaluate(ctx context.Context, tape string, out io.Writer, opts ...Evaluator
// GIF as the frame sequence will change dimensions. This is fixable.
//
// We should remove if isSetting statement.
isSetting := cmd.Type == SET && cmd.Options != "TypingSpeed"
if isSetting || cmd.Type == REQUIRE {
fmt.Fprintln(out, cmd.Highlight(true))
isSetting := cmd.Type == token.SET && cmd.Options != "TypingSpeed"
if isSetting || cmd.Type == token.REQUIRE {
fmt.Fprintln(out, Highlight(cmd, true))
continue
}
fmt.Fprintln(out, cmd.Highlight(!v.recording || cmd.Type == SHOW || cmd.Type == HIDE || isSetting))
cmd.Execute(&v)
fmt.Fprintln(out, Highlight(cmd, !v.recording || cmd.Type == token.SHOW || cmd.Type == token.HIDE || isSetting))
Execute(cmd, &v)
}

// If running as an SSH server, the output file is a temporary file
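A side effect of the move is visible throughout the evaluator diff: Command now lives in the parser package while VHS stays in package main, so the old cmd.Execute(&v) and cmd.Highlight(...) method calls become package-level Execute(cmd, &v) and Highlight(cmd, ...) calls (defined in command.go, whose large diff is not rendered above), presumably to avoid an import cycle between parser and main. The self-contained sketch below uses toy stand-in types to show the shape of that change; it is not the actual implementation.

package main

import "fmt"

// Toy stand-ins for parser.Command and the VHS instance.
type Command struct{ Type, Options, Args string }
type VHS struct{ recording bool }

// Before the refactor this was a method, func (c Command) Execute(v *VHS).
// With Command in its own package, a method could not reference *VHS without
// parser importing main, so it becomes a plain function in package main.
func Execute(c Command, v *VHS) {
	fmt.Printf("executing %s %s\n", c.Type, c.Args)
}

// Likewise Highlight(cmd, faint) replaces cmd.Highlight(faint); faint dims
// commands that are not part of the recorded output.
func Highlight(c Command, faint bool) string {
	if faint {
		return fmt.Sprintf("(%s %s)", c.Type, c.Args)
	}
	return fmt.Sprintf("%s %s", c.Type, c.Args)
}

func main() {
	v := VHS{}
	cmd := Command{Type: "Type", Args: "echo hello"}
	fmt.Println(Highlight(cmd, !v.recording))
	Execute(cmd, &v)
}
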
examples/demo.gif (4 changes: 2 additions & 2 deletions)
lexer.go → lexer/lexer.go (42 changes: 22 additions & 20 deletions)
@@ -1,4 +1,6 @@
package main
package lexer

import "github.com/charmbracelet/vhs/token"

// Lexer is a lexer that tokenizes the input.
type Lexer struct {
Expand All @@ -10,8 +12,8 @@ type Lexer struct {
column int
}

// NewLexer returns a new lexer for tokenizing the input string.
func NewLexer(input string) *Lexer {
// New returns a new lexer for tokenizing the input string.
func New(input string) *Lexer {
l := &Lexer{input: input, line: 1, column: 0}
l.readChar()
return l
@@ -26,64 +28,64 @@ func (l *Lexer) readChar() {
}

// NextToken returns the next token in the input.
func (l *Lexer) NextToken() Token {
func (l *Lexer) NextToken() token.Token {
l.skipWhitespace()

var tok = Token{Line: l.line, Column: l.column}
var tok = token.Token{Line: l.line, Column: l.column}

switch l.ch {
case 0:
tok = l.newToken(EOF, l.ch)
tok = l.newToken(token.EOF, l.ch)
case '@':
tok = l.newToken(AT, l.ch)
tok = l.newToken(token.AT, l.ch)
l.readChar()
case '=':
tok = l.newToken(EQUAL, l.ch)
tok = l.newToken(token.EQUAL, l.ch)
l.readChar()
case '%':
tok = l.newToken(PERCENT, l.ch)
tok = l.newToken(token.PERCENT, l.ch)
l.readChar()
case '#':
tok.Type = COMMENT
tok.Type = token.COMMENT
tok.Literal = l.readComment()
case '+':
tok = l.newToken(PLUS, l.ch)
tok = l.newToken(token.PLUS, l.ch)
l.readChar()
case '{':
tok.Type = JSON
tok.Type = token.JSON
tok.Literal = "{" + l.readJSON() + "}"
l.readChar()
case '`':
tok.Type = STRING
tok.Type = token.STRING
tok.Literal = l.readString('`')
l.readChar()
case '\'':
tok.Type = STRING
tok.Type = token.STRING
tok.Literal = l.readString('\'')
l.readChar()
case '"':
tok.Type = STRING
tok.Type = token.STRING
tok.Literal = l.readString('"')
l.readChar()
default:
if isDigit(l.ch) || (isDot(l.ch) && isDigit(l.peekChar())) {
tok.Literal = l.readNumber()
tok.Type = NUMBER
tok.Type = token.NUMBER
} else if isLetter(l.ch) || isDot(l.ch) {
tok.Literal = l.readIdentifier()
tok.Type = LookupIdentifier(tok.Literal)
tok.Type = token.LookupIdentifier(tok.Literal)
} else {
tok = l.newToken(ILLEGAL, l.ch)
tok = l.newToken(token.ILLEGAL, l.ch)
l.readChar()
}
}
return tok
}

// newToken creates a new token with the given type and literal.
func (l *Lexer) newToken(tokenType TokenType, ch byte) Token {
func (l *Lexer) newToken(tokenType token.Type, ch byte) token.Token {
literal := string(ch)
return Token{
return token.Token{
Type: tokenType,
Literal: literal,
Line: l.line,
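The lexer now produces token.Token values and compares against constants from the new token package. token/token.go itself is among the files whose diffs are not rendered here; judging from the usage in this commit it presumably looks roughly like the sketch below. The concrete string values of the constants and the LookupIdentifier fallback are assumptions.

package token

// Type identifies the kind of a lexed token.
type Type string

// Token is a single lexed unit together with its position in the tape file.
type Token struct {
	Type    Type
	Literal string
	Line    int
	Column  int
}

// A subset of the token types referenced in this commit's diffs; the values
// shown here are placeholders.
const (
	AT      Type = "@"
	EQUAL   Type = "="
	PERCENT Type = "%"
	PLUS    Type = "+"
	COMMENT Type = "COMMENT"
	JSON    Type = "JSON"
	STRING  Type = "STRING"
	NUMBER  Type = "NUMBER"
	EOF     Type = "EOF"
	ILLEGAL Type = "ILLEGAL"

	HIDE    Type = "Hide"
	OUTPUT  Type = "Output"
	REQUIRE Type = "Require"
	SET     Type = "Set"
	SHOW    Type = "Show"
)

// keywords maps tape identifiers to their token types (illustrative subset).
var keywords = map[string]Type{
	"Set":     SET,
	"Output":  OUTPUT,
	"Require": REQUIRE,
	"Hide":    HIDE,
	"Show":    SHOW,
}

// LookupIdentifier returns the token type for an identifier, falling back to
// ILLEGAL here; the real implementation may fall back differently.
func LookupIdentifier(ident string) Type {
	if t, ok := keywords[ident]; ok {
		return t
	}
	return ILLEGAL
}
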
(Diffs for the remaining changed files are not rendered here.)
