Update import paths to use yappsrt from third_party
nicksay committed Jan 23, 2016
1 parent 8c07b23 commit ef45ccb
Showing 9 changed files with 41 additions and 44 deletions.
6 changes: 1 addition & 5 deletions Makefile
@@ -3,12 +3,8 @@
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.

ifndef PYTHONPATH
export PYTHONPATH = ../yapps2
endif

ifndef YAPPS
export YAPPS = ../yapps2/yapps2.py
export YAPPS = third_party/yapps2/yapps2.py
endif

ifndef PYTHON
2 changes: 1 addition & 1 deletion scripts/spitfire-compile
@@ -63,7 +63,7 @@ def init_psyco(options):
psyco.bind(spitfire.compiler.scanner.SpitfireScanner.scan)
import copy
psyco.bind(copy.deepcopy)
import yappsrt
from third_party.yapps2 import yappsrt
psyco.bind(yappsrt.Scanner.token)
import spitfire.compiler.ast
psyco.bind(spitfire.compiler.ast.NodeList.__iter__)
2 changes: 1 addition & 1 deletion setup.py
@@ -30,7 +30,7 @@
"spitfire.compiler.macros",
"spitfire.runtime",
],
py_modules=['yappsrt'],
py_modules=['third_party.yapps2.yappsrt'],
scripts=["scripts/crunner.py",
"scripts/spitfire-compile",
],
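
A note on the packaging line above: listing the runtime by its dotted name keeps installed copies in step with the new import sites. A quick, hedged sanity check, assuming the usual "python setup.py install" has been run; only importlib (standard library) and the module name from this diff are used:

    import importlib

    # The vendored runtime should now resolve by its dotted name rather than
    # as a top-level "yappsrt" module.
    mod = importlib.import_module('third_party.yapps2.yappsrt')
    print mod.__file__  # expected to point at the installed yappsrt.py
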
4 changes: 2 additions & 2 deletions spitfire/compiler/scanner.py
@@ -3,7 +3,7 @@
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.

import yappsrt
from third_party.yapps2 import yappsrt

import spitfire.compiler.parser

@@ -32,7 +32,7 @@ def token(self, i, restrict=0):
elif not restrict and not restriction:
return self.tokens[i]
raise yappsrt.NoMoreTokens(i, len(self.tokens), self.tokens[i], restrict, self.restrictions[i], self.tokens)

def scan(self, restrict):
"""Should scan another token and add it to the list, self.tokens,
and add the restriction to self.restrictions"""
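
For readers following the hunk above: token() raises the patched yappsrt.NoMoreTokens when no scanned token satisfies the restriction. A hedged illustration of calling it defensively; the helper name is made up, while the import path, the token() signature, and the exception class are taken from this diff:

    from third_party.yapps2 import yappsrt

    def next_token_or_none(scanner, i, restrict=0):
        # Return the i-th scanned token, or None once the scanner has no
        # further tokens for the given restriction set.
        try:
            return scanner.token(i, restrict)
        except yappsrt.NoMoreTokens:
            return None
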
2 changes: 1 addition & 1 deletion spitfire/compiler/tests/test_parser.py
@@ -7,7 +7,7 @@
from spitfire.compiler.ast import *
from spitfire.compiler import util
from spitfire.compiler import walker
import yappsrt
from third_party.yapps2 import yappsrt


class BaseTest(unittest.TestCase):
1 change: 1 addition & 0 deletions third_party/README
@@ -12,3 +12,4 @@ Modifications:
- Removed all files except LICENSE, yapps2.py, and yappsrt.py
- Clarified copyright in LICENSE
- Patched yappsrt.py to improve error handling and token position tracking
- Patched yapps2.py to modify generated import paths
Empty file added third_party/__init__.py
Empty file added third_party/yapps2/__init__.py
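
The two empty __init__.py files are what let the dotted imports in this commit resolve: they make third_party and third_party.yapps2 regular packages, which is also why the Makefile no longer needs a PYTHONPATH pointing at a sibling ../yapps2 checkout. A minimal sketch, assuming the repository root is on sys.path:

    from third_party.yapps2 import yappsrt

    # Names used elsewhere in this commit resolve through the package path:
    print yappsrt.NoMoreTokens   # raised by spitfire/compiler/scanner.py
    print yappsrt.Scanner.token  # bound by scripts/spitfire-compile for psyco
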
68 changes: 34 additions & 34 deletions third_party/yapps2/yapps2.py
@@ -39,7 +39,7 @@ def __init__(self, name, options, tokens, rules):
self.options = options
self.preparser = ''
self.postparser = None

self.tokens = {} # Map from tokens to regexps
self.ignore = [] # List of token names to ignore in parsing
self.terminals = [] # List of token names (to maintain ordering)
@@ -51,24 +51,24 @@ def __init__(self, name, options, tokens, rules):
print 'Warning: token', n, 'multiply defined.'
self.tokens[n] = t
self.terminals.append(n)

self.rules = {} # Map from rule names to parser nodes
self.params = {} # Map from rule names to parameters
self.goals = [] # List of rule names (to maintain ordering)
for n,p,r in rules:
self.params[n] = p
self.rules[n] = r
self.goals.append(n)

self.output = sys.stdout

def __getitem__(self, name):
# Get options
return self.options.get(name, 0)

def non_ignored_tokens(self):
return filter(lambda x, i=self.ignore: x not in i, self.terminals)

def changed(self):
self.change_count = 1+self.change_count

@@ -77,13 +77,13 @@ def subset(self, a, b):
for x in a:
if x not in b: return 0
return 1

def equal_set(self, a, b):
"See if a and b have the same elements"
if len(a) != len(b): return 0
if a == b: return 1
return self.subset(a, b) and self.subset(b, a)

def add_to(self, parent, additions):
"Modify parent to include all elements in additions"
for x in additions:
@@ -107,7 +107,7 @@ def in_test(self, x, full, b):
not_b = filter(lambda x, b=b: x not in b, full)
return self.not_in_test(x, full, not_b)
return '%s in %s' % (x, repr(b))

def not_in_test(self, x, full, b):
if not b: return '1'
if len(b) == 1: return '%s != %s' % (x, repr(b[0]))
@@ -118,7 +118,7 @@ def peek_call(self, a):
if self.equal_set(a, self.non_ignored_tokens()): a_set = ''
if self['context-insensitive-scanner']: a_set = ''
return 'self._peek(%s)' % a_set

def peek_test(self, a, b):
if self.subset(a, b): return '1'
if self['context-insensitive-scanner']: a = self.non_ignored_tokens()
@@ -159,14 +159,14 @@ def dump_information(self):
print ' FIRST:', join(top.first+eps, ', ')
print ' FOLLOW:', join(top.follow, ', ')
for x in top.get_children(): queue.append(x)

def generate_output(self):
self.calculate()
self.write(self.preparser)
# TODO: remove "import *" construct
self.write("from string import *\n")
self.write("import re\n")
self.write("from yappsrt import *\n")
self.write("from third_party.yapps2.yappsrt import *\n")
self.write("\n")
self.write("class ", self.name, "Scanner(Scanner):\n")
self.write(" patterns = [\n")
Expand All @@ -178,7 +178,7 @@ def generate_output(self):
self.write(" Scanner.__init__(self,None,%s,str)\n" %
repr(self.ignore))
self.write("\n")

self.write("class ", self.name, "(Parser):\n")
for r in self.goals:
self.write(INDENT, "def ", r, "(self")
@@ -212,7 +212,7 @@ def __init__(self):
self.follow = []
self.accepts_epsilon = 0
self.rule = '?'

def setup(self, gen, rule):
# Setup will change accepts_epsilon,
# sometimes from 0 to 1 but never 1 to 0.
@@ -226,18 +226,18 @@ def used(self, vars):
def get_children(self):
"Return a list of sub-nodes"
return []

def __repr__(self):
return str(self)

def update(self, gen):
if self.accepts_epsilon:
gen.add_to(self.first, self.follow)

def output(self, gen, indent):
"Write out code to _gen_ with _indent_:string indentation"
gen.write(indent, "assert 0 # Invalid parser node\n")

class Terminal(Node):
def __init__(self, token):
Node.__init__(self)
@@ -258,7 +258,7 @@ def output(self, gen, indent):
if re.match('[a-zA-Z_][a-zA-Z_0-9]*$', self.token):
gen.write(self.token, " = ")
gen.write("self._scan(%s)\n" % repr(self.token))

class Eval(Node):
def __init__(self, expr):
Node.__init__(self)
@@ -275,7 +275,7 @@ def __str__(self):

def output(self, gen, indent):
gen.write(indent, strip(self.expr), '\n')

class NonTerminal(Node):
def __init__(self, name, args):
Node.__init__(self)
@@ -292,7 +292,7 @@ def setup(self, gen, rule):
except KeyError: # Oops, it's nonexistent
print 'Error: no rule <%s>' % self.name
self.target = self

def __str__(self):
return '<%s>' % self.name

@@ -305,7 +305,7 @@ def output(self, gen, indent):
gen.write(indent)
gen.write(self.name, " = ")
gen.write("self.", self.name, "(", self.args, ")\n")

class Sequence(Node):
def __init__(self, *children):
Node.__init__(self)
@@ -314,7 +314,7 @@ def __init__(self, *children):
def setup(self, gen, rule):
Node.setup(self, gen, rule)
for c in self.children: c.setup(gen, rule)

if not self.accepts_epsilon:
# If it's not already accepting epsilon, it might now do so.
for c in self.children:
@@ -326,7 +326,7 @@ def setup(self, gen, rule):

def get_children(self):
return self.children

def __str__(self):
return '( %s )' % join(map(lambda x: str(x), self.children))

@@ -338,10 +338,10 @@ def update(self, gen):
empty = 1
for g_i in range(len(self.children)):
g = self.children[g_i]

if empty: gen.add_to(self.first, g.first)
if not g.accepts_epsilon: empty = 0

if g_i == len(self.children)-1:
next = self.follow
else:
@@ -358,7 +358,7 @@ def output(self, gen, indent):
else:
# Placeholder for empty sequences, just in case
gen.write(indent, 'pass\n')

class Choice(Node):
def __init__(self, *children):
Node.__init__(self)
@@ -367,7 +367,7 @@ def __init__(self, *children):
def setup(self, gen, rule):
Node.setup(self, gen, rule)
for c in self.children: c.setup(gen, rule)

if not self.accepts_epsilon:
for c in self.children:
if c.accepts_epsilon:
@@ -376,7 +376,7 @@ def setup(self, gen, rule):

def get_children(self):
return self.children

def __str__(self):
return '( %s )' % join(map(lambda x: str(x), self.children), ' | ')

@@ -418,7 +418,7 @@ def output(self, gen, indent):
print 'Warning:', self
print ' * These tokens are being ignored:', join(removed, ', ')
print ' due to previous choices using them.'

if testset:
if not tokens_unseen: # context sensitive scanners only!
if test == 'if':
@@ -455,7 +455,7 @@ def setup(self, gen, rule):

def get_children(self):
return [self.child]

def update(self, gen):
Node.update(self, gen)
self.child.update(gen)
@@ -478,7 +478,7 @@ def output(self, gen, indent):
gen.write(indent, "if %s:\n" %
gen.peek_test(self.first, self.child.first))
self.child.output(gen, indent+INDENT)

class Plus(Wrapper):
def setup(self, gen, rule):
Wrapper.setup(self, gen, rule)
@@ -721,10 +721,10 @@ def generate(inputfilename, outputfilename='', dump=0, **flags):
outputfilename = inputfilename[:-2]+'.py'
else:
raise Exception("Missing output filename")

print 'Input Grammar:', inputfilename
print 'Output File:', outputfilename

DIVIDER = '\n%%\n' # This pattern separates the pre/post parsers
preparser, postparser = None, None # Code before and after the parser desc

@@ -742,7 +742,7 @@ def generate(inputfilename, outputfilename='', dump=0, **flags):
# Create the parser and scanner
p = ParserDescription(ParserDescriptionScanner(s))
if not p: return

# Now parse the file
t = wrap_error_reporter(p, 'Parser')
if not t: return # Error
@@ -757,7 +757,7 @@ def generate(inputfilename, outputfilename='', dump=0, **flags):
print 'Warning: unrecognized option', f
# Add command line options to the set
for f in flags.keys(): t.options[f] = flags[f]

# Generate the output
if dump:
t.dump_information()
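
The generate_output() change in this file is the user-visible half of the commit: regenerated parsers now import the vendored runtime. A rough sketch of the header that would be emitted for a hypothetical grammar named Example, reconstructed from the write() calls shown above; token patterns and rule methods are elided:

    from string import *
    import re
    from third_party.yapps2.yappsrt import *

    class ExampleScanner(Scanner):
        patterns = [
            # (token name, compiled regular expression) pairs, one per token
        ]
        def __init__(self, str):
            Scanner.__init__(self, None, [], str)

    class Example(Parser):
        # one method per grammar rule is generated here
        pass
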
