diff --git a/runtime/Python3/src/antlr4/BufferedTokenStream.py b/runtime/Python3/src/antlr4/BufferedTokenStream.py
index aaa58c63dc..4fe39e262d 100644
--- a/runtime/Python3/src/antlr4/BufferedTokenStream.py
+++ b/runtime/Python3/src/antlr4/BufferedTokenStream.py
@@ -14,8 +14,8 @@
 # {@link Token#HIDDEN_CHANNEL}, use a filtering token stream such a
 # {@link CommonTokenStream}.</p>
 from io import StringIO
-from antlr4.Token import Token
-from antlr4.error.Errors import IllegalStateException
+from .Token import Token
+from .error.Errors import IllegalStateException
 
 # need forward declaration
 Lexer = None
@@ -230,7 +230,7 @@ def getHiddenTokensToRight(self, tokenIndex:int, channel:int=-1):
         self.lazyInit()
         if tokenIndex<0 or tokenIndex>=len(self.tokens):
             raise Exception(str(tokenIndex) + " not in 0.." + str(len(self.tokens)-1))
-        from antlr4.Lexer import Lexer
+        from .Lexer import Lexer
         nextOnChannel = self.nextTokenOnChannel(tokenIndex + 1, Lexer.DEFAULT_TOKEN_CHANNEL)
         from_ = tokenIndex+1
         # if none onchannel to right, nextOnChannel=-1 so set to = last token
@@ -245,7 +245,7 @@ def getHiddenTokensToLeft(self, tokenIndex:int, channel:int=-1):
         self.lazyInit()
         if tokenIndex<0 or tokenIndex>=len(self.tokens):
             raise Exception(str(tokenIndex) + " not in 0.." + str(len(self.tokens)-1))
-        from antlr4.Lexer import Lexer
+        from .Lexer import Lexer
         prevOnChannel = self.previousTokenOnChannel(tokenIndex - 1, Lexer.DEFAULT_TOKEN_CHANNEL)
         if prevOnChannel == tokenIndex - 1:
             return None
@@ -260,7 +260,7 @@ def filterForChannel(self, left:int, right:int, channel:int):
         for i in range(left, right+1):
             t = self.tokens[i]
             if channel==-1:
-                from antlr4.Lexer import Lexer
+                from .Lexer import Lexer
                 if t.channel!= Lexer.DEFAULT_TOKEN_CHANNEL:
                     hidden.append(t)
             elif t.channel==channel:
diff --git a/runtime/Python3/src/antlr4/CommonTokenFactory.py b/runtime/Python3/src/antlr4/CommonTokenFactory.py
index 0d09ad7859..6e1eed4cbe 100644
--- a/runtime/Python3/src/antlr4/CommonTokenFactory.py
+++ b/runtime/Python3/src/antlr4/CommonTokenFactory.py
@@ -8,7 +8,7 @@
 # This default implementation of {@link TokenFactory} creates
 # {@link CommonToken} objects.
 #
-from antlr4.Token import CommonToken
+from .Token import CommonToken
 
 class TokenFactory(object):
 
diff --git a/runtime/Python3/src/antlr4/CommonTokenStream.py b/runtime/Python3/src/antlr4/CommonTokenStream.py
index dd0a984e33..b09c876f92 100644
--- a/runtime/Python3/src/antlr4/CommonTokenStream.py
+++ b/runtime/Python3/src/antlr4/CommonTokenStream.py
@@ -29,9 +29,9 @@
 # channel.</p>
 #/
 
-from antlr4.BufferedTokenStream import BufferedTokenStream
-from antlr4.Lexer import Lexer
-from antlr4.Token import Token
+from .BufferedTokenStream import BufferedTokenStream
+from .Lexer import Lexer
+from .Token import Token
 
 
 class CommonTokenStream(BufferedTokenStream):
diff --git a/runtime/Python3/src/antlr4/FileStream.py b/runtime/Python3/src/antlr4/FileStream.py
index 1c6ce9fbc0..a14aa65529 100644
--- a/runtime/Python3/src/antlr4/FileStream.py
+++ b/runtime/Python3/src/antlr4/FileStream.py
@@ -10,7 +10,7 @@
 #
 
 import codecs
-from antlr4.InputStream import InputStream
+from .InputStream import InputStream
 
 
 class FileStream(InputStream):
diff --git a/runtime/Python3/src/antlr4/InputStream.py b/runtime/Python3/src/antlr4/InputStream.py
index 5d077d1357..cd370a705b 100644
--- a/runtime/Python3/src/antlr4/InputStream.py
+++ b/runtime/Python3/src/antlr4/InputStream.py
@@ -8,7 +8,7 @@
 #
 #  Vacuum all input from a string and then treat it like a buffer.
 #
-from antlr4.Token import Token
+from .Token import Token
 
 
 class InputStream (object):
diff --git a/runtime/Python3/src/antlr4/IntervalSet.py b/runtime/Python3/src/antlr4/IntervalSet.py
index fda8e6da3f..3566b10af6 100644
--- a/runtime/Python3/src/antlr4/IntervalSet.py
+++ b/runtime/Python3/src/antlr4/IntervalSet.py
@@ -5,7 +5,7 @@
 #
 
 from io import StringIO
-from antlr4.Token import Token
+from .Token import Token
 
 # need forward declarations
 IntervalSet = None
diff --git a/runtime/Python3/src/antlr4/LL1Analyzer.py b/runtime/Python3/src/antlr4/LL1Analyzer.py
index ac149fa2f1..842c0c1742 100644
--- a/runtime/Python3/src/antlr4/LL1Analyzer.py
+++ b/runtime/Python3/src/antlr4/LL1Analyzer.py
@@ -3,14 +3,14 @@
 # Use of this file is governed by the BSD 3-clause license that
 # can be found in the LICENSE.txt file in the project root.
 #/
-from antlr4.IntervalSet import IntervalSet
-from antlr4.Token import Token
-from antlr4.PredictionContext import PredictionContext, SingletonPredictionContext, PredictionContextFromRuleContext
-from antlr4.RuleContext import RuleContext
-from antlr4.atn.ATN import ATN
-from antlr4.atn.ATNConfig import ATNConfig
-from antlr4.atn.ATNState import ATNState, RuleStopState
-from antlr4.atn.Transition import WildcardTransition, NotSetTransition, AbstractPredicateTransition, RuleTransition
+from .IntervalSet import IntervalSet
+from .Token import Token
+from .PredictionContext import PredictionContext, SingletonPredictionContext, PredictionContextFromRuleContext
+from .RuleContext import RuleContext
+from .atn.ATN import ATN
+from .atn.ATNConfig import ATNConfig
+from .atn.ATNState import ATNState, RuleStopState
+from .atn.Transition import WildcardTransition, NotSetTransition, AbstractPredicateTransition, RuleTransition
 
 
 class LL1Analyzer (object):
diff --git a/runtime/Python3/src/antlr4/Lexer.py b/runtime/Python3/src/antlr4/Lexer.py
index 18ce7e3923..8525657bb6 100644
--- a/runtime/Python3/src/antlr4/Lexer.py
+++ b/runtime/Python3/src/antlr4/Lexer.py
@@ -15,12 +15,12 @@
     from typing import TextIO
 else:
     from typing.io import TextIO
-from antlr4.CommonTokenFactory import CommonTokenFactory
-from antlr4.atn.LexerATNSimulator import LexerATNSimulator
-from antlr4.InputStream import InputStream
-from antlr4.Recognizer import Recognizer
-from antlr4.Token import Token
-from antlr4.error.Errors import IllegalStateException, LexerNoViableAltException, RecognitionException
+from .CommonTokenFactory import CommonTokenFactory
+from .atn.LexerATNSimulator import LexerATNSimulator
+from .InputStream import InputStream
+from .Recognizer import Recognizer
+from .Token import Token
+from .error.Errors import IllegalStateException, LexerNoViableAltException, RecognitionException
 
 class TokenSource(object):
 
diff --git a/runtime/Python3/src/antlr4/ListTokenSource.py b/runtime/Python3/src/antlr4/ListTokenSource.py
index 40bcaf94a2..287e3a3e71 100644
--- a/runtime/Python3/src/antlr4/ListTokenSource.py
+++ b/runtime/Python3/src/antlr4/ListTokenSource.py
@@ -12,9 +12,9 @@
 # as the EOF token for every call to {@link #nextToken} after the end of the
 # list is reached. Otherwise, an EOF token will be created.</p>
 #
-from antlr4.CommonTokenFactory import CommonTokenFactory
-from antlr4.Lexer import TokenSource
-from antlr4.Token import Token
+from .CommonTokenFactory import CommonTokenFactory
+from .Lexer import TokenSource
+from .Token import Token
 
 
 class ListTokenSource(TokenSource):
diff --git a/runtime/Python3/src/antlr4/Parser.py b/runtime/Python3/src/antlr4/Parser.py
index 710daab411..62620513a4 100644
--- a/runtime/Python3/src/antlr4/Parser.py
+++ b/runtime/Python3/src/antlr4/Parser.py
@@ -7,20 +7,20 @@
     from typing import TextIO
 else:
     from typing.io import TextIO
-from antlr4.BufferedTokenStream import TokenStream
-from antlr4.CommonTokenFactory import TokenFactory
-from antlr4.error.ErrorStrategy import DefaultErrorStrategy
-from antlr4.InputStream import InputStream
-from antlr4.Recognizer import Recognizer
-from antlr4.RuleContext import RuleContext
-from antlr4.ParserRuleContext import ParserRuleContext
-from antlr4.Token import Token
-from antlr4.Lexer import Lexer
-from antlr4.atn.ATNDeserializer import ATNDeserializer
-from antlr4.atn.ATNDeserializationOptions import ATNDeserializationOptions
-from antlr4.error.Errors import UnsupportedOperationException, RecognitionException
-from antlr4.tree.ParseTreePatternMatcher import ParseTreePatternMatcher
-from antlr4.tree.Tree import ParseTreeListener, TerminalNode, ErrorNode
+from .BufferedTokenStream import TokenStream
+from .CommonTokenFactory import TokenFactory
+from .error.ErrorStrategy import DefaultErrorStrategy
+from .InputStream import InputStream
+from .Recognizer import Recognizer
+from .RuleContext import RuleContext
+from .ParserRuleContext import ParserRuleContext
+from .Token import Token
+from .Lexer import Lexer
+from .atn.ATNDeserializer import ATNDeserializer
+from .atn.ATNDeserializationOptions import ATNDeserializationOptions
+from .error.Errors import UnsupportedOperationException, RecognitionException
+from .tree.ParseTreePatternMatcher import ParseTreePatternMatcher
+from .tree.Tree import ParseTreeListener, TerminalNode, ErrorNode
 
 class TraceListener(ParseTreeListener):
     __slots__ = '_parser'
diff --git a/runtime/Python3/src/antlr4/ParserInterpreter.py b/runtime/Python3/src/antlr4/ParserInterpreter.py
index 4de43623cf..5525eff3a9 100644
--- a/runtime/Python3/src/antlr4/ParserInterpreter.py
+++ b/runtime/Python3/src/antlr4/ParserInterpreter.py
@@ -17,18 +17,18 @@
 #
 #  See TestParserInterpreter for examples.
 #
-from antlr4.dfa.DFA import DFA
-from antlr4.BufferedTokenStream import TokenStream
-from antlr4.Lexer import Lexer
-from antlr4.Parser import Parser
-from antlr4.ParserRuleContext import InterpreterRuleContext, ParserRuleContext
-from antlr4.Token import Token
-from antlr4.atn.ATN import ATN
-from antlr4.atn.ATNState import StarLoopEntryState, ATNState, LoopEndState
-from antlr4.atn.ParserATNSimulator import ParserATNSimulator
-from antlr4.PredictionContext import PredictionContextCache
-from antlr4.atn.Transition import Transition
-from antlr4.error.Errors import RecognitionException, UnsupportedOperationException, FailedPredicateException
+from .dfa.DFA import DFA
+from .BufferedTokenStream import TokenStream
+from .Lexer import Lexer
+from .Parser import Parser
+from .ParserRuleContext import InterpreterRuleContext, ParserRuleContext
+from .Token import Token
+from .atn.ATN import ATN
+from .atn.ATNState import StarLoopEntryState, ATNState, LoopEndState
+from .atn.ParserATNSimulator import ParserATNSimulator
+from .PredictionContext import PredictionContextCache
+from .atn.Transition import Transition
+from .error.Errors import RecognitionException, UnsupportedOperationException, FailedPredicateException
 
 
 class ParserInterpreter(Parser):
diff --git a/runtime/Python3/src/antlr4/ParserRuleContext.py b/runtime/Python3/src/antlr4/ParserRuleContext.py
index f945c5ba94..4605b61352 100644
--- a/runtime/Python3/src/antlr4/ParserRuleContext.py
+++ b/runtime/Python3/src/antlr4/ParserRuleContext.py
@@ -25,9 +25,9 @@
 #  group values such as this aggregate.  The getters/setters are there to
 #  satisfy the superclass interface.
 
-from antlr4.RuleContext import RuleContext
-from antlr4.Token import Token
-from antlr4.tree.Tree import ParseTreeListener, ParseTree, TerminalNodeImpl, ErrorNodeImpl, TerminalNode, \
+from .RuleContext import RuleContext
+from .Token import Token
+from .tree.Tree import ParseTreeListener, ParseTree, TerminalNodeImpl, ErrorNodeImpl, TerminalNode, \
     INVALID_INTERVAL
 
 # need forward declaration
diff --git a/runtime/Python3/src/antlr4/PredictionContext.py b/runtime/Python3/src/antlr4/PredictionContext.py
index 1ae1d377cc..e113633eb1 100644
--- a/runtime/Python3/src/antlr4/PredictionContext.py
+++ b/runtime/Python3/src/antlr4/PredictionContext.py
@@ -3,9 +3,9 @@
 # Use of this file is governed by the BSD 3-clause license that
 # can be found in the LICENSE.txt file in the project root.
 #/
-from antlr4.RuleContext import RuleContext
-from antlr4.atn.ATN import ATN
-from antlr4.error.Errors import IllegalStateException
+from .RuleContext import RuleContext
+from .atn.ATN import ATN
+from .error.Errors import IllegalStateException
 from io import StringIO
 
 # dup ParserATNSimulator class var here to avoid circular import; no idea why this can't be in PredictionContext
diff --git a/runtime/Python3/src/antlr4/Recognizer.py b/runtime/Python3/src/antlr4/Recognizer.py
index 9f9d6e660e..51f240c38a 100644
--- a/runtime/Python3/src/antlr4/Recognizer.py
+++ b/runtime/Python3/src/antlr4/Recognizer.py
@@ -3,9 +3,9 @@
 # Use of this file is governed by the BSD 3-clause license that
 # can be found in the LICENSE.txt file in the project root.
 #
-from antlr4.RuleContext import RuleContext
-from antlr4.Token import Token
-from antlr4.error.ErrorListener import ProxyErrorListener, ConsoleErrorListener
+from .RuleContext import RuleContext
+from .Token import Token
+from .error.ErrorListener import ProxyErrorListener, ConsoleErrorListener
 
 # need forward delcaration
 RecognitionException = None
@@ -52,7 +52,7 @@ def removeErrorListeners(self):
     def getTokenTypeMap(self):
         tokenNames = self.getTokenNames()
         if tokenNames is None:
-            from antlr4.error.Errors import UnsupportedOperationException
+            from .error.Errors import UnsupportedOperationException
             raise UnsupportedOperationException("The current recognizer does not provide a list of token names.")
         result = self.tokenTypeMapCache.get(tokenNames, None)
         if result is None:
@@ -68,7 +68,7 @@ def getTokenTypeMap(self):
     def getRuleIndexMap(self):
         ruleNames = self.getRuleNames()
         if ruleNames is None:
-            from antlr4.error.Errors import UnsupportedOperationException
+            from .error.Errors import UnsupportedOperationException
             raise UnsupportedOperationException("The current recognizer does not provide a list of rule names.")
         result = self.ruleIndexMapCache.get(ruleNames, None)
         if result is None:
diff --git a/runtime/Python3/src/antlr4/RuleContext.py b/runtime/Python3/src/antlr4/RuleContext.py
index 7812ba3b1c..790d628ea7 100644
--- a/runtime/Python3/src/antlr4/RuleContext.py
+++ b/runtime/Python3/src/antlr4/RuleContext.py
@@ -25,8 +25,8 @@
 #  @see ParserRuleContext
 #/
 from io import StringIO
-from antlr4.tree.Tree import RuleNode, INVALID_INTERVAL, ParseTreeVisitor
-from antlr4.tree.Trees import Trees
+from .tree.Tree import RuleNode, INVALID_INTERVAL, ParseTreeVisitor
+from .tree.Trees import Trees
 
 # need forward declarations
 RuleContext = None
diff --git a/runtime/Python3/src/antlr4/StdinStream.py b/runtime/Python3/src/antlr4/StdinStream.py
index f044fc4d77..7e6130cd3f 100644
--- a/runtime/Python3/src/antlr4/StdinStream.py
+++ b/runtime/Python3/src/antlr4/StdinStream.py
@@ -1,7 +1,7 @@
 import codecs
 import sys
 
-from antlr4.InputStream import InputStream
+from .InputStream import InputStream
 
 
 class StdinStream(InputStream):
diff --git a/runtime/Python3/src/antlr4/TokenStreamRewriter.py b/runtime/Python3/src/antlr4/TokenStreamRewriter.py
index 59baf8f47a..efaab1e9d2 100644
--- a/runtime/Python3/src/antlr4/TokenStreamRewriter.py
+++ b/runtime/Python3/src/antlr4/TokenStreamRewriter.py
@@ -5,9 +5,9 @@
 #
 
 from io import StringIO
-from antlr4.Token import Token
+from .Token import Token
 
-from antlr4.CommonTokenStream import CommonTokenStream
+from .CommonTokenStream import CommonTokenStream
 
 
 class TokenStreamRewriter(object):
diff --git a/runtime/Python3/src/antlr4/__init__.py b/runtime/Python3/src/antlr4/__init__.py
index 42027289e7..adcf82a7b2 100644
--- a/runtime/Python3/src/antlr4/__init__.py
+++ b/runtime/Python3/src/antlr4/__init__.py
@@ -1,21 +1,21 @@
-from antlr4.Token import Token
-from antlr4.InputStream import InputStream
-from antlr4.FileStream import FileStream
-from antlr4.StdinStream import StdinStream
-from antlr4.BufferedTokenStream import TokenStream
-from antlr4.CommonTokenStream import CommonTokenStream
-from antlr4.Lexer import Lexer
-from antlr4.Parser import Parser
-from antlr4.dfa.DFA import DFA
-from antlr4.atn.ATN import ATN
-from antlr4.atn.ATNDeserializer import ATNDeserializer
-from antlr4.atn.LexerATNSimulator import LexerATNSimulator
-from antlr4.atn.ParserATNSimulator import ParserATNSimulator
-from antlr4.atn.PredictionMode import PredictionMode
-from antlr4.PredictionContext import PredictionContextCache
-from antlr4.ParserRuleContext import RuleContext, ParserRuleContext
-from antlr4.tree.Tree import ParseTreeListener, ParseTreeVisitor, ParseTreeWalker, TerminalNode, ErrorNode, RuleNode
-from antlr4.error.Errors import RecognitionException, IllegalStateException, NoViableAltException
-from antlr4.error.ErrorStrategy import BailErrorStrategy
-from antlr4.error.DiagnosticErrorListener import DiagnosticErrorListener
-from antlr4.Utils import str_list
+from .Token import Token
+from .InputStream import InputStream
+from .FileStream import FileStream
+from .StdinStream import StdinStream
+from .BufferedTokenStream import TokenStream
+from .CommonTokenStream import CommonTokenStream
+from .Lexer import Lexer
+from .Parser import Parser
+from .dfa.DFA import DFA
+from .atn.ATN import ATN
+from .atn.ATNDeserializer import ATNDeserializer
+from .atn.LexerATNSimulator import LexerATNSimulator
+from .atn.ParserATNSimulator import ParserATNSimulator
+from .atn.PredictionMode import PredictionMode
+from .PredictionContext import PredictionContextCache
+from .ParserRuleContext import RuleContext, ParserRuleContext
+from .tree.Tree import ParseTreeListener, ParseTreeVisitor, ParseTreeWalker, TerminalNode, ErrorNode, RuleNode
+from .error.Errors import RecognitionException, IllegalStateException, NoViableAltException
+from .error.ErrorStrategy import BailErrorStrategy
+from .error.DiagnosticErrorListener import DiagnosticErrorListener
+from .Utils import str_list
diff --git a/runtime/Python3/src/antlr4/_pygrun.py b/runtime/Python3/src/antlr4/_pygrun.py
index c392ca16aa..6cff0ed175 100644
--- a/runtime/Python3/src/antlr4/_pygrun.py
+++ b/runtime/Python3/src/antlr4/_pygrun.py
@@ -1,10 +1,9 @@
-#!python
 __author__ = 'jszheng'
 import optparse
 import sys
 import os
-import importlib
-from antlr4 import *
+
+from . import *
 
 
 # this is a python version of TestRig
@@ -165,7 +164,3 @@ def process(input_stream, class_lexer, class_parser):
             process(input_stream, class_lexer, class_parser)
         else:
             print("[ERROR] file {} not exist".format(os.path.normpath(file_name)))
-
-
-if __name__ == '__main__':
-    main()
diff --git a/runtime/Python3/src/antlr4/atn/ATN.py b/runtime/Python3/src/antlr4/atn/ATN.py
index 3f1abe0a4a..7556de1d30 100644
--- a/runtime/Python3/src/antlr4/atn/ATN.py
+++ b/runtime/Python3/src/antlr4/atn/ATN.py
@@ -2,13 +2,13 @@
 # Use of this file is governed by the BSD 3-clause license that
 # can be found in the LICENSE.txt file in the project root.
 #/
-from antlr4.IntervalSet import IntervalSet
+from ..IntervalSet import IntervalSet
 
-from antlr4.RuleContext import RuleContext
+from ..RuleContext import RuleContext
 
-from antlr4.Token import Token
-from antlr4.atn.ATNType import ATNType
-from antlr4.atn.ATNState import ATNState, DecisionState
+from ..Token import Token
+from ..atn.ATNType import ATNType
+from ..atn.ATNState import ATNState, DecisionState
 
 
 class ATN(object):
@@ -52,7 +52,7 @@ def __init__(self, grammarType:ATNType , maxTokenType:int ):
     #  the rule surrounding {@code s}. In other words, the set will be
     #  restricted to tokens reachable staying within {@code s}'s rule.
     def nextTokensInContext(self, s:ATNState, ctx:RuleContext):
-        from antlr4.LL1Analyzer import LL1Analyzer
+        from ..LL1Analyzer import LL1Analyzer
         anal = LL1Analyzer(self)
         return anal.LOOK(s, ctx=ctx)
 
diff --git a/runtime/Python3/src/antlr4/atn/ATNConfig.py b/runtime/Python3/src/antlr4/atn/ATNConfig.py
index e008fb2efa..2ec9d05c11 100644
--- a/runtime/Python3/src/antlr4/atn/ATNConfig.py
+++ b/runtime/Python3/src/antlr4/atn/ATNConfig.py
@@ -12,10 +12,10 @@
 #  an ATN state.
 #/
 from io import StringIO
-from antlr4.PredictionContext import PredictionContext
-from antlr4.atn.ATNState import ATNState, DecisionState
-from antlr4.atn.LexerActionExecutor import LexerActionExecutor
-from antlr4.atn.SemanticContext import SemanticContext
+from ..PredictionContext import PredictionContext
+from ..atn.ATNState import ATNState, DecisionState
+from ..atn.LexerActionExecutor import LexerActionExecutor
+from ..atn.SemanticContext import SemanticContext
 
 # need a forward declaration
 ATNConfig = None
diff --git a/runtime/Python3/src/antlr4/atn/ATNConfigSet.py b/runtime/Python3/src/antlr4/atn/ATNConfigSet.py
index d1886a4a5d..fa15f4c8a1 100644
--- a/runtime/Python3/src/antlr4/atn/ATNConfigSet.py
+++ b/runtime/Python3/src/antlr4/atn/ATNConfigSet.py
@@ -3,12 +3,12 @@
 # Use of this file is governed by the BSD 3-clause license that
 # can be found in the LICENSE.txt file in the project root.
 
-from antlr4.PredictionContext import merge
-from antlr4.Utils import str_list
-from antlr4.atn.ATN import ATN
-from antlr4.atn.ATNConfig import ATNConfig
-from antlr4.atn.SemanticContext import SemanticContext
-from antlr4.error.Errors import UnsupportedOperationException, IllegalStateException
+from ..PredictionContext import merge
+from ..Utils import str_list
+from ..atn.ATN import ATN
+from ..atn.ATNConfig import ATNConfig
+from ..atn.SemanticContext import SemanticContext
+from ..error.Errors import UnsupportedOperationException, IllegalStateException
 from functools import reduce
 #
 # Specialized {@link Set}{@code <}{@link ATNConfig}{@code >} that can track
diff --git a/runtime/Python3/src/antlr4/atn/ATNDeserializer.py b/runtime/Python3/src/antlr4/atn/ATNDeserializer.py
index c491ac4c65..05001088a5 100644
--- a/runtime/Python3/src/antlr4/atn/ATNDeserializer.py
+++ b/runtime/Python3/src/antlr4/atn/ATNDeserializer.py
@@ -4,13 +4,13 @@
 #/
 from io import StringIO
 from typing import Callable
-from antlr4.Token import Token
-from antlr4.atn.ATN import ATN
-from antlr4.atn.ATNType import ATNType
-from antlr4.atn.ATNState import *
-from antlr4.atn.Transition import *
-from antlr4.atn.LexerAction import *
-from antlr4.atn.ATNDeserializationOptions import ATNDeserializationOptions
+from ..Token import Token
+from ..atn.ATN import ATN
+from ..atn.ATNType import ATNType
+from ..atn.ATNState import *
+from ..atn.Transition import *
+from ..atn.LexerAction import *
+from ..atn.ATNDeserializationOptions import ATNDeserializationOptions
 
 SERIALIZED_VERSION = 4
 
diff --git a/runtime/Python3/src/antlr4/atn/ATNSimulator.py b/runtime/Python3/src/antlr4/atn/ATNSimulator.py
index 4f6f53f488..de39ed16cc 100644
--- a/runtime/Python3/src/antlr4/atn/ATNSimulator.py
+++ b/runtime/Python3/src/antlr4/atn/ATNSimulator.py
@@ -3,10 +3,10 @@
 # Use of this file is governed by the BSD 3-clause license that
 # can be found in the LICENSE.txt file in the project root.
 #/
-from antlr4.PredictionContext import PredictionContextCache, PredictionContext, getCachedPredictionContext
-from antlr4.atn.ATN import ATN
-from antlr4.atn.ATNConfigSet import ATNConfigSet
-from antlr4.dfa.DFAState import DFAState
+from ..PredictionContext import PredictionContextCache, PredictionContext, getCachedPredictionContext
+from ..atn.ATN import ATN
+from ..atn.ATNConfigSet import ATNConfigSet
+from ..dfa.DFAState import DFAState
 
 
 class ATNSimulator(object):
diff --git a/runtime/Python3/src/antlr4/atn/ATNState.py b/runtime/Python3/src/antlr4/atn/ATNState.py
index fbf6a7b944..29dffd0aab 100644
--- a/runtime/Python3/src/antlr4/atn/ATNState.py
+++ b/runtime/Python3/src/antlr4/atn/ATNState.py
@@ -64,7 +64,7 @@
 # <embed src="images/OptionalNonGreedy.svg" type="image/svg+xml"/>
 #
 
-from antlr4.atn.Transition import Transition
+from ..atn.Transition import Transition
 
 INITIAL_NUM_TRANSITIONS = 4
 
diff --git a/runtime/Python3/src/antlr4/atn/LexerATNSimulator.py b/runtime/Python3/src/antlr4/atn/LexerATNSimulator.py
index 71201ff5f9..2514f2781e 100644
--- a/runtime/Python3/src/antlr4/atn/LexerATNSimulator.py
+++ b/runtime/Python3/src/antlr4/atn/LexerATNSimulator.py
@@ -20,18 +20,18 @@
 #  can simply return the predicted token type.</p>
 #/
 
-from antlr4.PredictionContext import PredictionContextCache, SingletonPredictionContext, PredictionContext
-from antlr4.InputStream import InputStream
-from antlr4.Token import Token
-from antlr4.atn.ATN import ATN
-from antlr4.atn.ATNConfig import LexerATNConfig
-from antlr4.atn.ATNSimulator import ATNSimulator
-from antlr4.atn.ATNConfigSet import ATNConfigSet, OrderedATNConfigSet
-from antlr4.atn.ATNState import RuleStopState, ATNState
-from antlr4.atn.LexerActionExecutor import LexerActionExecutor
-from antlr4.atn.Transition import Transition
-from antlr4.dfa.DFAState import DFAState
-from antlr4.error.Errors import LexerNoViableAltException, UnsupportedOperationException
+from ..PredictionContext import PredictionContextCache, SingletonPredictionContext, PredictionContext
+from ..InputStream import InputStream
+from ..Token import Token
+from ..atn.ATN import ATN
+from ..atn.ATNConfig import LexerATNConfig
+from ..atn.ATNSimulator import ATNSimulator
+from ..atn.ATNConfigSet import ATNConfigSet, OrderedATNConfigSet
+from ..atn.ATNState import RuleStopState, ATNState
+from ..atn.LexerActionExecutor import LexerActionExecutor
+from ..atn.Transition import Transition
+from ..dfa.DFAState import DFAState
+from ..error.Errors import LexerNoViableAltException, UnsupportedOperationException
 
 class SimState(object):
     __slots__ = ('index', 'line', 'column', 'dfaState')
@@ -76,7 +76,7 @@ def __init__(self, recog:Lexer, atn:ATN, decisionToDFA:list, sharedContextCache:
         self.line = 1
         # The index of the character relative to the beginning of the line 0..n-1#/
         self.column = 0
-        from antlr4.Lexer import Lexer
+        from ..Lexer import Lexer
         self.mode = Lexer.DEFAULT_MODE
         # Cache Lexer properties to avoid further imports
         self.DEFAULT_MODE = Lexer.DEFAULT_MODE
diff --git a/runtime/Python3/src/antlr4/atn/LexerActionExecutor.py b/runtime/Python3/src/antlr4/atn/LexerActionExecutor.py
index 5c6462c3a2..a7192cdbf9 100644
--- a/runtime/Python3/src/antlr4/atn/LexerActionExecutor.py
+++ b/runtime/Python3/src/antlr4/atn/LexerActionExecutor.py
@@ -12,8 +12,8 @@
 # not cause bloating of the {@link DFA} created for the lexer.</p>
 
 
-from antlr4.InputStream import InputStream
-from antlr4.atn.LexerAction import LexerAction, LexerIndexedCustomAction
+from ..InputStream import InputStream
+from ..atn.LexerAction import LexerAction, LexerIndexedCustomAction
 
 # need a forward declaration
 Lexer = None
diff --git a/runtime/Python3/src/antlr4/atn/ParserATNSimulator.py b/runtime/Python3/src/antlr4/atn/ParserATNSimulator.py
index 470aaef0ed..3dac7c6222 100644
--- a/runtime/Python3/src/antlr4/atn/ParserATNSimulator.py
+++ b/runtime/Python3/src/antlr4/atn/ParserATNSimulator.py
@@ -232,26 +232,26 @@
 # the input.</p>
 #
 import sys
-from antlr4 import DFA
-from antlr4.BufferedTokenStream import TokenStream
-from antlr4.Parser import Parser
-from antlr4.ParserRuleContext import ParserRuleContext
-from antlr4.PredictionContext import PredictionContextCache, PredictionContext, SingletonPredictionContext, \
+from ..dfa.DFA import DFA
+from ..BufferedTokenStream import TokenStream
+from ..Parser import Parser
+from ..ParserRuleContext import ParserRuleContext
+from ..PredictionContext import PredictionContextCache, PredictionContext, SingletonPredictionContext, \
     PredictionContextFromRuleContext
-from antlr4.RuleContext import RuleContext
-from antlr4.Token import Token
-from antlr4.Utils import str_list
-from antlr4.atn.ATN import ATN
-from antlr4.atn.ATNConfig import ATNConfig
-from antlr4.atn.ATNConfigSet import ATNConfigSet
-from antlr4.atn.ATNSimulator import ATNSimulator
-from antlr4.atn.ATNState import DecisionState, RuleStopState, ATNState
-from antlr4.atn.PredictionMode import PredictionMode
-from antlr4.atn.SemanticContext import SemanticContext, andContext, orContext
-from antlr4.atn.Transition import Transition, RuleTransition, ActionTransition, PrecedencePredicateTransition, \
+from ..RuleContext import RuleContext
+from ..Token import Token
+from ..Utils import str_list
+from ..atn.ATN import ATN
+from ..atn.ATNConfig import ATNConfig
+from ..atn.ATNConfigSet import ATNConfigSet
+from ..atn.ATNSimulator import ATNSimulator
+from ..atn.ATNState import DecisionState, RuleStopState, ATNState
+from ..atn.PredictionMode import PredictionMode
+from ..atn.SemanticContext import SemanticContext, andContext, orContext
+from ..atn.Transition import Transition, RuleTransition, ActionTransition, PrecedencePredicateTransition, \
     PredicateTransition, AtomTransition, SetTransition, NotSetTransition
-from antlr4.dfa.DFAState import DFAState, PredPrediction
-from antlr4.error.Errors import NoViableAltException
+from ..dfa.DFAState import DFAState, PredPrediction
+from ..error.Errors import NoViableAltException
 
 
 class ParserATNSimulator(ATNSimulator):
diff --git a/runtime/Python3/src/antlr4/atn/PredictionMode.py b/runtime/Python3/src/antlr4/atn/PredictionMode.py
index 8e5c73bb47..68cd31334d 100644
--- a/runtime/Python3/src/antlr4/atn/PredictionMode.py
+++ b/runtime/Python3/src/antlr4/atn/PredictionMode.py
@@ -10,11 +10,11 @@
 
 
 from enum import Enum
-from antlr4.atn.ATN import ATN
-from antlr4.atn.ATNConfig import ATNConfig
-from antlr4.atn.ATNConfigSet import ATNConfigSet
-from antlr4.atn.ATNState import RuleStopState
-from antlr4.atn.SemanticContext import SemanticContext
+from ..atn.ATN import ATN
+from ..atn.ATNConfig import ATNConfig
+from ..atn.ATNConfigSet import ATNConfigSet
+from ..atn.ATNState import RuleStopState
+from ..atn.SemanticContext import SemanticContext
 
 PredictionMode = None
 
diff --git a/runtime/Python3/src/antlr4/atn/SemanticContext.py b/runtime/Python3/src/antlr4/atn/SemanticContext.py
index 3f33a18010..be04343f66 100644
--- a/runtime/Python3/src/antlr4/atn/SemanticContext.py
+++ b/runtime/Python3/src/antlr4/atn/SemanticContext.py
@@ -11,8 +11,8 @@
 #  <p>I have scoped the {@link AND}, {@link OR}, and {@link Predicate} subclasses of
 #  {@link SemanticContext} within the scope of this outer class.</p>
 #
-from antlr4.Recognizer import Recognizer
-from antlr4.RuleContext import RuleContext
+from ..Recognizer import Recognizer
+from ..RuleContext import RuleContext
 from io import StringIO
 
 
diff --git a/runtime/Python3/src/antlr4/atn/Transition.py b/runtime/Python3/src/antlr4/atn/Transition.py
index 2e4c997176..f504e20f4b 100644
--- a/runtime/Python3/src/antlr4/atn/Transition.py
+++ b/runtime/Python3/src/antlr4/atn/Transition.py
@@ -16,11 +16,11 @@
 #  the states. We'll use the term Edge for the DFA to distinguish them from
 #  ATN transitions.</p>
 #
-from antlr4.IntervalSet import IntervalSet
-from antlr4.Token import Token
+from ..IntervalSet import IntervalSet
+from ..Token import Token
 
 # need forward declarations
-from antlr4.atn.SemanticContext import Predicate, PrecedencePredicate
+from ..atn.SemanticContext import Predicate, PrecedencePredicate
 
 ATNState = None
 RuleStartState = None
@@ -265,4 +265,4 @@ def __str__(self):
 del ATNState
 del RuleStartState
 
-from antlr4.atn.ATNState import *
+from ..atn.ATNState import *
diff --git a/runtime/Python3/src/antlr4/dfa/DFA.py b/runtime/Python3/src/antlr4/dfa/DFA.py
index d80589a683..6339aadcef 100644
--- a/runtime/Python3/src/antlr4/dfa/DFA.py
+++ b/runtime/Python3/src/antlr4/dfa/DFA.py
@@ -2,12 +2,12 @@
 # Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
 # Use of this file is governed by the BSD 3-clause license that
 # can be found in the LICENSE.txt file in the project root.
-from antlr4.atn.ATNState import StarLoopEntryState
+from ..atn.ATNState import StarLoopEntryState
 
-from antlr4.atn.ATNConfigSet import ATNConfigSet
-from antlr4.atn.ATNState import DecisionState
-from antlr4.dfa.DFAState import DFAState
-from antlr4.error.Errors import IllegalStateException
+from ..atn.ATNConfigSet import ATNConfigSet
+from ..atn.ATNState import DecisionState
+from ..dfa.DFAState import DFAState
+from ..error.Errors import IllegalStateException
 
 
 class DFA(object):
@@ -121,13 +121,13 @@ def __str__(self):
     def toString(self, literalNames:list=None, symbolicNames:list=None):
         if self.s0 is None:
             return ""
-        from antlr4.dfa.DFASerializer import DFASerializer
+        from ..dfa.DFASerializer import DFASerializer
         serializer = DFASerializer(self,literalNames,symbolicNames)
         return str(serializer)
 
     def toLexerString(self):
         if self.s0 is None:
             return ""
-        from antlr4.dfa.DFASerializer import LexerDFASerializer
+        from ..dfa.DFASerializer import LexerDFASerializer
         serializer = LexerDFASerializer(self)
         return str(serializer)
diff --git a/runtime/Python3/src/antlr4/dfa/DFASerializer.py b/runtime/Python3/src/antlr4/dfa/DFASerializer.py
index bca0727b76..51990751c8 100644
--- a/runtime/Python3/src/antlr4/dfa/DFASerializer.py
+++ b/runtime/Python3/src/antlr4/dfa/DFASerializer.py
@@ -6,9 +6,9 @@
 
 # A DFA walker that knows how to dump them to serialized strings.#/
 from io import StringIO
-from antlr4 import DFA
-from antlr4.Utils import str_list
-from antlr4.dfa.DFAState import DFAState
+from ..dfa.DFA import DFA
+from ..Utils import str_list
+from ..dfa.DFAState import DFAState
 
 
 class DFASerializer(object):
diff --git a/runtime/Python3/src/antlr4/dfa/DFAState.py b/runtime/Python3/src/antlr4/dfa/DFAState.py
index 51955a4488..98dcdeba9c 100644
--- a/runtime/Python3/src/antlr4/dfa/DFAState.py
+++ b/runtime/Python3/src/antlr4/dfa/DFAState.py
@@ -6,8 +6,8 @@
 
 # Map a predicate to a predicted alternative.#/
 from io import StringIO
-from antlr4.atn.ATNConfigSet import ATNConfigSet
-from antlr4.atn.SemanticContext import SemanticContext
+from ..atn.ATNConfigSet import ATNConfigSet
+from ..atn.SemanticContext import SemanticContext
 
 
 class PredPrediction(object):
diff --git a/runtime/Python3/src/antlr4/error/DiagnosticErrorListener.py b/runtime/Python3/src/antlr4/error/DiagnosticErrorListener.py
index 32ac14b635..71c05129cb 100644
--- a/runtime/Python3/src/antlr4/error/DiagnosticErrorListener.py
+++ b/runtime/Python3/src/antlr4/error/DiagnosticErrorListener.py
@@ -25,9 +25,10 @@
 # </ul>
 
 from io import StringIO
-from antlr4 import Parser, DFA
-from antlr4.atn.ATNConfigSet import ATNConfigSet
-from antlr4.error.ErrorListener import ErrorListener
+from ..dfa.DFA import DFA
+from ..Parser import Parser
+from ..atn.ATNConfigSet import ATNConfigSet
+from ..error.ErrorListener import ErrorListener
 
 class DiagnosticErrorListener(ErrorListener):
 
diff --git a/runtime/Python3/src/antlr4/error/ErrorStrategy.py b/runtime/Python3/src/antlr4/error/ErrorStrategy.py
index 0f7caadb24..0959a1db3f 100644
--- a/runtime/Python3/src/antlr4/error/ErrorStrategy.py
+++ b/runtime/Python3/src/antlr4/error/ErrorStrategy.py
@@ -4,11 +4,11 @@
 # can be found in the LICENSE.txt file in the project root.
 #
 import sys
-from antlr4.IntervalSet import IntervalSet
+from ..IntervalSet import IntervalSet
 
-from antlr4.Token import Token
-from antlr4.atn.ATNState import ATNState
-from antlr4.error.Errors import RecognitionException, NoViableAltException, InputMismatchException, \
+from ..Token import Token
+from ..atn.ATNState import ATNState
+from ..error.Errors import RecognitionException, NoViableAltException, InputMismatchException, \
     FailedPredicateException, ParseCancellationException
 
 # need forward declaration
diff --git a/runtime/Python3/src/antlr4/error/Errors.py b/runtime/Python3/src/antlr4/error/Errors.py
index 3f34492693..f26df060e5 100644
--- a/runtime/Python3/src/antlr4/error/Errors.py
+++ b/runtime/Python3/src/antlr4/error/Errors.py
@@ -34,9 +34,9 @@ def __init__(self, msg:str):
 #  in the input, where it is in the ATN, the rule invocation stack,
 #  and what kind of problem occurred.
 
-from antlr4.InputStream import InputStream
-from antlr4.ParserRuleContext import ParserRuleContext
-from antlr4.Recognizer import Recognizer
+from ..InputStream import InputStream
+from ..ParserRuleContext import ParserRuleContext
+from ..Recognizer import Recognizer
 
 class RecognitionException(Exception):
 
@@ -143,7 +143,7 @@ def __init__(self, recognizer:Parser, predicate:str=None, message:str=None):
                          input=recognizer.getInputStream(), ctx=recognizer._ctx)
         s = recognizer._interp.atn.states[recognizer.state]
         trans = s.transitions[0]
-        from antlr4.atn.Transition import PredicateTransition
+        from ..atn.Transition import PredicateTransition
         if isinstance(trans, PredicateTransition):
             self.ruleIndex = trans.ruleIndex
             self.predicateIndex = trans.predIndex
diff --git a/runtime/Python3/src/antlr4/tree/ParseTreeMatch.py b/runtime/Python3/src/antlr4/tree/ParseTreeMatch.py
index c02bc0357d..043c8d1aec 100644
--- a/runtime/Python3/src/antlr4/tree/ParseTreeMatch.py
+++ b/runtime/Python3/src/antlr4/tree/ParseTreeMatch.py
@@ -9,8 +9,8 @@
 # Represents the result of matching a {@link ParseTree} against a tree pattern.
 #
 from io import StringIO
-from antlr4.tree.ParseTreePattern import ParseTreePattern
-from antlr4.tree.Tree import ParseTree
+from ..tree.ParseTreePattern import ParseTreePattern
+from ..tree.Tree import ParseTree
 
 
 class ParseTreeMatch(object):
diff --git a/runtime/Python3/src/antlr4/tree/ParseTreePattern.py b/runtime/Python3/src/antlr4/tree/ParseTreePattern.py
index b09bf1a542..c27fa16e9a 100644
--- a/runtime/Python3/src/antlr4/tree/ParseTreePattern.py
+++ b/runtime/Python3/src/antlr4/tree/ParseTreePattern.py
@@ -8,9 +8,9 @@
 # A pattern like {@code <ID> = <expr>;} converted to a {@link ParseTree} by
 # {@link ParseTreePatternMatcher#compile(String, int)}.
 #
-from antlr4.tree.ParseTreePatternMatcher import ParseTreePatternMatcher
-from antlr4.tree.Tree import ParseTree
-from antlr4.xpath.XPathLexer import XPathLexer
+from ..tree.ParseTreePatternMatcher import ParseTreePatternMatcher
+from ..tree.Tree import ParseTree
+from ..xpath.XPathLexer import XPathLexer
 
 
 class ParseTreePattern(object):
diff --git a/runtime/Python3/src/antlr4/tree/ParseTreePatternMatcher.py b/runtime/Python3/src/antlr4/tree/ParseTreePatternMatcher.py
index 62fd197b0d..848b3d6b9c 100644
--- a/runtime/Python3/src/antlr4/tree/ParseTreePatternMatcher.py
+++ b/runtime/Python3/src/antlr4/tree/ParseTreePatternMatcher.py
@@ -61,18 +61,18 @@
 # {@link #setDelimiters}. You must escape both start and stop strings
 # {@code \<} and {@code \>}.</p>
 #
-from antlr4.CommonTokenStream import CommonTokenStream
-from antlr4.InputStream import InputStream
-from antlr4.ParserRuleContext import ParserRuleContext
-from antlr4.Lexer import Lexer
-from antlr4.ListTokenSource import ListTokenSource
-from antlr4.Token import Token
-from antlr4.error.ErrorStrategy import BailErrorStrategy
-from antlr4.error.Errors import RecognitionException, ParseCancellationException
-from antlr4.tree.Chunk import TagChunk, TextChunk
-from antlr4.tree.RuleTagToken import RuleTagToken
-from antlr4.tree.TokenTagToken import TokenTagToken
-from antlr4.tree.Tree import ParseTree, TerminalNode, RuleNode
+from ..CommonTokenStream import CommonTokenStream
+from ..InputStream import InputStream
+from ..ParserRuleContext import ParserRuleContext
+from ..Lexer import Lexer
+from ..ListTokenSource import ListTokenSource
+from ..Token import Token
+from ..error.ErrorStrategy import BailErrorStrategy
+from ..error.Errors import RecognitionException, ParseCancellationException
+from ..tree.Chunk import TagChunk, TextChunk
+from ..tree.RuleTagToken import RuleTagToken
+from ..tree.TokenTagToken import TokenTagToken
+from ..tree.Tree import ParseTree, TerminalNode, RuleNode
 
 # need forward declaration
 Parser = None
@@ -151,7 +151,7 @@ def matchRuleIndex(self, tree:ParseTree, pattern:str, patternRuleIndex:int):
     def matchPattern(self, tree:ParseTree, pattern:ParseTreePattern):
         labels = dict()
         mismatchedNode = self.matchImpl(tree, pattern.patternTree, labels)
-        from antlr4.tree.ParseTreeMatch import ParseTreeMatch
+        from ..tree.ParseTreeMatch import ParseTreeMatch
         return ParseTreeMatch(tree, pattern, labels, mismatchedNode)
 
     #
@@ -162,7 +162,7 @@ def compileTreePattern(self, pattern:str, patternRuleIndex:int):
         tokenList = self.tokenize(pattern)
         tokenSrc = ListTokenSource(tokenList)
         tokens = CommonTokenStream(tokenSrc)
-        from antlr4.ParserInterpreter import ParserInterpreter
+        from ..ParserInterpreter import ParserInterpreter
         parserInterp = ParserInterpreter(self.parser.grammarFileName, self.parser.tokenNames,
                                 self.parser.ruleNames, self.parser.getATNWithBypassAlts(),tokens)
         tree = None
@@ -180,7 +180,7 @@ def compileTreePattern(self, pattern:str, patternRuleIndex:int):
         if tokens.LA(1)!=Token.EOF:
             raise StartRuleDoesNotConsumeFullPattern()
 
-        from antlr4.tree.ParseTreePattern import ParseTreePattern
+        from ..tree.ParseTreePattern import ParseTreePattern
         return ParseTreePattern(self, pattern, patternRuleIndex, tree)
 
     #
diff --git a/runtime/Python3/src/antlr4/tree/RuleTagToken.py b/runtime/Python3/src/antlr4/tree/RuleTagToken.py
index a198f7da13..59d2eb685f 100644
--- a/runtime/Python3/src/antlr4/tree/RuleTagToken.py
+++ b/runtime/Python3/src/antlr4/tree/RuleTagToken.py
@@ -9,7 +9,7 @@
 # rule; e.g., {@code <expr>}. These tokens are created for {@link TagChunk}
 # chunks where the tag corresponds to a parser rule.
 #
-from antlr4.Token import Token
+from ..Token import Token
 
 
 class RuleTagToken(Token):
diff --git a/runtime/Python3/src/antlr4/tree/TokenTagToken.py b/runtime/Python3/src/antlr4/tree/TokenTagToken.py
index b7beeb8768..87e411ef5e 100644
--- a/runtime/Python3/src/antlr4/tree/TokenTagToken.py
+++ b/runtime/Python3/src/antlr4/tree/TokenTagToken.py
@@ -9,7 +9,7 @@
 # {@code <ID>}. These tokens are created for {@link TagChunk} chunks where the
 # tag corresponds to a lexer rule or token type.
 #
-from antlr4.Token import CommonToken
+from ..Token import CommonToken
 
 
 class TokenTagToken(CommonToken):
diff --git a/runtime/Python3/src/antlr4/tree/Tree.py b/runtime/Python3/src/antlr4/tree/Tree.py
index 812acc96bb..86fce25ce0 100644
--- a/runtime/Python3/src/antlr4/tree/Tree.py
+++ b/runtime/Python3/src/antlr4/tree/Tree.py
@@ -7,7 +7,7 @@
 # The basic notion of a tree has a parent, a payload, and a list of children.
 #  It is the most abstract interface for all the trees used by ANTLR.
 #/
-from antlr4.Token import Token
+from ..Token import Token
 
 INVALID_INTERVAL = (-1, -2)
 
diff --git a/runtime/Python3/src/antlr4/tree/Trees.py b/runtime/Python3/src/antlr4/tree/Trees.py
index 686b8cb287..b4ef66c2ef 100644
--- a/runtime/Python3/src/antlr4/tree/Trees.py
+++ b/runtime/Python3/src/antlr4/tree/Trees.py
@@ -7,9 +7,9 @@
 
 # A set of utility routines useful for all kinds of ANTLR trees.#
 from io import StringIO
-from antlr4.Token import Token
-from antlr4.Utils import escapeWhitespace
-from antlr4.tree.Tree import RuleNode, ErrorNode, TerminalNode, Tree, ParseTree
+from ..Token import Token
+from ..Utils import escapeWhitespace
+from ..tree.Tree import RuleNode, ErrorNode, TerminalNode, Tree, ParseTree
 
 # need forward declaration
 Parser  = None
@@ -91,7 +91,7 @@ def findAllNodes(cls, t:ParseTree, index:int, findTokens:bool):
 
     @classmethod
     def _findAllNodes(cls, t:ParseTree, index:int, findTokens:bool, nodes:list):
-        from antlr4.ParserRuleContext import ParserRuleContext
+        from ..ParserRuleContext import ParserRuleContext
         # check this node (the root) first
         if findTokens and isinstance(t, TerminalNode):
             if t.symbol.type==index:
diff --git a/runtime/Python3/src/antlr4/xpath/XPath.py b/runtime/Python3/src/antlr4/xpath/XPath.py
index 92f5e0dac3..47fbfe93b4 100644
--- a/runtime/Python3/src/antlr4/xpath/XPath.py
+++ b/runtime/Python3/src/antlr4/xpath/XPath.py
@@ -47,18 +47,24 @@
 # <p>
 # Whitespace is not allowed.</p>
 #
-from antlr4 import CommonTokenStream, DFA, PredictionContextCache, Lexer, LexerATNSimulator, ParserRuleContext, TerminalNode
-from antlr4.InputStream import InputStream
-from antlr4.Parser import Parser
-from antlr4.RuleContext import RuleContext
-from antlr4.Token import Token
-from antlr4.atn.ATNDeserializer import ATNDeserializer
-from antlr4.error.ErrorListener import ErrorListener
-from antlr4.error.Errors import LexerNoViableAltException
-from antlr4.tree.Tree import ParseTree
-from antlr4.tree.Trees import Trees
+from ..Lexer import Lexer
+from ..CommonTokenStream import CommonTokenStream
+from ..ParserRuleContext import ParserRuleContext
+from ..tree.Tree import TerminalNode
+from ..atn.LexerATNSimulator import LexerATNSimulator
+from ..PredictionContext import PredictionContextCache
+from ..dfa.DFA import DFA
+from ..InputStream import InputStream
+from ..Parser import Parser
+from ..RuleContext import RuleContext
+from ..Token import Token
+from ..atn.ATNDeserializer import ATNDeserializer
+from ..error.ErrorListener import ErrorListener
+from ..error.Errors import LexerNoViableAltException
+from ..tree.Tree import ParseTree
+from ..tree.Trees import Trees
 from io import StringIO
-from antlr4.xpath.XPathLexer import XPathLexer
+from ..xpath.XPathLexer import XPathLexer
 
 
 class XPath(object):
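
Every hunk above applies the same transformation: intra-package imports written against the installed package name (`from antlr4.X import Y`) become relative imports (`from .X import Y`, or `from ..X import Y` from one package level down). Below is a minimal smoke-test sketch for this kind of change — it assumes only the standard library and an importable `antlr4` package on `sys.path`, and is an illustration, not part of the patch or the ANTLR test suite.

```python
# Sketch: import every submodule of the antlr4 package so that any broken
# relative import (e.g. '.' used where '..' is required) fails loudly.
import importlib
import pkgutil

import antlr4  # assumes the runtime package is importable

# walk_packages yields ModuleInfo entries for every module/subpackage;
# import_module raises ImportError if any relative import cannot resolve.
for module_info in pkgutil.walk_packages(antlr4.__path__, prefix=antlr4.__name__ + "."):
    importlib.import_module(module_info.name)

print("all antlr4 submodules import cleanly")
```

The relative form resolves against `__package__` rather than a hard-coded top-level name, which is what makes the runtime vendorable under a different package name; the absolute form only works while the package is installed as `antlr4`.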