
Commit

Py runtime: Move to relative imports (#4705)
Signed-off-by: Phil Elson <[email protected]>
pelson authored Nov 22, 2024
1 parent c251d2d commit 2c5cc31
Showing 45 changed files with 234 additions and 232 deletions.
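The change follows one pattern across all 45 files: imports that named the antlr4 package absolutely become package-relative, with modules inside subpackages such as atn/ using .. to reach the package root (see ATN.py below). A minimal sketch of the pattern, for orientation only (the snippet is illustrative and not part of the commit):

# In a module at the package root, e.g. antlr4/CommonTokenStream.py:
# before (absolute, tied to the installed package name):
#   from antlr4.Token import Token
# after (relative, resolved against the containing package):
from .Token import Token

# In a subpackage module, e.g. antlr4/atn/ATN.py, one extra dot
# walks back up to the package root:
from ..Token import Token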
10 changes: 5 additions & 5 deletions runtime/Python3/src/antlr4/BufferedTokenStream.py
@@ -14,8 +8,8 @@
# {@link Token#HIDDEN_CHANNEL}, use a filtering token stream such a
# {@link CommonTokenStream}.</p>
from io import StringIO
from antlr4.Token import Token
from antlr4.error.Errors import IllegalStateException
from .Token import Token
from .error.Errors import IllegalStateException

# need forward declaration
Lexer = None
@@ -230,7 +230,7 @@ def getHiddenTokensToRight(self, tokenIndex:int, channel:int=-1):
self.lazyInit()
if tokenIndex<0 or tokenIndex>=len(self.tokens):
raise Exception(str(tokenIndex) + " not in 0.." + str(len(self.tokens)-1))
from antlr4.Lexer import Lexer
from .Lexer import Lexer
nextOnChannel = self.nextTokenOnChannel(tokenIndex + 1, Lexer.DEFAULT_TOKEN_CHANNEL)
from_ = tokenIndex+1
# if none onchannel to right, nextOnChannel=-1 so set to = last token
@@ -245,7 +245,7 @@ def getHiddenTokensToLeft(self, tokenIndex:int, channel:int=-1):
self.lazyInit()
if tokenIndex<0 or tokenIndex>=len(self.tokens):
raise Exception(str(tokenIndex) + " not in 0.." + str(len(self.tokens)-1))
from antlr4.Lexer import Lexer
from .Lexer import Lexer
prevOnChannel = self.previousTokenOnChannel(tokenIndex - 1, Lexer.DEFAULT_TOKEN_CHANNEL)
if prevOnChannel == tokenIndex - 1:
return None
@@ -260,7 +260,7 @@ def filterForChannel(self, left:int, right:int, channel:int):
for i in range(left, right+1):
t = self.tokens[i]
if channel==-1:
from antlr4.Lexer import Lexer
from .Lexer import Lexer
if t.channel!= Lexer.DEFAULT_TOKEN_CHANNEL:
hidden.append(t)
elif t.channel==channel:
2 changes: 1 addition & 1 deletion runtime/Python3/src/antlr4/CommonTokenFactory.py
@@ -8,7 +8,7 @@
# This default implementation of {@link TokenFactory} creates
# {@link CommonToken} objects.
#
from antlr4.Token import CommonToken
from .Token import CommonToken

class TokenFactory(object):

6 changes: 3 additions & 3 deletions runtime/Python3/src/antlr4/CommonTokenStream.py
@@ -29,9 +29,9 @@
# channel.</p>
#/

from antlr4.BufferedTokenStream import BufferedTokenStream
from antlr4.Lexer import Lexer
from antlr4.Token import Token
from .BufferedTokenStream import BufferedTokenStream
from .Lexer import Lexer
from .Token import Token


class CommonTokenStream(BufferedTokenStream):
2 changes: 1 addition & 1 deletion runtime/Python3/src/antlr4/FileStream.py
@@ -10,7 +10,7 @@
#

import codecs
from antlr4.InputStream import InputStream
from .InputStream import InputStream


class FileStream(InputStream):
2 changes: 1 addition & 1 deletion runtime/Python3/src/antlr4/InputStream.py
@@ -8,7 +8,7 @@
#
# Vacuum all input from a string and then treat it like a buffer.
#
from antlr4.Token import Token
from .Token import Token


class InputStream (object):
2 changes: 1 addition & 1 deletion runtime/Python3/src/antlr4/IntervalSet.py
@@ -5,7 +5,7 @@
#

from io import StringIO
from antlr4.Token import Token
from .Token import Token

# need forward declarations
IntervalSet = None
16 changes: 8 additions & 8 deletions runtime/Python3/src/antlr4/LL1Analyzer.py
@@ -3,14 +3,14 @@
# Use of this file is governed by the BSD 3-clause license that
# can be found in the LICENSE.txt file in the project root.
#/
from antlr4.IntervalSet import IntervalSet
from antlr4.Token import Token
from antlr4.PredictionContext import PredictionContext, SingletonPredictionContext, PredictionContextFromRuleContext
from antlr4.RuleContext import RuleContext
from antlr4.atn.ATN import ATN
from antlr4.atn.ATNConfig import ATNConfig
from antlr4.atn.ATNState import ATNState, RuleStopState
from antlr4.atn.Transition import WildcardTransition, NotSetTransition, AbstractPredicateTransition, RuleTransition
from .IntervalSet import IntervalSet
from .Token import Token
from .PredictionContext import PredictionContext, SingletonPredictionContext, PredictionContextFromRuleContext
from .RuleContext import RuleContext
from .atn.ATN import ATN
from .atn.ATNConfig import ATNConfig
from .atn.ATNState import ATNState, RuleStopState
from .atn.Transition import WildcardTransition, NotSetTransition, AbstractPredicateTransition, RuleTransition


class LL1Analyzer (object):
12 changes: 6 additions & 6 deletions runtime/Python3/src/antlr4/Lexer.py
@@ -15,12 +15,12 @@
from typing import TextIO
else:
from typing.io import TextIO
from antlr4.CommonTokenFactory import CommonTokenFactory
from antlr4.atn.LexerATNSimulator import LexerATNSimulator
from antlr4.InputStream import InputStream
from antlr4.Recognizer import Recognizer
from antlr4.Token import Token
from antlr4.error.Errors import IllegalStateException, LexerNoViableAltException, RecognitionException
from .CommonTokenFactory import CommonTokenFactory
from .atn.LexerATNSimulator import LexerATNSimulator
from .InputStream import InputStream
from .Recognizer import Recognizer
from .Token import Token
from .error.Errors import IllegalStateException, LexerNoViableAltException, RecognitionException

class TokenSource(object):

6 changes: 3 additions & 3 deletions runtime/Python3/src/antlr4/ListTokenSource.py
@@ -12,9 +12,9 @@
# as the EOF token for every call to {@link #nextToken} after the end of the
# list is reached. Otherwise, an EOF token will be created.</p>
#
from antlr4.CommonTokenFactory import CommonTokenFactory
from antlr4.Lexer import TokenSource
from antlr4.Token import Token
from .CommonTokenFactory import CommonTokenFactory
from .Lexer import TokenSource
from .Token import Token


class ListTokenSource(TokenSource):
28 changes: 14 additions & 14 deletions runtime/Python3/src/antlr4/Parser.py
@@ -7,20 +7,20 @@
from typing import TextIO
else:
from typing.io import TextIO
from antlr4.BufferedTokenStream import TokenStream
from antlr4.CommonTokenFactory import TokenFactory
from antlr4.error.ErrorStrategy import DefaultErrorStrategy
from antlr4.InputStream import InputStream
from antlr4.Recognizer import Recognizer
from antlr4.RuleContext import RuleContext
from antlr4.ParserRuleContext import ParserRuleContext
from antlr4.Token import Token
from antlr4.Lexer import Lexer
from antlr4.atn.ATNDeserializer import ATNDeserializer
from antlr4.atn.ATNDeserializationOptions import ATNDeserializationOptions
from antlr4.error.Errors import UnsupportedOperationException, RecognitionException
from antlr4.tree.ParseTreePatternMatcher import ParseTreePatternMatcher
from antlr4.tree.Tree import ParseTreeListener, TerminalNode, ErrorNode
from .BufferedTokenStream import TokenStream
from .CommonTokenFactory import TokenFactory
from .error.ErrorStrategy import DefaultErrorStrategy
from .InputStream import InputStream
from .Recognizer import Recognizer
from .RuleContext import RuleContext
from .ParserRuleContext import ParserRuleContext
from .Token import Token
from .Lexer import Lexer
from .atn.ATNDeserializer import ATNDeserializer
from .atn.ATNDeserializationOptions import ATNDeserializationOptions
from .error.Errors import UnsupportedOperationException, RecognitionException
from .tree.ParseTreePatternMatcher import ParseTreePatternMatcher
from .tree.Tree import ParseTreeListener, TerminalNode, ErrorNode

class TraceListener(ParseTreeListener):
__slots__ = '_parser'
24 changes: 12 additions & 12 deletions runtime/Python3/src/antlr4/ParserInterpreter.py
@@ -17,18 +17,18 @@
#
# See TestParserInterpreter for examples.
#
from antlr4.dfa.DFA import DFA
from antlr4.BufferedTokenStream import TokenStream
from antlr4.Lexer import Lexer
from antlr4.Parser import Parser
from antlr4.ParserRuleContext import InterpreterRuleContext, ParserRuleContext
from antlr4.Token import Token
from antlr4.atn.ATN import ATN
from antlr4.atn.ATNState import StarLoopEntryState, ATNState, LoopEndState
from antlr4.atn.ParserATNSimulator import ParserATNSimulator
from antlr4.PredictionContext import PredictionContextCache
from antlr4.atn.Transition import Transition
from antlr4.error.Errors import RecognitionException, UnsupportedOperationException, FailedPredicateException
from .dfa.DFA import DFA
from .BufferedTokenStream import TokenStream
from .Lexer import Lexer
from .Parser import Parser
from .ParserRuleContext import InterpreterRuleContext, ParserRuleContext
from .Token import Token
from .atn.ATN import ATN
from .atn.ATNState import StarLoopEntryState, ATNState, LoopEndState
from .atn.ParserATNSimulator import ParserATNSimulator
from .PredictionContext import PredictionContextCache
from .atn.Transition import Transition
from .error.Errors import RecognitionException, UnsupportedOperationException, FailedPredicateException


class ParserInterpreter(Parser):
6 changes: 3 additions & 3 deletions runtime/Python3/src/antlr4/ParserRuleContext.py
@@ -25,9 +25,9 @@
# group values such as this aggregate. The getters/setters are there to
# satisfy the superclass interface.

from antlr4.RuleContext import RuleContext
from antlr4.Token import Token
from antlr4.tree.Tree import ParseTreeListener, ParseTree, TerminalNodeImpl, ErrorNodeImpl, TerminalNode, \
from .RuleContext import RuleContext
from .Token import Token
from .tree.Tree import ParseTreeListener, ParseTree, TerminalNodeImpl, ErrorNodeImpl, TerminalNode, \
INVALID_INTERVAL

# need forward declaration
6 changes: 3 additions & 3 deletions runtime/Python3/src/antlr4/PredictionContext.py
@@ -3,9 +3,9 @@
# Use of this file is governed by the BSD 3-clause license that
# can be found in the LICENSE.txt file in the project root.
#/
from antlr4.RuleContext import RuleContext
from antlr4.atn.ATN import ATN
from antlr4.error.Errors import IllegalStateException
from .RuleContext import RuleContext
from .atn.ATN import ATN
from .error.Errors import IllegalStateException
from io import StringIO

# dup ParserATNSimulator class var here to avoid circular import; no idea why this can't be in PredictionContext
10 changes: 5 additions & 5 deletions runtime/Python3/src/antlr4/Recognizer.py
@@ -3,9 +3,9 @@
# Use of this file is governed by the BSD 3-clause license that
# can be found in the LICENSE.txt file in the project root.
#
from antlr4.RuleContext import RuleContext
from antlr4.Token import Token
from antlr4.error.ErrorListener import ProxyErrorListener, ConsoleErrorListener
from .RuleContext import RuleContext
from .Token import Token
from .error.ErrorListener import ProxyErrorListener, ConsoleErrorListener

# need forward delcaration
RecognitionException = None
@@ -52,7 +52,7 @@ def removeErrorListeners(self):
def getTokenTypeMap(self):
tokenNames = self.getTokenNames()
if tokenNames is None:
from antlr4.error.Errors import UnsupportedOperationException
from .error.Errors import UnsupportedOperationException
raise UnsupportedOperationException("The current recognizer does not provide a list of token names.")
result = self.tokenTypeMapCache.get(tokenNames, None)
if result is None:
@@ -68,7 +68,7 @@ def getTokenTypeMap(self):
def getRuleIndexMap(self):
ruleNames = self.getRuleNames()
if ruleNames is None:
from antlr4.error.Errors import UnsupportedOperationException
from .error.Errors import UnsupportedOperationException
raise UnsupportedOperationException("The current recognizer does not provide a list of rule names.")
result = self.ruleIndexMapCache.get(ruleNames, None)
if result is None:
4 changes: 2 additions & 2 deletions runtime/Python3/src/antlr4/RuleContext.py
@@ -25,8 +25,8 @@
# @see ParserRuleContext
#/
from io import StringIO
from antlr4.tree.Tree import RuleNode, INVALID_INTERVAL, ParseTreeVisitor
from antlr4.tree.Trees import Trees
from .tree.Tree import RuleNode, INVALID_INTERVAL, ParseTreeVisitor
from .tree.Trees import Trees

# need forward declarations
RuleContext = None
2 changes: 1 addition & 1 deletion runtime/Python3/src/antlr4/StdinStream.py
@@ -1,7 +1,7 @@
import codecs
import sys

from antlr4.InputStream import InputStream
from .InputStream import InputStream


class StdinStream(InputStream):
4 changes: 2 additions & 2 deletions runtime/Python3/src/antlr4/TokenStreamRewriter.py
@@ -5,9 +5,9 @@
#

from io import StringIO
from antlr4.Token import Token
from .Token import Token

from antlr4.CommonTokenStream import CommonTokenStream
from .CommonTokenStream import CommonTokenStream


class TokenStreamRewriter(object):
42 changes: 21 additions & 21 deletions runtime/Python3/src/antlr4/__init__.py
@@ -1,21 +1,21 @@
from antlr4.Token import Token
from antlr4.InputStream import InputStream
from antlr4.FileStream import FileStream
from antlr4.StdinStream import StdinStream
from antlr4.BufferedTokenStream import TokenStream
from antlr4.CommonTokenStream import CommonTokenStream
from antlr4.Lexer import Lexer
from antlr4.Parser import Parser
from antlr4.dfa.DFA import DFA
from antlr4.atn.ATN import ATN
from antlr4.atn.ATNDeserializer import ATNDeserializer
from antlr4.atn.LexerATNSimulator import LexerATNSimulator
from antlr4.atn.ParserATNSimulator import ParserATNSimulator
from antlr4.atn.PredictionMode import PredictionMode
from antlr4.PredictionContext import PredictionContextCache
from antlr4.ParserRuleContext import RuleContext, ParserRuleContext
from antlr4.tree.Tree import ParseTreeListener, ParseTreeVisitor, ParseTreeWalker, TerminalNode, ErrorNode, RuleNode
from antlr4.error.Errors import RecognitionException, IllegalStateException, NoViableAltException
from antlr4.error.ErrorStrategy import BailErrorStrategy
from antlr4.error.DiagnosticErrorListener import DiagnosticErrorListener
from antlr4.Utils import str_list
from .Token import Token
from .InputStream import InputStream
from .FileStream import FileStream
from .StdinStream import StdinStream
from .BufferedTokenStream import TokenStream
from .CommonTokenStream import CommonTokenStream
from .Lexer import Lexer
from .Parser import Parser
from .dfa.DFA import DFA
from .atn.ATN import ATN
from .atn.ATNDeserializer import ATNDeserializer
from .atn.LexerATNSimulator import LexerATNSimulator
from .atn.ParserATNSimulator import ParserATNSimulator
from .atn.PredictionMode import PredictionMode
from .PredictionContext import PredictionContextCache
from .ParserRuleContext import RuleContext, ParserRuleContext
from .tree.Tree import ParseTreeListener, ParseTreeVisitor, ParseTreeWalker, TerminalNode, ErrorNode, RuleNode
from .error.Errors import RecognitionException, IllegalStateException, NoViableAltException
from .error.ErrorStrategy import BailErrorStrategy
from .error.DiagnosticErrorListener import DiagnosticErrorListener
from .Utils import str_list
9 changes: 2 additions & 7 deletions runtime/Python3/src/antlr4/_pygrun.py
@@ -1,10 +1,9 @@
#!python
__author__ = 'jszheng'
import optparse
import sys
import os
import importlib
from antlr4 import *

from . import *


# this is a python version of TestRig
@@ -165,7 +164,3 @@ def process(input_stream, class_lexer, class_parser):
process(input_stream, class_lexer, class_parser)
else:
print("[ERROR] file {} not exist".format(os.path.normpath(file_name)))


if __name__ == '__main__':
main()
12 changes: 6 additions & 6 deletions runtime/Python3/src/antlr4/atn/ATN.py
@@ -2,13 +2,13 @@
# Use of this file is governed by the BSD 3-clause license that
# can be found in the LICENSE.txt file in the project root.
#/
from antlr4.IntervalSet import IntervalSet
from ..IntervalSet import IntervalSet

from antlr4.RuleContext import RuleContext
from ..RuleContext import RuleContext

from antlr4.Token import Token
from antlr4.atn.ATNType import ATNType
from antlr4.atn.ATNState import ATNState, DecisionState
from ..Token import Token
from ..atn.ATNType import ATNType
from ..atn.ATNState import ATNState, DecisionState


class ATN(object):
@@ -52,7 +52,7 @@ def __init__(self, grammarType:ATNType , maxTokenType:int ):
# the rule surrounding {@code s}. In other words, the set will be
# restricted to tokens reachable staying within {@code s}'s rule.
def nextTokensInContext(self, s:ATNState, ctx:RuleContext):
from antlr4.LL1Analyzer import LL1Analyzer
from ..LL1Analyzer import LL1Analyzer
anal = LL1Analyzer(self)
return anal.LOOK(s, ctx=ctx)


