
Commit

New debugging technique in generating GraphML files of internal data structures.
Paebbels committed Mar 21, 2023
1 parent 5292cf1 commit 9f149a5
Showing 5 changed files with 99 additions and 93 deletions.
12 changes: 6 additions & 6 deletions pyVHDLParser/Blocks/__init__.py
@@ -241,7 +241,7 @@ class BlockIterator:

state: int #: internal states: 0 = normal, 1 = reached stopBlock, 2 = reached EndOfBlock

def __init__(self, startBlock: 'Block', inclusiveStartBlock: bool=False, stopBlock: 'Block'=None):
def __init__(self, startBlock: 'Block', inclusiveStartBlock: bool=False, inclusiveStopBlock: bool=True, stopBlock: 'Block'=None):
self.startBlock = startBlock
self.currentBlock = startBlock if inclusiveStartBlock else startBlock.NextBlock
self.stopBlock = stopBlock
@@ -257,7 +257,7 @@ def __next__(self) -> 'Block':
raise StopIteration(self.state)

block = self.currentBlock
if block is self.stopToken:
if block is self.stopBlock:
self.currentBlock = None
self.state = 1
elif isinstance(self.currentBlock, EndOfBlock):
@@ -341,11 +341,11 @@ def __iter__(self) -> TokenIterator:
"""Returns a token iterator that iterates from :attr:`~Block.StartToken` to :attr:`~Block.EndToken`."""
return TokenIterator(self.StartToken, inclusiveStartToken=True, stopToken=self.EndToken)

def GetIterator(self, stopBlock: 'Block'=None) -> BlockIterator:
return BlockIterator(self, stopBlock=stopBlock)
def GetIterator(self, inclusiveStartBlock: bool = False, inclusiveStopBlock: bool = True, stopBlock: 'Block'=None) -> BlockIterator:
return BlockIterator(self, inclusiveStartBlock=inclusiveStartBlock, inclusiveStopBlock=inclusiveStopBlock, stopBlock=stopBlock)

def GetReverseIterator(self, stopBlock: 'Block'=None) -> BlockReverseIterator:
return BlockReverseIterator(self, stopBlock=stopBlock)
def GetReverseIterator(self, inclusiveStartBlock: bool = False, inclusiveStopBlock: bool = True, stopBlock: 'Block'=None) -> BlockReverseIterator:
return BlockReverseIterator(self, inclusiveStartBlock=inclusiveStartBlock, inclusiveStopBlock=inclusiveStopBlock, stopBlock=stopBlock)

def __str__(self) -> str:
buffer = ""
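
The widened signatures above let call sites decide whether the start and stop blocks themselves are yielded. A minimal, hedged sketch of how the new keyword arguments might be used; `firstBlock` and `lastBlock` are hypothetical Block instances, only the parameter names come from this diff:

    # Hedged sketch, not part of this commit: walk a block chain including the
    # start block but excluding the stop block, using the parameters added above.
    # `firstBlock` and `lastBlock` are hypothetical Block instances.
    iterator = firstBlock.GetIterator(inclusiveStartBlock=True, inclusiveStopBlock=False, stopBlock=lastBlock)
    for block in iterator:
        print(repr(block))
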
30 changes: 29 additions & 1 deletion pyVHDLParser/CLI/Block.py
@@ -31,6 +31,7 @@

from pyAttributes.ArgParseAttributes import CommandAttribute

from .GraphML import GraphML
from ..Base import ParserException
from ..Token import Token, StartOfDocumentToken, EndOfDocumentToken
from ..Token.Parser import Tokenizer
@@ -53,6 +54,8 @@ class BlockStreamHandlers:
def HandleBlockStreaming(self: FrontEndProtocol, args):
self.PrintHeadline()

# self._writeLevel = Severity.Verbose

file = Path(args.Filename)

if not file.exists():
@@ -64,8 +67,22 @@ def HandleBlockStreaming(self: FrontEndProtocol, args):
tokenStream = Tokenizer.GetVHDLTokenizer(content)
blockStream = TokenToBlockParser.Transform(tokenStream)

blockIterator = iter(blockStream)
firstBlock = next(blockIterator)

try:
for block in blockStream:
while next(blockIterator):
pass
except StopIteration:
pass

if isinstance(firstBlock, StartOfDocumentBlock):
print("{YELLOW}{block!r}{NOCOLOR}".format(block=firstBlock, **self.Foreground))
print(" {YELLOW}{token!r}{NOCOLOR}".format(token=firstBlock.StartToken, **self.Foreground))

try:
blockIterator = firstBlock.GetIterator(inclusiveStopBlock=False)
for block in blockIterator:
if isinstance(block, (LinebreakBlock, IndentationBlock)):
self.WriteNormal("{DARK_GRAY}{block!r}{NOCOLOR}".format(block=block, **self.Foreground))
elif isinstance(block, CommentBlock):
@@ -84,11 +101,22 @@ def HandleBlockStreaming(self: FrontEndProtocol, args):
for token in block:
self.WriteVerbose(repr(token))

blockIterator = block.GetIterator()
lastBlock = next(blockIterator)
if isinstance(lastBlock, EndOfDocumentBlock):
print("{YELLOW}{block!r}{NOCOLOR}".format(block=lastBlock, **self.Foreground))
print(" {YELLOW}{token!r}{NOCOLOR}".format(token=lastBlock.StartToken, **self.Foreground))

except ParserException as ex:
print("{RED}ERROR: {0!s}{NOCOLOR}".format(ex, **self.Foreground))
except NotImplementedError as ex:
print("{RED}NotImplementedError: {0!s}{NOCOLOR}".format(ex, **self.Foreground))

exporter = GraphML()
exporter.AddTokenStream(firstBlock.StartToken)
# exporter.AddBlockStream(firstBlock)
exporter.WriteDocument(Path.cwd() / "temp/BlockStream.graphml")

self.exit()

# ----------------------------------------------------------------------------
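
The reworked handler drains the generator-based block stream once so that the doubly linked chain of blocks is completely built, and only then walks that chain again via GetIterator. A hedged sketch of that two-pass pattern outside the CLI; the input file is hypothetical and the TokenToBlockParser import path is an assumption, the remaining calls mirror the handler above:

    # Hedged sketch, not part of this commit: the two-pass pattern from
    # HandleBlockStreaming. "example.vhdl" is a hypothetical input file and the
    # TokenToBlockParser import path is an assumption.
    from pathlib import Path

    from pyVHDLParser.Token.Parser import Tokenizer
    from pyVHDLParser.Blocks import TokenToBlockParser

    content = Path("example.vhdl").read_text()
    tokenStream = Tokenizer.GetVHDLTokenizer(content)
    blockStream = TokenToBlockParser.Transform(tokenStream)

    # First pass: drain the generator so all blocks are created and linked.
    blockIterator = iter(blockStream)
    firstBlock = next(blockIterator)
    try:
        while next(blockIterator):
            pass
    except StopIteration:
        pass

    # Second pass: walk the linked chain again, excluding the closing block.
    for block in firstBlock.GetIterator(inclusiveStopBlock=False):
        print(repr(block))
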
46 changes: 46 additions & 0 deletions pyVHDLParser/CLI/GraphML.py
@@ -0,0 +1,46 @@
from pyTooling.Graph import Graph, Subgraph, Vertex
from pyTooling.Graph.GraphML import GraphMLDocument

from pyVHDLParser.Token import Token


class GraphML:
_graph: Graph

def __init__(self):
self._graph = Graph(name="Streams")

def AddTokenStream(self, firstToken: Token):
subgraph = Subgraph(name="TokenStream", graph=self._graph)

firstVertex = Vertex(vertexID=id(firstToken), value=f"{firstToken!s}", subgraph=subgraph)
firstVertex["order"] = 0
firstVertex["kind"] = type(firstToken).__name__

tokenIterator = firstToken.GetIterator(inclusiveStopToken=False)
for tokenID, token in enumerate(tokenIterator, start=1):
vertex = Vertex(vertexID=id(token), value=f"{token!s}", subgraph=subgraph)
vertex["order"] = tokenID
vertex["kind"] = type(token).__name__

tokenIterator = token.GetIterator()
lastToken = next(tokenIterator)
lastVertex = Vertex(vertexID=id(lastToken), value=f"{lastToken!s}", subgraph=subgraph)
lastVertex["order"] = tokenID + 1
lastVertex["kind"] = type(lastToken).__name__

firstVertex.EdgeToVertex(subgraph._verticesWithID[id(firstToken.NextToken)], edgeID=f"n0_next")
tokenIterator = firstToken.GetIterator(inclusiveStopToken=False)
for tokenID, token in enumerate(tokenIterator, start=1):
vertex = subgraph._verticesWithID[id(token)]
vertex.EdgeToVertex(subgraph._verticesWithID[id(token.PreviousToken)], edgeID=f"n{tokenID}_prev")
vertex.EdgeToVertex(subgraph._verticesWithID[id(token.NextToken)], edgeID=f"n{tokenID}_next")
tokenIterator = token.GetIterator()
lastToken = next(tokenIterator)
lastVertex = subgraph._verticesWithID[id(lastToken)]
lastVertex.EdgeToVertex(subgraph._verticesWithID[id(lastToken.PreviousToken)], edgeID=f"n{tokenID + 1}_prev")

def WriteDocument(self, path):
graphMLDocument = GraphMLDocument("Streams")
graphMLDocument.FromGraph(self._graph)
graphMLDocument.WriteToFile(path)
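
The new exporter is driven from the CLI handlers changed elsewhere in this commit. A hedged, standalone usage sketch; the VHDL snippet and the output location are hypothetical, and draining the token generator before walking the linked tokens mirrors the handler code:

    # Hedged usage sketch, not part of this commit. The VHDL snippet is
    # hypothetical and the temp/ output directory is assumed to exist.
    from pathlib import Path

    from pyVHDLParser.CLI.GraphML import GraphML
    from pyVHDLParser.Token.Parser import Tokenizer

    content = "entity e is\nend entity;\n"
    tokenStream = Tokenizer.GetVHDLTokenizer(content)
    firstToken = next(iter(tokenStream))   # expected to be the StartOfDocumentToken
    for _ in tokenStream:                  # drain the generator so tokens are linked
        pass

    exporter = GraphML()
    exporter.AddTokenStream(firstToken)
    exporter.WriteDocument(Path.cwd() / "temp/TokenStream.graphml")
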
86 changes: 11 additions & 75 deletions pyVHDLParser/CLI/Token.py
@@ -28,16 +28,18 @@
# ==================================================================================================================== #
#
from pathlib import Path
from textwrap import dedent

from pyAttributes.ArgParseAttributes import CommandAttribute
from pyTooling.Graph import Graph, Vertex, Subgraph
from pyTooling.Graph.GraphML import GraphMLDocument

from ..Base import ParserException
from ..Token import StartOfDocumentToken, EndOfDocumentToken, CharacterToken, SpaceToken, WordToken, LinebreakToken, CommentToken, IndentationToken
from ..Token import CharacterTranslation, SingleLineCommentToken
from ..Token.Parser import Tokenizer
from pyVHDLParser.Base import ParserException
from pyVHDLParser.CLI.GraphML import GraphML
from pyVHDLParser.Token import StartOfDocumentToken, EndOfDocumentToken, CharacterToken, SpaceToken, WordToken, LinebreakToken, CommentToken, IndentationToken
from pyVHDLParser.Token import CharacterTranslation, SingleLineCommentToken
from pyVHDLParser.Token.Parser import Tokenizer

from . import FrontEndProtocol, FilenameAttribute, translate
from pyVHDLParser.CLI import FrontEndProtocol, FilenameAttribute, translate


class TokenStreamHandlers:
@@ -94,75 +96,9 @@ def HandleTokenize(self: FrontEndProtocol, args):
except NotImplementedError as ex:
print("{RED}NotImplementedError: {0!s}{NOCOLOR}".format(ex, **self.Foreground))

nodeFormat="t_{line}_{id}"
nodeID = 0
line = 0
node = nodeFormat.format(line=line, id=nodeID)
graphvizBuffer = dedent("""\
digraph TokenStream {{
graph [rankdir=LR splines=ortho]
node [shape=record];
{node} [style=filled, fillcolor=gold, label="{caption}|{{None|None|Next}}"];
""").format(
node=node,
caption=firstToken.__class__.__qualname__
)
lline = 0
sameRanked = [node]
lineStarts = [node]

tokenIterator = firstToken.GetIterator(inclusiveStopToken=False)
for token in tokenIterator:
nodeID += 1
nnode=nodeFormat.format(line=line, id=nodeID)
graphvizBuffer += dedent("""\
{lnode} -> {node};
{node} [style=filled, fillcolor={color}, label="{caption}|{{Prev|{content}|Next}}"];
""").format(
node=nnode,
lnode=node,
color=translate(token),
caption=token.__class__.__qualname__,
content=CharacterTranslation(str(token))
)
node = nnode
if len(sameRanked) == 0:
lineStarts.append(node)
sameRanked.append(node)

if isinstance(token, (LinebreakToken, SingleLineCommentToken)):
# graphvizBuffer += dedent("""\
#
# {{ rank=same {nodes} }}
#
# """).format(nodes=" ".join(sameRanked))

sameRanked = []
line += 1
else:
lline = line

tokenIterator = token.GetIterator()
lastToken = next(tokenIterator)

graphvizBuffer += dedent("""\
t_{lline}_{lid} -> t_{line}_00;
t_{line}_00 [style=filled, fillcolor=gold, label="{caption}|{{Prev|None|None}}"];
{{ rank=same {nodes} }}
}}
""").format(
line=line,
lline=lline,
lid=nodeID - 1,
caption=lastToken.__class__.__qualname__,
nodes=" ".join(lineStarts)
)

gvFile = file.with_suffix('.gv')
with gvFile.open('w') as fileHandle:
fileHandle.write(graphvizBuffer)
exporter = GraphML()
exporter.AddTokenStream(firstToken)
exporter.WriteDocument(Path.cwd() / "temp/TokenStream.graphml")

self.exit()

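
With the handwritten Graphviz emitter removed, the handler now writes temp/TokenStream.graphml through the shared exporter. The file can be opened in any GraphML-capable editor (yEd, for instance) or inspected programmatically; a hedged sketch, assuming networkx is installed and that pyTooling writes the "order"/"kind" vertex attributes as GraphML data keys:

    # Hedged sketch, not part of this commit: read the generated file back with
    # networkx (an assumption; any GraphML-capable tool works). The "kind" and
    # "order" keys assume pyTooling emits the vertex attributes as GraphML data.
    from pathlib import Path

    import networkx as nx

    graph = nx.read_graphml(str(Path.cwd() / "temp/TokenStream.graphml"))
    print(f"{graph.number_of_nodes()} tokens, {graph.number_of_edges()} links")
    for nodeID, data in list(graph.nodes(data=True))[:5]:
        print(nodeID, data.get("kind"), data.get("order"))
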
18 changes: 7 additions & 11 deletions pyVHDLParser/CLI/VHDLParser.py
@@ -69,8 +69,8 @@ class Application(LineTerminal, ArgParseMixin, TokenStreamHandlers, BlockStreamH
# TODO: use pyTooling Platform
__PLATFORM = platform_system()

def __init__(self, debug=False, verbose=False, quiet=False, sphinx=False):
super().__init__(verbose, debug, quiet)
def __init__(self):
super().__init__()

# Late-initialize Block classes
# --------------------------------------------------------------------------
@@ -104,11 +104,6 @@ def __init__(self, *args, **kwargs):
add_help=False
)

# If executed in Sphinx to auto-document CLI arguments, exit now
# --------------------------------------------------------------------------
if sphinx:
return

# Change error and warning reporting
# --------------------------------------------------------------------------
self._LOG_MESSAGE_FORMAT__[Severity.Fatal] = "{DARK_RED}[FATAL] {message}{NOCOLOR}"
@@ -199,13 +194,14 @@ def main(): # mccabe:disable=MC0001
"""
from sys import argv as sys_argv

debug = "-d" in sys_argv
verbose = "-v" in sys_argv
quiet = "-q" in sys_argv

try:
# handover to a class instance
app = Application() # debug, verbose, quiet)
app.Configure(
verbose="-v" in sys_argv,
debug="-d" in sys_argv,
quiet="-q" in sys_argv
)
app.Run()
app.exit()

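
The constructor no longer takes verbosity flags; they are applied after construction through Configure, as main() above shows. A hedged sketch of launching the application programmatically with that sequence; the flag values are hard-coded for illustration:

    # Hedged sketch, not part of this commit: the new start-up sequence from
    # main(), with hard-coded flag values for illustration.
    from pyVHDLParser.CLI.VHDLParser import Application

    app = Application()
    app.Configure(verbose=True, debug=False, quiet=False)
    app.Run()
    app.exit()
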
