2 changes: 1 addition & 1 deletion .vscode/launch.json
@@ -75,7 +75,7 @@
"--outpath",
"./out/",
"--technology",
"mlsi",
"dropx",
"${file}"
],
"console": "integratedTerminal"
114 changes: 114 additions & 0 deletions lfr/api.py
@@ -0,0 +1,114 @@
import os
from pathlib import Path
import sys
from typing import List

from antlr4 import CommonTokenStream, FileStream, ParseTreeWalker
from lfr import parameters
from lfr.antlrgen.lfr.lfrXLexer import lfrXLexer
from lfr.antlrgen.lfr.lfrXParser import lfrXParser
from lfr.moduleinstanceListener import ModuleInstanceListener
from lfr.netlistgenerator.generator import (
generate,
generate_dropx_library,
generate_mars_library,
generate_mlsi_library,
)
from lfr.postProcessListener import PostProcessListener

from lfr.preprocessor import PreProcessor
from lfr.utils import print_netlist, printgraph, serialize_netlist


def compile_lfr(
input_files: List[str],
outpath: str = "out/",
technology: str = "dropx",
library_path: str = "./library",
no_mapping_flag: bool = False,
no_gen_flag: bool = False,
no_annotations_flag: bool = False,
pre_load: List[str] = [],
):
pre_load_file_list = pre_load
print(pre_load_file_list)
    # Use the preprocessor to generate the input file
preprocessor = PreProcessor(input_files, pre_load_file_list)

if preprocessor.check_syntax_errors():
print("Stopping compiler because of syntax errors")
sys.exit(0)

preprocessor.process()

print("output dir:", outpath)
print(input_files)

rel_input_path = "pre_processor_dump.lfr"
input_path = Path(rel_input_path).resolve()

abspath = os.path.abspath(outpath)
parameters.OUTPUT_DIR = abspath

if os.path.isdir(abspath) is not True:
print("Creating the output directory:")
path = Path(parameters.OUTPUT_DIR)
path.mkdir(parents=True)

library = None
# library = libraries[library_name]

    # Modify this to translate the relative path to an absolute path in the future
finput = FileStream(str(input_path))

lexer = lfrXLexer(finput)

stream = CommonTokenStream(lexer)

parser = lfrXParser(stream)

tree = parser.skeleton()

walker = ParseTreeWalker()

if no_annotations_flag is True:
mapping_listener = ModuleInstanceListener()
else:
mapping_listener = PostProcessListener()

walker.walk(mapping_listener, tree)

mapping_listener.print_stack()

mapping_listener.print_variables()

if mapping_listener.currentModule is not None:
interactiongraph = mapping_listener.currentModule.FIG
printgraph(interactiongraph, mapping_listener.currentModule.name + ".dot")

if no_gen_flag is True:
sys.exit(0)

# Check if the module compilation was successful
if mapping_listener.success:
# Now Process the Modules Generated
# V2 generator
if technology == "dropx":
library = generate_dropx_library()
elif technology == "mars":
library = generate_mars_library()
elif technology == "mlsi":
library = generate_mlsi_library()
else:
print("Implement Library for whatever else")
pass

if mapping_listener.currentModule is None:
raise ValueError()
if library is None:
raise ValueError()
unsized_devices = generate(mapping_listener.currentModule, library)

for unsized_device in unsized_devices:
print_netlist(unsized_device)
serialize_netlist(unsized_device)
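The new compile_lfr entry point makes the compiler callable from other Python code instead of only through the CLI. A minimal sketch of programmatic use, assuming an importable lfr package; the input path is a placeholder and only keyword arguments visible in the diff above are passed:

# Sketch only: the .lfr path below is a placeholder, not a file in the repository.
from lfr.api import compile_lfr

compile_lfr(
    input_files=["designs/example.lfr"],  # placeholder input file
    outpath="out/",
    technology="dropx",  # the diff also wires up "mars" and "mlsi" libraries
)
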
117 changes: 21 additions & 96 deletions lfr/cmdline.py
@@ -1,26 +1,11 @@
import argparse
import glob
import json
from lfr.postProcessListener import PostProcessListener
from lfr.api import compile_lfr
import os
import sys
from pathlib import Path

from antlr4 import CommonTokenStream, FileStream, ParseTreeWalker

import lfr.parameters as parameters
from lfr.moduleinstanceListener import ModuleInstanceListener
from lfr.antlrgen.lfr.lfrXLexer import lfrXLexer
from lfr.antlrgen.lfr.lfrXParser import lfrXParser
from lfr.netlistgenerator.mappinglibrary import MappingLibrary
from lfr.netlistgenerator.generator import (
generate,
generate_dropx_library,
generate_mars_library,
generate_mlsi_library,
)
from lfr.utils import print_netlist, printgraph, serialize_netlist
from lfr.preprocessor import PreProcessor
from art import tprint


@@ -84,86 +69,26 @@ def main():
)
args = parser.parse_args()

pre_load_file_list = args.pre_load
# Utilize the prepreocessor to generate the input file
preprocessor = PreProcessor(args.input, pre_load_file_list)

if preprocessor.check_syntax_errors():
print("Stopping compiler because of syntax errors")
sys.exit(0)

preprocessor.process()

print("output dir:", args.outpath)
print(args.input)

rel_input_path = "pre_processor_dump.lfr"
input_path = Path(rel_input_path).resolve()

abspath = os.path.abspath(args.outpath)
parameters.OUTPUT_DIR = abspath

if os.path.isdir(abspath) is not True:
print("Creating the output directory:")
path = Path(parameters.OUTPUT_DIR)
path.mkdir(parents=True)

library = None
# library = libraries[library_name]

# Modifiy this to translate relative path to absolute path in the future
finput = FileStream(str(input_path))

lexer = lfrXLexer(finput)

stream = CommonTokenStream(lexer)

parser = lfrXParser(stream)

tree = parser.skeleton()

walker = ParseTreeWalker()

if args.no_annotations is True:
mapping_listener = ModuleInstanceListener()
else:
mapping_listener = PostProcessListener()

walker.walk(mapping_listener, tree)

mapping_listener.print_stack()

mapping_listener.print_variables()

if mapping_listener.currentModule is not None:
interactiongraph = mapping_listener.currentModule.FIG
printgraph(interactiongraph, mapping_listener.currentModule.name + ".dot")

if args.no_gen is True:
sys.exit(0)

# Check if the module compilation was successful
if mapping_listener.success:
# Now Process the Modules Generated
# V2 generator
if args.technology == "dropx":
library = generate_dropx_library()
elif args.technology == "mars":
library = generate_mars_library()
elif args.technology == "mlsi":
library = generate_mlsi_library()
else:
print("Implement Library for whatever else")
pass

if mapping_listener.currentModule is None:
raise ValueError()
if library is None:
raise ValueError()
unsized_device = generate(mapping_listener.currentModule, library)

print_netlist(unsized_device)
serialize_netlist(unsized_device)
# Generate proxy variables for the parsed args
input_files = args.input
outpath = args.outpath
technology = args.technology
library_path = args.library
no_mapping_flag = args.no_mapping
no_gen_flag = args.no_gen
no_annotations_flag = args.no_annotations
pre_load = args.pre_load

compile_lfr(
input_files=input_files,
outpath=outpath,
technology=technology,
library_path=library_path,
no_mapping_flag=no_mapping_flag,
no_gen_flag=no_gen_flag,
no_annotations_flag=no_annotations_flag,
pre_load=pre_load,
)


if __name__ == "__main__":
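With main() reduced to argument parsing plus a single compile_lfr call, the compile path can now be exercised without going through argparse. A sketch of a parse-only smoke test this refactor enables; the test and the fixture path are hypothetical, not part of the repository:

# Hypothetical pytest sketch; tests/fixtures/example.lfr is a placeholder path.
import pytest

from lfr.api import compile_lfr


def test_parse_only_smoke():
    # no_gen_flag=True makes compile_lfr call sys.exit(0) right after the
    # parse/walk stage, so this exercises parsing without netlist generation.
    with pytest.raises(SystemExit):
        compile_lfr(
            input_files=["tests/fixtures/example.lfr"],
            no_gen_flag=True,
        )
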
18 changes: 15 additions & 3 deletions lfr/graphmatch/interface.py
@@ -1,6 +1,6 @@
from lfr.fig.annotation import DistributeAnnotation
from lfr.graphmatch.figmappingmatcher import FIGMappingMatcher
from typing import Any, Dict, FrozenSet, List, Tuple
from typing import Any, Dict, FrozenSet, List, Optional, Tuple
from lfr.netlistgenerator.mappinglibrary import MappingLibrary
from lfr.fig.fluidinteractiongraph import FluidInteractionGraph
from lfr.graphmatch.matchpattern import MatchPattern
@@ -12,7 +12,7 @@ def bijective_match_node_constraints(
semantic_information: Dict[str, NodeFilter],
subgraph: Dict[str, str],
) -> bool:
# TODO - Check if the constraints match for the subgraph
# Check if the constraints match for the subgraph
# STEP 1 - generate new unique names for each node to simplify the matching
    # algorithm

@@ -177,7 +177,7 @@

def get_fig_matches(
fig: FluidInteractionGraph, library: MappingLibrary
) -> List[Tuple[str, Any]]:
) -> List[Tuple[str, Dict[str, str]]]:
patterns: Dict[
str, MatchPattern
] = dict() # Store the mint and the match pattern object here
@@ -269,3 +269,15 @@ def get_fig_matches(
continue

return ret


def generate_single_match(
fig_subgraph, library_entry
) -> Optional[Tuple[str, Dict[str, str]]]:

    # TODO - Using the fig subgraph view, test whether the subgraph is a structural
    # match for the technology entry from the mapping library; pass back the match
    # tuple if it is, and figure out how to handle the non-matching case separately.
    # Don't enable node filters for this step: enabling them will cause the match to fail.

return ("test", {"test": "test"})
@@ -1,7 +1,7 @@
from copy import copy
from typing import Dict, List, Optional, Set, Tuple

from networkx import nx
import networkx as nx
from networkx.algorithms import isomorphism
from networkx.classes.digraph import DiGraph
from pymint.mintcomponent import MINTComponent
@@ -20,7 +20,7 @@
)


class ConstructionGraph(nx.DiGraph):
class OLDConstructionGraph(nx.DiGraph):
"""Construction Graph is the proxy datastructure that we use for representing the
loose connections between the fluid interaction graph and the real hardware
design primitives that would be pieced together.
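Two fixes land in this hunk: from networkx import nx is replaced with the standard aliased import (recent networkx releases do not expose an importable nx member), and the existing class is renamed to OLDConstructionGraph. A minimal sketch of the corrected import driving a DiGraph subclass; the class, method, and attribute names here are illustrative only:

# Sketch only: ConstructionGraphSketch is a toy stand-in, not repository code.
import networkx as nx


class ConstructionGraphSketch(nx.DiGraph):
    """Toy subclass showing the corrected import in use."""

    def add_primitive(self, name: str, **attrs) -> None:
        # Primitives are ordinary DiGraph nodes carrying attribute metadata.
        self.add_node(name, **attrs)


cg = ConstructionGraphSketch()
cg.add_primitive("match_0", technology="dropx")
print(list(cg.nodes(data=True)))  # [('match_0', {'technology': 'dropx'})]
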
Empty file.