
Commit ff32826

pybricksdev.compile: fix parsing of imports
This fixes incorrectly including imports that are commented out or inside docstrings. There is a standard Python module (modulefinder) for discovering imports, so we don't have to maintain our own implementation. This also removes support for relative imports, since Python doesn't support them outside of packages.
1 parent c719eb4 commit ff32826
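
For readers unfamiliar with the standard library module used here, the sketch below shows how modulefinder discovers a script's imports; because it inspects compiled bytecode rather than matching text, imports that only appear in comments or docstrings are never picked up. The script path is a made-up example, not part of this commit.

import os
from modulefinder import ModuleFinder

# Hypothetical script; any local module next to it (e.g. module2.py) is found.
script = "demo/multidemo.py"

# Restrict the search path to the script's own directory so only modules
# shipped alongside the script are resolved.
finder = ModuleFinder([os.path.dirname(script)])
finder.run_script(script)

# Modules that were actually imported by executable code, including __main__.
for name, module in finder.modules.items():
    print(name, module.__file__)

# Imports that could not be resolved locally, e.g. pybricks.* packages that
# only exist on the hub's MicroPython firmware.
print("missing:", finder.any_missing())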

File tree

2 files changed: 17 additions, 56 deletions

demo/multidemo.py

Lines changed: 1 addition & 1 deletion
@@ -1,7 +1,7 @@
 from pybricks.hubs import ThisHub
 from pybricks.parameters import Color
 from pybricks.tools import wait
-from .module2 import nice_color
+from module2 import nice_color
 
 hub = ThisHub()
 hub.light.on(Color.RED)
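
The switch to an absolute import follows from the last point in the commit message: a script run on its own has no parent package, so a relative import cannot be resolved. A minimal illustration, assuming a module2.py sitting next to the script:

try:
    # With no parent package (the script is run directly), this fails:
    from .module2 import nice_color
except ImportError as err:
    # e.g. "attempted relative import with no known parent package"
    print(err)

# The absolute form works because the script's directory is on sys.path,
# assuming module2.py sits next to this script:
from module2 import nice_color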

pybricksdev/compile.py

Lines changed: 16 additions & 55 deletions
@@ -4,9 +4,7 @@
 import asyncio
 import logging
 import os
-import re
-
-from pathlib import Path
+from modulefinder import ModuleFinder
 from typing import List, Optional
 
 import mpy_cross_v5
@@ -114,61 +112,24 @@ async def compile_multi_file(path: str, abi: int):
         subprocess.CalledProcessError: if executing the ``mpy-cross`` tool failed.
     """
 
-    # Make the build directory
-    make_build_dir()
+    # compile files using Python to find imports contained within the same directory as path
+    finder = ModuleFinder([os.path.dirname(path)])
+    finder.run_script(path)
 
-    # Directory where main and dependencies are located
-    source_dir = Path(path).parent
-
-    # Set of all dependencies
-    dependencies = set()
-    not_found = set()
-
-    # Find all dependencies recursively
-    def find_dependencies(module_name):
-        try:
-            path = source_dir / Path(*module_name.split(".")).with_suffix(".py")
-            with open(path) as source:
-                # Search non-recursively through current module
-                local_dependencies = set()
-                for line in source:
-                    # from my_module import thing1, thing2 ---> my_module
-                    if result := re.search("from (.*) import (.*)", line):
-                        local_dependencies.add(result.group(1))
-                    # import my_module ---> my_module
-                    elif result := re.search("import (.*)", line):
-                        local_dependencies.add(result.group(1))
-
-            # Add each file that wasn't already done, and find its
-            # dependencies.
-            for dep in local_dependencies.difference(dependencies):
-                if dep not in dependencies:
-                    dependencies.add(dep)
-                    find_dependencies(dep)
-        # Some modules are stored on the hub so we can't find them here.
-        except FileNotFoundError:
-            not_found.add(module_name)
-
-    # Start searching from the top level.
-    main_module = Path(path).stem
-    find_dependencies(main_module)
-
-    # Subtract the (builtin or missing) modules we won't upload.
-    dependencies = dependencies.difference(not_found)
-
-    # Get the total tuple of main programs and module
-    modules = [main_module] + sorted(tuple(dependencies))
+    # we expect missing modules, namely builtin MicroPython packages like pybricks.*
+    logger.debug("missing modules: %r", finder.any_missing())
 
     # Get a data blob with all scripts.
-    blob = bytearray([])
-    for module in modules:
-        name = module.encode() + b"\x00"
-        mpy = await compile_file(
-            source_dir / Path(*module.split(".")).with_suffix(".py"), abi
-        )
-        size = len(mpy).to_bytes(4, "little")
-        blob += size + name + mpy
-    return blob
+    parts: List[bytes] = []
+
+    for name, module in finder.modules.items():
+        mpy = await compile_file(module.__file__, abi)
+
+        parts.append(len(mpy).to_bytes(4, "little"))
+        parts.append(name.encode() + b"\x00")
+        parts.append(mpy)
+
+    return b"".join(parts)
 
 
 def save_script(py_string):
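
For reference, the returned blob concatenates one record per module: a 4-byte little-endian length of the compiled .mpy, the module name terminated by a zero byte, then the .mpy bytes themselves. The decoder below is only a sketch of that framing; iter_blob is a hypothetical helper for illustration, not part of pybricksdev.

from typing import Iterator, Tuple

def iter_blob(blob: bytes) -> Iterator[Tuple[str, bytes]]:
    """Split a multi-file blob into (module name, mpy bytes) pairs.

    Assumes the framing produced above: 4-byte little-endian size,
    null-terminated module name, then `size` bytes of compiled code.
    """
    offset = 0
    while offset < len(blob):
        size = int.from_bytes(blob[offset:offset + 4], "little")
        offset += 4
        end = blob.index(b"\x00", offset)
        name = blob[offset:end].decode()
        offset = end + 1
        yield name, blob[offset:offset + size]
        offset += size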
