gemini-code-assist[bot] commented on code in PR #295:
URL: https://github.com/apache/tvm-ffi/pull/295#discussion_r2629201735


##########
python/tvm_ffi/stub/cli.py:
##########
@@ -55,79 +45,207 @@ def __main__() -> int:
     overview and examples of the block syntax.
     """
     opt = _parse_args()
+    for imp in opt.imports or []:
+        importlib.import_module(imp)
     dlls = [ctypes.CDLL(lib) for lib in opt.dlls]
     files: list[FileInfo] = collect_files([Path(f) for f in opt.files])
+    global_funcs: dict[str, list[FuncInfo]] = collect_global_funcs()
+    init_path: Path | None = None
+    if opt.files:
+        init_path = Path(opt.files[0]).resolve()
+        if init_path.is_file():
+            init_path = init_path.parent
 
-    # Stage 1: Process `tvm-ffi-stubgen(ty-map)`
+    # Stage 1: Collect information
+    # - type maps: `tvm-ffi-stubgen(ty-map)`
+    # - defined global functions: `tvm-ffi-stubgen(begin): global/...`
+    # - defined object types: `tvm-ffi-stubgen(begin): object/...`
     ty_map: dict[str, str] = C.TY_MAP_DEFAULTS.copy()
-
-    def _stage_1(file: FileInfo) -> None:
-        for code in file.code_blocks:
-            if code.kind == "ty-map":
-                try:
-                    lhs, rhs = code.param.split("->")
-                except ValueError as e:
-                    raise ValueError(
-                        f"Invalid ty_map format at line {code.lineno_start}. Example: `A.B -> C.D`"
-                    ) from e
-                ty_map[lhs.strip()] = rhs.strip()
-
     for file in files:
         try:
-            _stage_1(file)
+            _stage_1(file, ty_map)
         except Exception:
             print(
                f'{C.TERM_RED}[Failed] File "{file.path}": {traceback.format_exc()}{C.TERM_RESET}'
             )
 
-    # Stage 2: Process
+    # Stage 2. Generate stubs if they are not defined on the file.
+    if opt.init:
+        assert init_path is not None, "init-path could not be determined"
+        _stage_2(
+            files,
+            ty_map,
+            init_cfg=opt.init,
+            init_path=init_path,
+            global_funcs=global_funcs,
+        )
+
+    # Stage 3: Process
     # - `tvm-ffi-stubgen(begin): global/...`
     # - `tvm-ffi-stubgen(begin): object/...`
-    global_funcs = collect_global_funcs()
-
-    def _stage_2(file: FileInfo) -> None:
-        all_defined = set()
+    for file in files:
         if opt.verbose:
             print(f"{C.TERM_CYAN}[File] {file.path}{C.TERM_RESET}")
-        ty_used: set[str] = set()
-        ty_on_file: set[str] = set()
-        fn_ty_map_fn = _fn_ty_map(ty_map, ty_used)
-        # Stage 2.1. Process `tvm-ffi-stubgen(begin): global/...`
-        for code in file.code_blocks:
-            if code.kind == "global":
-                funcs = global_funcs.get(code.param, [])
-                for func in funcs:
-                    all_defined.add(func.schema.name)
-                G.generate_global_funcs(code, funcs, fn_ty_map_fn, opt)
-        # Stage 2.2. Process `tvm-ffi-stubgen(begin): object/...`
-        for code in file.code_blocks:
-            if code.kind == "object":
-                type_key = code.param
-                ty_on_file.add(ty_map.get(type_key, type_key))
-                G.generate_object(code, fn_ty_map_fn, opt)
-        # Stage 2.3. Add imports for used types.
-        for code in file.code_blocks:
-            if code.kind == "import":
-                G.generate_imports(code, ty_used - ty_on_file, opt)
-                break  # Only one import block per file is supported for now.
-        # Stage 2.4. Add `__all__` for defined classes and functions.
-        for code in file.code_blocks:
-            if code.kind == "__all__":
-                G.generate_all(code, all_defined | ty_on_file, opt)
-                break  # Only one __all__ block per file is supported for now.
-        file.update(show_diff=opt.verbose, dry_run=opt.dry_run)
-
-    for file in files:
         try:
-            _stage_2(file)
-        except:
+            _stage_3(file, opt, ty_map, global_funcs)
+        except Exception:
             print(
                f'{C.TERM_RED}[Failed] File "{file.path}": {traceback.format_exc()}{C.TERM_RESET}'
             )
     del dlls
     return 0
 
 
+def _stage_1(
+    file: FileInfo,
+    ty_map: dict[str, str],
+) -> None:
+    for code in file.code_blocks:
+        if code.kind == "ty-map":
+            try:
+                assert isinstance(code.param, str)
+                lhs, rhs = code.param.split("->")
+            except ValueError as e:
+                raise ValueError(
+                    f"Invalid ty_map format at line {code.lineno_start}. Example: `A.B -> C.D`"
+                ) from e
+            ty_map[lhs.strip()] = rhs.strip()
+
+
+def _stage_2(
+    files: list[FileInfo],
+    ty_map: dict[str, str],
+    init_cfg: InitConfig,
+    init_path: Path,
+    global_funcs: dict[str, list[FuncInfo]],
+) -> None:
+    def _find_or_insert_file(path: Path) -> FileInfo:
+        ret: FileInfo | None
+        if not path.exists():
+            ret = FileInfo(path=path, lines=(), code_blocks=[])
+        else:
+            for file in files:
+                if path.samefile(file.path):
+                    return file
+            ret = FileInfo.from_file(file=path, include_empty=True)
+            assert ret is not None, f"Failed to read file: {path}"
+        files.append(ret)
+        return ret
+
+    # Step 0. Find out functions and classes already defined on files.
+    defined_func_prefixes: set[str] = {  # type: ignore[union-attr]
+        code.param[0] for file in files for code in file.code_blocks if code.kind == "global"
+    }
+    defined_objs: set[str] = {  # type: ignore[assignment]
+        code.param for file in files for code in file.code_blocks if code.kind == "object"
+    } | C.BUILTIN_TYPE_KEYS
+
+    # Step 0. Generate missing `_ffi_api.py` and `__init__.py` under each prefix.
+    prefixes: dict[str, list[str]] = collect_type_keys()
+    for prefix in global_funcs:
+        prefixes.setdefault(prefix, [])
+
+    root_ffi_api_py = init_path / init_cfg.prefix.rstrip(".") / "_ffi_api.py"
+    for prefix, obj_names in prefixes.items():
+        # TODO(@junrushao): control the prefix to generate stubs for
+        if prefix.startswith("testing") or prefix.startswith("ffi"):
+            continue
+        funcs = sorted(
+            [] if prefix in defined_func_prefixes else global_funcs.get(prefix, []),
+            key=lambda f: f.schema.name,
+        )
+        objs = sorted(set(obj_names) - defined_objs)
+        object_infos = collect_object_infos(objs)
+        if not funcs and not object_infos:
+            continue
+        # Step 1. Create target directory if not exists
+        directory = init_path / prefix.replace(".", "/")
+        directory.mkdir(parents=True, exist_ok=True)
+        # Step 2. Generate `_ffi_api.py`
+        target_path = directory / "_ffi_api.py"
+        target_file = _find_or_insert_file(target_path)
+        with target_path.open("a", encoding="utf-8") as f:
+            f.write(
+                G.generate_ffi_api(
+                    target_file.code_blocks,
+                    ty_map,
+                    prefix,
+                    object_infos,
+                    init_cfg,
+                    is_root=root_ffi_api_py.samefile(target_path),
+                )
+            )
+        target_file.reload()
+        # Step 3. Generate `__init__.py`
+        target_path = directory / "__init__.py"
+        target_file = _find_or_insert_file(target_path)
+        with target_path.open("a", encoding="utf-8") as f:
+            f.write(G.generate_init(target_file.code_blocks, prefix, submodule="_ffi_api"))
+        target_file.reload()
+
+
+def _stage_3(  # noqa: PLR0912
+    file: FileInfo,
+    opt: Options,
+    ty_map: dict[str, str],
+    global_funcs: dict[str, list[FuncInfo]],
+) -> None:
+    defined_funcs: set[str] = set()
+    defined_types: set[str] = set()
+    imports: list[ImportItem] = []
+    ffi_load_lib_imported = False
+    # Stage 1. Collect `tvm-ffi-stubgen(import-object): ...`
+    for code in file.code_blocks:
+        if code.kind == "import-object":
+            name, type_checking_only, alias = code.param  # type: ignore[misc]
+            imports.append(
+                ImportItem(
+                    name,
+                    type_checking_only=(type_checking_only != "False" and bool(type_checking_only)),
+                    alias=alias if alias else None,
+                )
+            )

Review Comment:
   ![medium](https://www.gstatic.com/codereviewagent/medium-priority.svg)
   
   The logic for parsing the `type_checking_only` boolean from a string is a 
bit subtle. It relies on `bool('False')` being `True` and the `!= 'False'` 
check to get the correct behavior, which may not be immediately obvious to 
readers.
   
   For better clarity and to make the intent more explicit, I suggest using a 
direct, case-insensitive comparison to `'true'`, which is easier to understand 
at a glance.
   
   ```suggestion
               imports.append(
                   ImportItem(
                       name,
                       type_checking_only=type_checking_only.lower() == "true",
                       alias=alias if alias else None,
                   )
               )
   ```
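
   For a quick sanity check of the two parsings, a standalone snippet (illustrative only; the string values are hypothetical examples, not taken from the codebase) behaves as follows:

   ```python
   # Any non-empty string is truthy, so bool() alone cannot express "False" -> False.
   print(bool("False"))                                                # True

   # Current expression: correct only because of the explicit != "False" guard.
   type_checking_only = "False"
   print(type_checking_only != "False" and bool(type_checking_only))   # False

   # Suggested expression: explicit, case-insensitive comparison.
   print("True".lower() == "true")                                     # True
   print("False".lower() == "true")                                    # False
   ```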



##########
python/tvm_ffi/stub/lib_state.py:
##########
@@ -0,0 +1,115 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""Stateful helpers for querying TVM FFI runtime metadata."""
+
+from __future__ import annotations
+
+import functools
+from collections import defaultdict
+
+from tvm_ffi._ffi_api import GetRegisteredTypeKeys
+from tvm_ffi.core import TypeSchema, _lookup_or_register_type_info_from_type_key
+from tvm_ffi.registry import get_global_func_metadata, list_global_func_names
+
+from . import consts as C
+from .utils import FuncInfo, NamedTypeSchema, ObjectInfo
+
+
+def func_info_from_global_name(name: str) -> FuncInfo:
+    """Construct a `FuncInfo` from a global function name."""
+    return FuncInfo(
+        schema=NamedTypeSchema(
+            name=name,
+            schema=TypeSchema.from_json_str(get_global_func_metadata(name)["type_schema"]),
+        ),
+        is_member=False,
+    )
+
+
[email protected]_cache(maxsize=None)
+def object_info_from_type_key(type_key: str) -> ObjectInfo:
+    """Construct an `ObjectInfo` from an object type key."""
+    type_info = _lookup_or_register_type_info_from_type_key(str(type_key))
+    assert type_info.type_key == type_key
+    return ObjectInfo.from_type_info(type_info)
+
+
+def collect_global_funcs() -> dict[str, list[FuncInfo]]:
+    """Collect global functions from TVM FFI's global registry."""
+    global_funcs: dict[str, list[FuncInfo]] = {}
+    for name in list_global_func_names():
+        try:
+            prefix, _ = name.rsplit(".", 1)
+        except ValueError:
+            print(f"{C.TERM_YELLOW}[Skipped] Invalid name in global function: {name}{C.TERM_RESET}")
+        else:
+            try:
+                global_funcs.setdefault(prefix, []).append(func_info_from_global_name(name))
+            except Exception:
+                print(f"{C.TERM_YELLOW}[Skipped] Function has no type schema: {name}{C.TERM_RESET}")
+    for k in list(global_funcs.keys()):
+        global_funcs[k].sort(key=lambda x: x.schema.name)
+    return global_funcs
+
+
+def collect_type_keys() -> dict[str, list[str]]:
+    """Collect registered object type keys from TVM FFI's global registry."""
+    global_objects: dict[str, list[str]] = {}
+    for type_key in GetRegisteredTypeKeys():
+        try:
+            prefix, _ = type_key.rsplit(".", 1)
+        except ValueError:
+            pass
+        else:
+            global_objects.setdefault(prefix, []).append(type_key)
+    for k in list(global_objects.keys()):
+        global_objects[k].sort()
+    return global_objects
+
+
+def collect_object_infos(type_keys: list[str]) -> list[ObjectInfo]:
+    """Collect ObjectInfo objects for type keys, topologically sorted by inheritance."""
+    # Remove duplicates while preserving order.
+    unique_type_keys = list(dict.fromkeys(type_keys))
+    infos: dict[str, ObjectInfo] = {
+        type_key: object_info_from_type_key(type_key) for type_key in unique_type_keys
+    }
+
+    child_types: dict[str, list[str]] = defaultdict(list)
+    in_degree: dict[str, int] = defaultdict(int)
+    for type_key, info in infos.items():
+        parent_type_key = info.parent_type_key
+        if parent_type_key in infos:
+            child_types[parent_type_key].append(type_key)
+            in_degree[type_key] += 1
+            in_degree[parent_type_key] += 0
+        else:
+            in_degree[type_key] += 0
+
+    queue: list[str] = sorted([ty for ty, deg in in_degree.items() if deg == 0])
+    sorted_keys: list[str] = []
+    while queue:
+        type_key = queue.pop(0)
+        sorted_keys.append(type_key)
+        for child_type_key in sorted(child_types[type_key]):
+            in_degree[child_type_key] -= 1
+            if in_degree[child_type_key] == 0:
+                queue.append(child_type_key)
+                queue.sort()

Review Comment:
   ![medium](https://www.gstatic.com/codereviewagent/medium-priority.svg)
   
   In the topological sort implementation, `queue.sort()` is called within the 
loop every time an element is added. For a large number of types, repeatedly 
sorting the entire list can be inefficient (O(n log n) for each sort).
   
   To improve performance, I recommend using the `heapq` module (a min-heap) 
for the queue. This will reduce the complexity of adding items to the queue 
from O(n log n) to O(log n), making the overall topological sort more efficient.
   
   You'll need to add `import heapq` at the top of the file.
   
   ```suggestion
       queue: list[str] = [ty for ty, deg in in_degree.items() if deg == 0]
       heapq.heapify(queue)
       sorted_keys: list[str] = []
       while queue:
           type_key = heapq.heappop(queue)
           sorted_keys.append(type_key)
           for child_type_key in sorted(child_types[type_key]):
               in_degree[child_type_key] -= 1
               if in_degree[child_type_key] == 0:
                   heapq.heappush(queue, child_type_key)
   ```
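
   As a small standalone check (illustrative only, with made-up type keys), the min-heap pops keys in the same lexicographic order that the current `sorted()`-based queue produces, so the output stays deterministic:

   ```python
   import heapq

   queue = ["ffi.Module", "ffi.Array", "ffi.Function"]
   heapq.heapify(queue)          # O(n) to build the heap in place
   order = []
   while queue:
       order.append(heapq.heappop(queue))   # O(log n) per pop, smallest first
   print(order)  # ['ffi.Array', 'ffi.Function', 'ffi.Module']
   ```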



##########
python/tvm_ffi/stub/cli.py:
##########
@@ -55,79 +45,207 @@ def __main__() -> int:
     overview and examples of the block syntax.
     """
     opt = _parse_args()
+    for imp in opt.imports or []:
+        importlib.import_module(imp)
     dlls = [ctypes.CDLL(lib) for lib in opt.dlls]
     files: list[FileInfo] = collect_files([Path(f) for f in opt.files])
+    global_funcs: dict[str, list[FuncInfo]] = collect_global_funcs()
+    init_path: Path | None = None
+    if opt.files:
+        init_path = Path(opt.files[0]).resolve()
+        if init_path.is_file():
+            init_path = init_path.parent
 
-    # Stage 1: Process `tvm-ffi-stubgen(ty-map)`
+    # Stage 1: Collect information
+    # - type maps: `tvm-ffi-stubgen(ty-map)`
+    # - defined global functions: `tvm-ffi-stubgen(begin): global/...`
+    # - defined object types: `tvm-ffi-stubgen(begin): object/...`
     ty_map: dict[str, str] = C.TY_MAP_DEFAULTS.copy()
-
-    def _stage_1(file: FileInfo) -> None:
-        for code in file.code_blocks:
-            if code.kind == "ty-map":
-                try:
-                    lhs, rhs = code.param.split("->")
-                except ValueError as e:
-                    raise ValueError(
-                        f"Invalid ty_map format at line {code.lineno_start}. Example: `A.B -> C.D`"
-                    ) from e
-                ty_map[lhs.strip()] = rhs.strip()
-
     for file in files:
         try:
-            _stage_1(file)
+            _stage_1(file, ty_map)
         except Exception:
             print(
                f'{C.TERM_RED}[Failed] File "{file.path}": {traceback.format_exc()}{C.TERM_RESET}'
             )
 
-    # Stage 2: Process
+    # Stage 2. Generate stubs if they are not defined on the file.
+    if opt.init:
+        assert init_path is not None, "init-path could not be determined"
+        _stage_2(
+            files,
+            ty_map,
+            init_cfg=opt.init,
+            init_path=init_path,
+            global_funcs=global_funcs,
+        )
+
+    # Stage 3: Process
     # - `tvm-ffi-stubgen(begin): global/...`
     # - `tvm-ffi-stubgen(begin): object/...`
-    global_funcs = collect_global_funcs()
-
-    def _stage_2(file: FileInfo) -> None:
-        all_defined = set()
+    for file in files:
         if opt.verbose:
             print(f"{C.TERM_CYAN}[File] {file.path}{C.TERM_RESET}")
-        ty_used: set[str] = set()
-        ty_on_file: set[str] = set()
-        fn_ty_map_fn = _fn_ty_map(ty_map, ty_used)
-        # Stage 2.1. Process `tvm-ffi-stubgen(begin): global/...`
-        for code in file.code_blocks:
-            if code.kind == "global":
-                funcs = global_funcs.get(code.param, [])
-                for func in funcs:
-                    all_defined.add(func.schema.name)
-                G.generate_global_funcs(code, funcs, fn_ty_map_fn, opt)
-        # Stage 2.2. Process `tvm-ffi-stubgen(begin): object/...`
-        for code in file.code_blocks:
-            if code.kind == "object":
-                type_key = code.param
-                ty_on_file.add(ty_map.get(type_key, type_key))
-                G.generate_object(code, fn_ty_map_fn, opt)
-        # Stage 2.3. Add imports for used types.
-        for code in file.code_blocks:
-            if code.kind == "import":
-                G.generate_imports(code, ty_used - ty_on_file, opt)
-                break  # Only one import block per file is supported for now.
-        # Stage 2.4. Add `__all__` for defined classes and functions.
-        for code in file.code_blocks:
-            if code.kind == "__all__":
-                G.generate_all(code, all_defined | ty_on_file, opt)
-                break  # Only one __all__ block per file is supported for now.
-        file.update(show_diff=opt.verbose, dry_run=opt.dry_run)
-
-    for file in files:
         try:
-            _stage_2(file)
-        except:
+            _stage_3(file, opt, ty_map, global_funcs)
+        except Exception:
             print(
                f'{C.TERM_RED}[Failed] File "{file.path}": {traceback.format_exc()}{C.TERM_RESET}'
             )
     del dlls
     return 0
 
 
+def _stage_1(
+    file: FileInfo,
+    ty_map: dict[str, str],
+) -> None:
+    for code in file.code_blocks:
+        if code.kind == "ty-map":
+            try:
+                assert isinstance(code.param, str)
+                lhs, rhs = code.param.split("->")
+            except ValueError as e:
+                raise ValueError(
+                    f"Invalid ty_map format at line {code.lineno_start}. Example: `A.B -> C.D`"
+                ) from e
+            ty_map[lhs.strip()] = rhs.strip()
+
+
+def _stage_2(
+    files: list[FileInfo],
+    ty_map: dict[str, str],
+    init_cfg: InitConfig,
+    init_path: Path,
+    global_funcs: dict[str, list[FuncInfo]],
+) -> None:
+    def _find_or_insert_file(path: Path) -> FileInfo:
+        ret: FileInfo | None
+        if not path.exists():
+            ret = FileInfo(path=path, lines=(), code_blocks=[])
+        else:
+            for file in files:
+                if path.samefile(file.path):
+                    return file
+            ret = FileInfo.from_file(file=path, include_empty=True)
+            assert ret is not None, f"Failed to read file: {path}"
+        files.append(ret)
+        return ret
+
+    # Step 0. Find out functions and classes already defined on files.
+    defined_func_prefixes: set[str] = {  # type: ignore[union-attr]
+        code.param[0] for file in files for code in file.code_blocks if code.kind == "global"
+    }
+    defined_objs: set[str] = {  # type: ignore[assignment]
+        code.param for file in files for code in file.code_blocks if code.kind == "object"
+    } | C.BUILTIN_TYPE_KEYS
+
+    # Step 0. Generate missing `_ffi_api.py` and `__init__.py` under each prefix.
+    prefixes: dict[str, list[str]] = collect_type_keys()
+    for prefix in global_funcs:
+        prefixes.setdefault(prefix, [])
+
+    root_ffi_api_py = init_path / init_cfg.prefix.rstrip(".") / "_ffi_api.py"
+    for prefix, obj_names in prefixes.items():
+        # TODO(@junrushao): control the prefix to generate stubs for
+        if prefix.startswith("testing") or prefix.startswith("ffi"):
+            continue
+        funcs = sorted(
+            [] if prefix in defined_func_prefixes else global_funcs.get(prefix, []),
+            key=lambda f: f.schema.name,
+        )
+        objs = sorted(set(obj_names) - defined_objs)
+        object_infos = collect_object_infos(objs)
+        if not funcs and not object_infos:
+            continue
+        # Step 1. Create target directory if not exists
+        directory = init_path / prefix.replace(".", "/")
+        directory.mkdir(parents=True, exist_ok=True)
+        # Step 2. Generate `_ffi_api.py`
+        target_path = directory / "_ffi_api.py"
+        target_file = _find_or_insert_file(target_path)
+        with target_path.open("a", encoding="utf-8") as f:
+            f.write(
+                G.generate_ffi_api(
+                    target_file.code_blocks,
+                    ty_map,
+                    prefix,
+                    object_infos,
+                    init_cfg,
+                    is_root=root_ffi_api_py.samefile(target_path),
+                )
+            )
+        target_file.reload()
+        # Step 3. Generate `__init__.py`
+        target_path = directory / "__init__.py"
+        target_file = _find_or_insert_file(target_path)
+        with target_path.open("a", encoding="utf-8") as f:
+            f.write(G.generate_init(target_file.code_blocks, prefix, submodule="_ffi_api"))
+        target_file.reload()
+
+
+def _stage_3(  # noqa: PLR0912
+    file: FileInfo,
+    opt: Options,
+    ty_map: dict[str, str],
+    global_funcs: dict[str, list[FuncInfo]],
+) -> None:
+    defined_funcs: set[str] = set()
+    defined_types: set[str] = set()
+    imports: list[ImportItem] = []
+    ffi_load_lib_imported = False
+    # Stage 1. Collect `tvm-ffi-stubgen(import-object): ...`
+    for code in file.code_blocks:
+        if code.kind == "import-object":
+            name, type_checking_only, alias = code.param  # type: ignore[misc]
+            imports.append(
+                ImportItem(
+                    name,
+                    type_checking_only=(type_checking_only != "False" and bool(type_checking_only)),
+                    alias=alias if alias else None,
+                )
+            )
+            if (alias and alias == "_FFI_LOAD_LIB") or name.endswith("libinfo.load_lib_module"):
+                ffi_load_lib_imported = True
+    # Stage 2. Process `tvm-ffi-stubgen(begin): global/...`
+    for code in file.code_blocks:
+        if code.kind == "global":
+            funcs = global_funcs.get(code.param[0], [])
+            for func in funcs:
+                defined_funcs.add(func.schema.name)
+            G.generate_global_funcs(code, funcs, ty_map, imports, opt)
+    # Stage 3. Process `tvm-ffi-stubgen(begin): object/...`
+    for code in file.code_blocks:
+        if code.kind == "object":
+            type_key = code.param
+            assert isinstance(type_key, str)
+            obj_info = object_info_from_type_key(type_key)
+            type_key = ty_map.get(type_key, type_key)
+            full_name = ImportItem(type_key).full_name
+            defined_types.add(full_name)
+            G.generate_object(code, ty_map, imports, opt, obj_info)
+    # Stage 4. Add imports for used types.
+    imports = [i for i in imports if i.full_name not in defined_types]
+    for code in file.code_blocks:
+        if code.kind == "import-section":
+            G.generate_import_section(code, imports, opt)
+            break  # Only one import block per file is supported for now.
+    # Stage 5. Add `__all__` for defined classes and functions.
+    for code in file.code_blocks:
+        if code.kind == "__all__":
+            export_names = defined_funcs | defined_types
+            if ffi_load_lib_imported:
+                export_names = export_names | {"LIB"}
+            G.generate_all(code, export_names, opt)
+            break  # Only one __all__ block per file is supported for now.
+    # Stage 6. Process `tvm-ffi-stubgen(begin): export/...`
+    for code in file.code_blocks:
+        if code.kind == "export":
+            G.generate_export(code)
+    # Finalize: write back to file
+    file.update(verbose=opt.verbose, dry_run=opt.dry_run)
+

Review Comment:
   ![medium](https://www.gstatic.com/codereviewagent/medium-priority.svg)
   
   The `_stage_3` function is quite long and complex, as indicated by the
`# noqa: PLR0912` for too many branches. It handles several distinct processing
steps for a file.
   
   To improve maintainability and readability, consider refactoring this 
function by breaking it down into smaller, more focused helper functions. Each 
of the logical stages commented in the code (e.g., collecting imports, 
processing global blocks, processing object blocks) could become its own 
function. This would make the code easier to understand, test, and modify in 
the future.
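
   A possible decomposition, sketched here with hypothetical helper names (not part of the PR) and reusing only identifiers already present in the diff, might look like:

   ```python
   # Sketch only: each stage of _stage_3 becomes a focused helper; the bodies
   # reuse the existing logic from the PR, and _stage_3 becomes a thin orchestrator.
   def _collect_import_items(file: FileInfo, imports: list[ImportItem]) -> bool:
       """Stage 1: collect `import-object` blocks; return True if load_lib is imported."""
       ffi_load_lib_imported = False
       for code in file.code_blocks:
           if code.kind == "import-object":
               name, type_checking_only, alias = code.param
               imports.append(
                   ImportItem(
                       name,
                       type_checking_only=(type_checking_only != "False" and bool(type_checking_only)),
                       alias=alias if alias else None,
                   )
               )
               if (alias and alias == "_FFI_LOAD_LIB") or name.endswith("libinfo.load_lib_module"):
                   ffi_load_lib_imported = True
       return ffi_load_lib_imported


   def _generate_global_blocks(
       file: FileInfo,
       opt: Options,
       ty_map: dict[str, str],
       global_funcs: dict[str, list[FuncInfo]],
       imports: list[ImportItem],
   ) -> set[str]:
       """Stage 2: expand `global/...` blocks; return the generated function names."""
       defined_funcs: set[str] = set()
       for code in file.code_blocks:
           if code.kind == "global":
               funcs = global_funcs.get(code.param[0], [])
               defined_funcs.update(f.schema.name for f in funcs)
               G.generate_global_funcs(code, funcs, ty_map, imports, opt)
       return defined_funcs


   def _stage_3(file, opt, ty_map, global_funcs):
       imports: list[ImportItem] = []
       ffi_load_lib_imported = _collect_import_items(file, imports)
       defined_funcs = _generate_global_blocks(file, opt, ty_map, global_funcs, imports)
       # ... object blocks, the import section, __all__, and export blocks would follow
       # the same pattern, each in its own helper, before file.update(...) at the end.
   ```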



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

