llvmbot wrote:
@llvm/pr-subscribers-lldb

Author: John Harrison (ashgti)

<details>
<summary>Changes</summary>

…sistency. (#143818)"

This reverts commit 362b9d78b4ee9107da2b5e90b3764b0f0fa610fe.

Buildbots using python3.10 are running into errors from this change.

---

Patch is 65.43 KiB, truncated to 20.00 KiB below, full version: https://github.com/llvm/llvm-project/pull/144616.diff

7 Files Affected:

- (modified) lldb/packages/Python/lldbsuite/test/tools/lldb-dap/dap_server.py (+348-527)
- (modified) lldb/packages/Python/lldbsuite/test/tools/lldb-dap/lldbdap_testcase.py (+33-46)
- (modified) lldb/test/API/tools/lldb-dap/breakpoint/TestDAP_setBreakpoints.py (+2-3)
- (modified) lldb/test/API/tools/lldb-dap/cancel/TestDAP_cancel.py (+5-5)
- (modified) lldb/test/API/tools/lldb-dap/launch/TestDAP_launch.py (+6-6)
- (modified) lldb/test/API/tools/lldb-dap/module/TestDAP_module.py (+1-1)
- (modified) lldb/test/API/tools/lldb-dap/output/TestDAP_output.py (+2-2)


``````````diff
diff --git a/lldb/packages/Python/lldbsuite/test/tools/lldb-dap/dap_server.py b/lldb/packages/Python/lldbsuite/test/tools/lldb-dap/dap_server.py
index 23178a215206e..6d32491eaa5e9 100644
--- a/lldb/packages/Python/lldbsuite/test/tools/lldb-dap/dap_server.py
+++ b/lldb/packages/Python/lldbsuite/test/tools/lldb-dap/dap_server.py
@@ -10,124 +10,17 @@
 import subprocess
 import signal
 import sys
-from dataclasses import dataclass
 import threading
 import time
-from typing import (
-    IO,
-    Any,
-    Callable,
-    Dict,
-    List,
-    Optional,
-    Tuple,
-    TypeVar,
-    Generic,
-    TypedDict,
-    Union,
-    BinaryIO,
-    TextIO,
-    Literal,
-    cast,
-)
+from typing import Any, Optional, Union, BinaryIO, TextIO
 
 ## DAP type references
-
-T = TypeVar("T")
-Te = TypeVar("Te")  # Generic type for event body
-Ta = TypeVar("Ta")  # Generic type for request arguments
-Tb = TypeVar("Tb")  # Generic type for response body
-
-
-class Event(Generic[Te], TypedDict):
-    type: Literal["event"]
-    seq: int
-    event: str
-    body: Optional[Te]
-
-
-class Request(Generic[Ta], TypedDict, total=False):
-    type: Literal["request"]
-    seq: int
-    command: str
-    arguments: Ta
-
-
-class Response(Generic[Tb], TypedDict):
-    type: Literal["response"]
-    seq: int
-    request_seq: int
-    success: bool
-    command: str
-    message: Optional[str]
-    body: Optional[Tb]
-
-
+Event = dict[str, Any]
+Request = dict[str, Any]
+Response = dict[str, Any]
 ProtocolMessage = Union[Event, Request, Response]
 
-class AttachOrLaunchArguments(TypedDict, total=False):
-    stopOnEntry: bool
-    disableASLR: bool
-    disableSTDIO: bool
-    enableAutoVariableSummaries: bool
-    displayExtendedBacktrace: bool
-    enableSyntheticChildDebugging: bool
-    initCommands: List[str]
-    preRunCommands: List[str]
-    postRunCommands: List[str]
-    stopCommands: List[str]
-    exitCommands: List[str]
-    terminateCommands: List[str]
-    sourceMap: Union[List[Tuple[str, str]], Dict[str, str]]
-    sourcePath: str
-    debuggerRoot: str
-    commandEscapePrefix: str
-    customFrameFormat: str
-    customThreadFormat: str
-
-
-class LaunchArguments(AttachOrLaunchArguments, total=False):
-    program: str
-    args: List[str]
-    cwd: str
-    env: Dict[str, str]
-    shellExpandArguments: bool
-    runInTerminal: bool
-    launchCommands: List[str]
-
-
-# Using the function form of TypedDict to allow for hyphenated keys.
-AttachGdbServer = TypedDict(
-    "AttachGdbServer", {"gdb-remote-port": int, "gdb-remote-hostname": str}, total=False
-)
-
-
-class AttachArguments(AttachGdbServer, AttachOrLaunchArguments, total=False):
-    program: str
-    pid: int
-    waitFor: bool
-    attachCommands: List[str]
-    coreFile: str
-
-
-class BreakpointData(TypedDict, total=False):
-    column: int
-    condition: str
-    hitCondition: str
-    logMessage: str
-    mode: str
-
-
-class SourceBreakpoint(BreakpointData):
-    line: int
-
-
-class Breakpoint(TypedDict, total=False):
-    id: int
-    verified: bool
-
-
 def dump_memory(base_addr, data, num_per_line, outfile):
     data_len = len(data)
     hex_string = binascii.hexlify(data)
@@ -165,9 +58,7 @@ def dump_memory(base_addr, data, num_per_line, outfile):
         outfile.write("\n")
 
 
-def read_packet(
-    f: IO[bytes], trace_file: Optional[IO[str]] = None
-) -> Optional[ProtocolMessage]:
+def read_packet(f, verbose=False, trace_file=None):
     """Decode a JSON packet that starts with the content length and is
     followed by the JSON bytes from a file 'f'. Returns None on EOF.
     """
@@ -179,20 +70,32 @@ def read_packet(
     prefix = "Content-Length: "
     if line.startswith(prefix):
         # Decode length of JSON bytes
+        if verbose:
+            print('content: "%s"' % (line))
         length = int(line[len(prefix) :])
+        if verbose:
+            print('length: "%u"' % (length))
         # Skip empty line
-        line = f.readline().decode()
+        line = f.readline()
+        if verbose:
+            print('empty: "%s"' % (line))
         # Read JSON bytes
         json_str = f.read(length)
+        if verbose:
+            print('json: "%s"' % (json_str))
         if trace_file:
-            trace_file.write(f"from adapter:\n{json_str!r}\n")
+            trace_file.write("from adapter:\n%s\n" % (json_str))
         # Decode the JSON bytes into a python dictionary
         return json.loads(json_str)
 
     raise Exception("unexpected malformed message from lldb-dap: " + line)
 
 
-def dump_dap_log(log_file: Optional[str]) -> None:
+def packet_type_is(packet, packet_type):
+    return "type" in packet and packet["type"] == packet_type
+
+
+def dump_dap_log(log_file):
     print("========= DEBUG ADAPTER PROTOCOL LOGS =========", file=sys.stderr)
     if log_file is None:
         print("no log file available", file=sys.stderr)
@@ -202,30 +105,34 @@ def dump_dap_log(log_file: Optional[str]) -> None:
     print("========= END =========", file=sys.stderr)
 
 
-@dataclass
-class Source:
-    path: Optional[str]
-    source_reference: Optional[int]
-
-    @property
-    def name(self) -> Optional[str]:
-        if not self.path:
-            return None
-        return os.path.basename(self.path)
-
+class Source(object):
     def __init__(
         self, path: Optional[str] = None, source_reference: Optional[int] = None
     ):
-        if path is None and source_reference is None:
+        self._name = None
+        self._path = None
+        self._source_reference = None
+
+        if path is not None:
+            self._name = os.path.basename(path)
+            self._path = path
+        elif source_reference is not None:
+            self._source_reference = source_reference
+        else:
             raise ValueError("Either path or source_reference must be provided")
 
-        self.path = path
-        self.source_reference = source_reference
+    def __str__(self):
+        return f"Source(name={self.name}, path={self.path}), source_reference={self.source_reference})"
 
-    def to_DAP(self) -> dict:
-        if self.path:
-            return {"path": self.path, "name": self.name}
-        return {"sourceReference": self.source_reference}
+    def as_dict(self):
+        source_dict = {}
+        if self._name is not None:
+            source_dict["name"] = self._name
+        if self._path is not None:
+            source_dict["path"] = self._path
+        if self._source_reference is not None:
+            source_dict["sourceReference"] = self._source_reference
+        return source_dict
 
 
 class NotSupportedError(KeyError):
@@ -237,7 +144,7 @@ def __init__(
         self,
         recv: BinaryIO,
         send: BinaryIO,
-        init_commands: List[str],
+        init_commands: list[str],
         log_file: Optional[TextIO] = None,
     ):
         # For debugging test failures, try setting `trace_file = sys.stderr`.
@@ -245,50 +152,35 @@ def __init__(
         self.log_file = log_file
        self.send = send
         self.recv = recv
-        # Packets that have been received and processed but have not yet been
-        # requested by a test case.
-        self._pending_packets: List[Optional[ProtocolMessage]] = []
-        # Received packets that have not yet been processed.
-        self._recv_packets: List[Optional[ProtocolMessage]] = []
-        # Used as a mutex for _recv_packets and for notify when _recv_packets
-        # changes.
-        self._recv_condition = threading.Condition()
-        self._recv_thread = threading.Thread(target=self._read_packet_thread)
-
-        # session state
-        self.init_commands = init_commands
+        self.recv_packets: list[Optional[ProtocolMessage]] = []
+        self.recv_condition = threading.Condition()
+        self.recv_thread = threading.Thread(target=self._read_packet_thread)
+        self.process_event_body = None
         self.exit_status: Optional[int] = None
-        self.capabilities: Optional[Dict] = None
-        self.initialized: bool = False
-        self.configuration_done_sent: bool = False
-        self.process_event_body: Optional[Dict] = None
-        self.terminated: bool = False
-        self.events: List[Event] = []
-        self.progress_events: List[Event] = []
-        self.reverse_requests: List[Request] = []
-        self.module_events: List[Dict] = []
-        self.sequence: int = 1
-        self.output: Dict[str, str] = {}
-
-        # debuggee state
-        self.threads: Optional[dict] = None
-        self.thread_stop_reasons: Dict[str, Any] = {}
-        self.frame_scopes: Dict[str, Any] = {}
-        # keyed by breakpoint id
-        self.resolved_breakpoints: Dict[str, bool] = {}
-
-        # trigger enqueue thread
-        self._recv_thread.start()
+        self.capabilities: dict[str, Any] = {}
+        self.progress_events: list[Event] = []
+        self.reverse_requests = []
+        self.sequence = 1
+        self.threads = None
+        self.thread_stop_reasons = {}
+        self.recv_thread.start()
+        self.output_condition = threading.Condition()
+        self.output: dict[str, list[str]] = {}
+        self.configuration_done_sent = False
+        self.initialized = False
+        self.frame_scopes = {}
+        self.init_commands = init_commands
+        self.resolved_breakpoints = {}
 
     @classmethod
     def encode_content(cls, s: str) -> bytes:
         return ("Content-Length: %u\r\n\r\n%s" % (len(s), s)).encode("utf-8")
 
     @classmethod
-    def validate_response(cls, request: Request, response: Response) -> None:
-        if request["command"] != response["command"]:
+    def validate_response(cls, command, response):
+        if command["command"] != response["command"]:
             raise ValueError("command mismatch in response")
-        if request["seq"] != response["request_seq"]:
+        if command["seq"] != response["request_seq"]:
             raise ValueError("seq mismatch in response")
 
     def _read_packet_thread(self):
@@ -297,323 +189,262 @@ def _read_packet_thread(self):
             while not done:
                 packet = read_packet(self.recv, trace_file=self.trace_file)
                 # `packet` will be `None` on EOF. We want to pass it down to
-                # handle_recv_packet anyway so the main thread can handle
-                # unexpected termination of lldb-dap and stop waiting for new
-                # packets.
+                # handle_recv_packet anyway so the main thread can handle unexpected
+                # termination of lldb-dap and stop waiting for new packets.
                 done = not self._handle_recv_packet(packet)
         finally:
             dump_dap_log(self.log_file)
 
-    def _handle_recv_packet(self, packet: Optional[ProtocolMessage]) -> bool:
-        """Handles an incoming packet.
+    def get_modules(self):
+        module_list = self.request_modules()["body"]["modules"]
+        modules = {}
+        for module in module_list:
+            modules[module["name"]] = module
+        return modules
 
-        Called by the read thread that is waiting for all incoming packets
-        to store the incoming packet in "self._recv_packets" in a thread safe
-        way. This function will then signal the "self._recv_condition" to
-        indicate a new packet is available.
+    def get_output(self, category, timeout=0.0, clear=True):
+        self.output_condition.acquire()
+        output = None
+        if category in self.output:
+            output = self.output[category]
+            if clear:
+                del self.output[category]
+        elif timeout != 0.0:
+            self.output_condition.wait(timeout)
+            if category in self.output:
+                output = self.output[category]
+                if clear:
+                    del self.output[category]
+        self.output_condition.release()
+        return output
 
-        Args:
-            packet: A new packet to store.
+    def collect_output(self, category, timeout_secs, pattern, clear=True):
+        end_time = time.time() + timeout_secs
+        collected_output = ""
+        while end_time > time.time():
+            output = self.get_output(category, timeout=0.25, clear=clear)
+            if output:
+                collected_output += output
+                if pattern is not None and pattern in output:
+                    break
+        return collected_output if collected_output else None
+
+    def _enqueue_recv_packet(self, packet: Optional[ProtocolMessage]):
+        self.recv_condition.acquire()
+        self.recv_packets.append(packet)
+        self.recv_condition.notify()
+        self.recv_condition.release()
 
-        Returns:
-            True if the caller should keep calling this function for more
-            packets.
-        """
-        with self._recv_condition:
-            self._recv_packets.append(packet)
-            self._recv_condition.notify()
-            # packet is None on EOF
-            return packet is not None and not (
-                packet["type"] == "response" and packet["command"] == "disconnect"
-            )
-
-    def _recv_packet(
-        self,
-        *,
-        predicate: Optional[Callable[[ProtocolMessage], bool]] = None,
-        timeout: Optional[float] = None,
-    ) -> Optional[ProtocolMessage]:
-        """Processes received packets from the adapter.
-
-        Updates the DebugCommunication stateful properties based on the received
-        packets in the order they are received.
-
-        NOTE: The only time the session state properties should be updated is
-        during this call to ensure consistency during tests.
-
-        Args:
-            predicate:
-                Optional, if specified, returns the first packet that matches
-                the given predicate.
-            timeout:
-                Optional, if specified, processes packets until either the
-                timeout occurs or the predicate matches a packet, whichever
-                occurs first.
-
-        Returns:
-            The first matching packet for the given predicate, if specified,
-            otherwise None.
+    def _handle_recv_packet(self, packet: Optional[ProtocolMessage]) -> bool:
+        """Called by the read thread that is waiting for all incoming packets
+        to store the incoming packet in "self.recv_packets" in a thread safe
+        way. This function will then signal the "self.recv_condition" to
+        indicate a new packet is available. Returns True if the caller
+        should keep calling this function for more packets.
         """
-        assert (
-            threading.current_thread != self._recv_thread
-        ), "Must not be called from the _recv_thread"
-
-        def process_until_match():
-            self._process_recv_packets()
-            for i, packet in enumerate(self._pending_packets):
-                if packet is None:
-                    # We need to return a truthy value to break out of the
-                    # wait_for, use `EOFError` as an indicator of EOF.
-                    return EOFError()
-                if predicate and predicate(packet):
-                    self._pending_packets.pop(i)
-                    return packet
-
-        with self._recv_condition:
-            packet = self._recv_condition.wait_for(process_until_match, timeout)
-            return None if isinstance(packet, EOFError) else packet
-
-    def _process_recv_packets(self) -> None:
-        """Process received packets, updating the session state."""
-        with self._recv_condition:
-            for packet in self._recv_packets:
-                # Handle events that may modify any stateful properties of
-                # the DAP session.
-                if packet and packet["type"] == "event":
-                    self._handle_event(packet)
-                elif packet and packet["type"] == "request":
-                    # Handle reverse requests and keep processing.
-                    self._handle_reverse_request(packet)
-                # Move the packet to the pending queue.
-                self._pending_packets.append(packet)
-            self._recv_packets.clear()
-
-    def _handle_event(self, packet: Event) -> None:
-        """Handle any events that modify debug session state we track."""
-        event = packet["event"]
-        body: Optional[Dict] = packet.get("body", None)
-
-        if event == "output" and body:
-            # Store any output we receive so clients can retrieve it later.
-            category = body["category"]
-            output = body["output"]
-            if category in self.output:
-                self.output[category] += output
-            else:
-                self.output[category] = output
-        elif event == "initialized":
-            self.initialized = True
-        elif event == "process":
-            # When a new process is attached or launched, remember the
-            # details that are available in the body of the event
-            self.process_event_body = body
-        elif event == "exited" and body:
-            # Process exited, mark the status to indicate the process is not
-            # alive.
-            self.exit_status = body["exitCode"]
-        elif event == "continued" and body:
-            # When the process continues, clear the known threads and
-            # thread_stop_reasons.
-            all_threads_continued = body.get("allThreadsContinued", True)
-            tid = body["threadId"]
-            if tid in self.thread_stop_reasons:
-                del self.thread_stop_reasons[tid]
-            self._process_continued(all_threads_continued)
-        elif event == "stopped" and body:
-            # Each thread that stops with a reason will send a
-            # 'stopped' event. We need to remember the thread stop
-            # reasons since the 'threads' command doesn't return
-            # that information.
-            self._process_stopped()
-            tid = body["threadId"]
-            self.thread_stop_reasons[tid] = body
-        elif event.startswith("progress"):
-            # Progress events come in as 'progressStart', 'progressUpdate',
-            # and 'progressEnd' events. Keep these around in case test
-            # cases want to verify them.
-            self.progress_events.append(packet)
-        elif event == "breakpoint" and body:
-            # Breakpoint events are sent when a breakpoint is resolved
-            self._update_verified_breakpoints([body["breakpoint"]])
-        elif event == "capabilities" and body:
-            if self.capabilities is None:
-                self.capabilities = {}
-            # Update the capabilities with new ones from the event.
-            self.capabilities.update(body["capabilities"])
-
-    def _handle_reverse_request(self, request: Request) -> None:
-        if request in self.reverse_requests:
-            return
-        self.reverse_requests.append(request)
-        arguments = request.get("arguments")
-        if request["command"] == "runInTerminal" and arguments is not None:
-            in_shell = arguments.get("argsCanBeInterpretedByShell", False)
-            proc = subprocess.Popen(
-                arguments["args"],
-                env=arguments.get("env", {}),
-                cwd=arguments["cwd"],
-                stdin=subprocess.DEVNULL,
-                stdout=subprocess.DEVNULL,
-                stderr=subprocess.DEVNULL,
-                shell=in_shell,
-            )
-            body = {}
-            if in_shell:
-                body["shellProcessId"] = proc.pid
-            else:
-                body["processId"] = proc.pid
-            self.send_packet(
-                {
-                    "type": "response",
-                    "seq": 0,
-                    "request_seq": request["seq"],
-                    "success": True,
-                    "command": "runInTerminal",
-                    "message": None,
-                    "body": body,
-                }
-            )
-        elif request["command"] == "startDebugging":
-            self.send_packet(
-                {
-                    "type": "response",
-                    "seq": 0,
-                    "request_seq": request["seq"],
-                    "success": True,
-                    "message": None,
-                    "command": "startDebugging",
-                    "body": {},
-                ... [truncated]
``````````

</details>

https://github.com/llvm/llvm-project/pull/144616
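For context on the failure mode: the definitions this revert removes are generic `TypedDict` classes (the `Event`, `Request`, and `Response` types in the diff above). Generic `TypedDict`s were only added to `typing` in Python 3.11, so on a Python 3.10 interpreter the class definition itself raises a `TypeError` at import time, which is consistent with the buildbot errors given as the reason for the revert. A minimal sketch of the pattern (illustrative only, not part of the patch):

```python
# Mirrors the removed dap_server.py definitions. On Python 3.10 creating this
# class raises TypeError (TypedDict cannot be combined with a non-TypedDict
# base such as Generic); on Python 3.11+ the same definition is accepted.
from typing import Generic, Literal, Optional, TypedDict, TypeVar

Te = TypeVar("Te")  # Generic type for event body


class Event(Generic[Te], TypedDict):
    type: Literal["event"]
    seq: int
    event: str
    body: Optional[Te]
```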