1"""Module for supporting unit testing of the lldb-server debug monitor exe. 2""" 3 4from __future__ import division, print_function 5 6import binascii 7import os 8import os.path 9import platform 10import re 11import six 12import socket 13import subprocess 14from lldbsuite.support import seven 15from lldbsuite.test.lldbtest import * 16from lldbsuite.test import configuration 17from textwrap import dedent 18import shutil 19 20def _get_support_exe(basename): 21 support_dir = lldb.SBHostOS.GetLLDBPath(lldb.ePathTypeSupportExecutableDir) 22 23 return shutil.which(basename, path=support_dir.GetDirectory()) 24 25 26def get_lldb_server_exe(): 27 """Return the lldb-server exe path. 28 29 Returns: 30 A path to the lldb-server exe if it is found to exist; otherwise, 31 returns None. 32 """ 33 34 return _get_support_exe("lldb-server") 35 36 37def get_debugserver_exe(): 38 """Return the debugserver exe path. 39 40 Returns: 41 A path to the debugserver exe if it is found to exist; otherwise, 42 returns None. 43 """ 44 if configuration.arch and configuration.arch == "x86_64" and \ 45 platform.machine().startswith("arm64"): 46 return '/Library/Apple/usr/libexec/oah/debugserver' 47 48 return _get_support_exe("debugserver") 49 50_LOG_LINE_REGEX = re.compile(r'^(lldb-server|debugserver)\s+<\s*(\d+)>' + 51 '\s+(read|send)\s+packet:\s+(.+)$') 52 53 54def _is_packet_lldb_gdbserver_input(packet_type, llgs_input_is_read): 55 """Return whether a given packet is input for lldb-gdbserver. 56 57 Args: 58 packet_type: a string indicating 'send' or 'receive', from a 59 gdbremote packet protocol log. 60 61 llgs_input_is_read: true if lldb-gdbserver input (content sent to 62 lldb-gdbserver) is listed as 'read' or 'send' in the packet 63 log entry. 64 65 Returns: 66 True if the packet should be considered input for lldb-gdbserver; False 67 otherwise. 68 """ 69 if packet_type == 'read': 70 # when llgs is the read side, then a read packet is meant for 71 # input to llgs (when captured from the llgs/debugserver exe). 72 return llgs_input_is_read 73 elif packet_type == 'send': 74 # when llgs is the send side, then a send packet is meant to 75 # be input to llgs (when captured from the lldb exe). 76 return not llgs_input_is_read 77 else: 78 # don't understand what type of packet this is 79 raise "Unknown packet type: {}".format(packet_type) 80 81 82_STRIP_CHECKSUM_REGEX = re.compile(r'#[0-9a-fA-F]{2}$') 83_STRIP_COMMAND_PREFIX_REGEX = re.compile(r"^\$") 84_STRIP_COMMAND_PREFIX_M_REGEX = re.compile(r"^\$m") 85 86 87def assert_packets_equal(asserter, actual_packet, expected_packet): 88 # strip off the checksum digits of the packet. When we're in 89 # no-ack mode, the # checksum is ignored, and should not be cause 90 # for a mismatched packet. 91 actual_stripped = _STRIP_CHECKSUM_REGEX.sub('', actual_packet) 92 expected_stripped = _STRIP_CHECKSUM_REGEX.sub('', expected_packet) 93 asserter.assertEqual(actual_stripped, expected_stripped) 94 95 96def expect_lldb_gdbserver_replay( 97 asserter, 98 server, 99 test_sequence, 100 timeout_seconds, 101 logger=None): 102 """Replay socket communication with lldb-gdbserver and verify responses. 103 104 Args: 105 asserter: the object providing assertEqual(first, second, msg=None), e.g. TestCase instance. 106 107 test_sequence: a GdbRemoteTestSequence instance that describes 108 the messages sent to the gdb remote and the responses 109 expected from it. 110 111 timeout_seconds: any response taking more than this number of 112 seconds will cause an exception to be raised. 113 114 logger: a Python logger instance. 115 116 Returns: 117 The context dictionary from running the given gdbremote 118 protocol sequence. This will contain any of the capture 119 elements specified to any GdbRemoteEntry instances in 120 test_sequence. 121 122 The context will also contain an entry, context["O_content"] 123 which contains the text from the inferior received via $O 124 packets. $O packets should not attempt to be matched 125 directly since they are not entirely deterministic as to 126 how many arrive and how much text is in each one. 127 128 context["O_count"] will contain an integer of the number of 129 O packets received. 130 """ 131 132 # Ensure we have some work to do. 133 if len(test_sequence.entries) < 1: 134 return {} 135 136 context = {"O_count": 0, "O_content": ""} 137 138 # Grab the first sequence entry. 139 sequence_entry = test_sequence.entries.pop(0) 140 141 # While we have an active sequence entry, send messages 142 # destined for the stub and collect/match/process responses 143 # expected from the stub. 144 while sequence_entry: 145 if sequence_entry.is_send_to_remote(): 146 # This is an entry to send to the remote debug monitor. 147 send_packet = sequence_entry.get_send_packet() 148 if logger: 149 if len(send_packet) == 1 and send_packet[0] == chr(3): 150 packet_desc = "^C" 151 else: 152 packet_desc = send_packet 153 logger.info( 154 "sending packet to remote: {}".format(packet_desc)) 155 server.send_raw(send_packet.encode()) 156 else: 157 # This is an entry expecting to receive content from the remote 158 # debug monitor. 159 160 # We'll pull from (and wait on) the queue appropriate for the type of matcher. 161 # We keep separate queues for process output (coming from non-deterministic 162 # $O packet division) and for all other packets. 163 try: 164 if sequence_entry.is_output_matcher(): 165 # Grab next entry from the output queue. 166 content = server.get_raw_output_packet() 167 else: 168 content = server.get_raw_normal_packet() 169 content = seven.bitcast_to_string(content) 170 except socket.timeout: 171 asserter.fail( 172 "timed out while waiting for '{}':\n{}".format(sequence_entry, server)) 173 174 # Give the sequence entry the opportunity to match the content. 175 # Output matchers might match or pass after more output accumulates. 176 # Other packet types generally must match. 177 asserter.assertIsNotNone(content) 178 context = sequence_entry.assert_match( 179 asserter, content, context=context) 180 181 # Move on to next sequence entry as needed. Some sequence entries support executing multiple 182 # times in different states (for looping over query/response 183 # packets). 184 if sequence_entry.is_consumed(): 185 if len(test_sequence.entries) > 0: 186 sequence_entry = test_sequence.entries.pop(0) 187 else: 188 sequence_entry = None 189 190 # Fill in the O_content entries. 191 context["O_count"] = 1 192 context["O_content"] = server.consume_accumulated_output() 193 194 return context 195 196 197def gdbremote_hex_encode_string(str): 198 output = '' 199 for c in str: 200 output += '{0:02x}'.format(ord(c)) 201 return output 202 203 204def gdbremote_hex_decode_string(str): 205 return str.decode("hex") 206 207 208def gdbremote_packet_encode_string(str): 209 checksum = 0 210 for c in str: 211 checksum += ord(c) 212 return '$' + str + '#{0:02x}'.format(checksum % 256) 213 214 215def build_gdbremote_A_packet(args_list): 216 """Given a list of args, create a properly-formed $A packet containing each arg. 217 """ 218 payload = "A" 219 220 # build the arg content 221 arg_index = 0 222 for arg in args_list: 223 # Comma-separate the args. 224 if arg_index > 0: 225 payload += ',' 226 227 # Hex-encode the arg. 228 hex_arg = gdbremote_hex_encode_string(arg) 229 230 # Build the A entry. 231 payload += "{},{},{}".format(len(hex_arg), arg_index, hex_arg) 232 233 # Next arg index, please. 234 arg_index += 1 235 236 # return the packetized payload 237 return gdbremote_packet_encode_string(payload) 238 239 240def parse_reg_info_response(response_packet): 241 if not response_packet: 242 raise Exception("response_packet cannot be None") 243 244 # Strip off prefix $ and suffix #xx if present. 245 response_packet = _STRIP_COMMAND_PREFIX_REGEX.sub("", response_packet) 246 response_packet = _STRIP_CHECKSUM_REGEX.sub("", response_packet) 247 248 # Build keyval pairs 249 values = {} 250 for kv in response_packet.split(";"): 251 if len(kv) < 1: 252 continue 253 (key, val) = kv.split(':') 254 values[key] = val 255 256 return values 257 258 259def parse_threadinfo_response(response_packet): 260 if not response_packet: 261 raise Exception("response_packet cannot be None") 262 263 # Strip off prefix $ and suffix #xx if present. 264 response_packet = _STRIP_COMMAND_PREFIX_M_REGEX.sub("", response_packet) 265 response_packet = _STRIP_CHECKSUM_REGEX.sub("", response_packet) 266 267 for tid in response_packet.split(","): 268 if not tid: 269 continue 270 if tid.startswith("p"): 271 pid, _, tid = tid.partition(".") 272 yield (int(pid[1:], 16), int(tid, 16)) 273 else: 274 yield int(tid, 16) 275 276 277def unpack_endian_binary_string(endian, value_string): 278 """Unpack a gdb-remote binary (post-unescaped, i.e. not escaped) response to an unsigned int given endianness of the inferior.""" 279 if not endian: 280 raise Exception("endian cannot be None") 281 if not value_string or len(value_string) < 1: 282 raise Exception("value_string cannot be None or empty") 283 284 if endian == 'little': 285 value = 0 286 i = 0 287 while len(value_string) > 0: 288 value += (ord(value_string[0]) << i) 289 value_string = value_string[1:] 290 i += 8 291 return value 292 elif endian == 'big': 293 value = 0 294 while len(value_string) > 0: 295 value = (value << 8) + ord(value_string[0]) 296 value_string = value_string[1:] 297 return value 298 else: 299 # pdp is valid but need to add parse code once needed. 300 raise Exception("unsupported endian:{}".format(endian)) 301 302 303def unpack_register_hex_unsigned(endian, value_string): 304 """Unpack a gdb-remote $p-style response to an unsigned int given endianness of inferior.""" 305 if not endian: 306 raise Exception("endian cannot be None") 307 if not value_string or len(value_string) < 1: 308 raise Exception("value_string cannot be None or empty") 309 310 if endian == 'little': 311 value = 0 312 i = 0 313 while len(value_string) > 0: 314 value += (int(value_string[0:2], 16) << i) 315 value_string = value_string[2:] 316 i += 8 317 return value 318 elif endian == 'big': 319 return int(value_string, 16) 320 else: 321 # pdp is valid but need to add parse code once needed. 322 raise Exception("unsupported endian:{}".format(endian)) 323 324 325def pack_register_hex(endian, value, byte_size=None): 326 """Unpack a gdb-remote $p-style response to an unsigned int given endianness of inferior.""" 327 if not endian: 328 raise Exception("endian cannot be None") 329 330 if endian == 'little': 331 # Create the litt-endian return value. 332 retval = "" 333 while value != 0: 334 retval = retval + "{:02x}".format(value & 0xff) 335 value = value >> 8 336 if byte_size: 337 # Add zero-fill to the right/end (MSB side) of the value. 338 retval += "00" * (byte_size - len(retval) // 2) 339 return retval 340 341 elif endian == 'big': 342 retval = "" 343 while value != 0: 344 retval = "{:02x}".format(value & 0xff) + retval 345 value = value >> 8 346 if byte_size: 347 # Add zero-fill to the left/front (MSB side) of the value. 348 retval = ("00" * (byte_size - len(retval) // 2)) + retval 349 return retval 350 351 else: 352 # pdp is valid but need to add parse code once needed. 353 raise Exception("unsupported endian:{}".format(endian)) 354 355 356class GdbRemoteEntryBase(object): 357 358 def is_output_matcher(self): 359 return False 360 361 362class GdbRemoteEntry(GdbRemoteEntryBase): 363 364 def __init__( 365 self, 366 is_send_to_remote=True, 367 exact_payload=None, 368 regex=None, 369 capture=None): 370 """Create an entry representing one piece of the I/O to/from a gdb remote debug monitor. 371 372 Args: 373 374 is_send_to_remote: True if this entry is a message to be 375 sent to the gdbremote debug monitor; False if this 376 entry represents text to be matched against the reply 377 from the gdbremote debug monitor. 378 379 exact_payload: if not None, then this packet is an exact 380 send (when sending to the remote) or an exact match of 381 the response from the gdbremote. The checksums are 382 ignored on exact match requests since negotiation of 383 no-ack makes the checksum content essentially 384 undefined. 385 386 regex: currently only valid for receives from gdbremote. When 387 specified (and only if exact_payload is None), indicates the 388 gdbremote response must match the given regex. Match groups in 389 the regex can be used for the matching portion (see capture 390 arg). It is perfectly valid to have just a regex arg without a 391 capture arg. This arg only makes sense if exact_payload is not 392 specified. 393 394 capture: if specified, is a dictionary of regex match 395 group indices (should start with 1) to variable names 396 that will store the capture group indicated by the 397 index. For example, {1:"thread_id"} will store capture 398 group 1's content in the context dictionary where 399 "thread_id" is the key and the match group value is 400 the value. This arg only makes sense when regex is specified. 401 """ 402 self._is_send_to_remote = is_send_to_remote 403 self.exact_payload = exact_payload 404 self.regex = regex 405 self.capture = capture 406 407 def is_send_to_remote(self): 408 return self._is_send_to_remote 409 410 def is_consumed(self): 411 # For now, all packets are consumed after first use. 412 return True 413 414 def get_send_packet(self): 415 if not self.is_send_to_remote(): 416 raise Exception( 417 "get_send_packet() called on GdbRemoteEntry that is not a send-to-remote packet") 418 if not self.exact_payload: 419 raise Exception( 420 "get_send_packet() called on GdbRemoteEntry but it doesn't have an exact payload") 421 return self.exact_payload 422 423 def _assert_exact_payload_match(self, asserter, actual_packet): 424 assert_packets_equal(asserter, actual_packet, self.exact_payload) 425 return None 426 427 def _assert_regex_match(self, asserter, actual_packet, context): 428 # Ensure the actual packet matches from the start of the actual packet. 429 match = self.regex.match(actual_packet) 430 if not match: 431 asserter.fail( 432 "regex '{}' failed to match against content '{}'".format( 433 self.regex.pattern, actual_packet)) 434 435 if self.capture: 436 # Handle captures. 437 for group_index, var_name in list(self.capture.items()): 438 capture_text = match.group(group_index) 439 # It is okay for capture text to be None - which it will be if it is a group that can match nothing. 440 # The user must be okay with it since the regex itself matched 441 # above. 442 context[var_name] = capture_text 443 444 return context 445 446 def assert_match(self, asserter, actual_packet, context=None): 447 # This only makes sense for matching lines coming from the 448 # remote debug monitor. 449 if self.is_send_to_remote(): 450 raise Exception( 451 "Attempted to match a packet being sent to the remote debug monitor, doesn't make sense.") 452 453 # Create a new context if needed. 454 if not context: 455 context = {} 456 457 # If this is an exact payload, ensure they match exactly, 458 # ignoring the packet checksum which is optional for no-ack 459 # mode. 460 if self.exact_payload: 461 self._assert_exact_payload_match(asserter, actual_packet) 462 return context 463 elif self.regex: 464 return self._assert_regex_match(asserter, actual_packet, context) 465 else: 466 raise Exception( 467 "Don't know how to match a remote-sent packet when exact_payload isn't specified.") 468 469 470class MultiResponseGdbRemoteEntry(GdbRemoteEntryBase): 471 """Represents a query/response style packet. 472 473 Assumes the first item is sent to the gdb remote. 474 An end sequence regex indicates the end of the query/response 475 packet sequence. All responses up through (but not including) the 476 end response are stored in a context variable. 477 478 Settings accepted from params: 479 480 next_query or query: required. The typical query packet without the $ prefix or #xx suffix. 481 If there is a special first packet to start the iteration query, see the 482 first_query key. 483 484 first_query: optional. If the first query requires a special query command, specify 485 it with this key. Do not specify the $ prefix or #xx suffix. 486 487 append_iteration_suffix: defaults to False. Specify True if the 0-based iteration 488 index should be appended as a suffix to the command. e.g. qRegisterInfo with 489 this key set true will generate query packets of qRegisterInfo0, qRegisterInfo1, 490 etc. 491 492 end_regex: required. Specifies a compiled regex object that will match the full text 493 of any response that signals an end to the iteration. It must include the 494 initial $ and ending #xx and must match the whole packet. 495 496 save_key: required. Specifies the key within the context where an array will be stored. 497 Each packet received from the gdb remote that does not match the end_regex will get 498 appended to the array stored within the context at that key. 499 500 runaway_response_count: optional. Defaults to 10000. If this many responses are retrieved, 501 assume there is something wrong with either the response collection or the ending 502 detection regex and throw an exception. 503 """ 504 505 def __init__(self, params): 506 self._next_query = params.get("next_query", params.get("query")) 507 if not self._next_query: 508 raise "either next_query or query key must be specified for MultiResponseGdbRemoteEntry" 509 510 self._first_query = params.get("first_query", self._next_query) 511 self._append_iteration_suffix = params.get( 512 "append_iteration_suffix", False) 513 self._iteration = 0 514 self._end_regex = params["end_regex"] 515 self._save_key = params["save_key"] 516 self._runaway_response_count = params.get( 517 "runaway_response_count", 10000) 518 self._is_send_to_remote = True 519 self._end_matched = False 520 521 def is_send_to_remote(self): 522 return self._is_send_to_remote 523 524 def get_send_packet(self): 525 if not self.is_send_to_remote(): 526 raise Exception( 527 "get_send_packet() called on MultiResponseGdbRemoteEntry that is not in the send state") 528 if self._end_matched: 529 raise Exception( 530 "get_send_packet() called on MultiResponseGdbRemoteEntry but end of query/response sequence has already been seen.") 531 532 # Choose the first or next query for the base payload. 533 if self._iteration == 0 and self._first_query: 534 payload = self._first_query 535 else: 536 payload = self._next_query 537 538 # Append the suffix as needed. 539 if self._append_iteration_suffix: 540 payload += "%x" % self._iteration 541 542 # Keep track of the iteration. 543 self._iteration += 1 544 545 # Now that we've given the query packet, flip the mode to 546 # receive/match. 547 self._is_send_to_remote = False 548 549 # Return the result, converted to packet form. 550 return gdbremote_packet_encode_string(payload) 551 552 def is_consumed(self): 553 return self._end_matched 554 555 def assert_match(self, asserter, actual_packet, context=None): 556 # This only makes sense for matching lines coming from the remote debug 557 # monitor. 558 if self.is_send_to_remote(): 559 raise Exception( 560 "assert_match() called on MultiResponseGdbRemoteEntry but state is set to send a query packet.") 561 562 if self._end_matched: 563 raise Exception( 564 "assert_match() called on MultiResponseGdbRemoteEntry but end of query/response sequence has already been seen.") 565 566 # Set up a context as needed. 567 if not context: 568 context = {} 569 570 # Check if the packet matches the end condition. 571 match = self._end_regex.match(actual_packet) 572 if match: 573 # We're done iterating. 574 self._end_matched = True 575 return context 576 577 # Not done iterating - save the packet. 578 context[self._save_key] = context.get(self._save_key, []) 579 context[self._save_key].append(actual_packet) 580 581 # Check for a runaway response cycle. 582 if len(context[self._save_key]) >= self._runaway_response_count: 583 raise Exception( 584 "runaway query/response cycle detected: %d responses captured so far. Last response: %s" % 585 (len( 586 context[ 587 self._save_key]), context[ 588 self._save_key][ 589 -1])) 590 591 # Flip the mode to send for generating the query. 592 self._is_send_to_remote = True 593 return context 594 595 596class MatchRemoteOutputEntry(GdbRemoteEntryBase): 597 """Waits for output from the debug monitor to match a regex or time out. 598 599 This entry type tries to match each time new gdb remote output is accumulated 600 using a provided regex. If the output does not match the regex within the 601 given timeframe, the command fails the playback session. If the regex does 602 match, any capture fields are recorded in the context. 603 604 Settings accepted from params: 605 606 regex: required. Specifies a compiled regex object that must either succeed 607 with re.match or re.search (see regex_mode below) within the given timeout 608 (see timeout_seconds below) or cause the playback to fail. 609 610 regex_mode: optional. Available values: "match" or "search". If "match", the entire 611 stub output as collected so far must match the regex. If search, then the regex 612 must match starting somewhere within the output text accumulated thus far. 613 Default: "match" (i.e. the regex must match the entirety of the accumulated output 614 buffer, so unexpected text will generally fail the match). 615 616 capture: optional. If specified, is a dictionary of regex match group indices (should start 617 with 1) to variable names that will store the capture group indicated by the 618 index. For example, {1:"thread_id"} will store capture group 1's content in the 619 context dictionary where "thread_id" is the key and the match group value is 620 the value. This arg only makes sense when regex is specified. 621 """ 622 623 def __init__(self, regex=None, regex_mode="match", capture=None): 624 self._regex = regex 625 self._regex_mode = regex_mode 626 self._capture = capture 627 self._matched = False 628 629 if not self._regex: 630 raise Exception("regex cannot be None") 631 632 if not self._regex_mode in ["match", "search"]: 633 raise Exception( 634 "unsupported regex mode \"{}\": must be \"match\" or \"search\"".format( 635 self._regex_mode)) 636 637 def is_output_matcher(self): 638 return True 639 640 def is_send_to_remote(self): 641 # This is always a "wait for remote" command. 642 return False 643 644 def is_consumed(self): 645 return self._matched 646 647 def assert_match(self, asserter, accumulated_output, context): 648 # Validate args. 649 if not accumulated_output: 650 raise Exception("accumulated_output cannot be none") 651 if not context: 652 raise Exception("context cannot be none") 653 654 # Validate that we haven't already matched. 655 if self._matched: 656 raise Exception( 657 "invalid state - already matched, attempting to match again") 658 659 # If we don't have any content yet, we don't match. 660 if len(accumulated_output) < 1: 661 return context 662 663 # Check if we match 664 if self._regex_mode == "match": 665 match = self._regex.match(accumulated_output) 666 elif self._regex_mode == "search": 667 match = self._regex.search(accumulated_output) 668 else: 669 raise Exception( 670 "Unexpected regex mode: {}".format( 671 self._regex_mode)) 672 673 # If we don't match, wait to try again after next $O content, or time 674 # out. 675 if not match: 676 # print("re pattern \"{}\" did not match against \"{}\"".format(self._regex.pattern, accumulated_output)) 677 return context 678 679 # We do match. 680 self._matched = True 681 # print("re pattern \"{}\" matched against \"{}\"".format(self._regex.pattern, accumulated_output)) 682 683 # Collect up any captures into the context. 684 if self._capture: 685 # Handle captures. 686 for group_index, var_name in list(self._capture.items()): 687 capture_text = match.group(group_index) 688 if not capture_text: 689 raise Exception( 690 "No content for group index {}".format(group_index)) 691 context[var_name] = capture_text 692 693 return context 694 695 696class GdbRemoteTestSequence(object): 697 698 _LOG_LINE_REGEX = re.compile(r'^.*(read|send)\s+packet:\s+(.+)$') 699 700 def __init__(self, logger): 701 self.entries = [] 702 self.logger = logger 703 704 def __len__(self): 705 return len(self.entries) 706 707 def add_log_lines(self, log_lines, remote_input_is_read): 708 for line in log_lines: 709 if isinstance(line, str): 710 # Handle log line import 711 # if self.logger: 712 # self.logger.debug("processing log line: {}".format(line)) 713 match = self._LOG_LINE_REGEX.match(line) 714 if match: 715 playback_packet = match.group(2) 716 direction = match.group(1) 717 if _is_packet_lldb_gdbserver_input( 718 direction, remote_input_is_read): 719 # Handle as something to send to the remote debug monitor. 720 # if self.logger: 721 # self.logger.info("processed packet to send to remote: {}".format(playback_packet)) 722 self.entries.append( 723 GdbRemoteEntry( 724 is_send_to_remote=True, 725 exact_payload=playback_packet)) 726 else: 727 # Log line represents content to be expected from the remote debug monitor. 728 # if self.logger: 729 # self.logger.info("receiving packet from llgs, should match: {}".format(playback_packet)) 730 self.entries.append( 731 GdbRemoteEntry( 732 is_send_to_remote=False, 733 exact_payload=playback_packet)) 734 else: 735 raise Exception( 736 "failed to interpret log line: {}".format(line)) 737 elif isinstance(line, dict): 738 entry_type = line.get("type", "regex_capture") 739 if entry_type == "regex_capture": 740 # Handle more explicit control over details via dictionary. 741 direction = line.get("direction", None) 742 regex = line.get("regex", None) 743 capture = line.get("capture", None) 744 745 # Compile the regex. 746 if regex and (isinstance(regex, str)): 747 regex = re.compile(regex, re.DOTALL) 748 749 if _is_packet_lldb_gdbserver_input( 750 direction, remote_input_is_read): 751 # Handle as something to send to the remote debug monitor. 752 # if self.logger: 753 # self.logger.info("processed dict sequence to send to remote") 754 self.entries.append( 755 GdbRemoteEntry( 756 is_send_to_remote=True, 757 regex=regex, 758 capture=capture)) 759 else: 760 # Log line represents content to be expected from the remote debug monitor. 761 # if self.logger: 762 # self.logger.info("processed dict sequence to match receiving from remote") 763 self.entries.append( 764 GdbRemoteEntry( 765 is_send_to_remote=False, 766 regex=regex, 767 capture=capture)) 768 elif entry_type == "multi_response": 769 self.entries.append(MultiResponseGdbRemoteEntry(line)) 770 elif entry_type == "output_match": 771 772 regex = line.get("regex", None) 773 # Compile the regex. 774 if regex and (isinstance(regex, str)): 775 regex = re.compile(regex, re.DOTALL) 776 777 regex_mode = line.get("regex_mode", "match") 778 capture = line.get("capture", None) 779 self.entries.append( 780 MatchRemoteOutputEntry( 781 regex=regex, 782 regex_mode=regex_mode, 783 capture=capture)) 784 else: 785 raise Exception("unknown entry type \"%s\"" % entry_type) 786 787 788def process_is_running(pid, unknown_value=True): 789 """If possible, validate that the given pid represents a running process on the local system. 790 791 Args: 792 793 pid: an OS-specific representation of a process id. Should be an integral value. 794 795 unknown_value: value used when we cannot determine how to check running local 796 processes on the OS. 797 798 Returns: 799 800 If we can figure out how to check running process ids on the given OS: 801 return True if the process is running, or False otherwise. 802 803 If we don't know how to check running process ids on the given OS: 804 return the value provided by the unknown_value arg. 805 """ 806 if not isinstance(pid, six.integer_types): 807 raise Exception( 808 "pid must be an integral type (actual type: %s)" % str( 809 type(pid))) 810 811 process_ids = [] 812 813 if lldb.remote_platform: 814 # Don't know how to get list of running process IDs on a remote 815 # platform 816 return unknown_value 817 elif platform.system() in ['Darwin', 'Linux', 'FreeBSD', 'NetBSD']: 818 # Build the list of running process ids 819 output = subprocess.check_output( 820 "ps ax | awk '{ print $1; }'", shell=True).decode("utf-8") 821 text_process_ids = output.split('\n')[1:] 822 # Convert text pids to ints 823 process_ids = [int(text_pid) 824 for text_pid in text_process_ids if text_pid != ''] 825 elif platform.system() == 'Windows': 826 output = subprocess.check_output( 827 "for /f \"tokens=2 delims=,\" %F in ('tasklist /nh /fi \"PID ne 0\" /fo csv') do @echo %~F", shell=True).decode("utf-8") 828 text_process_ids = output.split('\n')[1:] 829 process_ids = [int(text_pid) 830 for text_pid in text_process_ids if text_pid != ''] 831 # elif {your_platform_here}: 832 # fill in process_ids as a list of int type process IDs running on 833 # the local system. 834 else: 835 # Don't know how to get list of running process IDs on this 836 # OS, so return the "don't know" value. 837 return unknown_value 838 839 # Check if the pid is in the process_ids 840 return pid in process_ids 841 842 843def _handle_output_packet_string(packet_contents): 844 # Warning: in non-stop mode, we currently handle only the first output 845 # packet since we'd need to inject vStdio packets 846 if not packet_contents.startswith((b"$O", b"%Stdio:O")): 847 return None 848 elif packet_contents == b"$OK": 849 return None 850 else: 851 return binascii.unhexlify(packet_contents.partition(b"O")[2]) 852 853 854class Server(object): 855 856 _GDB_REMOTE_PACKET_REGEX = re.compile(br'^([\$%][^\#]*)#[0-9a-fA-F]{2}') 857 858 class ChecksumMismatch(Exception): 859 pass 860 861 def __init__(self, sock, proc = None): 862 self._accumulated_output = b"" 863 self._receive_buffer = b"" 864 self._normal_queue = [] 865 self._output_queue = [] 866 self._sock = sock 867 self._proc = proc 868 869 def send_raw(self, frame): 870 self._sock.sendall(frame) 871 872 def send_ack(self): 873 self.send_raw(b"+") 874 875 def send_packet(self, packet): 876 self.send_raw(b'$%s#%02x'%(packet, self._checksum(packet))) 877 878 @staticmethod 879 def _checksum(packet): 880 checksum = 0 881 for c in six.iterbytes(packet): 882 checksum += c 883 return checksum % 256 884 885 def _read(self, q): 886 while not q: 887 new_bytes = self._sock.recv(4096) 888 self._process_new_bytes(new_bytes) 889 return q.pop(0) 890 891 def _process_new_bytes(self, new_bytes): 892 # Add new bytes to our accumulated unprocessed packet bytes. 893 self._receive_buffer += new_bytes 894 895 # Parse fully-formed packets into individual packets. 896 has_more = len(self._receive_buffer) > 0 897 while has_more: 898 if len(self._receive_buffer) <= 0: 899 has_more = False 900 # handle '+' ack 901 elif self._receive_buffer[0:1] == b"+": 902 self._normal_queue += [b"+"] 903 self._receive_buffer = self._receive_buffer[1:] 904 else: 905 packet_match = self._GDB_REMOTE_PACKET_REGEX.match( 906 self._receive_buffer) 907 if packet_match: 908 # Our receive buffer matches a packet at the 909 # start of the receive buffer. 910 new_output_content = _handle_output_packet_string( 911 packet_match.group(1)) 912 if new_output_content: 913 # This was an $O packet with new content. 914 self._accumulated_output += new_output_content 915 self._output_queue += [self._accumulated_output] 916 else: 917 # Any packet other than $O. 918 self._normal_queue += [packet_match.group(0)] 919 920 # Remove the parsed packet from the receive 921 # buffer. 922 self._receive_buffer = self._receive_buffer[ 923 len(packet_match.group(0)):] 924 else: 925 # We don't have enough in the receive bufferto make a full 926 # packet. Stop trying until we read more. 927 has_more = False 928 929 def get_raw_output_packet(self): 930 return self._read(self._output_queue) 931 932 def get_raw_normal_packet(self): 933 return self._read(self._normal_queue) 934 935 @staticmethod 936 def _get_payload(frame): 937 payload = frame[1:-3] 938 checksum = int(frame[-2:], 16) 939 if checksum != Server._checksum(payload): 940 raise ChecksumMismatch 941 return payload 942 943 def get_normal_packet(self): 944 frame = self.get_raw_normal_packet() 945 if frame == b"+": return frame 946 return self._get_payload(frame) 947 948 def get_accumulated_output(self): 949 return self._accumulated_output 950 951 def consume_accumulated_output(self): 952 output = self._accumulated_output 953 self._accumulated_output = b"" 954 return output 955 956 def __str__(self): 957 return dedent("""\ 958 server '{}' on '{}' 959 _receive_buffer: {} 960 _normal_queue: {} 961 _output_queue: {} 962 _accumulated_output: {} 963 """).format(self._proc, self._sock, self._receive_buffer, 964 self._normal_queue, self._output_queue, 965 self._accumulated_output) 966