1"""Module for supporting unit testing of the lldb-server debug monitor exe.
2"""
3
4from __future__ import print_function
5
6
7import os
8import os.path
9import platform
10import re
11import six
12import socket_packet_pump
13import subprocess
14import time
15from lldbsuite.test.lldbtest import *
16
17from six.moves import queue
18
19
def _get_debug_monitor_from_lldb(lldb_exe, debug_monitor_basename):
    """Return the debug monitor exe path given the lldb exe path.

    This method attempts to construct a valid debug monitor exe name
    from a given lldb exe name.  It will return None if the synthesized
    debug monitor name is not found to exist.

    The debug monitor exe path is synthesized by taking the directory
    of the lldb exe, and replacing the portion of the base name that
    matches "lldb" (case insensitive) and replacing with the value of
    debug_monitor_basename.

    Args:
        lldb_exe: the path to an lldb executable.

        debug_monitor_basename: the base name portion of the debug monitor
            that will replace 'lldb'.

    Returns:
        A path to the debug monitor exe if it is found to exist; otherwise,
        returns None.

    """
    if not lldb_exe:
        return None

    exe_dir = os.path.dirname(lldb_exe)
    exe_base = os.path.basename(lldb_exe)
    lldb_name_pattern = re.compile(r"lldb", re.IGNORECASE)

    # Try each candidate location in order: first a sibling of the lldb
    # exe, then the macOS LLDB.framework resources directory.
    candidate_replacements = (
        debug_monitor_basename,
        'LLDB.framework/Versions/A/Resources/' + debug_monitor_basename,
    )
    for replacement in candidate_replacements:
        candidate_base = lldb_name_pattern.sub(replacement, exe_base)
        candidate_path = os.path.join(exe_dir, candidate_base)
        if os.path.exists(candidate_path):
            return candidate_path

    return None
67
68
def get_lldb_server_exe():
    """Return the lldb-server exe path.

    Returns:
        A path to the lldb-server exe if it is found to exist; otherwise,
        returns None.
    """
    # An explicit environment override always wins over path synthesis.
    override = os.environ.get("LLDB_DEBUGSERVER_PATH")
    if override is not None:
        return override

    return _get_debug_monitor_from_lldb(
        lldbtest_config.lldbExec, "lldb-server")
81
82
def get_debugserver_exe():
    """Return the debugserver exe path.

    Returns:
        A path to the debugserver exe if it is found to exist; otherwise,
        returns None.
    """
    # An explicit environment override always wins over path synthesis.
    override = os.environ.get("LLDB_DEBUGSERVER_PATH")
    if override is not None:
        return override

    return _get_debug_monitor_from_lldb(
        lldbtest_config.lldbExec, "debugserver")
95
# Matches a gdb-remote packet log line, capturing: (1) tool name,
# (2) the numeric id inside <...>, (3) direction, (4) packet payload.
# Both fragments are raw strings: the second half was previously a plain
# string, so "\s" was an invalid escape sequence on Python 3.
_LOG_LINE_REGEX = re.compile(r'^(lldb-server|debugserver)\s+<\s*(\d+)>'
                             r'\s+(read|send)\s+packet:\s+(.+)$')
98
99
def _is_packet_lldb_gdbserver_input(packet_type, llgs_input_is_read):
    """Return whether a given packet is input for lldb-gdbserver.

    Args:
        packet_type: a string indicating 'read' or 'send', from a
            gdbremote packet protocol log.

        llgs_input_is_read: true if lldb-gdbserver input (content sent to
            lldb-gdbserver) is listed as 'read' or 'send' in the packet
            log entry.

    Returns:
        True if the packet should be considered input for lldb-gdbserver; False
        otherwise.

    Raises:
        Exception: if packet_type is neither 'read' nor 'send'.
    """
    if packet_type == 'read':
        # when llgs is the read side, then a read packet is meant for
        # input to llgs (when captured from the llgs/debugserver exe).
        return llgs_input_is_read
    elif packet_type == 'send':
        # when llgs is the send side, then a send packet is meant to
        # be input to llgs (when captured from the lldb exe).
        return not llgs_input_is_read
    else:
        # Don't understand what type of packet this is.  Raise a proper
        # exception instance: raising a bare string is a TypeError on
        # Python 3 (exceptions must derive from BaseException).
        raise Exception("Unknown packet type: {}".format(packet_type))
126
127
def handle_O_packet(context, packet_contents, logger):
    """Accumulate inferior stdout text from a $O packet into the context.

    Returns True when packet_contents was an O output packet and was
    consumed; False otherwise (including the plain "OK" reply, which is
    not an output packet despite starting with 'O').
    """
    if not packet_contents:
        return False
    if packet_contents[0] != "O" or packet_contents == "OK":
        return False

    decoded_text = gdbremote_hex_decode_string(packet_contents[1:])
    context["O_content"] += decoded_text
    context["O_count"] += 1

    if logger:
        logger.debug(
            "text: new \"{}\", cumulative: \"{}\"".format(
                decoded_text, context["O_content"]))

    return True
147
# Pre-compiled regexes used to strip gdbremote packet framing:
# the trailing two-hex-digit checksum (e.g. "#9a"), the leading "$"
# command prefix, and the leading "$m" prefix of thread-id list replies.
_STRIP_CHECKSUM_REGEX = re.compile(r'#[0-9a-fA-F]{2}$')
_STRIP_COMMAND_PREFIX_REGEX = re.compile(r"^\$")
_STRIP_COMMAND_PREFIX_M_REGEX = re.compile(r"^\$m")
151
152
def assert_packets_equal(asserter, actual_packet, expected_packet):
    """Assert two gdbremote packets are equal, ignoring checksum digits.

    In no-ack mode the trailing #xx checksum is ignored by the protocol,
    so it must not cause a mismatch here either.
    """
    checksum_suffix = re.compile(r'#[0-9a-fA-F]{2}$')
    asserter.assertEqual(
        checksum_suffix.sub('', actual_packet),
        checksum_suffix.sub('', expected_packet))
160
161
def expect_lldb_gdbserver_replay(
        asserter,
        sock,
        test_sequence,
        pump_queues,
        timeout_seconds,
        logger=None):
    """Replay socket communication with lldb-gdbserver and verify responses.

    Args:
        asserter: the object providing assertEqual(first, second, msg=None), e.g. TestCase instance.

        sock: the TCP socket connected to the lldb-gdbserver exe.

        test_sequence: a GdbRemoteTestSequence instance that describes
            the messages sent to the gdb remote and the responses
            expected from it.

        pump_queues: object providing output_queue() and packet_queue(),
            the queues that the socket packet pump fills with $O output
            and regular packets respectively.

        timeout_seconds: any response taking more than this number of
           seconds will cause an exception to be raised.

        logger: a Python logger instance.

    Returns:
        The context dictionary from running the given gdbremote
        protocol sequence.  This will contain any of the capture
        elements specified to any GdbRemoteEntry instances in
        test_sequence.

        The context will also contain an entry, context["O_content"]
        which contains the text from the inferior received via $O
        packets.  $O packets should not attempt to be matched
        directly since they are not entirely deterministic as to
        how many arrive and how much text is in each one.

        context["O_count"] will contain an integer of the number of
        O packets received.
    """

    # Ensure we have some work to do.
    if len(test_sequence.entries) < 1:
        return {}

    context = {"O_count": 0, "O_content": ""}
    with socket_packet_pump.SocketPacketPump(sock, pump_queues, logger) as pump:
        # Grab the first sequence entry.
        sequence_entry = test_sequence.entries.pop(0)

        # While we have an active sequence entry, send messages
        # destined for the stub and collect/match/process responses
        # expected from the stub.
        while sequence_entry:
            if sequence_entry.is_send_to_remote():
                # This is an entry to send to the remote debug monitor.
                send_packet = sequence_entry.get_send_packet()
                if logger:
                    # A lone 0x03 byte is the raw interrupt request; show
                    # it as ^C rather than a control character.
                    if len(send_packet) == 1 and send_packet[0] == chr(3):
                        packet_desc = "^C"
                    else:
                        packet_desc = send_packet
                    logger.info(
                        "sending packet to remote: {}".format(packet_desc))
                sock.sendall(send_packet)
            else:
                # This is an entry expecting to receive content from the remote
                # debug monitor.

                # We'll pull from (and wait on) the queue appropriate for the type of matcher.
                # We keep separate queues for process output (coming from non-deterministic
                # $O packet division) and for all other packets.
                if sequence_entry.is_output_matcher():
                    try:
                        # Grab next entry from the output queue.
                        content = pump_queues.output_queue().get(True, timeout_seconds)
                    except queue.Empty:
                        if logger:
                            logger.warning(
                                "timeout waiting for stub output (accumulated output:{})".format(
                                    pump.get_accumulated_output()))
                        raise Exception(
                            "timed out while waiting for output match (accumulated output: {})".format(
                                pump.get_accumulated_output()))
                else:
                    try:
                        content = pump_queues.packet_queue().get(True, timeout_seconds)
                    except queue.Empty:
                        if logger:
                            logger.warning(
                                "timeout waiting for packet match (receive buffer: {})".format(
                                    pump.get_receive_buffer()))
                        raise Exception(
                            "timed out while waiting for packet match (receive buffer: {})".format(
                                pump.get_receive_buffer()))

                # Give the sequence entry the opportunity to match the content.
                # Output matchers might match or pass after more output accumulates.
                # Other packet types generally must match.
                asserter.assertIsNotNone(content)
                context = sequence_entry.assert_match(
                    asserter, content, context=context)

            # Move on to next sequence entry as needed.  Some sequence entries support executing multiple
            # times in different states (for looping over query/response
            # packets).
            if sequence_entry.is_consumed():
                if len(test_sequence.entries) > 0:
                    sequence_entry = test_sequence.entries.pop(0)
                else:
                    sequence_entry = None

        # Fill in the O_content entries.
        # NOTE(review): O_count is overwritten with 1 here, so despite the
        # docstring it does not reflect the actual number of $O packets
        # received; presumably callers only check it for non-zero — verify
        # before relying on the count.
        context["O_count"] = 1
        context["O_content"] = pump.get_accumulated_output()

    return context
277
278
def gdbremote_hex_encode_string(str):
    """Return the hex encoding (two lowercase digits per char) of a string."""
    return ''.join('{0:02x}'.format(ord(ch)) for ch in str)
284
285
def gdbremote_hex_decode_string(str):
    """Return the string decoded from a run of hex digit pairs.

    Inverse of gdbremote_hex_encode_string, e.g. "686921" -> "hi!".
    The original implementation used the Python-2-only str "hex" codec,
    which does not exist on Python 3.
    """
    if hasattr(str, "decode"):
        # Python 2 str: the built-in hex codec is available.
        return str.decode("hex")
    # Python 3 str has no decode(); go through bytes.fromhex instead.
    # Assumes inferior output is utf-8 text — TODO confirm against callers.
    return bytes.fromhex(str).decode("utf-8")
288
289
def gdbremote_packet_encode_string(str):
    """Frame a payload as a gdbremote packet: $<payload>#<mod-256 checksum>."""
    checksum = sum(ord(ch) for ch in str) % 256
    return '$' + str + '#{0:02x}'.format(checksum)
295
296
def build_gdbremote_A_packet(args_list):
    """Given a list of args, create a properly-formed $A packet containing each arg.
    """
    # Each arg becomes a "hexlen,index,hexarg" field; fields are
    # comma-separated and appended directly after the leading "A".
    fields = []
    for arg_index, arg in enumerate(args_list):
        hex_arg = gdbremote_hex_encode_string(arg)
        fields.append("{},{},{}".format(len(hex_arg), arg_index, hex_arg))

    return gdbremote_packet_encode_string("A" + ",".join(fields))
320
321
def parse_reg_info_response(response_packet):
    """Parse a qRegisterInfo-style reply into a dict of key:value fields.

    Raises:
        Exception: if response_packet is None or empty.
    """
    if not response_packet:
        raise Exception("response_packet cannot be None")

    # Drop the $ prefix and #xx checksum suffix when present.
    payload = re.sub(r"^\$", "", response_packet)
    payload = re.sub(r"#[0-9a-fA-F]{2}$", "", payload)

    # Each non-empty semicolon-separated field is a key:value pair.
    values = {}
    for field in payload.split(";"):
        if not field:
            continue
        (key, val) = field.split(':')
        values[key] = val

    return values
339
340
def parse_threadinfo_response(response_packet):
    """Parse a qfThreadInfo/qsThreadInfo reply into a list of thread ids.

    Raises:
        Exception: if response_packet is None or empty.
    """
    if not response_packet:
        raise Exception("response_packet cannot be None")

    # Drop the $m prefix and #xx checksum suffix when present.
    payload = re.sub(r"^\$m", "", response_packet)
    payload = re.sub(r"#[0-9a-fA-F]{2}$", "", payload)

    # The remaining payload is a comma-separated list of hex thread ids.
    return [int(tid, 16) for tid in payload.split(",") if tid]
352
353
def unpack_endian_binary_string(endian, value_string):
    """Unpack a gdb-remote binary (post-unescaped, i.e. not escaped) response to an unsigned int given endianness of the inferior."""
    if not endian:
        raise Exception("endian cannot be None")
    if not value_string or len(value_string) < 1:
        raise Exception("value_string cannot be None or empty")

    octets = [ord(ch) for ch in value_string]

    if endian == 'little':
        # Least significant byte first: each octet shifts 8 bits further left.
        total = 0
        for byte_index, octet in enumerate(octets):
            total += octet << (8 * byte_index)
        return total
    if endian == 'big':
        # Most significant byte first: fold left, shifting as we go.
        total = 0
        for octet in octets:
            total = (total << 8) + octet
        return total

    # pdp is valid but need to add parse code once needed.
    raise Exception("unsupported endian:{}".format(endian))
378
379
def unpack_register_hex_unsigned(endian, value_string):
    """Unpack a gdb-remote $p-style response to an unsigned int given endianness of inferior."""
    if not endian:
        raise Exception("endian cannot be None")
    if not value_string or len(value_string) < 1:
        raise Exception("value_string cannot be None or empty")

    if endian == 'little':
        # Split into hex byte pairs, reverse them, and parse the result as
        # one big-endian hex number.
        byte_pairs = [value_string[i:i + 2]
                      for i in range(0, len(value_string), 2)]
        return int(''.join(reversed(byte_pairs)), 16)
    if endian == 'big':
        return int(value_string, 16)

    # pdp is valid but need to add parse code once needed.
    raise Exception("unsupported endian:{}".format(endian))
400
401
def pack_register_hex(endian, value, byte_size=None):
    """Pack an unsigned int into a gdb-remote register-style hex string.

    Args:
        endian: 'little' or 'big', the endianness of the inferior.
        value: the unsigned integer value to pack.
        byte_size: if given, zero-pad the result to this many bytes.

    Returns:
        The hex string (two hex digits per byte) in the given byte order.

    Raises:
        Exception: if endian is falsy or an unsupported value.
    """
    if not endian:
        raise Exception("endian cannot be None")

    if endian == 'little':
        # Create the little-endian return value.
        retval = ""
        while value != 0:
            retval = retval + "{:02x}".format(value & 0xff)
            value = value >> 8
        if byte_size:
            # Add zero-fill to the right/end (MSB side) of the value.
            # Use floor division: on Python 3, "/" yields a float and
            # "00" * float raises TypeError.
            retval += "00" * (byte_size - len(retval) // 2)
        return retval

    elif endian == 'big':
        retval = ""
        while value != 0:
            retval = "{:02x}".format(value & 0xff) + retval
            value = value >> 8
        if byte_size:
            # Add zero-fill to the left/front (MSB side) of the value.
            retval = ("00" * (byte_size - len(retval) // 2)) + retval
        return retval

    else:
        # pdp is valid but need to add parse code once needed.
        raise Exception("unsupported endian:{}".format(endian))
431
432
class GdbRemoteEntryBase(object):
    """Base class for entries in a GdbRemoteTestSequence."""

    def is_output_matcher(self):
        """Return True if this entry matches accumulated $O output text."""
        return False
437
438
class GdbRemoteEntry(GdbRemoteEntryBase):
    # A single-use send-or-receive step in a gdbremote replay sequence.

    def __init__(
            self,
            is_send_to_remote=True,
            exact_payload=None,
            regex=None,
            capture=None,
            expect_captures=None):
        """Create an entry representing one piece of the I/O to/from a gdb remote debug monitor.

        Args:

            is_send_to_remote: True if this entry is a message to be
                sent to the gdbremote debug monitor; False if this
                entry represents text to be matched against the reply
                from the gdbremote debug monitor.

            exact_payload: if not None, then this packet is an exact
                send (when sending to the remote) or an exact match of
                the response from the gdbremote. The checksums are
                ignored on exact match requests since negotiation of
                no-ack makes the checksum content essentially
                undefined.

            regex: currently only valid for receives from gdbremote.
                When specified (and only if exact_payload is None),
                indicates the gdbremote response must match the given
                regex. Match groups in the regex can be used for two
                different purposes: saving the match (see capture
                arg), or validating that a match group matches a
                previously established value (see expect_captures). It
                is perfectly valid to have just a regex arg and to
                specify neither capture or expect_captures args. This
                arg only makes sense if exact_payload is not
                specified.

            capture: if specified, is a dictionary of regex match
                group indices (should start with 1) to variable names
                that will store the capture group indicated by the
                index. For example, {1:"thread_id"} will store capture
                group 1's content in the context dictionary where
                "thread_id" is the key and the match group value is
                the value. The value stored off can be used later in a
                expect_captures expression. This arg only makes sense
                when regex is specified.

            expect_captures: if specified, is a dictionary of regex
                match group indices (should start with 1) to variable
                names, where the match group should match the value
                existing in the context at the given variable name.
                For example, {2:"thread_id"} indicates that the second
                match group must match the value stored under the
                context's previously stored "thread_id" key. This arg
                only makes sense when regex is specified.
        """
        self._is_send_to_remote = is_send_to_remote
        self.exact_payload = exact_payload
        self.regex = regex
        self.capture = capture
        self.expect_captures = expect_captures

    def is_send_to_remote(self):
        """Return True if this entry is a packet to send to the remote."""
        return self._is_send_to_remote

    def is_consumed(self):
        """Return True: GdbRemoteEntry instances are used exactly once."""
        # For now, all packets are consumed after first use.
        return True

    def get_send_packet(self):
        """Return the exact payload to send to the remote.

        Raises:
            Exception: if this is not a send-to-remote entry, or if no
                exact_payload was provided at construction.
        """
        if not self.is_send_to_remote():
            raise Exception(
                "get_send_packet() called on GdbRemoteEntry that is not a send-to-remote packet")
        if not self.exact_payload:
            raise Exception(
                "get_send_packet() called on GdbRemoteEntry but it doesn't have an exact payload")
        return self.exact_payload

    def _assert_exact_payload_match(self, asserter, actual_packet):
        """Assert the packet equals exact_payload, ignoring checksums."""
        assert_packets_equal(asserter, actual_packet, self.exact_payload)
        return None

    def _assert_regex_match(self, asserter, actual_packet, context):
        """Assert the packet matches self.regex; apply capture/expect_captures.

        Captured groups are stored into context; expect_captures groups are
        compared against values previously stored in context.
        """
        # Ensure the actual packet matches from the start of the actual packet.
        match = self.regex.match(actual_packet)
        if not match:
            asserter.fail(
                "regex '{}' failed to match against content '{}'".format(
                    self.regex.pattern, actual_packet))

        if self.capture:
            # Handle captures.
            for group_index, var_name in list(self.capture.items()):
                capture_text = match.group(group_index)
                # It is okay for capture text to be None - which it will be if it is a group that can match nothing.
                # The user must be okay with it since the regex itself matched
                # above.
                context[var_name] = capture_text

        if self.expect_captures:
            # Handle comparing matched groups to context dictionary entries.
            for group_index, var_name in list(self.expect_captures.items()):
                capture_text = match.group(group_index)
                if not capture_text:
                    raise Exception(
                        "No content to expect for group index {}".format(group_index))
                asserter.assertEqual(capture_text, context[var_name])

        return context

    def assert_match(self, asserter, actual_packet, context=None):
        """Match a packet received from the remote against this entry.

        Returns the (possibly updated) context dict.

        Raises:
            Exception: if called on a send-to-remote entry, or if neither
                exact_payload nor regex was provided.
        """
        # This only makes sense for matching lines coming from the
        # remote debug monitor.
        if self.is_send_to_remote():
            raise Exception(
                "Attempted to match a packet being sent to the remote debug monitor, doesn't make sense.")

        # Create a new context if needed.
        if not context:
            context = {}

        # If this is an exact payload, ensure they match exactly,
        # ignoring the packet checksum which is optional for no-ack
        # mode.
        if self.exact_payload:
            self._assert_exact_payload_match(asserter, actual_packet)
            return context
        elif self.regex:
            return self._assert_regex_match(asserter, actual_packet, context)
        else:
            raise Exception(
                "Don't know how to match a remote-sent packet when exact_payload isn't specified.")
571
572
class MultiResponseGdbRemoteEntry(GdbRemoteEntryBase):
    """Represents a query/response style packet.

    Assumes the first item is sent to the gdb remote.
    An end sequence regex indicates the end of the query/response
    packet sequence.  All responses up through (but not including) the
    end response are stored in a context variable.

    Settings accepted from params:

        next_query or query: required.  The typical query packet without the $ prefix or #xx suffix.
            If there is a special first packet to start the iteration query, see the
            first_query key.

        first_query: optional. If the first query requires a special query command, specify
            it with this key.  Do not specify the $ prefix or #xx suffix.

        append_iteration_suffix: defaults to False.  Specify True if the 0-based iteration
            index should be appended as a suffix to the command.  e.g. qRegisterInfo with
            this key set true will generate query packets of qRegisterInfo0, qRegisterInfo1,
            etc.

        end_regex: required. Specifies a compiled regex object that will match the full text
            of any response that signals an end to the iteration.  It must include the
            initial $ and ending #xx and must match the whole packet.

        save_key: required.  Specifies the key within the context where an array will be stored.
            Each packet received from the gdb remote that does not match the end_regex will get
            appended to the array stored within the context at that key.

        runaway_response_count: optional. Defaults to 10000. If this many responses are retrieved,
            assume there is something wrong with either the response collection or the ending
            detection regex and throw an exception.
    """

    def __init__(self, params):
        """Initialize from a params dict; see class docstring for keys.

        Raises:
            Exception: if neither next_query nor query is provided.
            KeyError: if end_regex or save_key is missing from params.
        """
        self._next_query = params.get("next_query", params.get("query"))
        if not self._next_query:
            # Raise a proper exception instance: raising a bare string is a
            # TypeError on Python 3 (exceptions must derive from
            # BaseException).
            raise Exception(
                "either next_query or query key must be specified for MultiResponseGdbRemoteEntry")

        self._first_query = params.get("first_query", self._next_query)
        self._append_iteration_suffix = params.get(
            "append_iteration_suffix", False)
        self._iteration = 0
        self._end_regex = params["end_regex"]
        self._save_key = params["save_key"]
        self._runaway_response_count = params.get(
            "runaway_response_count", 10000)
        # Alternates: True means "send the next query", False means
        # "match the next response".
        self._is_send_to_remote = True
        self._end_matched = False

    def is_send_to_remote(self):
        """Return True when the next step is to send a query packet."""
        return self._is_send_to_remote

    def get_send_packet(self):
        """Return the next query, packetized, and flip to receive mode.

        Raises:
            Exception: if called in the receive state, or after the end
                response has already been seen.
        """
        if not self.is_send_to_remote():
            raise Exception(
                "get_send_packet() called on MultiResponseGdbRemoteEntry that is not in the send state")
        if self._end_matched:
            raise Exception(
                "get_send_packet() called on MultiResponseGdbRemoteEntry but end of query/response sequence has already been seen.")

        # Choose the first or next query for the base payload.
        if self._iteration == 0 and self._first_query:
            payload = self._first_query
        else:
            payload = self._next_query

        # Append the 0-based iteration index as a hex suffix when
        # requested (e.g. qRegisterInfo0, qRegisterInfo1, ...).
        if self._append_iteration_suffix:
            payload += "%x" % self._iteration

        # Keep track of the iteration.
        self._iteration += 1

        # Now that we've given the query packet, flip the mode to
        # receive/match.
        self._is_send_to_remote = False

        # Return the result, converted to packet form.
        return gdbremote_packet_encode_string(payload)

    def is_consumed(self):
        """Return True once the end-of-sequence response has been matched."""
        return self._end_matched

    def assert_match(self, asserter, actual_packet, context=None):
        """Collect one response, or finish when it matches the end regex.

        Non-end responses are appended to context[save_key], and the entry
        flips back to the send state so the next query can be issued.

        Raises:
            Exception: if called in the send state, after the end response
                has been seen, or when the runaway threshold is reached.
        """
        # This only makes sense for matching lines coming from the remote debug
        # monitor.
        if self.is_send_to_remote():
            raise Exception(
                "assert_match() called on MultiResponseGdbRemoteEntry but state is set to send a query packet.")

        if self._end_matched:
            raise Exception(
                "assert_match() called on MultiResponseGdbRemoteEntry but end of query/response sequence has already been seen.")

        # Set up a context as needed.
        if not context:
            context = {}

        # Check if the packet matches the end condition.
        if self._end_regex.match(actual_packet):
            # We're done iterating.
            self._end_matched = True
            return context

        # Not done iterating - save the packet.
        responses = context.setdefault(self._save_key, [])
        responses.append(actual_packet)

        # Check for a runaway response cycle.
        if len(responses) >= self._runaway_response_count:
            raise Exception(
                "runaway query/response cycle detected: %d responses captured so far. Last response: %s" %
                (len(responses), responses[-1]))

        # Flip the mode to send for generating the query.
        self._is_send_to_remote = True
        return context
697
698
class MatchRemoteOutputEntry(GdbRemoteEntryBase):
    """Waits for output from the debug monitor to match a regex or time out.

    This entry type tries to match each time new gdb remote output is accumulated
    using a provided regex.  If the output does not match the regex within the
    given timeframe, the command fails the playback session.  If the regex does
    match, any capture fields are recorded in the context.

    Settings accepted from params:

        regex: required. Specifies a compiled regex object that must either succeed
            with re.match or re.search (see regex_mode below) within the given timeout
            (see timeout_seconds below) or cause the playback to fail.

        regex_mode: optional. Available values: "match" or "search". If "match", the entire
            stub output as collected so far must match the regex.  If search, then the regex
            must match starting somewhere within the output text accumulated thus far.
            Default: "match" (i.e. the regex must match the entirety of the accumulated output
            buffer, so unexpected text will generally fail the match).

        capture: optional.  If specified, is a dictionary of regex match group indices (should start
            with 1) to variable names that will store the capture group indicated by the
            index. For example, {1:"thread_id"} will store capture group 1's content in the
            context dictionary where "thread_id" is the key and the match group value is
            the value. The value stored off can be used later in a expect_captures expression.
            This arg only makes sense when regex is specified.
    """

    def __init__(self, regex=None, regex_mode="match", capture=None):
        self._regex = regex
        self._regex_mode = regex_mode
        self._capture = capture
        self._matched = False

        if not self._regex:
            raise Exception("regex cannot be None")

        if self._regex_mode not in ["match", "search"]:
            raise Exception(
                "unsupported regex mode \"{}\": must be \"match\" or \"search\"".format(
                    self._regex_mode))

    def is_output_matcher(self):
        """This entry matches against accumulated $O output."""
        return True

    def is_send_to_remote(self):
        # This is always a "wait for remote" command.
        return False

    def is_consumed(self):
        """Consumed only once the regex has matched the accumulated output."""
        return self._matched

    def assert_match(self, asserter, accumulated_output, context):
        """Try to match accumulated output; record captures on success.

        Returns the context unchanged when there is no match yet (more $O
        output may still arrive), or with captures stored once matched.
        """
        # Validate args.
        if not accumulated_output:
            raise Exception("accumulated_output cannot be none")
        if not context:
            raise Exception("context cannot be none")

        # Validate that we haven't already matched.
        if self._matched:
            raise Exception(
                "invalid state - already matched, attempting to match again")

        # Apply the regex in the configured mode.
        if self._regex_mode == "match":
            match = self._regex.match(accumulated_output)
        elif self._regex_mode == "search":
            match = self._regex.search(accumulated_output)
        else:
            raise Exception(
                "Unexpected regex mode: {}".format(self._regex_mode))

        # No match yet: wait to try again after the next $O content, or
        # time out.
        if match is None:
            return context

        # We do match.
        self._matched = True

        # Collect up any captures into the context.
        if self._capture:
            for group_index, var_name in list(self._capture.items()):
                captured_text = match.group(group_index)
                if not captured_text:
                    raise Exception(
                        "No content for group index {}".format(group_index))
                context[var_name] = captured_text

        return context
798
799
class GdbRemoteTestSequence(object):
    """An ordered sequence of gdb-remote playback entries.

    Entries are built from gdb-remote packet log lines and/or dictionary
    entry descriptions via add_log_lines().
    """

    # Matches a packet log line, capturing the direction ("read"/"send")
    # and the packet payload.
    _LOG_LINE_REGEX = re.compile(r'^.*(read|send)\s+packet:\s+(.+)$')

    def __init__(self, logger):
        self.entries = []   # ordered playback entries
        self.logger = logger

    def add_log_lines(self, log_lines, remote_input_is_read):
        """Convert log lines and dict descriptions into playback entries.

        Args:
            log_lines: an iterable whose elements are either packet log
                line strings, or dicts describing a "regex_capture"
                (default), "multi_response" or "output_match" entry.

            remote_input_is_read: forwarded to
                _is_packet_lldb_gdbserver_input() to decide which packet
                direction counts as input to the debug monitor.

        Raises:
            Exception: when a string doesn't parse as a packet log line,
                or a dict has an unknown "type".
        """
        for line in log_lines:
            if isinstance(line, str):
                # Handle log line import.
                match = self._LOG_LINE_REGEX.match(line)
                if not match:
                    raise Exception(
                        "failed to interpret log line: {}".format(line))
                direction = match.group(1)
                playback_packet = match.group(2)
                # True: content to send to the remote debug monitor.
                # False: content expected back from it.
                self.entries.append(
                    GdbRemoteEntry(
                        is_send_to_remote=_is_packet_lldb_gdbserver_input(
                            direction, remote_input_is_read),
                        exact_payload=playback_packet))
            elif isinstance(line, dict):
                entry_type = line.get("type", "regex_capture")
                if entry_type == "regex_capture":
                    # Handle more explicit control over details via dictionary.
                    direction = line.get("direction", None)
                    regex = line.get("regex", None)
                    capture = line.get("capture", None)
                    expect_captures = line.get("expect_captures", None)

                    # Compile the regex if given as a string.
                    if regex and isinstance(regex, str):
                        regex = re.compile(regex)

                    self.entries.append(
                        GdbRemoteEntry(
                            is_send_to_remote=_is_packet_lldb_gdbserver_input(
                                direction, remote_input_is_read),
                            regex=regex,
                            capture=capture,
                            expect_captures=expect_captures))
                elif entry_type == "multi_response":
                    self.entries.append(MultiResponseGdbRemoteEntry(line))
                elif entry_type == "output_match":
                    regex = line.get("regex", None)
                    # Compile the regex; DOTALL so "." spans newlines in
                    # the accumulated output.
                    if regex and isinstance(regex, str):
                        regex = re.compile(regex, re.DOTALL)

                    regex_mode = line.get("regex_mode", "match")
                    capture = line.get("capture", None)
                    self.entries.append(
                        MatchRemoteOutputEntry(
                            regex=regex,
                            regex_mode=regex_mode,
                            capture=capture))
                else:
                    raise Exception("unknown entry type \"%s\"" % entry_type)
890
891
def process_is_running(pid, unknown_value=True):
    """If possible, validate that the given pid represents a running process on the local system.

    Args:

        pid: an OS-specific representation of a process id.  Should be an integral value.

        unknown_value: value used when we cannot determine how to check running local
        processes on the OS.

    Returns:

        If we can figure out how to check running process ids on the given OS:
        return True if the process is running, or False otherwise.

        If we don't know how to check running process ids on the given OS:
        return the value provided by the unknown_value arg.

    Raises:
        Exception: if pid is not an integral type.
    """
    if not isinstance(pid, six.integer_types):
        raise Exception(
            "pid must be an integral type (actual type: %s)" % str(
                type(pid)))

    if lldb.remote_platform:
        # Don't know how to get list of running process IDs on a remote
        # platform
        return unknown_value
    elif platform.system() in ['Darwin', 'Linux', 'FreeBSD', 'NetBSD']:
        # Build the list of running process ids.  universal_newlines=True
        # makes check_output return str (not bytes) on Python 3, so the
        # split('\n') below works on both Python 2 and 3.
        output = subprocess.check_output(
            "ps ax | awk '{ print $1; }'", shell=True,
            universal_newlines=True)
        text_process_ids = output.split('\n')[1:]
        # Convert text pids to ints
        process_ids = [int(text_pid)
                       for text_pid in text_process_ids if text_pid != '']
    # elif {your_platform_here}:
    #   fill in process_ids as a list of int type process IDs running on
    #   the local system.
    else:
        # Don't know how to get list of running process IDs on this
        # OS, so return the "don't know" value.
        return unknown_value

    # Check if the pid is in the process_ids
    return pid in process_ids
939
if __name__ == '__main__':
    # Script mode: report whether an lldb-server binary can be located.
    EXE_PATH = get_lldb_server_exe()
    if not EXE_PATH:
        print("lldb-server could not be found")
    else:
        print("lldb-server path detected: {}".format(EXE_PATH))