2 Base class for DarwinLog tests.
6 from __future__ import print_function
18 from lldb import SBProcess, SBTarget
20 from lldbsuite.test import decorators
21 from lldbsuite.test import lldbtest
22 from lldbsuite.test import lldbtest_config
23 from lldbsuite.test import lldbutil
def expand_darwinlog_command(command):
    """Return the full lldb invocation for a darwin-log plugin *command*."""
    prefix = "plugin structured-data darwin-log "
    return prefix + command
def expand_darwinlog_settings_set_command(command):
    """Return a 'settings set' invocation for a darwin-log plugin setting."""
    prefix = "settings set plugin.structured-data.darwin-log."
    return prefix + command
class DarwinLogTestBase(lldbtest.TestBase):
    """Base class for DarwinLog test cases that are pexpect-based."""
    # These tests don't exercise debug info, so build each binary only once.
    NO_DEBUG_INFO_TESTCASE = True

    # Matches lldb's announcement that the inferior process resumed.
    CONTINUE_REGEX = re.compile(r"Process \d+ resuming")

    def setUp(self):
        """Skip on pre-10.12 hosts and initialize pexpect-related state."""
        # Call super's setUp().
        super(DarwinLogTestBase, self).setUp()

        # Until other systems support this, exit
        # early if we're not macOS version 10.12
        # or greater.
        # NOTE(review): platform.mac_ver() returns '' off-macOS, which would
        # make int() raise — assumes Darwin-only runners; confirm decorators.
        version = platform.mac_ver()[0].split('.')
        if ((int(version[0]) == 10) and (int(version[1]) < 12) or
                (int(version[0]) < 10)):
            self.skipTest("DarwinLog tests currently require macOS 10.12+")
            return

        # Default pexpect state; self.child is populated by
        # run_lldb_to_breakpoint().
        self.child = None
        self.child_prompt = '(lldb) '
        self.strict_sources = False
        self.enable_process_monitor_logging = False
58 def run_lldb_to_breakpoint(self, exe, source_file, line,
59 enable_command=None, settings_commands=None):
62 # Set self.child_prompt, which is "(lldb) ".
63 prompt = self.child_prompt
65 # So that the child gets torn down after the test.
66 self.child = pexpect.spawn('%s %s %s' % (lldbtest_config.lldbExec,
67 self.lldbOption, exe))
70 # Turn on logging for what the child sends back.
72 child.logfile_read = sys.stdout
74 if self.enable_process_monitor_logging:
75 if platform.system() == 'Darwin':
77 "settings set target.process.extra-startup-command "
78 "QSetLogging:bitmask=LOG_DARWIN_LOG;")
81 # Run the enable command if we have one.
82 if enable_command is not None:
83 self.runCmd(enable_command)
86 # Disable showing of source lines at our breakpoint.
87 # This is necessary for the logging tests, because the very
88 # text we want to match for output from the running inferior
89 # will show up in the source as well. We don't want the source
90 # output to erroneously make a match with our expected output.
91 self.runCmd("settings set stop-line-count-before 0")
93 self.runCmd("settings set stop-line-count-after 0")
96 # While we're debugging, turn on packet logging.
97 self.runCmd("log enable -f /tmp/packets.log gdb-remote packets")
100 # Prevent mirroring of NSLog/os_log content to stderr. We want log
101 # messages to come exclusively through our log channel.
103 "settings set target.env-vars IDE_DISABLED_OS_ACTIVITY_DT_MODE=1")
106 # Run any darwin-log settings commands now, before we enable logging.
107 if settings_commands is not None:
108 for setting_command in settings_commands:
110 expand_darwinlog_settings_set_command(setting_command))
113 # Set breakpoint right before the os_log() macros. We don't
114 # set it on the os_log*() calls because these are a number of
115 # nested-scoped calls that will cause the debugger to stop
116 # multiple times on the same line. That is difficult to match
117 # os_log() content by since it is non-deterministic what the
118 # ordering between stops and log lines will be. This is why
119 # we stop before, and then have the process run in a sleep
120 # afterwards, so we get the log messages while the target
121 # process is "running" (sleeping).
122 child.sendline('breakpoint set -f %s -l %d' % (source_file, line))
123 child.expect_exact(prompt)
125 # Now run to the breakpoint that we just set.
126 child.sendline('run')
127 child.expect_exact(prompt)
129 # Ensure we stopped at a breakpoint.
130 self.runCmd("thread list")
131 self.expect(re.compile(r"stop reason = breakpoint"))
133 # Now we're ready to check if DarwinLog is available.
134 if not self.darwin_log_available():
135 self.skipTest("DarwinLog not available")
137 def runCmd(self, cmd):
138 self.child.sendline(cmd)
140 def expect_prompt(self, exactly=True):
141 self.expect(self.child_prompt, exactly=exactly)
143 def expect(self, pattern, exactly=False, *args, **kwargs):
145 return self.child.expect_exact(pattern, *args, **kwargs)
146 return self.child.expect(pattern, *args, **kwargs)
148 def darwin_log_available(self):
149 self.runCmd("plugin structured-data darwin-log status")
150 self.expect(re.compile(r"Availability: ([\S]+)"))
151 return self.child.match is not None and (
152 self.child.match.group(1) == "available")
154 def do_test(self, enable_options, expect_regexes=None,
155 settings_commands=None):
156 """Test that a single fall-through reject rule rejects all logging."""
157 self.build(dictionary=self.d)
158 self.setTearDownCleanup(dictionary=self.d)
160 # Build the darwin-log enable command.
161 enable_cmd = expand_darwinlog_command('enable')
162 if enable_options is not None and len(enable_options) > 0:
163 enable_cmd += ' ' + ' '.join(enable_options)
165 exe = os.path.join(os.getcwd(), self.exe_name)
166 self.run_lldb_to_breakpoint(exe, self.source, self.line,
167 enable_command=enable_cmd,
168 settings_commands=settings_commands)
172 self.runCmd("process continue")
173 self.expect(self.CONTINUE_REGEX)
175 if expect_regexes is None:
176 # Expect matching a log line or program exit.
177 # Test methods determine which ones are valid.
179 [re.compile(r"source-log-([^-]+)-(\S+)"),
180 re.compile(r"exited with status")
182 self.expect(expect_regexes)
def remove_add_mode_entry(log_entries):
    """libtrace creates an "Add Mode:..." message when logging is enabled.
    Strip this out of results since our test subjects don't create it."""
    filtered = []
    for entry in log_entries:
        if "message" not in entry:
            continue
        if entry["message"].startswith("Add Mode:"):
            continue
        filtered.append(entry)
    return filtered
class DarwinLogEventBasedTestBase(lldbtest.TestBase):
    """Base class for event-based DarwinLog tests."""
    # Debug info is irrelevant to these tests; build each binary only once.
    NO_DEBUG_INFO_TESTCASE = True
197 class EventListenerThread(threading.Thread):
199 def __init__(self, listener, process, trace_on, max_entry_count):
201 DarwinLogEventBasedTestBase.EventListenerThread,
203 self.process = process
204 self.listener = listener
205 self.trace_on = trace_on
206 self.max_entry_count = max_entry_count
207 self.exception = None
208 self.structured_data_event_count = 0
209 self.wait_seconds = 2
210 self.max_timeout_count = 4
211 self.log_entries = []
213 def handle_structured_data_event(self, event):
214 structured_data = SBProcess.GetStructuredDataFromEvent(event)
215 if not structured_data.IsValid():
217 print("invalid structured data")
220 # Track that we received a valid structured data event.
221 self.structured_data_event_count += 1
223 # Grab the individual log entries from the JSON.
224 stream = lldb.SBStream()
225 structured_data.GetAsJSON(stream)
226 dict = json.loads(stream.GetData())
227 self.log_entries.extend(dict["events"])
229 print("Structured data (raw):", stream.GetData())
231 # Print the pretty-printed version.
234 structured_data.PrettyPrint(stream)
235 print("Structured data (pretty print):",
238 def done(self, timeout_count):
239 """Returns True when we're done listening for events."""
240 # See if we should consider the number of events retrieved.
241 if self.max_entry_count is not None:
242 if len(self.log_entries) >= self.max_entry_count:
243 # We've received the max threshold of events expected,
246 print("Event listener thread exiting due to max "
247 "expected log entry count being reached.")
250 # If our event timeout count has exceeded our maximum timeout count,
252 if timeout_count >= self.max_timeout_count:
254 print("Event listener thread exiting due to max number of "
255 "WaitForEvent() timeouts being reached.")
258 # If our process is dead, we're done.
259 if not self.process.is_alive:
261 print("Event listener thread exiting due to test inferior "
269 event = lldb.SBEvent()
273 # Wait up to 4 times for the event to arrive.
274 while not self.done(timeout_count):
276 print("Calling wait for event...")
277 if self.listener.WaitForEvent(self.wait_seconds, event):
278 while event.IsValid():
279 # Check if it's a process event.
280 if SBProcess.EventIsStructuredDataEvent(event):
281 self.handle_structured_data_event(event)
284 print("ignoring unexpected event:",
285 lldbutil.get_description(event))
286 # Grab the next event, if there is one.
288 if not self.listener.GetNextEvent(event):
290 print("listener has no more events "
291 "available at this time")
294 print("timeout occurred waiting for event...")
296 self.listener.Clear()
297 except Exception as e:
301 # Call super's setUp().
302 super(DarwinLogEventBasedTestBase, self).setUp()
304 # Until other systems support this, exit
305 # early if we're not macOS version 10.12
307 version = platform.mac_ver()[0].split('.')
308 if ((int(version[0]) == 10) and (int(version[1]) < 12) or
309 (int(version[0]) < 10)):
310 self.skipTest("DarwinLog tests currently require macOS 10.12+")
314 self.source = 'main.c'
317 self.exe_name = 'a.out'
318 self.d = {'C_SOURCES': self.source, 'EXE': self.exe_name}
321 self.line = lldbtest.line_number(self.source, '// break here')
323 # Enable debugserver logging of the darwin log collection
325 self.runCmd("settings set target.process.extra-startup-command "
326 "QSetLogging:bitmask=LOG_DARWIN_LOG;")
328 def darwin_log_available(self):
329 match = self.match("plugin structured-data darwin-log status",
330 patterns=[r"Availability: ([\S]+)"])
331 return match is not None and (match.group(1) == "available")
333 def do_test(self, enable_options, settings_commands=None,
334 run_enable_after_breakpoint=False, max_entry_count=None):
335 """Runs the test inferior, returning collected events.
337 This method runs the test inferior to the first breakpoint hit.
338 It then adds a listener for structured data events, and collects
339 all events from that point forward until end of execution of the
340 test inferior. It then returns those events.
343 A list of structured data events received, in the order they
346 self.build(dictionary=self.d)
347 self.setTearDownCleanup(dictionary=self.d)
349 exe = os.path.join(os.getcwd(), self.exe_name)
351 # Create a target by the debugger.
352 target = self.dbg.CreateTarget(exe)
353 self.assertTrue(target, lldbtest.VALID_TARGET)
355 # Run the darwin-log settings commands.
356 if settings_commands is not None:
357 for setting_command in settings_commands:
359 expand_darwinlog_settings_set_command(setting_command))
361 # Build the darwin-log enable command.
362 enable_cmd = expand_darwinlog_command("enable")
363 if enable_options is not None and len(enable_options) > 0:
364 enable_cmd += ' ' + ' '.join(enable_options)
366 # Run the darwin-log enable command now if we are not supposed
367 # to do it at the first breakpoint. This tests the start-up
368 # code, which has the benefit of being able to set os_log-related
369 # environment variables.
370 if not run_enable_after_breakpoint:
371 self.runCmd(enable_cmd)
373 # Create the breakpoint.
374 breakpoint = target.BreakpointCreateByLocation(self.source, self.line)
375 self.assertIsNotNone(breakpoint)
376 self.assertTrue(breakpoint.IsValid())
377 self.assertEqual(1, breakpoint.GetNumLocations(),
378 "Should have found one breakpoint")
380 # Enable packet logging.
381 # self.runCmd("log enable -f /tmp/packets.log gdb-remote packets")
382 # self.runCmd("log enable lldb process")
384 # Launch the process - doesn't stop at entry.
385 process = target.LaunchSimple(None, None, os.getcwd())
386 self.assertIsNotNone(process, lldbtest.PROCESS_IS_VALID)
388 # Keep track of whether we're tracing output.
389 trace_on = self.TraceOn()
391 # Get the next thread that stops.
392 from lldbsuite.test.lldbutil import get_stopped_thread
393 thread = get_stopped_thread(process, lldb.eStopReasonBreakpoint)
395 self.assertIsNotNone(thread, "There should be a thread stopped "
398 # The process should be stopped at this point.
399 self.expect("process status", lldbtest.PROCESS_STOPPED,
400 patterns=['Process .* stopped'])
402 # The stop reason of the thread should be breakpoint.
403 self.expect("thread list", lldbtest.STOPPED_DUE_TO_BREAKPOINT,
404 substrs=['stopped', 'stop reason = breakpoint'])
406 # And our one and only breakpoint should have been hit.
407 self.assertEquals(breakpoint.GetHitCount(), 1)
409 # Check if DarwinLog is available. This check cannot be done
410 # until after the process has started, as the feature availability
411 # comes through the stub. The stub isn't running until
412 # the target process is running. So this is really the earliest
414 if not self.darwin_log_available():
415 self.skipTest("DarwinLog not available")
417 # Now setup the structured data listener.
419 # Grab the broadcaster for the process. We'll be attaching our
421 broadcaster = process.GetBroadcaster()
422 self.assertIsNotNone(broadcaster)
424 listener = lldb.SBListener("SBStructuredData listener")
425 self.assertIsNotNone(listener)
427 rc = broadcaster.AddListener(
428 listener, lldb.SBProcess.eBroadcastBitStructuredData)
429 self.assertTrue(rc, "Successfully add listener to process broadcaster")
431 # Start the listening thread to retrieve the events.
432 # Bump up max entry count for the potentially included Add Mode:
434 if max_entry_count is not None:
436 event_thread = self.EventListenerThread(listener, process, trace_on,
440 # Continue the test inferior. We should get any events after this.
443 # Wait until the event thread terminates.
444 # print("main thread now waiting for event thread to receive events.")
447 # If the process is still alive, we kill it here.
451 # Fail on any exceptions that occurred during event execution.
452 if event_thread.exception is not None:
453 # Re-raise it here so it shows up as a test error.
456 # Return the collected logging events.
457 return remove_add_mode_entry(event_thread.log_entries)