"""
                     The LLVM Compiler Infrastructure

This file is distributed under the University of Illinois Open Source
License. See LICENSE.TXT for details.

Sync lldb and related source from a local machine to a remote machine.

This facilitates working on the lldb source code on multiple machines
and multiple OS types, verifying changes across all.

This module provides asyncore channels used within the LLDB test
framework.
"""
from __future__ import print_function
from __future__ import absolute_import

# System modules
import asyncore
import socket
import struct

# Third-party modules
from six.moves import cPickle
class UnpicklingForwardingReaderChannel(asyncore.dispatcher):
    """Provides an unpickling, forwarding asyncore dispatch channel reader.

    Inferior dotest.py processes with side-channel-based test results will
    send test result event data in a pickled format, one event at a time.
    This class supports reconstructing the pickled data and forwarding it
    on to its final destination.

    The channel data is written in the form:
    {4-byte big-endian unsigned payload length}{payload_bytes}

    The bulk of this class is devoted to reading and parsing out
    the payload bytes and handing each unpickled event to the
    forwarding function.
    """

    # Number of bytes in the length prefix that precedes every payload
    # (matches the "!I" struct format used below).
    HEADER_LENGTH = 4

    def __init__(self, file_object, async_map, forwarding_func):
        """Initializes the reader channel.

        @param file_object the connected socket to read event data from.
        @param async_map the asyncore channel map this dispatcher joins.
        @param forwarding_func callable invoked with each unpickled event;
        must not be None.

        @raises Exception if forwarding_func is None.
        """
        asyncore.dispatcher.__init__(self, sock=file_object, map=async_map)

        self.header_contents = b""
        self.packet_bytes_remaining = 0
        self.reading_header = True
        # Accumulates payload bytes until a complete packet is available.
        self.ibuffer = b""
        self.forwarding_func = forwarding_func
        if forwarding_func is None:
            # This whole class is useless if we do nothing with the
            # unpickled results.
            raise Exception("forwarding function must be set")

    def deserialize_payload(self):
        """Unpickles the collected input buffer bytes and forwards."""
        if len(self.ibuffer) > 0:
            self.forwarding_func(cPickle.loads(self.ibuffer))
            # Reset the buffer so the next packet starts from scratch;
            # without this, packets would concatenate into a corrupt pickle.
            self.ibuffer = b""

    def consume_header_bytes(self, data):
        """Consumes header bytes from the front of data.
        @param data the incoming data stream bytes
        @return any data leftover after consuming header bytes.
        """
        # We're done if there is no content.
        if not data or (len(data) == 0):
            return None

        full_header_len = self.HEADER_LENGTH

        # This method must only be called while a header is still pending.
        assert len(self.header_contents) < full_header_len

        bytes_avail = len(data)
        bytes_needed = full_header_len - len(self.header_contents)
        header_bytes_avail = min(bytes_needed, bytes_avail)
        self.header_contents += data[:header_bytes_avail]
        if len(self.header_contents) == full_header_len:
            # Header complete: decode the big-endian payload length and
            # switch to payload-reading mode.
            self.packet_bytes_remaining = struct.unpack(
                "!I", self.header_contents)[0]
            self.header_contents = b""
            self.reading_header = False
            return data[header_bytes_avail:]

        # If we made it here, we've exhausted the data and
        # we're still parsing header content.
        return None

    def consume_payload_bytes(self, data):
        """Consumes payload bytes from the front of data.
        @param data the incoming data stream bytes
        @return any data leftover after consuming remaining payload bytes.
        """
        if not data or (len(data) == 0):
            # We're done and there's nothing to do.
            return None

        data_len = len(data)
        if data_len <= self.packet_bytes_remaining:
            # We're consuming all the data provided.
            self.ibuffer += data
            self.packet_bytes_remaining -= data_len

            # If we're no longer waiting for payload bytes,
            # we flip back to parsing header bytes and we
            # unpickle the payload contents.
            if self.packet_bytes_remaining < 1:
                self.reading_header = True
                self.deserialize_payload()

            # We're done, no more data left.
            return None
        else:
            # We're only consuming a portion of the data since
            # the data contains more than the payload amount.
            self.ibuffer += data[:self.packet_bytes_remaining]
            data = data[self.packet_bytes_remaining:]

            # We now move on to reading the header.
            self.reading_header = True
            self.packet_bytes_remaining = 0

            # And we can deserialize the payload.
            self.deserialize_payload()

            # Return the remaining data.
            return data

    def handle_read(self):
        """asyncore callback: reads socket data and parses packets."""
        # Read some data from the socket.
        try:
            data = self.recv(8192)
            # print('driver socket READ: %d bytes' % len(data))
        except socket.error as socket_error:
            print(
                "\nINFO: received socket error when reading data "
                "from test inferior:\n{}".format(socket_error))
            raise
        except Exception as general_exception:
            print(
                "\nERROR: received non-socket error when reading data "
                "from the test inferior:\n{}".format(general_exception))
            raise

        # Consume the message content.  Alternate between header and
        # payload parsing; each consumer returns the leftover bytes.
        while data and (len(data) > 0):
            # If we're reading the header, gather header bytes.
            if self.reading_header:
                data = self.consume_header_bytes(data)
            else:
                data = self.consume_payload_bytes(data)

    def handle_close(self):
        """asyncore callback: tears down the channel on remote close."""
        # print("socket reader: closing port")
        self.close()
class UnpicklingForwardingListenerChannel(asyncore.dispatcher):
    """Provides a socket listener asyncore channel for unpickling/forwarding.

    This channel will listen on a socket port (use 0 for host-selected). Any
    client that connects will have an UnpicklingForwardingReaderChannel handle
    communication over the connection.

    The dotest parallel test runners, when collecting test results, open the
    test results side channel over a socket. This channel handles connections
    from inferiors back to the test runner. Each worker fires up a listener
    for each inferior invocation. This simplifies the asyncore.loop() usage,
    one of the reasons for implementing with asyncore. This listener shuts
    down once a single connection is made to it.
    """

    def __init__(self, async_map, host, port, backlog_count, forwarding_func):
        """Initializes the listener channel and starts listening.

        @param async_map the asyncore channel map this dispatcher joins,
        shared with the per-connection reader channels it creates.
        @param host the interface address to bind.
        @param port the port to bind (0 selects a free port; see
        self.address for the bound (host, port) tuple).
        @param backlog_count the listen() backlog size.
        @param forwarding_func callable handed to each reader channel for
        unpickled events; must not be None.

        @raises Exception if forwarding_func is None.
        """
        asyncore.dispatcher.__init__(self, map=async_map)
        self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
        self.set_reuse_addr()
        self.bind((host, port))
        # Capture the actual bound address (meaningful when port was 0).
        self.address = self.socket.getsockname()
        self.listen(backlog_count)
        # Set once the first (and only) connection is accepted.
        self.handler = None
        self.async_map = async_map
        self.forwarding_func = forwarding_func
        if forwarding_func is None:
            # This whole class is useless if we do nothing with the
            # unpickled results.
            raise Exception("forwarding function must be set")

    def handle_accept(self):
        """asyncore callback: hands an inbound connection to a reader."""
        (sock, addr) = self.socket.accept()
        if sock is not None and addr is not None:
            # print('Incoming connection from %s' % repr(addr))
            self.handler = UnpicklingForwardingReaderChannel(
                sock, self.async_map, self.forwarding_func)

    def handle_close(self):
        # NOTE(review): the body of this method lies beyond the visible
        # chunk; reconstructed as a plain channel close — confirm upstream.
        # print("listener: closing port")
        self.close()