git.ipfire.org Git - thirdparty/binutils-gdb.git/commitdiff
gdb/python/dap: prefix internal attributes with underscore
author    Simon Marchi <simon.marchi@polymtl.ca>
Thu, 13 Feb 2025 20:08:29 +0000 (15:08 -0500)
committer Simon Marchi <simon.marchi@polymtl.ca>
Fri, 14 Feb 2025 17:46:47 +0000 (12:46 -0500)
I'm currently reading the DAP code, and I think this would help.  This
is pretty much standard Python style; we do it in some places but not
others.  I think it helps readability, by saying that an attribute
isn't meant to be accessed outside the class.
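
As a minimal, hypothetical sketch of the convention (the _Tracker
class and its methods below are illustrative only, not code from this
commit): the leading underscore does not change behavior, it just
tells readers that the attribute is internal to the class.

    class _Tracker:
        def __init__(self):
            # Leading underscore: internal state, not meant to be
            # accessed outside the class.  Python does not enforce
            # this; it is purely a readability signal.
            self._labels = {}

        def add(self, pc, name):
            self._labels[pc] = name

        def lookup(self, pc):
            # Callers go through the public methods instead of
            # touching self._labels directly.
            return self._labels.get(pc)

Many Python tools follow the same convention when deciding what
counts as a class's public interface.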

A similar pass could be done for internal methods; I haven't done that
here.

Change-Id: I8e8789b39adafe62d14404d19f7fc75e2a364e01
Approved-By: Tom Tromey <tom@tromey.com>
gdb/python/lib/gdb/dap/breakpoint.py
gdb/python/lib/gdb/dap/disassemble.py
gdb/python/lib/gdb/dap/frames.py
gdb/python/lib/gdb/dap/globalvars.py
gdb/python/lib/gdb/dap/scopes.py
gdb/python/lib/gdb/dap/server.py
gdb/python/lib/gdb/dap/varref.py

gdb/python/lib/gdb/dap/breakpoint.py
index f0fe0734a03820174a531f6a205798cdd8852c0a..5fd0c1f665d9b618fbcf143094c468c3b44d9bb1 100644
@@ -218,11 +218,11 @@ class _PrintBreakpoint(gdb.Breakpoint):
     def __init__(self, logMessage, **args):
         super().__init__(**args)
         # Split the message up for easier processing.
-        self.message = re.split("{(.*?)}", logMessage)
+        self._message = re.split("{(.*?)}", logMessage)
 
     def stop(self):
         output = ""
-        for idx, item in enumerate(self.message):
+        for idx, item in enumerate(self._message):
             if idx % 2 == 0:
                 # Even indices are plain text.
                 output += item

gdb/python/lib/gdb/dap/disassemble.py
index 5389803c7445f0d4a80daa060dbc30700fd2210f..3c0d5178bd59b538b07d3b8b87ea7baf02436750 100644
@@ -26,30 +26,30 @@ class _BlockTracker:
         # Map from PC to symbol names.  A given PC is assumed to have
         # just one label -- DAP wouldn't let us return multiple labels
         # anyway.
-        self.labels = {}
+        self._labels = {}
         # Blocks that have already been handled.
-        self.blocks = set()
+        self._blocks = set()
 
     # Add a gdb.Block and its superblocks, ignoring the static and
     # global block.  BLOCK can also be None, which is ignored.
     def add_block(self, block):
         while block is not None:
-            if block.is_static or block.is_global or block in self.blocks:
+            if block.is_static or block.is_global or block in self._blocks:
                 return
-            self.blocks.add(block)
+            self._blocks.add(block)
             if block.function is not None:
-                self.labels[block.start] = block.function.name
+                self._labels[block.start] = block.function.name
             for sym in block:
                 if sym.addr_class == gdb.SYMBOL_LOC_LABEL:
-                    self.labels[int(sym.value())] = sym.name
+                    self._labels[int(sym.value())] = sym.name
             block = block.superblock
 
     # Add PC to this tracker.  Update RESULT as appropriate with
     # information about the source and any label.
     def add_pc(self, pc, result):
         self.add_block(gdb.block_for_pc(pc))
-        if pc in self.labels:
-            result["symbol"] = self.labels[pc]
+        if pc in self._labels:
+            result["symbol"] = self._labels[pc]
         sal = gdb.find_pc_line(pc)
         if sal.symtab is not None:
             if sal.line != 0:

gdb/python/lib/gdb/dap/frames.py
index f4e6565b943c3fd49f8bfa133f188c294a01d5db..770a177619a381756493f65ea94415bc5168785d 100644
@@ -75,16 +75,16 @@ def select_frame(id):
 # what is needed for the current callers.
 class _MemoizingIterator:
     def __init__(self, iterator):
-        self.iterator = iterator
-        self.seen = []
+        self._iterator = iterator
+        self._seen = []
 
     def __iter__(self):
         # First the memoized items.
-        for item in self.seen:
+        for item in self._seen:
             yield item
         # Now memoize new items.
-        for item in self.iterator:
-            self.seen.append(item)
+        for item in self._iterator:
+            self._seen.append(item)
             yield item
 
 
gdb/python/lib/gdb/dap/globalvars.py
index 104b242d8968a28fe1da2cec53ef3d2178c56829..bd9c53ad5eddd5ae41b104432e01356ddfd1a071 100644
@@ -37,8 +37,8 @@ gdb.events.cont.connect(clear)
 class _Globals(BaseReference):
     def __init__(self, filename, var_list):
         super().__init__("Globals")
-        self.filename = filename
-        self.var_list = var_list
+        self._filename = filename
+        self._var_list = var_list
 
     def to_object(self):
         result = super().to_object()
@@ -46,8 +46,8 @@ class _Globals(BaseReference):
         # How would we know?
         result["expensive"] = False
         result["namedVariables"] = self.child_count()
-        if self.filename is not None:
-            result["source"] = make_source(self.filename)
+        if self._filename is not None:
+            result["source"] = make_source(self._filename)
         return result
 
     def has_children(self):
@@ -56,11 +56,11 @@ class _Globals(BaseReference):
         return True
 
     def child_count(self):
-        return len(self.var_list)
+        return len(self._var_list)
 
     @in_gdb_thread
     def fetch_one_child(self, idx):
-        sym = self.var_list[idx]
+        sym = self._var_list[idx]
         return (sym.name, sym.value())
 
 
gdb/python/lib/gdb/dap/scopes.py
index 221ae35a0023522ad3d7304403232789a24d0fcf..1b98c4f49f7d8de30f895385d9ccbca2c0e51f76 100644
@@ -78,19 +78,19 @@ def symbol_value(sym, frame):
 class _ScopeReference(BaseReference):
     def __init__(self, name, hint, frameId: int, var_list):
         super().__init__(name)
-        self.hint = hint
-        self.frameId = frameId
+        self._hint = hint
+        self._frameId = frameId
         # VAR_LIST might be any kind of iterator, but it's convenient
         # here if it is just a collection.
-        self.var_list = tuple(var_list)
+        self._var_list = tuple(var_list)
 
     def to_object(self):
         result = super().to_object()
-        result["presentationHint"] = self.hint
+        result["presentationHint"] = self._hint
         # How would we know?
         result["expensive"] = False
         result["namedVariables"] = self.child_count()
-        frame = frame_for_id(self.frameId)
+        frame = frame_for_id(self._frameId)
         if frame.line() is not None:
             result["line"] = export_line(frame.line())
         filename = frame.filename()
@@ -102,11 +102,11 @@ class _ScopeReference(BaseReference):
         return True
 
     def child_count(self):
-        return len(self.var_list)
+        return len(self._var_list)
 
     @in_gdb_thread
     def fetch_one_child(self, idx):
-        return symbol_value(self.var_list[idx], frame_for_id(self.frameId))
+        return symbol_value(self._var_list[idx], frame_for_id(self._frameId))
 
 
 # A _ScopeReference that wraps the 'finish' value.  Note that this
@@ -136,10 +136,10 @@ class _RegisterReference(_ScopeReference):
     @in_gdb_thread
     def fetch_one_child(self, idx):
         return (
-            self.var_list[idx].name,
-            frame_for_id(self.frameId)
+            self._var_list[idx].name,
+            frame_for_id(self._frameId)
             .inferior_frame()
-            .read_register(self.var_list[idx]),
+            .read_register(self._var_list[idx]),
         )
 
 
gdb/python/lib/gdb/dap/server.py
index 6f3af732286b3260d90366882313265e35552fcc..8fdf0299d51c3f87444c15430918892af4942504 100644
@@ -115,10 +115,10 @@ class CancellationHandler:
         # The request currently being handled, or None.
         self.in_flight_dap_thread = None
         self.in_flight_gdb_thread = None
-        self.reqs = []
+        self._reqs = []
         # A set holding the request IDs of all deferred requests that
         # are still unresolved.
-        self.deferred_ids = set()
+        self._deferred_ids = set()
 
     @contextmanager
     def current_request(self, req):
@@ -138,7 +138,7 @@ class CancellationHandler:
     def defer_request(self, req):
         """Indicate that the request REQ has been deferred."""
         with self.lock:
-            self.deferred_ids.add(req)
+            self._deferred_ids.add(req)
 
     def request_finished(self, req):
         """Indicate that the request REQ is finished.
@@ -150,7 +150,7 @@ class CancellationHandler:
         with self.lock:
             # Use discard here, not remove, because this is called
             # regardless of whether REQ was deferred.
-            self.deferred_ids.discard(req)
+            self._deferred_ids.discard(req)
 
     def check_cancel(self, req):
         """Check whether request REQ is cancelled.
@@ -163,15 +163,15 @@ class CancellationHandler:
             deferred = []
             try:
                 # If the request is cancelled, don't execute the region.
-                while len(self.reqs) > 0 and self.reqs[0] <= req:
+                while len(self._reqs) > 0 and self._reqs[0] <= req:
                     # In most cases, if we see a cancellation request
                     # on the heap that is before REQ, we can just
                     # ignore it -- we missed our chance to cancel that
                     # request.
-                    next_id = heapq.heappop(self.reqs)
+                    next_id = heapq.heappop(self._reqs)
                     if next_id == req:
                         raise KeyboardInterrupt()
-                    elif next_id in self.deferred_ids:
+                    elif next_id in self._deferred_ids:
                         # We could be in a situation where we're
                         # processing request 23, but request 18 is
                         # still deferred.  In this case, popping
@@ -180,7 +180,7 @@ class CancellationHandler:
                         deferred.append(next_id)
             finally:
                 for x in deferred:
-                    heapq.heappush(self.reqs, x)
+                    heapq.heappush(self._reqs, x)
 
     def cancel(self, req):
         """Call to cancel a request.
@@ -198,7 +198,7 @@ class CancellationHandler:
                 # the weird property that a request can be cancelled
                 # before it is even sent.  It didn't seem worthwhile
                 # to try to check for this.
-                heapq.heappush(self.reqs, req)
+                heapq.heappush(self._reqs, req)
 
     @contextmanager
     def interruptable_region(self, req):
@@ -225,20 +225,20 @@ class Server:
     """The DAP server class."""
 
     def __init__(self, in_stream, out_stream, child_stream):
-        self.in_stream = in_stream
-        self.out_stream = out_stream
-        self.child_stream = child_stream
-        self.delayed_fns_lock = threading.Lock()
+        self._in_stream = in_stream
+        self._out_stream = out_stream
+        self._child_stream = child_stream
+        self._delayed_fns_lock = threading.Lock()
         self.defer_stop_events = False
-        self.delayed_fns = []
+        self._delayed_fns = []
         # This queue accepts JSON objects that are then sent to the
         # DAP client.  Writing is done in a separate thread to avoid
         # blocking the read loop.
-        self.write_queue = DAPQueue()
+        self._write_queue = DAPQueue()
         # Reading is also done in a separate thread, and a queue of
         # requests is kept.
-        self.read_queue = DAPQueue()
-        self.done = False
+        self._read_queue = DAPQueue()
+        self._done = False
         self.canceller = CancellationHandler()
         global _server
         _server = self
@@ -315,7 +315,7 @@ class Server:
     # is run in its own thread.
     def _read_inferior_output(self):
         while True:
-            line = self.child_stream.readline()
+            line = self._child_stream.readline()
             self.send_event(
                 "output",
                 {
@@ -327,7 +327,7 @@ class Server:
     # Send OBJ to the client, logging first if needed.
     def _send_json(self, obj):
         log("WROTE: <<<" + json.dumps(obj) + ">>>")
-        self.write_queue.put(obj)
+        self._write_queue.put(obj)
 
     # This is run in a separate thread and simply reads requests from
     # the client and puts them into a queue.  A separate thread is
@@ -335,7 +335,7 @@ class Server:
     # will normally block, waiting for each request to complete.
     def _reader_thread(self):
         while True:
-            cmd = read_json(self.in_stream)
+            cmd = read_json(self._in_stream)
             if cmd is None:
                 break
             log("READ: <<<" + json.dumps(cmd) + ">>>")
@@ -351,9 +351,9 @@ class Server:
                 and "requestId" in cmd["arguments"]
             ):
                 self.canceller.cancel(cmd["arguments"]["requestId"])
-            self.read_queue.put(cmd)
+            self._read_queue.put(cmd)
         # When we hit EOF, signal it with None.
-        self.read_queue.put(None)
+        self._read_queue.put(None)
 
     @in_dap_thread
     def main_loop(self):
@@ -361,10 +361,10 @@ class Server:
         # Before looping, start the thread that writes JSON to the
         # client, and the thread that reads output from the inferior.
         start_thread("output reader", self._read_inferior_output)
-        json_writer = start_json_writer(self.out_stream, self.write_queue)
+        json_writer = start_json_writer(self._out_stream, self._write_queue)
         start_thread("JSON reader", self._reader_thread)
-        while not self.done:
-            cmd = self.read_queue.get()
+        while not self._done:
+            cmd = self._read_queue.get()
             # A None value here means the reader hit EOF.
             if cmd is None:
                 break
@@ -372,16 +372,16 @@ class Server:
             with self.canceller.current_request(req):
                 self._handle_command(cmd)
             fns = None
-            with self.delayed_fns_lock:
-                fns = self.delayed_fns
-                self.delayed_fns = []
+            with self._delayed_fns_lock:
+                fns = self._delayed_fns
+                self._delayed_fns = []
                 self.defer_stop_events = False
             for fn in fns:
                 fn()
         # Got the terminate request.  This is handled by the
         # JSON-writing thread, so that we can ensure that all
         # responses are flushed to the client before exiting.
-        self.write_queue.put(None)
+        self._write_queue.put(None)
         json_writer.join()
         send_gdb("quit")
 
@@ -389,8 +389,8 @@ class Server:
     def send_event_later(self, event, body=None):
         """Send a DAP event back to the client, but only after the
         current request has completed."""
-        with self.delayed_fns_lock:
-            self.delayed_fns.append(lambda: self.send_event(event, body))
+        with self._delayed_fns_lock:
+            self._delayed_fns.append(lambda: self.send_event(event, body))
 
     @in_gdb_thread
     def send_event_maybe_later(self, event, body=None):
@@ -400,17 +400,17 @@ class Server:
         the client."""
         with self.canceller.lock:
             if self.canceller.in_flight_dap_thread:
-                with self.delayed_fns_lock:
+                with self._delayed_fns_lock:
                     if self.defer_stop_events:
-                        self.delayed_fns.append(lambda: self.send_event(event, body))
+                        self._delayed_fns.append(lambda: self.send_event(event, body))
                         return
         self.send_event(event, body)
 
     @in_dap_thread
     def call_function_later(self, fn):
         """Call FN later -- after the current request's response has been sent."""
-        with self.delayed_fns_lock:
-            self.delayed_fns.append(fn)
+        with self._delayed_fns_lock:
+            self._delayed_fns.append(fn)
 
     # Note that this does not need to be run in any particular thread,
     # because it just creates an object and writes it to a thread-safe
@@ -432,7 +432,7 @@ class Server:
         # Just set a flag.  This operation is complicated because we
         # want to write the result of the request before exiting.  See
         # main_loop.
-        self.done = True
+        self._done = True
 
 
 def send_event(event, body=None):
@@ -629,19 +629,19 @@ class Invoker(object):
     """A simple class that can invoke a gdb command."""
 
     def __init__(self, cmd):
-        self.cmd = cmd
+        self._cmd = cmd
 
     # This is invoked in the gdb thread to run the command.
     @in_gdb_thread
     def __call__(self):
-        exec_and_log(self.cmd)
+        exec_and_log(self._cmd)
 
 
 class Cancellable(object):
 
     def __init__(self, fn, result_q=None):
-        self.fn = fn
-        self.result_q = result_q
+        self._fn = fn
+        self._result_q = result_q
         with _server.canceller.lock:
             self.req = _server.canceller.in_flight_dap_thread
 
@@ -650,13 +650,13 @@ class Cancellable(object):
     def __call__(self):
         try:
             with _server.canceller.interruptable_region(self.req):
-                val = self.fn()
-                if self.result_q is not None:
-                    self.result_q.put(val)
+                val = self._fn()
+                if self._result_q is not None:
+                    self._result_q.put(val)
         except (Exception, KeyboardInterrupt) as e:
-            if self.result_q is not None:
+            if self._result_q is not None:
                 # Pass result or exception to caller.
-                self.result_q.put(e)
+                self._result_q.put(e)
             elif isinstance(e, KeyboardInterrupt):
                 # Fn was cancelled.
                 pass

gdb/python/lib/gdb/dap/varref.py
index 0dd98797086e2e9e4d27875d15068b4fecee7443..b1d8ef7625fadfb127c3718cd3ab40922b6c0834 100644
@@ -60,8 +60,6 @@ class BaseReference(ABC):
 
     This class is just a base class, some methods must be implemented in
     subclasses.
-
-    The 'ref' field can be used as the variablesReference in the protocol.
     """
 
     @in_gdb_thread
@@ -73,8 +71,8 @@ class BaseReference(ABC):
 
         global all_variables
         all_variables.append(self)
-        self.ref = len(all_variables)
-        self.name = name
+        self._ref = len(all_variables)
+        self._name = name
         self.reset_children()
 
     @in_gdb_thread
@@ -83,9 +81,9 @@ class BaseReference(ABC):
 
         The resulting object is a starting point that can be filled in
         further.  See the Scope or Variable types in the spec"""
-        result = {"variablesReference": self.ref if self.has_children() else 0}
-        if self.name is not None:
-            result["name"] = str(self.name)
+        result = {"variablesReference": self._ref if self.has_children() else 0}
+        if self._name is not None:
+            result["name"] = str(self._name)
         return result
 
     @abstractmethod
@@ -97,13 +95,13 @@ class BaseReference(ABC):
         """Reset any cached information about the children of this object."""
         # A list of all the children.  Each child is a BaseReference
         # of some kind.
-        self.children = None
+        self._children = None
         # Map from the name of a child to a BaseReference.
-        self.by_name = {}
+        self._by_name = {}
         # Keep track of how many duplicates there are of a given name,
         # so that unique names can be generated.  Map from base name
         # to a count.
-        self.name_counts = defaultdict(lambda: 1)
+        self._name_counts = defaultdict(lambda: 1)
 
     @abstractmethod
     def fetch_one_child(self, index):
@@ -128,13 +126,13 @@ class BaseReference(ABC):
     # and
     # https://github.com/microsoft/debug-adapter-protocol/issues/149
     def _compute_name(self, name):
-        if name in self.by_name:
-            self.name_counts[name] += 1
+        if name in self._by_name:
+            self._name_counts[name] += 1
             # In theory there's no safe way to compute a name, because
             # a pretty-printer might already be generating names of
             # that form.  In practice I think we should not worry too
             # much.
-            name = name + " #" + str(self.name_counts[name])
+            name = name + " #" + str(self._name_counts[name])
         return name
 
     @in_gdb_thread
@@ -146,16 +144,16 @@ class BaseReference(ABC):
         Returns an iterable of some kind."""
         if count == 0:
             count = self.child_count()
-        if self.children is None:
-            self.children = [None] * self.child_count()
+        if self._children is None:
+            self._children = [None] * self.child_count()
         for idx in range(start, start + count):
-            if self.children[idx] is None:
+            if self._children[idx] is None:
                 (name, value) = self.fetch_one_child(idx)
                 name = self._compute_name(name)
                 var = VariableReference(name, value)
-                self.children[idx] = var
-                self.by_name[name] = var
-            yield self.children[idx]
+                self._children[idx] = var
+                self._by_name[name] = var
+            yield self._children[idx]
 
     @in_gdb_thread
     def find_child_by_name(self, name):
@@ -165,8 +163,8 @@ class BaseReference(ABC):
         # A lookup by name can only be done using names previously
         # provided to the client, so we can simply rely on the by-name
         # map here.
-        if name in self.by_name:
-            return self.by_name[name]
+        if name in self._by_name:
+            return self._by_name[name]
         raise DAPException("no variable named '" + name + "'")
 
 
@@ -181,15 +179,15 @@ class VariableReference(BaseReference):
         RESULT_NAME can be used to change how the simple string result
         is emitted in the result dictionary."""
         super().__init__(name)
-        self.result_name = result_name
-        self.value = value
+        self._result_name = result_name
+        self._value = value
         self._update_value()
 
     # Internal method to update local data when the value changes.
     def _update_value(self):
         self.reset_children()
-        self.printer = gdb.printing.make_visualizer(self.value)
-        self.child_cache = None
+        self._printer = gdb.printing.make_visualizer(self._value)
+        self._child_cache = None
         if self.has_children():
             self.count = -1
         else:
@@ -197,32 +195,32 @@ class VariableReference(BaseReference):
 
     def assign(self, value):
         """Assign VALUE to this object and update."""
-        self.value.assign(value)
+        self._value.assign(value)
         self._update_value()
 
     def has_children(self):
-        return hasattr(self.printer, "children")
+        return hasattr(self._printer, "children")
 
     def cache_children(self):
-        if self.child_cache is None:
+        if self._child_cache is None:
             # This discards all laziness.  This could be improved
             # slightly by lazily evaluating children, but because this
             # code also generally needs to know the number of
             # children, it probably wouldn't help much.  Note that
             # this is only needed with legacy (non-ValuePrinter)
             # printers.
-            self.child_cache = list(self.printer.children())
-        return self.child_cache
+            self._child_cache = list(self._printer.children())
+        return self._child_cache
 
     def child_count(self):
         if self.count is None:
             return None
         if self.count == -1:
             num_children = None
-            if isinstance(self.printer, gdb.ValuePrinter) and hasattr(
-                self.printer, "num_children"
+            if isinstance(self._printer, gdb.ValuePrinter) and hasattr(
+                self._printer, "num_children"
             ):
-                num_children = self.printer.num_children()
+                num_children = self._printer.num_children()
             if num_children is None:
                 num_children = len(self.cache_children())
             self.count = num_children
@@ -230,12 +228,12 @@ class VariableReference(BaseReference):
 
     def to_object(self):
         result = super().to_object()
-        result[self.result_name] = str(self.printer.to_string())
+        result[self._result_name] = str(self._printer.to_string())
         num_children = self.child_count()
         if num_children is not None:
             if (
-                hasattr(self.printer, "display_hint")
-                and self.printer.display_hint() == "array"
+                hasattr(self._printer, "display_hint")
+                and self._printer.display_hint() == "array"
             ):
                 result["indexedVariables"] = num_children
             else:
@@ -245,18 +243,18 @@ class VariableReference(BaseReference):
             # changed DAP to allow memory references for any of the
             # variable response requests, and to lift the restriction
             # to pointer-to-function from Variable.
-            if self.value.type.strip_typedefs().code == gdb.TYPE_CODE_PTR:
-                result["memoryReference"] = hex(int(self.value))
+            if self._value.type.strip_typedefs().code == gdb.TYPE_CODE_PTR:
+                result["memoryReference"] = hex(int(self._value))
         if client_bool_capability("supportsVariableType"):
-            result["type"] = str(self.value.type)
+            result["type"] = str(self._value.type)
         return result
 
     @in_gdb_thread
     def fetch_one_child(self, idx):
-        if isinstance(self.printer, gdb.ValuePrinter) and hasattr(
-            self.printer, "child"
+        if isinstance(self._printer, gdb.ValuePrinter) and hasattr(
+            self._printer, "child"
         ):
-            (name, val) = self.printer.child(idx)
+            (name, val) = self._printer.child(idx)
         else:
             (name, val) = self.cache_children()[idx]
         # A pretty-printer can return something other than a