forked from nuttx/nuttx-update
Debug: support Python scripts for automated debugging of the NuttX kernel
This patch adds an example and basic Python modules: 1. add memdump.py, an example that analyzes memory usage via a Python script. 2. add the most basic data-structure analysis helpers (list, etc.). Future ideas: we could add modules related to "sched, drivers, arch, fs, ..." as automated analysis scripts to debug certain problems. References: the Linux kernel (https://github.com/torvalds/linux/tree/master/scripts/gdb) and the official GDB manual (https://sourceware.org/gdb/onlinedocs/gdb/Python-API.html) Change-Id: Ib9025a0a141cb89f3813526f7c55dcb28de31ed9 Signed-off-by: anjiahao <anjiahao@xiaomi.com>
This commit is contained in:
parent
79c0fafc06
commit
adc5c8b5ac
5 changed files with 676 additions and 0 deletions
1
.gitignore
vendored
1
.gitignore
vendored
|
@ -59,3 +59,4 @@ uImage
|
||||||
.dirlinks
|
.dirlinks
|
||||||
.vscode
|
.vscode
|
||||||
.DS_Store
|
.DS_Store
|
||||||
|
tools/gdb/__pycache__
|
||||||
|
|
39
tools/gdb/__init__.py
Normal file
39
tools/gdb/__init__.py
Normal file
|
@ -0,0 +1,39 @@
|
||||||
|
############################################################################
|
||||||
|
# tools/gdb/__init__.py
|
||||||
|
#
|
||||||
|
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||||
|
# contributor license agreements. See the NOTICE file distributed with
|
||||||
|
# this work for additional information regarding copyright ownership. The
|
||||||
|
# ASF licenses this file to you under the Apache License, Version 2.0 (the
|
||||||
|
# "License"); you may not use this file except in compliance with the
|
||||||
|
# License. You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||||
|
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||||
|
# License for the specific language governing permissions and limitations
|
||||||
|
# under the License.
|
||||||
|
#
|
||||||
|
############################################################################
|
||||||
|
|
||||||
|
import glob
import os
import sys

import gdb

# Resolve the directory containing this script; it doubles as the import
# root for the helper modules and as the directory scanned for commands.
python_dir = os.path.abspath(__file__)
python_dir = os.path.dirname(python_dir)

# Make sibling modules (utils, lists, ...) importable by the sourced scripts.
sys.path.insert(1, python_dir)
# Search the python dir for all .py files, and source each
py_files = glob.glob("%s/*.py" % python_dir)
# Skip this file itself to avoid sourcing it recursively.
py_files.remove(os.path.abspath(__file__))

# Disable paging so long dumps (e.g. memdump) are not interrupted by
# "--Type <RET> for more--" prompts.
gdb.execute("set pagination off")
gdb.write("set pagination off\n")
# Source every sibling script so the gdb commands they define get registered.
for py_file in py_files:
    gdb.execute("source %s" % py_file)
    gdb.write("source %s\n" % py_file)
|
241
tools/gdb/lists.py
Normal file
241
tools/gdb/lists.py
Normal file
|
@ -0,0 +1,241 @@
|
||||||
|
############################################################################
|
||||||
|
# tools/gdb/lists.py
|
||||||
|
#
|
||||||
|
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||||
|
# contributor license agreements. See the NOTICE file distributed with
|
||||||
|
# this work for additional information regarding copyright ownership. The
|
||||||
|
# ASF licenses this file to you under the Apache License, Version 2.0 (the
|
||||||
|
# "License"); you may not use this file except in compliance with the
|
||||||
|
# License. You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||||
|
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||||
|
# License for the specific language governing permissions and limitations
|
||||||
|
# under the License.
|
||||||
|
#
|
||||||
|
############################################################################
|
||||||
|
|
||||||
|
import gdb
|
||||||
|
import utils
|
||||||
|
|
||||||
|
# Cached gdb type objects for NuttX's intrusive containers.  Lookup is
# deferred until first use so this module can be sourced before symbols
# are loaded (see utils.CachedType).
list_node = utils.CachedType("struct list_node")
sq_queue = utils.CachedType("sq_queue_t")
dq_queue = utils.CachedType("dq_queue_t")
|
||||||
|
|
||||||
|
|
||||||
|
def list_for_each(head):
    """Iterate over a list.

    head: a gdb.Value of type struct list_node or a pointer to one (the
    sentinel head of an intrusive circular list).  Yields the address of
    each member node, excluding the head itself.  Raises TypeError for
    any other type.
    """
    if head.type == list_node.get_type().pointer():
        head = head.dereference()
    elif head.type != list_node.get_type():
        raise TypeError("Must be struct list_node not {}".format(head.type))

    # A NULL next pointer means the list head was never initialized;
    # warn and yield nothing rather than dereferencing NULL.
    if head["next"] == 0:
        gdb.write(
            "list_for_each: Uninitialized list '{}' treated as empty\n".format(
                head.address
            )
        )
        return

    # The list is circular: walk next pointers until we come back to head.
    node = head["next"].dereference()
    while node.address != head.address:
        yield node.address
        node = node["next"].dereference()
|
||||||
|
|
||||||
|
|
||||||
|
def list_for_each_entry(head, gdbtype, member):
    """Iterate over a list, yielding each containing structure.

    head: list head (struct list_node or pointer to one).
    gdbtype: pointer type of the containing structure.
    member: name of the list_node field inside that structure.
    """
    for entry in list_for_each(head):
        yield utils.container_of(entry, gdbtype, member)
|
||||||
|
|
||||||
|
|
||||||
|
def list_check(head):
    """Check the consistency of a list.

    Walks a circular doubly linked struct list_node list, verifying at
    every node that prev.next and next.prev both point back at the
    current node.  Prints a diagnostic and stops at the first
    inconsistency or inaccessible node; otherwise reports the node count.
    """
    nb = 0  # number of nodes successfully verified

    if head.type == list_node.get_type().pointer():
        head = head.dereference()
    elif head.type != list_node.get_type():
        raise gdb.GdbError("argument must be of type (struct list_node [*])")
    c = head
    try:
        gdb.write("Starting with: {}\n".format(c))
    except gdb.MemoryError:
        gdb.write("head is not accessible\n")
        return
    while True:
        p = c["prev"].dereference()
        n = c["next"].dereference()
        # Forward/backward pointer cross-checks are each wrapped in their
        # own try so we can report which neighbor is unreadable.
        try:
            if p["next"] != c.address:
                gdb.write(
                    "prev.next != current: "
                    "current@{current_addr}={current} "
                    "prev@{p_addr}={p}\n".format(
                        current_addr=c.address,
                        current=c,
                        p_addr=p.address,
                        p=p,
                    )
                )
                return
        except gdb.MemoryError:
            gdb.write(
                "prev is not accessible: "
                "current@{current_addr}={current}\n".format(
                    current_addr=c.address, current=c
                )
            )
            return
        try:
            if n["prev"] != c.address:
                gdb.write(
                    "next.prev != current: "
                    "current@{current_addr}={current} "
                    "next@{n_addr}={n}\n".format(
                        current_addr=c.address,
                        current=c,
                        n_addr=n.address,
                        n=n,
                    )
                )
                return
        except gdb.MemoryError:
            gdb.write(
                "next is not accessible: "
                "current@{current_addr}={current}\n".format(
                    current_addr=c.address, current=c
                )
            )
            return
        c = n
        nb += 1
        # Back at the head: the whole circle checked out.
        if c == head:
            gdb.write("list is consistent: {} node(s)\n".format(nb))
            return
|
||||||
|
|
||||||
|
|
||||||
|
def sq_for_every(sq, entry):
    """Iterate over a singly linked list.

    sq: a gdb.Value of type sq_queue_t or a pointer to one.
    entry: a placeholder mirroring the C sq_for_every() macro signature;
           it is immediately overwritten and its input value is ignored.
    Yields the address of each node, following flink until NULL.
    """
    if sq.type == sq_queue.get_type().pointer():
        sq = sq.dereference()
    elif sq.type != sq_queue.get_type():
        gdb.write("Must be struct sq_queue not {}".format(sq.type))
        return

    # NULL head means the queue is empty.
    if sq["head"] == 0:
        return

    entry = sq["head"].dereference()

    while entry.address:
        yield entry.address
        entry = entry["flink"].dereference()
|
||||||
|
|
||||||
|
|
||||||
|
def sq_is_empty(sq):
    """Return True when the singly linked queue *sq* has no nodes.

    Accepts an sq_queue_t value or a pointer to one; any other type is
    treated as not-a-queue and reported as False.
    """
    if sq.type == sq_queue.get_type().pointer():
        sq = sq.dereference()
    elif sq.type != sq_queue.get_type():
        return False

    # An empty queue is marked by a NULL head pointer.
    return True if sq["head"] == 0 else False
|
||||||
|
|
||||||
|
|
||||||
|
def sq_check(sq):
    """Check the consistency of a singly linked list.

    Walks the flink chain of an sq_queue_t (or pointer to one), counting
    nodes until NULL.  Reports an inaccessible node via gdb.MemoryError,
    otherwise prints the total node count.  Note: a corrupted cycle in
    the chain would make this loop forever; interrupt gdb if so.
    """
    nb = 0  # nodes visited so far
    if sq.type == sq_queue.get_type().pointer():
        sq = sq.dereference()
    elif sq.type != sq_queue.get_type():
        gdb.write("Must be struct sq_queue not {}".format(sq.type))
        return

    if sq["head"] == 0:
        gdb.write("sq_queue head is empty {}\n".format(sq.address))
        return

    entry = sq["head"].dereference()
    try:
        while entry.address:
            nb += 1
            entry = entry["flink"].dereference()
    except gdb.MemoryError:
        gdb.write("entry address is unaccessible {}\n".format(entry.address))
        return

    gdb.write("sq_queue is consistent: {} node(s)\n".format(nb))
|
||||||
|
|
||||||
|
|
||||||
|
def dq_for_every(dq, entry):
    """Iterate over a doubly linked list.

    dq: a gdb.Value of type dq_queue_t or a pointer to one.
    entry: placeholder kept for signature parity with the C macro; its
           input value is never read.
    Yields each node's address by following the flink chain to NULL
    (blink pointers are not consulted).
    """
    if dq.type == dq_queue.get_type().pointer():
        dq = dq.dereference()
    elif dq.type != dq_queue.get_type():
        gdb.write("Must be struct dq_queue not {}".format(dq.type))
        return

    head = dq["head"]
    if head == 0:
        # Empty queue: nothing to yield.
        return

    entry = head.dereference()
    while entry.address:
        yield entry.address
        entry = entry["flink"].dereference()
|
||||||
|
|
||||||
|
|
||||||
|
def dq_check(dq):
    """Check the consistency of a doubly linked list.

    Walks the flink chain of a dq_queue_t (or pointer to one), counting
    nodes until NULL, and reports the count or the first inaccessible
    node.  NOTE(review): only the forward chain is verified; blink
    pointers are never cross-checked, so a broken backward link would
    still pass.
    """
    nb = 0  # nodes visited so far
    if dq.type == dq_queue.get_type().pointer():
        dq = dq.dereference()
    elif dq.type != dq_queue.get_type():
        gdb.write("Must be struct dq_queue not {}".format(dq.type))
        return

    if dq["head"] == 0:
        gdb.write("dq_queue head is empty {}\n".format(dq.address))
        return
    entry = dq["head"].dereference()
    try:
        while entry.address:
            nb += 1
            entry = entry["flink"].dereference()
    except gdb.MemoryError:
        gdb.write("entry address is unaccessible {}\n".format(entry.address))
        return

    gdb.write("dq_queue is consistent: {} node(s)\n".format(nb))
|
||||||
|
|
||||||
|
|
||||||
|
class Nxlistcheck(gdb.Command):
    """Verify a list consistency.

    Usage: listcheck <expression>

    The expression must evaluate to a pointer to a struct list_node,
    sq_queue_t, or dq_queue_t; the matching consistency checker from
    this module is invoked on it.
    """

    def __init__(self):
        super(Nxlistcheck, self).__init__(
            "listcheck", gdb.COMMAND_DATA, gdb.COMPLETE_EXPRESSION
        )

    def invoke(self, arg, from_tty):
        """Parse the single argument and dispatch on its pointer type."""
        argv = gdb.string_to_argv(arg)
        if len(argv) != 1:
            # Fix: the message used to say "nx-list-check", which is not
            # the name this command is registered under.
            raise gdb.GdbError("listcheck takes one argument")

        obj = gdb.parse_and_eval(argv[0])
        if obj.type == list_node.get_type().pointer():
            list_check(obj)
        elif obj.type == sq_queue.get_type().pointer():
            sq_check(obj)
        elif obj.type == dq_queue.get_type().pointer():
            # Generalization: dq_queue_t pointers were previously rejected
            # even though dq_check() exists in this module.
            dq_check(obj)
        else:
            raise gdb.GdbError("Invalid argument type: {}".format(obj.type))


Nxlistcheck()
|
223
tools/gdb/memdump.py
Normal file
223
tools/gdb/memdump.py
Normal file
|
@ -0,0 +1,223 @@
|
||||||
|
############################################################################
|
||||||
|
# tools/gdb/memdump.py
|
||||||
|
#
|
||||||
|
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||||
|
# contributor license agreements. See the NOTICE file distributed with
|
||||||
|
# this work for additional information regarding copyright ownership. The
|
||||||
|
# ASF licenses this file to you under the Apache License, Version 2.0 (the
|
||||||
|
# "License"); you may not use this file except in compliance with the
|
||||||
|
# License. You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||||
|
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||||
|
# License for the specific language governing permissions and limitations
|
||||||
|
# under the License.
|
||||||
|
#
|
||||||
|
############################################################################
|
||||||
|
|
||||||
|
import gdb
|
||||||
|
import utils
|
||||||
|
from lists import list_for_each_entry, sq_for_every, sq_is_empty, sq_queue
|
||||||
|
|
||||||
|
# Lazily resolved type of the per-allocation backtrace record kept by mempool.
mempool_backtrace = utils.CachedType("struct mempool_backtrace_s")

# Flag bits stored in the low bits of an mm_allocnode_s "size" field
# (mirror the C definitions in mm/mm_heap).
MM_ALLOC_BIT = 0x1
MM_PREVFREE_BIT = 0x2
MM_MASK_BIT = MM_ALLOC_BIT | MM_PREVFREE_BIT


# Pseudo-pid selectors understood by the memdump command: negative values
# select categories of nodes instead of a real task pid.
PID_MM_FREE = -4
PID_MM_ALLOC = -3
PID_MM_LEAK = -2
PID_MM_MEMPOOL = -1
|
||||||
|
|
||||||
|
|
||||||
|
def mm_foreach(heap):
    """Iterate over a heap, yielding each node.

    heap: a gdb.Value for a struct mm_heap_s.  Walks the allocation
    nodes from mm_heapstart[0], advancing by each node's size (flag
    bits masked off), until mm_heapend or a zero-sized node (next ==
    node) is reached.  NOTE(review): only region 0 of the heap is
    walked — confirm against CONFIG_MM_REGIONS usage.
    """
    node = gdb.Value(heap["mm_heapstart"][0]).cast(
        gdb.lookup_type("struct mm_allocnode_s").pointer()
    )
    while 1:
        yield node
        # Advance by the node size in bytes; cast through char* so the
        # pointer arithmetic is byte-granular.
        next = gdb.Value(node).cast(gdb.lookup_type("char").pointer())
        next = gdb.Value(next + (node["size"] & ~MM_MASK_BIT)).cast(
            gdb.lookup_type("struct mm_allocnode_s").pointer()
        )
        # next == node guards against an infinite loop on a corrupt
        # zero-sized node.
        if node >= heap["mm_heapend"].dereference() or next == node:
            break
        node = next
|
||||||
|
|
||||||
|
|
||||||
|
def mempool_multiple_foreach(mpool):
    """Iterate over all pools in a mempool, yielding each pool.

    mpool: a value exposing "npools" (count) and "pools" (base pointer);
    yields pools[0] .. pools[npools - 1] as pointer offsets.
    """
    idx = 0
    while idx < mpool["npools"]:
        yield mpool["pools"] + idx
        idx += 1
|
||||||
|
|
||||||
|
|
||||||
|
class Nxmemdump(gdb.Command):
    """Dump the heap and mempool memory.

    Usage: memdump [used | free | <pid>] [seqmin] [seqmax]

    With no argument or "used", dumps every allocated node; "free" dumps
    every free node; a numeric pid restricts the dump to that task's
    allocations.  seqmin/seqmax bound the allocation sequence numbers
    (half-open: seqmin <= seqno < seqmax).
    """

    def __init__(self):
        super(Nxmemdump, self).__init__("memdump", gdb.COMMAND_USER)

    def _dump_backtrace(self, node):
        """Print *node*'s recorded backtrace, if the struct carries one.

        Shared by the heap and mempool dump paths (was duplicated inline).
        """
        if node.type.has_key("backtrace"):
            # Number of stored frames comes from the array type's range.
            depth = node["backtrace"].type.range()[1]
            for x in range(0, depth):
                gdb.write(" ")
                gdb.write(
                    node["backtrace"][x].format_string(
                        raw=False, symbols=True, address=False
                    )
                )

    def mempool_dump(self, mpool, pid, seqmin, seqmax):
        """Dump the mempool memory.

        Walks every pool of the multiple-pool object: for PID_MM_FREE the
        two free queues are listed; otherwise the allocated list is
        filtered by pid and sequence-number window.
        """
        for pool in mempool_multiple_foreach(mpool):
            if pid == PID_MM_FREE:
                entry = sq_queue.get_type().pointer()

                for entry in sq_for_every(pool["queue"], entry):
                    gdb.write("%12u%#*x\n" % (pool["blocksize"], self.align, entry))
                    self.aordblks += 1
                    self.uordblks += pool["blocksize"]

                # Interrupt-context free queue.
                for entry in sq_for_every(pool["iqueue"], entry):
                    gdb.write("%12u%#*x\n" % (pool["blocksize"], self.align, entry))
                    self.aordblks += 1
                    self.uordblks += pool["blocksize"]
            else:
                for node in list_for_each_entry(
                    pool["alist"], mempool_backtrace.get_type().pointer(), "node"
                ):
                    if (pid == node["pid"] or pid == PID_MM_ALLOC) and (
                        node["seqno"] >= seqmin and node["seqno"] < seqmax
                    ):
                        charnode = gdb.Value(node).cast(
                            gdb.lookup_type("char").pointer()
                        )
                        # The user block sits one blocksize before the
                        # backtrace record.
                        gdb.write(
                            "%6d%12u%12u%#*x"
                            % (
                                node["pid"],
                                pool["blocksize"] & ~MM_MASK_BIT,
                                node["seqno"],
                                self.align,
                                int(charnode - pool["blocksize"]),
                            )
                        )
                        self._dump_backtrace(node)
                        gdb.write("\n")
                        self.aordblks += 1
                        self.uordblks += pool["blocksize"]

    def memdump(self, pid, seqmin, seqmax):
        """Dump the heap memory selected by pid and [seqmin, seqmax)."""
        if pid >= PID_MM_ALLOC:
            gdb.write("Dump all used memory node info:\n")
            gdb.write(
                "%6s%12s%12s%*s %s\n"
                % ("PID", "Size", "Sequence", self.align, "Address", "Callstack")
            )
        else:
            gdb.write("Dump all free memory node info:\n")
            gdb.write("%12s%*s\n" % ("Size", self.align, "Address"))

        heap = gdb.parse_and_eval("g_mmheap")
        # Heaps built with mempool support keep small blocks there; dump
        # those first.
        if heap.type.has_key("mm_mpool"):
            self.mempool_dump(heap["mm_mpool"], pid, seqmin, seqmax)

        for node in mm_foreach(heap):
            if node["size"] & MM_ALLOC_BIT != 0:
                # Allocated node: filter by owner pid (PID_MM_ALLOC means
                # "any real owner", excluding mempool-owned backing blocks).
                if (
                    pid == node["pid"]
                    or (pid == PID_MM_ALLOC and node["pid"] != PID_MM_MEMPOOL)
                ) and (node["seqno"] >= seqmin and node["seqno"] < seqmax):
                    charnode = gdb.Value(node).cast(gdb.lookup_type("char").pointer())
                    gdb.write(
                        "%6d%12u%12u%#*x"
                        % (
                            node["pid"],
                            node["size"] & ~MM_MASK_BIT,
                            node["seqno"],
                            self.align,
                            int(
                                charnode
                                + gdb.lookup_type("struct mm_allocnode_s").sizeof
                            ),
                        )
                    )
                    self._dump_backtrace(node)
                    gdb.write("\n")

                    self.aordblks += 1
                    self.uordblks += node["size"] & ~MM_MASK_BIT
            else:
                if pid == PID_MM_FREE:
                    charnode = gdb.Value(node).cast(gdb.lookup_type("char").pointer())
                    gdb.write(
                        "%12u%#*x\n"
                        % (
                            node["size"] & ~MM_MASK_BIT,
                            self.align,
                            int(
                                charnode
                                + gdb.lookup_type("struct mm_allocnode_s").sizeof
                            ),
                        )
                    )
                    self.aordblks += 1
                    self.uordblks += node["size"] & ~MM_MASK_BIT

        gdb.write("%12s%12s\n" % ("Total Blks", "Total Size"))
        gdb.write("%12d%12d\n" % (self.aordblks, self.uordblks))

    def complete(self, text, word):
        return gdb.COMPLETE_SYMBOL

    def invoke(self, args, from_tty):
        """Parse the command line and run the dump."""
        # Column width for addresses: 32-bit vs 64-bit targets.
        if gdb.lookup_type("size_t").sizeof == 4:
            self.align = 11
        else:
            self.align = 19

        arg = args.split(" ")

        if arg[0] == "" or arg[0] == "used":
            pid = PID_MM_ALLOC
        elif arg[0] == "free":
            # Bug fix: "free" used to select PID_MM_LEAK, which neither
            # matches the free branches in memdump() (they test
            # PID_MM_FREE) nor prints the free header — so "memdump free"
            # dumped nothing useful.
            pid = PID_MM_FREE
        else:
            pid = int(arg[0])

        # Bug fix: these used to test len(arg) == 2 / == 3, so supplying
        # three arguments silently dropped seqmin.
        if len(arg) >= 2:
            seqmin = int(arg[1])
        else:
            seqmin = 0

        if len(arg) >= 3:
            seqmax = int(arg[2])
        else:
            seqmax = 0xFFFFFFFF

        self.aordblks = 0
        self.uordblks = 0
        self.memdump(pid, seqmin, seqmax)


Nxmemdump()
|
172
tools/gdb/utils.py
Normal file
172
tools/gdb/utils.py
Normal file
|
@ -0,0 +1,172 @@
|
||||||
|
############################################################################
|
||||||
|
# tools/gdb/utils.py
|
||||||
|
#
|
||||||
|
# Licensed to the Apache Software Foundation (ASF) under one or more
|
||||||
|
# contributor license agreements. See the NOTICE file distributed with
|
||||||
|
# this work for additional information regarding copyright ownership. The
|
||||||
|
# ASF licenses this file to you under the Apache License, Version 2.0 (the
|
||||||
|
# "License"); you may not use this file except in compliance with the
|
||||||
|
# License. You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||||
|
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||||
|
# License for the specific language governing permissions and limitations
|
||||||
|
# under the License.
|
||||||
|
#
|
||||||
|
############################################################################
|
||||||
|
|
||||||
|
import gdb
|
||||||
|
|
||||||
|
|
||||||
|
class CachedType:
    """Cache a type object, so that we can reconnect to the new_objfile event.

    Wraps gdb.lookup_type(name) with lazy resolution: the lookup happens
    on first get_type() call, and the cache is invalidated whenever gdb
    loads a new object file (symbols may have changed).
    """

    def __init__(self, name):
        # _type: resolved gdb.Type, or None until first lookup / after
        # a new objfile invalidates it.
        self._type = None
        self._name = name

    def _new_objfile_handler(self, event):
        # Drop the cached type; disconnect so the handler re-registers
        # only when get_type() caches again.
        self._type = None
        gdb.events.new_objfile.disconnect(self._new_objfile_handler)

    def get_type(self):
        """Return the gdb.Type for the cached name, resolving if needed."""
        if self._type is None:
            self._type = gdb.lookup_type(self._name)
            if self._type is None:
                raise gdb.GdbError("cannot resolve type '{0}'".format(self._name))
            # Older gdb builds lack the events API; guard before connecting.
            if hasattr(gdb, "events") and hasattr(gdb.events, "new_objfile"):
                gdb.events.new_objfile.connect(self._new_objfile_handler)
        return self._type
|
||||||
|
|
||||||
|
|
||||||
|
# Module-level cache of the target's "long" type (sized per target ABI).
long_type = CachedType("long")


def get_long_type():
    """Return the cached long type object"""
    global long_type
    return long_type.get_type()
|
||||||
|
|
||||||
|
|
||||||
|
def offset_of(typeobj, field):
    """Return the offset of a field in a structure.

    typeobj: a gdb pointer type for the structure.
    field: the member name.  Implemented the classic way: take the
    member's address off a NULL-based value and parse the hex address.
    """
    base = gdb.Value(0).cast(typeobj)
    # str() of an address may carry a trailing symbol annotation;
    # keep only the leading hex token.
    addr_text = str(base[field].address).split()[0]
    return int(addr_text, 16)
|
||||||
|
|
||||||
|
|
||||||
|
def container_of(ptr, typeobj, member):
    """Return pointer to containing data structure.

    ptr: address of *member* inside some structure.
    typeobj: pointer type of the containing structure.
    member: name of the field ptr points at.
    Subtracts the member offset in integer space, then casts back.
    """
    return (ptr.cast(get_long_type()) - offset_of(typeobj, member)).cast(typeobj)
|
||||||
|
|
||||||
|
|
||||||
|
class ContainerOf(gdb.Function):
    """Return pointer to containing data structure.

    $container_of(PTR, "TYPE", "ELEMENT"): Given PTR, return a pointer to the
    data structure of the type TYPE in which PTR is the address of ELEMENT.
    Note that TYPE and ELEMENT have to be quoted as strings."""

    def __init__(self):
        # Registers the convenience function as $container_of in gdb.
        super(ContainerOf, self).__init__("container_of")

    def invoke(self, ptr, typename, elementname):
        # typename/elementname arrive as gdb string values; convert to
        # Python strings before delegating to the module-level helper.
        return container_of(
            ptr, gdb.lookup_type(typename.string()).pointer(), elementname.string()
        )


# Instantiate to register the $container_of convenience function.
ContainerOf()
|
||||||
|
|
||||||
|
|
||||||
|
BIG_ENDIAN = 0
LITTLE_ENDIAN = 1
# Cached result of the "show endian" query; None until first use.
target_endianness = None


def get_target_endianness():
    """Return the endianness of the target.

    Returns LITTLE_ENDIAN or BIG_ENDIAN, caching the result of gdb's
    "show endian" on first call.  Raises gdb.GdbError if the output is
    unrecognized.
    """
    global target_endianness
    if target_endianness is None:
        endian = gdb.execute("show endian", to_string=True)
        if "little endian" in endian:
            target_endianness = LITTLE_ENDIAN
        elif "big endian" in endian:
            target_endianness = BIG_ENDIAN
        else:
            raise gdb.GdbError("unknown endianness '{0}'".format(str(endian)))
    return target_endianness
|
||||||
|
|
||||||
|
|
||||||
|
def read_memoryview(inf, start, length):
    """Read memory from the target and return a memoryview object.

    inf: an object with read_memory(start, length) (e.g. a gdb.Inferior).
    Newer gdb already returns a memoryview; wrap only when it does not.
    """
    mem = inf.read_memory(start, length)
    return mem if type(mem) is memoryview else memoryview(mem)
|
||||||
|
|
||||||
|
|
||||||
|
def read_u16(buffer, offset):
    """Read a 16-bit unsigned integer from a buffer.

    Uses the cached target endianness to order the two bytes.  Handles
    both Python 2 (indexing yields 1-char str) and Python 3 (yields int)
    buffer semantics.
    """
    pair = buffer[offset : offset + 2]

    if type(pair[0]) is str:
        b0, b1 = ord(pair[0]), ord(pair[1])
    else:
        b0, b1 = pair[0], pair[1]

    if get_target_endianness() == LITTLE_ENDIAN:
        return b0 + (b1 << 8)
    return b1 + (b0 << 8)
|
||||||
|
|
||||||
|
|
||||||
|
def read_u32(buffer, offset):
    """Read a 32-bit unsigned integer from a buffer.

    Composed of two 16-bit reads, ordered by the target endianness.
    """
    w0 = read_u16(buffer, offset)
    w1 = read_u16(buffer, offset + 2)
    if get_target_endianness() == LITTLE_ENDIAN:
        return w0 + (w1 << 16)
    return w1 + (w0 << 16)
|
||||||
|
|
||||||
|
|
||||||
|
def read_u64(buffer, offset):
    """Read a 64-bit unsigned integer from a buffer.

    Composed of two 32-bit reads, ordered by the target endianness.
    """
    d0 = read_u32(buffer, offset)
    d1 = read_u32(buffer, offset + 4)
    if get_target_endianness() == LITTLE_ENDIAN:
        return d0 + (d1 << 32)
    return d1 + (d0 << 32)
|
||||||
|
|
||||||
|
|
||||||
|
def read_ulong(buffer, offset):
    """Read a long from a buffer.

    Width follows the target's "long": 64-bit targets read 8 bytes,
    otherwise 4.
    """
    reader = read_u64 if get_long_type().sizeof == 8 else read_u32
    return reader(buffer, offset)
|
||||||
|
|
||||||
|
|
||||||
|
# Cached output of "show architecture" for gdb builds without
# Frame.architecture support; None until first query.
target_arch = None


def is_target_arch(arch):
    """Return True if the target architecture is ARCH.

    ARCH is matched as a substring of gdb's reported architecture name.
    Prefers the Frame.architecture API when available; otherwise falls
    back to parsing (and caching) "show architecture" output.
    """
    if hasattr(gdb.Frame, "architecture"):
        return arch in gdb.newest_frame().architecture().name()
    else:
        global target_arch
        if target_arch is None:
            target_arch = gdb.execute("show architecture", to_string=True)
        return arch in target_arch
|
||||||
|
|
||||||
|
|
||||||
|
def gdb_eval_or_none(expresssion):
    """Evaluate an expression and return None if it fails.

    NOTE(review): the parameter name carries an original misspelling
    ("expresssion"); it is kept so any keyword-argument callers keep
    working.
    """
    try:
        result = gdb.parse_and_eval(expresssion)
    except gdb.error:
        return None
    return result
|
Loading…
Reference in a new issue