#!/usr/bin/python3
#
# srcxray - source code X-ray
#
# Analyzes interconnections between functions and structures in source code.
#
# Uses cscope and git grep --show-function to
# reveal references between identifiers.
#
# 2018 Constantine Shulyupin, const@MakeLinux.com
#

from inspect import currentframe, getframeinfo, getouterframes, stack
import random
import os
import sys
import collections
from munch import *
import subprocess
import re
import networkx as nx
# from networkx.drawing.nx_agraph import read_dot  # changes order of successors
from networkx.drawing.nx_pydot import read_dot
from networkx.generators.ego import *
from networkx.algorithms.dag import *
from networkx.utils import open_file, make_str
from pprint import pprint
import difflib
import glob
from pathlib import *
import pygraphviz  # python3-pygraphviz
import unittest
import types


default_root = 'starts'

# Identifiers too common in kernel sources to be informative; they are
# excluded from reference trees and call graphs.
black_list = ('aligned __attribute__ unlikely typeof u32 '
              'PVOP_CALLEE0 PVOP_VCALLEE0 PVOP_VCALLEE1 if trace_hardirqs_off '
              'i NULL likely unlikely true false test_bit NAPI_GRO_CB clear_bit '
              'atomic_read preempt_disable preempt_enable container_of ENOSYS '
              'READ_ONCE u64 u8 _RET_IP_ ret current '
              'AT_FDCWD fdput EBADF file_inode '
              'ssize_t path_put __user '
              'list_empty memcpy size_t loff_t pos d_inode dput copy_to_user EIO bool out IS_ERR '
              'EPERM rcu_read_lock rcu_read_unlock spin_lock spin_unlock list_for_each_entry kfree '
              'GFP_KERNEL ENOMEM EFAULT ENOENT EAGAIN PTR_ERR PAGE_SHIFT PAGE_SIZE '
              'pgoff_t pte_t pmd_t HPAGE_PMD_NR PageLocked entry swp_entry_t next unlock_page spinlock_t end start '
              'VM_BUG_ON VM_BUG_ON_PAGE BDI_SHOW max '
              'list_del compound_head list_add cond_resched put_page nr_pages min spin_lock_irqsave IS_ENABLED '
              'EBUSY UL NODE_DATA pr_err memset list size ptl PAGE_MASK pr_info offset addr get_page sprintf '
              'INIT_LIST_HEAD NUMA_NO_NODE spin_unlock_irqrestore mutex_unlock mutex_lock '
              'page_to_nid page_to_pfn pfn page_zone pfn_to_page '
              'BUG BUG_ON flags WARN_ON_ONCE ENODEV cpu_to_le16 cpumask_bits '
              'ERR_PTR ENOTSUPP EOPNOTSUPP WARN_ON EINVAL i name '
              'sigset_t fdget put_user get_user copy_from_user LOOKUP_FOLLOW LOOKUP_EMPTY EINTR '
              'O_CLOEXEC err getname access_ok task_pid_vnr cred '
              'percpu_ref_put get_timespec64 sigdelsetmask ns_capable kzalloc capable f_mode O_LARGEFILE pos_from_hilo '
              'pr_debug error current_cred ESRCH f_path find_task_by_vpid '
              'retry LOOKUP_REVAL retry_estale user_path_at lookup_flags old '
              'current_user_ns spin_lock_irq spin_unlock_irq prepare_creds '
              'tasklist_lock commit_creds read_lock read_unlock SIGKILL SIGSTOP abort_creds fd_install '
              'real_mount FMODE_WRITE tv_nsec putname '
              ).split()

level_limit = 8   # maximum depth of printed trees
limit = 100000    # maximum number of lines print_limited() will emit
n = 0             # number of lines printed so far by print_limited()

scaled = False    # when True, write_dot() scales fonts and edge widths by rank


def print_limited(a, out=None):
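    # Print one line of tree output; after `limit` lines, write '...' and
    # exit so that huge graphs do not flood the terminal.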
    out = out if out else sys.stdout
    out.write(str(a) + '\n')
    global n
    n += 1
    if n > limit + 1:
        out.write('...')
        sys.exit(1)
        # raise(Exception('Reached limit'))


def log(*args, **kwargs):
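    # Debug logging helper: prefix the message with the caller's file, line
    # and function name, write it to stderr and return the message.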
    s = str(*args).rstrip()
    frameinfo = getframeinfo(currentframe().f_back)
    print("%s:%d %s" % (frameinfo.filename, frameinfo.lineno, stack()[1][3]),
          s, file=sys.stderr, **kwargs)
    return s


def popen(p):
    return subprocess.check_output(p, shell=True).decode('utf-8').splitlines()


def extract_referer(line):
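    # Extract the name of the referring function from one line of
    # `git grep --show-function` output; returns None when no referrer
    # can be recognized.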
    line = re.sub(r'__ro_after_init', '', line)
    line = re.sub(r'FNAME\((\w+)\)', r'\1', line)
    line = re.sub(r'.*TRACE_EVENT.*', '', line)
    m = re.match(r'^[^\s]+=[^,]*\(\*(\b\w+)\)\s*[\(\[=][^;]*$', line)
    if not m:
        m = re.match(r'^[^\s]+=[^,]*(\b\w+)\s*[\(\[=][^;]*$', line)
    if m:
        return m.group(1)


def extract_referer_test():
    for a in {
            "fs=good2()",
            "f=static int fastop(struct x86_emulate_ctxt *ctxt, "
            + "void (*fop)(struct fastop *))",
            "f=int good(a, bad (*func)(arg))",
            "f=EXPORT_SYMBOL_GPL(bad);",
            "f=bad (*good)()",
            "f=int FNAME(good)(a)",
            "f=TRACE_EVENT(a)",
            "f: a=in bad()"}:
        print(a, '->', extract_referer(a))


def func_referers_git_grep(name):
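    # Find referrers of `name` with `git grep --show-function`: scan matching
    # lines and collect the enclosing function of each genuine reference.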
    res = list()
    r = None
    for line in popen(r'git grep --threads 1 --no-index --word-regexp --show-function '
                      r'"^\s.*\b%s" '
                      r'**.\[hc\] **.cpp **.cc **.hh || true' % (name)):
        # Filter out names that only appear in a comment after the function,
        # i.e. when the comment line starts with ' *'.
        # To see the problem try "git grep -p and"
        for p in {
                r'.*:\s+\* .*%s',
                r'.*/\*.*%s',
                r'.*//.*%s',
                r'.*".*\b%s\b.*"'}:
            if re.match(p % (name), line):
                r = None
                break
        if r and r != name and r not in black_list:
            res.append(r)
            r = None
        r = extract_referer(line)
    return res


cscope_warned = False


def func_referers_cscope(name):
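    # Find referrers of `name` with cscope (-L3: "find functions calling this
    # function"); fall back to git grep when cscope finds nothing.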
    global cscope_warned
    if not os.path.isfile('cscope.out'):
        if not cscope_warned:
            print("Recommended: cscope -Rcbk", file=sys.stderr)
            cscope_warned = True
        return []
    res = list(dict.fromkeys([l.split()[1] for l in popen(r'cscope -d -L3 "%s"' %
                                                          (name)) if l not in black_list]))
    if not res:
        res = func_referers_git_grep(name)
    return res


def func_referers_all(name):
    return list(dict.fromkeys(func_referers_git_grep(name) + func_referers_cscope(name)))


def referers_tree(name, referer=None, printed=None, level=0):
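    # Print a tree of referrers (callers) of `name`, indented with tabs.
    # `referer` selects the lookup backend: cscope when an index exists,
    # otherwise git grep.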
    if not referer:
        if os.path.isfile('cscope.out'):
            referer = func_referers_cscope
        else:
            print("Using git grep only, recommended to run: cscope -bkR",
                  file=sys.stderr)
            referer = func_referers_git_grep
    if isinstance(referer, str):
        referer = eval(referer)
    if not printed:
        printed = set()
    if name in printed:
        print_limited(level*'\t' + name + ' ^')
        return
    else:
        print_limited(level*'\t' + name)
    printed.add(name)
    if level > level_limit - 2:
        print_limited((level + 1)*'\t' + '...')
        return ''
    listed = set()
    for a in referer(name):
        referers_tree(a, referer, printed, level + 1)
        listed.add(a)
    return ''


def referers_dep(name, referer=None, printed=None, level=0):
    if not referer:
        if os.path.isfile('cscope.out'):
            referer = func_referers_cscope
        else:
            print("Using git grep only, recommended to run: cscope -bkR",
                  file=sys.stderr)
            referer = func_referers_git_grep
    if isinstance(referer, str):
        referer = eval(referer)
    if not printed:
        printed = set()
    if name in printed:
        return
    if level > level_limit - 2:
        return ''
    referers = referer(name)
    if referers:
        printed.add(name)
        print("%s:" % (name), ' '.join(referers))
        for a in referers:
            referers_dep(a, referer, printed, level + 1)
    else:
        pass
        # TODO: print terminal
        # print('...')
    return ''


def call_tree(node, printed=None, level=0):
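    # Print a tree of functions called by `node`, using cscope -L2
    # ("find functions called by this function").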
    if not os.path.isfile('cscope.out'):
        print("Please run: cscope -bkR", file=sys.stderr)
        return False
    if printed is None:
        printed = set()
    if node in printed:
        print_limited(level*'\t' + node + ' ^')
        return
    else:
        print_limited(level*'\t' + node)
    printed.add(node)
    if level > level_limit - 2:
        print_limited((level + 1)*'\t' + '...')
        return ''
    local_printed = set()
    for line in popen('cscope -d -L2 "%s"' % (node)):
        a = line.split()[1]
        if a in local_printed or a in black_list:
            continue
        local_printed.add(a)
        # try:
        call_tree(line.split()[1], printed, level + 1)
        # except Exception:
        #     pass
    return ''


def call_dep(node, printed=None, level=0):
    if not os.path.isfile('cscope.out'):
        print("Please run: cscope -bkR", file=sys.stderr)
        return False
    if printed is None:
        printed = set()
    if node in printed:
        return
    calls = list()
    for a in [line.split()[1] for line in
              popen('cscope -d -L2 "%s"' % (node))]:
        if a in black_list:
            continue
        calls.append(a)
    if calls:
        if level < level_limit - 1:
            printed.add(node)
            print("%s:" % (node), ' '.join(list(dict.fromkeys(calls))))
            for a in list(dict.fromkeys(calls)):
                call_dep(a, printed, level + 1)
    else:
        pass
        # TODO: print terminal
        # print('...')
    return ''


def my_graph(name=None):
    g = nx.DiGraph(name=name)
    # g.graph.update({'node': {'shape': 'none', 'fontsize': 50}})
    # g.graph.update({'rankdir': 'LR', 'nodesep': 0, })
    return g


def reduce_graph(g, m=None):
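    # Remove leaf nodes (no successors) whose in-degree does not exceed `m`,
    # shrinking the graph while keeping its backbone.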
    rm = set()
    m = g.number_of_nodes() if not m else m
    log(g.number_of_edges())
    rm = [n for (n, d) in g.out_degree if not d and g.in_degree(n) <= m]
    g.remove_nodes_from(rm)
    print(g.number_of_edges())
    return g


def includes(a):
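    # Guess which header declares the syscall `a`: first ask the man page
    # (section 2), then fall back to grepping /usr/include.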
    res = []
    # log(a)
    for a in popen('man -s 2 %s 2> /dev/null |'
                   ' head -n 20 | grep include || true' % (a)):
        m = re.match('.*<(.*)>', a)
        if m:
            res.append(m.group(1))
    if not res:
        for a in popen('grep -l -r " %s *(" '
                       '/usr/include --include "*.h" '
                       '2> /dev/null || true' % (a)):
            # log(a)
            a = re.sub(r'.*/(bits)', r'\1', a)
            a = re.sub(r'.*/(sys)', r'\1', a)
            a = re.sub(r'/usr/include/(.*)', r'\1', a)
            # log(a)
            res.append(a)
    res = set(res)
    if res and len(res) > 1:
        r = set()
        for f in res:
            # log('grep " %s \+\(" --include "%s" -r /usr/include/'%(a, f))
            # log(os.system(
            #     'grep -w "%s" --include "%s" -r /usr/include/'%(a, f)))
            if 0 != os.system(
                    'grep " %s *(" --include "%s" -r /usr/include/ -q'
                    % (a, os.path.basename(f))):
                r.add(f)
        res = res.difference(r)
    log(res)
    return ','.join(list(res)) if res else 'unexported'


def syscalls():
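    # Build a DiGraph of Linux system calls: edges go from the top-level
    # source directory to the defining file and from the file to the sys_*
    # entry point found in SYSCALL_DEFINE macros.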
    sc = my_graph('syscalls')
    inc = 'includes.list'
    if not os.path.isfile(inc):
        os.system('ctags --langmap=c:+.h --c-kinds=+pex -I __THROW '
                  + ' -R -u -f- /usr/include/ | cut -f1,2 > '
                  + inc)
    '''
    if False:
        includes = {}
        with open(inc, 'r') as f:
            for s in f:
                includes[s.split()[0]] = s.split()[1]
        log(includes)
    '''
    scd = 'SYSCALL_DEFINE.list'
    if not os.path.isfile(scd):
        os.system("grep SYSCALL_DEFINE -r --include='*.c' > " + scd)
    with open(scd, 'r') as f:
        v = set(['sigsuspend', 'llseek', 'sysfs', 'sync_file_range2', 'ustat', 'bdflush'])
        for s in f:
            if any(x in s.lower() for x in ['compat', 'stub']):
                continue
            m = re.match(r'(.*?):.*SYSCALL.*\(([\w]+)', s)
            if m:
                for p in {
                        '^old',
                        '^xnew',
                        r'.*64',
                        r'.*32$',
                        r'.*16$',
                        }:
                    if re.match(p, m.group(2)):
                        m = None
                        break
            if m:
                syscall = m.group(2)
                syscall = re.sub('^new', '', syscall)
                path = m.group(1).split('/')
                if (m.group(1).startswith('mm/nommu.c')
                        or m.group(1).startswith('arch/x86/ia32')
                        or m.group(1).startswith('arch/')
                        or syscall.startswith('vm86')
                        and not m.group(1).startswith('arch/x86')):
                    continue
                if syscall in v:
                    continue
                v.add(syscall)
                p2 = '/'.join(path[1:])
                p2 = m.group(1)
                # if log(difflib.get_close_matches(syscall, v) or ''):
                #     log(syscall)
                # log(syscall + ' ' + (includes.get(syscall) or '------'))
                # man -s 2 timerfd_settime | head -n 20
                if False:
                    i = includes(syscall)
                    log(p2 + ' ' + str(i) + ' ' + syscall)
                    sc.add_edge(i, i+' - '+p2)
                    sc.add_edge(i+' - '+p2, 'sys_' + syscall)
                else:
                    sc.add_edge(path[0] + '/', p2)
                    sc.add_edge(p2, 'sys_' + syscall)
    return sc


# DiGraph
# write_dot to_agraph AGraph
# agwrite
# srcxray.py 'write_dot(syscalls(), "syscalls.dot")'
# srcxray.py "write_dot(import_cflow(), 'a.dot')"
# write_graphml
# F=sys_mount; srcxray.py "digraph_print(import_cflow(), ['$F'])" > $F.tree
# srcxray.py "leaves(read_dot('a.dot'))"
# srcxray.py "most_used(read_dot('a.dot'))"
# srcxray.py "digraph_print(read_dot('a.dot'))"
# srcxray.py "write_dot(reduce_graph(read_dot('no-loops.dot')),'reduced.dot')"
# a=sys_clone;srcxray.py "write_dot(rank_couples(reduce_graph(remove_loops(read_dot('$a.dot')))),'$a.dot')"
# srcxray.py "pprint(most_used(read_dot('a.dot')))"
# srcxray.py "write_dot(digraph_tree(read_dot2('all.dot'), ['sys_clone']), 'sys_clone.dot')"
# srcxray.py "write_dot(add_rank('reduced.dot'), 'ranked.dot')"
# srcxray.py "write_dot(remove_loops(read_dot2('reduced.dot')), 'no-loops.dot')"


def cleanup(a):
    log('')
    dg = to_dg(a)
    print(dg.number_of_edges())
    dg.remove_nodes_from(black_list)
    print(dg.number_of_edges())
    write_dot(dg, a)


def leaves(dg):
    # [x for x in G.nodes() if G.out_degree(x)==0 and G.in_degree(x)==1]
    return {n: dg.in_degree(n) for (n, d) in dg.out_degree if not d}


def sort_dict(d):
    return [a for a, b in sorted(d.items(), key=lambda k: k[1], reverse=True)]


def most_used(dg, ins=10, outs=10):
    # return {a: b for a, b in sorted(dg.in_degree, key=lambda k: k[1]) if b > 1 and}
    # return [(x, dg.in_degree(x), dg.out_degree(x))
    return [x
            for x in dg.nodes()
            if dg.in_degree(x) >= ins and dg.out_degree(x) >= outs]


def starts(dg): # roots
    return {n: dg.out_degree(n) for (n, d) in dg.in_degree if not d}


def digraph_tree(dg, starts=None, black_list=black_list):
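    # Extract a tree (depth-first copy without back references) from `dg`,
    # grown from the given start nodes, or from all roots when `starts`
    # is empty.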
    tree = nx.DiGraph()

    def sub(node):
        tree.add_node(node)
        for o in dg.successors(node):
            if o in black_list or tree.has_edge(node, o) or o in starts:
                # print(o)
                continue
            tree.add_edge(node, o)
            sub(o)

    printed = set()
    if not starts:
        starts = {}
        for i in [n for (n, d) in dg.in_degree if not d]:
            starts[i] = dg.out_degree(i)
        starts = [a[0] for a in sorted(starts.items(), key=lambda k: k[1], reverse=True)]
    if len(starts) == 1:
        sub(starts[0])
    elif len(starts) > 1:
        for o in starts:
            if o in black_list:
                continue
            sub(o)
    return tree


def digraph_print(dg, starts=None, dst_fn=None, sort=False):
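    # Print `dg` as an indented tree (to stdout or to the file `dst_fn`),
    # starting from `starts` or from all root nodes, then from any nodes
    # that are still unprinted.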
    dst = open(dst_fn, 'w') if dst_fn else None
    printed = set()
    ranks = collections.defaultdict(list)  # nodes grouped by 'rank' attribute, if present

    def digraph_print_sub(path='', node=None, level=0):
        if node in black_list:
            return
        if node in printed:
            print_limited(level*'\t' + str(node) + ' ^', dst)
            return
        outs = {_: dg.out_degree(_) for _ in dg.successors(node)}
        if sort:
            outs = {a: b for a, b in sorted(outs.items(), key=lambda k: k[1], reverse=True)}
        s = ''
        if 'rank' in dg.nodes[node]:
            s = str(dg.nodes[node]['rank'])
            ranks[dg.nodes[node]['rank']].append(node)
        if outs:
            s += ' ...' if level > level_limit - 2 else ' @' + path
        print_limited(level*'\t' + str(node) + s, dst)
        printed.add(node)
        if level > level_limit - 2:
            return ''
        passed = set()
        for o in outs.keys():
            if o in passed:
                continue
            passed.add(o)
            digraph_print_sub(path + ' ' + str(node), o, level + 1)

    if not starts:
        starts = {}
        for i in [n for (n, d) in dg.in_degree if not d]:
            starts[i] = dg.out_degree(i)
        starts = [a[0] for a in sorted(starts.items(), key=lambda k: k[1], reverse=True)]
    if len(starts) > 1:
        print_limited(default_root, dst)
        for s in starts:
            print_limited('\t' + s + ' ->', dst)
    passed = set()
    for o in starts:
        if o in passed:
            continue
        passed.add(o)
        if o in dg:
            digraph_print_sub('', o)
    # not yet printed rest:
    for o in dg.nodes():
        if o not in printed:
            digraph_print_sub('', o)
    if dst_fn:
        print(dst_fn)
        dst.close()


def cflow_preprocess(a):
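    # Preprocessor for GNU cflow: rewrite kernel macros (SYSCALL_DEFINE,
    # initcalls, __setup, module params, static struct initializers, ...)
    # into plain function syntax that cflow can parse, writing the result
    # to stdout.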
    with open(a, 'rb') as f:
        for s in f:
            try:
                s = s.decode('utf-8')
            except UnicodeDecodeError:
                s = s.decode('latin1')
            # treat struct like function
            s = re.sub(r"^static struct (\w+) = ", r"\1()", s)
            s = re.sub(r"^static struct (\w+)\[\] = ", r"\1()", s)
            s = re.sub(r"^static const struct (\w+)\[\] = ", r"\1()", s)
            s = re.sub(r"^static __initdata int \(\*actions\[\]\)\(void\) = ",
                       "int actions()", s)  # treat struct like function
            s = re.sub(r"^static ", "", s)
            s = re.sub(r"SENSOR_DEVICE_ATTR.*\((\w*),", r"void sensor_dev_attr_\1()(", s)
            s = re.sub(r"COMPAT_SYSCALL_DEFINE[0-9]\((\w*),",
                       r"compat_sys_\1(", s)
            s = re.sub(r"SYSCALL_DEFINE[0-9]\((\w*)", r"sys_\1(", s)
            s = re.sub(r"__setup\(.*,(.*)\)", r"void __setup() {\1();}", s)
            s = re.sub(r"^(\w*)param\(.*,(.*)\)", r"void \1param() {\2();}", s)
            s = re.sub(r"^(\w*)initcall\((.*)\)",
                       r"void \1initcall() {\2();}", s)
            s = re.sub(r"^static ", "", s)
            # s = re.sub(r"__read_mostly", "", s)
            s = re.sub(r"^inline ", "", s)
            s = re.sub(r"^const ", "", s)
            s = re.sub(r"^struct (.*) =", r"\1()", s)
            s = re.sub(r"\b__initdata\b", "", s)
            s = re.sub(r"DEFINE_PER_CPU\((.*),(.*)\)", r"\1 \2", s)
            # s = re.sub(r"__init_or_module", "", s)
            # __attribute__
            # for line in sys.stdin:
            sys.stdout.write(s)


cflow_param = {
    "modifier": "__init __inline__ noinline __initdata __randomize_layout asmlinkage "
                " __visible __init __leaf__ __ref __latent_entropy __init_or_module ",
    "wrapper": "__attribute__ __section__ "
               "TRACE_EVENT MODULE_AUTHOR MODULE_DESCRIPTION MODULE_LICENSE MODULE_SOFTDEP "
               "INIT_THREAD_INFO "
               "__acquires __releases __ATTR"
    # "wrapper": "__setup early_param"
}


# export CPATH=:include:arch/x86/include:../build/include/:../build/arch/x86/include/generated/:include/uapi
# srcxray.py "'\n'.join(cflow('init/main.c'))"


def cflow(a=None):
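    # Build and run the GNU cflow command line over the given file, directory
    # or list of files (or over cscope.files / all sources by default) and
    # return cflow's output lines.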
    if os.path.isfile('include/linux/cache.h'):
        for m in popen("ctags -x --c-kinds=d include/linux/cache.h | cut -d' ' -f 1 | sort -u"):
            if m in cflow_param['modifier']:
                print(m)
            else:
                # register macros defined in cache.h as modifiers so cflow skips them
                cflow_param['modifier'] += ' ' + m
    if not a:
        a = "$(cat cscope.files)" if os.path.isfile('cscope.files') else "*.c *.h *.cpp *.hh "
    elif isinstance(a, list):
        pass
    elif os.path.isdir(a):
        a = "$(find {0} -name '*.[ch]' -o -name '*.cpp' -o -name '*.hh')".format(a)
    elif os.path.isfile(a):
        pass
    # "--depth=%d " %(level_limit+1) +
    # --debug=1
    cflow = (r"cflow -v "
             # + "-DCONFIG_KALLSYMSZ "
             + "--preprocess='srcxray.py cflow_preprocess' "
             + ''.join([''.join(["--symbol={0}:{1} ".format(w, p)
                                 for w in cflow_param[p].split()])
                        for p in cflow_param.keys()])
             + " --include=_sxt --brief --level-indent='0=\t' "
             + a)
    # print(cflow)
    return popen(cflow)


def import_cflow(a=None, cflow_out=None):
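    # Run cflow and convert its indented output into a call graph (DiGraph),
    # tracking the current call stack by indentation depth.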
    cf = my_graph()
    stack = list()
    nprev = -1
    cflow_out = open(cflow_out, 'w') if cflow_out else None
    for line in cflow(a):
        if cflow_out:
            cflow_out.write(line + '\n')
        # --print-level
        m = re.match(r'^([\t]*)([^(^ ^<]+)', str(line))
        if m:
            n = len(m.group(1))
            id = str(m.group(2))
        else:
            raise Exception(line)

        if n <= nprev:
            stack = stack[:n - nprev - 1]
        # print(n, id, stack)
        if id not in black_list:
            if len(stack):
                cf.add_edge(stack[-1], id)
        stack.append(id)
        nprev = n
    return cf


def rank(g, n):
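    # Combined rank of node `n`, derived from 'rank1' (distance from the
    # roots) and 'rank2' (negative distance from the leaves) set by
    # add_rank(); returns None for unranked nodes.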
    try:
        if g.nodes[n]['rank1'] == g.nodes[n]['rank2']:
            return g.nodes[n]['rank1']
        if g.nodes[n]['rank1'] < abs(g.nodes[n]['rank2']):
            return g.nodes[n]['rank1']
        else:
            return g.__dict__['max_rank'] + 1 + g.nodes[n]['rank2']
    except KeyError:
        return None


def write_dot(g, dot):
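    # Write the graph to a Graphviz .dot file, grouping nodes of equal rank
    # and optionally scaling fonts and edge widths when `scaled` is set.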
    dot = str(dot)
    dot = open(dot, 'w')
    dot.write('strict digraph "None" {\n')
    dot.write('rankdir=LR\nnodesep=0\n')
    # dot.write('ranksep=50\n')
    dot.write('node [fontname=Ubuntu,shape=none];\n')
    # dot.write('edge [width=10000];\n')
    dot.write('edge [width=1];\n')
    g.remove_nodes_from(black_list)
    ranks = collections.defaultdict(list)
    for n in g.nodes():
        r = rank(g, n)
        if r:
            ranks[r].append(n)
        if not g.out_degree(n):
            continue
        dot.write('"%s" -> { ' % (n))
        dot.write(' '.join(['"%s"' % (str(a)) for a in g.successors(n)]))
        if scaled and r and int(r):
            dot.write(' } [penwidth=%d label=%d];\n' % (100/r, r))
        else:
            dot.write(' } ;\n')
    print(ranks.keys())
    for r in ranks.keys():
        dot.write("{ rank=same %s }\n" % (' '.join(['"%s"' % (str(a)) for a in ranks[r]])))
    for n in g.nodes():
        prop = Munch()
        if scaled and len(ranks):
            prop.fontsize = 500 + 10000 / (len(ranks[rank(g, n)]) + 1)
            # prop.label = n + ' ' + str(rank(g,n))
        if prop:
            dot.write('"%s" [%s]\n' % (n, ','.join(['%s="%s"' % (a, str(prop[a])) for a in prop])))
        elif not g.number_of_edges():
            dot.write('"%s"\n' % (n))
        # else:
        #     dot.write('"%s"\n'%(n))
    dot.write('}\n')
    dot.close()
    print(dot.name)


@open_file(0, mode='r')
def read_dot2(dot):
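    # Minimal .dot reader for the files produced by write_dot(): parses
    # "a -> { b c }" and "a -> b;" edge lines and bare node lines;
    # self-loops are skipped.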
    # read_dot pydot.graph_from_dot_data parse_dot_data from_pydot
    dg = nx.DiGraph()
    for a in dot:
        a = a.strip()
        if '->' in a:
            m = re.match('"?([^"]+)"? -> {(.+)}', a)
            if m:
                dg.add_edges_from([(m.group(1), b.strip('"')) for b in m.group(2).split() if b != m.group(1)])
            else:
                m = re.match('"?([^"]+)"? -> "?([^"]*)"?;', a)
                if m:
                    if m.group(1) != m.group(2):
                        dg.add_edge(m.group(1), m.group(2))
                else:
                    log(a)
        else:
            m = re.match('"?([^"]+)"?', a)
            if m:
                if m.group(1):
                    dg.add_node(m.group(1))

    return dg


def to_dg(a):
    if isinstance(a, nx.DiGraph):
        return a
    if os.path.isfile(a):
        return read_dot2(a)


def remove_loops(dg):
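    # Break cycles: perform an iterative DFS and remove every back edge
    # found, leaving an acyclic graph (modified in place).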
    rm = []
    visited = set()
    path = [object()]
    path_set = set(path)
    stack = [iter(dg)]
    while stack:
        for v in stack[-1]:
            if v in path_set:
                rm.append((path[-1], v))
            elif v not in visited:
                visited.add(v)
                path.append(v)
                path_set.add(v)
                stack.append(iter(dg[v]))
                break
        else:
            path_set.remove(path.pop())
            stack.pop()
    # print(rm)
    dg.remove_edges_from(rm)
    return dg


def cflow_dir(a):
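    # Build (or reuse) a per-file .dot call graph for every *.c file in the
    # directory `a`, merge them into one index graph, and write index.dot
    # and index.tree.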
    index = nx.DiGraph()
    for c in glob.glob(os.path.join(a, "*.c")):
        g = None
        dot = str(Path(c).with_suffix(".dot"))
        if not os.path.isfile(dot):
            g = import_cflow(c, Path(c).with_suffix(".cflow"))
            write_dot(g, dot)
            print(dot, popen("ctags -x %s | wc -l" % (c))[0], len(set(e[0] for e in g.edges())))
        else:
            print(dot)
            try:
                # g = nx.drawing.nx_agraph.read_dot(dot)
                g = read_dot(dot)
            except (TypeError, pygraphviz.agraph.DotError):
                print('nx_pydot <- nx_agraph')
                g = nx.drawing.nx_pydot.read_dot(dot)
        # digraph_print(g, [], Path(c).with_suffix(".tree"))
        # index.add_nodes_from(g.nodes())
        index.add_edges_from(g.edges())
    write_dot(index, str(os.path.join(a, 'index.dot')))
    digraph_print(digraph_tree(index), [], os.path.join(a, 'index.tree'))
    return index


def cflow_linux():
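    # Build call graphs for a set of Linux source directories, accumulate
    # them into all.dot, and emit trees and reduced graphs for start_kernel
    # and sys_clone.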
    dirs = ('init kernel kernel/time '
            'fs fs/ext4 block '
            'ipc net '
            'lib security security/keys '
            'arch/x86/kernel drivers/char drivers/pci '
            ).split()

    # dirs += ('mm net/ipv4 crypto').split()
    dirs = ('init kernel arch/x86/kernel fs ').split()
    dirs += ['mm']

    # fs/notify/fanotify fs/notify/inotify

    try:
        print('loading all.dot')
        all = read_dot2('all.dot')
        # all = nx.DiGraph(read_dot('all.dot'))
    except FileNotFoundError:
        all = nx.DiGraph()
    for a in dirs:
        print(a)
        index = cflow_dir(a)
        # all.add_nodes_from(index.nodes())
        all.add_edges_from(index.edges())
    write_dot(all, 'all.dot')
    remove_loops(all)
    print('loops: ' + str(list(all.nodes_with_selfloops())))
    print('trees:')
    digraph_print(all, ['x86_64_start_kernel', 'start_kernel', 'main', 'initcall', 'early_param',
                        '__setup', 'sys_write', 'write'],
                  'all.tree')
    start_kernel = digraph_tree(all, ['start_kernel'])
    write_dot(start_kernel, 'start_kernel.dot')
    write_dot(reduce_graph(start_kernel), 'start_kernel-reduced.dot')
    write_dot(reduce_graph(reduce_graph(start_kernel)), 'start_kernel-reduced2.dot')
    write_dot(reduce_graph(digraph_tree(all, ['sys_clone'])), 'sys_clone.dot')


def stats(a):
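    # Print summary statistics of a call graph: node/edge counts, degree
    # extremes, leaves and roots, the most referenced and the biggest nodes,
    # and the longest path after loop removal.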
    dg = to_dg(a)
    stat = Munch()
    im = dict()
    om = dict()
    leaves = set()
    roots = dict()
    stat.edge_nodes = 0
    stat.couples = 0
    for n in dg:
        id = dg.in_degree(n)
        od = dg.out_degree(n)
        if id == 1 and od == 1:
            stat.edge_nodes += 1
        if id:
            im[n] = id
        else:
            roots[n] = od
        if od:
            om[n] = od
        else:
            leaves.add(n)
        if od == 1 and dg.in_degree(list(dg.successors(n))[0]) == 1:
            stat.couples += 1
    stat.max_in_degree = max(dict(dg.in_degree).values())
    stat.max_out_degree = max(dict(dg.out_degree).values())
    stat.leaves = len(leaves)
    stat.roots = len(roots)
    stat.big_roots = ' '.join(sort_dict(roots)[:20])
    # pprint(im)
    # pprint(om)
    stat._popular = ' '.join(sort_dict(im)[:10])
    stat._biggest = ' '.join(sort_dict(om)[:10])
    gd = remove_loops(dg)
    stat.dag_longest_path_len = len(dag_longest_path(dg))
    stat.__longest_path = ' '.join(dag_longest_path(dg)[:10] + [''])
    for a in [nx.DiGraph.number_of_nodes, nx.DiGraph.number_of_edges, nx.DiGraph.number_of_selfloops,
              nx.DiGraph.order]:
        stat[a.__name__] = a(dg)
    pprint(dict(stat))


def dot_expand(a, b):
    a = to_dg(a)
    b = to_dg(b)
    c = my_graph()
    log(a.nodes())
    c.add_edges_from(b.out_edges(b.nbunch_iter(a.nodes())))
    print(list(b.nbunch_iter(a.nodes())))
    return c


def add_rank(g):
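    # Annotate every node with 'rank1' (breadth-first distance from the
    # roots) and 'rank2' (negative distance from the leaves); rank()
    # combines the two.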
    g = to_dg(g)
    passed1 = set()
    passed2 = set()
    rn1 = 1
    rn2 = -1
    r1 = [n for (n, d) in g.in_degree if not d]
    r2 = [n for (n, d) in g.out_degree if not d]
    while r1 or r2:
        if r1:
            nxt = set()
            for n in r1:
                g.nodes[n]['rank1'] = max(rn1, g.nodes[n].get('rank1', rn1))
                for i in [_ for _ in g.successors(n)]:
                    nxt.add(i)
                    passed1.add(i)
            rn1 += 1
            r1 = nxt
        if r2:
            nxt = set()
            for n in r2:
                g.nodes[n]['rank2'] = min(rn2, g.nodes[n].get('rank2', rn2))
                for i in [_ for _ in g.predecessors(n)]:
                    nxt.add(i)
                    passed2.add(i)
            rn2 -= 1
            r2 = nxt
    g.__dict__['max_rank'] = rn1
    return g


me = os.path.basename(sys.argv[0])


def usage():
    print("Usage:\n")
    for c in ["referers_tree", "call_tree", "referers_dep", "call_dep"]:
        print(me, c, "<identifier>")
    print("\nTry this:\n")
    print("cd linux/init")
    print(me, "unittest")
    print(me, "referers_tree nfs_root_data")
    print(me, "call_tree start_kernel")
    print(me, "import_cflow $none_or_dir_or_file_or_mask")
    print(me, "stats $dot")
    print("Emergency termination: ^Z, kill %1")
    print()


class _unittest_autotest(unittest.TestCase):
    def test_1(self):
        write_dot(nx.DiGraph([(1, 2), (2, 3), (2, 4)]), 'test.dot')
        g = read_dot2("test.dot")
        self.assertEqual(list(g.successors("2")), ["3", "4"])
        self.assertTrue(os.path.isdir('include/linux/'))
        os.chdir('init')
        self.assertEqual('\t\t\t\t\tprepare_namespace ^', popen('srcxray.py referers_tree nfs_root_data')[-1])
        self.assertEqual('initrd_load: prepare_namespace', popen('srcxray.py referers_dep nfs_root_data')[-1])
        self.assertEqual('calibrate_delay_converge: __delay', popen('srcxray.py call_dep start_kernel')[-2])
        self.assertEqual('\t\tcpu_startup_entry', popen('srcxray.py call_tree start_kernel')[-1])
        os.chdir('..')
        self.assertTrue(syscalls().number_of_edges() > 400)
        # digraph_print:
        self.assertEqual("\t\tmount_initrd ^", popen("srcxray.py import_cflow init/do_mounts_initrd.c")[-1])
        self.assertEqual("\t\t4", popen('srcxray.py "nx.DiGraph([{1,2},{2,3},{2,4}])"')[-1])


def main():
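    # Command-line entry point: the first argument is evaluated as a module,
    # a Python expression, or a function name whose arguments are the rest
    # of the command line; a returned DiGraph is printed as a tree.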
    try:
        ret = False
        if len(sys.argv) == 1:
            print('Run', me, 'usage')
        else:
            a1 = sys.argv[1]
            sys.argv = sys.argv[1:]
            if isinstance(eval(a1), types.ModuleType):
                ret = eval(a1 + ".main()")
            elif '(' in a1:
                ret = eval(a1)
                # ret = exec(sys.argv[1])
            else:
                ret = eval(a1 + '(' + ', '.join("'%s'" % (a1)
                                                for a1 in sys.argv[1:]) + ')')
        if isinstance(ret, nx.DiGraph):
            digraph_print(ret)
        if isinstance(ret, bool) and ret is False:
            sys.exit(os.EX_CONFIG)
        # if (ret is not None):
        #     print(ret)
    except KeyboardInterrupt:
        log("\nInterrupted")


if __name__ == "__main__":
    main()