Raw data
The raw data looks roughly like this:

The four fields in each record are the current node's name, the node description (standing in for whatever node attributes you need), the origin node (i.e. the top-level root node), and the parent node (the node one level above the current one).
datas = [ ["root", "根节点", "root", None], ["node1", "一级节点1", "root", "root"], ["node2", "一级节点2", "root", "root"], ["node11", "二级节点11", "root", "node1"], ["node12", "二级节点12", "root", "node1"], ["node21", "二级节点21", "root", "node2"], ["node22", "二级节点22", "root", "node2"], ]
The Node class
Abstract a Node class to wrap this up:
class Node(object):
    def __init__(self, name: str, desc, parent: str, children: list):
        """
        Initialize a node.
        :param name: node name
        :param desc: node description
        :param parent: name of the parent node
        :param children: list of child Node objects
        """
        self.name = name
        self.desc = desc
        self.parent = parent
        self.children = children

    def get_nodes(self):
        """Return the full nested dict structure rooted at this node."""
        d = dict()
        d['name'] = self.name
        d['desc'] = self.desc
        d['parent'] = self.parent
        children = self.get_children()
        if children:
            d['children'] = [child.get_nodes() for child in children]
        return d

    def get_children(self):
        """Return all direct child Node objects of this node."""
        return [n for n in nodes if n.parent == self.name]

    def __repr__(self):
        return self.name
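Note that get_children scans a module-level nodes list, so that list must exist before get_nodes is called. As a quick, throwaway illustration of the recursion (the two sample nodes here are hypothetical):

nodes = [
    Node("root", "root node", None, []),
    Node("child", "a child node", "root", []),
]

print(nodes[0].get_nodes())
# {'name': 'root', 'desc': 'root node', 'parent': None,
#  'children': [{'name': 'child', 'desc': 'a child node', 'parent': 'root'}]}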
Convert the raw data into Node objects
nodes = list()
for data in datas:
    node = Node(data[0], data[1], data[-1], [])
    nodes.append(node)
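The origin column (data[2]) is not stored on the node, since only the parent link is needed to rebuild the tree. Equivalently, the loop can be written as a comprehension that names the fields explicitly; this is purely a stylistic alternative:

nodes = [Node(name, desc, parent, []) for name, desc, _origin, parent in datas]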
Link the nodes to each other
for node in nodes:
    children_names = [data[0] for data in datas if data[-1] == node.name]
    children = [n for n in nodes if n.name in children_names]
    node.children.extend(children)
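The nested scans above are quadratic in the number of nodes. For larger inputs, the same linking can be done in roughly linear time by first indexing children by parent name; this is an optional sketch, meant as a replacement for the loop above rather than an addition to it:

from collections import defaultdict

node_by_name = {node.name: node for node in nodes}
children_by_parent = defaultdict(list)

# Group each child node under its parent's name in a single pass.
for data in datas:
    parent_name = data[-1]
    if parent_name is not None:
        children_by_parent[parent_name].append(node_by_name[data[0]])

for node in nodes:
    node.children.extend(children_by_parent[node.name])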
Test
import json

root = nodes[0]
print(root)

tree = root.get_nodes()
print(json.dumps(tree, indent=4))
Running result:
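With the data above, print(root) first prints the root's name via __repr__, and the JSON dump of the tree should look roughly like this:

root
{
    "name": "root",
    "desc": "root node",
    "parent": null,
    "children": [
        {
            "name": "node1",
            "desc": "level-1 node 1",
            "parent": "root",
            "children": [
                {
                    "name": "node11",
                    "desc": "level-2 node 11",
                    "parent": "node1"
                },
                {
                    "name": "node12",
                    "desc": "level-2 node 12",
                    "parent": "node1"
                }
            ]
        },
        {
            "name": "node2",
            "desc": "level-1 node 2",
            "parent": "root",
            "children": [
                {
                    "name": "node21",
                    "desc": "level-2 node 21",
                    "parent": "node2"
                },
                {
                    "name": "node22",
                    "desc": "level-2 node 22",
                    "parent": "node2"
                }
            ]
        }
    ]
}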
The raw data can also be in dictionary form:
### fork_tool.py
import json


class Node(object):
    def __init__(self, **kwargs):
        """
        Initialize a node.
        :param kwargs: attributes of the current node
        """
        self.forked_id = kwargs.get("forked_id")
        self.max_drawdown = kwargs.get("max_drawdown")
        self.annualized_returns = kwargs.get("annualized_returns")
        self.create_time = kwargs.get("create_time")
        self.desc = kwargs.get("desc")
        self.origin = kwargs.get("origin")
        self.parent = kwargs.get("parent")
        self.children = kwargs.get("children", [])

    def get_nodes(self, nodes):
        """Return the full nested dict structure rooted at this node, i.e. build the tree.
        :param nodes: all Node objects of the tree
        """
        d = dict()
        d['forked_id'] = self.forked_id
        d['max_drawdown'] = self.max_drawdown
        d['annualized_returns'] = self.annualized_returns
        d['create_time'] = self.create_time
        d['desc'] = self.desc
        d['origin'] = self.origin
        d['parent'] = self.parent
        children = self.get_children(nodes)
        if children:
            d['children'] = [child.get_nodes(nodes) for child in children]
        return d

    def get_children(self, nodes):
        """Return all direct child Node objects of this node."""
        return [n for n in nodes if n.parent == self.forked_id]

    # def __repr__(self):
    #     return str(self.desc)


def process_datas(datas):
    """
    Process the raw data into linked Node objects.
    :param datas: list of dicts, one per node
    :return: list of Node objects with their children populated
    """
    # Each record is produced elsewhere along these lines:
    # forked_infos.append({"forked_id": str(forked_strategy.get("_id")),
    #                      "max_drawdown": max_drawdown,
    #                      "annualized_returns": annualized_returns,
    #                      "create_time": create_time,  # time the fork branch was created
    #                      "desc": desc,
    #                      "origin": origin,
    #                      "parent": parent,
    #                      "children": [],
    #                      })

    # Build the list of Node objects
    nodes = []
    for data in datas:
        node = Node(**data)
        nodes.append(node)

    # Link each Node object to its children (a NoSQL-style nested structure)
    for node in nodes:
        children_ids = [data["forked_id"] for data in datas if data["parent"] == node.forked_id]
        children = [n for n in nodes if n.forked_id in children_ids]
        node.children.extend(children)

    return nodes


test_datas = [
    {'annualized_returns': 0.01, 'children': [], 'create_time': 1562038393, 'desc': 'root',
     'forked_id': '5d1ad079e86117f3883f361e', 'max_drawdown': 0.01, 'origin': None, 'parent': None},
    {'annualized_returns': 0.314, 'children': [], 'create_time': 1562060612, 'desc': 'level1',
     'forked_id': '5d1b2744b264566d3f3f3632', 'max_drawdown': 0.2,
     'origin': '5d1ad079e86117f3883f361e', 'parent': '5d1ad079e86117f3883f361e'},
    {'annualized_returns': 0.12, 'children': [], 'create_time': 1562060613, 'desc': 'level11',
     'forked_id': '5d1b2745e86117f3883f3632', 'max_drawdown': None,
     'origin': '5d1ad079e86117f3883f361e', 'parent': '5d1b2744b264566d3f3f3632'},
    {'annualized_returns': 0.09, 'children': [], 'create_time': 1562060614, 'desc': 'level12',
     'forked_id': '5d1b2746b264566d3f3f3633', 'max_drawdown': None,
     'origin': '5d1ad079e86117f3883f361e', 'parent': '5d1b2744b264566d3f3f3632'},
    {'annualized_returns': None, 'children': [], 'create_time': 1562060614, 'desc': 'level2',
     'forked_id': '5d1b2746e86117f3883f3633', 'max_drawdown': None,
     'origin': '5d1ad079e86117f3883f361e', 'parent': '5d1ad079e86117f3883f361e'},
    {'annualized_returns': None, 'children': [], 'create_time': 1562060627, 'desc': 'level21',
     'forked_id': '5d1b2753b264566d3f3f3635', 'max_drawdown': None,
     'origin': '5d1ad079e86117f3883f361e', 'parent': '5d1b2746e86117f3883f3633'},
    {'annualized_returns': None, 'children': [], 'create_time': 1562060628, 'desc': 'level211',
     'forked_id': '5d1b2754b264566d3f3f3637', 'max_drawdown': None,
     'origin': '5d1ad079e86117f3883f361e', 'parent': '5d1b2753b264566d3f3f3635'},
    {'annualized_returns': None, 'children': [], 'create_time': 1562060640, 'desc': 'level212',
     'forked_id': '5d1b2760e86117f3883f3634', 'max_drawdown': None,
     'origin': '5d1ad079e86117f3883f361e', 'parent': '5d1b2753b264566d3f3f3635'},
]


if __name__ == "__main__":
    nodes = process_datas(test_datas)
    info = nodes[0].get_nodes(nodes)
    print(json.dumps(info, indent=4))
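The test above relies on the root record being the first element of test_datas. If that ordering is not guaranteed, the root can be located by its missing parent instead; a small sketch (the output file name is illustrative):

nodes = process_datas(test_datas)

# Pick the node without a parent rather than relying on list order.
root = next(n for n in nodes if n.parent is None)
tree = root.get_nodes(nodes)

# Write the nested structure out, e.g. for a front-end tree component.
with open("fork_tree.json", "w") as f:
    json.dump(tree, f, indent=4)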