提交 028b556f 编写于 作者: M Mars Liu

uniform project structure

上级 e571f362
{
"keywords": [],
"keywords": ["web", "工作空间"],
"title": "创建工作空间目录",
"node_id": "toolchains-0c52dc8621ed42f6b587b99705ca744d",
"keywords_must": [],
......
{
"keywords": [],
"keywords": ["git"],
"title": "初始化git仓库",
"node_id": "toolchains-e5762965dec14f8a873428e4c04409d6",
"keywords_must": [],
......
{
"keywords": [],
"keywords": ["git", "gitignore"],
"title": "使用gitignore文件在版本库中忽略文件",
"node_id": "toolchains-02d8f669d1c745d599a2b321c6d15ca1",
"keywords_must": [],
......
{
"keywords": [],
"keywords": ["readme"],
"title": "为项目添加项目说明文档",
"node_id": "toolchains-b10e540d16da4c6b9ce71128fe712750",
"keywords_must": [],
......
{
"keywords": [],
"keywords": ["git", "commit"],
"title": "在本地仓库完成第一次版本提交",
"node_id": "toolchains-9c268abdef7e4712828235a4696329c5",
"keywords_must": [],
......
{
"node_id": "toolchains-1bfc87efcfe04179bba64f6874688ca5",
"keywords": [],
"keywords": ["codechina", "gitcode"],
"children": [],
"export": [],
"keywords_must": [],
......
{
"node_id": "toolchains-4a090ef0913f41b9a25348b2a4c5498f",
"keywords": [],
"keywords": ["项目组织"],
"children": [],
"export": [],
"keywords_must": [],
......
{
"node_id": "toolchains-d97f24c1f51e47e69e220e37975e49da",
"keywords": [],
"keywords": ["git", "remote"],
"children": [],
"export": [],
"keywords_must": [],
......
{
"node_id": "toolchains-a7f5610f9a3242fa9ed3c9445b40d5a3",
"keywords": [],
"keywords": ["git", "key", "ssh"],
"children": [],
"export": [],
"keywords_must": [],
......
{
"node_id": "toolchains-d110b9f5b6db482683cf117265eac2e0",
"keywords": [],
"keywords": ["git", "权限"],
"children": [],
"export": [],
"keywords_must": [],
......
{
"node_id": "toolchains-ffc3c659453a48d19673ac82965d05e8",
"keywords": [],
"keywords": ["git", "remote", "push"],
"children": [],
"export": [],
"keywords_must": [],
......
{
"node_id": "toolchains-3a1f4be17fcb4fc38907585dd297d48e",
"keywords": [],
"keywords": ["user", "permission"],
"children": [],
"export": [],
"keywords_must": [],
......
{
"node_id": "toolchains-adb4004580bc476a85d513d0be3a5033",
"keywords": [],
"keywords": ["nodejs", "install"],
"children": [],
"export": [],
"keywords_must": [],
......
{
"node_id": "toolchains-9493364f64ac4e0abe44b544b90546a3",
"keywords": [],
"keywords": ["npm"],
"children": [],
"export": [],
"keywords_must": [],
......
{
"node_id": "toolchains-720ff92db7e94d0db637f7f3329ed92a",
"keywords": [],
"keywords": ["npm", "project"],
"children": [],
"export": [],
"keywords_must": [],
......
{
"node_id": "toolchains-e997d39c64ea47e381d0f7097866abdf",
"keywords": [],
"keywords": ["vue"],
"children": [],
"export": [],
"keywords_must": [],
......
{
"node_id": "toolchains-b3950355b1cc40dfb66e37eb0c6523c8",
"keywords": [],
"keywords": ["python", "anaconda"],
"children": [],
"export": [],
"keywords_must": [],
......
{
"node_id": "toolchains-d117cf4df65c429cba528a1d9f3c3e27",
"keywords": [],
"keywords": ["python", "project"],
"children": [],
"export": [],
"keywords_must": [],
......
{
"node_id": "toolchains-37e699a027764e44817f3432debd449c",
"keywords": [],
"keywords": ["python", "venv", "virtualenv"],
"children": [],
"export": [],
"keywords_must": [],
......
{
"node_id": "toolchains-172e7ed9e81c4042bcb3e43de8712532",
"keywords": [],
"keywords": ["python", "flask"],
"children": [],
"export": [],
"keywords_must": [],
......
{
"node_id": "toolchains-627cf848cba64a56b6b5a3cd550bfec7",
"keywords": [],
"keywords": ["vue"],
"children": [],
"export": [],
"keywords_must": [],
......
{
"node_id": "toolchains-666893ee200a4d6db5435abd09d8f47a",
"keywords": [],
"keywords": ["git", "merge", "conflict", "resolve"],
"children": [],
"export": [],
"keywords_must": [],
......
{
"node_id": "toolchains-f24bcb8bea6b45368070c9b9f508606c",
"keywords": [],
"keywords": ["vue", "state", "vuex"],
"children": [],
"export": [],
"keywords_must": [],
......
{
"node_id": "toolchains-8b2b197bdbd944dea1918a4029343f0f",
"keywords": [],
"keywords": ["ajax", "http"],
"children": [],
"export": [],
"keywords_must": [],
......
{
"node_id": "toolchains-8b07ab0558ef451b9bdf111107034eba",
"keywords": [],
"keywords": ["vue", "project"],
"children": [],
"export": [],
"keywords_must": [],
......
{
"node_id": "toolchains-5c9d4bcbe11942d3a869ab47df26c097",
"keywords": [],
"keywords": ["vue", "page"],
"children": [],
"export": [],
"keywords_must": [],
......
{
"node_id": "toolchains-e6cbb70c80394cda8c6b2abc0c779fc6",
"keywords": [],
"keywords": ["vue", "route"],
"children": [],
"export": [],
"keywords_must": [],
......
{
"node_id": "toolchains-c7f2bf5d40ad4d0b955f7f107581223a",
"keywords": [],
"keywords": ["vue", "git", "commit"],
"children": [],
"export": [],
"keywords_must": [],
......
{
"node_id": "toolchains-eb45a452a6a04511968ef5b15c657a1d",
"keywords": [],
"keywords": ["git", "branch"],
"children": [],
"export": [],
"keywords_must": [],
......
{
"node_id": "toolchains-0f9e83a3974e4db0a7a4a125a2214e4e",
"keywords": [],
"keywords": ["vue", "component"],
"children": [],
"export": [],
"keywords_must": [],
......
{
"node_id": "toolchains-772b173292fa46b3a5ce1209a22f076f",
"keywords": [],
"keywords": ["git", "merge"],
"children": [],
"export": [],
"keywords_must": [],
......
{
"node_id": "toolchains-7e6fd53fe3a84f31a8aed859e4e45083",
"keywords": [],
"keywords": ["vue", "component"],
"children": [],
"export": [],
"keywords_must": [],
......
{
"node_id": "toolchains-e1e9c87df6c64d69b44946f6f8bd38f0",
"keywords": [],
"keywords": ["python", "flask"],
"children": [],
"export": [],
"keywords_must": [],
......
{
"node_id": "toolchains-9f6fdcbcafd3477897bf16d0dd2be262",
"keywords": [],
"keywords": ["python", "flask", "route"],
"children": [],
"export": [],
"keywords_must": [],
......
{
"node_id": "toolchains-286ed617cbac42a29dbd757361f1f0b3",
"keywords": [],
"keywords": ["python", "flask", "json"],
"children": [],
"export": [],
"keywords_must": [],
......
{
"node_id": "toolchains-93040f93301f41d080b62385eeb59d70",
"keywords": [],
"keywords": ["python", "flask", "form", "post", "database"],
"children": [],
"export": [],
"keywords_must": [],
......
{
"node_id": "toolchains-3a33244e3ab6490e9819a30d8cae5612",
"keywords": [],
"keywords": ["python", "flask", "decorator"],
"children": [],
"export": [],
"keywords_must": [],
......
{
"node_id": "toolchains-2f9cd0de2cc14dcb9ba4a1e76547b72b",
"keywords": [],
"keywords": ["python", "flask", "redis"],
"children": [],
"export": [],
"keywords_must": [],
......
{
"node_id": "toolchains-d1a69b26dab3472ca23bab5cf4dfd40a",
"keywords": [],
"keywords": ["python", "flask", "auth", "token"],
"children": [],
"export": [],
"keywords_must": [],
......
{
"node_id": "toolchains-807a4d7ac2c543df8668d6be52dbdae3",
"keywords": [],
"keywords": ["python", "pycharm"],
"children": [],
"export": [],
"keywords_must": [],
......
{
"node_id": "toolchains-d2f1500609564ae8b7d9f33f1c03edf4",
"keywords": [],
"keywords": ["flask", "config", "settings"],
"children": [],
"export": [],
"keywords_must": [],
......
{
"node_id": "toolchains-d34786cfe2d34f1ebbcc381f7234feeb",
"keywords": [],
"keywords": ["git", "commit"],
"children": [],
"export": [],
"keywords_must": [],
......
{
"node_id": "toolchains-238848692f9e479c928f1a003acb8ffe",
"keywords": [],
"keywords": ["python", "mysql", "dbapi"],
"children": [],
"export": [],
"keywords_must": [],
......
{
"node_id": "toolchains-f3a77b9ee7cd4687b8fdfa1a7e9ee70d",
"keywords": [],
"keywords": ["python", "sqlalchemy"],
"children": [],
"export": [],
"keywords_must": [],
......
{
"node_id": "toolchains-c17747cb6fb34e0782cd63103c26059f",
"keywords": [],
"keywords": ["flask", "python", "sqlalchemy", "model"],
"children": [],
"export": [],
"keywords_must": [],
......
{
"node_id": "toolchains-64e0f059275740a6abc4808a35f3e2d2",
"keywords": [],
"keywords": ["python", "flask", "sqlalchemy", "migration"],
"children": [],
"export": [],
"keywords_must": [],
......
{
"node_id": "toolchains-d96ecd596dba40ffbbca6d364a9bae6d",
"keywords": [],
"children": [],
"export": [],
"keywords_must": [],
"keywords_forbid": []
}
\ No newline at end of file
{
"node_id": "toolchains-179bf9f789a24f83beccaecd621474e1",
"keywords": [],
"keywords": ["python", "flask", "sqlalchemy", "serialization"],
"children": [],
"export": [],
"keywords_must": [],
......
此差异已折叠。
from src.tree import TreeWalker
from skill_tree.tree import TreeWalker
if __name__ == '__main__':
walker = TreeWalker("data", "toolchains", "toolchains")
......
import json
import logging
import os
import re
import subprocess
import sys
import uuid
import re
# Global set of node/exercise ids seen so far during a walk; used to detect
# duplicates so TreeWalker.ensure_node_id / ensure_exercises can regenerate them.
id_set = set()

# Module-level logger writing "<time> - <level> - <message>" lines to stdout.
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
handler = logging.StreamHandler(sys.stdout)
formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
handler.setFormatter(formatter)
logger.addHandler(handler)
def search_author(author_dict, username):
    """Map a git display name back to its canonical author key.

    ``author_dict`` maps canonical author -> list of known aliases. If no
    entry lists ``username`` as an alias, ``username`` itself is returned.
    """
    for author, aliases in author_dict.items():
        if username in aliases:
            return author
    return username
def user_name(md_file, author_dict):
    """Return the canonical author of *md_file*'s earliest commit.

    Runs ``git log`` on the file, collects the second token of every
    ``Author:`` line, and maps the last one (git log is newest-first, so the
    last Author line belongs to the original commit) through
    :func:`search_author`.

    Raises IndexError if the file has no commit history (no Author lines) —
    same behavior as before.
    """
    # subprocess.run waits for the child and closes its pipe; the previous
    # bare Popen never reaped the process and leaked the stdout handle.
    ret = subprocess.run(
        ["git", "log", md_file],
        stdout=subprocess.PIPE,
        check=False,
    )
    lines = ret.stdout.decode().splitlines()
    author_lines = [
        line.split(' ')[1] for line in lines if line.startswith('Author')
    ]
    author_nick_name = author_lines[-1]
    return search_author(author_dict, author_nick_name)
def load_json(p):
    """Parse the UTF-8 JSON document stored at path *p* and return it."""
    with open(p, 'r', encoding="utf-8") as fp:
        return json.load(fp)
def dump_json(p, j, exist_ok=False, override=False):
    """Serialize *j* as pretty-printed UTF-8 JSON to path *p*.

    If the file already exists:
      * ``exist_ok=True, override=False`` — silently keep the existing file;
      * ``exist_ok=True, override=True``  — overwrite it;
      * ``exist_ok=False``                — log an error and exit.
    """
    if os.path.exists(p):
        if exist_ok:
            if not override:
                return
        else:
            logger.error(f"{p} already exist")
            # Exit non-zero so callers/CI can detect the failure
            # (previously exited 0, which masked the error).
            sys.exit(1)
    with open(p, 'w+', encoding="utf8") as f:
        f.write(json.dumps(j, indent=2, ensure_ascii=False))
def ensure_config(path):
    """Load ``config.json`` under *path*, creating a minimal one if absent.

    Returns the parsed config dict, or the freshly written skeleton
    ``{"keywords": []}`` when no config existed.
    """
    config_path = os.path.join(path, "config.json")
    if os.path.exists(config_path):
        return load_json(config_path)
    node = {"keywords": []}
    dump_json(config_path, node, exist_ok=True, override=False)
    return node
def parse_no_name(d):
    """Split a directory name of the form ``<number>.<name>``.

    Returns ``(number, name)``. If the name has no numeric prefix the
    process exits silently with status 0 (original behavior preserved).
    """
    m = re.search(r'(\d+)\.(.*)', d)
    try:
        no = int(m.group(1))
        dir_name = m.group(2)
    except AttributeError:
        # m is None when there is no "<number>." prefix. The previous bare
        # ``except:`` also swallowed SystemExit/KeyboardInterrupt; only the
        # failed-match case can actually occur here.
        sys.exit(0)
    return no, dir_name
def check_export(base, cfg):
    """Drop entries from ``cfg['export']`` whose files no longer exist.

    Each export entry is a filename relative to *base*. Returns True when at
    least one stale entry was removed (``cfg`` is mutated in place in that
    case), False otherwise.
    """
    declared = cfg.get('export', [])
    kept = [
        name for name in declared
        if os.path.exists(os.path.join(base, name))
    ]
    changed = len(kept) != len(declared)
    if changed:
        cfg["export"] = kept
    return changed
class TreeWalker:
    """Scans a skill-tree directory layout and writes the aggregated ``tree.json``.

    The layout is root -> level -> chapter -> section, where every directory
    is named ``<number>.<title>`` and carries a ``config.json``. Walking the
    tree creates missing configs, regenerates missing/duplicate/foreign node
    ids, renumbers sibling directories to a contiguous 1..n sequence (renaming
    them on disk), and ensures every markdown exercise has a metadata JSON.
    """

    def __init__(
            self, root,
            tree_name,
            title=None,
            log=None,
            authors=None,
            enable_notebook=None,
            ignore_keywords=False
    ):
        # When True, sections with an empty keyword list are tolerated
        # instead of aborting the walk.
        self.ignore_keywords = ignore_keywords
        # Mapping canonical author key -> list of git display-name aliases.
        self.authors = authors if authors else {}
        # Tri-state: True/False force notebook_enable; None -> per-language default.
        self.enable_notebook = enable_notebook
        self.name = tree_name
        self.root = root
        self.title = tree_name if title is None else title
        # Filled by walk(): {tree_name: root_node}.
        self.tree = {}
        self.logger = logger if log is None else log

    def walk(self):
        """Walk the whole tree, repairing configs as it goes.

        Writes ``<root>/tree.json`` and returns the in-memory tree dict.
        """
        root = self.load_root()
        root_node = {
            "node_id": root["node_id"],
            "keywords": root["keywords"],
            "children": [],
            "keywords_must": root["keywords_must"],
            "keywords_forbid": root["keywords_forbid"]
        }
        self.tree[root["tree_name"]] = root_node
        # NOTE(review): load_levels fills root_node["children"], then
        # load_chapters immediately rebuilds the same children from the same
        # directories (chapter-style configs) — presumably load_levels is kept
        # for its renumbering/config-creation side effects; confirm.
        self.load_levels(root_node)
        self.load_chapters(self.root, root_node)
        for index, level in enumerate(root_node["children"]):
            # Each child is a single-entry dict: {title: node}.
            level_title = list(level.keys())[0]
            level_node = list(level.values())[0]
            level_path = os.path.join(self.root, f"{index + 1}.{level_title}")
            self.load_chapters(level_path, level_node)
            # The loop variable "index" deliberately shadows the outer one;
            # each scope only uses its own positional index.
            for index, chapter in enumerate(level_node["children"]):
                chapter_title = list(chapter.keys())[0]
                chapter_node = list(chapter.values())[0]
                chapter_path = os.path.join(
                    level_path, f"{index + 1}.{chapter_title}")
                self.load_sections(chapter_path, chapter_node)
                for index, section_node in enumerate(chapter_node["children"]):
                    section_title = list(section_node.keys())[0]
                    full_path = os.path.join(
                        chapter_path, f"{index + 1}.{section_title}")
                    if os.path.isdir(full_path):
                        self.check_section_keywords(full_path)
                        self.ensure_exercises(full_path)
        tree_path = os.path.join(self.root, "tree.json")
        dump_json(tree_path, self.tree, exist_ok=True, override=True)
        return self.tree

    def sort_dir_list(self, dirs):
        """Return (number, title) pairs for *dirs*, sorted by number."""
        result = [self.extract_node_env(dir) for dir in dirs]
        result.sort(key=lambda item: item[0])
        return result

    def load_levels(self, root_node):
        """Populate ``root_node["children"]`` with the level nodes under root."""
        levels = []
        for level in os.listdir(self.root):
            # NOTE(review): isdir() is tested on the bare entry name, not on
            # os.path.join(self.root, level); this only filters correctly when
            # the process cwd is the parent of these entries — verify.
            if not os.path.isdir(level):
                continue
            level_path = os.path.join(self.root, level)
            num, config = self.load_level_node(level_path)
            levels.append((num, config))
        levels = self.resort_children(self.root, levels)
        root_node["children"] = [item[1] for item in levels]
        return root_node

    def load_level_node(self, level_path):
        """Build the {title: node} wrapper for one level directory.

        Returns ``(number, wrapper)`` where number is the directory's numeric
        prefix.
        """
        config = self.ensure_level_config(level_path)
        num, name = self.extract_node_env(level_path)
        # NOTE(review): a freshly created level config contains only node_id,
        # so the keyword lookups below assume the config pre-existed with the
        # full field set — confirm.
        result = {
            name: {
                "node_id": config["node_id"],
                "keywords": config["keywords"],
                "children": [],
                "keywords_must": config["keywords_must"],
                "keywords_forbid": config["keywords_forbid"]
            }
        }
        return num, result

    def load_chapters(self, base, level_node):
        """Populate ``level_node["children"]`` with chapter nodes under *base*."""
        chapters = []
        for name in os.listdir(base):
            full_name = os.path.join(base, name)
            if os.path.isdir(full_name):
                num, chapter = self.load_chapter_node(full_name)
                chapters.append((num, chapter))
        chapters = self.resort_children(base, chapters)
        level_node["children"] = [item[1] for item in chapters]
        return level_node

    def load_sections(self, base, chapter_node):
        """Populate ``chapter_node["children"]`` with section nodes under *base*."""
        sections = []
        for name in os.listdir(base):
            full_name = os.path.join(base, name)
            if os.path.isdir(full_name):
                num, section = self.load_section_node(full_name)
                sections.append((num, section))
        sections = self.resort_children(base, sections)
        chapter_node["children"] = [item[1] for item in sections]
        return chapter_node

    def resort_children(self, base, children):
        """Sort (number, node) pairs by number and renumber them to 1..n.

        Directories whose numeric prefix does not match their sorted position
        are renamed on disk.
        """
        children.sort(key=lambda item: item[0])
        for index, [number, element] in enumerate(children):
            title = list(element.keys())[0]
            origin = os.path.join(base, f"{number}.{title}")
            posted = os.path.join(base, f"{index + 1}.{title}")
            if origin != posted:
                self.logger.info(f"rename [{origin}] to [{posted}]")
                os.rename(origin, posted)
        return children

    def ensure_chapters(self):
        """Ensure every top-level subdirectory has a level config.

        NOTE(review): passes the bare entry name to ensure_level_config, so
        the config path is resolved relative to the cwd, not self.root —
        verify against callers.
        """
        for subdir in os.listdir(self.root):
            self.ensure_level_config(subdir)

    def load_root(self):
        """Load (or create) the root ``config.json`` and guarantee its node id."""
        config_path = os.path.join(self.root, "config.json")
        if not os.path.exists(config_path):
            config = {
                "tree_name": self.name,
                "keywords": [],
                "node_id": self.gen_node_id(),
                "keywords_must": [],
                "keywords_forbid": []
            }
            dump_json(config_path, config, exist_ok=True, override=True)
        else:
            config = load_json(config_path)
            flag, result = self.ensure_node_id(config)
            if flag:
                # Persist only when ensure_node_id actually changed something.
                dump_json(config_path, result, exist_ok=True, override=True)
        return config

    def ensure_level_config(self, path):
        """Load (or create) a level's ``config.json``, fixing its node id."""
        config_path = os.path.join(path, "config.json")
        if not os.path.exists(config_path):
            config = {
                "node_id": self.gen_node_id()
            }
            dump_json(config_path, config, exist_ok=True, override=True)
        else:
            config = load_json(config_path)
            flag, result = self.ensure_node_id(config)
            if flag:
                # Dumps "config" rather than "result", but ensure_node_id
                # mutates config in place and returns it, so they are the
                # same object.
                dump_json(config_path, config, exist_ok=True, override=True)
        return config

    def ensure_chapter_config(self, path):
        """Load (or create) a chapter's ``config.json``, fixing its node id."""
        config_path = os.path.join(path, "config.json")
        if not os.path.exists(config_path):
            config = {
                "node_id": self.gen_node_id(),
                "keywords": [],
                "keywords_must": [],
                "keywords_forbid": []
            }
            dump_json(config_path, config, exist_ok=True, override=True)
        else:
            config = load_json(config_path)
            flag, result = self.ensure_node_id(config)
            if flag:
                dump_json(config_path, config, exist_ok=True, override=True)
        return config

    def ensure_section_config(self, path):
        """Load (or create) a section's ``config.json``, fixing its node id."""
        config_path = os.path.join(path, "config.json")
        if not os.path.exists(config_path):
            config = {
                "node_id": self.gen_node_id(),
                "keywords": [],
                "children": [],
                "export": []
            }
            dump_json(config_path, config, exist_ok=True, override=True)
        else:
            config = load_json(config_path)
            flag, result = self.ensure_node_id(config)
            if flag:
                dump_json(config_path, result, exist_ok=True, override=True)
        return config

    def ensure_node_id(self, config):
        """Recursively guarantee valid, globally unique node ids.

        An id is regenerated when it is missing, not prefixed with
        ``"<tree_name>-"``, or already present in the module-level ``id_set``.
        Returns ``(changed, config)``; *config* is mutated in place.
        """
        flag = False
        if "node_id" not in config or \
                not config["node_id"].startswith(f"{self.name}-") or \
                config["node_id"] in id_set:
            new_id = self.gen_node_id()
            id_set.add(new_id)
            config["node_id"] = new_id
            flag = True
        for child in config.get("children", []):
            child_node = list(child.values())[0]
            f, _ = self.ensure_node_id(child_node)
            flag = flag or f
        return flag, config

    def gen_node_id(self):
        """Return a fresh node id of the form ``<tree_name>-<uuid4 hex>``."""
        return f"{self.name}-{uuid.uuid4().hex}"

    def extract_node_env(self, path):
        """Split a directory path's basename into ``(number, title)``.

        Raises (after logging) when the basename has no ``<number>.`` prefix.
        """
        try:
            _, dir = os.path.split(path)
            self.logger.info(path)
            number, title = dir.split(".", 1)
            return int(number), title
        except Exception as error:
            self.logger.error(f"目录 [{path}] 解析失败,结构不合法,可能是缺少序号")
            raise error

    def load_chapter_node(self, full_name):
        """Build the {title: node} wrapper for one chapter directory."""
        config = self.ensure_chapter_config(full_name)
        num, name = self.extract_node_env(full_name)
        result = {
            name: {
                "node_id": config["node_id"],
                "keywords": config["keywords"],
                "children": [],
                "keywords_must": config["keywords_must"],
                "keywords_forbid": config["keywords_forbid"]
            }
        }
        return num, result

    def load_section_node(self, full_name):
        """Build the {title: node} wrapper for one section directory.

        Unlike chapter nodes, a section keeps any pre-declared children from
        its config.
        """
        config = self.ensure_section_config(full_name)
        num, name = self.extract_node_env(full_name)
        result = {
            name: {
                "node_id": config["node_id"],
                "keywords": config["keywords"],
                "children": config.get("children", []),
                "keywords_must": config["keywords_must"],
                "keywords_forbid": config["keywords_forbid"]
            }
        }
        return num, result

    def ensure_exercises(self, section_path):
        """Ensure every markdown exercise in the section has a metadata JSON,
        is listed in the section's export, and owns a unique exercise_id.
        """
        config = self.ensure_section_config(section_path)
        flag = False
        for e in os.listdir(section_path):
            base, ext = os.path.splitext(e)
            _, source = os.path.split(e)
            if ext != ".md":
                continue
            mfile = base + ".json"
            meta_path = os.path.join(section_path, mfile)
            md_file = os.path.join(section_path, e)
            self.ensure_exercises_meta(meta_path, source, md_file)
            export = config.get("export", [])
            # "algorithm" trees manage their export lists manually.
            if mfile not in export and self.name != "algorithm":
                export.append(mfile)
                flag = True
                config["export"] = export
        if flag:
            dump_json(os.path.join(section_path, "config.json"),
                      config, True, True)
        for e in config.get("export", []):
            full_name = os.path.join(section_path, e)
            exercise = load_json(full_name)
            # Regenerate ids that are missing or collide with one seen earlier.
            if "exercise_id" not in exercise or exercise.get("exercise_id") in id_set:
                eid = uuid.uuid4().hex
                exercise["exercise_id"] = eid
                dump_json(full_name, exercise, True, True)
            else:
                id_set.add(exercise["exercise_id"])

    def ensure_exercises_meta(self, meta_path, source, md_file):
        """Create or complete the metadata JSON that sits beside *md_file*.

        Existing fields are kept; missing ones are filled with defaults
        (fresh exercise_id, notebook default, source filename, git author,
        "code_options" type). The result is always written back.
        """
        _, mfile = os.path.split(meta_path)
        meta = None
        if os.path.exists(meta_path):
            with open(meta_path) as f:
                content = f.read()
            if content:
                meta = json.loads(content)
                if "exercise_id" not in meta:
                    meta["exercise_id"] = uuid.uuid4().hex
                if "notebook_enable" not in meta:
                    meta["notebook_enable"] = self.default_notebook()
                if "source" not in meta:
                    meta["source"] = source
                if "author" not in meta:
                    meta["author"] = user_name(md_file, self.authors)
                if "type" not in meta:
                    meta["type"] = "code_options"
        if meta is None:
            meta = {
                "type": "code_options",
                "author": user_name(md_file, self.authors),
                "source": source,
                "notebook_enable": self.default_notebook(),
                "exercise_id": uuid.uuid4().hex
            }
        dump_json(meta_path, meta, True, True)

    def default_notebook(self):
        """Default for ``notebook_enable``: an explicit setting wins; otherwise
        only the notebook-capable language trees get True."""
        if self.enable_notebook is not None:
            return self.enable_notebook
        if self.name in ["python", "java", "c"]:
            return True
        else:
            return False

    def check_section_keywords(self, full_path):
        """Abort the walk when a section config has no keywords, unless
        keyword checking was disabled at construction time."""
        if self.ignore_keywords:
            return
        config = self.ensure_section_config(full_path)
        if not config.get("keywords", []):
            self.logger.error(f"节点 [{full_path}] 的关键字为空,请修改配置文件写入关键字")
            sys.exit(1)
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册