Commit 1f362259 authored by feilong

Batch add author information

Parent aee3f863
{
"Python小白进阶":"youcans",
"youcans": "youcans",
"AI浩":"hhhhhhhhhhwwwwwwwwww",
"hhhhhhhhhhwwwwwwwwww": "hhhhhhhhhhwwwwwwwwww",
"吴佳WuJia":"yvettewu",
"yvettewu": "yvettewu",
"幻灰龙":"huanhuilong",
"feilong":"huanhuilong",
"huanhuilong": "huanhuilong",
"xiaozhi_5638": "xiaozhi_5638",
"请叫我卷福": "xiaozhi_5638"
}
\ No newline at end of file
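The new assets/author.json above maps both display names and Git committer names to a canonical author id. A minimal sketch of how this mapping is consumed, mirroring the user_name() rewrite further down in this commit; resolve_author and the sample path are illustrative only, not part of the diff:

import json
import subprocess

# Load the author mapping introduced in this commit.
with open('assets/author.json', 'r') as f:
    author_dict = json.loads(f.read())

def resolve_author(md_file):
    # The last "Author:" line in `git log <file>` belongs to the commit
    # that originally added the document.
    out = subprocess.run(["git", "log", md_file],
                         capture_output=True, text=True).stdout
    names = [line.split(' ')[1] for line in out.splitlines()
             if line.startswith('Author')]
    # Fall back to an empty string when the Git name is not in the mapping.
    return author_dict.get(names[-1], "") if names else ""

print(resolve_author("install.md"))  # illustrative path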
 {
 "type": "code_options",
 "author": "huanhuilong",
 "source": "install.md",
-"notebook_enable": true
+"notebook_enable": true,
+"exercise_id": "d6a941686200409eb98a9b424682eabc"
 }
\ No newline at end of file
 {
 "type": "code_options",
 "author": "huanhuilong",
 "source": "ball.md",
-"notebook_enable": true
+"notebook_enable": true,
+"exercise_id": "350a8b30e2b346029787411d210dbf52"
 }
\ No newline at end of file
 {
 "node_id": "opencv-819f137c35c64f76945bcd7e60d37807",
 "keywords": [
 "OpenCV颜色空间"
 ],
 "children": [],
 "export": [
 "pixel.json",
-"hack.json"
+"hack.json",
+"ball.json"
 ]
 }
\ No newline at end of file
 {
 "type": "code_options",
 "author": "huanhuilong",
 "source": "hack.md",
-"notebook_enable": true
+"notebook_enable": true,
+"exercise_id": "89bbad54fefe4f6e9d5d3b3c9dab538f"
 }
\ No newline at end of file
 {
 "type": "code_options",
 "author": "huanhuilong",
 "source": "pixel.md",
-"notebook_enable": true
+"notebook_enable": true,
+"exercise_id": "712900b7dcac43eebee5f384d5cffc6f"
 }
\ No newline at end of file
 {
 "type": "code_options",
 "author": "huanhuilong",
 "source": "doodle.md",
-"notebook_enable": true
+"notebook_enable": true,
+"exercise_id": "f1a092edc05a4e7084c5e64fb7f42704"
 }
\ No newline at end of file
 {
 "type": "code_options",
 "author": "huanhuilong",
 "source": "img_buffer_convert.md",
-"notebook_enable": true
+"notebook_enable": true,
+"exercise_id": "4e998acee3c1435eabb56355c9c116ab"
 }
\ No newline at end of file
{
"type": "code_options",
"author": "huanhuilong",
"source": "img_read_write.md",
"notebook_enable": false,
"exercise_id": "755680a734654644828e707fc7c5f44d"
}
\ No newline at end of file
 {
 "type": "code_options",
 "author": "huanhuilong",
 "source": "video_read_write.md",
-"notebook_enable": true
+"notebook_enable": true,
+"exercise_id": "462eb229fee74f01ae7e4a3a49a8789d"
 }
\ No newline at end of file
{
"type": "code_options",
"author": "youcans",
"source": "Grayscale.md",
"notebook_enable": false,
"exercise_id": "b45e7096e3c340ba8feebb63a63dc91b"
}
\ No newline at end of file
@@ -2,5 +2,7 @@
 "node_id": "opencv-b77391c3fa5648f4bb92e990d6cf1922",
 "keywords": [],
 "children": [],
-"export": []
+"export": [
+"Grayscale.json"
+]
 }
\ No newline at end of file
@@ -2,5 +2,7 @@
 "node_id": "opencv-43cb627865154bb69eaad017845e8944",
 "keywords": [],
 "children": [],
-"export": []
+"export": [
+"erosion_dilation.json"
+]
 }
\ No newline at end of file
{
"type": "code_options",
"author": "huanhuilong",
"source": "erosion_dilation.md",
"notebook_enable": false,
"exercise_id": "4a8dfe8c881848c5bcb5d25da2a4ec1e"
}
\ No newline at end of file
{
"type": "code_options",
"author": "huanhuilong",
"source": "close.md",
"notebook_enable": false,
"exercise_id": "bd9c9f15f56d4e4485efdebde82c05a3"
}
\ No newline at end of file
@@ -2,5 +2,8 @@
 "node_id": "opencv-50c11401b1e8431c964f0771cfe7941b",
 "keywords": [],
 "children": [],
-"export": []
+"export": [
+"open.json",
+"close.json"
+]
 }
\ No newline at end of file
{
"type": "code_options",
"author": "huanhuilong",
"source": "open.md",
"notebook_enable": false,
"exercise_id": "8c62f2e6911b4325be5de3e78bd5010a"
}
\ No newline at end of file
@@ -2,5 +2,7 @@
 "node_id": "opencv-4fa8c9dee0b04714b3a9ac7ba402be5b",
 "keywords": [],
 "children": [],
-"export": []
+"export": [
+"connect.json"
+]
 }
\ No newline at end of file
{
"type": "code_options",
"author": "huanhuilong",
"source": "connect.md",
"notebook_enable": false,
"exercise_id": "375ab08dcf78434883ea719de7b96e05"
}
\ No newline at end of file
{
"type": "code_options",
"author": "youcans",
"source": "Contours.md",
"notebook_enable": false,
"exercise_id": "4da5c24ede3e4d0abc182c17cf94ae59"
}
\ No newline at end of file
@@ -2,5 +2,7 @@
 "node_id": "opencv-9f13f163a994474a9edafc12adcfe529",
 "keywords": [],
 "children": [],
-"export": []
+"export": [
+"Contours.json"
+]
 }
\ No newline at end of file
@@ -2,5 +2,7 @@
 "node_id": "opencv-996898f197114cd3b4195cc6de7edc42",
 "keywords": [],
 "children": [],
-"export": []
+"export": [
+"flower.json"
+]
 }
\ No newline at end of file
{
"type": "code_options",
"author": "huanhuilong",
"source": "flower.md",
"notebook_enable": false,
"exercise_id": "685fd38e0be54e90adbf77399c976772"
}
\ No newline at end of file
@@ -2,5 +2,7 @@
 "node_id": "opencv-42497d66251142e58892ffdd81a59d5a",
 "keywords": [],
 "children": [],
-"export": []
+"export": [
+"fish.json"
+]
 }
\ No newline at end of file
{
"type": "code_options",
"author": "huanhuilong",
"source": "fish.md",
"notebook_enable": false,
"exercise_id": "cc515db7ec124e1ea1e0bebd818485c8"
}
\ No newline at end of file
{
"type": "code_options",
"author": "youcans",
"source": "BoxFilter.md",
"notebook_enable": false,
"exercise_id": "1ed32d68531d4a5b98121647b45c662e"
}
\ No newline at end of file
@@ -2,5 +2,7 @@
 "node_id": "opencv-ff01e231d0234005b8167d63652c827b",
 "keywords": [],
 "children": [],
-"export": []
+"export": [
+"BoxFilter.json"
+]
 }
\ No newline at end of file
{
"type": "code_options",
"author": "youcans",
"source": "Gradient.md",
"notebook_enable": false,
"exercise_id": "4f6fec53a3374d0fbe05f9dbbd5c1157"
}
\ No newline at end of file
@@ -2,5 +2,7 @@
 "node_id": "opencv-bc6db682e6744c048c801360db22b182",
 "keywords": [],
 "children": [],
-"export": []
+"export": [
+"Gradient.json"
+]
 }
\ No newline at end of file
{
"type": "code_options",
"author": "youcans",
"source": "Affine1.md",
"notebook_enable": false,
"exercise_id": "9253c5495bc64871b346a84f6c883062"
}
\ No newline at end of file
@@ -2,5 +2,7 @@
 "node_id": "opencv-2ee9206f0d84472cb0458b4c5fd695cc",
 "keywords": [],
 "children": [],
-"export": []
+"export": [
+"Affine1.json"
+]
 }
\ No newline at end of file
@@ -2,5 +2,7 @@
 "node_id": "opencv-39059ce222a240309afeb5b18dfe528e",
 "keywords": [],
 "children": [],
-"export": []
+"export": [
+"rust_face.json"
+]
 }
\ No newline at end of file
{
"type": "code_options",
"author": "huanhuilong",
"source": "rust_face.md",
"notebook_enable": false,
"exercise_id": "b09cae3cd2614994a64421cee6c12e82"
}
\ No newline at end of file
@@ -2,5 +2,7 @@
 "node_id": "opencv-fde1c160abc94e49bb7811fa6b90b9a7",
 "keywords": [],
 "children": [],
-"export": []
+"export": [
+"harris.json"
+]
 }
\ No newline at end of file
{
"type": "code_options",
"author": "huanhuilong",
"source": "harris.md",
"notebook_enable": false,
"exercise_id": "da10924abdb64e3dbc50be0ca5f9c44d"
}
\ No newline at end of file
@@ -2,5 +2,7 @@
 "node_id": "opencv-47d476b32cc94ce48346eebaf03a61ae",
 "keywords": [],
 "children": [],
-"export": []
+"export": [
+"match.json"
+]
 }
\ No newline at end of file
{
"type": "code_options",
"author": "huanhuilong",
"source": "match.md",
"notebook_enable": false,
"exercise_id": "a72edc31e2cc4e28bfa46f65197acd1a"
}
\ No newline at end of file
{
"node_id": "opencv-cc6f5f3a5c2d494c8574a55c8e43c237",
"keywords": [],
"children": [],
"export": [
"face_detect.json"
]
}
\ No newline at end of file
{
"type": "code_options",
"author": "huanhuilong",
"source": "face_detect.md",
"notebook_enable": false,
"exercise_id": "2433108d26474391a26656385b23ae80"
}
\ No newline at end of file
{
"node_id": "opencv-c20cf8a094924526931da798e2642418",
"keywords": [],
"children": [],
"export": [
"detect_bird.json"
]
}
\ No newline at end of file
{
"type": "code_options",
"author": "huanhuilong",
"source": "detect_bird.md",
"notebook_enable": false,
"exercise_id": "7b215146d4304553a50169c32f9ede82"
}
\ No newline at end of file
@@ -2,5 +2,7 @@
 "node_id": "opencv-e7cb65bb329040e98c85d2888f18d8bb",
 "keywords": [],
 "children": [],
-"export": []
+"export": [
+"flow.json"
+]
 }
\ No newline at end of file
{
"type": "code_options",
"author": "huanhuilong",
"source": "flow.md",
"notebook_enable": false,
"exercise_id": "8281d7e2e5b34437894e7f5a9a587e74"
}
\ No newline at end of file
@@ -2,5 +2,7 @@
 "node_id": "opencv-2959bd73f3894090b4a65be4fe8faff6",
 "keywords": [],
 "children": [],
-"export": []
+"export": [
+"obj_tracker.json"
+]
 }
\ No newline at end of file
{
"type": "code_options",
"author": "xiaozhi_5638",
"source": "obj_tracker.md",
"notebook_enable": false,
"exercise_id": "309481fe48034e4195400f60e446da7d"
}
\ No newline at end of file
@@ -2,5 +2,7 @@
 "node_id": "opencv-92964722395c4b9f989694211b07df44",
 "keywords": [],
 "children": [],
-"export": []
+"export": [
+"deep_learning_object_detection.json"
+]
 }
\ No newline at end of file
{
"type": "code_options",
"author": "hhhhhhhhhhwwwwwwwwww",
"source": "deep_learning_object_detection.md",
"notebook_enable": false,
"exercise_id": "94a4d7de964e425e84ee9a82e016a539"
}
\ No newline at end of file
@@ -2,5 +2,7 @@
 "node_id": "opencv-a05295f5c1b94fdcaa370a45621cd5a5",
 "keywords": [],
 "children": [],
-"export": []
+"export": [
+"detect_faces.json"
+]
 }
\ No newline at end of file
{
"type": "code_options",
"author": "hhhhhhhhhhwwwwwwwwww",
"source": "detect_faces.md",
"notebook_enable": false,
"exercise_id": "c2ba1555aed74739aebe55d6ac73c96d"
}
\ No newline at end of file
{
"type": "code_options",
"author": "hhhhhhhhhhwwwwwwwwww",
"source": "attitude_estimation.md",
"notebook_enable": false,
"exercise_id": "0a887406b4204f288d5ed42ee2341945"
}
\ No newline at end of file
@@ -2,5 +2,7 @@
 "node_id": "opencv-8bcc98575f3b4325b84fcb623916e3d2",
 "keywords": [],
 "children": [],
-"export": []
+"export": [
+"attitude_estimation.json"
+]
 }
\ No newline at end of file
 {
 "keywords": [],
 "children": [],
-"export": []
-}
+"export": [
+"opencv-yolo-inference-vehicle.json"
+],
+"node_id": "opencv-d3ca3567cef647b9bb960d7216361351"
+}
\ No newline at end of file
{
"type": "code_options",
"author": "xiaozhi_5638",
"source": "opencv-yolo-inference-vehicle.md",
"notebook_enable": false,
"exercise_id": "e688f5c0f2794c4ba196f08ce1c05d3e"
}
\ No newline at end of file
@@ -43,7 +43,9 @@
 {
 "图像的基本操作": {
 "node_id": "opencv-819f137c35c64f76945bcd7e60d37807",
-"keywords": [],
+"keywords": [
+"OpenCV颜色空间"
+],
 "children": []
 }
 },
@@ -176,7 +178,22 @@
 "图像识别": {
 "node_id": "opencv-6794162341944f869fcefa767cba00c7",
 "keywords": [],
-"children": []
+"children": [
+{
+"人脸识别": {
+"node_id": "opencv-cc6f5f3a5c2d494c8574a55c8e43c237",
+"keywords": [],
+"children": []
+}
+},
+{
+"鸟图识别": {
+"node_id": "opencv-c20cf8a094924526931da798e2642418",
+"keywords": [],
+"children": []
+}
+}
+]
 }
 },
 {
@@ -197,13 +214,6 @@
 "keywords": [],
 "children": []
 }
-},
-{
-"OpenCV中的目标跟踪": {
-"node_id": "opencv-183829982f6e4279a4285d48ee04f1cd",
-"keywords": [],
-"children": []
-}
 }
 ]
 }
@@ -240,6 +250,13 @@
 "keywords": [],
 "children": []
 }
+},
+{
+"车辆检测": {
+"node_id": "opencv-1abadac8969e45cb838b7db3b2a2fe25",
+"keywords": [],
+"children": []
+}
 }
 ]
 }
...
@@ -4,11 +4,11 @@ from src.doc import DocWalker
 from src.img import ImgWalker
 
 if __name__ == '__main__':
-    # walker = TreeWalker("data", "opencv", "OpenCV")
-    # walker.walk()
+    walker = TreeWalker("data", "opencv", "OpenCV")
+    walker.walk()
 
     # doc = DocWalker('doc')
     # doc.walk()
 
-    img = ImgWalker('data')
-    img.walk()
+    # img = ImgWalker('data')
+    # img.walk()
@@ -2,11 +2,18 @@ import json
 import logging
 import os
 import re
+import subprocess
 import sys
 import uuid
 import re
 
 import git
 
+
+def load_json(p):
+    with open(p, 'r') as f:
+        return json.loads(f.read())
+
 
 id_set = set()
 logger = logging.getLogger(__name__)
 logger.setLevel(logging.INFO)
@@ -15,13 +22,21 @@ formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
 handler.setFormatter(formatter)
 logger.addHandler(handler)
 
 repo = git.Repo(".")
+author_dict = load_json('assets/author.json')
 
 
-def user_name():
-    return repo.config_reader().get_value("user", "name")
-
-
-def load_json(p):
-    with open(p, 'r') as f:
-        return json.loads(f.read())
+def user_name(md_file):
+    ret = subprocess.Popen([
+        "git", "log", md_file
+    ], stdout=subprocess.PIPE)
+    lines = list(map(lambda l: l.decode(), ret.stdout.readlines()))
+    author_lines = []
+    for line in lines:
+        if line.startswith('Author'):
+            author_lines.append(line.split(' ')[1])
+    author_nick_name = author_lines[-1]
+    return author_dict.get(author_nick_name, "")
+    # return repo.config_reader().get_value("user", "name")
 
 
 def dump_json(p, j, exist_ok=False, override=False):
@@ -100,11 +115,13 @@ class TreeWalker:
         for index, chapter in enumerate(level_node["children"]):
             chapter_title = list(chapter.keys())[0]
             chapter_node = list(chapter.values())[0]
-            chapter_path = os.path.join(level_path, f"{index + 1}.{chapter_title}")
+            chapter_path = os.path.join(
+                level_path, f"{index + 1}.{chapter_title}")
             self.load_sections(chapter_path, chapter_node)
             for index, section_node in enumerate(chapter_node["children"]):
                 section_title = list(section_node.keys())[0]
-                full_path = os.path.join(chapter_path, f"{index + 1}.{section_title}")
+                full_path = os.path.join(
+                    chapter_path, f"{index + 1}.{section_title}")
                 if os.path.isdir(full_path):
                     self.check_section_keywords(full_path)
                     self.ensure_exercises(full_path)
@@ -314,7 +331,8 @@ class TreeWalker:
                 continue
             mfile = base + ".json"
             meta_path = os.path.join(section_path, mfile)
-            self.ensure_exercises_meta(meta_path, source)
+            md_file = os.path.join(section_path, e)
+            self.ensure_exercises_meta(meta_path, source, md_file)
             export = config.get("export", [])
             if mfile not in export and self.name != "algorithm":
                 export.append(mfile)
@@ -322,7 +340,8 @@ class TreeWalker:
             config["export"] = export
 
         if flag:
-            dump_json(os.path.join(section_path, "config.json"), config, True, True)
+            dump_json(os.path.join(section_path, "config.json"),
+                      config, True, True)
 
         for e in config.get("export", []):
             full_name = os.path.join(section_path, e)
@@ -334,32 +353,35 @@ class TreeWalker:
             else:
                 id_set.add(exercise["exercise_id"])
 
-    def ensure_exercises_meta(self, meta_path, source):
+    def ensure_exercises_meta(self, meta_path, source, md_file):
         _, mfile = os.path.split(meta_path)
         meta = None
         if os.path.exists(meta_path):
             with open(meta_path) as f:
                 content = f.read()
                 if content:
-                    meta = json.loads(content)
-                    if "exercise_id" not in meta:
-                        meta["exercise_id"] = uuid.uuid4().hex
-                    if "notebook_enable" not in meta:
-                        meta["notebook_enable"] = self.default_notebook()
-                    if "source" not in meta:
-                        meta["source"] = source
-                    if "author" not in meta:
-                        meta["author"] = user_name()
-                    if "type" not in meta:
-                        meta["type"] = "code_options"
+                    try:
+                        meta = json.loads(content)
+                        if "exercise_id" not in meta:
+                            meta["exercise_id"] = uuid.uuid4().hex
+                        if "notebook_enable" not in meta:
+                            meta["notebook_enable"] = self.default_notebook()
+                        if "source" not in meta:
+                            meta["source"] = source
+                        if "author" not in meta:
+                            meta["author"] = user_name(md_file)
+                        if "type" not in meta:
+                            meta["type"] = "code_options"
+                    except:
+                        pass
 
         if meta is None:
             meta = {
                 "type": "code_options",
-                "author": user_name(),
+                "author": user_name(md_file),
                 "source": source,
                 "notebook_enable": self.default_notebook(),
                 "exercise_id": uuid.uuid4().hex
             }
 
         dump_json(meta_path, meta, True, True)
 
     def default_notebook(self):
@@ -370,6 +392,6 @@ class TreeWalker:
 
     def check_section_keywords(self, full_path):
         config = self.ensure_section_config(full_path)
-        if not config.get("keywords", []):
-            self.logger.error(f"节点 [{full_path}] 的关键字为空,请修改配置文件写入关键字")
-            sys.exit(1)
+        # if not config.get("keywords", []):
+        #     self.logger.error(f"节点 [{full_path}] 的关键字为空,请修改配置文件写入关键字")
+        #     sys.exit(1)
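For reference, a hedged sketch of the default metadata block that the updated ensure_exercises_meta() writes when an exercise markdown file has no sibling .json yet; the field names and defaults come from the diff above, while the standalone helper and the sample arguments are illustrative only:

import uuid

def default_exercise_meta(source_md, author, notebook_enable=False):
    # Same shape as the generated *.json files earlier in this commit.
    return {
        "type": "code_options",
        "author": author,                   # resolved via user_name(md_file)
        "source": source_md,                # e.g. "fish.md"
        "notebook_enable": notebook_enable,
        "exercise_id": uuid.uuid4().hex,    # fresh 32-character hex id
    }

print(default_exercise_meta("fish.md", "huanhuilong"))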