forked from open-mmlab/mmpose
Commit: Add Chinese doc project (open-mmlab#787)
* Add Chinese documentation framework
Showing 41 changed files with 757 additions and 5 deletions.
@@ -0,0 +1,3 @@
## <a href='https://mmpose.readthedocs.io/en/latest/'>English</a>

## <a href='https://mmpose.readthedocs.io/zh_CN/latest/'>简体中文</a>
@@ -0,0 +1,20 @@
# Minimal makefile for Sphinx documentation
#

# You can set these variables from the command line, and also
# from the environment for the first two.
SPHINXOPTS ?=
SPHINXBUILD ?= sphinx-build
SOURCEDIR = .
BUILDDIR = _build

# Put it first so that "make" without argument is like "make help".
help:
	@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)

.PHONY: help Makefile

# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
%: Makefile
	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
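This is the stock Sphinx Makefile: any `make <target>` is forwarded to `sphinx-build -M <target>`. As a rough sketch (not taken from the commit), the same `make html` invocation could be driven from Python, assuming Sphinx is installed and the script is run from the docs source directory that contains `conf.py`:

```python
# Minimal sketch, assuming Sphinx is installed and the current directory is
# the docs source directory. Equivalent to `make html`, i.e.
# sphinx-build -M html <SOURCEDIR> <BUILDDIR> with the values from the Makefile.
import sys

from sphinx.cmd.build import build_main

exit_code = build_main(['-M', 'html', '.', '_build'])
sys.exit(exit_code)
```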
@@ -0,0 +1,112 @@
API Documentation
=================

mmpose.apis
-------------
.. automodule:: mmpose.apis
    :members:


mmpose.core
-------------
evaluation
^^^^^^^^^^^
.. automodule:: mmpose.core.evaluation
    :members:

fp16
^^^^^^^^^^^
.. automodule:: mmpose.core.fp16
    :members:


utils
^^^^^^^^^^^
.. automodule:: mmpose.core.utils
    :members:


post_processing
^^^^^^^^^^^^^^^^
.. automodule:: mmpose.core.post_processing
    :members:


mmpose.models
---------------
backbones
^^^^^^^^^^^
.. automodule:: mmpose.models.backbones
    :members:

necks
^^^^^^^^^^^
.. automodule:: mmpose.models.necks
    :members:

detectors
^^^^^^^^^^^
.. automodule:: mmpose.models.detectors
    :members:

heads
^^^^^^^^^^^^^^^
.. automodule:: mmpose.models.heads
    :members:

losses
^^^^^^^^^^^
.. automodule:: mmpose.models.losses
    :members:

misc
^^^^^^^^^^^
.. automodule:: mmpose.models.misc
    :members:

mmpose.datasets
-----------------
.. automodule:: mmpose.datasets
    :members:

datasets
^^^^^^^^^^^
.. automodule:: mmpose.datasets.datasets.top_down
    :members:

.. automodule:: mmpose.datasets.datasets.bottom_up
    :members:

pipelines
^^^^^^^^^^^
.. automodule:: mmpose.datasets.pipelines
    :members:

.. automodule:: mmpose.datasets.pipelines.loading
    :members:

.. automodule:: mmpose.datasets.pipelines.shared_transform
    :members:

.. automodule:: mmpose.datasets.pipelines.top_down_transform
    :members:

.. automodule:: mmpose.datasets.pipelines.bottom_up_transform
    :members:

.. automodule:: mmpose.datasets.pipelines.mesh_transform
    :members:

.. automodule:: mmpose.datasets.pipelines.pose3d_transform
    :members:

samplers
^^^^^^^^^^^
.. automodule:: mmpose.datasets.samplers
    :members:


mmpose.utils
---------------
.. automodule:: mmpose.utils
    :members:
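This API reference only renders correctly if Sphinx autodoc can import every module listed above. As a rough maintenance sketch, assuming the file above is saved as `api.rst` (the filename is not shown in this diff) and that mmpose is installed in the build environment, one could flag targets that would fail to import:

```python
# Rough sketch under stated assumptions: scan api.rst for automodule targets
# and report any that cannot be imported, since autodoc skips such modules.
import importlib
import re

with open('api.rst', 'r') as f:
    targets = re.findall(r'^\.\. automodule:: (\S+)', f.read(), re.MULTILINE)

for name in targets:
    try:
        importlib.import_module(name)
    except ImportError as err:
        print(f'WARNING: cannot import {name}: {err}')
```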
@@ -0,0 +1,3 @@
# 基准测试

内容建设中……
@@ -0,0 +1,102 @@
#!/usr/bin/env python
import os
import re
from glob import glob

from titlecase import titlecase

os.makedirs('topics', exist_ok=True)
os.makedirs('papers', exist_ok=True)

# Step 1: get subtopics: a mix of topic and task
minisections = [x.split('/')[-2:] for x in glob('../configs/*/*')]
alltopics = sorted(list(set(x[0] for x in minisections)))
subtopics = []
for t in alltopics:
    data = [x[1].split('_') for x in minisections if x[0] == t]
    valid_ids = []
    for i in range(len(data[0])):
        if len(set(x[i] for x in data)) > 1:
            valid_ids.append(i)
    if len(valid_ids) > 0:
        subtopics.extend([
            f"{titlecase(t)}({','.join([d[i].title() for i in valid_ids])})",
            t, '_'.join(d)
        ] for d in data)
    else:
        subtopics.append([titlecase(t), t, '_'.join(data[0])])

contents = {}
for subtopic, topic, task in sorted(subtopics):
    # Step 2: get all datasets
    datasets = sorted(
        list(
            set(
                x.split('/')[-2]
                for x in glob(f'../configs/{topic}/{task}/*/*/'))))
    contents[subtopic] = {d: {} for d in datasets}
    for dataset in datasets:
        # Step 3: get all settings: algorithm + backbone + trick
        for file in glob(f'../configs/{topic}/{task}/*/{dataset}/*.md'):
            keywords = (file.split('/')[-3],
                        *file.split('/')[-1].split('_')[:-1])
            with open(file, 'r') as f:
                contents[subtopic][dataset][keywords] = f.read()

# Step 4: write files by topic
for subtopic, datasets in contents.items():
    lines = [f'# {subtopic}', '']
    for dataset, keywords in datasets.items():
        if len(keywords) == 0:
            continue
        lines += [
            '<hr/>', '<br/><br/>', '',
            f'## <div align="center">{titlecase(dataset)} Dataset</div>', ''
        ]
        for keyword, info in keywords.items():
            keyword_strs = [titlecase(x.replace('_', ' ')) for x in keyword]
            lines += [
                '<br/>', '',
                (f'### {" + ".join(keyword_strs)}'
                 f' on {titlecase(dataset)}'), '', info, ''
            ]

    with open(f'topics/{subtopic.lower()}.md', 'w') as f:
        f.write('\n'.join(lines))

# Step 5: write files by paper
allfiles = [x.split('/')[-2:] for x in glob('../docs/papers/*/*.md')]
sections = sorted(list(set(x[0] for x in allfiles)))
for section in sections:
    lines = [f'# {titlecase(section)}', '']
    files = [f for s, f in allfiles if s == section]
    for file in files:
        with open(f'../docs/papers/{section}/{file}', 'r') as f:
            keyline = [
                line for line in f.readlines() if line.startswith('<summary')
            ][0]
        papername = re.sub(r'\<.*?\>', '', keyline).strip()
        paperlines = []
        for subtopic, datasets in contents.items():
            for dataset, keywords in datasets.items():
                keywords = {k: v for k, v in keywords.items() if keyline in v}
                if len(keywords) == 0:
                    continue
                for keyword, info in keywords.items():
                    keyword_strs = [
                        titlecase(x.replace('_', ' ')) for x in keyword
                    ]
                    paperlines += [
                        '<br/>', '',
                        (f'### {" + ".join(keyword_strs)}'
                         f' on {titlecase(dataset)}'), '', info, ''
                    ]
        if len(paperlines) > 0:
            lines += [
                '<hr/>', '<br/><br/>', '',
                f'## <div align="center">{papername}</div>', ''
            ]
            lines += paperlines

    with open(f'papers/{section}.md', 'w') as f:
        f.write('\n'.join(lines))
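To make Step 3 concrete: the dictionary key for each results file is the algorithm directory plus the underscore-separated tokens of the filename, minus the last token. The sketch below runs that same extraction on a single made-up path (the path and the resulting values are hypothetical, for illustration only; real paths come from the `glob` over `../configs`):

```python
# Illustration only: how Step 3 turns one results-file path into the keyword
# tuple used as a dictionary key. The path below is made up for demonstration.
file = '../configs/body/2d_kpt_sview_rgb_img/topdown_heatmap/coco/hrnet_w32_coco_256x192.md'

keywords = (file.split('/')[-3],                   # directory between task and dataset
            *file.split('/')[-1].split('_')[:-1])  # filename tokens minus the last one
print(keywords)  # -> ('topdown_heatmap', 'hrnet', 'w32', 'coco')
```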