mirror of
https://github.com/NotXia/unibo-ai-notes.git
synced 2025-12-14 18:51:52 +01:00
Add readme generation <noupdate>
This commit is contained in:
6
.github/workflows/compile.yml
vendored
6
.github/workflows/compile.yml
vendored
@ -33,13 +33,17 @@ jobs:
|
||||
- name: Prepare output directory
|
||||
run: |
|
||||
mkdir .compiled
|
||||
cp README.md .compiled
|
||||
cp LICENSE .compiled
|
||||
|
||||
- name: Compile
|
||||
run: |
|
||||
bash ./compile.sh .compiled .currpdfs
|
||||
|
||||
- name: Generate README
|
||||
run: |
|
||||
cp README.md .compiled/README.md
|
||||
python3 ./utils/update_readme.py --src-path ./src --readme-path .compiled/README.md --gh-link https://github.com/NotXia/unibo-ai-notes/blob/pdfs
|
||||
|
||||
|
||||
- name: Move to pdfs branch
|
||||
uses: s0/git-publish-subdir-action@develop
|
||||
|
||||
11
src/cognition-and-neuroscience/metadata.json
Normal file
11
src/cognition-and-neuroscience/metadata.json
Normal file
@ -0,0 +1,11 @@
|
||||
{
|
||||
"name": "Cognition and Neuroscience",
|
||||
"year": 1,
|
||||
"semester": 2,
|
||||
"pdfs": [
|
||||
{
|
||||
"name": "Module 1",
|
||||
"path": "cn.pdf"
|
||||
}
|
||||
]
|
||||
}
|
||||
@ -0,0 +1,15 @@
|
||||
{
|
||||
"name": "Combinatorial Decision Making and Optimization",
|
||||
"year": 1,
|
||||
"semester": 2,
|
||||
"pdfs": [
|
||||
{
|
||||
"name": "Module 1",
|
||||
"path": "module1/cdmo1.pdf"
|
||||
},
|
||||
{
|
||||
"name": "Module 2",
|
||||
"path": "module2/cdmo2.pdf"
|
||||
}
|
||||
]
|
||||
}
|
||||
11
src/deep-learning/metadata.json
Normal file
11
src/deep-learning/metadata.json
Normal file
@ -0,0 +1,11 @@
|
||||
{
|
||||
"name": "Deep Learning",
|
||||
"year": 1,
|
||||
"semester": 2,
|
||||
"pdfs": [
|
||||
{
|
||||
"name": null,
|
||||
"path": "dl.pdf"
|
||||
}
|
||||
]
|
||||
}
|
||||
19
src/fundamentals-of-ai-and-kr/metadata.json
Normal file
19
src/fundamentals-of-ai-and-kr/metadata.json
Normal file
@ -0,0 +1,19 @@
|
||||
{
|
||||
"name": "Fundamentals of Artificial Intelligence and Knowledge Representation",
|
||||
"year": 1,
|
||||
"semester": 1,
|
||||
"pdfs": [
|
||||
{
|
||||
"name": "Module 1",
|
||||
"path": "module1/faikr1.pdf"
|
||||
},
|
||||
{
|
||||
"name": "Module 2",
|
||||
"path": "module2/faikr2.pdf"
|
||||
},
|
||||
{
|
||||
"name": "Module 3",
|
||||
"path": "module3/faikr3.pdf"
|
||||
}
|
||||
]
|
||||
}
|
||||
15
src/image-processing-and-computer-vision/metadata.json
Normal file
15
src/image-processing-and-computer-vision/metadata.json
Normal file
@ -0,0 +1,15 @@
|
||||
{
|
||||
"name": "Image Processing and Computer Vision",
|
||||
"year": 1,
|
||||
"semester": 2,
|
||||
"pdfs": [
|
||||
{
|
||||
"name": "Module 1",
|
||||
"path": "module1/ipcv1.pdf"
|
||||
},
|
||||
{
|
||||
"name": "Module 2",
|
||||
"path": "module2/ipcv2.pdf"
|
||||
}
|
||||
]
|
||||
}
|
||||
15
src/languages-and-algorithms-for-ai/metadata.json
Normal file
15
src/languages-and-algorithms-for-ai/metadata.json
Normal file
@ -0,0 +1,15 @@
|
||||
{
|
||||
"name": "Languages and Algorithms for Artificial Intelligence",
|
||||
"year": 1,
|
||||
"semester": 3,
|
||||
"pdfs": [
|
||||
{
|
||||
"name": "Module 2",
|
||||
"path": "module2/laai2.pdf"
|
||||
},
|
||||
{
|
||||
"name": "Module 3",
|
||||
"path": "module3/laai3.pdf"
|
||||
}
|
||||
]
|
||||
}
|
||||
11
src/machine-learning-and-data-mining/metadata.json
Normal file
11
src/machine-learning-and-data-mining/metadata.json
Normal file
@ -0,0 +1,11 @@
|
||||
{
|
||||
"name": "Machine Learning and Data Mining",
|
||||
"year": 1,
|
||||
"semester": 1,
|
||||
"pdfs": [
|
||||
{
|
||||
"name": null,
|
||||
"path": "dm-ml.pdf"
|
||||
}
|
||||
]
|
||||
}
|
||||
@ -0,0 +1,11 @@
|
||||
{
|
||||
"name": "Statistical and Mathematical Methods for Artificial Intelligence",
|
||||
"year": 1,
|
||||
"semester": 1,
|
||||
"pdfs": [
|
||||
{
|
||||
"name": null,
|
||||
"path": "smm.pdf"
|
||||
}
|
||||
]
|
||||
}
|
||||
55
utils/update_readme.py
Normal file
55
utils/update_readme.py
Normal file
@ -0,0 +1,55 @@
|
||||
import argparse
import json
import os
import pathlib
import posixpath

# Name of the per-course metadata file looked up under the source tree.
METADATA_FILENAME = "metadata.json"


def collect_notes_metadata(src_path, gh_link):
    """Scan *src_path* for course metadata files.

    Walks the source tree and, for every directory that contains a
    ``metadata.json``, records the course under its year and semester.

    Args:
        src_path: Root directory of the .tex sources.
        gh_link: Base URL of the GitHub branch that holds the compiled PDFs.

    Returns:
        Nested dict ``{year: {semester: {course_name: {"name", "content"}}}}``
        where ``content`` is a list of ``{"name", "url"}`` entries, one per PDF.
    """
    notes_metadata = {}

    for root, _dirs, files in os.walk(src_path):
        if METADATA_FILENAME not in files:
            continue

        with open(os.path.join(root, METADATA_FILENAME), encoding="utf-8") as f:
            metadata = json.load(f)

        # Build the course URL with posixpath so separators are always '/',
        # even if this script runs on Windows (os.path.join would use '\\').
        dir_name = pathlib.PurePath(root).name
        gh_path = posixpath.join(gh_link, dir_name)

        semester_courses = (
            notes_metadata
            .setdefault(metadata["year"], {})
            .setdefault(metadata["semester"], {})
        )
        semester_courses[metadata["name"]] = {
            "name": metadata["name"],
            "content": [
                {
                    "name": pdf["name"],
                    "url": posixpath.join(gh_path, pdf["path"]),
                }
                for pdf in metadata["pdfs"]
            ],
        }

    return notes_metadata


def append_links_to_readme(readme_path, notes_metadata):
    """Append per-year course link sections to the README.

    A course with a single unnamed PDF becomes one direct link; a course
    with named modules becomes a bullet with one sub-link per module.

    Args:
        readme_path: Path of the README file to append to.
        notes_metadata: Nested dict as returned by ``collect_notes_metadata``.
    """
    with open(readme_path, "a", encoding="utf-8") as readme_f:
        for year in sorted(notes_metadata):
            readme_f.write(f"\n\nYear {year}\n---\n")

            for semester in sorted(notes_metadata[year]):
                for course in sorted(notes_metadata[year][semester]):
                    course_name = notes_metadata[year][semester][course]["name"]
                    course_content = notes_metadata[year][semester][course]["content"]

                    if (len(course_content) == 1) and (course_content[0]["name"] is None):
                        # Single anonymous PDF: link the course title directly.
                        readme_f.write(f"- [{course_name}]({course_content[0]['url']})\n")
                    else:
                        # Named/multiple PDFs: one sub-bullet per module.
                        readme_f.write(f"- {course_name}\n")
                        for content in course_content:
                            readme_f.write(f"  - [{content['name']}]({content['url']})\n")


if __name__ == "__main__":
    parser = argparse.ArgumentParser(prog="README updater")
    parser.add_argument("--src-path", type=str, required=True, help="Path to the .tex sources")
    parser.add_argument("--readme-path", type=str, required=True, help="Path to the readme")
    parser.add_argument("--gh-link", type=str, required=True, help="Link to the GitHub repo")
    args = parser.parse_args()

    append_links_to_readme(
        args.readme_path,
        collect_notes_metadata(args.src_path, args.gh_link),
    )
|
||||
Reference in New Issue
Block a user