Update_Database.py

from github import Github
import json
import sys
import re

# Regex patterns for the metadata fields in the PR body's "## Project Metadata" section
category = r"- \[x\] (.+)"
name = r"Title: (.+)"
path = r"Folder: (.+)"
requirments_path = r"Requirements: (.+)"
entry = r"Script: (.+)"
arguments = r"Arguments: (.+)"
contributor = r"Contributor: (.+)"
description = r"Description: (.+)"
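
# For reference, a minimal sketch of the PR-body section these patterns are written
# against, reconstructed from the regexes above and the "## Project Metadata" marker
# used below; the concrete values are hypothetical:
#
#   ## Project Metadata
#   - [x] Automation
#   Title: Example Script
#   Folder: Example_Script
#   Requirements: Example_Script/requirements.txt
#   Script: example.py
#   Arguments: None
#   Contributor: example-user
#   Description: A short description of what the script does.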


def add_script(category, name, path, entry, arguments, requirments_path, contributor, description, pa_token):
    """ Add a contributor's script to the database """
    new_data = {category: {name: [path, entry, arguments, requirments_path, contributor, description]}}
    data_store = read_data()
    try:
        # If the category already exists, merge the new script into it
        if data_store[category]:
            data_store[category].update(new_data[category])  # Add script
    except KeyError:
        data_store.update(new_data)  # Category not found, add it together with the script

    # <----- This part is to avoid a single/double quotes error when trying to update the database with PyGithub ----->
    with open("./Master Script/datastore.json", "w") as file:
        json.dump(data_store, file)
    print("Script added to database, pushing changes to repo...")
    with open("./Master Script/datastore.json", "r") as file:
        data_store = file.readlines()[0]

    # <----- GitHub Login & Database Update ----->
    git = Github(pa_token)
    user_object = git.get_user()
    print("[+] PyGithub Login Success!")
    repo = git.get_repo("avinashkranjan/Amazing-Python-Scripts")
    datastore_fileMaster = repo.get_contents("./Master Script/datastore.json", ref="master")
    datastore_fileWebsite = repo.get_contents("./datastore.json", ref="gh-pages")
    repo.update_file(datastore_fileMaster.path, "Updated datastore.json", data_store, datastore_fileMaster.sha, branch="master")
    repo.update_file("./datastore.json", "Updated datastore.json", data_store, datastore_fileWebsite.sha, branch="gh-pages")
    print("[+] Database Updated")


def read_data():
    """ Loads datastore.json """
    with open("./Master Script/datastore.json", "r") as file:
        data = json.load(file)
    return data


def extract_from_pr_body(pr_body, pa_token):
    """ Parses the provided PR body and extracts the required project metadata """
    pr_body = [line.rstrip("\r") for line in pr_body.split("\n")]
    # A special case for contributions to the gh-pages branch and other dependency PRs:
    # if there is no "## Project Metadata" section, there is nothing to add, so just exit.
    try:
        pr_body = pr_body[pr_body.index("## Project Metadata"):]
    except ValueError:
        sys.exit()
    category_list = []
    for text in pr_body:
        # <----- Validate Category ----->
        cat = re.match(category, text)
        if cat is not None:
            category_list.append(cat[1])
        # <----- Validate Title ----->
        if (match := re.match(name, text)) is not None:
            title = match[1]
        # <----- Validate Folder ----->
        if (match := re.match(path, text)) is not None:
            folder = match[1]
        # <----- Validate requirements.txt ----->
        if (match := re.match(requirments_path, text)) is not None:
            requirements = match[1]
        # <----- Validate Script.py ----->
        if (match := re.match(entry, text)) is not None:
            script = match[1]
        # <----- Validate Arguments ----->
        if (match := re.match(arguments, text)) is not None:
            argument = match[1]
        # <----- Validate Contributor ----->
        if (match := re.match(contributor, text)) is not None:
            user = match[1]
        # <----- Validate Description ----->
        if (match := re.match(description, text)) is not None:
            desc = match[1]
    # For GitHub Actions logging
    print("<----- MetaData ----->")
    print("Categories:", category_list)
    print("Title:", title)
    print("Path:", folder)
    print("Requirements:", requirements)
    print("Entry:", script)
    print("Arguments:", argument)
    print("Contributor:", user)
    print("Description:", desc)
    print("<----- ----- ----->")
    # The loop is for scripts that will be added to multiple categories.
    for cat in category_list:
        add_script(cat, title, folder, script, argument, requirements, user, desc, pa_token)


# Entry point: argv[1] is the PR body, argv[2] is the GitHub personal access token (pa_token)
if __name__ == "__main__":
    data = sys.argv[1]
    extract_from_pr_body(data, sys.argv[2])
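
# Typical invocation (e.g. from a GitHub Actions step); the placeholders are assumptions,
# the script itself only reads the two positional arguments:
#   python Update_Database.py "<pull request body>" "<personal access token>"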