A few additions from the backlog and work on the scripts

This commit is contained in:
Trilarion
2019-09-16 12:37:52 +02:00
parent 49424eb857
commit 4fab32b287
32 changed files with 359 additions and 243 deletions

View File

@ -13,7 +13,9 @@ import http.client
import datetime
import json
import textwrap
from utils.osg import *
import os
import re
from utils import constants as c, utils, osg
def update_readme_and_tocs(infos):
@ -29,12 +31,12 @@ def update_readme_and_tocs(infos):
print('update readme and toc files')
# delete content of toc path
for file in os.listdir(tocs_path):
os.remove(os.path.join(tocs_path, file))
for file in os.listdir(c.tocs_path):
os.remove(os.path.join(c.tocs_path, file))
# read readme
readme_file = os.path.join(root_path, 'README.md')
readme_text = read_text(readme_file)
readme_file = os.path.join(c.root_path, 'README.md')
readme_text = utils.read_text(readme_file)
# compile regex for identifying the building blocks in the readme
regex = re.compile(r"(.*?)(\[comment\]: # \(start.*?end of autogenerated content\))(.*)", re.DOTALL)
@ -55,7 +57,7 @@ def update_readme_and_tocs(infos):
# create by category
categories_text = []
for keyword in recommended_keywords:
for keyword in osg.recommended_keywords:
infos_filtered = [x for x in infos if keyword in x['keywords']]
title = keyword.capitalize()
name = keyword.replace(' ', '-')
@ -67,7 +69,7 @@ def update_readme_and_tocs(infos):
# create by platform
platforms_text = []
for platform in valid_platforms:
for platform in osg.valid_platforms:
infos_filtered = [x for x in infos if platform in x.get('platform', [])]
title = platform
name = platform.lower()
@ -80,7 +82,7 @@ def update_readme_and_tocs(infos):
text = start + "[comment]: # (start of autogenerated content, do not edit)\n" + tocs_text + "\n[comment]: # (end of autogenerated content)" + end
# write to readme
write_text(readme_file, text)
utils.write_text(readme_file, text)
def create_toc(title, file, entries):
@ -88,7 +90,7 @@ def create_toc(title, file, entries):
"""
# file path
toc_file = os.path.join(tocs_path, file)
toc_file = os.path.join(c.tocs_path, file)
# header line
text = '[comment]: # (autogenerated content, do not edit)\n# {}\n\n'.format(title)
@ -105,7 +107,7 @@ def create_toc(title, file, entries):
text += '\n'.join(rows)
# write to toc file
write_text(toc_file, text)
utils.write_text(toc_file, text)
def check_validity_external_links():
@ -127,7 +129,7 @@ def check_validity_external_links():
ignored_urls = ('https://git.tukaani.org/xz.git')
# iterate over all entries
for _, entry_path, content in entry_iterator(games_path):
for _, entry_path, content in osg.entry_iterator():
# apply regex
matches = regex.findall(content)
@ -169,12 +171,12 @@ def check_template_leftovers():
print('check for template leftovers')
# load template and get all lines
text = read_text(os.path.join(root_path, 'template.md'))
text = utils.read_text(os.path.join(c.root_path, 'template.md'))
text = text.split('\n')
check_strings = [x for x in text if x and not x.startswith('##')]
# iterate over all entries
for _, entry_path, content in entry_iterator(games_path):
for _, entry_path, content in osg.entry_iterator():
for check_string in check_strings:
if content.find(check_string) >= 0:
@ -196,7 +198,7 @@ def fix_entries():
regex = re.compile(r"(.*)- Keywords:([^\n]*)(.*)", re.DOTALL)
# iterate over all entries
for entry, entry_path, content in entry_iterator(games_path):
for entry, entry_path, content in osg.entry_iterator():
# match with regex
matches = regex.findall(content)
@ -211,7 +213,7 @@ def fix_entries():
elements = list(set(elements))
# get category out
for keyword in recommended_keywords:
for keyword in osg.recommended_keywords:
if keyword in elements:
elements.remove(keyword)
category = keyword
@ -243,13 +245,13 @@ def fix_entries():
if new_content != content:
# write again
write_text(entry_path, new_content)
utils.write_text(entry_path, new_content)
# code dependencies
regex = re.compile(r"(.*)- Code dependencies:([^\n]*)(.*)", re.DOTALL)
# iterate over all entries
for entry, entry_path, content in entry_iterator(games_path):
for entry, entry_path, content in osg.entry_iterator():
# match with regex
matches = regex.findall(content)
@ -279,13 +281,13 @@ def fix_entries():
if new_content != content:
# write again
write_text(entry_path, new_content)
utils.write_text(entry_path, new_content)
# build systems
regex = re.compile(r"(.*)- Build system:([^\n]*)(.*)", re.DOTALL)
# iterate over all entries
for entry, entry_path, content in entry_iterator(games_path):
for entry, entry_path, content in osg.entry_iterator():
# match with regex
matches = regex.findall(content)
@ -311,7 +313,7 @@ def fix_entries():
if new_content != content:
# write again
write_text(entry_path, new_content)
utils.write_text(entry_path, new_content)
def update_statistics(infos):
@ -324,7 +326,7 @@ def update_statistics(infos):
print('update statistics')
# start the page
statistics_file = os.path.join(root_path, 'statistics.md')
statistics_file = os.path.join(c.root_path, 'statistics.md')
statistics = '[comment]: # (autogenerated content, do not edit)\n# Statistics\n\n'
# total number
@ -524,7 +526,7 @@ def update_statistics(infos):
statistics += '##### Platforms frequency\n\n' + '\n'.join(unique_platforms) + '\n\n'
# write to statistics file
write_text(statistics_file, statistics)
utils.write_text(statistics_file, statistics)
def export_json(infos):
@ -585,9 +587,9 @@ def export_json(infos):
db['data'] = entries
# output
json_path = os.path.join(games_path, os.path.pardir, 'docs', 'data.json')
json_path = os.path.join(c.games_path, os.path.pardir, 'docs', 'data.json')
text = json.dumps(db, indent=1)
write_text(json_path, text)
utils.write_text(json_path, text)
def git_repo(repo):
@ -710,9 +712,9 @@ def export_primary_code_repositories_json():
primary_repos[k] = sorted(set(v))
# write them to tools/git
json_path = os.path.join(root_path, 'tools', 'archives.json')
json_path = os.path.join(c.root_path, 'tools', 'archives.json')
text = json.dumps(primary_repos, indent=1)
write_text(json_path, text)
utils.write_text(json_path, text)
def export_git_code_repositories_json():
@ -739,40 +741,31 @@ def export_git_code_repositories_json():
urls.sort()
# write them to tools/git
json_path = os.path.join(root_path, 'tools', 'git_repositories.json')
json_path = os.path.join(c.root_path, 'tools', 'git_repositories.json')
text = json.dumps(urls, indent=1)
write_text(json_path, text)
utils.write_text(json_path, text)
def sort_text_file(file, name):
"""
Reads a text file, splits in lines, removes duplicates, sort, writes back.
"""
text = read_text(file)
text = utils.read_text(file)
text = text.split('\n')
text = sorted(list(set(text)), key=str.casefold)
print('{} contains {} items'.format(name, len(text)))
text = '\n'.join(text)
write_text(file, text)
def strip_url(url):
    """
    Normalizes an URL for duplicate detection: removes a leading
    'http://' or 'https://' scheme, then cuts a trailing '/' and a
    trailing '.git' (each at most once, in that order).
    """
    # at most one scheme can match, so an elif chain is sufficient
    if url.startswith('http://'):
        url = url[len('http://'):]
    elif url.startswith('https://'):
        url = url[len('https://'):]
    # check '/' before '.git' so 'x.git/' collapses to 'x'
    if url.endswith('/'):
        url = url[:-len('/')]
    if url.endswith('.git'):
        url = url[:-len('.git')]
    return url
utils.write_text(file, text)
def clean_backlog(stripped_game_urls):
# read backlog and split
file = os.path.join(root_path, 'tools', 'backlog.txt')
text = read_text(file)
file = os.path.join(c.root_path, 'tools', 'backlog.txt')
text = utils.read_text(file)
text = text.split('\n')
# remove those that are in stripped_game_urls
text = [x for x in text if strip_url(x) not in stripped_game_urls]
text = [x for x in text if utils.strip_url(x) not in stripped_game_urls]
# remove duplicates and sort
text = sorted(list(set(text)), key=str.casefold)
@ -780,18 +773,14 @@ def clean_backlog(stripped_game_urls):
# join and save again
text = '\n'.join(text)
write_text(file, text)
utils.write_text(file, text)
if __name__ == "__main__":
# paths
root_path = os.path.realpath(os.path.join(os.path.dirname(__file__), os.path.pardir))
games_path = os.path.join(root_path, 'games')
tocs_path = os.path.join(games_path, 'tocs')
# backlog
game_urls = extract_links(games_path)
stripped_game_urls = [strip_url(x) for x in game_urls]
game_urls = osg.extract_links()
stripped_game_urls = [utils.strip_url(x) for x in game_urls]
clean_backlog(stripped_game_urls)
@ -802,7 +791,7 @@ if __name__ == "__main__":
fix_entries()
# assemble info
infos = assemble_infos(games_path)
infos = osg.assemble_infos()
# recount and write to readme and to tocs
update_readme_and_tocs(infos)
@ -823,5 +812,5 @@ if __name__ == "__main__":
# check_validity_external_links()
# sort backlog and rejected
# sort_text_file(os.path.join(root_path, 'tools', 'backlog.txt'), 'backlog')
sort_text_file(os.path.join(root_path, 'tools', 'rejected.txt'), 'rejected games list')
# sort_text_file(os.path.join(c.root_path, 'tools', 'backlog.txt'), 'backlog')
sort_text_file(os.path.join(c.root_path, 'tools', 'rejected.txt'), 'rejected games list')