update Github information (devs and project stars)
This commit is contained in:
18
code/custom-conversions/aatraders.json
Normal file
18
code/custom-conversions/aatraders.json
Normal file
@ -0,0 +1,18 @@
|
||||
["https://sourceforge.net/projects/aatrade/files/AATraders%200.4x/aatrade_0.40_re-release.zip/download",
|
||||
"https://sourceforge.net/projects/aatrade/files/AATraders%200.4x/aatrade_0.40.0_release.zip/download",
|
||||
"https://sourceforge.net/projects/aatrade/files/AATraders%200.3x/Alien%20Assault%20Traders%200.31.0/aatrade_0.31.0_release.zip/download",
|
||||
"https://sourceforge.net/projects/aatrade/files/AATraders%200.3x/Alien%20Assault%20Traders%200.30.3/aatrade_0.30.3_release.zip/download",
|
||||
"https://sourceforge.net/projects/aatrade/files/AATraders%200.3x/Alien%20Assault%20Traders%200.30.2/aatrade_0.30.2_release.zip/download",
|
||||
"https://sourceforge.net/projects/aatrade/files/AATraders%200.3x/Alien%20Assault%20Traders%200.30.1/aatrade_0.30.1_release.zip/download",
|
||||
"https://sourceforge.net/projects/aatrade/files/AATraders%200.3x/Alien%20Assault%20Traders%200.30/aatrade_0.30_release.zip/download",
|
||||
"https://sourceforge.net/projects/aatrade/files/AATraders%200.2x%20Final/AAtraders%200.21a%20Final%20Patch/AATRade_0.21a_final.zip/download",
|
||||
"https://sourceforge.net/projects/aatrade/files/AATraders%200.2x%20Final/AATraders%200.21%20Release/aatrade-0.21.tar.gz/download",
|
||||
"https://sourceforge.net/projects/aatrade/files/AATraders%200.2x%20Final/AATrade%200.20%20Release/aatrade-0.20.tar.gz/download",
|
||||
"https://sourceforge.net/projects/aatrade/files/AATraders%200.1x%20Final/Release%200.14.1%20w_Profiles/aatrade-0.14.1.tar.gz/download",
|
||||
"https://sourceforge.net/projects/aatrade/files/AATraders%200.1x%20Final/Release%200.14%20w_Profiles/aatrade-0.14.tar.gz/download",
|
||||
"https://sourceforge.net/projects/aatrade/files/AATraders%200.1x%20Final/Release%20v0.13/aatrade-0.13.tar.gz/download",
|
||||
"https://sourceforge.net/projects/aatrade/files/AATraders%200.1x%20Final/Recommended%20Release%20v0.12/aatrade-0.12.tar.gz/download",
|
||||
"https://sourceforge.net/projects/aatrade/files/AATraders%200.1x%20Final/Release%20v0.11/aatrade-0.11.tar.gz/download",
|
||||
"https://sourceforge.net/projects/aatrade/files/AATraders%200.1x%20Final/Initial%20Release%20v0.10/aatrade-0.10.tar.gz/download",
|
||||
"https://sourceforge.net/projects/aatrade/files/OldFiles/aatrade0.21final.zip/download",
|
||||
"https://sourceforge.net/projects/aatrade/files/OldFiles/aatrade0.21patched.zip/download"]
|
156
code/custom-conversions/aatraders_source_release_to_git.py
Normal file
156
code/custom-conversions/aatraders_source_release_to_git.py
Normal file
@ -0,0 +1,156 @@
|
||||
"""
|
||||
Downloads source releases from Sourceforge and puts them into a git repository
|
||||
"""
|
||||
|
||||
import json
|
||||
import datetime
|
||||
from utils.utils import *
|
||||
|
||||
def special_aatrade_package_extraction(source):
|
||||
"""
|
||||
Unpacks "aatrade_packages".
|
||||
"""
|
||||
files = os.listdir(source)
|
||||
if any([x.startswith('aatrade_package') for x in files]):
|
||||
# we got the special case
|
||||
print('aatrade package extraction of {}'.format(source))
|
||||
|
||||
# first delete all, that do not begin with the package name
|
||||
for file in files:
|
||||
if not file.startswith('aatrade_package'):
|
||||
os.remove(os.path.join(source, file))
|
||||
|
||||
# second extract all those with are left, removing them too
|
||||
files = os.listdir(source)
|
||||
for file in files:
|
||||
try:
|
||||
extract_archive(os.path.join(source, file), source, 'tar')
|
||||
except:
|
||||
extract_archive(os.path.join(source, file), source, 'zip')
|
||||
os.remove(os.path.join(source, file))
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
# base path is the directory containing this file
|
||||
base_path = os.path.abspath(os.path.dirname(__file__))
|
||||
print('base path={}'.format(base_path))
|
||||
|
||||
# recreate archive path
|
||||
archive_path = os.path.join(base_path, 'downloads')
|
||||
if not os.path.exists(archive_path):
|
||||
os.mkdir(archive_path)
|
||||
|
||||
# load source releases urls
|
||||
with open(os.path.join(base_path, 'aatraders.json'), 'r') as f:
|
||||
urls = json.load(f)
|
||||
print('will process {} urls'.format(len(urls)))
|
||||
if len(urls) != len(set(urls)):
|
||||
raise RuntimeError("urls list contains duplicates")
|
||||
|
||||
# determine file archives from urls
|
||||
archives = [x.split('/')[-2] for x in urls]
|
||||
if len(archives) != len(set(archives)):
|
||||
raise RuntimeError("files with duplicate archives, cannot deal with that")
|
||||
|
||||
# determine version from file name
|
||||
versions = [determine_archive_version_generic(x, leading_terms=['aatrade_', 'aatrade-', 'aatrade'], trailing_terms=['.zip', '.tar.gz', '_release']) for x in archives]
|
||||
for version in versions:
|
||||
print(version)
|
||||
|
||||
# extend archives to full paths
|
||||
archives = [os.path.join(archive_path, x) for x in archives]
|
||||
|
||||
# download them
|
||||
print('download source releases')
|
||||
for url, destination in zip(urls, archives):
|
||||
# only if not yet existing
|
||||
if os.path.exists(destination):
|
||||
continue
|
||||
# download
|
||||
print(' download {}'.format(os.path.basename(destination)))
|
||||
download_url(url, destination)
|
||||
|
||||
# extract them
|
||||
print('extract downloaded archives')
|
||||
extracted_archives = [x + '-extracted' for x in archives]
|
||||
for archive, extracted_archive in zip(archives, extracted_archives):
|
||||
print(' extract {}'.format(os.path.basename(archive)))
|
||||
# only if not yet existing
|
||||
if os.path.exists(extracted_archive):
|
||||
continue
|
||||
os.mkdir(extracted_archive)
|
||||
# extract
|
||||
extract_archive(archive, extracted_archive, detect_archive_type(archive))
|
||||
|
||||
# go up in unzipped archives until the very first non-empty folder
|
||||
extracted_archives = [strip_wrapped_folders(x) for x in extracted_archives]
|
||||
|
||||
# special 'aatrade_packageX' treatment
|
||||
for extracted_archive in extracted_archives:
|
||||
special_aatrade_package_extraction(extracted_archive)
|
||||
|
||||
# calculate size of folder
|
||||
sizes = [folder_size(x) for x in extracted_archives]
|
||||
|
||||
# determine date
|
||||
dates = [determine_latest_last_modified_date(x) for x in extracted_archives]
|
||||
dates_strings = [datetime.datetime.fromtimestamp(x).strftime('%Y-%m-%d') for x in dates]
|
||||
# if len(dates_strings) != len(set(dates_strings)):
|
||||
# raise RuntimeError("Some on the same day, cannot cope with that")
|
||||
|
||||
# gather all important stuff in one list and sort by dates and throw those out where size is not in range
|
||||
db = list(zip(urls, extracted_archives, versions, dates, dates_strings, sizes))
|
||||
db.sort(key=lambda x:x[3])
|
||||
|
||||
size_range = [5e6, float("inf")] # set to None if not desired
|
||||
if size_range:
|
||||
db = [x for x in db if size_range[0] <= x[5] <= size_range[1]]
|
||||
|
||||
print('proposed order')
|
||||
for url, _, version, _, date, size in db:
|
||||
print(' date={} version={} size={}'.format(date, version, size))
|
||||
|
||||
# git init
|
||||
git_path = os.path.join(base_path, 'aatrade')
|
||||
if os.path.exists(git_path):
|
||||
shutil.rmtree(git_path)
|
||||
os.mkdir(git_path)
|
||||
os.chdir(git_path)
|
||||
subprocess_run(['git', 'init'])
|
||||
subprocess_run(['git', 'config', 'user.name', 'Trilarion'])
|
||||
subprocess_run(['git', 'config', 'user.email', 'Trilarion@users.noreply.gitlab.com'])
|
||||
|
||||
# now process revision by revision
|
||||
print('process revisions')
|
||||
git_author = 'akapanamajack, tarnus <akapanamajack_tarnus@user.sourceforge.net>'
|
||||
for url, archive_path, version, _, date, _ in db:
|
||||
print(' process version={}'.format(version))
|
||||
|
||||
# clear git path without deleting .git
|
||||
print(' clear git')
|
||||
for item in os.listdir(git_path):
|
||||
# ignore '.git
|
||||
if item == '.git':
|
||||
continue
|
||||
item = os.path.join(git_path, item)
|
||||
if os.path.isdir(item):
|
||||
shutil.rmtree(item)
|
||||
else:
|
||||
os.remove(item)
|
||||
|
||||
# copy unpacked source files to git path
|
||||
print('copy to git')
|
||||
copy_tree(archive_path, git_path)
|
||||
|
||||
# update the git index (add unstaged, remove deleted, ...)
|
||||
print('git add')
|
||||
os.chdir(git_path)
|
||||
subprocess_run(['git', 'add', '--all'])
|
||||
|
||||
# perform the commit
|
||||
print('git commit')
|
||||
os.chdir(git_path)
|
||||
message = 'version {} ({}) on {}'.format(version, url, date)
|
||||
print(' message "{}"'.format(message))
|
||||
subprocess_run(['git', 'commit', '--message={}'.format(message), '--author={}'.format(git_author), '--date={}'.format(date)])
|
103
code/custom-conversions/dfend_reloaded_source_releases_to_git.py
Normal file
103
code/custom-conversions/dfend_reloaded_source_releases_to_git.py
Normal file
@ -0,0 +1,103 @@
|
||||
"""
|
||||
Converts the source releases of D-Fend Reloaded to a Git.
|
||||
"""
|
||||
|
||||
import sys
|
||||
import datetime
|
||||
import re
|
||||
from utils.utils import *
|
||||
|
||||
|
||||
def subprocess_run(cmd):
|
||||
"""
|
||||
|
||||
"""
|
||||
result = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||
if result.returncode:
|
||||
print("error {} in call {}".format(result.returncode, cmd))
|
||||
print(result.stderr.decode('ascii'))
|
||||
sys.exit(-1)
|
||||
else:
|
||||
print(' output: {}'.format(result.stdout.decode('ascii')))
|
||||
|
||||
def single_release(zip):
|
||||
"""
|
||||
|
||||
"""
|
||||
|
||||
# get version
|
||||
matches = version_regex.findall(zip)
|
||||
version = matches[0]
|
||||
print(' version {}'.format(version))
|
||||
ftp_link = 'https://sourceforge.net/projects/dfendreloaded/files/D-Fend%20Reloaded/D-Fend%20Reloaded%20{}/'.format(version)
|
||||
|
||||
# clear git path without deleting '.git'
|
||||
for item in os.listdir(git_path):
|
||||
# ignore '.git
|
||||
if item == '.git':
|
||||
continue
|
||||
item = os.path.join(git_path, item)
|
||||
if os.path.isdir(item):
|
||||
shutil.rmtree(item)
|
||||
else:
|
||||
os.remove(item)
|
||||
|
||||
# unpack zip to git path
|
||||
# with zipfile.ZipFile(os.path.join(source_releases_path, zip), 'r') as zipf:
|
||||
# zipf.extractall(git_path)
|
||||
unzip(os.path.join(source_releases_path, zip), git_path)
|
||||
|
||||
# get date from the files (latest of last modified)
|
||||
latest_last_modified = 0
|
||||
for dirpath, dirnames, filenames in os.walk(git_path):
|
||||
if dirpath.startswith(os.path.join(git_path, '.git')):
|
||||
# not in '.git'
|
||||
continue
|
||||
for filename in filenames:
|
||||
filepath = os.path.join(dirpath, filename)
|
||||
lastmodified = os.path.getmtime(filepath)
|
||||
if lastmodified > latest_last_modified:
|
||||
latest_last_modified = lastmodified
|
||||
# print('{}, {}'.format(filepath, datetime.datetime.fromtimestamp(latest_last_modified).strftime('%Y-%m-%d')))
|
||||
|
||||
original_date = datetime.datetime.fromtimestamp(latest_last_modified).strftime('%Y-%m-%d')
|
||||
print(' last modified: {}'.format(original_date))
|
||||
|
||||
# update the git index (add unstaged, remove deleted, ...)
|
||||
print('git add')
|
||||
os.chdir(git_path)
|
||||
subprocess_run(['git', 'add', '--all'])
|
||||
|
||||
# perform the commit
|
||||
print('git commit')
|
||||
os.chdir(git_path)
|
||||
message = 'version {} from {} ({})'.format(version, original_date, ftp_link)
|
||||
print(' message "{}"'.format(message))
|
||||
subprocess_run(['git', 'commit', '--message={}'.format(message), '--author={}'.format(author), '--date={}'.format(original_date)])
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
||||
# general properties
|
||||
author = 'alexanderherzog <alexanderherzog@users.sourceforge.net>'
|
||||
version_regex = re.compile(r"Reloaded-(.*)-", re.MULTILINE)
|
||||
|
||||
# get paths
|
||||
source_releases_path = sys.argv[1]
|
||||
git_path = os.path.join(source_releases_path, 'git')
|
||||
|
||||
# recreate git path
|
||||
recreate_directory(git_path)
|
||||
os.chdir(git_path)
|
||||
subprocess_run('git init')
|
||||
|
||||
# get all files in the source releases path and sort them
|
||||
zips = os.listdir(source_releases_path)
|
||||
zips = [file for file in zips if os.path.isfile(os.path.join(source_releases_path, file))]
|
||||
print('found {} source releases'.format(len(zips)))
|
||||
zips.sort()
|
||||
|
||||
# iterate over them and do revisions
|
||||
for counter, zip in enumerate(zips):
|
||||
print('{}/{}'.format(counter, len(zips)))
|
||||
single_release(zip)
|
502
code/custom-conversions/dungeon_crawl_source_releases_to_git.py
Normal file
502
code/custom-conversions/dungeon_crawl_source_releases_to_git.py
Normal file
@ -0,0 +1,502 @@
|
||||
"""
|
||||
Helps me with importing source revisions into Git
|
||||
"""
|
||||
|
||||
import shutil
|
||||
import os
|
||||
import subprocess
|
||||
import tarfile
|
||||
import zipfile
|
||||
import distutils.dir_util
|
||||
import sys
|
||||
import urllib.request
|
||||
import tempfile
|
||||
import datetime
|
||||
|
||||
|
||||
def extract_sources(source_path, type, destination_path):
|
||||
"""
|
||||
Extracts a zip, tar, ... to a destination path.
|
||||
"""
|
||||
if type == '.tbz2':
|
||||
tar = tarfile.open(source_path, 'r')
|
||||
os.chdir(destination_path)
|
||||
tar.extractall()
|
||||
elif type == '.zip':
|
||||
with zipfile.ZipFile(source_path, 'r') as zip:
|
||||
zip.extractall(destination_path)
|
||||
|
||||
def subprocess_run(cmd):
|
||||
"""
|
||||
|
||||
"""
|
||||
result = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||
if result.returncode:
|
||||
print("error {} in call {}".format(result.returncode, cmd))
|
||||
print(result.stderr.decode('ascii'))
|
||||
sys.exit(-1)
|
||||
else:
|
||||
print(' output: {}'.format(result.stdout.decode('ascii')))
|
||||
|
||||
def single_revision():
|
||||
"""
|
||||
|
||||
"""
|
||||
# remove temp path completely and create again
|
||||
print('clear temp')
|
||||
if os.path.isdir(temp_path):
|
||||
shutil.rmtree(temp_path)
|
||||
os.mkdir(temp_path)
|
||||
|
||||
# download archive
|
||||
print('download archive from ftp')
|
||||
with urllib.request.urlopen(ftp_link) as response:
|
||||
with tempfile.NamedTemporaryFile(delete=False) as tmp_file:
|
||||
shutil.copyfileobj(response, tmp_file)
|
||||
|
||||
# unpack source files and delete archive
|
||||
print('extract {} to temp'.format(os.path.basename(ftp_link)))
|
||||
extract_sources(tmp_file.name, os.path.splitext(ftp_link)[1], temp_path)
|
||||
os.remove(tmp_file.name)
|
||||
|
||||
# we need to go up in temp_path until we find the first non-empty directory
|
||||
nonempty_temp_path = temp_path
|
||||
names = os.listdir(nonempty_temp_path)
|
||||
while len(names) == 1:
|
||||
nonempty_temp_path = os.path.join(nonempty_temp_path, names[0])
|
||||
names = os.listdir(nonempty_temp_path)
|
||||
print(' working in "{}" relative to temp'.format(os.path.relpath(nonempty_temp_path, temp_path)))
|
||||
|
||||
# if no original date is indicated, get it from the files (latest of last modified)
|
||||
global original_date
|
||||
if original_date is None:
|
||||
latest_last_modified = 0
|
||||
for dirpath, dirnames, filenames in os.walk(nonempty_temp_path):
|
||||
for filename in filenames:
|
||||
filepath = os.path.join(dirpath, filename)
|
||||
lastmodified = os.path.getmtime(filepath)
|
||||
if lastmodified > latest_last_modified:
|
||||
latest_last_modified = lastmodified
|
||||
original_date = datetime.datetime.fromtimestamp(latest_last_modified).strftime('%Y-%m-%d')
|
||||
print(' extracted original date from files: {}'.format(original_date))
|
||||
|
||||
# clear git path without deleting '.git'
|
||||
print('clear git')
|
||||
for item in os.listdir(git_path):
|
||||
# ignore '.git
|
||||
if item == '.git':
|
||||
continue
|
||||
item = os.path.join(git_path, item)
|
||||
if os.path.isdir(item):
|
||||
shutil.rmtree(item)
|
||||
else:
|
||||
os.remove(item)
|
||||
|
||||
# copy unpacked source files to git path
|
||||
print('copy to git')
|
||||
distutils.dir_util.copy_tree(nonempty_temp_path, git_path)
|
||||
|
||||
# update the git index (add unstaged, remove deleted, ...)
|
||||
print('git add')
|
||||
os.chdir(git_path)
|
||||
subprocess_run(['git', 'add', '--all'])
|
||||
|
||||
# perform the commit
|
||||
print('git commit')
|
||||
os.chdir(git_path)
|
||||
message = 'version {} ({}) on {}'.format(version, ftp_link, original_date)
|
||||
print(' message "{}"'.format(message))
|
||||
# subprocess_run(['git', 'commit', '--message={}'.format(message), '--author={}'.format(author), '--date={}'.format(original_date), '--dry-run'])
|
||||
subprocess_run(['git', 'commit', '--message={}'.format(message), '--author={}'.format(author), '--date={}'.format(original_date)])
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
||||
git_path = r'..\crawl' # must be initialized with 'git init' before
|
||||
temp_path = r'..\temp'
|
||||
author = 'Linley Henzell et al 1997-2005 <www.dungeoncrawl.org>' # is used for all commits
|
||||
|
||||
|
||||
# 1.
|
||||
# ftp_link = 'ftp://ftp.dungeoncrawl.org/final/1.1.x/src/dc110f-src.tbz2'
|
||||
# version = '110f'
|
||||
# original_date = '1997-10-04' # format yyyy-mm-dd, according to versions.txt in version 400b26
|
||||
# single_revision()
|
||||
|
||||
# 2.
|
||||
# ftp_link = 'ftp://ftp.dungeoncrawl.org/final/2.7.x/src/dc270f-src.tbz2'
|
||||
# version = '270f'
|
||||
# original_date = '1998-09-22'
|
||||
# single_revision()
|
||||
|
||||
# 3.
|
||||
# ftp_link = 'ftp://ftp.dungeoncrawl.org/final/2.7.x/src/dc272f-src.tbz2'
|
||||
# version = '272f'
|
||||
# original_date = '1998-10-02'
|
||||
# single_revision()
|
||||
|
||||
# 4.
|
||||
# ftp_link = 'ftp://ftp.dungeoncrawl.org/final/2.8.x/src/dc280f-src.tbz2'
|
||||
# version = '280f'
|
||||
# original_date = '1998-10-18'
|
||||
# single_revision()
|
||||
|
||||
# 5.
|
||||
# ftp_link = 'ftp://ftp.dungeoncrawl.org/final/2.8.x/src/dc281f-src.tbz2'
|
||||
# version = '281f'
|
||||
# original_date = '1998-10-20'
|
||||
# single_revision()
|
||||
|
||||
# 6.
|
||||
# ftp_link = 'ftp://ftp.dungeoncrawl.org/final/2.8.x/src/dc282f-src.tbz2'
|
||||
# version = '282f'
|
||||
# original_date = '1998-10-24'
|
||||
# single_revision()
|
||||
|
||||
# 7.
|
||||
# ftp_link = 'ftp://ftp.dungeoncrawl.org/final/3.0.x/src/dc301f-src.tbz2'
|
||||
# version = '301f'
|
||||
# original_date = '1999-01-01'
|
||||
# single_revision()
|
||||
|
||||
# 8.
|
||||
# ftp_link = 'ftp://ftp.dungeoncrawl.org/final/3.0.x/src/dc302f-src.tbz2'
|
||||
# version = '302f'
|
||||
# original_date = '1999-01-04'
|
||||
# single_revision()
|
||||
|
||||
# 9.
|
||||
# ftp_link = 'ftp://ftp.dungeoncrawl.org/final/3.2.x/src/dc320f-src.tbz2'
|
||||
# version = '320f'
|
||||
# original_date = '1999-02-09'
|
||||
# single_revision()
|
||||
|
||||
# 10.
|
||||
# ftp_link = 'ftp://ftp.dungeoncrawl.org/final/3.3.x/src/dc330f-src.tbz2'
|
||||
# version = '330f'
|
||||
# original_date = '1999-03-30'
|
||||
# single_revision()
|
||||
|
||||
# 11.
|
||||
# ftp_link = 'ftp://ftp.dungeoncrawl.org/dev/3.3.x/src/cr331beta01-src.zip'
|
||||
# version = '331beta01'
|
||||
# original_date = '1999-04-09' # "Date last modified" of every file inside and of that the latest
|
||||
# single_revision()
|
||||
|
||||
# 12.
|
||||
# ftp_link = 'ftp://ftp.dungeoncrawl.org/dev/3.3.x/src/cr331beta02-src.zip'
|
||||
# version = '331beta02'
|
||||
# original_date = '1999-06-18'
|
||||
# single_revision()
|
||||
|
||||
# 13.
|
||||
# ftp_link = 'ftp://ftp.dungeoncrawl.org/dev/3.3.x/src/cr331beta03-src.zip'
|
||||
# version = '331beta03'
|
||||
# original_date = '1999-06-22'
|
||||
# single_revision()
|
||||
|
||||
# 14.
|
||||
# ftp_link = 'ftp://ftp.dungeoncrawl.org/dev/3.3.x/src/cr331beta04-src.zip'
|
||||
# version = '331beta04'
|
||||
# original_date = '1999-08-08'
|
||||
# single_revision()
|
||||
|
||||
# 15.
|
||||
# ftp_link = 'ftp://ftp.dungeoncrawl.org/dev/3.3.x/src/cr331beta05-src.zip'
|
||||
# version = '331beta05'
|
||||
# original_date = '1999-08-27'
|
||||
# single_revision()
|
||||
|
||||
# 16.
|
||||
# ftp_link = 'ftp://ftp.dungeoncrawl.org/dev/3.3.x/src/cr331beta06-src.zip'
|
||||
# version = '331beta06'
|
||||
# original_date = '1999-09-12'
|
||||
# single_revision()
|
||||
|
||||
# 17.
|
||||
# ftp_link = 'ftp://ftp.dungeoncrawl.org/dev/3.3.x/src/cr331beta07-src.zip'
|
||||
# version = '331beta07'
|
||||
# original_date = '1999-09-24'
|
||||
# single_revision()
|
||||
|
||||
# 18.
|
||||
# ftp_link = 'ftp://ftp.dungeoncrawl.org/dev/3.3.x/src/cr331beta08-src.zip'
|
||||
# version = '331beta08'
|
||||
# original_date = '1999-09-28'
|
||||
# single_revision()
|
||||
|
||||
# 19.
|
||||
# ftp_link = 'ftp://ftp.dungeoncrawl.org/dev/3.3.x/src/cr331beta09-src.zip'
|
||||
# version = '331beta09'
|
||||
# original_date = '1999-10-02'
|
||||
# single_revision()
|
||||
|
||||
# 20.
|
||||
# ftp_link = 'ftp://ftp.dungeoncrawl.org/dev/orphan/src/cr1999oct12src.zip'
|
||||
# version = 'cr1999oct12'
|
||||
# original_date = '1999-10-12'
|
||||
# single_revision()
|
||||
|
||||
# 21.
|
||||
# ftp_link = 'ftp://ftp.dungeoncrawl.org/dev/orphan/src/cr1999oct15src.zip'
|
||||
# version = 'cr1999oct15'
|
||||
# original_date = '1999-10-15'
|
||||
# single_revision()
|
||||
|
||||
# 22.
|
||||
# ftp_link = 'ftp://ftp.dungeoncrawl.org/dev/orphan/src/cr1999nov18src.zip'
|
||||
# version = 'cr1999nov18'
|
||||
# original_date = '1999-11-18'
|
||||
# single_revision()
|
||||
|
||||
# 23.
|
||||
# ftp_link = 'ftp://ftp.dungeoncrawl.org/dev/orphan/src/cr1999nov23src.zip'
|
||||
# version = 'cr1999nov23'
|
||||
# original_date = '1999-11-23'
|
||||
# single_revision()
|
||||
|
||||
# 24.
|
||||
# ftp_link = 'ftp://ftp.dungeoncrawl.org/dev/orphan/src/cr1999dec27src.zip'
|
||||
# version = 'cr1999dec27'
|
||||
# original_date = '1999-12-27'
|
||||
# single_revision()
|
||||
|
||||
# 25.
|
||||
# ftp_link = 'ftp://ftp.dungeoncrawl.org/dev/orphan/src/cr1999dec30src.zip'
|
||||
# version = 'cr1999dec30'
|
||||
# original_date = '1999-12-30'
|
||||
# single_revision()
|
||||
|
||||
# 26.
|
||||
# ftp_link = 'ftp://ftp.dungeoncrawl.org/dev/orphan/src/cr1999dec31src.zip'
|
||||
# version = 'cr1999dec31'
|
||||
# original_date = '1999-12-31'
|
||||
# single_revision()
|
||||
|
||||
# 27.
|
||||
# ftp_link = 'ftp://ftp.dungeoncrawl.org/dev/orphan/src/cr2000jan10src.zip'
|
||||
# version = 'cr2000jan10'
|
||||
# original_date = '2000-01-10'
|
||||
# single_revision()
|
||||
|
||||
# 28.
|
||||
# ftp_link = 'ftp://ftp.dungeoncrawl.org/dev/orphan/src/cr2000feb23src.zip'
|
||||
# version = 'cr2000feb23'
|
||||
# original_date = '2000-02-23'
|
||||
# single_revision()
|
||||
|
||||
# 29.
|
||||
# ftp_link = 'ftp://ftp.dungeoncrawl.org/dev/orphan/src/cr2000mar06src.zip'
|
||||
# version = 'cr2000mar06'
|
||||
# original_date = '2000-03-06'
|
||||
# single_revision()
|
||||
|
||||
# 30.
|
||||
# ftp_link = 'ftp://ftp.dungeoncrawl.org/dev/orphan/src/cr2000jun19src.zip'
|
||||
# version = 'cr2000jun19src'
|
||||
# original_date = '2000-06-19'
|
||||
# single_revision()
|
||||
|
||||
# 31.
|
||||
# ftp_link = 'ftp://ftp.dungeoncrawl.org/dev/orphan/src/cr2000jun20src.zip'
|
||||
# version = 'cr2000jun20'
|
||||
# original_date = '2000-06-20'
|
||||
# single_revision()
|
||||
|
||||
# 32.
|
||||
# ftp_link = 'ftp://ftp.dungeoncrawl.org/dev/orphan/src/cr2000jun22src.zip'
|
||||
# version = 'cr2000jun22'
|
||||
# original_date = '2000-06-22'
|
||||
# single_revision()
|
||||
|
||||
# 33.
|
||||
# ftp_link = 'ftp://ftp.dungeoncrawl.org/dev/orphan/src/cr2000jul22src.zip'
|
||||
# version = 'cr2000jul22'
|
||||
# original_date = '2000-07-22'
|
||||
# single_revision()
|
||||
|
||||
# 34.
|
||||
# ftp_link = 'ftp://ftp.dungeoncrawl.org/dev/orphan/src/cr2000aug01src.zip'
|
||||
# version = 'cr2000aug01'
|
||||
# original_date = '2000-08-01'
|
||||
# single_revision()
|
||||
|
||||
# 35.
|
||||
# ftp_link = 'ftp://ftp.dungeoncrawl.org/dev/orphan/src/cr2000aug13src.zip'
|
||||
# version = 'cr2000aug13'
|
||||
# original_date = '2000-08-13'
|
||||
# single_revision()
|
||||
|
||||
# 36.
|
||||
# ftp_link = 'ftp://ftp.dungeoncrawl.org/dev/orphan/src/cr2000oct30src.zip'
|
||||
# version = 'cr2000oct30'
|
||||
# original_date = '2000-10-30'
|
||||
# single_revision()
|
||||
|
||||
# 37.
|
||||
# ftp_link = 'ftp://ftp.dungeoncrawl.org/dev/4.0.x/src/cr400beta01-src.tbz2'
|
||||
# version = '400beta01'
|
||||
# original_date = None # 2000-12-20
|
||||
# single_revision()
|
||||
|
||||
# 38.
|
||||
# ftp_link = 'ftp://ftp.dungeoncrawl.org/dev/4.0.x/src/cr400beta02-src.tbz2'
|
||||
# version = '400beta02'
|
||||
# original_date = None # 2000-12-22
|
||||
# single_revision()
|
||||
|
||||
# 39.
|
||||
# ftp_link = 'ftp://ftp.dungeoncrawl.org/dev/4.0.x/src/cr400beta03-src.tbz2'
|
||||
# version = '400beta03'
|
||||
# original_date = None # 2000-12-29
|
||||
# single_revision()
|
||||
|
||||
# 40.
|
||||
# ftp_link = 'ftp://ftp.dungeoncrawl.org/dev/4.0.x/src/cr400beta04-src.tbz2'
|
||||
# version = '400beta04'
|
||||
# original_date = None # 2001-01-11
|
||||
# single_revision()
|
||||
|
||||
# 41.
|
||||
# ftp_link = 'ftp://ftp.dungeoncrawl.org/dev/4.0.x/src/cr400beta06-src.tbz2'
|
||||
# version = '400beta06'
|
||||
# original_date = None # 2001-01-23
|
||||
# single_revision()
|
||||
|
||||
# 42.
|
||||
# ftp_link = 'ftp://ftp.dungeoncrawl.org/dev/4.0.x/src/cr400beta07-src.tbz2'
|
||||
# version = '400beta07'
|
||||
# original_date = None # 2001-01-29
|
||||
# single_revision()
|
||||
|
||||
# 43.
|
||||
# ftp_link = 'ftp://ftp.dungeoncrawl.org/dev/4.0.x/src/cr400beta08-src.tbz2'
|
||||
# version = 'cr400beta08'
|
||||
# original_date = None # 2001-02-20
|
||||
# single_revision()
|
||||
|
||||
# 44.
|
||||
# ftp_link = 'ftp://ftp.dungeoncrawl.org/dev/4.0.x/src/cr400beta09-src.tbz2'
|
||||
# version = 'cr400beta09'
|
||||
# original_date = None # 2001-03-06
|
||||
# single_revision()
|
||||
|
||||
# 45.
|
||||
# ftp_link = 'ftp://ftp.dungeoncrawl.org/dev/4.0.x/src/cr400beta10-src.tbz2'
|
||||
# version = 'cr400beta10'
|
||||
# original_date = None # 2001-03-13
|
||||
# single_revision()
|
||||
|
||||
# 46.
|
||||
# ftp_link = 'ftp://ftp.dungeoncrawl.org/dev/4.0.x/src/cr400beta10b-src.tbz2'
|
||||
# version = 'cr400beta10b'
|
||||
# original_date = None # 2001-03-14
|
||||
# single_revision()
|
||||
|
||||
# 47.
|
||||
# ftp_link = 'ftp://ftp.dungeoncrawl.org/dev/4.0.x/src/cr400beta10c-src.tbz2'
|
||||
# version = 'cr400beta10c'
|
||||
# original_date = None # 2001-03-15
|
||||
# single_revision()
|
||||
|
||||
# 48.
|
||||
# ftp_link = 'ftp://ftp.dungeoncrawl.org/dev/4.0.x/src/cr400beta10d-src.tbz2'
|
||||
# version = '400beta10d'
|
||||
# original_date = None # 2001-03-18
|
||||
# single_revision()
|
||||
|
||||
# 49.
|
||||
# ftp_link = 'ftp://ftp.dungeoncrawl.org/dev/4.0.x/src/cr400beta11-src.tbz2'
|
||||
# version = '400beta11'
|
||||
# original_date = None # 2001-03-21
|
||||
# single_revision()
|
||||
|
||||
# 50.
|
||||
# ftp_link = 'ftp://ftp.dungeoncrawl.org/dev/4.0.x/src/cr400beta12-src.tbz2'
|
||||
# version = '400beta12'
|
||||
# original_date = None # 2001-04-02
|
||||
# single_revision()
|
||||
|
||||
# 51.
|
||||
# ftp_link = 'ftp://ftp.dungeoncrawl.org/dev/4.0.x/src/cr400beta13-src.tbz2'
|
||||
# version = '400beta13'
|
||||
# original_date = None # 2001-04-09
|
||||
# single_revision()
|
||||
|
||||
# 52.
|
||||
# ftp_link = 'ftp://ftp.dungeoncrawl.org/dev/4.0.x/src/cr400beta14-src.tbz2'
|
||||
# version = '400beta14'
|
||||
# original_date = None # 2001-04-20
|
||||
# single_revision()
|
||||
|
||||
# 53.
|
||||
# ftp_link = 'ftp://ftp.dungeoncrawl.org/dev/4.0.x/src/cr400beta15-src.tbz2'
|
||||
# version = '400beta15'
|
||||
# original_date = None # 2001-04-25
|
||||
# single_revision()
|
||||
|
||||
# 54.
|
||||
# ftp_link = 'ftp://ftp.dungeoncrawl.org/dev/4.0.x/src/cr400beta16-src.tbz2'
|
||||
# version = '400beta16'
|
||||
# original_date = None # 2001-05-11
|
||||
# single_revision()
|
||||
|
||||
# 55.
|
||||
# ftp_link = 'ftp://ftp.dungeoncrawl.org/dev/4.0.x/src/cr400beta17-src.tbz2'
|
||||
# version = '400beta17'
|
||||
# original_date = None # 2001-06-01
|
||||
# single_revision()
|
||||
|
||||
# 56.
|
||||
# ftp_link = 'ftp://ftp.dungeoncrawl.org/dev/4.0.x/src/cr400beta18-src.tbz2'
|
||||
# version = '400beta18'
|
||||
# original_date = None # 2001-08-04
|
||||
# single_revision()
|
||||
|
||||
# 57.
|
||||
# ftp_link = 'ftp://ftp.dungeoncrawl.org/dev/4.0.x/src/cr400beta19-src.tbz2'
|
||||
# version = '400beta19'
|
||||
# original_date = None # 2001-08-10
|
||||
# single_revision()
|
||||
|
||||
# 58.
|
||||
# ftp_link = 'ftp://ftp.dungeoncrawl.org/dev/4.0.x/src/cr400beta20-src.tbz2'
|
||||
# version = '400beta20'
|
||||
# original_date = None # 2001-11-05
|
||||
# single_revision()
|
||||
|
||||
# 59.
|
||||
# ftp_link = 'ftp://ftp.dungeoncrawl.org/dev/4.0.x/src/cr400beta22-src.tbz2'
|
||||
# version = '400beta22'
|
||||
# original_date = None # 2001-12-21
|
||||
# single_revision()
|
||||
|
||||
# 60.
|
||||
# ftp_link = 'ftp://ftp.dungeoncrawl.org/dev/4.0.x/src/dc400b23-src.tbz2'
|
||||
# version = '400b23'
|
||||
# original_date = None # 2002-03-16
|
||||
# single_revision()
|
||||
|
||||
# 61.
|
||||
# ftp_link = 'ftp://ftp.dungeoncrawl.org/dev/4.0.x/src/dc400b24-src.tbz2'
|
||||
# version = '400b24'
|
||||
# original_date = '2002-06-03' # taken again from ftp://ftp.dungeoncrawl.org/dev/4.0.x/src/
|
||||
# single_revision()
|
||||
|
||||
# 62.
|
||||
# ftp_link = 'ftp://ftp.dungeoncrawl.org/dev/4.0.x/src/dc400b25-src.tbz2'
|
||||
# version = '400b25'
|
||||
# original_date = '2003-03-06'
|
||||
# single_revision()
|
||||
|
||||
# 63.
|
||||
# ftp_link = 'ftp://ftp.dungeoncrawl.org/dev/4.0.x/src/dc400a26-src.tbz2'
|
||||
# version = '400a26'
|
||||
# original_date = '2003-03-17'
|
||||
# single_revision()
|
||||
|
||||
# 64.
|
||||
# ftp_link = 'ftp://ftp.dungeoncrawl.org/dev/4.0.x/src/dc400b26-src.tbz2'
|
||||
# version = '400b26'
|
||||
# original_date = '2003-03-24'
|
||||
# single_revision()
|
502
code/custom-conversions/lechemindeladam_svn_to_git.py
Normal file
502
code/custom-conversions/lechemindeladam_svn_to_git.py
Normal file
@ -0,0 +1,502 @@
|
||||
"""
|
||||
The svn is too big to be automatically imported to git (and Github) because there are lots of large binary data components.
|
||||
Needs a manual solution.
|
||||
|
||||
TODO use git lfs migrate later on the elements
|
||||
TODO instead of svn export for every revision, checkout and then update to revision (reduced bandwith)
|
||||
"""
|
||||
|
||||
import json
|
||||
import sys
|
||||
import psutil
|
||||
|
||||
from utils.utils import *
|
||||
|
||||
|
||||
def remove_folders(base_folder, names):
|
||||
if isinstance(names, str):
|
||||
names = (names,)
|
||||
for name in names:
|
||||
folder = os.path.join(base_folder, name)
|
||||
if os.path.isdir(folder):
|
||||
shutil.rmtree(folder)
|
||||
|
||||
|
||||
def remove_files(base_folder, names):
|
||||
if isinstance(names, str):
|
||||
names = (names,)
|
||||
for name in names:
|
||||
file = os.path.join(base_folder, name)
|
||||
if os.path.isfile(file):
|
||||
os.remove(file)
|
||||
|
||||
|
||||
def special_treatment(destination, revision):
    """
    Applies repository-specific cleanup to one exported SVN revision folder
    before it is committed to git.

    Each revision range below was apparently determined by inspecting the
    historical repository layout; the folders/files named here are removed,
    flattened, or moved so that the converted git history stays small.
    Order of the steps matters (later steps operate on paths earlier steps
    may have created or flattened) — do not reorder.

    :param destination: path of the exported revision folder (modified in place)
    :param revision: SVN revision number that was exported into destination
    """
    # copy content of trunk to base (flatten the trunk/ layout of later revisions)
    if 2270 <= revision <= 2420:
        source = os.path.join(destination, 'trunk')
        if os.path.isdir(source):
            copy_tree(source, destination)
            shutil.rmtree(source)

    # copy all important files from Holyspirit/Holyspirit and delete it
    if 5 <= revision <= 330:
        source = os.path.join(destination, 'Holyspirit', 'Holyspirit')
        if os.path.isdir(source):
            # Data only exists from revision 8 onwards
            if revision >= 8:
                shutil.copytree(os.path.join(source, 'Data'), os.path.join(destination, 'Data'))
            files = [x for x in os.listdir(source) if x.endswith('.txt')]
            for file in files:
                shutil.copy(os.path.join(source, file), destination)
            # remove it
            shutil.rmtree(os.path.join(destination, 'Holyspirit'))

    # copy all important files from Holyspirit and delete it
    if 337 <= revision <= 2268:
        source = os.path.join(destination, 'Holyspirit')
        if os.path.isdir(source):
            data = os.path.join(source, 'Data')
            if os.path.isdir(data):
                # shutil.copytree(data, os.path.join(destination, 'Data'))
                shutil.move(data, destination)
            # text/config files go to a Meta folder
            target = os.path.join(destination, 'Meta')
            if not os.path.isdir(target):
                os.mkdir(target)
            files = [x for x in os.listdir(source) if x.endswith('.txt') or x.endswith('.conf') or x.endswith('.ini')]
            for file in files:
                shutil.move(os.path.join(source, file), target)
            # remove it
            shutil.rmtree(source)

    # copy data folder from HolyspiritJE and delete it
    if 2012 <= revision <= 2269:
        source = os.path.join(destination, 'HolyspiritJE')
        if os.path.isdir(source):
            data = os.path.join(source, 'Data')
            if os.path.isdir(data):
                shutil.move(data, os.path.join(destination, 'DataJE'))
            # text/config files go to a MetaJE folder
            target = os.path.join(destination, 'MetaJE')
            if not os.path.isdir(target):
                os.mkdir(target)
            files = [x for x in os.listdir(source) if x.endswith('.txt') or x.endswith('.conf') or x.endswith('.ini')]
            for file in files:
                shutil.move(os.path.join(source, file), target)
            # remove it
            shutil.rmtree(source)

    # remove Holyspirit3 folder
    if 464 <= revision <= 2268:
        remove_folders(destination, 'Holyspirit3')

    # remove Holyspirit2 folder
    if 659 <= revision <= 2268:
        remove_folders(destination, 'Holyspirit2')

    # remove Launcher build artifacts (bin/debug/release/obj)
    if 413 <= revision <= 2420:
        source = os.path.join(destination, 'Launcher')
        remove_folders(source, ('bin', 'debug', 'release', 'obj'))

    # delete all *.dll, *.exe in base folder
    if 3 <= revision <= 9:
        files = os.listdir(destination)
        for file in files:
            if file.endswith('.exe') or file.endswith('.dll'):
                os.remove(os.path.join(destination, file))

    # delete "Cross" folder
    if 42 <= revision <= 43:
        remove_folders(destination, 'Cross')

    # delete personal photos (location changed at revision 2268)
    if 374 <= revision <= 2267:
        remove_folders(destination, 'Photos')
    if 2268 <= revision <= 2420:
        source = os.path.join(destination, 'Media')
        remove_folders(source, 'Photos')

    # move the EmpireOfSteam side project out of the repository
    if 1173 <= revision <= 2420:
        folder = os.path.join(destination, 'EmpireOfSteam')
        if os.path.isdir(folder):
            # move to empire path (module-level global set in __main__)
            empire = os.path.join(empire_path, 'r{:04d}'.format(revision))
            shutil.move(folder, empire)

    # holy editor cleanup (build artifacts and generated moc.exe)
    if 1078 <= revision <= 2420:
        source = os.path.join(destination, 'HolyEditor')
        remove_folders(source, ('bin', 'release', 'debug', 'obj'))
        remove_files(source, 'moc.exe')

    # source folder cleanup
    if 939 <= revision <= 2420:
        source = os.path.join(destination, 'Source')
        remove_folders(source, 'HS')
        remove_files(source, 'HS.zip')

    # SourceM folder cleanup
    if 2110 <= revision <= 2270:
        source = os.path.join(destination, 'SourceM')
        remove_folders(source, 'HS')

    # SourceNewApi cleanup
    if 2261 <= revision <= 2269:
        source = os.path.join(destination, 'SourceNewApi')
        remove_folders(source, 'HS')

    # Autres folder cleanup (looks like accidentally committed SVN server files)
    if 1272 <= revision <= 2267:
        source = os.path.join(destination, 'Autres')
        remove_folders(source, ('conf', 'db', 'hooks', 'locks'))
        remove_files(source, ('format', 'maj.php'))
    # Media/Other folder cleanup
    if 2268 <= revision <= 2420:
        source = os.path.join(destination, 'Media', 'Other')
        remove_files(source, ('format', 'maj.php'))

    # remove Holyspirit_Demo
    if 1668 <= revision <= 2268:
        remove_folders(destination, 'Holyspirit_Demo')

    # remove Debug.rar
    if 1950 <= revision <= 2420:
        remove_files(destination, 'Debug.rar')

    # remove 3dparty folder
    if 2273 <= revision <= 2420:
        remove_folders(destination, '3dparty')

    # branches cleanup
    if 2270 <= revision <= 2420:
        remove_folders(destination, 'branches')
|
||||
|
||||
|
||||
def delete_global_excludes(folder, excludes=None):
    """
    Recursively deletes all files below folder whose names are on the exclude list.

    :param folder: root folder, searched recursively
    :param excludes: collection of file names to delete; defaults to the
        module-level global_exclude list (set in __main__)
    """
    if excludes is None:
        excludes = global_exclude
    for dirpath, dirnames, filenames in os.walk(folder):
        for file in filenames:
            if file in excludes:
                os.remove(os.path.join(dirpath, file))
|
||||
|
||||
|
||||
def delete_empty_directories(folder):
    """
    Recursively deletes all empty directories below folder.

    Walks bottom-up so that leaf directories are visited first. Note that
    os.removedirs also prunes parent directories that become empty, so whole
    empty sub-trees disappear in one call; folder itself may be removed if
    everything below it is empty.
    """
    for dirpath, dirnames, filenames in os.walk(folder, topdown=False):
        # dirnames/filenames were listed before any children were deleted,
        # so a parent that only just became empty is handled by removedirs' pruning
        if not filenames and not dirnames:
            os.removedirs(dirpath)
|
||||
|
||||
|
||||
def list_large_unwanted_files(folder, extensions=None, size_limit=None):
    """
    Lists files below folder that have an unwanted extension or exceed a size limit.

    :param folder: root folder, searched recursively
    :param extensions: iterable of unwanted file name endings; defaults to the
        module-level unwanted_file_extensions (set in __main__)
    :param size_limit: size threshold in bytes; defaults to the module-level
        large_file_limit (set in __main__)
    :return: list of strings "relative/path size_in_bytes"
    """
    if extensions is None:
        extensions = unwanted_file_extensions
    if size_limit is None:
        size_limit = large_file_limit
    extensions = tuple(extensions)  # str.endswith accepts a tuple of suffixes
    output = []
    for dirpath, dirnames, filenames in os.walk(folder):
        rel_path = os.path.relpath(dirpath, folder)
        for file in filenames:
            file_path = os.path.join(dirpath, file)
            size = os.path.getsize(file_path)  # stat only once per file
            # unwanted extension always lists the file; otherwise only if too large
            if file.endswith(extensions) or size > size_limit:
                output.append(os.path.join(rel_path, file) + ' ' + str(size))
    return output
|
||||
|
||||
|
||||
def checkout(revision_start, revision_end=None):
    """
    Exports a range of SVN revisions into separate per-revision folders.

    Each revision r is exported to svn_checkout_path/rXXXX (zero-padded).
    Exports are retried indefinitely on failure because the server sometimes
    errors out transiently.

    :param revision_start: first revision to export
    :param revision_end: last revision (inclusive); defaults to revision_start
    """
    if not revision_end:
        revision_end = revision_start

    assert revision_end >= revision_start

    for revision in range(revision_start, revision_end + 1):
        # check free disc space, abort below 3e10 bytes (~30 GB)
        if psutil.disk_usage(svn_checkout_path).free < 3e10:
            print('not enough free disc space, will exit')
            sys.exit(-1)

        print('checking out revision {}'.format(revision))

        # create destination directory (remove a stale partial export first)
        destination = os.path.join(svn_checkout_path, 'r{:04d}'.format(revision))
        if os.path.exists(destination):
            shutil.rmtree(destination)

        # checkout
        start_time = time.time()
        # sometimes checkout fails for reasons like "svn: E000024: Can't open file '/svn/p/lechemindeladam/code/db/revs/1865': Too many open files", we try again and again in these cases
        while True:
            try:
                subprocess_run(['svn', 'export', '-r{}'.format(revision), svn_url, destination])
                break
            except Exception:  # narrow from bare except so Ctrl-C/SystemExit still abort
                print('problem with export, will try again')
                # clean up the partial export before retrying
                if os.path.isdir(destination):
                    shutil.rmtree(destination)

        print('checkout took {:.1f}s'.format(time.time() - start_time))
|
||||
|
||||
|
||||
def fix_revision(revision_start, revision_end=None):
    """
    Cleans up a range of already-exported revision folders.

    For every revision: applies special_treatment, deletes globally excluded
    files, records remaining large/unwanted files, deletes empty directories
    and records the resulting folder size. Writes the collected statistics to
    unwanted_files.json and folder_sizes.json in svn_checkout_path.

    :param revision_start: first revision to fix
    :param revision_end: last revision (inclusive); defaults to revision_start
    :raises RuntimeError: if a revision folder has not been exported yet
    """
    if not revision_end:
        revision_end = revision_start
    assert revision_end >= revision_start

    unwanted_files = {}
    sizes = {}

    for revision in range(revision_start, revision_end + 1):
        print('fixing revision {}'.format(revision))

        # destination directory
        destination = os.path.join(svn_checkout_path, 'r{:04d}'.format(revision))
        if not os.path.exists(destination):
            raise RuntimeError('cannot fix revision {}, directory does not exist'.format(revision))

        # special treatment
        special_treatment(destination, revision)

        # delete files from global exclude list
        delete_global_excludes(destination)

        # list unwanted files
        unwanted_files[revision] = list_large_unwanted_files(destination)

        # delete empty directories
        delete_empty_directories(destination)

        # size of resulting folder
        sizes[revision] = folder_size(destination)

    # file names are constants; the previous spurious .format(revision) calls were no-ops
    text = json.dumps(unwanted_files, indent=1)
    write_text(os.path.join(svn_checkout_path, 'unwanted_files.json'), text)
    text = json.dumps(sizes, indent=1)
    write_text(os.path.join(svn_checkout_path, 'folder_sizes.json'), text)
|
||||
|
||||
|
||||
def initialize_git():
    """
    Creates the target git repository in git_path and configures the
    committer identity used for the converted history.
    """
    # git init in a freshly created folder
    os.mkdir(git_path)
    os.chdir(git_path)
    setup_commands = (
        ['git', 'init'],
        ['git', 'config', 'user.name', 'Trilarion'],
        ['git', 'config', 'user.email', 'Trilarion@users.noreply.gitlab.com'],
    )
    for command in setup_commands:
        subprocess_run(command)
|
||||
|
||||
|
||||
def combine_log_messages(msg):
    """
    Joins the non-empty entries of an SVN log message with CRLF.

    Entries that are falsy (empty strings) are dropped; the remaining
    entries are stripped of surrounding whitespace before joining, so a
    whitespace-only entry survives as an empty joined line.
    """
    kept = [entry.strip() for entry in msg if entry]
    return "\r\n".join(kept)
|
||||
|
||||
|
||||
def read_logs():
    """
    Reads the complete SVN log and parses it into per-revision records.

    Parsing is index-based on the exact textual layout of 'svn log' output
    (CRLF line endings, 72-dash separators) — fragile, but sufficient here.
    Probably regular expressions would have worked too.

    :return: (logs, unique_authors) where logs is a list of
        (revision, author, date, message) tuples sorted by revision and
        unique_authors is the sorted list of distinct SVN author names
    """
    # read log
    print('read all log messages')
    os.chdir(svn_checkout_path)
    start_time = time.time()
    log = subprocess_run(['svn', 'log', svn_url], display=False)
    print('read log took {:.1f}s'.format(time.time() - start_time))
    # split into one chunk per revision at the dashed separator lines
    log = log.split('\r\n------------------------------------------------------------------------\r\n')
    # drop the trailing separator remnant and last entry (not the last one)
    log = log[:-2]
    print('{} log entries'.format(len(log)))

    # split each entry into its lines
    log = [x.split('\r\n') for x in log]

    # the first one still contains an additional "---" elements
    log[0] = log[0][1:]

    # split the first (header) line: "rNNN | author | date | N lines"
    info = [x[0].split('|') for x in log]

    # get the revision (strip the leading 'r')
    revision = [int(x[0][1:]) for x in info]

    author = [x[1].strip() for x in info]
    unique_authors = list(set(author))
    unique_authors.sort()

    date = [x[2].strip() for x in info]
    # entry lines [2:] skip the header and the blank line before the message
    msg = [combine_log_messages(x[2:]) for x in log]
    logs = list(zip(revision, author, date, msg))
    logs.sort(key=lambda x: x[0])
    return logs, unique_authors
|
||||
|
||||
|
||||
def gitify(revision_start, revision_end):
    """
    Commits a range of exported-and-fixed SVN revision folders to git.

    For each revision: clears the git working tree (keeping .git), copies the
    revision folder in, stages everything and commits using the author/date
    from the parsed SVN log. Relies on module-level globals set in __main__:
    svn_checkout_path, git_path, logs (revision -> record) and authors
    (svn author -> [git name, git email]).

    :param revision_start: first revision to commit
    :param revision_end: last revision (inclusive)
    :raises RuntimeError: if a revision folder has not been exported yet
    """
    assert revision_end >= revision_start

    for revision in range(revision_start, revision_end + 1):
        print('adding revision {} to git'.format(revision))

        # svn folder
        svn_folder = os.path.join(svn_checkout_path, 'r{:04d}'.format(revision))
        if not os.path.exists(svn_folder):
            raise RuntimeError('cannot add revision {}, directory does not exist'.format(revision))

        # clear git path; retried because on Windows files can be transiently locked
        print('git clear path')
        while True:
            try:
                git_clear_path(git_path)
                break
            except PermissionError as e:
                print(e)
                # wait a bit
                time.sleep(1)

        # copy source files to git path
        print('copy to git')
        copy_tree(svn_folder, git_path)

        os.chdir(git_path)

        # update the git index (add unstaged, remove deleted, ...)
        print('git add')
        subprocess_run(['git', 'add', '--all'])

        # check if there is something to commit (empty porcelain output = clean tree)
        status = subprocess_run(['git', 'status', '--porcelain'])
        if not status:
            print(' nothing to commit for revision {}, will skip'.format(revision))
            continue

        # perform the commit
        print('git commit')
        log = logs[revision]  # revision, author, date, message
        message = log[3] + '\r\nsvn-revision: {}'.format(revision)
        print(' message "{}"'.format(message))
        author = authors[log[1]]
        author = '{} <{}>'.format(*author)
        cmd = ['git', 'commit', '--allow-empty-message', '--message={}'.format(message), '--author={}'.format(author),
               '--date={}'.format(log[2])]
        print(' cmd: {}'.format(' '.join(cmd)))
        subprocess_run(cmd)
|
||||
|
||||
|
||||
if __name__ == "__main__":

    # conversion parameters
    global_exclude = ['Thumbs.db']  # file names deleted everywhere
    unwanted_file_extensions = ['.exe', '.dll']  # always reported as unwanted
    large_file_limit = 1e6  # report files above this size, in bytes

    # base path is the directory containing this file
    base_path = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'conversion')
    print('base path={}'.format(base_path))

    # derived paths
    svn_checkout_path = os.path.join(base_path, 'svn')
    if not os.path.exists(svn_checkout_path):
        os.mkdir(svn_checkout_path)
    empire_path = os.path.join(base_path, 'empire')  # empire of steam side project
    if not os.path.exists(empire_path):
        os.mkdir(empire_path)
    git_path = os.path.join(base_path, 'lechemindeladam')
    if not os.path.exists(git_path):
        initialize_git()

    # svn url
    svn_url = "https://svn.code.sf.net/p/lechemindeladam/code/"

    # read logs (first run: uncomment to fetch and cache logs/authors as JSON)
    # logs, authors = read_logs()
    # text = json.dumps(logs, indent=1)
    # write_text(os.path.join(base_path, 'logs.json'), text)
    # text = json.dumps(authors, indent=1)
    # write_text(os.path.join(base_path, 'authors.json'), text)
    # subsequent runs: load the cached logs/authors
    text = read_text(os.path.join(base_path, 'logs.json'))
    logs = json.loads(text)
    logs = {x[0]: x for x in logs}  # dictionary revision -> record
    text = read_text(os.path.join(base_path, 'authors.json'))
    authors = json.loads(text)  # should be a dictionary: svn-author: [git-author, git-email]

    # the steps (run batch by batch, uncommenting as needed)
    # checkout(1, 50)
    # fix_revision(1, 50)
    # gitify(4, 50)

    # checkout(51, 100)
    # checkout(101, 200)

    # fix_revision(51, 200)

    # gitify(51, 200)

    # checkout(201, 400)
    # fix_revision(201, 400)
    # gitify(201, 400)

    # checkout(401, 800)
    # fix_revision(401, 800)
    # gitify(401, 800)

    # checkout(801, 1200)
    # fix_revision(801, 1200)
    # gitify(801, 1200)

    # checkout(1201, 1470)
    # fix_revision(1201, 1470)
    # gitify(1201, 1470)

    # checkout(1471, 1700)
    # fix_revision(1471, 1700)
    # gitify(1471, 1700)

    # checkout(1701, 1900)
    # fix_revision(1701, 1900)
    # gitify(1701, 1900)

    # checkout(1901, 2140)
    # fix_revision(1901, 2140)
    # gitify(1901, 2140)

    # checkout(2141, 2388)
    # fix_revision(2141, 2388)
    # gitify(2141, 2388)

    # checkout(2389, 2420)
    # fix_revision(2389, 2420)
    # gitify(2389, 2420)

    # afterwards, run the following commands in the git bash
    # git config credential.useHttpPath true
    # git lfs install
    # git lfs migrate import --include-ref=master --include="Zombie_paysan.rs.hs,Witch_monster.rs.hs,WanderingStones.rs.hs,TwoWeapons.rs.hs,TwoHands.rs.hs,TwoHand.rs.hs,Reaper.rs.hs,Peasant_crossbow.rs.hs,Peasant_club.rs.hs,OneHand.rs.hs,Offspring_champion.rs.hs,Mimic.rs.hs,LordSkeleton.rs.hs,Goule.rs.hs,ErrantRoche.rs.hs,DemonicPriest0.rs.hs,DemonicPriest.rs.hs,Brute.rs.hs,20575__dobroide__20060706.night.forest02.wav,31464__offtheline__Morning_Sounds.wav,47989__Luftrum__forestsurroundings.wav,ambiance.wav,Catacombs0.wav,Pluie.wav,Taverne fusion.png,Abbey.ogg,AgrarianLands0.ogg,AgrarianLands1.ogg,Boss0.ogg,Catacombs0.ogg,Catacombs1.ogg,DarkForest.ogg,Forest_ambient0.ogg,Johannes.ogg,OWC.ogg"

    # then add remote and push (done)
|
49
code/custom-conversions/phaos.json
Normal file
49
code/custom-conversions/phaos.json
Normal file
@ -0,0 +1,49 @@
|
||||
["https://sourceforge.net/projects/phaosrpg/files/Phaos%20Online%20RPG%20%28Source%20Code%29/Version%200.9.82/phaos-0.9.82-UPDATED-6.zip/download",
|
||||
"https://sourceforge.net/projects/phaosrpg/files/Phaos%20Online%20RPG%20%28Source%20Code%29/Version%200.9.82/phaos-0.9.82-UPDATED-5.zip/download",
|
||||
"https://sourceforge.net/projects/phaosrpg/files/Phaos%20Online%20RPG%20%28Source%20Code%29/Version%200.9.82/phaos-0.9.82-UPDATED-4.zip/download",
|
||||
"https://sourceforge.net/projects/phaosrpg/files/Phaos%20Online%20RPG%20%28Source%20Code%29/Version%200.9.82/phaos-0.9.82-UPDATED-3.zip/download",
|
||||
"https://sourceforge.net/projects/phaosrpg/files/Phaos%20Online%20RPG%20%28Source%20Code%29/Version%200.9.82/phaos-0.9.82-UPDATED-2.zip/download",
|
||||
"https://sourceforge.net/projects/phaosrpg/files/Phaos%20Online%20RPG%20%28Source%20Code%29/Version%200.9.82/phaos-0.9.82-UPDATED.zip/download",
|
||||
"https://sourceforge.net/projects/phaosrpg/files/Phaos%20Online%20RPG%20%28Source%20Code%29/Version%200.9.82/phaos-0.9.82.zip/download",
|
||||
"https://sourceforge.net/projects/phaosrpg/files/Phaos%20Online%20RPG%20%28Source%20Code%29/Version%200.9.81/phaos-0.9.81.zip/download",
|
||||
"https://sourceforge.net/projects/phaosrpg/files/Phaos%20Online%20RPG%20%28Source%20Code%29/Version%200.9.8/phaos-0.9.8.zip/download",
|
||||
"https://sourceforge.net/projects/phaosrpg/files/Phaos%20Online%20RPG%20%28Source%20Code%29/Version%200.9.7/phaos-0.9.7.zip/download",
|
||||
"https://sourceforge.net/projects/phaosrpg/files/Phaos%20Online%20RPG%20%28Source%20Code%29/Version%200.9.6/phaos-0.9.6.zip/download",
|
||||
"https://sourceforge.net/projects/phaosrpg/files/Phaos%20Online%20RPG%20%28Source%20Code%29/Version%200.9/phaos-0.9.4.zip/download",
|
||||
"https://sourceforge.net/projects/phaosrpg/files/Phaos%20Online%20RPG%20%28Source%20Code%29/Version%200.9/phaos-0.9.3.zip/download",
|
||||
"https://sourceforge.net/projects/phaosrpg/files/Phaos%20Online%20RPG%20%28Source%20Code%29/Version%200.9/phaos-0.9.2.zip/download",
|
||||
"https://sourceforge.net/projects/phaosrpg/files/Phaos%20Online%20RPG%20%28Source%20Code%29/Version%200.89/phaos-0.89.zip/download",
|
||||
"https://sourceforge.net/projects/phaosrpg/files/OldFiles/phaos-0.9.5.zip/download",
|
||||
"https://sourceforge.net/projects/phaosrpg/files/OldFiles/phaos-0.9.1.zip/download",
|
||||
"https://sourceforge.net/projects/phaosrpg/files/OldFiles/phaos-0.9.zip/download",
|
||||
"https://sourceforge.net/projects/phaosrpg/files/OldFiles/phaos-0.88.zip/download",
|
||||
"https://sourceforge.net/projects/phaosrpg/files/OldFiles/phaos-0.88-rc3.zip/download",
|
||||
"https://sourceforge.net/projects/phaosrpg/files/OldFiles/phaos-0.88-rc2.zip/download",
|
||||
"https://sourceforge.net/projects/phaosrpg/files/OldFiles/phaos-0.88-rc1.zip/download",
|
||||
"https://sourceforge.net/projects/phaosrpg/files/OldFiles/phaos-0.87-1.zip/download",
|
||||
"https://sourceforge.net/projects/phaosrpg/files/OldFiles/phaos-0.87.zip/download",
|
||||
"https://sourceforge.net/projects/phaosrpg/files/OldFiles/phaos-0.86.zip/download",
|
||||
"https://sourceforge.net/projects/phaosrpg/files/OldFiles/phaos-0.86-TEST-3.zip/download",
|
||||
"https://sourceforge.net/projects/phaosrpg/files/OldFiles/phaos-0.86-TEST-2.zip/download",
|
||||
"https://sourceforge.net/projects/phaosrpg/files/OldFiles/phaos-0.86-TEST-1.zip/download",
|
||||
"https://sourceforge.net/projects/phaosrpg/files/OldFiles/phaos-0.85.zip/download",
|
||||
"https://sourceforge.net/projects/phaosrpg/files/OldFiles/phaos-0.85-TEST-3.zip/download",
|
||||
"https://sourceforge.net/projects/phaosrpg/files/OldFiles/phaos-0.85-TEST-2.zip/download",
|
||||
"https://sourceforge.net/projects/phaosrpg/files/OldFiles/phaos-0.85-TEST-1.zip/download",
|
||||
"https://sourceforge.net/projects/phaosrpg/files/OldFiles/phaos-0.84.zip/download",
|
||||
"https://sourceforge.net/projects/phaosrpg/files/OldFiles/phaos-0.84-TEST2.zip/download",
|
||||
"https://sourceforge.net/projects/phaosrpg/files/OldFiles/phaos-0.84-TEST1.zip/download",
|
||||
"https://sourceforge.net/projects/phaosrpg/files/OldFiles/phaos-0.82.zip/download",
|
||||
"https://sourceforge.net/projects/phaosrpg/files/OldFiles/phaos-0.82-TEST3.zip/download",
|
||||
"https://sourceforge.net/projects/phaosrpg/files/OldFiles/phaos-0.82-TEST2.zip/download",
|
||||
"https://sourceforge.net/projects/phaosrpg/files/OldFiles/phaos-0.82-TEST1.zip/download",
|
||||
"https://sourceforge.net/projects/phaosrpg/files/OldFiles/phaos-0.8.zip/download",
|
||||
"https://sourceforge.net/projects/phaosrpg/files/OldFiles/phaos-0.77.zip/download",
|
||||
"https://sourceforge.net/projects/phaosrpg/files/OldFiles/phaos-0.76.zip/download",
|
||||
"https://sourceforge.net/projects/phaosrpg/files/OldFiles/phaos-0.74.zip/download",
|
||||
"https://sourceforge.net/projects/phaosrpg/files/OldFiles/phaos0.72.zip/download",
|
||||
"https://sourceforge.net/projects/phaosrpg/files/OldFiles/Pv0.72.zip/download",
|
||||
"https://sourceforge.net/projects/phaosrpg/files/OldFiles/Pv0.7develnew.zip/download",
|
||||
"https://sourceforge.net/projects/phaosrpg/files/OldFiles/Pv0.7.zip/download",
|
||||
"https://sourceforge.net/projects/phaosrpg/files/OldFiles/Pv0.61.zip/download",
|
||||
"https://sourceforge.net/projects/phaosrpg/files/OldFiles/Pv0.6.zip/download"]
|
125
code/custom-conversions/phaos_source_release_to_git.py
Normal file
125
code/custom-conversions/phaos_source_release_to_git.py
Normal file
@ -0,0 +1,125 @@
|
||||
"""
|
||||
Downloads source releases from Sourceforge and puts them into a git repository
|
||||
"""
|
||||
|
||||
import json
|
||||
import datetime
|
||||
from utils.utils import *
|
||||
|
||||
if __name__ == '__main__':

    # NOTE: https://sourceforge.net/projects/phaosrpg/files/OldFiles/Pv0.7devel.zip/download is a corrupt zip

    # base path is the directory containing this file
    base_path = os.path.abspath(os.path.dirname(__file__))
    print('base path={}'.format(base_path))

    # create the download cache folder if needed
    archive_path = os.path.join(base_path, 'downloads')
    if not os.path.exists(archive_path):
        os.mkdir(archive_path)

    # load source release urls (JSON list of Sourceforge download links)
    with open(os.path.join(base_path, 'phaos.json'), 'r') as f:
        urls = json.load(f)
    print('will process {} urls'.format(len(urls)))
    if len(urls) != len(set(urls)):
        raise RuntimeError("urls list contains duplicates")

    # determine file archive names from urls (second-to-last path component)
    archives = [x.split('/')[-2] for x in urls]
    if len(archives) != len(set(archives)):
        raise RuntimeError("files with duplicate archives, cannot deal with that")

    # determine version from file name
    versions = [determine_archive_version_generic(x, leading_terms=['phaos-', 'phaos', 'pv'], trailing_terms=['zip']) for x in archives]
    # for version in versions:
    #     print(version)

    # extend archives to full paths
    archives = [os.path.join(archive_path, x) for x in archives]

    # download them (skipping files already in the cache)
    print('download source releases')
    for url, destination in zip(urls, archives):
        # only if not yet existing
        if os.path.exists(destination):
            continue
        # download
        print(' download {}'.format(os.path.basename(destination)))
        with urllib.request.urlopen(url) as response:
            with open(destination, 'wb') as f:
                shutil.copyfileobj(response, f)
        time.sleep(1)  # we are nice to the server

    # unzip them
    print('unzip downloaded archives')
    unzipped_archives = [x[:-4] for x in archives]  # folder is archive name without .zip
    for archive, unzipped_archive in zip(archives, unzipped_archives):
        print(' unzip {}'.format(os.path.basename(archive)))
        # only if not yet existing
        if os.path.exists(unzipped_archive):
            continue
        os.mkdir(unzipped_archive)
        # unzip
        unzip_keep_last_modified(archive, unzipped_archive)

    # go up in unzipped archives until the very first non-empty folder
    unzipped_archives = [strip_wrapped_folders(x) for x in unzipped_archives]

    # determine date from the newest last-modified timestamp in each archive
    dates = [determine_latest_last_modified_date(x) for x in unzipped_archives]
    dates_strings = [datetime.datetime.fromtimestamp(x).strftime('%Y-%m-%d') for x in dates]
    # if len(dates_strings) != len(set(dates_strings)):
    #     raise RuntimeError("Some on the same day, cannot cope with that")

    # gather all important stuff in one list and sort by dates (timestamp, index 3)
    db = list(zip(urls, unzipped_archives, versions, dates, dates_strings))
    db.sort(key=lambda x:x[3])
    print('proposed order')
    for url, _, version, _, date in db:
        print(' date={} version={}'.format(date, version))

    # git init (recreates the target repository from scratch on every run)
    git_path = os.path.join(base_path, 'phaosrpg')
    if os.path.exists(git_path):
        shutil.rmtree(git_path)
    os.mkdir(git_path)
    os.chdir(git_path)
    subprocess_run(['git', 'init'])
    subprocess_run(['git', 'config', 'user.name', 'Trilarion'])
    subprocess_run(['git', 'config', 'user.email', 'Trilarion@users.noreply.gitlab.com'])

    # now process release by release in date order
    print('process revisions')
    git_author = 'eproductions3 <eproductions3@user.sourceforge.net>'
    # NOTE(review): this loop variable shadows the earlier downloads-folder
    # archive_path; harmless here since the downloads folder is no longer used
    for url, archive_path, version, _, date in db:
        print(' process version={}'.format(version))

        # clear git path without deleting .git
        print(' clear git')
        for item in os.listdir(git_path):
            # ignore '.git'
            if item == '.git':
                continue
            item = os.path.join(git_path, item)
            if os.path.isdir(item):
                shutil.rmtree(item)
            else:
                os.remove(item)

        # copy unpacked source files to git path
        print('copy to git')
        copy_tree(archive_path, git_path)

        # update the git index (add unstaged, remove deleted, ...)
        print('git add')
        os.chdir(git_path)
        subprocess_run(['git', 'add', '--all'])

        # perform the commit
        print('git commit')
        os.chdir(git_path)
        message = 'version {} ({}) on {}'.format(version, url, date)
        print(' message "{}"'.format(message))
        subprocess_run(['git', 'commit', '--message={}'.format(message), '--author={}'.format(git_author), '--date={}'.format(date)])
|
Reference in New Issue
Block a user