Added new entries; the update script can now also list repositories by size
@@ -86,6 +86,7 @@
"https://github.com/MattMatt0240/DemiGod.git",
"https://github.com/MegaGlest/megaglest-source.git",
"https://github.com/MegaMek/megamek.git",
"https://github.com/Mekire/cabbages-and-kings.git",
"https://github.com/Meridian59/Meridian59.git",
"https://github.com/MonoGame/MonoGame.git",
"https://github.com/MovingBlocks/Terasology.git",
@@ -341,6 +342,7 @@
"https://gitlab.com/Trilarion/pymapper.git",
"https://gitlab.com/Trilarion/rogueclone.git",
"https://gitlab.com/Trilarion/sentient-storage.git",
"https://gitlab.com/Trilarion/sharpkonquest.git",
"https://gitlab.com/Trilarion/skrupel.git",
"https://gitlab.com/Trilarion/slaygame.git",
"https://gitlab.com/Trilarion/spicetrade.git",
@@ -3,10 +3,11 @@ The svn is too big to be automatically imported to git (and Github) because ther
Needs a manual solution.

TODO use git lfs migrate later on the elements
TODO check for sufficient disc space before checkout
"""

import json

import psutil
from utils.utils import *
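As a side note on the "git lfs migrate" TODO above: the following is only a hedged sketch (not part of this commit) of how that step could later be run against the converted repository from Python; the repository path and file patterns are illustrative.

import subprocess

def lfs_migrate(git_path, patterns=('*.zip', '*.png')):
    # Rewrite all refs so files matching the patterns are stored via git-lfs.
    # Illustrative only; the actual patterns would have to be chosen per repository.
    subprocess.run(['git', 'lfs', 'migrate', 'import',
                    '--include=' + ','.join(patterns), '--everything'],
                   cwd=git_path, check=True)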
@@ -27,7 +28,8 @@ def special_treatment(destination, revision):
        # remove it
        shutil.rmtree(os.path.join(destination, 'Holyspirit'))

    if 337 <= revision <= 400:
    # copy all important files from Holyspirit and delete it
    if 337 <= revision <= 1200:
        source = os.path.join(destination, 'Holyspirit')
        if os.path.isdir(source):
            data = os.path.join(source, 'Data')
@@ -39,6 +41,24 @@ def special_treatment(destination, revision):
            # remove it
            shutil.rmtree(source)

    # remove Holyspirit3 folder
    if 464 <= revision <= 1200:
        source = os.path.join(destination, 'Holyspirit3')
        if os.path.isdir(source):
            shutil.rmtree(source)

    # remove Holyspirit2 folder
    if 659 <= revision <= 1200:
        source = os.path.join(destination, 'Holyspirit2')
        if os.path.isdir(source):
            shutil.rmtree(source)

    # remove Launcher/release
    if 413 <= revision <= 1200:
        source = os.path.join(destination, 'Launcher', 'release')
        if os.path.isdir(source):
            shutil.rmtree(source)

    # delete all *.dll, *.exe in base folder
    if 3 <= revision <= 9:
        files = os.listdir(destination)
@@ -46,16 +66,50 @@ def special_treatment(destination, revision):
            if file.endswith('.exe') or file.endswith('.dll'):
                os.remove(os.path.join(destination, file))

    # delete "cross" folder
    if 42 <= revision <= 43:
        folder = os.path.join(destination, 'Cross')
        if os.path.isdir(folder):
            shutil.rmtree(folder)

    if 374 <= revision:
    # delete personal photos
    if 374 <= revision <= 1200:
        folder = os.path.join(destination, 'Photos')
        if os.path.isdir(folder):
            shutil.rmtree(folder)

    # move empire of steam out
    if 1173 <= revision <= 1200:
        folder = os.path.join(destination, 'EmpireOfSteam')
        if os.path.isdir(folder):
            # move to empire path
            empire = os.path.join(empire_path, 'r{:04d}'.format(revision))
            shutil.move(folder, empire)

    # holy editor cleanup
    if 1078 <= revision <= 1200:
        source = os.path.join(destination, 'HolyEditor')
        for name in ('bin', 'release'):
            folder = os.path.join(source, name)
            if os.path.isdir(folder):
                shutil.rmtree(folder)
        for name in ('moc.exe',):
            file = os.path.join(source, name)
            if os.path.isfile(file):
                os.remove(file)

    # source folder cleanup
    if 939 <= revision <= 1200:
        source = os.path.join(destination, 'Source')
        for name in ('HS',):
            folder = os.path.join(source, name)
            if os.path.isdir(folder):
                shutil.rmtree(folder)
        for name in ('HS.zip',):
            file = os.path.join(source, name)
            if os.path.isfile(file):
                os.remove(file)


def delete_global_excludes(folder):
    """
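The revision-range cleanups above all repeat the same build-path/check/delete pattern; purely as a suggestion (not part of this commit, helper name is hypothetical), a small helper could collapse it:

import os
import shutil

def remove_if_exists(*parts):
    # join the path parts and delete the result, whether directory or file
    path = os.path.join(*parts)
    if os.path.isdir(path):
        shutil.rmtree(path)
    elif os.path.isfile(path):
        os.remove(path)

# e.g. the Holyspirit3 block would shrink to:
# if 464 <= revision <= 1200:
#     remove_if_exists(destination, 'Holyspirit3')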
@@ -105,6 +159,11 @@ def checkout(revision_start, revision_end):
    assert revision_end >= revision_start

    for revision in range(revision_start, revision_end + 1):
        # check free disc space
        if psutil.disk_usage(svn_checkout_path).free < 3e10:  # 3e10 bytes = 30 GB
            print('not enough free disc space, will exit')
            sys.exit(-1)

        print('checking out revision {}'.format(revision))

        # create destination directory
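For reference, psutil.disk_usage returns a named tuple and the check above only uses its free field; a minimal standalone illustration (the path is arbitrary):

import psutil

usage = psutil.disk_usage('/')  # sdiskusage(total=..., used=..., free=..., percent=...)
print('{:.1f} GB free'.format(usage.free / 1e9))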
@@ -117,11 +176,6 @@ def checkout(revision_start, revision_end):
        subprocess_run(['svn', 'export', '-r{}'.format(revision), svn_url, destination])
        print('checkout took {:.1f}s'.format(time.time() - start_time))

        # copy to backup
        # backup_path = os.path.join(svn_backup_path, 'r{:04d}'.format(revision))
        # if not os.path.exists(backup_path):
        # shutil.copytree(destination, backup_path)


def fix_revision(revision_start, revision_end):
    """
@@ -291,9 +345,9 @@ if __name__ == "__main__":
    svn_checkout_path = os.path.join(base_path, 'svn')
    if not os.path.exists(svn_checkout_path):
        os.mkdir(svn_checkout_path)
    svn_backup_path = os.path.join(base_path, 'svn_backup')
    if not os.path.exists(svn_backup_path):
        os.mkdir(svn_backup_path)
    empire_path = os.path.join(base_path, 'empire')  # empire of steam side project
    if not os.path.exists(empire_path):
        os.mkdir(empire_path)
    git_path = os.path.join(base_path, 'lechemindeladam')
    if not os.path.exists(git_path):
        initialize_git()
@@ -330,5 +384,11 @@ if __name__ == "__main__":
    # gitify(201, 400)

    # checkout(401, 800)
    # fix_revision(401, 800)
    # gitify(401, 800)

    checkout(493, 800)
    # checkout(801, 1200)
    # fix_revision(801, 1200)
    # gitify(801, 1200)

    checkout(1201, 1500)
@@ -12,6 +12,7 @@ Note: May need to set http.postBuffer (https://stackoverflow.com/questions/17683
"""

import json

from utils.utils import *
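Regarding the http.postBuffer note in the hunk header above: that is a plain git config setting; a hedged sketch of applying it from Python (the 500 MB value is only a commonly used example, not something this commit sets, and the helper name is hypothetical):

import subprocess

def raise_post_buffer(repo_path, size_bytes=524288000):
    # allow larger HTTP pushes for this working copy (illustrative helper)
    subprocess.run(['git', 'config', 'http.postBuffer', str(size_bytes)],
                   cwd=repo_path, check=True)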
@@ -27,6 +28,7 @@ def derive_folder_name(url, replaces):
            return sanitize(url)
    raise Exception('malformed url')


def git_folder_name(url):
    replaces = {
        'https://github.com': 'github',
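Only the tail of derive_folder_name is visible in the hunk above; purely as an illustration of a function with that return/raise shape (not the project's actual implementation, and the sanitize stand-in below only approximates the real utils helper):

def sanitize(name):
    # stand-in for the real utils helper: make a filesystem-friendly name
    return name.replace('/', '.').strip('.')

def derive_folder_name(url, replaces):
    # map a clone URL to a folder name via the service prefixes in `replaces`
    for prefix, short in replaces.items():
        if url.startswith(prefix):
            return sanitize(short + url[len(prefix):])
    raise Exception('malformed url')

# derive_folder_name('https://github.com/Foo/Bar.git', {'https://github.com': 'github'})
# -> 'github.Foo.Bar.git' (with this stand-in sanitize)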
@@ -100,7 +102,7 @@ def bzr_update(folder):
    subprocess_run(['bzr', 'pull'])


def run(type, urls):
def run_update(type, urls):
    print('update {} {} archives'.format(len(urls), type))
    base_folder = os.path.join(archive_folder, type)
    if not os.path.exists(base_folder):
@@ -140,9 +142,25 @@ def run(type, urls):
        update[type](folder)


def run_info(type, urls):
    print('collect info on {}'.format(type))

    # get derived folder names
    folders = [os.path.join(type, folder_name[type](url)) for url in urls]

    # collect information
    info = []
    for folder in folders:
        print(folder)
        path = os.path.join(archive_folder, folder)
        size = folder_size(path) if os.path.isdir(path) else -1
        info.append([size, folder])
    return info
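folder_size is imported from utils.utils and not shown in this diff; a minimal sketch of such a helper, assuming it simply sums file sizes recursively:

import os

def folder_size(path):
    # total size in bytes of all regular files below path (assumed behaviour;
    # the real helper lives in utils.utils and may differ)
    total = 0
    for root, _, files in os.walk(path):
        for name in files:
            file_path = os.path.join(root, name)
            if os.path.isfile(file_path):
                total += os.path.getsize(file_path)
    return total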

if __name__ == '__main__':

    supported_types = ['git', 'hg', 'svn']  # currently no bzr client installed

    folder_name = {
        'git': git_folder_name,
@@ -173,13 +191,18 @@ if __name__ == '__main__':
    text = read_text(os.path.join(root_folder, 'archives.json'))
    archives = json.loads(text)

    # update
    for type in archives:
        if type not in supported_types:
            continue
        urls = archives[type]
        run(type, urls)
        run_update(type, urls)

    # collect info
    infos = []
    for type in archives:
        urls = archives[type]
        infos.extend(run_info(type, urls))
    infos.sort(key=lambda x: x[0], reverse=True)
    text = json.dumps(infos, indent=1)
    write_text(os.path.join(archive_folder, 'infos.json'), text)