#!/usr/bin/env nix-shell
#!nix-shell -p nix-prefetch-git -p python3 nix -i python3

# format:
# $ nix run nixpkgs.python3Packages.black -c black update.py
# type-check:
# $ nix run nixpkgs.python3Packages.mypy -c mypy update.py
# linted:
# $ nix run nixpkgs.python3Packages.flake8 -c flake8 --ignore E501,E265 update.py

import argparse
import functools
import http
import json
import os
import subprocess
import sys
import time
import traceback
import urllib.error
import urllib.parse
import urllib.request
import xml.etree.ElementTree as ET
from datetime import datetime
from functools import wraps
from multiprocessing.dummy import Pool
from pathlib import Path
from typing import Dict, List, Optional, Tuple, Union, Any, Callable
from urllib.parse import urljoin, urlparse
from tempfile import NamedTemporaryFile

ATOM_ENTRY = "{http://www.w3.org/2005/Atom}entry"  # " vim gets confused here
ATOM_LINK = "{http://www.w3.org/2005/Atom}link"  # "
ATOM_UPDATED = "{http://www.w3.org/2005/Atom}updated"  # "

ROOT = Path(__file__).parent
DEFAULT_IN = ROOT.joinpath("vim-plugin-names")
DEFAULT_OUT = ROOT.joinpath("generated.nix")
DEPRECATED = ROOT.joinpath("deprecated.json")
|
|
|
|


def retry(ExceptionToCheck: Any, tries: int = 4, delay: float = 3, backoff: float = 2):
    """Retry calling the decorated function using an exponential backoff.

    http://www.saltycrane.com/blog/2009/11/trying-out-retry-decorator-python/
    original from: http://wiki.python.org/moin/PythonDecoratorLibrary#Retry
    (BSD licensed)

    :param ExceptionToCheck: the exception on which to retry
    :param tries: number of times to try (not retry) before giving up
    :param delay: initial delay between retries in seconds
    :param backoff: backoff multiplier, e.g. a value of 2 doubles the delay
        on each retry
    """

    def deco_retry(f: Callable) -> Callable:
        @wraps(f)
        def f_retry(*args: Any, **kwargs: Any) -> Any:
            mtries, mdelay = tries, delay
            while mtries > 1:
                try:
                    return f(*args, **kwargs)
                except ExceptionToCheck as e:
                    print(f"{str(e)}, Retrying in {mdelay} seconds...")
                    time.sleep(mdelay)
                    mtries -= 1
                    mdelay *= backoff
            # final attempt; any exception now propagates to the caller
            return f(*args, **kwargs)

        return f_retry  # true decorator

    return deco_retry
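

# Illustrative use of the decorator (hypothetical function, not part of this
# script): with tries=4, delay=3 and backoff=2, a persistently failing call is
# attempted four times, sleeping 3s, 6s and 12s between attempts.
#
#     @retry(urllib.error.URLError, tries=4, delay=3, backoff=2)
#     def fetch(url: str) -> bytes:
#         with urllib.request.urlopen(url, timeout=10) as req:
#             return req.read()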


class Repo:
    def __init__(self, owner: str, name: str, alias: Optional[str]) -> None:
        self.owner = owner
        self.name = name
        self.alias = alias
        self.redirect: Dict[str, str] = {}

    def url(self, path: str) -> str:
        return urljoin(f"https://github.com/{self.owner}/{self.name}/", path)

    def __repr__(self) -> str:
        return f"Repo({self.owner}, {self.name})"

    @retry(urllib.error.URLError, tries=4, delay=3, backoff=2)
    def has_submodules(self) -> bool:
        try:
            urllib.request.urlopen(
                self.url("blob/master/.gitmodules"), timeout=10
            ).close()
        except urllib.error.HTTPError as e:
            if e.code == 404:
                return False
            else:
                raise
        return True

    @retry(urllib.error.URLError, tries=4, delay=3, backoff=2)
    def latest_commit(self) -> Tuple[str, datetime]:
        commit_url = self.url("commits/master.atom")
        with urllib.request.urlopen(commit_url, timeout=10) as req:
            self.check_for_redirect(commit_url, req)
            xml = req.read()
        root = ET.fromstring(xml)
        latest_entry = root.find(ATOM_ENTRY)
        assert latest_entry is not None, f"No commits found in repository {self}"
        commit_link = latest_entry.find(ATOM_LINK)
        assert commit_link is not None, f"No link tag found in feed entry {xml}"
        url = urlparse(commit_link.get("href"))
        updated_tag = latest_entry.find(ATOM_UPDATED)
        assert (
            updated_tag is not None and updated_tag.text is not None
        ), f"No updated tag found in feed entry {xml}"
        updated = datetime.strptime(updated_tag.text, "%Y-%m-%dT%H:%M:%SZ")
        return Path(str(url.path)).name, updated

    def check_for_redirect(self, url: str, req: http.client.HTTPResponse) -> None:
        response_url = req.geturl()
        if url != response_url:
            new_owner, new_name = (
                urllib.parse.urlsplit(response_url).path.strip("/").split("/")[:2]
            )
            end_line = "\n" if self.alias is None else f" as {self.alias}\n"
            plugin_line = "{owner}/{name}" + end_line

            old_plugin = plugin_line.format(owner=self.owner, name=self.name)
            new_plugin = plugin_line.format(owner=new_owner, name=new_name)
            self.redirect[old_plugin] = new_plugin

    def prefetch_git(self, ref: str) -> str:
        data = subprocess.check_output(
            ["nix-prefetch-git", "--fetch-submodules", self.url(""), ref]
        )
        return json.loads(data)["sha256"]

    def prefetch_github(self, ref: str) -> str:
        data = subprocess.check_output(
            ["nix-prefetch-url", "--unpack", self.url(f"archive/{ref}.tar.gz")]
        )
        return data.strip().decode("utf-8")


class Plugin:
    def __init__(
        self,
        name: str,
        commit: str,
        has_submodules: bool,
        sha256: str,
        date: Optional[datetime] = None,
    ) -> None:
        self.name = name
        self.commit = commit
        self.has_submodules = has_submodules
        self.sha256 = sha256
        self.date = date

    @property
    def normalized_name(self) -> str:
        return self.name.replace(".", "-")

    @property
    def version(self) -> str:
        assert self.date is not None
        return self.date.strftime("%Y-%m-%d")

    def as_json(self) -> Dict[str, Any]:
        copy = self.__dict__.copy()
        del copy["date"]
        return copy


GET_PLUGINS = f"""(with import <localpkgs> {{}};
let
  inherit (vimUtils.override {{inherit vim;}}) buildVimPluginFrom2Nix;
  generated = callPackage {ROOT}/generated.nix {{
    inherit buildVimPluginFrom2Nix;
  }};
  hasChecksum = value: lib.isAttrs value && lib.hasAttrByPath ["src" "outputHash"] value;
  getChecksum = name: value:
    if hasChecksum value then {{
      submodules = value.src.fetchSubmodules or false;
      sha256 = value.src.outputHash;
      rev = value.src.rev;
    }} else null;
  checksums = lib.mapAttrs getChecksum generated;
in lib.filterAttrs (n: v: v != null) checksums)"""
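
# Evaluated via `nix eval --json` in get_current_plugins() below, the
# expression above yields JSON of roughly this shape (illustrative values):
#
#     {"vim-foo": {"rev": "0123abc...", "sha256": "1f0...", "submodules": false}}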


class CleanEnvironment(object):
    def __enter__(self) -> None:
        self.old_environ = os.environ.copy()
        local_pkgs = str(ROOT.joinpath("../../.."))
        os.environ["NIX_PATH"] = f"localpkgs={local_pkgs}"
        self.empty_config = NamedTemporaryFile()
        self.empty_config.write(b"{}")
        self.empty_config.flush()
        os.environ["NIXPKGS_CONFIG"] = self.empty_config.name

    def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> None:
        # restore the original environment, dropping any variables added above
        os.environ.clear()
        os.environ.update(self.old_environ)
        self.empty_config.close()


def get_current_plugins() -> List[Plugin]:
    with CleanEnvironment():
        out = subprocess.check_output(["nix", "eval", "--json", GET_PLUGINS])
    data = json.loads(out)
    plugins = []
    for name, attr in data.items():
        p = Plugin(name, attr["rev"], attr["submodules"], attr["sha256"])
        plugins.append(p)
    return plugins


def prefetch_plugin(
    user: str, repo_name: str, alias: Optional[str], cache: "Cache"
) -> Tuple[Plugin, Dict[str, str]]:
    repo = Repo(user, repo_name, alias)
    commit, date = repo.latest_commit()
    has_submodules = repo.has_submodules()
    cached_plugin = cache[commit]
    if cached_plugin is not None:
        cached_plugin.name = alias or repo_name
        cached_plugin.date = date
        return cached_plugin, repo.redirect

    print(f"prefetch {user}/{repo_name}")
    if has_submodules:
        sha256 = repo.prefetch_git(commit)
    else:
        sha256 = repo.prefetch_github(commit)

    return (
        Plugin(alias or repo_name, commit, has_submodules, sha256, date=date),
        repo.redirect,
    )
def print_download_error(plugin: str, ex: Exception) -> None:
    print(f"{plugin}: {ex}", file=sys.stderr)
    ex_traceback = ex.__traceback__
    tb_lines = [
        line.rstrip("\n")
        for line in traceback.format_exception(ex.__class__, ex, ex_traceback)
    ]
    print("\n".join(tb_lines))


def check_results(
    results: List[Tuple[str, str, Union[Exception, Plugin], Dict[str, str]]]
) -> Tuple[List[Tuple[str, str, Plugin]], Dict[str, str]]:
    failures: List[Tuple[str, Exception]] = []
    plugins = []
    redirects: Dict[str, str] = {}
    for (owner, name, result, redirect) in results:
        if isinstance(result, Exception):
            failures.append((name, result))
        else:
            plugins.append((owner, name, result))
            redirects.update(redirect)

    print(f"{len(results) - len(failures)} plugins were checked", end="")
    if len(failures) == 0:
        print()
        return plugins, redirects
    else:
        print(f", {len(failures)} plugin(s) could not be downloaded:\n")

        for (plugin, exception) in failures:
            print_download_error(plugin, exception)

        sys.exit(1)


def parse_plugin_line(line: str) -> Tuple[str, str, Optional[str]]:
    name, repo = line.split("/")
    try:
        repo, alias = repo.split(" as ")
        return (name, repo, alias.strip())
    except ValueError:
        # no alias defined
        return (name, repo.strip(), None)
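
# For example (illustrative lines, not real entries from vim-plugin-names):
#
#     parse_plugin_line("owner/repo as alias\n")  ->  ("owner", "repo", "alias")
#     parse_plugin_line("owner/repo\n")           ->  ("owner", "repo", None)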


def load_plugin_spec(plugin_file: str) -> List[Tuple[str, str, Optional[str]]]:
    plugins = []
    with open(plugin_file) as f:
        for line in f:
            plugin = parse_plugin_line(line)
            if not plugin[0]:
                msg = f"Invalid repository {line}, must be in the format owner/repo[ as alias]"
                print(msg, file=sys.stderr)
                sys.exit(1)
            plugins.append(plugin)
    return plugins


def get_cache_path() -> Optional[Path]:
    xdg_cache = os.environ.get("XDG_CACHE_HOME", None)
    if xdg_cache is None:
        home = os.environ.get("HOME", None)
        if home is None:
            return None
        xdg_cache = str(Path(home, ".cache"))

    return Path(xdg_cache, "vim-plugin-cache.json")


class Cache:
    def __init__(self, initial_plugins: List[Plugin]) -> None:
        self.cache_file = get_cache_path()

        downloads = {}
        for plugin in initial_plugins:
            downloads[plugin.commit] = plugin
        downloads.update(self.load())
        self.downloads = downloads

    def load(self) -> Dict[str, Plugin]:
        if self.cache_file is None or not self.cache_file.exists():
            return {}

        downloads: Dict[str, Plugin] = {}
        with open(self.cache_file) as f:
            data = json.load(f)
            for attr in data.values():
                p = Plugin(
                    attr["name"], attr["commit"], attr["has_submodules"], attr["sha256"]
                )
                downloads[attr["commit"]] = p
        return downloads

    def store(self) -> None:
        if self.cache_file is None:
            return

        os.makedirs(self.cache_file.parent, exist_ok=True)
        with open(self.cache_file, "w+") as f:
            data = {}
            for name, attr in self.downloads.items():
                data[name] = attr.as_json()
            json.dump(data, f, indent=4, sort_keys=True)

    def __getitem__(self, key: str) -> Optional[Plugin]:
        return self.downloads.get(key, None)

    def __setitem__(self, key: str, value: Plugin) -> None:
        self.downloads[key] = value


def prefetch(
    args: Tuple[str, str, Optional[str]], cache: Cache
) -> Tuple[str, str, Union[Exception, Plugin], dict]:
    assert len(args) == 3
    owner, repo, alias = args
    try:
        plugin, redirect = prefetch_plugin(owner, repo, alias, cache)
        cache[plugin.commit] = plugin
        return (owner, repo, plugin, redirect)
    except Exception as e:
        return (owner, repo, e, {})


header = (
    "# This file has been generated by ./pkgs/misc/vim-plugins/update.py. Do not edit!"
)


def generate_nix(plugins: List[Tuple[str, str, Plugin]], outfile: str) -> None:
    sorted_plugins = sorted(plugins, key=lambda v: v[2].name.lower())

    with open(outfile, "w+") as f:
        f.write(header)
        f.write(
            """
{ lib, buildVimPluginFrom2Nix, fetchFromGitHub, overrides ? (self: super: {}) }:

let
  packages = ( self:
{"""
        )
        for owner, repo, plugin in sorted_plugins:
            if plugin.has_submodules:
                submodule_attr = "\n      fetchSubmodules = true;"
            else:
                submodule_attr = ""

            f.write(
                f"""
  {plugin.normalized_name} = buildVimPluginFrom2Nix {{
    pname = "{plugin.normalized_name}";
    version = "{plugin.version}";
    src = fetchFromGitHub {{
      owner = "{owner}";
      repo = "{repo}";
      rev = "{plugin.commit}";
      sha256 = "{plugin.sha256}";{submodule_attr}
    }};
  }};
"""
            )
        f.write(
            """
});
in lib.fix' (lib.extends overrides packages)
"""
        )
    print(f"updated {outfile}")
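
# Each generated attribute in generated.nix then looks roughly like this
# (illustrative values only):
#
#     vim-foo = buildVimPluginFrom2Nix {
#       pname = "vim-foo";
#       version = "2020-03-01";
#       src = fetchFromGitHub {
#         owner = "someowner";
#         repo = "vim-foo";
#         rev = "0123abc";
#         sha256 = "0000000000000000000000000000000000000000000000000000";
#       };
#     };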


def rewrite_input(input_file: Path, output_file: Path, redirects: dict) -> None:
    with open(input_file, "r") as f:
        lines = f.readlines()

    if redirects:
        lines = [redirects.get(line, line) for line in lines]

        cur_date_iso = datetime.now().strftime("%Y-%m-%d")
        with open(DEPRECATED, "r") as f:
            deprecations = json.load(f)
        for old, new in redirects.items():
            old_name = old.split("/")[1].split(" ")[0].strip("\n")
            new_name = new.split("/")[1].split(" ")[0].strip("\n")
            if old_name != new_name:
                deprecations[old_name] = {
                    "new": new_name,
                    "date": cur_date_iso,
                }
        with open(DEPRECATED, "w") as f:
            json.dump(deprecations, f, indent=4, sort_keys=True)

        print(
            f"""\
Redirects have been detected and {input_file} has been updated. Please take the
following steps:
1. Commit just the updated expressions as you intended to do:
       git add {output_file}
       git commit -m "vimPlugins: Update"
2. Run this script again so these changes will be reflected in the
   generated expressions:
       ./update.py
3. Commit {input_file} along with the deprecations and generated expressions:
       git add {output_file} {input_file} {DEPRECATED}
       git commit -m "vimPlugins: Update redirects"
"""
        )

    lines = sorted(lines, key=str.casefold)

    with open(input_file, "w") as f:
        f.writelines(lines)


def parse_args():
    parser = argparse.ArgumentParser(
        description=(
            "Updates nix derivations for vim plugins. "
            f"By default from {DEFAULT_IN} to {DEFAULT_OUT}"
        )
    )
    parser.add_argument(
        "--input-names",
        "-i",
        dest="input_file",
        default=DEFAULT_IN,
        help="A list of plugins in the form owner/repo",
    )
    parser.add_argument(
        "--out",
        "-o",
        dest="outfile",
        default=DEFAULT_OUT,
        help="Filename to save generated nix code",
    )

    return parser.parse_args()


def main() -> None:
    args = parse_args()
    plugin_names = load_plugin_spec(args.input_file)
    current_plugins = get_current_plugins()
    cache = Cache(current_plugins)

    prefetch_with_cache = functools.partial(prefetch, cache=cache)

    try:
        # synchronous variant for debugging
        # results = list(map(prefetch_with_cache, plugin_names))
        pool = Pool(processes=30)
        results = pool.map(prefetch_with_cache, plugin_names)
    finally:
        cache.store()

    plugins, redirects = check_results(results)

    generate_nix(plugins, args.outfile)

    rewrite_input(args.input_file, args.outfile, redirects)


if __name__ == "__main__":
    main()