Merge staging-next into staging
This commit is contained in:
commit
288c9d7892
17 changed files with 243 additions and 78 deletions
|
@ -1821,6 +1821,11 @@ hosted on GitHub, exporting a `GITHUB_API_TOKEN` is highly recommended.
|
|||
Updating packages in bulk leads to lots of breakages, which is why a
|
||||
stabilization period on the `python-unstable` branch is required.
|
||||
|
||||
If a package is fragile and often breaks during these bulk updates, it
|
||||
may be reasonable to set `passthru.skipBulkUpdate = true` in the
|
||||
derivation. This decision should not be made on a whim and should
|
||||
always be supported by a qualifying comment.
|
||||
|
||||
Once the branch is sufficiently stable it should normally be merged
|
||||
into the `staging` branch.
|
||||
|
||||
|
|
|
@ -1,5 +1,3 @@
|
|||
#!/bin/sh
|
||||
build=`nix-build -E "with import (fetchTarball "channel:nixpkgs-unstable") {}; python3.withPackages(ps: with ps; [ packaging requests toolz ])"`
|
||||
python=${build}/bin/python
|
||||
exec ${python} pkgs/development/interpreters/python/update-python-libraries/update-python-libraries.py $@
|
||||
|
||||
#!/usr/bin/env nix-shell
|
||||
#!nix-shell -I nixpkgs=channel:nixpkgs-unstable -i bash -p "python3.withPackages (ps: with ps; [ packaging requests ])" -p nix-prefetch-git
|
||||
exec python3 pkgs/development/interpreters/python/update-python-libraries/update-python-libraries.py $@
|
||||
|
|
|
@ -31,7 +31,7 @@ let
|
|||
"m.homeserver".base_url = "https://${fqdn}";
|
||||
"m.identity_server" = {};
|
||||
};
|
||||
serverConfig."m.server" = "${config.services.matrix-synapse.settings.server_name}:443";
|
||||
serverConfig."m.server" = "${fqdn}:443";
|
||||
mkWellKnown = data: ''
|
||||
add_header Content-Type application/json;
|
||||
add_header Access-Control-Allow-Origin *;
|
||||
|
|
|
@ -10,6 +10,7 @@ let
|
|||
categories = [ "Audio" "AudioVideo" ];
|
||||
icon = "psst";
|
||||
terminal = false;
|
||||
startupWMClass = "psst-gui";
|
||||
};
|
||||
|
||||
in
|
||||
|
|
|
@ -177,6 +177,23 @@ let
|
|||
|
||||
dune = dontConfigure super.dune;
|
||||
|
||||
emacsql = super.emacsql.overrideAttrs (old: {
|
||||
buildInputs = old.buildInputs ++ [ pkgs.sqlite ];
|
||||
|
||||
postBuild = ''
|
||||
cd source/sqlite
|
||||
make
|
||||
cd -
|
||||
'';
|
||||
|
||||
postInstall = (old.postInstall or "") + "\n" + ''
|
||||
install -m=755 -D source/sqlite/emacsql-sqlite \
|
||||
$out/share/emacs/site-lisp/elpa/emacsql-${old.version}/sqlite/emacsql-sqlite
|
||||
'';
|
||||
|
||||
stripDebugList = [ "share" ];
|
||||
});
|
||||
|
||||
emacsql-sqlite = super.emacsql-sqlite.overrideAttrs (old: {
|
||||
buildInputs = old.buildInputs ++ [ pkgs.sqlite ];
|
||||
|
||||
|
|
|
@ -5,16 +5,16 @@
|
|||
|
||||
buildGoModule rec {
|
||||
pname = "ticker";
|
||||
version = "4.5.5";
|
||||
version = "4.5.6";
|
||||
|
||||
src = fetchFromGitHub {
|
||||
owner = "achannarasappa";
|
||||
repo = pname;
|
||||
rev = "refs/tags/v${version}";
|
||||
hash = "sha256-7FSyW71NWmWmBNQ5QUqMJ4x9WLXpm0kvvjdjzx1yk/M=";
|
||||
hash = "sha256-h7k/zAYqpCAGn2dW+a3gOF/BN5ywjy/2Yx6THK9zk6k=";
|
||||
};
|
||||
|
||||
vendorHash = "sha256-6bosJ2AlbLZ551tCNPmvNyyReFJG+iS3SYUFti2/CAw=";
|
||||
vendorHash = "sha256-c7wU9LLRlS9kOhE4yAiKAs/npQe8lvSwPcd+/D8o9rk=";
|
||||
|
||||
ldflags = [
|
||||
"-s"
|
||||
|
|
|
@ -2,13 +2,13 @@
|
|||
|
||||
stdenv.mkDerivation rec {
|
||||
pname = "mruby";
|
||||
version = "3.1.0";
|
||||
version = "3.2.0";
|
||||
|
||||
src = fetchFromGitHub {
|
||||
owner = "mruby";
|
||||
repo = "mruby";
|
||||
rev = version;
|
||||
sha256 = "0gnzip7qfadnl0r1k8bpc9a6796sy503h77ggds02wrz7mpq32nf";
|
||||
sha256 = "sha256-MmrbWeg/G29YBvVrOtceTOZChrQ2kx9+apl7u7BiGjA=";
|
||||
};
|
||||
|
||||
nativeBuildInputs = [ ruby bison rake ];
|
||||
|
@ -24,6 +24,14 @@ stdenv.mkDerivation rec {
|
|||
|
||||
doCheck = true;
|
||||
|
||||
checkPhase = ''
|
||||
runHook preCheck
|
||||
|
||||
rake test
|
||||
|
||||
runHook postCheck
|
||||
'';
|
||||
|
||||
meta = with lib; {
|
||||
description = "An embeddable implementation of the Ruby language";
|
||||
homepage = "https://mruby.org";
|
||||
|
|
|
@ -1,8 +1,9 @@
|
|||
{ python3, runCommand, git, nix }:
|
||||
{ python3, runCommand, git, nix, nix-prefetch-git }:
|
||||
|
||||
runCommand "update-python-libraries" {
|
||||
buildInputs = [
|
||||
nix
|
||||
nix-prefetch-git
|
||||
(python3.withPackages(ps: with ps; [ packaging requests toolz ]))
|
||||
git
|
||||
];
|
||||
|
|
|
@ -12,14 +12,16 @@ to update all non-pinned libraries in that folder.
|
|||
"""
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import pathlib
|
||||
import re
|
||||
import requests
|
||||
from concurrent.futures import ThreadPoolExecutor as Pool
|
||||
from packaging.version import Version as _Version
|
||||
from packaging.version import InvalidVersion
|
||||
from packaging.specifiers import SpecifierSet
|
||||
from typing import Optional, Any
|
||||
import collections
|
||||
import subprocess
|
||||
|
||||
|
@ -31,11 +33,12 @@ EXTENSIONS = ['tar.gz', 'tar.bz2', 'tar', 'zip', '.whl']
|
|||
|
||||
PRERELEASES = False
|
||||
|
||||
BULK_UPDATE = False
|
||||
|
||||
GIT = "git"
|
||||
|
||||
NIXPGKS_ROOT = subprocess.check_output(["git", "rev-parse", "--show-toplevel"]).decode('utf-8').strip()
|
||||
NIXPKGS_ROOT = subprocess.check_output(["git", "rev-parse", "--show-toplevel"]).decode('utf-8').strip()
|
||||
|
||||
import logging
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
|
||||
|
||||
|
@ -67,6 +70,22 @@ def _get_values(attribute, text):
|
|||
values = regex.findall(text)
|
||||
return values
|
||||
|
||||
|
||||
def _get_attr_value(attr_path: str) -> Optional[Any]:
|
||||
try:
|
||||
response = subprocess.check_output([
|
||||
"nix",
|
||||
"--extra-experimental-features", "nix-command",
|
||||
"eval",
|
||||
"-f", f"{NIXPKGS_ROOT}/default.nix",
|
||||
"--json",
|
||||
f"{attr_path}"
|
||||
])
|
||||
return json.loads(response.decode())
|
||||
except (subprocess.CalledProcessError, ValueError):
|
||||
return None
|
||||
|
||||
|
||||
def _get_unique_value(attribute, text):
|
||||
"""Match attribute in text and return unique match.
|
||||
|
||||
|
@ -81,23 +100,29 @@ def _get_unique_value(attribute, text):
|
|||
else:
|
||||
raise ValueError("no value found for {}".format(attribute))
|
||||
|
||||
def _get_line_and_value(attribute, text):
|
||||
def _get_line_and_value(attribute, text, value=None):
|
||||
"""Match attribute in text. Return the line and the value of the attribute."""
|
||||
regex = '({}\s+=\s+"(.*)";)'.format(attribute)
|
||||
if value is None:
|
||||
regex = rf'({attribute}\s+=\s+\"(.*)\";)'
|
||||
else:
|
||||
regex = rf'({attribute}\s+=\s+\"({value})\";)'
|
||||
regex = re.compile(regex)
|
||||
value = regex.findall(text)
|
||||
n = len(value)
|
||||
results = regex.findall(text)
|
||||
n = len(results)
|
||||
if n > 1:
|
||||
raise ValueError("found too many values for {}".format(attribute))
|
||||
elif n == 1:
|
||||
return value[0]
|
||||
return results[0]
|
||||
else:
|
||||
raise ValueError("no value found for {}".format(attribute))
|
||||
|
||||
|
||||
def _replace_value(attribute, value, text):
|
||||
def _replace_value(attribute, value, text, oldvalue=None):
|
||||
"""Search and replace value of attribute in text."""
|
||||
old_line, old_value = _get_line_and_value(attribute, text)
|
||||
if oldvalue is None:
|
||||
old_line, old_value = _get_line_and_value(attribute, text)
|
||||
else:
|
||||
old_line, old_value = _get_line_and_value(attribute, text, oldvalue)
|
||||
new_line = old_line.replace(old_value, value)
|
||||
new_text = text.replace(old_line, new_line)
|
||||
return new_text
|
||||
|
@ -124,6 +149,23 @@ def _fetch_github(url):
|
|||
raise ValueError("request for {} failed".format(url))
|
||||
|
||||
|
||||
def _hash_to_sri(algorithm, value):
|
||||
"""Convert a hash to its SRI representation"""
|
||||
return subprocess.check_output([
|
||||
"nix",
|
||||
"hash",
|
||||
"to-sri",
|
||||
"--type", algorithm,
|
||||
value
|
||||
]).decode().strip()
|
||||
|
||||
|
||||
def _skip_bulk_update(attr_name: str) -> bool:
|
||||
return bool(_get_attr_value(
|
||||
f"{attr_name}.skipBulkUpdate"
|
||||
))
|
||||
|
||||
|
||||
SEMVER = {
|
||||
'major' : 0,
|
||||
'minor' : 1,
|
||||
|
@ -198,7 +240,7 @@ def _get_latest_version_github(package, extension, current_version, target):
|
|||
attr_path = os.environ.get("UPDATE_NIX_ATTR_PATH", f"python3Packages.{package}")
|
||||
try:
|
||||
homepage = subprocess.check_output(
|
||||
["nix", "eval", "-f", f"{NIXPGKS_ROOT}/default.nix", "--raw", f"{attr_path}.src.meta.homepage"])\
|
||||
["nix", "eval", "-f", f"{NIXPKGS_ROOT}/default.nix", "--raw", f"{attr_path}.src.meta.homepage"])\
|
||||
.decode('utf-8')
|
||||
except Exception as e:
|
||||
raise ValueError(f"Unable to determine homepage: {e}")
|
||||
|
@ -217,17 +259,47 @@ def _get_latest_version_github(package, extension, current_version, target):
|
|||
|
||||
release = next(filter(lambda x: strip_prefix(x['tag_name']) == version, releases))
|
||||
prefix = get_prefix(release['tag_name'])
|
||||
try:
|
||||
sha256 = subprocess.check_output(["nix-prefetch-url", "--type", "sha256", "--unpack", f"{release['tarball_url']}"], stderr=subprocess.DEVNULL)\
|
||||
.decode('utf-8').strip()
|
||||
except:
|
||||
# this may fail if they have both a branch and a tag of the same name, attempt tag name
|
||||
tag_url = str(release['tarball_url']).replace("tarball","tarball/refs/tags")
|
||||
sha256 = subprocess.check_output(["nix-prefetch-url", "--type", "sha256", "--unpack", tag_url], stderr=subprocess.DEVNULL)\
|
||||
.decode('utf-8').strip()
|
||||
|
||||
# some attributes require using the fetchgit
|
||||
git_fetcher_args = []
|
||||
if (_get_attr_value(f"{attr_path}.src.fetchSubmodules")):
|
||||
git_fetcher_args.append("--fetch-submodules")
|
||||
if (_get_attr_value(f"{attr_path}.src.fetchLFS")):
|
||||
git_fetcher_args.append("--fetch-lfs")
|
||||
if (_get_attr_value(f"{attr_path}.src.leaveDotGit")):
|
||||
git_fetcher_args.append("--leave-dotGit")
|
||||
|
||||
return version, sha256, prefix
|
||||
if git_fetcher_args:
|
||||
algorithm = "sha256"
|
||||
cmd = [
|
||||
"nix-prefetch-git",
|
||||
f"https://github.com/{owner}/{repo}.git",
|
||||
"--hash", algorithm,
|
||||
"--rev", f"refs/tags/{release['tag_name']}"
|
||||
]
|
||||
cmd.extend(git_fetcher_args)
|
||||
response = subprocess.check_output(cmd)
|
||||
document = json.loads(response.decode())
|
||||
hash = _hash_to_sri(algorithm, document[algorithm])
|
||||
else:
|
||||
try:
|
||||
hash = subprocess.check_output([
|
||||
"nix-prefetch-url",
|
||||
"--type", "sha256",
|
||||
"--unpack",
|
||||
f"{release['tarball_url']}"
|
||||
], stderr=subprocess.DEVNULL).decode('utf-8').strip()
|
||||
except (subprocess.CalledProcessError, UnicodeError):
|
||||
# this may fail if they have both a branch and a tag of the same name, attempt tag name
|
||||
tag_url = str(release['tarball_url']).replace("tarball","tarball/refs/tags")
|
||||
hash = subprocess.check_output([
|
||||
"nix-prefetch-url",
|
||||
"--type", "sha256",
|
||||
"--unpack",
|
||||
tag_url
|
||||
], stderr=subprocess.DEVNULL).decode('utf-8').strip()
|
||||
|
||||
return version, hash, prefix
|
||||
|
||||
|
||||
FETCHERS = {
|
||||
|
@ -272,12 +344,12 @@ def _determine_extension(text, fetcher):
|
|||
if fetcher == 'fetchPypi':
|
||||
try:
|
||||
src_format = _get_unique_value('format', text)
|
||||
except ValueError as e:
|
||||
except ValueError:
|
||||
src_format = None # format was not given
|
||||
|
||||
try:
|
||||
extension = _get_unique_value('extension', text)
|
||||
except ValueError as e:
|
||||
except ValueError:
|
||||
extension = None # extension was not given
|
||||
|
||||
if extension is None:
|
||||
|
@ -294,8 +366,6 @@ def _determine_extension(text, fetcher):
|
|||
raise ValueError('url does not point to PyPI.')
|
||||
|
||||
elif fetcher == 'fetchFromGitHub':
|
||||
if "fetchSubmodules" in text:
|
||||
raise ValueError("fetchFromGitHub fetcher doesn't support submodules")
|
||||
extension = "tar.gz"
|
||||
|
||||
return extension
|
||||
|
@ -321,6 +391,8 @@ def _update_package(path, target):
|
|||
# Attempt a fetch using each pname, e.g. backports-zoneinfo vs backports.zoneinfo
|
||||
successful_fetch = False
|
||||
for pname in pnames:
|
||||
if BULK_UPDATE and _skip_bulk_update(f"python3Packages.{pname}"):
|
||||
raise ValueError(f"Bulk update skipped for {pname}")
|
||||
try:
|
||||
new_version, new_sha256, prefix = FETCHERS[fetcher](pname, extension, version, target)
|
||||
successful_fetch = True
|
||||
|
@ -340,16 +412,20 @@ def _update_package(path, target):
|
|||
raise ValueError("no file available for {}.".format(pname))
|
||||
|
||||
text = _replace_value('version', new_version, text)
|
||||
|
||||
# hashes from pypi are 16-bit encoded sha256's, normalize it to sri to avoid merge conflicts
|
||||
# sri hashes have been the default format since nix 2.4+
|
||||
sri_hash = subprocess.check_output(["nix", "--extra-experimental-features", "nix-command", "hash", "to-sri", "--type", "sha256", new_sha256]).decode('utf-8').strip()
|
||||
sri_hash = _hash_to_sri("sha256", new_sha256)
|
||||
|
||||
|
||||
# fetchers can specify a sha256, or a sri hash
|
||||
try:
|
||||
text = _replace_value('sha256', sri_hash, text)
|
||||
except ValueError:
|
||||
text = _replace_value('hash', sri_hash, text)
|
||||
# retrieve the old output hash for a more precise match
|
||||
if old_hash := _get_attr_value(f"python3Packages.{pname}.src.outputHash"):
|
||||
# fetchers can specify a sha256, or a sri hash
|
||||
try:
|
||||
text = _replace_value('hash', sri_hash, text, old_hash)
|
||||
except ValueError:
|
||||
text = _replace_value('sha256', sri_hash, text, old_hash)
|
||||
else:
|
||||
raise ValueError(f"Unable to retrieve old hash for {pname}")
|
||||
|
||||
if fetcher == 'fetchFromGitHub':
|
||||
# in the case of fetchFromGitHub, it's common to see `rev = version;` or `rev = "v${version}";`
|
||||
|
@ -441,6 +517,10 @@ environment variables:
|
|||
|
||||
packages = list(map(os.path.abspath, args.package))
|
||||
|
||||
if len(packages) > 1:
|
||||
global BULK_UPDATE
|
||||
BULK_UPDATE = True
|
||||
|
||||
logging.info("Updating packages...")
|
||||
|
||||
# Use threads to update packages concurrently
|
||||
|
|
|
@ -4,32 +4,32 @@
|
|||
, icalendar
|
||||
, lxml
|
||||
, pytestCheckHook
|
||||
, pythonOlder
|
||||
, pytz
|
||||
, recurring-ical-events
|
||||
, requests
|
||||
, six
|
||||
, tzlocal
|
||||
, vobject
|
||||
}:
|
||||
|
||||
buildPythonPackage rec {
|
||||
pname = "caldav";
|
||||
version = "1.1.3";
|
||||
version = "1.2.0";
|
||||
|
||||
format = "setuptools";
|
||||
disabled = pythonOlder "3.7";
|
||||
|
||||
src = fetchFromGitHub {
|
||||
owner = "python-caldav";
|
||||
repo = pname;
|
||||
rev = "refs/tags/v${version}";
|
||||
hash = "sha256-ZilsCYr1M2WKSz/g5JV41JVsuHopPerxOevoG7FrEjQ=";
|
||||
hash = "sha256-ibizwN4pxqzmVozVjrAPNSrmM1+8+/Qu6UnfRerrwUk=";
|
||||
};
|
||||
|
||||
propagatedBuildInputs = [
|
||||
vobject
|
||||
lxml
|
||||
requests
|
||||
six
|
||||
icalendar
|
||||
recurring-ical-events
|
||||
];
|
||||
|
@ -52,6 +52,7 @@ buildPythonPackage rec {
|
|||
meta = with lib; {
|
||||
description = "CalDAV (RFC4791) client library";
|
||||
homepage = "https://github.com/python-caldav/caldav";
|
||||
changelog = "https://github.com/python-caldav/caldav/releases/tag/v${version}";
|
||||
license = licenses.asl20;
|
||||
maintainers = with maintainers; [ marenz dotlambda ];
|
||||
};
|
||||
|
|
|
@ -16,18 +16,22 @@
|
|||
, pomegranate
|
||||
, pyfaidx
|
||||
, python
|
||||
, pythonOlder
|
||||
, R
|
||||
}:
|
||||
|
||||
buildPythonPackage rec {
|
||||
pname = "CNVkit";
|
||||
version = "0.9.9";
|
||||
pname = "cnvkit";
|
||||
version = "0.9.10";
|
||||
format = "setuptools";
|
||||
|
||||
disabled = pythonOlder "3.7";
|
||||
|
||||
src = fetchFromGitHub {
|
||||
owner = "etal";
|
||||
repo = "cnvkit";
|
||||
rev = "v${version}";
|
||||
sha256 = "1q4l7jhr1k135an3n9aa9wsid5lk6fwxb0hcldrr6v6y76zi4gj1";
|
||||
rev = "refs/tags/v${version}";
|
||||
hash = "sha256-mCQXo3abwC06x/g51UBshqUk3dpqEVNUvx+cJ/EdYGQ=";
|
||||
};
|
||||
|
||||
postPatch = ''
|
||||
|
@ -74,6 +78,7 @@ buildPythonPackage rec {
|
|||
meta = with lib; {
|
||||
homepage = "https://cnvkit.readthedocs.io";
|
||||
description = "A Python library and command-line software toolkit to infer and visualize copy number from high-throughput DNA sequencing data";
|
||||
changelog = "https://github.com/etal/cnvkit/releases/tag/v${version}";
|
||||
license = licenses.asl20;
|
||||
maintainers = [ maintainers.jbedo ];
|
||||
};
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
{ lib, stdenv, fetchurl, makeWrapper, writeText
|
||||
, autoconf, ncurses, graphviz, doxygen
|
||||
, graphviz, doxygen
|
||||
, ocamlPackages, ltl2ba, coq, why3
|
||||
, gdk-pixbuf, wrapGAppsHook
|
||||
}:
|
||||
|
@ -17,6 +17,7 @@ let
|
|||
num
|
||||
ocamlgraph
|
||||
ppx_deriving
|
||||
ppx_deriving_yojson
|
||||
ppx_import
|
||||
stdlib-shims
|
||||
why3
|
||||
|
@ -35,32 +36,38 @@ in
|
|||
|
||||
stdenv.mkDerivation rec {
|
||||
pname = "frama-c";
|
||||
version = "25.0";
|
||||
slang = "Manganese";
|
||||
version = "26.1";
|
||||
slang = "Iron";
|
||||
|
||||
src = fetchurl {
|
||||
url = "https://frama-c.com/download/frama-c-${version}-${slang}.tar.gz";
|
||||
sha256 = "sha256-Ii3O/NJyBTVAv1ts/zae/Ee4HCjzYOthZmnD8wqLwp8=";
|
||||
url = "https://frama-c.com/download/frama-c-${version}-${slang}.tar.gz";
|
||||
hash = "sha256-UT7ajIyu8e5vzrz2oBKDDrtZqUacgUP/TRi0/kz9Qkg=";
|
||||
};
|
||||
|
||||
preConfigure = lib.optionalString stdenv.cc.isClang "configureFlagsArray=(\"--with-cpp=clang -E -C\")";
|
||||
postConfigure = "patchShebangs src/plugins/value/gen-api.sh";
|
||||
postConfigure = "patchShebangs src/plugins/eva/gen-api.sh";
|
||||
|
||||
strictDeps = true;
|
||||
|
||||
nativeBuildInputs = [ autoconf wrapGAppsHook ] ++ (with ocamlPackages; [ ocaml findlib ]);
|
||||
nativeBuildInputs = [ wrapGAppsHook ] ++ (with ocamlPackages; [ ocaml findlib dune_3 ]);
|
||||
|
||||
buildInputs = with ocamlPackages; [
|
||||
ncurses ltl2ba ocamlgraph yojson menhirLib camlzip
|
||||
dune-site dune-configurator
|
||||
ltl2ba ocamlgraph yojson menhirLib camlzip
|
||||
lablgtk3 lablgtk3-sourceview3 coq graphviz zarith apron why3 mlgmpidl doxygen
|
||||
ppx_deriving ppx_import
|
||||
ppx_deriving ppx_import ppx_deriving_yojson
|
||||
gdk-pixbuf
|
||||
];
|
||||
|
||||
enableParallelBuilding = true;
|
||||
buildPhase = ''
|
||||
runHook preBuild
|
||||
dune build -j$NIX_BUILD_CORES --release @install
|
||||
runHook postBuild
|
||||
'';
|
||||
|
||||
installFlags = [ "PREFIX=$(out)" ];
|
||||
|
||||
preFixup = ''
|
||||
gappsWrapperArgs+=(--prefix OCAMLPATH ':' ${ocamlpath})
|
||||
gappsWrapperArgs+=(--prefix OCAMLPATH ':' ${ocamlpath}:$out/lib/)
|
||||
'';
|
||||
|
||||
# Allow loading of external Frama-C plugins
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
{ lib, buildGoModule, fetchFromGitHub }:
|
||||
|
||||
let
|
||||
version = "1.17.0";
|
||||
version = "1.17.2";
|
||||
in
|
||||
buildGoModule {
|
||||
pname = "sqlc";
|
||||
|
@ -11,7 +11,7 @@ buildGoModule {
|
|||
owner = "kyleconroy";
|
||||
repo = "sqlc";
|
||||
rev = "v${version}";
|
||||
sha256 = "sha256-knblQwO+c8AD0WJ+1l6FJP8j8pdsVhKa/oiPqUJfsVY=";
|
||||
sha256 = "sha256-30dIFo07C+noWdnq2sL1pEQZzTR4FfaV0FvyW4BxCU8=";
|
||||
};
|
||||
|
||||
proxyVendor = true;
|
||||
|
|
|
@ -4,19 +4,22 @@
|
|||
, fetchFromGitHub
|
||||
, ruby
|
||||
, which
|
||||
, runCommand
|
||||
, darwin
|
||||
}:
|
||||
|
||||
rustPlatform.buildRustPackage rec {
|
||||
pname = "rbspy";
|
||||
version = "0.15.0";
|
||||
version = "0.16.0";
|
||||
|
||||
src = fetchFromGitHub {
|
||||
owner = pname;
|
||||
repo = pname;
|
||||
rev = "v${version}";
|
||||
hash = "sha256-e6ZCRIJVKl3xbJym+h+ah/J4c+s7wf1laF7p63ubE4A=";
|
||||
hash = "sha256-yM3bE79flvFSZvpkHXhhEh1MJrSSJzqZcX9aVRmz1ew=";
|
||||
};
|
||||
|
||||
cargoHash = "sha256-yhZ0QM9vZxyFCjTShbV7+Rn8w4lkPW7E7zKhrK4qa1E=";
|
||||
cargoHash = "sha256-qvx5zPEIwvh2AIFCGNbVMNIRFtVjSLR9+exbSeQ9oXI=";
|
||||
doCheck = true;
|
||||
|
||||
# The current implementation of rbspy fails to detect the version of ruby
|
||||
|
@ -25,17 +28,28 @@ rustPlatform.buildRustPackage rec {
|
|||
substituteInPlace src/core/process.rs \
|
||||
--replace /usr/bin/which '${which}/bin/which'
|
||||
substituteInPlace src/sampler/mod.rs \
|
||||
--replace /usr/bin/which '${which}/bin/which' \
|
||||
--replace 'fn test_sample_single_process_with_time_limit(' '#[ignore] fn test_sample_single_process_with_time_limit(' \
|
||||
--replace 'fn test_sample_single_process(' '#[ignore] fn test_sample_single_process(' \
|
||||
--replace 'fn test_sample_subprocesses(' '#[ignore] fn test_sample_subprocesses('
|
||||
substituteInPlace src/core/ruby_spy.rs \
|
||||
--replace 'fn test_get_trace(' '#[ignore] fn test_get_trace(' \
|
||||
--replace 'fn test_get_trace_when_process_has_exited(' '#[ignore] fn test_get_trace_when_process_has_exited('
|
||||
--replace /usr/bin/which '${which}/bin/which'
|
||||
'';
|
||||
|
||||
checkFlags = [
|
||||
"--skip=test_get_trace"
|
||||
"--skip=test_get_trace_when_process_has_exited"
|
||||
"--skip=test_sample_single_process"
|
||||
"--skip=test_sample_single_process_with_time_limit"
|
||||
"--skip=test_sample_subprocesses"
|
||||
];
|
||||
|
||||
nativeBuildInputs = [ ruby which ];
|
||||
|
||||
buildInputs = lib.optionals (stdenv.isDarwin && stdenv.isx86_64) [
|
||||
# Pull a header that contains a definition of proc_pid_rusage().
|
||||
(runCommand "${pname}_headers" { } ''
|
||||
install -Dm444 ${lib.getDev darwin.apple_sdk.sdk}/include/libproc.h $out/include/libproc.h
|
||||
'')
|
||||
];
|
||||
|
||||
LIBCLANG_PATH = lib.optionalString stdenv.isDarwin "${stdenv.cc.cc.lib}/lib";
|
||||
|
||||
meta = with lib; {
|
||||
broken = (stdenv.isLinux && stdenv.isAarch64);
|
||||
homepage = "https://rbspy.github.io/";
|
||||
|
|
|
@ -5,17 +5,17 @@
|
|||
|
||||
buildGoModule rec {
|
||||
pname = "trivy";
|
||||
version = "0.37.2";
|
||||
version = "0.37.3";
|
||||
|
||||
src = fetchFromGitHub {
|
||||
owner = "aquasecurity";
|
||||
repo = pname;
|
||||
rev = "v${version}";
|
||||
sha256 = "sha256-k5S0ttOhI+vjiGJpIPVi9ro6n3f2Cxe7HiADvs14Zuo=";
|
||||
sha256 = "sha256-fndA2rApDXwKeQEQ9Vy/9iJBJPcRWt+yJfvRdNDOwZU=";
|
||||
};
|
||||
# hash mismatch across linux and darwin
|
||||
proxyVendor = true;
|
||||
vendorSha256 = "sha256-EJw5DxiBF+gw5X+vqrnZsNCm2umOHEq6GeQ5V/Z0DrE=";
|
||||
vendorHash = "sha256-91UPIz5HM82d6s8kHEb9w/vLQgXmoV8fIcbRyXDMNL8=";
|
||||
|
||||
excludedPackages = "misc";
|
||||
|
||||
|
|
26
pkgs/tools/security/go-dork/default.nix
Normal file
26
pkgs/tools/security/go-dork/default.nix
Normal file
|
@ -0,0 +1,26 @@
|
|||
{ lib
|
||||
, buildGoModule
|
||||
, fetchFromGitHub
|
||||
}:
|
||||
|
||||
buildGoModule rec {
|
||||
pname = "go-dork";
|
||||
version = "1.0.2";
|
||||
|
||||
src = fetchFromGitHub {
|
||||
owner = "dwisiswant0";
|
||||
repo = pname;
|
||||
rev = "refs/tags/v${version}";
|
||||
hash = "sha256-tFmXutX3UnKAFFS4mO4PCv7Bhw1wJ7qjdA1ROryqYZU=";
|
||||
};
|
||||
|
||||
vendorHash = "sha256-6V58RRRPamBMDAf0gg4sQMQkoD5dWauCFtPrwf5EasI=";
|
||||
|
||||
meta = with lib; {
|
||||
description = "Dork scanner";
|
||||
homepage = "https://github.com/dwisiswant0/go-dork";
|
||||
changelog = "https://github.com/dwisiswant0/go-dork/releases/tag/v${version}";
|
||||
license = licenses.mit;
|
||||
maintainers = with maintainers; [ fab ];
|
||||
};
|
||||
}
|
|
@ -3059,6 +3059,8 @@ with pkgs;
|
|||
|
||||
go-cve-search = callPackage ../tools/security/go-cve-search { };
|
||||
|
||||
go-dork = callPackage ../tools/security/go-dork { };
|
||||
|
||||
chkcrontab = callPackage ../tools/admin/chkcrontab { };
|
||||
|
||||
claws = callPackage ../tools/misc/claws { };
|
||||
|
|
Loading…
Reference in a new issue