Merge master into staging-next
696025ae9c
49 changed files with 988 additions and 206 deletions
@@ -206,6 +206,11 @@ in

  # Or disable the firewall altogether.
  # networking.firewall.enable = false;

  # Copy the NixOS configuration file and link it from the resulting system
  # (/run/current-system/configuration.nix). This is useful in case you
  # accidentally delete configuration.nix.
  # system.copySystemConfiguration = true;

  # This value determines the NixOS release from which the default
  # settings for stateful data, like file locations and database versions
  # on your system were taken. It's perfectly fine and recommended to leave
36 pkgs/applications/accessibility/wvkbd/default.nix (new file)

@@ -0,0 +1,36 @@
{ stdenv
, lib
, fetchFromGitHub
, wayland-scanner
, wayland
, pango
, glib
, harfbuzz
, cairo
, pkg-config
, libxkbcommon
}:

stdenv.mkDerivation rec {
  pname = "wvkbd";
  version = "0.7";

  src = fetchFromGitHub {
    owner = "jjsullivan5196";
    repo = pname;
    rev = "v${version}";
    sha256 = "sha256-5UV2PMrLXtF3AxjfPxxwFRkgVef+Ap8nG1v795o0bWE=";
  };

  nativeBuildInputs = [ pkg-config ];
  buildInputs = [ wayland-scanner wayland pango glib harfbuzz cairo libxkbcommon ];
  installFlags = [ "PREFIX=$(out)" ];

  meta = with lib; {
    homepage = "https://github.com/jjsullivan5196/wvkbd";
    description = "On-screen keyboard for wlroots";
    maintainers = [ maintainers.elohmeier ];
    platforms = platforms.linux;
    license = licenses.gpl3Plus;
  };
}
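The wvkbd attribute backed by this file is added to the top-level package set later in this commit. A minimal sketch, assuming an ordinary NixOS configuration, of making the new on-screen keyboard available; only pkgs.wvkbd comes from this commit, the surrounding module fragment is illustrative:

    # Illustrative NixOS module fragment; pkgs.wvkbd is the attribute added
    # by this commit in all-packages.nix.
    { pkgs, ... }:
    {
      environment.systemPackages = [ pkgs.wvkbd ];
    }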
@@ -233,6 +233,9 @@

  sv-kalender = callPackage ./sv-kalender { };

  tree-sitter-langs = callPackage ./tree-sitter-langs { final = self; };
  tsc = callPackage ./tsc { };

  youtube-dl = callPackage ./youtube-dl { };

  # From old emacsPackages (pre emacsPackagesNg)
@@ -0,0 +1,32 @@
[
  "tree-sitter-agda",
  "tree-sitter-bash",
  "tree-sitter-c",
  "tree-sitter-c-sharp",
  "tree-sitter-cpp",
  "tree-sitter-css",
  "tree-sitter-elixir",
  "tree-sitter-elm",
  "tree-sitter-fluent",
  "tree-sitter-go",
  "tree-sitter-haskell",
  "tree-sitter-hcl",
  "tree-sitter-html",
  "tree-sitter-java",
  "tree-sitter-javascript",
  "tree-sitter-jsdoc",
  "tree-sitter-json",
  "tree-sitter-julia",
  "tree-sitter-nix",
  "tree-sitter-ocaml",
  "tree-sitter-php",
  "tree-sitter-prisma",
  "tree-sitter-python",
  "tree-sitter-ruby",
  "tree-sitter-rust",
  "tree-sitter-scala",
  "tree-sitter-swift",
  "tree-sitter-typescript",
  "tree-sitter-verilog",
  "tree-sitter-zig"
]
|
@@ -0,0 +1,44 @@
{ lib
, pkgs
, symlinkJoin
, fetchzip
, melpaBuild
, stdenv
, fetchFromGitHub
, writeText
, melpaStablePackages
, runCommand
, tree-sitter-grammars
, plugins ? map (g: tree-sitter-grammars.${g}) (lib.importJSON ./default-grammars.json)
, final
}:

let
  inherit (melpaStablePackages) tree-sitter-langs;

  libSuffix = if stdenv.isDarwin then "dylib" else "so";
  soName = g: lib.removeSuffix "-grammar" (lib.removePrefix "tree-sitter-" g.pname) + "." + libSuffix;

  grammarDir = runCommand "emacs-tree-sitter-grammars" {
    # Fake same version number as upstream language bundle to prevent triggering runtime downloads
    inherit (tree-sitter-langs) version;
  } (''
    install -d $out/langs/bin
    echo -n $version > $out/langs/bin/BUNDLE-VERSION
  '' + lib.concatStringsSep "\n" (map (
    g: "ln -s ${g}/parser $out/langs/bin/${soName g}") plugins
  ));

in
melpaStablePackages.tree-sitter-langs.overrideAttrs (old: {
  postPatch = old.postPatch or "" + ''
    substituteInPlace ./tree-sitter-langs-build.el \
      --replace "tree-sitter-langs-grammar-dir tree-sitter-langs--dir" "tree-sitter-langs-grammar-dir \"${grammarDir}/langs\""
  '';

  passthru = old.passthru or {} // {
    inherit plugins;
    withPlugins = fn: final.tree-sitter-langs.override { plugins = fn tree-sitter-grammars; };
  };

})
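The passthru.withPlugins helper defined above rebuilds tree-sitter-langs against a caller-chosen set of grammars. A hedged sketch of how it could be invoked; the emacs.pkgs attribute path is the one the update script below also uses, and the grammar selection is only an example:

    # Illustrative: bundle only the Python and Nix grammars instead of the
    # full default-grammars.json list.
    emacs.pkgs.tree-sitter-langs.withPlugins
      (grammars: [ grammars.tree-sitter-python grammars.tree-sitter-nix ])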
|
@ -0,0 +1,74 @@
|
|||
#!/usr/bin/env nix-shell
|
||||
#! nix-shell ../../../../../../. -i python3 -p python3 -p nix
|
||||
from os.path import (
|
||||
dirname,
|
||||
abspath,
|
||||
join,
|
||||
)
|
||||
from typing import (
|
||||
List,
|
||||
Any,
|
||||
)
|
||||
import subprocess
|
||||
import json
|
||||
import sys
|
||||
import os
|
||||
|
||||
|
||||
def fmt_grammar(grammar: str) -> str:
|
||||
return "tree-sitter-" + grammar
|
||||
|
||||
|
||||
def eval_expr(nixpkgs: str, expr: str) -> Any:
|
||||
p = subprocess.run(
|
||||
[
|
||||
"nix-instantiate",
|
||||
"--json",
|
||||
"--eval",
|
||||
"--expr",
|
||||
("with import %s {}; %s" % (nixpkgs, expr)),
|
||||
],
|
||||
check=True,
|
||||
stdout=subprocess.PIPE,
|
||||
)
|
||||
return json.loads(p.stdout)
|
||||
|
||||
|
||||
def check_grammar_exists(nixpkgs: str, grammar: str) -> bool:
|
||||
return eval_expr(
|
||||
nixpkgs, f'lib.hasAttr "{fmt_grammar(grammar)}" tree-sitter-grammars'
|
||||
)
|
||||
|
||||
|
||||
def build_attr(nixpkgs, attr: str) -> str:
|
||||
return (
|
||||
subprocess.run(
|
||||
["nix-build", "--no-out-link", nixpkgs, "-A", attr],
|
||||
check=True,
|
||||
stdout=subprocess.PIPE,
|
||||
)
|
||||
.stdout.decode()
|
||||
.strip()
|
||||
)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
cwd = dirname(abspath(__file__))
|
||||
nixpkgs = abspath(join(cwd, "../../../../../.."))
|
||||
|
||||
src_dir = build_attr(nixpkgs, "emacs.pkgs.tree-sitter-langs.src")
|
||||
|
||||
existing: List[str] = []
|
||||
|
||||
grammars = os.listdir(join(src_dir, "repos"))
|
||||
for g in grammars:
|
||||
exists = check_grammar_exists(nixpkgs, g)
|
||||
if exists:
|
||||
existing.append(fmt_grammar(g))
|
||||
else:
|
||||
sys.stderr.write("Missing grammar: " + fmt_grammar(g) + "\n")
|
||||
sys.stderr.flush()
|
||||
|
||||
with open(join(cwd, "default-grammars.json"), mode="w") as f:
|
||||
json.dump(sorted(existing), f, indent=2)
|
||||
f.write("\n")
|
|
@ -0,0 +1,89 @@
|
|||
{ lib
|
||||
, symlinkJoin
|
||||
, melpaBuild
|
||||
, fetchFromGitHub
|
||||
, rustPlatform
|
||||
, writeText
|
||||
, clang
|
||||
, llvmPackages
|
||||
|
||||
, runtimeShell
|
||||
, writeScript
|
||||
, python3
|
||||
, nix-prefetch-github
|
||||
, nix
|
||||
}:
|
||||
|
||||
let
|
||||
|
||||
srcMeta = lib.importJSON ./src.json;
|
||||
inherit (srcMeta) version;
|
||||
|
||||
src = fetchFromGitHub srcMeta.src;
|
||||
|
||||
tsc = melpaBuild {
|
||||
inherit src;
|
||||
inherit version;
|
||||
|
||||
pname = "tsc";
|
||||
commit = version;
|
||||
|
||||
sourceRoot = "source/core";
|
||||
|
||||
recipe = writeText "recipe" ''
|
||||
(tsc
|
||||
:repo "emacs-tree-sitter/elisp-tree-sitter"
|
||||
:fetcher github)
|
||||
'';
|
||||
};
|
||||
|
||||
tsc-dyn = rustPlatform.buildRustPackage {
|
||||
inherit version;
|
||||
inherit src;
|
||||
|
||||
pname = "tsc-dyn";
|
||||
|
||||
nativeBuildInputs = [ clang ];
|
||||
sourceRoot = "source/core";
|
||||
|
||||
configurePhase = ''
|
||||
export LIBCLANG_PATH="${llvmPackages.libclang.lib}/lib"
|
||||
'';
|
||||
|
||||
postInstall = ''
|
||||
LIB=($out/lib/libtsc_dyn.*)
|
||||
TSC_PATH=$out/share/emacs/site-lisp/elpa/tsc-${version}
|
||||
install -d $TSC_PATH
|
||||
install -m444 $out/lib/libtsc_dyn.* $TSC_PATH/''${LIB/*libtsc_/tsc-}
|
||||
echo -n $version > $TSC_PATH/DYN-VERSION
|
||||
rm -r $out/lib
|
||||
'';
|
||||
|
||||
inherit (srcMeta) cargoSha256;
|
||||
};
|
||||
|
||||
in symlinkJoin {
|
||||
name = "tsc-${version}";
|
||||
paths = [ tsc tsc-dyn ];
|
||||
|
||||
passthru = {
|
||||
updateScript = let
|
||||
pythonEnv = python3.withPackages(ps: [ ps.requests ]);
|
||||
in writeScript "tsc-update" ''
|
||||
#!${runtimeShell}
|
||||
set -euo pipefail
|
||||
export PATH=${lib.makeBinPath [
|
||||
nix-prefetch-github
|
||||
nix
|
||||
pythonEnv
|
||||
]}:$PATH
|
||||
exec python3 ${builtins.toString ./update.py} ${builtins.toString ./.}
|
||||
'';
|
||||
};
|
||||
|
||||
meta = {
|
||||
description = "The core APIs of the Emacs binding for tree-sitter.";
|
||||
license = lib.licenses.mit;
|
||||
maintainers = with lib.maintainers; [ pimeys ];
|
||||
};
|
||||
}
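The updateScript exposed via passthru above drives the update.py added below; that script recovers the real cargoSha256 by instantiating a probe derivation with lib.fakeSha256 and scraping the "hash mismatch ... got:" line from the failed realisation. A sketch of that probe expression, mirroring the template embedded in update.py and assuming nixpkgs attributes are in scope:

    # Probe derivation: guaranteed to fail the fixed-output hash check so that
    # the build error reveals the correct cargoSha256.
    rustPlatform.buildRustPackage {
      pname = "tsc-dyn";
      version = "0.18.0";            # illustrative; update.py substitutes the latest tag
      nativeBuildInputs = [ clang ];
      src = fetchFromGitHub (lib.importJSON ./src.json);
      sourceRoot = "source/core";
      cargoSha256 = lib.fakeSha256;
    }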
|
10 pkgs/applications/editors/emacs/elisp-packages/tsc/src.json (new file)

@@ -0,0 +1,10 @@
{
  "src": {
    "owner": "emacs-tree-sitter",
    "repo": "elisp-tree-sitter",
    "rev": "909717c685ff5a2327fa2ca8fb8a25216129361c",
    "sha256": "LrakDpP3ZhRQqz47dPcyoQnu5lROdaNlxGaQfQT6u+k="
  },
  "version": "0.18.0",
  "cargoSha256": "sha256-IRCZqszBkGF8anF/kpcPOzHdOP4lAtJBAp6FS5tAOx8="
}
122 pkgs/applications/editors/emacs/elisp-packages/tsc/update.py (new file)
|
@ -0,0 +1,122 @@
|
|||
#!/usr/bin/env python3
|
||||
from textwrap import dedent
|
||||
from os.path import (
|
||||
abspath,
|
||||
dirname,
|
||||
join,
|
||||
)
|
||||
from typing import (
|
||||
Dict,
|
||||
Any,
|
||||
)
|
||||
import subprocess
|
||||
import tempfile
|
||||
import json
|
||||
import sys
|
||||
import re
|
||||
|
||||
import requests
|
||||
|
||||
|
||||
def eval_drv(nixpkgs: str, expr: str) -> Any:
|
||||
expr = "\n".join(
|
||||
(
|
||||
"with (import %s {});" % nixpkgs,
|
||||
expr,
|
||||
)
|
||||
)
|
||||
|
||||
with tempfile.NamedTemporaryFile(mode="w") as f:
|
||||
f.write(dedent(expr))
|
||||
f.flush()
|
||||
p = subprocess.run(
|
||||
["nix-instantiate", "--json", f.name], stdout=subprocess.PIPE, check=True
|
||||
)
|
||||
|
||||
return p.stdout.decode().strip()
|
||||
|
||||
|
||||
def get_src(tag_name: str) -> Dict[str, str]:
|
||||
p = subprocess.run(
|
||||
[
|
||||
"nix-prefetch-github",
|
||||
"--rev",
|
||||
tag_name,
|
||||
"--json",
|
||||
"emacs-tree-sitter",
|
||||
"elisp-tree-sitter",
|
||||
],
|
||||
stdout=subprocess.PIPE,
|
||||
check=True,
|
||||
)
|
||||
src = json.loads(p.stdout)
|
||||
|
||||
fields = ["owner", "repo", "rev", "sha256"]
|
||||
|
||||
return {f: src[f] for f in fields}
|
||||
|
||||
|
||||
def get_cargo_sha256(drv_path: str):
|
||||
# Note: No check=True since we expect this command to fail
|
||||
p = subprocess.run(["nix-store", "-r", drv_path], stderr=subprocess.PIPE)
|
||||
|
||||
stderr = p.stderr.decode()
|
||||
lines = iter(stderr.split("\n"))
|
||||
|
||||
for l in lines:
|
||||
if l.startswith("error: hash mismatch in fixed-output derivation"):
|
||||
break
|
||||
else:
|
||||
raise ValueError("Did not find expected hash mismatch message")
|
||||
|
||||
for l in lines:
|
||||
m = re.match(r"\s+got:\s+(.+)$", l)
|
||||
if m:
|
||||
return m.group(1)
|
||||
|
||||
raise ValueError("Could not extract actual sha256 hash: ", stderr)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
cwd = sys.argv[1]
|
||||
|
||||
nixpkgs = abspath(join(cwd, "../../../../../.."))
|
||||
|
||||
tag_name = requests.get(
|
||||
"https://api.github.com/repos/emacs-tree-sitter/elisp-tree-sitter/releases/latest"
|
||||
).json()["tag_name"]
|
||||
|
||||
src = get_src(tag_name)
|
||||
|
||||
with tempfile.NamedTemporaryFile(mode="w") as f:
|
||||
json.dump(src, f)
|
||||
f.flush()
|
||||
|
||||
drv_path = eval_drv(
|
||||
nixpkgs,
|
||||
"""
|
||||
rustPlatform.buildRustPackage {
|
||||
pname = "tsc-dyn";
|
||||
version = "%s";
|
||||
nativeBuildInputs = [ clang ];
|
||||
src = fetchFromGitHub (lib.importJSON %s);
|
||||
sourceRoot = "source/core";
|
||||
cargoSha256 = lib.fakeSha256;
|
||||
}
|
||||
"""
|
||||
% (tag_name, f.name),
|
||||
)
|
||||
|
||||
cargo_sha256 = get_cargo_sha256(drv_path)
|
||||
|
||||
with open(join(cwd, "src.json"), mode="w") as f:
|
||||
json.dump(
|
||||
{
|
||||
"src": src,
|
||||
"version": tag_name,
|
||||
"cargoSha256": cargo_sha256,
|
||||
},
|
||||
f,
|
||||
indent=2,
|
||||
)
|
||||
f.write("\n")
|
|
@ -16,6 +16,9 @@ stdenv.mkDerivation rec {
|
|||
|
||||
nativeBuildInputs = [ cmake pkg-config wrapGAppsHook ];
|
||||
|
||||
# This patch is upstream; remove it in 5.9.
|
||||
patches = [ ./fix-6324.patch ];
|
||||
|
||||
buildInputs = [
|
||||
pixman libpthreadstubs gtkmm3 libXau libXdmcp
|
||||
lcms2 libiptcdata libcanberra-gtk3 fftw expat pcre libsigcxx lensfun librsvg
|
||||
|
|
356 pkgs/applications/graphics/rawtherapee/fix-6324.patch (new file)
|
@ -0,0 +1,356 @@
|
|||
See:
|
||||
https://github.com/Beep6581/RawTherapee/issues/6324
|
||||
https://github.com/Beep6581/RawTherapee/commit/2e0137d54243eb729d4a5f939c4320ec8f8f415d
|
||||
|
||||
diff --git a/rtengine/canon_cr3_decoder.cc b/rtengine/canon_cr3_decoder.cc
|
||||
index 6274154cb..98c743dad 100644
|
||||
--- a/rtengine/canon_cr3_decoder.cc
|
||||
+++ b/rtengine/canon_cr3_decoder.cc
|
||||
@@ -662,7 +662,7 @@ std::uint32_t _byteswap_ulong(std::uint32_t x)
|
||||
#endif
|
||||
|
||||
struct LibRaw_abstract_datastream {
|
||||
- IMFILE* ifp;
|
||||
+ rtengine::IMFILE* ifp;
|
||||
|
||||
void lock()
|
||||
{
|
||||
diff --git a/rtengine/dcraw.cc b/rtengine/dcraw.cc
|
||||
index 812f122b3..5da696af2 100644
|
||||
--- a/rtengine/dcraw.cc
|
||||
+++ b/rtengine/dcraw.cc
|
||||
@@ -2025,7 +2025,7 @@ void CLASS phase_one_load_raw_c()
|
||||
#endif
|
||||
{
|
||||
int len[2], pred[2];
|
||||
- IMFILE ifpthr = *ifp;
|
||||
+ rtengine::IMFILE ifpthr = *ifp;
|
||||
ifpthr.plistener = nullptr;
|
||||
|
||||
#ifdef _OPENMP
|
||||
@@ -3380,7 +3380,7 @@ void CLASS sony_arw2_load_raw()
|
||||
{
|
||||
uchar *data = new (std::nothrow) uchar[raw_width + 1];
|
||||
merror(data, "sony_arw2_load_raw()");
|
||||
- IMFILE ifpthr = *ifp;
|
||||
+ rtengine::IMFILE ifpthr = *ifp;
|
||||
int pos = ifpthr.pos;
|
||||
ushort pix[16];
|
||||
|
||||
@@ -6394,7 +6394,7 @@ int CLASS parse_tiff_ifd (int base)
|
||||
unsigned sony_curve[] = { 0,0,0,0,0,4095 };
|
||||
unsigned *buf, sony_offset=0, sony_length=0, sony_key=0;
|
||||
struct jhead jh;
|
||||
-/*RT*/ IMFILE *sfp;
|
||||
+/*RT*/ rtengine::IMFILE *sfp;
|
||||
/*RT*/ int pana_raw = 0;
|
||||
|
||||
if (tiff_nifds >= sizeof tiff_ifd / sizeof tiff_ifd[0])
|
||||
@@ -6958,7 +6958,7 @@ it under the terms of the one of two licenses as you choose:
|
||||
fread (buf, sony_length, 1, ifp);
|
||||
sony_decrypt (buf, sony_length/4, 1, sony_key);
|
||||
sfp = ifp;
|
||||
-/*RT*/ ifp = fopen (buf, sony_length);
|
||||
+/*RT*/ ifp = rtengine::fopen (buf, sony_length);
|
||||
// if ((ifp = tmpfile())) {
|
||||
// fwrite (buf, sony_length, 1, ifp);
|
||||
// fseek (ifp, 0, SEEK_SET);
|
||||
@@ -7264,7 +7264,7 @@ void CLASS parse_external_jpeg()
|
||||
{
|
||||
const char *file, *ext;
|
||||
char *jname, *jfile, *jext;
|
||||
-/*RT*/ IMFILE *save=ifp;
|
||||
+/*RT*/ rtengine::IMFILE *save=ifp;
|
||||
|
||||
ext = strrchr (ifname, '.');
|
||||
file = strrchr (ifname, '/');
|
||||
@@ -7292,7 +7292,7 @@ void CLASS parse_external_jpeg()
|
||||
*jext = '0';
|
||||
}
|
||||
if (strcmp (jname, ifname)) {
|
||||
-/*RT*/ if ((ifp = fopen (jname))) {
|
||||
+/*RT*/ if ((ifp = rtengine::fopen (jname))) {
|
||||
// if ((ifp = fopen (jname, "rb"))) {
|
||||
if (verbose)
|
||||
fprintf (stderr,_("Reading metadata from %s ...\n"), jname);
|
||||
diff --git a/rtengine/dcraw.h b/rtengine/dcraw.h
|
||||
index 89c1fcaff..f25157088 100644
|
||||
--- a/rtengine/dcraw.h
|
||||
+++ b/rtengine/dcraw.h
|
||||
@@ -73,7 +73,7 @@ public:
|
||||
|
||||
protected:
|
||||
int exif_base, ciff_base, ciff_len;
|
||||
- IMFILE *ifp;
|
||||
+ rtengine::IMFILE *ifp;
|
||||
FILE *ofp;
|
||||
short order;
|
||||
const char *ifname;
|
||||
@@ -125,7 +125,7 @@ protected:
|
||||
int cur_buf_size; // buffer size
|
||||
uchar *cur_buf; // currently read block
|
||||
int fillbytes; // Counter to add extra byte for block size N*16
|
||||
- IMFILE *input;
|
||||
+ rtengine::IMFILE *input;
|
||||
struct int_pair grad_even[3][41]; // tables of gradients
|
||||
struct int_pair grad_odd[3][41];
|
||||
ushort *linealloc;
|
||||
@@ -278,7 +278,7 @@ void parse_redcine();
|
||||
class getbithuff_t
|
||||
{
|
||||
public:
|
||||
- getbithuff_t(DCraw *p,IMFILE *&i, unsigned &z):parent(p),bitbuf(0),vbits(0),reset(0),ifp(i),zero_after_ff(z){}
|
||||
+ getbithuff_t(DCraw *p,rtengine::IMFILE *&i, unsigned &z):parent(p),bitbuf(0),vbits(0),reset(0),ifp(i),zero_after_ff(z){}
|
||||
unsigned operator()(int nbits, ushort *huff);
|
||||
|
||||
private:
|
||||
@@ -288,7 +288,7 @@ private:
|
||||
DCraw *parent;
|
||||
unsigned bitbuf;
|
||||
int vbits, reset;
|
||||
- IMFILE *&ifp;
|
||||
+ rtengine::IMFILE *&ifp;
|
||||
unsigned &zero_after_ff;
|
||||
};
|
||||
getbithuff_t getbithuff;
|
||||
@@ -296,7 +296,7 @@ getbithuff_t getbithuff;
|
||||
class nikbithuff_t
|
||||
{
|
||||
public:
|
||||
- explicit nikbithuff_t(IMFILE *&i):bitbuf(0),errors(0),vbits(0),ifp(i){}
|
||||
+ explicit nikbithuff_t(rtengine::IMFILE *&i):bitbuf(0),errors(0),vbits(0),ifp(i){}
|
||||
void operator()() {bitbuf = vbits = 0;};
|
||||
unsigned operator()(int nbits, ushort *huff);
|
||||
unsigned errorCount() { return errors; }
|
||||
@@ -309,7 +309,7 @@ private:
|
||||
}
|
||||
unsigned bitbuf, errors;
|
||||
int vbits;
|
||||
- IMFILE *&ifp;
|
||||
+ rtengine::IMFILE *&ifp;
|
||||
};
|
||||
nikbithuff_t nikbithuff;
|
||||
|
||||
@@ -378,7 +378,7 @@ void parse_qt (int end);
|
||||
// ph1_bithuff(int nbits, ushort *huff);
|
||||
class ph1_bithuff_t {
|
||||
public:
|
||||
- ph1_bithuff_t(DCraw *p, IMFILE *i, short &o):order(o),ifp(i),bitbuf(0),vbits(0){}
|
||||
+ ph1_bithuff_t(DCraw *p, rtengine::IMFILE *i, short &o):order(o),ifp(i),bitbuf(0),vbits(0){}
|
||||
unsigned operator()(int nbits, ushort *huff);
|
||||
unsigned operator()(int nbits);
|
||||
unsigned operator()();
|
||||
@@ -412,7 +412,7 @@ private:
|
||||
}
|
||||
|
||||
short ℴ
|
||||
- IMFILE* const ifp;
|
||||
+ rtengine::IMFILE* const ifp;
|
||||
UINT64 bitbuf;
|
||||
int vbits;
|
||||
};
|
||||
@@ -430,11 +430,11 @@ void nokia_load_raw();
|
||||
|
||||
class pana_bits_t{
|
||||
public:
|
||||
- pana_bits_t(IMFILE *i, unsigned &u, unsigned enc):
|
||||
+ pana_bits_t(rtengine::IMFILE *i, unsigned &u, unsigned enc):
|
||||
ifp(i), load_flags(u), vbits(0), encoding(enc) {}
|
||||
unsigned operator()(int nbits, unsigned *bytes=nullptr);
|
||||
private:
|
||||
- IMFILE *ifp;
|
||||
+ rtengine::IMFILE *ifp;
|
||||
unsigned &load_flags;
|
||||
uchar buf[0x4000];
|
||||
int vbits;
|
||||
diff --git a/rtengine/dfmanager.cc b/rtengine/dfmanager.cc
|
||||
index 1fb1d2e1b..951df2248 100644
|
||||
--- a/rtengine/dfmanager.cc
|
||||
+++ b/rtengine/dfmanager.cc
|
||||
@@ -540,7 +540,7 @@ std::vector<badPix> *DFManager::getHotPixels ( const std::string &mak, const std
|
||||
|
||||
int DFManager::scanBadPixelsFile( Glib::ustring filename )
|
||||
{
|
||||
- FILE *file = fopen( filename.c_str(), "r" );
|
||||
+ FILE *file = ::fopen( filename.c_str(), "r" );
|
||||
|
||||
if( !file ) {
|
||||
return false;
|
||||
diff --git a/rtengine/myfile.cc b/rtengine/myfile.cc
|
||||
index 842766dcf..2321d18bb 100644
|
||||
--- a/rtengine/myfile.cc
|
||||
+++ b/rtengine/myfile.cc
|
||||
@@ -70,7 +70,7 @@ int munmap(void *start, size_t length)
|
||||
|
||||
#ifdef MYFILE_MMAP
|
||||
|
||||
-IMFILE* fopen (const char* fname)
|
||||
+rtengine::IMFILE* rtengine::fopen (const char* fname)
|
||||
{
|
||||
int fd;
|
||||
|
||||
@@ -123,13 +123,13 @@ IMFILE* fopen (const char* fname)
|
||||
return mf;
|
||||
}
|
||||
|
||||
-IMFILE* gfopen (const char* fname)
|
||||
+rtengine::IMFILE* rtengine::gfopen (const char* fname)
|
||||
{
|
||||
return fopen(fname);
|
||||
}
|
||||
#else
|
||||
|
||||
-IMFILE* fopen (const char* fname)
|
||||
+rtengine::IMFILE* rtengine::fopen (const char* fname)
|
||||
{
|
||||
|
||||
FILE* f = g_fopen (fname, "rb");
|
||||
@@ -152,7 +152,7 @@ IMFILE* fopen (const char* fname)
|
||||
return mf;
|
||||
}
|
||||
|
||||
-IMFILE* gfopen (const char* fname)
|
||||
+rtengine::IMFILE* rtengine::gfopen (const char* fname)
|
||||
{
|
||||
|
||||
FILE* f = g_fopen (fname, "rb");
|
||||
@@ -176,7 +176,7 @@ IMFILE* gfopen (const char* fname)
|
||||
}
|
||||
#endif //MYFILE_MMAP
|
||||
|
||||
-IMFILE* fopen (unsigned* buf, int size)
|
||||
+rtengine::IMFILE* rtengine::fopen (unsigned* buf, int size)
|
||||
{
|
||||
|
||||
IMFILE* mf = new IMFILE;
|
||||
@@ -190,7 +190,7 @@ IMFILE* fopen (unsigned* buf, int size)
|
||||
return mf;
|
||||
}
|
||||
|
||||
-void fclose (IMFILE* f)
|
||||
+void rtengine::fclose (IMFILE* f)
|
||||
{
|
||||
#ifdef MYFILE_MMAP
|
||||
|
||||
@@ -207,7 +207,7 @@ void fclose (IMFILE* f)
|
||||
delete f;
|
||||
}
|
||||
|
||||
-int fscanf (IMFILE* f, const char* s ...)
|
||||
+int rtengine::fscanf (IMFILE* f, const char* s ...)
|
||||
{
|
||||
// fscanf not easily wrapped since we have no terminating \0 at end
|
||||
// of file data and vsscanf() won't tell us how many characters that
|
||||
@@ -253,7 +253,7 @@ int fscanf (IMFILE* f, const char* s ...)
|
||||
}
|
||||
|
||||
|
||||
-char* fgets (char* s, int n, IMFILE* f)
|
||||
+char* rtengine::fgets (char* s, int n, IMFILE* f)
|
||||
{
|
||||
|
||||
if (f->pos >= f->size) {
|
||||
@@ -270,7 +270,7 @@ char* fgets (char* s, int n, IMFILE* f)
|
||||
return s;
|
||||
}
|
||||
|
||||
-void imfile_set_plistener(IMFILE *f, rtengine::ProgressListener *plistener, double progress_range)
|
||||
+void rtengine::imfile_set_plistener(IMFILE *f, rtengine::ProgressListener *plistener, double progress_range)
|
||||
{
|
||||
f->plistener = plistener;
|
||||
f->progress_range = progress_range;
|
||||
@@ -278,7 +278,7 @@ void imfile_set_plistener(IMFILE *f, rtengine::ProgressListener *plistener, doub
|
||||
f->progress_current = 0;
|
||||
}
|
||||
|
||||
-void imfile_update_progress(IMFILE *f)
|
||||
+void rtengine::imfile_update_progress(IMFILE *f)
|
||||
{
|
||||
if (!f->plistener || f->progress_current < f->progress_next) {
|
||||
return;
|
||||
diff --git a/rtengine/myfile.h b/rtengine/myfile.h
|
||||
index 423edea9a..c655696e6 100644
|
||||
--- a/rtengine/myfile.h
|
||||
+++ b/rtengine/myfile.h
|
||||
@@ -30,8 +30,6 @@ namespace rtengine
|
||||
|
||||
class ProgressListener;
|
||||
|
||||
-}
|
||||
-
|
||||
struct IMFILE {
|
||||
int fd;
|
||||
ssize_t pos;
|
||||
@@ -141,3 +139,5 @@ inline unsigned char* fdata(int offset, IMFILE* f)
|
||||
|
||||
int fscanf (IMFILE* f, const char* s ...);
|
||||
char* fgets (char* s, int n, IMFILE* f);
|
||||
+
|
||||
+}
|
||||
diff --git a/rtengine/rtthumbnail.cc b/rtengine/rtthumbnail.cc
|
||||
index 9da601e2a..097b9e711 100644
|
||||
--- a/rtengine/rtthumbnail.cc
|
||||
+++ b/rtengine/rtthumbnail.cc
|
||||
@@ -1922,7 +1922,7 @@ bool Thumbnail::writeImage (const Glib::ustring& fname)
|
||||
|
||||
Glib::ustring fullFName = fname + ".rtti";
|
||||
|
||||
- FILE* f = g_fopen (fullFName.c_str (), "wb");
|
||||
+ FILE* f = ::g_fopen (fullFName.c_str (), "wb");
|
||||
|
||||
if (!f) {
|
||||
return false;
|
||||
@@ -1965,7 +1965,7 @@ bool Thumbnail::readImage (const Glib::ustring& fname)
|
||||
return false;
|
||||
}
|
||||
|
||||
- FILE* f = g_fopen(fullFName.c_str (), "rb");
|
||||
+ FILE* f = ::g_fopen(fullFName.c_str (), "rb");
|
||||
|
||||
if (!f) {
|
||||
return false;
|
||||
@@ -2191,7 +2191,7 @@ bool Thumbnail::writeData (const Glib::ustring& fname)
|
||||
return false;
|
||||
}
|
||||
|
||||
- FILE *f = g_fopen (fname.c_str (), "wt");
|
||||
+ FILE *f = ::g_fopen (fname.c_str (), "wt");
|
||||
|
||||
if (!f) {
|
||||
if (settings->verbose) {
|
||||
@@ -2214,7 +2214,7 @@ bool Thumbnail::readEmbProfile (const Glib::ustring& fname)
|
||||
embProfile = nullptr;
|
||||
embProfileLength = 0;
|
||||
|
||||
- FILE* f = g_fopen (fname.c_str (), "rb");
|
||||
+ FILE* f = ::g_fopen (fname.c_str (), "rb");
|
||||
|
||||
if (f) {
|
||||
if (!fseek (f, 0, SEEK_END)) {
|
||||
@@ -2242,7 +2242,7 @@ bool Thumbnail::writeEmbProfile (const Glib::ustring& fname)
|
||||
{
|
||||
|
||||
if (embProfileData) {
|
||||
- FILE* f = g_fopen (fname.c_str (), "wb");
|
||||
+ FILE* f = ::g_fopen (fname.c_str (), "wb");
|
||||
|
||||
if (f) {
|
||||
fwrite (embProfileData, 1, embProfileLength, f);
|
||||
@@ -2257,7 +2257,7 @@ bool Thumbnail::writeEmbProfile (const Glib::ustring& fname)
|
||||
bool Thumbnail::readAEHistogram (const Glib::ustring& fname)
|
||||
{
|
||||
|
||||
- FILE* f = g_fopen(fname.c_str(), "rb");
|
||||
+ FILE* f = ::g_fopen(fname.c_str(), "rb");
|
||||
|
||||
if (!f) {
|
||||
aeHistogram.reset();
|
||||
@@ -2280,7 +2280,7 @@ bool Thumbnail::writeAEHistogram (const Glib::ustring& fname)
|
||||
{
|
||||
|
||||
if (aeHistogram) {
|
||||
- FILE* f = g_fopen (fname.c_str (), "wb");
|
||||
+ FILE* f = ::g_fopen (fname.c_str (), "wb");
|
||||
|
||||
if (f) {
|
||||
fwrite (&aeHistogram[0], 1, (65536 >> aeHistCompression)*sizeof (aeHistogram[0]), f);
|
|
@ -55,7 +55,7 @@ stdenv.mkDerivation {
|
|||
ln -s $opt/data/resources $opt/x86_64/resources
|
||||
'';
|
||||
|
||||
updateScript = writeShellScript "hubstaff-updater" ''
|
||||
passthru.updateScript = writeShellScript "hubstaff-updater" ''
|
||||
set -eu -o pipefail
|
||||
|
||||
installation_script_url=$(curl --fail --head --location --silent --output /dev/null --write-out %{url_effective} https://app.hubstaff.com/download/linux)
|
||||
|
|
|
@ -32,15 +32,15 @@
|
|||
}
|
||||
},
|
||||
"dev": {
|
||||
"version": "102.0.4997.0",
|
||||
"sha256": "05y9b426wcarq18faw5i79qrfqy158dinvba5d7lwrcjnbqyfr1f",
|
||||
"sha256bin64": "0846y3dbs7vghrb8s2s57a2lk7a0x2dha5q0d915qrn29g5x9c6p",
|
||||
"version": "102.0.5005.12",
|
||||
"sha256": "11n03hz3g8h7srywxrjwrdrxybdjvmdjrnigjlrwjkydprg1l7ab",
|
||||
"sha256bin64": "0hc56a98ikkbgdw36dpz9k6r15jmjmnm7faml8z59vixxlvkrw7y",
|
||||
"deps": {
|
||||
"gn": {
|
||||
"version": "2022-04-07",
|
||||
"version": "2022-04-14",
|
||||
"url": "https://gn.googlesource.com/gn",
|
||||
"rev": "ae110f8b525009255ba1f9ae96982176d3bfad3d",
|
||||
"sha256": "131y1v2m59hn7s00zc9p7rhfi956p744mp96g2i80f0i020dyl6w"
|
||||
"rev": "fd9f2036f26d83f9fcfe93042fb952e5a7fe2167",
|
||||
"sha256": "0b5xs0chcv3hfhy71rycsmgxnqbm375a333hwav8929k9cbi5p9h"
|
||||
}
|
||||
}
|
||||
},
|
||||
|
|
|
@ -1,16 +1,15 @@
|
|||
#!/usr/bin/env nix-shell
|
||||
#!nix-shell -I nixpkgs=../../../.. -i python3 -p python3
|
||||
|
||||
import json
|
||||
import urllib.request
|
||||
import urllib.error
|
||||
from typing import List, Dict, Optional, Any, Tuple
|
||||
import logging
|
||||
from operator import itemgetter
|
||||
import subprocess
|
||||
import zipfile
|
||||
import io
|
||||
import base64
|
||||
import json
|
||||
import logging
|
||||
import subprocess
|
||||
import urllib.error
|
||||
import urllib.request
|
||||
from operator import itemgetter
|
||||
from pathlib import Path
|
||||
from typing import List, Dict, Optional, Any, Tuple
|
||||
|
||||
# We don't want all those deprecated legacy extensions
|
||||
# Group extensions by GNOME "major" version for compatibility reasons
|
||||
|
@ -21,14 +20,12 @@ supported_versions = {
|
|||
"42": "42",
|
||||
}
|
||||
|
||||
|
||||
# Some type alias to increase readility of complex compound types
|
||||
# Some type alias to increase readability of complex compound types
|
||||
PackageName = str
|
||||
ShellVersion = str
|
||||
Uuid = str
|
||||
ExtensionVersion = int
|
||||
|
||||
|
||||
# Keep track of all names that have been used till now to detect collisions.
|
||||
# This works because we deterministically process all extensions in historical order
|
||||
# The outer dict level is the shell version, as we are tracking duplicates only per same Shell version.
|
||||
|
@ -37,6 +34,8 @@ package_name_registry: Dict[ShellVersion, Dict[PackageName, List[Uuid]]] = {}
|
|||
for shell_version in supported_versions.keys():
|
||||
package_name_registry[shell_version] = {}
|
||||
|
||||
updater_dir_path = Path(__file__).resolve().parent
|
||||
|
||||
|
||||
def fetch_extension_data(uuid: str, version: str) -> Tuple[str, str]:
|
||||
"""
|
||||
|
@ -48,28 +47,32 @@ def fetch_extension_data(uuid: str, version: str) -> Tuple[str, str]:
|
|||
uuid = uuid.replace("@", "")
|
||||
url: str = f"https://extensions.gnome.org/extension-data/{uuid}.v{version}.shell-extension.zip"
|
||||
|
||||
# Yes, we download that file three times:
|
||||
# TODO remove when Vitals@CoreCoding.com version != 53, this extension has a missing manifest.json
|
||||
if url == 'https://extensions.gnome.org/extension-data/VitalsCoreCoding.com.v53.shell-extension.zip':
|
||||
url = 'https://extensions.gnome.org/extension-data/VitalsCoreCoding.com.v53.shell-extension_v1BI2FB.zip'
|
||||
|
||||
# The first time is for the maintainter, so they may have a personal backup to fix potential issues
|
||||
# subprocess.run(
|
||||
# ["wget", url], capture_output=True, text=True
|
||||
# )
|
||||
# Download extension and add the zip content to nix-store
|
||||
process = subprocess.run(
|
||||
["nix-prefetch-url", "--unpack", "--print-path", url], capture_output=True, text=True
|
||||
)
|
||||
|
||||
# The second time, we extract the metadata.json because we need it too
|
||||
with urllib.request.urlopen(url) as response:
|
||||
data = zipfile.ZipFile(io.BytesIO(response.read()), 'r')
|
||||
metadata = base64.b64encode(data.read('metadata.json')).decode()
|
||||
lines = process.stdout.splitlines()
|
||||
|
||||
# The third time is to get the file into the store and to get its hash
|
||||
hash = subprocess.run(
|
||||
["nix-prefetch-url", "--unpack", url], capture_output=True, text=True
|
||||
).stdout.strip()
|
||||
# Get hash from first line of nix-prefetch-url output
|
||||
hash = lines[0].strip()
|
||||
|
||||
# Get path from second line of nix-prefetch-url output
|
||||
path = Path(lines[1].strip())
|
||||
|
||||
# Get metadata.json content from nix-store
|
||||
with open(path / "metadata.json", "r") as out:
|
||||
metadata = base64.b64encode(out.read().encode("ascii")).decode()
|
||||
|
||||
return hash, metadata
|
||||
|
||||
|
||||
def generate_extension_versions(
|
||||
extension_version_map: Dict[ShellVersion, ExtensionVersion], uuid: str
|
||||
extension_version_map: Dict[ShellVersion, ExtensionVersion], uuid: str
|
||||
) -> Dict[ShellVersion, Dict[str, str]]:
|
||||
"""
|
||||
Takes in a mapping from shell versions to extension versions and transforms it the way we need it:
|
||||
|
@ -114,7 +117,7 @@ def generate_extension_versions(
|
|||
"version": str(extension_version),
|
||||
"sha256": sha256,
|
||||
# The downloads are impure, their metadata.json may change at any time.
|
||||
# Thus, be back it up / pin it to remain deterministic
|
||||
# Thus, we back it up / pin it to remain deterministic
|
||||
# Upstream issue: https://gitlab.gnome.org/Infrastructure/extensions-web/-/issues/137
|
||||
"metadata": metadata,
|
||||
}
|
||||
|
@ -127,7 +130,7 @@ def pname_from_url(url: str) -> Tuple[str, str]:
|
|||
"""
|
||||
|
||||
url = url.split("/") # type: ignore
|
||||
return (url[3], url[2])
|
||||
return url[3], url[2]
|
||||
|
||||
|
||||
def process_extension(extension: Dict[str, Any]) -> Optional[Dict[str, Any]]:
|
||||
|
@ -151,7 +154,7 @@ def process_extension(extension: Dict[str, Any]) -> Optional[Dict[str, Any]]:
|
|||
Don't make any assumptions on it, and treat it like an opaque string!
|
||||
"link" follows the following schema: "/extension/$number/$string/"
|
||||
The number is monotonically increasing and unique to every extension.
|
||||
The string is usually derived from the extensions's name (but shortened, kebab-cased and URL friendly).
|
||||
The string is usually derived from the extension name (but shortened, kebab-cased and URL friendly).
|
||||
It may diverge from the actual name.
|
||||
The keys of "shell_version_map" are GNOME Shell version numbers.
|
||||
|
||||
|
@ -196,7 +199,7 @@ def process_extension(extension: Dict[str, Any]) -> Optional[Dict[str, Any]]:
|
|||
|
||||
for shell_version in shell_version_map.keys():
|
||||
if pname in package_name_registry[shell_version]:
|
||||
logging.warning(f"Package name '{pname}' is colliding.")
|
||||
logging.warning(f"Package name '{pname}' for GNOME '{shell_version}' is colliding.")
|
||||
package_name_registry[shell_version][pname].append(uuid)
|
||||
else:
|
||||
package_name_registry[shell_version][pname] = [uuid]
|
||||
|
@ -225,16 +228,16 @@ def scrape_extensions_index() -> List[Dict[str, Any]]:
|
|||
logging.info("Scraping page " + str(page))
|
||||
try:
|
||||
with urllib.request.urlopen(
|
||||
f"https://extensions.gnome.org/extension-query/?n_per_page=25&page={page}"
|
||||
f"https://extensions.gnome.org/extension-query/?n_per_page=25&page={page}"
|
||||
) as response:
|
||||
data = json.loads(response.read().decode())["extensions"]
|
||||
responseLength = len(data)
|
||||
response_length = len(data)
|
||||
|
||||
for extension in data:
|
||||
extensions.append(extension)
|
||||
|
||||
# If our page isn't "full", it must have been the last one
|
||||
if responseLength < 25:
|
||||
if response_length < 25:
|
||||
logging.debug(
|
||||
f"\tThis page only has {responseLength} entries, so it must be the last one."
|
||||
)
|
||||
|
@ -265,11 +268,7 @@ if __name__ == "__main__":
|
|||
processed_extensions.append(processed_extension)
|
||||
logging.debug(f"Processed {num + 1} / {len(raw_extensions)}")
|
||||
|
||||
logging.info(
|
||||
f"Done. Writing results to extensions.json ({len(processed_extensions)} extensions in total)"
|
||||
)
|
||||
|
||||
with open("extensions.json", "w") as out:
|
||||
with open(updater_dir_path / "extensions.json", "w") as out:
|
||||
# Manually pretty-print the outer level, but then do one compact line per extension
|
||||
# This allows for the diffs to be manageable (one line of change per extension) despite their quantity
|
||||
for index, extension in enumerate(processed_extensions):
|
||||
|
@ -281,14 +280,15 @@ if __name__ == "__main__":
|
|||
out.write("\n")
|
||||
out.write("]\n")
|
||||
|
||||
with open("extensions.json", "r") as out:
|
||||
logging.info(
|
||||
f"Done. Writing results to extensions.json ({len(processed_extensions)} extensions in total)"
|
||||
)
|
||||
|
||||
with open(updater_dir_path / "extensions.json", "r") as out:
|
||||
# Check that the generated file actually is valid JSON, just to be sure
|
||||
json.load(out)
|
||||
|
||||
logging.info(
|
||||
"Done. Writing name collisions to collisions.json (please check manually)"
|
||||
)
|
||||
with open("collisions.json", "w") as out:
|
||||
with open(updater_dir_path / "collisions.json", "w") as out:
|
||||
# Filter out those that are not duplicates
|
||||
package_name_registry_filtered: Dict[ShellVersion, Dict[PackageName, List[Uuid]]] = {
|
||||
# The outer level keys are shell versions
|
||||
|
@ -299,3 +299,7 @@ if __name__ == "__main__":
|
|||
}
|
||||
json.dump(package_name_registry_filtered, out, indent=2, ensure_ascii=False)
|
||||
out.write("\n")
|
||||
|
||||
logging.info(
|
||||
"Done. Writing name collisions to collisions.json (please check manually)"
|
||||
)
|
||||
|
|
|
@ -9,7 +9,7 @@
|
|||
, vulkanDrivers ? ["auto"]
|
||||
, eglPlatforms ? [ "x11" ] ++ lib.optionals stdenv.isLinux [ "wayland" ]
|
||||
, OpenGL, Xplugin
|
||||
, withValgrind ? !stdenv.isDarwin && lib.meta.availableOn stdenv.hostPlatform valgrind-light, valgrind-light
|
||||
, withValgrind ? lib.meta.availableOn stdenv.hostPlatform valgrind-light && !valgrind-light.meta.broken, valgrind-light
|
||||
, enableGalliumNine ? stdenv.isLinux
|
||||
, enableOSMesa ? stdenv.isLinux
|
||||
, enableOpenCL ? stdenv.isLinux && stdenv.isx86_64
|
||||
|
|
|
@ -19,13 +19,13 @@
|
|||
|
||||
stdenv.mkDerivation rec {
|
||||
pname = "umockdev";
|
||||
version = "0.17.7";
|
||||
version = "0.17.8";
|
||||
|
||||
outputs = [ "bin" "out" "dev" "devdoc" ];
|
||||
|
||||
src = fetchurl {
|
||||
url = "https://github.com/martinpitt/umockdev/releases/download/${version}/${pname}-${version}.tar.xz";
|
||||
sha256 = "sha256-BdZCoW3QHM4Oue4bpuSFsuwIU1vsZ5pjqVv9TfGNC7U=";
|
||||
sha256 = "sha256-s3zeWJxw5ohUtsv4NZGKcdP8khEYzIXycbBrAzdnVoU=";
|
||||
};
|
||||
|
||||
nativeBuildInputs = [
|
||||
|
|
|
@ -7,8 +7,8 @@
|
|||
|
||||
buildPythonPackage rec {
|
||||
pname = "ailment";
|
||||
version = "9.1.12332";
|
||||
format = "setuptools";
|
||||
version = "9.2.1";
|
||||
format = "pyproject";
|
||||
|
||||
disabled = pythonOlder "3.6";
|
||||
|
||||
|
@ -16,7 +16,7 @@ buildPythonPackage rec {
|
|||
owner = "angr";
|
||||
repo = pname;
|
||||
rev = "v${version}";
|
||||
hash = "sha256-qWKvNhiOAonUi0qpOWtwbNZa2lgBQ+gaGrAHMgDdr4Q=";
|
||||
hash = "sha256-F0t4vVxi4KUUtIZc8FJD9+2qf1XA58haFfjmHwAQaWA=";
|
||||
};
|
||||
|
||||
propagatedBuildInputs = [
|
||||
|
|
|
@ -8,13 +8,14 @@
|
|||
|
||||
buildPythonPackage rec {
|
||||
pname = "aiolifx";
|
||||
version = "0.7.1";
|
||||
version = "0.8.0";
|
||||
format = "setuptools";
|
||||
|
||||
disabled = pythonOlder "3.4";
|
||||
disabled = pythonOlder "3.7";
|
||||
|
||||
src = fetchPypi {
|
||||
inherit pname version;
|
||||
sha256 = "sha256-ktXnAgrxfDELfMQATcWHn/u6C4bKQii+mbT4mA54coo=";
|
||||
hash = "sha256-7XwtTALfEFAI2Rl3JcVcncIZBTFNuXyyclpJj5jHyEU=";
|
||||
};
|
||||
|
||||
propagatedBuildInputs = [
|
||||
|
@ -25,10 +26,12 @@ buildPythonPackage rec {
|
|||
# tests are not implemented
|
||||
doCheck = false;
|
||||
|
||||
pythonImportsCheck = [ "aiolifx" ];
|
||||
pythonImportsCheck = [
|
||||
"aiolifx"
|
||||
];
|
||||
|
||||
meta = with lib; {
|
||||
description = "API for local communication with LIFX devices over a LAN";
|
||||
description = "Module for local communication with LIFX devices over a LAN";
|
||||
homepage = "https://github.com/frawau/aiolifx";
|
||||
license = licenses.mit;
|
||||
maintainers = with maintainers; [ netixx ];
|
||||
|
|
|
@ -46,8 +46,8 @@ in
|
|||
|
||||
buildPythonPackage rec {
|
||||
pname = "angr";
|
||||
version = "9.1.12332";
|
||||
format = "setuptools";
|
||||
version = "9.2.1";
|
||||
format = "pyproject";
|
||||
|
||||
disabled = pythonOlder "3.6";
|
||||
|
||||
|
@ -55,7 +55,7 @@ buildPythonPackage rec {
|
|||
owner = pname;
|
||||
repo = pname;
|
||||
rev = "v${version}";
|
||||
hash = "sha256-GaW1XyFOnjU28HqptFC6+Fe41zYZMR716Nsq0dPy660=";
|
||||
hash = "sha256-7t4NV1udBq3tK7czuKYUsQ+9tLahFM8DlUUBT3d6bco=";
|
||||
};
|
||||
|
||||
propagatedBuildInputs = [
|
||||
|
|
|
@ -9,8 +9,8 @@
|
|||
|
||||
buildPythonPackage rec {
|
||||
pname = "angrop";
|
||||
version = "9.1.12332";
|
||||
format = "setuptools";
|
||||
version = "9.2.1";
|
||||
format = "pyproject";
|
||||
|
||||
disabled = pythonOlder "3.6";
|
||||
|
||||
|
@ -18,7 +18,7 @@ buildPythonPackage rec {
|
|||
owner = "angr";
|
||||
repo = pname;
|
||||
rev = "v${version}";
|
||||
hash = "sha256-lhwlZ7eHaEMaTW7c+WCRSeGSIQ5IeEx6XALyYJH+Ey0=";
|
||||
hash = "sha256-VhlsRd5IN8zF6aUU5Ji/ULkdecOpR+egU3vhYpi+KL8=";
|
||||
};
|
||||
|
||||
propagatedBuildInputs = [
|
||||
|
|
|
@ -1,7 +1,6 @@
|
|||
{ lib
|
||||
, buildPythonPackage
|
||||
, fetchFromGitHub
|
||||
, fetchpatch
|
||||
, pytestCheckHook
|
||||
, nose
|
||||
, pythonOlder
|
||||
|
@ -9,8 +8,8 @@
|
|||
|
||||
buildPythonPackage rec {
|
||||
pname = "archinfo";
|
||||
version = "9.1.12332";
|
||||
format = "setuptools";
|
||||
version = "9.2.1";
|
||||
format = "pyproject";
|
||||
|
||||
disabled = pythonOlder "3.6";
|
||||
|
||||
|
@ -18,7 +17,7 @@ buildPythonPackage rec {
|
|||
owner = "angr";
|
||||
repo = pname;
|
||||
rev = "v${version}";
|
||||
hash = "sha256-nv/hwQZgKv/cM8fF6GqI8zY9GAe8aCZ/AGFOmhz+bMM=";
|
||||
hash = "sha256-RWdY7tzT5wDVjAn1QIkQm8b5lIo++VzktsYZxn8taeg=";
|
||||
};
|
||||
|
||||
checkInputs = [
|
||||
|
@ -26,15 +25,6 @@ buildPythonPackage rec {
|
|||
pytestCheckHook
|
||||
];
|
||||
|
||||
patches = [
|
||||
# Make archinfo import without installing pyvex, https://github.com/angr/archinfo/pull/113
|
||||
(fetchpatch {
|
||||
name = "fix-import-issue.patch";
|
||||
url = "https://github.com/angr/archinfo/commit/d29c108f55ffd458ff1d3d65db2d651c76b19267.patch";
|
||||
sha256 = "sha256-9vi0QyqQLIPQxFuB8qrpcnPXWOJ6d27/IXJE/Ui6HhM=";
|
||||
})
|
||||
];
|
||||
|
||||
pythonImportsCheck = [
|
||||
"archinfo"
|
||||
];
|
||||
|
@ -43,6 +33,6 @@ buildPythonPackage rec {
|
|||
description = "Classes with architecture-specific information";
|
||||
homepage = "https://github.com/angr/archinfo";
|
||||
license = with licenses; [ bsd2 ];
|
||||
maintainers = [ maintainers.fab ];
|
||||
maintainers = with maintainers; [ fab ];
|
||||
};
|
||||
}
|
||||
|
|
|
@ -6,12 +6,12 @@
|
|||
|
||||
buildPythonPackage rec {
|
||||
pname = "azure-synapse-artifacts";
|
||||
version = "0.12.0";
|
||||
version = "0.13.0";
|
||||
|
||||
src = fetchPypi {
|
||||
inherit pname version;
|
||||
extension = "zip";
|
||||
sha256 = "sha256-IfQWsITuThzh+TRgv99JTtcDFY3gMq5PjALkN4mJEZo=";
|
||||
sha256 = "sha256-WJZtE7efs1xwalyb0Sr2J+pmPIt9gn2o01/prncb2uM=";
|
||||
};
|
||||
|
||||
propagatedBuildInputs = [
|
||||
|
|
|
@ -3,6 +3,7 @@
|
|||
, fetchFromGitHub
|
||||
, future
|
||||
, hypothesis
|
||||
, packaging
|
||||
, parameterized
|
||||
, msgpack
|
||||
, pyserial
|
||||
|
@ -29,6 +30,7 @@ buildPythonPackage rec {
|
|||
|
||||
propagatedBuildInputs = [
|
||||
msgpack
|
||||
packaging
|
||||
pyserial
|
||||
typing-extensions
|
||||
wrapt
|
||||
|
|
|
@ -14,8 +14,8 @@
|
|||
|
||||
buildPythonPackage rec {
|
||||
pname = "claripy";
|
||||
version = "9.1.12332";
|
||||
format = "setuptools";
|
||||
version = "9.2.1";
|
||||
format = "pyproject";
|
||||
|
||||
disabled = pythonOlder "3.6";
|
||||
|
||||
|
@ -23,7 +23,7 @@ buildPythonPackage rec {
|
|||
owner = "angr";
|
||||
repo = pname;
|
||||
rev = "v${version}";
|
||||
sha256 = "sha256-YrR8OkDoop6kHAuk4cM4STYYOjjaMLZCQuE07/5IXqs=";
|
||||
hash = "sha256-pCqhSpZfX3u9vJ8Oy1yyicagBQYK5+jBVCEab0TnGA4=";
|
||||
};
|
||||
|
||||
propagatedBuildInputs = [
|
||||
|
@ -42,8 +42,8 @@ buildPythonPackage rec {
|
|||
|
||||
postPatch = ''
|
||||
# Use upstream z3 implementation
|
||||
substituteInPlace setup.py \
|
||||
--replace "z3-solver>=4.8.5.0" ""
|
||||
substituteInPlace setup.cfg \
|
||||
--replace "z3-solver >= 4.8.5.0" ""
|
||||
'';
|
||||
|
||||
pythonImportsCheck = [
|
||||
|
|
|
@ -15,7 +15,7 @@
|
|||
|
||||
let
|
||||
# The binaries are following the argr projects release cycle
|
||||
version = "9.1.12332";
|
||||
version = "9.2.1";
|
||||
|
||||
# Binary files from https://github.com/angr/binaries (only used for testing and only here)
|
||||
binaries = fetchFromGitHub {
|
||||
|
@ -29,7 +29,7 @@ in
|
|||
buildPythonPackage rec {
|
||||
pname = "cle";
|
||||
inherit version;
|
||||
format = "setuptools";
|
||||
format = "pyproject";
|
||||
|
||||
disabled = pythonOlder "3.6";
|
||||
|
||||
|
@ -37,7 +37,7 @@ buildPythonPackage rec {
|
|||
owner = "angr";
|
||||
repo = pname;
|
||||
rev = "v${version}";
|
||||
hash = "sha256-xcj6Skzzmw5g+0KsBMLNOhRyXQA7nbgnc9YyfJLteCM=";
|
||||
hash = "sha256-OGdnrRFfx2LMMsw6giPC+4izWX603cYbpKHuslj4Gng=";
|
||||
};
|
||||
|
||||
propagatedBuildInputs = [
|
||||
|
@ -70,6 +70,8 @@ buildPythonPackage rec {
|
|||
"test_plt_full_relro"
|
||||
# Test fails
|
||||
"test_tls_pe_incorrect_tls_data_start"
|
||||
# The required parts is not present on Nix
|
||||
"test_remote_file_map"
|
||||
];
|
||||
|
||||
pythonImportsCheck = [
|
||||
|
|
|
@ -13,7 +13,7 @@
|
|||
|
||||
buildPythonPackage rec {
|
||||
pname = "deezer-python";
|
||||
version = "5.2.0";
|
||||
version = "5.3.0";
|
||||
format = "pyproject";
|
||||
|
||||
disabled = pythonOlder "3.6";
|
||||
|
@ -21,8 +21,8 @@ buildPythonPackage rec {
|
|||
src = fetchFromGitHub {
|
||||
owner = "browniebroke";
|
||||
repo = pname;
|
||||
rev = "v${version}";
|
||||
sha256 = "sha256-jaF5vQx8/qP9pGLfilx86v1GxHbjxaRghjjI5Me0pU0=";
|
||||
rev = "refs/tags/v${version}";
|
||||
sha256 = "sha256-Y3nn7q6wGBqWN2JxfpGYd/KDxW5yeuwkos0w1AENkJA=";
|
||||
};
|
||||
|
||||
nativeBuildInputs = [
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
{ lib, stdenv, buildPythonPackage, fetchFromGitHub, pythonOlder
|
||||
{ lib, stdenv, buildPythonPackage, fetchFromGitHub, fetchpatch, pythonOlder
|
||||
, pandas, shapely, fiona, pyproj
|
||||
, pytestCheckHook, Rtree }:
|
||||
|
||||
|
@ -14,6 +14,19 @@ buildPythonPackage rec {
|
|||
sha256 = "14azl3gppqn90k8h4hpjilsivj92k6p1jh7mdr6p4grbww1b7sdq";
|
||||
};
|
||||
|
||||
patches = [
|
||||
# Fixes a test, will be included in the next release after 0.10.2
|
||||
(fetchpatch {
|
||||
url = "https://github.com/geopandas/geopandas/pull/2219/commits/ac67515c9df745b672cca1669adf05eaf5cb0f3b.patch";
|
||||
sha256 = "sha256-XcaoFhD6Rq0nfEpMbOJiAWPbaPDrMwFwoyppayq8NHc=";
|
||||
})
|
||||
# 5 commits from post 0.10.2 that fix the test suite compatibility with pandas >=1.4
|
||||
(fetchpatch {
|
||||
url = "https://github.com/geopandas/geopandas/pull/2289.patch";
|
||||
sha256 = "sha256-BcZVdaO/DdpZoVGUWaw9etFvvgwizAgrkaBISEOhV4A=";
|
||||
})
|
||||
];
|
||||
|
||||
propagatedBuildInputs = [
|
||||
pandas
|
||||
shapely
|
||||
|
|
18 pkgs/development/python-modules/glad/default.nix (new file)

@@ -0,0 +1,18 @@
{ buildPythonPackage, fetchPypi, lib }:

buildPythonPackage rec {
  pname = "glad";
  version = "0.1.36";

  src = fetchPypi {
    inherit pname version;
    sha256 = "sha256-P7ANv+x65t2+ugTiFUf2fzzPx5X8NFYkUM8/K7Gf28c=";
  };

  meta = with lib; {
    description = "Multi-Language Vulkan/GL/GLES/EGL/GLX/WGL Loader-Generator based on the official specs";
    homepage = "https://github.com/Dav1dde/glad";
    license = licenses.mit;
    maintainers = with maintainers; [ superherointj ];
  };
}
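glad is registered in the Python package set near the end of this commit; a minimal sketch, assuming an ad-hoc Nix expression, of pulling the new attribute into an environment:

    # Illustrative only; ps.glad is the attribute added to python-packages.nix
    # by this commit.
    with import <nixpkgs> { };
    python3.withPackages (ps: [ ps.glad ])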
|
@ -1,4 +1,6 @@
|
|||
{ lib, fetchurl, buildPythonPackage, numpy, scikit-learn, setuptools-scm, cython, pytest }:
|
||||
{ lib, fetchurl, buildPythonPackage
|
||||
, numpy, scikit-learn, pybind11, setuptools-scm, cython
|
||||
, pytestCheckHook }:
|
||||
|
||||
buildPythonPackage rec {
|
||||
pname = "hmmlearn";
|
||||
|
@ -9,13 +11,12 @@ buildPythonPackage rec {
|
|||
sha256 = "sha256-a0snIPJ6912pNnq02Q3LAPONozFo322Rf57F3mZw9uE=";
|
||||
};
|
||||
|
||||
buildInputs = [ setuptools-scm cython ];
|
||||
buildInputs = [ setuptools-scm cython pybind11 ];
|
||||
propagatedBuildInputs = [ numpy scikit-learn ];
|
||||
checkInputs = [ pytest ];
|
||||
checkInputs = [ pytestCheckHook ];
|
||||
|
||||
checkPhase = ''
|
||||
pytest --pyargs hmmlearn
|
||||
'';
|
||||
pythonImportsCheck = [ "hmmlearn" ];
|
||||
pytestFlagsArray = [ "--pyargs" "hmmlearn" ];
|
||||
|
||||
meta = with lib; {
|
||||
description = "Hidden Markov Models in Python with scikit-learn like API";
|
||||
|
|
|
@ -3,6 +3,7 @@
|
|||
, pythonOlder
|
||||
, fetchFromGitHub
|
||||
, pytestCheckHook
|
||||
, autograd
|
||||
, numba
|
||||
, numpy
|
||||
, scikit-learn
|
||||
|
@ -25,6 +26,7 @@ buildPythonPackage rec {
|
|||
};
|
||||
|
||||
propagatedBuildInputs = [
|
||||
autograd
|
||||
numba
|
||||
numpy
|
||||
scikit-learn
|
||||
|
@ -40,8 +42,8 @@ buildPythonPackage rec {
|
|||
|
||||
meta = with lib; {
|
||||
homepage = "https://github.com/neurodata/hyppo";
|
||||
description = "Indepedence testing in Python";
|
||||
license = licenses.asl20;
|
||||
description = "Python package for multivariate hypothesis testing";
|
||||
license = licenses.mit;
|
||||
maintainers = with maintainers; [ bcdarwin ];
|
||||
};
|
||||
}
|
||||
|
|
|
@ -99,7 +99,8 @@ buildPythonPackage rec {
|
|||
--replace "cachetools>=4.1,<5" "cachetools>=4.1" \
|
||||
--replace "Flask-WTF>=0.14.3,<1" "Flask-WTF>=0.14.3,<2" \
|
||||
--replace "SQLAlchemy>=1.3.0,<1.4" "SQLAlchemy>=1.3.0,<1.5" \
|
||||
--replace "WTForms>=2.3.1,<2.4" "WTForms"
|
||||
--replace "WTForms>=2.3.1,<2.4" "WTForms" \
|
||||
--replace "Flask-Talisman>=0.8,<1" "Flask-Talisman>=0.8,<2" # https://github.com/spiral-project/ihatemoney/pull/1006
|
||||
'';
|
||||
|
||||
checkInputs = [
|
||||
|
|
|
@ -12,7 +12,7 @@
|
|||
|
||||
buildPythonPackage rec {
|
||||
pname = "marshmallow-dataclass";
|
||||
version = "8.5.3";
|
||||
version = "8.5.5";
|
||||
format = "setuptools";
|
||||
|
||||
disabled = pythonOlder "3.6";
|
||||
|
@ -21,7 +21,7 @@ buildPythonPackage rec {
|
|||
owner = "lovasoa";
|
||||
repo = "marshmallow_dataclass";
|
||||
rev = "v${version}";
|
||||
sha256 = "0mngkjfs2nxxr0y77n429hb22rmjxbnn95j4vwqr9y6q16bqxs0w";
|
||||
sha256 = "sha256-sozq+L3pa9iprAWtQd/L+LCfTWfDue04WzQ/fbM0mps=";
|
||||
};
|
||||
|
||||
propagatedBuildInputs = [
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
{ lib, stdenv, buildPythonPackage, fetchFromGitHub, isPyPy, isPy3k
|
||||
, olefile, freetype, libjpeg, zlib, libtiff, libwebp, tcl, lcms2
|
||||
, libxcb, tk, libX11, openjpeg, libimagequant, pyroma, numpy
|
||||
, libxcb, tk, libX11, openjpeg, libimagequant, pyroma, numpy, defusedxml
|
||||
, pytestCheckHook
|
||||
}@args:
|
||||
|
||||
|
@ -8,14 +8,14 @@ import ../pillow/generic.nix (rec {
|
|||
pname = "Pillow-SIMD";
|
||||
# check for release version on https://pypi.org/project/Pillow-SIMD/#history
|
||||
# does not match the latest pillow release version!
|
||||
version = "7.0.0.post3";
|
||||
version = "9.0.0.post1";
|
||||
disabled = !isPy3k;
|
||||
|
||||
src = fetchFromGitHub {
|
||||
owner = "uploadcare";
|
||||
repo = "pillow-simd";
|
||||
rev = "v${version}";
|
||||
sha256 = "1h832xp1bzf951hr4dmjmxqfsv28sx9lr2cq96qdz1c72k40zj1h";
|
||||
sha256 = "sha256-qTZYhgHjVMXqoYl3mG1xVrFaWrPidSY8HlyFQizV27Y=";
|
||||
};
|
||||
|
||||
meta = with lib; {
|
||||
|
|
|
@ -11,7 +11,7 @@
|
|||
|
||||
buildPythonPackage rec {
|
||||
pname = "pynetdicom";
|
||||
version = "2.0.1";
|
||||
version = "2.0.2";
|
||||
format = "setuptools";
|
||||
|
||||
disabled = pythonOlder "3.7";
|
||||
|
@ -20,7 +20,7 @@ buildPythonPackage rec {
|
|||
owner = "pydicom";
|
||||
repo = pname;
|
||||
rev = "v${version}";
|
||||
sha256 = "sha256-28SoOdS6sAj3KrfJT8PR2k8XLEY2zh0k9w1eq1y7V8M=";
|
||||
sha256 = "sha256-/JWQUtFBW4uqCbs/nUxj1pRBfTCXV4wcqTkqvzpdFrM=";
|
||||
};
|
||||
|
||||
propagatedBuildInputs = [
|
||||
|
@ -40,14 +40,17 @@ buildPythonPackage rec {
|
|||
"TestAEGoodAssociation"
|
||||
"TestEchoSCP"
|
||||
"TestEchoSCPCLI"
|
||||
"TestEventHandlingAcceptor"
|
||||
"TestFindSCP"
|
||||
"TestFindSCPCLI"
|
||||
"TestGetSCP"
|
||||
"TestGetSCPCLI"
|
||||
"TestMoveSCP"
|
||||
"TestMoveSCPCLI"
|
||||
"TestPrimitive_N_GET"
|
||||
"TestQRGetServiceClass"
|
||||
"TestQRMoveServiceClass"
|
||||
"TestSearch"
|
||||
"TestState"
|
||||
"TestStorageServiceClass"
|
||||
"TestStoreSCP"
|
||||
|
@ -56,6 +59,11 @@ buildPythonPackage rec {
|
|||
"TestStoreSCUCLI"
|
||||
];
|
||||
|
||||
disabledTestPaths = [
|
||||
# Ignore apps tests
|
||||
"pynetdicom/apps/tests/"
|
||||
];
|
||||
|
||||
pythonImportsCheck = [
|
||||
"pynetdicom"
|
||||
];
|
||||
|
|
|
@ -9,7 +9,7 @@
|
|||
|
||||
buildPythonPackage rec {
|
||||
pname = "pyskyqremote";
|
||||
version = "0.3.5";
|
||||
version = "0.3.6";
|
||||
format = "setuptools";
|
||||
|
||||
disabled = pythonOlder "3.7";
|
||||
|
@ -17,8 +17,8 @@ buildPythonPackage rec {
|
|||
src = fetchFromGitHub {
|
||||
owner = "RogerSelwyn";
|
||||
repo = "skyq_remote";
|
||||
rev = version;
|
||||
sha256 = "sha256-/BhNoU1dnZj07ZvG126srSb6eW00n8htFuDttq006QE=";
|
||||
rev = "refs/tags/${version}";
|
||||
sha256 = "sha256-CCbLRb8eoMuYH3it2onfiUzHLW6sirePR/lqATpp4K0=";
|
||||
};
|
||||
|
||||
propagatedBuildInputs = [
|
||||
|
|
|
@ -12,14 +12,14 @@
|
|||
|
||||
buildPythonPackage rec {
|
||||
pname = "pyvex";
|
||||
version = "9.1.12332";
|
||||
format = "setuptools";
|
||||
version = "9.2.1";
|
||||
format = "pyproject";
|
||||
|
||||
disabled = pythonOlder "3.6";
|
||||
|
||||
src = fetchPypi {
|
||||
inherit pname version;
|
||||
hash = "sha256-e1lruHgppQ8mJbTx6xsUDSkLCYQISqM9c1vsjdQU4eI=";
|
||||
hash = "sha256-b6LZJmAyxklvZxvane19dK/kQfbLPjkk29RydiWMZCY=";
|
||||
};
|
||||
|
||||
propagatedBuildInputs = [
|
||||
|
|
|
@ -14,7 +14,7 @@
|
|||
|
||||
buildPythonPackage rec {
|
||||
pname = "seventeentrack";
|
||||
version = "2022.04.4";
|
||||
version = "2022.04.5";
|
||||
format = "pyproject";
|
||||
|
||||
disabled = pythonOlder "3.8";
|
||||
|
@ -23,7 +23,7 @@ buildPythonPackage rec {
|
|||
owner = "McSwindler";
|
||||
repo = pname;
|
||||
rev = version;
|
||||
hash = "sha256-r77UA9NDQ1EQIVXZK6povmp/jIIX/f+NbpH0qmYiHZs=";
|
||||
hash = "sha256-IaR0Cfs3FL4Vguc2NLdPaunk23zC8B93Iyqe9xY/hWY=";
|
||||
};
|
||||
|
||||
nativeBuildInputs = [
|
||||
|
|
|
@ -3,26 +3,20 @@
|
|||
, fetchFromGitHub
|
||||
, perl
|
||||
, rustPlatform
|
||||
, librusty_v8 ? callPackage ./librusty_v8.nix { }
|
||||
}:
|
||||
|
||||
rustPlatform.buildRustPackage rec {
|
||||
pname = "rover";
|
||||
version = "0.4.8";
|
||||
version = "0.5.1";
|
||||
|
||||
src = fetchFromGitHub {
|
||||
owner = "apollographql";
|
||||
repo = pname;
|
||||
rev = "v${version}";
|
||||
sha256 = "sha256-9o2bGa9vxN7EprKgsy9TI7AFmwjo1OT1pDyiLierTq0=";
|
||||
sha256 = "sha256-wBHMND/xpm9o7pkWMUj9lEtEkzy3mX+E4Dt7qDn6auY=";
|
||||
};
|
||||
|
||||
cargoSha256 = "sha256-4oNuyZ1xNK2jP9QFEcthCjEQRyvFykd5N0j5KCXrzVY=";
|
||||
|
||||
# The v8 package will try to download a `librusty_v8.a` release at build time
|
||||
# to our read-only filesystem. To avoid this we pre-download the file and
|
||||
# export it via RUSTY_V8_ARCHIVE
|
||||
RUSTY_V8_ARCHIVE = librusty_v8;
|
||||
cargoSha256 = "sha256-n0R2MdAYGsOsYt4x1N1KdGvBZYTALyhSzCGW29bnFU4=";
|
||||
|
||||
nativeBuildInputs = [
|
||||
perl
|
||||
|
|
|
@ -1,17 +0,0 @@
|
|||
{ rust, stdenv, fetchurl }:
|
||||
|
||||
let
|
||||
arch = rust.toRustTarget stdenv.hostPlatform;
|
||||
fetch_librusty_v8 = args: fetchurl {
|
||||
name = "librusty_v8-${args.version}";
|
||||
url = "https://github.com/denoland/rusty_v8/releases/download/v${args.version}/librusty_v8_release_${arch}.a";
|
||||
sha256 = args.shas.${stdenv.hostPlatform.system};
|
||||
meta = { inherit (args) version; };
|
||||
};
|
||||
in
|
||||
fetch_librusty_v8 {
|
||||
version = "0.38.1";
|
||||
shas = {
|
||||
x86_64-linux = "sha256-vRkb5ZrIOYSKa84UbsJD+Oua0wve7f1Yf3kMg/kkYSY=";
|
||||
};
|
||||
}
|
|
@ -1 +1 @@
|
|||
1bdcf8916afc3cea660add457724dddd70154904f4e360e15da27fa7d5c43cd0
|
||||
2694c7b893d44c9ad8f5d7161116deb9985a6bd05e8e0cdcd7379947430e6f89
|
||||
|
|
File diff suppressed because one or more lines are too long
|
@ -26,17 +26,6 @@ rover_tar_url="https://github.com/apollographql/rover/archive/refs/tags/${rover_
|
|||
# Convert hash to SRI representation
|
||||
rover_sri_hash=$(nix hash to-sri --type sha256 "$rover_hash")
|
||||
|
||||
# Identify librusty version and hash
|
||||
librusty_version=$(
|
||||
sed --quiet '/^name = "v8"$/{n;p}' "${repo}/Cargo.lock" \
|
||||
| grep --only-matching --perl-regexp '^version = "\K[^"]+'
|
||||
)
|
||||
librusty_arch=x86_64-unknown-linux-gnu
|
||||
librusty_url="https://github.com/denoland/rusty_v8/releases/download/v${librusty_version}/librusty_v8_release_${librusty_arch}.a"
|
||||
echo "Fetching librusty"
|
||||
librusty_hash=$(nix-prefetch-url "$librusty_url" --type sha256)
|
||||
librusty_sri_hash=$(nix hash to-sri --type sha256 "$librusty_hash")
|
||||
|
||||
# Update rover version.
|
||||
sed --in-place \
|
||||
"s|version = \"[0-9.]*\"|version = \"$rover_version\"|" \
|
||||
|
@ -61,16 +50,6 @@ sed --in-place \
|
|||
"s|cargoSha256 = \".*\"|cargoSha256 = \"$cargoSha256\"|" \
|
||||
"$dirname/default.nix"
|
||||
|
||||
# Update librusty version
|
||||
sed --in-place \
|
||||
"s|version = \"[0-9.]*\"|version = \"$librusty_version\"|" \
|
||||
"$dirname/librusty_v8.nix"
|
||||
|
||||
# Update librusty hash
|
||||
sed --in-place \
|
||||
"s|x86_64-linux = \"[^\"]*\"|x86_64-linux = \"$librusty_sri_hash\"|" \
|
||||
"$dirname/librusty_v8.nix"
|
||||
|
||||
# Update apollo api schema info
|
||||
response="$(mktemp)"
|
||||
schemaUrl=https://graphql.api.apollographql.com/api/schema
|
||||
|
|
|
@ -18,18 +18,18 @@
|
|||
|
||||
mkDerivation rec {
|
||||
pname = "polymc";
|
||||
version = "1.1.1";
|
||||
version = "1.2.1";
|
||||
|
||||
src = fetchFromGitHub {
|
||||
owner = "PolyMC";
|
||||
repo = "PolyMC";
|
||||
rev = version;
|
||||
sha256 = "sha256-virXfnjCzuR2cJoyzapIopT9B+Yi1neff2ZqfOvsmxY=";
|
||||
sha256 = "sha256-pnMmmeIKAaX+z1YzzowotjaG/HKdiqcz2tJ5eGRR77I=";
|
||||
fetchSubmodules = true;
|
||||
};
|
||||
|
||||
nativeBuildInputs = [ cmake file makeWrapper ];
|
||||
buildInputs = [ qtbase jdk8 zlib ];
|
||||
buildInputs = [ qtbase jdk zlib ];
|
||||
|
||||
postPatch = ''
|
||||
# hardcode jdk paths
|
||||
|
|
|
@ -4,18 +4,19 @@
|
|||
, fetchpatch
|
||||
, fetchurl
|
||||
, pkg-config
|
||||
, util-linux
|
||||
, coreutils
|
||||
, libuuid
|
||||
, libaio
|
||||
, substituteAll
|
||||
, enableCmdlib ? false
|
||||
, enableDmeventd ? false
|
||||
, udevSupport ? !stdenv.hostPlatform.isStatic, udev ? null
|
||||
, udevSupport ? !stdenv.hostPlatform.isStatic, udev
|
||||
, onlyLib ? stdenv.hostPlatform.isStatic
|
||||
, enableVDO ? false, vdo ? null
|
||||
, enableMdadm ? false, mdadm ? null
|
||||
, enableMultipath ? false, multipath-tools ? null
|
||||
# Otherwise we have a infinity recursion during static compilation
|
||||
, enableUtilLinux ? !stdenv.hostPlatform.isStatic, util-linux
|
||||
, enableVDO ? false, vdo
|
||||
, enableMdadm ? false, mdadm
|
||||
, enableMultipath ? false, multipath-tools
|
||||
, nixosTests
|
||||
}:
|
||||
|
||||
|
@ -95,14 +96,15 @@ stdenv.mkDerivation rec {
|
|||
sed -i 's|^#define LVM_CONFIGURE_LINE.*$|#define LVM_CONFIGURE_LINE "<removed>"|g' ./include/configure.h
|
||||
'';
|
||||
|
||||
patches = [
|
||||
patches = lib.optionals (lib.versionAtLeast version "2.03.15") [
|
||||
# fixes paths to and checks for tools
|
||||
# TODO: needs backport to LVM 2.02 used by static/musl
|
||||
(substituteAll (let
|
||||
optionalTool = cond: pkg: if cond then pkg else "/run/current-system/sw";
|
||||
in {
|
||||
src = ./fix-blkdeactivate.patch;
|
||||
inherit coreutils;
|
||||
util_linux = util-linux;
|
||||
util_linux = optionalTool enableUtilLinux util-linux;
|
||||
mdadm = optionalTool enableMdadm mdadm;
|
||||
multipath_tools = optionalTool enableMultipath multipath-tools;
|
||||
vdo = optionalTool enableVDO vdo;
|
||||
|
|
|
@ -2,13 +2,13 @@
|
|||
|
||||
stdenvNoCC.mkDerivation rec {
|
||||
pname = "icingaweb2";
|
||||
version = "2.10.0";
|
||||
version = "2.10.1";
|
||||
|
||||
src = fetchFromGitHub {
|
||||
owner = "Icinga";
|
||||
repo = "icingaweb2";
|
||||
rev = "v${version}";
|
||||
sha256 = "sha256:0fhhq6mzpwj3dh6w583n4sngshf15dm8zgbli5cacy7jkzmsz0wn";
|
||||
sha256 = "sha256-X4RaAJjhUnSALJyFYiwagN3cHyW+GyB5MPkW7l+Zv10=";
|
||||
};
|
||||
|
||||
nativeBuildInputs = [ makeWrapper ];
|
||||
|
|
|
@ -1,13 +1,13 @@
|
|||
{ lib, buildGoModule, fetchFromGitHub }:
|
||||
buildGoModule rec {
|
||||
pname = "thanos";
|
||||
version = "0.25.1";
|
||||
version = "0.25.2";
|
||||
|
||||
src = fetchFromGitHub {
|
||||
rev = "v${version}";
|
||||
owner = "thanos-io";
|
||||
repo = "thanos";
|
||||
sha256 = "sha256-yisJqr2JMpXSo9O3q7WpNe1r6w7E3XyeLpVlbdest3s=";
|
||||
sha256 = "sha256-CAeI+5aC8kSQaKOk/5WCQiQMOX82hogAQGP2Em3DJAw=";
|
||||
};
|
||||
|
||||
vendorSha256 = "sha256-tHtfS4PnO9n3ckUdaG6dQAIE2D2PG6km4Tqofaab/eg=";
|
||||
|
|
|
@ -2,21 +2,23 @@
|
|||
|
||||
rustPlatform.buildRustPackage rec {
|
||||
pname = "watchexec";
|
||||
version = "1.17.1";
|
||||
version = "1.19.0";
|
||||
|
||||
src = fetchFromGitHub {
|
||||
owner = pname;
|
||||
repo = pname;
|
||||
rev = "cli-v${version}";
|
||||
sha256 = "13yqghdhakkwp607j84a1vbqgnqqn77a5mh27cr24352ik2vkrkq";
|
||||
sha256 = "sha256-Zqu6Qor7kHSeOFyHjcrl6RhB8gL9pljHt7hEd6/0Kss=";
|
||||
};
|
||||
|
||||
cargoSha256 = "0grzfzxw705zs5qb2h7k0yws45m20ihhh4mnpmk3wargbxpn6gsh";
|
||||
cargoSha256 = "sha256-XwgoYaqgDkNggzi2TL/JPfh8LSFSzSWOVMbkmhXX73I=";
|
||||
|
||||
nativeBuildInputs = [ installShellFiles ];
|
||||
|
||||
buildInputs = lib.optionals stdenv.isDarwin [ CoreServices Foundation libiconv ];
|
||||
|
||||
checkFlags = [ "--skip=help" "--skip=help_short" ];
|
||||
|
||||
postInstall = ''
|
||||
installManPage doc/watchexec.1
|
||||
installShellCompletion --zsh --name _watchexec completions/zsh
|
||||
|
|
|
@ -11637,6 +11637,8 @@ with pkgs;
|
|||
|
||||
wv2 = callPackage ../tools/misc/wv2 { };
|
||||
|
||||
wvkbd = callPackage ../applications/accessibility/wvkbd { };
|
||||
|
||||
wyrd = callPackage ../tools/misc/wyrd {
|
||||
ocamlPackages = ocaml-ng.ocamlPackages_4_05;
|
||||
};
|
||||
|
|
|
@ -3375,6 +3375,8 @@ in {
|
|||
|
||||
git-sweep = callPackage ../development/python-modules/git-sweep { };
|
||||
|
||||
glad = callPackage ../development/python-modules/glad { };
|
||||
|
||||
glances-api = callPackage ../development/python-modules/glances-api { };
|
||||
|
||||
glasgow = callPackage ../development/python-modules/glasgow { };
|
||||
|
|