From ae4dae2909aad127686306ce140f6551db6cf9c3 Mon Sep 17 00:00:00 2001
From: Las Safin
Date: Tue, 21 Dec 2021 15:55:44 +0000
Subject: [PATCH 001/161] libraw: 0.20.2 -> unstable-2021-12-03

---
 pkgs/development/libraries/libraw/default.nix | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/pkgs/development/libraries/libraw/default.nix b/pkgs/development/libraries/libraw/default.nix
index 3ff7e725f3d8..2dfadcdd4886 100644
--- a/pkgs/development/libraries/libraw/default.nix
+++ b/pkgs/development/libraries/libraw/default.nix
@@ -1,14 +1,14 @@
 { lib, stdenv, fetchFromGitHub, autoreconfHook, lcms2, pkg-config }:
 
-stdenv.mkDerivation rec {
+stdenv.mkDerivation {
   pname = "libraw";
-  version = "0.20.2";
+  version = "unstable-2021-12-03";
 
   src = fetchFromGitHub {
     owner = "LibRaw";
     repo = "LibRaw";
-    rev = version;
-    sha256 = "16nm4r2l5501c9zvz25pzajq5id592jhn068scjxhr8np2cblybc";
+    rev = "52b2fc52e93a566e7e05eaa44cada58e3360b6ad";
+    sha256 = "kW0R4iPuqnFuWYDrl46ok3kaPcGgY2MqZT7mqVX+BDQ=";
   };
 
   outputs = [ "out" "lib" "dev" "doc" ];

From 2bc097e409da9e62d3c3f9e3bd3b938c303fa867 Mon Sep 17 00:00:00 2001
From: Las Safin
Date: Tue, 21 Dec 2021 15:43:37 +0000
Subject: [PATCH 002/161] freeimage: unstable-2020-07-04 -> unstable-2021-11-01

---
 .../libraries/freeimage/default.nix   |  8 +-
 .../libraries/freeimage/unbundle.diff | 78 +++++++++----------
 2 files changed, 43 insertions(+), 43 deletions(-)

diff --git a/pkgs/development/libraries/freeimage/default.nix b/pkgs/development/libraries/freeimage/default.nix
index 2b4b174ca31b..b132b3fae8ce 100644
--- a/pkgs/development/libraries/freeimage/default.nix
+++ b/pkgs/development/libraries/freeimage/default.nix
@@ -5,14 +5,14 @@
 stdenv.mkDerivation {
   pname = "freeimage";
-  version = "unstable-2020-07-04";
+  version = "unstable-2021-11-01";
 
   src = fetchsvn {
     url = "svn://svn.code.sf.net/p/freeimage/svn/";
-    rev = "1859";
-    sha256 = "1d94935aqbkb994nqkw7m8xcynyz9rm6k7k59igrbjak8b63qpi6";
+    rev = "1900";
+    sha256 = "rWoNlU/BWKZBPzRb1HqU6T0sT7aK6dpqKPe88+o/4sA=";
   };
-  sourceRoot = "svn-r1859/FreeImage/trunk";
+  sourceRoot = "svn-r1900/FreeImage/trunk";
 
   # Ensure that the bundled libraries are not used at all
   prePatch = "rm -rf Source/Lib* Source/OpenEXR Source/ZLib";

diff --git a/pkgs/development/libraries/freeimage/unbundle.diff b/pkgs/development/libraries/freeimage/unbundle.diff
index 22dac720155a..99a7cfb4ca35 100644
--- a/pkgs/development/libraries/freeimage/unbundle.diff
+++ b/pkgs/development/libraries/freeimage/unbundle.diff
@@ -1,5 +1,5 @@
 diff --git a/Makefile.fip b/Makefile.fip
-index 660a026..86b3040 100644
+index 660a0265..86b30401 100644
 --- a/Makefile.fip
 +++ b/Makefile.fip
 @@ -11,32 +11,21 @@ INSTALLDIR ?= $(DESTDIR)/usr/lib
 	ln -sf $(VERLIBNAME) $(INSTALLDIR)/$(LIBNAME)
 
 diff --git a/Makefile.gnu b/Makefile.gnu
-index a4f2601..be95476 100644
+index a4f26013..be954761 100644
 --- a/Makefile.gnu
 +++ b/Makefile.gnu
 @@ -11,32 +11,21 @@ INSTALLDIR ?= $(DESTDIR)/usr/lib
 clean:
 	rm -f core Dist/*.* u2dtmp* $(MODULES) $(STATICLIB) $(SHAREDLIB) $(LIBNAME)
 
 diff --git a/Makefile.osx b/Makefile.osx
-index c39121d..3800e39 100644
+index c39121db..3800e39d 100644
 --- a/Makefile.osx
 +++ b/Makefile.osx
 @@ -10,24 +10,25 @@ MACOSX_SYSROOT = $(shell xcrun --show-sdk-path)
 	ln -sf $(SHAREDLIB) $(INSTALLDIR)/$(LIBNAME)
 
 diff --git a/Makefile.srcs b/Makefile.srcs
-index 0b8d3a0..b706fd0 100644
+index
692c6e56..212195b2 100644 --- a/Makefile.srcs +++ b/Makefile.srcs @@ -1,6 +1,6 @@ VER_MAJOR = 3 VER_MINOR = 19.0 --SRCS = ./Source/FreeImage/BitmapAccess.cpp ./Source/FreeImage/ColorLookup.cpp ./Source/FreeImage/ConversionRGBA16.cpp ./Source/FreeImage/ConversionRGBAF.cpp ./Source/FreeImage/FreeImage.cpp ./Source/FreeImage/FreeImageC.c ./Source/FreeImage/FreeImageIO.cpp ./Source/FreeImage/GetType.cpp ./Source/FreeImage/LFPQuantizer.cpp ./Source/FreeImage/MemoryIO.cpp ./Source/FreeImage/PixelAccess.cpp ./Source/FreeImage/J2KHelper.cpp ./Source/FreeImage/MNGHelper.cpp ./Source/FreeImage/Plugin.cpp ./Source/FreeImage/PluginBMP.cpp ./Source/FreeImage/PluginCUT.cpp ./Source/FreeImage/PluginDDS.cpp ./Source/FreeImage/PluginEXR.cpp ./Source/FreeImage/PluginG3.cpp ./Source/FreeImage/PluginGIF.cpp ./Source/FreeImage/PluginHDR.cpp ./Source/FreeImage/PluginICO.cpp ./Source/FreeImage/PluginIFF.cpp ./Source/FreeImage/PluginJ2K.cpp ./Source/FreeImage/PluginJNG.cpp ./Source/FreeImage/PluginJP2.cpp ./Source/FreeImage/PluginJPEG.cpp ./Source/FreeImage/PluginJXR.cpp ./Source/FreeImage/PluginKOALA.cpp ./Source/FreeImage/PluginMNG.cpp ./Source/FreeImage/PluginPCD.cpp ./Source/FreeImage/PluginPCX.cpp ./Source/FreeImage/PluginPFM.cpp ./Source/FreeImage/PluginPICT.cpp ./Source/FreeImage/PluginPNG.cpp ./Source/FreeImage/PluginPNM.cpp ./Source/FreeImage/PluginPSD.cpp ./Source/FreeImage/PluginRAS.cpp ./Source/FreeImage/PluginRAW.cpp ./Source/FreeImage/PluginSGI.cpp ./Source/FreeImage/PluginTARGA.cpp ./Source/FreeImage/PluginTIFF.cpp ./Source/FreeImage/PluginWBMP.cpp ./Source/FreeImage/PluginWebP.cpp ./Source/FreeImage/PluginXBM.cpp ./Source/FreeImage/PluginXPM.cpp ./Source/FreeImage/PSDParser.cpp ./Source/FreeImage/TIFFLogLuv.cpp ./Source/FreeImage/Conversion.cpp ./Source/FreeImage/Conversion16_555.cpp ./Source/FreeImage/Conversion16_565.cpp ./Source/FreeImage/Conversion24.cpp ./Source/FreeImage/Conversion32.cpp ./Source/FreeImage/Conversion4.cpp ./Source/FreeImage/Conversion8.cpp ./Source/FreeImage/ConversionFloat.cpp ./Source/FreeImage/ConversionRGB16.cpp ./Source/FreeImage/ConversionRGBF.cpp ./Source/FreeImage/ConversionType.cpp ./Source/FreeImage/ConversionUINT16.cpp ./Source/FreeImage/Halftoning.cpp ./Source/FreeImage/tmoColorConvert.cpp ./Source/FreeImage/tmoDrago03.cpp ./Source/FreeImage/tmoFattal02.cpp ./Source/FreeImage/tmoReinhard05.cpp ./Source/FreeImage/ToneMapping.cpp ./Source/FreeImage/NNQuantizer.cpp ./Source/FreeImage/WuQuantizer.cpp ./Source/FreeImage/CacheFile.cpp ./Source/FreeImage/MultiPage.cpp ./Source/FreeImage/ZLibInterface.cpp ./Source/Metadata/Exif.cpp ./Source/Metadata/FIRational.cpp ./Source/Metadata/FreeImageTag.cpp ./Source/Metadata/IPTC.cpp ./Source/Metadata/TagConversion.cpp ./Source/Metadata/TagLib.cpp ./Source/Metadata/XTIFF.cpp ./Source/FreeImageToolkit/Background.cpp ./Source/FreeImageToolkit/BSplineRotate.cpp ./Source/FreeImageToolkit/Channels.cpp ./Source/FreeImageToolkit/ClassicRotate.cpp ./Source/FreeImageToolkit/Colors.cpp ./Source/FreeImageToolkit/CopyPaste.cpp ./Source/FreeImageToolkit/Display.cpp ./Source/FreeImageToolkit/Flip.cpp ./Source/FreeImageToolkit/JPEGTransform.cpp ./Source/FreeImageToolkit/MultigridPoissonSolver.cpp ./Source/FreeImageToolkit/Rescale.cpp ./Source/FreeImageToolkit/Resize.cpp Source/LibJPEG/jaricom.c Source/LibJPEG/jcapimin.c Source/LibJPEG/jcapistd.c Source/LibJPEG/jcarith.c Source/LibJPEG/jccoefct.c Source/LibJPEG/jccolor.c Source/LibJPEG/jcdctmgr.c Source/LibJPEG/jchuff.c Source/LibJPEG/jcinit.c Source/LibJPEG/jcmainct.c Source/LibJPEG/jcmarker.c 
Source/LibJPEG/jcmaster.c Source/LibJPEG/jcomapi.c Source/LibJPEG/jcparam.c Source/LibJPEG/jcprepct.c Source/LibJPEG/jcsample.c Source/LibJPEG/jctrans.c Source/LibJPEG/jdapimin.c Source/LibJPEG/jdapistd.c Source/LibJPEG/jdarith.c Source/LibJPEG/jdatadst.c Source/LibJPEG/jdatasrc.c Source/LibJPEG/jdcoefct.c Source/LibJPEG/jdcolor.c Source/LibJPEG/jddctmgr.c Source/LibJPEG/jdhuff.c Source/LibJPEG/jdinput.c Source/LibJPEG/jdmainct.c Source/LibJPEG/jdmarker.c Source/LibJPEG/jdmaster.c Source/LibJPEG/jdmerge.c Source/LibJPEG/jdpostct.c Source/LibJPEG/jdsample.c Source/LibJPEG/jdtrans.c Source/LibJPEG/jerror.c Source/LibJPEG/jfdctflt.c Source/LibJPEG/jfdctfst.c Source/LibJPEG/jfdctint.c Source/LibJPEG/jidctflt.c Source/LibJPEG/jidctfst.c Source/LibJPEG/jidctint.c Source/LibJPEG/jmemmgr.c Source/LibJPEG/jmemnobs.c Source/LibJPEG/jquant1.c Source/LibJPEG/jquant2.c Source/LibJPEG/jutils.c Source/LibJPEG/transupp.c Source/LibPNG/png.c Source/LibPNG/pngerror.c Source/LibPNG/pngget.c Source/LibPNG/pngmem.c Source/LibPNG/pngpread.c Source/LibPNG/pngread.c Source/LibPNG/pngrio.c Source/LibPNG/pngrtran.c Source/LibPNG/pngrutil.c Source/LibPNG/pngset.c Source/LibPNG/pngtrans.c Source/LibPNG/pngwio.c Source/LibPNG/pngwrite.c Source/LibPNG/pngwtran.c Source/LibPNG/pngwutil.c Source/LibTIFF4/tif_aux.c Source/LibTIFF4/tif_close.c Source/LibTIFF4/tif_codec.c Source/LibTIFF4/tif_color.c Source/LibTIFF4/tif_compress.c Source/LibTIFF4/tif_dir.c Source/LibTIFF4/tif_dirinfo.c Source/LibTIFF4/tif_dirread.c Source/LibTIFF4/tif_dirwrite.c Source/LibTIFF4/tif_dumpmode.c Source/LibTIFF4/tif_error.c Source/LibTIFF4/tif_extension.c Source/LibTIFF4/tif_fax3.c Source/LibTIFF4/tif_fax3sm.c Source/LibTIFF4/tif_flush.c Source/LibTIFF4/tif_getimage.c Source/LibTIFF4/tif_jpeg.c Source/LibTIFF4/tif_luv.c Source/LibTIFF4/tif_lzma.c Source/LibTIFF4/tif_lzw.c Source/LibTIFF4/tif_next.c Source/LibTIFF4/tif_ojpeg.c Source/LibTIFF4/tif_open.c Source/LibTIFF4/tif_packbits.c Source/LibTIFF4/tif_pixarlog.c Source/LibTIFF4/tif_predict.c Source/LibTIFF4/tif_print.c Source/LibTIFF4/tif_read.c Source/LibTIFF4/tif_strip.c Source/LibTIFF4/tif_swab.c Source/LibTIFF4/tif_thunder.c Source/LibTIFF4/tif_tile.c Source/LibTIFF4/tif_version.c Source/LibTIFF4/tif_warning.c Source/LibTIFF4/tif_write.c Source/LibTIFF4/tif_zip.c Source/ZLib/adler32.c Source/ZLib/compress.c Source/ZLib/crc32.c Source/ZLib/deflate.c Source/ZLib/gzclose.c Source/ZLib/gzlib.c Source/ZLib/gzread.c Source/ZLib/gzwrite.c Source/ZLib/infback.c Source/ZLib/inffast.c Source/ZLib/inflate.c Source/ZLib/inftrees.c Source/ZLib/trees.c Source/ZLib/uncompr.c Source/ZLib/zutil.c Source/LibOpenJPEG/bio.c Source/LibOpenJPEG/cio.c Source/LibOpenJPEG/dwt.c Source/LibOpenJPEG/event.c Source/LibOpenJPEG/function_list.c Source/LibOpenJPEG/image.c Source/LibOpenJPEG/invert.c Source/LibOpenJPEG/j2k.c Source/LibOpenJPEG/jp2.c Source/LibOpenJPEG/mct.c Source/LibOpenJPEG/mqc.c Source/LibOpenJPEG/openjpeg.c Source/LibOpenJPEG/opj_clock.c Source/LibOpenJPEG/pi.c Source/LibOpenJPEG/raw.c Source/LibOpenJPEG/t1.c Source/LibOpenJPEG/t2.c Source/LibOpenJPEG/tcd.c Source/LibOpenJPEG/tgt.c Source/OpenEXR/IexMath/IexMathFpu.cpp Source/OpenEXR/IlmImf/b44ExpLogTable.cpp Source/OpenEXR/IlmImf/ImfAcesFile.cpp Source/OpenEXR/IlmImf/ImfAttribute.cpp Source/OpenEXR/IlmImf/ImfB44Compressor.cpp Source/OpenEXR/IlmImf/ImfBoxAttribute.cpp Source/OpenEXR/IlmImf/ImfChannelList.cpp Source/OpenEXR/IlmImf/ImfChannelListAttribute.cpp Source/OpenEXR/IlmImf/ImfChromaticities.cpp Source/OpenEXR/IlmImf/ImfChromaticitiesAttribute.cpp 
Source/OpenEXR/IlmImf/ImfCompositeDeepScanLine.cpp Source/OpenEXR/IlmImf/ImfCompressionAttribute.cpp Source/OpenEXR/IlmImf/ImfCompressor.cpp Source/OpenEXR/IlmImf/ImfConvert.cpp Source/OpenEXR/IlmImf/ImfCRgbaFile.cpp Source/OpenEXR/IlmImf/ImfDeepCompositing.cpp Source/OpenEXR/IlmImf/ImfDeepFrameBuffer.cpp Source/OpenEXR/IlmImf/ImfDeepImageStateAttribute.cpp Source/OpenEXR/IlmImf/ImfDeepScanLineInputFile.cpp Source/OpenEXR/IlmImf/ImfDeepScanLineInputPart.cpp Source/OpenEXR/IlmImf/ImfDeepScanLineOutputFile.cpp Source/OpenEXR/IlmImf/ImfDeepScanLineOutputPart.cpp Source/OpenEXR/IlmImf/ImfDeepTiledInputFile.cpp Source/OpenEXR/IlmImf/ImfDeepTiledInputPart.cpp Source/OpenEXR/IlmImf/ImfDeepTiledOutputFile.cpp Source/OpenEXR/IlmImf/ImfDeepTiledOutputPart.cpp Source/OpenEXR/IlmImf/ImfDoubleAttribute.cpp Source/OpenEXR/IlmImf/ImfDwaCompressor.cpp Source/OpenEXR/IlmImf/ImfEnvmap.cpp Source/OpenEXR/IlmImf/ImfEnvmapAttribute.cpp Source/OpenEXR/IlmImf/ImfFastHuf.cpp Source/OpenEXR/IlmImf/ImfFloatAttribute.cpp Source/OpenEXR/IlmImf/ImfFloatVectorAttribute.cpp Source/OpenEXR/IlmImf/ImfFrameBuffer.cpp Source/OpenEXR/IlmImf/ImfFramesPerSecond.cpp Source/OpenEXR/IlmImf/ImfGenericInputFile.cpp Source/OpenEXR/IlmImf/ImfGenericOutputFile.cpp Source/OpenEXR/IlmImf/ImfHeader.cpp Source/OpenEXR/IlmImf/ImfHuf.cpp Source/OpenEXR/IlmImf/ImfInputFile.cpp Source/OpenEXR/IlmImf/ImfInputPart.cpp Source/OpenEXR/IlmImf/ImfInputPartData.cpp Source/OpenEXR/IlmImf/ImfIntAttribute.cpp Source/OpenEXR/IlmImf/ImfIO.cpp Source/OpenEXR/IlmImf/ImfKeyCode.cpp Source/OpenEXR/IlmImf/ImfKeyCodeAttribute.cpp Source/OpenEXR/IlmImf/ImfLineOrderAttribute.cpp Source/OpenEXR/IlmImf/ImfLut.cpp Source/OpenEXR/IlmImf/ImfMatrixAttribute.cpp Source/OpenEXR/IlmImf/ImfMisc.cpp Source/OpenEXR/IlmImf/ImfMultiPartInputFile.cpp Source/OpenEXR/IlmImf/ImfMultiPartOutputFile.cpp Source/OpenEXR/IlmImf/ImfMultiView.cpp Source/OpenEXR/IlmImf/ImfOpaqueAttribute.cpp Source/OpenEXR/IlmImf/ImfOutputFile.cpp Source/OpenEXR/IlmImf/ImfOutputPart.cpp Source/OpenEXR/IlmImf/ImfOutputPartData.cpp Source/OpenEXR/IlmImf/ImfPartType.cpp Source/OpenEXR/IlmImf/ImfPizCompressor.cpp Source/OpenEXR/IlmImf/ImfPreviewImage.cpp Source/OpenEXR/IlmImf/ImfPreviewImageAttribute.cpp Source/OpenEXR/IlmImf/ImfPxr24Compressor.cpp Source/OpenEXR/IlmImf/ImfRational.cpp Source/OpenEXR/IlmImf/ImfRationalAttribute.cpp Source/OpenEXR/IlmImf/ImfRgbaFile.cpp Source/OpenEXR/IlmImf/ImfRgbaYca.cpp Source/OpenEXR/IlmImf/ImfRle.cpp Source/OpenEXR/IlmImf/ImfRleCompressor.cpp Source/OpenEXR/IlmImf/ImfScanLineInputFile.cpp Source/OpenEXR/IlmImf/ImfStandardAttributes.cpp Source/OpenEXR/IlmImf/ImfStdIO.cpp Source/OpenEXR/IlmImf/ImfStringAttribute.cpp Source/OpenEXR/IlmImf/ImfStringVectorAttribute.cpp Source/OpenEXR/IlmImf/ImfSystemSpecific.cpp Source/OpenEXR/IlmImf/ImfTestFile.cpp Source/OpenEXR/IlmImf/ImfThreading.cpp Source/OpenEXR/IlmImf/ImfTileDescriptionAttribute.cpp Source/OpenEXR/IlmImf/ImfTiledInputFile.cpp Source/OpenEXR/IlmImf/ImfTiledInputPart.cpp Source/OpenEXR/IlmImf/ImfTiledMisc.cpp Source/OpenEXR/IlmImf/ImfTiledOutputFile.cpp Source/OpenEXR/IlmImf/ImfTiledOutputPart.cpp Source/OpenEXR/IlmImf/ImfTiledRgbaFile.cpp Source/OpenEXR/IlmImf/ImfTileOffsets.cpp Source/OpenEXR/IlmImf/ImfTimeCode.cpp Source/OpenEXR/IlmImf/ImfTimeCodeAttribute.cpp Source/OpenEXR/IlmImf/ImfVecAttribute.cpp Source/OpenEXR/IlmImf/ImfVersion.cpp Source/OpenEXR/IlmImf/ImfWav.cpp Source/OpenEXR/IlmImf/ImfZip.cpp Source/OpenEXR/IlmImf/ImfZipCompressor.cpp Source/OpenEXR/Imath/ImathBox.cpp Source/OpenEXR/Imath/ImathColorAlgo.cpp 
Source/OpenEXR/Imath/ImathFun.cpp Source/OpenEXR/Imath/ImathMatrixAlgo.cpp Source/OpenEXR/Imath/ImathRandom.cpp Source/OpenEXR/Imath/ImathShear.cpp Source/OpenEXR/Imath/ImathVec.cpp Source/OpenEXR/Iex/IexBaseExc.cpp Source/OpenEXR/Iex/IexThrowErrnoExc.cpp Source/OpenEXR/Half/half.cpp Source/OpenEXR/IlmThread/IlmThread.cpp Source/OpenEXR/IlmThread/IlmThreadMutex.cpp Source/OpenEXR/IlmThread/IlmThreadPool.cpp Source/OpenEXR/IlmThread/IlmThreadSemaphore.cpp Source/OpenEXR/IexMath/IexMathFloatExc.cpp Source/LibRawLite/internal/dcraw_common.cpp Source/LibRawLite/internal/dcraw_fileio.cpp Source/LibRawLite/internal/demosaic_packs.cpp Source/LibRawLite/src/libraw_c_api.cpp Source/LibRawLite/src/libraw_cxx.cpp Source/LibRawLite/src/libraw_datastream.cpp Source/LibWebP/src/dec/alpha_dec.c Source/LibWebP/src/dec/buffer_dec.c Source/LibWebP/src/dec/frame_dec.c Source/LibWebP/src/dec/idec_dec.c Source/LibWebP/src/dec/io_dec.c Source/LibWebP/src/dec/quant_dec.c Source/LibWebP/src/dec/tree_dec.c Source/LibWebP/src/dec/vp8l_dec.c Source/LibWebP/src/dec/vp8_dec.c Source/LibWebP/src/dec/webp_dec.c Source/LibWebP/src/demux/anim_decode.c Source/LibWebP/src/demux/demux.c Source/LibWebP/src/dsp/alpha_processing.c Source/LibWebP/src/dsp/alpha_processing_mips_dsp_r2.c Source/LibWebP/src/dsp/alpha_processing_neon.c Source/LibWebP/src/dsp/alpha_processing_sse2.c Source/LibWebP/src/dsp/alpha_processing_sse41.c Source/LibWebP/src/dsp/cost.c Source/LibWebP/src/dsp/cost_mips32.c Source/LibWebP/src/dsp/cost_mips_dsp_r2.c Source/LibWebP/src/dsp/cost_neon.c Source/LibWebP/src/dsp/cost_sse2.c Source/LibWebP/src/dsp/cpu.c Source/LibWebP/src/dsp/dec.c Source/LibWebP/src/dsp/dec_clip_tables.c Source/LibWebP/src/dsp/dec_mips32.c Source/LibWebP/src/dsp/dec_mips_dsp_r2.c Source/LibWebP/src/dsp/dec_msa.c Source/LibWebP/src/dsp/dec_neon.c Source/LibWebP/src/dsp/dec_sse2.c Source/LibWebP/src/dsp/dec_sse41.c Source/LibWebP/src/dsp/enc.c Source/LibWebP/src/dsp/enc_avx2.c Source/LibWebP/src/dsp/enc_mips32.c Source/LibWebP/src/dsp/enc_mips_dsp_r2.c Source/LibWebP/src/dsp/enc_msa.c Source/LibWebP/src/dsp/enc_neon.c Source/LibWebP/src/dsp/enc_sse2.c Source/LibWebP/src/dsp/enc_sse41.c Source/LibWebP/src/dsp/filters.c Source/LibWebP/src/dsp/filters_mips_dsp_r2.c Source/LibWebP/src/dsp/filters_msa.c Source/LibWebP/src/dsp/filters_neon.c Source/LibWebP/src/dsp/filters_sse2.c Source/LibWebP/src/dsp/lossless.c Source/LibWebP/src/dsp/lossless_enc.c Source/LibWebP/src/dsp/lossless_enc_mips32.c Source/LibWebP/src/dsp/lossless_enc_mips_dsp_r2.c Source/LibWebP/src/dsp/lossless_enc_msa.c Source/LibWebP/src/dsp/lossless_enc_neon.c Source/LibWebP/src/dsp/lossless_enc_sse2.c Source/LibWebP/src/dsp/lossless_enc_sse41.c Source/LibWebP/src/dsp/lossless_mips_dsp_r2.c Source/LibWebP/src/dsp/lossless_msa.c Source/LibWebP/src/dsp/lossless_neon.c Source/LibWebP/src/dsp/lossless_sse2.c Source/LibWebP/src/dsp/rescaler.c Source/LibWebP/src/dsp/rescaler_mips32.c Source/LibWebP/src/dsp/rescaler_mips_dsp_r2.c Source/LibWebP/src/dsp/rescaler_msa.c Source/LibWebP/src/dsp/rescaler_neon.c Source/LibWebP/src/dsp/rescaler_sse2.c Source/LibWebP/src/dsp/ssim.c Source/LibWebP/src/dsp/ssim_sse2.c Source/LibWebP/src/dsp/upsampling.c Source/LibWebP/src/dsp/upsampling_mips_dsp_r2.c Source/LibWebP/src/dsp/upsampling_msa.c Source/LibWebP/src/dsp/upsampling_neon.c Source/LibWebP/src/dsp/upsampling_sse2.c Source/LibWebP/src/dsp/upsampling_sse41.c Source/LibWebP/src/dsp/yuv.c Source/LibWebP/src/dsp/yuv_mips32.c Source/LibWebP/src/dsp/yuv_mips_dsp_r2.c 
Source/LibWebP/src/dsp/yuv_neon.c Source/LibWebP/src/dsp/yuv_sse2.c Source/LibWebP/src/dsp/yuv_sse41.c Source/LibWebP/src/enc/alpha_enc.c Source/LibWebP/src/enc/analysis_enc.c Source/LibWebP/src/enc/backward_references_cost_enc.c Source/LibWebP/src/enc/backward_references_enc.c Source/LibWebP/src/enc/config_enc.c Source/LibWebP/src/enc/cost_enc.c Source/LibWebP/src/enc/filter_enc.c Source/LibWebP/src/enc/frame_enc.c Source/LibWebP/src/enc/histogram_enc.c Source/LibWebP/src/enc/iterator_enc.c Source/LibWebP/src/enc/near_lossless_enc.c Source/LibWebP/src/enc/picture_csp_enc.c Source/LibWebP/src/enc/picture_enc.c Source/LibWebP/src/enc/picture_psnr_enc.c Source/LibWebP/src/enc/picture_rescale_enc.c Source/LibWebP/src/enc/picture_tools_enc.c Source/LibWebP/src/enc/predictor_enc.c Source/LibWebP/src/enc/quant_enc.c Source/LibWebP/src/enc/syntax_enc.c Source/LibWebP/src/enc/token_enc.c Source/LibWebP/src/enc/tree_enc.c Source/LibWebP/src/enc/vp8l_enc.c Source/LibWebP/src/enc/webp_enc.c Source/LibWebP/src/mux/anim_encode.c Source/LibWebP/src/mux/muxedit.c Source/LibWebP/src/mux/muxinternal.c Source/LibWebP/src/mux/muxread.c Source/LibWebP/src/utils/bit_reader_utils.c Source/LibWebP/src/utils/bit_writer_utils.c Source/LibWebP/src/utils/color_cache_utils.c Source/LibWebP/src/utils/filters_utils.c Source/LibWebP/src/utils/huffman_encode_utils.c Source/LibWebP/src/utils/huffman_utils.c Source/LibWebP/src/utils/quant_levels_dec_utils.c Source/LibWebP/src/utils/quant_levels_utils.c Source/LibWebP/src/utils/random_utils.c Source/LibWebP/src/utils/rescaler_utils.c Source/LibWebP/src/utils/thread_utils.c Source/LibWebP/src/utils/utils.c Source/LibJXR/image/decode/decode.c Source/LibJXR/image/decode/JXRTranscode.c Source/LibJXR/image/decode/postprocess.c Source/LibJXR/image/decode/segdec.c Source/LibJXR/image/decode/strdec.c Source/LibJXR/image/decode/strdec_x86.c Source/LibJXR/image/decode/strInvTransform.c Source/LibJXR/image/decode/strPredQuantDec.c Source/LibJXR/image/encode/encode.c Source/LibJXR/image/encode/segenc.c Source/LibJXR/image/encode/strenc.c Source/LibJXR/image/encode/strenc_x86.c Source/LibJXR/image/encode/strFwdTransform.c Source/LibJXR/image/encode/strPredQuantEnc.c Source/LibJXR/image/sys/adapthuff.c Source/LibJXR/image/sys/image.c Source/LibJXR/image/sys/strcodec.c Source/LibJXR/image/sys/strPredQuant.c Source/LibJXR/image/sys/strTransform.c Source/LibJXR/jxrgluelib/JXRGlue.c Source/LibJXR/jxrgluelib/JXRGlueJxr.c Source/LibJXR/jxrgluelib/JXRGluePFC.c Source/LibJXR/jxrgluelib/JXRMeta.c --INCLS = ./Examples/OpenGL/TextureManager/TextureManager.h ./Examples/Plugin/PluginCradle.h ./Examples/Generic/FIIO_Mem.h ./Source/MapIntrospector.h ./Source/CacheFile.h ./Source/LibJPEG/cderror.h ./Source/LibJPEG/jmorecfg.h ./Source/LibJPEG/transupp.h ./Source/LibJPEG/jpeglib.h ./Source/LibJPEG/jversion.h ./Source/LibJPEG/jinclude.h ./Source/LibJPEG/jerror.h ./Source/LibJPEG/jconfig.h ./Source/LibJPEG/jdct.h ./Source/LibJPEG/cdjpeg.h ./Source/LibJPEG/jmemsys.h ./Source/LibJPEG/jpegint.h ./Source/Plugin.h ./Source/Metadata/FreeImageTag.h ./Source/Metadata/FIRational.h ./Source/ToneMapping.h ./Source/LibTIFF4/tiffconf.vc.h ./Source/LibTIFF4/tif_config.h ./Source/LibTIFF4/tif_fax3.h ./Source/LibTIFF4/tif_config.vc.h ./Source/LibTIFF4/tiffvers.h ./Source/LibTIFF4/tiffio.h ./Source/LibTIFF4/tif_config.wince.h ./Source/LibTIFF4/tiffconf.wince.h ./Source/LibTIFF4/tiff.h ./Source/LibTIFF4/uvcode.h ./Source/LibTIFF4/tif_dir.h ./Source/LibTIFF4/t4.h ./Source/LibTIFF4/tif_predict.h ./Source/LibTIFF4/tiffiop.h 
./Source/LibTIFF4/tiffconf.h ./Source/LibWebP/src/dec/alphai_dec.h ./Source/LibWebP/src/dec/common_dec.h ./Source/LibWebP/src/dec/vp8i_dec.h ./Source/LibWebP/src/dec/webpi_dec.h ./Source/LibWebP/src/dec/vp8li_dec.h ./Source/LibWebP/src/dec/vp8_dec.h ./Source/LibWebP/src/enc/cost_enc.h ./Source/LibWebP/src/enc/histogram_enc.h ./Source/LibWebP/src/enc/vp8li_enc.h ./Source/LibWebP/src/enc/backward_references_enc.h ./Source/LibWebP/src/enc/vp8i_enc.h ./Source/LibWebP/src/utils/bit_reader_utils.h ./Source/LibWebP/src/utils/endian_inl_utils.h ./Source/LibWebP/src/utils/huffman_encode_utils.h ./Source/LibWebP/src/utils/bit_writer_utils.h ./Source/LibWebP/src/utils/random_utils.h ./Source/LibWebP/src/utils/bit_reader_inl_utils.h ./Source/LibWebP/src/utils/quant_levels_dec_utils.h ./Source/LibWebP/src/utils/color_cache_utils.h ./Source/LibWebP/src/utils/thread_utils.h ./Source/LibWebP/src/utils/filters_utils.h ./Source/LibWebP/src/utils/rescaler_utils.h ./Source/LibWebP/src/utils/huffman_utils.h ./Source/LibWebP/src/utils/quant_levels_utils.h ./Source/LibWebP/src/utils/utils.h ./Source/LibWebP/src/mux/muxi.h ./Source/LibWebP/src/mux/animi.h ./Source/LibWebP/src/webp/mux.h ./Source/LibWebP/src/webp/types.h ./Source/LibWebP/src/webp/format_constants.h ./Source/LibWebP/src/webp/demux.h ./Source/LibWebP/src/webp/encode.h ./Source/LibWebP/src/webp/decode.h ./Source/LibWebP/src/webp/mux_types.h ./Source/LibWebP/src/dsp/msa_macro.h ./Source/LibWebP/src/dsp/yuv.h ./Source/LibWebP/src/dsp/common_sse41.h ./Source/LibWebP/src/dsp/neon.h ./Source/LibWebP/src/dsp/common_sse2.h ./Source/LibWebP/src/dsp/quant.h ./Source/LibWebP/src/dsp/lossless_common.h ./Source/LibWebP/src/dsp/mips_macro.h ./Source/LibWebP/src/dsp/dsp.h ./Source/LibWebP/src/dsp/lossless.h ./Source/FreeImageIO.h ./Source/FreeImage.h ./Source/FreeImage/PSDParser.h ./Source/FreeImage/J2KHelper.h ./Source/ZLib/trees.h ./Source/ZLib/inffixed.h ./Source/ZLib/inflate.h ./Source/ZLib/zlib.h ./Source/ZLib/zconf.h ./Source/ZLib/inftrees.h ./Source/ZLib/zutil.h ./Source/ZLib/inffast.h ./Source/ZLib/crc32.h ./Source/ZLib/gzguts.h ./Source/ZLib/deflate.h ./Source/Quantizers.h ./Source/LibOpenJPEG/cio.h ./Source/LibOpenJPEG/mqc.h ./Source/LibOpenJPEG/cidx_manager.h ./Source/LibOpenJPEG/function_list.h ./Source/LibOpenJPEG/indexbox_manager.h ./Source/LibOpenJPEG/opj_config.h ./Source/LibOpenJPEG/opj_clock.h ./Source/LibOpenJPEG/event.h ./Source/LibOpenJPEG/opj_codec.h ./Source/LibOpenJPEG/pi.h ./Source/LibOpenJPEG/dwt.h ./Source/LibOpenJPEG/tgt.h ./Source/LibOpenJPEG/invert.h ./Source/LibOpenJPEG/opj_malloc.h ./Source/LibOpenJPEG/raw.h ./Source/LibOpenJPEG/jp2.h ./Source/LibOpenJPEG/bio.h ./Source/LibOpenJPEG/t2.h ./Source/LibOpenJPEG/mct.h ./Source/LibOpenJPEG/t1.h ./Source/LibOpenJPEG/t1_luts.h ./Source/LibOpenJPEG/j2k.h ./Source/LibOpenJPEG/opj_stdint.h ./Source/LibOpenJPEG/opj_config_private.h ./Source/LibOpenJPEG/opj_includes.h ./Source/LibOpenJPEG/opj_intmath.h ./Source/LibOpenJPEG/image.h ./Source/LibOpenJPEG/opj_inttypes.h ./Source/LibOpenJPEG/openjpeg.h ./Source/LibOpenJPEG/tcd.h ./Source/LibRawLite/libraw/libraw_version.h ./Source/LibRawLite/libraw/libraw_const.h ./Source/LibRawLite/libraw/libraw.h ./Source/LibRawLite/libraw/libraw_types.h ./Source/LibRawLite/libraw/libraw_alloc.h ./Source/LibRawLite/libraw/libraw_datastream.h ./Source/LibRawLite/libraw/libraw_internal.h ./Source/LibRawLite/internal/dmp_include.h ./Source/LibRawLite/internal/libraw_const.h ./Source/LibRawLite/internal/var_defines.h ./Source/LibRawLite/internal/x3f_tools.h 
./Source/LibRawLite/internal/defines.h ./Source/LibRawLite/internal/dcraw_fileio_defs.h ./Source/LibRawLite/internal/dcraw_defs.h ./Source/LibRawLite/internal/libraw_cxx_defs.h ./Source/LibRawLite/internal/libraw_internal_funcs.h ./Source/LibPNG/png.h ./Source/LibPNG/pngdebug.h ./Source/LibPNG/pnginfo.h ./Source/LibPNG/pnglibconf.h ./Source/LibPNG/pngstruct.h ./Source/LibPNG/pngpriv.h ./Source/LibPNG/pngconf.h ./Source/LibJXR/common/include/wmspecstrings_strict.h ./Source/LibJXR/common/include/wmspecstring.h ./Source/LibJXR/common/include/guiddef.h ./Source/LibJXR/common/include/wmsal.h ./Source/LibJXR/common/include/wmspecstrings_undef.h ./Source/LibJXR/common/include/wmspecstrings_adt.h ./Source/LibJXR/jxrgluelib/JXRGlue.h ./Source/LibJXR/jxrgluelib/JXRMeta.h ./Source/LibJXR/image/sys/xplatform_image.h ./Source/LibJXR/image/sys/strTransform.h ./Source/LibJXR/image/sys/windowsmediaphoto.h ./Source/LibJXR/image/sys/strcodec.h ./Source/LibJXR/image/sys/ansi.h ./Source/LibJXR/image/sys/perfTimer.h ./Source/LibJXR/image/sys/common.h ./Source/LibJXR/image/decode/decode.h ./Source/LibJXR/image/x86/x86.h ./Source/LibJXR/image/encode/encode.h ./Source/Utilities.h ./Source/FreeImageToolkit/Resize.h ./Source/FreeImageToolkit/Filters.h ./Source/OpenEXR/OpenEXRConfig.h ./Source/OpenEXR/IexMath/IexMathFloatExc.h ./Source/OpenEXR/IexMath/IexMathFpu.h ./Source/OpenEXR/IexMath/IexMathIeeeExc.h ./Source/OpenEXR/IlmThread/IlmThread.h ./Source/OpenEXR/IlmThread/IlmThreadMutex.h ./Source/OpenEXR/IlmThread/IlmThreadForward.h ./Source/OpenEXR/IlmThread/IlmThreadExport.h ./Source/OpenEXR/IlmThread/IlmThreadSemaphore.h ./Source/OpenEXR/IlmThread/IlmThreadPool.h ./Source/OpenEXR/IlmThread/IlmThreadNamespace.h ./Source/OpenEXR/Iex/IexErrnoExc.h ./Source/OpenEXR/Iex/IexMacros.h ./Source/OpenEXR/Iex/IexForward.h ./Source/OpenEXR/Iex/IexExport.h ./Source/OpenEXR/Iex/IexThrowErrnoExc.h ./Source/OpenEXR/Iex/IexNamespace.h ./Source/OpenEXR/Iex/IexMathExc.h ./Source/OpenEXR/Iex/IexBaseExc.h ./Source/OpenEXR/Iex/Iex.h ./Source/OpenEXR/Imath/ImathColorAlgo.h ./Source/OpenEXR/Imath/ImathNamespace.h ./Source/OpenEXR/Imath/ImathVec.h ./Source/OpenEXR/Imath/ImathGL.h ./Source/OpenEXR/Imath/ImathSphere.h ./Source/OpenEXR/Imath/ImathEuler.h ./Source/OpenEXR/Imath/ImathLimits.h ./Source/OpenEXR/Imath/ImathQuat.h ./Source/OpenEXR/Imath/ImathRoots.h ./Source/OpenEXR/Imath/ImathFun.h ./Source/OpenEXR/Imath/ImathExport.h ./Source/OpenEXR/Imath/ImathShear.h ./Source/OpenEXR/Imath/ImathPlane.h ./Source/OpenEXR/Imath/ImathForward.h ./Source/OpenEXR/Imath/ImathHalfLimits.h ./Source/OpenEXR/Imath/ImathFrustumTest.h ./Source/OpenEXR/Imath/ImathMatrixAlgo.h ./Source/OpenEXR/Imath/ImathVecAlgo.h ./Source/OpenEXR/Imath/ImathInterval.h ./Source/OpenEXR/Imath/ImathBox.h ./Source/OpenEXR/Imath/ImathFrame.h ./Source/OpenEXR/Imath/ImathColor.h ./Source/OpenEXR/Imath/ImathMath.h ./Source/OpenEXR/Imath/ImathLine.h ./Source/OpenEXR/Imath/ImathBoxAlgo.h ./Source/OpenEXR/Imath/ImathFrustum.h ./Source/OpenEXR/Imath/ImathExc.h ./Source/OpenEXR/Imath/ImathLineAlgo.h ./Source/OpenEXR/Imath/ImathRandom.h ./Source/OpenEXR/Imath/ImathInt64.h ./Source/OpenEXR/Imath/ImathGLU.h ./Source/OpenEXR/Imath/ImathPlatform.h ./Source/OpenEXR/Imath/ImathMatrix.h ./Source/OpenEXR/IlmImf/ImfDeepScanLineOutputPart.h ./Source/OpenEXR/IlmImf/ImfDeepScanLineInputFile.h ./Source/OpenEXR/IlmImf/ImfIO.h ./Source/OpenEXR/IlmImf/ImfStdIO.h ./Source/OpenEXR/IlmImf/ImfPreviewImage.h ./Source/OpenEXR/IlmImf/ImfAttribute.h ./Source/OpenEXR/IlmImf/ImfDwaCompressor.h 
./Source/OpenEXR/IlmImf/ImfChannelList.h ./Source/OpenEXR/IlmImf/ImfInt64.h ./Source/OpenEXR/IlmImf/ImfGenericOutputFile.h ./Source/OpenEXR/IlmImf/ImfHuf.h ./Source/OpenEXR/IlmImf/ImfOptimizedPixelReading.h ./Source/OpenEXR/IlmImf/b44ExpLogTable.h ./Source/OpenEXR/IlmImf/ImfMultiPartOutputFile.h ./Source/OpenEXR/IlmImf/ImfTileDescriptionAttribute.h ./Source/OpenEXR/IlmImf/ImfFastHuf.h ./Source/OpenEXR/IlmImf/dwaLookups.h ./Source/OpenEXR/IlmImf/ImfCompositeDeepScanLine.h ./Source/OpenEXR/IlmImf/ImfDeepFrameBuffer.h ./Source/OpenEXR/IlmImf/ImfInputPartData.h ./Source/OpenEXR/IlmImf/ImfAcesFile.h ./Source/OpenEXR/IlmImf/ImfRgbaYca.h ./Source/OpenEXR/IlmImf/ImfThreading.h ./Source/OpenEXR/IlmImf/ImfWav.h ./Source/OpenEXR/IlmImf/ImfChromaticitiesAttribute.h ./Source/OpenEXR/IlmImf/ImfDwaCompressorSimd.h ./Source/OpenEXR/IlmImf/ImfNamespace.h ./Source/OpenEXR/IlmImf/ImfMatrixAttribute.h ./Source/OpenEXR/IlmImf/ImfTimeCodeAttribute.h ./Source/OpenEXR/IlmImf/ImfInputFile.h ./Source/OpenEXR/IlmImf/ImfDeepScanLineInputPart.h ./Source/OpenEXR/IlmImf/ImfFloatAttribute.h ./Source/OpenEXR/IlmImf/ImfPxr24Compressor.h ./Source/OpenEXR/IlmImf/ImfCompressor.h ./Source/OpenEXR/IlmImf/ImfCRgbaFile.h ./Source/OpenEXR/IlmImf/ImfOutputFile.h ./Source/OpenEXR/IlmImf/ImfTiledInputPart.h ./Source/OpenEXR/IlmImf/ImfRationalAttribute.h ./Source/OpenEXR/IlmImf/ImfTileOffsets.h ./Source/OpenEXR/IlmImf/ImfInputStreamMutex.h ./Source/OpenEXR/IlmImf/ImfIntAttribute.h ./Source/OpenEXR/IlmImf/ImfTiledOutputPart.h ./Source/OpenEXR/IlmImf/ImfPartType.h ./Source/OpenEXR/IlmImf/ImfTiledInputFile.h ./Source/OpenEXR/IlmImf/ImfStringAttribute.h ./Source/OpenEXR/IlmImf/ImfDeepTiledOutputPart.h ./Source/OpenEXR/IlmImf/ImfRleCompressor.h ./Source/OpenEXR/IlmImf/ImfChromaticities.h ./Source/OpenEXR/IlmImf/ImfTestFile.h ./Source/OpenEXR/IlmImf/ImfInputPart.h ./Source/OpenEXR/IlmImf/ImfXdr.h ./Source/OpenEXR/IlmImf/ImfOutputPart.h ./Source/OpenEXR/IlmImf/ImfExport.h ./Source/OpenEXR/IlmImf/ImfRgba.h ./Source/OpenEXR/IlmImf/ImfLineOrder.h ./Source/OpenEXR/IlmImf/ImfCompression.h ./Source/OpenEXR/IlmImf/ImfTiledMisc.h ./Source/OpenEXR/IlmImf/ImfFramesPerSecond.h ./Source/OpenEXR/IlmImf/ImfZipCompressor.h ./Source/OpenEXR/IlmImf/ImfKeyCodeAttribute.h ./Source/OpenEXR/IlmImf/ImfFloatVectorAttribute.h ./Source/OpenEXR/IlmImf/ImfMultiPartInputFile.h ./Source/OpenEXR/IlmImf/ImfDeepTiledOutputFile.h ./Source/OpenEXR/IlmImf/ImfDeepScanLineOutputFile.h ./Source/OpenEXR/IlmImf/ImfRational.h ./Source/OpenEXR/IlmImf/ImfDeepImageStateAttribute.h ./Source/OpenEXR/IlmImf/ImfChannelListAttribute.h ./Source/OpenEXR/IlmImf/ImfDeepCompositing.h ./Source/OpenEXR/IlmImf/ImfOutputPartData.h ./Source/OpenEXR/IlmImf/ImfDeepTiledInputPart.h ./Source/OpenEXR/IlmImf/ImfPreviewImageAttribute.h ./Source/OpenEXR/IlmImf/ImfFrameBuffer.h ./Source/OpenEXR/IlmImf/ImfDeepImageState.h ./Source/OpenEXR/IlmImf/ImfOpaqueAttribute.h ./Source/OpenEXR/IlmImf/ImfEnvmapAttribute.h ./Source/OpenEXR/IlmImf/ImfPizCompressor.h ./Source/OpenEXR/IlmImf/ImfStringVectorAttribute.h ./Source/OpenEXR/IlmImf/ImfMultiView.h ./Source/OpenEXR/IlmImf/ImfAutoArray.h ./Source/OpenEXR/IlmImf/ImfLut.h ./Source/OpenEXR/IlmImf/ImfTiledOutputFile.h ./Source/OpenEXR/IlmImf/ImfBoxAttribute.h ./Source/OpenEXR/IlmImf/ImfCheckedArithmetic.h ./Source/OpenEXR/IlmImf/ImfB44Compressor.h ./Source/OpenEXR/IlmImf/ImfSystemSpecific.h ./Source/OpenEXR/IlmImf/ImfRgbaFile.h ./Source/OpenEXR/IlmImf/ImfTimeCode.h ./Source/OpenEXR/IlmImf/ImfVecAttribute.h ./Source/OpenEXR/IlmImf/ImfDeepTiledInputFile.h 
./Source/OpenEXR/IlmImf/ImfZip.h ./Source/OpenEXR/IlmImf/ImfConvert.h ./Source/OpenEXR/IlmImf/ImfMisc.h ./Source/OpenEXR/IlmImf/ImfHeader.h ./Source/OpenEXR/IlmImf/ImfForward.h ./Source/OpenEXR/IlmImf/ImfPartHelper.h ./Source/OpenEXR/IlmImf/ImfKeyCode.h ./Source/OpenEXR/IlmImf/ImfVersion.h ./Source/OpenEXR/IlmImf/ImfStandardAttributes.h ./Source/OpenEXR/IlmImf/ImfPixelType.h ./Source/OpenEXR/IlmImf/ImfName.h ./Source/OpenEXR/IlmImf/ImfSimd.h ./Source/OpenEXR/IlmImf/ImfArray.h ./Source/OpenEXR/IlmImf/ImfOutputStreamMutex.h ./Source/OpenEXR/IlmImf/ImfTiledRgbaFile.h ./Source/OpenEXR/IlmImf/ImfRle.h ./Source/OpenEXR/IlmImf/ImfScanLineInputFile.h ./Source/OpenEXR/IlmImf/ImfDoubleAttribute.h ./Source/OpenEXR/IlmImf/ImfGenericInputFile.h ./Source/OpenEXR/IlmImf/ImfEnvmap.h ./Source/OpenEXR/IlmImf/ImfLineOrderAttribute.h ./Source/OpenEXR/IlmImf/ImfTileDescription.h ./Source/OpenEXR/IlmImf/ImfCompressionAttribute.h ./Source/OpenEXR/IlmBaseConfig.h ./Source/OpenEXR/Half/halfFunction.h ./Source/OpenEXR/Half/halfExport.h ./Source/OpenEXR/Half/half.h ./Source/OpenEXR/Half/eLut.h ./Source/OpenEXR/Half/halfLimits.h ./Source/OpenEXR/Half/toFloat.h ./Wrapper/FreeImage.NET/cpp/FreeImageIO/FreeImageIO.Net.h ./Wrapper/FreeImage.NET/cpp/FreeImageIO/Stdafx.h ./Wrapper/FreeImage.NET/cpp/FreeImageIO/resource.h ./Wrapper/FreeImagePlus/dist/x64/FreeImagePlus.h ./Wrapper/FreeImagePlus/FreeImagePlus.h ./Wrapper/FreeImagePlus/test/fipTest.h ./TestAPI/TestSuite.h +-SRCS = ./Source/FreeImage/BitmapAccess.cpp ./Source/FreeImage/ColorLookup.cpp ./Source/FreeImage/ConversionRGBA16.cpp ./Source/FreeImage/ConversionRGBAF.cpp ./Source/FreeImage/FreeImage.cpp ./Source/FreeImage/FreeImageC.c ./Source/FreeImage/FreeImageIO.cpp ./Source/FreeImage/GetType.cpp ./Source/FreeImage/LFPQuantizer.cpp ./Source/FreeImage/MemoryIO.cpp ./Source/FreeImage/PixelAccess.cpp ./Source/FreeImage/J2KHelper.cpp ./Source/FreeImage/MNGHelper.cpp ./Source/FreeImage/Plugin.cpp ./Source/FreeImage/PluginBMP.cpp ./Source/FreeImage/PluginCUT.cpp ./Source/FreeImage/PluginDDS.cpp ./Source/FreeImage/PluginEXR.cpp ./Source/FreeImage/PluginG3.cpp ./Source/FreeImage/PluginGIF.cpp ./Source/FreeImage/PluginHDR.cpp ./Source/FreeImage/PluginICO.cpp ./Source/FreeImage/PluginIFF.cpp ./Source/FreeImage/PluginJ2K.cpp ./Source/FreeImage/PluginJNG.cpp ./Source/FreeImage/PluginJP2.cpp ./Source/FreeImage/PluginJPEG.cpp ./Source/FreeImage/PluginJXR.cpp ./Source/FreeImage/PluginKOALA.cpp ./Source/FreeImage/PluginMNG.cpp ./Source/FreeImage/PluginPCD.cpp ./Source/FreeImage/PluginPCX.cpp ./Source/FreeImage/PluginPFM.cpp ./Source/FreeImage/PluginPICT.cpp ./Source/FreeImage/PluginPNG.cpp ./Source/FreeImage/PluginPNM.cpp ./Source/FreeImage/PluginPSD.cpp ./Source/FreeImage/PluginRAS.cpp ./Source/FreeImage/PluginRAW.cpp ./Source/FreeImage/PluginSGI.cpp ./Source/FreeImage/PluginTARGA.cpp ./Source/FreeImage/PluginTIFF.cpp ./Source/FreeImage/PluginWBMP.cpp ./Source/FreeImage/PluginWebP.cpp ./Source/FreeImage/PluginXBM.cpp ./Source/FreeImage/PluginXPM.cpp ./Source/FreeImage/PSDParser.cpp ./Source/FreeImage/TIFFLogLuv.cpp ./Source/FreeImage/Conversion.cpp ./Source/FreeImage/Conversion16_555.cpp ./Source/FreeImage/Conversion16_565.cpp ./Source/FreeImage/Conversion24.cpp ./Source/FreeImage/Conversion32.cpp ./Source/FreeImage/Conversion4.cpp ./Source/FreeImage/Conversion8.cpp ./Source/FreeImage/ConversionFloat.cpp ./Source/FreeImage/ConversionRGB16.cpp ./Source/FreeImage/ConversionRGBF.cpp ./Source/FreeImage/ConversionType.cpp ./Source/FreeImage/ConversionUINT16.cpp 
./Source/FreeImage/Halftoning.cpp ./Source/FreeImage/tmoColorConvert.cpp ./Source/FreeImage/tmoDrago03.cpp ./Source/FreeImage/tmoFattal02.cpp ./Source/FreeImage/tmoReinhard05.cpp ./Source/FreeImage/ToneMapping.cpp ./Source/FreeImage/NNQuantizer.cpp ./Source/FreeImage/WuQuantizer.cpp ./Source/FreeImage/CacheFile.cpp ./Source/FreeImage/MultiPage.cpp ./Source/FreeImage/ZLibInterface.cpp ./Source/Metadata/Exif.cpp ./Source/Metadata/FIRational.cpp ./Source/Metadata/FreeImageTag.cpp ./Source/Metadata/IPTC.cpp ./Source/Metadata/TagConversion.cpp ./Source/Metadata/TagLib.cpp ./Source/Metadata/XTIFF.cpp ./Source/FreeImageToolkit/Background.cpp ./Source/FreeImageToolkit/BSplineRotate.cpp ./Source/FreeImageToolkit/Channels.cpp ./Source/FreeImageToolkit/ClassicRotate.cpp ./Source/FreeImageToolkit/Colors.cpp ./Source/FreeImageToolkit/CopyPaste.cpp ./Source/FreeImageToolkit/Display.cpp ./Source/FreeImageToolkit/Flip.cpp ./Source/FreeImageToolkit/JPEGTransform.cpp ./Source/FreeImageToolkit/MultigridPoissonSolver.cpp ./Source/FreeImageToolkit/Rescale.cpp ./Source/FreeImageToolkit/Resize.cpp Source/LibJPEG/jaricom.c Source/LibJPEG/jcapimin.c Source/LibJPEG/jcapistd.c Source/LibJPEG/jcarith.c Source/LibJPEG/jccoefct.c Source/LibJPEG/jccolor.c Source/LibJPEG/jcdctmgr.c Source/LibJPEG/jchuff.c Source/LibJPEG/jcinit.c Source/LibJPEG/jcmainct.c Source/LibJPEG/jcmarker.c Source/LibJPEG/jcmaster.c Source/LibJPEG/jcomapi.c Source/LibJPEG/jcparam.c Source/LibJPEG/jcprepct.c Source/LibJPEG/jcsample.c Source/LibJPEG/jctrans.c Source/LibJPEG/jdapimin.c Source/LibJPEG/jdapistd.c Source/LibJPEG/jdarith.c Source/LibJPEG/jdatadst.c Source/LibJPEG/jdatasrc.c Source/LibJPEG/jdcoefct.c Source/LibJPEG/jdcolor.c Source/LibJPEG/jddctmgr.c Source/LibJPEG/jdhuff.c Source/LibJPEG/jdinput.c Source/LibJPEG/jdmainct.c Source/LibJPEG/jdmarker.c Source/LibJPEG/jdmaster.c Source/LibJPEG/jdmerge.c Source/LibJPEG/jdpostct.c Source/LibJPEG/jdsample.c Source/LibJPEG/jdtrans.c Source/LibJPEG/jerror.c Source/LibJPEG/jfdctflt.c Source/LibJPEG/jfdctfst.c Source/LibJPEG/jfdctint.c Source/LibJPEG/jidctflt.c Source/LibJPEG/jidctfst.c Source/LibJPEG/jidctint.c Source/LibJPEG/jmemmgr.c Source/LibJPEG/jmemnobs.c Source/LibJPEG/jquant1.c Source/LibJPEG/jquant2.c Source/LibJPEG/jutils.c Source/LibJPEG/transupp.c Source/LibPNG/png.c Source/LibPNG/pngerror.c Source/LibPNG/pngget.c Source/LibPNG/pngmem.c Source/LibPNG/pngpread.c Source/LibPNG/pngread.c Source/LibPNG/pngrio.c Source/LibPNG/pngrtran.c Source/LibPNG/pngrutil.c Source/LibPNG/pngset.c Source/LibPNG/pngtrans.c Source/LibPNG/pngwio.c Source/LibPNG/pngwrite.c Source/LibPNG/pngwtran.c Source/LibPNG/pngwutil.c Source/LibTIFF4/tif_aux.c Source/LibTIFF4/tif_close.c Source/LibTIFF4/tif_codec.c Source/LibTIFF4/tif_color.c Source/LibTIFF4/tif_compress.c Source/LibTIFF4/tif_dir.c Source/LibTIFF4/tif_dirinfo.c Source/LibTIFF4/tif_dirread.c Source/LibTIFF4/tif_dirwrite.c Source/LibTIFF4/tif_dumpmode.c Source/LibTIFF4/tif_error.c Source/LibTIFF4/tif_extension.c Source/LibTIFF4/tif_fax3.c Source/LibTIFF4/tif_fax3sm.c Source/LibTIFF4/tif_flush.c Source/LibTIFF4/tif_getimage.c Source/LibTIFF4/tif_jpeg.c Source/LibTIFF4/tif_lerc.c Source/LibTIFF4/tif_luv.c Source/LibTIFF4/tif_lzw.c Source/LibTIFF4/tif_next.c Source/LibTIFF4/tif_ojpeg.c Source/LibTIFF4/tif_open.c Source/LibTIFF4/tif_packbits.c Source/LibTIFF4/tif_pixarlog.c Source/LibTIFF4/tif_predict.c Source/LibTIFF4/tif_print.c Source/LibTIFF4/tif_read.c Source/LibTIFF4/tif_strip.c Source/LibTIFF4/tif_swab.c Source/LibTIFF4/tif_thunder.c 
Source/LibTIFF4/tif_tile.c Source/LibTIFF4/tif_version.c Source/LibTIFF4/tif_warning.c Source/LibTIFF4/tif_webp.c Source/LibTIFF4/tif_write.c Source/LibTIFF4/tif_zip.c Source/ZLib/adler32.c Source/ZLib/compress.c Source/ZLib/crc32.c Source/ZLib/deflate.c Source/ZLib/gzclose.c Source/ZLib/gzlib.c Source/ZLib/gzread.c Source/ZLib/gzwrite.c Source/ZLib/infback.c Source/ZLib/inffast.c Source/ZLib/inflate.c Source/ZLib/inftrees.c Source/ZLib/trees.c Source/ZLib/uncompr.c Source/ZLib/zutil.c Source/LibOpenJPEG/bio.c Source/LibOpenJPEG/cio.c Source/LibOpenJPEG/dwt.c Source/LibOpenJPEG/event.c Source/LibOpenJPEG/function_list.c Source/LibOpenJPEG/image.c Source/LibOpenJPEG/invert.c Source/LibOpenJPEG/j2k.c Source/LibOpenJPEG/jp2.c Source/LibOpenJPEG/mct.c Source/LibOpenJPEG/mqc.c Source/LibOpenJPEG/openjpeg.c Source/LibOpenJPEG/opj_clock.c Source/LibOpenJPEG/pi.c Source/LibOpenJPEG/raw.c Source/LibOpenJPEG/t1.c Source/LibOpenJPEG/t2.c Source/LibOpenJPEG/tcd.c Source/LibOpenJPEG/tgt.c Source/OpenEXR/IexMath/IexMathFpu.cpp Source/OpenEXR/IlmImf/b44ExpLogTable.cpp Source/OpenEXR/IlmImf/ImfAcesFile.cpp Source/OpenEXR/IlmImf/ImfAttribute.cpp Source/OpenEXR/IlmImf/ImfB44Compressor.cpp Source/OpenEXR/IlmImf/ImfBoxAttribute.cpp Source/OpenEXR/IlmImf/ImfChannelList.cpp Source/OpenEXR/IlmImf/ImfChannelListAttribute.cpp Source/OpenEXR/IlmImf/ImfChromaticities.cpp Source/OpenEXR/IlmImf/ImfChromaticitiesAttribute.cpp Source/OpenEXR/IlmImf/ImfCompositeDeepScanLine.cpp Source/OpenEXR/IlmImf/ImfCompressionAttribute.cpp Source/OpenEXR/IlmImf/ImfCompressor.cpp Source/OpenEXR/IlmImf/ImfConvert.cpp Source/OpenEXR/IlmImf/ImfCRgbaFile.cpp Source/OpenEXR/IlmImf/ImfDeepCompositing.cpp Source/OpenEXR/IlmImf/ImfDeepFrameBuffer.cpp Source/OpenEXR/IlmImf/ImfDeepImageStateAttribute.cpp Source/OpenEXR/IlmImf/ImfDeepScanLineInputFile.cpp Source/OpenEXR/IlmImf/ImfDeepScanLineInputPart.cpp Source/OpenEXR/IlmImf/ImfDeepScanLineOutputFile.cpp Source/OpenEXR/IlmImf/ImfDeepScanLineOutputPart.cpp Source/OpenEXR/IlmImf/ImfDeepTiledInputFile.cpp Source/OpenEXR/IlmImf/ImfDeepTiledInputPart.cpp Source/OpenEXR/IlmImf/ImfDeepTiledOutputFile.cpp Source/OpenEXR/IlmImf/ImfDeepTiledOutputPart.cpp Source/OpenEXR/IlmImf/ImfDoubleAttribute.cpp Source/OpenEXR/IlmImf/ImfDwaCompressor.cpp Source/OpenEXR/IlmImf/ImfEnvmap.cpp Source/OpenEXR/IlmImf/ImfEnvmapAttribute.cpp Source/OpenEXR/IlmImf/ImfFastHuf.cpp Source/OpenEXR/IlmImf/ImfFloatAttribute.cpp Source/OpenEXR/IlmImf/ImfFloatVectorAttribute.cpp Source/OpenEXR/IlmImf/ImfFrameBuffer.cpp Source/OpenEXR/IlmImf/ImfFramesPerSecond.cpp Source/OpenEXR/IlmImf/ImfGenericInputFile.cpp Source/OpenEXR/IlmImf/ImfGenericOutputFile.cpp Source/OpenEXR/IlmImf/ImfHeader.cpp Source/OpenEXR/IlmImf/ImfHuf.cpp Source/OpenEXR/IlmImf/ImfInputFile.cpp Source/OpenEXR/IlmImf/ImfInputPart.cpp Source/OpenEXR/IlmImf/ImfInputPartData.cpp Source/OpenEXR/IlmImf/ImfIntAttribute.cpp Source/OpenEXR/IlmImf/ImfIO.cpp Source/OpenEXR/IlmImf/ImfKeyCode.cpp Source/OpenEXR/IlmImf/ImfKeyCodeAttribute.cpp Source/OpenEXR/IlmImf/ImfLineOrderAttribute.cpp Source/OpenEXR/IlmImf/ImfLut.cpp Source/OpenEXR/IlmImf/ImfMatrixAttribute.cpp Source/OpenEXR/IlmImf/ImfMisc.cpp Source/OpenEXR/IlmImf/ImfMultiPartInputFile.cpp Source/OpenEXR/IlmImf/ImfMultiPartOutputFile.cpp Source/OpenEXR/IlmImf/ImfMultiView.cpp Source/OpenEXR/IlmImf/ImfOpaqueAttribute.cpp Source/OpenEXR/IlmImf/ImfOutputFile.cpp Source/OpenEXR/IlmImf/ImfOutputPart.cpp Source/OpenEXR/IlmImf/ImfOutputPartData.cpp Source/OpenEXR/IlmImf/ImfPartType.cpp Source/OpenEXR/IlmImf/ImfPizCompressor.cpp 
Source/OpenEXR/IlmImf/ImfPreviewImage.cpp Source/OpenEXR/IlmImf/ImfPreviewImageAttribute.cpp Source/OpenEXR/IlmImf/ImfPxr24Compressor.cpp Source/OpenEXR/IlmImf/ImfRational.cpp Source/OpenEXR/IlmImf/ImfRationalAttribute.cpp Source/OpenEXR/IlmImf/ImfRgbaFile.cpp Source/OpenEXR/IlmImf/ImfRgbaYca.cpp Source/OpenEXR/IlmImf/ImfRle.cpp Source/OpenEXR/IlmImf/ImfRleCompressor.cpp Source/OpenEXR/IlmImf/ImfScanLineInputFile.cpp Source/OpenEXR/IlmImf/ImfStandardAttributes.cpp Source/OpenEXR/IlmImf/ImfStdIO.cpp Source/OpenEXR/IlmImf/ImfStringAttribute.cpp Source/OpenEXR/IlmImf/ImfStringVectorAttribute.cpp Source/OpenEXR/IlmImf/ImfSystemSpecific.cpp Source/OpenEXR/IlmImf/ImfTestFile.cpp Source/OpenEXR/IlmImf/ImfThreading.cpp Source/OpenEXR/IlmImf/ImfTileDescriptionAttribute.cpp Source/OpenEXR/IlmImf/ImfTiledInputFile.cpp Source/OpenEXR/IlmImf/ImfTiledInputPart.cpp Source/OpenEXR/IlmImf/ImfTiledMisc.cpp Source/OpenEXR/IlmImf/ImfTiledOutputFile.cpp Source/OpenEXR/IlmImf/ImfTiledOutputPart.cpp Source/OpenEXR/IlmImf/ImfTiledRgbaFile.cpp Source/OpenEXR/IlmImf/ImfTileOffsets.cpp Source/OpenEXR/IlmImf/ImfTimeCode.cpp Source/OpenEXR/IlmImf/ImfTimeCodeAttribute.cpp Source/OpenEXR/IlmImf/ImfVecAttribute.cpp Source/OpenEXR/IlmImf/ImfVersion.cpp Source/OpenEXR/IlmImf/ImfWav.cpp Source/OpenEXR/IlmImf/ImfZip.cpp Source/OpenEXR/IlmImf/ImfZipCompressor.cpp Source/OpenEXR/Imath/ImathBox.cpp Source/OpenEXR/Imath/ImathColorAlgo.cpp Source/OpenEXR/Imath/ImathFun.cpp Source/OpenEXR/Imath/ImathMatrixAlgo.cpp Source/OpenEXR/Imath/ImathRandom.cpp Source/OpenEXR/Imath/ImathShear.cpp Source/OpenEXR/Imath/ImathVec.cpp Source/OpenEXR/Iex/IexBaseExc.cpp Source/OpenEXR/Iex/IexThrowErrnoExc.cpp Source/OpenEXR/Half/half.cpp Source/OpenEXR/IlmThread/IlmThread.cpp Source/OpenEXR/IlmThread/IlmThreadMutex.cpp Source/OpenEXR/IlmThread/IlmThreadPool.cpp Source/OpenEXR/IlmThread/IlmThreadSemaphore.cpp Source/OpenEXR/IexMath/IexMathFloatExc.cpp Source/LibRawLite/src/decoders/canon_600.cpp Source/LibRawLite/src/decoders/crx.cpp Source/LibRawLite/src/decoders/decoders_dcraw.cpp Source/LibRawLite/src/decoders/decoders_libraw.cpp Source/LibRawLite/src/decoders/decoders_libraw_dcrdefs.cpp Source/LibRawLite/src/decoders/dng.cpp Source/LibRawLite/src/decoders/fp_dng.cpp Source/LibRawLite/src/decoders/fuji_compressed.cpp Source/LibRawLite/src/decoders/generic.cpp Source/LibRawLite/src/decoders/kodak_decoders.cpp Source/LibRawLite/src/decoders/load_mfbacks.cpp Source/LibRawLite/src/decoders/smal.cpp Source/LibRawLite/src/decoders/unpack.cpp Source/LibRawLite/src/decoders/unpack_thumb.cpp Source/LibRawLite/src/demosaic/aahd_demosaic.cpp Source/LibRawLite/src/demosaic/ahd_demosaic.cpp Source/LibRawLite/src/demosaic/dcb_demosaic.cpp Source/LibRawLite/src/demosaic/dht_demosaic.cpp Source/LibRawLite/src/demosaic/misc_demosaic.cpp Source/LibRawLite/src/demosaic/xtrans_demosaic.cpp Source/LibRawLite/src/integration/dngsdk_glue.cpp Source/LibRawLite/src/integration/rawspeed_glue.cpp Source/LibRawLite/src/libraw_datastream.cpp Source/LibRawLite/src/metadata/adobepano.cpp Source/LibRawLite/src/metadata/canon.cpp Source/LibRawLite/src/metadata/ciff.cpp Source/LibRawLite/src/metadata/cr3_parser.cpp Source/LibRawLite/src/metadata/epson.cpp Source/LibRawLite/src/metadata/exif_gps.cpp Source/LibRawLite/src/metadata/fuji.cpp Source/LibRawLite/src/metadata/hasselblad_model.cpp Source/LibRawLite/src/metadata/identify.cpp Source/LibRawLite/src/metadata/identify_tools.cpp Source/LibRawLite/src/metadata/kodak.cpp Source/LibRawLite/src/metadata/leica.cpp 
Source/LibRawLite/src/metadata/makernotes.cpp Source/LibRawLite/src/metadata/mediumformat.cpp Source/LibRawLite/src/metadata/minolta.cpp Source/LibRawLite/src/metadata/misc_parsers.cpp Source/LibRawLite/src/metadata/nikon.cpp Source/LibRawLite/src/metadata/normalize_model.cpp Source/LibRawLite/src/metadata/olympus.cpp Source/LibRawLite/src/metadata/p1.cpp Source/LibRawLite/src/metadata/pentax.cpp Source/LibRawLite/src/metadata/samsung.cpp Source/LibRawLite/src/metadata/sony.cpp Source/LibRawLite/src/metadata/tiff.cpp Source/LibRawLite/src/postprocessing/aspect_ratio.cpp Source/LibRawLite/src/postprocessing/dcraw_process.cpp Source/LibRawLite/src/postprocessing/mem_image.cpp Source/LibRawLite/src/postprocessing/postprocessing_aux.cpp Source/LibRawLite/src/postprocessing/postprocessing_utils.cpp Source/LibRawLite/src/postprocessing/postprocessing_utils_dcrdefs.cpp Source/LibRawLite/src/preprocessing/ext_preprocess.cpp Source/LibRawLite/src/preprocessing/raw2image.cpp Source/LibRawLite/src/preprocessing/subtract_black.cpp Source/LibRawLite/src/tables/cameralist.cpp Source/LibRawLite/src/tables/colorconst.cpp Source/LibRawLite/src/tables/colordata.cpp Source/LibRawLite/src/tables/wblists.cpp Source/LibRawLite/src/utils/curves.cpp Source/LibRawLite/src/utils/decoder_info.cpp Source/LibRawLite/src/utils/init_close_utils.cpp Source/LibRawLite/src/utils/open.cpp Source/LibRawLite/src/utils/phaseone_processing.cpp Source/LibRawLite/src/utils/read_utils.cpp Source/LibRawLite/src/utils/thumb_utils.cpp Source/LibRawLite/src/utils/utils_dcraw.cpp Source/LibRawLite/src/utils/utils_libraw.cpp Source/LibRawLite/src/write/file_write.cpp Source/LibRawLite/src/x3f/x3f_parse_process.cpp Source/LibRawLite/src/x3f/x3f_utils_patched.cpp Source/LibWebP/src/dec/alpha_dec.c Source/LibWebP/src/dec/buffer_dec.c Source/LibWebP/src/dec/frame_dec.c Source/LibWebP/src/dec/idec_dec.c Source/LibWebP/src/dec/io_dec.c Source/LibWebP/src/dec/quant_dec.c Source/LibWebP/src/dec/tree_dec.c Source/LibWebP/src/dec/vp8l_dec.c Source/LibWebP/src/dec/vp8_dec.c Source/LibWebP/src/dec/webp_dec.c Source/LibWebP/src/demux/anim_decode.c Source/LibWebP/src/demux/demux.c Source/LibWebP/src/dsp/alpha_processing.c Source/LibWebP/src/dsp/alpha_processing_mips_dsp_r2.c Source/LibWebP/src/dsp/alpha_processing_neon.c Source/LibWebP/src/dsp/alpha_processing_sse2.c Source/LibWebP/src/dsp/alpha_processing_sse41.c Source/LibWebP/src/dsp/cost.c Source/LibWebP/src/dsp/cost_mips32.c Source/LibWebP/src/dsp/cost_mips_dsp_r2.c Source/LibWebP/src/dsp/cost_neon.c Source/LibWebP/src/dsp/cost_sse2.c Source/LibWebP/src/dsp/cpu.c Source/LibWebP/src/dsp/dec.c Source/LibWebP/src/dsp/dec_clip_tables.c Source/LibWebP/src/dsp/dec_mips32.c Source/LibWebP/src/dsp/dec_mips_dsp_r2.c Source/LibWebP/src/dsp/dec_msa.c Source/LibWebP/src/dsp/dec_neon.c Source/LibWebP/src/dsp/dec_sse2.c Source/LibWebP/src/dsp/dec_sse41.c Source/LibWebP/src/dsp/enc.c Source/LibWebP/src/dsp/enc_avx2.c Source/LibWebP/src/dsp/enc_mips32.c Source/LibWebP/src/dsp/enc_mips_dsp_r2.c Source/LibWebP/src/dsp/enc_msa.c Source/LibWebP/src/dsp/enc_neon.c Source/LibWebP/src/dsp/enc_sse2.c Source/LibWebP/src/dsp/enc_sse41.c Source/LibWebP/src/dsp/filters.c Source/LibWebP/src/dsp/filters_mips_dsp_r2.c Source/LibWebP/src/dsp/filters_msa.c Source/LibWebP/src/dsp/filters_neon.c Source/LibWebP/src/dsp/filters_sse2.c Source/LibWebP/src/dsp/lossless.c Source/LibWebP/src/dsp/lossless_enc.c Source/LibWebP/src/dsp/lossless_enc_mips32.c Source/LibWebP/src/dsp/lossless_enc_mips_dsp_r2.c 
Source/LibWebP/src/dsp/lossless_enc_msa.c Source/LibWebP/src/dsp/lossless_enc_neon.c Source/LibWebP/src/dsp/lossless_enc_sse2.c Source/LibWebP/src/dsp/lossless_enc_sse41.c Source/LibWebP/src/dsp/lossless_mips_dsp_r2.c Source/LibWebP/src/dsp/lossless_msa.c Source/LibWebP/src/dsp/lossless_neon.c Source/LibWebP/src/dsp/lossless_sse2.c Source/LibWebP/src/dsp/lossless_sse41.c Source/LibWebP/src/dsp/rescaler.c Source/LibWebP/src/dsp/rescaler_mips32.c Source/LibWebP/src/dsp/rescaler_mips_dsp_r2.c Source/LibWebP/src/dsp/rescaler_msa.c Source/LibWebP/src/dsp/rescaler_neon.c Source/LibWebP/src/dsp/rescaler_sse2.c Source/LibWebP/src/dsp/ssim.c Source/LibWebP/src/dsp/ssim_sse2.c Source/LibWebP/src/dsp/upsampling.c Source/LibWebP/src/dsp/upsampling_mips_dsp_r2.c Source/LibWebP/src/dsp/upsampling_msa.c Source/LibWebP/src/dsp/upsampling_neon.c Source/LibWebP/src/dsp/upsampling_sse2.c Source/LibWebP/src/dsp/upsampling_sse41.c Source/LibWebP/src/dsp/yuv.c Source/LibWebP/src/dsp/yuv_mips32.c Source/LibWebP/src/dsp/yuv_mips_dsp_r2.c Source/LibWebP/src/dsp/yuv_neon.c Source/LibWebP/src/dsp/yuv_sse2.c Source/LibWebP/src/dsp/yuv_sse41.c Source/LibWebP/src/enc/alpha_enc.c Source/LibWebP/src/enc/analysis_enc.c Source/LibWebP/src/enc/backward_references_cost_enc.c Source/LibWebP/src/enc/backward_references_enc.c Source/LibWebP/src/enc/config_enc.c Source/LibWebP/src/enc/cost_enc.c Source/LibWebP/src/enc/filter_enc.c Source/LibWebP/src/enc/frame_enc.c Source/LibWebP/src/enc/histogram_enc.c Source/LibWebP/src/enc/iterator_enc.c Source/LibWebP/src/enc/near_lossless_enc.c Source/LibWebP/src/enc/picture_csp_enc.c Source/LibWebP/src/enc/picture_enc.c Source/LibWebP/src/enc/picture_psnr_enc.c Source/LibWebP/src/enc/picture_rescale_enc.c Source/LibWebP/src/enc/picture_tools_enc.c Source/LibWebP/src/enc/predictor_enc.c Source/LibWebP/src/enc/quant_enc.c Source/LibWebP/src/enc/syntax_enc.c Source/LibWebP/src/enc/token_enc.c Source/LibWebP/src/enc/tree_enc.c Source/LibWebP/src/enc/vp8l_enc.c Source/LibWebP/src/enc/webp_enc.c Source/LibWebP/src/mux/anim_encode.c Source/LibWebP/src/mux/muxedit.c Source/LibWebP/src/mux/muxinternal.c Source/LibWebP/src/mux/muxread.c Source/LibWebP/src/utils/bit_reader_utils.c Source/LibWebP/src/utils/bit_writer_utils.c Source/LibWebP/src/utils/color_cache_utils.c Source/LibWebP/src/utils/filters_utils.c Source/LibWebP/src/utils/huffman_encode_utils.c Source/LibWebP/src/utils/huffman_utils.c Source/LibWebP/src/utils/quant_levels_dec_utils.c Source/LibWebP/src/utils/quant_levels_utils.c Source/LibWebP/src/utils/random_utils.c Source/LibWebP/src/utils/rescaler_utils.c Source/LibWebP/src/utils/thread_utils.c Source/LibWebP/src/utils/utils.c Source/LibJXR/image/decode/decode.c Source/LibJXR/image/decode/JXRTranscode.c Source/LibJXR/image/decode/postprocess.c Source/LibJXR/image/decode/segdec.c Source/LibJXR/image/decode/strdec.c Source/LibJXR/image/decode/strdec_x86.c Source/LibJXR/image/decode/strInvTransform.c Source/LibJXR/image/decode/strPredQuantDec.c Source/LibJXR/image/encode/encode.c Source/LibJXR/image/encode/segenc.c Source/LibJXR/image/encode/strenc.c Source/LibJXR/image/encode/strenc_x86.c Source/LibJXR/image/encode/strFwdTransform.c Source/LibJXR/image/encode/strPredQuantEnc.c Source/LibJXR/image/sys/adapthuff.c Source/LibJXR/image/sys/image.c Source/LibJXR/image/sys/strcodec.c Source/LibJXR/image/sys/strPredQuant.c Source/LibJXR/image/sys/strTransform.c Source/LibJXR/jxrgluelib/JXRGlue.c Source/LibJXR/jxrgluelib/JXRGlueJxr.c Source/LibJXR/jxrgluelib/JXRGluePFC.c 
Source/LibJXR/jxrgluelib/JXRMeta.c +-INCLS = ./Dist/FreeImage.h ./Examples/Generic/FIIO_Mem.h ./Examples/OpenGL/TextureManager/TextureManager.h ./Examples/Plugin/PluginCradle.h ./Source/CacheFile.h ./Source/FreeImage/J2KHelper.h ./Source/FreeImage/PSDParser.h ./Source/FreeImage.h ./Source/FreeImageIO.h ./Source/FreeImageToolkit/Filters.h ./Source/FreeImageToolkit/Resize.h ./Source/LibJPEG/cderror.h ./Source/LibJPEG/cdjpeg.h ./Source/LibJPEG/jconfig.h ./Source/LibJPEG/jdct.h ./Source/LibJPEG/jerror.h ./Source/LibJPEG/jinclude.h ./Source/LibJPEG/jmemsys.h ./Source/LibJPEG/jmorecfg.h ./Source/LibJPEG/jpegint.h ./Source/LibJPEG/jpeglib.h ./Source/LibJPEG/jversion.h ./Source/LibJPEG/transupp.h ./Source/LibJXR/common/include/guiddef.h ./Source/LibJXR/common/include/wmsal.h ./Source/LibJXR/common/include/wmspecstring.h ./Source/LibJXR/common/include/wmspecstrings_adt.h ./Source/LibJXR/common/include/wmspecstrings_strict.h ./Source/LibJXR/common/include/wmspecstrings_undef.h ./Source/LibJXR/image/decode/decode.h ./Source/LibJXR/image/encode/encode.h ./Source/LibJXR/image/sys/ansi.h ./Source/LibJXR/image/sys/common.h ./Source/LibJXR/image/sys/perfTimer.h ./Source/LibJXR/image/sys/strcodec.h ./Source/LibJXR/image/sys/strTransform.h ./Source/LibJXR/image/sys/windowsmediaphoto.h ./Source/LibJXR/image/sys/xplatform_image.h ./Source/LibJXR/image/x86/x86.h ./Source/LibJXR/jxrgluelib/JXRGlue.h ./Source/LibJXR/jxrgluelib/JXRMeta.h ./Source/LibOpenJPEG/bio.h ./Source/LibOpenJPEG/cidx_manager.h ./Source/LibOpenJPEG/cio.h ./Source/LibOpenJPEG/dwt.h ./Source/LibOpenJPEG/event.h ./Source/LibOpenJPEG/function_list.h ./Source/LibOpenJPEG/image.h ./Source/LibOpenJPEG/indexbox_manager.h ./Source/LibOpenJPEG/invert.h ./Source/LibOpenJPEG/j2k.h ./Source/LibOpenJPEG/jp2.h ./Source/LibOpenJPEG/mct.h ./Source/LibOpenJPEG/mqc.h ./Source/LibOpenJPEG/openjpeg.h ./Source/LibOpenJPEG/opj_clock.h ./Source/LibOpenJPEG/opj_codec.h ./Source/LibOpenJPEG/opj_config.h ./Source/LibOpenJPEG/opj_config_private.h ./Source/LibOpenJPEG/opj_includes.h ./Source/LibOpenJPEG/opj_intmath.h ./Source/LibOpenJPEG/opj_inttypes.h ./Source/LibOpenJPEG/opj_malloc.h ./Source/LibOpenJPEG/opj_stdint.h ./Source/LibOpenJPEG/pi.h ./Source/LibOpenJPEG/raw.h ./Source/LibOpenJPEG/t1.h ./Source/LibOpenJPEG/t1_luts.h ./Source/LibOpenJPEG/t2.h ./Source/LibOpenJPEG/tcd.h ./Source/LibOpenJPEG/tgt.h ./Source/LibPNG/png.h ./Source/LibPNG/pngconf.h ./Source/LibPNG/pngdebug.h ./Source/LibPNG/pnginfo.h ./Source/LibPNG/pnglibconf.h ./Source/LibPNG/pngpriv.h ./Source/LibPNG/pngstruct.h ./Source/LibRawLite/internal/dcraw_defs.h ./Source/LibRawLite/internal/dcraw_fileio_defs.h ./Source/LibRawLite/internal/defines.h ./Source/LibRawLite/internal/dmp_include.h ./Source/LibRawLite/internal/libraw_cameraids.h ./Source/LibRawLite/internal/libraw_cxx_defs.h ./Source/LibRawLite/internal/libraw_internal_funcs.h ./Source/LibRawLite/internal/var_defines.h ./Source/LibRawLite/internal/x3f_tools.h ./Source/LibRawLite/libraw/libraw.h ./Source/LibRawLite/libraw/libraw_alloc.h ./Source/LibRawLite/libraw/libraw_const.h ./Source/LibRawLite/libraw/libraw_datastream.h ./Source/LibRawLite/libraw/libraw_internal.h ./Source/LibRawLite/libraw/libraw_types.h ./Source/LibRawLite/libraw/libraw_version.h ./Source/LibTIFF4/t4.h ./Source/LibTIFF4/tiff.h ./Source/LibTIFF4/tiffconf.h ./Source/LibTIFF4/tiffconf.vc.h ./Source/LibTIFF4/tiffconf.wince.h ./Source/LibTIFF4/tiffio.h ./Source/LibTIFF4/tiffiop.h ./Source/LibTIFF4/tiffvers.h ./Source/LibTIFF4/tif_config.h ./Source/LibTIFF4/tif_config.vc.h 
./Source/LibTIFF4/tif_config.wince.h ./Source/LibTIFF4/tif_dir.h ./Source/LibTIFF4/tif_fax3.h ./Source/LibTIFF4/tif_predict.h ./Source/LibTIFF4/uvcode.h ./Source/LibWebP/src/dec/alphai_dec.h ./Source/LibWebP/src/dec/common_dec.h ./Source/LibWebP/src/dec/vp8i_dec.h ./Source/LibWebP/src/dec/vp8li_dec.h ./Source/LibWebP/src/dec/vp8_dec.h ./Source/LibWebP/src/dec/webpi_dec.h ./Source/LibWebP/src/dsp/common_sse2.h ./Source/LibWebP/src/dsp/common_sse41.h ./Source/LibWebP/src/dsp/dsp.h ./Source/LibWebP/src/dsp/lossless.h ./Source/LibWebP/src/dsp/lossless_common.h ./Source/LibWebP/src/dsp/mips_macro.h ./Source/LibWebP/src/dsp/msa_macro.h ./Source/LibWebP/src/dsp/neon.h ./Source/LibWebP/src/dsp/quant.h ./Source/LibWebP/src/dsp/yuv.h ./Source/LibWebP/src/enc/backward_references_enc.h ./Source/LibWebP/src/enc/cost_enc.h ./Source/LibWebP/src/enc/histogram_enc.h ./Source/LibWebP/src/enc/vp8i_enc.h ./Source/LibWebP/src/enc/vp8li_enc.h ./Source/LibWebP/src/mux/animi.h ./Source/LibWebP/src/mux/muxi.h ./Source/LibWebP/src/utils/bit_reader_inl_utils.h ./Source/LibWebP/src/utils/bit_reader_utils.h ./Source/LibWebP/src/utils/bit_writer_utils.h ./Source/LibWebP/src/utils/color_cache_utils.h ./Source/LibWebP/src/utils/endian_inl_utils.h ./Source/LibWebP/src/utils/filters_utils.h ./Source/LibWebP/src/utils/huffman_encode_utils.h ./Source/LibWebP/src/utils/huffman_utils.h ./Source/LibWebP/src/utils/quant_levels_dec_utils.h ./Source/LibWebP/src/utils/quant_levels_utils.h ./Source/LibWebP/src/utils/random_utils.h ./Source/LibWebP/src/utils/rescaler_utils.h ./Source/LibWebP/src/utils/thread_utils.h ./Source/LibWebP/src/utils/utils.h ./Source/LibWebP/src/webp/decode.h ./Source/LibWebP/src/webp/demux.h ./Source/LibWebP/src/webp/encode.h ./Source/LibWebP/src/webp/format_constants.h ./Source/LibWebP/src/webp/mux.h ./Source/LibWebP/src/webp/mux_types.h ./Source/LibWebP/src/webp/types.h ./Source/MapIntrospector.h ./Source/Metadata/FIRational.h ./Source/Metadata/FreeImageTag.h ./Source/OpenEXR/Half/eLut.h ./Source/OpenEXR/Half/half.h ./Source/OpenEXR/Half/halfExport.h ./Source/OpenEXR/Half/halfFunction.h ./Source/OpenEXR/Half/halfLimits.h ./Source/OpenEXR/Half/toFloat.h ./Source/OpenEXR/Iex/Iex.h ./Source/OpenEXR/Iex/IexBaseExc.h ./Source/OpenEXR/Iex/IexErrnoExc.h ./Source/OpenEXR/Iex/IexExport.h ./Source/OpenEXR/Iex/IexForward.h ./Source/OpenEXR/Iex/IexMacros.h ./Source/OpenEXR/Iex/IexMathExc.h ./Source/OpenEXR/Iex/IexNamespace.h ./Source/OpenEXR/Iex/IexThrowErrnoExc.h ./Source/OpenEXR/IexMath/IexMathFloatExc.h ./Source/OpenEXR/IexMath/IexMathFpu.h ./Source/OpenEXR/IexMath/IexMathIeeeExc.h ./Source/OpenEXR/IlmBaseConfig.h ./Source/OpenEXR/IlmImf/b44ExpLogTable.h ./Source/OpenEXR/IlmImf/dwaLookups.h ./Source/OpenEXR/IlmImf/ImfAcesFile.h ./Source/OpenEXR/IlmImf/ImfArray.h ./Source/OpenEXR/IlmImf/ImfAttribute.h ./Source/OpenEXR/IlmImf/ImfAutoArray.h ./Source/OpenEXR/IlmImf/ImfB44Compressor.h ./Source/OpenEXR/IlmImf/ImfBoxAttribute.h ./Source/OpenEXR/IlmImf/ImfChannelList.h ./Source/OpenEXR/IlmImf/ImfChannelListAttribute.h ./Source/OpenEXR/IlmImf/ImfCheckedArithmetic.h ./Source/OpenEXR/IlmImf/ImfChromaticities.h ./Source/OpenEXR/IlmImf/ImfChromaticitiesAttribute.h ./Source/OpenEXR/IlmImf/ImfCompositeDeepScanLine.h ./Source/OpenEXR/IlmImf/ImfCompression.h ./Source/OpenEXR/IlmImf/ImfCompressionAttribute.h ./Source/OpenEXR/IlmImf/ImfCompressor.h ./Source/OpenEXR/IlmImf/ImfConvert.h ./Source/OpenEXR/IlmImf/ImfCRgbaFile.h ./Source/OpenEXR/IlmImf/ImfDeepCompositing.h ./Source/OpenEXR/IlmImf/ImfDeepFrameBuffer.h 
./Source/OpenEXR/IlmImf/ImfDeepImageState.h ./Source/OpenEXR/IlmImf/ImfDeepImageStateAttribute.h ./Source/OpenEXR/IlmImf/ImfDeepScanLineInputFile.h ./Source/OpenEXR/IlmImf/ImfDeepScanLineInputPart.h ./Source/OpenEXR/IlmImf/ImfDeepScanLineOutputFile.h ./Source/OpenEXR/IlmImf/ImfDeepScanLineOutputPart.h ./Source/OpenEXR/IlmImf/ImfDeepTiledInputFile.h ./Source/OpenEXR/IlmImf/ImfDeepTiledInputPart.h ./Source/OpenEXR/IlmImf/ImfDeepTiledOutputFile.h ./Source/OpenEXR/IlmImf/ImfDeepTiledOutputPart.h ./Source/OpenEXR/IlmImf/ImfDoubleAttribute.h ./Source/OpenEXR/IlmImf/ImfDwaCompressor.h ./Source/OpenEXR/IlmImf/ImfDwaCompressorSimd.h ./Source/OpenEXR/IlmImf/ImfEnvmap.h ./Source/OpenEXR/IlmImf/ImfEnvmapAttribute.h ./Source/OpenEXR/IlmImf/ImfExport.h ./Source/OpenEXR/IlmImf/ImfFastHuf.h ./Source/OpenEXR/IlmImf/ImfFloatAttribute.h ./Source/OpenEXR/IlmImf/ImfFloatVectorAttribute.h ./Source/OpenEXR/IlmImf/ImfForward.h ./Source/OpenEXR/IlmImf/ImfFrameBuffer.h ./Source/OpenEXR/IlmImf/ImfFramesPerSecond.h ./Source/OpenEXR/IlmImf/ImfGenericInputFile.h ./Source/OpenEXR/IlmImf/ImfGenericOutputFile.h ./Source/OpenEXR/IlmImf/ImfHeader.h ./Source/OpenEXR/IlmImf/ImfHuf.h ./Source/OpenEXR/IlmImf/ImfInputFile.h ./Source/OpenEXR/IlmImf/ImfInputPart.h ./Source/OpenEXR/IlmImf/ImfInputPartData.h ./Source/OpenEXR/IlmImf/ImfInputStreamMutex.h ./Source/OpenEXR/IlmImf/ImfInt64.h ./Source/OpenEXR/IlmImf/ImfIntAttribute.h ./Source/OpenEXR/IlmImf/ImfIO.h ./Source/OpenEXR/IlmImf/ImfKeyCode.h ./Source/OpenEXR/IlmImf/ImfKeyCodeAttribute.h ./Source/OpenEXR/IlmImf/ImfLineOrder.h ./Source/OpenEXR/IlmImf/ImfLineOrderAttribute.h ./Source/OpenEXR/IlmImf/ImfLut.h ./Source/OpenEXR/IlmImf/ImfMatrixAttribute.h ./Source/OpenEXR/IlmImf/ImfMisc.h ./Source/OpenEXR/IlmImf/ImfMultiPartInputFile.h ./Source/OpenEXR/IlmImf/ImfMultiPartOutputFile.h ./Source/OpenEXR/IlmImf/ImfMultiView.h ./Source/OpenEXR/IlmImf/ImfName.h ./Source/OpenEXR/IlmImf/ImfNamespace.h ./Source/OpenEXR/IlmImf/ImfOpaqueAttribute.h ./Source/OpenEXR/IlmImf/ImfOptimizedPixelReading.h ./Source/OpenEXR/IlmImf/ImfOutputFile.h ./Source/OpenEXR/IlmImf/ImfOutputPart.h ./Source/OpenEXR/IlmImf/ImfOutputPartData.h ./Source/OpenEXR/IlmImf/ImfOutputStreamMutex.h ./Source/OpenEXR/IlmImf/ImfPartHelper.h ./Source/OpenEXR/IlmImf/ImfPartType.h ./Source/OpenEXR/IlmImf/ImfPixelType.h ./Source/OpenEXR/IlmImf/ImfPizCompressor.h ./Source/OpenEXR/IlmImf/ImfPreviewImage.h ./Source/OpenEXR/IlmImf/ImfPreviewImageAttribute.h ./Source/OpenEXR/IlmImf/ImfPxr24Compressor.h ./Source/OpenEXR/IlmImf/ImfRational.h ./Source/OpenEXR/IlmImf/ImfRationalAttribute.h ./Source/OpenEXR/IlmImf/ImfRgba.h ./Source/OpenEXR/IlmImf/ImfRgbaFile.h ./Source/OpenEXR/IlmImf/ImfRgbaYca.h ./Source/OpenEXR/IlmImf/ImfRle.h ./Source/OpenEXR/IlmImf/ImfRleCompressor.h ./Source/OpenEXR/IlmImf/ImfScanLineInputFile.h ./Source/OpenEXR/IlmImf/ImfSimd.h ./Source/OpenEXR/IlmImf/ImfStandardAttributes.h ./Source/OpenEXR/IlmImf/ImfStdIO.h ./Source/OpenEXR/IlmImf/ImfStringAttribute.h ./Source/OpenEXR/IlmImf/ImfStringVectorAttribute.h ./Source/OpenEXR/IlmImf/ImfSystemSpecific.h ./Source/OpenEXR/IlmImf/ImfTestFile.h ./Source/OpenEXR/IlmImf/ImfThreading.h ./Source/OpenEXR/IlmImf/ImfTileDescription.h ./Source/OpenEXR/IlmImf/ImfTileDescriptionAttribute.h ./Source/OpenEXR/IlmImf/ImfTiledInputFile.h ./Source/OpenEXR/IlmImf/ImfTiledInputPart.h ./Source/OpenEXR/IlmImf/ImfTiledMisc.h ./Source/OpenEXR/IlmImf/ImfTiledOutputFile.h ./Source/OpenEXR/IlmImf/ImfTiledOutputPart.h ./Source/OpenEXR/IlmImf/ImfTiledRgbaFile.h ./Source/OpenEXR/IlmImf/ImfTileOffsets.h 
./Source/OpenEXR/IlmImf/ImfTimeCode.h ./Source/OpenEXR/IlmImf/ImfTimeCodeAttribute.h ./Source/OpenEXR/IlmImf/ImfVecAttribute.h ./Source/OpenEXR/IlmImf/ImfVersion.h ./Source/OpenEXR/IlmImf/ImfWav.h ./Source/OpenEXR/IlmImf/ImfXdr.h ./Source/OpenEXR/IlmImf/ImfZip.h ./Source/OpenEXR/IlmImf/ImfZipCompressor.h ./Source/OpenEXR/IlmThread/IlmThread.h ./Source/OpenEXR/IlmThread/IlmThreadExport.h ./Source/OpenEXR/IlmThread/IlmThreadForward.h ./Source/OpenEXR/IlmThread/IlmThreadMutex.h ./Source/OpenEXR/IlmThread/IlmThreadNamespace.h ./Source/OpenEXR/IlmThread/IlmThreadPool.h ./Source/OpenEXR/IlmThread/IlmThreadSemaphore.h ./Source/OpenEXR/Imath/ImathBox.h ./Source/OpenEXR/Imath/ImathBoxAlgo.h ./Source/OpenEXR/Imath/ImathColor.h ./Source/OpenEXR/Imath/ImathColorAlgo.h ./Source/OpenEXR/Imath/ImathEuler.h ./Source/OpenEXR/Imath/ImathExc.h ./Source/OpenEXR/Imath/ImathExport.h ./Source/OpenEXR/Imath/ImathForward.h ./Source/OpenEXR/Imath/ImathFrame.h ./Source/OpenEXR/Imath/ImathFrustum.h ./Source/OpenEXR/Imath/ImathFrustumTest.h ./Source/OpenEXR/Imath/ImathFun.h ./Source/OpenEXR/Imath/ImathGL.h ./Source/OpenEXR/Imath/ImathGLU.h ./Source/OpenEXR/Imath/ImathHalfLimits.h ./Source/OpenEXR/Imath/ImathInt64.h ./Source/OpenEXR/Imath/ImathInterval.h ./Source/OpenEXR/Imath/ImathLimits.h ./Source/OpenEXR/Imath/ImathLine.h ./Source/OpenEXR/Imath/ImathLineAlgo.h ./Source/OpenEXR/Imath/ImathMath.h ./Source/OpenEXR/Imath/ImathMatrix.h ./Source/OpenEXR/Imath/ImathMatrixAlgo.h ./Source/OpenEXR/Imath/ImathNamespace.h ./Source/OpenEXR/Imath/ImathPlane.h ./Source/OpenEXR/Imath/ImathPlatform.h ./Source/OpenEXR/Imath/ImathQuat.h ./Source/OpenEXR/Imath/ImathRandom.h ./Source/OpenEXR/Imath/ImathRoots.h ./Source/OpenEXR/Imath/ImathShear.h ./Source/OpenEXR/Imath/ImathSphere.h ./Source/OpenEXR/Imath/ImathVec.h ./Source/OpenEXR/Imath/ImathVecAlgo.h ./Source/OpenEXR/OpenEXRConfig.h ./Source/Plugin.h ./Source/Quantizers.h ./Source/ToneMapping.h ./Source/Utilities.h ./Source/ZLib/crc32.h ./Source/ZLib/deflate.h ./Source/ZLib/gzguts.h ./Source/ZLib/inffast.h ./Source/ZLib/inffixed.h ./Source/ZLib/inflate.h ./Source/ZLib/inftrees.h ./Source/ZLib/trees.h ./Source/ZLib/zconf.h ./Source/ZLib/zlib.h ./Source/ZLib/zutil.h ./TestAPI/TestSuite.h ./Wrapper/FreeImage.NET/cpp/FreeImageIO/FreeImageIO.Net.h ./Wrapper/FreeImage.NET/cpp/FreeImageIO/resource.h ./Wrapper/FreeImage.NET/cpp/FreeImageIO/Stdafx.h ./Wrapper/FreeImagePlus/FreeImagePlus.h ./Wrapper/FreeImagePlus/test/fipTest.h +SRCS = ./Source/FreeImage/BitmapAccess.cpp ./Source/FreeImage/ColorLookup.cpp ./Source/FreeImage/ConversionRGBA16.cpp ./Source/FreeImage/ConversionRGBAF.cpp ./Source/FreeImage/FreeImage.cpp ./Source/FreeImage/FreeImageC.c ./Source/FreeImage/FreeImageIO.cpp ./Source/FreeImage/GetType.cpp ./Source/FreeImage/LFPQuantizer.cpp ./Source/FreeImage/MemoryIO.cpp ./Source/FreeImage/PixelAccess.cpp ./Source/FreeImage/J2KHelper.cpp ./Source/FreeImage/MNGHelper.cpp ./Source/FreeImage/Plugin.cpp ./Source/FreeImage/PluginBMP.cpp ./Source/FreeImage/PluginCUT.cpp ./Source/FreeImage/PluginDDS.cpp ./Source/FreeImage/PluginEXR.cpp ./Source/FreeImage/PluginG3.cpp ./Source/FreeImage/PluginGIF.cpp ./Source/FreeImage/PluginHDR.cpp ./Source/FreeImage/PluginICO.cpp ./Source/FreeImage/PluginIFF.cpp ./Source/FreeImage/PluginJ2K.cpp ./Source/FreeImage/PluginJNG.cpp ./Source/FreeImage/PluginJP2.cpp ./Source/FreeImage/PluginJPEG.cpp ./Source/FreeImage/PluginJXR.cpp ./Source/FreeImage/PluginKOALA.cpp ./Source/FreeImage/PluginMNG.cpp ./Source/FreeImage/PluginPCD.cpp ./Source/FreeImage/PluginPCX.cpp 
./Source/FreeImage/PluginPFM.cpp ./Source/FreeImage/PluginPICT.cpp ./Source/FreeImage/PluginPNG.cpp ./Source/FreeImage/PluginPNM.cpp ./Source/FreeImage/PluginPSD.cpp ./Source/FreeImage/PluginRAS.cpp ./Source/FreeImage/PluginRAW.cpp ./Source/FreeImage/PluginSGI.cpp ./Source/FreeImage/PluginTARGA.cpp ./Source/FreeImage/PluginTIFF.cpp ./Source/FreeImage/PluginWBMP.cpp ./Source/FreeImage/PluginWebP.cpp ./Source/FreeImage/PluginXBM.cpp ./Source/FreeImage/PluginXPM.cpp ./Source/FreeImage/PSDParser.cpp ./Source/FreeImage/TIFFLogLuv.cpp ./Source/FreeImage/Conversion.cpp ./Source/FreeImage/Conversion16_555.cpp ./Source/FreeImage/Conversion16_565.cpp ./Source/FreeImage/Conversion24.cpp ./Source/FreeImage/Conversion32.cpp ./Source/FreeImage/Conversion4.cpp ./Source/FreeImage/Conversion8.cpp ./Source/FreeImage/ConversionFloat.cpp ./Source/FreeImage/ConversionRGB16.cpp ./Source/FreeImage/ConversionRGBF.cpp ./Source/FreeImage/ConversionType.cpp ./Source/FreeImage/ConversionUINT16.cpp ./Source/FreeImage/Halftoning.cpp ./Source/FreeImage/tmoColorConvert.cpp ./Source/FreeImage/tmoDrago03.cpp ./Source/FreeImage/tmoFattal02.cpp ./Source/FreeImage/tmoReinhard05.cpp ./Source/FreeImage/ToneMapping.cpp ./Source/FreeImage/NNQuantizer.cpp ./Source/FreeImage/WuQuantizer.cpp ./Source/FreeImage/CacheFile.cpp ./Source/FreeImage/MultiPage.cpp ./Source/FreeImage/ZLibInterface.cpp ./Source/Metadata/Exif.cpp ./Source/Metadata/FIRational.cpp ./Source/Metadata/FreeImageTag.cpp ./Source/Metadata/IPTC.cpp ./Source/Metadata/TagConversion.cpp ./Source/Metadata/TagLib.cpp ./Source/Metadata/XTIFF.cpp ./Source/FreeImageToolkit/Background.cpp ./Source/FreeImageToolkit/BSplineRotate.cpp ./Source/FreeImageToolkit/Channels.cpp ./Source/FreeImageToolkit/ClassicRotate.cpp ./Source/FreeImageToolkit/Colors.cpp ./Source/FreeImageToolkit/CopyPaste.cpp ./Source/FreeImageToolkit/Display.cpp ./Source/FreeImageToolkit/Flip.cpp ./Source/FreeImageToolkit/JPEGTransform.cpp ./Source/FreeImageToolkit/MultigridPoissonSolver.cpp ./Source/FreeImageToolkit/Rescale.cpp ./Source/FreeImageToolkit/Resize.cpp -+INCLS = ./Examples/OpenGL/TextureManager/TextureManager.h ./Examples/Generic/FIIO_Mem.h ./Examples/Plugin/PluginCradle.h ./Wrapper/FreeImage.NET/cpp/FreeImageIO/resource.h ./Wrapper/FreeImage.NET/cpp/FreeImageIO/FreeImageIO.Net.h ./Wrapper/FreeImage.NET/cpp/FreeImageIO/Stdafx.h ./Wrapper/FreeImagePlus/FreeImagePlus.h ./Wrapper/FreeImagePlus/test/fipTest.h ./TestAPI/TestSuite.h ./Source/FreeImage.h ./Source/FreeImage/PSDParser.h ./Source/FreeImage/J2KHelper.h ./Source/FreeImageToolkit/Filters.h ./Source/FreeImageToolkit/Resize.h ./Source/Metadata/FreeImageTag.h ./Source/Metadata/FIRational.h ./Source/ToneMapping.h ./Source/FreeImageIO.h ./Source/Plugin.h ./Source/CacheFile.h ./Source/Utilities.h ./Source/MapIntrospector.h ./Source/Quantizers.h ++INCLS = ./Dist/FreeImage.h ./Examples/Generic/FIIO_Mem.h ./Examples/OpenGL/TextureManager/TextureManager.h ./Examples/Plugin/PluginCradle.h ./Source/CacheFile.h ./Source/FreeImage/J2KHelper.h ./Source/FreeImage/PSDParser.h ./Source/FreeImage.h ./Source/FreeImageIO.h ./Source/FreeImageToolkit/Filters.h ./Source/FreeImageToolkit/Resize.h ./Source/MapIntrospector.h ./Source/Metadata/FIRational.h ./Source/Metadata/FreeImageTag.h ./Source/Plugin.h ./Source/Quantizers.h ./Source/ToneMapping.h ./Source/Utilities.h ./TestAPI/TestSuite.h ./Wrapper/FreeImage.NET/cpp/FreeImageIO/FreeImageIO.Net.h ./Wrapper/FreeImage.NET/cpp/FreeImageIO/resource.h ./Wrapper/FreeImage.NET/cpp/FreeImageIO/Stdafx.h 
./Wrapper/FreeImagePlus/FreeImagePlus.h ./Wrapper/FreeImagePlus/test/fipTest.h -INCLUDE = -I. -ISource -ISource/Metadata -ISource/FreeImageToolkit -ISource/LibJPEG -ISource/LibPNG -ISource/LibTIFF4 -ISource/ZLib -ISource/LibOpenJPEG -ISource/OpenEXR -ISource/OpenEXR/Half -ISource/OpenEXR/Iex -ISource/OpenEXR/IlmImf -ISource/OpenEXR/IlmThread -ISource/OpenEXR/Imath -ISource/OpenEXR/IexMath -ISource/LibRawLite -ISource/LibRawLite/dcraw -ISource/LibRawLite/internal -ISource/LibRawLite/libraw -ISource/LibRawLite/src -ISource/LibWebP -ISource/LibJXR -ISource/LibJXR/common/include -ISource/LibJXR/image/sys -ISource/LibJXR/jxrgluelib +INCLUDE = -I. -ISource -ISource/Metadata -ISource/FreeImageToolkit diff --git a/Source/FreeImage.h b/Source/FreeImage.h -index e8f1da6..235563e 100644 +index 2dfb9ee2..d2d32322 100644 --- a/Source/FreeImage.h +++ b/Source/FreeImage.h @@ -155,8 +155,8 @@ typedef uint8_t BYTE; @@ -214,20 +214,20 @@ index e8f1da6..235563e 100644 // MS is not C99 ISO compliant typedef long BOOL; diff --git a/Source/FreeImage/J2KHelper.cpp b/Source/FreeImage/J2KHelper.cpp -index 1776c3b..538f1c5 100644 +index 062b49ee..0e79fbc5 100644 --- a/Source/FreeImage/J2KHelper.cpp +++ b/Source/FreeImage/J2KHelper.cpp @@ -21,7 +21,7 @@ - - #include "FreeImage.h" - #include "Utilities.h" --#include "../LibOpenJPEG/openjpeg.h" -+#include - #include "J2KHelper.h" - - // -------------------------------------------------------------------------- + + #include "FreeImage.h" + #include "Utilities.h" +-#include "../LibOpenJPEG/openjpeg.h" ++#include + #include "J2KHelper.h" + + // -------------------------------------------------------------------------- diff --git a/Source/FreeImage/PluginEXR.cpp b/Source/FreeImage/PluginEXR.cpp -index b286430..9bf3ada 100644 +index b2864303..9bf3ada9 100644 --- a/Source/FreeImage/PluginEXR.cpp +++ b/Source/FreeImage/PluginEXR.cpp @@ -28,16 +28,16 @@ @@ -258,7 +258,7 @@ index b286430..9bf3ada 100644 // ========================================================== diff --git a/Source/FreeImage/PluginG3.cpp b/Source/FreeImage/PluginG3.cpp -index 0a083b4..e2f1241 100644 +index 4680aa32..7d4b5ce6 100644 --- a/Source/FreeImage/PluginG3.cpp +++ b/Source/FreeImage/PluginG3.cpp @@ -20,7 +20,7 @@ @@ -271,7 +271,7 @@ index 0a083b4..e2f1241 100644 #include "FreeImage.h" #include "Utilities.h" diff --git a/Source/FreeImage/PluginJ2K.cpp b/Source/FreeImage/PluginJ2K.cpp -index b8bcfc8..621a903 100644 +index b8bcfc8b..621a9037 100644 --- a/Source/FreeImage/PluginJ2K.cpp +++ b/Source/FreeImage/PluginJ2K.cpp @@ -21,7 +21,7 @@ @@ -284,7 +284,7 @@ index b8bcfc8..621a903 100644 // ========================================================== diff --git a/Source/FreeImage/PluginJP2.cpp b/Source/FreeImage/PluginJP2.cpp -index 742fe2c..c57f626 100644 +index 742fe2c0..c57f6267 100644 --- a/Source/FreeImage/PluginJP2.cpp +++ b/Source/FreeImage/PluginJP2.cpp @@ -21,7 +21,7 @@ @@ -297,7 +297,7 @@ index 742fe2c..c57f626 100644 // ========================================================== diff --git a/Source/FreeImage/PluginJPEG.cpp b/Source/FreeImage/PluginJPEG.cpp -index 8db177d..a7de637 100644 +index 8db177d2..a7de6378 100644 --- a/Source/FreeImage/PluginJPEG.cpp +++ b/Source/FreeImage/PluginJPEG.cpp @@ -35,9 +35,9 @@ extern "C" { @@ -431,7 +431,7 @@ index 8db177d..a7de637 100644 Read JPEG_APPD marker (IPTC or Adobe Photoshop profile) */ diff --git a/Source/FreeImage/PluginJXR.cpp b/Source/FreeImage/PluginJXR.cpp -index 85c6ff3..163a93b 100644 +index 85c6ff3e..163a93bd 100644 --- 
a/Source/FreeImage/PluginJXR.cpp +++ b/Source/FreeImage/PluginJXR.cpp @@ -23,7 +23,7 @@ @@ -444,7 +444,7 @@ index 85c6ff3..163a93b 100644 // ========================================================== // Plugin Interface diff --git a/Source/FreeImage/PluginPNG.cpp b/Source/FreeImage/PluginPNG.cpp -index 661f160..504fafe 100644 +index 661f1602..504fafe1 100644 --- a/Source/FreeImage/PluginPNG.cpp +++ b/Source/FreeImage/PluginPNG.cpp @@ -40,8 +40,8 @@ @@ -459,7 +459,7 @@ index 661f160..504fafe 100644 // ---------------------------------------------------------- diff --git a/Source/FreeImage/PluginRAW.cpp b/Source/FreeImage/PluginRAW.cpp -index ab0499f..2d3c9fd 100644 +index 26134bcc..d7fa81e4 100644 --- a/Source/FreeImage/PluginRAW.cpp +++ b/Source/FreeImage/PluginRAW.cpp @@ -19,12 +19,16 @@ @@ -481,7 +481,7 @@ index ab0499f..2d3c9fd 100644 // Plugin Interface // ========================================================== diff --git a/Source/FreeImage/PluginTIFF.cpp b/Source/FreeImage/PluginTIFF.cpp -index a805319..3e318ba 100644 +index 84554958..13a224dd 100644 --- a/Source/FreeImage/PluginTIFF.cpp +++ b/Source/FreeImage/PluginTIFF.cpp @@ -37,9 +37,9 @@ @@ -497,7 +497,7 @@ index a805319..3e318ba 100644 #include "FreeImageIO.h" #include "PSDParser.h" diff --git a/Source/FreeImage/PluginWebP.cpp b/Source/FreeImage/PluginWebP.cpp -index 7c9f62f..c401447 100644 +index 7c9f62fd..c4014473 100644 --- a/Source/FreeImage/PluginWebP.cpp +++ b/Source/FreeImage/PluginWebP.cpp @@ -24,9 +24,9 @@ @@ -514,7 +514,7 @@ index 7c9f62f..c401447 100644 // ========================================================== // Plugin Interface diff --git a/Source/FreeImage/ZLibInterface.cpp b/Source/FreeImage/ZLibInterface.cpp -index 3ab6d32..0973475 100644 +index 3ab6d321..09734755 100644 --- a/Source/FreeImage/ZLibInterface.cpp +++ b/Source/FreeImage/ZLibInterface.cpp @@ -19,10 +19,9 @@ @@ -540,7 +540,7 @@ index 3ab6d32..0973475 100644 memcpy(target + 4 + dest_len, &crc, 4); memcpy(target + 8 + dest_len, &source_size, 4); diff --git a/Source/FreeImageToolkit/JPEGTransform.cpp b/Source/FreeImageToolkit/JPEGTransform.cpp -index 6f9ba8e..836bc90 100644 +index 6f9ba8e1..836bc901 100644 --- a/Source/FreeImageToolkit/JPEGTransform.cpp +++ b/Source/FreeImageToolkit/JPEGTransform.cpp @@ -26,10 +26,10 @@ extern "C" { @@ -559,26 +559,26 @@ index 6f9ba8e..836bc90 100644 #include "FreeImage.h" diff --git a/Source/Metadata/XTIFF.cpp b/Source/Metadata/XTIFF.cpp -index d5be902..b6ecd11 100644 +index 6919a8e8..ce3d7c6b 100644 --- a/Source/Metadata/XTIFF.cpp +++ b/Source/Metadata/XTIFF.cpp @@ -29,7 +29,7 @@ - #pragma warning (disable : 4786) // identifier was truncated to 'number' characters - #endif - --#include "../LibTIFF4/tiffiop.h" + #pragma warning (disable : 4786) // identifier was truncated to 'number' characters + #endif + +-#include "../LibTIFF4/tiffiop.h" +#include - - #include "FreeImage.h" - #include "Utilities.h" + + #include "FreeImage.h" + #include "Utilities.h" diff --git a/fipMakefile.srcs b/fipMakefile.srcs -index 15ec099..72ba4fb 100644 +index cc75e5e0..bedc9e3e 100644 --- a/fipMakefile.srcs +++ b/fipMakefile.srcs @@ -1,4 +1,4 @@ VER_MAJOR = 3 VER_MINOR = 19.0 --SRCS = ./Source/FreeImage/BitmapAccess.cpp ./Source/FreeImage/ColorLookup.cpp ./Source/FreeImage/ConversionRGBA16.cpp ./Source/FreeImage/ConversionRGBAF.cpp ./Source/FreeImage/FreeImage.cpp ./Source/FreeImage/FreeImageC.c ./Source/FreeImage/FreeImageIO.cpp ./Source/FreeImage/GetType.cpp ./Source/FreeImage/LFPQuantizer.cpp ./Source/FreeImage/MemoryIO.cpp 
./Source/FreeImage/PixelAccess.cpp ./Source/FreeImage/J2KHelper.cpp ./Source/FreeImage/MNGHelper.cpp ./Source/FreeImage/Plugin.cpp ./Source/FreeImage/PluginBMP.cpp ./Source/FreeImage/PluginCUT.cpp ./Source/FreeImage/PluginDDS.cpp ./Source/FreeImage/PluginEXR.cpp ./Source/FreeImage/PluginG3.cpp ./Source/FreeImage/PluginGIF.cpp ./Source/FreeImage/PluginHDR.cpp ./Source/FreeImage/PluginICO.cpp ./Source/FreeImage/PluginIFF.cpp ./Source/FreeImage/PluginJ2K.cpp ./Source/FreeImage/PluginJNG.cpp ./Source/FreeImage/PluginJP2.cpp ./Source/FreeImage/PluginJPEG.cpp ./Source/FreeImage/PluginJXR.cpp ./Source/FreeImage/PluginKOALA.cpp ./Source/FreeImage/PluginMNG.cpp ./Source/FreeImage/PluginPCD.cpp ./Source/FreeImage/PluginPCX.cpp ./Source/FreeImage/PluginPFM.cpp ./Source/FreeImage/PluginPICT.cpp ./Source/FreeImage/PluginPNG.cpp ./Source/FreeImage/PluginPNM.cpp ./Source/FreeImage/PluginPSD.cpp ./Source/FreeImage/PluginRAS.cpp ./Source/FreeImage/PluginRAW.cpp ./Source/FreeImage/PluginSGI.cpp ./Source/FreeImage/PluginTARGA.cpp ./Source/FreeImage/PluginTIFF.cpp ./Source/FreeImage/PluginWBMP.cpp ./Source/FreeImage/PluginWebP.cpp ./Source/FreeImage/PluginXBM.cpp ./Source/FreeImage/PluginXPM.cpp ./Source/FreeImage/PSDParser.cpp ./Source/FreeImage/TIFFLogLuv.cpp ./Source/FreeImage/Conversion.cpp ./Source/FreeImage/Conversion16_555.cpp ./Source/FreeImage/Conversion16_565.cpp ./Source/FreeImage/Conversion24.cpp ./Source/FreeImage/Conversion32.cpp ./Source/FreeImage/Conversion4.cpp ./Source/FreeImage/Conversion8.cpp ./Source/FreeImage/ConversionFloat.cpp ./Source/FreeImage/ConversionRGB16.cpp ./Source/FreeImage/ConversionRGBF.cpp ./Source/FreeImage/ConversionType.cpp ./Source/FreeImage/ConversionUINT16.cpp ./Source/FreeImage/Halftoning.cpp ./Source/FreeImage/tmoColorConvert.cpp ./Source/FreeImage/tmoDrago03.cpp ./Source/FreeImage/tmoFattal02.cpp ./Source/FreeImage/tmoReinhard05.cpp ./Source/FreeImage/ToneMapping.cpp ./Source/FreeImage/NNQuantizer.cpp ./Source/FreeImage/WuQuantizer.cpp ./Source/FreeImage/CacheFile.cpp ./Source/FreeImage/MultiPage.cpp ./Source/FreeImage/ZLibInterface.cpp ./Source/Metadata/Exif.cpp ./Source/Metadata/FIRational.cpp ./Source/Metadata/FreeImageTag.cpp ./Source/Metadata/IPTC.cpp ./Source/Metadata/TagConversion.cpp ./Source/Metadata/TagLib.cpp ./Source/Metadata/XTIFF.cpp ./Source/FreeImageToolkit/Background.cpp ./Source/FreeImageToolkit/BSplineRotate.cpp ./Source/FreeImageToolkit/Channels.cpp ./Source/FreeImageToolkit/ClassicRotate.cpp ./Source/FreeImageToolkit/Colors.cpp ./Source/FreeImageToolkit/CopyPaste.cpp ./Source/FreeImageToolkit/Display.cpp ./Source/FreeImageToolkit/Flip.cpp ./Source/FreeImageToolkit/JPEGTransform.cpp ./Source/FreeImageToolkit/MultigridPoissonSolver.cpp ./Source/FreeImageToolkit/Rescale.cpp ./Source/FreeImageToolkit/Resize.cpp Source/LibJPEG/jaricom.c Source/LibJPEG/jcapimin.c Source/LibJPEG/jcapistd.c Source/LibJPEG/jcarith.c Source/LibJPEG/jccoefct.c Source/LibJPEG/jccolor.c Source/LibJPEG/jcdctmgr.c Source/LibJPEG/jchuff.c Source/LibJPEG/jcinit.c Source/LibJPEG/jcmainct.c Source/LibJPEG/jcmarker.c Source/LibJPEG/jcmaster.c Source/LibJPEG/jcomapi.c Source/LibJPEG/jcparam.c Source/LibJPEG/jcprepct.c Source/LibJPEG/jcsample.c Source/LibJPEG/jctrans.c Source/LibJPEG/jdapimin.c Source/LibJPEG/jdapistd.c Source/LibJPEG/jdarith.c Source/LibJPEG/jdatadst.c Source/LibJPEG/jdatasrc.c Source/LibJPEG/jdcoefct.c Source/LibJPEG/jdcolor.c Source/LibJPEG/jddctmgr.c Source/LibJPEG/jdhuff.c Source/LibJPEG/jdinput.c Source/LibJPEG/jdmainct.c Source/LibJPEG/jdmarker.c 
Source/LibJPEG/jdmaster.c Source/LibJPEG/jdmerge.c Source/LibJPEG/jdpostct.c Source/LibJPEG/jdsample.c Source/LibJPEG/jdtrans.c Source/LibJPEG/jerror.c Source/LibJPEG/jfdctflt.c Source/LibJPEG/jfdctfst.c Source/LibJPEG/jfdctint.c Source/LibJPEG/jidctflt.c Source/LibJPEG/jidctfst.c Source/LibJPEG/jidctint.c Source/LibJPEG/jmemmgr.c Source/LibJPEG/jmemnobs.c Source/LibJPEG/jquant1.c Source/LibJPEG/jquant2.c Source/LibJPEG/jutils.c Source/LibJPEG/transupp.c Source/LibPNG/png.c Source/LibPNG/pngerror.c Source/LibPNG/pngget.c Source/LibPNG/pngmem.c Source/LibPNG/pngpread.c Source/LibPNG/pngread.c Source/LibPNG/pngrio.c Source/LibPNG/pngrtran.c Source/LibPNG/pngrutil.c Source/LibPNG/pngset.c Source/LibPNG/pngtrans.c Source/LibPNG/pngwio.c Source/LibPNG/pngwrite.c Source/LibPNG/pngwtran.c Source/LibPNG/pngwutil.c Source/LibTIFF4/tif_aux.c Source/LibTIFF4/tif_close.c Source/LibTIFF4/tif_codec.c Source/LibTIFF4/tif_color.c Source/LibTIFF4/tif_compress.c Source/LibTIFF4/tif_dir.c Source/LibTIFF4/tif_dirinfo.c Source/LibTIFF4/tif_dirread.c Source/LibTIFF4/tif_dirwrite.c Source/LibTIFF4/tif_dumpmode.c Source/LibTIFF4/tif_error.c Source/LibTIFF4/tif_extension.c Source/LibTIFF4/tif_fax3.c Source/LibTIFF4/tif_fax3sm.c Source/LibTIFF4/tif_flush.c Source/LibTIFF4/tif_getimage.c Source/LibTIFF4/tif_jpeg.c Source/LibTIFF4/tif_luv.c Source/LibTIFF4/tif_lzma.c Source/LibTIFF4/tif_lzw.c Source/LibTIFF4/tif_next.c Source/LibTIFF4/tif_ojpeg.c Source/LibTIFF4/tif_open.c Source/LibTIFF4/tif_packbits.c Source/LibTIFF4/tif_pixarlog.c Source/LibTIFF4/tif_predict.c Source/LibTIFF4/tif_print.c Source/LibTIFF4/tif_read.c Source/LibTIFF4/tif_strip.c Source/LibTIFF4/tif_swab.c Source/LibTIFF4/tif_thunder.c Source/LibTIFF4/tif_tile.c Source/LibTIFF4/tif_version.c Source/LibTIFF4/tif_warning.c Source/LibTIFF4/tif_write.c Source/LibTIFF4/tif_zip.c Source/ZLib/adler32.c Source/ZLib/compress.c Source/ZLib/crc32.c Source/ZLib/deflate.c Source/ZLib/gzclose.c Source/ZLib/gzlib.c Source/ZLib/gzread.c Source/ZLib/gzwrite.c Source/ZLib/infback.c Source/ZLib/inffast.c Source/ZLib/inflate.c Source/ZLib/inftrees.c Source/ZLib/trees.c Source/ZLib/uncompr.c Source/ZLib/zutil.c Source/LibOpenJPEG/bio.c Source/LibOpenJPEG/cio.c Source/LibOpenJPEG/dwt.c Source/LibOpenJPEG/event.c Source/LibOpenJPEG/function_list.c Source/LibOpenJPEG/image.c Source/LibOpenJPEG/invert.c Source/LibOpenJPEG/j2k.c Source/LibOpenJPEG/jp2.c Source/LibOpenJPEG/mct.c Source/LibOpenJPEG/mqc.c Source/LibOpenJPEG/openjpeg.c Source/LibOpenJPEG/opj_clock.c Source/LibOpenJPEG/pi.c Source/LibOpenJPEG/raw.c Source/LibOpenJPEG/t1.c Source/LibOpenJPEG/t2.c Source/LibOpenJPEG/tcd.c Source/LibOpenJPEG/tgt.c Source/OpenEXR/IexMath/IexMathFpu.cpp Source/OpenEXR/IlmImf/b44ExpLogTable.cpp Source/OpenEXR/IlmImf/ImfAcesFile.cpp Source/OpenEXR/IlmImf/ImfAttribute.cpp Source/OpenEXR/IlmImf/ImfB44Compressor.cpp Source/OpenEXR/IlmImf/ImfBoxAttribute.cpp Source/OpenEXR/IlmImf/ImfChannelList.cpp Source/OpenEXR/IlmImf/ImfChannelListAttribute.cpp Source/OpenEXR/IlmImf/ImfChromaticities.cpp Source/OpenEXR/IlmImf/ImfChromaticitiesAttribute.cpp Source/OpenEXR/IlmImf/ImfCompositeDeepScanLine.cpp Source/OpenEXR/IlmImf/ImfCompressionAttribute.cpp Source/OpenEXR/IlmImf/ImfCompressor.cpp Source/OpenEXR/IlmImf/ImfConvert.cpp Source/OpenEXR/IlmImf/ImfCRgbaFile.cpp Source/OpenEXR/IlmImf/ImfDeepCompositing.cpp Source/OpenEXR/IlmImf/ImfDeepFrameBuffer.cpp Source/OpenEXR/IlmImf/ImfDeepImageStateAttribute.cpp Source/OpenEXR/IlmImf/ImfDeepScanLineInputFile.cpp 
Source/OpenEXR/IlmImf/ImfDeepScanLineInputPart.cpp Source/OpenEXR/IlmImf/ImfDeepScanLineOutputFile.cpp Source/OpenEXR/IlmImf/ImfDeepScanLineOutputPart.cpp Source/OpenEXR/IlmImf/ImfDeepTiledInputFile.cpp Source/OpenEXR/IlmImf/ImfDeepTiledInputPart.cpp Source/OpenEXR/IlmImf/ImfDeepTiledOutputFile.cpp Source/OpenEXR/IlmImf/ImfDeepTiledOutputPart.cpp Source/OpenEXR/IlmImf/ImfDoubleAttribute.cpp Source/OpenEXR/IlmImf/ImfDwaCompressor.cpp Source/OpenEXR/IlmImf/ImfEnvmap.cpp Source/OpenEXR/IlmImf/ImfEnvmapAttribute.cpp Source/OpenEXR/IlmImf/ImfFastHuf.cpp Source/OpenEXR/IlmImf/ImfFloatAttribute.cpp Source/OpenEXR/IlmImf/ImfFloatVectorAttribute.cpp Source/OpenEXR/IlmImf/ImfFrameBuffer.cpp Source/OpenEXR/IlmImf/ImfFramesPerSecond.cpp Source/OpenEXR/IlmImf/ImfGenericInputFile.cpp Source/OpenEXR/IlmImf/ImfGenericOutputFile.cpp Source/OpenEXR/IlmImf/ImfHeader.cpp Source/OpenEXR/IlmImf/ImfHuf.cpp Source/OpenEXR/IlmImf/ImfInputFile.cpp Source/OpenEXR/IlmImf/ImfInputPart.cpp Source/OpenEXR/IlmImf/ImfInputPartData.cpp Source/OpenEXR/IlmImf/ImfIntAttribute.cpp Source/OpenEXR/IlmImf/ImfIO.cpp Source/OpenEXR/IlmImf/ImfKeyCode.cpp Source/OpenEXR/IlmImf/ImfKeyCodeAttribute.cpp Source/OpenEXR/IlmImf/ImfLineOrderAttribute.cpp Source/OpenEXR/IlmImf/ImfLut.cpp Source/OpenEXR/IlmImf/ImfMatrixAttribute.cpp Source/OpenEXR/IlmImf/ImfMisc.cpp Source/OpenEXR/IlmImf/ImfMultiPartInputFile.cpp Source/OpenEXR/IlmImf/ImfMultiPartOutputFile.cpp Source/OpenEXR/IlmImf/ImfMultiView.cpp Source/OpenEXR/IlmImf/ImfOpaqueAttribute.cpp Source/OpenEXR/IlmImf/ImfOutputFile.cpp Source/OpenEXR/IlmImf/ImfOutputPart.cpp Source/OpenEXR/IlmImf/ImfOutputPartData.cpp Source/OpenEXR/IlmImf/ImfPartType.cpp Source/OpenEXR/IlmImf/ImfPizCompressor.cpp Source/OpenEXR/IlmImf/ImfPreviewImage.cpp Source/OpenEXR/IlmImf/ImfPreviewImageAttribute.cpp Source/OpenEXR/IlmImf/ImfPxr24Compressor.cpp Source/OpenEXR/IlmImf/ImfRational.cpp Source/OpenEXR/IlmImf/ImfRationalAttribute.cpp Source/OpenEXR/IlmImf/ImfRgbaFile.cpp Source/OpenEXR/IlmImf/ImfRgbaYca.cpp Source/OpenEXR/IlmImf/ImfRle.cpp Source/OpenEXR/IlmImf/ImfRleCompressor.cpp Source/OpenEXR/IlmImf/ImfScanLineInputFile.cpp Source/OpenEXR/IlmImf/ImfStandardAttributes.cpp Source/OpenEXR/IlmImf/ImfStdIO.cpp Source/OpenEXR/IlmImf/ImfStringAttribute.cpp Source/OpenEXR/IlmImf/ImfStringVectorAttribute.cpp Source/OpenEXR/IlmImf/ImfSystemSpecific.cpp Source/OpenEXR/IlmImf/ImfTestFile.cpp Source/OpenEXR/IlmImf/ImfThreading.cpp Source/OpenEXR/IlmImf/ImfTileDescriptionAttribute.cpp Source/OpenEXR/IlmImf/ImfTiledInputFile.cpp Source/OpenEXR/IlmImf/ImfTiledInputPart.cpp Source/OpenEXR/IlmImf/ImfTiledMisc.cpp Source/OpenEXR/IlmImf/ImfTiledOutputFile.cpp Source/OpenEXR/IlmImf/ImfTiledOutputPart.cpp Source/OpenEXR/IlmImf/ImfTiledRgbaFile.cpp Source/OpenEXR/IlmImf/ImfTileOffsets.cpp Source/OpenEXR/IlmImf/ImfTimeCode.cpp Source/OpenEXR/IlmImf/ImfTimeCodeAttribute.cpp Source/OpenEXR/IlmImf/ImfVecAttribute.cpp Source/OpenEXR/IlmImf/ImfVersion.cpp Source/OpenEXR/IlmImf/ImfWav.cpp Source/OpenEXR/IlmImf/ImfZip.cpp Source/OpenEXR/IlmImf/ImfZipCompressor.cpp Source/OpenEXR/Imath/ImathBox.cpp Source/OpenEXR/Imath/ImathColorAlgo.cpp Source/OpenEXR/Imath/ImathFun.cpp Source/OpenEXR/Imath/ImathMatrixAlgo.cpp Source/OpenEXR/Imath/ImathRandom.cpp Source/OpenEXR/Imath/ImathShear.cpp Source/OpenEXR/Imath/ImathVec.cpp Source/OpenEXR/Iex/IexBaseExc.cpp Source/OpenEXR/Iex/IexThrowErrnoExc.cpp Source/OpenEXR/Half/half.cpp Source/OpenEXR/IlmThread/IlmThread.cpp Source/OpenEXR/IlmThread/IlmThreadMutex.cpp Source/OpenEXR/IlmThread/IlmThreadPool.cpp 
Source/OpenEXR/IlmThread/IlmThreadSemaphore.cpp Source/OpenEXR/IexMath/IexMathFloatExc.cpp Source/LibRawLite/internal/dcraw_common.cpp Source/LibRawLite/internal/dcraw_fileio.cpp Source/LibRawLite/internal/demosaic_packs.cpp Source/LibRawLite/src/libraw_c_api.cpp Source/LibRawLite/src/libraw_cxx.cpp Source/LibRawLite/src/libraw_datastream.cpp Source/LibWebP/src/dec/alpha_dec.c Source/LibWebP/src/dec/buffer_dec.c Source/LibWebP/src/dec/frame_dec.c Source/LibWebP/src/dec/idec_dec.c Source/LibWebP/src/dec/io_dec.c Source/LibWebP/src/dec/quant_dec.c Source/LibWebP/src/dec/tree_dec.c Source/LibWebP/src/dec/vp8l_dec.c Source/LibWebP/src/dec/vp8_dec.c Source/LibWebP/src/dec/webp_dec.c Source/LibWebP/src/demux/anim_decode.c Source/LibWebP/src/demux/demux.c Source/LibWebP/src/dsp/alpha_processing.c Source/LibWebP/src/dsp/alpha_processing_mips_dsp_r2.c Source/LibWebP/src/dsp/alpha_processing_neon.c Source/LibWebP/src/dsp/alpha_processing_sse2.c Source/LibWebP/src/dsp/alpha_processing_sse41.c Source/LibWebP/src/dsp/cost.c Source/LibWebP/src/dsp/cost_mips32.c Source/LibWebP/src/dsp/cost_mips_dsp_r2.c Source/LibWebP/src/dsp/cost_neon.c Source/LibWebP/src/dsp/cost_sse2.c Source/LibWebP/src/dsp/cpu.c Source/LibWebP/src/dsp/dec.c Source/LibWebP/src/dsp/dec_clip_tables.c Source/LibWebP/src/dsp/dec_mips32.c Source/LibWebP/src/dsp/dec_mips_dsp_r2.c Source/LibWebP/src/dsp/dec_msa.c Source/LibWebP/src/dsp/dec_neon.c Source/LibWebP/src/dsp/dec_sse2.c Source/LibWebP/src/dsp/dec_sse41.c Source/LibWebP/src/dsp/enc.c Source/LibWebP/src/dsp/enc_avx2.c Source/LibWebP/src/dsp/enc_mips32.c Source/LibWebP/src/dsp/enc_mips_dsp_r2.c Source/LibWebP/src/dsp/enc_msa.c Source/LibWebP/src/dsp/enc_neon.c Source/LibWebP/src/dsp/enc_sse2.c Source/LibWebP/src/dsp/enc_sse41.c Source/LibWebP/src/dsp/filters.c Source/LibWebP/src/dsp/filters_mips_dsp_r2.c Source/LibWebP/src/dsp/filters_msa.c Source/LibWebP/src/dsp/filters_neon.c Source/LibWebP/src/dsp/filters_sse2.c Source/LibWebP/src/dsp/lossless.c Source/LibWebP/src/dsp/lossless_enc.c Source/LibWebP/src/dsp/lossless_enc_mips32.c Source/LibWebP/src/dsp/lossless_enc_mips_dsp_r2.c Source/LibWebP/src/dsp/lossless_enc_msa.c Source/LibWebP/src/dsp/lossless_enc_neon.c Source/LibWebP/src/dsp/lossless_enc_sse2.c Source/LibWebP/src/dsp/lossless_enc_sse41.c Source/LibWebP/src/dsp/lossless_mips_dsp_r2.c Source/LibWebP/src/dsp/lossless_msa.c Source/LibWebP/src/dsp/lossless_neon.c Source/LibWebP/src/dsp/lossless_sse2.c Source/LibWebP/src/dsp/rescaler.c Source/LibWebP/src/dsp/rescaler_mips32.c Source/LibWebP/src/dsp/rescaler_mips_dsp_r2.c Source/LibWebP/src/dsp/rescaler_msa.c Source/LibWebP/src/dsp/rescaler_neon.c Source/LibWebP/src/dsp/rescaler_sse2.c Source/LibWebP/src/dsp/ssim.c Source/LibWebP/src/dsp/ssim_sse2.c Source/LibWebP/src/dsp/upsampling.c Source/LibWebP/src/dsp/upsampling_mips_dsp_r2.c Source/LibWebP/src/dsp/upsampling_msa.c Source/LibWebP/src/dsp/upsampling_neon.c Source/LibWebP/src/dsp/upsampling_sse2.c Source/LibWebP/src/dsp/upsampling_sse41.c Source/LibWebP/src/dsp/yuv.c Source/LibWebP/src/dsp/yuv_mips32.c Source/LibWebP/src/dsp/yuv_mips_dsp_r2.c Source/LibWebP/src/dsp/yuv_neon.c Source/LibWebP/src/dsp/yuv_sse2.c Source/LibWebP/src/dsp/yuv_sse41.c Source/LibWebP/src/enc/alpha_enc.c Source/LibWebP/src/enc/analysis_enc.c Source/LibWebP/src/enc/backward_references_cost_enc.c Source/LibWebP/src/enc/backward_references_enc.c Source/LibWebP/src/enc/config_enc.c Source/LibWebP/src/enc/cost_enc.c Source/LibWebP/src/enc/filter_enc.c Source/LibWebP/src/enc/frame_enc.c 
Source/LibWebP/src/enc/histogram_enc.c Source/LibWebP/src/enc/iterator_enc.c Source/LibWebP/src/enc/near_lossless_enc.c Source/LibWebP/src/enc/picture_csp_enc.c Source/LibWebP/src/enc/picture_enc.c Source/LibWebP/src/enc/picture_psnr_enc.c Source/LibWebP/src/enc/picture_rescale_enc.c Source/LibWebP/src/enc/picture_tools_enc.c Source/LibWebP/src/enc/predictor_enc.c Source/LibWebP/src/enc/quant_enc.c Source/LibWebP/src/enc/syntax_enc.c Source/LibWebP/src/enc/token_enc.c Source/LibWebP/src/enc/tree_enc.c Source/LibWebP/src/enc/vp8l_enc.c Source/LibWebP/src/enc/webp_enc.c Source/LibWebP/src/mux/anim_encode.c Source/LibWebP/src/mux/muxedit.c Source/LibWebP/src/mux/muxinternal.c Source/LibWebP/src/mux/muxread.c Source/LibWebP/src/utils/bit_reader_utils.c Source/LibWebP/src/utils/bit_writer_utils.c Source/LibWebP/src/utils/color_cache_utils.c Source/LibWebP/src/utils/filters_utils.c Source/LibWebP/src/utils/huffman_encode_utils.c Source/LibWebP/src/utils/huffman_utils.c Source/LibWebP/src/utils/quant_levels_dec_utils.c Source/LibWebP/src/utils/quant_levels_utils.c Source/LibWebP/src/utils/random_utils.c Source/LibWebP/src/utils/rescaler_utils.c Source/LibWebP/src/utils/thread_utils.c Source/LibWebP/src/utils/utils.c Source/LibJXR/image/decode/decode.c Source/LibJXR/image/decode/JXRTranscode.c Source/LibJXR/image/decode/postprocess.c Source/LibJXR/image/decode/segdec.c Source/LibJXR/image/decode/strdec.c Source/LibJXR/image/decode/strdec_x86.c Source/LibJXR/image/decode/strInvTransform.c Source/LibJXR/image/decode/strPredQuantDec.c Source/LibJXR/image/encode/encode.c Source/LibJXR/image/encode/segenc.c Source/LibJXR/image/encode/strenc.c Source/LibJXR/image/encode/strenc_x86.c Source/LibJXR/image/encode/strFwdTransform.c Source/LibJXR/image/encode/strPredQuantEnc.c Source/LibJXR/image/sys/adapthuff.c Source/LibJXR/image/sys/image.c Source/LibJXR/image/sys/strcodec.c Source/LibJXR/image/sys/strPredQuant.c Source/LibJXR/image/sys/strTransform.c Source/LibJXR/jxrgluelib/JXRGlue.c Source/LibJXR/jxrgluelib/JXRGlueJxr.c Source/LibJXR/jxrgluelib/JXRGluePFC.c Source/LibJXR/jxrgluelib/JXRMeta.c Wrapper/FreeImagePlus/src/fipImage.cpp Wrapper/FreeImagePlus/src/fipMemoryIO.cpp Wrapper/FreeImagePlus/src/fipMetadataFind.cpp Wrapper/FreeImagePlus/src/fipMultiPage.cpp Wrapper/FreeImagePlus/src/fipTag.cpp Wrapper/FreeImagePlus/src/fipWinImage.cpp Wrapper/FreeImagePlus/src/FreeImagePlus.cpp +-SRCS = ./Source/FreeImage/BitmapAccess.cpp ./Source/FreeImage/ColorLookup.cpp ./Source/FreeImage/ConversionRGBA16.cpp ./Source/FreeImage/ConversionRGBAF.cpp ./Source/FreeImage/FreeImage.cpp ./Source/FreeImage/FreeImageC.c ./Source/FreeImage/FreeImageIO.cpp ./Source/FreeImage/GetType.cpp ./Source/FreeImage/LFPQuantizer.cpp ./Source/FreeImage/MemoryIO.cpp ./Source/FreeImage/PixelAccess.cpp ./Source/FreeImage/J2KHelper.cpp ./Source/FreeImage/MNGHelper.cpp ./Source/FreeImage/Plugin.cpp ./Source/FreeImage/PluginBMP.cpp ./Source/FreeImage/PluginCUT.cpp ./Source/FreeImage/PluginDDS.cpp ./Source/FreeImage/PluginEXR.cpp ./Source/FreeImage/PluginG3.cpp ./Source/FreeImage/PluginGIF.cpp ./Source/FreeImage/PluginHDR.cpp ./Source/FreeImage/PluginICO.cpp ./Source/FreeImage/PluginIFF.cpp ./Source/FreeImage/PluginJ2K.cpp ./Source/FreeImage/PluginJNG.cpp ./Source/FreeImage/PluginJP2.cpp ./Source/FreeImage/PluginJPEG.cpp ./Source/FreeImage/PluginJXR.cpp ./Source/FreeImage/PluginKOALA.cpp ./Source/FreeImage/PluginMNG.cpp ./Source/FreeImage/PluginPCD.cpp ./Source/FreeImage/PluginPCX.cpp ./Source/FreeImage/PluginPFM.cpp 
./Source/FreeImage/PluginPICT.cpp ./Source/FreeImage/PluginPNG.cpp ./Source/FreeImage/PluginPNM.cpp ./Source/FreeImage/PluginPSD.cpp ./Source/FreeImage/PluginRAS.cpp ./Source/FreeImage/PluginRAW.cpp ./Source/FreeImage/PluginSGI.cpp ./Source/FreeImage/PluginTARGA.cpp ./Source/FreeImage/PluginTIFF.cpp ./Source/FreeImage/PluginWBMP.cpp ./Source/FreeImage/PluginWebP.cpp ./Source/FreeImage/PluginXBM.cpp ./Source/FreeImage/PluginXPM.cpp ./Source/FreeImage/PSDParser.cpp ./Source/FreeImage/TIFFLogLuv.cpp ./Source/FreeImage/Conversion.cpp ./Source/FreeImage/Conversion16_555.cpp ./Source/FreeImage/Conversion16_565.cpp ./Source/FreeImage/Conversion24.cpp ./Source/FreeImage/Conversion32.cpp ./Source/FreeImage/Conversion4.cpp ./Source/FreeImage/Conversion8.cpp ./Source/FreeImage/ConversionFloat.cpp ./Source/FreeImage/ConversionRGB16.cpp ./Source/FreeImage/ConversionRGBF.cpp ./Source/FreeImage/ConversionType.cpp ./Source/FreeImage/ConversionUINT16.cpp ./Source/FreeImage/Halftoning.cpp ./Source/FreeImage/tmoColorConvert.cpp ./Source/FreeImage/tmoDrago03.cpp ./Source/FreeImage/tmoFattal02.cpp ./Source/FreeImage/tmoReinhard05.cpp ./Source/FreeImage/ToneMapping.cpp ./Source/FreeImage/NNQuantizer.cpp ./Source/FreeImage/WuQuantizer.cpp ./Source/FreeImage/CacheFile.cpp ./Source/FreeImage/MultiPage.cpp ./Source/FreeImage/ZLibInterface.cpp ./Source/Metadata/Exif.cpp ./Source/Metadata/FIRational.cpp ./Source/Metadata/FreeImageTag.cpp ./Source/Metadata/IPTC.cpp ./Source/Metadata/TagConversion.cpp ./Source/Metadata/TagLib.cpp ./Source/Metadata/XTIFF.cpp ./Source/FreeImageToolkit/Background.cpp ./Source/FreeImageToolkit/BSplineRotate.cpp ./Source/FreeImageToolkit/Channels.cpp ./Source/FreeImageToolkit/ClassicRotate.cpp ./Source/FreeImageToolkit/Colors.cpp ./Source/FreeImageToolkit/CopyPaste.cpp ./Source/FreeImageToolkit/Display.cpp ./Source/FreeImageToolkit/Flip.cpp ./Source/FreeImageToolkit/JPEGTransform.cpp ./Source/FreeImageToolkit/MultigridPoissonSolver.cpp ./Source/FreeImageToolkit/Rescale.cpp ./Source/FreeImageToolkit/Resize.cpp Source/LibJPEG/jaricom.c Source/LibJPEG/jcapimin.c Source/LibJPEG/jcapistd.c Source/LibJPEG/jcarith.c Source/LibJPEG/jccoefct.c Source/LibJPEG/jccolor.c Source/LibJPEG/jcdctmgr.c Source/LibJPEG/jchuff.c Source/LibJPEG/jcinit.c Source/LibJPEG/jcmainct.c Source/LibJPEG/jcmarker.c Source/LibJPEG/jcmaster.c Source/LibJPEG/jcomapi.c Source/LibJPEG/jcparam.c Source/LibJPEG/jcprepct.c Source/LibJPEG/jcsample.c Source/LibJPEG/jctrans.c Source/LibJPEG/jdapimin.c Source/LibJPEG/jdapistd.c Source/LibJPEG/jdarith.c Source/LibJPEG/jdatadst.c Source/LibJPEG/jdatasrc.c Source/LibJPEG/jdcoefct.c Source/LibJPEG/jdcolor.c Source/LibJPEG/jddctmgr.c Source/LibJPEG/jdhuff.c Source/LibJPEG/jdinput.c Source/LibJPEG/jdmainct.c Source/LibJPEG/jdmarker.c Source/LibJPEG/jdmaster.c Source/LibJPEG/jdmerge.c Source/LibJPEG/jdpostct.c Source/LibJPEG/jdsample.c Source/LibJPEG/jdtrans.c Source/LibJPEG/jerror.c Source/LibJPEG/jfdctflt.c Source/LibJPEG/jfdctfst.c Source/LibJPEG/jfdctint.c Source/LibJPEG/jidctflt.c Source/LibJPEG/jidctfst.c Source/LibJPEG/jidctint.c Source/LibJPEG/jmemmgr.c Source/LibJPEG/jmemnobs.c Source/LibJPEG/jquant1.c Source/LibJPEG/jquant2.c Source/LibJPEG/jutils.c Source/LibJPEG/transupp.c Source/LibPNG/png.c Source/LibPNG/pngerror.c Source/LibPNG/pngget.c Source/LibPNG/pngmem.c Source/LibPNG/pngpread.c Source/LibPNG/pngread.c Source/LibPNG/pngrio.c Source/LibPNG/pngrtran.c Source/LibPNG/pngrutil.c Source/LibPNG/pngset.c Source/LibPNG/pngtrans.c Source/LibPNG/pngwio.c Source/LibPNG/pngwrite.c 
Source/LibPNG/pngwtran.c Source/LibPNG/pngwutil.c Source/LibTIFF4/tif_aux.c Source/LibTIFF4/tif_close.c Source/LibTIFF4/tif_codec.c Source/LibTIFF4/tif_color.c Source/LibTIFF4/tif_compress.c Source/LibTIFF4/tif_dir.c Source/LibTIFF4/tif_dirinfo.c Source/LibTIFF4/tif_dirread.c Source/LibTIFF4/tif_dirwrite.c Source/LibTIFF4/tif_dumpmode.c Source/LibTIFF4/tif_error.c Source/LibTIFF4/tif_extension.c Source/LibTIFF4/tif_fax3.c Source/LibTIFF4/tif_fax3sm.c Source/LibTIFF4/tif_flush.c Source/LibTIFF4/tif_getimage.c Source/LibTIFF4/tif_jpeg.c Source/LibTIFF4/tif_lerc.c Source/LibTIFF4/tif_luv.c Source/LibTIFF4/tif_lzw.c Source/LibTIFF4/tif_next.c Source/LibTIFF4/tif_ojpeg.c Source/LibTIFF4/tif_open.c Source/LibTIFF4/tif_packbits.c Source/LibTIFF4/tif_pixarlog.c Source/LibTIFF4/tif_predict.c Source/LibTIFF4/tif_print.c Source/LibTIFF4/tif_read.c Source/LibTIFF4/tif_strip.c Source/LibTIFF4/tif_swab.c Source/LibTIFF4/tif_thunder.c Source/LibTIFF4/tif_tile.c Source/LibTIFF4/tif_version.c Source/LibTIFF4/tif_warning.c Source/LibTIFF4/tif_webp.c Source/LibTIFF4/tif_write.c Source/LibTIFF4/tif_zip.c Source/ZLib/adler32.c Source/ZLib/compress.c Source/ZLib/crc32.c Source/ZLib/deflate.c Source/ZLib/gzclose.c Source/ZLib/gzlib.c Source/ZLib/gzread.c Source/ZLib/gzwrite.c Source/ZLib/infback.c Source/ZLib/inffast.c Source/ZLib/inflate.c Source/ZLib/inftrees.c Source/ZLib/trees.c Source/ZLib/uncompr.c Source/ZLib/zutil.c Source/LibOpenJPEG/bio.c Source/LibOpenJPEG/cio.c Source/LibOpenJPEG/dwt.c Source/LibOpenJPEG/event.c Source/LibOpenJPEG/function_list.c Source/LibOpenJPEG/image.c Source/LibOpenJPEG/invert.c Source/LibOpenJPEG/j2k.c Source/LibOpenJPEG/jp2.c Source/LibOpenJPEG/mct.c Source/LibOpenJPEG/mqc.c Source/LibOpenJPEG/openjpeg.c Source/LibOpenJPEG/opj_clock.c Source/LibOpenJPEG/pi.c Source/LibOpenJPEG/raw.c Source/LibOpenJPEG/t1.c Source/LibOpenJPEG/t2.c Source/LibOpenJPEG/tcd.c Source/LibOpenJPEG/tgt.c Source/OpenEXR/IexMath/IexMathFpu.cpp Source/OpenEXR/IlmImf/b44ExpLogTable.cpp Source/OpenEXR/IlmImf/ImfAcesFile.cpp Source/OpenEXR/IlmImf/ImfAttribute.cpp Source/OpenEXR/IlmImf/ImfB44Compressor.cpp Source/OpenEXR/IlmImf/ImfBoxAttribute.cpp Source/OpenEXR/IlmImf/ImfChannelList.cpp Source/OpenEXR/IlmImf/ImfChannelListAttribute.cpp Source/OpenEXR/IlmImf/ImfChromaticities.cpp Source/OpenEXR/IlmImf/ImfChromaticitiesAttribute.cpp Source/OpenEXR/IlmImf/ImfCompositeDeepScanLine.cpp Source/OpenEXR/IlmImf/ImfCompressionAttribute.cpp Source/OpenEXR/IlmImf/ImfCompressor.cpp Source/OpenEXR/IlmImf/ImfConvert.cpp Source/OpenEXR/IlmImf/ImfCRgbaFile.cpp Source/OpenEXR/IlmImf/ImfDeepCompositing.cpp Source/OpenEXR/IlmImf/ImfDeepFrameBuffer.cpp Source/OpenEXR/IlmImf/ImfDeepImageStateAttribute.cpp Source/OpenEXR/IlmImf/ImfDeepScanLineInputFile.cpp Source/OpenEXR/IlmImf/ImfDeepScanLineInputPart.cpp Source/OpenEXR/IlmImf/ImfDeepScanLineOutputFile.cpp Source/OpenEXR/IlmImf/ImfDeepScanLineOutputPart.cpp Source/OpenEXR/IlmImf/ImfDeepTiledInputFile.cpp Source/OpenEXR/IlmImf/ImfDeepTiledInputPart.cpp Source/OpenEXR/IlmImf/ImfDeepTiledOutputFile.cpp Source/OpenEXR/IlmImf/ImfDeepTiledOutputPart.cpp Source/OpenEXR/IlmImf/ImfDoubleAttribute.cpp Source/OpenEXR/IlmImf/ImfDwaCompressor.cpp Source/OpenEXR/IlmImf/ImfEnvmap.cpp Source/OpenEXR/IlmImf/ImfEnvmapAttribute.cpp Source/OpenEXR/IlmImf/ImfFastHuf.cpp Source/OpenEXR/IlmImf/ImfFloatAttribute.cpp Source/OpenEXR/IlmImf/ImfFloatVectorAttribute.cpp Source/OpenEXR/IlmImf/ImfFrameBuffer.cpp Source/OpenEXR/IlmImf/ImfFramesPerSecond.cpp Source/OpenEXR/IlmImf/ImfGenericInputFile.cpp 
Source/OpenEXR/IlmImf/ImfGenericOutputFile.cpp Source/OpenEXR/IlmImf/ImfHeader.cpp Source/OpenEXR/IlmImf/ImfHuf.cpp Source/OpenEXR/IlmImf/ImfInputFile.cpp Source/OpenEXR/IlmImf/ImfInputPart.cpp Source/OpenEXR/IlmImf/ImfInputPartData.cpp Source/OpenEXR/IlmImf/ImfIntAttribute.cpp Source/OpenEXR/IlmImf/ImfIO.cpp Source/OpenEXR/IlmImf/ImfKeyCode.cpp Source/OpenEXR/IlmImf/ImfKeyCodeAttribute.cpp Source/OpenEXR/IlmImf/ImfLineOrderAttribute.cpp Source/OpenEXR/IlmImf/ImfLut.cpp Source/OpenEXR/IlmImf/ImfMatrixAttribute.cpp Source/OpenEXR/IlmImf/ImfMisc.cpp Source/OpenEXR/IlmImf/ImfMultiPartInputFile.cpp Source/OpenEXR/IlmImf/ImfMultiPartOutputFile.cpp Source/OpenEXR/IlmImf/ImfMultiView.cpp Source/OpenEXR/IlmImf/ImfOpaqueAttribute.cpp Source/OpenEXR/IlmImf/ImfOutputFile.cpp Source/OpenEXR/IlmImf/ImfOutputPart.cpp Source/OpenEXR/IlmImf/ImfOutputPartData.cpp Source/OpenEXR/IlmImf/ImfPartType.cpp Source/OpenEXR/IlmImf/ImfPizCompressor.cpp Source/OpenEXR/IlmImf/ImfPreviewImage.cpp Source/OpenEXR/IlmImf/ImfPreviewImageAttribute.cpp Source/OpenEXR/IlmImf/ImfPxr24Compressor.cpp Source/OpenEXR/IlmImf/ImfRational.cpp Source/OpenEXR/IlmImf/ImfRationalAttribute.cpp Source/OpenEXR/IlmImf/ImfRgbaFile.cpp Source/OpenEXR/IlmImf/ImfRgbaYca.cpp Source/OpenEXR/IlmImf/ImfRle.cpp Source/OpenEXR/IlmImf/ImfRleCompressor.cpp Source/OpenEXR/IlmImf/ImfScanLineInputFile.cpp Source/OpenEXR/IlmImf/ImfStandardAttributes.cpp Source/OpenEXR/IlmImf/ImfStdIO.cpp Source/OpenEXR/IlmImf/ImfStringAttribute.cpp Source/OpenEXR/IlmImf/ImfStringVectorAttribute.cpp Source/OpenEXR/IlmImf/ImfSystemSpecific.cpp Source/OpenEXR/IlmImf/ImfTestFile.cpp Source/OpenEXR/IlmImf/ImfThreading.cpp Source/OpenEXR/IlmImf/ImfTileDescriptionAttribute.cpp Source/OpenEXR/IlmImf/ImfTiledInputFile.cpp Source/OpenEXR/IlmImf/ImfTiledInputPart.cpp Source/OpenEXR/IlmImf/ImfTiledMisc.cpp Source/OpenEXR/IlmImf/ImfTiledOutputFile.cpp Source/OpenEXR/IlmImf/ImfTiledOutputPart.cpp Source/OpenEXR/IlmImf/ImfTiledRgbaFile.cpp Source/OpenEXR/IlmImf/ImfTileOffsets.cpp Source/OpenEXR/IlmImf/ImfTimeCode.cpp Source/OpenEXR/IlmImf/ImfTimeCodeAttribute.cpp Source/OpenEXR/IlmImf/ImfVecAttribute.cpp Source/OpenEXR/IlmImf/ImfVersion.cpp Source/OpenEXR/IlmImf/ImfWav.cpp Source/OpenEXR/IlmImf/ImfZip.cpp Source/OpenEXR/IlmImf/ImfZipCompressor.cpp Source/OpenEXR/Imath/ImathBox.cpp Source/OpenEXR/Imath/ImathColorAlgo.cpp Source/OpenEXR/Imath/ImathFun.cpp Source/OpenEXR/Imath/ImathMatrixAlgo.cpp Source/OpenEXR/Imath/ImathRandom.cpp Source/OpenEXR/Imath/ImathShear.cpp Source/OpenEXR/Imath/ImathVec.cpp Source/OpenEXR/Iex/IexBaseExc.cpp Source/OpenEXR/Iex/IexThrowErrnoExc.cpp Source/OpenEXR/Half/half.cpp Source/OpenEXR/IlmThread/IlmThread.cpp Source/OpenEXR/IlmThread/IlmThreadMutex.cpp Source/OpenEXR/IlmThread/IlmThreadPool.cpp Source/OpenEXR/IlmThread/IlmThreadSemaphore.cpp Source/OpenEXR/IexMath/IexMathFloatExc.cpp Source/LibRawLite/src/decoders/canon_600.cpp Source/LibRawLite/src/decoders/crx.cpp Source/LibRawLite/src/decoders/decoders_dcraw.cpp Source/LibRawLite/src/decoders/decoders_libraw.cpp Source/LibRawLite/src/decoders/decoders_libraw_dcrdefs.cpp Source/LibRawLite/src/decoders/dng.cpp Source/LibRawLite/src/decoders/fp_dng.cpp Source/LibRawLite/src/decoders/fuji_compressed.cpp Source/LibRawLite/src/decoders/generic.cpp Source/LibRawLite/src/decoders/kodak_decoders.cpp Source/LibRawLite/src/decoders/load_mfbacks.cpp Source/LibRawLite/src/decoders/smal.cpp Source/LibRawLite/src/decoders/unpack.cpp Source/LibRawLite/src/decoders/unpack_thumb.cpp 
Source/LibRawLite/src/demosaic/aahd_demosaic.cpp Source/LibRawLite/src/demosaic/ahd_demosaic.cpp Source/LibRawLite/src/demosaic/dcb_demosaic.cpp Source/LibRawLite/src/demosaic/dht_demosaic.cpp Source/LibRawLite/src/demosaic/misc_demosaic.cpp Source/LibRawLite/src/demosaic/xtrans_demosaic.cpp Source/LibRawLite/src/integration/dngsdk_glue.cpp Source/LibRawLite/src/integration/rawspeed_glue.cpp Source/LibRawLite/src/libraw_datastream.cpp Source/LibRawLite/src/metadata/adobepano.cpp Source/LibRawLite/src/metadata/canon.cpp Source/LibRawLite/src/metadata/ciff.cpp Source/LibRawLite/src/metadata/cr3_parser.cpp Source/LibRawLite/src/metadata/epson.cpp Source/LibRawLite/src/metadata/exif_gps.cpp Source/LibRawLite/src/metadata/fuji.cpp Source/LibRawLite/src/metadata/hasselblad_model.cpp Source/LibRawLite/src/metadata/identify.cpp Source/LibRawLite/src/metadata/identify_tools.cpp Source/LibRawLite/src/metadata/kodak.cpp Source/LibRawLite/src/metadata/leica.cpp Source/LibRawLite/src/metadata/makernotes.cpp Source/LibRawLite/src/metadata/mediumformat.cpp Source/LibRawLite/src/metadata/minolta.cpp Source/LibRawLite/src/metadata/misc_parsers.cpp Source/LibRawLite/src/metadata/nikon.cpp Source/LibRawLite/src/metadata/normalize_model.cpp Source/LibRawLite/src/metadata/olympus.cpp Source/LibRawLite/src/metadata/p1.cpp Source/LibRawLite/src/metadata/pentax.cpp Source/LibRawLite/src/metadata/samsung.cpp Source/LibRawLite/src/metadata/sony.cpp Source/LibRawLite/src/metadata/tiff.cpp Source/LibRawLite/src/postprocessing/aspect_ratio.cpp Source/LibRawLite/src/postprocessing/dcraw_process.cpp Source/LibRawLite/src/postprocessing/mem_image.cpp Source/LibRawLite/src/postprocessing/postprocessing_aux.cpp Source/LibRawLite/src/postprocessing/postprocessing_utils.cpp Source/LibRawLite/src/postprocessing/postprocessing_utils_dcrdefs.cpp Source/LibRawLite/src/preprocessing/ext_preprocess.cpp Source/LibRawLite/src/preprocessing/raw2image.cpp Source/LibRawLite/src/preprocessing/subtract_black.cpp Source/LibRawLite/src/tables/cameralist.cpp Source/LibRawLite/src/tables/colorconst.cpp Source/LibRawLite/src/tables/colordata.cpp Source/LibRawLite/src/tables/wblists.cpp Source/LibRawLite/src/utils/curves.cpp Source/LibRawLite/src/utils/decoder_info.cpp Source/LibRawLite/src/utils/init_close_utils.cpp Source/LibRawLite/src/utils/open.cpp Source/LibRawLite/src/utils/phaseone_processing.cpp Source/LibRawLite/src/utils/read_utils.cpp Source/LibRawLite/src/utils/thumb_utils.cpp Source/LibRawLite/src/utils/utils_dcraw.cpp Source/LibRawLite/src/utils/utils_libraw.cpp Source/LibRawLite/src/write/file_write.cpp Source/LibRawLite/src/x3f/x3f_parse_process.cpp Source/LibRawLite/src/x3f/x3f_utils_patched.cpp Source/LibWebP/src/dec/alpha_dec.c Source/LibWebP/src/dec/buffer_dec.c Source/LibWebP/src/dec/frame_dec.c Source/LibWebP/src/dec/idec_dec.c Source/LibWebP/src/dec/io_dec.c Source/LibWebP/src/dec/quant_dec.c Source/LibWebP/src/dec/tree_dec.c Source/LibWebP/src/dec/vp8l_dec.c Source/LibWebP/src/dec/vp8_dec.c Source/LibWebP/src/dec/webp_dec.c Source/LibWebP/src/demux/anim_decode.c Source/LibWebP/src/demux/demux.c Source/LibWebP/src/dsp/alpha_processing.c Source/LibWebP/src/dsp/alpha_processing_mips_dsp_r2.c Source/LibWebP/src/dsp/alpha_processing_neon.c Source/LibWebP/src/dsp/alpha_processing_sse2.c Source/LibWebP/src/dsp/alpha_processing_sse41.c Source/LibWebP/src/dsp/cost.c Source/LibWebP/src/dsp/cost_mips32.c Source/LibWebP/src/dsp/cost_mips_dsp_r2.c Source/LibWebP/src/dsp/cost_neon.c Source/LibWebP/src/dsp/cost_sse2.c 
Source/LibWebP/src/dsp/cpu.c Source/LibWebP/src/dsp/dec.c Source/LibWebP/src/dsp/dec_clip_tables.c Source/LibWebP/src/dsp/dec_mips32.c Source/LibWebP/src/dsp/dec_mips_dsp_r2.c Source/LibWebP/src/dsp/dec_msa.c Source/LibWebP/src/dsp/dec_neon.c Source/LibWebP/src/dsp/dec_sse2.c Source/LibWebP/src/dsp/dec_sse41.c Source/LibWebP/src/dsp/enc.c Source/LibWebP/src/dsp/enc_avx2.c Source/LibWebP/src/dsp/enc_mips32.c Source/LibWebP/src/dsp/enc_mips_dsp_r2.c Source/LibWebP/src/dsp/enc_msa.c Source/LibWebP/src/dsp/enc_neon.c Source/LibWebP/src/dsp/enc_sse2.c Source/LibWebP/src/dsp/enc_sse41.c Source/LibWebP/src/dsp/filters.c Source/LibWebP/src/dsp/filters_mips_dsp_r2.c Source/LibWebP/src/dsp/filters_msa.c Source/LibWebP/src/dsp/filters_neon.c Source/LibWebP/src/dsp/filters_sse2.c Source/LibWebP/src/dsp/lossless.c Source/LibWebP/src/dsp/lossless_enc.c Source/LibWebP/src/dsp/lossless_enc_mips32.c Source/LibWebP/src/dsp/lossless_enc_mips_dsp_r2.c Source/LibWebP/src/dsp/lossless_enc_msa.c Source/LibWebP/src/dsp/lossless_enc_neon.c Source/LibWebP/src/dsp/lossless_enc_sse2.c Source/LibWebP/src/dsp/lossless_enc_sse41.c Source/LibWebP/src/dsp/lossless_mips_dsp_r2.c Source/LibWebP/src/dsp/lossless_msa.c Source/LibWebP/src/dsp/lossless_neon.c Source/LibWebP/src/dsp/lossless_sse2.c Source/LibWebP/src/dsp/lossless_sse41.c Source/LibWebP/src/dsp/rescaler.c Source/LibWebP/src/dsp/rescaler_mips32.c Source/LibWebP/src/dsp/rescaler_mips_dsp_r2.c Source/LibWebP/src/dsp/rescaler_msa.c Source/LibWebP/src/dsp/rescaler_neon.c Source/LibWebP/src/dsp/rescaler_sse2.c Source/LibWebP/src/dsp/ssim.c Source/LibWebP/src/dsp/ssim_sse2.c Source/LibWebP/src/dsp/upsampling.c Source/LibWebP/src/dsp/upsampling_mips_dsp_r2.c Source/LibWebP/src/dsp/upsampling_msa.c Source/LibWebP/src/dsp/upsampling_neon.c Source/LibWebP/src/dsp/upsampling_sse2.c Source/LibWebP/src/dsp/upsampling_sse41.c Source/LibWebP/src/dsp/yuv.c Source/LibWebP/src/dsp/yuv_mips32.c Source/LibWebP/src/dsp/yuv_mips_dsp_r2.c Source/LibWebP/src/dsp/yuv_neon.c Source/LibWebP/src/dsp/yuv_sse2.c Source/LibWebP/src/dsp/yuv_sse41.c Source/LibWebP/src/enc/alpha_enc.c Source/LibWebP/src/enc/analysis_enc.c Source/LibWebP/src/enc/backward_references_cost_enc.c Source/LibWebP/src/enc/backward_references_enc.c Source/LibWebP/src/enc/config_enc.c Source/LibWebP/src/enc/cost_enc.c Source/LibWebP/src/enc/filter_enc.c Source/LibWebP/src/enc/frame_enc.c Source/LibWebP/src/enc/histogram_enc.c Source/LibWebP/src/enc/iterator_enc.c Source/LibWebP/src/enc/near_lossless_enc.c Source/LibWebP/src/enc/picture_csp_enc.c Source/LibWebP/src/enc/picture_enc.c Source/LibWebP/src/enc/picture_psnr_enc.c Source/LibWebP/src/enc/picture_rescale_enc.c Source/LibWebP/src/enc/picture_tools_enc.c Source/LibWebP/src/enc/predictor_enc.c Source/LibWebP/src/enc/quant_enc.c Source/LibWebP/src/enc/syntax_enc.c Source/LibWebP/src/enc/token_enc.c Source/LibWebP/src/enc/tree_enc.c Source/LibWebP/src/enc/vp8l_enc.c Source/LibWebP/src/enc/webp_enc.c Source/LibWebP/src/mux/anim_encode.c Source/LibWebP/src/mux/muxedit.c Source/LibWebP/src/mux/muxinternal.c Source/LibWebP/src/mux/muxread.c Source/LibWebP/src/utils/bit_reader_utils.c Source/LibWebP/src/utils/bit_writer_utils.c Source/LibWebP/src/utils/color_cache_utils.c Source/LibWebP/src/utils/filters_utils.c Source/LibWebP/src/utils/huffman_encode_utils.c Source/LibWebP/src/utils/huffman_utils.c Source/LibWebP/src/utils/quant_levels_dec_utils.c Source/LibWebP/src/utils/quant_levels_utils.c Source/LibWebP/src/utils/random_utils.c Source/LibWebP/src/utils/rescaler_utils.c 
Source/LibWebP/src/utils/thread_utils.c Source/LibWebP/src/utils/utils.c Source/LibJXR/image/decode/decode.c Source/LibJXR/image/decode/JXRTranscode.c Source/LibJXR/image/decode/postprocess.c Source/LibJXR/image/decode/segdec.c Source/LibJXR/image/decode/strdec.c Source/LibJXR/image/decode/strdec_x86.c Source/LibJXR/image/decode/strInvTransform.c Source/LibJXR/image/decode/strPredQuantDec.c Source/LibJXR/image/encode/encode.c Source/LibJXR/image/encode/segenc.c Source/LibJXR/image/encode/strenc.c Source/LibJXR/image/encode/strenc_x86.c Source/LibJXR/image/encode/strFwdTransform.c Source/LibJXR/image/encode/strPredQuantEnc.c Source/LibJXR/image/sys/adapthuff.c Source/LibJXR/image/sys/image.c Source/LibJXR/image/sys/strcodec.c Source/LibJXR/image/sys/strPredQuant.c Source/LibJXR/image/sys/strTransform.c Source/LibJXR/jxrgluelib/JXRGlue.c Source/LibJXR/jxrgluelib/JXRGlueJxr.c Source/LibJXR/jxrgluelib/JXRGluePFC.c Source/LibJXR/jxrgluelib/JXRMeta.c Wrapper/FreeImagePlus/src/fipImage.cpp Wrapper/FreeImagePlus/src/fipMemoryIO.cpp Wrapper/FreeImagePlus/src/fipMetadataFind.cpp Wrapper/FreeImagePlus/src/fipMultiPage.cpp Wrapper/FreeImagePlus/src/fipTag.cpp Wrapper/FreeImagePlus/src/fipWinImage.cpp Wrapper/FreeImagePlus/src/FreeImagePlus.cpp -INCLUDE = -I. -ISource -ISource/Metadata -ISource/FreeImageToolkit -ISource/LibJPEG -ISource/LibPNG -ISource/LibTIFF4 -ISource/ZLib -ISource/LibOpenJPEG -ISource/OpenEXR -ISource/OpenEXR/Half -ISource/OpenEXR/Iex -ISource/OpenEXR/IlmImf -ISource/OpenEXR/IlmThread -ISource/OpenEXR/Imath -ISource/OpenEXR/IexMath -ISource/LibRawLite -ISource/LibRawLite/dcraw -ISource/LibRawLite/internal -ISource/LibRawLite/libraw -ISource/LibRawLite/src -ISource/LibWebP -ISource/LibJXR -ISource/LibJXR/common/include -ISource/LibJXR/image/sys -ISource/LibJXR/jxrgluelib -IWrapper/FreeImagePlus +SRCS = ./Source/FreeImage/BitmapAccess.cpp ./Source/FreeImage/ColorLookup.cpp ./Source/FreeImage/ConversionRGBA16.cpp ./Source/FreeImage/ConversionRGBAF.cpp ./Source/FreeImage/FreeImage.cpp ./Source/FreeImage/FreeImageC.c ./Source/FreeImage/FreeImageIO.cpp ./Source/FreeImage/GetType.cpp ./Source/FreeImage/LFPQuantizer.cpp ./Source/FreeImage/MemoryIO.cpp ./Source/FreeImage/PixelAccess.cpp ./Source/FreeImage/J2KHelper.cpp ./Source/FreeImage/MNGHelper.cpp ./Source/FreeImage/Plugin.cpp ./Source/FreeImage/PluginBMP.cpp ./Source/FreeImage/PluginCUT.cpp ./Source/FreeImage/PluginDDS.cpp ./Source/FreeImage/PluginEXR.cpp ./Source/FreeImage/PluginG3.cpp ./Source/FreeImage/PluginGIF.cpp ./Source/FreeImage/PluginHDR.cpp ./Source/FreeImage/PluginICO.cpp ./Source/FreeImage/PluginIFF.cpp ./Source/FreeImage/PluginJ2K.cpp ./Source/FreeImage/PluginJNG.cpp ./Source/FreeImage/PluginJP2.cpp ./Source/FreeImage/PluginJPEG.cpp ./Source/FreeImage/PluginJXR.cpp ./Source/FreeImage/PluginKOALA.cpp ./Source/FreeImage/PluginMNG.cpp ./Source/FreeImage/PluginPCD.cpp ./Source/FreeImage/PluginPCX.cpp ./Source/FreeImage/PluginPFM.cpp ./Source/FreeImage/PluginPICT.cpp ./Source/FreeImage/PluginPNG.cpp ./Source/FreeImage/PluginPNM.cpp ./Source/FreeImage/PluginPSD.cpp ./Source/FreeImage/PluginRAS.cpp ./Source/FreeImage/PluginRAW.cpp ./Source/FreeImage/PluginSGI.cpp ./Source/FreeImage/PluginTARGA.cpp ./Source/FreeImage/PluginTIFF.cpp ./Source/FreeImage/PluginWBMP.cpp ./Source/FreeImage/PluginWebP.cpp ./Source/FreeImage/PluginXBM.cpp ./Source/FreeImage/PluginXPM.cpp ./Source/FreeImage/PSDParser.cpp ./Source/FreeImage/TIFFLogLuv.cpp ./Source/FreeImage/Conversion.cpp ./Source/FreeImage/Conversion16_555.cpp 
./Source/FreeImage/Conversion16_565.cpp ./Source/FreeImage/Conversion24.cpp ./Source/FreeImage/Conversion32.cpp ./Source/FreeImage/Conversion4.cpp ./Source/FreeImage/Conversion8.cpp ./Source/FreeImage/ConversionFloat.cpp ./Source/FreeImage/ConversionRGB16.cpp ./Source/FreeImage/ConversionRGBF.cpp ./Source/FreeImage/ConversionType.cpp ./Source/FreeImage/ConversionUINT16.cpp ./Source/FreeImage/Halftoning.cpp ./Source/FreeImage/tmoColorConvert.cpp ./Source/FreeImage/tmoDrago03.cpp ./Source/FreeImage/tmoFattal02.cpp ./Source/FreeImage/tmoReinhard05.cpp ./Source/FreeImage/ToneMapping.cpp ./Source/FreeImage/NNQuantizer.cpp ./Source/FreeImage/WuQuantizer.cpp ./Source/FreeImage/CacheFile.cpp ./Source/FreeImage/MultiPage.cpp ./Source/FreeImage/ZLibInterface.cpp ./Source/Metadata/Exif.cpp ./Source/Metadata/FIRational.cpp ./Source/Metadata/FreeImageTag.cpp ./Source/Metadata/IPTC.cpp ./Source/Metadata/TagConversion.cpp ./Source/Metadata/TagLib.cpp ./Source/Metadata/XTIFF.cpp ./Source/FreeImageToolkit/Background.cpp ./Source/FreeImageToolkit/BSplineRotate.cpp ./Source/FreeImageToolkit/Channels.cpp ./Source/FreeImageToolkit/ClassicRotate.cpp ./Source/FreeImageToolkit/Colors.cpp ./Source/FreeImageToolkit/CopyPaste.cpp ./Source/FreeImageToolkit/Display.cpp ./Source/FreeImageToolkit/Flip.cpp ./Source/FreeImageToolkit/JPEGTransform.cpp ./Source/FreeImageToolkit/MultigridPoissonSolver.cpp ./Source/FreeImageToolkit/Rescale.cpp ./Source/FreeImageToolkit/Resize.cpp Wrapper/FreeImagePlus/src/fipImage.cpp Wrapper/FreeImagePlus/src/fipMemoryIO.cpp Wrapper/FreeImagePlus/src/fipMetadataFind.cpp Wrapper/FreeImagePlus/src/fipMultiPage.cpp Wrapper/FreeImagePlus/src/fipTag.cpp Wrapper/FreeImagePlus/src/fipWinImage.cpp Wrapper/FreeImagePlus/src/FreeImagePlus.cpp +INCLUDE = -I. -ISource -ISource/Metadata -ISource/FreeImageToolkit -IWrapper/FreeImagePlus From 686e1352f725d3bd7808cd66f2d8923db17ac9c0 Mon Sep 17 00:00:00 2001 From: Las Safin Date: Wed, 22 Dec 2021 14:23:18 +0000 Subject: [PATCH 003/161] libraw_0_20: init at 0.20.2 --- pkgs/development/libraries/libraw/0_20.nix | 12 ++++++++++++ pkgs/top-level/all-packages.nix | 2 ++ 2 files changed, 14 insertions(+) create mode 100644 pkgs/development/libraries/libraw/0_20.nix diff --git a/pkgs/development/libraries/libraw/0_20.nix b/pkgs/development/libraries/libraw/0_20.nix new file mode 100644 index 000000000000..9fa85e3ace27 --- /dev/null +++ b/pkgs/development/libraries/libraw/0_20.nix @@ -0,0 +1,12 @@ +{ libraw, fetchFromGitHub }: + +libraw.overrideAttrs (_: rec { + version = "0.20.2"; + + src = fetchFromGitHub { + owner = "LibRaw"; + repo = "LibRaw"; + rev = version; + sha256 = "16nm4r2l5501c9zvz25pzajq5id592jhn068scjxhr8np2cblybc"; + }; +}) diff --git a/pkgs/top-level/all-packages.nix b/pkgs/top-level/all-packages.nix index 4bd1380e027d..2a9d08393f48 100644 --- a/pkgs/top-level/all-packages.nix +++ b/pkgs/top-level/all-packages.nix @@ -16435,6 +16435,7 @@ with pkgs; gegl = callPackage ../development/libraries/gegl { inherit (darwin.apple_sdk.frameworks) OpenCL; + libraw = libraw_0_20; }; gensio = callPackage ../development/libraries/gensio {}; @@ -22381,6 +22382,7 @@ with pkgs; }; libraw = callPackage ../development/libraries/libraw { }; + libraw_0_20 = callPackage ../development/libraries/libraw/0_20.nix { }; libraw1394 = callPackage ../development/libraries/libraw1394 { }; From 0d5791236431766fa70ceca0e1df17ac8087ed13 Mon Sep 17 00:00:00 2001 From: Enno Richter Date: Sat, 15 Jan 2022 23:17:26 +0100 Subject: [PATCH 004/161] wvkbd: init at 0.7 --- 
.../accessibility/wvkbd/default.nix | 36 +++++++++++++++++++ pkgs/top-level/all-packages.nix | 2 ++ 2 files changed, 38 insertions(+) create mode 100644 pkgs/applications/accessibility/wvkbd/default.nix diff --git a/pkgs/applications/accessibility/wvkbd/default.nix b/pkgs/applications/accessibility/wvkbd/default.nix new file mode 100644 index 000000000000..a593b0d2ea2b --- /dev/null +++ b/pkgs/applications/accessibility/wvkbd/default.nix @@ -0,0 +1,36 @@ +{ stdenv +, lib +, fetchFromGitHub +, wayland-scanner +, wayland +, pango +, glib +, harfbuzz +, cairo +, pkg-config +, libxkbcommon +}: + +stdenv.mkDerivation rec { + pname = "wvkbd"; + version = "0.7"; + + src = fetchFromGitHub { + owner = "jjsullivan5196"; + repo = pname; + rev = "v${version}"; + sha256 = "sha256-5UV2PMrLXtF3AxjfPxxwFRkgVef+Ap8nG1v795o0bWE="; + }; + + nativeBuildInputs = [ pkg-config ]; + buildInputs = [ wayland-scanner wayland pango glib harfbuzz cairo libxkbcommon ]; + installFlags = [ "PREFIX=$(out)" ]; + + meta = with lib; { + homepage = "https://github.com/jjsullivan5196/wvkbd"; + description = "On-screen keyboard for wlroots"; + maintainers = [ maintainers.elohmeier ]; + platforms = platforms.linux; + license = licenses.gpl3Plus; + }; +} diff --git a/pkgs/top-level/all-packages.nix b/pkgs/top-level/all-packages.nix index f6cb53114a47..b4785cbd52cf 100644 --- a/pkgs/top-level/all-packages.nix +++ b/pkgs/top-level/all-packages.nix @@ -11091,6 +11091,8 @@ with pkgs; wv2 = callPackage ../tools/misc/wv2 { }; + wvkbd = callPackage ../applications/accessibility/wvkbd { }; + wyrd = callPackage ../tools/misc/wyrd { ocamlPackages = ocaml-ng.ocamlPackages_4_05; }; From d3aa7c14039471d52b2d36266008502d78c0e179 Mon Sep 17 00:00:00 2001 From: "R. Ryantm" Date: Fri, 18 Mar 2022 21:15:34 +0000 Subject: [PATCH 005/161] python310Packages.flask-talisman: 0.8.1 -> 1.0.0 --- pkgs/development/python-modules/flask-talisman/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/development/python-modules/flask-talisman/default.nix b/pkgs/development/python-modules/flask-talisman/default.nix index b57d58824705..82f3e09b4794 100644 --- a/pkgs/development/python-modules/flask-talisman/default.nix +++ b/pkgs/development/python-modules/flask-talisman/default.nix @@ -8,11 +8,11 @@ buildPythonPackage rec { pname = "flask-talisman"; - version = "0.8.1"; + version = "1.0.0"; src = fetchPypi { inherit pname version; - sha256 = "11gjgqkpj2yqydb0pfhjyx56iy4l9szgz33vg5d7bw8vqp02wl2x"; + sha256 = "sha256-IF0958Xs+tZnyEEj9fvlgLH2jNmhsFjXNTzANI4Vsb8="; }; buildInputs = [ From 67303b06c0a20f3dfa11a390ffd35e4762a7c179 Mon Sep 17 00:00:00 2001 From: Guillaume Girol Date: Thu, 31 Mar 2022 12:00:00 +0000 Subject: [PATCH 006/161] python3.pkgs.ihatemoney: fix build with flask-talisman 1 upstream bumped the bound without modification https://github.com/spiral-project/ihatemoney/pull/1006 --- pkgs/development/python-modules/ihatemoney/default.nix | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pkgs/development/python-modules/ihatemoney/default.nix b/pkgs/development/python-modules/ihatemoney/default.nix index d351f1ca7d44..ff30196630a9 100644 --- a/pkgs/development/python-modules/ihatemoney/default.nix +++ b/pkgs/development/python-modules/ihatemoney/default.nix @@ -99,7 +99,8 @@ buildPythonPackage rec { --replace "cachetools>=4.1,<5" "cachetools>=4.1" \ --replace "Flask-WTF>=0.14.3,<1" "Flask-WTF>=0.14.3,<2" \ --replace "SQLAlchemy>=1.3.0,<1.4" "SQLAlchemy>=1.3.0,<1.5" \ - --replace "WTForms>=2.3.1,<2.4" 
"WTForms" + --replace "WTForms>=2.3.1,<2.4" "WTForms" \ + --replace "Flask-Talisman>=0.8,<1" "Flask-Talisman>=0.8,<2" # https://github.com/spiral-project/ihatemoney/pull/1006 ''; checkInputs = [ From 3326297c8e93c895f7bac7a84729216515383a84 Mon Sep 17 00:00:00 2001 From: Sergei Trofimovich Date: Tue, 5 Apr 2022 22:48:31 +0100 Subject: [PATCH 007/161] ethtool: 5.16 -> 5.17 While at it added trivial updater. changelog: https://git.kernel.org/pub/scm/network/ethtool/ethtool.git/log/ --- pkgs/tools/misc/ethtool/default.nix | 21 +++++++++++++++++++-- 1 file changed, 19 insertions(+), 2 deletions(-) diff --git a/pkgs/tools/misc/ethtool/default.nix b/pkgs/tools/misc/ethtool/default.nix index d46815e8bf2a..f80de50ea551 100644 --- a/pkgs/tools/misc/ethtool/default.nix +++ b/pkgs/tools/misc/ethtool/default.nix @@ -3,15 +3,16 @@ , fetchurl , libmnl , pkg-config +, writeScript }: stdenv.mkDerivation rec { pname = "ethtool"; - version = "5.16"; + version = "5.17"; src = fetchurl { url = "mirror://kernel/software/network/${pname}/${pname}-${version}.tar.xz"; - sha256 = "sha256-qi/vGTbdShF1XfoL25Pw7FvqRSCNJ8l1S8Or4apCwcs="; + sha256 = "sha256-ZKuRS5xrRQRyRdkfQLh2CycomSqeWvInF8ZEI46IkTM="; }; nativeBuildInputs = [ @@ -22,6 +23,22 @@ stdenv.mkDerivation rec { libmnl ]; + passthru = { + updateScript = writeScript "update-ethtool" '' + #!/usr/bin/env nix-shell + #!nix-shell -i bash -p curl pcre common-updater-scripts + + set -eu -o pipefail + + # Expect the text in format of '...' + # The page always lists versions newest to oldest. Pick the first one. + new_version="$(curl -s https://mirrors.edge.kernel.org/pub/software/network/ethtool/ | + pcregrep -o1 '' | + head -n1)" + update-source-version ${pname} "$new_version" + ''; + }; + meta = with lib; { description = "Utility for controlling network drivers and hardware"; homepage = "https://www.kernel.org/pub/software/network/ethtool/"; From 89f15314013af00739b3eb394d064989e43f8e4b Mon Sep 17 00:00:00 2001 From: "R. Ryantm" Date: Fri, 8 Apr 2022 00:10:22 +0000 Subject: [PATCH 008/161] icingaweb2: 2.10.0 -> 2.10.1 --- pkgs/servers/icingaweb2/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/servers/icingaweb2/default.nix b/pkgs/servers/icingaweb2/default.nix index 75282ad15c4f..651c73988682 100644 --- a/pkgs/servers/icingaweb2/default.nix +++ b/pkgs/servers/icingaweb2/default.nix @@ -2,13 +2,13 @@ stdenvNoCC.mkDerivation rec { pname = "icingaweb2"; - version = "2.10.0"; + version = "2.10.1"; src = fetchFromGitHub { owner = "Icinga"; repo = "icingaweb2"; rev = "v${version}"; - sha256 = "sha256:0fhhq6mzpwj3dh6w583n4sngshf15dm8zgbli5cacy7jkzmsz0wn"; + sha256 = "sha256-X4RaAJjhUnSALJyFYiwagN3cHyW+GyB5MPkW7l+Zv10="; }; nativeBuildInputs = [ makeWrapper ]; From eb286631b4cec28424fb4056735a79cdb36eec59 Mon Sep 17 00:00:00 2001 From: sternenseemann Date: Sat, 9 Apr 2022 00:26:05 +0200 Subject: [PATCH 009/161] haskell.compiler: check targetPlatform for Cabal Paths module patch The issue we are working around is concerned with the lack of dead code elimination in the aarch64-darwin LLVM codegen backend. Thus the relevant condition is the target platform, not the host platform as checked previously. 
--- pkgs/development/compilers/ghc/8.10.7.nix | 2 +- pkgs/development/compilers/ghc/9.0.2.nix | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/development/compilers/ghc/8.10.7.nix b/pkgs/development/compilers/ghc/8.10.7.nix index fd14501097c2..cdf4faf3ffc7 100644 --- a/pkgs/development/compilers/ghc/8.10.7.nix +++ b/pkgs/development/compilers/ghc/8.10.7.nix @@ -212,7 +212,7 @@ stdenv.mkDerivation (rec { url = "https://gitlab.haskell.org/ghc/ghc/-/commit/97d0b0a367e4c6a52a17c3299439ac7de129da24.patch"; sha256 = "0r4zjj0bv1x1m2dgxp3adsf2xkr94fjnyj1igsivd9ilbs5ja0b5"; }) - ] ++ lib.optionals (stdenv.isDarwin && stdenv.isAarch64) [ + ] ++ lib.optionals (stdenv.targetPlatform.isDarwin && stdenv.targetPlatform.isAarch64) [ # Prevent the paths module from emitting symbols that we don't use # when building with separate outputs. diff --git a/pkgs/development/compilers/ghc/9.0.2.nix b/pkgs/development/compilers/ghc/9.0.2.nix index 1f94dd3bbde2..a4cefe7294d8 100644 --- a/pkgs/development/compilers/ghc/9.0.2.nix +++ b/pkgs/development/compilers/ghc/9.0.2.nix @@ -192,7 +192,7 @@ stdenv.mkDerivation (rec { url = "https://gitlab.haskell.org/ghc/ghc/-/commit/c6132c782d974a7701e7f6447bdcd2bf6db4299a.patch?merge_request_iid=7423"; sha256 = "sha256-b4feGZIaKDj/UKjWTNY6/jH4s2iate0wAgMxG3rAbZI="; }) - ] ++ lib.optionals (stdenv.isDarwin && stdenv.isAarch64) [ + ] ++ lib.optionals (stdenv.targetPlatform.isDarwin && stdenv.targetPlatform.isAarch64) [ # Prevent the paths module from emitting symbols that we don't use # when building with separate outputs. From 7a13b86dbd88871124a9af6654b5eaef00a19f82 Mon Sep 17 00:00:00 2001 From: Michiel Boekhoff Date: Sat, 9 Apr 2022 20:22:24 +0100 Subject: [PATCH 010/161] androidStudioPackages.stable: 2021.1.1.21 -> 2021.1.1.23 --- pkgs/applications/editors/android-studio/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/applications/editors/android-studio/default.nix b/pkgs/applications/editors/android-studio/default.nix index e7824cd93f1c..d36c7b9d9460 100644 --- a/pkgs/applications/editors/android-studio/default.nix +++ b/pkgs/applications/editors/android-studio/default.nix @@ -9,8 +9,8 @@ let inherit buildFHSUserEnv; }; stableVersion = { - version = "2021.1.1.21"; # "Android Studio Bumblebee (2021.1.1 Patch 1)" - sha256Hash = "PeMJIILfaunTlpR4EV76qQlTlZDcWoKes61qe9W9oqQ="; + version = "2021.1.1.23"; # "Android Studio Bumblebee (2021.1.1 Patch 3)" + sha256Hash = "1kxb19qf7bs5lyfgr8vamakp1nf2wlxlwwni1kihza67ib6hcxdk"; }; betaVersion = { version = "2021.2.1.8"; # "Android Studio Chipmunk (2021.2.1) Beta 1" From 61069589c24fc7f7bd09f8599b1699a1c8830ee6 Mon Sep 17 00:00:00 2001 From: Michiel Boekhoff Date: Sat, 9 Apr 2022 20:23:28 +0100 Subject: [PATCH 011/161] androidStudioPackages.beta: 2021.2.1.8 -> 2021.2.1.11 --- pkgs/applications/editors/android-studio/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/applications/editors/android-studio/default.nix b/pkgs/applications/editors/android-studio/default.nix index d36c7b9d9460..8cd44ed8b685 100644 --- a/pkgs/applications/editors/android-studio/default.nix +++ b/pkgs/applications/editors/android-studio/default.nix @@ -13,8 +13,8 @@ let sha256Hash = "1kxb19qf7bs5lyfgr8vamakp1nf2wlxlwwni1kihza67ib6hcxdk"; }; betaVersion = { - version = "2021.2.1.8"; # "Android Studio Chipmunk (2021.2.1) Beta 1" - sha256Hash = "bPfs4kw7czG9CbEgrzn0bQXdT03jyqPVqtaIuVBFSmc="; + version = "2021.2.1.11"; # "Android Studio Chipmunk (2021.2.1) Beta 4" + 
sha256Hash = "0in8x6v957y9hsnz5ak845pdpvgvnvlm0s6r9y8f27zkm947vbjd"; }; latestVersion = { # canary & dev version = "2021.3.1.1"; # "Android Studio Dolphin (2021.3.1) Canary 1" From fb90972987eb0ddb1914e4d7e8ac77186cbda444 Mon Sep 17 00:00:00 2001 From: Michiel Boekhoff Date: Sat, 9 Apr 2022 20:24:18 +0100 Subject: [PATCH 012/161] androidStudioPackages.{canary,dev}: 2021.3.1.1 -> 2021.3.1.7 --- pkgs/applications/editors/android-studio/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/applications/editors/android-studio/default.nix b/pkgs/applications/editors/android-studio/default.nix index 8cd44ed8b685..39bbff256525 100644 --- a/pkgs/applications/editors/android-studio/default.nix +++ b/pkgs/applications/editors/android-studio/default.nix @@ -17,8 +17,8 @@ let sha256Hash = "0in8x6v957y9hsnz5ak845pdpvgvnvlm0s6r9y8f27zkm947vbjd"; }; latestVersion = { # canary & dev - version = "2021.3.1.1"; # "Android Studio Dolphin (2021.3.1) Canary 1" - sha256Hash = "W3pNQBM7WdDScQo5b8q5Va5NTgl73uZu0ks/zDMb4aA="; + version = "2021.3.1.7"; # "Android Studio Dolphin (2021.3.1) Canary 7" + sha256Hash = "02jwy3q2ccs7l3snm8w40znzk54v2h1sljdr3d0yh7sy0qyn32k1"; }; in { # Attributes are named by their corresponding release channels From bd2a384c40c4176c279502c46c2ac411f8c31cbd Mon Sep 17 00:00:00 2001 From: "R. Ryantm" Date: Sat, 9 Apr 2022 22:11:46 +0000 Subject: [PATCH 013/161] thanos: 0.25.1 -> 0.25.2 --- pkgs/servers/monitoring/thanos/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/servers/monitoring/thanos/default.nix b/pkgs/servers/monitoring/thanos/default.nix index 8692b5b558ab..7b1bd2960ab9 100644 --- a/pkgs/servers/monitoring/thanos/default.nix +++ b/pkgs/servers/monitoring/thanos/default.nix @@ -1,13 +1,13 @@ { lib, buildGoModule, fetchFromGitHub }: buildGoModule rec { pname = "thanos"; - version = "0.25.1"; + version = "0.25.2"; src = fetchFromGitHub { rev = "v${version}"; owner = "thanos-io"; repo = "thanos"; - sha256 = "sha256-yisJqr2JMpXSo9O3q7WpNe1r6w7E3XyeLpVlbdest3s="; + sha256 = "sha256-CAeI+5aC8kSQaKOk/5WCQiQMOX82hogAQGP2Em3DJAw="; }; vendorSha256 = "sha256-tHtfS4PnO9n3ckUdaG6dQAIE2D2PG6km4Tqofaab/eg="; From 6951ba02f4c7cef6cd38825ae8fc5f51be05205d Mon Sep 17 00:00:00 2001 From: Robert Scott Date: Sat, 16 Apr 2022 17:18:05 +0100 Subject: [PATCH 014/161] nginxStable: add patch for CVE-2021-3618 --- pkgs/servers/http/nginx/generic.nix | 4 +++- pkgs/servers/http/nginx/stable.nix | 9 ++++++++- 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/pkgs/servers/http/nginx/generic.nix b/pkgs/servers/http/nginx/generic.nix index fa711d1dff4f..9d0737a8f32c 100644 --- a/pkgs/servers/http/nginx/generic.nix +++ b/pkgs/servers/http/nginx/generic.nix @@ -18,6 +18,7 @@ , sha256 ? null # when not specifying src , configureFlags ? [] , buildInputs ? [] +, extraPatches ? [] , fixPatch ? p: p , preConfigure ? "" , postInstall ? 
null @@ -134,7 +135,8 @@ stdenv.mkDerivation { url = "https://raw.githubusercontent.com/openwrt/packages/c057dfb09c7027287c7862afab965a4cd95293a3/net/nginx/patches/103-sys_nerr.patch"; sha256 = "0s497x6mkz947aw29wdy073k8dyjq8j99lax1a1mzpikzr4rxlmd"; }) - ] ++ mapModules "patches"); + ] ++ mapModules "patches") + ++ extraPatches; hardeningEnable = optional (!stdenv.isDarwin) "pie"; diff --git a/pkgs/servers/http/nginx/stable.nix b/pkgs/servers/http/nginx/stable.nix index 5db7c65daa40..c219b01fbb31 100644 --- a/pkgs/servers/http/nginx/stable.nix +++ b/pkgs/servers/http/nginx/stable.nix @@ -1,6 +1,13 @@ -{ callPackage, ... } @ args: +{ callPackage, fetchpatch, ... } @ args: callPackage ./generic.nix args { version = "1.20.2"; sha256 = "0hjsyjzd35qyw49w210f67g678kvzinw4kg1acb0l6c2fxspd24m"; + extraPatches = [ + (fetchpatch { + name = "CVE-2021-3618.patch"; + url = "https://github.com/nginx/nginx/commit/173f16f736c10eae46cd15dd861b04b82d91a37a.patch"; + sha256 = "0cnxmbkp6ip61w7y1ihhnvziiwzz3p3wi2vpi5c7yaj5m964k5db"; + }) + ]; } From c9464f4716ea18614ba8952e73bf75fbd3c15818 Mon Sep 17 00:00:00 2001 From: "R. Ryantm" Date: Sun, 17 Apr 2022 06:36:31 +0000 Subject: [PATCH 015/161] pyinfra: 2.0 -> 2.0.1 --- pkgs/development/python-modules/pyinfra/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/development/python-modules/pyinfra/default.nix b/pkgs/development/python-modules/pyinfra/default.nix index dc25d162445f..130486a55f17 100644 --- a/pkgs/development/python-modules/pyinfra/default.nix +++ b/pkgs/development/python-modules/pyinfra/default.nix @@ -17,14 +17,14 @@ buildPythonPackage rec { pname = "pyinfra"; - version = "2.0"; + version = "2.0.1"; format = "setuptools"; disabled = pythonOlder "3.7"; src = fetchPypi { inherit pname version; - sha256 = "sha256-rzVirU697wGehCIc/WwE6Rg9AaYYELXfoe10GMRFHgw="; + sha256 = "sha256-157NtpA85FS27Ln1Xsvq5/qumSsr0WSDOhG0UwMUnRE="; }; propagatedBuildInputs = [ From 9d0e77b38e0f8f82b9d26bf9044ff843184155aa Mon Sep 17 00:00:00 2001 From: "R. Ryantm" Date: Tue, 19 Apr 2022 02:07:02 +0000 Subject: [PATCH 016/161] python310Packages.vispy: 0.9.6 -> 0.10.0 --- pkgs/development/python-modules/vispy/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/development/python-modules/vispy/default.nix b/pkgs/development/python-modules/vispy/default.nix index 47a966332548..c17f37287384 100644 --- a/pkgs/development/python-modules/vispy/default.nix +++ b/pkgs/development/python-modules/vispy/default.nix @@ -16,11 +16,11 @@ buildPythonPackage rec { pname = "vispy"; - version = "0.9.6"; + version = "0.10.0"; src = fetchPypi { inherit pname version; - sha256 = "sha256-ISzVtQgkSZu84+LXQaray3nAt3GsVm+THGE1WXYCi8s="; + sha256 = "sha256-t2rW8+rK2/xJRM+4IR6ttuqEF6WQmT7OWqfKrAgs/8I="; }; patches = [ From e76dad1a77ed5bf5cc5253d0ed9a842a1c135e7f Mon Sep 17 00:00:00 2001 From: "R. 
Ryantm" Date: Tue, 19 Apr 2022 03:15:47 +0000 Subject: [PATCH 017/161] git-review: 2.2.0 -> 2.3.0 --- pkgs/applications/version-management/git-review/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/applications/version-management/git-review/default.nix b/pkgs/applications/version-management/git-review/default.nix index 98c9c5312e61..37053b7f7a0c 100644 --- a/pkgs/applications/version-management/git-review/default.nix +++ b/pkgs/applications/version-management/git-review/default.nix @@ -9,7 +9,7 @@ buildPythonApplication rec { pname = "git-review"; - version = "2.2.0"; + version = "2.3.0"; # Manually set version because prb wants to get it from the git # upstream repository (and we are installing from tarball instead) @@ -20,7 +20,7 @@ buildPythonApplication rec { owner = "opendev"; repo = pname; rev = version; - sha256 = "sha256-2+X5fPxB2FIp1fwqEUc+W0gH2NjhF/V+La+maE+XEpo="; + sha256 = "sha256-ENrv2jx59iNA/hALFg6+gOz8zxJaoiCwYcAh1xeEOR0="; }; outputs = [ "out" "man" ]; From 579983525cea451e5e5ab25934fd38bedfd58c15 Mon Sep 17 00:00:00 2001 From: "R. Ryantm" Date: Tue, 19 Apr 2022 04:01:30 +0000 Subject: [PATCH 018/161] gnumeric: 1.12.51 -> 1.12.52 --- pkgs/applications/office/gnumeric/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/applications/office/gnumeric/default.nix b/pkgs/applications/office/gnumeric/default.nix index 13b0c84cfaa5..8f10a79a4cb9 100644 --- a/pkgs/applications/office/gnumeric/default.nix +++ b/pkgs/applications/office/gnumeric/default.nix @@ -7,11 +7,11 @@ let inherit (python3Packages) python pygobject3; in stdenv.mkDerivation rec { pname = "gnumeric"; - version = "1.12.51"; + version = "1.12.52"; src = fetchurl { url = "mirror://gnome/sources/${pname}/${lib.versions.majorMinor version}/${pname}-${version}.tar.xz"; - sha256 = "oA5sbk7N2tq9mwrhgBPXsFk3/cuPmq1ac7lZI8eusd0="; + sha256 = "c89zBJoiodgoUGJ1ssk3jsN8X/N7aLsfL0lPDWQAgjs="; }; configureFlags = [ "--disable-component" ]; From 0edfd89d6e24064d2b0fd0853b8e19c42906dc1b Mon Sep 17 00:00:00 2001 From: Eric Corson Date: Tue, 19 Apr 2022 10:33:29 +0100 Subject: [PATCH 019/161] nixos/tools: add copySystemConfiguration to configuration file template --- nixos/modules/installer/tools/tools.nix | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/nixos/modules/installer/tools/tools.nix b/nixos/modules/installer/tools/tools.nix index 2e088b977710..bf5ec0f9690b 100644 --- a/nixos/modules/installer/tools/tools.nix +++ b/nixos/modules/installer/tools/tools.nix @@ -206,6 +206,11 @@ in # Or disable the firewall altogether. # networking.firewall.enable = false; + # Copy the NixOS configuration file and link it from the resulting system + # (/run/current-system/configuration.nix). This is useful in case you + # accidentally delete configuration.nix. + # system.copySystemConfiguration = true; + # This value determines the NixOS release from which the default # settings for stateful data, like file locations and database versions # on your system were taken. 
It‘s perfectly fine and recommended to leave From 874381847b1063b811c4631244a77356335d04ce Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Vladim=C3=ADr=20=C4=8Cun=C3=A1t?= Date: Tue, 19 Apr 2022 13:17:59 +0200 Subject: [PATCH 020/161] gnumeric: mark aarch64-darwin as broken Apparently it's never worked: https://hydra.nixos.org/job/nixpkgs/nixpkgs-unstable-aarch64-darwin/gnumeric.aarch64-darwin/all --- pkgs/applications/office/gnumeric/default.nix | 1 + 1 file changed, 1 insertion(+) diff --git a/pkgs/applications/office/gnumeric/default.nix b/pkgs/applications/office/gnumeric/default.nix index 8f10a79a4cb9..c9b4ef139126 100644 --- a/pkgs/applications/office/gnumeric/default.nix +++ b/pkgs/applications/office/gnumeric/default.nix @@ -38,6 +38,7 @@ in stdenv.mkDerivation rec { license = lib.licenses.gpl2Plus; homepage = "http://projects.gnome.org/gnumeric/"; platforms = platforms.unix; + broken = with stdenv; isDarwin && isAarch64; maintainers = [ maintainers.vcunat ]; }; } From 2d1b9c0632dbbbf2d150bb0de027cbbd2ea9400d Mon Sep 17 00:00:00 2001 From: rewine <1758075541@qq.com> Date: Tue, 19 Apr 2022 21:20:40 +0800 Subject: [PATCH 021/161] tdlib: 1.8.1 -> 1.8.3 --- pkgs/development/libraries/tdlib/default.nix | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pkgs/development/libraries/tdlib/default.nix b/pkgs/development/libraries/tdlib/default.nix index 4dedc13db2a8..0196e62fb977 100644 --- a/pkgs/development/libraries/tdlib/default.nix +++ b/pkgs/development/libraries/tdlib/default.nix @@ -2,14 +2,14 @@ stdenv.mkDerivation rec { pname = "tdlib"; - version = "1.8.1"; + version = "1.8.3"; src = fetchFromGitHub { owner = "tdlib"; repo = "td"; # https://github.com/tdlib/td/issues/1790 - rev = "92c2a9c4e521df720abeaa9872e1c2b797d5c93f"; - sha256 = "ZoKsgdkS78mptfbxkkV4pgcgJEaWwKZWK2cvmxgJN4E="; + rev = "054a823c1a812ee3e038f702c6d8ba3e6974be9c"; + sha256 = "sha256-YlvIGR3Axej0nfcGBQ5lwwYVWsLgqFrYgOxoNubYMPM="; }; buildInputs = [ gperf openssl readline zlib ]; From 6e06f630400ad9e4c364862efab1ba1c9561c954 Mon Sep 17 00:00:00 2001 From: Fabian Affolter Date: Wed, 20 Apr 2022 09:04:15 +0200 Subject: [PATCH 022/161] python3Packages.readme_renderer: 34.0 -> 35.0 --- pkgs/development/python-modules/readme_renderer/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/development/python-modules/readme_renderer/default.nix b/pkgs/development/python-modules/readme_renderer/default.nix index ab062615cc49..1c1ea480cd40 100644 --- a/pkgs/development/python-modules/readme_renderer/default.nix +++ b/pkgs/development/python-modules/readme_renderer/default.nix @@ -12,7 +12,7 @@ buildPythonPackage rec { pname = "readme-renderer"; - version = "34.0"; + version = "35.0"; format = "setuptools"; disabled = pythonOlder "3.6"; @@ -20,7 +20,7 @@ buildPythonPackage rec { src = fetchPypi { pname = "readme_renderer"; inherit version; - sha256 = "sha256-37TRfyFwbRRfdHPgthyiRbpY6BDPmyIJpII5Z3+C5bA="; + sha256 = "sha256-pyeZms/CIvwh2CoS7UjJV8SYl4XlhlgHxlpIfSFndJc="; }; propagatedBuildInputs = [ From aa305e509322a6cefa4660402689b45bbc292f30 Mon Sep 17 00:00:00 2001 From: Florian Brandes Date: Wed, 20 Apr 2022 10:04:50 +0200 Subject: [PATCH 023/161] python3Packages.sentry-sdk: disable tests disable tests in the wake of Flask and Werkzeug update Signed-off-by: Florian Brandes --- .../python-modules/sentry-sdk/default.nix | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/pkgs/development/python-modules/sentry-sdk/default.nix 
b/pkgs/development/python-modules/sentry-sdk/default.nix index 3272c4b612f6..8a450023d1f3 100644 --- a/pkgs/development/python-modules/sentry-sdk/default.nix +++ b/pkgs/development/python-modules/sentry-sdk/default.nix @@ -112,6 +112,8 @@ buildPythonPackage rec { "test_auto_session_tracking_with_aggregates" # Network requests to public web "test_crumb_capture" + # TypeError: cannot unpack non-iterable TestResponse object + "test_rpc_error_page" ]; disabledTestPaths = [ @@ -128,7 +130,17 @@ buildPythonPackage rec { "tests/integrations/rq/" # broken since pytest 7.0.1; AssertionError: previous item was not torn down properly "tests/utils/test_contextvars.py" - ]; + # broken since Flask and Werkzeug update to 2.1.0 (different error messages) + "tests/integrations/flask/test_flask.py" + "tests/integrations/bottle/test_bottle.py" + "tests/integrations/django/test_basic.py" + "tests/integrations/pyramid/test_pyramid.py" + ] + # test crashes on aarch64 + ++ (if stdenv.buildPlatform != "x86_64-linux" then [ + "tests/test_transport.py" + "tests/integrations/threading/test_threading.py" + ] else [ ]); pythonImportsCheck = [ "sentry_sdk" From a61732d482a3df79be24dc8152d4b41624c2697c Mon Sep 17 00:00:00 2001 From: 06kellyjac Date: Wed, 20 Apr 2022 11:16:36 +0100 Subject: [PATCH 024/161] conftest: 0.30.0 -> 0.31.0 --- pkgs/development/tools/conftest/default.nix | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pkgs/development/tools/conftest/default.nix b/pkgs/development/tools/conftest/default.nix index f200cd742d90..fa4f9ea43843 100644 --- a/pkgs/development/tools/conftest/default.nix +++ b/pkgs/development/tools/conftest/default.nix @@ -2,15 +2,15 @@ buildGoModule rec { pname = "conftest"; - version = "0.30.0"; + version = "0.31.0"; src = fetchFromGitHub { owner = "open-policy-agent"; repo = "conftest"; rev = "v${version}"; - sha256 = "sha256-8/eZz5ejw5bahCYwz825HI+oZ6D1odeTpMIJh0TcGMY="; + sha256 = "sha256-p3EzJLq+LH8G8P7x6+47XWn8ckFeW2O7xhQGoRQDOOQ="; }; - vendorSha256 = "sha256-wvOtBK3lRK7XwgeClywowgrZLohltSTGdoB+j3NRmkE="; + vendorSha256 = "sha256-WFR0DtOz4dteRWWaqjTIiyTpBTnH6qKivH9t+gRWsvg="; ldflags = [ "-s" From 582a42ece7e2a86d5b076addd5f18a6379fd326a Mon Sep 17 00:00:00 2001 From: Thiago Kenji Okada Date: Tue, 19 Apr 2022 16:32:26 +0100 Subject: [PATCH 025/161] lvm2: fix static compilation This should avoid a infinity recursion issue during static compilation, by making util-linux not a dependency of the `fix-blkdeactivate.patch`. --- pkgs/os-specific/linux/lvm2/common.nix | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/pkgs/os-specific/linux/lvm2/common.nix b/pkgs/os-specific/linux/lvm2/common.nix index 641945a5685a..53cab49f1a63 100644 --- a/pkgs/os-specific/linux/lvm2/common.nix +++ b/pkgs/os-specific/linux/lvm2/common.nix @@ -4,18 +4,19 @@ , fetchpatch , fetchurl , pkg-config -, util-linux , coreutils , libuuid , libaio , substituteAll , enableCmdlib ? false , enableDmeventd ? false -, udevSupport ? !stdenv.hostPlatform.isStatic, udev ? null +, udevSupport ? !stdenv.hostPlatform.isStatic, udev , onlyLib ? stdenv.hostPlatform.isStatic -, enableVDO ? false, vdo ? null -, enableMdadm ? false, mdadm ? null -, enableMultipath ? false, multipath-tools ? null + # Otherwise we have a infinity recursion during static compilation +, enableUtilLinux ? !stdenv.hostPlatform.isStatic, util-linux +, enableVDO ? false, vdo +, enableMdadm ? false, mdadm +, enableMultipath ? 
false, multipath-tools , nixosTests }: @@ -102,7 +103,7 @@ stdenv.mkDerivation rec { in { src = ./fix-blkdeactivate.patch; inherit coreutils; - util_linux = util-linux; + util_linux = optionalTool enableUtilLinux util-linux; mdadm = optionalTool enableMdadm mdadm; multipath_tools = optionalTool enableMultipath multipath-tools; vdo = optionalTool enableVDO vdo; From 67ae929deb3b3c5102848485dbb3f48e5ff27278 Mon Sep 17 00:00:00 2001 From: Thiago Kenji Okada Date: Tue, 19 Apr 2022 21:46:17 +0100 Subject: [PATCH 026/161] mesa: disable withValgrind if valgrind-light is marked as broken `valgrind` derivation is currently marked as broken in either: - `stdenv.isDarwin` - `stdenv.hostPlatform.isStatic` Instead of adding those two checks in the `mesa` derivation, we can just check the current `valgrind-light.meta.broken` attribute. --- pkgs/development/libraries/mesa/default.nix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkgs/development/libraries/mesa/default.nix b/pkgs/development/libraries/mesa/default.nix index f5dbb30d65fd..28e8c61bf736 100644 --- a/pkgs/development/libraries/mesa/default.nix +++ b/pkgs/development/libraries/mesa/default.nix @@ -9,7 +9,7 @@ , vulkanDrivers ? ["auto"] , eglPlatforms ? [ "x11" ] ++ lib.optionals stdenv.isLinux [ "wayland" ] , OpenGL, Xplugin -, withValgrind ? !stdenv.isDarwin && lib.meta.availableOn stdenv.hostPlatform valgrind-light, valgrind-light +, withValgrind ? lib.meta.availableOn stdenv.hostPlatform valgrind-light && !valgrind-light.meta.broken, valgrind-light , enableGalliumNine ? stdenv.isLinux , enableOSMesa ? stdenv.isLinux , enableOpenCL ? stdenv.isLinux && stdenv.isx86_64 From 06e48b1552f20408be3a76edc3b00a70a556c6bb Mon Sep 17 00:00:00 2001 From: Sergei Trofimovich Date: Thu, 21 Apr 2022 08:12:06 +0100 Subject: [PATCH 027/161] zkfuse: add a -std=c++14 workaround for gcc-11 Otherwise the build fails as: zkadapter.h:616:33: error: ISO C++17 does not allow dynamic exception specifications --- pkgs/tools/filesystems/zkfuse/default.nix | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/pkgs/tools/filesystems/zkfuse/default.nix b/pkgs/tools/filesystems/zkfuse/default.nix index 456444f3bf59..14938113a1e0 100644 --- a/pkgs/tools/filesystems/zkfuse/default.nix +++ b/pkgs/tools/filesystems/zkfuse/default.nix @@ -25,6 +25,10 @@ stdenv.mkDerivation rec { -e 's,"zookeeper\.h",,' ''; + # c++17 (gcc-11's default) breaks the build as: + # zkadapter.h:616:33: error: ISO C++17 does not allow dynamic exception specifications + NIX_CFLAGS_COMPILE = [ "-std=c++14" ]; + installPhase = '' mkdir -p $out/bin cp -v src/zkfuse $out/bin From 62905f6706648e84c3af6570ba2f03cc41dfab07 Mon Sep 17 00:00:00 2001 From: Thiago Kenji Okada Date: Tue, 19 Apr 2022 23:28:54 +0100 Subject: [PATCH 028/161] snes9x: init at 1.61 This is done by modifying the current `snes9x-gtk` derivation, including a new `withGtk` option that when enabled will build the `snes9x-gtk`, but when disabled will build `snes9x` instead. `snes9x` has less dependencies than `snes9x-gtk`, having only X11 as a major dependency and a minor one in ALSA. However, the only audio backend supported is ALSA, so audio is only available on Linux. Still, this has less dependencies than the GTK version, so it should be useful as a minimal version for cross-compilation cases or when porting to new devices. This also builds in macOS, however as I said before, shouldn't have any sound there. 
--- .../emulators/snes9x-gtk/default.nix | 37 ------ .../applications/emulators/snes9x/default.nix | 120 ++++++++++++++++++ pkgs/top-level/all-packages.nix | 6 +- 3 files changed, 125 insertions(+), 38 deletions(-) delete mode 100644 pkgs/applications/emulators/snes9x-gtk/default.nix create mode 100644 pkgs/applications/emulators/snes9x/default.nix diff --git a/pkgs/applications/emulators/snes9x-gtk/default.nix b/pkgs/applications/emulators/snes9x-gtk/default.nix deleted file mode 100644 index ff7df4b09440..000000000000 --- a/pkgs/applications/emulators/snes9x-gtk/default.nix +++ /dev/null @@ -1,37 +0,0 @@ -{ lib, stdenv, fetchFromGitHub, meson, ninja, pkg-config, wrapGAppsHook, alsa-lib -, SDL2, zlib, gtkmm3, libXv, libepoxy, minizip, pulseaudio, portaudio }: - -stdenv.mkDerivation rec { - pname = "snes9x-gtk"; - version = "1.61"; - - src = fetchFromGitHub { - owner = "snes9xgit"; - repo = "snes9x"; - rev = version; - fetchSubmodules = true; - sha256 = "1kay7aj30x0vn8rkylspdycydrzsc0aidjbs0dd238hr5hid723b"; - }; - - nativeBuildInputs = [ meson ninja pkg-config wrapGAppsHook ]; - buildInputs = [ alsa-lib SDL2 zlib gtkmm3 libXv libepoxy minizip pulseaudio portaudio ]; - - preConfigure = "cd gtk"; - - meta = with lib; { - homepage = "https://www.snes9x.com"; - description = "Super Nintendo Entertainment System (SNES) emulator"; - - longDescription = '' - Snes9x is a portable, freeware Super Nintendo Entertainment System (SNES) - emulator. It basically allows you to play most games designed for the SNES - and Super Famicom Nintendo game systems on your PC or Workstation; which - includes some real gems that were only ever released in Japan. - ''; - - # see https://github.com/snes9xgit/snes9x/blob/master/LICENSE for exact details - license = licenses.unfreeRedistributable; - maintainers = with maintainers; [ qknight xfix ]; - platforms = platforms.linux; - }; -} diff --git a/pkgs/applications/emulators/snes9x/default.nix b/pkgs/applications/emulators/snes9x/default.nix new file mode 100644 index 000000000000..f8a315b7753f --- /dev/null +++ b/pkgs/applications/emulators/snes9x/default.nix @@ -0,0 +1,120 @@ +{ lib +, stdenv +, alsa-lib +, fetchFromGitHub +, gtkmm3 +, libepoxy +, libpng +, libX11 +, libXv +, libXext +, libXinerama +, meson +, minizip +, ninja +, pkg-config +, portaudio +, pulseaudio +, SDL2 +, wrapGAppsHook +, zlib +, withGtk ? 
false +}: + +stdenv.mkDerivation rec { + pname = + if withGtk then + "snes9x-gtk" + else + "snes9x"; + version = "1.61"; + + src = fetchFromGitHub { + owner = "snes9xgit"; + repo = "snes9x"; + rev = version; + fetchSubmodules = true; + sha256 = "1kay7aj30x0vn8rkylspdycydrzsc0aidjbs0dd238hr5hid723b"; + }; + + nativeBuildInputs = [ + pkg-config + ] + ++ lib.optionals withGtk [ + meson + ninja + wrapGAppsHook + ]; + + buildInputs = [ + libX11 + libXext + libXv + minizip + zlib + ] + # on non-Linux platforms this will build without sound support on X11 build + ++ lib.optionals stdenv.isLinux [ + alsa-lib + pulseaudio + ] + ++ lib.optionals (!withGtk) [ + libpng + libXinerama + ] + ++ lib.optionals withGtk [ + gtkmm3 + libepoxy + portaudio + SDL2 + ]; + + configureFlags = + lib.optional stdenv.hostPlatform.sse4_1Support "--enable-sse41" + ++ lib.optional stdenv.hostPlatform.avx2Support "--enable-avx2" + ++ lib.optional stdenv.hostPlatform.isAarch64 "--enable-neon"; + + installPhase = lib.optionalString (!withGtk) '' + runHook preInstall + + install -Dm755 snes9x -t "$out/bin/" + install -Dm644 snes9x.conf.default -t "$out/share/doc/${pname}/" + install -Dm644 ../docs/{control-inputs,controls,snapshots}.txt -t \ + "$out/share/doc/${pname}/" + + runHook postInstall + ''; + + preConfigure = + if withGtk then + "cd gtk" + else + "cd unix"; + + enableParallelBuilding = true; + + meta = with lib; + let + interface = if withGtk then "GTK" else "X11"; + in + { + homepage = "https://www.snes9x.com"; + description = "Super Nintendo Entertainment System (SNES) emulator, ${interface} version"; + + longDescription = '' + Snes9x is a portable, freeware Super Nintendo Entertainment System (SNES) + emulator. It basically allows you to play most games designed for the SNES + and Super Famicom Nintendo game systems on your PC or Workstation; which + includes some real gems that were only ever released in Japan. + + Version build with ${interface} interface. 
+ ''; + + license = licenses.unfreeRedistributable // { + url = "https://github.com/snes9xgit/snes9x/blob/${version}/LICENSE"; + }; + maintainers = with maintainers; [ qknight xfix thiagokokada ]; + platforms = platforms.unix; + broken = (withGtk && stdenv.isDarwin); + }; +} diff --git a/pkgs/top-level/all-packages.nix b/pkgs/top-level/all-packages.nix index 789360728dfd..12c158f1e350 100644 --- a/pkgs/top-level/all-packages.nix +++ b/pkgs/top-level/all-packages.nix @@ -1338,7 +1338,11 @@ with pkgs; simplenes = callPackage ../applications/emulators/simplenes { }; - snes9x-gtk = callPackage ../applications/emulators/snes9x-gtk { }; + snes9x = callPackage ../applications/emulators/snes9x { }; + + snes9x-gtk = callPackage ../applications/emulators/snes9x { + withGtk = true; + }; stella = callPackage ../applications/emulators/stella { }; From d30e9aabf499077d4cc7b61c0aad4eb337266e8b Mon Sep 17 00:00:00 2001 From: Thiago Kenji Okada Date: Wed, 20 Apr 2022 15:38:57 +0100 Subject: [PATCH 029/161] snes9x: fix cross-compilation This is done by importing an old patch from Gentoo's Portage, and also rebuilding the `configure` script by using `autoreconfHook` --- .../applications/emulators/snes9x/default.nix | 24 +++++++++++++------ 1 file changed, 17 insertions(+), 7 deletions(-) diff --git a/pkgs/applications/emulators/snes9x/default.nix b/pkgs/applications/emulators/snes9x/default.nix index f8a315b7753f..779e4dab3b3d 100644 --- a/pkgs/applications/emulators/snes9x/default.nix +++ b/pkgs/applications/emulators/snes9x/default.nix @@ -1,7 +1,9 @@ { lib , stdenv , alsa-lib +, autoreconfHook , fetchFromGitHub +, fetchpatch , gtkmm3 , libepoxy , libpng @@ -37,9 +39,21 @@ stdenv.mkDerivation rec { sha256 = "1kay7aj30x0vn8rkylspdycydrzsc0aidjbs0dd238hr5hid723b"; }; + patches = [ + # Fix cross-compilation, otherwise it fails to detect host compiler features + # Doesn't affect non CC builds + (fetchpatch { + url = "https://mirror.its.dal.ca/gentoo-portage/games-emulation/snes9x/files/snes9x-1.53-cross-compile.patch"; + sha256 = "sha256-ZCmnprimz8PtDIXkB1dYD0oura9icW81yKvJ4coKaDg="; + }) + ]; + nativeBuildInputs = [ pkg-config ] + ++ lib.optionals (!withGtk) [ + autoreconfHook + ] ++ lib.optionals withGtk [ meson ninja @@ -71,8 +85,7 @@ stdenv.mkDerivation rec { configureFlags = lib.optional stdenv.hostPlatform.sse4_1Support "--enable-sse41" - ++ lib.optional stdenv.hostPlatform.avx2Support "--enable-avx2" - ++ lib.optional stdenv.hostPlatform.isAarch64 "--enable-neon"; + ++ lib.optional stdenv.hostPlatform.avx2Support "--enable-avx2"; installPhase = lib.optionalString (!withGtk) '' runHook preInstall @@ -85,11 +98,8 @@ stdenv.mkDerivation rec { runHook postInstall ''; - preConfigure = - if withGtk then - "cd gtk" - else - "cd unix"; + preAutoreconf = lib.optionalString (!withGtk) "cd unix"; + preConfigure = lib.optionalString withGtk "cd gtk"; enableParallelBuilding = true; From 2dc4427d358e6ca37934de16cd626bc4e4665e2a Mon Sep 17 00:00:00 2001 From: Bobby Rong Date: Thu, 21 Apr 2022 14:18:30 +0000 Subject: [PATCH 030/161] =?UTF-8?q?gnome-builder:=2042.0=20=E2=86=92=2042.?= =?UTF-8?q?1?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit https://gitlab.gnome.org/GNOME/gnome-builder/-/compare/42.0...42.1 --- .../editors/gnome-builder/default.nix | 15 ++------------- 1 file changed, 2 insertions(+), 13 deletions(-) diff --git a/pkgs/applications/editors/gnome-builder/default.nix b/pkgs/applications/editors/gnome-builder/default.nix index 8fd9f56a9d22..7cfde3a91f39 100644 
--- a/pkgs/applications/editors/gnome-builder/default.nix +++ b/pkgs/applications/editors/gnome-builder/default.nix @@ -5,7 +5,6 @@ , appstream-glib , desktop-file-utils , fetchurl -, fetchpatch , flatpak , gnome , libgit2-glib @@ -41,23 +40,15 @@ stdenv.mkDerivation rec { pname = "gnome-builder"; - version = "42.0"; + version = "42.1"; outputs = [ "out" "devdoc" ]; src = fetchurl { url = "mirror://gnome/sources/${pname}/${lib.versions.major version}/${pname}-${version}.tar.xz"; - sha256 = "Uu/SltaLL/GCNBwEgdz9cGVMQIvbZ5/Ot225cDwiQo8="; + sha256 = "XU1RtwKGW0gBcgHwxgfiSifXIDGo9ciNT86HW1VFZwo="; }; - patches = [ - # Fix appstream validation - (fetchpatch { - url = "https://gitlab.gnome.org/GNOME/gnome-builder/-/commit/d7151679e0c925d27216256dc32fe67fb298d059.patch"; - sha256 = "vdNJawkqSBaFGRZvxzvjOryQpBL4jcN7tr1t3ihD7LA="; - }) - ]; - nativeBuildInputs = [ appstream-glib desktop-file-utils @@ -115,8 +106,6 @@ stdenv.mkDerivation rec { "-Dnetwork_tests=false" ]; - # Some tests fail due to being unable to find the Vte typelib, and I don't - # understand why. Somebody should look into fixing this. doCheck = true; postPatch = '' From c0edac77b30c49e49e865d2ffe53cef4cd20c642 Mon Sep 17 00:00:00 2001 From: Armeen Mahdian Date: Thu, 21 Apr 2022 10:36:04 -0500 Subject: [PATCH 031/161] libpqxx_6: 6.4.5 -> 6.4.8 --- pkgs/development/libraries/libpqxx/6.nix | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/pkgs/development/libraries/libpqxx/6.nix b/pkgs/development/libraries/libpqxx/6.nix index feb9c7e65147..c9e55fd9c019 100644 --- a/pkgs/development/libraries/libpqxx/6.nix +++ b/pkgs/development/libraries/libpqxx/6.nix @@ -1,17 +1,17 @@ -{ lib, stdenv, fetchFromGitHub, postgresql, doxygen, xmlto, python2, gnused }: +{ lib, stdenv, fetchFromGitHub, postgresql, doxygen, xmlto, python3, gnused }: stdenv.mkDerivation rec { pname = "libpqxx"; - version = "6.4.5"; + version = "6.4.8"; src = fetchFromGitHub { owner = "jtv"; repo = pname; rev = version; - sha256 = "0djmjr2b5x5nd2a4idv5j8s6w0kdmvil910iv1kyc7x94dirbrni"; + hash = "sha256-ybnW9ip1QVadmbYLP+gvo49k9ExHfnsOhSnI6NjsAQk="; }; - nativeBuildInputs = [ gnused python2 ]; + nativeBuildInputs = [ gnused python3 ]; buildInputs = [ postgresql doxygen xmlto ]; preConfigure = '' From 7dc99549561bf92e5b8ace0cfaceea85a98a7661 Mon Sep 17 00:00:00 2001 From: Yaya Date: Thu, 21 Apr 2022 16:36:43 +0000 Subject: [PATCH 032/161] nextcloud: 22.2.6 -> 22.2.7, 23.0.3 -> 23.0.4 --- pkgs/servers/nextcloud/default.nix | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/pkgs/servers/nextcloud/default.nix b/pkgs/servers/nextcloud/default.nix index 92e8d6ff5d21..e71fb9db115e 100644 --- a/pkgs/servers/nextcloud/default.nix +++ b/pkgs/servers/nextcloud/default.nix @@ -46,13 +46,13 @@ in { ''; nextcloud22 = generic { - version = "22.2.6"; - sha256 = "0f1d0f0cb000c51b11886be25a8adce478846c3233572fcf28b44c5d4036e235"; + version = "22.2.7"; + sha256 = "5ada41cb3e69665e8a13946f71978829c0a0163d0277a49e599c9e8ccf960eab"; }; nextcloud23 = generic { - version = "23.0.3"; - sha256 = "39401d400fab02a84a175ea6e995b8ed4110fbaea48c876230b4f09755a62986"; + version = "23.0.4"; + sha256 = "67191c2b8b41591ae42accfb32216313fde0e107201682cb39029f890712bc6a"; }; # tip: get she sha with: # curl 'https://download.nextcloud.com/server/releases/nextcloud-${version}.tar.bz2.sha256' From 368ce6abb2a708c63e5577a80b328fa466224773 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fabi=C3=A1n=20Heredia=20Montiel?= Date: Thu, 21 Apr 2022 12:51:26 -0500 Subject: [PATCH 
033/161] =?UTF-8?q?kotlin:=201.6.20=20=E2=86=92=201.6.21?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit https://github.com/JetBrains/kotlin/releases/tag/v1.6.21 --- pkgs/development/compilers/kotlin/default.nix | 4 ++-- pkgs/development/compilers/kotlin/native.nix | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/pkgs/development/compilers/kotlin/default.nix b/pkgs/development/compilers/kotlin/default.nix index fe0bd29a1b27..5f493394f04e 100644 --- a/pkgs/development/compilers/kotlin/default.nix +++ b/pkgs/development/compilers/kotlin/default.nix @@ -2,11 +2,11 @@ stdenv.mkDerivation rec { pname = "kotlin"; - version = "1.6.20"; + version = "1.6.21"; src = fetchurl { url = "https://github.com/JetBrains/kotlin/releases/download/v${version}/kotlin-compiler-${version}.zip"; - hash = "sha256-2vF9scGU9CBfOvZxKTZ6abOI+BkXeWPcU6e0ssTYziI="; + hash = "sha256-YyFm/tifP0MEgvWqB/LiC5I7cu9ojI9affOqFQLG2Lo="; }; propagatedBuildInputs = [ jre ] ; diff --git a/pkgs/development/compilers/kotlin/native.nix b/pkgs/development/compilers/kotlin/native.nix index 0c0315d87667..4438a78608a6 100644 --- a/pkgs/development/compilers/kotlin/native.nix +++ b/pkgs/development/compilers/kotlin/native.nix @@ -7,7 +7,7 @@ stdenv.mkDerivation rec { pname = "kotlin-native"; - version = "1.6.20"; + version = "1.6.21"; src = let getArch = { @@ -20,9 +20,9 @@ stdenv.mkDerivation rec { "https://github.com/JetBrains/kotlin/releases/download/v${version}/kotlin-native-${arch}-${version}.tar.gz"; getHash = arch: { - "macos-aarch64" = "sha256-Nb/5UrNnIOJI+5PdXX4FfhUvCChrfUTkjaMS7nN/eGg="; - "macos-x86_64" = "sha256-cgET1zjk14/o3EH/1t7jfCfXHwcjfAANKG+yxpQccMc="; - "linux-x86_64" = "sha256-qbXyvCTGqRK0mCav6Ei128y1Ok1vfZ1o0haZ+MJjmBQ="; + "macos-aarch64" = "sha256-kkJvlDtK0Y+zeht+9fLX2HL2fyKOIyo0qYkJk+35tMU="; + "macos-x86_64" = "sha256-znTMO8h0pC6bkSUVYmxWPe4HVQPQw/VcJM11ckmG8CA="; + "linux-x86_64" = "sha256-r1H2riRLsZl5+65tw6/cp7rkJWjWoz8PozHt1mWmEfo="; }.${arch}; in fetchurl { From 38aa92b3d2e84645416c78405118e805460e893b Mon Sep 17 00:00:00 2001 From: Armeen Mahdian Date: Thu, 21 Apr 2022 17:00:58 -0500 Subject: [PATCH 034/161] ntdb: remove --- pkgs/development/libraries/ntdb/default.nix | 51 --------------------- pkgs/top-level/aliases.nix | 1 + pkgs/top-level/all-packages.nix | 2 - 3 files changed, 1 insertion(+), 53 deletions(-) delete mode 100644 pkgs/development/libraries/ntdb/default.nix diff --git a/pkgs/development/libraries/ntdb/default.nix b/pkgs/development/libraries/ntdb/default.nix deleted file mode 100644 index 021436516c22..000000000000 --- a/pkgs/development/libraries/ntdb/default.nix +++ /dev/null @@ -1,51 +0,0 @@ -{ lib, stdenv -, fetchurl -, python2 -, python3 -, pkg-config -, readline -, gettext -, libxslt -, docbook-xsl-nons -, docbook_xml_dtd_42 -, wafHook -}: - -stdenv.mkDerivation rec { - pname = "ntdb"; - version = "1.0"; - - src = fetchurl { - url = "mirror://samba/tdb/${pname}-${version}.tar.gz"; - sha256 = "0jdzgrz5sr25k83yrw7wqb3r0yj1v04z4s3lhsmnr5z6n5ifhyl1"; - }; - - nativeBuildInputs = [ - pkg-config - gettext - libxslt - docbook-xsl-nons - docbook_xml_dtd_42 - wafHook - python2 # For wafHook - ]; - - buildInputs = [ - python3 - readline # required to build python - ]; - - wafPath = "buildtools/bin/waf"; - - wafConfigureFlags = [ - "--bundled-libraries=NONE" - "--builtin-libraries=replace,ccan" - ]; - - meta = with lib; { - description = "The not-so trivial database"; - homepage = "https://tdb.samba.org/"; - license = 
licenses.lgpl3Plus; - platforms = platforms.all; - }; -} diff --git a/pkgs/top-level/aliases.nix b/pkgs/top-level/aliases.nix index db330743e50d..11ab169d4aff 100644 --- a/pkgs/top-level/aliases.nix +++ b/pkgs/top-level/aliases.nix @@ -867,6 +867,7 @@ mapAliases ({ noto-fonts-cjk = noto-fonts-cjk-sans; # Added 2021-12-16 nottetris2 = throw "nottetris2 was removed because it is unmaintained by upstream and broken"; # Added 2022-01-15 now-cli = throw "now-cli has been replaced with nodePackages.vercel"; # Added 2021-08-05 + ntdb = throw "ntdb has been removed: abandoned by upstream"; # Added 2022-04-21 nxproxy = throw "'nxproxy' has been renamed to/replaced by 'nx-libs'"; # Converted to throw 2022-02-22 ### O ### diff --git a/pkgs/top-level/all-packages.nix b/pkgs/top-level/all-packages.nix index 434d94509ab4..1f43a8dddc84 100644 --- a/pkgs/top-level/all-packages.nix +++ b/pkgs/top-level/all-packages.nix @@ -20698,8 +20698,6 @@ with pkgs; tclx = callPackage ../development/libraries/tclx { }; - ntdb = callPackage ../development/libraries/ntdb { }; - tdb = callPackage ../development/libraries/tdb {}; tdlib = callPackage ../development/libraries/tdlib { }; From 9407edc17d7bcccb20e1630eadbee094f03a6fae Mon Sep 17 00:00:00 2001 From: Armeen Mahdian Date: Thu, 21 Apr 2022 22:13:17 -0500 Subject: [PATCH 035/161] rabbitmq-server: switch to python3 --- pkgs/servers/amqp/rabbitmq-server/default.nix | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pkgs/servers/amqp/rabbitmq-server/default.nix b/pkgs/servers/amqp/rabbitmq-server/default.nix index 37e6f1fd2430..c59d53897fcb 100644 --- a/pkgs/servers/amqp/rabbitmq-server/default.nix +++ b/pkgs/servers/amqp/rabbitmq-server/default.nix @@ -3,7 +3,7 @@ , fetchurl , erlang , elixir -, python2 +, python3 , libxml2 , libxslt , xmlto @@ -35,8 +35,8 @@ stdenv.mkDerivation rec { sha256 = "sha256-c6GpB6CSCHiU9hTC9FkxyTc1UpNWxx5iP3y2dbTUfS0="; }; - nativeBuildInputs = [ unzip xmlto docbook_xml_dtd_45 docbook_xsl zip rsync ]; - buildInputs = [ erlang elixir python2 libxml2 libxslt glibcLocales ] + nativeBuildInputs = [ unzip xmlto docbook_xml_dtd_45 docbook_xsl zip rsync python3 ]; + buildInputs = [ erlang elixir libxml2 libxslt glibcLocales ] ++ lib.optionals stdenv.isDarwin [ AppKit Carbon Cocoa ]; outputs = [ "out" "man" "doc" ]; From efb1e8956939e5cdb18bf163d616efc329ab339a Mon Sep 17 00:00:00 2001 From: "R. 
Ryantm" Date: Fri, 22 Apr 2022 03:37:15 +0000 Subject: [PATCH 036/161] bash-preexec: 0.4.1 -> 0.5.0 --- pkgs/development/libraries/bash/bash-preexec/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/development/libraries/bash/bash-preexec/default.nix b/pkgs/development/libraries/bash/bash-preexec/default.nix index fe8f63c685c4..fcdf5619047b 100644 --- a/pkgs/development/libraries/bash/bash-preexec/default.nix +++ b/pkgs/development/libraries/bash/bash-preexec/default.nix @@ -1,6 +1,6 @@ { stdenvNoCC, lib, fetchFromGitHub, bats }: -let version = "0.4.1"; +let version = "0.5.0"; in stdenvNoCC.mkDerivation { pname = "bash-preexec"; inherit version; @@ -9,7 +9,7 @@ in stdenvNoCC.mkDerivation { owner = "rcaloras"; repo = "bash-preexec"; rev = version; - sha256 = "062iigps285628p710i7vh7kmgra5gahq9qiwj7rxir167lg0ggw"; + sha256 = "sha256-+FU5n7EkY78X5nUiW3WN9+6Bf6oiPjsG2MSRCleooFs="; }; checkInputs = [ bats ]; From cae857abfd10e526fe1f36c78b79c0af8c423779 Mon Sep 17 00:00:00 2001 From: Mario Rodas Date: Fri, 22 Apr 2022 04:20:00 +0000 Subject: [PATCH 037/161] zeek: 4.2.0 -> 4.2.1 https://github.com/zeek/zeek/releases/tag/v4.2.1 --- pkgs/applications/networking/ids/zeek/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/applications/networking/ids/zeek/default.nix b/pkgs/applications/networking/ids/zeek/default.nix index e91b517e8158..adbda434b422 100644 --- a/pkgs/applications/networking/ids/zeek/default.nix +++ b/pkgs/applications/networking/ids/zeek/default.nix @@ -21,11 +21,11 @@ stdenv.mkDerivation rec { pname = "zeek"; - version = "4.2.0"; + version = "4.2.1"; src = fetchurl { url = "https://download.zeek.org/zeek-${version}.tar.gz"; - sha256 = "sha256-jZoCjKn+x61KnkinY+KWBSOEz0AupM03FXe/8YPCdFE="; + sha256 = "sha256-axNImzBJTHxd2kU/xQmB5ZQ9ZxW2ybW3qFq7gLvm0RY="; }; nativeBuildInputs = [ From 4d68e1befb69d37d1218fa1a368b50b43cf0882e Mon Sep 17 00:00:00 2001 From: Mario Rodas Date: Fri, 22 Apr 2022 04:20:00 +0000 Subject: [PATCH 038/161] libvmaf: 2.3.0 -> 2.3.1 https://github.com/Netflix/vmaf/releases/tag/v2.3.1 --- pkgs/development/libraries/libvmaf/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/development/libraries/libvmaf/default.nix b/pkgs/development/libraries/libvmaf/default.nix index 03ca9b0d11b3..a58f816e8afa 100644 --- a/pkgs/development/libraries/libvmaf/default.nix +++ b/pkgs/development/libraries/libvmaf/default.nix @@ -2,13 +2,13 @@ stdenv.mkDerivation rec { pname = "libvmaf"; - version = "2.3.0"; + version = "2.3.1"; src = fetchFromGitHub { owner = "netflix"; repo = "vmaf"; rev = "v${version}"; - sha256 = "12mwl7vxc3xi0qar386mkhkpah9zzgjb74mzc2qqsgz9zzxp16dm"; + sha256 = "sha256-TkMy2tEdG1FPPWfH/wPnVbs5kocqe4Y0jU4yvbiRZ9k="; }; sourceRoot = "source/libvmaf"; From d748aedb2b86196e801e90aef58eda959a644aea Mon Sep 17 00:00:00 2001 From: Mario Rodas Date: Fri, 22 Apr 2022 04:20:00 +0000 Subject: [PATCH 039/161] watchexec: 1.17.1 -> 1.19.0 https://github.com/watchexec/watchexec/releases/tag/cli-v1.19.0 --- pkgs/tools/misc/watchexec/default.nix | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/pkgs/tools/misc/watchexec/default.nix b/pkgs/tools/misc/watchexec/default.nix index b9be5982f774..d54e906f347e 100644 --- a/pkgs/tools/misc/watchexec/default.nix +++ b/pkgs/tools/misc/watchexec/default.nix @@ -2,21 +2,23 @@ rustPlatform.buildRustPackage rec { pname = "watchexec"; - version = "1.17.1"; + version = "1.19.0"; src = fetchFromGitHub { owner = 
pname; repo = pname; rev = "cli-v${version}"; - sha256 = "13yqghdhakkwp607j84a1vbqgnqqn77a5mh27cr24352ik2vkrkq"; + sha256 = "sha256-Zqu6Qor7kHSeOFyHjcrl6RhB8gL9pljHt7hEd6/0Kss="; }; - cargoSha256 = "0grzfzxw705zs5qb2h7k0yws45m20ihhh4mnpmk3wargbxpn6gsh"; + cargoSha256 = "sha256-XwgoYaqgDkNggzi2TL/JPfh8LSFSzSWOVMbkmhXX73I="; nativeBuildInputs = [ installShellFiles ]; buildInputs = lib.optionals stdenv.isDarwin [ CoreServices Foundation libiconv ]; + checkFlags = [ "--skip=help" "--skip=help_short" ]; + postInstall = '' installManPage doc/watchexec.1 installShellCompletion --zsh --name _watchexec completions/zsh From 1d01a8d25c36da18bf34a3c337e3383eebce58a9 Mon Sep 17 00:00:00 2001 From: "R. Ryantm" Date: Fri, 22 Apr 2022 04:53:14 +0000 Subject: [PATCH 040/161] aliyun-cli: 3.0.116 -> 3.0.117 --- pkgs/tools/admin/aliyun-cli/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/tools/admin/aliyun-cli/default.nix b/pkgs/tools/admin/aliyun-cli/default.nix index 69900238c5e6..7747e46b26a5 100644 --- a/pkgs/tools/admin/aliyun-cli/default.nix +++ b/pkgs/tools/admin/aliyun-cli/default.nix @@ -2,14 +2,14 @@ buildGoModule rec { pname = "aliyun-cli"; - version = "3.0.116"; + version = "3.0.117"; src = fetchFromGitHub { rev = "v${version}"; owner = "aliyun"; repo = pname; fetchSubmodules = true; - sha256 = "sha256-KZZT7XVhJLfrQ7L3FFOTw9bLT5GqewvDTGQQd/ovjbg="; + sha256 = "sha256-iltyw2Qw7WSq96T/upGwSyjqWK8KOo/pK7HR+25n2js="; }; vendorSha256 = "sha256-f3GXkAvTe8rPFWCR5TM4mDK/VOQWt2lrZrfJ/Wvw8Uc="; From 261cb2e66f63ee55ccf14082ed85cc96887b610b Mon Sep 17 00:00:00 2001 From: "R. Ryantm" Date: Fri, 22 Apr 2022 05:47:29 +0000 Subject: [PATCH 041/161] argo: 3.3.1 -> 3.3.2 --- pkgs/applications/networking/cluster/argo/default.nix | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pkgs/applications/networking/cluster/argo/default.nix b/pkgs/applications/networking/cluster/argo/default.nix index de1310f55d74..1881009f3e2c 100644 --- a/pkgs/applications/networking/cluster/argo/default.nix +++ b/pkgs/applications/networking/cluster/argo/default.nix @@ -19,16 +19,16 @@ let in buildGoModule rec { pname = "argo"; - version = "3.3.1"; + version = "3.3.2"; src = fetchFromGitHub { owner = "argoproj"; repo = "argo"; rev = "v${version}"; - sha256 = "sha256-IcKueb/bxPNwJ9bZWwmz4ywCtZGk0PSuH3KYDMp6Qd4="; + sha256 = "sha256-tl1UpoXBuIyJyMLHeIhQ6EHG1XqAGE6Tw5jU6rW+DXc="; }; - vendorSha256 = "sha256-YeSeaYOkNRjQgxsK9G7iPbVpfrPs4HRRFwfoUDxoCm0="; + vendorSha256 = "sha256-cq452XEGMVbLvfJ/UiVyOvnUSJr196owB3SyBYnAmZ0="; doCheck = false; From 26d74675fa8df3637fd674ca7b289a3bba50fd7a Mon Sep 17 00:00:00 2001 From: Fabian Affolter Date: Fri, 22 Apr 2022 08:10:18 +0200 Subject: [PATCH 042/161] python3Packages.net2grid: remove --- .../python-modules/net2grid/default.nix | 58 ------------------- pkgs/top-level/python-aliases.nix | 1 + pkgs/top-level/python-packages.nix | 2 - 3 files changed, 1 insertion(+), 60 deletions(-) delete mode 100644 pkgs/development/python-modules/net2grid/default.nix diff --git a/pkgs/development/python-modules/net2grid/default.nix b/pkgs/development/python-modules/net2grid/default.nix deleted file mode 100644 index ef03d45ab6b8..000000000000 --- a/pkgs/development/python-modules/net2grid/default.nix +++ /dev/null @@ -1,58 +0,0 @@ -{ lib -, aiohttp -, aresponses -, buildPythonPackage -, fetchFromGitHub -, poetry-core -, pytest-asyncio -, pytestCheckHook -, pythonOlder -, yarl -}: - -buildPythonPackage rec { - pname = "net2grid"; - version = "4.0.0"; - format 
= "pyproject"; - - disabled = pythonOlder "3.9"; - - src = fetchFromGitHub { - owner = "klaasnicolaas"; - repo = "python-net2grid"; - rev = "v${version}"; - hash = "sha256-Ihs8qUx50tAUcRBsVArRhzoLcQUi1vbYh8sPyK75AEk="; - }; - - nativeBuildInputs = [ - poetry-core - ]; - - propagatedBuildInputs = [ - aiohttp - yarl - ]; - - checkInputs = [ - aresponses - pytest-asyncio - pytestCheckHook - ]; - - postPatch = '' - substituteInPlace pyproject.toml \ - --replace '"0.0.0"' '"${version}"' \ - --replace 'addopts = "--cov"' "" - ''; - - pythonImportsCheck = [ - "net2grid" - ]; - - meta = with lib; { - description = "Module for interacting with NET2GRID devices"; - homepage = "https://github.com/klaasnicolaas/python-net2grid"; - license = with licenses; [ mit ]; - maintainers = with maintainers; [ fab ]; - }; -} diff --git a/pkgs/top-level/python-aliases.nix b/pkgs/top-level/python-aliases.nix index 85c79d04d333..9d912ee5e601 100644 --- a/pkgs/top-level/python-aliases.nix +++ b/pkgs/top-level/python-aliases.nix @@ -79,6 +79,7 @@ mapAliases ({ lammps-cython = throw "lammps-cython no longer builds and is unmaintained"; # added 2021-07-04 Markups = markups; # added 2022-02-14 MechanicalSoup = mechanicalsoup; # added 2021-06-01 + net2grid = gridnet; # add 2022-04-22 nose-cover3 = throw "nose-cover3 has been removed, it was using setuptools 2to3 translation feature, which has been removed in setuptools 58"; # added 2022-02-16 pam = python-pam; # added 2020-09-07. PasteDeploy = pastedeploy; # added 2021-10-07 diff --git a/pkgs/top-level/python-packages.nix b/pkgs/top-level/python-packages.nix index 0ed2d95217e4..c450d16d3ada 100644 --- a/pkgs/top-level/python-packages.nix +++ b/pkgs/top-level/python-packages.nix @@ -5535,8 +5535,6 @@ in { nestedtext = callPackage ../development/python-modules/nestedtext { }; - net2grid = callPackage ../development/python-modules/net2grid { }; - netaddr = callPackage ../development/python-modules/netaddr { }; netcdf4 = callPackage ../development/python-modules/netcdf4 { }; From 9277e0002ae9e99d2cd4ae1452ce4ed3f4e97e92 Mon Sep 17 00:00:00 2001 From: Florian Date: Fri, 22 Apr 2022 09:03:26 +0200 Subject: [PATCH 043/161] Update pkgs/development/python-modules/sentry-sdk/default.nix Co-authored-by: Sandro --- pkgs/development/python-modules/sentry-sdk/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/development/python-modules/sentry-sdk/default.nix b/pkgs/development/python-modules/sentry-sdk/default.nix index 8a450023d1f3..513e0a0acc4f 100644 --- a/pkgs/development/python-modules/sentry-sdk/default.nix +++ b/pkgs/development/python-modules/sentry-sdk/default.nix @@ -137,10 +137,10 @@ buildPythonPackage rec { "tests/integrations/pyramid/test_pyramid.py" ] # test crashes on aarch64 - ++ (if stdenv.buildPlatform != "x86_64-linux" then [ + ++ lib.optionals (stdenv.buildPlatform != "x86_64-linux") [ "tests/test_transport.py" "tests/integrations/threading/test_threading.py" - ] else [ ]); + ]; pythonImportsCheck = [ "sentry_sdk" From 2c5cd9eddaedf4ee71da216c632d2260c1e332c0 Mon Sep 17 00:00:00 2001 From: "R. 
Ryantm" Date: Fri, 22 Apr 2022 07:35:05 +0000 Subject: [PATCH 044/161] gtksourceview5: 5.4.0 -> 5.4.1 --- pkgs/development/libraries/gtksourceview/5.x.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/development/libraries/gtksourceview/5.x.nix b/pkgs/development/libraries/gtksourceview/5.x.nix index 81c0abd61b6d..29d85980accc 100644 --- a/pkgs/development/libraries/gtksourceview/5.x.nix +++ b/pkgs/development/libraries/gtksourceview/5.x.nix @@ -23,13 +23,13 @@ stdenv.mkDerivation rec { pname = "gtksourceview"; - version = "5.4.0"; + version = "5.4.1"; outputs = [ "out" "dev" "devdoc" ]; src = fetchurl { url = "mirror://gnome/sources/${pname}/${lib.versions.majorMinor version}/${pname}-${version}.tar.xz"; - sha256 = "ADvCF+ZwqOyKo67OmUtw5wt9a4B0k4rdohcYVV2E5jc="; + sha256 = "6zWECZz6CtyaCx7eCN72MgvQmeeedKLQrvtAV82T1o4="; }; patches = [ From 36b0854076004c7801b6ebfcc0b175d6666dd0c7 Mon Sep 17 00:00:00 2001 From: inty Date: Thu, 21 Apr 2022 20:09:41 +0000 Subject: [PATCH 045/161] rawtherapee: Add glibc-2.34 malloc error patch FOR MAINTAINERS: This patch will have to be removed once rawtherapee-5.9 is packaged. Fixes the following rtengine.dir/myfile.cc.o build error: <..>-glibc-2.34-115-dev/include/wchar.h:582:24: error: 'malloc' attribute argument 1 is ambiguous 582 | __attribute_malloc__ __attr_dealloc_fclose; For the upstream discussion, see Beep6581/RawTherapee#6324. --- .../graphics/rawtherapee/default.nix | 2 + .../graphics/rawtherapee/fix-6324.patch | 356 ++++++++++++++++++ 2 files changed, 358 insertions(+) create mode 100644 pkgs/applications/graphics/rawtherapee/fix-6324.patch diff --git a/pkgs/applications/graphics/rawtherapee/default.nix b/pkgs/applications/graphics/rawtherapee/default.nix index 7d1acc1dcd69..000d92f3b052 100644 --- a/pkgs/applications/graphics/rawtherapee/default.nix +++ b/pkgs/applications/graphics/rawtherapee/default.nix @@ -16,6 +16,8 @@ stdenv.mkDerivation rec { nativeBuildInputs = [ cmake pkg-config wrapGAppsHook ]; + patches = [ ./fix-6324.patch ]; + buildInputs = [ pixman libpthreadstubs gtkmm3 libXau libXdmcp lcms2 libiptcdata libcanberra-gtk3 fftw expat pcre libsigcxx lensfun librsvg diff --git a/pkgs/applications/graphics/rawtherapee/fix-6324.patch b/pkgs/applications/graphics/rawtherapee/fix-6324.patch new file mode 100644 index 000000000000..fa324c2c9389 --- /dev/null +++ b/pkgs/applications/graphics/rawtherapee/fix-6324.patch @@ -0,0 +1,356 @@ +See: + https://github.com/Beep6581/RawTherapee/issues/6324 + https://github.com/Beep6581/RawTherapee/commit/2e0137d54243eb729d4a5f939c4320ec8f8f415d + +diff --git a/rtengine/canon_cr3_decoder.cc b/rtengine/canon_cr3_decoder.cc +index 6274154cb..98c743dad 100644 +--- a/rtengine/canon_cr3_decoder.cc ++++ b/rtengine/canon_cr3_decoder.cc +@@ -662,7 +662,7 @@ std::uint32_t _byteswap_ulong(std::uint32_t x) + #endif + + struct LibRaw_abstract_datastream { +- IMFILE* ifp; ++ rtengine::IMFILE* ifp; + + void lock() + { +diff --git a/rtengine/dcraw.cc b/rtengine/dcraw.cc +index 812f122b3..5da696af2 100644 +--- a/rtengine/dcraw.cc ++++ b/rtengine/dcraw.cc +@@ -2025,7 +2025,7 @@ void CLASS phase_one_load_raw_c() + #endif + { + int len[2], pred[2]; +- IMFILE ifpthr = *ifp; ++ rtengine::IMFILE ifpthr = *ifp; + ifpthr.plistener = nullptr; + + #ifdef _OPENMP +@@ -3380,7 +3380,7 @@ void CLASS sony_arw2_load_raw() + { + uchar *data = new (std::nothrow) uchar[raw_width + 1]; + merror(data, "sony_arw2_load_raw()"); +- IMFILE ifpthr = *ifp; ++ rtengine::IMFILE ifpthr = *ifp; + int pos = ifpthr.pos; + 
ushort pix[16]; + +@@ -6394,7 +6394,7 @@ int CLASS parse_tiff_ifd (int base) + unsigned sony_curve[] = { 0,0,0,0,0,4095 }; + unsigned *buf, sony_offset=0, sony_length=0, sony_key=0; + struct jhead jh; +-/*RT*/ IMFILE *sfp; ++/*RT*/ rtengine::IMFILE *sfp; + /*RT*/ int pana_raw = 0; + + if (tiff_nifds >= sizeof tiff_ifd / sizeof tiff_ifd[0]) +@@ -6958,7 +6958,7 @@ it under the terms of the one of two licenses as you choose: + fread (buf, sony_length, 1, ifp); + sony_decrypt (buf, sony_length/4, 1, sony_key); + sfp = ifp; +-/*RT*/ ifp = fopen (buf, sony_length); ++/*RT*/ ifp = rtengine::fopen (buf, sony_length); + // if ((ifp = tmpfile())) { + // fwrite (buf, sony_length, 1, ifp); + // fseek (ifp, 0, SEEK_SET); +@@ -7264,7 +7264,7 @@ void CLASS parse_external_jpeg() + { + const char *file, *ext; + char *jname, *jfile, *jext; +-/*RT*/ IMFILE *save=ifp; ++/*RT*/ rtengine::IMFILE *save=ifp; + + ext = strrchr (ifname, '.'); + file = strrchr (ifname, '/'); +@@ -7292,7 +7292,7 @@ void CLASS parse_external_jpeg() + *jext = '0'; + } + if (strcmp (jname, ifname)) { +-/*RT*/ if ((ifp = fopen (jname))) { ++/*RT*/ if ((ifp = rtengine::fopen (jname))) { + // if ((ifp = fopen (jname, "rb"))) { + if (verbose) + fprintf (stderr,_("Reading metadata from %s ...\n"), jname); +diff --git a/rtengine/dcraw.h b/rtengine/dcraw.h +index 89c1fcaff..f25157088 100644 +--- a/rtengine/dcraw.h ++++ b/rtengine/dcraw.h +@@ -73,7 +73,7 @@ public: + + protected: + int exif_base, ciff_base, ciff_len; +- IMFILE *ifp; ++ rtengine::IMFILE *ifp; + FILE *ofp; + short order; + const char *ifname; +@@ -125,7 +125,7 @@ protected: + int cur_buf_size; // buffer size + uchar *cur_buf; // currently read block + int fillbytes; // Counter to add extra byte for block size N*16 +- IMFILE *input; ++ rtengine::IMFILE *input; + struct int_pair grad_even[3][41]; // tables of gradients + struct int_pair grad_odd[3][41]; + ushort *linealloc; +@@ -278,7 +278,7 @@ void parse_redcine(); + class getbithuff_t + { + public: +- getbithuff_t(DCraw *p,IMFILE *&i, unsigned &z):parent(p),bitbuf(0),vbits(0),reset(0),ifp(i),zero_after_ff(z){} ++ getbithuff_t(DCraw *p,rtengine::IMFILE *&i, unsigned &z):parent(p),bitbuf(0),vbits(0),reset(0),ifp(i),zero_after_ff(z){} + unsigned operator()(int nbits, ushort *huff); + + private: +@@ -288,7 +288,7 @@ private: + DCraw *parent; + unsigned bitbuf; + int vbits, reset; +- IMFILE *&ifp; ++ rtengine::IMFILE *&ifp; + unsigned &zero_after_ff; + }; + getbithuff_t getbithuff; +@@ -296,7 +296,7 @@ getbithuff_t getbithuff; + class nikbithuff_t + { + public: +- explicit nikbithuff_t(IMFILE *&i):bitbuf(0),errors(0),vbits(0),ifp(i){} ++ explicit nikbithuff_t(rtengine::IMFILE *&i):bitbuf(0),errors(0),vbits(0),ifp(i){} + void operator()() {bitbuf = vbits = 0;}; + unsigned operator()(int nbits, ushort *huff); + unsigned errorCount() { return errors; } +@@ -309,7 +309,7 @@ private: + } + unsigned bitbuf, errors; + int vbits; +- IMFILE *&ifp; ++ rtengine::IMFILE *&ifp; + }; + nikbithuff_t nikbithuff; + +@@ -378,7 +378,7 @@ void parse_qt (int end); + // ph1_bithuff(int nbits, ushort *huff); + class ph1_bithuff_t { + public: +- ph1_bithuff_t(DCraw *p, IMFILE *i, short &o):order(o),ifp(i),bitbuf(0),vbits(0){} ++ ph1_bithuff_t(DCraw *p, rtengine::IMFILE *i, short &o):order(o),ifp(i),bitbuf(0),vbits(0){} + unsigned operator()(int nbits, ushort *huff); + unsigned operator()(int nbits); + unsigned operator()(); +@@ -412,7 +412,7 @@ private: + } + + short ℴ +- IMFILE* const ifp; ++ rtengine::IMFILE* const ifp; + UINT64 bitbuf; + int vbits; + }; 
+@@ -430,11 +430,11 @@ void nokia_load_raw(); + + class pana_bits_t{ + public: +- pana_bits_t(IMFILE *i, unsigned &u, unsigned enc): ++ pana_bits_t(rtengine::IMFILE *i, unsigned &u, unsigned enc): + ifp(i), load_flags(u), vbits(0), encoding(enc) {} + unsigned operator()(int nbits, unsigned *bytes=nullptr); + private: +- IMFILE *ifp; ++ rtengine::IMFILE *ifp; + unsigned &load_flags; + uchar buf[0x4000]; + int vbits; +diff --git a/rtengine/dfmanager.cc b/rtengine/dfmanager.cc +index 1fb1d2e1b..951df2248 100644 +--- a/rtengine/dfmanager.cc ++++ b/rtengine/dfmanager.cc +@@ -540,7 +540,7 @@ std::vector *DFManager::getHotPixels ( const std::string &mak, const std + + int DFManager::scanBadPixelsFile( Glib::ustring filename ) + { +- FILE *file = fopen( filename.c_str(), "r" ); ++ FILE *file = ::fopen( filename.c_str(), "r" ); + + if( !file ) { + return false; +diff --git a/rtengine/myfile.cc b/rtengine/myfile.cc +index 842766dcf..2321d18bb 100644 +--- a/rtengine/myfile.cc ++++ b/rtengine/myfile.cc +@@ -70,7 +70,7 @@ int munmap(void *start, size_t length) + + #ifdef MYFILE_MMAP + +-IMFILE* fopen (const char* fname) ++rtengine::IMFILE* rtengine::fopen (const char* fname) + { + int fd; + +@@ -123,13 +123,13 @@ IMFILE* fopen (const char* fname) + return mf; + } + +-IMFILE* gfopen (const char* fname) ++rtengine::IMFILE* rtengine::gfopen (const char* fname) + { + return fopen(fname); + } + #else + +-IMFILE* fopen (const char* fname) ++rtengine::IMFILE* rtengine::fopen (const char* fname) + { + + FILE* f = g_fopen (fname, "rb"); +@@ -152,7 +152,7 @@ IMFILE* fopen (const char* fname) + return mf; + } + +-IMFILE* gfopen (const char* fname) ++rtengine::IMFILE* rtengine::gfopen (const char* fname) + { + + FILE* f = g_fopen (fname, "rb"); +@@ -176,7 +176,7 @@ IMFILE* gfopen (const char* fname) + } + #endif //MYFILE_MMAP + +-IMFILE* fopen (unsigned* buf, int size) ++rtengine::IMFILE* rtengine::fopen (unsigned* buf, int size) + { + + IMFILE* mf = new IMFILE; +@@ -190,7 +190,7 @@ IMFILE* fopen (unsigned* buf, int size) + return mf; + } + +-void fclose (IMFILE* f) ++void rtengine::fclose (IMFILE* f) + { + #ifdef MYFILE_MMAP + +@@ -207,7 +207,7 @@ void fclose (IMFILE* f) + delete f; + } + +-int fscanf (IMFILE* f, const char* s ...) ++int rtengine::fscanf (IMFILE* f, const char* s ...) + { + // fscanf not easily wrapped since we have no terminating \0 at end + // of file data and vsscanf() won't tell us how many characters that +@@ -253,7 +253,7 @@ int fscanf (IMFILE* f, const char* s ...) 
+ } + + +-char* fgets (char* s, int n, IMFILE* f) ++char* rtengine::fgets (char* s, int n, IMFILE* f) + { + + if (f->pos >= f->size) { +@@ -270,7 +270,7 @@ char* fgets (char* s, int n, IMFILE* f) + return s; + } + +-void imfile_set_plistener(IMFILE *f, rtengine::ProgressListener *plistener, double progress_range) ++void rtengine::imfile_set_plistener(IMFILE *f, rtengine::ProgressListener *plistener, double progress_range) + { + f->plistener = plistener; + f->progress_range = progress_range; +@@ -278,7 +278,7 @@ void imfile_set_plistener(IMFILE *f, rtengine::ProgressListener *plistener, doub + f->progress_current = 0; + } + +-void imfile_update_progress(IMFILE *f) ++void rtengine::imfile_update_progress(IMFILE *f) + { + if (!f->plistener || f->progress_current < f->progress_next) { + return; +diff --git a/rtengine/myfile.h b/rtengine/myfile.h +index 423edea9a..c655696e6 100644 +--- a/rtengine/myfile.h ++++ b/rtengine/myfile.h +@@ -30,8 +30,6 @@ namespace rtengine + + class ProgressListener; + +-} +- + struct IMFILE { + int fd; + ssize_t pos; +@@ -141,3 +139,5 @@ inline unsigned char* fdata(int offset, IMFILE* f) + + int fscanf (IMFILE* f, const char* s ...); + char* fgets (char* s, int n, IMFILE* f); ++ ++} +diff --git a/rtengine/rtthumbnail.cc b/rtengine/rtthumbnail.cc +index 9da601e2a..097b9e711 100644 +--- a/rtengine/rtthumbnail.cc ++++ b/rtengine/rtthumbnail.cc +@@ -1922,7 +1922,7 @@ bool Thumbnail::writeImage (const Glib::ustring& fname) + + Glib::ustring fullFName = fname + ".rtti"; + +- FILE* f = g_fopen (fullFName.c_str (), "wb"); ++ FILE* f = ::g_fopen (fullFName.c_str (), "wb"); + + if (!f) { + return false; +@@ -1965,7 +1965,7 @@ bool Thumbnail::readImage (const Glib::ustring& fname) + return false; + } + +- FILE* f = g_fopen(fullFName.c_str (), "rb"); ++ FILE* f = ::g_fopen(fullFName.c_str (), "rb"); + + if (!f) { + return false; +@@ -2191,7 +2191,7 @@ bool Thumbnail::writeData (const Glib::ustring& fname) + return false; + } + +- FILE *f = g_fopen (fname.c_str (), "wt"); ++ FILE *f = ::g_fopen (fname.c_str (), "wt"); + + if (!f) { + if (settings->verbose) { +@@ -2214,7 +2214,7 @@ bool Thumbnail::readEmbProfile (const Glib::ustring& fname) + embProfile = nullptr; + embProfileLength = 0; + +- FILE* f = g_fopen (fname.c_str (), "rb"); ++ FILE* f = ::g_fopen (fname.c_str (), "rb"); + + if (f) { + if (!fseek (f, 0, SEEK_END)) { +@@ -2242,7 +2242,7 @@ bool Thumbnail::writeEmbProfile (const Glib::ustring& fname) + { + + if (embProfileData) { +- FILE* f = g_fopen (fname.c_str (), "wb"); ++ FILE* f = ::g_fopen (fname.c_str (), "wb"); + + if (f) { + fwrite (embProfileData, 1, embProfileLength, f); +@@ -2257,7 +2257,7 @@ bool Thumbnail::writeEmbProfile (const Glib::ustring& fname) + bool Thumbnail::readAEHistogram (const Glib::ustring& fname) + { + +- FILE* f = g_fopen(fname.c_str(), "rb"); ++ FILE* f = ::g_fopen(fname.c_str(), "rb"); + + if (!f) { + aeHistogram.reset(); +@@ -2280,7 +2280,7 @@ bool Thumbnail::writeAEHistogram (const Glib::ustring& fname) + { + + if (aeHistogram) { +- FILE* f = g_fopen (fname.c_str (), "wb"); ++ FILE* f = ::g_fopen (fname.c_str (), "wb"); + + if (f) { + fwrite (&aeHistogram[0], 1, (65536 >> aeHistCompression)*sizeof (aeHistogram[0]), f); From 3230e673105caec3b1f402e77d8041383237bad8 Mon Sep 17 00:00:00 2001 From: "R. 
Ryantm" Date: Fri, 22 Apr 2022 09:20:54 +0000 Subject: [PATCH 046/161] clickhouse-backup: 1.3.1 -> 1.3.2 --- .../tools/database/clickhouse-backup/default.nix | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pkgs/development/tools/database/clickhouse-backup/default.nix b/pkgs/development/tools/database/clickhouse-backup/default.nix index fdc15a9256dd..a55ed8a3b650 100644 --- a/pkgs/development/tools/database/clickhouse-backup/default.nix +++ b/pkgs/development/tools/database/clickhouse-backup/default.nix @@ -2,16 +2,16 @@ buildGoModule rec { pname = "clickhouse-backup"; - version = "1.3.1"; + version = "1.3.2"; src = fetchFromGitHub { owner = "AlexAkulov"; repo = pname; rev = "v${version}"; - sha256 = "sha256-M9fJFdwNyNOolXFknzEPG7pNDVrqKv/WOQZUHmr8B84="; + sha256 = "sha256-dpVFDLDEqqW1u1afb3klpdqwOptudbjUfoFhFBc85Pg="; }; - vendorSha256 = "sha256-d6h0LK4zbrfkUum7FXHIP+hqBx5A0mQmvW5GOi+EMVQ="; + vendorSha256 = "sha256-wj4N146iqj/YwyBI0XdrvBp1tqeK43Yq4kSpN594hRs="; postConfigure = '' export CGO_ENABLED=0 From c7f09c80bef6f7d02fe72edf7055974532566150 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sandro=20J=C3=A4ckel?= Date: Fri, 22 Apr 2022 11:45:14 +0200 Subject: [PATCH 047/161] git-extras: 6.3.0 -> 6.4.0 --- .../version-management/git-and-tools/git-extras/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/applications/version-management/git-and-tools/git-extras/default.nix b/pkgs/applications/version-management/git-and-tools/git-extras/default.nix index 2037122bf277..f0932fde2d5e 100644 --- a/pkgs/applications/version-management/git-and-tools/git-extras/default.nix +++ b/pkgs/applications/version-management/git-and-tools/git-extras/default.nix @@ -2,13 +2,13 @@ stdenv.mkDerivation rec { pname = "git-extras"; - version = "6.3.0"; + version = "6.4.0"; src = fetchFromGitHub { owner = "tj"; repo = "git-extras"; rev = version; - sha256 = "sha256-mmvDsK+SgBXQSKNKuPt+K4sgtdrtqPx9Df2E3kKLdJM="; + sha256 = "sha256-Cn7IXMzgg0QIsNIHz+X14Gkmop0UbsSBlGlGkmg71ek="; }; postPatch = '' From 84b547cac06c89b960b5d6fe7404ef5768318db8 Mon Sep 17 00:00:00 2001 From: "R. 
Ryantm" Date: Fri, 22 Apr 2022 09:59:15 +0000 Subject: [PATCH 048/161] calcurse: 4.7.1 -> 4.8.0 --- pkgs/applications/misc/calcurse/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/applications/misc/calcurse/default.nix b/pkgs/applications/misc/calcurse/default.nix index 837fcc5c27d6..401a14e69a7a 100644 --- a/pkgs/applications/misc/calcurse/default.nix +++ b/pkgs/applications/misc/calcurse/default.nix @@ -2,11 +2,11 @@ stdenv.mkDerivation rec { pname = "calcurse"; - version = "4.7.1"; + version = "4.8.0"; src = fetchurl { url = "https://calcurse.org/files/${pname}-${version}.tar.gz"; - sha256 = "sha256-CnxV0HZ0Vp0WbAsOdYeyly09qBYM231gsdvSiVgDr7A="; + sha256 = "sha256-SKc2ZmzEtrUwEtc7OqcBUsGLQebHtIB/qw8WjWRa4yw="; }; buildInputs = [ ncurses gettext python3 python3Packages.wrapPython ]; From a9fdf69f9fe255f6ec659165ad3cf7a1cdec76c1 Mon Sep 17 00:00:00 2001 From: Sybrand Aarnoutse Date: Fri, 22 Apr 2022 12:34:23 +0200 Subject: [PATCH 049/161] gitoxide: 0.10.0 -> 0.12.0 --- pkgs/applications/version-management/gitoxide/default.nix | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pkgs/applications/version-management/gitoxide/default.nix b/pkgs/applications/version-management/gitoxide/default.nix index 570a4c06abf9..5b1bdeb22b45 100644 --- a/pkgs/applications/version-management/gitoxide/default.nix +++ b/pkgs/applications/version-management/gitoxide/default.nix @@ -12,16 +12,16 @@ rustPlatform.buildRustPackage rec { pname = "gitoxide"; - version = "0.10.0"; + version = "0.12.0"; src = fetchFromGitHub { owner = "Byron"; repo = "gitoxide"; rev = "v${version}"; - sha256 = "sha256-c29gmmkIOyS+HNq2kv53yq+sdEDmQbSmcvVGcd55/hk="; + sha256 = "sha256-hDNlnNGm9of6Yu9WRVTRH5g4fAXlUxAexdufbZ0vMOo="; }; - cargoSha256 = "sha256-oc7XpiOZj4bfqdwrEHj/CzNtWzYWFkgMJOySJNgxAGQ="; + cargoSha256 = "sha256-026DFEWu7PTvhJZP7YW3KOBOkzFRoxrc+THilit87jU="; nativeBuildInputs = [ cmake pkg-config ]; buildInputs = if stdenv.isDarwin From 54d69052305ed313000a7903b0880854d1e6f4bf Mon Sep 17 00:00:00 2001 From: superherointj <5861043+superherointj@users.noreply.github.com> Date: Fri, 22 Apr 2022 08:02:10 -0300 Subject: [PATCH 050/161] fluxcd: 0.29.1 -> 0.29.3 --- pkgs/applications/networking/cluster/fluxcd/default.nix | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/pkgs/applications/networking/cluster/fluxcd/default.nix b/pkgs/applications/networking/cluster/fluxcd/default.nix index 5dc342247421..586272b18461 100644 --- a/pkgs/applications/networking/cluster/fluxcd/default.nix +++ b/pkgs/applications/networking/cluster/fluxcd/default.nix @@ -1,9 +1,9 @@ { lib, buildGoModule, fetchFromGitHub, fetchzip, installShellFiles }: let - version = "0.29.1"; - sha256 = "0sqavlylkbcgk9yfblh85rfi3lxv23rlcrq8cw1ikn832gncg7dk"; - manifestsSha256 = "1ph0qbjc1l3h5n0myza3dw71x30iva34nx6f7vfiy8fpapcf1qbi"; + version = "0.29.3"; + sha256 = "02qdczd8x6dl6gsyjyrmga9i67shm54xixckxvva1lhl33zz5wwm"; + manifestsSha256 = "0ar73dwz5j19qwcp85cd87gx0kwm7ys4xcyk91gj88s5i3djd9sl"; manifests = fetchzip { url = @@ -23,7 +23,7 @@ in buildGoModule rec { inherit sha256; }; - vendorSha256 = "sha256-v208yuWWPzLxrbkgXikE2PiFv9NXNEG4K1zrLHyaleI="; + vendorSha256 = "sha256-VWTtHquq/8/nKBGLuIdg2xkpGz1ofhPlQf3NTaQaHBI="; postUnpack = '' cp -r ${manifests} source/cmd/flux/manifests From a79a8d95d13b6fa9ebef707a489b5b53192050ff Mon Sep 17 00:00:00 2001 From: "R. 
Ryantm" Date: Fri, 22 Apr 2022 12:22:29 +0000 Subject: [PATCH 051/161] dnstake: 0.1.0 -> 0.1.1 --- pkgs/tools/networking/dnstake/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/tools/networking/dnstake/default.nix b/pkgs/tools/networking/dnstake/default.nix index 9ddf853b7d9b..e1719d19ecfb 100644 --- a/pkgs/tools/networking/dnstake/default.nix +++ b/pkgs/tools/networking/dnstake/default.nix @@ -5,13 +5,13 @@ buildGoModule rec { pname = "dnstake"; - version = "0.1.0"; + version = "0.1.1"; src = fetchFromGitHub { owner = "pwnesia"; repo = pname; rev = "v${version}"; - sha256 = "sha256-XfZDRu1UrH5nVh1GQCQVaEamKorWSOxQZs556iDqfS8="; + sha256 = "sha256-k6j7DIwK8YAKmEjn8JJO7XBcap9ui6cgUSJG7CeHAAM="; }; vendorSha256 = "sha256-l3IKvcO10C+PVDX962tFWny7eMNC48ATIVqiHjpVH/Y="; From 0ddbd36e06dce61645798a352fb6ebb489cd5a4b Mon Sep 17 00:00:00 2001 From: "R. Ryantm" Date: Fri, 22 Apr 2022 12:35:13 +0000 Subject: [PATCH 052/161] bitwig-studio: 4.2.2 -> 4.2.3 --- pkgs/applications/audio/bitwig-studio/bitwig-studio4.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/applications/audio/bitwig-studio/bitwig-studio4.nix b/pkgs/applications/audio/bitwig-studio/bitwig-studio4.nix index 10cbbeb21de0..2f177c2f8c63 100644 --- a/pkgs/applications/audio/bitwig-studio/bitwig-studio4.nix +++ b/pkgs/applications/audio/bitwig-studio/bitwig-studio4.nix @@ -6,11 +6,11 @@ stdenv.mkDerivation rec { pname = "bitwig-studio"; - version = "4.2.2"; + version = "4.2.3"; src = fetchurl { url = "https://downloads.bitwig.com/stable/${version}/${pname}-${version}.deb"; - sha256 = "sha256-cpEV0EWW9vd2ZE+RaqN9fhyy7axgPlx4PmlOeX3TSfY="; + sha256 = "sha256-UCafrjrEwwHkhPum7sTOjtXzy7PNeK5/aeKg+b3CGJU="; }; nativeBuildInputs = [ dpkg makeWrapper wrapGAppsHook ]; From 250ef1ff392b938415726a9331a30012573efaea Mon Sep 17 00:00:00 2001 From: Artturin Date: Wed, 20 Apr 2022 16:31:36 +0300 Subject: [PATCH 053/161] testers.testVersion: move from trivial-builders.nix we will have more testers in the future so they should have their own location putting 'testers' in args will also make it simpler to use multiple testers --- pkgs/applications/misc/hello/default.nix | 3 +- pkgs/build-support/testers/default.nix | 39 +++++++++++++++++++++++- pkgs/build-support/trivial-builders.nix | 37 ---------------------- pkgs/top-level/aliases.nix | 1 + 4 files changed, 40 insertions(+), 40 deletions(-) diff --git a/pkgs/applications/misc/hello/default.nix b/pkgs/applications/misc/hello/default.nix index ecb789282173..60482a84c9b4 100644 --- a/pkgs/applications/misc/hello/default.nix +++ b/pkgs/applications/misc/hello/default.nix @@ -2,7 +2,6 @@ , stdenv , fetchurl , nixos -, testVersion , testers , hello }: @@ -19,7 +18,7 @@ stdenv.mkDerivation rec { doCheck = true; passthru.tests = { - version = testVersion { package = hello; }; + version = testers.testVersion { package = hello; }; invariant-under-noXlibs = testers.testEqualDerivation diff --git a/pkgs/build-support/testers/default.nix b/pkgs/build-support/testers/default.nix index 1d1effa37306..b63ba5742b2b 100644 --- a/pkgs/build-support/testers/default.nix +++ b/pkgs/build-support/testers/default.nix @@ -1,4 +1,41 @@ -{ pkgs, lib, callPackage }: +{ pkgs, lib, callPackage, runCommand }: { testEqualDerivation = callPackage ./test-equal-derivation.nix { }; + + /* Checks the command output contains the specified version + * + * Although simplistic, this test assures that the main program + * can run. 
While there's no substitute for a real test case, + * it does catch dynamic linking errors and such. It also provides + * some protection against accidentally building the wrong version, + * for example when using an 'old' hash in a fixed-output derivation. + * + * Examples: + * + * passthru.tests.version = testVersion { package = hello; }; + * + * passthru.tests.version = testVersion { + * package = seaweedfs; + * command = "weed version"; + * }; + * + * passthru.tests.version = testVersion { + * package = key; + * command = "KeY --help"; + * # Wrong '2.5' version in the code. Drop on next version. + * version = "2.5"; + * }; + */ + testVersion = + { package, + command ? "${package.meta.mainProgram or package.pname or package.name} --version", + version ? package.version, + }: runCommand "${package.name}-test-version" { nativeBuildInputs = [ package ]; meta.timeout = 60; } '' + if output=$(${command} 2>&1); then + grep -Fw "${version}" - <<< "$output" + touch $out + else + echo "$output" >&2 && exit 1 + fi + ''; } diff --git a/pkgs/build-support/trivial-builders.nix b/pkgs/build-support/trivial-builders.nix index 1f9543f808e6..bd14971fe78b 100644 --- a/pkgs/build-support/trivial-builders.nix +++ b/pkgs/build-support/trivial-builders.nix @@ -784,41 +784,4 @@ rec { outputHash = "0sjjj9z1dhilhpc8pq4154czrb79z9cm044jvn75kxcjv6v5l2m5"; preferLocalBuild = true; } "mkdir $out"; - - /* Checks the command output contains the specified version - * - * Although simplistic, this test assures that the main program - * can run. While there's no substitute for a real test case, - * it does catch dynamic linking errors and such. It also provides - * some protection against accidentally building the wrong version, - * for example when using an 'old' hash in a fixed-output derivation. - * - * Examples: - * - * passthru.tests.version = testVersion { package = hello; }; - * - * passthru.tests.version = testVersion { - * package = seaweedfs; - * command = "weed version"; - * }; - * - * passthru.tests.version = testVersion { - * package = key; - * command = "KeY --help"; - * # Wrong '2.5' version in the code. Drop on next version. - * version = "2.5"; - * }; - */ - testVersion = - { package, - command ? "${package.meta.mainProgram or package.pname or package.name} --version", - version ? 
package.version, - }: runCommand "${package.name}-test-version" { nativeBuildInputs = [ package ]; meta.timeout = 60; } '' - if output=$(${command} 2>&1); then - grep -Fw "${version}" - <<< "$output" - touch $out - else - echo "$output" >&2 && exit 1 - fi - ''; } diff --git a/pkgs/top-level/aliases.nix b/pkgs/top-level/aliases.nix index db330743e50d..28f2ed64ccaa 100644 --- a/pkgs/top-level/aliases.nix +++ b/pkgs/top-level/aliases.nix @@ -1253,6 +1253,7 @@ mapAliases ({ terraform_1_0 = throw "terraform_1_0 has been renamed to terraform_1"; # Added 2021-12-08 terraform_1_0_0 = throw "terraform_1_0_0 has been renamed to terraform_1"; # Added 2021-06-15 tesseract_4 = throw "'tesseract_4' has been renamed to/replaced by 'tesseract4'"; # Converted to throw 2022-02-22 + testVersion = testers.testVersion; # Added 2022-04-20 tex-gyre-bonum-math = throw "'tex-gyre-bonum-math' has been renamed to/replaced by 'tex-gyre-math.bonum'"; # Converted to throw 2022-02-22 tex-gyre-pagella-math = throw "'tex-gyre-pagella-math' has been renamed to/replaced by 'tex-gyre-math.pagella'"; # Converted to throw 2022-02-22 tex-gyre-schola-math = throw "'tex-gyre-schola-math' has been renamed to/replaced by 'tex-gyre-math.schola'"; # Converted to throw 2022-02-22 From f1c7f19e49190cf58041908f9c194431e2513688 Mon Sep 17 00:00:00 2001 From: Artturin Date: Wed, 20 Apr 2022 17:40:48 +0300 Subject: [PATCH 054/161] treewide: testVersion -> testers.testVersion --- pkgs/applications/audio/furnace/default.nix | 4 ++-- pkgs/applications/audio/whipper/default.nix | 4 ++-- pkgs/applications/graphics/ImageMagick/7.0.nix | 4 ++-- pkgs/applications/misc/dunst/default.nix | 4 ++-- pkgs/applications/misc/k4dirstat/default.nix | 4 ++-- pkgs/applications/misc/sigi/default.nix | 4 ++-- pkgs/applications/misc/zola/default.nix | 4 ++-- pkgs/applications/networking/browsers/palemoon/default.nix | 4 ++-- pkgs/applications/networking/cluster/ocm/default.nix | 4 ++-- pkgs/applications/networking/cluster/odo/default.nix | 4 ++-- pkgs/applications/networking/cluster/openshift/default.nix | 4 ++-- .../applications/networking/mailreaders/notmuch/default.nix | 4 ++-- pkgs/applications/networking/seaweedfs/default.nix | 4 ++-- pkgs/applications/science/logic/key/default.nix | 5 ++--- .../git-and-tools/git-machete/default.nix | 2 +- .../version-management/git-and-tools/scmpuff/default.nix | 4 ++-- pkgs/applications/version-management/git-sizer/default.nix | 4 ++-- pkgs/applications/version-management/jujutsu/default.nix | 4 ++-- pkgs/applications/video/handbrake/default.nix | 4 ++-- pkgs/applications/virtualization/podman-tui/default.nix | 4 ++-- pkgs/development/compilers/go-jsonnet/default.nix | 4 ++-- pkgs/development/compilers/uasm/default.nix | 4 ++-- pkgs/development/python-modules/mutmut/default.nix | 4 ++-- pkgs/development/python-modules/staticjinja/default.nix | 4 ++-- pkgs/development/quickemu/default.nix | 4 ++-- pkgs/development/tools/amazon-qldb-shell/default.nix | 4 ++-- pkgs/development/tools/bingo/default.nix | 4 ++-- pkgs/development/tools/buf/default.nix | 4 ++-- pkgs/development/tools/fq/default.nix | 4 ++-- pkgs/development/tools/gojq/default.nix | 4 ++-- pkgs/development/tools/kube-linter/default.nix | 4 ++-- pkgs/development/tools/quick-lint-js/default.nix | 4 ++-- pkgs/development/tools/selenium/chromedriver/default.nix | 4 ++-- pkgs/development/tools/sq/default.nix | 4 ++-- pkgs/development/tools/yarn/default.nix | 4 ++-- pkgs/games/lgogdownloader/default.nix | 4 ++-- pkgs/games/opensupaplex/default.nix | 4 ++-- 
pkgs/games/warzone2100/default.nix | 4 ++-- pkgs/servers/monitoring/prometheus/promscale/default.nix | 4 ++-- pkgs/servers/stayrtr/default.nix | 4 ++-- pkgs/tools/admin/colmena/default.nix | 4 ++-- pkgs/tools/archivers/7zz/default.nix | 4 ++-- pkgs/tools/backup/discordchatexporter-cli/default.nix | 4 ++-- pkgs/tools/compression/pxz/default.nix | 4 ++-- pkgs/tools/filesystems/garage/default.nix | 4 ++-- pkgs/tools/misc/adrgen/default.nix | 4 ++-- pkgs/tools/misc/arch-install-scripts/default.nix | 1 - pkgs/tools/misc/czkawka/default.nix | 4 ++-- pkgs/tools/misc/datefmt/default.nix | 4 ++-- pkgs/tools/misc/dsq/default.nix | 4 ++-- pkgs/tools/misc/gummy/default.nix | 4 ++-- pkgs/tools/misc/lsd/default.nix | 4 ++-- pkgs/tools/misc/zellij/default.nix | 4 ++-- pkgs/tools/networking/clash/default.nix | 4 ++-- pkgs/tools/networking/curlie/default.nix | 4 ++-- pkgs/tools/networking/innernet/default.nix | 6 +++--- pkgs/tools/networking/pmacct/default.nix | 4 ++-- pkgs/tools/networking/smartdns/default.nix | 4 ++-- pkgs/tools/nix/alejandra/default.nix | 4 ++-- pkgs/tools/system/s-tui/default.nix | 4 ++-- pkgs/tools/text/crowdin-cli/default.nix | 4 ++-- pkgs/tools/text/difftastic/default.nix | 4 ++-- pkgs/tools/text/igrep/default.nix | 4 ++-- pkgs/tools/text/mdbook-linkcheck/default.nix | 4 ++-- 64 files changed, 126 insertions(+), 128 deletions(-) diff --git a/pkgs/applications/audio/furnace/default.nix b/pkgs/applications/audio/furnace/default.nix index 841a65e541fc..115c5b7767d2 100644 --- a/pkgs/applications/audio/furnace/default.nix +++ b/pkgs/applications/audio/furnace/default.nix @@ -1,7 +1,7 @@ { stdenv , lib , gitUpdater -, testVersion +, testers , furnace , fetchFromGitHub , cmake @@ -83,7 +83,7 @@ stdenv.mkDerivation rec { inherit pname version; rev-prefix = "v"; }; - tests.version = testVersion { + tests.version = testers.testVersion { package = furnace; # The command always exits with code 1 command = "(furnace --version || [ $? 
-eq 1 ])"; diff --git a/pkgs/applications/audio/whipper/default.nix b/pkgs/applications/audio/whipper/default.nix index 7b76d91acf81..f06907ad32f2 100644 --- a/pkgs/applications/audio/whipper/default.nix +++ b/pkgs/applications/audio/whipper/default.nix @@ -8,7 +8,7 @@ , flac , sox , util-linux -, testVersion +, testers , whipper }: @@ -74,7 +74,7 @@ in python3.pkgs.buildPythonApplication rec { runHook postCheck ''; - passthru.tests.version = testVersion { + passthru.tests.version = testers.testVersion { package = whipper; command = "HOME=$TMPDIR whipper --version"; }; diff --git a/pkgs/applications/graphics/ImageMagick/7.0.nix b/pkgs/applications/graphics/ImageMagick/7.0.nix index 48008c369333..74a60857a6a3 100644 --- a/pkgs/applications/graphics/ImageMagick/7.0.nix +++ b/pkgs/applications/graphics/ImageMagick/7.0.nix @@ -29,7 +29,7 @@ , curl , ApplicationServices , Foundation -, testVersion +, testers , imagemagick }: @@ -120,7 +120,7 @@ stdenv.mkDerivation rec { ''; passthru.tests.version = - testVersion { package = imagemagick; }; + testers.testVersion { package = imagemagick; }; meta = with lib; { homepage = "http://www.imagemagick.org/"; diff --git a/pkgs/applications/misc/dunst/default.nix b/pkgs/applications/misc/dunst/default.nix index 116d5430c2a8..6afd9ee62420 100644 --- a/pkgs/applications/misc/dunst/default.nix +++ b/pkgs/applications/misc/dunst/default.nix @@ -3,7 +3,7 @@ , cairo, dbus, systemd, gdk-pixbuf, glib, libX11, libXScrnSaver , wayland, wayland-protocols , libXinerama, libnotify, pango, xorgproto, librsvg -, testVersion, dunst +, testers, dunst }: stdenv.mkDerivation rec { @@ -40,7 +40,7 @@ stdenv.mkDerivation rec { --set GDK_PIXBUF_MODULE_FILE "$GDK_PIXBUF_MODULE_FILE" ''; - passthru.tests.version = testVersion { package = dunst; }; + passthru.tests.version = testers.testVersion { package = dunst; }; meta = with lib; { description = "Lightweight and customizable notification daemon"; diff --git a/pkgs/applications/misc/k4dirstat/default.nix b/pkgs/applications/misc/k4dirstat/default.nix index e2e43ae09971..552a63240d51 100644 --- a/pkgs/applications/misc/k4dirstat/default.nix +++ b/pkgs/applications/misc/k4dirstat/default.nix @@ -7,7 +7,7 @@ , kjobwidgets , kxmlgui , lib -, testVersion +, testers , k4dirstat }: @@ -26,7 +26,7 @@ mkDerivation rec { buildInputs = [ kiconthemes kio kjobwidgets kxmlgui ]; passthru.tests.version = - testVersion { + testers.testVersion { package = k4dirstat; command = "k4dirstat -platform offscreen --version &>/dev/stdout"; }; diff --git a/pkgs/applications/misc/sigi/default.nix b/pkgs/applications/misc/sigi/default.nix index 2513476ad74e..ff74d8b098f3 100644 --- a/pkgs/applications/misc/sigi/default.nix +++ b/pkgs/applications/misc/sigi/default.nix @@ -1,4 +1,4 @@ -{ lib, rustPlatform, fetchCrate, installShellFiles, testVersion, sigi }: +{ lib, rustPlatform, fetchCrate, installShellFiles, testers, sigi }: rustPlatform.buildRustPackage rec { pname = "sigi"; @@ -19,7 +19,7 @@ rustPlatform.buildRustPackage rec { installManPage sigi.1 ''; - passthru.tests.version = testVersion { package = sigi; }; + passthru.tests.version = testers.testVersion { package = sigi; }; meta = with lib; { description = "Organizing CLI for people who don't love organizing."; diff --git a/pkgs/applications/misc/zola/default.nix b/pkgs/applications/misc/zola/default.nix index f6ed2daf8ec7..24511096f1d4 100644 --- a/pkgs/applications/misc/zola/default.nix +++ b/pkgs/applications/misc/zola/default.nix @@ -10,7 +10,7 @@ , installShellFiles , libsass , zola -, 
testVersion +, testers }: rustPlatform.buildRustPackage rec { @@ -48,7 +48,7 @@ rustPlatform.buildRustPackage rec { --bash completions/zola.bash ''; - passthru.tests.version = testVersion { package = zola; }; + passthru.tests.version = testers.testVersion { package = zola; }; meta = with lib; { description = "A fast static site generator with everything built-in"; diff --git a/pkgs/applications/networking/browsers/palemoon/default.nix b/pkgs/applications/networking/browsers/palemoon/default.nix index 70442e47e66b..eacc66271d5a 100644 --- a/pkgs/applications/networking/browsers/palemoon/default.nix +++ b/pkgs/applications/networking/browsers/palemoon/default.nix @@ -31,7 +31,7 @@ , zip , zlib , withGTK3 ? true, gtk3, gtk2 -, testVersion +, testers , palemoon }: @@ -211,7 +211,7 @@ stdenv.mkDerivation rec { )" update-source-version ${pname} "$version" ''; - tests.version = testVersion { + tests.version = testers.testVersion { package = palemoon; }; }; diff --git a/pkgs/applications/networking/cluster/ocm/default.nix b/pkgs/applications/networking/cluster/ocm/default.nix index 1bacd8510dc4..73a5d964f34b 100644 --- a/pkgs/applications/networking/cluster/ocm/default.nix +++ b/pkgs/applications/networking/cluster/ocm/default.nix @@ -1,4 +1,4 @@ -{ lib, buildGoModule, fetchFromGitHub, testVersion, ocm }: +{ lib, buildGoModule, fetchFromGitHub, testers, ocm }: buildGoModule rec { pname = "ocm"; @@ -18,7 +18,7 @@ buildGoModule rec { ln -s $GOPATH/bin/ocm ocm ''; - passthru.tests.version = testVersion { + passthru.tests.version = testers.testVersion { package = ocm; command = "ocm version"; }; diff --git a/pkgs/applications/networking/cluster/odo/default.nix b/pkgs/applications/networking/cluster/odo/default.nix index be85981f7bf4..fb1888d62358 100644 --- a/pkgs/applications/networking/cluster/odo/default.nix +++ b/pkgs/applications/networking/cluster/odo/default.nix @@ -1,4 +1,4 @@ -{ lib, buildGoModule, fetchFromGitHub, testVersion, odo }: +{ lib, buildGoModule, fetchFromGitHub, testers, odo }: buildGoModule rec { pname = "odo"; @@ -22,7 +22,7 @@ buildGoModule rec { cp -a odo $out/bin ''; - passthru.tests.version = testVersion { + passthru.tests.version = testers.testVersion { package = odo; command = "odo version"; version = "v${version}"; diff --git a/pkgs/applications/networking/cluster/openshift/default.nix b/pkgs/applications/networking/cluster/openshift/default.nix index aade6c549f31..bab73c41f5f5 100644 --- a/pkgs/applications/networking/cluster/openshift/default.nix +++ b/pkgs/applications/networking/cluster/openshift/default.nix @@ -4,7 +4,7 @@ , libkrb5 , git , installShellFiles -, testVersion +, testers , openshift }: @@ -52,7 +52,7 @@ buildGoModule rec { installShellCompletion --zsh contrib/completions/zsh/* ''; - passthru.tests.version = testVersion { + passthru.tests.version = testers.testVersion { package = openshift; command = "oc version"; version = "v${version}"; diff --git a/pkgs/applications/networking/mailreaders/notmuch/default.nix b/pkgs/applications/networking/mailreaders/notmuch/default.nix index e89216802e50..18e09326f35f 100644 --- a/pkgs/applications/networking/mailreaders/notmuch/default.nix +++ b/pkgs/applications/networking/mailreaders/notmuch/default.nix @@ -6,7 +6,7 @@ , pythonPackages , emacs , ruby -, testVersion +, testers , which, dtach, openssl, bash, gdb, man , withEmacs ? true , withRuby ? 
true @@ -102,7 +102,7 @@ stdenv.mkDerivation rec { passthru = { pythonSourceRoot = "notmuch-${version}/bindings/python"; - tests.version = testVersion { package = notmuch; }; + tests.version = testers.testVersion { package = notmuch; }; inherit version; }; diff --git a/pkgs/applications/networking/seaweedfs/default.nix b/pkgs/applications/networking/seaweedfs/default.nix index cbabc6b10c72..bd6df5ef6bee 100644 --- a/pkgs/applications/networking/seaweedfs/default.nix +++ b/pkgs/applications/networking/seaweedfs/default.nix @@ -1,7 +1,7 @@ { lib , fetchFromGitHub , buildGoModule -, testVersion +, testers , seaweedfs }: @@ -21,7 +21,7 @@ buildGoModule rec { subPackages = [ "weed" ]; passthru.tests.version = - testVersion { package = seaweedfs; command = "weed version"; }; + testers.testVersion { package = seaweedfs; command = "weed version"; }; meta = with lib; { description = "Simple and highly scalable distributed file system"; diff --git a/pkgs/applications/science/logic/key/default.nix b/pkgs/applications/science/logic/key/default.nix index 85a7ecb08c39..aee1a9c63f8f 100644 --- a/pkgs/applications/science/logic/key/default.nix +++ b/pkgs/applications/science/logic/key/default.nix @@ -7,7 +7,7 @@ , makeWrapper , makeDesktopItem , copyDesktopItems -, testVersion +, testers , key }: @@ -98,7 +98,7 @@ in stdenv.mkDerivation rec { ''; passthru.tests.version = - testVersion { + testers.testVersion { package = key; command = "KeY --help"; }; @@ -118,4 +118,3 @@ in stdenv.mkDerivation rec { platforms = platforms.all; }; } - diff --git a/pkgs/applications/version-management/git-and-tools/git-machete/default.nix b/pkgs/applications/version-management/git-and-tools/git-machete/default.nix index 573f69fef007..bed96493e513 100644 --- a/pkgs/applications/version-management/git-and-tools/git-machete/default.nix +++ b/pkgs/applications/version-management/git-and-tools/git-machete/default.nix @@ -4,7 +4,7 @@ , installShellFiles , git , nix-update-script -, testVersion +, testers , git-machete }: diff --git a/pkgs/applications/version-management/git-and-tools/scmpuff/default.nix b/pkgs/applications/version-management/git-and-tools/scmpuff/default.nix index 3396718edbc2..65ca1b476d4d 100644 --- a/pkgs/applications/version-management/git-and-tools/scmpuff/default.nix +++ b/pkgs/applications/version-management/git-and-tools/scmpuff/default.nix @@ -1,4 +1,4 @@ -{ lib, buildGoModule, fetchFromGitHub, testVersion, scmpuff }: +{ lib, buildGoModule, fetchFromGitHub, testers, scmpuff }: buildGoModule rec { pname = "scmpuff"; @@ -15,7 +15,7 @@ buildGoModule rec { ldflags = [ "-s" "-w" "-X main.VERSION=${version}" ]; - passthru.tests.version = testVersion { + passthru.tests.version = testers.testVersion { package = scmpuff; command = "scmpuff version"; }; diff --git a/pkgs/applications/version-management/git-sizer/default.nix b/pkgs/applications/version-management/git-sizer/default.nix index daabe71bebb7..ed7239b80cb3 100644 --- a/pkgs/applications/version-management/git-sizer/default.nix +++ b/pkgs/applications/version-management/git-sizer/default.nix @@ -1,4 +1,4 @@ -{ lib, buildGoModule, fetchFromGitHub, testVersion, git-sizer }: +{ lib, buildGoModule, fetchFromGitHub, testers, git-sizer }: buildGoModule rec { pname = "git-sizer"; @@ -17,7 +17,7 @@ buildGoModule rec { doCheck = false; - passthru.tests.vesion = testVersion { + passthru.tests.vesion = testers.testVersion { package = git-sizer; }; diff --git a/pkgs/applications/version-management/jujutsu/default.nix 
b/pkgs/applications/version-management/jujutsu/default.nix index 30ea7e1524a9..c99bb409c3e9 100644 --- a/pkgs/applications/version-management/jujutsu/default.nix +++ b/pkgs/applications/version-management/jujutsu/default.nix @@ -9,7 +9,7 @@ , Security , SystemConfiguration , libiconv -, testVersion +, testers , jujutsu }: @@ -44,7 +44,7 @@ rustPlatform.buildRustPackage rec { ]; passthru.tests = { - version = testVersion { + version = testers.testVersion { package = jujutsu; command = "jj --version"; }; diff --git a/pkgs/applications/video/handbrake/default.nix b/pkgs/applications/video/handbrake/default.nix index 841604399ed3..3c73e7f59944 100644 --- a/pkgs/applications/video/handbrake/default.nix +++ b/pkgs/applications/video/handbrake/default.nix @@ -11,7 +11,7 @@ , lib , fetchFromGitHub # For tests -, testVersion +, testers , runCommand , fetchurl # Main build tools @@ -230,7 +230,7 @@ let self = stdenv.mkDerivation rec { HandBrakeCLI -i ${testMkv} -o test.mkv -e x264 -q 20 -B 160 test -e test.mkv ''; - version = testVersion { package = self; command = "HandBrakeCLI --version"; }; + version = testers.testVersion { package = self; command = "HandBrakeCLI --version"; }; }; meta = with lib; { diff --git a/pkgs/applications/virtualization/podman-tui/default.nix b/pkgs/applications/virtualization/podman-tui/default.nix index 17272acd86eb..6feeb17256a9 100644 --- a/pkgs/applications/virtualization/podman-tui/default.nix +++ b/pkgs/applications/virtualization/podman-tui/default.nix @@ -6,7 +6,7 @@ , gpgme , libassuan , lvm2 -, testVersion +, testers , podman-tui }: buildGoModule rec { @@ -33,7 +33,7 @@ buildGoModule rec { ldflags = [ "-s" "-w" ]; - passthru.tests.version = testVersion { + passthru.tests.version = testers.testVersion { package = podman-tui; command = "podman-tui version"; version = "v${version}"; diff --git a/pkgs/development/compilers/go-jsonnet/default.nix b/pkgs/development/compilers/go-jsonnet/default.nix index 25bedd397b2f..3c4d90ccb054 100644 --- a/pkgs/development/compilers/go-jsonnet/default.nix +++ b/pkgs/development/compilers/go-jsonnet/default.nix @@ -1,4 +1,4 @@ -{ lib, buildGoModule, fetchFromGitHub, testVersion }: +{ lib, buildGoModule, fetchFromGitHub, testers }: let self = buildGoModule rec { pname = "go-jsonnet"; @@ -17,7 +17,7 @@ let self = buildGoModule rec { subPackages = [ "cmd/jsonnet*" ]; - passthru.tests.version = testVersion { + passthru.tests.version = testers.testVersion { package = self; version = "v${version}"; }; diff --git a/pkgs/development/compilers/uasm/default.nix b/pkgs/development/compilers/uasm/default.nix index 7356175c87c4..2c8d6eb21aeb 100644 --- a/pkgs/development/compilers/uasm/default.nix +++ b/pkgs/development/compilers/uasm/default.nix @@ -2,7 +2,7 @@ , stdenv , fetchFromGitHub , fetchpatch -, testVersion +, testers , uasm }: @@ -47,7 +47,7 @@ stdenv.mkDerivation rec { runHook postInstall ''; - passthru.tests.version = testVersion { + passthru.tests.version = testers.testVersion { package = uasm; command = "uasm -h"; version = "v${version}"; diff --git a/pkgs/development/python-modules/mutmut/default.nix b/pkgs/development/python-modules/mutmut/default.nix index ae0f06213a87..626c768f2a81 100644 --- a/pkgs/development/python-modules/mutmut/default.nix +++ b/pkgs/development/python-modules/mutmut/default.nix @@ -7,7 +7,7 @@ , pony , junit-xml , pythonOlder -, testVersion +, testers }: let self = buildPythonApplication rec { @@ -31,7 +31,7 @@ let self = buildPythonApplication rec { propagatedBuildInputs = [ click glob2 parso pony 
junit-xml ]; - passthru.tests.version = testVersion { package = self; }; + passthru.tests.version = testers.testVersion { package = self; }; meta = with lib; { description = "mutation testing system for Python, with a strong focus on ease of use"; diff --git a/pkgs/development/python-modules/staticjinja/default.nix b/pkgs/development/python-modules/staticjinja/default.nix index ac3c173c09a5..0c1a698aa63c 100644 --- a/pkgs/development/python-modules/staticjinja/default.nix +++ b/pkgs/development/python-modules/staticjinja/default.nix @@ -9,7 +9,7 @@ , pytest-check , pythonOlder , markdown -, testVersion +, testers , tomlkit , staticjinja , callPackage @@ -53,7 +53,7 @@ buildPythonPackage rec { ''; passthru.tests = { - version = testVersion { package = staticjinja; }; + version = testers.testVersion { package = staticjinja; }; minimal-template = callPackage ./test-minimal-template {}; }; diff --git a/pkgs/development/quickemu/default.nix b/pkgs/development/quickemu/default.nix index 7aa5b1602c38..0a0dfb004f13 100644 --- a/pkgs/development/quickemu/default.nix +++ b/pkgs/development/quickemu/default.nix @@ -19,7 +19,7 @@ , zsync , OVMF , quickemu -, testVersion +, testers }: let runtimePaths = [ @@ -73,7 +73,7 @@ stdenv.mkDerivation rec { runHook postInstall ''; - passthru.tests = testVersion { package = quickemu; }; + passthru.tests = testers.testVersion { package = quickemu; }; meta = with lib; { description = "Quickly create and run optimised Windows, macOS and Linux desktop virtual machines"; diff --git a/pkgs/development/tools/amazon-qldb-shell/default.nix b/pkgs/development/tools/amazon-qldb-shell/default.nix index 6f6ef1ca8af7..d017c213415c 100644 --- a/pkgs/development/tools/amazon-qldb-shell/default.nix +++ b/pkgs/development/tools/amazon-qldb-shell/default.nix @@ -4,7 +4,7 @@ , fetchFromGitHub , llvmPackages , rustPlatform -, testVersion +, testers }: let @@ -27,7 +27,7 @@ let LIBCLANG_PATH = "${llvmPackages.libclang.lib}/lib"; - passthru.tests.version = testVersion { inherit package; }; + passthru.tests.version = testers.testVersion { inherit package; }; meta = with lib; { description = "An interface to send PartiQL statements to Amazon Quantum Ledger Database (QLDB)"; diff --git a/pkgs/development/tools/bingo/default.nix b/pkgs/development/tools/bingo/default.nix index 8fe1dbaa4bea..6e7ed39a221c 100644 --- a/pkgs/development/tools/bingo/default.nix +++ b/pkgs/development/tools/bingo/default.nix @@ -1,4 +1,4 @@ -{ lib, buildGoModule, fetchFromGitHub, testVersion, bingo }: +{ lib, buildGoModule, fetchFromGitHub, testers, bingo }: buildGoModule rec { pname = "bingo"; @@ -26,7 +26,7 @@ buildGoModule rec { ldflags = [ "-s" "-w" ]; - passthru.tests.version = testVersion { + passthru.tests.version = testers.testVersion { package = bingo; command = "bingo version"; version = "v${version}"; diff --git a/pkgs/development/tools/buf/default.nix b/pkgs/development/tools/buf/default.nix index 128dd4834995..746233fb0dee 100644 --- a/pkgs/development/tools/buf/default.nix +++ b/pkgs/development/tools/buf/default.nix @@ -3,7 +3,7 @@ , fetchFromGitHub , protobuf , git -, testVersion +, testers , buf , installShellFiles }: @@ -70,7 +70,7 @@ buildGoModule rec { runHook postInstall ''; - passthru.tests.version = testVersion { package = buf; }; + passthru.tests.version = testers.testVersion { package = buf; }; meta = with lib; { homepage = "https://buf.build"; diff --git a/pkgs/development/tools/fq/default.nix b/pkgs/development/tools/fq/default.nix index ad8c43195c9d..add4e21be89d 100644 --- 
a/pkgs/development/tools/fq/default.nix +++ b/pkgs/development/tools/fq/default.nix @@ -2,7 +2,7 @@ , buildGoModule , fetchFromGitHub , fq -, testVersion +, testers }: buildGoModule rec { @@ -26,7 +26,7 @@ buildGoModule rec { subPackages = [ "." ]; - passthru.tests = testVersion { package = fq; }; + passthru.tests = testers.testVersion { package = fq; }; meta = with lib; { description = "jq for binary formats"; diff --git a/pkgs/development/tools/gojq/default.nix b/pkgs/development/tools/gojq/default.nix index d78c587cfbda..08fe68e3a0a1 100644 --- a/pkgs/development/tools/gojq/default.nix +++ b/pkgs/development/tools/gojq/default.nix @@ -1,4 +1,4 @@ -{ lib, buildGoModule, fetchFromGitHub, testVersion, gojq }: +{ lib, buildGoModule, fetchFromGitHub, testers, gojq }: buildGoModule rec { pname = "gojq"; @@ -15,7 +15,7 @@ buildGoModule rec { ldflags = [ "-s" "-w" ]; - passthru.tests.version = testVersion { + passthru.tests.version = testers.testVersion { package = gojq; }; diff --git a/pkgs/development/tools/kube-linter/default.nix b/pkgs/development/tools/kube-linter/default.nix index d234e458c184..e982cbfed173 100644 --- a/pkgs/development/tools/kube-linter/default.nix +++ b/pkgs/development/tools/kube-linter/default.nix @@ -1,4 +1,4 @@ -{ lib, buildGoModule, fetchFromGitHub, testVersion, kube-linter }: +{ lib, buildGoModule, fetchFromGitHub, testers, kube-linter }: buildGoModule rec { pname = "kube-linter"; @@ -17,7 +17,7 @@ buildGoModule rec { "-s" "-w" "-X golang.stackrox.io/kube-linter/internal/version.version=${version}" ]; - passthru.tests.version = testVersion { + passthru.tests.version = testers.testVersion { package = kube-linter; command = "kube-linter version"; }; diff --git a/pkgs/development/tools/quick-lint-js/default.nix b/pkgs/development/tools/quick-lint-js/default.nix index 47151dd1e662..43a7fa494ba4 100644 --- a/pkgs/development/tools/quick-lint-js/default.nix +++ b/pkgs/development/tools/quick-lint-js/default.nix @@ -1,4 +1,4 @@ -{ cmake, fetchFromGitHub, lib, ninja, stdenv, testVersion, quick-lint-js }: +{ cmake, fetchFromGitHub, lib, ninja, stdenv, testers, quick-lint-js }: stdenv.mkDerivation rec { @@ -16,7 +16,7 @@ stdenv.mkDerivation rec { doCheck = true; passthru.tests = { - version = testVersion { package = quick-lint-js; }; + version = testers.testVersion { package = quick-lint-js; }; }; meta = with lib; { diff --git a/pkgs/development/tools/selenium/chromedriver/default.nix b/pkgs/development/tools/selenium/chromedriver/default.nix index a75488a229df..66a73b1c1e98 100644 --- a/pkgs/development/tools/selenium/chromedriver/default.nix +++ b/pkgs/development/tools/selenium/chromedriver/default.nix @@ -2,7 +2,7 @@ , cairo, fontconfig, freetype, gdk-pixbuf, glib , glibc, gtk2, libX11, nspr, nss, pango , libxcb, libXi, libXrender, libXext, dbus -, testVersion, chromedriver +, testers, chromedriver }: let @@ -56,7 +56,7 @@ in stdenv.mkDerivation rec { wrapProgram "$out/bin/chromedriver" --prefix LD_LIBRARY_PATH : "${libs}" ''; - passthru.tests.version = testVersion { package = chromedriver; }; + passthru.tests.version = testers.testVersion { package = chromedriver; }; meta = with lib; { homepage = "https://chromedriver.chromium.org/"; diff --git a/pkgs/development/tools/sq/default.nix b/pkgs/development/tools/sq/default.nix index 4141fd59733c..0be6e6c7bb6e 100644 --- a/pkgs/development/tools/sq/default.nix +++ b/pkgs/development/tools/sq/default.nix @@ -1,4 +1,4 @@ -{ lib, buildGoModule, fetchFromGitHub, installShellFiles, testVersion, sq }: +{ lib, buildGoModule, 
fetchFromGitHub, installShellFiles, testers, sq }: buildGoModule rec { pname = "sq"; version = "0.15.4"; @@ -29,7 +29,7 @@ buildGoModule rec { ''; passthru.tests = { - version = testVersion { package = sq; }; + version = testers.testVersion { package = sq; }; }; meta = with lib; { diff --git a/pkgs/development/tools/yarn/default.nix b/pkgs/development/tools/yarn/default.nix index 0e39a714ccda..d934eadcdf8a 100644 --- a/pkgs/development/tools/yarn/default.nix +++ b/pkgs/development/tools/yarn/default.nix @@ -1,4 +1,4 @@ -{ lib, stdenv, nodejs, fetchzip, testVersion, yarn }: +{ lib, stdenv, nodejs, fetchzip, testers, yarn }: stdenv.mkDerivation rec { pname = "yarn"; @@ -18,7 +18,7 @@ stdenv.mkDerivation rec { ln -s $out/libexec/yarn/bin/yarn.js $out/bin/yarnpkg ''; - passthru.tests = testVersion { package = yarn; }; + passthru.tests = testers.testVersion { package = yarn; }; meta = with lib; { homepage = "https://yarnpkg.com/"; diff --git a/pkgs/games/lgogdownloader/default.nix b/pkgs/games/lgogdownloader/default.nix index 0c40d537fcec..ab029e055aa9 100644 --- a/pkgs/games/lgogdownloader/default.nix +++ b/pkgs/games/lgogdownloader/default.nix @@ -11,7 +11,7 @@ , rhash , tinyxml-2 , help2man -, testVersion +, testers , lgogdownloader }: @@ -43,7 +43,7 @@ stdenv.mkDerivation rec { ]; passthru.tests = { - version = testVersion { package = lgogdownloader; }; + version = testers.testVersion { package = lgogdownloader; }; }; meta = with lib; { diff --git a/pkgs/games/opensupaplex/default.nix b/pkgs/games/opensupaplex/default.nix index d3f8155f546c..dc5d9aae690c 100644 --- a/pkgs/games/opensupaplex/default.nix +++ b/pkgs/games/opensupaplex/default.nix @@ -4,7 +4,7 @@ , fetchurl , makeDesktopItem , copyDesktopItems -, testVersion +, testers , opensupaplex , SDL2 , SDL2_mixer @@ -62,7 +62,7 @@ stdenv.mkDerivation rec { runHook postInstall ''; - passthru.tests.version = testVersion { + passthru.tests.version = testers.testVersion { package = opensupaplex; command = "opensupaplex --help"; version = "v${version}"; diff --git a/pkgs/games/warzone2100/default.nix b/pkgs/games/warzone2100/default.nix index a44f965b0e55..2df306d686f2 100644 --- a/pkgs/games/warzone2100/default.nix +++ b/pkgs/games/warzone2100/default.nix @@ -26,7 +26,7 @@ , vulkan-loader , shaderc -, testVersion +, testers , warzone2100 , withVideos ? false @@ -104,7 +104,7 @@ stdenv.mkDerivation rec { ''; passthru.tests = { - version = testVersion { + version = testers.testVersion { package = warzone2100; # The command always exits with code 1 command = "(warzone2100 --version || [ $? 
-eq 1 ])"; diff --git a/pkgs/servers/monitoring/prometheus/promscale/default.nix b/pkgs/servers/monitoring/prometheus/promscale/default.nix index dd297d4c2585..fbd1661c5f8b 100644 --- a/pkgs/servers/monitoring/prometheus/promscale/default.nix +++ b/pkgs/servers/monitoring/prometheus/promscale/default.nix @@ -2,7 +2,7 @@ , buildGoModule , fetchFromGitHub , promscale -, testVersion +, testers }: buildGoModule rec { @@ -40,7 +40,7 @@ buildGoModule rec { runHook postCheck ''; - passthru.tests.version = testVersion { + passthru.tests.version = testers.testVersion { package = promscale; command = "promscale -version"; }; diff --git a/pkgs/servers/stayrtr/default.nix b/pkgs/servers/stayrtr/default.nix index cee68bd2f61b..a168cf5b1eec 100644 --- a/pkgs/servers/stayrtr/default.nix +++ b/pkgs/servers/stayrtr/default.nix @@ -2,7 +2,7 @@ , fetchFromGitHub , buildGoModule , stayrtr -, testVersion +, testers }: buildGoModule rec { @@ -23,7 +23,7 @@ buildGoModule rec { "-X main.version=${version}" ]; - passthru.tests.version = testVersion { + passthru.tests.version = testers.testVersion { package = stayrtr; }; diff --git a/pkgs/tools/admin/colmena/default.nix b/pkgs/tools/admin/colmena/default.nix index d17366b38a81..8d7982060de6 100644 --- a/pkgs/tools/admin/colmena/default.nix +++ b/pkgs/tools/admin/colmena/default.nix @@ -1,4 +1,4 @@ -{ stdenv, lib, rustPlatform, fetchFromGitHub, installShellFiles, colmena, testVersion }: +{ stdenv, lib, rustPlatform, fetchFromGitHub, installShellFiles, colmena, testers }: rustPlatform.buildRustPackage rec { pname = "colmena"; @@ -29,7 +29,7 @@ rustPlatform.buildRustPackage rec { # We guarantee CLI and Nix API stability for the same minor version apiVersion = builtins.concatStringsSep "." (lib.take 2 (lib.splitString "." version)); - tests.version = testVersion { package = colmena; }; + tests.version = testers.testVersion { package = colmena; }; }; meta = with lib; { diff --git a/pkgs/tools/archivers/7zz/default.nix b/pkgs/tools/archivers/7zz/default.nix index 03c95378a0ae..b734232ca053 100644 --- a/pkgs/tools/archivers/7zz/default.nix +++ b/pkgs/tools/archivers/7zz/default.nix @@ -12,7 +12,7 @@ # For tests , _7zz -, testVersion +, testers }: let @@ -79,7 +79,7 @@ stdenv.mkDerivation rec { passthru = { updateScript = ./update.sh; - tests.version = testVersion { + tests.version = testers.testVersion { package = _7zz; command = "7zz --help"; }; diff --git a/pkgs/tools/backup/discordchatexporter-cli/default.nix b/pkgs/tools/backup/discordchatexporter-cli/default.nix index 8e98ea3c7c28..96de897c5f34 100644 --- a/pkgs/tools/backup/discordchatexporter-cli/default.nix +++ b/pkgs/tools/backup/discordchatexporter-cli/default.nix @@ -2,7 +2,7 @@ , buildDotnetModule , fetchFromGitHub , dotnetCorePackages -, testVersion +, testers , discordchatexporter-cli }: @@ -29,7 +29,7 @@ buildDotnetModule rec { passthru = { updateScript = ./updater.sh; - tests.version = testVersion { + tests.version = testers.testVersion { package = discordchatexporter-cli; version = "v${version}"; }; diff --git a/pkgs/tools/compression/pxz/default.nix b/pkgs/tools/compression/pxz/default.nix index 0f509a3bd170..f61e80cbc065 100644 --- a/pkgs/tools/compression/pxz/default.nix +++ b/pkgs/tools/compression/pxz/default.nix @@ -1,7 +1,7 @@ { lib , stdenv , fetchFromGitHub -, testVersion +, testers , pxz , xz }: @@ -34,7 +34,7 @@ stdenv.mkDerivation rec { "MANDIR=${placeholder "out"}/share/man" ]; - passthru.tests.version = testVersion { + passthru.tests.version = testers.testVersion { package = pxz; }; diff 
--git a/pkgs/tools/filesystems/garage/default.nix b/pkgs/tools/filesystems/garage/default.nix index 9d9f2535a04f..8799ec5a3d2a 100644 --- a/pkgs/tools/filesystems/garage/default.nix +++ b/pkgs/tools/filesystems/garage/default.nix @@ -1,4 +1,4 @@ -{ lib, rustPlatform, fetchFromGitea, protobuf, testVersion, garage }: +{ lib, rustPlatform, fetchFromGitea, protobuf, testers, garage }: rustPlatform.buildRustPackage rec { pname = "garage"; version = "0.7.0"; @@ -16,7 +16,7 @@ rustPlatform.buildRustPackage rec { nativeBuildInputs = [ protobuf ]; passthru = { - tests.version = testVersion { package = garage; }; + tests.version = testers.testVersion { package = garage; }; }; meta = { diff --git a/pkgs/tools/misc/adrgen/default.nix b/pkgs/tools/misc/adrgen/default.nix index 08fec93b5871..83117f403506 100644 --- a/pkgs/tools/misc/adrgen/default.nix +++ b/pkgs/tools/misc/adrgen/default.nix @@ -1,7 +1,7 @@ { lib , buildGoModule , fetchFromGitHub -, testVersion +, testers , adrgen }: @@ -18,7 +18,7 @@ buildGoModule rec { vendorSha256 = "sha256-aDtUD+KKKSE0TpSi4+6HXSBMqF/TROZZhT0ox3a8Idk="; - passthru.tests.version = testVersion { + passthru.tests.version = testers.testVersion { package = adrgen; command = "adrgen version"; version = "v${version}"; diff --git a/pkgs/tools/misc/arch-install-scripts/default.nix b/pkgs/tools/misc/arch-install-scripts/default.nix index a967e4989e61..c89e575e8eb0 100644 --- a/pkgs/tools/misc/arch-install-scripts/default.nix +++ b/pkgs/tools/misc/arch-install-scripts/default.nix @@ -6,7 +6,6 @@ , coreutils , gawk , gnum4 -, testVersion , util-linux }: diff --git a/pkgs/tools/misc/czkawka/default.nix b/pkgs/tools/misc/czkawka/default.nix index fbe3a68d5842..6385eade7aa4 100644 --- a/pkgs/tools/misc/czkawka/default.nix +++ b/pkgs/tools/misc/czkawka/default.nix @@ -8,7 +8,7 @@ , gdk-pixbuf , atk , gtk3 -, testVersion +, testers , czkawka }: @@ -38,7 +38,7 @@ rustPlatform.buildRustPackage rec { gtk3 ]; - passthru.tests.version = testVersion { + passthru.tests.version = testers.testVersion { package = czkawka; command = "czkawka_cli --version"; }; diff --git a/pkgs/tools/misc/datefmt/default.nix b/pkgs/tools/misc/datefmt/default.nix index 01927fedc28d..c70d04326fc8 100644 --- a/pkgs/tools/misc/datefmt/default.nix +++ b/pkgs/tools/misc/datefmt/default.nix @@ -1,4 +1,4 @@ -{ lib, stdenv, fetchurl, datefmt, testVersion }: +{ lib, stdenv, fetchurl, datefmt, testers }: stdenv.mkDerivation rec { pname = "datefmt"; @@ -11,7 +11,7 @@ stdenv.mkDerivation rec { makeFlags = [ "PREFIX=$(out)" ]; - passthru.tests.version = testVersion { package = datefmt; }; + passthru.tests.version = testers.testVersion { package = datefmt; }; meta = with lib; { homepage = "https://jb55.com/datefmt"; diff --git a/pkgs/tools/misc/dsq/default.nix b/pkgs/tools/misc/dsq/default.nix index bb884c5a2933..791297607c1f 100644 --- a/pkgs/tools/misc/dsq/default.nix +++ b/pkgs/tools/misc/dsq/default.nix @@ -5,7 +5,7 @@ , runCommand , nix-update-script , dsq -, testVersion +, testers , diffutils }: @@ -30,7 +30,7 @@ buildGoModule rec { updateScript = nix-update-script { attrPath = pname; }; tests = { - version = testVersion { package = dsq; }; + version = testers.testVersion { package = dsq; }; pretty-csv = runCommand "${pname}-test" { } '' mkdir "$out" diff --git a/pkgs/tools/misc/gummy/default.nix b/pkgs/tools/misc/gummy/default.nix index cc1b68b7c0e8..defe5950a116 100644 --- a/pkgs/tools/misc/gummy/default.nix +++ b/pkgs/tools/misc/gummy/default.nix @@ -1,7 +1,7 @@ { lib , stdenv , fetchFromGitHub -, testVersion +, 
testers , gummy , cmake , libX11 @@ -51,7 +51,7 @@ stdenv.mkDerivation rec { ln -s $out/libexec/gummyd $out/bin/gummyd ''; - passthru.tests.version = testVersion { package = gummy; }; + passthru.tests.version = testers.testVersion { package = gummy; }; meta = with lib; { homepage = "https://github.com/Fushko/gummy"; diff --git a/pkgs/tools/misc/lsd/default.nix b/pkgs/tools/misc/lsd/default.nix index d72fad1a666c..157cc1adf8d7 100644 --- a/pkgs/tools/misc/lsd/default.nix +++ b/pkgs/tools/misc/lsd/default.nix @@ -2,7 +2,7 @@ , fetchFromGitHub , rustPlatform , installShellFiles -, testVersion +, testers , lsd }: @@ -27,7 +27,7 @@ rustPlatform.buildRustPackage rec { # Found argument '--test-threads' which wasn't expected, or isn't valid in this context doCheck = false; - passthru.tests.version = testVersion { + passthru.tests.version = testers.testVersion { package = lsd; }; diff --git a/pkgs/tools/misc/zellij/default.nix b/pkgs/tools/misc/zellij/default.nix index e8b39bb1caa1..b12d096435bd 100644 --- a/pkgs/tools/misc/zellij/default.nix +++ b/pkgs/tools/misc/zellij/default.nix @@ -10,7 +10,7 @@ , Foundation , mandown , zellij -, testVersion +, testers }: rustPlatform.buildRustPackage rec { @@ -54,7 +54,7 @@ rustPlatform.buildRustPackage rec { --zsh <($out/bin/zellij setup --generate-completion zsh) ''; - passthru.tests.version = testVersion { package = zellij; }; + passthru.tests.version = testers.testVersion { package = zellij; }; meta = with lib; { description = "A terminal workspace with batteries included"; diff --git a/pkgs/tools/networking/clash/default.nix b/pkgs/tools/networking/clash/default.nix index 844335df223c..c73e487904ef 100644 --- a/pkgs/tools/networking/clash/default.nix +++ b/pkgs/tools/networking/clash/default.nix @@ -1,4 +1,4 @@ -{ lib, fetchFromGitHub, buildGoModule, testVersion, clash }: +{ lib, fetchFromGitHub, buildGoModule, testers, clash }: buildGoModule rec { pname = "clash"; @@ -26,7 +26,7 @@ buildGoModule rec { "-X github.com/Dreamacro/clash/constant.Version=${version}" ]; - passthru.tests.version = testVersion { + passthru.tests.version = testers.testVersion { package = clash; command = "clash -v"; }; diff --git a/pkgs/tools/networking/curlie/default.nix b/pkgs/tools/networking/curlie/default.nix index fb46ab3fe102..c251e1c149ee 100644 --- a/pkgs/tools/networking/curlie/default.nix +++ b/pkgs/tools/networking/curlie/default.nix @@ -1,4 +1,4 @@ -{ buildGoModule, fetchFromGitHub, lib, curlie, testVersion }: +{ buildGoModule, fetchFromGitHub, lib, curlie, testers }: buildGoModule rec { pname = "curlie"; @@ -15,7 +15,7 @@ buildGoModule rec { ldflags = [ "-s" "-w" "-X main.version=${version}" ]; - passthru.tests.version = testVersion { + passthru.tests.version = testers.testVersion { package = curlie; command = "curlie version"; }; diff --git a/pkgs/tools/networking/innernet/default.nix b/pkgs/tools/networking/innernet/default.nix index 68ccdfc3870d..6f7669ff6f7f 100644 --- a/pkgs/tools/networking/innernet/default.nix +++ b/pkgs/tools/networking/innernet/default.nix @@ -8,7 +8,7 @@ , Security , libiconv , innernet -, testVersion +, testers }: rustPlatform.buildRustPackage rec { @@ -40,8 +40,8 @@ rustPlatform.buildRustPackage rec { ''; passthru.tests = { - serverVersion = testVersion { package = innernet; command = "innernet-server --version"; }; - version = testVersion { package = innernet; command = "innernet --version"; }; + serverVersion = testers.testVersion { package = innernet; command = "innernet-server --version"; }; + version = testers.testVersion { package 
= innernet; command = "innernet --version"; }; }; meta = with lib; { diff --git a/pkgs/tools/networking/pmacct/default.nix b/pkgs/tools/networking/pmacct/default.nix index 32b800521e16..aa699d5bcad5 100644 --- a/pkgs/tools/networking/pmacct/default.nix +++ b/pkgs/tools/networking/pmacct/default.nix @@ -13,7 +13,7 @@ , withPgSQL ? true, postgresql , withMysql ? true, libmysqlclient, zlib, numactl , gnutlsSupport ? false, gnutls -, testVersion +, testers , pmacct }: @@ -55,7 +55,7 @@ stdenv.mkDerivation rec { ++ lib.optional gnutlsSupport "--enable-gnutls"; passthru.tests = { - version = testVersion { package = pmacct; command = "pmacct -V"; }; + version = testers.testVersion { package = pmacct; command = "pmacct -V"; }; }; meta = with lib; { diff --git a/pkgs/tools/networking/smartdns/default.nix b/pkgs/tools/networking/smartdns/default.nix index 399aeefd9c3d..9763d52d0b1d 100644 --- a/pkgs/tools/networking/smartdns/default.nix +++ b/pkgs/tools/networking/smartdns/default.nix @@ -1,4 +1,4 @@ -{ lib, stdenv, fetchFromGitHub, openssl, testVersion, smartdns }: +{ lib, stdenv, fetchFromGitHub, openssl, testers, smartdns }: stdenv.mkDerivation rec { pname = "smartdns"; @@ -24,7 +24,7 @@ stdenv.mkDerivation rec { installFlags = [ "SYSCONFDIR=${placeholder "out"}/etc" ]; passthru.tests = { - version = testVersion { package = smartdns; }; + version = testers.testVersion { package = smartdns; }; }; meta = with lib; { diff --git a/pkgs/tools/nix/alejandra/default.nix b/pkgs/tools/nix/alejandra/default.nix index 43b6200972dd..62deef516d50 100644 --- a/pkgs/tools/nix/alejandra/default.nix +++ b/pkgs/tools/nix/alejandra/default.nix @@ -1,7 +1,7 @@ { lib , rustPlatform , fetchFromGitHub -, testVersion +, testers , alejandra }: @@ -19,7 +19,7 @@ rustPlatform.buildRustPackage rec { cargoSha256 = "sha256-SsIpggbRQPjpCYgCG4sSJ022MmMV4bJJ8UAHcJR74O8="; passthru.tests = { - version = testVersion { package = alejandra; }; + version = testers.testVersion { package = alejandra; }; }; meta = with lib; { diff --git a/pkgs/tools/system/s-tui/default.nix b/pkgs/tools/system/s-tui/default.nix index 1152e66bd887..5c759eea8a12 100644 --- a/pkgs/tools/system/s-tui/default.nix +++ b/pkgs/tools/system/s-tui/default.nix @@ -3,7 +3,7 @@ , python3Packages , nix-update-script , s-tui -, testVersion +, testers }: python3Packages.buildPythonPackage rec { @@ -22,7 +22,7 @@ python3Packages.buildPythonPackage rec { passthru = { updateScript = nix-update-script { attrPath = pname; }; - tests = testVersion { package = s-tui; }; + tests = testers.testVersion { package = s-tui; }; }; meta = with lib; { diff --git a/pkgs/tools/text/crowdin-cli/default.nix b/pkgs/tools/text/crowdin-cli/default.nix index e73b58d4e6ba..6111a6f1c8bd 100644 --- a/pkgs/tools/text/crowdin-cli/default.nix +++ b/pkgs/tools/text/crowdin-cli/default.nix @@ -8,7 +8,7 @@ , jre , makeWrapper , crowdin-cli -, testVersion +, testers , unzip }: @@ -38,7 +38,7 @@ stdenv.mkDerivation rec { runHook postInstall ''; - passthru.tests.version = testVersion { package = crowdin-cli; }; + passthru.tests.version = testers.testVersion { package = crowdin-cli; }; meta = with lib; { mainProgram = "crowdin"; diff --git a/pkgs/tools/text/difftastic/default.nix b/pkgs/tools/text/difftastic/default.nix index 12ad1a764fb2..3f4b1946e123 100644 --- a/pkgs/tools/text/difftastic/default.nix +++ b/pkgs/tools/text/difftastic/default.nix @@ -1,4 +1,4 @@ -{ lib, fetchFromGitHub, rustPlatform, tree-sitter, difftastic, testVersion }: +{ lib, fetchFromGitHub, rustPlatform, tree-sitter, difftastic, 
testers }: rustPlatform.buildRustPackage rec { pname = "difftastic"; @@ -13,7 +13,7 @@ rustPlatform.buildRustPackage rec { cargoSha256 = "1kmwd9m94kl3j6ajfndr7rjx66fsqvnn2jh0m54ac5qd5r9hhdc8"; - passthru.tests.version = testVersion { package = difftastic; }; + passthru.tests.version = testers.testVersion { package = difftastic; }; meta = with lib; { description = "A syntax-aware diff"; diff --git a/pkgs/tools/text/igrep/default.nix b/pkgs/tools/text/igrep/default.nix index 28c896a828e5..87f9de373e23 100644 --- a/pkgs/tools/text/igrep/default.nix +++ b/pkgs/tools/text/igrep/default.nix @@ -3,7 +3,7 @@ , fetchFromGitHub , stdenv , Security -, testVersion +, testers , igrep }: @@ -23,7 +23,7 @@ rustPlatform.buildRustPackage rec { buildInputs = lib.optionals stdenv.isDarwin [ Security ]; passthru.tests = { - version = testVersion { package = igrep; command = "ig --version"; }; + version = testers.testVersion { package = igrep; command = "ig --version"; }; }; meta = with lib; { diff --git a/pkgs/tools/text/mdbook-linkcheck/default.nix b/pkgs/tools/text/mdbook-linkcheck/default.nix index b37b16876b0f..eaccd05eb9c4 100644 --- a/pkgs/tools/text/mdbook-linkcheck/default.nix +++ b/pkgs/tools/text/mdbook-linkcheck/default.nix @@ -1,5 +1,5 @@ { lib, stdenv, fetchFromGitHub, rustPlatform, pkg-config, openssl, Security -, testVersion, mdbook-linkcheck }: +, testers, mdbook-linkcheck }: rustPlatform.buildRustPackage rec { pname = "mdbook-linkcheck"; @@ -22,7 +22,7 @@ rustPlatform.buildRustPackage rec { doCheck = false; # tries to access network to test broken web link functionality - passthru.tests.version = testVersion { package = mdbook-linkcheck; }; + passthru.tests.version = testers.testVersion { package = mdbook-linkcheck; }; meta = with lib; { description = "A backend for `mdbook` which will check your links for you."; From 3cc2e86bab4dcfa1abc7c835fae6336b068cd77e Mon Sep 17 00:00:00 2001 From: Artturin Date: Wed, 20 Apr 2022 18:43:21 +0300 Subject: [PATCH 055/161] testers: convert to a format that is kind of compatible with nixdoc Examples: support has to be added to https://github.com/nix-community/nixdoc 'nixdoc --category testers --description "nixpkgs testers" --file ./pkgs/build-support/testers/default.nix' --- pkgs/build-support/testers/default.nix | 62 ++++++++++++------- .../testers/test-equal-derivation.nix | 17 ----- 2 files changed, 39 insertions(+), 40 deletions(-) diff --git a/pkgs/build-support/testers/default.nix b/pkgs/build-support/testers/default.nix index b63ba5742b2b..e5a8dbc79609 100644 --- a/pkgs/build-support/testers/default.nix +++ b/pkgs/build-support/testers/default.nix @@ -1,31 +1,47 @@ { pkgs, lib, callPackage, runCommand }: { + + /* Checks that two packages produce the exact same build instructions. + + This can be used to make sure that a certain difference of configuration, + such as the presence of an overlay does not cause a cache miss. + + When the derivations are equal, the return value is an empty file. + Otherwise, the build log explains the difference via `nix-diff`. + + Example: + + testEqualDerivation + "The hello package must stay the same when enabling checks." + hello + (hello.overrideAttrs(o: { doCheck = true; })) + */ testEqualDerivation = callPackage ./test-equal-derivation.nix { }; /* Checks the command output contains the specified version - * - * Although simplistic, this test assures that the main program - * can run. While there's no substitute for a real test case, - * it does catch dynamic linking errors and such. 
It also provides - * some protection against accidentally building the wrong version, - * for example when using an 'old' hash in a fixed-output derivation. - * - * Examples: - * - * passthru.tests.version = testVersion { package = hello; }; - * - * passthru.tests.version = testVersion { - * package = seaweedfs; - * command = "weed version"; - * }; - * - * passthru.tests.version = testVersion { - * package = key; - * command = "KeY --help"; - * # Wrong '2.5' version in the code. Drop on next version. - * version = "2.5"; - * }; - */ + + Although simplistic, this test assures that the main program + can run. While there's no substitute for a real test case, + it does catch dynamic linking errors and such. It also provides + some protection against accidentally building the wrong version, + for example when using an 'old' hash in a fixed-output derivation. + + Examples: + + passthru.tests.version = testVersion { package = hello; }; + + passthru.tests.version = testVersion { + package = seaweedfs; + command = "weed version"; + }; + + passthru.tests.version = testVersion { + package = key; + command = "KeY --help"; + # Wrong '2.5' version in the code. Drop on next version. + version = "2.5"; + }; + */ testVersion = { package, command ? "${package.meta.mainProgram or package.pname or package.name} --version", diff --git a/pkgs/build-support/testers/test-equal-derivation.nix b/pkgs/build-support/testers/test-equal-derivation.nix index 652f3716b2a7..610d5f585576 100644 --- a/pkgs/build-support/testers/test-equal-derivation.nix +++ b/pkgs/build-support/testers/test-equal-derivation.nix @@ -1,22 +1,5 @@ { lib, runCommand, emptyFile, nix-diff }: -/* - Checks that two packages produce the exact same build instructions. - - This can be used to make sure that a certain difference of configuration, - such as the presence of an overlay does not cause a cache miss. - - When the derivations are equal, the return value is an empty file. - Otherwise, the build log explains the difference via `nix-diff`. - - Example: - - testEqualDerivation - "The hello package must stay the same when enabling checks." - hello - (hello.overrideAttrs(o: { doCheck = true; })) - -*/ assertion: a: b: let drvA = builtins.unsafeDiscardOutputDependency a.drvPath or (throw "testEqualDerivation second argument must be a package"); From 41808d42d21f685e3fd3cdb2c9f04082f0818e30 Mon Sep 17 00:00:00 2001 From: Artturin Date: Wed, 20 Apr 2022 21:26:52 +0300 Subject: [PATCH 056/161] doc: move testers to their own chapter --- doc/builders/fetchers.chapter.md | 2 +- doc/builders/special.xml | 1 - .../invalidateFetcherByDrvHash.section.md | 31 ------- doc/builders/testers.chapter.md | 82 +++++++++++++++++++ doc/manual.xml | 1 + pkgs/build-support/testers/default.nix | 41 +--------- pkgs/top-level/all-packages.nix | 4 +- 7 files changed, 87 insertions(+), 75 deletions(-) delete mode 100644 doc/builders/special/invalidateFetcherByDrvHash.section.md create mode 100644 doc/builders/testers.chapter.md diff --git a/doc/builders/fetchers.chapter.md b/doc/builders/fetchers.chapter.md index d9f22b062827..09a41cd9ce0b 100644 --- a/doc/builders/fetchers.chapter.md +++ b/doc/builders/fetchers.chapter.md @@ -6,7 +6,7 @@ When using Nix, you will frequently need to download source code and other files Because fixed output derivations are _identified_ by their hash, a common mistake is to update a fetcher's URL or a version parameter, without updating the hash. 
**This will cause the old contents to be used.** So remember to always invalidate the hash argument. -For those who develop and maintain fetchers, a similar problem arises with changes to the implementation of a fetcher. These may cause a fixed output derivation to fail, but won't normally be caught by tests because the supposed output is already in the store or cache. For the purpose of testing, you can use a trick that is embodied by the [`invalidateFetcherByDrvHash`](#sec-pkgs-invalidateFetcherByDrvHash) function. It uses the derivation `name` to create a unique output path per fetcher implementation, defeating the caching precisely where it would be harmful. +For those who develop and maintain fetchers, a similar problem arises with changes to the implementation of a fetcher. These may cause a fixed output derivation to fail, but won't normally be caught by tests because the supposed output is already in the store or cache. For the purpose of testing, you can use a trick that is embodied by the [`invalidateFetcherByDrvHash`](#tester-invalidateFetcherByDrvHash) function. It uses the derivation `name` to create a unique output path per fetcher implementation, defeating the caching precisely where it would be harmful. ## `fetchurl` and `fetchzip` {#fetchurl} diff --git a/doc/builders/special.xml b/doc/builders/special.xml index 2f84599cdd4f..8902ce5c8132 100644 --- a/doc/builders/special.xml +++ b/doc/builders/special.xml @@ -7,5 +7,4 @@ - diff --git a/doc/builders/special/invalidateFetcherByDrvHash.section.md b/doc/builders/special/invalidateFetcherByDrvHash.section.md deleted file mode 100644 index 7c2f03a64b7b..000000000000 --- a/doc/builders/special/invalidateFetcherByDrvHash.section.md +++ /dev/null @@ -1,31 +0,0 @@ - -## `invalidateFetcherByDrvHash` {#sec-pkgs-invalidateFetcherByDrvHash} - -Use the derivation hash to invalidate the output via name, for testing. - -Type: `(a@{ name, ... } -> Derivation) -> a -> Derivation` - -Normally, fixed output derivations can and should be cached by their output -hash only, but for testing we want to re-fetch everytime the fetcher changes. - -Changes to the fetcher become apparent in the drvPath, which is a hash of -how to fetch, rather than a fixed store path. -By inserting this hash into the name, we can make sure to re-run the fetcher -every time the fetcher changes. - -This relies on the assumption that Nix isn't clever enough to reuse its -database of local store contents to optimize fetching. - -You might notice that the "salted" name derives from the normal invocation, -not the final derivation. `invalidateFetcherByDrvHash` has to invoke the fetcher -function twice: once to get a derivation hash, and again to produce the final -fixed output derivation. - -Example: - - tests.fetchgit = invalidateFetcherByDrvHash fetchgit { - name = "nix-source"; - url = "https://github.com/NixOS/nix"; - rev = "9d9dbe6ed05854e03811c361a3380e09183f4f4a"; - sha256 = "sha256-7DszvbCNTjpzGRmpIVAWXk20P0/XTrWZ79KSOGLrUWY="; - }; diff --git a/doc/builders/testers.chapter.md b/doc/builders/testers.chapter.md new file mode 100644 index 000000000000..2c30c8bd240e --- /dev/null +++ b/doc/builders/testers.chapter.md @@ -0,0 +1,82 @@ +# Testers {#chap-testers} +This chapter describes several testing builders which are available in the testers namespace. + +## `testVersion` {#tester-testVersion} + +Checks the command output contains the specified version + +Although simplistic, this test assures that the main program +can run. 
While there's no substitute for a real test case, +it does catch dynamic linking errors and such. It also provides +some protection against accidentally building the wrong version, +for example when using an 'old' hash in a fixed-output derivation. + +Examples: + +```nix +passthru.tests.version = testVersion { package = hello; }; + +passthru.tests.version = testVersion { + package = seaweedfs; + command = "weed version"; +}; + +passthru.tests.version = testVersion { + package = key; + command = "KeY --help"; + # Wrong '2.5' version in the code. Drop on next version. + version = "2.5"; +}; +``` + +## `testEqualDerivation` {#tester-testEqualDerivation} + +Checks that two packages produce the exact same build instructions. + +This can be used to make sure that a certain difference of configuration, +such as the presence of an overlay does not cause a cache miss. + +When the derivations are equal, the return value is an empty file. +Otherwise, the build log explains the difference via `nix-diff`. + +Example: + +```nix +testEqualDerivation + "The hello package must stay the same when enabling checks." + hello + (hello.overrideAttrs(o: { doCheck = true; })) +``` + +## `invalidateFetcherByDrvHash` {#tester-invalidateFetcherByDrvHash} + +Use the derivation hash to invalidate the output via name, for testing. + +Type: `(a@{ name, ... } -> Derivation) -> a -> Derivation` + +Normally, fixed output derivations can and should be cached by their output +hash only, but for testing we want to re-fetch everytime the fetcher changes. + +Changes to the fetcher become apparent in the drvPath, which is a hash of +how to fetch, rather than a fixed store path. +By inserting this hash into the name, we can make sure to re-run the fetcher +every time the fetcher changes. + +This relies on the assumption that Nix isn't clever enough to reuse its +database of local store contents to optimize fetching. + +You might notice that the "salted" name derives from the normal invocation, +not the final derivation. `invalidateFetcherByDrvHash` has to invoke the fetcher +function twice: once to get a derivation hash, and again to produce the final +fixed output derivation. + +Example: + +```nix +tests.fetchgit = invalidateFetcherByDrvHash fetchgit { + name = "nix-source"; + url = "https://github.com/NixOS/nix"; + rev = "9d9dbe6ed05854e03811c361a3380e09183f4f4a"; + sha256 = "sha256-7DszvbCNTjpzGRmpIVAWXk20P0/XTrWZ79KSOGLrUWY="; +}; +``` diff --git a/doc/manual.xml b/doc/manual.xml index e49ae67ec943..ccbaf40586d1 100644 --- a/doc/manual.xml +++ b/doc/manual.xml @@ -25,6 +25,7 @@ Builders + diff --git a/pkgs/build-support/testers/default.nix b/pkgs/build-support/testers/default.nix index e5a8dbc79609..8b79843b8332 100644 --- a/pkgs/build-support/testers/default.nix +++ b/pkgs/build-support/testers/default.nix @@ -1,47 +1,8 @@ { pkgs, lib, callPackage, runCommand }: +# Documentation is in doc/builders/testers.chapter.md { - - /* Checks that two packages produce the exact same build instructions. - - This can be used to make sure that a certain difference of configuration, - such as the presence of an overlay does not cause a cache miss. - - When the derivations are equal, the return value is an empty file. - Otherwise, the build log explains the difference via `nix-diff`. - - Example: - - testEqualDerivation - "The hello package must stay the same when enabling checks." 
- hello - (hello.overrideAttrs(o: { doCheck = true; })) - */ testEqualDerivation = callPackage ./test-equal-derivation.nix { }; - /* Checks the command output contains the specified version - - Although simplistic, this test assures that the main program - can run. While there's no substitute for a real test case, - it does catch dynamic linking errors and such. It also provides - some protection against accidentally building the wrong version, - for example when using an 'old' hash in a fixed-output derivation. - - Examples: - - passthru.tests.version = testVersion { package = hello; }; - - passthru.tests.version = testVersion { - package = seaweedfs; - command = "weed version"; - }; - - passthru.tests.version = testVersion { - package = key; - command = "KeY --help"; - # Wrong '2.5' version in the code. Drop on next version. - version = "2.5"; - }; - */ testVersion = { package, command ? "${package.meta.mainProgram or package.pname or package.name} --version", diff --git a/pkgs/top-level/all-packages.nix b/pkgs/top-level/all-packages.nix index 5117ec4d3092..1bcc20e2d2fa 100644 --- a/pkgs/top-level/all-packages.nix +++ b/pkgs/top-level/all-packages.nix @@ -725,8 +725,8 @@ with pkgs; installShellFiles = callPackage ../build-support/install-shell-files {}; - # See doc/builders/special/invalidateFetcherByDrvHash.section.md or - # https://nixos.org/manual/nixpkgs/unstable/#sec-pkgs-invalidateFetcherByDrvHash + # See doc/builders/testers.chapter.md or + # https://nixos.org/manual/nixpkgs/unstable/#tester-invalidateFetcherByDrvHash invalidateFetcherByDrvHash = f: args: let drvPath = (f args).drvPath; From 52ca1804a1e87e7842df4ec75b82cd9e5cb2f0e7 Mon Sep 17 00:00:00 2001 From: "R. Ryantm" Date: Fri, 22 Apr 2022 13:26:39 +0000 Subject: [PATCH 057/161] dovecot_fts_xapian: 1.5.4b -> 1.5.5 --- pkgs/servers/mail/dovecot/plugins/fts_xapian/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/servers/mail/dovecot/plugins/fts_xapian/default.nix b/pkgs/servers/mail/dovecot/plugins/fts_xapian/default.nix index e6ec00633386..2f3a4d807d2f 100644 --- a/pkgs/servers/mail/dovecot/plugins/fts_xapian/default.nix +++ b/pkgs/servers/mail/dovecot/plugins/fts_xapian/default.nix @@ -1,13 +1,13 @@ { lib, stdenv, fetchFromGitHub, autoconf, automake, sqlite, pkg-config, dovecot, libtool, xapian, icu64 }: stdenv.mkDerivation rec { pname = "dovecot-fts-xapian"; - version = "1.5.4b"; + version = "1.5.5"; src = fetchFromGitHub { owner = "grosjo"; repo = "fts-xapian"; rev = version; - sha256 = "sha256-RzwJjcUgk1XXQZpNdz86Pr9HsUaQCOSt5oSejnGVmgA="; + sha256 = "sha256-KAZno4N/4dFH3QHFTs0wkY4PtGF+j4ZEjZzn9ljCjrM="; }; buildInputs = [ dovecot xapian icu64 sqlite ]; From 38f1d4d19350f69b2d3d21b6c24f51c96ac62923 Mon Sep 17 00:00:00 2001 From: "R. 
Ryantm" Date: Fri, 22 Apr 2022 14:43:42 +0000 Subject: [PATCH 058/161] esbuild: 0.14.34 -> 0.14.38 --- pkgs/development/tools/esbuild/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/development/tools/esbuild/default.nix b/pkgs/development/tools/esbuild/default.nix index 3a8fcd67dd0c..1af15e82110e 100644 --- a/pkgs/development/tools/esbuild/default.nix +++ b/pkgs/development/tools/esbuild/default.nix @@ -2,13 +2,13 @@ buildGoModule rec { pname = "esbuild"; - version = "0.14.34"; + version = "0.14.38"; src = fetchFromGitHub { owner = "evanw"; repo = "esbuild"; rev = "v${version}"; - sha256 = "sha256-TnX/vd12GStzFsCaRYOY7VyDYRw5qP4EeAa0GmWlFWc="; + sha256 = "sha256-rvMi1oC7qGidvi4zrm9KCMMntu6LJGVOGN6VmU2ivQE="; }; vendorSha256 = "sha256-QPkBR+FscUc3jOvH7olcGUhM6OW4vxawmNJuRQxPuGs="; From 04fd6e83f4fe8062324f5f6c6137129f8f0292f8 Mon Sep 17 00:00:00 2001 From: ajs124 Date: Fri, 22 Apr 2022 15:49:15 +0100 Subject: [PATCH 059/161] prometheus-nextcloud-exporter: 0.5.0 -> 0.5.1 Updated Prometheus client library for CVE-2022-21698 --- pkgs/servers/monitoring/prometheus/nextcloud-exporter.nix | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pkgs/servers/monitoring/prometheus/nextcloud-exporter.nix b/pkgs/servers/monitoring/prometheus/nextcloud-exporter.nix index 4f96e516278f..386e49594d24 100644 --- a/pkgs/servers/monitoring/prometheus/nextcloud-exporter.nix +++ b/pkgs/servers/monitoring/prometheus/nextcloud-exporter.nix @@ -2,16 +2,16 @@ buildGoModule rec { pname = "prometheus-nextcloud-exporter"; - version = "0.5.0"; + version = "0.5.1"; src = fetchFromGitHub { owner = "xperimental"; repo = "nextcloud-exporter"; rev = "v${version}"; - sha256 = "sha256-73IxGxnKgbG50nr57Wft+hh0KT7redrwXc4EZFn25qs="; + sha256 = "18xvxx0aplkj7xzi0zfsz7a5d45hh8blfqb105360pknvvi6apjv"; }; - vendorSha256 = "sha256-vIhHUFg8m6raKF82DcXRGKCgSM2FJ2VTM+MdMjP7KUY="; + vendorSha256 = "1wr1ckz0klk9sqpyk96h8bwr1dg6aiirr1l1f23kbib91pfpd08r"; passthru.tests = { inherit (nixosTests.prometheus-exporters) nextcloud; }; From 3b0f9301bbca58b78336c4a604cbf3fd79297d65 Mon Sep 17 00:00:00 2001 From: inty Date: Fri, 22 Apr 2022 15:13:03 +0000 Subject: [PATCH 060/161] rawtherapee: Add note about removing the patch in 5.9 --- pkgs/applications/graphics/rawtherapee/default.nix | 1 + 1 file changed, 1 insertion(+) diff --git a/pkgs/applications/graphics/rawtherapee/default.nix b/pkgs/applications/graphics/rawtherapee/default.nix index 000d92f3b052..494180fdb598 100644 --- a/pkgs/applications/graphics/rawtherapee/default.nix +++ b/pkgs/applications/graphics/rawtherapee/default.nix @@ -16,6 +16,7 @@ stdenv.mkDerivation rec { nativeBuildInputs = [ cmake pkg-config wrapGAppsHook ]; + # This patch is upstream; remove it in 5.9. patches = [ ./fix-6324.patch ]; buildInputs = [ From c2e1bcedeb7e503d508856ae97b3ebd64d5dea29 Mon Sep 17 00:00:00 2001 From: "R. 
Ryantm" Date: Fri, 22 Apr 2022 15:50:44 +0000 Subject: [PATCH 061/161] findomain: 8.0.0 -> 8.1.1 --- pkgs/tools/networking/findomain/default.nix | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pkgs/tools/networking/findomain/default.nix b/pkgs/tools/networking/findomain/default.nix index 6295c4562b0b..a1020a319a7d 100644 --- a/pkgs/tools/networking/findomain/default.nix +++ b/pkgs/tools/networking/findomain/default.nix @@ -10,16 +10,16 @@ rustPlatform.buildRustPackage rec { pname = "findomain"; - version = "8.0.0"; + version = "8.1.1"; src = fetchFromGitHub { owner = "Edu4rdSHL"; repo = pname; rev = version; - sha256 = "sha256-Bk3p8+FkjrF/nLsPRx8daqieV8iewAbcoO2DglFSERg="; + sha256 = "sha256-ngT9ZtPsCzcmZbwpmzbEcSUTHPezzdyAB12qrm5Z6n0="; }; - cargoSha256 = "sha256-chHr/3yN2PLUQLYqkln12q3+n7tX2IclVGpXBCkPxCQ="; + cargoSha256 = "sha256-nHNS1Uskggm5e1paWRSiL4HHcooDbYe0toMwR05OkDQ="; nativeBuildInputs = [ installShellFiles perl ]; buildInputs = lib.optionals stdenv.isDarwin [ libiconv Security ]; From 28e8501d8e67d08892db837b1238fedf54c6eb0a Mon Sep 17 00:00:00 2001 From: Fabian Affolter Date: Fri, 22 Apr 2022 17:58:43 +0200 Subject: [PATCH 062/161] apksigcopier: remove linting, use installCheckPhase --- .../tools/apksigcopier/default.nix | 51 ++++++++++++------- 1 file changed, 33 insertions(+), 18 deletions(-) diff --git a/pkgs/development/tools/apksigcopier/default.nix b/pkgs/development/tools/apksigcopier/default.nix index cc23081473af..f8747398c222 100644 --- a/pkgs/development/tools/apksigcopier/default.nix +++ b/pkgs/development/tools/apksigcopier/default.nix @@ -1,10 +1,10 @@ { lib -, fetchFromGitHub -, python3 -, installShellFiles -, bash -, pandoc , apksigner +, bash +, fetchFromGitHub +, installShellFiles +, pandoc +, python3 }: python3.pkgs.buildPythonApplication rec { @@ -18,33 +18,48 @@ python3.pkgs.buildPythonApplication rec { sha256 = "07ldq3q1x2lpb15q5s5i1pbg89sn6ah45amskm9pndqlh16z9k2x"; }; - nativeBuildInputs = [ installShellFiles pandoc ]; - propagatedBuildInputs = with python3.pkgs; [ click ]; - checkInputs = with python3.pkgs; [ flake8 mypy pylint ]; - makeWrapperArgs = [ "--prefix" "PATH" ":" "${lib.makeBinPath [ apksigner ]}" ]; + nativeBuildInputs = [ + installShellFiles + pandoc + ]; + + propagatedBuildInputs = with python3.pkgs; [ + click + ]; + + makeWrapperArgs = [ + "--prefix" + "PATH" + ":" + "${lib.makeBinPath [ apksigner ]}" + ]; postPatch = '' substituteInPlace Makefile \ - --replace /bin/bash ${bash}/bin/bash \ - --replace 'apksigcopier --version' '${python3.interpreter} apksigcopier --version' + --replace /bin/bash ${bash}/bin/bash ''; postBuild = '' make ${pname}.1 ''; - checkPhase = '' - make test - ''; - postInstall = '' installManPage ${pname}.1 ''; + doInstallCheck = true; + + installCheckPhase = '' + runHook preInstallCheck + $out/bin/apksigcopier --version | grep "${version}" + runHook postInstallCheck + ''; + meta = with lib; { - description = "Copy/extract/patch android apk signatures & compare apks"; + description = "Copy/extract/patch android apk signatures & compare APKs"; longDescription = '' - apksigcopier is a tool for copying android APK signatures from a signed APK to an unsigned one (in order to verify reproducible builds). + apksigcopier is a tool for copying android APK signatures from a signed + APK to an unsigned one (in order to verify reproducible builds). It can also be used to compare two APKs with different signatures. 
Its command-line tool offers four operations: @@ -55,6 +70,6 @@ python3.pkgs.buildPythonApplication rec { ''; homepage = "https://github.com/obfusk/apksigcopier"; license = with licenses; [ gpl3Plus ]; - maintainers = [ maintainers.obfusk ]; + maintainers = with maintainers; [ obfusk ]; }; } From 03451de0b28f0b60dba7427b6313ce07e34eac12 Mon Sep 17 00:00:00 2001 From: Lein Matsumaru Date: Fri, 22 Apr 2022 16:42:25 +0000 Subject: [PATCH 063/161] exploitdb: 2022-04-12 -> 2022-04-20 --- pkgs/tools/security/exploitdb/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/tools/security/exploitdb/default.nix b/pkgs/tools/security/exploitdb/default.nix index bc78eb7f1017..b5ad987b27be 100644 --- a/pkgs/tools/security/exploitdb/default.nix +++ b/pkgs/tools/security/exploitdb/default.nix @@ -2,13 +2,13 @@ stdenv.mkDerivation rec { pname = "exploitdb"; - version = "2022-04-12"; + version = "2022-04-20"; src = fetchFromGitHub { owner = "offensive-security"; repo = pname; rev = version; - sha256 = "sha256-Ucbw09oFklulyXr8mGO5RskKNZx0rPTA6hPJgYByPAI="; + sha256 = "sha256-8sDixCXJA1K6hnPtLzNCB9gJh1GShCC89VTCJ63ohKA="; }; nativeBuildInputs = [ makeWrapper ]; From 51114edb8f88d2c2bd46a3667693f18109ef4d31 Mon Sep 17 00:00:00 2001 From: betaboon Date: Fri, 22 Apr 2022 19:21:15 +0200 Subject: [PATCH 064/161] bazel_3: fix build with gcc11 --- .../build-managers/bazel/bazel_3/default.nix | 2 ++ .../build-managers/bazel/bazel_3/gcc11.patch | 24 +++++++++++++++++++ 2 files changed, 26 insertions(+) create mode 100644 pkgs/development/tools/build-managers/bazel/bazel_3/gcc11.patch diff --git a/pkgs/development/tools/build-managers/bazel/bazel_3/default.nix b/pkgs/development/tools/build-managers/bazel/bazel_3/default.nix index 3a5c9e2e76fd..c5f61e7d25c3 100644 --- a/pkgs/development/tools/build-managers/bazel/bazel_3/default.nix +++ b/pkgs/development/tools/build-managers/bazel/bazel_3/default.nix @@ -186,6 +186,8 @@ stdenv.mkDerivation rec { # we accept this fact because xcode_locator is only a short-lived process used during the build. ./no-arc.patch + ./gcc11.patch + # --experimental_strict_action_env (which may one day become the default # see bazelbuild/bazel#2574) hardcodes the default # action environment to a non hermetic value (e.g. "/usr/local/bin"). 
diff --git a/pkgs/development/tools/build-managers/bazel/bazel_3/gcc11.patch b/pkgs/development/tools/build-managers/bazel/bazel_3/gcc11.patch new file mode 100644 index 000000000000..450bcd114776 --- /dev/null +++ b/pkgs/development/tools/build-managers/bazel/bazel_3/gcc11.patch @@ -0,0 +1,24 @@ +diff --git a/third_party/ijar/mapped_file_unix.cc b/third_party/ijar/mapped_file_unix.cc +index 6e3a908..030e9ca 100644 +--- a/third_party/ijar/mapped_file_unix.cc ++++ b/third_party/ijar/mapped_file_unix.cc +@@ -19,6 +19,7 @@ + #include + + #include ++#include + + #include "third_party/ijar/mapped_file.h" + +diff --git a/third_party/ijar/zlib_client.h b/third_party/ijar/zlib_client.h +index ed66163..c4b051e 100644 +--- a/third_party/ijar/zlib_client.h ++++ b/third_party/ijar/zlib_client.h +@@ -16,6 +16,7 @@ + #define THIRD_PARTY_IJAR_ZLIB_CLIENT_H_ + + #include ++#include + + #include "third_party/ijar/common.h" + From ddf38696146e8640979d12290e2361a6c6ee788d Mon Sep 17 00:00:00 2001 From: Azat Bahawi Date: Fri, 22 Apr 2022 21:26:47 +0300 Subject: [PATCH 065/161] werf: 1.2.87 -> 1.2.91 --- pkgs/applications/networking/cluster/werf/default.nix | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pkgs/applications/networking/cluster/werf/default.nix b/pkgs/applications/networking/cluster/werf/default.nix index 9d6e91fd0a6b..6ff362bb0e08 100644 --- a/pkgs/applications/networking/cluster/werf/default.nix +++ b/pkgs/applications/networking/cluster/werf/default.nix @@ -12,15 +12,15 @@ buildGoModule rec { pname = "werf"; - version = "1.2.87"; + version = "1.2.91"; src = fetchFromGitHub { owner = "werf"; repo = "werf"; rev = "v${version}"; - sha256 = "sha256-DMP//gh79WuQ8VY4sV6lQlwR+k+rwqODf/pagOBP+4U="; + sha256 = "sha256-ZafIG4D5TvAbXbo07gFajt8orTsju1GfF9a1OR0t1Oo="; }; - vendorSha256 = "sha256-OrvGDNj48W1tVAs3tdtAuesHnh8fHRsGd6KL0Uaf9Zg="; + vendorSha256 = "sha256-U4eVQR/ExAENOg2XEYM+mFXANk+basdMLEcqSHuTsh4="; proxyVendor = true; nativeBuildInputs = [ installShellFiles pkg-config ]; From a142e404fe1efd39a8c3e1e191c80dddb79dd896 Mon Sep 17 00:00:00 2001 From: techknowlogick Date: Fri, 22 Apr 2022 14:38:15 -0400 Subject: [PATCH 066/161] vault: 1.10.0 -> 1.10.1 --- pkgs/tools/security/vault/default.nix | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pkgs/tools/security/vault/default.nix b/pkgs/tools/security/vault/default.nix index d349c205ad4f..907f3f4e7c55 100644 --- a/pkgs/tools/security/vault/default.nix +++ b/pkgs/tools/security/vault/default.nix @@ -6,16 +6,16 @@ buildGoModule rec { pname = "vault"; - version = "1.10.0"; + version = "1.10.1"; src = fetchFromGitHub { owner = "hashicorp"; repo = "vault"; rev = "v${version}"; - sha256 = "sha256-XgrEtAVfMcXbmAjwgIWME/v85QHJ11fUXapAZtS/lSw="; + sha256 = "sha256-In+rc5H8HNx5hGySYvCqx6hQ7tmTioHiNdJIMyMRNvU="; }; - vendorSha256 = "sha256-Bo0+HSG7NqaweMKPdl+kzB6RdbQsy2FAzmr7ZZVgcsg="; + vendorSha256 = "sha256-z0PsLrT4jtSof4Bd62juGLv58EV22TnPx6fosMvW97c="; subPackages = [ "." 
]; From 3788e27a24f5b7cdb3f734b775868b66461bfa68 Mon Sep 17 00:00:00 2001 From: 06kellyjac Date: Fri, 22 Apr 2022 14:39:16 +0100 Subject: [PATCH 067/161] deno: 1.20.5 -> 1.21.0 --- pkgs/development/web/deno/default.nix | 13 +++++++++---- pkgs/development/web/deno/librusty_v8.nix | 10 +++++----- 2 files changed, 14 insertions(+), 9 deletions(-) diff --git a/pkgs/development/web/deno/default.nix b/pkgs/development/web/deno/default.nix index 1bcb1f4fc60b..d61b7433bdf2 100644 --- a/pkgs/development/web/deno/default.nix +++ b/pkgs/development/web/deno/default.nix @@ -2,7 +2,6 @@ , lib , callPackage , fetchFromGitHub -, rust , rustPlatform , installShellFiles , libiconv @@ -17,15 +16,21 @@ rustPlatform.buildRustPackage rec { pname = "deno"; - version = "1.20.5"; + version = "1.21.0"; src = fetchFromGitHub { owner = "denoland"; repo = pname; rev = "v${version}"; - sha256 = "sha256-j8aYps70yPH7excYdU99zWbtVsr/ok5cRX2Z6DAW0qU="; + sha256 = "sha256-Sv9Keb+6vc6Lr+H/gAi9/4bmBO18gv9bqAjBIpOrtnk="; }; - cargoSha256 = "sha256-NIUb9ifA06cPwMU3tY0lWJLcK8a8hjkdu/nlADPxWNg="; + cargoSha256 = "sha256-EykIg8rU2VBag+3834SwMYkz9ZR6brOo/0NXXvrGqsU="; + + postPatch = '' + # upstream uses lld on aarch64-darwin for faster builds + # within nix lld looks for CoreFoundation rather than CoreFoundation.tbd and fails + substituteInPlace .cargo/config --replace '"-C", "link-arg=-fuse-ld=lld"' "" + ''; # Install completions post-install nativeBuildInputs = [ installShellFiles ]; diff --git a/pkgs/development/web/deno/librusty_v8.nix b/pkgs/development/web/deno/librusty_v8.nix index d60fdad1b081..e1a9b0de7536 100644 --- a/pkgs/development/web/deno/librusty_v8.nix +++ b/pkgs/development/web/deno/librusty_v8.nix @@ -11,11 +11,11 @@ let }; in fetch_librusty_v8 { - version = "0.41.0"; + version = "0.42.0"; shas = { - x86_64-linux = "sha256-f5HWLe1wF6nA+e5SwWd+DwXtYnOrcXI8kLIwdi8scmg="; - aarch64-linux = "sha256-tLCR1CBeM+1vRThsJNtEtKH6iJluxdCfnbCNkQQjNfM="; - x86_64-darwin = "sha256-KVBskUTr1SXL/hm9gnHvGGd3TO3QVESiZy0EYXDwPo0="; - aarch64-darwin = "sha256-KjBO6cgibo98P8n1kiMIT1cJ7Aa6BnQeMku71j4sgZY="; + x86_64-linux = "sha256-p8wC2r9+PKEabaHj0NF059TBSKOpE+rtZkqk1SXINzQ="; + aarch64-linux = "sha256-1mQQ5XmR+WcYW6BGfnUdsG4yzhwIal80Y5fWw4XAJ3g="; + x86_64-darwin = "sha256-a5Mu33gXn2X02WRdtO1hb9JRctmFTiCaLNhScz2D0J8="; + aarch64-darwin = "sha256-THEFn8nQDktXJlY1zpi2760KAS2eKEQ9O3Y+yqI2OYw="; }; } From 9f302132b28a44cd0291e06a85f08a3eb5732594 Mon Sep 17 00:00:00 2001 From: "R. Ryantm" Date: Fri, 22 Apr 2022 18:50:51 +0000 Subject: [PATCH 068/161] python310Packages.phonenumbers: 8.12.46 -> 8.12.47 --- pkgs/development/python-modules/phonenumbers/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/development/python-modules/phonenumbers/default.nix b/pkgs/development/python-modules/phonenumbers/default.nix index 603980645e4b..4f8dc93c671e 100644 --- a/pkgs/development/python-modules/phonenumbers/default.nix +++ b/pkgs/development/python-modules/phonenumbers/default.nix @@ -6,12 +6,12 @@ buildPythonPackage rec { pname = "phonenumbers"; - version = "8.12.46"; + version = "8.12.47"; format = "setuptools"; src = fetchPypi { inherit pname version; - sha256 = "sha256-HEQPYzbLSYk/8agybHC032k4Aq6YHyEPVFzUIVrEgTM="; + sha256 = "sha256-Vv1gXS9UYOHfIRcIW2U7s4MiKV7GWOasqvycl2hn1SI="; }; checkInputs = [ From 96993e273048e0887b7b4b36e5564a9a145f4d51 Mon Sep 17 00:00:00 2001 From: "R. 
Ryantm" Date: Fri, 22 Apr 2022 19:01:40 +0000 Subject: [PATCH 069/161] flyway: 8.5.1 -> 8.5.9 --- pkgs/development/tools/flyway/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/development/tools/flyway/default.nix b/pkgs/development/tools/flyway/default.nix index c7ecf4cace2d..3f3a2b190eb5 100644 --- a/pkgs/development/tools/flyway/default.nix +++ b/pkgs/development/tools/flyway/default.nix @@ -1,10 +1,10 @@ { lib, stdenv, fetchurl, jre_headless, makeWrapper }: stdenv.mkDerivation rec{ pname = "flyway"; - version = "8.5.1"; + version = "8.5.9"; src = fetchurl { url = "mirror://maven/org/flywaydb/flyway-commandline/${version}/flyway-commandline-${version}.tar.gz"; - sha256 = "sha256-aD8++IhpV89hgx6pIe8CDg07ps+44si4Z6RcZD8g1Fc="; + sha256 = "sha256-AOfCYWjn8XyyFdz6BbYOysEE1TADfIk8CyPBHsQJTDE="; }; nativeBuildInputs = [ makeWrapper ]; dontBuild = true; From 8be8be71cff14d0e4f01e2fa8fd579ffbfde39e7 Mon Sep 17 00:00:00 2001 From: "R. Ryantm" Date: Fri, 22 Apr 2022 20:00:49 +0000 Subject: [PATCH 070/161] ghorg: 1.7.12 -> 1.7.13 --- .../version-management/git-and-tools/ghorg/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/applications/version-management/git-and-tools/ghorg/default.nix b/pkgs/applications/version-management/git-and-tools/ghorg/default.nix index 87d11dd9c1e9..f991bccf0e28 100644 --- a/pkgs/applications/version-management/git-and-tools/ghorg/default.nix +++ b/pkgs/applications/version-management/git-and-tools/ghorg/default.nix @@ -2,13 +2,13 @@ buildGoModule rec { pname = "ghorg"; - version = "1.7.12"; + version = "1.7.13"; src = fetchFromGitHub { owner = "gabrie30"; repo = "ghorg"; rev = "v${version}"; - sha256 = "sha256-y5o4yY5M9eDKN9LtbrPR29EafN3X7J51ARCEpFtLxCo="; + sha256 = "sha256-EQCu+2qMKu+e6G5iXAQn5cz0MZqHrF2wnKNO8Fkpp/Y="; }; doCheck = false; From da09d908bec3586eadedb3d06271caafee965bbe Mon Sep 17 00:00:00 2001 From: Michael Weiss Date: Fri, 22 Apr 2022 22:49:44 +0200 Subject: [PATCH 071/161] chromiumDev: 102.0.4997.0 -> 102.0.5005.12 --- .../networking/browsers/chromium/upstream-info.json | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/pkgs/applications/networking/browsers/chromium/upstream-info.json b/pkgs/applications/networking/browsers/chromium/upstream-info.json index 3d27f5b2f4cc..5636a87fe7d7 100644 --- a/pkgs/applications/networking/browsers/chromium/upstream-info.json +++ b/pkgs/applications/networking/browsers/chromium/upstream-info.json @@ -32,15 +32,15 @@ } }, "dev": { - "version": "102.0.4997.0", - "sha256": "05y9b426wcarq18faw5i79qrfqy158dinvba5d7lwrcjnbqyfr1f", - "sha256bin64": "0846y3dbs7vghrb8s2s57a2lk7a0x2dha5q0d915qrn29g5x9c6p", + "version": "102.0.5005.12", + "sha256": "11n03hz3g8h7srywxrjwrdrxybdjvmdjrnigjlrwjkydprg1l7ab", + "sha256bin64": "0hc56a98ikkbgdw36dpz9k6r15jmjmnm7faml8z59vixxlvkrw7y", "deps": { "gn": { - "version": "2022-04-07", + "version": "2022-04-14", "url": "https://gn.googlesource.com/gn", - "rev": "ae110f8b525009255ba1f9ae96982176d3bfad3d", - "sha256": "131y1v2m59hn7s00zc9p7rhfi956p744mp96g2i80f0i020dyl6w" + "rev": "fd9f2036f26d83f9fcfe93042fb952e5a7fe2167", + "sha256": "0b5xs0chcv3hfhy71rycsmgxnqbm375a333hwav8929k9cbi5p9h" } } }, From 163c53865c782adb12302e98265b72a3106759a0 Mon Sep 17 00:00:00 2001 From: Anders Kaseorg Date: Fri, 22 Apr 2022 13:54:46 -0700 Subject: [PATCH 072/161] =?UTF-8?q?electron=5F15:=2015.5.1=20=E2=86=92=201?= =?UTF-8?q?5.5.2?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 
Content-Transfer-Encoding: 8bit Signed-off-by: Anders Kaseorg --- pkgs/development/tools/electron/default.nix | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/pkgs/development/tools/electron/default.nix b/pkgs/development/tools/electron/default.nix index 0ed800eb7c63..66ece9a6d9e0 100644 --- a/pkgs/development/tools/electron/default.nix +++ b/pkgs/development/tools/electron/default.nix @@ -83,14 +83,14 @@ rec { headers = "181b2agnf4b5s81p2rdnd6wkw9c2ri4cv1x0wwf7rj60axvzvydm"; }; - electron_15 = mkElectron "15.5.1" { - armv7l-linux = "222e9ad3210cf538c45f938b5b1a5d901b36fb6ec09461446e4ab4ea6ee9bc1b"; - aarch64-linux = "16a0053036a9f6ba947445c85bacda720975a23a910e78cab8dce1bf8ad2acf7"; - x86_64-linux = "99867ef0eef53f214ef4c8a4ec0223890d9c38b73fd6bf4fb49a9e400dff5726"; - i686-linux = "6d65c900c77b9e259932b20978d78a56bb04fad590f29a5935d8a31f5e0891db"; - x86_64-darwin = "54e6312d5e06e16eab78d86ddb01553864b66f608d017c7a1c4a0a318be32081"; - aarch64-darwin = "6472bea7f38f38c20a8c87e6daf70ca0d00b7c29b1641eeda6c8d45f9e60784b"; - headers = "0dgxyndsyhk5m9m8iiy2h3vg1gz1xg5nj75537apsh9mdiizsfhk"; + electron_15 = mkElectron "15.5.2" { + armv7l-linux = "da434095fd7cc17d85ebca5eab3510ec7ff73ace4edc933fe2f27a716ca711c0"; + aarch64-linux = "bcec3f962c7acefc8690680a19df9d83721db7e5db55c7b7a8946365139457a6"; + x86_64-linux = "a4a95888c313dbe279f5f9d9dfd99f56a2a1b6b905fb6cba3b284322fe19a530"; + i686-linux = "0fd1dd9027bfdbc573fd39e163b6b3f8c07e8ac1586a554e65e7324e7fa7ea35"; + x86_64-darwin = "688cc1d501d32afa5efe1883be42446b61f404d4a5e84bd9815254b5437c869b"; + aarch64-darwin = "b43237d7612ada2f2dccaf6e13fa70ba938dc48f1e2f895558949dd372171db7"; + headers = "0jbxazkjkm8g8b8d0ini2l4q9z7885mz5vyj74lf85lqdfqzgzc0"; }; electron_16 = mkElectron "16.2.1" { From d2a5b1281c6a97f40cd29dc5732cc1ee312aa632 Mon Sep 17 00:00:00 2001 From: Anders Kaseorg Date: Fri, 22 Apr 2022 13:55:07 -0700 Subject: [PATCH 073/161] =?UTF-8?q?electron=5F16:=2016.2.1=20=E2=86=92=201?= =?UTF-8?q?6.2.3?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Anders Kaseorg --- pkgs/development/tools/electron/default.nix | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/pkgs/development/tools/electron/default.nix b/pkgs/development/tools/electron/default.nix index 66ece9a6d9e0..f274882de2d1 100644 --- a/pkgs/development/tools/electron/default.nix +++ b/pkgs/development/tools/electron/default.nix @@ -93,14 +93,14 @@ rec { headers = "0jbxazkjkm8g8b8d0ini2l4q9z7885mz5vyj74lf85lqdfqzgzc0"; }; - electron_16 = mkElectron "16.2.1" { - armv7l-linux = "d55daeffed60cfd0c2de4ea8cab102ec5957dbd0cd863add881080e891b02334"; - aarch64-linux = "6446c665a1c6d7648dbeae94a669423b4c6768bafa96f0d3f8072b8c5d5a949e"; - x86_64-linux = "7b27a8531a8ef60fa27dd119422a81a710e09f7d8cb01777f1fe7b7ab67e3ac4"; - i686-linux = "eb7e0c8113af80f0e4edbae35d2cca718c1e98966da87041304fa6afb2d3e4c0"; - x86_64-darwin = "0386e3318d4b5cfabccc226ca88bd9946669901f381e3817d1d414b1356e472c"; - aarch64-darwin = "280660c0333702de9d95bcf9a21d3db8d148bef2a5946bb57d20b9e5f2aadb96"; - headers = "121wrzy81h9m12y83mb0xs9jbm5l4w31f831lmb4wmkkg54bvcwj"; + electron_16 = mkElectron "16.2.3" { + armv7l-linux = "9b442b17349dcec08e6efadecf9d338a7f4b2955635fed2a78374af850ceee5d"; + aarch64-linux = "eec581d162b494a7bcba4b0221f3beac9f359b48fb8612c83ce6ad7ac63094cd"; + x86_64-linux = "2c032baff08b40f106dfcd86e7b63c6275f13e64d26b8c301af704563edf8600"; + i686-linux = 
"227e9f5670a2d92a814eeda41c7ef4efd8fc6150bee659e0f322a8d2481ecdec"; + x86_64-darwin = "3a51ad480d4085a822b0526018805e64fe82f93b954abe500eaebb3c81c80d45"; + aarch64-darwin = "38c736c336abf8747040f22542d6a0bd785b5a10f6ba01d71335cc5f77a3d0b5"; + headers = "1a9kb89iigwmahjwq14i74rr6gj21gmpc106pg0il73c50khaxpz"; }; electron_17 = mkElectron "17.3.1" { From 015cb5656abc39d18b142c843b41964926581d02 Mon Sep 17 00:00:00 2001 From: Anders Kaseorg Date: Fri, 22 Apr 2022 13:55:37 -0700 Subject: [PATCH 074/161] =?UTF-8?q?electron=5F17:=2017.3.1=20=E2=86=92=201?= =?UTF-8?q?7.4.1?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Anders Kaseorg --- pkgs/development/tools/electron/default.nix | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/pkgs/development/tools/electron/default.nix b/pkgs/development/tools/electron/default.nix index f274882de2d1..389025981740 100644 --- a/pkgs/development/tools/electron/default.nix +++ b/pkgs/development/tools/electron/default.nix @@ -103,13 +103,13 @@ rec { headers = "1a9kb89iigwmahjwq14i74rr6gj21gmpc106pg0il73c50khaxpz"; }; - electron_17 = mkElectron "17.3.1" { - armv7l-linux = "ad7864f9a580b3fd8865480caa6cccbaefa5d7e5fdbe455700ab711b0adf2228"; - aarch64-linux = "e0f03aff5339e4dab85cbc970568ca31599ee0f032d1e766c23deea7d96654b0"; - x86_64-linux = "59d13b1d060523d1098e6e1e5bf7c6f2494b713321541f863dc459a42d2c40a8"; - i686-linux = "6d31d5117f4508bc7e0f9ecb6423d266b47fb5f074b0dfa8ddcfc2298c764151"; - x86_64-darwin = "c70a6e906ae7ce02552f1722022147f2416f27de0f98b88a0b7b1a09e341426f"; - aarch64-darwin = "76a9c8cbfa578c7e6678e6ccab2417374d1b3d3d81c6cac5ceea0aa058c12a8f"; - headers = "11k5sw7wk0fjjdlhcvbkwpffslngm9ns8l4c7rxa4qx8n1six9sf"; + electron_17 = mkElectron "17.4.1" { + armv7l-linux = "d1329468cb21039fb5b503fc813381f9be4d43422383b44f859b450be0e4200c"; + aarch64-linux = "70d29bca5f884753341a11b0445ccf159c0f43dfae16eb60c53946582c3128b0"; + x86_64-linux = "f9437a86947c418d92eabea14b268dcc4a5dde74cc6927530c1e9195e4aeddf8"; + i686-linux = "436f44d778acc41a4a07cc4ee23ab861e2c4d72e4b1335e3c4ccfd4855deb594"; + x86_64-darwin = "0357bcf841bc246d01df8b838fa5de9856bd48f4fa6b2b4f3053ba3db492e54b"; + aarch64-darwin = "827f6ecb7bc4d4ed88eb22e1b02615465ad13ace918d294c873a67a34c207dcf"; + headers = "064qnwv6gqn502r1cv7vi6ahvgyxcqq7mv0rmk2bxfpkr5x6hgmh"; }; } From 2b9e207a04b34b66632ea2cfca2ae0e4e52c5999 Mon Sep 17 00:00:00 2001 From: Anders Kaseorg Date: Fri, 22 Apr 2022 13:57:14 -0700 Subject: [PATCH 075/161] electron_18: init at 18.1.0 Signed-off-by: Anders Kaseorg --- pkgs/development/tools/electron/default.nix | 12 +++++++++++- pkgs/top-level/all-packages.nix | 3 ++- 2 files changed, 13 insertions(+), 2 deletions(-) diff --git a/pkgs/development/tools/electron/default.nix b/pkgs/development/tools/electron/default.nix index 389025981740..1eb6144b4c3b 100644 --- a/pkgs/development/tools/electron/default.nix +++ b/pkgs/development/tools/electron/default.nix @@ -23,7 +23,7 @@ let in rec { - electron = electron_17; + electron = electron_18; electron_9 = mkElectron "9.4.4" { x86_64-linux = "781d6ca834d415c71078e1c2c198faba926d6fce19e31448bbf4450869135450"; @@ -112,4 +112,14 @@ rec { aarch64-darwin = "827f6ecb7bc4d4ed88eb22e1b02615465ad13ace918d294c873a67a34c207dcf"; headers = "064qnwv6gqn502r1cv7vi6ahvgyxcqq7mv0rmk2bxfpkr5x6hgmh"; }; + + electron_18 = mkElectron "18.1.0" { + armv7l-linux = "c2296f3f68938aab4cef07b747d2dd28973625b6717163b9c51fbcf1509fd8ff"; + aarch64-linux = 
"13bd4998d0d86ccf4cb87d11f9581d5a6063b4585fc4828e130054527dfb9179"; + x86_64-linux = "7f95069d58e6843e6ae2b8f02619d4dcef7db4c35bd6e90b903268d83b939fba"; + i686-linux = "e952d06b3828695636de522e3af8140543ecbe02d7351dd002b0ffb9e2a09705"; + x86_64-darwin = "24dd64a66b820c9553c5e5570907da6c98e808d33fac98072b9c2a8f1659cb14"; + aarch64-darwin = "97adf13306c9b3b304d3e9ddf68f5f7fb9b79c9a1342114e3671182f3cc9e808"; + headers = "0gl30q2igr9c8sjlhyj5w57dm5navpkas5hnz9yl7sasbx66v10v"; + }; } diff --git a/pkgs/top-level/all-packages.nix b/pkgs/top-level/all-packages.nix index f00ee1c353ff..afe11632160e 100644 --- a/pkgs/top-level/all-packages.nix +++ b/pkgs/top-level/all-packages.nix @@ -14448,7 +14448,8 @@ with pkgs; electron_14 electron_15 electron_16 - electron_17; + electron_17 + electron_18; autobuild = callPackage ../development/tools/misc/autobuild { }; From d2822bdfed09b54112930a973150390f3e017186 Mon Sep 17 00:00:00 2001 From: Armeen Mahdian Date: Fri, 22 Apr 2022 17:10:42 -0500 Subject: [PATCH 076/161] winpdb: remove --- pkgs/development/tools/winpdb/default.nix | 55 ----------------------- pkgs/top-level/aliases.nix | 1 + pkgs/top-level/all-packages.nix | 2 - 3 files changed, 1 insertion(+), 57 deletions(-) delete mode 100644 pkgs/development/tools/winpdb/default.nix diff --git a/pkgs/development/tools/winpdb/default.nix b/pkgs/development/tools/winpdb/default.nix deleted file mode 100644 index 8f8d03db950b..000000000000 --- a/pkgs/development/tools/winpdb/default.nix +++ /dev/null @@ -1,55 +0,0 @@ -{ lib, fetchurl, python2Packages, makeDesktopItem }: - -python2Packages.buildPythonApplication rec { - pname = "winpdb"; - version = "1.4.8"; - namePrefix = ""; - - src = fetchurl { - url = "https://winpdb.googlecode.com/files/${pname}-${version}.tar.gz"; - sha256 = "0vkpd24r40j928vc04c721innv0168sbllg97v4zw10adm24d8fs"; - }; - - propagatedBuildInputs = [ python2Packages.wxPython ]; - - desktopItem = makeDesktopItem { - name = "winpdb"; - exec = "winpdb"; - icon = "winpdb"; - comment = "Platform independend Python debugger"; - desktopName = "Winpdb"; - genericName = "Python Debugger"; - categories = [ "Development" "Debugger" ]; - }; - - # Don't call gnome-terminal with "--disable-factory" flag, which is - # unsupported since GNOME >= 3.10. Apparently, debian also does this fix: - # https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=757718 - postPatch = '' - sed -i "s/--disable-factory//" rpdb2.py - ''; - - postInstall = '' - mkdir -p "$out"/share/applications - cp "$desktopItem"/share/applications/* "$out"/share/applications/ - - mkdir -p "$out"/share/icons - cp artwork/winpdb-icon.svg "$out"/share/icons/winpdb.svg - ''; - - # no tests - doCheck = false; - - meta = with lib; { - description = "Platform independent Python debugger"; - longDescription = '' - Winpdb is a platform independent GPL Python debugger with support for - multiple threads, namespace modification, embedded debugging, encrypted - communication and is up to 20 times faster than pdb. 
- ''; - homepage = "http://winpdb.org/"; - license = licenses.gpl2Plus; - platforms = platforms.all; - maintainers = [ maintainers.bjornfor ]; - }; -} diff --git a/pkgs/top-level/aliases.nix b/pkgs/top-level/aliases.nix index db330743e50d..7573cd45096b 100644 --- a/pkgs/top-level/aliases.nix +++ b/pkgs/top-level/aliases.nix @@ -1347,6 +1347,7 @@ mapAliases ({ wineStaging = throw "'wineStaging' has been renamed to/replaced by 'wine-staging'"; # Converted to throw 2022-02-22 wineUnstable = throw "'wineUnstable' has been renamed to/replaced by 'winePackages.unstable'"; # Converted to throw 2022-02-22 wineWayland = wine-wayland; + winpdb = throw "winpdb has been removed: abandoned by upstream"; # Added 2022-04-22 winusb = throw "'winusb' has been renamed to/replaced by 'woeusb'"; # Converted to throw 2022-02-22 wireguard = throw "'wireguard' has been renamed to/replaced by 'wireguard-tools'"; # Converted to throw 2022-02-22 wxmupen64plus = throw "wxmupen64plus was removed because the upstream disappeared"; # Added 2022-01-31 diff --git a/pkgs/top-level/all-packages.nix b/pkgs/top-level/all-packages.nix index 434d94509ab4..dad33e22ac2f 100644 --- a/pkgs/top-level/all-packages.nix +++ b/pkgs/top-level/all-packages.nix @@ -16341,8 +16341,6 @@ with pkgs; zydis = callPackage ../development/libraries/zydis { }; - winpdb = callPackage ../development/tools/winpdb { }; - grabserial = callPackage ../development/tools/grabserial { }; mypy = with python3Packages; toPythonApplication mypy; From 5ce8c3404395d5b6ef92eb6c456eaeeea1964c67 Mon Sep 17 00:00:00 2001 From: Ryan Horiguchi Date: Sat, 23 Apr 2022 01:13:02 +0200 Subject: [PATCH 077/161] gnomeExtensions: update-extensions.py format --- .../gnome/extensions/update-extensions.py | 26 +++++++++---------- 1 file changed, 12 insertions(+), 14 deletions(-) diff --git a/pkgs/desktops/gnome/extensions/update-extensions.py b/pkgs/desktops/gnome/extensions/update-extensions.py index 581781fb11a8..84eed2093024 100755 --- a/pkgs/desktops/gnome/extensions/update-extensions.py +++ b/pkgs/desktops/gnome/extensions/update-extensions.py @@ -1,16 +1,16 @@ #!/usr/bin/env nix-shell #!nix-shell -I nixpkgs=../../../.. -i python3 -p python3 -import json -import urllib.request -import urllib.error -from typing import List, Dict, Optional, Any, Tuple -import logging -from operator import itemgetter -import subprocess -import zipfile -import io import base64 +import io +import json +import logging +import subprocess +import urllib.error +import urllib.request +import zipfile +from operator import itemgetter +from typing import List, Dict, Optional, Any, Tuple # We don't want all those deprecated legacy extensions # Group extensions by GNOME "major" version for compatibility reasons @@ -21,14 +21,12 @@ supported_versions = { "42": "42", } - # Some type alias to increase readility of complex compound types PackageName = str ShellVersion = str Uuid = str ExtensionVersion = int - # Keep track of all names that have been used till now to detect collisions. # This works because we deterministically process all extensions in historical order # The outer dict level is the shell version, as we are tracking duplicates only per same Shell version. 
@@ -69,7 +67,7 @@ def fetch_extension_data(uuid: str, version: str) -> Tuple[str, str]: def generate_extension_versions( - extension_version_map: Dict[ShellVersion, ExtensionVersion], uuid: str + extension_version_map: Dict[ShellVersion, ExtensionVersion], uuid: str ) -> Dict[ShellVersion, Dict[str, str]]: """ Takes in a mapping from shell versions to extension versions and transforms it the way we need it: @@ -127,7 +125,7 @@ def pname_from_url(url: str) -> Tuple[str, str]: """ url = url.split("/") # type: ignore - return (url[3], url[2]) + return url[3], url[2] def process_extension(extension: Dict[str, Any]) -> Optional[Dict[str, Any]]: @@ -225,7 +223,7 @@ def scrape_extensions_index() -> List[Dict[str, Any]]: logging.info("Scraping page " + str(page)) try: with urllib.request.urlopen( - f"https://extensions.gnome.org/extension-query/?n_per_page=25&page={page}" + f"https://extensions.gnome.org/extension-query/?n_per_page=25&page={page}" ) as response: data = json.loads(response.read().decode())["extensions"] responseLength = len(data) From bce4a670cc2ffbbbf222249bb2442a73509b68ae Mon Sep 17 00:00:00 2001 From: Ryan Horiguchi Date: Sat, 23 Apr 2022 01:16:40 +0200 Subject: [PATCH 078/161] gnomeExtensions: update-extensions.py fix where metadata.json missing --- pkgs/desktops/gnome/extensions/update-extensions.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/pkgs/desktops/gnome/extensions/update-extensions.py b/pkgs/desktops/gnome/extensions/update-extensions.py index 84eed2093024..619569f8d3d7 100755 --- a/pkgs/desktops/gnome/extensions/update-extensions.py +++ b/pkgs/desktops/gnome/extensions/update-extensions.py @@ -46,6 +46,10 @@ def fetch_extension_data(uuid: str, version: str) -> Tuple[str, str]: uuid = uuid.replace("@", "") url: str = f"https://extensions.gnome.org/extension-data/{uuid}.v{version}.shell-extension.zip" + # TODO remove when Vitals@CoreCoding.com version != 53, this extension has a missing manifest.json + if url == 'https://extensions.gnome.org/extension-data/VitalsCoreCoding.com.v53.shell-extension.zip': + url = 'https://extensions.gnome.org/extension-data/VitalsCoreCoding.com.v53.shell-extension_v1BI2FB.zip' + # Yes, we download that file three times: # The first time is for the maintainter, so they may have a personal backup to fix potential issues From 30818451447ab1c076caa5c36fc0310895f2ab3d Mon Sep 17 00:00:00 2001 From: Ryan Horiguchi Date: Sat, 23 Apr 2022 01:19:38 +0200 Subject: [PATCH 079/161] gnomeExtensions: update-extensions.py fix typos --- pkgs/desktops/gnome/extensions/update-extensions.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/pkgs/desktops/gnome/extensions/update-extensions.py b/pkgs/desktops/gnome/extensions/update-extensions.py index 619569f8d3d7..3430e46d0401 100755 --- a/pkgs/desktops/gnome/extensions/update-extensions.py +++ b/pkgs/desktops/gnome/extensions/update-extensions.py @@ -21,7 +21,7 @@ supported_versions = { "42": "42", } -# Some type alias to increase readility of complex compound types +# Some type alias to increase readability of complex compound types PackageName = str ShellVersion = str Uuid = str @@ -52,7 +52,7 @@ def fetch_extension_data(uuid: str, version: str) -> Tuple[str, str]: # Yes, we download that file three times: - # The first time is for the maintainter, so they may have a personal backup to fix potential issues + # The first time is for the maintainer, so they may have a personal backup to fix potential issues # subprocess.run( # ["wget", url], capture_output=True, text=True 
# ) @@ -116,7 +116,7 @@ def generate_extension_versions( "version": str(extension_version), "sha256": sha256, # The downloads are impure, their metadata.json may change at any time. - # Thus, be back it up / pin it to remain deterministic + # Thus, we back it up / pin it to remain deterministic # Upstream issue: https://gitlab.gnome.org/Infrastructure/extensions-web/-/issues/137 "metadata": metadata, } @@ -153,7 +153,7 @@ def process_extension(extension: Dict[str, Any]) -> Optional[Dict[str, Any]]: Don't make any assumptions on it, and treat it like an opaque string! "link" follows the following schema: "/extension/$number/$string/" The number is monotonically increasing and unique to every extension. - The string is usually derived from the extensions's name (but shortened, kebab-cased and URL friendly). + The string is usually derived from the extension name (but shortened, kebab-cased and URL friendly). It may diverge from the actual name. The keys of "shell_version_map" are GNOME Shell version numbers. From c88d12200cdfd049f2915922c7cc443ac13430b2 Mon Sep 17 00:00:00 2001 From: Ryan Horiguchi Date: Sat, 23 Apr 2022 01:20:37 +0200 Subject: [PATCH 080/161] gnomeExtensions: update-extensions.py improve collision warning --- pkgs/desktops/gnome/extensions/update-extensions.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkgs/desktops/gnome/extensions/update-extensions.py b/pkgs/desktops/gnome/extensions/update-extensions.py index 3430e46d0401..6a3637292f73 100755 --- a/pkgs/desktops/gnome/extensions/update-extensions.py +++ b/pkgs/desktops/gnome/extensions/update-extensions.py @@ -198,7 +198,7 @@ def process_extension(extension: Dict[str, Any]) -> Optional[Dict[str, Any]]: for shell_version in shell_version_map.keys(): if pname in package_name_registry[shell_version]: - logging.warning(f"Package name '{pname}' is colliding.") + logging.warning(f"Package name '{pname}' for GNOME '{shell_version}' is colliding.") package_name_registry[shell_version][pname].append(uuid) else: package_name_registry[shell_version][pname] = [uuid] From 51fdb8f239cb69f1e88bf38a5b125d83adff0f22 Mon Sep 17 00:00:00 2001 From: Ryan Horiguchi Date: Sat, 23 Apr 2022 01:23:46 +0200 Subject: [PATCH 081/161] gnomeExtensions: update-extensions.py use relative paths --- pkgs/desktops/gnome/extensions/update-extensions.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/pkgs/desktops/gnome/extensions/update-extensions.py b/pkgs/desktops/gnome/extensions/update-extensions.py index 6a3637292f73..c5542f03d8eb 100755 --- a/pkgs/desktops/gnome/extensions/update-extensions.py +++ b/pkgs/desktops/gnome/extensions/update-extensions.py @@ -10,6 +10,7 @@ import urllib.error import urllib.request import zipfile from operator import itemgetter +from pathlib import Path from typing import List, Dict, Optional, Any, Tuple # We don't want all those deprecated legacy extensions @@ -35,6 +36,8 @@ package_name_registry: Dict[ShellVersion, Dict[PackageName, List[Uuid]]] = {} for shell_version in supported_versions.keys(): package_name_registry[shell_version] = {} +updater_dir_path = Path(__file__).resolve().parent + def fetch_extension_data(uuid: str, version: str) -> Tuple[str, str]: """ @@ -271,7 +274,7 @@ if __name__ == "__main__": f"Done. 
Writing results to extensions.json ({len(processed_extensions)} extensions in total)" ) - with open("extensions.json", "w") as out: + with open(updater_dir_path / "extensions.json", "w") as out: # Manually pretty-print the outer level, but then do one compact line per extension # This allows for the diffs to be manageable (one line of change per extension) despite their quantity for index, extension in enumerate(processed_extensions): @@ -283,14 +286,14 @@ if __name__ == "__main__": out.write("\n") out.write("]\n") - with open("extensions.json", "r") as out: + with open(updater_dir_path / "extensions.json", "r") as out: # Check that the generated file actually is valid JSON, just to be sure json.load(out) logging.info( "Done. Writing name collisions to collisions.json (please check manually)" ) - with open("collisions.json", "w") as out: + with open(updater_dir_path / "collisions.json", "w") as out: # Filter out those that are not duplicates package_name_registry_filtered: Dict[ShellVersion, Dict[PackageName, List[Uuid]]] = { # The outer level keys are shell versions From dcb93d0080d17d9ef94afeaa17d484f6ca882508 Mon Sep 17 00:00:00 2001 From: Ryan Horiguchi Date: Sat, 23 Apr 2022 01:24:38 +0200 Subject: [PATCH 082/161] gnomeExtensions: update-extensions.py improve download --- .../gnome/extensions/update-extensions.py | 27 +++++++++++-------- 1 file changed, 16 insertions(+), 11 deletions(-) diff --git a/pkgs/desktops/gnome/extensions/update-extensions.py b/pkgs/desktops/gnome/extensions/update-extensions.py index c5542f03d8eb..7448864ffa9c 100755 --- a/pkgs/desktops/gnome/extensions/update-extensions.py +++ b/pkgs/desktops/gnome/extensions/update-extensions.py @@ -2,13 +2,11 @@ #!nix-shell -I nixpkgs=../../../.. -i python3 -p python3 import base64 -import io import json import logging import subprocess import urllib.error import urllib.request -import zipfile from operator import itemgetter from pathlib import Path from typing import List, Dict, Optional, Any, Tuple @@ -53,22 +51,29 @@ def fetch_extension_data(uuid: str, version: str) -> Tuple[str, str]: if url == 'https://extensions.gnome.org/extension-data/VitalsCoreCoding.com.v53.shell-extension.zip': url = 'https://extensions.gnome.org/extension-data/VitalsCoreCoding.com.v53.shell-extension_v1BI2FB.zip' - # Yes, we download that file three times: + # Yes, we download that file two times: # The first time is for the maintainer, so they may have a personal backup to fix potential issues # subprocess.run( # ["wget", url], capture_output=True, text=True # ) - # The second time, we extract the metadata.json because we need it too - with urllib.request.urlopen(url) as response: - data = zipfile.ZipFile(io.BytesIO(response.read()), 'r') - metadata = base64.b64encode(data.read('metadata.json')).decode() + # The second time, we add the file to store + process = subprocess.run( + ["nix-prefetch-url", "--unpack", "--print-path", url], capture_output=True, text=True + ) - # The third time is to get the file into the store and to get its hash - hash = subprocess.run( - ["nix-prefetch-url", "--unpack", url], capture_output=True, text=True - ).stdout.strip() + lines = process.stdout.splitlines() + + # Get hash from first line of nix-prefetch-url output + hash = lines[0].strip() + + # Get path from second line of nix-prefetch-url output + path = Path(lines[1].strip()) + + # Get metadata.json content from nix-store + with open(path / "metadata.json", "r") as out: + metadata = base64.b64encode(out.read().encode("ascii")).decode() return hash, metadata 
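A note on the nix-prefetch-url invocation added in the patch above: when --print-path is given, its stdout carries exactly two lines, the base-32 sha256 on the first and the resulting store path on the second, and with --unpack that path is the unpacked archive rather than the zip file itself. That is why the script takes lines[0] as the hash and lines[1] as the directory from which metadata.json is read and base64-encoded. A minimal sketch of the expected output, using a hypothetical extension zip URL with placeholder values (the real hash and store-path name will differ):

    $ nix-prefetch-url --unpack --print-path \
        https://extensions.gnome.org/extension-data/example.v1.shell-extension.zip
    <52-character base-32 sha256>
    /nix/store/<hash>-example.v1.shell-extension.zip

The second line names a directory in the store containing the unpacked files, so metadata.json can be opened from it directly without downloading the archive again.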
From 7a54239267392e0209137e12e2567cd5262d346d Mon Sep 17 00:00:00 2001 From: Ryan Horiguchi Date: Sat, 23 Apr 2022 01:25:09 +0200 Subject: [PATCH 083/161] gnomeExtensions: update-extensions.py move info after writing to file --- .../gnome/extensions/update-extensions.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/pkgs/desktops/gnome/extensions/update-extensions.py b/pkgs/desktops/gnome/extensions/update-extensions.py index 7448864ffa9c..849832a70066 100755 --- a/pkgs/desktops/gnome/extensions/update-extensions.py +++ b/pkgs/desktops/gnome/extensions/update-extensions.py @@ -275,10 +275,6 @@ if __name__ == "__main__": processed_extensions.append(processed_extension) logging.debug(f"Processed {num + 1} / {len(raw_extensions)}") - logging.info( - f"Done. Writing results to extensions.json ({len(processed_extensions)} extensions in total)" - ) - with open(updater_dir_path / "extensions.json", "w") as out: # Manually pretty-print the outer level, but then do one compact line per extension # This allows for the diffs to be manageable (one line of change per extension) despite their quantity @@ -291,13 +287,14 @@ if __name__ == "__main__": out.write("\n") out.write("]\n") + logging.info( + f"Done. Writing results to extensions.json ({len(processed_extensions)} extensions in total)" + ) + with open(updater_dir_path / "extensions.json", "r") as out: # Check that the generated file actually is valid JSON, just to be sure json.load(out) - logging.info( - "Done. Writing name collisions to collisions.json (please check manually)" - ) with open(updater_dir_path / "collisions.json", "w") as out: # Filter out those that are not duplicates package_name_registry_filtered: Dict[ShellVersion, Dict[PackageName, List[Uuid]]] = { @@ -309,3 +306,7 @@ if __name__ == "__main__": } json.dump(package_name_registry_filtered, out, indent=2, ensure_ascii=False) out.write("\n") + + logging.info( + "Done. Writing name collisions to collisions.json (please check manually)" + ) From d13f1f869f4d60a0b01cc7b7d3bbe8899a5d545b Mon Sep 17 00:00:00 2001 From: Ryan Horiguchi Date: Sat, 23 Apr 2022 01:34:52 +0200 Subject: [PATCH 084/161] gnomeExtensions: update-extensions.py make var more pythonic --- pkgs/desktops/gnome/extensions/update-extensions.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/desktops/gnome/extensions/update-extensions.py b/pkgs/desktops/gnome/extensions/update-extensions.py index 849832a70066..f6e831c7b536 100755 --- a/pkgs/desktops/gnome/extensions/update-extensions.py +++ b/pkgs/desktops/gnome/extensions/update-extensions.py @@ -238,13 +238,13 @@ def scrape_extensions_index() -> List[Dict[str, Any]]: f"https://extensions.gnome.org/extension-query/?n_per_page=25&page={page}" ) as response: data = json.loads(response.read().decode())["extensions"] - responseLength = len(data) + response_length = len(data) for extension in data: extensions.append(extension) # If our page isn't "full", it must have been the last one - if responseLength < 25: + if response_length < 25: logging.debug( f"\tThis page only has {responseLength} entries, so it must be the last one." 
) From 57f964efe9f63d928e087ab49b32a74bf14a3e03 Mon Sep 17 00:00:00 2001 From: AndersonTorres Date: Thu, 21 Apr 2022 20:51:32 -0300 Subject: [PATCH 085/161] nickel: init at 0.1.0 --- .../interpreters/nickel/default.nix | 33 +++++++++++++++++++ pkgs/top-level/all-packages.nix | 2 ++ 2 files changed, 35 insertions(+) create mode 100644 pkgs/development/interpreters/nickel/default.nix diff --git a/pkgs/development/interpreters/nickel/default.nix b/pkgs/development/interpreters/nickel/default.nix new file mode 100644 index 000000000000..7a097391ba5d --- /dev/null +++ b/pkgs/development/interpreters/nickel/default.nix @@ -0,0 +1,33 @@ +{ lib +, rustPlatform +, fetchFromGitHub +}: + +rustPlatform.buildRustPackage rec { + pname = "nickel"; + version = "0.1.0"; + + src = fetchFromGitHub { + owner = "tweag"; + repo = pname; + rev = "refs/tags/${version}"; # because pure ${version} doesn't work + hash = "sha256-St8oK9vP2cAhsNindkebtAMeRPwYggP9E4CciSZc7oA="; + }; + + cargoSha256 = "sha256-VsyK/api8acIpADpXQ8RdbRLiZwHFSDH0vwQrZQ8zp4="; + + meta = with lib; { + homepage = "https://nickel-lang.org/"; + description = "Better configuration for less"; + longDescription = '' + Nickel is the cheap configuration language. + + Its purpose is to automate the generation of static configuration files - + think JSON, YAML, XML, or your favorite data representation language - + that are then fed to another system. It is designed to have a simple, + well-understood core: it is in essence JSON with functions. + ''; + license = licenses.mit; + maintainers = with maintainers; [ AndersonTorres ]; + }; +} diff --git a/pkgs/top-level/all-packages.nix b/pkgs/top-level/all-packages.nix index 434d94509ab4..b16389474d3c 100644 --- a/pkgs/top-level/all-packages.nix +++ b/pkgs/top-level/all-packages.nix @@ -8650,6 +8650,8 @@ with pkgs; npapi_sdk = callPackage ../development/libraries/npapi-sdk {}; + nickel = callPackage ../development/interpreters/nickel { }; + npiet = callPackage ../development/interpreters/npiet { }; npth = callPackage ../development/libraries/npth {}; From 348b256e9848bb5141cc0f435b51e23bb96897a7 Mon Sep 17 00:00:00 2001 From: Kira Bruneau Date: Fri, 22 Apr 2022 19:19:56 -0400 Subject: [PATCH 086/161] python3Packages.xxhash: fix version with setuptools-scm Since v3.0.0, xxhash now uses setuptools-scm to manage its version --- pkgs/development/python-modules/xxhash/default.nix | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/pkgs/development/python-modules/xxhash/default.nix b/pkgs/development/python-modules/xxhash/default.nix index f70526da6a10..ff4eeff35459 100644 --- a/pkgs/development/python-modules/xxhash/default.nix +++ b/pkgs/development/python-modules/xxhash/default.nix @@ -1,6 +1,7 @@ { lib , buildPythonPackage , fetchPypi +, setuptools-scm }: buildPythonPackage rec { @@ -12,6 +13,10 @@ buildPythonPackage rec { sha256 = "sha256-MLLZeq8R+xIgI/a0TruXxpVengDXRhqWQVygMLXOucc="; }; + nativeBuildInputs = [ + setuptools-scm + ]; + meta = with lib; { homepage = "https://github.com/ifduyue/python-xxhash"; description = "Python Binding for xxHash https://pypi.org/project/xxhash/"; From ad605efb5fbdc2fa33f4ab385a4511fb1ba3f615 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81tila=20Saraiva?= Date: Tue, 15 Feb 2022 22:04:21 -0300 Subject: [PATCH 087/161] droidmote: init at 3.0.6 --- pkgs/tools/inputmethods/droidmote/default.nix | 61 +++++++++++++++++++ pkgs/top-level/all-packages.nix | 2 + 2 files changed, 63 insertions(+) create mode 100644 pkgs/tools/inputmethods/droidmote/default.nix diff --git 
a/pkgs/tools/inputmethods/droidmote/default.nix b/pkgs/tools/inputmethods/droidmote/default.nix new file mode 100644 index 000000000000..24c2bcee8cdc --- /dev/null +++ b/pkgs/tools/inputmethods/droidmote/default.nix @@ -0,0 +1,61 @@ +{ lib, stdenv, fetchurl, autoPatchelfHook }: + +let + srcs = { + x86_64-linux = fetchurl { + urls = [ + "https://videomap.it/script/dms-ubuntu-x64" + "https://archive.org/download/videomap/dms-ubuntu-x64" + ]; + sha256 = "1x7pp6k27lr206a8j2pn0wf4wjb0zi28s0g1g3rb08jmr8fh1jnh"; + }; + i686-linux = fetchurl { + urls = [ + "https://videomap.it/script/dms-ubuntu-x32" + "https://archive.org/download/videomap/dms-ubuntu-x32" + ]; + sha256 = "1d62d7jz50wzk5rqqm3xab66jdzi9i1j6mwxf7r7nsgm6j5zz8r4"; + }; + aarch64-linux = fetchurl { + urls = [ + "https://videomap.it/script/dms-ubuntu-arm64" + "https://archive.org/download/videomap/dms-ubuntu-arm64" + ]; + sha256 = "1l1x7iqbxn6zsh3d37yb5x15qsxlwy3cz8g2g8vnzkgaafw9vva0"; + }; + armv7l-linux = fetchurl { + urls = [ + "https://videomap.it/script/dms-ubuntu-arm" + "https://archive.org/download/videomap/dms-ubuntu-arm" + ]; + sha256 = "1i7q9mylzvbsfydv4xf83nyqkh0nh01612jrqm93q1w6d0k2zvcd"; + }; + }; +in +stdenv.mkDerivation rec { + pname = "droidmote"; + version = "3.0.6"; + + src = srcs.${stdenv.hostPlatform.system} or (throw "Unsupported system: ${stdenv.hostPlatform.system}"); + + dontUnpack = true; + dontBuild = true; + + nativeBuildInputs = [ autoPatchelfHook ]; + + installPhase = '' + runHook preInstall + + install -m755 -D $src $out/bin/droidmote + + runHook postInstall + ''; + + meta = with lib; { + description = "Control your computer from your couch"; + homepage = "https://www.videomap.it/"; + license = licenses.unfree; + maintainers = with maintainers; [ atila ]; + platforms = lib.attrNames srcs; + }; +} diff --git a/pkgs/top-level/all-packages.nix b/pkgs/top-level/all-packages.nix index d37a6e8efb03..e93c6e20964c 100644 --- a/pkgs/top-level/all-packages.nix +++ b/pkgs/top-level/all-packages.nix @@ -1738,6 +1738,8 @@ with pkgs; droidcam = callPackage ../applications/video/droidcam { }; + droidmote = callPackage ../tools/inputmethods/droidmote { }; + ecdsautils = callPackage ../tools/security/ecdsautils { }; echidna = haskell.lib.compose.justStaticExecutables (haskellPackages.callPackage (../tools/security/echidna) { }); From ba635f7d5d983992ea2380da46381f6af611e1e6 Mon Sep 17 00:00:00 2001 From: Samuel Ainsworth Date: Sat, 23 Apr 2022 00:22:12 +0000 Subject: [PATCH 088/161] python3Packages.flax: fix build --- pkgs/development/python-modules/flax/default.nix | 1 + 1 file changed, 1 insertion(+) diff --git a/pkgs/development/python-modules/flax/default.nix b/pkgs/development/python-modules/flax/default.nix index 97f2c3c3862e..1cd68790479f 100644 --- a/pkgs/development/python-modules/flax/default.nix +++ b/pkgs/development/python-modules/flax/default.nix @@ -45,6 +45,7 @@ buildPythonPackage rec { pytestFlagsArray = [ "-W ignore::FutureWarning" + "-W ignore::DeprecationWarning" ]; disabledTestPaths = [ From ecaf258f7a8c9d84a31a450c88d50d4668b15625 Mon Sep 17 00:00:00 2001 From: Dmitry Kalinkin Date: Fri, 22 Apr 2022 20:54:19 -0400 Subject: [PATCH 089/161] lhapdf.pdf_sets: update --- .../libraries/physics/lhapdf/maintainer.sh | 4 +- .../libraries/physics/lhapdf/pdf_sets.nix | 560 ++++++++++++++---- 2 files changed, 433 insertions(+), 131 deletions(-) diff --git a/pkgs/development/libraries/physics/lhapdf/maintainer.sh b/pkgs/development/libraries/physics/lhapdf/maintainer.sh index 6e263dcebfde..dc182ec99ea6 100755 --- 
a/pkgs/development/libraries/physics/lhapdf/maintainer.sh +++ b/pkgs/development/libraries/physics/lhapdf/maintainer.sh @@ -4,9 +4,9 @@ set -xe : ${SED:="$(nix-build '' -A gnused --no-out-link)/bin/sed"} -BASE_URL="https://lhapdfsets.web.cern.ch/lhapdfsets/current/" +BASE_URL="https://lhapdfsets.web.cern.ch/current/" -for pdf_set in `curl -L $BASE_URL 2>/dev/null | "$SED" -e "s/.*/dev/null | "$SED" -n -e 's/.*/dev/null | tr -d '\n' echo "\";" diff --git a/pkgs/development/libraries/physics/lhapdf/pdf_sets.nix b/pkgs/development/libraries/physics/lhapdf/pdf_sets.nix index 694164f5af2e..286cf8355b64 100644 --- a/pkgs/development/libraries/physics/lhapdf/pdf_sets.nix +++ b/pkgs/development/libraries/physics/lhapdf/pdf_sets.nix @@ -59,6 +59,9 @@ in "ATLAS-epWZ16-VAR" = "1zkhlv8yxfla46gj57119w9prsd3zyy5vg275bayfwa6b71gmc0b"; "ATLAS-epWZtop18-EIG" = "069rysd9mf3cshx7xkcv7735ydh2g6szvljbfkcqwckaqjg2x3v5"; "ATLAS-epWZtop18-VAR" = "0hpyp52dwl8fnw47pyw8g7fsz97wr6sk4yli6sx0zbj8yy2j28yj"; + "ATLASepWZVjet20-EIG" = "0lvd3zkmisx95rbjx7r9wkk0s0mxvaybp3pk66sxrxf1bj1l9r52"; + "ATLASepWZVjet20-MOD" = "1iyb50isdsy3a5wnlm0185z9bfs6nxwlcl1aqlh4h3j1dbmz4ba9"; + "ATLASepWZVjet20-PAR" = "1kfqii7sbcs8zdsyd9kiy3r233nawc9yfc23fb6ql0xcwfzpyb1d"; "CJ12max" = "1vk2zkaiqbl6fixaxy7mrggmmxv7lvnw736lxm5sh25dapg6s8ag"; "CJ12mid" = "0s2558ihypn0l9qqx25qwnawbc7fkbi2wwwhbyb108rjk2klaf8v"; "CJ12min" = "1kdla638m3axr65ndid9irmqhby4gl084r297xw3jxxlrb0b7hj9"; @@ -171,7 +174,7 @@ in "CT14nnloIC" = "1wnpwy0mz0c5y29wi497jcn5k47bndd0h65d6a18qcfk0l15rfzx"; "CT14nnlo_NF3" = "0ijns9bjkw8zcinba7rflc7ic03mn5701lqfrxqjyq4q6kh8fia7"; "CT14nnlo_NF4" = "0fhyzaxnm17pi7wfh5hwaic9q4y0hb05ripd6r648wnnhhi353xy"; - "CT14nnlo_NF6" = "1rnacbsh0y9qjd2x7ggs87zi9msrxrp2l6lidg92i2la4pri27zk"; + "CT14nnlo_NF6" = "1dvabji3vrqk8ngln72xqiahm8fai3klgv5yz64b3bfxcr04wmg9"; "CT14nnlo_as_0111" = "1hl88j40czr73h9fbz0zbliawlqwng7ikrmq01hsfns190axm8w9"; "CT14nnlo_as_0112" = "1w9344v9ihr0w8vrfhhxn81gcnr0qm6ihwwijvcdds09jpdlp6vr"; "CT14nnlo_as_0113" = "11symfb1ljislbksrars1k766fa2n1inbarzbw3kp01vxpw8gxf5"; @@ -190,133 +193,157 @@ in "CT14qed_neutron" = "0ck1vmqk17i7rq42hra79cz2rm8ngxv4da6dvz62l6m2nrga3l2k"; "CT14qed_proton" = "1gijxkq5gpsljijblzd13kgr7xjjvnjv18v02jivylf73igsakd7"; "CT18ANLO" = "16lbhgkbiym3njiffxdcm3hf7kkm33hyj2w1hwgb3mvxx2sja31c"; - "CT18ANLO_as_0110" = "08hwxc99l645a9craimgawwynxcs5cmapgxgk0fy9ihvjvqs6jg2"; - "CT18ANLO_as_0111" = "02ff3s127svdjzawbhzry04rcsw5waggmf3iwpqndzxhqpm0py8a"; - "CT18ANLO_as_0112" = "063g7sqii0gf2rdjg9k9x95kzwg62w8lfq9cgyv3bnkpapnbqhia"; - "CT18ANLO_as_0113" = "01zh34dg4cc8955ipg2i0k6s13h77jg8yaa4v2f4aw0020js9dn2"; - "CT18ANLO_as_0114" = "061lvglsg4889q6qya83f7ngyzi2ibar1c9w6xyl462x5i4frx2x"; - "CT18ANLO_as_0115" = "0a35axfjxywy4yh8pk4w4f57rfljvd593bx7a8wnix0cifnngg4j"; - "CT18ANLO_as_0116" = "1cm3m3m6l93qlr9fxbc0d21gq3x0wn09qi8cxbx7lj0yqhjf2zh8"; - "CT18ANLO_as_0117" = "1nrzrlp7i42z7pv550ggga0fk356i9rqbj60mdxvlw3xl6v4kkf4"; - "CT18ANLO_as_0118" = "19p7x6q9flsz1s82scakgnsfsrjf8ym6ix3gp195fjgfdkannh9i"; - "CT18ANLO_as_0119" = "1jz27f39dpg0g46p834vgvaajxspyqwd8f7zlpv44lfb43va6dgy"; - "CT18ANLO_as_0120" = "0rp9hrvs44d8pbagmc3vipnh5d9amam3prkm2k7spvxahr6dp8dp"; - "CT18ANLO_as_0121" = "15abhrjmmhyka9dxjmwz7103i0bpa605yhy6kisgzf7km5ca14h9"; - "CT18ANLO_as_0122" = "15mxybppydzsxx308hqljahnmrw0islw2zl45kjlhxjxsmaai2nc"; - "CT18ANLO_as_0123" = "19zlyapm5cp1hwvfqcjm3v6mgwdaa6f5d0mvnh68c05sn39xjhz2"; - "CT18ANLO_as_0124" = "1fwzcs50bj6d6cjkvi4qj44mwrwxhjh25lxmk2q82wdmddgpaz0c"; + "CT18ANLO_as_0110" = 
"1lkxicxmphi4mdc23vig4a5l4gp0n53jblzsl7bvrixbkhd5arv8"; + "CT18ANLO_as_0111" = "1jk8siawnpnclgjc0jhx89ipym0jp94mrklwkn0awh0hgqxd26ra"; + "CT18ANLO_as_0112" = "0rpfx10b5hjwzmlqzkk1zkk38ysn5jfgipk71zl5da6qk1ih5v2s"; + "CT18ANLO_as_0113" = "0chhqgjddrb731y6haa94yypki6pzpjq5rvja61gfbghbvnc02fs"; + "CT18ANLO_as_0114" = "0nvl1a588jvmh7a7przrzpvf9prrpvv610jmsnfrcp4i98ipdn1p"; + "CT18ANLO_as_0115" = "1f757zlavyjxjwyda8rnkzg9kagmciywvvvdcsbks9ij3m4fcw5z"; + "CT18ANLO_as_0116" = "13bdsnwkqzjq63m02vmb7z03rx6chcyy3br4m52gja0qz03rxhyj"; + "CT18ANLO_as_0117" = "0w5pmqry5rd5jsfwiv43cy5z3hlk7gzllnk0vn1qgsjrgd284hj9"; + "CT18ANLO_as_0118" = "1g137nw812zqdkr97hdwvfi4c4bxfazy1wyk30gwgrhqs6xdsmyp"; + "CT18ANLO_as_0119" = "06pjg9nsq6pvda1yg6lg2qi15i3h2radampgk23rbz9g6zn5hw39"; + "CT18ANLO_as_0120" = "0jh6f5jj81sppv5fhm8ccgzwpacfr1nql3r5466z0bl201fc9x6x"; + "CT18ANLO_as_0121" = "0jh4x2y4rcp3l825dl3a89apmb0f94jrk0pl93lv8xg34f8jrb2i"; + "CT18ANLO_as_0122" = "0ma8r5vgdw9hj6cafkj8fbpq8i18cbild4aw4q9lrsszwwcrlv9i"; + "CT18ANLO_as_0123" = "1mv75gga1gdmnwkaxc0c89jxgapc38376xv5yxfqy2dn03pad9im"; + "CT18ANLO_as_0124" = "0913a748xm6lbdci8vicz08h323hbkc4z1bjq1wq8qfrl1cx02ic"; "CT18ANNLO" = "1kbsbvvkkchhwwjdrj4d91lbykid4dcy4ghanpdd9x0nfm5b4sgk"; - "CT18ANNLO_as_0110" = "1a60p22r292hjjcrdkgis6d81hgihnjzyzlbcqrvx9bkbq447kjq"; - "CT18ANNLO_as_0111" = "1gyl4h92xs4s64dm7cwrfqk2zrs1cbzp76dqckf7z44k4pm460m9"; - "CT18ANNLO_as_0112" = "1bsn5q12bgkhyl1d6wkq32m3l7i0wqxpnjxh790xcd3ympbfx16h"; - "CT18ANNLO_as_0113" = "1dri54s71ygnd1pdnmvr9vqbyfllwzr9x39zg01rpj02zy7kidb0"; - "CT18ANNLO_as_0114" = "0f731ryn1031053zv40mak9m7mxmn6dvnhn6ik6kyag9d3az6lvx"; - "CT18ANNLO_as_0115" = "107569wrkjic6xjp574i6r6n8wj2x9cx4h7dqh77wxl8g4aajkh6"; - "CT18ANNLO_as_0116" = "0r951p0a4pan71lkhf701ysw6kyq1wvf15rbjdjr4j7khjfaykcp"; - "CT18ANNLO_as_0117" = "0xsgzga5bya0ng6i7nvk33nrf792vzbd1rs174cix4v406g99xm8"; - "CT18ANNLO_as_0118" = "049534355lxhppw2l85i677ysb2gwzccs0b5afm719sh06rv6jkn"; - "CT18ANNLO_as_0119" = "034kd7pg103ldc3nmgsylv0ffl8v0sp9jkf9073ny11s7b3pb5wa"; - "CT18ANNLO_as_0120" = "1ph23xpirkahpr9x1k2qm9pp3a1hc3i15bhc6xprpc29k53m4wsi"; - "CT18ANNLO_as_0121" = "12qrg3jb1kar46b8lai56lb7wxjr950dzaixfncxvy38hrny6mxh"; - "CT18ANNLO_as_0122" = "1wmkl2rlhkwzxi1yln0m9i6lvpbqkp3bxdnyzz7hp3hy1sa5f60s"; - "CT18ANNLO_as_0123" = "1caz4rfmcmabfdw5b8xg2307bs1bjclgdcxq2k6gf73z3pqbjs8w"; - "CT18ANNLO_as_0124" = "0mx8h8vdhlklgvysmhllkzga3g65zkmzpz7bmyvaqmvbvr6x5q0w"; + "CT18ANNLO_as_0110" = "1inx20r83pfmwxfhyy3hhj2csp016d9cnald1rf8vl9riqxvx0j4"; + "CT18ANNLO_as_0111" = "0zzi0b27xp4xykbwd2y7l2ka1k4kfvhaq7y2w82fky2b842ixsmg"; + "CT18ANNLO_as_0112" = "0y031rslsmwxs76rz184mrjb07pdcxrf07yl5yab1y24vymqj4dy"; + "CT18ANNLO_as_0113" = "1pgrcb6sbahl2jf3v08bki28w9x0ag5n3zj1fi0jc69fxwgkczzq"; + "CT18ANNLO_as_0114" = "0ifzf428gxlmhc8wvpj3qaqr0cl6pripiabmnb5av43d5avwhagr"; + "CT18ANNLO_as_0115" = "1bxf5rs33kfl3q570wm49ad0drlanzq8wkrbd85qjlvyhy52j8vs"; + "CT18ANNLO_as_0116" = "0l43qn45wfj2lljpp8kri1n2p99lxj3gbbqh2p2s7v0my5ds5p06"; + "CT18ANNLO_as_0117" = "0nfh9y2w8lvlqbghxx4i7j7gxq5bm67h3vz1wajg86zndarkq6mz"; + "CT18ANNLO_as_0118" = "0m8s96rgnnl5xk7g3l2pf5qx7dwb8kgn18b9nyr8cyqxn90mh3vr"; + "CT18ANNLO_as_0119" = "1lpkcrcfmn0kc9g21ca90j1shcf3ii89yrr17rgwynmylwvizs2y"; + "CT18ANNLO_as_0120" = "0lmn2p57k7yvr5mpzykljhkpnb1c71f4ya2s4zbp2x84fqfg5wbk"; + "CT18ANNLO_as_0121" = "0arbvp0sc67fsf7slhlv96iwq89yjqqkv84pf76fqdvrrjsmyn61"; + "CT18ANNLO_as_0122" = "1nbkgb0wmjh2bfx944sqb810sn4bb0ppxgv2aw2y93jbfyx7x4ry"; + "CT18ANNLO_as_0123" = "1rlxn70mc299v596y0dwp9a1pdy1yz0r8367cjw5l97y46yxhjrh"; + 
"CT18ANNLO_as_0124" = "0sfkvhyxp9sqf75wj91h9h59vcs2y2n4qchsg0marjy849xxh6qb"; "CT18NLO" = "04y2p6vz484l3yv6381pfavqs3xh78h3jn6bg7ncp5vywwqp44n9"; - "CT18NLO_as_0110" = "0ncaacfw8dh45vaf84kkj93hwxgwz744qqd6llpy73zdilnl62a8"; - "CT18NLO_as_0111" = "1cib3ggy0wajvvw908wr4bfymcw62iy5abwdadhq69crcg01619r"; - "CT18NLO_as_0112" = "1x242x4y0vykfypm02g02qxpwmsq2p45bxqrqgfy29qagxz6j66d"; - "CT18NLO_as_0113" = "0fkis7l0s1lb2k7qyfwnn5axbpiv9yky4j5qc8g3fa068czijmhi"; - "CT18NLO_as_0114" = "1r6ih2gqiwm7z24iw9xgn2n35659v5nwl2d02f07j1k3d33j175n"; - "CT18NLO_as_0115" = "0z4vm73l16mpjf3wcrv5q659f2mwkx85wpmnq8j1fnk0vhms59dx"; - "CT18NLO_as_0116" = "0g4lxxc9g09alpsff9wr7w0jgi26h3klx8rk6nb71j9yzrwv12vv"; - "CT18NLO_as_0117" = "0hmn5vkgi5981q0s5lyp9mq9jjrzhgr1f9w8np3i2nwcgn1awis5"; - "CT18NLO_as_0118" = "1z6is1f3064wq56lfxrmqckk3yi6wsl42s2xigx87p8zqg3r4nkz"; - "CT18NLO_as_0119" = "0p3r7w5v3pq2dgaq96r3khx1wwjq6i33l0bbf63dxs88gk5cx1s4"; - "CT18NLO_as_0120" = "1h0rcra68yypf1yqwlzql385ks1agxc9njdpyx60j3yg3whk4h63"; - "CT18NLO_as_0121" = "1by1iiy7qby73m8s3qmnrf0dyca3k4z00fclbrm651f79nz8scz7"; - "CT18NLO_as_0122" = "1r8h2cw874dh1mj4r545wp9msr1358qw1lzznwvgkmgwjclndjg4"; - "CT18NLO_as_0123" = "1d8c3bk6bvy3azbv9yqi45cwkcmjbxxw7qaxn6xnc5jfcf6wbsp1"; - "CT18NLO_as_0124" = "1haqxq1jbcz9qbhnw4pxsvlr37908fkdlzyn7c1csrlr8a51s3z4"; + "CT18NLO_as_0110" = "0nrydk44sp7hgabn6xk6r2hnkir7mgddcsbbnqmpwmq3x0xz27pn"; + "CT18NLO_as_0111" = "17xwzcj4n1bmfwz02n2g8afzxc4lp5diij00f2w50pqh2w7vj6g9"; + "CT18NLO_as_0112" = "0a6lsmpz3c1z7dm593nb3r9q7dgpskkls2i6wpdlrrg6s6cr8rmq"; + "CT18NLO_as_0113" = "1kwz9yp0vzyiwy9avxjwibdc6jla32vddf23pvfiv0qjcwfnp6ii"; + "CT18NLO_as_0114" = "161q98jr59vn1qldhd83qxx0qjq1rahgamwfqd3hw6dn6wy39970"; + "CT18NLO_as_0115" = "1dp0683zfn7mg0bj1l5m7i9kdbyxjl0ahhwppvgi5gs5kbmhbs9n"; + "CT18NLO_as_0116" = "0hpi5s175cpz251nav0v34l6qsfqj6181mhhp80kghyyvl7l22sw"; + "CT18NLO_as_0117" = "1v32wxdsvms23sghcszw6csd08kw0xppjzwjnbdsc8k6w67r546m"; + "CT18NLO_as_0118" = "1vd7vc7f49in1i5398p12b9vklxbsif89wv2q93k6m91kb38rm45"; + "CT18NLO_as_0119" = "1h0dlys71cngsxl9dj9l5amikxrvzzb7bins2a6wn6s7zgfyvlck"; + "CT18NLO_as_0120" = "126jfwml027mnpbr6ad7s8d94j3n1sv6fbdy5r5vcb64nyncjach"; + "CT18NLO_as_0121" = "1khffdgqdfl1g4cxp4fnyb900722s6pwzys7cdxmwhzi9f0rwgw8"; + "CT18NLO_as_0122" = "1w7q35igi7fnkrwnr1dnfq646qicz4549c6ddqbkyil10arvq7fk"; + "CT18NLO_as_0123" = "1bl6rf69gjnblvfdh5p8flax9qb65vk25hcfjw2r7qwdz3dxs6sr"; + "CT18NLO_as_0124" = "18mhpn4l3qqg9v79z2vz4jc8w3za726fndfl6sbc9mf94jy72chm"; "CT18NNLO" = "1shkah5ma0hp101aklkz2p8n9y4i4sv6zwa5ifzyj3bgz1020l5f"; - "CT18NNLO_as_0110" = "1smilnmhw8zjd0hl03v7wflbbia5qxqfmvyikbgwc29g212xbq71"; - "CT18NNLO_as_0111" = "0mj77vshb9fmlvc1pp3m701nl574p0k013lg0l25r4nhvlfiiriz"; - "CT18NNLO_as_0112" = "0a87crw7dygf9q28v95h9j02yq5f9rr5fdrxvqj5ggw839nazgmk"; - "CT18NNLO_as_0113" = "011269haxlh2grq60qbmwrilgnkz6hlacd8x56iizl6ify7hcs2c"; - "CT18NNLO_as_0114" = "0im03f2vr9pfd223skadmcfrypxlpka4pqizjcbqq75fddhljivq"; - "CT18NNLO_as_0115" = "0pppdh2vq86iiar18c5wi2qbm6viv0hpyfah8pn1p6bcg1k99srs"; - "CT18NNLO_as_0116" = "17wsn6jxp25klk3x3yfa5abxjjdl5j9vdwxqb51zg6ic3a7is764"; - "CT18NNLO_as_0117" = "0nkdmqyqzzg19m98mqm9n2dcaiy4i97zrxmy7x1c3rxc0n7igkzh"; - "CT18NNLO_as_0118" = "0v931kw5dzqq95940mxmkj1r6a75w525j99yf47pyf55vg22ybkb"; - "CT18NNLO_as_0119" = "1z4kg4na0m2vrflnizxhjdxa9rdzp66mq66bxcjlvqiraf4ygkd4"; - "CT18NNLO_as_0120" = "07bz7q5h0rfxf5989sarchsv2mcn4093b6x5094725p74sw41sq5"; - "CT18NNLO_as_0121" = "0slw6m1scnajlfhxswd05if782k32gcyx9zz50gaiwqimrz188fa"; - "CT18NNLO_as_0122" = 
"1jrzxq7mqhkfj96whqfr3ny2g1kggc047cvzb7ladlirmi05injg"; - "CT18NNLO_as_0123" = "1zdmvp9pxjjs96yw05l5s0c0ym1hyj7d3an5siy3i20lvyxcgyz7"; - "CT18NNLO_as_0124" = "1lb88y4c68n669c2g2q2zwjp92d9hgbxgpl0b7dxny9a7zdmw0mx"; + "CT18NNLO_as_0110" = "0v7nsjcm1q7hgj726zlvfydl3arqkwyddd20z3g0nwdqcimv3qs9"; + "CT18NNLO_as_0111" = "118444ygv03ryhbb28njbsayvv0rdlcb9djja6p62kk6rnbwi1wz"; + "CT18NNLO_as_0112" = "0djxkvwk628sxgf62bff40m2m6vgzs08jss61f90rscvj6gxid5b"; + "CT18NNLO_as_0113" = "1xg7qs33h8zgj4007r8g4drhm95551slhwv62dzyv9pwy5vrvgdn"; + "CT18NNLO_as_0114" = "0i1g7kwfs39ps9ml0ckkcq7x4g1n764q1r06ilq7bci3m073cffn"; + "CT18NNLO_as_0115" = "16q0hc3p0325bq9zgskkpf9qfyhmz9q0rk3b0jrzpc0d4vk7b5r5"; + "CT18NNLO_as_0116" = "1mhnx8szpp4sfy592f8vzvjlzr9y46qndv3c42hf0jsygx5pc5cr"; + "CT18NNLO_as_0117" = "1bmcnjfzwf4bl70qyx6csix3ps46pd32yb2h33y2f144vp8bmkpg"; + "CT18NNLO_as_0118" = "1r1dmj42qrqprhq06i0h4kpjc1riql963n32icl0mfwjq9wpfa0g"; + "CT18NNLO_as_0119" = "1rxyd21h407zmjn3nnr4cqvinw2nwcdhid6cbr0wif8p2b3gasic"; + "CT18NNLO_as_0120" = "1llhnfijc7v6v4dkbsfgj2c3m0y4q42mvaynz06v2j3aqv3wzhza"; + "CT18NNLO_as_0121" = "19dqq2jz5daq59gv2zdvygyvwi9sx6i3ih82yl82yy82gbw2568h"; + "CT18NNLO_as_0122" = "0p8w4ypaxrpsyz3dn7f0964wgvd30iy4r1haa88hqwx74qrkb4pc"; + "CT18NNLO_as_0123" = "1c1sw5md5xp8l6b3qxbnf994kz2rd60p4bl3s3l2af7f77w57wfv"; + "CT18NNLO_as_0124" = "0ldf7dnzdlwqh0gmb6an0b8cwcjpkiaih49aa77j2irw2yja5p41"; "CT18XNLO" = "1k0cli4j0z5hj24pk9f78flhlvsdfya51hgh90jv4myniapk616l"; - "CT18XNLO_as_0110" = "0yahahfmzzwzxiqanm7029z05f3nx9cs2yjdvyvhazvicsq3ibid"; - "CT18XNLO_as_0111" = "1n0q8d0j8smq2z6n9l091r2q8v319zcf896nk2m0s7n9g9a0vcjq"; - "CT18XNLO_as_0112" = "1wj968g1vb58gz6vslzfmihvqg5f9f2cqgq8inlgdhai1y8vk1lz"; - "CT18XNLO_as_0113" = "1gi939mxv99q2r1m8a6d4ky5nrp24xv16xw6d9h7ly27jrw8kzm5"; - "CT18XNLO_as_0114" = "06gf4m01yr89xklch6ack012in2i1bifyzvp793x9w8m56dx12ms"; - "CT18XNLO_as_0115" = "1g4705l0qb0immd4la2vrj9v4kw7r7i1wz1vn4knbqjwig5kcfws"; - "CT18XNLO_as_0116" = "0vaxwg3ixf4x92vssh8gqrszbfa5zgzbsd5p81j14nlksshrs6bf"; - "CT18XNLO_as_0117" = "10qg2yr63csg4nd62a8h0s1z08cmgbkwvcsh0wp7zkzpw70r7x78"; - "CT18XNLO_as_0118" = "0kxhg2pn7ki4nxcs5jhxvx4fs6c414mq0d0qm3vldv0hsayqsbnd"; - "CT18XNLO_as_0119" = "1xd4ib2fqzhg9c6z2zyc8h3il4msm7rv9kkaaapll4h0gpjdda6x"; - "CT18XNLO_as_0120" = "0jyb8gs0avvlhiwwvrv09p47vs3jim3y315hg7wcy31xab90b91i"; - "CT18XNLO_as_0121" = "1afizsl9phvvdjbyrifx3ii10gpxl51rvx311imz30l51i3fzl7v"; - "CT18XNLO_as_0122" = "0wkpicsv9357lh96vjnrxzddaaaiaagyfph2jcyp97mjhixx2hlg"; - "CT18XNLO_as_0123" = "0hr9m422shvp5yzjdd7lqansim7qcx3iv1p017fp1a4ihj661sra"; - "CT18XNLO_as_0124" = "03zf75f6gx41g3fxrdc6sqkfcyzz03izchwfvabwfxr06yq94jxc"; + "CT18XNLO_as_0110" = "07k9ga6n2gf9qz0flvrd4if0mssddrq1bbk0rpxsy8wfp41cjsl8"; + "CT18XNLO_as_0111" = "1ysz50r2nc57c7srgqw1dcvyfr9h578dkz24sbimxq54akp9jkxy"; + "CT18XNLO_as_0112" = "11wvnvsc6a5c2ygq39avai4xk2mrnfnvi4fqzmkjdcm0kby0swpb"; + "CT18XNLO_as_0113" = "0cyv8y2m3514np7f3fwpf3g1mzy2cz905sc5lrjqff5djwjc23yg"; + "CT18XNLO_as_0114" = "16vj7hhg3psmyr4vqvy8mz4bg7rp6jc6b64n2dfpq5jvb15w2fbv"; + "CT18XNLO_as_0115" = "0ylw7d9g041fgrjfvq0i0ycpxwbm3s4jdgm5mkjk6yj0s4mrrqcy"; + "CT18XNLO_as_0116" = "0mcfgih55zja7k0cdi1yd7gx1gjr6cpzz28gz4fxyxi2l4paxh2k"; + "CT18XNLO_as_0117" = "0klzf5bchabcjc0c8h6f09g37jy6vwrpq7q8iwrfcmar2slx26r2"; + "CT18XNLO_as_0118" = "0hd1bhlkmnchcv0xbrqjc3paa7fqp249sxi9hg71x3qbh03ab036"; + "CT18XNLO_as_0119" = "1xlxw18hcsv7bij4dvnj9dfm8sai5xm9jggb8g7flmvkmcskgzmg"; + "CT18XNLO_as_0120" = "1ixdg56qgm5701al85zkp81xx3h9hsipqka8l3sh0ghp563qxk32"; + "CT18XNLO_as_0121" = 
"1msa7pp1a77wmvxa9mhr0sgjj4yv1msb0igqj53ahzgisyl3lnml"; + "CT18XNLO_as_0122" = "1kin5bf9bcxadqibqfzb03bxdrj759mlgpbpjvvpxg4ishj0b7yv"; + "CT18XNLO_as_0123" = "1mir3cpvbc30l3m84j1ql1d8phrx7nf0qd5xbq9jfl4gx1kjfw8c"; + "CT18XNLO_as_0124" = "0ims3sl32rria896ckm9fg5dsmbf6ivcfl3drnqpl328ynrkbzlr"; "CT18XNNLO" = "0j7bwzkhax4cm3wnbhqdv48j4wha9zdd7v77ihlgcvcmk79rx1fa"; - "CT18XNNLO_as_0110" = "1vwaz00jwpyd1nafpfw0mw309v10zqxcsygdjsdd9mn5p1j6z3hv"; - "CT18XNNLO_as_0111" = "0g6w519dc13mzgb2wpyy7chnl5wkl0ndrdiw7nymad0csg20yss2"; - "CT18XNNLO_as_0112" = "06wlzpx9b83gblg4rvqv22k60pvjikqs5m5gp2kvrwmc2wxp73d2"; - "CT18XNNLO_as_0113" = "0ybm5v0dprid7vvsnsihkd3vn5gqsqsmib63sh3xl45i58h1szzz"; - "CT18XNNLO_as_0114" = "0hpznnkarzjmf5447jp9za1w52lqpysprnf14v31mda9k7a6kdkn"; - "CT18XNNLO_as_0115" = "0lj637cwm726hqilrnfa064apdsqdav142dy3scz2gxzpzqpya7c"; - "CT18XNNLO_as_0116" = "0m1zh15f975g628npifyqmlj578lpdlc67sdrxgdg97jfvxrq7s2"; - "CT18XNNLO_as_0117" = "1r5kwl333ipq3g78cmn7h5yxk2gl3rfszm1ijzyf8hrjqz9m2p35"; - "CT18XNNLO_as_0118" = "070jcd7y5w0h65ssk359w4kf2j7164pgdkg78mjwifi2garrlv0s"; - "CT18XNNLO_as_0119" = "12nzzjyllr4vs422dxdccjy0qffg9gy8p2wa828cr3a26wjlipfr"; - "CT18XNNLO_as_0120" = "1b9k3wd212nrhhncckj6fml58jzjagiskgmc6h248mcc8mcc2gyz"; - "CT18XNNLO_as_0121" = "1wkgmkw5djzxc5g4iyr4h2cz08jv1clmp8x8xcidg5532zclavmd"; - "CT18XNNLO_as_0122" = "1w7jhlk432qni1kn1big44yk16bxghbzrjb1g1rdxpibzy2jdkw3"; - "CT18XNNLO_as_0123" = "15hqb1c4jx41119h2ahx6zacbigs9xw92jw7c4xsww9dkzr1qsr5"; - "CT18XNNLO_as_0124" = "0fx7am4dv4d09hdk0yxvxzbdlhzc03y3q2x1hfx9wk07kcxw1mj5"; + "CT18XNNLO_as_0110" = "1cxlps6kvm08lkgvrqjd8080ykc1dvd56986iwwzd0s6whlpfsi0"; + "CT18XNNLO_as_0111" = "0bbp4qz3n9pwcfn6m623q2qqmx2wcgpy6759wzwpjnifym832j95"; + "CT18XNNLO_as_0112" = "16p36jf8c8pliaxd6s30cmmmxg9slnmb2527vnwkka0kp9qw3ffq"; + "CT18XNNLO_as_0113" = "15d4qx8x56kcg6p8980bslhfilkld8yf1mwpdzyf8v8ns50wrbw5"; + "CT18XNNLO_as_0114" = "1zsfys0xkgf8zlbzzjmh1wvzxwjqi4rvgik26s5y4ibr68gshvaj"; + "CT18XNNLO_as_0115" = "1x00d2q2lnl5w0l052v9cvkywav26b4r072dpn1jiak6n52yqqaw"; + "CT18XNNLO_as_0116" = "1gm9m0rl9vghswcb4xgp54lc3h8wkh6c077625m9y0166xbv5x2d"; + "CT18XNNLO_as_0117" = "0630arl5qwjhxw0avzlc0mr4hwi09ki8xfn4zvfccgpy2nd85q5b"; + "CT18XNNLO_as_0118" = "1nx75pf5krazrk3ff3lb6zjnxz2qmffrk5vcf06iq5qci9zi5l0q"; + "CT18XNNLO_as_0119" = "0dzjj9f2qkpwfr9pm3pfj6jw5ih2jis8wzc8d0vyh5mm084jlk6a"; + "CT18XNNLO_as_0120" = "17pp23l0brnd0phq23888qbkf1c5j1lcskrbm3v3f2cd8p7jcvvw"; + "CT18XNNLO_as_0121" = "0hlfx8zsxc1x6glny4cp2vpba8jgjix9cpsfixff9vkbqpm2ppk0"; + "CT18XNNLO_as_0122" = "1918l55khrfyb3lcxsbbf9w6v8j54klszl2c32nmna0apf8zc3qm"; + "CT18XNNLO_as_0123" = "02kak35pj6c4hml75na7452ryashfclglhahclzkpq8gs72l5w91"; + "CT18XNNLO_as_0124" = "03wym12nvwdcr13dz6d2gr4bz3csffnn21zfdld42fsyq4glx431"; "CT18ZNLO" = "0iv8laks2ymn5fygk6k9lxm3s7fld5g292n9bfkhn3nmcfxczi03"; - "CT18ZNLO_as_0110" = "0q90c9nx0b3fbqq317qr0j13cc9m3zcgpk3pcn8s2sd6aaksa66i"; - "CT18ZNLO_as_0111" = "0wnxj323k29xvcrrf68mfyhflfnblvvnx63p070l5x52qqbfjl7y"; - "CT18ZNLO_as_0112" = "03qjvv004g99lbi022l9bvr82gvv6gzk651r8x5hwwyr0mar4j0v"; - "CT18ZNLO_as_0113" = "0hw6w7x3bnx2fy03jj5yvbjjab9mj0fzca8bc46phjsmx3nqxq5k"; - "CT18ZNLO_as_0114" = "0gnhqhxcsaslcldhyh69lxdx1misjz5qiwry57n31j6mqjrggqbi"; - "CT18ZNLO_as_0115" = "1dn32bwarggnfq2s9drmdjikcrn0nm0mqih4f5wxr3zbmq70xw4q"; - "CT18ZNLO_as_0116" = "17q8ysl7ar1n7wym55k8vzrx963rip9l9b0kxw2bqkha5ipwmnv3"; - "CT18ZNLO_as_0117" = "1afaqy8afzib6fmyy7ysnfk8w5f92893nvh4fn1sx9ink7i2zqal"; - "CT18ZNLO_as_0118" = "0sbhjzjsjd8m6sgz66vky3w7ymhwpss0dr2p603dxgm84fig1kzx"; - 
"CT18ZNLO_as_0119" = "0fxplpy2l1fdh5p4csdlabg36xgbpdg8pcdfcnws2cfj3g0941as"; - "CT18ZNLO_as_0120" = "05dk8bvwkn5y5j4rk18an25rg1f1am9vlddal84rbp8m15qnms65"; - "CT18ZNLO_as_0121" = "0ymql1wjxng5i887lx2q6p8gryw29zs0d2hzkfxl4f0zzn2wlwpi"; - "CT18ZNLO_as_0122" = "0gnl23n4ljlry340pwwfs0xs22bl2qp2b8p3f73gpp9xn42nwz1g"; - "CT18ZNLO_as_0123" = "1wfx59iadvn85raa1bq81ipxpjbxli58hs8wpzm1vz10ilifn9d5"; - "CT18ZNLO_as_0124" = "0jm0gnp8g1drz6a10wrdxkj2s8gws80ias1ixdnr5fdmnghf1wl7"; + "CT18ZNLO_as_0110" = "1d0j9nmn9mk90698pxqlpgz7c7cyxswc88n89cr2h8mgcg2w8g2v"; + "CT18ZNLO_as_0111" = "17l7j1j2x529mhk0andkdh83k9z6kg9v3ccfna08i7d4iilsdfrs"; + "CT18ZNLO_as_0112" = "1b8mi0jwln2wvysrkbm1fvay053d17dzvlj9fkz36xmr03bv5mvj"; + "CT18ZNLO_as_0113" = "13dkpjvh5a3p565mhpxqnhijl3jd2zr03np5psknvl121gr007fk"; + "CT18ZNLO_as_0114" = "0drryvq2x42xpf9bmd6n4vz1f8ddh83c3rynnzm54qssxmfkb073"; + "CT18ZNLO_as_0115" = "1wdj056rf02jksa2l2panvkijvvwr6rsb8kh3g2bvx0yjhff8g1c"; + "CT18ZNLO_as_0116" = "1ibf0841irsa4vq9sg0kvrhvakyyshpvs38frz9v3zjbc012cldz"; + "CT18ZNLO_as_0117" = "0l2nabywfsvb1sk44rqgrwf8h0lxkz8qf6pmzr8jc3zhq1fv04sl"; + "CT18ZNLO_as_0118" = "0dnksqqshxqr0y3qr3diyvhfq1jxy1x0hrjw8xk76jzm61xi96x1"; + "CT18ZNLO_as_0119" = "0qljv4d1qfc9rx3p4a8dghij11dv1mi03y30wdilfxrf98znvdyj"; + "CT18ZNLO_as_0120" = "1w0p5gai8qhfjh4jxhyl26xrp8n210cp2a7zjd4id1s4pcvpzvn7"; + "CT18ZNLO_as_0121" = "1ija1nqc4pbprcc3ddhl9rxxbaxngjr256zxiy7gg3wmg6364hjl"; + "CT18ZNLO_as_0122" = "0d7h3vli13m1pm5w91js8skv198aqz9kjkx7w0sa4v2vhvz1rdyh"; + "CT18ZNLO_as_0123" = "14cl8fmkl6jav7byqwcfa1z2ml6lnn6pzp0w4nqy75gc7wxsba0m"; + "CT18ZNLO_as_0124" = "18riha0fflfbsgh7nnc3ghm8cpzpcss0z6l48d28bbq0i7caqad7"; "CT18ZNNLO" = "0zsqrpab6vgcinsxjq3rqdadig5flxzk61wc1aa9rwnkbpm1paa5"; - "CT18ZNNLO_as_0110" = "09ypj0yydkiw82bq3ymsp19i4iz82fm2z2xfplb3iasa86y377in"; - "CT18ZNNLO_as_0111" = "00h0zd5indm57xhn467qffpx7aadzb73vyfazq09pl5vdqq9fn8x"; - "CT18ZNNLO_as_0112" = "19vlb1bvp7r9jnknd2dvblggim1xqf4yjqyf5h04r90b89pyzxn8"; - "CT18ZNNLO_as_0113" = "0h8i68dligavf051dpil2bqvlxm19156v1951n340pcncaxxi5d1"; - "CT18ZNNLO_as_0114" = "1n2drxdd6f36njq0lcfm7s6cyignqdqvirh03ixvvar2pgj02yay"; - "CT18ZNNLO_as_0115" = "0cv633f5gg6hcyhwfh22h5n4irnk1pxsk7949wiax7qkl84mcm1j"; - "CT18ZNNLO_as_0116" = "17z25cky2ysrcplsxblrzka667npnnp42k6n8jsm73pagscsj91n"; - "CT18ZNNLO_as_0117" = "1yrbrzbg5r2pvwhbnvfwcp9d9rvfmqqxwph0rd0sdfix9agwy2yd"; - "CT18ZNNLO_as_0118" = "019lbvb8pjfbwz8hz8h2xw76nf1ly9mgnbz6pzi3v9msk0qbmlp1"; - "CT18ZNNLO_as_0119" = "1x08wg3y3fqa8ah6m0c6x2fckjjyylkhnmry6vg93rp3n7qlvynw"; - "CT18ZNNLO_as_0120" = "0m9mfr8553yrysbcksx17nz1gm1vi2zvs5bp8d3v82phsv5alhf5"; - "CT18ZNNLO_as_0121" = "0snzl818ag926n0i67hdwkjclfvykx546vfnvsx7n2z5pabakd1j"; - "CT18ZNNLO_as_0122" = "10i7dk1bllyk6f3l92kbiqdib8l2zvqf91g9c20k12sim6n6x2g5"; - "CT18ZNNLO_as_0123" = "0v2h2fjkdsnyssb6ralw32c23l0nmdxbg3sx38vxh2y2s2nycz5h"; - "CT18ZNNLO_as_0124" = "0w29hn728p8yip40mr27kqmv5wndfkq6nx0vnl53x41pwczkhkdl"; + "CT18ZNNLO_as_0110" = "06qzlfshx8gwrhnmjfvz6sxq7h2is1dqvx5sz8jxrr1gl7gl92h9"; + "CT18ZNNLO_as_0111" = "08k101cn9x9y44zcpn6iql85qqx89rv7xjhvak4y6s309p9rlnzi"; + "CT18ZNNLO_as_0112" = "1c72mz93kha8mdfdcwj8fs8dqqylxmmc4vs7fjf9h7xbrqzmss7m"; + "CT18ZNNLO_as_0113" = "0z7s1kmlrv99r5mb6p1xwrydx0s896kr3va0ld3gq39a0f6bqvfz"; + "CT18ZNNLO_as_0114" = "0ir6n8i170czq7h3badim28540478cq5fb5vv4kdi0ncypsasr4d"; + "CT18ZNNLO_as_0115" = "0dncvhp99v5s9746ql37gdm65byih9ppg30c77k50i2485a1zfap"; + "CT18ZNNLO_as_0116" = "0qihfcsgxv66l781dmvmmpbr0s0c52s90jfmz5y52gyy1lplw569"; + "CT18ZNNLO_as_0117" = 
"0agqd4vgj53w9p7ghfkrskqyvg7lw5g9ilj0fid0jia8adfh58xp"; + "CT18ZNNLO_as_0118" = "07y1l00igx7d2yaj9gi60fvqz1p8f8z44fmxm84fpjikajabff5v"; + "CT18ZNNLO_as_0119" = "16nnwwj16c8fmqh5mwnihzvbgfj3cnvw01i1il1gr0g4zqpn0yhi"; + "CT18ZNNLO_as_0120" = "0yy0lxwm41aa727wdrq27l6ih7fdaqwiy4bkrbh0zrns0km9r958"; + "CT18ZNNLO_as_0121" = "0yg70dx2wi9wf5914shfqaf2j90dnkfnbp1pq2jzxd0h0sxhlphj"; + "CT18ZNNLO_as_0122" = "1bd4193ggv4nb48d0mw2n93ia30h4myfy197k9b0m3qc90xjq77z"; + "CT18ZNNLO_as_0123" = "0r2vri1brq0xcrpj0cg9hf9kwhkh2czmimrsg5bfvx35adiiis19"; + "CT18ZNNLO_as_0124" = "1wq0nz9jfb5fmzwnnh0xyra2j62kb5xpmh2nyy8ih4lvxhgi16mg"; + "EPPS16_B_90CL_Au_hess" = "0ab3pnv8fq45mdp29m6lfmrhhnr88k6qvkq6lwxmn17k39v8j9w4"; + "EPPS16_B_90CL_Pb_hess" = "1cjc79sygpxnir3qw9n6cdwvd3flfn11ajqs5y64svrpsqcx5ng0"; + "EPPS16_B_c_90CL_Au_hess" = "1ijvnglq4wrjhhvksyd60s7c6nv50vwyk5vd8c3gs0qr8yz1fk69"; + "EPPS16_B_c_90CL_Pb_hess" = "1347cqsfkim0xcds5imxmsdxh8x0h2n97x5zwpf035rbdk5mkr4n"; + "EPPS16_B_d_90CL_Au_hess" = "088jrj6xf1ph19sypa5dizllydfzi6ikxq2gisdlxpj1qnwjymsg"; + "EPPS16_B_d_90CL_Pb_hess" = "0x12r31l0nvqsc2ml1zkil0w1iji52xgbnxn3wss9pnmswrf3cah"; + "EPPS16_B_u_90CL_Au_hess" = "1pq7agglirpk2w566c1ql15ps1aglmnph2p2dfa535zlv89s7c0d"; + "EPPS16_B_u_90CL_Pb_hess" = "137jkcbikmcjaxp8rpr5j193cmr329mcvyy7j0s0a6ynglhpr76k"; + "EPPS16_D_90CL_Au_hess" = "01ggm0xxwd3nc95cjcf36sy0pdb0xvk6bkiaq328w2sfajccr5mk"; + "EPPS16_D_90CL_Pb_hess" = "1z3zam84m2kbs7zinn87xhlca90d5zwk8j72yj19nx3r92brnq8h"; + "EPPS16_D_c_90CL_Au_hess" = "18sviyvm3rm9n7x79w1sx8j9mcc6dnc2il8hsw2avjgy2aqmwj87"; + "EPPS16_D_c_90CL_Pb_hess" = "1ryv83iq1lrphgxvdsmh70j6iky993sax0s9cfrswpjyl2pcilq2"; + "EPPS16_D_d_90CL_Au_hess" = "15j6s9mj1ci9wjgsfhbxfikcyxc5pilv56cyzxjhgjhfgwvi1xyz"; + "EPPS16_D_d_90CL_Pb_hess" = "1qjyb57fhf6d3g7l48jcl6jizj2c5g63xahzanrmkm9r538hvhcc"; + "EPPS16_D_u_90CL_Au_hess" = "0jjk2rccvv0ngxn7wf33j21y72wvs4dhwl56yhmf7bfzd6v70rp2"; + "EPPS16_D_u_90CL_Pb_hess" = "09npz68wwvcsvd6h8lsgmlr19l0af4h4rppcd6jlwd88c2zpb3r3"; + "EPPS16_Jpsi_90CL_Au_hess" = "0msvkihdmhap0bbiydxbp552k1sgk20wadvc2s2h9jldakdx0pk9"; + "EPPS16_Jpsi_90CL_Pb_hess" = "13vc490k1769gbph3xn1lffj0ilvhz78by3lhw45lwkra4vx5zp4"; + "EPPS16_Jpsi_c_90CL_Au_hess" = "069lzrnm5kx56rblr4lxqvr014nrf2yyf1iw42s37q2xsxpjip87"; + "EPPS16_Jpsi_c_90CL_Pb_hess" = "1895iqzmcnaqkidcy96z4766wppycp1riwg9clg71cb404wz74as"; + "EPPS16_Jpsi_d_90CL_Au_hess" = "0ndh23dyaszam144dsdbg4281c61vai8avgi4y7x8kb0paha4icm"; + "EPPS16_Jpsi_d_90CL_Pb_hess" = "0xflijnwabg931z19v8c18dzh1lbqivkg94kpwm8j135ya1vpmm2"; + "EPPS16_Jpsi_u_90CL_Au_hess" = "1hagv9akwm337kq3kvkpkdkcpnic7klnigh9pyif1gm16i1q40jf"; + "EPPS16_Jpsi_u_90CL_Pb_hess" = "0dw68rky105lyaagkzkmfx6l9jk763m293m7s972jhnl5037bj74"; "EPPS16nlo_CT14nlo_Ag108" = "1p7gckhv44h04rvknd6fdizy9c1jqfwic7ppf0ra14ic8wp1g7wg"; "EPPS16nlo_CT14nlo_Al27" = "0hxyakfgknmixxndfj14i44afp5gcfz9afjvjdaj702sv42a7qa8"; "EPPS16nlo_CT14nlo_Au197" = "1g272110y3a1fr6raxdfhagn68i0lcnwbdhiiqg4j6wb6v4m3p6i"; @@ -421,10 +448,15 @@ in "JAM19FF_kaon_nlo" = "05mcahzr0k5w0hqfbn902lmkwxlkbf8wrk6akpqnfsyqpbmhja5k"; "JAM19FF_pion_nlo" = "06krcf0c9jbbpwf1rk1xd5z7rz904ji984xz05kv9p1j1vgk0ha0"; "JAM19PDF_proton_nlo" = "1zrcijik60rci6km5d8pn8ivww8w3v8pb1m5dshqjs51lhf56ayp"; - "JAM20-SIDIS_FF_hadron_nlo" = "11g4syy0r46m1wvzq0pb84s4kk2aihjmhx16mr8gzv5b11520a6d"; - "JAM20-SIDIS_FF_kaon_nlo" = "1b2rz6k0g6ck3m28vdqjnnfc025ql5alhjmgn1l84cflf4fvkkgp"; - "JAM20-SIDIS_FF_pion_nlo" = "15l98gmzsqxw615802si94dmj8ihsz6n1mraxkkwjl86hm8nalzi"; - "JAM20-SIDIS_PDF_proton_nlo" = "07xwp9as0nscm4whl5x9bry1p54yl5qmj2r3hqh6vjsz6mxksdjp"; + 
"JAM20-SIDIS_FF_hadron_nlo" = "0bx3igckr2dszxskz5f952vl0q7kwvxgyb28yksjk75325dp2f9c"; + "JAM20-SIDIS_FF_kaon_nlo" = "060r6ah5843vm1r3rhjvlgp7w45z39cqgibfc2g2m1q7mwjqccjy"; + "JAM20-SIDIS_FF_pion_nlo" = "0fpkbl5fw76wgk8l599kf51mqa0fy92bq9ksfjfks0c4m6ah1g5i"; + "JAM20-SIDIS_PDF_proton_nlo" = "1g1g9n2ij58yzvgrw8g1f8jbqyhj9yvbvl9iqjxllkhkb2zbllpl"; + "JAM21PionPDFnlo" = "0zn7p9ny6072dkhsiaq64f2gdzpqbqc06d9a21rvvg3cgsba9jg3"; + "JAM21PionPDFnlo_pT" = "1wxpkk1wzx1z1kwxfj6kz14pxlckb96aqaq2fa4sf1a0ph1ibrc8"; + "JAM21PionPDFnlonll_cosine" = "0nbmdc0744kl6r7r9lfs20gffpjyxpcfpkp7f336fn1mcl89wggn"; + "JAM21PionPDFnlonll_double_Mellin" = "1n8fqar0dddc92054kg3pl1xlh6z7smm3glv5fvfxr933bxli5g8"; + "JAM21PionPDFnlonll_expansion" = "0dmmalgmp4xjwimyfx0sa8yafzzm0xzqk557qwkli3ramzwrwy8p"; "JR14NLO08FF" = "16azkqxf1yw1j32ay6j01gf8n9n7qm56jh4yzgjag0zdhm01lbip"; "JR14NLO08VF" = "1ilw38pp4vy8c8v1glfi4ixca73wjkdg3di1wh9p8xqrifdb096p"; "JR14NNLO08FF" = "1w0pywmjb4xi7bsvv1mdd4q2adf1g7khspfbkphmlh8zipx29nxx"; @@ -435,6 +467,9 @@ in "LUXqed17_plus_PDF4LHC15_nnlo_100" = "18y3pa6gjmcv2s21si9a5dvbq6xxqphbqz5qiy39c62g2zf8512c"; "LUXqed17_plus_PDF4LHC15_nnlo_30" = "1bnwlxr8p4xmr36zd2flhqssil6w7jh50k46j0mxfnd8jgxgwn6n"; "LUXqed_plus_PDF4LHC15_nnlo_100" = "08jzl4wcsrr9agycq1r5kd5bqxsx4b637nxk34s82vs7vwpq7qib"; + "MAPFF10NLOPIm" = "0w875dh5klqgggcr84g0g7qmh4q2xim8nrf0xdnfc665xww7v4my"; + "MAPFF10NLOPIp" = "1493k3p7b03sw0n7va60vqxcry2b3xgpww6fnk2gx8b4w1632yn2"; + "MAPFF10NLOPIsum" = "0x6sashkhg1hs7wy6fyln12s1f4yavvc90zv4k7rclbah4hr75wm"; "METAv10LHC" = "1vn4wnx1blz6wylbzirswdqqf0knmyh1pcfh62wvj695mh7i0w16"; "METAv10LHCH" = "1p4wy7m1ksz0r1fylwz3cbq7jl8s58v817n3d898l83ic2ghp4vj"; "METAv10LHCHfull" = "1w623939fjdyx1316rxyaavf6kmxff19himr00br57jrw3v49nfg"; @@ -486,7 +521,6 @@ in "MMHT2015qed_nnlo" = "1ypqiz0yz6hnxfml7ym83k4qqvqsbl39abbr38galns8xzzpi03m"; "MMHT2015qed_nnlo_elastic" = "17in1cz5j7mm9qjk8i27fif6x276lcqmccl7kfz8a5yn73xxzja4"; "MMHT2015qed_nnlo_inelastic" = "1ngk4p7w8l8b8sfg6hlm8ypxz97i1iwzlrc48szy7bi99kn8rmy1"; - "MRST2004qed" = "1kdrzk2arvs36lnpkbc94w06hx3nh8nixh2qjhb271c2blwgahzh"; "MRST2004qed_neutron" = "12vna0ic6gh313k22b44b0k9kd939v7zjl2hj65k1075j23mq425"; "MRST2004qed_proton" = "10z0cr8pnr0lfxxi916naiz381a2cqn461jblfzvvddwqmqbllbc"; "MRST2007lomod" = "13ar6hzw9al20zlm8lg0hvwmgrmv0dbam820gm36rj8p7i33qlr6"; @@ -494,9 +528,45 @@ in "MSHT20lo_as130" = "0ivjvqabk9jnrlrczjlqywmijx5ql8wy579j77qkl1vhv7sqccm1"; "MSHT20nlo_as118" = "1qwbwcq8p4hrprz4ib18mp5142b0lbyyzc1bf5a4iq5jjvi5qm93"; "MSHT20nlo_as120" = "10y1a6iryahrafzdqskypjrnad6xxq08gm72pa9yc61xdy6andc6"; + "MSHT20nlo_as120_mbrange_nf3" = "0548pw6lkwwqhlrg7c4cyqh76bcyz78yh06fs7crdbx7hfl47cj7"; + "MSHT20nlo_as120_mbrange_nf4" = "0pg49mad4845llj49a1piaggd8wpwb2s4ar7jydlhrv7im886by4"; + "MSHT20nlo_as120_mbrange_nf5" = "0148agm89p7pwzdfjk8gjdaicll30xhz6sawca632kp5qwyd4g3d"; + "MSHT20nlo_as120_mcrange_nf3" = "187hgg8klk5jhcadiy8viyrfi0jfb3i18jckv6d7nsapixz1wgkz"; + "MSHT20nlo_as120_mcrange_nf4" = "12pjy0igjcsih100g238v143kq5cjjm5a13cghcipgcz2w4ldglf"; + "MSHT20nlo_as120_mcrange_nf5" = "00mipn9ndnw1k4nx6pmxb95wddmh98hg9k0317vlirxrf2n2jy7g"; + "MSHT20nlo_as120_nf3" = "1zy0j9qc28xpav3gx24r6r02zfz49r11ic66hkyq83d3q3fj751b"; + "MSHT20nlo_as120_nf4" = "0fpyf9s9ppb6w49chsmb2xfp9gwkx3ky9v8gwwqxli9fpzsc2ywn"; + "MSHT20nlo_as_largerange" = "1f57dvxas2c6qnv38ysnsyf0y8imafnrxkcqh3b0an19mkln5mmg"; "MSHT20nlo_as_smallrange" = "1rygvj33g84whl24kgpqa47g11c48l93jlnpzqq8f5zr1ijqcq7i"; + "MSHT20nlo_as_smallrange_nf3" = "1d6cr1akc25mwfyghvn1986i60l76fxb8fd02h7f4pl4lmnn6i8n"; + 
"MSHT20nlo_as_smallrange_nf4" = "08js1l5g6knjx3813i0rf34xfjkbn3mdsrbawzkn00vf49xzcxdj"; + "MSHT20nlo_mbrange_nf3" = "1bwl4inshg5h3sslss4lgvcqahb5q0n794ag73hyacxd1kmx7d2z"; + "MSHT20nlo_mbrange_nf4" = "1sm995pzzr601wcs4rjgjs6alm74vc23szgmkqa5dvxghw7x8w55"; + "MSHT20nlo_mbrange_nf5" = "1sal7iqma7770593ifxypl9zapvba693asc8m2fjsbgpbfjjid60"; + "MSHT20nlo_mcrange_nf3" = "0lmv6m8m7zv0s8kjfm8parr9xlfy39dhwxn71z33f5x6cyp08zlg"; + "MSHT20nlo_mcrange_nf4" = "0875zfv0dws8n44fqawa1jp5p8b9vky8yyq48wijhrcph8qbxbrx"; + "MSHT20nlo_mcrange_nf5" = "0qjvam29zwibx54fgijry58vdwqkiwyavdwn736bmckjycncnv8p"; + "MSHT20nlo_nf3" = "1v0mzsa2sxp0g3m3d8yqcs7dgi74am6cpx79f341ahpwybz5x829"; + "MSHT20nlo_nf4" = "1lyii55adqaah6sm8b778q8scaq5yjycq3s8jdi3k48z8m23zqng"; "MSHT20nnlo_as118" = "1yz0003ixjg97974648qba5d37vb4fhzzmq4k9xh4c37pnc3kgyn"; + "MSHT20nnlo_as_largerange" = "014a9x6zsw3w7b6w3v6lg8qxdjicxslr79bnagi6ci0skqs2v7z9"; "MSHT20nnlo_as_smallrange" = "1bv7cbdynp6dm5c9v7r32gqy1lch4428apw426pr0d7xpm0abnxv"; + "MSHT20nnlo_as_smallrange_nf3" = "0n1j9dd069qdmgmd85m7wp29g96cnbgsdxrvlh6y51q063lkqbwb"; + "MSHT20nnlo_as_smallrange_nf4" = "1jkdl3rd933czz753qbi7yvszg376r6bvwq2inqbslnzfkfav5bh"; + "MSHT20nnlo_mbrange_nf3" = "095zcxhpfhlsb67ki4j6a8z4d1r5hzx92xlzprwkwd4v5ya4f732"; + "MSHT20nnlo_mbrange_nf4" = "1114z5a0pk9svps918zifff23difxf37rbmr3jnzxnyp1vfgzws7"; + "MSHT20nnlo_mbrange_nf5" = "12i6m01bmnzqadi6jg5gmah1jliq4wr4p0vpjgmpgyagsk4drx4b"; + "MSHT20nnlo_mcrange_nf3" = "01wq537083dr9ibiahwdwdaxj0j2ys211m478w9wgvihdjjvjvvv"; + "MSHT20nnlo_mcrange_nf4" = "04abrnlnbr94lksn29w9ws9a3b6sqkqvi9awmbk4715dxk2amnpk"; + "MSHT20nnlo_mcrange_nf5" = "0za255xi66a1mfch2b91qqmsr305m0kvs4rzvqlzf7k0v141j90y"; + "MSHT20nnlo_nf3" = "10yx59r32q4rl0yn4gpc29z8xskbvizkdr7k219qf16lnv892jpa"; + "MSHT20nnlo_nf4" = "17d10rba8b0aqi1npnx93j1995has5sw2l4izd6lly3yhjynbgp6"; + "MSHT20qed_nnlo" = "05c2hjgysyvkcyqg1lq3y00hqixgc8w984zivxkr76nj5csyf6m3"; + "MSHT20qed_nnlo_elastic" = "0in0y27gn4k9h6ya77krhizv8rg3i6s3d6h4bk0fhz91hbi4l3gh"; + "MSHT20qed_nnlo_inelastic" = "0vna0gbadf92w3xngb8gnsxd3lil8m029as8260wlki2axm4gqlw"; + "MSHT20qed_nnlo_neutron" = "1hiq7m8j7736477vfs18mqwhkxyvjmcrs7jiqysa8v6rmhb4bali"; + "MSHT20qed_nnlo_neutron_elastic" = "06dbvsysszpbvabgafb569vq88q1sm9mvz1iwv1m2b9vzin2ixiy"; + "MSHT20qed_nnlo_neutron_inelastic" = "0gfh6nhl4vq4qz1jhnv5vhhz4h4wlkwgj4ffjqll6jqa5anfhbqr"; "MSTW2008CPdeutnlo68cl" = "1x2y7hl8ckplx175bp3wi04xafm44dd7vzfgnmvvai1x0072xi51"; "MSTW2008CPdeutnnlo68cl" = "1szsdqjkmny30mpw4pdzi97vj7i55agxm285dvnkzp06ycgp1ld3"; "MSTW2008lo68cl" = "0j12mv286r4ds9v7piqh4n44yjnc51hm74lqa4vv5xznxhibng7l"; @@ -854,6 +924,32 @@ in "NNPDF31_nnlo_pch_hessian_pdfas" = "08baysni2lhbpr1scx7h0zf64gyncj2ahcv4y86142gl4zqrafvp"; "NNPDF31_nnlo_pch_pdfas" = "09mw3gr7dz0vwdnralaplvlz2c464lmdizf673xsb0wlm12pqf6g"; "NNPDF31_nnlo_pdfas" = "0l92q3xhdk5nrnhkmrirxnvplj531rdpnblnacd759cl4hgxcs2q"; + "NNPDF40_lo_as_01180" = "0m630n5i7s0qnlxzk6ka43qfp6ipz2jgzr7ys42hdb66kg3z5lkc"; + "NNPDF40_lo_pch_as_01180" = "0wrvmcgdaipi7vy7j85yszaa3c57hs3xll9lfn9cdbjscvmsx6x5"; + "NNPDF40_nlo_as_01170" = "14j21qryc25k1jk4ypdcr3cgfkhyy4hsb57hy8x5pggfwc2w3f36"; + "NNPDF40_nlo_as_01180" = "0399bnxvgl2h2ini198jmzjjb179f6dpxfv5x8imlfl515llivx2"; + "NNPDF40_nlo_as_01180_nf_4" = "01kwziiyg8vbl26znv7khqdckm501d7ccxlkq1y0cd9s1f5ff8xi"; + "NNPDF40_nlo_as_01180_nf_6" = "1gv8anb3vqpzdymp9g69702x64pbh6l2rn9257hdpz4i4m98rsjy"; + "NNPDF40_nlo_as_01190" = "0wf8p2i4mxs3hkqvxg1clj082yinbdgccr6qx5kbwkzsck4yybar"; + "NNPDF40_nlo_nf_4_pdfas" = 
"1ia9glingyds6bj6yxy867ahriqdhfkxczzc4nki933h6jbj74bf"; + "NNPDF40_nlo_pch_as_01180" = "0gad0hjq9kwiymc9pljj4z52jsg794m3knb38zj9icgjk0p9lwxm"; + "NNPDF40_nlo_pch_as_01180_nf_3" = "15j3vvc0vaf13d8cnyr4h7gwb7iznrcajnw59ryx6ksz7cn84sy4"; + "NNPDF40_nnlo_as_01160" = "0y9115xlg14m1ahfffiam4zp0axga86bhzfxf0xiaxb36yzbmdhw"; + "NNPDF40_nnlo_as_01170" = "1h6if1zw9dqlfnn7glbl5flj792i7fqiy7pprhwk93k4snh3800i"; + "NNPDF40_nnlo_as_01175" = "1dkzzhmkmzm92gmb83lyirj3clicrg70h5grzh0j8nfcw0xlz4rm"; + "NNPDF40_nnlo_as_01180" = "1bhysjkji0k7xy9njarkfvxff05kjl1byhkknxv0875p2znzkpva"; + "NNPDF40_nnlo_as_01180_1000" = "04yai94qwd7187wg26icwf2wbwi20747l6zikd66ygsz9n34xn9y"; + "NNPDF40_nnlo_as_01180_hessian" = "0cmpqgaz341hif0gdkzq8mnfh9apxn1zxjwa12fk5svbig9i1a81"; + "NNPDF40_nnlo_as_01180_nf_4" = "1aj5y3fyvna2jwxbsrgr2cbc452aprxnwv78vh3ph20jlnww20pd"; + "NNPDF40_nnlo_as_01180_nf_6" = "0bf6s6s0w7l592bm87mazwff05d8s4qlblzl9yj91am3xlv0cvn1"; + "NNPDF40_nnlo_as_01185" = "0y1xn9qcv2l81sbz9rayzrpd1bjsdyixr42lzfgmqbq45sw2m04n"; + "NNPDF40_nnlo_as_01190" = "1ibz4yfrw1n8plq4gi03yxi9afaca2yprxfk2y9lvbkycn608d0q"; + "NNPDF40_nnlo_as_01200" = "1cfcn819aali7ylv9y6yb2ggqy0yghyj0dys9lf9gv5wiqjwh5q2"; + "NNPDF40_nnlo_hessian_pdfas" = "1b5jvhdk7fmnz8gl38i3408h2qrqcsr7s9v7bh6ilc7x52xsg65k"; + "NNPDF40_nnlo_nf_4_pdfas" = "1ssp315xcqpc4md5gijbi2c02r6hpazp8yw1661r4m7xy5qm62wc"; + "NNPDF40_nnlo_pch_as_01180" = "173l2178plrir9fb3bq17l4dw5qfy9clic4m54wqg7y3r71bmv7p"; + "NNPDF40_nnlo_pch_as_01180_nf_3" = "1mh0pl1f1ayx1fjj0q1fw9s7wc7gmj7a46jli87s4g5nxrhyki9f"; + "NNPDF40_nnlo_pdfas" = "16d81h0pzxzgwwrfjghmradx4bijf08xbrdn79y9bxf6czacm8n6"; "NNPDFpol10_100" = "0r5qfa8cyanalphgjdsh57s3viqv9i10v51p1pyamj1f90gb9pr8"; "NNPDFpol11_100" = "0nny1lpw37jcillpfxjx82hq7wlzp4yksxialmc2ivr192qqdda8"; "PDF4LHC15_nlo_100" = "0m9d4zy7608iryqy1ypgkr1d3yhw2wv1nrrc70zrfih7x0fp7lz7"; @@ -871,6 +967,14 @@ in "PDF4LHC15_nnlo_asvar" = "03fh1jcbmvla7n2jj3zq4ibwvq66h0rniply7h93d94zawcgsy4v"; "PDF4LHC15_nnlo_mc" = "0c6nfkv3x1p5iw514knjvqcs1dcaryf74qqg1za8x234yr5ndi3p"; "PDF4LHC15_nnlo_mc_pdfas" = "0l8hlcz69cdii7mpgargi9nsx7iy746nad5pnn7pvycrc40marij"; + "PDF4LHC21_40" = "037bs1l7zr3z8zi6wzh5kxgml84bl64258fr2sm0dzv9yxh8lvp5"; + "PDF4LHC21_40_nf4" = "0m2ki4qmgl1ggim2inlgynjzqr4ya3qgjph63jf72kia8ks2hfl0"; + "PDF4LHC21_40_pdfas" = "0pigpix2x2bv1a5ib17zjlfjqygjpjra0pgvmf6knm7mbgwxhqm3"; + "PDF4LHC21_40_pdfas_nf4" = "0dayiilh94cszqiphr487589qacawfp4za2cngr3da03yg1aswbd"; + "PDF4LHC21_mc" = "17fbwk7fp2m3fd0xzp94sp7m0mmjmzakg870rbhg8vi88bimmwry"; + "PDF4LHC21_mc_nf4" = "1n7h45yxw2mwppf6zblc8v415khy9vgrackmbfkm5lb9c79faas6"; + "PDF4LHC21_mc_pdfas" = "1pn9a7z0xl1bn18z461j90sjglccswimm4p23nyq0fxal5ziidbl"; + "PDF4LHC21_mc_pdfas_nf4" = "1andl5n1lw5iyd337czph5abd0sqc3l90b21g67af96am5pprcpk"; "TUJU19_nlo_119_50" = "1q1dhsxz1kq75rpzv6gg6p6bzvvv0d44pc4y3wsiy9g14aff85vq"; "TUJU19_nlo_12_6" = "006j6y4xbjss9apzagjcc3r1z6s61a1hzafhcyriiffqhn8bg50k"; "TUJU19_nlo_131_54" = "0ymf35alyar6fwagmdny2zz2aag576f38kail7gh2lvqpmjmv6np"; @@ -916,7 +1020,6 @@ in "abm12lhc_3_nnlo" = "09k90vhjq7p0i0aaq2697pq2dc86bkmnv4q8zyqxjp3wnqx1v95f"; "abm12lhc_4_nnlo" = "1hciv1z9b5fiz7swv21gr0rshijj9yj2n8x4l54v9g0jyd061jaz"; "abm12lhc_5_nnlo" = "00xxkrhbfkxhg33mkpwwk5nsdp4nmi0zmllx5z5ygxl24rinsq9j"; - "cteq6" = "0lp110wldhliad354v29f0rhdzf5qrs1ibklj0cmzp2rcbp8zrix"; "cteq61" = "14hbc855b3wsjk7ypg86md46cjm1bj7n4hins9nr8kgzs69i6vss"; "cteq66" = "09i69ac3gkrai5jmazjyjvi5sl8k2vm48m90ijn6pl24p31qf68y"; "cteq6l1" = "1b5m7g7wawk72h76l9yr3gx3n67jggna1004lwffvj43gffwkjap"; @@ -943,6 +1046,162 @@ in "nCTEQ15FullNuc_7_3" 
= "1ncarbncfkqk6l3rx3zg34a3sj7mpm2diqsafyldpn92cw66bcs2"; "nCTEQ15FullNuc_84_42" = "1z719mcx5lnx2ciwlnxxhgc4s00jrr9sfrxcimh69sj14hmzgx0d"; "nCTEQ15FullNuc_9_4" = "180ipb4m2zy54h7n4s0jwqk9k6562bygvnv7mg9dp2f7vf5317a1"; + "nCTEQ15HIX_108_47" = "0iqv6rsvvr5mqiaddn2cs6psrslw6ncqxca993v0z0hng9ahnnwd"; + "nCTEQ15HIX_119_50" = "0gxdr596gk69sb76r90p5ksvx9bk4axj21qrsyxjf1bmgdg2rv72"; + "nCTEQ15HIX_12_6" = "0vkkz3hq0irvfb08cpdijfvv17bcvzaba5c1bf8kwx1i2zl5s6xz"; + "nCTEQ15HIX_131_54" = "0pbm6390cdglxqzrpl2slv65m943m32i10c49pf70fg68x3l2dff"; + "nCTEQ15HIX_14_7" = "1yshwpa3zzzmf7s5v3c3130ysm1wciicz202hrraz4px102h8a06"; + "nCTEQ15HIX_184_74" = "1wy1691pc776kv456cbjl5x5rg2z7cycyfny24caq1qvjifvbr94"; + "nCTEQ15HIX_197_79" = "0ckbp8cw49ch78q4nsm1fccn6nizpipdp8q85nnipql53xsnr4zm"; + "nCTEQ15HIX_1_1" = "13sfws9cmrsnp26mx4m2n03gary2m10l67bd8xic8pykgpr8c695"; + "nCTEQ15HIX_208_82" = "0fqvzpqszkyqb4f2y44hrdj7rvadbqj6y8fzkl9xzk432lalm8w5"; + "nCTEQ15HIX_27_13" = "0z356q82x6cm52f3qym7vkajlkf2amkz87as0jvfiygvi4gnilcn"; + "nCTEQ15HIX_2_1" = "117dgz8nx1j64xgqlp40zmgpg0z8b0p1j75d1llvshmksba43avb"; + "nCTEQ15HIX_3_2" = "1k1hxpcg39hrh629ml2kwnhir6pq41rpic48njq2lagmrcdpbn52"; + "nCTEQ15HIX_40_20" = "0q1ggww4a5cdf802737kh7igb0br3q50xw205v4b5p7v237wsiym"; + "nCTEQ15HIX_4_2" = "0n9zf4yxvp3b3ryxbkdw0yilsb21nrjh40ms883d6j42slzhf37f"; + "nCTEQ15HIX_56_26" = "1z39vjgp6jryvqaxv6jq05jj0lfxsj212g17amch65pgxm4l7cwg"; + "nCTEQ15HIX_64_29" = "1iy1g1226irrjxz9wwabwd6x0712dm1aap7ka6fy9wwwbn87f6d8"; + "nCTEQ15HIX_6_3" = "0ljq0h0cdwbfvlpd40v2nypklimw4hh6k999mnyxqfvq47m13ffl"; + "nCTEQ15HIX_7_3" = "1y9k2snzymzgs26ayfn2c8n6isqzqq14pzf05xvlxmc3k1fbsfyx"; + "nCTEQ15HIX_84_36" = "130g553b22s7plgy51n56az05v1pnfgrg5knpg1knx8xr8a9xhh5"; + "nCTEQ15HIX_9_4" = "05igk9g2i1gm7d7npdwd7y3k7xf39scx1mwiw4m39b52kbq7kszp"; + "nCTEQ15HIX_FullNuc_108_47" = "1h2x2h2n02nwinf8ba3yyqa61384p4g29ib8vnrzwzc7q34940i0"; + "nCTEQ15HIX_FullNuc_119_50" = "0m5z036py2m1863k62pzysjdr92dhyiwmb8lxgky5skd7514rrsd"; + "nCTEQ15HIX_FullNuc_12_6" = "13rsl7lczf5qzjgy8m3dp4f1gyib2ffvf86iwci91sq51sma5kk9"; + "nCTEQ15HIX_FullNuc_131_54" = "1bsi3y6b7sxykwablgghqc89s3vxrvjr9y4bvhwcm21c2i4y6vbp"; + "nCTEQ15HIX_FullNuc_14_7" = "0w93qfiqdnzvrry9ddpbqcy6y16sszxfmvvgdy6r8i8py8dzfkyr"; + "nCTEQ15HIX_FullNuc_184_74" = "1q93f9rvlc4z3hzfhdmmxlyqfschrf99a0xyrc78rq1z2pfgmm9b"; + "nCTEQ15HIX_FullNuc_197_79" = "0kb2lnvh8mq25i5pkfis86ggs0s6hmpmyqqgimhcsamk8xnf2pcv"; + "nCTEQ15HIX_FullNuc_1_1" = "1nk792chip0iamc6dlqgqqlc2qlxlbgvgnhsvbmzkmmzvp2ji961"; + "nCTEQ15HIX_FullNuc_208_82" = "1l038nhnwc0pd7sq4yki3sjrrn66bsvqkvba2j3kf4f852gsyswr"; + "nCTEQ15HIX_FullNuc_27_13" = "1gdb09429xn5n07qqjch9x1cnq4xinf05wla8fsq69qr7bcdq9mp"; + "nCTEQ15HIX_FullNuc_2_1" = "0fnbpwv8qzgchc2r2gcjgw9yh4r94vm1rzbwj1f6ahbwrsl4rfp3"; + "nCTEQ15HIX_FullNuc_3_2" = "1r1s7ljwa169nbgc1439cj0gkaqza6ilx3x4gyq2jg9byz30khzx"; + "nCTEQ15HIX_FullNuc_40_20" = "13p7mnh2h8abih8y24rfapnrsd036pc8xy8qm82j8g4bk0j5jacx"; + "nCTEQ15HIX_FullNuc_4_2" = "0j1w7d11ybmpfci79lz99i94szxf5xn6z4kxvh8hly3qlcgn636j"; + "nCTEQ15HIX_FullNuc_56_26" = "08wds8afv0fzlaxck5i2d3pzvi5nqnc1jmq58fpnc7i0g238wl8k"; + "nCTEQ15HIX_FullNuc_64_29" = "0w4hn5iwqa65lnf6mhihx5qrq4wpcqw04ii3jphy79l58j3i1iam"; + "nCTEQ15HIX_FullNuc_6_3" = "1ki2cfsg0wmvfkzv2j9akiyl4d4b8v3d6f65iryxakjkhqj6vvgx"; + "nCTEQ15HIX_FullNuc_7_3" = "14mx5lh278p3zdc572bhxw9sc6n7ga0ak0ch85h3lx9zwg2a1m5i"; + "nCTEQ15HIX_FullNuc_84_36" = "0hyxyhb2mn64fwmijigw8m3v5zlj52hf2hicvx4gcq0lw063jxj4"; + "nCTEQ15HIX_FullNuc_9_4" = "17pydzll5lgs974gz4bchl2wxc9ixfpnqjrsidzksl4jf03gn77z"; + "nCTEQ15WZSIH_108_54" = 
"02z08pzvl8fa0bi6ddrlbknj0iryimw02r40z0nn7p8xf99qabhz"; + "nCTEQ15WZSIH_119_59" = "0r03k6j6nd2mvdkidw4gx1xm1s9hil9z5kanxsn2hzp30ab971db"; + "nCTEQ15WZSIH_12_6" = "00d4lis1qas1k8yzfb3dbqgvy9ynv7h9lx67ys3mj1ws5fqyn5al"; + "nCTEQ15WZSIH_131_54" = "10frai6qmzvp8xpkanl1qlpnc6chf6k5j70f4pw4abw8ycbjymij"; + "nCTEQ15WZSIH_14_7" = "02qd5x041p6n8rzv5l446481jb9vkc5nrc018vcg41735azr0d0p"; + "nCTEQ15WZSIH_16_8" = "164ciyxkrxp33r5nrkl86gq0jvdzm90hf602raamn1b5l5yrnadl"; + "nCTEQ15WZSIH_184_74" = "00nrwb8q6sa3zwbdc9mx4jz5ndibml84lfz8gad9vhy20zayyh3q"; + "nCTEQ15WZSIH_197_79" = "03hzr05vhb0l58iv9c2743c688ygagy1bi5a7zqn8nilsmykbvag"; + "nCTEQ15WZSIH_197_98" = "1mdlhp643z0gkjpj2jdi5zdd4qxxjqsy93rkv69cn1x5abawdl8l"; + "nCTEQ15WZSIH_1_1" = "13wskph284niaxqx92yaa4jg749ry6y98ds1ifvwc9970iccdnk2"; + "nCTEQ15WZSIH_207_103" = "0mpc6msqqjbcs756zd426xgzxmmmcmk8cp8wh2mpagib1drrg1x8"; + "nCTEQ15WZSIH_208_82" = "130bs9y9337pmgwi1ix0ar2xvhfhl4rs0626kbin5yrxhdy7rpha"; + "nCTEQ15WZSIH_27_13" = "1r01sbzixlrqxkjp9kx3s4zgsd48myivyc70p7ha83i1qrq50g7w"; + "nCTEQ15WZSIH_2_1" = "1qlh3zxbg13sq187k1fmssan00ifmqr0q2l7i45vc8jz3mk70098"; + "nCTEQ15WZSIH_3_1" = "057zyvlxz3hwlwgydccl2y124bvc6iwqqgav2jqw0r53a39rc25g"; + "nCTEQ15WZSIH_40_20" = "11psi56yk2yd75v396j68hfdacsnxvng2bw4v9g4afbjv9697jgr"; + "nCTEQ15WZSIH_4_2" = "0g4rvlgksw5gf6a7zh7yzsi2sq5jqkwbrx6d7jgz7zpg8jkg4qrz"; + "nCTEQ15WZSIH_56_26" = "12d0bj92b615kzadxwivp1q0j906m2rlcxfflwgwg21sv9axhi4l"; + "nCTEQ15WZSIH_56_28" = "12yx9v87y59qf14005fmj55n38xnhlvc7qgcrgfsicmdbx3ncm5x"; + "nCTEQ15WZSIH_64_32" = "02w3a4sbygc72acxnfc6lhird4nxcgq5dprfldg10h7f9lr7441y"; + "nCTEQ15WZSIH_6_3" = "05x3cknlnc1kbqnmi3hk6fjgx07dhl7b36rg3abaqn4yg65i79sj"; + "nCTEQ15WZSIH_7_3" = "0d6cmpv6csysr96knip033mw7sg56ls1gcq8gvz2qy7isj167gx7"; + "nCTEQ15WZSIH_84_42" = "15zva7n91p6s98kkhcfsvws20s26fl1bvjql8m1n1c2d5pr29wj5"; + "nCTEQ15WZSIH_9_4" = "04cs89s2m99p31jkc2k4f5i5rr0l4fpa6a41d59zvknhgfy74yvw"; + "nCTEQ15WZSIH_FullNuc_108_54" = "1wk8vhhlzj3wrb994s66q5zmwxhcy9vxpyks36s3jr5729jxk1j1"; + "nCTEQ15WZSIH_FullNuc_119_59" = "1rrkj7inah6bg03mmxgza39z40ghdr8km9hy5v5b69bvqcyr42a3"; + "nCTEQ15WZSIH_FullNuc_12_6" = "14wgacbimifnaji6byq1cds9zz266a63bvv616b0n06391plvzff"; + "nCTEQ15WZSIH_FullNuc_131_54" = "0k6gncwq4l5avlnr4qpvakklysi6g855yqksylc42ndlgjm4jxfy"; + "nCTEQ15WZSIH_FullNuc_14_7" = "1mlffwsn3f942flxvvi0rp63xlcrq88ir9vffmkzh3br3qpm4q41"; + "nCTEQ15WZSIH_FullNuc_16_8" = "1ga0kijnlzjz98j32bakcan89bfhbhq8y08d84d90xpgaqkpb9z2"; + "nCTEQ15WZSIH_FullNuc_184_74" = "1nhly8065kabzjjkapr75vafx46f1zl21xc6fdcv15a2qwx54n0p"; + "nCTEQ15WZSIH_FullNuc_197_79" = "0l9c684f427b8hhwm68swh78n6104nbpdxq6v50zipwc6df0j6w9"; + "nCTEQ15WZSIH_FullNuc_197_98" = "0vc285bd21arpmaykb6baspzr8ak42yx9h4j0sx2vj07l648g5hi"; + "nCTEQ15WZSIH_FullNuc_1_1" = "13gbjq46f5cdpdr902nxv261hqw041f7ryxbwgvxr3k2zm7h8fw0"; + "nCTEQ15WZSIH_FullNuc_207_103" = "08m08pcz0f72nd7zcrvi8cl5va49djlvdff1w4is4gmsb44khyv2"; + "nCTEQ15WZSIH_FullNuc_208_82" = "04x5icmidi1p3j8bdarl3sj0ak6g2ygyc5wmkkn9g80qqn4mxwva"; + "nCTEQ15WZSIH_FullNuc_27_13" = "0x4iv1kxb5lp514qm1nr3k32m68aw7sgy12nhdjhy2dv018snd8n"; + "nCTEQ15WZSIH_FullNuc_2_1" = "177qv17wv15sr1zcm2p6av20h32cjkspf4jj3jvvvgvks947n7dl"; + "nCTEQ15WZSIH_FullNuc_3_1" = "0aawk1ppg3nhl5n6gpdi6l1rw53l2x2sv3fwjz5821b6d7cmb9f6"; + "nCTEQ15WZSIH_FullNuc_40_20" = "1271dlp580a5gm29sv6b8plpc8d06j57x2xrfjyp7kafxa1158ii"; + "nCTEQ15WZSIH_FullNuc_4_2" = "0sv20zpzbinrz5biia1g3jzgyq0wbqaqrmrhhcyg2yxg9z48vgca"; + "nCTEQ15WZSIH_FullNuc_56_26" = "1wz1vwy2q85qz85kdy9gzzhnvv0jy4iazzavf9janz2xzw7833gg"; + "nCTEQ15WZSIH_FullNuc_56_28" = 
"01y0f2bz5yxmfd719fq8s1i3q5wb0dd81l0qkllpa961db83zmz2"; + "nCTEQ15WZSIH_FullNuc_64_32" = "1a93nqx26pj9kyvy66dmm4ib2pl5qwf03420q84zdl1hlcgaszzc"; + "nCTEQ15WZSIH_FullNuc_6_3" = "157j927a4x53gcam5kmpcpkyk76qgdlszxa4bcj9wlqrygwxsk3k"; + "nCTEQ15WZSIH_FullNuc_7_3" = "0l369wpd3gcb6i452w2hsjvidz80xl623xf1g1p8d2485nrvh6jm"; + "nCTEQ15WZSIH_FullNuc_84_42" = "1nikb7yk35s27g43k1wlgcfxqfyjf40csn8a6aiabliqdfjacaqv"; + "nCTEQ15WZSIH_FullNuc_9_4" = "0nnxf32kllwvm3fyjlswnyjx8cpsanx15qwsn03z6d67wx2f87sw"; + "nCTEQ15WZ_108_47" = "0v1s95f0wxyz73pfv5z6hc4mslxb7ml6imjmkhn2p9yx5mvk94qf"; + "nCTEQ15WZ_108_54" = "0zqw1p1sls58v7aacmwamlic0vsjyjijfam6bas5lh92rrmcf9h6"; + "nCTEQ15WZ_119_50" = "1wwx1rsjhd8rqrvyq68r70issnsby8zrlr8d76br3622mxqr83a7"; + "nCTEQ15WZ_119_59" = "11hckxdqyfrbsv3lc12q8zjysr2nw9mhx438ff13azp93ha7h1v6"; + "nCTEQ15WZ_12_6" = "0naqqy223p6gyw2r1qy5fs185dhmxzmb3zmkx3bbccqnspk7qcnz"; + "nCTEQ15WZ_131_54" = "0fi4sjv7rs90yac1zmnlf38dv5ry9zwzxrjwakpzh00irxw0cz8r"; + "nCTEQ15WZ_14_7" = "1k4sc588n3rd8fvcvwhxzc405iqj28xzv7y1md45kz1c7m6qy4cj"; + "nCTEQ15WZ_16_8" = "0ndjlgbggbk3zk5bm9nm726ci1v8b3qfy3gag14jmp7q780zyhmy"; + "nCTEQ15WZ_184_74" = "08cdkqbjihls2sg4waj9rxg7nvs0mjzxfv3dx1jppiw6f7ljjzrr"; + "nCTEQ15WZ_197_79" = "0ss7wgvx8rv03x1g10c516i0yd65njc7qjh19maij49aizf1fd4l"; + "nCTEQ15WZ_197_98" = "0vlyjqlj9plniim0z7mdhia307i296iha94l8b3iqgzyp9553gly"; + "nCTEQ15WZ_1_0" = "13wnm5sz1sf3hng70j8d3ml53knrx9b7wrg2h6x947jl51flrkh4"; + "nCTEQ15WZ_1_1" = "0pllq4zdgjaklad9j87vilx8gapzfhjh478gcw479ahgjcbbbxxp"; + "nCTEQ15WZ_207_103" = "088d8sr9mq9q4bi0ipxznbm3k2b2k347bj9k5fxsb27f4dl5d0jp"; + "nCTEQ15WZ_207_82" = "0cxagqav2q6kwq83syiad67nmzzkmrg9q0hlshbz6bjlcqmdi4jr"; + "nCTEQ15WZ_208_82" = "1c7vacsr2m6r7dy1b439c46xgxjqvq1gj9y68p2vargm5az444sz"; + "nCTEQ15WZ_20_10" = "1y38sqjgsjrfhmyhf688jir2hgkk871sjz8dm89lm5g7m5c1mgmq"; + "nCTEQ15WZ_27_13" = "0gb2zg5j3jcqjisa3nzsbif4rfi8vshl5vq2vq93d2132qgpzq3l"; + "nCTEQ15WZ_2_1" = "04nhmrvy2m4a2i8b5qadsg8h51k171df1kb7mdqn3hjzga7lg0j7"; + "nCTEQ15WZ_3_1" = "00aansn8jjh8yqyhr2fx8h453nahrdf8j1j0qgny1n1mhad4mc56"; + "nCTEQ15WZ_3_2" = "1pk6gp5a1g15zn5w00l89liz4w7w4xsmpcdk4x50vc7k5phy6vj7"; + "nCTEQ15WZ_40_18" = "0m58lby911lxqy6rvvs959qg5gjbppnfxl34hn81glc0lr90qiz6"; + "nCTEQ15WZ_40_20" = "0dnk9yikivxd557bpi9j7dbpwkf4sk49bg8lf0lxf86bdmi2l330"; + "nCTEQ15WZ_4_2" = "00rr4qwwp7i419sy5wr6f2lz82121ilrvvj0js45bvcqknx26c7i"; + "nCTEQ15WZ_56_26" = "09qjjavqjwrh9adkz6yhcjjiy55dg6c7imnbsi8qxi8xspz8nqq3"; + "nCTEQ15WZ_56_28" = "0pdqsrj4dphjb50r3v95wp1drc8rkdgsbisgg430dj9xaa703ijr"; + "nCTEQ15WZ_64_29" = "1qjvvfzv5nfjp1k1kdv2kz5pjssr2avrnjlxznqvlxwgs7rv8p7v"; + "nCTEQ15WZ_64_32" = "172whqdyyvqxyr3lijw2v45y8nvc0vk7z47i9xfmcydi2qfz8g3s"; + "nCTEQ15WZ_6_3" = "0kr0cf8aqwsn4x8kjwihsynvb99i8bdp4g91digbgcfpp02jyvgd"; + "nCTEQ15WZ_7_3" = "1c76h9xfflfihqyxi9a6dlmg1gwwr2wh93mfac3a3jnx73981gq3"; + "nCTEQ15WZ_84_36" = "1cvykpl9r41dwahjv3kik98y6sx53wiyyzywqc87mhnlx1x5876f"; + "nCTEQ15WZ_84_42" = "1jslkk9wc0fzbm6s5czpmaf004pndwc2ggwnxgkga2idn45204xz"; + "nCTEQ15WZ_9_4" = "0farphic6jyzc5x3ja715wdmvv5rfcgil1c49i8fxqlap5mlgv89"; + "nCTEQ15WZ_9_4_iso" = "01zxmn50r14n1a5gq75pia8mz4ibyqhyjl5d31kv3h69xgdrlizc"; + "nCTEQ15WZ_FullNuc_108_47" = "0yl5ll4j3mr6nhiz9ynn9w3hybgwdypg0b8zwsxz1rjcr3c8bmz5"; + "nCTEQ15WZ_FullNuc_108_54" = "1c8cdilw9n91apqj9lrv9j6hf6mcg3ndndchc96d5svrqlqk4fg6"; + "nCTEQ15WZ_FullNuc_119_50" = "19lb71cmmi4fvpb0bxmz0ipjlzg6b5hrlwiml4bj5r65km46whyk"; + "nCTEQ15WZ_FullNuc_119_59" = "0r9cwy2znpl59ynv2av6whjl3igm4b19rzh6sjqsm6a85d5jjdpc"; + "nCTEQ15WZ_FullNuc_12_6" = 
"1972j8lziq1ckwq2s7qpcf9g7mg3762kfrnbdjf4ilfw5b2b9i9n"; + "nCTEQ15WZ_FullNuc_131_54" = "06w9yl6a11ir8qjyxaakyzs8b51hf3jhm8nj5nvrh4f6cicp1g66"; + "nCTEQ15WZ_FullNuc_14_7" = "095x9pphc9lx71hlhpwcb71p8wx3b1pnv6qd3s6992visismyb02"; + "nCTEQ15WZ_FullNuc_16_8" = "005h4mbf9c7d9w61pmgghpxb5yh63i6cbyxyylngznbbbaxggjcc"; + "nCTEQ15WZ_FullNuc_184_74" = "0niyiq85bmbr596gfrsmf79rksql7n2gqdxw72yrki5ywc3iy4sr"; + "nCTEQ15WZ_FullNuc_197_79" = "0vbgg6m0b1wlrsx2c5xxga0crz0nwvcq9a88f032yjzihh6rscjd"; + "nCTEQ15WZ_FullNuc_197_98" = "0sf4q5bb4b44zkbxq51pf0xhpldr5dj5d0gqpqs40qkdlmkb9z7k"; + "nCTEQ15WZ_FullNuc_1_0" = "1il9j3ggn9kc8k8jpql46lw5c9lq45ngaip45vqppgvl3zcrc4fv"; + "nCTEQ15WZ_FullNuc_1_1" = "0nxvm11qlapnpdkkymqhy86axcvcx84r9hk0dswa2ji5gmk18jww"; + "nCTEQ15WZ_FullNuc_207_103" = "05qqg6ckkhv30nfbs5rdpiwz7dkgywjrd8a9gr3skvlayj2bisni"; + "nCTEQ15WZ_FullNuc_207_82" = "0s27py7i2h0va79q9ivmgrpwk5pxrjx9csyad76fc7pmvi4xsgjl"; + "nCTEQ15WZ_FullNuc_208_82" = "1xygz9px4jwz3vkbx86384775vhiqqv5l2rp2qi42zy8y8ijwaqd"; + "nCTEQ15WZ_FullNuc_20_10" = "06bgr8r6pz8cpvqbncjaazxw7j2qzhh183brs7r5mi6yckg318gz"; + "nCTEQ15WZ_FullNuc_27_13" = "1qrs82jscmxyhqkd4fa2kjglzgig23kqwc7r2n8p23352ggcsn9x"; + "nCTEQ15WZ_FullNuc_2_1" = "0jxam0p1ypd3x8l83d2f14h2av9wk1r69prfhl6pgd6pdh3nx2gh"; + "nCTEQ15WZ_FullNuc_3_1" = "0kjgsiidi33p442bpp1g6sss62qn4pj90ag4hcmdqsdf5m9vpc5s"; + "nCTEQ15WZ_FullNuc_3_2" = "0srxdvk5jh4ga4r8hniikzanfa8fh65xc4g51arxwd9sda4n0mqb"; + "nCTEQ15WZ_FullNuc_40_18" = "1x7d1cpglijq4rag57m8sp6qyzn3c7r3zs5z9jqarsaqc9wv2ypw"; + "nCTEQ15WZ_FullNuc_40_20" = "1l58bvdhqs9i4mgc39dw9c13rsh5vq5vh6zq8xk35150cbysiwza"; + "nCTEQ15WZ_FullNuc_4_2" = "0s1zdn5r9hk7bjk2ggn7bhrzq2iaxspdhmwi9rq1xbsk7n8k3wif"; + "nCTEQ15WZ_FullNuc_56_26" = "15ilca90cvw7p3w1xr15jynqnvnyw8zhpydhyq11x1kc647bgirm"; + "nCTEQ15WZ_FullNuc_56_28" = "16bs4yq4jr4y3431csabgps6330gcw1ymgm2161z4nmmyp988w1g"; + "nCTEQ15WZ_FullNuc_64_29" = "17wqyn8g5zbrm6ywvqn2y5kphy9xwbgijwlzxsni5k821nzmjyxn"; + "nCTEQ15WZ_FullNuc_64_32" = "0ivyka1g2cj6ign8wm28pzim3saas3bvnxqhixxkr91m1gl40b86"; + "nCTEQ15WZ_FullNuc_6_3" = "00v5wydvrvw1fgbzc436cap7amk8yr7dj5wf3ykyd6415vxmlzl3"; + "nCTEQ15WZ_FullNuc_7_3" = "06z2p0gx988q5w4d3c0qx6pj7lp6mqlz1qrnwy5lf4i4i4vcqkkx"; + "nCTEQ15WZ_FullNuc_84_36" = "0rfn82f8f9f6bgngrs67maip2kfhy42i4ppa904d7dspyqkgfa31"; + "nCTEQ15WZ_FullNuc_84_42" = "14lyhhc5jwccqnz85msaqzdx7cn2vxkvm2l9mf1p2kmgy7p5jis3"; + "nCTEQ15WZ_FullNuc_9_4" = "1895lr4yydzk4cry5wnjhpl6gs2kcspjlm2mv2y00sb7gzinjs94"; + "nCTEQ15WZ_FullNuc_9_4_iso" = "0kvjk4wvw98w7y02ishjacn9frncspl6rsxlzcd4y2pdk1ckr29i"; "nCTEQ15_108_54" = "1bjx2d61qjhabfx28pfi64hf8br4gl67nzir3ygdpwdcah4k6lz8"; "nCTEQ15_119_59" = "0g7wffsyjh84r2wv8w67skx8gwdb3clv9c1dlpijwqmpkcm3b8q5"; "nCTEQ15_12_6" = "1xnnqp38zz3b61jb38hz54wv09w06fwwnb66sf93r1agcajvv1vi"; @@ -966,6 +1225,30 @@ in "nCTEQ15_7_3" = "13b9wbm2hqx4lixq3dad1y3cr6didcch8kg7mqm9lgbism7dwaqw"; "nCTEQ15_84_42" = "12vkqpvjjyh0x0hbn7r4gx5za01yqs9a7lqirdxd15k04fp5rnjr"; "nCTEQ15_9_4" = "1rkxhxwp0v9dm6f71c5635ihlspfx0sj666maif4iaw1sf4hazln"; + "nCTEQ15_B_90CL_Au_hess" = "13sdn1ng4nd6935dksk7jin8yilp29zys3d0jvf7m7vx8gyxi187"; + "nCTEQ15_B_90CL_Pb_hess" = "1j11x8y5sbs5lz4z06wcl702ijvh1bcb4i222jdcq9gh9j40xn17"; + "nCTEQ15_B_c_90CL_Au_hess" = "0k0pn3yqb632j254h8w9wbdvcasnfxr7d9g47drqw3f4w1as3s16"; + "nCTEQ15_B_c_90CL_Pb_hess" = "0gvvwnc298qgxhpj4wgnzcrgz0wqlj5r7sfsl1fj21zhm3kc45jl"; + "nCTEQ15_B_d_90CL_Au_hess" = "0lzfa2gsl5cs2i6y833lhvf2pifzkysj9jgq22v9iyyz5q0nbsh4"; + "nCTEQ15_B_d_90CL_Pb_hess" = "0zp7dirc2l42f9zjyq1a2qbnir1bbj1firmg3s856mn7sp3p4i0k"; + "nCTEQ15_B_u_90CL_Au_hess" = 
"1w2ifnl9waqvsaz1yg696mmqxcijm2bphq8zp3rcbimck3rmr978"; + "nCTEQ15_B_u_90CL_Pb_hess" = "0j95dg6kyvd30qgivd9495glcd152cr1j3zh1cg76s6sdzfkhvs5"; + "nCTEQ15_D_90CL_Au_hess" = "0y3n008i22g9ny0v8z6hyc47730xb6qldkpall6p8icn8yzfdga9"; + "nCTEQ15_D_90CL_Pb_hess" = "0mp1plqxsd6j0ybnf7yrl4hgl68a85q56vdjhs911xah1bgrpix1"; + "nCTEQ15_D_c_90CL_Au_hess" = "1xk6ngc488n9immd9nbv8ygvxav5n7b7902k44rkxnw42kbx2c4m"; + "nCTEQ15_D_c_90CL_Pb_hess" = "1ldig46l08n00jvj0dl36jsnpjl2ycv3jdr9d9g375rwgv671kad"; + "nCTEQ15_D_d_90CL_Au_hess" = "059fz4rfhydnk25hmabavwi346cy2hzxaw2ciq8jx64fmawb3v79"; + "nCTEQ15_D_d_90CL_Pb_hess" = "0zgs89yjypn2sd97948z2r78sydadivvw6wy7pwi3a5b0yx1zpzn"; + "nCTEQ15_D_u_90CL_Au_hess" = "0j8sfih3r2wps1l7vapnllh88ibw7672f646m5p67aw5k2an4f8j"; + "nCTEQ15_D_u_90CL_Pb_hess" = "0d2nxsabard1yq8f2v9a7kwk0fzv549hx1k15k0dfif9523i6xqf"; + "nCTEQ15_Jpsi_90CL_Au_hess" = "0x75jizpqi8vss62xb2913vdhvckq2b468iqxd0ggr0fic1f8q9a"; + "nCTEQ15_Jpsi_90CL_Pb_hess" = "09a9s6gmf7q9rk8c88iskra5kxaiz131s5650964znxv29lpzlqm"; + "nCTEQ15_Jpsi_c_90CL_Au_hess" = "0mh3ikdkca7xc5bc2knjbyr7dkgbydnaa4i1gqln0i29b89j5nw0"; + "nCTEQ15_Jpsi_c_90CL_Pb_hess" = "1gs39gla77sqgry1799l9kapc1c48pzxgfba6p70fdbwdac45n3j"; + "nCTEQ15_Jpsi_d_90CL_Au_hess" = "1qn4f4nn8avjrsdqgab25053zadwx7vlr27w8bsmcxg25si104y9"; + "nCTEQ15_Jpsi_d_90CL_Pb_hess" = "107w4yiv6dl8gqfx0mpbnii06wzf15ih2kqmb8hkmz988x49l65q"; + "nCTEQ15_Jpsi_u_90CL_Au_hess" = "1mdm8qlzkyzrszsng49ns9hq7zdqaal61vbi64449fkvmxd0a7mk"; + "nCTEQ15_Jpsi_u_90CL_Pb_hess" = "1y0bcr3g9znjz2c3s4487yjl1ipf0ls05krfpdn8gcsxymf4cirm"; "nCTEQ15npFullNuc_108_54" = "1g8id10rpys9566r8h92diqrr43mww6q8nhvlns0kfjkvkr22m9y"; "nCTEQ15npFullNuc_119_59" = "0df499pvfls1281zkvngrhicnc0ac0bfwamzs027k7f2y6ygkfb4"; "nCTEQ15npFullNuc_12_6" = "0mb3zixcikagsqzpxb7jzrcg05dln37d7anz5359ssjyd6p1mqyi"; @@ -1057,11 +1340,30 @@ in "nNNPDF20_nlo_as_0118_Li6" = "0pwdqrmivpm0j7hrg6h2qqshpna2vjlslxnz0sd100kc3lfq7xab"; "nNNPDF20_nlo_as_0118_N1" = "09y7pd3nnys49w25gb4524x5xkahillvaypjgncbn8n5x1a11nsr"; "nNNPDF20_nlo_as_0118_N14" = "0nb3kcmhbyncp9frs27ww550mjl3f7yiahyyrm3aik93ycpm16n7"; - "nNNPDF20_nlo_as_0118_O16" = "12kfhldvg8gqxjaian14dng6qqc6nikydwcj5jz2i2d1da3dyhgg"; + "nNNPDF20_nlo_as_0118_O16" = "1wmi63l6cpj3nx0vwiqaa1pfw0im5ps96g7842428skzrg0q4yx3"; "nNNPDF20_nlo_as_0118_Pb208" = "1rwb7vca0y1aj38mz8m3wg07q9hq66qd5j3y6hs9bh0jz6hkifzh"; "nNNPDF20_nlo_as_0118_Sn119" = "1dan86ckd5padipp4x12x8msfg5p97b8hwxm78gfyf88kq725m6z"; - "nNNPDF20_nlo_as_0118_W184" = "1g0br4gdrb2vzwmqhgj5778a6vl0lykc4ymylibxlqbqrhf8j89b"; + "nNNPDF20_nlo_as_0118_W184" = "0wzd8vw2svf3mzpyy1wryr5jz3anhykp5z3cx4hdljprws2b8nll"; "nNNPDF20_nlo_as_0118_Xe131" = "1a62qi3qy5kli9q80p2w80mj5v3ps2g6p40zxlgm65q5mphkx1qi"; + "nNNPDF30_nlo_as_0118_A108_Z54" = "0n67w44rmz5s6cg2h58d8sf51dsfd0i5g09dh08mgfcn640bqlqw"; + "nNNPDF30_nlo_as_0118_A119_Z59" = "1zj5gn5021ig0j6p7jpyy683avg0890blmr90yjm6skqzxfjq48i"; + "nNNPDF30_nlo_as_0118_A12_Z6" = "0jvaq9a1w19dybv2hzvn8swk4i4z16lab9yfhbywq0vixyfn1swd"; + "nNNPDF30_nlo_as_0118_A131_Z54" = "01mcqnixjw5m9di508aq8fp74f3aqnvhc1bjirfvi3ca0jz0c1jz"; + "nNNPDF30_nlo_as_0118_A14_Z7" = "0hqpni9fxbf5qfamcirglr04cbwx58pwylqh8hqm3vc03ap3nmjd"; + "nNNPDF30_nlo_as_0118_A16_Z8" = "0f0h3hckxg4xgfd6ldblavhcidzdy2b5660a5nvv96y63sdsx2x4"; + "nNNPDF30_nlo_as_0118_A184_Z74" = "01jsbla72c3b6gbc7w4nx5bb5ws3g864avznb9vxmmk3prib7n8x"; + "nNNPDF30_nlo_as_0118_A197_Z79" = "1gm7nx0cn3lmjlsy6c7dr5vzyfmi1fcdib9656d4039m0ningrhg"; + "nNNPDF30_nlo_as_0118_A208_Z82" = "0r1spnj4qmjwpjybqv9aa6w3wybvgk4qzivzwz4s9bacz0kb3z4n"; + "nNNPDF30_nlo_as_0118_A27_Z13" = 
"17vmr3pwjp1prb83yngada7sw8553sv39dnncksabnklfp1l5x59"; + "nNNPDF30_nlo_as_0118_A2_Z1" = "1g97mc7c14hnkfsfvg4n9jmb4l461i9lka643s626hw0gcq053f5"; + "nNNPDF30_nlo_as_0118_A31_Z15" = "0h5rx9113yq6jw1l2alwyvw77vv733y5mcpa3m9773s7y61w3fpk"; + "nNNPDF30_nlo_as_0118_A40_Z20" = "1937kk4039hi9cslw4417174s83rs4n9vm16ay70pp1c8bcqzp6l"; + "nNNPDF30_nlo_as_0118_A4_Z2" = "147npkmbvzk6j95hnnil6jafc2gxjqavaawl9cilr93f9h64w54w"; + "nNNPDF30_nlo_as_0118_A56_Z26" = "06dxsapqirmajh107j24b3w2nhqz39gs9c2pglq71fc53i0fra5a"; + "nNNPDF30_nlo_as_0118_A64_Z29" = "1drvfd3i1drrb15m3vk9sm2lzx2x01da44gdq3wbc90nxvzc7d56"; + "nNNPDF30_nlo_as_0118_A6_Z3" = "0zp1cixj6ixayzdra2i5qyfn2b6y6qfxd5d1l101n8sgf4bdh6fb"; + "nNNPDF30_nlo_as_0118_A9_Z4" = "07i30jk29cgdbdk4n5acdki5aihaki5w0mqibjww1hy7mwh4y45w"; + "nNNPDF30_nlo_as_0118_p" = "0k4bs4zhlm7l14mbd2q9n0n7rdnpqwgnfwj289ql62v3kh8mnn18"; "xFitterPI_NLO_EIG" = "1v6mfhmcrmdvica0wlc2ilfca1srxc7vjyli113wjvpd7wfpnvj5"; "xFitterPI_NLO_VAR" = "09mlsww89hhm2s96rlkqbkfwwf9qkblw7n3nnrgas6l1kn2hxq1i"; } From 4bcda364c3ceab680f00ca5f5360066f98d3a70a Mon Sep 17 00:00:00 2001 From: "R. Ryantm" Date: Sat, 23 Apr 2022 01:22:33 +0000 Subject: [PATCH 090/161] clojure: 1.11.1.1107 -> 1.11.1.1113 --- pkgs/development/interpreters/clojure/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/development/interpreters/clojure/default.nix b/pkgs/development/interpreters/clojure/default.nix index 50b2a49fdc6e..212e453b7a72 100644 --- a/pkgs/development/interpreters/clojure/default.nix +++ b/pkgs/development/interpreters/clojure/default.nix @@ -2,12 +2,12 @@ stdenv.mkDerivation rec { pname = "clojure"; - version = "1.11.1.1107"; + version = "1.11.1.1113"; src = fetchurl { # https://clojure.org/releases/tools url = "https://download.clojure.org/install/clojure-tools-${version}.tar.gz"; - sha256 = "sha256-ItSKM546QW4DpnGotGzYs6917cbHYYkPvL9XezQBzOY="; + sha256 = "sha256-DJVKVqBx8zueA5+KuQX4NypaYBoNFKMuDM8jDqdgaiI="; }; nativeBuildInputs = [ From 9f9b94fe1983533c1d101874061c3ea0a333b7f2 Mon Sep 17 00:00:00 2001 From: Armeen Mahdian Date: Fri, 22 Apr 2022 17:04:09 -0500 Subject: [PATCH 091/161] oclgrind: switch to python3 --- pkgs/development/tools/analysis/oclgrind/default.nix | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/pkgs/development/tools/analysis/oclgrind/default.nix b/pkgs/development/tools/analysis/oclgrind/default.nix index 3752fa8e251d..9e8d1da4d36a 100644 --- a/pkgs/development/tools/analysis/oclgrind/default.nix +++ b/pkgs/development/tools/analysis/oclgrind/default.nix @@ -1,4 +1,4 @@ -{ lib, stdenv, fetchFromGitHub, cmake, llvmPackages, readline, python2 }: +{ lib, stdenv, fetchFromGitHub, cmake, llvmPackages, readline, python3 }: stdenv.mkDerivation rec { pname = "oclgrind"; @@ -12,7 +12,8 @@ stdenv.mkDerivation rec { }; nativeBuildInputs = [ cmake ]; - buildInputs = [ llvmPackages.llvm llvmPackages.clang-unwrapped readline python2 ]; + checkInputs = [ python3 ]; + buildInputs = [ llvmPackages.llvm llvmPackages.clang-unwrapped readline ]; cmakeFlags = [ "-DCLANG_ROOT=${llvmPackages.clang-unwrapped}" From 3a279c6db035229737941809fe8d3fad0fde09e9 Mon Sep 17 00:00:00 2001 From: "R. 
Ryantm" Date: Sat, 23 Apr 2022 01:55:14 +0000 Subject: [PATCH 092/161] cudatext: 1.162.0 -> 1.162.5 --- pkgs/applications/editors/cudatext/default.nix | 4 ++-- pkgs/applications/editors/cudatext/deps.json | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/pkgs/applications/editors/cudatext/default.nix b/pkgs/applications/editors/cudatext/default.nix index 2a4e29e473b1..ea07b3dfd60b 100644 --- a/pkgs/applications/editors/cudatext/default.nix +++ b/pkgs/applications/editors/cudatext/default.nix @@ -38,13 +38,13 @@ let in stdenv.mkDerivation rec { pname = "cudatext"; - version = "1.162.0"; + version = "1.162.5"; src = fetchFromGitHub { owner = "Alexey-T"; repo = "CudaText"; rev = version; - sha256 = "sha256-lAH0HXtzWs1iFVzM/tvnBT1s1Mt0AGs4TqdtFu1NeMw="; + sha256 = "sha256-CQ0TPZH9A37WK+gm7jgCxL5eF+1SxHlsJTTzMVRkHIs="; }; postPatch = '' diff --git a/pkgs/applications/editors/cudatext/deps.json b/pkgs/applications/editors/cudatext/deps.json index 68b15ce10700..c66ec2c121c8 100644 --- a/pkgs/applications/editors/cudatext/deps.json +++ b/pkgs/applications/editors/cudatext/deps.json @@ -16,8 +16,8 @@ }, "ATSynEdit": { "owner": "Alexey-T", - "rev": "2022.04.18", - "sha256": "sha256-tvFESbamCt7A6Xv8WGh0dKzr9neelYMM7guySOunfvk=" + "rev": "2022.04.21", + "sha256": "sha256-rPbQ3LNBXNHi9dgQKSaaCsuAY/VIzgq9tqlRXRl2IqU=" }, "ATSynEdit_Cmp": { "owner": "Alexey-T", @@ -26,8 +26,8 @@ }, "EControl": { "owner": "Alexey-T", - "rev": "2022.04.18", - "sha256": "sha256-Wp+/f/z2H/WANq9u8mRDn0BaeyFWiPpLrW0YqyT+ezw=" + "rev": "2022.04.21", + "sha256": "sha256-le6ulGFUNjeipYQKzVFezFb9u/0IcQcu5BMxFaIZdyw=" }, "ATSynEdit_Ex": { "owner": "Alexey-T", From 879b212b91952e619f4a6d4cada04c9ee15a8999 Mon Sep 17 00:00:00 2001 From: "R. Ryantm" Date: Sat, 23 Apr 2022 03:43:49 +0000 Subject: [PATCH 093/161] python310Packages.winacl: 0.1.2 -> 0.1.3 --- pkgs/development/python-modules/winacl/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/development/python-modules/winacl/default.nix b/pkgs/development/python-modules/winacl/default.nix index 76d6d71c0b63..d39c5be164d7 100644 --- a/pkgs/development/python-modules/winacl/default.nix +++ b/pkgs/development/python-modules/winacl/default.nix @@ -5,11 +5,11 @@ buildPythonPackage rec { pname = "winacl"; - version = "0.1.2"; + version = "0.1.3"; src = fetchPypi { inherit pname version; - sha256 = "187b4394ef247806f50e1d8320bdb9e33ad1f759d9e61e2e391b97b9adf5f58a"; + sha256 = "sha256-G6xWep0hMACCqiJGuw+UpZH8qOIY4WO6sY3w4y7v6gY="; }; # Project doesn't have tests From f822e917d40f5fa791566c2d26578e085378bae3 Mon Sep 17 00:00:00 2001 From: "R. 
Ryantm" Date: Sat, 23 Apr 2022 03:56:19 +0000 Subject: [PATCH 094/161] python310Packages.lightwave2: 0.8.8 -> 0.8.9 --- pkgs/development/python-modules/lightwave2/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/development/python-modules/lightwave2/default.nix b/pkgs/development/python-modules/lightwave2/default.nix index dd85d3b1f67f..77661b3d58ba 100644 --- a/pkgs/development/python-modules/lightwave2/default.nix +++ b/pkgs/development/python-modules/lightwave2/default.nix @@ -7,14 +7,14 @@ buildPythonPackage rec { pname = "lightwave2"; - version = "0.8.8"; + version = "0.8.9"; format = "setuptools"; disabled = pythonOlder "3.8"; src = fetchPypi { inherit pname version; - sha256 = "sha256-6z4w6GMwShhdF8JUwySOR2RNvCXJ22IzQvoahmSS6Zk="; + sha256 = "sha256-iSxVnUYohCJMr4ESFhZg0vgg9balP87Hm1hDdpWsnv4="; }; propagatedBuildInputs = [ From c7b93bcdd1899399f92f9e5eb1f1fbf8b38e62c0 Mon Sep 17 00:00:00 2001 From: Tom Date: Fri, 22 Apr 2022 21:16:55 -0700 Subject: [PATCH 095/161] tailscale: 1.22.2 -> 1.24.0 --- pkgs/servers/tailscale/default.nix | 8 ++++---- pkgs/top-level/all-packages.nix | 4 +++- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/pkgs/servers/tailscale/default.nix b/pkgs/servers/tailscale/default.nix index 7c7bb84e15b7..2ccabb3f21b9 100644 --- a/pkgs/servers/tailscale/default.nix +++ b/pkgs/servers/tailscale/default.nix @@ -2,20 +2,20 @@ buildGoModule rec { pname = "tailscale"; - version = "1.22.2"; + version = "1.24.0"; src = fetchFromGitHub { owner = "tailscale"; repo = "tailscale"; rev = "v${version}"; - sha256 = "sha256-W4BcUDMxUZKFXueSI/Xlml17Jabi/hnnOyXgitao76A="; + sha256 = "12dn2dkk86ni7wqpl7zaxb8n840fnvg8kcjsg1lvf9k432dqhksn"; }; nativeBuildInputs = lib.optionals stdenv.isLinux [ makeWrapper ]; CGO_ENABLED = 0; - vendorSha256 = "sha256-Bu0a9JI0rlsA4wvvX9Mj4+QH0ot6Jdl+t246HbXu6OA="; + vendorSha256 = "01hh8v3mvl7fgv4w4y78jg50b383lgxfy876lkn7wg0sgg336dc8"; doCheck = false; @@ -35,6 +35,6 @@ buildGoModule rec { homepage = "https://tailscale.com"; description = "The node agent for Tailscale, a mesh VPN built on WireGuard"; license = licenses.bsd3; - maintainers = with maintainers; [ danderson mbaillie ]; + maintainers = with maintainers; [ danderson mbaillie twitchyliquid64 ]; }; } diff --git a/pkgs/top-level/all-packages.nix b/pkgs/top-level/all-packages.nix index 16ee7ddc6fef..81064c2338f7 100644 --- a/pkgs/top-level/all-packages.nix +++ b/pkgs/top-level/all-packages.nix @@ -22463,7 +22463,9 @@ with pkgs; systemd-journal2gelf = callPackage ../tools/system/systemd-journal2gelf { }; - tailscale = callPackage ../servers/tailscale { }; + tailscale = callPackage ../servers/tailscale { + buildGoModule = buildGo118Module; + }; thanos = callPackage ../servers/monitoring/thanos { }; From ba3cb9e8f6dc0b91d9470eae5f30202d05bd0ffa Mon Sep 17 00:00:00 2001 From: Mario Rodas Date: Sat, 23 Apr 2022 04:20:00 +0000 Subject: [PATCH 096/161] calcurse: enable on darwin --- pkgs/applications/misc/calcurse/default.nix | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pkgs/applications/misc/calcurse/default.nix b/pkgs/applications/misc/calcurse/default.nix index 401a14e69a7a..740c463038c8 100644 --- a/pkgs/applications/misc/calcurse/default.nix +++ b/pkgs/applications/misc/calcurse/default.nix @@ -28,7 +28,8 @@ stdenv.mkDerivation rec { be used to filter and format appointments, making it suitable for use in scripts. 
''; homepage = "https://calcurse.org/"; + changelog = "https://git.calcurse.org/calcurse.git/plain/CHANGES.md?h=v${version}"; license = licenses.bsd2; - platforms = platforms.linux; + platforms = platforms.unix; }; } From c8577c92b8fb897a1e19962b0bfc6728795bd4aa Mon Sep 17 00:00:00 2001 From: icelimetea Date: Sat, 23 Apr 2022 06:58:16 +0100 Subject: [PATCH 097/161] polymc: 1.1.1 -> 1.2.1 --- pkgs/games/polymc/default.nix | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pkgs/games/polymc/default.nix b/pkgs/games/polymc/default.nix index 57d6a0c9faae..4c5c8f6b7808 100644 --- a/pkgs/games/polymc/default.nix +++ b/pkgs/games/polymc/default.nix @@ -18,18 +18,18 @@ mkDerivation rec { pname = "polymc"; - version = "1.1.1"; + version = "1.2.1"; src = fetchFromGitHub { owner = "PolyMC"; repo = "PolyMC"; rev = version; - sha256 = "sha256-virXfnjCzuR2cJoyzapIopT9B+Yi1neff2ZqfOvsmxY="; + sha256 = "sha256-pnMmmeIKAaX+z1YzzowotjaG/HKdiqcz2tJ5eGRR77I="; fetchSubmodules = true; }; nativeBuildInputs = [ cmake file makeWrapper ]; - buildInputs = [ qtbase jdk8 zlib ]; + buildInputs = [ qtbase jdk zlib ]; postPatch = '' # hardcode jdk paths From 2795e9345e473a7b5ed2851ca1249175aa4603db Mon Sep 17 00:00:00 2001 From: Michael Raskin <7c6f434c@mail.ru> Date: Sat, 23 Apr 2022 08:11:11 +0200 Subject: [PATCH 098/161] monotone: fix build by forcing C++11 --- pkgs/applications/version-management/monotone/default.nix | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pkgs/applications/version-management/monotone/default.nix b/pkgs/applications/version-management/monotone/default.nix index 6ea66b296d74..34c3e611d405 100644 --- a/pkgs/applications/version-management/monotone/default.nix +++ b/pkgs/applications/version-management/monotone/default.nix @@ -39,6 +39,8 @@ stdenv.mkDerivation rec { sed -e 's@/usr/bin/less@${less}/bin/less@' -i src/unix/terminal.cc ''; + CXXFLAGS=" --std=c++11 "; + nativeBuildInputs = [ pkg-config autoreconfHook texinfo ]; buildInputs = [ boost zlib botan2 libidn lua pcre sqlite expect openssl gmp bzip2 perl ]; From d73efdae64e203d2659773cfb46839ed92f7e3cd Mon Sep 17 00:00:00 2001 From: Mario Rodas Date: Fri, 22 Apr 2022 04:20:00 +0000 Subject: [PATCH 099/161] dune_3: 3.1.0 -> 3.1.1 https://github.com/ocaml/dune/releases/tag/3.1.1 --- pkgs/development/tools/ocaml/dune/3.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/development/tools/ocaml/dune/3.nix b/pkgs/development/tools/ocaml/dune/3.nix index a43c88671f85..de661948b9c3 100644 --- a/pkgs/development/tools/ocaml/dune/3.nix +++ b/pkgs/development/tools/ocaml/dune/3.nix @@ -6,11 +6,11 @@ else stdenv.mkDerivation rec { pname = "dune"; - version = "3.1.0"; + version = "3.1.1"; src = fetchurl { url = "https://github.com/ocaml/dune/releases/download/${version}/fiber-${version}.tbz"; - sha256 = "sha256-B31SCwhFxW4Q7FhW18ZuvnofG+pKMCfRgvRLJSlRnYE="; + sha256 = "sha256-AkhEVKsbmYhAx4c1CexrIwHrkmYsEy749fT1abNaa2A="; }; nativeBuildInputs = [ ocaml findlib ]; From 9c1b29e66294b69a84c1cc1c6ff6d606c23d327b Mon Sep 17 00:00:00 2001 From: "Aaron L. 
Zeng" Date: Fri, 22 Apr 2022 13:27:12 -0400 Subject: [PATCH 100/161] ledger: Don't depend on python3 if usePython is false --- pkgs/applications/office/ledger/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/applications/office/ledger/default.nix b/pkgs/applications/office/ledger/default.nix index 5478382cbd83..0ea9b908ada6 100644 --- a/pkgs/applications/office/ledger/default.nix +++ b/pkgs/applications/office/ledger/default.nix @@ -16,8 +16,8 @@ stdenv.mkDerivation rec { buildInputs = [ (boost.override { enablePython = usePython; python = python3; }) - gmp mpfr libedit python3 gnused - ]; + gmp mpfr libedit gnused + ] ++ lib.optional usePython python3; nativeBuildInputs = [ cmake texinfo ]; From da727b72c93b358edc9b80d66971af7fef6ab3d4 Mon Sep 17 00:00:00 2001 From: Nikolay Korotkiy Date: Wed, 20 Apr 2022 14:21:08 +0300 Subject: [PATCH 101/161] mg: fix cross-compilation --- pkgs/applications/editors/mg/default.nix | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/pkgs/applications/editors/mg/default.nix b/pkgs/applications/editors/mg/default.nix index c6f014238ea7..8545be13e3b9 100644 --- a/pkgs/applications/editors/mg/default.nix +++ b/pkgs/applications/editors/mg/default.nix @@ -11,6 +11,10 @@ stdenv.mkDerivation rec { sha256 = "sha256-qnb0yB/NNJV257dsLmP84brajoRG03U+Ja1ACYbBvbE="; }; + postPatch = lib.optionalString (stdenv.buildPlatform != stdenv.hostPlatform) '' + substituteInPlace configure --replace "./conftest" "echo" + ''; + enableParallelBuilding = true; makeFlags = [ "PKG_CONFIG=${buildPackages.pkg-config}/bin/${buildPackages.pkg-config.targetPrefix}pkg-config" ]; From 15427feaeb182675adaada5e8af21a24ef80d292 Mon Sep 17 00:00:00 2001 From: Nikolay Korotkiy Date: Thu, 21 Apr 2022 18:58:31 +0300 Subject: [PATCH 102/161] oksh: fix cross-compilation --- pkgs/shells/oksh/default.nix | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/pkgs/shells/oksh/default.nix b/pkgs/shells/oksh/default.nix index 9ea851adbbf6..418a453d9e9a 100644 --- a/pkgs/shells/oksh/default.nix +++ b/pkgs/shells/oksh/default.nix @@ -1,4 +1,4 @@ -{ stdenv, lib, fetchFromGitHub }: +{ stdenv, lib, fetchFromGitHub, buildPackages }: stdenv.mkDerivation rec { pname = "oksh"; @@ -11,6 +11,12 @@ stdenv.mkDerivation rec { sha256 = "sha256-076nD0aPps6n5qkR3LQJ6Kn2g3mkov+/M0qSvxNLZ6o="; }; + postPatch = lib.optionalString (stdenv.buildPlatform != stdenv.hostPlatform) '' + substituteInPlace configure --replace "./conftest" "echo" + ''; + + configureFlags = [ "--no-strip" ]; + meta = with lib; { description = "Portable OpenBSD ksh, based on the Public Domain Korn Shell (pdksh)"; homepage = "https://github.com/ibara/oksh"; From 4a72aad35692b4a14981cfe9dacbfe9921a53f31 Mon Sep 17 00:00:00 2001 From: Nikolay Korotkiy Date: Thu, 21 Apr 2022 18:59:58 +0300 Subject: [PATCH 103/161] oed: fix cross-compilation --- pkgs/applications/editors/oed/default.nix | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/pkgs/applications/editors/oed/default.nix b/pkgs/applications/editors/oed/default.nix index 2281ca8c08b3..f4012220c50d 100644 --- a/pkgs/applications/editors/oed/default.nix +++ b/pkgs/applications/editors/oed/default.nix @@ -14,6 +14,15 @@ stdenv.mkDerivation rec { hash = "sha256-Z8B1RIFve3UPj+9G/WJX0BNc2ynG/qtoGfoesarYGz8="; }; + postPatch = lib.optionalString (stdenv.buildPlatform != stdenv.hostPlatform) '' + substituteInPlace configure --replace "./conftest" "echo" + ''; + + installPhase = '' + install -m755 -Dt $out/bin ed + install -m644 -Dt 
$out/share/man/man1 ed.1 + ''; + meta = with lib; { homepage = "https://github.com/ibara/oed"; description = "Portable ed editor from OpenBSD"; From f542643ed32d915a4322b31e1d2c33b9207f0a8a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Vladim=C3=ADr=20=C4=8Cun=C3=A1t?= Date: Sat, 23 Apr 2022 09:39:01 +0200 Subject: [PATCH 104/161] aws-sdk-cpp: revert to older version on i686-linux After update in PR #169281 (commit f394cc6c5) it won't build: https://hydra.nixos.org/build/174323623 It's still unreliable, as even the i686 builds that succeeded on Hydra won't build for me locally, shooting over the 32-bit memory limit. But I trust this will unblock the nixos-unstable channel on Hydra. --- pkgs/development/libraries/aws-sdk-cpp/default.nix | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/pkgs/development/libraries/aws-sdk-cpp/default.nix b/pkgs/development/libraries/aws-sdk-cpp/default.nix index 3cd846c87a13..8bf009ddacba 100644 --- a/pkgs/development/libraries/aws-sdk-cpp/default.nix +++ b/pkgs/development/libraries/aws-sdk-cpp/default.nix @@ -1,6 +1,7 @@ { lib , stdenv , fetchFromGitHub +, fetchpatch , cmake , curl , openssl @@ -30,13 +31,15 @@ in stdenv.mkDerivation rec { pname = "aws-sdk-cpp"; - version = "1.9.238"; + version = if stdenv.system == "i686-linux" then "1.9.150" + else "1.9.238"; src = fetchFromGitHub { owner = "aws"; repo = "aws-sdk-cpp"; rev = version; - sha256 = "sha256-pEmsTfZXsvJMV79dGkjDNbUVajwyoYgzE5DCsC53pGY="; + sha256 = if version == "1.9.150" then "fgLdXWQKHaCwulrw9KV3vpQ71DjnQAL4heIRW7Rk7UY=" + else "sha256-pEmsTfZXsvJMV79dGkjDNbUVajwyoYgzE5DCsC53pGY="; }; postPatch = '' @@ -109,7 +112,12 @@ stdenv.mkDerivation rec { patches = [ ./cmake-dirs.patch - ]; + ] + ++ lib.optional (lib.versionOlder version "1.9.163") + (fetchpatch { + url = "https://github.com/aws/aws-sdk-cpp/commit/b102aaf5693c4165c84b616ab9ffb9edfb705239.diff"; + sha256 = "sha256-38QBo3MEFpyHPb8jZEURRPkoeu4DqWhVeErJayiHKF0="; + }); # Builds in 2+h with 2 cores, and ~10m with a big-parallel builder. 
requiredSystemFeatures = [ "big-parallel" ]; From e92dc71feeb853f1f788a2bcc0d9fd7b332d213e Mon Sep 17 00:00:00 2001 From: Alexander Bich Date: Sat, 23 Apr 2022 13:28:12 +0500 Subject: [PATCH 105/161] bomutils: fix build with gcc 11 (#169845) --- pkgs/tools/archivers/bomutils/default.nix | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/pkgs/tools/archivers/bomutils/default.nix b/pkgs/tools/archivers/bomutils/default.nix index c4ff957135ff..90a97af03678 100644 --- a/pkgs/tools/archivers/bomutils/default.nix +++ b/pkgs/tools/archivers/bomutils/default.nix @@ -18,6 +18,11 @@ stdenv.mkDerivation rec { "CXX=${stdenv.cc.targetPrefix}c++" ]; + # fix + # src/lsbom.cpp:70:10: error: reference to 'data' is ambiguous + # which refers to std::data from C++17 + NIX_CFLAGS_COMPILE = [ "-std=c++14" ]; + meta = with lib; { homepage = "https://github.com/hogliux/bomutils"; description = "Open source tools to create bill-of-materials files used in macOS installers"; From 149fc387d67adac4cf9fd452bfb5186a7f53f16b Mon Sep 17 00:00:00 2001 From: Michael Raskin <7c6f434c@mail.ru> Date: Sat, 23 Apr 2022 10:31:09 +0200 Subject: [PATCH 106/161] tigervnc: fix build with fresh xorgserver via upstream patch --- pkgs/tools/admin/tigervnc/default.nix | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git a/pkgs/tools/admin/tigervnc/default.nix b/pkgs/tools/admin/tigervnc/default.nix index 1824265b84af..f5cd6f03b963 100644 --- a/pkgs/tools/admin/tigervnc/default.nix +++ b/pkgs/tools/admin/tigervnc/default.nix @@ -1,4 +1,4 @@ -{ lib, stdenv, fetchFromGitHub +{ lib, stdenv, fetchFromGitHub, fetchpatch , xorg, xkeyboard_config, zlib , libjpeg_turbo, pixman, fltk , cmake, gettext, libtool @@ -23,11 +23,22 @@ stdenv.mkDerivation rec { }; + patches = [ + (fetchpatch { + url = "https://patch-diff.githubusercontent.com/raw/TigerVNC/tigervnc/pull/1383.patch"; + sha256 = "sha256-r3QLtxVD0wIv2NWVN9r0LVxSlLurDHgkAZfkpIjmZyU="; + name = "Xvnc-support-Xorg-1.21-PR1383.patch"; + }) + ]; + postPatch = '' sed -i -e '/^\$cmd \.= " -pn";/a$cmd .= " -xkbdir ${xkeyboard_config}/etc/X11/xkb";' unix/vncserver/vncserver.in fontPath= substituteInPlace vncviewer/vncviewer.cxx \ --replace '"/usr/bin/ssh' '"${openssh}/bin/ssh' + + cp unix/xserver21.1.1.patch unix/xserver211.patch + source_top="$(pwd)" ''; dontUseCmakeBuildDir = true; @@ -46,7 +57,7 @@ stdenv.mkDerivation rec { cp -R xorg*/* unix/xserver pushd unix/xserver version=$(echo ${xorg.xorgserver.name} | sed 's/.*-\([0-9]\+\).\([0-9]\+\).*/\1\2/g') - patch -p1 < ${src}/unix/xserver$version.patch + patch -p1 < "$source_top/unix/xserver$version.patch" autoreconf -vfi ./configure $configureFlags --disable-devel-docs --disable-docs \ --disable-xorg --disable-xnest --disable-xvfb --disable-dmx \ From e7decf0387f7e9ee0fe6dc2c7443d44db610ac92 Mon Sep 17 00:00:00 2001 From: Fabian Affolter Date: Sat, 23 Apr 2022 11:45:36 +0200 Subject: [PATCH 107/161] python3Packages.winacl: disable on older Python releases --- pkgs/development/python-modules/winacl/default.nix | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/pkgs/development/python-modules/winacl/default.nix b/pkgs/development/python-modules/winacl/default.nix index d39c5be164d7..52ae843d4260 100644 --- a/pkgs/development/python-modules/winacl/default.nix +++ b/pkgs/development/python-modules/winacl/default.nix @@ -1,15 +1,19 @@ { lib , buildPythonPackage , fetchPypi +, pythonOlder }: buildPythonPackage rec { pname = "winacl"; version = "0.1.3"; + format = "setuptools"; + + disabled = 
pythonOlder "3.7"; src = fetchPypi { inherit pname version; - sha256 = "sha256-G6xWep0hMACCqiJGuw+UpZH8qOIY4WO6sY3w4y7v6gY="; + hash = "sha256-G6xWep0hMACCqiJGuw+UpZH8qOIY4WO6sY3w4y7v6gY="; }; # Project doesn't have tests From 2c6765c613cf4a04ab6bc558e88f772015509bec Mon Sep 17 00:00:00 2001 From: Michael Raskin <7c6f434c@mail.ru> Date: Sat, 23 Apr 2022 11:47:11 +0200 Subject: [PATCH 108/161] webcamoid: 8.8.0 -> 9.0.0 --- pkgs/applications/video/webcamoid/default.nix | 13 ++++--------- 1 file changed, 4 insertions(+), 9 deletions(-) diff --git a/pkgs/applications/video/webcamoid/default.nix b/pkgs/applications/video/webcamoid/default.nix index 63f1e885af30..b9c21b7ad7d1 100644 --- a/pkgs/applications/video/webcamoid/default.nix +++ b/pkgs/applications/video/webcamoid/default.nix @@ -1,13 +1,13 @@ -{ lib, fetchFromGitHub, pkg-config, libxcb, mkDerivation, qmake +{ lib, fetchFromGitHub, pkg-config, libxcb, mkDerivation, cmake , qtbase, qtdeclarative, qtquickcontrols, qtquickcontrols2 , ffmpeg-full, gst_all_1, libpulseaudio, alsa-lib, jack2 , v4l-utils }: mkDerivation rec { pname = "webcamoid"; - version = "8.8.0"; + version = "9.0.0"; src = fetchFromGitHub { - sha256 = "0a8M9GQ6Ea9jBCyfbORVyB6HC/O6jdcIZruQZj9Aai4="; + sha256 = "sha256-NV1BmG+fgy+ZcvHl+05VX5J1BAz8PxKiZ3z9BxjhMU0="; rev = version; repo = "webcamoid"; owner = "webcamoid"; @@ -22,12 +22,7 @@ mkDerivation rec { v4l-utils ]; - nativeBuildInputs = [ pkg-config qmake ]; - - qmakeFlags = [ - "Webcamoid.pro" - "INSTALLQMLDIR=${placeholder "out"}/lib/qt/qml" - ]; + nativeBuildInputs = [ pkg-config cmake ]; meta = with lib; { description = "Webcam Capture Software"; From c72d40e573b799ab764a23093bee928cde08ae2f Mon Sep 17 00:00:00 2001 From: Michael Raskin <7c6f434c@mail.ru> Date: Sat, 23 Apr 2022 12:08:50 +0200 Subject: [PATCH 109/161] xconq: fix build by pinning C++ standard version --- pkgs/games/xconq/default.nix | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pkgs/games/xconq/default.nix b/pkgs/games/xconq/default.nix index 10a959a676b4..2d28090c595c 100644 --- a/pkgs/games/xconq/default.nix +++ b/pkgs/games/xconq/default.nix @@ -19,6 +19,8 @@ stdenv.mkDerivation rec { "--with-tkconfig=${tk}/lib" ]; + CXXFLAGS = " --std=c++11 "; + hardeningDisable = [ "format" ]; patchPhase = '' From 1b6f0a2de829423a3b2c2da41ae773488decb450 Mon Sep 17 00:00:00 2001 From: "R. 
Ryantm" Date: Sat, 23 Apr 2022 10:38:59 +0000 Subject: [PATCH 110/161] open-watcom-v2-unwrapped: unstable-2022-04-21 -> unstable-2022-04-23 --- pkgs/development/compilers/open-watcom/v2.nix | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pkgs/development/compilers/open-watcom/v2.nix b/pkgs/development/compilers/open-watcom/v2.nix index 36eccd64dd64..ae28e424e5e8 100644 --- a/pkgs/development/compilers/open-watcom/v2.nix +++ b/pkgs/development/compilers/open-watcom/v2.nix @@ -12,14 +12,14 @@ stdenv.mkDerivation rec { pname = "open-watcom-v2"; - version = "unstable-2022-04-21"; + version = "unstable-2022-04-23"; name = "${pname}-unwrapped-${version}"; src = fetchFromGitHub { owner = "open-watcom"; repo = "open-watcom-v2"; - rev = "bec9e74cdcd048db527ccacc8894493d2ec6e12a"; - sha256 = "iJG7+OQYZCRyKO/NXkM3gJjgWRbQk26O+66QaAIJAcc="; + rev = "3351d37f44eef84fcd428b8b5537cb29a7db22a8"; + sha256 = "mSF9xFKJ5AQ+Ds84qMD8xJJ7B9AMujgksxMNzSDzLA4="; }; postPatch = '' From 379f69851f070a39ec78a521ecbbca263d75cb8b Mon Sep 17 00:00:00 2001 From: Jonas Heinrich Date: Thu, 21 Apr 2022 12:25:00 +0200 Subject: [PATCH 111/161] sonixd: 0.15.0 -> 0.15.1 --- pkgs/applications/audio/sonixd/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/applications/audio/sonixd/default.nix b/pkgs/applications/audio/sonixd/default.nix index fff5a50624e6..961fa2c8c1cd 100644 --- a/pkgs/applications/audio/sonixd/default.nix +++ b/pkgs/applications/audio/sonixd/default.nix @@ -5,11 +5,11 @@ appimageTools.wrapType2 rec { pname = "sonixd"; - version = "0.15.0"; + version = "0.15.1"; src = fetchurl { url = "https://github.com/jeffvli/sonixd/releases/download/v${version}/Sonixd-${version}-linux-x86_64.AppImage"; - sha256 = "sha256-mZdM2wPJktitSCgIyOY/GwYPixPVTnYiOBVMQN8b7XU="; + sha256 = "sha256-23WU1nwvrzyw0J+Pplm3JbsScjJxu+RhmwVoe/PjozY="; }; extraInstallCommands = '' From 24b53785cce8f81e2171c2e4042bfd4e3d0b2606 Mon Sep 17 00:00:00 2001 From: Jonas Heinrich Date: Wed, 13 Apr 2022 19:20:37 +0200 Subject: [PATCH 112/161] nixos/create_ap: add module --- .../from_md/release-notes/rl-2205.section.xml | 8 +++ .../manual/release-notes/rl-2205.section.md | 2 + nixos/modules/module-list.nix | 1 + .../modules/services/networking/create_ap.nix | 50 +++++++++++++++++++ .../linux/linux-wifi-hotspot/default.nix | 15 ++++-- 5 files changed, 71 insertions(+), 5 deletions(-) create mode 100644 nixos/modules/services/networking/create_ap.nix diff --git a/nixos/doc/manual/from_md/release-notes/rl-2205.section.xml b/nixos/doc/manual/from_md/release-notes/rl-2205.section.xml index af2aecda0da4..05b3822cab71 100644 --- a/nixos/doc/manual/from_md/release-notes/rl-2205.section.xml +++ b/nixos/doc/manual/from_md/release-notes/rl-2205.section.xml @@ -375,6 +375,14 @@ services.headscale + + + create_ap, + a module for creating wifi hotspots using the program + linux-wifi-hotspot. Available as + services.create_ap. + + blocky, diff --git a/nixos/doc/manual/release-notes/rl-2205.section.md b/nixos/doc/manual/release-notes/rl-2205.section.md index 6d22973fb9b3..16c59ce3dddb 100644 --- a/nixos/doc/manual/release-notes/rl-2205.section.md +++ b/nixos/doc/manual/release-notes/rl-2205.section.md @@ -107,6 +107,8 @@ In addition to numerous new and upgraded packages, this release has the followin - [headscale](https://github.com/juanfont/headscale), an Open Source implementation of the [Tailscale](https://tailscale.io) Control Server. 
Available as [services.headscale](options.html#opt-services.headscale.enable) +- [create_ap](https://github.com/lakinduakash/linux-wifi-hotspot), a module for creating wifi hotspots using the program linux-wifi-hotspot. Available as [services.create_ap](options.html#opt-services.create_ap.enable). + - [blocky](https://0xerr0r.github.io/blocky/), fast and lightweight DNS proxy as ad-blocker for local network with many features. - [pacemaker](https://clusterlabs.org/pacemaker/) cluster resource manager diff --git a/nixos/modules/module-list.nix b/nixos/modules/module-list.nix index 9aa8817ca517..132416d865f4 100644 --- a/nixos/modules/module-list.nix +++ b/nixos/modules/module-list.nix @@ -740,6 +740,7 @@ ./services/networking/coredns.nix ./services/networking/corerad.nix ./services/networking/coturn.nix + ./services/networking/create_ap.nix ./services/networking/croc.nix ./services/networking/dante.nix ./services/networking/ddclient.nix diff --git a/nixos/modules/services/networking/create_ap.nix b/nixos/modules/services/networking/create_ap.nix new file mode 100644 index 000000000000..a3c330fab007 --- /dev/null +++ b/nixos/modules/services/networking/create_ap.nix @@ -0,0 +1,50 @@ +{ config, lib, pkgs, ... }: + +with lib; + +let + cfg = config.services.create_ap; + configFile = pkgs.writeText "create_ap.conf" (generators.toKeyValue { } cfg.settings); +in { + options = { + services.create_ap = { + enable = mkEnableOption "setup wifi hotspots using create_ap"; + settings = mkOption { + type = with types; attrsOf (oneOf [ int bool str ]); + default = {}; + description = '' + Configuration for create_ap. + See upstream example configuration + for supported values. + ''; + example = { + INTERNET_IFACE = "eth0"; + WIFI_IFACE = "wlan0"; + SSID = "My Wifi Hotspot"; + PASSPHRASE = "12345678"; + }; + }; + }; + }; + + config = mkIf cfg.enable { + + systemd = { + services.create_ap = { + wantedBy = [ "multi-user.target" ]; + description = "Create AP Service"; + after = [ "network.target" ]; + restartTriggers = [ configFile ]; + serviceConfig = { + ExecStart = "${pkgs.linux-wifi-hotspot}/bin/create_ap --config ${configFile}"; + KillSignal = "SIGINT"; + Restart = "on-failure"; + }; + }; + }; + + }; + + meta.maintainers = with lib.maintainers; [ onny ]; + +} diff --git a/pkgs/os-specific/linux/linux-wifi-hotspot/default.nix b/pkgs/os-specific/linux/linux-wifi-hotspot/default.nix index 2cefc9b0216d..a29fe923f60d 100644 --- a/pkgs/os-specific/linux/linux-wifi-hotspot/default.nix +++ b/pkgs/os-specific/linux/linux-wifi-hotspot/default.nix @@ -8,7 +8,13 @@ , iw , makeWrapper , qrencode -, hostapd }: +, hostapd +, getopt +, dnsmasq +, iproute2 +, flock +, iptables +, gawk }: stdenv.mkDerivation rec { pname = "linux-wifi-hotspot"; @@ -41,9 +47,6 @@ stdenv.mkDerivation rec { --replace "etc" "$out/etc" substituteInPlace ./src/scripts/wihotspot \ --replace "/usr" "$out" - substituteInPlace ./src/scripts/create_ap.service \ - --replace "/usr/bin/create_ap" "$out/bin/create_cap" \ - --replace "/etc/create_ap.conf" "$out/etc/create_cap.conf" ''; makeFlags = [ @@ -52,7 +55,9 @@ stdenv.mkDerivation rec { postInstall = '' wrapProgram $out/bin/create_ap \ - --prefix PATH : ${lib.makeBinPath [ hostapd ]} + --prefix PATH : ${lib.makeBinPath [ + hostapd getopt iw which dnsmasq iproute2 flock iptables gawk + ]} wrapProgram $out/bin/wihotspot-gui \ --prefix PATH : ${lib.makeBinPath [ iw ]} \ From 9d5527d70688b87e85864816e42e2bfd36d0a5e8 Mon Sep 17 00:00:00 2001 From: Jonas Heinrich Date: Tue, 19 Apr 2022 15:21:57 +0200 
Subject: [PATCH 113/161] opensnitch: 1.5.0 -> 1.5.1 --- pkgs/tools/networking/opensnitch/daemon.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/tools/networking/opensnitch/daemon.nix b/pkgs/tools/networking/opensnitch/daemon.nix index 25b420664199..96c509e47fbd 100644 --- a/pkgs/tools/networking/opensnitch/daemon.nix +++ b/pkgs/tools/networking/opensnitch/daemon.nix @@ -15,13 +15,13 @@ buildGoModule rec { pname = "opensnitch"; - version = "1.5.0"; + version = "1.5.1"; src = fetchFromGitHub { owner = "evilsocket"; repo = "opensnitch"; rev = "v${version}"; - sha256 = "sha256-vtD82v0VlaJtCICXduD3IxJ0xjlBuzGKLWLoCiwPX2I="; + sha256 = "sha256-8IfupmQb1romGEvv/xqFkYhp0gGoY4ZEllX6rZYIkqw="; }; patches = [ From 92e9f9ba62207297e0620b7bdc990e3fb8e1ce2f Mon Sep 17 00:00:00 2001 From: Fabian Affolter Date: Sat, 23 Apr 2022 13:37:43 +0200 Subject: [PATCH 114/161] python3Packages.celery: 5.2.3 -> 5.2.6 --- pkgs/development/python-modules/celery/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/development/python-modules/celery/default.nix b/pkgs/development/python-modules/celery/default.nix index 247d25bf420a..4c7e2d8d62f5 100644 --- a/pkgs/development/python-modules/celery/default.nix +++ b/pkgs/development/python-modules/celery/default.nix @@ -24,14 +24,14 @@ buildPythonPackage rec { pname = "celery"; - version = "5.2.3"; + version = "5.2.6"; format = "setuptools"; disabled = pythonOlder "3.7"; src = fetchPypi { inherit pname version; - hash = "sha256-4s1BZnrZfU9qL0Zy0cam662hlMYZJTBYtfI3BKqtqoI="; + hash = "sha256-0TmMrfMPV2Jms0Nw4o6IAwbsVfektjB1SbCunBVmNIE="; }; propagatedBuildInputs = [ From 4e4feb0e0666c44a53c130efaf1082060104408d Mon Sep 17 00:00:00 2001 From: Armeen Mahdian Date: Fri, 22 Apr 2022 16:05:32 -0500 Subject: [PATCH 115/161] ocamlPackages.llvm: switch to python3 --- pkgs/development/ocaml-modules/llvm/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/development/ocaml-modules/llvm/default.nix b/pkgs/development/ocaml-modules/llvm/default.nix index 9ab3d906ab1e..c0600df64f27 100644 --- a/pkgs/development/ocaml-modules/llvm/default.nix +++ b/pkgs/development/ocaml-modules/llvm/default.nix @@ -1,4 +1,4 @@ -{ stdenv, lib, python2, cmake, libllvm, ocaml, findlib, ctypes }: +{ stdenv, lib, python3, cmake, libllvm, ocaml, findlib, ctypes }: let version = lib.getVersion libllvm; in @@ -8,7 +8,7 @@ stdenv.mkDerivation { inherit (libllvm) src; - nativeBuildInputs = [ cmake python2 ocaml findlib ]; + nativeBuildInputs = [ cmake python3 ocaml findlib ]; buildInputs = [ ctypes ]; propagatedBuildInputs = [ libllvm ]; From 3740158c42044124923f05e20dab24ff4b9def8b Mon Sep 17 00:00:00 2001 From: Fabian Affolter Date: Sat, 23 Apr 2022 14:10:35 +0200 Subject: [PATCH 116/161] python3Packages.flask-appbuilder: 3.4.4 -> 4.0.0 --- .../python-modules/flask-appbuilder/default.nix | 17 ++++------------- 1 file changed, 4 insertions(+), 13 deletions(-) diff --git a/pkgs/development/python-modules/flask-appbuilder/default.nix b/pkgs/development/python-modules/flask-appbuilder/default.nix index 0129c9877230..1d84fb88ed7c 100644 --- a/pkgs/development/python-modules/flask-appbuilder/default.nix +++ b/pkgs/development/python-modules/flask-appbuilder/default.nix @@ -27,7 +27,7 @@ buildPythonPackage rec { pname = "flask-appbuilder"; - version = "3.4.4"; + version = "4.0.0"; format = "setuptools"; disabled = pythonOlder "3.7"; @@ -35,14 +35,10 @@ buildPythonPackage rec { src = fetchPypi { pname = 
"Flask-AppBuilder"; inherit version; - sha256 = "sha256-uZzuvNusqMzAS/vmg3CuZ+D442J4LbFwsBboVIx/srE="; + hash = "sha256-g+iHUL83PokXPGu7HJ8ffLocQr0uGpMqS5MbfIlZZ2E="; }; - # See here: https://github.com/dpgaspar/Flask-AppBuilder/commit/7097a7b133f27c78d2b54d2a46e4a4c24478a066.patch - # https://github.com/dpgaspar/Flask-AppBuilder/pull/1610 - # The patch from the PR doesn't apply cleanly so I edited it manually. patches = [ - ./upgrade-to-flask_jwt_extended-4.patch (fetchpatch { # https://github.com/dpgaspar/Flask-AppBuilder/pull/1734 name = "flask-appbuilder-wtf3.patch"; @@ -82,16 +78,11 @@ buildPythonPackage rec { postPatch = '' substituteInPlace setup.py \ --replace "apispec[yaml]>=3.3, <4" "apispec[yaml] >=3.3" \ - --replace "Flask>=0.12, <2" "Flask" \ - --replace "Flask-Login>=0.3, <0.5" "Flask-Login >=0.3, <0.6" \ - --replace "Flask-Babel>=1, <2" "Flask-Babel >=1, <3" \ + --replace "Flask-Login>=0.3, <0.5" "Flask-Login >=0.3" \ --replace "Flask-WTF>=0.14.2, <0.15.0" "Flask-WTF" \ --replace "WTForms<3.0.0" "WTForms" \ --replace "marshmallow-sqlalchemy>=0.22.0, <0.27.0" "marshmallow-sqlalchemy" \ - --replace "Flask-JWT-Extended>=3.18, <4" "Flask-JWT-Extended>=4.1.0" \ - --replace "PyJWT>=1.7.1, <2.0.0" "PyJWT>=2.0.1" \ - --replace "prison>=0.2.1, <1.0.0" "prison" \ - --replace "SQLAlchemy<1.4.0" "SQLAlchemy" + --replace "prison>=0.2.1, <1.0.0" "prison" ''; # Majority of tests require network access or mongo From 5961157781db55f8d54d5c8f488779c90ec7cf2c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christian=20K=C3=B6gler?= Date: Sat, 16 Apr 2022 12:09:08 +0200 Subject: [PATCH 117/161] tree-sitter: Add tree-sitter-scheme --- .../tools/parsing/tree-sitter/grammars/default.nix | 1 + .../tree-sitter/grammars/tree-sitter-scheme.json | 11 +++++++++++ pkgs/development/tools/parsing/tree-sitter/update.nix | 4 ++++ 3 files changed, 16 insertions(+) create mode 100644 pkgs/development/tools/parsing/tree-sitter/grammars/tree-sitter-scheme.json diff --git a/pkgs/development/tools/parsing/tree-sitter/grammars/default.nix b/pkgs/development/tools/parsing/tree-sitter/grammars/default.nix index b7ae694c9181..c5820018bb6b 100644 --- a/pkgs/development/tools/parsing/tree-sitter/grammars/default.nix +++ b/pkgs/development/tools/parsing/tree-sitter/grammars/default.nix @@ -70,6 +70,7 @@ tree-sitter-ruby = lib.importJSON ./tree-sitter-ruby.json; tree-sitter-rust = lib.importJSON ./tree-sitter-rust.json; tree-sitter-scala = lib.importJSON ./tree-sitter-scala.json; + tree-sitter-scheme = lib.importJSON ./tree-sitter-scheme.json; tree-sitter-scss = lib.importJSON ./tree-sitter-scss.json; tree-sitter-sparql = lib.importJSON ./tree-sitter-sparql.json; tree-sitter-supercollider = lib.importJSON ./tree-sitter-supercollider.json; diff --git a/pkgs/development/tools/parsing/tree-sitter/grammars/tree-sitter-scheme.json b/pkgs/development/tools/parsing/tree-sitter/grammars/tree-sitter-scheme.json new file mode 100644 index 000000000000..d15067e35478 --- /dev/null +++ b/pkgs/development/tools/parsing/tree-sitter/grammars/tree-sitter-scheme.json @@ -0,0 +1,11 @@ +{ + "url": "https://github.com/6cdh/tree-sitter-scheme", + "rev": "a1d2233f4e5498bb305858323c93525e1dd165c0", + "date": "2022-04-07T21:19:08+08:00", + "path": "/nix/store/ljq285gnl26w8jg1ahdhxk2xa4l2s3vn-tree-sitter-scheme", + "sha256": "0ggh2jpgnnakhsry12m31f5ranjhgkkcxp5jhsqxw6ac778n28h9", + "fetchLFS": false, + "fetchSubmodules": false, + "deepClone": false, + "leaveDotGit": false +} diff --git a/pkgs/development/tools/parsing/tree-sitter/update.nix 
b/pkgs/development/tools/parsing/tree-sitter/update.nix index 961a5e96e0b5..35fa30779d3c 100644 --- a/pkgs/development/tools/parsing/tree-sitter/update.nix +++ b/pkgs/development/tools/parsing/tree-sitter/update.nix @@ -312,6 +312,10 @@ let orga = "MichaHoffmann"; repo = "tree-sitter-hcl"; }; + "tree-sitter-scheme" = { + orga = "6cdh"; + repo = "tree-sitter-scheme"; + }; }; allGrammars = From 82cd2cc2de707a9c77b4ad4aff6229cb49f55f38 Mon Sep 17 00:00:00 2001 From: "R. Ryantm" Date: Sat, 23 Apr 2022 13:10:00 +0000 Subject: [PATCH 118/161] python310Packages.apprise: 0.9.8 -> 0.9.8.1 --- pkgs/development/python-modules/apprise/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/development/python-modules/apprise/default.nix b/pkgs/development/python-modules/apprise/default.nix index ed54595ff435..bf8d88ced32e 100644 --- a/pkgs/development/python-modules/apprise/default.nix +++ b/pkgs/development/python-modules/apprise/default.nix @@ -20,14 +20,14 @@ buildPythonPackage rec { pname = "apprise"; - version = "0.9.8"; + version = "0.9.8.1"; format = "setuptools"; disabled = pythonOlder "3.7"; src = fetchPypi { inherit pname version; - hash = "sha256-PK1WxfJHWHbe/l+/6woBA2Gik+rKF5Uiuf35r4KNzEM="; + hash = "sha256-eLn/ARDFLwmFfe3WEL4J9mBS6OwSklDC26doTx5X8JU="; }; nativeBuildInputs = [ From a7f327c6ea2ae72881a7b7b473dd61f9d587f2b3 Mon Sep 17 00:00:00 2001 From: Mario Rodas Date: Sat, 23 Apr 2022 13:11:00 +0000 Subject: [PATCH 119/161] python310Packages.apprise: 0.9.8.1 -> 0.9.8.2 https://github.com/caronc/apprise/releases/tag/v0.9.8.2 --- pkgs/development/python-modules/apprise/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/development/python-modules/apprise/default.nix b/pkgs/development/python-modules/apprise/default.nix index bf8d88ced32e..8be62dfd0da1 100644 --- a/pkgs/development/python-modules/apprise/default.nix +++ b/pkgs/development/python-modules/apprise/default.nix @@ -20,14 +20,14 @@ buildPythonPackage rec { pname = "apprise"; - version = "0.9.8.1"; + version = "0.9.8.2"; format = "setuptools"; disabled = pythonOlder "3.7"; src = fetchPypi { inherit pname version; - hash = "sha256-eLn/ARDFLwmFfe3WEL4J9mBS6OwSklDC26doTx5X8JU="; + hash = "sha256-EDKa77sU09HOBp4NVsHNwp6S4UbHyqX8T8rFGOnV8kA="; }; nativeBuildInputs = [ From bcacb9b84cc0dd65c177fdc0e37186ae2c9c6286 Mon Sep 17 00:00:00 2001 From: Florian Brandes Date: Sat, 23 Apr 2022 15:14:45 +0200 Subject: [PATCH 120/161] ipmiview: 2.19.0 -> 2.20.0 Signed-off-by: Florian Brandes --- pkgs/applications/misc/ipmiview/default.nix | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pkgs/applications/misc/ipmiview/default.nix b/pkgs/applications/misc/ipmiview/default.nix index 4c01e52aa5db..a25806533ef8 100644 --- a/pkgs/applications/misc/ipmiview/default.nix +++ b/pkgs/applications/misc/ipmiview/default.nix @@ -13,12 +13,12 @@ stdenv.mkDerivation rec { pname = "IPMIView"; - version = "2.19.0"; - buildVersion = "210401"; + version = "2.20.0"; + buildVersion = "220309"; src = fetchurl { url = "https://www.supermicro.com/wftp/utility/IPMIView/Linux/IPMIView_${version}_build.${buildVersion}_bundleJRE_Linux_x64.tar.gz"; - sha256 = "sha256-6hxOu/Wkcrp9MaMYlxOR2DZW21Wi3BIFZp3Vm8NRBWs="; + hash = "sha256-qtklBMuK0jb9Ye0IkYM2WYFRMAfZg9tk08a1JQ64cjA="; }; nativeBuildInputs = [ patchelf makeWrapper ]; From c9de099fa1b00bca89183a9e3347c2629f1dcf72 Mon Sep 17 00:00:00 2001 From: Thiago Kenji Okada Date: Sat, 23 Apr 2022 14:46:40 +0100 Subject: [PATCH 121/161] lvm2: 
move fix-blkdeactivate.patch to version 2.03.15+ only --- pkgs/os-specific/linux/lvm2/common.nix | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pkgs/os-specific/linux/lvm2/common.nix b/pkgs/os-specific/linux/lvm2/common.nix index 53cab49f1a63..4cb86bf3d8b9 100644 --- a/pkgs/os-specific/linux/lvm2/common.nix +++ b/pkgs/os-specific/linux/lvm2/common.nix @@ -96,8 +96,9 @@ stdenv.mkDerivation rec { sed -i 's|^#define LVM_CONFIGURE_LINE.*$|#define LVM_CONFIGURE_LINE ""|g' ./include/configure.h ''; - patches = [ + patches = lib.optionals (lib.versionAtLeast version "2.03.15") [ # fixes paths to and checks for tools + # TODO: needs backport to LVM 2.02 used by static/musl (substituteAll (let optionalTool = cond: pkg: if cond then pkg else "/run/current-system/sw"; in { From 9a3454a75bcb3e7bedac8ee764d1340157b7b295 Mon Sep 17 00:00:00 2001 From: "R. Ryantm" Date: Sat, 23 Apr 2022 14:32:17 +0000 Subject: [PATCH 122/161] python310Packages.pyskyqremote: 0.3.5 -> 0.3.6 --- pkgs/development/python-modules/pyskyqremote/default.nix | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pkgs/development/python-modules/pyskyqremote/default.nix b/pkgs/development/python-modules/pyskyqremote/default.nix index e72e38b30cba..e7e52991f6e8 100644 --- a/pkgs/development/python-modules/pyskyqremote/default.nix +++ b/pkgs/development/python-modules/pyskyqremote/default.nix @@ -9,7 +9,7 @@ buildPythonPackage rec { pname = "pyskyqremote"; - version = "0.3.5"; + version = "0.3.6"; format = "setuptools"; disabled = pythonOlder "3.7"; @@ -17,8 +17,8 @@ buildPythonPackage rec { src = fetchFromGitHub { owner = "RogerSelwyn"; repo = "skyq_remote"; - rev = version; - sha256 = "sha256-/BhNoU1dnZj07ZvG126srSb6eW00n8htFuDttq006QE="; + rev = "refs/tags/${version}"; + sha256 = "sha256-CCbLRb8eoMuYH3it2onfiUzHLW6sirePR/lqATpp4K0="; }; propagatedBuildInputs = [ From cad35edf30b1955c534567ea08d2f5cd4e7a9c27 Mon Sep 17 00:00:00 2001 From: Giulio De Pasquale Date: Fri, 15 Apr 2022 00:31:58 +0200 Subject: [PATCH 123/161] llvmPackage: Only force GCC stdenv for native x86_32 Per ee35de1861fa37c968619b3868a6a0e5b8caa949, the condition that was there before, intended to avoid infinite recursion, was not quite right. This one is probably also not quite right, but good enough for now.
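For illustration only, the old and new guards can be written side by side as a small Nix sketch. The helper names below are made up for readability and are not part of the patch; only the conditions themselves are taken from the diff that follows, where they stay inline in each llvmPackages_* override in all-packages.nix.

    # Hypothetical names, for illustration; assumes the usual all-packages.nix
    # scope (lib, stdenv, buildPackages, gcc7Stdenv).
    let
      # Old guard: force gcc7Stdenv whenever the host is 32-bit x86,
      # even when cross-compiling from another build platform.
      oldForceGcc7 = stdenv.hostPlatform.isi686
        && buildPackages.stdenv.cc.isGNU;

      # New guard: additionally require a native (non-cross) build.
      newForceGcc7 = stdenv.hostPlatform.isi686
        && stdenv.hostPlatform == stdenv.buildPlatform
        && buildPackages.stdenv.cc.isGNU;
    in
      lib.optionalAttrs newForceGcc7 { stdenv = gcc7Stdenv; }

The added equality check is what the subject line refers to: the gcc7Stdenv override now applies only to native x86_32 builds.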
Co-Authored-By: John Ericson --- pkgs/top-level/all-packages.nix | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/pkgs/top-level/all-packages.nix b/pkgs/top-level/all-packages.nix index 98e3c19b45ec..415b66ae5144 100644 --- a/pkgs/top-level/all-packages.nix +++ b/pkgs/top-level/all-packages.nix @@ -13268,7 +13268,7 @@ with pkgs; inherit (stdenvAdapters) overrideCC; buildLlvmTools = buildPackages.llvmPackages_11.tools; targetLlvmLibraries = targetPackages.llvmPackages_11.libraries or llvmPackages_11.libraries; - } // lib.optionalAttrs (stdenv.hostPlatform.isi686 && buildPackages.stdenv.cc.isGNU) { + } // lib.optionalAttrs (stdenv.hostPlatform.isi686 && stdenv.hostPlatform == stdenv.buildPlatform && buildPackages.stdenv.cc.isGNU) { stdenv = gcc7Stdenv; })); @@ -13276,7 +13276,7 @@ with pkgs; inherit (stdenvAdapters) overrideCC; buildLlvmTools = buildPackages.llvmPackages_12.tools; targetLlvmLibraries = targetPackages.llvmPackages_12.libraries or llvmPackages_12.libraries; - } // lib.optionalAttrs (stdenv.hostPlatform.isi686 && buildPackages.stdenv.cc.isGNU) { + } // lib.optionalAttrs (stdenv.hostPlatform.isi686 && stdenv.hostPlatform == stdenv.buildPlatform && buildPackages.stdenv.cc.isGNU) { stdenv = gcc7Stdenv; })); @@ -13284,7 +13284,7 @@ with pkgs; inherit (stdenvAdapters) overrideCC; buildLlvmTools = buildPackages.llvmPackages_13.tools; targetLlvmLibraries = targetPackages.llvmPackages_13.libraries or llvmPackages_13.libraries; - } // lib.optionalAttrs (stdenv.hostPlatform.isi686 && buildPackages.stdenv.cc.isGNU) { + } // lib.optionalAttrs (stdenv.hostPlatform.isi686 && stdenv.hostPlatform == stdenv.buildPlatform && buildPackages.stdenv.cc.isGNU) { stdenv = gcc7Stdenv; })); @@ -13292,7 +13292,7 @@ with pkgs; inherit (stdenvAdapters) overrideCC; buildLlvmTools = buildPackages.llvmPackages_14.tools; targetLlvmLibraries = targetPackages.llvmPackages_14.libraries or llvmPackages_14.libraries; - } // lib.optionalAttrs (stdenv.hostPlatform.isi686 && buildPackages.stdenv.cc.isGNU) { + } // lib.optionalAttrs (stdenv.hostPlatform.isi686 && stdenv.hostPlatform == stdenv.buildPlatform && buildPackages.stdenv.cc.isGNU) { stdenv = gcc7Stdenv; })); From 5c37b6361592f52d067a5a507e6fcbdfb2e28078 Mon Sep 17 00:00:00 2001 From: Fabian Affolter Date: Sat, 23 Apr 2022 16:59:19 +0200 Subject: [PATCH 124/161] python3Packages.slither-analyzer: 0.8.2 -> 0.8.3 --- .../python-modules/slither-analyzer/default.nix | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/pkgs/development/python-modules/slither-analyzer/default.nix b/pkgs/development/python-modules/slither-analyzer/default.nix index 8eaf442fb5de..d3295cbb1f8b 100644 --- a/pkgs/development/python-modules/slither-analyzer/default.nix +++ b/pkgs/development/python-modules/slither-analyzer/default.nix @@ -1,7 +1,7 @@ { lib , stdenv , buildPythonPackage -, fetchPypi +, fetchFromGitHub , makeWrapper , pythonOlder , crytic-compile @@ -13,12 +13,16 @@ buildPythonPackage rec { pname = "slither-analyzer"; - version = "0.8.2"; - disabled = pythonOlder "3.6"; + version = "0.8.3"; + format = "setuptools"; - src = fetchPypi { - inherit pname version; - sha256 = "sha256-77045eB7KvHBb0j61qz4zJTtEprg4/aH6MrPlQY1wiM="; + disabled = pythonOlder "3.7"; + + src = fetchFromGitHub { + owner = "crytic"; + repo = "slither"; + rev = version; + sha256 = "sha256-Kh5owlkRB9hDlfIRiS+aNFe4YtZj38CLeE3Fe+R7diM="; }; nativeBuildInputs = [ From d4268e79530b7e4b228216fefe233586edfcb575 Mon Sep 17 00:00:00 2001 From: Ryan Horiguchi 
Date: Sat, 23 Apr 2022 17:53:31 +0200 Subject: [PATCH 125/161] gnomeExtensions: update-extensions.py remove debug code --- pkgs/desktops/gnome/extensions/update-extensions.py | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/pkgs/desktops/gnome/extensions/update-extensions.py b/pkgs/desktops/gnome/extensions/update-extensions.py index f6e831c7b536..a2f42be06258 100755 --- a/pkgs/desktops/gnome/extensions/update-extensions.py +++ b/pkgs/desktops/gnome/extensions/update-extensions.py @@ -51,14 +51,7 @@ def fetch_extension_data(uuid: str, version: str) -> Tuple[str, str]: if url == 'https://extensions.gnome.org/extension-data/VitalsCoreCoding.com.v53.shell-extension.zip': url = 'https://extensions.gnome.org/extension-data/VitalsCoreCoding.com.v53.shell-extension_v1BI2FB.zip' - # Yes, we download that file two times: - - # The first time is for the maintainer, so they may have a personal backup to fix potential issues - # subprocess.run( - # ["wget", url], capture_output=True, text=True - # ) - - # The second time, we add the file to store + # Download extension and add the zip content to nix-store process = subprocess.run( ["nix-prefetch-url", "--unpack", "--print-path", url], capture_output=True, text=True ) From 40696c3e6f84d8ad6ed265541ef826c245fd5171 Mon Sep 17 00:00:00 2001 From: K900 Date: Fri, 22 Apr 2022 08:54:53 +0300 Subject: [PATCH 126/161] openconnect: re-enable p11kit --- pkgs/tools/networking/openconnect/common.nix | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/pkgs/tools/networking/openconnect/common.nix b/pkgs/tools/networking/openconnect/common.nix index 63a0ecc93eb2..555fd43035e6 100644 --- a/pkgs/tools/networking/openconnect/common.nix +++ b/pkgs/tools/networking/openconnect/common.nix @@ -6,6 +6,7 @@ , stdenv , pkg-config , gnutls +, p11-kit , openssl , useOpenSSL ? false , gmp @@ -30,7 +31,8 @@ stdenv.mkDerivation rec { ]; buildInputs = [ gmp libxml2 stoken zlib (if useOpenSSL then openssl else gnutls) ] - ++ lib.optional stdenv.isDarwin PCSC; + ++ lib.optional stdenv.isDarwin PCSC + ++ lib.optional stdenv.isLinux p11-kit; nativeBuildInputs = [ pkg-config autoreconfHook ]; meta = with lib; { From a2054b6d2c7254ee34daef3f756a4a95a5689b77 Mon Sep 17 00:00:00 2001 From: Alyssa Ross Date: Wed, 3 Nov 2021 20:29:40 +0000 Subject: [PATCH 127/161] vpnc: switch to maintained fork This is the same fork now used by Alpine, Arch, Debian, Gentoo, and SliTaz. vpnc-script is no longer bundled, but we already have it packaged anyway so that's okay. --- pkgs/tools/networking/vpnc/default.nix | 57 +++++-------------- .../vpnc/no_default_route_when_netmask.patch | 12 ---- 2 files changed, 14 insertions(+), 55 deletions(-) delete mode 100644 pkgs/tools/networking/vpnc/no_default_route_when_netmask.patch diff --git a/pkgs/tools/networking/vpnc/default.nix b/pkgs/tools/networking/vpnc/default.nix index 31a4262d8a64..ed61c5ade911 100644 --- a/pkgs/tools/networking/vpnc/default.nix +++ b/pkgs/tools/networking/vpnc/default.nix @@ -1,29 +1,21 @@ -{ lib, stdenv, fetchsvn -, makeWrapper, pkg-config -, gawk, gnutls, libgcrypt, nettools, openresolv, perl +{ lib, stdenv, fetchFromGitHub, fetchpatch +, makeWrapper, pkg-config, perl +, gawk, gnutls, libgcrypt, openresolv, vpnc-scripts , opensslSupport ? false, openssl # Distributing this is a GPL violation. 
}: stdenv.mkDerivation { pname = "vpnc"; - version = "0.5.3-post-r550"; - src = fetchsvn { - url = "https://svn.unix-ag.uni-kl.de/vpnc"; - rev = "550"; - sha256 = "0x4ckfv9lpykwmh28v1kyzz91y1j2v48fi8q5nsawrba4q0wlrls"; + version = "unstable-2021-11-04"; + + src = fetchFromGitHub { + owner = "streambinder"; + repo = "vpnc"; + rev = "c8bb5371b881f8853f191c495e762f834c9def5d"; + sha256 = "1j1p83nfc2fpwczjcggsby0b44hk97ky0s6vns6md3awlbpgdn57"; + fetchSubmodules = true; }; - postUnpack = '' - mv $sourceRoot/trunk/* $sourceRoot/. - rm -r $sourceRoot/{trunk,branches,tags} - ''; - - patches = [ ./no_default_route_when_netmask.patch ]; - - # The `etc/vpnc/vpnc-script' script relies on `which' and on - # `ifconfig' as found in net-tools (not GNU Inetutils). - propagatedBuildInputs = [ nettools ]; - nativeBuildInputs = [ makeWrapper ] ++ lib.optional (!opensslSupport) pkg-config; buildInputs = [ libgcrypt perl ] @@ -32,36 +24,15 @@ stdenv.mkDerivation { makeFlags = [ "PREFIX=$(out)" "ETCDIR=$(out)/etc/vpnc" - "SCRIPT_PATH=$(out)/etc/vpnc/vpnc-script" + "SCRIPT_PATH=${vpnc-scripts}/bin/vpnc-script" ] ++ lib.optional opensslSupport "OPENSSL_GPL_VIOLATION=yes"; postPatch = '' - patchShebangs makeman.pl - ''; - - preConfigure = '' - substituteInPlace "vpnc-script" \ - --replace "which" "type -P" \ - --replace "awk" "${gawk}/bin/awk" \ - --replace "/sbin/resolvconf" "${openresolv}/bin/resolvconf" - - substituteInPlace "config.c" \ - --replace "/etc/vpnc/vpnc-script" "$out/etc/vpnc/vpnc-script" - ''; - - postInstall = '' - for i in "$out/{bin,sbin}/"* - do - wrapProgram $i --prefix PATH : \ - "${nettools}/bin:${nettools}/sbin" - done - - mkdir -p $out/share/doc/vpnc - cp README nortel.txt ChangeLog $out/share/doc/vpnc/ + patchShebangs src/makeman.pl ''; meta = with lib; { - homepage = "https://www.unix-ag.uni-kl.de/~massar/vpnc/"; + homepage = "https://davidepucci.it/doc/vpnc/"; description = "Virtual private network (VPN) client for Cisco's VPN concentrators"; license = if opensslSupport then licenses.unfree else licenses.gpl2Plus; platforms = platforms.linux; diff --git a/pkgs/tools/networking/vpnc/no_default_route_when_netmask.patch b/pkgs/tools/networking/vpnc/no_default_route_when_netmask.patch deleted file mode 100644 index fa12abe9b776..000000000000 --- a/pkgs/tools/networking/vpnc/no_default_route_when_netmask.patch +++ /dev/null @@ -1,12 +0,0 @@ -diff -uNr a/vpnc-script b/vpnc-script ---- a/vpnc-script 2015-09-06 13:19:11.408661526 +0200 -+++ b/vpnc-script 2015-09-06 14:47:40.260871556 +0200 -@@ -647,7 +647,7 @@ - echo "$i" | grep : >/dev/null || \ - set_network_route "$i" "255.255.255.255" "32" - done -- elif [ -n "$INTERNAL_IP4_ADDRESS" ]; then -+ elif [ -n "$INTERNAL_IP4_ADDRESS" -a -z "$INTERNAL_IP4_NETMASK" ]; then - set_default_route - fi - if [ -n "$CISCO_IPV6_SPLIT_INC" ]; then From 9b2a6976a6e3d14eacd62f481042f3cc2505752a Mon Sep 17 00:00:00 2001 From: happysalada Date: Sat, 23 Apr 2022 12:00:18 -0400 Subject: [PATCH 128/161] vector: 0.21.0 -> 0.21.1 --- pkgs/tools/misc/vector/default.nix | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/pkgs/tools/misc/vector/default.nix b/pkgs/tools/misc/vector/default.nix index 41279b1434db..ec9958ec5458 100644 --- a/pkgs/tools/misc/vector/default.nix +++ b/pkgs/tools/misc/vector/default.nix @@ -30,7 +30,7 @@ let pname = "vector"; - version = "0.21.0"; + version = "0.21.1"; in rustPlatform.buildRustPackage { inherit pname version; @@ -39,10 +39,10 @@ rustPlatform.buildRustPackage { owner = "timberio"; repo = pname; rev = 
"v${version}"; - sha256 = "sha256-ZhOtrv63ZhG3OEWLTk+VdZr6Y6f9KvyCJvAKWck7B7o="; + sha256 = "sha256-eskm+H0D+SB3PB76T6Z+iL5jjyy51lOXy88QXsn/Azs="; }; - cargoSha256 = "sha256-VGB+ljojXGrQmcN0AT90hFk9h5nwjDTtzGpWmItw9x4="; + cargoSha256 = "sha256-fN6o8Fcqdhs5c3RID+ok1Xo5g6nF9m3f8EWIJ47dn/k="; nativeBuildInputs = [ pkg-config cmake perl ]; buildInputs = [ oniguruma openssl protobuf rdkafka zstd ] ++ lib.optionals stdenv.isDarwin [ Security libiconv coreutils CoreServices ]; @@ -62,9 +62,8 @@ rustPlatform.buildRustPackage { buildFeatures = features; # TODO investigate compilation failure for tests - # dev dependency includes httpmock which depends on iashc which depends on curl-sys with http2 feature enabled - # compilation fails because of a missing http2 include - doCheck = !stdenv.isDarwin; + # there are about 100 tests failing (out of 1100) for version 0.21.1 + doCheck = false; checkFlags = [ # tries to make a network access From 1bb806a081208bd0893943385576561a49e3c6da Mon Sep 17 00:00:00 2001 From: ivanbrennan Date: Sat, 23 Apr 2022 13:46:04 -0400 Subject: [PATCH 129/161] rover: 0.4.8 -> 0.5.1 Note that the librusty_v8 dependency was removed in https://github.com/apollographql/rover/commit/3e4781d3c476b743aa42a0cdcb802e1a05abe13b --- pkgs/development/tools/rover/default.nix | 12 ++--- pkgs/development/tools/rover/librusty_v8.nix | 17 ------- pkgs/development/tools/rover/schema/etag.id | 2 +- .../tools/rover/schema/schema.graphql | 44 +++++++++---------- pkgs/development/tools/rover/update.sh | 21 --------- 5 files changed, 26 insertions(+), 70 deletions(-) delete mode 100644 pkgs/development/tools/rover/librusty_v8.nix diff --git a/pkgs/development/tools/rover/default.nix b/pkgs/development/tools/rover/default.nix index 7232089ee997..5144ecb38e88 100644 --- a/pkgs/development/tools/rover/default.nix +++ b/pkgs/development/tools/rover/default.nix @@ -3,26 +3,20 @@ , fetchFromGitHub , perl , rustPlatform -, librusty_v8 ? callPackage ./librusty_v8.nix { } }: rustPlatform.buildRustPackage rec { pname = "rover"; - version = "0.4.8"; + version = "0.5.1"; src = fetchFromGitHub { owner = "apollographql"; repo = pname; rev = "v${version}"; - sha256 = "sha256-9o2bGa9vxN7EprKgsy9TI7AFmwjo1OT1pDyiLierTq0="; + sha256 = "sha256-wBHMND/xpm9o7pkWMUj9lEtEkzy3mX+E4Dt7qDn6auY="; }; - cargoSha256 = "sha256-4oNuyZ1xNK2jP9QFEcthCjEQRyvFykd5N0j5KCXrzVY="; - - # The v8 package will try to download a `librusty_v8.a` release at build time - # to our read-only filesystem. 
To avoid this we pre-download the file and - # export it via RUSTY_V8_ARCHIVE - RUSTY_V8_ARCHIVE = librusty_v8; + cargoSha256 = "sha256-n0R2MdAYGsOsYt4x1N1KdGvBZYTALyhSzCGW29bnFU4="; nativeBuildInputs = [ perl diff --git a/pkgs/development/tools/rover/librusty_v8.nix b/pkgs/development/tools/rover/librusty_v8.nix deleted file mode 100644 index 3dcb1f95acdd..000000000000 --- a/pkgs/development/tools/rover/librusty_v8.nix +++ /dev/null @@ -1,17 +0,0 @@ -{ rust, stdenv, fetchurl }: - -let - arch = rust.toRustTarget stdenv.hostPlatform; - fetch_librusty_v8 = args: fetchurl { - name = "librusty_v8-${args.version}"; - url = "https://github.com/denoland/rusty_v8/releases/download/v${args.version}/librusty_v8_release_${arch}.a"; - sha256 = args.shas.${stdenv.hostPlatform.system}; - meta = { inherit (args) version; }; - }; -in -fetch_librusty_v8 { - version = "0.38.1"; - shas = { - x86_64-linux = "sha256-vRkb5ZrIOYSKa84UbsJD+Oua0wve7f1Yf3kMg/kkYSY="; - }; -} diff --git a/pkgs/development/tools/rover/schema/etag.id b/pkgs/development/tools/rover/schema/etag.id index 369872c3115e..a8b9f0cece3e 100644 --- a/pkgs/development/tools/rover/schema/etag.id +++ b/pkgs/development/tools/rover/schema/etag.id @@ -1 +1 @@ -1bdcf8916afc3cea660add457724dddd70154904f4e360e15da27fa7d5c43cd0 +2694c7b893d44c9ad8f5d7161116deb9985a6bd05e8e0cdcd7379947430e6f89 diff --git a/pkgs/development/tools/rover/schema/schema.graphql b/pkgs/development/tools/rover/schema/schema.graphql index 58198f7e181d..b20b21a91e41 100644 --- a/pkgs/development/tools/rover/schema/schema.graphql +++ b/pkgs/development/tools/rover/schema/schema.graphql @@ -7,7 +7,7 @@ argument is used for bandwidth reduction, and should be the size of the image as application. Apollo's media server will downscale larger images to at least the requested size, but this will not happen for third-party media servers.""" avatarUrl(size:Int!=40):String billingInfo:BillingInfo companyUrl:String currentBillingMonth:BillingMonth currentPlan:BillingPlan!currentPlanV2:BillingPlanV2!currentSubscription:BillingSubscription currentSubscriptionV2:BillingSubscriptionV2 experimentalFeatures:AccountExperimentalFeatures!expiredTrialSubscription:BillingSubscription expiredTrialSubscriptionV2:BillingSubscriptionV2 graphIDAvailable(id:ID!):Boolean!hasBeenOnTrial:Boolean!hasBeenOnTrialV2:Boolean!"""Globally unique identifier, which isn't guaranteed stable (can be changed by administrators).""" id:ID!"""Internal immutable identifier for the account. 
Only visible to Apollo admins (because it really shouldn't be used in normal client apps).""" internalID:ID!invitations(includeAccepted:Boolean!=false):[AccountInvitation!]invoices:[Invoice!]invoicesV2:[InvoiceV2!]!isOnExpiredTrial:Boolean!isOnTrial:Boolean!legacyIsOnTrial:Boolean!memberships:[AccountMembership!]"""Name of the organization, which can change over time and isn't unique.""" name:String!provisionedAt:Timestamp recurlyEmail:String """Returns a different registry related stats pertaining to this account.""" registryStatsWindow(from:Timestamp!resolution:Resolution to:Timestamp):RegistryStatsWindow requests(from:Timestamp!to:Timestamp!):Long requestsInCurrentBillingPeriod:Long roles:AccountRoles """How many seats would be included in your next bill, as best estimated today""" seatCountForNextBill:Int seats:Seats secondaryIDs:[ID!]!"""Graphs belonging to this organization.""" services(includeDeleted:Boolean):[Service!]!"""If non-null, this organization tracks its members through an upstream, eg PingOne; -invitations are not possible on SSO-synchronized account.""" sso:OrganizationSSO state:AccountState """A list of reusable invitations for the organization.""" staticInvitations:[OrganizationInviteLink!]stats(from:Timestamp!"""Granularity of buckets. Defaults to the entire range (aggregate all data into a single durationBucket) when null.""" resolution:Resolution """Defaults to the current time when null.""" to:Timestamp):AccountStatsWindow!@deprecated(reason:"use Account.statsWindow instead")statsWindow(from:Timestamp!"""Granularity of buckets. Defaults to the entire range (aggregate all data into a single durationBucket) when null.""" resolution:Resolution """Defaults to the current time when null.""" to:Timestamp):AccountStatsWindow subscriptions:[BillingSubscription!]subscriptionsV2:[BillingSubscriptionV2!]!"""Gets a ticket for this org, by id""" ticket(id:ID!):ZendeskTicket """List of Zendesk tickets submitted for this org""" tickets:[ZendeskTicket!]}"""Columns of AccountBillingUsageStats.""" enum AccountBillingUsageStatsColumn{OPERATION_COUNT OPERATION_COUNT_PROVIDED_EXPLICITLY SCHEMA_TAG SERVICE_ID TIMESTAMP}type AccountBillingUsageStatsDimensions{operationCountProvidedExplicitly:String schemaTag:String serviceId:ID}"""Filter for data in AccountBillingUsageStats. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input AccountBillingUsageStatsFilter{and:[AccountBillingUsageStatsFilter!]in:AccountBillingUsageStatsFilterIn not:AccountBillingUsageStatsFilter """Selects rows whose operationCountProvidedExplicitly dimension equals the given value if not null. To query for the null value, use {in: {operationCountProvidedExplicitly: [null]}} instead.""" operationCountProvidedExplicitly:String or:[AccountBillingUsageStatsFilter!]"""Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in AccountBillingUsageStats. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input AccountBillingUsageStatsFilterIn{"""Selects rows whose operationCountProvidedExplicitly dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" operationCountProvidedExplicitly:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type AccountBillingUsageStatsMetrics{operationCount:Long!}input AccountBillingUsageStatsOrderBySpec{column:AccountBillingUsageStatsColumn!direction:Ordering!}type AccountBillingUsageStatsRecord{"""Dimensions of AccountBillingUsageStats that can be grouped by.""" groupBy:AccountBillingUsageStatsDimensions!"""Metrics of AccountBillingUsageStats that can be aggregated over.""" metrics:AccountBillingUsageStatsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}type AccountChecksStatsMetrics{totalFailedChecks:Long!totalSuccessfulChecks:Long!}type AccountChecksStatsRecord{id:ID!metrics:AccountChecksStatsMetrics!timestamp:Timestamp!}"""Columns of AccountEdgeServerInfos.""" enum AccountEdgeServerInfosColumn{BOOT_ID EXECUTABLE_SCHEMA_ID LIBRARY_VERSION PLATFORM RUNTIME_VERSION SCHEMA_TAG SERVER_ID SERVICE_ID TIMESTAMP USER_VERSION}type AccountEdgeServerInfosDimensions{bootId:ID executableSchemaId:ID libraryVersion:String platform:String runtimeVersion:String schemaTag:String serverId:ID serviceId:ID userVersion:String}"""Filter for data in AccountEdgeServerInfos. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input AccountEdgeServerInfosFilter{and:[AccountEdgeServerInfosFilter!]"""Selects rows whose bootId dimension equals the given value if not null. To query for the null value, use {in: {bootId: [null]}} instead.""" bootId:ID """Selects rows whose executableSchemaId dimension equals the given value if not null. To query for the null value, use {in: {executableSchemaId: [null]}} instead.""" executableSchemaId:ID in:AccountEdgeServerInfosFilterIn """Selects rows whose libraryVersion dimension equals the given value if not null. To query for the null value, use {in: {libraryVersion: [null]}} instead.""" libraryVersion:String not:AccountEdgeServerInfosFilter or:[AccountEdgeServerInfosFilter!]"""Selects rows whose platform dimension equals the given value if not null. To query for the null value, use {in: {platform: [null]}} instead.""" platform:String """Selects rows whose runtimeVersion dimension equals the given value if not null. To query for the null value, use {in: {runtimeVersion: [null]}} instead.""" runtimeVersion:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serverId dimension equals the given value if not null. To query for the null value, use {in: {serverId: [null]}} instead.""" serverId:ID """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID """Selects rows whose userVersion dimension equals the given value if not null. To query for the null value, use {in: {userVersion: [null]}} instead.""" userVersion:String}"""Filter for data in AccountEdgeServerInfos. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input AccountEdgeServerInfosFilterIn{"""Selects rows whose bootId dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" bootId:[ID]"""Selects rows whose executableSchemaId dimension is in the given list. A null value in the list means a row with null for that dimension.""" executableSchemaId:[ID]"""Selects rows whose libraryVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" libraryVersion:[String]"""Selects rows whose platform dimension is in the given list. A null value in the list means a row with null for that dimension.""" platform:[String]"""Selects rows whose runtimeVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" runtimeVersion:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serverId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serverId:[ID]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]"""Selects rows whose userVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" userVersion:[String]}input AccountEdgeServerInfosOrderBySpec{column:AccountEdgeServerInfosColumn!direction:Ordering!}type AccountEdgeServerInfosRecord{"""Dimensions of AccountEdgeServerInfos that can be grouped by.""" groupBy:AccountEdgeServerInfosDimensions!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of AccountErrorStats.""" enum AccountErrorStatsColumn{CLIENT_NAME CLIENT_VERSION ERRORS_COUNT PATH QUERY_ID QUERY_NAME REQUESTS_WITH_ERRORS_COUNT SCHEMA_HASH SCHEMA_TAG SERVICE_ID TIMESTAMP}type AccountErrorStatsDimensions{clientName:String clientVersion:String path:String queryId:ID queryName:String schemaHash:String schemaTag:String serviceId:ID}"""Filter for data in AccountErrorStats. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input AccountErrorStatsFilter{and:[AccountErrorStatsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String in:AccountErrorStatsFilterIn not:AccountErrorStatsFilter or:[AccountErrorStatsFilter!]"""Selects rows whose path dimension equals the given value if not null. To query for the null value, use {in: {path: [null]}} instead.""" path:String """Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. 
To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in AccountErrorStats. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input AccountErrorStatsFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose path dimension is in the given list. A null value in the list means a row with null for that dimension.""" path:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type AccountErrorStatsMetrics{errorsCount:Long!requestsWithErrorsCount:Long!}input AccountErrorStatsOrderBySpec{column:AccountErrorStatsColumn!direction:Ordering!}type AccountErrorStatsRecord{"""Dimensions of AccountErrorStats that can be grouped by.""" groupBy:AccountErrorStatsDimensions!"""Metrics of AccountErrorStats that can be aggregated over.""" metrics:AccountErrorStatsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}type AccountExperimentalFeatures{auditLogs:Boolean!championDashboard:Boolean!derivedLaunches:Boolean!federation2Preview:Boolean!preRequestPreview:Boolean!publicVariants:Boolean!variantHomepage:Boolean!webhooksPreview:Boolean!}"""Columns of AccountFieldLatencies.""" enum AccountFieldLatenciesColumn{FIELD_HISTOGRAM FIELD_NAME PARENT_TYPE SCHEMA_HASH SCHEMA_TAG SERVICE_ID TIMESTAMP}type AccountFieldLatenciesDimensions{field:String fieldName:String parentType:String schemaHash:String schemaTag:String serviceId:ID}"""Filter for data in AccountFieldLatencies. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input AccountFieldLatenciesFilter{and:[AccountFieldLatenciesFilter!]"""Selects rows whose fieldName dimension equals the given value if not null. To query for the null value, use {in: {fieldName: [null]}} instead.""" fieldName:String in:AccountFieldLatenciesFilterIn not:AccountFieldLatenciesFilter or:[AccountFieldLatenciesFilter!]"""Selects rows whose parentType dimension equals the given value if not null. To query for the null value, use {in: {parentType: [null]}} instead.""" parentType:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. 
To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in AccountFieldLatencies. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input AccountFieldLatenciesFilterIn{"""Selects rows whose fieldName dimension is in the given list. A null value in the list means a row with null for that dimension.""" fieldName:[String]"""Selects rows whose parentType dimension is in the given list. A null value in the list means a row with null for that dimension.""" parentType:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type AccountFieldLatenciesMetrics{fieldHistogram:DurationHistogram!}input AccountFieldLatenciesOrderBySpec{column:AccountFieldLatenciesColumn!direction:Ordering!}type AccountFieldLatenciesRecord{"""Dimensions of AccountFieldLatencies that can be grouped by.""" groupBy:AccountFieldLatenciesDimensions!"""Metrics of AccountFieldLatencies that can be aggregated over.""" metrics:AccountFieldLatenciesMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of AccountFieldUsage.""" enum AccountFieldUsageColumn{CLIENT_NAME CLIENT_VERSION ESTIMATED_EXECUTION_COUNT EXECUTION_COUNT FIELD_NAME PARENT_TYPE QUERY_ID QUERY_NAME REFERENCING_OPERATION_COUNT SCHEMA_HASH SCHEMA_TAG SERVICE_ID TIMESTAMP}type AccountFieldUsageDimensions{clientName:String clientVersion:String fieldName:String parentType:String queryId:ID queryName:String schemaHash:String schemaTag:String serviceId:ID}"""Filter for data in AccountFieldUsage. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input AccountFieldUsageFilter{and:[AccountFieldUsageFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose fieldName dimension equals the given value if not null. To query for the null value, use {in: {fieldName: [null]}} instead.""" fieldName:String in:AccountFieldUsageFilterIn not:AccountFieldUsageFilter or:[AccountFieldUsageFilter!]"""Selects rows whose parentType dimension equals the given value if not null. To query for the null value, use {in: {parentType: [null]}} instead.""" parentType:String """Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. 
To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in AccountFieldUsage. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input AccountFieldUsageFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose fieldName dimension is in the given list. A null value in the list means a row with null for that dimension.""" fieldName:[String]"""Selects rows whose parentType dimension is in the given list. A null value in the list means a row with null for that dimension.""" parentType:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type AccountFieldUsageMetrics{estimatedExecutionCount:Long!executionCount:Long!referencingOperationCount:Long!}input AccountFieldUsageOrderBySpec{column:AccountFieldUsageColumn!direction:Ordering!}type AccountFieldUsageRecord{"""Dimensions of AccountFieldUsage that can be grouped by.""" groupBy:AccountFieldUsageDimensions!"""Metrics of AccountFieldUsage that can be aggregated over.""" metrics:AccountFieldUsageMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}type AccountInvitation{"""An accepted invitation cannot be used anymore""" acceptedAt:Timestamp """Who accepted the invitation""" acceptedBy:User """Time the invitation was created""" createdAt:Timestamp!"""Who created the invitation""" createdBy:User email:String!id:ID!"""Last time we sent an email for the invitation""" lastSentAt:Timestamp """Access role for the invitee""" role:UserPermission!}type AccountMembership{account:Account!createdAt:Timestamp!"""If this membership is a free seat (based on role)""" free:Boolean permission:UserPermission!user:User!}type AccountMutation{auditExport(id:String!):AuditLogExportMutation """Cancel a pending change from an annual team subscription to a monthly team subscription when the current period expires.""" cancelConvertAnnualTeamSubscriptionToMonthlyAtNextPeriod:Account """Cancel account subscriptions, subscriptions will remain active until the end of the paid period""" cancelSubscriptions:Account """Changes an annual team subscription to a monthly team subscription when the current period expires.""" convertAnnualTeamSubscriptionToMonthlyAtNextPeriod:Account """Changes a monthly team subscription to an annual team subscription.""" convertMonthlyTeamSubscriptionToAnnual:Account 
createStaticInvitation(role:UserPermission!):OrganizationInviteLink """Delete the account's avatar. Requires Account.canUpdateAvatar to be true.""" deleteAvatar:AvatarDeleteError """Acknowledge that a trial has expired and return to community""" dismissExpiredTrial:Account """Apollo admins only: extend an ongoing trial""" extendTrial(to:Timestamp!):Account """Hard delete an account and all associated services""" hardDelete:Void """Send an invitation to join the account by E-mail""" invite(email:String!role:UserPermission):AccountInvitation """Reactivate a canceled current subscription""" reactivateCurrentSubscription:Account """Refresh billing information from third-party billing service""" refreshBilling:Void """Delete an invitation""" removeInvitation(id:ID):Void """Remove a member of the account""" removeMember(id:ID!):Account requestAuditExport(actors:[ActorInput!]from:Timestamp!graphIds:[String!]to:Timestamp!):Account """Send a new E-mail for an existing invitation""" resendInvitation(id:ID):AccountInvitation revokeStaticInvitation(token:String!):OrganizationInviteLink """Apollo admins only: set the billing plan to an arbitrary plan""" setPlan(id:ID!):Void """Start a new team subscription with the given billing period""" startTeamSubscription(billingPeriod:BillingPeriod!):Account """Start a team trial""" startTrial:Account """This is called by the form shown to users after they cancel their team subscription.""" submitTeamCancellationFeedback(feedback:String!):Void """Apollo admins only: terminate any ongoing subscriptions in the account, without refunds""" terminateSubscriptions:Account """Update the billing address for a Recurly token""" updateBillingAddress(billingAddress:BillingAddressInput!):Account """Update the billing information from a Recurly token""" updateBillingInfo(token:String!):Void updateCompanyUrl(companyUrl:String):Account """Set the E-mail address of the account, used notably for billing""" updateEmail(email:String!):Void """Update the account ID""" updateID(id:ID!):Account """Update the company name""" updateName(name:String!):Void """Apollo admins only: enable or disable an account for PingOne SSO login""" updatePingOneSSOIDPID(idpid:String):Account """Updates the role assigned to new SSO users.""" updateSSODefaultRole(role:UserPermission!):OrganizationSSO """A (currently) internal to Apollo mutation to update a user's role within an organization""" updateUserPermission(permission:UserPermission!userID:ID!):User}"""Columns of AccountOperationCheckStats.""" enum AccountOperationCheckStatsColumn{CACHED_REQUESTS_COUNT CLIENT_NAME CLIENT_VERSION QUERY_ID QUERY_NAME SCHEMA_TAG SERVICE_ID TIMESTAMP UNCACHED_REQUESTS_COUNT}type AccountOperationCheckStatsDimensions{clientName:String clientVersion:String queryId:ID queryName:String schemaTag:String serviceId:ID}"""Filter for data in AccountOperationCheckStats. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input AccountOperationCheckStatsFilter{and:[AccountOperationCheckStatsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. 
To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String in:AccountOperationCheckStatsFilterIn not:AccountOperationCheckStatsFilter or:[AccountOperationCheckStatsFilter!]"""Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in AccountOperationCheckStats. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input AccountOperationCheckStatsFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type AccountOperationCheckStatsMetrics{cachedRequestsCount:Long!uncachedRequestsCount:Long!}input AccountOperationCheckStatsOrderBySpec{column:AccountOperationCheckStatsColumn!direction:Ordering!}type AccountOperationCheckStatsRecord{"""Dimensions of AccountOperationCheckStats that can be grouped by.""" groupBy:AccountOperationCheckStatsDimensions!"""Metrics of AccountOperationCheckStats that can be aggregated over.""" metrics:AccountOperationCheckStatsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}type AccountPublishesStatsMetrics{totalPublishes:Long!}type AccountPublishesStatsRecord{id:ID!metrics:AccountPublishesStatsMetrics!timestamp:Timestamp!}"""Columns of AccountQueryStats.""" enum AccountQueryStatsColumn{CACHED_HISTOGRAM CACHED_REQUESTS_COUNT CACHE_TTL_HISTOGRAM CLIENT_NAME CLIENT_VERSION FORBIDDEN_OPERATION_COUNT FROM_ENGINEPROXY QUERY_ID QUERY_NAME REGISTERED_OPERATION_COUNT REQUESTS_WITH_ERRORS_COUNT SCHEMA_HASH SCHEMA_TAG SERVICE_ID TIMESTAMP UNCACHED_HISTOGRAM UNCACHED_REQUESTS_COUNT}type AccountQueryStatsDimensions{clientName:String clientVersion:String fromEngineproxy:String queryId:ID queryName:String querySignature:String schemaHash:String schemaTag:String serviceId:ID}"""Filter for data in AccountQueryStats. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input AccountQueryStatsFilter{and:[AccountQueryStatsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. 
To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose fromEngineproxy dimension equals the given value if not null. To query for the null value, use {in: {fromEngineproxy: [null]}} instead.""" fromEngineproxy:String in:AccountQueryStatsFilterIn not:AccountQueryStatsFilter or:[AccountQueryStatsFilter!]"""Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in AccountQueryStats. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input AccountQueryStatsFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose fromEngineproxy dimension is in the given list. A null value in the list means a row with null for that dimension.""" fromEngineproxy:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type AccountQueryStatsMetrics{cacheTtlHistogram:DurationHistogram!cachedHistogram:DurationHistogram!cachedRequestsCount:Long!forbiddenOperationCount:Long!registeredOperationCount:Long!requestsWithErrorsCount:Long!totalLatencyHistogram:DurationHistogram!totalRequestCount:Long!uncachedHistogram:DurationHistogram!uncachedRequestsCount:Long!}input AccountQueryStatsOrderBySpec{column:AccountQueryStatsColumn!direction:Ordering!}type AccountQueryStatsRecord{"""Dimensions of AccountQueryStats that can be grouped by.""" groupBy:AccountQueryStatsDimensions!"""Metrics of AccountQueryStats that can be aggregated over.""" metrics:AccountQueryStatsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}type AccountRoles{canAudit:Boolean!canCreateDevGraph:Boolean!canCreateService:Boolean!canDelete:Boolean!canDownloadInvoice:Boolean!@deprecated(reason:"Use canQueryBillingInfo instead")canManageMembers:Boolean!canQuery:Boolean!canQueryAudit:Boolean!canQueryBillingInfo:Boolean!canQueryInvoices:Boolean!@deprecated(reason:"Use canQueryBillingInfo instead")canQueryMembers:Boolean!canQueryStats:Boolean!canReadTickets:Boolean!canRemoveMembers:Boolean!canSetConstrainedPlan:Boolean!canUpdateBillingInfo:Boolean!canUpdateMetadata:Boolean!}enum AccountState{ACTIVE CLOSED UNKNOWN UNPROVISIONED}"""A time window with a specified granularity over a given account.""" type AccountStatsWindow{billingUsageStats("""Filter to select what rows to return.""" filter:AccountBillingUsageStatsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order AccountBillingUsageStats by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[AccountBillingUsageStatsOrderBySpec!]):[AccountBillingUsageStatsRecord!]!edgeServerInfos("""Filter to select what rows to return.""" filter:AccountEdgeServerInfosFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order AccountEdgeServerInfos by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[AccountEdgeServerInfosOrderBySpec!]):[AccountEdgeServerInfosRecord!]!errorStats("""Filter to select what rows to return.""" filter:AccountErrorStatsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order AccountErrorStats by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[AccountErrorStatsOrderBySpec!]):[AccountErrorStatsRecord!]!fieldLatencies("""Filter to select what rows to return.""" filter:AccountFieldLatenciesFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order AccountFieldLatencies by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. 
When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[AccountFieldLatenciesOrderBySpec!]):[AccountFieldLatenciesRecord!]!fieldUsage("""Filter to select what rows to return.""" filter:AccountFieldUsageFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order AccountFieldUsage by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[AccountFieldUsageOrderBySpec!]):[AccountFieldUsageRecord!]!operationCheckStats("""Filter to select what rows to return.""" filter:AccountOperationCheckStatsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order AccountOperationCheckStats by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[AccountOperationCheckStatsOrderBySpec!]):[AccountOperationCheckStatsRecord!]!queryStats("""Filter to select what rows to return.""" filter:AccountQueryStatsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order AccountQueryStats by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[AccountQueryStatsOrderBySpec!]):[AccountQueryStatsRecord!]!"""From field rounded down to the nearest resolution.""" roundedDownFrom:Timestamp!"""To field rounded up to the nearest resolution.""" roundedUpTo:Timestamp!tracePathErrorsRefs("""Filter to select what rows to return.""" filter:AccountTracePathErrorsRefsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order AccountTracePathErrorsRefs by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[AccountTracePathErrorsRefsOrderBySpec!]):[AccountTracePathErrorsRefsRecord!]!traceRefs("""Filter to select what rows to return.""" filter:AccountTraceRefsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order AccountTraceRefs by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[AccountTraceRefsOrderBySpec!]):[AccountTraceRefsRecord!]!}"""Columns of AccountTracePathErrorsRefs.""" enum AccountTracePathErrorsRefsColumn{CLIENT_NAME CLIENT_VERSION DURATION_BUCKET ERRORS_COUNT_IN_PATH ERRORS_COUNT_IN_TRACE ERROR_MESSAGE PATH QUERY_ID QUERY_NAME SCHEMA_HASH SCHEMA_TAG SERVICE_ID TIMESTAMP TRACE_HTTP_STATUS_CODE TRACE_ID TRACE_SIZE_BYTES TRACE_STARTS_AT}type AccountTracePathErrorsRefsDimensions{clientName:String clientVersion:String durationBucket:Int errorMessage:String path:String queryId:ID queryName:String schemaHash:String schemaTag:String serviceId:ID traceHttpStatusCode:Int traceId:ID traceStartsAt:Timestamp}"""Filter for data in AccountTracePathErrorsRefs. Fields with dimension names represent equality checks. 
All fields are implicitly ANDed together.""" input AccountTracePathErrorsRefsFilter{and:[AccountTracePathErrorsRefsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose durationBucket dimension equals the given value if not null. To query for the null value, use {in: {durationBucket: [null]}} instead.""" durationBucket:Int """Selects rows whose errorMessage dimension equals the given value if not null. To query for the null value, use {in: {errorMessage: [null]}} instead.""" errorMessage:String in:AccountTracePathErrorsRefsFilterIn not:AccountTracePathErrorsRefsFilter or:[AccountTracePathErrorsRefsFilter!]"""Selects rows whose path dimension equals the given value if not null. To query for the null value, use {in: {path: [null]}} instead.""" path:String """Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID """Selects rows whose traceHttpStatusCode dimension equals the given value if not null. To query for the null value, use {in: {traceHttpStatusCode: [null]}} instead.""" traceHttpStatusCode:Int """Selects rows whose traceId dimension equals the given value if not null. To query for the null value, use {in: {traceId: [null]}} instead.""" traceId:ID}"""Filter for data in AccountTracePathErrorsRefs. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input AccountTracePathErrorsRefsFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose durationBucket dimension is in the given list. A null value in the list means a row with null for that dimension.""" durationBucket:[Int]"""Selects rows whose errorMessage dimension is in the given list. A null value in the list means a row with null for that dimension.""" errorMessage:[String]"""Selects rows whose path dimension is in the given list. A null value in the list means a row with null for that dimension.""" path:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]"""Selects rows whose traceHttpStatusCode dimension is in the given list. A null value in the list means a row with null for that dimension.""" traceHttpStatusCode:[Int]"""Selects rows whose traceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" traceId:[ID]}type AccountTracePathErrorsRefsMetrics{errorsCountInPath:Long!errorsCountInTrace:Long!traceSizeBytes:Long!}input AccountTracePathErrorsRefsOrderBySpec{column:AccountTracePathErrorsRefsColumn!direction:Ordering!}type AccountTracePathErrorsRefsRecord{"""Dimensions of AccountTracePathErrorsRefs that can be grouped by.""" groupBy:AccountTracePathErrorsRefsDimensions!"""Metrics of AccountTracePathErrorsRefs that can be aggregated over.""" metrics:AccountTracePathErrorsRefsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of AccountTraceRefs.""" enum AccountTraceRefsColumn{CLIENT_NAME CLIENT_VERSION DURATION_BUCKET DURATION_NS QUERY_ID QUERY_NAME SCHEMA_HASH SCHEMA_TAG SERVICE_ID TIMESTAMP TRACE_ID TRACE_SIZE_BYTES}type AccountTraceRefsDimensions{clientName:String clientVersion:String durationBucket:Int queryId:ID queryName:String querySignature:String schemaHash:String schemaTag:String serviceId:ID traceId:ID}"""Filter for data in AccountTraceRefs. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input AccountTraceRefsFilter{and:[AccountTraceRefsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose durationBucket dimension equals the given value if not null. To query for the null value, use {in: {durationBucket: [null]}} instead.""" durationBucket:Int in:AccountTraceRefsFilterIn not:AccountTraceRefsFilter or:[AccountTraceRefsFilter!]"""Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID """Selects rows whose traceId dimension equals the given value if not null. 
To query for the null value, use {in: {traceId: [null]}} instead.""" traceId:ID}"""Filter for data in AccountTraceRefs. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input AccountTraceRefsFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose durationBucket dimension is in the given list. A null value in the list means a row with null for that dimension.""" durationBucket:[Int]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]"""Selects rows whose traceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" traceId:[ID]}type AccountTraceRefsMetrics{durationNs:Long!traceSizeBytes:Long!}input AccountTraceRefsOrderBySpec{column:AccountTraceRefsColumn!direction:Ordering!}type AccountTraceRefsRecord{"""Dimensions of AccountTraceRefs that can be grouped by.""" groupBy:AccountTraceRefsDimensions!"""Metrics of AccountTraceRefs that can be aggregated over.""" metrics:AccountTraceRefsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""An actor (view of Identity) that performed an action within Studio.""" type Actor{actorId:ID!type:ActorType!}input ActorInput{actorId:ID!type:ActorType!}enum ActorType{ANONYMOUS_USER BACKFILL CRON GRAPH INTERNAL_IDENTITY SYNCHRONIZATION SYSTEM USER}union AddOperationCollectionEntryResult=OperationCollectionEntry|PermissionError|ValidationError union AddOperationCollectionToVariantResult=GraphVariant|InvalidTarget|PermissionError|ValidationError type AffectedClient{"""ID, often the name, of the client set by the user and reported alongside metrics""" clientReferenceId:ID@deprecated(reason:"Unsupported.")"""version of the client set by the user and reported alongside metrics""" clientVersion:String@deprecated(reason:"Unsupported.")}type AffectedQuery{"""If the operation would be approved if the check ran again. Returns null if queried from SchemaDiff.changes.affectedQueries.alreadyApproved""" alreadyApproved:Boolean """If the operation would be ignored if the check ran again""" alreadyIgnored:Boolean """List of changes affecting this query. 
Returns null if queried from SchemaDiff.changes.affectedQueries.changes""" changes:[ChangeOnOperation!]"""Name to display to the user for the operation""" displayName:String id:ID!"""Determines if this query validates against the proposed schema""" isValid:Boolean """Whether this operation was ignored and its severity was downgraded for that reason""" markedAsIgnored:Boolean """Whether the changes were marked as safe and its severity was downgraded for that reason""" markedAsSafe:Boolean """Name provided for the operation, which can be empty string if it is an anonymous operation""" name:String """First 128 characters of query signature for display""" signature:String}interface ApiKey{id:ID!keyName:String token:String!}type ApiKeyProvision{apiKey:ApiKey!created:Boolean!}type AuditLogExport{actors:[Identity!]bigqueryTriggeredAt:Timestamp completedAt:Timestamp createdAt:Timestamp!exportedFiles:[String!]from:Timestamp!graphs:[Service!]id:ID!requester:User status:AuditStatus!to:Timestamp!}type AuditLogExportMutation{cancel:Account delete:Account}enum AuditStatus{CANCELLED COMPLETED EXPIRED FAILED IN_PROGRESS QUEUED}type AvatarDeleteError{clientMessage:String!code:AvatarDeleteErrorCode!serverMessage:String!}enum AvatarDeleteErrorCode{SSO_USERS_CANNOT_DELETE_SELF_AVATAR}type AvatarUploadError{clientMessage:String!code:AvatarUploadErrorCode!serverMessage:String!}enum AvatarUploadErrorCode{SSO_USERS_CANNOT_UPLOAD_SELF_AVATAR}union AvatarUploadResult=AvatarUploadError|MediaUploadInfo type BillingAddress{address1:String address2:String city:String country:String state:String zip:String}"""Billing address input""" input BillingAddressInput{address1:String!address2:String city:String!country:String!state:String!zip:String!}type BillingInfo{address:BillingAddress!cardType:String firstName:String lastFour:Int lastName:String month:Int vatNumber:String year:Int}enum BillingModel{REQUEST_BASED SEAT_BASED}type BillingMonth{end:Timestamp!requests:Long!start:Timestamp!}enum BillingPeriod{MONTHLY QUARTERLY SEMI_ANNUALLY YEARLY}type BillingPlan{addons:[BillingPlanAddon!]!billingModel:BillingModel!billingPeriod:BillingPeriod capabilities:BillingPlanCapabilities!description:String id:ID!isTrial:Boolean!kind:BillingPlanKind!name:String!"""The price of every seat""" pricePerSeatInUsdCents:Int """The price of subscribing to this plan with a quantity of 1 (currently always the case)""" pricePerUnitInUsdCents:Int!"""Whether the plan is accessible by all users in QueryRoot.allPlans, QueryRoot.plan, or AccountMutation.setPlan""" public:Boolean!tier:BillingPlanTier!}type BillingPlanAddon{id:ID!pricePerUnitInUsdCents:Int!}type BillingPlanAddonV2{id:ID!pricePerUnitInUsdCents:Int!}type BillingPlanCapabilities{clients:Boolean!contracts:Boolean!datadog:Boolean!errors:Boolean!federation:Boolean!launches:Boolean!maxAuditInDays:Int!maxRangeInDays:Int maxRequestsPerMonth:Long metrics:Boolean!notifications:Boolean!operationRegistry:Boolean!ranges:[String!]!schemaValidation:Boolean!traces:Boolean!userRoles:Boolean!webhooks:Boolean!}enum BillingPlanKind{COMMUNITY ENTERPRISE_INTERNAL ENTERPRISE_PAID ENTERPRISE_PILOT TEAM_PAID TEAM_TRIAL}enum BillingPlanKindV2{COMMUNITY ENTERPRISE_INTERNAL ENTERPRISE_PAID ENTERPRISE_PILOT TEAM_PAID TEAM_TRIAL UNKNOWN}enum BillingPlanTier{COMMUNITY ENTERPRISE TEAM}enum BillingPlanTierV2{COMMUNITY ENTERPRISE TEAM UNKNOWN}type BillingPlanV2{addons:[BillingPlanAddonV2!]!billingModel:BillingModel!billingPeriod:BillingPeriod clients:Boolean!contracts:Boolean!datadog:Boolean!description:String 
errors:Boolean!federation:Boolean!id:ID!isTrial:Boolean!kind:BillingPlanKindV2!launches:Boolean!maxAuditInDays:Int!maxRangeInDays:Int maxRequestsPerMonth:Long metrics:Boolean!name:String!notifications:Boolean!operationRegistry:Boolean!"""The price of every seat""" pricePerSeatInUsdCents:Int """The price of subscribing to this plan with a quantity of 1 (currently always the case)""" pricePerUnitInUsdCents:Int!"""Whether the plan is accessible by all users in QueryRoot.allPlans, QueryRoot.plan, or AccountMutation.setPlan""" public:Boolean!ranges:[String!]!schemaValidation:Boolean!tier:BillingPlanTierV2!traces:Boolean!userRoles:Boolean!webhooks:Boolean!}type BillingSubscription{activatedAt:Timestamp!addons:[BillingSubscriptionAddon!]!autoRenew:Boolean!"""The price of the subscription when ignoring add-ons (such as seats), ie quantity * pricePerUnitInUsdCents""" basePriceInUsdCents:Long!canceledAt:Timestamp currentPeriodEndsAt:Timestamp!currentPeriodStartedAt:Timestamp!expiresAt:Timestamp plan:BillingPlan!"""The price of every seat""" pricePerSeatInUsdCents:Int """The price of every unit in the subscription (hence multiplied by quantity to get to the basePriceInUsdCents)""" pricePerUnitInUsdCents:Int!quantity:Int!"""Total price of the subscription when it next renews, including add-ons (such as seats)""" renewalTotalPriceInUsdCents:Long!state:SubscriptionState!"""Total price of the subscription, including add-ons (such as seats)""" totalPriceInUsdCents:Long!"""When this subscription's trial period expires (if it is a trial). Not the same as the +invitations are not possible on SSO-synchronized account.""" sso:OrganizationSSO state:AccountState """A list of reusable invitations for the organization.""" staticInvitations:[OrganizationInviteLink!]stats(from:Timestamp!"""Granularity of buckets. Defaults to the entire range (aggregate all data into a single durationBucket) when null.""" resolution:Resolution """Defaults to the current time when null.""" to:Timestamp):AccountStatsWindow!@deprecated(reason:"use Account.statsWindow instead")statsWindow(from:Timestamp!"""Granularity of buckets. Defaults to the entire range (aggregate all data into a single durationBucket) when null.""" resolution:Resolution """Defaults to the current time when null.""" to:Timestamp):AccountStatsWindow subscriptions:[BillingSubscription!]subscriptionsV2:[BillingSubscriptionV2!]!"""Gets a ticket for this org, by id""" ticket(id:ID!):ZendeskTicket """List of Zendesk tickets submitted for this org""" tickets:[ZendeskTicket!]}"""Columns of AccountBillingUsageStats.""" enum AccountBillingUsageStatsColumn{OPERATION_COUNT OPERATION_COUNT_PROVIDED_EXPLICITLY SCHEMA_TAG SERVICE_ID TIMESTAMP}type AccountBillingUsageStatsDimensions{operationCountProvidedExplicitly:String schemaTag:String serviceId:ID}"""Filter for data in AccountBillingUsageStats. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input AccountBillingUsageStatsFilter{and:[AccountBillingUsageStatsFilter!]in:AccountBillingUsageStatsFilterIn not:AccountBillingUsageStatsFilter """Selects rows whose operationCountProvidedExplicitly dimension equals the given value if not null. To query for the null value, use {in: {operationCountProvidedExplicitly: [null]}} instead.""" operationCountProvidedExplicitly:String or:[AccountBillingUsageStatsFilter!]"""Selects rows whose schemaTag dimension equals the given value if not null. 
To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in AccountBillingUsageStats. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input AccountBillingUsageStatsFilterIn{"""Selects rows whose operationCountProvidedExplicitly dimension is in the given list. A null value in the list means a row with null for that dimension.""" operationCountProvidedExplicitly:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type AccountBillingUsageStatsMetrics{operationCount:Long!}input AccountBillingUsageStatsOrderBySpec{column:AccountBillingUsageStatsColumn!direction:Ordering!}type AccountBillingUsageStatsRecord{"""Dimensions of AccountBillingUsageStats that can be grouped by.""" groupBy:AccountBillingUsageStatsDimensions!"""Metrics of AccountBillingUsageStats that can be aggregated over.""" metrics:AccountBillingUsageStatsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}type AccountChecksStatsMetrics{totalFailedChecks:Long!totalSuccessfulChecks:Long!}type AccountChecksStatsRecord{id:ID!metrics:AccountChecksStatsMetrics!timestamp:Timestamp!}"""Columns of AccountEdgeServerInfos.""" enum AccountEdgeServerInfosColumn{BOOT_ID EXECUTABLE_SCHEMA_ID LIBRARY_VERSION PLATFORM RUNTIME_VERSION SCHEMA_TAG SERVER_ID SERVICE_ID TIMESTAMP USER_VERSION}type AccountEdgeServerInfosDimensions{bootId:ID executableSchemaId:ID libraryVersion:String platform:String runtimeVersion:String schemaTag:String serverId:ID serviceId:ID userVersion:String}"""Filter for data in AccountEdgeServerInfos. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input AccountEdgeServerInfosFilter{and:[AccountEdgeServerInfosFilter!]"""Selects rows whose bootId dimension equals the given value if not null. To query for the null value, use {in: {bootId: [null]}} instead.""" bootId:ID """Selects rows whose executableSchemaId dimension equals the given value if not null. To query for the null value, use {in: {executableSchemaId: [null]}} instead.""" executableSchemaId:ID in:AccountEdgeServerInfosFilterIn """Selects rows whose libraryVersion dimension equals the given value if not null. To query for the null value, use {in: {libraryVersion: [null]}} instead.""" libraryVersion:String not:AccountEdgeServerInfosFilter or:[AccountEdgeServerInfosFilter!]"""Selects rows whose platform dimension equals the given value if not null. To query for the null value, use {in: {platform: [null]}} instead.""" platform:String """Selects rows whose runtimeVersion dimension equals the given value if not null. To query for the null value, use {in: {runtimeVersion: [null]}} instead.""" runtimeVersion:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serverId dimension equals the given value if not null. 
To query for the null value, use {in: {serverId: [null]}} instead.""" serverId:ID """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID """Selects rows whose userVersion dimension equals the given value if not null. To query for the null value, use {in: {userVersion: [null]}} instead.""" userVersion:String}"""Filter for data in AccountEdgeServerInfos. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input AccountEdgeServerInfosFilterIn{"""Selects rows whose bootId dimension is in the given list. A null value in the list means a row with null for that dimension.""" bootId:[ID]"""Selects rows whose executableSchemaId dimension is in the given list. A null value in the list means a row with null for that dimension.""" executableSchemaId:[ID]"""Selects rows whose libraryVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" libraryVersion:[String]"""Selects rows whose platform dimension is in the given list. A null value in the list means a row with null for that dimension.""" platform:[String]"""Selects rows whose runtimeVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" runtimeVersion:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serverId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serverId:[ID]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]"""Selects rows whose userVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" userVersion:[String]}input AccountEdgeServerInfosOrderBySpec{column:AccountEdgeServerInfosColumn!direction:Ordering!}type AccountEdgeServerInfosRecord{"""Dimensions of AccountEdgeServerInfos that can be grouped by.""" groupBy:AccountEdgeServerInfosDimensions!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of AccountErrorStats.""" enum AccountErrorStatsColumn{CLIENT_NAME CLIENT_VERSION ERRORS_COUNT PATH QUERY_ID QUERY_NAME REQUESTS_WITH_ERRORS_COUNT SCHEMA_HASH SCHEMA_TAG SERVICE_ID TIMESTAMP}type AccountErrorStatsDimensions{clientName:String clientVersion:String path:String queryId:ID queryName:String schemaHash:String schemaTag:String serviceId:ID}"""Filter for data in AccountErrorStats. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input AccountErrorStatsFilter{and:[AccountErrorStatsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String in:AccountErrorStatsFilterIn not:AccountErrorStatsFilter or:[AccountErrorStatsFilter!]"""Selects rows whose path dimension equals the given value if not null. To query for the null value, use {in: {path: [null]}} instead.""" path:String """Selects rows whose queryId dimension equals the given value if not null. 
To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in AccountErrorStats. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input AccountErrorStatsFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose path dimension is in the given list. A null value in the list means a row with null for that dimension.""" path:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type AccountErrorStatsMetrics{errorsCount:Long!requestsWithErrorsCount:Long!}input AccountErrorStatsOrderBySpec{column:AccountErrorStatsColumn!direction:Ordering!}type AccountErrorStatsRecord{"""Dimensions of AccountErrorStats that can be grouped by.""" groupBy:AccountErrorStatsDimensions!"""Metrics of AccountErrorStats that can be aggregated over.""" metrics:AccountErrorStatsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}type AccountExperimentalFeatures{auditLogs:Boolean!championDashboard:Boolean!federation2Preview:Boolean!preRequestPreview:Boolean!publicVariants:Boolean!variantHomepage:Boolean!webhooksPreview:Boolean!}"""Columns of AccountFieldExecutions.""" enum AccountFieldExecutionsColumn{ESTIMATED_EXECUTION_COUNT FIELD_NAME OBSERVED_EXECUTION_COUNT PARENT_TYPE REFERENCING_OPERATION_COUNT SCHEMA_TAG SERVICE_ID TIMESTAMP}type AccountFieldExecutionsDimensions{fieldName:String parentType:String schemaTag:String serviceId:ID}"""Filter for data in AccountFieldExecutions. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input AccountFieldExecutionsFilter{and:[AccountFieldExecutionsFilter!]"""Selects rows whose fieldName dimension equals the given value if not null. 
To query for the null value, use {in: {fieldName: [null]}} instead.""" fieldName:String in:AccountFieldExecutionsFilterIn not:AccountFieldExecutionsFilter or:[AccountFieldExecutionsFilter!]"""Selects rows whose parentType dimension equals the given value if not null. To query for the null value, use {in: {parentType: [null]}} instead.""" parentType:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in AccountFieldExecutions. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input AccountFieldExecutionsFilterIn{"""Selects rows whose fieldName dimension is in the given list. A null value in the list means a row with null for that dimension.""" fieldName:[String]"""Selects rows whose parentType dimension is in the given list. A null value in the list means a row with null for that dimension.""" parentType:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type AccountFieldExecutionsMetrics{estimatedExecutionCount:Long!observedExecutionCount:Long!referencingOperationCount:Long!}input AccountFieldExecutionsOrderBySpec{column:AccountFieldExecutionsColumn!direction:Ordering!}type AccountFieldExecutionsRecord{"""Dimensions of AccountFieldExecutions that can be grouped by.""" groupBy:AccountFieldExecutionsDimensions!"""Metrics of AccountFieldExecutions that can be aggregated over.""" metrics:AccountFieldExecutionsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of AccountFieldLatencies.""" enum AccountFieldLatenciesColumn{FIELD_HISTOGRAM FIELD_NAME PARENT_TYPE SCHEMA_HASH SCHEMA_TAG SERVICE_ID TIMESTAMP}type AccountFieldLatenciesDimensions{field:String fieldName:String parentType:String schemaHash:String schemaTag:String serviceId:ID}"""Filter for data in AccountFieldLatencies. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input AccountFieldLatenciesFilter{and:[AccountFieldLatenciesFilter!]"""Selects rows whose fieldName dimension equals the given value if not null. To query for the null value, use {in: {fieldName: [null]}} instead.""" fieldName:String in:AccountFieldLatenciesFilterIn not:AccountFieldLatenciesFilter or:[AccountFieldLatenciesFilter!]"""Selects rows whose parentType dimension equals the given value if not null. To query for the null value, use {in: {parentType: [null]}} instead.""" parentType:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in AccountFieldLatencies. 
Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input AccountFieldLatenciesFilterIn{"""Selects rows whose fieldName dimension is in the given list. A null value in the list means a row with null for that dimension.""" fieldName:[String]"""Selects rows whose parentType dimension is in the given list. A null value in the list means a row with null for that dimension.""" parentType:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type AccountFieldLatenciesMetrics{fieldHistogram:DurationHistogram!}input AccountFieldLatenciesOrderBySpec{column:AccountFieldLatenciesColumn!direction:Ordering!}type AccountFieldLatenciesRecord{"""Dimensions of AccountFieldLatencies that can be grouped by.""" groupBy:AccountFieldLatenciesDimensions!"""Metrics of AccountFieldLatencies that can be aggregated over.""" metrics:AccountFieldLatenciesMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of AccountFieldRequestsByClientVersion.""" enum AccountFieldRequestsByClientVersionColumn{CLIENT_NAME CLIENT_VERSION ESTIMATED_EXECUTION_COUNT FIELD_NAME OBSERVED_EXECUTION_COUNT PARENT_TYPE REFERENCING_OPERATION_COUNT SCHEMA_TAG SERVICE_ID TIMESTAMP}type AccountFieldRequestsByClientVersionDimensions{clientName:String clientVersion:String fieldName:String parentType:String schemaTag:String serviceId:ID}"""Filter for data in AccountFieldRequestsByClientVersion. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input AccountFieldRequestsByClientVersionFilter{and:[AccountFieldRequestsByClientVersionFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose fieldName dimension equals the given value if not null. To query for the null value, use {in: {fieldName: [null]}} instead.""" fieldName:String in:AccountFieldRequestsByClientVersionFilterIn not:AccountFieldRequestsByClientVersionFilter or:[AccountFieldRequestsByClientVersionFilter!]"""Selects rows whose parentType dimension equals the given value if not null. To query for the null value, use {in: {parentType: [null]}} instead.""" parentType:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in AccountFieldRequestsByClientVersion. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input AccountFieldRequestsByClientVersionFilterIn{"""Selects rows whose clientName dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose fieldName dimension is in the given list. A null value in the list means a row with null for that dimension.""" fieldName:[String]"""Selects rows whose parentType dimension is in the given list. A null value in the list means a row with null for that dimension.""" parentType:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type AccountFieldRequestsByClientVersionMetrics{estimatedExecutionCount:Long!observedExecutionCount:Long!referencingOperationCount:Long!}input AccountFieldRequestsByClientVersionOrderBySpec{column:AccountFieldRequestsByClientVersionColumn!direction:Ordering!}type AccountFieldRequestsByClientVersionRecord{"""Dimensions of AccountFieldRequestsByClientVersion that can be grouped by.""" groupBy:AccountFieldRequestsByClientVersionDimensions!"""Metrics of AccountFieldRequestsByClientVersion that can be aggregated over.""" metrics:AccountFieldRequestsByClientVersionMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of AccountFieldUsage.""" enum AccountFieldUsageColumn{CLIENT_NAME CLIENT_VERSION ESTIMATED_EXECUTION_COUNT EXECUTION_COUNT FIELD_NAME PARENT_TYPE QUERY_ID QUERY_NAME REFERENCING_OPERATION_COUNT SCHEMA_HASH SCHEMA_TAG SERVICE_ID TIMESTAMP}type AccountFieldUsageDimensions{clientName:String clientVersion:String fieldName:String parentType:String queryId:ID queryName:String schemaHash:String schemaTag:String serviceId:ID}"""Filter for data in AccountFieldUsage. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input AccountFieldUsageFilter{and:[AccountFieldUsageFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose fieldName dimension equals the given value if not null. To query for the null value, use {in: {fieldName: [null]}} instead.""" fieldName:String in:AccountFieldUsageFilterIn not:AccountFieldUsageFilter or:[AccountFieldUsageFilter!]"""Selects rows whose parentType dimension equals the given value if not null. To query for the null value, use {in: {parentType: [null]}} instead.""" parentType:String """Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. 
To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in AccountFieldUsage. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input AccountFieldUsageFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose fieldName dimension is in the given list. A null value in the list means a row with null for that dimension.""" fieldName:[String]"""Selects rows whose parentType dimension is in the given list. A null value in the list means a row with null for that dimension.""" parentType:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type AccountFieldUsageMetrics{estimatedExecutionCount:Long!executionCount:Long!referencingOperationCount:Long!}input AccountFieldUsageOrderBySpec{column:AccountFieldUsageColumn!direction:Ordering!}type AccountFieldUsageRecord{"""Dimensions of AccountFieldUsage that can be grouped by.""" groupBy:AccountFieldUsageDimensions!"""Metrics of AccountFieldUsage that can be aggregated over.""" metrics:AccountFieldUsageMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}type AccountInvitation{"""An accepted invitation cannot be used anymore""" acceptedAt:Timestamp """Who accepted the invitation""" acceptedBy:User """Time the invitation was created""" createdAt:Timestamp!"""Who created the invitation""" createdBy:User email:String!id:ID!"""Last time we sent an email for the invitation""" lastSentAt:Timestamp """Access role for the invitee""" role:UserPermission!}type AccountMembership{account:Account!createdAt:Timestamp!"""If this membership is a free seat (based on role)""" free:Boolean permission:UserPermission!user:User!}type AccountMutation{auditExport(id:String!):AuditLogExportMutation """Cancel a pending change from an annual team subscription to a monthly team subscription when the current period expires.""" cancelConvertAnnualTeamSubscriptionToMonthlyAtNextPeriod:Account """Cancel account subscriptions, subscriptions will remain active until the end of the paid period""" cancelSubscriptions:Account """Changes an annual team subscription to a monthly team subscription when the current period expires.""" convertAnnualTeamSubscriptionToMonthlyAtNextPeriod:Account """Changes a monthly team subscription to an annual team subscription.""" convertMonthlyTeamSubscriptionToAnnual:Account 
createStaticInvitation(role:UserPermission!):OrganizationInviteLink """Delete the account's avatar. Requires Account.canUpdateAvatar to be true.""" deleteAvatar:AvatarDeleteError """Acknowledge that a trial has expired and return to community""" dismissExpiredTrial:Account """Apollo admins only: extend an ongoing trial""" extendTrial(to:Timestamp!):Account """Hard delete an account and all associated services""" hardDelete:Void """Send an invitation to join the account by E-mail""" invite(email:String!role:UserPermission):AccountInvitation """Reactivate a canceled current subscription""" reactivateCurrentSubscription:Account """Refresh billing information from third-party billing service""" refreshBilling:Void """Delete an invitation""" removeInvitation(id:ID):Void """Remove a member of the account""" removeMember(id:ID!):Account requestAuditExport(actors:[ActorInput!]from:Timestamp!graphIds:[String!]to:Timestamp!):Account """Send a new E-mail for an existing invitation""" resendInvitation(id:ID):AccountInvitation revokeStaticInvitation(token:String!):OrganizationInviteLink """Apollo admins only: set the billing plan to an arbitrary plan""" setPlan(id:ID!):Void """Start a new team subscription with the given billing period""" startTeamSubscription(billingPeriod:BillingPeriod!):Account """Start a team trial""" startTrial:Account """This is called by the form shown to users after they cancel their team subscription.""" submitTeamCancellationFeedback(feedback:String!):Void """Apollo admins only: terminate any ongoing subscriptions in the account, without refunds""" terminateSubscriptions:Account """Update the billing address for a Recurly token""" updateBillingAddress(billingAddress:BillingAddressInput!):Account """Update the billing information from a Recurly token""" updateBillingInfo(token:String!):Void updateCompanyUrl(companyUrl:String):Account """Set the E-mail address of the account, used notably for billing""" updateEmail(email:String!):Void """Update the account ID""" updateID(id:ID!):Account """Update the company name""" updateName(name:String!):Void """Apollo admins only: enable or disable an account for PingOne SSO login""" updatePingOneSSOIDPID(idpid:String):Account """Updates the role assigned to new SSO users.""" updateSSODefaultRole(role:UserPermission!):OrganizationSSO """A (currently) internal to Apollo mutation to update a user's role within an organization""" updateUserPermission(permission:UserPermission!userID:ID!):User}"""Columns of AccountOperationCheckStats.""" enum AccountOperationCheckStatsColumn{CACHED_REQUESTS_COUNT CLIENT_NAME CLIENT_VERSION QUERY_ID QUERY_NAME SCHEMA_TAG SERVICE_ID TIMESTAMP UNCACHED_REQUESTS_COUNT}type AccountOperationCheckStatsDimensions{clientName:String clientVersion:String queryId:ID queryName:String schemaTag:String serviceId:ID}"""Filter for data in AccountOperationCheckStats. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input AccountOperationCheckStatsFilter{and:[AccountOperationCheckStatsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. 
To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String in:AccountOperationCheckStatsFilterIn not:AccountOperationCheckStatsFilter or:[AccountOperationCheckStatsFilter!]"""Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in AccountOperationCheckStats. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input AccountOperationCheckStatsFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type AccountOperationCheckStatsMetrics{cachedRequestsCount:Long!uncachedRequestsCount:Long!}input AccountOperationCheckStatsOrderBySpec{column:AccountOperationCheckStatsColumn!direction:Ordering!}type AccountOperationCheckStatsRecord{"""Dimensions of AccountOperationCheckStats that can be grouped by.""" groupBy:AccountOperationCheckStatsDimensions!"""Metrics of AccountOperationCheckStats that can be aggregated over.""" metrics:AccountOperationCheckStatsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}type AccountPublishesStatsMetrics{totalPublishes:Long!}type AccountPublishesStatsRecord{id:ID!metrics:AccountPublishesStatsMetrics!timestamp:Timestamp!}"""Columns of AccountQueryStats.""" enum AccountQueryStatsColumn{CACHED_HISTOGRAM CACHED_REQUESTS_COUNT CACHE_TTL_HISTOGRAM CLIENT_NAME CLIENT_VERSION FORBIDDEN_OPERATION_COUNT FROM_ENGINEPROXY QUERY_ID QUERY_NAME REGISTERED_OPERATION_COUNT REQUESTS_WITH_ERRORS_COUNT SCHEMA_HASH SCHEMA_TAG SERVICE_ID TIMESTAMP UNCACHED_HISTOGRAM UNCACHED_REQUESTS_COUNT}type AccountQueryStatsDimensions{clientName:String clientVersion:String fromEngineproxy:String queryId:ID queryName:String querySignature:String schemaHash:String schemaTag:String serviceId:ID}"""Filter for data in AccountQueryStats. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input AccountQueryStatsFilter{and:[AccountQueryStatsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. 
To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose fromEngineproxy dimension equals the given value if not null. To query for the null value, use {in: {fromEngineproxy: [null]}} instead.""" fromEngineproxy:String in:AccountQueryStatsFilterIn not:AccountQueryStatsFilter or:[AccountQueryStatsFilter!]"""Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in AccountQueryStats. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input AccountQueryStatsFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose fromEngineproxy dimension is in the given list. A null value in the list means a row with null for that dimension.""" fromEngineproxy:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type AccountQueryStatsMetrics{cacheTtlHistogram:DurationHistogram!cachedHistogram:DurationHistogram!cachedRequestsCount:Long!forbiddenOperationCount:Long!registeredOperationCount:Long!requestsWithErrorsCount:Long!totalLatencyHistogram:DurationHistogram!totalRequestCount:Long!uncachedHistogram:DurationHistogram!uncachedRequestsCount:Long!}input AccountQueryStatsOrderBySpec{column:AccountQueryStatsColumn!direction:Ordering!}type AccountQueryStatsRecord{"""Dimensions of AccountQueryStats that can be grouped by.""" groupBy:AccountQueryStatsDimensions!"""Metrics of AccountQueryStats that can be aggregated over.""" metrics:AccountQueryStatsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}type AccountRoles{canAudit:Boolean!canCreateDevGraph:Boolean!canCreateService:Boolean!canDelete:Boolean!canDownloadInvoice:Boolean!@deprecated(reason:"Use canQueryBillingInfo instead")canManageMembers:Boolean!canQuery:Boolean!canQueryAudit:Boolean!canQueryBillingInfo:Boolean!canQueryInvoices:Boolean!@deprecated(reason:"Use canQueryBillingInfo instead")canQueryMembers:Boolean!canQueryStats:Boolean!canReadTickets:Boolean!canRemoveMembers:Boolean!canSetConstrainedPlan:Boolean!canUpdateBillingInfo:Boolean!canUpdateMetadata:Boolean!}enum AccountState{ACTIVE CLOSED UNKNOWN UNPROVISIONED}"""A time window with a specified granularity over a given account.""" type AccountStatsWindow{billingUsageStats("""Filter to select what rows to return.""" filter:AccountBillingUsageStatsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order AccountBillingUsageStats by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[AccountBillingUsageStatsOrderBySpec!]):[AccountBillingUsageStatsRecord!]!edgeServerInfos("""Filter to select what rows to return.""" filter:AccountEdgeServerInfosFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order AccountEdgeServerInfos by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[AccountEdgeServerInfosOrderBySpec!]):[AccountEdgeServerInfosRecord!]!errorStats("""Filter to select what rows to return.""" filter:AccountErrorStatsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order AccountErrorStats by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[AccountErrorStatsOrderBySpec!]):[AccountErrorStatsRecord!]!fieldExecutions("""Filter to select what rows to return.""" filter:AccountFieldExecutionsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order AccountFieldExecutions by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. 
When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[AccountFieldExecutionsOrderBySpec!]):[AccountFieldExecutionsRecord!]!fieldLatencies("""Filter to select what rows to return.""" filter:AccountFieldLatenciesFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order AccountFieldLatencies by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[AccountFieldLatenciesOrderBySpec!]):[AccountFieldLatenciesRecord!]!fieldRequestsByClientVersion("""Filter to select what rows to return.""" filter:AccountFieldRequestsByClientVersionFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order AccountFieldRequestsByClientVersion by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[AccountFieldRequestsByClientVersionOrderBySpec!]):[AccountFieldRequestsByClientVersionRecord!]!fieldUsage("""Filter to select what rows to return.""" filter:AccountFieldUsageFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order AccountFieldUsage by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[AccountFieldUsageOrderBySpec!]):[AccountFieldUsageRecord!]!operationCheckStats("""Filter to select what rows to return.""" filter:AccountOperationCheckStatsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order AccountOperationCheckStats by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[AccountOperationCheckStatsOrderBySpec!]):[AccountOperationCheckStatsRecord!]!queryStats("""Filter to select what rows to return.""" filter:AccountQueryStatsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order AccountQueryStats by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[AccountQueryStatsOrderBySpec!]):[AccountQueryStatsRecord!]!"""From field rounded down to the nearest resolution.""" roundedDownFrom:Timestamp!"""To field rounded up to the nearest resolution.""" roundedUpTo:Timestamp!tracePathErrorsRefs("""Filter to select what rows to return.""" filter:AccountTracePathErrorsRefsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order AccountTracePathErrorsRefs by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[AccountTracePathErrorsRefsOrderBySpec!]):[AccountTracePathErrorsRefsRecord!]!traceRefs("""Filter to select what rows to return.""" filter:AccountTraceRefsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order AccountTraceRefs by. 
The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[AccountTraceRefsOrderBySpec!]):[AccountTraceRefsRecord!]!}"""Columns of AccountTracePathErrorsRefs.""" enum AccountTracePathErrorsRefsColumn{CLIENT_NAME CLIENT_VERSION DURATION_BUCKET ERRORS_COUNT_IN_PATH ERRORS_COUNT_IN_TRACE ERROR_MESSAGE PATH QUERY_ID QUERY_NAME SCHEMA_HASH SCHEMA_TAG SERVICE_ID TIMESTAMP TRACE_HTTP_STATUS_CODE TRACE_ID TRACE_SIZE_BYTES TRACE_STARTS_AT}type AccountTracePathErrorsRefsDimensions{clientName:String clientVersion:String durationBucket:Int errorMessage:String path:String queryId:ID queryName:String schemaHash:String schemaTag:String serviceId:ID traceHttpStatusCode:Int traceId:ID traceStartsAt:Timestamp}"""Filter for data in AccountTracePathErrorsRefs. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input AccountTracePathErrorsRefsFilter{and:[AccountTracePathErrorsRefsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose durationBucket dimension equals the given value if not null. To query for the null value, use {in: {durationBucket: [null]}} instead.""" durationBucket:Int """Selects rows whose errorMessage dimension equals the given value if not null. To query for the null value, use {in: {errorMessage: [null]}} instead.""" errorMessage:String in:AccountTracePathErrorsRefsFilterIn not:AccountTracePathErrorsRefsFilter or:[AccountTracePathErrorsRefsFilter!]"""Selects rows whose path dimension equals the given value if not null. To query for the null value, use {in: {path: [null]}} instead.""" path:String """Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID """Selects rows whose traceHttpStatusCode dimension equals the given value if not null. To query for the null value, use {in: {traceHttpStatusCode: [null]}} instead.""" traceHttpStatusCode:Int """Selects rows whose traceId dimension equals the given value if not null. To query for the null value, use {in: {traceId: [null]}} instead.""" traceId:ID}"""Filter for data in AccountTracePathErrorsRefs. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input AccountTracePathErrorsRefsFilterIn{"""Selects rows whose clientName dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose durationBucket dimension is in the given list. A null value in the list means a row with null for that dimension.""" durationBucket:[Int]"""Selects rows whose errorMessage dimension is in the given list. A null value in the list means a row with null for that dimension.""" errorMessage:[String]"""Selects rows whose path dimension is in the given list. A null value in the list means a row with null for that dimension.""" path:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]"""Selects rows whose traceHttpStatusCode dimension is in the given list. A null value in the list means a row with null for that dimension.""" traceHttpStatusCode:[Int]"""Selects rows whose traceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" traceId:[ID]}type AccountTracePathErrorsRefsMetrics{errorsCountInPath:Long!errorsCountInTrace:Long!traceSizeBytes:Long!}input AccountTracePathErrorsRefsOrderBySpec{column:AccountTracePathErrorsRefsColumn!direction:Ordering!}type AccountTracePathErrorsRefsRecord{"""Dimensions of AccountTracePathErrorsRefs that can be grouped by.""" groupBy:AccountTracePathErrorsRefsDimensions!"""Metrics of AccountTracePathErrorsRefs that can be aggregated over.""" metrics:AccountTracePathErrorsRefsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of AccountTraceRefs.""" enum AccountTraceRefsColumn{CLIENT_NAME CLIENT_VERSION DURATION_BUCKET DURATION_NS QUERY_ID QUERY_NAME SCHEMA_HASH SCHEMA_TAG SERVICE_ID TIMESTAMP TRACE_ID TRACE_SIZE_BYTES}type AccountTraceRefsDimensions{clientName:String clientVersion:String durationBucket:Int queryId:ID queryName:String querySignature:String schemaHash:String schemaTag:String serviceId:ID traceId:ID}"""Filter for data in AccountTraceRefs. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input AccountTraceRefsFilter{and:[AccountTraceRefsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose durationBucket dimension equals the given value if not null. 
To query for the null value, use {in: {durationBucket: [null]}} instead.""" durationBucket:Int in:AccountTraceRefsFilterIn not:AccountTraceRefsFilter or:[AccountTraceRefsFilter!]"""Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID """Selects rows whose traceId dimension equals the given value if not null. To query for the null value, use {in: {traceId: [null]}} instead.""" traceId:ID}"""Filter for data in AccountTraceRefs. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input AccountTraceRefsFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose durationBucket dimension is in the given list. A null value in the list means a row with null for that dimension.""" durationBucket:[Int]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]"""Selects rows whose traceId dimension is in the given list. 
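# Illustrative sketch, not part of the schema file above: how the AccountTraceRefs
# filter, in-list, and orderBy inputs in this hunk combine. Plain dimension fields are
# equality checks and are implicitly ANDed; null can only be matched via the `in` form;
# earlier OrderBySpec entries take priority, and an empty list defaults to ascending
# timestamp. The field name "traceRefs" is a placeholder (the field accepting these
# arguments is defined outside this hunk), and the Ordering enum values are assumed to
# be ASCENDING / DESCENDING as defined elsewhere in the schema.
query TraceRefsExample {
  traceRefs(
    filter: {
      serviceId: "my-graph"                    # equality check
      in: { clientName: [null, "web"] }        # clientName is null or "web"
      not: { schemaTag: "staging" }
      or: [{ durationBucket: 1 }, { durationBucket: 2 }]
    }
    orderBy: [
      { column: DURATION_NS, direction: DESCENDING }   # highest priority
      { column: TIMESTAMP, direction: ASCENDING }
    ]
  ) {
    groupBy { queryName clientName traceId }
    metrics { durationNs traceSizeBytes }
    timestamp
  }
}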
A null value in the list means a row with null for that dimension.""" traceId:[ID]}type AccountTraceRefsMetrics{durationNs:Long!traceSizeBytes:Long!}input AccountTraceRefsOrderBySpec{column:AccountTraceRefsColumn!direction:Ordering!}type AccountTraceRefsRecord{"""Dimensions of AccountTraceRefs that can be grouped by.""" groupBy:AccountTraceRefsDimensions!"""Metrics of AccountTraceRefs that can be aggregated over.""" metrics:AccountTraceRefsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""An actor (view of Identity) that performed an action within Studio.""" type Actor{actorId:ID!type:ActorType!}input ActorInput{actorId:ID!type:ActorType!}enum ActorType{ANONYMOUS_USER BACKFILL CRON GRAPH INTERNAL_IDENTITY SYNCHRONIZATION SYSTEM USER}union AddOperationCollectionEntryResult=OperationCollectionEntry|PermissionError|ValidationError union AddOperationCollectionToVariantResult=GraphVariant|InvalidTarget|PermissionError|ValidationError type AffectedClient{"""ID, often the name, of the client set by the user and reported alongside metrics""" clientReferenceId:ID@deprecated(reason:"Unsupported.")"""version of the client set by the user and reported alongside metrics""" clientVersion:String@deprecated(reason:"Unsupported.")}type AffectedQuery{"""If the operation would be approved if the check ran again. Returns null if queried from SchemaDiff.changes.affectedQueries.alreadyApproved""" alreadyApproved:Boolean """If the operation would be ignored if the check ran again""" alreadyIgnored:Boolean """List of changes affecting this query. Returns null if queried from SchemaDiff.changes.affectedQueries.changes""" changes:[ChangeOnOperation!]"""Name to display to the user for the operation""" displayName:String id:ID!"""Determines if this query validates against the proposed schema""" isValid:Boolean """Whether this operation was ignored and its severity was downgraded for that reason""" markedAsIgnored:Boolean """Whether the changes were marked as safe and its severity was downgraded for that reason""" markedAsSafe:Boolean """Name provided for the operation, which can be empty string if it is an anonymous operation""" name:String """First 128 characters of query signature for display""" signature:String}interface ApiKey{id:ID!keyName:String token:String!}type ApiKeyProvision{apiKey:ApiKey!created:Boolean!}type AuditLogExport{actors:[Identity!]bigqueryTriggeredAt:Timestamp completedAt:Timestamp createdAt:Timestamp!exportedFiles:[String!]from:Timestamp!graphs:[Service!]id:ID!requester:User status:AuditStatus!to:Timestamp!}type AuditLogExportMutation{cancel:Account delete:Account}enum AuditStatus{CANCELLED COMPLETED EXPIRED FAILED IN_PROGRESS QUEUED}type AvatarDeleteError{clientMessage:String!code:AvatarDeleteErrorCode!serverMessage:String!}enum AvatarDeleteErrorCode{SSO_USERS_CANNOT_DELETE_SELF_AVATAR}type AvatarUploadError{clientMessage:String!code:AvatarUploadErrorCode!serverMessage:String!}enum AvatarUploadErrorCode{SSO_USERS_CANNOT_UPLOAD_SELF_AVATAR}union AvatarUploadResult=AvatarUploadError|MediaUploadInfo type BillingAddress{address1:String address2:String city:String country:String state:String zip:String}"""Billing address inpnut""" input BillingAddressInput{address1:String!address2:String city:String!country:String!state:String!zip:String!}type BillingInfo{address:BillingAddress!cardType:String firstName:String lastFour:Int lastName:String month:Int vatNumber:String year:Int}enum BillingModel{REQUEST_BASED SEAT_BASED}type 
BillingMonth{end:Timestamp!requests:Long!start:Timestamp!}enum BillingPeriod{MONTHLY QUARTERLY SEMI_ANNUALLY YEARLY}type BillingPlan{addons:[BillingPlanAddon!]!billingModel:BillingModel!billingPeriod:BillingPeriod capabilities:BillingPlanCapabilities!description:String id:ID!isTrial:Boolean!kind:BillingPlanKind!name:String!"""The price of every seat""" pricePerSeatInUsdCents:Int """The price of subscribing to this plan with a quantity of 1 (currently always the case)""" pricePerUnitInUsdCents:Int!"""Whether the plan is accessible by all users in QueryRoot.allPlans, QueryRoot.plan, or AccountMutation.setPlan""" public:Boolean!tier:BillingPlanTier!}type BillingPlanAddon{id:ID!pricePerUnitInUsdCents:Int!}type BillingPlanAddonV2{id:ID!pricePerUnitInUsdCents:Int!}type BillingPlanCapabilities{clients:Boolean!contracts:Boolean!datadog:Boolean!errors:Boolean!federation:Boolean!launches:Boolean!maxAuditInDays:Int!maxRangeInDays:Int maxRequestsPerMonth:Long metrics:Boolean!notifications:Boolean!operationRegistry:Boolean!ranges:[String!]!schemaValidation:Boolean!traces:Boolean!userRoles:Boolean!webhooks:Boolean!}enum BillingPlanKind{COMMUNITY ENTERPRISE_INTERNAL ENTERPRISE_PAID ENTERPRISE_PILOT TEAM_PAID TEAM_TRIAL}enum BillingPlanKindV2{COMMUNITY ENTERPRISE_INTERNAL ENTERPRISE_PAID ENTERPRISE_PILOT TEAM_PAID TEAM_TRIAL UNKNOWN}enum BillingPlanTier{COMMUNITY ENTERPRISE TEAM}enum BillingPlanTierV2{COMMUNITY ENTERPRISE TEAM UNKNOWN}type BillingPlanV2{addons:[BillingPlanAddonV2!]!billingModel:BillingModel!billingPeriod:BillingPeriod clients:Boolean!contracts:Boolean!datadog:Boolean!description:String errors:Boolean!federation:Boolean!id:ID!isTrial:Boolean!kind:BillingPlanKindV2!launches:Boolean!maxAuditInDays:Int!maxRangeInDays:Int maxRequestsPerMonth:Long metrics:Boolean!name:String!notifications:Boolean!operationRegistry:Boolean!"""The price of every seat""" pricePerSeatInUsdCents:Int """The price of subscribing to this plan with a quantity of 1 (currently always the case)""" pricePerUnitInUsdCents:Int!"""Whether the plan is accessible by all users in QueryRoot.allPlans, QueryRoot.plan, or AccountMutation.setPlan""" public:Boolean!ranges:[String!]!schemaValidation:Boolean!tier:BillingPlanTierV2!traces:Boolean!userRoles:Boolean!webhooks:Boolean!}type BillingSubscription{activatedAt:Timestamp!addons:[BillingSubscriptionAddon!]!autoRenew:Boolean!"""The price of the subscription when ignoring add-ons (such as seats), ie quantity * pricePerUnitInUsdCents""" basePriceInUsdCents:Long!canceledAt:Timestamp currentPeriodEndsAt:Timestamp!currentPeriodStartedAt:Timestamp!expiresAt:Timestamp plan:BillingPlan!"""The price of every seat""" pricePerSeatInUsdCents:Int """The price of every unit in the subscription (hence multiplied by quantity to get to the basePriceInUsdCents)""" pricePerUnitInUsdCents:Int!quantity:Int!"""Total price of the subscription when it next renews, including add-ons (such as seats)""" renewalTotalPriceInUsdCents:Long!state:SubscriptionState!"""Total price of the subscription, including add-ons (such as seats)""" totalPriceInUsdCents:Long!"""When this subscription's trial period expires (if it is a trial). 
Not the same as the subscription's Recurly expiration).""" trialExpiresAt:Timestamp uuid:ID!}type BillingSubscriptionAddon{id:ID!pricePerUnitInUsdCents:Int!quantity:Int!}type BillingSubscriptionAddonV2{id:ID!pricePerUnitInUsdCents:Int!quantity:Int!}type BillingSubscriptionV2{"""The price of every unit in the subscription (hence multiplied by quantity to get to the basePriceInUsdCents)""" activatedAt:Timestamp!addons:[BillingSubscriptionAddonV2!]!autoRenew:Boolean!canceledAt:Timestamp currentPeriodEndsAt:Timestamp!currentPeriodStartedAt:Timestamp!expiresAt:Timestamp plan:BillingPlanV2!"""The price of every seat""" pricePerSeatInUsdCents:Int quantity:Int!state:SubscriptionStateV2!"""When this subscription's trial period expires (if it is a trial). Not the same as the subscription's Recurly expiration).""" trialExpiresAt:Timestamp uuid:ID!}"""Columns of BillingUsageStats.""" enum BillingUsageStatsColumn{ACCOUNT_ID OPERATION_COUNT OPERATION_COUNT_PROVIDED_EXPLICITLY SCHEMA_TAG SERVICE_ID TIMESTAMP}type BillingUsageStatsDimensions{accountId:ID operationCountProvidedExplicitly:String schemaTag:String serviceId:ID}"""Filter for data in BillingUsageStats. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input BillingUsageStatsFilter{"""Selects rows whose accountId dimension equals the given value if not null. To query for the null value, use {in: {accountId: [null]}} instead.""" accountId:ID and:[BillingUsageStatsFilter!]in:BillingUsageStatsFilterIn not:BillingUsageStatsFilter """Selects rows whose operationCountProvidedExplicitly dimension equals the given value if not null. To query for the null value, use {in: {operationCountProvidedExplicitly: [null]}} instead.""" operationCountProvidedExplicitly:String or:[BillingUsageStatsFilter!]"""Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in BillingUsageStats. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input BillingUsageStatsFilterIn{"""Selects rows whose accountId dimension is in the given list. A null value in the list means a row with null for that dimension.""" accountId:[ID]"""Selects rows whose operationCountProvidedExplicitly dimension is in the given list. A null value in the list means a row with null for that dimension.""" operationCountProvidedExplicitly:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. 
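# Illustrative sketch, not part of the schema file above: how the pricing fields on
# BillingSubscription relate. With illustrative numbers quantity = 5 and
# pricePerUnitInUsdCents = 5900, basePriceInUsdCents = 5 * 5900 = 29500; the total and
# renewal totals additionally include add-ons such as seats.
fragment SubscriptionPricing on BillingSubscription {
  quantity
  pricePerUnitInUsdCents        # price of one unit
  pricePerSeatInUsdCents        # price of one seat (add-on)
  basePriceInUsdCents           # quantity * pricePerUnitInUsdCents, ignoring add-ons
  totalPriceInUsdCents          # current total, including add-ons
  renewalTotalPriceInUsdCents   # total at next renewal, including add-ons
  currentPeriodStartedAt
  currentPeriodEndsAt
}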
A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type BillingUsageStatsMetrics{operationCount:Long!}input BillingUsageStatsOrderBySpec{column:BillingUsageStatsColumn!direction:Ordering!}type BillingUsageStatsRecord{"""Dimensions of BillingUsageStats that can be grouped by.""" groupBy:BillingUsageStatsDimensions!"""Metrics of BillingUsageStats that can be aggregated over.""" metrics:BillingUsageStatsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""A blob (base64'ed in JSON & GraphQL)""" scalar Blob type Build{input:BuildInput!result:BuildResult}type BuildError{code:String locations:[SourceLocation!]!message:String!}type BuildFailure{errorMessages:[BuildError!]!}union BuildInput=CompositionBuildInput|FilterBuildInput union BuildResult=BuildFailure|BuildSuccess type BuildSuccess{coreSchema:CoreSchema!}enum CacheScope{PRIVATE PUBLIC UNKNOWN UNRECOGNIZED}"""A specific change to a definition in your schema.""" type Change{affectedQueries:[AffectedQuery!]"""Target arg of change made.""" argNode:NamedIntrospectionArg """Indication of the category of the change (e.g. addition, removal, edit).""" category:ChangeCategory!"""Node related to the top level node that was changed, such as a field in an object, a value in an enum or the object of an interface.""" childNode:NamedIntrospectionValue """Indication of the kind of target and action of the change, e.g. 'TYPE_REMOVED'.""" code:String!"""Human-readable description of the change.""" description:String!"""Top level node affected by the change.""" parentNode:NamedIntrospectionType """Severity of the change, either failure or warning.""" severity:ChangeSeverity!"""Indication of the success of the overall change, either failure, warning, or notice.""" type:ChangeType!@deprecated(reason:"use severity instead")}"""Defines a set of categories that a schema change @@ -26,13 +26,13 @@ ancestor) was added/removed.""" type ChangeSummary{"""Counts for changes to fiel and all aspects of enums, unions, and scalars.""" type:TypeChangeSummaryCounts!}enum ChangeType{FAILURE NOTICE}type ChangelogLaunchResult{createdAt:Timestamp!schemaTagID:ID!}"""Destination for notifications""" interface Channel{id:ID!name:String!subscriptions:[ChannelSubscription!]!}interface ChannelSubscription{channels:[Channel!]!enabled:Boolean!id:ID!variant:String}type CheckConfiguration{"""Time when check configuration was created""" createdAt:Timestamp!"""Clients to ignore during validation""" excludedClients:[ClientFilter!]!"""Operation names to ignore during validation""" excludedOperationNames:[OperationNameFilter]"""Operations to ignore during validation""" excludedOperations:[ExcludedOperation!]!"""Graph that this check configuration belongs to""" graphID:ID!"""ID of the check configuration""" id:ID!"""Default configuration to include operations on the base variant.""" includeBaseVariant:Boolean!"""Variant overrides for validation""" includedVariants:[String!]!"""Minimum number of requests within the window for an operation to be considered.""" operationCountThreshold:Int!"""Number of requests within the window for an operation to be considered, relative to total request count. Expected values are between 0 and 0.05 (minimum 5% of total request volume)""" operationCountThresholdPercentage:Float!"""Only check operations from the last seconds. 
-The default is 7 days (604,800 seconds).""" timeRangeSeconds:Long!"""Time when check configuration was last updated""" updatedAt:Timestamp!"""Identity of the last user to update the check configuration""" updatedBy:Identity}"""Filter options available when listing checks.""" input CheckFilterInput{authors:[String!]branches:[String!]status:CheckFilterInputStatusOption subgraphs:[String!]}"""Options for filtering CheckWorkflows by status""" enum CheckFilterInputStatusOption{FAILED PASSED PENDING}"""The result of performing a subgraph check, including all steps.""" type CheckPartialSchemaResult{"""Overall result of the check. This will be null if composition validation was unsuccessful.""" checkSchemaResult:CheckSchemaResult """Result of compostion run as part of the overall subgraph check.""" compositionValidationResult:CompositionValidationResult!"""If any modifications were detected in the composed core schema""" coreSchemaModified:Boolean!"""Check workflow associated with the overall subgraph check.""" workflow:CheckWorkflow}type CheckSchemaResult{"""Schema diff and affected operations generated by the schema check""" diffToPrevious:SchemaDiff!"""ID of the operations check that was created""" operationsCheckID:ID!"""Generated url to view schema diff in Engine""" targetUrl:String """Workflow associated with this check result""" workflow:CheckWorkflow}type CheckWorkflow{"""The variant provided as a base to check against. Only the differences from the +The default is 7 days (604,800 seconds).""" timeRangeSeconds:Long!"""Time when check configuration was last updated""" updatedAt:Timestamp!"""Identity of the last user to update the check configuration""" updatedBy:Identity}"""Filter options available when listing checks.""" input CheckFilterInput{authors:[String!]branches:[String!]status:CheckFilterInputStatusOption subgraphs:[String!]}"""Options for filtering CheckWorkflows by status""" enum CheckFilterInputStatusOption{FAILED PASSED PENDING}"""The result of performing a subgraph check, including all steps.""" type CheckPartialSchemaResult{"""Overall result of the check. This will be null if composition validation was unsuccessful.""" checkSchemaResult:CheckSchemaResult """Result of compostion run as part of the overall subgraph check.""" compositionValidationResult:CompositionValidationResult!"""Whether any modifications were detected in the composed core schema.""" coreSchemaModified:Boolean!"""Check workflow associated with the overall subgraph check.""" workflow:CheckWorkflow}type CheckSchemaResult{"""Schema diff and affected operations generated by the schema check""" diffToPrevious:SchemaDiff!"""ID of the operations check that was created""" operationsCheckID:ID!"""Generated url to view schema diff in Engine""" targetUrl:String """Workflow associated with this check result""" workflow:CheckWorkflow}type CheckWorkflow{"""The variant provided as a base to check against. 
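# Illustrative sketch, not part of the schema file above: the CheckConfiguration fields
# documented here, with the defaults spelled out. The default window of 7 days is
# 7 * 24 * 3600 = 604,800 seconds; operationCountThresholdPercentage is a fraction of
# total request volume between 0 and 0.05.
fragment CheckConfig on CheckConfiguration {
  id
  graphID
  includedVariants
  includeBaseVariant
  excludedClients { name version }
  excludedOperations { ID }
  operationCountThreshold            # absolute request-count floor per operation
  operationCountThresholdPercentage  # relative floor, between 0 and 0.05
  timeRangeSeconds                   # window to consider; default 604800 (7 days)
  updatedAt
}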
Only the differences from the base schema will be tested in operations checks.""" baseVariant:GraphVariant completedAt:Timestamp createdAt:Timestamp!"""Contextual parameters supplied by the runtime environment where the check was run.""" gitContext:GitContext id:ID!"""The name of the implementing service that was responsible for triggering the validation.""" implementingServiceName:String """If this check is triggered for an sdl fetched using introspection, this is the endpoint where that schema was being served.""" introspectionEndpoint:String """Only true if the check was triggered from Sandbox Checks page.""" isSandboxCheck:Boolean!"""If this check was created by rerunning, the original check that was rerun.""" rerunOf:CheckWorkflow """Checks created by re-running this check, most recent first.""" reruns(limit:Int!=20):[CheckWorkflow!]startedAt:Timestamp """Overall status of the workflow, based on the underlying task statuses.""" status:CheckWorkflowStatus!"""The set of check tasks associated with this workflow, e.g. OperationsCheck, GraphComposition, etc.""" tasks:[CheckWorkflowTask!]!"""Identity of the user who ran this check""" triggeredBy:Identity """Configuration of validation at the time the check was run.""" validationConfig:SchemaDiffValidationConfig}type CheckWorkflowMutation{"""Re-run a check workflow using the current configuration. A new workflow is created and returned.""" rerun:CheckWorkflowRerunResult}type CheckWorkflowRerunResult{"""Check workflow created by re-running.""" result:CheckWorkflow """Check workflow that was rerun.""" source:CheckWorkflow}enum CheckWorkflowStatus{FAILED PASSED PENDING}interface CheckWorkflowTask{completedAt:Timestamp createdAt:Timestamp!id:ID!status:CheckWorkflowTaskStatus!"""The workflow that this task belongs to.""" workflow:CheckWorkflow!}enum CheckWorkflowTaskStatus{BLOCKED FAILED PASSED PENDING}"""Client filter configuration for a graph.""" type ClientFilter{"""name of the client set by the user and reported alongside metrics""" name:String """version of the client set by the user and reported alongside metrics""" version:String}"""Options to filter by client reference ID, client name, and client version. If passing client version, make sure to either provide a client reference ID or client name.""" input ClientFilterInput{"""name of the client set by the user and reported alongside metrics""" name:String """version of the client set by the user and reported alongside metrics""" version:String}"""Filter options to exclude by client reference ID, client name, and client version.""" input ClientInfoFilter{name:String """Ignored""" referenceID:ID version:String}"""Filter options to exclude clients. Used as an output type for SchemaDiffValidationConfig.""" type ClientInfoFilterOutput{name:String version:String}enum ComparisonOperator{EQUALS GREATER_THAN GREATER_THAN_OR_EQUAL_TO LESS_THAN LESS_THAN_OR_EQUAL_TO NOT_EQUALS UNRECOGNIZED}"""The result of composition run in the cloud, upon an attempted subgraph deletion.""" type CompositionAndRemoveResult{"""The produced composition config. Will be null if there are any errors""" compositionConfig:CompositionConfig """Whether the removed implementing service existed.""" didExist:Boolean!""" List of errors during composition. Errors mean that Apollo was unable to compose the graph variant's subgraphs into a GraphQL schema. 
If present, gateways / routers are not updated.""" errors:[SchemaCompositionError]!"""ID that points to the results of composition.""" graphCompositionID:String!"""List of subgraphs that are included in this composition.""" subgraphConfigs:[SubgraphConfig!]!"""Whether the gateway/router was updated via Uplink, or would have been for dry runs.""" updatedGateway:Boolean!}"""The result of composition run in the cloud, upon attempted publish of a subgraph.""" type CompositionAndUpsertResult{"""The produced composition config, or null if there are any errors.""" compositionConfig:CompositionConfig """List of errors during composition. Errors mean that Apollo was unable to compose the graph variant's subgraphs into a supergraph schema. If present, gateways / routers -are not updated.""" errors:[SchemaCompositionError]!"""ID that points to the results of composition.""" graphCompositionID:String!"""Copy text for the launch result of a publish.""" launchCliCopy:String """Link to corresponding launches page on Studio if avaliable.""" launchUrl:String """List of subgraphs that are included in this composition.""" subgraphConfigs:[SubgraphConfig!]!"""Whether the gateway/router was updated via Uplink, or would have been for dry runs.""" updatedGateway:Boolean!"""Whether a subgraph was created as part of this mutation.""" wasCreated:Boolean!"""Whether an implementingService was updated as part of this mutation""" wasUpdated:Boolean!}type CompositionBuildInput{subgraphs:[Subgraph!]!version:String}type CompositionCheckTask implements CheckWorkflowTask{completedAt:Timestamp createdAt:Timestamp!id:ID!"""The result of the composition.""" result:CompositionResult status:CheckWorkflowTaskStatus!workflow:CheckWorkflow!}"""Composition configuration exposed to the gateway.""" type CompositionConfig{"""List of GCS links for implementing services that comprise a composed graph. Is empty if tag/inaccessible is enabled.""" implementingServiceLocations:[ImplementingServiceLocation!]!@deprecated(reason:"Soon we will stop writing to GCS locations")"""Hash of the API schema.""" schemaHash:String!}"""The result of composition run in the cloud.""" type CompositionPublishResult implements CompositionResult{"""The produced composition config. Will be null if there are any errors""" compositionConfig:CompositionConfig """Supergraph SDL generated by composition (this is not the CSDL, that is a deprecated format).""" csdl:GraphQLDocument@deprecated(reason:"Use supergraphSdl instead")"""List of errors during composition. 
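# Illustrative sketch, not part of the schema file above: the outcome fields of a
# subgraph publish (CompositionAndUpsertResult). If composition errors are present the
# gateway/router is not updated; otherwise updatedGateway reports whether Uplink was
# (or, for a dry run, would have been) updated.
fragment PublishOutcome on CompositionAndUpsertResult {
  graphCompositionID
  launchUrl        # Studio launches page, when available
  launchCliCopy
  updatedGateway
  wasCreated       # whether this publish created the subgraph
  wasUpdated
}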
Errors mean that Apollo was unable to compose the +are not updated.""" errors:[SchemaCompositionError]!"""ID that points to the results of composition.""" graphCompositionID:String!"""Copy text for the launch result of a publish.""" launchCliCopy:String """Link to corresponding launches page on Studio if available.""" launchUrl:String """List of subgraphs that are included in this composition.""" subgraphConfigs:[SubgraphConfig!]!"""Whether the gateway/router was updated via Uplink, or would have been for dry runs.""" updatedGateway:Boolean!"""Whether a subgraph was created as part of this mutation.""" wasCreated:Boolean!"""Whether an implementingService was updated as part of this mutation""" wasUpdated:Boolean!}type CompositionBuildInput{subgraphs:[Subgraph!]!version:String}type CompositionCheckTask implements CheckWorkflowTask{completedAt:Timestamp createdAt:Timestamp!id:ID!"""The result of the composition.""" result:CompositionResult status:CheckWorkflowTaskStatus!workflow:CheckWorkflow!}"""Composition configuration exposed to the gateway.""" type CompositionConfig{"""List of GCS links for implementing services that comprise a composed graph. Is empty if tag/inaccessible is enabled.""" implementingServiceLocations:[ImplementingServiceLocation!]!@deprecated(reason:"Soon we will stop writing to GCS locations")"""Hash of the API schema.""" schemaHash:String!}"""The result of composition run in the cloud.""" type CompositionPublishResult implements CompositionResult{"""The produced composition config. Will be null if there are any errors""" compositionConfig:CompositionConfig """Supergraph SDL generated by composition (this is not the CSDL, that is a deprecated format).""" csdl:GraphQLDocument@deprecated(reason:"Use supergraphSdl instead")"""List of errors during composition. Errors mean that Apollo was unable to compose the graph variant's subgraphs into a supergraph schema. If present, gateways / routers are not updated.""" errors:[SchemaCompositionError!]!"""ID for a particular composition.""" graphCompositionID:ID!"""List of subgraphs that are included in this composition.""" subgraphConfigs:[SubgraphConfig!]!"""Supergraph SDL generated by composition.""" supergraphSdl:GraphQLDocument """Whether the gateway/router was updated via Uplink, or would have been for dry runs.""" updatedGateway:Boolean!webhookNotificationBody:String}"""Result of a composition, often as the result of a subgraph check or subgraph publish. See implementations for more details.""" interface CompositionResult{"""Supergraph SDL generated by composition (this is not the cSDL, a deprecated format).""" csdl:GraphQLDocument@deprecated(reason:"Use supergraphSdl instead")"""List of errors during composition. Errors mean that Apollo was unable to compose the @@ -41,12 +41,12 @@ are not updated.""" errors:[SchemaCompositionError!]!"""Globally unique identifi in running composition. Will be null if any errors are encountered. Also may contain a schema hash if one could be computed, which can be used for schema validation.""" compositionValidationDetails:CompositionValidationDetails """Supergraph SDL generated by composition (this is not the CSDL, that is a deprecated format).""" csdl:GraphQLDocument@deprecated(reason:"Use supergraphSdl instead")"""List of errors during composition. Errors mean that Apollo was unable to compose the graph variant's subgraphs into a supergraph schema. 
If present, gateways / routers -are not updated.""" errors:[SchemaCompositionError!]!"""ID that points to the results of this composition.""" graphCompositionID:ID!"""The implementing service that was responsible for triggering the validation""" proposedImplementingService:FederatedImplementingServicePartialSchema!"""List of subgraphs that are included in this composition.""" subgraphConfigs:[SubgraphConfig!]!"""Supergraph schema document generated by composition.""" supergraphSdl:GraphQLDocument """If created as part of a check workflow, the associated workflow task.""" workflowTask:CompositionCheckTask}type ContractPreview{result:ContractPreviewResult!upstreamLaunch:Launch!}type ContractPreviewErrors{errors:[String!]!failedAt:ContractVariantFailedStep!}union ContractPreviewResult=ContractPreviewErrors|ContractPreviewSuccess type ContractPreviewSuccess{apiDocument:String!coreDocument:String!fieldCount:Int!typeCount:Int!}enum ContractVariantFailedStep{ADD_DIRECTIVE_DEFINITIONS_IF_NOT_PRESENT DIRECTIVE_DEFINITION_LOCATION_AUGMENTING EMPTY_OBJECT_AND_INTERFACE_MASKING EMPTY_UNION_MASKING INPUT_VALIDATION PARSING PARSING_TAG_DIRECTIVES PARTIAL_INTERFACE_MASKING SCHEMA_RETRIEVAL TAG_INHERITING TAG_MATCHING TO_API_SCHEMA TO_FILTER_SCHEMA UNKNOWN VERSION_CHECK}type ContractVariantPreviewErrors{errorMessages:[String!]!failedStep:ContractVariantFailedStep!}union ContractVariantPreviewResult=ContractVariantPreviewErrors|ContractVariantPreviewSuccess type ContractVariantPreviewSuccess{baseApiSchema:String!baseCoreSchema:String!contractApiSchema:String!contractCoreSchema:String!}type ContractVariantUpsertErrors{errorMessages:[String!]!}union ContractVariantUpsertResult=ContractVariantUpsertErrors|ContractVariantUpsertSuccess type ContractVariantUpsertSuccess{contractVariant:GraphVariant!}type CoreSchema{apiDocument:GraphQLDocument!coreDocument:GraphQLDocument!coreHash:String!fieldCount:Int!typeCount:Int!}union CreateOperationCollectionResult=OperationCollection|PermissionError|ValidationError type CronExecution{completedAt:Timestamp failure:String id:ID!job:CronJob!resolvedAt:Timestamp resolvedBy:Actor schedule:String!startedAt:Timestamp!}type CronJob{group:String!name:String!recentExecutions(n:Int):[CronExecution!]!}enum DatadogApiRegion{EU EU1 US US1 US1FED US3 US5}type DatadogMetricsConfig{apiKey:String!apiRegion:DatadogApiRegion!enabled:Boolean!legacyMetricNames:Boolean!}union DeleteOperationCollectionResult=DeleteOperationCollectionSuccess|PermissionError type DeleteOperationCollectionSuccess{sandboxOwner:User variants:[GraphVariant!]!}"""The result of attempting to delete a graph variant.""" type DeleteSchemaTagResult{"""WHether a variant was deleted or not.""" deleted:Boolean!}enum DeletionTargetType{ACCOUNT USER}"""Support for a single directive on a graph variant""" type DirectiveSupportStatus{"""whether the directive is supported on the current graph variant""" enabled:Boolean!"""name of the directive""" name:String!}union DuplicateOperationCollectionResult=OperationCollection|PermissionError|ValidationError type DurationHistogram{averageDurationMs:Float buckets:[DurationHistogramBucket!]!durationMs("""Percentile (between 0 and 1)""" percentile:Float!):Float """Counts per durationBucket, where sequences of zeroes are replaced with the negative of their size""" sparseBuckets:[Long!]!totalCount:Long!totalDurationMs:Float!}type DurationHistogramBucket{count:Long!index:Int!rangeBeginMs:Float!rangeEndMs:Float!}input EdgeServerInfo{"""A randomly generated UUID, immutable for the lifetime of the 
edge server runtime.""" bootId:String!"""A unique identifier for the executable GraphQL served by the edge server. length must be <= 64 characters.""" executableSchemaId:String!"""The graph variant, defaults to 'current'""" graphVariant:String!="current" """The version of the edge server reporting agent, e.g. apollo-server-2.8, graphql-java-3.1, etc. length must be <= 256 characters.""" libraryVersion:String """The infra environment in which this edge server is running, e.g. localhost, Kubernetes, AWS Lambda, Google CloudRun, AWS ECS, etc. length must be <= 256 characters.""" platform:String """The runtime in which the edge server is running, e.g. node 12.03, zulu8.46.0.19-ca-jdk8.0.252-macosx_x64, etc. length must be <= 256 characters.""" runtimeVersion:String """If available, an identifier for the edge server instance, such that when restarting this instance it will have the same serverId, with a different bootId. For example, in Kubernetes this might be the pod name. Length must be <= 256 characters.""" serverId:String """An identifier used to distinguish the version (from the user's perspective) of the edge server's code itself. For instance, the git sha of the server's repository or the docker sha of the associated image this server runs with. Length must be <= 256 characters.""" userVersion:String}"""Columns of EdgeServerInfos.""" enum EdgeServerInfosColumn{BOOT_ID EXECUTABLE_SCHEMA_ID LIBRARY_VERSION PLATFORM RUNTIME_VERSION SCHEMA_TAG SERVER_ID SERVICE_ID TIMESTAMP USER_VERSION}type EdgeServerInfosDimensions{bootId:ID executableSchemaId:ID libraryVersion:String platform:String runtimeVersion:String schemaTag:String serverId:ID serviceId:ID userVersion:String}"""Filter for data in EdgeServerInfos. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input EdgeServerInfosFilter{and:[EdgeServerInfosFilter!]"""Selects rows whose bootId dimension equals the given value if not null. To query for the null value, use {in: {bootId: [null]}} instead.""" bootId:ID """Selects rows whose executableSchemaId dimension equals the given value if not null. To query for the null value, use {in: {executableSchemaId: [null]}} instead.""" executableSchemaId:ID in:EdgeServerInfosFilterIn """Selects rows whose libraryVersion dimension equals the given value if not null. To query for the null value, use {in: {libraryVersion: [null]}} instead.""" libraryVersion:String not:EdgeServerInfosFilter or:[EdgeServerInfosFilter!]"""Selects rows whose platform dimension equals the given value if not null. To query for the null value, use {in: {platform: [null]}} instead.""" platform:String """Selects rows whose runtimeVersion dimension equals the given value if not null. To query for the null value, use {in: {runtimeVersion: [null]}} instead.""" runtimeVersion:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serverId dimension equals the given value if not null. To query for the null value, use {in: {serverId: [null]}} instead.""" serverId:ID """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID """Selects rows whose userVersion dimension equals the given value if not null. To query for the null value, use {in: {userVersion: [null]}} instead.""" userVersion:String}"""Filter for data in EdgeServerInfos. 
Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input EdgeServerInfosFilterIn{"""Selects rows whose bootId dimension is in the given list. A null value in the list means a row with null for that dimension.""" bootId:[ID]"""Selects rows whose executableSchemaId dimension is in the given list. A null value in the list means a row with null for that dimension.""" executableSchemaId:[ID]"""Selects rows whose libraryVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" libraryVersion:[String]"""Selects rows whose platform dimension is in the given list. A null value in the list means a row with null for that dimension.""" platform:[String]"""Selects rows whose runtimeVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" runtimeVersion:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serverId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serverId:[ID]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]"""Selects rows whose userVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" userVersion:[String]}input EdgeServerInfosOrderBySpec{column:EdgeServerInfosColumn!direction:Ordering!}type EdgeServerInfosRecord{"""Dimensions of EdgeServerInfos that can be grouped by.""" groupBy:EdgeServerInfosDimensions!"""Starting segment timestamp.""" timestamp:Timestamp!}enum EmailCategory{EDUCATIONAL}type EmailPreferences{email:String!subscriptions:[EmailCategory!]!unsubscribedFromAll:Boolean!}interface Error{message:String!}"""Columns of ErrorStats.""" enum ErrorStatsColumn{ACCOUNT_ID CLIENT_NAME CLIENT_VERSION ERRORS_COUNT PATH QUERY_ID QUERY_NAME REQUESTS_WITH_ERRORS_COUNT SCHEMA_HASH SCHEMA_TAG SERVICE_ID TIMESTAMP}type ErrorStatsDimensions{accountId:ID clientName:String clientVersion:String path:String queryId:ID queryName:String schemaHash:String schemaTag:String serviceId:ID}"""Filter for data in ErrorStats. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input ErrorStatsFilter{"""Selects rows whose accountId dimension equals the given value if not null. To query for the null value, use {in: {accountId: [null]}} instead.""" accountId:ID and:[ErrorStatsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String in:ErrorStatsFilterIn not:ErrorStatsFilter or:[ErrorStatsFilter!]"""Selects rows whose path dimension equals the given value if not null. To query for the null value, use {in: {path: [null]}} instead.""" path:String """Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. 
To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in ErrorStats. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input ErrorStatsFilterIn{"""Selects rows whose accountId dimension is in the given list. A null value in the list means a row with null for that dimension.""" accountId:[ID]"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose path dimension is in the given list. A null value in the list means a row with null for that dimension.""" path:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type ErrorStatsMetrics{errorsCount:Long!requestsWithErrorsCount:Long!}input ErrorStatsOrderBySpec{column:ErrorStatsColumn!direction:Ordering!}type ErrorStatsRecord{"""Dimensions of ErrorStats that can be grouped by.""" groupBy:ErrorStatsDimensions!"""Metrics of ErrorStats that can be aggregated over.""" metrics:ErrorStatsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}""" Input parameters for run explorer operation event.""" enum EventEnum{CLICK_CHECK_LIST CLICK_GO_TO_GRAPH_SETTINGS RUN_EXPLORER_OPERATION}"""Excluded operation for a graph.""" type ExcludedOperation{"""Operation ID to exclude from schema check.""" ID:ID!}"""Option to filter by operation ID.""" input ExcludedOperationInput{"""Operation ID to exclude from schema check.""" ID:ID!}type FeatureIntros{devGraph:Boolean!federatedGraph:Boolean!freeConsumerSeats:Boolean!}"""Feature Intros Input Type""" input FeatureIntrosInput{devGraph:Boolean federatedGraph:Boolean freeConsumerSeats:Boolean}"""Subgraph. Federated graph variants that are managed by Apollo Studio are composed of subgraphs. 
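# Illustrative sketch, not part of the schema file above: reading the DurationHistogram
# type defined earlier in this hunk. sparseBuckets run-length encodes empty buckets: a
# negative entry -n stands for n consecutive zero-count buckets, so [3, -5, 2] decodes
# to [3, 0, 0, 0, 0, 0, 2]. durationMs takes a percentile between 0 and 1.
fragment HistogramShape on DurationHistogram {
  totalCount
  totalDurationMs
  averageDurationMs
  p95: durationMs(percentile: 0.95)
  sparseBuckets
  buckets { index count rangeBeginMs rangeEndMs }
}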
+are not updated.""" errors:[SchemaCompositionError!]!"""ID that points to the results of this composition.""" graphCompositionID:ID!"""The implementing service that was responsible for triggering the validation""" proposedImplementingService:FederatedImplementingServicePartialSchema!"""List of subgraphs that are included in this composition.""" subgraphConfigs:[SubgraphConfig!]!"""Supergraph schema document generated by composition.""" supergraphSdl:GraphQLDocument """If created as part of a check workflow, the associated workflow task.""" workflowTask:CompositionCheckTask}type ContractPreview{result:ContractPreviewResult!upstreamLaunch:Launch!}type ContractPreviewErrors{errors:[String!]!failedAt:ContractVariantFailedStep!}union ContractPreviewResult=ContractPreviewErrors|ContractPreviewSuccess type ContractPreviewSuccess{apiDocument:String!coreDocument:String!fieldCount:Int!typeCount:Int!}enum ContractVariantFailedStep{ADD_DIRECTIVE_DEFINITIONS_IF_NOT_PRESENT DIRECTIVE_DEFINITION_LOCATION_AUGMENTING EMPTY_ENUM_MASKING EMPTY_INPUT_OBJECT_MASKING EMPTY_OBJECT_AND_INTERFACE_FIELD_MASKING EMPTY_OBJECT_AND_INTERFACE_MASKING EMPTY_UNION_MASKING INPUT_VALIDATION PARSING PARSING_TAG_DIRECTIVES PARTIAL_INTERFACE_MASKING SCHEMA_RETRIEVAL TAG_INHERITING TAG_MATCHING TO_API_SCHEMA TO_FILTER_SCHEMA UNKNOWN VERSION_CHECK}type ContractVariantPreviewErrors{errorMessages:[String!]!failedStep:ContractVariantFailedStep!}union ContractVariantPreviewResult=ContractVariantPreviewErrors|ContractVariantPreviewSuccess type ContractVariantPreviewSuccess{baseApiSchema:String!baseCoreSchema:String!contractApiSchema:String!contractCoreSchema:String!}type ContractVariantUpsertErrors{errorMessages:[String!]!}union ContractVariantUpsertResult=ContractVariantUpsertErrors|ContractVariantUpsertSuccess type ContractVariantUpsertSuccess{contractVariant:GraphVariant!}type CoreSchema{apiDocument:GraphQLDocument!coreDocument:GraphQLDocument!coreHash:String!fieldCount:Int!tags:[String!]!typeCount:Int!}union CreateOperationCollectionResult=OperationCollection|PermissionError|ValidationError type CronExecution{completedAt:Timestamp failure:String id:ID!job:CronJob!resolvedAt:Timestamp resolvedBy:Actor schedule:String!startedAt:Timestamp!}type CronJob{group:String!name:String!recentExecutions(n:Int):[CronExecution!]!}enum DatadogApiRegion{EU EU1 US US1 US1FED US3 US5}type DatadogMetricsConfig{apiKey:String!apiRegion:DatadogApiRegion!enabled:Boolean!legacyMetricNames:Boolean!}union DeleteOperationCollectionResult=DeleteOperationCollectionSuccess|PermissionError type DeleteOperationCollectionSuccess{sandboxOwner:User variants:[GraphVariant!]!}"""The result of attempting to delete a graph variant.""" type DeleteSchemaTagResult{"""WHether a variant was deleted or not.""" deleted:Boolean!}enum DeletionTargetType{ACCOUNT USER}"""Support for a single directive on a graph variant""" type DirectiveSupportStatus{"""whether the directive is supported on the current graph variant""" enabled:Boolean!"""name of the directive""" name:String!}union DuplicateOperationCollectionResult=OperationCollection|PermissionError|ValidationError type DurationHistogram{averageDurationMs:Float buckets:[DurationHistogramBucket!]!durationMs("""Percentile (between 0 and 1)""" percentile:Float!):Float """Counts per durationBucket, where sequences of zeroes are replaced with the negative of their size""" sparseBuckets:[Long!]!totalCount:Long!totalDurationMs:Float!}type DurationHistogramBucket{count:Long!index:Int!rangeBeginMs:Float!rangeEndMs:Float!}input 
EdgeServerInfo{"""A randomly generated UUID, immutable for the lifetime of the edge server runtime.""" bootId:String!"""A unique identifier for the executable GraphQL served by the edge server. length must be <= 64 characters.""" executableSchemaId:String!"""The graph variant, defaults to 'current'""" graphVariant:String!="current" """The version of the edge server reporting agent, e.g. apollo-server-2.8, graphql-java-3.1, etc. length must be <= 256 characters.""" libraryVersion:String """The infra environment in which this edge server is running, e.g. localhost, Kubernetes, AWS Lambda, Google CloudRun, AWS ECS, etc. length must be <= 256 characters.""" platform:String """The runtime in which the edge server is running, e.g. node 12.03, zulu8.46.0.19-ca-jdk8.0.252-macosx_x64, etc. length must be <= 256 characters.""" runtimeVersion:String """If available, an identifier for the edge server instance, such that when restarting this instance it will have the same serverId, with a different bootId. For example, in Kubernetes this might be the pod name. Length must be <= 256 characters.""" serverId:String """An identifier used to distinguish the version (from the user's perspective) of the edge server's code itself. For instance, the git sha of the server's repository or the docker sha of the associated image this server runs with. Length must be <= 256 characters.""" userVersion:String}"""Columns of EdgeServerInfos.""" enum EdgeServerInfosColumn{BOOT_ID EXECUTABLE_SCHEMA_ID LIBRARY_VERSION PLATFORM RUNTIME_VERSION SCHEMA_TAG SERVER_ID SERVICE_ID TIMESTAMP USER_VERSION}type EdgeServerInfosDimensions{bootId:ID executableSchemaId:ID libraryVersion:String platform:String runtimeVersion:String schemaTag:String serverId:ID serviceId:ID userVersion:String}"""Filter for data in EdgeServerInfos. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input EdgeServerInfosFilter{and:[EdgeServerInfosFilter!]"""Selects rows whose bootId dimension equals the given value if not null. To query for the null value, use {in: {bootId: [null]}} instead.""" bootId:ID """Selects rows whose executableSchemaId dimension equals the given value if not null. To query for the null value, use {in: {executableSchemaId: [null]}} instead.""" executableSchemaId:ID in:EdgeServerInfosFilterIn """Selects rows whose libraryVersion dimension equals the given value if not null. To query for the null value, use {in: {libraryVersion: [null]}} instead.""" libraryVersion:String not:EdgeServerInfosFilter or:[EdgeServerInfosFilter!]"""Selects rows whose platform dimension equals the given value if not null. To query for the null value, use {in: {platform: [null]}} instead.""" platform:String """Selects rows whose runtimeVersion dimension equals the given value if not null. To query for the null value, use {in: {runtimeVersion: [null]}} instead.""" runtimeVersion:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serverId dimension equals the given value if not null. To query for the null value, use {in: {serverId: [null]}} instead.""" serverId:ID """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID """Selects rows whose userVersion dimension equals the given value if not null. 
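# Illustrative sketch, not part of the schema file above: the identifiers an edge
# server reports via EdgeServerInfo (bootId regenerated per process start, serverId
# stable across restarts, graphVariant defaulting to "current", free-form strings
# capped at 64/256 characters) come back as the dimensions of EdgeServerInfosRecord,
# which, unlike the *Stats record types, carries no metrics.
fragment ServerInfoRow on EdgeServerInfosRecord {
  timestamp
  groupBy {
    serverId            # stable per instance (e.g. pod name)
    bootId              # new UUID per process start
    executableSchemaId
    platform
    runtimeVersion
    libraryVersion
    userVersion
  }
}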
To query for the null value, use {in: {userVersion: [null]}} instead.""" userVersion:String}"""Filter for data in EdgeServerInfos. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input EdgeServerInfosFilterIn{"""Selects rows whose bootId dimension is in the given list. A null value in the list means a row with null for that dimension.""" bootId:[ID]"""Selects rows whose executableSchemaId dimension is in the given list. A null value in the list means a row with null for that dimension.""" executableSchemaId:[ID]"""Selects rows whose libraryVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" libraryVersion:[String]"""Selects rows whose platform dimension is in the given list. A null value in the list means a row with null for that dimension.""" platform:[String]"""Selects rows whose runtimeVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" runtimeVersion:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serverId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serverId:[ID]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]"""Selects rows whose userVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" userVersion:[String]}input EdgeServerInfosOrderBySpec{column:EdgeServerInfosColumn!direction:Ordering!}type EdgeServerInfosRecord{"""Dimensions of EdgeServerInfos that can be grouped by.""" groupBy:EdgeServerInfosDimensions!"""Starting segment timestamp.""" timestamp:Timestamp!}enum EmailCategory{EDUCATIONAL}type EmailPreferences{email:String!subscriptions:[EmailCategory!]!unsubscribedFromAll:Boolean!}interface Error{message:String!}"""Columns of ErrorStats.""" enum ErrorStatsColumn{ACCOUNT_ID CLIENT_NAME CLIENT_VERSION ERRORS_COUNT PATH QUERY_ID QUERY_NAME REQUESTS_WITH_ERRORS_COUNT SCHEMA_HASH SCHEMA_TAG SERVICE_ID TIMESTAMP}type ErrorStatsDimensions{accountId:ID clientName:String clientVersion:String path:String queryId:ID queryName:String schemaHash:String schemaTag:String serviceId:ID}"""Filter for data in ErrorStats. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input ErrorStatsFilter{"""Selects rows whose accountId dimension equals the given value if not null. To query for the null value, use {in: {accountId: [null]}} instead.""" accountId:ID and:[ErrorStatsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String in:ErrorStatsFilterIn not:ErrorStatsFilter or:[ErrorStatsFilter!]"""Selects rows whose path dimension equals the given value if not null. To query for the null value, use {in: {path: [null]}} instead.""" path:String """Selects rows whose queryId dimension equals the given value if not null. 
To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in ErrorStats. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input ErrorStatsFilterIn{"""Selects rows whose accountId dimension is in the given list. A null value in the list means a row with null for that dimension.""" accountId:[ID]"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose path dimension is in the given list. A null value in the list means a row with null for that dimension.""" path:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type ErrorStatsMetrics{errorsCount:Long!requestsWithErrorsCount:Long!}input ErrorStatsOrderBySpec{column:ErrorStatsColumn!direction:Ordering!}type ErrorStatsRecord{"""Dimensions of ErrorStats that can be grouped by.""" groupBy:ErrorStatsDimensions!"""Metrics of ErrorStats that can be aggregated over.""" metrics:ErrorStatsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}""" Input parameters for run explorer operation event.""" enum EventEnum{CLICK_CHECK_LIST CLICK_GO_TO_GRAPH_SETTINGS RUN_EXPLORER_OPERATION}"""Excluded operation for a graph.""" type ExcludedOperation{"""Operation ID to exclude from schema check.""" ID:ID!}"""Option to filter by operation ID.""" input ExcludedOperationInput{"""Operation ID to exclude from schema check.""" ID:ID!}type FeatureIntros{devGraph:Boolean!federatedGraph:Boolean!freeConsumerSeats:Boolean!}"""Feature Intros Input Type""" input FeatureIntrosInput{devGraph:Boolean federatedGraph:Boolean freeConsumerSeats:Boolean}"""Subgraph. Federated graph variants that are managed by Apollo Studio are composed of subgraphs. 
See https://www.apollographql.com/docs/federation/managed-federation/overview/ for more information.""" type FederatedImplementingService{"""The subgraph schema actively published, used for composition for the graph variant this subgraph belongs to.""" activePartialSchema:PartialSchema!"""Timestamp of when this subgraph was created.""" createdAt:Timestamp!"""The ID of the graph this subgraph belongs to.""" graphID:String!"""Which variant of a graph this subgraph belongs to.""" graphVariant:String!"""Name of the subgraph.""" name:String!"""The particular version/edition of a subgraph, entered by users. Typically a Git SHA or docker image ID.""" revision:String!"""Timestamp for when this subgraph was updated.""" updatedAt:Timestamp!"""URL of the subgraph's GraphQL endpoint.""" url:String}"""A minimal representation of a federated implementing service, using only a name and partial schema SDL""" type FederatedImplementingServicePartialSchema{"""The name of the implementing service""" name:String!"""The partial schema of the implementing service""" sdl:String!}"""Container for a list of subgraphs composing a graph.""" type FederatedImplementingServices{"""The list of underlying subgraphs.""" services:[FederatedImplementingService!]!}"""Counts of changes at the field level, including objects, interfaces, and input fields.""" type FieldChangeSummaryCounts{"""Number of changes that are additions of fields to object, interface, and input types.""" additions:Int!"""Number of changes that are field edits. This includes fields changing type and any field deprecation and description changes, but also includes any argument changes and any input object -field changes.""" edits:Int!"""Number of changes that are removals of fields from object, interface, and input types.""" removals:Int!}"""Columns of FieldLatencies.""" enum FieldLatenciesColumn{FIELD_HISTOGRAM FIELD_NAME PARENT_TYPE SCHEMA_HASH SCHEMA_TAG SERVICE_ID TIMESTAMP}type FieldLatenciesDimensions{field:String fieldName:String parentType:String schemaHash:String schemaTag:String serviceId:ID}"""Filter for data in FieldLatencies. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input FieldLatenciesFilter{and:[FieldLatenciesFilter!]"""Selects rows whose fieldName dimension equals the given value if not null. To query for the null value, use {in: {fieldName: [null]}} instead.""" fieldName:String in:FieldLatenciesFilterIn not:FieldLatenciesFilter or:[FieldLatenciesFilter!]"""Selects rows whose parentType dimension equals the given value if not null. To query for the null value, use {in: {parentType: [null]}} instead.""" parentType:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in FieldLatencies. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input FieldLatenciesFilterIn{"""Selects rows whose fieldName dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" fieldName:[String]"""Selects rows whose parentType dimension is in the given list. A null value in the list means a row with null for that dimension.""" parentType:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type FieldLatenciesMetrics{fieldHistogram:DurationHistogram!}input FieldLatenciesOrderBySpec{column:FieldLatenciesColumn!direction:Ordering!}type FieldLatenciesRecord{"""Dimensions of FieldLatencies that can be grouped by.""" groupBy:FieldLatenciesDimensions!"""Metrics of FieldLatencies that can be aggregated over.""" metrics:FieldLatenciesMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of FieldUsage.""" enum FieldUsageColumn{CLIENT_NAME CLIENT_VERSION ESTIMATED_EXECUTION_COUNT EXECUTION_COUNT FIELD_NAME PARENT_TYPE QUERY_ID QUERY_NAME REFERENCING_OPERATION_COUNT SCHEMA_HASH SCHEMA_TAG SERVICE_ID TIMESTAMP}type FieldUsageDimensions{clientName:String clientVersion:String fieldName:String parentType:String queryId:ID queryName:String schemaHash:String schemaTag:String serviceId:ID}"""Filter for data in FieldUsage. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input FieldUsageFilter{and:[FieldUsageFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose fieldName dimension equals the given value if not null. To query for the null value, use {in: {fieldName: [null]}} instead.""" fieldName:String in:FieldUsageFilterIn not:FieldUsageFilter or:[FieldUsageFilter!]"""Selects rows whose parentType dimension equals the given value if not null. To query for the null value, use {in: {parentType: [null]}} instead.""" parentType:String """Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in FieldUsage. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input FieldUsageFilterIn{"""Selects rows whose clientName dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose fieldName dimension is in the given list. A null value in the list means a row with null for that dimension.""" fieldName:[String]"""Selects rows whose parentType dimension is in the given list. A null value in the list means a row with null for that dimension.""" parentType:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type FieldUsageMetrics{estimatedExecutionCount:Long!executionCount:Long!referencingOperationCount:Long!}input FieldUsageOrderBySpec{column:FieldUsageColumn!direction:Ordering!}type FieldUsageRecord{"""Dimensions of FieldUsage that can be grouped by.""" groupBy:FieldUsageDimensions!"""Metrics of FieldUsage that can be aggregated over.""" metrics:FieldUsageMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}type FilterBuildInput{filterConfig:FilterConfig!schemaHash:String!}type FilterConfig{exclude:[String!]!include:[String!]!}input FilterConfigInput{exclude:[String!]!include:[String!]!}type GitContext{branch:String commit:ID commitUrl:String committer:String message:String remoteHost:GitRemoteHost remoteUrl:String}"""This is stored with a schema when it is uploaded""" input GitContextInput{branch:String commit:ID committer:String message:String remoteUrl:String}enum GitRemoteHost{BITBUCKET GITHUB GITLAB}type GlobalExperimentalFeatures{operationsCollections:Boolean!sandboxesFullRelease:Boolean!sandboxesPreview:Boolean!sandboxesSchemaChecksPage:Boolean!sandboxesSchemaDiffPage:Boolean!subgraphsInSandbox:Boolean!}type GraphApiKey implements ApiKey{createdAt:Timestamp!createdBy:Identity id:ID!keyName:String role:UserPermission!token:String!}"""A union of all combinations that can comprise the implementingServices for a Service""" union GraphImplementors=FederatedImplementingServices|NonFederatedImplementingService scalar GraphQLDocument """A variant of a graph, often corresponding to an environment where a graph runs (e.g. staging). 
-See https://www.apollographql.com/docs/studio/org/graphs/ for more details.""" type GraphVariant{"""As new schema tags keep getting published, activeSchemaPublish refers to the latest.""" activeSchemaPublish:SchemaTag """The version of composition currently in use, if applicable""" compositionVersion:String """Filter configuration used to create the contract schema""" contractFilterConfig:FilterConfig """Preview a Contract schema built from this source variant.""" contractPreview(filters:FilterConfigInput!):ContractPreview!"""Explorer setting for default headers for a graph""" defaultHeaders:String derivedVariantCount:Int!"""Graph the variant belongs to.""" graph:Service!"""Graph ID of the variant. Prefer using graph { id } when feasible.""" graphId:String!"""If the variant has managed subgraphs.""" hasManagedSubgraphs:Boolean """Global identifier for the graph variant, in the form `graph@variant`.""" id:ID!"""Represents whether this variant is a Contract.""" isContract:Boolean!"""Is this variant one of the current user's favorite variants?""" isFavoriteOfCurrentUser:Boolean!"""If the variant has managed subgraphs.""" isFederated:Boolean@deprecated(reason:"Replaced by hasManagedSubgraphs")"""If the variant is protected""" isProtected:Boolean!isPublic:Boolean!"""Represents whether this variant should be listed in the public variants directory. This can only be true if the variant is also public.""" isPubliclyListed:Boolean!"""Represents whether Apollo has verified the authenticity of this public variant. This can only be true if the variant is also public.""" isVerified:Boolean!"""Latest approved launch for the variant, and what is served through Uplink.""" latestApprovedLaunch:Launch """Latest launch for the variant, whether successful or not.""" latestLaunch:Launch """Latest publication for the variant.""" latestPublication:SchemaTag launch(id:ID!):Launch launchHistory(limit:Int!=100):[Launch!]!links:[LinkInfo!]"""Name of the variant, like `variant`.""" name:String!operationCollections:[OperationCollection!]!"""Which permissions the current user has for interacting with this variant""" permissions:GraphVariantPermissions!"""Generate a federated operation plan for a given operation""" plan(document:GraphQLDocument!operationName:String):QueryPlan """Explorer setting for preflight script to run before the actual GraphQL operations is run.""" preflightScript:String readme:Readme """Registry stats for this particular graph variant""" registryStatsWindow(from:Timestamp!resolution:Resolution to:Timestamp):RegistryStatsWindow """The total number of requests for this variant in the last 24 hours""" requestsInLastDay:Long """If the graphql endpoint is set up to accept cookies.""" sendCookies:Boolean sourceVariant:GraphVariant """List of subgraphs that comprise a variant, null if not federated. -Set includeDeleted to see deleted subgraphs.""" subgraphs(includeDeleted:Boolean!=false):[FederatedImplementingService!]"""URL where subscription operations can be executed.""" subscriptionUrl:String """A list of supported directives""" supportedDirectives:[DirectiveSupportStatus!]"""URL where non-subscription operations can be executed.""" url:String """The last instant that usage information (e.g. 
operation stat, client stats) was reported for this variant""" usageLastReportedAt:Timestamp}"""Result of looking up a variant by ref""" union GraphVariantLookup=GraphVariant|InvalidRefFormat """Modifies a variant of a graph, also called a schema tag in parts of our product.""" type GraphVariantMutation{addLinkToVariant(title:String type:LinkInfoType!url:String!):GraphVariant!configureComposition(enableTagAndInaccessible:Boolean version:String):GraphVariant enableTagAndInaccessible(enabled:Boolean!):GraphVariant@deprecated(reason:"Use configureComposition instead")"""Graph ID of the variant""" graphId:String!"""Global identifier for the graph variant, in the form `graph@variant`.""" id:ID!"""Name of the variant, like `variant`.""" name:String!relaunch:RelaunchResult!removeLinkFromVariant(linkInfoId:ID!):GraphVariant!setIsFavoriteOfCurrentUser(favorite:Boolean!):GraphVariant!updateDefaultHeaders(defaultHeaders:String):GraphVariant updateIsProtected(isProtected:Boolean!):GraphVariant updatePreflightScript(preflightScript:String):GraphVariant updateSendCookies(sendCookies:Boolean!):GraphVariant updateSubscriptionURL(subscriptionUrl:String):GraphVariant updateURL(url:String):GraphVariant updateVariantIsPublic(isPublic:Boolean!):GraphVariant updateVariantIsPubliclyListed(isPubliclyListed:Boolean!):GraphVariant updateVariantIsVerified(isVerified:Boolean!):GraphVariant updateVariantReadme(readme:String!):GraphVariant}"""A map from permission String to boolean that the currently authenticated user is allowed for a particular graph variant.""" type GraphVariantPermissions{canCreateCollectionInVariant:Boolean!"""Whether the currently authenticated user is permitted to manage/update the build configuration (e.g. build pipeline version) for this variant.""" canManageBuildConfig:Boolean!"""Whether the currently authenticated user is permitted to update variant-level settings for the Schema Explorer.""" canManageExplorerSettings:Boolean!"""Whether the currently authenticated user is permitted to publish schemas to this variant.""" canPushSchemas:Boolean!"""Whether the currently authenticated user is permitted to view details regarding the build configuration (e.g. build pipeline version) for this variant.""" canQueryBuildConfig:Boolean!"""Whether the currently authenticated user is permitted to download schemas associated to this variant.""" canQuerySchemas:Boolean!canShareCollectionInVariant:Boolean!canUpdateVariantLinkInfo:Boolean!"""Whether the currently authenticated user is permitted to update the README for this variant.""" canUpdateVariantReadme:Boolean!variantId:ID!}enum HTTPMethod{CONNECT DELETE GET HEAD OPTIONS PATCH POST PUT TRACE UNKNOWN UNRECOGNIZED}input HistoricQueryParameters{"""A list of clients to filter out during validation.""" excludedClients:[ClientInfoFilter!]=null """A list of operation names to filter out during validation.""" excludedOperationNames:[OperationNameFilterInput!]=null from:Timestamp="-86400" """A list of operation IDs to filter out during validation.""" ignoredOperations:[ID!]=null """A list of variants to include in the validation. 
If no variants are provided +field changes.""" edits:Int!"""Number of changes that are removals of fields from object, interface, and input types.""" removals:Int!}"""Columns of FieldExecutions.""" enum FieldExecutionsColumn{ESTIMATED_EXECUTION_COUNT FIELD_NAME OBSERVED_EXECUTION_COUNT PARENT_TYPE REFERENCING_OPERATION_COUNT SCHEMA_TAG SERVICE_ID TIMESTAMP}type FieldExecutionsDimensions{fieldName:String parentType:String schemaTag:String serviceId:ID}"""Filter for data in FieldExecutions. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input FieldExecutionsFilter{and:[FieldExecutionsFilter!]"""Selects rows whose fieldName dimension equals the given value if not null. To query for the null value, use {in: {fieldName: [null]}} instead.""" fieldName:String in:FieldExecutionsFilterIn not:FieldExecutionsFilter or:[FieldExecutionsFilter!]"""Selects rows whose parentType dimension equals the given value if not null. To query for the null value, use {in: {parentType: [null]}} instead.""" parentType:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in FieldExecutions. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input FieldExecutionsFilterIn{"""Selects rows whose fieldName dimension is in the given list. A null value in the list means a row with null for that dimension.""" fieldName:[String]"""Selects rows whose parentType dimension is in the given list. A null value in the list means a row with null for that dimension.""" parentType:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type FieldExecutionsMetrics{estimatedExecutionCount:Long!observedExecutionCount:Long!referencingOperationCount:Long!}input FieldExecutionsOrderBySpec{column:FieldExecutionsColumn!direction:Ordering!}type FieldExecutionsRecord{"""Dimensions of FieldExecutions that can be grouped by.""" groupBy:FieldExecutionsDimensions!"""Metrics of FieldExecutions that can be aggregated over.""" metrics:FieldExecutionsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of FieldLatencies.""" enum FieldLatenciesColumn{FIELD_HISTOGRAM FIELD_NAME PARENT_TYPE SCHEMA_HASH SCHEMA_TAG SERVICE_ID TIMESTAMP}type FieldLatenciesDimensions{field:String fieldName:String parentType:String schemaHash:String schemaTag:String serviceId:ID}"""Filter for data in FieldLatencies. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input FieldLatenciesFilter{and:[FieldLatenciesFilter!]"""Selects rows whose fieldName dimension equals the given value if not null. To query for the null value, use {in: {fieldName: [null]}} instead.""" fieldName:String in:FieldLatenciesFilterIn not:FieldLatenciesFilter or:[FieldLatenciesFilter!]"""Selects rows whose parentType dimension equals the given value if not null. 
To query for the null value, use {in: {parentType: [null]}} instead.""" parentType:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in FieldLatencies. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input FieldLatenciesFilterIn{"""Selects rows whose fieldName dimension is in the given list. A null value in the list means a row with null for that dimension.""" fieldName:[String]"""Selects rows whose parentType dimension is in the given list. A null value in the list means a row with null for that dimension.""" parentType:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type FieldLatenciesMetrics{fieldHistogram:DurationHistogram!}input FieldLatenciesOrderBySpec{column:FieldLatenciesColumn!direction:Ordering!}type FieldLatenciesRecord{"""Dimensions of FieldLatencies that can be grouped by.""" groupBy:FieldLatenciesDimensions!"""Metrics of FieldLatencies that can be aggregated over.""" metrics:FieldLatenciesMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of FieldRequestsByClientVersion.""" enum FieldRequestsByClientVersionColumn{CLIENT_NAME CLIENT_VERSION ESTIMATED_EXECUTION_COUNT FIELD_NAME OBSERVED_EXECUTION_COUNT PARENT_TYPE REFERENCING_OPERATION_COUNT SCHEMA_TAG SERVICE_ID TIMESTAMP}type FieldRequestsByClientVersionDimensions{clientName:String clientVersion:String fieldName:String parentType:String schemaTag:String serviceId:ID}"""Filter for data in FieldRequestsByClientVersion. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input FieldRequestsByClientVersionFilter{and:[FieldRequestsByClientVersionFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose fieldName dimension equals the given value if not null. To query for the null value, use {in: {fieldName: [null]}} instead.""" fieldName:String in:FieldRequestsByClientVersionFilterIn not:FieldRequestsByClientVersionFilter or:[FieldRequestsByClientVersionFilter!]"""Selects rows whose parentType dimension equals the given value if not null. To query for the null value, use {in: {parentType: [null]}} instead.""" parentType:String """Selects rows whose schemaTag dimension equals the given value if not null. 
To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in FieldRequestsByClientVersion. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input FieldRequestsByClientVersionFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose fieldName dimension is in the given list. A null value in the list means a row with null for that dimension.""" fieldName:[String]"""Selects rows whose parentType dimension is in the given list. A null value in the list means a row with null for that dimension.""" parentType:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type FieldRequestsByClientVersionMetrics{estimatedExecutionCount:Long!observedExecutionCount:Long!referencingOperationCount:Long!}input FieldRequestsByClientVersionOrderBySpec{column:FieldRequestsByClientVersionColumn!direction:Ordering!}type FieldRequestsByClientVersionRecord{"""Dimensions of FieldRequestsByClientVersion that can be grouped by.""" groupBy:FieldRequestsByClientVersionDimensions!"""Metrics of FieldRequestsByClientVersion that can be aggregated over.""" metrics:FieldRequestsByClientVersionMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of FieldUsage.""" enum FieldUsageColumn{CLIENT_NAME CLIENT_VERSION ESTIMATED_EXECUTION_COUNT EXECUTION_COUNT FIELD_NAME PARENT_TYPE QUERY_ID QUERY_NAME REFERENCING_OPERATION_COUNT SCHEMA_HASH SCHEMA_TAG SERVICE_ID TIMESTAMP}type FieldUsageDimensions{clientName:String clientVersion:String fieldName:String parentType:String queryId:ID queryName:String schemaHash:String schemaTag:String serviceId:ID}"""Filter for data in FieldUsage. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input FieldUsageFilter{and:[FieldUsageFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose fieldName dimension equals the given value if not null. To query for the null value, use {in: {fieldName: [null]}} instead.""" fieldName:String in:FieldUsageFilterIn not:FieldUsageFilter or:[FieldUsageFilter!]"""Selects rows whose parentType dimension equals the given value if not null. To query for the null value, use {in: {parentType: [null]}} instead.""" parentType:String """Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. 
To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in FieldUsage. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input FieldUsageFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose fieldName dimension is in the given list. A null value in the list means a row with null for that dimension.""" fieldName:[String]"""Selects rows whose parentType dimension is in the given list. A null value in the list means a row with null for that dimension.""" parentType:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type FieldUsageMetrics{estimatedExecutionCount:Long!executionCount:Long!referencingOperationCount:Long!}input FieldUsageOrderBySpec{column:FieldUsageColumn!direction:Ordering!}type FieldUsageRecord{"""Dimensions of FieldUsage that can be grouped by.""" groupBy:FieldUsageDimensions!"""Metrics of FieldUsage that can be aggregated over.""" metrics:FieldUsageMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}type FilterBuildInput{filterConfig:FilterConfig!schemaHash:String!}type FilterConfig{exclude:[String!]!include:[String!]!}input FilterConfigInput{exclude:[String!]!include:[String!]!}type GitContext{branch:String commit:ID commitUrl:String committer:String message:String remoteHost:GitRemoteHost remoteUrl:String}"""This is stored with a schema when it is uploaded""" input GitContextInput{branch:String commit:ID committer:String message:String remoteUrl:String}enum GitRemoteHost{BITBUCKET GITHUB GITLAB}type GlobalExperimentalFeatures{operationsCollections:Boolean!sandboxesFullRelease:Boolean!sandboxesPreview:Boolean!sandboxesSchemaChecksPage:Boolean!sandboxesSchemaDiffPage:Boolean!subgraphsInSandbox:Boolean!}type GraphApiKey implements ApiKey{createdAt:Timestamp!createdBy:Identity id:ID!keyName:String role:UserPermission!token:String!}"""A union of all combinations that can comprise the implementingServices for a Service""" union GraphImplementors=FederatedImplementingServices|NonFederatedImplementingService scalar GraphQLDocument """A variant of a graph, often corresponding to an environment where a graph runs (e.g. staging). +See https://www.apollographql.com/docs/studio/org/graphs/ for more details.""" type GraphVariant{"""As new schema tags keep getting published, activeSchemaPublish refers to the latest.""" activeSchemaPublish:SchemaTag """The version of composition currently in use, if applicable""" compositionVersion:String """Filter configuration used to create the contract schema""" contractFilterConfig:FilterConfig """Preview a Contract schema built from this source variant.""" contractPreview(filters:FilterConfigInput!):ContractPreview!defaultHeaders:String@deprecated(reason:"Use sharedHeaders instead")derivedVariantCount:Int!"""Graph the variant belongs to.""" graph:Service!"""Graph ID of the variant. Prefer using graph { id } when feasible.""" graphId:String!"""If the variant has managed subgraphs.""" hasManagedSubgraphs:Boolean """Global identifier for the graph variant, in the form `graph@variant`.""" id:ID!"""Represents whether this variant is a Contract.""" isContract:Boolean!"""Is this variant one of the current user's favorite variants?""" isFavoriteOfCurrentUser:Boolean!"""If the variant has managed subgraphs.""" isFederated:Boolean@deprecated(reason:"Replaced by hasManagedSubgraphs")"""If the variant is protected""" isProtected:Boolean!isPublic:Boolean!"""Represents whether this variant should be listed in the public variants directory. This can only be true if the variant is also public.""" isPubliclyListed:Boolean!"""Represents whether Apollo has verified the authenticity of this public variant. 
This can only be true if the variant is also public.""" isVerified:Boolean!"""Latest approved launch for the variant, and what is served through Uplink.""" latestApprovedLaunch:Launch """Latest launch for the variant, whether successful or not.""" latestLaunch:Launch """Latest publication for the variant.""" latestPublication:SchemaTag launch(id:ID!):Launch launchHistory(limit:Int!=100):[Launch!]!links:[LinkInfo!]"""Name of the variant, like `variant`.""" name:String!operationCollections:[OperationCollection!]!"""Which permissions the current user has for interacting with this variant""" permissions:GraphVariantPermissions!"""Generate a federated operation plan for a given operation""" plan(document:GraphQLDocument!operationName:String):QueryPlan """Explorer setting for preflight script to run before the actual GraphQL operations is run.""" preflightScript:String readme:Readme """Registry stats for this particular graph variant""" registryStatsWindow(from:Timestamp!resolution:Resolution to:Timestamp):RegistryStatsWindow """The total number of requests for this variant in the last 24 hours""" requestsInLastDay:Long """If the graphql endpoint is set up to accept cookies.""" sendCookies:Boolean """Explorer setting for shared headers for a graph""" sharedHeaders:String sourceVariant:GraphVariant """Subgraph of a given name, null if non-existent.""" subgraph(name:ID!):FederatedImplementingService """List of subgraphs that comprise a variant, null if not federated. +Set includeDeleted to see deleted subgraphs.""" subgraphs(includeDeleted:Boolean!=false):[FederatedImplementingService!]"""URL where subscription operations can be executed.""" subscriptionUrl:String """A list of supported directives""" supportedDirectives:[DirectiveSupportStatus!]"""URL where non-subscription operations can be executed.""" url:String """The last instant that usage information (e.g. 
operation stat, client stats) was reported for this variant""" usageLastReportedAt:Timestamp}"""Result of looking up a variant by ref""" union GraphVariantLookup=GraphVariant|InvalidRefFormat """Modifies a variant of a graph, also called a schema tag in parts of our product.""" type GraphVariantMutation{addLinkToVariant(title:String type:LinkInfoType!url:String!):GraphVariant!configureComposition(enableTagAndInaccessible:Boolean version:String):GraphVariant """Delete the variant.""" delete:DeleteSchemaTagResult!enableTagAndInaccessible(enabled:Boolean!):GraphVariant@deprecated(reason:"Use configureComposition instead")"""Graph ID of the variant""" graphId:String!"""Global identifier for the graph variant, in the form `graph@variant`.""" id:ID!"""Name of the variant, like `variant`.""" name:String!relaunch:RelaunchResult!removeLinkFromVariant(linkInfoId:ID!):GraphVariant!setIsFavoriteOfCurrentUser(favorite:Boolean!):GraphVariant!updateDefaultHeaders(defaultHeaders:String):GraphVariant@deprecated(reason:"Use updateSharedHeaders instead")updateIsProtected(isProtected:Boolean!):GraphVariant updatePreflightScript(preflightScript:String):GraphVariant updateSendCookies(sendCookies:Boolean!):GraphVariant updateSharedHeaders(sharedHeaders:String):GraphVariant updateSubscriptionURL(subscriptionUrl:String):GraphVariant updateURL(url:String):GraphVariant updateVariantIsPublic(isPublic:Boolean!):GraphVariant updateVariantIsPubliclyListed(isPubliclyListed:Boolean!):GraphVariant updateVariantIsVerified(isVerified:Boolean!):GraphVariant updateVariantReadme(readme:String!):GraphVariant}"""A map from permission String to boolean that the currently authenticated user is allowed for a particular graph variant.""" type GraphVariantPermissions{canCreateCollectionInVariant:Boolean!"""Whether the currently authenticated user is permitted to manage/update the build configuration (e.g. build pipeline version) for this variant.""" canManageBuildConfig:Boolean!"""Whether the currently authenticated user is permitted to update variant-level settings for the Schema Explorer.""" canManageExplorerSettings:Boolean!"""Whether the currently authenticated user is permitted to publish schemas to this variant.""" canPushSchemas:Boolean!"""Whether the currently authenticated user is permitted to view details regarding the build configuration (e.g. build pipeline version) for this variant.""" canQueryBuildConfig:Boolean!"""Whether the currently authenticated user is permitted to download schemas associated to this variant.""" canQuerySchemas:Boolean!canShareCollectionInVariant:Boolean!canUpdateVariantLinkInfo:Boolean!"""Whether the currently authenticated user is permitted to update the README for this variant.""" canUpdateVariantReadme:Boolean!variantId:ID!}enum HTTPMethod{CONNECT DELETE GET HEAD OPTIONS PATCH POST PUT TRACE UNKNOWN UNRECOGNIZED}input HistoricQueryParameters{"""A list of clients to filter out during validation.""" excludedClients:[ClientInfoFilter!]=null """A list of operation names to filter out during validation.""" excludedOperationNames:[OperationNameFilterInput!]=null from:Timestamp="-86400" """A list of operation IDs to filter out during validation.""" ignoredOperations:[ID!]=null """A list of variants to include in the validation. If no variants are provided then this defaults to the "current" variant along with the base variant. The base variant indicates the schema that generates diff and marks the metrics that are checked for broken queries. 
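A sketch of a HistoricQueryParameters value built from the defaults documented above; the check field that would accept it is not part of this hunk, so the wrapper is a placeholder:

    # `schemaCheck` is a placeholder field name; only the HistoricQueryParameters and
    # OperationNameFilterInput shapes come from this hunk.
    schemaCheck(historicParameters: {
      from: "-86400"                       # documented default (86,400 seconds = 24 hours)
      to: "-0"                             # documented default
      queryCountThreshold: 1
      queryCountThresholdPercentage: 0.01  # expected range 0-0.05, relative to total request count
      includedVariants: ["current"]
      excludedOperationNames: [{ name: "IntrospectionQuery" }]
    }) { __typename }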
We union this base variant with the untagged values('', @@ -54,10 +54,10 @@ same as null inside of `in`, and 'current') in this metrics fetch. This strategy supports users who have not tagged their metrics or schema.""" includedVariants:[String!]=null """Minimum number of requests within the window for a query to be considered.""" queryCountThreshold:Int=1 """Number of requests within the window for a query to be considered, relative to total request count. Expected values are between 0 and 0.05 (minimum 5% of total request volume)""" queryCountThresholdPercentage:Float=0 to:Timestamp="-0"}"""An identity (e.g. Anonymous, a specific User) within Apollo Studio. See implementations.""" interface Identity{"""A view of the identity as an Actor type.""" asActor:Actor!"""An identifier for a given identity, unique within the context of the identity type.""" id:ID!"""A human-readable name for the identity in question.""" name:String!}"""An actor's identity and info about the client they used to perform the action""" type IdentityAndClientInfo{"""Client name provided when the actor performed the action""" clientName:String """Client version provided when the actor performed the action""" clientVersion:String """Identity info about the actor""" identity:Identity}union IdentityMutation=ServiceMutation|UserMutation type IgnoreOperationsInChecksResult{graph:Service!}"""The location of the implementing service config file in storage""" type ImplementingServiceLocation{"""The name of the implementing service""" name:String!"""The path in storage to access the implementing service config file""" path:String!}type InternalAdminUser{role:InternalMdgAdminRole!userID:String!}type InternalIdentity implements Identity{accounts:[Account!]!asActor:Actor!email:String id:ID!name:String!}enum InternalMdgAdminRole{INTERNAL_MDG_READ_ONLY INTERNAL_MDG_SALES INTERNAL_MDG_SUPER_ADMIN INTERNAL_MDG_SUPPORT}type IntrospectionDirective{args:[IntrospectionInputValue!]!description:String locations:[IntrospectionDirectiveLocation!]!name:String!}input IntrospectionDirectiveInput{args:[IntrospectionInputValueInput!]!description:String isRepeatable:Boolean locations:[IntrospectionDirectiveLocation!]!name:String!}"""__DirectiveLocation introspection type""" enum IntrospectionDirectiveLocation{"""Location adjacent to an argument definition.""" ARGUMENT_DEFINITION """Location adjacent to an enum definition.""" ENUM """Location adjacent to an enum value definition.""" ENUM_VALUE """Location adjacent to a field.""" FIELD """Location adjacent to a field definition.""" FIELD_DEFINITION """Location adjacent to a fragment definition.""" FRAGMENT_DEFINITION """Location adjacent to a fragment spread.""" FRAGMENT_SPREAD """Location adjacent to an inline fragment.""" INLINE_FRAGMENT """Location adjacent to an input object field definition.""" INPUT_FIELD_DEFINITION """Location adjacent to an input object type definition.""" INPUT_OBJECT """Location adjacent to an interface definition.""" INTERFACE """Location adjacent to a mutation operation.""" MUTATION """Location adjacent to an object type definition.""" OBJECT """Location adjacent to a query operation.""" QUERY """Location adjacent to a scalar definition.""" SCALAR """Location adjacent to a schema definition.""" SCHEMA """Location adjacent to a subscription operation.""" SUBSCRIPTION """Location adjacent to a union definition.""" UNION """Location adjacent to a variable definition.""" VARIABLE_DEFINITION}"""Values associated with introspection result for an enum value""" type 
IntrospectionEnumValue{depreactionReason:String@deprecated(reason:"Use deprecationReason instead")deprecationReason:String description:String isDeprecated:Boolean!name:String!}"""__EnumValue introspection type""" input IntrospectionEnumValueInput{deprecationReason:String description:String isDeprecated:Boolean!name:String!}"""Values associated with introspection result for field""" type IntrospectionField{args:[IntrospectionInputValue!]!deprecationReason:String description:String isDeprecated:Boolean!name:String!type:IntrospectionType!}"""__Field introspection type""" input IntrospectionFieldInput{args:[IntrospectionInputValueInput!]!deprecationReason:String description:String isDeprecated:Boolean!name:String!type:IntrospectionTypeInput!}"""Values associated with introspection result for an input field""" type IntrospectionInputValue{defaultValue:String description:String name:String!type:IntrospectionType!}"""__Value introspection type""" input IntrospectionInputValueInput{defaultValue:String deprecationReason:String description:String isDeprecated:Boolean name:String!type:IntrospectionTypeInput!}type IntrospectionSchema{directives:[IntrospectionDirective!]!mutationType:IntrospectionType queryType:IntrospectionType!subscriptionType:IntrospectionType types(filter:TypeFilterConfig={includeAbstractTypes:true includeBuiltInTypes:true includeIntrospectionTypes:true}):[IntrospectionType!]!}"""__Schema introspection type""" input IntrospectionSchemaInput{description:String directives:[IntrospectionDirectiveInput!]!mutationType:IntrospectionTypeRefInput queryType:IntrospectionTypeRefInput!subscriptionType:IntrospectionTypeRefInput types:[IntrospectionTypeInput!]}"""Object containing all possible values for an introspectionType""" type IntrospectionType{"""the base kind of the type this references, ignoring lists and nullability""" baseKind:IntrospectionTypeKind description:String enumValues(includeDeprecated:Boolean=false):[IntrospectionEnumValue!]fields:[IntrospectionField!]inputFields:[IntrospectionInputValue!]interfaces:[IntrospectionType!]kind:IntrospectionTypeKind name:String ofType:IntrospectionType possibleTypes:[IntrospectionType!]"""printed representation of type, including nested nullability and list ofTypes""" printed:String!}"""__Type introspection type""" input IntrospectionTypeInput{description:String enumValues:[IntrospectionEnumValueInput!]fields:[IntrospectionFieldInput!]inputFields:[IntrospectionInputValueInput!]interfaces:[IntrospectionTypeInput!]kind:IntrospectionTypeKind!name:String ofType:IntrospectionTypeInput possibleTypes:[IntrospectionTypeInput!]specifiedByUrl:String}enum IntrospectionTypeKind{"""Indicates this type is an enum. 'enumValues' is a valid field.""" ENUM """Indicates this type is an input object. 'inputFields' is a valid field.""" INPUT_OBJECT """Indicates this type is an interface. 'fields' and 'possibleTypes' are valid -fields""" INTERFACE """Indicates this type is a list. 'ofType' is a valid field.""" LIST """Indicates this type is a non-null. 'ofType' is a valid field.""" NON_NULL """Indicates this type is an object. 'fields' and 'interfaces' are valid fields.""" OBJECT """Indicates this type is a scalar.""" SCALAR """Indicates this type is a union. 
'possibleTypes' is a valid field.""" UNION}"""Shallow __Type introspection type""" input IntrospectionTypeRefInput{kind:String name:String!}type InvalidOperation{errors:[OperationValidationError!]signature:ID!}"""Type returned by reference lookup when the reference was invalid""" type InvalidRefFormat implements Error{message:String!}type InvalidTarget implements Error{message:String!}type Invoice{closedAt:Timestamp collectionMethod:String createdAt:Timestamp!invoiceNumber:Int!state:InvoiceState!totalInCents:Int!updatedAt:Timestamp!uuid:ID!}enum InvoiceState{COLLECTED FAILED OPEN PAST_DUE UNKNOWN}enum InvoiceStateV2{COLLECTED FAILED OPEN PAST_DUE UNKNOWN}type InvoiceV2{closedAt:Timestamp collectionMethod:String createdAt:Timestamp!invoiceNumber:Int!state:InvoiceStateV2!totalInCents:Int!updatedAt:Timestamp!uuid:ID!}type Launch{approvedAt:Timestamp build:Build buildInput:BuildInput!completedAt:Timestamp createdAt:Timestamp!downstreamLaunches:[Launch!]!graphId:String!graphVariant:String!id:ID!isAvailable:Boolean isCompleted:Boolean isPublished:Boolean isTarget:Boolean latestSequenceStep:LaunchSequenceStep publication:SchemaTag results:[LaunchResult!]!schemaTag:SchemaTag sequence:[LaunchSequenceStep!]!shortenedID:String!status:LaunchStatus!subgraphChanges:[SubgraphChange!]supersededAt:Timestamp supersededBy:Launch upstreamLaunch:Launch}"""more result types will be supported in the future""" union LaunchResult=ChangelogLaunchResult type LaunchSequenceBuildStep{completedAt:Timestamp startedAt:Timestamp}type LaunchSequenceCheckStep{completedAt:Timestamp startedAt:Timestamp}type LaunchSequenceCompletedStep{completedAt:Timestamp}type LaunchSequenceInitiatedStep{startedAt:Timestamp}type LaunchSequencePublishStep{completedAt:Timestamp startedAt:Timestamp}union LaunchSequenceStep=LaunchSequenceBuildStep|LaunchSequenceCheckStep|LaunchSequenceCompletedStep|LaunchSequenceInitiatedStep|LaunchSequencePublishStep|LaunchSequenceSupersededStep type LaunchSequenceSupersededStep{completedAt:Timestamp}enum LaunchStatus{LAUNCH_COMPLETED LAUNCH_FAILED LAUNCH_INITIATED}type LinkInfo{createdAt:Timestamp!id:ID!title:String type:LinkInfoType!url:String!}enum LinkInfoType{DEVELOPER_PORTAL OTHER REPOSITORY}"""Long type""" scalar Long type MarkChangesForOperationAsSafeResult{"""Nice to have for the frontend since the Apollo cache is already watching for AffectedQuery to update. +fields""" INTERFACE """Indicates this type is a list. 'ofType' is a valid field.""" LIST """Indicates this type is a non-null. 'ofType' is a valid field.""" NON_NULL """Indicates this type is an object. 'fields' and 'interfaces' are valid fields.""" OBJECT """Indicates this type is a scalar.""" SCALAR """Indicates this type is a union. 
'possibleTypes' is a valid field.""" UNION}"""Shallow __Type introspection type""" input IntrospectionTypeRefInput{kind:String name:String!}type InvalidOperation{errors:[OperationValidationError!]signature:ID!}"""Type returned by reference lookup when the reference was invalid""" type InvalidRefFormat implements Error{message:String!}type InvalidTarget implements Error{message:String!}type Invoice{closedAt:Timestamp collectionMethod:String createdAt:Timestamp!invoiceNumber:Int!state:InvoiceState!totalInCents:Int!updatedAt:Timestamp!uuid:ID!}enum InvoiceState{COLLECTED FAILED OPEN PAST_DUE UNKNOWN}enum InvoiceStateV2{COLLECTED FAILED OPEN PAST_DUE UNKNOWN}type InvoiceV2{closedAt:Timestamp collectionMethod:String createdAt:Timestamp!invoiceNumber:Int!state:InvoiceStateV2!totalInCents:Int!updatedAt:Timestamp!uuid:ID!}"""A Launch represents the complete process of making a set of updates to your deployed graph.""" type Launch{"""The time at which this launch was approved.""" approvedAt:Timestamp """The build for the variant being launched. Is non-null once the build is initiated.""" build:Build """Set of items that will be passed to the build.""" buildInput:BuildInput!"""The time at which this launch completed.""" completedAt:Timestamp """The time at which this launch initiated.""" createdAt:Timestamp!"""Contract launches that were triggered by this launch.""" downstreamLaunches:[Launch!]!"""The ID of the graph that this launch was initiated for.""" graphId:String!"""The name of the variant that this launch was initiated for.""" graphVariant:String!"""Unique identifier for this launch.""" id:ID!isAvailable:Boolean """Whether the launch completed.""" isCompleted:Boolean """Whether the launch was published.""" isPublished:Boolean isTarget:Boolean """Returns the most recent launch sequence step.""" latestSequenceStep:LaunchSequenceStep """A specific publication of a graph variant pertaining to this launch.""" publication:SchemaTag """The outcome of the launch.""" results:[LaunchResult!]!schemaTag:SchemaTag """This represents a sequence in the Launch. Returns a list of sequence steps that represents points of time in the launch.""" sequence:[LaunchSequenceStep!]!"""A shortened version of Launch.id. 
Contains the first 8 characters of the ID.""" shortenedID:String!"""The status of the launch.""" status:LaunchStatus!"""Changes that were made to the subgraphs for this launch.""" subgraphChanges:[SubgraphChange!]"""The time at which this launch was superseded by another launch.""" supersededAt:Timestamp """Represents the launch that caused this launch to not continue/publish.""" supersededBy:Launch """Upstream launch represents the launch of the source variant.""" upstreamLaunch:Launch}"""more result types will be supported in the future""" union LaunchResult=ChangelogLaunchResult type LaunchSequenceBuildStep{completedAt:Timestamp startedAt:Timestamp}type LaunchSequenceCheckStep{completedAt:Timestamp startedAt:Timestamp}type LaunchSequenceCompletedStep{completedAt:Timestamp}type LaunchSequenceInitiatedStep{startedAt:Timestamp}type LaunchSequencePublishStep{completedAt:Timestamp startedAt:Timestamp}union LaunchSequenceStep=LaunchSequenceBuildStep|LaunchSequenceCheckStep|LaunchSequenceCompletedStep|LaunchSequenceInitiatedStep|LaunchSequencePublishStep|LaunchSequenceSupersededStep type LaunchSequenceSupersededStep{completedAt:Timestamp}enum LaunchStatus{LAUNCH_COMPLETED LAUNCH_FAILED LAUNCH_INITIATED}type LinkInfo{createdAt:Timestamp!id:ID!title:String type:LinkInfoType!url:String!}enum LinkInfoType{DEVELOPER_PORTAL OTHER REPOSITORY}"""Long type""" scalar Long type MarkChangesForOperationAsSafeResult{"""Nice to have for the frontend since the Apollo cache is already watching for AffectedQuery to update. This might return null if no behavior changes were found for the affected operation ID. This is a weird situation that should never happen.""" affectedOperation:AffectedQuery message:String!success:Boolean!}type MediaUploadInfo{csrfToken:String!maxContentLength:Int!url:String!}union MoveOperationCollectionEntryResult=InvalidTarget|MoveOperationCollectionEntrySuccess|PermissionError type MoveOperationCollectionEntrySuccess{operation:OperationCollectionEntry!originCollection:OperationCollection!targetCollection:OperationCollection!}type Mutation{account(id:ID!):AccountMutation """Creates an operation collection for the given variantRefs, or make a sandbox collection without variantRefs.""" createOperationCollection(description:String editRoles:[UserPermission!]isSandbox:Boolean!isShared:Boolean!name:String!variantRefs:[ID!]):CreateOperationCollectionResult!"""Finalize a password reset with a token included in the E-mail link, -returns the corresponding login email when successful""" finalizePasswordReset(newPassword:String!resetToken:String!):String """Mutation a graph.""" graph(id:ID!):ServiceMutation """Join an account with a token""" joinAccount(accountId:ID!joinToken:String!):Account me:IdentityMutation newAccount(companyUrl:String id:ID!):Account newService(accountId:ID!description:String hiddenFromUninvitedNonAdminAccountMembers:Boolean!=false id:ID!isDev:Boolean!=false name:String title:String):Service operationCollection(id:ID!):OperationCollectionMutation """Refresh all plans from third-party billing service""" plansRefreshBilling:Void """Report a running GraphQL server's schema.""" reportSchema("""Only sent if previously requested i.e. received ReportSchemaResult with withCoreSchema = true. This is a GraphQL schema document as a string. 
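A sketch tying the Launch fields documented above to the variant-level entry points earlier in this hunk (GraphVariant.launchHistory and the GraphVariantLookup union); how a GraphVariant is reached in the first place is outside this hunk, so the outer lookup is a placeholder:

    # `variant(ref:)` is a placeholder lookup; the GraphVariant, launchHistory, and Launch
    # selections below are taken from this hunk.
    variant(ref: "mygraph@current") {
      ... on GraphVariant {
        launchHistory(limit: 5) {
          id
          shortenedID            # first 8 characters of the launch ID
          status                 # LAUNCH_INITIATED | LAUNCH_COMPLETED | LAUNCH_FAILED
          createdAt
          completedAt
          supersededBy { id }
          downstreamLaunches { id graphVariant }   # contract launches triggered by this launch
        }
      }
    }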
Note that for a GraphQL server with a core schema, this should be the core schema, not the API schema.""" coreSchema:String """Information about server and its schema.""" report:SchemaReport!):ReportSchemaResult """Ask for a user's password to be reset by E-mail""" resetPassword(email:String!):Void resolveAllInternalCronExecutions(group:String name:String):Void resolveInternalCronExecution(id:ID!):CronExecution service(id:ID!):ServiceMutation """Set the subscriptions for a given email""" setSubscriptions(email:String!subscriptions:[EmailCategory!]!token:String!):EmailPreferences """Set the studio settings for the current user""" setUserSettings(newSettings:UserSettingsInput):UserSettings signUp(email:String!fullName:String!password:String!referrer:String trackingGoogleClientId:String trackingMarketoClientId:String userSegment:UserSegment utmCampaign:String utmMedium:String utmSource:String):User """This is called by the form shown to users after they delete their user or organization account.""" submitPostDeletionFeedback(feedback:String!targetIdentifier:ID!targetType:DeletionTargetType!):Void """Mutation for basic engagement tracking in studio""" track(event:EventEnum!graphID:String!graphVariant:String!="current"):Void """Rover session tracking. Reserved to https://rover.apollo.dev/telemetry (https://github.com/apollographql/orbiter).""" trackRoverSession(anonymousId:ID!arguments:[RoverArgumentInput!]!ci:String command:String!cwdHash:SHA256!os:String!remoteUrlHash:SHA256!sessionId:ID!version:String!):Void """Unsubscribe a given email from all emails""" unsubscribeFromAll(email:String!token:String!):EmailPreferences user(id:ID!):UserMutation}type NamedIntrospectionArg{description:String name:String}type NamedIntrospectionArgNoDescription{name:String}"""The shared fields for a named introspection type. Currently this is returned for the +returns the corresponding login email when successful""" finalizePasswordReset(newPassword:String!resetToken:String!):String """Mutation a graph.""" graph(id:ID!):ServiceMutation """Join an account with a token""" joinAccount(accountId:ID!joinToken:String!):Account me:IdentityMutation newAccount(companyUrl:String id:ID!):Account newService(accountId:ID!description:String hiddenFromUninvitedNonAdminAccountMembers:Boolean!=false id:ID!isDev:Boolean!=false name:String onboardingArchitecture:OnboardingArchitecture title:String):Service operationCollection(id:ID!):OperationCollectionMutation """Report a running GraphQL server's schema.""" reportSchema("""Only sent if previously requested i.e. received ReportSchemaResult with withCoreSchema = true. This is a GraphQL schema document as a string. 
Note that for a GraphQL server with a core schema, this should be the core schema, not the API schema.""" coreSchema:String """Information about server and its schema.""" report:SchemaReport!):ReportSchemaResult """Ask for a user's password to be reset by E-mail""" resetPassword(email:String!):Void resolveAllInternalCronExecutions(group:String name:String):Void resolveInternalCronExecution(id:ID!):CronExecution service(id:ID!):ServiceMutation """Set the subscriptions for a given email""" setSubscriptions(email:String!subscriptions:[EmailCategory!]!token:String!):EmailPreferences """Set the studio settings for the current user""" setUserSettings(newSettings:UserSettingsInput):UserSettings signUp(email:String!fullName:String!password:String!referrer:String trackingGoogleClientId:String trackingMarketoClientId:String userSegment:UserSegment utmCampaign:String utmMedium:String utmSource:String):User """This is called by the form shown to users after they delete their user or organization account.""" submitPostDeletionFeedback(feedback:String!targetIdentifier:ID!targetType:DeletionTargetType!):Void """Mutation for basic engagement tracking in studio""" track(event:EventEnum!graphID:String!graphVariant:String!="current"):Void """Rover session tracking. Reserved to https://rover.apollo.dev/telemetry (https://github.com/apollographql/orbiter).""" trackRoverSession(anonymousId:ID!arguments:[RoverArgumentInput!]!ci:String command:String!cwdHash:SHA256!os:String!remoteUrlHash:SHA256 sessionId:ID!version:String!):Void """Unsubscribe a given email from all emails""" unsubscribeFromAll(email:String!token:String!):EmailPreferences user(id:ID!):UserMutation}type NamedIntrospectionArg{description:String name:String}type NamedIntrospectionArgNoDescription{name:String}"""The shared fields for a named introspection type. Currently this is returned for the top level value affected by a change. In the future, we may update this type to be an interface, which is extended by the more specific types: scalar, object, input object, union, interface, and enum @@ -68,8 +68,8 @@ as input fields, objects in interfaces, enum values. In the future, this value could become an interface to allow fields specific to the types returned.""" type NamedIntrospectionValue{description:String name:String printedType:String}type NamedIntrospectionValueNoDescription{name:String printedType:String}"""A non-federated service for a monolithic graph.""" type NonFederatedImplementingService{"""Timestamp of when this implementing service was created.""" createdAt:Timestamp!"""Identifies which graph this non-implementing service belongs to. Formerly known as "service_id".""" graphID:String!"""Specifies which variant of a graph this implementing service belongs to". 
-Formerly known as "tag".""" graphVariant:String!}type NotFoundError implements Error{message:String!}"""Arbitrary JSON object""" scalar Object type OdysseyCertification{certificationId:String!earnedAt:Timestamp!id:ID!owner:OdysseyCertificationOwner}type OdysseyCertificationOwner{fullName:String!id:ID!}type OdysseyCourse{completedAt:Timestamp enrolledAt:Timestamp id:ID!}input OdysseyCourseInput{completedAt:Timestamp courseId:String!}type OdysseyTask{completedAt:Timestamp id:ID!value:String}input OdysseyTaskInput{completedAt:Timestamp taskId:String!value:String}type Operation{id:ID!name:String signature:String truncated:Boolean!}type OperationAcceptedChange{acceptedAt:Timestamp!acceptedBy:Identity!change:StoredApprovedChange!checkID:ID!graphID:ID!id:ID!operationID:String!}"""Columns of OperationCheckStats.""" enum OperationCheckStatsColumn{CACHED_REQUESTS_COUNT CLIENT_NAME CLIENT_VERSION QUERY_ID QUERY_NAME SCHEMA_TAG SERVICE_ID TIMESTAMP UNCACHED_REQUESTS_COUNT}type OperationCheckStatsDimensions{clientName:String clientVersion:String queryId:ID queryName:String schemaTag:String serviceId:ID}"""Filter for data in OperationCheckStats. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input OperationCheckStatsFilter{and:[OperationCheckStatsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String in:OperationCheckStatsFilterIn not:OperationCheckStatsFilter or:[OperationCheckStatsFilter!]"""Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in OperationCheckStats. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input OperationCheckStatsFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type OperationCheckStatsMetrics{cachedRequestsCount:Long!uncachedRequestsCount:Long!}input OperationCheckStatsOrderBySpec{column:OperationCheckStatsColumn!direction:Ordering!}type OperationCheckStatsRecord{"""Dimensions of OperationCheckStats that can be grouped by.""" groupBy:OperationCheckStatsDimensions!"""Metrics of OperationCheckStats that can be aggregated over.""" metrics:OperationCheckStatsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}type OperationCollection{createdAt:Timestamp!createdBy:Identity description:String """If a user has any of these roles, they will be able to edit this -collection. This will be null if and only if \`isShared\` is false""" editRoles:[UserPermission!]id:ID!isFavorite:Boolean!isSandbox:Boolean!isShared:Boolean!lastUpdatedAt:Timestamp!lastUpdatedBy:Identity name:String!operation(id:ID!):OperationCollectionEntryResult operations:[OperationCollectionEntry!]!""" Permissions the current user has for this collection""" permissions:OperationCollectionPermissions!variants:[GraphVariant!]!}type OperationCollectionEntry{collection:OperationCollection!createdAt:Timestamp!createdBy:Identity currentOperationRevision:OperationCollectionEntryState!id:ID!lastUpdatedAt:Timestamp!lastUpdatedBy:Identity name:String!orderingIndex:String!}type OperationCollectionEntryMutation{moveToCollection(collectionId:ID!lowerOrderingBound:String upperOrderingBound:String):MoveOperationCollectionEntryResult!reorderEntry(lowerOrderingBound:String upperOrderingBound:String):UpdateOperationCollectionResult updateName(name:String!):UpdateOperationCollectionEntryResult updateValues(operationInput:OperationCollectionEntryStateInput!):UpdateOperationCollectionEntryResult}union OperationCollectionEntryMutationResult=NotFoundError|OperationCollectionEntryMutation|PermissionError union OperationCollectionEntryResult=NotFoundError|OperationCollectionEntry type OperationCollectionEntryState{body:String!createdAt:Timestamp!createdBy:Identity headers:[OperationHeader!]variables:String}input OperationCollectionEntryStateInput{body:String!headers:[OperationHeaderInput!]""" I'm assuming this is non null""" variables:String}type OperationCollectionMutation{addOperation(name:String!operationInput:OperationCollectionEntryStateInput!):AddOperationCollectionEntryResult addToVariant(variantRef:ID!):AddOperationCollectionToVariantResult!@deprecated(reason:"Will throw NotImplemented")delete:DeleteOperationCollectionResult deleteOperation(id:ID!):RemoveOperationCollectionEntryResult duplicateCollection(description:String isSandbox:Boolean!isShared:Boolean!name:String!variantRef:ID):DuplicateOperationCollectionResult!operation(id:ID!):OperationCollectionEntryMutationResult removeFromVariant(variantRef:ID!):RemoveOperationCollectionFromVariantResult!@deprecated(reason:"Will throw NotImplemented")updateDescription(description:String):UpdateOperationCollectionResult updateEditRoles(editRoles:[UserPermission!]!):UpdateOperationCollectionResult updateIsFavorite(isFavorite:Boolean!):UpdateOperationCollectionResult updateIsShared(isShared:Boolean!):UpdateOperationCollectionResult updateName(name:String!):UpdateOperationCollectionResult}type OperationCollectionPermissions{canEditOperations:Boolean!canManage:Boolean!canReadOperations:Boolean!}union OperationCollectionResult=NotFoundError|OperationCollection|PermissionError type OperationDocument{"""Operation document body""" body:String!"""Operation name""" 
name:String}input OperationDocumentInput{"""Operation document body""" body:String!"""Operation name""" name:String}type OperationHeader{name:String!value:String!}input OperationHeaderInput{name:String!value:String!}"""Operation name filter configuration for a graph.""" type OperationNameFilter{"""name of the operation by the user and reported alongside metrics""" name:String!}"""Options to filter by operation name.""" input OperationNameFilterInput{"""name of the operation set by the user and reported alongside metrics""" name:String!}type OperationValidationError{message:String!}type OperationsCheckResult{"""Operations affected by all changes in diff""" affectedQueries:[AffectedQuery!]"""Summary/counts for all changes in diff""" changeSummary:ChangeSummary!"""List of schema changes with associated affected clients and operations""" changes:[Change!]!"""Indication of the success of the change, either failure, warning, or notice.""" checkSeverity:ChangeSeverity!"""The variant that was used as a base to check against""" checkedVariant:GraphVariant!createdAt:Timestamp!id:ID!"""Number of affected query operations that are neither marked as SAFE or IGNORED""" numberOfAffectedOperations:Int!"""Number of operations that were validated during schema diff""" numberOfCheckedOperations:Int!workflowTask:OperationsCheckTask!}type OperationsCheckTask implements CheckWorkflowTask{completedAt:Timestamp createdAt:Timestamp!id:ID!"""The result of the check.""" result:OperationsCheckResult status:CheckWorkflowTaskStatus!workflow:CheckWorkflow!}enum Ordering{ASCENDING DESCENDING}"""A reusable invite link for an organization.""" type OrganizationInviteLink{createdAt:Timestamp!"""A joinToken that can be passed to Mutation.joinAccount to join the organization.""" joinToken:String!"""The role that the user will receive if they join the organization with this link.""" role:UserPermission!}type OrganizationSSO{defaultRole:UserPermission!idpid:ID!provider:OrganizationSSOProvider!}enum OrganizationSSOProvider{PINGONE}"""PagerDuty notification channel""" type PagerDutyChannel implements Channel{id:ID!name:String!routingKey:String!subscriptions:[ChannelSubscription!]!}"""PagerDuty notification channel parameters""" input PagerDutyChannelInput{name:String routingKey:String!}"""Schema for a subgraph with associated metadata""" type PartialSchema{"""Timestamp for when the partial schema was created""" createdAt:Timestamp!"""If this sdl is currently actively composed in the gateway, this is true""" isLive:Boolean!"""The GraphQL document for a subgraph schema.""" sdl:String!"""The path of deep storage to find the raw enriched partial schema file""" sdlPath:String!}"""Input for registering a partial schema to an implementing service. 
+Formerly known as "tag".""" graphVariant:String!}type NotFoundError implements Error{message:String!}"""Arbitrary JSON object""" scalar Object type OdysseyAttempt{completedAt:Timestamp id:ID!responses:[OdysseyResponse!]!startedAt:Timestamp!testId:String!}type OdysseyCertification{certificationId:String!earnedAt:Timestamp!id:ID!owner:OdysseyCertificationOwner}type OdysseyCertificationOwner{fullName:String!id:ID!}type OdysseyCourse{completedAt:Timestamp enrolledAt:Timestamp id:ID!}input OdysseyCourseInput{completedAt:Timestamp courseId:String!}type OdysseyResponse{correct:Boolean!id:ID!questionId:String!values:[OdysseyValue!]!}input OdysseyResponseInput{attemptId:ID!correct:Boolean!questionId:String!values:[String!]!}type OdysseyTask{completedAt:Timestamp id:ID!value:String}input OdysseyTaskInput{completedAt:Timestamp taskId:String!value:String}type OdysseyValue{id:ID!value:String!}enum OnboardingArchitecture{MONOLITH SUPERGRAPH}type Operation{id:ID!name:String signature:String truncated:Boolean!}type OperationAcceptedChange{acceptedAt:Timestamp!acceptedBy:Identity!change:StoredApprovedChange!checkID:ID!graphID:ID!id:ID!operationID:String!}"""Columns of OperationCheckStats.""" enum OperationCheckStatsColumn{CACHED_REQUESTS_COUNT CLIENT_NAME CLIENT_VERSION QUERY_ID QUERY_NAME SCHEMA_TAG SERVICE_ID TIMESTAMP UNCACHED_REQUESTS_COUNT}type OperationCheckStatsDimensions{clientName:String clientVersion:String queryId:ID queryName:String schemaTag:String serviceId:ID}"""Filter for data in OperationCheckStats. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input OperationCheckStatsFilter{and:[OperationCheckStatsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String in:OperationCheckStatsFilterIn not:OperationCheckStatsFilter or:[OperationCheckStatsFilter!]"""Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in OperationCheckStats. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input OperationCheckStatsFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type OperationCheckStatsMetrics{cachedRequestsCount:Long!uncachedRequestsCount:Long!}input OperationCheckStatsOrderBySpec{column:OperationCheckStatsColumn!direction:Ordering!}type OperationCheckStatsRecord{"""Dimensions of OperationCheckStats that can be grouped by.""" groupBy:OperationCheckStatsDimensions!"""Metrics of OperationCheckStats that can be aggregated over.""" metrics:OperationCheckStatsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}type OperationCollection{createdAt:Timestamp!createdBy:Identity description:String """If a user has any of these roles, they will be able to edit this +collection. This will be null if and only if \`isShared\` is false""" editRoles:[UserPermission!]@deprecated(reason:"deprecated in favour of minEditRole")id:ID!isFavorite:Boolean!isSandbox:Boolean!isShared:Boolean!lastUpdatedAt:Timestamp!lastUpdatedBy:Identity minEditRole:UserPermission name:String!operation(id:ID!):OperationCollectionEntryResult operations:[OperationCollectionEntry!]!""" Permissions the current user has for this collection""" permissions:OperationCollectionPermissions!variants:[GraphVariant!]!}type OperationCollectionEntry{collection:OperationCollection!createdAt:Timestamp!createdBy:Identity currentOperationRevision:OperationCollectionEntryState!id:ID!lastUpdatedAt:Timestamp!lastUpdatedBy:Identity name:String!orderingIndex:String!}type OperationCollectionEntryMutation{moveToCollection(collectionId:ID!lowerOrderingBound:String upperOrderingBound:String):MoveOperationCollectionEntryResult!reorderEntry(lowerOrderingBound:String upperOrderingBound:String):UpdateOperationCollectionResult updateName(name:String!):UpdateOperationCollectionEntryResult updateValues(operationInput:OperationCollectionEntryStateInput!):UpdateOperationCollectionEntryResult}union OperationCollectionEntryMutationResult=NotFoundError|OperationCollectionEntryMutation|PermissionError union OperationCollectionEntryResult=NotFoundError|OperationCollectionEntry type OperationCollectionEntryState{body:String!createdAt:Timestamp!createdBy:Identity headers:[OperationHeader!]variables:String}input OperationCollectionEntryStateInput{body:String!headers:[OperationHeaderInput!]""" I'm assuming this is non null""" variables:String}type OperationCollectionMutation{addOperation(name:String!operationInput:OperationCollectionEntryStateInput!):AddOperationCollectionEntryResult addToVariant(variantRef:ID!):AddOperationCollectionToVariantResult!@deprecated(reason:"Will throw NotImplemented")delete:DeleteOperationCollectionResult deleteOperation(id:ID!):RemoveOperationCollectionEntryResult duplicateCollection(description:String isSandbox:Boolean!isShared:Boolean!name:String!variantRef:ID):DuplicateOperationCollectionResult!operation(id:ID!):OperationCollectionEntryMutationResult removeFromVariant(variantRef:ID!):RemoveOperationCollectionFromVariantResult!@deprecated(reason:"Will throw NotImplemented")setMinEditRole(editRole:UserPermission):UpdateOperationCollectionResult updateDescription(description:String):UpdateOperationCollectionResult 
updateEditRoles(editRoles:[UserPermission!]!):UpdateOperationCollectionResult@deprecated(reason:"Deprecated in favour of setMinEditRole")updateIsFavorite(isFavorite:Boolean!):UpdateOperationCollectionResult updateIsShared(isShared:Boolean!):UpdateOperationCollectionResult updateName(name:String!):UpdateOperationCollectionResult}type OperationCollectionPermissions{canEditOperations:Boolean!canManage:Boolean!canReadOperations:Boolean!}union OperationCollectionResult=NotFoundError|OperationCollection|PermissionError type OperationDocument{"""Operation document body""" body:String!"""Operation name""" name:String}input OperationDocumentInput{"""Operation document body""" body:String!"""Operation name""" name:String}type OperationHeader{name:String!value:String!}input OperationHeaderInput{name:String!value:String!}"""Operation name filter configuration for a graph.""" type OperationNameFilter{"""name of the operation by the user and reported alongside metrics""" name:String!}"""Options to filter by operation name.""" input OperationNameFilterInput{"""name of the operation set by the user and reported alongside metrics""" name:String!}type OperationValidationError{message:String!}type OperationsCheckResult{"""Operations affected by all changes in diff""" affectedQueries:[AffectedQuery!]"""Summary/counts for all changes in diff""" changeSummary:ChangeSummary!"""List of schema changes with associated affected clients and operations""" changes:[Change!]!"""Indication of the success of the change, either failure, warning, or notice.""" checkSeverity:ChangeSeverity!"""The variant that was used as a base to check against""" checkedVariant:GraphVariant!createdAt:Timestamp!id:ID!"""Number of affected query operations that are neither marked as SAFE or IGNORED""" numberOfAffectedOperations:Int!"""Number of operations that were validated during schema diff""" numberOfCheckedOperations:Int!workflowTask:OperationsCheckTask!}type OperationsCheckTask implements CheckWorkflowTask{completedAt:Timestamp createdAt:Timestamp!id:ID!"""The result of the check.""" result:OperationsCheckResult status:CheckWorkflowTaskStatus!workflow:CheckWorkflow!}enum Ordering{ASCENDING DESCENDING}"""A reusable invite link for an organization.""" type OrganizationInviteLink{createdAt:Timestamp!"""A joinToken that can be passed to Mutation.joinAccount to join the organization.""" joinToken:String!"""The role that the user will receive if they join the organization with this link.""" role:UserPermission!}type OrganizationSSO{defaultRole:UserPermission!idpid:ID!provider:OrganizationSSOProvider!}enum OrganizationSSOProvider{PINGONE}"""PagerDuty notification channel""" type PagerDutyChannel implements Channel{id:ID!name:String!routingKey:String!subscriptions:[ChannelSubscription!]!}"""PagerDuty notification channel parameters""" input PagerDutyChannelInput{name:String routingKey:String!}"""Schema for a subgraph with associated metadata""" type PartialSchema{"""Timestamp for when the partial schema was created""" createdAt:Timestamp!"""If this sdl is currently actively composed in the gateway, this is true""" isLive:Boolean!"""The GraphQL document for a subgraph schema.""" sdl:String!"""The path of deep storage to find the raw enriched partial schema file""" sdlPath:String!}"""Input for registering a partial schema to an implementing service. One of the fields must be specified (validated server-side). 
If a new partialSchemaSDL is passed in, this operation will store it before @@ -79,7 +79,7 @@ If both the sdl and hash are specified, an error will be thrown if the provided hash doesn't match our hash of the sdl contents. If the sdl field is specified, the hash does not need to be and will be computed server-side.""" input PartialSchemaInput{"""Hash of the partial schema to associate; error is thrown if only the hash is specified and the hash has not been seen before""" hash:String """Contents of the partial schema in SDL syntax, but may reference types -that aren't defined in this document""" sdl:String}type PermissionError implements Error{message:String!}type PromoteSchemaError{code:PromoteSchemaErrorCode!message:String!}enum PromoteSchemaErrorCode{CANNOT_PROMOTE_SCHEMA_FOR_FEDERATED_GRAPH}type PromoteSchemaResponse{code:PromoteSchemaResponseCode!tag:SchemaTag!}enum PromoteSchemaResponseCode{NO_CHANGES_DETECTED PROMOTION_SUCCESS}union PromoteSchemaResponseOrError=PromoteSchemaError|PromoteSchemaResponse type Protobuf{json:String!object:Object!raw:Blob!text:String!}type Query{"""Account by ID""" account(id:ID!):Account """Retrieve account by billing provider identifier""" accountByBillingCode(id:ID!):Account """Whether an account ID is available for mutation{newAccount(id:)}""" accountIDAvailable(id:ID!):Boolean!"""All accounts""" allAccounts(search:String tier:BillingPlanTier):[Account!]"""All available plans""" allPlans:[BillingPlan!]!allPublicVariants:[GraphVariant!]"""All services""" allServices(search:String):[Service!]"""All timezones with their offsets from UTC""" allTimezoneOffsets:[TimezoneOffset!]!"""All users""" allUsers(search:String):[User!]"""Look up a plan by ID""" billingPlan(id:ID):BillingPlanV2 """All available plans""" billingPlans:[BillingPlanV2!]!"""If this is true, the user is an Apollo administrator who can ignore restrictions based purely on billing plan.""" canBypassPlanRestrictions:Boolean!diffSchemas(baseSchema:String!nextSchema:String!):[Change!]!"""Get the unsubscribe settings for a given email.""" emailPreferences(email:String!token:String!):EmailPreferences experimentalFeatures:GlobalExperimentalFeatures!"""Address of the Studio frontend.""" frontendUrlRoot:String!"""Access a graph by ID.""" graph(id:ID!):Service internalActiveCronJobs:[CronJob!]!internalAdminUsers:[InternalAdminUser!]internalUnresolvedCronExecutionFailures:[CronExecution!]!"""User or graph querying the API, null if not authenticated.""" me:Identity odysseyCertification(id:ID!):OdysseyCertification operationCollection(id:ID!):OperationCollectionResult!operationCollectionEntries(collectionEntryIds:[ID!]!):[OperationCollectionEntry!]!"""Access an organization by ID.""" organization(id:ID!):Account """Look up a plan by ID""" plan(id:ID):BillingPlan """A list of public variants that have been selected to be shown on our Graph Directory.""" publiclyListedVariants:[GraphVariant!]"""Service by ID""" service(id:ID!):Service """Query statistics across all services. For admins only; normal users must go through AccountsStatsWindow or ServiceStatsWindow.""" stats(from:Timestamp!"""Granularity of buckets. 
Defaults to the entire range (aggregate all data into a single durationBucket) when null.""" resolution:Resolution """Defaults to the current time when null.""" to:Timestamp):StatsWindow!"""Get the studio settings for the current user""" studioSettings:UserSettings """The plan started by AccountMutation.startTeamSubscription""" teamBillingPlan(billingPeriod:BillingPeriod!):BillingPlanV2!"""The plan started by AccountMutation.startTeamSubscription""" teamPlan(billingPeriod:BillingPeriod!):BillingPlan!"""Schema transformation for the Apollo platform API. Renames types. Internal to Apollo.""" transformSchemaForPlatformApi(baseSchema:GraphQLDocument!):GraphQLDocument """The plan started by AccountMutation.startTrial""" trialBillingPlan:BillingPlanV2!"""The plan started by AccountMutation.startTrial""" trialPlan:BillingPlan!"""User by ID""" user(id:ID!):User """Access a variant by reference of the form `graphID@variantName`, or `graphID` for the default `current` variant. +that aren't defined in this document""" sdl:String}type PermissionError implements Error{message:String!}type PromoteSchemaError{code:PromoteSchemaErrorCode!message:String!}enum PromoteSchemaErrorCode{CANNOT_PROMOTE_SCHEMA_FOR_FEDERATED_GRAPH}type PromoteSchemaResponse{code:PromoteSchemaResponseCode!tag:SchemaTag!}enum PromoteSchemaResponseCode{NO_CHANGES_DETECTED PROMOTION_SUCCESS}union PromoteSchemaResponseOrError=PromoteSchemaError|PromoteSchemaResponse type Protobuf{json:String!object:Object!raw:Blob!text:String!}type Query{"""Account by ID""" account(id:ID!):Account """Retrieve account by billing provider identifier""" accountByBillingCode(id:ID!):Account """Retrieve account by internal id""" accountByInternalID(id:ID!):Account """Whether an account ID is available for mutation{newAccount(id:)}""" accountIDAvailable(id:ID!):Boolean!"""All accounts""" allAccounts(search:String tier:BillingPlanTier):[Account!]"""All available plans""" allPlans:[BillingPlan!]!allPublicVariants:[GraphVariant!]"""All services""" allServices(search:String):[Service!]"""All timezones with their offsets from UTC""" allTimezoneOffsets:[TimezoneOffset!]!"""All users""" allUsers(search:String):[User!]"""Look up a plan by ID""" billingPlan(id:ID):BillingPlanV2 """All available plans""" billingPlans:[BillingPlanV2!]!"""If this is true, the user is an Apollo administrator who can ignore restrictions based purely on billing plan.""" canBypassPlanRestrictions:Boolean!diffSchemas(baseSchema:String!nextSchema:String!):[Change!]!"""Get the unsubscribe settings for a given email.""" emailPreferences(email:String!token:String!):EmailPreferences experimentalFeatures:GlobalExperimentalFeatures!"""Address of the Studio frontend.""" frontendUrlRoot:String!"""Access a graph by ID.""" graph(id:ID!):Service internalActiveCronJobs:[CronJob!]!internalAdminUsers:[InternalAdminUser!]internalUnresolvedCronExecutionFailures:[CronExecution!]!"""User or graph querying the API, null if not authenticated.""" me:Identity odysseyCertification(id:ID!):OdysseyCertification operationCollection(id:ID!):OperationCollectionResult!operationCollectionEntries(collectionEntryIds:[ID!]!):[OperationCollectionEntry!]!"""Access an organization by ID.""" organization(id:ID!):Account """Look up a plan by ID""" plan(id:ID):BillingPlan """A list of public variants that have been selected to be shown on our Graph Directory.""" publiclyListedVariants:[GraphVariant!]"""Service by ID""" service(id:ID!):Service """Query statistics across all services. 
For admins only; normal users must go through AccountsStatsWindow or ServiceStatsWindow.""" stats(from:Timestamp!"""Granularity of buckets. Defaults to the entire range (aggregate all data into a single durationBucket) when null.""" resolution:Resolution """Defaults to the current time when null.""" to:Timestamp):StatsWindow!"""Get the studio settings for the current user""" studioSettings:UserSettings """The plan started by AccountMutation.startTeamSubscription""" teamBillingPlan(billingPeriod:BillingPeriod!):BillingPlanV2!"""The plan started by AccountMutation.startTeamSubscription""" teamPlan(billingPeriod:BillingPeriod!):BillingPlan!"""Schema transformation for the Apollo platform API. Renames types. Internal to Apollo.""" transformSchemaForPlatformApi(baseSchema:GraphQLDocument!):GraphQLDocument """The plan started by AccountMutation.startTrial""" trialBillingPlan:BillingPlanV2!"""The plan started by AccountMutation.startTrial""" trialPlan:BillingPlan!"""User by ID""" user(id:ID!):User """Access a variant by reference of the form `graphID@variantName`, or `graphID` for the default `current` variant. Returns null when the graph or variant do not exist, or when the graph cannot be accessed. Note that we can return more types implementing Error in the future.""" variant(ref:ID!):GraphVariantLookup}"""query documents to validate against""" input QueryDocumentInput{document:String}type QueryPlan{json:String!object:Object!text:String!}"""Columns of QueryStats.""" enum QueryStatsColumn{ACCOUNT_ID CACHED_HISTOGRAM CACHED_REQUESTS_COUNT CACHE_TTL_HISTOGRAM CLIENT_NAME CLIENT_VERSION FORBIDDEN_OPERATION_COUNT FROM_ENGINEPROXY QUERY_ID QUERY_NAME REGISTERED_OPERATION_COUNT REQUESTS_WITH_ERRORS_COUNT SCHEMA_HASH SCHEMA_TAG SERVICE_ID TIMESTAMP UNCACHED_HISTOGRAM UNCACHED_REQUESTS_COUNT}type QueryStatsDimensions{accountId:ID clientName:String clientVersion:String fromEngineproxy:String queryId:ID queryName:String querySignature:String schemaHash:String schemaTag:String serviceId:ID}"""Filter for data in QueryStats. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input QueryStatsFilter{"""Selects rows whose accountId dimension equals the given value if not null. To query for the null value, use {in: {accountId: [null]}} instead.""" accountId:ID and:[QueryStatsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose fromEngineproxy dimension equals the given value if not null. To query for the null value, use {in: {fromEngineproxy: [null]}} instead.""" fromEngineproxy:String in:QueryStatsFilterIn not:QueryStatsFilter or:[QueryStatsFilter!]"""Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. 
To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID}"""Filter for data in QueryStats. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input QueryStatsFilterIn{"""Selects rows whose accountId dimension is in the given list. A null value in the list means a row with null for that dimension.""" accountId:[ID]"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose fromEngineproxy dimension is in the given list. A null value in the list means a row with null for that dimension.""" fromEngineproxy:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]}type QueryStatsMetrics{cacheTtlHistogram:DurationHistogram!cachedHistogram:DurationHistogram!cachedRequestsCount:Long!forbiddenOperationCount:Long!registeredOperationCount:Long!requestsWithErrorsCount:Long!totalLatencyHistogram:DurationHistogram!totalRequestCount:Long!uncachedHistogram:DurationHistogram!uncachedRequestsCount:Long!}input QueryStatsOrderBySpec{column:QueryStatsColumn!direction:Ordering!}type QueryStatsRecord{"""Dimensions of QueryStats that can be grouped by.""" groupBy:QueryStatsDimensions!"""Metrics of QueryStats that can be aggregated over.""" metrics:QueryStatsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Query Trigger""" type QueryTrigger implements ChannelSubscription{channels:[Channel!]!comparisonOperator:ComparisonOperator!enabled:Boolean!excludedOperationNames:[String!]!id:ID!metric:QueryTriggerMetric!operationNames:[String!]!percentile:Float scope:QueryTriggerScope!serviceId:String!state:QueryTriggerState!threshold:Float!variant:String window:QueryTriggerWindow!}"""Query trigger""" input QueryTriggerInput{channelIds:[String!]comparisonOperator:ComparisonOperator!enabled:Boolean excludedOperationNames:[String!]metric:QueryTriggerMetric!operationNames:[String!]percentile:Float scope:QueryTriggerScope threshold:Float!variant:String window:QueryTriggerWindow!}enum QueryTriggerMetric{"""Number of requests within the window that resulted in an error. Ignores `percentile`.""" ERROR_COUNT """Number of error requests divided by total number of requests. Ignores `percentile`.""" ERROR_PERCENTAGE """Number of requests within the window. Ignores `percentile`.""" REQUEST_COUNT """Request latency in ms. 
Requires `percentile`.""" REQUEST_SERVICE_TIME}enum QueryTriggerScope{ALL ANY UNRECOGNIZED}"""Query trigger state""" type QueryTriggerState{evaluatedAt:Timestamp!lastTriggeredAt:Timestamp operations:[QueryTriggerStateOperation!]!triggered:Boolean!}type QueryTriggerStateOperation{count:Long!operation:String!triggered:Boolean!value:Float!}enum QueryTriggerWindow{FIFTEEN_MINUTES FIVE_MINUTES ONE_MINUTE UNRECOGNIZED}"""The documentation for a graph variant, as display in Studio.""" type Readme{"""Content of the document.""" content:String!id:ID!"""Last time the document was updated.""" lastUpdatedAt:Timestamp!"""Identity of who updated the document last.""" lastUpdatedBy:Identity}type RegisterOperationsMutationResponse{invalidOperations:[InvalidOperation!]newOperations:[RegisteredOperation!]registrationSuccess:Boolean!}input RegisteredClientIdentityInput{identifier:String!name:String!version:String}type RegisteredOperation{signature:ID!}input RegisteredOperationInput{document:String metadata:RegisteredOperationMetadataInput signature:ID!}input RegisteredOperationMetadataInput{"""This will be used to link existing records in Engine to a new ID.""" engineSignature:String}type RegistryApiKey{keyName:String token:String!}type RegistryStatsWindow{schemaCheckStats:[AccountChecksStatsRecord!]!schemaPublishStats:[AccountPublishesStatsRecord!]!}type RegistrySubscription implements ChannelSubscription{channel:Channel channels:[Channel!]!@deprecated(reason:"Use channels list instead")createdAt:Timestamp!enabled:Boolean!id:ID!lastUpdatedAt:Timestamp!options:SubscriptionOptions!variant:String}type RelaunchComplete{latestLaunch:Launch!updated:Boolean!}type RelaunchError{message:String!}union RelaunchResult=RelaunchComplete|RelaunchError union RemoveOperationCollectionEntryResult=OperationCollection|PermissionError union RemoveOperationCollectionFromVariantResult=GraphVariant|NotFoundError|PermissionError|ValidationError union ReorderOperationCollectionResult=OperationCollection|PermissionError type ReportSchemaError implements ReportSchemaResult{code:ReportSchemaErrorCode!inSeconds:Int!message:String!withCoreSchema:Boolean!}enum ReportSchemaErrorCode{BOOT_ID_IS_NOT_VALID_UUID BOOT_ID_IS_REQUIRED CORE_SCHEMA_HASH_IS_NOT_SCHEMA_SHA256 CORE_SCHEMA_HASH_IS_REQUIRED CORE_SCHEMA_HASH_IS_TOO_LONG EXECUTABLE_SCHEMA_ID_IS_NOT_SCHEMA_SHA256 EXECUTABLE_SCHEMA_ID_IS_REQUIRED EXECUTABLE_SCHEMA_ID_IS_TOO_LONG GRAPH_REF_INVALID_FORMAT GRAPH_REF_IS_REQUIRED GRAPH_VARIANT_DOES_NOT_MATCH_REGEX GRAPH_VARIANT_IS_REQUIRED LIBRARY_VERSION_IS_TOO_LONG PLATFORM_IS_TOO_LONG RUNTIME_VERSION_IS_TOO_LONG SCHEMA_IS_NOT_PARSABLE SCHEMA_IS_NOT_VALID SERVER_ID_IS_TOO_LONG USER_VERSION_IS_TOO_LONG}type ReportSchemaResponse implements ReportSchemaResult{inSeconds:Int!withCoreSchema:Boolean!}interface ReportSchemaResult{inSeconds:Int!withCoreSchema:Boolean!}type ReportServerInfoError implements ReportServerInfoResult{code:ReportSchemaErrorCode!inSeconds:Int!message:String!withExecutableSchema:Boolean!}type ReportServerInfoResponse implements ReportServerInfoResult{inSeconds:Int!withExecutableSchema:Boolean!}interface ReportServerInfoResult{inSeconds:Int!withExecutableSchema:Boolean!}enum Resolution{R1D R1H R1M R5M R6H R15M}enum ResponseHints{NONE SAMPLE_RESPONSES SUBGRAPHS TIMINGS TRACE_TIMINGS}type RoleOverride{graph:Service!lastUpdatedAt:Timestamp!role:UserPermission!user:User!}input RoverArgumentInput{key:String!value:Object}"""SHA-256 hash, represented in lowercase hexadecimal""" scalar SHA256 type ScheduledSummary implements 
ChannelSubscription{channel:Channel@deprecated(reason:"Use channels list instead")channels:[Channel!]!enabled:Boolean!id:ID!timezone:String!variant:String!}"""A GraphQL schema document, which may optionally map back to context with which the schema was ingested.""" type Schema{createTemporaryURL(expiresInSeconds:Int!=86400):TemporaryURL """The timestamp of initial ingestion of a schema to a graph.""" createdAt:Timestamp!"""The raw GraphQL document for the schema in question""" document:GraphQLDocument!"""The number of fields; this includes user defined fields only, excluding built-in types and fields""" fieldCount:Int!gitContext:GitContext """The hex representation of the SHA256 of the GraphQL document.""" hash:ID!introspection:IntrospectionSchema!"""The number of types; this includes user defined types only, excluding built-in types""" typeCount:Int!}"""Represents an error from running schema composition on a list of subgraph definitions.""" type SchemaCompositionError{"""A machine-readable error code.""" code:String """Affected locations.""" locations:[SourceLocation]!"""A human-readable locations.""" message:String!}"""The difference between two schemas, as usually computed during a schema check.""" type SchemaDiff{"""Clients affected by all changes in the diff.""" affectedClients:[AffectedClient!]@deprecated(reason:"Unsupported.")"""Operations affected by all changes in the diff.""" affectedQueries:[AffectedQuery!]"""Numeric summary of all changes in the diff.""" changeSummary:ChangeSummary!"""List of schema changes with associated affected clients and operations.""" changes:[Change!]!"""Number of affected query operations that are neither marked as safe or ignored.""" numberOfAffectedOperations:Int!"""Number of operations that were validated during the check.""" numberOfCheckedOperations:Int """Indication of the success of the change; either failure, warning, or notice.""" severity:ChangeSeverity!"""The tag against which this diff was created""" tag:String type:ChangeType!@deprecated(reason:"use severity instead")"""Configuration of validation""" validationConfig:SchemaDiffValidationConfig}type SchemaDiffValidationConfig{"""Clients to ignore during validation.""" excludedClients:[ClientInfoFilterOutput!]"""Operation names to ignore during validation.""" excludedOperationNames:[OperationNameFilter]"""delta in seconds from current time that determines the start of the window for reported metrics included in a schema diff. A day window from the present @@ -112,12 +112,12 @@ application. Apollo's media server will downscale larger images to at least the but this will not happen for third-party media servers.""" avatarUrl(size:Int!=40):String """Get available notification endpoints""" channels(channelIds:[ID!]):[Channel!]"""Get check configuration for this graph.""" checkConfiguration:CheckConfiguration """Get a check workflow for this graph by its ID""" checkWorkflow(id:ID!):CheckWorkflow """Get check workflows for this graph ordered by creation time, most recent first.""" checkWorkflows(filter:CheckFilterInput limit:Int!=100):[CheckWorkflow!]!"""List of options available for filtering checks for this graph by author. If a filter is passed, constrains results to match the filter.""" checksAuthorOptions(filter:CheckFilterInput):[String!]!"""List of options available for filtering checks for this graph by branch. 
If a filter is passed, constrains results to match the filter.""" checksBranchOptions(filter:CheckFilterInput):[String!]!"""List of options available for filtering checks for this graph by subgraph name. -If a filter is passed, constrains results to match the filter.""" checksSubgraphOptions(filter:CheckFilterInput):[String!]!"""Given a graphCompositionID, return the results of composition. This can represent either a validation or a publish.""" compositionResultById(id:ID!):CompositionResult createdAt:Timestamp!createdBy:Identity datadogMetricsConfig:DatadogMetricsConfig deletedAt:Timestamp description:String devGraphOwner:User """Get a GraphQL document by hash""" document(hash:SHA256):GraphQLDocument """When this is true, this graph will be hidden from non-admin members of the org who haven't been explicitly assigned a +If a filter is passed, constrains results to match the filter.""" checksSubgraphOptions(filter:CheckFilterInput):[String!]!"""Given a graphCompositionID, return the results of composition. This can represent either a validation or a publish.""" compositionResultById(id:ID!):CompositionResult createdAt:Timestamp!createdBy:Identity datadogMetricsConfig:DatadogMetricsConfig defaultBuildPipelineTrack:String deletedAt:Timestamp description:String devGraphOwner:User """Get a GraphQL document by hash""" document(hash:SHA256):GraphQLDocument """When this is true, this graph will be hidden from non-admin members of the org who haven't been explicitly assigned a role on this graph.""" hiddenFromUninvitedNonAdminAccountMembers:Boolean!"""Globally unique identifier for this graph.""" id:ID!"""List of subgraphs that comprise a graph. A non-federated graph should have a single implementing service. -Set includeDeleted to see deleted subgraphs.""" implementingServices(graphVariant:String!includeDeleted:Boolean):GraphImplementors lastReportedAt(graphVariant:String):Timestamp """Current identity, null if not authenticated.""" me:Identity """The composition result that was most recently published to a graph variant.""" mostRecentCompositionPublish(graphVariant:String!):CompositionPublishResult """Permissions of the current user in this graph.""" myRole:UserPermission """Name of this graph. Note that this field is deprecated.""" name:String!@deprecated(reason:"Use Service.title")operation(id:ID!):Operation """Gets the operations and their approved changes for this graph, checkID, and operationID.""" operationsAcceptedChanges(checkID:ID!operationID:String!):[OperationAcceptedChange!]!"""Get an operations check result for a specific check ID""" operationsCheck(checkID:ID!):OperationsCheckResult """Get query triggers for a given variant. If variant is null all the triggers for this service will be gotten.""" queryTriggers(graphVariant:String operationNames:[String!]):[QueryTrigger!]readme:Readme """Registry specific stats for this graph.""" registryStatsWindow(from:Timestamp!resolution:Resolution to:Timestamp):RegistryStatsWindow """Whether registry subscriptions (with any options) are enabled. 
If variant is not passed, returns true if configuration is present for any variant""" registrySubscriptionsEnabled(graphVariant:String):Boolean!@deprecated(reason:"This field will be removed")reportingEnabled:Boolean!"""The list of members that can access this graph, accounting for graph role overrides""" roleOverrides:[RoleOverride!]"""Which permissions the current user has for interacting with this graph""" roles:ServiceRoles scheduledSummaries:[ScheduledSummary!]!"""Get a schema by hash or current tag""" schema(hash:ID tag:String):Schema """The current publish associated to a given variant (with 'tag' as the variant name).""" schemaTag(tag:String!):SchemaTag schemaTagById(id:ID!):SchemaTag """Get schema tags, with optional filtering to a set of tags. Always sorted by creation +Set includeDeleted to see deleted subgraphs.""" implementingServices(graphVariant:String!includeDeleted:Boolean):GraphImplementors lastReportedAt(graphVariant:String):Timestamp """Current identity, null if not authenticated.""" me:Identity """The composition result that was most recently published to a graph variant.""" mostRecentCompositionPublish(graphVariant:String!):CompositionPublishResult """Permissions of the current user in this graph.""" myRole:UserPermission """Name of this graph. Note that this field is deprecated.""" name:String!@deprecated(reason:"Use Service.title")onboardingArchitecture:OnboardingArchitecture operation(id:ID!):Operation """Gets the operations and their approved changes for this graph, checkID, and operationID.""" operationsAcceptedChanges(checkID:ID!operationID:String!):[OperationAcceptedChange!]!"""Get an operations check result for a specific check ID""" operationsCheck(checkID:ID!):OperationsCheckResult """Get query triggers for a given variant. If variant is null all the triggers for this service will be gotten.""" queryTriggers(graphVariant:String operationNames:[String!]):[QueryTrigger!]readme:Readme """Registry specific stats for this graph.""" registryStatsWindow(from:Timestamp!resolution:Resolution to:Timestamp):RegistryStatsWindow """Whether registry subscriptions (with any options) are enabled. If variant is not passed, returns true if configuration is present for any variant""" registrySubscriptionsEnabled(graphVariant:String):Boolean!@deprecated(reason:"This field will be removed")reportingEnabled:Boolean!"""The list of members that can access this graph, accounting for graph role overrides""" roleOverrides:[RoleOverride!]"""Which permissions the current user has for interacting with this graph""" roles:ServiceRoles scheduledSummaries:[ScheduledSummary!]!"""Get a schema by hash or current tag""" schema(hash:ID tag:String):Schema """The current publish associated to a given variant (with 'tag' as the variant name).""" schemaTag(tag:String!):SchemaTag schemaTagById(id:ID!):SchemaTag """Get schema tags, with optional filtering to a set of tags. Always sorted by creation date in reverse chronological order.""" schemaTags(tags:[String!]):[SchemaTag!]stats(from:Timestamp!"""Granularity of buckets. Defaults to the entire range (aggregate all data into a single durationBucket) when null.""" resolution:Resolution """Defaults to the current time when null.""" to:Timestamp):ServiceStatsWindow!@deprecated(reason:"use Service.statsWindow instead")statsWindow(from:Timestamp!"""Granularity of buckets. 
Defaults to the entire range (aggregate all data into a single durationBucket) when null.""" resolution:Resolution """Defaults to the current time when null.""" to:Timestamp):ServiceStatsWindow """Generate a test schema publish notification body""" testSchemaPublishBody(variant:String!):String!"""Name of this graph.""" title:String!trace(id:ID!):Trace traceStorageEnabled:Boolean!"""A particular variant often representing a live traffic environment (such as "dev", "staging", or "prod"). Each variant can represent a specific URL or destination to query at, analytics, and its own schema history. -Pass in a name to get a specific variant. Use `Graph.variants` to get a list of variants.""" variant(name:String!):GraphVariant """The list of variants that exist for this graph""" variants:[GraphVariant!]!}"""Columns of ServiceBillingUsageStats.""" enum ServiceBillingUsageStatsColumn{OPERATION_COUNT OPERATION_COUNT_PROVIDED_EXPLICITLY SCHEMA_TAG TIMESTAMP}type ServiceBillingUsageStatsDimensions{operationCountProvidedExplicitly:String schemaTag:String}"""Filter for data in ServiceBillingUsageStats. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input ServiceBillingUsageStatsFilter{and:[ServiceBillingUsageStatsFilter!]in:ServiceBillingUsageStatsFilterIn not:ServiceBillingUsageStatsFilter """Selects rows whose operationCountProvidedExplicitly dimension equals the given value if not null. To query for the null value, use {in: {operationCountProvidedExplicitly: [null]}} instead.""" operationCountProvidedExplicitly:String or:[ServiceBillingUsageStatsFilter!]"""Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String}"""Filter for data in ServiceBillingUsageStats. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input ServiceBillingUsageStatsFilterIn{"""Selects rows whose operationCountProvidedExplicitly dimension is in the given list. A null value in the list means a row with null for that dimension.""" operationCountProvidedExplicitly:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]}type ServiceBillingUsageStatsMetrics{operationCount:Long!}input ServiceBillingUsageStatsOrderBySpec{column:ServiceBillingUsageStatsColumn!direction:Ordering!}type ServiceBillingUsageStatsRecord{"""Dimensions of ServiceBillingUsageStats that can be grouped by.""" groupBy:ServiceBillingUsageStatsDimensions!"""Metrics of ServiceBillingUsageStats that can be aggregated over.""" metrics:ServiceBillingUsageStatsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of ServiceEdgeServerInfos.""" enum ServiceEdgeServerInfosColumn{BOOT_ID EXECUTABLE_SCHEMA_ID LIBRARY_VERSION PLATFORM RUNTIME_VERSION SCHEMA_TAG SERVER_ID TIMESTAMP USER_VERSION}type ServiceEdgeServerInfosDimensions{bootId:ID executableSchemaId:ID libraryVersion:String platform:String runtimeVersion:String schemaTag:String serverId:ID userVersion:String}"""Filter for data in ServiceEdgeServerInfos. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input ServiceEdgeServerInfosFilter{and:[ServiceEdgeServerInfosFilter!]"""Selects rows whose bootId dimension equals the given value if not null. 
To query for the null value, use {in: {bootId: [null]}} instead.""" bootId:ID """Selects rows whose executableSchemaId dimension equals the given value if not null. To query for the null value, use {in: {executableSchemaId: [null]}} instead.""" executableSchemaId:ID in:ServiceEdgeServerInfosFilterIn """Selects rows whose libraryVersion dimension equals the given value if not null. To query for the null value, use {in: {libraryVersion: [null]}} instead.""" libraryVersion:String not:ServiceEdgeServerInfosFilter or:[ServiceEdgeServerInfosFilter!]"""Selects rows whose platform dimension equals the given value if not null. To query for the null value, use {in: {platform: [null]}} instead.""" platform:String """Selects rows whose runtimeVersion dimension equals the given value if not null. To query for the null value, use {in: {runtimeVersion: [null]}} instead.""" runtimeVersion:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serverId dimension equals the given value if not null. To query for the null value, use {in: {serverId: [null]}} instead.""" serverId:ID """Selects rows whose userVersion dimension equals the given value if not null. To query for the null value, use {in: {userVersion: [null]}} instead.""" userVersion:String}"""Filter for data in ServiceEdgeServerInfos. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input ServiceEdgeServerInfosFilterIn{"""Selects rows whose bootId dimension is in the given list. A null value in the list means a row with null for that dimension.""" bootId:[ID]"""Selects rows whose executableSchemaId dimension is in the given list. A null value in the list means a row with null for that dimension.""" executableSchemaId:[ID]"""Selects rows whose libraryVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" libraryVersion:[String]"""Selects rows whose platform dimension is in the given list. A null value in the list means a row with null for that dimension.""" platform:[String]"""Selects rows whose runtimeVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" runtimeVersion:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serverId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serverId:[ID]"""Selects rows whose userVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" userVersion:[String]}input ServiceEdgeServerInfosOrderBySpec{column:ServiceEdgeServerInfosColumn!direction:Ordering!}type ServiceEdgeServerInfosRecord{"""Dimensions of ServiceEdgeServerInfos that can be grouped by.""" groupBy:ServiceEdgeServerInfosDimensions!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of ServiceErrorStats.""" enum ServiceErrorStatsColumn{CLIENT_NAME CLIENT_VERSION ERRORS_COUNT PATH QUERY_ID QUERY_NAME REQUESTS_WITH_ERRORS_COUNT SCHEMA_HASH SCHEMA_TAG TIMESTAMP}type ServiceErrorStatsDimensions{clientName:String clientVersion:String path:String queryId:ID queryName:String schemaHash:String schemaTag:String}"""Filter for data in ServiceErrorStats. Fields with dimension names represent equality checks. 
All fields are implicitly ANDed together.""" input ServiceErrorStatsFilter{and:[ServiceErrorStatsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String in:ServiceErrorStatsFilterIn not:ServiceErrorStatsFilter or:[ServiceErrorStatsFilter!]"""Selects rows whose path dimension equals the given value if not null. To query for the null value, use {in: {path: [null]}} instead.""" path:String """Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String}"""Filter for data in ServiceErrorStats. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input ServiceErrorStatsFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose path dimension is in the given list. A null value in the list means a row with null for that dimension.""" path:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]}type ServiceErrorStatsMetrics{errorsCount:Long!requestsWithErrorsCount:Long!}input ServiceErrorStatsOrderBySpec{column:ServiceErrorStatsColumn!direction:Ordering!}type ServiceErrorStatsRecord{"""Dimensions of ServiceErrorStats that can be grouped by.""" groupBy:ServiceErrorStatsDimensions!"""Metrics of ServiceErrorStats that can be aggregated over.""" metrics:ServiceErrorStatsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of ServiceFieldLatencies.""" enum ServiceFieldLatenciesColumn{FIELD_HISTOGRAM FIELD_NAME PARENT_TYPE SCHEMA_HASH SCHEMA_TAG TIMESTAMP}type ServiceFieldLatenciesDimensions{field:String fieldName:String parentType:String schemaHash:String schemaTag:String}"""Filter for data in ServiceFieldLatencies. Fields with dimension names represent equality checks. 
All fields are implicitly ANDed together.""" input ServiceFieldLatenciesFilter{and:[ServiceFieldLatenciesFilter!]"""Selects rows whose fieldName dimension equals the given value if not null. To query for the null value, use {in: {fieldName: [null]}} instead.""" fieldName:String in:ServiceFieldLatenciesFilterIn not:ServiceFieldLatenciesFilter or:[ServiceFieldLatenciesFilter!]"""Selects rows whose parentType dimension equals the given value if not null. To query for the null value, use {in: {parentType: [null]}} instead.""" parentType:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String}"""Filter for data in ServiceFieldLatencies. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input ServiceFieldLatenciesFilterIn{"""Selects rows whose fieldName dimension is in the given list. A null value in the list means a row with null for that dimension.""" fieldName:[String]"""Selects rows whose parentType dimension is in the given list. A null value in the list means a row with null for that dimension.""" parentType:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]}type ServiceFieldLatenciesMetrics{fieldHistogram:DurationHistogram!}input ServiceFieldLatenciesOrderBySpec{column:ServiceFieldLatenciesColumn!direction:Ordering!}type ServiceFieldLatenciesRecord{"""Dimensions of ServiceFieldLatencies that can be grouped by.""" groupBy:ServiceFieldLatenciesDimensions!"""Metrics of ServiceFieldLatencies that can be aggregated over.""" metrics:ServiceFieldLatenciesMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of ServiceFieldUsage.""" enum ServiceFieldUsageColumn{CLIENT_NAME CLIENT_VERSION ESTIMATED_EXECUTION_COUNT EXECUTION_COUNT FIELD_NAME PARENT_TYPE QUERY_ID QUERY_NAME REFERENCING_OPERATION_COUNT SCHEMA_HASH SCHEMA_TAG TIMESTAMP}type ServiceFieldUsageDimensions{clientName:String clientVersion:String fieldName:String parentType:String queryId:ID queryName:String schemaHash:String schemaTag:String}"""Filter for data in ServiceFieldUsage. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input ServiceFieldUsageFilter{and:[ServiceFieldUsageFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose fieldName dimension equals the given value if not null. To query for the null value, use {in: {fieldName: [null]}} instead.""" fieldName:String in:ServiceFieldUsageFilterIn not:ServiceFieldUsageFilter or:[ServiceFieldUsageFilter!]"""Selects rows whose parentType dimension equals the given value if not null. 
To query for the null value, use {in: {parentType: [null]}} instead.""" parentType:String """Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String}"""Filter for data in ServiceFieldUsage. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input ServiceFieldUsageFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose fieldName dimension is in the given list. A null value in the list means a row with null for that dimension.""" fieldName:[String]"""Selects rows whose parentType dimension is in the given list. A null value in the list means a row with null for that dimension.""" parentType:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]}type ServiceFieldUsageMetrics{estimatedExecutionCount:Long!executionCount:Long!referencingOperationCount:Long!}input ServiceFieldUsageOrderBySpec{column:ServiceFieldUsageColumn!direction:Ordering!}type ServiceFieldUsageRecord{"""Dimensions of ServiceFieldUsage that can be grouped by.""" groupBy:ServiceFieldUsageDimensions!"""Metrics of ServiceFieldUsage that can be aggregated over.""" metrics:ServiceFieldUsageMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Mutations to a graph.""" type ServiceMutation{"""Check a proposed subgraph schema change. +Pass in a name to get a specific variant. Use `Graph.variants` to get a list of variants.""" variant(name:String!):GraphVariant """The list of variants that exist for this graph""" variants:[GraphVariant!]!}"""Columns of ServiceBillingUsageStats.""" enum ServiceBillingUsageStatsColumn{OPERATION_COUNT OPERATION_COUNT_PROVIDED_EXPLICITLY SCHEMA_TAG TIMESTAMP}type ServiceBillingUsageStatsDimensions{operationCountProvidedExplicitly:String schemaTag:String}"""Filter for data in ServiceBillingUsageStats. Fields with dimension names represent equality checks. 
All fields are implicitly ANDed together.""" input ServiceBillingUsageStatsFilter{and:[ServiceBillingUsageStatsFilter!]in:ServiceBillingUsageStatsFilterIn not:ServiceBillingUsageStatsFilter """Selects rows whose operationCountProvidedExplicitly dimension equals the given value if not null. To query for the null value, use {in: {operationCountProvidedExplicitly: [null]}} instead.""" operationCountProvidedExplicitly:String or:[ServiceBillingUsageStatsFilter!]"""Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String}"""Filter for data in ServiceBillingUsageStats. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input ServiceBillingUsageStatsFilterIn{"""Selects rows whose operationCountProvidedExplicitly dimension is in the given list. A null value in the list means a row with null for that dimension.""" operationCountProvidedExplicitly:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]}type ServiceBillingUsageStatsMetrics{operationCount:Long!}input ServiceBillingUsageStatsOrderBySpec{column:ServiceBillingUsageStatsColumn!direction:Ordering!}type ServiceBillingUsageStatsRecord{"""Dimensions of ServiceBillingUsageStats that can be grouped by.""" groupBy:ServiceBillingUsageStatsDimensions!"""Metrics of ServiceBillingUsageStats that can be aggregated over.""" metrics:ServiceBillingUsageStatsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of ServiceEdgeServerInfos.""" enum ServiceEdgeServerInfosColumn{BOOT_ID EXECUTABLE_SCHEMA_ID LIBRARY_VERSION PLATFORM RUNTIME_VERSION SCHEMA_TAG SERVER_ID TIMESTAMP USER_VERSION}type ServiceEdgeServerInfosDimensions{bootId:ID executableSchemaId:ID libraryVersion:String platform:String runtimeVersion:String schemaTag:String serverId:ID userVersion:String}"""Filter for data in ServiceEdgeServerInfos. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input ServiceEdgeServerInfosFilter{and:[ServiceEdgeServerInfosFilter!]"""Selects rows whose bootId dimension equals the given value if not null. To query for the null value, use {in: {bootId: [null]}} instead.""" bootId:ID """Selects rows whose executableSchemaId dimension equals the given value if not null. To query for the null value, use {in: {executableSchemaId: [null]}} instead.""" executableSchemaId:ID in:ServiceEdgeServerInfosFilterIn """Selects rows whose libraryVersion dimension equals the given value if not null. To query for the null value, use {in: {libraryVersion: [null]}} instead.""" libraryVersion:String not:ServiceEdgeServerInfosFilter or:[ServiceEdgeServerInfosFilter!]"""Selects rows whose platform dimension equals the given value if not null. To query for the null value, use {in: {platform: [null]}} instead.""" platform:String """Selects rows whose runtimeVersion dimension equals the given value if not null. To query for the null value, use {in: {runtimeVersion: [null]}} instead.""" runtimeVersion:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serverId dimension equals the given value if not null. 
To query for the null value, use {in: {serverId: [null]}} instead.""" serverId:ID """Selects rows whose userVersion dimension equals the given value if not null. To query for the null value, use {in: {userVersion: [null]}} instead.""" userVersion:String}"""Filter for data in ServiceEdgeServerInfos. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input ServiceEdgeServerInfosFilterIn{"""Selects rows whose bootId dimension is in the given list. A null value in the list means a row with null for that dimension.""" bootId:[ID]"""Selects rows whose executableSchemaId dimension is in the given list. A null value in the list means a row with null for that dimension.""" executableSchemaId:[ID]"""Selects rows whose libraryVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" libraryVersion:[String]"""Selects rows whose platform dimension is in the given list. A null value in the list means a row with null for that dimension.""" platform:[String]"""Selects rows whose runtimeVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" runtimeVersion:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serverId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serverId:[ID]"""Selects rows whose userVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" userVersion:[String]}input ServiceEdgeServerInfosOrderBySpec{column:ServiceEdgeServerInfosColumn!direction:Ordering!}type ServiceEdgeServerInfosRecord{"""Dimensions of ServiceEdgeServerInfos that can be grouped by.""" groupBy:ServiceEdgeServerInfosDimensions!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of ServiceErrorStats.""" enum ServiceErrorStatsColumn{CLIENT_NAME CLIENT_VERSION ERRORS_COUNT PATH QUERY_ID QUERY_NAME REQUESTS_WITH_ERRORS_COUNT SCHEMA_HASH SCHEMA_TAG TIMESTAMP}type ServiceErrorStatsDimensions{clientName:String clientVersion:String path:String queryId:ID queryName:String schemaHash:String schemaTag:String}"""Filter for data in ServiceErrorStats. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input ServiceErrorStatsFilter{and:[ServiceErrorStatsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String in:ServiceErrorStatsFilterIn not:ServiceErrorStatsFilter or:[ServiceErrorStatsFilter!]"""Selects rows whose path dimension equals the given value if not null. To query for the null value, use {in: {path: [null]}} instead.""" path:String """Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. 
To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String}"""Filter for data in ServiceErrorStats. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input ServiceErrorStatsFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose path dimension is in the given list. A null value in the list means a row with null for that dimension.""" path:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]}type ServiceErrorStatsMetrics{errorsCount:Long!requestsWithErrorsCount:Long!}input ServiceErrorStatsOrderBySpec{column:ServiceErrorStatsColumn!direction:Ordering!}type ServiceErrorStatsRecord{"""Dimensions of ServiceErrorStats that can be grouped by.""" groupBy:ServiceErrorStatsDimensions!"""Metrics of ServiceErrorStats that can be aggregated over.""" metrics:ServiceErrorStatsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of ServiceFieldExecutions.""" enum ServiceFieldExecutionsColumn{ESTIMATED_EXECUTION_COUNT FIELD_NAME OBSERVED_EXECUTION_COUNT PARENT_TYPE REFERENCING_OPERATION_COUNT SCHEMA_TAG TIMESTAMP}type ServiceFieldExecutionsDimensions{fieldName:String parentType:String schemaTag:String}"""Filter for data in ServiceFieldExecutions. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input ServiceFieldExecutionsFilter{and:[ServiceFieldExecutionsFilter!]"""Selects rows whose fieldName dimension equals the given value if not null. To query for the null value, use {in: {fieldName: [null]}} instead.""" fieldName:String in:ServiceFieldExecutionsFilterIn not:ServiceFieldExecutionsFilter or:[ServiceFieldExecutionsFilter!]"""Selects rows whose parentType dimension equals the given value if not null. To query for the null value, use {in: {parentType: [null]}} instead.""" parentType:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String}"""Filter for data in ServiceFieldExecutions. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input ServiceFieldExecutionsFilterIn{"""Selects rows whose fieldName dimension is in the given list. A null value in the list means a row with null for that dimension.""" fieldName:[String]"""Selects rows whose parentType dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" parentType:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]}type ServiceFieldExecutionsMetrics{estimatedExecutionCount:Long!observedExecutionCount:Long!referencingOperationCount:Long!}input ServiceFieldExecutionsOrderBySpec{column:ServiceFieldExecutionsColumn!direction:Ordering!}type ServiceFieldExecutionsRecord{"""Dimensions of ServiceFieldExecutions that can be grouped by.""" groupBy:ServiceFieldExecutionsDimensions!"""Metrics of ServiceFieldExecutions that can be aggregated over.""" metrics:ServiceFieldExecutionsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of ServiceFieldLatencies.""" enum ServiceFieldLatenciesColumn{FIELD_HISTOGRAM FIELD_NAME PARENT_TYPE SCHEMA_HASH SCHEMA_TAG TIMESTAMP}type ServiceFieldLatenciesDimensions{field:String fieldName:String parentType:String schemaHash:String schemaTag:String}"""Filter for data in ServiceFieldLatencies. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input ServiceFieldLatenciesFilter{and:[ServiceFieldLatenciesFilter!]"""Selects rows whose fieldName dimension equals the given value if not null. To query for the null value, use {in: {fieldName: [null]}} instead.""" fieldName:String in:ServiceFieldLatenciesFilterIn not:ServiceFieldLatenciesFilter or:[ServiceFieldLatenciesFilter!]"""Selects rows whose parentType dimension equals the given value if not null. To query for the null value, use {in: {parentType: [null]}} instead.""" parentType:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String}"""Filter for data in ServiceFieldLatencies. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input ServiceFieldLatenciesFilterIn{"""Selects rows whose fieldName dimension is in the given list. A null value in the list means a row with null for that dimension.""" fieldName:[String]"""Selects rows whose parentType dimension is in the given list. A null value in the list means a row with null for that dimension.""" parentType:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" schemaTag:[String]}type ServiceFieldLatenciesMetrics{fieldHistogram:DurationHistogram!}input ServiceFieldLatenciesOrderBySpec{column:ServiceFieldLatenciesColumn!direction:Ordering!}type ServiceFieldLatenciesRecord{"""Dimensions of ServiceFieldLatencies that can be grouped by.""" groupBy:ServiceFieldLatenciesDimensions!"""Metrics of ServiceFieldLatencies that can be aggregated over.""" metrics:ServiceFieldLatenciesMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of ServiceFieldRequestsByClientVersion.""" enum ServiceFieldRequestsByClientVersionColumn{CLIENT_NAME CLIENT_VERSION ESTIMATED_EXECUTION_COUNT FIELD_NAME OBSERVED_EXECUTION_COUNT PARENT_TYPE REFERENCING_OPERATION_COUNT SCHEMA_TAG TIMESTAMP}type ServiceFieldRequestsByClientVersionDimensions{clientName:String clientVersion:String fieldName:String parentType:String schemaTag:String}"""Filter for data in ServiceFieldRequestsByClientVersion. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input ServiceFieldRequestsByClientVersionFilter{and:[ServiceFieldRequestsByClientVersionFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose fieldName dimension equals the given value if not null. To query for the null value, use {in: {fieldName: [null]}} instead.""" fieldName:String in:ServiceFieldRequestsByClientVersionFilterIn not:ServiceFieldRequestsByClientVersionFilter or:[ServiceFieldRequestsByClientVersionFilter!]"""Selects rows whose parentType dimension equals the given value if not null. To query for the null value, use {in: {parentType: [null]}} instead.""" parentType:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String}"""Filter for data in ServiceFieldRequestsByClientVersion. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input ServiceFieldRequestsByClientVersionFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose fieldName dimension is in the given list. A null value in the list means a row with null for that dimension.""" fieldName:[String]"""Selects rows whose parentType dimension is in the given list. A null value in the list means a row with null for that dimension.""" parentType:[String]"""Selects rows whose schemaTag dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" schemaTag:[String]}type ServiceFieldRequestsByClientVersionMetrics{estimatedExecutionCount:Long!observedExecutionCount:Long!referencingOperationCount:Long!}input ServiceFieldRequestsByClientVersionOrderBySpec{column:ServiceFieldRequestsByClientVersionColumn!direction:Ordering!}type ServiceFieldRequestsByClientVersionRecord{"""Dimensions of ServiceFieldRequestsByClientVersion that can be grouped by.""" groupBy:ServiceFieldRequestsByClientVersionDimensions!"""Metrics of ServiceFieldRequestsByClientVersion that can be aggregated over.""" metrics:ServiceFieldRequestsByClientVersionMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of ServiceFieldUsage.""" enum ServiceFieldUsageColumn{CLIENT_NAME CLIENT_VERSION ESTIMATED_EXECUTION_COUNT EXECUTION_COUNT FIELD_NAME PARENT_TYPE QUERY_ID QUERY_NAME REFERENCING_OPERATION_COUNT SCHEMA_HASH SCHEMA_TAG TIMESTAMP}type ServiceFieldUsageDimensions{clientName:String clientVersion:String fieldName:String parentType:String queryId:ID queryName:String schemaHash:String schemaTag:String}"""Filter for data in ServiceFieldUsage. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input ServiceFieldUsageFilter{and:[ServiceFieldUsageFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose fieldName dimension equals the given value if not null. To query for the null value, use {in: {fieldName: [null]}} instead.""" fieldName:String in:ServiceFieldUsageFilterIn not:ServiceFieldUsageFilter or:[ServiceFieldUsageFilter!]"""Selects rows whose parentType dimension equals the given value if not null. To query for the null value, use {in: {parentType: [null]}} instead.""" parentType:String """Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String}"""Filter for data in ServiceFieldUsage. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input ServiceFieldUsageFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose fieldName dimension is in the given list. A null value in the list means a row with null for that dimension.""" fieldName:[String]"""Selects rows whose parentType dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" parentType:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]}type ServiceFieldUsageMetrics{estimatedExecutionCount:Long!executionCount:Long!referencingOperationCount:Long!}input ServiceFieldUsageOrderBySpec{column:ServiceFieldUsageColumn!direction:Ordering!}type ServiceFieldUsageRecord{"""Dimensions of ServiceFieldUsage that can be grouped by.""" groupBy:ServiceFieldUsageDimensions!"""Metrics of ServiceFieldUsage that can be aggregated over.""" metrics:ServiceFieldUsageMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Mutations to a graph.""" type ServiceMutation{"""Check a proposed subgraph schema change. If the proposal composes successfully, perform a usage check for the resulting schema.""" checkPartialSchema("""Deprecated and ignored.""" frontend:String gitContext:GitContextInput """Specifies which variant of a graph this mutation operates on.""" graphVariant:String!historicParameters:HistoricQueryParameters """Name of the implementing service to validate the partial schema against""" implementingServiceName:String!"""If this check is triggered for an sdl fetched using introspection, this is the endpoint where that schema was being served.""" introspectionEndpoint:String isSandboxCheck:Boolean!=false """The partial schema to validate against an implementing service""" partialSchema:PartialSchemaInput!"""Whether to use the maximum retention for historical validation. This only takes effect if historicParameters is null.""" useMaximumRetention:Boolean):CheckPartialSchemaResult!"""Checks a proposed schema against the schema that has been published to a particular variant, using metrics corresponding to `historicParameters`. @@ -131,10 +131,10 @@ changes affecting it will be tracked, but won't affect the outcome of the check. Returns true if the operation is newly ignored, false if it already was.""" ignoreOperationsInChecks(ids:[ID!]!):IgnoreOperationsInChecksResult """Mark the changeset that affects an operation in a given check instance as safe. 
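As a minimal sketch of how the ServiceFieldUsage filter semantics above compose: equality fields and the `in` field are ANDed together, and a null entry in an `in` list matches rows whose dimension is null. Only the filter, limit, groupBy, metrics and timestamp shapes come from the definitions in this hunk; the `service(id:)` and `statsWindow(from:, to:)` entry points are assumed for illustration and do not appear here.

# Illustrative sketch; the entry points above fieldUsage are assumed.
query FieldUsageSketch {
  service(id: "my-graph") {                  # assumed entry point
    statsWindow(from: "-86400", to: "0") {   # assumed arguments
      fieldUsage(
        # equality check ANDed with an `in` check; null matches rows with a null schemaTag
        filter: { clientName: "web", in: { schemaTag: [null, "current"] } }
        limit: 100
      ) {
        groupBy { fieldName parentType clientVersion }
        metrics { estimatedExecutionCount executionCount referencingOperationCount }
        timestamp
      }
    }
  }
}

Leaving orderBy unset falls back to sorting by ascending timestamp, per the ServiceStatsWindow docstrings further below.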
-Note that only operations marked as behavior changes are allowed to be marked as safe.""" markChangesForOperationAsSafe("""ID of the schema check.""" checkID:ID!"""ID of the operation to accept changes for.""" operationID:ID!):MarkChangesForOperationAsSafeResult!newKey(keyName:String role:UserPermission!=GRAPH_ADMIN):GraphApiKey!"""Adds an override to the given users permission for this graph""" overrideUserPermission(permission:UserPermission userID:ID!):Service """Returns a preview of the Core and API schema contracts derived from a source variant and a set of filter configurations""" previewContractVariant(filterConfig:FilterConfigInput!sourceVariant:String!):ContractVariantPreviewResult!@deprecated(reason:"use GraphVariant.contractPreview instead")"""Promote the schema with the given SHA-256 hash to active for the given variant/tag.""" promoteSchema(graphVariant:String!historicParameters:HistoricQueryParameters overrideComposedSchema:Boolean!=false sha256:SHA256!):PromoteSchemaResponseOrError!"""Publish to a subgraph. If composition is successful, this will update running routers.""" publishSubgraph(activePartialSchema:PartialSchemaInput!gitContext:GitContextInput graphVariant:String!name:String!revision:String!url:String):CompositionAndUpsertResult registerOperationsWithResponse(clientIdentity:RegisteredClientIdentityInput gitContext:GitContextInput """Specifies which variant of a graph these operations belong to. +Note that only operations marked as behavior changes are allowed to be marked as safe.""" markChangesForOperationAsSafe("""ID of the schema check.""" checkID:ID!"""ID of the operation to accept changes for.""" operationID:ID!):MarkChangesForOperationAsSafeResult!newKey(keyName:String role:UserPermission!=GRAPH_ADMIN):GraphApiKey!"""Adds an override to the given users permission for this graph""" overrideUserPermission(permission:UserPermission userID:ID!):Service """Promote the schema with the given SHA-256 hash to active for the given variant/tag.""" promoteSchema(graphVariant:String!historicParameters:HistoricQueryParameters overrideComposedSchema:Boolean!=false sha256:SHA256!):PromoteSchemaResponseOrError!"""Publish to a subgraph. If composition is successful, this will update running routers.""" publishSubgraph(activePartialSchema:PartialSchemaInput!gitContext:GitContextInput graphVariant:String!name:String!revision:String!url:String):CompositionAndUpsertResult registerOperationsWithResponse(clientIdentity:RegisteredClientIdentityInput gitContext:GitContextInput """Specifies which variant of a graph these operations belong to. Formerly known as "tag" Defaults to "current" -""" graphVariant:String!="current" manifestVersion:Int operations:[RegisteredOperationInput!]!):RegisterOperationsMutationResponse """Removes a subgraph. If composition is successful, this will update running routers.""" removeImplementingServiceAndTriggerComposition("""Do not remove the service, but recompose without it and report any errors.""" dryRun:Boolean!=false graphVariant:String!name:String!):CompositionAndRemoveResult!removeKey("""API key ID""" id:ID):Void renameKey(id:ID!newKeyName:String):GraphApiKey reportServerInfo("""Only sent if previously requested i.e. received ReportServerInfoResult with withExecutableSchema = true. An executable schema is a schema document that describes the full GraphQL schema that an external client could execute queries against. 
This must be a valid GraphQL schema document, as per the GraphQL specification: https://spec.graphql.org/""" executableSchema:String """Information about the edge server, see descriptions for individual fields.""" info:EdgeServerInfo!):ReportServerInfoResult@deprecated(reason:"use Mutation.reportSchema instead")service:Service!"""Store a given schema document. This schema will be attached to the graph but +""" graphVariant:String!="current" manifestVersion:Int operations:[RegisteredOperationInput!]!):RegisterOperationsMutationResponse """Removes a subgraph. If composition is successful, this will update running routers.""" removeImplementingServiceAndTriggerComposition("""Do not remove the service, but recompose without it and report any errors.""" dryRun:Boolean!=false graphVariant:String!name:String!):CompositionAndRemoveResult!removeKey("""API key ID""" id:ID):Void renameKey(id:ID!newKeyName:String):GraphApiKey reportServerInfo("""Only sent if previously requested i.e. received ReportServerInfoResult with withExecutableSchema = true. An executable schema is a schema document that describes the full GraphQL schema that an external client could execute queries against. This must be a valid GraphQL schema document, as per the GraphQL specification: https://spec.graphql.org/""" executableSchema:String """Information about the edge server, see descriptions for individual fields.""" info:EdgeServerInfo!):ReportServerInfoResult@deprecated(reason:"use Mutation.reportSchema instead")service:Service!setDefaultBuildPipelineTrack(version:String!):String """Store a given schema document. This schema will be attached to the graph but not be associated with any variant. On success, returns the schema hash.""" storeSchemaDocument(schemaDocument:String!):StoreSchemaResponseOrError!"""Test Slack notification channel""" testSlackChannel(id:ID!notification:SlackNotificationInput!):Void testSubscriptionForChannel(channelID:ID!subscriptionID:ID!):String!transfer(to:String!):Service triggerRepublish(graphVariant:String!):Void undelete:Service """Revert the effects of ignoreOperation. Returns true if the operation is no longer ignored, false if it wasn't.""" unignoreOperationsInChecks(ids:[ID!]!):UnignoreOperationsInChecksResult """Unmark changes for an operation as safe.""" unmarkChangesForOperationAsSafe("""ID of the schema check.""" checkID:ID!"""ID of the operation to unmark changes for.""" operationID:ID!):MarkChangesForOperationAsSafeResult!"""Update schema check configuration for a graph.""" updateCheckConfiguration("""Clients to ignore during validation.""" excludedClients:[ClientFilterInput!]"""Operation names to ignore during validation.""" excludedOperationNames:[OperationNameFilterInput!]"""Operations to ignore during validation.""" excludedOperations:[ExcludedOperationInput!]"""Default configuration to include operations on the base variant.""" includeBaseVariant:Boolean """Variant overrides for validation.""" includedVariants:[String!]"""Minimum number of requests within the window for a query to be considered.""" operationCountThreshold:Int """Number of requests within the window for a query to be considered, relative to @@ -143,13 +143,13 @@ total request volume)""" operationCountThresholdPercentage:Float """Only check o Run composition with the Implementing Service's partial schema replaced with the one provided in the mutation's input. Store the composed schema, return the hash of the composed schema, and any warnings and errors pertaining to composition. 
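In the same spirit, a hedged sketch of invoking one of the ServiceMutation fields above. The removeImplementingServiceAndTriggerComposition arguments match the declaration in this hunk; the `service(id:)` wrapper on the mutation root is assumed, and only __typename is selected because the fields of CompositionAndRemoveResult are not shown here.

# Illustrative sketch; the mutation-root wrapper is assumed.
mutation DryRunRemoveSubgraph {
  service(id: "my-graph") {                  # assumed entry point
    # dryRun: recompose without the subgraph and report any errors,
    # without actually removing it (per the docstring above).
    removeImplementingServiceAndTriggerComposition(
      dryRun: true
      graphVariant: "current"
      name: "products"
    ) {
      __typename                             # result fields not shown in this hunk
    }
  }
}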
-This mutation will not run validation against operations.""" validatePartialSchemaOfImplementingServiceAgainstGraph(graphVariant:String!implementingServiceName:String!partialSchema:PartialSchemaInput!):CompositionValidationResult!"""Make changes to a graph variant.""" variant(name:String!):GraphVariantMutation}"""Columns of ServiceOperationCheckStats.""" enum ServiceOperationCheckStatsColumn{CACHED_REQUESTS_COUNT CLIENT_NAME CLIENT_VERSION QUERY_ID QUERY_NAME SCHEMA_TAG TIMESTAMP UNCACHED_REQUESTS_COUNT}type ServiceOperationCheckStatsDimensions{clientName:String clientVersion:String queryId:ID queryName:String schemaTag:String}"""Filter for data in ServiceOperationCheckStats. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input ServiceOperationCheckStatsFilter{and:[ServiceOperationCheckStatsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String in:ServiceOperationCheckStatsFilterIn not:ServiceOperationCheckStatsFilter or:[ServiceOperationCheckStatsFilter!]"""Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String}"""Filter for data in ServiceOperationCheckStats. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input ServiceOperationCheckStatsFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaTag dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" schemaTag:[String]}type ServiceOperationCheckStatsMetrics{cachedRequestsCount:Long!uncachedRequestsCount:Long!}input ServiceOperationCheckStatsOrderBySpec{column:ServiceOperationCheckStatsColumn!direction:Ordering!}type ServiceOperationCheckStatsRecord{"""Dimensions of ServiceOperationCheckStats that can be grouped by.""" groupBy:ServiceOperationCheckStatsDimensions!"""Metrics of ServiceOperationCheckStats that can be aggregated over.""" metrics:ServiceOperationCheckStatsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of ServiceQueryStats.""" enum ServiceQueryStatsColumn{CACHED_HISTOGRAM CACHED_REQUESTS_COUNT CACHE_TTL_HISTOGRAM CLIENT_NAME CLIENT_VERSION FORBIDDEN_OPERATION_COUNT FROM_ENGINEPROXY QUERY_ID QUERY_NAME REGISTERED_OPERATION_COUNT REQUESTS_WITH_ERRORS_COUNT SCHEMA_HASH SCHEMA_TAG TIMESTAMP UNCACHED_HISTOGRAM UNCACHED_REQUESTS_COUNT}type ServiceQueryStatsDimensions{clientName:String clientVersion:String fromEngineproxy:String queryId:ID queryName:String querySignature:String schemaHash:String schemaTag:String}"""Filter for data in ServiceQueryStats. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input ServiceQueryStatsFilter{and:[ServiceQueryStatsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose fromEngineproxy dimension equals the given value if not null. To query for the null value, use {in: {fromEngineproxy: [null]}} instead.""" fromEngineproxy:String in:ServiceQueryStatsFilterIn not:ServiceQueryStatsFilter or:[ServiceQueryStatsFilter!]"""Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String}"""Filter for data in ServiceQueryStats. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input ServiceQueryStatsFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose fromEngineproxy dimension is in the given list. A null value in the list means a row with null for that dimension.""" fromEngineproxy:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]}type ServiceQueryStatsMetrics{cacheTtlHistogram:DurationHistogram!cachedHistogram:DurationHistogram!cachedRequestsCount:Long!forbiddenOperationCount:Long!registeredOperationCount:Long!requestsWithErrorsCount:Long!totalLatencyHistogram:DurationHistogram!totalRequestCount:Long!uncachedHistogram:DurationHistogram!uncachedRequestsCount:Long!}input ServiceQueryStatsOrderBySpec{column:ServiceQueryStatsColumn!direction:Ordering!}type ServiceQueryStatsRecord{"""Dimensions of ServiceQueryStats that can be grouped by.""" groupBy:ServiceQueryStatsDimensions!"""Metrics of ServiceQueryStats that can be aggregated over.""" metrics:ServiceQueryStatsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""A map from role (permission) String to boolean that the current user is allowed for a particular graph.""" type ServiceRoles{"""Whether the currently authenticated user is permitted to perform schema checks (i.e. run `rover (sub)graph check`).""" canCheckSchemas:Boolean!"""Whether the currently authenticated user is permitted to create new graph variants.""" canCreateVariants:Boolean!"""Whether the currently authenticated user is permitted to delete the graph in question""" canDelete:Boolean!"""Whether the currently authenticated user is permitted to manage user access to the graph in question.""" canManageAccess:Boolean!"""Whether the currently authenticated user is permitted to manage the build configuration (e.g. build pipeline version).""" canManageBuildConfig:Boolean!"""Whether the currently authenticated user is permitted to manage 3rd party integrations (e.g. Datadog forwarding).""" canManageIntegrations:Boolean!"""Whether the currently authenticated user is permitted to manage graph-level API keys.""" canManageKeys:Boolean!"""Whether the currently authenticated user is permitted to perform basic administration (e.g. set to public) over variants.""" canManageVariants:Boolean!"""Whether the currently authenticated user is permitted to view details about the build configuration (e.g. build pipeline version).""" canQueryBuildConfig:Boolean!"""Whether the currently authenticated user is permitted to view details of the check configuration for this graph.""" canQueryCheckConfiguration:Boolean!canQueryDeletedImplementingServices:Boolean!"""Whether the currently authenticated user is permitted to view which subgraphs the graph is composed of.""" canQueryImplementingServices:Boolean!canQueryIntegrations:Boolean!canQueryPrivateInfo:Boolean!canQueryPublicInfo:Boolean!canQueryReadmeAuthor:Boolean!canQueryRoleOverrides:Boolean!"""Whether the currently authenticated user is permitted to download schemas owned by this graph.""" canQuerySchemas:Boolean!canQueryStats:Boolean!canQueryTokens:Boolean!canQueryTraces:Boolean!"""Whether the currently authenticated user is permitted to register operations (i.e. 
`apollo client:push`) for this graph.""" canRegisterOperations:Boolean!canStoreSchemasWithoutVariant:Boolean!canUndelete:Boolean!canUpdateAvatar:Boolean!canUpdateDescription:Boolean!canUpdateTitle:Boolean!canVisualizeStats:Boolean!@deprecated(reason:"Replaced with canQueryTraces and canQueryStats")"""Whether the currently authenticated user is permitted to make updates to the check configuration for this graph.""" canWriteCheckConfiguration:Boolean!canWriteTraces:Boolean!@deprecated(reason:"Never worked, not replaced")}"""A time window with a specified granularity over a given service.""" type ServiceStatsWindow{billingUsageStats("""Filter to select what rows to return.""" filter:ServiceBillingUsageStatsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order ServiceBillingUsageStats by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[ServiceBillingUsageStatsOrderBySpec!]):[ServiceBillingUsageStatsRecord!]!edgeServerInfos("""Filter to select what rows to return.""" filter:ServiceEdgeServerInfosFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order ServiceEdgeServerInfos by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[ServiceEdgeServerInfosOrderBySpec!]):[ServiceEdgeServerInfosRecord!]!errorStats("""Filter to select what rows to return.""" filter:ServiceErrorStatsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order ServiceErrorStats by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[ServiceErrorStatsOrderBySpec!]):[ServiceErrorStatsRecord!]!fieldLatencies("""Filter to select what rows to return.""" filter:ServiceFieldLatenciesFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order ServiceFieldLatencies by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[ServiceFieldLatenciesOrderBySpec!]):[ServiceFieldLatenciesRecord!]!fieldStats("""Filter to select what rows to return.""" filter:ServiceFieldLatenciesFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order ServiceFieldLatencies by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[ServiceFieldLatenciesOrderBySpec!]):[ServiceFieldLatenciesRecord!]!fieldUsage("""Filter to select what rows to return.""" filter:ServiceFieldUsageFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order ServiceFieldUsage by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. 
When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[ServiceFieldUsageOrderBySpec!]):[ServiceFieldUsageRecord!]!operationCheckStats("""Filter to select what rows to return.""" filter:ServiceOperationCheckStatsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order ServiceOperationCheckStats by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[ServiceOperationCheckStatsOrderBySpec!]):[ServiceOperationCheckStatsRecord!]!queryStats("""Filter to select what rows to return.""" filter:ServiceQueryStatsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order ServiceQueryStats by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[ServiceQueryStatsOrderBySpec!]):[ServiceQueryStatsRecord!]!"""From field rounded down to the nearest resolution.""" roundedDownFrom:Timestamp!"""To field rounded up to the nearest resolution.""" roundedUpTo:Timestamp!tracePathErrorsRefs("""Filter to select what rows to return.""" filter:ServiceTracePathErrorsRefsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order ServiceTracePathErrorsRefs by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[ServiceTracePathErrorsRefsOrderBySpec!]):[ServiceTracePathErrorsRefsRecord!]!traceRefs("""Filter to select what rows to return.""" filter:ServiceTraceRefsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order ServiceTraceRefs by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[ServiceTraceRefsOrderBySpec!]):[ServiceTraceRefsRecord!]!}"""Columns of ServiceTracePathErrorsRefs.""" enum ServiceTracePathErrorsRefsColumn{CLIENT_NAME CLIENT_VERSION DURATION_BUCKET ERRORS_COUNT_IN_PATH ERRORS_COUNT_IN_TRACE ERROR_MESSAGE PATH QUERY_ID QUERY_NAME SCHEMA_HASH SCHEMA_TAG TIMESTAMP TRACE_HTTP_STATUS_CODE TRACE_ID TRACE_SIZE_BYTES TRACE_STARTS_AT}type ServiceTracePathErrorsRefsDimensions{clientName:String clientVersion:String durationBucket:Int errorMessage:String path:String queryId:ID queryName:String schemaHash:String schemaTag:String traceHttpStatusCode:Int traceId:ID traceStartsAt:Timestamp}"""Filter for data in ServiceTracePathErrorsRefs. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input ServiceTracePathErrorsRefsFilter{and:[ServiceTracePathErrorsRefsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose durationBucket dimension equals the given value if not null. 
To query for the null value, use {in: {durationBucket: [null]}} instead.""" durationBucket:Int """Selects rows whose errorMessage dimension equals the given value if not null. To query for the null value, use {in: {errorMessage: [null]}} instead.""" errorMessage:String in:ServiceTracePathErrorsRefsFilterIn not:ServiceTracePathErrorsRefsFilter or:[ServiceTracePathErrorsRefsFilter!]"""Selects rows whose path dimension equals the given value if not null. To query for the null value, use {in: {path: [null]}} instead.""" path:String """Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose traceHttpStatusCode dimension equals the given value if not null. To query for the null value, use {in: {traceHttpStatusCode: [null]}} instead.""" traceHttpStatusCode:Int """Selects rows whose traceId dimension equals the given value if not null. To query for the null value, use {in: {traceId: [null]}} instead.""" traceId:ID}"""Filter for data in ServiceTracePathErrorsRefs. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input ServiceTracePathErrorsRefsFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose durationBucket dimension is in the given list. A null value in the list means a row with null for that dimension.""" durationBucket:[Int]"""Selects rows whose errorMessage dimension is in the given list. A null value in the list means a row with null for that dimension.""" errorMessage:[String]"""Selects rows whose path dimension is in the given list. A null value in the list means a row with null for that dimension.""" path:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose traceHttpStatusCode dimension is in the given list. A null value in the list means a row with null for that dimension.""" traceHttpStatusCode:[Int]"""Selects rows whose traceId dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" traceId:[ID]}type ServiceTracePathErrorsRefsMetrics{errorsCountInPath:Long!errorsCountInTrace:Long!traceSizeBytes:Long!}input ServiceTracePathErrorsRefsOrderBySpec{column:ServiceTracePathErrorsRefsColumn!direction:Ordering!}type ServiceTracePathErrorsRefsRecord{"""Dimensions of ServiceTracePathErrorsRefs that can be grouped by.""" groupBy:ServiceTracePathErrorsRefsDimensions!"""Metrics of ServiceTracePathErrorsRefs that can be aggregated over.""" metrics:ServiceTracePathErrorsRefsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of ServiceTraceRefs.""" enum ServiceTraceRefsColumn{CLIENT_NAME CLIENT_VERSION DURATION_BUCKET DURATION_NS QUERY_ID QUERY_NAME SCHEMA_HASH SCHEMA_TAG TIMESTAMP TRACE_ID TRACE_SIZE_BYTES}type ServiceTraceRefsDimensions{clientName:String clientVersion:String durationBucket:Int queryId:ID queryName:String querySignature:String schemaHash:String schemaTag:String traceId:ID}"""Filter for data in ServiceTraceRefs. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input ServiceTraceRefsFilter{and:[ServiceTraceRefsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose durationBucket dimension equals the given value if not null. To query for the null value, use {in: {durationBucket: [null]}} instead.""" durationBucket:Int in:ServiceTraceRefsFilterIn not:ServiceTraceRefsFilter or:[ServiceTraceRefsFilter!]"""Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose traceId dimension equals the given value if not null. To query for the null value, use {in: {traceId: [null]}} instead.""" traceId:ID}"""Filter for data in ServiceTraceRefs. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input ServiceTraceRefsFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose durationBucket dimension is in the given list. A null value in the list means a row with null for that dimension.""" durationBucket:[Int]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose traceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" traceId:[ID]}type ServiceTraceRefsMetrics{durationNs:Long!traceSizeBytes:Long!}input ServiceTraceRefsOrderBySpec{column:ServiceTraceRefsColumn!direction:Ordering!}type ServiceTraceRefsRecord{"""Dimensions of ServiceTraceRefs that can be grouped by.""" groupBy:ServiceTraceRefsDimensions!"""Metrics of ServiceTraceRefs that can be aggregated over.""" metrics:ServiceTraceRefsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Slack notification channel""" type SlackChannel implements Channel{id:ID!name:String!subscriptions:[ChannelSubscription!]!url:String!}"""Slack notification channel parameters""" input SlackChannelInput{name:String url:String!}input SlackNotificationField{key:String!value:String!}"""Slack notification message""" input SlackNotificationInput{color:String fallback:String!fields:[SlackNotificationField!]iconUrl:String text:String timestamp:Timestamp title:String titleLink:String username:String}type SourceLocation{column:Int!line:Int!}"""A time window with a specified granularity.""" type StatsWindow{billingUsageStats("""Filter to select what rows to return.""" filter:BillingUsageStatsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order BillingUsageStats by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[BillingUsageStatsOrderBySpec!]):[BillingUsageStatsRecord!]!edgeServerInfos("""Filter to select what rows to return.""" filter:EdgeServerInfosFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order EdgeServerInfos by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[EdgeServerInfosOrderBySpec!]):[EdgeServerInfosRecord!]!errorStats("""Filter to select what rows to return.""" filter:ErrorStatsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order ErrorStats by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[ErrorStatsOrderBySpec!]):[ErrorStatsRecord!]!fieldLatencies("""Filter to select what rows to return.""" filter:FieldLatenciesFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order FieldLatencies by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. 
When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[FieldLatenciesOrderBySpec!]):[FieldLatenciesRecord!]!fieldUsage("""Filter to select what rows to return.""" filter:FieldUsageFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order FieldUsage by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[FieldUsageOrderBySpec!]):[FieldUsageRecord!]!operationCheckStats("""Filter to select what rows to return.""" filter:OperationCheckStatsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order OperationCheckStats by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[OperationCheckStatsOrderBySpec!]):[OperationCheckStatsRecord!]!queryStats("""Filter to select what rows to return.""" filter:QueryStatsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order QueryStats by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[QueryStatsOrderBySpec!]):[QueryStatsRecord!]!"""From field rounded down to the nearest resolution.""" roundedDownFrom:Timestamp!"""To field rounded up to the nearest resolution.""" roundedUpTo:Timestamp!tracePathErrorsRefs("""Filter to select what rows to return.""" filter:TracePathErrorsRefsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order TracePathErrorsRefs by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[TracePathErrorsRefsOrderBySpec!]):[TracePathErrorsRefsRecord!]!traceRefs("""Filter to select what rows to return.""" filter:TraceRefsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order TraceRefs by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. 
When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[TraceRefsOrderBySpec!]):[TraceRefsRecord!]!}type StoreSchemaError{code:StoreSchemaErrorCode!message:String!}enum StoreSchemaErrorCode{SCHEMA_IS_NOT_PARSABLE SCHEMA_IS_NOT_VALID}type StoreSchemaResponse{sha256:SHA256!}union StoreSchemaResponseOrError=StoreSchemaError|StoreSchemaResponse type StoredApprovedChange{argNode:NamedIntrospectionArgNoDescription childNode:NamedIntrospectionValueNoDescription code:ChangeCode!parentNode:NamedIntrospectionTypeNoDescription}scalar StringOrInt type StringToString{key:String!value:String!}input StringToStringInput{key:String!value:String!}type Subgraph{hash:String!name:String!routingURL:String!}type SubgraphChange{name:ID!type:SubgraphChangeType!}enum SubgraphChangeType{ADDITION DELETION MODIFICATION}type SubgraphConfig{id:ID!name:String!schemaHash:String!sdl:String!url:String!}type SubscriptionOptions{"""Enables notifications for schema updates""" schemaUpdates:Boolean!}input SubscriptionOptionsInput{"""Enables notifications for schema updates""" schemaUpdates:Boolean!}enum SubscriptionState{ACTIVE CANCELED EXPIRED FUTURE PAST_DUE PAUSED PENDING UNKNOWN}enum SubscriptionStateV2{ACTIVE CANCELED EXPIRED FUTURE PAST_DUE PAUSED PENDING UNKNOWN}type TemporaryURL{url:String!}enum ThemeName{DARK LIGHT}enum TicketPriority{P0 P1 P2 P3}enum TicketStatus{CLOSED HOLD NEW OPEN PENDING SOLVED}"""ISO 8601, extended format with nanoseconds, Zulu (or '[+-]seconds' for times relative to now)""" scalar Timestamp type TimezoneOffset{minutesOffsetFromUTC:Int!zoneID:String!}"""Counts of changes.""" type TotalChangeSummaryCounts{"""Number of changes that are additions. This includes adding types, adding fields to object, input +This mutation will not run validation against operations.""" validatePartialSchemaOfImplementingServiceAgainstGraph(graphVariant:String!implementingServiceName:String!partialSchema:PartialSchemaInput!):CompositionValidationResult!"""Make changes to a graph variant.""" variant(name:String!):GraphVariantMutation}"""Columns of ServiceOperationCheckStats.""" enum ServiceOperationCheckStatsColumn{CACHED_REQUESTS_COUNT CLIENT_NAME CLIENT_VERSION QUERY_ID QUERY_NAME SCHEMA_TAG TIMESTAMP UNCACHED_REQUESTS_COUNT}type ServiceOperationCheckStatsDimensions{clientName:String clientVersion:String queryId:ID queryName:String schemaTag:String}"""Filter for data in ServiceOperationCheckStats. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input ServiceOperationCheckStatsFilter{and:[ServiceOperationCheckStatsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String in:ServiceOperationCheckStatsFilterIn not:ServiceOperationCheckStatsFilter or:[ServiceOperationCheckStatsFilter!]"""Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaTag dimension equals the given value if not null. 
To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String}"""Filter for data in ServiceOperationCheckStats. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input ServiceOperationCheckStatsFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]}type ServiceOperationCheckStatsMetrics{cachedRequestsCount:Long!uncachedRequestsCount:Long!}input ServiceOperationCheckStatsOrderBySpec{column:ServiceOperationCheckStatsColumn!direction:Ordering!}type ServiceOperationCheckStatsRecord{"""Dimensions of ServiceOperationCheckStats that can be grouped by.""" groupBy:ServiceOperationCheckStatsDimensions!"""Metrics of ServiceOperationCheckStats that can be aggregated over.""" metrics:ServiceOperationCheckStatsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of ServiceQueryStats.""" enum ServiceQueryStatsColumn{CACHED_HISTOGRAM CACHED_REQUESTS_COUNT CACHE_TTL_HISTOGRAM CLIENT_NAME CLIENT_VERSION FORBIDDEN_OPERATION_COUNT FROM_ENGINEPROXY QUERY_ID QUERY_NAME REGISTERED_OPERATION_COUNT REQUESTS_WITH_ERRORS_COUNT SCHEMA_HASH SCHEMA_TAG TIMESTAMP UNCACHED_HISTOGRAM UNCACHED_REQUESTS_COUNT}type ServiceQueryStatsDimensions{clientName:String clientVersion:String fromEngineproxy:String queryId:ID queryName:String querySignature:String schemaHash:String schemaTag:String}"""Filter for data in ServiceQueryStats. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input ServiceQueryStatsFilter{and:[ServiceQueryStatsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose fromEngineproxy dimension equals the given value if not null. To query for the null value, use {in: {fromEngineproxy: [null]}} instead.""" fromEngineproxy:String in:ServiceQueryStatsFilterIn not:ServiceQueryStatsFilter or:[ServiceQueryStatsFilter!]"""Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. 
To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String}"""Filter for data in ServiceQueryStats. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input ServiceQueryStatsFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose fromEngineproxy dimension is in the given list. A null value in the list means a row with null for that dimension.""" fromEngineproxy:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]}type ServiceQueryStatsMetrics{cacheTtlHistogram:DurationHistogram!cachedHistogram:DurationHistogram!cachedRequestsCount:Long!forbiddenOperationCount:Long!registeredOperationCount:Long!requestsWithErrorsCount:Long!totalLatencyHistogram:DurationHistogram!totalRequestCount:Long!uncachedHistogram:DurationHistogram!uncachedRequestsCount:Long!}input ServiceQueryStatsOrderBySpec{column:ServiceQueryStatsColumn!direction:Ordering!}type ServiceQueryStatsRecord{"""Dimensions of ServiceQueryStats that can be grouped by.""" groupBy:ServiceQueryStatsDimensions!"""Metrics of ServiceQueryStats that can be aggregated over.""" metrics:ServiceQueryStatsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""A map from role (permission) String to boolean that the current user is allowed for a particular graph.""" type ServiceRoles{"""Whether the currently authenticated user is permitted to perform schema checks (i.e. run `rover (sub)graph check`).""" canCheckSchemas:Boolean!"""Whether the currently authenticated user is permitted to create new graph variants.""" canCreateVariants:Boolean!"""Whether the currently authenticated user is permitted to delete the graph in question""" canDelete:Boolean!"""Whether the currently authenticated user is permitted to manage user access to the graph in question.""" canManageAccess:Boolean!"""Whether the currently authenticated user is permitted to manage the build configuration (e.g. build pipeline version).""" canManageBuildConfig:Boolean!"""Whether the currently authenticated user is permitted to manage 3rd party integrations (e.g. Datadog forwarding).""" canManageIntegrations:Boolean!"""Whether the currently authenticated user is permitted to manage graph-level API keys.""" canManageKeys:Boolean!"""Whether the currently authenticated user is permitted to perform basic administration (e.g. set to public) over variants.""" canManageVariants:Boolean!"""Whether the currently authenticated user is permitted to view details about the build configuration (e.g. 
build pipeline version).""" canQueryBuildConfig:Boolean!"""Whether the currently authenticated user is permitted to view details of the check configuration for this graph.""" canQueryCheckConfiguration:Boolean!canQueryDeletedImplementingServices:Boolean!"""Whether the currently authenticated user is permitted to view which subgraphs the graph is composed of.""" canQueryImplementingServices:Boolean!canQueryIntegrations:Boolean!canQueryPrivateInfo:Boolean!canQueryPublicInfo:Boolean!canQueryReadmeAuthor:Boolean!canQueryRoleOverrides:Boolean!"""Whether the currently authenticated user is permitted to download schemas owned by this graph.""" canQuerySchemas:Boolean!canQueryStats:Boolean!canQueryTokens:Boolean!canQueryTraces:Boolean!"""Whether the currently authenticated user is permitted to register operations (i.e. `apollo client:push`) for this graph.""" canRegisterOperations:Boolean!canStoreSchemasWithoutVariant:Boolean!canUndelete:Boolean!canUpdateAvatar:Boolean!canUpdateDescription:Boolean!canUpdateTitle:Boolean!canVisualizeStats:Boolean!@deprecated(reason:"Replaced with canQueryTraces and canQueryStats")"""Whether the currently authenticated user is permitted to make updates to the check configuration for this graph.""" canWriteCheckConfiguration:Boolean!canWriteTraces:Boolean!@deprecated(reason:"Never worked, not replaced")}"""A time window with a specified granularity over a given service.""" type ServiceStatsWindow{billingUsageStats("""Filter to select what rows to return.""" filter:ServiceBillingUsageStatsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order ServiceBillingUsageStats by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[ServiceBillingUsageStatsOrderBySpec!]):[ServiceBillingUsageStatsRecord!]!edgeServerInfos("""Filter to select what rows to return.""" filter:ServiceEdgeServerInfosFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order ServiceEdgeServerInfos by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[ServiceEdgeServerInfosOrderBySpec!]):[ServiceEdgeServerInfosRecord!]!errorStats("""Filter to select what rows to return.""" filter:ServiceErrorStatsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order ServiceErrorStats by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[ServiceErrorStatsOrderBySpec!]):[ServiceErrorStatsRecord!]!fieldExecutions("""Filter to select what rows to return.""" filter:ServiceFieldExecutionsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order ServiceFieldExecutions by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. 
When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[ServiceFieldExecutionsOrderBySpec!]):[ServiceFieldExecutionsRecord!]!fieldLatencies("""Filter to select what rows to return.""" filter:ServiceFieldLatenciesFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order ServiceFieldLatencies by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[ServiceFieldLatenciesOrderBySpec!]):[ServiceFieldLatenciesRecord!]!fieldRequestsByClientVersion("""Filter to select what rows to return.""" filter:ServiceFieldRequestsByClientVersionFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order ServiceFieldRequestsByClientVersion by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[ServiceFieldRequestsByClientVersionOrderBySpec!]):[ServiceFieldRequestsByClientVersionRecord!]!fieldStats("""Filter to select what rows to return.""" filter:ServiceFieldLatenciesFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order ServiceFieldLatencies by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[ServiceFieldLatenciesOrderBySpec!]):[ServiceFieldLatenciesRecord!]!fieldUsage("""Filter to select what rows to return.""" filter:ServiceFieldUsageFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order ServiceFieldUsage by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[ServiceFieldUsageOrderBySpec!]):[ServiceFieldUsageRecord!]!operationCheckStats("""Filter to select what rows to return.""" filter:ServiceOperationCheckStatsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order ServiceOperationCheckStats by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[ServiceOperationCheckStatsOrderBySpec!]):[ServiceOperationCheckStatsRecord!]!queryStats("""Filter to select what rows to return.""" filter:ServiceQueryStatsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order ServiceQueryStats by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[ServiceQueryStatsOrderBySpec!]):[ServiceQueryStatsRecord!]!"""From field rounded down to the nearest resolution.""" roundedDownFrom:Timestamp!"""To field rounded up to the nearest resolution.""" roundedUpTo:Timestamp!tracePathErrorsRefs("""Filter to select what rows to return.""" filter:ServiceTracePathErrorsRefsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order ServiceTracePathErrorsRefs by. 
The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[ServiceTracePathErrorsRefsOrderBySpec!]):[ServiceTracePathErrorsRefsRecord!]!traceRefs("""Filter to select what rows to return.""" filter:ServiceTraceRefsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order ServiceTraceRefs by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[ServiceTraceRefsOrderBySpec!]):[ServiceTraceRefsRecord!]!}"""Columns of ServiceTracePathErrorsRefs.""" enum ServiceTracePathErrorsRefsColumn{CLIENT_NAME CLIENT_VERSION DURATION_BUCKET ERRORS_COUNT_IN_PATH ERRORS_COUNT_IN_TRACE ERROR_MESSAGE PATH QUERY_ID QUERY_NAME SCHEMA_HASH SCHEMA_TAG TIMESTAMP TRACE_HTTP_STATUS_CODE TRACE_ID TRACE_SIZE_BYTES TRACE_STARTS_AT}type ServiceTracePathErrorsRefsDimensions{clientName:String clientVersion:String durationBucket:Int errorMessage:String path:String queryId:ID queryName:String schemaHash:String schemaTag:String traceHttpStatusCode:Int traceId:ID traceStartsAt:Timestamp}"""Filter for data in ServiceTracePathErrorsRefs. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input ServiceTracePathErrorsRefsFilter{and:[ServiceTracePathErrorsRefsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose durationBucket dimension equals the given value if not null. To query for the null value, use {in: {durationBucket: [null]}} instead.""" durationBucket:Int """Selects rows whose errorMessage dimension equals the given value if not null. To query for the null value, use {in: {errorMessage: [null]}} instead.""" errorMessage:String in:ServiceTracePathErrorsRefsFilterIn not:ServiceTracePathErrorsRefsFilter or:[ServiceTracePathErrorsRefsFilter!]"""Selects rows whose path dimension equals the given value if not null. To query for the null value, use {in: {path: [null]}} instead.""" path:String """Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose traceHttpStatusCode dimension equals the given value if not null. To query for the null value, use {in: {traceHttpStatusCode: [null]}} instead.""" traceHttpStatusCode:Int """Selects rows whose traceId dimension equals the given value if not null. To query for the null value, use {in: {traceId: [null]}} instead.""" traceId:ID}"""Filter for data in ServiceTracePathErrorsRefs. 
Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input ServiceTracePathErrorsRefsFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose durationBucket dimension is in the given list. A null value in the list means a row with null for that dimension.""" durationBucket:[Int]"""Selects rows whose errorMessage dimension is in the given list. A null value in the list means a row with null for that dimension.""" errorMessage:[String]"""Selects rows whose path dimension is in the given list. A null value in the list means a row with null for that dimension.""" path:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose traceHttpStatusCode dimension is in the given list. A null value in the list means a row with null for that dimension.""" traceHttpStatusCode:[Int]"""Selects rows whose traceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" traceId:[ID]}type ServiceTracePathErrorsRefsMetrics{errorsCountInPath:Long!errorsCountInTrace:Long!traceSizeBytes:Long!}input ServiceTracePathErrorsRefsOrderBySpec{column:ServiceTracePathErrorsRefsColumn!direction:Ordering!}type ServiceTracePathErrorsRefsRecord{"""Dimensions of ServiceTracePathErrorsRefs that can be grouped by.""" groupBy:ServiceTracePathErrorsRefsDimensions!"""Metrics of ServiceTracePathErrorsRefs that can be aggregated over.""" metrics:ServiceTracePathErrorsRefsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of ServiceTraceRefs.""" enum ServiceTraceRefsColumn{CLIENT_NAME CLIENT_VERSION DURATION_BUCKET DURATION_NS QUERY_ID QUERY_NAME SCHEMA_HASH SCHEMA_TAG TIMESTAMP TRACE_ID TRACE_SIZE_BYTES}type ServiceTraceRefsDimensions{clientName:String clientVersion:String durationBucket:Int queryId:ID queryName:String querySignature:String schemaHash:String schemaTag:String traceId:ID}"""Filter for data in ServiceTraceRefs. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input ServiceTraceRefsFilter{and:[ServiceTraceRefsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose durationBucket dimension equals the given value if not null. 
To query for the null value, use {in: {durationBucket: [null]}} instead.""" durationBucket:Int in:ServiceTraceRefsFilterIn not:ServiceTraceRefsFilter or:[ServiceTraceRefsFilter!]"""Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose traceId dimension equals the given value if not null. To query for the null value, use {in: {traceId: [null]}} instead.""" traceId:ID}"""Filter for data in ServiceTraceRefs. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input ServiceTraceRefsFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose durationBucket dimension is in the given list. A null value in the list means a row with null for that dimension.""" durationBucket:[Int]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose traceId dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" traceId:[ID]}type ServiceTraceRefsMetrics{durationNs:Long!traceSizeBytes:Long!}input ServiceTraceRefsOrderBySpec{column:ServiceTraceRefsColumn!direction:Ordering!}type ServiceTraceRefsRecord{"""Dimensions of ServiceTraceRefs that can be grouped by.""" groupBy:ServiceTraceRefsDimensions!"""Metrics of ServiceTraceRefs that can be aggregated over.""" metrics:ServiceTraceRefsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Slack notification channel""" type SlackChannel implements Channel{id:ID!name:String!subscriptions:[ChannelSubscription!]!url:String!}"""Slack notification channel parameters""" input SlackChannelInput{name:String url:String!}input SlackNotificationField{key:String!value:String!}"""Slack notification message""" input SlackNotificationInput{color:String fallback:String!fields:[SlackNotificationField!]iconUrl:String text:String timestamp:Timestamp title:String titleLink:String username:String}type SourceLocation{column:Int!line:Int!}"""A time window with a specified granularity.""" type StatsWindow{billingUsageStats("""Filter to select what rows to return.""" filter:BillingUsageStatsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order BillingUsageStats by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[BillingUsageStatsOrderBySpec!]):[BillingUsageStatsRecord!]!edgeServerInfos("""Filter to select what rows to return.""" filter:EdgeServerInfosFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order EdgeServerInfos by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[EdgeServerInfosOrderBySpec!]):[EdgeServerInfosRecord!]!errorStats("""Filter to select what rows to return.""" filter:ErrorStatsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order ErrorStats by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[ErrorStatsOrderBySpec!]):[ErrorStatsRecord!]!fieldExecutions("""Filter to select what rows to return.""" filter:FieldExecutionsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order FieldExecutions by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[FieldExecutionsOrderBySpec!]):[FieldExecutionsRecord!]!fieldLatencies("""Filter to select what rows to return.""" filter:FieldLatenciesFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order FieldLatencies by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. 
When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[FieldLatenciesOrderBySpec!]):[FieldLatenciesRecord!]!fieldRequestsByClientVersion("""Filter to select what rows to return.""" filter:FieldRequestsByClientVersionFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order FieldRequestsByClientVersion by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[FieldRequestsByClientVersionOrderBySpec!]):[FieldRequestsByClientVersionRecord!]!fieldUsage("""Filter to select what rows to return.""" filter:FieldUsageFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order FieldUsage by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[FieldUsageOrderBySpec!]):[FieldUsageRecord!]!operationCheckStats("""Filter to select what rows to return.""" filter:OperationCheckStatsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order OperationCheckStats by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[OperationCheckStatsOrderBySpec!]):[OperationCheckStatsRecord!]!queryStats("""Filter to select what rows to return.""" filter:QueryStatsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order QueryStats by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[QueryStatsOrderBySpec!]):[QueryStatsRecord!]!"""From field rounded down to the nearest resolution.""" roundedDownFrom:Timestamp!"""To field rounded up to the nearest resolution.""" roundedUpTo:Timestamp!tracePathErrorsRefs("""Filter to select what rows to return.""" filter:TracePathErrorsRefsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order TracePathErrorsRefs by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[TracePathErrorsRefsOrderBySpec!]):[TracePathErrorsRefsRecord!]!traceRefs("""Filter to select what rows to return.""" filter:TraceRefsFilter """The maximum number of entries to return, cannot be more than 15000.""" limit:Int=10000 """A list of OrderBySpecs to order TraceRefs by. The earlier an OrderBySpec appears in the list, the higher priority it has in the final ordering. 
When empty or null, defaults to sorting by ascending timestamp.""" orderBy:[TraceRefsOrderBySpec!]):[TraceRefsRecord!]!}type StoreSchemaError{code:StoreSchemaErrorCode!message:String!}enum StoreSchemaErrorCode{SCHEMA_IS_NOT_PARSABLE SCHEMA_IS_NOT_VALID}type StoreSchemaResponse{sha256:SHA256!}union StoreSchemaResponseOrError=StoreSchemaError|StoreSchemaResponse type StoredApprovedChange{argNode:NamedIntrospectionArgNoDescription childNode:NamedIntrospectionValueNoDescription code:ChangeCode!parentNode:NamedIntrospectionTypeNoDescription}scalar StringOrInt type StringToString{key:String!value:String!}input StringToStringInput{key:String!value:String!}type Subgraph{hash:String!name:String!routingURL:String!}type SubgraphChange{name:ID!type:SubgraphChangeType!}enum SubgraphChangeType{ADDITION DELETION MODIFICATION}type SubgraphConfig{id:ID!name:String!schemaHash:String!sdl:String!url:String!}type SubscriptionOptions{"""Enables notifications for schema updates""" schemaUpdates:Boolean!}input SubscriptionOptionsInput{"""Enables notifications for schema updates""" schemaUpdates:Boolean!}enum SubscriptionState{ACTIVE CANCELED EXPIRED FUTURE PAST_DUE PAUSED PENDING UNKNOWN}enum SubscriptionStateV2{ACTIVE CANCELED EXPIRED FUTURE PAST_DUE PAUSED PENDING UNKNOWN}type TemporaryURL{url:String!}enum ThemeName{DARK LIGHT}enum TicketPriority{P0 P1 P2 P3}enum TicketStatus{CLOSED HOLD NEW OPEN PENDING SOLVED}"""ISO 8601, extended format with nanoseconds, Zulu (or '[+-]seconds' for times relative to now)""" scalar Timestamp type TimezoneOffset{minutesOffsetFromUTC:Int!zoneID:String!}"""Counts of changes.""" type TotalChangeSummaryCounts{"""Number of changes that are additions. This includes adding types, adding fields to object, input object, and interface types, adding values to enums, adding members to interfaces and unions, and adding arguments.""" additions:Int!"""Number of changes that are new usages of the @deprecated directive.""" deprecations:Int!"""Number of changes that are edits. This includes types changing kind, fields and arguments changing type, arguments changing default value, and any description changes. This also includes edits to @deprecated reason strings.""" edits:Int!"""Number of changes that are removals. This includes removing types, removing fields from object, input object, and interface types, removing values from enums, removing members from interfaces -and unions, and removing arguments. 
This also includes removing @deprecated usages.""" removals:Int!}type Trace{cacheMaxAgeMs:Float cacheScope:CacheScope clientName:String clientVersion:String durationMs:Float!endTime:Timestamp!http:TraceHTTP id:ID!operationName:String protobuf:Protobuf!root:TraceNode!signature:String!startTime:Timestamp!variablesJSON:[StringToString!]!}type TraceError{json:String!locations:[TraceSourceLocation!]!message:String!timestamp:Timestamp}type TraceHTTP{host:String method:HTTPMethod!path:String protocol:String requestHeaders:[StringToString!]!responseHeaders:[StringToString!]!secure:Boolean!statusCode:Int!}type TraceNode{cacheMaxAgeMs:Float cacheScope:CacheScope children:[TraceNode!]!childrenIds:[ID!]!descendants:[TraceNode!]!descendantsIds:[ID!]!endTime:Timestamp!errors:[TraceError!]!id:ID!key:StringOrInt originalFieldName:String parent:ID!parentId:ID path:[String!]!startTime:Timestamp!type:String}"""Columns of TracePathErrorsRefs.""" enum TracePathErrorsRefsColumn{CLIENT_NAME CLIENT_VERSION DURATION_BUCKET ERRORS_COUNT_IN_PATH ERRORS_COUNT_IN_TRACE ERROR_MESSAGE PATH QUERY_ID QUERY_NAME SCHEMA_HASH SCHEMA_TAG SERVICE_ID TIMESTAMP TRACE_HTTP_STATUS_CODE TRACE_ID TRACE_SIZE_BYTES TRACE_STARTS_AT}type TracePathErrorsRefsDimensions{clientName:String clientVersion:String durationBucket:Int errorMessage:String """If metrics were collected from a federated service, this field will be prefixed with `service:.`""" path:String queryId:ID queryName:String schemaHash:String schemaTag:String serviceId:ID traceHttpStatusCode:Int traceId:ID traceStartsAt:Timestamp}"""Filter for data in TracePathErrorsRefs. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input TracePathErrorsRefsFilter{and:[TracePathErrorsRefsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose durationBucket dimension equals the given value if not null. To query for the null value, use {in: {durationBucket: [null]}} instead.""" durationBucket:Int """Selects rows whose errorMessage dimension equals the given value if not null. To query for the null value, use {in: {errorMessage: [null]}} instead.""" errorMessage:String in:TracePathErrorsRefsFilterIn not:TracePathErrorsRefsFilter or:[TracePathErrorsRefsFilter!]"""Selects rows whose path dimension equals the given value if not null. To query for the null value, use {in: {path: [null]}} instead.""" path:String """Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. 
To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID """Selects rows whose traceHttpStatusCode dimension equals the given value if not null. To query for the null value, use {in: {traceHttpStatusCode: [null]}} instead.""" traceHttpStatusCode:Int """Selects rows whose traceId dimension equals the given value if not null. To query for the null value, use {in: {traceId: [null]}} instead.""" traceId:ID}"""Filter for data in TracePathErrorsRefs. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input TracePathErrorsRefsFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose durationBucket dimension is in the given list. A null value in the list means a row with null for that dimension.""" durationBucket:[Int]"""Selects rows whose errorMessage dimension is in the given list. A null value in the list means a row with null for that dimension.""" errorMessage:[String]"""Selects rows whose path dimension is in the given list. A null value in the list means a row with null for that dimension.""" path:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]"""Selects rows whose traceHttpStatusCode dimension is in the given list. A null value in the list means a row with null for that dimension.""" traceHttpStatusCode:[Int]"""Selects rows whose traceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" traceId:[ID]}type TracePathErrorsRefsMetrics{errorsCountInPath:Long!errorsCountInTrace:Long!traceSizeBytes:Long!}input TracePathErrorsRefsOrderBySpec{column:TracePathErrorsRefsColumn!direction:Ordering!}type TracePathErrorsRefsRecord{"""Dimensions of TracePathErrorsRefs that can be grouped by.""" groupBy:TracePathErrorsRefsDimensions!"""Metrics of TracePathErrorsRefs that can be aggregated over.""" metrics:TracePathErrorsRefsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of TraceRefs.""" enum TraceRefsColumn{CLIENT_NAME CLIENT_VERSION DURATION_BUCKET DURATION_NS QUERY_ID QUERY_NAME SCHEMA_HASH SCHEMA_TAG SERVICE_ID TIMESTAMP TRACE_ID TRACE_SIZE_BYTES}type TraceRefsDimensions{clientName:String clientVersion:String durationBucket:Int queryId:ID queryName:String querySignature:String schemaHash:String schemaTag:String serviceId:ID traceId:ID}"""Filter for data in TraceRefs. Fields with dimension names represent equality checks. 
All fields are implicitly ANDed together.""" input TraceRefsFilter{and:[TraceRefsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose durationBucket dimension equals the given value if not null. To query for the null value, use {in: {durationBucket: [null]}} instead.""" durationBucket:Int in:TraceRefsFilterIn not:TraceRefsFilter or:[TraceRefsFilter!]"""Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID """Selects rows whose traceId dimension equals the given value if not null. To query for the null value, use {in: {traceId: [null]}} instead.""" traceId:ID}"""Filter for data in TraceRefs. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input TraceRefsFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose durationBucket dimension is in the given list. A null value in the list means a row with null for that dimension.""" durationBucket:[Int]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]"""Selects rows whose traceId dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" traceId:[ID]}type TraceRefsMetrics{durationNs:Long!traceSizeBytes:Long!}input TraceRefsOrderBySpec{column:TraceRefsColumn!direction:Ordering!}type TraceRefsRecord{"""Dimensions of TraceRefs that can be grouped by.""" groupBy:TraceRefsDimensions!"""Metrics of TraceRefs that can be aggregated over.""" metrics:TraceRefsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}type TraceSourceLocation{column:Int!line:Int!}"""Counts of changes at the type level, including interfaces, unions, enums, scalars, input objects, etc.""" type TypeChangeSummaryCounts{"""Number of changes that are additions of types.""" additions:Int!"""Number of changes that are edits. This includes types changing kind and any type description +and unions, and removing arguments. This also includes removing @deprecated usages.""" removals:Int!}type Trace{cacheMaxAgeMs:Float cacheScope:CacheScope clientName:String clientVersion:String durationMs:Float!endTime:Timestamp!http:TraceHTTP id:ID!operationName:String protobuf:Protobuf!root:TraceNode!signature:String!startTime:Timestamp!unexecutedOperationBody:String unexecutedOperationName:String variablesJSON:[StringToString!]!}type TraceError{json:String!locations:[TraceSourceLocation!]!message:String!timestamp:Timestamp}type TraceHTTP{host:String method:HTTPMethod!path:String protocol:String requestHeaders:[StringToString!]!responseHeaders:[StringToString!]!secure:Boolean!statusCode:Int!}type TraceNode{cacheMaxAgeMs:Float cacheScope:CacheScope children:[TraceNode!]!childrenIds:[ID!]!descendants:[TraceNode!]!descendantsIds:[ID!]!endTime:Timestamp!errors:[TraceError!]!id:ID!key:StringOrInt originalFieldName:String parent:ID!parentId:ID path:[String!]!startTime:Timestamp!type:String}"""Columns of TracePathErrorsRefs.""" enum TracePathErrorsRefsColumn{CLIENT_NAME CLIENT_VERSION DURATION_BUCKET ERRORS_COUNT_IN_PATH ERRORS_COUNT_IN_TRACE ERROR_MESSAGE PATH QUERY_ID QUERY_NAME SCHEMA_HASH SCHEMA_TAG SERVICE_ID TIMESTAMP TRACE_HTTP_STATUS_CODE TRACE_ID TRACE_SIZE_BYTES TRACE_STARTS_AT}type TracePathErrorsRefsDimensions{clientName:String clientVersion:String durationBucket:Int errorMessage:String """If metrics were collected from a federated service, this field will be prefixed with `service:.`""" path:String queryId:ID queryName:String schemaHash:String schemaTag:String serviceId:ID traceHttpStatusCode:Int traceId:ID traceStartsAt:Timestamp}"""Filter for data in TracePathErrorsRefs. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input TracePathErrorsRefsFilter{and:[TracePathErrorsRefsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose durationBucket dimension equals the given value if not null. To query for the null value, use {in: {durationBucket: [null]}} instead.""" durationBucket:Int """Selects rows whose errorMessage dimension equals the given value if not null. To query for the null value, use {in: {errorMessage: [null]}} instead.""" errorMessage:String in:TracePathErrorsRefsFilterIn not:TracePathErrorsRefsFilter or:[TracePathErrorsRefsFilter!]"""Selects rows whose path dimension equals the given value if not null. 
To query for the null value, use {in: {path: [null]}} instead.""" path:String """Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID """Selects rows whose traceHttpStatusCode dimension equals the given value if not null. To query for the null value, use {in: {traceHttpStatusCode: [null]}} instead.""" traceHttpStatusCode:Int """Selects rows whose traceId dimension equals the given value if not null. To query for the null value, use {in: {traceId: [null]}} instead.""" traceId:ID}"""Filter for data in TracePathErrorsRefs. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input TracePathErrorsRefsFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose durationBucket dimension is in the given list. A null value in the list means a row with null for that dimension.""" durationBucket:[Int]"""Selects rows whose errorMessage dimension is in the given list. A null value in the list means a row with null for that dimension.""" errorMessage:[String]"""Selects rows whose path dimension is in the given list. A null value in the list means a row with null for that dimension.""" path:[String]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]"""Selects rows whose traceHttpStatusCode dimension is in the given list. A null value in the list means a row with null for that dimension.""" traceHttpStatusCode:[Int]"""Selects rows whose traceId dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" traceId:[ID]}type TracePathErrorsRefsMetrics{errorsCountInPath:Long!errorsCountInTrace:Long!traceSizeBytes:Long!}input TracePathErrorsRefsOrderBySpec{column:TracePathErrorsRefsColumn!direction:Ordering!}type TracePathErrorsRefsRecord{"""Dimensions of TracePathErrorsRefs that can be grouped by.""" groupBy:TracePathErrorsRefsDimensions!"""Metrics of TracePathErrorsRefs that can be aggregated over.""" metrics:TracePathErrorsRefsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}"""Columns of TraceRefs.""" enum TraceRefsColumn{CLIENT_NAME CLIENT_VERSION DURATION_BUCKET DURATION_NS QUERY_ID QUERY_NAME SCHEMA_HASH SCHEMA_TAG SERVICE_ID TIMESTAMP TRACE_ID TRACE_SIZE_BYTES}type TraceRefsDimensions{clientName:String clientVersion:String durationBucket:Int queryId:ID queryName:String querySignature:String schemaHash:String schemaTag:String serviceId:ID traceId:ID}"""Filter for data in TraceRefs. Fields with dimension names represent equality checks. All fields are implicitly ANDed together.""" input TraceRefsFilter{and:[TraceRefsFilter!]"""Selects rows whose clientName dimension equals the given value if not null. To query for the null value, use {in: {clientName: [null]}} instead.""" clientName:String """Selects rows whose clientVersion dimension equals the given value if not null. To query for the null value, use {in: {clientVersion: [null]}} instead.""" clientVersion:String """Selects rows whose durationBucket dimension equals the given value if not null. To query for the null value, use {in: {durationBucket: [null]}} instead.""" durationBucket:Int in:TraceRefsFilterIn not:TraceRefsFilter or:[TraceRefsFilter!]"""Selects rows whose queryId dimension equals the given value if not null. To query for the null value, use {in: {queryId: [null]}} instead.""" queryId:ID """Selects rows whose queryName dimension equals the given value if not null. To query for the null value, use {in: {queryName: [null]}} instead.""" queryName:String """Selects rows whose schemaHash dimension equals the given value if not null. To query for the null value, use {in: {schemaHash: [null]}} instead.""" schemaHash:String """Selects rows whose schemaTag dimension equals the given value if not null. To query for the null value, use {in: {schemaTag: [null]}} instead.""" schemaTag:String """Selects rows whose serviceId dimension equals the given value if not null. To query for the null value, use {in: {serviceId: [null]}} instead.""" serviceId:ID """Selects rows whose traceId dimension equals the given value if not null. To query for the null value, use {in: {traceId: [null]}} instead.""" traceId:ID}"""Filter for data in TraceRefs. Fields match if the corresponding dimension's value is in the given list. All fields are implicitly ANDed together.""" input TraceRefsFilterIn{"""Selects rows whose clientName dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientName:[String]"""Selects rows whose clientVersion dimension is in the given list. A null value in the list means a row with null for that dimension.""" clientVersion:[String]"""Selects rows whose durationBucket dimension is in the given list. A null value in the list means a row with null for that dimension.""" durationBucket:[Int]"""Selects rows whose queryId dimension is in the given list. A null value in the list means a row with null for that dimension.""" queryId:[ID]"""Selects rows whose queryName dimension is in the given list. 
A null value in the list means a row with null for that dimension.""" queryName:[String]"""Selects rows whose schemaHash dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaHash:[String]"""Selects rows whose schemaTag dimension is in the given list. A null value in the list means a row with null for that dimension.""" schemaTag:[String]"""Selects rows whose serviceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" serviceId:[ID]"""Selects rows whose traceId dimension is in the given list. A null value in the list means a row with null for that dimension.""" traceId:[ID]}type TraceRefsMetrics{durationNs:Long!traceSizeBytes:Long!}input TraceRefsOrderBySpec{column:TraceRefsColumn!direction:Ordering!}type TraceRefsRecord{"""Dimensions of TraceRefs that can be grouped by.""" groupBy:TraceRefsDimensions!"""Metrics of TraceRefs that can be aggregated over.""" metrics:TraceRefsMetrics!"""Starting segment timestamp.""" timestamp:Timestamp!}type TraceSourceLocation{column:Int!line:Int!}"""Counts of changes at the type level, including interfaces, unions, enums, scalars, input objects, etc.""" type TypeChangeSummaryCounts{"""Number of changes that are additions of types.""" additions:Int!"""Number of changes that are edits. This includes types changing kind and any type description changes, but also includes adding/removing values from enums, adding/removing members from interfaces and unions, and any enum value deprecation and description changes.""" edits:Int!"""Number of changes that are removals of types.""" removals:Int!}"""the TypeFilterConfig is used to isolate types, and subsequent fields, through @@ -164,9 +164,9 @@ MediaUploadInfo.csrfToken.""" avatarUpload:AvatarUploadResult """Get an image UR argument is used for bandwidth reduction, and should be the size of the image as displayed in the application. Apollo's media server will downscale larger images to at least the requested size, but this will not happen for third-party media servers.""" avatarUrl(size:Int!=40):String betaFeaturesOn:Boolean!canUpdateAvatar:Boolean!canUpdateEmail:Boolean!canUpdateFullName:Boolean!createdAt:Timestamp!email:String emailModifiedAt:Timestamp emailVerified:Boolean!experimentalFeatures:UserExperimentalFeatures!featureIntros:FeatureIntros fullName:String!"""The user's GitHub username, if they log in via GitHub. May be null even for GitHub users in some edge cases.""" githubUsername:String """The unique identifier for a user.""" id:ID!"""This role is reserved exclusively for internal MDG employees, and it controls what access they may have to other -organizations. 
Only admins are allowed to see this field.""" internalAdminRole:InternalMdgAdminRole """Last time any API token from this user was used against AGM services""" lastAuthenticatedAt:Timestamp logoutAfterIdleMs:Int """Which organizations a user belongs to.""" memberships:[UserMembership!]!"""The name (first and last) of a user.""" name:String!odysseyCertifications:[OdysseyCertification!]odysseyCourses:[OdysseyCourse!]odysseyHasEarlyAccess:Boolean!odysseyHasRequestedEarlyAccess:Boolean!odysseyTasks:[OdysseyTask!]sandboxOperationCollections:[OperationCollection!]!synchronized:Boolean!"""List of Zendesk tickets this user has submitted""" tickets:[ZendeskTicket!]type:UserType!}type UserApiKey implements ApiKey{id:ID!keyName:String token:String!}type UserExperimentalFeatures{exampleFeature:Boolean!}"""An organization a given user belongs to.""" type UserMembership{"""The organization a user is a member of.""" account:Account!"""When the user joined the organization.""" createdAt:Timestamp!"""What level of access a use has to an organization.""" permission:UserPermission!"""The user that is a member of an organization.""" user:User!}type UserMutation{acceptPrivacyPolicy:Void """Change the user's password""" changePassword(newPassword:String!previousPassword:String!):Void createOdysseyCertification(certificationId:String!):OdysseyCertification createOdysseyCourses(courses:[OdysseyCourseInput!]!):[OdysseyCourse!]createOdysseyTasks(tasks:[OdysseyTaskInput!]!):[OdysseyTask!]"""Delete the user's avatar. Requires User.canUpdateAvatar to be true.""" deleteAvatar:AvatarDeleteError """Hard deletes the associated user. Throws an error otherwise with reason included.""" hardDelete:Void """Create a new API key for this user. Must take in a name for this key.""" newKey(keyName:String!):UserApiKey!"""Create a new API key for this user if there are no current API keys. -If an API key already exists, this will return one at random and not create a new one.""" provisionKey(keyName:String!="add-a-name"):ApiKeyProvision """Refresh information about the user from its upstream service (eg list of organizations from GitHub)""" refresh:User """Removes the given key from this user. Can be used to remove either a web cookie or a user API key.""" removeKey(id:ID!):Void """Renames the given key to the new key name.""" renameKey(id:ID!newKeyName:String):UserApiKey resendVerificationEmail:Void setOdysseyCourse(course:OdysseyCourseInput!):OdysseyCourse setOdysseyTask(task:OdysseyTaskInput!):OdysseyTask """Submit a zendesk ticket for this user""" submitZendeskTicket(collaborators:[String!]email:String!ticket:ZendeskTicketInput!):ZendeskTicket """Update information about a user; all arguments are optional""" update(email:String fullName:String referrer:String trackingGoogleClientId:String trackingMarketoClientId:String userSegment:UserSegment utmCampaign:String utmMedium:String utmSource:String):User """Updates this users' preference concerning opting into beta features.""" updateBetaFeaturesOn(betaFeaturesOn:Boolean!):User """Update the status of a feature for this. For example, if you want to hide an introductory popup.""" updateFeatureIntros(newFeatureIntros:FeatureIntrosInput):User """Update user to have the given internal mdg admin role. +organizations. 
Only admins are allowed to see this field.""" internalAdminRole:InternalMdgAdminRole """Last time any API token from this user was used against AGM services""" lastAuthenticatedAt:Timestamp logoutAfterIdleMs:Int """Which organizations a user belongs to.""" memberships:[UserMembership!]!"""The name (first and last) of a user.""" name:String!odysseyAttempt(id:ID!):OdysseyAttempt odysseyAttempts:[OdysseyAttempt!]!odysseyCertifications:[OdysseyCertification!]odysseyCourses:[OdysseyCourse!]odysseyHasEarlyAccess:Boolean!odysseyHasRequestedEarlyAccess:Boolean!odysseyTasks:[OdysseyTask!]sandboxOperationCollections:[OperationCollection!]!synchronized:Boolean!"""List of Zendesk tickets this user has submitted""" tickets:[ZendeskTicket!]type:UserType!}type UserApiKey implements ApiKey{id:ID!keyName:String token:String!}type UserExperimentalFeatures{exampleFeature:Boolean!}"""An organization a given user belongs to.""" type UserMembership{"""The organization a user is a member of.""" account:Account!"""When the user joined the organization.""" createdAt:Timestamp!"""What level of access a use has to an organization.""" permission:UserPermission!"""The user that is a member of an organization.""" user:User!}type UserMutation{acceptPrivacyPolicy:Void """Change the user's password""" changePassword(newPassword:String!previousPassword:String!):Void createOdysseyAttempt(testId:String!):OdysseyAttempt createOdysseyCertification(certificationId:String!):OdysseyCertification createOdysseyCourses(courses:[OdysseyCourseInput!]!):[OdysseyCourse!]createOdysseyTasks(tasks:[OdysseyTaskInput!]!):[OdysseyTask!]"""Delete the user's avatar. Requires User.canUpdateAvatar to be true.""" deleteAvatar:AvatarDeleteError """Hard deletes the associated user. Throws an error otherwise with reason included.""" hardDelete:Void """Create a new API key for this user. Must take in a name for this key.""" newKey(keyName:String!):UserApiKey!"""Create a new API key for this user if there are no current API keys. +If an API key already exists, this will return one at random and not create a new one.""" provisionKey(keyName:String!="add-a-name"):ApiKeyProvision """Refresh information about the user from its upstream service (eg list of organizations from GitHub)""" refresh:User """Removes the given key from this user. Can be used to remove either a web cookie or a user API key.""" removeKey(id:ID!):Void """Renames the given key to the new key name.""" renameKey(id:ID!newKeyName:String):UserApiKey resendVerificationEmail:Void setOdysseyCourse(course:OdysseyCourseInput!):OdysseyCourse setOdysseyResponse(response:OdysseyResponseInput!):OdysseyResponse setOdysseyTask(task:OdysseyTaskInput!):OdysseyTask """Submit a zendesk ticket for this user""" submitZendeskTicket(collaborators:[String!]email:String!ticket:ZendeskTicketInput!):ZendeskTicket """Update information about a user; all arguments are optional""" update(email:String fullName:String referrer:String trackingGoogleClientId:String trackingMarketoClientId:String userSegment:UserSegment utmCampaign:String utmMedium:String utmSource:String):User """Updates this users' preference concerning opting into beta features.""" updateBetaFeaturesOn(betaFeaturesOn:Boolean!):User """Update the status of a feature for this. For example, if you want to hide an introductory popup.""" updateFeatureIntros(newFeatureIntros:FeatureIntrosInput):User updateOdysseyAttempt(completedAt:Timestamp id:ID!):OdysseyAttempt """Update user to have the given internal mdg admin role. 
It is necessary to be an MDG_INTERNAL_SUPER_ADMIN to perform update. Additionally, upserting a null value explicitly revokes this user's -admin status.""" updateRole(newRole:InternalMdgAdminRole):User user:User!verifyEmail(token:String!):User}enum UserPermission{BILLING_MANAGER CONSUMER CONTRIBUTOR GRAPH_ADMIN LEGACY_GRAPH_KEY OBSERVER ORG_ADMIN}enum UserSegment{JOIN_MY_TEAM LOCAL_DEVELOPMENT NOT_SPECIFIED PRODUCTION_GRAPHS SANDBOX SANDBOX_OPERATION_COLLECTIONS TRY_TEAM}type UserSettings{appNavCollapsed:Boolean!autoManageVariables:Boolean!id:String!mockingResponses:Boolean!preflightScriptEnabled:Boolean!responseHints:ResponseHints!tableMode:Boolean!themeName:ThemeName!}"""Explorer user settings input""" input UserSettingsInput{appNavCollapsed:Boolean autoManageVariables:Boolean mockingResponses:Boolean preflightScriptEnabled:Boolean responseHints:ResponseHints tableMode:Boolean themeName:ThemeName}enum UserType{APOLLO GITHUB SSO}type ValidateOperationsResult{validationResults:[ValidationResult!]!}type ValidationError implements Error{message:String!}enum ValidationErrorCode{DEPRECATED_FIELD INVALID_OPERATION NON_PARSEABLE_DOCUMENT}enum ValidationErrorType{FAILURE INVALID WARNING}"""Represents a single validation error, with information relating to the error +admin status.""" updateRole(newRole:InternalMdgAdminRole):User user:User!verifyEmail(token:String!):User}enum UserPermission{BILLING_MANAGER CONSUMER CONTRIBUTOR DOCUMENTER GRAPH_ADMIN LEGACY_GRAPH_KEY OBSERVER ORG_ADMIN}enum UserSegment{JOIN_MY_TEAM LOCAL_DEVELOPMENT NOT_SPECIFIED PRODUCTION_GRAPHS SANDBOX SANDBOX_OPERATION_COLLECTIONS TRY_TEAM}type UserSettings{appNavCollapsed:Boolean!autoManageVariables:Boolean!id:String!mockingResponses:Boolean!preflightScriptEnabled:Boolean!responseHints:ResponseHints!tableMode:Boolean!themeName:ThemeName!}"""Explorer user settings input""" input UserSettingsInput{appNavCollapsed:Boolean autoManageVariables:Boolean mockingResponses:Boolean preflightScriptEnabled:Boolean responseHints:ResponseHints tableMode:Boolean themeName:ThemeName}enum UserType{APOLLO GITHUB SSO}type ValidateOperationsResult{validationResults:[ValidationResult!]!}type ValidationError implements Error{message:String!}enum ValidationErrorCode{DEPRECATED_FIELD INVALID_OPERATION NON_PARSEABLE_DOCUMENT}enum ValidationErrorType{FAILURE INVALID WARNING}"""Represents a single validation error, with information relating to the error and its respective operation""" type ValidationResult{"""The validation result's error code""" code:ValidationErrorCode!"""Description of the validation error""" description:String!"""The operation related to this validation result""" operation:OperationDocument!"""The type of validation error thrown - warning, failure, or invalid.""" type:ValidationErrorType!}"""Always null""" scalar Void """Webhook notification channel""" type WebhookChannel implements Channel{id:ID!name:String!secretToken:String subscriptions:[ChannelSubscription!]!url:String!}"""PagerDuty notification channel parameters""" input WebhookChannelInput{name:String secretToken:String url:String!}type ZendeskTicket{createdAt:Timestamp!description:String!graph:Service id:Int!organization:Account priority:TicketPriority!status:TicketStatus subject:String!user:User}"""Zendesk ticket input""" input ZendeskTicketInput{description:String!graphId:String organizationId:String priority:TicketPriority!subject:String!} diff --git a/pkgs/development/tools/rover/update.sh b/pkgs/development/tools/rover/update.sh index 50a9945884b7..fbf6fba989c4 100755 --- 
a/pkgs/development/tools/rover/update.sh +++ b/pkgs/development/tools/rover/update.sh @@ -26,17 +26,6 @@ rover_tar_url="https://github.com/apollographql/rover/archive/refs/tags/${rover_ # Convert hash to SRI representation rover_sri_hash=$(nix hash to-sri --type sha256 "$rover_hash") -# Identify librusty version and hash -librusty_version=$( - sed --quiet '/^name = "v8"$/{n;p}' "${repo}/Cargo.lock" \ - | grep --only-matching --perl-regexp '^version = "\K[^"]+' -) -librusty_arch=x86_64-unknown-linux-gnu -librusty_url="https://github.com/denoland/rusty_v8/releases/download/v${librusty_version}/librusty_v8_release_${librusty_arch}.a" -echo "Fetching librusty" -librusty_hash=$(nix-prefetch-url "$librusty_url" --type sha256) -librusty_sri_hash=$(nix hash to-sri --type sha256 "$librusty_hash") - # Update rover version. sed --in-place \ "s|version = \"[0-9.]*\"|version = \"$rover_version\"|" \ @@ -61,16 +50,6 @@ sed --in-place \ "s|cargoSha256 = \".*\"|cargoSha256 = \"$cargoSha256\"|" \ "$dirname/default.nix" -# Update librusty version -sed --in-place \ - "s|version = \"[0-9.]*\"|version = \"$librusty_version\"|" \ - "$dirname/librusty_v8.nix" - -# Update librusty hash -sed --in-place \ - "s|x86_64-linux = \"[^\"]*\"|x86_64-linux = \"$librusty_sri_hash\"|" \ - "$dirname/librusty_v8.nix" - # Update apollo api schema info response="$(mktemp)" schemaUrl=https://graphql.api.apollographql.com/api/schema From 42282ebb76ddfd16a56a9545b56d089a8e48c179 Mon Sep 17 00:00:00 2001 From: "Robert T. McGibbon" Date: Sat, 23 Apr 2022 13:13:58 -0400 Subject: [PATCH 130/161] python3Packages.geopandas: fix tests --- .../python-modules/geopandas/default.nix | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/pkgs/development/python-modules/geopandas/default.nix b/pkgs/development/python-modules/geopandas/default.nix index b62b2c67ef78..9bacb9fd818c 100644 --- a/pkgs/development/python-modules/geopandas/default.nix +++ b/pkgs/development/python-modules/geopandas/default.nix @@ -1,4 +1,4 @@ -{ lib, stdenv, buildPythonPackage, fetchFromGitHub, pythonOlder +{ lib, stdenv, buildPythonPackage, fetchFromGitHub, fetchpatch, pythonOlder , pandas, shapely, fiona, pyproj , pytestCheckHook, Rtree }: @@ -14,6 +14,19 @@ buildPythonPackage rec { sha256 = "14azl3gppqn90k8h4hpjilsivj92k6p1jh7mdr6p4grbww1b7sdq"; }; + patches = [ + # Fixes a test, will be included in the next release after 0.10.2 + (fetchpatch { + url = "https://github.com/geopandas/geopandas/pull/2219/commits/ac67515c9df745b672cca1669adf05eaf5cb0f3b.patch"; + sha256 = "sha256-XcaoFhD6Rq0nfEpMbOJiAWPbaPDrMwFwoyppayq8NHc="; + }) + # 5 commits from post 0.10.2 that fix the test suite compatibility with pandas >=1.4 + (fetchpatch { + url = "https://github.com/geopandas/geopandas/pull/2289.patch"; + sha256 = "sha256-BcZVdaO/DdpZoVGUWaw9etFvvgwizAgrkaBISEOhV4A="; + }) + ]; + propagatedBuildInputs = [ pandas shapely From 96d3a705b8487ea2a55507c5e9bbc17caedaa61a Mon Sep 17 00:00:00 2001 From: "Robert T. 
McGibbon" Date: Sat, 23 Apr 2022 13:30:17 -0400 Subject: [PATCH 131/161] python3Packages.hmmlearn: fix build --- .../development/python-modules/hmmlearn/default.nix | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/pkgs/development/python-modules/hmmlearn/default.nix b/pkgs/development/python-modules/hmmlearn/default.nix index bdeff30b7612..8c699af97526 100644 --- a/pkgs/development/python-modules/hmmlearn/default.nix +++ b/pkgs/development/python-modules/hmmlearn/default.nix @@ -1,4 +1,6 @@ -{ lib, fetchurl, buildPythonPackage, numpy, scikit-learn, setuptools-scm, cython, pytest }: +{ lib, fetchurl, buildPythonPackage +, numpy, scikit-learn, pybind11, setuptools-scm, cython +, pytestCheckHook }: buildPythonPackage rec { pname = "hmmlearn"; @@ -9,13 +11,12 @@ buildPythonPackage rec { sha256 = "sha256-a0snIPJ6912pNnq02Q3LAPONozFo322Rf57F3mZw9uE="; }; - buildInputs = [ setuptools-scm cython ]; + buildInputs = [ setuptools-scm cython pybind11 ]; propagatedBuildInputs = [ numpy scikit-learn ]; - checkInputs = [ pytest ]; + checkInputs = [ pytestCheckHook ]; - checkPhase = '' - pytest --pyargs hmmlearn - ''; + pythonImportsCheck = [ "hmmlearn" ]; + pytestFlagsArray = [ "--pyargs" "hmmlearn" ]; meta = with lib; { description = "Hidden Markov Models in Python with scikit-learn like API"; From 54e12e99532abc61a8b32e3bb4494feb23155736 Mon Sep 17 00:00:00 2001 From: "Robert T. McGibbon" Date: Sat, 23 Apr 2022 13:53:08 -0400 Subject: [PATCH 132/161] python3Packages.pillow-simd: 7.0.0.post3 -> 9.0.0.post1 --- pkgs/development/python-modules/pillow-simd/default.nix | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pkgs/development/python-modules/pillow-simd/default.nix b/pkgs/development/python-modules/pillow-simd/default.nix index e2ea4c713888..826d9f15252e 100644 --- a/pkgs/development/python-modules/pillow-simd/default.nix +++ b/pkgs/development/python-modules/pillow-simd/default.nix @@ -1,6 +1,6 @@ { lib, stdenv, buildPythonPackage, fetchFromGitHub, isPyPy, isPy3k , olefile, freetype, libjpeg, zlib, libtiff, libwebp, tcl, lcms2 -, libxcb, tk, libX11, openjpeg, libimagequant, pyroma, numpy +, libxcb, tk, libX11, openjpeg, libimagequant, pyroma, numpy, defusedxml , pytestCheckHook }@args: @@ -8,14 +8,14 @@ import ../pillow/generic.nix (rec { pname = "Pillow-SIMD"; # check for release version on https://pypi.org/project/Pillow-SIMD/#history # does not match the latest pillow release version! 
- version = "7.0.0.post3"; + version = "9.0.0.post1"; disabled = !isPy3k; src = fetchFromGitHub { owner = "uploadcare"; repo = "pillow-simd"; rev = "v${version}"; - sha256 = "1h832xp1bzf951hr4dmjmxqfsv28sx9lr2cq96qdz1c72k40zj1h"; + sha256 = "sha256-qTZYhgHjVMXqoYl3mG1xVrFaWrPidSY8HlyFQizV27Y="; }; meta = with lib; { From 2928efa64010b88a583756d15435c622858bce7c Mon Sep 17 00:00:00 2001 From: superherointj <5861043+superherointj@users.noreply.github.com> Date: Sat, 23 Apr 2022 12:09:33 -0300 Subject: [PATCH 133/161] python3Packages.glad: init 0.1.36 --- .../python-modules/glad/default.nix | 18 ++++++++++++++++++ pkgs/top-level/python-packages.nix | 2 ++ 2 files changed, 20 insertions(+) create mode 100644 pkgs/development/python-modules/glad/default.nix diff --git a/pkgs/development/python-modules/glad/default.nix b/pkgs/development/python-modules/glad/default.nix new file mode 100644 index 000000000000..9d05720d84df --- /dev/null +++ b/pkgs/development/python-modules/glad/default.nix @@ -0,0 +1,18 @@ +{ buildPythonPackage, fetchPypi, lib }: + +buildPythonPackage rec { + pname = "glad"; + version = "0.1.36"; + + src = fetchPypi { + inherit pname version; + sha256 = "sha256-P7ANv+x65t2+ugTiFUf2fzzPx5X8NFYkUM8/K7Gf28c="; + }; + + meta = with lib; { + description = "Multi-Language Vulkan/GL/GLES/EGL/GLX/WGL Loader-Generator based on the official specs"; + homepage = "https://github.com/Dav1dde/glad"; + license = licenses.mit; + maintainers = with maintainers; [ superherointj ]; + }; +} diff --git a/pkgs/top-level/python-packages.nix b/pkgs/top-level/python-packages.nix index c18f65d85f71..e2a37a9419cf 100644 --- a/pkgs/top-level/python-packages.nix +++ b/pkgs/top-level/python-packages.nix @@ -3375,6 +3375,8 @@ in { git-sweep = callPackage ../development/python-modules/git-sweep { }; + glad = callPackage ../development/python-modules/glad { }; + glances-api = callPackage ../development/python-modules/glances-api { }; glasgow = callPackage ../development/python-modules/glasgow { }; From ad046a0c97e998b7639851b604875c84468b5574 Mon Sep 17 00:00:00 2001 From: Fabian Affolter Date: Fri, 22 Apr 2022 19:16:34 +0200 Subject: [PATCH 134/161] python3Packages.archinfo: 9.1.12332 -> 9.2.1 --- .../python-modules/archinfo/default.nix | 15 +++------------ 1 file changed, 3 insertions(+), 12 deletions(-) diff --git a/pkgs/development/python-modules/archinfo/default.nix b/pkgs/development/python-modules/archinfo/default.nix index f9affcddd122..e44156c1f056 100644 --- a/pkgs/development/python-modules/archinfo/default.nix +++ b/pkgs/development/python-modules/archinfo/default.nix @@ -9,8 +9,8 @@ buildPythonPackage rec { pname = "archinfo"; - version = "9.1.12332"; - format = "setuptools"; + version = "9.2.1"; + format = "pyproject"; disabled = pythonOlder "3.6"; @@ -18,7 +18,7 @@ buildPythonPackage rec { owner = "angr"; repo = pname; rev = "v${version}"; - hash = "sha256-nv/hwQZgKv/cM8fF6GqI8zY9GAe8aCZ/AGFOmhz+bMM="; + hash = "sha256-RWdY7tzT5wDVjAn1QIkQm8b5lIo++VzktsYZxn8taeg="; }; checkInputs = [ @@ -26,15 +26,6 @@ buildPythonPackage rec { pytestCheckHook ]; - patches = [ - # Make archinfo import without installing pyvex, https://github.com/angr/archinfo/pull/113 - (fetchpatch { - name = "fix-import-issue.patch"; - url = "https://github.com/angr/archinfo/commit/d29c108f55ffd458ff1d3d65db2d651c76b19267.patch"; - sha256 = "sha256-9vi0QyqQLIPQxFuB8qrpcnPXWOJ6d27/IXJE/Ui6HhM="; - }) - ]; - pythonImportsCheck = [ "archinfo" ]; From acb32c13e4492d9e2e7bc98f02114cae36e1ce8f Mon Sep 17 00:00:00 2001 From: Fabian 
Affolter Date: Fri, 22 Apr 2022 19:16:44 +0200 Subject: [PATCH 135/161] python3Packages.pyvex: 9.1.12332 -> 9.2.1 --- pkgs/development/python-modules/pyvex/default.nix | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pkgs/development/python-modules/pyvex/default.nix b/pkgs/development/python-modules/pyvex/default.nix index e3a810be9160..799271371c7b 100644 --- a/pkgs/development/python-modules/pyvex/default.nix +++ b/pkgs/development/python-modules/pyvex/default.nix @@ -12,14 +12,14 @@ buildPythonPackage rec { pname = "pyvex"; - version = "9.1.12332"; - format = "setuptools"; + version = "9.2.1"; + format = "pyproject"; disabled = pythonOlder "3.6"; src = fetchPypi { inherit pname version; - hash = "sha256-e1lruHgppQ8mJbTx6xsUDSkLCYQISqM9c1vsjdQU4eI="; + hash = "sha256-b6LZJmAyxklvZxvane19dK/kQfbLPjkk29RydiWMZCY="; }; propagatedBuildInputs = [ From f8d729492a1a64a378401f0fb509da64157931ac Mon Sep 17 00:00:00 2001 From: Fabian Affolter Date: Fri, 22 Apr 2022 19:16:47 +0200 Subject: [PATCH 136/161] python3Packages.claripy: 9.1.12332 -> 9.2.1 --- pkgs/development/python-modules/claripy/default.nix | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/pkgs/development/python-modules/claripy/default.nix b/pkgs/development/python-modules/claripy/default.nix index b16505771492..8f66e8551edf 100644 --- a/pkgs/development/python-modules/claripy/default.nix +++ b/pkgs/development/python-modules/claripy/default.nix @@ -14,8 +14,8 @@ buildPythonPackage rec { pname = "claripy"; - version = "9.1.12332"; - format = "setuptools"; + version = "9.2.1"; + format = "pyproject"; disabled = pythonOlder "3.6"; @@ -23,7 +23,7 @@ buildPythonPackage rec { owner = "angr"; repo = pname; rev = "v${version}"; - sha256 = "sha256-YrR8OkDoop6kHAuk4cM4STYYOjjaMLZCQuE07/5IXqs="; + hash = "sha256-pCqhSpZfX3u9vJ8Oy1yyicagBQYK5+jBVCEab0TnGA4="; }; propagatedBuildInputs = [ @@ -42,8 +42,8 @@ buildPythonPackage rec { postPatch = '' # Use upstream z3 implementation - substituteInPlace setup.py \ - --replace "z3-solver>=4.8.5.0" "" + substituteInPlace setup.cfg \ + --replace "z3-solver >= 4.8.5.0" "" ''; pythonImportsCheck = [ From 95762cc16f77783a9844232ce2fba2d09d6d71cf Mon Sep 17 00:00:00 2001 From: Fabian Affolter Date: Fri, 22 Apr 2022 19:16:55 +0200 Subject: [PATCH 137/161] python3Packages.angr: 9.1.12332 -> 9.2.1 --- pkgs/development/python-modules/angr/default.nix | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pkgs/development/python-modules/angr/default.nix b/pkgs/development/python-modules/angr/default.nix index 1881da62f506..31629eab9d67 100644 --- a/pkgs/development/python-modules/angr/default.nix +++ b/pkgs/development/python-modules/angr/default.nix @@ -46,8 +46,8 @@ in buildPythonPackage rec { pname = "angr"; - version = "9.1.12332"; - format = "setuptools"; + version = "9.2.1"; + format = "pyproject"; disabled = pythonOlder "3.6"; @@ -55,7 +55,7 @@ buildPythonPackage rec { owner = pname; repo = pname; rev = "v${version}"; - hash = "sha256-GaW1XyFOnjU28HqptFC6+Fe41zYZMR716Nsq0dPy660="; + hash = "sha256-7t4NV1udBq3tK7czuKYUsQ+9tLahFM8DlUUBT3d6bco="; }; propagatedBuildInputs = [ From 9f78c17798baa5bcbc69375a4f33353f2824be7a Mon Sep 17 00:00:00 2001 From: Fabian Affolter Date: Fri, 22 Apr 2022 19:16:58 +0200 Subject: [PATCH 138/161] python3Packages.angrop: 9.1.12332 -> 9.2.1 --- pkgs/development/python-modules/angrop/default.nix | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pkgs/development/python-modules/angrop/default.nix 
b/pkgs/development/python-modules/angrop/default.nix index 8164190cd49c..d32441a989c6 100644 --- a/pkgs/development/python-modules/angrop/default.nix +++ b/pkgs/development/python-modules/angrop/default.nix @@ -9,8 +9,8 @@ buildPythonPackage rec { pname = "angrop"; - version = "9.1.12332"; - format = "setuptools"; + version = "9.2.1"; + format = "pyproject"; disabled = pythonOlder "3.6"; @@ -18,7 +18,7 @@ buildPythonPackage rec { owner = "angr"; repo = pname; rev = "v${version}"; - hash = "sha256-lhwlZ7eHaEMaTW7c+WCRSeGSIQ5IeEx6XALyYJH+Ey0="; + hash = "sha256-VhlsRd5IN8zF6aUU5Ji/ULkdecOpR+egU3vhYpi+KL8="; }; propagatedBuildInputs = [ From fa46564ddf3876505193548ba2c01e7a36b808a4 Mon Sep 17 00:00:00 2001 From: Fabian Affolter Date: Sat, 23 Apr 2022 12:30:59 +0200 Subject: [PATCH 139/161] python3Packages.ailment: 9.1.12332 -> 9.2.1 --- pkgs/development/python-modules/ailment/default.nix | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pkgs/development/python-modules/ailment/default.nix b/pkgs/development/python-modules/ailment/default.nix index ef606a027d06..9f6828fd6329 100644 --- a/pkgs/development/python-modules/ailment/default.nix +++ b/pkgs/development/python-modules/ailment/default.nix @@ -7,8 +7,8 @@ buildPythonPackage rec { pname = "ailment"; - version = "9.1.12332"; - format = "setuptools"; + version = "9.2.1"; + format = "pyproject"; disabled = pythonOlder "3.6"; @@ -16,7 +16,7 @@ buildPythonPackage rec { owner = "angr"; repo = pname; rev = "v${version}"; - hash = "sha256-qWKvNhiOAonUi0qpOWtwbNZa2lgBQ+gaGrAHMgDdr4Q="; + hash = "sha256-F0t4vVxi4KUUtIZc8FJD9+2qf1XA58haFfjmHwAQaWA="; }; propagatedBuildInputs = [ From ecc7c9533cd4ab45feb84cbde4f94c1cc7fa03b3 Mon Sep 17 00:00:00 2001 From: Fabian Affolter Date: Sat, 23 Apr 2022 12:39:59 +0200 Subject: [PATCH 140/161] python3Packages.cle: 9.1.12332 -> 9.2.1 --- pkgs/development/python-modules/cle/default.nix | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/pkgs/development/python-modules/cle/default.nix b/pkgs/development/python-modules/cle/default.nix index 6c8126172c05..8ee898b445e7 100644 --- a/pkgs/development/python-modules/cle/default.nix +++ b/pkgs/development/python-modules/cle/default.nix @@ -15,7 +15,7 @@ let # The binaries are following the argr projects release cycle - version = "9.1.12332"; + version = "9.2.1"; # Binary files from https://github.com/angr/binaries (only used for testing and only here) binaries = fetchFromGitHub { @@ -29,7 +29,7 @@ in buildPythonPackage rec { pname = "cle"; inherit version; - format = "setuptools"; + format = "pyproject"; disabled = pythonOlder "3.6"; @@ -37,7 +37,7 @@ buildPythonPackage rec { owner = "angr"; repo = pname; rev = "v${version}"; - hash = "sha256-xcj6Skzzmw5g+0KsBMLNOhRyXQA7nbgnc9YyfJLteCM="; + hash = "sha256-OGdnrRFfx2LMMsw6giPC+4izWX603cYbpKHuslj4Gng="; }; propagatedBuildInputs = [ @@ -70,6 +70,8 @@ buildPythonPackage rec { "test_plt_full_relro" # Test fails "test_tls_pe_incorrect_tls_data_start" + # The required parts are not present on Nix + "test_remote_file_map" ]; pythonImportsCheck = [ From 9d5bd0bc2f81fc4309ecca2060a176c5863e77b3 Mon Sep 17 00:00:00 2001 From: Fabian Affolter Date: Sat, 23 Apr 2022 12:50:00 +0200 Subject: [PATCH 141/161] python3Packages.archinfo: remove unused input --- pkgs/development/python-modules/archinfo/default.nix | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/pkgs/development/python-modules/archinfo/default.nix b/pkgs/development/python-modules/archinfo/default.nix index 
e44156c1f056..b9b29a7167ab 100644 --- a/pkgs/development/python-modules/archinfo/default.nix +++ b/pkgs/development/python-modules/archinfo/default.nix @@ -1,7 +1,6 @@ { lib , buildPythonPackage , fetchFromGitHub -, fetchpatch , pytestCheckHook , nose , pythonOlder @@ -34,6 +33,6 @@ buildPythonPackage rec { description = "Classes with architecture-specific information"; homepage = "https://github.com/angr/archinfo"; license = with licenses; [ bsd2 ]; - maintainers = [ maintainers.fab ]; + maintainers = with maintainers; [ fab ]; }; } From c553acd27fdcbc5de4d8ec26a829d9fc2870901b Mon Sep 17 00:00:00 2001 From: "R. Ryantm" Date: Sat, 23 Apr 2022 16:07:37 +0000 Subject: [PATCH 142/161] python310Packages.deezer-python: 5.2.0 -> 5.3.0 --- pkgs/development/python-modules/deezer-python/default.nix | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pkgs/development/python-modules/deezer-python/default.nix b/pkgs/development/python-modules/deezer-python/default.nix index 0f01ebab4a55..a694bd2882d0 100644 --- a/pkgs/development/python-modules/deezer-python/default.nix +++ b/pkgs/development/python-modules/deezer-python/default.nix @@ -13,7 +13,7 @@ buildPythonPackage rec { pname = "deezer-python"; - version = "5.2.0"; + version = "5.3.0"; format = "pyproject"; disabled = pythonOlder "3.6"; @@ -21,8 +21,8 @@ buildPythonPackage rec { src = fetchFromGitHub { owner = "browniebroke"; repo = pname; - rev = "v${version}"; - sha256 = "sha256-jaF5vQx8/qP9pGLfilx86v1GxHbjxaRghjjI5Me0pU0="; + rev = "refs/tags/v${version}"; + sha256 = "sha256-Y3nn7q6wGBqWN2JxfpGYd/KDxW5yeuwkos0w1AENkJA="; }; nativeBuildInputs = [ From f420bdcc83fdea9db444b266e64cf2b8fd108070 Mon Sep 17 00:00:00 2001 From: "R. Ryantm" Date: Sat, 23 Apr 2022 12:55:45 +0000 Subject: [PATCH 143/161] python310Packages.azure-synapse-artifacts: 0.12.0 -> 0.13.0 --- .../python-modules/azure-synapse-artifacts/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/development/python-modules/azure-synapse-artifacts/default.nix b/pkgs/development/python-modules/azure-synapse-artifacts/default.nix index 5a83385b7541..de0909fa0c02 100644 --- a/pkgs/development/python-modules/azure-synapse-artifacts/default.nix +++ b/pkgs/development/python-modules/azure-synapse-artifacts/default.nix @@ -6,12 +6,12 @@ buildPythonPackage rec { pname = "azure-synapse-artifacts"; - version = "0.12.0"; + version = "0.13.0"; src = fetchPypi { inherit pname version; extension = "zip"; - sha256 = "sha256-IfQWsITuThzh+TRgv99JTtcDFY3gMq5PjALkN4mJEZo="; + sha256 = "sha256-WJZtE7efs1xwalyb0Sr2J+pmPIt9gn2o01/prncb2uM="; }; propagatedBuildInputs = [ From cb536356ee92fbad69189309ca79fe083d9f07ad Mon Sep 17 00:00:00 2001 From: "R. 
Ryantm" Date: Sat, 23 Apr 2022 12:21:18 +0000 Subject: [PATCH 144/161] python310Packages.aiolifx: 0.7.1 -> 0.8.0 --- pkgs/development/python-modules/aiolifx/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/development/python-modules/aiolifx/default.nix b/pkgs/development/python-modules/aiolifx/default.nix index 2cc04130c671..7cff45cb6646 100644 --- a/pkgs/development/python-modules/aiolifx/default.nix +++ b/pkgs/development/python-modules/aiolifx/default.nix @@ -8,13 +8,13 @@ buildPythonPackage rec { pname = "aiolifx"; - version = "0.7.1"; + version = "0.8.0"; disabled = pythonOlder "3.4"; src = fetchPypi { inherit pname version; - sha256 = "sha256-ktXnAgrxfDELfMQATcWHn/u6C4bKQii+mbT4mA54coo="; + sha256 = "sha256-7XwtTALfEFAI2Rl3JcVcncIZBTFNuXyyclpJj5jHyEU="; }; propagatedBuildInputs = [ From 6fbd7f1c047c5b8c198fb1e92b3a6520a1a2ac72 Mon Sep 17 00:00:00 2001 From: Fabian Affolter Date: Sat, 23 Apr 2022 15:45:44 +0200 Subject: [PATCH 145/161] python3Packages.aiolifx: disable on older Python releases --- pkgs/development/python-modules/aiolifx/default.nix | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/pkgs/development/python-modules/aiolifx/default.nix b/pkgs/development/python-modules/aiolifx/default.nix index 7cff45cb6646..37a4ca6fa44f 100644 --- a/pkgs/development/python-modules/aiolifx/default.nix +++ b/pkgs/development/python-modules/aiolifx/default.nix @@ -9,12 +9,13 @@ buildPythonPackage rec { pname = "aiolifx"; version = "0.8.0"; + format = "setuptools"; - disabled = pythonOlder "3.4"; + disabled = pythonOlder "3.7"; src = fetchPypi { inherit pname version; - sha256 = "sha256-7XwtTALfEFAI2Rl3JcVcncIZBTFNuXyyclpJj5jHyEU="; + hash = "sha256-7XwtTALfEFAI2Rl3JcVcncIZBTFNuXyyclpJj5jHyEU="; }; propagatedBuildInputs = [ @@ -25,10 +26,12 @@ buildPythonPackage rec { # tests are not implemented doCheck = false; - pythonImportsCheck = [ "aiolifx" ]; + pythonImportsCheck = [ + "aiolifx" + ]; meta = with lib; { - description = "API for local communication with LIFX devices over a LAN"; + description = "MOdule for local communication with LIFX devices over a LAN"; homepage = "https://github.com/frawau/aiolifx"; license = licenses.mit; maintainers = with maintainers; [ netixx ]; From 20c39d574165c115f513cfb47a34174fd1cc83b4 Mon Sep 17 00:00:00 2001 From: Fabian Affolter Date: Sat, 23 Apr 2022 15:47:12 +0200 Subject: [PATCH 146/161] python3Packages.aiolifx fix typo --- pkgs/development/python-modules/aiolifx/default.nix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkgs/development/python-modules/aiolifx/default.nix b/pkgs/development/python-modules/aiolifx/default.nix index 37a4ca6fa44f..1ce640179a13 100644 --- a/pkgs/development/python-modules/aiolifx/default.nix +++ b/pkgs/development/python-modules/aiolifx/default.nix @@ -31,7 +31,7 @@ buildPythonPackage rec { ]; meta = with lib; { - description = "MOdule for local communication with LIFX devices over a LAN"; + description = "Module for local communication with LIFX devices over a LAN"; homepage = "https://github.com/frawau/aiolifx"; license = licenses.mit; maintainers = with maintainers; [ netixx ]; From 9a510e042414341b0ff4de056cb38af6237435a8 Mon Sep 17 00:00:00 2001 From: "Robert T. 
McGibbon" Date: Sat, 23 Apr 2022 13:56:56 -0400 Subject: [PATCH 147/161] python3Packages.hyppo: fix build --- pkgs/development/python-modules/hyppo/default.nix | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/pkgs/development/python-modules/hyppo/default.nix b/pkgs/development/python-modules/hyppo/default.nix index b09d5bd565ff..de089ba39d2d 100644 --- a/pkgs/development/python-modules/hyppo/default.nix +++ b/pkgs/development/python-modules/hyppo/default.nix @@ -3,6 +3,7 @@ , pythonOlder , fetchFromGitHub , pytestCheckHook +, autograd , numba , numpy , scikit-learn @@ -25,6 +26,7 @@ buildPythonPackage rec { }; propagatedBuildInputs = [ + autograd numba numpy scikit-learn @@ -40,8 +42,8 @@ buildPythonPackage rec { meta = with lib; { homepage = "https://github.com/neurodata/hyppo"; - description = "Indepedence testing in Python"; - license = licenses.asl20; + description = "Python package for multivariate hypothesis testing"; + license = licenses.mit; maintainers = with maintainers; [ bcdarwin ]; }; } From 48b6b199708e418bf56bed158062bfac3ea9aa76 Mon Sep 17 00:00:00 2001 From: Julius de Bruijn Date: Thu, 9 Dec 2021 14:30:12 +0100 Subject: [PATCH 148/161] emacsPackages.tree-sitter: fix grammar/native executor --- .../emacs/elisp-packages/manual-packages.nix | 3 + .../tree-sitter-langs/default.nix | 55 ++++++++++++++ .../emacs/elisp-packages/tsc/default.nix | 73 +++++++++++++++++++ 3 files changed, 131 insertions(+) create mode 100644 pkgs/applications/editors/emacs/elisp-packages/tree-sitter-langs/default.nix create mode 100644 pkgs/applications/editors/emacs/elisp-packages/tsc/default.nix diff --git a/pkgs/applications/editors/emacs/elisp-packages/manual-packages.nix b/pkgs/applications/editors/emacs/elisp-packages/manual-packages.nix index 56a3b696852d..6d8dbce2b32b 100644 --- a/pkgs/applications/editors/emacs/elisp-packages/manual-packages.nix +++ b/pkgs/applications/editors/emacs/elisp-packages/manual-packages.nix @@ -233,6 +233,9 @@ sv-kalender = callPackage ./sv-kalender { }; + tree-sitter-langs = callPackage ./tree-sitter-langs { }; + tsc = callPackage ./tsc { }; + youtube-dl = callPackage ./youtube-dl { }; # From old emacsPackages (pre emacsPackagesNg) diff --git a/pkgs/applications/editors/emacs/elisp-packages/tree-sitter-langs/default.nix b/pkgs/applications/editors/emacs/elisp-packages/tree-sitter-langs/default.nix new file mode 100644 index 000000000000..1dbcf605adf9 --- /dev/null +++ b/pkgs/applications/editors/emacs/elisp-packages/tree-sitter-langs/default.nix @@ -0,0 +1,55 @@ +{ lib +, pkgs +, symlinkJoin +, fetchzip +, melpaBuild +, stdenv +, fetchFromGitHub +, writeText +}: + +let + version = "0.10.14"; + tree-sitter-grammars = stdenv.mkDerivation rec { + name = "tree-sitter-grammars"; + + inherit version; + + src = fetchzip { + name = "tree-sitter-grammars-linux-${version}.tar.gz"; + url = "https://github.com/emacs-tree-sitter/tree-sitter-langs/releases/download/${version}/${src.name}"; + sha256 = "sha256-J8VplZWhyWN8ur74Ep0CTl4nPtESzfs2Gh6MxfY5Zqc="; + stripRoot = false; + }; + + installPhase = '' + install -d $out/langs/bin + echo -n $version > $out/langs/bin/BUNDLE-VERSION + install -m444 * $out/langs/bin + ''; + }; +in melpaBuild rec { + inherit version; + + pname = "tree-sitter-langs"; + commit = version; + + src = fetchFromGitHub { + owner = "emacs-tree-sitter"; + repo = "tree-sitter-langs"; + rev = version; + sha256 = "sha256-uKfkhcm1k2Ov4fSr7ALVnpQoX/l9ssEWMn761pa7Y/c="; + }; + + recipe = writeText "recipe" '' + (tree-sitter-langs + :repo 
"emacs-tree-sitter/tree-sitter-langs" + :fetcher github + :files (:defaults "queries")) + ''; + + postPatch = '' + substituteInPlace ./tree-sitter-langs-build.el \ + --replace "tree-sitter-langs-grammar-dir tree-sitter-langs--dir" "tree-sitter-langs-grammar-dir \"${tree-sitter-grammars}/langs\"" + ''; +} diff --git a/pkgs/applications/editors/emacs/elisp-packages/tsc/default.nix b/pkgs/applications/editors/emacs/elisp-packages/tsc/default.nix new file mode 100644 index 000000000000..aefb7d02004d --- /dev/null +++ b/pkgs/applications/editors/emacs/elisp-packages/tsc/default.nix @@ -0,0 +1,73 @@ +{ lib +, symlinkJoin +, melpaBuild +, fetchFromGitHub +, rustPlatform +, writeText +, clang +, llvmPackages +}: +with lib.licenses; +with rustPlatform; +with llvmPackages; + +let + version = "0.16.1"; + + src = fetchFromGitHub { + owner = "emacs-tree-sitter"; + repo = "elisp-tree-sitter"; + rev = version; + sha256 = "sha256-tAohHdAsy/HTFFPSNOo0UyrdolH8h0KF2ekFXuLltBE="; + }; + + tsc = melpaBuild rec { + inherit src; + inherit version; + + pname = "tsc"; + commit = version; + + sourceRoot = "source/core"; + + recipe = writeText "recipe" '' + (tsc + :repo "emacs-tree-sitter/elisp-tree-sitter" + :fetcher github) + ''; + }; + + tsc-dyn = buildRustPackage { + inherit version; + inherit src; + + pname = "tsc-dyn"; + + nativeBuildInputs = [ clang ]; + sourceRoot = "source/core"; + + configurePhase = '' + export LIBCLANG_PATH="${libclang.lib}/lib" + ''; + + postInstall = '' + LIB=($out/lib/libtsc_dyn.*) + TSC_PATH=$out/share/emacs/site-lisp/elpa/tsc-${version} + install -d $TSC_PATH + install -m444 $out/lib/libtsc_dyn.* $TSC_PATH/''${LIB/*libtsc_/tsc-} + echo -n $version > $TSC_PATH/DYN-VERSION + rm -r $out/lib + ''; + + cargoSha256 = "sha256-7UOhs3wx6fGvqPjNxUKoEHwPtiJ5zgLFPwDSvhYlmis="; + }; +in symlinkJoin { + name = "tsc"; + paths = [ tsc tsc-dyn ]; + + meta = { + description = "The core APIs of the Emacs binding for tree-sitter."; + license = mit; + maintainers = with maintainers; [ pimeys ]; + }; +} From 4567405cd4a6bcc9f5010d93eeec9c3aa3873c79 Mon Sep 17 00:00:00 2001 From: adisbladis Date: Sat, 23 Apr 2022 22:46:58 +1200 Subject: [PATCH 149/161] emacs.pkgs.tsc: Remove `with` from top-level scope This is a huge anti-pattern that makes it next to impossible to know which attrset is providing which attr and it's very easy to make mistakes. Case in point: This package didn't properly scope `lib.maintainers`. 
--- .../editors/emacs/elisp-packages/tsc/default.nix | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/pkgs/applications/editors/emacs/elisp-packages/tsc/default.nix b/pkgs/applications/editors/emacs/elisp-packages/tsc/default.nix index aefb7d02004d..3f686f96febe 100644 --- a/pkgs/applications/editors/emacs/elisp-packages/tsc/default.nix +++ b/pkgs/applications/editors/emacs/elisp-packages/tsc/default.nix @@ -7,9 +7,6 @@ , clang , llvmPackages }: -with lib.licenses; -with rustPlatform; -with llvmPackages; let version = "0.16.1"; @@ -37,7 +34,7 @@ let ''; }; - tsc-dyn = buildRustPackage { + tsc-dyn = rustPlatform.buildRustPackage { inherit version; inherit src; @@ -47,7 +44,7 @@ let sourceRoot = "source/core"; configurePhase = '' - export LIBCLANG_PATH="${libclang.lib}/lib" + export LIBCLANG_PATH="${llvmPackages.libclang.lib}/lib" ''; postInstall = '' @@ -61,13 +58,14 @@ let cargoSha256 = "sha256-7UOhs3wx6fGvqPjNxUKoEHwPtiJ5zgLFPwDSvhYlmis="; }; + in symlinkJoin { - name = "tsc"; + name = "tsc-${version}"; paths = [ tsc tsc-dyn ]; meta = { description = "The core APIs of the Emacs binding for tree-sitter."; - license = mit; - maintainers = with maintainers; [ pimeys ]; + license = lib.licenses.mit; + maintainers = with lib.maintainers; [ pimeys ]; }; } From 2c61820661e1633d4be0a090bdd3332988fb8272 Mon Sep 17 00:00:00 2001 From: adisbladis Date: Sun, 24 Apr 2022 00:11:50 +1200 Subject: [PATCH 150/161] emacs.pkgs.tsc/tree-sitter-langs: Remove `rec` from derivations These are not required. --- .../emacs/elisp-packages/tree-sitter-langs/default.nix | 10 ++++++---- .../editors/emacs/elisp-packages/tsc/default.nix | 2 +- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/pkgs/applications/editors/emacs/elisp-packages/tree-sitter-langs/default.nix b/pkgs/applications/editors/emacs/elisp-packages/tree-sitter-langs/default.nix index 1dbcf605adf9..ffc3f5337496 100644 --- a/pkgs/applications/editors/emacs/elisp-packages/tree-sitter-langs/default.nix +++ b/pkgs/applications/editors/emacs/elisp-packages/tree-sitter-langs/default.nix @@ -10,14 +10,15 @@ let version = "0.10.14"; - tree-sitter-grammars = stdenv.mkDerivation rec { + + tree-sitter-grammars = stdenv.mkDerivation { name = "tree-sitter-grammars"; inherit version; - src = fetchzip { + src = fetchzip rec { name = "tree-sitter-grammars-linux-${version}.tar.gz"; - url = "https://github.com/emacs-tree-sitter/tree-sitter-langs/releases/download/${version}/${src.name}"; + url = "https://github.com/emacs-tree-sitter/tree-sitter-langs/releases/download/${version}/${name}"; sha256 = "sha256-J8VplZWhyWN8ur74Ep0CTl4nPtESzfs2Gh6MxfY5Zqc="; stripRoot = false; }; @@ -28,7 +29,8 @@ let install -m444 * $out/langs/bin ''; }; -in melpaBuild rec { + +in melpaBuild { inherit version; pname = "tree-sitter-langs"; diff --git a/pkgs/applications/editors/emacs/elisp-packages/tsc/default.nix b/pkgs/applications/editors/emacs/elisp-packages/tsc/default.nix index 3f686f96febe..dcb950824d0d 100644 --- a/pkgs/applications/editors/emacs/elisp-packages/tsc/default.nix +++ b/pkgs/applications/editors/emacs/elisp-packages/tsc/default.nix @@ -18,7 +18,7 @@ let sha256 = "sha256-tAohHdAsy/HTFFPSNOo0UyrdolH8h0KF2ekFXuLltBE="; }; - tsc = melpaBuild rec { + tsc = melpaBuild { inherit src; inherit version; From 028a9348963e73a5998aeb1b3265b005ff06b6b2 Mon Sep 17 00:00:00 2001 From: adisbladis Date: Sun, 24 Apr 2022 02:08:29 +1200 Subject: [PATCH 151/161] emacs.pkgs.tsc: Add update script --- .../emacs/elisp-packages/tsc/default.nix | 21 +++ 
.../emacs/elisp-packages/tsc/update.py | 122 ++++++++++++++++++ 2 files changed, 143 insertions(+) create mode 100644 pkgs/applications/editors/emacs/elisp-packages/tsc/update.py diff --git a/pkgs/applications/editors/emacs/elisp-packages/tsc/default.nix b/pkgs/applications/editors/emacs/elisp-packages/tsc/default.nix index dcb950824d0d..ae0c5c28736c 100644 --- a/pkgs/applications/editors/emacs/elisp-packages/tsc/default.nix +++ b/pkgs/applications/editors/emacs/elisp-packages/tsc/default.nix @@ -6,6 +6,12 @@ , writeText , clang , llvmPackages + +, runtimeShell +, writeScript +, python3 +, nix-prefetch-github +, nix }: let @@ -63,6 +69,21 @@ in symlinkJoin { name = "tsc-${version}"; paths = [ tsc tsc-dyn ]; + passthru = { + updateScript = let + pythonEnv = python3.withPackages(ps: [ ps.requests ]); + in writeScript "tsc-update" '' + #!${runtimeShell} + set -euo pipefail + export PATH=${lib.makeBinPath [ + nix-prefetch-github + nix + pythonEnv + ]}:$PATH + exec python3 ${builtins.toString ./update.py} ${builtins.toString ./.} + ''; + }; + meta = { description = "The core APIs of the Emacs binding for tree-sitter."; license = lib.licenses.mit; diff --git a/pkgs/applications/editors/emacs/elisp-packages/tsc/update.py b/pkgs/applications/editors/emacs/elisp-packages/tsc/update.py new file mode 100644 index 000000000000..ce2054f909ef --- /dev/null +++ b/pkgs/applications/editors/emacs/elisp-packages/tsc/update.py @@ -0,0 +1,122 @@ +#!/usr/bin/env python3 +from textwrap import dedent +from os.path import ( + abspath, + dirname, + join, +) +from typing import ( + Dict, + Any, +) +import subprocess +import tempfile +import json +import sys +import re + +import requests + + +def eval_drv(nixpkgs: str, expr: str) -> Any: + expr = "\n".join( + ( + "with (import %s {});" % nixpkgs, + expr, + ) + ) + + with tempfile.NamedTemporaryFile(mode="w") as f: + f.write(dedent(expr)) + f.flush() + p = subprocess.run( + ["nix-instantiate", "--json", f.name], stdout=subprocess.PIPE, check=True + ) + + return p.stdout.decode().strip() + + +def get_src(tag_name: str) -> Dict[str, str]: + p = subprocess.run( + [ + "nix-prefetch-github", + "--rev", + tag_name, + "--json", + "emacs-tree-sitter", + "elisp-tree-sitter", + ], + stdout=subprocess.PIPE, + check=True, + ) + src = json.loads(p.stdout) + + fields = ["owner", "repo", "rev", "sha256"] + + return {f: src[f] for f in fields} + + +def get_cargo_sha256(drv_path: str): + # Note: No check=True since we expect this command to fail + p = subprocess.run(["nix-store", "-r", drv_path], stderr=subprocess.PIPE) + + stderr = p.stderr.decode() + lines = iter(stderr.split("\n")) + + for l in lines: + if l.startswith("error: hash mismatch in fixed-output derivation"): + break + else: + raise ValueError("Did not find expected hash mismatch message") + + for l in lines: + m = re.match(r"\s+got:\s+(.+)$", l) + if m: + return m.group(1) + + raise ValueError("Could not extract actual sha256 hash: ", stderr) + + +if __name__ == "__main__": + cwd = sys.argv[1] + + nixpkgs = abspath(join(cwd, "../../../../../..")) + + tag_name = requests.get( + "https://api.github.com/repos/emacs-tree-sitter/elisp-tree-sitter/releases/latest" + ).json()["tag_name"] + + src = get_src(tag_name) + + with tempfile.NamedTemporaryFile(mode="w") as f: + json.dump(src, f) + f.flush() + + drv_path = eval_drv( + nixpkgs, + """ + rustPlatform.buildRustPackage { + pname = "tsc-dyn"; + version = "%s"; + nativeBuildInputs = [ clang ]; + src = fetchFromGitHub (lib.importJSON %s); + sourceRoot = "source/core"; + 
cargoSha256 = lib.fakeSha256; + } + """ + % (tag_name, f.name), + ) + + cargo_sha256 = get_cargo_sha256(drv_path) + + with open(join(cwd, "src.json"), mode="w") as f: + json.dump( + { + "src": src, + "version": tag_name, + "cargoSha256": cargo_sha256, + }, + f, + indent=2, + ) + f.write("\n") From 00af59359fd4ce22922b438527ebe71fefea586e Mon Sep 17 00:00:00 2001 From: adisbladis Date: Sun, 24 Apr 2022 02:10:38 +1200 Subject: [PATCH 152/161] emacs.pkgs.tsc: 0.16.1 -> 0.18.0 Use auto-updater JSON --- .../editors/emacs/elisp-packages/tsc/default.nix | 13 +++++-------- .../editors/emacs/elisp-packages/tsc/src.json | 10 ++++++++++ 2 files changed, 15 insertions(+), 8 deletions(-) create mode 100644 pkgs/applications/editors/emacs/elisp-packages/tsc/src.json diff --git a/pkgs/applications/editors/emacs/elisp-packages/tsc/default.nix b/pkgs/applications/editors/emacs/elisp-packages/tsc/default.nix index ae0c5c28736c..a9462298979d 100644 --- a/pkgs/applications/editors/emacs/elisp-packages/tsc/default.nix +++ b/pkgs/applications/editors/emacs/elisp-packages/tsc/default.nix @@ -15,14 +15,11 @@ }: let - version = "0.16.1"; - src = fetchFromGitHub { - owner = "emacs-tree-sitter"; - repo = "elisp-tree-sitter"; - rev = version; - sha256 = "sha256-tAohHdAsy/HTFFPSNOo0UyrdolH8h0KF2ekFXuLltBE="; - }; + srcMeta = lib.importJSON ./src.json; + inherit (srcMeta) version; + + src = fetchFromGitHub srcMeta.src; tsc = melpaBuild { inherit src; @@ -62,7 +59,7 @@ let rm -r $out/lib ''; - cargoSha256 = "sha256-7UOhs3wx6fGvqPjNxUKoEHwPtiJ5zgLFPwDSvhYlmis="; + inherit (srcMeta) cargoSha256; }; in symlinkJoin { diff --git a/pkgs/applications/editors/emacs/elisp-packages/tsc/src.json b/pkgs/applications/editors/emacs/elisp-packages/tsc/src.json new file mode 100644 index 000000000000..6aa6fee1830a --- /dev/null +++ b/pkgs/applications/editors/emacs/elisp-packages/tsc/src.json @@ -0,0 +1,10 @@ +{ + "src": { + "owner": "emacs-tree-sitter", + "repo": "elisp-tree-sitter", + "rev": "909717c685ff5a2327fa2ca8fb8a25216129361c", + "sha256": "LrakDpP3ZhRQqz47dPcyoQnu5lROdaNlxGaQfQT6u+k=" + }, + "version": "0.18.0", + "cargoSha256": "sha256-IRCZqszBkGF8anF/kpcPOzHdOP4lAtJBAp6FS5tAOx8=" +} From b15c703495742190e6d5cf37ac0980391c323428 Mon Sep 17 00:00:00 2001 From: adisbladis Date: Sun, 24 Apr 2022 02:41:43 +1200 Subject: [PATCH 153/161] emacs.pkgs.tree-sitter-langs: Use grammars from nixpkgs and sources from melpa --- .../tree-sitter-langs/default.nix | 94 +++++++++++-------- 1 file changed, 54 insertions(+), 40 deletions(-) diff --git a/pkgs/applications/editors/emacs/elisp-packages/tree-sitter-langs/default.nix b/pkgs/applications/editors/emacs/elisp-packages/tree-sitter-langs/default.nix index ffc3f5337496..93b4436e9520 100644 --- a/pkgs/applications/editors/emacs/elisp-packages/tree-sitter-langs/default.nix +++ b/pkgs/applications/editors/emacs/elisp-packages/tree-sitter-langs/default.nix @@ -6,52 +6,66 @@ , stdenv , fetchFromGitHub , writeText +, melpaStablePackages +, runCommand +, tree-sitter-grammars }: let - version = "0.10.14"; - tree-sitter-grammars = stdenv.mkDerivation { - name = "tree-sitter-grammars"; + inherit (melpaStablePackages) tree-sitter-langs; - inherit version; + # Note: Commented grammars are in upstream bundle but missing from our packaged grammars + grammars = [ + "agda" + "bash" + "c" + "c-sharp" + "cpp" + "css" + # "d" + "elixir" + "elm" + "fluent" + "go" + "haskell" + "hcl" + "html" + # "janet-simple" + "java" + "javascript" + "jsdoc" + "json" + "julia" + "nix" + "ocaml" + "ocaml-interface" + # "pgn" 
+ "php" + "prisma" + "python" + "ruby" + "rust" + "scala" + "swift" + "tsx" + "typescript" + "verilog" + "zig" + ]; - src = fetchzip rec { - name = "tree-sitter-grammars-linux-${version}.tar.gz"; - url = "https://github.com/emacs-tree-sitter/tree-sitter-langs/releases/download/${version}/${name}"; - sha256 = "sha256-J8VplZWhyWN8ur74Ep0CTl4nPtESzfs2Gh6MxfY5Zqc="; - stripRoot = false; - }; + grammarDir = runCommand "emacs-tree-sitter-grammars" { + # Fake same version number as upstream language bundle to prevent triggering downloads + inherit (tree-sitter-langs) version; + } ('' + install -d $out/langs/bin + echo -n $version > $out/langs/bin/BUNDLE-VERSION + '' + lib.concatStringsSep "\n" (map (g: "ln -s ${tree-sitter-grammars."tree-sitter-${g}"}/parser $out/langs/bin/${g}.so") grammars)); - installPhase = '' - install -d $out/langs/bin - echo -n $version > $out/langs/bin/BUNDLE-VERSION - install -m444 * $out/langs/bin - ''; - }; - -in melpaBuild { - inherit version; - - pname = "tree-sitter-langs"; - commit = version; - - src = fetchFromGitHub { - owner = "emacs-tree-sitter"; - repo = "tree-sitter-langs"; - rev = version; - sha256 = "sha256-uKfkhcm1k2Ov4fSr7ALVnpQoX/l9ssEWMn761pa7Y/c="; - }; - - recipe = writeText "recipe" '' - (tree-sitter-langs - :repo "emacs-tree-sitter/tree-sitter-langs" - :fetcher github - :files (:defaults "queries")) - ''; - - postPatch = '' +in +melpaStablePackages.tree-sitter-langs.overrideAttrs(old: { + postPatch = old.postPatch or "" + '' substituteInPlace ./tree-sitter-langs-build.el \ - --replace "tree-sitter-langs-grammar-dir tree-sitter-langs--dir" "tree-sitter-langs-grammar-dir \"${tree-sitter-grammars}/langs\"" + --replace "tree-sitter-langs-grammar-dir tree-sitter-langs--dir" "tree-sitter-langs-grammar-dir \"${grammarDir}/langs\"" ''; -} +}) From b4f90318fecf53ca26a104a39ac42c914582dfe4 Mon Sep 17 00:00:00 2001 From: adisbladis Date: Sun, 24 Apr 2022 07:18:42 +1200 Subject: [PATCH 154/161] emacs.pkgs.tree-sitter-langs: Create script to keep grammars up to date with upstream defaults --- .../tree-sitter-langs/default-grammars.json | 32 ++++++++ .../tree-sitter-langs/default.nix | 49 ++---------- .../tree-sitter-langs/update-defaults.py | 74 +++++++++++++++++++ 3 files changed, 114 insertions(+), 41 deletions(-) create mode 100644 pkgs/applications/editors/emacs/elisp-packages/tree-sitter-langs/default-grammars.json create mode 100755 pkgs/applications/editors/emacs/elisp-packages/tree-sitter-langs/update-defaults.py diff --git a/pkgs/applications/editors/emacs/elisp-packages/tree-sitter-langs/default-grammars.json b/pkgs/applications/editors/emacs/elisp-packages/tree-sitter-langs/default-grammars.json new file mode 100644 index 000000000000..6a5608cbf8d5 --- /dev/null +++ b/pkgs/applications/editors/emacs/elisp-packages/tree-sitter-langs/default-grammars.json @@ -0,0 +1,32 @@ +[ + "tree-sitter-agda", + "tree-sitter-bash", + "tree-sitter-c", + "tree-sitter-c-sharp", + "tree-sitter-cpp", + "tree-sitter-css", + "tree-sitter-elixir", + "tree-sitter-elm", + "tree-sitter-fluent", + "tree-sitter-go", + "tree-sitter-haskell", + "tree-sitter-hcl", + "tree-sitter-html", + "tree-sitter-java", + "tree-sitter-javascript", + "tree-sitter-jsdoc", + "tree-sitter-json", + "tree-sitter-julia", + "tree-sitter-nix", + "tree-sitter-ocaml", + "tree-sitter-php", + "tree-sitter-prisma", + "tree-sitter-python", + "tree-sitter-ruby", + "tree-sitter-rust", + "tree-sitter-scala", + "tree-sitter-swift", + "tree-sitter-typescript", + "tree-sitter-verilog", + "tree-sitter-zig" +] diff 
--git a/pkgs/applications/editors/emacs/elisp-packages/tree-sitter-langs/default.nix b/pkgs/applications/editors/emacs/elisp-packages/tree-sitter-langs/default.nix index 93b4436e9520..99fa87abf018 100644 --- a/pkgs/applications/editors/emacs/elisp-packages/tree-sitter-langs/default.nix +++ b/pkgs/applications/editors/emacs/elisp-packages/tree-sitter-langs/default.nix @@ -12,55 +12,22 @@ }: let - inherit (melpaStablePackages) tree-sitter-langs; - # Note: Commented grammars are in upstream bundle but missing from our packaged grammars - grammars = [ - "agda" - "bash" - "c" - "c-sharp" - "cpp" - "css" - # "d" - "elixir" - "elm" - "fluent" - "go" - "haskell" - "hcl" - "html" - # "janet-simple" - "java" - "javascript" - "jsdoc" - "json" - "julia" - "nix" - "ocaml" - "ocaml-interface" - # "pgn" - "php" - "prisma" - "python" - "ruby" - "rust" - "scala" - "swift" - "tsx" - "typescript" - "verilog" - "zig" - ]; + grammars = map (g: tree-sitter-grammars.${g}) (lib.importJSON ./default-grammars.json); + + libSuffix = if stdenv.isDarwin then "dylib" else "so"; + soName = g: lib.removeSuffix "-grammar" (lib.removePrefix "tree-sitter-" g.pname) + "." + libSuffix; grammarDir = runCommand "emacs-tree-sitter-grammars" { - # Fake same version number as upstream language bundle to prevent triggering downloads + # Fake same version number as upstream language bundle to prevent triggering runtime downloads inherit (tree-sitter-langs) version; } ('' install -d $out/langs/bin echo -n $version > $out/langs/bin/BUNDLE-VERSION - '' + lib.concatStringsSep "\n" (map (g: "ln -s ${tree-sitter-grammars."tree-sitter-${g}"}/parser $out/langs/bin/${g}.so") grammars)); + '' + lib.concatStringsSep "\n" (map ( + g: "ln -s ${g}/parser $out/langs/bin/${soName g}") grammars + )); in melpaStablePackages.tree-sitter-langs.overrideAttrs(old: { diff --git a/pkgs/applications/editors/emacs/elisp-packages/tree-sitter-langs/update-defaults.py b/pkgs/applications/editors/emacs/elisp-packages/tree-sitter-langs/update-defaults.py new file mode 100755 index 000000000000..19bcb8989c30 --- /dev/null +++ b/pkgs/applications/editors/emacs/elisp-packages/tree-sitter-langs/update-defaults.py @@ -0,0 +1,74 @@ +#!/usr/bin/env nix-shell +#! nix-shell ../../../../../../. 
-i python3 -p python3 -p nix +from os.path import ( + dirname, + abspath, + join, +) +from typing import ( + List, + Any, +) +import subprocess +import json +import sys +import os + + +def fmt_grammar(grammar: str) -> str: + return "tree-sitter-" + grammar + + +def eval_expr(nixpkgs: str, expr: str) -> Any: + p = subprocess.run( + [ + "nix-instantiate", + "--json", + "--eval", + "--expr", + ("with import %s {}; %s" % (nixpkgs, expr)), + ], + check=True, + stdout=subprocess.PIPE, + ) + return json.loads(p.stdout) + + +def check_grammar_exists(nixpkgs: str, grammar: str) -> bool: + return eval_expr( + nixpkgs, f'lib.hasAttr "{fmt_grammar(grammar)}" tree-sitter-grammars' + ) + + +def build_attr(nixpkgs, attr: str) -> str: + return ( + subprocess.run( + ["nix-build", "--no-out-link", nixpkgs, "-A", attr], + check=True, + stdout=subprocess.PIPE, + ) + .stdout.decode() + .strip() + ) + + +if __name__ == "__main__": + cwd = dirname(abspath(__file__)) + nixpkgs = abspath(join(cwd, "../../../../../..")) + + src_dir = build_attr(nixpkgs, "emacs.pkgs.tree-sitter-langs.src") + + existing: List[str] = [] + + grammars = os.listdir(join(src_dir, "repos")) + for g in grammars: + exists = check_grammar_exists(nixpkgs, g) + if exists: + existing.append(fmt_grammar(g)) + else: + sys.stderr.write("Missing grammar: " + fmt_grammar(g) + "\n") + sys.stderr.flush() + + with open(join(cwd, "default-grammars.json"), mode="w") as f: + json.dump(sorted(existing), f, indent=2) + f.write("\n") From daee75e5b32a00c9d5261be4de29ff9a8ccc1dd2 Mon Sep 17 00:00:00 2001 From: adisbladis Date: Sun, 24 Apr 2022 07:44:40 +1200 Subject: [PATCH 155/161] emacs.pkgs.tree-sitter-langs: Make language plugins configurable By using an expression like: ``` nix emacs.pkgs.tree-sitter-langs.passthru.withPlugins (p: [ p.tree-sitter-agda ]) ``` --- .../editors/emacs/elisp-packages/manual-packages.nix | 2 +- .../elisp-packages/tree-sitter-langs/default.nix | 12 +++++++++--- 2 files changed, 10 insertions(+), 4 deletions(-) diff --git a/pkgs/applications/editors/emacs/elisp-packages/manual-packages.nix b/pkgs/applications/editors/emacs/elisp-packages/manual-packages.nix index 6d8dbce2b32b..4dc546c6263d 100644 --- a/pkgs/applications/editors/emacs/elisp-packages/manual-packages.nix +++ b/pkgs/applications/editors/emacs/elisp-packages/manual-packages.nix @@ -233,7 +233,7 @@ sv-kalender = callPackage ./sv-kalender { }; - tree-sitter-langs = callPackage ./tree-sitter-langs { }; + tree-sitter-langs = callPackage ./tree-sitter-langs { final = self; }; tsc = callPackage ./tsc { }; youtube-dl = callPackage ./youtube-dl { }; diff --git a/pkgs/applications/editors/emacs/elisp-packages/tree-sitter-langs/default.nix b/pkgs/applications/editors/emacs/elisp-packages/tree-sitter-langs/default.nix index 99fa87abf018..e62a37565c83 100644 --- a/pkgs/applications/editors/emacs/elisp-packages/tree-sitter-langs/default.nix +++ b/pkgs/applications/editors/emacs/elisp-packages/tree-sitter-langs/default.nix @@ -9,13 +9,13 @@ , melpaStablePackages , runCommand , tree-sitter-grammars +, plugins ? map (g: tree-sitter-grammars.${g}) (lib.importJSON ./default-grammars.json) +, final }: let inherit (melpaStablePackages) tree-sitter-langs; - grammars = map (g: tree-sitter-grammars.${g}) (lib.importJSON ./default-grammars.json); - libSuffix = if stdenv.isDarwin then "dylib" else "so"; soName = g: lib.removeSuffix "-grammar" (lib.removePrefix "tree-sitter-" g.pname) + "." 
+ libSuffix; @@ -26,7 +26,7 @@ let install -d $out/langs/bin echo -n $version > $out/langs/bin/BUNDLE-VERSION '' + lib.concatStringsSep "\n" (map ( - g: "ln -s ${g}/parser $out/langs/bin/${soName g}") grammars + g: "ln -s ${g}/parser $out/langs/bin/${soName g}") plugins )); in @@ -35,4 +35,10 @@ melpaStablePackages.tree-sitter-langs.overrideAttrs(old: { substituteInPlace ./tree-sitter-langs-build.el \ --replace "tree-sitter-langs-grammar-dir tree-sitter-langs--dir" "tree-sitter-langs-grammar-dir \"${grammarDir}/langs\"" ''; + + passthru = old.passthru or {} // { + inherit plugins; + withPlugins = fn: final.tree-sitter-langs.override { plugins = fn tree-sitter-grammars; }; + }; + }) From 3b5115cb116f07439349bc96c427bb02b1d39804 Mon Sep 17 00:00:00 2001 From: "R. Ryantm" Date: Tue, 29 Mar 2022 02:31:43 +0000 Subject: [PATCH 156/161] umockdev: 0.17.7 -> 0.17.8 --- pkgs/development/libraries/umockdev/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/development/libraries/umockdev/default.nix b/pkgs/development/libraries/umockdev/default.nix index 286f5ccc3fcf..fbf5a71bb4b1 100644 --- a/pkgs/development/libraries/umockdev/default.nix +++ b/pkgs/development/libraries/umockdev/default.nix @@ -19,13 +19,13 @@ stdenv.mkDerivation rec { pname = "umockdev"; - version = "0.17.7"; + version = "0.17.8"; outputs = [ "bin" "out" "dev" "devdoc" ]; src = fetchurl { url = "https://github.com/martinpitt/umockdev/releases/download/${version}/${pname}-${version}.tar.xz"; - sha256 = "sha256-BdZCoW3QHM4Oue4bpuSFsuwIU1vsZ5pjqVv9TfGNC7U="; + sha256 = "sha256-s3zeWJxw5ohUtsv4NZGKcdP8khEYzIXycbBrAzdnVoU="; }; nativeBuildInputs = [ From bafe6ab889bdc3c60136e20e8e316f2ca14d07d1 Mon Sep 17 00:00:00 2001 From: Fabian Affolter Date: Sat, 23 Apr 2022 20:24:04 +0200 Subject: [PATCH 157/161] python3Packages.pynetdicom: 2.0.1 -> 2.0.2 --- .../python-modules/pynetdicom/default.nix | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/pkgs/development/python-modules/pynetdicom/default.nix b/pkgs/development/python-modules/pynetdicom/default.nix index fa2b369aa2ba..32c93b50e55e 100644 --- a/pkgs/development/python-modules/pynetdicom/default.nix +++ b/pkgs/development/python-modules/pynetdicom/default.nix @@ -11,7 +11,7 @@ buildPythonPackage rec { pname = "pynetdicom"; - version = "2.0.1"; + version = "2.0.2"; format = "setuptools"; disabled = pythonOlder "3.7"; @@ -20,7 +20,7 @@ buildPythonPackage rec { owner = "pydicom"; repo = pname; rev = "v${version}"; - sha256 = "sha256-28SoOdS6sAj3KrfJT8PR2k8XLEY2zh0k9w1eq1y7V8M="; + sha256 = "sha256-/JWQUtFBW4uqCbs/nUxj1pRBfTCXV4wcqTkqvzpdFrM="; }; propagatedBuildInputs = [ @@ -40,14 +40,17 @@ buildPythonPackage rec { "TestAEGoodAssociation" "TestEchoSCP" "TestEchoSCPCLI" + "TestEventHandlingAcceptor" "TestFindSCP" "TestFindSCPCLI" "TestGetSCP" "TestGetSCPCLI" "TestMoveSCP" "TestMoveSCPCLI" + "TestPrimitive_N_GET" "TestQRGetServiceClass" "TestQRMoveServiceClass" + "TestSearch" "TestState" "TestStorageServiceClass" "TestStoreSCP" @@ -56,6 +59,11 @@ buildPythonPackage rec { "TestStoreSCUCLI" ]; + disabledTestPaths = [ + # Ignore apps tests + "pynetdicom/apps/tests/" + ]; + pythonImportsCheck = [ "pynetdicom" ]; From 2347b14150c650fad3e802a4a5fdd33ba1f93e1f Mon Sep 17 00:00:00 2001 From: Kira Bruneau Date: Sat, 23 Apr 2022 16:14:14 -0400 Subject: [PATCH 158/161] hubstaff: don't include updateScript in derivation `updateScript` shouldn't be included in the derivation because it isn't needed as part of the build process. 
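
For context, a minimal sketch of the general pattern (the package name and script body below are illustrative, not part of this change): attributes passed directly to `mkDerivation` are exported into the builder's environment and become part of the derivation, so editing the updater would needlessly force a rebuild, whereas `passthru` attributes are only merged into the resulting attribute set and never reach the builder.

```nix
{ stdenv, writeShellScript }:

stdenv.mkDerivation {
  pname = "example";
  version = "1.0";

  # Nothing to fetch or build in this sketch.
  dontUnpack = true;
  installPhase = "mkdir -p $out";

  # A top-level `updateScript = writeShellScript ...;` would land in the
  # build environment and trigger a rebuild whenever the script changes.
  # Under `passthru` it is only attached to the evaluated package, still
  # reachable as `example.updateScript`:
  passthru.updateScript = writeShellScript "example-updater" ''
    echo "query upstream for the latest version here"
  '';
}
```

The hubstaff updater itself is unchanged; it only moves under `passthru`, so tooling can still find it as `hubstaff.updateScript` without it affecting the build.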
--- pkgs/applications/misc/hubstaff/default.nix | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkgs/applications/misc/hubstaff/default.nix b/pkgs/applications/misc/hubstaff/default.nix index a164654b1a67..44169f6a539e 100644 --- a/pkgs/applications/misc/hubstaff/default.nix +++ b/pkgs/applications/misc/hubstaff/default.nix @@ -55,7 +55,7 @@ stdenv.mkDerivation { ln -s $opt/data/resources $opt/x86_64/resources ''; - updateScript = writeShellScript "hubstaff-updater" '' + passthru.updateScript = writeShellScript "hubstaff-updater" '' set -eu -o pipefail installation_script_url=$(curl --fail --head --location --silent --output /dev/null --write-out %{url_effective} https://app.hubstaff.com/download/linux) From d40081772638e9d17510243ce674ad15d6516f54 Mon Sep 17 00:00:00 2001 From: Fabian Affolter Date: Sat, 23 Apr 2022 22:59:15 +0200 Subject: [PATCH 159/161] python3Packages.seventeentrack: 2022.04.4 -> 2022.04.5 --- pkgs/development/python-modules/seventeentrack/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/development/python-modules/seventeentrack/default.nix b/pkgs/development/python-modules/seventeentrack/default.nix index f9786ec2a4bd..edce6b728347 100644 --- a/pkgs/development/python-modules/seventeentrack/default.nix +++ b/pkgs/development/python-modules/seventeentrack/default.nix @@ -14,7 +14,7 @@ buildPythonPackage rec { pname = "seventeentrack"; - version = "2022.04.4"; + version = "2022.04.5"; format = "pyproject"; disabled = pythonOlder "3.8"; @@ -23,7 +23,7 @@ buildPythonPackage rec { owner = "McSwindler"; repo = pname; rev = version; - hash = "sha256-r77UA9NDQ1EQIVXZK6povmp/jIIX/f+NbpH0qmYiHZs="; + hash = "sha256-IaR0Cfs3FL4Vguc2NLdPaunk23zC8B93Iyqe9xY/hWY="; }; nativeBuildInputs = [ From 5aab7a2babd71a34d21392c6575de4fc43dfdbc4 Mon Sep 17 00:00:00 2001 From: Fabian Affolter Date: Sat, 23 Apr 2022 23:04:12 +0200 Subject: [PATCH 160/161] python3Packages.can: add missing input --- pkgs/development/python-modules/can/default.nix | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pkgs/development/python-modules/can/default.nix b/pkgs/development/python-modules/can/default.nix index 18077ce78cc8..8e7a8b7ea0e8 100644 --- a/pkgs/development/python-modules/can/default.nix +++ b/pkgs/development/python-modules/can/default.nix @@ -3,6 +3,7 @@ , fetchFromGitHub , future , hypothesis +, packaging , parameterized , msgpack , pyserial @@ -29,6 +30,7 @@ buildPythonPackage rec { propagatedBuildInputs = [ msgpack + packaging pyserial typing-extensions wrapt From a14121209eb34eeeb108b28f53b30e84bbf4b1db Mon Sep 17 00:00:00 2001 From: Fabian Affolter Date: Sat, 23 Apr 2022 23:08:12 +0200 Subject: [PATCH 161/161] python3Packages.marshmallow-dataclass: 8.5.3 -> 8.5.5 --- .../python-modules/marshmallow-dataclass/default.nix | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkgs/development/python-modules/marshmallow-dataclass/default.nix b/pkgs/development/python-modules/marshmallow-dataclass/default.nix index 4b36ee9b74ef..730254bcf825 100644 --- a/pkgs/development/python-modules/marshmallow-dataclass/default.nix +++ b/pkgs/development/python-modules/marshmallow-dataclass/default.nix @@ -12,7 +12,7 @@ buildPythonPackage rec { pname = "marshmallow-dataclass"; - version = "8.5.3"; + version = "8.5.5"; format = "setuptools"; disabled = pythonOlder "3.6"; @@ -21,7 +21,7 @@ buildPythonPackage rec { owner = "lovasoa"; repo = "marshmallow_dataclass"; rev = "v${version}"; - sha256 = 
"0mngkjfs2nxxr0y77n429hb22rmjxbnn95j4vwqr9y6q16bqxs0w"; + sha256 = "sha256-sozq+L3pa9iprAWtQd/L+LCfTWfDue04WzQ/fbM0mps="; }; propagatedBuildInputs = [