treewide: remove unused patch files

Sigmanificient 2025-03-31 00:32:22 +02:00
parent d105e65508
commit 0489b3976a
27 changed files with 0 additions and 1977 deletions

View file

@@ -1,11 +0,0 @@
diff --git a/lua/image/magick.lua b/lua/image/magick.lua
index a0c5a64..e3b57d4 100644
--- a/lua/image/magick.lua
+++ b/lua/image/magick.lua
@@ -1,3 +1,6 @@
+package.path = package.path .. ";@nix_magick@/share/lua/5.1/?/init.lua;"
+package.path = package.path .. ";@nix_magick@/share/lua/5.1/?.lua;"
+
local has_magick, magick = pcall(require, "magick")
---@return MagickImage

View file

@@ -1,26 +0,0 @@
commit bf870157e0a9c3d19e968afb276b4e7d96b4df30
Author: Doron Behar <doron.behar@gmail.com>
Date: Thu Oct 21 13:10:42 2021 +0300
gr-modtool: Don't copy source permissions
This is needed for systems such as NixOS, where the build tree isn't
writable and the files copied should be.
Signed-off-by: Doron Behar <doron.behar@gmail.com>
diff --git a/gr-utils/python/modtool/core/newmod.py b/gr-utils/python/modtool/core/newmod.py
index 123059907..0c734e7ae 100644
--- a/gr-utils/python/modtool/core/newmod.py
+++ b/gr-utils/python/modtool/core/newmod.py
@@ -78,7 +78,9 @@ class ModToolNewModule(ModTool):
self._setup_scm(mode='new')
logger.info("Creating out-of-tree module in {}...".format(self.dir))
try:
- shutil.copytree(self.srcdir, self.dir)
+ # https://stackoverflow.com/a/17022146/4935114
+ shutil.copystat = lambda x, y: x
+ shutil.copytree(self.srcdir, self.dir, copy_function=shutil.copyfile)
os.chdir(self.dir)
except OSError:
raise ModToolException('Could not create directory {}.'.format(self.dir))
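
The patch above sidesteps shutil.copytree preserving source permissions. A minimal standalone sketch of the same trick (assumed temporary paths, not gr-modtool code): neutralize shutil.copystat and copy file contents only, so a tree copied from a read-only location such as the Nix store stays writable.

import os
import shutil
import tempfile

# Sketch of the patch's approach: drop permission copying entirely and copy
# file contents only. shutil.copytree() normally calls copy2() per file and
# copystat() on each directory, which would carry over read-only modes.
shutil.copystat = lambda src, dst, **kwargs: dst

src = tempfile.mkdtemp()
open(os.path.join(src, "example.py"), "w").close()
os.chmod(src, 0o555)                        # simulate a read-only source tree
dst = os.path.join(tempfile.mkdtemp(), "module")
shutil.copytree(src, dst, copy_function=shutil.copyfile)
print(os.access(dst, os.W_OK))              # True: the copy stays writable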

View file

@@ -1,25 +0,0 @@
diff --git a/flake8_plugins/flake8_class_attributes_plugin/tests/conftest.py b/flake8_plugins/flake8_class_attributes_plugin/tests/conftest.py
index 1ad762aed..c91078dcf 100644
--- a/flake8_plugins/flake8_class_attributes_plugin/tests/conftest.py
+++ b/flake8_plugins/flake8_class_attributes_plugin/tests/conftest.py
@@ -1,6 +1,7 @@
import ast
import os
+import flake8
from flake8.options.manager import OptionManager
from flake8_plugins.flake8_class_attributes_plugin.flake8_class_attributes.checker import ClassAttributesChecker
@@ -17,7 +18,11 @@ def run_validator_for_test_file(filename, max_annotations_complexity=None,
raw_content = file_handler.read()
tree = ast.parse(raw_content)
- options = OptionManager('flake8_class_attributes_order', '0.1.3')
+ options = OptionManager(
+ version=flake8.__version__,
+ plugin_versions='flake8_class_attributes_order: 0.1.3',
+ parents=[],
+ )
options.use_class_attributes_order_strict_mode = strict_mode
options.class_attributes_order = attributes_order
ClassAttributesChecker.parse_options(options)

View file

@@ -1,205 +0,0 @@
diff --git a/src/builders/build_basalt.rs b/src/builders/build_basalt.rs
index e67e081..9ae1966 100644
--- a/src/builders/build_basalt.rs
+++ b/src/builders/build_basalt.rs
@@ -6,6 +6,7 @@ use crate::{
util::file_utils::rm_rf,
};
use std::collections::{HashMap, VecDeque};
+use std::env;
pub fn get_build_basalt_jobs(profile: &Profile, clean_build: bool) -> VecDeque<WorkerJob> {
let mut jobs = VecDeque::<WorkerJob>::new();
@@ -101,6 +102,21 @@ pub fn get_build_basalt_jobs(profile: &Profile, clean_build: bool) -> VecDeque<W
.to_string(),
]),
));
+ jobs.push_back(WorkerJob::new_cmd(
+ None,
+ "auto-patchelf".into(),
+ Some(
+ [
+ vec![
+ "--ignore-existing",
+ "--paths".into(),
+ build_dir.into_os_string().into_string().unwrap(),
+ "--libs".into(),
+ ],
+ env::var("libs").unwrap_or_default().split(":").map(|s| s.to_string()).collect(),
+ ].concat()
+ ),
+ ));
jobs
}
diff --git a/src/builders/build_libsurvive.rs b/src/builders/build_libsurvive.rs
index b4b0dc3..69871d4 100644
--- a/src/builders/build_libsurvive.rs
+++ b/src/builders/build_libsurvive.rs
@@ -9,6 +9,7 @@ use std::{
collections::{HashMap, VecDeque},
path::Path,
};
+use std::env;
pub fn get_build_libsurvive_jobs(profile: &Profile, clean_build: bool) -> VecDeque<WorkerJob> {
let mut jobs = VecDeque::<WorkerJob>::new();
@@ -71,6 +72,21 @@ pub fn get_build_libsurvive_jobs(profile: &Profile, clean_build: bool) -> VecDeq
}
jobs.push_back(cmake.get_build_job());
jobs.push_back(cmake.get_install_job());
+ jobs.push_back(WorkerJob::new_cmd(
+ None,
+ "auto-patchelf".into(),
+ Some(
+ [
+ vec![
+ "--ignore-existing",
+ "--paths".into(),
+ build_dir.into_os_string().into_string().unwrap(),
+ "--libs".into(),
+ ],
+ env::var("libs").unwrap_or_default().split(":").map(|s| s.to_string()).collect(),
+ ].concat()
+ ),
+ ));
jobs
}
diff --git a/src/builders/build_monado.rs b/src/builders/build_monado.rs
index f379d6f..5710add 100644
--- a/src/builders/build_monado.rs
+++ b/src/builders/build_monado.rs
@@ -9,6 +9,7 @@ use std::{
collections::{HashMap, VecDeque},
path::Path,
};
+use std::env;
pub fn get_build_monado_jobs(profile: &Profile, clean_build: bool) -> VecDeque<WorkerJob> {
let mut jobs = VecDeque::<WorkerJob>::new();
@@ -83,6 +84,21 @@ pub fn get_build_monado_jobs(profile: &Profile, clean_build: bool) -> VecDeque<W
}
jobs.push_back(cmake.get_build_job());
jobs.push_back(cmake.get_install_job());
+ jobs.push_back(WorkerJob::new_cmd(
+ None,
+ "auto-patchelf".into(),
+ Some(
+ [
+ vec![
+ "--ignore-existing",
+ "--paths".into(),
+ build_dir.into_os_string().into_string().unwrap(),
+ "--libs".into(),
+ ],
+ env::var("libs").unwrap_or_default().split(":").map(|s| s.to_string()).collect(),
+ ].concat()
+ ),
+ ));
jobs
}
diff --git a/src/builders/build_opencomposite.rs b/src/builders/build_opencomposite.rs
index 631b69f..72e8a73 100644
--- a/src/builders/build_opencomposite.rs
+++ b/src/builders/build_opencomposite.rs
@@ -9,6 +9,7 @@ use std::{
collections::{HashMap, VecDeque},
path::Path,
};
+use std::env;
pub fn get_build_opencomposite_jobs(profile: &Profile, clean_build: bool) -> VecDeque<WorkerJob> {
let mut jobs = VecDeque::<WorkerJob>::new();
@@ -48,6 +49,21 @@ pub fn get_build_opencomposite_jobs(profile: &Profile, clean_build: bool) -> Vec
jobs.push_back(cmake.get_prepare_job());
}
jobs.push_back(cmake.get_build_job());
+ jobs.push_back(WorkerJob::new_cmd(
+ None,
+ "auto-patchelf".into(),
+ Some(
+ [
+ vec![
+ "--ignore-existing",
+ "--paths".into(),
+ build_dir.into_os_string().into_string().unwrap(),
+ "--libs".into(),
+ ],
+ env::var("libs").unwrap_or_default().split(":").map(|s| s.to_string()).collect(),
+ ].concat()
+ ),
+ ));
jobs
}
diff --git a/src/builders/build_openhmd.rs b/src/builders/build_openhmd.rs
index 1157eca..534a895 100644
--- a/src/builders/build_openhmd.rs
+++ b/src/builders/build_openhmd.rs
@@ -3,6 +3,7 @@ use crate::{
util::file_utils::rm_rf,
};
use std::{collections::VecDeque, path::Path};
+use std::env;
pub fn get_build_openhmd_jobs(profile: &Profile, clean_build: bool) -> VecDeque<WorkerJob> {
let mut jobs = VecDeque::<WorkerJob>::new();
@@ -80,6 +81,22 @@ pub fn get_build_openhmd_jobs(profile: &Profile, clean_build: bool) -> VecDeque<
"install".into(),
]),
));
+ // autopatchelf job
+ jobs.push_back(WorkerJob::new_cmd(
+ None,
+ "auto-patchelf".into(),
+ Some(
+ [
+ vec![
+ "--ignore-existing",
+ "--paths".into(),
+ build_dir.into_os_string().into_string().unwrap(),
+ "--libs".into(),
+ ],
+ env::var("libs").unwrap_or_default().split(":").map(|s| s.to_string()).collect(),
+ ].concat()
+ ),
+ ));
jobs
}
diff --git a/src/builders/build_wivrn.rs b/src/builders/build_wivrn.rs
index f2a415d..a160186 100644
--- a/src/builders/build_wivrn.rs
+++ b/src/builders/build_wivrn.rs
@@ -9,6 +9,7 @@ use std::{
collections::{HashMap, VecDeque},
path::Path,
};
+use std::env;
pub fn get_build_wivrn_jobs(profile: &Profile, clean_build: bool) -> VecDeque<WorkerJob> {
let mut jobs = VecDeque::<WorkerJob>::new();
@@ -60,6 +61,21 @@ pub fn get_build_wivrn_jobs(profile: &Profile, clean_build: bool) -> VecDeque<Wo
}
jobs.push_back(cmake.get_build_job());
jobs.push_back(cmake.get_install_job());
+ jobs.push_back(WorkerJob::new_cmd(
+ None,
+ "auto-patchelf".into(),
+ Some(
+ [
+ vec![
+ "--no-add-existing",
+ "--paths".into(),
+ build_dir.into_os_string().into_string().unwrap(),
+ "--libs".into(),
+ ],
+ env::var("libs").unwrap_or_default().split(":").map(|s| s.to_string()).collect(),
+ ].concat()
+ ),
+ ));
jobs
}

View file

@@ -1,55 +0,0 @@
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: Paul Meyer <katexochen0@gmail.com>
Date: Thu, 2 Jan 2025 09:32:41 +0100
Subject: [PATCH] nixpkgs: patch boringssl for gcc14
Signed-off-by: Paul Meyer <katexochen0@gmail.com>
---
bazel/boringssl-gcc14.patch | 25 +++++++++++++++++++++++++
bazel/repositories.bzl | 1 +
2 files changed, 26 insertions(+)
create mode 100644 bazel/boringssl-gcc14.patch
diff --git a/bazel/boringssl-gcc14.patch b/bazel/boringssl-gcc14.patch
new file mode 100644
index 0000000000000000000000000000000000000000..8dcad4cc11f691eec93efa29075c1d356732e58b
--- /dev/null
+++ b/bazel/boringssl-gcc14.patch
@@ -0,0 +1,25 @@
+diff --git a/crypto/internal.h b/crypto/internal.h
+index a77102d..a45f97b 100644
+--- a/crypto/internal.h
++++ b/crypto/internal.h
+@@ -1174,6 +1174,11 @@
+
+ // Arithmetic functions.
+
++// The most efficient versions of these functions on GCC and Clang depend on C11
++// |_Generic|. If we ever need to call these from C++, we'll need to add a
++// variant that uses C++ overloads instead.
++#if !defined(__cplusplus)
++
+ // CRYPTO_addc_* returns |x + y + carry|, and sets |*out_carry| to the carry
+ // bit. |carry| must be zero or one.
+ #if OPENSSL_HAS_BUILTIN(__builtin_addc)
+@@ -1275,6 +1280,8 @@
+ #define CRYPTO_subc_w CRYPTO_subc_u32
+ #endif
+
++#endif // !__cplusplus
++
+
+ // FIPS functions.
+
diff --git a/bazel/repositories.bzl b/bazel/repositories.bzl
index cd15ec36f45f5958f4e65d314af78a0ef7c5dc78..935bf8a1ced67c094e4e900ba84bf39033bd3bbb 100644
--- a/bazel/repositories.bzl
+++ b/bazel/repositories.bzl
@@ -263,6 +263,7 @@ def _boringssl():
patch_args = ["-p1"],
patches = [
"@envoy//bazel:boringssl_static.patch",
+ "@envoy//bazel:boringssl-gcc14.patch",
],
)

View file

@@ -1,12 +0,0 @@
diff --git a/src/fbio.cpp b/src/fbio.cpp
index e5afc44..2485227 100644
--- a/src/fbio.cpp
+++ b/src/fbio.cpp
@@ -18,6 +18,7 @@
*
*/
+#include <sys/select.h>
#include <unistd.h>
#include <fcntl.h>
#include "config.h"

View file

@@ -1,58 +0,0 @@
commit 1a219776a9a9487828a6cb3f2b9581afb308d4fb
Author: Johannes Altmanninger <aclopte@gmail.com>
Date: Mon Mar 3 10:37:49 2025 +0100
Add the commandline to the OSC 133 command start
Given
$ cat ~/.config/kitty/kitty.conf
notify_on_cmd_finish unfocused 0.1 command notify-send "job finished with status: %s" %c
kitty will send a notification whenever a long-running (>.1s) foreground
command finishes while kitty is not focused.
The %c placeholder will be replaced by the commandline.
This is passed via the OSC 133 command start marker, by kitty's fish shell
integration.
That integration has been disabled for fish 4.0.0 because it's no longer
necessary since fish already prints OSC 133. But we missed the parameter for
the command string. Fix it. (It's debatable whether the shell or the terminal
should provide this feature but I think we should fix this regression?)
Closes #11203
See https://github.com/kovidgoyal/kitty/issues/8385#issuecomment-2692659161
(cherry picked from commit 4378e73fc746b539c851c22800b42fdfeb1a1964)
diff --git a/src/reader.rs b/src/reader.rs
index 46f68d8c4..5f68ac57d 100644
--- a/src/reader.rs
+++ b/src/reader.rs
@@ -88,6 +88,7 @@ use crate::libc::MB_CUR_MAX;
use crate::nix::isatty;
use crate::operation_context::{get_bg_context, OperationContext};
use crate::output::parse_color;
+use crate::output::BufferedOuputter;
use crate::output::Outputter;
use crate::pager::{PageRendering, Pager, SelectionMotion};
use crate::panic::AT_EXIT;
@@ -650,8 +651,13 @@ fn read_i(parser: &Parser) -> i32 {
data.command_line.clear();
data.update_buff_pos(EditableLineTag::Commandline, None);
data.command_line_changed(EditableLineTag::Commandline);
- // OSC 133 End of command
- data.screen.write_bytes(b"\x1b]133;C\x07");
+ // OSC 133 "Command start"
+ write!(
+ BufferedOuputter::new(&mut Outputter::stdoutput().borrow_mut()),
+ "\x1b]133;C;cmdline_url={}\x07",
+ escape_string(&command, EscapeStringStyle::Url),
+ )
+ .unwrap();
event::fire_generic(parser, L!("fish_preexec").to_owned(), vec![command.clone()]);
let eval_res = reader_run_command(parser, &command);
signal_clear_cancel();
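
For context, the marker written above is a plain terminal escape sequence; a rough standalone sketch of what fish now emits (the percent-escaping helper here is illustrative only, fish uses its own EscapeStringStyle::Url):

import sys
from urllib.parse import quote

def osc133_command_start(cmdline: str) -> str:
    # ESC ] 133 ; C ; cmdline_url=<escaped command> BEL -- the command line is
    # escaped so control bytes cannot terminate the sequence early.
    return "\x1b]133;C;cmdline_url=" + quote(cmdline) + "\x07"

def osc133_command_finished(status: int) -> str:
    # ESC ] 133 ; D ; <exit status> BEL (the "command finished" marker that
    # appears in the companion patch below)
    return f"\x1b]133;D;{status}\x07"

sys.stdout.write(osc133_command_start("sleep 5; make -j8"))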

View file

@@ -1,82 +0,0 @@
commit 780541aa2509e6020ffbb6359e412c66eeca03dd
Author: Johannes Altmanninger <aclopte@gmail.com>
Date: Mon Mar 3 10:37:49 2025 +0100
Try to reduce write(3) calls for OSC 133 prompt markers
Something like
write!(f, "foo{}bar", ...)
seems to call f.write_str() thrice.
Splitting a single OSC 133 command into three calls to write(3) might result in
odd situations if one of them fails. Let's try to do it in one in most cases.
Add a new buffered output type that can be used with write!(). This is
somewhat redundant given that we have scoped_buffer(). While at it, remove
the confused error handling. This doesn't fail unless we are OOM (and this
new type makes that more obvious).
(cherry picked from commit e5e932e97082ce9079a8d6e665839c5bfd875049)
diff --git a/src/output.rs b/src/output.rs
index 8833df4a0..eeb3be0a7 100644
--- a/src/output.rs
+++ b/src/output.rs
@@ -456,6 +456,35 @@ impl Outputter {
}
}
+pub struct BufferedOuputter<'a>(&'a mut Outputter);
+
+impl<'a> BufferedOuputter<'a> {
+ pub fn new(outputter: &'a mut Outputter) -> Self {
+ outputter.begin_buffering();
+ Self(outputter)
+ }
+}
+
+impl<'a> Drop for BufferedOuputter<'a> {
+ fn drop(&mut self) {
+ self.0.end_buffering();
+ }
+}
+
+impl<'a> Write for BufferedOuputter<'a> {
+ fn write(&mut self, buf: &[u8]) -> Result<usize> {
+ self.0
+ .write(buf)
+ .expect("Writing to in-memory buffer should never fail");
+ Ok(buf.len())
+ }
+
+ fn flush(&mut self) -> Result<()> {
+ self.0.flush().unwrap();
+ Ok(())
+ }
+}
+
/// Given a list of RgbColor, pick the "best" one, as determined by the color support. Returns
/// RgbColor::NONE if empty.
pub fn best_color(candidates: &[RgbColor], support: ColorSupport) -> RgbColor {
diff --git a/src/reader.rs b/src/reader.rs
index ea8e4356c..46f68d8c4 100644
--- a/src/reader.rs
+++ b/src/reader.rs
@@ -664,11 +664,12 @@ fn read_i(parser: &Parser) -> i32 {
parser.libdata_mut().exit_current_script = false;
// OSC 133 "Command finished"
- let _ = write!(
- Outputter::stdoutput().borrow_mut(),
+ write!(
+ BufferedOuputter::new(&mut Outputter::stdoutput().borrow_mut()),
"\x1b]133;D;{}\x07",
parser.get_last_status()
- );
+ )
+ .unwrap();
event::fire_generic(parser, L!("fish_postexec").to_owned(), vec![command]);
// Allow any pending history items to be returned in the history array.
data.history.resolve_pending();

View file

@@ -1,57 +0,0 @@
From 9358bebb11df19f46d0813723959518498d812b2 Mon Sep 17 00:00:00 2001
From: VuiMuich <vuimuich@quantentunnel.de>
Date: Mon, 2 Sep 2024 11:15:27 +0200
Subject: [PATCH] fix rust 1.80.x for nixpkgs
---
Cargo.lock | 16 ++++++++++++----
1 file changed, 12 insertions(+), 4 deletions(-)
diff --git a/Cargo.lock b/Cargo.lock
index f8a82eecf..5728b55bf 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -777,6 +777,12 @@ dependencies = [
"winapi",
]
+[[package]]
+name = "num-conv"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9"
+
[[package]]
name = "num-traits"
version = "0.2.17"
@@ -1218,13 +1224,14 @@ dependencies = [
[[package]]
name = "time"
-version = "0.3.30"
+version = "0.3.35"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c4a34ab300f2dee6e562c10a046fc05e358b29f9bf92277f30c3c8d82275f6f5"
+checksum = "ef89ece63debf11bc32d1ed8d078ac870cbeb44da02afb02a9ff135ae7ca0582"
dependencies = [
"deranged",
"itoa",
"libc",
+ "num-conv",
"num_threads",
"powerfmt",
"serde",
@@ -1240,10 +1247,11 @@ checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3"
[[package]]
name = "time-macros"
-version = "0.2.15"
+version = "0.2.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4ad70d68dba9e1f8aceda7aa6711965dfec1cac869f311a51bd08b3a2ccbce20"
+checksum = "3f252a68540fde3a3877aeea552b832b40ab9a69e318efd078774a01ddee1ccf"
dependencies = [
+ "num-conv",
"time-core",
]

View file

@@ -1,49 +0,0 @@
diff --git a/src/MACLib/APELink.cpp b/src/MACLib/APELink.cpp
index d349f4b..b00ec83 100644
--- a/src/MACLib/APELink.cpp
+++ b/src/MACLib/APELink.cpp
@@ -63,10 +63,10 @@ void CAPELink::ParseData(const char * pData, const str_utf16 * pFilename)
if (pData != NULL)
{
// parse out the information
- char * pHeader = strstr(pData, APE_LINK_HEADER);
- char * pImageFile = strstr(pData, APE_LINK_IMAGE_FILE_TAG);
- char * pStartBlock = strstr(pData, APE_LINK_START_BLOCK_TAG);
- char * pFinishBlock = strstr(pData, APE_LINK_FINISH_BLOCK_TAG);
+ const char * pHeader = strstr(pData, APE_LINK_HEADER);
+ const char * pImageFile = strstr(pData, APE_LINK_IMAGE_FILE_TAG);
+ const char * pStartBlock = strstr(pData, APE_LINK_START_BLOCK_TAG);
+ const char * pFinishBlock = strstr(pData, APE_LINK_FINISH_BLOCK_TAG);
if (pHeader && pImageFile && pStartBlock && pFinishBlock)
{
@@ -81,7 +81,7 @@ void CAPELink::ParseData(const char * pData, const str_utf16 * pFilename)
// get the path
char cImageFile[MAX_PATH + 1]; int nIndex = 0;
- char * pImageCharacter = &pImageFile[strlen(APE_LINK_IMAGE_FILE_TAG)];
+ const char * pImageCharacter = &pImageFile[strlen(APE_LINK_IMAGE_FILE_TAG)];
while ((*pImageCharacter != 0) && (*pImageCharacter != '\r') && (*pImageCharacter != '\n'))
cImageFile[nIndex++] = *pImageCharacter++;
cImageFile[nIndex] = 0;
diff --git a/src/Shared/All.h b/src/Shared/All.h
index 328addc..7730e89 100644
--- a/src/Shared/All.h
+++ b/src/Shared/All.h
@@ -21,6 +21,8 @@ Global includes
#include <windows.h>
#endif
+#include <stdlib.h>
+
#ifdef _WIN32
#include <mmsystem.h>
#include <tchar.h>
@@ -34,7 +36,6 @@ Global includes
#include "NoWindows.h"
#endif
-#include <stdlib.h>
#include <memory.h>
#include <stdio.h>
#include <math.h>

View file

@@ -1,22 +0,0 @@
diff --git a/ext/nokogiri/xml_document.c b/ext/nokogiri/xml_document.c
index 1d2119c8..c1c87713 100644
--- a/ext/nokogiri/xml_document.c
+++ b/ext/nokogiri/xml_document.c
@@ -492,7 +492,7 @@ static int block_caller(void * ctx, xmlNodePtr _node, xmlNodePtr _parent)
* The block must return a non-nil, non-false value if the +obj+ passed in
* should be included in the canonicalized document.
*/
-static VALUE canonicalize(int argc, VALUE* argv, VALUE self)
+static VALUE nokogiri_xml_document_canonicalize(int argc, VALUE* argv, VALUE self)
{
VALUE mode;
VALUE incl_ns;
@@ -573,7 +573,7 @@ void init_xml_document()
rb_define_method(klass, "encoding", encoding, 0);
rb_define_method(klass, "encoding=", set_encoding, 1);
rb_define_method(klass, "version", version, 0);
- rb_define_method(klass, "canonicalize", canonicalize, -1);
+ rb_define_method(klass, "nokogiri_xml_document_canonicalize", canonicalize, -1);
rb_define_method(klass, "dup", duplicate_document, -1);
rb_define_method(klass, "url", url, 0);
rb_define_method(klass, "create_entity", create_entity, -1);

View file

@@ -1,122 +0,0 @@
--- a/src/svncpp/client_ls.cpp
+++ b/src/svncpp/client_ls.cpp
@@ -25,6 +25,7 @@
#include "svn_client.h"
#include "svn_path.h"
#include "svn_sorts.h"
+#include "svn_version.h"
//#include "svn_utf.h"
// svncpp
@@ -35,6 +36,7 @@
#include "m_is_empty.hpp"
+#if SVN_VER_MAJOR == 1 && SVN_VER_MINOR < 8
static int
compare_items_as_paths(const svn_sort__item_t *a, const svn_sort__item_t *b)
{
@@ -84,6 +86,72 @@ namespace svn
}
}
+#else
+
+#include <algorithm>
+
+static svn_error_t* store_entry(
+ void *baton,
+ const char *path,
+ const svn_dirent_t *dirent,
+ const svn_lock_t *,
+ const char *abs_path,
+ const char *,
+ const char *,
+ apr_pool_t *scratch_pool)
+{
+ svn::DirEntries *entries = reinterpret_cast<svn::DirEntries*>(baton);
+ if (path[0] == '\0') {
+ if (dirent->kind == svn_node_file) {
+ // for compatibility with svn_client_ls behaviour, listing a file
+ // stores that file name
+ entries->push_back(svn::DirEntry(svn_path_basename(abs_path, scratch_pool), dirent));
+ }
+ } else {
+ entries->push_back(svn::DirEntry(path, dirent));
+ }
+ return SVN_NO_ERROR;
+}
+
+static bool sort_by_path(svn::DirEntry const& a, svn::DirEntry const& b)
+{
+ return svn_path_compare_paths(a.name(), b.name()) < 0;
+}
+
+namespace svn
+{
+ DirEntries
+ Client::list(const char * pathOrUrl,
+ svn_opt_revision_t * revision,
+ bool recurse) throw(ClientException)
+ {
+ Pool pool;
+ DirEntries entries;
+
+ svn_error_t * error =
+ svn_client_list3(pathOrUrl,
+ revision,
+ revision,
+ SVN_DEPTH_INFINITY_OR_IMMEDIATES(recurse),
+ SVN_DIRENT_ALL,
+ FALSE, // fetch locks
+ FALSE, // include externals
+ &store_entry,
+ &entries,
+ *m_context,
+ pool);
+
+ if (error != SVN_NO_ERROR)
+ throw ClientException(error);
+
+ std::sort(entries.begin(), entries.end(), &sort_by_path);
+
+ return entries;
+ }
+}
+
+#endif
+
/* -----------------------------------------------------------------
* local variables:
* eval: (load-file "../../rapidsvn-dev.el")
--- a/src/svncpp/dirent.cpp 2017-03-19 15:48:58.956827337 +0100
+++ b/src/svncpp/dirent.cpp 2017-03-19 15:50:19.111527279 +0100
@@ -47,7 +47,7 @@
{
}
- Data(const char * _name, svn_dirent_t * dirEntry)
+ Data(const char * _name, const svn_dirent_t * dirEntry)
: name(_name), kind(dirEntry->kind), size(dirEntry->size),
hasProps(dirEntry->has_props != 0),
createdRev(dirEntry->created_rev), time(dirEntry->time)
@@ -78,7 +78,7 @@
{
}
- DirEntry::DirEntry(const char * name, svn_dirent_t * DirEntry)
+ DirEntry::DirEntry(const char * name, const svn_dirent_t * DirEntry)
: m(new Data(name, DirEntry))
{
}
--- a/include/svncpp/dirent.hpp 2017-03-19 15:50:54.860506116 +0100
+++ b/include/svncpp/dirent.hpp 2017-03-19 15:50:58.314407598 +0100
@@ -41,7 +41,7 @@
/**
* constructor for existing @a svn_dirent_t entries
*/
- DirEntry(const char * name, svn_dirent_t * dirEntry);
+ DirEntry(const char * name, const svn_dirent_t * dirEntry);
/**
* copy constructor

View file

@@ -1,385 +0,0 @@
diff --git a/Cargo.lock b/Cargo.lock
index e41780e..15db50e 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -119,11 +119,11 @@ checksum = "904dfeac50f3cdaba28fc6f57fdcddb75f49ed61346676a78c4ffe55877802fd"
[[package]]
name = "bindgen"
-version = "0.57.0"
+version = "0.68.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fd4865004a46a0aafb2a0a5eb19d3c9fc46ee5f063a6cfc605c69ac9ecf5263d"
+checksum = "726e4313eb6ec35d2730258ad4e15b547ee75d6afaa1361a922e78e59b7d8078"
dependencies = [
- "bitflags",
+ "bitflags 2.5.0",
"cexpr",
"clang-sys",
"lazy_static",
@@ -134,6 +134,7 @@ dependencies = [
"regex",
"rustc-hash",
"shlex",
+ "syn 2.0.59",
]
[[package]]
@@ -157,6 +158,12 @@ version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
+[[package]]
+name = "bitflags"
+version = "2.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cf4b9d6a944f767f8e5e0db018570623c85f3d925ac718db4e06d0187adb21c1"
+
[[package]]
name = "block-buffer"
version = "0.9.0"
@@ -189,9 +196,9 @@ checksum = "22a9137b95ea06864e018375b72adfb7db6e6f68cfc8df5a04d00288050485ee"
[[package]]
name = "cexpr"
-version = "0.4.0"
+version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f4aedb84272dbe89af497cf81375129abda4fc0a9e7c5d317498c15cc30c0d27"
+checksum = "6fac387a98bb7c37292057cffc56d62ecb629900026402633ae9160df93a8766"
dependencies = [
"nom",
]
@@ -243,7 +250,7 @@ checksum = "a0610544180c38b88101fecf2dd634b174a62eef6946f84dfc6a7127512b381c"
dependencies = [
"ansi_term",
"atty",
- "bitflags",
+ "bitflags 1.3.2",
"strsim",
"term_size",
"textwrap",
@@ -439,6 +446,16 @@ version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1f4b14e20978669064c33b4c1e0fb4083412e40fe56cbea2eae80fd7591503ee"
+[[package]]
+name = "errno"
+version = "0.3.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a258e46cdc063eb8519c00b9fc845fc47bcfca4130e2f08e88665ceda8474245"
+dependencies = [
+ "libc",
+ "windows-sys",
+]
+
[[package]]
name = "failure"
version = "0.1.8"
@@ -457,10 +474,16 @@ checksum = "aa4da3c766cd7a0db8242e326e9e4e081edd567072893ed320008189715366a4"
dependencies = [
"proc-macro2",
"quote",
- "syn",
+ "syn 1.0.82",
"synstructure",
]
+[[package]]
+name = "fastrand"
+version = "2.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "658bd65b1cf4c852a3cc96f18a8ce7b5640f6b703f905c7d74532294c2a63984"
+
[[package]]
name = "fixedbitset"
version = "0.2.0"
@@ -609,9 +632,9 @@ checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55"
[[package]]
name = "libc"
-version = "0.2.109"
+version = "0.2.153"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f98a04dce437184842841303488f70d0188c5f51437d2a834dc097eafa909a01"
+checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd"
[[package]]
name = "libloading"
@@ -629,6 +652,12 @@ version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c7d73b3f436185384286bd8098d17ec07c9a7d2388a6599f824d8502b529702a"
+[[package]]
+name = "linux-raw-sys"
+version = "0.4.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "01cda141df6706de531b6c46c3a33ecca755538219bd484262fa09410c13539c"
+
[[package]]
name = "lock_api"
version = "0.4.5"
@@ -665,6 +694,12 @@ version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2af4f95d8737f4ffafbd1fb3c703cdc898868a244a59786793cba0520ebdcbdd"
+[[package]]
+name = "minimal-lexical"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a"
+
[[package]]
name = "miniz_oxide"
version = "0.4.4"
@@ -689,12 +724,15 @@ dependencies = [
[[package]]
name = "nettle-sys"
-version = "2.0.8"
+version = "2.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b95aff9e61c8d8132e41dceae74c6e526edcac8d120072c87a300b9ab7e75226"
+checksum = "b495053a10a19a80e3a26bf1212e92e29350797b5f5bdc58268c3f3f818e66ec"
dependencies = [
"bindgen",
+ "cc",
+ "libc",
"pkg-config",
+ "tempfile",
"vcpkg",
]
@@ -706,12 +744,12 @@ checksum = "e4a24736216ec316047a1fc4252e27dabb04218aa4a3f37c6e7ddbf1f9782b54"
[[package]]
name = "nom"
-version = "5.1.2"
+version = "7.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ffb4262d26ed83a1c0a33a38fe2bb15797329c85770da05e6b828ddb782627af"
+checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a"
dependencies = [
"memchr",
- "version_check",
+ "minimal-lexical",
]
[[package]]
@@ -851,18 +889,18 @@ checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c"
[[package]]
name = "proc-macro2"
-version = "1.0.33"
+version = "1.0.81"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fb37d2df5df740e582f28f8560cf425f52bb267d872fe58358eadb554909f07a"
+checksum = "3d1597b0c024618f09a9c3b8655b7e430397a36d23fdafec26d6965e9eec3eba"
dependencies = [
- "unicode-xid",
+ "unicode-ident",
]
[[package]]
name = "quote"
-version = "1.0.10"
+version = "1.0.36"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "38bc8cc6a5f2e3655e0899c1b848643b2562f853f114bfec7be120678e3ace05"
+checksum = "0fa76aaf39101c457836aec0ce2316dbdc3ab723cdda1c6bd4e6ad4208acaca7"
dependencies = [
"proc-macro2",
]
@@ -909,7 +947,7 @@ version = "0.2.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8383f39639269cde97d255a32bdb68c047337295414940c68bdd30c2e13203ff"
dependencies = [
- "bitflags",
+ "bitflags 1.3.2",
]
[[package]]
@@ -951,6 +989,19 @@ version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"
+[[package]]
+name = "rustix"
+version = "0.38.32"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "65e04861e65f21776e67888bfbea442b3642beaa0138fdb1dd7a84a52dffdb89"
+dependencies = [
+ "bitflags 2.5.0",
+ "errno",
+ "libc",
+ "linux-raw-sys",
+ "windows-sys",
+]
+
[[package]]
name = "rustversion"
version = "1.0.6"
@@ -1053,9 +1104,9 @@ dependencies = [
[[package]]
name = "shlex"
-version = "0.1.1"
+version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7fdf1b9db47230893d76faad238fd6097fd6d6a9245cd7a4d90dbd639536bbd2"
+checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64"
[[package]]
name = "signature"
@@ -1117,6 +1168,17 @@ dependencies = [
"unicode-xid",
]
+[[package]]
+name = "syn"
+version = "2.0.59"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4a6531ffc7b071655e4ce2e04bd464c4830bb585a61cabb96cf808f05172615a"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "unicode-ident",
+]
+
[[package]]
name = "synstructure"
version = "0.12.6"
@@ -1125,10 +1187,22 @@ checksum = "f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f"
dependencies = [
"proc-macro2",
"quote",
- "syn",
+ "syn 1.0.82",
"unicode-xid",
]
+[[package]]
+name = "tempfile"
+version = "3.10.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "85b77fafb263dd9d05cbeac119526425676db3784113aa9295c88498cbf8bff1"
+dependencies = [
+ "cfg-if",
+ "fastrand",
+ "rustix",
+ "windows-sys",
+]
+
[[package]]
name = "term"
version = "0.7.0"
@@ -1177,7 +1251,7 @@ checksum = "aa32fd3f627f367fe16f893e2597ae3c05020f8bba2666a4e6ea73d377e5714b"
dependencies = [
"proc-macro2",
"quote",
- "syn",
+ "syn 1.0.82",
]
[[package]]
@@ -1226,6 +1300,12 @@ version = "0.3.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1a01404663e3db436ed2746d9fefef640d868edae3cceb81c3b8d5732fda678f"
+[[package]]
+name = "unicode-ident"
+version = "1.0.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b"
+
[[package]]
name = "unicode-normalization"
version = "0.1.19"
@@ -1306,6 +1386,79 @@ version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
+[[package]]
+name = "windows-sys"
+version = "0.52.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d"
+dependencies = [
+ "windows-targets",
+]
+
+[[package]]
+name = "windows-targets"
+version = "0.52.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6f0713a46559409d202e70e28227288446bf7841d3211583a4b53e3f6d96e7eb"
+dependencies = [
+ "windows_aarch64_gnullvm",
+ "windows_aarch64_msvc",
+ "windows_i686_gnu",
+ "windows_i686_gnullvm",
+ "windows_i686_msvc",
+ "windows_x86_64_gnu",
+ "windows_x86_64_gnullvm",
+ "windows_x86_64_msvc",
+]
+
+[[package]]
+name = "windows_aarch64_gnullvm"
+version = "0.52.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7088eed71e8b8dda258ecc8bac5fb1153c5cffaf2578fc8ff5d61e23578d3263"
+
+[[package]]
+name = "windows_aarch64_msvc"
+version = "0.52.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9985fd1504e250c615ca5f281c3f7a6da76213ebd5ccc9561496568a2752afb6"
+
+[[package]]
+name = "windows_i686_gnu"
+version = "0.52.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "88ba073cf16d5372720ec942a8ccbf61626074c6d4dd2e745299726ce8b89670"
+
+[[package]]
+name = "windows_i686_gnullvm"
+version = "0.52.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "87f4261229030a858f36b459e748ae97545d6f1ec60e5e0d6a3d32e0dc232ee9"
+
+[[package]]
+name = "windows_i686_msvc"
+version = "0.52.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "db3c2bf3d13d5b658be73463284eaf12830ac9a26a90c717b7f771dfe97487bf"
+
+[[package]]
+name = "windows_x86_64_gnu"
+version = "0.52.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4e4246f76bdeff09eb48875a0fd3e2af6aada79d409d33011886d3e1581517d9"
+
+[[package]]
+name = "windows_x86_64_gnullvm"
+version = "0.52.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "852298e482cd67c356ddd9570386e2862b5673c85bd5f88df9ab6802b334c596"
+
+[[package]]
+name = "windows_x86_64_msvc"
+version = "0.52.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bec47e5bfd1bff0eeaf6d8b485cc1074891a197ab4225d504cb7a1ab88b02bf0"
+
[[package]]
name = "xxhash-rust"
version = "0.8.2"
@@ -1329,6 +1482,6 @@ checksum = "65f1a51723ec88c66d5d1fe80c841f17f63587d6691901d66be9bec6c3b51f73"
dependencies = [
"proc-macro2",
"quote",
- "syn",
+ "syn 1.0.82",
"synstructure",
]

View file

@@ -1,21 +0,0 @@
diff -burN Shark.orig/CMakeLists.txt Shark/CMakeLists.txt
--- Shark.orig/CMakeLists.txt 2016-09-02 17:04:54.000000000 +0200
+++ Shark/CMakeLists.txt 2017-07-31 16:41:18.563473752 +0200
@@ -194,6 +194,8 @@
#####################################################################
# HDF5 configuration
#####################################################################
+option(ENABLE_HDF5 "Use HDF5" ON)
+if(ENABLE_HDF5)
find_package(HDF5 COMPONENTS C CXX HL QUIET)
mark_as_advanced(HDF5_DIR)
if(HDF5_FOUND)
@@ -215,7 +217,7 @@
else()
message(STATUS "HDF5 not found, skip")
endif()
-
+endif() #ENABLE_HDF5
#####################################################################
# ATLAS configuration
#####################################################################

View file

@@ -1,100 +0,0 @@
diff --git a/back.c b/back.c
index c1810dc..75416fb 100644
--- a/back.c
+++ b/back.c
@@ -25,7 +25,6 @@ along with this program. If not, see <http://www.gnu.org/licenses/>.
#include "cfgfile.h"
#include "cmd.h"
-#define CFGFILE "/etc/spnavrc"
int get_daemon_pid(void);
static int update_cfg(void);
@@ -127,7 +126,7 @@ int get_daemon_pid(void)
static int update_cfg(void)
{
- if(write_cfg(CFGFILE, &cfg) == -1) {
+ if(write_cfg(cfg_path(), &cfg) == -1) {
fprintf(stderr, "failed to update config file\n");
return -1;
}
diff --git a/cfgfile.c b/cfgfile.c
index 5a9c502..2ea323d 100644
--- a/cfgfile.c
+++ b/cfgfile.c
@@ -22,12 +22,40 @@ along with this program. If not, see <http://www.gnu.org/licenses/>.
#include <ctype.h>
#include <errno.h>
#include <fcntl.h>
+#include <unistd.h>
+#include <sys/types.h>
+#include <pwd.h>
#include "cfgfile.h"
enum {TX, TY, TZ, RX, RY, RZ};
static const int def_axmap[] = {0, 2, 1, 3, 5, 4};
static const int def_axinv[] = {0, 1, 1, 0, 1, 1};
+static char* config_path;
+
+char* cfg_path()
+{
+ char* buf;
+ if((buf = getenv("XDG_CONFIG_HOME"))) {
+ if(config_path == NULL) {
+ config_path = malloc(strlen(buf) + strlen("/spnavrc") + 1);
+ if ( config_path != NULL) {
+ sprintf(config_path, "%s/spnavrc", buf);
+ }
+ };
+ return config_path;
+ } else {
+ if (!(buf = getenv("HOME"))) {
+ struct passwd *pw = getpwuid(getuid());
+ buf = pw->pw_dir;
+ }
+ config_path = malloc(strlen(buf) + strlen("/.config/spnavrc") + 1);
+ if ( config_path != NULL) {
+ sprintf(config_path, "%s/.config/spnavrc", buf);
+ }
+ return config_path;
+ }
+}
void default_cfg(struct cfg *cfg)
{
diff --git a/cfgfile.h b/cfgfile.h
index dfed8c9..5bb1b2c 100644
--- a/cfgfile.h
+++ b/cfgfile.h
@@ -47,6 +47,7 @@ struct cfg {
int devid[MAX_CUSTOM][2]; /* custom USB vendor/product id list */
};
+char* cfg_path(void);
void default_cfg(struct cfg *cfg);
int read_cfg(const char *fname, struct cfg *cfg);
int write_cfg(const char *fname, struct cfg *cfg);
diff --git a/front_gtk.c b/front_gtk.c
index e4c2cd7..6a800a0 100644
--- a/front_gtk.c
+++ b/front_gtk.c
@@ -28,8 +28,6 @@ along with this program. If not, see <http://www.gnu.org/licenses/>.
#include "cmd.h"
#include "ui.h"
-#define CFGFILE "/etc/spnavrc"
-
#define CHK_AXINV_TRANS_X "axinv_trans_x"
#define CHK_AXINV_TRANS_Y "axinv_trans_y"
#define CHK_AXINV_TRANS_Z "axinv_trans_z"
@@ -121,7 +119,7 @@ void frontend(int pfd)
gtk_init(&argc, 0);
- read_cfg(CFGFILE, &cfg);
+ read_cfg(cfg_path(), &cfg);
create_ui();

View file

@@ -1,40 +0,0 @@
diff --git a/back.c b/back.c
index f364e31..c1810dc 100644
--- a/back.c
+++ b/back.c
@@ -26,7 +26,6 @@ along with this program. If not, see <http://www.gnu.org/licenses/>.
#include "cmd.h"
#define CFGFILE "/etc/spnavrc"
-#define PIDFILE "/var/run/spnavd.pid"
int get_daemon_pid(void);
static int update_cfg(void);
@@ -97,11 +96,26 @@ int get_daemon_pid(void)
{
FILE *fp;
char buf[64];
+ char* xdg_runtime_dir;
+ char* pidfile;
- if(!(fp = fopen(PIDFILE, "r"))) {
+ if(!(xdg_runtime_dir = getenv("XDG_RUNTIME_DIR"))){
+ fprintf(stderr, "XDG_RUNTIME_DIR not set, can't find spacenav pid file\n");
+ return -1;
+ }
+ pidfile = malloc(strlen(xdg_runtime_dir) + strlen("/spnavd.pid") + 1);
+ if (pidfile == NULL) {
+ fprintf(stderr, "failed to allocate memory\n");
+ return -1;
+ }
+ sprintf(pidfile, "%s/spnavd.pid", xdg_runtime_dir);
+
+ if(!(fp = fopen(pidfile, "r"))) {
fprintf(stderr, "no spacenav pid file, can't find daemon\n");
+ free(pidfile);
return -1;
}
+ free(pidfile);
if(!fgets(buf, sizeof buf, fp) || !isdigit(buf[0])) {
fprintf(stderr, "corrupted pidfile, can't find the daemon\n");
fclose(fp);

View file

@@ -1,7 +0,0 @@
--- a/TESTING/MATGEN/CMakeLists.txt
+++ b/TESTING/MATGEN/CMakeLists.txt
@@ -97,3 +97,4 @@ if(enable_complex16)
endif()
add_library(matgen ${sources})
+target_link_libraries(matgen superlu)

View file

@@ -1,38 +0,0 @@
diff --git a/3rdp/build/GNUmakefile b/3rdp/build/GNUmakefile
index fdf54565834a6a418f7267f4f8bf2269b80eab41..804dd76feb1a2d3b0278686f5326f7c2302e5fde 100644
--- a/3rdp/build/GNUmakefile
+++ b/3rdp/build/GNUmakefile
@@ -67,7 +67,7 @@
$(CRYPT_IDIR): | $(3RDPODIR)
$(QUIET)$(IFNOTEXIST) mkdir $(CRYPT_IDIR)
-$(CRYPTLIB_BUILD): $(3RDP_ROOT)$(DIRSEP)dist/cryptlib.zip $(3RDP_ROOT)$(DIRSEP)build/terminal-params.patch $(3RDP_ROOT)$(DIRSEP)build/cl-mingw32-static.patch $(3RDP_ROOT)$(DIRSEP)build/cl-ranlib.patch $(3RDP_ROOT)$(DIRSEP)build/cl-win32-noasm.patch $(3RDP_ROOT)$(DIRSEP)build/cl-zz-country.patch $(3RDP_ROOT)$(DIRSEP)build/cl-algorithms.patch $(3RDP_ROOT)$(DIRSEP)build/cl-allow-duplicate-ext.patch $(3RDP_ROOT)$(DIRSEP)build/cl-macosx-minver.patch $(3RDP_ROOT)$(DIRSEP)build/cl-endian.patch $(3RDP_ROOT)$(DIRSEP)build/cl-cryptodev.patch $(3RDP_ROOT)$(DIRSEP)build/cl-posix-me-gently.patch $(3RDP_ROOT)$(DIRSEP)build/cl-tpm-linux.patch $(3RDP_ROOT)$(DIRSEP)build/cl-PAM-noprompts.patch $(3RDP_ROOT)$(DIRSEP)build/cl-zlib.patch $(3RDP_ROOT)$(DIRSEP)build/Dynamic-linked-static-lib.patch $(3RDP_ROOT)$(DIRSEP)build/SSL-fix.patch $(3RDP_ROOT)$(DIRSEP)build/cl-bigger-maxattribute.patch $(3RDP_ROOT)$(DIRSEP)build/cl-vcxproj.patch $(3RDP_ROOT)$(DIRSEP)build/cl-mingw-vcver.patch $(3RDP_ROOT)$(DIRSEP)build/cl-win32-build-fix.patch $(3RDP_ROOT)$(DIRSEP)build/cl-gcc-non-const-time-val.patch $(3RDP_ROOT)$(DIRSEP)build/cl-no-odbc.patch $(3RDP_ROOT)$(DIRSEP)build/cl-noasm-defines.patch $(3RDP_ROOT)$(DIRSEP)build/cl-bn-noasm64-fix.patch $(3RDP_ROOT)$(DIRSEP)build/cl-no-RSA-suites.patch $(3RDP_ROOT)$(DIRSEP)build/cl-fix-ECC-RSA.patch $(3RDP_ROOT)$(DIRSEP)build/cl-prefer-ECC.patch $(3RDP_ROOT)$(DIRSEP)build/cl-prefer-ECC-harder.patch $(3RDP_ROOT)$(DIRSEP)build/cl-more-RSA-ECC-fixes.patch $(3RDP_ROOT)$(DIRSEP)build/cl-DH-key-init.patch $(3RDP_ROOT)$(DIRSEP)build/cl-clear-GCM-flag.patch $(3RDP_ROOT)$(DIRSEP)build/cl-use-ssh-ctr.patch $(3RDP_ROOT)$(DIRSEP)build/cl-ssh-list-ctr-modes.patch $(3RDP_ROOT)$(DIRSEP)build/cl-ssh-incCtr.patch $(3RDP_ROOT)$(DIRSEP)build/cl-ssl-suite-blocksizes.patch $(3RDP_ROOT)$(DIRSEP)build/cl-no-tpm.patch $(3RDP_ROOT)$(DIRSEP)build/cl-no-via-aes.patch $(3RDP_ROOT)$(DIRSEP)build/cl-fix-ssh-ecc-ephemeral.patch $(3RDP_ROOT)$(DIRSEP)/build/cl-just-use-cc.patch $(3RDP_ROOT)$(DIRSEP)/build/cl-learn-numbers.patch | $(CRYPT_SRC) $(CRYPT_IDIR)
+$(CRYPTLIB_BUILD): $(3RDP_ROOT)$(DIRSEP)dist/cryptlib.zip $(3RDP_ROOT)$(DIRSEP)build/terminal-params.patch $(3RDP_ROOT)$(DIRSEP)build/cl-mingw32-static.patch $(3RDP_ROOT)$(DIRSEP)build/cl-ranlib.patch $(3RDP_ROOT)$(DIRSEP)build/cl-win32-noasm.patch $(3RDP_ROOT)$(DIRSEP)build/cl-zz-country.patch $(3RDP_ROOT)$(DIRSEP)build/cl-algorithms.patch $(3RDP_ROOT)$(DIRSEP)build/cl-allow-duplicate-ext.patch $(3RDP_ROOT)$(DIRSEP)build/cl-macosx-minver.patch $(3RDP_ROOT)$(DIRSEP)build/cl-endian.patch $(3RDP_ROOT)$(DIRSEP)build/cl-cryptodev.patch $(3RDP_ROOT)$(DIRSEP)build/cl-posix-me-gently.patch $(3RDP_ROOT)$(DIRSEP)build/cl-tpm-linux.patch $(3RDP_ROOT)$(DIRSEP)build/cl-PAM-noprompts.patch $(3RDP_ROOT)$(DIRSEP)build/cl-zlib.patch $(3RDP_ROOT)$(DIRSEP)build/Dynamic-linked-static-lib.patch $(3RDP_ROOT)$(DIRSEP)build/SSL-fix.patch $(3RDP_ROOT)$(DIRSEP)build/cl-bigger-maxattribute.patch $(3RDP_ROOT)$(DIRSEP)build/cl-vcxproj.patch $(3RDP_ROOT)$(DIRSEP)build/cl-mingw-vcver.patch $(3RDP_ROOT)$(DIRSEP)build/cl-win32-build-fix.patch $(3RDP_ROOT)$(DIRSEP)build/cl-gcc-non-const-time-val.patch $(3RDP_ROOT)$(DIRSEP)build/cl-no-odbc.patch $(3RDP_ROOT)$(DIRSEP)build/cl-noasm-defines.patch $(3RDP_ROOT)$(DIRSEP)build/cl-bn-noasm64-fix.patch $(3RDP_ROOT)$(DIRSEP)build/cl-no-RSA-suites.patch $(3RDP_ROOT)$(DIRSEP)build/cl-fix-ECC-RSA.patch $(3RDP_ROOT)$(DIRSEP)build/cl-prefer-ECC.patch $(3RDP_ROOT)$(DIRSEP)build/cl-prefer-ECC-harder.patch $(3RDP_ROOT)$(DIRSEP)build/cl-more-RSA-ECC-fixes.patch $(3RDP_ROOT)$(DIRSEP)build/cl-DH-key-init.patch $(3RDP_ROOT)$(DIRSEP)build/cl-clear-GCM-flag.patch $(3RDP_ROOT)$(DIRSEP)build/cl-use-ssh-ctr.patch $(3RDP_ROOT)$(DIRSEP)build/cl-ssh-list-ctr-modes.patch $(3RDP_ROOT)$(DIRSEP)build/cl-ssh-incCtr.patch $(3RDP_ROOT)$(DIRSEP)build/cl-ssl-suite-blocksizes.patch $(3RDP_ROOT)$(DIRSEP)build/cl-no-tpm.patch $(3RDP_ROOT)$(DIRSEP)build/cl-no-via-aes.patch $(3RDP_ROOT)$(DIRSEP)build/cl-fix-ssh-ecc-ephemeral.patch $(3RDP_ROOT)$(DIRSEP)/build/cl-just-use-cc.patch $(3RDP_ROOT)$(DIRSEP)/build/cl-learn-numbers.patch $(3RDP_ROOT)/build/cl-linux-yield.patch | $(CRYPT_SRC) $(CRYPT_IDIR)
@echo Creating $@ ...
$(QUIET)-rm -rf $(CRYPT_SRC)/*
$(QUIET)unzip -oa $(3RDPDISTDIR)$(DIRSEP)cryptlib.zip -d $(CRYPT_SRC)
@@ -112,6 +112,7 @@
$(QUIET)patch -p0 -d $(CRYPT_SRC) < cl-fix-ssh-ecc-ephemeral.patch
$(QUIET)patch -p0 -d $(CRYPT_SRC) < cl-just-use-cc.patch
$(QUIET)patch -p0 -d $(CRYPT_SRC) < cl-learn-numbers.patch
+ $(QUIET)patch -p0 -d $(CRYPT_SRC) < cl-linux-yield.patch
ifeq ($(CC),mingw32-gcc)
$(QUIET)cd $(CRYPT_SRC) && env - PATH="$(PATH)" CC="$(CC)" AR="$(AR)" RANLIB="$(RANLIB)" make directories
$(QUIET)cd $(CRYPT_SRC) && env - PATH="$(PATH)" CC="$(CC)" AR="$(AR)" RANLIB="$(RANLIB)" make toolscripts
diff --git a/3rdp/build/cl-linux-yield.patch b/3rdp/build/cl-linux-yield.patch
new file mode 100644
index 0000000000000000000000000000000000000000..8cdfc8eafd3fd85f39bf0f8b519f25a31078fee4
--- /dev/null
+++ b/3rdp/build/cl-linux-yield.patch
@@ -0,0 +1,11 @@
+--- old/thread.h 2021-10-19 12:34:08.766649958 -0700
++++ kernel/thread.h 2021-10-19 12:34:43.794072316 -0700
+@@ -3005,7 +3005,7 @@
+ #endif /* Slowaris 5.7 / 7.x or newer */
+ #elif defined( _AIX ) || defined( __Android__ ) || defined( __CYGWIN__ ) || \
+ ( defined( __hpux ) && ( OSVERSION >= 11 ) ) || \
+- defined( __NetBSD__ ) || defined( __QNX__ ) || defined( __UCLIBC__ )
++ defined( __NetBSD__ ) || defined( __QNX__ ) || defined( __UCLIBC__ ) || defined(__linux__)
+ #define THREAD_YIELD() sched_yield()
+ #elif defined( __XMK__ )
+ /* The XMK underlying scheduling object is the process context, for which

View file

@@ -1,111 +0,0 @@
diff --git a/aiopurpleair/api.py b/aiopurpleair/api.py
index d3b276b..c557015 100644
--- a/aiopurpleair/api.py
+++ b/aiopurpleair/api.py
@@ -5,7 +5,10 @@ from typing import Any, cast
from aiohttp import ClientSession, ClientTimeout
from aiohttp.client_exceptions import ClientError
-from pydantic import BaseModel, ValidationError
+try:
+ from pydantic.v1 import BaseModel, ValidationError
+except ModuleNotFoundError:
+ from pydantic import BaseModel, ValidationError
from aiopurpleair.const import LOGGER
from aiopurpleair.endpoints.sensors import SensorsEndpoints
diff --git a/aiopurpleair/endpoints/__init__.py b/aiopurpleair/endpoints/__init__.py
index 4d263e1..6632310 100644
--- a/aiopurpleair/endpoints/__init__.py
+++ b/aiopurpleair/endpoints/__init__.py
@@ -4,7 +4,10 @@ from __future__ import annotations
from collections.abc import Awaitable, Callable, Iterable
from typing import Any
-from pydantic import BaseModel, ValidationError
+try:
+ from pydantic.v1 import BaseModel, ValidationError
+except ModuleNotFoundError:
+ from pydantic import BaseModel, ValidationError
from aiopurpleair.errors import InvalidRequestError
from aiopurpleair.helpers.typing import ModelT
diff --git a/aiopurpleair/helpers/typing.py b/aiopurpleair/helpers/typing.py
index 4ae01e6..49f59e6 100644
--- a/aiopurpleair/helpers/typing.py
+++ b/aiopurpleair/helpers/typing.py
@@ -1,6 +1,9 @@
"""Define typing helpers."""
from typing import TypeVar
-from pydantic import BaseModel
+try:
+ from pydantic.v1 import BaseModel
+except ModuleNotFoundError:
+ from pydantic import BaseModel
ModelT = TypeVar("ModelT", bound=BaseModel)
diff --git a/aiopurpleair/models/keys.py b/aiopurpleair/models/keys.py
index 591ae01..ffadbcc 100644
--- a/aiopurpleair/models/keys.py
+++ b/aiopurpleair/models/keys.py
@@ -3,7 +3,10 @@ from __future__ import annotations
from datetime import datetime
-from pydantic import BaseModel, validator
+try:
+ from pydantic.v1 import BaseModel, validator
+except ModuleNotFoundError:
+ from pydantic import BaseModel, validator
from aiopurpleair.backports.enum import StrEnum
from aiopurpleair.helpers.validators import validate_timestamp
diff --git a/aiopurpleair/models/sensors.py b/aiopurpleair/models/sensors.py
index 5b99b51..d435996 100644
--- a/aiopurpleair/models/sensors.py
+++ b/aiopurpleair/models/sensors.py
@@ -5,7 +5,10 @@ from __future__ import annotations
from datetime import datetime
from typing import Any, Optional
-from pydantic import BaseModel, root_validator, validator
+try:
+ from pydantic.v1 import BaseModel, root_validator, validator
+except ModuleNotFoundError:
+ from pydantic import BaseModel, root_validator, validator
from aiopurpleair.const import SENSOR_FIELDS, ChannelFlag, ChannelState, LocationType
from aiopurpleair.helpers.validators import validate_timestamp
diff --git a/tests/models/test_keys.py b/tests/models/test_keys.py
index 0d7d7c8..b2e30c1 100644
--- a/tests/models/test_keys.py
+++ b/tests/models/test_keys.py
@@ -5,7 +5,10 @@ from datetime import datetime
from typing import Any
import pytest
-from pydantic import ValidationError
+try:
+ from pydantic.v1 import ValidationError
+except ModuleNotFoundError:
+ from pydantic import ValidationError
from aiopurpleair.models.keys import ApiKeyType, GetKeysResponse
diff --git a/tests/models/test_sensors.py b/tests/models/test_sensors.py
index a984b36..7b2c84f 100644
--- a/tests/models/test_sensors.py
+++ b/tests/models/test_sensors.py
@@ -5,7 +5,10 @@ from datetime import datetime
from typing import Any
import pytest
-from pydantic import ValidationError
+try:
+ from pydantic.v1 import ValidationError
+except ModuleNotFoundError:
+ from pydantic import ValidationError
from aiopurpleair.models.sensors import (
GetSensorsRequest,

View file

@@ -1,73 +0,0 @@
diff --git a/tests/test_async_modbus.py b/tests/test_async_modbus.py
index b0bd3fd..6b8df87 100644
--- a/tests/test_async_modbus.py
+++ b/tests/test_async_modbus.py
@@ -194,7 +194,7 @@ async def test_read_coils(proto, slave_id, starting_address, expected_reply):
await coro
else:
reply = await coro
- assert (reply == expected_reply).all()
+ assert reply == expected_reply
server = Server(slave_id, starting_address, expected_reply)
client = AsyncClient(server, protocol)
@@ -204,7 +204,7 @@ async def test_read_coils(proto, slave_id, starting_address, expected_reply):
await coro
else:
reply = await coro
- assert (reply == expected_reply).all()
+ assert reply == expected_reply
@pytest.mark.asyncio
@@ -273,7 +273,7 @@ async def test_read_discrete_inputs(proto, slave_id, starting_address, expected_
await coro
else:
reply = await coro
- assert (reply == expected_reply).all()
+ assert reply == expected_reply
server = Server(slave_id, starting_address, expected_reply)
client = AsyncClient(server, protocol)
@@ -283,7 +283,7 @@ async def test_read_discrete_inputs(proto, slave_id, starting_address, expected_
await coro
else:
reply = await coro
- assert (reply == expected_reply).all()
+ assert reply == expected_reply
@pytest.mark.asyncio
@@ -306,7 +306,7 @@ async def test_read_holding_registers(
await coro
else:
reply = await coro
- assert (reply == expected_reply).all()
+ assert reply == expected_reply
server = Server(slave_id, starting_address, expected_reply)
client = AsyncClient(server, protocol)
@@ -316,7 +316,7 @@ async def test_read_holding_registers(
await coro
else:
reply = await coro
- assert (reply == expected_reply).all()
+ assert reply == expected_reply
@pytest.mark.asyncio
@@ -383,7 +383,7 @@ async def test_read_input_registers(proto, slave_id, starting_address, expected_
await coro
else:
reply = await coro
- assert (reply == expected_reply).all()
+ assert reply == expected_reply
server = Server(slave_id, starting_address, expected_reply)
client = AsyncClient(server, protocol)
@@ -393,4 +393,4 @@ async def test_read_input_registers(proto, slave_id, starting_address, expected_
await coro
else:
reply = await coro
- assert (reply == expected_reply).all()
+ assert reply == expected_reply

View file

@@ -1,288 +0,0 @@
From d3aed2c18cc3a1c88a8052af1f34d7f81f1be11a Mon Sep 17 00:00:00 2001
From: Flakebi <flakebi@t-online.de>
Date: Wed, 28 Feb 2024 23:24:14 +0100
Subject: [PATCH] Fix with new dependency versions
- cookie_jar is private in werkzeug 2.3, so recreate the client instead
- set_cookie does not take a hostname argument anymore, use domain instead
- Headers need to specify a content type
---
test_seasurf.py | 71 ++++++++++++++++++++++++-------------------------
1 file changed, 35 insertions(+), 36 deletions(-)
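
A short sketch of the API differences the test changes below adapt to (Flask app and routes here are assumed, werkzeug >= 2.3):

from flask import Flask

app = Flask(__name__)

with app.test_client() as client:
    # Older werkzeug: client.set_cookie('www.example.com', name, value)
    # werkzeug >= 2.3: the server name moves to the domain= keyword.
    client.set_cookie("_csrf_token", "token-value", domain="www.example.com")

    # POST bodies need an explicit content type in these tests, hence the
    # content_type='application/json' added throughout the diff.
    client.post("/bar", content_type="application/json")

# client.cookie_jar became private in werkzeug 2.3, so the patch recreates
# the test client instead of clearing the jar.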
diff --git a/test_seasurf.py b/test_seasurf.py
index 517b2d7..f940b91 100644
--- a/test_seasurf.py
+++ b/test_seasurf.py
@@ -71,18 +71,18 @@ class SeaSurfTestCase(BaseTestCase):
self.assertEqual(type(token), str)
def test_exempt_view(self):
- rv = self.app.test_client().post('/foo')
+ rv = self.app.test_client().post('/foo', content_type='application/json')
self.assertIn(b('bar'), rv.data)
- rv = self.app.test_client().post(u'/foo/\xf8')
+ rv = self.app.test_client().post(u'/foo/\xf8', content_type='application/json')
self.assertIn(b('bar'), rv.data)
def test_token_validation(self):
# should produce a logger warning
- rv = self.app.test_client().post('/bar')
+ rv = self.app.test_client().post('/bar', content_type='application/json')
self.assertIn(b('403 Forbidden'), rv.data)
- rv = self.app.test_client().post(u'/bar/\xf8')
+ rv = self.app.test_client().post(u'/bar/\xf8', content_type='application/json')
self.assertIn(b('403 Forbidden'), rv.data)
def test_json_token_validation_bad(self):
@@ -93,7 +93,7 @@ class SeaSurfTestCase(BaseTestCase):
with self.app.test_client() as client:
with client.session_transaction() as sess:
sess[self.csrf._csrf_name] = tokenA
- client.set_cookie('www.example.com', self.csrf._csrf_name, tokenB)
+ client.set_cookie(self.csrf._csrf_name, tokenB, domain='www.example.com')
rv = client.post('/bar', data=data)
self.assertEqual(rv.status_code, 403, rv)
@@ -107,7 +107,7 @@ class SeaSurfTestCase(BaseTestCase):
data = {'_csrf_token': token}
with self.app.test_client() as client:
with client.session_transaction() as sess:
- client.set_cookie('www.example.com', self.csrf._csrf_name, token)
+ client.set_cookie(self.csrf._csrf_name, token, domain='www.example.com')
sess[self.csrf._csrf_name] = token
rv = client.post('/bar', data=data)
@@ -121,7 +121,7 @@ class SeaSurfTestCase(BaseTestCase):
with client.session_transaction() as sess:
token = self.csrf._generate_token()
- client.set_cookie('www.example.com', self.csrf._csrf_name, token)
+ client.set_cookie(self.csrf._csrf_name, token, domain='www.example.com')
sess[self.csrf._csrf_name] = token
# once this is reached the session was stored
@@ -144,18 +144,18 @@ class SeaSurfTestCase(BaseTestCase):
with client.session_transaction() as sess:
token = self.csrf._generate_token()
- client.set_cookie('www.example.com', self.csrf._csrf_name, token)
+ client.set_cookie(self.csrf._csrf_name, token, domain='www.example.com')
sess[self.csrf._csrf_name] = token
# once this is reached the session was stored
- rv = client.post('/bar',
+ rv = client.post('/bar', content_type='application/json',
data={self.csrf._csrf_name: token},
base_url='https://www.example.com',
headers={'Referer': 'https://www.example.com/foobar'})
self.assertEqual(rv.status_code, 200)
- rv = client.post(u'/bar/\xf8',
+ rv = client.post(u'/bar/\xf8', content_type='application/json',
data={self.csrf._csrf_name: token},
base_url='https://www.example.com',
headers={'Referer': 'https://www.example.com/foobar\xf8'})
@@ -167,7 +167,7 @@ class SeaSurfTestCase(BaseTestCase):
with client.session_transaction() as sess:
token = self.csrf._generate_token()
- client.set_cookie('www.example.com', self.csrf._csrf_name, token)
+ client.set_cookie(self.csrf._csrf_name, token, domain='www.example.com')
sess[self.csrf._csrf_name] = token
rv = client.post('/bar',
@@ -187,10 +187,10 @@ class SeaSurfTestCase(BaseTestCase):
self.csrf._csrf_header_name: token,
}
- rv = client.post('/bar', headers=headers)
+ rv = client.post('/bar', headers=headers, content_type='application/json')
self.assertEqual(rv.status_code, 200, rv)
- rv = client.post(u'/bar/\xf8', headers=headers)
+ rv = client.post(u'/bar/\xf8', headers=headers, content_type='application/json')
self.assertEqual(rv.status_code, 200, rv)
def test_token_in_form_data(self):
@@ -280,14 +280,14 @@ class SeaSurfTestCaseExemptViews(BaseTestCase):
def test_exempt_view(self):
with self.app.test_client() as c:
- rv = c.post('/foo')
+ rv = c.post('/foo', content_type='application/json')
self.assertIn(b('bar'), rv.data)
cookie = get_cookie(rv, self.csrf._csrf_name)
self.assertEqual(cookie, None)
def test_token_validation(self):
# should produce a logger warning
- rv = self.app.test_client().post('/bar')
+ rv = self.app.test_client().post('/bar', content_type='application/json')
self.assertIn(b('403 Forbidden'), rv.data)
@@ -319,18 +319,18 @@ class SeaSurfTestCaseIncludeViews(BaseTestCase):
return 'foo'
def test_include_view(self):
- rv = self.app.test_client().post('/foo')
+ rv = self.app.test_client().post('/foo', content_type='application/json')
self.assertIn(b('403 Forbidden'), rv.data)
- rv = self.app.test_client().post(u'/foo/\xf8')
+ rv = self.app.test_client().post(u'/foo/\xf8', content_type='application/json')
self.assertIn(b('403 Forbidden'), rv.data)
def test_token_validation(self):
# should produce a logger warning
- rv = self.app.test_client().post('/bar')
+ rv = self.app.test_client().post('/bar', content_type='application/json')
self.assertIn(b('foo'), rv.data)
- rv = self.app.test_client().post(u'/bar/\xf8')
+ rv = self.app.test_client().post(u'/bar/\xf8', content_type='application/json')
self.assertIn(b('foo'), rv.data)
@@ -363,10 +363,10 @@ class SeaSurfTestCaseExemptUrls(BaseTestCase):
return 'foo'
def test_exempt_view(self):
- rv = self.app.test_client().post('/foo/baz')
+ rv = self.app.test_client().post('/foo/baz', content_type='application/json')
self.assertIn(b('bar'), rv.data)
with self.app.test_client() as c:
- rv = c.post('/foo/quz')
+ rv = c.post('/foo/quz', content_type='application/json')
self.assertIn(b('bar'), rv.data)
cookie = get_cookie(rv, self.csrf._csrf_name)
self.assertEqual(cookie, None)
@@ -374,7 +374,7 @@ class SeaSurfTestCaseExemptUrls(BaseTestCase):
def test_token_validation(self):
with self.app.test_client() as c:
# should produce a logger warning
- rv = c.post('/bar')
+ rv = c.post('/bar', content_type='application/json')
self.assertIn(b('403 Forbidden'), rv.data)
cookie = get_cookie(rv, self.csrf._csrf_name)
token = self.csrf._get_token()
@@ -434,7 +434,7 @@ class SeaSurfTestCaseDisableCookie(unittest.TestCase):
def test_no_csrf_cookie_even_after_manually_validated(self):
with self.app.test_client() as c:
- rv = c.post('/manual')
+ rv = c.post('/manual', content_type='application/json')
self.assertIn(b('403 Forbidden'), rv.data)
cookie = get_cookie(rv, self.csrf._csrf_name)
self.assertEqual(cookie, None)
@@ -474,14 +474,14 @@ class SeaSurfTestCaseEnableCookie(unittest.TestCase):
def test_has_csrf_cookie(self):
with self.app.test_client() as c:
- rv = c.post('/exempt_with_cookie')
+ rv = c.post('/exempt_with_cookie', content_type='application/json')
cookie = get_cookie(rv, self.csrf._csrf_name)
token = self.csrf._get_token()
self.assertEqual(cookie, token)
def test_has_csrf_cookie_but_doesnt_validate(self):
with self.app.test_client() as c:
- rv = c.post('/exempt_with_cookie')
+ rv = c.post('/exempt_with_cookie', content_type='application/json')
self.assertIn(b('exempt_with_cookie'), rv.data)
cookie = get_cookie(rv, self.csrf._csrf_name)
token = self.csrf._get_token()
@@ -530,7 +530,7 @@ class SeaSurfTestCaseSkipValidation(unittest.TestCase):
def test_skips_validation(self):
with self.app.test_client() as c:
- rv = c.post('/foo/quz')
+ rv = c.post('/foo/quz', content_type='application/json')
self.assertIn(b('bar'), rv.data)
cookie = get_cookie(rv, self.csrf._csrf_name)
token = self.csrf._get_token()
@@ -538,20 +538,20 @@ class SeaSurfTestCaseSkipValidation(unittest.TestCase):
def test_enforces_validation_reject(self):
with self.app.test_client() as c:
- rv = c.delete('/foo/baz')
+ rv = c.delete('/foo/baz', content_type='application/json')
self.assertIn(b('403 Forbidden'), rv.data)
def test_enforces_validation_accept(self):
with self.app.test_client() as c:
# GET generates CSRF token
c.get('/foo/baz')
- rv = c.delete('/foo/baz',
+ rv = c.delete('/foo/baz', content_type='application/json',
headers={'X-CSRFToken': self.csrf._get_token()})
self.assertIn(b('bar'), rv.data)
def test_manual_validation(self):
with self.app.test_client() as c:
- rv = c.post('/manual')
+ rv = c.post('/manual', content_type='application/json')
self.assertIn(b('403 Forbidden'), rv.data)
@@ -578,7 +578,7 @@ class SeaSurfTestManualValidation(unittest.TestCase):
def test_can_manually_validate_exempt_views(self):
with self.app.test_client() as c:
- rv = c.post('/manual')
+ rv = c.post('/manual', content_type='application/json')
self.assertIn(b('403 Forbidden'), rv.data)
cookie = get_cookie(rv, self.csrf._csrf_name)
token = self.csrf._get_token()
@@ -651,7 +651,7 @@ class SeaSurfTestCaseReferer(BaseTestCase):
with client.session_transaction() as sess:
token = self.csrf._generate_token()
- client.set_cookie('www.example.com', self.csrf._csrf_name, token)
+ client.set_cookie(self.csrf._csrf_name, token, domain='www.example.com')
sess[self.csrf._csrf_name] = token
# once this is reached the session was stored
@@ -728,8 +728,7 @@ class SeaSurfTestCaseSetCookie(BaseTestCase):
res3.headers.get('Set-Cookie', ''),
'CSRF cookie always be re-set if a token is requested by the template')
- client.cookie_jar.clear()
-
+ with self.app.test_client() as client:
res4 = client.get('/foo')
self.assertIn(self.csrf._csrf_name,
@@ -739,14 +738,14 @@ class SeaSurfTestCaseSetCookie(BaseTestCase):
def test_header_set_on_post(self):
with self.app.test_client() as client:
headers = {}
- res1 = client.post('/bar', headers=headers)
+ res1 = client.post('/bar', headers=headers, content_type='application/json')
self.assertEqual(res1.status_code, 403)
for cookie in client.cookie_jar:
if cookie.name == self.csrf._csrf_name:
headers[self.csrf._csrf_header_name] = cookie.value
- res2 = client.post('/bar', headers=headers)
+ res2 = client.post('/bar', headers=headers, content_type='application/json')
self.assertEqual(res2.status_code, 200)
def test_header_set_cookie_samesite(self):
@@ -789,7 +788,7 @@ class SeaSurfTestCaseGenerateNewToken(BaseTestCase):
client.get('/foo')
tokenA = self.csrf._get_token()
- client.set_cookie('www.example.com', self.csrf._csrf_name, tokenA)
+ client.set_cookie(self.csrf._csrf_name, tokenA, domain='www.example.com')
with client.session_transaction() as sess:
sess[self.csrf._csrf_name] = tokenA
--
2.43.0

View file

@@ -1,13 +0,0 @@
diff --git a/configure.ac b/configure.ac
index 7eef3523..b8e7d274 100644
--- a/configure.ac
+++ b/configure.ac
@@ -614,7 +614,7 @@ if test "$found_py" = "1"; then
if test "$found_py" = "1" -o "$found_py3" = "1"; then
# Reset everything, so that we can look for another Python.
m4_foreach([mym4pythonver],
- [[2.7],[3.6],[3.8],[3.9],[3.10],[3.11],[3.12],[all]],
+ [[2.7],[3.6],[3.8],[3.9],[3.10],[3.11],[3.12],[3.13],[all]],
[unset PYTHON
unset PYTHON_VERSION
unset PYTHON_CPPFLAGS

View file

@@ -1,87 +0,0 @@
diff --git a/tests/aio/test_storage.py b/tests/aio/test_storage.py
index 1040d18..7015278 100644
--- a/tests/aio/test_storage.py
+++ b/tests/aio/test_storage.py
@@ -17,7 +17,6 @@ from limits.aio.storage import (
from limits.aio.strategies import MovingWindowRateLimiter
from limits.errors import StorageError
from limits.storage import storage_from_string
-from tests.utils import fixed_start
@pytest.mark.asyncio
@@ -197,7 +196,6 @@ class TestConcreteStorages:
async def test_storage_string(self, uri, args, expected_instance, fixture):
assert isinstance(storage_from_string(uri, **args), expected_instance)
- @fixed_start
async def test_expiry_incr(self, uri, args, expected_instance, fixture):
storage = storage_from_string(uri, **args)
limit = RateLimitItemPerSecond(1)
@@ -205,7 +203,6 @@ class TestConcreteStorages:
time.sleep(1.1)
assert await storage.get(limit.key_for()) == 0
- @fixed_start
async def test_expiry_acquire_entry(self, uri, args, expected_instance, fixture):
if not issubclass(expected_instance, MovingWindowSupport):
pytest.skip("%s does not support acquire entry" % expected_instance)
diff --git a/tests/aio/test_strategy.py b/tests/aio/test_strategy.py
index b21f808..efa3b95 100644
--- a/tests/aio/test_strategy.py
+++ b/tests/aio/test_strategy.py
@@ -18,14 +18,12 @@ from tests.utils import (
async_all_storage,
async_moving_window_storage,
async_window,
- fixed_start,
)
@pytest.mark.asyncio
class TestAsyncWindow:
@async_all_storage
- @fixed_start
async def test_fixed_window(self, uri, args, fixture):
storage = storage_from_string(uri, **args)
limiter = FixedWindowRateLimiter(storage)
@@ -37,7 +35,6 @@ class TestAsyncWindow:
assert (await limiter.get_window_stats(limit)).reset_time == start + 2
@async_all_storage
- @fixed_start
async def test_fixed_window_empty_stats(self, uri, args, fixture):
storage = storage_from_string(uri, **args)
limiter = FixedWindowRateLimiter(storage)
@@ -61,7 +58,6 @@ class TestAsyncWindow:
) == 58
@async_all_storage
- @fixed_start
async def test_fixed_window_multiple_cost(self, uri, args, fixture):
storage = storage_from_string(uri, **args)
limiter = FixedWindowRateLimiter(storage)
@@ -73,7 +69,6 @@ class TestAsyncWindow:
assert not await limiter.hit(limit, "k2", cost=6)
@async_all_storage
- @fixed_start
async def test_fixed_window_with_elastic_expiry(self, uri, args, fixture):
storage = storage_from_string(uri, **args)
limiter = FixedWindowElasticExpiryRateLimiter(storage)
@@ -90,7 +85,6 @@ class TestAsyncWindow:
assert (await limiter.get_window_stats(limit)).reset_time == end + 2
@async_all_storage
- @fixed_start
async def test_fixed_window_with_elastic_expiry_multiple_cost(
self, uri, args, fixture
):
@@ -179,7 +173,6 @@ class TestAsyncWindow:
MovingWindowRateLimiter(storage)
@async_all_storage
- @fixed_start
@pytest.mark.flaky
async def test_test_fixed_window(self, uri, args, fixture):
storage = storage_from_string(uri, **args)

View file

@@ -1,31 +0,0 @@
diff --git a/shapely/geos.py b/shapely/geos.py
index 88c5f53..1ccd6e4 100644
--- a/shapely/geos.py
+++ b/shapely/geos.py
@@ -96,6 +96,7 @@ if sys.platform.startswith('linux'):
alt_paths = [
'libgeos_c.so.1',
'libgeos_c.so',
+ '@libgeos_c@',
]
_lgeos = load_dll('geos_c', fallbacks=alt_paths)
@@ -160,6 +161,7 @@ elif sys.platform == 'darwin':
"/usr/local/lib/libgeos_c.dylib",
# homebrew Apple Silicon
"/opt/homebrew/lib/libgeos_c.dylib",
+ "@libgeos_c@",
]
_lgeos = load_dll('geos_c', fallbacks=alt_paths)
diff --git a/tests/test_dlls.py b/tests/test_dlls.py
index c71da8e..c36262c 100644
--- a/tests/test_dlls.py
+++ b/tests/test_dlls.py
@@ -18,4 +18,5 @@ class LoadingTestCase(unittest.TestCase):
'/opt/homebrew/lib/libgeos_c.dylib', # homebrew (macOS)
os.path.join(sys.prefix, "lib", "libgeos_c.so"), # anaconda (Linux)
'libgeos_c.so.1',
- 'libgeos_c.so'])
+ 'libgeos_c.so',
+ '@libgeos_c@'])

View file

@@ -1,15 +0,0 @@
diff --git a/zipfile2/tests/test__zipfile.py b/zipfile2/tests/test__zipfile.py
index 60f2ed2..db6e5bc 100644
--- a/zipfile2/tests/test__zipfile.py
+++ b/zipfile2/tests/test__zipfile.py
@@ -585,8 +585,8 @@ class TestsPermissionExtraction(unittest.TestCase):
if index & 1 << order:
mode |= permissions[permgroup][order]
for order in range(3):
- if specialindex & 1 << order:
- mode |= permissions['special'][order]
+ if specialindex & 1 << order and order == 0:
+ raise unittest.SkipTest("The nix build process doesn't allow you to use the setuid bit")
os.chmod(path, mode)
real_permission = os.stat(path).st_mode & 0xFFF
self.files.append((path, real_permission))

View file

@@ -1,12 +0,0 @@
--- a/src/decoder/plugins/FfmpegDecoderPlugin.cxx
+++ b/src/decoder/plugins/FfmpegDecoderPlugin.cxx
@@ -20,8 +20,8 @@
/* necessary because libavutil/common.h uses UINT64_C */
#define __STDC_CONSTANT_MACROS
-#include "lib/ffmpeg/Time.hxx"
#include "config.h"
+#include "lib/ffmpeg/Time.hxx"
#include "FfmpegDecoderPlugin.hxx"
#include "lib/ffmpeg/Domain.hxx"
#include "lib/ffmpeg/Error.hxx"

View file

@@ -1,32 +0,0 @@
From: Vincent Breitmoser <look@my.amazin.horse>
Date: Thu, 13 Jun 2019 21:27:43 +0200
Subject: gpg: accept subkeys with a good revocation but no self-sig during
import
* g10/import.c (chk_self_sigs): Set the NODE_GOOD_SELFSIG flag when we
encounter a valid revocation signature. This allows import of subkey
revocation signatures, even in the absence of a corresponding subkey
binding signature.
--
This fixes the remaining test in import-incomplete.scm.
GnuPG-Bug-id: 4393
Signed-off-by: Daniel Kahn Gillmor <dkg@fifthhorseman.net>
---
g10/import.c | 1 +
1 file changed, 1 insertion(+)
diff --git a/g10/import.c b/g10/import.c
index 4fdf248..ee2fed8 100644
--- a/g10/import.c
+++ b/g10/import.c
@@ -3613,6 +3613,7 @@ chk_self_sigs (ctrl_t ctrl, kbnode_t keyblock, u32 *keyid, int *non_self)
/* It's valid, so is it newer? */
if (sig->timestamp >= rsdate)
{
+ knode->flag |= NODE_GOOD_SELFSIG; /* Subkey is valid. */
if (rsnode)
{
/* Delete the last revocation sig since