patches: use the full LDN patch
commit 63cb3b183a
parent 805b358baf

@@ -1,40 +0,0 @@
From 047c9a2f793cc426e281bb50aa7d073e8f638269 Mon Sep 17 00:00:00 2001
From: liushuyu <liushuyu011@gmail.com>
Date: Mon, 12 Sep 2022 23:17:21 -0600
Subject: [PATCH] dedicated_room: fix token padding ...

... mbedtls' base64 routine has a strange behavioral issue where, if the
input is invalid, it will not report it as invalid, but rather return a
bunch of garbage data. This new round-tripping padding method should
eliminate such issues.
---
 src/dedicated_room/yuzu_room.cpp | 13 ++++++++++++-
 1 file changed, 12 insertions(+), 1 deletion(-)

diff --git a/src/dedicated_room/yuzu_room.cpp b/src/dedicated_room/yuzu_room.cpp
index 7b6deba41..359891883 100644
--- a/src/dedicated_room/yuzu_room.cpp
+++ b/src/dedicated_room/yuzu_room.cpp
@@ -76,7 +76,18 @@ static constexpr char BanListMagic[] = "YuzuRoom-BanList-1";
 static constexpr char token_delimiter{':'};
 
 static void PadToken(std::string& token) {
-    while (token.size() % 4 != 0) {
+    std::size_t outlen = 0;
+
+    std::array<unsigned char, 512> output{};
+    std::array<unsigned char, 2048> roundtrip{};
+    for (size_t i = 0; i < 3; i++) {
+        mbedtls_base64_decode(output.data(), output.size(), &outlen,
+                              reinterpret_cast<const unsigned char*>(token.c_str()),
+                              token.length());
+        mbedtls_base64_encode(roundtrip.data(), roundtrip.size(), &outlen, output.data(), outlen);
+        if (memcmp(roundtrip.data(), token.data(), token.size()) == 0) {
+            break;
+        }
         token.push_back('=');
     }
 }
--
2.37.3
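For context, here is a minimal, self-contained sketch of the round-tripping idea the patch message describes: keep appending '=' until decoding the token and re-encoding the result reproduces the input byte-for-byte. The helper name, buffer sizes, and sample token below are illustrative, not part of the yuzu source.

#include <mbedtls/base64.h>

#include <array>
#include <cstdio>
#include <cstring>
#include <string>

// Illustrative helper mirroring the patch: append '=' padding until a
// decode/encode round trip reproduces the token exactly.
static void PadBase64Token(std::string& token) {
    std::array<unsigned char, 512> decoded{};
    std::array<unsigned char, 2048> reencoded{};
    std::size_t outlen = 0;

    // Base64 padding is at most two '=' characters, so three tries suffice.
    for (std::size_t i = 0; i < 3; i++) {
        mbedtls_base64_decode(decoded.data(), decoded.size(), &outlen,
                              reinterpret_cast<const unsigned char*>(token.c_str()),
                              token.length());
        mbedtls_base64_encode(reencoded.data(), reencoded.size(), &outlen,
                              decoded.data(), outlen);
        if (std::memcmp(reencoded.data(), token.data(), token.size()) == 0) {
            break; // Round trip matches: padding is already correct.
        }
        token.push_back('=');
    }
}

int main() {
    // "Zm9vYmFy" is "foobar" in base64 and needs no padding, so the round
    // trip matches on the first pass and the token is left unchanged.
    std::string token = "Zm9vYmFy";
    PadBase64Token(token);
    std::printf("%s\n", token.c_str());
    return 0;
}

Compile it as ordinary C++ and link against the mbed TLS crypto library (typically -lmbedcrypto), assuming the mbedtls headers are installed.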
File diff suppressed because it is too large