mirror of
https://github.com/geode-sdk/geode.git
synced 2025-03-22 02:45:49 -04:00
Implement HTTP header support in AsyncWebRequest and make the index update a conditional request
This commit is contained in:
parent
b9e670fffe
commit
727304a276
4 changed files with 44 additions and 30 deletions
loader
|
@ -86,6 +86,7 @@ namespace geode::utils::web {
|
|||
mutable std::mutex m_mutex;
|
||||
std::variant<std::monostate, std::ostream*, ghc::filesystem::path> m_target =
|
||||
std::monostate();
|
||||
std::vector<std::string> m_httpHeaders;
|
||||
|
||||
template <class T>
|
||||
friend class AsyncWebResult;
|
||||
|
@ -134,6 +135,7 @@ namespace geode::utils::web {
|
|||
AsyncCancelled m_cancelled = nullptr;
|
||||
bool m_sent = false;
|
||||
std::variant<std::monostate, std::ostream*, ghc::filesystem::path> m_target;
|
||||
std::vector<std::string> m_httpHeaders;
|
||||
|
||||
template <class T>
|
||||
friend class AsyncWebResult;
|
||||
|
@ -159,6 +161,12 @@ namespace geode::utils::web {
|
|||
* @returns Same AsyncWebRequest
|
||||
*/
|
||||
AsyncWebRequest& join(std::string const& requestID);
|
||||
|
||||
/**
|
||||
* In order to specify a http header to the request, give it here.
|
||||
* Can be called more than once.
|
||||
*/
|
||||
AsyncWebRequest& header(std::string const& header);
|
||||
/**
|
||||
* URL to fetch from the internet asynchronously
|
||||
* @param url URL of the data to download. Redirects will be
|
||||
|
|
|
@ -12,6 +12,7 @@
|
|||
#include <Geode/utils/ranges.hpp>
|
||||
#include <Geode/utils/string.hpp>
|
||||
#include <Geode/utils/vector.hpp>
|
||||
#include <fmt/format.h>
|
||||
#include <hash.hpp>
|
||||
#include <thread>
|
||||
|
||||
|
@ -104,39 +105,32 @@ void Index::updateIndex(IndexUpdateCallback callback, bool force) {
|
|||
|
||||
#endif
|
||||
|
||||
// read sha of currently installed commit
|
||||
std::string currentCommitSHA = "";
|
||||
if (ghc::filesystem::exists(indexDir / "current")) {
|
||||
auto data = utils::file::readString(indexDir / "current");
|
||||
if (data) {
|
||||
currentCommitSHA = data.value();
|
||||
}
|
||||
}
|
||||
|
||||
web::AsyncWebRequest()
|
||||
.join("index-update")
|
||||
.fetch("https://api.github.com/repos/geode-sdk/mods/commits")
|
||||
.json()
|
||||
.then([this, force, callback](nlohmann::json const& json) {
|
||||
.header(fmt::format("If-None-Match: \"{}\"", currentCommitSHA))
|
||||
.header("Accept: application/vnd.github.sha")
|
||||
.fetch("https://api.github.com/repos/geode-sdk/mods/commits/main")
|
||||
.text()
|
||||
.then([this, force, callback, currentCommitSHA](std::string const& upcomingCommitSHA) {
|
||||
auto indexDir = Loader::get()->getGeodeDirectory() / "index";
|
||||
|
||||
// check if rate-limited (returns object)
|
||||
JsonChecker checkerObj(json);
|
||||
auto obj = checkerObj.root("[geode-sdk/mods/commits]").obj();
|
||||
if (obj.has("documentation_url") && obj.has("message")) {
|
||||
RETURN_ERROR(obj.has("message").get<std::string>());
|
||||
}
|
||||
// gee i sure hope no one does 60 commits to the mod index an hour and download every
|
||||
// single one of them
|
||||
if (upcomingCommitSHA == "") {
|
||||
m_upToDate = true;
|
||||
m_updating = false;
|
||||
|
||||
// get sha of latest commit
|
||||
JsonChecker checker(json);
|
||||
auto root = checker.root("[geode-sdk/mods/commits]").array();
|
||||
|
||||
std::string upcomingCommitSHA;
|
||||
if (auto first = root.at(0).obj().needs("sha")) {
|
||||
upcomingCommitSHA = first.get<std::string>();
|
||||
}
|
||||
else {
|
||||
RETURN_ERROR("Unable to get hash from latest commit: " + checker.getError());
|
||||
}
|
||||
|
||||
// read sha of currently installed commit
|
||||
std::string currentCommitSHA = "";
|
||||
if (ghc::filesystem::exists(indexDir / "current")) {
|
||||
auto data = utils::file::readString(indexDir / "current");
|
||||
if (data) {
|
||||
currentCommitSHA = data.value();
|
||||
}
|
||||
if (callback) callback(UpdateStatus::Finished, "", 100);
|
||||
return;
|
||||
}
|
||||
|
||||
// update if forced or latest commit has
|
||||
|
|
|
@ -186,8 +186,9 @@ bool InternalLoader::verifyLoaderResources(IndexUpdateCallback callback) {
|
|||
log::debug(
|
||||
"compare {} {} {}", file.path().string(), hash, LOADER_RESOURCE_HASHES.at(name)
|
||||
);
|
||||
return true;
|
||||
this->downloadLoaderResources(callback);
|
||||
return false;
|
||||
return false; // todo
|
||||
}
|
||||
coverage += 1;
|
||||
}
|
||||
|
|
|
@ -127,7 +127,7 @@ static std::unordered_map<std::string, SentAsyncWebRequestHandle> RUNNING_REQUES
|
|||
static std::mutex RUNNING_REQUESTS_MUTEX;
|
||||
|
||||
SentAsyncWebRequest::SentAsyncWebRequest(AsyncWebRequest const& req, std::string const& id) :
|
||||
m_id(id), m_url(req.m_url), m_target(req.m_target) {
|
||||
m_id(id), m_url(req.m_url), m_target(req.m_target), m_httpHeaders(req.m_httpHeaders) {
|
||||
#define AWAIT_RESUME() \
|
||||
while (m_paused) {} \
|
||||
if (m_cancelled) { \
|
||||
|
@ -178,6 +178,12 @@ SentAsyncWebRequest::SentAsyncWebRequest(AsyncWebRequest const& req, std::string
|
|||
curl_easy_setopt(curl, CURLOPT_NOPROGRESS, 0);
|
||||
curl_easy_setopt(curl, CURLOPT_FOLLOWLOCATION, 1);
|
||||
|
||||
curl_slist* headers = nullptr;
|
||||
for (auto& header : m_httpHeaders) {
|
||||
headers = curl_slist_append(headers, header.c_str());
|
||||
}
|
||||
curl_easy_setopt(curl, CURLOPT_HTTPHEADER, headers);
|
||||
|
||||
struct ProgressData {
|
||||
SentAsyncWebRequest* self;
|
||||
std::ofstream* file;
|
||||
|
@ -293,6 +299,11 @@ AsyncWebRequest& AsyncWebRequest::join(std::string const& requestID) {
|
|||
return *this;
|
||||
}
|
||||
|
||||
/**
 * Append a raw HTTP header line (e.g. "Accept: application/vnd.github.sha")
 * to this request. Can be called repeatedly; each call adds one more header.
 * @param header full header line, already in "Name: value" form
 * @returns *this, so further builder calls can be chained
 */
AsyncWebRequest& AsyncWebRequest::header(std::string const& header) {
    // collected headers are handed to curl_slist_append when the request is sent
    m_httpHeaders.emplace_back(header);
    return *this;
}
|
||||
|
||||
AsyncWebResponse AsyncWebRequest::fetch(std::string const& url) {
|
||||
m_url = url;
|
||||
return AsyncWebResponse(*this);
|
||||
|
|
Loading…
Reference in a new issue