add support for variable chunk size, double

2023-01-18 00:47:08 +01:00
parent 2e745b617d
commit d888e91ada
8 changed files with 35 additions and 13 deletions
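The hunks below swap the compile-time constant FTInfoSHA1::chunk_size for a per-transfer sha1_info.chunk_size field (the "double" in the title presumably refers to doubling the default value, which is not visible in these hunks). A rough sketch of what the info struct might look like afterwards; the field names follow the constructor code below, while the types and the default value are assumptions:

#include <array>
#include <cstdint>
#include <string>
#include <vector>

// Sketch only, not the header touched by this commit: chunk_size becomes an
// ordinary per-file field instead of a static constant, so each transfer can
// carry its own value. The default below is a placeholder; the actual default
// (reportedly doubled by this commit) is not shown in these hunks.
struct FTInfoSHA1 {
	std::string file_name;
	uint64_t file_size {0};
	uint32_t chunk_size {64*1024}; // placeholder default, real value unknown
	std::vector<std::array<uint8_t, 20>> chunks; // one 20-byte SHA1 per chunk; the real code presumably uses its own digest type
};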

@@ -94,8 +94,8 @@ std::unique_ptr<StateI> ReceiveStartSHA1::nextState(void) {
 	std::cout << "ReceiveStartSHA1 checking existing file\n";
 	size_t f_i {0};
 	size_t tmp_have_count {0};
-	for (size_t c_i = 0; f_i + FTInfoSHA1::chunk_size < file_map.length(); f_i += FTInfoSHA1::chunk_size, c_i++) {
-		if (sha1_info.chunks[c_i] == hash_sha1(file_map.data()+f_i, FTInfoSHA1::chunk_size)) {
+	for (size_t c_i = 0; f_i + sha1_info.chunk_size < file_map.length(); f_i += sha1_info.chunk_size, c_i++) {
+		if (sha1_info.chunks[c_i] == hash_sha1(file_map.data()+f_i, sha1_info.chunk_size)) {
 			have_chunk[c_i] = true;
 			tmp_have_count++;
 		}
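The loop above stops before the last chunk, which can be shorter than chunk_size; a hedged sketch of how that trailing chunk could be verified (the names follow the surrounding code, but this block is an assumption, not part of the diff):

// assumed follow-up to the loop above, not shown in the hunk: the final chunk
// may be shorter than chunk_size, so it is hashed with the remaining length
if (f_i < file_map.length() && !sha1_info.chunks.empty()) {
	const size_t last_i = sha1_info.chunks.size() - 1;
	const size_t rest = file_map.length() - f_i;
	if (sha1_info.chunks[last_i] == hash_sha1(file_map.data()+f_i, rest)) {
		have_chunk[last_i] = true;
		tmp_have_count++;
	}
}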

@@ -29,11 +29,12 @@ SendStartSHA1::SendStartSHA1(ToxClient& tcl, const CommandLine& cl) : StateI(tcl
 	// build info
 	_sha1_info.file_name = std::filesystem::path(cl.send_path).filename().string();
 	_sha1_info.file_size = _file_map.length();
+	//_sha1_info.chunk_size;
 	{ // build chunks
 		size_t i = 0;
-		for (; i + FTInfoSHA1::chunk_size < _file_map.length(); i += FTInfoSHA1::chunk_size) {
-			_sha1_info.chunks.push_back(hash_sha1(_file_map.data()+i, FTInfoSHA1::chunk_size));
+		for (; i + _sha1_info.chunk_size < _file_map.length(); i += _sha1_info.chunk_size) {
+			_sha1_info.chunks.push_back(hash_sha1(_file_map.data()+i, _sha1_info.chunk_size));
 		}
 		if (i < _file_map.length()) {
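With chunk_size now variable per transfer, the expected number of chunks follows from file_size by ceiling division; a small hypothetical helper (not part of the commit) that a receiver could use, for example to size its have_chunk bitmap:

#include <cstddef>

// hypothetical helper, not in this commit: number of chunks for a file,
// counting the trailing partial chunk, and guarding against a zero chunk_size
// now that the value is no longer a compile-time constant
static size_t chunk_count(size_t file_size, size_t chunk_size) {
	if (chunk_size == 0) {
		return 0;
	}
	return (file_size + chunk_size - 1) / chunk_size;
}

For example, a 10 MiB file with a 64 KiB chunk_size yields 160 entries; any remainder smaller than chunk_size adds one more, shorter chunk at the end.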

@@ -188,7 +188,7 @@ bool SHA1::iterate(float delta) {
 	// log
 	_io_log_timer += delta;
-	static const float log_interval {15.f};
+	static const float log_interval {10.f};
 	if (_io_log_timer >= log_interval) {
 		_io_log_timer = 0.f;
@@ -203,6 +203,8 @@ bool SHA1::iterate(float delta) {
 		std::cout << "SHA1 speed down: " << down_kibs << "KiB/s up: " << up_kibs << "KiB/s\n";
 		std::cout << "SHA1 total down: " << _bytes_down / 1024 << "KiB up: " << _bytes_up / 1024 << "KiB\n";
+		std::cout << "SHA1 cwq:" << _chunk_want_queue.size() << " cwqr:" << _chunks_requested.size() << " trc:" << _transfers_receiving_chunk.size() << " tsc:" << _transfers_sending_chunk.size() << "\n";
 	}
 	// TODO: unmap and remap the file every couple of minutes to keep ram usage down?
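For context, down_kibs and up_kibs in the log line above are presumably derived from byte counters accumulated since the previous log; a minimal sketch of one way to compute them (the _bytes_*_interval counters are made-up names, only _bytes_down and _bytes_up appear in the diff):

// hedged sketch, not the code from this commit: convert the bytes moved during
// the last logging window into KiB/s using the (now 10 s) log interval
const float down_kibs = _bytes_down_interval / 1024.f / log_interval;
const float up_kibs = _bytes_up_interval / 1024.f / log_interval;
_bytes_down_interval = 0; // reset the per-interval counters after logging
_bytes_up_interval = 0;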