Prepare for PMR with a memory_resource that understands heap_caps_malloc

custom
jacqueline 2 years ago
parent 2849399d54
commit f6d0642109
  1. 5
      src/memory/CMakeLists.txt
  2. 0
      src/memory/allocator.cpp
  3. 86
      src/memory/arena.cpp
  4. 91
      src/memory/include/arena.hpp
  5. 42
      src/memory/include/memory_resource.hpp
  6. 34
      src/memory/memory_resource.cpp
  7. 5
      src/util/CMakeLists.txt

@@ -2,5 +2,8 @@
#
# SPDX-License-Identifier: GPL-3.0-only
idf_component_register(SRCS "arena.cpp" INCLUDE_DIRS "include" REQUIRES "span" "esp_psram")
# Register the memory component: PMR-backed allocation helpers for ESP-IDF.
idf_component_register(
SRCS "memory_resource.cpp" "allocator.cpp"
INCLUDE_DIRS "include"
REQUIRES "esp_psram")
# Apply the project's strict warning set to this component only.
target_compile_options(${COMPONENT_LIB} PRIVATE ${EXTRA_WARNINGS})

@@ -1,86 +0,0 @@
/*
* Copyright 2023 jacqueline <me@jacqueline.id.au>
*
* SPDX-License-Identifier: GPL-3.0-only
*/
#include "arena.hpp"
#include <cstdint>
#include <optional>
#include "esp_heap_caps.h"
#include "freertos/queue.h"
#include "span.hpp"
namespace memory {
/*
 * Carves one contiguous heap_caps_malloc allocation into `num_blocks`
 * fixed-size blocks, and seeds a FreeRTOS queue with a pointer to each
 * block to act as the free list.
 */
Arena::Arena(std::size_t block_size,
             std::size_t num_blocks,
             uint32_t alloc_flags)
    : block_size_(block_size) {
  pool_ = static_cast<std::byte*>(
      heap_caps_malloc(block_size * num_blocks, alloc_flags));
  // The arena is unusable without its pool; fail loudly rather than
  // handing out offsets from nullptr later.
  assert(pool_ != nullptr);
  free_blocks_ = xQueueCreate(num_blocks, sizeof(void*));
  // std::size_t index avoids the signed/unsigned comparison with num_blocks.
  for (std::size_t i = 0; i < num_blocks; i++) {
    std::byte* block = pool_ + (i * block_size);
    // Queue has exactly num_blocks capacity, so this send cannot fail.
    xQueueSend(free_blocks_, &block, 0);
  }
}
Arena::~Arena() {
  // We shouldn't have any blocks in use when destroying an arena.
  assert(uxQueueSpacesAvailable(free_blocks_) == 0);
  vQueueDelete(free_blocks_);
  // Pool was obtained via heap_caps_malloc; release it with the matching
  // heap_caps_free rather than plain free().
  heap_caps_free(pool_);
}
/*
 * Pops a block from the free list without blocking. Absent when every
 * block is currently handed out.
 */
auto Arena::Acquire() -> std::optional<ArenaPtr> {
  std::byte* block;
  if (!xQueueReceive(free_blocks_, &block, 0)) {
    return {};
  }
  return ArenaPtr{this, block, block_size_, 0};
}
/* Pushes a previously acquired block back onto the free list. */
auto Arena::Return(ArenaPtr p) -> void {
  // Blocks may only be returned to the arena that handed them out.
  assert(p.owner == this);
  // Capacity covers every block, so a non-blocking send always succeeds.
  xQueueSend(free_blocks_, &p.start, 0);
}
/* Each queued message corresponds to exactly one unused block. */
auto Arena::BlocksFree() -> std::size_t {
  return static_cast<std::size_t>(uxQueueMessagesWaiting(free_blocks_));
}
/* Acquires a block from `a`, wrapping a successful result in RAII. */
auto ArenaRef::Acquire(Arena* a) -> std::optional<ArenaRef> {
  auto maybe_ptr = a->Acquire();
  if (!maybe_ptr) {
    return {};
  }
  return ArenaRef{*maybe_ptr};
}
// Takes ownership of an already-acquired block.
ArenaRef::ArenaRef(ArenaPtr p) : ptr(p) {}
// Move: steal the other ref's block via Release(), which empties the source
// so its destructor won't return the block a second time.
ArenaRef::ArenaRef(ArenaRef&& other) : ptr(other.Release()) {}
/*
 * Relinquishes ownership: hands the underlying pointer to the caller and
 * empties this ref so the destructor becomes a no-op.
 */
auto ArenaRef::Release() -> ArenaPtr {
  ArenaPtr released = ptr;
  ptr = ArenaPtr{nullptr, nullptr, 0, 0};
  return released;
}
ArenaRef::~ArenaRef() {
  // A moved-from (or Release()d) ref has no owner and nothing to give back.
  if (ptr.owner == nullptr) {
    return;
  }
  ptr.owner->Return(ptr);
}
} // namespace memory

@@ -1,91 +0,0 @@
/*
* Copyright 2023 jacqueline <me@jacqueline.id.au>
*
* SPDX-License-Identifier: GPL-3.0-only
*/
#pragma once
#include <cstdint>
#include <optional>
#include <utility>
#include "freertos/FreeRTOS.h"
#include "freertos/queue.h"
#include "span.hpp"
#include "sys/_stdint.h"
namespace memory {
class Arena;
/*
 * A pointer to data that has been given out by an Arena, plus extra accounting
 * information so that it can be returned properly.
 */
struct ArenaPtr {
  // Arena the block must be returned to; nullptr for a released/empty ptr.
  Arena* owner;
  // First byte of the block.
  std::byte* start;
  // Total capacity of the block, in bytes.
  std::size_t size;
  // A convenience for keeping track of the subset of the block that has had
  // data placed within it.
  std::size_t used_size;
};
/*
 * A basic memory arena. This class mediates access to fixed-size blocks of
 * memory within a larger contiguous block. This is faster than re-allocating
 * smaller blocks every time they're needed, and lets us easily limit the
 * maximum size of the memory used.
 *
 * A single arena instance is safe to be used concurrently by multiple tasks,
 * however there is no built in synchronisation of the underlying memory.
 */
class Arena {
 public:
  // `alloc_flags` are heap_caps_malloc capability flags (MALLOC_CAP_*)
  // selecting which heap the pool is carved from.
  Arena(std::size_t block_size, std::size_t num_blocks, uint32_t alloc_flags);
  ~Arena();

  /*
   * Attempts to receive an allocation from this arena. Returns absent if
   * there are no blocks left.
   */
  auto Acquire() -> std::optional<ArenaPtr>;

  /* Returns a previously allocated block to this arena. */
  auto Return(ArenaPtr) -> void;

  /* Returns the number of blocks that are currently free. */
  auto BlocksFree() -> std::size_t;

  // Not copyable: the pool and queue are uniquely owned.
  Arena(const Arena&) = delete;
  Arena& operator=(const Arena&) = delete;

 private:
  std::size_t block_size_;
  // The large memory allocation that is divided into blocks.
  std::byte* pool_;
  // A FreeRTOS queue containing the blocks that are currently unused.
  QueueHandle_t free_blocks_;
};
/*
 * Wrapper around an ArenaPtr that handles acquiring and returning the block
 * through RAII.
 */
class ArenaRef {
 public:
  /* Acquires a block from `a`; absent if the arena is exhausted. */
  static auto Acquire(Arena* a) -> std::optional<ArenaRef>;
  /* Takes ownership of an already-acquired block. */
  explicit ArenaRef(ArenaPtr ptr);
  /* Returns the block to its owner, unless Release() was called. */
  ~ArenaRef();
  /* Gives up ownership, returning the underlying pointer to the caller. */
  auto Release() -> ArenaPtr;
  ArenaRef(ArenaRef&&);
  // Not copyable: two live refs would return the same block twice.
  ArenaRef(const ArenaRef&) = delete;
  // Was `Arena& operator=(const Arena&) = delete;` -- a copy-paste slip that
  // declared an unrelated assignment-from-Arena instead of deleting this
  // class's own copy assignment.
  ArenaRef& operator=(const ArenaRef&) = delete;

  ArenaPtr ptr;
};
} // namespace memory

@@ -0,0 +1,42 @@
/*
* Copyright 2023 jacqueline <me@jacqueline.id.au>
*
* SPDX-License-Identifier: GPL-3.0-only
*/
#pragma once
#include <memory_resource>
#include <string>
#include <esp_heap_caps.h>
#include <stdint.h>
namespace memory {
// Capability flags forwarded to heap_caps_malloc to select which ESP32 heap
// a Resource allocates from. Values are the raw MALLOC_CAP_* bitmasks.
enum class Capabilities : uint32_t {
  // No particular requirement; whatever the default heap provides.
  kDefault = MALLOC_CAP_DEFAULT,
  // Internal SRAM that supports 8-bit (byte-wise) access.
  kInternal = MALLOC_CAP_INTERNAL | MALLOC_CAP_8BIT,
  // Memory suitable for DMA transfers.
  kDma = MALLOC_CAP_DMA,
  // External SPI RAM.
  kSpiRam = MALLOC_CAP_SPIRAM,
};
/*
 * A std::pmr::memory_resource implementation that allocates through
 * heap_caps_malloc using a fixed set of capability flags, allowing PMR
 * containers to be placed in a specific ESP32 heap.
 */
class Resource : public std::pmr::memory_resource {
 public:
  explicit Resource(Capabilities caps) : caps_(caps) {}

 private:
  // Heap selection flags applied to every allocation from this resource.
  Capabilities caps_;

  void* do_allocate(std::size_t bytes, std::size_t alignment) override;
  void do_deallocate(void* p,
                     std::size_t bytes,
                     std::size_t alignment) override;
  bool do_is_equal(const std::pmr::memory_resource& other) const noexcept override;
};
extern Resource kSpiRamResource;
} // namespace memory

@@ -0,0 +1,34 @@
/*
* Copyright 2023 jacqueline <me@jacqueline.id.au>
*
* SPDX-License-Identifier: GPL-3.0-only
*/
#include "memory_resource.hpp"
#include <cstddef>
#include <memory_resource>
#include <string>
#include <utility>

#include <esp_heap_caps.h>
#include <stdint.h>
namespace memory {
// Shared resource instance for allocations placed in external SPI RAM.
Resource kSpiRamResource{Capabilities::kSpiRam};
void* Resource::do_allocate(std::size_t bytes, std::size_t alignment) {
return heap_caps_malloc(bytes, std::to_underlying(caps_));
}
void Resource::do_deallocate(void* p,
                             std::size_t /*bytes*/,
                             std::size_t /*alignment*/) {
  // The heap tracks block sizes itself, so the size and alignment hints are
  // unused; names are commented out to stay clean under ${EXTRA_WARNINGS}.
  heap_caps_free(p);
}
bool Resource::do_is_equal(
    const std::pmr::memory_resource& other) const noexcept {
  // `noexcept` is required here: both the in-class declaration and the
  // virtual in std::pmr::memory_resource are noexcept, so omitting it makes
  // the out-of-line definition fail to match.
  //
  // Identity comparison: memory from one Resource instance may only be
  // deallocated through that same instance.
  return this == &other;
}
} // namespace memory

@@ -2,7 +2,4 @@
#
# SPDX-License-Identifier: GPL-3.0-only
idf_component_register(
SRCS "allocator.cpp"
INCLUDE_DIRS "include"
REQUIRES "database")
# NOTE(review): the SRCS list is empty, making this a header-only component;
# consider dropping the bare SRCS keyword entirely for clarity.
idf_component_register(SRCS INCLUDE_DIRS "include" REQUIRES "database")

Loading…
Cancel
Save