- No more IndexRecord/Result/dbGetPage nonsense
- Queue is just track ids
- i am so tired and have so much to do

custom
parent 009f69c929
commit 3f7f199cb9
File diff suppressed because it is too large
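The commit message above describes replacing the page-based playlist sources removed below with a playback queue that holds plain track ids. As a rough illustration of that direction only (SimpleQueue and everything in it are assumptions, not code from this commit), such a queue might look like:

// Illustration only; not part of this commit's diff. A queue that stores
// bare database::TrackId values, with no IndexRecord/Result paging involved.
#include <deque>
#include <optional>

#include "track.hpp"  // assumed to provide database::TrackId

namespace playlist {

class SimpleQueue {
 public:
  auto Add(database::TrackId id) -> void { tracks_.push_back(id); }

  auto Current() const -> std::optional<database::TrackId> {
    if (tracks_.empty()) {
      return {};
    }
    return tracks_.front();
  }

  auto Advance() -> std::optional<database::TrackId> {
    if (!tracks_.empty()) {
      tracks_.pop_front();
    }
    return Current();
  }

 private:
  std::deque<database::TrackId> tracks_;
};

}  // namespace playlist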
CMakeLists.txt
@@ -1,10 +0,0 @@
# Copyright 2023 jacqueline <me@jacqueline.id.au>
#
# SPDX-License-Identifier: GPL-3.0-only

idf_component_register(
  SRCS "source.cpp" "shuffler.cpp"
  INCLUDE_DIRS "include"
  REQUIRES "database" "util")

target_compile_options(${COMPONENT_LIB} PRIVATE ${EXTRA_WARNINGS})
shuffler.hpp
@@ -1,68 +0,0 @@
/*
 * Copyright 2023 jacqueline <me@jacqueline.id.au>
 *
 * SPDX-License-Identifier: GPL-3.0-only
 */

#pragma once

#include <deque>
#include <memory>
#include <mutex>
#include <variant>
#include <vector>

#include "bloom_filter.hpp"
#include "database.hpp"
#include "future_fetcher.hpp"
#include "random.hpp"
#include "source.hpp"
#include "track.hpp"

namespace playlist {

/*
 * A source composed of other sources and/or specific extra tracks. Supports
 * iteration over its contents in a random order.
 */
class Shuffler : public ISource {
 public:
  static auto Create() -> Shuffler*;

  explicit Shuffler(
      util::IRandom* random,
      std::unique_ptr<util::BloomFilter<database::TrackId>> filter);

  auto Current() -> std::optional<database::TrackId> override;
  auto Advance() -> std::optional<database::TrackId> override;
  auto Peek(std::size_t, std::vector<database::TrackId>*)
      -> std::size_t override;

  typedef std::variant<database::TrackId, std::shared_ptr<IResetableSource>>
      Item;
  auto Add(Item) -> void;

  /*
   * Returns the enqueued items, starting from the current item, in their
   * original insertion order.
   */
  auto Unshuffle() -> std::vector<Item>;

  // Not copyable or movable.
  Shuffler(const Shuffler&) = delete;
  Shuffler& operator=(const Shuffler&) = delete;

 private:
  auto RefillBuffer() -> void;

  util::IRandom* random_;

  std::unique_ptr<util::BloomFilter<database::TrackId>> already_played_;
  bool out_of_items_;

  std::deque<Item> ordered_items_;
  std::deque<database::TrackId> shuffled_items_buffer_;
};

}  // namespace playlist
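A brief usage sketch of the Shuffler interface declared above. The helper, its parameters, and their values are placeholders for illustration; none of them appear in this diff.

// Hypothetical usage of the Shuffler declared above. `some_track_id` and
// `album_source` stand in for values obtained elsewhere in the application.
#include <memory>
#include <optional>

#include "shuffler.hpp"

auto EnqueueExample(database::TrackId some_track_id,
                    std::shared_ptr<playlist::IResetableSource> album_source)
    -> std::optional<database::TrackId> {
  auto* shuffler = playlist::Shuffler::Create();
  shuffler->Add(some_track_id);  // enqueue a single track by id
  shuffler->Add(album_source);   // or an entire nested source of tracks
  return shuffler->Current();    // the next shuffled track, if any
}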
source.hpp
@@ -1,157 +0,0 @@
/*
 * Copyright 2023 jacqueline <me@jacqueline.id.au>
 *
 * SPDX-License-Identifier: GPL-3.0-only
 */

#pragma once

#include <deque>
#include <memory>
#include <mutex>
#include <stack>
#include <variant>
#include <vector>

#include "bloom_filter.hpp"
#include "database.hpp"
#include "future_fetcher.hpp"
#include "random.hpp"
#include "track.hpp"

namespace playlist {

/*
 * Stateful interface for iterating over a collection of tracks by id.
 */
class ISource {
 public:
  virtual ~ISource() {}

  virtual auto Current() -> std::optional<database::TrackId> = 0;

  /*
   * Discards the current track id and continues to the next in this source.
   * Returns the new current track id.
   */
  virtual auto Advance() -> std::optional<database::TrackId> = 0;

  /*
   * Repeatedly advances until a track with the given id is the current track.
   * Returns false if this source ran out of tracks before the requested id
   * was encountered, true otherwise.
   */
  virtual auto AdvanceTo(database::TrackId id) -> bool {
    for (auto t = Current(); t.has_value(); t = Advance()) {
      if (*t == id) {
        return true;
      }
    }
    return false;
  }

  /*
   * Places the next n tracks into the given vector, in order. Does not change
   * the value returned by Current().
   */
  virtual auto Peek(std::size_t n, std::vector<database::TrackId>*)
      -> std::size_t = 0;
};

/*
 * A Source that supports restarting iteration from its original initial
 * value.
 */
class IResetableSource : public ISource {
 public:
  virtual ~IResetableSource() {}

  virtual auto Previous() -> std::optional<database::TrackId> = 0;

  /*
   * Restarts iteration from this source's initial value.
   */
  virtual auto Reset() -> void = 0;
};

class IteratorSource : public IResetableSource {
 public:
  IteratorSource(const database::Iterator&);

  auto Current() -> std::optional<database::TrackId> override;
  auto Advance() -> std::optional<database::TrackId> override;
  auto Peek(std::size_t n, std::vector<database::TrackId>*)
      -> std::size_t override;

  auto Previous() -> std::optional<database::TrackId> override;
  auto Reset() -> void override;

 private:
  const database::Iterator& start_;
  std::optional<database::TrackId> current_;
  std::stack<database::Iterator, std::vector<database::Iterator>> next_;
};

auto CreateSourceFromResults(
    std::weak_ptr<database::Database>,
    std::shared_ptr<database::Result<database::IndexRecord>>)
    -> std::shared_ptr<IResetableSource>;

class IndexRecordSource : public IResetableSource {
 public:
  IndexRecordSource(std::weak_ptr<database::Database> db,
                    std::shared_ptr<database::Result<database::IndexRecord>>);

  IndexRecordSource(std::weak_ptr<database::Database> db,
                    std::shared_ptr<database::Result<database::IndexRecord>>,
                    std::size_t,
                    std::shared_ptr<database::Result<database::IndexRecord>>,
                    std::size_t);

  auto Current() -> std::optional<database::TrackId> override;
  auto Advance() -> std::optional<database::TrackId> override;
  auto Peek(std::size_t n, std::vector<database::TrackId>*)
      -> std::size_t override;

  auto Previous() -> std::optional<database::TrackId> override;
  auto Reset() -> void override;

 private:
  std::weak_ptr<database::Database> db_;

  std::shared_ptr<database::Result<database::IndexRecord>> initial_page_;
  ssize_t initial_item_;

  std::shared_ptr<database::Result<database::IndexRecord>> current_page_;
  ssize_t current_item_;
};

class NestedSource : public IResetableSource {
 public:
  NestedSource(std::weak_ptr<database::Database> db,
               std::shared_ptr<database::Result<database::IndexRecord>>);

  auto Current() -> std::optional<database::TrackId> override;
  auto Advance() -> std::optional<database::TrackId> override;
  auto Peek(std::size_t n, std::vector<database::TrackId>*)
      -> std::size_t override;

  auto Previous() -> std::optional<database::TrackId> override;
  auto Reset() -> void override;

 private:
  auto CreateChild(std::shared_ptr<database::IndexRecord> page)
      -> std::shared_ptr<IResetableSource>;

  std::weak_ptr<database::Database> db_;

  std::shared_ptr<database::Result<database::IndexRecord>> initial_page_;
  ssize_t initial_item_;

  std::shared_ptr<database::Result<database::IndexRecord>> current_page_;
  ssize_t current_item_;

  std::shared_ptr<IResetableSource> current_child_;
};

}  // namespace playlist
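The Current()/Advance() contract declared above (Advance discards the current id and returns the new one) implies a simple drain loop, as the default AdvanceTo shows. A minimal helper sketch along the same lines; the DrainSource name is an assumption, not part of this diff:

// Hypothetical helper, not part of this diff: collects every remaining track
// id from an ISource by following the Current()/Advance() contract above.
#include <vector>

#include "source.hpp"

auto DrainSource(playlist::ISource& src) -> std::vector<database::TrackId> {
  std::vector<database::TrackId> out;
  for (auto id = src.Current(); id.has_value(); id = src.Advance()) {
    out.push_back(*id);
  }
  return out;
}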
shuffler.cpp
@@ -1,166 +0,0 @@
/*
 * Copyright 2023 jacqueline <me@jacqueline.id.au>
 *
 * SPDX-License-Identifier: GPL-3.0-only
 */

#include "shuffler.hpp"

#include <algorithm>
#include <functional>
#include <memory>
#include <set>
#include <variant>

#include "bloom_filter.hpp"
#include "database.hpp"
#include "komihash.h"
#include "random.hpp"
#include "track.hpp"

static constexpr std::size_t kShufflerBufferSize = 32;

namespace playlist {

auto Shuffler::Create() -> Shuffler* {
  return new Shuffler(util::sRandom,
                      std::make_unique<util::BloomFilter<database::TrackId>>(
                          [](database::TrackId id) {
                            return komihash(&id, sizeof(database::TrackId), 0);
                          }));
}

Shuffler::Shuffler(util::IRandom* random,
                   std::unique_ptr<util::BloomFilter<database::TrackId>> filter)
    : random_(random), already_played_(std::move(filter)) {}

auto Shuffler::Current() -> std::optional<database::TrackId> {
  if (shuffled_items_buffer_.empty()) {
    return {};
  }
  return shuffled_items_buffer_.front();
}

auto Shuffler::Advance() -> std::optional<database::TrackId> {
  if (shuffled_items_buffer_.empty() && !out_of_items_) {
    RefillBuffer();
  }

  auto res = Current();
  if (res) {
    // Mark tracks off in the bloom filter only *after* they've been advanced
    // past. This gives us the most flexibility for reshuffling when adding new
    // items.
    already_played_->Insert(*res);
    shuffled_items_buffer_.pop_front();
  }
  return res;
}

auto Shuffler::Peek(std::size_t num, std::vector<database::TrackId>* out)
    -> std::size_t {
  if (shuffled_items_buffer_.size() < num) {
    RefillBuffer();
  }
  for (std::size_t i = 0; i < num; i++) {
    if (i >= shuffled_items_buffer_.size()) {
      // We must be out of data, since the buffer didn't fill up.
      return i;
    }
    out->push_back(shuffled_items_buffer_.at(i));
  }
  return num;
}

auto Shuffler::Add(Item item) -> void {
  ordered_items_.push_back(item);
  out_of_items_ = false;

  // Empty out the buffer of already shuffled items, since we will need to
  // shuffle again in order to incorporate the newly added item(s). We keep the
  // current item, however, because we wouldn't want Add() to change the value
  // of Current() unless we're completely out of items.
  if (shuffled_items_buffer_.size() > 1) {
    shuffled_items_buffer_.erase(shuffled_items_buffer_.begin() + 1,
                                 shuffled_items_buffer_.end());
  }
  RefillBuffer();
}

auto Shuffler::Unshuffle() -> std::vector<Item> {
  std::vector<Item> ret;
  database::TrackId current = shuffled_items_buffer_.front();
  bool has_found_current = false;

  for (const Item& item : ordered_items_) {
    if (!has_found_current) {
      // TODO(jacqueline): *Should* this include previous items? What is the
      // 'previous' button meant to do after unshuffling?
      if (std::holds_alternative<database::TrackId>(item)) {
        has_found_current = current == std::get<database::TrackId>(item);
      } else {
        auto source = std::get<std::shared_ptr<IResetableSource>>(item);
        source->Reset();
        has_found_current = source->AdvanceTo(current);
      }
    } else {
      ret.push_back(item);
    }
  }

  return ret;
}

auto Shuffler::RefillBuffer() -> void {
  // Don't waste time iterating if we know there's nothing new.
  if (out_of_items_) {
    return;
  }

  int num_to_sample = kShufflerBufferSize - shuffled_items_buffer_.size();
  int resovoir_offset = shuffled_items_buffer_.size();

  std::set<database::TrackId> in_buffer;
  for (const database::TrackId& id : shuffled_items_buffer_) {
    in_buffer.insert(id);
  }

  // Reservoir sampling: the first `num_to_sample` eligible tracks fill the
  // buffer directly, then each later track replaces a random slot with
  // decreasing probability, so every eligible track is equally likely to be
  // selected.
  uint32_t i = 0;
  auto consider_item = [&, this](const database::TrackId& item) {
    if (already_played_->Contains(item) || in_buffer.contains(item)) {
      return;
    }
    if (i < num_to_sample) {
      shuffled_items_buffer_.push_back(item);
    } else {
      uint32_t index_to_replace = random_->RangeInclusive(0, i);
      if (index_to_replace < num_to_sample) {
        shuffled_items_buffer_[resovoir_offset + index_to_replace] = item;
      }
    }
    i++;
  };

  for (const Item& item : ordered_items_) {
    if (std::holds_alternative<database::TrackId>(item)) {
      std::invoke(consider_item, std::get<database::TrackId>(item));
    } else {
      auto source = std::get<std::shared_ptr<IResetableSource>>(item);
      source->Reset();
      while (source->Advance()) {
        std::invoke(consider_item, *source->Current());
      }
    }
  }

  // If we saw no more eligible tracks than we had space for, then everything
  // left has now been buffered, and a future refill would find nothing new.
  out_of_items_ = i <= num_to_sample;

  // We've now got a random *selection*, but the order might be predictable
  // (e.g. if there were only `num_to_sample` new items). Do a final in-memory
  // shuffle.
  std::random_shuffle(shuffled_items_buffer_.begin() + resovoir_offset,
                      shuffled_items_buffer_.end());
}

}  // namespace playlist
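Shuffler::RefillBuffer above selects tracks via reservoir sampling. A standalone sketch of that scheme, for illustration only: it uses std::mt19937 in place of util::IRandom and omits the bloom-filter exclusion of already-played tracks.

// Standalone illustration of the reservoir sampling used by RefillBuffer;
// not part of this diff. Picks k items uniformly from a sequence in one pass.
#include <cstdint>
#include <random>
#include <vector>

auto SampleK(const std::vector<uint32_t>& items, std::size_t k,
             std::mt19937& rng) -> std::vector<uint32_t> {
  std::vector<uint32_t> reservoir;
  reservoir.reserve(k);
  for (std::size_t i = 0; i < items.size(); i++) {
    if (i < k) {
      // The first k items fill the reservoir directly.
      reservoir.push_back(items[i]);
    } else {
      // Each later item replaces a random slot with probability k / (i + 1),
      // leaving every item equally likely to end up selected.
      std::uniform_int_distribution<std::size_t> dist(0, i);
      std::size_t j = dist(rng);
      if (j < k) {
        reservoir[j] = items[i];
      }
    }
  }
  return reservoir;
}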
source.cpp
@@ -1,360 +0,0 @@
/*
 * Copyright 2023 jacqueline <me@jacqueline.id.au>
 *
 * SPDX-License-Identifier: GPL-3.0-only
 */

#include "source.hpp"

#include <algorithm>
#include <functional>
#include <memory>
#include <set>
#include <variant>

#include "esp_log.h"

#include "bloom_filter.hpp"
#include "database.hpp"
#include "komihash.h"
#include "random.hpp"
#include "track.hpp"

namespace playlist {

[[maybe_unused]] static constexpr char kTag[] = "queue_src";

IteratorSource::IteratorSource(const database::Iterator& it)
    : start_(it), current_(), next_() {
  Reset();
  Advance();
}

auto IteratorSource::Current() -> std::optional<database::TrackId> {
  return current_;
}

auto IteratorSource::Advance() -> std::optional<database::TrackId> {
  ESP_LOGI(kTag, "advancing");
  // Depth-first walk over the iterator stack: exhausted iterators are popped,
  // and non-track records are expanded into new iterators until a track is
  // found.
  while (!next_.empty()) {
    auto next = next_.top().NextSync();
    if (!next) {
      ESP_LOGI(kTag, "top was empty");
      next_.pop();
      continue;
    }
    if (next->track()) {
      ESP_LOGI(kTag, "top held track %lu", next->track().value_or(0));
      current_ = next->track();
      return current_;
    }
    ESP_LOGI(kTag, "top held records");
    next_.push(database::Iterator(start_.database(), next->Expand(1).value()));
  }
  ESP_LOGI(kTag, "exhausted");
  return {};
}

auto IteratorSource::Peek(std::size_t n, std::vector<database::TrackId>*)
    -> std::size_t {
  return 0;
}

auto IteratorSource::Previous() -> std::optional<database::TrackId> {
  return {};
}

auto IteratorSource::Reset() -> void {
  while (!next_.empty()) {
    next_.pop();
  }
  next_.push(start_);
}

auto CreateSourceFromResults(
    std::weak_ptr<database::Database> db,
    std::shared_ptr<database::Result<database::IndexRecord>> results)
    -> std::shared_ptr<IResetableSource> {
  if (results->values()[0]->track()) {
    return std::make_shared<IndexRecordSource>(db, results);
  } else {
    return std::make_shared<NestedSource>(db, results);
  }
}

IndexRecordSource::IndexRecordSource(
    std::weak_ptr<database::Database> db,
    std::shared_ptr<database::Result<database::IndexRecord>> initial)
    : db_(db),
      initial_page_(initial),
      initial_item_(0),
      current_page_(initial_page_),
      current_item_(initial_item_) {}

IndexRecordSource::IndexRecordSource(
    std::weak_ptr<database::Database> db,
    std::shared_ptr<database::Result<database::IndexRecord>> initial,
    std::size_t initial_index,
    std::shared_ptr<database::Result<database::IndexRecord>> current,
    std::size_t current_index)
    : db_(db),
      initial_page_(initial),
      initial_item_(initial_index),
      current_page_(current),
      current_item_(current_index) {}

auto IndexRecordSource::Current() -> std::optional<database::TrackId> {
  if (current_page_->values().size() <= current_item_) {
    return {};
  }
  if (current_page_ == initial_page_ && current_item_ < initial_item_) {
    return {};
  }

  return current_page_->values().at(current_item_)->track();
}

auto IndexRecordSource::Advance() -> std::optional<database::TrackId> {
  current_item_++;
  if (current_item_ >= current_page_->values().size()) {
    auto next_page = current_page_->next_page();
    if (!next_page) {
      current_item_--;
      return {};
    }

    auto db = db_.lock();
    if (!db) {
      return {};
    }

    current_page_.reset(db->GetPage<database::IndexRecord>(&*next_page).get());
    current_item_ = 0;
  }

  return Current();
}

auto IndexRecordSource::Previous() -> std::optional<database::TrackId> {
  if (current_page_ == initial_page_ && current_item_ <= initial_item_) {
    return {};
  }

  current_item_--;
  if (current_item_ < 0) {
    auto prev_page = current_page_->prev_page();
    if (!prev_page) {
      return {};
    }

    auto db = db_.lock();
    if (!db) {
      return {};
    }

    current_page_.reset(db->GetPage<database::IndexRecord>(&*prev_page).get());
    current_item_ = current_page_->values().size() - 1;
  }

  return Current();
}

auto IndexRecordSource::Peek(std::size_t n, std::vector<database::TrackId>* out)
    -> std::size_t {
  if (current_page_->values().size() <= current_item_) {
    return 0;
  }

  auto db = db_.lock();
  if (!db) {
    return 0;
  }

  std::size_t items_added = 0;

  std::shared_ptr<database::Result<database::IndexRecord>> working_page =
      current_page_;
  std::size_t working_item = current_item_ + 1;

  while (n > 0) {
    if (working_item >= working_page->values().size()) {
      auto next_page = working_page->next_page();
      if (!next_page) {
        break;
      }
      // TODO(jacqueline): It would probably be a good idea to hold onto these
      // peeked pages, to avoid needing to look them up again later.
      working_page.reset(db->GetPage<database::IndexRecord>(&*next_page).get());
      working_item = 0;
    }

    auto record = working_page->values().at(working_item);
    if (record->track()) {
      out->push_back(record->track().value());
      n--;
      items_added++;
    }
    working_item++;
  }

  return items_added;
}

auto IndexRecordSource::Reset() -> void {
  current_page_ = initial_page_;
  current_item_ = initial_item_;
}

NestedSource::NestedSource(
    std::weak_ptr<database::Database> db,
    std::shared_ptr<database::Result<database::IndexRecord>> initial)
    : db_(db),
      initial_page_(initial),
      initial_item_(0),
      current_page_(initial_page_),
      current_item_(initial_item_),
      current_child_(CreateChild(initial->values()[0])) {}

auto NestedSource::Current() -> std::optional<database::TrackId> {
  if (current_child_) {
    return current_child_->Current();
  }
  return {};
}

auto NestedSource::Advance() -> std::optional<database::TrackId> {
  if (!current_child_) {
    return {};
  }

  auto child_next = current_child_->Advance();
  if (child_next) {
    return child_next;
  }
  // Our current child has run out of tracks. Move on to the next child.
  current_item_++;
  current_child_.reset();

  if (current_item_ >= current_page_->values().size()) {
    // We're even out of items in this page!
    auto next_page = current_page_->next_page();
    if (!next_page) {
      current_item_--;
      return {};
    }

    auto db = db_.lock();
    if (!db) {
      return {};
    }

    current_page_.reset(db->GetPage<database::IndexRecord>(&*next_page).get());
    current_item_ = 0;
  }
  current_child_ = CreateChild(current_page_->values()[current_item_]);

  return Current();
}

auto NestedSource::Previous() -> std::optional<database::TrackId> {
  if (current_page_ == initial_page_ && current_item_ <= initial_item_) {
    return {};
  }

  current_item_--;
  current_child_.reset();

  if (current_item_ < 0) {
    auto prev_page = current_page_->prev_page();
    if (!prev_page) {
      return {};
    }

    auto db = db_.lock();
    if (!db) {
      return {};
    }

    current_page_.reset(db->GetPage<database::IndexRecord>(&*prev_page).get());
    current_item_ = current_page_->values().size() - 1;
  }
  current_child_ = CreateChild(current_page_->values()[current_item_]);

  return Current();
}

auto NestedSource::Peek(std::size_t n, std::vector<database::TrackId>* out)
    -> std::size_t {
  if (current_page_->values().size() <= current_item_) {
    return 0;
  }

  auto db = db_.lock();
  if (!db) {
    return 0;
  }

  std::size_t items_added = 0;

  std::shared_ptr<database::Result<database::IndexRecord>> working_page =
      current_page_;
  std::size_t working_item = current_item_;
  std::shared_ptr<IResetableSource> working_child = current_child_;

  while (working_child) {
    auto res = working_child->Peek(n, out);
    n -= res;
    items_added += res;

    if (n == 0) {
      break;
    } else {
      working_item++;
      if (working_item < working_page->values().size()) {
        working_child = CreateChild(working_page->values()[working_item]);
      } else {
        auto next_page = working_page->next_page();
        if (!next_page) {
          break;
        }
        working_page.reset(
            db->GetPage<database::IndexRecord>(&*next_page).get());
        working_item = 0;
        working_child = CreateChild(working_page->values()[0]);
      }
    }
  }

  return items_added;
}

auto NestedSource::Reset() -> void {
  current_page_ = initial_page_;
  current_item_ = initial_item_;
  current_child_ = CreateChild(initial_page_->values()[initial_item_]);
}

auto NestedSource::CreateChild(std::shared_ptr<database::IndexRecord> record)
    -> std::shared_ptr<IResetableSource> {
  // Expand the record into its child page, then wrap that page in whichever
  // source type matches its contents: a leaf page of tracks, or another
  // nested level.
  auto cont = record->Expand(10);
  if (!cont) {
    return {};
  }
  auto db = db_.lock();
  if (!db) {
    return {};
  }
  std::shared_ptr<database::Result<database::IndexRecord>> next_level{
      db->GetPage<database::IndexRecord>(&*cont).get()};
  if (!next_level) {
    return {};
  }
  auto next_level_record = next_level->values()[0];
  if (next_level_record->track()) {
    return std::make_shared<IndexRecordSource>(db_, next_level);
  } else {
    return std::make_shared<NestedSource>(db_, next_level);
  }
}

}  // namespace playlist
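Every database access in the sources above is guarded by promoting the weak_ptr before use. The pattern in isolation, for reference; the UseDatabase wrapper is illustrative only and does not appear in this diff.

// Illustration of the weak_ptr guard used throughout source.cpp above; not
// part of this diff. The database is only touched if it still exists.
#include <memory>

#include "database.hpp"

auto UseDatabase(std::weak_ptr<database::Database> weak_db) -> bool {
  auto db = weak_db.lock();  // promote to shared_ptr, or get nullptr
  if (!db) {
    return false;            // the Database has already been destroyed
  }
  // ... it is safe to use *db here; `db` keeps it alive for this scope ...
  return true;
}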