:github_url: https://github.com/pytorch/pytorch

.. _program_listing_file_torch_csrc_api_include_torch_data_dataloader_stateless.h:

Program Listing for File stateless.h
====================================

|exhale_lsh| :ref:`Return to documentation for file <file_torch_csrc_api_include_torch_data_dataloader_stateless.h>` (``torch/csrc/api/include/torch/data/dataloader/stateless.h``)

.. |exhale_lsh| unicode:: U+021B0 .. UPWARDS ARROW WITH TIP LEFTWARDS

.. code-block:: cpp

   #pragma once

   #include <torch/data/dataloader/base.h>
   #include <torch/data/worker_exception.h>

   #include <c10/util/Exception.h>
   #include <c10/util/irange.h>

   #include <cstddef>
   #include <thread>
   #include <utility>

   namespace torch::data {

   template <typename Dataset, typename Sampler>
   class StatelessDataLoader : public DataLoaderBase<
                                   Dataset,
                                   typename Dataset::BatchType,
                                   typename Sampler::BatchRequestType> {
    public:
     using super = DataLoaderBase<
         Dataset,
         typename Dataset::BatchType,
         typename Sampler::BatchRequestType>;
     using typename super::BatchRequestType;

     /// Constructs the `StatelessDataLoader` from a `dataset`, a `sampler`,
     /// and some `options`.
     StatelessDataLoader(
         Dataset dataset,
         Sampler sampler,
         DataLoaderOptions options)
         : super(options), sampler_(std::move(sampler)) {
       for (const auto w : c10::irange(this->options_.workers)) {
         // Here we copy the dataset into the worker thread closure. Each worker
         // has its own copy of the dataset. This means the dataset must be
         // trivially copiable, or else we don't expect more than one worker to
         // be in use.
         (void)w; // Suppress unused variable warning
         this->workers_.emplace_back(
             [this, dataset]() mutable { this->worker_thread(dataset); });
       }
       if (this->options_.workers == 0) {
         this->main_thread_dataset_ =
             std::make_unique<Dataset>(std::move(dataset));
       }
     }

    private:
     /// Resets the internal state of the dataloader and the sampler.
     void reset() override {
       sampler_.reset();
       // Call the base class method last because it calls `prefetch()`
       super::reset();
     }

     /// Queries the sampler for the next batch request (possibly advancing the
     /// sampler's internal state).
     std::optional<BatchRequestType> get_batch_request() override {
       auto indices = sampler_.next(this->options_.batch_size);
       if (!indices ||
           (indices->size() < this->options_.batch_size &&
            this->options_.drop_last)) {
         return std::nullopt;
       }
       AT_ASSERT(indices->size() > 0);
       return indices;
     }

     /// The `Sampler` used to produce batch requests.
     Sampler sampler_;
   };
   } // namespace torch::data
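
In practice, ``StatelessDataLoader`` is constructed indirectly through ``torch::data::make_data_loader`` rather than instantiated by hand. Below is a minimal usage sketch; the MNIST dataset, the ``./mnist`` data path, and the batch size and worker count are illustrative assumptions, not part of this header.

.. code-block:: cpp

   #include <torch/torch.h>

   #include <iostream>

   int main() {
     // Illustrative dataset: MNIST mapped through Stack<> so each batch is a
     // single Example of stacked data/target tensors. The "./mnist" path is
     // an assumption for this sketch.
     auto dataset = torch::data::datasets::MNIST("./mnist")
                        .map(torch::data::transforms::Stack<>());

     // make_data_loader instantiates a StatelessDataLoader for this stateless
     // dataset, pairing it with the requested sampler type. With workers(2),
     // each worker thread receives its own copy of the dataset.
     auto loader = torch::data::make_data_loader<
         torch::data::samplers::SequentialSampler>(
         std::move(dataset),
         torch::data::DataLoaderOptions().batch_size(64).workers(2));

     for (auto& batch : *loader) {
       // Each iteration yields one batch: the sampler produces index batch
       // requests via get_batch_request(), and the dataset maps them to data.
       std::cout << batch.data.sizes() << '\n';
     }
   }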