| adaptive_sampling_converge_filter_count_active(float threshold, bool reset) override | PathTraceWorkGPU | virtual |
| adaptive_sampling_convergence_check_count_active(float threshold, bool reset) | PathTraceWorkGPU | protected |
| alloc_integrator_path_split() | PathTraceWorkGPU | protected |
| alloc_integrator_queue() | PathTraceWorkGPU | protected |
| alloc_integrator_soa() | PathTraceWorkGPU | protected |
| alloc_integrator_sorting() | PathTraceWorkGPU | protected |
| alloc_work_memory() override | PathTraceWorkGPU | virtual |
| buffers_ | PathTraceWork | protected |
| cancel_requested_flag_ | PathTraceWork | protected |
| compact_main_paths(const int num_active_paths) | PathTraceWorkGPU | protected |
| compact_paths(const int num_active_paths, const int max_active_path_index, DeviceKernel terminated_paths_kernel, DeviceKernel compact_paths_kernel, DeviceKernel compact_kernel) | PathTraceWorkGPU | protected |
| compact_shadow_paths() | PathTraceWorkGPU | protected |
| compute_queued_paths(DeviceKernel kernel, DeviceKernel queued_kernel) | PathTraceWorkGPU | protected |
| compute_sorted_queued_paths(DeviceKernel queued_kernel, const int num_paths_limit) | PathTraceWorkGPU | protected |
| copy_from_denoised_render_buffers(const RenderBuffers *render_buffers) | PathTraceWork | |
| copy_from_render_buffers(const RenderBuffers *render_buffers) | PathTraceWork | |
| copy_render_buffers_from_device() override | PathTraceWorkGPU | virtual |
| copy_render_buffers_to_device() override | PathTraceWorkGPU | virtual |
| copy_to_display(PathTraceDisplay *display, PassMode pass_mode, int num_samples) override | PathTraceWorkGPU | virtual |
| copy_to_display_interop(PathTraceDisplay *display, PassMode pass_mode, int num_samples) | PathTraceWorkGPU | protected |
| copy_to_display_naive(PathTraceDisplay *display, PassMode pass_mode, int num_samples) | PathTraceWorkGPU | protected |
| copy_to_render_buffers(RenderBuffers *render_buffers) | PathTraceWork | |
| create(Device *device, Film *film, DeviceScene *device_scene, bool *cancel_requested_flag) | PathTraceWork | static |
| cryptomatte_postprocess() override | PathTraceWorkGPU | virtual |
| destroy_gpu_resources(PathTraceDisplay *display) override | PathTraceWorkGPU | virtual |
| device_ | PathTraceWork | protected |
| device_graphics_interop_ | PathTraceWorkGPU | protected |
| device_scene_ | PathTraceWork | protected |
| display_rgba_half_ | PathTraceWorkGPU | protected |
| effective_big_tile_params_ | PathTraceWork | protected |
| effective_buffer_params_ | PathTraceWork | protected |
| effective_full_params_ | PathTraceWork | protected |
| enqueue_adaptive_sampling_filter_x() | PathTraceWorkGPU | protected |
| enqueue_adaptive_sampling_filter_y() | PathTraceWorkGPU | protected |
| enqueue_path_iteration() | PathTraceWorkGPU | protected |
| enqueue_path_iteration(DeviceKernel kernel, const int num_paths_limit=INT_MAX) | PathTraceWorkGPU | protected |
| enqueue_reset() | PathTraceWorkGPU | protected |
| enqueue_work_tiles(bool &finished) | PathTraceWorkGPU | protected |
| enqueue_work_tiles(DeviceKernel kernel, const KernelWorkTile work_tiles[], const int num_work_tiles, const int num_active_paths, const int num_predicted_splits) | PathTraceWorkGPU | protected |
| film_ | PathTraceWork | protected |
| get_device() const | PathTraceWork | inline |
| get_display_destination_template(const PathTraceDisplay *display) const | PathTraceWork | protected |
| get_display_pass_access_info(PassMode pass_mode) const | PathTraceWork | protected |
| get_most_queued_kernel() const | PathTraceWorkGPU | protected |
| get_render_buffers() | PathTraceWork | |
| get_render_tile_film_pixels(const PassAccessor::Destination &destination, PassMode pass_mode, int num_samples) | PathTraceWorkGPU | protected |
| get_render_tile_pixels(const PassAccessor &pass_accessor, const PassAccessor::Destination &destination) | PathTraceWork | |
| has_multiple_works() const | PathTraceWork | |
| has_shadow_catcher() const | PathTraceWorkGPU | protected |
| init_execution() override | PathTraceWorkGPU | virtual |
| integrator_next_main_path_index_ | PathTraceWorkGPU | protected |
| integrator_next_shadow_path_index_ | PathTraceWorkGPU | protected |
| integrator_queue_counter_ | PathTraceWorkGPU | protected |
| integrator_shader_mnee_sort_counter_ | PathTraceWorkGPU | protected |
| integrator_shader_raytrace_sort_counter_ | PathTraceWorkGPU | protected |
| integrator_shader_sort_counter_ | PathTraceWorkGPU | protected |
| integrator_shader_sort_partition_key_offsets_ | PathTraceWorkGPU | protected |
| integrator_shader_sort_prefix_sum_ | PathTraceWorkGPU | protected |
| integrator_state_gpu_ | PathTraceWorkGPU | protected |
| integrator_state_soa_ | PathTraceWorkGPU | protected |
| integrator_state_soa_kernel_features_ | PathTraceWorkGPU | protected |
| integrator_state_soa_volume_stack_size_ | PathTraceWorkGPU | protected |
| interop_use_ | PathTraceWorkGPU | protected |
| interop_use_checked_ | PathTraceWorkGPU | protected |
| is_cancel_requested() const | PathTraceWork | inline |
| kernel_creates_ao_paths(DeviceKernel kernel) | PathTraceWorkGPU | protected |
| kernel_creates_shadow_paths(DeviceKernel kernel) | PathTraceWorkGPU | protected |
| kernel_is_shadow_path(DeviceKernel kernel) | PathTraceWorkGPU | protected |
| kernel_max_active_main_path_index(DeviceKernel kernel) | PathTraceWorkGPU | protected |
| kernel_uses_sorting(DeviceKernel kernel) | PathTraceWorkGPU | protected |
| max_active_main_path_index_ | PathTraceWorkGPU | protected |
| max_num_paths_ | PathTraceWorkGPU | protected |
| min_num_active_main_paths_ | PathTraceWorkGPU | protected |
| num_active_main_paths_paths() | PathTraceWorkGPU | protected |
| num_queued_paths_ | PathTraceWorkGPU | protected |
| num_sort_partitions_ | PathTraceWorkGPU | protected |
| PathTraceWork(Device *device, Film *film, DeviceScene *device_scene, bool *cancel_requested_flag) | PathTraceWork | protected |
| PathTraceWorkGPU(Device *device, Film *film, DeviceScene *device_scene, bool *cancel_requested_flag) | PathTraceWorkGPU | |
| queue_ | PathTraceWorkGPU | protected |
| queued_paths_ | PathTraceWorkGPU | protected |
| render_samples(RenderStatistics &statistics, int start_sample, int samples_num, int sample_offset) override | PathTraceWorkGPU | virtual |
| set_effective_buffer_params(const BufferParams &effective_full_params, const BufferParams &effective_big_tile_params, const BufferParams &effective_buffer_params) | PathTraceWork | |
| set_render_tile_pixels(PassAccessor &pass_accessor, const PassAccessor::Source &source) | PathTraceWork | |
| shadow_catcher_count_possible_splits() | PathTraceWorkGPU | protected |
| should_use_graphics_interop() | PathTraceWorkGPU | protected |
| work_tile_scheduler_ | PathTraceWorkGPU | protected |
| work_tiles_ | PathTraceWorkGPU | protected |
| zero_render_buffers() override | PathTraceWorkGPU | virtual |
| ~PathTraceWork() | PathTraceWork | virtual |