/* eevee_camera.cc — Blender 4.3 EEVEE camera module.
 * NOTE(review): recovered from a generated documentation page; Doxygen navigation text
 * and per-line numbers have been stripped. Confirm against the upstream repository. */
/* SPDX-FileCopyrightText: 2021 Blender Authors
 *
 * SPDX-License-Identifier: GPL-2.0-or-later */

#include <array>

#include "DRW_render.hh"

#include "BLI_bounds.hh"

#include "DNA_camera_types.h"
#include "DNA_view3d_types.h"

#include "BKE_camera.h"

#include "ED_view3d.hh"

#include "RE_pipeline.h"
#include "render_types.h"

#include "eevee_camera.hh"
#include "eevee_instance.hh"

30namespace blender::eevee {
31
/* -------------------------------------------------------------------- */
36void Camera::init()
37{
38 const Object *camera_eval = inst_.camera_eval_object;
39
40 CameraData &data = data_;
41
42 if (camera_eval && camera_eval->type == OB_CAMERA) {
43 const ::Camera *cam = reinterpret_cast<const ::Camera *>(camera_eval->data);
44 switch (cam->type) {
45 default:
46 case CAM_PERSP:
47 data.type = CAMERA_PERSP;
48 break;
49 case CAM_ORTHO:
50 data.type = CAMERA_ORTHO;
51 break;
52#if 0 /* TODO(fclem): Make fisheye properties inside blender. */
53 case CAM_PANO: {
54 switch (cam->panorama_type) {
55 default:
56 case CAM_PANO_EQUIRECTANGULAR:
57 data.type = CAMERA_PANO_EQUIRECT;
58 break;
59 case CAM_PANO_FISHEYE_EQUIDISTANT:
60 data.type = CAMERA_PANO_EQUIDISTANT;
61 break;
62 case CAM_PANO_FISHEYE_EQUISOLID:
63 data.type = CAMERA_PANO_EQUISOLID;
64 break;
65 case CAM_PANO_MIRRORBALL:
66 data.type = CAMERA_PANO_MIRROR;
67 break;
68 }
69 }
70#endif
71 }
72 }
73 else if (inst_.drw_view) {
75 }
76 else {
77 /* Light-probe baking. */
78 data.type = CAMERA_PERSP;
79 }
80
81 float overscan = 0.0f;
82 if ((inst_.scene->eevee.flag & SCE_EEVEE_OVERSCAN) && (inst_.drw_view || inst_.render)) {
83 overscan = inst_.scene->eevee.overscan / 100.0f;
84 }
85 overscan_changed_ = assign_if_different(overscan_, overscan);
86 camera_changed_ = assign_if_different(last_camera_object_, inst_.camera_orig_object);
87}
88
89void Camera::sync()
90{
91 const Object *camera_eval = inst_.camera_eval_object;
92
93 CameraData &data = data_;
94
95 int2 display_extent = inst_.film.display_extent_get();
96 int2 film_extent = inst_.film.film_extent_get();
97 int2 film_offset = inst_.film.film_offset_get();
98 /* Over-scan in film pixel. Not the same as `render_overscan_get`. */
99 int film_overscan = inst_.film.overscan_pixels_get(overscan_, film_extent);
100
101 rcti film_rect;
102 BLI_rcti_init(&film_rect,
103 film_offset.x,
104 film_offset.x + film_extent.x,
105 film_offset.y,
106 film_offset.y + film_extent.y);
107
108 Bounds<float2> uv_region = {float2(0.0f), float2(display_extent)};
109 if (inst_.drw_view) {
110 float2 uv_scale = float4(inst_.rv3d->viewcamtexcofac).xy();
111 float2 uv_bias = float4(inst_.rv3d->viewcamtexcofac).zw();
112 /* UV region inside the display extent reference frame. */
113 uv_region.min = (-uv_bias * float2(display_extent)) / uv_scale;
114 uv_region.max = uv_region.min + (float2(display_extent) / uv_scale);
115 }
116
117 data.uv_scale = float2(film_extent + film_overscan * 2) / uv_region.size();
118 data.uv_bias = (float2(film_offset - film_overscan) - uv_region.min) / uv_region.size();
119
120 if (inst_.is_baking()) {
121 /* Any view so that shadows and light culling works during irradiance bake. */
122 draw::View &view = inst_.volume_probes.bake.view_z_;
123 data.viewmat = view.viewmat();
124 data.viewinv = view.viewinv();
125 data.winmat = view.winmat();
126 data.type = CAMERA_ORTHO;
127
128 /* \note Follow camera parameters where distances are positive in front of the camera. */
129 data.clip_near = -view.far_clip();
130 data.clip_far = -view.near_clip();
131 data.fisheye_fov = data.fisheye_lens = -1.0f;
132 data.equirect_bias = float2(0.0f);
133 data.equirect_scale = float2(0.0f);
134 data.uv_scale = float2(1.0f);
135 data.uv_bias = float2(0.0f);
136 }
137 else if (inst_.drw_view) {
138 DRW_view_viewmat_get(inst_.drw_view, data.viewmat.ptr(), false);
139 DRW_view_viewmat_get(inst_.drw_view, data.viewinv.ptr(), true);
140
143
145 /* We are rendering camera view, no need for pan/zoom params from viewport.*/
147 }
148 else {
150 }
151
152 BKE_camera_params_compute_viewplane(&params, UNPACK2(display_extent), 1.0f, 1.0f);
153
154 BKE_camera_params_crop_viewplane(&params.viewplane, UNPACK2(display_extent), &film_rect);
155
157 params.clip_start,
158 params.clip_end,
159 params.viewplane,
160 overscan_,
161 data.winmat.ptr());
162
163 if (params.lens == 0.0f) {
164 /* Can happen for the case of XR.
165 * In this case the produced winmat is degenerate. So just revert to the input matrix. */
166 DRW_view_winmat_get(inst_.drw_view, data.winmat.ptr(), false);
167 }
168 }
169 else if (inst_.render) {
170 const Render *re = inst_.render->re;
171
172 RE_GetCameraWindow(inst_.render->re, camera_eval, data.winmat.ptr());
173
174 RE_GetCameraModelMatrix(re, camera_eval, data.viewinv.ptr());
175 data.viewmat = math::invert(data.viewinv);
176
177 rctf viewplane = re->viewplane;
178 BKE_camera_params_crop_viewplane(&viewplane, UNPACK2(display_extent), &film_rect);
179
181 re->clip_start,
182 re->clip_end,
183 viewplane,
184 overscan_,
185 data.winmat.ptr());
186 }
187 else {
188 data.viewmat = float4x4::identity();
189 data.viewinv = float4x4::identity();
190 data.winmat = math::projection::perspective(-0.1f, 0.1f, -0.1f, 0.1f, 0.1f, 1.0f);
191 }
192
193 data.wininv = math::invert(data.winmat);
194 data.persmat = data.winmat * data.viewmat;
195 data.persinv = math::invert(data.persmat);
196
197 is_camera_object_ = false;
198 if (camera_eval && camera_eval->type == OB_CAMERA) {
199 const ::Camera *cam = reinterpret_cast<const ::Camera *>(camera_eval->data);
200 data.clip_near = cam->clip_start;
201 data.clip_far = cam->clip_end;
202#if 0 /* TODO(fclem): Make fisheye properties inside blender. */
203 data.fisheye_fov = cam->fisheye_fov;
204 data.fisheye_lens = cam->fisheye_lens;
205 data.equirect_bias.x = -cam->longitude_min + M_PI_2;
206 data.equirect_bias.y = -cam->latitude_min + M_PI_2;
207 data.equirect_scale.x = cam->longitude_min - cam->longitude_max;
208 data.equirect_scale.y = cam->latitude_min - cam->latitude_max;
209 /* Combine with uv_scale/bias to avoid doing extra computation. */
210 data.equirect_bias += data.uv_bias * data.equirect_scale;
211 data.equirect_scale *= data.uv_scale;
212
213 data.equirect_scale_inv = 1.0f / data.equirect_scale;
214#else
215 data.fisheye_fov = data.fisheye_lens = -1.0f;
216 data.equirect_bias = float2(0.0f);
217 data.equirect_scale = float2(0.0f);
218#endif
219 is_camera_object_ = true;
220 }
221 else if (inst_.drw_view) {
222 /* \note Follow camera parameters where distances are positive in front of the camera. */
223 data.clip_near = -DRW_view_near_distance_get(inst_.drw_view);
224 data.clip_far = -DRW_view_far_distance_get(inst_.drw_view);
225 data.fisheye_fov = data.fisheye_lens = -1.0f;
226 data.equirect_bias = float2(0.0f);
227 data.equirect_scale = float2(0.0f);
228 }
229
230 data_.initialized = true;
231
232 update_bounds();
233}
234
235void Camera::update_bounds()
236{
237 float left, right, bottom, top, near, far;
238 projmat_dimensions(data_.winmat.ptr(), &left, &right, &bottom, &top, &near, &far);
239
240 BoundBox bbox;
241 bbox.vec[0][2] = bbox.vec[3][2] = bbox.vec[7][2] = bbox.vec[4][2] = -near;
242 bbox.vec[0][0] = bbox.vec[3][0] = left;
243 bbox.vec[4][0] = bbox.vec[7][0] = right;
244 bbox.vec[0][1] = bbox.vec[4][1] = bottom;
245 bbox.vec[7][1] = bbox.vec[3][1] = top;
246
247 /* Get the coordinates of the far plane. */
248 if (!this->is_orthographic()) {
249 float sca_far = far / near;
250 left *= sca_far;
251 right *= sca_far;
252 bottom *= sca_far;
253 top *= sca_far;
254 }
255
256 bbox.vec[1][2] = bbox.vec[2][2] = bbox.vec[6][2] = bbox.vec[5][2] = -far;
257 bbox.vec[1][0] = bbox.vec[2][0] = left;
258 bbox.vec[6][0] = bbox.vec[5][0] = right;
259 bbox.vec[1][1] = bbox.vec[5][1] = bottom;
260 bbox.vec[2][1] = bbox.vec[6][1] = top;
261
262 bound_sphere.center = {0.0f, 0.0f, 0.0f};
263 bound_sphere.radius = 0.0f;
264
265 for (auto i : IndexRange(8)) {
266 bound_sphere.center += float3(bbox.vec[i]);
267 }
268 bound_sphere.center /= 8.0f;
269 for (auto i : IndexRange(8)) {
270 float dist_sqr = math::distance_squared(bound_sphere.center, float3(bbox.vec[i]));
271 bound_sphere.radius = max_ff(bound_sphere.radius, dist_sqr);
272 }
273 bound_sphere.radius = sqrtf(bound_sphere.radius);
274
275 /* Transform into world space. */
276 bound_sphere.center = math::transform_point(data_.viewinv, bound_sphere.center);
277
278 /* Compute diagonal length. */
279 float2 p0 = float2(bbox.vec[0]) / (this->is_perspective() ? bbox.vec[0][2] : 1.0f);
280 float2 p1 = float2(bbox.vec[7]) / (this->is_perspective() ? bbox.vec[7][2] : 1.0f);
282}
283
286} // namespace blender::eevee
Camera data-block and utility functions.
void BKE_camera_params_init(CameraParams *params)
void BKE_camera_params_crop_viewplane(rctf *viewplane, int winx, int winy, const rcti *region)
void BKE_camera_params_from_view3d(CameraParams *params, const struct Depsgraph *depsgraph, const struct View3D *v3d, const struct RegionView3D *rv3d)
void BKE_camera_params_from_object(CameraParams *params, const struct Object *cam_ob)
void BKE_camera_params_compute_viewplane(CameraParams *params, int winx, int winy, float aspx, float aspy)
MINLINE float max_ff(float a, float b)
#define M_PI_2
void projmat_dimensions(const float winmat[4][4], float *r_left, float *r_right, float *r_bottom, float *r_top, float *r_near, float *r_far)
void BLI_rcti_init(struct rcti *rect, int xmin, int xmax, int ymin, int ymax)
Definition rct.c:418
#define UNPACK2(a)
@ CAM_PERSP
@ CAM_PANO
@ CAM_ORTHO
@ OB_CAMERA
@ SCE_EEVEE_OVERSCAN
@ RV3D_CAMOB
const float4x4 & viewmat(int view_id=0) const
Definition draw_view.hh:133
bool is_orthographic() const
bool is_perspective() const
int2 film_offset_get() const
int2 display_extent_get() const
int2 film_extent_get() const
static int overscan_pixels_get(float overscan, int2 extent)
VolumeProbeModule volume_probes
const RegionView3D * rv3d
#define sqrtf(x)
bool DRW_state_is_viewport_image_render()
float DRW_view_near_distance_get(const DRWView *view)
bool DRW_view_is_persp_get(const DRWView *view)
void DRW_view_winmat_get(const DRWView *view, float mat[4][4], bool inverse)
float DRW_view_far_distance_get(const DRWView *view)
void DRW_view_viewmat_get(const DRWView *view, float mat[4][4], bool inverse)
uint top
void RE_GetCameraModelMatrix(const Render *re, const Object *camera, float r_modelmat[4][4])
void RE_GetCameraWindow(Render *re, const Object *camera, float r_winmat[4][4])
void RE_GetWindowMatrixWithOverscan(bool is_ortho, float clip_start, float clip_end, rctf viewplane, float overscan, float r_winmat[4][4])
uiWidgetBaseParameters params[MAX_WIDGET_BASE_BATCH]
static int left
MatBase< T, 4, 4 > perspective(T left, T right, T bottom, T top, T near_clip, T far_clip)
Create a perspective projection matrix using OpenGL coordinate convention: Maps each axis range to [-...
T distance(const T &a, const T &b)
CartesianBasis invert(const CartesianBasis &basis)
T distance_squared(const VecBase< T, Size > &a, const VecBase< T, Size > &b)
VecBase< T, 3 > transform_point(const CartesianBasis &basis, const VecBase< T, 3 > &v)
bool assign_if_different(T &old_value, T new_value)
VecBase< float, 4 > float4
VecBase< float, 2 > float2
VecBase< float, 3 > float3
float vec[8][3]
float viewcamtexcofac[4]
struct Render * re
Definition RE_engine.h:141
float clip_start
float clip_end
rctf viewplane
struct SceneEEVEE eevee
const c_style_mat & ptr() const
VecBase< T, 2 > zw() const
VecBase< T, 2 > xy() const