Blender V4.5
eevee_camera.cc
Go to the documentation of this file.
1/* SPDX-FileCopyrightText: 2021 Blender Authors
2 *
3 * SPDX-License-Identifier: GPL-2.0-or-later */
4
/** \file
 * \ingroup eevee
 */
9#include "BLI_bounds.hh"
10#include "BLI_math_matrix.hh"
11#include "BLI_rect.h"
12
13#include "DRW_render.hh"
14
15#include "DNA_camera_types.h"
16#include "DNA_view3d_types.h"
17
18#include "RE_engine.h"
19#include "RE_pipeline.h"
20#include "render_types.h"
21
22#include "eevee_camera.hh"
23#include "eevee_instance.hh"
24
25namespace blender::eevee {
26
27/* -------------------------------------------------------------------- */
/** \name Camera
 * \{ */
31void Camera::init()
32{
33 const Object *camera_eval = inst_.camera_eval_object;
34
35 CameraData &data = data_;
36
37 if (camera_eval && camera_eval->type == OB_CAMERA) {
38 const ::Camera *cam = reinterpret_cast<const ::Camera *>(camera_eval->data);
39 switch (cam->type) {
40 default:
41 case CAM_PERSP:
42 data.type = CAMERA_PERSP;
43 break;
44 case CAM_ORTHO:
45 data.type = CAMERA_ORTHO;
46 break;
47#if 0 /* TODO(fclem): Make fisheye properties inside blender. */
48 case CAM_PANO: {
49 switch (cam->panorama_type) {
50 default:
51 case CAM_PANO_EQUIRECTANGULAR:
53 break;
54 case CAM_PANO_FISHEYE_EQUIDISTANT:
56 break;
57 case CAM_PANO_FISHEYE_EQUISOLID:
59 break;
60 case CAM_PANO_MIRRORBALL:
62 break;
63 }
64 }
65#endif
66 }
67 }
68 else if (inst_.drw_view) {
69 data.type = inst_.drw_view->is_persp() ? CAMERA_PERSP : CAMERA_ORTHO;
70 }
71 else {
72 /* Light-probe baking. */
73 data.type = CAMERA_PERSP;
74 }
75
76 float overscan = 0.0f;
77 if ((inst_.scene->eevee.flag & SCE_EEVEE_OVERSCAN) && (inst_.drw_view || inst_.render)) {
78 overscan = inst_.scene->eevee.overscan / 100.0f;
79 if (inst_.drw_view && (inst_.rv3d->dist == 0.0f || v3d_camera_params_get().lens == 0.0f)) {
80 /* In these cases we need to use the v3d winmat as-is. */
81 overscan = 0.0f;
82 }
83 }
84 overscan_changed_ = assign_if_different(overscan_, overscan);
85 camera_changed_ = assign_if_different(last_camera_object_, inst_.camera_orig_object);
86}
87
88void Camera::sync()
89{
90 const Object *camera_eval = inst_.camera_eval_object;
91
92 CameraData &data = data_;
93
94 int2 display_extent = inst_.film.display_extent_get();
95 int2 film_extent = inst_.film.film_extent_get();
96 int2 film_offset = inst_.film.film_offset_get();
97 /* Over-scan in film pixel. Not the same as `render_overscan_get`. */
98 int film_overscan = Film::overscan_pixels_get(overscan_, film_extent);
99
100 rcti film_rect;
101 BLI_rcti_init(&film_rect,
102 film_offset.x,
103 film_offset.x + film_extent.x,
104 film_offset.y,
105 film_offset.y + film_extent.y);
106
107 Bounds<float2> uv_region = {float2(0.0f), float2(display_extent)};
108 if (inst_.drw_view) {
109 float2 uv_scale = float4(inst_.rv3d->viewcamtexcofac).xy();
110 float2 uv_bias = float4(inst_.rv3d->viewcamtexcofac).zw();
111 /* UV region inside the display extent reference frame. */
112 uv_region.min = (-uv_bias * float2(display_extent)) / uv_scale;
113 uv_region.max = uv_region.min + (float2(display_extent) / uv_scale);
114 }
115
116 data.uv_scale = float2(film_extent + film_overscan * 2) / uv_region.size();
117 data.uv_bias = (float2(film_offset - film_overscan) - uv_region.min) / uv_region.size();
118
119 if (inst_.is_baking()) {
120 /* Any view so that shadows and light culling works during irradiance bake. */
121 draw::View &view = inst_.volume_probes.bake.view_z_;
122 data.viewmat = view.viewmat();
123 data.viewinv = view.viewinv();
124 data.winmat = view.winmat();
125 data.type = CAMERA_ORTHO;
126
127 /* \note Follow camera parameters where distances are positive in front of the camera. */
128 data.clip_near = -view.far_clip();
129 data.clip_far = -view.near_clip();
130 data.fisheye_fov = data.fisheye_lens = -1.0f;
131 data.equirect_bias = float2(0.0f);
132 data.equirect_scale = float2(0.0f);
133 data.uv_scale = float2(1.0f);
134 data.uv_bias = float2(0.0f);
135 }
136 else if (inst_.drw_view) {
137 data.viewmat = inst_.drw_view->viewmat();
138 data.viewinv = inst_.drw_view->viewinv();
139
140 CameraParams params = v3d_camera_params_get();
141
142 if (inst_.rv3d->dist > 0.0f && params.lens > 0.0f) {
143 BKE_camera_params_compute_viewplane(&params, UNPACK2(display_extent), 1.0f, 1.0f);
144
145 BLI_assert(BLI_rctf_size_x(&params.viewplane) > 0.0f);
146 BLI_assert(BLI_rctf_size_y(&params.viewplane) > 0.0f);
147
148 BKE_camera_params_crop_viewplane(&params.viewplane, UNPACK2(display_extent), &film_rect);
149
151 params.clip_start,
152 params.clip_end,
153 params.viewplane,
154 overscan_,
155 data.winmat.ptr());
156 }
157 else {
158 /* Can happen for the case of XR or if `rv3d->dist == 0`.
159 * In this case the produced winmat is degenerate. So just revert to the input matrix. */
160 data.winmat = inst_.drw_view->winmat();
161 if (!camera_eval) {
162 /* Apply the render region, but only for non-camera views. See #153033. */
163 /* FIXME(@pragma37): This is still broken with Camera View + Render Region + Fly/Walk
164 * Navigation. Untangle this whole walk/fly navigation projection matrix mess. */
165 float2 film_center = float2(film_offset) + float2(film_extent) / 2.0f;
166 float2 uv_offset = float2(0.5f) - (film_center / float2(display_extent));
167 data.winmat = math::projection::translate(data.winmat, uv_offset * 2.0f);
168 data.winmat = math::from_scale<float4x4>(float4(1.0f / data.uv_scale, 1.0f, 1.0f)) *
169 data.winmat;
170 }
171 }
172
173 if (isnan(data.winmat.w.x)) {
174 /* Can happen in weird corner case (see #134320).
175 * Simply fall back to something that we can render with. */
176 data.winmat = math::projection::orthographic(0.01f, 0.01f, 0.01f, 0.01f, -1000.0f, +1000.0f);
177 }
178 }
179 else if (inst_.render) {
180 const Render *re = inst_.render->re;
181
182 RE_GetCameraWindow(inst_.render->re, camera_eval, data.winmat.ptr());
183
184 RE_GetCameraModelMatrix(re, camera_eval, data.viewinv.ptr());
185 data.viewmat = math::invert(data.viewinv);
186
187 rctf viewplane = re->viewplane;
188 BKE_camera_params_crop_viewplane(&viewplane, UNPACK2(display_extent), &film_rect);
189
191 re->clip_start,
192 re->clip_end,
193 viewplane,
194 overscan_,
195 data.winmat.ptr());
196 }
197 else {
198 data.viewmat = float4x4::identity();
199 data.viewinv = float4x4::identity();
200 data.winmat = math::projection::perspective(-0.1f, 0.1f, -0.1f, 0.1f, 0.1f, 1.0f);
201 }
202
203 data.wininv = math::invert(data.winmat);
204 data.persmat = data.winmat * data.viewmat;
205 data.persinv = math::invert(data.persmat);
206
207 is_camera_object_ = false;
208 if (camera_eval && camera_eval->type == OB_CAMERA) {
209 const ::Camera *cam = reinterpret_cast<const ::Camera *>(camera_eval->data);
210 data.clip_near = cam->clip_start;
211 data.clip_far = cam->clip_end;
212#if 0 /* TODO(fclem): Make fisheye properties inside blender. */
213 data.fisheye_fov = cam->fisheye_fov;
214 data.fisheye_lens = cam->fisheye_lens;
215 data.equirect_bias.x = -cam->longitude_min + M_PI_2;
216 data.equirect_bias.y = -cam->latitude_min + M_PI_2;
217 data.equirect_scale.x = cam->longitude_min - cam->longitude_max;
218 data.equirect_scale.y = cam->latitude_min - cam->latitude_max;
219 /* Combine with uv_scale/bias to avoid doing extra computation. */
220 data.equirect_bias += data.uv_bias * data.equirect_scale;
221 data.equirect_scale *= data.uv_scale;
222
223 data.equirect_scale_inv = 1.0f / data.equirect_scale;
224#else
225 data.fisheye_fov = data.fisheye_lens = -1.0f;
226 data.equirect_bias = float2(0.0f);
227 data.equirect_scale = float2(0.0f);
228#endif
229 is_camera_object_ = true;
230 }
231 else if (inst_.drw_view) {
232 /* \note Follow camera parameters where distances are positive in front of the camera. */
233 data.clip_near = -inst_.drw_view->near_clip();
234 data.clip_far = -inst_.drw_view->far_clip();
235 data.fisheye_fov = data.fisheye_lens = -1.0f;
236 data.equirect_bias = float2(0.0f);
237 data.equirect_scale = float2(0.0f);
238 }
239
240 data_.initialized = true;
241
242 update_bounds();
243}
244
245void Camera::update_bounds()
246{
247 float left, right, bottom, top, near, far;
248 projmat_dimensions(data_.winmat.ptr(), &left, &right, &bottom, &top, &near, &far);
249
250 BoundBox bbox;
251 bbox.vec[0][2] = bbox.vec[3][2] = bbox.vec[7][2] = bbox.vec[4][2] = -near;
252 bbox.vec[0][0] = bbox.vec[3][0] = left;
253 bbox.vec[4][0] = bbox.vec[7][0] = right;
254 bbox.vec[0][1] = bbox.vec[4][1] = bottom;
255 bbox.vec[7][1] = bbox.vec[3][1] = top;
256
257 /* Get the coordinates of the far plane. */
258 if (!this->is_orthographic()) {
259 float sca_far = far / near;
260 left *= sca_far;
261 right *= sca_far;
262 bottom *= sca_far;
263 top *= sca_far;
264 }
265
266 bbox.vec[1][2] = bbox.vec[2][2] = bbox.vec[6][2] = bbox.vec[5][2] = -far;
267 bbox.vec[1][0] = bbox.vec[2][0] = left;
268 bbox.vec[6][0] = bbox.vec[5][0] = right;
269 bbox.vec[1][1] = bbox.vec[5][1] = bottom;
270 bbox.vec[2][1] = bbox.vec[6][1] = top;
271
272 bound_sphere.center = {0.0f, 0.0f, 0.0f};
273 bound_sphere.radius = 0.0f;
274
275 for (auto i : IndexRange(8)) {
276 bound_sphere.center += float3(bbox.vec[i]);
277 }
278 bound_sphere.center /= 8.0f;
279 for (auto i : IndexRange(8)) {
280 float dist_sqr = math::distance_squared(bound_sphere.center, float3(bbox.vec[i]));
281 bound_sphere.radius = max_ff(bound_sphere.radius, dist_sqr);
282 }
283 bound_sphere.radius = sqrtf(bound_sphere.radius);
284
285 /* Transform into world space. */
286 bound_sphere.center = math::transform_point(data_.viewinv, bound_sphere.center);
287
288 /* Compute diagonal length. */
289 float2 p0 = float2(bbox.vec[0]) / (this->is_perspective() ? bbox.vec[0][2] : 1.0f);
290 float2 p1 = float2(bbox.vec[7]) / (this->is_perspective() ? bbox.vec[7][2] : 1.0f);
292}
293
294CameraParams Camera::v3d_camera_params_get() const
295{
296 BLI_assert(inst_.drw_view);
297
300
301 if (inst_.rv3d->persp == RV3D_CAMOB && inst_.is_viewport_image_render) {
302 /* We are rendering camera view, no need for pan/zoom params from viewport. */
304 }
305 else {
307 }
308
309 return params;
310}
311
/** \} */
314} // namespace blender::eevee
void BKE_camera_params_init(CameraParams *params)
void BKE_camera_params_crop_viewplane(rctf *viewplane, int winx, int winy, const rcti *region)
void BKE_camera_params_from_view3d(CameraParams *params, const struct Depsgraph *depsgraph, const struct View3D *v3d, const struct RegionView3D *rv3d)
void BKE_camera_params_from_object(CameraParams *params, const struct Object *cam_ob)
void BKE_camera_params_compute_viewplane(CameraParams *params, int winx, int winy, float aspx, float aspy)
struct CameraParams CameraParams
#define BLI_assert(a)
Definition BLI_assert.h:46
MINLINE float max_ff(float a, float b)
#define M_PI_2
void projmat_dimensions(const float winmat[4][4], float *r_left, float *r_right, float *r_bottom, float *r_top, float *r_near, float *r_far)
void BLI_rcti_init(struct rcti *rect, int xmin, int xmax, int ymin, int ymax)
Definition rct.cc:414
BLI_INLINE float BLI_rctf_size_x(const struct rctf *rct)
Definition BLI_rect.h:202
BLI_INLINE float BLI_rctf_size_y(const struct rctf *rct)
Definition BLI_rect.h:206
#define UNPACK2(a)
@ CAM_PERSP
@ CAM_PANO
@ CAM_ORTHO
@ OB_CAMERA
@ SCE_EEVEE_OVERSCAN
@ RV3D_CAMOB
static AppView * view
BMesh const char void * data
bool is_orthographic() const
Camera(Instance &inst, CameraData &data)
bool is_perspective() const
static int overscan_pixels_get(float overscan, int2 extent)
const RegionView3D * rv3d
#define sqrtf(x)
uint top
#define isnan
void RE_GetCameraModelMatrix(const Render *re, const Object *camera, float r_modelmat[4][4])
void RE_GetCameraWindow(Render *re, const Object *camera, float r_winmat[4][4])
void RE_GetWindowMatrixWithOverscan(bool is_ortho, float clip_start, float clip_end, rctf viewplane, float overscan, float r_winmat[4][4])
uiWidgetBaseParameters params[MAX_WIDGET_BASE_BATCH]
static int left
MatBase< T, 4, 4 > orthographic(T left, T right, T bottom, T top, T near_clip, T far_clip)
Create an orthographic projection matrix using OpenGL coordinate convention: Maps each axis range to ...
MatBase< T, 4, 4 > translate(const MatBase< T, 4, 4 > &mat, const VecBase< T, 2 > &offset)
Translate a projection matrix after creation in the screen plane. Usually used for anti-aliasing jitt...
MatBase< T, 4, 4 > perspective(T left, T right, T bottom, T top, T near_clip, T far_clip)
Create a perspective projection matrix using OpenGL coordinate convention: Maps each axis range to [-...
T distance(const T &a, const T &b)
CartesianBasis invert(const CartesianBasis &basis)
MatT from_scale(const VecBase< typename MatT::base_type, ScaleDim > &scale)
T distance_squared(const VecBase< T, Size > &a, const VecBase< T, Size > &b)
VecBase< T, 3 > transform_point(const CartesianBasis &basis, const VecBase< T, 3 > &v)
bool assign_if_different(T &old_value, T new_value)
VecBase< float, 4 > float4
VecBase< int32_t, 2 > int2
VecBase< float, 2 > float2
VecBase< float, 3 > float3
float vec[8][3]
float clip_start
float clip_end
rctf viewplane
const c_style_mat & ptr() const
VecBase< T, 2 > zw() const
VecBase< T, 2 > xy() const
i
Definition text_draw.cc:230