Blender V5.0
usd_reader_camera.cc
Go to the documentation of this file.
1/* SPDX-FileCopyrightText: 2021 Tangent Animation. All rights reserved.
2 * SPDX-FileCopyrightText: 2023 Blender Authors
3 *
4 * SPDX-License-Identifier: GPL-2.0-or-later
5 *
6 * Adapted from the Blender Alembic importer implementation. */
7
10
11#include "ANIM_action.hh"
12#include "ANIM_animdata.hh"
13
14#include "BLI_math_base.h"
15
16#include "BKE_camera.h"
17#include "BKE_fcurve.hh"
18#include "BKE_object.hh"
19
20#include "DNA_camera_types.h"
21#include "DNA_object_types.h"
22
23#include <pxr/usd/usdGeom/camera.h>
24
25#include <array>
26#include <optional>
27
28namespace blender::io::usd {
29
30namespace {
31
/* A single time sample of a USD attribute: the frame it occurs on and its value. */
template<typename T> struct SampleData {
  /* Sample time, in frames (narrowed from the USD double time-code). */
  float frame;
  /* Attribute value at #frame. */
  T value;
};
36
/* Holder for everything read from one USD attribute: the value at the initial
 * import time (if any) plus all time samples (empty when not animated). */
template<typename T> struct AttributeData {
  /* Value at the requested initial time; std::nullopt when the attribute had no value there. */
  std::optional<T> initial_value = std::nullopt;
  /* All time samples, in time order; empty for non-animated attributes. */
  Vector<SampleData<T>> samples;

  /* Return to the freshly-constructed state so the struct can be reused across attributes. */
  void reset()
  {
    initial_value = std::nullopt;
    samples.clear();
  }
};
47
48template<typename T>
49bool read_attribute_values(const pxr::UsdAttribute &attr,
50 const pxr::UsdTimeCode initial_time,
51 AttributeData<T> &data)
52{
53 data.reset(); /* Clear any prior data. */
54
55 T value{};
56 if (attr.Get(&value, initial_time)) {
57 data.initial_value = value;
58 }
59 else {
60 data.initial_value = std::nullopt;
61 }
62
63 if (attr.ValueMightBeTimeVarying()) {
64 std::vector<double> times;
65 attr.GetTimeSamples(&times);
66
67 data.samples.resize(times.size());
68 for (int64_t i = 0; i < times.size(); i++) {
69 data.samples[i].frame = float(times[i]);
70 attr.Get(&data.samples[i].value, times[i]);
71 }
72 }
73
74 return data.initial_value.has_value() || !data.samples.is_empty();
75}
76
77void read_aperture_data(Camera *camera,
78 const pxr::UsdAttribute &usd_horiz_aperture,
79 const pxr::UsdAttribute &usd_vert_aperture,
80 const pxr::UsdAttribute &usd_horiz_offset,
81 const pxr::UsdAttribute &usd_vert_offset,
82 const pxr::UsdTimeCode initial_time,
83 const double tenth_unit_to_millimeters,
84 animrig::Channelbag &channelbag)
85{
86 /* If the Aperture values are changing, that effects the sensor_fit and shift_x|y values as
87 * well. We need to put animation data on all of them. */
88 if (usd_horiz_aperture.ValueMightBeTimeVarying() || usd_vert_aperture.ValueMightBeTimeVarying())
89 {
90 std::vector<double> times;
91 pxr::UsdAttribute::GetUnionedTimeSamples(
92 {usd_horiz_aperture, usd_vert_aperture, usd_horiz_offset, usd_vert_offset}, &times);
93
94 std::array<FCurve *, 5> curves = {
95 create_fcurve(channelbag, {"sensor_width", 0}, times.size()),
96 create_fcurve(channelbag, {"sensor_height", 0}, times.size()),
97 create_fcurve(channelbag, {"sensor_fit", 0}, times.size()),
98 create_fcurve(channelbag, {"shift_x", 0}, times.size()),
99 create_fcurve(channelbag, {"shift_y", 0}, times.size())};
100
101 for (int64_t i = 0; i < times.size(); i++) {
102 const double time = times[i];
103
104 float horiz_aperture, vert_aperture;
105 float shift_x, shift_y;
106 usd_horiz_aperture.Get(&horiz_aperture, time);
107 usd_vert_aperture.Get(&vert_aperture, time);
108 usd_horiz_offset.Get(&shift_x, time);
109 usd_vert_offset.Get(&shift_y, time);
110
111 const float sensor_x = horiz_aperture * tenth_unit_to_millimeters;
112 const float sensor_y = vert_aperture * tenth_unit_to_millimeters;
113 const char sensor_fit = horiz_aperture >= vert_aperture ? CAMERA_SENSOR_FIT_HOR :
115
116 const float sensor_size = sensor_x >= sensor_y ? sensor_x : sensor_y;
117 shift_x = (shift_x * tenth_unit_to_millimeters) / sensor_size;
118 shift_y = (shift_y * tenth_unit_to_millimeters) / sensor_size;
119
120 set_fcurve_sample(curves[0], i, float(time), sensor_x);
121 set_fcurve_sample(curves[1], i, float(time), sensor_y);
122 set_fcurve_sample(curves[2], i, float(time), sensor_fit);
123 set_fcurve_sample(curves[3], i, float(time), shift_x);
124 set_fcurve_sample(curves[4], i, float(time), shift_y);
125 }
126 }
127 else if (usd_horiz_offset.ValueMightBeTimeVarying() || usd_vert_offset.ValueMightBeTimeVarying())
128 {
129 /* Only the shift_x|y values are changing. Load in the initial values for aperture and
130 * sensor_fit and use those when setting the shift_x|y curves. */
131 float horiz_aperture, vert_aperture;
132 usd_horiz_aperture.Get(&horiz_aperture, initial_time);
133 usd_vert_aperture.Get(&vert_aperture, initial_time);
134
135 camera->sensor_x = horiz_aperture * tenth_unit_to_millimeters;
136 camera->sensor_y = vert_aperture * tenth_unit_to_millimeters;
137 camera->sensor_fit = camera->sensor_x >= camera->sensor_y ? CAMERA_SENSOR_FIT_HOR :
139 const float sensor_size = camera->sensor_x >= camera->sensor_y ? camera->sensor_x :
140 camera->sensor_y;
141
142 std::vector<double> times;
143 if (usd_horiz_offset.GetTimeSamples(&times)) {
144 FCurve *fcu = create_fcurve(channelbag, {"shift_x", 0}, times.size());
145 for (int64_t i = 0; i < times.size(); i++) {
146 const double time = times[i];
147 float shift;
148 usd_horiz_offset.Get(&shift, time);
149
150 shift = (shift * tenth_unit_to_millimeters) / sensor_size;
151 set_fcurve_sample(fcu, i, float(time), shift);
152 }
153 }
154
155 if (usd_vert_offset.GetTimeSamples(&times)) {
156 FCurve *fcu = create_fcurve(channelbag, {"shift_y", 0}, times.size());
157 for (int64_t i = 0; i < times.size(); i++) {
158 const double time = times[i];
159 float shift;
160 usd_vert_offset.Get(&shift, time);
161
162 shift = (shift * tenth_unit_to_millimeters) / sensor_size;
163 set_fcurve_sample(fcu, i, float(time), shift);
164 }
165 }
166 }
167 else {
168 /* No animation data. */
169 float horiz_aperture, vert_aperture;
170 float shift_x, shift_y;
171 usd_horiz_aperture.Get(&horiz_aperture, initial_time);
172 usd_vert_aperture.Get(&vert_aperture, initial_time);
173 usd_horiz_offset.Get(&shift_x, initial_time);
174 usd_vert_offset.Get(&shift_y, initial_time);
175
176 camera->sensor_x = horiz_aperture * tenth_unit_to_millimeters;
177 camera->sensor_y = vert_aperture * tenth_unit_to_millimeters;
178 camera->sensor_fit = camera->sensor_x >= camera->sensor_y ? CAMERA_SENSOR_FIT_HOR :
180 const float sensor_size = camera->sensor_x >= camera->sensor_y ? camera->sensor_x :
181 camera->sensor_y;
182 camera->shiftx = (shift_x * tenth_unit_to_millimeters) / sensor_size;
183 camera->shifty = (shift_y * tenth_unit_to_millimeters) / sensor_size;
184 }
185}
186
187} // namespace
188
190{
191 Camera *camera = BKE_camera_add(bmain, name_.c_str());
192
194 object_->data = camera;
195}
196
197void USDCameraReader::read_object_data(Main *bmain, const pxr::UsdTimeCode time)
198{
199 pxr::UsdAttribute usd_focal_length = cam_prim_.GetFocalLengthAttr();
200 pxr::UsdAttribute usd_focus_dist = cam_prim_.GetFocusDistanceAttr();
201 pxr::UsdAttribute usd_fstop = cam_prim_.GetFStopAttr();
202 pxr::UsdAttribute usd_clipping_range = cam_prim_.GetClippingRangeAttr();
203 pxr::UsdAttribute usd_horiz_aperture = cam_prim_.GetHorizontalApertureAttr();
204 pxr::UsdAttribute usd_vert_aperture = cam_prim_.GetVerticalApertureAttr();
205 pxr::UsdAttribute usd_horiz_offset = cam_prim_.GetHorizontalApertureOffsetAttr();
206 pxr::UsdAttribute usd_vert_offset = cam_prim_.GetVerticalApertureOffsetAttr();
207
208 /* If any of the camera attributes are time varying, then prepare the animation data. */
209 const bool is_time_varying = usd_focal_length.ValueMightBeTimeVarying() ||
210 usd_focus_dist.ValueMightBeTimeVarying() ||
211 usd_fstop.ValueMightBeTimeVarying() ||
212 usd_clipping_range.ValueMightBeTimeVarying() ||
213 usd_horiz_aperture.ValueMightBeTimeVarying() ||
214 usd_vert_aperture.ValueMightBeTimeVarying() ||
215 usd_horiz_offset.ValueMightBeTimeVarying() ||
216 usd_vert_offset.ValueMightBeTimeVarying();
217
218 Camera *camera = (Camera *)object_->data;
219
220 bAction *action = nullptr;
221 if (is_time_varying) {
222 action = blender::animrig::id_action_ensure(bmain, &camera->id);
223 }
224
225 animrig::Channelbag empty{};
226 animrig::Channelbag &channelbag = is_time_varying ?
227 animrig::action_channelbag_ensure(*action, camera->id) :
228 empty;
229
230 /*
231 * In USD, some camera properties are in tenths of a world unit.
232 * https://graphics.pixar.com/usd/release/api/class_usd_geom_camera.html#UsdGeom_CameraUnits
233 *
234 * tenth_unit_to_meters = stage_meters_per_unit / 10
235 * tenth_unit_to_millimeters = 1000 * tenth_unit_to_meters
236 * = 100 * stage_meters_per_unit
237 */
238 const double tenth_unit_to_millimeters = 100.0 * settings_->stage_meters_per_unit;
239 auto scale_default = [](std::optional<float> input, double scale, float default_value) {
240 return input.has_value() ? input.value() * scale : default_value;
241 };
242
243 AttributeData<float> data;
244 if (read_attribute_values(usd_focal_length, time, data)) {
245 camera->lens = scale_default(data.initial_value, tenth_unit_to_millimeters, camera->lens);
246
247 if (!data.samples.is_empty()) {
248 FCurve *fcu = create_fcurve(channelbag, {"lens", 0}, data.samples.size());
249 for (int64_t i = 0; i < data.samples.size(); i++) {
250 const SampleData<float> &sample = data.samples[i];
251 set_fcurve_sample(fcu, i, sample.frame, sample.value * tenth_unit_to_millimeters);
252 }
253 }
254 }
255
256 if (read_attribute_values(usd_focus_dist, time, data)) {
257 camera->dof.focus_distance = scale_default(
258 data.initial_value, this->settings_->scene_scale, camera->dof.focus_distance);
259
260 if (!data.samples.is_empty()) {
261 FCurve *fcu = create_fcurve(channelbag, {"dof.focus_distance", 0}, data.samples.size());
262 for (int64_t i = 0; i < data.samples.size(); i++) {
263 const SampleData<float> &sample = data.samples[i];
264 set_fcurve_sample(fcu, i, sample.frame, sample.value * this->settings_->scene_scale);
265 }
266 }
267 }
268
269 /* The FStop controls camera focusing and values of 0.0 should disable DOF.
270 * https://openusd.org/release/api/class_usd_geom_camera.html#a335e1647b730a575e3c0565e91eb8d49
271 */
272 if (read_attribute_values(usd_fstop, time, data)) {
273 camera->dof.aperture_fstop = scale_default(data.initial_value, 1, camera->dof.aperture_fstop);
274 camera->dof.flag |= data.initial_value.value_or(0.0f) != 0.0f ? CAM_DOF_ENABLED : 0;
275
276 if (!data.samples.is_empty()) {
277 FCurve *curve1 = create_fcurve(channelbag, {"dof.aperture_fstop", 0}, data.samples.size());
278 FCurve *curve2 = create_fcurve(channelbag, {"dof.use_dof", 0}, data.samples.size());
279 for (int64_t i = 0; i < data.samples.size(); i++) {
280 const SampleData<float> &sample = data.samples[i];
281 const bool use_dof = sample.value != 0.0f;
282 set_fcurve_sample(curve1, i, sample.frame, sample.value);
283 set_fcurve_sample(curve2, i, sample.frame, use_dof);
284 }
285 }
286 }
287
288 AttributeData<pxr::GfVec2f> clip_data;
289 if (read_attribute_values(usd_clipping_range, time, clip_data)) {
290 auto clamp_clip = [this](pxr::GfVec2f value) {
291 /* Clamp the value for clip-start, matching the range defined in RNA. */
292 return pxr::GfVec2f(max_ff(1e-6f, value[0] * settings_->scene_scale),
293 value[1] * settings_->scene_scale);
294 };
295
296 pxr::GfVec2f clip_range = clip_data.initial_value.has_value() ?
297 clamp_clip(clip_data.initial_value.value()) :
298 pxr::GfVec2f(camera->clip_start, camera->clip_end);
299 camera->clip_start = clip_range[0];
300 camera->clip_end = clip_range[1];
301
302 if (!clip_data.samples.is_empty()) {
303 std::array<FCurve *, 2> curves = {
304 create_fcurve(channelbag, {"clip_start", 0}, clip_data.samples.size()),
305 create_fcurve(channelbag, {"clip_end", 0}, clip_data.samples.size())};
306
307 for (int64_t i = 0; i < clip_data.samples.size(); i++) {
308 const SampleData<pxr::GfVec2f> &sample = clip_data.samples[i];
309 clip_range = clamp_clip(sample.value);
310 set_fcurve_sample(curves[0], i, sample.frame, clip_range[0]);
311 set_fcurve_sample(curves[1], i, sample.frame, clip_range[1]);
312 }
313 }
314 }
315
316 /* Aperture data impacts sensor size, sensor fit, and shift values simultaneously. */
317 read_aperture_data(camera,
318 usd_horiz_aperture,
319 usd_vert_aperture,
320 usd_horiz_offset,
321 usd_vert_offset,
322 time,
323 tenth_unit_to_millimeters,
324 channelbag);
325
326 /* USD Orthographic cameras have very limited support. Support a basic, non-animated, translation
327 * between USD and Blender. */
328 pxr::TfToken projection;
329 cam_prim_.GetProjectionAttr().Get(&projection, time);
330 camera->type = (projection.GetString() == "perspective") ? CAM_PERSP : CAM_ORTHO;
331 if (camera->type == CAM_ORTHO) {
332 float horiz_aperture, vert_aperture;
333 usd_horiz_aperture.Get(&horiz_aperture, time);
334 usd_vert_aperture.Get(&vert_aperture, time);
335 camera->ortho_scale = max_ff(vert_aperture, horiz_aperture);
336 }
337
338 /* Recalculate any animation curve handles. */
339 for (FCurve *fcu : channelbag.fcurves()) {
341 }
342
344}
345
346} // namespace blender::io::usd
Functions and classes to work with Actions.
Functions to work with AnimData.
Camera data-block and utility functions.
struct Camera * BKE_camera_add(struct Main *bmain, const char *name)
void BKE_fcurve_handles_recalc(FCurve *fcu)
General operations, lookup, etc. for blender objects.
Object * BKE_object_add_only_object(Main *bmain, int type, const char *name) ATTR_RETURNS_NONNULL
MINLINE float max_ff(float a, float b)
struct FCurve FCurve
@ CAM_DOF_ENABLED
@ CAM_PERSP
@ CAM_ORTHO
struct Camera Camera
@ CAMERA_SENSOR_FIT_HOR
@ CAMERA_SENSOR_FIT_VERT
Object is a sort of wrapper for general info.
@ OB_CAMERA
BMesh const char void * data
long long int int64_t
void reset()
clear internal cached data and reset random seed
blender::Span< const FCurve * > fcurves() const
void read_object_data(Main *bmain, pxr::UsdTimeCode time) override
void create_object(Main *bmain) override
const ImportSettings * settings_
void read_object_data(Main *bmain, pxr::UsdTimeCode time) override
nullptr float
#define input
#define T
bAction * id_action_ensure(Main *bmain, ID *id)
Definition animdata.cc:195
Channelbag & action_channelbag_ensure(bAction &dna_action, ID &animated_id)
void set_fcurve_sample(FCurve *fcu, int64_t sample_index, const float frame, const float value)
FCurve * create_fcurve(blender::animrig::Channelbag &channelbag, const blender::animrig::FCurveDescriptor &fcurve_descriptor, const int sample_count)
float clip_end
char sensor_fit
float sensor_y
float sensor_x
float clip_start
struct CameraDOFSettings dof
float ortho_scale
i
Definition text_draw.cc:230