// NOTE(review): this chunk is a mangled extract — original file line numbers
// (e.g. "197") are fused into the code text and several source lines are
// missing between the visible ones. Code below is kept byte-identical; only
// comments were added.
// Number of state items the repeat zone carries from one iteration to the next.
197 const int num_repeat_items = node_storage.
items_num;
// Number of links crossing the zone border (inputs used inside the body).
198 const int num_border_links = body_fn_.indices.inputs.border_links.size();
// Iteration count, clamped via std::max (right-hand side truncated in this extract).
201 const int iterations = std::max<int>(
// NOTE(review): the surrounding context for this condition is missing here —
// presumably it gates the inspection-index warning below; confirm against the
// full source.
204 if (iterations >= 10) {
// Report a UI warning when the user's inspection index does not refer to an
// existing iteration.
216 tree_logger->node_warnings.append(
217 *tree_logger->allocator,
218 {repeat_output_bnode_.identifier,
219 {NodeWarningType::Info, N_(
"Inspection index is out of range")}});
// Offsets of 1 skip the first socket (presumably the "Iterations" count
// socket), which is handled outside the inner graph — TODO confirm.
225 const int main_inputs_offset = 1;
226 const int body_inputs_offset = 1;
// The lazy-function graph being built for this repeat zone evaluation.
228 lf::Graph &lf_graph = eval_storage.graph;
// Create a graph input socket per zone input (loop body truncated in extract).
233 for (
const int i :
inputs_.index_range()) {
// Create a graph output socket per zone output (loop body truncated).
237 for (
const int i :
outputs_.index_range()) {
// Instantiate one body function node per iteration.
244 for ([[maybe_unused]]
const int i :
IndexRange(iterations)) {
246 lf_body_nodes.
add_new(&lf_node);
// One OR node per border link: a border input is "used" if any iteration
// uses it, so each OR node has `iterations` inputs.
251 Array<lf::FunctionNode *> lf_border_link_usage_or_nodes(num_border_links);
252 eval_storage.or_function.emplace(iterations);
253 for (
const int i : IndexRange(num_border_links)) {
254 lf::FunctionNode &lf_node = lf_graph.
add_function(*eval_storage.or_function);
255 lf_border_link_usage_or_nodes[
i] = &lf_node;
// Only materialize per-iteration index values when the zone input node's
// first output socket (presumably the iteration index) is actually linked.
258 const bool use_index_values = zone_.input_node()->output_socket(0).is_directly_linked();
260 if (use_index_values) {
261 eval_storage.index_values.reinitialize(iterations);
// Fill each slot with its own iteration index (range setup truncated in
// this extract).
263 for (
const int i : range) {
264 eval_storage.index_values[
i].set(
i);
// Shared sentinel used as the index input when no iteration index is needed.
270 static const SocketValueVariant static_unused_index{-1};
// Wire up every body node: border links in, border-link usages out, and
// reference sets in.
271 for (
const int iter_i : lf_body_nodes.
index_range()) {
272 lf::FunctionNode &lf_node = *lf_body_nodes[iter_i];
// Pick the real per-iteration index value, or the unused sentinel.
273 const SocketValueVariant *index_value = use_index_values ?
274 &eval_storage.index_values[iter_i] :
275 &static_unused_index;
277 for (
const int i : IndexRange(num_border_links)) {
// Every iteration reads the same border-link graph input.
278 lf_graph.
add_link(*lf_inputs[zone_info_.indices.inputs.border_links[
i]],
279 lf_node.
input(body_fn_.indices.inputs.border_links[
i]));
// Each iteration's usage output feeds one input of the per-link OR node.
280 lf_graph.
add_link(lf_node.
output(body_fn_.indices.outputs.border_link_usages[
i]),
281 lf_border_link_usage_or_nodes[
i]->input(iter_i));
// Reference-set inputs are shared across iterations as well.
285 for (
const auto &item : body_fn_.indices.inputs.reference_sets.items()) {
286 lf_graph.
add_link(*lf_inputs[zone_info_.indices.inputs.reference_sets.lookup(item.key)],
287 lf_node.
input(item.value));
// Shared constant used as a default "used" value further below.
291 static bool static_true =
true;
// Chain consecutive iterations: main outputs of iteration i feed the
// matching main inputs of iteration i+1 (the enclosing loop header over
// iter_i is missing from this extract).
295 lf::FunctionNode &lf_node = *lf_body_nodes[iter_i];
296 lf::FunctionNode &lf_next_node = *lf_body_nodes[iter_i + 1];
297 for (
const int i : IndexRange(num_repeat_items)) {
299 lf_node.
output(body_fn_.indices.outputs.main[
i]),
300 lf_next_node.
input(body_fn_.indices.inputs.main[
i + body_inputs_offset]));
// Route each OR node's result to the corresponding border-link-usage graph
// output (the link call itself is truncated in this extract).
309 for (
const int i : IndexRange(num_border_links)) {
311 *lf_outputs[zone_info_.indices.outputs.border_link_usages[
i]]);
// With at least one iteration, the zone's main inputs feed the first body
// node, and the first body node's input-usage outputs feed the zone's
// input-usage outputs.
314 if (iterations > 0) {
317 lf::FunctionNode &lf_first_body_node = *lf_body_nodes[0];
318 for (
const int i : IndexRange(num_repeat_items)) {
320 *lf_inputs[zone_info_.indices.inputs.main[
i + main_inputs_offset]],
321 lf_first_body_node.
input(body_fn_.indices.inputs.main[
i + body_inputs_offset]));
323 lf_first_body_node.
output(
324 body_fn_.indices.outputs.input_usages[
i + body_inputs_offset]),
325 *lf_outputs[zone_info_.indices.outputs.input_usages[
i + main_inputs_offset]]);
// The last iteration's main outputs become the zone's outputs, and the
// zone's output-usage inputs are fed back into the last body node.
330 lf::FunctionNode &lf_last_body_node = *lf_body_nodes.
as_span().last();
331 for (
const int i : IndexRange(num_repeat_items)) {
332 lf_graph.
add_link(lf_last_body_node.
output(body_fn_.indices.outputs.main[
i]),
333 *lf_outputs[zone_info_.indices.outputs.main[
i]]);
334 lf_graph.
add_link(*lf_inputs[zone_info_.indices.inputs.output_usages[
i]],
335 lf_last_body_node.
input(body_fn_.indices.inputs.output_usages[
i]));
// Zero-iteration case (presumably the else-branch of `iterations > 0` —
// confirm against full source): pass zone inputs straight through to the
// zone outputs, and forward usage information the same way.
341 for (
const int i : IndexRange(num_repeat_items)) {
342 lf_graph.
add_link(*lf_inputs[zone_info_.indices.inputs.main[
i + main_inputs_offset]],
343 *lf_outputs[zone_info_.indices.outputs.main[
i]]);
345 *lf_inputs[zone_info_.indices.inputs.output_usages[
i]],
346 *lf_outputs[zone_info_.indices.outputs.input_usages[
i + main_inputs_offset]]);
// No iterations run, so no border link is used.
348 for (
const int i : IndexRange(num_border_links)) {
349 static bool static_false =
false;
350 lf_outputs[zone_info_.indices.outputs.border_link_usages[
i]]->set_default_value(
// The first input usage (the iteration count) is always used.
355 lf_outputs[zone_info_.indices.outputs.input_usages[0]]->set_default_value(&static_true);
// Build index maps and socket lists that skip the sockets handled outside
// the inner graph: the first input (dropped below via drop_front(1),
// presumably the iteration count) and the iteration-usage output.
365 eval_storage.output_index_map.reinitialize(
outputs_.size() - 1);
366 eval_storage.input_index_map.resize(
inputs_.size() - 1);
369 Vector<const lf::GraphInputSocket *> lf_graph_inputs = lf_inputs.
as_span().drop_front(1);
// Output index of the iteration-count usage socket; it is excluded from the
// executor's outputs, so indices after it shift by one.
371 const int iteration_usage_index = zone_info_.indices.outputs.input_usages[0];
373 eval_storage.output_index_map.as_mutable_span().take_front(iteration_usage_index));
375 eval_storage.output_index_map.as_mutable_span().drop_front(iteration_usage_index),
376 iteration_usage_index + 1);
// Executor outputs: all zone outputs except the excluded usage socket.
378 Vector<const lf::GraphOutputSocket *> lf_graph_outputs = lf_outputs.
as_span().take_front(
379 iteration_usage_index);
380 lf_graph_outputs.
extend(lf_outputs.
as_span().drop_front(iteration_usage_index + 1));
382 eval_storage.body_execute_wrapper.emplace();
383 eval_storage.body_execute_wrapper->repeat_output_bnode_ = &repeat_output_bnode_;
384 eval_storage.body_execute_wrapper->lf_body_nodes_ = &lf_body_nodes;
385 eval_storage.side_effect_provider.emplace();
386 eval_storage.side_effect_provider->repeat_output_bnode_ = &repeat_output_bnode_;
387 eval_storage.side_effect_provider->lf_body_nodes_ = lf_body_nodes;
// Construct the graph executor over the wired-up graph (one constructor
// argument line, original line 392, is missing from this extract) and
// allocate its per-execution storage.
389 eval_storage.graph_executor.emplace(lf_graph,
390 std::move(lf_graph_inputs),
391 std::move(lf_graph_outputs),
393 &*eval_storage.side_effect_provider,
394 &*eval_storage.body_execute_wrapper);
395 eval_storage.graph_executor_storage = eval_storage.graph_executor->init_storage(
396 eval_storage.allocator);
// Optionally record the zone graph as a dot string for debugging; the
// lock_guard protects the shared map, and lookup_or_add_cb ensures to_dot()
// runs at most once per zone id (closing brace lies past this extract).
400 if (btree_orig.
runtime->logged_zone_graphs) {
401 std::lock_guard
lock{btree_orig.
runtime->logged_zone_graphs->mutex};
402 btree_orig.
runtime->logged_zone_graphs->graph_by_zone_id.lookup_or_add_cb(
403 repeat_output_bnode_.identifier, [&]() { return lf_graph.to_dot(); });