16#ifndef BT_QUANTIZED_BVH_H
17#define BT_QUANTIZED_BVH_H
22#ifdef DEBUG_CHECK_DEQUANTIZATION
24#define printf spu_printf
34#ifdef BT_USE_DOUBLE_PRECISION
35#define btQuantizedBvhData btQuantizedBvhDoubleData
36#define btOptimizedBvhNodeData btOptimizedBvhNodeDoubleData
37#define btQuantizedBvhDataName "btQuantizedBvhDoubleData"
39#define btQuantizedBvhData btQuantizedBvhFloatData
40#define btOptimizedBvhNodeData btOptimizedBvhNodeFloatData
41#define btQuantizedBvhDataName "btQuantizedBvhFloatData"
47#define MAX_SUBTREE_SIZE_IN_BYTES 2048
51#define MAX_NUM_PARTS_IN_BITS 10
172 TRAVERSAL_STACKLESS = 0,
173 TRAVERSAL_STACKLESS_CACHE_FRIENDLY,
259 unsigned short int quantizedAabbMin[3];
260 unsigned short int quantizedAabbMax[3];
261 quantize(quantizedAabbMin, newAabbMin, 0);
262 quantize(quantizedAabbMax, newAabbMax, 1);
263 for (
int i = 0; i < 3; i++)
285 void buildTree(
int startIndex,
int endIndex);
344 out[0] = (
unsigned short)(((
unsigned short)(
v.getX() +
btScalar(1.)) | 1));
345 out[1] = (
unsigned short)(((
unsigned short)(
v.getY() +
btScalar(1.)) | 1));
346 out[2] = (
unsigned short)(((
unsigned short)(
v.getZ() +
btScalar(1.)) | 1));
350 out[0] = (
unsigned short)(((
unsigned short)(
v.getX()) & 0xfffe));
351 out[1] = (
unsigned short)(((
unsigned short)(
v.getY()) & 0xfffe));
352 out[2] = (
unsigned short)(((
unsigned short)(
v.getZ()) & 0xfffe));
355#ifdef DEBUG_CHECK_DEQUANTIZATION
359 if (newPoint.getX() < point.getX())
361 printf(
"unconservative X, diffX = %f, oldX=%f,newX=%f\n", newPoint.getX() - point.getX(), newPoint.getX(), point.getX());
363 if (newPoint.getY() < point.getY())
365 printf(
"unconservative Y, diffY = %f, oldY=%f,newY=%f\n", newPoint.getY() - point.getY(), newPoint.getY(), point.getY());
367 if (newPoint.getZ() < point.getZ())
369 printf(
"unconservative Z, diffZ = %f, oldZ=%f,newZ=%f\n", newPoint.getZ() - point.getZ(), newPoint.getZ(), point.getZ());
374 if (newPoint.getX() > point.getX())
376 printf(
"unconservative X, diffX = %f, oldX=%f,newX=%f\n", newPoint.getX() - point.getX(), newPoint.getX(), point.getX());
378 if (newPoint.getY() > point.getY())
380 printf(
"unconservative Y, diffY = %f, oldY=%f,newY=%f\n", newPoint.getY() - point.getY(), newPoint.getY(), point.getY());
382 if (newPoint.getZ() > point.getZ())
384 printf(
"unconservative Z, diffZ = %f, oldZ=%f,newZ=%f\n", newPoint.getZ() - point.getZ(), newPoint.getZ(), point.getZ());
394 btVector3 clampedPoint(point2);
434 virtual bool serialize(
void* o_alignedDataBuffer,
unsigned i_dataBufferSize,
bool i_swapEndian)
const;
// Serialized (on-disk / in-buffer) form of a BVH subtree header.
// Identifies a subtree of limited size inside the quantized node array,
// together with its quantized AABB, so traversal can process one
// cache-friendly subtree at a time.
// NOTE(review): the extraction dropped the opening brace and the two
// leading int members (original lines 468-470); restored here so the
// struct is valid C++ again.
struct btBvhSubtreeInfoData
{
	int m_rootNodeIndex;  // index of the subtree root in the contiguous quantized node array
	int m_subtreeSize;    // number of quantized nodes in this subtree
	unsigned short m_quantizedAabbMin[3];
	unsigned short m_quantizedAabbMax[3];
};
475struct btOptimizedBvhNodeFloatData
485struct btOptimizedBvhNodeDoubleData
// Serialized form of one 16-byte quantized BVH node.
// The AABB is stored as 16-bit quantized coordinates; the last field is
// overloaded: for leaf nodes it encodes the triangle index (>= 0), for
// internal nodes it holds the negated escape index used by the stackless
// traversal (< 0).
// NOTE(review): the extraction dropped the brace lines (original lines
// 497 and 501); the embedded line numbers 496/498/499/500 show no member
// was lost, so only the braces are restored here.
struct btQuantizedBvhNodeData
{
	unsigned short m_quantizedAabbMin[3];
	unsigned short m_quantizedAabbMax[3];
	int m_escapeIndexOrTriangleIndex;
};
503struct btQuantizedBvhFloatData
510 int m_numContiguousLeafNodes;
511 int m_numQuantizedContiguousNodes;
512 btOptimizedBvhNodeFloatData *m_contiguousNodesPtr;
513 btQuantizedBvhNodeData *m_quantizedContiguousNodesPtr;
514 btBvhSubtreeInfoData *m_subTreeInfoPtr;
516 int m_numSubtreeHeaders;
520struct btQuantizedBvhDoubleData
527 int m_numContiguousLeafNodes;
528 int m_numQuantizedContiguousNodes;
529 btOptimizedBvhNodeDoubleData *m_contiguousNodesPtr;
530 btQuantizedBvhNodeData *m_quantizedContiguousNodesPtr;
533 int m_numSubtreeHeaders;
534 btBvhSubtreeInfoData *m_subTreeInfoPtr;
ATTR_WARN_UNUSED_RESULT const BMVert * v
SIMD_FORCE_INLINE const btVector3 & getAabbMin() const
SIMD_FORCE_INLINE const btVector3 & getAabbMax() const
void reportAabbOverlappingNodex(btNodeOverlapCallback *nodeCallback, const btVector3 &aabbMin, const btVector3 &aabbMax) const
***************************************** expert/internal use only *************************
BT_DECLARE_ALIGNED_ALLOCATOR()
SIMD_FORCE_INLINE void quantize(unsigned short *out, const btVector3 &point, int isMax) const
virtual int calculateSerializeBufferSizeNew() const
void setQuantizationValues(const btVector3 &bvhAabbMin, const btVector3 &bvhAabbMax, btScalar quantizationMargin=btScalar(1.0))
***************************************** expert/internal use only *************************
void swapLeafNodes(int firstIndex, int secondIndex)
int m_escapeIndexOrTriangleIndex
SIMD_FORCE_INLINE QuantizedNodeArray & getQuantizedNodeArray()
void walkRecursiveQuantizedTreeAgainstQueryAabb(const btQuantizedBvhNode *currentNode, btNodeOverlapCallback *nodeCallback, unsigned short int *quantizedQueryAabbMin, unsigned short int *quantizedQueryAabbMax) const
use the 16-byte stackless 'skipindex' node tree to do a recursive traversal
void mergeInternalNodeAabb(int nodeIndex, const btVector3 &newAabbMin, const btVector3 &newAabbMax)
void walkRecursiveQuantizedTreeAgainstQuantizedTree(const btQuantizedBvhNode *treeNodeA, const btQuantizedBvhNode *treeNodeB, btNodeOverlapCallback *nodeCallback) const
use the 16-byte stackless 'skipindex' node tree to do a recursive traversal
int getTriangleIndex() const
void updateSubtreeHeaders(int leftChildNodexIndex, int rightChildNodexIndex)
void setInternalNodeEscapeIndex(int nodeIndex, int escapeIndex)
void buildTree(int startIndex, int endIndex)
virtual void deSerializeFloat(struct btQuantizedBvhFloatData &quantizedBvhFloatData)
unsigned short int m_quantizedAabbMin[3]
btTraversalMode m_traversalMode
btAlignedObjectArray< btOptimizedBvhNode > NodeArray
for code readability:
static btQuantizedBvh * deSerializeInPlace(void *i_alignedDataBuffer, unsigned int i_dataBufferSize, bool i_swapEndian)
deSerializeInPlace loads and initializes a BVH from a buffer in memory 'in place'
SIMD_FORCE_INLINE void quantizeWithClamp(unsigned short *out, const btVector3 &point2, int isMax) const
NodeArray m_contiguousNodes
void walkStacklessQuantizedTreeAgainstRay(btNodeOverlapCallback *nodeCallback, const btVector3 &raySource, const btVector3 &rayTarget, const btVector3 &aabbMin, const btVector3 &aabbMax, int startNodeIndex, int endNodeIndex) const
virtual ~btQuantizedBvh()
void setInternalNodeAabbMin(int nodeIndex, const btVector3 &aabbMin)
virtual void deSerializeDouble(struct btQuantizedBvhDoubleData &quantizedBvhDoubleData)
QuantizedNodeArray & getLeafNodeArray()
static unsigned int getAlignmentSerializationPadding()
void reportRayOverlappingNodex(btNodeOverlapCallback *nodeCallback, const btVector3 &raySource, const btVector3 &rayTarget) const
btAlignedObjectArray< btBvhSubtreeInfo > BvhSubtreeInfoArray
void reportBoxCastOverlappingNodex(btNodeOverlapCallback *nodeCallback, const btVector3 &raySource, const btVector3 &rayTarget, const btVector3 &aabbMin, const btVector3 &aabbMax) const
unsigned calculateSerializeBufferSize() const
void walkStacklessQuantizedTreeCacheFriendly(btNodeOverlapCallback *nodeCallback, unsigned short int *quantizedQueryAabbMin, unsigned short int *quantizedQueryAabbMax) const
tree traversal designed for small-memory processors like PS3 SPU
#define MAX_NUM_PARTS_IN_BITS
void walkStacklessTree(btNodeOverlapCallback *nodeCallback, const btVector3 &aabbMin, const btVector3 &aabbMax) const
btAlignedObjectArray< btQuantizedBvhNode > QuantizedNodeArray
btBvhSubtreeInfo
btBvhSubtreeInfo provides info to gather a subtree of limited size
void walkStacklessTreeAgainstRay(btNodeOverlapCallback *nodeCallback, const btVector3 &raySource, const btVector3 &rayTarget, const btVector3 &aabbMin, const btVector3 &aabbMax, int startNodeIndex, int endNodeIndex) const
virtual bool serialize(void *o_alignedDataBuffer, unsigned i_dataBufferSize, bool i_swapEndian) const
Data buffer MUST be 16 byte aligned.
btVector3 m_bvhQuantization
BvhSubtreeInfoArray m_SubtreeHeaders
void setAabbFromQuantizeNode(const btQuantizedBvhNode &quantizedNode)
unsigned short int m_quantizedAabbMax[3]
void walkStacklessQuantizedTree(btNodeOverlapCallback *nodeCallback, unsigned short int *quantizedQueryAabbMin, unsigned short int *quantizedQueryAabbMax, int startNodeIndex, int endNodeIndex) const
void assignInternalNodeFromLeafNode(int internalNode, int leafNodeIndex)
QuantizedNodeArray m_quantizedContiguousNodes
int sortAndCalcSplittingIndex(int startIndex, int endIndex, int splitAxis)
int getEscapeIndex() const
void setInternalNodeAabbMax(int nodeIndex, const btVector3 &aabbMax)
SIMD_FORCE_INLINE BvhSubtreeInfoArray & getSubtreeInfoArray()
SIMD_FORCE_INLINE btVector3 unQuantize(const unsigned short *vecIn) const
int calcSplittingAxis(int startIndex, int endIndex)
SIMD_FORCE_INLINE bool isQuantized()
void buildInternal()
buildInternal is expert use only: assumes that setQuantizationValues and LeafNodeArray are initialized
QuantizedNodeArray m_quantizedLeafNodes
void setTraversalMode(btTraversalMode traversalMode)
setTraversalMode lets you choose between stackless, recursive or stackless cache-friendly tree traversal
#define btQuantizedBvhData
float btScalar
The btScalar type abstracts floating point numbers, to easily switch between double and single floating point precision
#define ATTRIBUTE_ALIGNED16(a)
#define SIMD_FORCE_INLINE
virtual ~btNodeOverlapCallback()
virtual void processNode(int subPart, int triangleIndex)=0