Houdini Engine 2.0
Asset Inputs Samples

For documentation on the asset inputs APIs, see Asset Inputs.

Marshalling Geometry Into Houdini

For documentation on marshalling geometry into Houdini, see Marshalling Geometry Into Houdini.

Below is a code snippet that marshals in the simplest of geometry - a triangle - then proceeds to attach a string attribute onto each point of the triangle, and finally dumps the resulting scene into a hip file so it can be opened and viewed in Houdini.

#include <HAPI/HAPI.h>
#include <stdlib.h>
#include <string.h> // for _strdup
#include <iostream>
#include <string>
#include <vector>
#define ENSURE_SUCCESS( result ) \
if ( (result) != HAPI_RESULT_SUCCESS ) \
{ \
std::cout << "failure at " << __FILE__ << ":" << __LINE__ << std::endl; \
std::cout << get_last_error() << std::endl; \
exit( 1 ); \
}
#define ENSURE_COOK_SUCCESS( result ) \
if ( (result) != HAPI_STATE_READY ) \
{ \
std::cout << "failure at " << __FILE__ << ":" << __LINE__ << std::endl; \
std::cout << get_last_cook_error() << std::endl; \
exit( 1 ); \
}
static std::string get_last_error();
static std::string get_last_cook_error();
static void wait_for_cook();
int
main( int argc, char ** argv )
{
HAPI_CookOptions cook_options = HAPI_CookOptions_Create(); // default cook options
ENSURE_SUCCESS( HAPI_Initialize(
nullptr, // session
&cook_options,
true, // use_cooking_thread
-1, // cooking_thread_stack_size
nullptr, // otl_search_path
nullptr, // dso_search_path
nullptr, // image_dso_search_path
nullptr // audio_dso_search_path
) );
// Create an empty input asset that will receive the marshalled geometry.
HAPI_AssetId geoCreatorId;
ENSURE_SUCCESS( HAPI_CreateInputAsset( nullptr, &geoCreatorId, nullptr ) );
ENSURE_SUCCESS( HAPI_CookAsset( nullptr, geoCreatorId, nullptr ) );
wait_for_cook();
// Describe a single part: one triangle made of 3 points and 3 vertices.
HAPI_PartInfo newPart = HAPI_PartInfo_Create();
newPart.vertexCount = 3;
newPart.pointCount = 3;
newPart.faceCount = 1;
ENSURE_SUCCESS( HAPI_SetPartInfo(
nullptr, geoCreatorId, 0, 0, &newPart ) );
// Describe the point position attribute "P".
HAPI_AttributeInfo pointInfo = HAPI_AttributeInfo_Create();
pointInfo.count = 3; // 3 points
pointInfo.tupleSize = 3; // 3 floats per point (x, y, z)
pointInfo.exists = true;
pointInfo.storage = HAPI_STORAGETYPE_FLOAT;
pointInfo.owner = HAPI_ATTROWNER_POINT;
ENSURE_SUCCESS( HAPI_AddAttribute(
nullptr, geoCreatorId, 0, 0, "P", &pointInfo ) );
float positions[ 9 ] =
{ 0.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f };
ENSURE_SUCCESS( HAPI_SetAttributeFloatData(
nullptr, geoCreatorId, 0, 0, "P", &pointInfo, positions, 0, 3 ) );
int vertices[ 3 ] = { 0, 1, 2 };
ENSURE_SUCCESS( HAPI_SetVertexList(
nullptr, geoCreatorId, 0, 0, vertices, 0, 3 ) );
int face_counts[ 1 ] = { 3 }; // 3 vertices in the first (and only) face
ENSURE_SUCCESS( HAPI_SetFaceCounts(
nullptr, geoCreatorId, 0, 0, face_counts, 0, 1 ) );
// Per-point string values (_strdup is the MSVC spelling of POSIX strdup).
char ** strs = new char *[ 3 ];
strs[ 0 ] = _strdup( "str1" );
strs[ 1 ] = _strdup( "str2" );
strs[ 2 ] = _strdup( "str3" );
// Describe the per-point string attribute "strData".
pointInfo.count = 3; // 3 points
pointInfo.tupleSize = 1; // one string per point
pointInfo.exists = true;
pointInfo.storage = HAPI_STORAGETYPE_STRING;
ENSURE_SUCCESS( HAPI_AddAttribute(
nullptr, geoCreatorId, 0, 0, "strData", &pointInfo ) );
HAPI_AttributeInfo attributeInfo = HAPI_AttributeInfo_Create();
attributeInfo.exists = true;
attributeInfo.owner = HAPI_ATTROWNER_POINT;
attributeInfo.storage = HAPI_STORAGETYPE_STRING;
attributeInfo.count = 3;
attributeInfo.tupleSize = 1;
ENSURE_SUCCESS( HAPI_SetAttributeStringData(
nullptr, geoCreatorId, 0, 0, "strData", &attributeInfo,
(const char **) strs, 0, 3 ) );
ENSURE_SUCCESS( HAPI_CommitGeo( nullptr, geoCreatorId, 0, 0 ) );
ENSURE_SUCCESS( HAPI_SaveHIPFile( nullptr, "testoutput.hip", false ) );
return 0;
}
static void
wait_for_cook()
{
int status;
// Busy-wait until the asynchronous cook reaches a ready (or error) state.
do
{
HAPI_GetStatus( nullptr, HAPI_STATUS_COOK_STATE, &status );
}
while ( status > HAPI_STATE_MAX_READY_STATE );
ENSURE_COOK_SUCCESS( status );
}
static std::string
get_last_error()
{
int buffer_length;
HAPI_GetStatusStringBufLength(
nullptr,
HAPI_STATUS_CALL_RESULT, HAPI_STATUSVERBOSITY_ERRORS,
&buffer_length );
char * buf = new char[ buffer_length ];
HAPI_GetStatusString(
nullptr, HAPI_STATUS_CALL_RESULT, buf, buffer_length );
std::string result( buf );
delete[] buf;
return result;
}
static std::string
get_last_cook_error()
{
int buffer_length;
HAPI_GetStatusStringBufLength(
nullptr,
HAPI_STATUS_COOK_RESULT, HAPI_STATUSVERBOSITY_ERRORS,
&buffer_length );
char * buf = new char[ buffer_length ];
HAPI_GetStatusString(
nullptr, HAPI_STATUS_COOK_RESULT, buf, buffer_length );
std::string result( buf );
delete[] buf;
return result;
}
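
The listing above never frees the _strdup'd strings and never shuts the Houdini Engine session down, which is fine for a throwaway test but worth noting for real host code. A minimal teardown sketch, placed just before the return statement in main(), might look like the following; it assumes HAPI_Cleanup() takes the session pointer like the other calls in this sample.

// Hedged teardown sketch (not part of the original sample).
for ( int i = 0; i < 3; ++i )
free( strs[ i ] ); // _strdup allocates with malloc, so free() releases it
delete [] strs;
ENSURE_SUCCESS( HAPI_Cleanup( nullptr ) ); // assumed session-pointer signature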

Marshalling Point Clouds

For documentation on marshalling point clouds into Houdini, see Marshalling Point Clouds.

The following sample showcases marshalling a point cloud into Houdini Engine:

#include <HAPI/HAPI.h>
#include <stdlib.h>
#include <iostream>
#include <string>
#include <vector>
#define ENSURE_SUCCESS( result ) \
if ( (result) != HAPI_RESULT_SUCCESS ) \
{ \
std::cout << "failure at " << __FILE__ << ":" << __LINE__ << std::endl; \
std::cout << get_last_error() << std::endl; \
exit( 1 ); \
}
#define ENSURE_COOK_SUCCESS( result ) \
if ( (result) != HAPI_STATE_READY ) \
{ \
std::cout << "failure at " << __FILE__ << ":" << __LINE__ << std::endl; \
std::cout << get_last_cook_error() << std::endl; \
exit( 1 ); \
}
static std::string get_last_error();
static std::string get_last_cook_error();
static void wait_for_cook();
int
main( int argc, char ** argv )
{
HAPI_CookOptions cook_options = HAPI_CookOptions_Create();
cook_options.maxVerticesPerPrimitive = 4;
ENSURE_SUCCESS( HAPI_Initialize(
nullptr, // session
&cook_options,
true, // use_cooking_thread
-1, // cooking_thread_stack_size
nullptr, // otl_search_path
nullptr, // dso_search_path
nullptr, // image_dso_search_path
nullptr // audio_dso_search_path
) );
HAPI_AssetId geoCreatorId;
ENSURE_SUCCESS( HAPI_CreateInputAsset( nullptr, &geoCreatorId, nullptr ) );
ENSURE_SUCCESS( HAPI_CookAsset( nullptr, geoCreatorId, nullptr ) );
wait_for_cook();
// A point cloud part: 8 points with no vertices or faces.
HAPI_PartInfo newPart = HAPI_PartInfo_Create();
newPart.vertexCount = 0;
newPart.pointCount = 8;
newPart.faceCount = 0;
ENSURE_SUCCESS( HAPI_SetPartInfo(
nullptr, geoCreatorId, 0, 0, &newPart ) );
HAPI_AttributeInfo pointInfo = HAPI_AttributeInfo_Create();
pointInfo.count = 8; // 8 points
pointInfo.tupleSize = 3; // 3 floats per point (x, y, z)
pointInfo.exists = true;
pointInfo.storage = HAPI_STORAGETYPE_FLOAT;
pointInfo.owner = HAPI_ATTROWNER_POINT;
ENSURE_SUCCESS( HAPI_AddAttribute(
nullptr, geoCreatorId, 0, 0, "P", &pointInfo ) );
float positions[ 24 ] = {
0.0f, 0.0f, 0.0f,
0.0f, 0.0f, 1.0f,
0.0f, 1.0f, 0.0f,
0.0f, 1.0f, 1.0f,
1.0f, 0.0f, 0.0f,
1.0f, 0.0f, 1.0f,
1.0f, 1.0f, 0.0f,
1.0f, 1.0f, 1.0f
};
ENSURE_SUCCESS( HAPI_SetAttributeFloatData(
nullptr, geoCreatorId, 0, 0, "P", &pointInfo, positions, 0, 8 ) );
ENSURE_SUCCESS( HAPI_CommitGeo( nullptr, geoCreatorId, 0, 0 ) );
ENSURE_SUCCESS( HAPI_SaveHIPFile(
nullptr, "C:\\test\\testoutput.hip", false ) );
return 0;
}
static void
wait_for_cook()
{
int status;
do
{
HAPI_GetStatus( nullptr, HAPI_STATUS_COOK_STATE, &status );
}
while ( status > HAPI_STATE_MAX_READY_STATE );
ENSURE_COOK_SUCCESS( status );
}
static std::string
get_last_error()
{
int buffer_length;
HAPI_GetStatusStringBufLength(
nullptr, HAPI_STATUS_CALL_RESULT, HAPI_STATUSVERBOSITY_ERRORS,
&buffer_length );
char * buf = new char[ buffer_length ];
HAPI_GetStatusString(
nullptr, HAPI_STATUS_CALL_RESULT, buf, buffer_length );
std::string result( buf );
delete [] buf;
return result;
}
static std::string
get_last_cook_error()
{
int buffer_length;
HAPI_GetStatusStringBufLength(
nullptr, HAPI_STATUS_COOK_RESULT, HAPI_STATUSVERBOSITY_ERRORS,
&buffer_length );
char * buf = new char[ buffer_length ];
HAPI_GetStatusString(
nullptr, HAPI_STATUS_COOK_RESULT, buf, buffer_length );
std::string result( buf );
delete [] buf;
return result;
}
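
The same HAPI_AddAttribute() / HAPI_SetAttributeFloatData() pattern used for "P" above works for any other per-point float attribute. As a rough sketch that is not part of the original sample, the snippet below would tag every point of the cloud with a constant grey color via the standard Houdini "Cd" attribute; it is meant to sit just before the HAPI_CommitGeo() call, and colorInfo and colors are names introduced here purely for illustration.

HAPI_AttributeInfo colorInfo = HAPI_AttributeInfo_Create();
colorInfo.count = 8; // one color per point
colorInfo.tupleSize = 3; // r, g, b
colorInfo.exists = true;
colorInfo.storage = HAPI_STORAGETYPE_FLOAT;
colorInfo.owner = HAPI_ATTROWNER_POINT;
ENSURE_SUCCESS( HAPI_AddAttribute(
nullptr, geoCreatorId, 0, 0, "Cd", &colorInfo ) );
float colors[ 24 ];
for ( int i = 0; i < 24; ++i )
colors[ i ] = 0.5f; // constant mid-grey
ENSURE_SUCCESS( HAPI_SetAttributeFloatData(
nullptr, geoCreatorId, 0, 0, "Cd", &colorInfo, colors, 0, 8 ) );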

Connecting Assets

For documentation on connecting assets, see Connecting Assets.

The sample below marshals a cube into Houdini Engine, then proceeds to connect that cube to the subdivide node in Houdini. Note that the subdivide node is a standard Houdini node, so we did not need to first load its definition from a file with HAPI_LoadAssetLibraryFromFile(). The result is then dumped to a file so it can be viewed in Houdini:

#include <HAPI/HAPI.h>
#include <stdlib.h>
#include <iostream>
#include <string>
#include <vector>
#define ENSURE_SUCCESS( result ) \
if ( (result) != HAPI_RESULT_SUCCESS ) \
{ \
std::cout << "failure at " << __FILE__ << ":" << __LINE__ << std::endl; \
std::cout << get_last_error() << std::endl; \
exit( 1 ); \
}
#define ENSURE_COOK_SUCCESS( result ) \
if ( (result) != HAPI_STATE_READY ) \
{ \
std::cout << "failure at " << __FILE__ << ":" << __LINE__ << std::endl; \
std::cout << get_last_cook_error() << std::endl; \
exit( 1 ); \
}
static std::string get_last_error();
static std::string get_last_cook_error();
static void wait_for_cook();
int
main( int argc, char ** argv )
{
HAPI_CookOptions cook_options = HAPI_CookOptions_Create();
cook_options.maxVerticesPerPrimitive = 4;
ENSURE_SUCCESS( HAPI_Initialize(
nullptr, // session
&cook_options,
true, // use_cooking_thread
-1, // cooking_thread_stack_size
nullptr, // otl_search_path
nullptr, // dso_search_path
nullptr, // image_dso_search_path
nullptr // audio_dso_search_path
) );
HAPI_AssetId geoCreatorId;
ENSURE_SUCCESS( HAPI_CreateInputAsset( nullptr, &geoCreatorId, nullptr ) );
ENSURE_SUCCESS( HAPI_CookAsset( nullptr, geoCreatorId, nullptr ) );
wait_for_cook();
// A cube part: 6 quad faces, 8 points, 24 vertices (4 per face).
HAPI_PartInfo newPart = HAPI_PartInfo_Create();
newPart.vertexCount = 24;
newPart.pointCount = 8;
newPart.faceCount = 6;
ENSURE_SUCCESS( HAPI_SetPartInfo(
nullptr, geoCreatorId, 0, 0, &newPart ) );
HAPI_AttributeInfo pointInfo = HAPI_AttributeInfo_Create();
pointInfo.count = 8; // 8 points
pointInfo.tupleSize = 3; // 3 floats per point (x, y, z)
pointInfo.exists = true;
pointInfo.storage = HAPI_STORAGETYPE_FLOAT;
pointInfo.owner = HAPI_ATTROWNER_POINT;
ENSURE_SUCCESS( HAPI_AddAttribute(
nullptr, geoCreatorId, 0, 0, "P", &pointInfo ) );
float positions[ 24 ] = {
0.0f, 0.0f, 0.0f,
0.0f, 0.0f, 1.0f,
0.0f, 1.0f, 0.0f,
0.0f, 1.0f, 1.0f,
1.0f, 0.0f, 0.0f,
1.0f, 0.0f, 1.0f,
1.0f, 1.0f, 0.0f,
1.0f, 1.0f, 1.0f
};
ENSURE_SUCCESS( HAPI_SetAttributeFloatData(
nullptr, geoCreatorId, 0, 0, "P", &pointInfo, positions, 0, 8 ) );
int vertices[ 24 ] = {
0, 2, 6, 4,
2, 3, 7, 6,
2, 0, 1, 3,
1, 5, 7, 3,
5, 4, 6, 7,
0, 4, 5, 1,
};
ENSURE_SUCCESS( HAPI_SetVertexList(
nullptr, geoCreatorId, 0, 0, vertices, 0, 24 ) );
int face_counts[ 6 ] = { 4, 4, 4, 4, 4, 4 }; // 4 verts for each face (quads)
ENSURE_SUCCESS( HAPI_SetFaceCounts(
nullptr, geoCreatorId, 0, 0, face_counts, 0, 6 ) );
ENSURE_SUCCESS( HAPI_CommitGeo( nullptr, geoCreatorId, 0, 0 ) );
// Instantiate the stock Houdini subdivide SOP and wire the input asset into its first input.
HAPI_AssetId subdivideAssetId = -1;
ENSURE_SUCCESS( HAPI_InstantiateAsset(
nullptr, "Sop/subdivide", true, &subdivideAssetId ) );
ENSURE_SUCCESS( HAPI_ConnectAssetGeometry(
nullptr, geoCreatorId, 0, subdivideAssetId, 0 ) );
ENSURE_SUCCESS( HAPI_SaveHIPFile(
nullptr, "C:\\test\\testoutput.hip", false ) );
return 0;
}
static void
wait_for_cook()
{
int status;
do
{
HAPI_GetStatus( nullptr, HAPI_STATUS_COOK_STATE, &status );
}
while ( status > HAPI_STATE_MAX_READY_STATE );
ENSURE_COOK_SUCCESS( status );
}
static std::string
get_last_error()
{
int buffer_length;
HAPI_GetStatusStringBufLength(
nullptr,
HAPI_STATUS_CALL_RESULT, HAPI_STATUSVERBOSITY_ERRORS,
&buffer_length );
char * buf = new char[ buffer_length ];
HAPI_GetStatusString(
nullptr, HAPI_STATUS_CALL_RESULT, buf, buffer_length );
std::string result( buf );
delete[] buf;
return result;
}
static std::string
get_last_cook_error()
{
int buffer_length;
HAPI_GetStatusStringBufLength(
nullptr,
HAPI_STATUS_COOK_RESULT, HAPI_STATUSVERBOSITY_ERRORS,
&buffer_length );
char * buf = new char[ buffer_length ];
HAPI_GetStatusString(
nullptr, HAPI_STATUS_COOK_RESULT, buf, buffer_length );
std::string result( buf );
delete[] buf;
return result;
}
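
Note that the listing above saves the HIP file right after making the connection, without cooking the subdivide asset from this process. If you want the subdivided geometry evaluated before the save, a small addition, using only calls already shown in this sample, would be to cook the connected asset and wait for it. Whether this step is required depends on how the host application drives HAPI, so treat it as a sketch rather than a mandatory step.

// After HAPI_ConnectAssetGeometry() and before HAPI_SaveHIPFile():
ENSURE_SUCCESS( HAPI_CookAsset( nullptr, subdivideAssetId, nullptr ) );
wait_for_cook();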

The resulting hip file is shown below. We see the input asset we created as well as the subdivide asset. The "GlobalNodes" node is something HAPI creates automatically when a new Houdini Engine session is started:

(Image: HAPI_AssetInputs_Result_of_Connecting1.png)

Diving into the subdivide asset, we see that a subdivide SOP node was created, with an object merge node, automatically created by HAPI, feeding into it; the object merge's path is set to the Input node:

(Image: HAPI_AssetInputs_Result_of_Connecting2.png)

Finally, the result of the connected asset is shown:

(Image: HAPI_AssetInputs_Result_of_Connecting3.png)