Houdini Engine 6.2
PDG Cooking Samples

For documentation on working with PDG through HAPI, see PDG.

PDG Cooking With Events

Below is a code snippet that queries the TOP nodes in a Houdini digital asset, executes a non-blocking PDG cook of a particular TOP node, receives the emitted PDG events in real time, and acquires the work item results.

#include <HAPI/HAPI.h>
#include <iostream>
#include <string>
#include <vector>
#include <thread>
#include <chrono>
#include <cassert>
#define ENSURE_SUCCESS( result ) \
if ( (result) != HAPI_RESULT_SUCCESS ) \
{ \
std::cout << "Failure at " << __FILE__ << ": " << __LINE__ << std::endl; \
std::cout << getLastError() << std::endl; \
exit( 1 ); \
}
#define ENSURE_COOK_SUCCESS( result ) \
if ( (result) != HAPI_RESULT_SUCCESS ) \
{ \
std::cout << "Failure at " << __FILE__ << ": " << __LINE__ << std::endl; \
std::cout << getLastCookError() << std::endl; \
exit( 1 ); \
}
static std::string getLastError();
static std::string getLastCookError();
static std::string getString(HAPI_StringHandle stringHandle);
int
main(int argc, char **argv)
{
const char * hda_file = argc == 2 ? argv[1] : "examples/top_sphere_mountain.hda";
HAPI_CookOptions cook_options = HAPI_CookOptions_Create();
HAPI_Session session;
// Create an in-process session (recent HAPI versions also take a HAPI_SessionInfo argument here)
ENSURE_SUCCESS(HAPI_CreateInProcessSession(&session));
ENSURE_SUCCESS(HAPI_Initialize(&session,
&cook_options,
true,
-1,
nullptr,
nullptr,
nullptr,
nullptr,
nullptr));
// Load the HDA
HAPI_AssetLibraryId asset_lib_id;
ENSURE_SUCCESS(HAPI_LoadAssetLibraryFromFile(&session, hda_file, true, &asset_lib_id));
int asset_count;
ENSURE_SUCCESS(HAPI_GetAvailableAssetCount(&session, asset_lib_id, &asset_count));
HAPI_StringHandle assetsh;
ENSURE_SUCCESS(HAPI_GetAvailableAssets(&session, asset_lib_id, &assetsh, asset_count));
std::string asset_name = getString(assetsh);
HAPI_NodeId asset_node_id;
ENSURE_SUCCESS(HAPI_CreateNode(&session, -1, asset_name.c_str(), nullptr, true, &asset_node_id));
// Do a regular node cook
ENSURE_SUCCESS(HAPI_CookNode(&session, asset_node_id, &cook_options));
int cook_status;
HAPI_Result cook_result;
do
{
cook_result = HAPI_GetStatus(&session, HAPI_STATUS_COOK_STATE, &cook_status);
} while (cook_status > HAPI_STATE_MAX_READY_STATE && cook_result == HAPI_RESULT_SUCCESS);
ENSURE_SUCCESS(cook_result);
ENSURE_COOK_SUCCESS(cook_status);
// Get the TOP Network node, which is the only child of the asset node
int network_count = 0;
ENSURE_SUCCESS(HAPI_ComposeChildNodeList(
&session, asset_node_id, HAPI_NODETYPE_TOP,
HAPI_NODEFLAGS_NETWORK, true, &network_count));
assert(network_count == 1);
std::vector<HAPI_NodeId> network_ids(network_count);
ENSURE_SUCCESS(HAPI_GetComposedChildNodeList(
&session, asset_node_id, network_ids.data(), network_count));
// Now get the TOP node children of the TOP Network node
HAPI_NodeId top_network_id = network_ids[0];
HAPI_NodeInfo node_info;
ENSURE_SUCCESS(HAPI_GetNodeInfo(&session, top_network_id, &node_info));
std::string name = getString(node_info.nameSH);
assert(name == "topnet1");
// Get all TOP nodes but not schedulers
int child_count = 0;
ENSURE_SUCCESS(HAPI_ComposeChildNodeList(
&session, top_network_id, HAPI_NODETYPE_TOP, HAPI_NODEFLAGS_TOP_NONSCHEDULER,
true, &child_count));
assert(child_count == 2);
std::vector<HAPI_NodeId> child_node_ids(child_count);
ENSURE_SUCCESS(HAPI_GetComposedChildNodeList(
&session, top_network_id, child_node_ids.data(), child_count));
HAPI_NodeId geoimport_id = -1;
std::string geoimport_name = "geometryimport1";
// Find the ID of the geometry import node. This makes it possible to cook just that particular TOP node, if needed.
for (HAPI_NodeId child_id : child_node_ids)
{
HAPI_NodeInfo child_node_info;
ENSURE_SUCCESS(HAPI_GetNodeInfo(&session, child_id, &child_node_info));
std::string child_name = getString(child_node_info.nameSH);
std::cout << "TOP node name: " << child_name << std::endl;
if (child_name.compare(geoimport_name) == 0)
{
geoimport_id = child_id;
}
}
assert(geoimport_id != -1);
// Do a PDG cook of the geometry import TOP node, in non-blocking mode (last argument 0 = non-blocking)
ENSURE_SUCCESS(HAPI_CookPDG(&session, geoimport_id, 0, 0));
int num_contexts = 0;
// While it's cooking, check the PDG events for each graph context
// until the cook has finished or errored
std::vector<HAPI_PDG_EventInfo> pdg_events;
bool finished = false;
do
{
std::this_thread::sleep_for(std::chrono::milliseconds(100));
// Always query the number of graph contexts each time
ENSURE_SUCCESS(HAPI_GetPDGGraphContextsCount(&session, &num_contexts));
std::vector<HAPI_StringHandle> context_names(num_contexts);
std::vector<HAPI_PDG_GraphContextId> context_ids(num_contexts);
ENSURE_SUCCESS(HAPI_GetPDGGraphContexts(&session,
context_names.data(), context_ids.data(), 0, num_contexts));
for (int c = 0; c < num_contexts; c++)
{
HAPI_PDG_GraphContextId cook_context = context_ids[c];
// Check for new events
std::vector<HAPI_PDG_EventInfo> event_infos(32);
int drained = 0, leftOver = 0;
ENSURE_SUCCESS(HAPI_GetPDGEvents(&session, cook_context,
event_infos.data(), 32, &drained,
&leftOver));
// Loop over the acquired events
for (int i = 0; i < drained; i++)
{
switch (event_infos[i].eventType)
{
// Events that don't need handling here
case HAPI_PDG_EVENT_WORKITEM_ADD:
{
break;
}
case HAPI_PDG_EVENT_WORKITEM_REMOVE:
{
break;
}
case HAPI_PDG_EVENT_COOK_WARNING:
{
break;
}
// Stop polling once the cook has completed or errored
case HAPI_PDG_EVENT_COOK_COMPLETE:
case HAPI_PDG_EVENT_COOK_ERROR:
{
finished = true;
break;
}
// A work item changed state; check whether it cooked successfully
case HAPI_PDG_EVENT_WORKITEM_STATE_CHANGE:
{
HAPI_PDG_WorkitemState current_state = (HAPI_PDG_WorkitemState)event_infos[i].currentState;
if (current_state == HAPI_PDG_WORKITEM_COOKED_SUCCESS || current_state == HAPI_PDG_WORKITEM_COOKED_CACHE)
{
HAPI_PDG_WorkitemInfo workitem_info;
ENSURE_SUCCESS(HAPI_GetWorkitemInfo(&session, cook_context, event_infos[i].workitemId, &workitem_info));
if (workitem_info.numResults > 0)
{
// Acquire the result tag and path
HAPI_PDG_WorkitemResultInfo *result_infos = new HAPI_PDG_WorkitemResultInfo[workitem_info.numResults];
ENSURE_SUCCESS(HAPI_GetWorkitemResultInfo(&session, event_infos[i].nodeId,
event_infos[i].workitemId, result_infos, workitem_info.numResults));
std::cout << "Result: Tag=" << getString(result_infos[0].resultTagSH) << "; Path="
<< getString(result_infos[0].resultSH) << std::endl;
// Can now load the result, if tagged as file (e.g. Bgeo files can be loaded via HAPI_LoadGeoFromFile)
delete[] result_infos;
}
}
break;
}
default:
break;
}
}
}
} while (!finished);
ENSURE_SUCCESS(HAPI_Cleanup(&session));
ENSURE_SUCCESS(HAPI_Shutdown(&session));
ENSURE_SUCCESS(HAPI_CloseSession(&session));
return 0;
}
static std::string
getLastError()
{
int bufferLength;
HAPI_GetStatusStringBufLength(nullptr, HAPI_STATUS_CALL_RESULT, HAPI_STATUSVERBOSITY_ERRORS, &bufferLength);
char * buffer = new char[bufferLength];
HAPI_GetStatusString(nullptr, HAPI_STATUS_CALL_RESULT, buffer, bufferLength);
std::string result(buffer);
delete[] buffer;
return result;
}
static std::string
getLastCookError()
{
int bufferLength;
HAPI_GetStatusStringBufLength(nullptr, HAPI_STATUS_COOK_RESULT, HAPI_STATUSVERBOSITY_ERRORS, &bufferLength);
char * buffer = new char[bufferLength];
HAPI_GetStatusString(nullptr, HAPI_STATUS_COOK_RESULT, buffer, bufferLength);
std::string result(buffer);
delete[] buffer;
return result;
}
static std::string
getString(HAPI_StringHandle stringHandle)
{
if (stringHandle == 0)
{
return "";
}
int bufferLength;
ENSURE_SUCCESS(HAPI_GetStringBufLength(nullptr,
stringHandle,
&bufferLength));
if (bufferLength > 0)
{
char * buffer = new char[bufferLength];
HAPI_GetString(nullptr, stringHandle, buffer, bufferLength);
std::string result(buffer);
delete[] buffer;
return result;
}
else
{
return "";
}
}
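
As the comment in the event loop notes, a result tagged as a file (for example a bgeo written by the geometry import node) can be loaded back into the session. Below is a minimal sketch, assuming session is the session from the sample above, sop_node_id is a SOP node that already exists in that session, and result_path is the path string read from the work item result info; all three names are placeholders.

// Sketch: load a file-tagged work item result into an existing SOP node.
// sop_node_id and result_path are placeholders; result_path would be the string
// read from result_infos[0].resultSH in the event loop above.
ENSURE_SUCCESS(HAPI_LoadGeoFromFile(&session, sop_node_id, result_path.c_str()));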

PDG Cooking With Results

Below is a code snippet that queries the TOP nodes in a Houdini digital asset, executes a blocking PDG cook of a particular TOP node, then acquires the work item results.

#include <HAPI/HAPI.h>
#include <iostream>
#include <string>
#include <vector>
#include <cassert>
#define ENSURE_SUCCESS( result ) \
if ( (result) != HAPI_RESULT_SUCCESS ) \
{ \
std::cout << "Failure at " << __FILE__ << ": " << __LINE__ << std::endl; \
std::cout << getLastError() << std::endl; \
exit( 1 ); \
}
#define ENSURE_COOK_SUCCESS( result ) \
if ( (result) != HAPI_RESULT_SUCCESS ) \
{ \
std::cout << "Failure at " << __FILE__ << ": " << __LINE__ << std::endl; \
std::cout << getLastCookError() << std::endl; \
exit( 1 ); \
}
static std::string getLastError();
static std::string getLastCookError();
static std::string getString(HAPI_StringHandle stringHandle);
int
main(int argc, char **argv)
{
const char * hda_file = argc == 2 ? argv[1] : "examples/top_sphere_mountain.hda";
HAPI_CookOptions cook_options = HAPI_CookOptions_Create();
HAPI_Session session;
// Create an in-process session (recent HAPI versions also take a HAPI_SessionInfo argument here)
ENSURE_SUCCESS(HAPI_CreateInProcessSession(&session));
ENSURE_SUCCESS(HAPI_Initialize(&session,
&cook_options,
true,
-1,
nullptr,
nullptr,
nullptr,
nullptr,
nullptr));
// Load the HDA
HAPI_AssetLibraryId asset_lib_id;
ENSURE_SUCCESS(HAPI_LoadAssetLibraryFromFile(&session, hda_file, true, &asset_lib_id));
int asset_count;
ENSURE_SUCCESS(HAPI_GetAvailableAssetCount(&session, asset_lib_id, &asset_count));
HAPI_StringHandle assetsh;
ENSURE_SUCCESS(HAPI_GetAvailableAssets(&session, asset_lib_id, &assetsh, asset_count));
std::string asset_name = getString(assetsh);
HAPI_NodeId asset_node_id;
ENSURE_SUCCESS(HAPI_CreateNode(&session, -1, asset_name.c_str(), nullptr, true, &asset_node_id));
// Do a regular node cook
ENSURE_SUCCESS(HAPI_CookNode(&session, asset_node_id, &cook_options));
int cook_status;
HAPI_Result cook_result;
do
{
cook_result = HAPI_GetStatus(&session, HAPI_STATUS_COOK_STATE, &cook_status);
} while (cook_status > HAPI_STATE_MAX_READY_STATE && cook_result == HAPI_RESULT_SUCCESS);
ENSURE_SUCCESS(cook_result);
ENSURE_COOK_SUCCESS(cook_status);
// Get the TOP Network node, which is the only child of the asset node
int network_count = 0;
ENSURE_SUCCESS(HAPI_ComposeChildNodeList(
&session, asset_node_id, HAPI_NODETYPE_TOP,
HAPI_NODEFLAGS_NETWORK, true, &network_count));
assert(network_count == 1);
std::vector<HAPI_NodeId> network_ids(network_count);
ENSURE_SUCCESS(HAPI_GetComposedChildNodeList(
&session, asset_node_id, network_ids.data(), network_count));
// Now get the TOP node children of the TOP Network node
HAPI_NodeId top_network_id = network_ids[0];
HAPI_NodeInfo node_info;
ENSURE_SUCCESS(HAPI_GetNodeInfo(&session, top_network_id, &node_info));
std::string name = getString(node_info.nameSH);
assert(name == "topnet1");
// Get all TOP nodes but not schedulers
int child_count = 0;
ENSURE_SUCCESS(HAPI_ComposeChildNodeList(
&session, top_network_id, HAPI_NODETYPE_TOP, HAPI_NODEFLAGS_TOP_NONSCHEDULER,
true, &child_count));
assert(child_count == 2);
std::vector<HAPI_NodeId> child_node_ids(child_count);
ENSURE_SUCCESS(HAPI_GetComposedChildNodeList(
&session, top_network_id, child_node_ids.data(), child_count));
HAPI_NodeId geoimport_id = -1;
std::string geoimport_name = "geometryimport1";
// Find the ID of the geometry import node. This makes it possible to cook just that particular TOP node, if needed.
for (HAPI_NodeId child_id : child_node_ids)
{
HAPI_NodeInfo child_node_info;
ENSURE_SUCCESS(HAPI_GetNodeInfo(&session, child_id, &child_node_info));
std::string child_name = getString(child_node_info.nameSH);
std::cout << "TOP node name: " << child_name << std::endl;
if (child_name.compare(geoimport_name) == 0)
{
geoimport_id = child_id;
}
}
assert(geoimport_id != -1);
// Do a blocking PDG cook of the geometry import TOP node (last argument 1 = blocking)
ENSURE_SUCCESS(HAPI_CookPDG(&session, geoimport_id, 0, 1));
// Query the work items after cooking, using HAPI_GetPDGGraphContextId to get the node's graph context
HAPI_PDG_GraphContextId top_context_id = -1;
ENSURE_SUCCESS(HAPI_GetPDGGraphContextId(&session, geoimport_id, &top_context_id));
int num_items = 0;
ENSURE_SUCCESS(HAPI_GetNumWorkitems(&session, geoimport_id, &num_items));
assert(num_items == 5);
HAPI_PDG_WorkitemId *workitem_ids = new HAPI_PDG_WorkitemId[num_items];
ENSURE_SUCCESS(HAPI_GetWorkitems(&session, geoimport_id, workitem_ids, num_items));
for (int i = 0; i < num_items; i++)
{
HAPI_PDG_WorkitemInfo workitem_info;
ENSURE_SUCCESS(HAPI_GetWorkitemInfo(&session, top_context_id, workitem_ids[i], &workitem_info));
HAPI_PDG_WorkitemResultInfo *result_infos = new HAPI_PDG_WorkitemResultInfo[workitem_info.numResults];
ENSURE_SUCCESS(HAPI_GetWorkitemResultInfo(&session, geoimport_id, workitem_ids[i], result_infos, workitem_info.numResults));
std::cout << "Result: Tag=" << getString(result_infos[0].resultTagSH) << "; Path="
<< getString(result_infos[0].resultSH) << std::endl;
delete[] result_infos;
}
delete[] workitem_ids;
ENSURE_SUCCESS(HAPI_Cleanup(&session));
ENSURE_SUCCESS(HAPI_Shutdown(&session));
ENSURE_SUCCESS(HAPI_CloseSession(&session));
return 0;
}
static std::string
getLastError()
{
int bufferLength;
HAPI_GetStatusStringBufLength(nullptr, HAPI_STATUS_CALL_RESULT, HAPI_STATUSVERBOSITY_ERRORS, &bufferLength);
char * buffer = new char[bufferLength];
HAPI_GetStatusString(nullptr, HAPI_STATUS_CALL_RESULT, buffer, bufferLength);
std::string result(buffer);
delete[] buffer;
return result;
}
static std::string
getLastCookError()
{
int bufferLength;
HAPI_GetStatusStringBufLength(nullptr, HAPI_STATUS_COOK_RESULT, HAPI_STATUSVERBOSITY_ERRORS, &bufferLength);
char * buffer = new char[bufferLength];
HAPI_GetStatusString(nullptr, HAPI_STATUS_COOK_RESULT, buffer, bufferLength);
std::string result(buffer);
delete[] buffer;
return result;
}
static std::string
getString(HAPI_StringHandle stringHandle)
{
if (stringHandle == 0)
{
return "";
}
int bufferLength;
ENSURE_SUCCESS(HAPI_GetStringBufLength(nullptr,
stringHandle,
&bufferLength));
if (bufferLength > 0)
{
char * buffer = new char[bufferLength];
HAPI_GetString(nullptr, stringHandle, buffer, bufferLength);
std::string result(buffer);
delete[] buffer;
return result;
}
else
{
return "";
}
}
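
If the same TOP node needs to be cooked again from scratch (for example after changing a parameter), it can first be dirtied so that existing results are discarded, as the next sample also does. Below is a minimal sketch reusing session and geoimport_id from the snippet above.

// Sketch: dirty the TOP node, removing its cached results, then re-cook it in blocking mode.
ENSURE_SUCCESS(HAPI_DirtyPDGNode(&session, geoimport_id, true));
ENSURE_SUCCESS(HAPI_CookPDG(&session, geoimport_id, 0, 1));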

Creating a TOP Graph

Below is a code snippet that dynamically creates a TOP graph in a Houdini Engine session, creates work items, then sets and gets data from work items.

#include <HAPI/HAPI.h>
#include <iostream>
#include <string>
#include <vector>
#include <cassert>
#include <cstring>
#define ENSURE_SUCCESS( result ) \
if ( (result) != HAPI_RESULT_SUCCESS ) \
{ \
std::cout << "Failure at " << __FILE__ << ": " << __LINE__ << std::endl; \
std::cout << getLastError() << std::endl; \
exit( 1 ); \
}
#define ENSURE_COOK_SUCCESS( result ) \
if ( (result) != HAPI_RESULT_SUCCESS ) \
{ \
std::cout << "Failure at " << __FILE__ << ": " << __LINE__ << std::endl; \
std::cout << getLastCookError() << std::endl; \
exit( 1 ); \
}
static std::string getLastError();
static std::string getLastCookError();
static std::string getString(HAPI_StringHandle stringHandle);
int
main(int argc, char **argv)
{
HAPI_CookOptions cook_options = HAPI_CookOptions_Create();
HAPI_Session session;
// Create an in-process session (recent HAPI versions also take a HAPI_SessionInfo argument here)
ENSURE_SUCCESS(HAPI_CreateInProcessSession(&session));
ENSURE_SUCCESS(HAPI_Initialize(&session,
&cook_options,
true,
-1,
nullptr,
nullptr,
nullptr,
nullptr,
nullptr));
// Create a TOP network, then a Generic Generator connected to a Text Output TOP node.
// Then create work items, then set and get values.
HAPI_NodeId topnet_id;
ENSURE_SUCCESS(HAPI_CreateNode(&session, -1, "Object/topnet", nullptr, true, &topnet_id));
// Cook it regularly
ENSURE_SUCCESS(HAPI_CookNode(&session, topnet_id, &cook_options));
int cook_status;
HAPI_Result cook_result;
do
{
cook_result = HAPI_GetStatus(&session, HAPI_STATUS_COOK_STATE, &cook_status);
} while (cook_status > HAPI_STATE_MAX_READY_STATE && cook_result == HAPI_RESULT_SUCCESS);
ENSURE_SUCCESS(cook_result);
ENSURE_COOK_SUCCESS(cook_status);
HAPI_NodeId generator_id;
ENSURE_SUCCESS(HAPI_CreateNode(&session, topnet_id, "genericgenerator", nullptr, false, &generator_id));
HAPI_NodeId textoutput_id;
ENSURE_SUCCESS(HAPI_CreateNode(&session, topnet_id, "textoutput", nullptr, false, &textoutput_id));
ENSURE_SUCCESS(HAPI_ConnectNodeInput(&session, textoutput_id, 0, generator_id, 0));
// Setting the display flag is useful when cooking the TOP network as a whole instead of a specific TOP node.
ENSURE_SUCCESS(HAPI_SetNodeDisplay(&session, textoutput_id, 1));
ENSURE_SUCCESS(HAPI_CookNode(&session, topnet_id, &cook_options));
do
{
cook_result = HAPI_GetStatus(&session, HAPI_STATUS_COOK_STATE, &cook_status);
} while (cook_status > HAPI_STATE_MAX_READY_STATE && cook_result == HAPI_RESULT_SUCCESS);
ENSURE_SUCCESS(cook_result);
ENSURE_COOK_SUCCESS(cook_status);
ENSURE_SUCCESS(HAPI_CookPDG(&session, textoutput_id, 0, 1));
int num_items = 0;
// By default, the generic generator and the text output each have 1 work item.
ENSURE_SUCCESS(HAPI_GetNumWorkitems(&session, textoutput_id, &num_items));
assert(num_items == 1);
HAPI_ParmId parm_id = -1;
// Update the text parm on the text output.
ENSURE_SUCCESS(HAPI_GetParmIdFromName(&session, textoutput_id, "text", &parm_id));
ENSURE_SUCCESS(HAPI_SetParmStringValue(&session, textoutput_id, "Work item index is `@pdg_index`.", parm_id, 0));
// Update the item count on the generic generator so that it generates 3 work items.
ENSURE_SUCCESS(HAPI_SetParmIntValue(&session, generator_id, "itemcount", 0, 3));
// Dirtying isn't required when simply changing a parm value, but we dirty here to remove the
// cached file results since the text output has been updated.
ENSURE_SUCCESS(HAPI_DirtyPDGNode(&session, generator_id, true));
// Cooking will generate files with the above text.
ENSURE_SUCCESS(HAPI_CookPDG(&session, textoutput_id, 0, 1));
ENSURE_SUCCESS(HAPI_GetNumWorkitems(&session, textoutput_id, &num_items));
assert(num_items == 3);
// Add a work item explicitly to the generic generator
HAPI_PDG_WorkitemId work_item_id;
ENSURE_SUCCESS(HAPI_CreateWorkitem(&session, generator_id, &work_item_id, "testwork1", num_items));
int val = 99;
float fvals[2] = { 2.f, 3.f };
const char *test_string = "This is a test string!";
// For the new work item, set integer, float array, and string values.
ENSURE_SUCCESS(HAPI_SetWorkitemIntData(&session, generator_id, work_item_id, "testInt", &val, 1));
ENSURE_SUCCESS(HAPI_SetWorkitemFloatData(&session, generator_id, work_item_id, "testFloat", fvals, 2));
ENSURE_SUCCESS(HAPI_SetWorkitemStringData(&session, generator_id, work_item_id, "testString", 0, test_string));
ENSURE_SUCCESS(HAPI_CommitWorkitems(&session, generator_id));
ENSURE_SUCCESS(HAPI_GetNumWorkitems(&session, generator_id, &num_items));
assert(num_items == 4);
int datalen = 0;
val = 0;
fvals[0] = fvals[1] = 0;
// Get work item integer value
ENSURE_SUCCESS(HAPI_GetWorkitemDataLength(&session, generator_id, work_item_id, "testInt", &datalen));
assert(datalen == 1);
ENSURE_SUCCESS(HAPI_GetWorkitemIntData(&session, generator_id, work_item_id, "testInt", &val, 1));
assert(val == 99);
// Get work item float value
ENSURE_SUCCESS(HAPI_GetWorkitemFloatData(&session, generator_id, work_item_id, "testFloat", fvals, 2));
assert(fvals[0] == 2.f && fvals[1] == 3.f);
// Get work item string value
ENSURE_SUCCESS(HAPI_GetWorkitemDataLength(&session, generator_id, work_item_id, "testString", &datalen));
assert(datalen == 1);
HAPI_StringHandle str_handle;
ENSURE_SUCCESS(HAPI_GetWorkitemStringData(&session, generator_id, work_item_id, "testString", &str_handle, datalen));
ENSURE_SUCCESS(HAPI_GetStringBufLength(&session, str_handle, &datalen));
assert(datalen == strlen(test_string) + 1);
std::vector<char> stringData(datalen + 1);
ENSURE_SUCCESS(HAPI_GetString(&session, str_handle, stringData.data(), datalen));
assert(strcmp(stringData.data(), test_string) == 0);
ENSURE_SUCCESS(HAPI_Cleanup(&session));
ENSURE_SUCCESS(HAPI_Shutdown(&session));
ENSURE_SUCCESS(HAPI_CloseSession(&session));
return 0;
}
static std::string
getLastError()
{
int bufferLength;
HAPI_GetStatusStringBufLength(nullptr, HAPI_STATUS_CALL_RESULT, HAPI_STATUSVERBOSITY_ERRORS, &bufferLength);
char * buffer = new char[bufferLength];
HAPI_GetStatusString(nullptr, HAPI_STATUS_CALL_RESULT, buffer, bufferLength);
std::string result(buffer);
delete[] buffer;
return result;
}
static std::string
getLastCookError()
{
int bufferLength;
HAPI_GetStatusStringBufLength(nullptr, HAPI_STATUS_COOK_RESULT, HAPI_STATUSVERBOSITY_ERRORS, &bufferLength);
char * buffer = new char[bufferLength];
HAPI_GetStatusString(nullptr, HAPI_STATUS_COOK_RESULT, buffer, bufferLength);
std::string result(buffer);
delete[] buffer;
return result;
}
static std::string
getString(HAPI_StringHandle stringHandle)
{
if (stringHandle == 0)
{
return "";
}
int bufferLength;
ENSURE_SUCCESS(HAPI_GetStringBufLength(nullptr,
stringHandle,
&bufferLength));
if (bufferLength > 0)
{
char * buffer = new char[bufferLength];
HAPI_GetString(nullptr, stringHandle, buffer, bufferLength);
std::string result(buffer);
delete[] buffer;
return result;
}
else
{
return "";
}
}
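
All three samples repeat the same polling loop after a regular HAPI_CookNode call. In a larger application that loop can be factored into a small helper; the sketch below uses the same macros and status calls as the samples above.

// Sketch: poll the cook state until the node is ready, mirroring the loops used in the samples.
static void
waitForCook(HAPI_Session *session)
{
int cook_status;
HAPI_Result cook_result;
do
{
cook_result = HAPI_GetStatus(session, HAPI_STATUS_COOK_STATE, &cook_status);
} while (cook_status > HAPI_STATE_MAX_READY_STATE && cook_result == HAPI_RESULT_SUCCESS);
ENSURE_SUCCESS(cook_result);
ENSURE_COOK_SUCCESS(cook_status);
}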