#include "StdAfx.h" #include "MA_Voxel_Coding.h" // -------------------------------------------------------------------------------------------- // performAnalysis // // Description - Generates a final graph representing the medial axis // -> MA_cube needs to contain the medial axis (0 = axis, 1 = other) // -> SS_cube and MARK_cube will be changed // -------------------------------------------------------------------------------------------- ma_status MA_VoxelCoding::performAnalysis( char *output_prefix ) { progress_win_ptr->SetWindowText("Creating network graph for analysis..."); // Create graphs (each pixel will be a node) generateNetworkGraph( ); if ( number_of_graphs == 0 ) { write_to_log("MA_VoxelCoding::performAnalysis - 0 graphs found in sample"); return MA_NO_MEDIAL_AXIS_FOUND; } // Simplify graph to only include nodes with edge count != 2. Another words remove // all nodes that are not end points or branches. progress_win_ptr->SetWindowText("Simplifying network graph for analysis..."); write_to_log("MA_VoxelCoding::performAnalysis - Found %d graphs, now removing all 2 edge nodes.", number_of_graphs); mergeAllTwoDegreeNodes( ); // We now have the final Medial Axis in optimal Graph form, perform analysis. unsigned short bucket_size=5; unsigned int i = 0; fstream fout; char output_name[512]; // ------------------------------------------------------------------------- // Write network graph info files if requested if ( run_options & MA_SAVE_MA_GRAPH ) { MARK_cubes[TRAVELED]->clear(); // Mark final reduced graph write_to_log("MA_VoxelCoding::performAnalysis - Marking final reduced graph."); for ( i = 0; i < number_of_graphs; i++ ) { medial_axis_graphs_array[i].drawGraphInCube( MARK_cubes[TRAVELED], true ); } sprintf( output_name, "%s_Network_Graph", output_prefix ); MARK_cubes[TRAVELED]->writeToFile( output_name, true ); } if ( run_options & MA_SAVE_NETWORK_TXT ) { MARK_cubes[TRAVELED]->clear(); sprintf( output_name, "%s_Network_Graph.txt", output_prefix ); fout.open( output_name, ios::out ); if( !fout.is_open() ) { write_to_log("MA_VoxelCoding::performAnalysis - Could not open graph file '%s'.", output_name); } else { // Mark final reduced graph write_to_log("MA_VoxelCoding::performAnalysis - Outputing graph in text form."); for ( i = 0; i < number_of_graphs; i++ ) { fout << "#######################################################" << endl; fout << "### Graph number: " << i+1 << endl; medial_axis_graphs_array[i].writeToTextFile( fout ); fout << endl << endl; } fout.close(); } } // ------------------------------------------------------------------------- // Determine distributions progress_win_ptr->SetWindowText("Performing network analysis..."); write_to_log("MA_VoxelCoding::performAnalysis - Now generating distributions."); // Figure out how many cubes fit in for node count distribution unsigned short cube_size = (unsigned short)(node_distribution_cube_size_in_um/pixel_size_in_um); Vect3usi cur_coord; Vect3usi cube_offsets( (boundary_cube->wZ%cube_size)/2, (boundary_cube->wY%cube_size)/2, (boundary_cube->wX%cube_size)/2 ); // Adjust offset in case cube is bigger than input if ( boundary_cube->wZ < cube_size ) cube_offsets.z = 0; if ( boundary_cube->wY < cube_size ) cube_offsets.y = 0; if ( boundary_cube->wX < cube_size ) cube_offsets.x = 0; Vect3usi cube_limits( boundary_cube->wZ-cube_offsets.z-1, boundary_cube->wY-cube_offsets.y-1, boundary_cube->wX-cube_offsets.x-1 ); unsigned int cube_count = 
	unsigned int cube_count = (boundary_cube->wX/cube_size+1)*(boundary_cube->wY/cube_size+1)*(boundary_cube->wZ/cube_size+1);
	unsigned int cube_uncounted = 0, cube_max = 0;
	unsigned int *cube_count_array = new unsigned int[cube_count];
	memset( cube_count_array, 0, sizeof(unsigned int)*cube_count );

	// Find max_node_degree count, nodes per cube, and max_node_dist
	unsigned int max_node_dist = 0;
	unsigned int max_node_degree = 0;
	LinkedListNode<NetworkGraphNode> *cur_node_ptr;
	LinkedListNode<NetworkGraphEdge> *cur_edge_ptr;

	for ( i = 0; i < number_of_graphs; i++ )
	{
		// Degree check
		cur_node_ptr = medial_axis_graphs_array[i].nodes_in_set_list_ptr->getHeadPtr();
		while ( cur_node_ptr )
		{
			// Increment the node count for the cube that the coordinate falls into
			cur_coord = cur_node_ptr->item.coordinate_list.getHeadPtr()->item;
			if ( cur_coord.is_within_bounds(cube_offsets, cube_limits) )
			{
				cur_coord.x = cur_coord.x-cube_offsets.x;
				cur_coord.y = cur_coord.y-cube_offsets.y;
				cur_coord.z = cur_coord.z-cube_offsets.z;
				cur_coord.x = cur_coord.x/cube_size;
				cur_coord.y = cur_coord.y/cube_size;
				cur_coord.z = cur_coord.z/cube_size;
				++ cube_count_array[ ((int)cur_coord.z*(boundary_cube->wX/cube_size)*(boundary_cube->wY/cube_size)) +
				                     ((int)cur_coord.y*(boundary_cube->wX/cube_size)) +
				                     ((int)cur_coord.x) ];
			}
			else
			{
				++ cube_uncounted;
			}

			// Update max degree if needed
			if ( cur_node_ptr->item.edge_ptrs_list.getSize() > max_node_degree)
			{
				max_node_degree = cur_node_ptr->item.edge_ptrs_list.getSize();
			}
			cur_node_ptr = cur_node_ptr->next;
		}

		// Find largest edge length
		cur_edge_ptr = medial_axis_graphs_array[i].edges_in_set_list_ptr->getHeadPtr();
		while ( cur_edge_ptr )
		{
			if ( (unsigned int)cur_edge_ptr->item.length > max_node_dist)
			{
				max_node_dist = (unsigned int)cur_edge_ptr->item.length;
			}
			cur_edge_ptr = cur_edge_ptr->next;
		}
	}

	// Find the sub cube with the highest node density
	for (i = 0; i < cube_count; i++)
	{
		if ( cube_count_array[i] > cube_max )
		{
			cube_max = cube_count_array[i];
		}
	}

	// Now fill in distribution buckets
	unsigned int num_buckets_dist = (max_node_dist)/bucket_size + 1;
	unsigned int num_buckets_degr = (max_node_degree)+1;
	unsigned int edge_length_total = 0;
	unsigned int node_degree_total = 0;
	unsigned int *edge_length_distribution = new unsigned int[num_buckets_dist];
	memset( edge_length_distribution, 0, sizeof(unsigned int)*num_buckets_dist );
	unsigned int *node_degree_distribution = new unsigned int[num_buckets_degr];
	memset( node_degree_distribution, 0, sizeof(unsigned int)*num_buckets_degr );

	for ( i = 0; i < number_of_graphs; i++ )
	{
		// Degree
		cur_node_ptr = medial_axis_graphs_array[i].nodes_in_set_list_ptr->getHeadPtr();
		while ( cur_node_ptr )
		{
			++ node_degree_distribution[ cur_node_ptr->item.edge_ptrs_list.getSize() ];
			++ node_degree_total;
			cur_node_ptr = cur_node_ptr->next;
		}

		// Length
		cur_edge_ptr = medial_axis_graphs_array[i].edges_in_set_list_ptr->getHeadPtr();
		while (cur_edge_ptr)
		{
			if ( cur_edge_ptr->item.length > 0.0f )
			{
				++ edge_length_distribution[ (unsigned int)( cur_edge_ptr->item.length )/bucket_size ];
				++ edge_length_total;
			}
			cur_edge_ptr = cur_edge_ptr->next;
		}
	}

	unsigned int *node_count_distribution = new unsigned int[cube_max+1];
	memset( node_count_distribution, 0, sizeof(unsigned int)*(cube_max+1) );
	for ( i = 0; i < cube_count; i++ )
	{
		++ node_count_distribution[ cube_count_array[i] ];
	}

	// We now have all of the distributions, prepare to write to file
	sprintf(output_name, "%s_Medial_Axis_Analysis.csv", output_prefix);
	fout.open(output_name, ios::out);
	if( !fout.is_open() )
	{
		write_to_log("MA_VoxelCoding::performAnalysis - Write to File: cannot open output file '%s'.", output_name);

		// Free the distribution buffers before bailing out
		delete[] node_count_distribution;
		delete[] cube_count_array;
		delete[] node_degree_distribution;
		delete[] edge_length_distribution;
		return MA_FILE_OPEN_ERROR;
	}
	// Write out info and distributions
	write_to_log("MA_VoxelCoding::performAnalysis - Writing distributions to CSV file %s.", output_name);

	fout << "Pixel size:,,," << pixel_size_in_um << ",microns" << endl;
	fout << "Cube size:,,,(um)," << node_distribution_cube_size_in_um << ",,(pixels)," << cube_size << endl;
	fout << "Edge Nodes skipped: ,,," << cube_uncounted << ",,offset(xyz)," << cube_offsets.x <<","<< cube_offsets.y <<","<< cube_offsets.z << endl << endl;
	fout << "Number of failed point to point links:,,," << number_of_failed_ma_links << endl;
	fout << "Number of failed point to path links:,,," << number_of_failed_ma_path_links << endl;
	fout << "Number of volumes:,,," << number_of_volumes << endl;
	fout << "Number of final graphs:,,," << number_of_graphs << endl << endl;
	fout << "Edge Length (um),,,,Node Degree,,,,Node Density" << endl;

	unsigned int larger_array = (num_buckets_dist > num_buckets_degr) ? (num_buckets_dist) : (num_buckets_degr);
	if ( cube_max+1 > larger_array )
	{
		larger_array = cube_max+1;
	}

	for ( i = 0; i < larger_array; i++ )
	{
		if ( i < num_buckets_dist )
		{
			fout << pixel_size_in_um*bucket_size*(i+1) << "," << edge_length_distribution[i] << ","
			     << (float)edge_length_distribution[i]/edge_length_total;
		}
		else
		{
			fout << ",,";
		}
		fout << ",,";

		if ( i < num_buckets_degr )
		{
			fout << i << "," << node_degree_distribution[i] << ","
			     << (float)node_degree_distribution[i]/node_degree_total;
		}
		else
		{
			fout << ",,";
		}
		fout << ",,";

		if ( i <= cube_max )
		{
			fout << i << "," << node_count_distribution[i] << ","
			     << (float)node_count_distribution[i]/cube_count;
		}
		fout << endl;
	}

	// Clean up
	fout.close();
	delete[] node_count_distribution;
	delete[] cube_count_array;
	delete[] node_degree_distribution;
	delete[] edge_length_distribution;
	delete[] medial_axis_graphs_array;

	return MA_NO_ERRORS;
}
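// --------------------------------------------------------------------------------------------
// sketchFillFixedWidthHistogram
//
// Description - Minimal, stand-alone sketch of the fixed-width bucketing used for the edge
//               length distribution above (bucket index = value / bucket_size). It is NOT
//               called by the pipeline; the name, signature, and raw-array interface are
//               illustrative assumptions only.
// --------------------------------------------------------------------------------------------
static void sketchFillFixedWidthHistogram( const float *values, unsigned int value_count,
                                           unsigned short bucket_size,
                                           unsigned int *buckets, unsigned int bucket_count )
{
	for ( unsigned int v = 0; v < value_count; v++ )
	{
		// Zero (and negative) values are skipped, just as zero-length edges are skipped above
		if ( values[v] <= 0.0f )
			continue;

		unsigned int bucket = (unsigned int)values[v] / bucket_size;

		// Guard against values past the last bucket
		if ( bucket < bucket_count )
			++ buckets[bucket];
	}
}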
// --------------------------------------------------------------------------------------------
// generateNetworkGraph
//
// Description -
// Resources   - coordinate_queue
//               SS_cube
// --------------------------------------------------------------------------------------------
void MA_VoxelCoding::generateNetworkGraph( )
{
	Vect3usi cur_coord, end_coord;

	write_to_log("MA_VoxelCoding::generateNetworkGraph - Initializing and generating MA SS field.");

	// Restore SS cube
	SS_Xfrm_Axis->initXfrmCube( SS_cube );

	// Generate SS fields in all medial axes, and add each end point to the queue
	for ( cur_coord.z = 0; cur_coord.z < boundary_cube->wZ; cur_coord.z++ )
	{
		for ( cur_coord.y = 0; cur_coord.y < boundary_cube->wY; cur_coord.y++ )
			for ( cur_coord.x = 0; cur_coord.x < boundary_cube->wX; cur_coord.x++ )
			{
				// Make sure the spot is an axis, and doesn't already have an SS field in it
				if ( (!MARK_cubes[FINAL_MA]->get_spot(cur_coord)) && (SS_cube->datac(cur_coord) == ss_infinity ) )
				{
					end_coord = cur_coord;

					// Generate SS field
					if ( SS_Xfrm_Axis->runSS( SS_cube, end_coord, DT_NORMAL_XFRM ) != DT_NO_ERROR )
					{
						write_to_log( "MA_VoxelCoding::generateNetworkGraph - FATAL ERROR - SS transform failed." );
						doFullDump( MA_CRASH_ON_FATAL_ERROR );
					}

					coordinate_queue->insertAtEnd( end_coord );
				}
			}
	}

	// Now we know how many disconnected graphs we will need, allocate an array of graphs
	number_of_graphs = coordinate_queue->getSize();
	medial_axis_graphs_array = new NetworkGraph[number_of_graphs];

	// From each starting spot, we now need to create graphs
	NetworkGraphNode *cur_graph_node_ptr;

	write_to_log( "MA_VoxelCoding::generateNetworkGraph - Now creating %d graphs.", number_of_graphs );

	for ( unsigned int i = 0; i < number_of_graphs; i++ )
	{
		// Enable quick coordinate searching
		medial_axis_graphs_array[i].enableQuickCoordSearch( boundary_cube->dimensions );

		// Insert starting node
		cur_graph_node_ptr = medial_axis_graphs_array[i].createNewNode( coordinate_queue->getHeadPtr()->item );

		// Need to mark the first cluster with '1's, and add all coordinates for the Find Neighbors function
		current_graph_node = cur_graph_node_ptr;
		MARK_cubes[TRAVELED]->set_spot_1( cur_graph_node_ptr->coordinate_list.getHeadPtr()->item );
		recurseOnAllNeighbors( cur_graph_node_ptr->coordinate_list.getHeadPtr()->item,
		                       &MA_VoxelCoding::markTraveledAndAddCoordinateToGraphNode );
		medial_axis_graphs_array[i].updateQuickSearchMatrix( cur_graph_node_ptr );

		// Find neighbors of the current graph node, add/link to graph, add to queue (also marks neighbors with 1 in TRAVELED)
		findGraphNodesNeighbors( graph_node_queue, &medial_axis_graphs_array[i], cur_graph_node_ptr );

		// Repeat while we still have branches left, travel and link
		while ( graph_node_queue->getSize() > 0 )
		{
			cur_graph_node_ptr = graph_node_queue->getHeadPtr()->item;
			graph_node_queue->removeHeadNode();

			findGraphNodesNeighbors( graph_node_queue, &medial_axis_graphs_array[i], cur_graph_node_ptr );
		}

		// Root node might no longer be optimal, find another
		medial_axis_graphs_array[i].findRootNode();

		// No longer need quick search ability, free memory associated with it
		medial_axis_graphs_array[i].disableQuickCoordSearch( );

		// Remove item from starting points queue
		coordinate_queue->removeHeadNode();
	}
}

// --------------------------------------------------------------------------------------------
// markTraveledAndAddCoordinateToGraphNode
//
// Description - Floods the TRAVELED cube area that is connected with the same SS code,
//               AND adds all coordinates to the end of the GNODE's coordinate list.
// --------------------------------------------------------------------------------------------
bool MA_VoxelCoding::markTraveledAndAddCoordinateToGraphNode( Vect3usi &cur_coord, Vect3usi &nxt_coord )
{
	// Make sure we are in the same cluster
	if ( (!MARK_cubes[TRAVELED]->get_spot(nxt_coord)) && (SS_cube->datac(cur_coord) == SS_cube->datac(nxt_coord)) )
	{
		// Add to current graph
		current_graph_node->coordinate_list.insertAtEnd( nxt_coord );

		// Mark and return
		MARK_cubes[TRAVELED]->set_spot_1( nxt_coord );
		return true;
	}

	return false;
}
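// --------------------------------------------------------------------------------------------
// sketchFloodSameCode
//
// Description - Stand-alone sketch of the cluster flood performed above by recurseOnAllNeighbors()
//               with markTraveledAndAddCoordinateToGraphNode(): starting from a seed voxel (which
//               the caller marks visited first, as set_spot_1 does above), recursively visit
//               26-connected neighbors carrying the same SS code. Shown on a flat 3D array with
//               illustrative names and signature; it is an assumed sketch, not part of the
//               original pipeline.
// --------------------------------------------------------------------------------------------
static void sketchFloodSameCode( const unsigned char *codes, bool *visited,
                                 int nx, int ny, int nz,
                                 int x, int y, int z, unsigned char cluster_code )
{
	// Visit the 26-connected neighborhood of (x,y,z)
	for ( int dz = -1; dz <= 1; dz++ )
		for ( int dy = -1; dy <= 1; dy++ )
			for ( int dx = -1; dx <= 1; dx++ )
			{
				int px = x + dx, py = y + dy, pz = z + dz;

				// Stay inside the volume
				if ( px < 0 || py < 0 || pz < 0 || px >= nx || py >= ny || pz >= nz )
					continue;

				int idx = (pz*ny + py)*nx + px;		// flat index into the volume

				// Same cluster: not yet visited and carrying the same code (cf. the TRAVELED/SS checks above)
				if ( !visited[idx] && codes[idx] == cluster_code )
				{
					visited[idx] = true;
					sketchFloodSameCode( codes, visited, nx, ny, nz, px, py, pz, cluster_code );
				}
			}
}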
// --------------------------------------------------------------------------------------------
// findGraphNodesNeighbors
//
// Description - This assumes that the current SS_code in TRAVELED is set to '1', all valid neighbors '0'
// --------------------------------------------------------------------------------------------
void MA_VoxelCoding::findGraphNodesNeighbors( LinkedList<NetworkGraphNode*> *Q, NetworkGraph *G, NetworkGraphNode *N )
{
	char dz, dy, dx, xl = -1, xg = 1, yl = -1, yg = 1, zl = -1, zg = 1;
	float distance;
	Vect3usi cur_coord;
	NetworkGraphNode *tmp_gnode_ptr;
	LinkedListNode<NetworkGraphEdge*> *cur_edge_node_ptr;

	// For each coordinate that is part of the cluster...
	LinkedListNode<Vect3usi> *cur_coord_ptr = N->coordinate_list.getHeadPtr( );
	while ( cur_coord_ptr )
	{
		cur_coord = cur_coord_ptr->item;

		// Reset the neighborhood window for this coordinate, then clamp it at the cube boundaries
		xl = -1; xg = 1; yl = -1; yg = 1; zl = -1; zg = 1;
		if (cur_coord.x < 1) xl = 0;
		if (cur_coord.y < 1) yl = 0;
		if (cur_coord.z < 1) zl = 0;
		if (cur_coord.x > boundary_cube->wX-2) xg = 0;
		if (cur_coord.y > boundary_cube->wY-2) yg = 0;
		if (cur_coord.z > boundary_cube->wZ-2) zg = 0;

		for (dz = zl; dz <= zg; dz++)
			for (dy = yl; dy <= yg; dy++)
				for (dx = xl; dx <= xg; dx++)
				{
					// Make sure the coordinate is within the medial axis path
					cur_coord.from_zyx( cur_coord_ptr->item.z+dz, cur_coord_ptr->item.y+dy, cur_coord_ptr->item.x+dx );
					if ( !MARK_cubes[FINAL_MA]->get_spot(cur_coord) )
					{
						// Case 1: If the SS_cube code is different than current, and TRAV_cube is '0', the spot is a valid NEW neighbor
						if ( (!MARK_cubes[TRAVELED]->get_spot(cur_coord)) &&
						     (SS_cube->datac(cur_coord) != SS_cube->datac(cur_coord_ptr->item)) )
						{
							// Add the found neighbor to Q, link it in the graph to the current node
							distance = sqrt( (float)dz*dz + dy*dy + dx*dx );	// TODO: can use a square root table
							tmp_gnode_ptr = G->createNewNodeAndLink( cur_coord, N, distance);
							Q->insertAtEnd( tmp_gnode_ptr );

							// Mark the node's cluster (to prevent multiple node additions)
							current_graph_node = tmp_gnode_ptr;
							MARK_cubes[TRAVELED]->set_spot_1( cur_coord );
							recurseOnAllNeighbors( cur_coord, &MA_VoxelCoding::markTraveledAndAddCoordinateToGraphNode );
							G->updateQuickSearchMatrix( tmp_gnode_ptr );
						}
						// Case 2: The SS_cube code is different than current, and TRAVELED is '1', the spot has been traveled, but might not be linked
						else if ( (MARK_cubes[TRAVELED]->get_spot(cur_coord)) &&
						          (SS_cube->datac(cur_coord) != SS_cube->datac(cur_coord_ptr->item)) )
						{
							// First check if an edge to that spot already exists
							cur_edge_node_ptr = N->edge_ptrs_list.getHeadPtr();
							while ( cur_edge_node_ptr )
							{
								if ( G->doesCoordinateBelongToNode( cur_edge_node_ptr->item->getOtherSideNodePtr(N), cur_coord) == true )
								{
									break;
								}
								cur_edge_node_ptr = cur_edge_node_ptr->next;
							}

							// If we went through all edges without a match, find the node and create a new edge linking the nodes
							if ( cur_edge_node_ptr == NULL )
							{
								tmp_gnode_ptr = G->findNodeWithCoordinate( cur_coord );
								if ( tmp_gnode_ptr )
								{
									distance = sqrt( (float)dz*dz + dy*dy + dx*dx );	// TODO: can use a square root table
									G->createNewEdge( tmp_gnode_ptr, N, distance);
								}
								else
								{
									write_to_log("MA_VoxelCoding::findGraphNodesNeighbors - ERROR - Did not find node that contains desired coordinate.");
								}
							}
						}
					}
				}

		cur_coord_ptr = cur_coord_ptr->next;
	}
}
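// --------------------------------------------------------------------------------------------
// sketchCollapseTwoDegreeNode
//
// Description - Stand-alone sketch of the core step in mergeAllTwoDegreeNodes() below: a node
//               with exactly two incident edges is spliced out and its neighbors are joined by
//               a single edge whose length is the sum of the two originals (what
//               convertNodeToEdge() is assumed to do when it returns 0). The tiny adjacency
//               struct and its assumptions (compacted adjacency arrays, two distinct neighbors,
//               degree capped at 8) are illustrative only; the real code can refuse the
//               conversion (non-zero return), which this sketch does not model.
// --------------------------------------------------------------------------------------------
struct SketchNode
{
	int   neighbor[8];		// indices of adjacent nodes (this sketch caps the degree at 8)
	float length[8];		// length of the edge to neighbor[k]
	int   degree;			// number of live entries in the arrays above
	bool  removed;			// true once the node has been spliced out
};

static void sketchCollapseTwoDegreeNode( SketchNode *nodes, int n )
{
	// Only nodes with exactly two live edges are collapsed, mirroring the degree check below
	if ( nodes[n].removed || nodes[n].degree != 2 )
		return;

	int   a             = nodes[n].neighbor[0];
	int   b             = nodes[n].neighbor[1];
	float merged_length = nodes[n].length[0] + nodes[n].length[1];	// accumulate the edge distances

	// Re-point a's edge from n to b, and b's edge from n to a, carrying the merged length
	for ( int k = 0; k < nodes[a].degree; k++ )
	{
		if ( nodes[a].neighbor[k] == n )
		{
			nodes[a].neighbor[k] = b;
			nodes[a].length[k]   = merged_length;
		}
	}
	for ( int k = 0; k < nodes[b].degree; k++ )
	{
		if ( nodes[b].neighbor[k] == n )
		{
			nodes[b].neighbor[k] = a;
			nodes[b].length[k]   = merged_length;
		}
	}

	// The node keeps its storage but is no longer part of the graph
	nodes[n].removed = true;
	nodes[n].degree  = 0;
}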
// --------------------------------------------------------------------------------------------
// mergeAllTwoDegreeNodes
//
// Description -
// --------------------------------------------------------------------------------------------
void MA_VoxelCoding::mergeAllTwoDegreeNodes( )
{
	unsigned char mark_val = 1;
	bool nodes_removed;
	float combined_bs_vals;
	NetworkGraphNode *cur_node_ptr, *nei_node_ptr;
	LinkedListNode<NetworkGraphEdge> *cur_edge_ptr;
	LinkedListNode<NetworkGraphEdge*> *cur_edge_node_ptr;

	// For each disconnected graph...
	// 1) Merge any 2-degree nodes with neighboring nodes.
	// 2) Merge any greater than 2-degree nodes that are close
	//    to each other (within the higher BS value).
	for ( unsigned int i = 0; i < number_of_graphs; i++)
	{
		nodes_removed = false;

		// Start with the root node
		medial_axis_graphs_array[i].root_node_ptr->flags = mark_val;
		graph_node_queue->insertAtStart( medial_axis_graphs_array[i].root_node_ptr );

		// Travel down the graph and
		//  - add new nodes to Q
		//  - remove any 2-edge nodes
		while ( graph_node_queue->getSize() > 0 )
		{
			// Grab the current top node from the queue
			cur_node_ptr = graph_node_queue->getHeadPtr()->item;
			graph_node_queue->removeHeadNode();

			// Place any unmarked connected nodes into Q
			cur_edge_node_ptr = cur_node_ptr->edge_ptrs_list.getHeadPtr();
			while ( cur_edge_node_ptr )
			{
				nei_node_ptr = cur_edge_node_ptr->item->getOtherSideNodePtr( cur_node_ptr);
				if ( nei_node_ptr->flags != mark_val )
				{
					// Found an unmarked node, need to add it to the queue
					nei_node_ptr->flags = mark_val;
					graph_node_queue->insertAtEnd( nei_node_ptr );
				}
				cur_edge_node_ptr = cur_edge_node_ptr->next;
			}

			// If the node only has 2 edges, disconnect it, reconnecting neighbors and accumulating edge distances
			if ( cur_node_ptr->edge_ptrs_list.getSize() == 2 )
			{
				if ( medial_axis_graphs_array[i].convertNodeToEdge(cur_node_ptr) == 0 )
				{
					nodes_removed = true;
				}
			}
		}

		// Travel down the graph again, merging any nodes that are close to each other
		cur_edge_ptr = medial_axis_graphs_array[i].edges_in_set_list_ptr->getHeadPtr();
		while ( cur_edge_ptr )
		{
			// Combine the distances of each node from the nearest fiber
			combined_bs_vals = (float)( BS_cube->datac(cur_edge_ptr->item.node1_ptr->coordinate_list.getHeadPtr()->item) +
			                            BS_cube->datac(cur_edge_ptr->item.node2_ptr->coordinate_list.getHeadPtr()->item) );
			combined_bs_vals /= BS_Xfrm->getFaceValue( );

			// If the edge length is less than the combined BS distances, merge the node with its neighbor
			if ( (cur_edge_ptr->item.length <= combined_bs_vals ) && (cur_edge_ptr->item.length > 0.0f) )
			{
				// Merge towards the higher BS value
				if ( BS_cube->datac(cur_edge_ptr->item.node1_ptr->coordinate_list.getHeadPtr()->item) >
				     BS_cube->datac(cur_edge_ptr->item.node2_ptr->coordinate_list.getHeadPtr()->item) )
				{
					medial_axis_graphs_array[i].combineNeighboringNodes( cur_edge_ptr->item.node1_ptr,
					                                                     cur_edge_ptr->item.node2_ptr,
					                                                     &cur_edge_ptr->item );
				}
				else
				{
					medial_axis_graphs_array[i].combineNeighboringNodes( cur_edge_ptr->item.node2_ptr,
					                                                     cur_edge_ptr->item.node1_ptr,
					                                                     &cur_edge_ptr->item );
				}
				nodes_removed = true;

				// Because potentially a lot of edges were shuffled, go back to the first edge
				cur_edge_ptr = medial_axis_graphs_array[i].edges_in_set_list_ptr->getHeadPtr();
			}
			else
			{
				cur_edge_ptr = cur_edge_ptr->next;
			}
		}

		// More two-degree nodes might have been created by merging nearby nodes, so multiple passes are needed
		if ( nodes_removed )
		{
			mark_val = !mark_val;
			-- i;
		}
		else
		{
			mark_val = 1;
		}
	}
}
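// --------------------------------------------------------------------------------------------
// sketchShouldMergeNodes
//
// Description - Stand-alone restatement of the proximity test used in the second pass of
//               mergeAllTwoDegreeNodes() above: two linked nodes are merged when the edge
//               between them is no longer than the sum of their BS values divided by the BS
//               face value, and the edge has non-zero length. The name and signature are
//               illustrative assumptions; this helper is not called anywhere.
// --------------------------------------------------------------------------------------------
static bool sketchShouldMergeNodes( float edge_length, float bs_value_1, float bs_value_2, float bs_face_value )
{
	// Combined "radius" of the two nodes, in the same units as the edge length
	float combined_bs_vals = (bs_value_1 + bs_value_2) / bs_face_value;

	return (edge_length > 0.0f) && (edge_length <= combined_bs_vals);
}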