Skip to content

Commit

Permalink
Merge pull request #2 from DBobkov/master
Browse files Browse the repository at this point in the history
Fixed parsing of command line options and added usage help
  • Loading branch information
aboulch authored Feb 3, 2017
2 parents 85420b0 + 1e67615 commit ff78368
Show file tree
Hide file tree
Showing 2 changed files with 24 additions and 12 deletions.
8 changes: 5 additions & 3 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -22,17 +22,19 @@ Pretrained networks can be found at [webpage](https://sites.google.com/view/boul

# Usage

HoughCNN_Exec [options] -m path_to_the_torch_model -i input_file.xyz
HoughCNN_Exec [options] -m path_to_the_torch_model -i input_file.xyz -c number_of_scales

Note: the input file must currently be in xyz format; such a file can be generated with Meshlab.

Note: the file predict.lua should be next to the executable
Note: the file predict.lua should be next to the executable.

Note: the number of scales has to be consistent with the model used (there are separate models for different numbers of scales).

# Example

A file cube_100k is located in the test directory.

HoughCNN_Exec [options] -m path_to_the_torch_model -i test/cube_100k.xyz
HoughCNN_Exec [options] -m path_to_the_torch_model -i test/cube_100k.xyz -c scale

# Author

Expand Down
28 changes: 19 additions & 9 deletions main.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,13 @@ using namespace std;

#include "houghCNN.h"

// Print the command-line usage summary for HoughCNN_Exec to standard output.
// Each line documents one flag accepted by the option parser in main().
void printHelp()
{
    // Adjacent string literals are concatenated by the compiler, so this
    // emits exactly the same text as a single long format string would.
    std::cout << "Usage: <executable>\n"
                 " -i\t <input file>\n"
                 " -o\t <output file>\n"
                 " -c\t <number of scales 1,3,5>\n"
                 " -m\t <model filename (has to be consistent with the number of scales)>\n"
                 " -k\t <neighborhood number of points>\n"
                 " -t\t <number of hypothesis>\n"
                 " -d\t <use_anisotropy_flag>\n"
                 " -s\t <accumulator size>\n"
                 " -e\t <anisotropy nbr of neighborhoods>\n"
              << std::endl; // trailing endl kept: original flushed here too
}

int main(int argc, char** argv){
srand (time(NULL));
Expand All @@ -62,26 +69,27 @@ int main(int argc, char** argv){
int k_density = 5;

int c;


opterr = 0;
while ((c = getopt (argc, argv, "i:o:m:k:t:d:p:r:a:e:")) != -1)
while ((c = getopt (argc, argv, "i:o:m:k:t:d:c:s:e")) != -1)
switch (c){
case 'i':{
input = optarg;
input = optarg; // input file
break;
}
case 'o':{
output = optarg;
output = optarg; // output file
break;
}
case 'm':{
model = optarg;
model = optarg;
break;
}
case 'k':{
stringstream sstr("");
sstr << optarg;
sstr >> k;
sstr >> k; // neighborhoods
break;
}
case 't':{
Expand Down Expand Up @@ -123,10 +131,12 @@ int main(int argc, char** argv){

if(input=="-1"){
cout << "Error need input file" << endl;
printHelp();
return 1;
}
if(model=="-1"){
cout << "Error need model file" << endl;
printHelp();
return 1;
}

Expand All @@ -136,7 +146,7 @@ int main(int argc, char** argv){

// load the point cloud
MatrixX3 pc, normals;
pc_load(input,pc);
pc_load(input, pc);

cout << "Point cloud size: " << pc.rows() << endl;
if(pc.rows() == 0){
Expand All @@ -145,7 +155,7 @@ int main(int argc, char** argv){
}

// create the estimator
NormEst ne(pc,normals);
NormEst ne(pc, normals);

ne.access_A() = s; // accumulator size
ne.access_T() = T; // number of hypothesis
Expand All @@ -165,10 +175,10 @@ int main(int argc, char** argv){
}

// estimation
ne.estimate(model,Ks, ua);
ne.estimate(model, Ks, ua);

// save the point cloud
pc_save(output,pc, normals);
pc_save(output, pc, normals);



Expand Down

0 comments on commit ff78368

Please sign in to comment.