
downSample.C


#include "mpi.h"
#include "petsc.h"
#include "sys.h"
#include "TreeNode.h"
#include "parUtils.h"
#include "omg.h"
#include "oda.h"
#include <cstdlib>
#include "colors.h"
#include "externVars.h"
#include "dendro.h"

static char help[] = "Down-samples a point set: builds a 2:1 balanced octree and a multigrid DA hierarchy, then writes out the points of the coarsest grid.\n\n";
//We do not want MPI_Wtime to be globally synchronized, since we want to measure load imbalance.
#ifdef MPI_WTIME_IS_GLOBAL
#undef MPI_WTIME_IS_GLOBAL
#endif

int main(int argc, char ** argv ) {
  int size, rank;
  unsigned int numPts;
  char pFile[50],oFile[50];
  double gSize[3];
  unsigned int ptsLen;
  unsigned int maxNumPts= 1;
  unsigned int dim=3;
  unsigned int maxDepth=30;
  DendroIntL locSz, totalSz;
  std::vector<ot::TreeNode> linOct, balOct;
  std::vector<double> pts;

  PetscInitialize(&argc,&argv,"optionsDS",help);
  ot::RegisterEvents();

  ot::DAMG_Initialize(MPI_COMM_WORLD);

#ifdef PETSC_USE_LOG
  int stages[3];
  PetscLogStageRegister(&stages[0],"P2O.");
  PetscLogStageRegister(&stages[1],"Bal");
  PetscLogStageRegister(&stages[2],"Solve");
#endif

  MPI_Comm_size(MPI_COMM_WORLD,&size);
  MPI_Comm_rank(MPI_COMM_WORLD,&rank);
  if(argc < 2) {
    std::cerr << "Usage: " << argv[0]
      << " inpfile maxDepth[30] dim[3] maxNumPtsPerOctant[1]" << std::endl;
    return -1;
  }
  if(argc > 2) {
    maxDepth = atoi(argv[2]);
  }
  if(argc > 3) {
    dim = atoi(argv[3]);
  }
  if(argc > 4) {
    maxNumPts = atoi(argv[4]);
  }

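  // Per-rank file names: rank r of p reads "<inpfile>r_p.pts" and writes its
  // down-sampled points to "<inpfile>_Outr_p.pts".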
  sprintf(pFile,"%s%d_%d.pts",argv[1],rank,size);
  sprintf(oFile,"%s_Out%d_%d.pts",argv[1],rank,size);

  //Points2Octree....
  MPI_Barrier(MPI_COMM_WORLD);
  if(!rank){
    std::cout << " reading  "<<pFile<<std::endl; // Point size
  }
  ot::readPtsFromFile(pFile, pts);
  if(!rank){
    std::cout << " finished reading  "<<pFile<<std::endl; // Point size
  }
  MPI_Barrier(MPI_COMM_WORLD);
  ptsLen = pts.size();
  std::vector<ot::TreeNode> tmpNodes;
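  // Convert each input point in the open unit cube (0,1)^3 into an octant at
  // level maxDepth; points outside the cube, or whose scaled coordinates fall
  // outside the integer range, are discarded.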
  for(unsigned int i = 0; i < ptsLen; i += 3) {
    if( (pts[i] > 0.0) &&
        (pts[i+1] > 0.0) &&
        (pts[i+2] > 0.0) &&
        ( (static_cast<unsigned int>(pts[i]*(static_cast<double>(1u<<maxDepth)))) < (1u<<maxDepth)) &&
        ( (static_cast<unsigned int>(pts[i+1]*(static_cast<double>(1u<<maxDepth)))) < (1u<<maxDepth)) &&
        ( (static_cast<unsigned int>(pts[i+2]*(static_cast<double>(1u<<maxDepth)))) < (1u<<maxDepth)) ) {
      tmpNodes.push_back( ot::TreeNode(
            static_cast<unsigned int>(pts[i]*static_cast<double>(1u<<maxDepth)),
            static_cast<unsigned int>(pts[i+1]*static_cast<double>(1u<<maxDepth)),
            static_cast<unsigned int>(pts[i+2]*static_cast<double>(1u<<maxDepth)),
            maxDepth,dim,maxDepth) );
    }
  }
  pts.clear();
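  // Remove duplicate octants across all processes and re-partition the
  // survivors so every process holds roughly the same number (NULL weights
  // mean equal weight per octant).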
  par::removeDuplicates<ot::TreeNode>(tmpNodes,false,MPI_COMM_WORLD);
  linOct = tmpNodes;
  tmpNodes.clear();
  par::partitionW<ot::TreeNode>(linOct, NULL,MPI_COMM_WORLD);
  // reduce and only print the total ...
  locSz = linOct.size();
  par::Mpi_Reduce<DendroIntL>(&locSz, &totalSz, 1, MPI_SUM, 0, MPI_COMM_WORLD);
  MPI_Barrier(MPI_COMM_WORLD);
  if(rank==0) {
    std::cout<<"# pts= " << totalSz<<std::endl;
  }
  MPI_Barrier(MPI_COMM_WORLD);

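  // Re-create one representative point per surviving octant: its anchor plus
  // half of the finest-level cell width, mapped back into [0,1).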
  pts.resize(3*(linOct.size()));
  ptsLen = (3*(linOct.size()));
  for(unsigned int i = 0; i < linOct.size(); i++) {
    pts[3*i] = (((double)(linOct[i].getX())) + 0.5)/((double)(1u << maxDepth));
    pts[(3*i)+1] = (((double)(linOct[i].getY())) +0.5)/((double)(1u << maxDepth));
    pts[(3*i)+2] = (((double)(linOct[i].getZ())) +0.5)/((double)(1u << maxDepth));
  }//end for i
  linOct.clear();
  gSize[0] = 1.;
  gSize[1] = 1.;
  gSize[2] = 1.;

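  // Build a linear octree from the points, with at most maxNumPts points per
  // octant (logged as stage "P2O.").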
  MPI_Barrier(MPI_COMM_WORLD);
#ifdef PETSC_USE_LOG
  PetscLogStagePush(stages[0]);
#endif
  ot::points2Octree(pts, gSize, linOct, dim, maxDepth, maxNumPts, MPI_COMM_WORLD);
#ifdef PETSC_USE_LOG
  PetscLogStagePop();
#endif
  // reduce and only print the total ...
  locSz = linOct.size();
  par::Mpi_Reduce<DendroIntL>(&locSz, &totalSz, 1, MPI_SUM, 0, MPI_COMM_WORLD);
  if(rank==0) {
    std::cout<<"# of Unbalanced Octants: " << totalSz<<std::endl;
  }
  pts.clear();

  //Balancing: enforce the 2:1 balance constraint on the linear octree.
  MPI_Barrier(MPI_COMM_WORLD);
#ifdef PETSC_USE_LOG
  PetscLogStagePush(stages[1]);
#endif
  ot::balanceOctree (linOct, balOct, dim, maxDepth, true, MPI_COMM_WORLD, NULL, NULL);
#ifdef PETSC_USE_LOG
  PetscLogStagePop();
#endif
  linOct.clear();
  // compute total inp size and output size
  locSz = balOct.size();
  par::Mpi_Reduce<DendroIntL>(&locSz, &totalSz, 1, MPI_SUM, 0, MPI_COMM_WORLD);

  if(!rank) {
    std::cout << "# of Balanced Octants: "<< totalSz << std::endl;
  }

  MPI_Barrier(MPI_COMM_WORLD);
#ifdef PETSC_USE_LOG
  PetscLogStagePush(stages[2]);
#endif

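  // Build the multigrid hierarchy: nlevels octree meshes (DAs) obtained by
  // coarsening the balanced octree; the coarser grids are the down-sampled
  // versions of the input. The level count can be overridden with -nlevels.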
  ot::DAMG *damg;
  int nlevels = 1; //number of multigrid levels
  unsigned int dof = 1; //degrees of freedom per node

  PetscInt nlevelsPetscInt = nlevels;
  PetscOptionsGetInt(0, "-nlevels", &nlevelsPetscInt, 0);
  nlevels = nlevelsPetscInt;

  // Note: The user context for all levels will be set separately later.
  MPI_Barrier(MPI_COMM_WORLD);

  if(!rank) {
    std::cout<<"nlevels initial: "<<nlevels<<std::endl;
  }

  ot::DAMGCreateAndSetDA(PETSC_COMM_WORLD, nlevels, NULL, &damg,
      balOct, dof, 1.5, false, true);

  if(!rank) {
    std::cout<<"nlevels final: "<<nlevels<<std::endl;
  }

  MPI_Barrier(MPI_COMM_WORLD);

  if(!rank) {
    std::cout << "Created DA for all levels."<< std::endl;
  }

  MPI_Barrier(MPI_COMM_WORLD);
  ot::PrintDAMG(damg);
  MPI_Barrier(MPI_COMM_WORLD);

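  // damg[0] is the first (coarsest) level of the hierarchy; its DA describes
  // the down-sampled octree mesh.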
  ot::DA* dac = damg[0]->da;

  std::vector<double> outputPts;

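  // Walk the elements owned by this process (WRITABLE) and record each
  // element's anchor, converted from integer octree coordinates back to
  // [0,1)^3.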
  if(dac->iAmActive()) {
    double factor = (1.0/(static_cast<double>(1u << maxDepth)));
    for(dac->init<ot::DA_FLAGS::WRITABLE>();
        dac->curr() < dac->end<ot::DA_FLAGS::WRITABLE>();
        dac->next<ot::DA_FLAGS::WRITABLE>() ) {
      Point currPt = dac->getCurrentOffset();
      outputPts.push_back(currPt.x()*factor);
      outputPts.push_back(currPt.y()*factor);
      outputPts.push_back(currPt.z()*factor);
    }
  }

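  // Write this rank's down-sampled points to the output file.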
  ot::writePtsToFile(oFile, outputPts);
  outputPts.clear();

  DAMGDestroy(damg);

  if (!rank) {
    std::cout << GRN << "Destroyed DAMG" << NRM << std::endl;
  }

#ifdef PETSC_USE_LOG
  PetscLogStagePop();
#endif
  balOct.clear();

  if (!rank) {
    std::cout << GRN << "Finalizing ..." << NRM << std::endl;
  }
  ot::DAMG_Finalize();
  PetscFinalize();
}//end function

