CArl
Code Arlequin / C++ implementation
PETSC_matrix_operations.cpp

void carl::lump_matrix( libMesh::PetscMatrix<libMesh::Number>& matrixInput,
                        libMesh::PetscMatrix<libMesh::Number>& matrixOutput)
{
    if(matrixOutput.initialized())
    {
        matrixOutput.clear();
    }

    int M = matrixInput.m();
    int N = matrixInput.n();

    homemade_assert_msg(M == N, "Lumping: the matrix must be a square matrix");

    PetscInt local_M, local_N;

    MatGetLocalSize(matrixInput.mat(),&local_M,&local_N);

    // The output is a diagonal matrix, so there is no need for a heavy preallocation
    matrixOutput.init(M,N,local_M,local_N,1,0);

    libMesh::PetscVector<libMesh::Number> UnityVec(matrixInput.comm(),M,local_M);
    libMesh::PetscVector<libMesh::Number> DummyVector(matrixInput.comm(),M,local_M);

    VecSet(UnityVec.vec(),1);

    UnityVec.close();

    matrixInput.vector_mult(DummyVector,UnityVec);

    MatDiagonalSet(matrixOutput.mat(),DummyVector.vec(),INSERT_VALUES);

    matrixOutput.close();
}

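/* Usage sketch (not part of the original file): lump_matrix implements the
 * classical row-sum lumping. Multiplying the matrix by the constant vector
 * of ones gives a vector whose i-th entry is sum_j M_ij, which is then set
 * as the diagonal of the output. A minimal call sequence, assuming "mass"
 * is an assembled, square libMesh::PetscMatrix (the names are hypothetical):
 *
 *   libMesh::PetscMatrix<libMesh::Number> lumped(mass.comm());
 *   carl::lump_matrix(mass, lumped);
 *   // lumped is now diagonal, with lumped(i,i) = sum_j mass(i,j)
 */
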
void carl::lump_matrix_and_invert( libMesh::PetscMatrix<libMesh::Number>& matrixInput,
                        libMesh::PetscMatrix<libMesh::Number>& matrixOutput)
{
    if(matrixOutput.initialized())
    {
        matrixOutput.clear();
    }

    int M = matrixInput.m();
    int N = matrixInput.n();

    homemade_assert_msg(M == N, "Lumping: the matrix must be a square matrix");

    PetscInt local_M, local_N;

    MatGetLocalSize(matrixInput.mat(),&local_M,&local_N);

    // The output is a diagonal matrix, so there is no need for a heavy preallocation
    matrixOutput.init(M,N,local_M,local_N,1,0);

    libMesh::PetscVector<libMesh::Number> UnityVec(matrixInput.comm(),M,local_M);
    libMesh::PetscVector<libMesh::Number> DummyVector(matrixInput.comm(),M,local_M);

    VecSet(UnityVec.vec(),1);

    UnityVec.close();

    matrixInput.vector_mult(DummyVector,UnityVec);

    DummyVector.reciprocal();

    MatDiagonalSet(matrixOutput.mat(),DummyVector.vec(),INSERT_VALUES);

    matrixOutput.close();
}

void carl::lump_matrix_and_invert( libMesh::PetscMatrix<libMesh::Number>& matrixInput,
                        libMesh::PetscVector<libMesh::Number>& vecOutput)
{
    if(vecOutput.initialized())
    {
        vecOutput.clear();
    }

    int M = matrixInput.m();
    int N = matrixInput.n();

    homemade_assert_msg(M == N, "Lumping: the matrix must be a square matrix");

    PetscInt local_M, local_N;

    MatGetLocalSize(matrixInput.mat(),&local_M,&local_N);

    // The output is a single vector, so there is no need for a heavy preallocation
    vecOutput.init(M,local_M,false);

    libMesh::PetscVector<libMesh::Number> UnityVec(matrixInput.comm(),M,local_M);

    VecSet(UnityVec.vec(),1);

    UnityVec.close();

    matrixInput.vector_mult(vecOutput,UnityVec);

    vecOutput.reciprocal();
}

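/* Note (added for clarity): the two lump_matrix_and_invert overloads build
 * the diagonal of the inverse of the lumped matrix, 1 / (sum_j M_ij),
 * either as a diagonal matrix or as a plain vector. Worked example for the
 * 2x2 matrix M = [[2, 1], [1, 3]]: the row sums are 3 and 4, so the result
 * is diag(1/3, 1/4). The reciprocal is only well-defined if no row sum is
 * zero, which holds for the mass matrices these helpers are meant for.
 */
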
void carl::print_matrix(libMesh::PetscMatrix<libMesh::Number>& CouplingTestMatrix)
{
    libMesh::Real accumulator = 0;
    const libMesh::Parallel::Communicator& MatrixComm = CouplingTestMatrix.comm();

    std::cout << "| M_i,j : " << CouplingTestMatrix.m() << " x " << CouplingTestMatrix.n() << std::endl;

    int nodes = MatrixComm.size();

    PetscInt local_M, local_N;
    MatGetLocalSize(CouplingTestMatrix.mat(),&local_M,&local_N);

    bool bPrintOnTerminal = CouplingTestMatrix.m() < 101 && CouplingTestMatrix.n() < 101 && nodes > 1;
    if(bPrintOnTerminal)
    {
        for(unsigned int iii = 0; iii < CouplingTestMatrix.m(); ++iii)
        {
            for(unsigned int jjj = 0; jjj < CouplingTestMatrix.n(); ++jjj)
            {
                std::cout << " " << CouplingTestMatrix(iii,jjj);
            }
            std::cout << std::endl;
        }
    }

    libMesh::PetscVector<libMesh::Number> dummy_vec(MatrixComm,CouplingTestMatrix.n(),local_N);
    MatGetRowSum(CouplingTestMatrix.mat(),dummy_vec.vec());

    VecSum(dummy_vec.vec(),&accumulator);
    std::cout << "|" << std::endl;
    std::cout << "| Sum( M_i,j ) = " << accumulator << std::endl << std::endl;
}

void carl::print_matrix_col_line_sum(libMesh::PetscMatrix<libMesh::Number>& CouplingTestMatrix, const std::string name_base)
{
    libMesh::PetscVector<libMesh::Number> col_sum(CouplingTestMatrix.comm(),CouplingTestMatrix.m());
    libMesh::PetscVector<libMesh::Number> row_sum(CouplingTestMatrix.comm(),CouplingTestMatrix.n());

    for(unsigned int iii = 0; iii < CouplingTestMatrix.m(); ++iii)
    {
        for(unsigned int jjj = 0; jjj < CouplingTestMatrix.n(); ++jjj)
        {
            col_sum.add(iii,CouplingTestMatrix(iii,jjj));
            row_sum.add(jjj,CouplingTestMatrix(iii,jjj));
        }
    }

// Print MATLAB debugging output? Variable defined at "carl_headers.h"
#ifdef PRINT_MATLAB_DEBUG
    col_sum.print_matlab(name_base + "_col.m");
    row_sum.print_matlab(name_base + "_row.m");
#endif
}

void carl::print_matrix_matlab(libMesh::PetscMatrix<libMesh::Number>& CouplingTestMatrix, const std::string name_base)
{
    std::cout << "| M_i,j : " << CouplingTestMatrix.m() << " x " << CouplingTestMatrix.n() << std::endl;

    CouplingTestMatrix.print_matlab(name_base);
}

void carl::print_matrix_dim(libMesh::PetscMatrix<libMesh::Number>& CouplingTestMatrix, bool bDetailed)
{
    std::cout << "| M_i,j : " << CouplingTestMatrix.m() << " x " << CouplingTestMatrix.n() << std::endl;
//  if(bDetailed)
//  {
    MatInfo temp_info;
    MatGetInfo(CouplingTestMatrix.mat(),MAT_LOCAL,&temp_info);
    std::cout << "| LOCAL  : memory = " << temp_info.memory << std::endl;
    std::cout << "|          non-zeros used = " << (100.*temp_info.nz_used)/temp_info.nz_allocated << " % " << std::endl;

//  int non_zeros = temp_info.nz_used;
//  std::vector<int> all_temp_info;
//  CouplingTestMatrix.comm().gather(0,non_zeros,all_temp_info);

//  if(CouplingTestMatrix.comm().rank() == 0)
//  {
//      std::cout << " | ";
//      for(unsigned int iii = 0; iii < CouplingTestMatrix.comm().size(); ++iii)
//      {
//          std::cout << all_temp_info[iii] << " ";
//      }
//      std::cout << std::endl << std::endl;
//  }

    MatGetInfo(CouplingTestMatrix.mat(),MAT_GLOBAL_SUM,&temp_info);
    std::cout << "| GLOBAL : memory = " << temp_info.memory << std::endl;
    std::cout << "|          non-zeros used = " << (100.*temp_info.nz_used)/temp_info.nz_allocated << " % " << std::endl;
    std::cout << "|          total nb. of non-zeros used      = " << temp_info.nz_used << std::endl;
    std::cout << "|          total nb. of non-zeros allocated = " << temp_info.nz_allocated << std::endl;
    std::cout << "|          sparsity = " << (100.*temp_info.nz_used)/(CouplingTestMatrix.m() * CouplingTestMatrix.n()) << " % " << std::endl;

    MatGetInfo(CouplingTestMatrix.mat(),MAT_GLOBAL_MAX,&temp_info);
    std::cout << "| MAX    : memory = " << temp_info.memory << std::endl;
    std::cout << "|          non-zeros used = " << (100.*temp_info.nz_used)/temp_info.nz_allocated << " % " << std::endl << std::endl;
//  }
}

void carl::print_matrix_info(libMesh::PetscMatrix<libMesh::Number>& InputMatrix, std::ostream & os)
{
    const libMesh::Parallel::Communicator& WorldComm = InputMatrix.comm();

    unsigned int rank = WorldComm.rank();
    unsigned int nodes = WorldComm.size();

    MatInfo local_info;
    MatInfo global_info;
    MatGetInfo(InputMatrix.mat(),MAT_LOCAL,&local_info);
    MatGetInfo(InputMatrix.mat(),MAT_GLOBAL_SUM,&global_info);

    // Set up local variables
    int l_nz_used = local_info.nz_used;
    int l_nz_allocated = local_info.nz_allocated;
    int l_memory = local_info.memory;

    int l_n = -1;
    int l_m = -1;

    double accumulator_n = 0;
    double accumulator_m = 0;

    MatGetLocalSize(InputMatrix.mat(),&l_n,&l_m);

    std::vector<int> full_nz_used;
    std::vector<int> full_nz_allocated;
    std::vector<int> full_memory;

    std::vector<int> full_n;
    std::vector<int> full_m;

    if(rank == 0)
    {
        full_nz_used.resize(nodes,0);
        full_nz_allocated.resize(nodes,0);
        full_memory.resize(nodes,0);
        full_n.resize(nodes,0);
        full_m.resize(nodes,0);
    }

    WorldComm.gather(0,l_nz_used,full_nz_used);
    WorldComm.gather(0,l_nz_allocated,full_nz_allocated);
    WorldComm.gather(0,l_memory,full_memory);
    WorldComm.gather(0,l_n,full_n);
    WorldComm.gather(0,l_m,full_m);

    if(rank == 0)
    {
        os << "# rank \t local_n \t local_m \t start_n \t start_m \t nz_alloc \t nz_used \t memory " << std::endl;
        for(unsigned int iii = 0; iii < nodes; ++iii)
        {
            os  << iii << " \t "
                << full_n[iii] << " \t "
                << full_m[iii] << " \t "
                << accumulator_n << " \t "
                << accumulator_m << " \t "
                << full_nz_allocated[iii] << " \t "
                << full_nz_used[iii] << " \t "
                << full_memory[iii] << std::endl;
            accumulator_n += full_n[iii];
            accumulator_m += full_m[iii];
        }
    }

    WorldComm.barrier();
}

void carl::solve_linear_PETSC( libMesh::PetscMatrix<libMesh::Number>& A,
                        libMesh::PetscVector<libMesh::Number>& b,
                        libMesh::PetscVector<libMesh::Number>& x,
                        KSP& ksp, PC& pc)
{
    /*
     * Solve the system A*x = b using PETSc's linear solver, using a
     * Krylov method, with the linear solver context "ksp" and
     * preconditioner "pc".
     */

    // Set up initial variables
}

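/* A minimal sketch of what the body could look like, using standard PETSc
 * calls (an assumption about the intended implementation, not original
 * CArl code):
 *
 *   KSPCreate(A.comm().get(), &ksp);
 *   KSPSetOperators(ksp, A.mat(), A.mat());
 *   KSPGetPC(ksp, &pc);
 *   KSPSetFromOptions(ksp);
 *   KSPSolve(ksp, b.vec(), x.vec());
 */
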
void carl::check_coupling_matrix( libMesh::PetscMatrix<libMesh::Number>& CouplingTestMatrix,
                        libMesh::Mesh& IntersectionMesh,
                        libMesh::Real CouplingScale,
                        const std::string matrixType,
                        int n_var)
{
    std::cout << "| " << matrixType << std::endl;
    libMesh::Real accumulator = 0;

    std::cout << "| M_i,j : " << CouplingTestMatrix.m() << " x " << CouplingTestMatrix.n() << std::endl;
    for(unsigned int iii = 0; iii < CouplingTestMatrix.m(); ++iii)
    {
        for(unsigned int jjj = 0; jjj < CouplingTestMatrix.n(); ++jjj)
        {
            accumulator += CouplingTestMatrix(iii,jjj);
        }
    }

    libMesh::Real vol = 0;
    libMesh::Elem* silly_elem;
    for(libMesh::MeshBase::element_iterator itBegin = IntersectionMesh.elements_begin();
            itBegin != IntersectionMesh.elements_end();
            ++itBegin)
    {
        silly_elem = *itBegin;
        vol += silly_elem->volume();
    }

    libMesh::Real difference = accumulator - n_var*CouplingScale*vol;

    std::cout << "|" << std::endl;
    std::cout << "| Sum( M_i,j )    = " << accumulator << std::endl;
    std::cout << "| n * C * Volume  = " << n_var*CouplingScale*vol << std::endl;
    std::cout << "| > Difference    = " << difference << std::endl << std::endl;
}

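/* Why this check works (added for clarity): each entry of the coupling
 * matrix is, up to the scale C, an integral of shape function products
 * over the intersection mesh, and the shape functions form a partition of
 * unity (sum_i phi_i = 1). Hence, per variable,
 *
 *   sum_{i,j} M_ij = C * int( sum_i phi_i * sum_j phi_j ) = C * vol,
 *
 * so the full sum should equal n_var * C * vol. This argument assumes an
 * L2-type coupling term; a large difference signals an assembly problem.
 */
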
void carl::write_PETSC_vector( Vec input_vec,
                        const std::string& filename, int rank, MPI_Comm comm, int dim)
{
    PetscViewer viewer;
    PetscViewerBinaryOpen(comm,filename.c_str(),FILE_MODE_WRITE,&viewer);
    VecView(input_vec,viewer);

    PetscViewerDestroy(&viewer);

    // Hack to guarantee the proper vector division without forcing libMesh's --enable-blocked-storage
    if(rank == 0)
    {
        std::ofstream vec_info(filename + ".info");
        vec_info << "-vecload_block_size " << std::to_string(dim) << std::endl;
        vec_info.close();
    }
}

void carl::write_PETSC_vector( libMesh::PetscVector<libMesh::Number>& input_vec,
                        const std::string& filename, int dim )
{
    write_PETSC_vector(input_vec.vec(),filename,input_vec.comm().rank(),input_vec.comm().get(),dim);
}

void carl::write_PETSC_vector_MATLAB( Vec input_vec,
                        const std::string& filename, MPI_Comm comm)
{
    PetscViewer viewer;
    PetscViewerASCIIOpen(comm,filename.c_str(),&viewer);
    PetscViewerPushFormat(viewer,PETSC_VIEWER_ASCII_MATLAB);
    VecView(input_vec,viewer);

    PetscViewerDestroy(&viewer);
}

void carl::write_PETSC_matrix_MATLAB( Mat input_mat, const std::string& filename, MPI_Comm comm)
{
    PetscViewer viewer;
    PetscViewerASCIIOpen(comm,filename.c_str(),&viewer);
    PetscViewerPushFormat(viewer,PETSC_VIEWER_ASCII_MATLAB);
    MatView(input_mat,viewer);

    PetscViewerDestroy(&viewer);
}

void carl::read_PETSC_vector( Vec input_vec,
                        const std::string& filename, MPI_Comm comm)
{
    PetscViewer viewer;
    PetscViewerBinaryOpen(comm,filename.c_str(),FILE_MODE_READ,&viewer);
    VecLoad(input_vec,viewer);

    PetscViewerDestroy(&viewer);
}

void carl::read_PETSC_vector( libMesh::PetscVector<libMesh::Number>& input_vec,
                        const std::string& filename)
{
    read_PETSC_vector(input_vec.vec(),filename,input_vec.comm().get());
}

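/* Round-trip usage sketch for the binary I/O helpers (file and variable
 * names are hypothetical). The ".info" file written next to the vector
 * lets PETSc recover the block size on load without requiring libMesh's
 * --enable-blocked-storage:
 *
 *   carl::write_PETSC_vector(disp_vec, "disp.petscvec", 3); // 3D field
 *   // ... later, into a vector initialized with a compatible layout:
 *   carl::read_PETSC_vector(other_vec, "disp.petscvec");
 */
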
void carl::attach_rigid_body_mode_vectors(libMesh::PetscMatrix<libMesh::Number>& mat_sys,
                        const std::string& filename_base, int nb_of_vecs, int dimension )
{
    // The modes are read into a fixed-size array: guard against overflow
    homemade_assert_msg(nb_of_vecs <= 6, "Cannot attach more than 6 rigid body mode vectors");

    // Vectors used to store the rigid body modes
    Vec rb_vecs[6];

    PetscInt local_N;
    MatGetLocalSize(mat_sys.mat(),NULL,&local_N);

    std::string vector_path;

    // Read the vectors
    for(int iii = 0; iii < nb_of_vecs; ++iii)
    {
        VecCreate(mat_sys.comm().get(),&rb_vecs[iii]);
        VecSetSizes(rb_vecs[iii],local_N,mat_sys.n());

        vector_path = filename_base + "_" + std::to_string(iii) + "_n_" + std::to_string(nb_of_vecs) + ".petscvec";
        read_PETSC_vector(rb_vecs[iii],vector_path,mat_sys.comm().get());
    }

    MatNullSpace nullsp_sys;
    MatNullSpaceCreate(mat_sys.comm().get(), PETSC_FALSE, nb_of_vecs, rb_vecs, &nullsp_sys);
    MatSetNullSpace(mat_sys.mat(),nullsp_sys);

    MatNullSpaceDestroy(&nullsp_sys);
    for(int iii = 0; iii < nb_of_vecs; ++iii)
    {
        VecDestroy(&rb_vecs[iii]);
    }
}

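/* Usage sketch (hypothetical file base): after assembling the stiffness
 * matrix of a floating 3D body, attach its 6 rigid body modes so that the
 * Krylov solver can work orthogonally to them:
 *
 *   carl::attach_rigid_body_mode_vectors(K_sys, "rb_modes/mode", 6, 3);
 *
 * PETSc expects the vectors passed to MatNullSpaceCreate to be orthonormal,
 * so the files are assumed to contain an orthonormalized basis of the
 * rigid body modes.
 */
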
void carl::write_PETSC_matrix( Mat input_mat,
                        const std::string& filename, int rank, MPI_Comm comm, int dim)
{
    PetscViewer viewer;
    PetscViewerBinaryOpen(comm,filename.c_str(),FILE_MODE_WRITE,&viewer);
    MatView(input_mat,viewer);

    PetscViewerDestroy(&viewer);

    // Hack to guarantee the proper matrix division without forcing libMesh's --enable-blocked-storage
    if(rank == 0)
    {
        std::ofstream mat_info(filename + ".info");
        mat_info << "-matload_block_size " << std::to_string(dim) << std::endl;
        mat_info.close();
    }
}

void carl::write_PETSC_matrix(libMesh::PetscMatrix<libMesh::Number>& input_mat,
                        const std::string& filename, int dim)
{
    write_PETSC_matrix(input_mat.mat(),filename,input_mat.comm().rank(),input_mat.comm().get(),dim);
}

void carl::read_PETSC_matrix( Mat input_mat,
                        const std::string& filename, MPI_Comm comm)
{
    PetscViewer viewer;
    PetscViewerBinaryOpen(comm,filename.c_str(),FILE_MODE_READ,&viewer);
    MatLoad(input_mat,viewer);

    PetscViewerDestroy(&viewer);
}

void carl::read_PETSC_matrix(libMesh::PetscMatrix<libMesh::Number>& input_mat,
                        const std::string& filename)
{
    read_PETSC_matrix(input_mat.mat(),filename,input_mat.comm().get());
}

void carl::create_PETSC_dense_matrix_from_vectors(const Vec *vecs_in, int nb_vecs, Mat& matrix_out)
{
    // Get the vectors' dimensions and create the matrix
    PetscInt M_local, M;
    VecGetLocalSize(vecs_in[0],&M_local);
    VecGetSize(vecs_in[0],&M);

    MatCreateDense(PETSC_COMM_WORLD,M_local,PETSC_DECIDE,M,nb_vecs,NULL,&matrix_out);

    // Get the ownership range and the row indices
    PetscInt own_low, own_high;
    VecGetOwnershipRange(vecs_in[0],&own_low,&own_high);

    std::vector<PetscInt> row(M_local,0);

    for(int iii = 0; iii < M_local; ++iii)
    {
        row[iii] = own_low + iii;
    }

    // Set the values, one (whole) column per input vector
    PetscScalar* data;
    for(int iii = 0; iii < nb_vecs; ++iii)
    {
        VecGetArray(vecs_in[iii],&data);
        MatSetValues(matrix_out,M_local,row.data(),1,&iii,data,INSERT_VALUES);
        VecRestoreArray(vecs_in[iii],&data);
    }

    // Assembly
    MatAssemblyBegin(matrix_out,MAT_FINAL_ASSEMBLY);
    MatAssemblyEnd(matrix_out,MAT_FINAL_ASSEMBLY);
}

void carl::PETSC_invert_dense_matrix(Mat& matrix_in, Mat& matrix_out)
{
    // WARNING: this operation is extremely expensive for large systems!
    //          Only use it for small matrices (example: the 6x6 or 3x3
    //          matrices coming from the null space projectors).

    Mat Id_mat;
    MatFactorInfo factor_info;
    IS rperm, cperm;

    // Duplicate the matrix data structures
    MatDuplicate(matrix_in,MAT_DO_NOT_COPY_VALUES,&matrix_out);
    MatDuplicate(matrix_in,MAT_DO_NOT_COPY_VALUES,&Id_mat);

    // Create the identity
    MatZeroEntries(Id_mat);
    MatShift(Id_mat,1);

    // Factor the input matrix
    MatGetOrdering(matrix_in,MATORDERINGNATURAL, &rperm, &cperm);
    MatFactorInfoInitialize(&factor_info);
    MatLUFactor(matrix_in,rperm,cperm,&factor_info);

    // Calculate the inverse
    MatMatSolve(matrix_in,Id_mat,matrix_out);

    // Reset the input matrix's factoring
    MatSetUnfactored(matrix_in);

    // Cleanup
    MatDestroy(&Id_mat);
}

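/* These two helpers are typically chained: pack a few global vectors into
 * a dense matrix with one column per vector, then invert a small product
 * of it explicitly. A sketch under the assumption that rb_vecs holds 6
 * rigid body mode vectors (names hypothetical; the inverted matrix is
 * only 6x6):
 *
 *   Mat RB, RBtRB, inv_RBtRB;
 *   carl::create_PETSC_dense_matrix_from_vectors(rb_vecs, 6, RB);
 *   MatTransposeMatMult(RB, RB, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &RBtRB);
 *   carl::PETSC_invert_dense_matrix(RBtRB, inv_RBtRB);
 */
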
void carl::PETSC_MatMultScale_Bcast(Mat mat_seq, Vec vec_seq_in, Vec vec_seq_out, double a_const)
{
    // Calculate the product vec_seq_out = a_const * (mat_seq * vec_seq_in)
    // on the first processor, and then sync the result

    // First, check that the matrix and the vectors are sequential
    MatType mat_type_query;
    VecType vec_in_type_query;
    VecType vec_out_type_query;

    MatGetType(mat_seq,&mat_type_query);
    VecGetType(vec_seq_in,&vec_in_type_query);
    VecGetType(vec_seq_out,&vec_out_type_query);

    homemade_assert_msg(std::strcmp(mat_type_query,MATSEQDENSE) == 0 ,"Matrix is not dense and sequential");
    homemade_assert_msg(std::strcmp(vec_in_type_query,VECSEQ) == 0 ,"Input vector is not sequential");
    homemade_assert_msg(std::strcmp(vec_out_type_query,VECSEQ) == 0 ,"Output vector is not sequential");

    // Get the vector dimension
    PetscInt vec_dim = 0;
    VecGetSize(vec_seq_in,&vec_dim);

    // Get the rank
    int rank;
    MPI_Comm_rank(PETSC_COMM_WORLD, &rank);

    // Set the dummy array pointer
    PetscScalar * dummy_seq_array;

    // Do the product on the first processor
    if(rank == 0)
    {
        MatMult(mat_seq,vec_seq_in,vec_seq_out);
        VecScale(vec_seq_out,a_const);
    }

    // Sync
    VecGetArray(vec_seq_out,&dummy_seq_array);
    MPI_Bcast(dummy_seq_array, vec_dim, MPIU_SCALAR, 0, PETSC_COMM_WORLD);
    MPI_Barrier(PETSC_COMM_WORLD);
    VecRestoreArray(vec_seq_out,&dummy_seq_array);
}
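
/* Usage pattern (added for clarity): every rank holds an identical
 * *sequential* copy of the matrix and vectors; rank 0 performs the product
 * y = a_const * (A * x), and the raw-array broadcast then makes all copies
 * of y consistent. A sketch, assuming mat_seq, x_seq and y_seq were created
 * on every rank with MatCreateSeqDense / VecCreateSeq:
 *
 *   carl::PETSC_MatMultScale_Bcast(mat_seq, x_seq, y_seq, 0.5);
 */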