diff --git a/AMDiS/src/parallel/PetscSolverFeti.cc b/AMDiS/src/parallel/PetscSolverFeti.cc
index d07d20f16a713138a831d52f6cc6b25841b1d3c7..ccae7d27d6029522a8be81e61b830d4ff23f5092 100644
--- a/AMDiS/src/parallel/PetscSolverFeti.cc
+++ b/AMDiS/src/parallel/PetscSolverFeti.cc
@@ -677,6 +677,18 @@ namespace AMDiS {
 
     MatAssemblyEnd(mat_lagrange, MAT_FINAL_ASSEMBLY);
 
+    int nZeroRows = PetscSolverFetiDebug::testZeroRows(mat_lagrange);
+    int m, n;
+    MatGetSize(mat_lagrange, &m, &n);
+    MSG("Lagrange matrix has %d zero rows and global size of %d %d!\n", nZeroRows, m, n);
+
+    PetscViewer petscView;
+    PetscViewerBinaryOpen(PETSC_COMM_WORLD, "lagrange.mat",
+                          FILE_MODE_WRITE, &petscView);
+    MatView(mat_lagrange, petscView);
+    PetscViewerDestroy(&petscView);
+
+
     // === If required, create \ref mat_lagrange_scaled ===
 
     VecAssemblyBegin(vec_scale_lagrange);
@@ -797,6 +809,11 @@ namespace AMDiS {
     MatAssemblyBegin(mat_augmented_lagrange, MAT_FINAL_ASSEMBLY);
     MatAssemblyEnd(mat_augmented_lagrange, MAT_FINAL_ASSEMBLY);
 
+    int nZeroRows = PetscSolverFetiDebug::testZeroRows(mat_augmented_lagrange);
+    int m, n;
+    MatGetSize(mat_augmented_lagrange, &m, &n);
+    MSG("Augmented lagrange matrix has %d zero rows and global size of %d %d!\n", nZeroRows, m, n);
+
     if (printTimings) {
       MPI::COMM_WORLD.Barrier();
       MSG("FETI-DP timing 05a: %.5f seconds (creation of augmented lagrange constraint matrix)\n",
@@ -994,8 +1011,13 @@ namespace AMDiS {
       // matTmp = inv(A_BB) trans(J) trans(Q)
       Mat qT, jTqT;
       MatTranspose(mat_augmented_lagrange, MAT_INITIAL_MATRIX, &qT);
+      Mat jT;
+      MSG("START COMPUTING MAT TRANS\n");
+      MatTranspose(mat_lagrange, MAT_INITIAL_MATRIX, &jT);
+      MSG("DONE\n");
       MatTransposeMatMult(mat_lagrange, qT, MAT_INITIAL_MATRIX, PETSC_DEFAULT,
                           &jTqT);
+      MSG("DONE WITH THIS!\n");
       petsc_helper::blockMatMatSolve(subdomain->getSolver(), jTqT, matTmp);
       MatDestroy(&qT);
       MatDestroy(&jTqT);
@@ -1024,6 +1046,7 @@ namespace AMDiS {
       MatDestroy(&mat10);
       MatDestroy(&mat11);
       MatDestroy(&matTmp);
+      MSG("FINISHED!\n");
     } else {
       Mat tmp;
 
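Note: the helper PetscSolverFetiDebug::testZeroRows is called above but its body is not part of this diff. Purely as an illustrative sketch (the signature and every detail below are assumptions, not the actual AMDiS implementation), a zero-row check for a distributed PETSc matrix could be written like this:

    #include <petscmat.h>

    // Hypothetical sketch: count rows of a distributed Mat that contain no
    // nonzero entries, summed over all MPI ranks.
    int testZeroRows(Mat mat)
    {
      PetscInt firstRow, lastRow;
      MatGetOwnershipRange(mat, &firstRow, &lastRow);

      int nZeroRowsLocal = 0;
      for (PetscInt row = firstRow; row < lastRow; row++) {
        PetscInt nCols;
        const PetscScalar *vals;
        MatGetRow(mat, row, &nCols, NULL, &vals);

        // A row counts as zero if it has no stored entries or only zero values.
        bool allZero = true;
        for (PetscInt i = 0; i < nCols; i++) {
          if (vals[i] != 0.0) {
            allZero = false;
            break;
          }
        }
        if (allZero)
          nZeroRowsLocal++;

        MatRestoreRow(mat, row, &nCols, NULL, &vals);
      }

      // Sum the local counts so every rank reports the global number.
      int nZeroRowsGlobal = 0;
      MPI_Allreduce(&nZeroRowsLocal, &nZeroRowsGlobal, 1, MPI_INT, MPI_SUM,
                    PETSC_COMM_WORLD);
      return nZeroRowsGlobal;
    }

With a helper of this shape, the MSG output added in the diff would report the global count of empty Lagrange constraint rows alongside the matrix dimensions from MatGetSize.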