// --- fragment: create_distributed_epetra_vector(const DoubleVector& oomph_vec) ---

// paranoid check: the oomph-lib vector must be built
if (!oomph_vec.built())
 {
  std::ostringstream error_message;
  error_message << "The oomph-lib vector (oomph_v) must be setup.";
  throw OomphLibError(error_message.str(),
                      OOMPH_CURRENT_FUNCTION,
                      OOMPH_EXCEPTION_LOCATION);
 }

// ... for a non-distributed input a temporary, uniformly distributed
// LinearAlgebraDistribution with oomph_vec.nrow() global rows is built
// (constructor call elided in this listing: ...,oomph_vec.nrow(),true);)
// and the matching Epetra_Map is created ...

// offset of the locally held block within the oomph-lib values array
offset = dist_pt->first_row();

// deep-copy the local values into a new Epetra_Vector
double* v_pt = const_cast<double*>(oomph_vec.values_pt());
Epetra_Vector* epetra_vec_pt =
 new Epetra_Vector(Copy,*epetra_map_pt,v_pt+offset);
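// A self-contained sketch (pure Epetra, not oomph-lib specific) of what the
// Copy-mode construction above does: the Epetra_Vector gets its own copy of
// the caller's data, so the source array can be modified or freed afterwards.
// Assumes Trilinos is configured with MPI.
#include <mpi.h>
#include <vector>
#include "Epetra_MpiComm.h"
#include "Epetra_Map.h"
#include "Epetra_Vector.h"

int main(int argc, char** argv)
{
 MPI_Init(&argc,&argv);
 {
  Epetra_MpiComm comm(MPI_COMM_WORLD);

  // uniform distribution of 100 global entries, index base 0
  Epetra_Map map(100,0,comm);

  // local data to be deep-copied into the Trilinos vector
  std::vector<double> local_values(map.NumMyElements(),1.0);
  Epetra_Vector ev(Copy,map,&local_values[0]);

  // the copy is independent of local_values from here on
  double norm = 0.0;
  ev.Norm2(&norm); // = sqrt(100) = 10 for the all-ones vector
 } // Epetra objects destroyed before MPI_Finalize
 MPI_Finalize();
 return 0;
}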
// --- fragment: overload creating an (uninitialised) Epetra_Vector from a
//     LinearAlgebraDistribution ---

// paranoid check: the distribution must be built
if (!dist_pt->built())
 {
  std::ostringstream error_message;
  error_message << "The LinearAlgebraDistribution dist_pt must be setup.";
  throw OomphLibError(error_message.str(),
                      OOMPH_CURRENT_FUNCTION,
                      OOMPH_EXCEPTION_LOCATION);
 }

// ... for a non-distributed input a temporary, uniformly distributed target
// distribution with dist_pt->nrow() global rows is built (constructor call
// elided in this listing: ...,dist_pt->nrow(),true);) and the matching
// Epetra_Map is created ...

// create the Epetra_Vector without zeroing its entries
Epetra_Vector* epetra_vec_pt =
 new Epetra_Vector(*epetra_map_pt,false);

// clean up
delete epetra_map_pt;
delete target_dist_pt;

return epetra_vec_pt;
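// A short sketch of the zeroOut flag used above: Epetra_Vector(Map,bool zeroOut)
// only initialises the entries to zero when zeroOut is true, so the variant
// constructed with 'false' must be filled before it is read. Pure Epetra;
// assumes an MPI-enabled Trilinos build.
#include <mpi.h>
#include "Epetra_MpiComm.h"
#include "Epetra_Map.h"
#include "Epetra_Vector.h"

void make_vectors(const Epetra_MpiComm& comm)
{
 Epetra_Map map(50,0,comm);

 Epetra_Vector zeroed(map);    // zeroOut defaults to true: all entries 0.0
 Epetra_Vector raw(map,false); // entries left uninitialised (faster)

 raw.PutScalar(2.5);           // caller is responsible for filling it
}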
// --- fragment: create_epetra_vector_view_data(DoubleVector& oomph_vec) ---

// paranoid check: the oomph-lib vector must be built
if (!oomph_vec.built())
 {
  std::ostringstream error_message;
  error_message << "The oomph-lib vector (oomph_v) must be setup.";
  throw OomphLibError(error_message.str(),
                      OOMPH_CURRENT_FUNCTION,
                      OOMPH_EXCEPTION_LOCATION);
 }

// ... create the Epetra_Map and get the values pointer v_pt (elided) ...

// wrap the oomph-lib data in place (View: no copy is made, the DoubleVector
// retains ownership of the values)
Epetra_Vector* epetra_vec_pt =
 new Epetra_Vector(View,*epetra_map_pt,v_pt);

// clean up and return
delete epetra_map_pt;
return epetra_vec_pt;
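// A sketch contrasting View mode (used just above) with Copy mode: a View
// wraps the caller's array in place, so writes through the Epetra_Vector are
// visible in the original data, and the array must outlive the wrapper.
// Pure Epetra; assumes an MPI-enabled Trilinos build.
#include <mpi.h>
#include <vector>
#include "Epetra_MpiComm.h"
#include "Epetra_Map.h"
#include "Epetra_Vector.h"

void view_versus_copy(const Epetra_MpiComm& comm)
{
 Epetra_Map map(20,0,comm);
 std::vector<double> data(map.NumMyElements(),1.0);

 Epetra_Vector view_vec(View,map,&data[0]); // shares data's memory
 Epetra_Vector copy_vec(Copy,map,&data[0]); // owns an independent copy

 view_vec.PutScalar(3.0); // data[] now holds 3.0 in every local entry
 copy_vec.PutScalar(7.0); // data[] is unaffected
}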
// --- fragment: copy_to_oomphlib_vector(const Epetra_Vector* epetra_vec_pt,
//     DoubleVector& oomph_vec) ---

// paranoid check: the oomph-lib vector must be built
if (!oomph_vec.built())
 {
  std::ostringstream error_message;
  error_message << "The oomph-lib vector (oomph_v) must be setup.";
  throw OomphLibError(error_message.str(),
                      OOMPH_CURRENT_FUNCTION,
                      OOMPH_EXCEPTION_LOCATION);
 }

// (declarations of v_values, nrow_local, first_row etc. are elided from this
// listing; in the gather branch below nrow_local and first_row are integer
// arrays with one entry per process)

// distributed oomph-lib vector: copy the locally held values directly
epetra_vec_pt->ExtractView(&v_values);
for (unsigned i = 0; i < nrow_local; i++)
 {
  oomph_vec[i] = v_values[i];
 }

// non-distributed oomph-lib vector:
int nproc = epetra_vec_pt->Map().Comm().NumProc();

// single process: a straight copy suffices
epetra_vec_pt->ExtractCopy(oomph_vec.values_pt());

// multiple processes: gather the distributed Epetra data onto every process
double* local_values;
epetra_vec_pt->ExtractView(&local_values);

// local row counts and first rows of every process
int my_rank = epetra_vec_pt->Map().Comm().MyPID();
nrow_local[my_rank] = epetra_vec_pt->MyLength();
int my_nrow_local_copy = nrow_local[my_rank];
MPI_Allgather(&my_nrow_local_copy,1,MPI_INT,&nrow_local[0],1,MPI_INT,
              ...); // communicator argument elided in this listing
first_row[my_rank] = epetra_vec_pt->Map().MyGlobalElements()[0];
int my_first_row = first_row[my_rank];
MPI_Allgather(&my_first_row,1,MPI_INT,&first_row[0],1,MPI_INT,
              ...); // communicator argument elided in this listing

// gather the values themselves (receive buffer and communicator elided)
MPI_Allgatherv(local_values,nrow_local[my_rank],MPI_DOUBLE,
               ...,&nrow_local[0],&first_row[0],MPI_DOUBLE,...);

// remaining branch of the listing: direct copy into the oomph-lib vector
epetra_vec_pt->ExtractCopy(oomph_vec.values_pt());
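// A self-contained sketch of the gather pattern used above: every process
// collects the locally held block of a distributed Epetra_Vector into a full
// global array via MPI_Allgather (counts/offsets) and MPI_Allgatherv (values).
// Pure Epetra + MPI; it assumes a contiguous, linearly distributed map and
// that the vector lives on MPI_COMM_WORLD.
#include <mpi.h>
#include <vector>
#include "Epetra_Map.h"
#include "Epetra_Vector.h"

void gather_to_all(const Epetra_Vector& ev, std::vector<double>& global_values)
{
 MPI_Comm comm = MPI_COMM_WORLD;
 int nproc = 0;
 MPI_Comm_size(comm,&nproc);

 // local block size and its first global index
 int my_n = ev.MyLength();
 int my_first = ev.Map().MinMyGID();

 // gather the counts and displacements from every process
 std::vector<int> counts(nproc), displs(nproc);
 MPI_Allgather(&my_n,1,MPI_INT,&counts[0],1,MPI_INT,comm);
 MPI_Allgather(&my_first,1,MPI_INT,&displs[0],1,MPI_INT,comm);

 // gather the values themselves
 global_values.resize(ev.GlobalLength());
 double* local_values = 0;
 ev.ExtractView(&local_values);
 MPI_Allgatherv(local_values,my_n,MPI_DOUBLE,
                &global_values[0],&counts[0],&displs[0],MPI_DOUBLE,comm);
}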
// --- fragment: create_distributed_epetra_matrix(const CRDoubleMatrix* oomph_matrix_pt,
//     const LinearAlgebraDistribution* dist_pt) ---

// paranoid check: the oomph-lib matrix must be built
if (!oomph_matrix_pt->built())
 {
  std::ostringstream error_message;
  error_message << "The oomph-lib matrix must be built.";
  throw OomphLibError(error_message.str(),
                      OOMPH_CURRENT_FUNCTION,
                      OOMPH_EXCEPTION_LOCATION);
 }
// (the listing shows the same check, with the same message, twice more)
if (!oomph_matrix_pt->built()) { /* as above */ }
if (!oomph_matrix_pt->built()) { /* as above */ }

// get (non-const) access to the underlying compressed-row arrays
int* column = const_cast<int*>(oomph_matrix_pt->column_index());
double* value = const_cast<double*>(oomph_matrix_pt->value());
int* row_start = const_cast<int*>(oomph_matrix_pt->row_start());

// ... for a non-distributed matrix a temporary, uniformly distributed target
// distribution with oomph_matrix_pt->nrow() global rows is built (constructor
// call elided in this listing: ...,oomph_matrix_pt->nrow(),true);), along
// with the row map epetra_map_pt, the domain map epetra_domain_map_pt and
// the row offset into the oomph-lib arrays ...

// local row range of the target distribution
unsigned nrow_local = target_dist_pt->nrow_local();
unsigned first_row = target_dist_pt->first_row();

// number of non-zeros in each locally held row
int* nnz_per_row = new int[nrow_local];
for (unsigned row=0;row<nrow_local;row++)
 {
  nnz_per_row[row] = row_start[row+offset+1] - row_start[offset+row];
 }

// create the Epetra_CrsMatrix (remaining constructor arguments, including
// nnz_per_row, elided in this listing)
Epetra_CrsMatrix* epetra_matrix_pt =
 new Epetra_CrsMatrix(Copy,*epetra_map_pt,...);

// insert the values row by row (remaining arguments elided; the return code
// is accumulated in epetra_error_flag in lines elided from this listing)
for (unsigned row=0; row<nrow_local; row++)
 {
  int ptr = row_start[row+offset];
  epetra_matrix_pt->InsertGlobalValues(first_row+row,...);
 }

// error check on insertion
if (epetra_error_flag != 0)
 {
  std::ostringstream error_message;
  error_message << "Epetra Matrix Insert Global Values : epetra_error_flag = "
                << epetra_error_flag;
  throw OomphLibError(error_message.str(),
                      OOMPH_CURRENT_FUNCTION,
                      OOMPH_EXCEPTION_LOCATION);
 }

// assemble: pass the domain map (range map argument elided in this listing)
epetra_matrix_pt->FillComplete(*epetra_domain_map_pt,...);

// error check on FillComplete
if (epetra_error_flag != 0)
 {
  std::ostringstream error_message;
  error_message << "Epetra Matrix Fill Complete Error : epetra_error_flag = "
                << epetra_error_flag;
  throw OomphLibError(error_message.str(),
                      OOMPH_CURRENT_FUNCTION,
                      OOMPH_EXCEPTION_LOCATION);
 }

// clean up and return
delete[] nnz_per_row;
delete epetra_map_pt;
delete epetra_domain_map_pt;
delete target_dist_pt;
delete target_col_dist_pt;

return epetra_matrix_pt;
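// A self-contained sketch of the assembly pattern used above: create an
// Epetra_CrsMatrix with a per-row non-zero count, insert each local row with
// InsertGlobalValues, then call FillComplete. The example builds a 1D
// Laplacian stencil; pure Epetra, assuming an MPI-enabled Trilinos build.
#include <mpi.h>
#include <vector>
#include "Epetra_MpiComm.h"
#include "Epetra_Map.h"
#include "Epetra_CrsMatrix.h"

Epetra_CrsMatrix* build_laplacian(const Epetra_MpiComm& comm, int n)
{
 Epetra_Map row_map(n,0,comm);
 const int nrow_local = row_map.NumMyElements();

 // at most 3 non-zeros per row for the [-1 2 -1] stencil
 std::vector<int> nnz_per_row(nrow_local,3);
 Epetra_CrsMatrix* matrix_pt =
  new Epetra_CrsMatrix(Copy,row_map,&nnz_per_row[0],true);

 for (int i = 0; i < nrow_local; i++)
  {
   const int row = row_map.GID(i);
   std::vector<double> values;
   std::vector<int> cols;
   if (row > 0)   { values.push_back(-1.0); cols.push_back(row-1); }
   values.push_back(2.0); cols.push_back(row);
   if (row < n-1) { values.push_back(-1.0); cols.push_back(row+1); }
   matrix_pt->InsertGlobalValues(row,int(values.size()),&values[0],&cols[0]);
  }

 // square matrix: the row map doubles as domain and range map
 matrix_pt->FillComplete();
 return matrix_pt; // caller takes ownership
}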
// --- fragment: create_distributed_epetra_matrix_for_aztecoo(CRDoubleMatrix* oomph_matrix_pt) ---

// paranoid check: the oomph-lib matrix must be built
if (!oomph_matrix_pt->built())
 {
  std::ostringstream error_message;
  error_message << "The oomph-lib matrix must be built.";
  throw OomphLibError(error_message.str(),
                      OOMPH_CURRENT_FUNCTION,
                      OOMPH_EXCEPTION_LOCATION);
 }

// get (non-const) access to the underlying compressed-row arrays
int* column = const_cast<int*>(oomph_matrix_pt->column_index());
double* value = const_cast<double*>(oomph_matrix_pt->value());
int* row_start = const_cast<int*>(oomph_matrix_pt->row_start());

// ... for a non-distributed matrix a temporary, uniformly distributed target
// distribution with oomph_matrix_pt->nrow() global rows is built (constructor
// call elided in this listing: ...,oomph_matrix_pt->nrow(),true);), along
// with the row map epetra_map_pt and the row offset ...

// local column range (matches the row distribution for AztecOO)
int first_col = oomph_matrix_pt->first_row();
int ncol_local = oomph_matrix_pt->nrow_local();

// the column map
Epetra_Map* epetra_col_map_pt = 0;

// distributed (MPI) case: collect the locally required column indices --
// first the locally "owned" columns, then every column index that occurs
// in the locally held rows
std::vector<int> col_index_vector;
col_index_vector.reserve(oomph_matrix_pt->nnz()+ncol_local);
col_index_vector.resize(ncol_local);
for (int c = 0; c < ncol_local; ++c)
 {
  col_index_vector[c] = c+first_col;
 }
std::vector<int>::iterator mid = col_index_vector.end();
col_index_vector.insert(mid,column,column+oomph_matrix_pt->nnz());

// strip the column indices that are already held locally
// (DistributionPredicate argument of remove_if elided in this listing)
std::vector<int>::iterator end =
 std::remove_if(mid,col_index_vector.end(),...);

// remove duplicates
end = std::unique(mid,end);

// build the column map from the remaining indices
epetra_col_map_pt =
 new Epetra_Map(-1,end - col_index_vector.begin(),
                &col_index_vector[0],0,
                Epetra_MpiComm(oomph_matrix_pt->distribution_pt()->
                               communicator_pt()->mpi_comm()));

// free the temporary storage
std::vector<int>().swap(col_index_vector);

// serial (no-MPI) case: all columns are local
int ncol = oomph_matrix_pt->ncol();
Epetra_Map* epetra_col_map_pt =
 new Epetra_LocalMap(ncol,0,Epetra_SerialComm());

// local row range of the target distribution
unsigned nrow_local = target_dist_pt->nrow_local();
unsigned first_row = target_dist_pt->first_row();

// number of non-zeros in each locally held row
int* nnz_per_row = new int[nrow_local];
for (unsigned row=0;row<nrow_local;++row)
 {
  nnz_per_row[row] = row_start[row+offset+1] - row_start[offset+row];
 }

// create the Epetra_CrsMatrix (remaining constructor arguments, including
// nnz_per_row, elided in this listing)
Epetra_CrsMatrix* epetra_matrix_pt =
 new Epetra_CrsMatrix(Copy,*epetra_map_pt,...);

// insert the values row by row (remaining arguments elided; the return code
// is accumulated in epetra_error_flag in lines elided from this listing)
for (unsigned row=0; row<nrow_local; row++)
 {
  int ptr = row_start[row+offset];
  epetra_matrix_pt->InsertGlobalValues(first_row+row,...);
 }

// error check on insertion
if (epetra_error_flag != 0)
 {
  std::ostringstream error_message;
  error_message << "Epetra Matrix Insert Global Values : epetra_error_flag = "
                << epetra_error_flag;
  throw OomphLibError(error_message.str(),
                      OOMPH_CURRENT_FUNCTION,
                      OOMPH_EXCEPTION_LOCATION);
 }

// assemble
epetra_matrix_pt->FillComplete();

// error check on FillComplete
if (epetra_error_flag != 0)
 {
  std::ostringstream error_message;
  error_message << "Epetra Matrix Fill Complete Error : epetra_error_flag = "
                << epetra_error_flag;
  throw OomphLibError(error_message.str(),
                      OOMPH_CURRENT_FUNCTION,
                      OOMPH_EXCEPTION_LOCATION);
 }

// clean up and return
delete[] nnz_per_row;
delete epetra_map_pt;
delete epetra_col_map_pt;
delete target_dist_pt;

return epetra_matrix_pt;
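// A sketch of the column-index post-processing used above for the AztecOO
// column map: keep the locally owned columns, strip the column indices that
// fall inside the local range, then sort and deduplicate the remainder. The
// predicate below is a stand-in for the DistributionPredicate class described
// in the reference entries at the end of this listing. Pure C++/STL.
#include <algorithm>
#include <vector>

struct InLocalRange
{
 int First_col, Last_col;
 InLocalRange(const int& first_col, const int& ncol_local)
  : First_col(first_col), Last_col(first_col+ncol_local-1) {}
 bool operator()(const int& col) const
  { return (col >= First_col && col <= Last_col); }
};

void build_column_index_list(const int* column, unsigned nnz,
                             int first_col, int ncol_local,
                             std::vector<int>& col_index_vector)
{
 // locally owned columns first
 col_index_vector.resize(ncol_local);
 for (int c = 0; c < ncol_local; ++c) { col_index_vector[c] = c+first_col; }

 // then every column index that appears in the local rows
 std::vector<int>::size_type n_owned = col_index_vector.size();
 col_index_vector.insert(col_index_vector.end(),column,column+nnz);

 // drop indices already in the local range, then sort and deduplicate
 std::vector<int>::iterator mid = col_index_vector.begin()+n_owned;
 std::vector<int>::iterator end =
  std::remove_if(mid,col_index_vector.end(),InLocalRange(first_col,ncol_local));
 std::sort(mid,end);
 end = std::unique(mid,end);
 col_index_vector.erase(end,col_index_vector.end());
}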
// --- fragment: multiply(matrix, x, soln) -- matrix-vector product ---

// paranoid check: the matrix must be built
if (!oomph_matrix_pt->built())
 {
  std::ostringstream error_message_stream;
  error_message_stream << "This matrix has not been built";
  throw OomphLibError(error_message_stream.str(),
                      OOMPH_CURRENT_FUNCTION,
                      OOMPH_EXCEPTION_LOCATION);
 }

// paranoid check: the solution vector must have the matrix's row
// distribution (condition elided in this listing)
if (...)
 {
  std::ostringstream error_message_stream;
  error_message_stream
   << "The soln vector and this matrix must have the same distribution.";
  throw OomphLibError(error_message_stream.str(),
                      OOMPH_CURRENT_FUNCTION,
                      OOMPH_EXCEPTION_LOCATION);
 }

// paranoid check: the x vector must be built
if (!oomph_x.built())
 {
  std::ostringstream error_message_stream;
  error_message_stream << "The x vector must be setup";
  throw OomphLibError(error_message_stream.str(),
                      OOMPH_CURRENT_FUNCTION,
                      OOMPH_EXCEPTION_LOCATION);
 }

// convert the matrix and both vectors to their Epetra counterparts
// (helper calls elided in this listing)
Epetra_CrsMatrix* epetra_matrix_pt = ...;
Epetra_Vector* epetra_x_pt = ...;
Epetra_Vector* epetra_y_pt = ...;

// perform the matrix-vector product y = A x (first argument false:
// do not use the transpose)
int epetra_error_flag = 0;
epetra_error_flag =
 epetra_matrix_pt->Multiply(false,*epetra_x_pt,*epetra_y_pt);

// error check
if (epetra_error_flag != 0)
 {
  std::ostringstream error_message;
  error_message
   << "Epetra Matrix Vector Multiply Error : epetra_error_flag = "
   << epetra_error_flag;
  throw OomphLibError(error_message.str(),
                      OOMPH_CURRENT_FUNCTION,
                      OOMPH_EXCEPTION_LOCATION);
 }

// clean up (remaining deletes elided in this listing)
delete epetra_matrix_pt;
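// A hedged usage sketch of the matrix-vector helper whose fragment appears
// above; the signature multiply(const CRDoubleMatrix*, const DoubleVector&,
// DoubleVector&) is taken from the reference entries at the end of this
// listing. The namespace TrilinosEpetraHelpers, the header paths and the
// DoubleVector::build(dist_pt,value) overload used to set up soln are
// assumptions.
#include "generic/matrices.h"          // assumed header paths
#include "generic/double_vector.h"
#include "generic/trilinos_helpers.h"

void apply_matrix(const CRDoubleMatrix& matrix, const DoubleVector& x,
                  DoubleVector& soln)
{
 // soln must share the matrix's row distribution (see the paranoid check
 // in the fragment above); assumed build(dist_pt,value) overload
 soln.build(matrix.distribution_pt(),0.0);

 // soln = matrix * x, computed via Trilinos (Epetra) under the hood
 TrilinosEpetraHelpers::multiply(&matrix,x,soln);
}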
// --- fragment: multiply(matrix_1, matrix_2, matrix_soln) -- matrix-matrix product ---

// paranoid check: matrix_1 must be built
if (!matrix_1.built())
 {
  std::ostringstream error_message_stream;
  error_message_stream << "This matrix matrix_1 has not been built";
  throw OomphLibError(error_message_stream.str(),
                      OOMPH_CURRENT_FUNCTION,
                      OOMPH_EXCEPTION_LOCATION);
 }

// paranoid check: matrix_2 must be built
if (!matrix_2.built())
 {
  std::ostringstream error_message_stream;
  error_message_stream << "This matrix matrix_2 has not been built";
  throw OomphLibError(error_message_stream.str(),
                      OOMPH_CURRENT_FUNCTION,
                      OOMPH_EXCEPTION_LOCATION);
 }

// paranoid check: the matrix dimensions must be compatible
if ( matrix_1.ncol() != matrix_2.nrow() )
 {
  std::ostringstream error_message;
  error_message
   << "Matrix dimensions incompatible for matrix-matrix multiplication"
   << "ncol() for first matrix: " << matrix_1.ncol()
   << "nrow() for second matrix: " << matrix_2.nrow();
  throw OomphLibError(error_message.str(),
                      OOMPH_CURRENT_FUNCTION,
                      OOMPH_EXCEPTION_LOCATION);
 }

// a second dimension check with the same message (condition elided in
// this listing)
if (...)
 {
  std::ostringstream error_message;
  error_message
   << "Matrix dimensions incompatible for matrix-matrix multiplication"
   << "ncol() for first matrix: " << matrix_1.ncol()
   << "nrow() for second matrix: " << matrix_2.nrow();
  throw OomphLibError(error_message.str(),
                      OOMPH_CURRENT_FUNCTION,
                      OOMPH_EXCEPTION_LOCATION);
 }

// paranoid check: the solution matrix must have matrix_1's distribution
// (condition elided in this listing)
if (...)
 {
  std::ostringstream error_message_stream;
  error_message_stream
   << "The solution matrix and matrix_1 must have the same distribution.";
  throw OomphLibError(error_message_stream.str(),
                      OOMPH_CURRENT_FUNCTION,
                      OOMPH_EXCEPTION_LOCATION);
 }

// only use the ML-based product where it is applicable
// (remainder of the condition elided in this listing)
bool temp_use_ml = false;
if ((...) &&
    (matrix_1.ncol() == matrix_2.ncol()))
 {
  temp_use_ml = use_ml;
 }

// convert both matrices to Epetra_CrsMatrix (helper calls elided)
Epetra_CrsMatrix* epetra_matrix_1_pt = ...;
Epetra_CrsMatrix* epetra_matrix_2_pt = ...;

// the Trilinos matrix that will hold the product
Epetra_CrsMatrix* solution_pt;

// branch 1 (ML-based product; branch structure elided in this listing):
solution_pt = Epetra_MatrixMult(epetra_matrix_1_pt, epetra_matrix_2_pt);

// branch 2 (EpetraExt-based product; remaining arguments elided):
solution_pt = new Epetra_CrsMatrix(Copy,epetra_matrix_1_pt->RowMap(),...);
int epetra_error_flag = 0;
epetra_error_flag =
 EpetraExt::MatrixMatrix::Multiply(*epetra_matrix_1_pt,...);

// error check
if (epetra_error_flag != 0)
 {
  std::ostringstream error_message;
  error_message << "error flag from Multiply(): " << epetra_error_flag
                << " from TrilinosHelpers::multiply";
  throw OomphLibError(error_message.str(),
                      OOMPH_CURRENT_FUNCTION,
                      OOMPH_EXCEPTION_LOCATION);
 }

// number of locally held non-zeros in the product
int nnz_local = solution_pt->NumMyNonzeros();

// paranoid check: global row count of the product
if ( (int)matrix_1.nrow() != solution_pt->NumGlobalRows() )
 {
  std::ostringstream error_message;
  error_message
   << "Incorrect number of global rows in solution matrix. "
   << "nrow() for first input matrix: " << matrix_1.nrow()
   << " nrow() for solution: " << solution_pt->NumGlobalRows();
  throw OomphLibError(error_message.str(),
                      OOMPH_CURRENT_FUNCTION,
                      OOMPH_EXCEPTION_LOCATION);
 }

// paranoid check: local row count of the product
if ( static_cast<int>(matrix_1.nrow_local()) != solution_pt->NumMyRows() )
 {
  std::ostringstream error_message;
  error_message
   << "Incorrect number of local rows in solution matrix. "
   << "nrow_local() for first input matrix: " << matrix_1.nrow_local()
   << " nrow_local() for solution: " << solution_pt->NumMyRows();
  throw OomphLibError(error_message.str(),
                      OOMPH_CURRENT_FUNCTION,
                      OOMPH_EXCEPTION_LOCATION);
 }

// paranoid check: global column count of the product
if ( (int)matrix_2.ncol() != solution_pt->NumGlobalCols() )
 {
  std::ostringstream error_message;
  error_message
   << "Incorrect number of global columns in solution matrix. "
   << "ncol() for second input matrix: " << matrix_2.ncol()
   << " ncol() for solution: " << solution_pt->NumGlobalCols();
  throw OomphLibError(error_message.str(),
                      OOMPH_CURRENT_FUNCTION,
                      OOMPH_EXCEPTION_LOCATION);
 }

// paranoid check: global index of the first locally held row of the product
if ( static_cast<int>(matrix_1.first_row()) != solution_pt->GRID(0) )
 {
  std::ostringstream error_message;
  error_message
   << "Incorrect global index for first row of solution matrix. "
   << "first_row() for first input matrix : " << matrix_1.first_row()
   << " first_row() for solution: " << solution_pt->GRID(0);
  throw OomphLibError(error_message.str(),
                      OOMPH_CURRENT_FUNCTION,
                      OOMPH_EXCEPTION_LOCATION);
 }

// extract the product into C-style compressed-row arrays for the oomph-lib
// solution matrix (nrow_local, first, last, ptr and num_entries are set up
// in lines elided from this listing)
double* value = new double[nnz_local];
int* column_index = new int[nnz_local];
int* row_start = new int[nrow_local+1];

for (int row=first; row<last; row++)
 {
  row_start[row-first] = ptr;
  solution_pt->ExtractGlobalRowCopy(row,nnz_local,num_entries,
                                    value+ptr,column_index+ptr);
  // (ptr is advanced by num_entries in lines elided from this listing)
 }
row_start[nrow_local] = ptr;

// clean up the Epetra copies of the input matrices
delete epetra_matrix_1_pt;
delete epetra_matrix_2_pt;
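// A self-contained sketch of the EpetraExt product used above: the result
// matrix is created on the row map of the first operand and is fill-completed
// by EpetraExt::MatrixMatrix::Multiply itself (its call_FillComplete_on_result
// argument defaults to true). Pure Epetra/EpetraExt; the header name
// "EpetraExt_MatrixMatrix.h" matches recent Trilinos releases.
#include "Epetra_CrsMatrix.h"
#include "EpetraExt_MatrixMatrix.h"

Epetra_CrsMatrix* multiply_crs(const Epetra_CrsMatrix& A,
                               const Epetra_CrsMatrix& B)
{
 // rough per-row estimate of 0; the dynamic profile grows rows as needed
 Epetra_CrsMatrix* C_pt = new Epetra_CrsMatrix(Copy,A.RowMap(),0);

 // C = A * B (no transposes); a non-zero return indicates failure
 int error_flag = EpetraExt::MatrixMatrix::Multiply(A,false,B,false,*C_pt);
 if (error_flag != 0)
  {
   delete C_pt;
   return 0;
  }
 return C_pt;
}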
// --- fragment: create_epetra_map(const LinearAlgebraDistribution* dist_pt) ---

#ifdef OOMPH_HAS_MPI
 // distributed case: list the global indices of the locally held rows
 unsigned first_row = dist_pt->first_row();
 unsigned nrow_local = dist_pt->nrow_local();
 int* my_global_rows = new int[nrow_local];
 for (unsigned i = 0; i < nrow_local; ++i)
  {
   my_global_rows[i] = first_row+i;
  }

 // build the map from the explicit list of global rows
 // (remaining constructor arguments elided in this listing)
 Epetra_Map* epetra_map_pt =
  new Epetra_Map(dist_pt->nrow(),nrow_local,...);

 delete[] my_global_rows;
 return epetra_map_pt;

 // non-distributed case (MPI build): every process holds all rows
 // (communicator argument elided in this listing)
 return new Epetra_LocalMap(int(dist_pt->nrow()),int(0),...);
#else
 // serial build: all rows are local
 return new Epetra_LocalMap(int(dist_pt->nrow()),int(0),Epetra_SerialComm());
#endif
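// A sketch of the two map flavours created above: a genuinely distributed
// Epetra_Map built from an explicit list of global row indices, and an
// Epetra_LocalMap that replicates every row on every process (the serial /
// non-distributed case). Pure Epetra; assumes an MPI-enabled Trilinos build.
#include <mpi.h>
#include <vector>
#include "Epetra_MpiComm.h"
#include "Epetra_Map.h"
#include "Epetra_LocalMap.h"

Epetra_Map* make_row_map(const Epetra_MpiComm& comm,
                         int nrow, int nrow_local, int first_row,
                         bool distributed)
{
 if (distributed)
  {
   // explicit list of the global indices held on this process
   std::vector<int> my_global_rows(nrow_local);
   for (int i = 0; i < nrow_local; ++i) { my_global_rows[i] = first_row+i; }
   return new Epetra_Map(nrow,nrow_local,
                         nrow_local > 0 ? &my_global_rows[0] : 0,0,comm);
  }
 // every process holds all nrow rows
 return new Epetra_LocalMap(nrow,0,comm);
}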
Referenced symbols:

int * column_index()
Access to C-style column index array.
unsigned long nnz() const
Return the number of nonzero entries (the local nnz)
DistributionPredicate(const int &first_col, const int &ncol_local)
Constructor: pass the index of the first column and the number of local columns.
Epetra_CrsMatrix * create_distributed_epetra_matrix_for_aztecoo(CRDoubleMatrix *oomph_matrix_pt)
create an Epetra_CrsMatrix from an oomph-lib CRDoubleMatrix. Specialisation for Trilinos AztecOO...
bool built() const
access function to the Built flag - indicates whether the matrix has been built - i...
double * values_pt()
access function to the underlying values
OomphCommunicator * communicator_pt() const
const access to the communicator pointer
void multiply(const CRDoubleMatrix *matrix, const DoubleVector &x, DoubleVector &soln)
Function to perform a matrix-vector multiplication on an oomph-lib matrix and vector using Trilinos fu...
unsigned nrow() const
access function to the number of global rows.
unsigned first_row() const
access function for the first row on this processor. If not distributed then this is just zero...
bool distributed() const
access function to the distributed flag - indicates whether the distribution is serial or distributed ...
double * value()
Access to C-style value array.
Epetra_CrsMatrix * create_distributed_epetra_matrix(const CRDoubleMatrix *oomph_matrix_pt, const LinearAlgebraDistribution *dist_pt)
create an Epetra_CrsMatrix from an oomph-lib CRDoubleMatrix. If oomph_matrix_pt is NOT distributed (i...
Epetra_Vector * create_epetra_vector_view_data(DoubleVector &oomph_vec)
create an Epetra_Vector equivalent of a DoubleVector. The argument DoubleVector must be built...
bool distributed() const
indicates whether the distribution is serial or distributed
Describes the distribution of a distributable linear algebra type object. Typically this is a contain...
void build(const DoubleVector &old_vector)
Just copies the argument DoubleVector.
unsigned long ncol() const
Return the number of columns of the matrix.
int Last_col
Last column held locally.
unsigned nrow_local() const
access function for the number of local rows on this processor. If no MPI then Nrow is returned...
unsigned first_row() const
access function for the first row on this processor
Class to allow sorting of column indices in conversion to epetra matrix.
Epetra_Map * create_epetra_map(const LinearAlgebraDistribution *const dist)
create an Epetra_Map corresponding to the LinearAlgebraDistribution
unsigned nrow() const
access function to the number of global rows.
Epetra_Vector * create_distributed_epetra_vector(const DoubleVector &oomph_vec)
create an Epetra_Vector from an oomph-lib DoubleVector. If oomph_vec is NOT distributed (i...
int First_col
First column held locally.
bool operator()(const int &col)
Comparison operator: is column col in the range between (including) First_col and Last_col...
unsigned nrow_local() const
access function for the number of local rows on this processor.
A vector in the mathematical sense, initially developed for linear algebra type applications. If MPI then this vector can be distributed - its distribution is described by the LinearAlgebraDistribution object at Distribution_pt. Data is stored in a C-style pointer vector (double*)
unsigned long nrow() const
Return the number of rows of the matrix.
int * row_start()
Access to C-style row_start array.
void copy_to_oomphlib_vector(const Epetra_Vector *epetra_vec_pt, DoubleVector &oomph_vec)
Helper function to copy the contents of a Trilinos vector to an oomph-lib distributed vector...
A class for compressed row matrices. This is a distributable object.
LinearAlgebraDistribution * distribution_pt() const
access to the LinearAlgebraDistribution
void build(const LinearAlgebraDistribution *distribution_pt, const unsigned &ncol, const Vector< double > &value, const Vector< int > &column_index, const Vector< int > &row_start)
build method: vector of values, vector of column indices, vector of row starts and number of rows and...
void build_without_copy(const unsigned &ncol, const unsigned &nnz, double *value, int *column_index, int *row_start)
keeps the existing distribution and just sets the matrix that is stored, without copying the matrix data ...
An oomph-lib wrapper to the MPI_Comm communicator object. Just contains an MPI_Comm object (which is ...