1 
2 #include <petsc/private/petscimpl.h>              /*I "petscsys.h" I*/
3 
4 /* Logging support */
5 PetscLogEvent PETSC_Barrier;
6 
/*
   hash - Bernstein's djb2 string hash: h = h*33 + c, seeded with 5381.

   Used below to reduce a function name to a single integer so that the
   call sites of MPIU_Allreduce() can be compared across MPI ranks.
   The arithmetic is done in unsigned int (wraps, no undefined behavior);
   the final value is returned as int for use as a PetscMPIInt.
*/
static int hash(const char *str)
{
  unsigned int c,hash = 5381;

  while ((c = *str++)) hash = ((hash << 5) + hash) + c; /* hash * 33 + c */
  return hash;
}
14 
15 /*
16    This is used by MPIU_Allreduce() to insure that all callers originated from the same place in the PETSc code
17 */
PetscAllreduceBarrierCheck(MPI_Comm comm,PetscMPIInt ctn,int line,const char * func,const char * file)18 PetscErrorCode PetscAllreduceBarrierCheck(MPI_Comm comm,PetscMPIInt ctn,int line,const char *func,const char *file)
19 {
20   PetscMPIInt err;
21   PetscMPIInt b1[6],b2[6];
22 
23   b1[0] = -(PetscMPIInt)line;       b1[1] = -b1[0];
24   b1[2] = -(PetscMPIInt)hash(func); b1[3] = -b1[2];
25   b1[4] = -(PetscMPIInt)ctn;        b1[5] = -b1[4];
26   err = MPI_Allreduce(b1,b2,6,MPI_INT,MPI_MAX,comm);
27   if (err) return PetscError(PETSC_COMM_SELF,line,func,file,PETSC_ERR_LIB,PETSC_ERROR_INITIAL,"MPI_Allreduce() failed with error code %d",err);
28   if (-b2[0] != b2[1]) return PetscError(PETSC_COMM_SELF,line,func,file,PETSC_ERR_PLIB,PETSC_ERROR_INITIAL,"MPI_Allreduce() called in different locations (code lines) on different processors");
29   if (-b2[2] != b2[3]) return PetscError(PETSC_COMM_SELF,line,func,file,PETSC_ERR_PLIB,PETSC_ERROR_INITIAL,"MPI_Allreduce() called in different locations (functions) on different processors");
30   if (-b2[4] != b2[5]) return PetscError(PETSC_COMM_SELF,line,func,file,PETSC_ERR_PLIB,PETSC_ERROR_INITIAL,"MPI_Allreduce() called with different counts %d on different processors",ctn);
31   return 0;
32 }
33 
34 /*@C
35     PetscBarrier - Blocks until this routine is executed by all
36                    processors owning the object obj.
37 
38    Input Parameters:
39 .  obj - PETSc object  (Mat, Vec, IS, SNES etc...)
40         The object be caste with a (PetscObject). NULL can be used to indicate the barrier should be across MPI_COMM_WORLD
41 
42   Level: intermediate
43 
44   Notes:
45   This routine calls MPI_Barrier with the communicator of the PETSc Object obj
46 
47   Fortran Usage:
48     You may pass PETSC_NULL_VEC or any other PETSc null object, such as PETSC_NULL_MAT, to indicate the barrier should be
49     across MPI_COMM_WORLD. You can also pass in any PETSc object, Vec, Mat, etc
50 
51 @*/
PetscBarrier(PetscObject obj)52 PetscErrorCode  PetscBarrier(PetscObject obj)
53 {
54   PetscErrorCode ierr;
55   MPI_Comm       comm;
56 
57   PetscFunctionBegin;
58   if (obj) PetscValidHeader(obj,1);
59   ierr = PetscLogEventBegin(PETSC_Barrier,obj,0,0,0);CHKERRQ(ierr);
60   if (obj) {
61     ierr = PetscObjectGetComm(obj,&comm);CHKERRQ(ierr);
62   } else comm = PETSC_COMM_WORLD;
63   ierr = MPI_Barrier(comm);CHKERRQ(ierr);
64   ierr = PetscLogEventEnd(PETSC_Barrier,obj,0,0,0);CHKERRQ(ierr);
65   PetscFunctionReturn(0);
66 }