communication_control.f90
!#########################################################################
!
!    Copyright (C) 2003-2012 Department of Physics and Astronomy,
!                            University of Rochester,
!                            Rochester, NY
!
!    communication_control.f90 is part of AstroBEAR.
!
!    AstroBEAR is free software: you can redistribute it and/or modify
!    it under the terms of the GNU General Public License as published by
!    the Free Software Foundation, either version 3 of the License, or
!    (at your option) any later version.
!
!    AstroBEAR is distributed in the hope that it will be useful,
!    but WITHOUT ANY WARRANTY; without even the implied warranty of
!    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
!    GNU General Public License for more details.
!
!    You should have received a copy of the GNU General Public License
!    along with AstroBEAR.  If not, see <http://www.gnu.org/licenses/>.
!
!#########################################################################
MODULE CommunicationControl
   USE GlobalDeclarations
   USE CommunicationDeclarations
   USE MpiPacking
   USE MessageDeclarations
   IMPLICIT NONE

   PUBLIC InitMessageGroups, CommInit, MPI_CHECK_MSG_GROUPS
   PRIVATE

CONTAINS

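   ! Initializes the communication layer on this processor: reports the
   ! host name, allocates the per-level, per-stage timing accumulators,
   ! and sets up the empty stage message groups.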
   SUBROUTINE CommInit()

      INTEGER :: ierr
      INTEGER :: n
      CHARACTER(LEN=14) :: FILENAME

      !Do MPI Stuff
      CALL PrintProcName()
!      write(FILENAME,'(A5,I4.4,A4)') "comm_",mpi_id,".log"
!      OPEN (UNIT=COMM_LOG_HANDLE, file=FILENAME, status="unknown")

!      packet_buffers = 4096

!      packet_buffers(TRANSMIT_CHILD_GRIDS / 1000) = 80
!      packet_buffers(TRANSMIT_OVERLAPS_NEIGHBORS / 1000) = 40
!      packet_buffers(TRANSMIT_OLD_NODE_OVERLAPS / 1000) = 80
!      packet_buffers(TRANSMIT_NEIGHBORING_CHILDREN / 1000) = 80
!      packet_buffers(TRANSMIT_OLC_TO_OLD_NODES / 1000) = 80
!      packet_buffers(TRANSMIT_OLC_TO_NEW_NODES / 1000) = 80
!      packet_buffers(TRANSMIT_IO_WORKER_GRIDS / 1000) = 80

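      ! stage_times(level, stage) accumulates the time spent in each
      ! communication stage on levels -2 through MaxLevel.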
      ALLOCATE(stage_times(-2:MaxLevel,nStages))
      stage_times = 0.d0

      CALL InitMessageGroups
   END SUBROUTINE CommInit

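   ! Allocates the StageMessageGroups table and nullifies each pointer
   ! entry for every stage and level.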
   SUBROUTINE InitMessageGroups
      INTEGER :: i,j
      ALLOCATE(StageMessageGroups(nStages,-2:MaxLevel))
      DO i=1,nStages
         DO j=-2,MaxLevel
            NULLIFY(StageMessageGroups(i,j)%p)
         END DO
      END DO

   END SUBROUTINE InitMessageGroups

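   ! Consistency check: builds an MPI_NP x MPI_NP matrix of message buffer
   ! sizes from the sender's view and from the receiver's view, reduces
   ! both across processors, and halts with a report if the views disagree.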
   SUBROUTINE MPI_CHECK_MSG_GROUPS(iRecvMessage, iSendMessage, nRecv, nSend)
      INTEGER :: iRecvMessage, iSendMessage, nRecv, nSend, iErr
      TYPE(StageMessageGroup), POINTER :: RecvMessageGroup, SendMessageGroup
      INTEGER, DIMENSION(:,:), ALLOCATABLE :: SendArray, RecvArray
      INTEGER, DIMENSION(:), POINTER :: RecvProcArray, SendProcArray
      INTEGER :: i, j
      TYPE(PackedMessage), POINTER :: message
      RecvMessageGroup=>StageMessageGroups(iRecvMessage, nRecv)%p
      SendMessageGroup=>StageMessageGroups(iSendMessage, nSend)%p
      ALLOCATE(SendArray(0:MPI_NP-1, 0:MPI_NP-1))
      ALLOCATE(RecvArray(0:MPI_NP-1, 0:MPI_NP-1))
      CALL GetProcListAsArray(RecvMessageGroup, RecvProcArray)
      CALL GetProcListAsArray(SendMessageGroup, SendProcArray)
      SendArray=0
      RecvArray=0
!      IF (ASSOCIATED(RecvProcArray)) RecvArray(RecvProcArray, MPI_ID)=1
!      IF (ASSOCIATED(SendProcArray)) SendArray(MPI_ID, SendProcArray)=1

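      ! Record the buffer size of each pending message:
      ! RecvArray(src, MPI_ID) from the receiver's side and
      ! SendArray(MPI_ID, dest) from the sender's side.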
      IF (ASSOCIATED(RecvProcArray)) THEN
         DO i=1, size(RecvProcArray)
            CALL ExtractMessageFromGroup(RecvMessageGroup, RecvProcArray(i), message)
            RecvArray(RecvProcArray(i), MPI_ID) = message%buffer_size
         END DO
      END IF
      IF (ASSOCIATED(SendProcArray)) THEN
         DO i=1, size(SendProcArray)
            CALL ExtractMessageFromGroup(SendMessageGroup, SendProcArray(i), message)
            SendArray(MPI_ID, SendProcArray(i)) = message%buffer_size
         END DO
      END IF

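      ! Sum the local views across all processors; every rank then holds
      ! the full send and receive matrices and can compare them entry by entry.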
      CALL MPI_ALLREDUCE(MPI_IN_PLACE, SendArray, MPI_NP*MPI_NP, MPI_INTEGER, MPI_SUM, levels(nRecv)%MPI_COMM, iErr)
      CALL MPI_ALLREDUCE(MPI_IN_PLACE, RecvArray, MPI_NP*MPI_NP, MPI_INTEGER, MPI_SUM, levels(nSend)%MPI_COMM, iErr)

      IF (.NOT. ALL(SendArray==RecvArray)) THEN

         write(*,'(8I8)') SendArray
         write(*,*) '------------------'
         write(*,'(8I8)') RecvArray
         write(*,*) '------------------'
         DO i=0, MPI_NP-1
            DO j=0, MPI_NP-1
               IF (SendArray(i,j) > RecvArray(i,j)) THEN
                  write(*,*) 'processor ', i , 'only sending to ', j
               ELSE IF (SendArray(i,j) < RecvArray(i,j)) THEN
                  write(*,*) 'processor ', j , 'only receiving from ', i
               END IF
            END DO
            IF (SendArray(i,i) /= 0) THEN
               write(*,*) 'sending to myself'
               STOP
            END IF
         END DO
!         IF (.NOT. ALL(SendArray==RecvArray)) THEN
         write(*,*) iRecvMessage, iSendMessage, nRecv, nSend, levels(nRecv)%CurrentLevelStep
         STOP
      END IF
   END SUBROUTINE MPI_CHECK_MSG_GROUPS

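   ! Queries MPI for this rank's processor (host) name; the diagnostic
   ! print itself is currently commented out.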
   SUBROUTINE PrintProcName()

       INTEGER :: ierr
       INTEGER :: resultlength
       CHARACTER(LEN=MPI_MAX_PROCESSOR_NAME) :: name_string

       CALL MPI_GET_PROCESSOR_NAME(name_string,resultlength,ierr)

!       PRINT *, "Proc ", MPI_id, " name = ", TRIM(name_string)

   END SUBROUTINE PrintProcName

!   SUBROUTINE CommunicationStats()
!
!       REAL(KIND=qPrec), DIMENSION(-2:MaxLevel) :: level_slice
!       REAL(KIND=qPrec) :: input, buffer
!       INTEGER :: ierr
!
!       DO stage = 1, nStages
!            DO n = -2, MaxLevel
!                 input = stage_time(n,stage)
!                 CALL MPI_REDUCE(input, buffer, 1, MPI_DOUBLE_PRECISION, MPI_SUM, Master, MPI_COMM_WORLD, ierr)
!                 level_slice(n) = buffer
!            END DO
!
!            PRINT "('Level averages
!       END DO
!   END SUBROUTINE CommunicationStats


END MODULE CommunicationControl