Scrambler
!#########################################################################
!
!    Copyright (C) 2003-2012 Department of Physics and Astronomy,
!                            University of Rochester,
!                            Rochester, NY
!
!    io_parsing.f90 is part of AstroBEAR.
!
!    AstroBEAR is free software: you can redistribute it and/or modify
!    it under the terms of the GNU General Public License as published by
!    the Free Software Foundation, either version 3 of the License, or
!    (at your option) any later version.
!
!    AstroBEAR is distributed in the hope that it will be useful,
!    but WITHOUT ANY WARRANTY; without even the implied warranty of
!    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
!    GNU General Public License for more details.
!
!    You should have received a copy of the GNU General Public License
!    along with AstroBEAR.  If not, see <http://www.gnu.org/licenses/>.
!
!#########################################################################

MODULE IOParsing

   USE ChomboDeclarations
   USE MessageDeclarations
   USE MPIPacking
   USE GlobalDeclarations
   USE TreeDeclarations
   USE DataDeclarations
   USE DataInfoOps
   USE CommunicationDeclarations
   USE ProcessingDeclarations
   IMPLICIT NONE

   PRIVATE

   ! Restart parsing
   PUBLIC IO_SendDataToWorkers, IO_RecvDataFromMaster

   ! Output frame parsing
   PUBLIC IO_ParseRemoteNode, IO_UnparseRemoteNode, IO_ParseRemoteGrid, &
          IO_UnparseRemoteGrid, IO_ParseNodeChildren, IO_UnparseNodeChildren

CONTAINS

   !> Packs a node's box coordinates and owning process ID into a message.
   SUBROUTINE IO_ParseRemoteNode(message, node)

      TYPE(PackedMessage), POINTER :: message
      TYPE(Nodedef), POINTER :: node

      CALL PackData(message, node%box%mGlobal)
      CALL PackData(message, node%box%MPI_ID)
      ! CALL PackList(message, node%proclist)
      ! CALL PackList(message, node%proctime)

   END SUBROUTINE IO_ParseRemoteNode

   !> Unpacks a node from a message into a newly allocated temporary node.
   SUBROUTINE IO_UnparseRemoteNode(message, node)

      TYPE(PackedMessage), POINTER :: message
      TYPE(Nodedef), POINTER :: node

      INTEGER :: proc_id
      INTEGER, DIMENSION(3,2) :: mGlobal
      TYPE(NodeBox), POINTER :: box
      INTEGER, POINTER, DIMENSION(:) :: proclist
      REAL, POINTER, DIMENSION(:) :: proctime
      INTEGER :: iErr

      NULLIFY(node)

      !PRINT *
      !PRINT *, "*** IO_UnparseRemoteNode::starting ***"
      !PRINT *

      ! Get the coordinate box from the message. This will fail on a non-coordinate,
      ! non-termination box (i.e., any box with a lower or upper bound of 0).
      IF (StrictGetNextBox(message, mGlobal, "IO_UnparseRemoteNode")) THEN

         !PRINT "('IO_UnparseRemoteNode::mGlobal = [', 6i4, '].')", mGlobal

         ! Retrieve the node's process ID.
         CALL UnpackData(message, proc_id)

         !PRINT "('IO_UnparseRemoteNode::proc_id = ', i1, '.')", proc_id

         ! Use the coordinates and MPI_ID to construct a NodeBox.
         CALL CreateNodeBox(mGlobal, box, proc_id)

         ! If there are any proclist or proctime elements, these UnpackList calls
         ! will retrieve them.
         ! CALL UnpackList(message, proclist)
         !PRINT *, "IO_UnparseRemoteNode::proclist = ", proclist

         ! CALL UnpackList(message, proctime)

         !PRINT *, "IO_UnparseRemoteNode::proctime = ", proctime
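         ! Note that the unpack order here must mirror the pack order in
         ! IO_ParseRemoteNode exactly (mGlobal first, then the MPI_ID); a
         ! mismatch would silently misalign every field that follows.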
         ! Allocate a new node object (we don't want to use AddNode here, since
         ! this is a temporary structure to hold worker data and will not be taking
         ! up permanent residence here).
         ALLOCATE(node, STAT=iErr)

         IF (iErr /= 0) THEN
            PRINT *, "IO_UnparseRemoteNode() error: failure to allocate node."
            STOP
         END IF

         ! Clear all the node pointers.
         CALL NullifyNodeFields(node)

         ! Assign the appropriate attributes to the node.
         node%box = box
         ! node%proclist => proclist
         ! node%proctime => proctime

         ! Destroy the temporary box (its contents have been copied into node%box)
         ! and nullify the pointers used to construct this node.
         ! DO NOT DEALLOCATE proclist OR proctime, AS THIS WOULD DESTROY DATA.
         CALL DestroyNodeBox(box)
         NULLIFY(box, proclist, proctime)

      END IF
      !PRINT *, "IO_UnparseRemoteNode::done."
      !PRINT *

   END SUBROUTINE IO_UnparseRemoteNode

   !> Packs a grid's global coordinates and cell- and face-centered data.
   SUBROUTINE IO_ParseRemoteGrid(message, Info, finest_level)

      TYPE(PackedMessage), POINTER :: message
      TYPE(InfoDef), POINTER :: Info
      INTEGER :: finest_level

      INTEGER :: mx, my, mz

      !PRINT *
      !PRINT "('*** IO_ParseRemoteGrid(', i2, ', ', i2, ')::starting ***')", MPI_id, Info%level
      !PRINT "('*** IO_ParseRemoteGrid(', i2, ', ', i2, ')::buffer size = ', i8, ' ***')", MPI_id, Info%level, message%buffer_size
      !PRINT *

      mx = Info%mX(1)
      my = Info%mX(2)
      mz = Info%mX(3)

      !PRINT "('IO_ParseRemoteGrid::Info%mGlobal = [', 6i4, '].')", Info%mGlobal
      !PRINT "('IO_ParseRemoteGrid::total doubles = ', i6, '.')", PRODUCT(Info%mX) * NrVars

      ! Pack the grid structure's global coordinates.
      CALL PackData(message, Info%mGlobal)

      ! Pack the cell-centered data.
      CALL PackData(message, Info%q(1:mx, 1:my, 1:mz, 1:NrVars))
      IF (NrDiagnosticVars > 0) THEN
         CALL PackData(message, Info%diagnostics(1:mx, 1:my, 1:mz, 1:NrDiagnosticVars))
         DEALLOCATE(Info%diagnostics)
         NULLIFY(Info%diagnostics)
      END IF

      !IF (ALL(Info%mGlobal == RESHAPE((/ 19, 19, 33, 26, 26, 38 /), (/3, 2/)))) THEN
      !   PRINT *, "Killing ParseRemoteGrid..."
      !   STOP
      !END IF

      ! Only gets executed if the problem is MHD.
      IF (MaintainAuxArrays) THEN
         ! Face-centered data does not take up the entire aux cube, so we pack
         ! the data in slices along a single dimension.
         CALL PackData(message, Info%aux(1:mx+1, 1:my, 1:mz, 1))
         CALL PackData(message, Info%aux(1:mx, 1:my+1, 1:mz, 2))
         IF (nDim == 3) CALL PackData(message, Info%aux(1:mx, 1:my, 1:mz+1, 3))
      END IF
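      ! For reference, the three slices above have shapes (mx+1, my, mz),
      ! (mx, my+1, mz), and (mx, my, mz+1): one staggered component per
      ! dimension (presumably the x-, y-, and z-face magnetic field values
      ! on an MHD run). IO_UnparseRemoteGrid reads back exactly these
      ! shapes, in this order.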
      ! Pack costmap data if the level is not the finest level (otherwise there's
      ! no need for costmap data).
      ! IF (message%level < finest_level) CALL PackData(message, Info%costmap(1:mx, 1:my, 1:mz, 1:2))
      !PRINT "('*** IO_ParseRemoteGrid(', i2, ', ', i2, ')::done ***')", MPI_id, Info%level
      !PRINT *, "done."

   END SUBROUTINE IO_ParseRemoteGrid

   !> Unpacks a grid from a message into a newly allocated InfoDef structure.
   SUBROUTINE IO_UnparseRemoteGrid(message, Info, finest_level)

      TYPE(PackedMessage), POINTER :: message
      TYPE(InfoDef), POINTER :: Info
      INTEGER :: finest_level

      INTEGER, DIMENSION(3,2) :: mGlobal
      INTEGER :: iErr
      INTEGER :: mx, my, mz
      INTEGER :: i, j, k

      !PRINT *
      !PRINT "('*** IO_UnparseRemoteGrid(', i2, ', ', i2, ') starting ***')", message%remote_proc, message%level
      !PRINT "('*** IO_UnparseRemoteGrid(', i2, ', ', i2, ')::buffer size = ', i8, ' ***')", message%remote_proc, message%level, message%buffer_size

      ! Unpack the global coordinates of the grid.
      CALL UnpackData(message, mGlobal)

      !PRINT "('IO_UnparseRemoteGrid::mGlobal = [', 6i4, '].')", mGlobal

      ! Calculate the local dimensions from the global coordinates.
      mx = mGlobal(1,2) - mGlobal(1,1) + 1
      my = mGlobal(2,2) - mGlobal(2,1) + 1
      mz = mGlobal(3,2) - mGlobal(3,1) + 1

      !PRINT "('IO_UnparseRemoteGrid::[mx, my, mz] = [', 3i4, ']; product = ', i6, '.')", mx, my, mz, mx*my*mz*NrVars

      ! Construct a node info structure from the coordinates we have obtained.
      ! CALL InitInfo(Info, message%level, mGlobal)
      ALLOCATE(Info)
      ALLOCATE(Info%q(1:mx, 1:my, 1:mz, 1:NrVars))

      ! Unparse the cell-centered data into the new InfoDef structure.
      CALL UnpackData(message, Info%q)
      IF (NrDiagnosticVars > 0) THEN
         ALLOCATE(Info%diagnostics(1:mx, 1:my, 1:mz, 1:NrDiagnosticVars))
         CALL UnpackData(message, Info%diagnostics)
      END IF

      ! Only runs on MHD jobs.
      IF (MaintainAuxArrays) THEN
         ! Face-centered data does not take up the entire aux cube, so we unpack
         ! the data in slices along a single dimension, with a different MHD flux
         ! variable associated with each slice.
         IF (nDim == 2) THEN
            ALLOCATE(Info%aux(1:mx+1, 1:my+1, 1, 2))
         ELSE
            ALLOCATE(Info%aux(1:mx+1, 1:my+1, 1:mz+1, 3))
         END IF
         CALL UnpackData(message, Info%aux(1:mx+1, 1:my, 1:mz, 1))
         CALL UnpackData(message, Info%aux(1:mx, 1:my+1, 1:mz, 2))
         IF (nDim == 3) CALL UnpackData(message, Info%aux(1:mx, 1:my, 1:mz+1, 3))
      END IF

      ! Unpack costmap data to the Info structure if we are not on the finest level.
      ! IF (message%level < finest_level) CALL UnpackData(message, Info%costmap(1:mx, 1:my, 1:mz, 1:2))
      !PRINT "('*** IO_UnparseRemoteGrid(', i2, ', ', i2, ') done ***')", message%remote_proc, message%level
      !PRINT *

   END SUBROUTINE IO_UnparseRemoteGrid

   !> Packs the number of children a node has, followed by each child node.
   SUBROUTINE IO_ParseNodeChildren(message, node)

      TYPE(PackedMessage), POINTER :: message
      TYPE(Nodedef), POINTER :: node

      TYPE(NodedefList), POINTER :: child_list

      ! Pack the number of children this node has.
      CALL PackData(message, NodeCount(node%children))

      child_list => node%children
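      ! The resulting wire format is a count followed by that many fixed-size
      ! child records, each written by IO_ParseRemoteNode:
      !
      !    [nchildren] [child 1: mGlobal, MPI_ID] ... [child n: mGlobal, MPI_ID]
      !
      ! IO_UnparseNodeChildren below reads the count first and then loops, so
      ! the two routines must be kept symmetric.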
      ! Loop over the list and pack each of the node's children.
      DO WHILE (ASSOCIATED(child_list))
         CALL IO_ParseRemoteNode(message, child_list%self)
         child_list => child_list%next
      END DO

   END SUBROUTINE IO_ParseNodeChildren

   !> Unpacks child_count child nodes from a message and attaches them to the
   !> given node's child list.
   SUBROUTINE IO_UnparseNodeChildren(message, child_count, node)

      TYPE(PackedMessage), POINTER :: message
      INTEGER :: child_count
      TYPE(Nodedef), POINTER :: node

      INTEGER :: nchildren
      INTEGER :: n
      TYPE(Nodedef), POINTER :: staging_node

      ! Retrieve the number of children packed into the message.
      CALL UnpackData(message, child_count)

      NULLIFY(node%children)
      NULLIFY(node%lastchild)
      DO n = 1, child_count
         NULLIFY(staging_node)

         ! Unpack the node data to a temporary node structure.
         CALL IO_UnparseRemoteNode(message, staging_node)

         ! Add the new temporary child node to the given node's child list.
         CALL AddNodeToList(staging_node, node%lastchild, node%children)
      END DO

   END SUBROUTINE IO_UnparseNodeChildren

   !> On restart, sends a box's bounds, child boxes, and grid data from the
   !> master to the worker that owns the box.
   SUBROUTINE IO_SendDataToWorkers(chandle, level, box, child_list, child_count)

      TYPE(ChomboHandle), POINTER :: chandle
      INTEGER :: level
      TYPE(NodeBox), POINTER :: box, child_box
      TYPE(NodeBoxList), POINTER :: child_list
      INTEGER :: child_count

      INTEGER :: n
      TYPE(Nodedef), POINTER :: node
      TYPE(InfoDef), POINTER :: Info
      REAL(KIND=qPrec), DIMENSION(:,:,:,:), POINTER :: qdata, auxdata
      INTEGER :: mx, my, mz
      INTEGER :: i, j, k
      INTEGER :: effective_finest_level
      INTEGER :: iErr

      effective_finest_level = MIN(chandle%finest_level, MaxLevel)

      NULLIFY(qdata, auxdata)
      mx = box%mGlobal(1,2) - box%mGlobal(1,1) + 1
      my = box%mGlobal(2,2) - box%mGlobal(2,1) + 1
      mz = box%mGlobal(3,2) - box%mGlobal(3,1) + 1

      ! Send the bounds of the arrays to follow.
      CALL MPI_Send(box%mGlobal, 6, MPI_INTEGER, box%MPI_ID, TRANSMIT_IO_WORKER_DATA, MPI_COMM_WORLD, iErr)

      CALL MPI_Send(child_count, 1, MPI_INTEGER, box%MPI_ID, TRANSMIT_IO_WORKER_DATA, MPI_COMM_WORLD, iErr)

      ! Loop through the child box list and send the appropriate number of children.
      DO i = 1, child_count
         child_box => child_list%self
         child_box%MPI_ID = box%MPI_ID
         CALL MPI_Send(child_box%mGlobal, 6, MPI_INTEGER, box%MPI_ID, TRANSMIT_IO_WORKER_DATA, MPI_COMM_WORLD, iErr)
         child_list => child_list%next
      END DO

      IF (level >= 0) THEN
         ! Retrieve and send the cell-centered data.
         CALL IO_GetQDataFromChomboFile(chandle, box%mGlobal, qdata)
         CALL MPI_Send(qdata, SIZE(qdata), MPI_DOUBLE_PRECISION, box%MPI_ID, TRANSMIT_IO_WORKER_DATA, MPI_COMM_WORLD, iErr)
         DEALLOCATE(qdata)
         NULLIFY(qdata)
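         ! The element counts in the MPI_Send calls here and below must match
         ! the corresponding MPI_Recv counts in IO_RecvDataFromMaster; both
         ! sides derive them from the same box bounds, so any change to the
         ! slab shapes has to be made in both routines.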
         ! For MHD problems, retrieve and send the face-centered data.
         IF (MaintainAuxArrays) THEN
            CALL IO_GetAuxDataFromChomboFile(chandle, box%mGlobal, auxdata)
            IF (nDim == 2) THEN
               CALL MPI_Send(auxdata, (mx+1)*(my+1)*2, MPI_DOUBLE_PRECISION, box%MPI_ID, TRANSMIT_IO_WORKER_DATA, MPI_COMM_WORLD, iErr)
            ELSE
               CALL MPI_Send(auxdata, (mx+1)*(my+1)*(mz+1)*3, MPI_DOUBLE_PRECISION, box%MPI_ID, TRANSMIT_IO_WORKER_DATA, MPI_COMM_WORLD, iErr)
            END IF
            DEALLOCATE(auxdata)
            NULLIFY(auxdata)
         END IF
      END IF

   END SUBROUTINE IO_SendDataToWorkers

   !> On restart, receives a box's children and grid data from the master.
   SUBROUTINE IO_RecvDataFromMaster(level, mGlobal)

      INTEGER :: level
      INTEGER, DIMENSION(3,2) :: mGlobal

      TYPE(PackedMessage), POINTER :: message
      INTEGER, DIMENSION(3,2) :: mChild
      TYPE(NodeBox), POINTER :: remote_box, child_box
      TYPE(Nodedef), POINTER :: node
      TYPE(InfoDef), POINTER :: Info
      INTEGER :: mx, my, mz
      INTEGER :: child_count
      INTEGER :: n
      TYPE(NodeBoxList), POINTER :: box_list, last_box
      CHARACTER(LEN=28) :: fname
      REAL(KIND=qPrec), DIMENSION(:,:,:,:), ALLOCATABLE :: recv_buffer
      INTEGER :: iErr
      INTEGER :: status(MPI_STATUS_SIZE)

      CALL CreateNodeBox(mGlobal, remote_box, MPI_id)
      CALL StrictFindNode(level, remote_box, node, fname)

      CALL MPI_Recv(child_count, 1, MPI_INTEGER, 0, TRANSMIT_IO_WORKER_DATA, MPI_COMM_WORLD, status, iErr)

      NULLIFY(last_box)
      NULLIFY(box_list)

      ! Build a box list containing this node's children: receive child_count
      ! child boxes from the master and append each one to the list.
      DO n = 1, child_count
         NULLIFY(child_box)
         CALL MPI_Recv(mChild, 6, MPI_INTEGER, 0, TRANSMIT_IO_WORKER_DATA, MPI_COMM_WORLD, status, iErr)
         CALL AddNodeBoxToList(last_box, box_list)
         last_box%self%mGlobal = mChild
         last_box%self%MPI_ID = MPI_id
      END DO

      ! If this is a data level, then there will be cell-centered (and, for MHD
      ! runs, face-centered) data to receive.
      IF (level >= 0) THEN
         Info => node%Info

         mx = Info%mX(1)
         my = Info%mX(2)
         mz = Info%mX(3)

         ! Receive the cell-centered data.
         ALLOCATE(recv_buffer(mx, my, mz, NrVars))
         CALL MPI_Recv(recv_buffer, SIZE(recv_buffer), MPI_DOUBLE_PRECISION, 0, TRANSMIT_IO_WORKER_DATA, MPI_COMM_WORLD, status, iErr)
         Info%q(1:mx, 1:my, 1:mz, 1:NrVars) = recv_buffer
         DEALLOCATE(recv_buffer)

         ! If using face-centered arrays, receive the face-centered data.
         IF (MaintainAuxArrays) THEN
            IF (nDim == 2) THEN
               ALLOCATE(recv_buffer(mx+1, my+1, 1, 2))
               CALL MPI_Recv(recv_buffer, (mx+1)*(my+1)*2, MPI_DOUBLE_PRECISION, 0, TRANSMIT_IO_WORKER_DATA, MPI_COMM_WORLD, status, iErr)
               Info%aux(1:mx+1, 1:my+1, 1:1, 1:2) = recv_buffer
            ELSE IF (nDim == 3) THEN
               ALLOCATE(recv_buffer(mx+1, my+1, mz+1, 3))
               CALL MPI_Recv(recv_buffer, (mx+1)*(my+1)*(mz+1)*3, MPI_DOUBLE_PRECISION, 0, TRANSMIT_IO_WORKER_DATA, MPI_COMM_WORLD, status, iErr)
               Info%aux(1:mx+1, 1:my+1, 1:mz+1, 1:3) = recv_buffer
            END IF
            DEALLOCATE(recv_buffer)

            CALL UpdateAux(Info, RESHAPE((/ 1, 1, 1, mx, my, mz /), (/3, 2/)))
         END IF

      END IF

      ! Redistribute the child nodes.
      CALL ChomboReCreateChildren(node, level, child_count, box_list)

      CALL ClearNodeBoxList(box_list)
      CALL DestroyNodeBox(remote_box)

   END SUBROUTINE IO_RecvDataFromMaster

END MODULE IOParsing
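For reference, the restart handshake implemented by the last two routines pairs up as sketched below. This is a reading of the code above, not documentation from AstroBEAR itself; note in particular that IO_RecvDataFromMaster takes mGlobal as an argument and starts receiving at child_count, so the leading bounds message sent by IO_SendDataToWorkers is evidently consumed by the receiver's caller.

! Message sequence from master (rank 0) to worker box%MPI_ID, all sent with
! tag TRANSMIT_IO_WORKER_DATA:
!
!   1. box%mGlobal      6 x MPI_INTEGER    (read by the caller of
!                                           IO_RecvDataFromMaster)
!   2. child_count      1 x MPI_INTEGER
!   3. child mGlobal    6 x MPI_INTEGER    (repeated child_count times)
!   4. q data           mx*my*mz*NrVars x MPI_DOUBLE_PRECISION
!                                           (only if level >= 0)
!   5. aux data         (mx+1)*(my+1)*2 doubles in 2D, or
!                       (mx+1)*(my+1)*(mz+1)*3 doubles in 3D
!                                           (only if MaintainAuxArrays)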