<?xml version='1.0' encoding='utf-8' standalone='yes'?>
<instrumentationManifest
xmlns="http://schemas.microsoft.com/win/2004/08/events"
xmlns:attr="http://manifests.microsoft.com/win/2004/08/windows/hpc/mpi/api/events#attributes"
xmlns:coll="http://manifests.microsoft.com/win/2004/08/windows/hpc/mpi/api/events#collective"
xmlns:comm="http://manifests.microsoft.com/win/2004/08/windows/hpc/mpi/api/events#communication"
xmlns:dt="http://manifests.microsoft.com/win/2004/08/windows/hpc/mpi/api/events#datatype"
xmlns:eh="http://manifests.microsoft.com/win/2004/08/windows/hpc/mpi/api/events#errorhandling"
xmlns:grp="http://manifests.microsoft.com/win/2004/08/windows/hpc/mpi/api/events#group"
xmlns:info="http://manifests.microsoft.com/win/2004/08/windows/hpc/mpi/api/events#info"
xmlns:init="http://manifests.microsoft.com/win/2004/08/windows/hpc/mpi/api/events#initialize"
xmlns:io="http://manifests.microsoft.com/win/2004/08/windows/hpc/mpi/api/events#io"
xmlns:misc="http://manifests.microsoft.com/win/2004/08/windows/hpc/mpi/networking/events#misc"
xmlns:mpi="http://manifests.microsoft.com/win/2004/08/windows/hpc/mpi/events"
xmlns:msg="http://manifests.microsoft.com/win/2004/08/windows/hpc/mpi/networking/events#messages"
xmlns:nd="http://manifests.microsoft.com/win/2004/08/windows/hpc/mpi/networking/events#networkdirect"
xmlns:net="http://manifests.microsoft.com/win/2004/08/windows/hpc/mpi/networking/events"
xmlns:p2p="http://manifests.microsoft.com/win/2004/08/windows/hpc/mpi/api/events#point2point"
xmlns:poll="http://manifests.microsoft.com/win/2004/08/windows/hpc/mpi/api/events#polling"
xmlns:rma="http://manifests.microsoft.com/win/2004/08/windows/hpc/mpi/api/events#rma"
xmlns:shm="http://manifests.microsoft.com/win/2004/08/windows/hpc/mpi/networking/events#sharedmemory"
xmlns:sock="http://manifests.microsoft.com/win/2004/08/windows/hpc/mpi/networking/events#sockets"
xmlns:spwn="http://manifests.microsoft.com/win/2004/08/windows/hpc/mpi/api/events#spawn"
xmlns:topo="http://manifests.microsoft.com/win/2004/08/windows/hpc/mpi/api/events#topology"
xmlns:win="http://manifests.microsoft.com/win/2004/08/windows/events"
xmlns:xs="http://www.w3.org/2001/XMLSchema"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
>
<instrumentation>
<events>
<provider
guid="{E6D64EF7-EA35-4D3C-A657-31C5311D1759}"
messageFileName="msmpires.dll"
name="Microsoft-MPI-Channel-Provider"
parameterFileName="msmpires.dll"
resourceFileName="msmpires.dll"
symbol="MICROSOFT_MPI_CHANNEL_PROVIDER"
>
<channels>
<channel
chid="DebugChannel"
enabled="false"
name="Microsoft-MPI-Channel"
symbol="MICROSOFT_MPI_CHANNEL"
type="Debug"
>
<logging>
<!-- enable ring buffer logging -->
<retention>false</retention>
<!-- (1024^3) * 15 = 15 GiB, in bytes -->
<maxSize>16106127360</maxSize>
</logging>
<publishing>
<!-- all events less than or equal to win:Verbose will be logged -->
<level>5</level>
<!-- mask for event filtering -->
<keywords>0x00</keywords>
<!-- seconds of inactivity before ETW stream is flushed -->
<latency>2</latency>
<!-- SystemTime = use system clock | QPC = use QueryPerformanceCounter for timestamps -->
<clockType>QPC</clockType>
</publishing>
</channel>
</channels>
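<!--
  Hedged usage sketch (illustrative, not part of the manifest schema): once
  this manifest is installed with "wevtutil im mpitrace.man", the debug
  channel above can be enabled with "wevtutil sl Microsoft-MPI-Channel
  /e:true", or events can be captured straight to an ETL file through an
  ETW session, e.g. "logman start mpitrace
  -p {E6D64EF7-EA35-4D3C-A657-31C5311D1759} 0x7 5 -o mpitrace.etl -ets";
  the keyword mask 0x7 and level 5 here are illustrative values, not
  requirements of this provider.
-->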
<events>
<!-- ND Events -->
<!-- ch3u_nd_adapter.cpp -->
<event
channel="DebugChannel"
keywords="channel_nd"
level="win:Error"
message="$(string.error.AdapterInit)"
symbol="_ND_Error_AdapterInit"
template="AdapterInit"
value="100"
/>
<event
channel="DebugChannel"
keywords="channel_nd"
level="win:Informational"
message="$(string.info.AdapterShutdown)"
symbol="_ND_Info_AdapterShutdown"
template="object"
value="101"
/>
<event
channel="DebugChannel"
keywords="channel_nd"
level="win:Error"
message="$(string.error.AdapterListen)"
symbol="_ND_Error_AdapterListen"
template="AdapterListen"
value="102"
/>
<event
channel="DebugChannel"
keywords="channel_nd"
level="win:Error"
message="$(string.error.AdapterGetConnectionRequest)"
symbol="_ND_Error_AdapterGetConnectionRequest"
template="AdapterGetConnectionRequest"
value="103"
/>
<event
channel="DebugChannel"
keywords="channel_nd"
level="win:Informational"
message="$(string.info.AdapterConnect)"
symbol="_ND_Info_AdapterConnect"
template="object.host.port.pointer.pointer"
value="104"
/>
<event
channel="DebugChannel"
keywords="channel_nd"
level="win:Error"
message="$(string.error.AdapterCreateConnector)"
symbol="_ND_Error_AdapterCreateConnector"
template="AdapterCreateConnector"
value="105"
/>
<event
channel="DebugChannel"
keywords="channel_nd"
level="win:Error"
message="$(string.error.AdapterAccept.GetPeerAddress)"
symbol="_ND_Error_AdapterAccept_GetPeerAddress"
template="object.error.pointer.pointer.pointer"
value="106"
/>
<event
channel="DebugChannel"
keywords="channel_nd"
level="win:Informational"
message="$(string.info.AdapterAccept.Reject)"
symbol="_ND_Info_AdapterAccept_Reject"
template="object.error.pointer.pointer.pointer"
value="107"
/>
<event
channel="DebugChannel"
keywords="channel_nd"
level="win:Informational"
message="$(string.info.AdapterAccept.Success)"
symbol="_ND_Info_AdapterAccept_Success"
template="object.host.port.pointer.pointer.pointer"
value="108"
/>
<event
channel="DebugChannel"
keywords="channel_nd"
level="win:Error"
message="$(string.error.AdapterGetConnSucceeded)"
symbol="_ND_Error_AdapterGetConnSucceeded"
template="AdapterGetConnSucceeded"
value="109"
/>
<event
channel="DebugChannel"
keywords="channel_nd"
level="win:Informational"
message="$(string.info.AdapterGetConnSucceeded)"
symbol="_ND_Info_AdapterGetConnSucceeded"
template="AdapterGetConnSucceeded"
value="110"
/>
<event
channel="DebugChannel"
keywords="channel_nd"
level="win:Error"
message="$(string.error.AdapterGetConnReqHandler)"
symbol="_ND_Error_AdapterGetConnReqHandler"
template="object.error.pointer"
value="111"
/>
<!-- ch3u_nd_endpoint.cpp -->
<event
channel="DebugChannel"
keywords="channel_nd"
level="win:Error"
message="$(string.error.Endpoint)"
symbol="_ND_Error_Endpoint"
template="Endpoint"
value="112"
/>
<event
channel="DebugChannel"
keywords="channel_nd"
level="win:Informational"
message="$(string.info.Endpoint)"
symbol="_ND_Info_Endpoint"
template="Endpoint"
value="113"
/>
<!-- ch3u_nd_env.cpp -->
<event
channel="DebugChannel"
keywords="channel_nd"
level="win:Error"
message="$(string.error.EnvironmentListen)"
symbol="_ND_Error_EnvironmentListen"
template="EnvironmentListen"
value="114"
/>
<event
channel="DebugChannel"
keywords="channel_nd"
level="win:Informational"
message="$(string.info.EnvironmentListen)"
symbol="_ND_Info_EnvironmentListen"
template="EnvironmentListen"
value="115"
/>
<event
channel="DebugChannel"
keywords="channel_nd"
level="win:Informational"
message="$(string.info.EnvironmentListen.Success)"
symbol="_ND_Info_EnvironmentListen_Success"
template="object.host.port"
value="116"
/>
<event
channel="DebugChannel"
keywords="channel_nd"
level="win:Error"
message="$(string.error.EnvironmentGetBusinessCard)"
symbol="_ND_Error_EnvironmentGetBusinessCard"
template="EnvironmentGetBusinessCard"
value="117"
/>
<event
channel="DebugChannel"
keywords="channel_nd"
level="win:Error"
message="$(string.error.EnvironmentConnect)"
symbol="_ND_Error_EnvironmentConnect"
template="EnvironmentConnect"
value="118"
/>
<event
channel="DebugChannel"
keywords="channel_nd"
level="win:Informational"
message="$(string.info.EnvironmentConnect)"
symbol="_ND_Info_EnvironmentConnect"
template="EnvironmentConnect"
value="119"
/>
<event
channel="DebugChannel"
keywords="channel_nd"
level="win:Informational"
message="$(string.info.EnvironmentConnect.Success)"
symbol="_ND_Info_EnvironmentConnect_Success"
template="object.rank.businesscard"
value="120"
/>
<!-- Shared Memory -->
<event
channel="DebugChannel"
keywords="channel_shmem"
level="win:Error"
message="$(string.error.MPIDI_CH3I_Shm_connect)"
symbol="_SHMEM_Error_MPIDI_CH3I_Shm_connect"
template="Shm_connect"
value="121"
/>
<event
channel="DebugChannel"
keywords="channel_shmem"
level="win:Informational"
message="$(string.info.MPIDI_CH3I_Shm_connect.Success)"
symbol="_SHMEM_Info_MPIDI_CH3I_Shm_connect_Success"
template="host.businesscard"
value="122"
/>
<event
channel="DebugChannel"
keywords="channel_shmem"
level="win:Error"
message="$(string.error.MPIDI_CH3I_Accept_shm_connection)"
symbol="_SHMEM_Error_MPIDI_CH3I_Accept_shm_connection"
template="Shm_accept"
value="123"
/>
<event
channel="DebugChannel"
keywords="channel_shmem"
level="win:Informational"
message="$(string.info.MPIDI_CH3I_Accept_shm_connection.Success)"
symbol="_SHMEM_Info_MPIDI_CH3I_Accept_shm_connection_Success"
template="rank.rank"
value="124"
/>
<event
channel="DebugChannel"
keywords="channel_shmem"
level="win:Informational"
message="$(string.info.MPIDI_CH3I_Accept_shm_connection.Success)"
symbol="_SHMEM_Info_MPIDI_CH3I_Accept_shm_connection_SuccessAfterAttach"
template="rank.rank"
value="125"
/>
<!-- Sockets -->
<!-- ch3_progress_sock.c -->
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Error"
message="$(string.error.MPIDI_CH3I_Sock_connect.PostFailed)"
symbol="_SOCKETS_Error_MPIDI_CH3I_Sock_connect_PostFailed"
template="rank.rank.businesscard"
value="126"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Informational"
message="$(string.info.CloseConnectionComplete_cb.Terminated)"
symbol="_SOCKETS_Info_CloseConnectionComplete_cb_Terminated"
template="rank"
value="127"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Informational"
message="$(string.info.CloseConnectionComplete_cb.Intentional)"
symbol="_SOCKETS_Info_CloseConnectionComplete_cb_Intentional"
template="empty"
value="128"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Informational"
message="$(string.info.MPIDI_CH3I_Post_close_connection.Ignored)"
symbol="_SOCKETS_Info_MPIDI_CH3I_Post_close_connection_Ignored"
template="rank"
value="129"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Informational"
message="$(string.info.MPIDI_CH3I_Post_close_connection.Honored)"
symbol="_SOCKETS_Info_MPIDI_CH3I_Post_close_connection_Honored"
template="rank"
value="130"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Error"
message="$(string.error.SendFailed_cb)"
symbol="_SOCKETS_Error_SendFailed_cb"
template="error.host.port"
value="131"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Informational"
message="$(string.info.RecvFailed_cb.SocketClosed)"
symbol="_SOCKETS_Info_RecvFailed_cb_SocketClosed"
template="host.port"
value="132"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Informational"
message="$(string.info.RecvFailed_cb.SocketAborted)"
symbol="_SOCKETS_Info_RecvFailed_cb_SocketAborted"
template="error.host.port"
value="133"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Error"
message="$(string.error.RecvFailed_cb.Failure)"
symbol="_SOCKETS_Error_RecvFailed_cb_Failure"
template="error.host.port"
value="134"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Informational"
message="$(string.info.connection_accept)"
symbol="_SOCKETS_Info_connection_accept"
template="rank"
value="135"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Informational"
message="$(string.info.connection_accept_CloseOldConnection)"
symbol="_SOCKETS_Info_connection_accept_CloseOldConnection"
template="rank"
value="136"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Informational"
message="$(string.info.connection_reject)"
symbol="_SOCKETS_Info_connection_reject"
template="host.port"
value="137"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Error"
message="$(string.error.read_message_data)"
symbol="_SOCKETS_Error_read_message_data"
template="error.rank"
value="138"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Error"
message="$(string.error.MPIDI_CH3I_SOCK_start_write.PostSendVFailed)"
symbol="_SOCKETS_Error_MPIDI_CH3I_SOCK_start_write_PostSendVFailed"
template="error.rank"
value="139"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Informational"
message="$(string.info.SendOpenResponseSucceeded_cb.Success)"
symbol="_SOCKETS_Info_SendOpenResponseSucceeded_cb_Success"
template="host.port"
value="140"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Informational"
message="$(string.info.SendOpenResponseSucceeded_cb.HeadToHead)"
symbol="_SOCKETS_Info_SendOpenResponseSucceeded_cb_HeadToHead"
template="host.port"
value="141"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Error"
message="$(string.error.RecvOpenRequestDataSucceeded_cb.PGFail)"
symbol="_SOCKETS_Error_RecvOpenRequestDataSucceeded_cb_PGFail"
template="empty"
value="142"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Error"
message="$(string.error.RecvOpenRequestDataSucceeded_cb.SendResponseFailed)"
symbol="_SOCKETS_Error_RecvOpenRequestDataSucceeded_cb_SendResponseFailed"
template="error.rank"
value="143"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Error"
message="$(string.error.RecvOpenRequestSucceeded_cb)"
symbol="_SOCKETS_Error_RecvOpenRequestSucceeded_cb"
template="RecvOpenRequestSucceeded"
value="144"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Informational"
message="$(string.info.RecvOpenRequestSucceeded_cb)"
symbol="_SOCKETS_Info_RecvOpenRequestSucceeded_cb"
template="RecvOpenRequestSucceeded"
value="145"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Informational"
message="$(string.info.RecvOpenRequestFailed_cb)"
symbol="_SOCKETS_Info_RecvOpenRequestFailed_cb"
template="host.port"
value="146"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Informational"
message="$(string.info.AcceptNewConnectionFailed_cb.Canceled)"
symbol="_SOCKETS_Info_AcceptNewConnectionFailed_cb_Canceled"
template="empty"
value="147"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Error"
message="$(string.error.AcceptNewConnectionFailed_cb.Failed)"
symbol="_SOCKETS_Error_AcceptNewConnectionFailed_cb_Failed"
template="error.message"
value="148"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Error"
message="$(string.error.AcceptNewConnectionSucceeded_cb.PostListener)"
symbol="_SOCKETS_Error_AcceptNewConnectionSucceeded_cb_PostListener"
template="error"
value="149"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Informational"
message="$(string.info.AcceptNewConnectionSucceeded_cb.HeadToHead)"
symbol="_SOCKETS_Info_AcceptNewConnectionSucceeded_cb_HeadToHead"
template="error.host.port"
value="150"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Informational"
message="$(string.info.AcceptNewConnectionSucceeded_cb.Succeeded)"
symbol="_SOCKETS_Info_AcceptNewConnectionSucceeded_cb_Succeeded"
template="host.port"
value="151"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Informational"
message="$(string.info.MPIDI_CH3I_Post_accept)"
symbol="_SOCKETS_Info_MPIDI_CH3I_Post_accept"
template="empty"
value="152"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Informational"
message="$(string.info.RecvOpenResponseSucceeded_cb.Disconnect)"
symbol="_SOCKETS_Info_RecvOpenResponseSucceeded_cb_Disconnect"
template="host.port"
value="153"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Error"
message="$(string.error.RecvOpenResponseSucceeded_cb.UnexpectedControl)"
symbol="_SOCKETS_Error_RecvOpenResponseSucceeded_cb_UnexpectedControl"
template="host.port"
value="154"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Informational"
message="$(string.info.RecvOpenResponseSucceeded_cb.ConnectionComplete)"
symbol="_SOCKETS_Info_RecvOpenResponseSucceeded_cb_ConnectionComplete"
template="host.port"
value="155"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Informational"
message="$(string.info.RecvOpenResponseSucceeded_cb.HeadToHeadRejected)"
symbol="_SOCKETS_Info_RecvOpenResponseSucceeded_cb_HeadToHeadRejected"
template="host.port"
value="156"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Informational"
message="$(string.info.SendOpenRequestSucceeded_cb.Disconnected)"
symbol="_SOCKETS_Info_SendOpenRequestSucceeded_cb_Disconnected"
template="rank"
value="157"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Error"
message="$(string.error.SendOpenRequestSucceeded_cb.PostRecvPktFailed)"
symbol="_SOCKETS_Error_SendOpenRequestSucceeded_cb_PostRecvPktFailed"
template="rank"
value="158"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Informational"
message="$(string.info.SendOpenRequestSucceeded_cb.Succeeded)"
symbol="_SOCKETS_Info_SendOpenRequestSucceeded_cb_Succeeded"
template="rank"
value="159"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Error"
message="$(string.error.send_open_request.Failed)"
symbol="_SOCKETS_Error_send_open_request_Failed"
template="error.rank"
value="160"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Informational"
message="$(string.info.send_open_request.Succeeded)"
symbol="_SOCKETS_Info_send_open_request_Succeeded"
template="rank"
value="161"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Informational"
message="$(string.info.ConnectFailed_cb.Disconnect)"
symbol="_SOCKETS_Info_ConnectFailed_cb_Disconnect"
template="rank"
value="162"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Error"
message="$(string.error.ConnectFailed_cb.Failed)"
symbol="_SOCKETS_Error_ConnectFailed_cb_Failed"
template="error.rank"
value="163"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Informational"
message="$(string.info.ConnectSucceeded_cb.Disconnect)"
symbol="_SOCKETS_Info_ConnectSucceeded_cb_Disconnect"
template="rank"
value="164"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Informational"
message="$(string.info.ConnectSucceeded_cb.Succeeded)"
symbol="_SOCKETS_Info_ConnectSucceeded_cb_Succeeded"
template="rank"
value="165"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Informational"
message="$(string.info.MPIDI_CH3I_Post_connect)"
symbol="_SOCKETS_Info_MPIDI_CH3I_Post_connect"
template="rank.host.port"
value="166"
/>
<!-- sock.c -->
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Error"
message="$(string.error.sock_get_overlapped_result)"
symbol="_SOCKETS_Error_sock_get_overlapped_result"
template="empty"
value="167"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Error"
message="$(string.error.sock_get_overlapped_result.Failed)"
symbol="_SOCKETS_Error_sock_get_overlapped_result_Failed"
template="error.message"
value="168"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Error"
message="$(string.error.sock_safe_send)"
symbol="_SOCKETS_Error_sock_safe_send"
template="error.message"
value="169"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Error"
message="$(string.error.sock_safe_receive)"
symbol="_SOCKETS_Error_sock_safe_receive"
template="error.message"
value="170"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Error"
message="$(string.error.MPIDU_Sock_init)"
symbol="_SOCKETS_Error_MPIDU_Sock_init"
template="error.message"
value="171"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Error"
message="$(string.error.socki_get_host_list)"
symbol="_SOCKETS_Error_socki_get_host_list"
template="error.message.host"
value="172"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Error"
message="$(string.error.socki_get_host_list.AddIp)"
symbol="_SOCKETS_Error_socki_get_host_list_AddIp"
template="error"
value="173"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Error"
message="$(string.error.socki_get_host_list.AddHostname)"
symbol="_SOCKETS_Error_socki_get_host_list_AddHostname"
template="error"
value="174"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Error"
message="$(string.error.MPIDU_Sock_get_host_description)"
symbol="_SOCKETS_Error_MPIDU_Sock_get_host_description"
template="error.message"
value="175"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Error"
message="$(string.error.MPIDU_Sock_create_native_fd)"
symbol="_SOCKETS_Error_MPIDU_Sock_create_native_fd"
template="error.message"
value="176"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Error"
message="$(string.error.easy_create_ranged.Port)"
symbol="_SOCKETS_Error_easy_create_ranged_Port"
template="port.maxport"
value="177"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Error"
message="$(string.error.easy_create_ranged)"
symbol="_SOCKETS_Error_easy_create_ranged"
template="error.message"
value="178"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Error"
message="$(string.error.MPIDU_Sock_listen)"
symbol="_SOCKETS_Error_MPIDU_Sock_listen"
template="error.message"
value="179"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Error"
message="$(string.error.post_next_accept)"
symbol="_SOCKETS_Error_post_next_accept"
template="error.message"
value="180"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Informational"
message="$(string.info.AcceptFailed.ResetPosted)"
symbol="_SOCKETS_Info_AcceptFailed_ResetPosted"
template="empty"
value="181"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Error"
message="$(string.error.AcceptFailed.ResetPostFailed)"
symbol="_SOCKETS_Error_AcceptFailed_ResetPostFailed"
template="error"
value="182"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Error"
message="$(string.error.AcceptFailed)"
symbol="_SOCKETS_Error_AcceptFailed"
template="error.message"
value="183"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Informational"
message="$(string.info.sock_finish_accept)"
symbol="_SOCKETS_Info_sock_finish_accept"
template="empty"
value="184"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Error"
message="$(string.error.gle_connect_ex.WSAIoctlSocketError)"
symbol="_SOCKETS_Error_gle_connect_ex_WSAIoctlSocketError"
template="error.message"
value="185"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Informational"
message="$(string.info.gle_connect_ex.Succeeded)"
symbol="_SOCKETS_Info_gle_connect_ex_Succeeded"
template="host.port"
value="186"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Error"
message="$(string.error.gle_connect_ex.pfnConnectEx)"
symbol="_SOCKETS_Error_gle_connect_ex_pfnConnectEx"
template="error.message"
value="187"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Error"
message="$(string.error.gle_postpone_retry_connect)"
symbol="_SOCKETS_Error_gle_postpone_retry_connect"
template="error.message"
value="188"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Informational"
message="$(string.info.sock_cancel_inprogress_connect)"
symbol="_SOCKETS_Info_sock_cancel_inprogress_connect"
template="host.port"
value="189"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Error"
message="$(string.error.ConnectFailed)"
symbol="_SOCKETS_Error_ConnectFailed"
template="ConnectFailed"
value="190"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Informational"
message="$(string.info.ConnectFailed)"
symbol="_SOCKETS_Info_ConnectFailed"
template="ConnectFailed"
value="191"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Informational"
message="$(string.info.sock_finish_connect)"
symbol="_SOCKETS_Info_sock_finish_connect"
template="host.port"
value="192"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Error"
message="$(string.error.MPIDU_Sock_post_connect_endpoints)"
symbol="_SOCKETS_Error_MPIDU_Sock_post_connect_endpoints"
template="error.host.port"
value="193"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Error"
message="$(string.error.MPIDU_Sock_post_connect_gle_bind_any)"
symbol="_SOCKETS_Error_MPIDU_Sock_post_connect_gle_bind_any"
template="error.message"
value="194"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Error"
message="$(string.error.GracefulCloseFailed)"
symbol="_SOCKETS_Error_GracefulCloseFailed"
template="error.message.host.port"
value="195"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Informational"
message="$(string.info.GracefulCloseSucceeded)"
symbol="_SOCKETS_Info_GracefulCloseSucceeded"
template="empty"
value="196"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Error"
message="$(string.error.ReadSucceeded.ConnectionClosed)"
symbol="_SOCKETS_Error_ReadSucceeded_ConnectionClosed"
template="empty"
value="197"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Error"
message="$(string.error.ReadSucceeded.Error)"
symbol="_SOCKETS_Error_ReadSucceeded_Error"
template="error.message"
value="198"
/>
<event
channel="DebugChannel"
keywords="channel_sockets"
level="win:Error"
message="$(string.error.MPIDU_Sock_keepalive)"
symbol="_SOCKETS_Error_MPIDU_Sock_keepalive"
template="error.message"
value="199"
/>
<event
channel="DebugChannel"
keywords="info"
level="win:Informational"
message="$(string.info.MPI_Init_info)"
symbol="_MPI_Init_info"
template="rank.major.minor.build"
value="200"
/>
<event
channel="DebugChannel"
keywords="info"
level="win:Informational"
message="$(string.info.MPI_Finalize_info)"
symbol="_MPI_Finalize_info"
template="rank"
value="201"
/>
<event
channel="DebugChannel"
keywords="smpd"
level="win:Informational"
message="$(string.info.SMPD_Nodemanager.context)"
symbol="_SMPD_Context_info"
template="rank.context"
value="202"
/>
</events>
<levels/>
<opcodes/>
<keywords>
<keyword
mask="0x000000000001"
name="channel_sockets"
symbol="KEYWORD_SOCKETS"
/>
<keyword
mask="0x000000000002"
name="channel_shmem"
symbol="KEYWORD_SHMEM"
/>
<keyword
mask="0x000000000004"
name="channel_nd"
symbol="KEYWORD_ND"
/>
<keyword
mask="0x000000000008"
name="info"
symbol="KEYWORD_INFO"
/>
<keyword
mask="0x000000000010"
name="smpd"
symbol="KEYWORD_SMPD"
/>
</keywords>
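<!--
  The keyword masks above are bit flags and compose with bitwise OR: for
  example, an ETW session enabling this provider with keyword mask 0x5
  (channel_sockets 0x1 | channel_nd 0x4) would match both the socket and
  NetworkDirect events defined above.
-->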
<maps>
<valueMap name="AdapterInitSites">
<map
message="$(string.error.AdapterInit.Open)"
value="1"
/>
<map
message="$(string.error.AdapterInit.Query)"
value="2"
/>
<map
message="$(string.error.AdapterInit.CQDepth)"
value="3"
/>
<map
message="$(string.error.AdapterInit.InitiatorQDepth)"
value="4"
/>
<map
message="$(string.error.AdapterInit.RecvQDepth)"
value="5"
/>
</valueMap>
<valueMap name="AdapterListenSites">
<map
message="$(string.error.AdapterListen.CreateListener)"
value="1"
/>
<map
message="$(string.error.AdapterListen.Bind)"
value="2"
/>
<map
message="$(string.error.AdapterListen.GetLocalAddress)"
value="3"
/>
<map
message="$(string.error.AdapterListen.Listen)"
value="4"
/>
</valueMap>
<valueMap name="AdapterGetConnectionRequestSites">
<map
message="$(string.error.AdapterGetConnectionRequest.CreateConnector)"
value="1"
/>
<map
message="$(string.error.AdapterGetConnectionRequest.GetConnectionRequest)"
value="2"
/>
</valueMap>
<valueMap name="AdapterCreateConnectorSites">
<map
message="$(string.error.AdapterCreateConnector.CreateConnector)"
value="1"
/>
<map
message="$(string.error.AdapterCreateConnector.Bind)"
value="2"
/>
</valueMap>
<valueMap name="AdapterGetConnSucceededSites">
<map
message="$(string.error.AdapterGetConnSucceeded.InvalidBufferSize)"
value="1"
/>
<map
message="$(string.info.AdapterGetConnSucceeded.AbortedOrInvalid)"
value="2"
/>
<map
message="$(string.error.AdapterGetConnSucceeded.Reject)"
value="3"
/>
<map
message="$(string.error.AdapterGetConnSucceeded.MismatchedVersion)"
value="4"
/>
<map
message="$(string.error.AdapterGetConnSucceeded.PGFind)"
value="5"
/>
<map
message="$(string.error.AdapterGetConnSucceeded.Rank)"
value="6"
/>
<map
message="$(string.error.AdapterGetConnSucceeded.Shutdown)"
value="7"
/>
<map
message="$(string.info.AdapterGetConnSucceeded.HeadToHeadReject)"
value="8"
/>
<map
message="$(string.info.AdapterGetConnSucceeded.HeadToHeadShutdown)"
value="9"
/>
<map
message="$(string.info.AdapterGetConnSucceeded.DefaultReject)"
value="10"
/>
<map
message="$(string.info.AdapterGetConnSucceeded.Success)"
value="11"
/>
</valueMap>
<valueMap name="EndpointSites">
<map
message="$(string.error.EndpointCompleteConnect.BufferSize)"
value="1"
/>
<map
message="$(string.error.EndpointCompleteConnect.Default)"
value="2"
/>
<map
message="$(string.info.EndpointCompleteConnect.Pending)"
value="3"
/>
<map
message="$(string.info.EndpointConnReqFailed.Passive)"
value="4"
/>
<map
message="$(string.info.EndpointConnReqFailed.Canceled)"
value="5"
/>
<map
message="$(string.error.EndpointConnReqFailed.Failed)"
value="6"
/>
<map
message="$(string.info.EndpointConnCompleted)"
value="7"
/>
<map
message="$(string.info.EndpointConnFailed.Retry)"
value="8"
/>
<map
message="$(string.error.EndpointConnFailed.Fail)"
value="9"
/>
<map
message="$(string.info.EndpointAccept.Pending)"
value="10"
/>
<map
message="$(string.error.EndpointPrepostReceives.Failed)"
value="11"
/>
<map
message="$(string.info.EndpointAcceptCompleted)"
value="12"
/>
<map
message="$(string.info.EndpointAcceptFailed.AbortedOrTimeout)"
value="13"
/>
<map
message="$(string.error.EndpointAcceptFailed.Failed)"
value="14"
/>
<map
message="$(string.info.EndpointDisconnect)"
value="15"
/>
<map
message="$(string.info.EndpointConnect)"
value="16"
/>
<map
message="$(string.info.EndpointAccept)"
value="17"
/>
<map
message="$(string.info.EndpointHandleTimeout)"
value="18"
/>
<map
message="$(string.error.EndpointCompleteConnectAbortedOrInvalid)"
value="19"
/>
<map
message="$(string.info.EndpointCompleteConnectConnect)"
value="20"
/>
<map
message="$(string.info.EndpointHandleTimeoutConnect)"
value="21"
/>
</valueMap>
<valueMap name="EnvironmentListenSites">
<map
message="$(string.info.EnvironmentListen.NoNDv2Providers)"
value="1"
/>
<map
message="$(string.error.EnvironmentListen.QueryAddressListForSizeFailed)"
value="2"
/>
<map
message="$(string.error.EnvironmentListen.QueryAddressListFailed)"
value="3"
/>
<map
message="$(string.info.EnvironmentListen.Success)"
value="4"
/>
</valueMap>
<valueMap name="EnvironmentConnectSites">
<map
message="$(string.error.EnvironmentConnect.NoLocalNoRemoteForce)"
value="1"
/>
<map
message="$(string.error.EnvironmentConnect.NoLocalForce)"
value="2"
/>
<map
message="$(string.error.EnvironmentConnect.NoLocalNoFallback)"
value="3"
/>
<map
message="$(string.error.EnvironmentConnect.NoLocalNoFallbackForce)"
value="4"
/>
<map
message="$(string.error.EnvironmentConnect.NoRemoteForce)"
value="5"
/>
<map
message="$(string.error.EnvironmentConnect.NoRemoteNoFallback)"
value="6"
/>
<map
message="$(string.error.EnvironmentConnect.NoPathForce)"
value="7"
/>
<map
message="$(string.error.EnvironmentConnect.NoPathNoFallback)"
value="8"
/>
<map
message="$(string.info.EnvironmentConnect.NoLocalFallback)"
value="9"
/>
<map
message="$(string.info.EnvironmentConnect.NoRemoteFallback)"
value="10"
/>
<map
message="$(string.info.EnvironmentConnect.NoPathFallback)"
value="11"
/>
</valueMap>
<valueMap name="Shm_connectSites">
<map
message="$(string.error.MPIDI_CH3I_Shm_connect.QueueName)"
value="1"
/>
<map
message="$(string.error.MPIDI_CH3I_Shm_connect.QueueAttach)"
value="2"
/>
<map
message="$(string.error.MPIDI_CH3I_Shm_connect.WriteQueue)"
value="3"
/>
<map
message="$(string.error.MPIDI_CH3I_Shm_connect.NotifyConnect)"
value="4"
/>
</valueMap>
<valueMap name="Shm_acceptSites">
<map
message="$(string.error.MPIDI_CH3I_Accept_shm_connection.QueueAttach)"
value="1"
/>
<map
message="$(string.error.MPIDI_CH3I_Accept_shm_connection.MismatchedVersion)"
value="2"
/>
<map
message="$(string.error.MPIDI_CH3I_Accept_shm_connection.PGFind)"
value="3"
/>
<map
message="$(string.error.MPIDI_CH3I_Accept_shm_connection.Rank)"
value="4"
/>
<map
message="$(string.error.MPIDI_CH3I_Accept_shm_connection.GetConnStringFailed)"
value="5"
/>
<map
message="$(string.error.MPIDI_CH3I_Accept_shm_connection.GetStringArgFailed)"
value="6"
/>
<map
message="$(string.error.MPIDI_CH3I_Accept_shm_connection.BootstrapQueueAttach)"
value="7"
/>
</valueMap>
<valueMap name="RecvOpenRequestSucceededSites">
<map
message="$(string.error.RecvOpenRequestSucceeded_cb.UnexpectedControl)"
value="1"
/>
<map
message="$(string.error.RecvOpenRequestSucceeded_cb.MismatchedVersion)"
value="2"
/>
<map
message="$(string.error.RecvOpenRequestSucceeded_cb.Internal)"
value="3"
/>
<map
message="$(string.info.RecvOpenRequestSucceeded_cb.Success)"
value="4"
/>
</valueMap>
<valueMap name="ConnectFailedSites">
<map
message="$(string.error.ConnectFailed.AbortedBeforeTimeout)"
value="1"
/>
<map
message="$(string.info.ConnectFailed.Timeout)"
value="2"
/>
<map
message="$(string.error.ConnectFailed.AbortedClosing)"
value="3"
/>
<map
message="$(string.info.ConnectFailed.Refused)"
value="4"
/>
<map
message="$(string.info.ConnectFailed.Error)"
value="5"
/>
<map
message="$(string.error.ConnectFailed.Exhausted)"
value="6"
/>
<map
message="$(string.error.ConnectFailed.Fail)"
value="7"
/>
</valueMap>
</maps>
<templates>
<template tid="AdapterInit">
<data
inType="win:Pointer"
name="this_pointer"
outType="win:HexInt64"
/>
<data
inType="win:UInt32"
map="AdapterInitSites"
name="site"
/>
<data
inType="win:Int32"
name="hresult"
outType="xs:int"
/>
<data
inType="win:Pointer"
name="paddr"
outType="win:HexInt64"
/>
<data
inType="win:Pointer"
name="hExSet"
outType="win:HexInt64"
/>
<data
inType="win:UInt64"
name="cbZCopyThreshold"
outType="xs:unsignedLong"
/>
</template>
<template tid="AdapterListen">
<data
inType="win:Pointer"
name="this_pointer"
outType="win:HexInt64"
/>
<data
inType="win:UInt32"
map="AdapterListenSites"
name="site"
/>
<data
inType="win:Int32"
name="hresult"
outType="xs:int"
/>
</template>
<template tid="AdapterGetConnectionRequest">
<data
inType="win:Pointer"
name="this_pointer"
outType="win:HexInt64"
/>
<data
inType="win:UInt32"
map="AdapterGetConnectionRequestSites"
name="site"
/>
<data
inType="win:Int32"
name="hresult"
outType="xs:int"
/>
</template>
<template tid="AdapterGetConnSucceeded">
<data
inType="win:Pointer"
name="this_pointer"
outType="win:HexInt64"
/>
<data
inType="win:UInt32"
map="AdapterGetConnSucceededSites"
name="site"
/>
<data
inType="win:UInt32"
name="hresult"
outType="win:HexInt32"
/>
<data
inType="win:Pointer"
name="pIConnector"
outType="win:HexInt64"
/>
</template>
<template tid="AdapterCreateConnector">
<data
inType="win:Pointer"
name="this_pointer"
outType="win:HexInt64"
/>
<data
inType="win:UInt32"
map="AdapterCreateConnectorSites"
name="site"
/>
<data
inType="win:UInt32"
name="hresult"
outType="win:HexInt32"
/>
</template>
<template tid="Endpoint">
<data
inType="win:Pointer"
name="this_pointer"
outType="win:HexInt64"
/>
<data
inType="win:UInt32"
map="EndpointSites"
name="site"
/>
<data
inType="win:UInt32"
name="hresult"
outType="win:HexInt32"
/>
<data
inType="win:AnsiString"
name="local_host"
/>
<data
inType="win:UInt32"
name="local_port"
/>
<data
inType="win:AnsiString"
name="remote_host"
/>
<data
inType="win:UInt32"
name="remote_port"
/>
</template>
<template tid="EnvironmentListen">
<data
inType="win:Pointer"
name="this_pointer"
outType="win:HexInt64"
/>
<data
inType="win:UInt32"
map="EnvironmentListenSites"
name="site"
/>
<data
inType="win:Int32"
name="hresult"
outType="xs:int"
/>
</template>
<template tid="EnvironmentGetBusinessCard">
<data
inType="win:Pointer"
name="this_pointer"
outType="win:HexInt64"
/>
<data
inType="win:Int32"
name="result"
outType="xs:int"
/>
<data
inType="win:Pointer"
name="pszBusinessCard"
outType="win:HexInt64"
/>
<data
inType="win:Pointer"
name="pcbBusinessCard"
outType="win:HexInt64"
/>
</template>
<template tid="EnvironmentConnect">
<data
inType="win:Pointer"
name="this_pointer"
outType="win:HexInt64"
/>
<data
inType="win:UInt32"
map="EnvironmentConnectSites"
name="site"
/>
<data
inType="win:Int32"
name="result"
outType="xs:int"
/>
<data
inType="win:UInt32"
name="rank"
outType="xs:unsignedInt"
/>
<data
inType="win:AnsiString"
name="BusinessCard"
outType="xs:string"
/>
<data
inType="win:Pointer"
name="pVc"
outType="win:HexInt64"
/>
<data
inType="win:Pointer"
name="szBusinessCard"
outType="win:HexInt64"
/>
<data
inType="win:Int32"
name="fForceUse"
outType="xs:int"
/>
<data
inType="win:Pointer"
name="pbHandled"
outType="win:HexInt64"
/>
</template>
<template tid="Shm_connect">
<data
inType="win:UInt32"
map="Shm_connectSites"
name="site"
/>
<data
inType="win:Int32"
name="result"
outType="xs:int"
/>
<data
inType="win:AnsiString"
name="business_card"
outType="xs:string"
/>
<data
inType="win:Pointer"
name="vc"
outType="win:HexInt64"
/>
<data
inType="win:Pointer"
name="pbusiness_card"
outType="win:HexInt64"
/>
<data
inType="win:Pointer"
name="flag"
outType="win:HexInt64"
/>
</template>
<template tid="Shm_accept">
<data
inType="win:UInt32"
map="Shm_acceptSites"
name="site"
/>
<data
inType="win:UInt32"
name="pid"
outType="xs:unsignedInt"
/>
<data
inType="win:Pointer"
name="p"
outType="win:HexInt64"
/>
</template>
<template tid="RecvOpenRequestSucceeded">
<data
inType="win:UInt32"
map="RecvOpenRequestSucceededSites"
name="site"
/>
<data
inType="win:AnsiString"
name="host"
outType="xs:string"
/>
<data
inType="win:Int32"
name="port"
outType="xs:int"
/>
</template>
<template tid="ConnectFailed">
<data
inType="win:UInt32"
map="ConnectFailedSites"
name="site"
/>
<data
inType="win:Int32"
name="result"
outType="xs:int"
/>
<data
inType="win:Pointer"
name="pexov"
outType="win:HexInt64"
/>
<data
inType="win:AnsiString"
name="host"
outType="xs:string"
/>
<data
inType="win:Int32"
name="port"
outType="xs:int"
/>
</template>
<template tid="object">
<data
inType="win:Pointer"
name="this_pointer"
outType="win:HexInt64"
/>
</template>
<template tid="object.host.port">
<data
inType="win:Pointer"
name="this_pointer"
outType="win:HexInt64"
/>
<data
inType="win:AnsiString"
name="host"
/>
<data
inType="win:UInt32"
name="port"
/>
</template>
<template tid="object.error.pointer.pointer.pointer">
<data
inType="win:Pointer"
name="this_pointer"
outType="win:HexInt64"
/>
<data
inType="win:HexInt32"
name="hresult"
/>
<data
inType="win:Pointer"
name="param1"
outType="win:HexInt64"
/>
<data
inType="win:Pointer"
name="param2"
outType="win:HexInt64"
/>
<data
inType="win:Pointer"
name="param3"
outType="win:HexInt64"
/>
</template>
<template tid="object.error.pointer">
<data
inType="win:Pointer"
name="this_pointer"
outType="win:HexInt64"
/>
<data
inType="win:HexInt32"
name="hresult"
/>
<data
inType="win:Pointer"
name="param1"
outType="win:HexInt64"
/>
</template>
<template tid="object.host.port.pointer.pointer">
<data
inType="win:Pointer"
name="this_pointer"
outType="win:HexInt64"
/>
<data
inType="win:AnsiString"
name="host"
/>
<data
inType="win:UInt32"
name="port"
/>
<data
inType="win:Pointer"
name="param1"
outType="win:HexInt64"
/>
<data
inType="win:Pointer"
name="param2"
outType="win:HexInt64"
/>
</template>
<template tid="object.host.port.pointer.pointer.pointer">
<data
inType="win:Pointer"
name="this_pointer"
outType="win:HexInt64"
/>
<data
inType="win:AnsiString"
name="host"
/>
<data
inType="win:UInt32"
name="port"
/>
<data
inType="win:Pointer"
name="param1"
outType="win:HexInt64"
/>
<data
inType="win:Pointer"
name="param2"
outType="win:HexInt64"
/>
<data
inType="win:Pointer"
name="param3"
outType="win:HexInt64"
/>
</template>
<template tid="object.rank.businesscard">
<data
inType="win:Pointer"
name="this_pointer"
outType="win:HexInt64"
/>
<data
inType="win:UInt32"
name="rank"
/>
<data
inType="win:AnsiString"
name="business_card"
/>
</template>
<template tid="empty"/>
<template tid="error">
<data
inType="win:HexInt32"
name="hresult"
/>
</template>
<template tid="error.message">
<data
inType="win:HexInt32"
name="error_code"
/>
<data
inType="win:AnsiString"
name="message"
/>
</template>
<template tid="error.rank">
<data
inType="win:HexInt32"
name="error_code"
/>
<data
inType="win:UInt32"
name="rank"
/>
</template>
<template tid="error.host">
<data
inType="win:HexInt32"
name="error_code"
/>
<data
inType="win:AnsiString"
name="host"
/>
</template>
<template tid="error.host.port">
<data
inType="win:HexInt32"
name="error_code"
/>
<data
inType="win:AnsiString"
name="host"
/>
<data
inType="win:UInt32"
name="port"
/>
</template>
<template tid="error.message.host">
<data
inType="win:HexInt32"
name="error_code"
/>
<data
inType="win:AnsiString"
name="error_message"
/>
<data
inType="win:AnsiString"
name="host"
/>
</template>
<template tid="error.message.host.port">
<data
inType="win:HexInt32"
name="error_code"
/>
<data
inType="win:AnsiString"
name="error_message"
/>
<data
inType="win:AnsiString"
name="host"
/>
<data
inType="win:UInt32"
name="port"
/>
</template>
<template tid="host">
<data
inType="win:AnsiString"
name="host"
/>
</template>
<template tid="host.port">
<data
inType="win:AnsiString"
name="host"
/>
<data
inType="win:UInt32"
name="port"
/>
</template>
<template tid="port.maxport">
<data
inType="win:Int32"
name="port"
/>
<data
inType="win:Int32"
name="max_port"
/>
</template>
<template tid="rank">
<data
inType="win:UInt32"
name="rank"
/>
</template>
<template tid="rank.rank">
<data
inType="win:UInt32"
name="rank_a"
/>
<data
inType="win:UInt32"
name="rank_b"
/>
</template>
<template tid="rank.rank.businesscard">
<data
inType="win:UInt32"
name="rank_a"
/>
<data
inType="win:UInt32"
name="rank_b"
/>
<data
inType="win:AnsiString"
name="business_card"
/>
</template>
<template tid="rank.host.port">
<data
inType="win:UInt32"
name="rank"
/>
<data
inType="win:AnsiString"
name="name"
/>
<data
inType="win:UInt32"
name="port"
/>
</template>
<template tid="host.businesscard">
<data
inType="win:AnsiString"
name="host"
/>
<data
inType="win:AnsiString"
name="business_card"
/>
</template>
<template tid="rank.context">
<data
inType="win:UInt32"
name="rank"
/>
<data
inType="win:AnsiString"
name="context"
/>
</template>
<template tid="rank.major.minor.build">
<data
inType="win:UInt32"
name="rank"
/>
<data
inType="win:UInt32"
name="major"
/>
<data
inType="win:UInt32"
name="minor"
/>
<data
inType="win:UInt32"
name="build"
/>
</template>
</templates>
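<!--
  Each template's <data> items supply the positional insertions (%1, %2,
  and so on) for the event message strings referenced above; a <data> item
  that carries a map attribute is rendered through the matching valueMap,
  so an integer "site" value logs as a human-readable call-site name.
-->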
</provider>
<provider
guid="{5b09c0bf-453f-43e6-b344-435148a14443}"
message="$(string.provider)"
messageFileName="msmpires.dll"
name="Microsoft-HPC-MPI"
parameterFileName="msmpires.dll"
resourceFileName="msmpires.dll"
symbol="MICROSOFT_HPC_MPI_PROVIDER"
>
<channels>
<!-- Microsoft-Windows-HPC/MPI -->
<channel
chid="MpiCommunicationChannel"
enabled="false"
isolation="System"
message="$(string.communication.channel)"
name="Microsoft-HPC-MPI/Communication"
symbol="MICROSOFT_HPC_MPI_COMMUNICATION_CHANNEL"
type="Debug"
>
<logging>
<!-- enable ring buffer logging -->
<retention>false</retention>
<!-- (1024^3) * 15 = 15 GiB, in bytes -->
<maxSize>16106127360</maxSize>
</logging>
<publishing>
<!-- all events less than or equal to win:Verbose will be logged -->
<level>5</level>
<!-- mask for event filtering -->
<keywords>0x00</keywords>
<!-- seconds of inactivity before ETW stream is flushed -->
<latency>2</latency>
<!-- SystemTime = use system clock | QPC = use QueryPerformanceCounter for timestamps -->
<clockType>QPC</clockType>
</publishing>
</channel>
<!-- Microsoft-Windows-HPC/MPI/Debug -->
<channel
chid="MpiApiChannel"
enabled="false"
isolation="System"
message="$(string.api.channel)"
name="Microsoft-HPC-MPI/Api"
symbol="MICROSOFT_HPC_MPI_API_CHANNEL"
type="Debug"
>
<logging>
<!-- enable ring buffer logging -->
<retention>false</retention>
<!-- (1024^3) * 15 = 15 GiB, in bytes -->
<maxSize>16106127360</maxSize>
</logging>
<publishing>
<!-- all events less than or equal to win:Verbose will be logged -->
<level>5</level>
<!-- mask for event filtering -->
<keywords>0x00</keywords>
<!-- seconds of inactivity before ETW stream is flushed -->
<latency>2</latency>
<!-- SystemTime = use system clock | QPC = use QueryPerformanceCounter for timestamps -->
<clockType>QPC</clockType>
</publishing>
</channel>
</channels>
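<!--
  Hedged sketch (illustrative): after the manifest is installed, the Api
  and Communication channels can be toggled independently, e.g.
  "wevtutil sl Microsoft-HPC-MPI/Api /e:true" and
  "wevtutil sl Microsoft-HPC-MPI/Communication /e:true".
-->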
<tasks>
<task
eventGUID="{fb23875d-07a1-41b6-be32-efb82de721af}"
message="$(string.MPI_Comm_create_keyval)"
name="attr:MPI_Comm_create_keyval"
symbol="TASK_MPI_Comm_create_keyval"
value="10"
/>
<task
eventGUID="{226d1860-c47b-48e8-9335-057008319c9d}"
message="$(string.MPI_Comm_delete_attr)"
name="attr:MPI_Comm_delete_attr"
symbol="TASK_MPI_Comm_delete_attr"
value="11"
/>
<task
eventGUID="{b599433f-72f3-4f51-a6e7-e76950a0ea5d}"
message="$(string.MPI_Comm_free_keyval)"
name="attr:MPI_Comm_free_keyval"
symbol="TASK_MPI_Comm_free_keyval"
value="12"
/>
<task
eventGUID="{1116eda2-f576-4945-8355-80434317c26f}"
message="$(string.MPI_Comm_get_attr)"
name="attr:MPI_Comm_get_attr"
symbol="TASK_MPI_Comm_get_attr"
value="13"
/>
<task
eventGUID="{be854f8a-aff8-4c2a-bcde-4921cd3508b8}"
message="$(string.MPI_Comm_set_attr)"
name="attr:MPI_Comm_set_attr"
symbol="TASK_MPI_Comm_set_attr"
value="14"
/>
<task
eventGUID="{a9b852d7-50a5-472f-a6cd-92a4b6016142}"
message="$(string.MPI_Type_create_keyval)"
name="attr:MPI_Type_create_keyval"
symbol="TASK_MPI_Type_create_keyval"
value="15"
/>
<task
eventGUID="{84baded6-0c5e-4f86-8f17-25b1b4d26abd}"
message="$(string.MPI_Type_delete_attr)"
name="attr:MPI_Type_delete_attr"
symbol="TASK_MPI_Type_delete_attr"
value="16"
/>
<task
eventGUID="{897807ce-1a10-4c82-9c60-0a447c55a487}"
message="$(string.MPI_Type_free_keyval)"
name="attr:MPI_Type_free_keyval"
symbol="TASK_MPI_Type_free_keyval"
value="17"
/>
<task
eventGUID="{78386cb4-3f94-4289-8a0d-18e1fe56ddce}"
message="$(string.MPI_Type_get_attr)"
name="attr:MPI_Type_get_attr"
symbol="TASK_MPI_Type_get_attr"
value="18"
/>
<task
eventGUID="{5c4436a5-2f03-42da-8261-1432d14b7079}"
message="$(string.MPI_Type_set_attr)"
name="attr:MPI_Type_set_attr"
symbol="TASK_MPI_Type_set_attr"
value="19"
/>
<task
eventGUID="{d9d2b8d2-92b6-43c8-9e84-1564b7f60990}"
message="$(string.MPI_Win_create_keyval)"
name="attr:MPI_Win_create_keyval"
symbol="TASK_MPI_Win_create_keyval"
value="20"
/>
<task
eventGUID="{a83cc783-0663-422f-8fa2-558f980b3f31}"
message="$(string.MPI_Win_delete_attr)"
name="attr:MPI_Win_delete_attr"
symbol="TASK_MPI_Win_delete_attr"
value="21"
/>
<task
eventGUID="{c83253cd-4311-4797-84e5-ee0d85b010e6}"
message="$(string.MPI_Win_free_keyval)"
name="attr:MPI_Win_free_keyval"
symbol="TASK_MPI_Win_free_keyval"
value="22"
/>
<task
eventGUID="{34266594-4a51-437b-990d-15dfd060d1f2}"
message="$(string.MPI_Win_get_attr)"
name="attr:MPI_Win_get_attr"
symbol="TASK_MPI_Win_get_attr"
value="23"
/>
<task
eventGUID="{3aaccabf-34c5-4b4f-88f9-0166cb0a629c}"
message="$(string.MPI_Win_set_attr)"
name="attr:MPI_Win_set_attr"
symbol="TASK_MPI_Win_set_attr"
value="24"
/>
<task
eventGUID="{3b988c53-fdfb-4fad-8c34-7920583b2da4}"
message="$(string.MPI_Allgather)"
name="coll:MPI_Allgather"
symbol="TASK_MPI_Allgather"
value="25"
/>
<task
eventGUID="{12798656-77d7-4d38-ae06-804416e6470f}"
message="$(string.MPI_Allgatherv)"
name="coll:MPI_Allgatherv"
symbol="TASK_MPI_Allgatherv"
value="26"
/>
<task
eventGUID="{15ac7296-74c6-4605-badf-1551f6e4f3e1}"
message="$(string.MPI_Allreduce)"
name="coll:MPI_Allreduce"
symbol="TASK_MPI_Allreduce"
value="27"
/>
<task
eventGUID="{f5f4ff66-14f0-41b1-926f-74c57d012718}"
message="$(string.MPI_Alltoall)"
name="coll:MPI_Alltoall"
symbol="TASK_MPI_Alltoall"
value="28"
/>
<task
eventGUID="{b64daac3-54c8-48db-b73e-2b34731ea86b}"
message="$(string.MPI_Alltoallv)"
name="coll:MPI_Alltoallv"
symbol="TASK_MPI_Alltoallv"
value="29"
/>
<task
eventGUID="{11b6cf66-ae78-41e7-b4fc-685712f598de}"
message="$(string.MPI_Alltoallw)"
name="coll:MPI_Alltoallw"
symbol="TASK_MPI_Alltoallw"
value="30"
/>
<task
eventGUID="{49168b15-eea6-4d6e-b4c0-358654c32891}"
message="$(string.MPI_Barrier)"
name="coll:MPI_Barrier"
symbol="TASK_MPI_Barrier"
value="31"
/>
<task
eventGUID="{5fba8f3b-1e1d-4ff4-a4e8-3a72e56114c8}"
message="$(string.MPI_Bcast)"
name="coll:MPI_Bcast"
symbol="TASK_MPI_Bcast"
value="32"
/>
<task
eventGUID="{8e4b1855-3ccd-49d1-ba44-0253998d1ddc}"
message="$(string.MPI_Exscan)"
name="coll:MPI_Exscan"
symbol="TASK_MPI_Exscan"
value="33"
/>
<task
eventGUID="{669cb50d-f4d9-4a18-9ccd-05f2b6145dce}"
message="$(string.MPI_Gather)"
name="coll:MPI_Gather"
symbol="TASK_MPI_Gather"
value="34"
/>
<task
eventGUID="{10ad63dd-6fcd-40e3-b04e-e2df0a47c5b1}"
message="$(string.MPI_Gatherv)"
name="coll:MPI_Gatherv"
symbol="TASK_MPI_Gatherv"
value="35"
/>
<task
eventGUID="{3aff5bc0-9d62-4771-82e6-71d08ff79317}"
message="$(string.MPI_Op_create)"
name="misc:MPI_Op_create"
symbol="TASK_MPI_Op_create"
value="36"
/>
<task
eventGUID="{424c1f5a-4c1d-4185-86b4-00ccf1ecdbd9}"
message="$(string.MPI_Op_free)"
name="misc:MPI_Op_free"
symbol="TASK_MPI_Op_free"
value="37"
/>
<task
eventGUID="{3b6153f1-d1ae-4084-b55e-94ea5560e545}"
message="$(string.MPI_Reduce_scatter)"
name="coll:MPI_Reduce_scatter"
symbol="TASK_MPI_Reduce_scatter"
value="38"
/>
<task
eventGUID="{d843bdf6-a7ad-4c4f-ad9e-c3942622d0a7}"
message="$(string.MPI_Reduce)"
name="coll:MPI_Reduce"
symbol="TASK_MPI_Reduce"
value="39"
/>
<task
eventGUID="{cb05872e-8d03-40b6-a5f4-f9f8dda72b9e}"
message="$(string.MPI_Scan)"
name="coll:MPI_Scan"
symbol="TASK_MPI_Scan"
value="40"
/>
<task
eventGUID="{c9017c72-d9af-4a62-9b3d-ce7a434e2f73}"
message="$(string.MPI_Scatter)"
name="coll:MPI_Scatter"
symbol="TASK_MPI_Scatter"
value="41"
/>
<task
eventGUID="{aadfefbd-a232-4912-b9c1-02ee7f7370e2}"
message="$(string.MPI_Scatterv)"
name="coll:MPI_Scatterv"
symbol="TASK_MPI_Scatterv"
value="42"
/>
<task
eventGUID="{386c3e16-dd0b-4dc4-8e71-3702052f044c}"
message="$(string.MPI_Comm_compare)"
name="comm:MPI_Comm_compare"
symbol="TASK_MPI_Comm_compare"
value="43"
/>
<task
eventGUID="{7c168705-6828-4a85-8907-8afc41becfc7}"
message="$(string.MPI_Comm_create)"
name="comm:MPI_Comm_create"
symbol="TASK_MPI_Comm_create"
value="44"
/>
<task
eventGUID="{699c1e36-9fdc-46bd-9271-615cb66fc45c}"
message="$(string.MPI_Comm_dup)"
name="comm:MPI_Comm_dup"
symbol="TASK_MPI_Comm_dup"
value="45"
/>
<task
eventGUID="{80c582ed-4a7c-4547-a626-c78c13b44ca4}"
message="$(string.MPI_Comm_free)"
name="comm:MPI_Comm_free"
symbol="TASK_MPI_Comm_free"
value="46"
/>
<task
eventGUID="{c6dc74cd-ef42-43d7-9d01-451ee694ae9b}"
message="$(string.MPI_Comm_get_name)"
name="comm:MPI_Comm_get_name"
symbol="TASK_MPI_Comm_get_name"
value="47"
/>
<task
eventGUID="{021278b9-ac90-4104-b2bb-70669183a031}"
message="$(string.MPI_Comm_group)"
name="comm:MPI_Comm_group"
symbol="TASK_MPI_Comm_group"
value="48"
/>
<task
eventGUID="{e2790ec6-4e8e-4bd5-9a2e-c727d0ac5278}"
message="$(string.MPI_Comm_rank)"
name="comm:MPI_Comm_rank"
symbol="TASK_MPI_Comm_rank"
value="49"
/>
<task
eventGUID="{f6b9bdac-7def-448f-802f-424f746676f2}"
message="$(string.MPI_Comm_remote_group)"
name="comm:MPI_Comm_remote_group"
symbol="TASK_MPI_Comm_remote_group"
value="50"
/>
<task
eventGUID="{1ebf7b24-e5f4-4226-a0c2-281fd220fc0a}"
message="$(string.MPI_Comm_remote_size)"
name="comm:MPI_Comm_remote_size"
symbol="TASK_MPI_Comm_remote_size"
value="51"
/>
<task
eventGUID="{c670130e-6001-44e5-80c4-ca6561d905ba}"
message="$(string.MPI_Comm_set_name)"
name="comm:MPI_Comm_set_name"
symbol="TASK_MPI_Comm_set_name"
value="52"
/>
<task
eventGUID="{13a6bff5-7b5f-4d4a-b40c-4bbafc4fec4a}"
message="$(string.MPI_Comm_size)"
name="comm:MPI_Comm_size"
symbol="TASK_MPI_Comm_size"
value="53"
/>
<task
eventGUID="{7f60a929-d871-4f01-8ee0-efd581deb9dc}"
message="$(string.MPI_Comm_split)"
name="comm:MPI_Comm_split"
symbol="TASK_MPI_Comm_split"
value="54"
/>
<task
eventGUID="{8977726d-2143-4600-8e64-a5954451b61a}"
message="$(string.MPI_Comm_test_inter)"
name="comm:MPI_Comm_test_inter"
symbol="TASK_MPI_Comm_test_inter"
value="55"
/>
<task
eventGUID="{3912dec2-9b55-4176-bd86-ab13875801f7}"
message="$(string.MPI_Intercomm_create)"
name="comm:MPI_Intercomm_create"
symbol="TASK_MPI_Intercomm_create"
value="56"
/>
<task
eventGUID="{d642e6ac-ef15-4ad8-980b-8700cceb8c73}"
message="$(string.MPI_Intercomm_merge)"
name="comm:MPI_Intercomm_merge"
symbol="TASK_MPI_Intercomm_merge"
value="57"
/>
<task
eventGUID="{cb5cb74b-4e42-4e20-bd10-2fe32b509dd1}"
message="$(string.MPI_Get_address)"
name="dt:MPI_Get_address"
symbol="TASK_MPI_Get_address"
value="58"
/>
<task
eventGUID="{ba5c968b-47c2-4f6f-8042-c58188cfd13e}"
message="$(string.MPI_Get_count)"
name="dt:MPI_Get_count"
symbol="TASK_MPI_Get_count"
value="59"
/>
<task
eventGUID="{049d3144-e8aa-49b2-8910-a53aa4ca9f09}"
message="$(string.MPI_Get_elements)"
name="dt:MPI_Get_elements"
symbol="TASK_MPI_Get_elements"
value="60"
/>
<task
eventGUID="{f357adf6-8a7d-434c-acde-8f1d83f779cf}"
message="$(string.MPI_Pack)"
name="dt:MPI_Pack"
symbol="TASK_MPI_Pack"
value="61"
/>
<task
eventGUID="{4dffd89f-4925-40f4-bc15-80cd3ffd42ad}"
message="$(string.MPI_Pack_external)"
name="dt:MPI_Pack_external"
symbol="TASK_MPI_Pack_external"
value="62"
/>
<task
eventGUID="{52168a21-fe36-48cd-bb76-8d530bd12cf7}"
message="$(string.MPI_Pack_external_size)"
name="dt:MPI_Pack_external_size"
symbol="TASK_MPI_Pack_external_size"
value="63"
/>
<task
eventGUID="{087e72dc-04e5-4492-ac20-c324bbf74806}"
message="$(string.MPI_Pack_size)"
name="dt:MPI_Pack_size"
symbol="TASK_MPI_Pack_size"
value="64"
/>
<task
eventGUID="{45276ee4-248c-411a-beb6-0a06f8c3d640}"
message="$(string.MPI_Register_datarep)"
name="dt:MPI_Register_datarep"
symbol="TASK_MPI_Register_datarep"
value="65"
/>
<task
eventGUID="{03555a8c-0364-4126-a29d-745301ecb928}"
message="$(string.MPI_Status_set_elements)"
name="dt:MPI_Status_set_elements"
symbol="TASK_MPI_Status_set_elements"
value="66"
/>
<task
eventGUID="{33d2a471-a4ee-4704-9e55-8b30c830291b}"
message="$(string.MPI_Type_commit)"
name="dt:MPI_Type_commit"
symbol="TASK_MPI_Type_commit"
value="67"
/>
<task
eventGUID="{dd14c0dc-e1d7-4437-87a0-f453ed6dc8fe}"
message="$(string.MPI_Type_contiguous)"
name="dt:MPI_Type_contiguous"
symbol="TASK_MPI_Type_contiguous"
value="68"
/>
<task
eventGUID="{50cf880d-65e4-4564-9d33-baf45c330dc8}"
message="$(string.MPI_Type_create_darray)"
name="dt:MPI_Type_create_darray"
symbol="TASK_MPI_Type_create_darray"
value="69"
/>
<task
eventGUID="{fd4acda8-0b8f-4c7b-ad8a-f6fd40053011}"
message="$(string.MPI_Type_create_hindexed)"
name="dt:MPI_Type_create_hindexed"
symbol="TASK_MPI_Type_create_hindexed"
value="70"
/>
<task
eventGUID="{c0c11e7c-be94-4907-9d0f-35dfca13d0de}"
message="$(string.MPI_Type_create_hvector)"
name="dt:MPI_Type_create_hvector"
symbol="TASK_MPI_Type_create_hvector"
value="71"
/>
<task
eventGUID="{19a1811f-72a7-4157-b266-03c1244496a8}"
message="$(string.MPI_Type_create_indexed_block)"
name="dt:MPI_Type_create_indexed_block"
symbol="TASK_MPI_Type_create_indexed_block"
value="72"
/>
<task
eventGUID="{90e36275-dd27-44e9-8120-6bc20b91e447}"
message="$(string.MPI_Type_create_resized)"
name="dt:MPI_Type_create_resized"
symbol="TASK_MPI_Type_create_resized"
value="73"
/>
<task
eventGUID="{29d7e267-d5c3-4e25-8397-184e2cbb424f}"
message="$(string.MPI_Type_create_struct)"
name="dt:MPI_Type_create_struct"
symbol="TASK_MPI_Type_create_struct"
value="74"
/>
<task
eventGUID="{efd6dce7-1533-42cc-ae68-4ea247022ba8}"
message="$(string.MPI_Type_create_subarray)"
name="dt:MPI_Type_create_subarray"
symbol="TASK_MPI_Type_create_subarray"
value="75"
/>
<task
eventGUID="{cf555856-fa3d-4592-bc2d-25c0b169fa90}"
message="$(string.MPI_Type_dup)"
name="dt:MPI_Type_dup"
symbol="TASK_MPI_Type_dup"
value="76"
/>
<task
eventGUID="{a0d98ab2-2035-4d57-b82e-810e90dba985}"
message="$(string.MPI_Type_free)"
name="dt:MPI_Type_free"
symbol="TASK_MPI_Type_free"
value="77"
/>
<task
eventGUID="{7c25e787-389a-4a6c-b520-a49a43749f25}"
message="$(string.MPI_Type_get_contents)"
name="dt:MPI_Type_get_contents"
symbol="TASK_MPI_Type_get_contents"
value="78"
/>
<task
eventGUID="{0310142b-b983-4282-ab29-1367e6749437}"
message="$(string.MPI_Type_get_envelope)"
name="dt:MPI_Type_get_envelope"
symbol="TASK_MPI_Type_get_envelope"
value="79"
/>
<task
eventGUID="{88ec037d-2fdf-43b7-ac93-bf76c4b27a7e}"
message="$(string.MPI_Type_get_extent)"
name="dt:MPI_Type_get_extent"
symbol="TASK_MPI_Type_get_extent"
value="80"
/>
<task
eventGUID="{2c841c81-3b7c-49bc-8a90-68ddbe96d041}"
message="$(string.MPI_Type_get_name)"
name="dt:MPI_Type_get_name"
symbol="TASK_MPI_Type_get_name"
value="81"
/>
<task
eventGUID="{25938555-7815-42a7-b260-c9d83101718b}"
message="$(string.MPI_Type_get_true_extent)"
name="dt:MPI_Type_get_true_extent"
symbol="TASK_MPI_Type_get_true_extent"
value="82"
/>
<task
eventGUID="{bc84a0f6-c7c4-41f2-8188-364094f982ae}"
message="$(string.MPI_Type_indexed)"
name="dt:MPI_Type_indexed"
symbol="TASK_MPI_Type_indexed"
value="83"
/>
<task
eventGUID="{a17c8426-3328-4537-96f9-93298906aacd}"
message="$(string.MPI_Type_match_size)"
name="dt:MPI_Type_match_size"
symbol="TASK_MPI_Type_match_size"
value="84"
/>
<task
eventGUID="{ed93bc62-ef41-4dd8-9927-edcda452bf87}"
message="$(string.MPI_Type_set_name)"
name="dt:MPI_Type_set_name"
symbol="TASK_MPI_Type_set_name"
value="85"
/>
<task
eventGUID="{d57a93c1-50b9-4135-a211-8f5428ea202b}"
message="$(string.MPI_Type_size)"
name="dt:MPI_Type_size"
symbol="TASK_MPI_Type_size"
value="86"
/>
<task
eventGUID="{d49238f5-bc80-4fee-8d18-928d13695153}"
message="$(string.MPI_Type_vector)"
name="dt:MPI_Type_vector"
symbol="TASK_MPI_Type_vector"
value="87"
/>
<task
eventGUID="{21badd53-27ee-4f39-bc96-68de18709d49}"
message="$(string.MPI_Unpack)"
name="dt:MPI_Unpack"
symbol="TASK_MPI_Unpack"
value="88"
/>
<task
eventGUID="{e9661dd9-a224-4821-aa2f-f44046f29bc5}"
message="$(string.MPI_Unpack_external)"
name="dt:MPI_Unpack_external"
symbol="TASK_MPI_Unpack_external"
value="89"
/>
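<!-- Error Handling Tasks -->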
<task
eventGUID="{5e79f1d6-a20e-4b1d-9e0e-0c837015fe3c}"
message="$(string.MPI_Add_error_class)"
name="eh:MPI_Add_error_class"
symbol="TASK_MPI_Add_error_class"
value="90"
/>
<task
eventGUID="{5a0e8da3-7782-4858-9ec2-cbc05246eb04}"
message="$(string.MPI_Add_error_code)"
name="eh:MPI_Add_error_code"
symbol="TASK_MPI_Add_error_code"
value="91"
/>
<task
eventGUID="{a972bfc8-1a22-4229-a261-183a0a60576c}"
message="$(string.MPI_Add_error_string)"
name="eh:MPI_Add_error_string"
symbol="TASK_MPI_Add_error_string"
value="92"
/>
<task
eventGUID="{d6569772-184e-419f-8c06-1a57bfd396aa}"
message="$(string.MPI_Comm_call_errhandler)"
name="eh:MPI_Comm_call_errhandler"
symbol="TASK_MPI_Comm_call_errhandler"
value="93"
/>
<task
eventGUID="{21cfaac4-2ed0-4263-b237-8dd23f1fc03b}"
message="$(string.MPI_Comm_create_errhandler)"
name="eh:MPI_Comm_create_errhandler"
symbol="TASK_MPI_Comm_create_errhandler"
value="94"
/>
<task
eventGUID="{4f92583c-431d-44a8-8cd1-e897aa26b3af}"
message="$(string.MPI_Comm_get_errhandler)"
name="eh:MPI_Comm_get_errhandler"
symbol="TASK_MPI_Comm_get_errhandler"
value="95"
/>
<task
eventGUID="{01ce9767-094f-42aa-9ecc-aa3503287578}"
message="$(string.MPI_Comm_set_errhandler)"
name="eh:MPI_Comm_set_errhandler"
symbol="TASK_MPI_Comm_set_errhandler"
value="96"
/>
<task
eventGUID="{077f6495-af4f-47f4-bf00-bb121c1b5223}"
message="$(string.MPI_Errhandler_free)"
name="eh:MPI_Errhandler_free"
symbol="TASK_MPI_Errhandler_free"
value="97"
/>
<task
eventGUID="{4bf9954f-da8e-4ee3-ad3f-d07c40c019ab}"
message="$(string.MPI_Error_class)"
name="eh:MPI_Error_class"
symbol="TASK_MPI_Error_class"
value="98"
/>
<task
eventGUID="{d3d70629-c9b5-41d1-baaa-4ff7f71a679d}"
message="$(string.MPI_Error_string)"
name="eh:MPI_Error_string"
symbol="TASK_MPI_Error_string"
value="99"
/>
<task
eventGUID="{e16916af-91de-4142-b220-f60de6bb3528}"
message="$(string.MPI_File_call_errhandler)"
name="eh:MPI_File_call_errhandler"
symbol="TASK_MPI_File_call_errhandler"
value="100"
/>
<task
eventGUID="{44aec7b0-c3a4-418a-87f3-fb3cec15fef1}"
message="$(string.MPI_File_create_errhandler)"
name="eh:MPI_File_create_errhandler"
symbol="TASK_MPI_File_create_errhandler"
value="101"
/>
<task
eventGUID="{58b843f7-cf6c-47f9-82c8-4eecdcbd7425}"
message="$(string.MPI_File_get_errhandler)"
name="eh:MPI_File_get_errhandler"
symbol="TASK_MPI_File_get_errhandler"
value="102"
/>
<task
eventGUID="{b542cdad-3778-43cc-8b39-470b1240480e}"
message="$(string.MPI_File_set_errhandler)"
name="eh:MPI_File_set_errhandler"
symbol="TASK_MPI_File_set_errhandler"
value="103"
/>
<task
eventGUID="{6336a46f-2efa-4279-b2c1-ec3766665a8e}"
message="$(string.MPI_Win_call_errhandler)"
name="eh:MPI_Win_call_errhandler"
symbol="TASK_MPI_Win_call_errhandler"
value="104"
/>
<task
eventGUID="{704a3f36-b59c-433b-bb0d-d603af682b5c}"
message="$(string.MPI_Win_create_errhandler)"
name="eh:MPI_Win_create_errhandler"
symbol="TASK_MPI_Win_create_errhandler"
value="105"
/>
<task
eventGUID="{e8a85fc4-5e1e-464f-93e9-1d121c054106}"
message="$(string.MPI_Win_get_errhandler)"
name="eh:MPI_Win_get_errhandler"
symbol="TASK_MPI_Win_get_errhandler"
value="106"
/>
<task
eventGUID="{729ba18a-4b42-4d60-b7f8-96222a34a847}"
message="$(string.MPI_Win_set_errhandler)"
name="eh:MPI_Win_set_errhandler"
symbol="TASK_MPI_Win_set_errhandler"
value="107"
/>
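<!-- Group Tasks -->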
<task
eventGUID="{9798679d-38cc-4816-812a-f2d8027eb895}"
message="$(string.MPI_Group_compare)"
name="grp:MPI_Group_compare"
symbol="TASK_MPI_Group_compare"
value="108"
/>
<task
eventGUID="{ddb10354-0c15-488f-af72-93b603707b2d}"
message="$(string.MPI_Group_difference)"
name="grp:MPI_Group_difference"
symbol="TASK_MPI_Group_difference"
value="109"
/>
<task
eventGUID="{c86d9e5c-5bde-4c39-b792-6a3b4bc59cdf}"
message="$(string.MPI_Group_excl)"
name="grp:MPI_Group_excl"
symbol="TASK_MPI_Group_excl"
value="110"
/>
<task
eventGUID="{a11d9591-dab3-4fbe-83eb-25a813897fed}"
message="$(string.MPI_Group_free)"
name="grp:MPI_Group_free"
symbol="TASK_MPI_Group_free"
value="111"
/>
<task
eventGUID="{476000ba-73d2-4e5d-aa77-ca681a4a676b}"
message="$(string.MPI_Group_incl)"
name="grp:MPI_Group_incl"
symbol="TASK_MPI_Group_incl"
value="112"
/>
<task
eventGUID="{bfbb1d4c-20ca-47b6-a3fc-1c0eba21ab32}"
message="$(string.MPI_Group_intersection)"
name="grp:MPI_Group_intersection"
symbol="TASK_MPI_Group_intersection"
value="113"
/>
<task
eventGUID="{742ee873-804a-42aa-b58c-54c7a7576c49}"
message="$(string.MPI_Group_range_excl)"
name="grp:MPI_Group_range_excl"
symbol="TASK_MPI_Group_range_excl"
value="114"
/>
<task
eventGUID="{cf378412-0d4a-4154-ba1d-4eb9b5a4e793}"
message="$(string.MPI_Group_range_incl)"
name="grp:MPI_Group_range_incl"
symbol="TASK_MPI_Group_range_incl"
value="115"
/>
<task
eventGUID="{23634353-7a64-4b1b-a8bb-a6425aa6001f}"
message="$(string.MPI_Group_rank)"
name="grp:MPI_Group_rank"
symbol="TASK_MPI_Group_rank"
value="116"
/>
<task
eventGUID="{a6aa371a-a7e2-45af-949d-9f2e9eb6bfff}"
message="$(string.MPI_Group_size)"
name="grp:MPI_Group_size"
symbol="TASK_MPI_Group_size"
value="117"
/>
<task
eventGUID="{2e2b5055-bd5d-4d03-8726-8189b157167d}"
message="$(string.MPI_Group_translate_ranks)"
name="grp:MPI_Group_translate_ranks"
symbol="TASK_MPI_Group_translate_ranks"
value="118"
/>
<task
eventGUID="{ce53a78b-9fec-4e15-a165-e3bc5f84ed46}"
message="$(string.MPI_Group_union)"
name="grp:MPI_Group_union"
symbol="TASK_MPI_Group_union"
value="119"
/>
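<!-- Info Tasks -->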
<task
eventGUID="{c190739f-7c4d-461e-b41d-85b844f7ac5b}"
message="$(string.MPI_Info_create)"
name="info:MPI_Info_create"
symbol="TASK_MPI_Info_create"
value="120"
/>
<task
eventGUID="{54dccaf6-2731-4434-92fc-b92eb6a810ad}"
message="$(string.MPI_Info_delete)"
name="info:MPI_Info_delete"
symbol="TASK_MPI_Info_delete"
value="121"
/>
<task
eventGUID="{f6fc97f7-41cb-4f21-bad2-00c3b3a56952}"
message="$(string.MPI_Info_dup)"
name="info:MPI_Info_dup"
symbol="TASK_MPI_Info_dup"
value="122"
/>
<task
eventGUID="{0d76e7b2-99da-4b99-8b3e-397d1768ac45}"
message="$(string.MPI_Info_free)"
name="info:MPI_Info_free"
symbol="TASK_MPI_Info_free"
value="123"
/>
<task
eventGUID="{286e996d-b6cd-4314-9ec3-29a594f9ef47}"
message="$(string.MPI_Info_get)"
name="info:MPI_Info_get"
symbol="TASK_MPI_Info_get"
value="124"
/>
<task
eventGUID="{dd13bb16-cf09-4be7-927d-9063c30d47b0}"
message="$(string.MPI_Info_get_nkeys)"
name="info:MPI_Info_get_nkeys"
symbol="TASK_MPI_Info_get_nkeys"
value="125"
/>
<task
eventGUID="{a4db105d-03a4-4288-a2aa-b9d05ec86840}"
message="$(string.MPI_Info_get_nthkey)"
name="info:MPI_Info_get_nthkey"
symbol="TASK_MPI_Info_get_nthkey"
value="126"
/>
<task
eventGUID="{6851e305-d338-40cd-8153-58f52bf6c588}"
message="$(string.MPI_Info_get_valuelen)"
name="info:MPI_Info_get_valuelen"
symbol="TASK_MPI_Info_get_valuelen"
value="127"
/>
<task
eventGUID="{c976f96f-d3bd-4435-9ab1-d7195e1734a2}"
message="$(string.MPI_Info_set)"
name="info:MPI_Info_set"
symbol="TASK_MPI_Info_set"
value="128"
/>
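<!-- Init Tasks -->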
<task
eventGUID="{133636a9-b1f2-4605-a895-3adfa995aa01}"
message="$(string.MPI_Abort)"
name="init:MPI_Abort"
symbol="TASK_MPI_Abort"
value="129"
/>
<task
eventGUID="{35213102-3a9f-4c87-a13a-db1995cb8ac8}"
message="$(string.MPI_Finalize)"
name="init:MPI_Finalize"
symbol="TASK_MPI_Finalize"
value="130"
/>
<task
eventGUID="{6baf689e-110c-4533-9460-ad166223cc66}"
message="$(string.MPI_Init)"
name="init:MPI_Init"
symbol="TASK_MPI_Init"
value="131"
/>
<task
eventGUID="{e81a728b-08da-460c-aefd-cf01a3cccf66}"
message="$(string.MPI_Init_thread)"
name="init:MPI_Init_thread"
symbol="TASK_MPI_Init_thread"
value="132"
/>
<task
eventGUID="{d0f23388-e340-4d49-b635-3174672ed95d}"
message="$(string.MPI_Is_thread_main)"
name="init:MPI_Is_thread_main"
symbol="TASK_MPI_Is_thread_main"
value="133"
/>
<task
eventGUID="{21623c1c-8437-46a9-bc0d-2d5b9f31a338}"
message="$(string.MPI_Query_thread)"
name="init:MPI_Query_thread"
symbol="TASK_MPI_Query_thread"
value="134"
/>
<task
eventGUID="{b96b2192-3107-40ef-a2f3-be70f9f3b251}"
message="$(string.MPI_Get_processor_name)"
name="init:MPI_Get_processor_name"
symbol="TASK_MPI_Get_processor_name"
value="135"
/>
<task
eventGUID="{ce037463-082f-4cf5-bcdd-9b3879174f48}"
message="$(string.MPI_Get_version)"
name="init:MPI_Get_version"
symbol="TASK_MPI_Get_version"
value="136"
/>
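<!-- Point-to-point Tasks -->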
<task
eventGUID="{8c473646-9f86-45e2-8378-1461fe82f40a}"
message="$(string.MPI_Bsend)"
name="p2p:MPI_Bsend"
symbol="TASK_MPI_Bsend"
value="137"
/>
<task
eventGUID="{803be3fb-d08f-431c-bf2f-6957c5025d94}"
message="$(string.MPI_Bsend_init)"
name="p2p:MPI_Bsend_init"
symbol="TASK_MPI_Bsend_init"
value="138"
/>
<task
eventGUID="{aa118351-1773-4145-a075-c3f4af2a3d33}"
message="$(string.MPI_Buffer_attach)"
name="p2p:MPI_Buffer_attach"
symbol="TASK_MPI_Buffer_attach"
value="139"
/>
<task
eventGUID="{916e3c84-645a-4a2f-8b17-4c716de32a40}"
message="$(string.MPI_Buffer_detach)"
name="p2p:MPI_Buffer_detach"
symbol="TASK_MPI_Buffer_detach"
value="140"
/>
<task
eventGUID="{e0ec62c3-c3f6-4a32-b2c0-3dc0bc220260}"
message="$(string.MPI_Cancel)"
name="p2p:MPI_Cancel"
symbol="TASK_MPI_Cancel"
value="141"
/>
<task
eventGUID="{e57547b8-a4eb-4c4b-b99f-61acf5b8a59b}"
message="$(string.MPI_Grequest_complete)"
name="p2p:MPI_Grequest_complete"
symbol="TASK_MPI_Grequest_complete"
value="142"
/>
<task
eventGUID="{b5e5ba11-d3ae-495d-a92a-12ef973ed480}"
message="$(string.MPI_Grequest_start)"
name="p2p:MPI_Grequest_start"
symbol="TASK_MPI_Grequest_start"
value="143"
/>
<task
eventGUID="{79def37b-e9be-4599-ac2d-369e8a622b8c}"
message="$(string.MPI_Ibsend)"
name="p2p:MPI_Ibsend"
symbol="TASK_MPI_Ibsend"
value="144"
/>
<task
eventGUID="{0a44f556-5322-472f-816f-386d6976413f}"
message="$(string.MPI_Iprobe)"
name="p2p:MPI_Iprobe"
symbol="TASK_MPI_Iprobe"
value="145"
/>
<task
eventGUID="{f67a5d0b-93ac-4ef5-8cdd-543949fb13a4}"
message="$(string.MPI_Irecv)"
name="p2p:MPI_Irecv"
symbol="TASK_MPI_Irecv"
value="146"
/>
<task
eventGUID="{4e8f3cb4-ca3e-42c0-a131-379694f6b8e0}"
message="$(string.MPI_Irsend)"
name="p2p:MPI_Irsend"
symbol="TASK_MPI_Irsend"
value="147"
/>
<task
eventGUID="{2cbe9ccd-e6ea-4d3f-a17f-f6efc78d6d20}"
message="$(string.MPI_Isend)"
name="p2p:MPI_Isend"
symbol="TASK_MPI_Isend"
value="148"
/>
<task
eventGUID="{7904f6f6-9f3e-4265-a61a-3555ac207bc6}"
message="$(string.MPI_Issend)"
name="p2p:MPI_Issend"
symbol="TASK_MPI_Issend"
value="149"
/>
<task
eventGUID="{3b3f1343-ab4e-4e59-88a4-c80f81d6e23f}"
message="$(string.MPI_Probe)"
name="p2p:MPI_Probe"
symbol="TASK_MPI_Probe"
value="150"
/>
<task
eventGUID="{74b325b5-3d16-4e82-8fbf-533722495027}"
message="$(string.MPI_Recv)"
name="p2p:MPI_Recv"
symbol="TASK_MPI_Recv"
value="151"
/>
<task
eventGUID="{978f5c65-469e-4928-a0cd-e60fcc0cb992}"
message="$(string.MPI_Recv_init)"
name="p2p:MPI_Recv_init"
symbol="TASK_MPI_Recv_init"
value="152"
/>
<task
eventGUID="{c25d6596-17a4-49c2-8032-6aedfa4e80a7}"
message="$(string.MPI_Request_free)"
name="p2p:MPI_Request_free"
symbol="TASK_MPI_Request_free"
value="153"
/>
<task
eventGUID="{fd8d3d5f-54f6-4137-ab7e-00560014b925}"
message="$(string.MPI_Request_get_status)"
name="p2p:MPI_Request_get_status"
symbol="TASK_MPI_Request_get_status"
value="154"
/>
<task
eventGUID="{f1a486f8-42ac-4ed6-a6d1-0b1c45d273ea}"
message="$(string.MPI_Rsend)"
name="p2p:MPI_Rsend"
symbol="TASK_MPI_Rsend"
value="155"
/>
<task
eventGUID="{6c029224-e1ba-45ac-add3-504bbc8615af}"
message="$(string.MPI_Rsend_init)"
name="p2p:MPI_Rsend_init"
symbol="TASK_MPI_Rsend_init"
value="156"
/>
<task
eventGUID="{78efcbf2-16ae-45c8-a613-2bf951c6ab31}"
message="$(string.MPI_Send)"
name="p2p:MPI_Send"
symbol="TASK_MPI_Send"
value="157"
/>
<task
eventGUID="{5e69e175-44e3-4e94-9e58-4531e7853f90}"
message="$(string.MPI_Send_init)"
name="p2p:MPI_Send_init"
symbol="TASK_MPI_Send_init"
value="158"
/>
<task
eventGUID="{c531d60a-3216-4aec-9936-621ceb2aebb7}"
message="$(string.MPI_Sendrecv)"
name="p2p:MPI_Sendrecv"
symbol="TASK_MPI_Sendrecv"
value="159"
/>
<task
eventGUID="{bbdc7c46-b1d5-4667-882c-3f067f0fe671}"
message="$(string.MPI_Sendrecv_replace)"
name="p2p:MPI_Sendrecv_replace"
symbol="TASK_MPI_Sendrecv_replace"
value="160"
/>
<task
eventGUID="{6a18bbe1-f442-4076-80e5-852449bcae57}"
message="$(string.MPI_Ssend)"
name="p2p:MPI_Ssend"
symbol="TASK_MPI_Ssend"
value="161"
/>
<task
eventGUID="{81a1691a-e852-475b-a020-f8c9b8cfae41}"
message="$(string.MPI_Ssend_init)"
name="p2p:MPI_Ssend_init"
symbol="TASK_MPI_Ssend_init"
value="162"
/>
<task
eventGUID="{ecab43e7-a42e-49d6-8f05-e5cf3d52d84e}"
message="$(string.MPI_Start)"
name="p2p:MPI_Start"
symbol="TASK_MPI_Start"
value="163"
/>
<task
eventGUID="{28cf10d1-9060-49d9-9dca-89fd4b1cd892}"
message="$(string.MPI_Startall)"
name="p2p:MPI_Startall"
symbol="TASK_MPI_Startall"
value="164"
/>
<task
eventGUID="{acc46ea1-17a5-4989-82fb-1e6ae21ab188}"
message="$(string.MPI_Status_set_cancelled)"
name="p2p:MPI_Status_set_cancelled"
symbol="TASK_MPI_Status_set_cancelled"
value="165"
/>
<task
eventGUID="{bb7a79b6-0453-4f8f-96b3-37f1a63f1dc0}"
message="$(string.MPI_Wait)"
name="p2p:MPI_Wait"
symbol="TASK_MPI_Wait"
value="166"
/>
<task
eventGUID="{25ba7bd0-0a3b-4c54-b4da-984ff69c5f96}"
message="$(string.MPI_Waitall)"
name="p2p:MPI_Waitall"
symbol="TASK_MPI_Waitall"
value="167"
/>
<task
eventGUID="{57dea1ae-e6c0-48d6-b804-b6a0d51abf82}"
message="$(string.MPI_Waitany)"
name="p2p:MPI_Waitany"
symbol="TASK_MPI_Waitany"
value="168"
/>
<task
eventGUID="{2e0532c6-15af-45d7-9b2a-655f3fd492f9}"
message="$(string.MPI_Waitsome)"
name="p2p:MPI_Waitsome"
symbol="TASK_MPI_Waitsome"
value="169"
/>
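<!-- Polling Tasks -->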
<task
eventGUID="{3dbb6c5b-fd71-4e54-8946-0f5df36cc087}"
message="$(string.MPI_Test)"
name="poll:MPI_Test"
symbol="TASK_MPI_Test"
value="170"
/>
<task
eventGUID="{bfe39429-a907-4cb1-a309-60003bd50667}"
message="$(string.MPI_Test_cancelled)"
name="poll:MPI_Test_cancelled"
symbol="TASK_MPI_Test_cancelled"
value="171"
/>
<task
eventGUID="{b08d1c5f-fde1-4a40-8095-e4a3b0e4d811}"
message="$(string.MPI_Testall)"
name="poll:MPI_Testall"
symbol="TASK_MPI_Testall"
value="172"
/>
<task
eventGUID="{84c6cb30-3bbb-42b3-96a6-7caf4de29f0e}"
message="$(string.MPI_Testany)"
name="poll:MPI_Testany"
symbol="TASK_MPI_Testany"
value="173"
/>
<task
eventGUID="{40ffec47-a091-4219-9ae1-52ed65568e15}"
message="$(string.MPI_Testsome)"
name="poll:MPI_Testsome"
symbol="TASK_MPI_Testsome"
value="174"
/>
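<!-- RMA Tasks -->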
<task
eventGUID="{34abec38-1cf0-4278-90ad-7d4796764ccb}"
message="$(string.MPI_Accumulate)"
name="rma:MPI_Accumulate"
symbol="TASK_MPI_Accumulate"
value="175"
/>
<task
eventGUID="{3bb3fd9d-1e16-47b4-8d6f-790a8aadf9ef}"
message="$(string.MPI_Alloc_mem)"
name="rma:MPI_Alloc_mem"
symbol="TASK_MPI_Alloc_mem"
value="176"
/>
<task
eventGUID="{d3a10868-01d8-40ee-be35-bb6799726d1e}"
message="$(string.MPI_Free_mem)"
name="rma:MPI_Free_mem"
symbol="TASK_MPI_Free_mem"
value="177"
/>
<task
eventGUID="{424d06c2-1a50-49d7-9527-b666cb755d5d}"
message="$(string.MPI_Get)"
name="rma:MPI_Get"
symbol="TASK_MPI_Get"
value="178"
/>
<task
eventGUID="{ee52de8a-2ce7-4179-a111-c9caac57640a}"
message="$(string.MPI_Put)"
name="rma:MPI_Put"
symbol="TASK_MPI_Put"
value="179"
/>
<task
eventGUID="{7c75bb2e-268b-4982-a522-0dbf43b9ee84}"
message="$(string.MPI_Win_complete)"
name="rma:MPI_Win_complete"
symbol="TASK_MPI_Win_complete"
value="180"
/>
<task
eventGUID="{4e5c299a-8785-4540-b72b-2b3596048d5a}"
message="$(string.MPI_Win_create)"
name="rma:MPI_Win_create"
symbol="TASK_MPI_Win_create"
value="181"
/>
<task
eventGUID="{d4b6031c-5a9f-4675-bfe9-ae0e21f7e7de}"
message="$(string.MPI_Win_fence)"
name="rma:MPI_Win_fence"
symbol="TASK_MPI_Win_fence"
value="182"
/>
<task
eventGUID="{6f0d3f3b-a61f-4bd1-8941-02c1ccfe5a9b}"
message="$(string.MPI_Win_free)"
name="rma:MPI_Win_free"
symbol="TASK_MPI_Win_free"
value="183"
/>
<task
eventGUID="{fafd0de4-250d-4ceb-b7e3-1bf843f45325}"
message="$(string.MPI_Win_get_group)"
name="rma:MPI_Win_get_group"
symbol="TASK_MPI_Win_get_group"
value="184"
/>
<task
eventGUID="{66ddc34c-5a0d-4617-88fd-4a8a0e8f9ba2}"
message="$(string.MPI_Win_get_name)"
name="rma:MPI_Win_get_name"
symbol="TASK_MPI_Win_get_name"
value="185"
/>
<task
eventGUID="{c3b052f0-4aca-48fd-97c5-9fc73c1d5c62}"
message="$(string.MPI_Win_lock)"
name="rma:MPI_Win_lock"
symbol="TASK_MPI_Win_lock"
value="186"
/>
<task
eventGUID="{1a306871-4a0e-4edf-a9f6-ef5b1d25525c}"
message="$(string.MPI_Win_post)"
name="rma:MPI_Win_post"
symbol="TASK_MPI_Win_post"
value="187"
/>
<task
eventGUID="{8c2dd2f3-57c2-4c9e-9b45-f99127237f66}"
message="$(string.MPI_Win_set_name)"
name="rma:MPI_Win_set_name"
symbol="TASK_MPI_Win_set_name"
value="188"
/>
<task
eventGUID="{2824ba5f-65b5-4c95-9f49-0c1ce838fd95}"
message="$(string.MPI_Win_start)"
name="rma:MPI_Win_start"
symbol="TASK_MPI_Win_start"
value="189"
/>
<task
eventGUID="{7fcccfa6-0095-4f17-8efd-94a3021cb9c3}"
message="$(string.MPI_Win_test)"
name="rma:MPI_Win_test"
symbol="TASK_MPI_Win_test"
value="190"
/>
<task
eventGUID="{40f0f480-8f75-4e85-912a-dfd1f4df7d2a}"
message="$(string.MPI_Win_unlock)"
name="rma:MPI_Win_unlock"
symbol="TASK_MPI_Win_unlock"
value="191"
/>
<task
eventGUID="{22ed8b55-f7de-4b7b-a43d-42a785ac186c}"
message="$(string.MPI_Win_wait)"
name="rma:MPI_Win_wait"
symbol="TASK_MPI_Win_wait"
value="192"
/>
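<!-- File I/O Tasks -->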
<task
eventGUID="{8156cc53-9272-4a8b-9912-92da394ba76d}"
message="$(string.MPI_File_close)"
name="io:MPI_File_close"
symbol="TASK_MPI_File_close"
value="193"
/>
<task
eventGUID="{e336755b-9f8a-476d-a9d5-97fec06e1a2c}"
message="$(string.MPI_File_delete)"
name="io:MPI_File_delete"
symbol="TASK_MPI_File_delete"
value="194"
/>
<task
eventGUID="{7893a4c9-9d45-4fdc-af4c-16c87872ba2c}"
message="$(string.MPI_File_c2f)"
name="io:MPI_File_c2f"
symbol="TASK_MPI_File_c2f"
value="195"
/>
<task
eventGUID="{bf2b05ed-f1d1-4c51-8813-6521721c8560}"
message="$(string.MPI_File_f2c)"
name="io:MPI_File_f2c"
symbol="TASK_MPI_File_f2c"
value="196"
/>
<task
eventGUID="{c36a0cf6-ce8e-4530-ab08-342c486bf399}"
message="$(string.MPI_File_sync)"
name="io:MPI_File_sync"
symbol="TASK_MPI_File_sync"
value="197"
/>
<task
eventGUID="{3568b799-4593-4fba-9711-cd15fbe45420}"
message="$(string.MPI_File_get_amode)"
name="io:MPI_File_get_amode"
symbol="TASK_MPI_File_get_amode"
value="198"
/>
<task
eventGUID="{aa33c1fa-5976-4ebe-8b30-7248a5f4527a}"
message="$(string.MPI_File_get_atomicity)"
name="io:MPI_File_get_atomicity"
symbol="TASK_MPI_File_get_atomicity"
value="199"
/>
<task
eventGUID="{95ecdc24-92b1-4709-9790-211e63f71460}"
message="$(string.MPI_File_get_byte_offset)"
name="io:MPI_File_get_byte_offset"
symbol="TASK_MPI_File_get_byte_offset"
value="200"
/>
<task
eventGUID="{11200636-22c7-4e8b-9a2f-3f9581b0fea8}"
message="$(string.MPI_File_get_type_extent)"
name="io:MPI_File_get_type_extent"
symbol="TASK_MPI_File_get_type_extent"
value="201"
/>
<task
eventGUID="{369bf245-b112-4202-bf56-05335cd3844f}"
message="$(string.MPI_File_get_group)"
name="io:MPI_File_get_group"
symbol="TASK_MPI_File_get_group"
value="202"
/>
<task
eventGUID="{429d5d52-8467-4baa-8e77-459ce7a9a7b1}"
message="$(string.MPI_File_get_info)"
name="io:MPI_File_get_info"
symbol="TASK_MPI_File_get_info"
value="203"
/>
<task
eventGUID="{70e09176-146f-405b-a89a-a0656c670c57}"
message="$(string.MPI_File_get_position)"
name="io:MPI_File_get_position"
symbol="TASK_MPI_File_get_position"
value="204"
/>
<task
eventGUID="{d509acfe-68fa-4136-80b1-c57ad4588821}"
message="$(string.MPI_File_get_position_shared)"
name="io:MPI_File_get_position_shared"
symbol="TASK_MPI_File_get_position_shared"
value="205"
/>
<task
eventGUID="{060de131-b511-49c5-844a-7e6b7329b29a}"
message="$(string.MPI_File_get_size)"
name="io:MPI_File_get_size"
symbol="TASK_MPI_File_get_size"
value="206"
/>
<task
eventGUID="{1ec35529-f89f-45f1-8c7f-7c5de25bffdb}"
message="$(string.MPI_File_get_view)"
name="io:MPI_File_get_view"
symbol="TASK_MPI_File_get_view"
value="207"
/>
<task
eventGUID="{0369609c-484d-4935-972a-6d5243618dcf}"
message="$(string.MPI_File_iread)"
name="io:MPI_File_iread"
symbol="TASK_MPI_File_iread"
value="208"
/>
<task
eventGUID="{f251aa10-0116-44e6-a762-1d2cf4ffe67c}"
message="$(string.MPI_File_iread_at)"
name="io:MPI_File_iread_at"
symbol="TASK_MPI_File_iread_at"
value="209"
/>
<task
eventGUID="{882d1c18-14b7-4233-9888-e2c9d2c6e5db}"
message="$(string.MPI_File_iread_shared)"
name="io:MPI_File_iread_shared"
symbol="TASK_MPI_File_iread_shared"
value="210"
/>
<task
eventGUID="{59aff121-2783-42ea-b859-e235323911a2}"
message="$(string.MPI_File_iwrite)"
name="io:MPI_File_iwrite"
symbol="TASK_MPI_File_iwrite"
value="211"
/>
<task
eventGUID="{4ac2410c-98b4-41d6-aae4-a472fa322dd3}"
message="$(string.MPI_File_iwrite_at)"
name="io:MPI_File_iwrite_at"
symbol="TASK_MPI_File_iwrite_at"
value="212"
/>
<task
eventGUID="{f8574f3b-170c-4b64-8c63-070410344a3b}"
message="$(string.MPI_File_iwrite_shared)"
name="io:MPI_File_iwrite_shared"
symbol="TASK_MPI_File_iwrite_shared"
value="213"
/>
<task
eventGUID="{1481eec1-7898-4fbb-affd-ee73448b03ef}"
message="$(string.MPI_File_open)"
name="io:MPI_File_open"
symbol="TASK_MPI_File_open"
value="214"
/>
<task
eventGUID="{0599a490-48b2-45e9-8843-4421ddde1916}"
message="$(string.MPI_File_preallocate)"
name="io:MPI_File_preallocate"
symbol="TASK_MPI_File_preallocate"
value="215"
/>
<task
eventGUID="{35006ee2-f89c-45a3-ba95-f3134019e907}"
message="$(string.MPI_File_read_at_all_begin)"
name="io:MPI_File_read_at_all_begin"
symbol="TASK_MPI_File_read_at_all_begin"
value="216"
/>
<task
eventGUID="{84ed9d95-d29f-4c05-82c3-e5602a3b7bdf}"
message="$(string.MPI_File_read_at_all_end)"
name="io:MPI_File_read_at_all_end"
symbol="TASK_MPI_File_read_at_all_end"
value="217"
/>
<task
eventGUID="{e3717c38-cc76-4fc1-ae0f-f655181847ef}"
message="$(string.MPI_File_read)"
name="io:MPI_File_read"
symbol="TASK_MPI_File_read"
value="218"
/>
<task
eventGUID="{8a96b6a3-9f7d-47c6-a3e0-9c11189e0365}"
message="$(string.MPI_File_read_all)"
name="io:MPI_File_read_all"
symbol="TASK_MPI_File_read_all"
value="219"
/>
<task
eventGUID="{0198e580-1784-46f2-b4aa-c700e86adc19}"
message="$(string.MPI_File_read_all_begin)"
name="io:MPI_File_read_all_begin"
symbol="TASK_MPI_File_read_all_begin"
value="220"
/>
<task
eventGUID="{266015e3-5a11-434d-a8db-4ea2044566d0}"
message="$(string.MPI_File_read_all_end)"
name="io:MPI_File_read_all_end"
symbol="TASK_MPI_File_read_all_end"
value="221"
/>
<task
eventGUID="{edba795c-1f21-4959-9773-ce01dc8ca6e2}"
message="$(string.MPI_File_read_at)"
name="io:MPI_File_read_at"
symbol="TASK_MPI_File_read_at"
value="222"
/>
<task
eventGUID="{2aac3b0e-c857-44e9-8ecf-68a75af422af}"
message="$(string.MPI_File_read_at_all)"
name="io:MPI_File_read_at_all"
symbol="TASK_MPI_File_read_at_all"
value="223"
/>
<task
eventGUID="{3c308bb8-2319-40c3-9657-5212a6110746}"
message="$(string.MPI_File_read_ordered)"
name="io:MPI_File_read_ordered"
symbol="TASK_MPI_File_read_ordered"
value="224"
/>
<task
eventGUID="{f4604acf-f2af-46c6-a6a3-22b31145fcf0}"
message="$(string.MPI_File_read_ordered_begin)"
name="io:MPI_File_read_ordered_begin"
symbol="TASK_MPI_File_read_ordered_begin"
value="225"
/>
<task
eventGUID="{62756667-02a5-4594-98c5-f96b4943b356}"
message="$(string.MPI_File_read_ordered_end)"
name="io:MPI_File_read_ordered_end"
symbol="TASK_MPI_File_read_ordered_end"
value="226"
/>
<task
eventGUID="{6aab5e35-abfa-4bdf-a69a-e4a1b4f3de75}"
message="$(string.MPI_File_read_shared)"
name="io:MPI_File_read_shared"
symbol="TASK_MPI_File_read_shared"
value="227"
/>
<task
eventGUID="{1ba8ca46-e4c0-4f99-8d69-1df17742b2f3}"
message="$(string.MPI_File_seek)"
name="io:MPI_File_seek"
symbol="TASK_MPI_File_seek"
value="228"
/>
<task
eventGUID="{43160a0c-4dbb-47aa-a366-18bee2ce5816}"
message="$(string.MPI_File_seek_shared)"
name="io:MPI_File_seek_shared"
symbol="TASK_MPI_File_seek_shared"
value="229"
/>
<task
eventGUID="{a45f251b-1de7-484d-91c0-99d6a1b04081}"
message="$(string.MPI_File_set_atomicity)"
name="io:MPI_File_set_atomicity"
symbol="TASK_MPI_File_set_atomicity"
value="230"
/>
<task
eventGUID="{f089459f-9ced-4baf-9a2c-0c5430434c36}"
message="$(string.MPI_File_set_info)"
name="io:MPI_File_set_info"
symbol="TASK_MPI_File_set_info"
value="231"
/>
<task
eventGUID="{191108c1-33c9-42b1-abe2-02202fdf7639}"
message="$(string.MPI_File_set_size)"
name="io:MPI_File_set_size"
symbol="TASK_MPI_File_set_size"
value="232"
/>
<task
eventGUID="{ede7e1b8-f321-4da2-96c2-b312257a0410}"
message="$(string.MPI_File_set_view)"
name="io:MPI_File_set_view"
symbol="TASK_MPI_File_set_view"
value="233"
/>
<task
eventGUID="{8faac9a3-45b8-4c87-9634-29ef68f53ee2}"
message="$(string.MPI_File_write_at_all_begin)"
name="io:MPI_File_write_at_all_begin"
symbol="TASK_MPI_File_write_at_all_begin"
value="234"
/>
<task
eventGUID="{07ed9128-2ed8-49eb-b7cf-24b91ed77793}"
message="$(string.MPI_File_write_at_all_end)"
name="io:MPI_File_write_at_all_end"
symbol="TASK_MPI_File_write_at_all_end"
value="235"
/>
<task
eventGUID="{d070034f-fa5d-4729-812e-cef547b0a8b3}"
message="$(string.MPI_File_write)"
name="io:MPI_File_write"
symbol="TASK_MPI_File_write"
value="236"
/>
<task
eventGUID="{e6b2555b-a658-4cce-ae17-a70fac481346}"
message="$(string.MPI_File_write_all)"
name="io:MPI_File_write_all"
symbol="TASK_MPI_File_write_all"
value="237"
/>
<task
eventGUID="{16292cae-af31-4724-9726-e0f821da077c}"
message="$(string.MPI_File_write_all_begin)"
name="io:MPI_File_write_all_begin"
symbol="TASK_MPI_File_write_all_begin"
value="238"
/>
<task
eventGUID="{13478529-4e52-410c-939b-1a4ed99ef0b7}"
message="$(string.MPI_File_write_all_end)"
name="io:MPI_File_write_all_end"
symbol="TASK_MPI_File_write_all_end"
value="239"
/>
<task
eventGUID="{90b8b005-f47c-4095-97de-4c1b0bfddcdf}"
message="$(string.MPI_File_write_at)"
name="io:MPI_File_write_at"
symbol="TASK_MPI_File_write_at"
value="240"
/>
<task
eventGUID="{90ba8ef4-22d3-4477-a6c2-30a9d9525d67}"
message="$(string.MPI_File_write_at_all)"
name="io:MPI_File_write_at_all"
symbol="TASK_MPI_File_write_at_all"
value="241"
/>
<task
eventGUID="{383d4e57-ff77-4157-a32d-3aaa142791fb}"
message="$(string.MPI_File_write_ordered)"
name="io:MPI_File_write_ordered"
symbol="TASK_MPI_File_write_ordered"
value="242"
/>
<task
eventGUID="{82dfbfc7-a990-415e-b1a9-f0ae16c66106}"
message="$(string.MPI_File_write_ordered_begin)"
name="io:MPI_File_write_ordered_begin"
symbol="TASK_MPI_File_write_ordered_begin"
value="243"
/>
<task
eventGUID="{7471d744-f9be-472b-ae43-e7b530cfe5aa}"
message="$(string.MPI_File_write_ordered_end)"
name="io:MPI_File_write_ordered_end"
symbol="TASK_MPI_File_write_ordered_end"
value="244"
/>
<task
eventGUID="{e5b92541-e3c6-4345-af0f-61ead9826ab8}"
message="$(string.MPI_File_write_shared)"
name="io:MPI_File_write_shared"
symbol="TASK_MPI_File_write_shared"
value="245"
/>
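<!-- Spawn Tasks -->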
<task
eventGUID="{9e79b9e1-02a6-4cd8-9b3f-60dc290682d8}"
message="$(string.MPI_Close_port)"
name="io:MPI_Close_port"
symbol="TASK_MPI_Close_port"
value="246"
/>
<task
eventGUID="{261bbe78-b54b-4879-9207-01a61cef4d6b}"
message="$(string.MPI_Comm_accept)"
name="spwn:MPI_Comm_accept"
symbol="TASK_MPI_Comm_accept"
value="247"
/>
<task
eventGUID="{cdf795f8-be02-49e0-86a8-de1deb3dee78}"
message="$(string.MPI_Comm_connect)"
name="spwn:MPI_Comm_connect"
symbol="TASK_MPI_Comm_connect"
value="248"
/>
<task
eventGUID="{58665c46-5c44-4776-bae4-8fb6d51fa0ef}"
message="$(string.MPI_Comm_disconnect)"
name="spwn:MPI_Comm_disconnect"
symbol="TASK_MPI_Comm_disconnect"
value="249"
/>
<task
eventGUID="{3b38933a-6f21-48b6-9573-c52eef62064f}"
message="$(string.MPI_Comm_get_parent)"
name="spwn:MPI_Comm_get_parent"
symbol="TASK_MPI_Comm_get_parent"
value="250"
/>
<task
eventGUID="{2d2ac1c7-dbac-4f85-bce6-1e4d8d4b8e27}"
message="$(string.MPI_Comm_join)"
name="spwn:MPI_Comm_join"
symbol="TASK_MPI_Comm_join"
value="251"
/>
<task
eventGUID="{9948a0f2-6b01-411d-835a-b3f2b756c9e5}"
message="$(string.MPI_Comm_spawn)"
name="spwn:MPI_Comm_spawn"
symbol="TASK_MPI_Comm_spawn"
value="252"
/>
<task
eventGUID="{6807c269-f967-43be-870f-3e86c3fa47be}"
message="$(string.MPI_Comm_spawn_multiple)"
name="spwn:MPI_Comm_spawn_multiple"
symbol="TASK_MPI_Comm_spawn_multiple"
value="253"
/>
<task
eventGUID="{638740b5-62a8-4e85-9686-9915385bab78}"
message="$(string.MPI_Lookup_name)"
name="spwn:MPI_Lookup_name"
symbol="TASK_MPI_Lookup_name"
value="254"
/>
<task
eventGUID="{3e6aa2f6-3e53-4939-949c-42d29686d965}"
message="$(string.MPI_Open_port)"
name="spwn:MPI_Open_port"
symbol="TASK_MPI_Open_port"
value="255"
/>
<task
eventGUID="{d3f2d058-4eb0-4aa3-8b7c-c6139164e6eb}"
message="$(string.MPI_Publish_name)"
name="spwn:MPI_Publish_name"
symbol="TASK_MPI_Publish_name"
value="256"
/>
<task
eventGUID="{1dfba3c3-d819-4b0d-a869-4d33761a0989}"
message="$(string.MPI_Unpublish_name)"
name="spwn:MPI_Unpublish_name"
symbol="TASK_MPI_Unpublish_name"
value="257"
/>
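<!-- Topology Tasks -->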
<task
eventGUID="{4cc44dbc-8812-4e8f-bf7f-ccc5a2428fe8}"
message="$(string.MPI_Cart_coords)"
name="topo:MPI_Cart_coords"
symbol="TASK_MPI_Cart_coords"
value="258"
/>
<task
eventGUID="{8b9aed63-6f84-4a76-ab9d-139565a9f7d7}"
message="$(string.MPI_Cart_create)"
name="topo:MPI_Cart_create"
symbol="TASK_MPI_Cart_create"
value="259"
/>
<task
eventGUID="{8f12a02f-ff98-4e5e-b528-0fe7f2f55e4e}"
message="$(string.MPI_Cart_get)"
name="topo:MPI_Cart_get"
symbol="TASK_MPI_Cart_get"
value="260"
/>
<task
eventGUID="{c7b2924a-ca73-4e6b-b111-ccac35bd8281}"
message="$(string.MPI_Cart_map)"
name="topo:MPI_Cart_map"
symbol="TASK_MPI_Cart_map"
value="261"
/>
<task
eventGUID="{5abe7967-665f-4e45-b85b-32bfae126483}"
message="$(string.MPI_Cart_rank)"
name="topo:MPI_Cart_rank"
symbol="TASK_MPI_Cart_rank"
value="262"
/>
<task
eventGUID="{a765be26-b6eb-44b8-801b-eb1298617869}"
message="$(string.MPI_Cart_shift)"
name="topo:MPI_Cart_shift"
symbol="TASK_MPI_Cart_shift"
value="263"
/>
<task
eventGUID="{ed6e2af7-2b4a-484a-9f4e-161802ab58a2}"
message="$(string.MPI_Cart_sub)"
name="topo:MPI_Cart_sub"
symbol="TASK_MPI_Cart_sub"
value="264"
/>
<task
eventGUID="{fc8726c4-686a-4aec-9e58-5baf51a9e037}"
message="$(string.MPI_Cartdim_get)"
name="topo:MPI_Cartdim_get"
symbol="TASK_MPI_Cartdim_get"
value="265"
/>
<task
eventGUID="{c176bde5-8c37-49d3-befe-2aa792f62c40}"
message="$(string.MPI_Dims_create)"
name="topo:MPI_Dims_create"
symbol="TASK_MPI_Dims_create"
value="266"
/>
<task
eventGUID="{5cc969a9-422c-46a9-8aba-d65e3e28886a}"
message="$(string.MPI_Graph_get)"
name="topo:MPI_Graph_get"
symbol="TASK_MPI_Graph_get"
value="267"
/>
<task
eventGUID="{8c62f982-a455-4533-ba81-1c55a42e29a4}"
message="$(string.MPI_Graph_map)"
name="topo:MPI_Graph_map"
symbol="TASK_MPI_Graph_map"
value="268"
/>
<task
eventGUID="{31f9e96c-ecbb-4f27-9128-6c376dadb47b}"
message="$(string.MPI_Graph_neighbors)"
name="topo:MPI_Graph_neighbors"
symbol="TASK_MPI_Graph_neighbors"
value="269"
/>
<task
eventGUID="{9b854046-9fd7-4c27-a7b9-fe91789cec7e}"
message="$(string.MPI_Graph_create)"
name="topo:MPI_Graph_create"
symbol="TASK_MPI_Graph_create"
value="270"
/>
<task
eventGUID="{e1e0c834-1651-43b0-bfa4-80df7bcefbb9}"
message="$(string.MPI_Graphdims_get)"
name="topo:MPI_Graphdims_get"
symbol="TASK_MPI_Graphdims_get"
value="271"
/>
<task
eventGUID="{353ecd49-da33-444e-a2a8-afd2dac094fb}"
message="$(string.MPI_Graph_neighbors_count)"
name="topo:MPI_Graph_neighbors_count"
symbol="TASK_MPI_Graph_neighbors_count"
value="272"
/>
<task
eventGUID="{ed983f67-6c21-43dd-b760-2e00b83abe53}"
message="$(string.MPI_Topo_test)"
name="topo:MPI_Topo_test"
symbol="TASK_MPI_Topo_test"
value="273"
/>
<!-- Communications Tasks -->
<task
eventGUID="{3e45e4f3-7b79-4293-baff-1bfbbad1f7fc}"
message="$(string.send.nd)"
name="nd:send"
symbol="TASK_ND_SEND"
value="274"
/>
<task
eventGUID="{ea5ddcf7-4ede-46b8-a09a-ff989a8910e0}"
message="$(string.recv.nd)"
name="nd:recv"
symbol="TASK_ND_RECV"
value="275"
/>
<task
eventGUID="{a62f72ea-3d18-440e-a02f-dd7566802b04}"
message="$(string.send.sock)"
name="sock:send"
symbol="TASK_SOCK_SEND"
value="276"
/>
<task
eventGUID="{fa8ad8c9-58da-49d5-8e9a-48b54f871226}"
message="$(string.recv.sock)"
name="sock:recv"
symbol="TASK_SOCK_RECV"
value="277"
/>
<task
eventGUID="{de17157b-45e2-4480-8657-aaa19faf94aa}"
message="$(string.send.shm)"
name="shm:send"
symbol="TASK_SHM_SEND"
value="278"
/>
<task
eventGUID="{170c827a-dbe9-4578-8c14-e3d0a6b52069}"
message="$(string.recv.shm)"
name="shm:recv"
symbol="TASK_SHM_RECV"
value="279"
/>
<task
eventGUID="{8e98b586-8176-4774-8fcb-1521affee436}"
message="$(string.send.msg)"
name="msg:send"
symbol="TASK_MSG_SEND"
value="280"
/>
<task
eventGUID="{c3eebfd2-ff13-4ee5-9796-1c62acb45b0d}"
message="$(string.recv.msg)"
name="msg:recv"
symbol="TASK_MSG_RECV"
value="281"
/>
<!-- MSMPI extension Tasks -->
<task
eventGUID="{587ce403-111b-47f9-b0ea-58733d4ffd20}"
message="$(string.MSMPI_Queuelock_acquire)"
name="p2p:MSMPI_Queuelock_acquire"
symbol="TASK_MSMPI_Queuelock_acquire"
value="282"
/>
<task
eventGUID="{04115535-ee79-4aed-ae8e-50f8878e512b}"
message="$(string.MSMPI_Queuelock_release)"
name="p2p:MSMPI_Queuelock_release"
symbol="TASK_MSMPI_Queuelock_release"
value="283"
/>
<task
eventGUID="{21f8b3bd-651a-4a6a-b009-e5b0671dc24d}"
message="$(string.MSMPI_Waitsome_interruptible)"
name="p2p:MSMPI_Waitsome_interruptible"
symbol="TASK_MSMPI_Waitsome_interruptible"
value="284"
/>
<task
eventGUID="{9A019D9E-D59C-4F23-AAF4-56265731BC36}"
message="$(string.MSMPI_Request_set_apc)"
name="p2p:MSMPI_Request_set_apc"
symbol="TASK_MSMPI_Request_set_apc"
value="285"
/>
<!-- MPI v3 Tasks -->
<task
eventGUID="{9CA4BCF9-E1D6-4142-9A7E-B9ECF7A9D5D2}"
message="$(string.MPI_Comm_split_type)"
name="comm:MPI_Comm_split_type"
symbol="TASK_MPI_Comm_split_type"
value="286"
/>
<task
eventGUID="{87437FAE-D3D4-4BF2-9CF2-FA16E0C1B965}"
message="$(string.MPI_Win_allocate_shared)"
name="rma:MPI_Win_allocate_shared"
symbol="TASK_MPI_Win_allocate_shared"
value="287"
/>
<task
eventGUID="{5924EB83-DD7B-45BE-9B31-40C54D10ECCB}"
message="$(string.MPI_Win_shared_query)"
name="rma:MPI_Win_shared_query"
symbol="TASK_MPI_Win_shared_query"
value="288"
/>
<task
eventGUID="{AA24FC9C-554B-421D-891B-AE86974F384E}"
message="$(string.MPI_Type_create_hindexed_block)"
name="dt:MPI_Type_create_hindexed_block"
symbol="TASK_MPI_Type_create_hindexed_block"
value="289"
/>
<task
eventGUID="{0DDE1288-A97B-4784-ADC8-6D600AD5D2E4}"
message="$(string.MPI_Type_size_x)"
name="dt:MPI_Type_size_x"
symbol="TASK_MPI_Type_size_x"
value="290"
/>
<task
eventGUID="{AF05789D-05F3-4476-BE09-4F83768066A0}"
message="$(string.MPI_Type_get_extent_x)"
name="dt:MPI_Type_get_extent_x"
symbol="TASK_MPI_Type_get_extent_x"
value="291"
/>
<task
eventGUID="{3411D1F8-AEDF-40BA-BB8F-1A10D244A22A}"
message="$(string.MPI_Type_get_true_extent_x)"
name="dt:MPI_Type_get_true_extent_x"
symbol="TASK_MPI_Type_get_true_extent_x"
value="292"
/>
<task
eventGUID="{D9F3285F-6114-49E6-8C54-83505FA6FC38}"
message="$(string.MPI_Get_elements_x)"
name="dt:MPI_Get_elements_x"
symbol="TASK_MPI_Get_elements_x"
value="293"
/>
<task
eventGUID="{0646A968-6DBF-4717-BEEF-19772FF5DA15}"
message="$(string.MPI_Status_set_elements_x)"
name="dt:MPI_Status_set_elements_x"
symbol="TASK_MPI_Status_set_elements_x"
value="294"
/>
<task
eventGUID="{9ddfbe23-53bf-4c84-92c9-30c04efac89b}"
message="$(string.MPI_Improbe)"
name="p2p:MPI_Improbe"
symbol="TASK_MPI_Improbe"
value="295"
/>
<task
eventGUID="{424bf1db-7cdd-4cab-b0ee-77561897f11a}"
message="$(string.MPI_Mprobe)"
name="p2p:MPI_Mprobe"
symbol="TASK_MPI_Mprobe"
value="296"
/>
<task
eventGUID="{d5c0faa2-ca8d-4a15-8a51-2135dc04455c}"
message="$(string.MPI_Mrecv)"
name="p2p:MPI_Mrecv"
symbol="TASK_MPI_Mrecv"
value="297"
/>
<task
eventGUID="{6d5705f5-d270-4a97-a3e2-fd30f08fdf18}"
message="$(string.MPI_Imrecv)"
name="p2p:MPI_Imrecv"
symbol="TASK_MPI_Imrecv"
value="298"
/>
<task
eventGUID="{A51D4AD9-2F8E-471D-B107-7E03825E0DE6}"
message="$(string.MPI_Ibarrier)"
name="coll:MPI_Ibarrier"
symbol="TASK_MPI_Ibarrier"
value="299"
/>
<task
eventGUID="{BC7B16ED-7E61-4F1B-9339-F778047C9230}"
message="$(string.MPI_Ibcast)"
name="coll:MPI_Ibcast"
symbol="TASK_MPI_Ibcast"
value="300"
/>
<task
eventGUID="{4B92A054-BD4C-4B13-B166-E517E64B6B00}"
message="$(string.MPI_Igather)"
name="coll:MPI_Igather"
symbol="TASK_MPI_Igather"
value="301"
/>
<task
eventGUID="{F8C4ADF9-E0D2-4457-9333-533100DD2A18}"
message="$(string.MPI_Igatherv)"
name="coll:MPI_Igatherv"
symbol="TASK_MPI_Igatherv"
value="302"
/>
<task
eventGUID="{61004E5A-323D-416E-BE69-AD01E3917EEB}"
message="$(string.MPI_Iscatter)"
name="coll:MPI_Iscatter"
symbol="TASK_MPI_Iscatter"
value="303"
/>
<task
eventGUID="{A8D9C53B-FA00-4B6A-966D-59253E4EC9C7}"
message="$(string.MPI_Iscatterv)"
name="coll:MPI_Iscatterv"
symbol="TASK_MPI_Iscatterv"
value="304"
/>
<task
eventGUID="{05E2A41B-6597-4796-A33B-802985D25848}"
message="$(string.MPI_Iallgather)"
name="coll:MPI_Iallgather"
symbol="TASK_MPI_Iallgather"
value="305"
/>
<!--task
eventGUID="{743A14BD-557F-4E8C-9DC2-0775DEAC3B00}"
message="$(string.MPI_Iallgatherv)"
name="coll:MPI_Iallgatherv"
symbol="TASK_MPI_Iallgatherv"
value="306"
/>
<task
eventGUID="{F7CA06A1-0145-49F0-B465-AD88277724DB}"
message="$(string.MPI_Ialltoall)"
name="coll:MPI_Ialltoall"
symbol="TASK_MPI_Ialltoall"
value="307"
/>
<task
eventGUID="{13784368-36D0-42F5-9580-C5D032FA9684}"
message="$(string.MPI_Ialltoallv)"
name="coll:MPI_Ialltoallv"
symbol="TASK_MPI_Ialltoallv"
value="308"
/>
<task
eventGUID="{594018C9-A9D2-4556-B670-E4E4862BADA1}"
message="$(string.MPI_Ialltoallw)"
name="coll:MPI_Ialltoallw"
symbol="TASK_MPI_Ialltoallw"
value="309"
/-->
<task
eventGUID="{CBE1BC92-09B0-4B9E-AB08-9BFA342F1DDB}"
message="$(string.MPI_Ireduce)"
name="coll:MPI_Ireduce"
symbol="TASK_MPI_Ireduce"
value="310"
/>
<task
eventGUID="{D24D3C80-08DA-49E6-AF12-F37F3C6AC1E0}"
message="$(string.MPI_Iallreduce)"
name="coll:MPI_Iallreduce"
symbol="TASK_MPI_Iallreduce"
value="311"
/>
<!--task
eventGUID="{9349A878-B8B6-4523-A109-073BE638484E}"
message="$(string.MPI_Ireduce_scatter_block)"
name="coll:MPI_Ireduce_scatter_block"
symbol="TASK_MPI_Ireduce_scatter_block"
value="312"
/>
<task
eventGUID="{201959FA-981C-466A-9F2D-F174EDA888E0}"
message="$(string.MPI_Ireduce_scatter)"
name="coll:MPI_Ireduce_scatter"
symbol="TASK_MPI_Ireduce_scatter"
value="313"
/>
<task
eventGUID="{B04C2BC1-AFA4-44C1-84CD-D0C6B80CAB88}"
message="$(string.MPI_Iscan)"
name="coll:MPI_Iscan"
symbol="TASK_MPI_Iscan"
value="314"
/>
<task
eventGUID="{C34A4BA2-B35F-4284-A651-2E323456B66C}"
message="$(string.MPI_Iexscan)"
name="coll:MPI_Iexscan"
symbol="TASK_MPI_Iexscan"
value="315"
/-->
<task
eventGUID="{6D2C1D30-DE41-477F-9CB4-E07AB08386F0}"
message="$(string.sock.defer)"
name="sock:defer"
symbol="TASK_SOCK_DEFER"
value="316"
/>
<task
eventGUID="{E356FB6E-0EDF-4930-9842-BB09633CD58A}"
message="$(string.shm.defer)"
name="shm:defer"
symbol="TASK_SHM_DEFER"
value="317"
/>
<task
eventGUID="{925DBCCF-A5B2-414B-8A48-306CB1B8885B}"
message="$(string.nd.defer)"
name="nd:defer"
symbol="TASK_ND_DEFER"
value="318"
/>
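<!-- Distributed Graph Topology Tasks -->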
<task
eventGUID="{23dbd15f-3812-4d54-9cad-ddc1fb900e09}"
message="$(string.MPI_Dist_graph_neighbors_count)"
name="topo:MPI_Dist_graph_neighbors_count"
symbol="TASK_MPI_Dist_graph_neighbors_count"
value="319"
/>
<task
eventGUID="{ac783a6e-757e-4d12-9219-e594f8a7d23e}"
message="$(string.MPI_Dist_graph_neighbors)"
name="topo:MPI_Dist_graph_neighbors"
symbol="TASK_MPI_Dist_graph_neighbors"
value="320"
/>
<task
eventGUID="{9618033e-fa0b-4dca-98e9-02133be926f4}"
message="$(string.MPI_Dist_graph_create_adjacent)"
name="topo:MPI_Dist_graph_create_adjacent"
symbol="TASK_MPI_Dist_graph_create_adjacent"
value="321"
/>
<task
eventGUID="{736314C2-C005-4660-BB87-8D4E13EFDE95}"
message="$(string.MPI_Dist_graph_create)"
name="topo:MPI_Dist_graph_create"
symbol="TASK_MPI_Dist_graph_create"
value="322"
/>
</tasks>
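<!-- Opcodes: per-task operation codes that identify the stage of a network
     transfer (queued, connect, head, inline, continue, done, packet, data,
     deferred connect/write) when channel events are decoded. -->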
<opcodes>
<opcode
message="$(string.queue)"
name="net:queue"
symbol="OPCODE_MPI_NET_QUEUE"
value="10"
>queued</opcode>
<opcode
message="$(string.connect)"
name="net:connect"
symbol="OPCODE_MPI_NET_CONNECT"
value="11"
>connect</opcode>
<opcode
message="$(string.head)"
name="net:head"
symbol="OPCODE_MPI_NET_HEAD"
value="12"
>head</opcode>
<opcode
message="$(string.inline)"
name="net:inline"
symbol="OPCODE_MPI_NET_INLINE"
value="13"
>inline dump</opcode>
<opcode
message="$(string.continue)"
name="net:continue"
symbol="OPCODE_MPI_NET_CONTINUE"
value="14"
>continue</opcode>
<opcode
message="$(string.done)"
name="net:done"
symbol="OPCODE_MPI_NET_DONE"
value="15"
>done</opcode>
<opcode
message="$(string.done)"
name="net:packet"
symbol="OPCODE_MPI_NET_PACKET"
value="16"
>packet</opcode>
<opcode
message="$(string.done)"
name="net:data"
symbol="OPCODE_MPI_NET_DATA"
value="17"
>data</opcode>
<opcode
message="$(string.deferconnect)"
name="net:deferconnect"
symbol="OPCODE_MPI_NET_DEFER_CONNECT"
value="18"
>deferconnect</opcode>
<opcode
message="$(string.deferwrite)"
name="net:deferwrite"
symbol="OPCODE_MPI_NET_DEFER_WRITE"
value="19"
>deferwrite</opcode>
</opcodes>
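<!-- Keywords: bit masks that an ETW session uses to filter events. The low
     bits select an API or channel group (p2p, coll, rma, io, sock, shm, nd,
     msg, ...); the high bits select a tracing aspect (payload data, API
     enter/leave/error). Masks combine bitwise; for example, tracing
     point-to-point entry and exit would enable
     mpi:p2p | mpi:api_enter | mpi:api_leave = 0x600000000001. -->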
<keywords>
<keyword
mask="0x000000000001"
name="mpi:p2p"
symbol="KEYWORD_MPI_P2P"
/>
<keyword
mask="0x000000000002"
name="mpi:poll"
symbol="KEYWORD_MPI_POLL"
/>
<keyword
mask="0x000000000004"
name="mpi:coll"
symbol="KEYWORD_MPI_COLL"
/>
<keyword
mask="0x000000000008"
name="mpi:rma"
symbol="KEYWORD_MPI_RMA"
/>
<keyword
mask="0x000000000010"
name="mpi:comm"
symbol="KEYWORD_MPI_COMM"
/>
<keyword
mask="0x000000000020"
name="mpi:eh"
symbol="KEYWORD_MPI_EH"
/>
<keyword
mask="0x000000000040"
name="mpi:grp"
symbol="KEYWORD_MPI_GRP"
/>
<keyword
mask="0x000000000080"
name="mpi:attr"
symbol="KEYWORD_MPI_ATTR"
/>
<keyword
mask="0x000000000100"
name="mpi:dt"
symbol="KEYWORD_MPI_DT"
/>
<keyword
mask="0x000000000200"
name="mpi:io"
symbol="KEYWORD_MPI_IO"
/>
<keyword
mask="0x000000000400"
name="mpi:topo"
symbol="KEYWORD_MPI_TOPO"
/>
<keyword
mask="0x000000000800"
name="mpi:spwn"
symbol="KEYWORD_MPI_SPAWN"
/>
<keyword
mask="0x000000001000"
name="mpi:init"
symbol="KEYWORD_MPI_INIT"
/>
<keyword
mask="0x000000002000"
name="mpi:info"
symbol="KEYWORD_MPI_INFO"
/>
<keyword
mask="0x000000004000"
name="mpi:misc"
symbol="KEYWORD_MPI_MISC"
/>
<keyword
mask="0x000000008000"
name="mpi:sock"
symbol="KEYWORD_MPI_SOCKETS"
/>
<keyword
mask="0x000000010000"
name="mpi:shm"
symbol="KEYWORD_MPI_SHAREDMEMORY"
/>
<keyword
mask="0x000000020000"
name="mpi:nd"
symbol="KEYWORD_MPI_NETWORKDIRECT"
/>
<keyword
mask="0x000000040000"
name="mpi:msg"
symbol="KEYWORD_MPI_MSG"
/>
<keyword
mask="0x010000000000"
name="mpi:net_rdata"
symbol="KEYWORD_MPI_NET_RDATA"
/>
<keyword
mask="0x020000000000"
name="mpi:net_sdata"
symbol="KEYWORD_MPI_NET_SDATA"
/>
<keyword
mask="0x200000000000"
name="mpi:api_enter"
symbol="KEYWORD_MPI_API_ENTER"
/>
<keyword
mask="0x400000000000"
name="mpi:api_leave"
symbol="KEYWORD_MPI_API_LEAVE"
/>
<keyword
mask="0x800000000000"
name="mpi:api_error"
symbol="KEYWORD_MPI_API_ERROR"
/>
</keywords>
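<!-- Value maps: translate raw numeric field values in event payloads
     (error codes, datatype/communicator/window handles, ranks, tags)
     into their symbolic MPI names when a trace is rendered. -->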
<maps>
<valueMap name="MPI_ERROR">
<map
message="$(string.MPI_SUCCESS)"
value="0"
>MPI_SUCCESS</map>
<map
message="$(string.MPI_ERR_BUFFER)"
value="1"
>MPI_ERR_BUFFER</map>
<map
message="$(string.MPI_ERR_TOPOLOGY)"
value="10"
>MPI_ERR_TOPOLOGY</map>
<map
message="$(string.MPI_ERR_DIMS)"
value="11"
>MPI_ERR_DIMS</map>
<map
message="$(string.MPI_ERR_ARG)"
value="12"
>MPI_ERR_ARG</map>
<map
message="$(string.MPI_ERR_UNKNOWN)"
value="13"
>MPI_ERR_UNKNOWN</map>
<map
message="$(string.MPI_ERR_TRUNCATE)"
value="14"
>MPI_ERR_TRUNCATE</map>
<map
message="$(string.MPI_ERR_OTHER)"
value="15"
>MPI_ERR_OTHER</map>
<map
message="$(string.MPI_ERR_INTERN)"
value="16"
>MPI_ERR_INTERN</map>
<map
message="$(string.MPI_ERR_IN_STATUS)"
value="17"
>MPI_ERR_IN_STATUS</map>
<map
message="$(string.MPI_ERR_PENDING)"
value="18"
>MPI_ERR_PENDING</map>
<map
message="$(string.MPI_ERR_REQUEST)"
value="19"
>MPI_ERR_REQUEST</map>
<map
message="$(string.MPI_ERR_COUNT)"
value="2"
>MPI_ERR_COUNT</map>
<map
message="$(string.MPI_ERR_ACCESS)"
value="20"
>MPI_ERR_ACCESS</map>
<map
message="$(string.MPI_ERR_AMODE)"
value="21"
>MPI_ERR_AMODE</map>
<map
message="$(string.MPI_ERR_BAD_FILE)"
value="22"
>MPI_ERR_BAD_FILE</map>
<map
message="$(string.MPI_ERR_CONVERSION)"
value="23"
>MPI_ERR_CONVERSION</map>
<map
message="$(string.MPI_ERR_DUP_DATAREP)"
value="24"
>MPI_ERR_DUP_DATAREP</map>
<map
message="$(string.MPI_ERR_FILE_EXISTS)"
value="25"
>MPI_ERR_FILE_EXISTS</map>
<map
message="$(string.MPI_ERR_FILE_IN_USE)"
value="26"
>MPI_ERR_FILE_IN_USE</map>
<map
message="$(string.MPI_ERR_FILE)"
value="27"
>MPI_ERR_FILE</map>
<map
message="$(string.MPI_ERR_INFO)"
value="28"
>MPI_ERR_INFO</map>
<map
message="$(string.MPI_ERR_INFO_KEY)"
value="29"
>MPI_ERR_INFO_KEY</map>
<map
message="$(string.MPI_ERR_TYPE)"
value="3"
>MPI_ERR_TYPE</map>
<map
message="$(string.MPI_ERR_INFO_VALUE)"
value="30"
>MPI_ERR_INFO_VALUE</map>
<map
message="$(string.MPI_ERR_INFO_NOKEY)"
value="31"
>MPI_ERR_INFO_NOKEY</map>
<map
message="$(string.MPI_ERR_IO)"
value="32"
>MPI_ERR_IO</map>
<map
message="$(string.MPI_ERR_NAME)"
value="33"
>MPI_ERR_NAME</map>
<map
message="$(string.MPI_ERR_NO_MEM)"
value="34"
>MPI_ERR_NO_MEM</map>
<map
message="$(string.MPI_ERR_NOT_SAME)"
value="35"
>MPI_ERR_NOT_SAME</map>
<map
message="$(string.MPI_ERR_NO_SPACE)"
value="36"
>MPI_ERR_NO_SPACE</map>
<map
message="$(string.MPI_ERR_NO_SUCH_FILE)"
value="37"
>MPI_ERR_NO_SUCH_FILE</map>
<map
message="$(string.MPI_ERR_PORT)"
value="38"
>MPI_ERR_PORT</map>
<map
message="$(string.MPI_ERR_QUOTA)"
value="39"
>MPI_ERR_QUOTA</map>
<map
message="$(string.MPI_ERR_TAG)"
value="4"
>MPI_ERR_TAG</map>
<map
message="$(string.MPI_ERR_READ_ONLY)"
value="40"
>MPI_ERR_READ_ONLY</map>
<map
message="$(string.MPI_ERR_SERVICE)"
value="41"
>MPI_ERR_SERVICE</map>
<map
message="$(string.MPI_ERR_SPAWN)"
value="42"
>MPI_ERR_SPAWN</map>
<map
message="$(string.MPI_ERR_UNSUPPORTED_DATAREP)"
value="43"
>MPI_ERR_UNSUPPORTED_DATAREP</map>
<map
message="$(string.MPI_ERR_UNSUPPORTED_OPERATION)"
value="44"
>MPI_ERR_UNSUPPORTED_OPERATION</map>
<map
message="$(string.MPI_ERR_WIN)"
value="45"
>MPI_ERR_WIN</map>
<map
message="$(string.MPI_ERR_BASE)"
value="46"
>MPI_ERR_BASE</map>
<map
message="$(string.MPI_ERR_LOCKTYPE)"
value="47"
>MPI_ERR_LOCKTYPE</map>
<map
message="$(string.MPI_ERR_KEYVAL)"
value="48"
>MPI_ERR_KEYVAL</map>
<map
message="$(string.MPI_ERR_RMA_CONFLICT)"
value="49"
>MPI_ERR_RMA_CONFLICT</map>
<map
message="$(string.MPI_ERR_COMM)"
value="5"
>MPI_ERR_COMM</map>
<map
message="$(string.MPI_ERR_RMA_SYNC)"
value="50"
>MPI_ERR_RMA_SYNC</map>
<map
message="$(string.MPI_ERR_SIZE)"
value="51"
>MPI_ERR_SIZE</map>
<map
message="$(string.MPI_ERR_DISP)"
value="52"
>MPI_ERR_DISP</map>
<map
message="$(string.MPI_ERR_ASSERT)"
value="53"
>MPI_ERR_ASSERT</map>
<map
message="$(string.MPI_ERR_RANK)"
value="6"
>MPI_ERR_RANK</map>
<map
message="$(string.MPI_ERR_ROOT)"
value="7"
>MPI_ERR_ROOT</map>
<map
message="$(string.MPI_ERR_GROUP)"
value="8"
>MPI_ERR_GROUP</map>
<map
message="$(string.MPI_ERR_OP)"
value="9"
>MPI_ERR_OP</map>
</valueMap>
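<!-- The builtin datatype handles below appear to follow the MPICH handle
     encoding, where the element size in bytes is carried inside the handle
     (for example, MPI_INT = 0x4c000405 is 4 bytes and MPI_DOUBLE =
     0x4c00080b is 8 bytes); the 0x8c000000 range holds the predefined pair
     types used with MPI_MINLOC and MPI_MAXLOC. -->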
<valueMap name="MPI_Datatype">
<map
message="$(string.MPI_LB)"
value="0x4c000010"
>MPI_LB</map>
<map
message="$(string.MPI_UB)"
value="0x4c000011"
>MPI_UB</map>
<map
message="$(string.MPI_CHAR)"
value="0x4c000101"
>MPI_CHAR</map>
<map
message="$(string.MPI_UNSIGNED_CHAR)"
value="0x4c000102"
>MPI_UNSIGNED_CHAR</map>
<map
message="$(string.MPI_BYTE)"
value="0x4c00010d"
>MPI_BYTE</map>
<map
message="$(string.MPI_PACKED)"
value="0x4c00010f"
>MPI_PACKED</map>
<map
message="$(string.MPI_SIGNED_CHAR)"
value="0x4c000118"
>MPI_SIGNED_CHAR</map>
<map
message="$(string.MPI_CHARACTER)"
value="0x4c00011a"
>MPI_CHARACTER</map>
<map
message="$(string.MPI_INTEGER1)"
value="0x4c00012d"
>MPI_INTEGER1</map>
<map
message="$(string.MPI_SHORT)"
value="0x4c000203"
>MPI_SHORT</map>
<map
message="$(string.MPI_UNSIGNED_SHORT)"
value="0x4c000204"
>MPI_UNSIGNED_SHORT</map>
<map
message="$(string.MPI_WCHAR)"
value="0x4c00020e"
>MPI_WCHAR</map>
<map
message="$(string.MPI_INTEGER2)"
value="0x4c00022f"
>MPI_INTEGER2</map>
<map
message="$(string.MPI_INT)"
value="0x4c000405"
>MPI_INT</map>
<map
message="$(string.MPI_UNSIGNED)"
value="0x4c000406"
>MPI_UNSIGNED</map>
<map
message="$(string.MPI_LONG)"
value="0x4c000407"
>MPI_LONG</map>
<map
message="$(string.MPI_UNSIGNED_LONG)"
value="0x4c000408"
>MPI_UNSIGNED_LONG</map>
<map
message="$(string.MPI_FLOAT)"
value="0x4c00040a"
>MPI_FLOAT</map>
<map
message="$(string.MPI_INTEGER)"
value="0x4c00041b"
>MPI_INTEGER</map>
<map
message="$(string.MPI_REAL)"
value="0x4c00041c"
>MPI_REAL</map>
<map
message="$(string.MPI_LOGICAL)"
value="0x4c00041d"
>MPI_LOGICAL</map>
<map
message="$(string.MPI_REAL4)"
value="0x4c000427"
>MPI_REAL4</map>
<map
message="$(string.MPI_INTEGER4)"
value="0x4c000430"
>MPI_INTEGER4</map>
<map
message="$(string.MPI_LONG_LONG_INT)"
value="0x4c000809"
>MPI_LONG_LONG_INT</map>
<map
message="$(string.MPI_DOUBLE)"
value="0x4c00080b"
>MPI_DOUBLE</map>
<map
message="$(string.MPI_LONG_DOUBLE)"
value="0x4c00080c"
>MPI_LONG_DOUBLE</map>
<map
message="$(string.MPI_2INT)"
value="0x4c000816"
>MPI_2INT</map>
<map
message="$(string.MPI_UNSIGNED_LONG_LONG)"
value="0x4c000819"
>MPI_UNSIGNED_LONG_LONG</map>
<map
message="$(string.MPI_COMPLEX)"
value="0x4c00081e"
>MPI_COMPLEX</map>
<map
message="$(string.MPI_DOUBLE_PRECISION)"
value="0x4c00081f"
>MPI_DOUBLE_PRECISION</map>
<map
message="$(string.MPI_2INTEGER)"
value="0x4c000820"
>MPI_2INTEGER</map>
<map
message="$(string.MPI_2REAL)"
value="0x4c000821"
>MPI_2REAL</map>
<map
message="$(string.MPI_COMPLEX8)"
value="0x4c000828"
>MPI_COMPLEX8</map>
<map
message="$(string.MPI_REAL8)"
value="0x4c000829"
>MPI_REAL8</map>
<map
message="$(string.MPI_INTEGER8)"
value="0x4c000831"
>MPI_INTEGER8</map>
<map
message="$(string.MPI_DOUBLE_COMPLEX)"
value="0x4c001022"
>MPI_DOUBLE_COMPLEX</map>
<map
message="$(string.MPI_2DOUBLE_PRECISION)"
value="0x4c001023"
>MPI_2DOUBLE_PRECISION</map>
<map
message="$(string.MPI_2COMPLEX)"
value="0x4c001024"
>MPI_2COMPLEX</map>
<map
message="$(string.MPI_COMPLEX16)"
value="0x4c00102a"
>MPI_COMPLEX16</map>
<map
message="$(string.MPI_REAL16)"
value="0x4c00102b"
>MPI_REAL16</map>
<map
message="$(string.MPI_INTEGER16)"
value="0x4c001032"
>MPI_INTEGER16</map>
<map
message="$(string.MPI_2DOUBLE_COMPLEX)"
value="0x4c002025"
>MPI_2DOUBLE_COMPLEX</map>
<map
message="$(string.MPI_COMPLEX32)"
value="0x4c00202c"
>MPI_COMPLEX32</map>
<map
message="$(string.MPI_FLOAT_INT)"
value="0x8c000000"
>MPI_FLOAT_INT</map>
<map
message="$(string.MPI_DOUBLE_INT)"
value="0x8c000001"
>MPI_DOUBLE_INT</map>
<map
message="$(string.MPI_LONG_INT)"
value="0x8c000002"
>MPI_LONG_INT</map>
<map
message="$(string.MPI_SHORT_INT)"
value="0x8c000003"
>MPI_SHORT_INT</map>
<map
message="$(string.MPI_LONG_DOUBLE_INT)"
value="0x8c000004"
>MPI_LONG_DOUBLE_INT</map>
</valueMap>
<valueMap name="MPI_Op">
<map
message="$(string.MPI_OP_NULL)"
value="0x18000000"
>MPI_OP_NULL</map>
<map
message="$(string.MPI_MAX)"
value="0x58000001"
>MPI_MAX</map>
<map
message="$(string.MPI_MIN)"
value="0x58000002"
>MPI_MIN</map>
<map
message="$(string.MPI_SUM)"
value="0x58000003"
>MPI_SUM</map>
<map
message="$(string.MPI_PROD)"
value="0x58000004"
>MPI_PROD</map>
<map
message="$(string.MPI_LAND)"
value="0x58000005"
>MPI_LAND</map>
<map
message="$(string.MPI_BAND)"
value="0x58000006"
>MPI_BAND</map>
<map
message="$(string.MPI_LOR)"
value="0x58000007"
>MPI_LOR</map>
<map
message="$(string.MPI_BOR)"
value="0x58000008"
>MPI_BOR</map>
<map
message="$(string.MPI_LXOR)"
value="0x58000009"
>MPI_LXOR</map>
<map
message="$(string.MPI_BXOR)"
value="0x5800000a"
>MPI_BXOR</map>
<map
message="$(string.MPI_MINLOC)"
value="0x5800000b"
>MPI_MINLOC</map>
<map
message="$(string.MPI_MAXLOC)"
value="0x5800000c"
>MPI_MAXLOC</map>
<map
message="$(string.MPI_REPLACE)"
value="0x5800000d"
>MPI_REPLACE</map>
</valueMap>
<valueMap name="MPI_Comm">
<map
message="$(string.MPI_COMM_NULL)"
value="0x04000000"
>MPI_COMM_NULL</map>
<map
message="$(string.MPI_COMM_WORLD)"
value="0x44000000"
>MPI_COMM_WORLD</map>
<map
message="$(string.MPI_COMM_SELF)"
value="0x44000001"
>MPI_COMM_SELF</map>
</valueMap>
<valueMap name="MPI_Win">
<map
message="$(string.MPI_WIN_NULL)"
value="0x20000000"
>MPI_WIN_NULL</map>
</valueMap>
<valueMap name="MPI_Info">
<map
message="$(string.MPI_INFO_NULL)"
value="0x1c000000"
>MPI_INFO_NULL</map>
</valueMap>
<valueMap name="MPI_Request">
<map
message="$(string.MPI_REQUEST_NULL)"
value="0x2c000000"
>MPI_REQUEST_NULL</map>
</valueMap>
<valueMap name="MPI_Group">
<map
message="$(string.MPI_GROUP_NULL)"
value="0x08000000"
>MPI_GROUP_NULL</map>
<map
message="$(string.MPI_GROUP_EMPTY)"
value="0x48000000"
>MPI_GROUP_EMPTY</map>
</valueMap>
<valueMap name="MPI_Errorhandler">
<map
message="$(string.MPI_ERRHANDLER_NULL)"
value="0x14000000"
>MPI_ERRHANDLER_NULL</map>
<map
message="$(string.MPI_ERRORS_ARE_FATAL)"
value="0x54000000"
>MPI_ERRORS_ARE_FATAL</map>
<map
message="$(string.MPI_ERRORS_RETURN)"
value="0x54000001"
>MPI_ERRORS_RETURN</map>
</valueMap>
<valueMap name="MPI_Rank">
<map
message="$(string.MPI_PROC_NULL)"
value="0xFFFFFFFF"
>MPI_PROC_NULL</map>
<map
message="$(string.MPI_ANY_SOURCE)"
value="0xFFFFFFFE"
>MPI_ANY_SOURCE</map>
<map
message="$(string.MPI_ROOT)"
value="0xFFFFFFFD"
>MPI_ROOT</map>
</valueMap>
<valueMap name="MPI_Tag">
<map
message="$(string.MPI_ANY_TAG)"
value="0xFFFFFFFF"
>MPI_ANY_TAG</map>
</valueMap>
<valueMap name="MPI_Message">
<map
message="$(string.MPI_MESSAGE_NULL)"
value="0x2c000000"
>MPI_MESSAGE_NULL</map>
<map
message="$(string.MPI_MESSAGE_NO_PROC)"
value="0x6c000000"
>MPI_MESSAGE_NO_PROC</map>
</valueMap>
</maps>
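<!-- Templates: payload layouts for the events above. A template tid of the
     form "Xxx.enter" carries the input arguments of the corresponding API
     call, and "Xxx.leave" carries its outputs. A data item with a "count"
     attribute is a variable-length array whose length is supplied by the
     named preceding field, as in MPI_Allgatherv.enter. -->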
<templates>
<template tid="MPI_Comm_create_keyval.enter">
<data
inType="win:Pointer"
name="comm_copy_attr_fn"
/>
<data
inType="win:Pointer"
name="comm_delete_attr_fn"
/>
<data
inType="win:Pointer"
name="extra_state"
/>
</template>
<template tid="MPI_Comm_create_keyval.leave">
<data
inType="win:UInt32"
name="comm_keyval"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Comm_delete_attr.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
name="comm_keyval"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Comm_free_keyval.enter">
<data
inType="win:UInt32"
name="comm_keyval"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Comm_get_attr.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
name="comm_keyval"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Comm_get_attr.leave">
<data
inType="win:Pointer"
name="attribute_val"
/>
<data
inType="win:Int32"
name="flag"
/>
</template>
<template tid="MPI_Comm_set_attr.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
name="comm_keyval"
outType="win:HexInt32"
/>
<data
inType="win:Pointer"
name="attribute_val"
/>
</template>
<template tid="MPI_Type_create_keyval.enter">
<data
inType="win:Pointer"
name="type_copy_attr_fn"
/>
<data
inType="win:Pointer"
name="type_delete_attr_fn"
/>
<data
inType="win:Pointer"
name="extra_state"
/>
</template>
<template tid="MPI_Type_create_keyval.leave">
<data
inType="win:UInt32"
name="type_keyval"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Type_delete_attr.enter">
<data
inType="win:UInt32"
map="MPI_Datatype"
name="type"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
name="type_keyval"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Type_free_keyval.enter">
<data
inType="win:UInt32"
name="comm_keyval"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Type_get_attr.enter">
<data
inType="win:UInt32"
map="MPI_Datatype"
name="type"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
name="type_keyval"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Type_get_attr.leave">
<data
inType="win:Pointer"
name="attribute_val"
/>
<data
inType="win:Int32"
name="flag"
/>
</template>
<template tid="MPI_Type_set_attr.enter">
<data
inType="win:UInt32"
map="MPI_Datatype"
name="type"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
name="type_keyval"
outType="win:HexInt32"
/>
<data
inType="win:Pointer"
name="attribute_val"
/>
</template>
<template tid="MPI_Win_create_keyval.enter">
<data
inType="win:Pointer"
name="win_copy_attr_fn"
/>
<data
inType="win:Pointer"
name="win_delete_attr_fn"
/>
<data
inType="win:Pointer"
name="extra_state"
/>
</template>
<template tid="MPI_Win_create_keyval.leave">
<data
inType="win:UInt32"
name="win_keyval"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Win_delete_attr.enter">
<data
inType="win:UInt32"
map="MPI_Win"
name="win"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
name="win_keyval"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Win_free_keyval.enter">
<data
inType="win:UInt32"
name="win_keyval"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Win_get_attr.enter">
<data
inType="win:UInt32"
map="MPI_Win"
name="win"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
name="win_keyval"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Win_get_attr.leave">
<data
inType="win:Pointer"
name="attribute_val"
/>
<data
inType="win:Int32"
name="flag"
/>
</template>
<template tid="MPI_Win_set_attr.enter">
<data
inType="win:UInt32"
map="MPI_Win"
name="win"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
name="win_keyval"
outType="win:HexInt32"
/>
<data
inType="win:Pointer"
name="attribute_val"
/>
</template>
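<!-- Collective operations -->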
<template tid="MPI_Allgather.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
<data
inType="win:Pointer"
name="sendbuf"
/>
<data
inType="win:Pointer"
name="recvbuf"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="sendtype"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="recvtype"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="sendcount"
/>
<data
inType="win:Int32"
name="recvcount"
/>
</template>
<template tid="MPI_Iallgather.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
<data
inType="win:Pointer"
name="sendbuf"
/>
<data
inType="win:Pointer"
name="recvbuf"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="sendtype"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="recvtype"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="sendcount"
/>
<data
inType="win:Int32"
name="recvcount"
/>
</template>
<template tid="MPI_Iallgather.leave">
<data
inType="win:UInt32"
name="request"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Allgatherv.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
<data
inType="win:Pointer"
name="sendbuf"
/>
<data
inType="win:Pointer"
name="recvbuf"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="sendtype"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="recvtype"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="sendcount"
/>
<data
inType="win:UInt8"
name="max_recvcounts"
/>
<data
count="max_recvcounts"
inType="win:Int32"
name="recvcounts"
/>
<data
inType="win:UInt8"
name="max_displs"
/>
<data
count="max_displs"
inType="win:Int32"
name="displs"
/>
</template>
<template tid="MPI_Allreduce.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
<data
inType="win:Pointer"
name="sendbuf"
/>
<data
inType="win:Pointer"
name="recvbuf"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:UInt32"
map="MPI_Op"
name="op"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Iallreduce.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
<data
inType="win:Pointer"
name="sendbuf"
/>
<data
inType="win:Pointer"
name="recvbuf"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:UInt32"
map="MPI_Op"
name="op"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Iallreduce.leave">
<data
inType="win:UInt32"
name="request"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Alltoall.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
<data
inType="win:Pointer"
name="sendbuf"
/>
<data
inType="win:Pointer"
name="recvbuf"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="sendtype"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="recvtype"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="sendcount"
/>
<data
inType="win:Int32"
name="recvcount"
/>
</template>
<template tid="MPI_Alltoallv.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
<data
inType="win:Pointer"
name="sendbuf"
/>
<data
inType="win:Pointer"
name="recvbuf"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="sendtype"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="recvtype"
outType="win:HexInt32"
/>
<data
inType="win:UInt8"
name="max_sendcnts"
/>
<data
count="max_sendcnts"
inType="win:Int32"
name="sendcnts"
/>
<data
inType="win:UInt8"
name="max_recvcnts"
/>
<data
count="max_recvcnts"
inType="win:Int32"
name="recvcnts"
/>
<data
inType="win:UInt8"
name="max_sdispls"
/>
<data
count="max_sdispls"
inType="win:Int32"
name="sdispls"
/>
<data
inType="win:UInt8"
name="max_rdispls"
/>
<data
count="max_rdispls"
inType="win:Int32"
name="rdispls"
/>
</template>
<template tid="MPI_Alltoallw.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
<data
inType="win:Pointer"
name="sendbuf"
/>
<data
inType="win:Pointer"
name="recvbuf"
/>
<data
inType="win:UInt8"
name="max_sendtypes"
/>
<data
count="max_sendtypes"
inType="win:UInt32"
map="MPI_Datatype"
name="sendtypes"
outType="win:HexInt32"
/>
<data
inType="win:UInt8"
name="max_recvtypes"
/>
<data
count="max_recvtypes"
inType="win:UInt32"
map="MPI_Datatype"
name="recvtypes"
outType="win:HexInt32"
/>
<data
inType="win:UInt8"
name="max_sendcnts"
/>
<data
count="max_sendcnts"
inType="win:Int32"
name="sendcnts"
/>
<data
inType="win:UInt8"
name="max_recvcnts"
/>
<data
count="max_recvcnts"
inType="win:Int32"
name="recvcnts"
/>
<data
inType="win:UInt8"
name="max_sdispls"
/>
<data
count="max_sdispls"
inType="win:Int32"
name="sdispls"
/>
<data
inType="win:UInt8"
name="max_rdispls"
/>
<data
count="max_rdispls"
inType="win:Int32"
name="rdispls"
/>
</template>
<template tid="MPI_Barrier.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Ibarrier.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Ibarrier.leave">
<data
inType="win:UInt32"
name="request"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Bcast.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
<data
inType="win:Pointer"
name="buffer"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:Int32"
name="root"
/>
</template>
<template tid="MPI_Ibcast.enter">
<data
inType="win:Pointer"
name="buffer"
/>
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="root"
/>
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Ibcast.leave">
<data
inType="win:UInt32"
name="request"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Exscan.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
<data
inType="win:Pointer"
name="sendbuf"
/>
<data
inType="win:Pointer"
name="recvbuf"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:UInt32"
map="MPI_Op"
name="op"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Gather.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
<data
inType="win:Pointer"
name="sendbuf"
/>
<data
inType="win:Pointer"
name="recvbuf"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="sendtype"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="recvtype"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="sendcnt"
/>
<data
inType="win:Int32"
name="recvcnt"
/>
<data
inType="win:Int32"
name="root"
/>
</template>
<template tid="MPI_Igather.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
<data
inType="win:Pointer"
name="sendbuf"
/>
<data
inType="win:Pointer"
name="recvbuf"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="sendtype"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="recvtype"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="sendcnt"
/>
<data
inType="win:Int32"
name="recvcnt"
/>
<data
inType="win:Int32"
name="root"
/>
</template>
<template tid="MPI_Igather.leave">
<data
inType="win:UInt32"
name="request"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Gatherv.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
<data
inType="win:Pointer"
name="sendbuf"
/>
<data
inType="win:Pointer"
name="recvbuf"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="sendtype"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="recvtype"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="sendcnt"
/>
<data
inType="win:UInt8"
name="max_recvcnts"
/>
<data
count="max_recvcnts"
inType="win:Int32"
name="recvcnts"
/>
<data
inType="win:UInt8"
name="max_displs"
/>
<data
count="max_displs"
inType="win:Int32"
name="displs"
/>
<data
inType="win:Int32"
name="root"
/>
</template>
<template tid="MPI_Igatherv.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
<data
inType="win:Pointer"
name="sendbuf"
/>
<data
inType="win:Pointer"
name="recvbuf"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="sendtype"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="recvtype"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="sendcnt"
/>
<data
inType="win:UInt8"
name="max_recvcnts"
/>
<data
count="max_recvcnts"
inType="win:Int32"
name="recvcnts"
/>
<data
inType="win:UInt8"
name="max_displs"
/>
<data
count="max_displs"
inType="win:Int32"
name="displs"
/>
<data
inType="win:Int32"
name="root"
/>
</template>
<template tid="MPI_Igatherv.leave">
<data
inType="win:UInt32"
name="request"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Op_create.enter">
<data
inType="win:Pointer"
name="function"
/>
<data
inType="win:Int32"
name="commute"
/>
</template>
<template tid="MPI_Op_create.leave">
<data
inType="win:UInt32"
map="MPI_Op"
name="op"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Op_free.enter">
<data
inType="win:UInt32"
map="MPI_Op"
name="op"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Reduce_scatter.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
<data
inType="win:Pointer"
name="sendbuf"
/>
<data
inType="win:Pointer"
name="recvbuf"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
<data
inType="win:UInt8"
name="max_recvcnts"
/>
<data
count="max_recvcnts"
inType="win:Int32"
name="recvcnts"
/>
<data
inType="win:UInt32"
map="MPI_Op"
name="op"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Reduce.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
<data
inType="win:Pointer"
name="sendbuf"
/>
<data
inType="win:Pointer"
name="recvbuf"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:UInt32"
map="MPI_Op"
name="op"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="root"
/>
</template>
<template tid="MPI_Ireduce.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
<data
inType="win:Pointer"
name="sendbuf"
/>
<data
inType="win:Pointer"
name="recvbuf"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:UInt32"
map="MPI_Op"
name="op"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="root"
/>
</template>
<template tid="MPI_Ireduce.leave">
<data
inType="win:UInt32"
name="request"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Scan.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
<data
inType="win:Pointer"
name="sendbuf"
/>
<data
inType="win:Pointer"
name="recvbuf"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:UInt32"
map="MPI_Op"
name="op"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Scatter.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
<data
inType="win:Pointer"
name="sendbuf"
/>
<data
inType="win:Pointer"
name="recvbuf"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="sendtype"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="recvtype"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="sendcnt"
/>
<data
inType="win:Int32"
name="recvcnt"
/>
<data
inType="win:Int32"
name="root"
/>
</template>
<template tid="MPI_Iscatter.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
<data
inType="win:Pointer"
name="sendbuf"
/>
<data
inType="win:Pointer"
name="recvbuf"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="sendtype"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="recvtype"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="sendcnt"
/>
<data
inType="win:Int32"
name="recvcnt"
/>
<data
inType="win:Int32"
name="root"
/>
</template>
<template tid="MPI_Iscatter.leave">
<data
inType="win:UInt32"
name="request"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Scatterv.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
<data
inType="win:Pointer"
name="sendbuf"
/>
<data
inType="win:Pointer"
name="recvbuf"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="sendtype"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="recvtype"
outType="win:HexInt32"
/>
<data
inType="win:UInt8"
name="max_sendcnts"
/>
<data
count="max_sendcnts"
inType="win:Int32"
name="sendcnts"
/>
<data
inType="win:Int32"
name="recvcount"
/>
<data
inType="win:UInt8"
name="max_displs"
/>
<data
count="max_displs"
inType="win:Int32"
name="displs"
/>
<data
inType="win:Int32"
name="root"
/>
</template>
<template tid="MPI_Iscatterv.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
<data
inType="win:Pointer"
name="sendbuf"
/>
<data
inType="win:Pointer"
name="recvbuf"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="sendtype"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="recvtype"
outType="win:HexInt32"
/>
<data
inType="win:UInt8"
name="max_sendcnts"
/>
<data
count="max_sendcnts"
inType="win:Int32"
name="sendcnts"
/>
<data
inType="win:Int32"
name="recvcount"
/>
<data
inType="win:UInt8"
name="max_displs"
/>
<data
count="max_displs"
inType="win:Int32"
name="displs"
/>
<data
inType="win:Int32"
name="root"
/>
</template>
<template tid="MPI_Iscatterv.leave">
<data
inType="win:UInt32"
name="request"
outType="win:HexInt32"
/>
</template>
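<!-- Communicator management -->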
<template tid="MPI_Comm_compare.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm1"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm2"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Comm_compare.leave">
<data
inType="win:Int32"
name="result"
/>
</template>
<template tid="MPI_Comm_create.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Group"
name="group"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Comm_create.leave">
<data
inType="win:UInt32"
name="newcomm"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Comm_dup.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Comm_dup.leave">
<data
inType="win:UInt32"
name="newcomm"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Comm_free.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Comm_get_name.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Comm_get_name.leave">
<data
inType="win:UInt32"
name="resultlen"
/>
<data
inType="win:AnsiString"
length="resultlen"
name="name"
/>
</template>
<template tid="MPI_Comm_group.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Comm_group.leave">
<data
inType="win:UInt32"
map="MPI_Group"
name="group"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Comm_rank.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Comm_rank.leave">
<data
inType="win:UInt32"
map="MPI_Rank"
name="rank"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Comm_remote_group.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Comm_remote_group.leave">
<data
inType="win:UInt32"
map="MPI_Group"
name="group"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Comm_remote_size.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Comm_remote_size.leave">
<data
inType="win:Int32"
name="size"
/>
</template>
<template tid="MPI_Comm_set_name.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
<data
inType="win:AnsiString"
name="name"
/>
</template>
<template tid="MPI_Comm_size.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Comm_size.leave">
<data
inType="win:Int32"
name="size"
/>
</template>
<template tid="MPI_Comm_split.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="color"
/>
<data
inType="win:Int32"
name="key"
/>
</template>
<template tid="MPI_Comm_split.leave">
<data
inType="win:UInt32"
name="newcomm"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Comm_split_type.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="split_type"
/>
<data
inType="win:Int32"
name="key"
/>
<data
inType="win:UInt32"
map="MPI_Info"
name="info"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Comm_split_type.leave">
<data
inType="win:UInt32"
name="newcomm"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Comm_test_inter.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Comm_test_inter.leave">
<data
inType="win:Int32"
name="inter"
/>
</template>
<template tid="MPI_Intercomm_create.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="local_comm"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="local_leader"
/>
<data
inType="win:UInt32"
map="MPI_Comm"
name="peer_comm"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="remote_leader"
/>
<data
inType="win:UInt32"
map="MPI_Tag"
name="tag"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Intercomm_create.leave">
<data
inType="win:UInt32"
name="newintercomm"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Intercomm_merge.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="intercomm"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="high"
/>
</template>
<template tid="MPI_Intercomm_merge.leave">
<data
inType="win:UInt32"
name="newintercomm"
outType="win:HexInt32"
/>
</template>
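<!-- Datatypes, packing, and status manipulation -->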
<template tid="MPI_Get_address.enter">
<data
inType="win:Pointer"
name="location"
/>
</template>
<template tid="MPI_Get_address.leave">
<data
inType="win:Int64"
name="address"
/>
</template>
<template tid="MPI_Get_count.enter">
<data
inType="win:Pointer"
name="status"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Get_count.leave">
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:Int64"
name="status_count"
/>
</template>
<template tid="MPI_Get_elements.enter">
<data
inType="win:Pointer"
name="status"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Get_elements.leave">
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:Int64"
name="byte_count"
/>
</template>
<template tid="MPI_Get_elements_x.enter">
<data
inType="win:Pointer"
name="status"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Get_elements_x.leave">
<data
inType="win:Int64"
name="count"
/>
<data
inType="win:Int64"
name="byte_count"
/>
</template>
<template tid="MPI_Pack.enter">
<data
inType="win:Pointer"
name="inbuf"
/>
<data
inType="win:Int32"
name="incount"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
<data
inType="win:Pointer"
name="outbuf"
/>
<data
inType="win:Int32"
name="outcount"
/>
<data
inType="win:Int32"
name="position"
/>
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Pack.leave">
<data
inType="win:Int32"
name="position"
/>
</template>
<template tid="MPI_Pack_external.enter">
<data
inType="win:AnsiString"
name="datarep"
/>
<data
inType="win:Pointer"
name="inbuf"
/>
<data
inType="win:Int32"
name="incount"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
<data
inType="win:Pointer"
name="outbuf"
/>
<data
inType="win:Int64"
name="outcount"
/>
<data
inType="win:Int64"
name="position"
/>
</template>
<template tid="MPI_Pack_external.leave">
<data
inType="win:Int64"
name="position"
/>
</template>
<template tid="MPI_Pack_external_size.enter">
<data
inType="win:AnsiString"
name="datatrep"
/>
<data
inType="win:Int32"
name="incount"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Pack_external_size.leave">
<data
inType="win:Int64"
name="size"
/>
</template>
<template tid="MPI_Pack_size.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="incount"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Pack_size.leave">
<data
inType="win:Int32"
name="size"
/>
</template>
<template tid="MPI_Register_datarep.enter">
<data
inType="win:AnsiString"
name="datarep"
/>
<data
inType="win:Pointer"
name="read_conversion_fn"
/>
<data
inType="win:Pointer"
name="write_conversion_fn"
/>
<data
inType="win:Pointer"
name="dtype_file_extent_fn"
/>
<data
inType="win:Pointer"
name="extra_state"
/>
</template>
<template tid="MPI_Status_set_elements.enter">
<data
inType="win:Pointer"
name="status"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="count"
/>
</template>
<template tid="MPI_Status_set_elements.leave">
<data
inType="win:Int64"
name="status_count"
/>
</template>
<template tid="MPI_Status_set_elements_x.enter">
<data
inType="win:Pointer"
name="status"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
<data
inType="win:Int64"
name="count"
/>
</template>
<template tid="MPI_Status_set_elements_x.leave">
<data
inType="win:Int64"
name="status_count"
/>
</template>
<template tid="MPI_Type_commit.enter">
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Type_contiguous.enter">
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="old_type"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Type_contiguous.leave">
<data
inType="win:UInt32"
name="new_type_p"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Type_create_darray.enter">
<data
inType="win:Int32"
name="size"
/>
<data
inType="win:UInt32"
map="MPI_Rank"
name="rank"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="ndims"
/>
<data
inType="win:UInt8"
name="max_array_of_gsizes"
/>
<data
count="max_array_of_gsizes"
inType="win:Int32"
name="array_of_gsizes"
/>
<data
inType="win:UInt8"
name="max_array_of_distribs"
/>
<data
count="max_array_of_distribs"
inType="win:Int32"
name="array_of_distribs"
/>
<data
inType="win:UInt8"
name="max_array_of_dargs"
/>
<data
count="max_array_of_dargs"
inType="win:Int32"
name="array_of_dargs"
/>
<data
inType="win:UInt8"
name="max_array_of_psizes"
/>
<data
count="max_array_of_psizes"
inType="win:Int32"
name="array_of_psizes"
/>
<data
inType="win:Int32"
name="order"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="oldtype"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Type_create_darray.leave">
<data
inType="win:UInt32"
name="newtype"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Type_create_hindexed.enter">
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:UInt8"
name="max_array_of_blocklengths"
/>
<data
count="max_array_of_blocklengths"
inType="win:Int32"
name="array_of_blocklengths"
/>
<data
inType="win:UInt8"
name="max_array_of_displacements"
/>
<data
count="max_array_of_displacements"
inType="win:Pointer"
name="array_of_displacements"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="old"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Type_create_hindexed.leave">
<data
inType="win:UInt32"
name="newtype"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Type_create_hvector.enter">
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:Int32"
name="blocklength"
/>
<data
inType="win:Int64"
name="stride"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="oldtype"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Type_create_hvector.leave">
<data
inType="win:UInt32"
name="newtype"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Type_create_indexed_block.enter">
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:Int32"
name="blocklength"
/>
<data
inType="win:UInt8"
name="max_array_of_displacements"
/>
<data
count="max_array_of_displacements"
inType="win:Int32"
name="array_of_displacements"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="oldtype"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Type_create_indexed_block.leave">
<data
inType="win:UInt32"
name="newtype"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Type_create_resized.enter">
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
<data
inType="win:Int64"
name="lb"
/>
<data
inType="win:Int64"
name="extend"
/>
</template>
<template tid="MPI_Type_create_resized.leave">
<data
inType="win:UInt32"
name="newtype"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Type_create_struct.enter">
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:UInt8"
name="max_array_of_blocklengths"
/>
<data
count="max_array_of_blocklengths"
inType="win:Int32"
name="array_of_blocklengths"
/>
<data
inType="win:UInt8"
name="max_array_of_displacements"
/>
<data
count="max_array_of_displacements"
inType="win:Pointer"
name="array_of_displacements"
/>
<data
inType="win:UInt8"
name="max_array_of_types"
/>
<data
count="max_array_of_types"
inType="win:Int32"
name="array_of_types"
/>
</template>
<template tid="MPI_Type_create_struct.leave">
<data
inType="win:UInt32"
name="newtype"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Type_create_subarray.enter">
<data
inType="win:Int32"
name="ndims"
/>
<data
inType="win:UInt8"
name="max_array_of_sizes"
/>
<data
count="max_array_of_sizes"
inType="win:Int32"
name="array_of_sizes"
/>
<data
inType="win:UInt8"
name="max_array_of_subsizes"
/>
<data
count="max_array_of_subsizes"
inType="win:Int32"
name="array_of_subsizes"
/>
<data
inType="win:UInt8"
name="max_array_of_starts"
/>
<data
count="max_array_of_starts"
inType="win:Int32"
name="array_of_starts"
/>
<data
inType="win:Int32"
name="order"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="oldtype"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Type_create_subarray.leave">
<data
inType="win:UInt32"
name="newtype"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Type_dup.enter">
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Type_dup.leave">
<data
inType="win:UInt32"
name="newtype"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Type_free.enter">
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Type_get_contents.enter">
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="max_int"
/>
<data
inType="win:Int32"
name="max_addr"
/>
<data
inType="win:Int32"
name="max_types"
/>
</template>
<template tid="MPI_Type_get_contents.leave">
<data
inType="win:UInt8"
name="max_array_of_integers"
/>
<data
count="max_array_of_integers"
inType="win:Int32"
name="array_of_integers"
/>
<data
inType="win:UInt8"
name="max_array_of_addresses"
/>
<data
count="max_array_of_addresses"
inType="win:Pointer"
name="array_of_addresses"
/>
<data
inType="win:UInt8"
name="max_array_of_datatypes"
/>
<data
count="max_array_of_datatypes"
inType="win:Int32"
name="array_of_datatypes"
/>
</template>
<template tid="MPI_Type_get_envelope.enter">
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Type_get_envelope.leave">
<data
inType="win:Int32"
name="num_integers"
/>
<data
inType="win:Int32"
name="num_addresses"
/>
<data
inType="win:Int32"
name="num_datatypes"
/>
<data
inType="win:Int32"
name="combiner"
/>
</template>
<template tid="MPI_Type_get_extent.enter">
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Type_get_extent.leave">
<data
inType="win:Int64"
name="lb"
/>
<data
inType="win:Int64"
name="extend"
/>
</template>
<template tid="MPI_Type_get_extent_x.enter">
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Type_get_extent_x.leave">
<data
inType="win:Int64"
name="lb"
/>
<data
inType="win:Int64"
name="extend"
/>
</template>
<template tid="MPI_Type_get_name.enter">
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Type_get_name.leave">
<data
inType="win:UInt32"
name="resultlen"
/>
<data
inType="win:AnsiString"
length="resultlen"
name="type_name"
/>
</template>
<template tid="MPI_Type_get_true_extent.enter">
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Type_get_true_extent.leave">
<data
inType="win:Int64"
name="true_lb"
/>
<data
inType="win:Int64"
name="true_extend"
/>
</template>
<template tid="MPI_Type_get_true_extent_x.enter">
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Type_get_true_extent_x.leave">
<data
inType="win:Int64"
name="true_lb"
/>
<data
inType="win:Int64"
name="true_extend"
/>
</template>
<template tid="MPI_Type_indexed.enter">
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:UInt8"
name="max_blocklens"
/>
<data
count="max_blocklens"
inType="win:Int32"
name="blocklens"
/>
<data
inType="win:UInt8"
name="max_indices"
/>
<data
count="max_indices"
inType="win:Int32"
name="indices"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="old_type"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Type_indexed.leave">
<data
inType="win:UInt32"
name="newtype"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Type_set_name.enter">
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
<data
inType="win:AnsiString"
name="name"
/>
</template>
<template tid="MPI_Type_match_size.enter">
<data
inType="win:Int32"
name="typeclass"
/>
<data
inType="win:Int32"
name="size"
/>
</template>
<template tid="MPI_Type_match_size.leave">
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Type_size.enter">
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Type_size.leave">
<data
inType="win:Int32"
name="size"
/>
</template>
<template tid="MPI_Type_size_x.enter">
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Type_size_x.leave">
<data
inType="win:Int64"
name="size"
/>
</template>
<template tid="MPI_Type_vector.enter">
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:Int32"
name="blocklength"
/>
<data
inType="win:Int32"
name="stride"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Type_vector.leave">
<data
inType="win:UInt32"
name="newtype"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Unpack.enter">
<data
inType="win:Pointer"
name="inbuf"
/>
<data
inType="win:Int32"
name="insize"
/>
<data
inType="win:Int32"
name="position"
/>
<data
inType="win:Pointer"
name="outbuf"
/>
<data
inType="win:Int32"
name="outcount"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Unpack.leave">
<data
inType="win:Int32"
name="position"
/>
</template>
<template tid="MPI_Unpack_external.enter">
<data
inType="win:AnsiString"
name="datatrep"
/>
<data
inType="win:Pointer"
name="inbuf"
/>
<data
inType="win:Int64"
name="insize"
/>
<data
inType="win:Int64"
name="position"
/>
<data
inType="win:Pointer"
name="outbuf"
/>
<data
inType="win:Int32"
name="outcount"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Unpack_external.leave">
<data
inType="win:Int64"
name="position"
/>
</template>
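<!-- Error handling -->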
<template tid="MPI_Add_error_class.leave">
<data
inType="win:UInt32"
name="errorclass"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Add_error_code.enter">
<data
inType="win:UInt32"
name="errorclass"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Add_error_code.leave">
<data
inType="win:UInt32"
name="errorcode"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Add_error_string.enter">
<data
inType="win:UInt32"
name="errorcode"
outType="win:HexInt32"
/>
<data
inType="win:AnsiString"
name="string"
/>
</template>
<template tid="MPI_Comm_call_errhandler.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
name="errorcode"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Comm_call_errhandler.leave">
<data
inType="win:UInt32"
name="errorcode"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Comm_create_errhandler.enter">
<data
inType="win:Pointer"
name="function"
/>
</template>
<template tid="MPI_Comm_create_errhandler.leave">
<data
inType="win:UInt32"
map="MPI_Errorhandler"
name="errhandler"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Comm_get_errhandler.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Comm_get_errhandler.leave">
<data
inType="win:UInt32"
map="MPI_Errorhandler"
name="errhandler"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Comm_set_errhandler.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Errorhandler"
name="errhandler"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Errhandler_free.enter">
<data
inType="win:UInt32"
map="MPI_Errorhandler"
name="errhandler"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Error_class.enter">
<data
inType="win:UInt32"
name="errorcode"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Error_class.leave">
<data
inType="win:UInt32"
name="errorclass"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Error_string.enter">
<data
inType="win:UInt32"
name="errorcode"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Error_string.leave">
<data
inType="win:UInt32"
name="resultlen"
/>
<data
inType="win:AnsiString"
length="resultlen"
name="string"
/>
</template>
<template tid="MPI_File_call_errhandler.enter">
<data
inType="win:Pointer"
name="file"
/>
<data
inType="win:UInt32"
name="error"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_File_call_errhandler.leave">
<data
inType="win:UInt32"
name="errorcode"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_File_create_errhandler.enter">
<data
inType="win:Pointer"
name="function"
/>
</template>
<template tid="MPI_File_create_errhandler.leave">
<data
inType="win:UInt32"
map="MPI_Errorhandler"
name="errhandler"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_File_get_errhandler.enter">
<data
inType="win:Pointer"
name="file"
/>
</template>
<template tid="MPI_File_get_errhandler.leave">
<data
inType="win:UInt32"
map="MPI_Errorhandler"
name="errhandler"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_File_set_errhandler.enter">
<data
inType="win:Pointer"
name="file"
/>
<data
inType="win:UInt32"
map="MPI_Errorhandler"
name="errhandler"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Win_call_errhandler.enter">
<data
inType="win:UInt32"
map="MPI_Win"
name="win"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
name="errorcode"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Win_call_errhandler.leave">
<data
inType="win:UInt32"
name="errorcode"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Win_create_errhandler.enter">
<data
inType="win:Pointer"
name="function"
/>
</template>
<template tid="MPI_Win_create_errhandler.leave">
<data
inType="win:UInt32"
map="MPI_Errorhandler"
name="errhandler"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Win_get_errhandler.enter">
<data
inType="win:UInt32"
map="MPI_Win"
name="win"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Win_get_errhandler.leave">
<data
inType="win:UInt32"
map="MPI_Errorhandler"
name="errhandler"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Win_set_errhandler.enter">
<data
inType="win:UInt32"
map="MPI_Win"
name="win"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Errorhandler"
name="errhandler"
outType="win:HexInt32"
/>
</template>
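<!-- Group management -->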
<template tid="MPI_Group_compare.enter">
<data
inType="win:UInt32"
map="MPI_Group"
name="group1"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Group"
name="group2"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Group_compare.leave">
<data
inType="win:Int32"
name="result"
/>
</template>
<template tid="MPI_Group_difference.enter">
<data
inType="win:UInt32"
map="MPI_Group"
name="group1"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Group"
name="group2"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Group_difference.leave">
<data
inType="win:UInt32"
name="newgroup"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Group_excl.enter">
<data
inType="win:UInt32"
map="MPI_Group"
name="group"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
name="n"
/>
<data
inType="win:UInt8"
name="max_ranks"
/>
<data
count="max_ranks"
inType="win:Int32"
name="ranks"
/>
</template>
<template tid="MPI_Group_excl.leave">
<data
inType="win:UInt32"
name="newgroup"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Group_free.enter">
<data
inType="win:UInt32"
map="MPI_Group"
name="group"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Group_incl.enter">
<data
inType="win:UInt32"
map="MPI_Group"
name="group"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
name="n"
/>
<data
inType="win:UInt8"
name="max_ranks"
/>
<data
count="max_ranks"
inType="win:Int32"
name="ranks"
/>
</template>
<template tid="MPI_Group_incl.leave">
<data
inType="win:UInt32"
name="newgroup"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Group_intersection.enter">
<data
inType="win:UInt32"
map="MPI_Group"
name="group1"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Group"
name="group2"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Group_intersection.leave">
<data
inType="win:UInt32"
name="newgroup"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Group_range_excl.enter">
<data
inType="win:UInt32"
map="MPI_Group"
name="group"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
name="n"
/>
<data
inType="win:UInt8"
name="max_ranges"
/>
<struct
count="max_ranges"
name="ranges"
>
<data
inType="win:Int32"
name="firstrank"
/>
<data
inType="win:Int32"
name="lastrank"
/>
<data
inType="win:Int32"
name="stride"
/>
</struct>
</template>
<template tid="MPI_Group_range_excl.leave">
<data
inType="win:UInt32"
name="newgroup"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Group_range_incl.enter">
<data
inType="win:UInt32"
map="MPI_Group"
name="group"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
name="n"
/>
<data
inType="win:UInt8"
name="max_ranges"
/>
<struct
count="max_ranges"
name="ranges"
>
<data
inType="win:Int32"
name="firstrank"
/>
<data
inType="win:Int32"
name="lastrank"
/>
<data
inType="win:Int32"
name="stride"
/>
</struct>
</template>
<template tid="MPI_Group_range_incl.leave">
<data
inType="win:UInt32"
name="newgroup"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Group_rank.enter">
<data
inType="win:UInt32"
map="MPI_Group"
name="group"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Group_rank.leave">
<data
inType="win:UInt32"
map="MPI_Rank"
name="rank"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Group_size.enter">
<data
inType="win:UInt32"
map="MPI_Group"
name="group"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Group_size.leave">
<data
inType="win:Int32"
name="size"
/>
</template>
<template tid="MPI_Group_translate_ranks.enter">
<data
inType="win:UInt32"
map="MPI_Group"
name="group1"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="n"
/>
<data
inType="win:UInt8"
name="max_ranks1"
/>
<data
count="max_ranks1"
inType="win:Int32"
name="ranks1"
/>
<data
inType="win:UInt32"
map="MPI_Group"
name="group2"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Group_translate_ranks.leave">
<data
inType="win:Int32"
name="n"
/>
<data
inType="win:UInt8"
name="max_ranks2"
/>
<data
count="max_ranks2"
inType="win:Int32"
name="ranks2"
/>
</template>
<template tid="MPI_Group_union.enter">
<data
inType="win:UInt32"
map="MPI_Group"
name="group1"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Group"
name="group2"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Group_union.leave">
<data
inType="win:UInt32"
name="newgroup"
outType="win:HexInt32"
/>
</template>
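<!-- Info objects -->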
<template tid="MPI_Info_create.leave">
<data
inType="win:UInt32"
name="info"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Info_delete.enter">
<data
inType="win:UInt32"
name="info"
outType="win:HexInt32"
/>
<data
inType="win:AnsiString"
name="key"
/>
</template>
<template tid="MPI_Info_dup.enter">
<data
inType="win:UInt32"
name="info"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Info_dup.leave">
<data
inType="win:UInt32"
name="newinfo"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Info_free.enter">
<data
inType="win:UInt32"
name="info"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Info_get.enter">
<data
inType="win:UInt32"
name="info"
outType="win:HexInt32"
/>
<data
inType="win:AnsiString"
name="key"
/>
<data
inType="win:UInt32"
name="valuelen"
/>
</template>
<template tid="MPI_Info_get.leave">
<data
inType="win:AnsiString"
name="value"
/>
<data
inType="win:Int32"
name="flag"
/>
</template>
<template tid="MPI_Info_get_nkeys.enter">
<data
inType="win:UInt32"
name="info"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Info_get_nkeys.leave">
<data
inType="win:Int32"
name="nkeys"
/>
</template>
<template tid="MPI_Info_get_nthkey.enter">
<data
inType="win:UInt32"
name="info"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="n"
/>
</template>
<template tid="MPI_Info_get_nthkey.leave">
<data
inType="win:AnsiString"
name="key"
/>
</template>
<template tid="MPI_Info_get_valuelen.enter">
<data
inType="win:UInt32"
name="info"
outType="win:HexInt32"
/>
<data
inType="win:AnsiString"
name="key"
/>
</template>
<template tid="MPI_Info_get_valuelen.leave">
<data
inType="win:Int32"
name="valuelen"
/>
<data
inType="win:Int32"
name="flag"
/>
</template>
<template tid="MPI_Info_set.enter">
<data
inType="win:UInt32"
name="info"
outType="win:HexInt32"
/>
<data
inType="win:AnsiString"
name="key"
/>
<data
inType="win:AnsiString"
name="value"
/>
</template>
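<!-- Environment and initialization -->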
<template tid="MPI_Abort.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
name="errorcode"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Init_thread.enter">
<data
inType="win:Int32"
name="required"
/>
</template>
<template tid="MPI_Init_thread.leave">
<data
inType="win:Int32"
name="provided"
/>
</template>
<template tid="MPI_Is_thread_main.leave">
<data
inType="win:Int32"
name="flag"
/>
</template>
<template tid="MPI_Query_thread.leave">
<data
inType="win:Int32"
name="provided"
/>
</template>
<template tid="MPI_Get_processor_name.leave">
<data
inType="win:UInt32"
name="resultlen"
/>
<data
inType="win:AnsiString"
length="resultlen"
name="name"
/>
</template>
<template tid="MPI_Get_version.leave">
<data
inType="win:Int32"
name="version"
/>
<data
inType="win:Int32"
name="subversion"
/>
</template>
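<!-- Point-to-point communication. Enter templates mirror the API argument lists;
for example, MPI_Send.enter carries (buf, count, datatype, dest, tag, comm),
matching int MPI_Send(const void* buf, int count, MPI_Datatype datatype,
int dest, int tag, MPI_Comm comm). -->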
<template tid="MPI_Bsend.enter">
<data
inType="win:Pointer"
name="buf"
/>
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="dest"
/>
<data
inType="win:UInt32"
map="MPI_Tag"
name="tag"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Bsend_init.enter">
<data
inType="win:Pointer"
name="buf"
/>
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="dest"
/>
<data
inType="win:UInt32"
map="MPI_Tag"
name="tag"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Bsend_init.leave">
<data
inType="win:UInt32"
name="request"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Buffer_attach.enter">
<data
inType="win:Pointer"
name="buffer"
/>
<data
inType="win:Int32"
name="size"
/>
</template>
<template tid="MPI_Buffer_detach.leave">
<data
inType="win:Pointer"
name="buffer"
/>
<data
inType="win:Int32"
name="size"
/>
</template>
<template tid="MPI_Cancel.enter">
<data
inType="win:UInt32"
name="request"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Grequest_complete.enter">
<data
inType="win:UInt32"
name="request"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Grequest_start.enter">
<data
inType="win:Pointer"
name="query_fn"
/>
<data
inType="win:Pointer"
name="free_fn"
/>
<data
inType="win:Pointer"
name="cancel_fn"
/>
<data
inType="win:Pointer"
name="extra_state"
/>
</template>
<template tid="MPI_Grequest_start.leave">
<data
inType="win:UInt32"
name="request"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Ibsend.enter">
<data
inType="win:Pointer"
name="buf"
/>
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="dest"
/>
<data
inType="win:UInt32"
map="MPI_Tag"
name="tag"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Ibsend.leave">
<data
inType="win:UInt32"
name="request"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Improbe.enter">
<data
inType="win:Int32"
name="src"
/>
<data
inType="win:UInt32"
map="MPI_Tag"
name="tag"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Improbe.leave">
<data
inType="win:Int32"
name="flag"
/>
<data
inType="win:UInt32"
map="MPI_Message"
name="message"
outType="win:HexInt32"
/>
<struct name="_status">
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:Int32"
name="cancelled"
/>
<data
inType="win:UInt32"
name="MPI_SOURCE"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Tag"
name="MPI_TAG"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_ERROR"
name="MPI_ERROR"
outType="win:HexInt32"
/>
</struct>
</template>
<template tid="MPI_Imrecv.enter">
<data
inType="win:Pointer"
name="buf"
/>
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Message"
name="message"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Imrecv.leave">
<data
inType="win:UInt32"
name="request"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Iprobe.enter">
<data
inType="win:Int32"
name="src"
/>
<data
inType="win:UInt32"
map="MPI_Tag"
name="tag"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Iprobe.leave">
<data
inType="win:Int32"
name="flag"
/>
<struct name="_status">
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:Int32"
name="cancelled"
/>
<data
inType="win:UInt32"
name="MPI_SOURCE"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Tag"
name="MPI_TAG"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_ERROR"
name="MPI_ERROR"
outType="win:HexInt32"
/>
</struct>
</template>
<template tid="MPI_Irecv.enter">
<data
inType="win:Pointer"
name="buf"
/>
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="src"
/>
<data
inType="win:UInt32"
map="MPI_Tag"
name="tag"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Irecv.leave">
<data
inType="win:UInt32"
name="request"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Irsend.enter">
<data
inType="win:Pointer"
name="buf"
/>
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="dest"
/>
<data
inType="win:UInt32"
map="MPI_Tag"
name="tag"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Irsend.leave">
<data
inType="win:UInt32"
name="request"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Isend.enter">
<data
inType="win:Pointer"
name="buf"
/>
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="dest"
/>
<data
inType="win:UInt32"
map="MPI_Tag"
name="tag"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Isend.leave">
<data
inType="win:UInt32"
name="request"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Issend.enter">
<data
inType="win:Pointer"
name="buf"
/>
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="dest"
/>
<data
inType="win:UInt32"
map="MPI_Tag"
name="tag"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Issend.leave">
<data
inType="win:UInt32"
name="request"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Mprobe.enter">
<data
inType="win:Int32"
name="src"
/>
<data
inType="win:UInt32"
map="MPI_Tag"
name="tag"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Mprobe.leave">
<data
inType="win:UInt32"
map="MPI_Message"
name="message"
outType="win:HexInt32"
/>
<struct name="_status">
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:Int32"
name="cancelled"
/>
<data
inType="win:UInt32"
name="MPI_SOURCE"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Tag"
name="MPI_TAG"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_ERROR"
name="MPI_ERROR"
outType="win:HexInt32"
/>
</struct>
</template>
<template tid="MPI_Mrecv.enter">
<data
inType="win:Pointer"
name="buf"
/>
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Message"
name="message"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Mrecv.leave">
<struct name="_status">
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:Int32"
name="cancelled"
/>
<data
inType="win:UInt32"
name="MPI_SOURCE"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Tag"
name="MPI_TAG"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_ERROR"
name="MPI_ERROR"
outType="win:HexInt32"
/>
</struct>
</template>
<template tid="MPI_Probe.enter">
<data
inType="win:Int32"
name="src"
/>
<data
inType="win:UInt32"
map="MPI_Tag"
name="tag"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Probe.leave">
<struct name="_status">
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:Int32"
name="cancelled"
/>
<data
inType="win:UInt32"
name="MPI_SOURCE"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Tag"
name="MPI_TAG"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_ERROR"
name="MPI_ERROR"
outType="win:HexInt32"
/>
</struct>
</template>
<template tid="MPI_Recv.enter">
<data
inType="win:Pointer"
name="buf"
/>
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="src"
/>
<data
inType="win:UInt32"
map="MPI_Tag"
name="tag"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Recv.leave">
<struct name="_status">
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:Int32"
name="cancelled"
/>
<data
inType="win:UInt32"
name="MPI_SOURCE"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Tag"
name="MPI_TAG"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_ERROR"
name="MPI_ERROR"
outType="win:HexInt32"
/>
</struct>
</template>
<template tid="MPI_Recv_init.enter">
<data
inType="win:Pointer"
name="buf"
/>
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="src"
/>
<data
inType="win:UInt32"
map="MPI_Tag"
name="tag"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Recv_init.leave">
<data
inType="win:UInt32"
name="request"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Request_free.enter">
<data
inType="win:UInt32"
name="request"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Request_get_status.enter">
<data
inType="win:UInt32"
name="request"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Request_get_status.leave">
<data
inType="win:Int32"
name="flag"
/>
<struct name="_status">
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:Int32"
name="cancelled"
/>
<data
inType="win:UInt32"
name="MPI_SOURCE"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Tag"
name="MPI_TAG"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_ERROR"
name="MPI_ERROR"
outType="win:HexInt32"
/>
</struct>
</template>
<template tid="MPI_Rsend.enter">
<data
inType="win:Pointer"
name="buf"
/>
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="dest"
/>
<data
inType="win:UInt32"
map="MPI_Tag"
name="tag"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Rsend_init.enter">
<data
inType="win:Pointer"
name="buf"
/>
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="dest"
/>
<data
inType="win:UInt32"
map="MPI_Tag"
name="tag"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Rsend_init.leave">
<data
inType="win:UInt32"
name="request"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Send.enter">
<data
inType="win:Pointer"
name="buf"
/>
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="dest"
/>
<data
inType="win:UInt32"
map="MPI_Tag"
name="tag"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Send_init.enter">
<data
inType="win:Pointer"
name="buf"
/>
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="dest"
/>
<data
inType="win:UInt32"
map="MPI_Tag"
name="tag"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Send_init.leave">
<data
inType="win:UInt32"
name="request"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Sendrecv.enter">
<data
inType="win:Pointer"
name="sendbuf"
/>
<data
inType="win:Int32"
name="sendcount"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="sendtype"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="dest"
/>
<data
inType="win:UInt32"
map="MPI_Tag"
name="sendtag"
/>
<data
inType="win:Pointer"
name="recvbuf"
/>
<data
inType="win:Int32"
name="recvcount"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="recvtype"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="source"
/>
<data
inType="win:UInt32"
map="MPI_Tag"
name="recvtag"
/>
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Sendrecv_replace.enter">
<data
inType="win:Pointer"
name="buf"
/>
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="dest"
/>
<data
inType="win:UInt32"
map="MPI_Tag"
name="sendtag"
/>
<data
inType="win:Int32"
name="src"
/>
<data
inType="win:UInt32"
map="MPI_Tag"
name="recvtag"
/>
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Sendrecv_replace.leave">
<struct name="_status">
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:Int32"
name="cancelled"
/>
<data
inType="win:UInt32"
name="MPI_SOURCE"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Tag"
name="MPI_TAG"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_ERROR"
name="MPI_ERROR"
outType="win:HexInt32"
/>
</struct>
</template>
<template tid="MPI_Ssend.enter">
<data
inType="win:Pointer"
name="buf"
/>
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="dest"
/>
<data
inType="win:UInt32"
map="MPI_Tag"
name="tag"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Ssend_init.enter">
<data
inType="win:Pointer"
name="buf"
/>
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="dest"
/>
<data
inType="win:UInt32"
map="MPI_Tag"
name="tag"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Ssend_init.leave">
<data
inType="win:UInt32"
name="request"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Start.enter">
<data
inType="win:UInt32"
name="request"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Startall.enter">
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:UInt8"
name="max_array_of_requests"
/>
<data
count="max_array_of_requests"
inType="win:UInt32"
name="array_of_requests"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Status_set_cancelled.enter">
<struct name="_status">
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:Int32"
name="cancelled"
/>
<data
inType="win:UInt32"
name="MPI_SOURCE"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Tag"
name="MPI_TAG"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_ERROR"
name="MPI_ERROR"
outType="win:HexInt32"
/>
</struct>
<data
inType="win:Int32"
name="flag"
/>
</template>
<template tid="MPI_Test.enter">
<data
inType="win:UInt32"
name="request"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Test.leave">
<data
inType="win:Int32"
name="flag"
/>
<struct name="_status">
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:Int32"
name="cancelled"
/>
<data
inType="win:UInt32"
name="MPI_SOURCE"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Tag"
name="MPI_TAG"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_ERROR"
name="MPI_ERROR"
outType="win:HexInt32"
/>
</struct>
</template>
<template tid="MPI_Test_cancelled.enter">
<struct name="_status">
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:Int32"
name="cancelled"
/>
<data
inType="win:UInt32"
name="MPI_SOURCE"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Tag"
name="MPI_TAG"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_ERROR"
name="MPI_ERROR"
outType="win:HexInt32"
/>
</struct>
</template>
<template tid="MPI_Test_cancelled.leave">
<data
inType="win:Int32"
name="flag"
/>
</template>
<template tid="MPI_Testall.enter">
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:UInt8"
name="max_array_of_requests"
/>
<data
count="max_array_of_requests"
inType="win:UInt32"
name="array_of_requests"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Testall.leave">
<data
inType="win:Int32"
name="flag"
/>
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:UInt8"
name="max_array_of_statuses"
/>
<struct
count="max_array_of_statuses"
name="array_of_statuses"
>
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:Int32"
name="cancelled"
/>
<data
inType="win:UInt32"
name="MPI_SOURCE"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Tag"
name="MPI_TAG"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_ERROR"
name="MPI_ERROR"
outType="win:HexInt32"
/>
</struct>
</template>
<template tid="MPI_Testany.enter">
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:UInt8"
name="max_array_of_requests"
/>
<data
count="max_array_of_requests"
inType="win:UInt32"
name="array_of_requests"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Testany.leave">
<data
inType="win:Int32"
name="index"
/>
<data
inType="win:Int32"
name="flag"
/>
<struct name="_status">
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:Int32"
name="cancelled"
/>
<data
inType="win:UInt32"
name="MPI_SOURCE"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Tag"
name="MPI_TAG"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_ERROR"
name="MPI_ERROR"
outType="win:HexInt32"
/>
</struct>
</template>
<template tid="MPI_Testsome.enter">
<data
inType="win:Int32"
name="incount"
/>
<data
inType="win:UInt8"
name="max_array_of_requests"
/>
<data
count="max_array_of_requests"
inType="win:UInt32"
name="array_of_requests"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Testsome.leave">
<data
inType="win:Int32"
name="outcount"
/>
<data
inType="win:UInt8"
name="max_array_of_indices"
/>
<data
count="max_array_of_indices"
inType="win:Int32"
name="array_of_indices"
/>
<data
inType="win:UInt8"
name="max_array_of_statuses"
/>
<struct
count="max_array_of_statuses"
name="array_of_statuses"
>
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:Int32"
name="cancelled"
/>
<data
inType="win:UInt32"
name="MPI_SOURCE"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Tag"
name="MPI_TAG"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_ERROR"
name="MPI_ERROR"
outType="win:HexInt32"
/>
</struct>
</template>
<template tid="MPI_Wait.enter">
<data
inType="win:UInt32"
name="request"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Wait.leave">
<struct name="_status">
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:Int32"
name="cancelled"
/>
<data
inType="win:UInt32"
name="MPI_SOURCE"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Tag"
name="MPI_TAG"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_ERROR"
name="MPI_ERROR"
outType="win:HexInt32"
/>
</struct>
</template>
<template tid="MPI_Waitall.enter">
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:UInt8"
name="max_array_of_requests"
/>
<data
count="max_array_of_requests"
inType="win:UInt32"
name="array_of_requests"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Waitall.leave">
<data
inType="win:UInt32"
map="MPI_ERROR"
name="rc"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:UInt8"
name="max_array_of_statuses"
/>
<struct
count="max_array_of_statuses"
name="array_of_statuses"
>
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:Int32"
name="cancelled"
/>
<data
inType="win:UInt32"
name="MPI_SOURCE"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Tag"
name="MPI_TAG"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_ERROR"
name="MPI_ERROR"
outType="win:HexInt32"
/>
</struct>
</template>
<template tid="MPI_Waitany.enter">
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:UInt8"
name="max_array_of_requests"
/>
<data
count="max_array_of_requests"
inType="win:UInt32"
name="array_of_requests"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Waitany.leave">
<data
inType="win:Int32"
name="index"
/>
<struct name="_status">
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:Int32"
name="cancelled"
/>
<data
inType="win:UInt32"
name="MPI_SOURCE"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Tag"
name="MPI_TAG"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_ERROR"
name="MPI_ERROR"
outType="win:HexInt32"
/>
</struct>
</template>
<template tid="MPI_Waitsome.enter">
<data
inType="win:Int32"
name="incount"
/>
<data
inType="win:UInt8"
name="max_array_of_requests"
/>
<data
count="max_array_of_requests"
inType="win:UInt32"
name="array_of_requests"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Waitsome.leave">
<data
inType="win:Int32"
name="outcount"
/>
<data
inType="win:UInt8"
name="max_array_of_indices"
/>
<data
count="max_array_of_indices"
inType="win:UInt32"
name="array_of_indicess"
outType="win:HexInt32"
/>
<data
inType="win:UInt8"
name="max_array_of_statuses"
/>
<struct
count="max_array_of_statuses"
name="array_of_statuses"
>
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:Int32"
name="cancelled"
/>
<data
inType="win:UInt32"
name="MPI_SOURCE"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Tag"
name="MPI_TAG"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_ERROR"
name="MPI_ERROR"
outType="win:HexInt32"
/>
</struct>
</template>
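<!-- RMA (one-sided) templates: MPI_Accumulate/MPI_Get/MPI_Put, memory allocation, and MPI_Win_* window management -->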
<template tid="MPI_Accumulate.enter">
<data
inType="win:Pointer"
name="origin_addr"
/>
<data
inType="win:Int32"
name="origin_count"
/>
<data
inType="win:UInt32"
name="origin_datatype"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Rank"
name="target_rank"
/>
<data
inType="win:UInt64"
name="target_disp"
/>
<data
inType="win:Int32"
name="target_count"
/>
<data
inType="win:UInt32"
name="target_datatype"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Op"
name="op"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Win"
name="win"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Alloc_mem.enter">
<data
inType="win:Int64"
name="size"
/>
<data
inType="win:UInt32"
map="MPI_Info"
name="info"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Alloc_mem.leave">
<data
inType="win:Pointer"
name="base"
/>
</template>
<template tid="MPI_Free_mem.enter">
<data
inType="win:Pointer"
name="baseptr"
/>
</template>
<template tid="MPI_Get.enter">
<data
inType="win:Pointer"
name="origin_addr"
/>
<data
inType="win:Int32"
name="origin_count"
/>
<data
inType="win:UInt32"
name="origin_datatype"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Rank"
name="target_rank"
/>
<data
inType="win:UInt64"
name="target_disp"
/>
<data
inType="win:Int32"
name="target_count"
/>
<data
inType="win:UInt32"
name="target_datatype"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Win"
name="win"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Put.enter">
<data
inType="win:Pointer"
name="origin_addr"
/>
<data
inType="win:Int32"
name="origin_count"
/>
<data
inType="win:UInt32"
name="origin_datatype"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Rank"
name="target_rank"
/>
<data
inType="win:UInt64"
name="target_disp"
/>
<data
inType="win:Int32"
name="target_count"
/>
<data
inType="win:UInt32"
name="target_datatype"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Win"
name="win"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Win_complete.enter">
<data
inType="win:UInt32"
map="MPI_Win"
name="win"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Win_create.enter">
<data
inType="win:Pointer"
name="base"
/>
<data
inType="win:Int64"
name="size"
/>
<data
inType="win:Int32"
name="disp_unit"
/>
<data
inType="win:UInt32"
map="MPI_Info"
name="info"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Win_create.leave">
<data
inType="win:UInt32"
name="win"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Win_allocate_shared.enter">
<data
inType="win:Int64"
name="size"
/>
<data
inType="win:Int32"
name="disp_unit"
/>
<data
inType="win:UInt32"
map="MPI_Info"
name="info"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Win_allocate_shared.leave">
<data
inType="win:Pointer"
name="baseptr"
/>
<data
inType="win:UInt32"
name="win"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Win_shared_query.enter">
<data
inType="win:UInt32"
map="MPI_Win"
name="win"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="rank"
/>
</template>
<template tid="MPI_Win_shared_query.leave">
<data
inType="win:Int64"
name="size"
/>
<data
inType="win:Int32"
name="disp_unit"
/>
<data
inType="win:Pointer"
name="baseptr"
/>
</template>
<template tid="MPI_Win_fence.enter">
<data
inType="win:Int32"
name="assert"
/>
<data
inType="win:UInt32"
map="MPI_Win"
name="win"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Win_free.enter">
<data
inType="win:UInt32"
map="MPI_Win"
name="win"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Win_get_group.enter">
<data
inType="win:UInt32"
map="MPI_Win"
name="win"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Win_get_group.leave">
<data
inType="win:UInt32"
map="MPI_Group"
name="group"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Win_get_name.enter">
<data
inType="win:UInt32"
map="MPI_Win"
name="win"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Win_get_name.leave">
<data
inType="win:UInt32"
name="resultlen"
/>
<data
inType="win:AnsiString"
length="resultlen"
name="name"
/>
</template>
<template tid="MPI_Win_lock.enter">
<data
inType="win:Int32"
name="locktype"
/>
<data
inType="win:UInt32"
map="MPI_Rank"
name="rank"
/>
<data
inType="win:Int32"
name="assert"
/>
<data
inType="win:UInt32"
map="MPI_Win"
name="win"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Win_post.enter">
<data
inType="win:UInt32"
map="MPI_Group"
name="group"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="assert"
/>
<data
inType="win:UInt32"
map="MPI_Win"
name="win"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Win_set_name.enter">
<data
inType="win:UInt32"
map="MPI_Win"
name="win"
outType="win:HexInt32"
/>
<data
inType="win:AnsiString"
name="win_name"
/>
</template>
<template tid="MPI_Win_start.enter">
<data
inType="win:UInt32"
map="MPI_Group"
name="group"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="assert"
/>
<data
inType="win:UInt32"
map="MPI_Win"
name="win"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Win_test.enter">
<data
inType="win:UInt32"
map="MPI_Win"
name="win"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Win_test.leave">
<data
inType="win:Int32"
name="flag"
/>
</template>
<template tid="MPI_Win_unlock.enter">
<data
inType="win:UInt32"
map="MPI_Rank"
name="rank"
/>
<data
inType="win:UInt32"
map="MPI_Win"
name="win"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Win_wait.enter">
<data
inType="win:UInt32"
map="MPI_Win"
name="win"
outType="win:HexInt32"
/>
</template>
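<!-- File I/O (MPI_File_*) templates -->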
<template tid="MPI_File_close.enter">
<data
inType="win:Pointer"
name="mpi_fh"
/>
</template>
<template tid="MPI_File_delete.enter">
<data
inType="win:AnsiString"
name="filename"
/>
<data
inType="win:UInt32"
map="MPI_Info"
name="info"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_File_c2f.enter">
<data
inType="win:Pointer"
name="mpi_fh"
/>
</template>
<template tid="MPI_File_c2f.leave">
<data
inType="win:UInt32"
name="file"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_File_f2c.enter">
<data
inType="win:UInt32"
name="fh"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_File_f2c.leave">
<data
inType="win:Pointer"
name="mpi_file"
/>
</template>
<template tid="MPI_File_sync.enter">
<data
inType="win:Pointer"
name="fh"
/>
</template>
<template tid="MPI_File_get_amode.enter">
<data
inType="win:Pointer"
name="fh"
/>
</template>
<template tid="MPI_File_get_amode.leave">
<data
inType="win:Int32"
name="amode"
/>
</template>
<template tid="MPI_File_get_atomicity.enter">
<data
inType="win:Pointer"
name="fn"
/>
</template>
<template tid="MPI_File_get_atomicity.leave">
<data
inType="win:Int32"
name="flag"
/>
</template>
<template tid="MPI_File_get_byte_offset.enter">
<data
inType="win:Pointer"
name="fh"
/>
<data
inType="win:Int64"
name="offset"
/>
</template>
<template tid="MPI_File_get_byte_offset.leave">
<data
inType="win:Int64"
name="disp"
/>
</template>
<template tid="MPI_File_get_type_extent.enter">
<data
inType="win:Pointer"
name="fh"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_File_get_type_extent.leave">
<data
inType="win:Int64"
name="extend"
/>
</template>
<template tid="MPI_File_get_group.enter">
<data
inType="win:Pointer"
name="fh"
/>
</template>
<template tid="MPI_File_get_group.leave">
<data
inType="win:UInt32"
map="MPI_Group"
name="group"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_File_get_info.enter">
<data
inType="win:Pointer"
name="fh"
/>
</template>
<template tid="MPI_File_get_info.leave">
<data
inType="win:UInt32"
name="info_used"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_File_get_position.enter">
<data
inType="win:Pointer"
name="fh"
/>
</template>
<template tid="MPI_File_get_position.leave">
<data
inType="win:Int64"
name="offset"
/>
</template>
<template tid="MPI_File_get_position_shared.enter">
<data
inType="win:Pointer"
name="fh"
/>
</template>
<template tid="MPI_File_get_position_shared.leave">
<data
inType="win:Int64"
name="offset"
/>
</template>
<template tid="MPI_File_get_size.enter">
<data
inType="win:Pointer"
name="fh"
/>
</template>
<template tid="MPI_File_get_size.leave">
<data
inType="win:Int64"
name="size"
/>
</template>
<template tid="MPI_File_get_view.enter">
<data
inType="win:Pointer"
name="fh"
/>
</template>
<template tid="MPI_File_get_view.leave">
<data
inType="win:UInt64"
name="disp"
outType="win:HexInt64"
/>
<data
inType="win:UInt32"
name="etype"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
name="filetype"
outType="win:HexInt32"
/>
<data
inType="win:AnsiString"
name="datarep"
/>
</template>
<template tid="MPI_File_iread.enter">
<data
inType="win:Pointer"
name="fh"
/>
<data
inType="win:Pointer"
name="buf"
/>
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_File_iread.leave">
<data
inType="win:UInt32"
name="request"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_File_iread_at.enter">
<data
inType="win:Pointer"
name="fh"
/>
<data
inType="win:Int64"
name="offset"
/>
<data
inType="win:Pointer"
name="buf"
/>
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_File_iread_at.leave">
<data
inType="win:UInt32"
name="request"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_File_iread_shared.enter">
<data
inType="win:Pointer"
name="fh"
/>
<data
inType="win:Pointer"
name="buf"
/>
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_File_iread_shared.leave">
<data
inType="win:UInt32"
name="request"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_File_iwrite.enter">
<data
inType="win:Pointer"
name="fh"
/>
<data
inType="win:Pointer"
name="buf"
/>
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_File_iwrite.leave">
<data
inType="win:UInt32"
name="request"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_File_iwrite_at.enter">
<data
inType="win:Pointer"
name="fh"
/>
<data
inType="win:Int64"
name="offset"
/>
<data
inType="win:Pointer"
name="buf"
/>
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_File_iwrite_at.leave">
<data
inType="win:UInt32"
name="request"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_File_iwrite_shared.enter">
<data
inType="win:Pointer"
name="fh"
/>
<data
inType="win:Pointer"
name="buf"
/>
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_File_iwrite_shared.leave">
<data
inType="win:UInt32"
name="request"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_File_open.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
<data
inType="win:AnsiString"
name="filename"
/>
<data
inType="win:Int32"
name="amode"
/>
<data
inType="win:UInt32"
map="MPI_Info"
name="info"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_File_open.leave">
<data
inType="win:Pointer"
name="fh"
/>
</template>
<template tid="MPI_File_preallocate.enter">
<data
inType="win:Pointer"
name="fh"
/>
<data
inType="win:Int64"
name="size"
/>
</template>
<template tid="MPI_File_read_at_all_begin.enter">
<data
inType="win:Pointer"
name="fh"
/>
<data
inType="win:Int64"
name="offset"
/>
<data
inType="win:Pointer"
name="buf"
/>
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_File_read_at_all_end.enter">
<data
inType="win:Pointer"
name="fh"
/>
<data
inType="win:Pointer"
name="buf"
/>
</template>
<template tid="MPI_File_read_at_all_end.leave">
<struct name="_status">
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:Int32"
name="cancelled"
/>
<data
inType="win:UInt32"
name="MPI_SOURCE"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Tag"
name="MPI_TAG"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_ERROR"
name="MPI_ERROR"
outType="win:HexInt32"
/>
</struct>
</template>
<template tid="MPI_File_read.enter">
<data
inType="win:Pointer"
name="fh"
/>
<data
inType="win:Pointer"
name="buf"
/>
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_File_read.leave">
<struct name="_status">
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:Int32"
name="cancelled"
/>
<data
inType="win:UInt32"
name="MPI_SOURCE"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Tag"
name="MPI_TAG"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_ERROR"
name="MPI_ERROR"
outType="win:HexInt32"
/>
</struct>
</template>
<template tid="MPI_File_read_all.enter">
<data
inType="win:Pointer"
name="fh"
/>
<data
inType="win:Pointer"
name="buf"
/>
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_File_read_all.leave">
<struct name="_status">
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:Int32"
name="cancelled"
/>
<data
inType="win:UInt32"
name="MPI_SOURCE"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Tag"
name="MPI_TAG"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_ERROR"
name="MPI_ERROR"
outType="win:HexInt32"
/>
</struct>
</template>
<template tid="MPI_File_read_all_begin.enter">
<data
inType="win:Pointer"
name="fh"
/>
<data
inType="win:Pointer"
name="buf"
/>
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_File_read_all_end.enter">
<data
inType="win:Pointer"
name="fh"
/>
<data
inType="win:Pointer"
name="buf"
/>
</template>
<template tid="MPI_File_read_all_end.leave">
<struct name="_status">
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:Int32"
name="cancelled"
/>
<data
inType="win:UInt32"
name="MPI_SOURCE"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Tag"
name="MPI_TAG"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_ERROR"
name="MPI_ERROR"
outType="win:HexInt32"
/>
</struct>
</template>
<template tid="MPI_File_read_at.enter">
<data
inType="win:Pointer"
name="fh"
/>
<data
inType="win:Int64"
name="offset"
/>
<data
inType="win:Pointer"
name="buf"
/>
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_File_read_at.leave">
<struct name="_status">
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:Int32"
name="cancelled"
/>
<data
inType="win:UInt32"
name="MPI_SOURCE"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Tag"
name="MPI_TAG"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_ERROR"
name="MPI_ERROR"
outType="win:HexInt32"
/>
</struct>
</template>
<template tid="MPI_File_read_at_all.enter">
<data
inType="win:Pointer"
name="fh"
/>
<data
inType="win:Int64"
name="offset"
/>
<data
inType="win:Pointer"
name="buf"
/>
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_File_read_at_all.leave">
<struct name="_status">
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:Int32"
name="cancelled"
/>
<data
inType="win:UInt32"
name="MPI_SOURCE"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Tag"
name="MPI_TAG"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_ERROR"
name="MPI_ERROR"
outType="win:HexInt32"
/>
</struct>
</template>
<template tid="MPI_File_read_ordered.enter">
<data
inType="win:Pointer"
name="fh"
/>
<data
inType="win:Pointer"
name="buf"
/>
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_File_read_ordered.leave">
<struct name="_status">
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:Int32"
name="cancelled"
/>
<data
inType="win:UInt32"
name="MPI_SOURCE"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Tag"
name="MPI_TAG"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_ERROR"
name="MPI_ERROR"
outType="win:HexInt32"
/>
</struct>
</template>
<template tid="MPI_File_read_ordered_begin.enter">
<data
inType="win:Pointer"
name="fh"
/>
<data
inType="win:Pointer"
name="buf"
/>
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_File_read_ordered_end.enter">
<data
inType="win:Pointer"
name="fh"
/>
<data
inType="win:Pointer"
name="buf"
/>
</template>
<template tid="MPI_File_read_ordered_end.leave">
<struct name="_status">
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:Int32"
name="cancelled"
/>
<data
inType="win:UInt32"
name="MPI_SOURCE"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Tag"
name="MPI_TAG"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_ERROR"
name="MPI_ERROR"
outType="win:HexInt32"
/>
</struct>
</template>
<template tid="MPI_File_read_shared.enter">
<data
inType="win:Pointer"
name="fh"
/>
<data
inType="win:Pointer"
name="buf"
/>
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_File_read_shared.leave">
<struct name="_status">
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:Int32"
name="cancelled"
/>
<data
inType="win:UInt32"
name="MPI_SOURCE"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Tag"
name="MPI_TAG"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_ERROR"
name="MPI_ERROR"
outType="win:HexInt32"
/>
</struct>
</template>
<template tid="MPI_File_seek.enter">
<data
inType="win:Pointer"
name="fh"
/>
<data
inType="win:Int64"
name="offset"
/>
<data
inType="win:Int32"
name="whence"
/>
</template>
<template tid="MPI_File_seek_shared.enter">
<data
inType="win:Pointer"
name="fh"
/>
<data
inType="win:Int64"
name="offset"
/>
<data
inType="win:Int32"
name="whence"
/>
</template>
<template tid="MPI_File_set_atomicity.enter">
<data
inType="win:Pointer"
name="fh"
/>
<data
inType="win:Int32"
name="flag"
/>
</template>
<template tid="MPI_File_set_info.enter">
<data
inType="win:Pointer"
name="fh"
/>
<data
inType="win:UInt32"
name="info"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_File_set_size.enter">
<data
inType="win:Pointer"
name="fh"
/>
<data
inType="win:Int64"
name="size"
/>
</template>
<template tid="MPI_File_set_view.enter">
<data
inType="win:Pointer"
name="fh"
/>
<data
inType="win:Int64"
name="disp"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="etype"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="filetype"
outType="win:HexInt32"
/>
<data
inType="win:AnsiString"
name="datarep"
/>
<data
inType="win:UInt32"
name="info"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_File_write_at_all_begin.enter">
<data
inType="win:Pointer"
name="fh"
/>
<data
inType="win:Int64"
name="offset"
/>
<data
inType="win:Pointer"
name="buf"
/>
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_File_write_at_all_end.enter">
<data
inType="win:Pointer"
name="fh"
/>
<data
inType="win:Pointer"
name="buf"
/>
</template>
<template tid="MPI_File_write_at_all_end.leave">
<struct name="_status">
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:Int32"
name="cancelled"
/>
<data
inType="win:UInt32"
name="MPI_SOURCE"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Tag"
name="MPI_TAG"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_ERROR"
name="MPI_ERROR"
outType="win:HexInt32"
/>
</struct>
</template>
<template tid="MPI_File_write.enter">
<data
inType="win:Pointer"
name="fh"
/>
<data
inType="win:Pointer"
name="buf"
/>
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_File_write.leave">
<struct name="_status">
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:Int32"
name="cancelled"
/>
<data
inType="win:UInt32"
name="MPI_SOURCE"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Tag"
name="MPI_TAG"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_ERROR"
name="MPI_ERROR"
outType="win:HexInt32"
/>
</struct>
</template>
<template tid="MPI_File_write_all.enter">
<data
inType="win:Pointer"
name="fh"
/>
<data
inType="win:Pointer"
name="buf"
/>
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_File_write_all.leave">
<struct name="_status">
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:Int32"
name="cancelled"
/>
<data
inType="win:UInt32"
name="MPI_SOURCE"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Tag"
name="MPI_TAG"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_ERROR"
name="MPI_ERROR"
outType="win:HexInt32"
/>
</struct>
</template>
<template tid="MPI_File_write_all_begin.enter">
<data
inType="win:Pointer"
name="fh"
/>
<data
inType="win:Pointer"
name="buf"
/>
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_File_write_all_end.enter">
<data
inType="win:Pointer"
name="fh"
/>
<data
inType="win:Pointer"
name="buf"
/>
</template>
<template tid="MPI_File_write_all_end.leave">
<struct name="_status">
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:Int32"
name="cancelled"
/>
<data
inType="win:UInt32"
name="MPI_SOURCE"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Tag"
name="MPI_TAG"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_ERROR"
name="MPI_ERROR"
outType="win:HexInt32"
/>
</struct>
</template>
<template tid="MPI_File_write_at.enter">
<data
inType="win:Pointer"
name="fh"
/>
<data
inType="win:Int64"
name="offset"
/>
<data
inType="win:Pointer"
name="buf"
/>
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_File_write_at.leave">
<struct name="_status">
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:Int32"
name="cancelled"
/>
<data
inType="win:UInt32"
name="MPI_SOURCE"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Tag"
name="MPI_TAG"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_ERROR"
name="MPI_ERROR"
outType="win:HexInt32"
/>
</struct>
</template>
<template tid="MPI_File_write_at_all.enter">
<data
inType="win:Pointer"
name="fh"
/>
<data
inType="win:Int64"
name="offset"
/>
<data
inType="win:Pointer"
name="buf"
/>
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_File_write_at_all.leave">
<struct name="_status">
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:Int32"
name="cancelled"
/>
<data
inType="win:UInt32"
name="MPI_SOURCE"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Tag"
name="MPI_TAG"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_ERROR"
name="MPI_ERROR"
outType="win:HexInt32"
/>
</struct>
</template>
<template tid="MPI_File_write_ordered.enter">
<data
inType="win:Pointer"
name="fh"
/>
<data
inType="win:Pointer"
name="buf"
/>
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_File_write_ordered.leave">
<struct name="_status">
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:Int32"
name="cancelled"
/>
<data
inType="win:UInt32"
name="MPI_SOURCE"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Tag"
name="MPI_TAG"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_ERROR"
name="MPI_ERROR"
outType="win:HexInt32"
/>
</struct>
</template>
<template tid="MPI_File_write_ordered_begin.enter">
<data
inType="win:Pointer"
name="fh"
/>
<data
inType="win:Pointer"
name="buf"
/>
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_File_write_ordered_end.enter">
<data
inType="win:Pointer"
name="fh"
/>
<data
inType="win:Pointer"
name="buf"
/>
</template>
<template tid="MPI_File_write_ordered_end.leave">
<struct name="_status">
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:Int32"
name="cancelled"
/>
<data
inType="win:UInt32"
name="MPI_SOURCE"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Tag"
name="MPI_TAG"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_ERROR"
name="MPI_ERROR"
outType="win:HexInt32"
/>
</struct>
</template>
<template tid="MPI_File_write_shared.enter">
<data
inType="win:Pointer"
name="fh"
/>
<data
inType="win:Pointer"
name="buf"
/>
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_File_write_shared.leave">
<struct name="_status">
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:Int32"
name="cancelled"
/>
<data
inType="win:UInt32"
name="MPI_SOURCE"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Tag"
name="MPI_TAG"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_ERROR"
name="MPI_ERROR"
outType="win:HexInt32"
/>
</struct>
</template>
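<!-- Dynamic process templates: ports, connect/accept, spawn, and name publishing -->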
<template tid="MPI_Close_port.enter">
<data
inType="win:AnsiString"
name="port_name"
/>
</template>
<template tid="MPI_Comm_accept.enter">
<data
inType="win:AnsiString"
name="port_name"
/>
<data
inType="win:UInt32"
name="info"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="root"
/>
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Comm_accept.leave">
<data
inType="win:UInt32"
map="MPI_Comm"
name="newcomm"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Comm_connect.enter">
<data
inType="win:AnsiString"
name="port_name"
/>
<data
inType="win:UInt32"
name="info"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="root"
/>
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Comm_connect.leave">
<data
inType="win:UInt32"
map="MPI_Comm"
name="newcomm"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Comm_disconnect.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Comm_get_parent.leave">
<data
inType="win:UInt32"
map="MPI_Comm"
name="parent"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Comm_join.enter">
<data
inType="win:UInt32"
name="fd"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Comm_join.leave">
<data
inType="win:UInt32"
map="MPI_Comm"
name="intercomm"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Comm_spawn.enter">
<data
inType="win:AnsiString"
name="command"
/>
<data
inType="win:Pointer"
name="argv"
/>
<data
inType="win:Int32"
name="maxprocs"
/>
<data
inType="win:UInt32"
map="MPI_Info"
name="info"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="root"
/>
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Comm_spawn.leave">
<data
inType="win:UInt32"
name="intercomm"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Comm_spawn_multiple.enter">
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:UInt8"
name="max_array_of_commands"
/>
<data
count="max_array_of_commands"
inType="win:Pointer"
name="array_of_commands"
/>
<data
inType="win:UInt8"
name="max_array_of_argv"
/>
<data
count="max_array_of_argv"
inType="win:Pointer"
name="array_of_argv"
/>
<data
inType="win:UInt8"
name="max_array_of_maxprocs"
/>
<data
count="max_array_of_maxprocs"
inType="win:Int32"
name="array_of_maxprocs"
/>
<data
inType="win:UInt8"
name="max_array_of_info"
/>
<data
count="max_array_of_info"
inType="win:UInt32"
map="MPI_Info"
name="array_of_info"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="root"
/>
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Comm_spawn_multiple.leave">
<data
inType="win:UInt32"
name="intercomm"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Lookup_name.enter">
<data
inType="win:AnsiString"
name="service_name"
/>
<data
inType="win:UInt32"
name="info"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Lookup_name.leave">
<data
inType="win:AnsiString"
name="port_name"
/>
</template>
<template tid="MPI_Open_port.enter">
<data
inType="win:UInt32"
name="info"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Open_port.leave">
<data
inType="win:AnsiString"
name="port_name"
/>
</template>
<template tid="MPI_Publish_name.enter">
<data
inType="win:AnsiString"
name="service_name"
/>
<data
inType="win:UInt32"
name="info"
outType="win:HexInt32"
/>
<data
inType="win:AnsiString"
name="port_name"
/>
</template>
<template tid="MPI_Unpublish_name.enter">
<data
inType="win:AnsiString"
name="service_name"
/>
<data
inType="win:UInt32"
name="info"
outType="win:HexInt32"
/>
<data
inType="win:AnsiString"
name="port_name"
/>
</template>
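<!-- Topology templates: Cartesian, graph, and distributed graph -->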
<template tid="MPI_Cart_coords.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Rank"
name="rank"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="maxdims"
/>
</template>
<template tid="MPI_Cart_coords.leave">
<data
inType="win:Int32"
name="ndims"
/>
<data
inType="win:UInt8"
name="max_coords"
/>
<data
count="max_coords"
inType="win:Int32"
name="coords"
/>
</template>
<template tid="MPI_Cart_create.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm_old"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="ndims"
/>
<data
inType="win:UInt8"
name="max_dims"
/>
<data
count="max_dims"
inType="win:Int32"
name="dims"
/>
<data
inType="win:UInt8"
name="max_periods"
/>
<data
count="max_periods"
inType="win:Int32"
name="periods"
/>
<data
inType="win:Int32"
name="reorder"
/>
</template>
<template tid="MPI_Cart_create.leave">
<data
inType="win:UInt32"
map="MPI_Comm"
name="newcomm"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Cart_get.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="maxdims"
/>
</template>
<template tid="MPI_Cart_get.leave">
<data
inType="win:Int32"
name="ndims"
/>
<data
inType="win:UInt8"
name="max_dims"
/>
<data
count="max_dims"
inType="win:Int32"
name="dims"
/>
<data
inType="win:UInt8"
name="max_periods"
/>
<data
count="max_periods"
inType="win:Int32"
name="periods"
/>
<data
inType="win:UInt8"
name="max_coords"
/>
<data
count="max_coords"
inType="win:Int32"
name="coords"
/>
</template>
<template tid="MPI_Cart_map.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm_old"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="ndims"
/>
<data
inType="win:UInt8"
name="max_dims"
/>
<data
count="max_dims"
inType="win:Int32"
name="dims"
/>
<data
inType="win:UInt8"
name="max_periods"
/>
<data
count="max_periods"
inType="win:Int32"
name="periods"
/>
</template>
<template tid="MPI_Cart_map.leave">
<data
inType="win:UInt32"
map="MPI_Rank"
name="newrank"
/>
</template>
<template tid="MPI_Cart_rank.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
<data
inType="win:UInt8"
name="max_coords"
/>
<data
count="max_coords"
inType="win:Int32"
name="coords"
/>
</template>
<template tid="MPI_Cart_rank.leave">
<data
inType="win:UInt32"
map="MPI_Rank"
name="rank"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Cart_shift.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="direction"
/>
<data
inType="win:Int32"
name="displ"
/>
</template>
<template tid="MPI_Cart_shift.leave">
<data
inType="win:Int32"
name="source"
/>
<data
inType="win:Int32"
name="dest"
/>
</template>
<template tid="MPI_Cart_sub.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
<data
inType="win:UInt8"
name="max_remain_dims"
/>
<data
count="max_remain_dims"
inType="win:Int32"
name="remain_dims"
/>
</template>
<template tid="MPI_Cart_sub.leave">
<data
inType="win:UInt32"
name="newcomm"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Cartdim_get.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Cartdim_get.leave">
<data
inType="win:Int32"
name="ndims"
/>
</template>
<template tid="MPI_Dims_create.enter">
<data
inType="win:Int32"
name="nnodes"
/>
<data
inType="win:Int32"
name="ndims"
/>
</template>
<template tid="MPI_Graph_get.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="maxindes"
/>
<data
inType="win:Int32"
name="maxedges"
/>
</template>
<template tid="MPI_Graph_get.leave">
<data
inType="win:Int32"
name="indexcount"
/>
<data
inType="win:Pointer"
name="index"
/>
<data
inType="win:Int32"
name="edgecount"
/>
<data
inType="win:Pointer"
name="edges"
/>
</template>
<template tid="MPI_Graph_map.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="nnodes"
/>
</template>
<template tid="MPI_Graph_map.leave">
<data
inType="win:Int32"
name="newrank"
/>
</template>
<template tid="MPI_Graph_neighbors.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Rank"
name="rank"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="maxneighbors"
/>
</template>
<template tid="MPI_Graph_neighbors.leave">
<data
inType="win:Int32"
name="nneighbors"
/>
<data
inType="win:Pointer"
name="neighbors"
/>
</template>
<template tid="MPI_Graph_create.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="nnodes"
/>
<data
inType="win:Int32"
name="reorder"
/>
</template>
<template tid="MPI_Graph_create.leave">
<data
inType="win:UInt32"
name="newcomm"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Graphdims_get.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Graphdims_get.leave">
<data
inType="win:Int32"
name="nnodes"
/>
<data
inType="win:Int32"
name="nedges"
/>
</template>
<template tid="MPI_Graph_neighbors_count.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Rank"
name="rank"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Graph_neighbors_count.leave">
<data
inType="win:Int32"
name="nneighbors"
/>
</template>
<template tid="MPI_Dist_graph_neighbors_count.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Dist_graph_neighbors_count.leave">
<data
inType="win:Int32"
name="indegree"
/>
<data
inType="win:Int32"
name="outdegree"
/>
<data
inType="win:Int32"
name="weighted"
/>
</template>
<template tid="MPI_Dist_graph_neighbors.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
name="maxindegree"
/>
<data
inType="win:UInt32"
name="maxoutdegree"
/>
</template>
<template tid="MPI_Dist_graph_neighbors.leave">
<data
inType="win:Pointer"
name="sources"
/>
<data
inType="win:Pointer"
name="sourceweights"
/>
<data
inType="win:Pointer"
name="destinations"
/>
<data
inType="win:Pointer"
name="destweights"
/>
</template>
<template tid="MPI_Dist_graph_create_adjacent.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm_old"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
name="indegree"
/>
<data
inType="win:UInt32"
name="outdegree"
/>
<data
inType="win:UInt32"
map="MPI_Info"
name="info"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
name="reorder"
/>
</template>
<template tid="MPI_Dist_graph_create_adjacent.leave">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm_dist_graph"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Dist_graph_create.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm_old"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
name="n"
/>
<data
inType="win:UInt32"
map="MPI_Info"
name="info"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
name="reorder"
/>
</template>
<template tid="MPI_Dist_graph_create.leave">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm_dist_graph"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Topo_test.enter">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Topo_test.leave">
<data
inType="win:Int32"
name="topo_type"
/>
</template>
<template tid="function.error">
<data
inType="win:UInt32"
map="MPI_ERROR"
name="error_class"
outType="win:HexInt32"
/>
<data
inType="win:AnsiString"
name="error_string"
/>
</template>
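<!-- Channel progress templates: nd/shm/sock send and receive -->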
<template tid="nd.send.done">
<data
inType="win:Int32"
name="Rank"
/>
<data
inType="win:Int32"
name="VCRank"
/>
<data
inType="win:Int32"
name="MessageId"
/>
</template>
<template tid="shm.send.done">
<data
inType="win:Int32"
name="Rank"
/>
<data
inType="win:Int32"
name="VCRank"
/>
<data
inType="win:Int32"
name="MessageId"
/>
</template>
<template tid="sock.send.done">
<data
inType="win:Int32"
name="Rank"
/>
<data
inType="win:Int32"
name="VCRank"
/>
<data
inType="win:Int32"
name="MessageId"
/>
</template>
<template tid="nd.recv.done">
<data
inType="win:Int32"
name="Rank"
/>
<data
inType="win:Int32"
name="VCRank"
/>
<data
inType="win:Int32"
name="MessageId"
/>
</template>
<template tid="shm.recv.done">
<data
inType="win:Int32"
name="Rank"
/>
<data
inType="win:Int32"
name="VCRank"
/>
<data
inType="win:Int32"
name="MessageId"
/>
</template>
<template tid="sock.recv.done">
<data
inType="win:Int32"
name="Rank"
/>
<data
inType="win:Int32"
name="VCRank"
/>
<data
inType="win:Int32"
name="MessageId"
/>
</template>
<template tid="nd.send.inline">
<data
inType="win:Int32"
name="Rank"
/>
<data
inType="win:Int32"
name="VCRank"
/>
<data
inType="win:Int32"
name="MessageId"
/>
<data
inType="win:Int32"
name="IOV"
/>
<data
inType="win:UInt32"
name="Size"
/>
<data
inType="win:Int32"
name="Type"
/>
</template>
<template tid="shm.send.inline">
<data
inType="win:Int32"
name="Rank"
/>
<data
inType="win:Int32"
name="VCRank"
/>
<data
inType="win:Int32"
name="MessageId"
/>
<data
inType="win:Int32"
name="IOV"
/>
<data
inType="win:UInt32"
name="Size"
/>
<data
inType="win:Int32"
name="Type"
/>
</template>
<template tid="sock.send.inline">
<data
inType="win:Int32"
name="Rank"
/>
<data
inType="win:Int32"
name="VCRank"
/>
<data
inType="win:Int32"
name="MessageId"
/>
<data
inType="win:Int32"
name="IOV"
/>
<data
inType="win:UInt32"
name="Size"
/>
<data
inType="win:Int32"
name="Type"
/>
</template>
<template tid="nd.send.continue">
<data
inType="win:Int32"
name="Rank"
/>
<data
inType="win:Int32"
name="VCRank"
/>
<data
inType="win:Int32"
name="MessageId"
/>
<data
inType="win:Int32"
name="IOV"
/>
<data
inType="win:UInt32"
name="Size"
/>
</template>
<template tid="shm.send.continue">
<data
inType="win:Int32"
name="Rank"
/>
<data
inType="win:Int32"
name="VCRank"
/>
<data
inType="win:Int32"
name="MessageId"
/>
<data
inType="win:Int32"
name="IOV"
/>
<data
inType="win:UInt32"
name="Size"
/>
</template>
<template tid="sock.send.continue">
<data
inType="win:Int32"
name="Rank"
/>
<data
inType="win:Int32"
name="VCRank"
/>
<data
inType="win:Int32"
name="MessageId"
/>
<data
inType="win:Int32"
name="IOV"
/>
<data
inType="win:UInt32"
name="Size"
/>
</template>
<template tid="nd.send.head">
<data
inType="win:Int32"
name="Rank"
/>
<data
inType="win:Int32"
name="VCRank"
/>
<data
inType="win:Int32"
name="MessageId"
/>
<data
inType="win:Int32"
name="IOV"
/>
<data
inType="win:UInt32"
name="Size"
/>
<data
inType="win:Int32"
name="Type"
/>
</template>
<template tid="shm.send.head">
<data
inType="win:Int32"
name="Rank"
/>
<data
inType="win:Int32"
name="VCRank"
/>
<data
inType="win:Int32"
name="MessageId"
/>
<data
inType="win:Int32"
name="IOV"
/>
<data
inType="win:UInt32"
name="Size"
/>
<data
inType="win:Int32"
name="Type"
/>
</template>
<template tid="sock.send.head">
<data
inType="win:Int32"
name="Rank"
/>
<data
inType="win:Int32"
name="VCRank"
/>
<data
inType="win:Int32"
name="MessageId"
/>
<data
inType="win:Int32"
name="IOV"
/>
<data
inType="win:UInt32"
name="Size"
/>
<data
inType="win:Int32"
name="Type"
/>
</template>
<template tid="nd.recv.data">
<data
inType="win:Int32"
name="Rank"
/>
<data
inType="win:Int32"
name="VCRank"
/>
<data
inType="win:Int32"
name="MessageId"
/>
</template>
<template tid="shm.recv.data">
<data
inType="win:Int32"
name="Rank"
/>
<data
inType="win:Int32"
name="VCRank"
/>
<data
inType="win:Int32"
name="MessageId"
/>
</template>
<template tid="sock.recv.data">
<data
inType="win:Int32"
name="Rank"
/>
<data
inType="win:Int32"
name="VCRank"
/>
<data
inType="win:Int32"
name="MessageId"
/>
</template>
<template tid="nd.recv.packet">
<data
inType="win:Int32"
name="Rank"
/>
<data
inType="win:Int32"
name="VCRank"
/>
<data
inType="win:Int32"
name="MessageId"
/>
<data
inType="win:Int32"
name="Type"
/>
</template>
<template tid="shm.recv.packet">
<data
inType="win:Int32"
name="Rank"
/>
<data
inType="win:Int32"
name="VCRank"
/>
<data
inType="win:Int32"
name="MessageId"
/>
<data
inType="win:Int32"
name="Type"
/>
</template>
<template tid="sock.recv.packet">
<data
inType="win:Int32"
name="Rank"
/>
<data
inType="win:Int32"
name="VCRank"
/>
<data
inType="win:Int32"
name="MessageId"
/>
<data
inType="win:Int32"
name="Type"
/>
</template>
<template tid="nd.send.connect">
<data
inType="win:Int32"
name="Rank"
/>
<data
inType="win:Int32"
name="VCRank"
/>
<data
inType="win:Int32"
name="MessageId"
/>
<data
inType="win:Int32"
name="IOV"
/>
<data
inType="win:UInt32"
name="Size"
/>
<data
inType="win:Int32"
name="Type"
/>
</template>
<template tid="shm.send.connect">
<data
inType="win:Int32"
name="Rank"
/>
<data
inType="win:Int32"
name="VCRank"
/>
<data
inType="win:Int32"
name="MessageId"
/>
<data
inType="win:Int32"
name="IOV"
/>
<data
inType="win:UInt32"
name="Size"
/>
<data
inType="win:Int32"
name="Type"
/>
</template>
<template tid="sock.send.connect">
<data
inType="win:Int32"
name="Rank"
/>
<data
inType="win:Int32"
name="VCRank"
/>
<data
inType="win:Int32"
name="MessageId"
/>
<data
inType="win:Int32"
name="IOV"
/>
<data
inType="win:UInt32"
name="Size"
/>
<data
inType="win:Int32"
name="Type"
/>
</template>
<template tid="nd.send.queue">
<data
inType="win:Int32"
name="Rank"
/>
<data
inType="win:Int32"
name="VCRank"
/>
<data
inType="win:Int32"
name="MessageId"
/>
<data
inType="win:Int32"
name="IOV"
/>
<data
inType="win:UInt32"
name="Size"
/>
<data
inType="win:Int32"
name="Type"
/>
</template>
<template tid="shm.send.queue">
<data
inType="win:Int32"
name="Rank"
/>
<data
inType="win:Int32"
name="VCRank"
/>
<data
inType="win:Int32"
name="MessageId"
/>
<data
inType="win:Int32"
name="IOV"
/>
<data
inType="win:UInt32"
name="Size"
/>
<data
inType="win:Int32"
name="Type"
/>
</template>
<template tid="sock.send.queue">
<data
inType="win:Int32"
name="Rank"
/>
<data
inType="win:Int32"
name="VCRank"
/>
<data
inType="win:Int32"
name="MessageId"
/>
<data
inType="win:Int32"
name="IOV"
/>
<data
inType="win:UInt32"
name="Size"
/>
<data
inType="win:Int32"
name="Type"
/>
</template>
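<!-- Message-level send/receive trace templates -->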
<template tid="msg.recv">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="dest_rank"
/>
<data
inType="win:Int32"
name="src_rank"
/>
<data
inType="win:UInt32"
map="MPI_Tag"
name="tag"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
<data
inType="win:Pointer"
name="buf"
/>
<data
inType="win:Int32"
name="element_count"
/>
</template>
<template tid="msg.send">
<data
inType="win:UInt32"
map="MPI_Comm"
name="comm"
outType="win:HexInt32"
/>
<data
inType="win:Int32"
name="dest_rank"
/>
<data
inType="win:Int32"
name="src_rank"
/>
<data
inType="win:UInt32"
map="MPI_Tag"
name="tag"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="datatype"
outType="win:HexInt32"
/>
<data
inType="win:Pointer"
name="buf"
/>
<data
inType="win:Int32"
name="element_count"
/>
</template>
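<!-- MSMPI extension templates -->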
<template tid="MSMPI_Waitsome_interruptible.enter">
<data
inType="win:Int32"
name="incount"
/>
<data
inType="win:UInt8"
name="max_array_of_requests"
/>
<data
count="max_array_of_requests"
inType="win:UInt32"
name="array_of_requests"
outType="win:HexInt32"
/>
</template>
<template tid="MSMPI_Waitsome_interruptible.leave">
<data
inType="win:Int32"
name="outcount"
/>
<data
inType="win:UInt8"
name="max_array_of_indices"
/>
<data
count="max_array_of_indices"
inType="win:UInt32"
name="array_of_indicess"
outType="win:HexInt32"
/>
<data
inType="win:UInt8"
name="max_array_of_statuses"
/>
<struct
count="max_array_of_statuses"
name="array_of_statuses"
>
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:Int32"
name="cancelled"
/>
<data
inType="win:UInt32"
name="MPI_SOURCE"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_Tag"
name="MPI_TAG"
outType="win:HexInt32"
/>
<data
inType="win:UInt32"
map="MPI_ERROR"
name="MPI_ERROR"
outType="win:HexInt32"
/>
</struct>
</template>
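<!--
Variable-length payloads use the standard ETW convention seen above: a UInt8
length field (max_array_of_*) is emitted first, and the array that follows
names it in its count attribute, so decoders read a length-prefixed array.
array_of_statuses applies the same rule to a packed struct of MPI_Status
fields (count, cancelled, MPI_SOURCE, MPI_TAG, MPI_ERROR).
-->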
<template tid="MSMPI_Request_set_apc.enter">
<data
inType="win:UInt32"
name="request"
outType="win:HexInt32"
/>
<data
inType="win:Pointer"
name="callback_fn"
/>
<data
inType="win:Pointer"
name="callback_status"
/>
</template>
<template tid="MPI_Type_create_hindexed_block.enter">
<data
inType="win:Int32"
name="count"
/>
<data
inType="win:Int32"
name="blocklength"
/>
<data
inType="win:UInt8"
name="max_array_of_displacements"
/>
<data
count="max_array_of_displacements"
inType="win:Pointer"
name="array_of_displacements"
/>
<data
inType="win:UInt32"
map="MPI_Datatype"
name="oldtype"
outType="win:HexInt32"
/>
</template>
<template tid="MPI_Type_create_hindexed_block.leave">
<data
inType="win:UInt32"
name="newtype"
outType="win:HexInt32"
/>
</template>
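<!--
The defer templates below log only Rank, VCRank, and MessageId for each
channel, presumably because a deferred connect/write is recorded before any
payload (IOV/Size/Type) has been staged.
-->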
<template tid="nd.defer.connect">
<data
inType="win:Int32"
name="Rank"
/>
<data
inType="win:Int32"
name="VCRank"
/>
<data
inType="win:Int32"
name="MessageId"
/>
</template>
<template tid="shm.defer.connect">
<data
inType="win:Int32"
name="Rank"
/>
<data
inType="win:Int32"
name="VCRank"
/>
<data
inType="win:Int32"
name="MessageId"
/>
</template>
<template tid="sock.defer.connect">
<data
inType="win:Int32"
name="Rank"
/>
<data
inType="win:Int32"
name="VCRank"
/>
<data
inType="win:Int32"
name="MessageId"
/>
</template>
<template tid="nd.defer.write">
<data
inType="win:Int32"
name="Rank"
/>
<data
inType="win:Int32"
name="VCRank"
/>
<data
inType="win:Int32"
name="MessageId"
/>
</template>
<template tid="shm.defer.write">
<data
inType="win:Int32"
name="Rank"
/>
<data
inType="win:Int32"
name="VCRank"
/>
<data
inType="win:Int32"
name="MessageId"
/>
</template>
<template tid="sock.defer.write">
<data
inType="win:Int32"
name="Rank"
/>
<data
inType="win:Int32"
name="VCRank"
/>
<data
inType="win:Int32"
name="MessageId"
/>
</template>
</templates>
<!-- Events -->
<events>
<!-- Microsoft-Windows-HPC-MPI/Api events -->
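<!--
Every MPI API below is instrumented as a triple of events with consecutive
values: an enter event (opcode win:Start, keyword mpi:api_enter), a leave
event (win:Stop, mpi:api_leave; APIs with no output arguments reuse the
generic function.leave string and omit a template), and an error event
(win:Stop, mpi:api_leave mpi:api_error, generic function.error template).
The mpi:attr/coll/comm/dt/misc keywords group the APIs by area, so a trace
session can filter on one area at a time.

Illustrative capture sketch only: the session and output names below are
placeholders, <ProviderName> stands for the API provider declared elsewhere
in this manifest, and 0xFFFFFFFF enables all keywords rather than one area's
mask (the real masks are defined in the keyword declarations, not shown here):

  logman start mpi_api -p <ProviderName> 0xFFFFFFFF 5 -o mpi_api.etl -ets
  (run the MPI application)
  logman stop mpi_api -ets
-->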
<!--MPI_Comm_create_keyval-->
<event
channel="MpiApiChannel"
keywords="mpi:attr mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Comm_create_keyval.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Comm_create_keyval"
task="attr:MPI_Comm_create_keyval"
template="MPI_Comm_create_keyval.enter"
value="100"
/>
<event
channel="MpiApiChannel"
keywords="mpi:attr mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Comm_create_keyval.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Comm_create_keyval"
task="attr:MPI_Comm_create_keyval"
template="MPI_Comm_create_keyval.leave"
value="101"
/>
<event
channel="MpiApiChannel"
keywords="mpi:attr mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Comm_create_keyval"
task="attr:MPI_Comm_create_keyval"
template="function.error"
value="102"
/>
<!--MPI_Comm_delete_attr-->
<event
channel="MpiApiChannel"
keywords="mpi:attr mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Comm_delete_attr.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Comm_delete_attr"
task="attr:MPI_Comm_delete_attr"
template="MPI_Comm_delete_attr.enter"
value="103"
/>
<event
channel="MpiApiChannel"
keywords="mpi:attr mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Comm_delete_attr"
task="attr:MPI_Comm_delete_attr"
value="104"
/>
<event
channel="MpiApiChannel"
keywords="mpi:attr mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Comm_delete_attr"
task="attr:MPI_Comm_delete_attr"
template="function.error"
value="105"
/>
<!--MPI_Comm_free_keyval-->
<event
channel="MpiApiChannel"
keywords="mpi:attr mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Comm_free_keyval.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Comm_free_keyval"
task="attr:MPI_Comm_free_keyval"
template="MPI_Comm_free_keyval.enter"
value="106"
/>
<event
channel="MpiApiChannel"
keywords="mpi:attr mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Comm_free_keyval"
task="attr:MPI_Comm_free_keyval"
value="107"
/>
<event
channel="MpiApiChannel"
keywords="mpi:attr mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Comm_free_keyval"
task="attr:MPI_Comm_free_keyval"
template="function.error"
value="108"
/>
<!--MPI_Comm_get_attr-->
<event
channel="MpiApiChannel"
keywords="mpi:attr mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Comm_get_attr.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Comm_get_attr"
task="attr:MPI_Comm_get_attr"
template="MPI_Comm_get_attr.enter"
value="109"
/>
<event
channel="MpiApiChannel"
keywords="mpi:attr mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Comm_get_attr.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Comm_get_attr"
task="attr:MPI_Comm_get_attr"
template="MPI_Comm_get_attr.leave"
value="110"
/>
<event
channel="MpiApiChannel"
keywords="mpi:attr mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Comm_get_attr"
task="attr:MPI_Comm_get_attr"
template="function.error"
value="111"
/>
<!--MPI_Comm_set_attr-->
<event
channel="MpiApiChannel"
keywords="mpi:attr mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Comm_set_attr.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Comm_set_attr"
task="attr:MPI_Comm_set_attr"
template="MPI_Comm_set_attr.enter"
value="112"
/>
<event
channel="MpiApiChannel"
keywords="mpi:attr mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Comm_set_attr"
task="attr:MPI_Comm_set_attr"
value="113"
/>
<event
channel="MpiApiChannel"
keywords="mpi:attr mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Comm_set_attr"
task="attr:MPI_Comm_set_attr"
template="function.error"
value="114"
/>
<!--MPI_Type_create_keyval-->
<event
channel="MpiApiChannel"
keywords="mpi:attr mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Type_create_keyval.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Type_create_keyval"
task="attr:MPI_Type_create_keyval"
template="MPI_Type_create_keyval.enter"
value="115"
/>
<event
channel="MpiApiChannel"
keywords="mpi:attr mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Type_create_keyval.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Type_create_keyval"
task="attr:MPI_Type_create_keyval"
template="MPI_Type_create_keyval.leave"
value="116"
/>
<event
channel="MpiApiChannel"
keywords="mpi:attr mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Type_create_keyval"
task="attr:MPI_Type_create_keyval"
template="function.error"
value="117"
/>
<!--MPI_Type_delete_attr-->
<event
channel="MpiApiChannel"
keywords="mpi:attr mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Type_delete_attr.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Type_delete_attr"
task="attr:MPI_Type_delete_attr"
template="MPI_Type_delete_attr.enter"
value="118"
/>
<event
channel="MpiApiChannel"
keywords="mpi:attr mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Type_delete_attr"
task="attr:MPI_Type_delete_attr"
value="119"
/>
<event
channel="MpiApiChannel"
keywords="mpi:attr mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Type_delete_attr"
task="attr:MPI_Type_delete_attr"
template="function.error"
value="120"
/>
<!--MPI_Type_free_keyval-->
<event
channel="MpiApiChannel"
keywords="mpi:attr mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Type_free_keyval.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Type_free_keyval"
task="attr:MPI_Type_free_keyval"
template="MPI_Type_free_keyval.enter"
value="121"
/>
<event
channel="MpiApiChannel"
keywords="mpi:attr mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Type_free_keyval"
task="attr:MPI_Type_free_keyval"
value="122"
/>
<event
channel="MpiApiChannel"
keywords="mpi:attr mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Type_free_keyval"
task="attr:MPI_Type_free_keyval"
template="function.error"
value="123"
/>
<!--MPI_Type_get_attr-->
<event
channel="MpiApiChannel"
keywords="mpi:attr mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Type_get_attr.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Type_get_attr"
task="attr:MPI_Type_get_attr"
template="MPI_Type_get_attr.enter"
value="124"
/>
<event
channel="MpiApiChannel"
keywords="mpi:attr mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Type_get_attr.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Type_get_attr"
task="attr:MPI_Type_get_attr"
template="MPI_Type_get_attr.leave"
value="125"
/>
<event
channel="MpiApiChannel"
keywords="mpi:attr mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Type_get_attr"
task="attr:MPI_Type_get_attr"
template="function.error"
value="126"
/>
<!--MPI_Type_set_attr-->
<event
channel="MpiApiChannel"
keywords="mpi:attr mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Type_set_attr.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Type_set_attr"
task="attr:MPI_Type_set_attr"
template="MPI_Type_set_attr.enter"
value="127"
/>
<event
channel="MpiApiChannel"
keywords="mpi:attr mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Type_set_attr"
task="attr:MPI_Type_set_attr"
value="128"
/>
<event
channel="MpiApiChannel"
keywords="mpi:attr mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Type_set_attr"
task="attr:MPI_Type_set_attr"
template="function.error"
value="129"
/>
<!--MPI_Win_create_keyval-->
<event
channel="MpiApiChannel"
keywords="mpi:attr mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Win_create_keyval.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Win_create_keyval"
task="attr:MPI_Win_create_keyval"
template="MPI_Win_create_keyval.enter"
value="130"
/>
<event
channel="MpiApiChannel"
keywords="mpi:attr mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Win_create_keyval.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Win_create_keyval"
task="attr:MPI_Win_create_keyval"
template="MPI_Win_create_keyval.leave"
value="131"
/>
<event
channel="MpiApiChannel"
keywords="mpi:attr mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Win_create_keyval"
task="attr:MPI_Win_create_keyval"
template="function.error"
value="132"
/>
<!--MPI_Win_delete_attr-->
<event
channel="MpiApiChannel"
keywords="mpi:attr mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Win_delete_attr.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Win_delete_attr"
task="attr:MPI_Win_delete_attr"
template="MPI_Win_delete_attr.enter"
value="133"
/>
<event
channel="MpiApiChannel"
keywords="mpi:attr mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Win_delete_attr"
task="attr:MPI_Win_delete_attr"
value="134"
/>
<event
channel="MpiApiChannel"
keywords="mpi:attr mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Win_delete_attr"
task="attr:MPI_Win_delete_attr"
template="function.error"
value="135"
/>
<!--MPI_Win_free_keyval-->
<event
channel="MpiApiChannel"
keywords="mpi:attr mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Win_free_keyval.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Win_free_keyval"
task="attr:MPI_Win_free_keyval"
template="MPI_Win_free_keyval.enter"
value="136"
/>
<event
channel="MpiApiChannel"
keywords="mpi:attr mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Win_free_keyval"
task="attr:MPI_Win_free_keyval"
value="137"
/>
<event
channel="MpiApiChannel"
keywords="mpi:attr mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Win_free_keyval"
task="attr:MPI_Win_free_keyval"
template="function.error"
value="138"
/>
<!--MPI_Win_get_attr-->
<event
channel="MpiApiChannel"
keywords="mpi:attr mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Win_get_attr.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Win_get_attr"
task="attr:MPI_Win_get_attr"
template="MPI_Win_get_attr.enter"
value="139"
/>
<event
channel="MpiApiChannel"
keywords="mpi:attr mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Win_get_attr.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Win_get_attr"
task="attr:MPI_Win_get_attr"
template="MPI_Win_get_attr.leave"
value="140"
/>
<event
channel="MpiApiChannel"
keywords="mpi:attr mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Win_get_attr"
task="attr:MPI_Win_get_attr"
template="function.error"
value="141"
/>
<!--MPI_Win_set_attr-->
<event
channel="MpiApiChannel"
keywords="mpi:attr mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Win_set_attr.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Win_set_attr"
task="attr:MPI_Win_set_attr"
template="MPI_Win_set_attr.enter"
value="142"
/>
<event
channel="MpiApiChannel"
keywords="mpi:attr mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Win_set_attr"
task="attr:MPI_Win_set_attr"
value="143"
/>
<event
channel="MpiApiChannel"
keywords="mpi:attr mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Win_set_attr"
task="attr:MPI_Win_set_attr"
template="function.error"
value="144"
/>
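<!-- Collective operation events follow: keyword mpi:coll, tasks in the coll: namespace. -->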
<!--MPI_Allgather-->
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Allgather.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Allgather"
task="coll:MPI_Allgather"
template="MPI_Allgather.enter"
value="145"
/>
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Allgather"
task="coll:MPI_Allgather"
value="146"
/>
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Allgather"
task="coll:MPI_Allgather"
template="function.error"
value="147"
/>
<!--MPI_Allgatherv-->
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Allgatherv.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Allgatherv"
task="coll:MPI_Allgatherv"
template="MPI_Allgatherv.enter"
value="148"
/>
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Allgatherv"
task="coll:MPI_Allgatherv"
value="149"
/>
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Allgatherv"
task="coll:MPI_Allgatherv"
template="function.error"
value="150"
/>
<!--MPI_Allreduce-->
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Allreduce.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Allreduce"
task="coll:MPI_Allreduce"
template="MPI_Allreduce.enter"
value="151"
/>
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Allreduce"
task="coll:MPI_Allreduce"
value="152"
/>
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Allreduce"
task="coll:MPI_Allreduce"
template="function.error"
value="153"
/>
<!--MPI_Alltoall-->
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Alltoall.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Alltoall"
task="coll:MPI_Alltoall"
template="MPI_Alltoall.enter"
value="154"
/>
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Alltoall"
task="coll:MPI_Alltoall"
value="155"
/>
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Alltoall"
task="coll:MPI_Alltoall"
template="function.error"
value="156"
/>
<!--MPI_Alltoallv-->
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Alltoallv.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Alltoallv"
task="coll:MPI_Alltoallv"
template="MPI_Alltoallv.enter"
value="157"
/>
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Alltoallv"
task="coll:MPI_Alltoallv"
value="158"
/>
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Alltoallv"
task="coll:MPI_Alltoallv"
template="function.error"
value="159"
/>
<!--MPI_Alltoallw-->
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Alltoallw.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Alltoallw"
task="coll:MPI_Alltoallw"
template="MPI_Alltoallw.enter"
value="160"
/>
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Alltoallw"
task="coll:MPI_Alltoallw"
value="161"
/>
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Alltoallw"
task="coll:MPI_Alltoallw"
template="function.error"
value="162"
/>
<!--MPI_Barrier-->
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Barrier.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Barrier"
task="coll:MPI_Barrier"
template="MPI_Barrier.enter"
value="163"
/>
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Barrier"
task="coll:MPI_Barrier"
value="164"
/>
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Barrier"
task="coll:MPI_Barrier"
template="function.error"
value="165"
/>
<!--MPI_Bcast-->
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Bcast.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Bcast"
task="coll:MPI_Bcast"
template="MPI_Bcast.enter"
value="166"
/>
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Bcast"
task="coll:MPI_Bcast"
value="167"
/>
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Bcast"
task="coll:MPI_Bcast"
template="function.error"
value="168"
/>
<!--MPI_Exscan-->
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Exscan.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Exscan"
task="coll:MPI_Exscan"
template="MPI_Exscan.enter"
value="169"
/>
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Exscan"
task="coll:MPI_Exscan"
value="170"
/>
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Exscan"
task="coll:MPI_Exscan"
template="function.error"
value="171"
/>
<!--MPI_Gather-->
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Gather.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Gather"
task="coll:MPI_Gather"
template="MPI_Gather.enter"
value="172"
/>
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Gather"
task="coll:MPI_Gather"
value="173"
/>
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Gather"
task="coll:MPI_Gather"
template="function.error"
value="174"
/>
<!--MPI_Gatherv-->
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Gatherv.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Gatherv"
task="coll:MPI_Gatherv"
template="MPI_Gatherv.enter"
value="175"
/>
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Gatherv"
task="coll:MPI_Gatherv"
value="176"
/>
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Gatherv"
task="coll:MPI_Gatherv"
template="function.error"
value="177"
/>
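<!-- MPI_Op_create and MPI_Op_free are tagged mpi:misc (task namespace misc:) even though they appear among the collectives. -->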
<!--MPI_Op_create-->
<event
channel="MpiApiChannel"
keywords="mpi:misc mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Op_create.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Op_create"
task="misc:MPI_Op_create"
template="MPI_Op_create.enter"
value="178"
/>
<event
channel="MpiApiChannel"
keywords="mpi:misc mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Op_create.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Op_create"
task="misc:MPI_Op_create"
template="MPI_Op_create.leave"
value="179"
/>
<event
channel="MpiApiChannel"
keywords="mpi:misc mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Op_create"
task="misc:MPI_Op_create"
template="function.error"
value="180"
/>
<!--MPI_Op_free-->
<event
channel="MpiApiChannel"
keywords="mpi:misc mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Op_free.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Op_free"
task="misc:MPI_Op_free"
template="MPI_Op_free.enter"
value="181"
/>
<event
channel="MpiApiChannel"
keywords="mpi:misc mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Op_free"
task="misc:MPI_Op_free"
value="182"
/>
<event
channel="MpiApiChannel"
keywords="mpi:misc mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Op_free"
task="misc:MPI_Op_free"
template="function.error"
value="183"
/>
<!--MPI_Reduce_scatter-->
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Reduce_scatter.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Reduce_scatter"
task="coll:MPI_Reduce_scatter"
template="MPI_Reduce_scatter.enter"
value="184"
/>
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Reduce_scatter"
task="coll:MPI_Reduce_scatter"
value="185"
/>
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Reduce_scatter"
task="coll:MPI_Reduce_scatter"
template="function.error"
value="186"
/>
<!--MPI_Reduce-->
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Reduce.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Reduce"
task="coll:MPI_Reduce"
template="MPI_Reduce.enter"
value="187"
/>
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Reduce"
task="coll:MPI_Reduce"
value="188"
/>
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Reduce"
task="coll:MPI_Reduce"
template="function.error"
value="189"
/>
<!--MPI_Scan-->
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Scan.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Scan"
task="coll:MPI_Scan"
template="MPI_Scan.enter"
value="190"
/>
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Scan"
task="coll:MPI_Scan"
value="191"
/>
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Scan"
task="coll:MPI_Scan"
template="function.error"
value="192"
/>
<!--MPI_Scatter-->
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Scatter.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Scatter"
task="coll:MPI_Scatter"
template="MPI_Scatter.enter"
value="193"
/>
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Scatter"
task="coll:MPI_Scatter"
value="194"
/>
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Scatter"
task="coll:MPI_Scatter"
template="function.error"
value="195"
/>
<!--MPI_Scatterv-->
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Scatterv.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Scatterv"
task="coll:MPI_Scatterv"
template="MPI_Scatterv.enter"
value="196"
/>
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Scatterv"
task="coll:MPI_Scatterv"
value="197"
/>
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Scatterv"
task="coll:MPI_Scatterv"
template="function.error"
value="198"
/>
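<!-- Communicator management events follow: keyword mpi:comm, tasks in the comm: namespace. -->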
<!--MPI_Comm_compare-->
<event
channel="MpiApiChannel"
keywords="mpi:comm mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Comm_compare.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Comm_compare"
task="comm:MPI_Comm_compare"
template="MPI_Comm_compare.enter"
value="199"
/>
<event
channel="MpiApiChannel"
keywords="mpi:comm mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Comm_compare.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Comm_compare"
task="comm:MPI_Comm_compare"
template="MPI_Comm_compare.leave"
value="200"
/>
<event
channel="MpiApiChannel"
keywords="mpi:comm mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Comm_compare"
task="comm:MPI_Comm_compare"
template="function.error"
value="201"
/>
<!--MPI_Comm_create-->
<event
channel="MpiApiChannel"
keywords="mpi:comm mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Comm_create.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Comm_create"
task="comm:MPI_Comm_create"
template="MPI_Comm_create.enter"
value="202"
/>
<event
channel="MpiApiChannel"
keywords="mpi:comm mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Comm_create.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Comm_create"
task="comm:MPI_Comm_create"
template="MPI_Comm_create.leave"
value="203"
/>
<event
channel="MpiApiChannel"
keywords="mpi:comm mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Comm_create"
task="comm:MPI_Comm_create"
template="function.error"
value="204"
/>
<!--MPI_Comm_dup-->
<event
channel="MpiApiChannel"
keywords="mpi:comm mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Comm_dup.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Comm_dup"
task="comm:MPI_Comm_dup"
template="MPI_Comm_dup.enter"
value="205"
/>
<event
channel="MpiApiChannel"
keywords="mpi:comm mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Comm_dup.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Comm_dup"
task="comm:MPI_Comm_dup"
template="MPI_Comm_dup.leave"
value="206"
/>
<event
channel="MpiApiChannel"
keywords="mpi:comm mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Comm_dup"
task="comm:MPI_Comm_dup"
template="function.error"
value="207"
/>
<!--MPI_Comm_free-->
<event
channel="MpiApiChannel"
keywords="mpi:comm mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Comm_free.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Comm_free"
task="comm:MPI_Comm_free"
template="MPI_Comm_free.enter"
value="208"
/>
<event
channel="MpiApiChannel"
keywords="mpi:comm mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Comm_free"
task="comm:MPI_Comm_free"
value="209"
/>
<event
channel="MpiApiChannel"
keywords="mpi:comm mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Comm_free"
task="comm:MPI_Comm_free"
template="function.error"
value="210"
/>
<!--MPI_Comm_get_name-->
<event
channel="MpiApiChannel"
keywords="mpi:comm mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Comm_get_name.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Comm_get_name"
task="comm:MPI_Comm_get_name"
template="MPI_Comm_get_name.enter"
value="211"
/>
<event
channel="MpiApiChannel"
keywords="mpi:comm mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Comm_get_name.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Comm_get_name"
task="comm:MPI_Comm_get_name"
template="MPI_Comm_get_name.leave"
value="212"
/>
<event
channel="MpiApiChannel"
keywords="mpi:comm mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Comm_get_name"
task="comm:MPI_Comm_get_name"
template="function.error"
value="213"
/>
<!--MPI_Comm_group-->
<event
channel="MpiApiChannel"
keywords="mpi:comm mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Comm_group.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Comm_group"
task="comm:MPI_Comm_group"
template="MPI_Comm_group.enter"
value="214"
/>
<event
channel="MpiApiChannel"
keywords="mpi:comm mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Comm_group.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Comm_group"
task="comm:MPI_Comm_group"
template="MPI_Comm_group.leave"
value="215"
/>
<event
channel="MpiApiChannel"
keywords="mpi:comm mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Comm_group"
task="comm:MPI_Comm_group"
template="function.error"
value="216"
/>
<!--MPI_Comm_rank-->
<event
channel="MpiApiChannel"
keywords="mpi:comm mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Comm_rank.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Comm_rank"
task="comm:MPI_Comm_rank"
template="MPI_Comm_rank.enter"
value="217"
/>
<event
channel="MpiApiChannel"
keywords="mpi:comm mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Comm_rank.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Comm_rank"
task="comm:MPI_Comm_rank"
template="MPI_Comm_rank.leave"
value="218"
/>
<event
channel="MpiApiChannel"
keywords="mpi:comm mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Comm_rank"
task="comm:MPI_Comm_rank"
template="function.error"
value="219"
/>
<!--MPI_Comm_remote_group-->
<event
channel="MpiApiChannel"
keywords="mpi:comm mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Comm_remote_group.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Comm_remote_group"
task="comm:MPI_Comm_remote_group"
template="MPI_Comm_remote_group.enter"
value="220"
/>
<event
channel="MpiApiChannel"
keywords="mpi:comm mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Comm_remote_group.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Comm_remote_group"
task="comm:MPI_Comm_remote_group"
template="MPI_Comm_remote_group.leave"
value="221"
/>
<event
channel="MpiApiChannel"
keywords="mpi:comm mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Comm_remote_group"
task="comm:MPI_Comm_remote_group"
template="function.error"
value="222"
/>
<!--MPI_Comm_remote_size-->
<event
channel="MpiApiChannel"
keywords="mpi:comm mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Comm_remote_size.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Comm_remote_size"
task="comm:MPI_Comm_remote_size"
template="MPI_Comm_remote_size.enter"
value="223"
/>
<event
channel="MpiApiChannel"
keywords="mpi:comm mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Comm_remote_size.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Comm_remote_size"
task="comm:MPI_Comm_remote_size"
template="MPI_Comm_remote_size.leave"
value="224"
/>
<event
channel="MpiApiChannel"
keywords="mpi:comm mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Comm_remote_size"
task="comm:MPI_Comm_remote_size"
template="function.error"
value="225"
/>
<!--MPI_Comm_set_name-->
<event
channel="MpiApiChannel"
keywords="mpi:comm mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Comm_set_name.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Comm_set_name"
task="comm:MPI_Comm_set_name"
template="MPI_Comm_set_name.enter"
value="226"
/>
<event
channel="MpiApiChannel"
keywords="mpi:comm mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Comm_set_name"
task="comm:MPI_Comm_set_name"
value="227"
/>
<event
channel="MpiApiChannel"
keywords="mpi:comm mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Comm_set_name"
task="comm:MPI_Comm_set_name"
template="function.error"
value="228"
/>
<!--MPI_Comm_size-->
<event
channel="MpiApiChannel"
keywords="mpi:comm mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Comm_size.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Comm_size"
task="comm:MPI_Comm_size"
template="MPI_Comm_size.enter"
value="229"
/>
<event
channel="MpiApiChannel"
keywords="mpi:comm mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Comm_size.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Comm_size"
task="comm:MPI_Comm_size"
template="MPI_Comm_size.leave"
value="230"
/>
<event
channel="MpiApiChannel"
keywords="mpi:comm mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Comm_size"
task="comm:MPI_Comm_size"
template="function.error"
value="231"
/>
<!--MPI_Comm_split-->
<event
channel="MpiApiChannel"
keywords="mpi:comm mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Comm_split.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Comm_split"
task="comm:MPI_Comm_split"
template="MPI_Comm_split.enter"
value="232"
/>
<event
channel="MpiApiChannel"
keywords="mpi:comm mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Comm_split.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Comm_split"
task="comm:MPI_Comm_split"
template="MPI_Comm_split.leave"
value="233"
/>
<event
channel="MpiApiChannel"
keywords="mpi:comm mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Comm_split"
task="comm:MPI_Comm_split"
template="function.error"
value="234"
/>
<!--MPI_Comm_test_inter-->
<event
channel="MpiApiChannel"
keywords="mpi:comm mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Comm_test_inter.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Comm_test_inter"
task="comm:MPI_Comm_test_inter"
template="MPI_Comm_test_inter.enter"
value="235"
/>
<event
channel="MpiApiChannel"
keywords="mpi:comm mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Comm_test_inter.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Comm_test_inter"
task="comm:MPI_Comm_test_inter"
template="MPI_Comm_test_inter.leave"
value="236"
/>
<event
channel="MpiApiChannel"
keywords="mpi:comm mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Comm_test_inter"
task="comm:MPI_Comm_test_inter"
template="function.error"
value="237"
/>
<!--MPI_Intercomm_create-->
<event
channel="MpiApiChannel"
keywords="mpi:comm mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Intercomm_create.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Intercomm_create"
task="comm:MPI_Intercomm_create"
template="MPI_Intercomm_create.enter"
value="238"
/>
<event
channel="MpiApiChannel"
keywords="mpi:comm mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Intercomm_create.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Intercomm_create"
task="comm:MPI_Intercomm_create"
template="MPI_Intercomm_create.leave"
value="239"
/>
<event
channel="MpiApiChannel"
keywords="mpi:comm mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Intercomm_create"
task="comm:MPI_Intercomm_create"
template="function.error"
value="240"
/>
<!--MPI_Intercomm_merge-->
<event
channel="MpiApiChannel"
keywords="mpi:comm mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Intercomm_merge.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Intercomm_merge"
task="comm:MPI_Intercomm_merge"
template="MPI_Intercomm_merge.enter"
value="241"
/>
<event
channel="MpiApiChannel"
keywords="mpi:comm mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Intercomm_merge.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Intercomm_merge"
task="comm:MPI_Intercomm_merge"
template="MPI_Intercomm_merge.leave"
value="242"
/>
<event
channel="MpiApiChannel"
keywords="mpi:comm mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Intercomm_merge"
task="comm:MPI_Intercomm_merge"
template="function.error"
value="243"
/>
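<!-- Datatype events follow: keyword mpi:dt, tasks in the dt: namespace. -->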
<!--MPI_Get_address-->
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Get_address.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Get_address"
task="dt:MPI_Get_address"
template="MPI_Get_address.enter"
value="244"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Get_address.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Get_address"
task="dt:MPI_Get_address"
template="MPI_Get_address.leave"
value="245"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Get_address"
task="dt:MPI_Get_address"
template="function.error"
value="246"
/>
<!--MPI_Get_count-->
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Get_count.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Get_count"
task="dt:MPI_Get_count"
template="MPI_Get_count.enter"
value="247"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Get_count.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Get_count"
task="dt:MPI_Get_count"
template="MPI_Get_count.leave"
value="248"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Get_count"
task="dt:MPI_Get_count"
template="function.error"
value="249"
/>
<!--MPI_Get_elements-->
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Get_elements.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Get_elements"
task="dt:MPI_Get_elements"
template="MPI_Get_elements.enter"
value="250"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Get_elements.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Get_elements"
task="dt:MPI_Get_elements"
template="MPI_Get_elements.leave"
value="251"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Get_elements"
task="dt:MPI_Get_elements"
template="function.error"
value="252"
/>
<!--MPI_Pack-->
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Pack.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Pack"
task="dt:MPI_Pack"
template="MPI_Pack.enter"
value="253"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Pack.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Pack"
task="dt:MPI_Pack"
template="MPI_Pack.leave"
value="254"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Pack"
task="dt:MPI_Pack"
template="function.error"
value="255"
/>
<!--MPI_Pack_external-->
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Pack_external.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Pack_external"
task="dt:MPI_Pack_external"
template="MPI_Pack_external.enter"
value="256"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Pack_external.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Pack_external"
task="dt:MPI_Pack_external"
template="MPI_Pack_external.leave"
value="257"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Pack_external"
task="dt:MPI_Pack_external"
template="function.error"
value="258"
/>
<!--MPI_Pack_external_size-->
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Pack_external_size.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Pack_external_size"
task="dt:MPI_Pack_external_size"
template="MPI_Pack_external_size.enter"
value="259"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Pack_external_size.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Pack_external_size"
task="dt:MPI_Pack_external_size"
template="MPI_Pack_external_size.leave"
value="260"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Pack_external_size"
task="dt:MPI_Pack_external_size"
template="function.error"
value="261"
/>
<!--MPI_Pack_size-->
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Pack_size.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Pack_size"
task="dt:MPI_Pack_size"
template="MPI_Pack_size.enter"
value="262"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Pack_size.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Pack_size"
task="dt:MPI_Pack_size"
template="MPI_Pack_size.leave"
value="263"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Pack_size"
task="dt:MPI_Pack_size"
template="function.error"
value="264"
/>
<!--MPI_Register_datarep-->
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Register_datarep.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Register_datarep"
task="dt:MPI_Register_datarep"
template="MPI_Register_datarep.enter"
value="265"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Register_datarep"
task="dt:MPI_Register_datarep"
value="266"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Register_datarep"
task="dt:MPI_Register_datarep"
template="function.error"
value="267"
/>
<!--MPI_Status_set_elements-->
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Status_set_elements.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Status_set_elements"
task="dt:MPI_Status_set_elements"
template="MPI_Status_set_elements.enter"
value="268"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Status_set_elements.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Status_set_elements"
task="dt:MPI_Status_set_elements"
template="MPI_Status_set_elements.leave"
value="269"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Status_set_elements"
task="dt:MPI_Status_set_elements"
template="function.error"
value="270"
/>
<!--MPI_Type_commit-->
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Type_commit.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Type_commit"
task="dt:MPI_Type_commit"
template="MPI_Type_commit.enter"
value="271"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Type_commit"
task="dt:MPI_Type_commit"
value="272"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Type_commit"
task="dt:MPI_Type_commit"
template="function.error"
value="273"
/>
<!--MPI_Type_contiguous-->
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Type_contiguous.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Type_contiguous"
task="dt:MPI_Type_contiguous"
template="MPI_Type_contiguous.enter"
value="274"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Type_contiguous.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Type_contiguous"
task="dt:MPI_Type_contiguous"
template="MPI_Type_contiguous.leave"
value="275"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Type_contiguous"
task="dt:MPI_Type_contiguous"
template="function.error"
value="276"
/>
<!--MPI_Type_create_darray-->
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Type_create_darray.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Type_create_darray"
task="dt:MPI_Type_create_darray"
template="MPI_Type_create_darray.enter"
value="277"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Type_create_darray.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Type_create_darray"
task="dt:MPI_Type_create_darray"
template="MPI_Type_create_darray.leave"
value="278"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Type_create_darray"
task="dt:MPI_Type_create_darray"
template="function.error"
value="279"
/>
<!--MPI_Type_create_hindexed-->
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Type_create_hindexed.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Type_create_hindexed"
task="dt:MPI_Type_create_hindexed"
template="MPI_Type_create_hindexed.enter"
value="280"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Type_create_hindexed.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Type_create_hindexed"
task="dt:MPI_Type_create_hindexed"
template="MPI_Type_create_hindexed.leave"
value="281"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Type_create_hindexed"
task="dt:MPI_Type_create_hindexed"
template="function.error"
value="282"
/>
<!--MPI_Type_create_hvector-->
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Type_create_hvector.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Type_create_hvector"
task="dt:MPI_Type_create_hvector"
template="MPI_Type_create_hvector.enter"
value="283"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Type_create_hvector.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Type_create_hvector"
task="dt:MPI_Type_create_hvector"
template="MPI_Type_create_hvector.leave"
value="284"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Type_create_hvector"
task="dt:MPI_Type_create_hvector"
template="function.error"
value="285"
/>
<!--MPI_Type_create_indexed_block-->
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Type_create_indexed_block.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Type_create_indexed_block"
task="dt:MPI_Type_create_indexed_block"
template="MPI_Type_create_indexed_block.enter"
value="286"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Type_create_indexed_block.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Type_create_indexed_block"
task="dt:MPI_Type_create_indexed_block"
template="MPI_Type_create_indexed_block.leave"
value="287"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Type_create_indexed_block"
task="dt:MPI_Type_create_indexed_block"
template="function.error"
value="288"
/>
<!--MPI_Type_create_resized-->
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Type_create_resized.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Type_create_resized"
task="dt:MPI_Type_create_resized"
template="MPI_Type_create_resized.enter"
value="289"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Type_create_resized.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Type_create_resized"
task="dt:MPI_Type_create_resized"
template="MPI_Type_create_resized.leave"
value="290"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Type_create_resized"
task="dt:MPI_Type_create_resized"
template="function.error"
value="291"
/>
<!--MPI_Type_create_struct-->
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Type_create_struct.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Type_create_struct"
task="dt:MPI_Type_create_struct"
template="MPI_Type_create_struct.enter"
value="292"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Type_create_struct.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Type_create_struct"
task="dt:MPI_Type_create_struct"
template="MPI_Type_create_struct.leave"
value="293"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Type_create_struct"
task="dt:MPI_Type_create_struct"
template="function.error"
value="294"
/>
<!--MPI_Type_create_subarray-->
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Type_create_subarray.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Type_create_subarray"
task="dt:MPI_Type_create_subarray"
template="MPI_Type_create_subarray.enter"
value="295"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Type_create_subarray.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Type_create_subarray"
task="dt:MPI_Type_create_subarray"
template="MPI_Type_create_subarray.leave"
value="296"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Type_create_subarray"
task="dt:MPI_Type_create_subarray"
template="function.error"
value="297"
/>
<!--MPI_Type_dup-->
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Type_dup.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Type_dup"
task="dt:MPI_Type_dup"
template="MPI_Type_dup.enter"
value="298"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Type_dup.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Type_dup"
task="dt:MPI_Type_dup"
template="MPI_Type_dup.leave"
value="299"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Type_dup"
task="dt:MPI_Type_dup"
template="function.error"
value="300"
/>
<!--MPI_Type_free-->
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Type_free.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Type_free"
task="dt:MPI_Type_free"
template="MPI_Type_free.enter"
value="301"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Type_free"
task="dt:MPI_Type_free"
value="302"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Type_free"
task="dt:MPI_Type_free"
template="function.error"
value="303"
/>
<!--MPI_Type_get_contents-->
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Type_get_contents.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Type_get_contents"
task="dt:MPI_Type_get_contents"
template="MPI_Type_get_contents.enter"
value="304"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Type_get_contents.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Type_get_contents"
task="dt:MPI_Type_get_contents"
template="MPI_Type_get_contents.leave"
value="305"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Type_get_contents"
task="dt:MPI_Type_get_contents"
template="function.error"
value="306"
/>
<!--MPI_Type_get_envelope-->
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Type_get_envelope.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Type_get_envelope"
task="dt:MPI_Type_get_envelope"
template="MPI_Type_get_envelope.enter"
value="307"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Type_get_envelope.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Type_get_envelope"
task="dt:MPI_Type_get_envelope"
template="MPI_Type_get_envelope.leave"
value="308"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Type_get_envelope"
task="dt:MPI_Type_get_envelope"
template="function.error"
value="309"
/>
<!--MPI_Type_get_extent-->
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Type_get_extent.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Type_get_extent"
task="dt:MPI_Type_get_extent"
template="MPI_Type_get_extent.enter"
value="310"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Type_get_extent.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Type_get_extent"
task="dt:MPI_Type_get_extent"
template="MPI_Type_get_extent.leave"
value="311"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Type_get_extent"
task="dt:MPI_Type_get_extent"
template="function.error"
value="312"
/>
<!--MPI_Type_get_name-->
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Type_get_name.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Type_get_name"
task="dt:MPI_Type_get_name"
template="MPI_Type_get_name.enter"
value="313"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Type_get_name.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Type_get_name"
task="dt:MPI_Type_get_name"
template="MPI_Type_get_name.leave"
value="314"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Type_get_name"
task="dt:MPI_Type_get_name"
template="function.error"
value="315"
/>
<!--MPI_Type_get_true_extent-->
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Type_get_true_extent.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Type_get_true_extent"
task="dt:MPI_Type_get_true_extent"
template="MPI_Type_get_true_extent.enter"
value="316"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Type_get_true_extent.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Type_get_true_extent"
task="dt:MPI_Type_get_true_extent"
template="MPI_Type_get_true_extent.leave"
value="317"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Type_get_true_extent"
task="dt:MPI_Type_get_true_extent"
template="function.error"
value="318"
/>
<!--MPI_Type_indexed-->
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Type_indexed.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Type_indexed"
task="dt:MPI_Type_indexed"
template="MPI_Type_indexed.enter"
value="319"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Type_indexed.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Type_indexed"
task="dt:MPI_Type_indexed"
template="MPI_Type_indexed.leave"
value="320"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Type_indexed"
task="dt:MPI_Type_indexed"
template="function.error"
value="321"
/>
<!--MPI_Type_match_size-->
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Type_match_size.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Type_match_size"
task="dt:MPI_Type_match_size"
template="MPI_Type_match_size.enter"
value="322"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Type_match_size.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Type_match_size"
task="dt:MPI_Type_match_size"
template="MPI_Type_match_size.leave"
value="323"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Type_match_size"
task="dt:MPI_Type_match_size"
template="function.error"
value="324"
/>
<!--MPI_Type_set_name-->
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Type_set_name.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Type_set_name"
task="dt:MPI_Type_set_name"
template="MPI_Type_set_name.enter"
value="325"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Type_set_name"
task="dt:MPI_Type_set_name"
value="326"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Type_set_name"
task="dt:MPI_Type_set_name"
template="function.error"
value="327"
/>
<!--MPI_Type_size-->
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Type_size.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Type_size"
task="dt:MPI_Type_size"
template="MPI_Type_size.enter"
value="328"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Type_size.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Type_size"
task="dt:MPI_Type_size"
template="MPI_Type_size.leave"
value="329"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Type_size"
task="dt:MPI_Type_size"
template="function.error"
value="330"
/>
<!--MPI_Type_vector-->
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Type_vector.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Type_vector"
task="dt:MPI_Type_vector"
template="MPI_Type_vector.enter"
value="331"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Type_vector.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Type_vector"
task="dt:MPI_Type_vector"
template="MPI_Type_vector.leave"
value="332"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Type_vector"
task="dt:MPI_Type_vector"
template="function.error"
value="333"
/>
<!--MPI_Unpack-->
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Unpack.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Unpack"
task="dt:MPI_Unpack"
template="MPI_Unpack.enter"
value="334"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Unpack.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Unpack"
task="dt:MPI_Unpack"
template="MPI_Unpack.leave"
value="335"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Unpack"
task="dt:MPI_Unpack"
template="function.error"
value="336"
/>
<!--MPI_Unpack_external-->
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Unpack_external.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Unpack_external"
task="dt:MPI_Unpack_external"
template="MPI_Unpack_external.enter"
value="337"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Unpack_external.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Unpack_external"
task="dt:MPI_Unpack_external"
template="MPI_Unpack_external.leave"
value="338"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Unpack_external"
task="dt:MPI_Unpack_external"
template="function.error"
value="339"
/>
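<!-- Error handling (eh) API events -->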
<!--MPI_Add_error_class-->
<event
channel="MpiApiChannel"
keywords="mpi:eh mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Add_error_class.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Add_error_class"
task="eh:MPI_Add_error_class"
value="340"
/>
<event
channel="MpiApiChannel"
keywords="mpi:eh mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Add_error_class.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Add_error_class"
task="eh:MPI_Add_error_class"
template="MPI_Add_error_class.leave"
value="341"
/>
<event
channel="MpiApiChannel"
keywords="mpi:eh mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Add_error_class"
task="eh:MPI_Add_error_class"
template="function.error"
value="342"
/>
<!--MPI_Add_error_code-->
<event
channel="MpiApiChannel"
keywords="mpi:eh mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Add_error_code.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Add_error_code"
task="eh:MPI_Add_error_code"
template="MPI_Add_error_code.enter"
value="343"
/>
<event
channel="MpiApiChannel"
keywords="mpi:eh mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Add_error_code.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Add_error_code"
task="eh:MPI_Add_error_code"
template="MPI_Add_error_code.leave"
value="344"
/>
<event
channel="MpiApiChannel"
keywords="mpi:eh mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Add_error_code"
task="eh:MPI_Add_error_code"
template="function.error"
value="345"
/>
<!--MPI_Add_error_string-->
<event
channel="MpiApiChannel"
keywords="mpi:eh mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Add_error_string.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Add_error_string"
task="eh:MPI_Add_error_string"
template="MPI_Add_error_string.enter"
value="346"
/>
<event
channel="MpiApiChannel"
keywords="mpi:eh mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Add_error_string"
task="eh:MPI_Add_error_string"
value="347"
/>
<event
channel="MpiApiChannel"
keywords="mpi:eh mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Add_error_string"
task="eh:MPI_Add_error_string"
template="function.error"
value="348"
/>
<!--MPI_Comm_call_errhandler-->
<event
channel="MpiApiChannel"
keywords="mpi:eh mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Comm_call_errhandler.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Comm_call_errhandler"
task="eh:MPI_Comm_call_errhandler"
template="MPI_Comm_call_errhandler.enter"
value="349"
/>
<event
channel="MpiApiChannel"
keywords="mpi:eh mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Comm_call_errhandler.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Comm_call_errhandler"
task="eh:MPI_Comm_call_errhandler"
template="MPI_Comm_call_errhandler.leave"
value="350"
/>
<event
channel="MpiApiChannel"
keywords="mpi:eh mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Comm_call_errhandler"
task="eh:MPI_Comm_call_errhandler"
template="function.error"
value="351"
/>
<!--MPI_Comm_create_errhandler-->
<event
channel="MpiApiChannel"
keywords="mpi:eh mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Comm_create_errhandler.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Comm_create_errhandler"
task="eh:MPI_Comm_create_errhandler"
template="MPI_Comm_create_errhandler.enter"
value="352"
/>
<event
channel="MpiApiChannel"
keywords="mpi:eh mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Comm_create_errhandler.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Comm_create_errhandler"
task="eh:MPI_Comm_create_errhandler"
template="MPI_Comm_create_errhandler.leave"
value="353"
/>
<event
channel="MpiApiChannel"
keywords="mpi:eh mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Comm_create_errhandler"
task="eh:MPI_Comm_create_errhandler"
template="function.error"
value="354"
/>
<!--MPI_Comm_get_errhandler-->
<event
channel="MpiApiChannel"
keywords="mpi:eh mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Comm_get_errhandler.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Comm_get_errhandler"
task="eh:MPI_Comm_get_errhandler"
template="MPI_Comm_get_errhandler.enter"
value="355"
/>
<event
channel="MpiApiChannel"
keywords="mpi:eh mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Comm_get_errhandler.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Comm_get_errhandler"
task="eh:MPI_Comm_get_errhandler"
template="MPI_Comm_get_errhandler.leave"
value="356"
/>
<event
channel="MpiApiChannel"
keywords="mpi:eh mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Comm_get_errhandler"
task="eh:MPI_Comm_get_errhandler"
template="function.error"
value="357"
/>
<!--MPI_Comm_set_errhandler-->
<event
channel="MpiApiChannel"
keywords="mpi:eh mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Comm_set_errhandler.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Comm_set_errhandler"
task="eh:MPI_Comm_set_errhandler"
template="MPI_Comm_set_errhandler.enter"
value="358"
/>
<event
channel="MpiApiChannel"
keywords="mpi:eh mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Comm_set_errhandler"
task="eh:MPI_Comm_set_errhandler"
value="359"
/>
<event
channel="MpiApiChannel"
keywords="mpi:eh mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Comm_set_errhandler"
task="eh:MPI_Comm_set_errhandler"
template="function.error"
value="360"
/>
<!--MPI_Errhandler_free-->
<event
channel="MpiApiChannel"
keywords="mpi:eh mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Errhandler_free.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Errhandler_free"
task="eh:MPI_Errhandler_free"
template="MPI_Errhandler_free.enter"
value="361"
/>
<event
channel="MpiApiChannel"
keywords="mpi:eh mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Errhandler_free"
task="eh:MPI_Errhandler_free"
value="362"
/>
<event
channel="MpiApiChannel"
keywords="mpi:eh mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Errhandler_free"
task="eh:MPI_Errhandler_free"
template="function.error"
value="363"
/>
<!--MPI_Error_class-->
<event
channel="MpiApiChannel"
keywords="mpi:eh mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Error_class.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Error_class"
task="eh:MPI_Error_class"
template="MPI_Error_class.enter"
value="364"
/>
<event
channel="MpiApiChannel"
keywords="mpi:eh mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Error_class.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Error_class"
task="eh:MPI_Error_class"
template="MPI_Error_class.leave"
value="365"
/>
<event
channel="MpiApiChannel"
keywords="mpi:eh mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Error_class"
task="eh:MPI_Error_class"
template="function.error"
value="366"
/>
<!--MPI_Error_string-->
<event
channel="MpiApiChannel"
keywords="mpi:eh mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Error_string.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Error_string"
task="eh:MPI_Error_string"
template="MPI_Error_string.enter"
value="367"
/>
<event
channel="MpiApiChannel"
keywords="mpi:eh mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Error_string.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Error_string"
task="eh:MPI_Error_string"
template="MPI_Error_string.leave"
value="368"
/>
<event
channel="MpiApiChannel"
keywords="mpi:eh mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Error_string"
task="eh:MPI_Error_string"
template="function.error"
value="369"
/>
<!--MPI_File_call_errhandler-->
<event
channel="MpiApiChannel"
keywords="mpi:eh mpi:api_enter"
level="win:Informational"
message="$(string.MPI_File_call_errhandler.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_File_call_errhandler"
task="eh:MPI_File_call_errhandler"
template="MPI_File_call_errhandler.enter"
value="370"
/>
<event
channel="MpiApiChannel"
keywords="mpi:eh mpi:api_leave"
level="win:Informational"
message="$(string.MPI_File_call_errhandler.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_File_call_errhandler"
task="eh:MPI_File_call_errhandler"
template="MPI_File_call_errhandler.leave"
value="371"
/>
<event
channel="MpiApiChannel"
keywords="mpi:eh mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_File_call_errhandler"
task="eh:MPI_File_call_errhandler"
template="function.error"
value="372"
/>
<!--MPI_File_create_errhandler-->
<event
channel="MpiApiChannel"
keywords="mpi:eh mpi:api_enter"
level="win:Informational"
message="$(string.MPI_File_create_errhandler.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_File_create_errhandler"
task="eh:MPI_File_create_errhandler"
template="MPI_File_create_errhandler.enter"
value="373"
/>
<event
channel="MpiApiChannel"
keywords="mpi:eh mpi:api_leave"
level="win:Informational"
message="$(string.MPI_File_create_errhandler.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_File_create_errhandler"
task="eh:MPI_File_create_errhandler"
template="MPI_File_create_errhandler.leave"
value="374"
/>
<event
channel="MpiApiChannel"
keywords="mpi:eh mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_File_create_errhandler"
task="eh:MPI_File_create_errhandler"
template="function.error"
value="375"
/>
<!--MPI_File_get_errhandler-->
<event
channel="MpiApiChannel"
keywords="mpi:eh mpi:api_enter"
level="win:Informational"
message="$(string.MPI_File_get_errhandler.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_File_get_errhandler"
task="eh:MPI_File_get_errhandler"
template="MPI_File_get_errhandler.enter"
value="376"
/>
<event
channel="MpiApiChannel"
keywords="mpi:eh mpi:api_leave"
level="win:Informational"
message="$(string.MPI_File_get_errhandler.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_File_get_errhandler"
task="eh:MPI_File_get_errhandler"
template="MPI_File_get_errhandler.leave"
value="377"
/>
<event
channel="MpiApiChannel"
keywords="mpi:eh mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_File_get_errhandler"
task="eh:MPI_File_get_errhandler"
template="function.error"
value="378"
/>
<!--MPI_File_set_errhandler-->
<event
channel="MpiApiChannel"
keywords="mpi:eh mpi:api_enter"
level="win:Informational"
message="$(string.MPI_File_set_errhandler.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_File_set_errhandler"
task="eh:MPI_File_set_errhandler"
template="MPI_File_set_errhandler.enter"
value="379"
/>
<event
channel="MpiApiChannel"
keywords="mpi:eh mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_File_set_errhandler"
task="eh:MPI_File_set_errhandler"
value="380"
/>
<event
channel="MpiApiChannel"
keywords="mpi:eh mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_File_set_errhandler"
task="eh:MPI_File_set_errhandler"
template="function.error"
value="381"
/>
<!--MPI_Win_call_errhandler-->
<event
channel="MpiApiChannel"
keywords="mpi:eh mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Win_call_errhandler.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Win_call_errhandler"
task="eh:MPI_Win_call_errhandler"
template="MPI_Win_call_errhandler.enter"
value="382"
/>
<event
channel="MpiApiChannel"
keywords="mpi:eh mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Win_call_errhandler.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Win_call_errhandler"
task="eh:MPI_Win_call_errhandler"
template="MPI_Win_call_errhandler.leave"
value="383"
/>
<event
channel="MpiApiChannel"
keywords="mpi:eh mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Win_call_errhandler"
task="eh:MPI_Win_call_errhandler"
template="function.error"
value="384"
/>
<!--MPI_Win_create_errhandler-->
<event
channel="MpiApiChannel"
keywords="mpi:eh mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Win_create_errhandler.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Win_create_errhandler"
task="eh:MPI_Win_create_errhandler"
template="MPI_Win_create_errhandler.enter"
value="385"
/>
<event
channel="MpiApiChannel"
keywords="mpi:eh mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Win_create_errhandler.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Win_create_errhandler"
task="eh:MPI_Win_create_errhandler"
template="MPI_Win_create_errhandler.leave"
value="386"
/>
<event
channel="MpiApiChannel"
keywords="mpi:eh mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Win_create_errhandler"
task="eh:MPI_Win_create_errhandler"
template="function.error"
value="387"
/>
<!--MPI_Win_get_errhandler-->
<event
channel="MpiApiChannel"
keywords="mpi:eh mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Win_get_errhandler.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Win_get_errhandler"
task="eh:MPI_Win_get_errhandler"
template="MPI_Win_get_errhandler.enter"
value="388"
/>
<event
channel="MpiApiChannel"
keywords="mpi:eh mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Win_get_errhandler.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Win_get_errhandler"
task="eh:MPI_Win_get_errhandler"
template="MPI_Win_get_errhandler.leave"
value="389"
/>
<event
channel="MpiApiChannel"
keywords="mpi:eh mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Win_get_errhandler"
task="eh:MPI_Win_get_errhandler"
template="function.error"
value="390"
/>
<!--MPI_Win_set_errhandler-->
<event
channel="MpiApiChannel"
keywords="mpi:eh mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Win_set_errhandler.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Win_set_errhandler"
task="eh:MPI_Win_set_errhandler"
template="MPI_Win_set_errhandler.enter"
value="391"
/>
<event
channel="MpiApiChannel"
keywords="mpi:eh mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Win_set_errhandler"
task="eh:MPI_Win_set_errhandler"
value="392"
/>
<event
channel="MpiApiChannel"
keywords="mpi:eh mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Win_set_errhandler"
task="eh:MPI_Win_set_errhandler"
template="function.error"
value="393"
/>
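<!-- Group management (grp) API events -->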
<!--MPI_Group_compare-->
<event
channel="MpiApiChannel"
keywords="mpi:grp mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Group_compare.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Group_compare"
task="grp:MPI_Group_compare"
template="MPI_Group_compare.enter"
value="394"
/>
<event
channel="MpiApiChannel"
keywords="mpi:grp mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Group_compare.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Group_compare"
task="grp:MPI_Group_compare"
template="MPI_Group_compare.leave"
value="395"
/>
<event
channel="MpiApiChannel"
keywords="mpi:grp mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Group_compare"
task="grp:MPI_Group_compare"
template="function.error"
value="396"
/>
<!--MPI_Group_difference-->
<event
channel="MpiApiChannel"
keywords="mpi:grp mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Group_difference.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Group_difference"
task="grp:MPI_Group_difference"
template="MPI_Group_difference.enter"
value="397"
/>
<event
channel="MpiApiChannel"
keywords="mpi:grp mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Group_difference.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Group_difference"
task="grp:MPI_Group_difference"
template="MPI_Group_difference.leave"
value="398"
/>
<event
channel="MpiApiChannel"
keywords="mpi:grp mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Group_difference"
task="grp:MPI_Group_difference"
template="function.error"
value="399"
/>
<!--MPI_Group_excl-->
<event
channel="MpiApiChannel"
keywords="mpi:grp mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Group_excl.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Group_excl"
task="grp:MPI_Group_excl"
template="MPI_Group_excl.enter"
value="400"
/>
<event
channel="MpiApiChannel"
keywords="mpi:grp mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Group_excl.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Group_excl"
task="grp:MPI_Group_excl"
template="MPI_Group_excl.leave"
value="401"
/>
<event
channel="MpiApiChannel"
keywords="mpi:grp mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Group_excl"
task="grp:MPI_Group_excl"
template="function.error"
value="402"
/>
<!--MPI_Group_free-->
<event
channel="MpiApiChannel"
keywords="mpi:grp mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Group_free.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Group_free"
task="grp:MPI_Group_free"
template="MPI_Group_free.enter"
value="403"
/>
<event
channel="MpiApiChannel"
keywords="mpi:grp mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Group_free"
task="grp:MPI_Group_free"
value="404"
/>
<event
channel="MpiApiChannel"
keywords="mpi:grp mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Group_free"
task="grp:MPI_Group_free"
template="function.error"
value="405"
/>
<!--MPI_Group_incl-->
<event
channel="MpiApiChannel"
keywords="mpi:grp mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Group_incl.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Group_incl"
task="grp:MPI_Group_incl"
template="MPI_Group_incl.enter"
value="406"
/>
<event
channel="MpiApiChannel"
keywords="mpi:grp mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Group_incl.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Group_incl"
task="grp:MPI_Group_incl"
template="MPI_Group_incl.leave"
value="407"
/>
<event
channel="MpiApiChannel"
keywords="mpi:grp mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Group_incl"
task="grp:MPI_Group_incl"
template="function.error"
value="408"
/>
<!--MPI_Group_intersection-->
<event
channel="MpiApiChannel"
keywords="mpi:grp mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Group_intersection.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Group_intersection"
task="grp:MPI_Group_intersection"
template="MPI_Group_intersection.enter"
value="409"
/>
<event
channel="MpiApiChannel"
keywords="mpi:grp mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Group_intersection.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Group_intersection"
task="grp:MPI_Group_intersection"
template="MPI_Group_intersection.leave"
value="410"
/>
<event
channel="MpiApiChannel"
keywords="mpi:grp mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Group_intersection"
task="grp:MPI_Group_intersection"
template="function.error"
value="411"
/>
<!--MPI_Group_range_excl-->
<event
channel="MpiApiChannel"
keywords="mpi:grp mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Group_range_excl.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Group_range_excl"
task="grp:MPI_Group_range_excl"
template="MPI_Group_range_excl.enter"
value="412"
/>
<event
channel="MpiApiChannel"
keywords="mpi:grp mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Group_range_excl.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Group_range_excl"
task="grp:MPI_Group_range_excl"
template="MPI_Group_range_excl.leave"
value="413"
/>
<event
channel="MpiApiChannel"
keywords="mpi:grp mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Group_range_excl"
task="grp:MPI_Group_range_excl"
template="function.error"
value="414"
/>
<!--MPI_Group_range_incl-->
<event
channel="MpiApiChannel"
keywords="mpi:grp mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Group_range_incl.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Group_range_incl"
task="grp:MPI_Group_range_incl"
template="MPI_Group_range_incl.enter"
value="415"
/>
<event
channel="MpiApiChannel"
keywords="mpi:grp mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Group_range_incl.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Group_range_incl"
task="grp:MPI_Group_range_incl"
template="MPI_Group_range_incl.leave"
value="416"
/>
<event
channel="MpiApiChannel"
keywords="mpi:grp mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Group_range_incl"
task="grp:MPI_Group_range_incl"
template="function.error"
value="417"
/>
<!--MPI_Group_rank-->
<event
channel="MpiApiChannel"
keywords="mpi:grp mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Group_rank.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Group_rank"
task="grp:MPI_Group_rank"
template="MPI_Group_rank.enter"
value="418"
/>
<event
channel="MpiApiChannel"
keywords="mpi:grp mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Group_rank.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Group_rank"
task="grp:MPI_Group_rank"
template="MPI_Group_rank.leave"
value="419"
/>
<event
channel="MpiApiChannel"
keywords="mpi:grp mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Group_rank"
task="grp:MPI_Group_rank"
template="function.error"
value="420"
/>
<!--MPI_Group_size-->
<event
channel="MpiApiChannel"
keywords="mpi:grp mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Group_size.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Group_size"
task="grp:MPI_Group_size"
template="MPI_Group_size.enter"
value="421"
/>
<event
channel="MpiApiChannel"
keywords="mpi:grp mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Group_size.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Group_size"
task="grp:MPI_Group_size"
template="MPI_Group_size.leave"
value="422"
/>
<event
channel="MpiApiChannel"
keywords="mpi:grp mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Group_size"
task="grp:MPI_Group_size"
template="function.error"
value="423"
/>
<!--MPI_Group_translate_ranks-->
<event
channel="MpiApiChannel"
keywords="mpi:grp mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Group_translate_ranks.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Group_translate_ranks"
task="grp:MPI_Group_translate_ranks"
template="MPI_Group_translate_ranks.enter"
value="424"
/>
<event
channel="MpiApiChannel"
keywords="mpi:grp mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Group_translate_ranks.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Group_translate_ranks"
task="grp:MPI_Group_translate_ranks"
template="MPI_Group_translate_ranks.leave"
value="425"
/>
<event
channel="MpiApiChannel"
keywords="mpi:grp mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Group_translate_ranks"
task="grp:MPI_Group_translate_ranks"
template="function.error"
value="426"
/>
<!--MPI_Group_union-->
<event
channel="MpiApiChannel"
keywords="mpi:grp mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Group_union.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Group_union"
task="grp:MPI_Group_union"
template="MPI_Group_union.enter"
value="427"
/>
<event
channel="MpiApiChannel"
keywords="mpi:grp mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Group_union.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Group_union"
task="grp:MPI_Group_union"
template="MPI_Group_union.leave"
value="428"
/>
<event
channel="MpiApiChannel"
keywords="mpi:grp mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Group_union"
task="grp:MPI_Group_union"
template="function.error"
value="429"
/>
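<!-- Info object (info) API events -->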
<!--MPI_Info_create-->
<event
channel="MpiApiChannel"
keywords="mpi:info mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Info_create.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Info_create"
task="info:MPI_Info_create"
value="430"
/>
<event
channel="MpiApiChannel"
keywords="mpi:info mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Info_create.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Info_create"
task="info:MPI_Info_create"
template="MPI_Info_create.leave"
value="431"
/>
<event
channel="MpiApiChannel"
keywords="mpi:info mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Info_create"
task="info:MPI_Info_create"
template="function.error"
value="432"
/>
<!--MPI_Info_delete-->
<event
channel="MpiApiChannel"
keywords="mpi:info mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Info_delete.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Info_delete"
task="info:MPI_Info_delete"
template="MPI_Info_delete.enter"
value="433"
/>
<event
channel="MpiApiChannel"
keywords="mpi:info mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Info_delete"
task="info:MPI_Info_delete"
value="434"
/>
<event
channel="MpiApiChannel"
keywords="mpi:info mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Info_delete"
task="info:MPI_Info_delete"
template="function.error"
value="435"
/>
<!--MPI_Info_dup-->
<event
channel="MpiApiChannel"
keywords="mpi:info mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Info_dup.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Info_dup"
task="info:MPI_Info_dup"
template="MPI_Info_dup.enter"
value="436"
/>
<event
channel="MpiApiChannel"
keywords="mpi:info mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Info_dup.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Info_dup"
task="info:MPI_Info_dup"
template="MPI_Info_dup.leave"
value="437"
/>
<event
channel="MpiApiChannel"
keywords="mpi:info mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Info_dup"
task="info:MPI_Info_dup"
template="function.error"
value="438"
/>
<!--MPI_Info_free-->
<event
channel="MpiApiChannel"
keywords="mpi:info mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Info_free.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Info_free"
task="info:MPI_Info_free"
template="MPI_Info_free.enter"
value="439"
/>
<event
channel="MpiApiChannel"
keywords="mpi:info mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Info_free"
task="info:MPI_Info_free"
value="440"
/>
<event
channel="MpiApiChannel"
keywords="mpi:info mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Info_free"
task="info:MPI_Info_free"
template="function.error"
value="441"
/>
<!--MPI_Info_get-->
<event
channel="MpiApiChannel"
keywords="mpi:info mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Info_get.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Info_get"
task="info:MPI_Info_get"
template="MPI_Info_get.enter"
value="442"
/>
<event
channel="MpiApiChannel"
keywords="mpi:info mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Info_get.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Info_get"
task="info:MPI_Info_get"
template="MPI_Info_get.leave"
value="443"
/>
<event
channel="MpiApiChannel"
keywords="mpi:info mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Info_get"
task="info:MPI_Info_get"
template="function.error"
value="444"
/>
<!--MPI_Info_get_nkeys-->
<event
channel="MpiApiChannel"
keywords="mpi:info mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Info_get_nkeys.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Info_get_nkeys"
task="info:MPI_Info_get_nkeys"
template="MPI_Info_get_nkeys.enter"
value="445"
/>
<event
channel="MpiApiChannel"
keywords="mpi:info mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Info_get_nkeys.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Info_get_nkeys"
task="info:MPI_Info_get_nkeys"
template="MPI_Info_get_nkeys.leave"
value="446"
/>
<event
channel="MpiApiChannel"
keywords="mpi:info mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Info_get_nkeys"
task="info:MPI_Info_get_nkeys"
template="function.error"
value="447"
/>
<!--MPI_Info_get_nthkey-->
<event
channel="MpiApiChannel"
keywords="mpi:info mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Info_get_nthkey.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Info_get_nthkey"
task="info:MPI_Info_get_nthkey"
template="MPI_Info_get_nthkey.enter"
value="448"
/>
<event
channel="MpiApiChannel"
keywords="mpi:info mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Info_get_nthkey.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Info_get_nthkey"
task="info:MPI_Info_get_nthkey"
template="MPI_Info_get_nthkey.leave"
value="449"
/>
<event
channel="MpiApiChannel"
keywords="mpi:info mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Info_get_nthkey"
task="info:MPI_Info_get_nthkey"
template="function.error"
value="450"
/>
<!--MPI_Info_get_valuelen-->
<event
channel="MpiApiChannel"
keywords="mpi:info mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Info_get_valuelen.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Info_get_valuelen"
task="info:MPI_Info_get_valuelen"
template="MPI_Info_get_valuelen.enter"
value="451"
/>
<event
channel="MpiApiChannel"
keywords="mpi:info mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Info_get_valuelen.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Info_get_valuelen"
task="info:MPI_Info_get_valuelen"
template="MPI_Info_get_valuelen.leave"
value="452"
/>
<event
channel="MpiApiChannel"
keywords="mpi:info mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Info_get_valuelen"
task="info:MPI_Info_get_valuelen"
template="function.error"
value="453"
/>
<!--MPI_Info_set-->
<event
channel="MpiApiChannel"
keywords="mpi:info mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Info_set.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Info_set"
task="info:MPI_Info_set"
template="MPI_Info_set.enter"
value="454"
/>
<event
channel="MpiApiChannel"
keywords="mpi:info mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Info_set"
task="info:MPI_Info_set"
value="455"
/>
<event
channel="MpiApiChannel"
keywords="mpi:info mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Info_set"
task="info:MPI_Info_set"
template="function.error"
value="456"
/>
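<!-- Initialization and environment (init) API events -->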
<!--MPI_Abort-->
<event
channel="MpiApiChannel"
keywords="mpi:init mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Abort.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Abort"
task="init:MPI_Abort"
template="MPI_Abort.enter"
value="457"
/>
<event
channel="MpiApiChannel"
keywords="mpi:init mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Abort"
task="init:MPI_Abort"
value="458"
/>
<event
channel="MpiApiChannel"
keywords="mpi:init mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Abort"
task="init:MPI_Abort"
template="function.error"
value="459"
/>
<!--MPI_Finalize-->
<event
channel="MpiApiChannel"
keywords="mpi:init mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Finalize.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Finalize"
task="init:MPI_Finalize"
value="460"
/>
<event
channel="MpiApiChannel"
keywords="mpi:init mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Finalize"
task="init:MPI_Finalize"
value="461"
/>
<event
channel="MpiApiChannel"
keywords="mpi:init mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Finalize"
task="init:MPI_Finalize"
template="function.error"
value="462"
/>
<!--MPI_Init-->
<event
channel="MpiApiChannel"
keywords="mpi:init mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Init.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Init"
task="init:MPI_Init"
value="463"
/>
<event
channel="MpiApiChannel"
keywords="mpi:init mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Init"
task="init:MPI_Init"
value="464"
/>
<event
channel="MpiApiChannel"
keywords="mpi:init mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Init"
task="init:MPI_Init"
template="function.error"
value="465"
/>
<!--MPI_Init_thread-->
<event
channel="MpiApiChannel"
keywords="mpi:init mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Init_thread.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Init_thread"
task="init:MPI_Init_thread"
template="MPI_Init_thread.enter"
value="466"
/>
<event
channel="MpiApiChannel"
keywords="mpi:init mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Init_thread.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Init_thread"
task="init:MPI_Init_thread"
template="MPI_Init_thread.leave"
value="467"
/>
<event
channel="MpiApiChannel"
keywords="mpi:init mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Init_thread"
task="init:MPI_Init_thread"
template="function.error"
value="468"
/>
<!--MPI_Is_thread_main-->
<event
channel="MpiApiChannel"
keywords="mpi:init mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Is_thread_main.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Is_thread_main"
task="init:MPI_Is_thread_main"
value="469"
/>
<event
channel="MpiApiChannel"
keywords="mpi:init mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Is_thread_main.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Is_thread_main"
task="init:MPI_Is_thread_main"
template="MPI_Is_thread_main.leave"
value="470"
/>
<event
channel="MpiApiChannel"
keywords="mpi:init mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Is_thread_main"
task="init:MPI_Is_thread_main"
template="function.error"
value="471"
/>
<!--MPI_Query_thread-->
<event
channel="MpiApiChannel"
keywords="mpi:init mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Query_thread.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Query_thread"
task="init:MPI_Query_thread"
value="472"
/>
<event
channel="MpiApiChannel"
keywords="mpi:init mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Query_thread.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Query_thread"
task="init:MPI_Query_thread"
template="MPI_Query_thread.leave"
value="473"
/>
<event
channel="MpiApiChannel"
keywords="mpi:init mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Query_thread"
task="init:MPI_Query_thread"
template="function.error"
value="474"
/>
<!--MPI_Get_processor_name-->
<event
channel="MpiApiChannel"
keywords="mpi:init mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Get_processor_name.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Get_processor_name"
task="init:MPI_Get_processor_name"
value="475"
/>
<event
channel="MpiApiChannel"
keywords="mpi:init mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Get_processor_name.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Get_processor_name"
task="init:MPI_Get_processor_name"
template="MPI_Get_processor_name.leave"
value="476"
/>
<event
channel="MpiApiChannel"
keywords="mpi:init mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Get_processor_name"
task="init:MPI_Get_processor_name"
template="function.error"
value="477"
/>
<!--MPI_Get_version-->
<event
channel="MpiApiChannel"
keywords="mpi:init mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Get_version.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Get_version"
task="init:MPI_Get_version"
value="478"
/>
<event
channel="MpiApiChannel"
keywords="mpi:init mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Get_version.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Get_version"
task="init:MPI_Get_version"
template="MPI_Get_version.leave"
value="479"
/>
<event
channel="MpiApiChannel"
keywords="mpi:init mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Get_version"
task="init:MPI_Get_version"
template="function.error"
value="480"
/>
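<!-- Point-to-point (p2p) API events -->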
<!--MPI_Bsend-->
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Bsend.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Bsend"
task="p2p:MPI_Bsend"
template="MPI_Bsend.enter"
value="481"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Bsend"
task="p2p:MPI_Bsend"
value="482"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Bsend"
task="p2p:MPI_Bsend"
template="function.error"
value="483"
/>
<!--MPI_Bsend_init-->
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Bsend_init.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Bsend_init"
task="p2p:MPI_Bsend_init"
template="MPI_Bsend_init.enter"
value="484"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Bsend_init.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Bsend_init"
task="p2p:MPI_Bsend_init"
template="MPI_Bsend_init.leave"
value="485"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Bsend_init"
task="p2p:MPI_Bsend_init"
template="function.error"
value="486"
/>
<!--MPI_Buffer_attach-->
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Buffer_attach.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Buffer_attach"
task="p2p:MPI_Buffer_attach"
template="MPI_Buffer_attach.enter"
value="487"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Buffer_attach"
task="p2p:MPI_Buffer_attach"
value="488"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Buffer_attach"
task="p2p:MPI_Buffer_attach"
template="function.error"
value="489"
/>
<!--MPI_Buffer_detach-->
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Buffer_detach.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Buffer_detach"
task="p2p:MPI_Buffer_detach"
value="490"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Buffer_detach.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Buffer_detach"
task="p2p:MPI_Buffer_detach"
template="MPI_Buffer_detach.leave"
value="491"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Buffer_detach"
task="p2p:MPI_Buffer_detach"
template="function.error"
value="492"
/>
<!--MPI_Cancel-->
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Cancel.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Cancel"
task="p2p:MPI_Cancel"
template="MPI_Cancel.enter"
value="493"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Cancel"
task="p2p:MPI_Cancel"
value="494"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Cancel"
task="p2p:MPI_Cancel"
template="function.error"
value="495"
/>
<!--MPI_Grequest_complete-->
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Grequest_complete.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Grequest_complete"
task="p2p:MPI_Grequest_complete"
template="MPI_Grequest_complete.enter"
value="496"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Grequest_complete"
task="p2p:MPI_Grequest_complete"
value="497"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Grequest_complete"
task="p2p:MPI_Grequest_complete"
template="function.error"
value="498"
/>
<!--MPI_Grequest_start-->
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Grequest_start.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Grequest_start"
task="p2p:MPI_Grequest_start"
template="MPI_Grequest_start.enter"
value="499"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Grequest_start.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Grequest_start"
task="p2p:MPI_Grequest_start"
template="MPI_Grequest_start.leave"
value="500"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Grequest_start"
task="p2p:MPI_Grequest_start"
template="function.error"
value="501"
/>
<!--MPI_Ibsend-->
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Ibsend.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Ibsend"
task="p2p:MPI_Ibsend"
template="MPI_Ibsend.enter"
value="502"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Ibsend.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Ibsend"
task="p2p:MPI_Ibsend"
template="MPI_Ibsend.leave"
value="503"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Ibsend"
task="p2p:MPI_Ibsend"
template="function.error"
value="504"
/>
<!--MPI_Iprobe-->
<event
channel="MpiApiChannel"
keywords="mpi:poll mpi:api_enter"
level="win:Verbose"
message="$(string.MPI_Iprobe.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Iprobe"
task="p2p:MPI_Iprobe"
template="MPI_Iprobe.enter"
value="505"
/>
<event
channel="MpiApiChannel"
keywords="mpi:poll mpi:api_leave"
level="win:Verbose"
message="$(string.MPI_Iprobe.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Iprobe"
task="p2p:MPI_Iprobe"
template="MPI_Iprobe.leave"
value="506"
/>
<event
channel="MpiApiChannel"
keywords="mpi:poll mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Iprobe"
task="p2p:MPI_Iprobe"
template="function.error"
value="507"
/>
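<!--
  Note: the MPI_Iprobe enter and leave events above are logged at
  win:Verbose under the mpi:poll keyword rather than mpi:p2p,
  presumably so that high-frequency polling can be filtered out of
  ordinary point-to-point traces.
-->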
<!--MPI_Irecv-->
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Irecv.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Irecv"
task="p2p:MPI_Irecv"
template="MPI_Irecv.enter"
value="508"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Irecv.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Irecv"
task="p2p:MPI_Irecv"
template="MPI_Irecv.leave"
value="509"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Irecv"
task="p2p:MPI_Irecv"
template="function.error"
value="510"
/>
<!--MPI_Irsend-->
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Irsend.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Irsend"
task="p2p:MPI_Irsend"
template="MPI_Irsend.enter"
value="511"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Irsend.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Irsend"
task="p2p:MPI_Irsend"
template="MPI_Irsend.leave"
value="512"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Irsend"
task="p2p:MPI_Irsend"
template="function.error"
value="513"
/>
<!--MPI_Isend-->
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Isend.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Isend"
task="p2p:MPI_Isend"
template="MPI_Isend.enter"
value="514"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Isend.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Isend"
task="p2p:MPI_Isend"
template="MPI_Isend.leave"
value="515"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Isend"
task="p2p:MPI_Isend"
template="function.error"
value="516"
/>
<!--MPI_Issend-->
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Issend.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Issend"
task="p2p:MPI_Issend"
template="MPI_Issend.enter"
value="517"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Issend.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Issend"
task="p2p:MPI_Issend"
template="MPI_Issend.leave"
value="518"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Issend"
task="p2p:MPI_Issend"
template="function.error"
value="519"
/>
<!--MPI_Probe-->
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Probe.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Probe"
task="p2p:MPI_Probe"
template="MPI_Probe.enter"
value="520"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Probe.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Probe"
task="p2p:MPI_Probe"
template="MPI_Probe.leave"
value="521"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Probe"
task="p2p:MPI_Probe"
template="function.error"
value="522"
/>
<!--MPI_Recv-->
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Recv.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Recv"
task="p2p:MPI_Recv"
template="MPI_Recv.enter"
value="523"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Recv.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Recv"
task="p2p:MPI_Recv"
template="MPI_Recv.leave"
value="524"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Recv"
task="p2p:MPI_Recv"
template="function.error"
value="525"
/>
<!--MPI_Recv_init-->
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Recv_init.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Recv_init"
task="p2p:MPI_Recv_init"
template="MPI_Recv_init.enter"
value="526"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Recv_init.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Recv_init"
task="p2p:MPI_Recv_init"
template="MPI_Recv_init.leave"
value="527"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Recv_init"
task="p2p:MPI_Recv_init"
template="function.error"
value="528"
/>
<!--MPI_Request_free-->
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Request_free.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Request_free"
task="p2p:MPI_Request_free"
template="MPI_Request_free.enter"
value="529"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Request_free"
task="p2p:MPI_Request_free"
value="530"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Request_free"
task="p2p:MPI_Request_free"
template="function.error"
value="531"
/>
<!--MPI_Request_get_status-->
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Request_get_status.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Request_get_status"
task="p2p:MPI_Request_get_status"
template="MPI_Request_get_status.enter"
value="532"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Request_get_status.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Request_get_status"
task="p2p:MPI_Request_get_status"
template="MPI_Request_get_status.leave"
value="533"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Request_get_status"
task="p2p:MPI_Request_get_status"
template="function.error"
value="534"
/>
<!--MPI_Rsend-->
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Rsend.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Rsend"
task="p2p:MPI_Rsend"
template="MPI_Rsend.enter"
value="535"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Rsend"
task="p2p:MPI_Rsend"
value="536"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Rsend"
task="p2p:MPI_Rsend"
template="function.error"
value="537"
/>
<!--MPI_Rsend_init-->
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Rsend_init.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Rsend_init"
task="p2p:MPI_Rsend_init"
template="MPI_Rsend_init.enter"
value="538"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Rsend_init.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Rsend_init"
task="p2p:MPI_Rsend_init"
template="MPI_Rsend_init.leave"
value="539"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Rsend_init"
task="p2p:MPI_Rsend_init"
template="function.error"
value="540"
/>
<!--MPI_Send-->
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Send.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Send"
task="p2p:MPI_Send"
template="MPI_Send.enter"
value="541"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Send"
task="p2p:MPI_Send"
value="542"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Send"
task="p2p:MPI_Send"
template="function.error"
value="543"
/>
<!--MPI_Send_init-->
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Send_init.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Send_init"
task="p2p:MPI_Send_init"
template="MPI_Send_init.enter"
value="544"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Send_init.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Send_init"
task="p2p:MPI_Send_init"
template="MPI_Send_init.leave"
value="545"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Send_init"
task="p2p:MPI_Send_init"
template="function.error"
value="546"
/>
<!--MPI_Sendrecv-->
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Sendrecv.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Sendrecv"
task="p2p:MPI_Sendrecv"
template="MPI_Sendrecv.enter"
value="547"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Sendrecv"
task="p2p:MPI_Sendrecv"
value="548"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Sendrecv"
task="p2p:MPI_Sendrecv"
template="function.error"
value="549"
/>
<!--MPI_Sendrecv_replace-->
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Sendrecv_replace.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Sendrecv_replace"
task="p2p:MPI_Sendrecv_replace"
template="MPI_Sendrecv_replace.enter"
value="550"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Sendrecv_replace.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Sendrecv_replace"
task="p2p:MPI_Sendrecv_replace"
template="MPI_Sendrecv_replace.leave"
value="551"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Sendrecv_replace"
task="p2p:MPI_Sendrecv_replace"
template="function.error"
value="552"
/>
<!--MPI_Ssend-->
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Ssend.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Ssend"
task="p2p:MPI_Ssend"
template="MPI_Ssend.enter"
value="553"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Ssend"
task="p2p:MPI_Ssend"
value="554"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Ssend"
task="p2p:MPI_Ssend"
template="function.error"
value="555"
/>
<!--MPI_Ssend_init-->
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Ssend_init.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Ssend_init"
task="p2p:MPI_Ssend_init"
template="MPI_Ssend_init.enter"
value="556"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Ssend_init.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Ssend_init"
task="p2p:MPI_Ssend_init"
template="MPI_Ssend_init.leave"
value="557"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Ssend_init"
task="p2p:MPI_Ssend_init"
template="function.error"
value="558"
/>
<!--MPI_Start-->
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Start.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Start"
task="p2p:MPI_Start"
template="MPI_Start.enter"
value="559"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Start"
task="p2p:MPI_Start"
value="560"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Start"
task="p2p:MPI_Start"
template="function.error"
value="561"
/>
<!--MPI_Startall-->
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Startall.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Startall"
task="p2p:MPI_Startall"
template="MPI_Startall.enter"
value="562"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Startall"
task="p2p:MPI_Startall"
value="563"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Startall"
task="p2p:MPI_Startall"
template="function.error"
value="564"
/>
<!--MPI_Status_set_cancelled-->
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Status_set_cancelled.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Status_set_cancelled"
task="p2p:MPI_Status_set_cancelled"
template="MPI_Status_set_cancelled.enter"
value="565"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Status_set_cancelled"
task="p2p:MPI_Status_set_cancelled"
value="566"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Status_set_cancelled"
task="p2p:MPI_Status_set_cancelled"
template="function.error"
value="567"
/>
<!--MPI_Wait-->
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Wait.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Wait"
task="p2p:MPI_Wait"
template="MPI_Wait.enter"
value="568"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Wait.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Wait"
task="p2p:MPI_Wait"
template="MPI_Wait.leave"
value="569"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Wait"
task="p2p:MPI_Wait"
template="function.error"
value="570"
/>
<!--MPI_Waitall-->
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Waitall.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Waitall"
task="p2p:MPI_Waitall"
template="MPI_Waitall.enter"
value="571"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Waitall.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Waitall"
task="p2p:MPI_Waitall"
template="MPI_Waitall.leave"
value="572"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Waitall"
task="p2p:MPI_Waitall"
template="function.error"
value="573"
/>
<!--MPI_Waitany-->
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Waitany.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Waitany"
task="p2p:MPI_Waitany"
template="MPI_Waitany.enter"
value="574"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Waitany.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Waitany"
task="p2p:MPI_Waitany"
template="MPI_Waitany.leave"
value="575"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Waitany"
task="p2p:MPI_Waitany"
template="function.error"
value="576"
/>
<!--MPI_Waitsome-->
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Waitsome.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Waitsome"
task="p2p:MPI_Waitsome"
template="MPI_Waitsome.enter"
value="577"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Waitsome.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Waitsome"
task="p2p:MPI_Waitsome"
template="MPI_Waitsome.leave"
value="578"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Waitsome"
task="p2p:MPI_Waitsome"
template="function.error"
value="579"
/>
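<!--
  Polling APIs: the MPI_Test family below (like MPI_Iprobe above) is logged at
  win:Verbose with the mpi:poll keyword rather than win:Informational, so
  tight polling loops do not flood the trace unless verbose collection is
  explicitly requested.
-->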
<!--MPI_Test-->
<event
channel="MpiApiChannel"
keywords="mpi:poll mpi:api_enter"
level="win:Verbose"
message="$(string.MPI_Test.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Test"
task="poll:MPI_Test"
template="MPI_Test.enter"
value="580"
/>
<event
channel="MpiApiChannel"
keywords="mpi:poll mpi:api_leave"
level="win:Verbose"
message="$(string.MPI_Test.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Test"
task="poll:MPI_Test"
template="MPI_Test.leave"
value="581"
/>
<event
channel="MpiApiChannel"
keywords="mpi:poll mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Test"
task="poll:MPI_Test"
template="function.error"
value="582"
/>
<!--MPI_Test_cancelled-->
<event
channel="MpiApiChannel"
keywords="mpi:poll mpi:api_enter"
level="win:Verbose"
message="$(string.MPI_Test_cancelled.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Test_cancelled"
task="poll:MPI_Test_cancelled"
template="MPI_Test_cancelled.enter"
value="583"
/>
<event
channel="MpiApiChannel"
keywords="mpi:poll mpi:api_leave"
level="win:Verbose"
message="$(string.MPI_Test_cancelled.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Test_cancelled"
task="poll:MPI_Test_cancelled"
template="MPI_Test_cancelled.leave"
value="584"
/>
<event
channel="MpiApiChannel"
keywords="mpi:poll mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Test_cancelled"
task="poll:MPI_Test_cancelled"
template="function.error"
value="585"
/>
<!--MPI_Testall-->
<event
channel="MpiApiChannel"
keywords="mpi:poll mpi:api_enter"
level="win:Verbose"
message="$(string.MPI_Testall.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Testall"
task="poll:MPI_Testall"
template="MPI_Testall.enter"
value="586"
/>
<event
channel="MpiApiChannel"
keywords="mpi:poll mpi:api_leave"
level="win:Verbose"
message="$(string.MPI_Testall.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Testall"
task="poll:MPI_Testall"
template="MPI_Testall.leave"
value="587"
/>
<event
channel="MpiApiChannel"
keywords="mpi:poll mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Testall"
task="poll:MPI_Testall"
template="function.error"
value="588"
/>
<!--MPI_Testany-->
<event
channel="MpiApiChannel"
keywords="mpi:poll mpi:api_enter"
level="win:Verbose"
message="$(string.MPI_Testany.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Testany"
task="poll:MPI_Testany"
template="MPI_Testany.enter"
value="589"
/>
<event
channel="MpiApiChannel"
keywords="mpi:poll mpi:api_leave"
level="win:Verbose"
message="$(string.MPI_Testany.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Testany"
task="poll:MPI_Testany"
template="MPI_Testany.leave"
value="590"
/>
<event
channel="MpiApiChannel"
keywords="mpi:poll mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Testany"
task="poll:MPI_Testany"
template="function.error"
value="591"
/>
<!--MPI_Testsome-->
<event
channel="MpiApiChannel"
keywords="mpi:poll mpi:api_enter"
level="win:Verbose"
message="$(string.MPI_Testsome.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Testsome"
task="poll:MPI_Testsome"
template="MPI_Testsome.enter"
value="592"
/>
<event
channel="MpiApiChannel"
keywords="mpi:poll mpi:api_leave"
level="win:Verbose"
message="$(string.MPI_Testsome.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Testsome"
task="poll:MPI_Testsome"
template="MPI_Testsome.leave"
value="593"
/>
<event
channel="MpiApiChannel"
keywords="mpi:poll mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Testsome"
task="poll:MPI_Testsome"
template="function.error"
value="594"
/>
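<!-- RMA (one-sided communication) API events -->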
<!--MPI_Accumulate-->
<event
channel="MpiApiChannel"
keywords="mpi:rma mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Accumulate.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Accumulate"
task="rma:MPI_Accumulate"
template="MPI_Accumulate.enter"
value="595"
/>
<event
channel="MpiApiChannel"
keywords="mpi:rma mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Accumulate"
task="rma:MPI_Accumulate"
value="596"
/>
<event
channel="MpiApiChannel"
keywords="mpi:rma mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Accumulate"
task="rma:MPI_Accumulate"
template="function.error"
value="597"
/>
<!--MPI_Alloc_mem-->
<event
channel="MpiApiChannel"
keywords="mpi:rma mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Alloc_mem.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Alloc_mem"
task="rma:MPI_Alloc_mem"
template="MPI_Alloc_mem.enter"
value="598"
/>
<event
channel="MpiApiChannel"
keywords="mpi:rma mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Alloc_mem.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Alloc_mem"
task="rma:MPI_Alloc_mem"
template="MPI_Alloc_mem.leave"
value="599"
/>
<event
channel="MpiApiChannel"
keywords="mpi:rma mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Alloc_mem"
task="rma:MPI_Alloc_mem"
template="function.error"
value="600"
/>
<!--MPI_Free_mem-->
<event
channel="MpiApiChannel"
keywords="mpi:rma mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Free_mem.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Free_mem"
task="rma:MPI_Free_mem"
template="MPI_Free_mem.enter"
value="601"
/>
<event
channel="MpiApiChannel"
keywords="mpi:rma mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Free_mem"
task="rma:MPI_Free_mem"
value="602"
/>
<event
channel="MpiApiChannel"
keywords="mpi:rma mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Free_mem"
task="rma:MPI_Free_mem"
template="function.error"
value="603"
/>
<!--MPI_Get-->
<event
channel="MpiApiChannel"
keywords="mpi:rma mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Get.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Get"
task="rma:MPI_Get"
template="MPI_Get.enter"
value="604"
/>
<event
channel="MpiApiChannel"
keywords="mpi:rma mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Get"
task="rma:MPI_Get"
value="605"
/>
<event
channel="MpiApiChannel"
keywords="mpi:rma mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Get"
task="rma:MPI_Get"
template="function.error"
value="606"
/>
<!--MPI_Put-->
<event
channel="MpiApiChannel"
keywords="mpi:rma mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Put.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Put"
task="rma:MPI_Put"
template="MPI_Put.enter"
value="607"
/>
<event
channel="MpiApiChannel"
keywords="mpi:rma mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Put"
task="rma:MPI_Put"
value="608"
/>
<event
channel="MpiApiChannel"
keywords="mpi:rma mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Put"
task="rma:MPI_Put"
template="function.error"
value="609"
/>
<!--MPI_Win_complete-->
<event
channel="MpiApiChannel"
keywords="mpi:rma mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Win_complete.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Win_complete"
task="rma:MPI_Win_complete"
template="MPI_Win_complete.enter"
value="610"
/>
<event
channel="MpiApiChannel"
keywords="mpi:rma mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Win_complete"
task="rma:MPI_Win_complete"
value="611"
/>
<event
channel="MpiApiChannel"
keywords="mpi:rma mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Win_complete"
task="rma:MPI_Win_complete"
template="function.error"
value="612"
/>
<!--MPI_Win_create-->
<event
channel="MpiApiChannel"
keywords="mpi:rma mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Win_create.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Win_create"
task="rma:MPI_Win_create"
template="MPI_Win_create.enter"
value="613"
/>
<event
channel="MpiApiChannel"
keywords="mpi:rma mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Win_create.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Win_create"
task="rma:MPI_Win_create"
template="MPI_Win_create.leave"
value="614"
/>
<event
channel="MpiApiChannel"
keywords="mpi:rma mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Win_create"
task="rma:MPI_Win_create"
template="function.error"
value="615"
/>
<!--MPI_Win_fence-->
<event
channel="MpiApiChannel"
keywords="mpi:rma mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Win_fence.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Win_fence"
task="rma:MPI_Win_fence"
template="MPI_Win_fence.enter"
value="616"
/>
<event
channel="MpiApiChannel"
keywords="mpi:rma mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Win_fence"
task="rma:MPI_Win_fence"
value="617"
/>
<event
channel="MpiApiChannel"
keywords="mpi:rma mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Win_fence"
task="rma:MPI_Win_fence"
template="function.error"
value="618"
/>
<!--MPI_Win_free-->
<event
channel="MpiApiChannel"
keywords="mpi:rma mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Win_free.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Win_free"
task="rma:MPI_Win_free"
template="MPI_Win_free.enter"
value="619"
/>
<event
channel="MpiApiChannel"
keywords="mpi:rma mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Win_free"
task="rma:MPI_Win_free"
value="620"
/>
<event
channel="MpiApiChannel"
keywords="mpi:rma mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Win_free"
task="rma:MPI_Win_free"
template="function.error"
value="621"
/>
<!--MPI_Win_get_group-->
<event
channel="MpiApiChannel"
keywords="mpi:rma mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Win_get_group.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Win_get_group"
task="rma:MPI_Win_get_group"
template="MPI_Win_get_group.enter"
value="622"
/>
<event
channel="MpiApiChannel"
keywords="mpi:rma mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Win_get_group.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Win_get_group"
task="rma:MPI_Win_get_group"
template="MPI_Win_get_group.leave"
value="623"
/>
<event
channel="MpiApiChannel"
keywords="mpi:rma mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Win_get_group"
task="rma:MPI_Win_get_group"
template="function.error"
value="624"
/>
<!--MPI_Win_get_name-->
<event
channel="MpiApiChannel"
keywords="mpi:rma mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Win_get_name.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Win_get_name"
task="rma:MPI_Win_get_name"
template="MPI_Win_get_name.enter"
value="625"
/>
<event
channel="MpiApiChannel"
keywords="mpi:rma mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Win_get_name.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Win_get_name"
task="rma:MPI_Win_get_name"
template="MPI_Win_get_name.leave"
value="626"
/>
<event
channel="MpiApiChannel"
keywords="mpi:rma mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Win_get_name"
task="rma:MPI_Win_get_name"
template="function.error"
value="627"
/>
<!--MPI_Win_lock-->
<event
channel="MpiApiChannel"
keywords="mpi:rma mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Win_lock.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Win_lock"
task="rma:MPI_Win_lock"
template="MPI_Win_lock.enter"
value="628"
/>
<event
channel="MpiApiChannel"
keywords="mpi:rma mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Win_lock"
task="rma:MPI_Win_lock"
value="629"
/>
<event
channel="MpiApiChannel"
keywords="mpi:rma mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Win_lock"
task="rma:MPI_Win_lock"
template="function.error"
value="630"
/>
<!--MPI_Win_post-->
<event
channel="MpiApiChannel"
keywords="mpi:rma mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Win_post.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Win_post"
task="rma:MPI_Win_post"
template="MPI_Win_post.enter"
value="631"
/>
<event
channel="MpiApiChannel"
keywords="mpi:rma mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Win_post"
task="rma:MPI_Win_post"
value="632"
/>
<event
channel="MpiApiChannel"
keywords="mpi:rma mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Win_post"
task="rma:MPI_Win_post"
template="function.error"
value="633"
/>
<!--MPI_Win_set_name-->
<event
channel="MpiApiChannel"
keywords="mpi:rma mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Win_set_name.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Win_set_name"
task="rma:MPI_Win_set_name"
template="MPI_Win_set_name.enter"
value="634"
/>
<event
channel="MpiApiChannel"
keywords="mpi:rma mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Win_set_name"
task="rma:MPI_Win_set_name"
value="635"
/>
<event
channel="MpiApiChannel"
keywords="mpi:rma mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Win_set_name"
task="rma:MPI_Win_set_name"
template="function.error"
value="636"
/>
<!--MPI_Win_start-->
<event
channel="MpiApiChannel"
keywords="mpi:rma mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Win_start.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Win_start"
task="rma:MPI_Win_start"
template="MPI_Win_start.enter"
value="637"
/>
<event
channel="MpiApiChannel"
keywords="mpi:rma mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Win_start"
task="rma:MPI_Win_start"
value="638"
/>
<event
channel="MpiApiChannel"
keywords="mpi:rma mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Win_start"
task="rma:MPI_Win_start"
template="function.error"
value="639"
/>
<!--MPI_Win_test-->
<event
channel="MpiApiChannel"
keywords="mpi:rma mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Win_test.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Win_test"
task="rma:MPI_Win_test"
template="MPI_Win_test.enter"
value="640"
/>
<event
channel="MpiApiChannel"
keywords="mpi:rma mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Win_test.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Win_test"
task="rma:MPI_Win_test"
template="MPI_Win_test.leave"
value="641"
/>
<event
channel="MpiApiChannel"
keywords="mpi:rma mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Win_test"
task="rma:MPI_Win_test"
template="function.error"
value="642"
/>
<!--MPI_Win_unlock-->
<event
channel="MpiApiChannel"
keywords="mpi:rma mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Win_unlock.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Win_unlock"
task="rma:MPI_Win_unlock"
template="MPI_Win_unlock.enter"
value="643"
/>
<event
channel="MpiApiChannel"
keywords="mpi:rma mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Win_unlock"
task="rma:MPI_Win_unlock"
value="644"
/>
<event
channel="MpiApiChannel"
keywords="mpi:rma mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Win_unlock"
task="rma:MPI_Win_unlock"
template="function.error"
value="645"
/>
<!--MPI_Win_wait-->
<event
channel="MpiApiChannel"
keywords="mpi:rma mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Win_wait.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Win_wait"
task="rma:MPI_Win_wait"
template="MPI_Win_wait.enter"
value="646"
/>
<event
channel="MpiApiChannel"
keywords="mpi:rma mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Win_wait"
task="rma:MPI_Win_wait"
value="647"
/>
<event
channel="MpiApiChannel"
keywords="mpi:rma mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Win_wait"
task="rma:MPI_Win_wait"
template="function.error"
value="648"
/>
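<!-- MPI-IO (file) API events -->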
<!--MPI_File_close-->
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_enter"
level="win:Informational"
message="$(string.MPI_File_close.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_File_close"
task="io:MPI_File_close"
template="MPI_File_close.enter"
value="649"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_File_close"
task="io:MPI_File_close"
value="650"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_File_close"
task="io:MPI_File_close"
template="function.error"
value="651"
/>
<!--MPI_File_delete-->
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_enter"
level="win:Informational"
message="$(string.MPI_File_delete.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_File_delete"
task="io:MPI_File_delete"
template="MPI_File_delete.enter"
value="652"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_File_delete"
task="io:MPI_File_delete"
value="653"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_File_delete"
task="io:MPI_File_delete"
template="function.error"
value="654"
/>
<!--MPI_File_c2f-->
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_enter"
level="win:Informational"
message="$(string.MPI_File_c2f.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_File_c2f"
task="io:MPI_File_c2f"
template="MPI_File_c2f.enter"
value="655"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave"
level="win:Informational"
message="$(string.MPI_File_c2f.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_File_c2f"
task="io:MPI_File_c2f"
template="MPI_File_c2f.leave"
value="656"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_File_c2f"
task="io:MPI_File_c2f"
template="function.error"
value="657"
/>
<!--MPI_File_f2c-->
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_enter"
level="win:Informational"
message="$(string.MPI_File_f2c.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_File_f2c"
task="io:MPI_File_f2c"
template="MPI_File_f2c.enter"
value="658"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave"
level="win:Informational"
message="$(string.MPI_File_f2c.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_File_f2c"
task="io:MPI_File_f2c"
template="MPI_File_f2c.leave"
value="659"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_File_f2c"
task="io:MPI_File_f2c"
template="function.error"
value="660"
/>
<!--MPI_File_sync-->
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_enter"
level="win:Informational"
message="$(string.MPI_File_sync.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_File_sync"
task="io:MPI_File_sync"
template="MPI_File_sync.enter"
value="661"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_File_sync"
task="io:MPI_File_sync"
value="662"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_File_sync"
task="io:MPI_File_sync"
template="function.error"
value="663"
/>
<!--MPI_File_get_amode-->
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_enter"
level="win:Informational"
message="$(string.MPI_File_get_amode.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_File_get_amode"
task="io:MPI_File_get_amode"
template="MPI_File_get_amode.enter"
value="664"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave"
level="win:Informational"
message="$(string.MPI_File_get_amode.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_File_get_amode"
task="io:MPI_File_get_amode"
template="MPI_File_get_amode.leave"
value="665"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_File_get_amode"
task="io:MPI_File_get_amode"
template="function.error"
value="666"
/>
<!--MPI_File_get_atomicity-->
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_enter"
level="win:Informational"
message="$(string.MPI_File_get_atomicity.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_File_get_atomicity"
task="io:MPI_File_get_atomicity"
template="MPI_File_get_atomicity.enter"
value="667"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave"
level="win:Informational"
message="$(string.MPI_File_get_atomicity.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_File_get_atomicity"
task="io:MPI_File_get_atomicity"
template="MPI_File_get_atomicity.leave"
value="668"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_File_get_atomicity"
task="io:MPI_File_get_atomicity"
template="function.error"
value="669"
/>
<!--MPI_File_get_byte_offset-->
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_enter"
level="win:Informational"
message="$(string.MPI_File_get_byte_offset.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_File_get_byte_offset"
task="io:MPI_File_get_byte_offset"
template="MPI_File_get_byte_offset.enter"
value="670"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave"
level="win:Informational"
message="$(string.MPI_File_get_byte_offset.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_File_get_byte_offset"
task="io:MPI_File_get_byte_offset"
template="MPI_File_get_byte_offset.leave"
value="671"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_File_get_byte_offset"
task="io:MPI_File_get_byte_offset"
template="function.error"
value="672"
/>
<!--MPI_File_get_type_extent-->
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_enter"
level="win:Informational"
message="$(string.MPI_File_get_type_extent.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_File_get_type_extent"
task="io:MPI_File_get_type_extent"
template="MPI_File_get_type_extent.enter"
value="673"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave"
level="win:Informational"
message="$(string.MPI_File_get_type_extent.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_File_get_type_extent"
task="io:MPI_File_get_type_extent"
template="MPI_File_get_type_extent.leave"
value="674"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_File_get_type_extent"
task="io:MPI_File_get_type_extent"
template="function.error"
value="675"
/>
<!--MPI_File_get_group-->
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_enter"
level="win:Informational"
message="$(string.MPI_File_get_group.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_File_get_group"
task="io:MPI_File_get_group"
template="MPI_File_get_group.enter"
value="676"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave"
level="win:Informational"
message="$(string.MPI_File_get_group.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_File_get_group"
task="io:MPI_File_get_group"
template="MPI_File_get_group.leave"
value="677"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_File_get_group"
task="io:MPI_File_get_group"
template="function.error"
value="678"
/>
<!--MPI_File_get_info-->
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_enter"
level="win:Informational"
message="$(string.MPI_File_get_info.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_File_get_info"
task="io:MPI_File_get_info"
template="MPI_File_get_info.enter"
value="679"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave"
level="win:Informational"
message="$(string.MPI_File_get_info.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_File_get_info"
task="io:MPI_File_get_info"
template="MPI_File_get_info.leave"
value="680"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_File_get_info"
task="io:MPI_File_get_info"
template="function.error"
value="681"
/>
<!--MPI_File_get_position-->
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_enter"
level="win:Informational"
message="$(string.MPI_File_get_position.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_File_get_position"
task="io:MPI_File_get_position"
template="MPI_File_get_position.enter"
value="682"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave"
level="win:Informational"
message="$(string.MPI_File_get_position.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_File_get_position"
task="io:MPI_File_get_position"
template="MPI_File_get_position.leave"
value="683"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_File_get_position"
task="io:MPI_File_get_position"
template="function.error"
value="684"
/>
<!--MPI_File_get_position_shared-->
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_enter"
level="win:Informational"
message="$(string.MPI_File_get_position_shared.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_File_get_position_shared"
task="io:MPI_File_get_position_shared"
template="MPI_File_get_position_shared.enter"
value="685"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave"
level="win:Informational"
message="$(string.MPI_File_get_position_shared.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_File_get_position_shared"
task="io:MPI_File_get_position_shared"
template="MPI_File_get_position_shared.leave"
value="686"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_File_get_position_shared"
task="io:MPI_File_get_position_shared"
template="function.error"
value="687"
/>
<!--MPI_File_get_size-->
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_enter"
level="win:Informational"
message="$(string.MPI_File_get_size.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_File_get_size"
task="io:MPI_File_get_size"
template="MPI_File_get_size.enter"
value="688"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave"
level="win:Informational"
message="$(string.MPI_File_get_size.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_File_get_size"
task="io:MPI_File_get_size"
template="MPI_File_get_size.leave"
value="689"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_File_get_size"
task="io:MPI_File_get_size"
template="function.error"
value="690"
/>
<!--MPI_File_get_view-->
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_enter"
level="win:Informational"
message="$(string.MPI_File_get_view.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_File_get_view"
task="io:MPI_File_get_view"
template="MPI_File_get_view.enter"
value="691"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave"
level="win:Informational"
message="$(string.MPI_File_get_view.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_File_get_view"
task="io:MPI_File_get_view"
template="MPI_File_get_view.leave"
value="692"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_File_get_view"
task="io:MPI_File_get_view"
template="function.error"
value="693"
/>
<!--MPI_File_iread-->
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_enter"
level="win:Informational"
message="$(string.MPI_File_iread.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_File_iread"
task="io:MPI_File_iread"
template="MPI_File_iread.enter"
value="694"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave"
level="win:Informational"
message="$(string.MPI_File_iread.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_File_iread"
task="io:MPI_File_iread"
template="MPI_File_iread.leave"
value="695"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_File_iread"
task="io:MPI_File_iread"
template="function.error"
value="696"
/>
<!--MPI_File_iread_at-->
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_enter"
level="win:Informational"
message="$(string.MPI_File_iread_at.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_File_iread_at"
task="io:MPI_File_iread_at"
template="MPI_File_iread_at.enter"
value="697"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave"
level="win:Informational"
message="$(string.MPI_File_iread_at.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_File_iread_at"
task="io:MPI_File_iread_at"
template="MPI_File_iread_at.leave"
value="698"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_File_iread_at"
task="io:MPI_File_iread_at"
template="function.error"
value="699"
/>
<!--MPI_File_iread_shared-->
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_enter"
level="win:Informational"
message="$(string.MPI_File_iread_shared.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_File_iread_shared"
task="io:MPI_File_iread_shared"
template="MPI_File_iread_shared.enter"
value="700"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave"
level="win:Informational"
message="$(string.MPI_File_iread_shared.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_File_iread_shared"
task="io:MPI_File_iread_shared"
template="MPI_File_iread_shared.leave"
value="701"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_File_iread_shared"
task="io:MPI_File_iread_shared"
template="function.error"
value="702"
/>
<!--MPI_File_iwrite-->
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_enter"
level="win:Informational"
message="$(string.MPI_File_iwrite.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_File_iwrite"
task="io:MPI_File_iwrite"
template="MPI_File_iwrite.enter"
value="703"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave"
level="win:Informational"
message="$(string.MPI_File_iwrite.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_File_iwrite"
task="io:MPI_File_iwrite"
template="MPI_File_iwrite.leave"
value="704"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_File_iwrite"
task="io:MPI_File_iwrite"
template="function.error"
value="705"
/>
<!--MPI_File_iwrite_at-->
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_enter"
level="win:Informational"
message="$(string.MPI_File_iwrite_at.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_File_iwrite_at"
task="io:MPI_File_iwrite_at"
template="MPI_File_iwrite_at.enter"
value="706"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave"
level="win:Informational"
message="$(string.MPI_File_iwrite_at.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_File_iwrite_at"
task="io:MPI_File_iwrite_at"
template="MPI_File_iwrite_at.leave"
value="707"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_File_iwrite_at"
task="io:MPI_File_iwrite_at"
template="function.error"
value="708"
/>
<!--MPI_File_iwrite_shared-->
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_enter"
level="win:Informational"
message="$(string.MPI_File_iwrite_shared.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_File_iwrite_shared"
task="io:MPI_File_iwrite_shared"
template="MPI_File_iwrite_shared.enter"
value="709"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave"
level="win:Informational"
message="$(string.MPI_File_iwrite_shared.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_File_iwrite_shared"
task="io:MPI_File_iwrite_shared"
template="MPI_File_iwrite_shared.leave"
value="710"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_File_iwrite_shared"
task="io:MPI_File_iwrite_shared"
template="function.error"
value="711"
/>
<!--MPI_File_open-->
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_enter"
level="win:Informational"
message="$(string.MPI_File_open.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_File_open"
task="io:MPI_File_open"
template="MPI_File_open.enter"
value="712"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave"
level="win:Informational"
message="$(string.MPI_File_open.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_File_open"
task="io:MPI_File_open"
template="MPI_File_open.leave"
value="713"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_File_open"
task="io:MPI_File_open"
template="function.error"
value="714"
/>
<!--MPI_File_preallocate-->
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_enter"
level="win:Informational"
message="$(string.MPI_File_preallocate.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_File_preallocate"
task="io:MPI_File_preallocate"
template="MPI_File_preallocate.enter"
value="715"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_File_preallocate"
task="io:MPI_File_preallocate"
value="716"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_File_preallocate"
task="io:MPI_File_preallocate"
template="function.error"
value="717"
/>
<!--MPI_File_read_at_all_begin-->
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_enter"
level="win:Informational"
message="$(string.MPI_File_read_at_all_begin.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_File_read_at_all_begin"
task="io:MPI_File_read_at_all_begin"
template="MPI_File_read_at_all_begin.enter"
value="718"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_File_read_at_all_begin"
task="io:MPI_File_read_at_all_begin"
value="719"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_File_read_at_all_begin"
task="io:MPI_File_read_at_all_begin"
template="function.error"
value="720"
/>
<!--MPI_File_read_at_all_end-->
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_enter"
level="win:Informational"
message="$(string.MPI_File_read_at_all_end.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_File_read_at_all_end"
task="io:MPI_File_read_at_all_end"
template="MPI_File_read_at_all_end.enter"
value="721"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave"
level="win:Informational"
message="$(string.MPI_File_read_at_all_end.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_File_read_at_all_end"
task="io:MPI_File_read_at_all_end"
template="MPI_File_read_at_all_end.leave"
value="722"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_File_read_at_all_end"
task="io:MPI_File_read_at_all_end"
template="function.error"
value="723"
/>
<!--MPI_File_read-->
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_enter"
level="win:Informational"
message="$(string.MPI_File_read.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_File_read"
task="io:MPI_File_read"
template="MPI_File_read.enter"
value="724"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave"
level="win:Informational"
message="$(string.MPI_File_read.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_File_read"
task="io:MPI_File_read"
template="MPI_File_read.leave"
value="725"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_File_read"
task="io:MPI_File_read"
template="function.error"
value="726"
/>
<!--MPI_File_read_all-->
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_enter"
level="win:Informational"
message="$(string.MPI_File_read_all.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_File_read_all"
task="io:MPI_File_read_all"
template="MPI_File_read_all.enter"
value="727"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave"
level="win:Informational"
message="$(string.MPI_File_read_all.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_File_read_all"
task="io:MPI_File_read_all"
template="MPI_File_read_all.leave"
value="728"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_File_read_all"
task="io:MPI_File_read_all"
template="function.error"
value="729"
/>
<!--MPI_File_read_all_begin-->
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_enter"
level="win:Informational"
message="$(string.MPI_File_read_all_begin.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_File_read_all_begin"
task="io:MPI_File_read_all_begin"
template="MPI_File_read_all_begin.enter"
value="730"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_File_read_all_begin"
task="io:MPI_File_read_all_begin"
value="731"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_File_read_all_begin"
task="io:MPI_File_read_all_begin"
template="function.error"
value="732"
/>
<!--MPI_File_read_all_end-->
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_enter"
level="win:Informational"
message="$(string.MPI_File_read_all_end.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_File_read_all_end"
task="io:MPI_File_read_all_end"
template="MPI_File_read_all_end.enter"
value="733"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave"
level="win:Informational"
message="$(string.MPI_File_read_all_end.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_File_read_all_end"
task="io:MPI_File_read_all_end"
template="MPI_File_read_all_end.leave"
value="734"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_File_read_all_end"
task="io:MPI_File_read_all_end"
template="function.error"
value="735"
/>
<!--MPI_File_read_at-->
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_enter"
level="win:Informational"
message="$(string.MPI_File_read_at.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_File_read_at"
task="io:MPI_File_read_at"
template="MPI_File_read_at.enter"
value="736"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave"
level="win:Informational"
message="$(string.MPI_File_read_at.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_File_read_at"
task="io:MPI_File_read_at"
template="MPI_File_read_at.leave"
value="737"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_File_read_at"
task="io:MPI_File_read_at"
template="function.error"
value="738"
/>
<!--MPI_File_read_at_all-->
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_enter"
level="win:Informational"
message="$(string.MPI_File_read_at_all.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_File_read_at_all"
task="io:MPI_File_read_at_all"
template="MPI_File_read_at_all.enter"
value="739"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave"
level="win:Informational"
message="$(string.MPI_File_read_at_all.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_File_read_at_all"
task="io:MPI_File_read_at_all"
template="MPI_File_read_at_all.leave"
value="740"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_File_read_at_all"
task="io:MPI_File_read_at_all"
template="function.error"
value="741"
/>
<!--MPI_File_read_ordered-->
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_enter"
level="win:Informational"
message="$(string.MPI_File_read_ordered.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_File_read_ordered"
task="io:MPI_File_read_ordered"
template="MPI_File_read_ordered.enter"
value="742"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave"
level="win:Informational"
message="$(string.MPI_File_read_ordered.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_File_read_ordered"
task="io:MPI_File_read_ordered"
template="MPI_File_read_ordered.leave"
value="743"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_File_read_ordered"
task="io:MPI_File_read_ordered"
template="function.error"
value="744"
/>
<!--MPI_File_read_ordered_begin-->
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_enter"
level="win:Informational"
message="$(string.MPI_File_read_ordered_begin.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_File_read_ordered_begin"
task="io:MPI_File_read_ordered_begin"
template="MPI_File_read_ordered_begin.enter"
value="745"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_File_read_ordered_begin"
task="io:MPI_File_read_ordered_begin"
value="746"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_File_read_ordered_begin"
task="io:MPI_File_read_ordered_begin"
template="function.error"
value="747"
/>
<!--MPI_File_read_ordered_end-->
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_enter"
level="win:Informational"
message="$(string.MPI_File_read_ordered_end.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_File_read_ordered_end"
task="io:MPI_File_read_ordered_end"
template="MPI_File_read_ordered_end.enter"
value="748"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave"
level="win:Informational"
message="$(string.MPI_File_read_ordered_end.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_File_read_ordered_end"
task="io:MPI_File_read_ordered_end"
template="MPI_File_read_ordered_end.leave"
value="749"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_File_read_ordered_end"
task="io:MPI_File_read_ordered_end"
template="function.error"
value="750"
/>
<!--MPI_File_read_shared-->
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_enter"
level="win:Informational"
message="$(string.MPI_File_read_shared.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_File_read_shared"
task="io:MPI_File_read_shared"
template="MPI_File_read_shared.enter"
value="751"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave"
level="win:Informational"
message="$(string.MPI_File_read_shared.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_File_read_shared"
task="io:MPI_File_read_shared"
template="MPI_File_read_shared.leave"
value="752"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_File_read_shared"
task="io:MPI_File_read_shared"
template="function.error"
value="753"
/>
<!--MPI_File_seek-->
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_enter"
level="win:Informational"
message="$(string.MPI_File_seek.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_File_seek"
task="io:MPI_File_seek"
template="MPI_File_seek.enter"
value="754"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_File_seek"
task="io:MPI_File_seek"
value="755"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_File_seek"
task="io:MPI_File_seek"
template="function.error"
value="756"
/>
<!--MPI_File_seek_shared-->
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_enter"
level="win:Informational"
message="$(string.MPI_File_seek_shared.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_File_seek_shared"
task="io:MPI_File_seek_shared"
template="MPI_File_seek_shared.enter"
value="757"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_File_seek_shared"
task="io:MPI_File_seek_shared"
value="758"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_File_seek_shared"
task="io:MPI_File_seek_shared"
template="function.error"
value="759"
/>
<!--MPI_File_set_atomicity-->
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_enter"
level="win:Informational"
message="$(string.MPI_File_set_atomicity.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_File_set_atomicity"
task="io:MPI_File_set_atomicity"
template="MPI_File_set_atomicity.enter"
value="760"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_File_set_atomicity"
task="io:MPI_File_set_atomicity"
value="761"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_File_set_atomicity"
task="io:MPI_File_set_atomicity"
template="function.error"
value="762"
/>
<!--MPI_File_set_info-->
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_enter"
level="win:Informational"
message="$(string.MPI_File_set_info.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_File_set_info"
task="io:MPI_File_set_info"
template="MPI_File_set_info.enter"
value="763"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_File_set_info"
task="io:MPI_File_set_info"
value="764"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_File_set_info"
task="io:MPI_File_set_info"
template="function.error"
value="765"
/>
<!--MPI_File_set_size-->
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_enter"
level="win:Informational"
message="$(string.MPI_File_set_size.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_File_set_size"
task="io:MPI_File_set_size"
template="MPI_File_set_size.enter"
value="766"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_File_set_size"
task="io:MPI_File_set_size"
value="767"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_File_set_size"
task="io:MPI_File_set_size"
template="function.error"
value="768"
/>
<!--MPI_File_set_view-->
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_enter"
level="win:Informational"
message="$(string.MPI_File_set_view.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_File_set_view"
task="io:MPI_File_set_view"
template="MPI_File_set_view.enter"
value="769"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_File_set_view"
task="io:MPI_File_set_view"
value="770"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_File_set_view"
task="io:MPI_File_set_view"
template="function.error"
value="771"
/>
<!--MPI_File_write_at_all_begin-->
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_enter"
level="win:Informational"
message="$(string.MPI_File_write_at_all_begin.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_File_write_at_all_begin"
task="io:MPI_File_write_at_all_begin"
template="MPI_File_write_at_all_begin.enter"
value="772"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_File_write_at_all_begin"
task="io:MPI_File_write_at_all_begin"
value="773"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_File_write_at_all_begin"
task="io:MPI_File_write_at_all_begin"
template="function.error"
value="774"
/>
<!--MPI_File_write_at_all_end-->
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_enter"
level="win:Informational"
message="$(string.MPI_File_write_at_all_end.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_File_write_at_all_end"
task="io:MPI_File_write_at_all_end"
template="MPI_File_write_at_all_end.enter"
value="775"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave"
level="win:Informational"
message="$(string.MPI_File_write_at_all_end.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_File_write_at_all_end"
task="io:MPI_File_write_at_all_end"
template="MPI_File_write_at_all_end.leave"
value="776"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_File_write_at_all_end"
task="io:MPI_File_write_at_all_end"
template="function.error"
value="777"
/>
<!--MPI_File_write-->
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_enter"
level="win:Informational"
message="$(string.MPI_File_write.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_File_write"
task="io:MPI_File_write"
template="MPI_File_write.enter"
value="778"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave"
level="win:Informational"
message="$(string.MPI_File_write.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_File_write"
task="io:MPI_File_write"
template="MPI_File_write.leave"
value="779"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_File_write"
task="io:MPI_File_write"
template="function.error"
value="780"
/>
<!--MPI_File_write_all-->
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_enter"
level="win:Informational"
message="$(string.MPI_File_write_all.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_File_write_all"
task="io:MPI_File_write_all"
template="MPI_File_write_all.enter"
value="781"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave"
level="win:Informational"
message="$(string.MPI_File_write_all.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_File_write_all"
task="io:MPI_File_write_all"
template="MPI_File_write_all.leave"
value="782"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_File_write_all"
task="io:MPI_File_write_all"
template="function.error"
value="783"
/>
<!--MPI_File_write_all_begin-->
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_enter"
level="win:Informational"
message="$(string.MPI_File_write_all_begin.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_File_write_all_begin"
task="io:MPI_File_write_all_begin"
template="MPI_File_write_all_begin.enter"
value="784"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_File_write_all_begin"
task="io:MPI_File_write_all_begin"
value="785"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_File_write_all_begin"
task="io:MPI_File_write_all_begin"
template="function.error"
value="786"
/>
<!--MPI_File_write_all_end-->
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_enter"
level="win:Informational"
message="$(string.MPI_File_write_all_end.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_File_write_all_end"
task="io:MPI_File_write_all_end"
template="MPI_File_write_all_end.enter"
value="787"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave"
level="win:Informational"
message="$(string.MPI_File_write_all_end.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_File_write_all_end"
task="io:MPI_File_write_all_end"
template="MPI_File_write_all_end.leave"
value="788"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_File_write_all_end"
task="io:MPI_File_write_all_end"
template="function.error"
value="789"
/>
<!--MPI_File_write_at-->
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_enter"
level="win:Informational"
message="$(string.MPI_File_write_at.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_File_write_at"
task="io:MPI_File_write_at"
template="MPI_File_write_at.enter"
value="790"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave"
level="win:Informational"
message="$(string.MPI_File_write_at.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_File_write_at"
task="io:MPI_File_write_at"
template="MPI_File_write_at.leave"
value="791"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_File_write_at"
task="io:MPI_File_write_at"
template="function.error"
value="792"
/>
<!--MPI_File_write_at_all-->
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_enter"
level="win:Informational"
message="$(string.MPI_File_write_at_all.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_File_write_at_all"
task="io:MPI_File_write_at_all"
template="MPI_File_write_at_all.enter"
value="793"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave"
level="win:Informational"
message="$(string.MPI_File_write_at_all.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_File_write_at_all"
task="io:MPI_File_write_at_all"
template="MPI_File_write_at_all.leave"
value="794"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_File_write_at_all"
task="io:MPI_File_write_at_all"
template="function.error"
value="795"
/>
<!--MPI_File_write_ordered-->
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_enter"
level="win:Informational"
message="$(string.MPI_File_write_ordered.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_File_write_ordered"
task="io:MPI_File_write_ordered"
template="MPI_File_write_ordered.enter"
value="796"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave"
level="win:Informational"
message="$(string.MPI_File_write_ordered.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_File_write_ordered"
task="io:MPI_File_write_ordered"
template="MPI_File_write_ordered.leave"
value="797"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_File_write_ordered"
task="io:MPI_File_write_ordered"
template="function.error"
value="798"
/>
<!--MPI_File_write_ordered_begin-->
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_enter"
level="win:Informational"
message="$(string.MPI_File_write_ordered_begin.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_File_write_ordered_begin"
task="io:MPI_File_write_ordered_begin"
template="MPI_File_write_ordered_begin.enter"
value="799"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_File_write_ordered_begin"
task="io:MPI_File_write_ordered_begin"
value="800"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_File_write_ordered_begin"
task="io:MPI_File_write_ordered_begin"
template="function.error"
value="801"
/>
<!--MPI_File_write_ordered_end-->
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_enter"
level="win:Informational"
message="$(string.MPI_File_write_ordered_end.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_File_write_ordered_end"
task="io:MPI_File_write_ordered_end"
template="MPI_File_write_ordered_end.enter"
value="802"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave"
level="win:Informational"
message="$(string.MPI_File_write_ordered_end.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_File_write_ordered_end"
task="io:MPI_File_write_ordered_end"
template="MPI_File_write_ordered_end.leave"
value="803"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_File_write_ordered_end"
task="io:MPI_File_write_ordered_end"
template="function.error"
value="804"
/>
<!--MPI_File_write_shared-->
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_enter"
level="win:Informational"
message="$(string.MPI_File_write_shared.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_File_write_shared"
task="io:MPI_File_write_shared"
template="MPI_File_write_shared.enter"
value="805"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave"
level="win:Informational"
message="$(string.MPI_File_write_shared.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_File_write_shared"
task="io:MPI_File_write_shared"
template="MPI_File_write_shared.leave"
value="806"
/>
<event
channel="MpiApiChannel"
keywords="mpi:io mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_File_write_shared"
task="io:MPI_File_write_shared"
template="function.error"
value="807"
/>
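<!--
The events below cover dynamic process management: port open/close, name
publishing and lookup, connect/accept, and spawn. They are keyed under
mpi:spwn with spwn:* tasks.
-->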
<!--MPI_Close_port-->
<event
channel="MpiApiChannel"
keywords="mpi:spwn mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Close_port.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Close_port"
task="spwn:MPI_Close_port"
template="MPI_Close_port.enter"
value="808"
/>
<event
channel="MpiApiChannel"
keywords="mpi:spwn mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Close_port"
task="spwn:MPI_Close_port"
value="809"
/>
<event
channel="MpiApiChannel"
keywords="mpi:spwn mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Close_port"
task="spwn:MPI_Close_port"
template="function.error"
value="810"
/>
<!--MPI_Comm_accept-->
<event
channel="MpiApiChannel"
keywords="mpi:spwn mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Comm_accept.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Comm_accept"
task="spwn:MPI_Comm_accept"
template="MPI_Comm_accept.enter"
value="811"
/>
<event
channel="MpiApiChannel"
keywords="mpi:spwn mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Comm_accept.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Comm_accept"
task="spwn:MPI_Comm_accept"
template="MPI_Comm_accept.leave"
value="812"
/>
<event
channel="MpiApiChannel"
keywords="mpi:spwn mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Comm_accept"
task="spwn:MPI_Comm_accept"
template="function.error"
value="813"
/>
<!--MPI_Comm_connect-->
<event
channel="MpiApiChannel"
keywords="mpi:spwn mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Comm_connect.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Comm_connect"
task="spwn:MPI_Comm_connect"
template="MPI_Comm_connect.enter"
value="814"
/>
<event
channel="MpiApiChannel"
keywords="mpi:spwn mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Comm_connect.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Comm_connect"
task="spwn:MPI_Comm_connect"
template="MPI_Comm_connect.leave"
value="815"
/>
<event
channel="MpiApiChannel"
keywords="mpi:spwn mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Comm_connect"
task="spwn:MPI_Comm_connect"
template="function.error"
value="816"
/>
<!--MPI_Comm_disconnect-->
<event
channel="MpiApiChannel"
keywords="mpi:spwn mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Comm_disconnect.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Comm_disconnect"
task="spwn:MPI_Comm_disconnect"
template="MPI_Comm_disconnect.enter"
value="817"
/>
<event
channel="MpiApiChannel"
keywords="mpi:spwn mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Comm_disconnect"
task="spwn:MPI_Comm_disconnect"
value="818"
/>
<event
channel="MpiApiChannel"
keywords="mpi:spwn mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Comm_disconnect"
task="spwn:MPI_Comm_disconnect"
template="function.error"
value="819"
/>
<!--MPI_Comm_get_parent-->
<event
channel="MpiApiChannel"
keywords="mpi:spwn mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Comm_get_parent.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Comm_get_parent"
task="spwn:MPI_Comm_get_parent"
value="820"
/>
<event
channel="MpiApiChannel"
keywords="mpi:spwn mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Comm_get_parent.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Comm_get_parent"
task="spwn:MPI_Comm_get_parent"
template="MPI_Comm_get_parent.leave"
value="821"
/>
<event
channel="MpiApiChannel"
keywords="mpi:spwn mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Comm_get_parent"
task="spwn:MPI_Comm_get_parent"
template="function.error"
value="822"
/>
<!--MPI_Comm_join-->
<event
channel="MpiApiChannel"
keywords="mpi:spwn mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Comm_join.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Comm_join"
task="spwn:MPI_Comm_join"
template="MPI_Comm_join.enter"
value="823"
/>
<event
channel="MpiApiChannel"
keywords="mpi:spwn mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Comm_join.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Comm_join"
task="spwn:MPI_Comm_join"
template="MPI_Comm_join.leave"
value="824"
/>
<event
channel="MpiApiChannel"
keywords="mpi:spwn mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Comm_join"
task="spwn:MPI_Comm_join"
template="function.error"
value="825"
/>
<!--MPI_Comm_spawn-->
<event
channel="MpiApiChannel"
keywords="mpi:spwn mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Comm_spawn.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Comm_spawn"
task="spwn:MPI_Comm_spawn"
template="MPI_Comm_spawn.enter"
value="826"
/>
<event
channel="MpiApiChannel"
keywords="mpi:spwn mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Comm_spawn.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Comm_spawn"
task="spwn:MPI_Comm_spawn"
template="MPI_Comm_spawn.leave"
value="827"
/>
<event
channel="MpiApiChannel"
keywords="mpi:spwn mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Comm_spawn"
task="spwn:MPI_Comm_spawn"
template="function.error"
value="828"
/>
<!--MPI_Comm_spawn_multiple-->
<event
channel="MpiApiChannel"
keywords="mpi:spwn mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Comm_spawn_multiple.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Comm_spawn_multiple"
task="spwn:MPI_Comm_spawn_multiple"
template="MPI_Comm_spawn_multiple.enter"
value="829"
/>
<event
channel="MpiApiChannel"
keywords="mpi:spwn mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Comm_spawn_multiple.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Comm_spawn_multiple"
task="spwn:MPI_Comm_spawn_multiple"
template="MPI_Comm_spawn_multiple.leave"
value="830"
/>
<event
channel="MpiApiChannel"
keywords="mpi:spwn mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Comm_spawn_multiple"
task="spwn:MPI_Comm_spawn_multiple"
template="function.error"
value="831"
/>
<!--MPI_Lookup_name-->
<event
channel="MpiApiChannel"
keywords="mpi:spwn mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Lookup_name.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Lookup_name"
task="spwn:MPI_Lookup_name"
template="MPI_Lookup_name.enter"
value="832"
/>
<event
channel="MpiApiChannel"
keywords="mpi:spwn mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Lookup_name.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Lookup_name"
task="spwn:MPI_Lookup_name"
template="MPI_Lookup_name.leave"
value="833"
/>
<event
channel="MpiApiChannel"
keywords="mpi:spwn mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Lookup_name"
task="spwn:MPI_Lookup_name"
template="function.error"
value="834"
/>
<!--MPI_Open_port-->
<event
channel="MpiApiChannel"
keywords="mpi:spwn mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Open_port.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Open_port"
task="spwn:MPI_Open_port"
template="MPI_Open_port.enter"
value="835"
/>
<event
channel="MpiApiChannel"
keywords="mpi:spwn mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Open_port.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Open_port"
task="spwn:MPI_Open_port"
template="MPI_Open_port.leave"
value="836"
/>
<event
channel="MpiApiChannel"
keywords="mpi:spwn mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Open_port"
task="spwn:MPI_Open_port"
template="function.error"
value="837"
/>
<!--MPI_Publish_name-->
<event
channel="MpiApiChannel"
keywords="mpi:spwn mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Publish_name.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Publish_name"
task="spwn:MPI_Publish_name"
template="MPI_Publish_name.enter"
value="838"
/>
<event
channel="MpiApiChannel"
keywords="mpi:spwn mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Publish_name"
task="spwn:MPI_Publish_name"
value="839"
/>
<event
channel="MpiApiChannel"
keywords="mpi:spwn mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Publish_name"
task="spwn:MPI_Publish_name"
template="function.error"
value="840"
/>
<!--MPI_Unpublish_name-->
<event
channel="MpiApiChannel"
keywords="mpi:spwn mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Unpublish_name.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Unpublish_name"
task="spwn:MPI_Unpublish_name"
template="MPI_Unpublish_name.enter"
value="841"
/>
<event
channel="MpiApiChannel"
keywords="mpi:spwn mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Unpublish_name"
task="spwn:MPI_Unpublish_name"
value="842"
/>
<event
channel="MpiApiChannel"
keywords="mpi:spwn mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Unpublish_name"
task="spwn:MPI_Unpublish_name"
template="function.error"
value="843"
/>
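<!--
Process topology events (Cartesian and graph functions) follow, keyed
under mpi:topo with topo:* tasks.
-->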
<!--MPI_Cart_coords-->
<event
channel="MpiApiChannel"
keywords="mpi:topo mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Cart_coords.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Cart_coords"
task="topo:MPI_Cart_coords"
template="MPI_Cart_coords.enter"
value="844"
/>
<event
channel="MpiApiChannel"
keywords="mpi:topo mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Cart_coords.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Cart_coords"
task="topo:MPI_Cart_coords"
template="MPI_Cart_coords.leave"
value="845"
/>
<event
channel="MpiApiChannel"
keywords="mpi:topo mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Cart_coords"
task="topo:MPI_Cart_coords"
template="function.error"
value="846"
/>
<!--MPI_Cart_create-->
<event
channel="MpiApiChannel"
keywords="mpi:topo mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Cart_create.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Cart_create"
task="topo:MPI_Cart_create"
template="MPI_Cart_create.enter"
value="847"
/>
<event
channel="MpiApiChannel"
keywords="mpi:topo mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Cart_create.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Cart_create"
task="topo:MPI_Cart_create"
template="MPI_Cart_create.leave"
value="848"
/>
<event
channel="MpiApiChannel"
keywords="mpi:topo mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Cart_create"
task="topo:MPI_Cart_create"
template="function.error"
value="849"
/>
<!--MPI_Cart_get-->
<event
channel="MpiApiChannel"
keywords="mpi:topo mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Cart_get.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Cart_get"
task="topo:MPI_Cart_get"
template="MPI_Cart_get.enter"
value="850"
/>
<event
channel="MpiApiChannel"
keywords="mpi:topo mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Cart_get.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Cart_get"
task="topo:MPI_Cart_get"
template="MPI_Cart_get.leave"
value="851"
/>
<event
channel="MpiApiChannel"
keywords="mpi:topo mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Cart_get"
task="topo:MPI_Cart_get"
template="function.error"
value="852"
/>
<!--MPI_Cart_map-->
<event
channel="MpiApiChannel"
keywords="mpi:topo mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Cart_map.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Cart_map"
task="topo:MPI_Cart_map"
template="MPI_Cart_map.enter"
value="853"
/>
<event
channel="MpiApiChannel"
keywords="mpi:topo mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Cart_map.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Cart_map"
task="topo:MPI_Cart_map"
template="MPI_Cart_map.leave"
value="854"
/>
<event
channel="MpiApiChannel"
keywords="mpi:topo mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Cart_map"
task="topo:MPI_Cart_map"
template="function.error"
value="855"
/>
<!--MPI_Cart_rank-->
<event
channel="MpiApiChannel"
keywords="mpi:topo mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Cart_rank.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Cart_rank"
task="topo:MPI_Cart_rank"
template="MPI_Cart_rank.enter"
value="856"
/>
<event
channel="MpiApiChannel"
keywords="mpi:topo mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Cart_rank.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Cart_rank"
task="topo:MPI_Cart_rank"
template="MPI_Cart_rank.leave"
value="857"
/>
<event
channel="MpiApiChannel"
keywords="mpi:topo mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Cart_rank"
task="topo:MPI_Cart_rank"
template="function.error"
value="858"
/>
<!--MPI_Cart_shift-->
<event
channel="MpiApiChannel"
keywords="mpi:topo mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Cart_shift.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Cart_shift"
task="topo:MPI_Cart_shift"
template="MPI_Cart_shift.enter"
value="859"
/>
<event
channel="MpiApiChannel"
keywords="mpi:topo mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Cart_shift.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Cart_shift"
task="topo:MPI_Cart_shift"
template="MPI_Cart_shift.leave"
value="860"
/>
<event
channel="MpiApiChannel"
keywords="mpi:topo mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Cart_shift"
task="topo:MPI_Cart_shift"
template="function.error"
value="861"
/>
<!--MPI_Cart_sub-->
<event
channel="MpiApiChannel"
keywords="mpi:topo mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Cart_sub.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Cart_sub"
task="topo:MPI_Cart_sub"
template="MPI_Cart_sub.enter"
value="862"
/>
<event
channel="MpiApiChannel"
keywords="mpi:topo mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Cart_sub.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Cart_sub"
task="topo:MPI_Cart_sub"
template="MPI_Cart_sub.leave"
value="863"
/>
<event
channel="MpiApiChannel"
keywords="mpi:topo mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Cart_sub"
task="topo:MPI_Cart_sub"
template="function.error"
value="864"
/>
<!--MPI_Cartdim_get-->
<event
channel="MpiApiChannel"
keywords="mpi:topo mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Cartdim_get.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Cartdim_get"
task="topo:MPI_Cartdim_get"
template="MPI_Cartdim_get.enter"
value="865"
/>
<event
channel="MpiApiChannel"
keywords="mpi:topo mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Cartdim_get.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Cartdim_get"
task="topo:MPI_Cartdim_get"
template="MPI_Cartdim_get.leave"
value="866"
/>
<event
channel="MpiApiChannel"
keywords="mpi:topo mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Cartdim_get"
task="topo:MPI_Cartdim_get"
template="function.error"
value="867"
/>
<!--MPI_Dims_create-->
<event
channel="MpiApiChannel"
keywords="mpi:topo mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Dims_create.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Dims_create"
task="topo:MPI_Dims_create"
template="MPI_Dims_create.enter"
value="868"
/>
<event
channel="MpiApiChannel"
keywords="mpi:topo mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Dims_create"
task="topo:MPI_Dims_create"
value="869"
/>
<event
channel="MpiApiChannel"
keywords="mpi:topo mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Dims_create"
task="topo:MPI_Dims_create"
template="function.error"
value="870"
/>
<!--MPI_Graph_get-->
<event
channel="MpiApiChannel"
keywords="mpi:topo mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Graph_get.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Graph_get"
task="topo:MPI_Graph_get"
template="MPI_Graph_get.enter"
value="871"
/>
<event
channel="MpiApiChannel"
keywords="mpi:topo mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Graph_get.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Graph_get"
task="topo:MPI_Graph_get"
template="MPI_Graph_get.leave"
value="872"
/>
<event
channel="MpiApiChannel"
keywords="mpi:topo mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Graph_get"
task="topo:MPI_Graph_get"
template="function.error"
value="873"
/>
<!--MPI_Graph_map-->
<event
channel="MpiApiChannel"
keywords="mpi:topo mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Graph_map.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Graph_map"
task="topo:MPI_Graph_map"
template="MPI_Graph_map.enter"
value="874"
/>
<event
channel="MpiApiChannel"
keywords="mpi:topo mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Graph_map.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Graph_map"
task="topo:MPI_Graph_map"
template="MPI_Graph_map.leave"
value="875"
/>
<event
channel="MpiApiChannel"
keywords="mpi:topo mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Graph_map"
task="topo:MPI_Graph_map"
template="function.error"
value="876"
/>
<!--MPI_Graph_neighbors-->
<event
channel="MpiApiChannel"
keywords="mpi:topo mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Graph_neighbors.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Graph_neighbors"
task="topo:MPI_Graph_neighbors"
template="MPI_Graph_neighbors.enter"
value="877"
/>
<event
channel="MpiApiChannel"
keywords="mpi:topo mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Graph_neighbors.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Graph_neighbors"
task="topo:MPI_Graph_neighbors"
template="MPI_Graph_neighbors.leave"
value="878"
/>
<event
channel="MpiApiChannel"
keywords="mpi:topo mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Graph_neighbors"
task="topo:MPI_Graph_neighbors"
template="function.error"
value="879"
/>
<!--MPI_Graph_create-->
<event
channel="MpiApiChannel"
keywords="mpi:topo mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Graph_create.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Graph_create"
task="topo:MPI_Graph_create"
template="MPI_Graph_create.enter"
value="880"
/>
<event
channel="MpiApiChannel"
keywords="mpi:topo mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Graph_create.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Graph_create"
task="topo:MPI_Graph_create"
template="MPI_Graph_create.leave"
value="881"
/>
<event
channel="MpiApiChannel"
keywords="mpi:topo mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Graph_create"
task="topo:MPI_Graph_create"
template="function.error"
value="882"
/>
<!--MPI_Graphdims_get-->
<event
channel="MpiApiChannel"
keywords="mpi:topo mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Graphdims_get.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Graphdims_get"
task="topo:MPI_Graphdims_get"
template="MPI_Graphdims_get.enter"
value="883"
/>
<event
channel="MpiApiChannel"
keywords="mpi:topo mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Graphdims_get.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Graphdims_get"
task="topo:MPI_Graphdims_get"
template="MPI_Graphdims_get.leave"
value="884"
/>
<event
channel="MpiApiChannel"
keywords="mpi:topo mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Graphdims_get"
task="topo:MPI_Graphdims_get"
template="function.error"
value="885"
/>
<!--MPI_Graph_neighbors_count-->
<event
channel="MpiApiChannel"
keywords="mpi:topo mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Graph_neighbors_count.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Graph_neighbors_count"
task="topo:MPI_Graph_neighbors_count"
template="MPI_Graph_neighbors_count.enter"
value="886"
/>
<event
channel="MpiApiChannel"
keywords="mpi:topo mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Graph_neighbors_count.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Graph_neighbors_count"
task="topo:MPI_Graph_neighbors_count"
template="MPI_Graph_neighbors_count.leave"
value="887"
/>
<event
channel="MpiApiChannel"
keywords="mpi:topo mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Graph_neighbors_count"
task="topo:MPI_Graph_neighbors_count"
template="function.error"
value="888"
/>
<!--MPI_Topo_test-->
<event
channel="MpiApiChannel"
keywords="mpi:topo mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Topo_test.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Topo_test"
task="topo:MPI_Topo_test"
template="MPI_Topo_test.enter"
value="889"
/>
<event
channel="MpiApiChannel"
keywords="mpi:topo mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Topo_test.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Topo_test"
task="topo:MPI_Topo_test"
template="MPI_Topo_test.leave"
value="890"
/>
<event
channel="MpiApiChannel"
keywords="mpi:topo mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Topo_test"
task="topo:MPI_Topo_test"
template="function.error"
value="891"
/>
<!-- Microsoft-Windows-HPC-MPI/Connections events -->
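<!--
These connection events are logged to MpiCommunicationChannel per
transport: nd (NetworkDirect), shm (shared memory), and sock (sockets).
Send progress is traced through the net:queue, net:connect,
net:head/net:inline, net:continue, and net:done opcodes; receive progress
through net:packet, net:data, and net:done.
-->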
<event
channel="MpiCommunicationChannel"
keywords="mpi:nd"
level="win:Informational"
message="$(string.nd.send.queue)"
opcode="net:queue"
symbol="EVENT_SendNd_Queue"
task="nd:send"
template="nd.send.queue"
value="892"
/>
<event
channel="MpiCommunicationChannel"
keywords="mpi:shm"
level="win:Informational"
message="$(string.shm.send.queue)"
opcode="net:queue"
symbol="EVENT_SendShm_Queue"
task="shm:send"
template="shm.send.queue"
value="893"
/>
<event
channel="MpiCommunicationChannel"
keywords="mpi:sock"
level="win:Informational"
message="$(string.sock.send.queue)"
opcode="net:queue"
symbol="EVENT_SendSock_Queue"
task="sock:send"
template="sock.send.queue"
value="894"
/>
<event
channel="MpiCommunicationChannel"
keywords="mpi:nd"
level="win:Informational"
message="$(string.nd.send.connect)"
opcode="net:connect"
symbol="EVENT_SendNd_Connect"
task="nd:send"
template="nd.send.connect"
value="895"
/>
<event
channel="MpiCommunicationChannel"
keywords="mpi:shm"
level="win:Informational"
message="$(string.shm.send.connect)"
opcode="net:connect"
symbol="EVENT_SendShm_Connect"
task="shm:send"
template="shm.send.connect"
value="896"
/>
<event
channel="MpiCommunicationChannel"
keywords="mpi:sock mpi:net_sdata"
level="win:Informational"
message="$(string.sock.send.connect)"
opcode="net:connect"
symbol="EVENT_SendSock_Connect"
task="sock:send"
template="sock.send.connect"
value="897"
/>
<event
channel="MpiCommunicationChannel"
keywords="mpi:shm mpi:net_sdata"
level="win:Informational"
message="$(string.shm.send.inline)"
opcode="net:inline"
symbol="EVENT_SendShm_Inline"
task="shm:send"
template="shm.send.inline"
value="898"
/>
<event
channel="MpiCommunicationChannel"
keywords="mpi:nd mpi:net_sdata"
level="win:Informational"
message="$(string.nd.send.inline)"
opcode="net:inline"
symbol="EVENT_SendNd_Inline"
task="nd:send"
template="nd.send.inline"
value="899"
/>
<event
channel="MpiCommunicationChannel"
keywords="mpi:sock mpi:net_sdata"
level="win:Informational"
message="$(string.sock.send.inline)"
opcode="net:inline"
symbol="EVENT_SendSock_Inline"
task="sock:send"
template="sock.send.inline"
value="900"
/>
<event
channel="MpiCommunicationChannel"
keywords="mpi:nd"
level="win:Informational"
message="$(string.nd.send.continue)"
opcode="net:continue"
symbol="EVENT_SendNd_Continue"
task="nd:send"
template="nd.send.continue"
value="901"
/>
<event
channel="MpiCommunicationChannel"
keywords="mpi:shm"
level="win:Informational"
message="$(string.shm.send.continue)"
opcode="net:continue"
symbol="EVENT_SendShm_Continue"
task="shm:send"
template="shm.send.continue"
value="902"
/>
<event
channel="MpiCommunicationChannel"
keywords="mpi:sock"
level="win:Informational"
message="$(string.sock.send.continue)"
opcode="net:continue"
symbol="EVENT_SendSock_Continue"
task="sock:send"
template="sock.send.continue"
value="903"
/>
<event
channel="MpiCommunicationChannel"
keywords="mpi:shm"
level="win:Informational"
message="$(string.shm.send.done)"
opcode="net:done"
symbol="EVENT_SendShm_Done"
task="shm:send"
template="shm.send.done"
value="904"
/>
<event
channel="MpiCommunicationChannel"
keywords="mpi:sock"
level="win:Informational"
message="$(string.sock.send.done)"
opcode="net:done"
symbol="EVENT_SendSock_Done"
task="sock:send"
template="sock.send.done"
value="905"
/>
<event
channel="MpiCommunicationChannel"
keywords="mpi:nd"
level="win:Informational"
message="$(string.nd.send.done)"
opcode="net:done"
symbol="EVENT_SendNd_Done"
task="nd:send"
template="nd.send.done"
value="906"
/>
<event
channel="MpiCommunicationChannel"
keywords="mpi:shm"
level="win:Informational"
message="$(string.shm.recv.done)"
opcode="net:done"
symbol="EVENT_RecvShm_Done"
task="shm:recv"
template="shm.recv.done"
value="907"
/>
<event
channel="MpiCommunicationChannel"
keywords="mpi:nd"
level="win:Informational"
message="$(string.nd.recv.done)"
opcode="net:done"
symbol="EVENT_RecvNd_Done"
task="nd:recv"
template="nd.recv.done"
value="908"
/>
<event
channel="MpiCommunicationChannel"
keywords="mpi:sock"
level="win:Informational"
message="$(string.sock.recv.done)"
opcode="net:done"
symbol="EVENT_RecvSock_Done"
task="sock:recv"
template="sock.recv.done"
value="909"
/>
<event
channel="MpiCommunicationChannel"
keywords="mpi:shm mpi:net_sdata"
level="win:Informational"
message="$(string.shm.send.head)"
opcode="net:head"
symbol="EVENT_SendShm_Head"
task="shm:send"
template="shm.send.head"
value="910"
/>
<event
channel="MpiCommunicationChannel"
keywords="mpi:nd mpi:net_sdata"
level="win:Informational"
message="$(string.nd.send.head)"
opcode="net:head"
symbol="EVENT_SendNd_Head"
task="nd:send"
template="nd.send.head"
value="911"
/>
<event
channel="MpiCommunicationChannel"
keywords="mpi:sock mpi:net_sdata"
level="win:Informational"
message="$(string.sock.send.head)"
opcode="net:head"
symbol="EVENT_SendSock_Head"
task="sock:send"
template="sock.send.head"
value="912"
/>
<event
channel="MpiCommunicationChannel"
keywords="mpi:shm mpi:net_rdata"
level="win:Informational"
message="$(string.shm.recv.packet)"
opcode="net:packet"
symbol="EVENT_RecvShm_Packet"
task="shm:recv"
template="shm.recv.packet"
value="913"
/>
<event
channel="MpiCommunicationChannel"
keywords="mpi:nd mpi:net_rdata"
level="win:Informational"
message="$(string.nd.recv.packet)"
opcode="net:packet"
symbol="EVENT_RecvNd_Packet"
task="nd:recv"
template="nd.recv.packet"
value="914"
/>
<event
channel="MpiCommunicationChannel"
keywords="mpi:sock mpi:net_rdata"
level="win:Informational"
message="$(string.sock.recv.packet)"
opcode="net:packet"
symbol="EVENT_RecvSock_Packet"
task="sock:recv"
template="sock.recv.packet"
value="915"
/>
<event
channel="MpiCommunicationChannel"
keywords="mpi:shm"
level="win:Informational"
message="$(string.shm.recv.data)"
opcode="net:data"
symbol="EVENT_RecvShm_Data"
task="shm:recv"
template="shm.recv.data"
value="916"
/>
<event
channel="MpiCommunicationChannel"
keywords="mpi:nd"
level="win:Informational"
message="$(string.nd.recv.data)"
opcode="net:data"
symbol="EVENT_RecvNd_Data"
task="nd:recv"
template="nd.recv.data"
value="917"
/>
<event
channel="MpiCommunicationChannel"
keywords="mpi:sock"
level="win:Informational"
message="$(string.sock.recv.data)"
opcode="net:data"
symbol="EVENT_RecvSock_Data"
task="sock:recv"
template="sock.recv.data"
value="918"
/>
<event
channel="MpiCommunicationChannel"
keywords="mpi:msg"
level="win:Informational"
message="$(string.send.msg.ssend)"
symbol="EVENT_SsendMsg"
task="msg:send"
template="msg.send"
value="919"
/>
<event
channel="MpiCommunicationChannel"
keywords="mpi:msg"
level="win:Informational"
message="$(string.send.msg.rsend)"
symbol="EVENT_RsendMsg"
task="msg:send"
template="msg.send"
value="920"
/>
<event
channel="MpiCommunicationChannel"
keywords="mpi:msg"
level="win:Informational"
message="$(string.send.msg.send)"
symbol="EVENT_SendMsg"
task="msg:send"
template="msg.send"
value="921"
/>
<event
channel="MpiCommunicationChannel"
keywords="mpi:msg"
level="win:Informational"
message="$(string.recv.msg.recv)"
symbol="EVENT_RecvMsg"
task="msg:recv"
template="msg.recv"
value="922"
/>
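<!--
The msg:* events above record message-level traffic. The ssend, rsend,
and send variants share the msg.send template and differ only in symbol
and message string; all four msg events are point events with no
win:Start/win:Stop opcode.
-->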
<!--MSMPI_Queuelock_acquire-->
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_enter"
level="win:Informational"
message="$(string.MSMPI_Queuelock_acquire.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MSMPI_Queuelock_acquire"
task="p2p:MSMPI_Queuelock_acquire"
value="923"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MSMPI_Queuelock_acquire"
task="p2p:MSMPI_Queuelock_acquire"
value="924"
/>
<!--MSMPI_Queuelock_release-->
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_enter"
level="win:Informational"
message="$(string.MSMPI_Queuelock_release.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MSMPI_Queuelock_release"
task="p2p:MSMPI_Queuelock_release"
value="925"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MSMPI_Queuelock_release"
task="p2p:MSMPI_Queuelock_release"
value="926"
/>
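<!--
MSMPI_Queuelock_acquire and MSMPI_Queuelock_release define only
enter/leave pairs, with no error event and no payload template;
presumably these extensions report no failure through the MPI error path.
-->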
<!--MSMPI_Waitsome_interruptible-->
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_enter"
level="win:Informational"
message="$(string.MSMPI_Waitsome_interruptible.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MSMPI_Waitsome_interruptible"
task="p2p:MSMPI_Waitsome_interruptible"
template="MSMPI_Waitsome_interruptible.enter"
value="927"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave"
level="win:Informational"
message="$(string.MSMPI_Waitsome_interruptible.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MSMPI_Waitsome_interruptible"
task="p2p:MSMPI_Waitsome_interruptible"
template="MSMPI_Waitsome_interruptible.leave"
value="928"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MSMPI_Waitsome_interruptible"
task="p2p:MSMPI_Waitsome_interruptible"
template="function.error"
value="929"
/>
<!--MSMPI_Request_set_apc-->
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_enter"
level="win:Informational"
message="$(string.MSMPI_Request_set_apc.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MSMPI_Request_set_apc"
task="p2p:MSMPI_Request_set_apc"
template="MSMPI_Request_set_apc.enter"
value="930"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave"
level="win:Informational"
message="$(string.function.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MSMPI_Request_set_apc"
task="p2p:MSMPI_Request_set_apc"
value="931"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MSMPI_Request_set_apc"
task="p2p:MSMPI_Request_set_apc"
template="function.error"
value="932"
/>
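<!--
The remaining events cover MPI-3 additions: MPI_Comm_split_type, the
shared memory window functions, and the _x large-count datatype and
status queries.
-->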
<!--MPI_Comm_split_type-->
<event
channel="MpiApiChannel"
keywords="mpi:comm mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Comm_split_type.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Comm_split_type"
task="comm:MPI_Comm_split_type"
template="MPI_Comm_split_type.enter"
value="933"
/>
<event
channel="MpiApiChannel"
keywords="mpi:comm mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Comm_split_type.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Comm_split_type"
task="comm:MPI_Comm_split_type"
template="MPI_Comm_split_type.leave"
value="934"
/>
<event
channel="MpiApiChannel"
keywords="mpi:comm mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Comm_split_type"
task="comm:MPI_Comm_split_type"
template="function.error"
value="935"
/>
<!--MPI_Win_allocate_shared-->
<event
channel="MpiApiChannel"
keywords="mpi:rma mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Win_allocate_shared.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Win_allocate_shared"
task="rma:MPI_Win_allocate_shared"
template="MPI_Win_allocate_shared.enter"
value="936"
/>
<event
channel="MpiApiChannel"
keywords="mpi:rma mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Win_allocate_shared.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Win_allocate_shared"
task="rma:MPI_Win_allocate_shared"
template="MPI_Win_allocate_shared.leave"
value="937"
/>
<event
channel="MpiApiChannel"
keywords="mpi:rma mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Win_allocate_shared"
task="rma:MPI_Win_allocate_shared"
template="function.error"
value="938"
/>
<!--MPI_Win_shared_query-->
<event
channel="MpiApiChannel"
keywords="mpi:rma mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Win_shared_query.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Win_shared_query"
task="rma:MPI_Win_shared_query"
template="MPI_Win_shared_query.enter"
value="939"
/>
<event
channel="MpiApiChannel"
keywords="mpi:rma mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Win_shared_query.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Win_shared_query"
task="rma:MPI_Win_shared_query"
template="MPI_Win_shared_query.leave"
value="940"
/>
<event
channel="MpiApiChannel"
keywords="mpi:rma mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Win_shared_query"
task="rma:MPI_Win_shared_query"
template="function.error"
value="941"
/>
<!--MPI_Type_create_hindexed_block-->
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Type_create_hindexed_block.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Type_create_hindexed_block"
task="dt:MPI_Type_create_hindexed_block"
template="MPI_Type_create_hindexed_block.enter"
value="942"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Type_create_hindexed_block.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Type_create_hindexed_block"
task="dt:MPI_Type_create_hindexed_block"
template="MPI_Type_create_hindexed_block.leave"
value="943"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Type_create_hindexed_block"
task="dt:MPI_Type_create_hindexed_block"
template="function.error"
value="944"
/>
<!--MPI_Type_size_x-->
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Type_size_x.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Type_size_x"
task="dt:MPI_Type_size_x"
template="MPI_Type_size_x.enter"
value="945"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Type_size_x.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Type_size_x"
task="dt:MPI_Type_size_x"
template="MPI_Type_size_x.leave"
value="946"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Type_size_x"
task="dt:MPI_Type_size_x"
template="function.error"
value="947"
/>
<!--MPI_Type_get_extent_x-->
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Type_get_extent_x.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Type_get_extent_x"
task="dt:MPI_Type_get_extent_x"
template="MPI_Type_get_extent_x.enter"
value="948"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Type_get_extent_x.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Type_get_extent_x"
task="dt:MPI_Type_get_extent_x"
template="MPI_Type_get_extent_x.leave"
value="949"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Type_get_extent_x"
task="dt:MPI_Type_get_extent_x"
template="function.error"
value="950"
/>
<!--MPI_Type_get_true_extent_x-->
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Type_get_true_extent_x.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Type_get_true_extent_x"
task="dt:MPI_Type_get_true_extent_x"
template="MPI_Type_get_true_extent_x.enter"
value="951"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Type_get_true_extent_x.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Type_get_true_extent_x"
task="dt:MPI_Type_get_true_extent_x"
template="MPI_Type_get_true_extent_x.leave"
value="952"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Type_get_true_extent_x"
task="dt:MPI_Type_get_true_extent_x"
template="function.error"
value="953"
/>
<!--MPI_Get_elements_x-->
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Get_elements_x.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Get_elements_x"
task="dt:MPI_Get_elements_x"
template="MPI_Get_elements_x.enter"
value="954"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Get_elements_x.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Get_elements_x"
task="dt:MPI_Get_elements_x"
template="MPI_Get_elements_x.leave"
value="955"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Get_elements_x"
task="dt:MPI_Get_elements_x"
template="function.error"
value="956"
/>
<!--MPI_Status_set_elements_x-->
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Status_set_elements_x.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Status_set_elements_x"
task="dt:MPI_Status_set_elements_x"
template="MPI_Status_set_elements_x.enter"
value="957"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Status_set_elements_x.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Status_set_elements_x"
task="dt:MPI_Status_set_elements_x"
template="MPI_Status_set_elements_x.leave"
value="958"
/>
<event
channel="MpiApiChannel"
keywords="mpi:dt mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Status_set_elements_x"
task="dt:MPI_Status_set_elements_x"
template="function.error"
value="959"
/>
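<!--
  Illustrative C sketch (not part of the manifest) for the MPI_Count-based
  "_x" datatype queries instrumented above (events 945 through 959). These
  variants return MPI_Count so that sizes beyond the range of int remain
  representable.

    #include <mpi.h>
    #include <stdio.h>
    int main(int argc, char** argv)
    {
        MPI_Count size, lb, extent;
        MPI_Init(&argc, &argv);
        /* Events 945/946 bracket MPI_Type_size_x; events 948/949 bracket
           MPI_Type_get_extent_x. */
        MPI_Type_size_x(MPI_DOUBLE, &size);
        MPI_Type_get_extent_x(MPI_DOUBLE, &lb, &extent);
        printf("size=%lld extent=%lld\n",
               (long long)size, (long long)extent);
        MPI_Finalize();
        return 0;
    }
-->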
<!--MPI_Improbe-->
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Improbe.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Improbe"
task="p2p:MPI_Improbe"
template="MPI_Improbe.enter"
value="960"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Improbe.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Improbe"
task="p2p:MPI_Improbe"
template="MPI_Improbe.leave"
value="961"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Improbe"
task="p2p:MPI_Improbe"
template="function.error"
value="962"
/>
<!--MPI_Mprobe-->
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Mprobe.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Mprobe"
task="p2p:MPI_Mprobe"
template="MPI_Mprobe.enter"
value="963"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Mprobe.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Mprobe"
task="p2p:MPI_Mprobe"
template="MPI_Mprobe.leave"
value="964"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Mprobe"
task="p2p:MPI_Mprobe"
template="function.error"
value="965"
/>
<!--MPI_Mrecv-->
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Mrecv.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Mrecv"
task="p2p:MPI_Mrecv"
template="MPI_Mrecv.enter"
value="966"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Mrecv.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Mrecv"
task="p2p:MPI_Mrecv"
template="MPI_Mrecv.leave"
value="967"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Mrecv"
task="p2p:MPI_Mrecv"
template="function.error"
value="968"
/>
<!--MPI_Imrecv-->
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Imrecv.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Imrecv"
task="p2p:MPI_Imrecv"
template="MPI_Imrecv.enter"
value="969"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Imrecv.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Imrecv"
task="p2p:MPI_Imrecv"
template="MPI_Imrecv.leave"
value="970"
/>
<event
channel="MpiApiChannel"
keywords="mpi:p2p mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Imrecv"
task="p2p:MPI_Imrecv"
template="function.error"
value="971"
/>
<!--MPI_Ibarrier-->
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Ibarrier.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Ibarrier"
task="coll:MPI_Ibarrier"
template="MPI_Ibarrier.enter"
value="972"
/>
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_leave"
level="win:Informational"
message="$(string.NBC.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Ibarrier"
task="coll:MPI_Ibarrier"
template="MPI_Ibarrier.leave"
value="973"
/>
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Ibarrier"
task="coll:MPI_Ibarrier"
template="function.error"
value="974"
/>
<!--MPI_Ibcast-->
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Ibcast.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Ibcast"
task="coll:MPI_Ibcast"
template="MPI_Ibcast.enter"
value="975"
/>
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_leave"
level="win:Informational"
message="$(string.NBC.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Ibcast"
task="coll:MPI_Ibcast"
template="MPI_Ibcast.leave"
value="976"
/>
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Ibcast"
task="coll:MPI_Ibcast"
template="function.error"
value="977"
/>
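<!--
  Illustrative C sketch (not part of the manifest) for the nonblocking
  collective trios. Note that their leave events reuse $(string.NBC.leave),
  which logs the returned request handle: win:Stop fires when the call
  returns, not when the collective completes.

    #include <mpi.h>
    int main(int argc, char** argv)
    {
        int value = 0, rank;
        MPI_Request req;
        MPI_Init(&argc, &argv);
        MPI_Comm_rank(MPI_COMM_WORLD, &rank);
        if (rank == 0) value = 123;
        /* Event 975 fires on entry; event 976 fires once the request
           handle is available. */
        MPI_Ibcast(&value, 1, MPI_INT, 0, MPI_COMM_WORLD, &req);
        MPI_Wait(&req, MPI_STATUS_IGNORE);
        MPI_Finalize();
        return 0;
    }
-->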
<!--MPI_Igather-->
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Igather.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Igather"
task="coll:MPI_Igather"
template="MPI_Igather.enter"
value="978"
/>
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_leave"
level="win:Informational"
message="$(string.NBC.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Igather"
task="coll:MPI_Igather"
template="MPI_Igather.leave"
value="979"
/>
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Igather"
task="coll:MPI_Igather"
template="function.error"
value="980"
/>
<!--MPI_Igatherv-->
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Igatherv.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Igatherv"
task="coll:MPI_Igatherv"
template="MPI_Igatherv.enter"
value="981"
/>
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_leave"
level="win:Informational"
message="$(string.NBC.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Igatherv"
task="coll:MPI_Igatherv"
template="MPI_Igatherv.leave"
value="982"
/>
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Igatherv"
task="coll:MPI_Igatherv"
template="function.error"
value="983"
/>
<!--MPI_Iscatter-->
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Iscatter.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Iscatter"
task="coll:MPI_Iscatter"
template="MPI_Iscatter.enter"
value="984"
/>
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_leave"
level="win:Informational"
message="$(string.NBC.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Iscatter"
task="coll:MPI_Iscatter"
template="MPI_Iscatter.leave"
value="985"
/>
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Iscatter"
task="coll:MPI_Iscatter"
template="function.error"
value="986"
/>
<!--MPI_Iscatterv-->
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Iscatterv.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Iscatterv"
task="coll:MPI_Iscatterv"
template="MPI_Iscatterv.enter"
value="987"
/>
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_leave"
level="win:Informational"
message="$(string.NBC.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Iscatterv"
task="coll:MPI_Iscatterv"
template="MPI_Iscatterv.leave"
value="988"
/>
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Iscatterv"
task="coll:MPI_Iscatterv"
template="function.error"
value="989"
/>
<!--MPI_Iallgather-->
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Iallgather.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Iallgather"
task="coll:MPI_Iallgather"
template="MPI_Iallgather.enter"
value="990"
/>
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_leave"
level="win:Informational"
message="$(string.NBC.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Iallgather"
task="coll:MPI_Iallgather"
template="MPI_Iallgather.leave"
value="991"
/>
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Iallgather"
task="coll:MPI_Iallgather"
template="function.error"
value="992"
/>
<!--MPI_Iallgatherv
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Iallgatherv.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Iallgatherv"
task="coll:MPI_Iallgatherv"
template="MPI_Iallgatherv.enter"
value="993"
/>
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_leave"
level="win:Informational"
message="$(string.NBC.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Iallgatherv"
task="coll:MPI_Iallgatherv"
template="MPI_Iallgatherv.leave"
value="994"
/>
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Iallgatherv"
task="coll:MPI_Iallgatherv"
template="function.error"
value="995"
/>-->
<!--MPI_Ialltoall
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Ialltoall.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Ialltoall"
task="coll:MPI_Ialltoall"
template="MPI_Ialltoall.enter"
value="996"
/>
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_leave"
level="win:Informational"
message="$(string.NBC.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Ialltoall"
task="coll:MPI_Ialltoall"
template="MPI_Ialltoall.leave"
value="997"
/>
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Ialltoall"
task="coll:MPI_Ialltoall"
template="function.error"
value="998"
/>-->
<!--MPI_Ialltoallv
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Ialltoallv.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Ialltoallv"
task="coll:MPI_Ialltoallv"
template="MPI_Ialltoallv.enter"
value="999"
/>
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_leave"
level="win:Informational"
message="$(string.NBC.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Ialltoallv"
task="coll:MPI_Ialltoallv"
template="MPI_Ialltoallv.leave"
value="1000"
/>
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Ialltoallv"
task="coll:MPI_Ialltoallv"
template="function.error"
value="1001"
/>-->
<!--MPI_Ialltoallw
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Ialltoallw.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Ialltoallw"
task="coll:MPI_Ialltoallw"
template="MPI_Ialltoallw.enter"
value="1002"
/>
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_leave"
level="win:Informational"
message="$(string.NBC.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Ialltoallw"
task="coll:MPI_Ialltoallw"
template="MPI_Ialltoallw.leave"
value="1003"
/>
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Ialltoallw"
task="coll:MPI_Ialltoallw"
template="function.error"
value="1004"
/>-->
<!--MPI_Ireduce-->
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Ireduce.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Ireduce"
task="coll:MPI_Ireduce"
template="MPI_Ireduce.enter"
value="1005"
/>
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_leave"
level="win:Informational"
message="$(string.NBC.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Ireduce"
task="coll:MPI_Ireduce"
template="MPI_Ireduce.leave"
value="1006"
/>
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Ireduce"
task="coll:MPI_Ireduce"
template="function.error"
value="1007"
/>
<!--MPI_Iallreduce-->
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Iallreduce.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Iallreduce"
task="coll:MPI_Iallreduce"
template="MPI_Iallreduce.enter"
value="1008"
/>
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_leave"
level="win:Informational"
message="$(string.NBC.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Iallreduce"
task="coll:MPI_Iallreduce"
template="MPI_Iallreduce.leave"
value="1009"
/>
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Iallreduce"
task="coll:MPI_Iallreduce"
template="function.error"
value="1010"
/>
<!--MPI_Ireduce_scatter_block
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Ireduce_scatter_block.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Ireduce_scatter_block"
task="coll:MPI_Ireduce_scatter_block"
template="MPI_Ireduce_scatter_block.enter"
value="1011"
/>
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_leave"
level="win:Informational"
message="$(string.NBC.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Ireduce_scatter_block"
task="coll:MPI_Ireduce_scatter_block"
template="MPI_Ireduce_scatter_block.leave"
value="1012"
/>
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Ireduce_scatter_block"
task="coll:MPI_Ireduce_scatter_block"
template="function.error"
value="1013"
/>-->
<!--MPI_Ireduce_scatter
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Ireduce_scatter.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Ireduce_scatter"
task="coll:MPI_Ireduce_scatter"
template="MPI_Ireduce_scatter.enter"
value="1014"
/>
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_leave"
level="win:Informational"
message="$(string.NBC.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Ireduce_scatter"
task="coll:MPI_Ireduce_scatter"
template="MPI_Ireduce_scatter.leave"
value="1015"
/>
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Ireduce_scatter"
task="coll:MPI_Ireduce_scatter"
template="function.error"
value="1016"
/>-->
<!--MPI_Iscan
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Iscan.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Iscan"
task="coll:MPI_Iscan"
template="MPI_Iscan.enter"
value="1017"
/>
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_leave"
level="win:Informational"
message="$(string.NBC.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Iscan"
task="coll:MPI_Iscan"
template="MPI_Iscan.leave"
value="1018"
/>
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Iscan"
task="coll:MPI_Iscan"
template="function.error"
value="1019"
/>-->
<!--MPI_Iexscan
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Iexscan.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Iexscan"
task="coll:MPI_Iexscan"
template="MPI_Iexscan.enter"
value="1020"
/>
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_leave"
level="win:Informational"
message="$(string.NBC.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Iexscan"
task="coll:MPI_Iexscan"
template="MPI_Iexscan.leave"
value="1021"
/>
<event
channel="MpiApiChannel"
keywords="mpi:coll mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Iexscan"
task="coll:MPI_Iexscan"
template="function.error"
value="1022"
/>-->
<event
channel="MpiCommunicationChannel"
keywords="mpi:nd"
level="win:Informational"
message="$(string.nd.defer.connect)"
opcode="net:deferconnect"
symbol="EVENT_DeferNd_Connect"
task="nd:defer"
template="nd.defer.connect"
value="1023"
/>
<event
channel="MpiCommunicationChannel"
keywords="mpi:shm"
level="win:Informational"
message="$(string.shm.defer.connect)"
opcode="net:deferconnect"
symbol="EVENT_DeferShm_Connect"
task="shm:defer"
template="shm.defer.connect"
value="1024"
/>
<event
channel="MpiCommunicationChannel"
keywords="mpi:sock"
level="win:Informational"
message="$(string.sock.defer.connect)"
opcode="net:deferconnect"
symbol="EVENT_DeferSock_Connect"
task="sock:send"
template="sock.defer.connect"
value="1025"
/>
<event
channel="MpiCommunicationChannel"
keywords="mpi:nd"
level="win:Informational"
message="$(string.nd.defer.write)"
opcode="net:deferwrite"
symbol="EVENT_DeferNd_Write"
task="nd:defer"
template="nd.defer.write"
value="1026"
/>
<!-- event
channel="MpiCommunicationChannel"
keywords="mpi:shm"
level="win:Informational"
message="$(string.shm.defer.write)"
opcode="net:deferwrite"
symbol="EVENT_DeferShm_Write"
task="shm:defer"
template="shm.defer.write"
value="1027"
/ -->
<event
channel="MpiCommunicationChannel"
keywords="mpi:sock"
level="win:Informational"
message="$(string.sock.defer.write)"
opcode="net:deferwrite"
symbol="EVENT_DeferSock_Write"
task="sock:defer"
template="sock.defer.write"
value="1028"
/>
<!--MPI_Dist_graph_neighbors_count-->
<event
channel="MpiApiChannel"
keywords="mpi:topo mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Dist_graph_neighbors_count.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Dist_graph_neighbors_count"
task="topo:MPI_Dist_graph_neighbors_count"
template="MPI_Dist_graph_neighbors_count.enter"
value="1029"
/>
<event
channel="MpiApiChannel"
keywords="mpi:topo mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Dist_graph_neighbors_count.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Dist_graph_neighbors_count"
task="topo:MPI_Dist_graph_neighbors_count"
template="MPI_Dist_graph_neighbors_count.leave"
value="1030"
/>
<event
channel="MpiApiChannel"
keywords="mpi:topo mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Dist_graph_neighbors_count"
task="topo:MPI_Dist_graph_neighbors_count"
template="function.error"
value="1031"
/>
<!--MPI_Dist_graph_neighbors-->
<event
channel="MpiApiChannel"
keywords="mpi:topo mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Dist_graph_neighbors.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Dist_graph_neighbors"
task="topo:MPI_Dist_graph_neighbors"
template="MPI_Dist_graph_neighbors.enter"
value="1032"
/>
<event
channel="MpiApiChannel"
keywords="mpi:topo mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Dist_graph_neighbors.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Dist_graph_neighbors"
task="topo:MPI_Dist_graph_neighbors"
template="MPI_Dist_graph_neighbors.leave"
value="1033"
/>
<event
channel="MpiApiChannel"
keywords="mpi:topo mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Dist_graph_neighbors"
task="topo:MPI_Dist_graph_neighbors"
template="function.error"
value="1034"
/>
<!--MPI_Dist_graph_create_adjacent-->
<event
channel="MpiApiChannel"
keywords="mpi:topo mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Dist_graph_create_adjacent.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Dist_graph_create_adjacent"
task="topo:MPI_Dist_graph_create_adjacent"
template="MPI_Dist_graph_create_adjacent.enter"
value="1035"
/>
<event
channel="MpiApiChannel"
keywords="mpi:topo mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Dist_graph_create_adjacent.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Dist_graph_create_adjacent"
task="topo:MPI_Dist_graph_create_adjacent"
template="MPI_Dist_graph_create_adjacent.leave"
value="1036"
/>
<event
channel="MpiApiChannel"
keywords="mpi:topo mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Dist_graph_create_adjacent"
task="topo:MPI_Dist_graph_create_adjacent"
template="function.error"
value="1037"
/>
<!--MPI_Dist_graph_create-->
<event
channel="MpiApiChannel"
keywords="mpi:topo mpi:api_enter"
level="win:Informational"
message="$(string.MPI_Dist_graph_create.enter)"
opcode="win:Start"
symbol="EVENT_Enter_MPI_Dist_graph_create"
task="topo:MPI_Dist_graph_create"
template="MPI_Dist_graph_create.enter"
value="1038"
/>
<event
channel="MpiApiChannel"
keywords="mpi:topo mpi:api_leave"
level="win:Informational"
message="$(string.MPI_Dist_graph_create.leave)"
opcode="win:Stop"
symbol="EVENT_Leave_MPI_Dist_graph_create"
task="topo:MPI_Dist_graph_create"
template="MPI_Dist_graph_create.leave"
value="1039"
/>
<event
channel="MpiApiChannel"
keywords="mpi:topo mpi:api_leave mpi:api_error"
level="win:Error"
message="$(string.function.error)"
opcode="win:Stop"
symbol="EVENT_Error_MPI_Dist_graph_create"
task="topo:MPI_Dist_graph_create"
template="function.error"
value="1040"
/>
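<!--
  Illustrative C sketch (not part of the manifest) for the distributed
  graph topology trios above: a ring built with
  MPI_Dist_graph_create_adjacent (events 1035/1036) and queried with
  MPI_Dist_graph_neighbors_count (events 1029/1030).

    #include <mpi.h>
    int main(int argc, char** argv)
    {
        int rank, size, next, prev, indeg, outdeg, weighted;
        MPI_Comm ring;
        MPI_Init(&argc, &argv);
        MPI_Comm_rank(MPI_COMM_WORLD, &rank);
        MPI_Comm_size(MPI_COMM_WORLD, &size);
        next = (rank + 1) % size;
        prev = (rank + size - 1) % size;
        MPI_Dist_graph_create_adjacent(MPI_COMM_WORLD,
                                       1, &prev, MPI_UNWEIGHTED,
                                       1, &next, MPI_UNWEIGHTED,
                                       MPI_INFO_NULL, 0, &ring);
        MPI_Dist_graph_neighbors_count(ring, &indeg, &outdeg, &weighted);
        MPI_Comm_free(&ring);
        MPI_Finalize();
        return 0;
    }
-->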
</events>
</provider>
</events>
</instrumentation>
<localization>
<resources culture="en-US">
<stringTable>
<!-- Message strings for diagnostic events in the channels. -->
<!-- ch3u_nd_adapter.cpp -->
<string
id="error.AdapterInit"
value="ERROR: %2 object:%1 hresult:%3 parameters:%4 %5 %6"
/>
<string
id="error.AdapterInit.Open"
value="NdOpenAdapter failed."
/>
<string
id="error.AdapterInit.Query"
value="IND2Adapter::Query failed."
/>
<string
id="error.AdapterInit.CQDepth"
value="Max completion queue depth is too small."
/>
<string
id="error.AdapterInit.InitiatorQDepth"
value="Max initiator queue depth too small."
/>
<string
id="error.AdapterInit.RecvQDepth"
value="Max receive queue depth too small."
/>
<string
id="error.AdapterInit.CreateOverlapped"
value="IND2Adapter::CreateOverlappedFile failed. Could not create overlapped file."
/>
<string
id="info.AdapterShutdown"
value="INFO: object:%1 Adapter shutdown."
/>
<string
id="error.AdapterListen"
value="ERROR: %2 object:%1 hresult:%3"
/>
<string
id="error.AdapterListen.CreateListener"
value="IND2Adapter::CreateListener failed. "
/>
<string
id="error.AdapterListen.Bind"
value="IND2Adapter::Bind failed."
/>
<string
id="error.AdapterListen.GetLocalAddress"
value="IND2Adapter::GetLocalAddress failed."
/>
<string
id="error.AdapterListen.Listen"
value="IND2Adapter::Listen failed."
/>
<string
id="error.AdapterGetConnectionRequest"
value="ERROR: %2 object:%1 hresult:%3"
/>
<string
id="error.AdapterGetConnectionRequest.CreateConnector"
value="IND2Adapter::CreateConnector failed."
/>
<string
id="error.AdapterGetConnectionRequest.GetConnectionRequest"
value="IND2Adapter::GetConnectionRequest failed."
/>
<string
id="info.AdapterConnect"
value="INFO: Connected to host %2 on port %3. object:%1 parameters:%4 %5"
/>
<string
id="error.AdapterCreateConnector"
value="ERROR: %2 object:%1 hresult:%3"
/>
<string
id="error.AdapterCreateConnector.CreateConnector"
value="IND2Adapter::CreateConnector failed."
/>
<string
id="error.AdapterCreateConnector.Bind"
value="IND2Adapter::Bind failed."
/>
<string
id="error.AdapterAccept.GetPeerAddress"
value="ERROR: IND2Connector::GetPeerAddress failed. object:%1 hresult:%2 parameters:%3 %4 %5"
/>
<string
id="info.AdapterAccept.Reject"
value="INFO: Rejecting connection attempt. object:%1 hresult:%2 parameters:%3 %4 %5"
/>
<string
id="info.AdapterAccept.Success"
value="INFO: Accepting connection from host %2 on port %3. object:%1 parameters:%4 %5 %6"
/>
<string
id="error.AdapterGetConnReqHandler"
value="ERROR: GetConnReqHandler has failed. object:%1 hresult:%2 parameters:%3"
/>
<string
id="error.AdapterGetConnSucceeded"
value="ERROR: %2 object:%1 hresult:%3 parameters:%4"
/>
<string
id="info.AdapterGetConnSucceeded"
value="INFO: %2 object:%1 hresult:%3 parameters:%4"
/>
<string
id="error.AdapterGetConnSucceeded.InvalidBufferSize"
value="IND2Connector::GetPrivateData failed with INVALID_BUFFER_SIZE."
/>
<string
id="info.AdapterGetConnSucceeded.AbortedOrInvalid"
value="Connection rejected due to status ND_CONNECTION_ABORTED or ND_CONNECTION_INVALID."
/>
<string
id="error.AdapterGetConnSucceeded.Reject"
value="Connection rejected."
/>
<string
id="error.AdapterGetConnSucceeded.MismatchedVersion"
value="Connection rejected due to mismatched versions of MSMPI."
/>
<string
id="error.AdapterGetConnSucceeded.PGFind"
value="Connection rejected. PG could not be found."
/>
<string
id="error.AdapterGetConnSucceeded.Rank"
value="Connection rejected. Rank is invalid."
/>
<string
id="info.AdapterGetConnSucceeded.HeadToHeadReject"
value="Connection rejected due to head to head race; other side will accept."
/>
<string
id="info.AdapterGetConnSucceeded.HeadToHeadShutdown"
value="Connection aborted due to head to head race."
/>
<string
id="error.AdapterGetConnSucceeded.Shutdown"
value="Connection request while shutting down."
/>
<string
id="info.AdapterGetConnSucceeded.DefaultReject"
value="Connection rejected."
/>
<string
id="info.AdapterGetConnSucceeded.Success"
value="Success."
/>
<!-- ch3u_nd_endpoint.cpp -->
<string
id="error.Endpoint"
value="ERROR: %2 object:%1 hresult:%3 local ip address: %4 local port: %5 remote ip address: %6 remote port: %7"
/>
<string
id="info.Endpoint"
value="INFO: %2 object:%1 hresult:%3 local ip address: %4 local port: %5 remote ip address: %6 remote port: %7"
/>
<string
id="error.EndpointCompleteConnect.BufferSize"
value="Invalid buffer size."
/>
<string
id="error.EndpointCompleteConnect.Default"
value="Unable to connect."
/>
<string
id="info.EndpointCompleteConnect.Pending"
value="Connection pending."
/>
<string
id="info.EndpointConnReqFailed.Passive"
value="Connection refused."
/>
<string
id="info.EndpointConnReqFailed.Canceled"
value="Connection canceled."
/>
<string
id="error.EndpointConnReqFailed.Failed"
value="Connection failed."
/>
<string
id="info.EndpointConnCompleted"
value="Connection successfully completed."
/>
<string
id="info.EndpointConnFailed.Retry"
value="Connection failed, retrying."
/>
<string
id="error.EndpointConnFailed.Fail"
value="Connection failed."
/>
<string
id="info.EndpointAccept.Pending"
value="Connection pending."
/>
<string
id="error.EndpointPrepostReceives.Failed"
value="Receive failed on endpoint."
/>
<string
id="info.EndpointAcceptCompleted"
value="Connection accepted successfully."
/>
<string
id="info.EndpointAcceptFailed.AbortedOrTimeout"
value="Accept failed due to abort or timeout."
/>
<string
id="error.EndpointAcceptFailed.Failed"
value="Accept failed."
/>
<string
id="info.EndpointDisconnect"
value="Disconnecting."
/>
<string
id="info.EndpointConnect"
value="Initiating connection."
/>
<string
id="info.EndpointAccept"
value="Initiating accept."
/>
<string
id="info.EndpointHandleTimeout"
value="I/O timeout."
/>
<string
id="error.EndpointCompleteConnectAbortedOrInvalid"
value="IND2Connector::GetPrivateData failed."
/>
<string
id="info.EndpointCompleteConnectConnect"
value="Initiating complete of connection."
/>
<string
id="info.EndpointHandleTimeoutConnect"
value="Initiating complete of connection after timeout."
/>
<!-- ch3u_nd_env.cpp -->
<string
id="info.EnvironmentListen"
value="INFO: %2 object:%1 hresult:%3"
/>
<string
id="error.EnvironmentListen"
value="ERROR: %2 object:%1 hresult:%3"
/>
<string
id="info.EnvironmentListen.NoNDv2Providers"
value="No NDv2 providers are available. Trying NDv1."
/>
<string
id="error.EnvironmentListen.QueryAddressListForSizeFailed"
value="NdQueryAddressList query for size has failed."
/>
<string
id="error.EnvironmentListen.QueryAddressListFailed"
value="NdQueryAddressList has failed."
/>
<string
id="info.EnvironmentListen.Success"
value="INFO: Host %2 listening on port %3. object:%1"
/>
<string
id="error.EnvironmentGetBusinessCard"
value="ERROR: GetBusinessCard failed. object:%1 hresult:%2 parameters:%3 %4"
/>
<string
id="error.EnvironmentConnect"
value="ERROR: Could not connect to rank %4 with business card (%5). %2 object:%1 hresult:%3 parameters:%6 %7 %8 %9"
/>
<string
id="info.EnvironmentConnect"
value="INFO: Could not connect to rank %4 with business card (%5). %2 object:%1 hresult:%3 parameters:%6 %7 %8 %9"
/>
<string
id="error.EnvironmentConnect.NoLocalNoRemoteForce"
value="No ND adapter is available on either rank and the socket interconnect is disabled."
/>
<string
id="error.EnvironmentConnect.NoLocalForce"
value="No ND adapter is available and the socket interconnect is disabled."
/>
<string
id="error.EnvironmentConnect.NoLocalNoFallback"
value="No ND adapter is available and fallback to the socket interconnect is disabled."
/>
<string
id="error.EnvironmentConnect.NoLocalNoFallbackForce"
value="No ND adapter is available and the socket interconnect is disabled."
/>
<string
id="error.EnvironmentConnect.NoRemoteForce"
value="No ND adapter on the remote rank is available and the socket interconnect is disabled."
/>
<string
id="error.EnvironmentConnect.NoRemoteNoFallback"
value="No ND adapter is available on the remote rank and fallback to the socket interconnect is disabled."
/>
<string
id="error.EnvironmentConnect.NoPathForce"
value="No route to the remote rank could be resolved and the socket interconnect is disabled."
/>
<string
id="error.EnvironmentConnect.NoPathNoFallback"
value="No route to the remote rank could be resolved and fallback to the socket interconnect is disabled."
/>
<string
id="info.EnvironmentConnect.NoLocalFallback"
value="No ND adapter is available. Falling back to the socket interconnect."
/>
<string
id="info.EnvironmentConnect.NoRemoteFallback"
value="No ND adapter is available on the remote rank. Falling back to the socket interconnect."
/>
<string
id="info.EnvironmentConnect.NoPathFallback"
value="No route to the remote rank could be resolved. Falling back to the socket interconnect."
/>
<string
id="info.EnvironmentConnect.Success"
value="INFO: Connected to rank %2 with business card (%3). object:%1"
/>
<!-- SHMEM -->
<string
id="error.MPIDI_CH3I_Shm_connect"
value="ERROR: %1 result=%2 business card:%3 parameters:%4 %5 %6"
/>
<string
id="error.MPIDI_CH3I_Shm_connect.QueueName"
value="Unable to get queue name from business card."
/>
<string
id="error.MPIDI_CH3I_Shm_connect.QueueAttach"
value="Unable to attach to queue."
/>
<string
id="error.MPIDI_CH3I_Shm_connect.WriteQueue"
value="Unable to create write queue."
/>
<string
id="error.MPIDI_CH3I_Shm_connect.NotifyConnect"
value="Unable to send creation event."
/>
<string
id="info.MPIDI_CH3I_Shm_connect.Success"
value="INFO: Successfully connected to host %1 with business card (%2)."
/>
<string
id="error.MPIDI_CH3I_Accept_shm_connection"
value="ERROR: %1 parameters:%2 %3."
/>
<string
id="error.MPIDI_CH3I_Accept_shm_connection.QueueAttach"
value="Unable to attach to queue."
/>
<string
id="error.MPIDI_CH3I_Accept_shm_connection.MismatchedVersion"
value="Connection rejected due to mismatched versions of MSMPI."
/>
<string
id="error.MPIDI_CH3I_Accept_shm_connection.PGFind"
value="Connection rejected. PG could not be found."
/>
<string
id="error.MPIDI_CH3I_Accept_shm_connection.Rank"
value="Unable to connect to remote rank. Remote rank is invalid."
/>
<string
id="error.MPIDI_CH3I_Accept_shm_connection.GetConnStringFailed"
value="Unable to get connection string."
/>
<string
id="error.MPIDI_CH3I_Accept_shm_connection.GetStringArgFailed"
value="Unable to get shared memory key from business card."
/>
<string
id="error.MPIDI_CH3I_Accept_shm_connection.BootstrapQueueAttach"
value="Unable to attach to queue."
/>
<string
id="info.MPIDI_CH3I_Accept_shm_connection.Success"
value="INFO: Rank %1 has accepted a connection from rank %2."
/>
<!-- SOCKETS -->
<string
id="error.MPIDI_CH3I_Sock_connect.PostFailed"
value="ERROR: Post connection failed on rank %1 to rank %2 with business card (%3)."
/>
<string
id="info.CloseConnectionComplete_cb.Terminated"
value="INFO: Connection to rank %1 terminated."
/>
<string
id="info.CloseConnectionComplete_cb.Intentional"
value="INFO: Connection closed."
/>
<string
id="info.MPIDI_CH3I_Post_close_connection.Ignored"
value="INFO: Ignoring request to close connection to rank %1; the send queue is not empty."
/>
<string
id="info.MPIDI_CH3I_Post_close_connection.Honored"
value="INFO: Posting close connection to rank %1."
/>
<string
id="error.SendFailed_cb"
value="ERROR: (rc=%1) Send to host %2 on port %3 failed."
/>
<string
id="info.RecvFailed_cb.SocketClosed"
value="INFO: Receive from host %1 on port %2 failed. Connection closed gracefully."
/>
<string
id="info.RecvFailed_cb.SocketAborted"
value="INFO: (rc=%1) Receive from host %2 on port %3 failed. Connection aborted."
/>
<string
id="error.RecvFailed_cb.Failure"
value="ERROR: (rc=%1) Receive from host %2 on port %3 failed."
/>
<string
id="info.connection_accept"
value="INFO: Accepting connection request from rank %1."
/>
<string
id="info.connection_accept_CloseOldConnection"
value="INFO: Accepting new connection request from rank %1. Closing existing connection."
/>
<string
id="info.connection_reject"
value="INFO: Rejecting connection request from host %1 on port %2."
/>
<string
id="error.read_message_data"
value="ERROR: (rc=%1) Unable to post read to rank %2."
/>
<string
id="error.MPIDI_CH3I_SOCK_start_write.PostSendVFailed"
value="ERROR: (rc=%1) Unable to post write to rank %2."
/>
<string
id="info.SendOpenResponseSucceeded_cb.Success"
value="INFO: Successfully sent open response to host %1 on port %2."
/>
<string
id="info.SendOpenResponseSucceeded_cb.HeadToHead"
value="INFO: Head to head conflict with host %1 on port %2. Closing connection."
/>
<string
id="error.RecvOpenRequestDataSucceeded_cb.PGFail"
value="ERROR: PG lookup failed."
/>
<string
id="error.RecvOpenRequestDataSucceeded_cb.SendResponseFailed"
value="ERROR: (rc=%1) Unable to post send open response to rank %2."
/>
<string
id="info.RecvOpenRequestSucceeded_cb"
value="INFO: %1 host %2 on port %3."
/>
<string
id="info.RecvOpenRequestSucceeded_cb.Success"
value="Successfully received an open request from"
/>
<string
id="error.RecvOpenRequestSucceeded_cb"
value="ERROR: %1 host %2 on port %3."
/>
<string
id="error.RecvOpenRequestSucceeded_cb.UnexpectedControl"
value="Unexpected control packet received from"
/>
<string
id="error.RecvOpenRequestSucceeded_cb.MismatchedVersion"
value="A different version of MSMPI is being used on"
/>
<string
id="error.RecvOpenRequestSucceeded_cb.Internal"
value="Internal error receiving open request from"
/>
<string
id="info.RecvOpenRequestFailed_cb"
value="INFO: Failed receiving open request from host %1 on port %2."
/>
<string
id="info.AcceptNewConnectionFailed_cb.Canceled"
value="INFO: Request to accept new connection was canceled."
/>
<string
id="error.AcceptNewConnectionFailed_cb.Failed"
value="ERROR: (rc=%1) Accept new connection failed. %2"
/>
<string
id="error.AcceptNewConnectionSucceeded_cb.PostListener"
value="ERROR: (rc=%1) Unable to renew listener socket."
/>
<string
id="info.AcceptNewConnectionSucceeded_cb.HeadToHead"
value="INFO: (rc=%1) Head to head conflict with host %2 on port %3."
/>
<string
id="info.AcceptNewConnectionSucceeded_cb.Succeeded"
value="INFO: Successfully accepted new connection request from host %1 on port %2."
/>
<string
id="info.MPIDI_CH3I_Post_accept"
value="INFO: Posting accept packet; listening for new connection request."
/>
<string
id="info.RecvOpenResponseSucceeded_cb.Disconnect"
value="INFO: Received open response packet from host %1 on port %2 but the socket is disconnected."
/>
<string
id="error.RecvOpenResponseSucceeded_cb.UnexpectedControl"
value="ERROR: Unexpected control packet from host %1 on port %2 receiving open response."
/>
<string
id="info.RecvOpenResponseSucceeded_cb.ConnectionComplete"
value="INFO: Successfully received open response from host %1 on port %2."
/>
<string
id="info.RecvOpenResponseSucceeded_cb.HeadToHeadRejected"
value="INFO: Head to head conflict. Connection request rejected by host %1 on port %2."
/>
<string
id="info.SendOpenRequestSucceeded_cb.Disconnected"
value="INFO: Send open request to rank %1 succeded but socket was disconnected."
/>
<string
id="error.SendOpenRequestSucceeded_cb.PostRecvPktFailed"
value="ERROR: Unable to post receive for open response to rank %1."
/>
<string
id="info.SendOpenRequestSucceeded_cb.Succeeded"
value="INFO: Send open request to rank %1 succeeded."
/>
<string
id="error.send_open_request.Failed"
value="ERROR: (rc=%1) Unable to post open request to rank %2."
/>
<string
id="info.send_open_request.Succeeded"
value="INFO: Open connection request to rank %1 has been posted."
/>
<string
id="info.ConnectFailed_cb.Disconnect"
value="INFO: Connect to rank %1 failed. Socket disconnected."
/>
<string
id="error.ConnectFailed_cb.Failed"
value="ERROR: (rc=%1) Connect to rank %1 failed."
/>
<string
id="info.ConnectSucceeded_cb.Disconnect"
value="INFO: Send connect request to rank %1 succeeded. Socket disconnected."
/>
<string
id="info.ConnectSucceeded_cb.Succeeded"
value="INFO: Send connect request to rank %1 succeeded."
/>
<string
id="info.MPIDI_CH3I_Post_connect"
value="INFO: Posting connect to rank %1 on host %2 and port %3."
/>
<!-- sock.c -->
<string
id="error.sock_get_overlapped_result"
value="ERROR: Get overlapped result reports STATUS_CANCELED."
/>
<string
id="error.sock_get_overlapped_result.Failed"
value="ERROR: (rc=%1) Get overlapped result failed: %2"
/>
<string
id="error.sock_safe_send"
value="ERROR: (rc=%1) Send failed: %2"
/>
<string
id="error.sock_safe_receive"
value="ERROR: (rc=%1) Receive failed: %2"
/>
<string
id="error.MPIDU_Sock_init"
value="ERROR: (rc=%1) Function WSAStartup failed: %2"
/>
<string
id="error.socki_get_host_list"
value="ERROR: (rc=%1) Could not resolve hostname %3: %2"
/>
<string
id="error.socki_get_host_list.AddIp"
value="ERROR: (rc=%1) Could not add ip address to host description."
/>
<string
id="error.socki_get_host_list.AddHostname"
value="ERROR: (rc=%1) Could not add hostname to list."
/>
<string
id="error.MPIDU_Sock_get_host_description"
value="ERROR: (rc=%1) Function gethostname failed: %2"
/>
<string
id="error.MPIDU_Sock_create_native_fd"
value="ERROR: (rc=%1) Function WSASocketW failed: %2"
/>
<string
id="error.easy_create_ranged.Port"
value="ERROR: Port value %1 too large (expected a value less than or equal to %2)."
/>
<string
id="error.easy_create_ranged"
value="ERROR: (rc=%1) Function Bind failed: %2"
/>
<string
id="error.MPIDU_Sock_listen"
value="ERROR: (rc=%1) Function Listen failed: %2"
/>
<string
id="error.post_next_accept"
value="ERROR: (rc=%1) Function AcceptEx failed: %2"
/>
<string
id="info.AcceptFailed.ResetPosted"
value="INFO: Accept connection failed, connection reset. New accept posted."
/>
<string
id="error.AcceptFailed.ResetPostFailed"
value="ERROR: (rc=%1) Accept connection failed, unable to post next accept."
/>
<string
id="error.AcceptFailed"
value="ERROR: (rc=%1) Accept connection failed: %2"
/>
<string
id="info.sock_finish_accept"
value="INFO: Accept succeeded."
/>
<string
id="error.gle_connect_ex.WSAIoctlSocketError"
value="ERROR: (rc=%1) Unable to get connect function. %2"
/>
<string
id="info.gle_connect_ex.Succeeded"
value="INFO: Connected to %1 on port %2."
/>
<string
id="error.gle_connect_ex.pfnConnectEx"
value="ERROR: (rc=%1) Connect function failed. %2"
/>
<string
id="error.gle_postpone_retry_connect"
value="ERROR: (rc=%1) Unable to create timer. %2"
/>
<string
id="info.sock_cancel_inprogress_connect"
value="INFO: Canceling in-progress connect to %1 on port %2."
/>
<string
id="error.ConnectFailed"
value="ERROR: Connect to %4 on port %5 failed. %1 result:%2 parameters:%3"
/>
<string
id="info.ConnectFailed"
value="INFO: Connect to %4 on port %5 failed. %1 result:%2 parameters:%3"
/>
<string
id="error.ConnectFailed.AbortedBeforeTimeout"
value="Operation was aborted before timeout."
/>
<string
id="info.ConnectFailed.Timeout"
value="Timed out. Trying again."
/>
<string
id="error.ConnectFailed.AbortedClosing"
value="Operation was aborted."
/>
<string
id="info.ConnectFailed.Refused"
value="Connection refused. Trying again."
/>
<string
id="info.ConnectFailed.Error"
value="Unknown error."
/>
<string
id="error.ConnectFailed.Exhausted"
value="All endpoints exhausted."
/>
<string
id="error.ConnectFailed.Fail"
value="Failed."
/>
<string
id="info.sock_finish_connect"
value="INFO: Connect to %1 on port %2 succeeded."
/>
<string
id="error.MPIDU_Sock_post_connect_endpoints"
value="ERROR: (rc=%1) Unable to add %2:%3 to list of valid endpoints."
/>
<string
id="error.MPIDU_Sock_post_connect_gle_bind_any"
value="ERROR: (rc=%1) Unable to bind socket. %2"
/>
<string
id="error.GracefulCloseFailed"
value="ERROR: (rc=%1) Graceful close to host %3 on port %4 failed. %2"
/>
<string
id="info.GracefulCloseSucceeded"
value="INFO: Graceful close succeeded."
/>
<string
id="error.ReadSucceeded.ConnectionClosed"
value="ERROR: Read succeeded but socket connection is closed."
/>
<string
id="error.ReadSucceeded.Error"
value="ERROR: (rc=%1) Read succeeded with error %2"
/>
<string
id="error.MPIDU_Sock_keepalive"
value="ERROR: (rc=%1) Socket keep alive failed. %2"
/>
<string
id="info.MPI_Init_info"
value="INFO: MPI process rank %1 has initialized. MPI Version: %2.%3.%4"
/>
<string
id="info.MPI_Finalize_info"
value="INFO: Rank %1 has finalized."
/>
<string
id="info.SMPD_Nodemanager.context"
value="INFO: SMPD process launching rank %1 with context string (%2)."
/>
<string
id="provider"
value="Windows HPC MPI Event Provider"
/>
<string
id="api.channel"
value="Windows HPC MPI Api Event Channel"
/>
<string
id="communication.channel"
value="Windows HPC MPI communication.Event Channel"
/>
<string
id="communication.0"
value="id={%1.%2.%3}"
/>
<string
id="communication.1"
value="id={%1.%2.%3} n_iov=%4 size=%5"
/>
<string
id="sock.recv.data"
value="id={%1.%2.%3}"
/>
<string
id="recv.msg.recv"
value="RecvMsg: comm=%1, dest_rank=%2, src_rank=%3, tag=%4, datatype=%5, buf=%6, size=%7"
/>
<string
id="send.msg.send"
value="SendMsg: comm=%1, dest_rank=%2, src_rank=%3, tag=%4, datatype=%5, buf=%6, size=%7"
/>
<string
id="send.msg.rsend"
value="RsendMsg: comm=%1, dest_rank=%2, src_rank=%3, tag=%4, datatype=%5, buf=%6, size=%7"
/>
<string
id="send.msg.ssend"
value="SsendMsg: comm=%1, dest_rank=%2, src_rank=%3, tag=%4, datatype=%5, buf=%6, size=%7"
/>
<string
id="shm.recv.data"
value="id={%1.%2.%3}"
/>
<string
id="nd.recv.data"
value="id={%1.%2.%3}"
/>
<string
id="sock.recv.packet"
value="id={%1.%2.%3} type=%4"
/>
<string
id="shm.recv.packet"
value="id={%1.%2.%3} type=%4"
/>
<string
id="nd.recv.packet"
value="id={%1.%2.%3} type=%4"
/>
<string
id="sock.send.head"
value="id={%1.%2.%3} n_iov=%4 size=%5 type=%6"
/>
<string
id="shm.send.head"
value="id={%1.%2.%3} n_iov=%4 size=%5 type=%6"
/>
<string
id="nd.send.head"
value="id={%1.%2.%3} n_iov=%4 size=%5 type=%6"
/>
<string
id="sock.send.done"
value="id={%1.%2.%3}"
/>
<string
id="shm.send.done"
value="id={%1.%2.%3}"
/>
<string
id="nd.send.done"
value="id={%1.%2.%3}"
/>
<string
id="sock.recv.done"
value="id={%1.%2.%3}"
/>
<string
id="shm.recv.done"
value="id={%1.%2.%3}"
/>
<string
id="nd.recv.done"
value="id={%1.%2.%3}"
/>
<string
id="sock.send.continue"
value="id={%1.%2.%3} n_iov=%4 size=%5"
/>
<string
id="shm.send.continue"
value="id={%1.%2.%3} n_iov=%4 size=%5"
/>
<string
id="nd.send.continue"
value="id={%1.%2.%3} n_iov=%4 size=%5"
/>
<string
id="sock.send.inline"
value="id={%1.%2.%3} n_iov=%4 size=%5 type=%6"
/>
<string
id="shm.send.inline"
value="id={%1.%2.%3} n_iov=%4 size=%5 type=%6"
/>
<string
id="nd.send.inline"
value="id={%1.%2.%3} n_iov=%4 size=%5 type=%6"
/>
<string
id="shm.send.connect"
value="id={%1.%2.%3} n_iov=%4 size=%5 type=%6"
/>
<string
id="nd.send.connect"
value="id={%1.%2.%3} n_iov=%4 size=%5 type=%6"
/>
<string
id="sock.send.connect"
value="id={%1.%2.%3} n_iov=%4 size=%5 type=%6"
/>
<string
id="shm.send.queue"
value="id={%1.%2.%3} n_iov=%4 size=%5 type=%6"
/>
<string
id="nd.send.queue"
value="id={%1.%2.%3} n_iov=%4 size=%5 type=%6"
/>
<string
id="sock.send.queue"
value="id={%1.%2.%3} n_iov=%4 size=%5 type=%6"
/>
<string
id="api.attribute"
value="attribute"
/>
<string
id="api.collective"
value="collective"
/>
<string
id="api.communication"
value="communication"
/>
<string
id="api.datatype"
value="datatype"
/>
<string
id="api.errorhandling"
value="errorhandling"
/>
<string
id="api.group"
value="group"
/>
<string
id="api.info"
value="info"
/>
<string
id="api.init"
value="init"
/>
<string
id="api.pt2pt"
value="pt2pt"
/>
<string
id="api.poll"
value="poll"
/>
<string
id="api.rma"
value="rma"
/>
<string
id="api.io"
value="io"
/>
<string
id="api.spawn"
value="spawn"
/>
<string
id="api.topology"
value="topology"
/>
<string
id="function.error"
value="ERROR: error_class=%1, error_string=%2"
/>
<string
id="function.leave"
value="LEAVE: rc=0"
/>
<string
id="MPI_Comm_create_keyval"
value="MPI_Comm_create_keyval"
/>
<string
id="MPI_Comm_create_keyval.enter"
value="ENTER: comm_copy_attr_fn=%1, comm_delete_attr_fn=%2, extra_state=%3"
/>
<string
id="MPI_Comm_create_keyval.leave"
value="LEAVE: rc=0, comm_keyval=%1"
/>
<string
id="MPI_Comm_delete_attr"
value="MPI_Comm_delete_attr"
/>
<string
id="MPI_Comm_delete_attr.enter"
value="ENTER: comm=%1, comm_keyval=%2"
/>
<string
id="MPI_Comm_free_keyval"
value="MPI_Comm_free_keyval"
/>
<string
id="MPI_Comm_free_keyval.enter"
value="ENTER: comm_keyval=%1"
/>
<string
id="MPI_Comm_get_attr"
value="MPI_Comm_get_attr"
/>
<string
id="MPI_Comm_get_attr.enter"
value="ENTER: comm=%1, comm_keyval=%2"
/>
<string
id="MPI_Comm_get_attr.leave"
value="LEAVE: rc=0, attribute_val=%1, flag=%2"
/>
<string
id="MPI_Comm_set_attr"
value="MPI_Comm_set_attr"
/>
<string
id="MPI_Comm_set_attr.enter"
value="ENTER: comm=%1, comm_keyval=%2, attribute_val=%3"
/>
<string
id="MPI_Type_create_keyval"
value="MPI_Type_create_keyval"
/>
<string
id="MPI_Type_create_keyval.enter"
value="ENTER: type_copy_attr_fn= %1, type_delete_attr_fn=%2, extra_state=%3"
/>
<string
id="MPI_Type_create_keyval.leave"
value="LEAVE: rc=0, type_keyval=%1"
/>
<string
id="MPI_Type_delete_attr"
value="MPI_Type_delete_attr"
/>
<string
id="MPI_Type_delete_attr.enter"
value="ENTER: type=%1, type_keyval=%2"
/>
<string
id="MPI_Type_free_keyval"
value="MPI_Type_free_keyval"
/>
<string
id="MPI_Type_free_keyval.enter"
value="ENTER: type_keyval=%1"
/>
<string
id="MPI_Type_get_attr"
value="MPI_Type_get_attr"
/>
<string
id="MPI_Type_get_attr.enter"
value="ENTER: type=%1, type_keyval=%2"
/>
<string
id="MPI_Type_get_attr.leave"
value="LEAVE: rc=0, attribute_val=%1, flag=%2"
/>
<string
id="MPI_Type_set_attr"
value="MPI_Type_set_attr"
/>
<string
id="MPI_Type_set_attr.enter"
value="ENTER: type=%1, type_keyval=%2, attribute_val=%3"
/>
<string
id="MPI_Win_create_keyval"
value="MPI_Win_create_keyval"
/>
<string
id="MPI_Win_create_keyval.enter"
value="ENTER: win_copy_attr_fn=%1, win_delete_attr_fn=%2, extra_state=%3"
/>
<string
id="MPI_Win_create_keyval.leave"
value="LEAVE: win_keyval=%1"
/>
<string
id="MPI_Win_delete_attr"
value="MPI_Win_delete_attr"
/>
<string
id="MPI_Win_delete_attr.enter"
value="ENTER: win=%1,win_keyval=%2"
/>
<string
id="MPI_Win_free_keyval"
value="MPI_Win_free_keyval"
/>
<string
id="MPI_Win_free_keyval.enter"
value="ENTER: win_keyval=%1"
/>
<string
id="MPI_Win_get_attr"
value="MPI_Win_get_attr"
/>
<string
id="MPI_Win_get_attr.enter"
value="ENTER: win=%1, win_keyval=%2"
/>
<string
id="MPI_Win_get_attr.leave"
value="LEAVE: rc=0, attribute_val=%1, flag=%2"
/>
<string
id="MPI_Win_set_attr"
value="MPI_Win_set_attr"
/>
<string
id="MPI_Win_set_attr.enter"
value="ENTER: win=%1, keyval=%2, attribute_val=%3"
/>
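<!--
  Illustrative C sketch (not part of the manifest) exercising the keyval
  and attribute calls whose enter/leave strings are defined above. A run
  of this sketch would render, for example, the MPI_Comm_create_keyval
  pair as "ENTER: comm_copy_attr_fn=..., comm_delete_attr_fn=...,
  extra_state=..." followed by "LEAVE: rc=0, comm_keyval=...".

    #include <stddef.h>
    #include <mpi.h>
    int main(int argc, char** argv)
    {
        int keyval, flag, value = 7;
        void* attr;
        MPI_Init(&argc, &argv);
        MPI_Comm_create_keyval(MPI_COMM_NULL_COPY_FN,
                               MPI_COMM_NULL_DELETE_FN, &keyval, NULL);
        MPI_Comm_set_attr(MPI_COMM_WORLD, keyval, &value);
        MPI_Comm_get_attr(MPI_COMM_WORLD, keyval, &attr, &flag);
        MPI_Comm_delete_attr(MPI_COMM_WORLD, keyval);
        MPI_Comm_free_keyval(&keyval);
        MPI_Finalize();
        return 0;
    }
-->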
<string
id="MPI_Allgather"
value="MPI_Allgather"
/>
<string
id="MPI_Allgather.enter"
value="ENTER: comm=%1, sendbuf=%2, recvbuf=%3, sendtype=%4, recvtype=%5, sendcount=%6, recvcount=%7"
/>
<string
id="MPI_Iallgather"
value="MPI_Iallgather"
/>
<string
id="MPI_Iallgather.enter"
value="ENTER: comm=%1, sendbuf=%2, recvbuf=%3, sendtype=%4, recvtype=%5, sendcount=%6, recvcount=%7"
/>
<string
id="MPI_Allgatherv"
value="MPI_Allgatherv"
/>
<string
id="MPI_Allgatherv.enter"
value="ENTER: comm=%1, sendbuf=%2, recvbuf=%3, sendtype=%4, recvtype=%5, sendcount=%6, recvcounts=%7, displs=%8"
/>
<string
id="MPI_Allreduce"
value="MPI_Allreduce"
/>
<string
id="MPI_Allreduce.enter"
value="ENTER: comm=%1, sendbuf=%2, recvbuf=%3, datatype=%4, count=%5, op=%6"
/>
<string
id="MPI_Iallreduce"
value="MPI_Iallreduce"
/>
<string
id="MPI_Iallreduce.enter"
value="ENTER: comm=%1, sendbuf=%2, recvbuf=%3, datatype=%4, count=%5, op=%6"
/>
<string
id="MPI_Alltoall"
value="MPI_Alltoall"
/>
<string
id="MPI_Alltoall.enter"
value="ENTER: comm=%1, sendbuf=%1, recvbuf=%4, sendtype=%3, recvtype=%6, sendcount=%2, recvcount=%5"
/>
<string
id="MPI_Alltoallv"
value="MPI_Alltoallv"
/>
<string
id="MPI_Alltoallv.enter"
value="ENTER: comm=%1, sendbuf=%2, recvbuf=%3, sendtype=%4, recvtype=%5, sendcnts=%7, recvcnts=%9, sdispls=%11, rdispls=%13"
/>
<string
id="MPI_Alltoallw"
value="MPI_Alltoallw"
/>
<string
id="MPI_Alltoallw.enter"
value="ENTER: comm=%1, sendbuf=%2, recvbuf=%3, sendtypes=%5, recvtypes=%7, sendcnts=%9, recvcnts=%11, sdispls=%13, rdispls=%15"
/>
<string
id="MPI_Barrier"
value="MPI_Barrier"
/>
<string
id="MPI_Barrier.enter"
value="ENTER: comm=%1"
/>
<string
id="MPI_Ibarrier"
value="MPI_Ibarrier"
/>
<string
id="MPI_Ibarrier.enter"
value="ENTER: comm=%1"
/>
<string
id="MPI_Bcast"
value="MPI_Bcast"
/>
<string
id="MPI_Bcast.enter"
value="ENTER: comm=%1, buffer=%2, datatype=%3, count=%4, root=%5"
/>
<string
id="MPI_Ibcast"
value="MPI_Ibcast"
/>
<string
id="MPI_Ibcast.enter"
value="ENTER: buffer=%1, count=%2, datatype=%3, root=%4, comm=%5"
/>
<string
id="NBC.leave"
value="LEAVE: rc=0 request=%1"
/>
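<!--
  How the insertion parameters render: every nonblocking collective shares
  this NBC.leave string, so an MPI_Ibcast call logs its own enter string
  followed by the shared leave string. With hypothetical handle values the
  pair might read:
    ENTER: buffer=0x01A8F0C0, count=1, datatype=0x4c000405, root=0, comm=0x44000000
    LEAVE: rc=0 request=0x2c000001
-->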
<string
id="MPI_Exscan"
value="MPI_Exscan"
/>
<string
id="MPI_Exscan.enter"
value="ENTER: comm=%1, sendbuf=%2, recvbuf=%3, datatype=%4, count=%5, op=%6"
/>
<string
id="MPI_Gather"
value="MPI_Gather"
/>
<string
id="MPI_Gather.enter"
value="ENTER: comm=%1, sendbuf=%2, recvbuf=%3, sendtype=%4, recvtype=%5, sendcnt=%6, recvcnt=%7, root=%8"
/>
<string
id="MPI_Igather"
value="MPI_Igather"
/>
<string
id="MPI_Igather.enter"
value="ENTER: comm=%1, sendbuf=%2, recvbuf=%3, sendtype=%4, recvtype=%5, sendcnt=%6, recvcnt=%7, root=%8"
/>
<string
id="MPI_Gatherv"
value="MPI_Gatherv"
/>
<string
id="MPI_Gatherv.enter"
value="ENTER: comm=%1, sendbuf=%2, recvbuf=%3, sendtype=%4, recvtype=%5, sendcnt=%6, recvcnts=%8, displs=%10, root=%11"
/>
<string
id="MPI_Igatherv"
value="MPI_Igatherv"
/>
<string
id="MPI_Igatherv.enter"
value="ENTER: comm=%1, sendbuf=%2, recvbuf=%3, sendtype=%4, recvtype=%5, sendcnt=%6, recvcnts=%8, displs=%10, root=%11"
/>
<string
id="MPI_Op_create"
value="MPI_Op_create"
/>
<string
id="MPI_Op_create.enter"
value="ENTER: function=%1, commute=%2"
/>
<string
id="MPI_Op_create.leave"
value="LEAVE: rc=0 op=%1"
/>
<string
id="MPI_Op_free"
value="MPI_Op_free"
/>
<string
id="MPI_Op_free.enter"
value="ENTER: op=%1"
/>
<string
id="MPI_Reduce_scatter"
value="MPI_Reduce_scatter"
/>
<string
id="MPI_Reduce_scatter.enter"
value="ENTER: comm=%1, sendbuf=%2, recvbuf=%3, datatype=%4, recvcnts=%6, op=%7"
/>
<string
id="MPI_Reduce"
value="MPI_Reduce"
/>
<string
id="MPI_Reduce.enter"
value="ENTER: comm=%1, sendbuf=%2, recvbuf=%3, datatype=%4, count=%5, op=%6, root=%7"
/>
<string
id="MPI_Ireduce"
value="MPI_Ireduce"
/>
<string
id="MPI_Ireduce.enter"
value="ENTER: comm=%1, sendbuf=%2, recvbuf=%3, datatype=%4, count=%5, op=%6, root=%7"
/>
<string
id="MPI_Scan"
value="MPI_Scan"
/>
<string
id="MPI_Scan.enter"
value="ENTER: comm=%1, sendbuf=%2, recvbuf=%3, datatype=%4, count=%5, op=%6"
/>
<string
id="MPI_Scatter"
value="MPI_Scatter"
/>
<string
id="MPI_Scatter.enter"
value="ENTER: comm=%1, sendbuff=%2, recvbuf=%3, sendtype=%4, recvtype=%5, sendcnt=%6, recvcnt=%7, root=%8"
/>
<string
id="MPI_Iscatter"
value="MPI_Iscatter"
/>
<string
id="MPI_Iscatter.enter"
value="ENTER: comm=%1, sendbuff=%2, recvbuf=%3, sendtype=%4, recvtype=%5, sendcnt=%6, recvcnt=%7, root=%8"
/>
<string
id="MPI_Scatterv"
value="MPI_Scatterv"
/>
<string
id="MPI_Scatterv.enter"
value="ENTER: comm=%1, sendbuf=%2, recvbuf=%3, sendtype=%4, recvtype=%5, sendcnts=%7, recvcnt=%8, displs=%10, root=%11"
/>
<string
id="MPI_Iscatterv"
value="MPI_Iscatterv"
/>
<string
id="MPI_Iscatterv.enter"
value="ENTER: comm=%1, sendbuf=%2, recvbuf=%3, sendtype=%4, recvtype=%5, sendcnts=%7, recvcnt=%8, displs=%10, root=%11"
/>
<string
id="MPI_Comm_compare"
value="MPI_Comm_compare"
/>
<string
id="MPI_Comm_compare.enter"
value="ENTER: comm1=%1 comm2=%2"
/>
<string
id="MPI_Comm_compare.leave"
value="LEAVE: rc=0 result=%1"
/>
<string
id="MPI_Comm_create"
value="MPI_Comm_create"
/>
<string
id="MPI_Comm_create.enter"
value="ENTER: comm=%1 group=%2"
/>
<string
id="MPI_Comm_create.leave"
value="LEAVE: rc=0 newcomm=%1"
/>
<string
id="MPI_Comm_dup"
value="MPI_Comm_dup"
/>
<string
id="MPI_Comm_dup.enter"
value="ENTER: comm=%1"
/>
<string
id="MPI_Comm_dup.leave"
value="LEAVE: newcomm=%1"
/>
<string
id="MPI_Comm_free"
value="MPI_Comm_free"
/>
<string
id="MPI_Comm_free.enter"
value="ENTER: comm=%1"
/>
<string
id="MPI_Comm_get_name"
value="MPI_Comm_get_name"
/>
<string
id="MPI_Comm_get_name.enter"
value="ENTER: comm=%1"
/>
<string
id="MPI_Comm_get_name.leave"
value="LEAVE: rc=0 name=%2, resultlength=%1"
/>
<string
id="MPI_Comm_group"
value="MPI_Comm_group"
/>
<string
id="MPI_Comm_group.enter"
value="ENTER: comm=%1"
/>
<string
id="MPI_Comm_group.leave"
value="LEAVE: rc=0 group=%1"
/>
<string
id="MPI_Comm_rank"
value="MPI_Comm_rank"
/>
<string
id="MPI_Comm_rank.enter"
value="ENTER: comm=%1"
/>
<string
id="MPI_Comm_rank.leave"
value="LEAVE: rc=0 rank=%1"
/>
<string
id="MPI_Comm_remote_group"
value="MPI_Comm_remote_group"
/>
<string
id="MPI_Comm_remote_group.enter"
value="ENTER: comm=%1"
/>
<string
id="MPI_Comm_remote_group.leave"
value="LEAVE: rc=0 group=%1"
/>
<string
id="MPI_Comm_remote_size"
value="MPI_Comm_remote_size"
/>
<string
id="MPI_Comm_remote_size.enter"
value="ENTER: comm=%1"
/>
<string
id="MPI_Comm_remote_size.leave"
value="LEAVE: size=%1"
/>
<string
id="MPI_Comm_set_name"
value="MPI_Comm_set_name"
/>
<string
id="MPI_Comm_set_name.enter"
value="ENTER: comm=%1 comm_name=%2"
/>
<string
id="MPI_Comm_size"
value="MPI_Comm_size"
/>
<string
id="MPI_Comm_size.enter"
value="ENTER: comm=%1"
/>
<string
id="MPI_Comm_size.leave"
value="LEAVE: rc=0 size=%1"
/>
<string
id="MPI_Comm_split"
value="MPI_Comm_split"
/>
<string
id="MPI_Comm_split.enter"
value="ENTER: comm=%1 color=%2 key=%3"
/>
<string
id="MPI_Comm_split.leave"
value="LEAVE: rc=0 newcomm=%1"
/>
<string
id="MPI_Comm_split_type"
value="MPI_Comm_split_type"
/>
<string
id="MPI_Comm_split_type.enter"
value="ENTER: comm=%1 split_type=%2 key=%3 info=%4"
/>
<string
id="MPI_Comm_split_type.leave"
value="LEAVE: rc=0 newcomm=%1"
/>
<string
id="MPI_Comm_test_inter"
value="MPI_Comm_test_inter"
/>
<string
id="MPI_Comm_test_inter.enter"
value="ENTER: comm=%1"
/>
<string
id="MPI_Comm_test_inter.leave"
value="LEAVE: rc=0 flag=%1"
/>
<string
id="MPI_Intercomm_create"
value="MPI_Intercomm_create"
/>
<string
id="MPI_Intercomm_create.enter"
value="ENTER: local_comm=%1 local_leader=%2 peer_comm=%3 remote_leader=%4 tag=%5"
/>
<string
id="MPI_Intercomm_create.leave"
value="LEAVE: rc=0 newintercomm=%1"
/>
<string
id="MPI_Intercomm_merge"
value="MPI_Intercomm_merge"
/>
<string
id="MPI_Intercomm_merge.enter"
value="ENTER: intercomm=%1 high=%2"
/>
<string
id="MPI_Intercomm_merge.leave"
value="LEAVE: rc=0 newintracomm=%1"
/>
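<!-- Datatype and packing strings -->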
<string
id="MPI_Get_address"
value="MPI_Get_address"
/>
<string
id="MPI_Get_address.enter"
value="ENTER: location=%1"
/>
<string
id="MPI_Get_address.leave"
value="LEAVE: rc=0 address=%1"
/>
<string
id="MPI_Get_count"
value="MPI_Get_count"
/>
<string
id="MPI_Get_count.enter"
value="ENTER: status=%1, datatype=%2"
/>
<string
id="MPI_Get_count.leave"
value="LEAVE: rc=0 count=%1, status_count=%2"
/>
<string
id="MPI_Get_elements"
value="MPI_Get_elements"
/>
<string
id="MPI_Get_elements.enter"
value="ENTER: status=%1, datatype=%2"
/>
<string
id="MPI_Get_elements.leave"
value="LEAVE: rc=0 count=%1, byte_count=%2 "
/>
<string
id="MPI_Get_elements_x"
value="MPI_Get_elements_x"
/>
<string
id="MPI_Get_elements_x.enter"
value="ENTER: status=%1, datatype=%2"
/>
<string
id="MPI_Get_elements_x.leave"
value="LEAVE: rc=0 count=%1, byte_count=%2 "
/>
<string
id="MPI_Pack"
value="MPI_Pack"
/>
<string
id="MPI_Pack.enter"
value="ENTER: inbuf=%1, incount=%2, datatype=%3, outbuf=%4, outcount=%5, *position=%6, comm=%7"
/>
<string
id="MPI_Pack.leave"
value="LEAVE: rc=0 position=%1"
/>
<string
id="MPI_Pack_external"
value="MPI_Pack_external"
/>
<string
id="MPI_Pack_external.enter"
value="ENTER: datarep=%1, inbuf=%2, incount=%3, datatype=%4, outbuf=%5, outcount=%6, *position=%7"
/>
<string
id="MPI_Pack_external.leave"
value="LEAVE: rc=0 position=%1"
/>
<string
id="MPI_Pack_external_size"
value="MPI_Pack_external_size"
/>
<string
id="MPI_Pack_external_size.enter"
value="ENTER: datarep=%1 incount=%2 datatype=%3"
/>
<string
id="MPI_Pack_external_size.leave"
value="LEAVE: rc=0 size=%1"
/>
<string
id="MPI_Pack_size"
value="MPI_Pack_size"
/>
<string
id="MPI_Pack_size.enter"
value="ENTER: incount=%1, datatype=%2, comm=%3"
/>
<string
id="MPI_Pack_size.leave"
value="LEAVE: rc=0 size=%1"
/>
<string
id="MPI_Register_datarep"
value="MPI_Register_datarep"
/>
<string
id="MPI_Register_datarep.enter"
value="ENTER: datarep=%1, read_conversion_fn=%2, write_conversion_fn=%3, dtype_file_extent_fn=%4, extra_state=%5"
/>
<string
id="MPI_Status_set_elements"
value="MPI_Status_set_elements"
/>
<string
id="MPI_Status_set_elements.enter"
value="ENTER: status=%1 datatype=%2 count=%3"
/>
<string
id="MPI_Status_set_elements.leave"
value="LEAVE: rc=0 status_count=%1"
/>
<string
id="MPI_Status_set_elements_x"
value="MPI_Status_set_elements_x"
/>
<string
id="MPI_Status_set_elements_x.enter"
value="ENTER: status=%1 datatype=%2 count=%3"
/>
<string
id="MPI_Status_set_elements_x.leave"
value="LEAVE: rc=0 status_count=%1"
/>
<string
id="MPI_Type_commit"
value="MPI_Type_commit"
/>
<string
id="MPI_Type_commit.enter"
value="ENTER: datatype=%1"
/>
<string
id="MPI_Type_contiguous"
value="MPI_Type_contiguous"
/>
<string
id="MPI_Type_contiguous.enter"
value="ENTER: count=%1, old_type=%2"
/>
<string
id="MPI_Type_contiguous.leave"
value="LEAVE: rc=0 new_type_p=%1"
/>
<string
id="MPI_Type_create_darray"
value="MPI_Type_create_darray"
/>
<string
id="MPI_Type_create_darray.enter"
value="ENTER: size=%1, rank=%2, ndims=%3, array_of_gsizes=%5, array_of_distribs=%7, array_of_dargs=%9, array_of_psizes=%11, order=%12, oldtype=%13"
/>
<string
id="MPI_Type_create_darray.leave"
value="LEAVE: rc=0 newtype=%1"
/>
<string
id="MPI_Type_create_hindexed"
value="MPI_Type_create_hindexed"
/>
<string
id="MPI_Type_create_hindexed.enter"
value="ENTER: count=%1, array_of_blocklengths=%3, array_of_displacements=%5, oldtype=%6"
/>
<string
id="MPI_Type_create_hindexed.leave"
value="LEAVE: rc=0 newtype=%1"
/>
<string
id="MPI_Type_create_hvector"
value="MPI_Type_create_hvector"
/>
<string
id="MPI_Type_create_hvector.enter"
value="ENTER: count=%1, blocklength=%2, stride=%3, oldtype=%4"
/>
<string
id="MPI_Type_create_hvector.leave"
value="LEAVE: rc=0 newtype=%1"
/>
<string
id="MPI_Type_create_indexed_block"
value="MPI_Type_create_indexed_block"
/>
<string
id="MPI_Type_create_indexed_block.enter"
value="ENTER: count=%1, blocklength=%2, array_of_displacements=%4, oldtype=%5"
/>
<string
id="MPI_Type_create_indexed_block.leave"
value="LEAVE: rc=0 newtype=%1"
/>
<string
id="MPI_Type_create_resized"
value="MPI_Type_create_resized"
/>
<string
id="MPI_Type_create_resized.enter"
value="ENTER: oldtype=%1, lb=%2, extent=%3"
/>
<string
id="MPI_Type_create_resized.leave"
value="LEAVE: rc=0 newtype=%1"
/>
<string
id="MPI_Type_create_struct"
value="MPI_Type_create_struct"
/>
<string
id="MPI_Type_create_struct.enter"
value="ENTER: count=%1, array_of_blocklengths=%3, array_of_displacements=%5, array_of_types=%7"
/>
<string
id="MPI_Type_create_struct.leave"
value="LEAVE: rc=0 newtype=%1"
/>
<string
id="MPI_Type_create_subarray"
value="MPI_Type_create_subarray"
/>
<string
id="MPI_Type_create_subarray.enter"
value="ENTER: ndims=%1, array_of_sizes=%3, array_of_subsizes=%5, array_of_starts=%7, order=%8, oldtype=%9"
/>
<string
id="MPI_Type_create_subarray.leave"
value="LEAVE: rc=0 newtype=%1"
/>
<string
id="MPI_Type_dup"
value="MPI_Type_dup"
/>
<string
id="MPI_Type_dup.enter"
value="ENTER: datatype=%1"
/>
<string
id="MPI_Type_dup.leave"
value="LEAVE: rc=0 newtype=%1"
/>
<string
id="MPI_Type_free"
value="MPI_Type_free"
/>
<string
id="MPI_Type_free.enter"
value="ENTER: datatype=%1"
/>
<string
id="MPI_Type_get_contents"
value="MPI_Type_get_contents"
/>
<string
id="MPI_Type_get_contents.enter"
value="ENTER: datatype=%1, max_integers=%2, max_addresses=%3, max_datatypes=%4"
/>
<string
id="MPI_Type_get_contents.leave"
value="LEAVE: array_of_integers=%2,array_of_addresses=%4,array_of_datatypes=%6"
/>
<string
id="MPI_Type_get_envelope"
value="MPI_Type_get_envelope"
/>
<string
id="MPI_Type_get_envelope.enter"
value="ENTER: datatype=%1"
/>
<string
id="MPI_Type_get_envelope.leave"
value="LEAVE: rc=0 num_integers=%1, num_addresses=%2, num_datatypes=%3, combiner=%4"
/>
<string
id="MPI_Type_get_extent"
value="MPI_Type_get_extent"
/>
<string
id="MPI_Type_get_extent.enter"
value="ENTER: datatype=%1"
/>
<string
id="MPI_Type_get_extent.leave"
value="LEAVE: rc=0 lb=%1 extent=%2"
/>
<string
id="MPI_Type_get_extent_x"
value="MPI_Type_get_extent_x"
/>
<string
id="MPI_Type_get_extent_x.enter"
value="ENTER: datatype=%1"
/>
<string
id="MPI_Type_get_extent_x.leave"
value="LEAVE: rc=0 lb=%1 extent=%2"
/>
<string
id="MPI_Type_get_name"
value="MPI_Type_get_name"
/>
<string
id="MPI_Type_get_name.enter"
value="ENTER: datatype=%1"
/>
<string
id="MPI_Type_get_name.leave"
value="LEAVE: rc=0 typename=%1, resultlen=%2"
/>
<string
id="MPI_Type_get_true_extent"
value="MPI_Type_get_true_extent"
/>
<string
id="MPI_Type_get_true_extent.enter"
value="ENTER: datatype=%1"
/>
<string
id="MPI_Type_get_true_extent.leave"
value="LEAVE: rc=0 true_lb=%1 true_extent=%2"
/>
<string
id="MPI_Type_get_true_extent_x"
value="MPI_Type_get_true_extent_x"
/>
<string
id="MPI_Type_get_true_extent_x.enter"
value="ENTER: datatype=%1"
/>
<string
id="MPI_Type_get_true_extent_x.leave"
value="LEAVE: rc=0 true_lb=%1 true_extent=%2"
/>
<string
id="MPI_Type_indexed"
value="MPI_Type_indexed"
/>
<string
id="MPI_Type_indexed.enter"
value="ENTER: count=%1, blocklens=%3, indices=%5, old_type=%6"
/>
<string
id="MPI_Type_indexed.leave"
value="LEAVE: rc=0 newtype=%1"
/>
<string
id="MPI_Type_match_size"
value="MPI_Type_match_size"
/>
<string
id="MPI_Type_match_size.enter"
value="ENTER: typeclass=%1 size=%2"
/>
<string
id="MPI_Type_match_size.leave"
value="LEAVE: rc=0 datatype=%1"
/>
<string
id="MPI_Type_set_name"
value="MPI_Type_set_name"
/>
<string
id="MPI_Type_set_name.enter"
value="ENTER: datatype=%1, type_name=%2"
/>
<string
id="MPI_Type_size"
value="MPI_Type_size"
/>
<string
id="MPI_Type_size.enter"
value="ENTER: datatype=%1"
/>
<string
id="MPI_Type_size.leave"
value="LEAVE: rc=0 size=%1"
/>
<string
id="MPI_Type_size_x"
value="MPI_Type_size_x"
/>
<string
id="MPI_Type_size_x.enter"
value="ENTER: datatype=%1"
/>
<string
id="MPI_Type_size_x.leave"
value="LEAVE: rc=0 size=%1"
/>
<string
id="MPI_Type_vector"
value="MPI_Type_vector"
/>
<string
id="MPI_Type_vector.enter"
value="ENTER: count=%1, blocklength=%2, stride=%3, old_type=%4"
/>
<string
id="MPI_Type_vector.leave"
value="LEAVE: rc=0 newtype=%1"
/>
<string
id="MPI_Unpack"
value="MPI_Unpack"
/>
<string
id="MPI_Unpack.enter"
value="ENTER: inbuf=%1, insize=%2, position=%3, outbuf=%4, outcount=%5, datatype=%6, comm=%7"
/>
<string
id="MPI_Unpack.leave"
value="LEAVE: rc=0 position=%1"
/>
<string
id="MPI_Unpack_external"
value="MPI_Unpack_external"
/>
<string
id="MPI_Unpack_external.enter"
value="ENTER: datarep=%1, inbuf=%2, insize=%3, position=%4, outbuf=%5, outcount=%6, datatype=%7"
/>
<string
id="MPI_Unpack_external.leave"
value="LEAVE: rc=0 position=%1"
/>
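<!-- Error handling strings -->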
<string
id="MPI_Add_error_class"
value="MPI_Add_error_class"
/>
<string
id="MPI_Add_error_class.enter"
value="ENTER: "
/>
<string
id="MPI_Add_error_class.leave"
value="LEAVE: rc=0 errorclass=%1"
/>
<string
id="MPI_Add_error_code"
value="MPI_Add_error_code"
/>
<string
id="MPI_Add_error_code.enter"
value="ENTER: errorclass=%1"
/>
<string
id="MPI_Add_error_code.leave"
value="LEAVE: rc=0 errorcode=%1"
/>
<string
id="MPI_Add_error_string"
value="MPI_Add_error_string"
/>
<string
id="MPI_Add_error_string.enter"
value="ENTER: errorcode=%1 string=%2"
/>
<string
id="MPI_Comm_call_errhandler"
value="MPI_Comm_call_errhandler"
/>
<string
id="MPI_Comm_call_errhandler.enter"
value="ENTER: comm=%1 errorcode=%2"
/>
<string
id="MPI_Comm_call_errhandler.leave"
value="LEAVE: rc=0 errorcode=%1"
/>
<string
id="MPI_Comm_create_errhandler"
value="MPI_Comm_create_errhandler"
/>
<string
id="MPI_Comm_create_errhandler.enter"
value="ENTER: function=%1"
/>
<string
id="MPI_Comm_create_errhandler.leave"
value="LEAVE: rc=0 errhandler=%1"
/>
<string
id="MPI_Comm_get_errhandler"
value="MPI_Comm_get_errhandler"
/>
<string
id="MPI_Comm_get_errhandler.enter"
value="ENTER: comm=%1"
/>
<string
id="MPI_Comm_get_errhandler.leave"
value="LEAVE: rc=0 errhandler=%1"
/>
<string
id="MPI_Comm_set_errhandler"
value="MPI_Comm_set_errhandler"
/>
<string
id="MPI_Comm_set_errhandler.enter"
value="ENTER: comm=%1 errhandler=%2"
/>
<string
id="MPI_Errhandler_free"
value="MPI_Errhandler_free"
/>
<string
id="MPI_Errhandler_free.enter"
value="ENTER: errhandler=%1"
/>
<string
id="MPI_Error_class"
value="MPI_Error_class"
/>
<string
id="MPI_Error_class.enter"
value="ENTER: errorcode=%1"
/>
<string
id="MPI_Error_class.leave"
value="LEAVE: rc=0 errorclass=%1"
/>
<string
id="MPI_Error_string"
value="MPI_Error_string"
/>
<string
id="MPI_Error_string.enter"
value="ENTER: errorcode=%1"
/>
<string
id="MPI_Error_string.leave"
value="LEAVE: rc=0 string=%2, resultlen=%1"
/>
<string
id="MPI_File_call_errhandler"
value="MPI_File_call_errhandler"
/>
<string
id="MPI_File_call_errhandler.enter"
value="ENTER: file=%1 error=%2"
/>
<string
id="MPI_File_call_errhandler.leave"
value="LEAVE: rc=0 errorcode=%1"
/>
<string
id="MPI_File_create_errhandler"
value="MPI_File_create_errhandler"
/>
<string
id="MPI_File_create_errhandler.enter"
value="ENTER: function=%1"
/>
<string
id="MPI_File_create_errhandler.leave"
value="LEAVE: rc=0 errhandler=%1"
/>
<string
id="MPI_File_get_errhandler"
value="MPI_File_get_errhandler"
/>
<string
id="MPI_File_get_errhandler.enter"
value="ENTER: file=%1"
/>
<string
id="MPI_File_get_errhandler.leave"
value="LEAVE: rc=0 errhandler=%1"
/>
<string
id="MPI_File_set_errhandler"
value="MPI_File_set_errhandler"
/>
<string
id="MPI_File_set_errhandler.enter"
value="ENTER: file=%1 errhandler=%2"
/>
<string
id="MPI_Win_call_errhandler"
value="MPI_Win_call_errhandler"
/>
<string
id="MPI_Win_call_errhandler.enter"
value="ENTER: win=%1 errorcode=%2"
/>
<string
id="MPI_Win_call_errhandler.leave"
value="LEAVE: rc=0 errorcode=%1"
/>
<string
id="MPI_Win_create_errhandler"
value="MPI_Win_create_errhandler"
/>
<string
id="MPI_Win_create_errhandler.enter"
value="ENTER: function=%1"
/>
<string
id="MPI_Win_create_errhandler.leave"
value="LEAVE: rc=0 errhandler=%1"
/>
<string
id="MPI_Win_get_errhandler"
value="MPI_Win_get_errhandler"
/>
<string
id="MPI_Win_get_errhandler.enter"
value="ENTER: win=%1"
/>
<string
id="MPI_Win_get_errhandler.leave"
value="LEAVE: rc=0 errhandler=%1"
/>
<string
id="MPI_Win_set_errhandler"
value="MPI_Win_set_errhandler"
/>
<string
id="MPI_Win_set_errhandler.enter"
value="ENTER: win=%1 errhandler=%2"
/>
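<!-- Group strings -->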
<string
id="MPI_Group_compare"
value="MPI_Group_compare"
/>
<string
id="MPI_Group_compare.enter"
value="ENTER: group1=%1 group2=%2"
/>
<string
id="MPI_Group_compare.leave"
value="LEAVE: rc=0 result=%1"
/>
<string
id="MPI_Group_difference"
value="MPI_Group_difference"
/>
<string
id="MPI_Group_difference.enter"
value="ENTER: group1=%1 group2=%2"
/>
<string
id="MPI_Group_difference.leave"
value="LEAVE: rc=0 newgroup=%1"
/>
<string
id="MPI_Group_excl"
value="MPI_Group_excl"
/>
<string
id="MPI_Group_excl.enter"
value="ENTER: group=%1, n=%2, ranks=%4"
/>
<string
id="MPI_Group_excl.leave"
value="LEAVE: rc=0 newgroup=%1"
/>
<string
id="MPI_Group_free"
value="MPI_Group_free"
/>
<string
id="MPI_Group_free.enter"
value="ENTER: group=%1"
/>
<string
id="MPI_Group_incl"
value="MPI_Group_incl"
/>
<string
id="MPI_Group_incl.enter"
value="ENTER: group=%1, n=%2, ranks=%4"
/>
<string
id="MPI_Group_incl.leave"
value="LEAVE: rc=0 newgroup=%1"
/>
<string
id="MPI_Group_intersection"
value="MPI_Group_intersection"
/>
<string
id="MPI_Group_intersection.enter"
value="ENTER: group1=%1 group2=%2"
/>
<string
id="MPI_Group_intersection.leave"
value="LEAVE: rc=0 newgroup=%1"
/>
<string
id="MPI_Group_range_excl"
value="MPI_Group_range_excl"
/>
<string
id="MPI_Group_range_excl.enter"
value="ENTER: group=%1, n=%2, ranges=%4"
/>
<string
id="MPI_Group_range_excl.leave"
value="LEAVE: rc=0 newgroup=%1"
/>
<string
id="MPI_Group_range_incl"
value="MPI_Group_range_incl"
/>
<string
id="MPI_Group_range_incl.enter"
value="ENTER: group=%1, n=%2, ranges=%4"
/>
<string
id="MPI_Group_range_incl.leave"
value="LEAVE: rc=0 newgroup=%1"
/>
<string
id="MPI_Group_rank"
value="MPI_Group_rank"
/>
<string
id="MPI_Group_rank.enter"
value="ENTER: group=%1"
/>
<string
id="MPI_Group_rank.leave"
value="LEAVE: rc=0 rank=%1"
/>
<string
id="MPI_Group_size"
value="MPI_Group_size"
/>
<string
id="MPI_Group_size.enter"
value="ENTER: group=%1"
/>
<string
id="MPI_Group_size.leave"
value="LEAVE: rc=0 size=%1"
/>
<string
id="MPI_Group_translate_ranks"
value="MPI_Group_translate_ranks"
/>
<string
id="MPI_Group_translate_ranks.enter"
value="ENTER: group1=%1, n=%2, ranks1=%4, group2=%5"
/>
<string
id="MPI_Group_translate_ranks.leave"
value="LEAVE: n=%1, ranks2=%3"
/>
<string
id="MPI_Group_union"
value="MPI_Group_union"
/>
<string
id="MPI_Group_union.enter"
value="ENTER: group1=%1 group2=%2"
/>
<string
id="MPI_Group_union.leave"
value="LEAVE: rc=0 newgroup=%1"
/>
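<!-- Info strings -->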
<string
id="MPI_Info_create"
value="MPI_Info_create"
/>
<string
id="MPI_Info_create.enter"
value="ENTER: "
/>
<string
id="MPI_Info_create.leave"
value="LEAVE: rc=0 info=%1"
/>
<string
id="MPI_Info_delete"
value="MPI_Info_delete"
/>
<string
id="MPI_Info_delete.enter"
value="ENTER: info=%1 key=%2"
/>
<string
id="MPI_Info_dup"
value="MPI_Info_dup"
/>
<string
id="MPI_Info_dup.enter"
value="ENTER: info=%1"
/>
<string
id="MPI_Info_dup.leave"
value="LEAVE: rc=0 newinfo=%1"
/>
<string
id="MPI_Info_free"
value="MPI_Info_free"
/>
<string
id="MPI_Info_free.enter"
value="ENTER: info=%1"
/>
<string
id="MPI_Info_get"
value="MPI_Info_get"
/>
<string
id="MPI_Info_get.enter"
value="ENTER: info=%1, key=%2, valuelen=%3"
/>
<string
id="MPI_Info_get.leave"
value="LEAVE: rc=0 value=%1, flag=%2"
/>
<string
id="MPI_Info_get_nkeys"
value="MPI_Info_get_nkeys"
/>
<string
id="MPI_Info_get_nkeys.enter"
value="ENTER: info=%1"
/>
<string
id="MPI_Info_get_nkeys.leave"
value="LEAVE: rc=0 nkeys=%1"
/>
<string
id="MPI_Info_get_nthkey"
value="MPI_Info_get_nthkey"
/>
<string
id="MPI_Info_get_nthkey.enter"
value="ENTER: info=%1 n=%2"
/>
<string
id="MPI_Info_get_nthkey.leave"
value="LEAVE: rc=0 key=%1"
/>
<string
id="MPI_Info_get_valuelen"
value="MPI_Info_get_valuelen"
/>
<string
id="MPI_Info_get_valuelen.enter"
value="ENTER: info=%1 key=%2"
/>
<string
id="MPI_Info_get_valuelen.leave"
value="LEAVE: rc=0 valuelen=%1, flag=%2 "
/>
<string
id="MPI_Info_set"
value="MPI_Info_set"
/>
<string
id="MPI_Info_set.enter"
value="ENTER: info=%1 key=%2 value=%3"
/>
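<!-- Initialization and environment strings -->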
<string
id="MPI_Abort"
value="MPI_Abort"
/>
<string
id="MPI_Abort.enter"
value="ENTER: comm=%1 errorcode=%2"
/>
<string
id="MPI_Finalize"
value="MPI_Finalize"
/>
<string
id="MPI_Finalize.enter"
value="ENTER: "
/>
<string
id="MPI_Init"
value="MPI_Init"
/>
<string
id="MPI_Init.enter"
value="ENTER: "
/>
<string
id="MPI_Init_thread"
value="MPI_Init_thread"
/>
<string
id="MPI_Init_thread.enter"
value="ENTER: required=%1"
/>
<string
id="MPI_Init_thread.leave"
value="LEAVE: rc=0 provided=%1"
/>
<string
id="MPI_Is_thread_main"
value="MPI_Is_thread_main"
/>
<string
id="MPI_Is_thread_main.enter"
value="ENTER: "
/>
<string
id="MPI_Is_thread_main.leave"
value="LEAVE: rc=0 flag=%1"
/>
<string
id="MPI_Query_thread"
value="MPI_Query_thread"
/>
<string
id="MPI_Query_thread.enter"
value="ENTER: "
/>
<string
id="MPI_Query_thread.leave"
value="LEAVE: rc=0 provided=%1"
/>
<string
id="MPI_Get_processor_name"
value="MPI_Get_processor_name"
/>
<string
id="MPI_Get_processor_name.enter"
value="ENTER: "
/>
<string
id="MPI_Get_processor_name.leave"
value="LEAVE: rc=0 name=%2, resultlen=%1"
/>
<string
id="MPI_Get_version"
value="MPI_Get_version"
/>
<string
id="MPI_Get_version.enter"
value="ENTER: get-version"
/>
<string
id="MPI_Get_version.leave"
value="LEAVE: rc=0 version=%1 subversion=%2"
/>
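<!-- Point-to-point and request strings -->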
<string
id="MPI_Bsend"
value="MPI_Bsend"
/>
<string
id="MPI_Bsend.enter"
value="ENTER: buf=%1, count=%2, datatype=%3, dest=%4, tag=%5, comm=%6"
/>
<string
id="MPI_Bsend.leave"
value="MPI_Bsend"
/>
<string
id="MPI_Bsend_init"
value="MPI_Bsend_init"
/>
<string
id="MPI_Bsend_init.enter"
value="ENTER: buf=%1, count=%2, datatype=%3, dest=%4, tag=%5, comm=%6"
/>
<string
id="MPI_Bsend_init.leave"
value="LEAVE: rc=0 request=%1"
/>
<string
id="MPI_Buffer_attach"
value="MPI_Buffer_attach"
/>
<string
id="MPI_Buffer_attach.enter"
value="ENTER: buffer=%1 size=%2"
/>
<string
id="MPI_Buffer_detach"
value="MPI_Buffer_detach"
/>
<string
id="MPI_Buffer_detach.enter"
value="ENTER: "
/>
<string
id="MPI_Buffer_detach.leave"
value="LEAVE: rc=0 buffer=%1 size=%2"
/>
<string
id="MPI_Cancel"
value="MPI_Cancel"
/>
<string
id="MPI_Cancel.enter"
value="ENTER: request=%1"
/>
<string
id="MPI_Grequest_complete"
value="MPI_Grequest_complete"
/>
<string
id="MPI_Grequest_complete.enter"
value="ENTER: request=%1"
/>
<string
id="MPI_Grequest_start"
value="MPI_Grequest_start"
/>
<string
id="MPI_Grequest_start.enter"
value="ENTER: query_fn=%1, query_fn=%2, cancel_fn=%3, extra_state=%4"
/>
<string
id="MPI_Grequest_start.leave"
value="LEAVE: rc=0 request=%1"
/>
<string
id="MPI_Ibsend"
value="MPI_Ibsend"
/>
<string
id="MPI_Ibsend.enter"
value="ENTER: buf=%1, count=%2, datatype=%3, dest=%4, tag=%5, comm=%6"
/>
<string
id="MPI_Ibsend.leave"
value="LEAVE: rc=0 request=%1"
/>
<string
id="MPI_Improbe"
value="MPI_Improbe"
/>
<string
id="MPI_Improbe.enter"
value="ENTER: src=%1, tag=%2, comm=%3"
/>
<string
id="MPI_Improbe.leave"
value="LEAVE: flag=%1, message=%2, status=%3"
/>
<string
id="MPI_Imrecv"
value="MPI_Imrecv"
/>
<string
id="MPI_Imrecv.enter"
value="ENTER: buf=%1, count=%2, datatype=%3, message=%4"
/>
<string
id="MPI_Imrecv.leave"
value="LEAVE: request=%1"
/>
<string
id="MPI_Iprobe"
value="MPI_Iprobe"
/>
<string
id="MPI_Iprobe.enter"
value="ENTER: source=%1, tag=%2, comm=%3"
/>
<string
id="MPI_Iprobe.leave"
value="LEAVE: rc=0 flag=%1, status=%2"
/>
<string
id="MPI_Irecv"
value="MPI_Irecv"
/>
<string
id="MPI_Irecv.enter"
value="ENTER: buf=%1, count=%2, datatype=%3, source=%4, tag=%5, comm=%6"
/>
<string
id="MPI_Irecv.leave"
value="LEAVE: rc=0 request=%1"
/>
<string
id="MPI_Irsend"
value="MPI_Irsend"
/>
<string
id="MPI_Irsend.enter"
value="ENTER: buf=%1, count=%2, datatype=%3, dest=%4, tag=%5, comm=%6"
/>
<string
id="MPI_Irsend.leave"
value="LEAVE: rc=0 request=%1"
/>
<string
id="MPI_Isend"
value="MPI_Isend"
/>
<string
id="MPI_Isend.enter"
value="ENTER: buf=%1, count=%2, datatype=%3, dest=%4, tag=%5, comm=%6"
/>
<string
id="MPI_Isend.leave"
value="LEAVE: rc=0 request=%1"
/>
<string
id="MPI_Issend"
value="MPI_Issend"
/>
<string
id="MPI_Issend.enter"
value="ENTER: buf=%1, count=%2, datatype=%3, dest=%4, tag=%5, comm=%6"
/>
<string
id="MPI_Issend.leave"
value="LEAVE: rc=0 request=%1"
/>
<string
id="MPI_Mprobe"
value="MPI_Mprobe"
/>
<string
id="MPI_Mprobe.enter"
value="ENTER: src=%1, tag=%2, comm=%3"
/>
<string
id="MPI_Mprobe.leave"
value="LEAVE: message=%1, status=%2"
/>
<string
id="MPI_Mrecv"
value="MPI_Mrecv"
/>
<string
id="MPI_Mrecv.enter"
value="ENTER: buf=%1, count=%2, datatype=%3, message=%4"
/>
<string
id="MPI_Mrecv.leave"
value="LEAVE: status=%1"
/>
<string
id="MPI_Probe"
value="MPI_Probe"
/>
<string
id="MPI_Probe.enter"
value="ENTER: src=%1, tag=%2, comm=%3"
/>
<string
id="MPI_Probe.leave"
value="LEAVE: status=%1"
/>
<string
id="MPI_Recv"
value="MPI_Recv"
/>
<string
id="MPI_Recv.enter"
value="ENTER: buf=%1, count=%2, datatype=%3, source=%4, tag=%5, comm=%6"
/>
<string
id="MPI_Recv.leave"
value="LEAVE: status=%1"
/>
<string
id="MPI_Recv_init"
value="MPI_Recv_init"
/>
<string
id="MPI_Recv_init.enter"
value="ENTER: buf=%1, count=%2, datatype=%3, source=%4, tag=%5, comm=%6"
/>
<string
id="MPI_Recv_init.leave"
value="LEAVE: rc=0 request=%1"
/>
<string
id="MPI_Request_free"
value="MPI_Request_free"
/>
<string
id="MPI_Request_free.enter"
value="ENTER: request=%1"
/>
<string
id="MPI_Request_get_status"
value="MPI_Request_get_status"
/>
<string
id="MPI_Request_get_status.enter"
value="ENTER: request=%1"
/>
<string
id="MPI_Request_get_status.leave"
value="LEAVE: rc=0 flag=%1, status=%2"
/>
<string
id="MPI_Rsend"
value="MPI_Rsend"
/>
<string
id="MPI_Rsend.enter"
value="ENTER: buf=%1, count=%2, datatype=%3, dest=%4, tag=%5, comm=%6"
/>
<string
id="MPI_Rsend_init"
value="MPI_Rsend_init"
/>
<string
id="MPI_Rsend_init.enter"
value="ENTER: buf=%1, count=%2, datatype=%3, dest=%4, tag=%5, comm=%6"
/>
<string
id="MPI_Rsend_init.leave"
value="LEAVE: rc=0 request=%1"
/>
<string
id="MPI_Send"
value="MPI_Send"
/>
<string
id="MPI_Send.enter"
value="ENTER: buf=%1, count=%2, datatype=%3, dest=%4, tag=%5, comm=%6"
/>
<string
id="MPI_Send_init"
value="MPI_Send_init"
/>
<string
id="MPI_Send_init.enter"
value="ENTER: comm=%1 datatype=%2 count=%3 dst=%4 tag=%5"
/>
<string
id="MPI_Send_init.leave"
value="LEAVE: rc=0 request=%1"
/>
<string
id="MPI_Sendrecv"
value="MPI_Sendrecv"
/>
<string
id="MPI_Sendrecv.enter"
value="ENTER: sendbuf=%1, sendcount=%2, sendtype=%3, dest=%4, sendtag=%5, recvbuf=%6, recvcount=%7, recvtype=%8, source=%9, recvtag=%10, comm=%11"
/>
<string
id="MPI_Sendrecv_replace"
value="MPI_Sendrecv_replace"
/>
<string
id="MPI_Sendrecv_replace.enter"
value="ENTER: buf=%1, count=%2, datatype=%3, dest=%4, sendtag=%5, source=%6, recvtag=%7, comm=%8"
/>
<string
id="MPI_Sendrecv_replace.leave"
value="LEAVE: status=%1"
/>
<string
id="MPI_Ssend"
value="MPI_Ssend"
/>
<string
id="MPI_Ssend.enter"
value="ENTER: buf=%1, count=%2, datatype=%3, dest=%4, tag=%5, comm=%6"
/>
<string
id="MPI_Ssend_init"
value="MPI_Ssend_init"
/>
<string
id="MPI_Ssend_init.enter"
value="ENTER: buf=%1, count=%2, datatype=%3, dest=%4, tag=%5, comm=%6"
/>
<string
id="MPI_Ssend_init.leave"
value="LEAVE: rc=0 request=%1"
/>
<string
id="MPI_Start"
value="MPI_Start"
/>
<string
id="MPI_Start.enter"
value="ENTER: request=%1"
/>
<string
id="MPI_Startall"
value="MPI_Startall"
/>
<string
id="MPI_Startall.enter"
value="ENTER: count=%1, array_of_requests=%3"
/>
<string
id="MPI_Status_set_cancelled"
value="MPI_Status_set_cancelled"
/>
<string
id="MPI_Status_set_cancelled.enter"
value="ENTER: status=%1, flag=%2"
/>
<string
id="MPI_Wait"
value="MPI_Wait"
/>
<string
id="MPI_Wait.enter"
value="ENTER: request=%1"
/>
<string
id="MPI_Wait.leave"
value="ENTER: status=%1"
/>
<string
id="MPI_Waitall"
value="MPI_Waitall"
/>
<string
id="MPI_Waitall.enter"
value="ENTER: count=%1, array_of_requests=%3"
/>
<string
id="MPI_Waitall.leave"
value="ENTER: count=%1, array_of_statuses=%3"
/>
<string
id="MPI_Waitany"
value="MPI_Waitany"
/>
<string
id="MPI_Waitany.enter"
value="ENTER: count=%1, array_of_requests=%3 "
/>
<string
id="MPI_Waitany.leave"
value="LEAVE: rc=0 index=%1, status"
/>
<string
id="MPI_Waitsome"
value="MPI_Waitsome"
/>
<string
id="MPI_Waitsome.enter"
value="ENTER: incount=%1"
/>
<string
id="MPI_Waitsome.leave"
value="LEAVE: rc=0 outcount=%1"
/>
<string
id="MPI_Test"
value="MPI_Test"
/>
<string
id="MPI_Test.enter"
value="ENTER: request=%1"
/>
<string
id="MPI_Test.leave"
value="LEAVE: rc=0 flag=%1"
/>
<string
id="MPI_Test_cancelled"
value="MPI_Test_cancelled"
/>
<string
id="MPI_Test_cancelled.enter"
value="ENTER: status->cancelled=%1"
/>
<string
id="MPI_Test_cancelled.leave"
value="LEAVE: rc=0 flag=%1"
/>
<string
id="MPI_Testall"
value="MPI_Testall"
/>
<string
id="MPI_Testall.enter"
value="ENTER: count=%1, array_of_requests=%3"
/>
<string
id="MPI_Testall.leave"
value="LEAVE: rc=0 flag=%1, count=%2, array_of_statuses=%4"
/>
<string
id="MPI_Testany"
value="MPI_Testany"
/>
<string
id="MPI_Testany.enter"
value="ENTER: count=%1, array_of_requests=%3"
/>
<string
id="MPI_Testany.leave"
value="LEAVE: rc=0 index=%1, flag=%2, status=%3"
/>
<string
id="MPI_Testsome"
value="MPI_Testsome"
/>
<string
id="MPI_Testsome.enter"
value="ENTER: incount=%1"
/>
<string
id="MPI_Testsome.leave"
value="LEAVE: rc=0 outcount=%1"
/>
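<!-- RMA (one-sided) strings -->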
<string
id="MPI_Accumulate"
value="MPI_Accumulate"
/>
<string
id="MPI_Accumulate.enter"
value="ENTER: origin_addr=%1, origin_count=%2, origin_datatype=%3, target_rank=%4, target_disp=%5, target_count=%6, target_datatype=%7, op=%8, win=%9"
/>
<string
id="MPI_Alloc_mem"
value="MPI_Alloc_mem"
/>
<string
id="MPI_Alloc_mem.enter"
value="ENTER: size=%1, info=%2"
/>
<string
id="MPI_Alloc_mem.leave"
value="LEAVE: rc=0 base=%1"
/>
<string
id="MPI_Free_mem"
value="MPI_Free_mem"
/>
<string
id="MPI_Free_mem.enter"
value="ENTER: baseptr=%1"
/>
<string
id="MPI_Get"
value="MPI_Get"
/>
<string
id="MPI_Get.enter"
value="ENTER: origin_addr=%1, origin_count=%2, origin_datatype=%3, target_rank=%4, target_disp=%5, target_count=%6, target_datatype=%7, win=%8"
/>
<string
id="MPI_Put"
value="MPI_Put"
/>
<string
id="MPI_Put.enter"
value="ENTER: origin_addr=%1, origin_count=%2, origin_datatype=%3, target_rank=%4, target_disp=%5, target_count=%6, target_datatype=%7, win=%8"
/>
<string
id="MPI_Win_complete"
value="MPI_Win_complete"
/>
<string
id="MPI_Win_complete.enter"
value="ENTER: win=%1"
/>
<string
id="MPI_Win_create"
value="MPI_Win_create"
/>
<string
id="MPI_Win_create.enter"
value="ENTER: base=%1, size=%2, disp_unit=%3, info=%4, comm=%5"
/>
<string
id="MPI_Win_create.leave"
value="LEAVE: rc=0 win=%1"
/>
<string
id="MPI_Win_allocate_shared"
value="MPI_Win_allocate_shared"
/>
<string
id="MPI_Win_allocate_shared.enter"
value="ENTER: size=%1, disp_unit=%2, info=%3, comm=%4"
/>
<string
id="MPI_Win_allocate_shared.leave"
value="LEAVE: rc=0 baseptr=%1 win=%2"
/>
<string
id="MPI_Win_shared_query"
value="MPI_Win_shared_query"
/>
<string
id="MPI_Win_shared_query.enter"
value="ENTER: win=%1, rank=%2"
/>
<string
id="MPI_Win_shared_query.leave"
value="LEAVE: rc=0 size=%1, disp_unit=%2, baseptr=%3"
/>
<string
id="MPI_Win_fence"
value="MPI_Win_fence"
/>
<string
id="MPI_Win_fence.enter"
value="ENTER: assert=%1, win=%2"
/>
<string
id="MPI_Win_free"
value="MPI_Win_free"
/>
<string
id="MPI_Win_free.enter"
value="ENTER: win=%1"
/>
<string
id="MPI_Win_get_group"
value="MPI_Win_get_group"
/>
<string
id="MPI_Win_get_group.enter"
value="ENTER: win=%1"
/>
<string
id="MPI_Win_get_group.leave"
value="LEAVE: rc=0 group=%1"
/>
<string
id="MPI_Win_get_name"
value="MPI_Win_get_name"
/>
<string
id="MPI_Win_get_name.enter"
value="ENTER: win=%1"
/>
<string
id="MPI_Win_get_name.leave"
value="LEAVE: rc=0 name=%1, resultlen=%2"
/>
<string
id="MPI_Win_lock"
value="MPI_Win_lock"
/>
<string
id="MPI_Win_lock.enter"
value="ENTER: lock_type=%1, rank=%2, assert=%3, win=%4"
/>
<string
id="MPI_Win_post"
value="MPI_Win_post"
/>
<string
id="MPI_Win_post.enter"
value="ENTER: win=%1 group=%2 assert=%3"
/>
<string
id="MPI_Win_set_name"
value="MPI_Win_set_name"
/>
<string
id="MPI_Win_set_name.enter"
value="ENTER: win=%1, win_name=%2"
/>
<string
id="MPI_Win_start"
value="MPI_Win_start"
/>
<string
id="MPI_Win_start.enter"
value="ENTER: group=%1, assert=%2, win=%3"
/>
<string
id="MPI_Win_test"
value="MPI_Win_test"
/>
<string
id="MPI_Win_test.enter"
value="ENTER: win=%1"
/>
<string
id="MPI_Win_test.leave"
value="LEAVE: rc=0 flag=%1"
/>
<string
id="MPI_Win_unlock"
value="MPI_Win_unlock"
/>
<string
id="MPI_Win_unlock.enter"
value="ENTER: rank=%1, win=%2"
/>
<string
id="MPI_Win_wait"
value="MPI_Win_wait"
/>
<string
id="MPI_Win_wait.enter"
value="ENTER: win=%1"
/>
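<!-- File I/O strings -->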
<string
id="MPI_File_close"
value="MPI_File_close"
/>
<string
id="MPI_File_close.enter"
value="ENTER: mpi_fh=%1"
/>
<string
id="MPI_File_delete"
value="MPI_File_delete"
/>
<string
id="MPI_File_delete.enter"
value="ENTER: filename=%1, info=%2"
/>
<string
id="MPI_File_c2f"
value="MPI_File_c2f"
/>
<string
id="MPI_File_c2f.enter"
value="ENTER: mpi_fh=%1"
/>
<string
id="MPI_File_c2f.leave"
value="LEAVE: fh=%1"
/>
<string
id="MPI_File_f2c"
value="MPI_File_f2c"
/>
<string
id="MPI_File_f2c.enter"
value="ENTER: fh=%1"
/>
<string
id="MPI_File_f2c.leave"
value="LEAVE: mpi_fh=%1"
/>
<string
id="MPI_File_sync"
value="MPI_File_sync"
/>
<string
id="MPI_File_sync.enter"
value="ENTER: fh=%1"
/>
<string
id="MPI_File_get_amode"
value="MPI_File_get_amode"
/>
<string
id="MPI_File_get_amode.enter"
value="ENTER: fh=%1"
/>
<string
id="MPI_File_get_amode.leave"
value="LEAVE: rc=0, amode=%1"
/>
<string
id="MPI_File_get_atomicity"
value="MPI_File_get_atomicity"
/>
<string
id="MPI_File_get_atomicity.enter"
value="ENTER: fh=%1"
/>
<string
id="MPI_File_get_atomicity.leave"
value="LEAVE: flag=%1"
/>
<string
id="MPI_File_get_byte_offset"
value="MPI_File_get_byte_offset"
/>
<string
id="MPI_File_get_byte_offset.enter"
value="ENTER: fh=%1 offset=%2"
/>
<string
id="MPI_File_get_byte_offset.leave"
value="LEAVE: disp=%1"
/>
<string
id="MPI_File_get_type_extent"
value="MPI_File_get_type_extent"
/>
<string
id="MPI_File_get_type_extent.enter"
value="ENTER: fh=%1, datatype=%2"
/>
<string
id="MPI_File_get_type_extent.leave"
value="LEAVE: rc=0 extent=%1"
/>
<string
id="MPI_File_get_group"
value="MPI_File_get_group"
/>
<string
id="MPI_File_get_group.enter"
value="ENTER: fh=%1"
/>
<string
id="MPI_File_get_group.leave"
value="LEAVE: rc=0 group=%1"
/>
<string
id="MPI_File_get_info"
value="MPI_File_get_info"
/>
<string
id="MPI_File_get_info.enter"
value="ENTER: fh=%1"
/>
<string
id="MPI_File_get_info.leave"
value="LEAVE: rc=0 info=%1"
/>
<string
id="MPI_File_get_position"
value="MPI_File_get_position"
/>
<string
id="MPI_File_get_position.enter"
value="ENTER: fh=%1"
/>
<string
id="MPI_File_get_position.leave"
value="LEAVE: rc=0 position=%1"
/>
<string
id="MPI_File_get_position_shared"
value="MPI_File_get_position_shared"
/>
<string
id="MPI_File_get_position_shared.enter"
value="ENTER: fh=%1"
/>
<string
id="MPI_File_get_position_shared.leave"
value="LEAVE: position=%1"
/>
<string
id="MPI_File_get_size"
value="MPI_File_get_size"
/>
<string
id="MPI_File_get_size.enter"
value="ENTER: fh=%1"
/>
<string
id="MPI_File_get_size.leave"
value="LEAVE: size=%1"
/>
<string
id="MPI_File_get_view"
value="MPI_File_get_view"
/>
<string
id="MPI_File_get_view.enter"
value="ENTER: fh=%1"
/>
<string
id="MPI_File_get_view.leave"
value="LEAVE: disp=%1 filetype=%2 etype=%3 datarep=%4"
/>
<string
id="MPI_File_iread"
value="MPI_File_iread"
/>
<string
id="MPI_File_iread.enter"
value="ENTER: fh=%1, buf=%2, count=%3, datatype=%4"
/>
<string
id="MPI_File_iread.leave"
value="LEAVE: request=%1"
/>
<string
id="MPI_File_iread_at"
value="MPI_File_iread_at"
/>
<string
id="MPI_File_iread_at.enter"
value="ENTER: fh=%1, offset=%2, buf=%3, count=%4, datatype=%5"
/>
<string
id="MPI_File_iread_at.leave"
value="LEAVE: request=%1"
/>
<string
id="MPI_File_iread_shared"
value="MPI_File_iread_shared"
/>
<string
id="MPI_File_iread_shared.enter"
value="ENTER: fh=%1, buf=%2, count=%3, datatype=%4"
/>
<string
id="MPI_File_iread_shared.leave"
value="LEAVE: request=%1"
/>
<string
id="MPI_File_iwrite"
value="MPI_File_iwrite"
/>
<string
id="MPI_File_iwrite.enter"
value="ENTER: fh=%1, buf=%2, count=%3, datatype=%4"
/>
<string
id="MPI_File_iwrite.leave"
value="LEAVE: request=%1"
/>
<string
id="MPI_File_iwrite_at"
value="MPI_File_iwrite_at"
/>
<string
id="MPI_File_iwrite_at.enter"
value="ENTER: fh=%p, offset=%2, buf=%3, count=%4, datatype=%5"
/>
<string
id="MPI_File_iwrite_at.leave"
value="LEAVE: request=%1"
/>
<string
id="MPI_File_iwrite_shared"
value="MPI_File_iwrite_shared"
/>
<string
id="MPI_File_iwrite_shared.enter"
value="ENTER: fh=%1, buff=%2, count=%3, datatype=%4"
/>
<string
id="MPI_File_iwrite_shared.leave"
value="LEAVE: request=%1"
/>
<string
id="MPI_File_open"
value="MPI_File_open"
/>
<string
id="MPI_File_open.enter"
value="ENTER: comm=%1, filename=%2, amode=%3, info=%4"
/>
<string
id="MPI_File_open.leave"
value="LEAVE: fh=%1"
/>
<string
id="MPI_File_preallocate"
value="MPI_File_preallocate"
/>
<string
id="MPI_File_preallocate.enter"
value="ENTER: fh=%1, size=%2"
/>
<string
id="MPI_File_read_at_all_begin"
value="MPI_File_read_at_all_begin"
/>
<string
id="MPI_File_read_at_all_begin.enter"
value="ENTER: fh=%1, offset=%2, buf=%3, count=%4, datatype=%5"
/>
<string
id="MPI_File_read_at_all_end"
value="MPI_File_read_at_all_end"
/>
<string
id="MPI_File_read_at_all_end.enter"
value="ENTER: fh=%1, buf=%2"
/>
<string
id="MPI_File_read_at_all_end.leave"
value="LEAVE: status=%1"
/>
<string
id="MPI_File_read"
value="MPI_File_read"
/>
<string
id="MPI_File_read.enter"
value="ENTER: fh=%1, buf=%2, count=%3, datatype=%4"
/>
<string
id="MPI_File_read.leave"
value="LEAVE: status=%1"
/>
<string
id="MPI_File_read_all"
value="MPI_File_read_all"
/>
<string
id="MPI_File_read_all.enter"
value="BEGIN: fh=%1, buf=%2, count=%3, datatype=%4"
/>
<string
id="MPI_File_read_all.leave"
value="LEAVE: status=%1"
/>
<string
id="MPI_File_read_all_begin"
value="MPI_File_read_all_begin"
/>
<string
id="MPI_File_read_all_begin.enter"
value="ENTER: fh=%1, buf=%2, count=%3, datatype=%4"
/>
<string
id="MPI_File_read_all_end"
value="MPI_File_read_all_end"
/>
<string
id="MPI_File_read_all_end.enter"
value="ENTER: fh=%1, buf=%2"
/>
<string
id="MPI_File_read_all_end.leave"
value="LEAVE: status=%1"
/>
<string
id="MPI_File_read_at"
value="MPI_File_read_at"
/>
<string
id="MPI_File_read_at.enter"
value="ENTER: fh=%1, offset=%2, buf=%3, count=%4, datatype=%5"
/>
<string
id="MPI_File_read_at.leave"
value="LEAVE: status=%1"
/>
<string
id="MPI_File_read_at_all"
value="MPI_File_read_at_all"
/>
<string
id="MPI_File_read_at_all.enter"
value="ENTER: fh=%1, offset=%2, buf=%3, count=%4, datatype=%5"
/>
<string
id="MPI_File_read_at_all.leave"
value="LEAVE: status=%1"
/>
<string
id="MPI_File_read_ordered"
value="MPI_File_read_ordered"
/>
<string
id="MPI_File_read_ordered.enter"
value="ENTER: fh=%1, buf=%2, count=%3, datatype=%4"
/>
<string
id="MPI_File_read_ordered.leave"
value="LEAVE: status=%1"
/>
<string
id="MPI_File_read_ordered_begin"
value="MPI_File_read_ordered_begin"
/>
<string
id="MPI_File_read_ordered_begin.enter"
value="ENTER: fh=%1, buf=%2, count=%3, datatype=%4"
/>
<string
id="MPI_File_read_ordered_end"
value="MPI_File_read_ordered_end"
/>
<string
id="MPI_File_read_ordered_end.enter"
value="ENTER: fh=%1, buf=%2"
/>
<string
id="MPI_File_read_ordered_end.leave"
value="LEAVE: status=%1"
/>
<string
id="MPI_File_read_shared"
value="MPI_File_read_shared"
/>
<string
id="MPI_File_read_shared.enter"
value="ENTER: fh=%1, buf=%2, count=%3, datatype=%4"
/>
<string
id="MPI_File_read_shared.leave"
value="LEAVE: status=%1"
/>
<string
id="MPI_File_seek"
value="MPI_File_seek"
/>
<string
id="MPI_File_seek.enter"
value="ENTER: fh=%1, offset=%2, whence=%3"
/>
<string
id="MPI_File_seek_shared"
value="MPI_File_seek_shared"
/>
<string
id="MPI_File_seek_shared.enter"
value="ENTER: fh=%1, offset=%2, whence=%3"
/>
<string
id="MPI_File_set_atomicity"
value="MPI_File_set_atomicity"
/>
<string
id="MPI_File_set_atomicity.enter"
value="ENTER: fh=%1, flag=%2"
/>
<string
id="MPI_File_set_info"
value="MPI_File_set_info"
/>
<string
id="MPI_File_set_info.enter"
value="ENTER: fh=%1, info=%2"
/>
<string
id="MPI_File_set_size"
value="MPI_File_set_size"
/>
<string
id="MPI_File_set_size.enter"
value="ENTER: fh=%1, size=%2"
/>
<string
id="MPI_File_set_view"
value="MPI_File_set_view"
/>
<string
id="MPI_File_set_view.enter"
value="ENTER: fh=%1, disp=%2, etype=%3, filetype=%4, datarep=%5, info=%6"
/>
<string
id="MPI_File_write_at_all_begin"
value="MPI_File_write_at_all_begin"
/>
<string
id="MPI_File_write_at_all_begin.enter"
value="ENTER: fh=%1, offset=%2, buf=%3, count=%4, datatype=%5"
/>
<string
id="MPI_File_write_at_all_end"
value="MPI_File_write_at_all_end"
/>
<string
id="MPI_File_write_at_all_end.enter"
value="ENTER: fh=%1, buf=%2"
/>
<string
id="MPI_File_write_at_all_end.leave"
value="LEAVE: status=%1"
/>
<string
id="MPI_File_write"
value="MPI_File_write"
/>
<string
id="MPI_File_write.enter"
value="ENTER: fh=%1, buf=%2, count=%3, datatype=%4"
/>
<string
id="MPI_File_write.leave"
value="LEAVE: status=%1"
/>
<string
id="MPI_File_write_all"
value="MPI_File_write_all"
/>
<string
id="MPI_File_write_all.enter"
value="ENTER: fh=%1, buf=%2, count=%3, datatype=%4"
/>
<string
id="MPI_File_write_all.leave"
value="LEAVE: status=%1"
/>
<string
id="MPI_File_write_all_begin"
value="MPI_File_write_all_begin"
/>
<string
id="MPI_File_write_all_begin.enter"
value="ENTER: fh=%1, buf=%2, count=%3, datatype=%4"
/>
<string
id="MPI_File_write_all_end"
value="MPI_File_write_all_end"
/>
<string
id="MPI_File_write_all_end.enter"
value="ENTER: fh=%1, buf=%2"
/>
<string
id="MPI_File_write_all_end.leave"
value="LEAVE: status=%1"
/>
<string
id="MPI_File_write_at"
value="MPI_File_write_at"
/>
<string
id="MPI_File_write_at.enter"
value="ENTER: fh=%1, offset=%2, buf=%3, count=%4, datatype=%5"
/>
<string
id="MPI_File_write_at.leave"
value="LEAVE: status=%1"
/>
<string
id="MPI_File_write_at_all"
value="MPI_File_write_at_all"
/>
<string
id="MPI_File_write_at_all.enter"
value="ENTER: fh=%1, offset=%2, buf=%3, count=%4, datatype=%5"
/>
<string
id="MPI_File_write_at_all.leave"
value="LEAVE: status=%1"
/>
<string
id="MPI_File_write_ordered"
value="MPI_File_write_ordered"
/>
<string
id="MPI_File_write_ordered.enter"
value="ENTER: ENTER: fh=%1, buf=%2, count=%3, datatype=%4"
/>
<string
id="MPI_File_write_ordered.leave"
value="LEAVE: status=%1"
/>
<string
id="MPI_File_write_ordered_begin"
value="MPI_File_write_ordered_begin"
/>
<string
id="MPI_File_write_ordered_begin.enter"
value="ENTER: ENTER: fh=%1, buf=%2, count=%3, datatype=%4"
/>
<string
id="MPI_File_write_ordered_end"
value="MPI_File_write_ordered_end"
/>
<string
id="MPI_File_write_ordered_end.enter"
value="ENTER: ENTER: fh=%1, buf=%2"
/>
<string
id="MPI_File_write_ordered_end.leave"
value="LEAVE: status=%1"
/>
<string
id="MPI_File_write_shared"
value="MPI_File_write_shared"
/>
<string
id="MPI_File_write_shared.enter"
value="ENTER: fh=%1, buf=%2, count=%3, datatype=%4"
/>
<string
id="MPI_File_write_shared.leave"
value="LEAVE: status=%1"
/>
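<!-- Dynamic process (spawn/connect) strings -->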
<string
id="MPI_Close_port"
value="MPI_Close_port"
/>
<string
id="MPI_Close_port.enter"
value="ENTER: port_name=%1"
/>
<string
id="MPI_Comm_accept"
value="MPI_Comm_accept"
/>
<string
id="MPI_Comm_accept.enter"
value="ENTER: port_name=%1, info=%2, root=%3, comm=%4"
/>
<string
id="MPI_Comm_accept.leave"
value="LEAVE: rc=0 newcomm=%1"
/>
<string
id="MPI_Comm_connect"
value="MPI_Comm_connect"
/>
<string
id="MPI_Comm_connect.enter"
value="ENTER: port_name=%1, info=%2, root=%3, comm=%4"
/>
<string
id="MPI_Comm_connect.leave"
value="LEAVE: rc=0 newcomm=%1"
/>
<string
id="MPI_Comm_disconnect"
value="MPI_Comm_disconnect"
/>
<string
id="MPI_Comm_disconnect.enter"
value="ENTER: comm=%1"
/>
<string
id="MPI_Comm_get_parent"
value="MPI_Comm_get_parent"
/>
<string
id="MPI_Comm_get_parent.enter"
value="ENTER: "
/>
<string
id="MPI_Comm_get_parent.leave"
value="LEAVE: parent=%1"
/>
<string
id="MPI_Comm_join"
value="MPI_Comm_join"
/>
<string
id="MPI_Comm_join.enter"
value="ENTER: fd=%1"
/>
<string
id="MPI_Comm_join.leave"
value="LEAVE: rc=0 intercomm=%1"
/>
<string
id="MPI_Comm_spawn"
value="MPI_Comm_spawn"
/>
<string
id="MPI_Comm_spawn.enter"
value="ENTER: command=%1, argv=%2, maxprocs=%3, info=%4, root=%5, comm=%6"
/>
<string
id="MPI_Comm_spawn.leave"
value="LEAVE: rc=0 intercomm=%1"
/>
<string
id="MPI_Comm_spawn_multiple"
value="MPI_Comm_spawn_multiple"
/>
<string
id="MPI_Comm_spawn_multiple.enter"
value="ENTER: count=%1, array_of_commands=%3, array_of_argv=%5, array_of_maxprocs=%7, array_of_info=%8, root=%9, comm=%10"
/>
<string
id="MPI_Comm_spawn_multiple.leave"
value="LEAVE: rc=0 intercomm=%1"
/>
<string
id="MPI_Lookup_name"
value="MPI_Lookup_name"
/>
<string
id="MPI_Lookup_name.enter"
value="ENTER: service_name=%1"
/>
<string
id="MPI_Lookup_name.leave"
value="LEAVE: rc=0 port_name=%1"
/>
<string
id="MPI_Open_port"
value="MPI_Open_port"
/>
<string
id="MPI_Open_port.enter"
value="ENTER: open-port"
/>
<string
id="MPI_Open_port.leave"
value="LEAVE: rc=0 port_name=%1"
/>
<string
id="MPI_Publish_name"
value="MPI_Publish_name"
/>
<string
id="MPI_Publish_name.enter"
value="ENTER: service_name=%1, info=%2, port_name=%3"
/>
<string
id="MPI_Unpublish_name"
value="MPI_Unpublish_name"
/>
<string
id="MPI_Unpublish_name.enter"
value="ENTER: service_name=%1, info=%2, port_name=%3"
/>
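<!-- Topology strings -->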
<string
id="MPI_Cart_coords"
value="MPI_Cart_coords"
/>
<string
id="MPI_Cart_coords.enter"
value="ENTER: comm=%1 rank=%2 maxdims=%3"
/>
<string
id="MPI_Cart_coords.leave"
value="LEAVE: ndims=%1, coords=%3"
/>
<string
id="MPI_Cart_create"
value="MPI_Cart_create"
/>
<string
id="MPI_Cart_create.enter"
value="ENTER: comm_old=%1, ndims=%2, dims=%4, periods=%6, reorder=%7"
/>
<string
id="MPI_Cart_create.leave"
value="LEAVE: comm_cart=%1"
/>
<string
id="MPI_Cart_get"
value="MPI_Cart_get"
/>
<string
id="MPI_Cart_get.enter"
value="ENTER: comm=%1, maxdims=%2"
/>
<string
id="MPI_Cart_get.leave"
value="LEAVE: rc=0 ndims=%1, dims=%3, periods=%5, coords=%7"
/>
<string
id="MPI_Cart_map"
value="MPI_Cart_map"
/>
<string
id="MPI_Cart_map.enter"
value="ENTER: comm=%1, ndims=%2, dims=%4, perodic=%6"
/>
<string
id="MPI_Cart_map.leave"
value="LEAVE: rc=0 newrank=%1"
/>
<string
id="MPI_Cart_rank"
value="MPI_Cart_rank"
/>
<string
id="MPI_Cart_rank.enter"
value="ENTER: comm=%1, coords=%3"
/>
<string
id="MPI_Cart_rank.leave"
value="LEAVE: rc=0 rank=%1"
/>
<string
id="MPI_Cart_shift"
value="MPI_Cart_shift"
/>
<string
id="MPI_Cart_shift.enter"
value="ENTER: comm=%1, direction=%2, displ=%3"
/>
<string
id="MPI_Cart_shift.leave"
value="LEAVE: rc=0 source=%1 dest=%2"
/>
<string
id="MPI_Cart_sub"
value="MPI_Cart_sub"
/>
<string
id="MPI_Cart_sub.enter"
value="ENTER: comm=%1, remain_dims=%3"
/>
<string
id="MPI_Cart_sub.leave"
value="LEAVE: rc=0 newcomm=%1"
/>
<string
id="MPI_Cartdim_get"
value="MPI_Cartdim_get"
/>
<string
id="MPI_Cartdim_get.enter"
value="ENTER: comm=%1"
/>
<string
id="MPI_Cartdim_get.leave"
value="LEAVE: rc=0 ndims=%1"
/>
<string
id="MPI_Dims_create"
value="MPI_Dims_create"
/>
<string
id="MPI_Dims_create.enter"
value="ENTER: nnodes=%1 ndims=%2"
/>
<string
id="MPI_Graph_get"
value="MPI_Graph_get"
/>
<string
id="MPI_Graph_get.enter"
value="ENTER: comm=%1 maxindes=%2 maxedges=%3"
/>
<string
id="MPI_Graph_get.leave"
value="LEAVE: rc=0 index=%1 edges=%2"
/>
<string
id="MPI_Graph_map"
value="MPI_Graph_map"
/>
<string
id="MPI_Graph_map.enter"
value="ENTER: comm=%1 nnodes=%2"
/>
<string
id="MPI_Graph_map.leave"
value="LEAVE: rc=0 newrank=%1"
/>
<string
id="MPI_Graph_neighbors"
value="MPI_Graph_neighbors"
/>
<string
id="MPI_Graph_neighbors.enter"
value="ENTER: comm=%1 rank=%2 maxneighbors=%3"
/>
<string
id="MPI_Graph_neighbors.leave"
value="ENTER: rc=0 neighbors=%1"
/>
<string
id="MPI_Graph_create"
value="MPI_Graph_create"
/>
<string
id="MPI_Graph_create.enter"
value="ENTER: comm=%1 nnodes=%2 reorder=%3"
/>
<string
id="MPI_Graph_create.leave"
value="LEAVE: rc=0 newcomm=%1"
/>
<string
id="MPI_Graphdims_get"
value="MPI_Graphdims_get"
/>
<string
id="MPI_Graphdims_get.enter"
value="ENTER: comm=%1"
/>
<string
id="MPI_Graphdims_get.leave"
value="LEAVE: rc=0 nnodes=%1 nedges=%2"
/>
<string
id="MPI_Graph_neighbors_count"
value="MPI_Graph_neighbors_count"
/>
<string
id="MPI_Graph_neighbors_count.enter"
value="ENTER: comm=%1 rank=%2"
/>
<string
id="MPI_Graph_neighbors_count.leave"
value="LEAVE: rc=0 nneighbors=%1"
/>
<string
id="MPI_Dist_graph_neighbors_count"
value="MPI_Dist_graph_neighbors_count"
/>
<string
id="MPI_Dist_graph_neighbors_count.enter"
value="ENTER: comm=%1"
/>
<string
id="MPI_Dist_graph_neighbors_count.leave"
value="LEAVE: rc=0 indegree=%1 outdegree=%2 weighted=%3"
/>
<string
id="MPI_Dist_graph_neighbors"
value="MPI_Dist_graph_neighbors"
/>
<string
id="MPI_Dist_graph_neighbors.enter"
value="ENTER: comm=%1 maxindegree=%2 maxoutdegree=%3"
/>
<string
id="MPI_Dist_graph_neighbors.leave"
value="LEAVE: rc=0 sources=%1 sourceweights=%2 destinations=%3 destweights=%4"
/>
<string
id="MPI_Dist_graph_create_adjacent"
value="MPI_Dist_graph_create_adjacent"
/>
<string
id="MPI_Dist_graph_create_adjacent.enter"
value="ENTER: comm=%1 indegree=%2 outdegree=%3 info=%4 reorder=%5"
/>
<string
id="MPI_Dist_graph_create_adjacent.leave"
value="LEAVE: rc=0 comm_dist_graph=%1"
/>
<string
id="MPI_Dist_graph_create"
value="MPI_Dist_graph_create"
/>
<string
id="MPI_Dist_graph_create.enter"
value="ENTER: comm=%1 n=%2 info=%3 reorder=%4"
/>
<string
id="MPI_Dist_graph_create.leave"
value="LEAVE: rc=0 comm_dist_graph=%1"
/>
<string
id="MPI_Topo_test"
value="MPI_Topo_test"
/>
<string
id="MPI_Topo_test.enter"
value="ENTER: comm=%1"
/>
<string
id="MPI_Topo_test.leave"
value="LEAVE: rc=0 topo_type=%1"
/>
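<!-- Channel send/recv strings -->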
<string
id="send.nd"
value="ND: Send"
/>
<string
id="recv.nd"
value="ND: Recv"
/>
<string
id="send.sock"
value="SOCK: Send"
/>
<string
id="recv.sock"
value="SOCK: Recv"
/>
<string
id="send.shm"
value="SHM: Send"
/>
<string
id="recv.shm"
value="SHM: Recv"
/>
<string
id="send.msg"
value="MSG: Send"
/>
<string
id="recv.msg"
value="MSG: Recv"
/>
<string
id="queue"
value="queu"
/>
<string
id="connect"
value="qcon"
/>
<string
id="inline"
value="inln"
/>
<string
id="continue"
value="cont"
/>
<string
id="done"
value="done"
/>
<string
id="head"
value="head"
/>
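<!-- Error class strings -->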
<string
id="MPI_ERR_ACCESS"
value="MPI_ERR_ACCESS (20) : Premission denied"
/>
<string
id="MPI_ERR_AMODE"
value="MPI_ERR_AMODE (21) : Error related to amode passed to MPI_File_open"
/>
<string
id="MPI_ERR_ARG"
value="MPI_ERR_ARG (12) : Invalid argument"
/>
<string
id="MPI_ERR_ASSERT"
value="MPI_ERR_ASSERT (53) : Invalid assert argument"
/>
<string
id="MPI_ERR_BAD_FILE"
value="MPI_ERR_BAD_FILE (22) : Invalid file name (e.g., path name too long)"
/>
<string
id="MPI_ERR_BASE"
value="MPI_ERR_BASE (46) : Invalid base passed to MPI_Free_mem"
/>
<string
id="MPI_ERR_BUFFER"
value="MPI_ERR_BUFFER (1) : Invalid buffer pointer"
/>
<string
id="MPI_ERR_COMM"
value="MPI_ERR_COMM (5) : Invalid communicator"
/>
<string
id="MPI_ERR_CONVERSION"
value="MPI_ERR_CONVERSION (23) : Error in user data conversion function"
/>
<string
id="MPI_ERR_COUNT"
value="MPI_ERR_COUNT (2) : Invalid count argument"
/>
<string
id="MPI_ERR_DIMS"
value="MPI_ERR_DIMS (11) : Invalid dimension argument"
/>
<string
id="MPI_ERR_DISP"
value="MPI_ERR_DISP (52) : Invalid disp argument"
/>
<string
id="MPI_ERR_DUP_DATAREP"
value="MPI_ERR_DUP_DATAREP (24) : Data representation identifier already registered"
/>
<string
id="MPI_ERR_FILE"
value="MPI_ERR_FILE (27) : Invalid file handle"
/>
<string
id="MPI_ERR_FILE_EXISTS"
value="MPI_ERR_FILE_EXISTS (25) : File exists"
/>
<string
id="MPI_ERR_FILE_IN_USE"
value="MPI_ERR_FILE_IN_USE (26) : File operation could not be completed, file in use"
/>
<string
id="MPI_ERR_GROUP"
value="MPI_ERR_GROUP (8) : Invalid group"
/>
<string
id="MPI_ERR_IN_STATUS"
value="MPI_ERR_IN_STATUS (17) : Error code is in status"
/>
<string
id="MPI_ERR_INFO"
value="MPI_ERR_INFO (28) : Invalid info argument"
/>
<string
id="MPI_ERR_INFO_KEY"
value="MPI_ERR_INFO_KEY (29) : Key longer than MPI_MAX_INFO_KEY"
/>
<string
id="MPI_ERR_INFO_NOKEY"
value="MPI_ERR_INFO_NOKEY (31) : Invalid key passed to MPI_Info_delete"
/>
<string
id="MPI_ERR_INFO_VALUE"
value="MPI_ERR_INFO_VALUE (30) : Value longer than MPI_MAX_INFO_VAL"
/>
<string
id="MPI_ERR_INTERN"
value="MPI_ERR_INTERN (16) : Internal error code"
/>
<string
id="MPI_ERR_IO"
value="MPI_ERR_IO (32) : Other I/O error"
/>
<string
id="MPI_ERR_KEYVAL"
value="MPI_ERR_KEYVAL (48) : Invalid keyval"
/>
<string
id="MPI_ERR_LOCKTYPE"
value="MPI_ERR_LOCKTYPE (47) : Invalid locktype argument"
/>
<string
id="MPI_ERR_NAME"
value="MPI_ERR_NAME (33) : Invalid service name in MPI_Lookup_name"
/>
<string
id="MPI_ERR_NO_MEM"
value="MPI_ERR_NO_MEM (34) : Alloc_mem could not allocate memory"
/>
<string
id="MPI_ERR_NO_SPACE"
value="MPI_ERR_NO_SPACE (36) : Not enough space"
/>
<string
id="MPI_ERR_NO_SUCH_FILE"
value="MPI_ERR_NO_SUCH_FILE (37) : File does not exist"
/>
<string
id="MPI_ERR_NOT_SAME"
value="MPI_ERR_NOT_SAME (35) : Collective argument/sequence not the same on all processes"
/>
<string
id="MPI_ERR_OP"
value="MPI_ERR_OP (9) : Invalid operation"
/>
<string
id="MPI_ERR_OTHER"
value="MPI_ERR_OTHER (15) : Other error; use Error_string"
/>
<string
id="MPI_ERR_PENDING"
value="MPI_ERR_PENDING (18) : Pending request"
/>
<string
id="MPI_ERR_PORT"
value="MPI_ERR_PORT (38) : Invalid port name in MPI_comm_connect"
/>
<string
id="MPI_ERR_QUOTA"
value="MPI_ERR_QUOTA (39) : Quota exceeded"
/>
<string
id="MPI_ERR_RANK"
value="MPI_ERR_RANK (6) : Invalid rank"
/>
<string
id="MPI_ERR_READ_ONLY"
value="MPI_ERR_READ_ONLY (40) : Read-only file or file system"
/>
<string
id="MPI_ERR_REQUEST"
value="MPI_ERR_REQUEST (19) : Invalid request (handle)"
/>
<string
id="MPI_ERR_RMA_CONFLICT"
value="MPI_ERR_RMA_CONFLICT (49) : Conflicting accesses to window"
/>
<string
id="MPI_ERR_RMA_SYNC"
value="MPI_ERR_RMA_SYNC (50) : Wrong synchronization of RMA calls"
/>
<string
id="MPI_ERR_ROOT"
value="MPI_ERR_ROOT (7) : Invalid root"
/>
<string
id="MPI_ERR_SERVICE"
value="MPI_ERR_SERVICE (41) : Invalid service name in MPI_Unpublish_name"
/>
<string
id="MPI_ERR_SIZE"
value="MPI_ERR_SIZE (51) : Invalid size argument"
/>
<string
id="MPI_ERR_SPAWN"
value="MPI_ERR_SPAWN (42) : Error in spawning processes"
/>
<string
id="MPI_ERR_TAG"
value="MPI_ERR_TAG (4) : Invalid tag argument"
/>
<string
id="MPI_ERR_TOPOLOGY"
value="MPI_ERR_TOPOLOGY (10) : Invalid topology"
/>
<string
id="MPI_ERR_TRUNCATE"
value="MPI_ERR_TRUNCATE (14) : Message truncated on receive"
/>
<string
id="MPI_ERR_TYPE"
value="MPI_ERR_TYPE (3) : Invalid datatype argument"
/>
<string
id="MPI_ERR_UNKNOWN"
value="MPI_ERR_UNKNOWN (13) : Unknown error"
/>
<string
id="MPI_ERR_UNSUPPORTED_DATAREP"
value="MPI_ERR_UNSUPPORTED_DATAREP (43) : Unsupported dararep in MPI_File_set_view"
/>
<string
id="MPI_ERR_UNSUPPORTED_OPERATION"
value="MPI_ERR_UNSUPPORTED_OPERATION (44) : Unsupported operation on file"
/>
<string
id="MPI_ERR_WIN"
value="MPI_ERR_WIN (45) : Invalid win argument"
/>
<string
id="MPI_SUCCESS"
value="MPI_SUCCESS (0) : Successful return code"
/>
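<!-- Predefined datatype handle strings -->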
<string
id="MPI_2COMPLEX"
value="MPI_2COMPLEX (0x4c001024)"
/>
<string
id="MPI_2DOUBLE_COMPLEX"
value="MPI_2DOUBLE_COMPLEX (0x4c002025)"
/>
<string
id="MPI_2DOUBLE_PRECISION"
value="MPI_2DOUBLE_PRECISION (0x4c001023)"
/>
<string
id="MPI_2INT"
value="MPI_2INT (0x4c000816)"
/>
<string
id="MPI_2INTEGER"
value="MPI_2INTEGER (0x4c000820)"
/>
<string
id="MPI_2REAL"
value="MPI_2REAL (0x4c000821)"
/>
<string
id="MPI_BYTE"
value="MPI_BYTE (0x4c00010d)"
/>
<string
id="MPI_CHAR"
value="MPI_CHAR (0x4c000101)"
/>
<string
id="MPI_CHARACTER"
value="MPI_CHARACTER (0x4c00011a)"
/>
<string
id="MPI_COMPLEX"
value="MPI_COMPLEX (0x4c00081e)"
/>
<string
id="MPI_COMPLEX16"
value="MPI_COMPLEX16 (0x4c00102a)"
/>
<string
id="MPI_COMPLEX32"
value="MPI_COMPLEX32 (0x4c00202c)"
/>
<string
id="MPI_COMPLEX8"
value="MPI_COMPLEX8 (0x4c000828)"
/>
<string
id="MPI_DOUBLE"
value="MPI_DOUBLE (0x4c00080b)"
/>
<string
id="MPI_DOUBLE_COMPLEX"
value="MPI_DOUBLE_COMPLEX (0x4c001022)"
/>
<string
id="MPI_DOUBLE_INT"
value="MPI_DOUBLE_INT (0x8c000001)"
/>
<string
id="MPI_DOUBLE_PRECISION"
value="MPI_DOUBLE_PRECISION (0x4c00081f)"
/>
<string
id="MPI_FLOAT"
value="MPI_FLOAT (0x4c00040a)"
/>
<string
id="MPI_FLOAT_INT"
value="MPI_FLOAT_INT (0x8c000000)"
/>
<string
id="MPI_INT"
value="MPI_INT (0x4c000405)"
/>
<string
id="MPI_INTEGER"
value="MPI_INTEGER (0x4c00041b)"
/>
<string
id="MPI_INTEGER1"
value="MPI_INTEGER1 (0x4c00012d)"
/>
<string
id="MPI_INTEGER16"
value="MPI_INTEGER16 (0x4c001032)"
/>
<string
id="MPI_INTEGER2"
value="MPI_INTEGER2 (0x4c00022f)"
/>
<string
id="MPI_INTEGER4"
value="MPI_INTEGER4 (0x4c000430)"
/>
<string
id="MPI_INTEGER8"
value="MPI_INTEGER8 (0x4c000831)"
/>
<string
id="MPI_LB"
value="MPI_LB (0x4c000010)"
/>
<string
id="MPI_LOGICAL"
value="MPI_LOGICAL (0x4c00041d)"
/>
<string
id="MPI_LONG"
value="MPI_LONG (0x4c000407)"
/>
<string
id="MPI_LONG_DOUBLE"
value="MPI_LONG_DOUBLE (0x4c00080c)"
/>
<string
id="MPI_LONG_DOUBLE_INT"
value="MPI_LONG_DOUBLE_INT (0x8c000004)"
/>
<string
id="MPI_LONG_INT"
value="MPI_LONG_INT (0x8c000002)"
/>
<string
id="MPI_LONG_LONG_INT"
value="MPI_LONG_LONG_INT (0x4c000809)"
/>
<string
id="MPI_PACKED"
value="MPI_PACKED (0x4c00010f)"
/>
<string
id="MPI_REAL"
value="MPI_REAL (0x4c00041c)"
/>
<string
id="MPI_REAL16"
value="MPI_REAL16 (0x4c00102b)"
/>
<string
id="MPI_REAL4"
value="MPI_REAL4 (0x4c000427)"
/>
<string
id="MPI_REAL8"
value="MPI_REAL8 (0x4c000829)"
/>
<string
id="MPI_SHORT"
value="MPI_SHORT (0x4c000203)"
/>
<string
id="MPI_SHORT_INT"
value="MPI_SHORT_INT (0x8c000003)"
/>
<string
id="MPI_SIGNED_CHAR"
value="MPI_SIGNED_CHAR (0x4c000118)"
/>
<string
id="MPI_UB"
value="MPI_UB (0x4c000011)"
/>
<string
id="MPI_UNSIGNED"
value="MPI_UNSIGNED (0x4c000406)"
/>
<string
id="MPI_UNSIGNED_CHAR"
value="MPI_UNSIGNED_CHAR (0x4c000102)"
/>
<string
id="MPI_UNSIGNED_LONG"
value="MPI_UNSIGNED_LONG (0x4c000408)"
/>
<string
id="MPI_UNSIGNED_LONG_LONG"
value="MPI_UNSIGNED_LONG_LONG (0x4c000819)"
/>
<string
id="MPI_UNSIGNED_SHORT"
value="MPI_UNSIGNED_SHORT (0x4c000204)"
/>
<string
id="MPI_WCHAR"
value="MPI_WCHAR (0x4c00020e)"
/>
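<!-- Predefined reduction operation handles. The 0x58 prefix appears to mark a
built-in handle of object class 6 (op) in the same encoding as the datatype
handles above. -->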
<string
id="MPI_BAND"
value="MPI_BAND (0x58000006)"
/>
<string
id="MPI_BOR"
value="MPI_BOR (0x58000008)"
/>
<string
id="MPI_BXOR"
value="MPI_BXOR (0x5800000a)"
/>
<string
id="MPI_LAND"
value="MPI_LAND (0x58000005)"
/>
<string
id="MPI_LOR"
value="MPI_LOR (0x58000007)"
/>
<string
id="MPI_LXOR"
value="MPI_LXOR (0x58000009)"
/>
<string
id="MPI_MAX"
value="MPI_MAX (0x58000001)"
/>
<string
id="MPI_MAXLOC"
value="MPI_MAXLOC (0x5800000c)"
/>
<string
id="MPI_MIN"
value="MPI_MIN (0x58000002)"
/>
<string
id="MPI_MINLOC"
value="MPI_MINLOC (0x5800000b)"
/>
<string
id="MPI_PROD"
value="MPI_PROD (0x58000004)"
/>
<string
id="MPI_REPLACE"
value="MPI_REPLACE (0x5800000d)"
/>
<string
id="MPI_SUM"
value="MPI_SUM (0x58000003)"
/>
<string
id="MPI_OP_NULL"
value="MPI_OP_NULL (0x18000000)"
/>
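<!-- Predefined communicator, window, info, request, group, and error-handler
handles. Null handles clear the top two bits (e.g. MPI_COMM_NULL, 0x04000000);
built-in objects set them to 01 (e.g. MPI_COMM_WORLD, 0x44000000). Note that
MPI_MESSAGE_NULL shares the request object class, which is why its value
matches MPI_REQUEST_NULL. -->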
<string
id="MPI_COMM_NULL"
value="MPI_COMM_NULL (0x04000000)"
/>
<string
id="MPI_COMM_WORLD"
value="MPI_COMM_WORLD (0x44000000)"
/>
<string
id="MPI_COMM_SELF"
value="MPI_COMM_SELF (0x44000001)"
/>
<string
id="MPI_WIN_NULL"
value="MPI_WIN_NULL (0x20000000)"
/>
<string
id="MPI_INFO_NULL"
value="MPI_INFO_NULL (0x1c000000)"
/>
<string
id="MPI_REQUEST_NULL"
value="MPI_REQUEST_NULL (0x2c000000)"
/>
<string
id="MPI_GROUP_NULL"
value="MPI_GROUP_NULL (0x08000000)"
/>
<string
id="MPI_GROUP_EMPTY"
value="MPI_GROUP_EMPTY (0x48000000)"
/>
<string
id="MPI_ERRHANDLER_NULL"
value="MPI_ERRHANDLER_NULL (0x14000000)"
/>
<string
id="MPI_ERRORS_ARE_FATAL"
value="MPI_ERRORS_ARE_FATAL (0x54000000)"
/>
<string
id="MPI_ERRORS_RETURN"
value="MPI_ERRORS_RETURN (0x54000001)"
/>
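<!-- Sentinel rank and tag constants. These are small negative integers printed
as unsigned 32-bit hex: MPI_PROC_NULL and MPI_ANY_TAG are -1 (0xFFFFFFFF),
MPI_ANY_SOURCE is -2 (0xFFFFFFFE), and MPI_ROOT is -3 (0xFFFFFFFD). -->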
<string
id="MPI_PROC_NULL"
value="MPI_PROC_NULL (0xFFFFFFFF)"
/>
<string
id="MPI_ANY_SOURCE"
value="MPI_ANY_SOURCE (0xFFFFFFFE)"
/>
<string
id="MPI_ROOT"
value="MPI_ROOT (0xFFFFFFFD)"
/>
<string
id="MPI_ANY_TAG"
value="MPI_ANY_TAG (0xFFFFFFFF)"
/>
<string
id="MPI_MESSAGE_NULL"
value="MPI_MESSAGE_NULL (0x2c000000)"
/>
<string
id="MPI_MESSAGE_NO_PROC"
value="MPI_MESSAGE_NO_PROC (0x6c000000)"
/>
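<!-- API trace format strings. Each traced API has an ENTER/LEAVE pair; a %n
placeholder substitutes the n-th data item of the event's template, so an index
may be skipped where the template carries a field (such as an array length)
that the message does not print. -->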
<string
id="MSMPI_Queuelock_acquire"
value="MSMPI_Queuelock_acquire"
/>
<string
id="MSMPI_Queuelock_acquire.enter"
value="ENTER"
/>
<string
id="MSMPI_Queuelock_acquire.leave"
value="LEAVE: status=%1"
/>
<string
id="MSMPI_Queuelock_release"
value="MSMPI_Queuelock_release"
/>
<string
id="MSMPI_Queuelock_release.enter"
value="ENTER"
/>
<string
id="MSMPI_Queuelock_release.leave"
value="LEAVE: status=%1"
/>
<string
id="MSMPI_Request_set_apc"
value="MPI_Request_set_apc"
/>
<string
id="MSMPI_Request_set_apc.enter"
value="ENTER: request=%1, callback_fn=%2, callback_status=%3"
/>
<string
id="MSMPI_Waitsome_interruptible"
value="MSMPI_Waitsome_interruptible"
/>
<string
id="MSMPI_Waitsome_interruptible.enter"
value="ENTER: incount=%1"
/>
<string
id="MSMPI_Waitsome_interruptible.leave"
value="LEAVE: rc=0 outcount=%1"
/>
<string
id="MPI_Type_create_hindexed_block"
value="MPI_Type_create_hindexed_block"
/>
<string
id="MPI_Type_create_hindexed_block.enter"
value="ENTER: count=%1, blocklength=%2, array_of_displacements=%4, oldtype=%5"
/>
<string
id="MPI_Type_create_hindexed_block.leave"
value="LEAVE: rc=0 newtype=%1"
/>
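<!-- Deferred-request strings for the sock, shm, and nd channels, logged when a
connect or write request cannot complete immediately and is deferred; the
Rank, VCRank, and MessageID fields identify the affected virtual connection
and message. -->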
<string
id="sock.defer"
value="Deferred Socket Request"
/>
<string
id="shm.defer"
value="Deferred SHM Request"
/>
<string
id="nd.defer"
value="Deferred ND Request"
/>
<string
id="deferwrite"
value="Deferred Write Request"
/>
<string
id="deferconnect"
value="Deferred Connect Request"
/>
<string
id="nd.defer.connect"
value="Deferred ND Connection Rank=%1, VCRank=%2, MessageID=%3"
/>
<string
id="sock.defer.connect"
value="Deferred Socket Connection Rank=%1, VCRank=%2, MessageID=%3"
/>
<string
id="shm.defer.connect"
value="Deferred SHM Connection Rank=%1, VCRank=%2, MessageID=%3"
/>
<string
id="nd.defer.write"
value="Deferred ND Write Rank=%1, VCRank=%2, MessageID=%3"
/>
<string
id="sock.defer.write"
value="Deferred Socket Write Rank=%1, VCRank=%2, MessageID=%3"
/>
<string
id="shm.defer.write"
value="Deferred SHM Write Rank=%1, VCRank=%2, MessageID=%3"
/>
</stringTable>
</resources>
</localization>
</instrumentationManifest>