@@ -307,7 +307,7 @@ hsa_status_t Device::iterateAgentCallback(hsa_agent_t agent, void* data) {
   }

   if (dev_type == HSA_DEVICE_TYPE_CPU) {
-    AgentInfo info = {agent, {0}, {0}, {0}};
+    AgentInfo info = {agent, {0}, {0}, {0}, {0}, static_cast<uint32_t>(cpu_agents_.size())};
     stat = Hsa::agent_iterate_memory_pools(agent, Device::iterateCpuMemoryPoolCallback,
                                            reinterpret_cast<void*>(&info));
     if (stat == HSA_STATUS_SUCCESS) {
@@ -2014,21 +2014,23 @@ hsa_amd_memory_pool_t Device::getHostMemoryPool(MemorySegment mem_seg,
 
 // ================================================================================================
 void* Device::hostAlloc(size_t size, size_t alignment, MemorySegment mem_seg,
-                        const void* agentInfo) const {
+                        const void* agent_info) const {
   void* ptr = nullptr;
   uint32_t memFlags = 0;
   if (mem_seg == kKernArg) {
     memFlags |= HSA_AMD_MEMORY_POOL_EXECUTABLE_FLAG;
   }
-
-  hsa_amd_memory_pool_t pool =
-      getHostMemoryPool(mem_seg, static_cast<const amd::roc::AgentInfo*>(agentInfo));
+  auto cpu_agent_info = static_cast<const amd::roc::AgentInfo*>(agent_info);
+  if (cpu_agent_info == nullptr) {
+    cpu_agent_info = cpu_agent_info_;
+  }
+  hsa_amd_memory_pool_t pool = getHostMemoryPool(mem_seg, cpu_agent_info);
   hsa_status_t stat = Hsa::memory_pool_allocate(pool, size, memFlags, &ptr);
 
   ClPrint(amd::LOG_DEBUG, amd::LOG_MEM,
           "Allocate hsa host memory %p, size 0x%zx,"
-          " numa_node = %d, mem_seg = %d",
-          ptr, size, preferred_numa_node_, static_cast<int>(mem_seg));
+          " numa_node = %u, mem_seg = %d",
+          ptr, size, cpu_agent_info->id, static_cast<int>(mem_seg));
   if (stat != HSA_STATUS_SUCCESS) {
     LogPrintfError("Fail allocation host memory with err %d", stat);
     return nullptr;
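
The change above gives hostAlloc a default CPU agent whenever the caller passes a null agent pointer, and the debug log now reports that agent's id instead of the old preferred_numa_node_ field. Below is a minimal standalone sketch of that fallback pattern, assuming made-up names (AgentInfoSketch, DeviceSketch) and using std::malloc as a stand-in for the HSA memory-pool allocation; it is an illustration of the null-check flow, not the actual ROCm implementation.

// Sketch only: AgentInfoSketch/DeviceSketch and the std::malloc stand-in are
// assumptions for illustration, not the real amd::roc types or HSA calls.
#include <cstdint>
#include <cstdio>
#include <cstdlib>

struct AgentInfoSketch {
  uint32_t id;  // index assigned at discovery time (cf. cpu_agents_.size())
};

class DeviceSketch {
 public:
  explicit DeviceSketch(const AgentInfoSketch* default_cpu)
      : cpu_agent_info_(default_cpu) {}

  // Mirrors the diff: a null agent_info selects the device's default CPU agent.
  void* hostAlloc(size_t size, const void* agent_info) const {
    auto cpu_agent_info = static_cast<const AgentInfoSketch*>(agent_info);
    if (cpu_agent_info == nullptr) {
      cpu_agent_info = cpu_agent_info_;  // fall back to the default CPU agent
    }
    void* ptr = std::malloc(size);       // stand-in for the HSA pool allocation
    std::printf("Allocate host memory %p, size 0x%zx, numa_node = %u\n",
                ptr, size, cpu_agent_info->id);
    return ptr;
  }

 private:
  const AgentInfoSketch* cpu_agent_info_;  // default CPU agent for this device
};

int main() {
  AgentInfoSketch cpu0{0};
  AgentInfoSketch cpu1{1};
  DeviceSketch dev(&cpu0);

  void* a = dev.hostAlloc(256, nullptr);  // no agent given: falls back to id 0
  void* b = dev.hostAlloc(256, &cpu1);    // explicit agent: logs id 1
  std::free(a);
  std::free(b);
  return 0;
}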