Lines Matching refs:device
84 void hostSignal (const DeviceInterface& vk, const VkDevice& device, VkSemaphore semaphore, const deUint64 timelineValue)
94 VK_CHECK(vk.signalSemaphore(device, &ssi));
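The body of hostSignal() is elided by the reference listing; below is a minimal sketch of the idiom, assuming the structure filled in before the call at line 94 is the core Vulkan 1.2 VkSemaphoreSignalInfo (the variable name ssi comes from the listing, the rest is illustrative):

    // Sketch: host-side signal of a timeline semaphore to `timelineValue`.
    void hostSignal (const DeviceInterface& vk, const VkDevice& device, VkSemaphore semaphore, const deUint64 timelineValue)
    {
        const VkSemaphoreSignalInfo ssi =
        {
            VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO,    // sType
            DE_NULL,                                    // pNext
            semaphore,                                  // semaphore
            timelineValue,                              // value
        };
        VK_CHECK(vk.signalSemaphore(device, &ssi));
    }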
97 // Waits for the device to be idle when destroying the guard object.
101 DeviceWaitIdleGuard (const DeviceInterface& vkd, const VkDevice device)
102 : m_vkd(vkd), m_device(device)
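Only the constructor of DeviceWaitIdleGuard is matched here. A sketch of the RAII idiom the comment at line 97 describes, assuming the destructor simply calls deviceWaitIdle() and that the members mirror the constructor arguments:

    // Sketch: guard that blocks in its destructor until the device is idle,
    // so resources declared before it outlive any pending GPU work.
    class DeviceWaitIdleGuard
    {
    public:
        DeviceWaitIdleGuard (const DeviceInterface& vkd, const VkDevice device)
            : m_vkd(vkd), m_device(device)
        {}

        ~DeviceWaitIdleGuard ()
        {
            VK_CHECK(m_vkd.deviceWaitIdle(m_device));
        }

    protected:
        const DeviceInterface&  m_vkd;
        const VkDevice          m_device;
    };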
204 // Class to wrap a singleton instance and device
245 VkDevice device,
255 VkDevice device,
259 , m_device (device)
269 VkDevice device,
284 vkd.getBufferMemoryRequirements2(device, &requirementInfo, &requirements);
289 VkDevice device,
304 vkd.getImageMemoryRequirements2(device, &requirementInfo, &requirements);
310 VkDevice device,
316 const VkMemoryRequirements requirements = getBufferMemoryRequirements(vkd, device, buffer);
320 memory = importDedicatedMemory(vkd, device, buffer, requirements, externalType, exportedMemoryTypeIndex, nativeHandle);
322 memory = importMemory(vkd, device, requirements, externalType, exportedMemoryTypeIndex, nativeHandle);
324 VK_CHECK(vkd.bindBufferMemory(device, buffer, *memory, 0u));
326 return MovePtr<Allocation>(new SimpleAllocation(vkd, device, memory.disown()));
330 VkDevice device,
336 const VkMemoryRequirements requirements = getImageMemoryRequirements(vkd, device, image);
340 memory = importDedicatedMemory(vkd, device, image, requirements, externalType, exportedMemoryTypeIndex, nativeHandle);
342 memory = importMemory(vkd, device, requirements, externalType, exportedMemoryTypeIndex, nativeHandle);
344 VK_CHECK(vkd.bindImageMemory(device, image, *memory, 0u));
346 return MovePtr<Allocation>(new SimpleAllocation(vkd, device, memory.disown()));
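Lines 310-346 show the same import-and-bind pattern twice, once for buffers and once for images. A condensed sketch of the buffer path, reusing the helper calls visible above; the full parameter list and the dedicated/non-dedicated decision are assumptions:

    // Sketch of the buffer path (lines 310-326): query requirements, import the
    // external handle, bind at offset 0 and wrap the memory in a SimpleAllocation.
    // The `dedicated` flag is a placeholder for however the test actually decides
    // between a dedicated and a plain import.
    MovePtr<Allocation> importAndBindMemory (const DeviceInterface&             vkd,
                                             VkDevice                           device,
                                             VkBuffer                           buffer,
                                             NativeHandle&                      nativeHandle,
                                             VkExternalMemoryHandleTypeFlagBits externalType,
                                             deUint32                           exportedMemoryTypeIndex,
                                             bool                               dedicated)
    {
        const VkMemoryRequirements requirements = getBufferMemoryRequirements(vkd, device, buffer);
        Move<VkDeviceMemory>       memory;

        if (dedicated)
            memory = importDedicatedMemory(vkd, device, buffer, requirements, externalType, exportedMemoryTypeIndex, nativeHandle);
        else
            memory = importMemory(vkd, device, requirements, externalType, exportedMemoryTypeIndex, nativeHandle);

        VK_CHECK(vkd.bindBufferMemory(device, buffer, *memory, 0u));

        return MovePtr<Allocation>(new SimpleAllocation(vkd, device, memory.disown()));
    }

The image path at lines 330-346 is identical apart from getImageMemoryRequirements() and bindImageMemory().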
372 VkDevice device,
432 Move<VkImage> image = createImage(vkd, device, &createInfo);
433 MovePtr<Allocation> allocation = importAndBindMemory(vkd, device, *image, nativeHandle, externalType, exportedMemoryTypeIndex);
460 Move<VkBuffer> buffer = createBuffer(vkd, device, &createInfo);
462 device,
557 vk::VkDevice device,
589 return vk::createImage(vkd, device, &createInfo);
593 vk::VkDevice device,
615 return vk::createBuffer(vkd, device, &createInfo);
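Lines 557-615 build the exportable image and buffer, but only the final createImage()/createBuffer() calls are matched by the search. A sketch of how such a buffer create info is typically chained for external memory; the size and usage values are illustrative, not the test's actual parameters:

    // Sketch: buffer whose backing memory can be exported via `externalType`,
    // declared by chaining VkExternalMemoryBufferCreateInfo into the create info.
    Move<VkBuffer> createExternalBuffer (const DeviceInterface&             vkd,
                                         VkDevice                           device,
                                         VkDeviceSize                       size,
                                         VkExternalMemoryHandleTypeFlagBits externalType)
    {
        const VkExternalMemoryBufferCreateInfo externalCreateInfo =
        {
            VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO,
            DE_NULL,
            (VkExternalMemoryHandleTypeFlags)externalType,              // handleTypes
        };
        const VkBufferCreateInfo createInfo =
        {
            VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
            &externalCreateInfo,
            0u,                                                         // flags
            size,
            VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT,
            VK_SHARING_MODE_EXCLUSIVE,
            0u,                                                         // queueFamilyIndexCount
            DE_NULL,                                                    // pQueueFamilyIndices
        };
        return vk::createBuffer(vkd, device, &createInfo);
    }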
653 // These guards will wait for the device to be idle before tearing down the resources above.
924 VkDevice device,
938 semaphores.push_back(createExportableSemaphoreType(vk, device, m_semaphoreType, m_semaphoreHandleType));
944 semaphores.push_back(createExportableSemaphoreType(vk, device, m_semaphoreType, m_semaphoreHandleType));
946 semaphores.push_back(createSemaphoreType(vk, device, m_semaphoreType));
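createExportableSemaphoreType() is called at lines 938/944 but not shown. A sketch of what such a helper typically builds, using the core Vulkan 1.2 / VK_KHR_external_semaphore structures; the signature and the zero initial value are assumptions:

    // Sketch: semaphore of the requested type (binary or timeline) whose payload
    // can be exported through `handleType`.
    Move<VkSemaphore> createExportableSemaphoreType (const DeviceInterface&                vk,
                                                     VkDevice                              device,
                                                     VkSemaphoreType                       semaphoreType,
                                                     VkExternalSemaphoreHandleTypeFlagBits handleType)
    {
        const VkSemaphoreTypeCreateInfo typeCreateInfo =
        {
            VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO,
            DE_NULL,
            semaphoreType,
            0u,                                                 // initialValue
        };
        const VkExportSemaphoreCreateInfo exportCreateInfo =
        {
            VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO,
            &typeCreateInfo,
            (VkExternalSemaphoreHandleTypeFlags)handleType,     // handleTypes
        };
        const VkSemaphoreCreateInfo createInfo =
        {
            VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO,
            &exportCreateInfo,
            0u,                                                 // flags
        };
        return createSemaphore(vk, device, &createInfo);
    }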
1365 const VkDevice& device = *m_device;
1367 Unique<VkFence> fence (createFence(vk, device));
1368 const Unique<VkCommandPool> cmdPoolA (createCommandPool(vk, device, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT, m_queueFamilyIndexA));
1369 const Unique<VkCommandPool> cmdPoolB (createCommandPool(vk, device, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT, m_queueFamilyIndexB));
1385 // This guard will wait for the device to be idle before tearing down the resources above.
1386 const DeviceWaitIdleGuard idleGuard (vk, device);
1415 ptrCmdBuffersA.push_back(makeVkSharedPtr(makeCommandBuffer(vk, device, *cmdPoolA)));
1465 addSemaphore(vk, device, semaphoresA, semaphoreHandlesA, timelineValuesA, 2u);
1472 ptrCmdBufferB = makeVkSharedPtr(makeCommandBuffer(vk, device, *cmdPoolB));
1482 addSemaphore(vk, device, semaphoresB, semaphoreHandlesB, timelineValuesB, timelineValuesA.back());
1550 hostSignal(vk, device, semaphoreHandlesA.front(), 1);
1552 VK_CHECK(vk.waitSemaphores(device, &waitInfo, ~0ull));
1556 VK_CHECK(vk.waitForFences(device, 1, &fence.get(), VK_TRUE, ~0ull));
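Lines 1550-1556 host-signal the first semaphore of chain A, then block on the host until the timeline wait completes and the submitted work's fence signals. The VkSemaphoreWaitInfo passed at line 1552 is not matched by the search; a sketch, assuming it waits on the last semaphore/value of chain B (container names taken from lines 1482 and 1550):

    // Sketch: host wait on the final timeline value of chain B, then on the fence.
    const VkSemaphoreWaitInfo waitInfo =
    {
        VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO,
        DE_NULL,
        0u,                               // flags
        1u,                               // semaphoreCount
        &semaphoreHandlesB.back(),        // pSemaphores
        &timelineValuesB.back(),          // pValues
    };
    VK_CHECK(vk.waitSemaphores(device, &waitInfo, ~0ull));
    VK_CHECK(vk.waitForFences(device, 1, &fence.get(), VK_TRUE, ~0ull));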
1587 VkDevice device,
1600 semaphores.push_back(createSemaphoreType(vk, device, m_semaphoreType));
1605 semaphores.push_back(createSemaphoreType(vk, device, m_semaphoreType));