/test/xts/hats/ai/nnrt/hdi/v2_0/nnrtFunctionTest_additional/src/
hdi_device_test.cpp
  51   hdiRet = device_->GetDeviceName(deviceName);  in HWTEST_F()
  66   auto hdiRet = device_->GetDeviceName(deviceName);  in HWTEST_F()
  82   hdiRet = device_->GetVendorName(vendorName);  in HWTEST_F()
  97   auto hdiRet = device_->GetVendorName(vendorName);  in HWTEST_F()
  114  hdiRet = device_->GetDeviceType(deviceType);  in HWTEST_F()
  134  hdiRet = device_->GetDeviceStatus(deviceStatus);  in HWTEST_F()
  152  hdiRet = device_->IsFloat16PrecisionSupported(isSupportedFp16);  in HWTEST_F()
  165  auto hdiRet = device_->IsFloat16PrecisionSupported(isSupportedFp16);  in HWTEST_F()
  179  hdiRet = device_->IsPerformanceModeSupported(isSupportedPerformance);  in HWTEST_F()
  192  auto hdiRet = device_  in HWTEST_F()
  [all...]
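Every hit above has the same shape: an HWTEST_F case declares a local, asks the v2.0 NNRT device for one property through device_, and checks the returned code. A minimal sketch of that shape follows; it assumes the HDINNRtTest fixture from hdi_nnrt_test.h (listed further down) has already obtained device_, and the case name, test-level flag, and the expected NNRT_SUCCESS value are illustrative assumptions rather than lines from the suite.

    // Hedged sketch of one v2.0 device-query case, modeled on the hits above.
    // HDINNRtTest and device_ come from hdi_nnrt_test.h; the suite's own headers
    // are assumed to be included, and the case name and TestSize.Level1 flag are
    // placeholders.
    #include <string>
    #include <gtest/gtest.h>

    using namespace testing::ext;

    HWTEST_F(HDINNRtTest, SketchGetDeviceName, TestSize.Level1)
    {
        std::string deviceName;
        auto hdiRet = device_->GetDeviceName(deviceName);        // query the driver
        EXPECT_EQ(V2_0::NNRT_ReturnCode::NNRT_SUCCESS, hdiRet);  // assumed success code
        EXPECT_FALSE(deviceName.empty());                        // a real device reports a name
    }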
hdi_model_prepare_test.cpp
  55   device_->PrepareOfflineModel(modelCache, modelConfig, iPreparedModelFromOffline));  in HWTEST_F()
  73   device_->PrepareOfflineModel(modelCache, modelConfig, iPreparedModelFromOffline));  in HWTEST_F()
  91   device_->PrepareOfflineModel(modelCache, modelConfig, iPreparedModelFromOffline));  in HWTEST_F()
  109  device_->PrepareOfflineModel(modelCache, modelConfig, iPreparedModelFromOffline));  in HWTEST_F()
  127  device_->PrepareOfflineModel(modelCache, modelConfig, iPreparedModelFromOffline));  in HWTEST_F()
  145  device_->PrepareOfflineModel(modelCache, modelConfig, iPreparedModelFromOffline));  in HWTEST_F()
  163  device_->PrepareOfflineModel(modelCache, modelConfig, iPreparedModelFromOffline));  in HWTEST_F()
  181  device_->PrepareOfflineModel(modelCache, modelConfig, iPreparedModelFromOffline));  in HWTEST_F()
  199  device_->PrepareOfflineModel(modelCache, modelConfig, iPreparedModelFromOffline));  in HWTEST_F()
  217  device_  in HWTEST_F()
  [all...]
/third_party/vk-gl-cts/external/amber/src/src/vulkan/
command_buffer.cc
  27   : device_(device), pool_(pool) {}  in CommandBuffer()
  33   device_->GetPtrs()->vkDestroyFence(device_->GetVkDevice(), fence_, nullptr);  in ~CommandBuffer()
  36   device_->GetPtrs()->vkFreeCommandBuffers(  in ~CommandBuffer()
  37   device_->GetVkDevice(), pool_->GetVkCommandPool(), 1, &command_);  in ~CommandBuffer()
  48   if (device_->GetPtrs()->vkAllocateCommandBuffers(  in Initialize()
  49   device_->GetVkDevice(), &command_info, &command_) != VK_SUCCESS) {  in Initialize()
  55   if (device_->GetPtrs()->vkCreateFence(device_->GetVkDevice(), &fence_info,  in Initialize()
  67   if (device_  in BeginRecording()
  [all...]
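In these wrappers the raw Vulkan entry points are reached through device_->GetPtrs(), Amber's per-device dispatch table, with device_->GetVkDevice() supplying the handle. A free-standing sketch of the same allocate-and-clean-up sequence, using the loader-exported Vulkan functions directly instead of the dispatch table (the helper names are made up), looks roughly like this; the dispatch-table indirection in the real code presumably exists so Amber can use device-level function pointers rather than global loader symbols.

    // Sketch: allocate one primary command buffer from an existing pool and create
    // a fence for submission tracking, then release both, mirroring CommandBuffer's
    // Initialize()/~CommandBuffer() pair. Loader-exported entry points are used here.
    #include <vulkan/vulkan.h>

    bool AllocateCommandBufferAndFence(VkDevice device, VkCommandPool pool,
                                       VkCommandBuffer* out_cmd, VkFence* out_fence) {
      VkCommandBufferAllocateInfo command_info = {};
      command_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
      command_info.commandPool = pool;
      command_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
      command_info.commandBufferCount = 1;
      if (vkAllocateCommandBuffers(device, &command_info, out_cmd) != VK_SUCCESS)
        return false;

      VkFenceCreateInfo fence_info = {};
      fence_info.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
      if (vkCreateFence(device, &fence_info, nullptr, out_fence) != VK_SUCCESS) {
        vkFreeCommandBuffers(device, pool, 1, out_cmd);  // undo allocation on failure
        return false;
      }
      return true;
    }

    void DestroyCommandBufferAndFence(VkDevice device, VkCommandPool pool,
                                      VkCommandBuffer cmd, VkFence fence) {
      // Mirrors ~CommandBuffer(): destroy the fence, then return the buffer to the pool.
      vkDestroyFence(device, fence, nullptr);
      vkFreeCommandBuffers(device, pool, 1, &cmd);
    }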
resource.cc
  51   : device_(device), size_in_bytes_(size_in_bytes) {}  in Resource()
  65   if (device_->GetPtrs()->vkCreateBuffer(device_->GetVkDevice(), &buffer_info,  in CreateVkBuffer()
  88   if (device_->HasMemoryFlags(memory_type_index, flags))  in ChooseMemory()
  120  device_->GetPtrs()->vkGetBufferMemoryRequirements(device_->GetVkDevice(),  in AllocateAndBindMemoryToVkBuffer()
  132  if (device_->GetPtrs()->vkBindBufferMemory(device_->GetVkDevice(), buffer,  in AllocateAndBindMemoryToVkBuffer()
  147  if (device_->GetPtrs()->vkAllocateMemory(device_  in AllocateMemory()
  [all...]
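resource.cc picks a memory type via device_->HasMemoryFlags() and then allocates and binds it in AllocateAndBindMemoryToVkBuffer(). A standalone approximation of that flow, querying the physical device directly rather than going through Amber's Device wrapper (the function name is made up, error handling is trimmed):

    // Sketch: query a buffer's memory requirements, pick a compatible memory type
    // with the requested property flags, allocate, and bind.
    #include <vulkan/vulkan.h>
    #include <cstdint>

    VkDeviceMemory AllocateAndBindBufferMemory(VkPhysicalDevice physical, VkDevice device,
                                               VkBuffer buffer, VkMemoryPropertyFlags flags) {
      VkMemoryRequirements reqs = {};
      vkGetBufferMemoryRequirements(device, buffer, &reqs);

      VkPhysicalDeviceMemoryProperties props = {};
      vkGetPhysicalDeviceMemoryProperties(physical, &props);

      uint32_t memory_type_index = UINT32_MAX;
      for (uint32_t i = 0; i < props.memoryTypeCount; ++i) {
        // The type must be allowed by the buffer and expose the requested flags.
        bool allowed = (reqs.memoryTypeBits & (1u << i)) != 0;
        bool has_flags = (props.memoryTypes[i].propertyFlags & flags) == flags;
        if (allowed && has_flags) {
          memory_type_index = i;
          break;
        }
      }
      if (memory_type_index == UINT32_MAX)
        return VK_NULL_HANDLE;

      VkMemoryAllocateInfo alloc_info = {};
      alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
      alloc_info.allocationSize = reqs.size;
      alloc_info.memoryTypeIndex = memory_type_index;

      VkDeviceMemory memory = VK_NULL_HANDLE;
      if (vkAllocateMemory(device, &alloc_info, nullptr, &memory) != VK_SUCCESS)
        return VK_NULL_HANDLE;
      if (vkBindBufferMemory(device, buffer, memory, 0) != VK_SUCCESS) {
        vkFreeMemory(device, memory, nullptr);
        return VK_NULL_HANDLE;
      }
      return memory;
    }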
transfer_buffer.cc
  32   if (device_) {  in ~TransferBuffer()
  33   device_->GetPtrs()->vkDestroyBufferView(device_->GetVkDevice(), view_,  in ~TransferBuffer()
  38   device_->GetPtrs()->vkFreeMemory(device_->GetVkDevice(), memory_,  in ~TransferBuffer()
  42   device_->GetPtrs()->vkDestroyBuffer(device_->GetVkDevice(), buffer_,  in ~TransferBuffer()
  76   if (device_->GetPtrs()->vkCreateBufferView(device_->GetVkDevice(),  in Initialize()
  83   if (!device_  in Initialize()
  [all...]
transfer_image.cc
  95   image_info_.format = device_->GetVkFormat(format);  in TransferImage()
  105  device_->GetPtrs()->vkDestroyImageView(device_->GetVkDevice(), view_,  in ~TransferImage()
  110  device_->GetPtrs()->vkDestroyImage(device_->GetVkDevice(), image_, nullptr);  in ~TransferImage()
  113  device_->GetPtrs()->vkFreeMemory(device_->GetVkDevice(), memory_, nullptr);  in ~TransferImage()
  117  device_->GetPtrs()->vkFreeMemory(device_->GetVkDevice(),  in ~TransferImage()
  122  device_  in ~TransferImage()
  [all...]
compute_pipeline.cc
  58   if (device_->GetPtrs()->vkCreateComputePipelines(  in CreateVkComputePipeline()
  59   device_->GetVkDevice(), VK_NULL_HANDLE, 1, &pipeline_info, nullptr,  in CreateVkComputePipeline()
  98   device_->GetPtrs()->vkCmdBindPipeline(command_->GetVkCommandBuffer(),  in Compute()
  101  device_->GetPtrs()->vkCmdDispatch(command_->GetVkCommandBuffer(), x, y, z);  in Compute()
  112  device_->GetPtrs()->vkDestroyPipeline(device_->GetVkDevice(), pipeline,  in Compute()
  114  device_->GetPtrs()->vkDestroyPipelineLayout(device_->GetVkDevice(),  in Compute()
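compute_pipeline.cc creates the pipeline with vkCreateComputePipelines, then records a bind and a dispatch into the command buffer. The recording half, written against the raw API rather than the device_->GetPtrs() table and with descriptor-set binding omitted, is roughly:

    // Sketch: record a compute dispatch, as Compute() does with the handles it gets
    // from command_->GetVkCommandBuffer().
    #include <vulkan/vulkan.h>
    #include <cstdint>

    void RecordDispatch(VkCommandBuffer cmd, VkPipeline pipeline,
                        uint32_t x, uint32_t y, uint32_t z) {
      vkCmdBindPipeline(cmd, VK_PIPELINE_BIND_POINT_COMPUTE, pipeline);
      vkCmdDispatch(cmd, x, y, z);  // launch an x * y * z grid of workgroups
    }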
command_pool.cc
  22   CommandPool::CommandPool(Device* device) : device_(device) {}  in CommandPool()
  28   device_->GetPtrs()->vkDestroyCommandPool(device_->GetVkDevice(), pool_,  in ~CommandPool()
  36   pool_info.queueFamilyIndex = device_->GetQueueFamilyIndex();  in Initialize()
  38   if (device_->GetPtrs()->vkCreateCommandPool(  in Initialize()
  39   device_->GetVkDevice(), &pool_info, nullptr, &pool_) != VK_SUCCESS) {  in Initialize()
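command_pool.cc is a small RAII holder: the constructor stores device_, Initialize() creates the pool on the device's queue family, and the destructor destroys it. A self-contained sketch of the create step (the helper name is made up):

    // Sketch: create a command pool for a given queue family, as in
    // CommandPool::Initialize(), without the dispatch-table indirection.
    #include <vulkan/vulkan.h>
    #include <cstdint>

    VkCommandPool CreateCommandPool(VkDevice device, uint32_t queue_family_index) {
      VkCommandPoolCreateInfo pool_info = {};
      pool_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
      pool_info.queueFamilyIndex = queue_family_index;

      VkCommandPool pool = VK_NULL_HANDLE;
      if (vkCreateCommandPool(device, &pool_info, nullptr, &pool) != VK_SUCCESS)
        return VK_NULL_HANDLE;  // caller treats a null handle as failure
      return pool;
    }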
pipeline.cc
  44   : device_(device),  in Pipeline()
  56   device_->GetPtrs()->vkDestroyDescriptorSetLayout(device_->GetVkDevice(),  in ~Pipeline()
  64   device_->GetPtrs()->vkDestroyDescriptorPool(device_->GetVkDevice(),  in ~Pipeline()
  79   push_constant_ = MakeUnique<PushConstant>(device_);  in Initialize()
  81   command_ = MakeUnique<CommandBuffer>(device_, pool);  in Initialize()
  104  if (device_->GetPtrs()->vkCreateDescriptorSetLayout(  in CreateDescriptorSetLayouts()
  105  device_->GetVkDevice(), &desc_info, nullptr, &info.layout) !=  in CreateDescriptorSetLayouts()
  142  if (device_  in CreateDescriptorPools()
  [all...]
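Pipeline::CreateDescriptorSetLayouts() builds one VkDescriptorSetLayout per descriptor set and the destructor tears them down again. A reduced sketch of the create call with a caller-supplied binding list (the helper name is made up):

    // Sketch: create a descriptor set layout from a list of bindings, mirroring the
    // vkCreateDescriptorSetLayout call in CreateDescriptorSetLayouts().
    #include <vulkan/vulkan.h>
    #include <vector>

    VkDescriptorSetLayout CreateSetLayout(
        VkDevice device, const std::vector<VkDescriptorSetLayoutBinding>& bindings) {
      VkDescriptorSetLayoutCreateInfo desc_info = {};
      desc_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
      desc_info.bindingCount = static_cast<uint32_t>(bindings.size());
      desc_info.pBindings = bindings.data();

      VkDescriptorSetLayout layout = VK_NULL_HANDLE;
      if (vkCreateDescriptorSetLayout(device, &desc_info, nullptr, &layout) != VK_SUCCESS)
        return VK_NULL_HANDLE;
      return layout;
    }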
engine_vulkan.cc
  82   auto vk_device = device_->GetVkDevice();  in ~EngineVulkan()
  85   device_->GetPtrs()->vkDestroyShaderModule(vk_device, shader.second,  in ~EngineVulkan()
  97   if (device_)  in Initialize()
  98   return Result("Vulkan::Initialize device_ already exists");  in Initialize()
  116  device_ = MakeUnique<Device>(vk_config->instance, vk_config->physical_device,  in Initialize()
  120  Result r = device_->Initialize(  in Initialize()
  128  pool_ = MakeUnique<CommandPool>(device_.get());  in Initialize()
  150  if (!device_->IsFormatSupportedByPhysicalDevice(*fmt, colour_info.type))  in CreatePipeline()
  158  if (!device_->IsFormatSupportedByPhysicalDevice(*fmt,  in CreatePipeline()
  173  device_  in CreatePipeline()
  [all...]
graphics_pipeline.cc
  409  device_->GetPtrs()->vkDestroyRenderPass(device_->GetVkDevice(),  in ~GraphicsPipeline()
  427  device_->GetVkFormat(*info->buffer->GetFormat());  in CreateRenderPass()
  447  device_->GetVkFormat(*depth_stencil_buffer_.buffer->GetFormat());  in CreateRenderPass()
  462  device_->GetVkFormat(*info->buffer->GetFormat());  in CreateRenderPass()
  484  if (device_->GetPtrs()->vkCreateRenderPass(device_->GetVkDevice(),  in CreateRenderPass()
  724  if (device_->GetPtrs()->vkCreateGraphicsPipelines(  in CreateVkGraphicsPipeline()
  725  device_->GetVkDevice(), VK_NULL_HANDLE, 1, &pipeline_info, nullptr,  in CreateVkGraphicsPipeline()
  745  MakeUnique<FrameBuffer>(device_, color_buffers  in Initialize()
  [all...]
push_constant.cc
  29   : device_(device), buffer_(MakeUnique<Buffer>()) {}  in PushConstant()
  79   device_->GetMaxPushConstants()) {  in RecordPushConstantVkCommand()
  98   device_->GetPtrs()->vkCmdPushConstants(  in RecordPushConstantVkCommand()
  113  if (static_cast<size_t>(input.offset) >= device_->GetMaxPushConstants()) {  in UpdateMemoryWithInput()
  119  (device_->GetMaxPushConstants() - input.offset)) {  in UpdateMemoryWithInput()
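push_constant.cc checks every write against device_->GetMaxPushConstants() before recording vkCmdPushConstants. The guard plus the record call, sketched against the raw API; passing the limit in as a parameter and using VK_SHADER_STAGE_ALL are simplifications, and 128 bytes is mentioned only because it is the Vulkan-required minimum for maxPushConstantsSize:

    // Sketch: refuse writes that exceed the device's push-constant budget, then
    // record the update. The limit would normally come from VkPhysicalDeviceLimits,
    // which Amber wraps as device_->GetMaxPushConstants().
    #include <vulkan/vulkan.h>
    #include <cstdint>

    bool RecordPushConstants(VkCommandBuffer cmd, VkPipelineLayout layout,
                             uint32_t offset, uint32_t size, const void* data,
                             uint32_t max_push_constants_size /* e.g. 128 */) {
      if (offset >= max_push_constants_size ||
          size > max_push_constants_size - offset) {
        return false;  // out of range for this device
      }
      vkCmdPushConstants(cmd, layout, VK_SHADER_STAGE_ALL, offset, size, data);
      return true;
    }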
sampler.cc
  82   Sampler::Sampler(Device* device) : device_(device) {}  in Sampler()
  108  if (device_->GetPtrs()->vkCreateSampler(device_->GetVkDevice(), &sampler_info,  in CreateSampler()
  118  device_->GetPtrs()->vkDestroySampler(device_->GetVkDevice(), sampler_,  in ~Sampler()
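sampler.cc follows the same constructor/create/destructor split as the other wrappers. A bare-bones version of the create step; the filter and address modes below are placeholder defaults, not what Amber derives from the script's sampler description:

    // Sketch: create a trivial sampler, standing in for Sampler::CreateSampler().
    #include <vulkan/vulkan.h>

    VkSampler CreateDefaultSampler(VkDevice device) {
      VkSamplerCreateInfo sampler_info = {};
      sampler_info.sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO;
      sampler_info.magFilter = VK_FILTER_NEAREST;
      sampler_info.minFilter = VK_FILTER_NEAREST;
      sampler_info.addressModeU = VK_SAMPLER_ADDRESS_MODE_REPEAT;
      sampler_info.addressModeV = VK_SAMPLER_ADDRESS_MODE_REPEAT;
      sampler_info.addressModeW = VK_SAMPLER_ADDRESS_MODE_REPEAT;
      sampler_info.maxLod = 1.0f;

      VkSampler sampler = VK_NULL_HANDLE;
      if (vkCreateSampler(device, &sampler_info, nullptr, &sampler) != VK_SUCCESS)
        return VK_NULL_HANDLE;
      return sampler;
    }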
frame_buffer.cc
  36   : device_(device),  in FrameBuffer()
  45   device_->GetPtrs()->vkDestroyFramebuffer(device_->GetVkDevice(), frame_,  in ~FrameBuffer()
  71   device_, *info->buffer->GetFormat(), VK_IMAGE_ASPECT_COLOR_BIT,  in Initialize()
  98   device_, *depth_stencil_attachment_.buffer->GetFormat(), aspect,  in Initialize()
  113  device_, *info->buffer->GetFormat(), VK_IMAGE_ASPECT_COLOR_BIT,  in Initialize()
  133  if (device_->GetPtrs()->vkCreateFramebuffer(device_->GetVkDevice(),  in Initialize()
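frame_buffer.cc builds image views for the colour and depth/stencil attachments and then creates the VkFramebuffer. The final create call, sketched with the attachment views already in hand (the helper name is made up):

    // Sketch: create a framebuffer from pre-built image views and a compatible
    // render pass, as in FrameBuffer::Initialize(); attachment setup is elided.
    #include <vulkan/vulkan.h>
    #include <cstdint>
    #include <vector>

    VkFramebuffer CreateFramebuffer(VkDevice device, VkRenderPass render_pass,
                                    const std::vector<VkImageView>& attachments,
                                    uint32_t width, uint32_t height) {
      VkFramebufferCreateInfo info = {};
      info.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
      info.renderPass = render_pass;
      info.attachmentCount = static_cast<uint32_t>(attachments.size());
      info.pAttachments = attachments.data();
      info.width = width;
      info.height = height;
      info.layers = 1;

      VkFramebuffer frame = VK_NULL_HANDLE;
      if (vkCreateFramebuffer(device, &info, nullptr, &frame) != VK_SUCCESS)
        return VK_NULL_HANDLE;
      return frame;
    }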
/test/xts/hats/ai/nnrt/hdi/v1_0/nnrtFunctionTest/src/
hdi_model_prepare_test.cpp
  48   EXPECT_EQ(HDF_SUCCESS, device_->IsModelCacheSupported(isSupportedCache));  in HWTEST_F()
  59   ASSERT_EQ(OH_NN_SUCCESS, HDICommon::ConvertModel(device_, model, tensorBuffer, &iModel));  in HWTEST_F()
  67   EXPECT_EQ(HDF_SUCCESS, device_->PrepareModel(*iModel, config, iPreparedModel));  in HWTEST_F()
  74   EXPECT_EQ(HDF_SUCCESS, device_->ReleaseBuffer(tensorBuffer));  in HWTEST_F()
  86   EXPECT_EQ(HDF_SUCCESS, device_->IsModelCacheSupported(isSupportedCache));  in HWTEST_F()
  99   EXPECT_EQ(HDF_ERR_INVALID_PARAM, device_->PrepareModelFromModelCache(modelCache, config, iPreparedModel));  in HWTEST_F()
  110  EXPECT_EQ(HDF_SUCCESS, device_->IsModelCacheSupported(isSupportedCache));  in HWTEST_F()
  121  ASSERT_EQ(OH_NN_SUCCESS, HDICommon::ConvertModel(device_, model, tensorBuffer, &iModel));  in HWTEST_F()
  129  EXPECT_EQ(HDF_SUCCESS, device_->PrepareModel(*iModel, config, iPreparedModel));  in HWTEST_F()
  137  EXPECT_EQ(HDF_ERR_INVALID_PARAM, device_  in HWTEST_F()
  [all...]
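The v1.0 prepare tests all walk the same sequence: check cache support, convert an OH_NNModel into the HDI representation with HDICommon::ConvertModel, call PrepareModel, and release the shared buffer. A compressed, hedged sketch of that happy path; the graph-building helper, the case name, the test-level flag, and the initial SharedBuffer value are assumptions, and only the calls visible in the hits above are taken from the suite:

    // Hedged sketch of the v1.0 PrepareModel happy path. HDINNRtTest and device_
    // come from hdi_nnrt_test.h; BuildAddGraph() is a hypothetical stand-in for
    // however the real cases construct their small test model.
    HWTEST_F(HDINNRtTest, SketchPrepareModel, TestSize.Level1)
    {
        bool isSupportedCache = false;
        EXPECT_EQ(HDF_SUCCESS, device_->IsModelCacheSupported(isSupportedCache));

        // Build a small OH_NN model and convert it to the HDI representation.
        OH_NNModel* model = HDICommon::BuildAddGraph();   // hypothetical helper
        V1_0::SharedBuffer tensorBuffer {};               // filled in by ConvertModel (assumed)
        V1_0::Model* iModel = nullptr;
        ASSERT_EQ(OH_NN_SUCCESS, HDICommon::ConvertModel(device_, model, tensorBuffer, &iModel));

        // Prepare the model on the device, then release the conversion buffer.
        V1_0::ModelConfig config;                         // default-constructed config
        OHOS::sptr<V1_0::IPreparedModel> iPreparedModel;
        EXPECT_EQ(HDF_SUCCESS, device_->PrepareModel(*iModel, config, iPreparedModel));
        EXPECT_EQ(HDF_SUCCESS, device_->ReleaseBuffer(tensorBuffer));
    }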
hdi_model_run_test.cpp
  39   void AddModelTest(OHOS::sptr<V1_0::INnrtDevice> &device_, V1_0::ModelConfig &modelConfig, bool isDynamic)  in AddModelTest() argument
  51   ASSERT_EQ(OH_NN_SUCCESS, HDICommon::ConvertModel(device_, model, tensorBuffer, &iModel));  in AddModelTest()
  54   EXPECT_EQ(HDF_SUCCESS, device_->PrepareModel(*iModel, modelConfig, iPreparedModel));  in AddModelTest()
  67   auto tensor = HDICommon::CreateIOTensor(device_);  in AddModelTest()
  75   auto outputTensor = HDICommon::CreateIOTensor(device_);  in AddModelTest()
  93   EXPECT_EQ(HDF_SUCCESS, device_->ReleaseBuffer(tensorBuffer));  in AddModelTest()
  95   HDICommon::ReleaseBufferOfTensors(device_, inputs);  in AddModelTest()
  96   HDICommon::ReleaseBufferOfTensors(device_, outputs);  in AddModelTest()
  111  AddModelTest(device_, modelConfig, false);  in HWTEST_F()
  122  EXPECT_EQ(HDF_SUCCESS, device_  in HWTEST_F()
  [all...]
hdi_device_test.cpp
  48   auto hdiRet = device_->GetDeviceName(deviceName);  in HWTEST_F()
  62   auto hdiRet = device_->GetVendorName(vendorName);  in HWTEST_F()
  76   auto hdiRet = device_->GetDeviceType(deviceType);  in HWTEST_F()
  92   auto hdiRet = device_->GetDeviceStatus(deviceStatus);  in HWTEST_F()
  107  auto hdiRet = device_->IsFloat16PrecisionSupported(isSupportedFp16);  in HWTEST_F()
  119  auto hdiRet = device_->IsPerformanceModeSupported(isSupportedPerformance);  in HWTEST_F()
  131  auto hdiRet = device_->IsPrioritySupported(isSupportedPriority);  in HWTEST_F()
  143  auto hdiRet = device_->IsDynamicInputSupported(isSupportedDynamicInput);  in HWTEST_F()
  155  auto hdiRet = device_->IsModelCacheSupported(isSupportedCache);  in HWTEST_F()
  172  ASSERT_EQ(OH_NN_SUCCESS, HDICommon::ConvertModel(device_, mode  in HWTEST_F()
  [all...]
/test/xts/hats/ai/nnrt/hdi/v2_0/nnrtFunctionTest/src/
hdi_model_prepare_test.cpp
  48   EXPECT_EQ(V2_0::NNRT_ReturnCode::NNRT_SUCCESS, device_->IsModelCacheSupported(isSupportedCache));  in HWTEST_F()
  59   ASSERT_EQ(OH_NN_SUCCESS, HDICommon::ConvertModel(device_, model, tensorBuffer, &iModel));  in HWTEST_F()
  67   EXPECT_EQ(V2_0::NNRT_ReturnCode::NNRT_SUCCESS, device_->PrepareModel(*iModel, config, iPreparedModel));  in HWTEST_F()
  75   EXPECT_EQ(V2_0::NNRT_ReturnCode::NNRT_SUCCESS, device_->ReleaseBuffer(tensorBuffer));  in HWTEST_F()
  87   EXPECT_EQ(V2_0::NNRT_ReturnCode::NNRT_SUCCESS, device_->IsModelCacheSupported(isSupportedCache));  in HWTEST_F()
  101  device_->PrepareModelFromModelCache(modelCache, config, iPreparedModel));  in HWTEST_F()
  112  EXPECT_EQ(V2_0::NNRT_ReturnCode::NNRT_SUCCESS, device_->IsModelCacheSupported(isSupportedCache));  in HWTEST_F()
  123  ASSERT_EQ(OH_NN_SUCCESS, HDICommon::ConvertModel(device_, model, tensorBuffer, &iModel));  in HWTEST_F()
  131  EXPECT_EQ(V2_0::NNRT_ReturnCode::NNRT_SUCCESS, device_->PrepareModel(*iModel, config, iPreparedModel));  in HWTEST_F()
  141  device_  in HWTEST_F()
  [all...]
hdi_model_run_test.cpp
  39   void AddModelTest(OHOS::sptr<V2_0::INnrtDevice> &device_, V2_0::ModelConfig &modelConfig, bool isDynamic)  in AddModelTest() argument
  51   ASSERT_EQ(OH_NN_SUCCESS, HDICommon::ConvertModel(device_, model, tensorBuffer, &iModel));  in AddModelTest()
  54   EXPECT_EQ(V2_0::NNRT_ReturnCode::NNRT_SUCCESS, device_->PrepareModel(*iModel, modelConfig, iPreparedModel));  in AddModelTest()
  66   auto tensor = HDICommon::CreateIOTensor(device_);  in AddModelTest()
  74   auto outputTensor = HDICommon::CreateIOTensor(device_);  in AddModelTest()
  92   EXPECT_EQ(V2_0::NNRT_ReturnCode::NNRT_SUCCESS, device_->ReleaseBuffer(tensorBuffer));  in AddModelTest()
  94   HDICommon::ReleaseBufferOfTensors(device_, inputs);  in AddModelTest()
  95   HDICommon::ReleaseBufferOfTensors(device_, outputs);  in AddModelTest()
  110  AddModelTest(device_, modelConfig, false);  in HWTEST_F()
  121  EXPECT_EQ(V2_0::NNRT_ReturnCode::NNRT_SUCCESS, device_  in HWTEST_F()
  [all...]
hdi_device_test.cpp
  48   auto hdiRet = device_->GetDeviceName(deviceName);  in HWTEST_F()
  62   auto hdiRet = device_->GetVendorName(vendorName);  in HWTEST_F()
  76   auto hdiRet = device_->GetDeviceType(deviceType);  in HWTEST_F()
  92   auto hdiRet = device_->GetDeviceStatus(deviceStatus);  in HWTEST_F()
  107  auto hdiRet = device_->IsFloat16PrecisionSupported(isSupportedFp16);  in HWTEST_F()
  119  auto hdiRet = device_->IsPerformanceModeSupported(isSupportedPerformance);  in HWTEST_F()
  131  auto hdiRet = device_->IsPrioritySupported(isSupportedPriority);  in HWTEST_F()
  143  auto hdiRet = device_->IsDynamicInputSupported(isSupportedDynamicInput);  in HWTEST_F()
  155  auto hdiRet = device_->IsModelCacheSupported(isSupportedCache);  in HWTEST_F()
  172  ASSERT_EQ(OH_NN_SUCCESS, HDICommon::ConvertModel(device_, mode  in HWTEST_F()
  [all...]
/test/xts/hats/hdf/display/composer/common/
hdi_test_display.cpp
  26   : id_(id), device_(device), currentFb_(nullptr)  in HdiTestDisplay()
  33   int ret = device_->GetDisplayCapability(id_, cap_);  in Init()
  38   ret = device_->GetDisplaySupportedModes(id_, modes_);  in Init()
  42   ret = device_->GetDisplayMode(id_, activeModeId_);  in Init()
  50   ret = device_->SetDisplayPowerStatus(id_, Composer::V1_0::DispPowerStatus::POWER_STATUS_ON);  in Init()
  54   ret = device_->SetDisplayMode(id_, currentMode_.id);  in Init()
  68   ret = device_->SetClientBufferCacheCount(id_, clientLayer_->GetLayerBuffercount());  in Init()
  94   int ret = device_->CreateLayer(id_, info, HdiTestLayer::MAX_BUFFER_COUNT, layerId);  in CreateHdiTestLayer()
  117  ret = device_->GetDisplayCompChange(id_, layers, types);  in RefreshLayersCompType()
  135  ret = device_  in GetLayersReleaseFence()
  [all...]
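HdiTestDisplay::Init() drives the composer interface in a fixed order: query capability, supported modes, and the current mode, power the panel on, then apply a mode. A trimmed sketch of that order; the composer pointer is left as a template parameter because its concrete type is not visible in the hits, and DISPLAY_SUCCESS plus the DisplayCapability/DisplayModeInfo type names are assumptions based on the OHOS display HDI headers:

    // Hedged sketch of the Init() call order seen above. Names outside the calls
    // themselves are assumptions.
    #include <cstdint>
    #include <vector>

    template <typename ComposerPtr>
    int InitDisplaySketch(const ComposerPtr& device, uint32_t id)
    {
        DisplayCapability cap;                    // assumed type from display HDI headers
        std::vector<DisplayModeInfo> modes;       // assumed type
        uint32_t activeModeId = 0;

        int ret = device->GetDisplayCapability(id, cap);
        if (ret != DISPLAY_SUCCESS) return ret;   // DISPLAY_SUCCESS assumed as success code
        ret = device->GetDisplaySupportedModes(id, modes);
        if (ret != DISPLAY_SUCCESS) return ret;
        ret = device->GetDisplayMode(id, activeModeId);
        if (ret != DISPLAY_SUCCESS) return ret;
        ret = device->SetDisplayPowerStatus(id, Composer::V1_0::DispPowerStatus::POWER_STATUS_ON);
        if (ret != DISPLAY_SUCCESS) return ret;
        return device->SetDisplayMode(id, activeModeId);   // (re)apply the current mode
    }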
/third_party/skia/third_party/externals/dawn/src/dawn_node/binding/
GPUDevice.cpp
  79   : env_(env), device_(device), async_(std::make_shared<AsyncRunner>(env, device)) {  in GPUDevice()
  80   device_.SetLoggingCallback(  in GPUDevice()
  85   device_.SetUncapturedErrorCallback(  in GPUDevice()
  91   device_.SetDeviceLostCallback(  in GPUDevice()
  130  if (!device_.GetLimits(&limits)) {  in getLimits()
  138  return interop::GPUQueue::Create<GPUQueue>(env, device_.GetQueue(), async_);  in getQueue()
  147  device_.Release();  in destroy()
  161  return interop::GPUBuffer::Create<GPUBuffer>(env, device_.CreateBuffer(&desc), desc,  in createBuffer()
  162  device_, async_);  in createBuffer()
  179  return interop::GPUTexture::Create<GPUTexture>(env, device_  in createTexture()
  [all...]
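GPUDevice.cpp holds a wgpu::Device by value and, in its constructor, wires Dawn's logging, uncaptured-error, and device-lost callbacks before handing the queue out through getQueue(). A stripped-down sketch of just the error-callback wiring with plain stderr logging; the dawn_node interop layer, the AsyncRunner, and the logging/lost callbacks are omitted, the callback signature is the usual webgpu.h one rather than something taken from this file, and the header path may differ between Dawn snapshots:

    // Sketch: attach a basic uncaptured-error callback to a wgpu::Device and fetch
    // its queue, as GPUDevice's constructor and getQueue() do.
    #include <cstdio>
    #include <dawn/webgpu_cpp.h>   // header path assumed for this Dawn snapshot

    wgpu::Queue SetUpDeviceSketch(wgpu::Device device) {
      device.SetUncapturedErrorCallback(
          [](WGPUErrorType type, const char* message, void*) {
            std::fprintf(stderr, "uncaptured error (%d): %s\n",
                         static_cast<int>(type), message);
          },
          nullptr);
      return device.GetQueue();  // command submission goes through this queue
    }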
/test/xts/hats/ai/nnrt/hdi/v1_0/common/
hdi_nnrt_test.h
  35   OHOS::sptr<V1_0::INnrtDevice> device_;  member in OHOS::NeuralNetworkRuntime::Test::HDINNRtTest
  40   device_ = V1_0::INnrtDevice::Get();
  41   if (device_ == nullptr) {
  49   device_.clear();
/test/xts/hats/ai/nnrt/hdi/v2_0/common/
hdi_nnrt_test.h
  34   OHOS::sptr<V2_0::INnrtDevice> device_;  member in OHOS::NeuralNetworkRuntime::Test::HDINNRtTest
  39   device_ = V2_0::INnrtDevice::Get();
  40   if (device_ == nullptr) {
  48   device_.clear();
/third_party/vk-gl-cts/external/amber/src/src/dawn/
engine_dawn.cc
  780  if (device_)  in Initialize()
  781  return Result("Dawn:Initialize device_ already exists");  in Initialize()
  789  device_ = dawn_config->device;  in Initialize()
  795  if (!device_) {  in CreatePipeline()
  809  auto shader = device_->CreateShaderModule(&descriptor);  in CreatePipeline()
  899  result = helper.CreateRenderPipelineDescriptor(*render_pipeline, *device_,  in DoClear()
  904  *render_pipeline, *device_, texture_views_, ::dawn::LoadOp::Clear);  in DoClear()
  910  ::dawn::CommandEncoder encoder = device_->CreateCommandEncoder();  in DoClear()
  916  ::dawn::Queue queue = device_->CreateQueue();  in DoClear()
  919  result = MapDeviceTextureToHostBuffer(*render_pipeline, *device_);  in DoClear()
  [all...]
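engine_dawn.cc uses the older ::dawn:: C++ API: the device held in device_ compiles shader modules, and DoClear() records into a CommandEncoder and submits on a queue created from the device. A skeletal sketch of just the record-and-submit tail, with the render-pass recording that actually performs the clear left as a comment; the header name and the empty submission are assumptions about that era of the API, not lines from this file:

    // Sketch: finish a command encoder and submit it, following the old ::dawn::
    // calls visible above (CreateCommandEncoder / CreateQueue).
    #include <dawn/dawncpp.h>   // header name assumed for this Dawn era

    void SubmitSketch(::dawn::Device device) {
      ::dawn::CommandEncoder encoder = device.CreateCommandEncoder();
      // ... DoClear() would record a render pass with LoadOp::Clear here ...
      ::dawn::CommandBuffer commands = encoder.Finish();
      ::dawn::Queue queue = device.CreateQueue();
      queue.Submit(1, &commands);   // hand the recorded work to the GPU
    }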