/*
 * Copyright 2015 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */
7
8 #include "src/gpu/vk/GrVkUtil.h"
9
10 #include "include/gpu/GrDirectContext.h"
11 #include "src/core/SkTraceEvent.h"
12 #include "src/gpu/GrDataUtils.h"
13 #include "src/gpu/GrDirectContextPriv.h"
14 #include "src/gpu/vk/GrVkGpu.h"
15 #include "src/sksl/SkSLCompiler.h"
16
GrVkFormatIsSupported(VkFormat format)17 bool GrVkFormatIsSupported(VkFormat format) {
18 switch (format) {
19 case VK_FORMAT_R8G8B8A8_UNORM:
20 case VK_FORMAT_B8G8R8A8_UNORM:
21 case VK_FORMAT_R8G8B8A8_SRGB:
22 case VK_FORMAT_R8G8B8_UNORM:
23 case VK_FORMAT_R8G8_UNORM:
24 case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
25 case VK_FORMAT_A2R10G10B10_UNORM_PACK32:
26 case VK_FORMAT_R5G6B5_UNORM_PACK16:
27 case VK_FORMAT_B4G4R4A4_UNORM_PACK16:
28 case VK_FORMAT_R4G4B4A4_UNORM_PACK16:
29 case VK_FORMAT_R8_UNORM:
30 case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
31 case VK_FORMAT_BC1_RGB_UNORM_BLOCK:
32 case VK_FORMAT_BC1_RGBA_UNORM_BLOCK:
33 case VK_FORMAT_ASTC_4x4_UNORM_BLOCK:
34 case VK_FORMAT_ASTC_6x6_UNORM_BLOCK:
35 case VK_FORMAT_ASTC_8x8_UNORM_BLOCK:
36 case VK_FORMAT_R16G16B16A16_SFLOAT:
37 case VK_FORMAT_R16_SFLOAT:
38 case VK_FORMAT_R16_UNORM:
39 case VK_FORMAT_R16G16_UNORM:
40 case VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM:
41 case VK_FORMAT_G8_B8R8_2PLANE_420_UNORM:
42 case VK_FORMAT_R16G16B16A16_UNORM:
43 case VK_FORMAT_R16G16_SFLOAT:
44 case VK_FORMAT_S8_UINT:
45 case VK_FORMAT_D24_UNORM_S8_UINT:
46 case VK_FORMAT_D32_SFLOAT_S8_UINT:
47 return true;
48 default:
49 return false;
50 }
51 }
52
GrVkFormatNeedsYcbcrSampler(VkFormat format)53 bool GrVkFormatNeedsYcbcrSampler(VkFormat format) {
54 return format == VK_FORMAT_G8_B8R8_2PLANE_420_UNORM ||
55 format == VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM;
56 }
57
GrSampleCountToVkSampleCount(uint32_t samples, VkSampleCountFlagBits* vkSamples)58 bool GrSampleCountToVkSampleCount(uint32_t samples, VkSampleCountFlagBits* vkSamples) {
59 SkASSERT(samples >= 1);
60 switch (samples) {
61 case 1:
62 *vkSamples = VK_SAMPLE_COUNT_1_BIT;
63 return true;
64 case 2:
65 *vkSamples = VK_SAMPLE_COUNT_2_BIT;
66 return true;
67 case 4:
68 *vkSamples = VK_SAMPLE_COUNT_4_BIT;
69 return true;
70 case 8:
71 *vkSamples = VK_SAMPLE_COUNT_8_BIT;
72 return true;
73 case 16:
74 *vkSamples = VK_SAMPLE_COUNT_16_BIT;
75 return true;
76 default:
77 return false;
78 }
79 }
80
vk_shader_stage_to_skiasl_kind(VkShaderStageFlagBits stage)81 SkSL::ProgramKind vk_shader_stage_to_skiasl_kind(VkShaderStageFlagBits stage) {
82 if (VK_SHADER_STAGE_VERTEX_BIT == stage) {
83 return SkSL::ProgramKind::kVertex;
84 }
85 SkASSERT(VK_SHADER_STAGE_FRAGMENT_BIT == stage);
86 return SkSL::ProgramKind::kFragment;
87 }
88
GrCompileVkShaderModule(GrVkGpu* gpu, const SkSL::String& shaderString, VkShaderStageFlagBits stage, VkShaderModule* shaderModule, VkPipelineShaderStageCreateInfo* stageInfo, const SkSL::Program::Settings& settings, SkSL::String* outSPIRV, SkSL::Program::Inputs* outInputs)89 bool GrCompileVkShaderModule(GrVkGpu* gpu,
90 const SkSL::String& shaderString,
91 VkShaderStageFlagBits stage,
92 VkShaderModule* shaderModule,
93 VkPipelineShaderStageCreateInfo* stageInfo,
94 const SkSL::Program::Settings& settings,
95 SkSL::String* outSPIRV,
96 SkSL::Program::Inputs* outInputs) {
97 #ifndef SKIA_OHOS_DEBUG
98 SKIA_OHOS_TRACE_PRIV("skia.shaders", "OHOS_CompileSpriV");
99 #endif
100 TRACE_EVENT0("skia.shaders", "CompileVkShaderModule");
101 auto errorHandler = gpu->getContext()->priv().getShaderErrorHandler();
102 std::unique_ptr<SkSL::Program> program = gpu->shaderCompiler()->convertProgram(
103 vk_shader_stage_to_skiasl_kind(stage), shaderString, settings);
104 if (!program) {
105 errorHandler->compileError(shaderString.c_str(),
106 gpu->shaderCompiler()->errorText().c_str());
107 return false;
108 }
109 *outInputs = program->fInputs;
110 if (!gpu->shaderCompiler()->toSPIRV(*program, outSPIRV)) {
111 errorHandler->compileError(shaderString.c_str(),
112 gpu->shaderCompiler()->errorText().c_str());
113 return false;
114 }
115
116 return GrInstallVkShaderModule(gpu, *outSPIRV, stage, shaderModule, stageInfo);
117 }
118
GrInstallVkShaderModule(GrVkGpu* gpu, const SkSL::String& spirv, VkShaderStageFlagBits stage, VkShaderModule* shaderModule, VkPipelineShaderStageCreateInfo* stageInfo)119 bool GrInstallVkShaderModule(GrVkGpu* gpu,
120 const SkSL::String& spirv,
121 VkShaderStageFlagBits stage,
122 VkShaderModule* shaderModule,
123 VkPipelineShaderStageCreateInfo* stageInfo) {
124 TRACE_EVENT0("skia.shaders", "InstallVkShaderModule");
125 VkShaderModuleCreateInfo moduleCreateInfo;
126 memset(&moduleCreateInfo, 0, sizeof(VkShaderModuleCreateInfo));
127 moduleCreateInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
128 moduleCreateInfo.pNext = nullptr;
129 moduleCreateInfo.flags = 0;
130 moduleCreateInfo.codeSize = spirv.size();
131 moduleCreateInfo.pCode = (const uint32_t*)spirv.c_str();
132
133 VkResult err;
134 GR_VK_CALL_RESULT(gpu, err, CreateShaderModule(gpu->device(), &moduleCreateInfo, nullptr,
135 shaderModule));
136 if (err) {
137 return false;
138 }
139
140 memset(stageInfo, 0, sizeof(VkPipelineShaderStageCreateInfo));
141 stageInfo->sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
142 stageInfo->pNext = nullptr;
143 stageInfo->flags = 0;
144 stageInfo->stage = stage;
145 stageInfo->module = *shaderModule;
146 stageInfo->pName = "main";
147 stageInfo->pSpecializationInfo = nullptr;
148
149 return true;
150 }
151
GrVkFormatIsCompressed(VkFormat vkFormat)152 bool GrVkFormatIsCompressed(VkFormat vkFormat) {
153 switch (vkFormat) {
154 case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
155 case VK_FORMAT_BC1_RGB_UNORM_BLOCK:
156 case VK_FORMAT_BC1_RGBA_UNORM_BLOCK:
157 case VK_FORMAT_ASTC_4x4_UNORM_BLOCK:
158 case VK_FORMAT_ASTC_6x6_UNORM_BLOCK:
159 case VK_FORMAT_ASTC_8x8_UNORM_BLOCK:
160 return true;
161 default:
162 return false;
163 }
164 SkUNREACHABLE;
165 }
166