// Copyright (c) 2018 The Khronos Group Inc.
// Copyright (c) 2018 Valve Corporation
// Copyright (c) 2018 LunarG Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "inst_bindless_check_pass.h"

namespace {

// Input Operand Indices
static const int kSpvImageSampleImageIdInIdx = 0;
static const int kSpvSampledImageImageIdInIdx = 0;
static const int kSpvSampledImageSamplerIdInIdx = 1;
static const int kSpvImageSampledImageIdInIdx = 0;
static const int kSpvLoadPtrIdInIdx = 0;
static const int kSpvAccessChainBaseIdInIdx = 0;
static const int kSpvAccessChainIndex0IdInIdx = 1;
static const int kSpvTypeArrayLengthIdInIdx = 1;
static const int kSpvConstantValueInIdx = 0;
static const int kSpvVariableStorageClassInIdx = 0;

}  // anonymous namespace

// Avoid unused variable warning/error on Linux
#ifndef NDEBUG
#define USE_ASSERT(x) assert(x)
#else
#define USE_ASSERT(x) ((void)(x))
#endif

namespace spvtools {
namespace opt {
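
// Generate code to read the length of the descriptor array bound to |var_id|
// from the debug input buffer.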
uint32_t InstBindlessCheckPass::GenDebugReadLength(
    uint32_t var_id, InstructionBuilder* builder) {
  uint32_t desc_set_idx =
      var2desc_set_[var_id] + kDebugInputBindlessOffsetLengths;
  uint32_t desc_set_idx_id = builder->GetUintConstantId(desc_set_idx);
  uint32_t binding_idx_id = builder->GetUintConstantId(var2binding_[var_id]);
  return GenDebugDirectRead({desc_set_idx_id, binding_idx_id}, builder);
}
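
// Generate code to read the initialization state of the descriptor selected
// by |desc_idx_id| within the binding of |var_id| from the debug input
// buffer.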
uint32_t InstBindlessCheckPass::GenDebugReadInit(uint32_t var_id,
                                                 uint32_t desc_idx_id,
                                                 InstructionBuilder* builder) {
  uint32_t binding_idx_id = builder->GetUintConstantId(var2binding_[var_id]);
  uint32_t u_desc_idx_id = GenUintCastCode(desc_idx_id, builder);
  // If desc index checking is not enabled, we know the offset of
  // initialization entries is 1, so we can avoid loading this value and just
  // add 1 to the descriptor set.
  if (!desc_idx_enabled_) {
    uint32_t desc_set_idx_id =
        builder->GetUintConstantId(var2desc_set_[var_id] + 1);
    return GenDebugDirectRead({desc_set_idx_id, binding_idx_id, u_desc_idx_id},
                              builder);
  } else {
    uint32_t desc_set_base_id =
        builder->GetUintConstantId(kDebugInputBindlessInitOffset);
    uint32_t desc_set_idx_id =
        builder->GetUintConstantId(var2desc_set_[var_id]);
    return GenDebugDirectRead(
        {desc_set_base_id, desc_set_idx_id, binding_idx_id, u_desc_idx_id},
        builder);
  }
}
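
// Clone the original reference into the block under construction. If the
// reference is image-based, also clone the descriptor load and any
// OpSampledImage/OpImage instruction feeding it. Returns the result id of
// the cloned reference, or 0 if the reference has no result id.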
uint32_t InstBindlessCheckPass::CloneOriginalReference(
    ref_analysis* ref, InstructionBuilder* builder) {
  // If original is image based, start by cloning descriptor load
  uint32_t new_image_id = 0;
  if (ref->desc_load_id != 0) {
    Instruction* desc_load_inst = get_def_use_mgr()->GetDef(ref->desc_load_id);
    Instruction* new_load_inst = builder->AddLoad(
        desc_load_inst->type_id(),
        desc_load_inst->GetSingleWordInOperand(kSpvLoadPtrIdInIdx));
    uid2offset_[new_load_inst->unique_id()] =
        uid2offset_[desc_load_inst->unique_id()];
    uint32_t new_load_id = new_load_inst->result_id();
    get_decoration_mgr()->CloneDecorations(desc_load_inst->result_id(),
                                           new_load_id);
    new_image_id = new_load_id;
    // Clone Image/SampledImage with new load, if needed
    if (ref->image_id != 0) {
      Instruction* image_inst = get_def_use_mgr()->GetDef(ref->image_id);
      if (image_inst->opcode() == SpvOp::SpvOpSampledImage) {
        Instruction* new_image_inst = builder->AddBinaryOp(
            image_inst->type_id(), SpvOpSampledImage, new_load_id,
            image_inst->GetSingleWordInOperand(kSpvSampledImageSamplerIdInIdx));
        uid2offset_[new_image_inst->unique_id()] =
            uid2offset_[image_inst->unique_id()];
        new_image_id = new_image_inst->result_id();
      } else {
        assert(image_inst->opcode() == SpvOp::SpvOpImage &&
               "expecting OpImage");
        Instruction* new_image_inst =
            builder->AddUnaryOp(image_inst->type_id(), SpvOpImage, new_load_id);
        uid2offset_[new_image_inst->unique_id()] =
            uid2offset_[image_inst->unique_id()];
        new_image_id = new_image_inst->result_id();
      }
      get_decoration_mgr()->CloneDecorations(ref->image_id, new_image_id);
    }
  }
  // Clone original reference
  std::unique_ptr<Instruction> new_ref_inst(ref->ref_inst->Clone(context()));
  uint32_t ref_result_id = ref->ref_inst->result_id();
  uint32_t new_ref_id = 0;
  if (ref_result_id != 0) {
    new_ref_id = TakeNextId();
    new_ref_inst->SetResultId(new_ref_id);
  }
  // Update new ref with new image if created
  if (new_image_id != 0)
    new_ref_inst->SetInOperand(kSpvImageSampleImageIdInIdx, {new_image_id});
  // Register new reference and add to new block
  Instruction* added_inst = builder->AddInstruction(std::move(new_ref_inst));
  uid2offset_[added_inst->unique_id()] =
      uid2offset_[ref->ref_inst->unique_id()];
  if (new_ref_id != 0)
    get_decoration_mgr()->CloneDecorations(ref_result_id, new_ref_id);
  return new_ref_id;
}
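
// If |inst| is an image instruction, return the id of its image operand;
// otherwise return 0.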
uint32_t InstBindlessCheckPass::GetImageId(Instruction* inst) {
  switch (inst->opcode()) {
    case SpvOp::SpvOpImageSampleImplicitLod:
    case SpvOp::SpvOpImageSampleExplicitLod:
    case SpvOp::SpvOpImageSampleDrefImplicitLod:
    case SpvOp::SpvOpImageSampleDrefExplicitLod:
    case SpvOp::SpvOpImageSampleProjImplicitLod:
    case SpvOp::SpvOpImageSampleProjExplicitLod:
    case SpvOp::SpvOpImageSampleProjDrefImplicitLod:
    case SpvOp::SpvOpImageSampleProjDrefExplicitLod:
    case SpvOp::SpvOpImageGather:
    case SpvOp::SpvOpImageDrefGather:
    case SpvOp::SpvOpImageQueryLod:
    case SpvOp::SpvOpImageSparseSampleImplicitLod:
    case SpvOp::SpvOpImageSparseSampleExplicitLod:
    case SpvOp::SpvOpImageSparseSampleDrefImplicitLod:
    case SpvOp::SpvOpImageSparseSampleDrefExplicitLod:
    case SpvOp::SpvOpImageSparseSampleProjImplicitLod:
    case SpvOp::SpvOpImageSparseSampleProjExplicitLod:
    case SpvOp::SpvOpImageSparseSampleProjDrefImplicitLod:
    case SpvOp::SpvOpImageSparseSampleProjDrefExplicitLod:
    case SpvOp::SpvOpImageSparseGather:
    case SpvOp::SpvOpImageSparseDrefGather:
    case SpvOp::SpvOpImageFetch:
    case SpvOp::SpvOpImageRead:
    case SpvOp::SpvOpImageQueryFormat:
    case SpvOp::SpvOpImageQueryOrder:
    case SpvOp::SpvOpImageQuerySizeLod:
    case SpvOp::SpvOpImageQuerySize:
    case SpvOp::SpvOpImageQueryLevels:
    case SpvOp::SpvOpImageQuerySamples:
    case SpvOp::SpvOpImageSparseFetch:
    case SpvOp::SpvOpImageSparseRead:
    case SpvOp::SpvOpImageWrite:
      return inst->GetSingleWordInOperand(kSpvImageSampleImageIdInIdx);
    default:
      break;
  }
  return 0;
}
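
// Return the instruction defining the pointee type of the pointer-typed
// instruction |ptr_inst|.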
Instruction* InstBindlessCheckPass::GetPointeeTypeInst(Instruction* ptr_inst) {
  uint32_t pte_ty_id = GetPointeeTypeId(ptr_inst);
  return get_def_use_mgr()->GetDef(pte_ty_id);
}
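
// Analyze the descriptor reference |ref_inst| and fill |ref| with its
// components: the descriptor variable, pointer, descriptor index, and, for
// image-based references, the descriptor load and image instruction. Return
// true if |ref_inst| is a descriptor reference this pass can instrument.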
bool InstBindlessCheckPass::AnalyzeDescriptorReference(Instruction* ref_inst,
                                                       ref_analysis* ref) {
  ref->ref_inst = ref_inst;
  if (ref_inst->opcode() == SpvOpLoad || ref_inst->opcode() == SpvOpStore) {
    ref->desc_load_id = 0;
    ref->ptr_id = ref_inst->GetSingleWordInOperand(kSpvLoadPtrIdInIdx);
    Instruction* ptr_inst = get_def_use_mgr()->GetDef(ref->ptr_id);
    if (ptr_inst->opcode() != SpvOp::SpvOpAccessChain) return false;
    ref->var_id = ptr_inst->GetSingleWordInOperand(kSpvAccessChainBaseIdInIdx);
    Instruction* var_inst = get_def_use_mgr()->GetDef(ref->var_id);
    if (var_inst->opcode() != SpvOp::SpvOpVariable) return false;
    uint32_t storage_class =
        var_inst->GetSingleWordInOperand(kSpvVariableStorageClassInIdx);
    switch (storage_class) {
      case SpvStorageClassUniform:
      case SpvStorageClassUniformConstant:
      case SpvStorageClassStorageBuffer:
        break;
      default:
        return false;
        break;
    }
    Instruction* desc_type_inst = GetPointeeTypeInst(var_inst);
    switch (desc_type_inst->opcode()) {
      case SpvOpTypeArray:
      case SpvOpTypeRuntimeArray:
        // A load through a descriptor array will have at least 3 operands. We
        // do not want to instrument loads of descriptors here which are part
        // of an image-based reference.
        if (ptr_inst->NumInOperands() < 3) return false;
        ref->desc_idx_id =
            ptr_inst->GetSingleWordInOperand(kSpvAccessChainIndex0IdInIdx);
        break;
      default:
        ref->desc_idx_id = 0;
        break;
    }
    return true;
  }
  // Reference is not load or store. If not an image-based reference, return.
  ref->image_id = GetImageId(ref_inst);
  if (ref->image_id == 0) return false;
  Instruction* image_inst = get_def_use_mgr()->GetDef(ref->image_id);
  Instruction* desc_load_inst = nullptr;
  if (image_inst->opcode() == SpvOp::SpvOpSampledImage) {
    ref->desc_load_id =
        image_inst->GetSingleWordInOperand(kSpvSampledImageImageIdInIdx);
    desc_load_inst = get_def_use_mgr()->GetDef(ref->desc_load_id);
  } else if (image_inst->opcode() == SpvOp::SpvOpImage) {
    ref->desc_load_id =
        image_inst->GetSingleWordInOperand(kSpvImageSampledImageIdInIdx);
    desc_load_inst = get_def_use_mgr()->GetDef(ref->desc_load_id);
  } else {
    ref->desc_load_id = ref->image_id;
    desc_load_inst = image_inst;
    ref->image_id = 0;
  }
  if (desc_load_inst->opcode() != SpvOp::SpvOpLoad) {
    // TODO(greg-lunarg): Handle additional possibilities?
    return false;
  }
  ref->ptr_id = desc_load_inst->GetSingleWordInOperand(kSpvLoadPtrIdInIdx);
  Instruction* ptr_inst = get_def_use_mgr()->GetDef(ref->ptr_id);
  if (ptr_inst->opcode() == SpvOp::SpvOpVariable) {
    ref->desc_idx_id = 0;
    ref->var_id = ref->ptr_id;
  } else if (ptr_inst->opcode() == SpvOp::SpvOpAccessChain) {
    if (ptr_inst->NumInOperands() != 2) {
      assert(false && "unexpected bindless index number");
      return false;
    }
    ref->desc_idx_id =
        ptr_inst->GetSingleWordInOperand(kSpvAccessChainIndex0IdInIdx);
    ref->var_id = ptr_inst->GetSingleWordInOperand(kSpvAccessChainBaseIdInIdx);
    Instruction* var_inst = get_def_use_mgr()->GetDef(ref->var_id);
    if (var_inst->opcode() != SpvOpVariable) {
      assert(false && "unexpected bindless base");
      return false;
    }
  } else {
    // TODO(greg-lunarg): Handle additional possibilities?
    return false;
  }
  return true;
}
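
// Return the value of the stride decoration |stride_deco| on type |ty_id|.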
uint32_t InstBindlessCheckPass::FindStride(uint32_t ty_id,
                                           uint32_t stride_deco) {
  uint32_t stride = 0xdeadbeef;
  bool found = get_decoration_mgr()->FindDecoration(
      ty_id, stride_deco, [&stride](const Instruction& deco_inst) {
        stride = deco_inst.GetSingleWordInOperand(2u);
        return true;
      });
  USE_ASSERT(found && "stride not found");
  return stride;
}
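
// Return the size in bytes of the value referenced through type |ty_id|.
// |matrix_stride|, |col_major| and |in_matrix| describe the enclosing matrix
// layout, if any, and affect the size of matrix and vector types.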
uint32_t InstBindlessCheckPass::ByteSize(uint32_t ty_id, uint32_t matrix_stride,
                                         bool col_major, bool in_matrix) {
  analysis::TypeManager* type_mgr = context()->get_type_mgr();
  const analysis::Type* sz_ty = type_mgr->GetType(ty_id);
  if (sz_ty->kind() == analysis::Type::kPointer) {
    // Assuming PhysicalStorageBuffer pointer
    return 8;
  }
  if (sz_ty->kind() == analysis::Type::kMatrix) {
    assert(matrix_stride != 0 && "missing matrix stride");
    const analysis::Matrix* m_ty = sz_ty->AsMatrix();
    if (col_major) {
      return m_ty->element_count() * matrix_stride;
    } else {
      const analysis::Vector* v_ty = m_ty->element_type()->AsVector();
      return v_ty->element_count() * matrix_stride;
    }
  }
  uint32_t size = 1;
  if (sz_ty->kind() == analysis::Type::kVector) {
    const analysis::Vector* v_ty = sz_ty->AsVector();
    size = v_ty->element_count();
    const analysis::Type* comp_ty = v_ty->element_type();
    // If the vector is in a row major matrix, the vector is strided, so
    // return the number of bytes spanned by the vector
    if (in_matrix && !col_major && matrix_stride > 0) {
      uint32_t comp_ty_id = type_mgr->GetId(comp_ty);
      return (size - 1) * matrix_stride + ByteSize(comp_ty_id, 0, false, false);
    }
    sz_ty = comp_ty;
  }
  switch (sz_ty->kind()) {
    case analysis::Type::kFloat: {
      const analysis::Float* f_ty = sz_ty->AsFloat();
      size *= f_ty->width();
    } break;
    case analysis::Type::kInteger: {
      const analysis::Integer* i_ty = sz_ty->AsInteger();
      size *= i_ty->width();
    } break;
    default: { assert(false && "unexpected type"); } break;
  }
  size /= 8;
  return size;
}
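
// Generate code into |builder| that computes the index of the last byte
// referenced by the access chain in |ref|, and return the result id of that
// value. Used as the bounds-check operand for buffer references.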
uint32_t InstBindlessCheckPass::GenLastByteIdx(ref_analysis* ref,
                                               InstructionBuilder* builder) {
  // Find outermost buffer type and its access chain index
  Instruction* var_inst = get_def_use_mgr()->GetDef(ref->var_id);
  Instruction* desc_ty_inst = GetPointeeTypeInst(var_inst);
  uint32_t buff_ty_id;
  uint32_t ac_in_idx = 1;
  switch (desc_ty_inst->opcode()) {
    case SpvOpTypeArray:
    case SpvOpTypeRuntimeArray:
      buff_ty_id = desc_ty_inst->GetSingleWordInOperand(0);
      ++ac_in_idx;
      break;
    default:
      assert(desc_ty_inst->opcode() == SpvOpTypeStruct &&
             "unexpected descriptor type");
      buff_ty_id = desc_ty_inst->result_id();
      break;
  }
  // Process remaining access chain indices
  Instruction* ac_inst = get_def_use_mgr()->GetDef(ref->ptr_id);
  uint32_t curr_ty_id = buff_ty_id;
  uint32_t sum_id = 0u;
  uint32_t matrix_stride = 0u;
  bool col_major = false;
  uint32_t matrix_stride_id = 0u;
  bool in_matrix = false;
  while (ac_in_idx < ac_inst->NumInOperands()) {
    uint32_t curr_idx_id = ac_inst->GetSingleWordInOperand(ac_in_idx);
    Instruction* curr_ty_inst = get_def_use_mgr()->GetDef(curr_ty_id);
    uint32_t curr_offset_id = 0;
    switch (curr_ty_inst->opcode()) {
      case SpvOpTypeArray:
      case SpvOpTypeRuntimeArray: {
        // Get array stride and multiply by current index
        uint32_t arr_stride = FindStride(curr_ty_id, SpvDecorationArrayStride);
        uint32_t arr_stride_id = builder->GetUintConstantId(arr_stride);
        uint32_t curr_idx_32b_id = Gen32BitCvtCode(curr_idx_id, builder);
        Instruction* curr_offset_inst = builder->AddBinaryOp(
            GetUintId(), SpvOpIMul, arr_stride_id, curr_idx_32b_id);
        curr_offset_id = curr_offset_inst->result_id();
        // Get element type for next step
        curr_ty_id = curr_ty_inst->GetSingleWordInOperand(0);
      } break;
      case SpvOpTypeMatrix: {
        assert(matrix_stride != 0 && "missing matrix stride");
        matrix_stride_id = builder->GetUintConstantId(matrix_stride);
        uint32_t vec_ty_id = curr_ty_inst->GetSingleWordInOperand(0);
        // If column major, multiply column index by matrix stride, otherwise
        // by vector component size and save matrix stride for vector (row)
        // index
        uint32_t col_stride_id;
        if (col_major) {
          col_stride_id = matrix_stride_id;
        } else {
          Instruction* vec_ty_inst = get_def_use_mgr()->GetDef(vec_ty_id);
          uint32_t comp_ty_id = vec_ty_inst->GetSingleWordInOperand(0u);
          uint32_t col_stride = ByteSize(comp_ty_id, 0u, false, false);
          col_stride_id = builder->GetUintConstantId(col_stride);
        }
        uint32_t curr_idx_32b_id = Gen32BitCvtCode(curr_idx_id, builder);
        Instruction* curr_offset_inst = builder->AddBinaryOp(
            GetUintId(), SpvOpIMul, col_stride_id, curr_idx_32b_id);
        curr_offset_id = curr_offset_inst->result_id();
        // Get element type for next step
        curr_ty_id = vec_ty_id;
        in_matrix = true;
      } break;
      case SpvOpTypeVector: {
        // If inside a row major matrix type, multiply index by matrix stride,
        // else multiply by component size
        uint32_t comp_ty_id = curr_ty_inst->GetSingleWordInOperand(0u);
        uint32_t curr_idx_32b_id = Gen32BitCvtCode(curr_idx_id, builder);
        if (in_matrix && !col_major) {
          Instruction* curr_offset_inst = builder->AddBinaryOp(
              GetUintId(), SpvOpIMul, matrix_stride_id, curr_idx_32b_id);
          curr_offset_id = curr_offset_inst->result_id();
        } else {
          uint32_t comp_ty_sz = ByteSize(comp_ty_id, 0u, false, false);
          uint32_t comp_ty_sz_id = builder->GetUintConstantId(comp_ty_sz);
          Instruction* curr_offset_inst = builder->AddBinaryOp(
              GetUintId(), SpvOpIMul, comp_ty_sz_id, curr_idx_32b_id);
          curr_offset_id = curr_offset_inst->result_id();
        }
        // Get element type for next step
        curr_ty_id = comp_ty_id;
      } break;
      case SpvOpTypeStruct: {
        // Get buffer byte offset for the referenced member
        Instruction* curr_idx_inst = get_def_use_mgr()->GetDef(curr_idx_id);
        assert(curr_idx_inst->opcode() == SpvOpConstant &&
               "unexpected struct index");
        uint32_t member_idx = curr_idx_inst->GetSingleWordInOperand(0);
        uint32_t member_offset = 0xdeadbeef;
        bool found = get_decoration_mgr()->FindDecoration(
            curr_ty_id, SpvDecorationOffset,
            [&member_idx, &member_offset](const Instruction& deco_inst) {
              if (deco_inst.GetSingleWordInOperand(1u) != member_idx)
                return false;
              member_offset = deco_inst.GetSingleWordInOperand(3u);
              return true;
            });
        USE_ASSERT(found && "member offset not found");
        curr_offset_id = builder->GetUintConstantId(member_offset);
        // Look for matrix stride for this member if there is one. The matrix
        // stride is not on the matrix type, but in an OpMemberDecorate on the
        // enclosing struct type at the member index. If none found, reset
        // stride to 0.
        found = get_decoration_mgr()->FindDecoration(
            curr_ty_id, SpvDecorationMatrixStride,
            [&member_idx, &matrix_stride](const Instruction& deco_inst) {
              if (deco_inst.GetSingleWordInOperand(1u) != member_idx)
                return false;
              matrix_stride = deco_inst.GetSingleWordInOperand(3u);
              return true;
            });
        if (!found) matrix_stride = 0;
        // Look for column major decoration
        found = get_decoration_mgr()->FindDecoration(
            curr_ty_id, SpvDecorationColMajor,
            [&member_idx, &col_major](const Instruction& deco_inst) {
              if (deco_inst.GetSingleWordInOperand(1u) != member_idx)
                return false;
              col_major = true;
              return true;
            });
        if (!found) col_major = false;
        // Get element type for next step
        curr_ty_id = curr_ty_inst->GetSingleWordInOperand(member_idx);
      } break;
      default: { assert(false && "unexpected non-composite type"); } break;
    }
    if (sum_id == 0)
      sum_id = curr_offset_id;
    else {
      Instruction* sum_inst =
          builder->AddBinaryOp(GetUintId(), SpvOpIAdd, sum_id, curr_offset_id);
      sum_id = sum_inst->result_id();
    }
    ++ac_in_idx;
  }
  // Add in offset of last byte of referenced object
  uint32_t bsize = ByteSize(curr_ty_id, matrix_stride, col_major, in_matrix);
  uint32_t last = bsize - 1;
  uint32_t last_id = builder->GetUintConstantId(last);
  Instruction* sum_inst =
      builder->AddBinaryOp(GetUintId(), SpvOpIAdd, sum_id, last_id);
  return sum_inst->result_id();
}
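
// Generate the check code: branch on |check_id| to a valid block that clones
// the original reference and an invalid block that writes a record to the
// debug output stream, then merge with a phi of the new reference and null
// (when the reference produces a value) and kill the original reference.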
void InstBindlessCheckPass::GenCheckCode(
    uint32_t check_id, uint32_t error_id, uint32_t offset_id,
    uint32_t length_id, uint32_t stage_idx, ref_analysis* ref,
    std::vector<std::unique_ptr<BasicBlock>>* new_blocks) {
  BasicBlock* back_blk_ptr = &*new_blocks->back();
  InstructionBuilder builder(
      context(), back_blk_ptr,
      IRContext::kAnalysisDefUse | IRContext::kAnalysisInstrToBlockMapping);
  // Gen conditional branch on check_id. Valid branch generates original
  // reference. Invalid generates debug output and zero result (if needed).
  uint32_t merge_blk_id = TakeNextId();
  uint32_t valid_blk_id = TakeNextId();
  uint32_t invalid_blk_id = TakeNextId();
  std::unique_ptr<Instruction> merge_label(NewLabel(merge_blk_id));
  std::unique_ptr<Instruction> valid_label(NewLabel(valid_blk_id));
  std::unique_ptr<Instruction> invalid_label(NewLabel(invalid_blk_id));
  (void)builder.AddConditionalBranch(check_id, valid_blk_id, invalid_blk_id,
                                     merge_blk_id, SpvSelectionControlMaskNone);
  // Gen valid bounds branch
  std::unique_ptr<BasicBlock> new_blk_ptr(
      new BasicBlock(std::move(valid_label)));
  builder.SetInsertPoint(&*new_blk_ptr);
  uint32_t new_ref_id = CloneOriginalReference(ref, &builder);
  (void)builder.AddBranch(merge_blk_id);
  new_blocks->push_back(std::move(new_blk_ptr));
  // Gen invalid block
  new_blk_ptr.reset(new BasicBlock(std::move(invalid_label)));
  builder.SetInsertPoint(&*new_blk_ptr);
  uint32_t u_index_id = GenUintCastCode(ref->desc_idx_id, &builder);
  if (offset_id != 0) {
    // Buffer OOB
    uint32_t u_offset_id = GenUintCastCode(offset_id, &builder);
    uint32_t u_length_id = GenUintCastCode(length_id, &builder);
    GenDebugStreamWrite(uid2offset_[ref->ref_inst->unique_id()], stage_idx,
                        {error_id, u_index_id, u_offset_id, u_length_id},
                        &builder);
  } else if (buffer_bounds_enabled_) {
    // Uninitialized Descriptor - Return additional unused zero so all error
    // modes will use same debug stream write function
    uint32_t u_length_id = GenUintCastCode(length_id, &builder);
    GenDebugStreamWrite(
        uid2offset_[ref->ref_inst->unique_id()], stage_idx,
        {error_id, u_index_id, u_length_id, builder.GetUintConstantId(0)},
        &builder);
  } else {
    // Uninitialized Descriptor - Normal error return
    uint32_t u_length_id = GenUintCastCode(length_id, &builder);
    GenDebugStreamWrite(uid2offset_[ref->ref_inst->unique_id()], stage_idx,
                        {error_id, u_index_id, u_length_id}, &builder);
  }
  // Remember last invalid block id
  uint32_t last_invalid_blk_id = new_blk_ptr->GetLabelInst()->result_id();
  // Gen zero for invalid reference
  uint32_t ref_type_id = ref->ref_inst->type_id();
  (void)builder.AddBranch(merge_blk_id);
  new_blocks->push_back(std::move(new_blk_ptr));
  // Gen merge block
  new_blk_ptr.reset(new BasicBlock(std::move(merge_label)));
  builder.SetInsertPoint(&*new_blk_ptr);
  // Gen phi of new reference and zero, if necessary, and replace the
  // result id of the original reference with that of the Phi. Kill original
  // reference.
  if (new_ref_id != 0) {
    Instruction* phi_inst = builder.AddPhi(
        ref_type_id, {new_ref_id, valid_blk_id, GetNullId(ref_type_id),
                      last_invalid_blk_id});
    context()->ReplaceAllUsesWith(ref->ref_inst->result_id(),
                                  phi_inst->result_id());
  }
  new_blocks->push_back(std::move(new_blk_ptr));
  context()->KillInst(ref->ref_inst);
}
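
// Instrument a reference through an indexed descriptor array with a runtime
// check that the descriptor index is less than the array length. Skips
// references whose index is provably in bounds at compile time.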
void InstBindlessCheckPass::GenDescIdxCheckCode(
    BasicBlock::iterator ref_inst_itr,
    UptrVectorIterator<BasicBlock> ref_block_itr, uint32_t stage_idx,
    std::vector<std::unique_ptr<BasicBlock>>* new_blocks) {
  // Look for reference through indexed descriptor. If found, analyze and
  // save components. If not, return.
  ref_analysis ref;
  if (!AnalyzeDescriptorReference(&*ref_inst_itr, &ref)) return;
  Instruction* ptr_inst = get_def_use_mgr()->GetDef(ref.ptr_id);
  if (ptr_inst->opcode() != SpvOp::SpvOpAccessChain) return;
  // If index and bound are both compile-time constants and index < bound,
  // return without changing
  Instruction* var_inst = get_def_use_mgr()->GetDef(ref.var_id);
  Instruction* desc_type_inst = GetPointeeTypeInst(var_inst);
  uint32_t length_id = 0;
  if (desc_type_inst->opcode() == SpvOpTypeArray) {
    length_id =
        desc_type_inst->GetSingleWordInOperand(kSpvTypeArrayLengthIdInIdx);
    Instruction* index_inst = get_def_use_mgr()->GetDef(ref.desc_idx_id);
    Instruction* length_inst = get_def_use_mgr()->GetDef(length_id);
    if (index_inst->opcode() == SpvOpConstant &&
        length_inst->opcode() == SpvOpConstant &&
        index_inst->GetSingleWordInOperand(kSpvConstantValueInIdx) <
            length_inst->GetSingleWordInOperand(kSpvConstantValueInIdx))
      return;
  } else if (!desc_idx_enabled_ ||
             desc_type_inst->opcode() != SpvOpTypeRuntimeArray) {
    return;
  }
  // Move original block's preceding instructions into first new block
  std::unique_ptr<BasicBlock> new_blk_ptr;
  MovePreludeCode(ref_inst_itr, ref_block_itr, &new_blk_ptr);
  InstructionBuilder builder(
      context(), &*new_blk_ptr,
      IRContext::kAnalysisDefUse | IRContext::kAnalysisInstrToBlockMapping);
  new_blocks->push_back(std::move(new_blk_ptr));
  uint32_t error_id = builder.GetUintConstantId(kInstErrorBindlessBounds);
  // If length id not yet set, descriptor array is runtime size so
  // generate load of length from stage's debug input buffer.
  if (length_id == 0) {
    assert(desc_type_inst->opcode() == SpvOpTypeRuntimeArray &&
           "unexpected bindless type");
    length_id = GenDebugReadLength(ref.var_id, &builder);
  }
  // Generate full runtime bounds test code with true branch
  // being full reference and false branch being debug output and zero
  // for the referenced value.
  uint32_t desc_idx_32b_id = Gen32BitCvtCode(ref.desc_idx_id, &builder);
  uint32_t length_32b_id = Gen32BitCvtCode(length_id, &builder);
  Instruction* ult_inst = builder.AddBinaryOp(GetBoolId(), SpvOpULessThan,
                                              desc_idx_32b_id, length_32b_id);
  ref.desc_idx_id = desc_idx_32b_id;
  GenCheckCode(ult_inst->result_id(), error_id, 0u, length_id, stage_idx, &ref,
               new_blocks);
  // Move original block's remaining code into remainder/merge block and add
  // to new blocks
  BasicBlock* back_blk_ptr = &*new_blocks->back();
  MovePostludeCode(ref_block_itr, back_blk_ptr);
}
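
// Instrument a descriptor reference with an initialization check or, for
// non-aggregate buffer references when buffer bounds checking is enabled,
// with a check of the last byte referenced against the value read from the
// debug input buffer.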
void InstBindlessCheckPass::GenDescInitCheckCode(
    BasicBlock::iterator ref_inst_itr,
    UptrVectorIterator<BasicBlock> ref_block_itr, uint32_t stage_idx,
    std::vector<std::unique_ptr<BasicBlock>>* new_blocks) {
  // Look for reference through descriptor. If not, return.
  ref_analysis ref;
  if (!AnalyzeDescriptorReference(&*ref_inst_itr, &ref)) return;
  // Determine if we can only do initialization check
  bool init_check = false;
  if (ref.desc_load_id != 0 || !buffer_bounds_enabled_) {
    init_check = true;
  } else {
    // For now, only do bounds check for non-aggregate types. Otherwise
    // just do descriptor initialization check.
    // TODO(greg-lunarg): Do bounds check for aggregate loads and stores
    Instruction* ref_ptr_inst = get_def_use_mgr()->GetDef(ref.ptr_id);
    Instruction* pte_type_inst = GetPointeeTypeInst(ref_ptr_inst);
    uint32_t pte_type_op = pte_type_inst->opcode();
    if (pte_type_op == SpvOpTypeArray || pte_type_op == SpvOpTypeRuntimeArray ||
        pte_type_op == SpvOpTypeStruct)
      init_check = true;
  }
  // If initialization check and not enabled, return
  if (init_check && !desc_init_enabled_) return;
  // Move original block's preceding instructions into first new block
  std::unique_ptr<BasicBlock> new_blk_ptr;
  MovePreludeCode(ref_inst_itr, ref_block_itr, &new_blk_ptr);
  InstructionBuilder builder(
      context(), &*new_blk_ptr,
      IRContext::kAnalysisDefUse | IRContext::kAnalysisInstrToBlockMapping);
  new_blocks->push_back(std::move(new_blk_ptr));
  // If initialization check, use reference value of zero.
  // Else use the index of the last byte referenced.
  uint32_t ref_id = init_check ? builder.GetUintConstantId(0u)
                               : GenLastByteIdx(&ref, &builder);
  // Read initialization/bounds from debug input buffer. If index id not yet
  // set, binding is single descriptor, so set index to constant 0.
  if (ref.desc_idx_id == 0) ref.desc_idx_id = builder.GetUintConstantId(0u);
  uint32_t init_id = GenDebugReadInit(ref.var_id, ref.desc_idx_id, &builder);
  // Generate runtime initialization/bounds test code with true branch
  // being full reference and false branch being debug output and zero
  // for the referenced value.
  Instruction* ult_inst =
      builder.AddBinaryOp(GetBoolId(), SpvOpULessThan, ref_id, init_id);
  uint32_t error =
      init_check ? kInstErrorBindlessUninit : kInstErrorBindlessBuffOOB;
  uint32_t error_id = builder.GetUintConstantId(error);
  GenCheckCode(ult_inst->result_id(), error_id, init_check ? 0 : ref_id,
               init_check ? builder.GetUintConstantId(0u) : init_id, stage_idx,
               &ref, new_blocks);
  // Move original block's remaining code into remainder/merge block and add
  // to new blocks
  BasicBlock* back_blk_ptr = &*new_blocks->back();
  MovePostludeCode(ref_block_itr, back_blk_ptr);
}
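
// Initialize the base instrumentation state and build the descriptor set and
// binding maps for module variables from their decorations.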
void InstBindlessCheckPass::InitializeInstBindlessCheck() {
  // Initialize base class
  InitializeInstrument();
  // If runtime array length support is enabled, create variable mappings.
  // Length support is always enabled if descriptor init check is enabled.
  if (desc_idx_enabled_ || buffer_bounds_enabled_)
    for (auto& anno : get_module()->annotations())
      if (anno.opcode() == SpvOpDecorate) {
        if (anno.GetSingleWordInOperand(1u) == SpvDecorationDescriptorSet)
          var2desc_set_[anno.GetSingleWordInOperand(0u)] =
              anno.GetSingleWordInOperand(2u);
        else if (anno.GetSingleWordInOperand(1u) == SpvDecorationBinding)
          var2binding_[anno.GetSingleWordInOperand(0u)] =
              anno.GetSingleWordInOperand(2u);
      }
}

Pass::Status InstBindlessCheckPass::ProcessImpl() {
  // Perform bindless bounds check on each entry point function in module
  InstProcessFunction pfn =
      [this](BasicBlock::iterator ref_inst_itr,
             UptrVectorIterator<BasicBlock> ref_block_itr, uint32_t stage_idx,
             std::vector<std::unique_ptr<BasicBlock>>* new_blocks) {
        return GenDescIdxCheckCode(ref_inst_itr, ref_block_itr, stage_idx,
                                   new_blocks);
      };
  bool modified = InstProcessEntryPointCallTree(pfn);
  if (desc_init_enabled_ || buffer_bounds_enabled_) {
    // Perform descriptor initialization check on each entry point function in
    // module
    pfn = [this](BasicBlock::iterator ref_inst_itr,
                 UptrVectorIterator<BasicBlock> ref_block_itr,
                 uint32_t stage_idx,
                 std::vector<std::unique_ptr<BasicBlock>>* new_blocks) {
      return GenDescInitCheckCode(ref_inst_itr, ref_block_itr, stage_idx,
                                  new_blocks);
    };
    modified |= InstProcessEntryPointCallTree(pfn);
  }
  return modified ? Status::SuccessWithChange : Status::SuccessWithoutChange;
}

Pass::Status InstBindlessCheckPass::Process() {
  InitializeInstBindlessCheck();
  return ProcessImpl();
}

}  // namespace opt
}  // namespace spvtools