//
// hlslParseHelper.h -- HLSL parse-context declaration
// (glslang HLSL front end, as vendored under bgfx/3rdparty/glslang/hlsl/)
//
//
// Copyright (C) 2016 Google, Inc.
// Copyright (C) 2016 LunarG, Inc.
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
//    Redistributions of source code must retain the above copyright
//    notice, this list of conditions and the following disclaimer.
//
//    Redistributions in binary form must reproduce the above
//    copyright notice, this list of conditions and the following
//    disclaimer in the documentation and/or other materials provided
//    with the distribution.
//
//    Neither the name of 3Dlabs Inc. Ltd. nor the names of its
//    contributors may be used to endorse or promote products derived
//    from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
//
#ifndef HLSL_PARSE_INCLUDED_
#define HLSL_PARSE_INCLUDED_
#include "../glslang/MachineIndependent/parseVersions.h"
#include "../glslang/MachineIndependent/ParseHelper.h"
namespace glslang {
class TAttributeMap; // forward declare
class TFunctionDeclarator;
class HlslParseContext : public TParseContextBase {
public:
HlslParseContext(TSymbolTable&, TIntermediate&, bool parsingBuiltins,
int version, EProfile, const SpvVersion& spvVersion, EShLanguage, TInfoSink&,
2016-12-17 23:38:22 +03:00
const TString sourceEntryPointName,
2016-12-16 01:19:54 +03:00
bool forwardCompatible = false, EShMessages messages = EShMsgDefault);
virtual ~HlslParseContext();
2016-12-22 09:59:02 +03:00
void initializeExtensionBehavior() override;
2016-12-16 01:19:54 +03:00
2016-12-22 09:59:02 +03:00
void setLimits(const TBuiltInResource&) override;
bool parseShaderStrings(TPpContext&, TInputScanner& input, bool versionWillBeError = false) override;
2017-03-25 06:03:12 +03:00
virtual const char* getGlobalUniformBlockName() const override { return "$Global"; }
virtual void setUniformBlockDefaults(TType& block) const override
{
block.getQualifier().layoutPacking = ElpStd140;
block.getQualifier().layoutMatrix = ElmRowMajor;
}
2016-12-16 01:19:54 +03:00
2016-12-22 09:59:02 +03:00
void reservedPpErrorCheck(const TSourceLoc&, const char* /*name*/, const char* /*op*/) override { }
bool lineContinuationCheck(const TSourceLoc&, bool /*endOfComment*/) override { return true; }
bool lineDirectiveShouldSetNextLine() const override { return true; }
2016-12-16 01:19:54 +03:00
bool builtInName(const TString&);
2016-12-22 09:59:02 +03:00
void handlePragma(const TSourceLoc&, const TVector<TString>&) override;
2017-03-25 06:03:12 +03:00
TIntermTyped* handleVariable(const TSourceLoc&, const TString* string);
2016-12-16 01:19:54 +03:00
TIntermTyped* handleBracketDereference(const TSourceLoc&, TIntermTyped* base, TIntermTyped* index);
TIntermTyped* handleBracketOperator(const TSourceLoc&, TIntermTyped* base, TIntermTyped* index);
void checkIndex(const TSourceLoc&, const TType&, int& index);
TIntermTyped* handleBinaryMath(const TSourceLoc&, const char* str, TOperator op, TIntermTyped* left, TIntermTyped* right);
TIntermTyped* handleUnaryMath(const TSourceLoc&, const char* str, TOperator op, TIntermTyped* childNode);
TIntermTyped* handleDotDereference(const TSourceLoc&, TIntermTyped* base, const TString& field);
2017-03-12 00:55:30 +03:00
bool isBuiltInMethod(const TSourceLoc&, TIntermTyped* base, const TString& field);
2016-12-16 01:19:54 +03:00
void assignLocations(TVariable& variable);
2017-03-18 06:46:01 +03:00
void handleFunctionDeclarator(const TSourceLoc&, TFunction& function, bool prototype);
2017-02-11 07:27:07 +03:00
TIntermAggregate* handleFunctionDefinition(const TSourceLoc&, TFunction&, const TAttributeMap&, TIntermNode*& entryPointTree);
TIntermNode* transformEntryPoint(const TSourceLoc&, TFunction&, const TAttributeMap&);
2017-04-01 05:51:51 +03:00
void handleEntryPointAttributes(const TSourceLoc&, const TAttributeMap&);
2016-12-16 01:19:54 +03:00
void handleFunctionBody(const TSourceLoc&, TFunction&, TIntermNode* functionBody, TIntermNode*& node);
2017-02-11 07:27:07 +03:00
void remapEntryPointIO(TFunction& function, TVariable*& returnValue, TVector<TVariable*>& inputs, TVector<TVariable*>& outputs);
2016-12-16 01:19:54 +03:00
void remapNonEntryPointIO(TFunction& function);
TIntermNode* handleReturnValue(const TSourceLoc&, TIntermTyped*);
void handleFunctionArgument(TFunction*, TIntermTyped*& arguments, TIntermTyped* newArg);
2017-01-20 22:14:16 +03:00
TIntermTyped* handleAssign(const TSourceLoc&, TOperator, TIntermTyped* left, TIntermTyped* right);
TIntermTyped* handleAssignToMatrixSwizzle(const TSourceLoc&, TOperator, TIntermTyped* left, TIntermTyped* right);
2017-01-05 07:34:04 +03:00
TIntermTyped* handleFunctionCall(const TSourceLoc&, TFunction*, TIntermTyped*);
2016-12-16 01:19:54 +03:00
void decomposeIntrinsic(const TSourceLoc&, TIntermTyped*& node, TIntermNode* arguments);
void decomposeSampleMethods(const TSourceLoc&, TIntermTyped*& node, TIntermNode* arguments);
2017-02-25 06:28:48 +03:00
void decomposeStructBufferMethods(const TSourceLoc&, TIntermTyped*& node, TIntermNode* arguments);
2016-12-16 01:19:54 +03:00
void decomposeGeometryMethods(const TSourceLoc&, TIntermTyped*& node, TIntermNode* arguments);
2017-05-20 06:23:01 +03:00
void pushFrontArguments(TIntermTyped* front, TIntermTyped*& arguments);
2017-01-05 07:34:04 +03:00
void addInputArgumentConversions(const TFunction&, TIntermTyped*&);
2017-06-03 21:11:11 +03:00
void expandArguments(const TSourceLoc&, const TFunction&, TIntermTyped*&);
2016-12-17 23:38:22 +03:00
TIntermTyped* addOutputArgumentConversions(const TFunction&, TIntermOperator&);
2016-12-16 01:19:54 +03:00
void builtInOpCheck(const TSourceLoc&, const TFunction&, TIntermOperator&);
2017-04-08 06:11:36 +03:00
TFunction* makeConstructorCall(const TSourceLoc&, const TType&);
2017-03-18 06:46:01 +03:00
void handleSemantic(TSourceLoc, TQualifier&, TBuiltInVariable, const TString& upperCase);
2016-12-16 01:19:54 +03:00
void handlePackOffset(const TSourceLoc&, TQualifier&, const glslang::TString& location,
const glslang::TString* component);
void handleRegister(const TSourceLoc&, TQualifier&, const glslang::TString* profile, const glslang::TString& desc,
int subComponent, const glslang::TString*);
2017-04-15 07:17:34 +03:00
TIntermTyped* convertConditionalExpression(const TSourceLoc&, TIntermTyped*, bool mustBeScalar = true);
2016-12-16 01:19:54 +03:00
TIntermAggregate* handleSamplerTextureCombine(const TSourceLoc& loc, TIntermTyped* argTex, TIntermTyped* argSampler);
2017-01-20 22:14:16 +03:00
bool parseMatrixSwizzleSelector(const TSourceLoc&, const TString&, int cols, int rows, TSwizzleSelectors<TMatrixSelector>&);
int getMatrixComponentsColumn(int rows, const TSwizzleSelectors<TMatrixSelector>&);
2016-12-16 01:19:54 +03:00
void assignError(const TSourceLoc&, const char* op, TString left, TString right);
void unaryOpError(const TSourceLoc&, const char* op, TString operand);
void binaryOpError(const TSourceLoc&, const char* op, TString left, TString right);
void variableCheck(TIntermTyped*& nodePtr);
void constantValueCheck(TIntermTyped* node, const char* token);
void integerCheck(const TIntermTyped* node, const char* token);
void globalCheck(const TSourceLoc&, const char* token);
bool constructorError(const TSourceLoc&, TIntermNode*, TFunction&, TOperator, TType&);
bool constructorTextureSamplerError(const TSourceLoc&, const TFunction&);
void arraySizeCheck(const TSourceLoc&, TIntermTyped* expr, TArraySize&);
void arraySizeRequiredCheck(const TSourceLoc&, const TArraySizes&);
void structArrayCheck(const TSourceLoc&, const TType& structure);
void arrayDimMerge(TType& type, const TArraySizes* sizes);
bool voidErrorCheck(const TSourceLoc&, const TString&, TBasicType);
void globalQualifierFix(const TSourceLoc&, TQualifier&);
bool structQualifierErrorCheck(const TSourceLoc&, const TPublicType& pType);
void mergeQualifiers(TQualifier& dst, const TQualifier& src);
int computeSamplerTypeIndex(TSampler&);
TSymbol* redeclareBuiltinVariable(const TSourceLoc&, const TString&, const TQualifier&, const TShaderQualifiers&);
void redeclareBuiltinBlock(const TSourceLoc&, TTypeList& typeList, const TString& blockName, const TString* instanceName, TArraySizes* arraySizes);
void paramFix(TType& type);
void specializationCheck(const TSourceLoc&, const TType&, const char* op);
void setLayoutQualifier(const TSourceLoc&, TQualifier&, TString&);
void setLayoutQualifier(const TSourceLoc&, TQualifier&, TString&, const TIntermTyped*);
void mergeObjectLayoutQualifiers(TQualifier& dest, const TQualifier& src, bool inheritOnly);
void checkNoShaderLayouts(const TSourceLoc&, const TShaderQualifiers&);
2017-05-20 06:23:01 +03:00
const TFunction* findFunction(const TSourceLoc& loc, TFunction& call, bool& builtIn, int& thisDepth, TIntermTyped*& args);
2017-04-01 05:51:51 +03:00
void declareTypedef(const TSourceLoc&, const TString& identifier, const TType&);
2017-02-11 07:27:07 +03:00
void declareStruct(const TSourceLoc&, TString& structName, TType&);
2017-03-04 07:12:06 +03:00
TSymbol* lookupUserType(const TString&, TType&);
2017-04-01 05:51:51 +03:00
TIntermNode* declareVariable(const TSourceLoc&, const TString& identifier, TType&, TIntermTyped* initializer = 0);
2017-04-08 06:11:36 +03:00
void lengthenList(const TSourceLoc&, TIntermSequence& list, int size, TIntermTyped* scalarInit);
TIntermTyped* handleConstructor(const TSourceLoc&, TIntermTyped*, const TType&);
TIntermTyped* addConstructor(const TSourceLoc&, TIntermTyped*, const TType&);
2016-12-16 01:19:54 +03:00
TIntermTyped* constructAggregate(TIntermNode*, const TType&, int, const TSourceLoc&);
TIntermTyped* constructBuiltIn(const TType&, TOperator, TIntermTyped*, const TSourceLoc&, bool subset);
void declareBlock(const TSourceLoc&, TType&, const TString* instanceName = 0, TArraySizes* arraySizes = 0);
2017-04-22 10:26:44 +03:00
void declareStructBufferCounter(const TSourceLoc& loc, const TType& bufferType, const TString& name);
2016-12-16 01:19:54 +03:00
void fixBlockLocations(const TSourceLoc&, TQualifier&, TTypeList&, bool memberWithLocation, bool memberWithoutLocation);
void fixBlockXfbOffsets(TQualifier&, TTypeList&);
void fixBlockUniformOffsets(const TQualifier&, TTypeList&);
void addQualifierToExisting(const TSourceLoc&, TQualifier, const TString& identifier);
void addQualifierToExisting(const TSourceLoc&, TQualifier, TIdentifierList&);
void updateStandaloneQualifierDefaults(const TSourceLoc&, const TPublicType&);
void wrapupSwitchSubsequence(TIntermAggregate* statements, TIntermNode* branchNode);
TIntermNode* addSwitch(const TSourceLoc&, TIntermTyped* expression, TIntermAggregate* body);
void updateImplicitArraySize(const TSourceLoc&, TIntermNode*, int index);
void nestLooping() { ++loopNestingLevel; }
void unnestLooping() { --loopNestingLevel; }
void nestAnnotations() { ++annotationNestingLevel; }
void unnestAnnotations() { --annotationNestingLevel; }
int getAnnotationNestingLevel() { return annotationNestingLevel; }
void pushScope() { symbolTable.push(); }
void popScope() { symbolTable.pop(0); }
2017-05-20 06:23:01 +03:00
void pushThisScope(const TType&, const TVector<TFunctionDeclarator>&);
2017-03-25 06:03:12 +03:00
void popThisScope() { symbolTable.pop(0); }
void pushImplicitThis(TVariable* thisParameter) { implicitThisStack.push_back(thisParameter); }
void popImplicitThis() { implicitThisStack.pop_back(); }
TVariable* getImplicitThis(int thisDepth) const { return implicitThisStack[implicitThisStack.size() - thisDepth]; }
void pushNamespace(const TString& name);
void popNamespace();
2017-04-01 05:51:51 +03:00
void getFullNamespaceName(const TString*&) const;
2017-03-25 06:03:12 +03:00
void addScopeMangler(TString&);
2017-03-12 00:55:30 +03:00
2016-12-16 01:19:54 +03:00
void pushSwitchSequence(TIntermSequence* sequence) { switchSequenceStack.push_back(sequence); }
void popSwitchSequence() { switchSequenceStack.pop_back(); }
2017-04-01 05:51:51 +03:00
virtual void growGlobalUniformBlock(const TSourceLoc&, TType&, const TString& memberName, TTypeList* typeList = nullptr) override;
2017-02-11 07:27:07 +03:00
2016-12-16 01:19:54 +03:00
// Apply L-value conversions. E.g, turning a write to a RWTexture into an ImageStore.
2017-06-03 21:11:11 +03:00
TIntermTyped* handleLvalue(const TSourceLoc&, const char* op, TIntermTyped*& node);
TIntermTyped* handleSamplerLvalue(const TSourceLoc&, const char* op, TIntermTyped*& node);
2016-12-16 01:19:54 +03:00
bool lValueErrorCheck(const TSourceLoc&, const char* op, TIntermTyped*) override;
TLayoutFormat getLayoutFromTxType(const TSourceLoc&, const TType&);
bool handleOutputGeometry(const TSourceLoc&, const TLayoutGeometry& geometry);
bool handleInputGeometry(const TSourceLoc&, const TLayoutGeometry& geometry);
2017-05-06 10:12:46 +03:00
// Determine loop control from attributes
TLoopControl handleLoopControl(const TAttributeMap& attributes) const;
2016-12-17 23:38:22 +03:00
// Potentially rename shader entry point function
2017-04-01 05:51:51 +03:00
void renameShaderFunction(const TString*& name) const;
2016-12-17 23:38:22 +03:00
// Reset data for incrementally built referencing of flattened composite structures
void initFlattening() { flattenLevel.push_back(0); flattenOffset.push_back(0); }
void finalizeFlattening() { flattenLevel.pop_back(); flattenOffset.pop_back(); }
2017-03-04 07:12:06 +03:00
// Share struct buffer deep types
void shareStructBufferType(TType&);
2016-12-16 01:19:54 +03:00
protected:
2016-12-17 23:38:22 +03:00
struct TFlattenData {
TFlattenData() : nextBinding(TQualifier::layoutBindingEnd) { }
TFlattenData(int nb) : nextBinding(nb) { }
TVector<TVariable*> members; // individual flattened variables
TVector<int> offsets; // offset to next tree level
int nextBinding; // next binding to use.
};
2017-04-01 05:51:51 +03:00
void fixConstInit(const TSourceLoc&, const TString& identifier, TType& type, TIntermTyped*& initializer);
2016-12-16 01:19:54 +03:00
void inheritGlobalDefaults(TQualifier& dst) const;
TVariable* makeInternalVariable(const char* name, const TType&) const;
2017-01-05 07:34:04 +03:00
TVariable* makeInternalVariable(const TString& name, const TType& type) const {
return makeInternalVariable(name.c_str(), type);
}
2017-04-08 06:11:36 +03:00
TIntermSymbol* makeInternalVariableNode(const TSourceLoc&, const char* name, const TType&) const;
2017-04-01 05:51:51 +03:00
TVariable* declareNonArray(const TSourceLoc&, const TString& identifier, const TType&, bool track);
void declareArray(const TSourceLoc&, const TString& identifier, const TType&, TSymbol*&, bool track);
2016-12-16 01:19:54 +03:00
TIntermNode* executeInitializer(const TSourceLoc&, TIntermTyped* initializer, TVariable* variable);
2017-04-08 06:11:36 +03:00
TIntermTyped* convertInitializerList(const TSourceLoc&, const TType&, TIntermTyped* initializer, TIntermTyped* scalarInit);
bool isScalarConstructor(const TIntermNode*);
2016-12-16 01:19:54 +03:00
TOperator mapAtomicOp(const TSourceLoc& loc, TOperator op, bool isImage);
// Return true if this node requires L-value conversion (e.g, to an imageStore).
bool shouldConvertLValue(const TIntermNode*) const;
// Array and struct flattening
2017-01-05 07:34:04 +03:00
TIntermTyped* flattenAccess(TIntermTyped* base, int member);
2017-06-03 21:11:11 +03:00
TIntermTyped* flattenAccess(int uniqueId, int member, const TType&);
bool shouldFlatten(const TType&) const;
2016-12-17 23:38:22 +03:00
bool wasFlattened(const TIntermTyped* node) const;
bool wasFlattened(int id) const { return flattenMap.find(id) != flattenMap.end(); }
int addFlattenedMember(const TSourceLoc& loc, const TVariable&, const TType&, TFlattenData&, const TString& name, bool track);
bool isFinalFlattening(const TType& type) const { return !(type.isStruct() || type.isArray()); }
2017-06-03 21:11:11 +03:00
// Structure splitting (splits interstage built-in types into its own struct)
2017-01-05 07:34:04 +03:00
TIntermTyped* splitAccessStruct(const TSourceLoc& loc, TIntermTyped*& base, int& member);
void splitAccessArray(const TSourceLoc& loc, TIntermTyped* base, TIntermTyped* index);
TType& split(TType& type, TString name, const TType* outerStructType = nullptr);
void split(TIntermTyped*);
void split(const TVariable&);
bool wasSplit(const TIntermTyped* node) const;
bool wasSplit(int id) const { return splitIoVars.find(id) != splitIoVars.end(); }
TVariable* getSplitIoVar(const TIntermTyped* node) const;
TVariable* getSplitIoVar(const TVariable* var) const;
TVariable* getSplitIoVar(int id) const;
void addInterstageIoToLinkage();
2017-02-11 07:27:07 +03:00
void addPatchConstantInvocation();
2017-04-22 10:26:44 +03:00
TIntermTyped* makeIntegerIndex(TIntermTyped*);
2017-01-05 07:34:04 +03:00
2017-04-08 06:11:36 +03:00
void fixBuiltInIoType(TType&);
2017-04-01 05:51:51 +03:00
2016-12-16 01:19:54 +03:00
void flatten(const TSourceLoc& loc, const TVariable& variable);
2016-12-17 23:38:22 +03:00
int flatten(const TSourceLoc& loc, const TVariable& variable, const TType&, TFlattenData&, TString name);
int flattenStruct(const TSourceLoc& loc, const TVariable& variable, const TType&, TFlattenData&, TString name);
int flattenArray(const TSourceLoc& loc, const TVariable& variable, const TType&, TFlattenData&, TString name);
2016-12-16 01:19:54 +03:00
2017-02-11 07:27:07 +03:00
bool hasUniform(const TQualifier& qualifier) const;
void clearUniform(TQualifier& qualifier);
bool isInputBuiltIn(const TQualifier& qualifier) const;
bool hasInput(const TQualifier& qualifier) const;
void correctOutput(TQualifier& qualifier);
bool isOutputBuiltIn(const TQualifier& qualifier) const;
bool hasOutput(const TQualifier& qualifier) const;
void correctInput(TQualifier& qualifier);
void correctUniform(TQualifier& qualifier);
void clearUniformInputOutput(TQualifier& qualifier);
2017-02-25 06:28:48 +03:00
// Test method names
bool isStructBufferMethod(const TString& name) const;
2017-05-20 06:23:01 +03:00
void counterBufferType(const TSourceLoc& loc, TType& type);
2017-02-25 06:28:48 +03:00
2017-04-29 22:09:05 +03:00
// Return standard sample position array
TIntermConstantUnion* getSamplePosArray(int count);
2017-03-04 07:12:06 +03:00
TType* getStructBufferContentType(const TType& type) const;
bool isStructBufferType(const TType& type) const { return getStructBufferContentType(type) != nullptr; }
TIntermTyped* indexStructBufferContent(const TSourceLoc& loc, TIntermTyped* buffer) const;
2017-04-22 10:26:44 +03:00
TIntermTyped* getStructBufferCounter(const TSourceLoc& loc, TIntermTyped* buffer);
2017-05-20 06:23:01 +03:00
TString getStructBuffCounterName(const TString&) const;
void addStructBuffArguments(const TSourceLoc& loc, TIntermAggregate*&);
void addStructBufferHiddenCounterParam(const TSourceLoc& loc, TParameter&, TIntermAggregate*&);
2017-03-04 07:12:06 +03:00
// Return true if this type is a reference. This is not currently a type method in case that's
// a language specific answer.
bool isReference(const TType& type) const { return isStructBufferType(type); }
2017-04-22 10:26:44 +03:00
// Return true if this a buffer type that has an associated counter buffer.
2017-05-20 06:23:01 +03:00
bool hasStructBuffCounter(const TType&) const;
2017-04-22 10:26:44 +03:00
// Finalization step: remove unused buffer blocks from linkage (we don't know until the
// shader is entirely compiled)
void removeUnusedStructBufferCounters();
2017-02-11 07:27:07 +03:00
// Pass through to base class after remembering builtin mappings.
using TParseContextBase::trackLinkage;
void trackLinkage(TSymbol& variable) override;
2017-01-05 07:34:04 +03:00
2017-01-12 07:33:31 +03:00
void finish() override; // post-processing
2017-01-05 07:34:04 +03:00
2017-04-01 05:51:51 +03:00
// Linkage symbol helpers
TIntermSymbol* findLinkageSymbol(TBuiltInVariable biType) const;
2016-12-16 01:19:54 +03:00
// Current state of parsing
int annotationNestingLevel; // 0 if outside all annotations
HlslParseContext(HlslParseContext&);
HlslParseContext& operator=(HlslParseContext&);
static const int maxSamplerIndex = EsdNumDims * (EbtNumTypes * (2 * 2 * 2)); // see computeSamplerTypeIndex()
TQualifier globalBufferDefaults;
TQualifier globalUniformDefaults;
TQualifier globalInputDefaults;
TQualifier globalOutputDefaults;
TString currentCaller; // name of last function body entered (not valid when at global scope)
TIdSetType inductiveLoopIds;
TVector<TIntermTyped*> needsIndexLimitationChecking;
//
// Geometry shader input arrays:
// - array sizing is based on input primitive and/or explicit size
//
// Tessellation control output arrays:
// - array sizing is based on output layout(vertices=...) and/or explicit size
//
// Both:
// - array sizing is retroactive
// - built-in block redeclarations interact with this
//
// Design:
// - use a per-context "resize-list", a list of symbols whose array sizes
// can be fixed
//
// - the resize-list starts empty at beginning of user-shader compilation, it does
// not have built-ins in it
//
// - on built-in array use: copyUp() symbol and add it to the resize-list
//
// - on user array declaration: add it to the resize-list
//
// - on block redeclaration: copyUp() symbol and add it to the resize-list
// * note, that appropriately gives an error if redeclaring a block that
// was already used and hence already copied-up
//
2017-01-12 07:33:31 +03:00
// - on seeing a layout declaration that sizes the array, fix everything in the
2016-12-16 01:19:54 +03:00
// resize-list, giving errors for mismatch
//
// - on seeing an array size declaration, give errors on mismatch between it and previous
// array-sizing declarations
//
TVector<TSymbol*> ioArraySymbolResizeList;
2016-12-17 23:38:22 +03:00
TMap<int, TFlattenData> flattenMap;
TVector<int> flattenLevel; // nested postfix operator level for flattening
TVector<int> flattenOffset; // cumulative offset for flattening
2017-02-11 07:27:07 +03:00
// IO-type map. Maps a pure symbol-table form of a structure-member list into
// each of the (up to) three kinds of IO, as each as different allowed decorations,
// but HLSL allows mixing all in the same structure.
struct tIoKinds {
TTypeList* input;
TTypeList* output;
TTypeList* uniform;
};
TMap<const TTypeList*, tIoKinds> ioTypeMap;
2017-01-05 07:34:04 +03:00
// Structure splitting data:
TMap<int, TVariable*> splitIoVars; // variables with the builtin interstage IO removed, indexed by unique ID.
2017-03-04 07:12:06 +03:00
// Structuredbuffer shared types. Typically there are only a few.
TVector<TType*> structBufferTypes;
2017-04-22 10:26:44 +03:00
TMap<TString, bool> structBufferCounter;
2017-03-04 07:12:06 +03:00
2017-01-05 07:34:04 +03:00
// The builtin interstage IO map considers e.g, EvqPosition on input and output separately, so that we
// can build the linkage correctly if position appears on both sides. Otherwise, multiple positions
// are considered identical.
struct tInterstageIoData {
2017-02-11 07:27:07 +03:00
tInterstageIoData(TBuiltInVariable bi, TStorageQualifier q) :
builtIn(bi), storage(q) { }
2017-01-05 07:34:04 +03:00
tInterstageIoData(const TType& memberType, const TType& storageType) :
builtIn(memberType.getQualifier().builtIn),
storage(storageType.getQualifier().storage) { }
TBuiltInVariable builtIn;
TStorageQualifier storage;
// ordering for maps
bool operator<(const tInterstageIoData d) const {
return (builtIn != d.builtIn) ? (builtIn < d.builtIn) : (storage < d.storage);
}
};
2017-02-11 07:27:07 +03:00
TMap<tInterstageIoData, TVariable*> interstageBuiltInIo; // individual builtin interstage IO vars, indexed by builtin type.
2017-04-08 06:11:36 +03:00
TVariable* inputPatch;
2017-01-05 07:34:04 +03:00
// We have to move array references to structs containing builtin interstage IO to the split variables.
// This is only handled for one level. This stores the index, because we'll need it in the future, since
// unlike normal array references, here the index happens before we discover what it applies to.
TIntermTyped* builtInIoIndex;
TIntermTyped* builtInIoBase;
2016-12-16 01:19:54 +03:00
unsigned int nextInLocation;
unsigned int nextOutLocation;
2016-12-17 23:38:22 +03:00
2017-02-11 07:27:07 +03:00
TString sourceEntryPointName;
TFunction* entryPointFunction;
TIntermNode* entryPointFunctionBody;
TString patchConstantFunctionName; // hull shader patch constant function name, from function level attribute.
TMap<TBuiltInVariable, TSymbol*> builtInLinkageSymbols; // used for tessellation, finding declared builtins
2017-03-25 06:03:12 +03:00
TVector<TString> currentTypePrefix; // current scoping prefix for nested structures
TVector<TVariable*> implicitThisStack; // currently active 'this' variables for nested structures
2017-04-08 06:11:36 +03:00
TVariable* gsStreamOutput; // geometry shader stream outputs, for emit (Append method)
2017-05-20 06:23:01 +03:00
// This tracks the first (mip level) argument to the .mips[][] operator. Since this can be nested as
// in tx.mips[tx.mips[0][1].x][2], we need a stack. We also track the TSourceLoc for error reporting
// purposes.
struct tMipsOperatorData {
tMipsOperatorData(TSourceLoc l, TIntermTyped* m) : loc(l), mipLevel(m) { }
TSourceLoc loc;
TIntermTyped* mipLevel;
};
TVector<tMipsOperatorData> mipsOperatorMipArg;
2016-12-16 01:19:54 +03:00
};
// This is the prefix we use for builtin methods to avoid namespace collisions with
// global scope user functions.
// TODO: this would be better as a nonparseable character, but that would
// require changing the scanner.
#define BUILTIN_PREFIX "__BI_"
} // end namespace glslang
#endif // HLSL_PARSE_INCLUDED_