@@ -18,6 +18,7 @@
 #define DEBUG_TYPE "irgensil"
 
 #include "swift/AST/ASTContext.h"
+#include "swift/AST/DiagnosticsIRGen.h"
 #include "swift/AST/IRGenOptions.h"
 #include "swift/AST/ParameterList.h"
 #include "swift/AST/Pattern.h"
@@ -57,5 +58,6 @@
 #include "llvm/IR/Intrinsics.h"
 #include "llvm/IR/Module.h"
 #include "llvm/Support/Debug.h"
+#include "llvm/Support/MathExtras.h"
 #include "llvm/Support/SaveAndRestore.h"
 #include "llvm/Transforms/Utils/Local.h"
@@ -436,6 +438,11 @@ class IRGenSILFunction :
   /// Calculates EstimatedStackSize.
   void estimateStackSize();
 
+  inline bool isAddress(SILValue v) const {
+    SILType type = v->getType();
+    return type.isAddress() || type.getASTType() == IGM.Context.TheRawPointerType;
+  }
+
   void setLoweredValue(SILValue v, LoweredValue &&lv) {
     auto inserted = LoweredValues.insert({v, std::move(lv)});
     assert(inserted.second && "already had lowered value for sil value?!");
@@ -444,31 +451,31 @@ class IRGenSILFunction :
 
   /// Create a new Address corresponding to the given SIL address value.
   void setLoweredAddress(SILValue v, const Address &address) {
-    assert(v->getType().isAddress() && "address for non-address value?!");
+    assert(isAddress(v) && "address for non-address value?!");
     setLoweredValue(v, address);
   }
 
   void setLoweredStackAddress(SILValue v, const StackAddress &address) {
-    assert(v->getType().isAddress() && "address for non-address value?!");
+    assert(isAddress(v) && "address for non-address value?!");
     setLoweredValue(v, address);
   }
 
   void setLoweredDynamicallyEnforcedAddress(SILValue v,
                                             const Address &address,
                                             llvm::Value *scratch) {
-    assert(v->getType().isAddress() && "address for non-address value?!");
+    assert(isAddress(v) && "address for non-address value?!");
     setLoweredValue(v, DynamicallyEnforcedAddress{address, scratch});
   }
 
   void setContainerOfUnallocatedAddress(SILValue v,
                                         const Address &buffer) {
-    assert(v->getType().isAddress() && "address for non-address value?!");
+    assert(isAddress(v) && "address for non-address value?!");
     setLoweredValue(v,
         LoweredValue(buffer, LoweredValue::ContainerForUnallocatedAddress));
   }
 
   void overwriteAllocatedAddress(SILValue v, const Address &address) {
-    assert(v->getType().isAddress() && "address for non-address value?!");
+    assert(isAddress(v) && "address for non-address value?!");
     auto it = LoweredValues.find(v);
     assert(it != LoweredValues.end() && "no existing entry for overwrite?");
     assert(it->second.isUnallocatedAddressInBuffer() &&
@@ -1623,6 +1630,9 @@ void LoweredValue::getExplosion(IRGenFunction &IGF, SILType type,
                                 Explosion &ex) const {
   switch (kind) {
   case Kind::StackAddress:
+    ex.add(Storage.get<StackAddress>(kind).getAddressPointer());
+    return;
+
   case Kind::ContainedAddress:
   case Kind::DynamicallyEnforcedAddress:
   case Kind::CoroutineState:
@@ -2958,9 +2968,127 @@ static std::unique_ptr<CallEmission> getCallEmissionForLoweredValue(
   return callEmission;
 }
 
+/// Get the size passed to stackAlloc().
+static llvm::Value *getStackAllocationSize(IRGenSILFunction &IGF,
+                                           SILValue vCapacity,
+                                           SILValue vStride,
+                                           SourceLoc loc) {
+  auto &Diags = IGF.IGM.Context.Diags;
+
+  // Check for a negative capacity, which is invalid.
+  auto capacity = IGF.getLoweredSingletonExplosion(vCapacity);
+  Optional<int64_t> capacityValue;
+  if (auto capacityConst = dyn_cast<llvm::ConstantInt>(capacity)) {
+    capacityValue = capacityConst->getSExtValue();
+    if (*capacityValue < 0) {
+      Diags.diagnose(loc, diag::temporary_allocation_size_negative);
+    }
+  }
+
+  // Check for a negative stride, which should never occur because the caller
+  // should always be using MemoryLayout<T>.stride to produce this value.
+  auto stride = IGF.getLoweredSingletonExplosion(vStride);
+  Optional<int64_t> strideValue;
+  if (auto strideConst = dyn_cast<llvm::ConstantInt>(stride)) {
+    strideValue = strideConst->getSExtValue();
+    if (*strideValue < 0) {
+      llvm_unreachable("Builtin.stackAlloc() caller passed an invalid stride");
+    }
+  }
+
+  // Get the byte count (the product of capacity and stride.)
+  llvm::Value *result = nullptr;
+  if (capacityValue && strideValue) {
+    int64_t byteCount = 0;
+    auto overflow = llvm::MulOverflow(*capacityValue, *strideValue, byteCount);
+    if (overflow) {
+      Diags.diagnose(loc, diag::temporary_allocation_size_overflow);
+    }
+    result = llvm::ConstantInt::get(IGF.IGM.SizeTy, byteCount);
+
+  } else {
+    // If either value is not known at compile-time, preconditions must be
+    // tested at runtime by Builtin.stackAlloc()'s caller. See
+    // _byteCountForTemporaryAllocation(of:capacity:).
+    result = IGF.Builder.CreateMul(capacity, stride);
+  }
+
+  // If the caller requests a zero-byte allocation, allocate one byte instead
+  // to ensure that the resulting pointer is valid and unique on the stack.
+  return IGF.Builder.CreateIntrinsicCall(llvm::Intrinsic::umax,
+      {IGF.IGM.SizeTy}, {llvm::ConstantInt::get(IGF.IGM.SizeTy, 1), result});
+}
+
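// Illustrative sketch (not part of the patch): the constant path above relies
// on llvm::MulOverflow from llvm/Support/MathExtras.h, which computes the
// truncated product and reports whether signed overflow occurred. That report
// is what lets IRGen emit the temporary_allocation_size_overflow diagnostic at
// compile time instead of producing a silently wrapping multiply. The function
// name below is hypothetical; only the MulOverflow call reflects the patch.
#include "llvm/Support/MathExtras.h"
#include <cassert>
#include <cstdint>

int main() {
  int64_t byteCount = 0;

  // 3 elements of stride 8 fit comfortably in 64 bits: no overflow.
  bool overflowed = llvm::MulOverflow<int64_t>(3, 8, byteCount);
  assert(!overflowed && byteCount == 24);

  // (1 << 62) * 4 does not fit in a signed 64-bit byte count: MulOverflow
  // returns true, and the lowering diagnoses rather than using the truncated
  // result.
  overflowed = llvm::MulOverflow<int64_t>(int64_t(1) << 62, 4, byteCount);
  assert(overflowed);
  return 0;
}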
+/// Get the alignment passed to stackAlloc() as a compile-time constant.
+///
+/// If the specified alignment is not known at compile time or is not valid,
+/// the default maximum alignment is substituted.
+static Alignment getStackAllocationAlignment(IRGenSILFunction &IGF,
+                                             SILValue v,
+                                             SourceLoc loc) {
+  auto &Diags = IGF.IGM.Context.Diags;
+
+  // Check for a non-positive alignment, which is invalid.
+  auto align = IGF.getLoweredSingletonExplosion(v);
+  if (auto alignConst = dyn_cast<llvm::ConstantInt>(align)) {
+    auto alignValue = alignConst->getSExtValue();
+    if (alignValue <= 0) {
+      Diags.diagnose(loc, diag::temporary_allocation_alignment_not_positive);
+    } else if (!llvm::isPowerOf2_64(alignValue)) {
+      Diags.diagnose(loc, diag::temporary_allocation_alignment_not_power_of_2);
+    } else {
+      return Alignment(alignValue);
+    }
+  }
+
+  // If the alignment is not known at compile-time, preconditions must be tested
+  // at runtime by Builtin.stackAlloc()'s caller. See
+  // _isStackAllocationSafe(byteCount:alignment:).
+  return Alignment(MaximumAlignment);
+}
+
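// Illustrative sketch (not part of the patch): the validation rule implemented
// above in miniature. An alignment constant is usable only if it is positive
// and a power of two (llvm::isPowerOf2_64); anything else falls back to the
// maximum alignment, as the patch does with MaximumAlignment. The names
// validatedAlignment and kFallbackAlignment are hypothetical, and the
// compile-time diagnostics the patch emits for invalid constants are omitted.
#include "llvm/Support/MathExtras.h"
#include <cassert>
#include <cstdint>

static uint64_t validatedAlignment(int64_t requested,
                                   uint64_t kFallbackAlignment = 16) {
  if (requested > 0 && llvm::isPowerOf2_64(static_cast<uint64_t>(requested)))
    return static_cast<uint64_t>(requested);
  return kFallbackAlignment;
}

int main() {
  assert(validatedAlignment(8) == 8);    // valid: positive power of two
  assert(validatedAlignment(0) == 16);   // invalid: not positive
  assert(validatedAlignment(-4) == 16);  // invalid: negative
  assert(validatedAlignment(24) == 16);  // invalid: not a power of two
  return 0;
}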
+/// Emit a call to a stack allocation builtin (stackAlloc() or stackDealloc().)
+///
+/// Returns whether or not `i` was such a builtin (true if so, false if it was
+/// some other builtin.)
+static bool emitStackAllocBuiltinCall(IRGenSILFunction &IGF,
+                                      swift::BuiltinInst *i) {
+  if (i->getBuiltinKind() == BuiltinValueKind::StackAlloc) {
+    // Stack-allocate a buffer with the specified size/alignment.
+    auto loc = i->getLoc().getSourceLoc();
+    auto size = getStackAllocationSize(
+        IGF, i->getOperand(0), i->getOperand(1), loc);
+    auto align = getStackAllocationAlignment(IGF, i->getOperand(2), loc);
+
+    auto stackAddress = IGF.emitDynamicAlloca(IGF.IGM.Int8Ty, size, align,
+                                              false, "temp_alloc");
+    IGF.setLoweredStackAddress(i, stackAddress);
+
+    return true;
+
+  } else if (i->getBuiltinKind() == BuiltinValueKind::StackDealloc) {
+    // Deallocate a stack address previously allocated with the StackAlloc
+    // builtin above.
+    auto address = i->getOperand(0);
+    auto stackAddress = IGF.getLoweredStackAddress(address);
+
+    if (stackAddress.getAddress().isValid()) {
+      IGF.emitDeallocateDynamicAlloca(stackAddress, false);
+    }
+
+    return true;
+  }
+
+  return false;
+}
+
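// Illustrative sketch (not part of the patch): a standalone program, written
// against the plain LLVM IRBuilder API rather than Swift's IRGenFunction
// wrappers, showing roughly the IR shape the StackAlloc branch aims for when
// the byte count is not a compile-time constant: clamp the size to at least
// one byte with llvm.umax, then emit a dynamically sized i8 alloca with the
// requested alignment. The patch itself goes through IGF.emitDynamicAlloca(),
// which also records the state consumed by the StackDealloc branch; none of
// that bookkeeping is modeled here, and all names below are hypothetical.
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/Module.h"
#include "llvm/Support/raw_ostream.h"

int main() {
  llvm::LLVMContext ctx;
  llvm::Module module("temp_alloc_sketch", ctx);
  auto *i64 = llvm::Type::getInt64Ty(ctx);

  // define i8* @sketch(i64 %byteCount)
  auto *fnTy = llvm::FunctionType::get(llvm::Type::getInt8PtrTy(ctx), {i64},
                                       /*isVarArg=*/false);
  auto *fn = llvm::Function::Create(fnTy, llvm::Function::ExternalLinkage,
                                    "sketch", module);
  fn->getArg(0)->setName("byteCount");
  llvm::IRBuilder<> B(llvm::BasicBlock::Create(ctx, "entry", fn));

  // %n = call i64 @llvm.umax.i64(i64 1, i64 %byteCount)
  auto *one = llvm::ConstantInt::get(i64, 1);
  auto *clamped =
      B.CreateBinaryIntrinsic(llvm::Intrinsic::umax, one, fn->getArg(0));

  // %temp_alloc = alloca i8, i64 %n, align 16
  auto *buf = B.CreateAlloca(B.getInt8Ty(), clamped, "temp_alloc");
  buf->setAlignment(llvm::Align(16)); // stand-in for the requested alignment

  B.CreateRet(buf);
  module.print(llvm::outs(), nullptr);
  return 0;
}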
 void IRGenSILFunction::visitBuiltinInst(swift::BuiltinInst *i) {
   const BuiltinInfo &builtin = getSILModule().getBuiltinInfo(i->getName());
 
+  if (emitStackAllocBuiltinCall(*this, i)) {
+    return;
+  }
+
   auto argValues = i->getArguments();
   Explosion args;
   SmallVector<SILType, 4> argTypes;