diff --git a/compiler/asserts.go b/compiler/asserts.go
index c7267163..f1ae259e 100644
--- a/compiler/asserts.go
+++ b/compiler/asserts.go
@@ -176,23 +176,11 @@ func (b *builder) createNilCheck(inst ssa.Value, ptr llvm.Value, blockPrefix str
 	}
 
 	// Compare against nil.
-	var isnil llvm.Value
-	if ptr.Type().PointerAddressSpace() == 0 {
-		// Do the nil check using the isnil builtin, which marks the parameter
-		// as nocapture.
-		// The reason it has to go through a builtin, is that a regular icmp
-		// instruction may capture the pointer in LLVM semantics, see
-		// https://reviews.llvm.org/D60047 for details. Pointer capturing
-		// unfortunately breaks escape analysis, so we use this trick to let the
-		// functionattr pass know that this pointer doesn't really escape.
-		ptr = b.CreateBitCast(ptr, b.i8ptrType, "")
-		isnil = b.createRuntimeCall("isnil", []llvm.Value{ptr}, "")
-	} else {
-		// Do the nil check using a regular icmp. This can happen with function
-		// pointers on AVR, which don't benefit from escape analysis anyway.
-		nilptr := llvm.ConstPointerNull(ptr.Type())
-		isnil = b.CreateICmp(llvm.IntEQ, ptr, nilptr, "")
-	}
+	// We previously used a hack to make sure this wouldn't break escape
+	// analysis, but this is not necessary anymore since
+	// https://reviews.llvm.org/D60047 has been merged.
+	nilptr := llvm.ConstPointerNull(ptr.Type())
+	isnil := b.CreateICmp(llvm.IntEQ, ptr, nilptr, "")
 
 	// Emit the nil check in IR.
 	b.createRuntimeAssert(isnil, blockPrefix, "nilPanic")
diff --git a/compiler/compiler.go b/compiler/compiler.go
index 46dc70d8..720eb8df 100644
--- a/compiler/compiler.go
+++ b/compiler/compiler.go
@@ -339,9 +339,6 @@ func Compile(pkgName string, machine llvm.TargetMachine, config *compileopts.Con
 		c.mod.NamedFunction("runtime.alloc").AddAttributeAtIndex(0, getAttr(attrName))
 	}
 
-	// See createNilCheck in asserts.go.
-	c.mod.NamedFunction("runtime.isnil").AddAttributeAtIndex(1, nocapture)
-
 	// On *nix systems, the "abort" functuion in libc is used to handle fatal panics.
 	// Mark it as noreturn so LLVM can optimize away code.
 	if abort := c.mod.NamedFunction("abort"); !abort.IsNil() && abort.IsDeclaration() {
diff --git a/src/runtime/panic.go b/src/runtime/panic.go
index 8f8d7366..2efeed20 100644
--- a/src/runtime/panic.go
+++ b/src/runtime/panic.go
@@ -27,14 +27,6 @@ func _recover() interface{} {
 	return nil
 }
 
-// See emitNilCheck in compiler/asserts.go.
-// This function is a dummy function that has its first and only parameter
-// marked 'nocapture' to work around a limitation in LLVM: a regular pointer
-// comparison captures the pointer.
-func isnil(ptr *uint8) bool {
-	return ptr == nil
-}
-
 // Panic when trying to dereference a nil pointer.
 func nilPanic() {
 	runtimePanic("nil pointer dereference")
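For illustration (not part of the patch), this is the kind of source code that reaches createNilCheck, with the IR it now compiles to sketched in the comments; the function names and exact IR value names below are only indicative.

package main

// load dereferences a pointer that the compiler cannot prove to be non-nil,
// so createNilCheck guards the access. With this change the guard is a plain
// comparison, roughly:
//
//	%isnil = icmp eq i32* %p, null
//	br i1 %isnil, label %<prefix>.nil, label %<prefix>.next
//
// where the nil block calls runtime.nilPanic, instead of passing %p through
// the former runtime.isnil builtin via a bitcast to i8*.
func load(p *int) int {
	return *p
}

func main() {
	x := 42
	println(load(&x))
}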
diff --git a/transform/func-lowering.go b/transform/func-lowering.go
index 9687cc76..ed385b5f 100644
--- a/transform/func-lowering.go
+++ b/transform/func-lowering.go
@@ -197,19 +197,8 @@ func LowerFuncValues(mod llvm.Module) {
 					panic("expected inttoptr")
 				}
 				for _, ptrUse := range getUses(callIntPtr) {
-					if !ptrUse.IsABitCastInst().IsNil() {
-						for _, bitcastUse := range getUses(ptrUse) {
-							if bitcastUse.IsACallInst().IsNil() || bitcastUse.CalledValue().IsAFunction().IsNil() {
-								panic("expected a call instruction")
-							}
-							switch bitcastUse.CalledValue().Name() {
-							case "runtime.isnil":
-								bitcastUse.ReplaceAllUsesWith(llvm.ConstInt(ctx.Int1Type(), 0, false))
-								bitcastUse.EraseFromParentAsInstruction()
-							default:
-								panic("expected a call to runtime.isnil")
-							}
-						}
+					if !ptrUse.IsAICmpInst().IsNil() {
+						ptrUse.ReplaceAllUsesWith(llvm.ConstInt(ctx.Int1Type(), 0, false))
 					} else if !ptrUse.IsACallInst().IsNil() && ptrUse.CalledValue() == callIntPtr {
 						addFuncLoweringSwitch(mod, builder, funcID, ptrUse, func(funcPtr llvm.Value, params []llvm.Value) llvm.Value {
 							return builder.CreateCall(funcPtr, params, "")
diff --git a/transform/optimizer.go b/transform/optimizer.go
index 2677e690..9c77c312 100644
--- a/transform/optimizer.go
+++ b/transform/optimizer.go
@@ -98,24 +98,6 @@ func Optimize(mod llvm.Module, config *compileopts.Config, optLevel, sizeLevel i
 		OptimizeAllocs(mod)
 		OptimizeStringToBytes(mod)
 
-		// Lower runtime.isnil calls to regular nil comparisons.
-		isnil := mod.NamedFunction("runtime.isnil")
-		if !isnil.IsNil() {
-			builder := mod.Context().NewBuilder()
-			defer builder.Dispose()
-			for _, use := range getUses(isnil) {
-				builder.SetInsertPointBefore(use)
-				ptr := use.Operand(0)
-				if !ptr.IsABitCastInst().IsNil() {
-					ptr = ptr.Operand(0)
-				}
-				nilptr := llvm.ConstPointerNull(ptr.Type())
-				icmp := builder.CreateICmp(llvm.IntEQ, ptr, nilptr, "")
-				use.ReplaceAllUsesWith(icmp)
-				use.EraseFromParentAsInstruction()
-			}
-		}
-
 	} else {
 		// Must be run at any optimization level.
 		err := LowerInterfaces(mod)
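As a standalone sketch of what the lowering above now matches (a paraphrase, assuming the getUses helper from the transform package; the function name foldNilChecks is hypothetical and not part of the patch):

// foldNilChecks folds icmp-based nil checks on a lowered function pointer to
// false, mirroring the loop in LowerFuncValues above. Now that createNilCheck
// emits an ordinary icmp, the check can be recognized directly among the uses
// of the inttoptr result; indirect calls through the same pointer are still
// handled by addFuncLoweringSwitch as shown in the hunk.
func foldNilChecks(ctx llvm.Context, callIntPtr llvm.Value) {
	for _, ptrUse := range getUses(callIntPtr) {
		if !ptrUse.IsAICmpInst().IsNil() {
			// Replace the comparison with false (i1 0), the same constant the
			// removed runtime.isnil call used to be replaced with.
			ptrUse.ReplaceAllUsesWith(llvm.ConstInt(ctx.Int1Type(), 0, false))
		}
	}
}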
diff --git a/transform/testdata/func-lowering.ll b/transform/testdata/func-lowering.ll
index b9aea6c0..b5692c70 100644
--- a/transform/testdata/func-lowering.ll
+++ b/transform/testdata/func-lowering.ll
@@ -19,8 +19,6 @@ declare void @"internal/task.start"(i32, i8*, i8*, i8*)
 
 declare void @runtime.nilPanic(i8*, i8*)
 
-declare i1 @runtime.isnil(i8*, i8*, i8*)
-
 declare void @"main$1"(i32, i8*, i8*)
 
 declare void @"main$2"(i32, i8*, i8*)
@@ -38,9 +36,8 @@ define void @runFunc1(i8*, i32, i8, i8* %context, i8* %parentHandle) {
 entry:
   %3 = call i32 @runtime.getFuncPtr(i8* %0, i32 %1, %runtime.typecodeID* @"reflect/types.type:func:{basic:int8}{}", i8* undef, i8* null)
   %4 = inttoptr i32 %3 to void (i8, i8*, i8*)*
-  %5 = bitcast void (i8, i8*, i8*)* %4 to i8*
-  %6 = call i1 @runtime.isnil(i8* %5, i8* undef, i8* null)
-  br i1 %6, label %fpcall.nil, label %fpcall.next
+  %5 = icmp eq void (i8, i8*, i8*)* %4, null
+  br i1 %5, label %fpcall.nil, label %fpcall.next
 
 fpcall.nil:
   call void @runtime.nilPanic(i8* undef, i8* null)
@@ -58,9 +55,8 @@ define void @runFunc2(i8*, i32, i8, i8* %context, i8* %parentHandle) {
 entry:
   %3 = call i32 @runtime.getFuncPtr(i8* %0, i32 %1, %runtime.typecodeID* @"reflect/types.type:func:{basic:uint8}{}", i8* undef, i8* null)
   %4 = inttoptr i32 %3 to void (i8, i8*, i8*)*
-  %5 = bitcast void (i8, i8*, i8*)* %4 to i8*
-  %6 = call i1 @runtime.isnil(i8* %5, i8* undef, i8* null)
-  br i1 %6, label %fpcall.nil, label %fpcall.next
+  %5 = icmp eq void (i8, i8*, i8*)* %4, null
+  br i1 %5, label %fpcall.nil, label %fpcall.next
 
 fpcall.nil:
   call void @runtime.nilPanic(i8* undef, i8* null)
diff --git a/transform/testdata/func-lowering.out.ll b/transform/testdata/func-lowering.out.ll
index af63fdc1..97621730 100644
--- a/transform/testdata/func-lowering.out.ll
+++ b/transform/testdata/func-lowering.out.ll
@@ -19,8 +19,6 @@ declare void @"internal/task.start"(i32, i8*, i8*, i8*)
 
 declare void @runtime.nilPanic(i8*, i8*)
 
-declare i1 @runtime.isnil(i8*, i8*, i8*)
-
 declare void @"main$1"(i32, i8*, i8*)
 
 declare void @"main$2"(i32, i8*, i8*)
@@ -38,9 +36,8 @@ define void @runFunc1(i8*, i32, i8, i8* %context, i8* %parentHandle) {
 entry:
   %3 = icmp eq i32 %1, 0
   %4 = select i1 %3, void (i8, i8*, i8*)* null, void (i8, i8*, i8*)* @funcInt8
-  %5 = bitcast void (i8, i8*, i8*)* %4 to i8*
-  %6 = call i1 @runtime.isnil(i8* %5, i8* undef, i8* null)
-  br i1 %6, label %fpcall.nil, label %fpcall.next
+  %5 = icmp eq void (i8, i8*, i8*)* %4, null
+  br i1 %5, label %fpcall.nil, label %fpcall.next
 
 fpcall.nil:
   call void @runtime.nilPanic(i8* undef, i8* null)
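For reference, the runFunc wrappers checked in the testdata above roughly correspond to Go source along these lines (an illustrative reconstruction, not a file in the patch):

package main

func funcInt8(x int8) {}

// Calling a func value produces the runtime.getFuncPtr / icmp / runtime.nilPanic
// pattern that func-lowering.ll exercises: a nil func value panics before the
// indirect call is made.
func runFunc1(f func(int8), arg int8) {
	f(arg)
}

func main() {
	runFunc1(funcInt8, 3)
}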