From 92d1f155da5ec0e58737bc0c2cb67f52bf665347 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Bj=C3=B6rn=20Steinbrink?=
Date: Thu, 1 May 2014 19:32:07 +0200
Subject: [PATCH] Emit LLVM lifetime intrinsics to improve stack usage and
 codegen in general

Lifetime intrinsics help to reduce stack usage, because LLVM can apply
stack coloring to reuse the stack slots of dead allocas for new ones.

For example, these functions now both use the same amount of stack,
while previously `bar()` used five times as much stack as `foo()`:

````rust
fn foo() {
    println("{}", 5);
}

fn bar() {
    println("{}", 5);
    println("{}", 5);
    println("{}", 5);
    println("{}", 5);
    println("{}", 5);
}
````

On top of that, LLVM can also optimize out certain operations when it
knows that memory is dead after a certain point. For example, it can
sometimes remove the zeroing used to cancel the drop glue. This is
possible when the drop glue call itself was already removed because the
zeroing dominated it. For example, in:

````rust
pub fn bar(x: (Box<int>, int)) -> (Box<int>, int) {
    x
}
````

With optimizations, this currently results in:

````llvm
define void @_ZN3bar20h330fa42547df8179niaE({ i64*, i64 }* noalias nocapture nonnull sret, { i64*, i64 }* noalias nocapture nonnull) unnamed_addr #0 {
"_ZN29_$LP$Box$LT$int$GT$$C$int$RP$39glue_drop.$x22glue_drop$x22$LP$1347$RP$17h88cf42702e5a322aE.exit":
  %2 = bitcast { i64*, i64 }* %1 to i8*
  %3 = bitcast { i64*, i64 }* %0 to i8*
  tail call void @llvm.memcpy.p0i8.p0i8.i64(i8* %3, i8* %2, i64 16, i32 8, i1 false)
  tail call void @llvm.memset.p0i8.i64(i8* %2, i8 0, i64 16, i32 8, i1 false)
  ret void
}
````

But with lifetime intrinsics we get:

````llvm
define void @_ZN3bar20h330fa42547df8179niaE({ i64*, i64 }* noalias nocapture nonnull sret, { i64*, i64 }* noalias nocapture nonnull) unnamed_addr #0 {
"_ZN29_$LP$Box$LT$int$GT$$C$int$RP$39glue_drop.$x22glue_drop$x22$LP$1347$RP$17h88cf42702e5a322aE.exit":
  %2 = bitcast { i64*, i64 }* %1 to i8*
  %3 = bitcast { i64*, i64 }* %0 to i8*
  tail call void @llvm.memcpy.p0i8.p0i8.i64(i8* %3, i8* %2, i64 16, i32 8, i1 false)
  tail call void @llvm.lifetime.end(i64 16, i8* %2)
  ret void
}
````

Fixes #15665
---
 src/librustc/middle/trans/_match.rs  |  1 +
 src/librustc/middle/trans/base.rs    | 34 +++++++++++++++++++++++++++-
 src/librustc/middle/trans/cleanup.rs | 32 ++++++++++++++++++++++++++
 src/librustc/middle/trans/context.rs |  3 +++
 src/librustc/middle/trans/datum.rs   |  6 ++++-
 src/librustc/middle/trans/expr.rs    |  5 ++--
 src/librustc/middle/trans/tvec.rs    |  8 +++----
 7 files changed, 81 insertions(+), 8 deletions(-)

diff --git a/src/librustc/middle/trans/_match.rs b/src/librustc/middle/trans/_match.rs
index aeb171c068a85..20526fa91078d 100644
--- a/src/librustc/middle/trans/_match.rs
+++ b/src/librustc/middle/trans/_match.rs
@@ -1603,6 +1603,7 @@ fn mk_binding_alloca<'a,A>(bcx: &'a Block<'a>,
     // Subtle: be sure that we *populate* the memory *before*
     // we schedule the cleanup.
     let bcx = populate(arg, bcx, llval, var_ty);
+    bcx.fcx.schedule_lifetime_end(cleanup_scope, llval);
     bcx.fcx.schedule_drop_mem(cleanup_scope, llval, var_ty);

     // Now that memory is initialized and has cleanup scheduled,
diff --git a/src/librustc/middle/trans/base.rs b/src/librustc/middle/trans/base.rs
index 7d7922ebfa90c..a9d5b30b9c7f4 100644
--- a/src/librustc/middle/trans/base.rs
+++ b/src/librustc/middle/trans/base.rs
@@ -1070,6 +1070,34 @@ pub fn with_cond<'a>(
     next_cx
 }

+pub fn call_lifetime_start(cx: &Block, ptr: ValueRef) {
+    if cx.sess().opts.optimize == config::No {
+        return;
+    }
+
+    let _icx = push_ctxt("lifetime_start");
+    let ccx = cx.ccx();
+
+    let llsize = C_u64(ccx, machine::llsize_of_alloc(ccx, val_ty(ptr).element_type()));
+    let ptr = PointerCast(cx, ptr, Type::i8p(ccx));
+    let lifetime_start = ccx.get_intrinsic(&"llvm.lifetime.start");
+    Call(cx, lifetime_start, [llsize, ptr], []);
+}
+
+pub fn call_lifetime_end(cx: &Block, ptr: ValueRef) {
+    if cx.sess().opts.optimize == config::No {
+        return;
+    }
+
+    let _icx = push_ctxt("lifetime_end");
+    let ccx = cx.ccx();
+
+    let llsize = C_u64(ccx, machine::llsize_of_alloc(ccx, val_ty(ptr).element_type()));
+    let ptr = PointerCast(cx, ptr, Type::i8p(ccx));
+    let lifetime_end = ccx.get_intrinsic(&"llvm.lifetime.end");
+    Call(cx, lifetime_end, [llsize, ptr], []);
+}
+
 pub fn call_memcpy(cx: &Block, dst: ValueRef, src: ValueRef, n_bytes: ValueRef, align: u32) {
     let _icx = push_ctxt("call_memcpy");
     let ccx = cx.ccx();
@@ -1157,6 +1185,8 @@ pub fn alloca_maybe_zeroed(cx: &Block, ty: Type, name: &str, zero: bool) -> Valu
         let b = cx.fcx.ccx.builder();
         b.position_before(cx.fcx.alloca_insert_pt.get().unwrap());
         memzero(&b, p, ty);
+    } else {
+        call_lifetime_start(cx, p);
     }
     p
 }
@@ -1169,7 +1199,9 @@ pub fn arrayalloca(cx: &Block, ty: Type, v: ValueRef) -> ValueRef {
         }
     }
     debuginfo::clear_source_location(cx.fcx);
-    return ArrayAlloca(cx, ty, v);
+    let p = ArrayAlloca(cx, ty, v);
+    call_lifetime_start(cx, p);
+    p
 }

 // Creates and returns space for, or returns the argument representing, the
diff --git a/src/librustc/middle/trans/cleanup.rs b/src/librustc/middle/trans/cleanup.rs
index 6e40445d8f903..c14429b2086ab 100644
--- a/src/librustc/middle/trans/cleanup.rs
+++ b/src/librustc/middle/trans/cleanup.rs
@@ -226,6 +226,20 @@ impl<'a> CleanupMethods<'a> for FunctionContext<'a> {
         self.trans_cleanups_to_exit_scope(ReturnExit)
     }

+    fn schedule_lifetime_end(&self,
+                             cleanup_scope: ScopeId,
+                             val: ValueRef) {
+        let drop = box LifetimeEnd {
+            ptr: val,
+        };
+
+        debug!("schedule_lifetime_end({:?}, val={})",
+               cleanup_scope,
+               self.ccx.tn.val_to_string(val));
+
+        self.schedule_clean(cleanup_scope, drop as Box<Cleanup>);
+    }
+
     fn schedule_drop_mem(&self,
                          cleanup_scope: ScopeId,
                          val: ValueRef,
@@ -902,6 +916,21 @@ impl Cleanup for FreeValue {
     }
 }

+pub struct LifetimeEnd {
+    ptr: ValueRef,
+}
+
+impl Cleanup for LifetimeEnd {
+    fn clean_on_unwind(&self) -> bool {
+        false
+    }
+
+    fn trans<'a>(&self, bcx: &'a Block<'a>) -> &'a Block<'a> {
+        base::call_lifetime_end(bcx, self.ptr);
+        bcx
+    }
+}
+
 pub fn temporary_scope(tcx: &ty::ctxt,
                        id: ast::NodeId)
                        -> ScopeId {
@@ -957,6 +986,9 @@ pub trait CleanupMethods<'a> {
                       cleanup_scope: ast::NodeId,
                       exit: uint) -> BasicBlockRef;
     fn return_exit_block(&'a self) -> BasicBlockRef;
+    fn schedule_lifetime_end(&self,
+                             cleanup_scope: ScopeId,
+                             val: ValueRef);
     fn schedule_drop_mem(&self,
                          cleanup_scope: ScopeId,
                          val: ValueRef,
diff --git a/src/librustc/middle/trans/context.rs b/src/librustc/middle/trans/context.rs
index be39d435ee458..f3457f3b0ae06 100644
--- a/src/librustc/middle/trans/context.rs
+++ b/src/librustc/middle/trans/context.rs
@@ -425,6 +425,9 @@ fn declare_intrinsic(ccx: &CrateContext, key: & &'static str) -> Option<ValueRe
     ifn!("llvm.umul.with.overflow.i32" fn(t_i32, t_i32) -> mk_struct!{t_i32, i1});
     ifn!("llvm.umul.with.overflow.i64" fn(t_i64, t_i64) -> mk_struct!{t_i64, i1});

+    ifn!("llvm.lifetime.start" fn(t_i64,i8p) -> void);
+    ifn!("llvm.lifetime.end" fn(t_i64, i8p) -> void);
+
     ifn!("llvm.expect.i1" fn(i1, i1) -> i1);

     // Some intrinsics were introduced in later versions of LLVM, but they have
diff --git a/src/librustc/middle/trans/datum.rs b/src/librustc/middle/trans/datum.rs
index d4b2b04745b58..4ea895c89bf25 100644
--- a/src/librustc/middle/trans/datum.rs
+++ b/src/librustc/middle/trans/datum.rs
@@ -124,6 +124,7 @@ pub fn lvalue_scratch_datum<'a, A>(bcx: &'a Block<'a>,

     // Subtle. Populate the scratch memory *before* scheduling cleanup.
     let bcx = populate(arg, bcx, scratch);
+    bcx.fcx.schedule_lifetime_end(scope, scratch);
     bcx.fcx.schedule_drop_mem(scope, scratch, ty);

     DatumBlock::new(bcx, Datum::new(scratch, ty, Lvalue))
@@ -169,7 +170,10 @@ fn add_rvalue_clean(mode: RvalueMode,
                     ty: ty::t) {
     match mode {
         ByValue => { fcx.schedule_drop_immediate(scope, val, ty); }
-        ByRef => { fcx.schedule_drop_mem(scope, val, ty); }
+        ByRef => {
+            fcx.schedule_lifetime_end(scope, val);
+            fcx.schedule_drop_mem(scope, val, ty);
+        }
     }
 }
diff --git a/src/librustc/middle/trans/expr.rs b/src/librustc/middle/trans/expr.rs
index 68f577faefed0..4cb1edbe1e77f 100644
--- a/src/librustc/middle/trans/expr.rs
+++ b/src/librustc/middle/trans/expr.rs
@@ -1145,8 +1145,9 @@ pub fn trans_adt<'a>(bcx: &'a Block<'a>,
         let dest = adt::trans_field_ptr(bcx, repr, addr, discr, i);
         let e_ty = expr_ty_adjusted(bcx, &**e);
         bcx = trans_into(bcx, &**e, SaveIn(dest));
-        fcx.schedule_drop_mem(cleanup::CustomScope(custom_cleanup_scope),
-                              dest, e_ty);
+        let scope = cleanup::CustomScope(custom_cleanup_scope);
+        fcx.schedule_lifetime_end(scope, dest);
+        fcx.schedule_drop_mem(scope, dest, e_ty);
     }

     for base in optbase.iter() {
diff --git a/src/librustc/middle/trans/tvec.rs b/src/librustc/middle/trans/tvec.rs
index 07571b2f4c4a5..1241a85e95cfc 100644
--- a/src/librustc/middle/trans/tvec.rs
+++ b/src/librustc/middle/trans/tvec.rs
@@ -170,6 +170,7 @@ pub fn trans_slice_vstore<'a>(
             let llfixed_ty = type_of::type_of(bcx.ccx(), fixed_ty).ptr_to();
             let llfixed_casted = BitCast(bcx, llfixed, llfixed_ty);
             let cleanup_scope = cleanup::temporary_scope(bcx.tcx(), content_expr.id);
+            fcx.schedule_lifetime_end(cleanup_scope, llfixed_casted);
             fcx.schedule_drop_mem(cleanup_scope, llfixed_casted, fixed_ty);

             // Generate the content into the backing array.
@@ -364,10 +365,9 @@ pub fn write_content<'a>(
                        i,
                        bcx.val_to_string(lleltptr));
                 bcx = expr::trans_into(bcx, &**element, SaveIn(lleltptr));
-                fcx.schedule_drop_mem(
-                    cleanup::CustomScope(temp_scope),
-                    lleltptr,
-                    vt.unit_ty);
+                let scope = cleanup::CustomScope(temp_scope);
+                fcx.schedule_lifetime_end(scope, lleltptr);
+                fcx.schedule_drop_mem(scope, lleltptr, vt.unit_ty);
             }
             fcx.pop_custom_cleanup_scope(temp_scope);
         }
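
Note (not part of the patch itself): below is a minimal, hand-written sketch of the IR pattern that the new `call_lifetime_start`/`call_lifetime_end` helpers are meant to produce, assuming a 16-byte stack slot. The `@example` function and value names are made up for illustration, but the intrinsic signatures match the `fn(t_i64, i8p) -> void` declarations registered in context.rs above.

````llvm
; assumed declarations, matching the signatures registered via ifn! above
declare void @llvm.lifetime.start(i64, i8* nocapture)
declare void @llvm.lifetime.end(i64, i8* nocapture)

define void @example() {
entry:
  ; a 16-byte stack slot, e.g. for a (Box<int>, int) temporary
  %slot = alloca [16 x i8], align 8
  %p = bitcast [16 x i8]* %slot to i8*
  ; the slot becomes live here, right after it is allocated
  call void @llvm.lifetime.start(i64 16, i8* %p)
  ; ... code that initializes and uses %slot ...
  ; the slot is dead from here on, so stack coloring may reuse its memory
  call void @llvm.lifetime.end(i64 16, i8* %p)
  ret void
}
````

Scheduling the end marker as a cleanup (the new `LifetimeEnd` cleanup in cleanup.rs, with `clean_on_unwind` returning false) emits it on every normal exit from the scope, mirroring where `schedule_drop_mem` runs.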