From d014b36f05b008405d69af93f4f77f67ef752c42 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?John=20H=C3=B6gberg?= Date: Mon, 2 Sep 2019 13:26:34 +0200 Subject: compiler: Honor stack frames when calling exit BIFs Since exit BIFs were known not to return nor touch the stack, we allowed them to be called regardless of how the stack looked. This was a pretty small improvement which turned out to be a annoying hindrance to the new BIF tracing, as we'd overwrite the return address if we turned the specialized BIF call to an ordinary call. This commit removes that optimization. Modules compiled before OTP 23 will still work, but exit BIFs that are traced will produce incorrect stack traces. --- lib/compiler/src/beam_jump.erl | 2 - lib/compiler/src/beam_ssa_pre_codegen.erl | 37 ++- lib/compiler/src/beam_validator.erl | 341 +++++++++++---------- lib/compiler/test/beam_validator_SUITE.erl | 33 +- .../test/beam_validator_SUITE_data/call_last.S | 21 +- .../beam_validator_SUITE_data/call_without_stack.S | 21 ++ .../test/beam_validator_SUITE_data/freg_uninit.S | 2 + .../beam_validator_SUITE_data/merge_undefined.S | 77 +++-- 8 files changed, 311 insertions(+), 223 deletions(-) create mode 100644 lib/compiler/test/beam_validator_SUITE_data/call_without_stack.S diff --git a/lib/compiler/src/beam_jump.erl b/lib/compiler/src/beam_jump.erl index 74f80ca70e..85fd89bbe8 100644 --- a/lib/compiler/src/beam_jump.erl +++ b/lib/compiler/src/beam_jump.erl @@ -580,8 +580,6 @@ is_unreachable_after(I) -> is_exit_instruction(I). -spec is_exit_instruction(instruction()) -> boolean(). -is_exit_instruction({call_ext,_,{extfunc,M,F,A}}) -> - erl_bifs:is_exit_bif(M, F, A); is_exit_instruction(if_end) -> true; is_exit_instruction({case_end,_}) -> true; is_exit_instruction({try_case_end,_}) -> true; diff --git a/lib/compiler/src/beam_ssa_pre_codegen.erl b/lib/compiler/src/beam_ssa_pre_codegen.erl index 61c42fdb6d..bf2503a2a5 100644 --- a/lib/compiler/src/beam_ssa_pre_codegen.erl +++ b/lib/compiler/src/beam_ssa_pre_codegen.erl @@ -707,20 +707,30 @@ legacy_bs_is([], _Last, _IsYreg, Count, Copies, Acc) -> exception_trampolines(#st{ssa=Blocks0}=St) -> RPO = reverse(beam_ssa:rpo(Blocks0)), - Blocks = et_1(RPO, #{}, Blocks0), + Blocks = et_1(RPO, #{}, #{}, Blocks0), St#st{ssa=Blocks}. -et_1([L | Ls], Trampolines, Blocks) -> +et_1([L | Ls], Trampolines, Exceptions, Blocks) -> #{ L := #b_blk{is=Is,last=Last0}=Block0 } = Blocks, case {Is, Last0} of - {[#b_set{op=exception_trampoline}], #b_br{succ=Succ}} -> - et_1(Ls, Trampolines#{ L => Succ }, maps:remove(L, Blocks)); + {[#b_set{op=exception_trampoline,args=[Arg]}], #b_br{succ=Succ}} -> + et_1(Ls, + Trampolines#{ L => Succ }, + Exceptions#{ L => Arg }, + maps:remove(L, Blocks)); {_, #b_br{succ=Same,fail=Same}} when Same =:= ?EXCEPTION_BLOCK -> %% The exception block is just a marker saying that we should raise %% an exception (= {f,0}) instead of jumping to a particular fail %% block. Since it's not a reachable block we can't allow %% unconditional jumps to it except through a trampoline. error({illegal_jump_to_exception_block, L}); + {_, #b_br{succ=Same,fail=Same}} + when map_get(Same, Trampolines) =:= ?EXCEPTION_BLOCK -> + %% This block always fails at runtime (and we are not in a + %% try/catch); rewrite the terminator to a return. 
+ Last = #b_ret{arg=map_get(Same, Exceptions)}, + Block = Block0#b_blk{last=Last}, + et_1(Ls, Trampolines, Exceptions, Blocks#{ L := Block }); {_, #b_br{succ=Succ0,fail=Fail0}} -> Succ = maps:get(Succ0, Trampolines, Succ0), Fail = maps:get(Fail0, Trampolines, Fail0), @@ -728,14 +738,14 @@ et_1([L | Ls], Trampolines, Blocks) -> Succ =/= Succ0; Fail =/= Fail0 -> Last = Last0#b_br{succ=Succ,fail=Fail}, Block = Block0#b_blk{last=Last}, - et_1(Ls, Trampolines, Blocks#{ L := Block }); + et_1(Ls, Trampolines, Exceptions, Blocks#{ L := Block }); Succ =:= Succ0, Fail =:= Fail0 -> - et_1(Ls, Trampolines, Blocks) + et_1(Ls, Trampolines, Exceptions, Blocks) end; {_, _} -> - et_1(Ls, Trampolines, Blocks) + et_1(Ls, Trampolines, Exceptions, Blocks) end; -et_1([], _Trampolines, Blocks) -> +et_1([], _Trampolines, _Exceptions, Blocks) -> Blocks. %% sanitize(St0) -> St. @@ -1331,14 +1341,9 @@ need_frame_1([#b_set{op=call,args=[Func|_]}|Is], Context) -> #b_remote{mod=#b_literal{val=Mod}, name=#b_literal{val=Name}, arity=Arity} when is_atom(Mod), is_atom(Name) -> - case erl_bifs:is_exit_bif(Mod, Name, Arity) of - true -> - false; - false -> - Context =:= body orelse - Is =/= [] orelse - is_trap_bif(Mod, Name, Arity) - end; + Context =:= body orelse + Is =/= [] orelse + is_trap_bif(Mod, Name, Arity); #b_remote{} -> %% This is an apply(), which always needs a frame. true; diff --git a/lib/compiler/src/beam_validator.erl b/lib/compiler/src/beam_validator.erl index 911b5eb777..d55aeed9a9 100644 --- a/lib/compiler/src/beam_validator.erl +++ b/lib/compiler/src/beam_validator.erl @@ -395,28 +395,33 @@ valfun_1({init,Reg}, Vst) -> create_tag(initialized, init, [], Reg, Vst); valfun_1({test_heap,Heap,Live}, Vst) -> test_heap(Heap, Live, Vst); -valfun_1({bif,Op,{f,0},Ss,Dst}=I, Vst) -> - case will_bif_succeed(Op, Ss, Vst) of +valfun_1({bif,Op,{f,0},Ss,Dst}=I, Vst0) -> + case will_bif_succeed(Op, Ss, Vst0) of yes -> %% This BIF cannot fail, handle it here without updating catch %% state. - validate_bif(Op, cannot_fail, Ss, Dst, Vst); + validate_bif(Op, cannot_fail, Ss, Dst, Vst0); no -> %% The stack will be scanned, so Y registers must be initialized. + Vst = branch_exception(Vst0), verify_y_init(Vst), kill_state(Vst); maybe -> %% The BIF can fail, make sure that any catch state is updated. + Vst = branch_exception(Vst0), valfun_2(I, Vst) end; -valfun_1({gc_bif,Op,{f,0},Live,Ss,Dst}=I, Vst) -> - case will_bif_succeed(Op, Ss, Vst) of +valfun_1({gc_bif,Op,{f,0},Live,Ss,Dst}=I, Vst0) -> + case will_bif_succeed(Op, Ss, Vst0) of yes -> - validate_gc_bif(Op, cannot_fail, Ss, Dst, Live, Vst); + validate_gc_bif(Op, cannot_fail, Ss, Dst, Live, Vst0); no -> + Vst = branch_exception(Vst0), verify_y_init(Vst), kill_state(Vst); maybe -> + Vst = branch_exception(Vst0), + assert_float_checked(Vst), valfun_2(I, Vst) end; %% Put instructions. @@ -510,26 +515,42 @@ valfun_1({'%',_}, Vst) -> Vst; valfun_1({line,_}, Vst) -> Vst; -%% Exception generating calls -valfun_1({call_ext,Live,Func}=I, Vst) -> - case will_call_succeed(Func, Vst) of - yes -> - %% This call cannot fail, handle it here without updating catch - %% state. - call(Func, Live, Vst); - no -> - %% The stack will be scanned, so Y registers must be initialized. - verify_live(Live, Vst), - verify_y_init(Vst), - kill_state(Vst); - maybe -> - %% The call can fail, make sure that any catch state is updated. - valfun_2(I, Vst) - end; +%% +%% Calls; these may be okay when the try/catch state or stack is undecided, +%% depending on whether they always succeed or always fail. 
+%% +valfun_1({apply,Live}, Vst) -> + validate_body_call(apply, Live+2, Vst); +valfun_1({apply_last,Live,N}, Vst) -> + validate_tail_call(N, apply, Live+2, Vst); +valfun_1({call_fun,Live}, Vst) -> + Fun = {x,Live}, + assert_term(Fun, Vst), + + %% An exception is raised on error, hence branching to 0. + branch(0, Vst, + fun(SuccVst0) -> + SuccVst = update_type(fun meet/2, #t_fun{arity=Live}, + Fun, SuccVst0), + validate_body_call('fun', Live+1, SuccVst) + end); +valfun_1({call,Live,Func}, Vst) -> + validate_body_call(Func, Live, Vst); +valfun_1({call_ext,Live,Func}, Vst) -> + validate_body_call(Func, Live, Vst); +valfun_1({call_only,Live,Func}, Vst) -> + validate_tail_call(none, Func, Live, Vst); +valfun_1({call_ext_only,Live,Func}, Vst) -> + validate_tail_call(none, Func, Live, Vst); +valfun_1({call_last,Live,Func,N}, Vst) -> + validate_tail_call(N, Func, Live, Vst); +valfun_1({call_ext_last,Live,Func,N}, Vst) -> + validate_tail_call(N, Func, Live, Vst); valfun_1(_I, #vst{current=#st{ct=undecided}}) -> error(unknown_catch_try_state); %% %% Allocate and deallocate, et.al +%% valfun_1({allocate,Stk,Live}, Vst) -> allocate(uninitialized, Stk, 0, Live, Vst); valfun_1({allocate_heap,Stk,Heap,Live}, Vst) -> @@ -612,9 +633,58 @@ valfun_1({jump,{f,Lbl}}, Vst) -> %% The next instruction is never executed. kill_state(SuccVst) end); -valfun_1(I, Vst) -> +valfun_1(I, Vst0) -> + Vst = branch_exception(Vst0), valfun_2(I, Vst). +validate_tail_call(Deallocate, Func, Live, #vst{current=#st{numy=NumY}}=Vst0) -> + assert_float_checked(Vst0), + case will_call_succeed(Func, Vst0) of + yes when Deallocate =:= NumY -> + %% This call cannot fail, handle it without updating catch state. + tail_call(Func, Live, Vst0); + maybe when Deallocate =:= NumY -> + %% The call can fail, make sure that any catch state is updated. + Vst = branch_exception(Vst0), + tail_call(Func, Live, Vst); + no -> + %% The stack will be scanned, so Y registers must be initialized. + %% + %% Note that the compiler is allowed to emit garbage values for + %% "Deallocate" as we know that it will not be used in this case. + Vst = branch_exception(Vst0), + verify_live(Live, Vst), + verify_y_init(Vst), + kill_state(Vst); + _ when Deallocate =/= NumY -> + error({allocated, NumY}) + end. + +validate_body_call(Func, Live, + #vst{current=#st{numy=NumY}}=Vst0) when is_integer(NumY)-> + assert_float_checked(Vst0), + case will_call_succeed(Func, Vst0) of + yes -> + call(Func, Live, Vst0); + maybe -> + Vst = branch_exception(Vst0), + call(Func, Live, Vst); + no -> + Vst = branch_exception(Vst0), + verify_live(Live, Vst), + verify_y_init(Vst), + kill_state(Vst) + end; +validate_body_call(_, _, #vst{current=#st{numy=NumY}}) -> + error({allocated, NumY}). + +assert_float_checked(Vst) -> + case get_fls(Vst) of + undefined -> ok; + checked -> ok; + Fls -> error({unsafe_instruction,{float_error_state,Fls}}) + end. + init_try_catch_branch(Kind, Dst, Fail, Vst0) -> Tag = {Kind, [Fail]}, Vst = create_tag(Tag, 'try_catch', [], Dst, Vst0), @@ -633,10 +703,10 @@ init_try_catch_branch(Kind, Dst, Fail, Vst0) -> #vst{current=#st{ct=Tags}=St0} = SuccVst0, St = St0#st{ct=[Tag|Tags]}, SuccVst = SuccVst0#vst{current=St}, - + %% All potentially-throwing instructions after this one will %% implicitly branch to the current try/catch handler; see - %% valfun_2/2 + %% the base case of valfun_1/2 SuccVst end). @@ -649,19 +719,20 @@ init_catch_handler_1(Reg, uninitialized, Vst) -> init_catch_handler_1(_, _, Vst) -> Vst. 
-valfun_2(I, #vst{current=#st{ct=[{_,[Fail]}|_]}}=Vst) when is_integer(Fail) -> +branch_exception(#vst{current=#st{ct=[{_,[Fail]}|_]}}=Vst) + when is_integer(Fail) -> %% We have an active try/catch tag and we can jump there from this %% instruction, so we need to update the branched state of the try/catch %% handler. - valfun_3(I, fork_state(Fail, Vst)); -valfun_2(I, #vst{current=#st{ct=[]}}=Vst) -> - valfun_3(I, Vst); -valfun_2(_, _) -> + fork_state(Fail, Vst); +branch_exception(#vst{current=#st{ct=[]}}=Vst) -> + Vst; +branch_exception(_) -> error(ambiguous_catch_try_state). %% Handle the remaining floating point instructions here. %% Floating point. -valfun_3({fconv,Src,{fr,_}=Dst}, Vst) -> +valfun_2({fconv,Src,{fr,_}=Dst}, Vst) -> assert_term(Src, Vst), %% An exception is raised on error, hence branching to 0. @@ -670,72 +741,32 @@ valfun_3({fconv,Src,{fr,_}=Dst}, Vst) -> SuccVst = update_type(fun meet/2, number, Src, SuccVst0), set_freg(Dst, SuccVst) end); -valfun_3({bif,fadd,_,[_,_]=Ss,Dst}, Vst) -> +valfun_2({bif,fadd,_,[_,_]=Ss,Dst}, Vst) -> float_op(Ss, Dst, Vst); -valfun_3({bif,fdiv,_,[_,_]=Ss,Dst}, Vst) -> +valfun_2({bif,fdiv,_,[_,_]=Ss,Dst}, Vst) -> float_op(Ss, Dst, Vst); -valfun_3({bif,fmul,_,[_,_]=Ss,Dst}, Vst) -> +valfun_2({bif,fmul,_,[_,_]=Ss,Dst}, Vst) -> float_op(Ss, Dst, Vst); -valfun_3({bif,fnegate,_,[_]=Ss,Dst}, Vst) -> +valfun_2({bif,fnegate,_,[_]=Ss,Dst}, Vst) -> float_op(Ss, Dst, Vst); -valfun_3({bif,fsub,_,[_,_]=Ss,Dst}, Vst) -> +valfun_2({bif,fsub,_,[_,_]=Ss,Dst}, Vst) -> float_op(Ss, Dst, Vst); -valfun_3(fclearerror, Vst) -> +valfun_2(fclearerror, Vst) -> case get_fls(Vst) of - undefined -> ok; - checked -> ok; - Fls -> error({bad_floating_point_state,Fls}) + undefined -> ok; + checked -> ok; + Fls -> error({bad_floating_point_state,Fls}) end, set_fls(cleared, Vst); -valfun_3({fcheckerror,_}, Vst) -> +valfun_2({fcheckerror,_}, Vst) -> assert_fls(cleared, Vst), set_fls(checked, Vst); -valfun_3(I, Vst) -> - %% The instruction is not a float instruction. - case get_fls(Vst) of - undefined -> - valfun_4(I, Vst); - checked -> - valfun_4(I, Vst); - Fls -> - error({unsafe_instruction,{float_error_state,Fls}}) - end. +valfun_2(I, Vst) -> + assert_float_checked(Vst), + valfun_3(I, Vst). %% Instructions that can cause exceptions. -valfun_4({apply,Live}, Vst) -> - call(apply, Live+2, Vst); -valfun_4({apply_last,Live,_}, Vst) -> - tail_call(apply, Live+2, Vst); -valfun_4({call_fun,Live}, Vst) -> - Fun = {x,Live}, - assert_term(Fun, Vst), - - %% An exception is raised on error, hence branching to 0. - branch(0, Vst, - fun(SuccVst0) -> - SuccVst = update_type(fun meet/2, #t_fun{arity=Live}, - Fun, SuccVst0), - call('fun', Live+1, SuccVst) - end); -valfun_4({call,Live,Func}, Vst) -> - call(Func, Live, Vst); -valfun_4({call_ext,Live,Func}, Vst) -> - %% Exception BIFs has already been taken care of above. 
- call(Func, Live, Vst); -valfun_4({call_only,Live,Func}, Vst) -> - tail_call(Func, Live, Vst); -valfun_4({call_ext_only,Live,Func}, Vst) -> - tail_call(Func, Live, Vst); -valfun_4({call_last,Live,Func,StkSize}, #vst{current=#st{numy=StkSize}}=Vst) -> - tail_call(Func, Live, Vst); -valfun_4({call_last,_,_,_}, #vst{current=#st{numy=NumY}}) -> - error({allocated,NumY}); -valfun_4({call_ext_last,Live,Func,StkSize}, - #vst{current=#st{numy=StkSize}}=Vst) -> - tail_call(Func, Live, Vst); -valfun_4({call_ext_last,_,_,_}, #vst{current=#st{numy=NumY}}) -> - error({allocated,NumY}); -valfun_4({make_fun2,{f,Lbl},_,_,NumFree}, #vst{ft=Ft}=Vst0) -> +valfun_3({make_fun2,{f,Lbl},_,_,NumFree}, #vst{ft=Ft}=Vst0) -> #{ arity := Arity0 } = gb_trees:get(Lbl, Ft), Arity = Arity0 - NumFree, @@ -747,22 +778,22 @@ valfun_4({make_fun2,{f,Lbl},_,_,NumFree}, #vst{ft=Ft}=Vst0) -> create_term(#t_fun{arity=Arity}, make_fun, [], {x,0}, Vst); %% Other BIFs -valfun_4({bif,raise,{f,0},Src,_Dst}, Vst) -> +valfun_3({bif,raise,{f,0},Src,_Dst}, Vst) -> validate_src(Src, Vst), kill_state(Vst); -valfun_4(raw_raise=I, Vst) -> +valfun_3(raw_raise=I, Vst) -> call(I, 3, Vst); -valfun_4({bif,Op,{f,Fail},Ss,Dst}, Vst) -> +valfun_3({bif,Op,{f,Fail},Ss,Dst}, Vst) -> validate_src(Ss, Vst), validate_bif(Op, Fail, Ss, Dst, Vst); -valfun_4({gc_bif,Op,{f,Fail},Live,Ss,Dst}, Vst) -> +valfun_3({gc_bif,Op,{f,Fail},Live,Ss,Dst}, Vst) -> validate_gc_bif(Op, Fail, Ss, Dst, Live, Vst); -valfun_4(return, #vst{current=#st{numy=none}}=Vst) -> +valfun_3(return, #vst{current=#st{numy=none}}=Vst) -> assert_durable_term({x,0}, Vst), kill_state(Vst); -valfun_4(return, #vst{current=#st{numy=NumY}}) -> +valfun_3(return, #vst{current=#st{numy=NumY}}) -> error({stack_frame,NumY}); -valfun_4({loop_rec,{f,Fail},Dst}, Vst) -> +valfun_3({loop_rec,{f,Fail},Dst}, Vst) -> %% This term may not be part of the root set until remove_message/0 is %% executed. If control transfers to the loop_rec_end/1 instruction, no %% part of this term must be stored in a Y register. @@ -771,55 +802,55 @@ valfun_4({loop_rec,{f,Fail},Dst}, Vst) -> {Ref, SuccVst} = new_value(any, loop_rec, [], SuccVst0), mark_fragile(Dst, set_reg_vref(Ref, Dst, SuccVst)) end); -valfun_4({wait,_}, Vst) -> +valfun_3({wait,_}, Vst) -> verify_y_init(Vst), kill_state(Vst); -valfun_4({wait_timeout,_,Src}, Vst) -> +valfun_3({wait_timeout,_,Src}, Vst) -> assert_term(Src, Vst), verify_y_init(Vst), prune_x_regs(0, Vst); -valfun_4({loop_rec_end,_}, Vst) -> +valfun_3({loop_rec_end,_}, Vst) -> verify_y_init(Vst), kill_state(Vst); -valfun_4(timeout, Vst) -> +valfun_3(timeout, Vst) -> prune_x_regs(0, Vst); -valfun_4(send, Vst) -> +valfun_3(send, Vst) -> call(send, 2, Vst); %% Match instructions. -valfun_4({select_val,Src,{f,Fail},{list,Choices}}, Vst) -> +valfun_3({select_val,Src,{f,Fail},{list,Choices}}, Vst) -> assert_term(Src, Vst), assert_choices(Choices), validate_select_val(Fail, Choices, Src, Vst); -valfun_4({select_tuple_arity,Tuple,{f,Fail},{list,Choices}}, Vst) -> +valfun_3({select_tuple_arity,Tuple,{f,Fail},{list,Choices}}, Vst) -> assert_type(#t_tuple{}, Tuple, Vst), assert_arities(Choices), validate_select_tuple_arity(Fail, Choices, Tuple, Vst); %% New bit syntax matching instructions. 
-valfun_4({test,bs_start_match3,{f,Fail},Live,[Src],Dst}, Vst) -> +valfun_3({test,bs_start_match3,{f,Fail},Live,[Src],Dst}, Vst) -> validate_bs_start_match(Fail, Live, bsm_match_state(), Src, Dst, Vst); -valfun_4({test,bs_start_match2,{f,Fail},Live,[Src,Slots],Dst}, Vst) -> +valfun_3({test,bs_start_match2,{f,Fail},Live,[Src,Slots],Dst}, Vst) -> validate_bs_start_match(Fail, Live, bsm_match_state(Slots), Src, Dst, Vst); -valfun_4({test,bs_match_string,{f,Fail},[Ctx,_,_]}, Vst) -> +valfun_3({test,bs_match_string,{f,Fail},[Ctx,_,_]}, Vst) -> assert_type(#t_bs_context{}, Ctx, Vst), branch(Fail, Vst, fun(V) -> V end); -valfun_4({test,bs_skip_bits2,{f,Fail},[Ctx,Src,_,_]}, Vst) -> +valfun_3({test,bs_skip_bits2,{f,Fail},[Ctx,Src,_,_]}, Vst) -> assert_type(#t_bs_context{}, Ctx, Vst), assert_term(Src, Vst), branch(Fail, Vst, fun(V) -> V end); -valfun_4({test,bs_test_tail2,{f,Fail},[Ctx,_]}, Vst) -> +valfun_3({test,bs_test_tail2,{f,Fail},[Ctx,_]}, Vst) -> assert_type(#t_bs_context{}, Ctx, Vst), branch(Fail, Vst, fun(V) -> V end); -valfun_4({test,bs_test_unit,{f,Fail},[Ctx,_]}, Vst) -> +valfun_3({test,bs_test_unit,{f,Fail},[Ctx,_]}, Vst) -> assert_type(#t_bs_context{}, Ctx, Vst), branch(Fail, Vst, fun(V) -> V end); -valfun_4({test,bs_skip_utf8,{f,Fail},[Ctx,Live,_]}, Vst) -> +valfun_3({test,bs_skip_utf8,{f,Fail},[Ctx,Live,_]}, Vst) -> validate_bs_skip_utf(Fail, Ctx, Live, Vst); -valfun_4({test,bs_skip_utf16,{f,Fail},[Ctx,Live,_]}, Vst) -> +valfun_3({test,bs_skip_utf16,{f,Fail},[Ctx,Live,_]}, Vst) -> validate_bs_skip_utf(Fail, Ctx, Live, Vst); -valfun_4({test,bs_skip_utf32,{f,Fail},[Ctx,Live,_]}, Vst) -> +valfun_3({test,bs_skip_utf32,{f,Fail},[Ctx,Live,_]}, Vst) -> validate_bs_skip_utf(Fail, Ctx, Live, Vst); -valfun_4({test,bs_get_integer2=Op,{f,Fail},Live, +valfun_3({test,bs_get_integer2=Op,{f,Fail},Live, [Ctx,{integer,Size},Unit,{field_flags,Flags}],Dst},Vst) when Size * Unit =< 64 -> Type = case member(unsigned, Flags) of @@ -831,66 +862,66 @@ valfun_4({test,bs_get_integer2=Op,{f,Fail},Live, #t_integer{} end, validate_bs_get(Op, Fail, Ctx, Live, Type, Dst, Vst); -valfun_4({test,bs_get_integer2=Op,{f,Fail},Live, +valfun_3({test,bs_get_integer2=Op,{f,Fail},Live, [Ctx,_Size,_Unit,_Flags],Dst},Vst) -> validate_bs_get(Op, Fail, Ctx, Live, #t_integer{}, Dst, Vst); -valfun_4({test,bs_get_float2=Op,{f,Fail},Live,[Ctx,_,_,_],Dst}, Vst) -> +valfun_3({test,bs_get_float2=Op,{f,Fail},Live,[Ctx,_,_,_],Dst}, Vst) -> validate_bs_get(Op, Fail, Ctx, Live, float, Dst, Vst); -valfun_4({test,bs_get_binary2=Op,{f,Fail},Live,[Ctx,_,Unit,_],Dst}, Vst) -> +valfun_3({test,bs_get_binary2=Op,{f,Fail},Live,[Ctx,_,Unit,_],Dst}, Vst) -> validate_bs_get(Op, Fail, Ctx, Live, #t_bitstring{unit=Unit}, Dst, Vst); -valfun_4({test,bs_get_utf8=Op,{f,Fail},Live,[Ctx,_],Dst}, Vst) -> +valfun_3({test,bs_get_utf8=Op,{f,Fail},Live,[Ctx,_],Dst}, Vst) -> Type = beam_types:make_integer(0, ?UNICODE_MAX), validate_bs_get(Op, Fail, Ctx, Live, Type, Dst, Vst); -valfun_4({test,bs_get_utf16=Op,{f,Fail},Live,[Ctx,_],Dst}, Vst) -> +valfun_3({test,bs_get_utf16=Op,{f,Fail},Live,[Ctx,_],Dst}, Vst) -> Type = beam_types:make_integer(0, ?UNICODE_MAX), validate_bs_get(Op, Fail, Ctx, Live, Type, Dst, Vst); -valfun_4({test,bs_get_utf32=Op,{f,Fail},Live,[Ctx,_],Dst}, Vst) -> +valfun_3({test,bs_get_utf32=Op,{f,Fail},Live,[Ctx,_],Dst}, Vst) -> Type = beam_types:make_integer(0, ?UNICODE_MAX), validate_bs_get(Op, Fail, Ctx, Live, Type, Dst, Vst); -valfun_4({bs_save2,Ctx,SavePoint}, Vst) -> +valfun_3({bs_save2,Ctx,SavePoint}, Vst) -> bsm_save(Ctx, SavePoint, Vst); 
-valfun_4({bs_restore2,Ctx,SavePoint}, Vst) -> +valfun_3({bs_restore2,Ctx,SavePoint}, Vst) -> bsm_restore(Ctx, SavePoint, Vst); -valfun_4({bs_get_position, Ctx, Dst, Live}, Vst0) -> +valfun_3({bs_get_position, Ctx, Dst, Live}, Vst0) -> assert_type(#t_bs_context{}, Ctx, Vst0), verify_live(Live, Vst0), verify_y_init(Vst0), Vst = prune_x_regs(Live, Vst0), create_term(#t_abstract{kind=ms_position}, bs_get_position, [Ctx], Dst, Vst, Vst0); -valfun_4({bs_set_position, Ctx, Pos}, Vst) -> +valfun_3({bs_set_position, Ctx, Pos}, Vst) -> assert_type(#t_bs_context{}, Ctx, Vst), assert_type(#t_abstract{kind=ms_position}, Pos, Vst), Vst; %% Other test instructions. -valfun_4({test,has_map_fields,{f,Lbl},Src,{list,List}}, Vst) -> +valfun_3({test,has_map_fields,{f,Lbl},Src,{list,List}}, Vst) -> assert_type(#t_map{}, Src, Vst), assert_unique_map_keys(List), branch(Lbl, Vst, fun(V) -> V end); -valfun_4({test,is_atom,{f,Lbl},[Src]}, Vst) -> +valfun_3({test,is_atom,{f,Lbl},[Src]}, Vst) -> type_test(Lbl, #t_atom{}, Src, Vst); -valfun_4({test,is_binary,{f,Lbl},[Src]}, Vst) -> +valfun_3({test,is_binary,{f,Lbl},[Src]}, Vst) -> type_test(Lbl, #t_bitstring{unit=8}, Src, Vst); -valfun_4({test,is_bitstr,{f,Lbl},[Src]}, Vst) -> +valfun_3({test,is_bitstr,{f,Lbl},[Src]}, Vst) -> type_test(Lbl, #t_bitstring{}, Src, Vst); -valfun_4({test,is_boolean,{f,Lbl},[Src]}, Vst) -> +valfun_3({test,is_boolean,{f,Lbl},[Src]}, Vst) -> type_test(Lbl, beam_types:make_boolean(), Src, Vst); -valfun_4({test,is_float,{f,Lbl},[Src]}, Vst) -> +valfun_3({test,is_float,{f,Lbl},[Src]}, Vst) -> type_test(Lbl, float, Src, Vst); -valfun_4({test,is_tuple,{f,Lbl},[Src]}, Vst) -> +valfun_3({test,is_tuple,{f,Lbl},[Src]}, Vst) -> type_test(Lbl, #t_tuple{}, Src, Vst); -valfun_4({test,is_integer,{f,Lbl},[Src]}, Vst) -> +valfun_3({test,is_integer,{f,Lbl},[Src]}, Vst) -> type_test(Lbl, #t_integer{}, Src, Vst); -valfun_4({test,is_nonempty_list,{f,Lbl},[Src]}, Vst) -> +valfun_3({test,is_nonempty_list,{f,Lbl},[Src]}, Vst) -> type_test(Lbl, cons, Src, Vst); -valfun_4({test,is_number,{f,Lbl},[Src]}, Vst) -> +valfun_3({test,is_number,{f,Lbl},[Src]}, Vst) -> type_test(Lbl, number, Src, Vst); -valfun_4({test,is_list,{f,Lbl},[Src]}, Vst) -> +valfun_3({test,is_list,{f,Lbl},[Src]}, Vst) -> type_test(Lbl, list, Src, Vst); -valfun_4({test,is_map,{f,Lbl},[Src]}, Vst) -> +valfun_3({test,is_map,{f,Lbl},[Src]}, Vst) -> type_test(Lbl, #t_map{}, Src, Vst); -valfun_4({test,is_nil,{f,Lbl},[Src]}, Vst) -> +valfun_3({test,is_nil,{f,Lbl},[Src]}, Vst) -> %% is_nil is an exact check against the 'nil' value, and should not be %% treated as a simple type test. 
assert_term(Src, Vst), @@ -901,16 +932,16 @@ valfun_4({test,is_nil,{f,Lbl},[Src]}, Vst) -> fun(SuccVst) -> update_eq_types(Src, nil, SuccVst) end); -valfun_4({test,test_arity,{f,Lbl},[Tuple,Sz]}, Vst) when is_integer(Sz) -> +valfun_3({test,test_arity,{f,Lbl},[Tuple,Sz]}, Vst) when is_integer(Sz) -> assert_type(#t_tuple{}, Tuple, Vst), Type = #t_tuple{exact=true,size=Sz}, type_test(Lbl, Type, Tuple, Vst); -valfun_4({test,is_tagged_tuple,{f,Lbl},[Src,Sz,Atom]}, Vst) -> +valfun_3({test,is_tagged_tuple,{f,Lbl},[Src,Sz,Atom]}, Vst) -> assert_term(Src, Vst), Es = #{ 1 => get_literal_type(Atom) }, Type = #t_tuple{exact=true,size=Sz,elements=Es}, type_test(Lbl, Type, Src, Vst); -valfun_4({test,is_eq_exact,{f,Lbl},[Src,Val]=Ss}, Vst) -> +valfun_3({test,is_eq_exact,{f,Lbl},[Src,Val]=Ss}, Vst) -> validate_src(Ss, Vst), branch(Lbl, Vst, fun(FailVst) -> @@ -919,7 +950,7 @@ valfun_4({test,is_eq_exact,{f,Lbl},[Src,Val]=Ss}, Vst) -> fun(SuccVst) -> update_eq_types(Src, Val, SuccVst) end); -valfun_4({test,is_ne_exact,{f,Lbl},[Src,Val]=Ss}, Vst) -> +valfun_3({test,is_ne_exact,{f,Lbl},[Src,Val]=Ss}, Vst) -> validate_src(Ss, Vst), branch(Lbl, Vst, fun(FailVst) -> @@ -928,30 +959,30 @@ valfun_4({test,is_ne_exact,{f,Lbl},[Src,Val]=Ss}, Vst) -> fun(SuccVst) -> update_ne_types(Src, Val, SuccVst) end); -valfun_4({test,_Op,{f,Lbl},Src}, Vst) -> +valfun_3({test,_Op,{f,Lbl},Src}, Vst) -> %% is_pid, is_reference, et cetera. validate_src(Src, Vst), branch(Lbl, Vst, fun(V) -> V end); -valfun_4({bs_add,{f,Fail},[A,B,_],Dst}, Vst) -> +valfun_3({bs_add,{f,Fail},[A,B,_],Dst}, Vst) -> assert_term(A, Vst), assert_term(B, Vst), branch(Fail, Vst, fun(SuccVst) -> create_term(#t_integer{}, bs_add, [A, B], Dst, SuccVst) end); -valfun_4({bs_utf8_size,{f,Fail},A,Dst}, Vst) -> +valfun_3({bs_utf8_size,{f,Fail},A,Dst}, Vst) -> assert_term(A, Vst), branch(Fail, Vst, fun(SuccVst) -> create_term(#t_integer{}, bs_utf8_size, [A], Dst, SuccVst) end); -valfun_4({bs_utf16_size,{f,Fail},A,Dst}, Vst) -> +valfun_3({bs_utf16_size,{f,Fail},A,Dst}, Vst) -> assert_term(A, Vst), branch(Fail, Vst, fun(SuccVst) -> create_term(#t_integer{}, bs_utf16_size, [A], Dst, SuccVst) end); -valfun_4({bs_init2,{f,Fail},Sz,Heap,Live,_,Dst}, Vst0) -> +valfun_3({bs_init2,{f,Fail},Sz,Heap,Live,_,Dst}, Vst0) -> verify_live(Live, Vst0), verify_y_init(Vst0), if @@ -967,7 +998,7 @@ valfun_4({bs_init2,{f,Fail},Sz,Heap,Live,_,Dst}, Vst0) -> create_term(#t_bitstring{unit=8}, bs_init2, [], Dst, SuccVst, SuccVst0) end); -valfun_4({bs_init_bits,{f,Fail},Sz,Heap,Live,_,Dst}, Vst0) -> +valfun_3({bs_init_bits,{f,Fail},Sz,Heap,Live,_,Dst}, Vst0) -> verify_live(Live, Vst0), verify_y_init(Vst0), if @@ -982,7 +1013,7 @@ valfun_4({bs_init_bits,{f,Fail},Sz,Heap,Live,_,Dst}, Vst0) -> SuccVst = prune_x_regs(Live, SuccVst0), create_term(#t_bitstring{}, bs_init_bits, [], Dst, SuccVst) end); -valfun_4({bs_append,{f,Fail},Bits,Heap,Live,Unit,Bin,_Flags,Dst}, Vst0) -> +valfun_3({bs_append,{f,Fail},Bits,Heap,Live,Unit,Bin,_Flags,Dst}, Vst0) -> verify_live(Live, Vst0), verify_y_init(Vst0), assert_term(Bits, Vst0), @@ -994,7 +1025,7 @@ valfun_4({bs_append,{f,Fail},Bits,Heap,Live,Unit,Bin,_Flags,Dst}, Vst0) -> create_term(#t_bitstring{unit=Unit}, bs_append, [Bin], Dst, SuccVst, SuccVst0) end); -valfun_4({bs_private_append,{f,Fail},Bits,Unit,Bin,_Flags,Dst}, Vst) -> +valfun_3({bs_private_append,{f,Fail},Bits,Unit,Bin,_Flags,Dst}, Vst) -> assert_term(Bits, Vst), assert_term(Bin, Vst), branch(Fail, Vst, @@ -1002,55 +1033,55 @@ valfun_4({bs_private_append,{f,Fail},Bits,Unit,Bin,_Flags,Dst}, Vst) -> 
create_term(#t_bitstring{unit=Unit}, bs_private_append, [Bin], Dst, SuccVst) end); -valfun_4({bs_put_string,Sz,_}, Vst) when is_integer(Sz) -> +valfun_3({bs_put_string,Sz,_}, Vst) when is_integer(Sz) -> Vst; -valfun_4({bs_put_binary,{f,Fail},Sz,_,_,Src}, Vst) -> +valfun_3({bs_put_binary,{f,Fail},Sz,_,_,Src}, Vst) -> assert_term(Sz, Vst), assert_term(Src, Vst), branch(Fail, Vst, fun(SuccVst) -> update_type(fun meet/2, #t_bitstring{}, Src, SuccVst) end); -valfun_4({bs_put_float,{f,Fail},Sz,_,_,Src}, Vst) -> +valfun_3({bs_put_float,{f,Fail},Sz,_,_,Src}, Vst) -> assert_term(Sz, Vst), assert_term(Src, Vst), branch(Fail, Vst, fun(SuccVst) -> update_type(fun meet/2, float, Src, SuccVst) end); -valfun_4({bs_put_integer,{f,Fail},Sz,_,_,Src}, Vst) -> +valfun_3({bs_put_integer,{f,Fail},Sz,_,_,Src}, Vst) -> assert_term(Sz, Vst), assert_term(Src, Vst), branch(Fail, Vst, fun(SuccVst) -> update_type(fun meet/2, #t_integer{}, Src, SuccVst) end); -valfun_4({bs_put_utf8,{f,Fail},_,Src}, Vst) -> +valfun_3({bs_put_utf8,{f,Fail},_,Src}, Vst) -> assert_term(Src, Vst), branch(Fail, Vst, fun(SuccVst) -> update_type(fun meet/2, #t_integer{}, Src, SuccVst) end); -valfun_4({bs_put_utf16,{f,Fail},_,Src}, Vst) -> +valfun_3({bs_put_utf16,{f,Fail},_,Src}, Vst) -> assert_term(Src, Vst), branch(Fail, Vst, fun(SuccVst) -> update_type(fun meet/2, #t_integer{}, Src, SuccVst) end); -valfun_4({bs_put_utf32,{f,Fail},_,Src}, Vst) -> +valfun_3({bs_put_utf32,{f,Fail},_,Src}, Vst) -> assert_term(Src, Vst), branch(Fail, Vst, fun(SuccVst) -> update_type(fun meet/2, #t_integer{}, Src, SuccVst) end); %% Map instructions. -valfun_4({put_map_assoc=Op,{f,Fail},Src,Dst,Live,{list,List}}, Vst) -> +valfun_3({put_map_assoc=Op,{f,Fail},Src,Dst,Live,{list,List}}, Vst) -> verify_put_map(Op, Fail, Src, Dst, Live, List, Vst); -valfun_4({put_map_exact=Op,{f,Fail},Src,Dst,Live,{list,List}}, Vst) -> +valfun_3({put_map_exact=Op,{f,Fail},Src,Dst,Live,{list,List}}, Vst) -> verify_put_map(Op, Fail, Src, Dst, Live, List, Vst); -valfun_4({get_map_elements,{f,Fail},Src,{list,List}}, Vst) -> +valfun_3({get_map_elements,{f,Fail},Src,{list,List}}, Vst) -> verify_get_map(Fail, Src, List, Vst); -valfun_4(_, _) -> +valfun_3(_, _) -> error(unknown_instruction). verify_get_map(Fail, Src, List, Vst0) -> diff --git a/lib/compiler/test/beam_validator_SUITE.erl b/lib/compiler/test/beam_validator_SUITE.erl index 68b665fbc3..e4e34ec0d2 100644 --- a/lib/compiler/test/beam_validator_SUITE.erl +++ b/lib/compiler/test/beam_validator_SUITE.erl @@ -36,7 +36,7 @@ val_dsetel/1,bad_tuples/1,bad_try_catch_nesting/1, receive_stacked/1,aliased_types/1,type_conflict/1, infer_on_eq/1,infer_dead_value/1,infer_on_ne/1, - branch_to_try_handler/1]). + branch_to_try_handler/1,call_without_stack/1]). -include_lib("common_test/include/ct.hrl"). @@ -67,7 +67,7 @@ groups() -> bad_tuples,bad_try_catch_nesting, receive_stacked,aliased_types,type_conflict, infer_on_eq,infer_dead_value,infer_on_ne, - branch_to_try_handler]}]. + branch_to_try_handler,call_without_stack]}]. 
init_per_suite(Config) -> test_lib:recompile(?MODULE), @@ -150,19 +150,34 @@ stack(Config) when is_list(Config) -> call_last(Config) when is_list(Config) -> Errors = do_val(call_last, Config), - [{{t,a,1},{{call_last,1,{f,8},2},9,{allocated,1}}}, + [{{t,a,1}, + {{call_last,1,{f,8},2},9,{allocated,1}}}, {{t,b,1}, - {{call_ext_last,2,{extfunc,lists,seq,2},2}, - 10, - {allocated,1}}}] = Errors, + {{call_ext_last,2,{extfunc,lists,seq,2},2},10,{allocated,1}}}, + {{t,baz,2}, + {{call_ext_only,2,{extfunc,erlang,put,2}},5,{allocated,0}}}, + {{t,biz,2}, + {{call_only,2,{f,10}},5,{allocated,0}}}] = Errors, + ok. + +call_without_stack(Config) when is_list(Config) -> + Errors = do_val(call_without_stack, Config), + [{{t,local,2}, + {{call,2,{f,2}},4,{allocated,none}}}, + {{t,remote,2}, + {{call_ext,2,{extfunc,lists,seq,2}},4,{allocated,none}}}] = Errors, ok. merge_undefined(Config) when is_list(Config) -> Errors = do_val(merge_undefined, Config), - [{{t,handle_call,2}, + [{{t,undecided,2}, {{call_ext,2,{extfunc,debug,filter,2}}, 22, - {uninitialized_reg,{y,_}}}}] = Errors, + {allocated,undecided}}}, + {{t,uninitialized,2}, + {{call_ext,2,{extfunc,io,format,2}}, + 17, + {uninitialized_reg,{y,1}}}}] = Errors, ok. uninit(Config) when is_list(Config) -> @@ -265,7 +280,7 @@ freg_uninit(Config) when is_list(Config) -> {uninitialized_reg,{fr,1}}}}, {{t,sum_2,2}, {{bif,fadd,{f,0},[{fr,0},{fr,1}],{fr,0}}, - 9, + 10, {uninitialized_reg,{fr,0}}}}] = Errors, ok. diff --git a/lib/compiler/test/beam_validator_SUITE_data/call_last.S b/lib/compiler/test/beam_validator_SUITE_data/call_last.S index 827b6c0ae6..ff81da1b57 100644 --- a/lib/compiler/test/beam_validator_SUITE_data/call_last.S +++ b/lib/compiler/test/beam_validator_SUITE_data/call_last.S @@ -1,6 +1,6 @@ {module, call_last}. %% version = 0 -{exports, [{a,1},{b,1},{bar,1},{foo,1},{module_info,0},{module_info,1}]}. +{exports, [{a,1},{b,1},{bar,1},{foo,1},{baz,2},{biz,2}]}. {attributes, []}. @@ -53,19 +53,16 @@ {'%live',1}. return. - -{function, module_info, 0, 10}. +{function, baz, 2, 10}. {label,9}. - {func_info,{atom,t},{atom,module_info},0}. + {func_info,{atom,t},{atom,baz},2}. {label,10}. - {move,{atom,t},{x,0}}. - {call_ext_only,1,{extfunc,erlang,get_module_info,1}}. - + {allocate,0,2}. + {call_ext_only,2,{extfunc,erlang,put,2}}. -{function, module_info, 1, 12}. +{function, biz, 2, 12}. {label,11}. - {func_info,{atom,t},{atom,module_info},1}. + {func_info,{atom,t},{atom,biz},2}. {label,12}. - {move,{x,0},{x,1}}. - {move,{atom,t},{x,0}}. - {call_ext_only,2,{extfunc,erlang,get_module_info,2}}. + {allocate,0,2}. + {call_only,2,{f,10}}. diff --git a/lib/compiler/test/beam_validator_SUITE_data/call_without_stack.S b/lib/compiler/test/beam_validator_SUITE_data/call_without_stack.S new file mode 100644 index 0000000000..9ccbc163e3 --- /dev/null +++ b/lib/compiler/test/beam_validator_SUITE_data/call_without_stack.S @@ -0,0 +1,21 @@ +{module, call_without_stack}. %% version = 0 + +{exports, [{remote,2},{local,2}]}. + +{attributes, []}. + +{labels, 9}. + +{function, remote, 2, 2}. + {label,1}. + {func_info,{atom,t},{atom,remote},2}. + {label,2}. + {call_ext,2,{extfunc,lists,seq,2}}. + if_end. + +{function, local, 2, 4}. + {label,3}. + {func_info,{atom,t},{atom,local},2}. + {label,4}. + {call,2,{f,2}}. + if_end. 
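The new call_without_stack.S test above pins down the stricter rule introduced by this patch: an ordinary (body) call, local (call) or remote (call_ext), is now rejected by the validator with an {allocated,none} error when no stack frame has been allocated. Previously, calls to exit BIFs were exempt, since they were known never to return or touch the stack. As a rough illustration (not taken from the patch, and the function name is made up), this is the kind of source code affected: an exit BIF called in body position, followed by code that can only run if the call returned.

    %% Hypothetical example: erlang:exit/1 is an exit BIF, so before this
    %% change the compiler was allowed to call it here without first
    %% allocating a stack frame, because the call never returns.
    checked_div(A, B) ->
        case B of
            0 -> erlang:exit(badarg);
            _ -> ok
        end,
        A div B.

With this change such a call is treated like any other body call (see the need_frame_1/2 change in beam_ssa_pre_codegen.erl above), so a stack frame is allocated first and the validator can rely on it being there.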
diff --git a/lib/compiler/test/beam_validator_SUITE_data/freg_uninit.S b/lib/compiler/test/beam_validator_SUITE_data/freg_uninit.S index 71e833446a..2d4cbc9388 100644 --- a/lib/compiler/test/beam_validator_SUITE_data/freg_uninit.S +++ b/lib/compiler/test/beam_validator_SUITE_data/freg_uninit.S @@ -21,12 +21,14 @@ {label,3}. {func_info,{atom,t},{atom,sum_2},2}. {label,4}. + {allocate,0,2}. {fconv,{x,0},{fr,0}}. {fconv,{x,1},{fr,1}}. fclearerror. {fcheckerror,{f,0}}. {call,2,{f,6}}. {bif,fadd,{f,0},[{fr,0},{fr,1}],{fr,0}}. + {deallocate,0}. return. {function, foo, 2, 6}. diff --git a/lib/compiler/test/beam_validator_SUITE_data/merge_undefined.S b/lib/compiler/test/beam_validator_SUITE_data/merge_undefined.S index aa344807e4..3035471f04 100644 --- a/lib/compiler/test/beam_validator_SUITE_data/merge_undefined.S +++ b/lib/compiler/test/beam_validator_SUITE_data/merge_undefined.S @@ -1,15 +1,14 @@ {module, merge_undefined}. %% version = 0 -{exports, [{bar,2},{foo,1},{handle_call,2},{module_info,0},{module_info,1}]}. +{exports, [{uninitialized,2},{undecided,2}]}. {attributes, []}. {labels, 15}. - -{function, handle_call, 2, 2}. +{function, uninitialized, 2, 2}. {label,1}. - {func_info,{atom,t},{atom,handle_call},2}. + {func_info,{atom,t},{atom,uninitialized},2}. {label,2}. {test,is_atom,{f,1},[{x,0}]}. {select_val,{x,0},{f,1},{list,[{atom,gurka},{f,3},{atom,delete},{f,4}]}}. @@ -21,7 +20,7 @@ {move,{atom,nisse},{x,0}}. {call_ext,1,{extfunc,erlang,exit,1}}. {label,4}. - {allocate_heap,1,6,2}. + {allocate_heap,2,6,2}. {move,{x,1},{y,0}}. {put_list,{integer,112},nil,{x,0}}. {put_list,{integer,126},{x,0},{x,0}}. @@ -51,37 +50,57 @@ {call_ext,1,{extfunc,erlang,exit,1}}. {label,6}. {move,{y,0},{x,0}}. - {call_last,1,{f,8},1}. + {call_last,1,{f,14},1}. - -{function, foo, 1, 8}. +{function, undecided, 2, 8}. {label,7}. - {func_info,{atom,t},{atom,foo},1}. + {func_info,{atom,t},{atom,undecided},2}. {label,8}. - {move,{atom,ok},{x,0}}. - return. - - -{function, bar, 2, 10}. + {test,is_atom,{f,7},[{x,0}]}. + {select_val,{x,0},{f,1},{list,[{atom,gurka},{f,9},{atom,delete},{f,10}]}}. {label,9}. - {func_info,{atom,t},{atom,bar},2}. + {allocate_heap,2,6,2}. + {test,is_eq_exact,{f,11},[{x,0},{atom,ok}]}. + %% This is unreachable since {x,0} is known not to be 'ok'. We should not + %% fail with "uninitialized y registers" on erlang:exit/1 + {move,{atom,nisse},{x,0}}. + {call_ext,1,{extfunc,erlang,exit,1}}. {label,10}. - {move,{atom,ok},{x,0}}. - return. - - -{function, module_info, 0, 12}. + {allocate_heap,1,6,2}. + {move,{x,1},{y,0}}. + {put_list,{integer,112},nil,{x,0}}. + {put_list,{integer,126},{x,0},{x,0}}. + {put_list,{y,0},nil,{x,1}}. + {'%live',2}. + {call_ext,2,{extfunc,io,format,2}}. + {test,is_ne_exact,{f,12},[{x,0},{atom,ok}]}. {label,11}. - {func_info,{atom,t},{atom,module_info},0}. + %% The number of allocated Y registers are in conflict here. + {move,{atom,logReader},{x,1}}. + {move,{atom,console},{x,0}}. + {call_ext,2,{extfunc,debug,filter,2}}. + {test_heap,14,1}. + {put_list,{atom,logReader},nil,{x,1}}. + {put_list,{atom,console},{x,1},{x,1}}. + {put_tuple,3,{x,2}}. + {put,{atom,debug}}. + {put,{atom,filter}}. + {put,{x,1}}. + {put_tuple,2,{x,1}}. + {put,{x,2}}. + {put,{x,0}}. + {put_tuple,2,{x,0}}. + {put,{atom,badmatch}}. + {put,{x,1}}. + {'%live',1}. + {call_ext,1,{extfunc,erlang,exit,1}}. {label,12}. - {move,{atom,t},{x,0}}. - {call_ext_only,1,{extfunc,erlang,get_module_info,1}}. - + {move,{y,0},{x,0}}. + {call_last,1,{f,8},1}. -{function, module_info, 1, 14}. +{function, foo, 1, 14}. 
{label,13}. - {func_info,{atom,t},{atom,module_info},1}. + {func_info,{atom,t},{atom,foo},1}. {label,14}. - {move,{x,0},{x,1}}. - {move,{atom,t},{x,0}}. - {call_ext_only,2,{extfunc,erlang,get_module_info,2}}. + {move,{atom,ok},{x,0}}. + return. -- cgit v1.2.1 From 9c0be17312ff517474621108cb0379eef211c8b0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?John=20H=C3=B6gberg?= Date: Thu, 5 Sep 2019 11:49:54 +0200 Subject: beam_makeops: Instructions marked -no_next must not fall through --- erts/emulator/utils/beam_makeops | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/erts/emulator/utils/beam_makeops b/erts/emulator/utils/beam_makeops index 605a402f2a..cc21f9b5b4 100755 --- a/erts/emulator/utils/beam_makeops +++ b/erts/emulator/utils/beam_makeops @@ -1315,7 +1315,7 @@ sub combine_instruction_group { my $inc = 0; unless ($i == $#slots) { - $flags = "-no_next"; + $flags = "-micro_instruction"; my $next_offset = $label_to_offset{$next}; $inc = ($offset + $size) - $next_offset; $transfer_to_next = "I += $inc;\n" if $inc; @@ -1553,8 +1553,10 @@ sub code_gen { my $dispatch_next; my $instr_offset = $group_size + $offset + 1; - if ($flags =~ /-no_next/) { + if ($flags =~ /-micro_instruction/) { $dispatch_next = ""; + } elsif ($flags =~ /-no_next/) { + $dispatch_next = "ASSERT(!\"Fell through '$name' (-no_next)\");"; } elsif ($flags =~ /-no_prefetch/) { $dispatch_next = "\nI += $instr_offset;\n" . "ASSERT(VALID_INSTR(*I));\n" . -- cgit v1.2.1 From a6d6c9746d1a56170c921af4b81577dd1309da80 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?John=20H=C3=B6gberg?= Date: Wed, 4 Sep 2019 13:00:07 +0200 Subject: erts: Fix undefined behavior in $DISPATCHX() Some call instructions kept the export entry outside of Arg(0) for better argument packing, so the $DISPATCHX() macro faked a new instruction pointer starting at "one behind" the given argument. Some of these saved the argument on the C stack and passed that onwards to this macro, which could provoke undefined behavior if we were to jump out of the block, for example if we needed to save_calls. This commit fixes the issue by letting the macro take an argument directly, and removing the jump on save_calls. I've also taken the opportunity to move all dispatch-related macros to macros.tab as it's a pinch cleaner to gather everything there. --- erts/emulator/beam/beam_emu.c | 114 +---------------------------- erts/emulator/beam/bif_instrs.tab | 9 ++- erts/emulator/beam/instrs.tab | 72 +++++++++--------- erts/emulator/beam/macros.tab | 84 +++++++++++++++++++++ erts/emulator/hipe/hipe_instrs.tab | 5 +- erts/emulator/internal_doc/beam_makeops.md | 20 ++--- 6 files changed, 137 insertions(+), 167 deletions(-) diff --git a/erts/emulator/beam/beam_emu.c b/erts/emulator/beam/beam_emu.c index 9f8b56a5d5..27062089c6 100644 --- a/erts/emulator/beam/beam_emu.c +++ b/erts/emulator/beam/beam_emu.c @@ -250,72 +250,6 @@ void** beam_ops; #define Q(N) (N*sizeof(Eterm *)) #define l(N) (freg[N].fd) -/* - * Check that we haven't used the reductions and jump to function pointed to by - * the I register. If we are out of reductions, do a context switch. 
- */ - -#define DispatchMacro() \ - do { \ - BeamInstr dis_next; \ - dis_next = *I; \ - CHECK_ARGS(I); \ - if (FCALLS > 0 || FCALLS > neg_o_reds) { \ - FCALLS--; \ - Goto(dis_next); \ - } else { \ - goto context_switch; \ - } \ - } while (0) \ - -#define DispatchMacroFun() \ - do { \ - BeamInstr dis_next; \ - dis_next = *I; \ - CHECK_ARGS(I); \ - if (FCALLS > 0 || FCALLS > neg_o_reds) { \ - FCALLS--; \ - Goto(dis_next); \ - } else { \ - goto context_switch_fun; \ - } \ - } while (0) - -#define DispatchMacrox() \ - do { \ - if (FCALLS > 0) { \ - BeamInstr dis_next; \ - SET_I(((Export *) Arg(0))->addressv[erts_active_code_ix()]); \ - dis_next = *I; \ - FCALLS--; \ - CHECK_ARGS(I); \ - Goto(dis_next); \ - } else if (ERTS_PROC_GET_SAVED_CALLS_BUF(c_p) \ - && FCALLS > neg_o_reds) { \ - goto save_calls1; \ - } else { \ - SET_I(((Export *) Arg(0))->addressv[erts_active_code_ix()]); \ - CHECK_ARGS(I); \ - goto context_switch; \ - } \ - } while (0) - -#ifdef DEBUG -/* - * To simplify breakpoint setting, put the code in one place only and jump to it. - */ -# define Dispatch() goto do_dispatch -# define Dispatchx() goto do_dispatchx -# define Dispatchfun() goto do_dispatchfun -#else -/* - * Inline for speed. - */ -# define Dispatch() DispatchMacro() -# define Dispatchx() DispatchMacrox() -# define Dispatchfun() DispatchMacroFun() -#endif - #define Arg(N) I[(N)+1] #define GetSource(raw, dst) \ @@ -348,19 +282,6 @@ do { \ } \ } while(0) -#define DispatchReturn \ -do { \ - if (FCALLS > 0 || FCALLS > neg_o_reds) { \ - FCALLS--; \ - Goto(*I); \ - } \ - else { \ - c_p->current = NULL; \ - c_p->arity = 1; \ - goto context_switch3; \ - } \ -} while (0) - #ifdef DEBUG /* Better static type testing by the C compiler */ # define BEAM_IS_TUPLE(Src) is_tuple(Src) @@ -768,27 +689,9 @@ void process_main(Eterm * x_reg_array, FloatDef* f_reg_array) #endif #include "beam_hot.h" - -#ifdef DEBUG - /* - * Set a breakpoint here to get control just after a call instruction. - * I points to the first instruction in the called function. - * - * In gdb, use 'call dis(I-5, 1)' to show the name of the function. - */ - do_dispatch: - DispatchMacro(); - - do_dispatchx: - DispatchMacrox(); - - do_dispatchfun: - DispatchMacroFun(); - -#endif - /* - * Jumped to from the Dispatch() macro when the reductions are used up. + * The labels are jumped to from the $DISPATCH() macros when the reductions + * are used up. 
* * Since the I register points just beyond the FuncBegin instruction, we * can get the module, function, and arity for the function being @@ -982,19 +885,6 @@ void process_main(Eterm * x_reg_array, FloatDef* f_reg_array) } #endif return; /* Never executed */ - - save_calls1: - { - BeamInstr dis_next; - - save_calls(c_p, (Export *) Arg(0)); - - SET_I(((Export *) Arg(0))->addressv[erts_active_code_ix()]); - - dis_next = *I; - FCALLS--; - Goto(dis_next); - } } /* diff --git a/erts/emulator/beam/bif_instrs.tab b/erts/emulator/beam/bif_instrs.tab index f1877882a1..c02dcd543c 100644 --- a/erts/emulator/beam/bif_instrs.tab +++ b/erts/emulator/beam/bif_instrs.tab @@ -283,7 +283,7 @@ call_bif(Exp) { $SAVE_CONTINUATION_POINTER($NEXT_INSTRUCTION); SET_I(c_p->i); SWAPIN; - Dispatch(); + $DISPATCH(); } /* @@ -371,7 +371,7 @@ call_bif_only(Exp) { */ SET_I(c_p->i); SWAPIN; - Dispatch(); + $DISPATCH(); } /* @@ -416,7 +416,7 @@ send() { $SAVE_CONTINUATION_POINTER($NEXT_INSTRUCTION); SET_I(c_p->i); SWAPIN; - Dispatch(); + $DISPATCH(); } else { goto find_func_info; } @@ -557,6 +557,7 @@ nif_bif.apply_bif() { } nif_bif.epilogue() { + //| -no_next ERTS_REQ_PROC_MAIN_LOCK(c_p); ERTS_HOLE_CHECK(c_p); if (ERTS_IS_GC_DESIRED(c_p)) { @@ -578,7 +579,7 @@ nif_bif.epilogue() { c_p->flags &= ~F_HIBERNATE_SCHED; goto do_schedule; } - Dispatch(); + $DISPATCH(); } { BeamInstr *cp = cp_val(*E); diff --git a/erts/emulator/beam/instrs.tab b/erts/emulator/beam/instrs.tab index 38b1e5909b..9396c09182 100644 --- a/erts/emulator/beam/instrs.tab +++ b/erts/emulator/beam/instrs.tab @@ -120,7 +120,7 @@ dealloc_ret.execute() { E = ADD_BYTE_OFFSET(E, num_bytes); $RETURN(); CHECK_TERM(x(0)); - DispatchReturn; + $DISPATCH_RETURN(); } move_deallocate_return(Src, Deallocate) { @@ -137,31 +137,19 @@ move_deallocate_return(Src, Deallocate) { x(0) = src; $RETURN(); CHECK_TERM(x(0)); - DispatchReturn; + $DISPATCH_RETURN(); } // Call instructions -DISPATCH_REL(CallDest) { - //| -no_next - $SET_I_REL($CallDest); - DTRACE_LOCAL_CALL(c_p, erts_code_to_codemfa(I)); - Dispatch(); -} - -DISPATCH_ABS(CallDest) { - //| -no_next - SET_I((BeamInstr *) $CallDest); - DTRACE_LOCAL_CALL(c_p, erts_code_to_codemfa(I)); - Dispatch(); -} - i_call(CallDest) { + //| -no_next $SAVE_CONTINUATION_POINTER($NEXT_INSTRUCTION); $DISPATCH_REL($CallDest); } move_call(Src, CallDest) { + //| -no_next Eterm call_dest = $CallDest; Eterm src = $Src; $SAVE_CONTINUATION_POINTER($NEXT_INSTRUCTION); @@ -170,11 +158,13 @@ move_call(Src, CallDest) { } i_call_last(CallDest, Deallocate) { + //| -no_next $deallocate($Deallocate); $DISPATCH_REL($CallDest); } move_call_last(Src, CallDest, Deallocate) { + //| -no_next Eterm call_dest = $CallDest; Eterm src = $Src; $deallocate($Deallocate); @@ -183,59 +173,59 @@ move_call_last(Src, CallDest, Deallocate) { } i_call_only(CallDest) { + //| -no_next $DISPATCH_REL($CallDest); } move_call_only(Src, CallDest) { + //| -no_next Eterm call_dest = $CallDest; Eterm src = $Src; x(0) = src; $DISPATCH_REL(call_dest); } -DISPATCHX(Dest) { - //| -no_next - DTRACE_GLOBAL_CALL_FROM_EXPORT(c_p, $Dest); - // Dispatchx assumes the Export* is in Arg(0) - I = (&$Dest) - 1; - Dispatchx(); -} - i_call_ext(Dest) { + //| -no_next $SAVE_CONTINUATION_POINTER($NEXT_INSTRUCTION); - $DISPATCHX($Dest); + $DISPATCH_EXPORT($Dest); } i_move_call_ext(Src, CallDest) { + //| -no_next Eterm call_dest = $CallDest; Eterm src = $Src; $SAVE_CONTINUATION_POINTER($NEXT_INSTRUCTION); x(0) = src; - $DISPATCHX(call_dest); + $DISPATCH_EXPORT(call_dest); } i_call_ext_only(Dest) { - 
$DISPATCHX($Dest); + //| -no_next + $DISPATCH_EXPORT($Dest); } i_move_call_ext_only(CallDest, Src) { + //| -no_next Eterm call_dest = $CallDest; Eterm src = $Src; x(0) = src; - $DISPATCHX(call_dest); + $DISPATCH_EXPORT(call_dest); } i_call_ext_last(Dest, Deallocate) { + //| -no_next $deallocate($Deallocate); - $DISPATCHX($Dest); + $DISPATCH_EXPORT($Dest); } i_move_call_ext_last(CallDest, Deallocate, Src) { + //| -no_next Eterm call_dest = $CallDest; Eterm src = $Src; $deallocate($Deallocate); x(0) = src; - $DISPATCHX(call_dest); + $DISPATCH_EXPORT(call_dest); } APPLY(I, Deallocate, Next) { @@ -251,6 +241,7 @@ HANDLE_APPLY_ERROR() { } i_apply() { + //| -no_next BeamInstr *next; $APPLY(NULL, 0, next); if (ERTS_LIKELY(next != NULL)) { @@ -261,6 +252,7 @@ i_apply() { } i_apply_last(Deallocate) { + //| -no_next BeamInstr *next; $APPLY(I, $Deallocate, next); if (ERTS_LIKELY(next != NULL)) { @@ -271,6 +263,7 @@ i_apply_last(Deallocate) { } i_apply_only() { + //| -no_next BeamInstr *next; $APPLY(I, 0, next); if (ERTS_LIKELY(next != NULL)) { @@ -287,6 +280,7 @@ FIXED_APPLY(Arity, I, Deallocate, Next) { } apply(Arity) { + //| -no_next BeamInstr *next; $FIXED_APPLY($Arity, NULL, 0, next); if (ERTS_LIKELY(next != NULL)) { @@ -297,6 +291,7 @@ apply(Arity) { } apply_last(Arity, Deallocate) { + //| -no_next BeamInstr *next; $FIXED_APPLY($Arity, I, $Deallocate, next); if (ERTS_LIKELY(next != NULL)) { @@ -316,13 +311,8 @@ HANDLE_APPLY_FUN_ERROR() { goto find_func_info; } -DISPATCH_FUN(I) { - //| -no_next - SET_I($I); - Dispatchfun(); -} - i_apply_fun() { + //| -no_next BeamInstr *next; $APPLY_FUN(next); if (ERTS_LIKELY(next != NULL)) { @@ -333,6 +323,7 @@ i_apply_fun() { } i_apply_fun_last(Deallocate) { + //| -no_next BeamInstr *next; $APPLY_FUN(next); if (ERTS_LIKELY(next != NULL)) { @@ -343,6 +334,7 @@ i_apply_fun_last(Deallocate) { } i_apply_fun_only() { + //| -no_next BeamInstr *next; $APPLY_FUN(next); if (ERTS_LIKELY(next != NULL)) { @@ -359,6 +351,7 @@ CALL_FUN(Fun, Next) { } i_call_fun(Fun) { + //| -no_next BeamInstr *next; $CALL_FUN($Fun, next); if (ERTS_LIKELY(next != NULL)) { @@ -369,6 +362,7 @@ i_call_fun(Fun) { } i_call_fun_last(Fun, Deallocate) { + //| -no_next BeamInstr *next; $CALL_FUN($Fun, next); if (ERTS_LIKELY(next != NULL)) { @@ -381,10 +375,12 @@ i_call_fun_last(Fun, Deallocate) { return() { //| -no_next $RETURN(); + DTRACE_RETURN_FROM_PC(c_p); CHECK_TERM(r(0)); HEAP_SPACE_VERIFIED(0); - DispatchReturn; + + $DISPATCH_RETURN(); } get_list(Src, Hd, Tl) { @@ -677,7 +673,7 @@ move_return(Src) { //| -no_next x(0) = $Src; $RETURN(); - DispatchReturn; + $DISPATCH_RETURN(); } move_x1(Src) { diff --git a/erts/emulator/beam/macros.tab b/erts/emulator/beam/macros.tab index 9d183e1f41..848e35d45c 100644 --- a/erts/emulator/beam/macros.tab +++ b/erts/emulator/beam/macros.tab @@ -136,6 +136,90 @@ AH(NeedStack, NeedHeap, Live) { *E = NIL; } + +// +// Helpers for call instructions +// + +DISPATCH() { + BeamInstr dis_next; + + dis_next = *I; + CHECK_ARGS(I); + + if (FCALLS > 0 || FCALLS > neg_o_reds) { + FCALLS--; + Goto(dis_next); + } else { + goto context_switch; + } +} + +DISPATCH_ABS(CallDest) { + SET_I((BeamInstr *) $CallDest); + DTRACE_LOCAL_CALL(c_p, erts_code_to_codemfa(I)); + + $DISPATCH(); +} + +DISPATCH_EXPORT(Export) { + BeamInstr dis_next; + Export *ep; + + ep = (Export*)($Export); + + DTRACE_GLOBAL_CALL_FROM_EXPORT(c_p, ep); + + SET_I(ep->addressv[erts_active_code_ix()]); + CHECK_ARGS(I); + dis_next = *I; + + if (ERTS_UNLIKELY(FCALLS <= 0)) { + if (ERTS_PROC_GET_SAVED_CALLS_BUF(c_p) && 
FCALLS > neg_o_reds) { + save_calls(c_p, ep); + } else { + goto context_switch; + } + } + + FCALLS--; + Goto(dis_next); +} + +DISPATCH_FUN(I) { + BeamInstr dis_next; + + SET_I($I); + + dis_next = *I; + CHECK_ARGS(I); + + if (FCALLS > 0 || FCALLS > neg_o_reds) { + FCALLS--; + Goto(dis_next); + } else { + goto context_switch_fun; + } +} + +DISPATCH_REL(CallDest) { + $SET_I_REL($CallDest); + DTRACE_LOCAL_CALL(c_p, erts_code_to_codemfa(I)); + + $DISPATCH(); +} + +DISPATCH_RETURN() { + if (FCALLS > 0 || FCALLS > neg_o_reds) { + FCALLS--; + Goto(*I); + } else { + c_p->current = NULL; + c_p->arity = 1; + goto context_switch3; + } +} + // Save the continuation pointer in the reserved slot at the // top of the stack as preparation for doing a function call. diff --git a/erts/emulator/hipe/hipe_instrs.tab b/erts/emulator/hipe/hipe_instrs.tab index 62162fcb9c..8aa8544b2a 100644 --- a/erts/emulator/hipe/hipe_instrs.tab +++ b/erts/emulator/hipe/hipe_instrs.tab @@ -93,7 +93,7 @@ hipe_trap.post() { /*fall through*/ case HIPE_MODE_SWITCH_RES_CALL_BEAM: SET_I(c_p->i); - Dispatch(); + $DISPATCH(); case HIPE_MODE_SWITCH_RES_CALL_CLOSURE: /* This can be used to call any function value, but currently it's only used to call closures referring to unloaded @@ -104,8 +104,7 @@ hipe_trap.post() { next = call_fun(c_p, c_p->arity - 1, reg, THE_NON_VALUE); HEAVY_SWAPIN; if (next != NULL) { - SET_I(next); - Dispatchfun(); + $DISPATCH_FUN(next); } goto find_func_info; } diff --git a/erts/emulator/internal_doc/beam_makeops.md b/erts/emulator/internal_doc/beam_makeops.md index 2880099b70..267af78412 100644 --- a/erts/emulator/internal_doc/beam_makeops.md +++ b/erts/emulator/internal_doc/beam_makeops.md @@ -1457,26 +1457,26 @@ all instructions. It expands to the address of the next instruction. Here is an example: i_call(CallDest) { - SET_CP(c_p, $NEXT_INSTRUCTION); + //| -no_next + $SAVE_CONTINUATION_POINTER($NEXT_INSTRUCTION); $DISPATCH_REL($CallDest); } -When calling a function, the return address is first stored in `c_p->cp` -(using the `SET_CP()` macro defined in `beam_emu.c`), and then control is +When calling a function, the return address is first stored in `E[0]` +(using the `$SAVE_CONTINUATION_POINTER()` macro), and then control is transferred to the callee. Here is the generated code: OpCase(i_call_f): { - SET_CP(c_p, I+1); - ASSERT(VALID_INSTR(*(I + (fb(BeamExtraData(I[0]))) + 0))); - I += fb(BeamExtraData(I[0])) + 0;; - DTRACE_LOCAL_CALL(c_p, erts_code_to_codemfa(I)); - Dispatch();; + ASSERT(VALID_INSTR(*(I+2))); + *E = (BeamInstr) (I+2);; + + /* ... dispatch code intentionally left out ... */ } -We can see that that `$NEXT_INSTRUCTION` has been expanded to `I+1`. +We can see that that `$NEXT_INSTRUCTION` has been expanded to `I+2`. That makes sense since the size of the `i_call_f/1` instruction is -one word. +two words. 
##### The IP_ADJUSTMENT pre-bound variable ##### -- cgit v1.2.1 From ed2d3f36349bc1d14f7b469fc6b3ea9bf5ec010d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?John=20H=C3=B6gberg?= Date: Mon, 2 Sep 2019 16:36:36 +0200 Subject: erts: Add operator stubs to erlang.erl --- erts/preloaded/ebin/erlang.beam | Bin 100508 -> 103624 bytes erts/preloaded/src/erlang.erl | 116 ++++++++++++++++++++++++++++++++++++++++ 2 files changed, 116 insertions(+) diff --git a/erts/preloaded/ebin/erlang.beam b/erts/preloaded/ebin/erlang.beam index 78a9a4eef2..1530982393 100644 Binary files a/erts/preloaded/ebin/erlang.beam and b/erts/preloaded/ebin/erlang.beam differ diff --git a/erts/preloaded/src/erlang.erl b/erts/preloaded/src/erlang.erl index f1a1de4ab2..94595ab78f 100644 --- a/erts/preloaded/src/erlang.erl +++ b/erts/preloaded/src/erlang.erl @@ -189,6 +189,27 @@ -export([dt_get_tag/0, dt_get_tag_data/0, dt_prepend_vm_tag_data/1, dt_append_vm_tag_data/1, dt_put_tag/1, dt_restore_tag/1, dt_spread_tag/1]). +%% Operators + +-export(['=='/2, '=:='/2, + '/='/2, '=/='/2, + '=<'/2, '>='/2, + '<'/2, '>'/2]). + +-export(['-'/1, '+'/1, + '-'/2, '+'/2, + '/'/2, '*'/2, + 'div'/2, 'rem'/2, + 'bsl'/2, 'bsr'/2, + 'bor'/2, 'band'/2, + 'bxor'/2, 'bnot'/1]). + +-export(['and'/2, 'or'/2, + 'xor'/2, 'not'/1]). + +-export(['--'/2, '++'/2]). + +-export(['!'/2]). %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% %%% Simple native code BIFs @@ -3932,3 +3953,98 @@ gc_info(Ref, N, {OrigColls,OrigRecl}) -> {Ref, {_,Colls, Recl}} -> gc_info(Ref, N-1, {Colls+OrigColls,Recl+OrigRecl}) end. + +%% Operators + +-spec erlang:'=='(term(), term()) -> boolean(). +'=='(_A, _B) -> + erlang:nif_error(undefined). +-spec erlang:'=:='(term(), term()) -> boolean(). +'=:='(_A, _B) -> + erlang:nif_error(undefined). +-spec erlang:'/='(term(), term()) -> boolean(). +'/='(_A, _B) -> + erlang:nif_error(undefined). +-spec erlang:'=/='(term(), term()) -> boolean(). +'=/='(_A, _B) -> + erlang:nif_error(undefined). +-spec erlang:'=<'(term(), term()) -> boolean(). +'=<'(_A, _B) -> + erlang:nif_error(undefined). +-spec erlang:'>='(term(), term()) -> boolean(). +'>='(_A, _B) -> + erlang:nif_error(undefined). +-spec erlang:'<'(term(), term()) -> boolean(). +'<'(_A, _B) -> + erlang:nif_error(undefined). +-spec erlang:'>'(term(), term()) -> boolean(). +'>'(_A, _B) -> + erlang:nif_error(undefined). + +-spec erlang:'-'(number()) -> number(). +'-'(_A) -> + erlang:nif_error(undefined). +-spec erlang:'+'(number()) -> number(). +'+'(_A) -> + erlang:nif_error(undefined). +-spec erlang:'-'(number(), number()) -> number(). +'-'(_A, _B) -> + erlang:nif_error(undefined). +-spec erlang:'+'(number(), number()) -> number(). +'+'(_A, _B) -> + erlang:nif_error(undefined). +-spec erlang:'/'(number(), number()) -> float(). +'/'(_A, _B) -> + erlang:nif_error(undefined). +-spec erlang:'*'(number(), number()) -> number(). +'*'(_A, _B) -> + erlang:nif_error(undefined). +-spec erlang:'div'(integer(), integer()) -> integer(). +'div'(_A, _B) -> + erlang:nif_error(undefined). +-spec erlang:'rem'(integer(), integer()) -> integer(). +'rem'(_A, _B) -> + erlang:nif_error(undefined). +-spec erlang:'bsl'(integer(), integer()) -> integer(). +'bsl'(_A, _B) -> + erlang:nif_error(undefined). +-spec erlang:'bsr'(integer(), integer()) -> integer(). +'bsr'(_A, _B) -> + erlang:nif_error(undefined). +-spec erlang:'bor'(integer(), integer()) -> integer(). +'bor'(_A, _B) -> + erlang:nif_error(undefined). +-spec erlang:'band'(integer(), integer()) -> integer(). 
+'band'(_A, _B) -> + erlang:nif_error(undefined). +-spec erlang:'bxor'(integer(), integer()) -> integer(). +'bxor'(_A, _B) -> + erlang:nif_error(undefined). +-spec erlang:'bnot'(integer()) -> integer(). +'bnot'(_A) -> + erlang:nif_error(undefined). + +-spec erlang:'--'(list(), list()) -> list(). +'--'(_A, _B) -> + erlang:nif_error(undefined). +-spec erlang:'++'(list(), term()) -> term(). +'++'(_A, _B) -> + erlang:nif_error(undefined). + +-spec erlang:'and'(boolean(), boolean()) -> boolean(). +'and'(_A, _B) -> + erlang:nif_error(undefined). +-spec erlang:'or'(boolean(), boolean()) -> boolean(). +'or'(_A, _B) -> + erlang:nif_error(undefined). + +-spec erlang:'xor'(boolean(), boolean()) -> boolean(). +'xor'(_A, _B) -> + erlang:nif_error(undefined). +-spec erlang:'not'(boolean()) -> boolean(). +'not'(_A) -> + erlang:nif_error(undefined). + +-spec erlang:'!'(dst(), term()) -> term(). +'!'(_Dst, _Msg) -> + erlang:nif_error(undefined). -- cgit v1.2.1 From 20cf78bed119da47c66c3efd77c8199b424582b7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?John=20H=C3=B6gberg?= Date: Tue, 27 Aug 2019 10:21:18 +0200 Subject: erts: Replace ad-hoc export->beam[] wrangling with unions --- erts/emulator/beam/beam_bif_load.c | 56 +++++++++++++----------- erts/emulator/beam/beam_bp.c | 21 ++++++--- erts/emulator/beam/beam_bp.h | 2 +- erts/emulator/beam/beam_emu.c | 85 +++++++++++++++++++----------------- erts/emulator/beam/beam_load.c | 44 +++++++++++-------- erts/emulator/beam/bif.c | 9 ++-- erts/emulator/beam/erl_bif_trace.c | 48 ++++++++++---------- erts/emulator/beam/erl_nfunc_sched.c | 6 +-- erts/emulator/beam/erl_nfunc_sched.h | 2 +- erts/emulator/beam/export.c | 13 +++--- erts/emulator/beam/export.h | 84 +++++++++++++++++++++++++++-------- 11 files changed, 227 insertions(+), 143 deletions(-) diff --git a/erts/emulator/beam/beam_bif_load.c b/erts/emulator/beam/beam_bif_load.c index 04b2ed64b7..587a61d814 100644 --- a/erts/emulator/beam/beam_bif_load.c +++ b/erts/emulator/beam/beam_bif_load.c @@ -835,21 +835,26 @@ BIF_RETTYPE finish_after_on_load_2(BIF_ALIST_2) */ num_exps = export_list_size(code_ix); for (i = 0; i < num_exps; i++) { - Export *ep = export_list(i,code_ix); - if (ep == NULL || ep->info.mfa.module != BIF_ARG_1) { - continue; - } - if (ep->beam[1] != 0) { - ep->addressv[code_ix] = (void *) ep->beam[1]; - ep->beam[1] = 0; - } else { - if (ep->addressv[code_ix] == ep->beam && - BeamIsOpCode(ep->beam[0], op_apply_bif)) { - continue; - } - ep->addressv[code_ix] = ep->beam; - ep->beam[0] = BeamOpCodeAddr(op_call_error_handler); - } + Export *ep = export_list(i, code_ix); + + if (ep == NULL || ep->info.mfa.module != BIF_ARG_1) { + continue; + } + + DBG_CHECK_EXPORT(ep, code_ix); + + if (ep->trampoline.not_loaded.deferred != 0) { + ep->addressv[code_ix] = (void*)ep->trampoline.not_loaded.deferred; + ep->trampoline.not_loaded.deferred = 0; + } else { + if (ep->addressv[code_ix] == ep->trampoline.raw && + BeamIsOpCode(ep->trampoline.op, op_apply_bif)) { + continue; + } + + ep->addressv[code_ix] = ep->trampoline.raw; + ep->trampoline.op = BeamOpCodeAddr(op_call_error_handler); + } } modp->curr.code_hdr->on_load_function_ptr = NULL; @@ -872,10 +877,11 @@ BIF_RETTYPE finish_after_on_load_2(BIF_ALIST_2) if (ep == NULL || ep->info.mfa.module != BIF_ARG_1) { continue; } - if (BeamIsOpCode(ep->beam[0], op_apply_bif)) { + if (BeamIsOpCode(ep->trampoline.op, op_apply_bif)) { continue; } - ep->beam[1] = 0; + + ep->trampoline.not_loaded.deferred = 0; } } erts_release_code_write_permission(); @@ -1884,25 +1890,25 @@ 
delete_code(Module* modp) for (i = 0; i < num_exps; i++) { Export *ep = export_list(i, code_ix); if (ep != NULL && (ep->info.mfa.module == module)) { - if (ep->addressv[code_ix] == ep->beam) { - if (BeamIsOpCode(ep->beam[0], op_apply_bif)) { + if (ep->addressv[code_ix] == ep->trampoline.raw) { + if (BeamIsOpCode(ep->trampoline.op, op_apply_bif)) { continue; } - else if (BeamIsOpCode(ep->beam[0], op_i_generic_breakpoint)) { + else if (BeamIsOpCode(ep->trampoline.op, op_i_generic_breakpoint)) { ERTS_LC_ASSERT(erts_thr_progress_is_blocking()); ASSERT(modp->curr.num_traced_exports > 0); DBG_TRACE_MFA_P(&ep->info.mfa, "export trace cleared, code_ix=%d", code_ix); - erts_clear_export_break(modp, &ep->info); + erts_clear_export_break(modp, ep); } else { - ASSERT(BeamIsOpCode(ep->beam[0], op_call_error_handler) || + ASSERT(BeamIsOpCode(ep->trampoline.op, op_call_error_handler) || !erts_initialized); } } - ep->addressv[code_ix] = ep->beam; - ep->beam[0] = BeamOpCodeAddr(op_call_error_handler); - ep->beam[1] = 0; + ep->addressv[code_ix] = ep->trampoline.raw; + ep->trampoline.op = BeamOpCodeAddr(op_call_error_handler); + ep->trampoline.not_loaded.deferred = 0; DBG_TRACE_MFA_P(&ep->info.mfa, "export invalidation, code_ix=%d", code_ix); } diff --git a/erts/emulator/beam/beam_bp.c b/erts/emulator/beam/beam_bp.c index 10940072ae..1b92fe0a1f 100644 --- a/erts/emulator/beam/beam_bp.c +++ b/erts/emulator/beam/beam_bp.c @@ -263,7 +263,7 @@ erts_bp_match_export(BpFunctions* f, ErtsCodeMFA *mfa, int specified) ASSERT(0); } - pc = ep->beam; + pc = ep->trampoline.raw; if (ep->addressv[code_ix] == pc) { if (BeamIsOpCode(*pc, op_apply_bif) || BeamIsOpCode(*pc, op_call_error_handler)) { @@ -630,13 +630,22 @@ erts_clear_module_break(Module *modp) { } void -erts_clear_export_break(Module* modp, ErtsCodeInfo *ci) +erts_clear_export_break(Module* modp, Export *ep) { + ErtsCodeInfo *ci; + ERTS_LC_ASSERT(erts_thr_progress_is_blocking()); + ci = &ep->info; + + ASSERT(erts_codeinfo_to_code(ci) == ep->trampoline.raw); + + ASSERT(BeamIsOpCode(ep->trampoline.op, op_i_generic_breakpoint)); + ep->trampoline.op = 0; + clear_function_break(ci, ERTS_BPF_ALL); erts_commit_staged_bp(); - *erts_codeinfo_to_code(ci) = (BeamInstr) 0; + consolidate_bp_data(modp, ci, 0); ASSERT(ci->u.gen_bp == NULL); } @@ -776,9 +785,9 @@ erts_bif_trace(int bif_index, Process* p, Eterm* args, BeamInstr* I) Export* ep = bif_export[bif_index]; Uint32 flags = 0, flags_meta = 0; ErtsTracer meta_tracer = erts_tracer_nil; - int applying = (I == ep->beam); /* Yup, the apply code for a bif - * is actually in the - * export entry */ + int applying = (I == ep->trampoline.raw); /* Yup, the apply code for a bif + * is actually in the + * export entry */ BeamInstr* cp = (BeamInstr *) p->stop[0]; GenericBp* g; GenericBpData* bp = NULL; diff --git a/erts/emulator/beam/beam_bp.h b/erts/emulator/beam/beam_bp.h index a64765822b..9f7ec16b71 100644 --- a/erts/emulator/beam/beam_bp.h +++ b/erts/emulator/beam/beam_bp.h @@ -142,7 +142,7 @@ void erts_clear_count_break(BpFunctions *f); void erts_clear_all_breaks(BpFunctions* f); int erts_clear_module_break(Module *modp); -void erts_clear_export_break(Module *modp, ErtsCodeInfo* ci); +void erts_clear_export_break(Module *modp, Export *ep); BeamInstr erts_generic_breakpoint(Process* c_p, ErtsCodeInfo *ci, Eterm* reg); BeamInstr erts_trace_break(Process *p, ErtsCodeInfo *ci, Eterm *args, diff --git a/erts/emulator/beam/beam_emu.c b/erts/emulator/beam/beam_emu.c index 27062089c6..48824ef9da 100644 --- a/erts/emulator/beam/beam_emu.c 
+++ b/erts/emulator/beam/beam_emu.c @@ -111,10 +111,10 @@ do { \ #define CHECK_ALIGNED(Dst) ASSERT((((Uint)&Dst) & (sizeof(Uint)-1)) == 0) -#define GET_BIF_MODULE(p) (p->info.mfa.module) -#define GET_BIF_FUNCTION(p) (p->info.mfa.function) -#define GET_BIF_ARITY(p) (p->info.mfa.arity) -#define GET_BIF_ADDRESS(p) ((BifFunction) (p->beam[1])) +#define GET_BIF_MODULE(p) ((p)->info.mfa.module) +#define GET_BIF_FUNCTION(p) ((p)->info.mfa.function) +#define GET_BIF_ARITY(p) ((p)->info.mfa.arity) +#define GET_BIF_ADDRESS(p) ((BifFunction)((p)->trampoline.bif.func)) #define TermWords(t) (((t) / (sizeof(BeamInstr)/sizeof(Eterm))) + !!((t) % (sizeof(BeamInstr)/sizeof(Eterm)))) @@ -894,43 +894,47 @@ void process_main(Eterm * x_reg_array, FloatDef* f_reg_array) static void init_emulator_finish(void) { - int i; - Export* ep; + int i; #if defined(ARCH_64) && defined(CODE_MODEL_SMALL) - for (i = 0; i < NUMBER_OF_OPCODES; i++) { - BeamInstr instr = BeamOpCodeAddr(i); - if (instr >= (1ull << 32)) { - erts_exit(ERTS_ERROR_EXIT, - "This run-time was supposed be compiled with all code below 2Gb,\n" - "but the instruction '%s' is located at %016lx.\n", - opc[i].name, instr); - } - } + for (i = 0; i < NUMBER_OF_OPCODES; i++) { + BeamInstr instr = BeamOpCodeAddr(i); + if (instr >= (1ull << 32)) { + erts_exit(ERTS_ERROR_EXIT, + "This run-time was supposed be compiled with all code below 2Gb,\n" + "but the instruction '%s' is located at %016lx.\n", + opc[i].name, instr); + } + } #endif - beam_apply[0] = BeamOpCodeAddr(op_i_apply); - beam_apply[1] = BeamOpCodeAddr(op_normal_exit); - beam_exit[0] = BeamOpCodeAddr(op_error_action_code); - beam_continue_exit[0] = BeamOpCodeAddr(op_continue_exit); - beam_return_to_trace[0] = BeamOpCodeAddr(op_i_return_to_trace); - beam_return_trace[0] = BeamOpCodeAddr(op_return_trace); - beam_exception_trace[0] = BeamOpCodeAddr(op_return_trace); /* UGLY */ - beam_return_time_trace[0] = BeamOpCodeAddr(op_i_return_time_trace); + beam_apply[0] = BeamOpCodeAddr(op_i_apply); + beam_apply[1] = BeamOpCodeAddr(op_normal_exit); + beam_exit[0] = BeamOpCodeAddr(op_error_action_code); + beam_continue_exit[0] = BeamOpCodeAddr(op_continue_exit); + beam_return_to_trace[0] = BeamOpCodeAddr(op_i_return_to_trace); + beam_return_trace[0] = BeamOpCodeAddr(op_return_trace); + beam_exception_trace[0] = BeamOpCodeAddr(op_return_trace); /* UGLY */ + beam_return_time_trace[0] = BeamOpCodeAddr(op_i_return_time_trace); - /* - * Enter all BIFs into the export table. - */ - for (i = 0; i < BIF_SIZE; i++) { - ep = erts_export_put(bif_table[i].module, - bif_table[i].name, - bif_table[i].arity); - bif_export[i] = ep; - ep->beam[0] = BeamOpCodeAddr(op_apply_bif); - ep->beam[1] = (BeamInstr) bif_table[i].f; - /* XXX: set func info for bifs */ - ep->info.op = BeamOpCodeAddr(op_i_func_info_IaaI); - } + /* + * Enter all BIFs into the export table. + */ + for (i = 0; i < BIF_SIZE; i++) { + Export *ep = erts_export_put(bif_table[i].module, + bif_table[i].name, + bif_table[i].arity); + + ep->info.op = BeamOpCodeAddr(op_i_func_info_IaaI); + ep->info.mfa.module = bif_table[i].module; + ep->info.mfa.function = bif_table[i].name; + ep->info.mfa.arity = bif_table[i].arity; + + ep->trampoline.op = BeamOpCodeAddr(op_apply_bif); + ep->trampoline.bif.func = (BeamInstr) bif_table[i].f; + + bif_export[i] = ep; + } } /* @@ -1988,7 +1992,7 @@ apply_bif_error_adjustment(Process *p, Export *ep, * and apply_last_IP. 
*/ if (I - && BeamIsOpCode(ep->beam[0], op_apply_bif) + && BeamIsOpCode(ep->trampoline.op, op_apply_bif) && (ep == bif_export[BIF_error_1] || ep == bif_export[BIF_error_2] || ep == bif_export[BIF_exit_1] @@ -3110,10 +3114,11 @@ erts_is_builtin(Eterm Mod, Eterm Name, int arity) e.info.mfa.arity = arity; if ((ep = export_get(&e)) == NULL) { - return 0; + return 0; } - return ep->addressv[erts_active_code_ix()] == ep->beam && - BeamIsOpCode(ep->beam[0], op_apply_bif); + + return ep->addressv[erts_active_code_ix()] == ep->trampoline.raw && + BeamIsOpCode(ep->trampoline.op, op_apply_bif); } diff --git a/erts/emulator/beam/beam_load.c b/erts/emulator/beam/beam_load.c index 3d5683f19f..2b4b16983c 100644 --- a/erts/emulator/beam/beam_load.c +++ b/erts/emulator/beam/beam_load.c @@ -845,17 +845,25 @@ erts_finish_loading(Binary* magic, Process* c_p, if (ep == NULL || ep->info.mfa.module != module) { continue; } - if (ep->addressv[code_ix] == ep->beam) { - if (BeamIsOpCode(ep->beam[0], op_apply_bif)) { + + DBG_CHECK_EXPORT(ep, code_ix); + + if (ep->addressv[code_ix] == ep->trampoline.raw) { + if (BeamIsOpCode(ep->trampoline.op, op_apply_bif)) { continue; - } else if (BeamIsOpCode(ep->beam[0], op_i_generic_breakpoint)) { + } else if (BeamIsOpCode(ep->trampoline.op, op_i_generic_breakpoint)) { ERTS_LC_ASSERT(erts_thr_progress_is_blocking()); ASSERT(mod_tab_p->curr.num_traced_exports > 0); - erts_clear_export_break(mod_tab_p, &ep->info); - ep->addressv[code_ix] = (BeamInstr *) ep->beam[1]; - ep->beam[1] = 0; + + erts_clear_export_break(mod_tab_p, ep); + + ep->addressv[code_ix] = + (BeamInstr*)ep->trampoline.breakpoint.address; + ep->trampoline.breakpoint.address = 0; + + ASSERT(ep->addressv[code_ix] != ep->trampoline.raw); } - ASSERT(ep->beam[1] == 0); + ASSERT(ep->trampoline.breakpoint.address == 0); } } ASSERT(mod_tab_p->curr.num_breakpoints == 0); @@ -1478,8 +1486,8 @@ load_import_table(LoaderState* stp) * the BIF function. */ if ((e = erts_active_export_entry(mod, func, arity)) != NULL) { - if (BeamIsOpCode(e->beam[0], op_apply_bif)) { - stp->import[i].bf = (BifFunction) e->beam[1]; + if (BeamIsOpCode(e->trampoline.op, op_apply_bif)) { + stp->import[i].bf = (BifFunction) e->trampoline.bif.func; if (func == am_load_nif && mod == am_erlang && arity == 2) { stp->may_load_nif = 1; } @@ -1572,7 +1580,7 @@ is_bif(Eterm mod, Eterm func, unsigned arity) if (e == NULL) { return 0; } - if (! BeamIsOpCode(e->beam[0], op_apply_bif)) { + if (! BeamIsOpCode(e->trampoline.op, op_apply_bif)) { return 0; } if (mod == am_erlang && func == am_apply && arity == 3) { @@ -5226,7 +5234,7 @@ final_touch(LoaderState* stp, struct erl_module_instance* inst_p) * callable yet. Keep any function in the current * code callable. */ - ep->beam[1] = (BeamInstr) address; + ep->trampoline.not_loaded.deferred = (BeamInstr) address; } else ep->addressv[erts_staging_code_ix()] = address; @@ -5406,7 +5414,7 @@ transform_engine(LoaderState* st) if (i >= st->num_imports || st->import[i].bf == NULL) goto restart; if (bif_number != -1 && - bif_export[bif_number]->beam[1] != (BeamInstr) st->import[i].bf) { + bif_export[bif_number]->trampoline.bif.func != (BeamInstr) st->import[i].bf) { goto restart; } } @@ -6286,12 +6294,12 @@ exported_from_module(Process* p, /* Process whose heap to use. */ if (ep->info.mfa.module == mod) { Eterm tuple; - - if (ep->addressv[code_ix] == ep->beam && - BeamIsOpCode(ep->beam[0], op_call_error_handler)) { - /* There is a call to the function, but it does not exist. 
*/ - continue; - } + + if (ep->addressv[code_ix] == ep->trampoline.raw && + BeamIsOpCode(ep->trampoline.op, op_call_error_handler)) { + /* There is a call to the function, but it does not exist. */ + continue; + } if (hp == hend) { int need = 10 * 5; diff --git a/erts/emulator/beam/bif.c b/erts/emulator/beam/bif.c index 7afbbfd894..aba3af3424 100644 --- a/erts/emulator/beam/bif.c +++ b/erts/emulator/beam/bif.c @@ -4971,15 +4971,18 @@ void erts_init_trap_export(Export* ep, Eterm m, Eterm f, Uint a, Eterm (*bif)(BIF_ALIST)) { int i; + sys_memset((void *) ep, 0, sizeof(Export)); + for (i=0; iaddressv[i] = ep->beam; + ep->addressv[i] = ep->trampoline.raw; } + ep->info.mfa.module = m; ep->info.mfa.function = f; ep->info.mfa.arity = a; - ep->beam[0] = BeamOpCodeAddr(op_apply_bif); - ep->beam[1] = (BeamInstr) bif; + ep->trampoline.op = BeamOpCodeAddr(op_apply_bif); + ep->trampoline.bif.func = (BeamInstr) bif; } void erts_init_bif(void) diff --git a/erts/emulator/beam/erl_bif_trace.c b/erts/emulator/beam/erl_bif_trace.c index 80ba7d1b3c..6fed112627 100644 --- a/erts/emulator/beam/erl_bif_trace.c +++ b/erts/emulator/beam/erl_bif_trace.c @@ -1047,7 +1047,7 @@ static int function_is_traced(Process *p, e.info.mfa.function = mfa[1]; e.info.mfa.arity = mfa[2]; if ((ep = export_get(&e)) != NULL) { - pc = ep->beam; + pc = ep->trampoline.raw; if (ep->addressv[erts_active_code_ix()] == pc && ! BeamIsOpCode(*pc, op_call_error_handler)) { @@ -1446,12 +1446,12 @@ erts_set_trace_pattern(Process*p, ErtsCodeMFA *mfa, int specified, #ifdef DEBUG ep->info.op = BeamOpCodeAddr(op_i_func_info_IaaI); #endif - ep->beam[0] = BeamOpCodeAddr(op_trace_jump_W); - ep->beam[1] = (BeamInstr) ep->addressv[code_ix]; + ep->trampoline.op = BeamOpCodeAddr(op_trace_jump_W); + ep->trampoline.trace.address = (BeamInstr) ep->addressv[code_ix]; } erts_set_call_trace_bif(ci, match_prog_set, 0); if (ep->addressv[code_ix] != pc) { - ep->beam[0] = BeamOpCodeAddr(op_i_generic_breakpoint); + ep->trampoline.op = BeamOpCodeAddr(op_i_generic_breakpoint); } } else if (!on && flags.breakpoint) { /* Turn off breakpoint tracing -- nothing to do here. */ @@ -1461,8 +1461,8 @@ erts_set_trace_pattern(Process*p, ErtsCodeMFA *mfa, int specified, * before turning on breakpoint tracing. 
*/ erts_clear_call_trace_bif(ci, 0); - if (BeamIsOpCode(ep->beam[0], op_i_generic_breakpoint)) { - ep->beam[0] = BeamOpCodeAddr(op_trace_jump_W); + if (BeamIsOpCode(ep->trampoline.op, op_i_generic_breakpoint)) { + ep->trampoline.op = BeamOpCodeAddr(op_trace_jump_W); } } } @@ -1736,7 +1736,7 @@ install_exp_breakpoints(BpFunctions* f) for (i = 0; i < ne; i++) { Export* ep = ErtsContainerStruct(fp[i].ci, Export, info); - ep->addressv[code_ix] = ep->beam; + ep->addressv[code_ix] = ep->trampoline.raw; } } @@ -1751,11 +1751,12 @@ uninstall_exp_breakpoints(BpFunctions* f) for (i = 0; i < ne; i++) { Export* ep = ErtsContainerStruct(fp[i].ci, Export, info); - if (ep->addressv[code_ix] != ep->beam) { - continue; - } - ASSERT(BeamIsOpCode(ep->beam[0], op_trace_jump_W)); - ep->addressv[code_ix] = (BeamInstr *) ep->beam[1]; + if (ep->addressv[code_ix] != ep->trampoline.raw) { + continue; + } + + ASSERT(BeamIsOpCode(ep->trampoline.op, op_trace_jump_W)); + ep->addressv[code_ix] = (BeamInstr *) ep->trampoline.trace.address; } } @@ -1770,13 +1771,14 @@ clean_export_entries(BpFunctions* f) for (i = 0; i < ne; i++) { Export* ep = ErtsContainerStruct(fp[i].ci, Export, info); - if (ep->addressv[code_ix] == ep->beam) { - continue; - } - if (BeamIsOpCode(ep->beam[0], op_trace_jump_W)) { - ep->beam[0] = (BeamInstr) 0; - ep->beam[1] = (BeamInstr) 0; - } + if (ep->addressv[code_ix] == ep->trampoline.raw) { + continue; + } + + if (BeamIsOpCode(ep->trampoline.op, op_trace_jump_W)) { + ep->trampoline.op = (BeamInstr) 0; + ep->trampoline.trace.address = (BeamInstr) 0; + } } } @@ -1790,8 +1792,8 @@ setup_bif_trace(void) GenericBp* g = ep->info.u.gen_bp; if (g) { if (ExportIsBuiltIn(ep)) { - ASSERT(ep->beam[1]); - ep->beam[1] = (BeamInstr) bif_table[i].traced; + ASSERT(ep->trampoline.bif.func != 0); + ep->trampoline.bif.func = (BeamInstr) bif_table[i].traced; } } } @@ -1808,8 +1810,8 @@ reset_bif_trace(void) GenericBp* g = ep->info.u.gen_bp; if (g && g->data[active].flags == 0) { if (ExportIsBuiltIn(ep)) { - ASSERT(ep->beam[1]); - ep->beam[1] = (BeamInstr) bif_table[i].f; + ASSERT(ep->trampoline.bif.func != 0); + ep->trampoline.bif.func = (BeamInstr) bif_table[i].f; } } } diff --git a/erts/emulator/beam/erl_nfunc_sched.c b/erts/emulator/beam/erl_nfunc_sched.c index b2658ef180..fc9ec4e9f5 100644 --- a/erts/emulator/beam/erl_nfunc_sched.c +++ b/erts/emulator/beam/erl_nfunc_sched.c @@ -41,7 +41,7 @@ erts_new_proc_nif_export(Process *c_p, int argc) nep = erts_alloc(ERTS_ALC_T_NIF_TRAP_EXPORT, size); for (i = 0; i < ERTS_NUM_CODE_IX; i++) - nep->exp.addressv[i] = &nep->exp.beam[0]; + nep->exp.addressv[i] = &nep->exp.trampoline.raw[0]; nep->argc = -1; /* unused marker */ nep->argv_size = argc; @@ -168,8 +168,8 @@ erts_nif_export_schedule(Process *c_p, Process *dirty_shadow_proc, nep->exp.info.mfa.module = mod; nep->exp.info.mfa.function = func; nep->exp.info.mfa.arity = (Uint) argc; - nep->exp.beam[0] = (BeamInstr) instr; /* call_nif || apply_bif */ - nep->exp.beam[1] = (BeamInstr) dfunc; + nep->exp.trampoline.op = (BeamInstr) instr; /* call_nif || apply_bif */ + nep->exp.trampoline.raw[1] = (BeamInstr) dfunc; nep->func = ifunc; used_proc->arity = argc; used_proc->freason = TRAP; diff --git a/erts/emulator/beam/erl_nfunc_sched.h b/erts/emulator/beam/erl_nfunc_sched.h index 5c6486cbb8..54d011695a 100644 --- a/erts/emulator/beam/erl_nfunc_sched.h +++ b/erts/emulator/beam/erl_nfunc_sched.h @@ -208,7 +208,7 @@ erts_proc_shadow2real(Process *c_p) #define ERTS_I_BEAM_OP_TO_NIF_EXPORT(I) \ (ASSERT(BeamIsOpCode(*(I), op_apply_bif) || 
\ BeamIsOpCode(*(I), op_call_nif)), \ - ((NifExport *) (((char *) (I)) - offsetof(NifExport, exp.beam[0])))) + ((NifExport *) (((char *) (I)) - offsetof(NifExport, exp.trampoline.raw[0])))) #include "erl_message.h" diff --git a/erts/emulator/beam/export.c b/erts/emulator/beam/export.c index b928f03b2f..24957c8131 100644 --- a/erts/emulator/beam/export.c +++ b/erts/emulator/beam/export.c @@ -129,14 +129,15 @@ export_alloc(struct export_entry* tmpl_e) obj->info.mfa.module = tmpl->info.mfa.module; obj->info.mfa.function = tmpl->info.mfa.function; obj->info.mfa.arity = tmpl->info.mfa.arity; - obj->beam[0] = 0; + + memset(&obj->trampoline, 0, sizeof(obj->trampoline)); + if (BeamOpsAreInitialized()) { - obj->beam[0] = BeamOpCodeAddr(op_call_error_handler); + obj->trampoline.op = BeamOpCodeAddr(op_call_error_handler); } - obj->beam[1] = 0; for (ix=0; ixaddressv[ix] = obj->beam; + obj->addressv[ix] = obj->trampoline.raw; blob->entryv[ix].slot.index = -1; blob->entryv[ix].ep = &blob->exp; @@ -253,8 +254,8 @@ erts_find_function(Eterm m, Eterm f, unsigned int a, ErtsCodeIndex code_ix) ee = hash_get(&export_tables[code_ix].htable, init_template(&templ, m, f, a)); if (ee == NULL || - (ee->ep->addressv[code_ix] == ee->ep->beam && - ! BeamIsOpCode(ee->ep->beam[0], op_i_generic_breakpoint))) { + (ee->ep->addressv[code_ix] == ee->ep->trampoline.raw && + ! BeamIsOpCode(ee->ep->trampoline.op, op_i_generic_breakpoint))) { return NULL; } return ee->ep; diff --git a/erts/emulator/beam/export.h b/erts/emulator/beam/export.h index ae8dfa4cf8..1246446418 100644 --- a/erts/emulator/beam/export.h +++ b/erts/emulator/beam/export.h @@ -33,22 +33,71 @@ typedef struct export { void* addressv[ERTS_NUM_CODE_IX]; /* Pointer to code for function. */ - ErtsCodeInfo info; /* MUST be just before beam[] */ - - /* - * beam[0]: This entry is 0 unless the 'addressv' field points to it. - * Threaded code instruction to load function - * (em_call_error_handler), execute BIF (em_apply_bif), - * or a breakpoint instruction (op_i_generic_breakpoint). - * beam[1]: Function pointer to BIF function (for BIFs only), - * or pointer to threaded code if the module has an - * on_load function that has not been run yet, or pointer - * to code if function beam[0] is a breakpoint instruction. - * Otherwise: 0. - */ - BeamInstr beam[2]; + /* This is a small trampoline function that can be used for lazy code + * loading, global call tracing, and so on. It's only valid when + * addressv points to it and should otherwise be left zeroed. + * + * Needless to say, the order of the fields below is significant. */ + ErtsCodeInfo info; + union { + BeamInstr op; /* Union discriminant. */ + + struct { + BeamInstr op; /* op_apply_bif */ + BeamInstr func; /* A direct pointer to the BIF */ + } bif; + + struct { + BeamInstr op; /* op_i_generic_breakpoint */ + BeamInstr address; /* Address of the original function */ + } breakpoint; + + /* This is used when a module refers to (imports) a function that + * hasn't been loaded yet. Upon loading we create an export entry which + * redirects to the error_handler so that the appropriate module will + * be loaded when called (or crash). + * + * This is also used when a module has an on_load callback as we need + * to defer all calls until the callback returns. `deferred` contains + * the address of the original function in this case, and there's an + * awkward condiditon where `deferred` may be set while op is zero. See + * erlang:finish_after_on_load/2 for details. 
*/ + struct { + BeamInstr op; /* op_call_error_handler, or 0 during the last + * phase of code loading when on_load is + * present. See above. */ + BeamInstr deferred; + } not_loaded; + + struct { + BeamInstr op; /* op_trace_jump_W */ + BeamInstr address; /* Address of the traced function */ + } trace; + + BeamInstr raw[2]; /* For use in address comparisons, should not + * be tampered directly. */ + } trampoline; } Export; +#ifdef DEBUG +#define DBG_CHECK_EXPORT(EP, CX) \ + do { \ + if((EP)->addressv[CX] == (EP)->trampoline.raw) { \ + /* The entry currently points at the trampoline, so the + * instructions must be valid. */ \ + ASSERT(((BeamIsOpCode((EP)->trampoline.op, op_apply_bif)) && \ + (EP)->trampoline.bif.func != 0) || \ + ((BeamIsOpCode((EP)->trampoline.op, op_i_generic_breakpoint)) && \ + (EP)->trampoline.breakpoint.address != 0) || \ + ((BeamIsOpCode((EP)->trampoline.op, op_trace_jump_W)) && \ + (EP)->trampoline.trace.address != 0) || \ + /* (EP)->trampoline.not_loaded.deferred may be zero. */ \ + (BeamIsOpCode((EP)->trampoline.op, op_call_error_handler))); \ + } \ + } while(0) +#else +#define DBG_CHECK_EXPORT(EP, CX) ((void)(EP), (void)(CX)) +#endif void init_export_table(void); void export_info(fmtfn_t, void *); @@ -71,9 +120,10 @@ extern erts_mtx_t export_staging_lock; #define export_staging_unlock() erts_mtx_unlock(&export_staging_lock) #include "beam_load.h" /* For em_* extern declarations */ -#define ExportIsBuiltIn(EntryPtr) \ -(((EntryPtr)->addressv[erts_active_code_ix()] == (EntryPtr)->beam) && \ - (BeamIsOpCode((EntryPtr)->beam[0], op_apply_bif))) + +#define ExportIsBuiltIn(EntryPtr) \ + (((EntryPtr)->addressv[erts_active_code_ix()] == (EntryPtr)->trampoline.raw) && \ + (BeamIsOpCode((EntryPtr)->trampoline.op, op_apply_bif))) #if ERTS_GLB_INLINE_INCL_FUNC_DEF -- cgit v1.2.1 From 40356bd2ae0e4f4c31204b3dd13d14541442f1a1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?John=20H=C3=B6gberg?= Date: Tue, 27 Aug 2019 13:11:25 +0200 Subject: erts: Refactor BIF tracing This commit replaces our current BIF-specific tracing functionality with the general function/export tracing used for everything else, fixing a few longstanding issues: * BIFs that trapped to themselves, for example lists:reverse/2, would generate a call trace message for each trap but only a single return trace message. * BIFs that trapped elsewhere, like erlang:delete_module/1, would lose their return trace messages altogether. * Return/exception trace messages on tail calls would point at the function "above" the caller. * Call count tracing simply didn't work. 
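
As an illustration only (this sketch is not part of the patch; the
function name, trace flags, and list size below are made up for the
example), the intended behaviour can be checked with ordinary global
call/return tracing on a trapping BIF such as lists:reverse/2, where
each traced call should now be paired with exactly one return_from
message:

    %% Minimal sketch: globally trace lists:reverse/2 with return_trace
    %% and expect one call message and one matching return_from message,
    %% even though the BIF traps to itself for long lists.
    bif_trace_demo() ->
        Tracee = spawn(fun() ->
                           receive go -> ok end,
                           lists:reverse(lists:seq(1, 100000), [])
                       end),
        erlang:trace(Tracee, true, [call, arity]),
        erlang:trace_pattern({lists, reverse, 2},
                             [{'_', [], [{return_trace}]}],
                             [global]),
        Tracee ! go,
        receive {trace, Tracee, call, {lists, reverse, 2}} -> ok end,
        receive {trace, Tracee, return_from, {lists, reverse, 2}, _} -> ok end,
        ok.

Before this change, a session like the one sketched above could see
several call messages for a single reverse/2 invocation or, for BIFs
that trap elsewhere, no return_from message at all.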
--- erts/emulator/Makefile.in | 5 +- erts/emulator/beam/beam_bif_load.c | 16 +- erts/emulator/beam/beam_bp.c | 294 ++------------------------- erts/emulator/beam/beam_bp.h | 14 +- erts/emulator/beam/beam_emu.c | 275 +++++++++++++------------ erts/emulator/beam/beam_load.c | 181 ++++++++--------- erts/emulator/beam/beam_load.h | 2 +- erts/emulator/beam/bif.c | 46 ++++- erts/emulator/beam/bif.tab | 23 ++- erts/emulator/beam/bif_instrs.tab | 109 +++++++--- erts/emulator/beam/erl_alloc.c | 6 - erts/emulator/beam/erl_alloc.types | 1 - erts/emulator/beam/erl_bif_info.c | 73 +++++-- erts/emulator/beam/erl_bif_trace.c | 140 ++----------- erts/emulator/beam/erl_nfunc_sched.c | 53 +---- erts/emulator/beam/erl_nfunc_sched.h | 50 +---- erts/emulator/beam/erl_nif.c | 100 +++++---- erts/emulator/beam/erl_trace.h | 6 - erts/emulator/beam/export.c | 4 + erts/emulator/beam/export.h | 21 +- erts/emulator/beam/global.h | 6 + erts/emulator/beam/ops.tab | 161 ++++++--------- erts/emulator/test/call_trace_SUITE.erl | 72 +++++-- erts/emulator/test/dirty_bif_SUITE.erl | 4 +- erts/emulator/test/match_spec_SUITE.erl | 3 +- erts/emulator/test/trace_call_time_SUITE.erl | 70 ++++--- erts/emulator/utils/make_tables | 74 +++---- 27 files changed, 731 insertions(+), 1078 deletions(-) diff --git a/erts/emulator/Makefile.in b/erts/emulator/Makefile.in index b2b8acd1b0..70718575e9 100644 --- a/erts/emulator/Makefile.in +++ b/erts/emulator/Makefile.in @@ -599,7 +599,6 @@ endif $(TTF_DIR)/erl_bif_table.c \ $(TTF_DIR)/erl_bif_table.h \ -$(TTF_DIR)/erl_bif_wrap.c \ $(TTF_DIR)/erl_bif_list.h \ $(TTF_DIR)/erl_atom_table.c \ $(TTF_DIR)/erl_atom_table.h \ @@ -885,7 +884,6 @@ RUN_OBJS += \ $(OBJDIR)/erl_bif_persistent.o \ $(OBJDIR)/erl_bif_atomics.o $(OBJDIR)/erl_bif_counters.o \ $(OBJDIR)/erl_bif_trace.o $(OBJDIR)/erl_bif_unique.o \ - $(OBJDIR)/erl_bif_wrap.o $(OBJDIR)/erl_nfunc_sched.o \ $(OBJDIR)/erl_guard_bifs.o $(OBJDIR)/erl_dirty_bif_wrap.o \ $(OBJDIR)/erl_trace.o $(OBJDIR)/copy.o \ $(OBJDIR)/utils.o $(OBJDIR)/bif.o \ @@ -920,7 +918,8 @@ RUN_OBJS += \ $(OBJDIR)/erl_ptab.o $(OBJDIR)/erl_map.o \ $(OBJDIR)/erl_msacc.o $(OBJDIR)/erl_lock_flags.o \ $(OBJDIR)/erl_io_queue.o $(OBJDIR)/erl_db_catree.o \ - $(ESOCK_RUN_OBJS) $(OBJDIR)/erl_flxctr.o + $(ESOCK_RUN_OBJS) $(OBJDIR)/erl_flxctr.o \ + $(OBJDIR)/erl_nfunc_sched.o LTTNG_OBJS = $(OBJDIR)/erlang_lttng.o diff --git a/erts/emulator/beam/beam_bif_load.c b/erts/emulator/beam/beam_bif_load.c index 587a61d814..a406e14741 100644 --- a/erts/emulator/beam/beam_bif_load.c +++ b/erts/emulator/beam/beam_bif_load.c @@ -847,8 +847,7 @@ BIF_RETTYPE finish_after_on_load_2(BIF_ALIST_2) ep->addressv[code_ix] = (void*)ep->trampoline.not_loaded.deferred; ep->trampoline.not_loaded.deferred = 0; } else { - if (ep->addressv[code_ix] == ep->trampoline.raw && - BeamIsOpCode(ep->trampoline.op, op_apply_bif)) { + if (ep->bif_table_index != -1) { continue; } @@ -877,7 +876,7 @@ BIF_RETTYPE finish_after_on_load_2(BIF_ALIST_2) if (ep == NULL || ep->info.mfa.module != BIF_ARG_1) { continue; } - if (BeamIsOpCode(ep->trampoline.op, op_apply_bif)) { + if (ep->bif_table_index != -1) { continue; } @@ -1891,10 +1890,7 @@ delete_code(Module* modp) Export *ep = export_list(i, code_ix); if (ep != NULL && (ep->info.mfa.module == module)) { if (ep->addressv[code_ix] == ep->trampoline.raw) { - if (BeamIsOpCode(ep->trampoline.op, op_apply_bif)) { - continue; - } - else if (BeamIsOpCode(ep->trampoline.op, op_i_generic_breakpoint)) { + if (BeamIsOpCode(ep->trampoline.op, op_i_generic_breakpoint)) { 
ERTS_LC_ASSERT(erts_thr_progress_is_blocking()); ASSERT(modp->curr.num_traced_exports > 0); DBG_TRACE_MFA_P(&ep->info.mfa, @@ -1906,6 +1902,12 @@ delete_code(Module* modp) !erts_initialized); } } + + if (ep->bif_table_index != -1 && ep->is_bif_traced) { + /* Code unloading kills both global and local call tracing. */ + ep->is_bif_traced = 0; + } + ep->addressv[code_ix] = ep->trampoline.raw; ep->trampoline.op = BeamOpCodeAddr(op_call_error_handler); ep->trampoline.not_loaded.deferred = 0; diff --git a/erts/emulator/beam/beam_bp.c b/erts/emulator/beam/beam_bp.c index 1b92fe0a1f..1bb20f6ae3 100644 --- a/erts/emulator/beam/beam_bp.c +++ b/erts/emulator/beam/beam_bp.c @@ -207,9 +207,6 @@ erts_bp_match_functions(BpFunctions* f, ErtsCodeMFA *mfa, int specified) if (erts_is_function_native(ci)) { continue; } - if (is_nil(ci->mfa.module)) { /* Ignore BIF stub */ - continue; - } switch (specified) { case 3: if (ci->mfa.arity != mfa->arity) @@ -244,8 +241,10 @@ erts_bp_match_export(BpFunctions* f, ErtsCodeMFA *mfa, int specified) f->matching = (BpFunction *) Alloc(num_exps*sizeof(BpFunction)); ne = 0; for (i = 0; i < num_exps; i++) { - Export* ep = export_list(i, code_ix); - BeamInstr* pc; + BeamInstr *func; + Export* ep; + + ep = export_list(i, code_ix); switch (specified) { case 3: @@ -263,19 +262,20 @@ erts_bp_match_export(BpFunctions* f, ErtsCodeMFA *mfa, int specified) ASSERT(0); } - pc = ep->trampoline.raw; - if (ep->addressv[code_ix] == pc) { - if (BeamIsOpCode(*pc, op_apply_bif) || - BeamIsOpCode(*pc, op_call_error_handler)) { - continue; - } - ASSERT(BeamIsOpCode(*pc, op_i_generic_breakpoint)); - } else if (erts_is_function_native(erts_code_to_codeinfo(ep->addressv[code_ix]))) { - continue; - } + func = ep->addressv[code_ix]; + + if (func == ep->trampoline.raw) { + if (BeamIsOpCode(*func, op_call_error_handler)) { + continue; + } + ASSERT(BeamIsOpCode(*func, op_i_generic_breakpoint)); + } else if (erts_is_function_native(erts_code_to_codeinfo(func))) { + continue; + } f->matching[ne].ci = &ep->info; f->matching[ne].mod = erts_get_module(ep->info.mfa.module, code_ix); + ne++; } @@ -305,18 +305,6 @@ erts_consolidate_bp_data(BpFunctions* f, int local) } } -void -erts_consolidate_bif_bp_data(void) -{ - int i; - - ERTS_LC_ASSERT(erts_has_code_write_permission()); - for (i = 0; i < BIF_SIZE; i++) { - Export *ep = bif_export[i]; - consolidate_bp_data(0, &ep->info, 0); - } -} - static void consolidate_bp_data(Module* modp, ErtsCodeInfo *ci, int local) { @@ -495,32 +483,13 @@ erts_set_mtrace_break(BpFunctions* f, Binary *match_spec, ErtsTracer tracer) } void -erts_set_call_trace_bif(ErtsCodeInfo *ci, Binary *match_spec, int local) +erts_set_export_trace(ErtsCodeInfo *ci, Binary *match_spec, int local) { Uint flags = local ? 
ERTS_BPF_LOCAL_TRACE : ERTS_BPF_GLOBAL_TRACE; set_function_break(ci, match_spec, flags, 0, erts_tracer_nil); } -void -erts_set_mtrace_bif(ErtsCodeInfo *ci, Binary *match_spec, ErtsTracer tracer) -{ - set_function_break(ci, match_spec, ERTS_BPF_META_TRACE, 0, tracer); -} - -void -erts_set_time_trace_bif(ErtsCodeInfo *ci, enum erts_break_op count_op) -{ - set_function_break(ci, NULL, - ERTS_BPF_TIME_TRACE|ERTS_BPF_TIME_TRACE_ACTIVE, - count_op, erts_tracer_nil); -} - -void -erts_clear_time_trace_bif(ErtsCodeInfo *ci) { - clear_function_break(ci, ERTS_BPF_TIME_TRACE|ERTS_BPF_TIME_TRACE_ACTIVE); -} - void erts_set_debug_break(BpFunctions* f) { set_break(f, NULL, ERTS_BPF_DEBUG, 0, erts_tracer_nil); @@ -547,7 +516,7 @@ erts_clear_trace_break(BpFunctions* f) } void -erts_clear_call_trace_bif(ErtsCodeInfo *ci, int local) +erts_clear_export_trace(ErtsCodeInfo *ci, int local) { GenericBp* g = ci->u.gen_bp; @@ -565,12 +534,6 @@ erts_clear_mtrace_break(BpFunctions* f) clear_break(f, ERTS_BPF_META_TRACE); } -void -erts_clear_mtrace_bif(ErtsCodeInfo *ci) -{ - clear_function_break(ci, ERTS_BPF_META_TRACE); -} - void erts_clear_debug_break(BpFunctions* f) { @@ -771,229 +734,6 @@ erts_generic_breakpoint(Process* c_p, ErtsCodeInfo *info, Eterm* reg) } } -/* - * Entry point called by the trace wrap functions in erl_bif_wrap.c - * - * The trace wrap functions are themselves called through the export - * entries instead of the original BIF functions. - */ -Eterm -erts_bif_trace(int bif_index, Process* p, Eterm* args, BeamInstr* I) -{ - Eterm result; - Eterm (*func)(Process*, Eterm*, BeamInstr*); - Export* ep = bif_export[bif_index]; - Uint32 flags = 0, flags_meta = 0; - ErtsTracer meta_tracer = erts_tracer_nil; - int applying = (I == ep->trampoline.raw); /* Yup, the apply code for a bif - * is actually in the - * export entry */ - BeamInstr* cp = (BeamInstr *) p->stop[0]; - GenericBp* g; - GenericBpData* bp = NULL; - Uint bp_flags = 0; - int return_to_trace = 0; - - ERTS_CHK_HAVE_ONLY_MAIN_PROC_LOCK(p); - - g = ep->info.u.gen_bp; - if (g) { - bp = &g->data[erts_active_bp_ix()]; - bp_flags = bp->flags; - } - - /* - * Make continuation pointer OK, it is not during direct BIF calls, - * but it is correct during apply of bif. - */ - if (!applying) { - p->stop[0] = (Eterm) I; - } else { - fixup_cp_before_trace(p, &return_to_trace); - } - if (bp_flags & (ERTS_BPF_LOCAL_TRACE|ERTS_BPF_GLOBAL_TRACE) && - IS_TRACED_FL(p, F_TRACE_CALLS)) { - int local = !!(bp_flags & ERTS_BPF_LOCAL_TRACE); - flags = erts_call_trace(p, &ep->info, bp->local_ms, args, - local, &ERTS_TRACER(p)); - } - if (bp_flags & ERTS_BPF_META_TRACE) { - ErtsTracer old_tracer; - - meta_tracer = erts_atomic_read_nob(&bp->meta_tracer->tracer); - old_tracer = meta_tracer; - flags_meta = erts_call_trace(p, &ep->info, bp->meta_ms, args, - 0, &meta_tracer); - - if (!ERTS_TRACER_COMPARE(old_tracer, meta_tracer)) { - ErtsTracer new_tracer = erts_tracer_nil; - erts_tracer_update(&new_tracer, meta_tracer); - if (old_tracer == erts_atomic_cmpxchg_acqb( - &bp->meta_tracer->tracer, - (erts_aint_t)new_tracer, - (erts_aint_t)old_tracer)) { - ERTS_TRACER_CLEAR(&old_tracer); - } else { - ERTS_TRACER_CLEAR(&new_tracer); - } - } - } - if (bp_flags & ERTS_BPF_TIME_TRACE_ACTIVE && - IS_TRACED_FL(p, F_TRACE_CALLS)) { - erts_trace_time_call(p, &ep->info, bp->time); - } - - /* Restore original continuation pointer (if changed). 
*/ - p->stop[0] = (Eterm) cp; - - func = bif_table[bif_index].f; - - result = func(p, args, I); - - if (erts_nif_export_check_save_trace(p, result, - applying, ep, - flags, - flags_meta, I, - meta_tracer)) { - /* - * erts_bif_trace_epilogue() will be called - * later when appropriate via the NIF export - * scheduling functionality... - */ - return result; - } - - return erts_bif_trace_epilogue(p, result, applying, ep, - flags, flags_meta, I, - meta_tracer); -} - -Eterm -erts_bif_trace_epilogue(Process *p, Eterm result, int applying, - Export* ep, Uint32 flags, - Uint32 flags_meta, BeamInstr* I, - ErtsTracer meta_tracer) -{ - BeamInstr *cp = NULL; - - if (applying && (flags & MATCH_SET_RETURN_TO_TRACE)) { - BeamInstr i_return_trace = beam_return_trace[0]; - BeamInstr i_return_to_trace = beam_return_to_trace[0]; - BeamInstr i_return_time_trace = beam_return_time_trace[0]; - Eterm *cpp; - - /* Maybe advance cp to skip trace stack frames */ - cpp = p->stop; - while (is_not_CP(*cpp)) { - cpp++; - } - for (cp = cp_val(*cpp++); ;) { - if (*cp == i_return_trace) { - /* Skip stack frame variables */ - while (is_not_CP(*cpp)) cpp++; - cpp += 2; /* Skip return_trace parameters */ - } else if (*cp == i_return_time_trace) { - /* Skip stack frame variables */ - while (is_not_CP(*cpp)) cpp++; - cpp += 1; /* Skip return_time_trace parameters */ - } else if (*cp == i_return_to_trace) { - /* A return_to trace message is going to be generated - * by normal means, so we do not have to. - */ - cp = NULL; - break; - } else { - break; - } - cp = cp_val(*cpp++); - } - } - - /* Try to get these in the order - * they usually appear in normal code... */ - if (is_non_value(result)) { - Uint reason = p->freason; - if (reason != TRAP) { - Eterm class; - Eterm value = p->fvalue; - /* Expand error value like in handle_error() */ - if (reason & EXF_ARGLIST) { - Eterm *tp; - ASSERT(is_tuple(value)); - tp = tuple_val(value); - value = tp[1]; - } - if ((reason & EXF_THROWN) && (p->catches <= 0)) { - Eterm *hp = HAlloc(p, 3); - value = TUPLE2(hp, am_nocatch, value); - reason = EXC_ERROR; - } - /* Note: expand_error_value() could theoretically - * allocate on the heap, but not for any error - * returned by a BIF, and it would do no harm, - * just be annoying. 
- */ - value = expand_error_value(p, reason, value); - class = exception_tag[GET_EXC_CLASS(reason)]; - - if (flags_meta & MATCH_SET_EXCEPTION_TRACE) { - erts_trace_exception(p, &ep->info.mfa, class, value, - &meta_tracer); - } - if (flags & MATCH_SET_EXCEPTION_TRACE) { - erts_trace_exception(p, &ep->info.mfa, class, value, - &ERTS_TRACER(p)); - } - if ((flags & MATCH_SET_RETURN_TO_TRACE) && p->catches > 0) { - /* can only happen if(local)*/ - Eterm *ptr = p->stop; - ASSERT(!applying || is_CP(*ptr)); - ASSERT(ptr <= STACK_START(p)); - /* Search the nearest stack frame for a catch */ - while (++ptr < STACK_START(p)) { - if (is_CP(*ptr)) break; - if (is_catch(*ptr)) { - if (applying) { - /* Apply of BIF, cp is in calling function */ - if (cp) erts_trace_return_to(p, cp); - } else { - /* Direct bif call, I points into - * calling function */ - erts_trace_return_to(p, I); - } - } - } - } - if ((flags_meta|flags) & MATCH_SET_EXCEPTION_TRACE) { - erts_proc_lock(p, ERTS_PROC_LOCKS_ALL_MINOR); - ERTS_TRACE_FLAGS(p) |= F_EXCEPTION_TRACE; - erts_proc_unlock(p, ERTS_PROC_LOCKS_ALL_MINOR); - } - } - } else { - if (flags_meta & MATCH_SET_RX_TRACE) { - erts_trace_return(p, &ep->info.mfa, result, &meta_tracer); - } - /* MATCH_SET_RETURN_TO_TRACE cannot occur if(meta) */ - if (flags & MATCH_SET_RX_TRACE) { - erts_trace_return(p, &ep->info.mfa, result, &ERTS_TRACER(p)); - } - if (flags & MATCH_SET_RETURN_TO_TRACE && - IS_TRACED_FL(p, F_TRACE_RETURN_TO)) { - /* can only happen if(local)*/ - if (applying) { - /* Apply of BIF, cp is in calling function */ - if (cp) erts_trace_return_to(p, cp); - } else { - /* Direct bif call, I points into calling function */ - erts_trace_return_to(p, I); - } - } - } - ERTS_CHK_HAVE_ONLY_MAIN_PROC_LOCK(p); - return result; -} - static ErtsTracer do_call_trace(Process* c_p, ErtsCodeInfo* info, Eterm* reg, int local, Binary* ms, ErtsTracer tracer) diff --git a/erts/emulator/beam/beam_bp.h b/erts/emulator/beam/beam_bp.h index 9f7ec16b71..54e84e7e4f 100644 --- a/erts/emulator/beam/beam_bp.h +++ b/erts/emulator/beam/beam_bp.h @@ -119,20 +119,16 @@ void erts_bp_free_matched_functions(BpFunctions* f); void erts_install_breakpoints(BpFunctions* f); void erts_uninstall_breakpoints(BpFunctions* f); void erts_consolidate_bp_data(BpFunctions* f, int local); -void erts_consolidate_bif_bp_data(void); void erts_set_trace_break(BpFunctions *f, Binary *match_spec); void erts_clear_trace_break(BpFunctions *f); -void erts_set_call_trace_bif(ErtsCodeInfo *ci, Binary *match_spec, int local); -void erts_clear_call_trace_bif(ErtsCodeInfo *ci, int local); +void erts_set_export_trace(ErtsCodeInfo *ci, Binary *match_spec, int local); +void erts_clear_export_trace(ErtsCodeInfo *ci, int local); void erts_set_mtrace_break(BpFunctions *f, Binary *match_spec, ErtsTracer tracer); void erts_clear_mtrace_break(BpFunctions *f); -void erts_set_mtrace_bif(ErtsCodeInfo *ci, Binary *match_spec, - ErtsTracer tracer); -void erts_clear_mtrace_bif(ErtsCodeInfo *ci); void erts_set_debug_break(BpFunctions *f); void erts_clear_debug_break(BpFunctions *f); @@ -151,8 +147,6 @@ BeamInstr erts_trace_break(Process *p, ErtsCodeInfo *ci, Eterm *args, int erts_is_trace_break(ErtsCodeInfo *ci, Binary **match_spec_ret, int local); int erts_is_mtrace_break(ErtsCodeInfo *ci, Binary **match_spec_ret, ErtsTracer *tracer_ret); -int erts_is_mtrace_bif(ErtsCodeInfo *ci, Binary **match_spec_ret, - ErtsTracer *tracer_ret); int erts_is_native_break(ErtsCodeInfo *ci); int erts_is_count_break(ErtsCodeInfo *ci, Uint *count_ret); int 
erts_is_time_break(Process *p, ErtsCodeInfo *ci, Eterm *call_time); @@ -163,10 +157,6 @@ void erts_schedule_time_break(Process *p, Uint out); void erts_set_time_break(BpFunctions *f, enum erts_break_op); void erts_clear_time_break(BpFunctions *f); -int erts_is_time_trace_bif(Process *p, ErtsCodeInfo *ci, Eterm *call_time); -void erts_set_time_trace_bif(ErtsCodeInfo *ci, enum erts_break_op); -void erts_clear_time_trace_bif(ErtsCodeInfo *ci); - ErtsCodeInfo *erts_find_local_func(ErtsCodeMFA *mfa); #if ERTS_GLB_INLINE_INCL_FUNC_DEF diff --git a/erts/emulator/beam/beam_emu.c b/erts/emulator/beam/beam_emu.c index 48824ef9da..5ff4549818 100644 --- a/erts/emulator/beam/beam_emu.c +++ b/erts/emulator/beam/beam_emu.c @@ -111,10 +111,9 @@ do { \ #define CHECK_ALIGNED(Dst) ASSERT((((Uint)&Dst) & (sizeof(Uint)-1)) == 0) -#define GET_BIF_MODULE(p) ((p)->info.mfa.module) -#define GET_BIF_FUNCTION(p) ((p)->info.mfa.function) -#define GET_BIF_ARITY(p) ((p)->info.mfa.arity) -#define GET_BIF_ADDRESS(p) ((BifFunction)((p)->trampoline.bif.func)) +#define GET_EXPORT_MODULE(p) ((p)->info.mfa.module) +#define GET_EXPORT_FUNCTION(p) ((p)->info.mfa.function) +#define GET_EXPORT_ARITY(p) ((p)->info.mfa.arity) #define TermWords(t) (((t) / (sizeof(BeamInstr)/sizeof(Eterm))) + !!((t) % (sizeof(BeamInstr)/sizeof(Eterm)))) @@ -296,6 +295,7 @@ do { \ */ static void init_emulator_finish(void) ERTS_NOINLINE; static ErtsCodeMFA *ubif2mfa(void* uf) ERTS_NOINLINE; +static BeamInstr *printable_return_address(Process* p, Eterm *E) ERTS_NOINLINE; static BeamInstr* handle_error(Process* c_p, BeamInstr* pc, Eterm* reg, ErtsCodeMFA* bif_mfa) ERTS_NOINLINE; static BeamInstr* call_error_handler(Process* p, ErtsCodeMFA* mfa, @@ -887,6 +887,43 @@ void process_main(Eterm * x_reg_array, FloatDef* f_reg_array) return; /* Never executed */ } +/* + * Enter all BIFs into the export table. + * + * Note that they will all call the error_handler until their modules have been + * loaded, which may prevent the system from booting if BIFs from non-preloaded + * modules are apply/3'd while loading code. Ordinary BIF calls will work fine + * however since they won't go through export entries. + */ +static void install_bifs(void) { + int i; + + for (i = 0; i < BIF_SIZE; i++) { + BifEntry *entry; + Export *ep; + int j; + + entry = &bif_table[i]; + + ep = erts_export_put(entry->module, entry->name, entry->arity); + + ep->info.op = BeamOpCodeAddr(op_i_func_info_IaaI); + ep->info.mfa.module = entry->module; + ep->info.mfa.function = entry->name; + ep->info.mfa.arity = entry->arity; + ep->bif_table_index = i; + + memset(&ep->trampoline, 0, sizeof(ep->trampoline)); + ep->trampoline.op = BeamOpCodeAddr(op_call_error_handler); + + for (j = 0; j < ERTS_NUM_CODE_IX; j++) { + ep->addressv[j] = ep->trampoline.raw; + } + + bif_export[i] = ep; + } +} + /* * One-time initialization of emulator. Does not need to be * in process_main(). @@ -894,9 +931,9 @@ void process_main(Eterm * x_reg_array, FloatDef* f_reg_array) static void init_emulator_finish(void) { +#if defined(ARCH_64) && defined(CODE_MODEL_SMALL) int i; -#if defined(ARCH_64) && defined(CODE_MODEL_SMALL) for (i = 0; i < NUMBER_OF_OPCODES; i++) { BeamInstr instr = BeamOpCodeAddr(i); if (instr >= (1ull << 32)) { @@ -917,24 +954,7 @@ init_emulator_finish(void) beam_exception_trace[0] = BeamOpCodeAddr(op_return_trace); /* UGLY */ beam_return_time_trace[0] = BeamOpCodeAddr(op_i_return_time_trace); - /* - * Enter all BIFs into the export table. 
- */ - for (i = 0; i < BIF_SIZE; i++) { - Export *ep = erts_export_put(bif_table[i].module, - bif_table[i].name, - bif_table[i].arity); - - ep->info.op = BeamOpCodeAddr(op_i_func_info_IaaI); - ep->info.mfa.module = bif_table[i].module; - ep->info.mfa.function = bif_table[i].name; - ep->info.mfa.arity = bif_table[i].arity; - - ep->trampoline.op = BeamOpCodeAddr(op_apply_bif); - ep->trampoline.bif.func = (BeamInstr) bif_table[i].f; - - bif_export[i] = ep; - } + install_bifs(); } /* @@ -1234,6 +1254,33 @@ Eterm error_atom[NUMBER_EXIT_CODES] = { am_badkey, /* 19 */ }; +/* Returns the return address at E[0] in printable form, skipping tracing in + * the same manner as gather_stacktrace. + * + * This is needed to generate correct stacktraces when throwing errors from + * instructions that return like an ordinary function, such as call_nif. */ +static BeamInstr *printable_return_address(Process* p, Eterm *E) { + Eterm *ptr = E; + + ASSERT(is_CP(*ptr)); + + while (ptr < STACK_START(p)) { + BeamInstr *cp = cp_val(*ptr); + + if (cp == beam_exception_trace || cp == beam_return_trace) { + ptr += 3; + } else if (cp == beam_return_time_trace) { + ptr += 2; + } else if (cp == beam_return_to_trace) { + ptr += 1; + } else { + return cp; + } + } + + ERTS_ASSERT(!"No continuation pointer on stack"); +} + /* * To fully understand the error handling, one must keep in mind that * when an exception is thrown, the search for a handler can jump back @@ -1527,19 +1574,24 @@ expand_error_value(Process* c_p, Uint freason, Eterm Value) { static void -gather_stacktrace(Process* p, Eterm *ptr, struct StackTrace* s, int depth) +gather_stacktrace(Process* p, struct StackTrace* s, int depth) { - BeamInstr *prev; - BeamInstr i_return_trace; + BeamInstr i_return_time_trace; BeamInstr i_return_to_trace; + BeamInstr i_return_trace; + BeamInstr *prev; + Eterm *ptr; if (depth == 0) { return; } - prev = s->depth ? s->trace[s->depth-1] : s->pc; - i_return_trace = beam_return_trace[0]; + i_return_time_trace = beam_return_time_trace[0]; i_return_to_trace = beam_return_to_trace[0]; + i_return_trace = beam_return_trace[0]; + + prev = s->depth ? 
s->trace[s->depth-1] : s->pc; + ptr = p->stop; /* * Traverse the stack backwards and add all unique continuation @@ -1552,16 +1604,15 @@ gather_stacktrace(Process* p, Eterm *ptr, struct StackTrace* s, int depth) while (ptr < STACK_START(p) && depth > 0) { if (is_CP(*ptr)) { - if (*cp_val(*ptr) == i_return_trace) { - /* Skip stack frame variables */ - do ++ptr; while (is_not_CP(*ptr)); - /* Skip return_trace parameters */ + BeamInstr *cp = cp_val(*ptr); + + if (*cp == i_return_time_trace) { ptr += 2; - } else if (*cp_val(*ptr) == i_return_to_trace) { - /* Skip stack frame variables */ - do ++ptr; while (is_not_CP(*ptr)); + } else if (*cp == i_return_to_trace) { + ptr += 1; + } else if (*cp == i_return_trace) { + ptr += 3; } else { - BeamInstr *cp = cp_val(*ptr); if (cp != prev) { /* Record non-duplicates only */ prev = cp; @@ -1614,7 +1665,6 @@ static void save_stacktrace(Process* c_p, BeamInstr* pc, Eterm* reg, ErtsCodeMFA *bif_mfa, Eterm args) { struct StackTrace* s; - Eterm *stack_start; int sz; int depth = erts_backtrace_depth; /* max depth (never negative) */ @@ -1632,33 +1682,6 @@ save_stacktrace(Process* c_p, BeamInstr* pc, Eterm* reg, s->freason = c_p->freason; s->depth = 0; - /* - * If we crash on an instruction that returns to a return/exception trace - * instruction, we must set the stacktrace 'pc' to the actual return - * address or we'll lose the top stackframe when gathering the stack - * trace. - */ - stack_start = STACK_TOP(c_p); - if (stack_start < STACK_START(c_p) && is_CP(*stack_start)) { - BeamInstr *cp = cp_val(*stack_start); - - if (cp == pc) { - if (pc == beam_exception_trace || pc == beam_return_trace) { - ASSERT(&stack_start[3] <= STACK_START(c_p)); - /* Fake having failed on the first instruction in the function - * pointed to by the tag. */ - pc = cp_val(stack_start[1]); - stack_start += 3; - } else if (pc == beam_return_to_trace) { - ASSERT(&stack_start[2] <= STACK_START(c_p)); - pc = cp_val(stack_start[1]); - /* Skip both the trace tag and the new 'pc' to avoid - * duplicated entries. */ - stack_start += 2; - } - } - } - /* * If the failure was in a BIF other than 'error/1', 'error/2', * 'exit/1' or 'throw/1', save BIF-MFA and save the argument @@ -1721,13 +1744,13 @@ save_stacktrace(Process* c_p, BeamInstr* pc, Eterm* reg, } /* Save the actual stack trace */ - gather_stacktrace(c_p, stack_start, s, depth); + gather_stacktrace(c_p, s, depth); } void erts_save_stacktrace(Process* p, struct StackTrace* s, int depth) { - gather_stacktrace(p, STACK_TOP(p), s, depth); + gather_stacktrace(p, s, depth); } /* @@ -1986,83 +2009,66 @@ apply_bif_error_adjustment(Process *p, Export *ep, Eterm *reg, Uint arity, BeamInstr *I, Uint stack_offset) { + int apply_only; + Uint need; + + need = stack_offset /* bytes */ / sizeof(Eterm); + apply_only = stack_offset == 0; + /* * I is only set when the apply is a tail call, i.e., * from the instructions i_apply_only, i_apply_last_P, * and apply_last_IP. */ - if (I - && BeamIsOpCode(ep->trampoline.op, op_apply_bif) - && (ep == bif_export[BIF_error_1] - || ep == bif_export[BIF_error_2] - || ep == bif_export[BIF_exit_1] - || ep == bif_export[BIF_throw_1])) { - /* - * We are about to tail apply one of the BIFs - * erlang:error/1, erlang:error/2, erlang:exit/1, - * or erlang:throw/1. Error handling of these BIFs is - * special! - * - * We need the topmost continuation pointer to point into the - * calling function when handling the error after the BIF has - * been applied. This in order to get the topmost stackframe - * correct. 
- * - * Note that these BIFs will unconditionally cause an - * exception to be raised. That is, our modifications of the - * stack will be corrected by the error handling code. - */ - int apply_only = stack_offset == 0; - BeamInstr *cpp; - Eterm *E; + if (!(I && (ep == bif_export[BIF_error_1] || + ep == bif_export[BIF_error_2] || + ep == bif_export[BIF_exit_1] || + ep == bif_export[BIF_throw_1]))) { + return; + } - E = p->stop; + /* + * We are about to tail apply one of the BIFs erlang:error/1, + * erlang:error/2, erlang:exit/1, or erlang:throw/1. Error handling of + * these BIFs is special! + * + * We need the topmost continuation pointer to point into the calling + * function when handling the error after the BIF has been applied. This in + * order to get the topmost stackframe correct. + * + * Note that these BIFs will unconditionally cause an exception to be + * raised. That is, our modifications of the stack will be corrected by the + * error handling code. + */ + if (need == 0) { + need = 1; /* i_apply_only */ + } - while (is_not_CP(*E)) { - E++; - } - cpp = cp_val(E[0]); + if (p->stop - p->htop < need) { + erts_garbage_collect(p, (int) need, reg, arity+1); + } + if (apply_only) { /* - * If we find an exception/return-to trace continuation - * pointer as the topmost continuation pointer, we do not - * need to do anything since the information will already - * be available for generation of the stacktrace. + * Called from the i_apply_only instruction. + * + * Push the continuation pointer for the current function to the stack. */ - - if (cpp != beam_exception_trace - && cpp != beam_return_trace - && cpp != beam_return_to_trace) { - Uint need = stack_offset /* bytes */ / sizeof(Eterm); - if (need == 0) - need = 1; /* i_apply_only */ - if (p->stop - p->htop < need) - erts_garbage_collect(p, (int) need, reg, arity+1); - if (apply_only) { - /* - * Called from the i_apply_only instruction. - * - * Push the continuation pointer for the current - * function to the stack. - */ - p->stop -= need; - p->stop[0] = make_cp(I); - } else { - /* - * Called from an i_apply_last_* instruction. - * - * The calling instruction will deallocate a stack - * frame of size 'stack_offset'. - * - * Push the continuation pointer for the current - * function to the stack, and then add a dummy - * stackframe for the i_apply_last* instruction - * to discard. - */ - p->stop[0] = make_cp(I); - p->stop -= need; - } - } + p->stop -= need; + p->stop[0] = make_cp(I); + } else { + /* + * Called from an i_apply_last_* instruction. + * + * The calling instruction will deallocate a stack frame of size + * 'stack_offset'. + * + * Push the continuation pointer for the current function to the stack, + * and then add a dummy stackframe for the i_apply_last* instruction + * to discard. + */ + p->stop[0] = make_cp(I); + p->stop -= need; } } @@ -3117,8 +3123,7 @@ erts_is_builtin(Eterm Mod, Eterm Name, int arity) return 0; } - return ep->addressv[erts_active_code_ix()] == ep->trampoline.raw && - BeamIsOpCode(ep->trampoline.op, op_apply_bif); + return ep->bif_table_index != -1; } diff --git a/erts/emulator/beam/beam_load.c b/erts/emulator/beam/beam_load.c index 2b4b16983c..45122fe933 100644 --- a/erts/emulator/beam/beam_load.c +++ b/erts/emulator/beam/beam_load.c @@ -141,7 +141,7 @@ typedef struct { * eventually patch with a pointer into * the export entry. */ - BifFunction bf; /* Pointer to BIF function if BIF; + Export *bif; /* Pointer to export entry if BIF; * NULL otherwise. 
*/ } ImportEntry; @@ -849,9 +849,7 @@ erts_finish_loading(Binary* magic, Process* c_p, DBG_CHECK_EXPORT(ep, code_ix); if (ep->addressv[code_ix] == ep->trampoline.raw) { - if (BeamIsOpCode(ep->trampoline.op, op_apply_bif)) { - continue; - } else if (BeamIsOpCode(ep->trampoline.op, op_i_generic_breakpoint)) { + if (BeamIsOpCode(ep->trampoline.op, op_i_generic_breakpoint)) { ERTS_LC_ASSERT(erts_thr_progress_is_blocking()); ASSERT(mod_tab_p->curr.num_traced_exports > 0); @@ -1479,15 +1477,14 @@ load_import_table(LoaderState* stp) } stp->import[i].arity = arity; stp->import[i].patches = 0; - stp->import[i].bf = NULL; + stp->import[i].bif = NULL; /* - * If the export entry refers to a BIF, get the pointer to - * the BIF function. + * If the export entry refers to a BIF, save a pointer to the BIF entry. */ if ((e = erts_active_export_entry(mod, func, arity)) != NULL) { - if (BeamIsOpCode(e->trampoline.op, op_apply_bif)) { - stp->import[i].bf = (BifFunction) e->trampoline.bif.func; + if (e->bif_table_index != -1) { + stp->import[i].bif = e; if (func == am_load_nif && mod == am_erlang && arity == 2) { stp->may_load_nif = 1; } @@ -1538,33 +1535,6 @@ read_export_table(LoaderState* stp) LoadError2(stp, "export table entry %u: label %u not resolved", i, n); } stp->export[i].address = address = stp->codev + value; - - /* - * Find out if there is a BIF with the same name. - */ - - if (!is_bif(stp->module, func, arity)) { - continue; - } - - /* - * This is a stub for a BIF. - * - * It should not be exported, and the information in its - * func_info instruction should be invalidated so that it - * can be filtered out by module_info(functions) and by - * any other functions that walk through all local functions. - */ - - if (stp->labels[n].num_patches > 0) { - LoadError3(stp, "there are local calls to the stub for " - "the BIF %T:%T/%d", - stp->module, func, arity); - } - stp->export[i].address = NULL; - address[-1] = 0; - address[-2] = NIL; - address[-3] = NIL; } return 1; @@ -1572,25 +1542,16 @@ read_export_table(LoaderState* stp) return 0; } - static int is_bif(Eterm mod, Eterm func, unsigned arity) { - Export* e = erts_active_export_entry(mod, func, arity); - if (e == NULL) { - return 0; - } - if (! BeamIsOpCode(e->trampoline.op, op_apply_bif)) { - return 0; - } - if (mod == am_erlang && func == am_apply && arity == 3) { - /* - * erlang:apply/3 is a special case -- it is implemented - * as an instruction and it is OK to redefine it. 
- */ - return 0; + Export *e = erts_active_export_entry(mod, func, arity); + + if (e != NULL) { + return e->bif_table_index != -1; } - return 1; + + return 0; } static int @@ -2542,10 +2503,14 @@ load_code(LoaderState* stp) if (i >= stp->num_imports) { LoadError1(stp, "invalid import table index %d", i); } - if (stp->import[i].bf == NULL) { + if (stp->import[i].bif == NULL) { LoadError1(stp, "not a BIF: import table index %d", i); } - code[ci++] = (BeamInstr) stp->import[i].bf; + { + int bif_index = stp->import[i].bif->bif_table_index; + BifEntry *bif_entry = &bif_table[bif_index]; + code[ci++] = (BeamInstr) bif_entry->f; + } break; case 'P': /* Byte offset into tuple or stack */ case 'Q': /* Like 'P', but packable */ @@ -2853,18 +2818,43 @@ load_code(LoaderState* stp) switch (stp->specific_op) { case op_i_func_info_IaaI: { + int padding_required; Sint offset; + if (function_number >= stp->num_functions) { LoadError1(stp, "too many functions in module (header said %u)", stp->num_functions); } - if (stp->may_load_nif) { + /* Native function calls may be larger than their stubs, so + * we'll need to make sure any potentially-native function stub + * is padded with enough room. + * + * Note that the padding is applied for the previous function, + * not the current one, so we check whether the old F/A is + * a BIF. */ + padding_required = last_func_start && (stp->may_load_nif || + is_bif(stp->module, stp->function, stp->arity)); + + /* + * Save context for error messages. + */ + stp->function = code[ci-2]; + stp->arity = code[ci-1]; + + /* + * Save current offset of into the line instruction array. + */ + if (stp->func_line) { + stp->func_line[function_number] = stp->current_li; + } + + if (padding_required) { const int finfo_ix = ci - FUNC_INFO_SZ; - if (finfo_ix - last_func_start < BEAM_NIF_MIN_FUNC_SZ && last_func_start) { + if (finfo_ix - last_func_start < BEAM_NATIVE_MIN_FUNC_SZ) { /* Must make room for call_nif op */ - int pad = BEAM_NIF_MIN_FUNC_SZ - (finfo_ix - last_func_start); - ASSERT(pad > 0 && pad < BEAM_NIF_MIN_FUNC_SZ); + int pad = BEAM_NATIVE_MIN_FUNC_SZ - (finfo_ix - last_func_start); + ASSERT(pad > 0 && pad < BEAM_NATIVE_MIN_FUNC_SZ); CodeNeed(pad); sys_memmove(&code[finfo_ix+pad], &code[finfo_ix], FUNC_INFO_SZ*sizeof(BeamInstr)); @@ -2875,20 +2865,6 @@ load_code(LoaderState* stp) } last_func_start = ci; - /* - * Save current offset of into the line instruction array. - */ - - if (stp->func_line) { - stp->func_line[function_number] = stp->current_li; - } - - /* - * Save context for error messages. - */ - stp->function = code[ci-2]; - stp->arity = code[ci-1]; - /* When this assert is triggered, it is normally a sign that the size of the ops.tab i_func_info instruction is not the same as FUNC_INFO_SZ */ @@ -2918,7 +2894,6 @@ load_code(LoaderState* stp) case op_i_bs_match_string_yfWW: new_string_patch(stp, ci-1); break; - case op_catch_yf: /* code[ci-3] &&lb_catch_yf * code[ci-2] y-register offset in E @@ -3192,6 +3167,26 @@ is_killed_by_make_fun(LoaderState* stp, GenOpArg Reg, GenOpArg idx) } } +/* Test whether Bif is "heavy" and should always go through its export entry */ +static int +is_heavy_bif(LoaderState* stp, GenOpArg Bif) +{ + Export *bif_export; + + if (Bif.type != TAG_u || Bif.val >= stp->num_imports) { + return 0; + } + + bif_export = stp->import[Bif.val].bif; + + if (bif_export) { + int bif_index = bif_export->bif_table_index; + return bif_table[bif_index].kind == BIF_KIND_HEAVY; + } + + return 0; +} + /* * Generate an instruction for element/2. 
*/ @@ -5219,25 +5214,28 @@ final_touch(LoaderState* stp, struct erl_module_instance* inst_p) */ for (i = 0; i < stp->num_exps; i++) { - Export* ep; - BeamInstr* address = stp->export[i].address; + Export* ep; + BeamInstr* address = stp->export[i].address; - if (address == NULL) { - /* Skip stub for a BIF */ - continue; - } - ep = erts_export_put(stp->module, stp->export[i].function, - stp->export[i].arity); - if (on_load) { - /* - * on_load: Don't make any of the exported functions - * callable yet. Keep any function in the current - * code callable. - */ + ep = erts_export_put(stp->module, + stp->export[i].function, + stp->export[i].arity); + + /* Fill in BIF stubs with a proper call to said BIF. */ + if (ep->bif_table_index != -1) { + erts_write_bif_wrapper(ep, address); + } + + if (on_load) { + /* + * on_load: Don't make any of the exported functions + * callable yet. Keep any function in the current + * code callable. + */ ep->trampoline.not_loaded.deferred = (BeamInstr) address; - } - else + } else { ep->addressv[erts_staging_code_ix()] = address; + } } /* @@ -5411,15 +5409,14 @@ transform_engine(LoaderState* st) i = instr->a[ap].val; ASSERT(i < st->num_imports); - if (i >= st->num_imports || st->import[i].bf == NULL) + if (i >= st->num_imports || st->import[i].bif == NULL) goto restart; if (bif_number != -1 && - bif_export[bif_number]->trampoline.bif.func != (BeamInstr) st->import[i].bf) { + bif_export[bif_number] != st->import[i].bif) { goto restart; } } break; - #endif #if defined(TOP_is_not_bif) case TOP_is_not_bif: @@ -5449,7 +5446,7 @@ transform_engine(LoaderState* st) * they are special. */ if (i < st->num_imports) { - if (st->import[i].bf != NULL || + if (st->import[i].bif != NULL || (st->import[i].module == am_erlang && st->import[i].function == am_apply && (st->import[i].arity == 2 || st->import[i].arity == 3))) { diff --git a/erts/emulator/beam/beam_load.h b/erts/emulator/beam/beam_load.h index 156c3c45e2..e7127c5b08 100644 --- a/erts/emulator/beam/beam_load.h +++ b/erts/emulator/beam/beam_load.h @@ -106,7 +106,7 @@ typedef struct beam_code_header { }BeamCodeHeader; -# define BEAM_NIF_MIN_FUNC_SZ 4 +# define BEAM_NATIVE_MIN_FUNC_SZ 4 void erts_release_literal_area(struct ErtsLiteralArea_* literal_area); int erts_is_module_native(BeamCodeHeader* code); diff --git a/erts/emulator/beam/bif.c b/erts/emulator/beam/bif.c index aba3af3424..593ddf9a29 100644 --- a/erts/emulator/beam/bif.c +++ b/erts/emulator/beam/bif.c @@ -4982,7 +4982,17 @@ void erts_init_trap_export(Export* ep, Eterm m, Eterm f, Uint a, ep->info.mfa.function = f; ep->info.mfa.arity = a; ep->trampoline.op = BeamOpCodeAddr(op_apply_bif); - ep->trampoline.bif.func = (BeamInstr) bif; + ep->trampoline.raw[1] = (BeamInstr)bif; +} + +/* + * Writes a BIF call wrapper to the given address. + */ +void erts_write_bif_wrapper(Export *export, BeamInstr *address) { + BifEntry *entry = &bif_table[export->bif_table_index]; + + address[0] = BeamOpCodeAddr(op_apply_bif); + address[1] = (BeamInstr)entry->f; } void erts_init_bif(void) @@ -5073,7 +5083,7 @@ static BIF_RETTYPE dirty_bif_trap(BIF_ALIST) * correct by call to dirty_bif_trap()... 
*/ - ASSERT(BIF_P->arity == nep->exp.info.mfa.arity); + ASSERT(BIF_P->arity == nep->trampoline.info.mfa.arity); erts_nif_export_restore(BIF_P, nep, THE_NON_VALUE); @@ -5129,6 +5139,7 @@ erts_schedule_bif(Process *proc, if (!ERTS_PROC_IS_EXITING(c_p)) { Export *exp; BifFunction dbif, ibif; + BeamInstr call_instr; BeamInstr *pc; /* @@ -5163,27 +5174,40 @@ erts_schedule_bif(Process *proc, if (i == NULL) { ERTS_INTERNAL_ERROR("Missing instruction pointer"); } + + if (BeamIsOpCode(*i, op_i_generic_breakpoint)) { + ErtsCodeInfo *ci; + GenericBp *bp; + + ci = erts_code_to_codeinfo(i); + bp = ci->u.gen_bp; + + call_instr = bp->orig_instr; + } else { + call_instr = *i; + } + #ifdef HIPE - else if (proc->flags & F_HIPE_MODE) { + if (proc->flags & F_HIPE_MODE) { /* Pointer to bif export in i */ exp = (Export *) i; pc = cp_val(c_p->stop[0]); mfa = &exp->info.mfa; - } + } else /* !! This is part of the if clause below !! */ #endif - else if (BeamIsOpCode(*i, op_call_bif_e)) { - /* Pointer to bif export in i+1 */ - exp = (Export *) i[1]; + if (BeamIsOpCode(call_instr, op_call_light_bif_be)) { + /* Pointer to bif export in i+2 */ + exp = (Export *) i[2]; pc = i; mfa = &exp->info.mfa; } - else if (BeamIsOpCode(*i, op_call_bif_only_e)) { - /* Pointer to bif export in i+1 */ - exp = (Export *) i[1]; + else if (BeamIsOpCode(call_instr, op_call_light_bif_only_be)) { + /* Pointer to bif export in i+2 */ + exp = (Export *) i[2]; pc = i; mfa = &exp->info.mfa; } - else if (BeamIsOpCode(*i, op_apply_bif)) { + else if (BeamIsOpCode(call_instr, op_apply_bif)) { pc = cp_val(c_p->stop[0]); mfa = erts_code_to_codemfa(i); } diff --git a/erts/emulator/beam/bif.tab b/erts/emulator/beam/bif.tab index c9f5177bd3..19dabc0514 100644 --- a/erts/emulator/beam/bif.tab +++ b/erts/emulator/beam/bif.tab @@ -25,13 +25,14 @@ # # ::= "bif" * | # "ubif" * | -# "gcbif" * +# "hbif" * # ::= ":" "/" # -# ubif: Use for operators and guard BIFs that never build anything -# on the heap (such as tuple_size/1) and operators. +# ubif: Use for operators and guard BIFs. # -# gcbif: Use for guard BIFs that may build on the heap (such as abs/1). +# hbif: Use for BIFs that perform garbage collection or need up-to-date +# information on where they were called from. These must be called +# through the export entry. # # bif: Use for all other BIFs. # @@ -60,7 +61,7 @@ bif erlang:display_string/1 bif erlang:display_nl/0 ubif erlang:element/2 bif erlang:erase/0 -bif erlang:erase/1 +hbif erlang:erase/1 bif erlang:exit/1 bif erlang:exit/2 bif erlang:exit_signal/2 @@ -70,7 +71,7 @@ ubif erlang:float/1 bif erlang:float_to_list/1 bif erlang:float_to_list/2 bif erlang:fun_info/2 -bif erts_internal:garbage_collect/1 +hbif erts_internal:garbage_collect/1 bif erlang:get/0 bif erlang:get/1 bif erlang:get_keys/1 @@ -127,10 +128,10 @@ bif erlang:ports/0 bif erlang:pre_loaded/0 bif erlang:process_flag/2 bif erts_internal:process_flag/3 -bif erlang:process_info/1 -bif erlang:process_info/2 +hbif erlang:process_info/1 +hbif erlang:process_info/2 bif erlang:processes/0 -bif erlang:put/2 +hbif erlang:put/2 bif erlang:register/2 bif erlang:registered/0 ubif erlang:round/1 @@ -174,7 +175,7 @@ bif erts_internal:port_connect/2 bif erts_internal:request_system_task/3 bif erts_internal:request_system_task/4 -bif erts_internal:check_process_code/1 +hbif erts_internal:check_process_code/1 bif erts_internal:map_to_tuple_keys/1 bif erts_internal:term_type/1 @@ -466,7 +467,7 @@ bif code:is_module_native/1 # New Bifs in R9C. 
# -bif erlang:hibernate/3 +hbif erlang:hibernate/3 bif error_logger:warning_map/0 # diff --git a/erts/emulator/beam/bif_instrs.tab b/erts/emulator/beam/bif_instrs.tab index c02dcd543c..04d36b721c 100644 --- a/erts/emulator/beam/bif_instrs.tab +++ b/erts/emulator/beam/bif_instrs.tab @@ -212,26 +212,32 @@ i_length.execute(Fail, Live, Dst) { // Call a BIF, store the result in x(0) and transfer control to the // next instruction. // -call_bif(Exp) { +call_light_bif(Bif, Exp) { + Export *export; ErtsBifFunc bf; + Eterm result; ErlHeapFragment *live_hf_end; - Export *export = (Export*) $Exp; + + bf = (ErtsBifFunc) $Bif; + export = (Export*) $Exp; if (!((FCALLS - 1) > 0 || (FCALLS-1) > neg_o_reds)) { /* * If we have run out of reductions, do a context * switch before calling the BIF. */ - c_p->arity = GET_BIF_ARITY(export); + c_p->arity = GET_EXPORT_ARITY(export); c_p->current = &export->info.mfa; goto context_switch3; } - ERTS_MSACC_SET_BIF_STATE_CACHED_X(GET_BIF_MODULE(export), - GET_BIF_ADDRESS(export)); + if (ERTS_UNLIKELY(export->is_bif_traced)) { + $SAVE_CONTINUATION_POINTER($NEXT_INSTRUCTION); + $DISPATCH_EXPORT(export); + } - bf = GET_BIF_ADDRESS(export); + ERTS_MSACC_SET_BIF_STATE_CACHED_X(GET_EXPORT_MODULE(export), bf); PRE_BIF_SWAPOUT(c_p); ERTS_DBG_CHK_REDS(c_p, FCALLS); @@ -243,21 +249,26 @@ call_bif(Exp) { ERTS_VERIFY_UNUSED_TEMP_ALLOC(c_p); live_hf_end = c_p->mbuf; ERTS_CHK_MBUF_SZ(c_p); + result = (*bf)(c_p, reg, I); + + /* Only heavy BIFs may GC. */ + ASSERT(E == c_p->stop); + ERTS_CHK_MBUF_SZ(c_p); ASSERT(!ERTS_PROC_IS_EXITING(c_p) || is_non_value(result)); ERTS_VERIFY_UNUSED_TEMP_ALLOC(c_p); ERTS_HOLE_CHECK(c_p); ERTS_REQ_PROC_MAIN_LOCK(c_p); if (ERTS_IS_GC_DESIRED(c_p)) { - Uint arity = GET_BIF_ARITY(export); + Uint arity = GET_EXPORT_ARITY(export); result = erts_gc_after_bif_call_lhf(c_p, live_hf_end, result, reg, arity); E = c_p->stop; } - PROCESS_MAIN_CHK_LOCKS(c_p); HTOP = HEAP_TOP(c_p); FCALLS = c_p->fcalls; + PROCESS_MAIN_CHK_LOCKS(c_p); ERTS_DBG_CHK_REDS(c_p, FCALLS); /* @@ -282,7 +293,6 @@ call_bif(Exp) { */ $SAVE_CONTINUATION_POINTER($NEXT_INSTRUCTION); SET_I(c_p->i); - SWAPIN; $DISPATCH(); } @@ -299,27 +309,36 @@ call_bif(Exp) { // Call a BIF tail-recursively, storing the result in x(0) and doing // a return to the continuation poiner. // - -call_bif_only(Exp) { +call_light_bif_only(Bif, Exp) { + ErlHeapFragment *live_hf_end; ErtsBifFunc bf; + Export *export; Eterm result; - ErlHeapFragment *live_hf_end; - Export *export = (Export*) $Exp; + + bf = (ErtsBifFunc) $Bif; + export = (Export*) $Exp; if (!((FCALLS - 1) > 0 || (FCALLS-1) > neg_o_reds)) { /* * If we have run out of reductions, do a context * switch before calling the BIF. */ - c_p->arity = GET_BIF_ARITY(export); + c_p->arity = GET_EXPORT_ARITY(export); c_p->current = &export->info.mfa; goto context_switch3; } - ERTS_MSACC_SET_BIF_STATE_CACHED_X(GET_BIF_MODULE(export), - GET_BIF_ADDRESS(export)); + if (ERTS_UNLIKELY(export->is_bif_traced)) { + /* Set up a dummy stack frame so we can perform a normal call. Loader + * transformations ensure that the next instruction after this is + * 'deallocate_return 0'. 
*/ + $AH(0, 0, GET_EXPORT_ARITY(export)); - bf = GET_BIF_ADDRESS(export); + $SAVE_CONTINUATION_POINTER($NEXT_INSTRUCTION); + $DISPATCH_EXPORT(export); + } + + ERTS_MSACC_SET_BIF_STATE_CACHED_X(GET_EXPORT_MODULE(export), bf); PRE_BIF_SWAPOUT(c_p); ERTS_DBG_CHK_REDS(c_p, FCALLS); @@ -331,21 +350,26 @@ call_bif_only(Exp) { ERTS_VERIFY_UNUSED_TEMP_ALLOC(c_p); live_hf_end = c_p->mbuf; ERTS_CHK_MBUF_SZ(c_p); + result = (*bf)(c_p, reg, I); + + /* Only heavy BIFs may GC. */ + ASSERT(E == c_p->stop); + ERTS_CHK_MBUF_SZ(c_p); ASSERT(!ERTS_PROC_IS_EXITING(c_p) || is_non_value(result)); ERTS_VERIFY_UNUSED_TEMP_ALLOC(c_p); ERTS_HOLE_CHECK(c_p); ERTS_REQ_PROC_MAIN_LOCK(c_p); if (ERTS_IS_GC_DESIRED(c_p)) { - Uint arity = GET_BIF_ARITY(export); + Uint arity = GET_EXPORT_ARITY(export); result = erts_gc_after_bif_call_lhf(c_p, live_hf_end, result, reg, arity); E = c_p->stop; } - PROCESS_MAIN_CHK_LOCKS(c_p); HTOP = HEAP_TOP(c_p); FCALLS = c_p->fcalls; + PROCESS_MAIN_CHK_LOCKS(c_p); ERTS_DBG_CHK_REDS(c_p, FCALLS); /* @@ -370,7 +394,6 @@ call_bif_only(Exp) { * to the continuation pointer on the stack will be done. */ SET_I(c_p->i); - SWAPIN; $DISPATCH(); } @@ -445,7 +468,7 @@ nif_bif.call_nif() { * I[2]: Pointer to erl_module_nif * I[3]: Function pointer to dirty NIF * - * This layout is determined by the NifExport struct + * This layout is determined by the ErtsNativeFunc struct */ ERTS_MSACC_SET_STATE_CACHED_M_X(ERTS_MSACC_STATE_NIF); @@ -497,8 +520,8 @@ nif_bif.call_nif() { nif_bif.apply_bif() { /* - * At this point, I points to the code[0] in the export entry for - * the BIF: + * At this point, I points to the code[0] in the native function wrapper + * for the BIF: * * code[-3]: Module * code[-2]: Function @@ -515,21 +538,19 @@ nif_bif.apply_bif() { codemfa = erts_code_to_codemfa(I); - ERTS_MSACC_SET_BIF_STATE_CACHED_X(codemfa->module, (BifFunction)Arg(0)); - + ERTS_MSACC_SET_BIF_STATE_CACHED_X(codemfa->module, (BifFunction)I[1]); /* In case we apply process_info/1,2 or load_nif/1 */ c_p->current = codemfa; $SET_CP_I_ABS(I); /* In case we apply check_process_code/2. */ c_p->arity = 0; /* To allow garbage collection on ourselves - * (check_process_code/2). - */ + * (check_process_code/2, put/2, etc). 
*/ DTRACE_BIF_ENTRY(c_p, codemfa); SWAPOUT; ERTS_DBG_CHK_REDS(c_p, FCALLS - 1); c_p->fcalls = FCALLS - 1; - vbf = (BifFunction) Arg(0); + vbf = (BifFunction) I[1]; PROCESS_MAIN_CHK_LOCKS(c_p); bif_nif_arity = codemfa->arity; ASSERT(bif_nif_arity <= 4); @@ -582,9 +603,39 @@ nif_bif.epilogue() { $DISPATCH(); } { - BeamInstr *cp = cp_val(*E); + BeamInstr *cp = printable_return_address(c_p, E); ASSERT(VALID_INSTR(*cp)); I = handle_error(c_p, cp, reg, c_p->current); } goto post_error_handling; } + +i_load_nif() { + //| -no_next + if (erts_try_seize_code_write_permission(c_p)) { + Eterm result; + + PRE_BIF_SWAPOUT(c_p); + result = erts_load_nif(c_p, I, r(0), r(1)); + erts_release_code_write_permission(); + ERTS_REQ_PROC_MAIN_LOCK(c_p); + SWAPIN; + + if (ERTS_LIKELY(is_value(result))) { + r(0) = result; + $NEXT0(); + } else { + static ErtsCodeMFA mfa = {am_erlang, am_load_nif, 2}; + c_p->freason = BADARG; + I = handle_error(c_p, I, reg, &mfa); + goto post_error_handling; + } + } else { + /* Yield and try again */ + $SET_CP_I_ABS(I); + SWAPOUT; + c_p->current = NULL; + c_p->arity = 2; + goto do_schedule; + } +} diff --git a/erts/emulator/beam/erl_alloc.c b/erts/emulator/beam/erl_alloc.c index b9f0334172..1bbc7d7f1e 100644 --- a/erts/emulator/beam/erl_alloc.c +++ b/erts/emulator/beam/erl_alloc.c @@ -653,8 +653,6 @@ erts_alloc_init(int *argc, char **argv, ErtsAllocInitOpts *eaiop) = erts_timer_type_size(ERTS_ALC_T_HL_PTIMER); fix_type_sizes[ERTS_ALC_FIX_TYPE_IX(ERTS_ALC_T_BIF_TIMER)] = erts_timer_type_size(ERTS_ALC_T_BIF_TIMER); - fix_type_sizes[ERTS_ALC_FIX_TYPE_IX(ERTS_ALC_T_NIF_EXP_TRACE)] - = sizeof(NifExportTrace); fix_type_sizes[ERTS_ALC_FIX_TYPE_IX(ERTS_ALC_T_MREF_NSCHED_ENT)] = sizeof(ErtsNSchedMagicRefTableEntry); fix_type_sizes[ERTS_ALC_FIX_TYPE_IX(ERTS_ALC_T_MINDIRECTION)] @@ -2392,10 +2390,6 @@ erts_memory(fmtfn_t *print_to_p, void *print_to_arg, void *proc, Eterm earg) &size.processes_used, fi, ERTS_ALC_T_BIF_TIMER); - add_fix_values(&size.processes, - &size.processes_used, - fi, - ERTS_ALC_T_NIF_EXP_TRACE); } if (want.atom || want.atom_used) { diff --git a/erts/emulator/beam/erl_alloc.types b/erts/emulator/beam/erl_alloc.types index 349977ebe7..cd978a8d57 100644 --- a/erts/emulator/beam/erl_alloc.types +++ b/erts/emulator/beam/erl_alloc.types @@ -332,7 +332,6 @@ type DB_MS_PSDO_PROC LONG_LIVED ETS db_match_pseudo_proc type SCHDLR_DATA LONG_LIVED SYSTEM scheduler_data type NIF_TRAP_EXPORT STANDARD PROCESSES nif_trap_export_entry -type NIF_EXP_TRACE FIXED_SIZE PROCESSES nif_export_trace type EXPORT LONG_LIVED CODE export_entry type MONITOR FIXED_SIZE PROCESSES monitor type MONITOR_SUSPEND STANDARD PROCESSES monitor_suspend diff --git a/erts/emulator/beam/erl_bif_info.c b/erts/emulator/beam/erl_bif_info.c index 8844012e37..946fefa14f 100644 --- a/erts/emulator/beam/erl_bif_info.c +++ b/erts/emulator/beam/erl_bif_info.c @@ -158,8 +158,9 @@ static Eterm os_version_tuple; static Eterm current_function(Process* p, ErtsHeapFactory *hfact, Process* rp, int full_info, Uint reserve_size, int flags); -static Eterm current_stacktrace(ErtsHeapFactory *hfact, Process* rp, - Uint reserve_size); +static Eterm +current_stacktrace(Process* p, ErtsHeapFactory *hfact, Process* rp, + Uint reserve_size, int flags); Eterm erts_bld_bin_list(Uint **hpp, Uint *szp, ErlOffHeap* oh, Eterm tail) @@ -1383,7 +1384,7 @@ process_info_aux(Process *c_p, break; case ERTS_PI_IX_CURRENT_STACKTRACE: - res = current_stacktrace(hfact, rp, reserve_size); + res = current_stacktrace(c_p, hfact, rp, reserve_size, flags); break; 
case ERTS_PI_IX_INITIAL_CALL: @@ -2021,21 +2022,44 @@ current_function(Process *c_p, ErtsHeapFactory *hfact, Process* rp, } if (c_p == rp && !(flags & ERTS_PI_FLAG_REQUEST_FOR_OTHER)) { - FunctionInfo fi2; - BeamInstr* continuation_ptr; + BeamInstr* return_address; + FunctionInfo caller_fi; + Eterm *ptr; - /* - * The current function is erlang:process_info/{1,2}, - * which is not the answer that the application want. - * We will use the continuation pointer stored at the - * top of the stack instead. - */ - continuation_ptr = (BeamInstr *) rp->stop[0]; - erts_lookup_function_info(&fi2, continuation_ptr, full_info); - if (fi2.mfa) { - fi = fi2; - rp->current = fi2.mfa; - } + /* + * The current function is erlang:process_info/{1,2}, and we've + * historically returned the *calling* function in that case. We + * therefore use the continuation pointer stored at the top of the + * stack instead, which is safe since process_info is a "heavy" BIF + * that is only called through its export entry. + */ + + return_address = NULL; + ptr = STACK_TOP(rp); + ASSERT(is_CP(*ptr)); + + while (ptr < STACK_START(rp)) { + BeamInstr *cp = cp_val(*ptr); + + if (*cp == BeamOpCodeAddr(op_return_trace)) { + ptr += 3; + } else if (*cp == BeamOpCodeAddr(op_i_return_time_trace)) { + ptr += 2; + } else if (*cp == BeamOpCodeAddr(op_i_return_to_trace)) { + ptr += 1; + } else { + return_address = cp; + break; + } + } + + ASSERT(return_address != NULL); + + erts_lookup_function_info(&caller_fi, return_address, full_info); + if (caller_fi.mfa) { + fi = caller_fi; + rp->current = caller_fi.mfa; + } } /* @@ -2056,8 +2080,8 @@ current_function(Process *c_p, ErtsHeapFactory *hfact, Process* rp, } static Eterm -current_stacktrace(ErtsHeapFactory *hfact, Process* rp, - Uint reserve_size) +current_stacktrace(Process *p, ErtsHeapFactory *hfact, Process* rp, + Uint reserve_size, int flags) { Uint sz; struct StackTrace* s; @@ -2074,9 +2098,14 @@ current_stacktrace(ErtsHeapFactory *hfact, Process* rp, sz = offsetof(struct StackTrace, trace) + sizeof(BeamInstr *)*depth; s = (struct StackTrace *) erts_alloc(ERTS_ALC_T_TMP, sz); s->depth = 0; - if (depth > 0 && rp->i) { - s->trace[s->depth++] = rp->i; - depth--; + s->pc = NULL; + + /* We skip current pc when requesting our own stack trace since it will + * inevitably point to process_info/1,2 */ + if ((p != rp || (flags & ERTS_PI_FLAG_REQUEST_FOR_OTHER)) && + depth > 0 && rp->i) { + s->trace[s->depth++] = rp->i; + depth--; } erts_save_stacktrace(rp, s, depth); diff --git a/erts/emulator/beam/erl_bif_trace.c b/erts/emulator/beam/erl_bif_trace.c index 6fed112627..e03c97fe10 100644 --- a/erts/emulator/beam/erl_bif_trace.c +++ b/erts/emulator/beam/erl_bif_trace.c @@ -80,9 +80,6 @@ static Eterm trace_info_func(Process* p, Eterm pid_spec, Eterm key); static Eterm trace_info_on_load(Process* p, Eterm key); static Eterm trace_info_event(Process* p, Eterm event, Eterm key); - -static void reset_bif_trace(void); -static void setup_bif_trace(void); static void install_exp_breakpoints(BpFunctions* f); static void uninstall_exp_breakpoints(BpFunctions* f); static void clean_export_entries(BpFunctions* f); @@ -1053,8 +1050,7 @@ static int function_is_traced(Process *p, int r = 0; - ASSERT(BeamIsOpCode(*pc, op_apply_bif) || - BeamIsOpCode(*pc, op_i_generic_breakpoint)); + ASSERT(BeamIsOpCode(*pc, op_i_generic_breakpoint)); if (erts_is_trace_break(&ep->info, ms, 0)) { return FUNC_TRACE_GLOBAL_TRACE; @@ -1426,18 +1422,21 @@ erts_set_trace_pattern(Process*p, ErtsCodeMFA *mfa, int specified, int n; 
BpFunction* fp; - /* - * First work on normal functions (not real BIFs). - */ - erts_bp_match_export(&finish_bp.e, mfa, specified); fp = finish_bp.e.matching; n = finish_bp.e.matched; for (i = 0; i < n; i++) { ErtsCodeInfo *ci = fp[i].ci; - BeamInstr* pc = erts_codeinfo_to_code(ci); - Export* ep = ErtsContainerStruct(ci, Export, info); + BeamInstr* pc; + Export* ep; + + pc = erts_codeinfo_to_code(ci); + ep = ErtsContainerStruct(ci, Export, info); + + if (ep->bif_table_index != -1) { + ep->is_bif_traced = !!on; + } if (on && !flags.breakpoint) { /* Turn on global call tracing */ @@ -1449,7 +1448,7 @@ erts_set_trace_pattern(Process*p, ErtsCodeMFA *mfa, int specified, ep->trampoline.op = BeamOpCodeAddr(op_trace_jump_W); ep->trampoline.trace.address = (BeamInstr) ep->addressv[code_ix]; } - erts_set_call_trace_bif(ci, match_prog_set, 0); + erts_set_export_trace(ci, match_prog_set, 0); if (ep->addressv[code_ix] != pc) { ep->trampoline.op = BeamOpCodeAddr(op_i_generic_breakpoint); } @@ -1460,90 +1459,13 @@ erts_set_trace_pattern(Process*p, ErtsCodeMFA *mfa, int specified, * Turn off global tracing, either explicitly or implicitly * before turning on breakpoint tracing. */ - erts_clear_call_trace_bif(ci, 0); + erts_clear_export_trace(ci, 0); if (BeamIsOpCode(ep->trampoline.op, op_i_generic_breakpoint)) { ep->trampoline.op = BeamOpCodeAddr(op_trace_jump_W); } } } - /* - ** OK, now for the bif's - */ - for (i = 0; i < BIF_SIZE; ++i) { - Export *ep = bif_export[i]; - - if (!ExportIsBuiltIn(ep)) { - continue; - } - - if (bif_table[i].f == bif_table[i].traced) { - /* Trace wrapper same as regular function - untraceable */ - continue; - } - - switch (specified) { - case 3: - if (mfa->arity != ep->info.mfa.arity) - continue; - case 2: - if (mfa->function != ep->info.mfa.function) - continue; - case 1: - if (mfa->module != ep->info.mfa.module) - continue; - case 0: - break; - default: - ASSERT(0); - } - - if (! flags.breakpoint) { /* Export entry call trace */ - if (on) { - erts_clear_call_trace_bif(&ep->info, 1); - erts_clear_mtrace_bif(&ep->info); - erts_set_call_trace_bif(&ep->info, match_prog_set, 0); - } else { /* off */ - erts_clear_call_trace_bif(&ep->info, 0); - } - matches++; - } else { /* Breakpoint call trace */ - int m = 0; - - if (on) { - if (flags.local) { - erts_clear_call_trace_bif(&ep->info, 0); - erts_set_call_trace_bif(&ep->info, match_prog_set, 1); - m = 1; - } - if (flags.meta) { - erts_set_mtrace_bif(&ep->info, meta_match_prog_set, - meta_tracer); - m = 1; - } - if (flags.call_time) { - erts_set_time_trace_bif(&ep->info, on); - /* I don't want to remove any other tracers */ - m = 1; - } - } else { /* off */ - if (flags.local) { - erts_clear_call_trace_bif(&ep->info, 1); - m = 1; - } - if (flags.meta) { - erts_clear_mtrace_bif(&ep->info); - m = 1; - } - if (flags.call_time) { - erts_clear_time_trace_bif(&ep->info); - m = 1; - } - } - matches += m; - } - } - /* ** So, now for breakpoint tracing */ @@ -1670,7 +1592,6 @@ erts_finish_breakpointing(void) install_exp_breakpoints(&finish_bp.e); } } - setup_bif_trace(); return 1; case 1: /* @@ -1699,7 +1620,6 @@ erts_finish_breakpointing(void) uninstall_exp_breakpoints(&finish_bp.e); } } - reset_bif_trace(); return 1; case 3: /* @@ -1710,7 +1630,6 @@ erts_finish_breakpointing(void) * updated). If any breakpoints have been totally disabled, * deallocate the GenericBp structs for them. 
*/ - erts_consolidate_bif_bp_data(); clean_export_entries(&finish_bp.e); erts_consolidate_bp_data(&finish_bp.e, 0); erts_consolidate_bp_data(&finish_bp.f, 1); @@ -1782,41 +1701,6 @@ clean_export_entries(BpFunctions* f) } } -static void -setup_bif_trace(void) -{ - int i; - - for (i = 0; i < BIF_SIZE; ++i) { - Export *ep = bif_export[i]; - GenericBp* g = ep->info.u.gen_bp; - if (g) { - if (ExportIsBuiltIn(ep)) { - ASSERT(ep->trampoline.bif.func != 0); - ep->trampoline.bif.func = (BeamInstr) bif_table[i].traced; - } - } - } -} - -static void -reset_bif_trace(void) -{ - int i; - ErtsBpIndex active = erts_active_bp_ix(); - - for (i = 0; i < BIF_SIZE; ++i) { - Export *ep = bif_export[i]; - GenericBp* g = ep->info.u.gen_bp; - if (g && g->data[active].flags == 0) { - if (ExportIsBuiltIn(ep)) { - ASSERT(ep->trampoline.bif.func != 0); - ep->trampoline.bif.func = (BeamInstr) bif_table[i].f; - } - } - } -} - /* * Sequential tracing * diff --git a/erts/emulator/beam/erl_nfunc_sched.c b/erts/emulator/beam/erl_nfunc_sched.c index fc9ec4e9f5..0b9e54dfc5 100644 --- a/erts/emulator/beam/erl_nfunc_sched.c +++ b/erts/emulator/beam/erl_nfunc_sched.c @@ -33,22 +33,16 @@ NifExport * erts_new_proc_nif_export(Process *c_p, int argc) { - size_t size; - int i; NifExport *nep, *old_nep; + size_t size; size = sizeof(NifExport) + (argc-1)*sizeof(Eterm); nep = erts_alloc(ERTS_ALC_T_NIF_TRAP_EXPORT, size); - for (i = 0; i < ERTS_NUM_CODE_IX; i++) - nep->exp.addressv[i] = &nep->exp.trampoline.raw[0]; - nep->argc = -1; /* unused marker */ nep->argv_size = argc; - nep->trace = NULL; old_nep = ERTS_PROC_SET_NIF_TRAP_EXPORT(c_p, nep); if (old_nep) { - ASSERT(!nep->trace); erts_free(ERTS_ALC_T_NIF_TRAP_EXPORT, old_nep); } return nep; @@ -65,39 +59,6 @@ erts_destroy_nif_export(Process *p) } } -void -erts_nif_export_save_trace(Process *c_p, NifExport *nep, int applying, - Export* ep, Uint32 flags, - Uint32 flags_meta, BeamInstr* I, - ErtsTracer meta_tracer) -{ - NifExportTrace *netp; - ASSERT(nep && nep->argc >= 0); - ASSERT(!nep->trace); - netp = erts_alloc(ERTS_ALC_T_NIF_EXP_TRACE, - sizeof(NifExportTrace)); - netp->applying = applying; - netp->ep = ep; - netp->flags = flags; - netp->flags_meta = flags_meta; - netp->I = I; - netp->meta_tracer = NIL; - erts_tracer_update(&netp->meta_tracer, meta_tracer); - nep->trace = netp; -} - -void -erts_nif_export_restore_trace(Process *c_p, Eterm result, NifExport *nep) -{ - NifExportTrace *netp = nep->trace; - nep->trace = NULL; - erts_bif_trace_epilogue(c_p, result, netp->applying, netp->ep, - netp->flags, netp->flags_meta, - netp->I, netp->meta_tracer); - erts_tracer_update(&netp->meta_tracer, NIL); - erts_free(ERTS_ALC_T_NIF_EXP_TRACE, netp); -} - NifExport * erts_nif_export_schedule(Process *c_p, Process *dirty_shadow_proc, ErtsCodeMFA *mfa, BeamInstr *pc, @@ -165,14 +126,14 @@ erts_nif_export_schedule(Process *c_p, Process *dirty_shadow_proc, reg[i] = argv[i]; } ASSERT(is_atom(mod) && is_atom(func)); - nep->exp.info.mfa.module = mod; - nep->exp.info.mfa.function = func; - nep->exp.info.mfa.arity = (Uint) argc; - nep->exp.trampoline.op = (BeamInstr) instr; /* call_nif || apply_bif */ - nep->exp.trampoline.raw[1] = (BeamInstr) dfunc; + nep->trampoline.info.mfa.module = mod; + nep->trampoline.info.mfa.function = func; + nep->trampoline.info.mfa.arity = (Uint) argc; + nep->trampoline.call_op = (BeamInstr) instr; /* call_nif || apply_bif */ + nep->trampoline.dfunc = (BeamInstr) dfunc; nep->func = ifunc; used_proc->arity = argc; used_proc->freason = TRAP; - used_proc->i = (BeamInstr*) 
nep->exp.addressv[0]; + used_proc->i = (BeamInstr*)&nep->trampoline.call_op; return nep; } diff --git a/erts/emulator/beam/erl_nfunc_sched.h b/erts/emulator/beam/erl_nfunc_sched.h index 54d011695a..033ba58ded 100644 --- a/erts/emulator/beam/erl_nfunc_sched.h +++ b/erts/emulator/beam/erl_nfunc_sched.h @@ -25,15 +25,6 @@ #include "bif.h" #include "error.h" -typedef struct { - int applying; - Export* ep; - Uint32 flags; - Uint32 flags_meta; - BeamInstr* I; - ErtsTracer meta_tracer; -} NifExportTrace; - /* * NIF exports need a few more items than the Export struct provides, * including the erl_module_nif* and a NIF function pointer, so the @@ -45,11 +36,15 @@ typedef struct { */ typedef struct { - Export exp; + struct { + ErtsCodeInfo info; + BeamInstr call_op; /* call_nif || apply_bif */ + BeamInstr dfunc; + } trampoline; + struct erl_module_nif* m; /* NIF module, or NULL if BIF */ void *func; /* Indirect NIF or BIF to execute (may be unused) */ ErtsCodeMFA *current;/* Current as set when originally called */ - NifExportTrace *trace; /* --- The following is only used on error --- */ BeamInstr *pc; /* Program counter */ ErtsCodeMFA *mfa; /* MFA of original call */ @@ -59,11 +54,6 @@ typedef struct { } NifExport; NifExport *erts_new_proc_nif_export(Process *c_p, int argc); -void erts_nif_export_save_trace(Process *c_p, NifExport *nep, int applying, - Export* ep, Uint32 flags, - Uint32 flags_meta, BeamInstr* I, - ErtsTracer meta_tracer); -void erts_nif_export_restore_trace(Process *c_p, Eterm result, NifExport *nep); void erts_destroy_nif_export(Process *p); NifExport *erts_nif_export_schedule(Process *c_p, Process *dirty_shadow_proc, ErtsCodeMFA *mfa, BeamInstr *pc, @@ -81,11 +71,6 @@ ERTS_GLB_INLINE void erts_nif_export_restore(Process *c_p, NifExport *ep, Eterm result); ERTS_GLB_INLINE void erts_nif_export_restore_error(Process* c_p, BeamInstr **pc, Eterm *reg, ErtsCodeMFA **nif_mfa); -ERTS_GLB_INLINE int erts_nif_export_check_save_trace(Process *c_p, Eterm result, - int applying, Export* ep, - Uint32 flags, - Uint32 flags_meta, BeamInstr* I, - ErtsTracer meta_tracer); ERTS_GLB_INLINE Process *erts_proc_shadow2real(Process *c_p); #if ERTS_GLB_INLINE_INCL_FUNC_DEF @@ -147,8 +132,6 @@ erts_nif_export_restore(Process *c_p, NifExport *ep, Eterm result) c_p->current = ep->current; ep->argc = -1; /* Unused nif-export marker... 
*/ - if (ep->trace) - erts_nif_export_restore_trace(c_p, result, ep); } ERTS_GLB_INLINE void @@ -166,25 +149,6 @@ erts_nif_export_restore_error(Process* c_p, BeamInstr **pc, erts_nif_export_restore(c_p, nep, THE_NON_VALUE); } -ERTS_GLB_INLINE int -erts_nif_export_check_save_trace(Process *c_p, Eterm result, - int applying, Export* ep, - Uint32 flags, - Uint32 flags_meta, BeamInstr* I, - ErtsTracer meta_tracer) -{ - if (is_non_value(result) && c_p->freason == TRAP) { - NifExport *nep = ERTS_PROC_GET_NIF_TRAP_EXPORT(c_p); - if (nep && nep->argc >= 0) { - erts_nif_export_save_trace(c_p, nep, applying, ep, - flags, flags_meta, - I, meta_tracer); - return 1; - } - } - return 0; -} - ERTS_GLB_INLINE Process * erts_proc_shadow2real(Process *c_p) { @@ -208,7 +172,7 @@ erts_proc_shadow2real(Process *c_p) #define ERTS_I_BEAM_OP_TO_NIF_EXPORT(I) \ (ASSERT(BeamIsOpCode(*(I), op_apply_bif) || \ BeamIsOpCode(*(I), op_call_nif)), \ - ((NifExport *) (((char *) (I)) - offsetof(NifExport, exp.trampoline.raw[0])))) + ((NifExport *) (((char *) (I)) - offsetof(NifExport, trampoline.call_op)))) #include "erl_message.h" diff --git a/erts/emulator/beam/erl_nif.c b/erts/emulator/beam/erl_nif.c index 46f7e864fd..5a5ea07d89 100644 --- a/erts/emulator/beam/erl_nif.c +++ b/erts/emulator/beam/erl_nif.c @@ -2944,7 +2944,7 @@ static_schedule_dirty_nif(ErlNifEnv* env, erts_aint32_t dirty_psflg, * parts (located in code). */ - ep = ErtsContainerStruct(proc->current, NifExport, exp.info.mfa); + ep = ErtsContainerStruct(proc->current, NifExport, trampoline.info.mfa); mod = proc->current->module; func = proc->current->function; fp = (NativeFunPtr) ep->func; @@ -2988,7 +2988,7 @@ execute_nif(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]) execution_state(env, &proc, NULL); - ep = ErtsContainerStruct(proc->current, NifExport, exp.info.mfa); + ep = ErtsContainerStruct(proc->current, NifExport, trampoline.info.mfa); fp = ep->func; ASSERT(ep); ASSERT(!env->exception_thrown); @@ -4117,7 +4117,23 @@ static struct erl_module_nif* create_lib(const ErlNifEntry* src) return lib; }; -BIF_RETTYPE load_nif_2(BIF_ALIST_2) +/* load_nif/2 is implemented as an instruction as it needs to know where it + * was called from, and it's a pain to get that information in a BIF. + * + * This is a small stub that rejects apply(erlang, load_nif, [Path, Args]). */ +BIF_RETTYPE load_nif_2(BIF_ALIST_2) { + if (BIF_P->flags & F_HIPE_MODE) { + BIF_RET(load_nif_error(BIF_P, "notsup", + "Calling load_nif from HiPE compiled modules " + "not supported")); + } + + BIF_RET(load_nif_error(BIF_P, "bad_lib", + "load_nif/2 must be explicitly called from the NIF " + "module. 
It cannot be called through apply/3.")); +} + +Eterm erts_load_nif(Process *c_p, BeamInstr *I, Eterm filename, Eterm args) { static const char bad_lib[] = "bad_lib"; static const char upgrade[] = "upgrade"; @@ -4139,13 +4155,6 @@ BIF_RETTYPE load_nif_2(BIF_ALIST_2) struct erl_module_nif* lib = NULL; struct erl_module_instance* this_mi; struct erl_module_instance* prev_mi; - BeamInstr* caller_cp; - - if (BIF_P->flags & F_HIPE_MODE) { - ret = load_nif_error(BIF_P, "notsup", "Calling load_nif from HiPE compiled " - "modules not supported"); - BIF_RET(ret); - } encoding = erts_get_native_filename_encoding(); if (encoding == ERL_FILENAME_WIN_WCHAR) { @@ -4153,30 +4162,19 @@ BIF_RETTYPE load_nif_2(BIF_ALIST_2) /* since lib_name is used in error messages */ encoding = ERL_FILENAME_UTF8; } - lib_name = erts_convert_filename_to_encoding(BIF_ARG_1, NULL, 0, + lib_name = erts_convert_filename_to_encoding(filename, NULL, 0, ERTS_ALC_T_TMP, 1, 0, encoding, NULL, 0); if (!lib_name) { - BIF_ERROR(BIF_P, BADARG); - } - - if (!erts_try_seize_code_write_permission(BIF_P)) { - erts_free(ERTS_ALC_T_TMP, lib_name); - ERTS_BIF_YIELD2(bif_export[BIF_load_nif_2], - BIF_P, BIF_ARG_1, BIF_ARG_2); + return THE_NON_VALUE; } /* Block system (is this the right place to do it?) */ - erts_proc_unlock(BIF_P, ERTS_PROC_LOCK_MAIN); + erts_proc_unlock(c_p, ERTS_PROC_LOCK_MAIN); erts_thr_progress_block(); /* Find calling module */ - ASSERT(BIF_P->current != NULL); - ASSERT(BIF_P->current->module == am_erlang - && BIF_P->current->function == am_load_nif - && BIF_P->current->arity == 2); - caller_cp = cp_val(BIF_P->stop[0]); - caller = find_function_from_pc(caller_cp); + caller = find_function_from_pc(I); ASSERT(caller != NULL); mod_atom = caller->module; ASSERT(is_atom(mod_atom)); @@ -4196,7 +4194,7 @@ BIF_RETTYPE load_nif_2(BIF_ALIST_2) this_mi = &module_p->curr; prev_mi = &module_p->old; if (in_area(caller, module_p->old.code_hdr, module_p->old.code_length)) { - ret = load_nif_error(BIF_P, "old_code", "Calling load_nif from old " + ret = load_nif_error(c_p, "old_code", "Calling load_nif from old " "module '%T' not allowed", mod_atom); goto error; } else if (module_p->on_load) { @@ -4210,52 +4208,52 @@ BIF_RETTYPE load_nif_2(BIF_ALIST_2) } if (this_mi->nif != NULL) { - ret = load_nif_error(BIF_P,"reload","NIF library already loaded" + ret = load_nif_error(c_p,"reload","NIF library already loaded" " (reload disallowed since OTP 20)."); } else if (init_func == NULL && (err=erts_sys_ddll_open(lib_name, &handle, &errdesc)) != ERL_DE_NO_ERROR) { const char slogan[] = "Failed to load NIF library"; if (strstr(errdesc.str, lib_name) != NULL) { - ret = load_nif_error(BIF_P, "load_failed", "%s: '%s'", slogan, errdesc.str); + ret = load_nif_error(c_p, "load_failed", "%s: '%s'", slogan, errdesc.str); } else { - ret = load_nif_error(BIF_P, "load_failed", "%s %s: '%s'", slogan, lib_name, errdesc.str); + ret = load_nif_error(c_p, "load_failed", "%s %s: '%s'", slogan, lib_name, errdesc.str); } } else if (init_func == NULL && erts_sys_ddll_load_nif_init(handle, &init_func, &errdesc) != ERL_DE_NO_ERROR) { - ret = load_nif_error(BIF_P, bad_lib, "Failed to find library init" + ret = load_nif_error(c_p, bad_lib, "Failed to find library init" " function: '%s'", errdesc.str); } else if ((taint ? 
erts_add_taint(mod_atom) : 0, (entry = erts_sys_ddll_call_nif_init(init_func)) == NULL)) { - ret = load_nif_error(BIF_P, bad_lib, "Library init-call unsuccessful"); + ret = load_nif_error(c_p, bad_lib, "Library init-call unsuccessful"); } else if (entry->major > ERL_NIF_MAJOR_VERSION || (entry->major == ERL_NIF_MAJOR_VERSION && entry->minor > ERL_NIF_MINOR_VERSION)) { char* fmt = "That '%T' NIF library needs %s or newer. Either try to" " recompile the NIF lib or use a newer erts runtime."; - ret = load_nif_error(BIF_P, bad_lib, fmt, mod_atom, entry->min_erts); + ret = load_nif_error(c_p, bad_lib, fmt, mod_atom, entry->min_erts); } else if (entry->major < ERL_NIF_MIN_REQUIRED_MAJOR_VERSION_ON_LOAD || (entry->major==2 && entry->minor == 5)) { /* experimental maps */ char* fmt = "That old NIF library (%d.%d) is not compatible with this " "erts runtime (%d.%d). Try recompile the NIF lib."; - ret = load_nif_error(BIF_P, bad_lib, fmt, entry->major, entry->minor, + ret = load_nif_error(c_p, bad_lib, fmt, entry->major, entry->minor, ERL_NIF_MAJOR_VERSION, ERL_NIF_MINOR_VERSION); } else if (AT_LEAST_VERSION(entry, 2, 1) && sys_strcmp(entry->vm_variant, ERL_NIF_VM_VARIANT) != 0) { - ret = load_nif_error(BIF_P, bad_lib, "Library (%s) not compiled for " + ret = load_nif_error(c_p, bad_lib, "Library (%s) not compiled for " "this vm variant (%s).", entry->vm_variant, ERL_NIF_VM_VARIANT); } else if (!erts_is_atom_str((char*)entry->name, mod_atom, 1)) { - ret = load_nif_error(BIF_P, bad_lib, "Library module name '%s' does not" + ret = load_nif_error(c_p, bad_lib, "Library module name '%s' does not" " match calling module '%T'", entry->name, mod_atom); } else { @@ -4274,7 +4272,7 @@ BIF_RETTYPE load_nif_2(BIF_ALIST_2) if (!erts_atom_get(f->name, sys_strlen(f->name), &f_atom, ERTS_ATOM_ENC_LATIN1) || (ci_pp = get_func_pp(this_mi->code_hdr, f_atom, f->arity))==NULL) { - ret = load_nif_error(BIF_P,bad_lib,"Function not found %T:%s/%u", + ret = load_nif_error(c_p,bad_lib,"Function not found %T:%s/%u", mod_atom, f->name, f->arity); } else if (f->flags) { @@ -4286,16 +4284,13 @@ BIF_RETTYPE load_nif_2(BIF_ALIST_2) * a load error. 
*/ if (f->flags != ERL_NIF_DIRTY_JOB_IO_BOUND && f->flags != ERL_NIF_DIRTY_JOB_CPU_BOUND) - ret = load_nif_error(BIF_P, bad_lib, "Illegal flags field value %d for NIF %T:%s/%u", + ret = load_nif_error(c_p, bad_lib, "Illegal flags field value %d for NIF %T:%s/%u", f->flags, mod_atom, f->name, f->arity); } - else if (erts_codeinfo_to_code(ci_pp[1]) - erts_codeinfo_to_code(ci_pp[0]) - < BEAM_NIF_MIN_FUNC_SZ) - { - ret = load_nif_error(BIF_P,bad_lib,"No explicit call to load_nif" - " in module (%T:%s/%u too small)", - mod_atom, f->name, f->arity); - } + + ASSERT(erts_codeinfo_to_code(ci_pp[1]) - erts_codeinfo_to_code(ci_pp[0]) + >= BEAM_NATIVE_MIN_FUNC_SZ); + /*erts_fprintf(stderr, "Found NIF %T:%s/%u\r\n", mod_atom, f->name, f->arity);*/ } @@ -4314,23 +4309,23 @@ BIF_RETTYPE load_nif_2(BIF_ALIST_2) if (prev_mi->nif != NULL) { /**************** Upgrade ***************/ void* prev_old_data = prev_mi->nif->priv_data; if (entry->upgrade == NULL) { - ret = load_nif_error(BIF_P, upgrade, "Upgrade not supported by this NIF library."); + ret = load_nif_error(c_p, upgrade, "Upgrade not supported by this NIF library."); goto error; } - erts_pre_nif(&env, BIF_P, lib, NULL); - veto = entry->upgrade(&env, &lib->priv_data, &prev_mi->nif->priv_data, BIF_ARG_2); + erts_pre_nif(&env, c_p, lib, NULL); + veto = entry->upgrade(&env, &lib->priv_data, &prev_mi->nif->priv_data, args); erts_post_nif(&env); if (veto) { prev_mi->nif->priv_data = prev_old_data; - ret = load_nif_error(BIF_P, upgrade, "Library upgrade-call unsuccessful (%d).", veto); + ret = load_nif_error(c_p, upgrade, "Library upgrade-call unsuccessful (%d).", veto); } } else if (entry->load != NULL) { /********* Initial load ***********/ - erts_pre_nif(&env, BIF_P, lib, NULL); - veto = entry->load(&env, &lib->priv_data, BIF_ARG_2); + erts_pre_nif(&env, c_p, lib, NULL); + veto = entry->load(&env, &lib->priv_data, args); erts_post_nif(&env); if (veto) { - ret = load_nif_error(BIF_P, "load", "Library load-call unsuccessful (%d).", veto); + ret = load_nif_error(c_p, "load", "Library load-call unsuccessful (%d).", veto); } } if (ret == am_ok) { @@ -4384,8 +4379,7 @@ BIF_RETTYPE load_nif_2(BIF_ALIST_2) } erts_thr_progress_unblock(); - erts_proc_lock(BIF_P, ERTS_PROC_LOCK_MAIN); - erts_release_code_write_permission(); + erts_proc_lock(c_p, ERTS_PROC_LOCK_MAIN); erts_free(ERTS_ALC_T_TMP, lib_name); BIF_RET(ret); diff --git a/erts/emulator/beam/erl_trace.h b/erts/emulator/beam/erl_trace.h index f564549ab9..c0f31e0cb6 100644 --- a/erts/emulator/beam/erl_trace.h +++ b/erts/emulator/beam/erl_trace.h @@ -142,12 +142,6 @@ void monitor_generic(Process *p, Eterm type, Eterm spec); Uint erts_trace_flag2bit(Eterm flag); int erts_trace_flags(Eterm List, Uint *pMask, ErtsTracer *pTracer, int *pCpuTimestamp); -Eterm erts_bif_trace(int bif_index, Process* p, Eterm* args, BeamInstr *I); -Eterm -erts_bif_trace_epilogue(Process *p, Eterm result, int applying, - Export* ep, Uint32 flags, - Uint32 flags_meta, BeamInstr* I, - ErtsTracer meta_tracer); void erts_send_pending_trace_msgs(ErtsSchedulerData *esdp); #define ERTS_CHK_PEND_TRACE_MSGS(ESDP) \ diff --git a/erts/emulator/beam/export.c b/erts/emulator/beam/export.c index 24957c8131..ca16bfd20e 100644 --- a/erts/emulator/beam/export.c +++ b/erts/emulator/beam/export.c @@ -129,6 +129,8 @@ export_alloc(struct export_entry* tmpl_e) obj->info.mfa.module = tmpl->info.mfa.module; obj->info.mfa.function = tmpl->info.mfa.function; obj->info.mfa.arity = tmpl->info.mfa.arity; + obj->bif_table_index = -1; + obj->is_bif_traced = 0; 
memset(&obj->trampoline, 0, sizeof(obj->trampoline)); @@ -205,6 +207,8 @@ static struct export_entry* init_template(struct export_templ* templ, templ->exp.info.mfa.module = m; templ->exp.info.mfa.function = f; templ->exp.info.mfa.arity = a; + templ->exp.bif_table_index = -1; + templ->exp.is_bif_traced = 0; return &templ->entry; } diff --git a/erts/emulator/beam/export.h b/erts/emulator/beam/export.h index 1246446418..0190624f79 100644 --- a/erts/emulator/beam/export.h +++ b/erts/emulator/beam/export.h @@ -31,7 +31,13 @@ typedef struct export { - void* addressv[ERTS_NUM_CODE_IX]; /* Pointer to code for function. */ + /* Pointer to code for function. */ + void* addressv[ERTS_NUM_CODE_IX]; + + /* Index into bif_table[], or -1 if not a BIF. */ + int bif_table_index; + /* Non-zero if this is a BIF that's traced. */ + int is_bif_traced; /* This is a small trampoline function that can be used for lazy code * loading, global call tracing, and so on. It's only valid when @@ -42,11 +48,6 @@ typedef struct export union { BeamInstr op; /* Union discriminant. */ - struct { - BeamInstr op; /* op_apply_bif */ - BeamInstr func; /* A direct pointer to the BIF */ - } bif; - struct { BeamInstr op; /* op_i_generic_breakpoint */ BeamInstr address; /* Address of the original function */ @@ -85,9 +86,7 @@ typedef struct export if((EP)->addressv[CX] == (EP)->trampoline.raw) { \ /* The entry currently points at the trampoline, so the * instructions must be valid. */ \ - ASSERT(((BeamIsOpCode((EP)->trampoline.op, op_apply_bif)) && \ - (EP)->trampoline.bif.func != 0) || \ - ((BeamIsOpCode((EP)->trampoline.op, op_i_generic_breakpoint)) && \ + ASSERT(((BeamIsOpCode((EP)->trampoline.op, op_i_generic_breakpoint)) && \ (EP)->trampoline.breakpoint.address != 0) || \ ((BeamIsOpCode((EP)->trampoline.op, op_trace_jump_W)) && \ (EP)->trampoline.trace.address != 0) || \ @@ -121,10 +120,6 @@ extern erts_mtx_t export_staging_lock; #include "beam_load.h" /* For em_* extern declarations */ -#define ExportIsBuiltIn(EntryPtr) \ - (((EntryPtr)->addressv[erts_active_code_ix()] == (EntryPtr)->trampoline.raw) && \ - (BeamIsOpCode((EntryPtr)->trampoline.op, op_apply_bif))) - #if ERTS_GLB_INLINE_INCL_FUNC_DEF ERTS_GLB_INLINE Export* diff --git a/erts/emulator/beam/global.h b/erts/emulator/beam/global.h index 40c65461bc..acd42319cb 100644 --- a/erts/emulator/beam/global.h +++ b/erts/emulator/beam/global.h @@ -122,6 +122,10 @@ void erts_nif_demonitored(ErtsResource* resource); extern void erts_add_taint(Eterm mod_atom); extern Eterm erts_nif_taints(Process* p); extern void erts_print_nif_taints(fmtfn_t to, void* to_arg); + +/* Loads the specified NIF. The caller must have code write permission. */ +Eterm erts_load_nif(Process *c_p, BeamInstr *I, Eterm filename, Eterm args); + void erts_unload_nif(struct erl_module_nif* nif); extern void erl_nif_init(void); extern int erts_nif_get_funcs(struct erl_module_nif*, @@ -885,6 +889,8 @@ void erts_bif_info_init(void); /* bif.c */ +void erts_write_bif_wrapper(Export *export, BeamInstr *address); + void erts_queue_monitor_message(Process *, ErtsProcLocks*, Eterm, diff --git a/erts/emulator/beam/ops.tab b/erts/emulator/beam/ops.tab index c0ca9260a0..a72ed6bce8 100644 --- a/erts/emulator/beam/ops.tab +++ b/erts/emulator/beam/ops.tab @@ -77,19 +77,32 @@ return # To ensure that a "move Src x(0)" instruction can be combined with # the following call instruction, we need to make sure that there is # no line/1 instruction between the move and the call. 
-# -# A tail-recursive call to an external function (BIF or non-BIF) will -# never be saved on the stack, so there is no reason to keep the line -# instruction. + +move S X0=x==0 | line Loc | call Ar Func => \ + line Loc | move S X0 | call Ar Func move S X0=x==0 | line Loc | call_ext Ar Func => \ line Loc | move S X0 | call_ext Ar Func + +# +# A tail call will not refer to the current function on error unless it's a +# BIF, so we can omit the line instruction for non-BIFs. +# + +move S X0=x==0 | line Loc | call_ext_last Ar Func=u$is_bif D => \ + line Loc | move S X0 | call_ext_last Ar Func D +move S X0=x==0 | line Loc | call_ext_only Ar Func=u$is_bif => \ + line Loc | move S X0 | call_ext_only Ar Func + move S X0=x==0 | line Loc | call_ext_last Ar Func D => \ move S X0 | call_ext_last Ar Func D move S X0=x==0 | line Loc | call_ext_only Ar Func => \ move S X0 | call_ext_only Ar Func -move S X0=x==0 | line Loc | call Ar Func => \ - line Loc | move S X0 | call Ar Func + +move S X0=x==0 | line Loc | call_last Ar Func D => \ + move S X0 | call_last Ar Func D +move S X0=x==0 | line Loc | call_only Ar Func => \ + move S X0 | call_only Ar Func line Loc | func_info M F A => func_info M F A | line Loc @@ -787,62 +800,22 @@ allocate_init t t? y # External function and bif calls. ################################################################# -# -# The BIFs erts_internal:check_process_code/1 must be called like a function, -# to ensure that c_p->i (program counter) is set correctly (an ordinary -# BIF call doesn't set it). -# - -call_ext u==1 Bif=u$bif:erts_internal:check_process_code/1 => i_call_ext Bif -call_ext_last u==1 Bif=u$bif:erts_internal:check_process_code/1 D => i_call_ext_last Bif D -call_ext_only u==1 Bif=u$bif:erts_internal:check_process_code/1 => i_call_ext_only Bif - -# -# The BIFs erts_internal:garbage_collect/1 must be called like a function, -# to allow them to invoke the garbage collector. (The stack pointer must -# be saved and p->arity must be zeroed, which is not done on ordinary BIF calls.) -# -call_ext u==1 Bif=u$bif:erts_internal:garbage_collect/1 => i_call_ext Bif -call_ext_last u==1 Bif=u$bif:erts_internal:garbage_collect/1 D => i_call_ext_last Bif D -call_ext_only u==1 Bif=u$bif:erts_internal:garbage_collect/1 => i_call_ext_only Bif +# Expands into call_light_bif(_only)/2 +call_light_bif/1 +call_light_bif_only/1 +call_light_bif_last/2 # -# put/2 and erase/1 must be able to do garbage collection, so we must call -# them like functions. +# The load_nif/2 BIF is an instruction. # -call_ext u==2 Bif=u$bif:erlang:put/2 => i_call_ext Bif -call_ext_last u==2 Bif=u$bif:erlang:put/2 D => i_call_ext_last Bif D -call_ext_only u==2 Bif=u$bif:erlang:put/2 => i_call_ext_only Bif - -call_ext u==1 Bif=u$bif:erlang:erase/1 => i_call_ext Bif -call_ext_last u==1 Bif=u$bif:erlang:erase/1 D => i_call_ext_last Bif D -call_ext_only u==1 Bif=u$bif:erlang:erase/1 => i_call_ext_only Bif - -# -# The process_info/1,2 BIF should be called like a function, to force -# the emulator to set c_p->current before calling it (a BIF call doesn't -# set it). -# -# In addition, we force the use of a non-tail-recursive call. This will ensure -# that c_p->cp points into the function making the call. 
-# - -call_ext u==1 Bif=u$bif:erlang:process_info/1 => i_call_ext Bif -call_ext_last u==1 Bif=u$bif:erlang:process_info/1 D => i_call_ext Bif | deallocate_return D -call_ext_only Ar=u==1 Bif=u$bif:erlang:process_info/1 => allocate u Ar | i_call_ext Bif | deallocate_return u - -call_ext u==2 Bif=u$bif:erlang:process_info/2 => i_call_ext Bif -call_ext_last u==2 Bif=u$bif:erlang:process_info/2 D => i_call_ext Bif | deallocate_return D -call_ext_only Ar=u==2 Bif=u$bif:erlang:process_info/2 => allocate u Ar | i_call_ext Bif | deallocate_return u - -# -# load_nif/2 also needs to know calling function like process_info -# -call_ext u==2 Bif=u$bif:erlang:load_nif/2 => i_call_ext Bif -call_ext_last u==2 Bif=u$bif:erlang:load_nif/2 D => i_call_ext Bif | deallocate_return D -call_ext_only Ar=u==2 Bif=u$bif:erlang:load_nif/2 => allocate u Ar | i_call_ext Bif | deallocate_return u +call_ext u==2 u$func:erlang:load_nif/2 => i_load_nif +call_ext_last u==2 u$func:erlang:load_nif/2 D => i_load_nif | deallocate_return D +call_ext_only u==2 u$func:erlang:load_nif/2 => i_load_nif | return +%cold +i_load_nif +%hot # # apply/2 is an instruction, not a BIF. @@ -860,33 +833,6 @@ call_ext u==3 u$bif:erlang:apply/3 => i_apply call_ext_last u==3 u$bif:erlang:apply/3 D => i_apply_last D call_ext_only u==3 u$bif:erlang:apply/3 => i_apply_only -# -# The exit/1 and throw/1 BIFs never execute the instruction following them; -# thus there is no need to generate any return instruction. -# - -call_ext_last u==1 Bif=u$bif:erlang:exit/1 D => call_bif Bif -call_ext_last u==1 Bif=u$bif:erlang:throw/1 D => call_bif Bif - -call_ext_only u==1 Bif=u$bif:erlang:exit/1 => call_bif Bif -call_ext_only u==1 Bif=u$bif:erlang:throw/1 => call_bif Bif - -# -# The error/1 and error/2 BIFs never execute the instruction following them; -# thus there is no need to generate any return instruction. -# However, they generate stack backtraces, so if the call instruction -# is call_ext_only/2 instruction, we explicitly do an allocate/2 to store -# the continuation pointer on the stack. -# - -call_ext_last u==1 Bif=u$bif:erlang:error/1 D => call_bif Bif -call_ext_last u==2 Bif=u$bif:erlang:error/2 D => call_bif Bif - -call_ext_only Ar=u==1 Bif=u$bif:erlang:error/1 => \ - allocate u Ar | call_bif Bif -call_ext_only Ar=u==2 Bif=u$bif:erlang:error/2 => \ - allocate u Ar | call_bif Bif - # # The yield/0 BIF is an instruction # @@ -999,17 +945,24 @@ call_ext_only u==0 u$func:os:perf_counter/0 => \ i_perf_counter | return # -# The general case for BIFs that have no special instructions. -# A BIF used in the tail must be followed by a return instruction. +# BIFs like process_info/1,2 require up-to-date information about the current +# emulator state, which the ordinary call_light_bif instruction doesn't save. # -# To make trapping and stack backtraces work correctly, we make sure that -# the continuation pointer is always stored on the stack. -call_ext u Bif=u$is_bif => call_bif Bif +call_ext u Bif=u$is_bif | is_heavy_bif(Bif) => \ + i_call_ext Bif +call_ext_last u Bif=u$is_bif D | is_heavy_bif(Bif) => \ + i_call_ext Bif | deallocate_return D +call_ext_only Ar=u Bif=u$is_bif | is_heavy_bif(Bif) => \ + allocate u Ar | i_call_ext Bif | deallocate_return u -call_ext_last u Bif=u$is_bif D => deallocate D | call_bif_only Bif +# +# The general case for BIFs that have no special requirements. 
+# -call_ext_only Ar=u Bif=u$is_bif => call_bif_only Bif +call_ext u Bif=u$is_bif => call_light_bif Bif +call_ext_last u Bif=u$is_bif D => call_light_bif_last Bif D +call_ext_only Ar=u Bif=u$is_bif => call_light_bif_only Bif # # Any remaining calls are calls to Erlang functions, not BIFs. @@ -1034,14 +987,32 @@ i_apply_fun i_apply_fun_last Q i_apply_fun_only +# +# When a BIF is traced, these instructions make a body call through the export +# entry instead of calling the BIF directly (setting up a temporary stack frame +# if needed). We therefore retain the stack frame in call_light_bif_last, and +# add a deallocate_return after call_light_bif_only to remove the temporary +# stack frame before returning. +# + +call_light_bif Bif=u$is_bif => \ + call_light_bif Bif Bif + +call_light_bif_last Bif=u$is_bif D => \ + call_light_bif Bif Bif | deallocate_return D + +call_light_bif_only Bif=u$is_bif => \ + call_light_bif_only Bif Bif | deallocate_return u + +call_light_bif b e +call_light_bif_only b e + %cold -i_hibernate +i_hibernate i_perf_counter -%hot -call_bif e -call_bif_only e +%hot # # Calls to non-building and guard BIFs. diff --git a/erts/emulator/test/call_trace_SUITE.erl b/erts/emulator/test/call_trace_SUITE.erl index 742592f88e..477b0f5bb3 100644 --- a/erts/emulator/test/call_trace_SUITE.erl +++ b/erts/emulator/test/call_trace_SUITE.erl @@ -832,21 +832,27 @@ deep_exception() -> R1 -> ct:fail({returned,abbr(R1)}) catch error:badarg -> ok end, - expect(fun ({trace,S,call,{lists,reverse,[L1,L2]}}) + expect(fun ({trace,S,call,{lists,reverse,[L1,L2]}}, Traps) when is_list(L1), is_list(L2), S == Self -> - next; + %% Each trapping call to reverse/2 must have a corresponding + %% exception_from + {next, Traps + 1}; ({trace,S,exception_from, - {lists,reverse,2},{error,badarg}}) + {lists,reverse,2},{error,badarg}}, Traps) + when S == Self, Traps > 1 -> + {next, Traps - 1}; + ({trace,S,exception_from, + {lists,reverse,2},{error,badarg}}, 1) when S == Self -> expected; - ('_') -> + ('_', _Traps) -> {trace,Self,exception_from, {lists,reverse,2},{error,badarg}}; - (_) -> + (_, _Traps) -> {unexpected, {trace,Self,exception_from, {lists,reverse,2},{error,badarg}}} - end), + end, 0), deep_exception(?LINE, deep_5, [1,2], 7, [{trace,Self,call,{erlang,error,[undef]}}, {trace,Self,exception_from,{erlang,error,1}, @@ -896,21 +902,27 @@ deep_exception() -> R2 -> ct:fail({returned,abbr(R2)}) catch error:badarg -> ok end, - expect(fun ({trace,S,call,{lists,reverse,[L1,L2]}}) + expect(fun ({trace,S,call,{lists,reverse,[L1,L2]}}, Traps) when is_list(L1), is_list(L2), S == Self -> - next; + %% Each trapping call to reverse/2 must have a corresponding + %% exception_from + {next, Traps + 1}; + ({trace,S,exception_from, + {lists,reverse,2},{error,badarg}}, Traps) + when S == Self, Traps > 1 -> + {next, Traps - 1}; ({trace,S,exception_from, - {lists,reverse,2},{error,badarg}}) + {lists,reverse,2},{error,badarg}}, 1) when S == Self -> expected; - ('_') -> + ('_', _Traps) -> {trace,Self,exception_from, {lists,reverse,2},{error,badarg}}; - (_) -> + (_, _Traps) -> {unexpected, {trace,Self,exception_from, {lists,reverse,2},{error,badarg}}} - end), + end, 0), deep_exception(?LINE, apply, [?MODULE,deep_5,[1,2]], 7, [{trace,Self,call,{erlang,error,[undef]}}, {trace,Self,exception_from,{erlang,error,1}, @@ -975,21 +987,27 @@ deep_exception() -> R3 -> ct:fail({returned,abbr(R3)}) catch error:badarg -> ok end, - expect(fun ({trace,S,call,{lists,reverse,[L1,L2]}}) + expect(fun ({trace,S,call,{lists,reverse,[L1,L2]}}, Traps) 
when is_list(L1), is_list(L2), S == Self -> - next; + %% Each trapping call to reverse/2 must have a corresponding + %% exception_from + {next, Traps + 1}; + ({trace,S,exception_from, + {lists,reverse,2},{error,badarg}}, Traps) + when S == Self, Traps > 1 -> + {next, Traps - 1}; ({trace,S,exception_from, - {lists,reverse,2},{error,badarg}}) + {lists,reverse,2},{error,badarg}}, 1) when S == Self -> expected; - ('_') -> + ('_', _Traps) -> {trace,Self,exception_from, {lists,reverse,2},{error,badarg}}; - (_) -> + (_, _Traps) -> {unexpected, {trace,Self,exception_from, {lists,reverse,2},{error,badarg}}} - end), + end, 0), deep_exception(?LINE, apply, [fun () -> ?MODULE:deep_5(1,2) end, []], 7, [{trace,Self,call,{erlang,error,[undef]}}, @@ -1249,6 +1267,24 @@ expect(Message) -> ct:fail(no_trace_message) end. +expect(Validator, State0) when is_function(Validator) -> + receive + M -> + case Validator(M, State0) of + expected -> + ok = io:format("Expected and got ~p", [abbr(M)]); + {next, State} -> + ok = io:format("Expected and got ~p", [abbr(M)]), + expect(Validator, State); + {unexpected,Message} -> + io:format("Expected ~p; got ~p", [abbr(Message),abbr(M)]), + ct:fail({unexpected,abbr([M|flush()])}) + end + after 5000 -> + io:format("Expected ~p; got nothing", [abbr(Validator('_'))]), + ct:fail(no_trace_message) + end. + trace_info(What, Key) -> get(tracer) ! {apply,self(),{erlang,trace_info,[What,Key]}}, Res = receive diff --git a/erts/emulator/test/dirty_bif_SUITE.erl b/erts/emulator/test/dirty_bif_SUITE.erl index 4f5ad0295a..2ded862b8a 100644 --- a/erts/emulator/test/dirty_bif_SUITE.erl +++ b/erts/emulator/test/dirty_bif_SUITE.erl @@ -397,7 +397,9 @@ dirty_process_trace(Config) when is_list(Config) -> access_dirty_process( Config, fun() -> - erlang:trace_pattern({erts_debug,dirty_io,2}, + %% BIFs can only be traced when their modules are loaded. + code:ensure_loaded(erts_debug), + 1 = erlang:trace_pattern({erts_debug,dirty_io,2}, [{'_',[],[{return_trace}]}], [local,meta]), ok diff --git a/erts/emulator/test/match_spec_SUITE.erl b/erts/emulator/test/match_spec_SUITE.erl index 21de6b1002..686b431876 100644 --- a/erts/emulator/test/match_spec_SUITE.erl +++ b/erts/emulator/test/match_spec_SUITE.erl @@ -198,7 +198,8 @@ caller_and_return_to(Config) -> {trace,Tracee,call,{?MODULE,do_the_put,[test]},{?MODULE,do_put,1}}, {trace,Tracee,call,{erlang,integer_to_list,[1]},{?MODULE,do_the_put,1}}, {trace,Tracee,return_to,{?MODULE,do_the_put,1}}, - {trace,Tracee,call,{erlang,put,[test,"1"]},{?MODULE,do_put,1}}, + {trace,Tracee,call,{erlang,put,[test,"1"]},{?MODULE,do_the_put,1}}, + {trace,Tracee,return_to,{?MODULE,do_the_put,1}}, {trace,Tracee,return_to,{?MODULE,do_put,1}}, %% These last trace messages are a bit strange... 
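The expected-trace change above follows from put/2 now being called through its export entry like an ordinary function: a caller match-spec test sees the function that contains the call site, and a return_to event is emitted for it. A minimal sketch of observing this from a tracer (illustrative only, not part of the patch; Tracee is assumed to be the traced process):

    %% Sketch: trace put/2 locally and report the caller.
    erlang:trace(Tracee, true, [call, return_to]),
    erlang:trace_pattern({erlang, put, 2},
                         [{'_', [], [{message, {caller}}]}],
                         [local]),
    %% For a call made inside do_the_put/1, the call message now reports the
    %% function containing the call site ({?MODULE,do_the_put,1} in the test
    %% above), followed by a return_to event for that same function.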
diff --git a/erts/emulator/test/trace_call_time_SUITE.erl b/erts/emulator/test/trace_call_time_SUITE.erl index 26f96a1766..9bab4cbbd8 100644 --- a/erts/emulator/test/trace_call_time_SUITE.erl +++ b/erts/emulator/test/trace_call_time_SUITE.erl @@ -254,8 +254,7 @@ combo(Config) when is_list(Config) -> 2 = erlang:trace_pattern({erlang, term_to_binary, '_'}, [], [local]), 2 = erlang:trace_pattern({erlang, term_to_binary, '_'}, true, [call_time]), 2 = erlang:trace_pattern({erlang, term_to_binary, '_'}, MetaMs, [{meta,MetaTracer}]), - %% not implemented - %2 = erlang:trace_pattern({erlang, term_to_binary, '_'}, true, [call_count]), + 2 = erlang:trace_pattern({erlang, term_to_binary, '_'}, true, [call_count]), 1 = erlang:trace(Self, true, [{tracer,LocalTracer} | Flags]), %% @@ -284,9 +283,7 @@ combo(Config) when is_list(Config) -> {value,{match_spec,[]}} = lists:keysearch(match_spec, 1, TraceInfoBif), {value,{meta, MetaTracer}} = lists:keysearch(meta, 1, TraceInfoBif), {value,{meta_match_spec,MetaMs}} = lists:keysearch(meta_match_spec, 1, TraceInfoBif), - %% not implemented - {value,{call_count,false}} = lists:keysearch(call_count, 1, TraceInfoBif), - %{value,{call_count,0}} = lists:keysearch(call_count, 1, TraceInfoBif), + {value,{call_count,0}} = lists:keysearch(call_count, 1, TraceInfoBif), {value,{call_time,[]}} = lists:keysearch(call_time, 1, TraceInfoBif), %% @@ -429,6 +426,8 @@ called_function(Config) when is_list(Config) -> %% %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% dead_tracer(Config) when is_list(Config) -> + TracedMFAs = dead_tracer_mfas(), + Self = self(), FirstTracer = tracer(), StartTracing = fun() -> turn_on_tracing(Self) end, @@ -443,14 +442,14 @@ dead_tracer(Config) when is_list(Config) -> erlang:yield(), %% Collect and check that we only get call_time info for the current process. - Info1 = collect_all_info(), + Info1 = collect_all_info(TracedMFAs), [] = other_than_self(Info1), io:format("~p\n", [Info1]), %% Note that we have not turned off tracing for the current process, %% but that the tracer has terminated. No more call_time information should be recorded. [1,2,3] = seq(1, 3, fun(I) -> I + 1 end), - [] = collect_all_info(), + [] = collect_all_info(TracedMFAs), %% When we start a second tracer process, that tracer process must %% not inherit the tracing flags and the dead tracer (even though @@ -459,7 +458,7 @@ dead_tracer(Config) when is_list(Config) -> tell_tracer(SecondTracer, StartTracing), Seq20 = lists:seq(1, 20), Seq20 = seq(1, 20, fun(I) -> I + 1 end), - Info2 = collect_all_info(), + Info2 = collect_all_info(TracedMFAs), io:format("~p\n", [Info2]), [] = other_than_self(Info2), SecondTracer ! quit, @@ -495,9 +494,21 @@ turn_on_tracing(Pid) -> _ = now(), ok. -collect_all_info() -> - collect_all_info([{?MODULE,F,A} || {F,A} <- module_info(functions)] ++ - erlang:system_info(snifs)). +%% We want to trace functions local to this module as well as all BIFs, and for +%% the latter we need to ensure that their modules are loaded. +dead_tracer_mfas() -> + Modules = [M || {M,_F,_A} <- erlang:system_info(snifs)], + Whitelist0 = gb_sets:from_list(Modules), + Whitelist = case code:ensure_modules_loaded(Modules) of + {error, Reasons} -> + Blacklist = gb_sets:from_list([M || {M, _} <- Reasons]), + gb_sets:subtract(Whitelist0, Blacklist); + ok -> + Whitelist0 + end, + EligibleSNIFs = [MFA || {M,_F,_A}=MFA <- erlang:system_info(snifs), + gb_sets:is_element(M, Whitelist)], + [{?MODULE,F,A} || {F,A} <- module_info(functions)] ++ EligibleSNIFs. 
collect_all_info([MFA|T]) -> CallTime = erlang:trace_info(MFA, call_time), @@ -567,21 +578,29 @@ seq_r(Start, Stop, Succ, R) -> seq_r(Succ(Start), Stop, Succ, [Start | R]). % Check call time tracing data and print mismatches -check_trace_info(Mfa, [{Pid, C,_,_}] = Expect, Time) -> - case erlang:trace_info(Mfa, call_time) of - % Time tests are somewhat problematic. We want to know if Time (EXPECTED_TIME) and S*1000000 + Us (ACTUAL_TIME) - % is the same. - % If the ratio EXPECTED_TIME/ACTUAL_TIME is ~ 1 or if EXPECTED_TIME - ACTUAL_TIME is near zero, the test is ok. - {call_time,[{Pid,C,S,Us}]} when S >= 0, Us >= 0, abs(1 - Time/(S*1000000 + Us)) < ?R_ERROR; abs(Time - S*1000000 - Us) < ?US_ERROR -> +check_trace_info(Mfa, [{Pid, ExpectedC,_,_}] = Expect, Time) -> + {call_time,[{Pid,C,S,Us}]} = erlang:trace_info(Mfa, call_time), + {Mod, Name, Arity} = Mfa, + IsBuiltin = erlang:is_builtin(Mod, Name, Arity), + if + %% Call count on BIFs may exceed number of calls as they often trap to + %% themselves. + IsBuiltin, C >= ExpectedC, S >= 0, Us >= 0, + abs(1 - Time/(S*1000000 + Us)) < ?R_ERROR; + abs(Time - S*1000000 - Us) < ?US_ERROR -> ok; - {call_time,[{Pid,C,S,Us}]} -> + not IsBuiltin, C =:= ExpectedC, S >= 0, Us >= 0, + abs(1 - Time/(S*1000000 + Us)) < ?R_ERROR; + abs(Time - S*1000000 - Us) < ?US_ERROR -> + ok; + true -> Sum = S*1000000 + Us, - io:format("Expected ~p -> {call_time, ~p (Time ~p us)}~n - got ~w s. ~w us. = ~w us. - ~w -> delta ~w (ratio ~.2f, should be 1.0)~n", - [Mfa, Expect, Time, S, Us, Sum, Time, Sum - Time, Time/Sum]), - time_error; - Other -> - io:format("Expected ~p -> {call_time, ~p (Time ~p us)}~n - got ~p~n", [ Mfa, Expect, Time, Other]), - time_count_error + io:format("Expected ~p -> {call_time, ~p (Time ~p us)}~n - got ~w " + "s. ~w us. = ~w us. - ~w -> delta ~w (ratio ~.2f, " + "should be 1.0)~n", + [Mfa, Expect, Time, + S, Us, Sum, Time, Sum - Time, Time/Sum]), + time_error end; check_trace_info(Mfa, Expect, _) -> case erlang:trace_info(Mfa, call_time) of @@ -670,9 +689,12 @@ loop() -> quit -> ok; {Pid, execute, Fun } when is_function(Fun) -> + %% Make sure we always run with the same amount of reductions. + erlang:yield(), Pid ! {self(), answer, erlang:apply(Fun, [])}, loop(); {Pid, execute, {M, F, A}} -> + erlang:yield(), Pid ! {self(), answer, erlang:apply(M, F, A)}, loop() end. 
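Since traced BIFs are now handled by the ordinary breakpoint machinery, call_count tracing applies to them as well, which is why the suite above drops its "not implemented" work-arounds. A rough sketch of what is now expected to work (illustrative only; as noted earlier in the series, a BIF can only be traced once its module is loaded):

    %% Sketch, not part of the patch.
    2 = erlang:trace_pattern({erlang, term_to_binary, '_'}, true, [call_count]),
    {call_count, Count} = erlang:trace_info({erlang, term_to_binary, 1}, call_count),
    %% Count is an integer rather than 'false'. For BIFs it may exceed the
    %% number of calls made, since a trapping BIF re-enters itself (see
    %% check_trace_info/3 above).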
diff --git a/erts/emulator/utils/make_tables b/erts/emulator/utils/make_tables index deee5c2344..ef0df28fc8 100755 --- a/erts/emulator/utils/make_tables +++ b/erts/emulator/utils/make_tables @@ -35,7 +35,6 @@ use File::Basename; # Output: # <-src>/erl_am.c # <-src>/erl_bif_table.c -# <-src>/erl_bif_wrap.c # <-src>/erl_dirty_bif_wrap.c # <-src>/erl_guard_bifs.c # <-src>/hipe_nbif_impl.c @@ -92,7 +91,7 @@ while (<>) { my($type, @args) = split; if ($type eq 'atom') { save_atoms(@args); - } elsif ($type eq 'bif' or $type eq 'ubif' or $type eq 'gcbif') { + } elsif ($type eq 'bif' or $type eq 'ubif' or $type eq 'hbif') { if (@args > 2) { error("$type only allows two arguments"); } @@ -124,14 +123,22 @@ while (<>) { error("invalid sched_type: $sched_type"); } - my $wrapper; - if ($type eq 'bif') { - $wrapper = "wrap_$alias"; - } else { - $wrapper = $alias; - } + my $kind; + if ($type eq 'bif') { + $kind = 'BIF_KIND_REGULAR'; + } + elsif ($type eq 'hbif') { + $kind = 'BIF_KIND_HEAVY'; + } + elsif ($type eq 'ubif') { + $kind = 'BIF_KIND_GUARD'; + } + else { + error("invalid bif_type: $type"); + } + push(@bif, ["am_$atom_alias{$mod}","am_$atom_alias{$name}",$arity, - $alias3,$wrapper,$alias]); + $alias3,$alias,$kind]); push(@bif_info, [$type, $sched_type, $alias3, $alias]); } elsif ($type eq 'dirty-cpu' or $type eq 'dirty-io' or $type eq 'dirty-cpu-test' or $type eq 'dirty-io-test') { @@ -196,7 +203,7 @@ open_file("$include/erl_bif_list.h"); my $i; for ($i = 0; $i < @bif; $i++) { # module atom, function atom, arity, C function, table index - print "BIF_LIST($bif[$i]->[0],$bif[$i]->[1],$bif[$i]->[2],$bif[$i]->[3],$bif[$i]->[5],$i)\n"; + print "BIF_LIST($bif[$i]->[0],$bif[$i]->[1],$bif[$i]->[2],$bif[$i]->[3],$bif[$i]->[4],$i)\n"; } # @@ -208,15 +215,24 @@ my $bif_size = @bif; print <[3]; print "Eterm $name($args);\n"; - print "Eterm wrap_$name($args);\n"; print "Eterm erts_gc_$name(Process* p, Eterm* reg, Uint live);\n" if $bif_info[$i]->[0] eq 'gcbif'; print "Eterm $bif_info[$i]->[2]($args);\n" @@ -282,25 +296,6 @@ for ($i = 0; $i < @bif; $i++) { } print "};\n\n"; -# -# Generate the bif wrappers file. -# - -open_file("$src/erl_bif_wrap.c"); -my $i; -includes("export.h", "sys.h", "erl_vm.h", "global.h", "erl_process.h", "bif.h", - "erl_bif_table.h", "erl_atom_table.h"); -for ($i = 0; $i < @bif; $i++) { - next if $bif[$i]->[3] eq $bif[$i]->[4]; # Skip unwrapped bifs - my $arity = $bif[$i]->[2]; - my $func = $bif_info[$i]->[3]; - print "Eterm\n"; - print "wrap_$func(Process* p, Eterm* args, UWord* I)\n"; - print "{\n"; - print " return erts_bif_trace($i, p, args, I);\n"; - print "}\n\n"; -} - # # Generate erl_gc_bifs.c. 
# @@ -309,18 +304,11 @@ open_file("$src/erl_guard_bifs.c"); my $i; includes("export.h", "sys.h", "erl_vm.h", "global.h", "erl_process.h", "bif.h", "erl_bif_table.h"); -print "const ErtsGcBif erts_gc_bifs[] = {\n"; -for ($i = 0; $i < @bif; $i++) { - next unless $bif_info[$i]->[0] eq 'gcbif'; - print " {$bif[$i]->[3], erts_gc_$bif[$i]->[3], BIF_$bif[$i]->[5]},\n"; -} -print " {NULL, NULL, -1}\n"; -print "};\n"; print "const ErtsUBif erts_u_bifs[] = {\n"; for ($i = 0; $i < @bif; $i++) { next unless $bif_info[$i]->[0] eq 'ubif'; - print " {$bif[$i]->[3], BIF_$bif[$i]->[5]},\n"; + print " {$bif[$i]->[3], BIF_$bif[$i]->[4]},\n"; } print " {NULL, -1}\n"; print "};\n"; @@ -368,7 +356,7 @@ EOF my $i; for ($i = 0; $i < @bif; $i++) { print <[5](Process *c_p, Eterm *regs); +Eterm nbif_impl_$bif[$i]->[4](Process *c_p, Eterm *regs); EOF } @@ -388,9 +376,9 @@ EOF for ($i = 0; $i < @bif; $i++) { print <[5](Process *c_p, Eterm *regs) +Eterm nbif_impl_$bif[$i]->[4](Process *c_p, Eterm *regs) { - return $bif[$i]->[3](c_p, regs, (UWord *) bif_export\[BIF_$bif[$i]->[5]\]); + return $bif[$i]->[3](c_p, regs, (UWord *) bif_export\[BIF_$bif[$i]->[4]\]); } EOF -- cgit v1.2.1 From 2baffccacba2890fbf42c4f1d4ca22b7f5cbad13 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?John=20H=C3=B6gberg?= Date: Mon, 2 Sep 2019 16:44:05 +0200 Subject: erts: Crash if a module doesn't have stubs for all its BIFs --- erts/emulator/beam/beam_load.c | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/erts/emulator/beam/beam_load.c b/erts/emulator/beam/beam_load.c index 45122fe933..8fe046095f 100644 --- a/erts/emulator/beam/beam_load.c +++ b/erts/emulator/beam/beam_load.c @@ -5238,6 +5238,28 @@ final_touch(LoaderState* stp, struct erl_module_instance* inst_p) } } +#ifdef DEBUG + /* Ensure that we've loaded stubs for all BIFs in this module. */ + for (i = 0; i < BIF_SIZE; i++) { + BifEntry *entry = &bif_table[i]; + + if (stp->module == entry->module) { + Export *ep = erts_export_put(entry->module, + entry->name, + entry->arity); + BeamInstr *addr = ep->addressv[erts_staging_code_ix()]; + + if (!ErtsInArea(addr, stp->codev, stp->ci * sizeof(BeamInstr))) { + erts_exit(ERTS_ABORT_EXIT, + "Module %T doesn't export BIF %T/%i\n", + entry->module, + entry->name, + entry->arity); + } + } + } +#endif + /* * Import functions and patch all callers. */ -- cgit v1.2.1 From 0bc7a426a879865ac49098f88b6d47b20f7b59c1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?John=20H=C3=B6gberg?= Date: Tue, 3 Sep 2019 15:38:23 +0200 Subject: erts: Rename NifExport to ErtsNativeFunc All it does is wrap a native function for scheduling, and it no longer has anything to do with exports. 
--- erts/emulator/beam/beam_bif_load.c | 2 +- erts/emulator/beam/beam_emu.c | 12 ++--- erts/emulator/beam/bif.c | 30 ++++++------- erts/emulator/beam/erl_alloc.types | 2 +- erts/emulator/beam/erl_gc.c | 4 +- erts/emulator/beam/erl_nfunc_sched.c | 36 +++++++-------- erts/emulator/beam/erl_nfunc_sched.h | 87 +++++++++++++++++++----------------- erts/emulator/beam/erl_nif.c | 62 ++++++++++++------------- erts/emulator/beam/erl_process.c | 10 ++--- erts/emulator/beam/erl_process.h | 14 +++--- erts/emulator/beam/error.h | 14 +++--- 11 files changed, 138 insertions(+), 135 deletions(-) diff --git a/erts/emulator/beam/beam_bif_load.c b/erts/emulator/beam/beam_bif_load.c index a406e14741..dcbff99f54 100644 --- a/erts/emulator/beam/beam_bif_load.c +++ b/erts/emulator/beam/beam_bif_load.c @@ -1138,7 +1138,7 @@ check_process_code(Process* rp, Module* modp, int *redsp, int fcalls) *redsp += 1; - if (erts_check_nif_export_in_area(rp, mod_start, mod_size)) + if (erts_check_nfunc_in_area(rp, mod_start, mod_size)) return am_true; *redsp += (STACK_START(rp) - rp->stop) / 32; diff --git a/erts/emulator/beam/beam_emu.c b/erts/emulator/beam/beam_emu.c index 5ff4549818..e7f116acb2 100644 --- a/erts/emulator/beam/beam_emu.c +++ b/erts/emulator/beam/beam_emu.c @@ -1167,7 +1167,7 @@ void erts_dirty_process_main(ErtsSchedulerData *esdp) * I[2]: Pointer to erl_module_nif * I[3]: Function pointer to dirty NIF * - * This layout is determined by the NifExport struct + * This layout is determined by the ErtsNativeFunc struct */ ERTS_MSACC_SET_STATE_CACHED_M_X(ERTS_MSACC_STATE_NIF); @@ -1310,14 +1310,14 @@ handle_error(Process* c_p, BeamInstr* pc, Eterm* reg, ErtsCodeMFA *bif_mfa) ASSERT(c_p->freason != TRAP); /* Should have been handled earlier. */ - if (c_p->freason & EXF_RESTORE_NIF) - erts_nif_export_restore_error(c_p, &pc, reg, &bif_mfa); + if (c_p->freason & EXF_RESTORE_NFUNC) + erts_nfunc_restore_error(c_p, &pc, reg, &bif_mfa); #ifdef DEBUG if (bif_mfa) { - /* Verify that bif_mfa does not point into our nif export */ - NifExport *nep = ERTS_PROC_GET_NIF_TRAP_EXPORT(c_p); - ASSERT(!nep || !ErtsInArea(bif_mfa, (char *)nep, sizeof(NifExport))); + /* Verify that bif_mfa does not point into our native function wrapper */ + ErtsNativeFunc *nep = ERTS_PROC_GET_NFUNC_TRAP_WRAPPER(c_p); + ASSERT(!nep || !ErtsInArea(bif_mfa, (char *)nep, sizeof(ErtsNativeFunc))); } #endif diff --git a/erts/emulator/beam/bif.c b/erts/emulator/beam/bif.c index 593ddf9a29..1fd02c4b51 100644 --- a/erts/emulator/beam/bif.c +++ b/erts/emulator/beam/bif.c @@ -5044,7 +5044,7 @@ void erts_init_bif(void) } /* - * Scheduling of BIFs via NifExport... + * Scheduling of BIFs via ErtsNativeFunc... 
*/ #define ERTS_WANT_NFUNC_SCHED_INTERNALS__ #include "erl_nfunc_sched.h" @@ -5059,7 +5059,7 @@ schedule(Process *c_p, Process *dirty_shadow_proc, int argc, Eterm *argv) { ERTS_LC_ASSERT(ERTS_PROC_LOCK_MAIN & erts_proc_lc_my_proc_locks(c_p)); - (void) erts_nif_export_schedule(c_p, dirty_shadow_proc, + (void) erts_nfunc_schedule(c_p, dirty_shadow_proc, mfa, pc, BeamOpCodeAddr(op_apply_bif), dfunc, ifunc, module, function, @@ -5069,14 +5069,14 @@ schedule(Process *c_p, Process *dirty_shadow_proc, static BIF_RETTYPE dirty_bif_result(BIF_ALIST_1) { - NifExport *nep = (NifExport *) ERTS_PROC_GET_NIF_TRAP_EXPORT(BIF_P); - erts_nif_export_restore(BIF_P, nep, BIF_ARG_1); + ErtsNativeFunc *nep = (ErtsNativeFunc *) ERTS_PROC_GET_NFUNC_TRAP_WRAPPER(BIF_P); + erts_nfunc_restore(BIF_P, nep, BIF_ARG_1); BIF_RET(BIF_ARG_1); } static BIF_RETTYPE dirty_bif_trap(BIF_ALIST) { - NifExport *nep = (NifExport *) ERTS_PROC_GET_NIF_TRAP_EXPORT(BIF_P); + ErtsNativeFunc *nep = (ErtsNativeFunc *) ERTS_PROC_GET_NFUNC_TRAP_WRAPPER(BIF_P); /* * Arity and argument registers already set @@ -5085,7 +5085,7 @@ static BIF_RETTYPE dirty_bif_trap(BIF_ALIST) ASSERT(BIF_P->arity == nep->trampoline.info.mfa.arity); - erts_nif_export_restore(BIF_P, nep, THE_NON_VALUE); + erts_nfunc_restore(BIF_P, nep, THE_NON_VALUE); BIF_P->i = (BeamInstr *) nep->func; BIF_P->freason = TRAP; @@ -5100,8 +5100,8 @@ static BIF_RETTYPE dirty_bif_exception(BIF_ALIST_2) freason = signed_val(BIF_ARG_1); - /* Restore orig info for error and clear nif export in handle_error() */ - freason |= EXF_RESTORE_NIF; + /* Restore orig info for error and clear nif wrapper in handle_error() */ + freason |= EXF_RESTORE_NFUNC; BIF_P->fvalue = BIF_ARG_2; @@ -5236,7 +5236,7 @@ erts_schedule_bif(Process *proc, static BIF_RETTYPE call_bif(Process *c_p, Eterm *reg, BeamInstr *I) { - NifExport *nep = ERTS_I_BEAM_OP_TO_NIF_EXPORT(I); + ErtsNativeFunc *nep = ERTS_I_BEAM_OP_TO_NFUNC(I); ErtsBifFunc bif = (ErtsBifFunc) nep->func; BIF_RETTYPE ret; @@ -5249,12 +5249,12 @@ call_bif(Process *c_p, Eterm *reg, BeamInstr *I) ret = (*bif)(c_p, reg, I); if (is_value(ret)) - erts_nif_export_restore(c_p, nep, ret); + erts_nfunc_restore(c_p, nep, ret); else if (c_p->freason != TRAP) - c_p->freason |= EXF_RESTORE_NIF; /* restore in handle_error() */ + c_p->freason |= EXF_RESTORE_NFUNC; /* restore in handle_error() */ else if (nep->func == ERTS_SCHED_BIF_TRAP_MARKER) { /* BIF did an ordinary trap... */ - erts_nif_export_restore(c_p, nep, ret); + erts_nfunc_restore(c_p, nep, ret); } /* else: * BIF rescheduled itself using erts_schedule_bif(). 
@@ -5271,7 +5271,7 @@ erts_call_dirty_bif(ErtsSchedulerData *esdp, Process *c_p, BeamInstr *I, Eterm * int exiting; Process *dirty_shadow_proc; ErtsBifFunc bf; - NifExport *nep; + ErtsNativeFunc *nep; #ifdef DEBUG Eterm *c_p_htop; erts_aint32_t state; @@ -5284,8 +5284,8 @@ erts_call_dirty_bif(ErtsSchedulerData *esdp, Process *c_p, BeamInstr *I, Eterm * #endif - nep = ERTS_I_BEAM_OP_TO_NIF_EXPORT(I); - ASSERT(nep == ERTS_PROC_GET_NIF_TRAP_EXPORT(c_p)); + nep = ERTS_I_BEAM_OP_TO_NFUNC(I); + ASSERT(nep == ERTS_PROC_GET_NFUNC_TRAP_WRAPPER(c_p)); nep->func = ERTS_SCHED_BIF_TRAP_MARKER; diff --git a/erts/emulator/beam/erl_alloc.types b/erts/emulator/beam/erl_alloc.types index cd978a8d57..3e643a6223 100644 --- a/erts/emulator/beam/erl_alloc.types +++ b/erts/emulator/beam/erl_alloc.types @@ -331,7 +331,7 @@ type DB_HEIR_DATA STANDARD ETS db_heir_data type DB_MS_PSDO_PROC LONG_LIVED ETS db_match_pseudo_proc type SCHDLR_DATA LONG_LIVED SYSTEM scheduler_data -type NIF_TRAP_EXPORT STANDARD PROCESSES nif_trap_export_entry +type NFUNC_TRAP_WRAPPER STANDARD PROCESSES nfunc_trap_wrapper type EXPORT LONG_LIVED CODE export_entry type MONITOR FIXED_SIZE PROCESSES monitor type MONITOR_SUSPEND STANDARD PROCESSES monitor_suspend diff --git a/erts/emulator/beam/erl_gc.c b/erts/emulator/beam/erl_gc.c index f387960b08..4a6f204cb5 100644 --- a/erts/emulator/beam/erl_gc.c +++ b/erts/emulator/beam/erl_gc.c @@ -2588,7 +2588,7 @@ setup_rootset(Process *p, Eterm *objv, int nobj, Rootset *rootset) /* * If a NIF or BIF has saved arguments, they need to be added */ - if (erts_setup_nif_export_rootset(p, &roots[n].v, &roots[n].sz)) + if (erts_setup_nfunc_rootset(p, &roots[n].v, &roots[n].sz)) n++; ASSERT(n <= rootset->size); @@ -3236,7 +3236,7 @@ offset_one_rootset(Process *p, Sint offs, char* area, Uint area_size, offset_heap_ptr(objv, nobj, offs, area, area_size); } offset_off_heap(p, offs, area, area_size); - if (erts_setup_nif_export_rootset(p, &v, &sz)) + if (erts_setup_nfunc_rootset(p, &v, &sz)) offset_heap_ptr(v, sz, offs, area, area_size); } diff --git a/erts/emulator/beam/erl_nfunc_sched.c b/erts/emulator/beam/erl_nfunc_sched.c index 0b9e54dfc5..a37f020cb4 100644 --- a/erts/emulator/beam/erl_nfunc_sched.c +++ b/erts/emulator/beam/erl_nfunc_sched.c @@ -30,37 +30,37 @@ #include "erl_nfunc_sched.h" #include "erl_trace.h" -NifExport * -erts_new_proc_nif_export(Process *c_p, int argc) +ErtsNativeFunc * +erts_new_proc_nfunc(Process *c_p, int argc) { - NifExport *nep, *old_nep; + ErtsNativeFunc *nep, *old_nep; size_t size; - size = sizeof(NifExport) + (argc-1)*sizeof(Eterm); - nep = erts_alloc(ERTS_ALC_T_NIF_TRAP_EXPORT, size); + size = sizeof(ErtsNativeFunc) + (argc-1)*sizeof(Eterm); + nep = erts_alloc(ERTS_ALC_T_NFUNC_TRAP_WRAPPER, size); nep->argc = -1; /* unused marker */ nep->argv_size = argc; - old_nep = ERTS_PROC_SET_NIF_TRAP_EXPORT(c_p, nep); + old_nep = ERTS_PROC_SET_NFUNC_TRAP_WRAPPER(c_p, nep); if (old_nep) { - erts_free(ERTS_ALC_T_NIF_TRAP_EXPORT, old_nep); + erts_free(ERTS_ALC_T_NFUNC_TRAP_WRAPPER, old_nep); } return nep; } void -erts_destroy_nif_export(Process *p) +erts_destroy_nfunc(Process *p) { - NifExport *nep = ERTS_PROC_SET_NIF_TRAP_EXPORT(p, NULL); + ErtsNativeFunc *nep = ERTS_PROC_SET_NFUNC_TRAP_WRAPPER(p, NULL); if (nep) { if (nep->m) - erts_nif_export_cleanup_nif_mod(nep); - erts_free(ERTS_ALC_T_NIF_TRAP_EXPORT, nep); + erts_nfunc_cleanup_nif_mod(nep); + erts_free(ERTS_ALC_T_NFUNC_TRAP_WRAPPER, nep); } } -NifExport * -erts_nif_export_schedule(Process *c_p, Process *dirty_shadow_proc, 
+ErtsNativeFunc * +erts_nfunc_schedule(Process *c_p, Process *dirty_shadow_proc, ErtsCodeMFA *mfa, BeamInstr *pc, BeamInstr instr, void *dfunc, void *ifunc, @@ -70,7 +70,7 @@ erts_nif_export_schedule(Process *c_p, Process *dirty_shadow_proc, Process *used_proc; ErtsSchedulerData *esdp; Eterm* reg; - NifExport* nep; + ErtsNativeFunc* nep; int i; ERTS_LC_ASSERT(erts_proc_lc_my_proc_locks(c_p) @@ -93,10 +93,10 @@ erts_nif_export_schedule(Process *c_p, Process *dirty_shadow_proc, reg = esdp->x_reg_array; if (mfa) - nep = erts_get_proc_nif_export(c_p, (int) mfa->arity); + nep = erts_get_proc_nfunc(c_p, (int) mfa->arity); else { /* If no mfa, this is not the first schedule... */ - nep = ERTS_PROC_GET_NIF_TRAP_EXPORT(c_p); + nep = ERTS_PROC_GET_NFUNC_TRAP_WRAPPER(c_p); ASSERT(nep && nep->argc >= 0); } @@ -114,9 +114,9 @@ erts_nif_export_schedule(Process *c_p, Process *dirty_shadow_proc, nep->argc = (int) mfa->arity; nep->m = NULL; - ASSERT(!erts_check_nif_export_in_area(c_p, + ASSERT(!erts_check_nfunc_in_area(c_p, (char *) nep, - (sizeof(NifExport) + (sizeof(ErtsNativeFunc) + (sizeof(Eterm) *(nep->argc-1))))); } diff --git a/erts/emulator/beam/erl_nfunc_sched.h b/erts/emulator/beam/erl_nfunc_sched.h index 033ba58ded..4044d59f82 100644 --- a/erts/emulator/beam/erl_nfunc_sched.h +++ b/erts/emulator/beam/erl_nfunc_sched.h @@ -26,13 +26,14 @@ #include "error.h" /* - * NIF exports need a few more items than the Export struct provides, - * including the erl_module_nif* and a NIF function pointer, so the - * NifExport below adds those. The Export member must be first in the - * struct. A number of values are stored for error handling purposes - * only. + * Native function wrappers are used to schedule native functions on both + * normal and dirty schedulers. * - * 'argc' is >= 0 when NifExport is in use, and < 0 when not. + * A number of values are only stored for error handling, and the fields + * following `current` can be omitted when a wrapper is statically "scheduled" + * through placement in a function stub. + * + * 'argc' is >= 0 when ErtsNativeFunc is in use, and < 0 when not. 
*/ typedef struct { @@ -51,49 +52,51 @@ typedef struct { int argc; /* Number of arguments in original call */ int argv_size; /* Allocated size of argv */ Eterm argv[1]; /* Saved arguments from the original call */ -} NifExport; - -NifExport *erts_new_proc_nif_export(Process *c_p, int argc); -void erts_destroy_nif_export(Process *p); -NifExport *erts_nif_export_schedule(Process *c_p, Process *dirty_shadow_proc, - ErtsCodeMFA *mfa, BeamInstr *pc, - BeamInstr instr, - void *dfunc, void *ifunc, - Eterm mod, Eterm func, - int argc, const Eterm *argv); -void erts_nif_export_cleanup_nif_mod(NifExport *ep); /* erl_nif.c */ -ERTS_GLB_INLINE NifExport *erts_get_proc_nif_export(Process *c_p, int extra); -ERTS_GLB_INLINE int erts_setup_nif_export_rootset(Process* proc, Eterm** objv, - Uint* nobj); -ERTS_GLB_INLINE int erts_check_nif_export_in_area(Process *p, - char *start, Uint size); -ERTS_GLB_INLINE void erts_nif_export_restore(Process *c_p, NifExport *ep, - Eterm result); -ERTS_GLB_INLINE void erts_nif_export_restore_error(Process* c_p, BeamInstr **pc, - Eterm *reg, ErtsCodeMFA **nif_mfa); +} ErtsNativeFunc; + +ErtsNativeFunc *erts_new_proc_nfunc(Process *c_p, int argc); +void erts_destroy_nfunc(Process *p); +ErtsNativeFunc *erts_nfunc_schedule(Process *c_p, Process *dirty_shadow_proc, + ErtsCodeMFA *mfa, BeamInstr *pc, + BeamInstr instr, + void *dfunc, void *ifunc, + Eterm mod, Eterm func, + int argc, const Eterm *argv); +void erts_nfunc_cleanup_nif_mod(ErtsNativeFunc *ep); /* erl_nif.c */ +ERTS_GLB_INLINE ErtsNativeFunc *erts_get_proc_nfunc(Process *c_p, int extra); +ERTS_GLB_INLINE int erts_setup_nfunc_rootset(Process* proc, Eterm** objv, + Uint* nobj); +ERTS_GLB_INLINE int erts_check_nfunc_in_area(Process *p, + char *start, Uint size); +ERTS_GLB_INLINE void erts_nfunc_restore(Process *c_p, ErtsNativeFunc *ep, + Eterm result); +ERTS_GLB_INLINE void erts_nfunc_restore_error(Process* c_p, + BeamInstr **pc, + Eterm *reg, + ErtsCodeMFA **nif_mfa); ERTS_GLB_INLINE Process *erts_proc_shadow2real(Process *c_p); #if ERTS_GLB_INLINE_INCL_FUNC_DEF -ERTS_GLB_INLINE NifExport * -erts_get_proc_nif_export(Process *c_p, int argc) +ERTS_GLB_INLINE ErtsNativeFunc * +erts_get_proc_nfunc(Process *c_p, int argc) { - NifExport *nep = ERTS_PROC_GET_NIF_TRAP_EXPORT(c_p); + ErtsNativeFunc *nep = ERTS_PROC_GET_NFUNC_TRAP_WRAPPER(c_p); if (!nep || (nep->argc < 0 && nep->argv_size < argc)) - return erts_new_proc_nif_export(c_p, argc); + return erts_new_proc_nfunc(c_p, argc); return nep; } /* * If a process has saved arguments, they need to be part of the GC * rootset. The function below is called from setup_rootset() in - * erl_gc.c. Any exception term saved in the NifExport is also made + * erl_gc.c. Any exception term saved in the ErtsNativeFunc is also made * part of the GC rootset here; it always resides in rootset[0]. */ ERTS_GLB_INLINE int -erts_setup_nif_export_rootset(Process* proc, Eterm** objv, Uint* nobj) +erts_setup_nfunc_rootset(Process* proc, Eterm** objv, Uint* nobj) { - NifExport* ep = (NifExport*) ERTS_PROC_GET_NIF_TRAP_EXPORT(proc); + ErtsNativeFunc* ep = (ErtsNativeFunc*) ERTS_PROC_GET_NFUNC_TRAP_WRAPPER(proc); if (!ep || ep->argc <= 0) return 0; @@ -104,12 +107,12 @@ erts_setup_nif_export_rootset(Process* proc, Eterm** objv, Uint* nobj) } /* - * Check if nif export points into code area... + * Check if native func wrapper points into code area... 
*/ ERTS_GLB_INLINE int -erts_check_nif_export_in_area(Process *p, char *start, Uint size) +erts_check_nfunc_in_area(Process *p, char *start, Uint size) { - NifExport *nep = ERTS_PROC_GET_NIF_TRAP_EXPORT(p); + ErtsNativeFunc *nep = ERTS_PROC_GET_NFUNC_TRAP_WRAPPER(p); if (!nep || nep->argc < 0) return 0; if (ErtsInArea(nep->pc, start, size)) @@ -122,7 +125,7 @@ erts_check_nif_export_in_area(Process *p, char *start, Uint size) } ERTS_GLB_INLINE void -erts_nif_export_restore(Process *c_p, NifExport *ep, Eterm result) +erts_nfunc_restore(Process *c_p, ErtsNativeFunc *ep, Eterm result) { ASSERT(!ERTS_SCHEDULER_IS_DIRTY(erts_get_scheduler_data())); ERTS_LC_ASSERT(!(c_p->static_flags @@ -135,10 +138,10 @@ erts_nif_export_restore(Process *c_p, NifExport *ep, Eterm result) } ERTS_GLB_INLINE void -erts_nif_export_restore_error(Process* c_p, BeamInstr **pc, +erts_nfunc_restore_error(Process* c_p, BeamInstr **pc, Eterm *reg, ErtsCodeMFA **nif_mfa) { - NifExport *nep = (NifExport *) ERTS_PROC_GET_NIF_TRAP_EXPORT(c_p); + ErtsNativeFunc *nep = (ErtsNativeFunc *) ERTS_PROC_GET_NFUNC_TRAP_WRAPPER(c_p); int ix; ASSERT(nep); @@ -146,7 +149,7 @@ erts_nif_export_restore_error(Process* c_p, BeamInstr **pc, *nif_mfa = nep->mfa; for (ix = 0; ix < nep->argc; ix++) reg[ix] = nep->argv[ix]; - erts_nif_export_restore(c_p, nep, THE_NON_VALUE); + erts_nfunc_restore(c_p, nep, THE_NON_VALUE); } ERTS_GLB_INLINE Process * @@ -169,10 +172,10 @@ erts_proc_shadow2real(Process *c_p) #if defined(ERTS_WANT_NFUNC_SCHED_INTERNALS__) && !defined(ERTS_NFUNC_SCHED_INTERNALS__) #define ERTS_NFUNC_SCHED_INTERNALS__ -#define ERTS_I_BEAM_OP_TO_NIF_EXPORT(I) \ +#define ERTS_I_BEAM_OP_TO_NFUNC(I) \ (ASSERT(BeamIsOpCode(*(I), op_apply_bif) || \ BeamIsOpCode(*(I), op_call_nif)), \ - ((NifExport *) (((char *) (I)) - offsetof(NifExport, trampoline.call_op)))) + ((ErtsNativeFunc *) (((char *) (I)) - offsetof(ErtsNativeFunc, trampoline.call_op)))) #include "erl_message.h" diff --git a/erts/emulator/beam/erl_nif.c b/erts/emulator/beam/erl_nif.c index 5a5ea07d89..04e6c15982 100644 --- a/erts/emulator/beam/erl_nif.c +++ b/erts/emulator/beam/erl_nif.c @@ -309,10 +309,10 @@ void erts_post_nif(ErlNifEnv* env) /* - * Initialize a NifExport struct. Create it if needed and store it in the + * Initialize a ErtsNativeFunc struct. Create it if needed and store it in the * proc. The direct_fp function is what will be invoked by op_call_nif, and * the indirect_fp function, if not NULL, is what the direct_fp function - * will call. If the allocated NifExport isn't enough to hold all of argv, + * will call. If the allocated ErtsNativeFunc isn't enough to hold all of argv, * allocate a larger one. Save 'current' and registers if first time this * call is scheduled. 
*/ @@ -321,7 +321,7 @@ static ERTS_INLINE ERL_NIF_TERM schedule(ErlNifEnv* env, NativeFunPtr direct_fp, NativeFunPtr indirect_fp, Eterm mod, Eterm func_name, int argc, const ERL_NIF_TERM argv[]) { - NifExport *ep; + ErtsNativeFunc *ep; Process *c_p, *dirty_shadow_proc; execution_state(env, &c_p, NULL); @@ -332,7 +332,7 @@ schedule(ErlNifEnv* env, NativeFunPtr direct_fp, NativeFunPtr indirect_fp, ERTS_LC_ASSERT(ERTS_PROC_LOCK_MAIN & erts_proc_lc_my_proc_locks(c_p)); - ep = erts_nif_export_schedule(c_p, dirty_shadow_proc, + ep = erts_nfunc_schedule(c_p, dirty_shadow_proc, c_p->current, cp_val(c_p->stop[0]), BeamOpCodeAddr(op_call_nif), @@ -356,7 +356,7 @@ erts_call_dirty_nif(ErtsSchedulerData *esdp, Process *c_p, BeamInstr *I, Eterm * { int exiting; ERL_NIF_TERM *argv = (ERL_NIF_TERM *) reg; - NifExport *nep = ERTS_I_BEAM_OP_TO_NIF_EXPORT(I); + ErtsNativeFunc *nep = ERTS_I_BEAM_OP_TO_NFUNC(I); ErtsCodeMFA *codemfa = erts_code_to_codemfa(I); NativeFunPtr dirty_nif = (NativeFunPtr) I[1]; ErlNifEnv env; @@ -364,7 +364,7 @@ erts_call_dirty_nif(ErtsSchedulerData *esdp, Process *c_p, BeamInstr *I, Eterm * #ifdef DEBUG erts_aint32_t state = erts_atomic32_read_nob(&c_p->state); - ASSERT(nep == ERTS_PROC_GET_NIF_TRAP_EXPORT(c_p)); + ASSERT(nep == ERTS_PROC_GET_NFUNC_TRAP_WRAPPER(c_p)); ASSERT(!c_p->scheduler_data); ASSERT((state & ERTS_PSFLG_DIRTY_RUNNING) @@ -2823,7 +2823,7 @@ int enif_consume_timeslice(ErlNifEnv* env, int percent) } static ERTS_INLINE void -nif_export_cleanup_nif_mod(NifExport *ep) +nfunc_cleanup_nif_mod(ErtsNativeFunc *ep) { if (erts_refc_dectest(&ep->m->rt_dtor_cnt, 0) == 0 && ep->m->mod == NULL) close_lib(ep->m); @@ -2831,17 +2831,17 @@ nif_export_cleanup_nif_mod(NifExport *ep) } void -erts_nif_export_cleanup_nif_mod(NifExport *ep) +erts_nfunc_cleanup_nif_mod(ErtsNativeFunc *ep) { - nif_export_cleanup_nif_mod(ep); + nfunc_cleanup_nif_mod(ep); } static ERTS_INLINE void -nif_export_restore(Process *c_p, NifExport *ep, Eterm res) +nfunc_restore(Process *c_p, ErtsNativeFunc *ep, Eterm res) { - erts_nif_export_restore(c_p, ep, res); + erts_nfunc_restore(c_p, ep, res); ASSERT(ep->m); - nif_export_cleanup_nif_mod(ep); + nfunc_cleanup_nif_mod(ep); } @@ -2858,15 +2858,15 @@ static ERL_NIF_TERM dirty_nif_finalizer(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]) { Process* proc; - NifExport* ep; + ErtsNativeFunc* ep; execution_state(env, &proc, NULL); ASSERT(argc == 1); ASSERT(!ERTS_SCHEDULER_IS_DIRTY(erts_proc_sched_data(proc))); - ep = (NifExport*) ERTS_PROC_GET_NIF_TRAP_EXPORT(proc); + ep = (ErtsNativeFunc*) ERTS_PROC_GET_NFUNC_TRAP_WRAPPER(proc); ASSERT(ep); - nif_export_restore(proc, ep, argv[0]); + nfunc_restore(proc, ep, argv[0]); return argv[0]; } @@ -2878,21 +2878,22 @@ dirty_nif_exception(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]) { ERL_NIF_TERM ret; Process* proc; - NifExport* ep; + ErtsNativeFunc* ep; Eterm exception; execution_state(env, &proc, NULL); ASSERT(argc == 1); ASSERT(!ERTS_SCHEDULER_IS_DIRTY(erts_proc_sched_data(proc))); - ep = (NifExport*) ERTS_PROC_GET_NIF_TRAP_EXPORT(proc); + ep = (ErtsNativeFunc*) ERTS_PROC_GET_NFUNC_TRAP_WRAPPER(proc); ASSERT(ep); exception = argv[0]; /* argv overwritten by restore below... 
*/ - nif_export_cleanup_nif_mod(ep); + nfunc_cleanup_nif_mod(ep); ret = enif_raise_exception(env, exception); - /* Restore orig info for error and clear nif export in handle_error() */ - proc->freason |= EXF_RESTORE_NIF; + /* Restore orig info for error and clear native func wrapper in + * handle_error() */ + proc->freason |= EXF_RESTORE_NFUNC; return ret; } @@ -2929,7 +2930,7 @@ static_schedule_dirty_nif(ErlNifEnv* env, erts_aint32_t dirty_psflg, int argc, const ERL_NIF_TERM argv[]) { Process *proc; - NifExport *ep; + ErtsNativeFunc *ep; Eterm mod, func; NativeFunPtr fp; @@ -2939,12 +2940,11 @@ static_schedule_dirty_nif(ErlNifEnv* env, erts_aint32_t dirty_psflg, * Called in order to schedule statically determined * dirty NIF calls... * - * Note that 'current' does not point into a NifExport - * structure; only a structure with similar - * parts (located in code). + * Note that 'current' does not point into a ErtsNativeFunc + * structure; only a structure with similar parts (located in code). */ - ep = ErtsContainerStruct(proc->current, NifExport, trampoline.info.mfa); + ep = ErtsContainerStruct(proc->current, ErtsNativeFunc, trampoline.info.mfa); mod = proc->current->module; func = proc->current->function; fp = (NativeFunPtr) ep->func; @@ -2983,12 +2983,12 @@ execute_nif(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]) { Process* proc; NativeFunPtr fp; - NifExport* ep; + ErtsNativeFunc* ep; ERL_NIF_TERM result; execution_state(env, &proc, NULL); - ep = ErtsContainerStruct(proc->current, NifExport, trampoline.info.mfa); + ep = ErtsContainerStruct(proc->current, ErtsNativeFunc, trampoline.info.mfa); fp = ep->func; ASSERT(ep); ASSERT(!env->exception_thrown); @@ -3001,20 +3001,20 @@ execute_nif(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]) result = (*fp)(env, argc, argv); - ASSERT(ep == ERTS_PROC_GET_NIF_TRAP_EXPORT(proc)); + ASSERT(ep == ERTS_PROC_GET_NFUNC_TRAP_WRAPPER(proc)); if (is_value(result) || proc->freason != TRAP) { /* Done (not rescheduled)... 
*/ ASSERT(ep->func == ERTS_DBG_NIF_NOT_SCHED_MARKER); if (!env->exception_thrown) - nif_export_restore(proc, ep, result); + nfunc_restore(proc, ep, result); else { - nif_export_cleanup_nif_mod(ep); + nfunc_cleanup_nif_mod(ep); /* * Restore orig info for error and clear nif * export in handle_error() */ - proc->freason |= EXF_RESTORE_NIF; + proc->freason |= EXF_RESTORE_NFUNC; } } diff --git a/erts/emulator/beam/erl_process.c b/erts/emulator/beam/erl_process.c index 4b4337ce17..c6a158d0b1 100644 --- a/erts/emulator/beam/erl_process.c +++ b/erts/emulator/beam/erl_process.c @@ -708,10 +708,10 @@ erts_pre_init_process(void) erts_psd_required_locks[ERTS_PSD_DELAYED_GC_TASK_QS].set_locks = ERTS_PSD_DELAYED_GC_TASK_QS_SET_LOCKS; - erts_psd_required_locks[ERTS_PSD_NIF_TRAP_EXPORT].get_locks - = ERTS_PSD_NIF_TRAP_EXPORT_GET_LOCKS; - erts_psd_required_locks[ERTS_PSD_NIF_TRAP_EXPORT].set_locks - = ERTS_PSD_NIF_TRAP_EXPORT_SET_LOCKS; + erts_psd_required_locks[ERTS_PSD_NFUNC_TRAP_WRAPPER].get_locks + = ERTS_PSD_NFUNC_TRAP_WRAPPER_GET_LOCKS; + erts_psd_required_locks[ERTS_PSD_NFUNC_TRAP_WRAPPER].set_locks + = ERTS_PSD_NFUNC_TRAP_WRAPPER_SET_LOCKS; erts_psd_required_locks[ERTS_PSD_ETS_OWNED_TABLES].get_locks = ERTS_PSD_ETS_OWNED_TABLES_GET_LOCKS; @@ -11976,7 +11976,7 @@ delete_process(Process* p) if (pbt) erts_free(ERTS_ALC_T_BPD, (void *) pbt); - erts_destroy_nif_export(p); + erts_destroy_nfunc(p); /* Cleanup psd */ diff --git a/erts/emulator/beam/erl_process.h b/erts/emulator/beam/erl_process.h index a50d9efb89..a965e975d4 100644 --- a/erts/emulator/beam/erl_process.h +++ b/erts/emulator/beam/erl_process.h @@ -812,7 +812,7 @@ erts_reset_max_len(ErtsRunQueue *rq, ErtsRunQueueInfo *rqi) #define ERTS_PSD_SCHED_ID 2 #define ERTS_PSD_CALL_TIME_BP 3 #define ERTS_PSD_DELAYED_GC_TASK_QS 4 -#define ERTS_PSD_NIF_TRAP_EXPORT 5 +#define ERTS_PSD_NFUNC_TRAP_WRAPPER 5 #define ERTS_PSD_ETS_OWNED_TABLES 6 #define ERTS_PSD_ETS_FIXED_TABLES 7 #define ERTS_PSD_DIST_ENTRY 8 @@ -849,8 +849,8 @@ typedef struct { #define ERTS_PSD_DELAYED_GC_TASK_QS_GET_LOCKS ERTS_PROC_LOCK_MAIN #define ERTS_PSD_DELAYED_GC_TASK_QS_SET_LOCKS ERTS_PROC_LOCK_MAIN -#define ERTS_PSD_NIF_TRAP_EXPORT_GET_LOCKS ERTS_PROC_LOCK_MAIN -#define ERTS_PSD_NIF_TRAP_EXPORT_SET_LOCKS ERTS_PROC_LOCK_MAIN +#define ERTS_PSD_NFUNC_TRAP_WRAPPER_GET_LOCKS ERTS_PROC_LOCK_MAIN +#define ERTS_PSD_NFUNC_TRAP_WRAPPER_SET_LOCKS ERTS_PROC_LOCK_MAIN #define ERTS_PSD_ETS_OWNED_TABLES_GET_LOCKS ERTS_PROC_LOCK_STATUS #define ERTS_PSD_ETS_OWNED_TABLES_SET_LOCKS ERTS_PROC_LOCK_STATUS @@ -2037,10 +2037,10 @@ erts_psd_set(Process *p, int ix, void *data) #define ERTS_PROC_SET_DELAYED_GC_TASK_QS(P, PBT) \ ((ErtsProcSysTaskQs *) erts_psd_set((P), ERTS_PSD_DELAYED_GC_TASK_QS, (void *) (PBT))) -#define ERTS_PROC_GET_NIF_TRAP_EXPORT(P) \ - erts_psd_get((P), ERTS_PSD_NIF_TRAP_EXPORT) -#define ERTS_PROC_SET_NIF_TRAP_EXPORT(P, NTE) \ - erts_psd_set((P), ERTS_PSD_NIF_TRAP_EXPORT, (void *) (NTE)) +#define ERTS_PROC_GET_NFUNC_TRAP_WRAPPER(P) \ + erts_psd_get((P), ERTS_PSD_NFUNC_TRAP_WRAPPER) +#define ERTS_PROC_SET_NFUNC_TRAP_WRAPPER(P, NTE) \ + erts_psd_set((P), ERTS_PSD_NFUNC_TRAP_WRAPPER, (void *) (NTE)) #define ERTS_PROC_GET_DIST_ENTRY(P) \ ((DistEntry *) erts_psd_get((P), ERTS_PSD_DIST_ENTRY)) diff --git a/erts/emulator/beam/error.h b/erts/emulator/beam/error.h index 64c08b1570..44a9809a18 100644 --- a/erts/emulator/beam/error.h +++ b/erts/emulator/beam/error.h @@ -66,13 +66,13 @@ #define EXF_OFFSET EXTAG_BITS #define EXF_BITS 7 -#define EXF_PANIC (1<<(0+EXF_OFFSET)) /* ignore catches */ 
-#define EXF_THROWN (1<<(1+EXF_OFFSET)) /* nonlocal return */ -#define EXF_LOG (1<<(2+EXF_OFFSET)) /* write to logger on termination */ -#define EXF_NATIVE (1<<(3+EXF_OFFSET)) /* occurred in native code */ -#define EXF_SAVETRACE (1<<(4+EXF_OFFSET)) /* save stack trace in internal form */ -#define EXF_ARGLIST (1<<(5+EXF_OFFSET)) /* has arglist for top of trace */ -#define EXF_RESTORE_NIF (1<<(6+EXF_OFFSET)) /* restore original bif/nif */ +#define EXF_PANIC (1<<(0+EXF_OFFSET)) /* ignore catches */ +#define EXF_THROWN (1<<(1+EXF_OFFSET)) /* nonlocal return */ +#define EXF_LOG (1<<(2+EXF_OFFSET)) /* write to logger on termination */ +#define EXF_NATIVE (1<<(3+EXF_OFFSET)) /* occurred in native code */ +#define EXF_SAVETRACE (1<<(4+EXF_OFFSET)) /* save stack trace in internal form */ +#define EXF_ARGLIST (1<<(5+EXF_OFFSET)) /* has arglist for top of trace */ +#define EXF_RESTORE_NFUNC (1<<(6+EXF_OFFSET)) /* restore original bif/nif */ #define EXC_FLAGBITS (((1<<(EXF_BITS+EXF_OFFSET))-1) \ & ~((1<<(EXF_OFFSET))-1)) -- cgit v1.2.1 From 987d7c601f2a6460885afaa603405eaabaa72223 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?John=20H=C3=B6gberg?= Date: Tue, 3 Sep 2019 15:41:37 +0200 Subject: erts: Rename apply_bif to call_bif, matching NIFs --- erts/emulator/beam/beam_emu.c | 2 +- erts/emulator/beam/bif.c | 8 ++++---- erts/emulator/beam/bif_instrs.tab | 6 +++--- erts/emulator/beam/erl_nfunc_sched.c | 2 +- erts/emulator/beam/erl_nfunc_sched.h | 4 ++-- erts/emulator/beam/erl_process.c | 2 +- erts/emulator/beam/ops.tab | 2 +- 7 files changed, 13 insertions(+), 13 deletions(-) diff --git a/erts/emulator/beam/beam_emu.c b/erts/emulator/beam/beam_emu.c index e7f116acb2..116bc220c3 100644 --- a/erts/emulator/beam/beam_emu.c +++ b/erts/emulator/beam/beam_emu.c @@ -1181,7 +1181,7 @@ void erts_dirty_process_main(ErtsSchedulerData *esdp) ERTS_UNREQ_PROC_MAIN_LOCK(c_p); ASSERT(!ERTS_PROC_IS_EXITING(c_p)); - if (BeamIsOpCode(*I, op_apply_bif)) { + if (BeamIsOpCode(*I, op_call_bif)) { exiting = erts_call_dirty_bif(esdp, c_p, I, reg); } else { diff --git a/erts/emulator/beam/bif.c b/erts/emulator/beam/bif.c index 1fd02c4b51..07f0134020 100644 --- a/erts/emulator/beam/bif.c +++ b/erts/emulator/beam/bif.c @@ -4981,7 +4981,7 @@ void erts_init_trap_export(Export* ep, Eterm m, Eterm f, Uint a, ep->info.mfa.module = m; ep->info.mfa.function = f; ep->info.mfa.arity = a; - ep->trampoline.op = BeamOpCodeAddr(op_apply_bif); + ep->trampoline.op = BeamOpCodeAddr(op_call_bif); ep->trampoline.raw[1] = (BeamInstr)bif; } @@ -4991,7 +4991,7 @@ void erts_init_trap_export(Export* ep, Eterm m, Eterm f, Uint a, void erts_write_bif_wrapper(Export *export, BeamInstr *address) { BifEntry *entry = &bif_table[export->bif_table_index]; - address[0] = BeamOpCodeAddr(op_apply_bif); + address[0] = BeamOpCodeAddr(op_call_bif); address[1] = (BeamInstr)entry->f; } @@ -5060,7 +5060,7 @@ schedule(Process *c_p, Process *dirty_shadow_proc, { ERTS_LC_ASSERT(ERTS_PROC_LOCK_MAIN & erts_proc_lc_my_proc_locks(c_p)); (void) erts_nfunc_schedule(c_p, dirty_shadow_proc, - mfa, pc, BeamOpCodeAddr(op_apply_bif), + mfa, pc, BeamOpCodeAddr(op_call_bif), dfunc, ifunc, module, function, argc, argv); @@ -5207,7 +5207,7 @@ erts_schedule_bif(Process *proc, pc = i; mfa = &exp->info.mfa; } - else if (BeamIsOpCode(call_instr, op_apply_bif)) { + else if (BeamIsOpCode(call_instr, op_call_bif)) { pc = cp_val(c_p->stop[0]); mfa = erts_code_to_codemfa(i); } diff --git a/erts/emulator/beam/bif_instrs.tab b/erts/emulator/beam/bif_instrs.tab index 04d36b721c..9955344441 100644 --- 
a/erts/emulator/beam/bif_instrs.tab +++ b/erts/emulator/beam/bif_instrs.tab @@ -446,7 +446,7 @@ send() { } call_nif := nif_bif.call_nif.epilogue; -apply_bif := nif_bif.apply_bif.epilogue; +call_bif := nif_bif.call_bif.epilogue; nif_bif.head() { Eterm nif_bif_result; @@ -518,7 +518,7 @@ nif_bif.call_nif() { DTRACE_NIF_RETURN(c_p, codemfa); } -nif_bif.apply_bif() { +nif_bif.call_bif() { /* * At this point, I points to the code[0] in the native function wrapper * for the BIF: @@ -526,7 +526,7 @@ nif_bif.apply_bif() { * code[-3]: Module * code[-2]: Function * code[-1]: Arity - * code[0]: &&apply_bif + * code[0]: &&call_bif * code[1]: Function pointer to BIF function */ diff --git a/erts/emulator/beam/erl_nfunc_sched.c b/erts/emulator/beam/erl_nfunc_sched.c index a37f020cb4..8263a6e9b7 100644 --- a/erts/emulator/beam/erl_nfunc_sched.c +++ b/erts/emulator/beam/erl_nfunc_sched.c @@ -129,7 +129,7 @@ erts_nfunc_schedule(Process *c_p, Process *dirty_shadow_proc, nep->trampoline.info.mfa.module = mod; nep->trampoline.info.mfa.function = func; nep->trampoline.info.mfa.arity = (Uint) argc; - nep->trampoline.call_op = (BeamInstr) instr; /* call_nif || apply_bif */ + nep->trampoline.call_op = (BeamInstr) instr; /* call_bif || call_nif */ nep->trampoline.dfunc = (BeamInstr) dfunc; nep->func = ifunc; used_proc->arity = argc; diff --git a/erts/emulator/beam/erl_nfunc_sched.h b/erts/emulator/beam/erl_nfunc_sched.h index 4044d59f82..920dea39e9 100644 --- a/erts/emulator/beam/erl_nfunc_sched.h +++ b/erts/emulator/beam/erl_nfunc_sched.h @@ -39,7 +39,7 @@ typedef struct { struct { ErtsCodeInfo info; - BeamInstr call_op; /* call_nif || apply_bif */ + BeamInstr call_op; /* call_bif || call_nif */ BeamInstr dfunc; } trampoline; @@ -173,7 +173,7 @@ erts_proc_shadow2real(Process *c_p) #define ERTS_NFUNC_SCHED_INTERNALS__ #define ERTS_I_BEAM_OP_TO_NFUNC(I) \ - (ASSERT(BeamIsOpCode(*(I), op_apply_bif) || \ + (ASSERT(BeamIsOpCode(*(I), op_call_bif) || \ BeamIsOpCode(*(I), op_call_nif)), \ ((ErtsNativeFunc *) (((char *) (I)) - offsetof(ErtsNativeFunc, trampoline.call_op)))) diff --git a/erts/emulator/beam/erl_process.c b/erts/emulator/beam/erl_process.c index c6a158d0b1..0d76bb844b 100644 --- a/erts/emulator/beam/erl_process.c +++ b/erts/emulator/beam/erl_process.c @@ -6479,7 +6479,7 @@ schedule_out_process(ErtsRunQueue *c_rq, erts_aint32_t state, Process *p, ASSERT(!(state & (ERTS_PSFLG_DIRTY_IO_PROC |ERTS_PSFLG_DIRTY_CPU_PROC)) || (BeamIsOpCode(*p->i, op_call_nif) - || BeamIsOpCode(*p->i, op_apply_bif))); + || BeamIsOpCode(*p->i, op_call_bif))); a = state; diff --git a/erts/emulator/beam/ops.tab b/erts/emulator/beam/ops.tab index a72ed6bce8..c51fbd58d9 100644 --- a/erts/emulator/beam/ops.tab +++ b/erts/emulator/beam/ops.tab @@ -587,7 +587,7 @@ put_list s s d %cold normal_exit continue_exit -apply_bif +call_bif call_nif call_error_handler error_action_code -- cgit v1.2.1 From 638a39df62055f76d052a4d4910d97675c34b72f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?John=20H=C3=B6gberg?= Date: Mon, 9 Sep 2019 11:35:51 +0200 Subject: erts: Use local trap exports for list functions This prevents trapping from showing up in tracing, fixing certain tests that assume the old and erroneous trace behavior for BIFs. Note that this does not fix bogus arguments showing up on exceptions as many of these functions still pass the remainder of of the list to themselves when trapping. 
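For illustration, after this change a traced trapping list BIF produces a single call event per call from Erlang code, no matter how many times it traps internally (hypothetical helper, not part of this commit):

    %% Hypothetical helper: count call trace events for lists:reverse/2.
    %% The list is long enough that the BIF traps several times, but only
    %% the initial call goes through the (traced) export entry.
    trace_reverse_once() ->
        Self = self(),
        Pid = spawn_link(fun() ->
                                 receive go -> ok end,
                                 _ = lists:reverse(lists:seq(1, 200000), []),
                                 Self ! done
                         end),
        1 = erlang:trace(Pid, true, [call]),
        1 = erlang:trace_pattern({lists, reverse, 2}, true, [global]),
        Pid ! go,
        receive done -> ok end,
        count_call_events(0).

    count_call_events(N) ->
        receive
            {trace, _Pid, call, {lists, reverse, _Args}} ->
                count_call_events(N + 1)
        after 0 ->
            N %% expected to be 1 after this change
        end.
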
--- erts/emulator/beam/atom.names | 2 ++ erts/emulator/beam/erl_bif_lists.c | 68 +++++++++++++++++++++++++++++--------- erts/emulator/beam/erl_init.c | 1 + erts/emulator/beam/global.h | 3 ++ 4 files changed, 59 insertions(+), 15 deletions(-) diff --git a/erts/emulator/beam/atom.names b/erts/emulator/beam/atom.names index 93ba56dccd..4c67c2029f 100644 --- a/erts/emulator/beam/atom.names +++ b/erts/emulator/beam/atom.names @@ -404,6 +404,7 @@ atom min_bin_vheap_size atom minor atom minor_version atom Minus='-' +atom MinusMinus='--' atom module atom module_info atom monitored_by @@ -494,6 +495,7 @@ atom packet atom packet_size atom parallelism atom Plus='+' +atom PlusPlus='++' atom pause atom pending atom pending_driver diff --git a/erts/emulator/beam/erl_bif_lists.c b/erts/emulator/beam/erl_bif_lists.c index fa2edfef1e..9d485abc35 100644 --- a/erts/emulator/beam/erl_bif_lists.c +++ b/erts/emulator/beam/erl_bif_lists.c @@ -32,8 +32,46 @@ #include "bif.h" #include "erl_binary.h" +static Export plusplus_trap_export; +static Export append_trap_export; -static Eterm keyfind(int Bif, Process* p, Eterm Key, Eterm Pos, Eterm List); +static Export minusminus_trap_export; +static Export subtract_trap_export; + +static Export member_trap_export; + +static Export reverse_trap_export; + +static Export keymember_trap_export; +static Export keysearch_trap_export; +static Export keyfind_trap_export; + +void erts_init_bif_lists(void) { + erts_init_trap_export(&plusplus_trap_export, am_erlang, am_PlusPlus, 2, + ebif_plusplus_2); + erts_init_trap_export(&append_trap_export, am_erlang, am_append, 2, + append_2); + + erts_init_trap_export(&minusminus_trap_export, am_erlang, am_MinusMinus, 2, + ebif_minusminus_2); + erts_init_trap_export(&subtract_trap_export, am_lists, am_subtract, 2, + subtract_2); + + erts_init_trap_export(&reverse_trap_export, am_lists, am_reverse, 2, + lists_reverse_2); + + erts_init_trap_export(&member_trap_export, am_lists, am_member, 2, + lists_member_2); + + erts_init_trap_export(&keymember_trap_export, am_lists, am_keymember, 3, + lists_keymember_3); + erts_init_trap_export(&keysearch_trap_export, am_lists, am_keysearch, 3, + lists_keysearch_3); + erts_init_trap_export(&keyfind_trap_export, am_lists, am_keyfind, 3, + lists_keyfind_3); +} + +static Eterm keyfind(Export* Bif, Process* p, Eterm Key, Eterm Pos, Eterm List); /* erlang:'++'/2 * @@ -308,12 +346,12 @@ static Eterm append(Export *bif_entry, BIF_ALIST_2) { Eterm ebif_plusplus_2(BIF_ALIST_2) { - return append(bif_export[BIF_ebif_plusplus_2], BIF_CALL_ARGS); + return append(&plusplus_trap_export, BIF_CALL_ARGS); } BIF_RETTYPE append_2(BIF_ALIST_2) { - return append(bif_export[BIF_append_2], BIF_CALL_ARGS); + return append(&append_trap_export, BIF_CALL_ARGS); } /* erlang:'--'/2 @@ -1039,11 +1077,11 @@ static Eterm subtract(Export *bif_entry, BIF_ALIST_2) { } BIF_RETTYPE ebif_minusminus_2(BIF_ALIST_2) { - return subtract(bif_export[BIF_ebif_minusminus_2], BIF_CALL_ARGS); + return subtract(&minusminus_trap_export, BIF_CALL_ARGS); } BIF_RETTYPE subtract_2(BIF_ALIST_2) { - return subtract(bif_export[BIF_subtract_2], BIF_CALL_ARGS); + return subtract(&subtract_trap_export, BIF_CALL_ARGS); } @@ -1068,7 +1106,7 @@ BIF_RETTYPE lists_member_2(BIF_ALIST_2) while (is_list(list)) { if (--max_iter < 0) { BUMP_ALL_REDS(BIF_P); - BIF_TRAP2(bif_export[BIF_lists_member_2], BIF_P, term, list); + BIF_TRAP2(&member_trap_export, BIF_P, term, list); } item = CAR(list_val(list)); if ((item == term) || (non_immed_key && eq(item, term))) { @@ -1130,7 
+1168,7 @@ static BIF_RETTYPE lists_reverse_alloc(Process *c_p, } ASSERT(is_list(tail) && cells_left == 0); - BIF_TRAP2(bif_export[BIF_lists_reverse_2], c_p, list, tail); + BIF_TRAP2(&reverse_trap_export, c_p, list, tail); } static BIF_RETTYPE lists_reverse_onheap(Process *c_p, @@ -1179,7 +1217,7 @@ static BIF_RETTYPE lists_reverse_onheap(Process *c_p, } BUMP_ALL_REDS(c_p); - BIF_TRAP2(bif_export[BIF_lists_reverse_2], c_p, list, tail); + BIF_TRAP2(&reverse_trap_export, c_p, list, tail); } BIF_ERROR(c_p, BADARG); @@ -1209,7 +1247,7 @@ lists_keymember_3(BIF_ALIST_3) { Eterm res; - res = keyfind(BIF_lists_keymember_3, BIF_P, + res = keyfind(&keymember_trap_export, BIF_P, BIF_ARG_1, BIF_ARG_2, BIF_ARG_3); if (is_value(res) && is_tuple(res)) { return am_true; @@ -1223,7 +1261,7 @@ lists_keysearch_3(BIF_ALIST_3) { Eterm res; - res = keyfind(BIF_lists_keysearch_3, BIF_P, + res = keyfind(&keysearch_trap_export, BIF_P, BIF_ARG_1, BIF_ARG_2, BIF_ARG_3); if (is_non_value(res) || is_not_tuple(res)) { return res; @@ -1236,12 +1274,12 @@ lists_keysearch_3(BIF_ALIST_3) BIF_RETTYPE lists_keyfind_3(BIF_ALIST_3) { - return keyfind(BIF_lists_keyfind_3, BIF_P, + return keyfind(&keyfind_trap_export, BIF_P, BIF_ARG_1, BIF_ARG_2, BIF_ARG_3); } static Eterm -keyfind(int Bif, Process* p, Eterm Key, Eterm Pos, Eterm List) +keyfind(Export *Bif, Process* p, Eterm Key, Eterm Pos, Eterm List) { int max_iter = 10 * CONTEXT_REDS; Sint pos; @@ -1257,7 +1295,7 @@ keyfind(int Bif, Process* p, Eterm Key, Eterm Pos, Eterm List) while (is_list(List)) { if (--max_iter < 0) { BUMP_ALL_REDS(p); - BIF_TRAP3(bif_export[Bif], p, Key, Pos, List); + BIF_TRAP3(Bif, p, Key, Pos, List); } term = CAR(list_val(List)); List = CDR(list_val(List)); @@ -1282,7 +1320,7 @@ keyfind(int Bif, Process* p, Eterm Key, Eterm Pos, Eterm List) while (is_list(List)) { if (--max_iter < 0) { BUMP_ALL_REDS(p); - BIF_TRAP3(bif_export[Bif], p, Key, Pos, List); + BIF_TRAP3(Bif, p, Key, Pos, List); } term = CAR(list_val(List)); List = CDR(list_val(List)); @@ -1300,7 +1338,7 @@ keyfind(int Bif, Process* p, Eterm Key, Eterm Pos, Eterm List) while (is_list(List)) { if (--max_iter < 0) { BUMP_ALL_REDS(p); - BIF_TRAP3(bif_export[Bif], p, Key, Pos, List); + BIF_TRAP3(Bif, p, Key, Pos, List); } term = CAR(list_val(List)); List = CDR(list_val(List)); diff --git a/erts/emulator/beam/erl_init.c b/erts/emulator/beam/erl_init.c index 547e4064a2..4d0ebbd1ed 100644 --- a/erts/emulator/beam/erl_init.c +++ b/erts/emulator/beam/erl_init.c @@ -354,6 +354,7 @@ erl_init(int ncpu, erts_init_bif_chksum(); erts_init_bif_binary(); erts_init_bif_guard(); + erts_init_bif_lists(); erts_init_bif_persistent_term(); erts_init_bif_re(); erts_init_unicode(); /* after RE to get access to PCRE unicode */ diff --git a/erts/emulator/beam/global.h b/erts/emulator/beam/global.h index acd42319cb..5bd0882834 100644 --- a/erts/emulator/beam/global.h +++ b/erts/emulator/beam/global.h @@ -912,6 +912,9 @@ Eterm erts_trapping_length_1(Process* p, Eterm* args); Eterm erl_is_function(Process* p, Eterm arg1, Eterm arg2); +/* beam_bif_lists.c */ +void erts_init_bif_lists(void); + /* beam_bif_load.c */ Eterm erts_check_process_code(Process *c_p, Eterm module, int *redsp, int fcalls); Eterm erts_proc_copy_literal_area(Process *c_p, int *redsp, int fcalls, int gc_allowed); -- cgit v1.2.1 From c42be1bb5540ed0ffafdc46b506a1bbe0cbaa2df Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?John=20H=C3=B6gberg?= Date: Tue, 10 Sep 2019 16:23:31 +0200 Subject: erts: Let call_bif/call_nif take arguments instead of hardcoding them --- 
erts/emulator/beam/beam_debug.c | 2 +- erts/emulator/beam/beam_emu.c | 4 ++-- erts/emulator/beam/bif.c | 8 ++++---- erts/emulator/beam/bif_instrs.tab | 17 ++++++++++------- erts/emulator/beam/erl_nfunc_sched.h | 4 ++-- erts/emulator/beam/erl_nif.c | 6 +++--- erts/emulator/beam/erl_process.c | 4 ++-- erts/emulator/beam/ops.tab | 4 ++-- 8 files changed, 26 insertions(+), 23 deletions(-) diff --git a/erts/emulator/beam/beam_debug.c b/erts/emulator/beam/beam_debug.c index 4d52435139..6a9a6b7dc9 100644 --- a/erts/emulator/beam/beam_debug.c +++ b/erts/emulator/beam/beam_debug.c @@ -332,7 +332,7 @@ erts_debug_disassemble_1(BIF_ALIST_1) "unknown " HEXF "\n", instr); code_ptr++; } - if (i == op_call_nif) { + if (i == op_call_nif_WWW) { /* * The rest of the code will not be executed. Don't disassemble any * more code in this function. diff --git a/erts/emulator/beam/beam_emu.c b/erts/emulator/beam/beam_emu.c index 116bc220c3..20c4f83a81 100644 --- a/erts/emulator/beam/beam_emu.c +++ b/erts/emulator/beam/beam_emu.c @@ -1181,11 +1181,11 @@ void erts_dirty_process_main(ErtsSchedulerData *esdp) ERTS_UNREQ_PROC_MAIN_LOCK(c_p); ASSERT(!ERTS_PROC_IS_EXITING(c_p)); - if (BeamIsOpCode(*I, op_call_bif)) { + if (BeamIsOpCode(*I, op_call_bif_W)) { exiting = erts_call_dirty_bif(esdp, c_p, I, reg); } else { - ASSERT(BeamIsOpCode(*I, op_call_nif)); + ASSERT(BeamIsOpCode(*I, op_call_nif_WWW)); exiting = erts_call_dirty_nif(esdp, c_p, I, reg); } diff --git a/erts/emulator/beam/bif.c b/erts/emulator/beam/bif.c index 07f0134020..d3fd99932a 100644 --- a/erts/emulator/beam/bif.c +++ b/erts/emulator/beam/bif.c @@ -4981,7 +4981,7 @@ void erts_init_trap_export(Export* ep, Eterm m, Eterm f, Uint a, ep->info.mfa.module = m; ep->info.mfa.function = f; ep->info.mfa.arity = a; - ep->trampoline.op = BeamOpCodeAddr(op_call_bif); + ep->trampoline.op = BeamOpCodeAddr(op_call_bif_W); ep->trampoline.raw[1] = (BeamInstr)bif; } @@ -4991,7 +4991,7 @@ void erts_init_trap_export(Export* ep, Eterm m, Eterm f, Uint a, void erts_write_bif_wrapper(Export *export, BeamInstr *address) { BifEntry *entry = &bif_table[export->bif_table_index]; - address[0] = BeamOpCodeAddr(op_call_bif); + address[0] = BeamOpCodeAddr(op_call_bif_W); address[1] = (BeamInstr)entry->f; } @@ -5060,7 +5060,7 @@ schedule(Process *c_p, Process *dirty_shadow_proc, { ERTS_LC_ASSERT(ERTS_PROC_LOCK_MAIN & erts_proc_lc_my_proc_locks(c_p)); (void) erts_nfunc_schedule(c_p, dirty_shadow_proc, - mfa, pc, BeamOpCodeAddr(op_call_bif), + mfa, pc, BeamOpCodeAddr(op_call_bif_W), dfunc, ifunc, module, function, argc, argv); @@ -5207,7 +5207,7 @@ erts_schedule_bif(Process *proc, pc = i; mfa = &exp->info.mfa; } - else if (BeamIsOpCode(call_instr, op_call_bif)) { + else if (BeamIsOpCode(call_instr, op_call_bif_W)) { pc = cp_val(c_p->stop[0]); mfa = erts_code_to_codemfa(i); } diff --git a/erts/emulator/beam/bif_instrs.tab b/erts/emulator/beam/bif_instrs.tab index 9955344441..42abd4b6c6 100644 --- a/erts/emulator/beam/bif_instrs.tab +++ b/erts/emulator/beam/bif_instrs.tab @@ -456,7 +456,7 @@ nif_bif.head() { ErtsCodeMFA *codemfa; } -nif_bif.call_nif() { +nif_bif.call_nif(Func, NifMod, DirtyFunc) { /* * call_nif is always first instruction in function: * @@ -466,11 +466,14 @@ nif_bif.call_nif() { * I[0]: &&call_nif * I[1]: Function pointer to NIF function * I[2]: Pointer to erl_module_nif - * I[3]: Function pointer to dirty NIF + * I[3]: Function pointer to dirty NIF. This is not used in this + * instruction, but dirty schedulers look at it. 
* * This layout is determined by the ErtsNativeFunc struct */ + (void)$DirtyFunc; + ERTS_MSACC_SET_STATE_CACHED_M_X(ERTS_MSACC_STATE_NIF); codemfa = erts_code_to_codemfa(I); @@ -488,12 +491,12 @@ nif_bif.call_nif() { ASSERT(!ERTS_PROC_IS_EXITING(c_p)); { typedef Eterm NifF(struct enif_environment_t*, int argc, Eterm argv[]); - NifF* fp = vbf = (NifF*) I[1]; + NifF* fp = vbf = (NifF*) $Func; struct enif_environment_t env; ASSERT(c_p->scheduler_data); live_hf_end = c_p->mbuf; ERTS_CHK_MBUF_SZ(c_p); - erts_pre_nif(&env, c_p, (struct erl_module_nif*)I[2], NULL); + erts_pre_nif(&env, c_p, (struct erl_module_nif*)$NifMod, NULL); ASSERT((c_p->scheduler_data)->current_nif == NULL); (c_p->scheduler_data)->current_nif = &env; @@ -518,7 +521,7 @@ nif_bif.call_nif() { DTRACE_NIF_RETURN(c_p, codemfa); } -nif_bif.call_bif() { +nif_bif.call_bif(Func) { /* * At this point, I points to the code[0] in the native function wrapper * for the BIF: @@ -538,7 +541,7 @@ nif_bif.call_bif() { codemfa = erts_code_to_codemfa(I); - ERTS_MSACC_SET_BIF_STATE_CACHED_X(codemfa->module, (BifFunction)I[1]); + ERTS_MSACC_SET_BIF_STATE_CACHED_X(codemfa->module, (BifFunction)$Func); /* In case we apply process_info/1,2 or load_nif/1 */ c_p->current = codemfa; @@ -550,7 +553,7 @@ nif_bif.call_bif() { SWAPOUT; ERTS_DBG_CHK_REDS(c_p, FCALLS - 1); c_p->fcalls = FCALLS - 1; - vbf = (BifFunction) I[1]; + vbf = (BifFunction)$Func; PROCESS_MAIN_CHK_LOCKS(c_p); bif_nif_arity = codemfa->arity; ASSERT(bif_nif_arity <= 4); diff --git a/erts/emulator/beam/erl_nfunc_sched.h b/erts/emulator/beam/erl_nfunc_sched.h index 920dea39e9..4dae242d4f 100644 --- a/erts/emulator/beam/erl_nfunc_sched.h +++ b/erts/emulator/beam/erl_nfunc_sched.h @@ -173,8 +173,8 @@ erts_proc_shadow2real(Process *c_p) #define ERTS_NFUNC_SCHED_INTERNALS__ #define ERTS_I_BEAM_OP_TO_NFUNC(I) \ - (ASSERT(BeamIsOpCode(*(I), op_call_bif) || \ - BeamIsOpCode(*(I), op_call_nif)), \ + (ASSERT(BeamIsOpCode(*(I), op_call_bif_W) || \ + BeamIsOpCode(*(I), op_call_nif_WWW)), \ ((ErtsNativeFunc *) (((char *) (I)) - offsetof(ErtsNativeFunc, trampoline.call_op)))) diff --git a/erts/emulator/beam/erl_nif.c b/erts/emulator/beam/erl_nif.c index 04e6c15982..6e27b4b7cb 100644 --- a/erts/emulator/beam/erl_nif.c +++ b/erts/emulator/beam/erl_nif.c @@ -335,7 +335,7 @@ schedule(ErlNifEnv* env, NativeFunPtr direct_fp, NativeFunPtr indirect_fp, ep = erts_nfunc_schedule(c_p, dirty_shadow_proc, c_p->current, cp_val(c_p->stop[0]), - BeamOpCodeAddr(op_call_nif), + BeamOpCodeAddr(op_call_nif_WWW), direct_fp, indirect_fp, mod, func_name, argc, (const Eterm *) argv); @@ -4347,12 +4347,12 @@ Eterm erts_load_nif(Process *c_p, BeamInstr *I, Eterm filename, Eterm args) code_ptr = erts_codeinfo_to_code(ci); if (ci->u.gen_bp == NULL) { - code_ptr[0] = BeamOpCodeAddr(op_call_nif); + code_ptr[0] = BeamOpCodeAddr(op_call_nif_WWW); } else { /* Function traced, patch the original instruction word */ GenericBp* g = ci->u.gen_bp; ASSERT(BeamIsOpCode(code_ptr[0], op_i_generic_breakpoint)); - g->orig_instr = BeamOpCodeAddr(op_call_nif); + g->orig_instr = BeamOpCodeAddr(op_call_nif_WWW); } if (f->flags) { code_ptr[3] = (BeamInstr) f->fptr; diff --git a/erts/emulator/beam/erl_process.c b/erts/emulator/beam/erl_process.c index 0d76bb844b..15cc07ded1 100644 --- a/erts/emulator/beam/erl_process.c +++ b/erts/emulator/beam/erl_process.c @@ -6478,8 +6478,8 @@ schedule_out_process(ErtsRunQueue *c_rq, erts_aint32_t state, Process *p, ASSERT(!(state & (ERTS_PSFLG_DIRTY_IO_PROC |ERTS_PSFLG_DIRTY_CPU_PROC)) - || (BeamIsOpCode(*p->i, 
op_call_nif) - || BeamIsOpCode(*p->i, op_call_bif))); + || (BeamIsOpCode(*p->i, op_call_nif_WWW) + || BeamIsOpCode(*p->i, op_call_bif_W))); a = state; diff --git a/erts/emulator/beam/ops.tab b/erts/emulator/beam/ops.tab index c51fbd58d9..1d336e4b7b 100644 --- a/erts/emulator/beam/ops.tab +++ b/erts/emulator/beam/ops.tab @@ -587,8 +587,8 @@ put_list s s d %cold normal_exit continue_exit -call_bif -call_nif +call_bif W +call_nif W W W call_error_handler error_action_code return_trace -- cgit v1.2.1 From 1c49fba50b7536865a340664f4004f501c3a2109 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?John=20H=C3=B6gberg?= Date: Tue, 10 Sep 2019 16:42:49 +0200 Subject: erts: Share printable_return_address with erl_bif_info --- erts/emulator/beam/beam_emu.c | 20 ++++++-------------- erts/emulator/beam/bif_instrs.tab | 2 +- erts/emulator/beam/erl_bif_info.c | 23 +---------------------- erts/emulator/beam/global.h | 1 + 4 files changed, 9 insertions(+), 37 deletions(-) diff --git a/erts/emulator/beam/beam_emu.c b/erts/emulator/beam/beam_emu.c index 20c4f83a81..e8a0c666e3 100644 --- a/erts/emulator/beam/beam_emu.c +++ b/erts/emulator/beam/beam_emu.c @@ -295,7 +295,6 @@ do { \ */ static void init_emulator_finish(void) ERTS_NOINLINE; static ErtsCodeMFA *ubif2mfa(void* uf) ERTS_NOINLINE; -static BeamInstr *printable_return_address(Process* p, Eterm *E) ERTS_NOINLINE; static BeamInstr* handle_error(Process* c_p, BeamInstr* pc, Eterm* reg, ErtsCodeMFA* bif_mfa) ERTS_NOINLINE; static BeamInstr* call_error_handler(Process* p, ErtsCodeMFA* mfa, @@ -1259,7 +1258,7 @@ Eterm error_atom[NUMBER_EXIT_CODES] = { * * This is needed to generate correct stacktraces when throwing errors from * instructions that return like an ordinary function, such as call_nif. */ -static BeamInstr *printable_return_address(Process* p, Eterm *E) { +BeamInstr *erts_printable_return_address(Process* p, Eterm *E) { Eterm *ptr = E; ASSERT(is_CP(*ptr)); @@ -1576,9 +1575,6 @@ expand_error_value(Process* c_p, Uint freason, Eterm Value) { static void gather_stacktrace(Process* p, struct StackTrace* s, int depth) { - BeamInstr i_return_time_trace; - BeamInstr i_return_to_trace; - BeamInstr i_return_trace; BeamInstr *prev; Eterm *ptr; @@ -1586,11 +1582,7 @@ gather_stacktrace(Process* p, struct StackTrace* s, int depth) return; } - i_return_time_trace = beam_return_time_trace[0]; - i_return_to_trace = beam_return_to_trace[0]; - i_return_trace = beam_return_trace[0]; - - prev = s->depth ? s->trace[s->depth-1] : s->pc; + prev = s->depth ? 
s->trace[s->depth - 1] : s->pc; ptr = p->stop; /* @@ -1606,12 +1598,12 @@ gather_stacktrace(Process* p, struct StackTrace* s, int depth) if (is_CP(*ptr)) { BeamInstr *cp = cp_val(*ptr); - if (*cp == i_return_time_trace) { + if (cp == beam_exception_trace || cp == beam_return_trace) { + ptr += 3; + } else if (cp == beam_return_time_trace) { ptr += 2; - } else if (*cp == i_return_to_trace) { + } else if (cp == beam_return_to_trace) { ptr += 1; - } else if (*cp == i_return_trace) { - ptr += 3; } else { if (cp != prev) { /* Record non-duplicates only */ diff --git a/erts/emulator/beam/bif_instrs.tab b/erts/emulator/beam/bif_instrs.tab index 42abd4b6c6..de5305bde4 100644 --- a/erts/emulator/beam/bif_instrs.tab +++ b/erts/emulator/beam/bif_instrs.tab @@ -606,7 +606,7 @@ nif_bif.epilogue() { $DISPATCH(); } { - BeamInstr *cp = printable_return_address(c_p, E); + BeamInstr *cp = erts_printable_return_address(c_p, E); ASSERT(VALID_INSTR(*cp)); I = handle_error(c_p, cp, reg, c_p->current); } diff --git a/erts/emulator/beam/erl_bif_info.c b/erts/emulator/beam/erl_bif_info.c index 946fefa14f..21e8585e83 100644 --- a/erts/emulator/beam/erl_bif_info.c +++ b/erts/emulator/beam/erl_bif_info.c @@ -2024,7 +2024,6 @@ current_function(Process *c_p, ErtsHeapFactory *hfact, Process* rp, if (c_p == rp && !(flags & ERTS_PI_FLAG_REQUEST_FOR_OTHER)) { BeamInstr* return_address; FunctionInfo caller_fi; - Eterm *ptr; /* * The current function is erlang:process_info/{1,2}, and we've @@ -2033,27 +2032,7 @@ current_function(Process *c_p, ErtsHeapFactory *hfact, Process* rp, * stack instead, which is safe since process_info is a "heavy" BIF * that is only called through its export entry. */ - - return_address = NULL; - ptr = STACK_TOP(rp); - ASSERT(is_CP(*ptr)); - - while (ptr < STACK_START(rp)) { - BeamInstr *cp = cp_val(*ptr); - - if (*cp == BeamOpCodeAddr(op_return_trace)) { - ptr += 3; - } else if (*cp == BeamOpCodeAddr(op_i_return_time_trace)) { - ptr += 2; - } else if (*cp == BeamOpCodeAddr(op_i_return_to_trace)) { - ptr += 1; - } else { - return_address = cp; - break; - } - } - - ASSERT(return_address != NULL); + return_address = erts_printable_return_address(rp, STACK_TOP(rp)); erts_lookup_function_info(&caller_fi, return_address, full_info); if (caller_fi.mfa) { diff --git a/erts/emulator/beam/global.h b/erts/emulator/beam/global.h index 5bd0882834..b86709b093 100644 --- a/erts/emulator/beam/global.h +++ b/erts/emulator/beam/global.h @@ -1156,6 +1156,7 @@ void erts_dirty_process_main(ErtsSchedulerData *); Eterm build_stacktrace(Process* c_p, Eterm exc); Eterm expand_error_value(Process* c_p, Uint freason, Eterm Value); void erts_save_stacktrace(Process* p, struct StackTrace* s, int depth); +BeamInstr *erts_printable_return_address(Process* p, Eterm *E) ERTS_NOINLINE; /* erl_init.c */ -- cgit v1.2.1
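An observable consequence of sharing this helper is that process_info(self(), current_function) keeps reporting the function that made the call, found via the first real return address on the stack (illustrative snippet, not part of this commit):

    %% Illustrative only: process_info/2 is a heavy BIF called through its
    %% export entry, so the current function is taken from the caller's
    %% return address on the stack, skipping any trace return frames.
    caller_mfa() ->
        {current_function, MFA} = erlang:process_info(self(), current_function),
        %% MFA is expected to be {?MODULE, caller_mfa, 0} here.
        MFA.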