author | Randolph Chung <tausq@debian.org> | 2004-04-23 02:54:21 +0000
---|---|---
committer | Randolph Chung <tausq@debian.org> | 2004-04-23 02:54:21 +0000
commit | abc485a155fa7a84d07dc3cf376f3be050602cc9 (patch) |
tree | 0c05d4eb95773ce30180657d396c57f498ace187 /gdb/hppa-hpux-tdep.c |
parent | 369aa5203706c51ab15c9ecc8edfff92c4e10d69 (diff) |
download | binutils-gdb-abc485a155fa7a84d07dc3cf376f3be050602cc9.tar.gz |
2004-04-22 Randolph Chung <tausq@debian.org>
* hppa-tdep.h (find_unwind_entry, hppa_get_field, hppa_extract_5_load)
(hppa_extract_5R_store, hppa_extract_5r_store, hppa_extract_17)
(hppa_extract_21, hppa_extract_14, hppa_low_sign_extend)
(hppa_sign_extend): Add prototype.
* hppa-tdep.c (get_field, extract_5_load, extract_5R_store)
(extract_5r_store, extract_17, extract_21, extract_14, low_sign_extend)
(sign_extend): Rename with hppa_ prefix and make non-static. Other
hppa targets will also use these functions.
(find_unwind_entry): Remove prototype (moved to hppa-tdep.h).
(hppa_in_solib_call_trampoline, hppa_in_solib_return_trampoline)
(hppa_skip_trampoline_code): Move to hppa-hpux-tdep.c.
(hppa_gdbarch_init): Remove gdbarch setting of
skip_trampoline_code, in_solib_call_trampoline and
in_solib_return_trampoline.
* hppa-hpux-tdep.c (hppa32_hpux_in_solib_call_trampoline)
(hppa64_hpux_in_solib_call_trampoline): New functions, split from
hppa_in_solib_call_trampoline.
(hppa_hpux_in_solib_return_trampoline, hppa_hpux_skip_trampoline_code):
Moved from hppa-tdep.c.
(hppa_hpux_init_abi): Set gdbarch for skip_trampoline_code,
in_solib_call_trampoline and in_solib_return_trampoline.
Diffstat (limited to 'gdb/hppa-hpux-tdep.c')
-rw-r--r-- | gdb/hppa-hpux-tdep.c | 522 |
1 file changed, 522 insertions, 0 deletions
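The stub detection added in the patch below works by matching HPPA instruction words against fixed bit patterns: within a parameter-relocation or export stub, the first `bl` encountered means we are on the call path, while a `bv` or `be` means we are on the return path. The following standalone sketch (not GDB code) illustrates that classification loop using the same masks the patch checks with `read_memory_integer ()`; the sample instruction words and the helper names are made up for illustration.

```c
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

enum stub_path { PATH_UNKNOWN, PATH_CALL, PATH_RETURN };

/* Scan a stub's instruction words; the masks mirror the ones used by
   hppa32_hpux_in_solib_call_trampoline in the diff below.  */
static enum stub_path
classify_stub_path (const uint32_t *insns, size_t count)
{
  size_t i;

  for (i = 0; i < count; i++)
    {
      uint32_t insn = insns[i];

      /* Looks like a bl: we are on the call path.  */
      if ((insn & 0xfc00e000) == 0xe8000000)
        return PATH_CALL;

      /* Looks like a bv or be: we are on the return path.  */
      if ((insn & 0xfc00e001) == 0xe800c000
          || (insn & 0xfc000000) == 0xe0000000)
        return PATH_RETURN;
    }

  return PATH_UNKNOWN;
}

int
main (void)
{
  /* Made-up instruction stream whose second word matches the bl pattern.  */
  const uint32_t sample[] = { 0x34210000, 0xe8000000 };

  printf ("stub path = %d\n", classify_stub_path (sample, 2));
  return 0;
}
```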
```diff
diff --git a/gdb/hppa-hpux-tdep.c b/gdb/hppa-hpux-tdep.c
index cf5fccbb818..a047d42859e 100644
--- a/gdb/hppa-hpux-tdep.c
+++ b/gdb/hppa-hpux-tdep.c
@@ -164,6 +164,515 @@ hppa64_hpux_frame_find_saved_regs_in_sigtramp (struct frame_info *fi,
     }
 }
 
+/* Return one if PC is in the call path of a trampoline, else return zero.
+
+   Note we return one for *any* call trampoline (long-call, arg-reloc), not
+   just shared library trampolines (import, export).  */
+
+static int
+hppa32_hpux_in_solib_call_trampoline (CORE_ADDR pc, char *name)
+{
+  struct minimal_symbol *minsym;
+  struct unwind_table_entry *u;
+  static CORE_ADDR dyncall = 0;
+  static CORE_ADDR sr4export = 0;
+
+  /* FIXME XXX - dyncall and sr4export must be initialized whenever we get a
+     new exec file */
+
+  /* First see if PC is in one of the two C-library trampolines.  */
+  if (!dyncall)
+    {
+      minsym = lookup_minimal_symbol ("$$dyncall", NULL, NULL);
+      if (minsym)
+        dyncall = SYMBOL_VALUE_ADDRESS (minsym);
+      else
+        dyncall = -1;
+    }
+
+  if (!sr4export)
+    {
+      minsym = lookup_minimal_symbol ("_sr4export", NULL, NULL);
+      if (minsym)
+        sr4export = SYMBOL_VALUE_ADDRESS (minsym);
+      else
+        sr4export = -1;
+    }
+
+  if (pc == dyncall || pc == sr4export)
+    return 1;
+
+  minsym = lookup_minimal_symbol_by_pc (pc);
+  if (minsym && strcmp (DEPRECATED_SYMBOL_NAME (minsym), ".stub") == 0)
+    return 1;
+
+  /* Get the unwind descriptor corresponding to PC, return zero
+     if no unwind was found.  */
+  u = find_unwind_entry (pc);
+  if (!u)
+    return 0;
+
+  /* If this isn't a linker stub, then return now.  */
+  if (u->stub_unwind.stub_type == 0)
+    return 0;
+
+  /* By definition a long-branch stub is a call stub.  */
+  if (u->stub_unwind.stub_type == LONG_BRANCH)
+    return 1;
+
+  /* The call and return path execute the same instructions within
+     an IMPORT stub!  So an IMPORT stub is both a call and return
+     trampoline.  */
+  if (u->stub_unwind.stub_type == IMPORT)
+    return 1;
+
+  /* Parameter relocation stubs always have a call path and may have a
+     return path.  */
+  if (u->stub_unwind.stub_type == PARAMETER_RELOCATION
+      || u->stub_unwind.stub_type == EXPORT)
+    {
+      CORE_ADDR addr;
+
+      /* Search forward from the current PC until we hit a branch
+         or the end of the stub.  */
+      for (addr = pc; addr <= u->region_end; addr += 4)
+        {
+          unsigned long insn;
+
+          insn = read_memory_integer (addr, 4);
+
+          /* Does it look like a bl?  If so then it's the call path, if
+             we find a bv or be first, then we're on the return path.  */
+          if ((insn & 0xfc00e000) == 0xe8000000)
+            return 1;
+          else if ((insn & 0xfc00e001) == 0xe800c000
+                   || (insn & 0xfc000000) == 0xe0000000)
+            return 0;
+        }
+
+      /* Should never happen.  */
+      warning ("Unable to find branch in parameter relocation stub.\n");
+      return 0;
+    }
+
+  /* Unknown stub type.  For now, just return zero.  */
+  return 0;
+}
+
+static int
+hppa64_hpux_in_solib_call_trampoline (CORE_ADDR pc, char *name)
+{
+  /* PA64 has a completely different stub/trampoline scheme.  Is it
+     better?  Maybe.  It's certainly harder to determine with any
+     certainty that we are in a stub because we can not refer to the
+     unwinders to help.
+
+     The heuristic is simple.  Try to lookup the current PC value in the
+     minimal symbol table.  If that fails, then assume we are not in a
+     stub and return.
+
+     Then see if the PC value falls within the section bounds for the
+     section containing the minimal symbol we found in the first
+     step.  If it does, then assume we are not in a stub and return.
+
+     Finally peek at the instructions to see if they look like a stub.  */
+  struct minimal_symbol *minsym;
+  asection *sec;
+  CORE_ADDR addr;
+  int insn, i;
+
+  minsym = lookup_minimal_symbol_by_pc (pc);
+  if (! minsym)
+    return 0;
+
+  sec = SYMBOL_BFD_SECTION (minsym);
+
+  if (bfd_get_section_vma (sec->owner, sec) <= pc
+      && pc < (bfd_get_section_vma (sec->owner, sec)
+               + bfd_section_size (sec->owner, sec)))
+    return 0;
+
+  /* We might be in a stub.  Peek at the instructions.  Stubs are 3
+     instructions long.  */
+  insn = read_memory_integer (pc, 4);
+
+  /* Find out where we think we are within the stub.  */
+  if ((insn & 0xffffc00e) == 0x53610000)
+    addr = pc;
+  else if ((insn & 0xffffffff) == 0xe820d000)
+    addr = pc - 4;
+  else if ((insn & 0xffffc00e) == 0x537b0000)
+    addr = pc - 8;
+  else
+    return 0;
+
+  /* Now verify each insn in the range looks like a stub instruction.  */
+  insn = read_memory_integer (addr, 4);
+  if ((insn & 0xffffc00e) != 0x53610000)
+    return 0;
+
+  /* Now verify each insn in the range looks like a stub instruction.  */
+  insn = read_memory_integer (addr + 4, 4);
+  if ((insn & 0xffffffff) != 0xe820d000)
+    return 0;
+
+  /* Now verify each insn in the range looks like a stub instruction.  */
+  insn = read_memory_integer (addr + 8, 4);
+  if ((insn & 0xffffc00e) != 0x537b0000)
+    return 0;
+
+  /* Looks like a stub.  */
+  return 1;
+}
+
+/* Return one if PC is in the return path of a trampoline, else return zero.
+
+   Note we return one for *any* call trampoline (long-call, arg-reloc), not
+   just shared library trampolines (import, export).  */
+
+static int
+hppa_hpux_in_solib_return_trampoline (CORE_ADDR pc, char *name)
+{
+  struct unwind_table_entry *u;
+
+  /* Get the unwind descriptor corresponding to PC, return zero
+     if no unwind was found.  */
+  u = find_unwind_entry (pc);
+  if (!u)
+    return 0;
+
+  /* If this isn't a linker stub or it's just a long branch stub, then
+     return zero.  */
+  if (u->stub_unwind.stub_type == 0 || u->stub_unwind.stub_type == LONG_BRANCH)
+    return 0;
+
+  /* The call and return path execute the same instructions within
+     an IMPORT stub!  So an IMPORT stub is both a call and return
+     trampoline.  */
+  if (u->stub_unwind.stub_type == IMPORT)
+    return 1;
+
+  /* Parameter relocation stubs always have a call path and may have a
+     return path.  */
+  if (u->stub_unwind.stub_type == PARAMETER_RELOCATION
+      || u->stub_unwind.stub_type == EXPORT)
+    {
+      CORE_ADDR addr;
+
+      /* Search forward from the current PC until we hit a branch
+         or the end of the stub.  */
+      for (addr = pc; addr <= u->region_end; addr += 4)
+        {
+          unsigned long insn;
+
+          insn = read_memory_integer (addr, 4);
+
+          /* Does it look like a bl?  If so then it's the call path, if
+             we find a bv or be first, then we're on the return path.  */
+          if ((insn & 0xfc00e000) == 0xe8000000)
+            return 0;
+          else if ((insn & 0xfc00e001) == 0xe800c000
+                   || (insn & 0xfc000000) == 0xe0000000)
+            return 1;
+        }
+
+      /* Should never happen.  */
+      warning ("Unable to find branch in parameter relocation stub.\n");
+      return 0;
+    }
+
+  /* Unknown stub type.  For now, just return zero.  */
+  return 0;
+
+}
+
+/* Figure out if PC is in a trampoline, and if so find out where
+   the trampoline will jump to.  If not in a trampoline, return zero.
+
+   Simple code examination probably is not a good idea since the code
+   sequences in trampolines can also appear in user code.
+
+   We use unwinds and information from the minimal symbol table to
+   determine when we're in a trampoline.  This won't work for ELF
+   (yet) since it doesn't create stub unwind entries.  Whether or
+   not ELF will create stub unwinds or normal unwinds for linker
+   stubs is still being debated.
+
+   This should handle simple calls through dyncall or sr4export,
+   long calls, argument relocation stubs, and dyncall/sr4export
+   calling an argument relocation stub.  It even handles some stubs
+   used in dynamic executables.  */
+
+static CORE_ADDR
+hppa_hpux_skip_trampoline_code (CORE_ADDR pc)
+{
+  long orig_pc = pc;
+  long prev_inst, curr_inst, loc;
+  static CORE_ADDR dyncall = 0;
+  static CORE_ADDR dyncall_external = 0;
+  static CORE_ADDR sr4export = 0;
+  struct minimal_symbol *msym;
+  struct unwind_table_entry *u;
+
+  /* FIXME XXX - dyncall and sr4export must be initialized whenever we get a
+     new exec file */
+
+  if (!dyncall)
+    {
+      msym = lookup_minimal_symbol ("$$dyncall", NULL, NULL);
+      if (msym)
+        dyncall = SYMBOL_VALUE_ADDRESS (msym);
+      else
+        dyncall = -1;
+    }
+
+  if (!dyncall_external)
+    {
+      msym = lookup_minimal_symbol ("$$dyncall_external", NULL, NULL);
+      if (msym)
+        dyncall_external = SYMBOL_VALUE_ADDRESS (msym);
+      else
+        dyncall_external = -1;
+    }
+
+  if (!sr4export)
+    {
+      msym = lookup_minimal_symbol ("_sr4export", NULL, NULL);
+      if (msym)
+        sr4export = SYMBOL_VALUE_ADDRESS (msym);
+      else
+        sr4export = -1;
+    }
+
+  /* Addresses passed to dyncall may *NOT* be the actual address
+     of the function.  So we may have to do something special.  */
+  if (pc == dyncall)
+    {
+      pc = (CORE_ADDR) read_register (22);
+
+      /* If bit 30 (counting from the left) is on, then pc is the address of
+         the PLT entry for this function, not the address of the function
+         itself.  Bit 31 has meaning too, but only for MPE.  */
+      if (pc & 0x2)
+        pc = (CORE_ADDR) read_memory_integer (pc & ~0x3, TARGET_PTR_BIT / 8);
+    }
+  if (pc == dyncall_external)
+    {
+      pc = (CORE_ADDR) read_register (22);
+      pc = (CORE_ADDR) read_memory_integer (pc & ~0x3, TARGET_PTR_BIT / 8);
+    }
+  else if (pc == sr4export)
+    pc = (CORE_ADDR) (read_register (22));
+
+  /* Get the unwind descriptor corresponding to PC, return zero
+     if no unwind was found.  */
+  u = find_unwind_entry (pc);
+  if (!u)
+    return 0;
+
+  /* If this isn't a linker stub, then return now.  */
+  /* elz: attention here! (FIXME) because of a compiler/linker
+     error, some stubs which should have a non zero stub_unwind.stub_type
+     have unfortunately a value of zero.  So this function would return here
+     as if we were not in a trampoline.  To fix this, we go look at the partial
+     symbol information, which reports this guy as a stub.
+     (FIXME): Unfortunately, we are not that lucky: it turns out that the
+     partial symbol information is also wrong sometimes.  This is because
+     when it is entered (somread.c::som_symtab_read()) it can happen that
+     if the type of the symbol (from the som) is Entry, and the symbol is
+     in a shared library, then it can also be a trampoline.  This would
+     be OK, except that I believe the way they decide if we are in a shared
+     library does not work.  SOOOO..., even if we have a regular function
+     w/o trampolines its minimal symbol can be assigned type
+     mst_solib_trampoline.
+     Also, if we find that the symbol is a real stub, then we fix the unwind
+     descriptor, and define the stub type to be EXPORT.
+     Hopefully this is correct most of the times.  */
+  if (u->stub_unwind.stub_type == 0)
+    {
+
+/* elz: NOTE (FIXME!) once the problem with the unwind information is fixed
+   we can delete all the code which appears between the lines */
+/*--------------------------------------------------------------------------*/
+      msym = lookup_minimal_symbol_by_pc (pc);
+
+      if (msym == NULL || MSYMBOL_TYPE (msym) != mst_solib_trampoline)
+        return orig_pc == pc ? 0 : pc & ~0x3;
+
+      else if (msym != NULL && MSYMBOL_TYPE (msym) == mst_solib_trampoline)
+        {
+          struct objfile *objfile;
+          struct minimal_symbol *msymbol;
+          int function_found = 0;
+
+          /* go look if there is another minimal symbol with the same name as
+             this one, but with type mst_text.  This would happen if the msym
+             is an actual trampoline, in which case there would be another
+             symbol with the same name corresponding to the real function */
+
+          ALL_MSYMBOLS (objfile, msymbol)
+            {
+              if (MSYMBOL_TYPE (msymbol) == mst_text
+                  && DEPRECATED_STREQ (DEPRECATED_SYMBOL_NAME (msymbol),
+                                       DEPRECATED_SYMBOL_NAME (msym)))
+                {
+                  function_found = 1;
+                  break;
+                }
+            }
+
+          if (function_found)
+            /* the type of msym is correct (mst_solib_trampoline), but
+               the unwind info is wrong, so set it to the correct value */
+            u->stub_unwind.stub_type = EXPORT;
+          else
+            /* the stub type info in the unwind is correct (this is not a
+               trampoline), but the msym type information is wrong, it
+               should be mst_text.  So we need to fix the msym, and also
+               get out of this function */
+            {
+              MSYMBOL_TYPE (msym) = mst_text;
+              return orig_pc == pc ? 0 : pc & ~0x3;
+            }
+        }
+
+/*--------------------------------------------------------------------------*/
+    }
+
+  /* It's a stub.  Search for a branch and figure out where it goes.
+     Note we have to handle multi insn branch sequences like ldil;ble.
+     Most (all?) other branches can be determined by examining the contents
+     of certain registers and the stack.  */
+
+  loc = pc;
+  curr_inst = 0;
+  prev_inst = 0;
+  while (1)
+    {
+      /* Make sure we haven't walked outside the range of this stub.  */
+      if (u != find_unwind_entry (loc))
+        {
+          warning ("Unable to find branch in linker stub");
+          return orig_pc == pc ? 0 : pc & ~0x3;
+        }
+
+      prev_inst = curr_inst;
+      curr_inst = read_memory_integer (loc, 4);
+
+      /* Does it look like a branch external using %r1?  Then it's the
+         branch from the stub to the actual function.  */
+      if ((curr_inst & 0xffe0e000) == 0xe0202000)
+        {
+          /* Yup.  See if the previous instruction loaded
+             a value into %r1.  If so compute and return the jump address.  */
+          if ((prev_inst & 0xffe00000) == 0x20200000)
+            return (hppa_extract_21 (prev_inst)
+                    + hppa_extract_17 (curr_inst)) & ~0x3;
+          else
+            {
+              warning ("Unable to find ldil X,%%r1 before ble Y(%%sr4,%%r1).");
+              return orig_pc == pc ? 0 : pc & ~0x3;
+            }
+        }
+
+      /* Does it look like a be 0(sr0,%r21)? OR
+         Does it look like a be, n 0(sr0,%r21)? OR
+         Does it look like a bve (r21)? (this is on PA2.0)
+         Does it look like a bve, n(r21)? (this is also on PA2.0)
+         That's the branch from an
+         import stub to an export stub.
+
+         It is impossible to determine the target of the branch via
+         simple examination of instructions and/or data (consider
+         that the address in the plabel may be the address of the
+         bind-on-reference routine in the dynamic loader).
+
+         So we have to try an alternative approach.
+
+         Get the name of the symbol at our current location; it should
+         be a stub symbol with the same name as the symbol in the
+         shared library.
+
+         Then lookup a minimal symbol with the same name; we should
+         get the minimal symbol for the target routine in the shared
+         library as those take precedence over import/export stubs.  */
+      if ((curr_inst == 0xe2a00000) ||
+          (curr_inst == 0xe2a00002) ||
+          (curr_inst == 0xeaa0d000) ||
+          (curr_inst == 0xeaa0d002))
+        {
+          struct minimal_symbol *stubsym, *libsym;
+
+          stubsym = lookup_minimal_symbol_by_pc (loc);
+          if (stubsym == NULL)
+            {
+              warning ("Unable to find symbol for 0x%lx", loc);
+              return orig_pc == pc ? 0 : pc & ~0x3;
+            }
+
+          libsym = lookup_minimal_symbol (DEPRECATED_SYMBOL_NAME (stubsym),
+                                          NULL, NULL);
+          if (libsym == NULL)
+            {
+              warning ("Unable to find library symbol for %s\n",
+                       DEPRECATED_SYMBOL_NAME (stubsym));
+              return orig_pc == pc ? 0 : pc & ~0x3;
+            }
+
+          return SYMBOL_VALUE (libsym);
+        }
+
+      /* Does it look like bl X,%rp or bl X,%r0?  Another way to do a
+         branch from the stub to the actual function.  */
+      /*elz */
+      else if ((curr_inst & 0xffe0e000) == 0xe8400000
+               || (curr_inst & 0xffe0e000) == 0xe8000000
+               || (curr_inst & 0xffe0e000) == 0xe800A000)
+        return (loc + hppa_extract_17 (curr_inst) + 8) & ~0x3;
+
+      /* Does it look like bv (rp)?  Note this depends on the
+         current stack pointer being the same as the stack
+         pointer in the stub itself!  This is a branch on from the
+         stub back to the original caller.  */
+      /*else if ((curr_inst & 0xffe0e000) == 0xe840c000) */
+      else if ((curr_inst & 0xffe0f000) == 0xe840c000)
+        {
+          /* Yup.  See if the previous instruction loaded
+             rp from sp - 8.  */
+          if (prev_inst == 0x4bc23ff1)
+            return (read_memory_integer
+                    (read_register (HPPA_SP_REGNUM) - 8, 4)) & ~0x3;
+          else
+            {
+              warning ("Unable to find restore of %%rp before bv (%%rp).");
+              return orig_pc == pc ? 0 : pc & ~0x3;
+            }
+        }
+
+      /* elz: added this case to capture the new instruction
+         at the end of the return part of an export stub used by
+         the PA2.0: BVE, n (rp) */
+      else if ((curr_inst & 0xffe0f000) == 0xe840d000)
+        {
+          return (read_memory_integer
+                  (read_register (HPPA_SP_REGNUM) - 24,
+                   TARGET_PTR_BIT / 8)) & ~0x3;
+        }
+
+      /* What about be,n 0(sr0,%rp)?  It's just another way we return to
+         the original caller from the stub.  Used in dynamic executables.  */
+      else if (curr_inst == 0xe0400002)
+        {
+          /* The value we jump to is sitting in sp - 24.  But that's
+             loaded several instructions before the be instruction.
+             I guess we could check for the previous instruction being
+             mtsp %r1,%sr0 if we want to do sanity checking.  */
+          return (read_memory_integer
+                  (read_register (HPPA_SP_REGNUM) - 24,
+                   TARGET_PTR_BIT / 8)) & ~0x3;
+        }
+
+      /* Haven't found the branch yet, but we're still in the stub.
+         Keep looking.  */
+      loc += 4;
+    }
+}
+
+
 /* Exception handling support for the HP-UX ANSI C++ compiler.
    The compiler (aCC) provides a callback for exception events;
    GDB can set a breakpoint on this callback and find out what
@@ -716,7 +1225,20 @@ child_get_current_exception_event (void)
 static void
 hppa_hpux_init_abi (struct gdbarch_info info, struct gdbarch *gdbarch)
 {
+  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
+
   set_gdbarch_deprecated_pc_in_sigtramp (gdbarch, hppa_hpux_pc_in_sigtramp);
+
+  if (tdep->bytes_per_address == 4)
+    set_gdbarch_in_solib_call_trampoline (gdbarch,
+                                          hppa32_hpux_in_solib_call_trampoline);
+  else
+    set_gdbarch_in_solib_call_trampoline (gdbarch,
+                                          hppa64_hpux_in_solib_call_trampoline);
+
+  set_gdbarch_in_solib_return_trampoline (gdbarch,
+                                          hppa_hpux_in_solib_return_trampoline);
+  set_gdbarch_skip_trampoline_code (gdbarch, hppa_hpux_skip_trampoline_code);
 }
 
 static void
```