path: root/sysdeps/x86/cacheinfo.h
author    H.J. Lu <hjl.tools@gmail.com>  2021-07-22 20:26:25 -0700
committer H.J. Lu <hjl.tools@gmail.com>  2021-07-28 13:23:57 -0700
commit    91cc803d27bda34919717b496b53cf279e44a922 (patch)
tree      0a6d0b00a94015f1554b6de690a620600dc7513d /sysdeps/x86/cacheinfo.h
parent    c25c32165d8b7c506442fdc0304f7a3a223e1f42 (diff)
download  glibc-91cc803d27bda34919717b496b53cf279e44a922.tar.gz
x86-64: Add Avoid_Short_Distance_REP_MOVSB
commit 3ec5d83d2a237d39e7fd6ef7a0bc8ac4c171a4a5
Author: H.J. Lu <hjl.tools@gmail.com>
Date:   Sat Jan 25 14:19:40 2020 -0800

    x86-64: Avoid rep movsb with short distance [BZ #27130]

introduced some regressions on Intel processors without Fast Short REP
MOV (FSRM).  Add Avoid_Short_Distance_REP_MOVSB to avoid rep movsb with
short distance only on Intel processors with FSRM.  bench-memmove-large
on Skylake server shows that cycles of __memmove_evex_unaligned_erms
improve for the following data sizes:

                                     before    after    Improvement
length=4127, align1=3, align2=0:     479.38    349.25      27%
length=4223, align1=9, align2=5:     405.62    333.25      18%
length=8223, align1=3, align2=0:     786.12    496.38      37%
length=8319, align1=9, align2=5:     727.50    501.38      31%
length=16415, align1=3, align2=0:   1436.88    840.00      41%
length=16511, align1=9, align2=5:   1375.50    836.38      39%
length=32799, align1=3, align2=0:   2890.00   1860.12      36%
length=32895, align1=9, align2=5:   2891.38   1931.88      33%
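The avoidance the commit message describes is applied inside glibc's assembly
string routines, which are not part of the diff shown on this page.  As a
rough illustration only, the following self-contained C sketch models the
decision as described above: rep movsb is skipped for a short forward
distance between source and destination only when the tuning bit is set,
i.e. only on processors with FSRM.  The function should_use_rep_movsb, the
macro AVOID_SHORT_DISTANCE_BIT, the 64-byte distance cutoff and the 2048-byte
threshold are illustrative assumptions, not glibc identifiers or guaranteed
values.

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

#define AVOID_SHORT_DISTANCE_BIT 0x1	/* Stand-in for the string-control bit.  */

static int
should_use_rep_movsb (uintptr_t dst, uintptr_t src, size_t len,
		      unsigned int string_control,
		      size_t rep_movsb_threshold)
{
  /* Below the threshold a vectorized copy loop is preferred anyway.  */
  if (len < rep_movsb_threshold)
    return 0;

  /* Only when the tuning bit is set (i.e. on FSRM processors) is a short
     forward distance between source and destination a reason to avoid
     rep movsb.  */
  if ((string_control & AVOID_SHORT_DISTANCE_BIT)
      && dst > src && dst - src < 64)
    return 0;

  return 1;
}

int
main (void)
{
  char buf[8192];
  uintptr_t base = (uintptr_t) buf;

  /* Forward copy with a 16-byte distance: rejected only when the
     avoidance bit is set.  */
  printf ("bit set (FSRM),  distance 16: %d\n",
	  should_use_rep_movsb (base + 16, base, 4096,
				AVOID_SHORT_DISTANCE_BIT, 2048));
  printf ("bit clear,       distance 16: %d\n",
	  should_use_rep_movsb (base + 16, base, 4096, 0, 2048));
  return 0;
}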
Diffstat (limited to 'sysdeps/x86/cacheinfo.h')
-rw-r--r--  sysdeps/x86/cacheinfo.h | 9
1 file changed, 9 insertions, 0 deletions
diff --git a/sysdeps/x86/cacheinfo.h b/sysdeps/x86/cacheinfo.h
index eba8dbc4a6..41d2c81369 100644
--- a/sysdeps/x86/cacheinfo.h
+++ b/sysdeps/x86/cacheinfo.h
@@ -49,6 +49,11 @@ long int __x86_rep_stosb_threshold attribute_hidden = 2048;
 /* Threshold to stop using Enhanced REP MOVSB.  */
 long int __x86_rep_movsb_stop_threshold attribute_hidden;
 
+/* A bit-wise OR of string/memory requirements for optimal performance
+   e.g. X86_STRING_CONTROL_AVOID_SHORT_DISTANCE_REP_MOVSB.  These bits
+   are used at runtime to tune implementation behavior.  */
+int __x86_string_control attribute_hidden;
+
 static void
 init_cacheinfo (void)
 {
@@ -71,5 +76,9 @@ init_cacheinfo (void)
   __x86_rep_movsb_threshold = cpu_features->rep_movsb_threshold;
   __x86_rep_stosb_threshold = cpu_features->rep_stosb_threshold;
   __x86_rep_movsb_stop_threshold = cpu_features->rep_movsb_stop_threshold;
+
+  if (CPU_FEATURES_ARCH_P (cpu_features, Avoid_Short_Distance_REP_MOVSB))
+    __x86_string_control
+      |= X86_STRING_CONTROL_AVOID_SHORT_DISTANCE_REP_MOVSB;
 }
 #endif
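
The hunk above only consumes the Avoid_Short_Distance_REP_MOVSB preference;
deriving that preference from the FSRM CPUID bit happens elsewhere in the
commit, outside the file this page's diffstat is limited to.  The stand-alone
sketch below mirrors the shape of the init_cacheinfo hunk to show how the
preference would flow into the runtime tuning word.  The type
fake_cpu_features, the function init_string_control and the variable
x86_string_control are illustrative stand-ins, not glibc's actual layout.

#include <stdbool.h>
#include <stdio.h>

#define X86_STRING_CONTROL_AVOID_SHORT_DISTANCE_REP_MOVSB 0x1

/* Illustrative stand-in for the relevant parts of glibc's cpu_features.  */
struct fake_cpu_features
{
  bool fsrm;				/* Fast Short REP MOV present.  */
  bool avoid_short_distance_rep_movsb;	/* Derived preference.  */
};

static int x86_string_control;

/* Translate the derived CPU preference into a runtime tuning bit, in the
   same way the init_cacheinfo hunk above does.  */
static void
init_string_control (const struct fake_cpu_features *cpu)
{
  if (cpu->avoid_short_distance_rep_movsb)
    x86_string_control
      |= X86_STRING_CONTROL_AVOID_SHORT_DISTANCE_REP_MOVSB;
}

int
main (void)
{
  /* Per the commit message, the preference follows FSRM: Skylake server
     lacks FSRM, so after this change it no longer avoids short-distance
     rep movsb, while an FSRM-capable processor still does.  */
  struct fake_cpu_features skylake = { .fsrm = false };
  struct fake_cpu_features fsrm_cpu = { .fsrm = true };

  skylake.avoid_short_distance_rep_movsb = skylake.fsrm;
  fsrm_cpu.avoid_short_distance_rep_movsb = fsrm_cpu.fsrm;

  x86_string_control = 0;
  init_string_control (&skylake);
  printf ("no FSRM: string control = %#x\n", x86_string_control);

  x86_string_control = 0;
  init_string_control (&fsrm_cpu);
  printf ("FSRM:    string control = %#x\n", x86_string_control);
  return 0;
}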