Commit 83a3a6f0 authored by: ysr

6973570: OrderAccess::storestore() scales poorly on multi-socket x64 and sparc: cache-line ping-ponging
Summary: volatile store to static variable removed in favour of a volatile store to stack to avoid excessive cache coherency traffic; verified that the volatile store is not elided by any of our current compilers.
Reviewed-by: dholmes, dice, jcoomes, kvn
Parent: 75eb81af
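For readers skimming the diff below, the core of the change can be condensed into a small stand-alone sketch. This is a simplified illustration of the pattern only, not the actual HotSpot sources; the `OrderAccessOld`/`OrderAccessNew` names and the plain integer types are placeholders for the platform-specific code touched in this commit.

```cpp
#include <cstdint>

// Before this change: release() performed a volatile store to a single
// class-static variable. Every thread on every socket wrote the same
// cache line, causing coherency traffic (cache-line ping-ponging).
struct OrderAccessOld {
  static volatile intptr_t dummy;
  static void release() { dummy = 0; }   // volatile store to a shared line
};
volatile intptr_t OrderAccessOld::dummy = 0;

// After this change: release() performs a volatile store to a local on
// the calling thread's own stack. It is still a compiler-level sequence
// point, but no cache line is shared between threads.
struct OrderAccessNew {
  static void release() {
    // Avoid hitting the same cache-line from different threads.
    volatile int32_t local_dummy = 0;
    (void)local_dummy;                    // silence unused-variable warnings
  }
};

int main() {
  OrderAccessOld::release();
  OrderAccessNew::release();
  return 0;
}
```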
 /*
- * Copyright (c) 2003, 2008, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2003, 2010, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -36,8 +36,8 @@ inline void OrderAccess::acquire() {
 }
 inline void OrderAccess::release() {
-  jint* dummy = (jint*)&dummy;
-  __asm__ volatile("stw %%g0, [%0]" : : "r" (dummy) : "memory");
+  jint* local_dummy = (jint*)&local_dummy;
+  __asm__ volatile("stw %%g0, [%0]" : : "r" (local_dummy) : "memory");
 }
 inline void OrderAccess::fence() {
......
 /*
- * Copyright (c) 2003, 2009, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2003, 2010, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -30,16 +30,18 @@ inline void OrderAccess::loadstore() { acquire(); }
 inline void OrderAccess::storeload() { fence(); }
 inline void OrderAccess::acquire() {
-  volatile intptr_t dummy;
+  volatile intptr_t local_dummy;
 #ifdef AMD64
-  __asm__ volatile ("movq 0(%%rsp), %0" : "=r" (dummy) : : "memory");
+  __asm__ volatile ("movq 0(%%rsp), %0" : "=r" (local_dummy) : : "memory");
 #else
-  __asm__ volatile ("movl 0(%%esp),%0" : "=r" (dummy) : : "memory");
+  __asm__ volatile ("movl 0(%%esp),%0" : "=r" (local_dummy) : : "memory");
 #endif // AMD64
 }
 inline void OrderAccess::release() {
-  dummy = 0;
+  // Avoid hitting the same cache-line from
+  // different threads.
+  volatile jint local_dummy = 0;
 }
 inline void OrderAccess::fence() {
......
 /*
- * Copyright (c) 2003, 2009, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2003, 2010, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -42,8 +42,8 @@ inline void OrderAccess::acquire() {
 }
 inline void OrderAccess::release() {
-  jint* dummy = (jint*)&dummy;
-  __asm__ volatile("stw %%g0, [%0]" : : "r" (dummy) : "memory");
+  jint* local_dummy = (jint*)&local_dummy;
+  __asm__ volatile("stw %%g0, [%0]" : : "r" (local_dummy) : "memory");
 }
 inline void OrderAccess::fence() {
@@ -57,7 +57,9 @@ inline void OrderAccess::acquire() {
 }
 inline void OrderAccess::release() {
-  dummy = 0;
+  // Avoid hitting the same cache-line from
+  // different threads.
+  volatile jint local_dummy = 0;
 }
 inline void OrderAccess::fence() {
......
 /*
- * Copyright (c) 2003, 2009, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2003, 2010, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -40,7 +40,9 @@ inline void OrderAccess::acquire() {
 }
 inline void OrderAccess::release() {
-  dummy = 0;
+  // Avoid hitting the same cache-line from
+  // different threads.
+  volatile jint local_dummy = 0;
 }
 inline void OrderAccess::fence() {
@@ -53,11 +55,11 @@ inline void OrderAccess::fence() {
 extern "C" {
   inline void _OrderAccess_acquire() {
-    volatile intptr_t dummy;
+    volatile intptr_t local_dummy;
 #ifdef AMD64
-    __asm__ volatile ("movq 0(%%rsp), %0" : "=r" (dummy) : : "memory");
+    __asm__ volatile ("movq 0(%%rsp), %0" : "=r" (local_dummy) : : "memory");
 #else
-    __asm__ volatile ("movl 0(%%esp),%0" : "=r" (dummy) : : "memory");
+    __asm__ volatile ("movl 0(%%esp),%0" : "=r" (local_dummy) : : "memory");
 #endif // AMD64
   }
   inline void _OrderAccess_fence() {
......
 /*
- * Copyright (c) 2003, 2009, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2003, 2010, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -41,7 +41,7 @@ inline void OrderAccess::acquire() {
 inline void OrderAccess::release() {
   // A volatile store has release semantics.
-  dummy = 0;
+  volatile jint local_dummy = 0;
 }
 inline void OrderAccess::fence() {
......
 /*
- * Copyright (c) 2003, 2009, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2003, 2010, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -25,8 +25,6 @@
 # include "incls/_precompiled.incl"
 # include "incls/_orderAccess.cpp.incl"
-volatile intptr_t OrderAccess::dummy = 0;
 void OrderAccess::StubRoutines_fence() {
   // Use a stub if it exists. It may not exist during bootstrap so do
   // nothing in that case but assert if no fence code exists after threads have been created
......
 /*
- * Copyright (c) 2003, 2009, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2003, 2010, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -166,6 +166,12 @@
 // and release must include a sequence point, usually via a volatile memory
 // access. Other ways to guarantee a sequence point are, e.g., use of
 // indirect calls and linux's __asm__ volatile.
+// Note: as of 6973570, we have replaced the originally static "dummy" field
+// (see above) by a volatile store to the stack. All of the versions of the
+// compilers that we currently use (SunStudio, gcc and VC++) respect the
+// semantics of volatile here. If you build HotSpot using other
+// compilers, you may need to verify that no compiler reordering occurs
+// across the sequence point represented by the volatile access.
 //
 //
 // os::is_MP Considered Redundant
@@ -297,10 +303,6 @@ class OrderAccess : AllStatic {
   static void release_store_ptr_fence(volatile intptr_t* p, intptr_t v);
   static void release_store_ptr_fence(volatile void*     p, void*    v);
-  // In order to force a memory access, implementations may
-  // need a volatile externally visible dummy variable.
-  static volatile intptr_t dummy;
  private:
   // This is a helper that invokes the StubRoutines::fence_entry()
   // routine if it exists, It should only be used by platforms that
......
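The note added to orderAccess.hpp above cautions that this idiom only works if the compiler honours volatile semantics and keeps the store to the stack slot. One hedged way to sanity-check an untested toolchain (a suggestion of this write-up, not part of the commit) is to compile a minimal clone of the pattern and confirm in the generated assembly that the store is still emitted; the file and function names below are made up for this check.

```cpp
// verify_release.cpp -- illustrative only; compile with e.g. g++ -O3 -S
#include <cstdint>

void release_like() {
  // Same pattern as the new OrderAccess::release(): a volatile store to
  // a stack local that acts as a compiler-level sequence point.
  volatile int32_t local_dummy = 0;
  (void)local_dummy;
}

int main() {
  release_like();
  return 0;
}
```

After `g++ -O3 -S verify_release.cpp`, the body of `release_like` should still contain a store to the stack (something like `movl $0, -4(%rsp)` on x86-64); if an optimizer removes it, the release barrier would silently disappear on that compiler.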