Commit c0df12e2 authored by kvn

7009756: volatile variables could be broken through reflection API

Summary: Use Atomic::load() and Atomic::store() to access a volatile long.
Reviewed-by: iveresov, jrose, dholmes, never
Parent 968b294b
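Background (editor's note, not part of the commit): on a 32-bit VM a plain load or store of a volatile jlong compiles to two 4-byte memory operations, so a reader going through the reflection/Unsafe path can observe a value whose halves come from different writes. The patch routes such accesses through Atomic::load()/Atomic::store(), which use a single 8-byte instruction. The standalone C++11 program below is an illustration only, not HotSpot code: the "split" path writes two 4-byte halves separately, the "whole" path uses one 8-byte atomic.

// Editor's illustration, not HotSpot code. The split path models a 64-bit
// value written as two 4-byte stores (a plain volatile jlong on a 32-bit VM);
// the whole path models Atomic::store()'s single 8-byte access.
#include <atomic>
#include <cstdint>
#include <cstdio>
#include <thread>

static std::atomic<uint32_t> lo{0}, hi{0};   // two halves: reads can tear
static std::atomic<uint64_t> whole{0};       // one 8-byte access: never tears

int main() {
  std::thread writer([] {
    for (uint64_t i = 1; i <= 2000000; ++i) {
      uint64_t v = (i << 32) | (uint32_t)i;            // both halves equal i
      lo.store((uint32_t)v, std::memory_order_relaxed);
      hi.store((uint32_t)(v >> 32), std::memory_order_relaxed);
      whole.store(v, std::memory_order_relaxed);
    }
  });
  long torn_split = 0, torn_whole = 0;
  for (int i = 0; i < 2000000; ++i) {
    uint64_t s = ((uint64_t)hi.load(std::memory_order_relaxed) << 32)
               | lo.load(std::memory_order_relaxed);
    if ((uint32_t)s != (uint32_t)(s >> 32)) ++torn_split;  // mismatched halves
    uint64_t w = whole.load(std::memory_order_relaxed);
    if ((uint32_t)w != (uint32_t)(w >> 32)) ++torn_whole;  // stays zero
  }
  writer.join();
  std::printf("torn reads: split=%ld whole=%ld\n", torn_split, torn_whole);
  return 0;
}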
/*
* Copyright (c) 1999, 2010, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 1999, 2011, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
......@@ -54,6 +54,8 @@ inline void Atomic::dec (volatile jint* dest) { (void)add (-1, dest);
inline void Atomic::dec_ptr(volatile intptr_t* dest) { (void)add_ptr(-1, dest); }
inline void Atomic::dec_ptr(volatile void* dest) { (void)add_ptr(-1, dest); }
inline jlong Atomic::load(volatile jlong* src) { return *src; }
inline jint Atomic::add (jint add_value, volatile jint* dest) {
intptr_t rv;
__asm__ volatile(
......
/*
* Copyright (c) 1999, 2010, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 1999, 2011, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
......@@ -100,11 +100,6 @@ inline jint Atomic::cmpxchg (jint exchange_value, volatile jint*
return exchange_value;
}
extern "C" {
// defined in linux_x86.s
jlong _Atomic_cmpxchg_long(jlong, volatile jlong*, jlong, bool);
}
#ifdef AMD64
inline void Atomic::store (jlong store_value, jlong* dest) { *dest = store_value; }
inline void Atomic::store (jlong store_value, volatile jlong* dest) { *dest = store_value; }
......@@ -164,9 +159,9 @@ inline void* Atomic::cmpxchg_ptr(void* exchange_value, volatile void*
return (void*)cmpxchg((jlong)exchange_value, (volatile jlong*)dest, (jlong)compare_value);
}
#else
//inline void Atomic::store (jlong store_value, jlong* dest) { *dest = store_value; }
//inline void Atomic::store (jlong store_value, volatile jlong* dest) { *dest = store_value; }
inline jlong Atomic::load(volatile jlong* src) { return *src; }
#else // !AMD64
inline intptr_t Atomic::add_ptr(intptr_t add_value, volatile intptr_t* dest) {
return (intptr_t)Atomic::add((jint)add_value, (volatile jint*)dest);
......@@ -189,6 +184,12 @@ inline intptr_t Atomic::xchg_ptr(intptr_t exchange_value, volatile intptr_t* des
return (intptr_t)xchg((jint)exchange_value, (volatile jint*)dest);
}
extern "C" {
// defined in linux_x86.s
jlong _Atomic_cmpxchg_long(jlong, volatile jlong*, jlong, bool);
void _Atomic_move_long(volatile jlong* src, volatile jlong* dst);
}
inline jlong Atomic::cmpxchg (jlong exchange_value, volatile jlong* dest, jlong compare_value) {
return _Atomic_cmpxchg_long(exchange_value, dest, compare_value, os::is_MP());
}
......@@ -200,6 +201,21 @@ inline intptr_t Atomic::cmpxchg_ptr(intptr_t exchange_value, volatile intptr_t*
inline void* Atomic::cmpxchg_ptr(void* exchange_value, volatile void* dest, void* compare_value) {
return (void*)cmpxchg((jint)exchange_value, (volatile jint*)dest, (jint)compare_value);
}
inline jlong Atomic::load(volatile jlong* src) {
volatile jlong dest;
_Atomic_move_long(src, &dest);
return dest;
}
inline void Atomic::store(jlong store_value, jlong* dest) {
_Atomic_move_long((volatile jlong*)&store_value, (volatile jlong*)dest);
}
inline void Atomic::store(jlong store_value, volatile jlong* dest) {
_Atomic_move_long((volatile jlong*)&store_value, dest);
}
#endif // AMD64
#endif // OS_CPU_LINUX_X86_VM_ATOMIC_LINUX_X86_INLINE_HPP
#
# Copyright (c) 2004, 2007, Oracle and/or its affiliates. All rights reserved.
# Copyright (c) 2004, 2011, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
......@@ -38,6 +38,7 @@
.globl _mmx_Copy_arrayof_conjoint_jshorts
.globl _Atomic_cmpxchg_long
.globl _Atomic_move_long
.text
......@@ -653,3 +654,15 @@ _Atomic_cmpxchg_long:
popl %ebx
ret
# Support for jlong Atomic::load and Atomic::store.
# void _Atomic_move_long(volatile jlong* src, volatile jlong* dst)
.p2align 4,,15
.type _Atomic_move_long,@function
_Atomic_move_long:
movl 4(%esp), %eax # src
fildll (%eax)
movl 8(%esp), %eax # dest
fistpll (%eax)
ret
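Design note (editor's comment, not part of the patch): fildll/fistpll move the operand through the x87 stack with a single aligned 8-byte memory access each, which Pentium-class and later processors perform atomically, and every 64-bit integer is exactly representable in the 80-bit extended format, so the value round-trips unchanged. On SSE2 hardware the same single-access guarantee could be obtained with movq through an XMM register; a hypothetical GCC sketch follows (an assumed alternative, not part of this change; compile with -msse2 on 32-bit x86).

// Hypothetical alternative to the fildll/fistpll pair above (editor's sketch).
static inline void atomic_move_long_sse2(volatile long long* src,
                                         volatile long long* dst) {
  __asm__ volatile("movq (%0), %%xmm0 \n\t"   // one aligned 8-byte load
                   "movq %%xmm0, (%1)"        // one aligned 8-byte store
                   : /* no outputs */
                   : "r"(src), "r"(dst)
                   : "xmm0", "memory");
}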
/*
* Copyright (c) 2003, 2010, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2003, 2011, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
......@@ -25,6 +25,7 @@
#ifndef OS_CPU_LINUX_X86_VM_ORDERACCESS_LINUX_X86_INLINE_HPP
#define OS_CPU_LINUX_X86_VM_ORDERACCESS_LINUX_X86_INLINE_HPP
#include "runtime/atomic.hpp"
#include "runtime/orderAccess.hpp"
#include "vm_version_x86.hpp"
......@@ -64,11 +65,11 @@ inline void OrderAccess::fence() {
inline jbyte OrderAccess::load_acquire(volatile jbyte* p) { return *p; }
inline jshort OrderAccess::load_acquire(volatile jshort* p) { return *p; }
inline jint OrderAccess::load_acquire(volatile jint* p) { return *p; }
inline jlong OrderAccess::load_acquire(volatile jlong* p) { return *p; }
inline jlong OrderAccess::load_acquire(volatile jlong* p) { return Atomic::load(p); }
inline jubyte OrderAccess::load_acquire(volatile jubyte* p) { return *p; }
inline jushort OrderAccess::load_acquire(volatile jushort* p) { return *p; }
inline juint OrderAccess::load_acquire(volatile juint* p) { return *p; }
inline julong OrderAccess::load_acquire(volatile julong* p) { return *p; }
inline julong OrderAccess::load_acquire(volatile julong* p) { return Atomic::load((volatile jlong*)p); }
inline jfloat OrderAccess::load_acquire(volatile jfloat* p) { return *p; }
inline jdouble OrderAccess::load_acquire(volatile jdouble* p) { return *p; }
......@@ -79,11 +80,11 @@ inline void* OrderAccess::load_ptr_acquire(const volatile void* p) { return *
inline void OrderAccess::release_store(volatile jbyte* p, jbyte v) { *p = v; }
inline void OrderAccess::release_store(volatile jshort* p, jshort v) { *p = v; }
inline void OrderAccess::release_store(volatile jint* p, jint v) { *p = v; }
inline void OrderAccess::release_store(volatile jlong* p, jlong v) { *p = v; }
inline void OrderAccess::release_store(volatile jlong* p, jlong v) { Atomic::store(v, p); }
inline void OrderAccess::release_store(volatile jubyte* p, jubyte v) { *p = v; }
inline void OrderAccess::release_store(volatile jushort* p, jushort v) { *p = v; }
inline void OrderAccess::release_store(volatile juint* p, juint v) { *p = v; }
inline void OrderAccess::release_store(volatile julong* p, julong v) { *p = v; }
inline void OrderAccess::release_store(volatile julong* p, julong v) { Atomic::store((jlong)v, (volatile jlong*)p); }
inline void OrderAccess::release_store(volatile jfloat* p, jfloat v) { *p = v; }
inline void OrderAccess::release_store(volatile jdouble* p, jdouble v) { *p = v; }
......@@ -178,7 +179,7 @@ inline void OrderAccess::release_store_fence(volatile jlong* p, jlong v)
: "0" (v), "r" (p)
: "memory");
#else
*p = v; fence();
release_store(p, v); fence();
#endif // AMD64
}
......
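Why delegation is sufficient here (editor's note): on x86, ordinary loads already have acquire ordering and ordinary stores already have release ordering, so the 64-bit OrderAccess variants were only missing atomicity on 32-bit builds; routing them through Atomic::load()/Atomic::store() supplies that, and the *_fence variants still add their trailing full barrier. A rough C++11 analogy (editor's sketch, not HotSpot code):

#include <atomic>
#include <cstdint>

std::atomic<int64_t> field{0};

// Once the 8-byte access is atomic, acquire/release ordering comes for free
// on x86; these mirror load_acquire/release_store for a volatile jlong.
int64_t load_acquire_like()           { return field.load(std::memory_order_acquire); }
void    release_store_like(int64_t v) { field.store(v, std::memory_order_release); }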
/*
* Copyright (c) 1999, 2010, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 1999, 2011, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
......@@ -151,14 +151,22 @@ inline void* Atomic::cmpxchg_ptr(void* exchange_value, volatile void*
return (void*)cmpxchg((jint)exchange_value, (volatile jint*)dest, (jint)compare_value);
}
extern "C" void _Atomic_load_long(volatile jlong* src, volatile jlong* dst);
extern "C" void _Atomic_move_long(volatile jlong* src, volatile jlong* dst);
inline jlong Atomic::load(volatile jlong* src) {
volatile jlong dest;
_Atomic_load_long(src, &dest);
_Atomic_move_long(src, &dest);
return dest;
}
inline void Atomic::store(jlong store_value, jlong* dest) {
_Atomic_move_long((volatile jlong*)&store_value, (volatile jlong*)dest);
}
inline void Atomic::store(jlong store_value, volatile jlong* dest) {
_Atomic_move_long((volatile jlong*)&store_value, dest);
}
#endif // AMD64
#ifdef _GNU_SOURCE
......
/*
* Copyright (c) 2003, 2010, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2003, 2011, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
......@@ -25,6 +25,7 @@
#ifndef OS_CPU_SOLARIS_X86_VM_ORDERACCESS_SOLARIS_X86_INLINE_HPP
#define OS_CPU_SOLARIS_X86_VM_ORDERACCESS_SOLARIS_X86_INLINE_HPP
#include "runtime/atomic.hpp"
#include "runtime/orderAccess.hpp"
#include "vm_version_x86.hpp"
......@@ -80,11 +81,11 @@ extern "C" {
inline jbyte OrderAccess::load_acquire(volatile jbyte* p) { return *p; }
inline jshort OrderAccess::load_acquire(volatile jshort* p) { return *p; }
inline jint OrderAccess::load_acquire(volatile jint* p) { return *p; }
inline jlong OrderAccess::load_acquire(volatile jlong* p) { return *p; }
inline jlong OrderAccess::load_acquire(volatile jlong* p) { return Atomic::load(p); }
inline jubyte OrderAccess::load_acquire(volatile jubyte* p) { return *p; }
inline jushort OrderAccess::load_acquire(volatile jushort* p) { return *p; }
inline juint OrderAccess::load_acquire(volatile juint* p) { return *p; }
inline julong OrderAccess::load_acquire(volatile julong* p) { return *p; }
inline julong OrderAccess::load_acquire(volatile julong* p) { return Atomic::load((volatile jlong*)p); }
inline jfloat OrderAccess::load_acquire(volatile jfloat* p) { return *p; }
inline jdouble OrderAccess::load_acquire(volatile jdouble* p) { return *p; }
......@@ -95,11 +96,11 @@ inline void* OrderAccess::load_ptr_acquire(const volatile void* p) { return *
inline void OrderAccess::release_store(volatile jbyte* p, jbyte v) { *p = v; }
inline void OrderAccess::release_store(volatile jshort* p, jshort v) { *p = v; }
inline void OrderAccess::release_store(volatile jint* p, jint v) { *p = v; }
inline void OrderAccess::release_store(volatile jlong* p, jlong v) { *p = v; }
inline void OrderAccess::release_store(volatile jlong* p, jlong v) { Atomic::store(v, p); }
inline void OrderAccess::release_store(volatile jubyte* p, jubyte v) { *p = v; }
inline void OrderAccess::release_store(volatile jushort* p, jushort v) { *p = v; }
inline void OrderAccess::release_store(volatile juint* p, juint v) { *p = v; }
inline void OrderAccess::release_store(volatile julong* p, julong v) { *p = v; }
inline void OrderAccess::release_store(volatile julong* p, julong v) { Atomic::store((jlong)v, (volatile jlong*)p); }
inline void OrderAccess::release_store(volatile jfloat* p, jfloat v) { *p = v; }
inline void OrderAccess::release_store(volatile jdouble* p, jdouble v) { *p = v; }
......@@ -123,11 +124,11 @@ inline void OrderAccess::store_ptr_fence(void** p, void* v) { *p = v;
inline void OrderAccess::release_store_fence(volatile jbyte* p, jbyte v) { *p = v; fence(); }
inline void OrderAccess::release_store_fence(volatile jshort* p, jshort v) { *p = v; fence(); }
inline void OrderAccess::release_store_fence(volatile jint* p, jint v) { *p = v; fence(); }
inline void OrderAccess::release_store_fence(volatile jlong* p, jlong v) { *p = v; fence(); }
inline void OrderAccess::release_store_fence(volatile jlong* p, jlong v) { release_store(p, v); fence(); }
inline void OrderAccess::release_store_fence(volatile jubyte* p, jubyte v) { *p = v; fence(); }
inline void OrderAccess::release_store_fence(volatile jushort* p, jushort v) { *p = v; fence(); }
inline void OrderAccess::release_store_fence(volatile juint* p, juint v) { *p = v; fence(); }
inline void OrderAccess::release_store_fence(volatile julong* p, julong v) { *p = v; fence(); }
inline void OrderAccess::release_store_fence(volatile julong* p, julong v) { release_store(p, v); fence(); }
inline void OrderAccess::release_store_fence(volatile jfloat* p, jfloat v) { *p = v; fence(); }
inline void OrderAccess::release_store_fence(volatile jdouble* p, jdouble v) { *p = v; fence(); }
......
//
// Copyright (c) 2003, 2009, Oracle and/or its affiliates. All rights reserved.
// Copyright (c) 2003, 2011, Oracle and/or its affiliates. All rights reserved.
// DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
//
// This code is free software; you can redistribute it and/or modify it
......@@ -104,8 +104,9 @@
popl %ebx
.end
// Support for void Atomic::load(volatile jlong* src, volatile jlong* dest).
.inline _Atomic_load_long,2
// Support for jlong Atomic::load and Atomic::store.
// void _Atomic_move_long(volatile jlong* src, volatile jlong* dst)
.inline _Atomic_move_long,2
movl 0(%esp), %eax // src
fildll (%eax)
movl 4(%esp), %eax // dest
......
/*
* Copyright (c) 1999, 2010, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 1999, 2011, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
......@@ -137,10 +137,10 @@ inline void* Atomic::cmpxchg_ptr(void* exchange_value, volatile void*
return (void*)cmpxchg((jlong)exchange_value, (volatile jlong*)dest, (jlong)compare_value);
}
inline jlong Atomic::load(volatile jlong* src) { return *src; }
#else // !AMD64
//inline void Atomic::store (jlong store_value, jlong* dest) { *dest = store_value; }
//inline void Atomic::store (jlong store_value, volatile jlong* dest) { *dest = store_value; }
inline jint Atomic::add (jint add_value, volatile jint* dest) {
int mp = os::is_MP();
__asm {
......@@ -254,6 +254,33 @@ inline intptr_t Atomic::cmpxchg_ptr(intptr_t exchange_value, volatile intptr_t*
inline void* Atomic::cmpxchg_ptr(void* exchange_value, volatile void* dest, void* compare_value) {
return (void*)cmpxchg((jint)exchange_value, (volatile jint*)dest, (jint)compare_value);
}
inline jlong Atomic::load(volatile jlong* src) {
volatile jlong dest;
volatile jlong* pdest = &dest;
__asm {
mov eax, src
fild qword ptr [eax]
mov eax, pdest
fistp qword ptr [eax]
}
return dest;
}
inline void Atomic::store(jlong store_value, volatile jlong* dest) {
volatile jlong* src = &store_value;
__asm {
mov eax, src
fild qword ptr [eax]
mov eax, dest
fistp qword ptr [eax]
}
}
inline void Atomic::store(jlong store_value, jlong* dest) {
Atomic::store(store_value, (volatile jlong*)dest);
}
#endif // AMD64
#pragma warning(default: 4035) // Enables warnings reporting missing return statement
......
/*
* Copyright (c) 2003, 2010, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2003, 2011, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
......@@ -25,6 +25,7 @@
#ifndef OS_CPU_WINDOWS_X86_VM_ORDERACCESS_WINDOWS_X86_INLINE_HPP
#define OS_CPU_WINDOWS_X86_VM_ORDERACCESS_WINDOWS_X86_INLINE_HPP
#include "runtime/atomic.hpp"
#include "runtime/orderAccess.hpp"
#include "vm_version_x86.hpp"
......@@ -65,11 +66,11 @@ inline void OrderAccess::fence() {
inline jbyte OrderAccess::load_acquire(volatile jbyte* p) { return *p; }
inline jshort OrderAccess::load_acquire(volatile jshort* p) { return *p; }
inline jint OrderAccess::load_acquire(volatile jint* p) { return *p; }
inline jlong OrderAccess::load_acquire(volatile jlong* p) { return *p; }
inline jlong OrderAccess::load_acquire(volatile jlong* p) { return Atomic::load(p); }
inline jubyte OrderAccess::load_acquire(volatile jubyte* p) { return *p; }
inline jushort OrderAccess::load_acquire(volatile jushort* p) { return *p; }
inline juint OrderAccess::load_acquire(volatile juint* p) { return *p; }
inline julong OrderAccess::load_acquire(volatile julong* p) { return *p; }
inline julong OrderAccess::load_acquire(volatile julong* p) { return Atomic::load((volatile jlong*)p); }
inline jfloat OrderAccess::load_acquire(volatile jfloat* p) { return *p; }
inline jdouble OrderAccess::load_acquire(volatile jdouble* p) { return *p; }
......@@ -80,11 +81,11 @@ inline void* OrderAccess::load_ptr_acquire(const volatile void* p) { return *
inline void OrderAccess::release_store(volatile jbyte* p, jbyte v) { *p = v; }
inline void OrderAccess::release_store(volatile jshort* p, jshort v) { *p = v; }
inline void OrderAccess::release_store(volatile jint* p, jint v) { *p = v; }
inline void OrderAccess::release_store(volatile jlong* p, jlong v) { *p = v; }
inline void OrderAccess::release_store(volatile jlong* p, jlong v) { Atomic::store(v, p); }
inline void OrderAccess::release_store(volatile jubyte* p, jubyte v) { *p = v; }
inline void OrderAccess::release_store(volatile jushort* p, jushort v) { *p = v; }
inline void OrderAccess::release_store(volatile juint* p, juint v) { *p = v; }
inline void OrderAccess::release_store(volatile julong* p, julong v) { *p = v; }
inline void OrderAccess::release_store(volatile julong* p, julong v) { Atomic::store((jlong)v, (volatile jlong*)p); }
inline void OrderAccess::release_store(volatile jfloat* p, jfloat v) { *p = v; }
inline void OrderAccess::release_store(volatile jdouble* p, jdouble v) { *p = v; }
......@@ -188,7 +189,7 @@ inline void OrderAccess::release_store_fence(volatile jint* p, jint v) {
#endif // AMD64
}
inline void OrderAccess::release_store_fence(volatile jlong* p, jlong v) { *p = v; fence(); }
inline void OrderAccess::release_store_fence(volatile jlong* p, jlong v) { release_store(p, v); fence(); }
inline void OrderAccess::release_store_fence(volatile jubyte* p, jubyte v) { release_store_fence((volatile jbyte*)p, (jbyte)v); }
inline void OrderAccess::release_store_fence(volatile jushort* p, jushort v) { release_store_fence((volatile jshort*)p, (jshort)v); }
......
/*
* Copyright (c) 2000, 2010, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2000, 2011, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
......@@ -154,12 +154,11 @@ jint Unsafe_invocation_key_to_method_slot(jint key) {
#define GET_FIELD_VOLATILE(obj, offset, type_name, v) \
oop p = JNIHandles::resolve(obj); \
volatile type_name v = *(volatile type_name*)index_oop_from_field_offset_long(p, offset)
volatile type_name v = OrderAccess::load_acquire((volatile type_name*)index_oop_from_field_offset_long(p, offset));
#define SET_FIELD_VOLATILE(obj, offset, type_name, x) \
oop p = JNIHandles::resolve(obj); \
*(volatile type_name*)index_oop_from_field_offset_long(p, offset) = x; \
OrderAccess::fence();
OrderAccess::release_store_fence((volatile type_name*)index_oop_from_field_offset_long(p, offset), x);
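For illustration (editor's note, derived from the two macros above): a volatile jlong accessed through Unsafe or reflection now expands to a single atomic 8-byte access with the required ordering, roughly:

    // GET_FIELD_VOLATILE(obj, offset, jlong, v) expands to:
    oop p = JNIHandles::resolve(obj);
    volatile jlong v = OrderAccess::load_acquire(
        (volatile jlong*)index_oop_from_field_offset_long(p, offset));

    // SET_FIELD_VOLATILE(obj, offset, jlong, x) expands to:
    oop p = JNIHandles::resolve(obj);
    OrderAccess::release_store_fence(
        (volatile jlong*)index_oop_from_field_offset_long(p, offset), x);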
// Macros for oops that check UseCompressedOops
......@@ -181,7 +180,8 @@ jint Unsafe_invocation_key_to_method_slot(jint key) {
v = oopDesc::decode_heap_oop(n); \
} else { \
v = *(volatile oop*)index_oop_from_field_offset_long(p, offset); \
}
} \
OrderAccess::acquire();
// Get/SetObject must be special-cased, since it works with handles.
......@@ -248,14 +248,22 @@ UNSAFE_ENTRY(void, Unsafe_SetObjectVolatile(JNIEnv *env, jobject unsafe, jobject
UnsafeWrapper("Unsafe_SetObjectVolatile");
oop x = JNIHandles::resolve(x_h);
oop p = JNIHandles::resolve(obj);
void* addr = index_oop_from_field_offset_long(p, offset);
OrderAccess::release();
if (UseCompressedOops) {
oop_store((narrowOop*)index_oop_from_field_offset_long(p, offset), x);
oop_store((narrowOop*)addr, x);
} else {
oop_store((oop*)index_oop_from_field_offset_long(p, offset), x);
oop_store((oop*)addr, x);
}
OrderAccess::fence();
UNSAFE_END
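A note on the barrier pattern above (editor's comment): the release() keeps earlier writes from sinking below the oop store, and the trailing fence() supplies the StoreLoad barrier a Java volatile write needs so it cannot be reordered with a subsequent volatile read; together they behave like a sequentially consistent store. A rough C++11 analogy (editor's sketch, not HotSpot code):

#include <atomic>

std::atomic<void*> field{nullptr};

void set_volatile_like(void* x) {
  field.store(x, std::memory_order_release);            // release(); store
  std::atomic_thread_fence(std::memory_order_seq_cst);  // fence(): StoreLoad
}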
#if defined(SPARC) || defined(X86)
// Sparc and X86 have atomic jlong (8 bytes) instructions
#else
// Keep old code for platforms which may not have atomic jlong (8 bytes) instructions
// Volatile long versions must use locks if !VM_Version::supports_cx8().
// support_cx8 is a surrogate for 'supports atomic long memory ops'.
......@@ -291,6 +299,7 @@ UNSAFE_ENTRY(void, Unsafe_SetLongVolatile(JNIEnv *env, jobject unsafe, jobject o
}
UNSAFE_END
#endif // not SPARC and not X86
#define DEFINE_GETSETOOP(jboolean, Boolean) \
\
......@@ -320,6 +329,16 @@ UNSAFE_END \
\
// END DEFINE_GETSETOOP.
DEFINE_GETSETOOP(jboolean, Boolean)
DEFINE_GETSETOOP(jbyte, Byte)
DEFINE_GETSETOOP(jshort, Short);
DEFINE_GETSETOOP(jchar, Char);
DEFINE_GETSETOOP(jint, Int);
DEFINE_GETSETOOP(jlong, Long);
DEFINE_GETSETOOP(jfloat, Float);
DEFINE_GETSETOOP(jdouble, Double);
#undef DEFINE_GETSETOOP
#define DEFINE_GETSETOOP_VOLATILE(jboolean, Boolean) \
\
......@@ -336,47 +355,49 @@ UNSAFE_END \
\
// END DEFINE_GETSETOOP_VOLATILE.
DEFINE_GETSETOOP(jboolean, Boolean)
DEFINE_GETSETOOP(jbyte, Byte)
DEFINE_GETSETOOP(jshort, Short);
DEFINE_GETSETOOP(jchar, Char);
DEFINE_GETSETOOP(jint, Int);
DEFINE_GETSETOOP(jlong, Long);
DEFINE_GETSETOOP(jfloat, Float);
DEFINE_GETSETOOP(jdouble, Double);
DEFINE_GETSETOOP_VOLATILE(jboolean, Boolean)
DEFINE_GETSETOOP_VOLATILE(jbyte, Byte)
DEFINE_GETSETOOP_VOLATILE(jshort, Short);
DEFINE_GETSETOOP_VOLATILE(jchar, Char);
DEFINE_GETSETOOP_VOLATILE(jint, Int);
// no long -- handled specially
DEFINE_GETSETOOP_VOLATILE(jfloat, Float);
DEFINE_GETSETOOP_VOLATILE(jdouble, Double);
#undef DEFINE_GETSETOOP
#if defined(SPARC) || defined(X86)
// Sparc and X86 have atomic jlong (8 bytes) instructions
DEFINE_GETSETOOP_VOLATILE(jlong, Long);
#endif
#undef DEFINE_GETSETOOP_VOLATILE
// The non-intrinsified versions of setOrdered just use setVolatile
UNSAFE_ENTRY(void, Unsafe_SetOrderedInt(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jint x)) \
UnsafeWrapper("Unsafe_SetOrderedInt"); \
SET_FIELD_VOLATILE(obj, offset, jint, x); \
UNSAFE_ENTRY(void, Unsafe_SetOrderedInt(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jint x))
UnsafeWrapper("Unsafe_SetOrderedInt");
SET_FIELD_VOLATILE(obj, offset, jint, x);
UNSAFE_END
UNSAFE_ENTRY(void, Unsafe_SetOrderedObject(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jobject x_h))
UnsafeWrapper("Unsafe_SetOrderedObject");
oop x = JNIHandles::resolve(x_h);
oop p = JNIHandles::resolve(obj);
void* addr = index_oop_from_field_offset_long(p, offset);
OrderAccess::release();
if (UseCompressedOops) {
oop_store((narrowOop*)index_oop_from_field_offset_long(p, offset), x);
oop_store((narrowOop*)addr, x);
} else {
oop_store((oop*)index_oop_from_field_offset_long(p, offset), x);
oop_store((oop*)addr, x);
}
OrderAccess::fence();
UNSAFE_END
UNSAFE_ENTRY(void, Unsafe_SetOrderedLong(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong x))
UnsafeWrapper("Unsafe_SetOrderedLong");
#if defined(SPARC) || defined(X86)
// Sparc and X86 have atomic jlong (8 bytes) instructions
SET_FIELD_VOLATILE(obj, offset, jlong, x);
#else
// Keep old code for platforms which may not have atomic long (8 bytes) instructions
{
if (VM_Version::supports_cx8()) {
SET_FIELD_VOLATILE(obj, offset, jlong, x);
......@@ -388,6 +409,7 @@ UNSAFE_ENTRY(void, Unsafe_SetOrderedLong(JNIEnv *env, jobject unsafe, jobject ob
*addr = x;
}
}
#endif
UNSAFE_END
////// Data in the C heap.
......