Commit 4b0c3bd9 authored by ccheung

8014431: cleanup warnings indicated by the -Wunused-value compiler option on linux

Reviewed-by: dholmes, coleenp
Contributed-by: jeremymanson@google.com, calvin.cheung@oracle.com
Parent 118491a3
@@ -214,7 +214,7 @@ ifeq ($(USE_CLANG), true)
 WARNINGS_ARE_ERRORS += -Wno-return-type -Wno-empty-body
 endif
-WARNING_FLAGS = -Wpointer-arith -Wsign-compare -Wundef -Wunused-function
+WARNING_FLAGS = -Wpointer-arith -Wsign-compare -Wundef -Wunused-function -Wunused-value
 ifeq ($(USE_CLANG),)
 # Since GCC 4.3, -Wconversion has changed its meanings to warn these implicit
......
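For context, the class of warning this changeset cleans up (and which the -Wunused-value flag added to WARNING_FLAGS above now reports during the build) comes from expression statements whose computed value is silently dropped. A standalone illustration, not code from the patch:

    // unused_value.cpp -- illustration only; compile with: g++ -Wunused-value -c unused_value.cpp
    int counter = 0;

    void example() {
      counter;              // warns: statement has no effect
      counter + 1;          // warns: value computed is not used
      (void)counter;        // explicitly discarded: no warning
      (void)(counter + 1);  // explicitly discarded: no warning
    }

    int main() { example(); return counter; }

Casting the discarded expression to void, as most of the hunks below do, is the conventional way to tell the compiler the value is being ignored on purpose.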
@@ -83,7 +83,7 @@ class StubGenerator: public StubCodeGenerator {
 private:
 #ifdef PRODUCT
-#define inc_counter_np(counter) (0)
+#define inc_counter_np(counter) ((void)0)
 #else
 void inc_counter_np_(int& counter) {
 __ incrementl(ExternalAddress((address)&counter));
......
@@ -81,7 +81,7 @@ class StubGenerator: public StubCodeGenerator {
 private:
 #ifdef PRODUCT
-#define inc_counter_np(counter) (0)
+#define inc_counter_np(counter) ((void)0)
 #else
 void inc_counter_np_(int& counter) {
 // This can destroy rscratch1 if counter is far from the code cache
......
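Both StubGenerator files get the same fix: a statement-style macro that must collapse to nothing in PRODUCT builds used to expand to the bare expression (0), which left a discarded value at every call site, while ((void)0) is an explicit no-op. A minimal sketch of the pattern (the non-PRODUCT body below is illustrative, not the real definition):

    #ifdef PRODUCT
    #define inc_counter_np(counter) ((void)0)      // no-op that -Wunused-value accepts
    #else
    #define inc_counter_np(counter) ((counter)++)  // stand-in for the real debug version
    #endif

    static int g_calls = 0;

    int main() {
      inc_counter_np(g_calls);   // expands to a harmless statement either way
      return g_calls == 1 ? 0 : 1;
    }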
@@ -506,7 +506,7 @@ ComputeLinearScanOrder::ComputeLinearScanOrder(Compilation* c, BlockBegin* start
 _loop_map(0, 0), // initialized later with correct size
 _compilation(c)
 {
-TRACE_LINEAR_SCAN(2, "***** computing linear-scan block order");
+TRACE_LINEAR_SCAN(2, tty->print_cr("***** computing linear-scan block order"));
 init_visited();
 count_edges(start_block, NULL);
@@ -683,7 +683,7 @@ void ComputeLinearScanOrder::clear_non_natural_loops(BlockBegin* start_block) {
 }
 void ComputeLinearScanOrder::assign_loop_depth(BlockBegin* start_block) {
-TRACE_LINEAR_SCAN(3, "----- computing loop-depth and weight");
+TRACE_LINEAR_SCAN(3, tty->print_cr("----- computing loop-depth and weight"));
 init_visited();
 assert(_work_list.is_empty(), "work list must be empty before processing");
@@ -868,7 +868,7 @@ void ComputeLinearScanOrder::append_block(BlockBegin* cur) {
 }
 void ComputeLinearScanOrder::compute_order(BlockBegin* start_block) {
-TRACE_LINEAR_SCAN(3, "----- computing final block order");
+TRACE_LINEAR_SCAN(3, tty->print_cr("----- computing final block order"));
 // the start block is always the first block in the linear scan order
 _linear_scan_order = new BlockList(_num_blocks);
......
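TRACE_LINEAR_SCAN presumably runs its second argument only when the trace level threshold is met; passing a bare string literal meant the expansion contained a string-valued expression statement, which -Wunused-value flags, so the call sites now pass the tty->print_cr(...) statement itself. A simplified model of such a macro (the gate variable and macro body are assumptions, not the real definitions):

    #include <cstdio>

    static int TraceLevel = 3;  // assumed trace gate for this sketch

    // Level-gated trace macro: the second argument is a statement to execute.
    #define TRACE(level, code) \
      do { if (TraceLevel >= (level)) { code; } } while (0)

    int main() {
      // Old style: TRACE(2, "***** computing linear-scan block order");
      //            -> the string literal becomes an unused value.
      TRACE(2, printf("***** computing linear-scan block order\n"));
      return 0;
    }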
@@ -96,7 +96,7 @@
 CLEAR_PENDING_EXCEPTION; \
 return (result); \
 } \
-(0
+(void)(0
 #define KILL_COMPILE_ON_ANY \
 THREAD); \
@@ -104,7 +104,7 @@
 fatal("unhandled ci exception"); \
 CLEAR_PENDING_EXCEPTION; \
 } \
-(0
+(void)(0
 inline const char* bool_to_str(bool b) {
......
@@ -124,7 +124,7 @@ class DescriptorStream : public ResourceObj {
 fatal(STREAM->parse_error()); \
 } \
 return NULL; \
-} 0
+} (void)0
 #define READ() STREAM->read(); CHECK_FOR_PARSE_ERROR()
 #define PEEK() STREAM->peek(); CHECK_FOR_PARSE_ERROR()
@@ -133,7 +133,7 @@ class DescriptorStream : public ResourceObj {
 #define EXPECTED(c, ch) STREAM->assert_char(c, ch); CHECK_FOR_PARSE_ERROR()
 #define EXPECT_END() STREAM->expect_end(); CHECK_FOR_PARSE_ERROR()
-#define CHECK_STREAM STREAM); CHECK_FOR_PARSE_ERROR(); (0
+#define CHECK_STREAM STREAM); CHECK_FOR_PARSE_ERROR(); ((void)0
 #ifndef PRODUCT
 void Identifier::print_on(outputStream* str) const {
......
@@ -86,9 +86,9 @@ class StackMapTable;
 // These macros are used similarly to CHECK macros but also check
 // the status of the verifier and return if that has an error.
 #define CHECK_VERIFY(verifier) \
-CHECK); if ((verifier)->has_error()) return; (0
+CHECK); if ((verifier)->has_error()) return; ((void)0
 #define CHECK_VERIFY_(verifier, result) \
-CHECK_(result)); if ((verifier)->has_error()) return (result); (0
+CHECK_(result)); if ((verifier)->has_error()) return (result); ((void)0
 class TypeOrigin VALUE_OBJ_CLASS_SPEC {
 private:
......
@@ -989,7 +989,7 @@ Klass* ClassHierarchyWalker::find_witness_in(KlassDepChange& changes,
 assert(changes.involves_context(context_type), "irrelevant dependency");
 Klass* new_type = changes.new_type();
-count_find_witness_calls();
+(void)count_find_witness_calls();
 NOT_PRODUCT(deps_find_witness_singles++);
 // Current thread must be in VM (not native mode, as in CI):
......
@@ -2615,7 +2615,8 @@ void nmethod::print_relocations() {
 relocation_begin()-1+ip[1]);
 for (; ip < index_end; ip++)
 tty->print_cr(" (%d ?)", ip[0]);
-tty->print_cr(" @" INTPTR_FORMAT ": index_size=%d", ip, *ip++);
+tty->print_cr(" @" INTPTR_FORMAT ": index_size=%d", ip, *ip);
+ip++;
 tty->print_cr("reloc_end @" INTPTR_FORMAT ":", ip);
 }
 }
......
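The print_relocations() change also removes an ordering hazard: ip and *ip++ were arguments of the same call, and the order in which function arguments are evaluated is unspecified, so the printed address did not reliably correspond to the value read; reading first and incrementing on the next line makes the intent unambiguous. A detached illustration (plain printf stands in for tty->print_cr):

    #include <cstdio>

    static void print_entry(const int* p) {
      // printf("%p: %d\n", (const void*)p, *p++);  // old shape: unsequenced use of p
      printf("%p: %d\n", (const void*)p, *p);       // read first ...
      p++;                                          // ... then advance
      printf("end: %p\n", (const void*)p);
    }

    int main() {
      int data[2] = { 42, 7 };
      print_entry(data);
      return 0;
    }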
@@ -412,7 +412,7 @@ void CardTableModRefBS::resize_covered_region(MemRegion new_region) {
 }
 // Touch the last card of the covered region to show that it
 // is committed (or SEGV).
-debug_only(*byte_for(_covered[ind].last());)
+debug_only((void) (*byte_for(_covered[ind].last()));)
 debug_only(verify_guard();)
 }
......
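The card-table hunk keeps the debug-only read whose sole purpose is to fault if the last card of the covered region is not actually committed; the byte that is read is deliberately thrown away, and wrapping the dereference in (void) says so instead of leaving a bare expression statement for -Wunused-value. A detached sketch of the idiom (volatile is used here only so the illustration's read cannot be optimized out):

    // Touch a byte purely for its side effect: the access faults if the page
    // is not mapped; the value itself is discarded.
    static void touch(const volatile unsigned char* p) {
      (void)*p;
    }

    int main() {
      unsigned char committed_card = 0;
      touch(&committed_card);
      return 0;
    }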
@@ -529,7 +529,9 @@ void Universe::reinitialize_vtable_of(KlassHandle k_h, TRAPS) {
 if (vt) vt->initialize_vtable(false, CHECK);
 if (ko->oop_is_instance()) {
 InstanceKlass* ik = (InstanceKlass*)ko;
-for (KlassHandle s_h(THREAD, ik->subklass()); s_h() != NULL; s_h = (THREAD, s_h()->next_sibling())) {
+for (KlassHandle s_h(THREAD, ik->subklass());
+     s_h() != NULL;
+     s_h = KlassHandle(THREAD, s_h()->next_sibling())) {
 reinitialize_vtable_of(s_h, CHECK);
 }
 }
......
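In the old Universe::reinitialize_vtable_of loop, (THREAD, s_h()->next_sibling()) is a comma expression: THREAD is evaluated and its value discarded, which is exactly what -Wunused-value reports, and the assignment then goes through a one-argument conversion rather than the intended two-argument KlassHandle constructor; the rewritten loop spells the constructor out. A small standalone illustration of the pitfall (Handle, Thread and Node here are made-up stand-ins, not the HotSpot types):

    #include <cstdio>

    struct Thread {};
    struct Node { Node* next; };

    struct Handle {
      Node* _obj;
      Handle(Thread*, Node* obj) : _obj(obj) {}
      Handle(Node* obj) : _obj(obj) {}   // implicit conversion the buggy form falls back on
    };

    int main() {
      Thread t;
      Node second = { 0 };
      Node first  = { &second };

      Handle h(&t, &first);
      // Comma-expression form: "&t" is computed and thrown away, so
      // -Wunused-value reports that the left operand of the comma has no effect.
      h = (&t, h._obj->next);
      // Intended form: construct the handle explicitly with both arguments.
      h = Handle(&t, h._obj->next);

      printf("%p\n", (void*)h._obj);
      return 0;
    }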
@@ -4384,7 +4384,7 @@ static void verify_memory_slice(const MergeMemNode* m, int alias_idx, Node* n) {
 }
 }
 #else // !ASSERT
-#define verify_memory_slice(m,i,n) (0) // PRODUCT version is no-op
+#define verify_memory_slice(m,i,n) (void)(0) // PRODUCT version is no-op
 #endif
......
@@ -619,7 +619,7 @@ void collector_func_load(char* name,
 void* null_argument_3);
 #pragma weak collector_func_load
 #define collector_func_load(x0,x1,x2,x3,x4,x5,x6) \
-( collector_func_load ? collector_func_load(x0,x1,x2,x3,x4,x5,x6),0 : 0 )
+( collector_func_load ? collector_func_load(x0,x1,x2,x3,x4,x5,x6),(void)0 : (void)0 )
 #endif // __APPLE__
 #endif // !_WINDOWS
......
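The collector_func_load macro guards a weakly linked hook: when the symbol is absent the function pointer is null and the call must be skipped. Casting both arms of the conditional to void keeps the whole expression valueless, so the statement no longer produces an int that every call site drops. A self-contained sketch of the same guard, using an ordinary function pointer instead of a real weak symbol:

    #include <cstdio>

    // Stand-in for a weakly linked profiler hook; may be null at run time.
    typedef void (*load_hook_t)(const char* name);
    static load_hook_t load_hook = 0;   // pretend the weak symbol was not resolved

    // Both arms are void, so the conditional itself has no discarded value.
    #define NOTIFY_LOAD(name) \
      ( load_hook ? load_hook(name), (void)0 : (void)0 )

    int main() {
      NOTIFY_LOAD("libfoo.so");   // safe no-op while the hook is absent
      return 0;
    }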
@@ -2731,7 +2731,7 @@ VMRegPair *SharedRuntime::find_callee_arguments(Symbol* sig, bool has_receiver,
 // ResourceObject, so do not put any ResourceMarks in here.
 char *s = sig->as_C_string();
 int len = (int)strlen(s);
-*s++; len--; // Skip opening paren
+s++; len--; // Skip opening paren
 char *t = s+len;
 while( *(--t) != ')' ) ; // Find close paren
......
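In find_callee_arguments only the pointer increment was ever wanted; *s++ also loads the character and discards it, a textbook "value computed is not used" case, so plain s++ is both quieter and clearer. For illustration:

    #include <cstdio>

    static void skip_opening_paren(const char*& s) {
      // *s++;   // old: dereferences and discards the char -> -Wunused-value
      s++;       // new: just advance past the '('
    }

    int main() {
      const char* sig = "(IJ)V";
      skip_opening_paren(sig);
      printf("%s\n", sig);   // prints "IJ)V"
      return 0;
    }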
@@ -247,7 +247,7 @@ template <> void DCmdArgument<NanoTimeArgument>::init_value(TRAPS) {
 } else {
 _value._time = 0;
 _value._nanotime = 0;
-strcmp(_value._unit, "ns");
+strcpy(_value._unit, "ns");
 }
 }
......
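The DCmdArgument<NanoTimeArgument> hunk is the one place where the new warning uncovered a genuine bug rather than noise: strcmp only compares, and its discarded return value was all the statement produced, so the default unit was never written; strcpy is what the surrounding code intended. A reduced illustration (the struct below is a simplified stand-in for the real argument value):

    #include <cstring>
    #include <cstdio>

    struct NanoTimeValue {
      long _time;
      long _nanotime;
      char _unit[3];
    };

    int main() {
      NanoTimeValue v = { 0, 0, "" };
      // strcmp(v._unit, "ns");   // compares and throws the result away -> -Wunused-value
      strcpy(v._unit, "ns");      // actually records the default unit
      printf("%s\n", v._unit);
      return 0;
    }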
@@ -194,15 +194,15 @@ class Exceptions {
 #define HAS_PENDING_EXCEPTION (((ThreadShadow*)THREAD)->has_pending_exception())
 #define CLEAR_PENDING_EXCEPTION (((ThreadShadow*)THREAD)->clear_pending_exception())
-#define CHECK THREAD); if (HAS_PENDING_EXCEPTION) return ; (0
-#define CHECK_(result) THREAD); if (HAS_PENDING_EXCEPTION) return result; (0
+#define CHECK THREAD); if (HAS_PENDING_EXCEPTION) return ; (void)(0
+#define CHECK_(result) THREAD); if (HAS_PENDING_EXCEPTION) return result; (void)(0
 #define CHECK_0 CHECK_(0)
 #define CHECK_NH CHECK_(Handle())
 #define CHECK_NULL CHECK_(NULL)
 #define CHECK_false CHECK_(false)
-#define CHECK_AND_CLEAR THREAD); if (HAS_PENDING_EXCEPTION) { CLEAR_PENDING_EXCEPTION; return; } (0
-#define CHECK_AND_CLEAR_(result) THREAD); if (HAS_PENDING_EXCEPTION) { CLEAR_PENDING_EXCEPTION; return result; } (0
+#define CHECK_AND_CLEAR THREAD); if (HAS_PENDING_EXCEPTION) { CLEAR_PENDING_EXCEPTION; return; } (void)(0
+#define CHECK_AND_CLEAR_(result) THREAD); if (HAS_PENDING_EXCEPTION) { CLEAR_PENDING_EXCEPTION; return result; } (void)(0
 #define CHECK_AND_CLEAR_0 CHECK_AND_CLEAR_(0)
 #define CHECK_AND_CLEAR_NH CHECK_AND_CLEAR_(Handle())
 #define CHECK_AND_CLEAR_NULL CHECK_AND_CLEAR_(NULL)
@@ -282,7 +282,7 @@ class Exceptions {
 CLEAR_PENDING_EXCEPTION; \
 ex->print(); \
 ShouldNotReachHere(); \
-} (0
+} (void)(0
 // ExceptionMark is a stack-allocated helper class for local exception handling.
 // It is used with the EXCEPTION_MARK macro.
......
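The CHECK family defined here (like CHECK_VERIFY, CHECK_STREAM and KILL_COMPILE_ON_ANY above) is the bracket-balancing kind of macro: the caller writes foo(arg, CHECK); and the macro supplies the closing parenthesis of the call, the pending-exception test, and then reopens a parenthesis around 0 so that the caller's trailing ); still parses. The expansion therefore used to end in the statement (0);, precisely what -Wunused-value flags, and prefixing (void) keeps the trick while discarding the 0 explicitly. A minimal compilable model of the convention (the Thread type and TRAPS definition below are simplified stand-ins, not the HotSpot ones):

    #include <cstdio>

    struct Thread { bool pending; };

    #define TRAPS                  Thread* THREAD
    #define HAS_PENDING_EXCEPTION  (THREAD->pending)
    #define CHECK                  THREAD); if (HAS_PENDING_EXCEPTION) return; (void)(0

    static void may_throw(int x, TRAPS) {
      if (x < 0) THREAD->pending = true;
    }

    static void caller(TRAPS) {
      // "may_throw(-1, CHECK);" expands roughly to:
      //   may_throw(-1, THREAD); if (THREAD->pending) return; (void)(0);
      may_throw(-1, CHECK);
      printf("not reached when an exception is pending\n");
    }

    int main() {
      Thread t = { false };
      caller(&t);
      return t.pending ? 0 : 1;
    }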
@@ -340,8 +340,12 @@ bool GenericTaskQueue<E, F, N>::push_slow(E t, uint dirty_n_elems) {
 if (dirty_n_elems == N - 1) {
 // Actually means 0, so do the push.
 uint localBot = _bottom;
-// g++ complains if the volatile result of the assignment is unused.
-const_cast<E&>(_elems[localBot] = t);
+// g++ complains if the volatile result of the assignment is
+// unused, so we cast the volatile away. We cannot cast directly
+// to void, because gcc treats that as not using the result of the
+// assignment. However, casting to E& means that we trigger an
+// unused-value warning. So, we cast the E& to void.
+(void)const_cast<E&>(_elems[localBot] = t);
 OrderAccess::release_store(&_bottom, increment_index(localBot));
 TASKQUEUE_STATS_ONLY(stats.record_push());
 return true;
@@ -397,7 +401,12 @@ bool GenericTaskQueue<E, F, N>::pop_global(E& t) {
 return false;
 }
-const_cast<E&>(t = _elems[oldAge.top()]);
+// g++ complains if the volatile result of the assignment is
+// unused, so we cast the volatile away. We cannot cast directly
+// to void, because gcc treats that as not using the result of the
+// assignment. However, casting to E& means that we trigger an
+// unused-value warning. So, we cast the E& to void.
+(void) const_cast<E&>(t = _elems[oldAge.top()]);
 Age newAge(oldAge);
 newAge.increment();
 Age resAge = _age.cmpxchg(newAge, oldAge);
@@ -640,8 +649,12 @@ GenericTaskQueue<E, F, N>::push(E t) {
 uint dirty_n_elems = dirty_size(localBot, top);
 assert(dirty_n_elems < N, "n_elems out of range.");
 if (dirty_n_elems < max_elems()) {
-// g++ complains if the volatile result of the assignment is unused.
-const_cast<E&>(_elems[localBot] = t);
+// g++ complains if the volatile result of the assignment is
+// unused, so we cast the volatile away. We cannot cast directly
+// to void, because gcc treats that as not using the result of the
+// assignment. However, casting to E& means that we trigger an
+// unused-value warning. So, we cast the E& to void.
+(void) const_cast<E&>(_elems[localBot] = t);
 OrderAccess::release_store(&_bottom, increment_index(localBot));
 TASKQUEUE_STATS_ONLY(stats.record_push());
 return true;
@@ -665,7 +678,12 @@ GenericTaskQueue<E, F, N>::pop_local(E& t) {
 // This is necessary to prevent any read below from being reordered
 // before the store just above.
 OrderAccess::fence();
-const_cast<E&>(t = _elems[localBot]);
+// g++ complains if the volatile result of the assignment is
+// unused, so we cast the volatile away. We cannot cast directly
+// to void, because gcc treats that as not using the result of the
+// assignment. However, casting to E& means that we trigger an
+// unused-value warning. So, we cast the E& to void.
+(void) const_cast<E&>(t = _elems[localBot]);
 // This is a second read of "age"; the "size()" above is the first.
 // If there's still at least one element in the queue, based on the
 // "_bottom" and "age" we've read, then there can be no interference with
......
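The GenericTaskQueue hunks carry their own rationale in the new comments: the element array is volatile, g++ objects when the volatile result of the assignment is left unused, casting that result straight to void makes gcc treat the result as not used at all, and const_cast<E&> alone leaves a non-volatile value that -Wunused-value then reports, so the code stacks the two casts. A reduced sketch of the final idiom (E, the array and the queue logic are simplified here; the compiler-version behaviour is as described in the patch comments, not verified independently):

    // Element type and volatile backing store, heavily simplified.
    typedef int E;
    static volatile E _elems[8];

    static void store_at(unsigned idx, E t) {
      // Strip volatile from the assignment result, then discard it explicitly.
      (void) const_cast<E&>(_elems[idx] = t);
    }

    int main() {
      store_at(0, 42);
      return _elems[0] == 42 ? 0 : 1;
    }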