diff --git a/src/cpu/ppc/vm/compiledIC_ppc.cpp b/src/cpu/ppc/vm/compiledIC_ppc.cpp
index 1b54594d09cdf86c62c21d2c74adde1219085081..762a329e25973e15c77a99815df1badb0fe2e1ef 100644
--- a/src/cpu/ppc/vm/compiledIC_ppc.cpp
+++ b/src/cpu/ppc/vm/compiledIC_ppc.cpp
@@ -94,7 +94,7 @@ bool CompiledIC::is_icholder_call_site(virtual_call_Relocation* call_site) {
 
 const int IC_pos_in_java_to_interp_stub = 8;
 #define __ _masm.
-void CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf) {
+address CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf) {
 #ifdef COMPILER2
   // Get the mark within main instrs section which is set to the address of the call.
   address call_addr = cbuf.insts_mark();
@@ -106,8 +106,7 @@ void CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf) {
   // Start the stub.
   address stub = __ start_a_stub(CompiledStaticCall::to_interp_stub_size());
   if (stub == NULL) {
-    Compile::current()->env()->record_out_of_memory_failure();
-    return;
+    return NULL; // CodeCache is full
   }
 
   // For java_to_interp stubs we use R11_scratch1 as scratch register
@@ -149,6 +148,7 @@ void CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf) {
 
  // End the stub.
   __ end_a_stub();
+  return stub;
 #else
   ShouldNotReachHere();
 #endif
diff --git a/src/cpu/ppc/vm/ppc.ad b/src/cpu/ppc/vm/ppc.ad
index 9fd4d731662fdd72030343a8a00254d8ed4e2f91..b3ca8ad9c7fbd11b4b1fe9f0da138d8852449b99 100644
--- a/src/cpu/ppc/vm/ppc.ad
+++ b/src/cpu/ppc/vm/ppc.ad
@@ -1171,7 +1171,7 @@ void CallStubImpl::emit_trampoline_stub(MacroAssembler &_masm, int destination_t
   // Start the stub.
   address stub = __ start_a_stub(Compile::MAX_stubs_size/2);
   if (stub == NULL) {
-    Compile::current()->env()->record_out_of_memory_failure();
+    ciEnv::current()->record_failure("CodeCache is full");
     return;
   }
 
@@ -1249,7 +1249,7 @@ EmitCallOffsets emit_call_with_trampoline_stub(MacroAssembler &_masm, address en
 
     // Emit the trampoline stub which will be related to the branch-and-link below.
     CallStubImpl::emit_trampoline_stub(_masm, entry_point_toc_offset, offsets.insts_call_instruction_offset);
-    if (Compile::current()->env()->failing()) { return offsets; } // Code cache may be full.
+    if (ciEnv::current()->failing()) { return offsets; } // Code cache may be full.
     __ relocate(rtype);
   }
 
@@ -3488,7 +3488,7 @@ encode %{
 
         // Emit the trampoline stub which will be related to the branch-and-link below.
         CallStubImpl::emit_trampoline_stub(_masm, entry_point_toc_offset, start_offset);
-        if (Compile::current()->env()->failing()) { return; } // Code cache may be full.
+        if (ciEnv::current()->failing()) { return; } // Code cache may be full.
         __ relocate(_optimized_virtual ?
                     relocInfo::opt_virtual_call_type : relocInfo::static_call_type);
       }
@@ -3501,7 +3501,11 @@ encode %{
       __ bl(__ pc());  // Emits a relocation.
 
       // The stub for call to interpreter.
-      CompiledStaticCall::emit_to_interp_stub(cbuf);
+      address stub = CompiledStaticCall::emit_to_interp_stub(cbuf);
+      if (stub == NULL) {
+        ciEnv::current()->record_failure("CodeCache is full");
+        return;
+      }
     }
   %}
 
@@ -3546,7 +3550,11 @@ encode %{
 
     assert(_method, "execute next statement conditionally");
     // The stub for call to interpreter.
-    CompiledStaticCall::emit_to_interp_stub(cbuf);
+    address stub = CompiledStaticCall::emit_to_interp_stub(cbuf);
+    if (stub == NULL) {
+      ciEnv::current()->record_failure("CodeCache is full");
+      return;
+    }
 
     // Restore original sp.
     __ ld(R11_scratch1, 0, R1_SP); // Load caller sp.
diff --git a/src/cpu/sparc/vm/c1_CodeStubs_sparc.cpp b/src/cpu/sparc/vm/c1_CodeStubs_sparc.cpp
index 8584037f1cabc3f80da90ab1f9c6135ade8d7a54..7c6a1d745713ca86cd96acdba5c9cb19656ade5b 100644
--- a/src/cpu/sparc/vm/c1_CodeStubs_sparc.cpp
+++ b/src/cpu/sparc/vm/c1_CodeStubs_sparc.cpp
@@ -431,6 +431,9 @@ void ArrayCopyStub::emit_code(LIR_Assembler* ce) {
   __ mov(length()->as_register(),  O4);
 
   ce->emit_static_call_stub();
+  if (ce->compilation()->bailed_out()) {
+    return; // CodeCache is full
+  }
 
   __ call(SharedRuntime::get_resolve_static_call_stub(), relocInfo::static_call_type);
   __ delayed()->nop();
diff --git a/src/cpu/sparc/vm/compiledIC_sparc.cpp b/src/cpu/sparc/vm/compiledIC_sparc.cpp
index dee64dffbbe0547380f3392d8408a63c6cfa57c8..5429e33cf0b1ae3037442a025376299d0ad02181 100644
--- a/src/cpu/sparc/vm/compiledIC_sparc.cpp
+++ b/src/cpu/sparc/vm/compiledIC_sparc.cpp
@@ -53,7 +53,7 @@ bool CompiledIC::is_icholder_call_site(virtual_call_Relocation* call_site) {
 // ----------------------------------------------------------------------------
 
 #define __ _masm.
-void CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf) {
+address CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf) {
 #ifdef COMPILER2
   // Stub is fixed up when the corresponding call is converted from calling
   // compiled code to calling interpreted code.
@@ -64,9 +64,10 @@ void CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf) {
 
   MacroAssembler _masm(&cbuf);
 
-  address base =
-  __ start_a_stub(to_interp_stub_size()*2);
-  if (base == NULL) return;  // CodeBuffer::expand failed.
+  address base = __ start_a_stub(to_interp_stub_size());
+  if (base == NULL) {
+    return NULL;  // CodeBuffer::expand failed.
+  }
 
   // Static stub relocation stores the instruction address of the call.
   __ relocate(static_stub_Relocation::spec(mark));
@@ -81,6 +82,7 @@ void CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf) {
 
   // Update current stubs pointer and restore code_end.
   __ end_a_stub();
+  return base;
 #else
   ShouldNotReachHere();
 #endif
diff --git a/src/cpu/sparc/vm/sparc.ad b/src/cpu/sparc/vm/sparc.ad
index 3d43f6b87fd4e8439f70893f032d298f18c5c46d..0b46f32f50715b3a2dfd833ffb44bf4c1e5803f3 100644
--- a/src/cpu/sparc/vm/sparc.ad
+++ b/src/cpu/sparc/vm/sparc.ad
@@ -1775,9 +1775,11 @@ int HandlerImpl::emit_exception_handler(CodeBuffer& cbuf) {
   AddressLiteral exception_blob(OptoRuntime::exception_blob()->entry_point());
   MacroAssembler _masm(&cbuf);
 
-  address base =
-  __ start_a_stub(size_exception_handler());
-  if (base == NULL)  return 0;  // CodeBuffer::expand failed
+  address base = __ start_a_stub(size_exception_handler());
+  if (base == NULL) {
+    ciEnv::current()->record_failure("CodeCache is full");
+    return 0;  // CodeBuffer::expand failed
+  }
 
   int offset = __ offset();
 
@@ -1798,9 +1800,11 @@ int HandlerImpl::emit_deopt_handler(CodeBuffer& cbuf) {
   AddressLiteral deopt_blob(SharedRuntime::deopt_blob()->unpack());
   MacroAssembler _masm(&cbuf);
 
-  address base =
-  __ start_a_stub(size_deopt_handler());
-  if (base == NULL)  return 0;  // CodeBuffer::expand failed
+  address base = __ start_a_stub(size_deopt_handler());
+  if (base == NULL) {
+    ciEnv::current()->record_failure("CodeCache is full");
+    return 0;  // CodeBuffer::expand failed
+  }
 
   int offset = __ offset();
   __ save_frame(0);
@@ -2601,7 +2605,12 @@ encode %{
       emit_call_reloc(cbuf, $meth$$method, relocInfo::static_call_type);
     }
     if (_method) {  // Emit stub for static call.
-      CompiledStaticCall::emit_to_interp_stub(cbuf);
+      address stub = CompiledStaticCall::emit_to_interp_stub(cbuf);
+      // Stub does not fit into scratch buffer if TraceJumps is enabled
+      if (stub == NULL && !(TraceJumps && Compile::current()->in_scratch_emit_size())) {
+        ciEnv::current()->record_failure("CodeCache is full");
+        return;
+      }
     }
   %}
 
diff --git a/src/cpu/x86/vm/c1_CodeStubs_x86.cpp b/src/cpu/x86/vm/c1_CodeStubs_x86.cpp
index a6e7731bdd53f387ccdde535f8ef1cbf00166f58..b96cb138509d6e159b5c38ed3e083defe0134a00 100644
--- a/src/cpu/x86/vm/c1_CodeStubs_x86.cpp
+++ b/src/cpu/x86/vm/c1_CodeStubs_x86.cpp
@@ -502,6 +502,9 @@ void ArrayCopyStub::emit_code(LIR_Assembler* ce) {
   ce->align_call(lir_static_call);
 
   ce->emit_static_call_stub();
+  if (ce->compilation()->bailed_out()) {
+    return; // CodeCache is full
+  }
   AddressLiteral resolve(SharedRuntime::get_resolve_static_call_stub(),
                          relocInfo::static_call_type);
   __ call(resolve);
diff --git a/src/cpu/x86/vm/compiledIC_x86.cpp b/src/cpu/x86/vm/compiledIC_x86.cpp
index 9537ef971f7a22adcde0593566ac0cff48f08c7e..29eba2fb99d93712400af09b4ee2794de5af53d0 100644
--- a/src/cpu/x86/vm/compiledIC_x86.cpp
+++ b/src/cpu/x86/vm/compiledIC_x86.cpp
@@ -50,7 +50,7 @@ bool CompiledIC::is_icholder_call_site(virtual_call_Relocation* call_site) {
 // ----------------------------------------------------------------------------
 
 #define __ _masm.
-void CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf) {
+address CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf) {
   // Stub is fixed up when the corresponding call is converted from
   // calling compiled code to calling interpreted code.
   // movq rbx, 0
@@ -62,9 +62,10 @@ void CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf) {
   // That's why we must use the macroassembler to generate a stub.
   MacroAssembler _masm(&cbuf);
 
-  address base =
-  __ start_a_stub(to_interp_stub_size()*2);
-  if (base == NULL) return;  // CodeBuffer::expand failed.
+  address base = __ start_a_stub(to_interp_stub_size());
+  if (base == NULL) {
+    return NULL;  // CodeBuffer::expand failed.
+  }
   // Static stub relocation stores the instruction address of the call.
   __ relocate(static_stub_Relocation::spec(mark), Assembler::imm_operand);
   // Static stub relocation also tags the Method* in the code-stream.
@@ -74,6 +75,7 @@ void CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf) {
 
   // Update current stubs pointer and restore insts_end.
   __ end_a_stub();
+  return base;
 }
 #undef __
 
diff --git a/src/cpu/x86/vm/x86.ad b/src/cpu/x86/vm/x86.ad
index 8d402c1c6ae3dbe3cc25b8e315a49a586420acb3..4e2499a0a18d078b3a15eaedbe797c3b2351b793 100644
--- a/src/cpu/x86/vm/x86.ad
+++ b/src/cpu/x86/vm/x86.ad
@@ -550,7 +550,10 @@ int HandlerImpl::emit_exception_handler(CodeBuffer& cbuf) {
   // That's why we must use the macroassembler to generate a handler.
   MacroAssembler _masm(&cbuf);
   address base = __ start_a_stub(size_exception_handler());
-  if (base == NULL)  return 0;  // CodeBuffer::expand failed
+  if (base == NULL) {
+    ciEnv::current()->record_failure("CodeCache is full");
+    return 0;  // CodeBuffer::expand failed
+  }
   int offset = __ offset();
   __ jump(RuntimeAddress(OptoRuntime::exception_blob()->entry_point()));
   assert(__ offset() - offset <= (int) size_exception_handler(), "overflow");
@@ -565,7 +568,10 @@ int HandlerImpl::emit_deopt_handler(CodeBuffer& cbuf) {
   // That's why we must use the macroassembler to generate a handler.
   MacroAssembler _masm(&cbuf);
   address base = __ start_a_stub(size_deopt_handler());
-  if (base == NULL)  return 0;  // CodeBuffer::expand failed
+  if (base == NULL) {
+    ciEnv::current()->record_failure("CodeCache is full");
+    return 0;  // CodeBuffer::expand failed
+  }
   int offset = __ offset();
 
 #ifdef _LP64
diff --git a/src/cpu/x86/vm/x86_32.ad b/src/cpu/x86/vm/x86_32.ad
index 42aba5b213a6339db948e164ccdb446abc611c3b..512ec20291918f98306e29d00a2f8f0a82e7da51 100644
--- a/src/cpu/x86/vm/x86_32.ad
+++ b/src/cpu/x86/vm/x86_32.ad
@@ -1870,7 +1870,11 @@ encode %{
                      static_call_Relocation::spec(), RELOC_IMM32 );
     }
     if (_method) {  // Emit stub for static call.
-      CompiledStaticCall::emit_to_interp_stub(cbuf);
+      address stub = CompiledStaticCall::emit_to_interp_stub(cbuf);
+      if (stub == NULL) {
+        ciEnv::current()->record_failure("CodeCache is full");
+        return;
+      }
     }
   %}
 
diff --git a/src/cpu/x86/vm/x86_64.ad b/src/cpu/x86/vm/x86_64.ad
index 52bc63507a69cd3c51e9d19b444d5a40b77b16f8..ed304df26351473b3eb9f55da303b7921c8eebc3 100644
--- a/src/cpu/x86/vm/x86_64.ad
+++ b/src/cpu/x86/vm/x86_64.ad
@@ -2125,7 +2125,11 @@ encode %{
     }
     if (_method) {
       // Emit stub for static call.
-      CompiledStaticCall::emit_to_interp_stub(cbuf);
+      address stub = CompiledStaticCall::emit_to_interp_stub(cbuf);
+      if (stub == NULL) {
+        ciEnv::current()->record_failure("CodeCache is full");
+        return;
+      }
     }
   %}
 
diff --git a/src/cpu/zero/vm/compiledIC_zero.cpp b/src/cpu/zero/vm/compiledIC_zero.cpp
index 143dc31738090153bdd93fee47e33e3153919db0..185a8b169c0beb27f5f8fc5d1a346a8e745a6ee7 100644
--- a/src/cpu/zero/vm/compiledIC_zero.cpp
+++ b/src/cpu/zero/vm/compiledIC_zero.cpp
@@ -60,8 +60,9 @@ bool CompiledIC::is_icholder_call_site(virtual_call_Relocation* call_site) {
 
 // ----------------------------------------------------------------------------
 
-void CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf) {
+address CompiledStaticCall::emit_to_interp_stub(CodeBuffer &cbuf) {
   ShouldNotReachHere(); // Only needed for COMPILER2.
+  return NULL;
 }
 
 int CompiledStaticCall::to_interp_stub_size() {
diff --git a/src/cpu/zero/vm/cppInterpreter_zero.cpp b/src/cpu/zero/vm/cppInterpreter_zero.cpp
index 15cfc86aaa42ba2bc1ed5933a0471f44465b5db5..242ce1cfdfb44d4a98a3d4d56d22dbbf03e2d75a 100644
--- a/src/cpu/zero/vm/cppInterpreter_zero.cpp
+++ b/src/cpu/zero/vm/cppInterpreter_zero.cpp
@@ -220,9 +220,16 @@ void CppInterpreter::main_loop(int recurse, TRAPS) {
   // Push our result
   for (int i = 0; i < result_slots; i++) {
     // Adjust result to smaller
-    intptr_t res = result[-i];
+    union {
+      intptr_t res;
+      jint res_jint;
+    };
+    res = result[-i];
     if (result_slots == 1) {
-      res = narrow(method->result_type(), res);
+      BasicType t = method->result_type();
+      if (is_subword_type(t)) {
+        res_jint = (jint)narrow(t, res_jint);
+      }
     }
     stack->push(res);
   }
diff --git a/src/share/vm/c1/c1_LIRAssembler.cpp b/src/share/vm/c1/c1_LIRAssembler.cpp
index ef2b10623a45a4148acd3f2f24a226ce78a144c5..3447e0703e292a905e2660ecbee70221b2f586bb 100644
--- a/src/share/vm/c1/c1_LIRAssembler.cpp
+++ b/src/share/vm/c1/c1_LIRAssembler.cpp
@@ -464,6 +464,7 @@ void LIR_Assembler::emit_call(LIR_OpJavaCall* op) {
 
   // emit the static call stub stuff out of line
   emit_static_call_stub();
+  CHECK_BAILOUT();
 
   switch (op->code()) {
   case lir_static_call:
diff --git a/src/share/vm/classfile/vmSymbols.hpp b/src/share/vm/classfile/vmSymbols.hpp
index b857cb3bc457a43ba093b9315d05b792213eae53..528175816b29651d6899efbca21380d34d65e2ea 100644
--- a/src/share/vm/classfile/vmSymbols.hpp
+++ b/src/share/vm/classfile/vmSymbols.hpp
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 1997, 2014, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 1997, 2016, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -517,6 +517,7 @@
   template(int_StringBuffer_signature,                "(I)Ljava/lang/StringBuffer;")                              \
   template(char_StringBuffer_signature,               "(C)Ljava/lang/StringBuffer;")                              \
   template(int_String_signature,                      "(I)Ljava/lang/String;")                                    \
+  template(codesource_permissioncollection_signature, "(Ljava/security/CodeSource;Ljava/security/PermissionCollection;)V") \
   /* signature symbols needed by intrinsics */                                                                    \
   VM_INTRINSICS_DO(VM_INTRINSIC_IGNORE, VM_SYMBOL_IGNORE, VM_SYMBOL_IGNORE, template, VM_ALIAS_IGNORE)            \
                                                                                                                   \
diff --git a/src/share/vm/code/compiledIC.hpp b/src/share/vm/code/compiledIC.hpp
index b797329ac727066be32533b3344009315d62dd0c..de72423bb7567acc854c410623fe29ea80d99d41 100644
--- a/src/share/vm/code/compiledIC.hpp
+++ b/src/share/vm/code/compiledIC.hpp
@@ -320,7 +320,7 @@ class CompiledStaticCall: public NativeCall {
   friend CompiledStaticCall* compiledStaticCall_at(Relocation* call_site);
 
   // Code
-  static void emit_to_interp_stub(CodeBuffer &cbuf);
+  static address emit_to_interp_stub(CodeBuffer &cbuf);
   static int to_interp_stub_size();
   static int reloc_to_interp_stub();
 
diff --git a/src/share/vm/interpreter/bytecodeInterpreter.cpp b/src/share/vm/interpreter/bytecodeInterpreter.cpp
index eed4a35f0fc1f584fe3381907c2a5c39950882f2..59e1fb95b3477ba29f2463012da1146a7b5722e6 100644
--- a/src/share/vm/interpreter/bytecodeInterpreter.cpp
+++ b/src/share/vm/interpreter/bytecodeInterpreter.cpp
@@ -593,8 +593,9 @@ BytecodeInterpreter::run(interpreterState istate) {
 /* 0xDC */ &&opc_default,     &&opc_default,        &&opc_default,      &&opc_default,
 
 /* 0xE0 */ &&opc_default,     &&opc_default,        &&opc_default,      &&opc_default,
-/* 0xE4 */ &&opc_default,     &&opc_fast_aldc,      &&opc_fast_aldc_w,  &&opc_return_register_finalizer,
-/* 0xE8 */ &&opc_invokehandle,&&opc_default,        &&opc_default,      &&opc_default,
+/* 0xE4 */ &&opc_default,     &&opc_default,        &&opc_fast_aldc,    &&opc_fast_aldc_w,
+/* 0xE8 */ &&opc_return_register_finalizer,
+                              &&opc_invokehandle,   &&opc_default,      &&opc_default,
 /* 0xEC */ &&opc_default,     &&opc_default,        &&opc_default,      &&opc_default,
 
 /* 0xF0 */ &&opc_default,     &&opc_default,        &&opc_default,      &&opc_default,
diff --git a/src/share/vm/opto/compile.cpp b/src/share/vm/opto/compile.cpp
index dc88381a114d83735a9ebd8ab70deb4b129bfed7..e021924b3aeccddaf054c37e8657a6e7a18ac7fe 100644
--- a/src/share/vm/opto/compile.cpp
+++ b/src/share/vm/opto/compile.cpp
@@ -608,6 +608,10 @@ uint Compile::scratch_emit_size(const Node* n) {
     n->as_MachBranch()->label_set(&fakeL, 0);
   }
   n->emit(buf, this->regalloc());
+
+  // Emitting into the scratch buffer should not fail
+  assert(!failing(), err_msg_res("Must not have pending failure. Reason is: %s", failure_reason()));
+
   if (is_branch) // Restore label.
     n->as_MachBranch()->label_set(saveL, save_bnum);
 
diff --git a/src/share/vm/opto/output.cpp b/src/share/vm/opto/output.cpp
index 0563ebba5d939ec6439e1757e11edd723c43b7d9..95de2d619b8cffb0c7d90397ba85f9865203a893 100644
--- a/src/share/vm/opto/output.cpp
+++ b/src/share/vm/opto/output.cpp
@@ -1502,6 +1502,13 @@ void Compile::fill_buffer(CodeBuffer* cb, uint* blk_starts) {
       n->emit(*cb, _regalloc);
       current_offset  = cb->insts_size();
 
+      // Above we only verified that there is enough space in the instruction section.
+      // However, the instruction may emit stubs that cause code buffer expansion.
+      // Bail out here if expansion failed due to a lack of code cache space.
+      if (failing()) {
+        return;
+      }
+
 #ifdef ASSERT
       if (n->size(_regalloc) < (current_offset-instr_offset)) {
         n->dump();
@@ -1630,11 +1637,14 @@ void Compile::fill_buffer(CodeBuffer* cb, uint* blk_starts) {
   if (_method) {
     // Emit the exception handler code.
     _code_offsets.set_value(CodeOffsets::Exceptions, HandlerImpl::emit_exception_handler(*cb));
+    if (failing()) {
+      return; // CodeBuffer::expand failed
+    }
     // Emit the deopt handler code.
     _code_offsets.set_value(CodeOffsets::Deopt, HandlerImpl::emit_deopt_handler(*cb));
 
     // Emit the MethodHandle deopt handler code (if required).
-    if (has_method_handle_invokes()) {
+    if (has_method_handle_invokes() && !failing()) {
       // We can use the same code as for the normal deopt handler, we
       // just need a different entry point address.
       _code_offsets.set_value(CodeOffsets::DeoptMH, HandlerImpl::emit_deopt_handler(*cb));
diff --git a/src/share/vm/prims/jvm.cpp b/src/share/vm/prims/jvm.cpp
index 76886fbccc4caeac95ffe69e6bebdf1d8661d766..2e228b59dbc4a1d1f5dee39b60de0aeecf6a77e5 100644
--- a/src/share/vm/prims/jvm.cpp
+++ b/src/share/vm/prims/jvm.cpp
@@ -1290,18 +1290,22 @@ static bool is_authorized(Handle context, instanceKlassHandle klass, TRAPS) {
 // and null permissions - which gives no permissions.
 oop create_dummy_access_control_context(TRAPS) {
   InstanceKlass* pd_klass = InstanceKlass::cast(SystemDictionary::ProtectionDomain_klass());
-  // new ProtectionDomain(null,null);
-  oop null_protection_domain = pd_klass->allocate_instance(CHECK_NULL);
-  Handle null_pd(THREAD, null_protection_domain);
+  Handle obj = pd_klass->allocate_instance_handle(CHECK_NULL);
+  // Call constructor ProtectionDomain(null, null);
+  JavaValue result(T_VOID);
+  JavaCalls::call_special(&result, obj, KlassHandle(THREAD, pd_klass),
+                          vmSymbols::object_initializer_name(),
+                          vmSymbols::codesource_permissioncollection_signature(),
+                          Handle(), Handle(), CHECK_NULL);
 
   // new ProtectionDomain[] {pd};
   objArrayOop context = oopFactory::new_objArray(pd_klass, 1, CHECK_NULL);
-  context->obj_at_put(0, null_pd());
+  context->obj_at_put(0, obj());
 
   // new AccessControlContext(new ProtectionDomain[] {pd})
   objArrayHandle h_context(THREAD, context);
-  oop result = java_security_AccessControlContext::create(h_context, false, Handle(), CHECK_NULL);
-  return result;
+  oop acc = java_security_AccessControlContext::create(h_context, false, Handle(), CHECK_NULL);
+  return acc;
 }
 
 JVM_ENTRY(jobject, JVM_DoPrivileged(JNIEnv *env, jclass cls, jobject action, jobject context, jboolean wrapException))