changeset 20620:4d5dc0d0f879 hs25.40-b18

Merge
author amurillo
date Fri, 07 Nov 2014 09:22:57 -0800
parents b0c7e7f1bbbe (current diff) b12a2a9b05ca (diff)
children b8ca8ec1daea
files
diffstat 51 files changed, 1979 insertions(+), 145 deletions(-)
--- a/make/bsd/makefiles/mapfile-vers-debug	Wed Nov 05 12:54:08 2014 -0800
+++ b/make/bsd/makefiles/mapfile-vers-debug	Fri Nov 07 09:22:57 2014 -0800
@@ -187,6 +187,9 @@
                 _JVM_IsSupportedJNIVersion
                 _JVM_IsThreadAlive
                 _JVM_IsVMGeneratedMethodIx
+                _JVM_KnownToNotExist
+                _JVM_GetResourceLookupCacheURLs
+                _JVM_GetResourceLookupCache
                 _JVM_LatestUserDefinedLoader
                 _JVM_Listen
                 _JVM_LoadClass0
--- a/make/bsd/makefiles/mapfile-vers-product	Wed Nov 05 12:54:08 2014 -0800
+++ b/make/bsd/makefiles/mapfile-vers-product	Fri Nov 07 09:22:57 2014 -0800
@@ -187,6 +187,9 @@
                 _JVM_IsSupportedJNIVersion
                 _JVM_IsThreadAlive
                 _JVM_IsVMGeneratedMethodIx
+                _JVM_KnownToNotExist
+                _JVM_GetResourceLookupCacheURLs
+                _JVM_GetResourceLookupCache
                 _JVM_LatestUserDefinedLoader
                 _JVM_Listen
                 _JVM_LoadClass0
--- a/make/hotspot_version	Wed Nov 05 12:54:08 2014 -0800
+++ b/make/hotspot_version	Fri Nov 07 09:22:57 2014 -0800
@@ -35,7 +35,7 @@
 
 HS_MAJOR_VER=25
 HS_MINOR_VER=40
-HS_BUILD_NUMBER=17
+HS_BUILD_NUMBER=18
 
 JDK_MAJOR_VER=1
 JDK_MINOR_VER=8
--- a/make/linux/makefiles/mapfile-vers-debug	Wed Nov 05 12:54:08 2014 -0800
+++ b/make/linux/makefiles/mapfile-vers-debug	Fri Nov 07 09:22:57 2014 -0800
@@ -217,6 +217,9 @@
                 JVM_RegisterSignal;
                 JVM_ReleaseUTF;
                 JVM_ResolveClass;
+                JVM_KnownToNotExist;
+                JVM_GetResourceLookupCacheURLs;
+                JVM_GetResourceLookupCache;
                 JVM_ResumeThread;
                 JVM_Send;
                 JVM_SendTo;
--- a/make/linux/makefiles/mapfile-vers-product	Wed Nov 05 12:54:08 2014 -0800
+++ b/make/linux/makefiles/mapfile-vers-product	Fri Nov 07 09:22:57 2014 -0800
@@ -217,6 +217,9 @@
                 JVM_RegisterSignal;
                 JVM_ReleaseUTF;
                 JVM_ResolveClass;
+                JVM_KnownToNotExist;
+                JVM_GetResourceLookupCacheURLs;
+                JVM_GetResourceLookupCache;
                 JVM_ResumeThread;
                 JVM_Send;
                 JVM_SendTo;
--- a/make/solaris/makefiles/mapfile-vers	Wed Nov 05 12:54:08 2014 -0800
+++ b/make/solaris/makefiles/mapfile-vers	Fri Nov 07 09:22:57 2014 -0800
@@ -189,6 +189,9 @@
                 JVM_IsSupportedJNIVersion;
                 JVM_IsThreadAlive;
                 JVM_IsVMGeneratedMethodIx;
+                JVM_KnownToNotExist;
+                JVM_GetResourceLookupCacheURLs;
+                JVM_GetResourceLookupCache;
                 JVM_LatestUserDefinedLoader;
                 JVM_Listen;
                 JVM_LoadClass0;
--- a/src/os_cpu/linux_x86/vm/os_linux_x86.cpp	Wed Nov 05 12:54:08 2014 -0800
+++ b/src/os_cpu/linux_x86/vm/os_linux_x86.cpp	Fri Nov 07 09:22:57 2014 -0800
@@ -909,7 +909,7 @@
    */
   char* hint = (char*) (Linux::initial_thread_stack_bottom() -
                         ((StackYellowPages + StackRedPages + 1) * page_size));
-  char* codebuf = os::reserve_memory(page_size, hint);
+  char* codebuf = os::attempt_reserve_memory_at(page_size, hint);
   if ( (codebuf == NULL) || (!os::commit_memory(codebuf, page_size, true)) ) {
     return; // No matter, we tried, best effort.
   }
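
The hunk above tightens the code-buffer reservation used by this workaround: os::attempt_reserve_memory_at() only succeeds if the VM actually gets the hinted address just below the initial thread stack, whereas os::reserve_memory() treats the address only as a hint and may return memory elsewhere. As a rough standalone illustration of that "reserve exactly here or give up" pattern on Linux (an assumption for illustration, not HotSpot's implementation):

    #include <stddef.h>
    #include <sys/mman.h>

    // Try to reserve `size` bytes exactly at `hint`; hand the range back and
    // return NULL if the kernel places the mapping anywhere else.
    static char* attempt_reserve_at(char* hint, size_t size) {
      void* p = mmap(hint, size, PROT_NONE,
                     MAP_PRIVATE | MAP_ANONYMOUS | MAP_NORESERVE, -1, 0);
      if (p == MAP_FAILED) return NULL;
      if (p != hint) {            // hint was ignored: undo and report failure
        munmap(p, size);
        return NULL;
      }
      return static_cast<char*>(p);
    }
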
--- a/src/share/vm/classfile/classLoader.cpp	Wed Nov 05 12:54:08 2014 -0800
+++ b/src/share/vm/classfile/classLoader.cpp	Fri Nov 07 09:22:57 2014 -0800
@@ -610,7 +610,7 @@
 }
 #endif
 
-void ClassLoader::setup_search_path(const char *class_path) {
+void ClassLoader::setup_search_path(const char *class_path, bool canonicalize) {
   int offset = 0;
   int len = (int)strlen(class_path);
   int end = 0;
@@ -625,7 +625,13 @@
     char* path = NEW_RESOURCE_ARRAY(char, end - start + 1);
     strncpy(path, &class_path[start], end - start);
     path[end - start] = '\0';
-    update_class_path_entry_list(path, false);
+    if (canonicalize) {
+      char* canonical_path = NEW_RESOURCE_ARRAY(char, JVM_MAXPATHLEN + 1);
+      if (get_canonical_path(path, canonical_path, JVM_MAXPATHLEN)) {
+        path = canonical_path;
+      }
+    }
+    update_class_path_entry_list(path, /*check_for_duplicates=*/canonicalize);
 #if INCLUDE_CDS
     if (DumpSharedSpaces) {
       check_shared_classpath(path);
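
With the new canonicalize flag, each classpath element split out of the string is first run through get_canonical_path(); if canonicalization fails, the element is used as given. A minimal standalone sketch of that best-effort fallback, using POSIX realpath() purely for illustration (HotSpot has its own canonicalization helper):

    #include <limits.h>
    #include <stdlib.h>
    #include <string>

    // Return the canonical form of `path` when it can be resolved,
    // otherwise return the path unchanged (best effort, as above).
    static std::string canonicalize_or_keep(const std::string& path) {
      char buf[PATH_MAX];
      if (realpath(path.c_str(), buf) != NULL) {
        return std::string(buf);
      }
      return path;
    }
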
--- a/src/share/vm/classfile/classLoader.hpp	Wed Nov 05 12:54:08 2014 -0800
+++ b/src/share/vm/classfile/classLoader.hpp	Fri Nov 07 09:22:57 2014 -0800
@@ -129,8 +129,8 @@
   bool _has_error;
   bool _throw_exception;
   volatile ClassPathEntry* _resolved_entry;
+ public:
   ClassPathEntry* resolve_entry(TRAPS);
- public:
   bool is_jar_file();
   const char* name()  { return _path; }
   LazyClassPathEntry(const char* path, const struct stat* st, bool throw_exception);
@@ -218,7 +218,7 @@
   static void setup_meta_index(const char* meta_index_path, const char* meta_index_dir,
                                int start_index);
   static void setup_bootstrap_search_path();
-  static void setup_search_path(const char *class_path);
+  static void setup_search_path(const char *class_path, bool canonicalize=false);
 
   static void load_zip_library();
   static ClassPathEntry* create_class_path_entry(const char *path, const struct stat* st,
@@ -329,6 +329,10 @@
     return e;
   }
 
+  static int num_classpath_entries() {
+    return _num_entries;
+  }
+
 #if INCLUDE_CDS
   // Sharing dump and restore
   static void copy_package_info_buckets(char** top, char* end);
--- a/src/share/vm/classfile/classLoaderData.cpp	Wed Nov 05 12:54:08 2014 -0800
+++ b/src/share/vm/classfile/classLoaderData.cpp	Fri Nov 07 09:22:57 2014 -0800
@@ -747,7 +747,7 @@
 
 // Move class loader data from main list to the unloaded list for unloading
 // and deallocation later.
-bool ClassLoaderDataGraph::do_unloading(BoolObjectClosure* is_alive_closure) {
+bool ClassLoaderDataGraph::do_unloading(BoolObjectClosure* is_alive_closure, bool clean_alive) {
   ClassLoaderData* data = _head;
   ClassLoaderData* prev = NULL;
   bool seen_dead_loader = false;
@@ -756,16 +756,8 @@
   // purging and we don't want to rewalk the previously unloaded class loader data.
   _saved_unloading = _unloading;
 
-  // mark metadata seen on the stack and code cache so we can delete
-  // unneeded entries.
-  bool has_redefined_a_class = JvmtiExport::has_redefined_a_class();
-  MetadataOnStackMark md_on_stack;
   while (data != NULL) {
     if (data->is_alive(is_alive_closure)) {
-      if (has_redefined_a_class) {
-        data->classes_do(InstanceKlass::purge_previous_versions);
-      }
-      data->free_deallocate_list();
       prev = data;
       data = data->next();
       continue;
@@ -787,6 +779,11 @@
     _unloading = dead;
   }
 
+  if (clean_alive) {
+    // Clean previous versions and the deallocate list.
+    ClassLoaderDataGraph::clean_metaspaces();
+  }
+
   if (seen_dead_loader) {
     post_class_unload_events();
   }
@@ -794,6 +791,26 @@
   return seen_dead_loader;
 }
 
+void ClassLoaderDataGraph::clean_metaspaces() {
+  // mark metadata seen on the stack and code cache so we can delete unneeded entries.
+  bool has_redefined_a_class = JvmtiExport::has_redefined_a_class();
+  MetadataOnStackMark md_on_stack(has_redefined_a_class);
+
+  if (has_redefined_a_class) {
+    // purge_previous_versions also cleans weak method links. Because
+    // one method's MDO can reference another method from another
+    // class loader, we need to first clean weak method links for all
+    // class loaders here. Below, we can then free redefined methods
+    // for all class loaders.
+    for (ClassLoaderData* data = _head; data != NULL; data = data->next()) {
+      data->classes_do(InstanceKlass::purge_previous_versions);
+    }
+  }
+
+  // Need to purge the previous version before deallocating.
+  free_deallocate_lists();
+}
+
 void ClassLoaderDataGraph::purge() {
   assert(SafepointSynchronize::is_at_safepoint(), "must be at safepoint!");
   ClassLoaderData* list = _unloading;
@@ -821,6 +838,14 @@
 #endif
 }
 
+void ClassLoaderDataGraph::free_deallocate_lists() {
+  for (ClassLoaderData* cld = _head; cld != NULL; cld = cld->next()) {
+    // We need to keep this data until InstanceKlass::purge_previous_version has been
+    // called on all alive classes. See the comment in ClassLoaderDataGraph::clean_metaspaces.
+    cld->free_deallocate_list();
+  }
+}
+
 // CDS support
 
 // Global metaspaces for writing information to the shared archive.  When
--- a/src/share/vm/classfile/classLoaderData.hpp	Wed Nov 05 12:54:08 2014 -0800
+++ b/src/share/vm/classfile/classLoaderData.hpp	Fri Nov 07 09:22:57 2014 -0800
@@ -71,6 +71,7 @@
 
   static ClassLoaderData* add(Handle class_loader, bool anonymous, TRAPS);
   static void post_class_unload_events(void);
+  static void clean_metaspaces();
  public:
   static ClassLoaderData* find_or_create(Handle class_loader, TRAPS);
   static void purge();
@@ -89,7 +90,7 @@
   static void classes_do(void f(Klass* const));
   static void loaded_classes_do(KlassClosure* klass_closure);
   static void classes_unloading_do(void f(Klass* const));
-  static bool do_unloading(BoolObjectClosure* is_alive);
+  static bool do_unloading(BoolObjectClosure* is_alive, bool clean_alive);
 
   // CMS support.
   static void remember_new_clds(bool remember) { _saved_head = (remember ? _head : NULL); }
@@ -105,6 +106,8 @@
     }
   }
 
+  static void free_deallocate_lists();
+
   static void dump_on(outputStream * const out) PRODUCT_RETURN;
   static void dump() { dump_on(tty); }
   static void verify();
--- a/src/share/vm/classfile/classLoaderExt.hpp	Wed Nov 05 12:54:08 2014 -0800
+++ b/src/share/vm/classfile/classLoaderExt.hpp	Fri Nov 07 09:22:57 2014 -0800
@@ -64,6 +64,15 @@
     ClassLoader::add_to_list(new_entry);
   }
   static void setup_search_paths() {}
+
+  static void init_lookup_cache(TRAPS) {}
+  static void copy_lookup_cache_to_archive(char** top, char* end) {}
+  static char* restore_lookup_cache_from_archive(char* buffer) {return buffer;}
+  static inline bool is_lookup_cache_enabled() {return false;}
+
+  static bool known_to_not_exist(JNIEnv *env, jobject loader, const char *classname, TRAPS) {return false;}
+  static jobjectArray get_lookup_cache_urls(JNIEnv *env, jobject loader, TRAPS) {return NULL;}
+  static jintArray get_lookup_cache(JNIEnv *env, jobject loader, const char *pkgname, TRAPS) {return NULL;}
 };
 
 #endif // SHARE_VM_CLASSFILE_CLASSLOADEREXT_HPP
--- a/src/share/vm/classfile/metadataOnStackMark.cpp	Wed Nov 05 12:54:08 2014 -0800
+++ b/src/share/vm/classfile/metadataOnStackMark.cpp	Fri Nov 07 09:22:57 2014 -0800
@@ -31,25 +31,23 @@
 #include "runtime/synchronizer.hpp"
 #include "runtime/thread.hpp"
 #include "services/threadService.hpp"
-#include "utilities/growableArray.hpp"
-
+#include "utilities/chunkedList.hpp"
 
-// Keep track of marked on-stack metadata so it can be cleared.
-GrowableArray<Metadata*>* _marked_objects = NULL;
+volatile MetadataOnStackBuffer* MetadataOnStackMark::_used_buffers = NULL;
+volatile MetadataOnStackBuffer* MetadataOnStackMark::_free_buffers = NULL;
+
 NOT_PRODUCT(bool MetadataOnStackMark::_is_active = false;)
 
 // Walk metadata on the stack and mark it so that redefinition doesn't delete
 // it.  Class unloading also walks the previous versions and might try to
 // delete it, so this class is used by class unloading also.
-MetadataOnStackMark::MetadataOnStackMark() {
+MetadataOnStackMark::MetadataOnStackMark(bool visit_code_cache) {
   assert(SafepointSynchronize::is_at_safepoint(), "sanity check");
+  assert(_used_buffers == NULL, "sanity check");
   NOT_PRODUCT(_is_active = true;)
-  if (_marked_objects == NULL) {
-    _marked_objects = new (ResourceObj::C_HEAP, mtClass) GrowableArray<Metadata*>(1000, true);
-  }
 
   Threads::metadata_do(Metadata::mark_on_stack);
-  if (JvmtiExport::has_redefined_a_class()) {
+  if (visit_code_cache) {
     CodeCache::alive_nmethods_do(nmethod::mark_on_stack);
   }
   CompileBroker::mark_on_stack();
@@ -62,15 +60,93 @@
   // Unmark everything that was marked.   Can't do the same walk because
   // redefine classes messes up the code cache so the set of methods
   // might not be the same.
-  for (int i = 0; i< _marked_objects->length(); i++) {
-    _marked_objects->at(i)->set_on_stack(false);
+
+  retire_buffer_for_thread(Thread::current());
+
+  MetadataOnStackBuffer* buffer = const_cast<MetadataOnStackBuffer* >(_used_buffers);
+  while (buffer != NULL) {
+    // Clear on stack state for all metadata.
+    size_t size = buffer->size();
+    for (size_t i  = 0; i < size; i++) {
+      Metadata* md = buffer->at(i);
+      md->set_on_stack(false);
+    }
+
+    MetadataOnStackBuffer* next = buffer->next_used();
+
+    // Move the buffer to the free list.
+    buffer->clear();
+    buffer->set_next_used(NULL);
+    buffer->set_next_free(const_cast<MetadataOnStackBuffer*>(_free_buffers));
+    _free_buffers = buffer;
+
+    // Step to next used buffer.
+    buffer = next;
   }
-  _marked_objects->clear();   // reuse growable array for next time.
+
+  _used_buffers = NULL;
+
   NOT_PRODUCT(_is_active = false;)
 }
 
+void MetadataOnStackMark::retire_buffer(MetadataOnStackBuffer* buffer) {
+  if (buffer == NULL) {
+    return;
+  }
+
+  MetadataOnStackBuffer* old_head;
+
+  do {
+    old_head = const_cast<MetadataOnStackBuffer*>(_used_buffers);
+    buffer->set_next_used(old_head);
+  } while (Atomic::cmpxchg_ptr(buffer, &_used_buffers, old_head) != old_head);
+}
+
+void MetadataOnStackMark::retire_buffer_for_thread(Thread* thread) {
+  retire_buffer(thread->metadata_on_stack_buffer());
+  thread->set_metadata_on_stack_buffer(NULL);
+}
+
+bool MetadataOnStackMark::has_buffer_for_thread(Thread* thread) {
+  return thread->metadata_on_stack_buffer() != NULL;
+}
+
+MetadataOnStackBuffer* MetadataOnStackMark::allocate_buffer() {
+  MetadataOnStackBuffer* allocated;
+  MetadataOnStackBuffer* new_head;
+
+  do {
+    allocated = const_cast<MetadataOnStackBuffer*>(_free_buffers);
+    if (allocated == NULL) {
+      break;
+    }
+    new_head = allocated->next_free();
+  } while (Atomic::cmpxchg_ptr(new_head, &_free_buffers, allocated) != allocated);
+
+  if (allocated == NULL) {
+    allocated = new MetadataOnStackBuffer();
+  }
+
+  assert(!allocated->is_full(), err_msg("Should not be full: " PTR_FORMAT, p2i(allocated)));
+
+  return allocated;
+}
+
 // Record which objects are marked so we can unmark the same objects.
-void MetadataOnStackMark::record(Metadata* m) {
+void MetadataOnStackMark::record(Metadata* m, Thread* thread) {
   assert(_is_active, "metadata on stack marking is active");
-  _marked_objects->push(m);
+
+  MetadataOnStackBuffer* buffer =  thread->metadata_on_stack_buffer();
+
+  if (buffer != NULL && buffer->is_full()) {
+    retire_buffer(buffer);
+    buffer = NULL;
+  }
+
+  if (buffer == NULL) {
+    buffer = allocate_buffer();
+    thread->set_metadata_on_stack_buffer(buffer);
+  }
+
+  buffer->push(m);
 }
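
retire_buffer() and allocate_buffer() above treat _used_buffers and _free_buffers as lock-free stacks: worker threads push their full buffers with an Atomic::cmpxchg_ptr retry loop, and the destructor later drains the used list single-threadedly at the safepoint. The same retry pattern in a self-contained C++11 sketch (illustration only; a production version would also need to consider ABA):

    #include <atomic>
    #include <cstddef>

    struct Node { Node* next; };

    // Publish n at the head of the list without taking a lock.
    static void lockfree_push(std::atomic<Node*>& head, Node* n) {
      Node* old_head = head.load();
      do {
        n->next = old_head;                               // link before publishing
      } while (!head.compare_exchange_weak(old_head, n)); // retry if head moved
    }

    // Detach one node, or return NULL when the list is empty.
    static Node* lockfree_pop(std::atomic<Node*>& head) {
      Node* old_head = head.load();
      while (old_head != NULL &&
             !head.compare_exchange_weak(old_head, old_head->next)) {
        // compare_exchange_weak refreshed old_head; try again
      }
      return old_head;
    }
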
--- a/src/share/vm/classfile/metadataOnStackMark.hpp	Wed Nov 05 12:54:08 2014 -0800
+++ b/src/share/vm/classfile/metadataOnStackMark.hpp	Fri Nov 07 09:22:57 2014 -0800
@@ -26,9 +26,12 @@
 #define SHARE_VM_CLASSFILE_METADATAONSTACKMARK_HPP
 
 #include "memory/allocation.hpp"
+#include "utilities/chunkedList.hpp"
 
 class Metadata;
 
+typedef ChunkedList<Metadata*, mtInternal> MetadataOnStackBuffer;
+
 // Helper class to mark and unmark metadata used on the stack as either handles
 // or executing methods, so that it can't be deleted during class redefinition
 // and class unloading.
@@ -36,10 +39,20 @@
 // metadata during parsing, relocated methods, and methods in backtraces.
 class MetadataOnStackMark : public StackObj {
   NOT_PRODUCT(static bool _is_active;)
+
+  static volatile MetadataOnStackBuffer* _used_buffers;
+  static volatile MetadataOnStackBuffer* _free_buffers;
+
+  static MetadataOnStackBuffer* allocate_buffer();
+  static void retire_buffer(MetadataOnStackBuffer* buffer);
+
  public:
-  MetadataOnStackMark();
-  ~MetadataOnStackMark();
-  static void record(Metadata* m);
+  MetadataOnStackMark(bool visit_code_cache);
+   ~MetadataOnStackMark();
+
+  static void record(Metadata* m, Thread* thread);
+  static void retire_buffer_for_thread(Thread* thread);
+  static bool has_buffer_for_thread(Thread* thread);
 };
 
 #endif // SHARE_VM_CLASSFILE_METADATAONSTACKMARK_HPP
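
MetadataOnStackBuffer is a ChunkedList<Metadata*, mtInternal> from the new utilities/chunkedList.hpp. Judging only from the calls this changeset makes on it (push, is_full, size, at, clear, and the next_used/next_free links), its shape is roughly the following standalone sketch; this is an assumed outline for orientation, not the HotSpot class itself:

    #include <cstddef>

    template <typename T, size_t N = 64>   // N: assumed fixed chunk capacity
    class ChunkedBufferSketch {
      T      _values[N];
      size_t _top;
      ChunkedBufferSketch* _next_used;
      ChunkedBufferSketch* _next_free;
     public:
      ChunkedBufferSketch() : _top(0), _next_used(NULL), _next_free(NULL) {}
      bool   is_full() const    { return _top == N; }
      void   push(T v)          { _values[_top++] = v; }  // caller checks is_full() first
      size_t size() const       { return _top; }
      T      at(size_t i) const { return _values[i]; }
      void   clear()            { _top = 0; }
      ChunkedBufferSketch* next_used() const       { return _next_used; }
      void   set_next_used(ChunkedBufferSketch* b) { _next_used = b; }
      ChunkedBufferSketch* next_free() const       { return _next_free; }
      void   set_next_free(ChunkedBufferSketch* b) { _next_free = b; }
    };
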
--- a/src/share/vm/classfile/systemDictionary.cpp	Wed Nov 05 12:54:08 2014 -0800
+++ b/src/share/vm/classfile/systemDictionary.cpp	Fri Nov 07 09:22:57 2014 -0800
@@ -1691,9 +1691,9 @@
 
 // Assumes classes in the SystemDictionary are only unloaded at a safepoint
 // Note: anonymous classes are not in the SD.
-bool SystemDictionary::do_unloading(BoolObjectClosure* is_alive) {
+bool SystemDictionary::do_unloading(BoolObjectClosure* is_alive, bool clean_alive) {
   // First, mark for unload all ClassLoaderData referencing a dead class loader.
-  bool unloading_occurred = ClassLoaderDataGraph::do_unloading(is_alive);
+  bool unloading_occurred = ClassLoaderDataGraph::do_unloading(is_alive, clean_alive);
   if (unloading_occurred) {
     dictionary()->do_unloading();
     constraints()->purge_loader_constraints();
--- a/src/share/vm/classfile/systemDictionary.hpp	Wed Nov 05 12:54:08 2014 -0800
+++ b/src/share/vm/classfile/systemDictionary.hpp	Fri Nov 07 09:22:57 2014 -0800
@@ -175,6 +175,8 @@
   do_klass(URL_klass,                                   java_net_URL,                              Pre                 ) \
   do_klass(Jar_Manifest_klass,                          java_util_jar_Manifest,                    Pre                 ) \
   do_klass(sun_misc_Launcher_klass,                     sun_misc_Launcher,                         Pre                 ) \
+  do_klass(sun_misc_Launcher_AppClassLoader_klass,      sun_misc_Launcher_AppClassLoader,          Pre                 ) \
+  do_klass(sun_misc_Launcher_ExtClassLoader_klass,      sun_misc_Launcher_ExtClassLoader,          Pre                 ) \
   do_klass(CodeSource_klass,                            java_security_CodeSource,                  Pre                 ) \
                                                                                                                          \
   /* It's NULL in non-1.4 JDKs. */                                                                                       \
@@ -339,7 +341,7 @@
 
   // Unload (that is, break root links to) all unmarked classes and
   // loaders.  Returns "true" iff something was unloaded.
-  static bool do_unloading(BoolObjectClosure* is_alive);
+  static bool do_unloading(BoolObjectClosure* is_alive, bool clean_alive = true);
 
   // Used by DumpSharedSpaces only to remove classes that failed verification
   static void remove_classes_in_error_state();
--- a/src/share/vm/classfile/vmSymbols.hpp	Wed Nov 05 12:54:08 2014 -0800
+++ b/src/share/vm/classfile/vmSymbols.hpp	Fri Nov 07 09:22:57 2014 -0800
@@ -116,6 +116,7 @@
   template(java_lang_AssertionStatusDirectives,       "java/lang/AssertionStatusDirectives")      \
   template(getBootClassPathEntryForClass_name,        "getBootClassPathEntryForClass")            \
   template(sun_misc_PostVMInitHook,                   "sun/misc/PostVMInitHook")                  \
+  template(sun_misc_Launcher_AppClassLoader,          "sun/misc/Launcher$AppClassLoader")         \
   template(sun_misc_Launcher_ExtClassLoader,          "sun/misc/Launcher$ExtClassLoader")         \
                                                                                                   \
   /* Java runtime version access */                                                               \
--- a/src/share/vm/code/nmethod.cpp	Wed Nov 05 12:54:08 2014 -0800
+++ b/src/share/vm/code/nmethod.cpp	Fri Nov 07 09:22:57 2014 -0800
@@ -1720,11 +1720,17 @@
   set_unload_reported();
 }
 
-void static clean_ic_if_metadata_is_dead(CompiledIC *ic, BoolObjectClosure *is_alive) {
+void static clean_ic_if_metadata_is_dead(CompiledIC *ic, BoolObjectClosure *is_alive, bool mark_on_stack) {
   if (ic->is_icholder_call()) {
     // The only exception is compiledICHolder oops which may
     // yet be marked below. (We check this further below).
     CompiledICHolder* cichk_oop = ic->cached_icholder();
+
+    if (mark_on_stack) {
+      Metadata::mark_on_stack(cichk_oop->holder_method());
+      Metadata::mark_on_stack(cichk_oop->holder_klass());
+    }
+
     if (cichk_oop->holder_method()->method_holder()->is_loader_alive(is_alive) &&
         cichk_oop->holder_klass()->is_loader_alive(is_alive)) {
       return;
@@ -1732,6 +1738,10 @@
   } else {
     Metadata* ic_oop = ic->cached_metadata();
     if (ic_oop != NULL) {
+      if (mark_on_stack) {
+        Metadata::mark_on_stack(ic_oop);
+      }
+
       if (ic_oop->is_klass()) {
         if (((Klass*)ic_oop)->is_loader_alive(is_alive)) {
           return;
@@ -1792,7 +1802,7 @@
     while(iter.next()) {
       if (iter.type() == relocInfo::virtual_call_type) {
         CompiledIC *ic = CompiledIC_at(&iter);
-        clean_ic_if_metadata_is_dead(ic, is_alive);
+        clean_ic_if_metadata_is_dead(ic, is_alive, false);
       }
     }
   }
@@ -1860,6 +1870,53 @@
   return clean_if_nmethod_is_unloaded(csc, csc->destination(), is_alive, from);
 }
 
+bool nmethod::unload_if_dead_at(RelocIterator* iter_at_oop, BoolObjectClosure *is_alive, bool unloading_occurred) {
+  assert(iter_at_oop->type() == relocInfo::oop_type, "Wrong relocation type");
+
+  oop_Relocation* r = iter_at_oop->oop_reloc();
+  // Traverse those oops directly embedded in the code.
+  // Other oops (oop_index>0) are seen as part of scopes_oops.
+  assert(1 == (r->oop_is_immediate()) +
+         (r->oop_addr() >= oops_begin() && r->oop_addr() < oops_end()),
+         "oop must be found in exactly one place");
+  if (r->oop_is_immediate() && r->oop_value() != NULL) {
+    // Unload this nmethod if the oop is dead.
+    if (can_unload(is_alive, r->oop_addr(), unloading_occurred)) {
+      return true;

+    }
+  }
+
+  return false;
+}
+
+void nmethod::mark_metadata_on_stack_at(RelocIterator* iter_at_metadata) {
+  assert(iter_at_metadata->type() == relocInfo::metadata_type, "Wrong relocation type");
+
+  metadata_Relocation* r = iter_at_metadata->metadata_reloc();
+  // In this metadata, we must only follow those metadatas directly embedded in
+  // the code.  Other metadatas (oop_index>0) are seen as part of
+  // the metadata section below.
+  assert(1 == (r->metadata_is_immediate()) +
+         (r->metadata_addr() >= metadata_begin() && r->metadata_addr() < metadata_end()),
+         "metadata must be found in exactly one place");
+  if (r->metadata_is_immediate() && r->metadata_value() != NULL) {
+    Metadata* md = r->metadata_value();
+    if (md != _method) Metadata::mark_on_stack(md);
+  }
+}
+
+void nmethod::mark_metadata_on_stack_non_relocs() {
+    // Visit the metadata section
+    for (Metadata** p = metadata_begin(); p < metadata_end(); p++) {
+      if (*p == Universe::non_oop_word() || *p == NULL)  continue;  // skip non-oops
+      Metadata* md = *p;
+      Metadata::mark_on_stack(md);
+    }
+
+    // Visit metadata not embedded in the other places.
+    if (_method != NULL) Metadata::mark_on_stack(_method);
+}
+
 bool nmethod::do_unloading_parallel(BoolObjectClosure* is_alive, bool unloading_occurred) {
   ResourceMark rm;
 
@@ -1889,6 +1946,11 @@
     unloading_occurred = true;
   }
 
+  // When class redefinition is used all metadata in the CodeCache has to be recorded,
+  // so that unused "previous versions" can be purged. Since walking the CodeCache can
+  // be expensive, the "mark on stack" is piggy-backed on this parallel unloading code.
+  bool mark_metadata_on_stack = a_class_was_redefined;
+
   // Exception cache
   clean_exception_cache(is_alive);
 
@@ -1904,7 +1966,7 @@
       if (unloading_occurred) {
         // If class unloading occurred we first iterate over all inline caches and
         // clear ICs where the cached oop is referring to an unloaded klass or method.
-        clean_ic_if_metadata_is_dead(CompiledIC_at(&iter), is_alive);
+        clean_ic_if_metadata_is_dead(CompiledIC_at(&iter), is_alive, mark_metadata_on_stack);
       }
 
       postponed |= clean_if_nmethod_is_unloaded(CompiledIC_at(&iter), is_alive, this);
@@ -1920,24 +1982,21 @@
 
     case relocInfo::oop_type:
       if (!is_unloaded) {
-        // Unload check
-        oop_Relocation* r = iter.oop_reloc();
-        // Traverse those oops directly embedded in the code.
-        // Other oops (oop_index>0) are seen as part of scopes_oops.
-        assert(1 == (r->oop_is_immediate()) +
-                  (r->oop_addr() >= oops_begin() && r->oop_addr() < oops_end()),
-              "oop must be found in exactly one place");
-        if (r->oop_is_immediate() && r->oop_value() != NULL) {
-          if (can_unload(is_alive, r->oop_addr(), unloading_occurred)) {
-            is_unloaded = true;
-          }
-        }
+        is_unloaded = unload_if_dead_at(&iter, is_alive, unloading_occurred);
       }
       break;
 
+    case relocInfo::metadata_type:
+      if (mark_metadata_on_stack) {
+        mark_metadata_on_stack_at(&iter);
+      }
     }
   }
 
+  if (mark_metadata_on_stack) {
+    mark_metadata_on_stack_non_relocs();
+  }
+
   if (is_unloaded) {
     return postponed;
   }
@@ -2085,7 +2144,7 @@
     while (iter.next()) {
       if (iter.type() == relocInfo::metadata_type ) {
         metadata_Relocation* r = iter.metadata_reloc();
-        // In this lmetadata, we must only follow those metadatas directly embedded in
+        // In this metadata, we must only follow those metadatas directly embedded in
         // the code.  Other metadatas (oop_index>0) are seen as part of
         // the metadata section below.
         assert(1 == (r->metadata_is_immediate()) +
@@ -2119,7 +2178,7 @@
     f(md);
   }
 
-  // Call function Method*, not embedded in these other places.
+  // Visit metadata not embedded in the other places.
   if (_method != NULL) f(_method);
 }
 
--- a/src/share/vm/code/nmethod.hpp	Wed Nov 05 12:54:08 2014 -0800
+++ b/src/share/vm/code/nmethod.hpp	Fri Nov 07 09:22:57 2014 -0800
@@ -614,9 +614,16 @@
   //  The parallel versions are used by G1.
   bool do_unloading_parallel(BoolObjectClosure* is_alive, bool unloading_occurred);
   void do_unloading_parallel_postponed(BoolObjectClosure* is_alive, bool unloading_occurred);
+
+ private:
   //  Unload a nmethod if the *root object is dead.
   bool can_unload(BoolObjectClosure* is_alive, oop* root, bool unloading_occurred);
+  bool unload_if_dead_at(RelocIterator *iter_at_oop, BoolObjectClosure* is_alive, bool unloading_occurred);
 
+  void mark_metadata_on_stack_at(RelocIterator* iter_at_metadata);
+  void mark_metadata_on_stack_non_relocs();
+
+ public:
   void preserve_callee_argument_oops(frame fr, const RegisterMap *reg_map,
                                      OopClosure* f);
   void oops_do(OopClosure* f) { oops_do(f, false); }
--- a/src/share/vm/gc_implementation/g1/concurrentMark.cpp	Wed Nov 05 12:54:08 2014 -0800
+++ b/src/share/vm/gc_implementation/g1/concurrentMark.cpp	Fri Nov 07 09:22:57 2014 -0800
@@ -23,6 +23,7 @@
  */
 
 #include "precompiled.hpp"
+#include "classfile/metadataOnStackMark.hpp"
 #include "classfile/symbolTable.hpp"
 #include "code/codeCache.hpp"
 #include "gc_implementation/g1/concurrentMark.inline.hpp"
@@ -2602,17 +2603,27 @@
     G1RemarkGCTraceTime trace("Unloading", G1Log::finer());
 
     if (ClassUnloadingWithConcurrentMark) {
+      // Cleaning of klasses depends on correct information from MetadataMarkOnStack. The CodeCache::mark_on_stack
+      // part is too slow to be done serially, so it is handled during the weakRefsWorkParallelPart phase.
+      // Defer the cleaning until we have complete on_stack data.
+      MetadataOnStackMark md_on_stack(false /* Don't visit the code cache at this point */);
+
       bool purged_classes;
 
       {
         G1RemarkGCTraceTime trace("System Dictionary Unloading", G1Log::finest());
-        purged_classes = SystemDictionary::do_unloading(&g1_is_alive);
+        purged_classes = SystemDictionary::do_unloading(&g1_is_alive, false /* Defer klass cleaning */);
       }
 
       {
         G1RemarkGCTraceTime trace("Parallel Unloading", G1Log::finest());
         weakRefsWorkParallelPart(&g1_is_alive, purged_classes);
       }
+
+      {
+        G1RemarkGCTraceTime trace("Deallocate Metadata", G1Log::finest());
+        ClassLoaderDataGraph::free_deallocate_lists();
+      }
     }
 
     if (G1StringDedup::is_enabled()) {
--- a/src/share/vm/gc_implementation/g1/g1CollectedHeap.cpp	Wed Nov 05 12:54:08 2014 -0800
+++ b/src/share/vm/gc_implementation/g1/g1CollectedHeap.cpp	Fri Nov 07 09:22:57 2014 -0800
@@ -27,6 +27,7 @@
 #endif
 
 #include "precompiled.hpp"
+#include "classfile/metadataOnStackMark.hpp"
 #include "code/codeCache.hpp"
 #include "code/icBuffer.hpp"
 #include "gc_implementation/g1/bufferingOopClosure.hpp"
@@ -5133,6 +5134,10 @@
         clean_nmethod(claimed_nmethods[i]);
       }
     }
+
+    // The nmethod cleaning helps out and does the CodeCache part of MetadataOnStackMark.
+    // Need to retire the buffers now that this thread has stopped cleaning nmethods.
+    MetadataOnStackMark::retire_buffer_for_thread(Thread::current());
   }
 
   void work_second_pass(uint worker_id) {
@@ -5185,6 +5190,9 @@
     // G1 specific cleanup work that has
     // been moved here to be done in parallel.
     ik->clean_dependent_nmethods();
+    if (JvmtiExport::has_redefined_a_class()) {
+      InstanceKlass::purge_previous_versions(ik);
+    }
   }
 
   void work() {
@@ -5219,8 +5227,18 @@
       _klass_cleaning_task(is_alive) {
   }
 
+  void pre_work_verification() {
+    assert(!MetadataOnStackMark::has_buffer_for_thread(Thread::current()), "Should be empty");
+  }
+
+  void post_work_verification() {
+    assert(!MetadataOnStackMark::has_buffer_for_thread(Thread::current()), "Should be empty");
+  }
+
   // The parallel work done by all worker threads.
   void work(uint worker_id) {
+    pre_work_verification();
+
     // Do first pass of code cache cleaning.
     _code_cache_task.work_first_pass(worker_id);
 
@@ -5239,6 +5257,8 @@
 
     // Clean all klasses that were not unloaded.
     _klass_cleaning_task.work();
+
+    post_work_verification();
   }
 };
 
--- a/src/share/vm/memory/metadataFactory.hpp	Wed Nov 05 12:54:08 2014 -0800
+++ b/src/share/vm/memory/metadataFactory.hpp	Fri Nov 07 09:22:57 2014 -0800
@@ -64,6 +64,12 @@
 
   template <typename T>
   static void free_array(ClassLoaderData* loader_data, Array<T>* data) {
+    if (DumpSharedSpaces) {
+      // FIXME: the freeing code is buggy, especially when PrintSharedSpaces is enabled.
+      // Disable for now -- this means if you specify bad classes in your classlist you
+      // may have wasted space inside the archive.
+      return;
+    }
     if (data != NULL) {
       assert(loader_data != NULL, "shouldn't pass null");
       assert(!data->is_shared(), "cannot deallocate array in shared spaces");
--- a/src/share/vm/memory/metaspaceShared.cpp	Wed Nov 05 12:54:08 2014 -0800
+++ b/src/share/vm/memory/metaspaceShared.cpp	Fri Nov 07 09:22:57 2014 -0800
@@ -24,6 +24,7 @@
 
 #include "precompiled.hpp"
 #include "classfile/dictionary.hpp"
+#include "classfile/classLoaderExt.hpp"
 #include "classfile/loaderConstraints.hpp"
 #include "classfile/placeholders.hpp"
 #include "classfile/sharedClassUtil.hpp"
@@ -39,6 +40,7 @@
 #include "runtime/signature.hpp"
 #include "runtime/vm_operations.hpp"
 #include "runtime/vmThread.hpp"
+#include "utilities/hashtable.hpp"
 #include "utilities/hashtable.inline.hpp"
 
 PRAGMA_FORMAT_MUTE_WARNINGS_FOR_GCC
@@ -533,6 +535,8 @@
   ClassLoader::copy_package_info_table(&md_top, md_end);
   ClassLoader::verify();
 
+  ClassLoaderExt::copy_lookup_cache_to_archive(&md_top, md_end);
+
   // Write the other data to the output array.
   WriteClosure wc(md_top, md_end);
   MetaspaceShared::serialize(&wc);
@@ -745,6 +749,8 @@
   }
   tty->print_cr("Loading classes to share: done.");
 
+  ClassLoaderExt::init_lookup_cache(THREAD);
+
   if (PrintSharedSpaces) {
     tty->print_cr("Shared spaces: preloaded %d classes", class_count);
   }
@@ -1056,6 +1062,8 @@
   buffer += sizeof(intptr_t);
   buffer += len;
 
+  buffer = ClassLoaderExt::restore_lookup_cache_from_archive(buffer);
+
   intptr_t* array = (intptr_t*)buffer;
   ReadClosure rc(&array);
   serialize(&rc);
--- a/src/share/vm/memory/metaspaceShared.hpp	Wed Nov 05 12:54:08 2014 -0800
+++ b/src/share/vm/memory/metaspaceShared.hpp	Fri Nov 07 09:22:57 2014 -0800
@@ -32,9 +32,9 @@
 
 #define LargeSharedArchiveSize    (300*M)
 #define HugeSharedArchiveSize     (800*M)
-#define ReadOnlyRegionPercentage  0.4
-#define ReadWriteRegionPercentage 0.55
-#define MiscDataRegionPercentage  0.03
+#define ReadOnlyRegionPercentage  0.39
+#define ReadWriteRegionPercentage 0.50
+#define MiscDataRegionPercentage  0.09
 #define MiscCodeRegionPercentage  0.02
 #define LargeThresholdClassCount  5000
 #define HugeThresholdClassCount   40000
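
Both the old and the new region splits still account for the whole archive:

    old: 0.40 + 0.55 + 0.03 + 0.02 = 1.00
    new: 0.39 + 0.50 + 0.09 + 0.02 = 1.00

so the change only shifts share from the read-only and read-write regions into MiscData (0.03 to 0.09), presumably to make room for the lookup cache data that ClassLoaderExt::copy_lookup_cache_to_archive() now writes there.
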
--- a/src/share/vm/oops/constantPool.cpp	Wed Nov 05 12:54:08 2014 -0800
+++ b/src/share/vm/oops/constantPool.cpp	Fri Nov 07 09:22:57 2014 -0800
@@ -1817,11 +1817,22 @@
 
 void ConstantPool::set_on_stack(const bool value) {
   if (value) {
-    _flags |= _on_stack;
+    int old_flags = *const_cast<volatile int *>(&_flags);
+    while ((old_flags & _on_stack) == 0) {
+      int new_flags = old_flags | _on_stack;
+      int result = Atomic::cmpxchg(new_flags, &_flags, old_flags);
+
+      if (result == old_flags) {
+        // Succeeded.
+        MetadataOnStackMark::record(this, Thread::current());
+        return;
+      }
+      old_flags = result;
+    }
   } else {
+    // Clearing is done single-threadedly.
     _flags &= ~_on_stack;
   }
-  if (value) MetadataOnStackMark::record(this);
 }
 
 // JSR 292 support for patching constant pool oops after the class is linked and
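
The rewritten ConstantPool::set_on_stack() is a compare-and-swap loop so that, when several GC worker threads mark the same constant pool concurrently, exactly one of them wins the 0-to-1 transition of the _on_stack bit and records the metadata in its thread-local buffer; clearing stays a plain store because it is done single-threadedly. A self-contained sketch of that "set a flag bit once" pattern, using std::atomic rather than HotSpot's Atomic class, purely as an illustration:

    #include <atomic>

    // Returns true only for the single caller that actually flips the bit
    // from 0 to 1, i.e. the caller that must also record the metadata.
    static bool set_flag_once(std::atomic<int>& flags, int bit) {
      int old_flags = flags.load();
      while ((old_flags & bit) == 0) {
        if (flags.compare_exchange_weak(old_flags, old_flags | bit)) {
          return true;                     // we set it
        }
        // old_flags was refreshed; the loop exits if another thread set the bit
      }
      return false;                        // bit was already set
    }
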
--- a/src/share/vm/oops/instanceKlass.cpp	Wed Nov 05 12:54:08 2014 -0800
+++ b/src/share/vm/oops/instanceKlass.cpp	Fri Nov 07 09:22:57 2014 -0800
@@ -2890,6 +2890,22 @@
   OsrList_lock->unlock();
 }
 
+int InstanceKlass::mark_osr_nmethods(const Method* m) {
+  // This is a short non-blocking critical region, so the no safepoint check is ok.
+  MutexLockerEx ml(OsrList_lock, Mutex::_no_safepoint_check_flag);
+  nmethod* osr = osr_nmethods_head();
+  int found = 0;
+  while (osr != NULL) {
+    assert(osr->is_osr_method(), "wrong kind of nmethod found in chain");
+    if (osr->method() == m) {
+      osr->mark_for_deoptimization();
+      found++;
+    }
+    osr = osr->osr_link();
+  }
+  return found;
+}
+
 nmethod* InstanceKlass::lookup_osr_nmethod(const Method* m, int bci, int comp_level, bool match_level) const {
   // This is a short non-blocking critical region, so the no safepoint check is ok.
   OsrList_lock->lock_without_safepoint_check();
--- a/src/share/vm/oops/instanceKlass.hpp	Wed Nov 05 12:54:08 2014 -0800
+++ b/src/share/vm/oops/instanceKlass.hpp	Fri Nov 07 09:22:57 2014 -0800
@@ -783,6 +783,7 @@
   void set_osr_nmethods_head(nmethod* h)     { _osr_nmethods_head = h; };
   void add_osr_nmethod(nmethod* n);
   void remove_osr_nmethod(nmethod* n);
+  int mark_osr_nmethods(const Method* m);
   nmethod* lookup_osr_nmethod(const Method* m, int bci, int level, bool match_level) const;
 
   // Breakpoint support (see methods on Method* for details)
--- a/src/share/vm/oops/method.cpp	Wed Nov 05 12:54:08 2014 -0800
+++ b/src/share/vm/oops/method.cpp	Fri Nov 07 09:22:57 2014 -0800
@@ -1863,9 +1863,12 @@
 void Method::set_on_stack(const bool value) {
   // Set both the method itself and its constant pool.  The constant pool
   // on stack means some method referring to it is also on the stack.
-  _access_flags.set_on_stack(value);
   constants()->set_on_stack(value);
-  if (value) MetadataOnStackMark::record(this);
+
+  bool succeeded = _access_flags.set_on_stack(value);
+  if (value && succeeded) {
+    MetadataOnStackMark::record(this, Thread::current());
+  }
 }
 
 // Called when the class loader is unloaded to make all methods weak.
--- a/src/share/vm/oops/method.hpp	Wed Nov 05 12:54:08 2014 -0800
+++ b/src/share/vm/oops/method.hpp	Fri Nov 07 09:22:57 2014 -0800
@@ -793,6 +793,10 @@
    return method_holder()->lookup_osr_nmethod(this, InvocationEntryBci, level, match_level) != NULL;
   }
 
+  int mark_osr_nmethods() {
+    return method_holder()->mark_osr_nmethods(this);
+  }
+
   nmethod* lookup_osr_nmethod_for(int bci, int level, bool match_level) {
     return method_holder()->lookup_osr_nmethod(this, bci, level, match_level);
   }
--- a/src/share/vm/opto/lcm.cpp	Wed Nov 05 12:54:08 2014 -0800
+++ b/src/share/vm/opto/lcm.cpp	Fri Nov 07 09:22:57 2014 -0800
@@ -484,9 +484,7 @@
         iop == Op_CreateEx ||   // Create-exception must start block
         iop == Op_CheckCastPP
         ) {
-      // select the node n
-      // remove n from worklist and retain the order of remaining nodes
-      worklist.remove((uint)i);
+      worklist.map(i,worklist.pop());
       return n;
     }
 
@@ -572,9 +570,7 @@
   assert(idx >= 0, "index should be set");
   Node *n = worklist[(uint)idx];      // Get the winner
 
-  // select the node n
-  // remove n from worklist and retain the order of remaining nodes
-  worklist.remove((uint)idx);
+  worklist.map((uint)idx, worklist.pop());     // Compress worklist
   return n;
 }
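
The two hunks above replace the order-preserving worklist.remove() with overwriting the selected slot by the popped last element, a constant-time compaction that does not keep the remaining entries in order. The equivalent "swap-remove" on a plain vector, as a small illustration:

    #include <cstddef>
    #include <vector>

    // Remove and return element i in O(1) by moving the last element into its
    // slot; the relative order of the remaining elements is not preserved.
    template <typename T>
    static T swap_remove(std::vector<T>& v, size_t i) {
      T selected = v[i];
      v[i] = v.back();   // harmless self-assignment when i is the last slot
      v.pop_back();
      return selected;
    }
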
 
--- a/src/share/vm/prims/jni.cpp	Wed Nov 05 12:54:08 2014 -0800
+++ b/src/share/vm/prims/jni.cpp	Fri Nov 07 09:22:57 2014 -0800
@@ -5080,6 +5080,7 @@
 void TestNewSize_test();
 void TestKlass_test();
 void Test_linked_list();
+void TestChunkedList_test();
 #if INCLUDE_ALL_GCS
 void TestOldFreeSpaceCalculation_test();
 void TestG1BiasedArray_test();
@@ -5108,6 +5109,7 @@
     run_unit_test(TestNewSize_test());
     run_unit_test(TestKlass_test());
     run_unit_test(Test_linked_list());
+    run_unit_test(TestChunkedList_test());
 #if INCLUDE_VM_STRUCTS
     run_unit_test(VMStructs::test());
 #endif
--- a/src/share/vm/prims/jvm.cpp	Wed Nov 05 12:54:08 2014 -0800
+++ b/src/share/vm/prims/jvm.cpp	Fri Nov 07 09:22:57 2014 -0800
@@ -24,6 +24,7 @@
 
 #include "precompiled.hpp"
 #include "classfile/classLoader.hpp"
+#include "classfile/classLoaderExt.hpp"
 #include "classfile/javaAssertions.hpp"
 #include "classfile/javaClasses.hpp"
 #include "classfile/symbolTable.hpp"
@@ -393,6 +394,14 @@
     }
   }
 
+  const char* enableSharedLookupCache = "false";
+#if INCLUDE_CDS
+  if (ClassLoaderExt::is_lookup_cache_enabled()) {
+    enableSharedLookupCache = "true";
+  }
+#endif
+  PUTPROP(props, "sun.cds.enableSharedLookupCache", enableSharedLookupCache);
+
   return properties;
 JVM_END
 
@@ -766,6 +775,36 @@
 JVM_END
 
 
+JVM_ENTRY(jboolean, JVM_KnownToNotExist(JNIEnv *env, jobject loader, const char *classname))
+  JVMWrapper("JVM_KnownToNotExist");
+#if INCLUDE_CDS
+  return ClassLoaderExt::known_to_not_exist(env, loader, classname, CHECK_(false));
+#else
+  return false;
+#endif
+JVM_END
+
+
+JVM_ENTRY(jobjectArray, JVM_GetResourceLookupCacheURLs(JNIEnv *env, jobject loader))
+  JVMWrapper("JVM_GetResourceLookupCacheURLs");
+#if INCLUDE_CDS
+  return ClassLoaderExt::get_lookup_cache_urls(env, loader, CHECK_NULL);
+#else
+  return NULL;
+#endif
+JVM_END
+
+
+JVM_ENTRY(jintArray, JVM_GetResourceLookupCache(JNIEnv *env, jobject loader, const char *resource_name))
+  JVMWrapper("JVM_GetResourceLookupCache");
+#if INCLUDE_CDS
+  return ClassLoaderExt::get_lookup_cache(env, loader, resource_name, CHECK_NULL);
+#else
+  return NULL;
+#endif
+JVM_END
+
+
 // Returns a class loaded by the bootstrap class loader; or null
 // if not found.  ClassNotFoundException is not thrown.
 //
--- a/src/share/vm/prims/jvm.h	Wed Nov 05 12:54:08 2014 -0800
+++ b/src/share/vm/prims/jvm.h	Fri Nov 07 09:22:57 2014 -0800
@@ -1548,6 +1548,31 @@
 JNIEXPORT jobjectArray JNICALL
 JVM_GetThreadStateNames(JNIEnv* env, jint javaThreadState, jintArray values);
 
+/*
+ * Returns true if the JVM's lookup cache indicates that this class is
+ * known to NOT exist for the given loader.
+ */
+JNIEXPORT jboolean JNICALL
+JVM_KnownToNotExist(JNIEnv *env, jobject loader, const char *classname);
+
+/*
+ * Returns an array of all URLs that are stored in the JVM's lookup cache
+ * for the given loader. NULL if the lookup cache is unavailable.
+ */
+JNIEXPORT jobjectArray JNICALL
+JVM_GetResourceLookupCacheURLs(JNIEnv *env, jobject loader);
+
+/*
+ * Returns an array of all URLs that *may* contain the resource_name for the
+ * given loader. This function returns an integer array, each element
+ * of which can be used to index into the array returned by
+ * JVM_GetResourceLookupCacheURLs of the same loader to determine the
+ * URLs.
+ */
+JNIEXPORT jintArray JNICALL
+JVM_GetResourceLookupCache(JNIEnv *env, jobject loader, const char *resource_name);
+
+
 /* =========================================================================
  * The following defines a private JVM interface that the JDK can query
  * for the JVM version and capabilities.  sun.misc.Version defines
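
The two cache queries declared above are meant to be used together: JVM_GetResourceLookupCache() returns indices into the URL array returned by JVM_GetResourceLookupCacheURLs() for the same loader. A hedged JNI-side sketch of how a caller could combine them (the real consumers live in the JDK class library; the helper below is hypothetical and only shows how the two results fit together):

    #include <jni.h>
    #include "jvm.h"   // declares the three new entry points above

    // Probe only the URLs that the lookup cache says may contain resource_name.
    static void probe_lookup_cache(JNIEnv* env, jobject loader, const char* resource_name) {
      jobjectArray urls  = JVM_GetResourceLookupCacheURLs(env, loader);
      jintArray    cache = JVM_GetResourceLookupCache(env, loader, resource_name);
      if (urls == NULL || cache == NULL) return;   // cache unavailable: fall back to a full scan
      jsize n = env->GetArrayLength(cache);
      jint* idx = env->GetIntArrayElements(cache, NULL);
      if (idx == NULL) return;
      for (jsize i = 0; i < n; i++) {
        jobject candidate = env->GetObjectArrayElement(urls, idx[i]);
        // ... only these URLs may contain resource_name ...
        env->DeleteLocalRef(candidate);
      }
      env->ReleaseIntArrayElements(cache, idx, JNI_ABORT);
    }
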
--- a/src/share/vm/prims/jvmtiClassFileReconstituter.cpp	Wed Nov 05 12:54:08 2014 -0800
+++ b/src/share/vm/prims/jvmtiClassFileReconstituter.cpp	Fri Nov 07 09:22:57 2014 -0800
@@ -54,6 +54,7 @@
 void JvmtiClassFileReconstituter::write_field_infos() {
   HandleMark hm(thread());
   Array<AnnotationArray*>* fields_anno = ikh()->fields_annotations();
+  Array<AnnotationArray*>* fields_type_anno = ikh()->fields_type_annotations();
 
   // Compute the real number of Java fields
   int java_fields = ikh()->java_fields_count();
@@ -68,6 +69,7 @@
     // int offset = ikh()->field_offset( index );
     int generic_signature_index = fs.generic_signature_index();
     AnnotationArray* anno = fields_anno == NULL ? NULL : fields_anno->at(fs.index());
+    AnnotationArray* type_anno = fields_type_anno == NULL ? NULL : fields_type_anno->at(fs.index());
 
     // JVMSpec|   field_info {
     // JVMSpec|         u2 access_flags;
@@ -93,6 +95,9 @@
     if (anno != NULL) {
       ++attr_count;     // has RuntimeVisibleAnnotations attribute
     }
+    if (type_anno != NULL) {
+      ++attr_count;     // has RuntimeVisibleTypeAnnotations attribute
+    }
 
     write_u2(attr_count);
 
@@ -110,6 +115,9 @@
     if (anno != NULL) {
       write_annotations_attribute("RuntimeVisibleAnnotations", anno);
     }
+    if (type_anno != NULL) {
+      write_annotations_attribute("RuntimeVisibleTypeAnnotations", type_anno);
+    }
   }
 }
 
@@ -550,6 +558,7 @@
   AnnotationArray* anno = method->annotations();
   AnnotationArray* param_anno = method->parameter_annotations();
   AnnotationArray* default_anno = method->annotation_default();
+  AnnotationArray* type_anno = method->type_annotations();
 
   // skip generated default interface methods
   if (method->is_overpass()) {
@@ -585,6 +594,9 @@
   if (param_anno != NULL) {
     ++attr_count;     // has RuntimeVisibleParameterAnnotations attribute
   }
+  if (type_anno != NULL) {
+    ++attr_count;     // has RuntimeVisibleTypeAnnotations attribute
+  }
 
   write_u2(attr_count);
   if (const_method->code_size() > 0) {
@@ -609,6 +621,9 @@
   if (param_anno != NULL) {
     write_annotations_attribute("RuntimeVisibleParameterAnnotations", param_anno);
   }
+  if (type_anno != NULL) {
+    write_annotations_attribute("RuntimeVisibleTypeAnnotations", type_anno);
+  }
 }
 
 // Write the class attributes portion of ClassFile structure
@@ -618,6 +633,7 @@
   u2 inner_classes_length = inner_classes_attribute_length();
   Symbol* generic_signature = ikh()->generic_signature();
   AnnotationArray* anno = ikh()->class_annotations();
+  AnnotationArray* type_anno = ikh()->class_type_annotations();
 
   int attr_count = 0;
   if (generic_signature != NULL) {
@@ -635,6 +651,9 @@
   if (anno != NULL) {
     ++attr_count;     // has RuntimeVisibleAnnotations attribute
   }
+  if (type_anno != NULL) {
+    ++attr_count;     // has RuntimeVisibleTypeAnnotations attribute
+  }
   if (cpool()->operands() != NULL) {
     ++attr_count;
   }
@@ -656,6 +675,9 @@
   if (anno != NULL) {
     write_annotations_attribute("RuntimeVisibleAnnotations", anno);
   }
+  if (type_anno != NULL) {
+    write_annotations_attribute("RuntimeVisibleTypeAnnotations", type_anno);
+  }
   if (cpool()->operands() != NULL) {
     write_bootstrapmethod_attribute();
   }
--- a/src/share/vm/prims/jvmtiRedefineClasses.cpp	Wed Nov 05 12:54:08 2014 -0800
+++ b/src/share/vm/prims/jvmtiRedefineClasses.cpp	Fri Nov 07 09:22:57 2014 -0800
@@ -135,7 +135,7 @@
 
   // Mark methods seen on stack and everywhere else so old methods are not
   // cleaned up if they're on the stack.
-  MetadataOnStackMark md_on_stack;
+  MetadataOnStackMark md_on_stack(true);
   HandleMark hm(thread);   // make sure any handles created are deleted
                            // before the stack walk again.
 
@@ -1569,6 +1569,29 @@
     return false;
   }
 
+  // rewrite constant pool references in the class_type_annotations:
+  if (!rewrite_cp_refs_in_class_type_annotations(scratch_class, THREAD)) {
+    // propagate failure back to caller
+    return false;
+  }
+
+  // rewrite constant pool references in the fields_type_annotations:
+  if (!rewrite_cp_refs_in_fields_type_annotations(scratch_class, THREAD)) {
+    // propagate failure back to caller
+    return false;
+  }
+
+  // rewrite constant pool references in the methods_type_annotations:
+  if (!rewrite_cp_refs_in_methods_type_annotations(scratch_class, THREAD)) {
+    // propagate failure back to caller
+    return false;
+  }
+
+  // There can be type annotations in the Code part of a method_info attribute.
+  // These annotations are not accessible, even by reflection.
+  // Currently they are not even parsed by the ClassFileParser.
+  // If runtime access is added they will also need to be rewritten.
+
   // rewrite source file name index:
   u2 source_file_name_idx = scratch_class->source_file_name_index();
   if (source_file_name_idx != 0) {
@@ -2239,6 +2262,588 @@
 } // end rewrite_cp_refs_in_methods_default_annotations()
 
 
+// Rewrite constant pool references in a class_type_annotations field.
+bool VM_RedefineClasses::rewrite_cp_refs_in_class_type_annotations(
+       instanceKlassHandle scratch_class, TRAPS) {
+
+  AnnotationArray* class_type_annotations = scratch_class->class_type_annotations();
+  if (class_type_annotations == NULL || class_type_annotations->length() == 0) {
+    // no class_type_annotations so nothing to do
+    return true;
+  }
+
+  RC_TRACE_WITH_THREAD(0x02000000, THREAD,
+    ("class_type_annotations length=%d", class_type_annotations->length()));
+
+  int byte_i = 0;  // byte index into class_type_annotations
+  return rewrite_cp_refs_in_type_annotations_typeArray(class_type_annotations,
+      byte_i, "ClassFile", THREAD);
+} // end rewrite_cp_refs_in_class_type_annotations()
+
+
+// Rewrite constant pool references in a fields_type_annotations field.
+bool VM_RedefineClasses::rewrite_cp_refs_in_fields_type_annotations(
+       instanceKlassHandle scratch_class, TRAPS) {
+
+  Array<AnnotationArray*>* fields_type_annotations = scratch_class->fields_type_annotations();
+  if (fields_type_annotations == NULL || fields_type_annotations->length() == 0) {
+    // no fields_type_annotations so nothing to do
+    return true;
+  }
+
+  RC_TRACE_WITH_THREAD(0x02000000, THREAD,
+    ("fields_type_annotations length=%d", fields_type_annotations->length()));
+
+  for (int i = 0; i < fields_type_annotations->length(); i++) {
+    AnnotationArray* field_type_annotations = fields_type_annotations->at(i);
+    if (field_type_annotations == NULL || field_type_annotations->length() == 0) {
+      // this field does not have any annotations so skip it
+      continue;
+    }
+
+    int byte_i = 0;  // byte index into field_type_annotations
+    if (!rewrite_cp_refs_in_type_annotations_typeArray(field_type_annotations,
+           byte_i, "field_info", THREAD)) {
+      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
+        ("bad field_type_annotations at %d", i));
+      // propagate failure back to caller
+      return false;
+    }
+  }
+
+  return true;
+} // end rewrite_cp_refs_in_fields_type_annotations()
+
+
+// Rewrite constant pool references in a methods_type_annotations field.
+bool VM_RedefineClasses::rewrite_cp_refs_in_methods_type_annotations(
+       instanceKlassHandle scratch_class, TRAPS) {
+
+  for (int i = 0; i < scratch_class->methods()->length(); i++) {
+    Method* m = scratch_class->methods()->at(i);
+    AnnotationArray* method_type_annotations = m->constMethod()->type_annotations();
+
+    if (method_type_annotations == NULL || method_type_annotations->length() == 0) {
+      // this method does not have any annotations so skip it
+      continue;
+    }
+
+    RC_TRACE_WITH_THREAD(0x02000000, THREAD,
+        ("methods type_annotations length=%d", method_type_annotations->length()));
+
+    int byte_i = 0;  // byte index into method_type_annotations
+    if (!rewrite_cp_refs_in_type_annotations_typeArray(method_type_annotations,
+           byte_i, "method_info", THREAD)) {
+      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
+        ("bad method_type_annotations at %d", i));
+      // propagate failure back to caller
+      return false;
+    }
+  }
+
+  return true;
+} // end rewrite_cp_refs_in_methods_type_annotations()
+
+
+// Rewrite constant pool references in a type_annotations
+// field. This "structure" is adapted from the
+// RuntimeVisibleTypeAnnotations_attribute described in
+// section 4.7.20 of the Java SE 8 Edition of the VM spec:
+//
+// type_annotations_typeArray {
+//   u2              num_annotations;
+//   type_annotation annotations[num_annotations];
+// }
+//
+bool VM_RedefineClasses::rewrite_cp_refs_in_type_annotations_typeArray(
+       AnnotationArray* type_annotations_typeArray, int &byte_i_ref,
+       const char * location_mesg, TRAPS) {
+
+  if ((byte_i_ref + 2) > type_annotations_typeArray->length()) {
+    // not enough room for num_annotations field
+    RC_TRACE_WITH_THREAD(0x02000000, THREAD,
+      ("length() is too small for num_annotations field"));
+    return false;
+  }
+
+  u2 num_annotations = Bytes::get_Java_u2((address)
+                         type_annotations_typeArray->adr_at(byte_i_ref));
+  byte_i_ref += 2;
+
+  RC_TRACE_WITH_THREAD(0x02000000, THREAD,
+    ("num_type_annotations=%d", num_annotations));
+
+  int calc_num_annotations = 0;
+  for (; calc_num_annotations < num_annotations; calc_num_annotations++) {
+    if (!rewrite_cp_refs_in_type_annotation_struct(type_annotations_typeArray,
+           byte_i_ref, location_mesg, THREAD)) {
+      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
+        ("bad type_annotation_struct at %d", calc_num_annotations));
+      // propagate failure back to caller
+      return false;
+    }
+  }
+  assert(num_annotations == calc_num_annotations, "sanity check");
+
+  if (byte_i_ref != type_annotations_typeArray->length()) {
+    RC_TRACE_WITH_THREAD(0x02000000, THREAD,
+      ("read wrong amount of bytes at end of processing "
+       "type_annotations_typeArray (%d of %d bytes were read)",
+       byte_i_ref, type_annotations_typeArray->length()));
+    return false;
+  }
+
+  return true;
+} // end rewrite_cp_refs_in_type_annotations_typeArray()
+
+
+// Rewrite constant pool references in a type_annotation
+// field. This "structure" is adapted from the
+// RuntimeVisibleTypeAnnotations_attribute described in
+// section 4.7.20 of the Java SE 8 Edition of the VM spec:
+//
+// type_annotation {
+//   u1 target_type;
+//   union {
+//     type_parameter_target;
+//     supertype_target;
+//     type_parameter_bound_target;
+//     empty_target;
+//     method_formal_parameter_target;
+//     throws_target;
+//     localvar_target;
+//     catch_target;
+//     offset_target;
+//     type_argument_target;
+//   } target_info;
+//   type_path target_path;
+//   annotation anno;
+// }
+//
+bool VM_RedefineClasses::rewrite_cp_refs_in_type_annotation_struct(
+       AnnotationArray* type_annotations_typeArray, int &byte_i_ref,
+       const char * location_mesg, TRAPS) {
+
+  if (!skip_type_annotation_target(type_annotations_typeArray,
+         byte_i_ref, location_mesg, THREAD)) {
+    return false;
+  }
+
+  if (!skip_type_annotation_type_path(type_annotations_typeArray,
+         byte_i_ref, THREAD)) {
+    return false;
+  }
+
+  if (!rewrite_cp_refs_in_annotation_struct(type_annotations_typeArray,
+         byte_i_ref, THREAD)) {
+    return false;
+  }
+
+  return true;
+} // end rewrite_cp_refs_in_type_annotation_struct()
+
+
+// Read, verify and skip over the target_type and target_info part
+// so that rewriting can continue in the later parts of the struct.
+//
+// u1 target_type;
+// union {
+//   type_parameter_target;
+//   supertype_target;
+//   type_parameter_bound_target;
+//   empty_target;
+//   method_formal_parameter_target;
+//   throws_target;
+//   localvar_target;
+//   catch_target;
+//   offset_target;
+//   type_argument_target;
+// } target_info;
+//
+bool VM_RedefineClasses::skip_type_annotation_target(
+       AnnotationArray* type_annotations_typeArray, int &byte_i_ref,
+       const char * location_mesg, TRAPS) {
+
+  if ((byte_i_ref + 1) > type_annotations_typeArray->length()) {
+    // not enough room for a target_type let alone the rest of a type_annotation
+    RC_TRACE_WITH_THREAD(0x02000000, THREAD,
+      ("length() is too small for a target_type"));
+    return false;
+  }
+
+  u1 target_type = type_annotations_typeArray->at(byte_i_ref);
+  byte_i_ref += 1;
+  RC_TRACE_WITH_THREAD(0x02000000, THREAD, ("target_type=0x%.2x", target_type));
+  RC_TRACE_WITH_THREAD(0x02000000, THREAD, ("location=%s", location_mesg));
+
+  // Skip over target_info
+  switch (target_type) {
+    case 0x00:
+    // kind: type parameter declaration of generic class or interface
+    // location: ClassFile
+    case 0x01:
+    // kind: type parameter declaration of generic method or constructor
+    // location: method_info
+
+    {
+      // struct:
+      // type_parameter_target {
+      //   u1 type_parameter_index;
+      // }
+      //
+      if ((byte_i_ref + 1) > type_annotations_typeArray->length()) {
+        RC_TRACE_WITH_THREAD(0x02000000, THREAD,
+          ("length() is too small for a type_parameter_target"));
+        return false;
+      }
+
+      u1 type_parameter_index = type_annotations_typeArray->at(byte_i_ref);
+      byte_i_ref += 1;
+
+      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
+        ("type_parameter_target: type_parameter_index=%d",
+         type_parameter_index));
+    } break;
+
+    case 0x10:
+    // kind: type in extends clause of class or interface declaration
+    //       (including the direct superclass of an anonymous class declaration),
+    //       or in implements clause of interface declaration
+    // location: ClassFile
+
+    {
+      // struct:
+      // supertype_target {
+      //   u2 supertype_index;
+      // }
+      //
+      if ((byte_i_ref + 2) > type_annotations_typeArray->length()) {
+        RC_TRACE_WITH_THREAD(0x02000000, THREAD,
+          ("length() is too small for a supertype_target"));
+        return false;
+      }
+
+      u2 supertype_index = Bytes::get_Java_u2((address)
+                             type_annotations_typeArray->adr_at(byte_i_ref));
+      byte_i_ref += 2;
+
+      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
+        ("supertype_target: supertype_index=%d", supertype_index));
+    } break;
+
+    case 0x11:
+    // kind: type in bound of type parameter declaration of generic class or interface
+    // location: ClassFile
+    case 0x12:
+    // kind: type in bound of type parameter declaration of generic method or constructor
+    // location: method_info
+
+    {
+      // struct:
+      // type_parameter_bound_target {
+      //   u1 type_parameter_index;
+      //   u1 bound_index;
+      // }
+      //
+      if ((byte_i_ref + 2) > type_annotations_typeArray->length()) {
+        RC_TRACE_WITH_THREAD(0x02000000, THREAD,
+          ("length() is too small for a type_parameter_bound_target"));
+        return false;
+      }
+
+      u1 type_parameter_index = type_annotations_typeArray->at(byte_i_ref);
+      byte_i_ref += 1;
+      u1 bound_index = type_annotations_typeArray->at(byte_i_ref);
+      byte_i_ref += 1;
+
+      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
+        ("type_parameter_bound_target: type_parameter_index=%d, bound_index=%d",
+         type_parameter_index, bound_index));
+    } break;
+
+    case 0x13:
+    // kind: type in field declaration
+    // location: field_info
+    case 0x14:
+    // kind: return type of method, or type of newly constructed object
+    // location: method_info
+    case 0x15:
+    // kind: receiver type of method or constructor
+    // location: method_info
+
+    {
+      // struct:
+      // empty_target {
+      // }
+      //
+      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
+        ("empty_target"));
+    } break;
+
+    case 0x16:
+    // kind: type in formal parameter declaration of method, constructor, or lambda expression
+    // location: method_info
+
+    {
+      // struct:
+      // formal_parameter_target {
+      //   u1 formal_parameter_index;
+      // }
+      //
+      if ((byte_i_ref + 1) > type_annotations_typeArray->length()) {
+        RC_TRACE_WITH_THREAD(0x02000000, THREAD,
+          ("length() is too small for a formal_parameter_target"));
+        return false;
+      }
+
+      u1 formal_parameter_index = type_annotations_typeArray->at(byte_i_ref);
+      byte_i_ref += 1;
+
+      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
+        ("formal_parameter_target: formal_parameter_index=%d",
+         formal_parameter_index));
+    } break;
+
+    case 0x17:
+    // kind: type in throws clause of method or constructor
+    // location: method_info
+
+    {
+      // struct:
+      // throws_target {
+      //   u2 throws_type_index;
+      // }
+      //
+      if ((byte_i_ref + 2) > type_annotations_typeArray->length()) {
+        RC_TRACE_WITH_THREAD(0x02000000, THREAD,
+          ("length() is too small for a throws_target"));
+        return false;
+      }
+
+      u2 throws_type_index = Bytes::get_Java_u2((address)
+                               type_annotations_typeArray->adr_at(byte_i_ref));
+      byte_i_ref += 2;
+
+      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
+        ("throws_target: throws_type_index=%d", throws_type_index));
+    } break;
+
+    case 0x40:
+    // kind: type in local variable declaration
+    // location: Code
+    case 0x41:
+    // kind: type in resource variable declaration
+    // location: Code
+
+    {
+      // struct:
+      // localvar_target {
+      //   u2 table_length;
+      //   struct {
+      //     u2 start_pc;
+      //     u2 length;
+      //     u2 index;
+      //   } table[table_length];
+      // }
+      //
+      if ((byte_i_ref + 2) > type_annotations_typeArray->length()) {
+        // not enough room for a table_length let alone the rest of a localvar_target
+        RC_TRACE_WITH_THREAD(0x02000000, THREAD,
+          ("length() is too small for a localvar_target table_length"));
+        return false;
+      }
+
+      u2 table_length = Bytes::get_Java_u2((address)
+                          type_annotations_typeArray->adr_at(byte_i_ref));
+      byte_i_ref += 2;
+
+      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
+        ("localvar_target: table_length=%d", table_length));
+
+      int table_struct_size = 2 + 2 + 2; // 3 u2 variables per table entry
+      int table_size = table_length * table_struct_size;
+
+      if ((byte_i_ref + table_size) > type_annotations_typeArray->length()) {
+        // not enough room for a table
+        RC_TRACE_WITH_THREAD(0x02000000, THREAD,
+          ("length() is too small for a table array of length %d", table_length));
+        return false;
+      }
+
+      // Skip over table
+      byte_i_ref += table_size;
+    } break;
+
+    case 0x42:
+    // kind: type in exception parameter declaration
+    // location: Code
+
+    {
+      // struct:
+      // catch_target {
+      //   u2 exception_table_index;
+      // }
+      //
+      if ((byte_i_ref + 2) > type_annotations_typeArray->length()) {
+        RC_TRACE_WITH_THREAD(0x02000000, THREAD,
+          ("length() is too small for a catch_target"));
+        return false;
+      }
+
+      u2 exception_table_index = Bytes::get_Java_u2((address)
+                                   type_annotations_typeArray->adr_at(byte_i_ref));
+      byte_i_ref += 2;
+
+      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
+        ("catch_target: exception_table_index=%d", exception_table_index));
+    } break;
+
+    case 0x43:
+    // kind: type in instanceof expression
+    // location: Code
+    case 0x44:
+    // kind: type in new expression
+    // location: Code
+    case 0x45:
+    // kind: type in method reference expression using ::new
+    // location: Code
+    case 0x46:
+    // kind: type in method reference expression using ::Identifier
+    // location: Code
+
+    {
+      // struct:
+      // offset_target {
+      //   u2 offset;
+      // }
+      //
+      if ((byte_i_ref + 2) > type_annotations_typeArray->length()) {
+        RC_TRACE_WITH_THREAD(0x02000000, THREAD,
+          ("length() is too small for a offset_target"));
+        return false;
+      }
+
+      u2 offset = Bytes::get_Java_u2((address)
+                    type_annotations_typeArray->adr_at(byte_i_ref));
+      byte_i_ref += 2;
+
+      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
+        ("offset_target: offset=%d", offset));
+    } break;
+
+    case 0x47:
+    // kind: type in cast expression
+    // location: Code
+    case 0x48:
+    // kind: type argument for generic constructor in new expression or
+    //       explicit constructor invocation statement
+    // location: Code
+    case 0x49:
+    // kind: type argument for generic method in method invocation expression
+    // location: Code
+    case 0x4A:
+    // kind: type argument for generic constructor in method reference expression using ::new
+    // location: Code
+    case 0x4B:
+    // kind: type argument for generic method in method reference expression using ::Identifier
+    // location: Code
+
+    {
+      // struct:
+      // type_argument_target {
+      //   u2 offset;
+      //   u1 type_argument_index;
+      // }
+      //
+      if ((byte_i_ref + 3) > type_annotations_typeArray->length()) {
+        RC_TRACE_WITH_THREAD(0x02000000, THREAD,
+          ("length() is too small for a type_argument_target"));
+        return false;
+      }
+
+      u2 offset = Bytes::get_Java_u2((address)
+                    type_annotations_typeArray->adr_at(byte_i_ref));
+      byte_i_ref += 2;
+      u1 type_argument_index = type_annotations_typeArray->at(byte_i_ref);
+      byte_i_ref += 1;
+
+      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
+        ("type_argument_target: offset=%d, type_argument_index=%d",
+         offset, type_argument_index));
+    } break;
+
+    default:
+      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
+        ("unknown target_type"));
+#ifdef ASSERT
+      ShouldNotReachHere();
+#endif
+      return false;
+  }
+
+  return true;
+} // end skip_type_annotation_target()
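Editor's note: the switch above walks each target_info form byte by byte. Purely as an illustrative summary (not part of the changeset), the sizes implied by the JVMS 4.7.20 layouts quoted in the comments can be collected in one lookup. Plain C types stand in for HotSpot's u1/u2 typedefs here; localvar_target is the only variable-length form.

    // Illustrative sketch only: bytes occupied by the target_info union for a
    // given target_type, matching the layouts skipped case by case above.
    // Returns -1 for localvar_target (0x40/0x41), whose total size is
    // 2 + 6 * table_length and therefore depends on the data itself.
    static int target_info_fixed_size(unsigned char target_type) {
      switch (target_type) {
        case 0x00: case 0x01:                       return 1;  // type_parameter_target
        case 0x10:                                  return 2;  // supertype_target
        case 0x11: case 0x12:                       return 2;  // type_parameter_bound_target
        case 0x13: case 0x14: case 0x15:            return 0;  // empty_target
        case 0x16:                                  return 1;  // formal_parameter_target
        case 0x17:                                  return 2;  // throws_target
        case 0x40: case 0x41:                       return -1; // localvar_target (variable length)
        case 0x42:                                  return 2;  // catch_target
        case 0x43: case 0x44: case 0x45: case 0x46: return 2;  // offset_target
        case 0x47: case 0x48: case 0x49:
        case 0x4A: case 0x4B:                       return 3;  // type_argument_target
        default:                                    return -1; // unknown target_type
      }
    }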
+
+
+// Read, verify and skip over the type_path part so that rewriting
+// can continue in the later parts of the struct.
+//
+// type_path {
+//   u1 path_length;
+//   {
+//     u1 type_path_kind;
+//     u1 type_argument_index;
+//   } path[path_length];
+// }
+//
+bool VM_RedefineClasses::skip_type_annotation_type_path(
+       AnnotationArray* type_annotations_typeArray, int &byte_i_ref, TRAPS) {
+
+  if ((byte_i_ref + 1) > type_annotations_typeArray->length()) {
+    // not enough room for a path_length let alone the rest of the type_path
+    RC_TRACE_WITH_THREAD(0x02000000, THREAD,
+      ("length() is too small for a type_path"));
+    return false;
+  }
+
+  u1 path_length = type_annotations_typeArray->at(byte_i_ref);
+  byte_i_ref += 1;
+
+  RC_TRACE_WITH_THREAD(0x02000000, THREAD,
+    ("type_path: path_length=%d", path_length));
+
+  int calc_path_length = 0;
+  for (; calc_path_length < path_length; calc_path_length++) {
+    if ((byte_i_ref + 1 + 1) > type_annotations_typeArray->length()) {
+      // not enough room for a path
+      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
+        ("length() is too small for path entry %d of %d",
+         calc_path_length, path_length));
+      return false;
+    }
+
+    u1 type_path_kind = type_annotations_typeArray->at(byte_i_ref);
+    byte_i_ref += 1;
+    u1 type_argument_index = type_annotations_typeArray->at(byte_i_ref);
+    byte_i_ref += 1;
+
+    RC_TRACE_WITH_THREAD(0x02000000, THREAD,
+      ("type_path: path[%d]: type_path_kind=%d, type_argument_index=%d",
+       calc_path_length, type_path_kind, type_argument_index));
+
+    if (type_path_kind > 3 || (type_path_kind != 3 && type_argument_index != 0)) {
+      // invalid type_path_kind / type_argument_index combination
+      RC_TRACE_WITH_THREAD(0x02000000, THREAD,
+        ("inconsistent type_path values"));
+      return false;
+    }
+  }
+  assert(path_length == calc_path_length, "sanity check");
+
+  return true;
+} // end skip_type_annotation_type_path()
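Editor's note: a well-formed type_path occupies a fixed number of bytes once path_length is known: one byte for path_length itself plus two bytes per path entry. A minimal illustrative helper (again plain C types, not part of the changeset):

    // Illustrative sketch only: total size in bytes of a type_path whose
    // path_length has already been read.
    static int type_path_size(unsigned char path_length) {
      return 1 + 2 * (int) path_length;  // path_length byte + {kind, index} pairs
    }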
+
+
 // Rewrite constant pool references in the method's stackmap table.
 // These "structures" are adapted from the StackMapTable_attribute that
 // is described in section 4.8.4 of the 6.0 version of the VM spec
@@ -3223,23 +3828,6 @@
 
 void VM_RedefineClasses::swap_annotations(instanceKlassHandle the_class,
                                           instanceKlassHandle scratch_class) {
-  // Since there is currently no rewriting of type annotations indexes
-  // into the CP, we null out type annotations on scratch_class before
-  // we swap annotations with the_class rather than facing the
-  // possibility of shipping annotations with broken indexes to
-  // Java-land.
-  ClassLoaderData* loader_data = scratch_class->class_loader_data();
-  AnnotationArray* new_class_type_annotations = scratch_class->class_type_annotations();
-  if (new_class_type_annotations != NULL) {
-    MetadataFactory::free_array<u1>(loader_data, new_class_type_annotations);
-    scratch_class->annotations()->set_class_type_annotations(NULL);
-  }
-  Array<AnnotationArray*>* new_field_type_annotations = scratch_class->fields_type_annotations();
-  if (new_field_type_annotations != NULL) {
-    Annotations::free_contents(loader_data, new_field_type_annotations);
-    scratch_class->annotations()->set_fields_type_annotations(NULL);
-  }
-
   // Swap annotation fields values
   Annotations* old_annotations = the_class->annotations();
   the_class->set_annotations(scratch_class->annotations());
--- a/src/share/vm/prims/jvmtiRedefineClasses.hpp	Wed Nov 05 12:54:08 2014 -0800
+++ b/src/share/vm/prims/jvmtiRedefineClasses.hpp	Fri Nov 07 09:22:57 2014 -0800
@@ -457,6 +457,17 @@
     instanceKlassHandle scratch_class, TRAPS);
   bool rewrite_cp_refs_in_element_value(
     AnnotationArray* class_annotations, int &byte_i_ref, TRAPS);
+  bool rewrite_cp_refs_in_type_annotations_typeArray(
+    AnnotationArray* type_annotations_typeArray, int &byte_i_ref,
+    const char * location_mesg, TRAPS);
+  bool rewrite_cp_refs_in_type_annotation_struct(
+    AnnotationArray* type_annotations_typeArray, int &byte_i_ref,
+    const char * location_mesg, TRAPS);
+  bool skip_type_annotation_target(
+    AnnotationArray* type_annotations_typeArray, int &byte_i_ref,
+    const char * location_mesg, TRAPS);
+  bool skip_type_annotation_type_path(
+    AnnotationArray* type_annotations_typeArray, int &byte_i_ref, TRAPS);
   bool rewrite_cp_refs_in_fields_annotations(
     instanceKlassHandle scratch_class, TRAPS);
   void rewrite_cp_refs_in_method(methodHandle method,
@@ -468,6 +479,12 @@
     instanceKlassHandle scratch_class, TRAPS);
   bool rewrite_cp_refs_in_methods_parameter_annotations(
     instanceKlassHandle scratch_class, TRAPS);
+  bool rewrite_cp_refs_in_class_type_annotations(
+    instanceKlassHandle scratch_class, TRAPS);
+  bool rewrite_cp_refs_in_fields_type_annotations(
+    instanceKlassHandle scratch_class, TRAPS);
+  bool rewrite_cp_refs_in_methods_type_annotations(
+    instanceKlassHandle scratch_class, TRAPS);
   void rewrite_cp_refs_in_stack_map_table(methodHandle method, TRAPS);
   void rewrite_cp_refs_in_verification_type_info(
          address& stackmap_addr_ref, address stackmap_end, u2 frame_i,
--- a/src/share/vm/prims/whitebox.cpp	Wed Nov 05 12:54:08 2014 -0800
+++ b/src/share/vm/prims/whitebox.cpp	Fri Nov 07 09:22:57 2014 -0800
@@ -104,6 +104,28 @@
   return closure.found();
 WB_END
 
+WB_ENTRY(jboolean, WB_ClassKnownToNotExist(JNIEnv* env, jobject o, jobject loader, jstring name))
+  ThreadToNativeFromVM ttnfv(thread);   // can't be in VM when we call JNI
+  const char* class_name = env->GetStringUTFChars(name, NULL);
+  jboolean result = JVM_KnownToNotExist(env, loader, class_name);
+  env->ReleaseStringUTFChars(name, class_name);
+  return result;
+WB_END
+
+WB_ENTRY(jobjectArray, WB_GetLookupCacheURLs(JNIEnv* env, jobject o, jobject loader))
+  ThreadToNativeFromVM ttnfv(thread);   // can't be in VM when we call JNI
+  return JVM_GetResourceLookupCacheURLs(env, loader);
+WB_END
+
+WB_ENTRY(jintArray, WB_GetLookupCacheMatches(JNIEnv* env, jobject o, jobject loader, jstring name))
+  ThreadToNativeFromVM ttnfv(thread);   // can't be in VM when we call JNI
+  const char* resource_name = env->GetStringUTFChars(name, NULL);
+  jintArray result = JVM_GetResourceLookupCache(env, loader, resource_name);
+
+  env->ReleaseStringUTFChars(name, resource_name);
+  return result;
+WB_END
+
 WB_ENTRY(jlong, WB_GetCompressedOopsMaxHeapSize(JNIEnv* env, jobject o)) {
   return (jlong)Arguments::max_heap_for_compressed_oops();
 }
@@ -382,19 +404,10 @@
   CHECK_JNI_EXCEPTION_(env, result);
   MutexLockerEx mu(Compile_lock);
   methodHandle mh(THREAD, Method::checked_resolve_jmethod_id(jmid));
-  nmethod* code;
   if (is_osr) {
-    int bci = InvocationEntryBci;
-    while ((code = mh->lookup_osr_nmethod_for(bci, CompLevel_none, false)) != NULL) {
-      code->mark_for_deoptimization();
-      ++result;
-      bci = code->osr_entry_bci() + 1;
-    }
-  } else {
-    code = mh->code();
-  }
-  if (code != NULL) {
-    code->mark_for_deoptimization();
+    result += mh->mark_osr_nmethods();
+  } else if (mh->code() != NULL) {
+    mh->code()->mark_for_deoptimization();
     ++result;
   }
   result += CodeCache::mark_for_deoptimization(mh());
@@ -939,6 +952,11 @@
   {CC"isObjectInOldGen",   CC"(Ljava/lang/Object;)Z", (void*)&WB_isObjectInOldGen  },
   {CC"getHeapOopSize",     CC"()I",                   (void*)&WB_GetHeapOopSize    },
   {CC"isClassAlive0",      CC"(Ljava/lang/String;)Z", (void*)&WB_IsClassAlive      },
+  {CC"classKnownToNotExist",
+                           CC"(Ljava/lang/ClassLoader;Ljava/lang/String;)Z",(void*)&WB_ClassKnownToNotExist},
+  {CC"getLookupCacheURLs", CC"(Ljava/lang/ClassLoader;)[Ljava/net/URL;",    (void*)&WB_GetLookupCacheURLs},
+  {CC"getLookupCacheMatches", CC"(Ljava/lang/ClassLoader;Ljava/lang/String;)[I",
+                                                      (void*)&WB_GetLookupCacheMatches},
   {CC"parseCommandLine",
       CC"(Ljava/lang/String;[Lsun/hotspot/parser/DiagnosticCommand;)[Ljava/lang/Object;",
       (void*) &WB_ParseCommandLine
--- a/src/share/vm/runtime/simpleThresholdPolicy.cpp	Wed Nov 05 12:54:08 2014 -0800
+++ b/src/share/vm/runtime/simpleThresholdPolicy.cpp	Fri Nov 07 09:22:57 2014 -0800
@@ -196,7 +196,6 @@
     // Don't trigger other compiles in testing mode
     return NULL;
   }
-  nmethod *osr_nm = NULL;
 
   handle_counter_overflow(method());
   if (method() != inlinee()) {
@@ -210,14 +209,16 @@
   if (bci == InvocationEntryBci) {
     method_invocation_event(method, inlinee, comp_level, nm, thread);
   } else {
+    // method == inlinee if the event originated in the main method
     method_back_branch_event(method, inlinee, bci, comp_level, nm, thread);
-    // method == inlinee if the event originated in the main method
-    int highest_level = inlinee->highest_osr_comp_level();
-    if (highest_level > comp_level) {
-      osr_nm = inlinee->lookup_osr_nmethod_for(bci, highest_level, false);
+    // Check if event led to a higher level OSR compilation
+    nmethod* osr_nm = inlinee->lookup_osr_nmethod_for(bci, comp_level, false);
+    if (osr_nm != NULL && osr_nm->comp_level() > comp_level) {
+      // Perform OSR with new nmethod
+      return osr_nm;
     }
   }
-  return osr_nm;
+  return NULL;
 }
 
 // Check if the method can be compiled, change level if necessary
--- a/src/share/vm/runtime/thread.cpp	Wed Nov 05 12:54:08 2014 -0800
+++ b/src/share/vm/runtime/thread.cpp	Fri Nov 07 09:22:57 2014 -0800
@@ -234,6 +234,8 @@
   // This initial value ==> never claimed.
   _oops_do_parity = 0;
 
+  _metadata_on_stack_buffer = NULL;
+
   // the handle mark links itself to last_handle_mark
   new HandleMark(this);
 
--- a/src/share/vm/runtime/thread.hpp	Wed Nov 05 12:54:08 2014 -0800
+++ b/src/share/vm/runtime/thread.hpp	Fri Nov 07 09:22:57 2014 -0800
@@ -42,11 +42,10 @@
 #include "runtime/threadLocalStorage.hpp"
 #include "runtime/thread_ext.hpp"
 #include "runtime/unhandledOops.hpp"
-#include "utilities/macros.hpp"
-
 #include "trace/traceBackend.hpp"
 #include "trace/traceMacros.hpp"
 #include "utilities/exceptions.hpp"
+#include "utilities/macros.hpp"
 #include "utilities/top.hpp"
 #if INCLUDE_ALL_GCS
 #include "gc_implementation/g1/dirtyCardQueue.hpp"
@@ -83,6 +82,10 @@
 class ThreadClosure;
 class IdealGraphPrinter;
 
+class Metadata;
+template <class T, MEMFLAGS F> class ChunkedList;
+typedef ChunkedList<Metadata*, mtInternal> MetadataOnStackBuffer;
+
 DEBUG_ONLY(class ResourceMark;)
 
 class WorkerThread;
@@ -256,6 +259,9 @@
   jlong _allocated_bytes;                       // Cumulative number of bytes allocated on
                                                 // the Java heap
 
+  // Thread-local buffer used by MetadataOnStackMark.
+  MetadataOnStackBuffer* _metadata_on_stack_buffer;
+
   TRACE_DATA _trace_data;                       // Thread-local data for tracing
 
   ThreadExt _ext;
@@ -517,7 +523,10 @@
   // creation fails due to lack of memory, too many threads etc.
   bool set_as_starting_thread();
 
- protected:
+  void set_metadata_on_stack_buffer(MetadataOnStackBuffer* buffer) { _metadata_on_stack_buffer = buffer; }
+  MetadataOnStackBuffer* metadata_on_stack_buffer() const          { return _metadata_on_stack_buffer; }
+
+protected:
   // OS data associated with the thread
   OSThread* _osthread;  // Platform-specific thread information
 
--- a/src/share/vm/services/runtimeService.cpp	Wed Nov 05 12:54:08 2014 -0800
+++ b/src/share/vm/services/runtimeService.cpp	Fri Nov 07 09:22:57 2014 -0800
@@ -46,6 +46,7 @@
 PerfCounter*  RuntimeService::_thread_interrupt_signaled_count = NULL;
 PerfCounter*  RuntimeService::_interrupted_before_count = NULL;
 PerfCounter*  RuntimeService::_interrupted_during_count = NULL;
+double RuntimeService::_last_safepoint_sync_time_sec = 0.0;
 
 void RuntimeService::init() {
   // Make sure the VM version is initialized
@@ -128,6 +129,7 @@
 
   // update the time stamp to begin recording safepoint time
   _safepoint_timer.update();
+  _last_safepoint_sync_time_sec = 0.0;
   if (UsePerfData) {
     _total_safepoints->inc();
     if (_app_timer.is_updated()) {
@@ -140,6 +142,9 @@
   if (UsePerfData) {
     _sync_time_ticks->inc(_safepoint_timer.ticks_since_update());
   }
+  if (PrintGCApplicationStoppedTime) {
+    _last_safepoint_sync_time_sec = last_safepoint_time_sec();
+  }
 }
 
 void RuntimeService::record_safepoint_end() {
@@ -155,8 +160,10 @@
     gclog_or_tty->date_stamp(PrintGCDateStamps);
     gclog_or_tty->stamp(PrintGCTimeStamps);
     gclog_or_tty->print_cr("Total time for which application threads "
-                           "were stopped: %3.7f seconds",
-                           last_safepoint_time_sec());
+                           "were stopped: %3.7f seconds, "
+                           "Stopping threads took: %3.7f seconds",
+                           last_safepoint_time_sec(),
+                           _last_safepoint_sync_time_sec);
   }
 
   // update the time stamp to begin recording app time
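Editor's note: with this change a -XX:+PrintGCApplicationStoppedTime line reports both the total stop time and the time spent bringing threads to the safepoint. With purely made-up numbers for illustration, the output now looks roughly like:

    Total time for which application threads were stopped: 0.0123456 seconds, Stopping threads took: 0.0004567 seconds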
--- a/src/share/vm/services/runtimeService.hpp	Wed Nov 05 12:54:08 2014 -0800
+++ b/src/share/vm/services/runtimeService.hpp	Fri Nov 07 09:22:57 2014 -0800
@@ -40,6 +40,7 @@
 
   static TimeStamp _safepoint_timer;
   static TimeStamp _app_timer;
+  static double _last_safepoint_sync_time_sec;
 
 public:
   static void init();
--- a/src/share/vm/utilities/accessFlags.cpp	Wed Nov 05 12:54:08 2014 -0800
+++ b/src/share/vm/utilities/accessFlags.cpp	Fri Nov 07 09:22:57 2014 -0800
@@ -62,6 +62,21 @@
   } while(f != old_flags);
 }
 
+// Returns true iff this thread succeeded in setting the bit.
+bool AccessFlags::atomic_set_one_bit(jint bit) {
+  // Atomically update the flags with the bit given
+  jint old_flags, new_flags, f;
+  bool is_setting_bit = false;
+  do {
+    old_flags = _flags;
+    new_flags = old_flags | bit;
+    is_setting_bit = old_flags != new_flags;
+    f = Atomic::cmpxchg(new_flags, &_flags, old_flags);
+  } while(f != old_flags);
+
+  return is_setting_bit;
+}
+
 #if !defined(PRODUCT) || INCLUDE_JVMTI
 
 void AccessFlags::print_on(outputStream* st) const {
--- a/src/share/vm/utilities/accessFlags.hpp	Wed Nov 05 12:54:08 2014 -0800
+++ b/src/share/vm/utilities/accessFlags.hpp	Fri Nov 07 09:22:57 2014 -0800
@@ -170,6 +170,7 @@
 
   // Atomic update of flags
   void atomic_set_bits(jint bits);
+  bool atomic_set_one_bit(jint bit);
   void atomic_clear_bits(jint bits);
 
  private:
@@ -230,12 +231,13 @@
                                          atomic_set_bits(JVM_ACC_FIELD_HAS_GENERIC_SIGNATURE);
                                        }
 
-  void set_on_stack(const bool value)
+  bool set_on_stack(const bool value)
                                        {
                                          if (value) {
-                                           atomic_set_bits(JVM_ACC_ON_STACK);
+                                           return atomic_set_one_bit(JVM_ACC_ON_STACK);
                                          } else {
                                            atomic_clear_bits(JVM_ACC_ON_STACK);
+                                           return true; // Ignored
                                          }
                                        }
   // Conversion
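Editor's note: together, atomic_set_one_bit and the new boolean return from set_on_stack are intended to let exactly one caller "claim" JVM_ACC_ON_STACK for a given flags word. A standalone sketch of the same compare-and-swap idiom, using std::atomic instead of HotSpot's Atomic::cmpxchg purely for illustration (not part of the changeset):

    #include <atomic>

    // Illustrative sketch only: returns true iff this call is the one that
    // transitions the bit from clear to set; racing callers observe false.
    static bool set_one_bit_once(std::atomic<int>& flags, int bit) {
      int old_flags = flags.load();
      do {
        if ((old_flags & bit) != 0) {
          return false;                  // someone else already set the bit
        }
        // compare_exchange_weak reloads old_flags on failure and we retry
      } while (!flags.compare_exchange_weak(old_flags, old_flags | bit));
      return true;                       // this thread installed the bit
    }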
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/share/vm/utilities/chunkedList.cpp	Fri Nov 07 09:22:57 2014 -0800
@@ -0,0 +1,109 @@
+/*
+ * Copyright (c) 2014, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ *
+ */
+
+#include "precompiled.hpp"
+#include "utilities/chunkedList.hpp"
+#include "utilities/debug.hpp"
+
+/////////////// Unit tests ///////////////
+
+#ifndef PRODUCT
+
+template <typename T>
+class TestChunkedList {
+  typedef ChunkedList<T, mtOther> ChunkedListT;
+
+ public:
+  static void testEmpty() {
+    ChunkedListT buffer;
+    assert(buffer.size() == 0, "assert");
+  }
+
+  static void testFull() {
+    ChunkedListT buffer;
+    for (uintptr_t i = 0; i < ChunkedListT::BufferSize; i++) {
+      buffer.push((T)i);
+    }
+    assert(buffer.size() == ChunkedListT::BufferSize, "assert");
+    assert(buffer.is_full(), "assert");
+  }
+
+  static void testSize() {
+    ChunkedListT buffer;
+    for (uintptr_t i = 0; i < ChunkedListT::BufferSize; i++) {
+      assert(buffer.size() == i, "assert");
+      buffer.push((T)i);
+      assert(buffer.size() == i + 1, "assert");
+    }
+  }
+
+  static void testClear() {
+    ChunkedListT buffer;
+
+    buffer.clear();
+    assert(buffer.size() == 0, "assert");
+
+    for (uintptr_t i = 0; i < ChunkedListT::BufferSize / 2; i++) {
+      buffer.push((T)i);
+    }
+    buffer.clear();
+    assert(buffer.size() == 0, "assert");
+
+    for (uintptr_t i = 0; i < ChunkedListT::BufferSize; i++) {
+      buffer.push((T)i);
+    }
+    buffer.clear();
+    assert(buffer.size() == 0, "assert");
+  }
+
+  static void testAt() {
+    ChunkedListT buffer;
+
+    for (uintptr_t i = 0; i < ChunkedListT::BufferSize; i++) {
+      buffer.push((T)i);
+      assert(buffer.at(i) == (T)i, "assert");
+    }
+
+    for (uintptr_t i = 0; i < ChunkedListT::BufferSize; i++) {
+      assert(buffer.at(i) == (T)i, "assert");
+    }
+  }
+
+  static void test() {
+    testEmpty();
+    testFull();
+    testSize();
+    testClear();
+    testAt();
+  }
+};
+
+class Metadata;
+
+void TestChunkedList_test() {
+  TestChunkedList<Metadata*>::test();
+  TestChunkedList<size_t>::test();
+}
+
+#endif
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/share/vm/utilities/chunkedList.hpp	Fri Nov 07 09:22:57 2014 -0800
@@ -0,0 +1,81 @@
+/*
+ * Copyright (c) 2014, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ *
+ */
+
+#ifndef SHARE_VM_UTILITIES_CHUNKED_LIST_HPP
+#define SHARE_VM_UTILITIES_CHUNKED_LIST_HPP
+
+#include "memory/allocation.hpp"
+#include "utilities/debug.hpp"
+
+template <class T, MEMFLAGS F> class ChunkedList : public CHeapObj<F> {
+  template <class U> friend class TestChunkedList;
+
+  static const size_t BufferSize = 64;
+
+  T  _values[BufferSize];
+  T* _top;
+
+  ChunkedList<T, F>* _next_used;
+  ChunkedList<T, F>* _next_free;
+
+  T const * end() const {
+    return &_values[BufferSize];
+  }
+
+ public:
+  ChunkedList<T, F>() : _top(_values), _next_used(NULL), _next_free(NULL) {}
+
+  bool is_full() const {
+    return _top == end();
+  }
+
+  void clear() {
+    _top = _values;
+    // Don't clear the next pointers since that would interfere
+    // with other threads trying to iterate through the lists.
+  }
+
+  void push(T m) {
+    assert(!is_full(), "Buffer is full");
+    *_top = m;
+    _top++;
+  }
+
+  void set_next_used(ChunkedList<T, F>* buffer) { _next_used = buffer; }
+  void set_next_free(ChunkedList<T, F>* buffer) { _next_free = buffer; }
+
+  ChunkedList<T, F>* next_used() const          { return _next_used; }
+  ChunkedList<T, F>* next_free() const          { return _next_free; }
+
+  size_t size() const {
+    return pointer_delta(_top, _values, sizeof(T));
+  }
+
+  T at(size_t i) {
+    assert(i < size(), err_msg("IOOBE i: " SIZE_FORMAT " size(): " SIZE_FORMAT, i, size()));
+    return _values[i];
+  }
+};
+
+#endif // SHARE_VM_UTILITIES_CHUNKED_LIST_HPP
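Editor's note: because the header above is self-contained, a short usage sketch may help. It is illustrative only (not part of the changeset) and mirrors what the unit tests in chunkedList.cpp exercise: push until is_full(), then continue in a second buffer linked via set_next_used().

    #include "utilities/chunkedList.hpp"

    // Illustrative sketch only: fill one buffer, then spill into a second one
    // and link them the way next_used() allows buffers to be chained.
    static void chunked_list_example() {
      ChunkedList<size_t, mtOther> first;
      ChunkedList<size_t, mtOther> second;
      ChunkedList<size_t, mtOther>* current = &first;

      for (size_t v = 0; v < 100; v++) {
        if (current->is_full()) {          // each buffer holds 64 values
          second.set_next_used(&first);    // remember the filled buffer
          current = &second;
        }
        current->push(v);
      }
      // first.size() == 64, second.size() == 36, second.at(0) == 64
    }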
--- a/test/compiler/whitebox/CompilerWhiteBoxTest.java	Wed Nov 05 12:54:08 2014 -0800
+++ b/test/compiler/whitebox/CompilerWhiteBoxTest.java	Fri Nov 07 09:22:57 2014 -0800
@@ -72,9 +72,9 @@
     /** Flag for verbose output, true if {@code -Dverbose} specified */
     protected static final boolean IS_VERBOSE
             = System.getProperty("verbose") != null;
-    /** count of invocation to triger compilation */
+    /** invocation count to trigger compilation */
     protected static final int THRESHOLD;
-    /** count of invocation to triger OSR compilation */
+    /** invocation count to trigger OSR compilation */
     protected static final long BACKEDGE_THRESHOLD;
     /** Value of {@code java.vm.info} (interpreted|mixed|comp mode) */
     protected static final String MODE = System.getProperty("java.vm.info");
@@ -206,7 +206,6 @@
      *                          is compiled, or if {@linkplain #method} has zero
      *                          compilation level.
      */
-
     protected final void checkNotCompiled(int compLevel) {
         if (WHITE_BOX.isMethodQueuedForCompilation(method)) {
             throw new RuntimeException(method + " must not be in queue");
@@ -227,20 +226,30 @@
      *                          compilation level.
      */
     protected final void checkNotCompiled() {
+        checkNotCompiled(true);
+        checkNotCompiled(false);
+    }
+
+    /**
+     * Checks, that {@linkplain #method} is not (OSR-)compiled.
+     *
+     * @param isOsr Check for OSR compilation if true
+     * @throws RuntimeException if {@linkplain #method} is in compiler queue or
+     *                          is compiled, or if {@linkplain #method} has zero
+     *                          compilation level.
+     */
+    protected final void checkNotCompiled(boolean isOsr) {
+        waitBackgroundCompilation();
         if (WHITE_BOX.isMethodQueuedForCompilation(method)) {
             throw new RuntimeException(method + " must not be in queue");
         }
-        if (WHITE_BOX.isMethodCompiled(method, false)) {
-            throw new RuntimeException(method + " must be not compiled");
-        }
-        if (WHITE_BOX.getMethodCompilationLevel(method, false) != 0) {
-            throw new RuntimeException(method + " comp_level must be == 0");
+        if (WHITE_BOX.isMethodCompiled(method, isOsr)) {
+            throw new RuntimeException(method + " must not be " +
+                                       (isOsr ? "osr_" : "") + "compiled");
         }
-        if (WHITE_BOX.isMethodCompiled(method, true)) {
-            throw new RuntimeException(method + " must be not osr_compiled");
-        }
-        if (WHITE_BOX.getMethodCompilationLevel(method, true) != 0) {
-            throw new RuntimeException(method + " osr_comp_level must be == 0");
+        if (WHITE_BOX.getMethodCompilationLevel(method, isOsr) != 0) {
+            throw new RuntimeException(method + (isOsr ? " osr_" : " ") +
+                                       "comp_level must be == 0");
         }
     }
 
@@ -306,12 +315,21 @@
      * Waits for completion of background compilation of {@linkplain #method}.
      */
     protected final void waitBackgroundCompilation() {
+        waitBackgroundCompilation(method);
+    }
+
+    /**
+     * Waits for completion of background compilation of the given executable.
+     *
+     * @param executable Executable
+     */
+    protected static final void waitBackgroundCompilation(Executable executable) {
         if (!BACKGROUND_COMPILATION) {
             return;
         }
         final Object obj = new Object();
         for (int i = 0; i < 10
-                && WHITE_BOX.isMethodQueuedForCompilation(method); ++i) {
+                && WHITE_BOX.isMethodQueuedForCompilation(executable); ++i) {
             synchronized (obj) {
                 try {
                     obj.wait(1000);
@@ -425,14 +443,14 @@
     /** constructor test case */
     CONSTRUCTOR_TEST(Helper.CONSTRUCTOR, Helper.CONSTRUCTOR_CALLABLE, false),
     /** method test case */
-    METOD_TEST(Helper.METHOD, Helper.METHOD_CALLABLE, false),
+    METHOD_TEST(Helper.METHOD, Helper.METHOD_CALLABLE, false),
     /** static method test case */
     STATIC_TEST(Helper.STATIC, Helper.STATIC_CALLABLE, false),
     /** OSR constructor test case */
     OSR_CONSTRUCTOR_TEST(Helper.OSR_CONSTRUCTOR,
             Helper.OSR_CONSTRUCTOR_CALLABLE, true),
     /** OSR method test case */
-    OSR_METOD_TEST(Helper.OSR_METHOD, Helper.OSR_METHOD_CALLABLE, true),
+    OSR_METHOD_TEST(Helper.OSR_METHOD, Helper.OSR_METHOD_CALLABLE, true),
     /** OSR static method test case */
     OSR_STATIC_TEST(Helper.OSR_STATIC, Helper.OSR_STATIC_CALLABLE, true);
 
@@ -494,7 +512,7 @@
                 = new Callable<Integer>() {
             @Override
             public Integer call() throws Exception {
-                return new Helper(null).hashCode();
+                return new Helper(null, CompilerWhiteBoxTest.BACKEDGE_THRESHOLD).hashCode();
             }
         };
 
@@ -504,7 +522,7 @@
 
             @Override
             public Integer call() throws Exception {
-                return helper.osrMethod();
+                return helper.osrMethod(CompilerWhiteBoxTest.BACKEDGE_THRESHOLD);
             }
         };
 
@@ -512,7 +530,7 @@
                 = new Callable<Integer>() {
             @Override
             public Integer call() throws Exception {
-                return osrStaticMethod();
+                return osrStaticMethod(CompilerWhiteBoxTest.BACKEDGE_THRESHOLD);
             }
         };
 
@@ -532,25 +550,24 @@
             }
             try {
                 OSR_CONSTRUCTOR = Helper.class.getDeclaredConstructor(
-                        Object.class);
+                        Object.class, long.class);
             } catch (NoSuchMethodException | SecurityException e) {
                 throw new RuntimeException(
-                        "exception on getting method Helper.<init>(Object)", e);
+                        "exception on getting method Helper.<init>(Object, long)", e);
             }
             METHOD = getMethod("method");
             STATIC = getMethod("staticMethod");
-            OSR_METHOD = getMethod("osrMethod");
-            OSR_STATIC = getMethod("osrStaticMethod");
+            OSR_METHOD = getMethod("osrMethod", long.class);
+            OSR_STATIC = getMethod("osrStaticMethod", long.class);
         }
 
-        private static Method getMethod(String name) {
+        private static Method getMethod(String name, Class<?>... parameterTypes) {
             try {
-                return Helper.class.getDeclaredMethod(name);
+                return Helper.class.getDeclaredMethod(name, parameterTypes);
             } catch (NoSuchMethodException | SecurityException e) {
                 throw new RuntimeException(
                         "exception on getting method Helper." + name, e);
             }
-
         }
 
         private static int staticMethod() {
@@ -561,17 +578,84 @@
             return 42;
         }
 
-        private static int osrStaticMethod() {
+        /**
+         * Deoptimizes all non-osr versions of the given executable after
+         * compilation finished.
+         *
+         * @param e Executable
+         * @throws Exception
+         */
+        private static void waitAndDeoptimize(Executable e) {
+            CompilerWhiteBoxTest.waitBackgroundCompilation(e);
+            if (WhiteBox.getWhiteBox().isMethodQueuedForCompilation(e)) {
+                throw new RuntimeException(e + " must not be in queue");
+            }
+            // Deoptimize non-osr versions of executable
+            WhiteBox.getWhiteBox().deoptimizeMethod(e, false);
+        }
+
+        /**
+         * Executes the method multiple times to make sure we have
+         * enough profiling information before triggering an OSR
+         * compilation. Otherwise the C2 compiler may add uncommon traps.
+         *
+         * @param m Method to be executed
+         * @return Accumulated result of the method invocations
+         * @throws Exception
+         */
+        private static int warmup(Method m) throws Exception {
+            waitAndDeoptimize(m);
+            Helper helper = new Helper();
             int result = 0;
-            for (long i = 0; i < CompilerWhiteBoxTest.BACKEDGE_THRESHOLD; ++i) {
+            for (long i = 0; i < CompilerWhiteBoxTest.THRESHOLD; ++i) {
+                result += (int)m.invoke(helper, 1);
+            }
+            // Wait to make sure OSR compilation is not blocked by
+            // non-OSR compilation in the compile queue
+            CompilerWhiteBoxTest.waitBackgroundCompilation(m);
+            return result;
+        }
+
+        /**
+         * Executes the constructor multiple times to make sure we
+         * have enough profiling information before triggering an OSR
+         * compilation. Otherwise the C2 compiler may add uncommon traps.
+         *
+         * @param c Constructor to be executed
+         * @return Number of times the constructor was executed
+         * @throws Exception
+         */
+        private static int warmup(Constructor c) throws Exception {
+            waitAndDeoptimize(c);
+            int result = 0;
+            for (long i = 0; i < CompilerWhiteBoxTest.THRESHOLD; ++i) {
+                result += c.newInstance(null, 1).hashCode();
+            }
+            // Wait to make sure OSR compilation is not blocked by
+            // non-OSR compilation in the compile queue
+            CompilerWhiteBoxTest.waitBackgroundCompilation(c);
+            return result;
+        }
+
+        private static int osrStaticMethod(long limit) throws Exception {
+            int result = 0;
+            if (limit != 1) {
+                result = warmup(OSR_STATIC);
+            }
+            // Trigger osr compilation
+            for (long i = 0; i < limit; ++i) {
                 result += staticMethod();
             }
             return result;
         }
 
-        private int osrMethod() {
+        private int osrMethod(long limit) throws Exception {
             int result = 0;
-            for (long i = 0; i < CompilerWhiteBoxTest.BACKEDGE_THRESHOLD; ++i) {
+            if (limit != 1) {
+                result = warmup(OSR_METHOD);
+            }
+            // Trigger osr compilation
+            for (long i = 0; i < limit; ++i) {
                 result += method();
             }
             return result;
@@ -585,9 +669,13 @@
         }
 
         // for OSR constructor test case
-        private Helper(Object o) {
+        private Helper(Object o, long limit) throws Exception {
             int result = 0;
-            for (long i = 0; i < CompilerWhiteBoxTest.BACKEDGE_THRESHOLD; ++i) {
+            if (limit != 1) {
+                result = warmup(OSR_CONSTRUCTOR);
+            }
+            // Trigger osr compilation
+            for (long i = 0; i < limit; ++i) {
                 result += method();
             }
             x = result;
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/compiler/whitebox/DeoptimizeMultipleOSRTest.java	Fri Nov 07 09:22:57 2014 -0800
@@ -0,0 +1,95 @@
+/*
+ * Copyright (c) 2013, 2014, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+import sun.hotspot.WhiteBox;
+import java.lang.reflect.Executable;
+import java.lang.reflect.Method;
+
+/*
+ * @test DeoptimizeMultipleOSRTest
+ * @bug 8061817
+ * @library /testlibrary /testlibrary/whitebox
+ * @build DeoptimizeMultipleOSRTest
+ * @run main ClassFileInstaller sun.hotspot.WhiteBox
+ *                              sun.hotspot.WhiteBox$WhiteBoxPermission
+ * @run main/othervm -Xbootclasspath/a:. -XX:+UnlockDiagnosticVMOptions -XX:+WhiteBoxAPI -XX:CompileCommand=compileonly,DeoptimizeMultipleOSRTest::triggerOSR DeoptimizeMultipleOSRTest
+ * @summary testing of WB::deoptimizeMethod()
+ */
+public class DeoptimizeMultipleOSRTest {
+    private static final WhiteBox WHITE_BOX = WhiteBox.getWhiteBox();
+    private static final long BACKEDGE_THRESHOLD = 150000;
+    private Method method;
+    private int counter = 0;
+
+    public static void main(String[] args) throws Exception {
+        DeoptimizeMultipleOSRTest test = new DeoptimizeMultipleOSRTest();
+        test.test();
+    }
+
+    /**
+     * Triggers two different OSR compilations for the same method and
+     * checks if WhiteBox.deoptimizeMethod() deoptimizes both.
+     *
+     * @throws Exception
+     */
+    public void test() throws Exception {
+        method = DeoptimizeMultipleOSRTest.class.getDeclaredMethod("triggerOSR", boolean.class, long.class);
+        // Trigger two OSR compiled versions
+        triggerOSR(true, BACKEDGE_THRESHOLD);
+        triggerOSR(false, BACKEDGE_THRESHOLD);
+        // Wait for compilation
+        CompilerWhiteBoxTest.waitBackgroundCompilation(method);
+        // Deoptimize
+        WHITE_BOX.deoptimizeMethod(method, true);
+        if (WHITE_BOX.isMethodCompiled(method, true)) {
+            throw new AssertionError("Not all OSR compiled versions were deoptimized");
+        }
+    }
+
+    /**
+     * Triggers OSR compilations by executing loops.
+     *
+     * @param first Determines which loop to execute
+     * @param limit The number of loop iterations
+     */
+    public void triggerOSR(boolean first, long limit) {
+        if (limit != 1) {
+            // Warmup method to avoid uncommon traps
+            for (int i = 0; i < limit; ++i) {
+                triggerOSR(first, 1);
+            }
+            CompilerWhiteBoxTest.waitBackgroundCompilation(method);
+        }
+        if (first) {
+            // Trigger OSR compilation 1
+            for (int i = 0; i < limit; ++i) {
+                counter++;
+            }
+        } else {
+            // Trigger OSR compilation 2
+            for (int i = 0; i < limit; ++i) {
+                counter++;
+            }
+        }
+    }
+}
--- a/test/compiler/whitebox/MakeMethodNotCompilableTest.java	Wed Nov 05 12:54:08 2014 -0800
+++ b/test/compiler/whitebox/MakeMethodNotCompilableTest.java	Fri Nov 07 09:22:57 2014 -0800
@@ -131,14 +131,15 @@
             throw new RuntimeException(method
                     + " is not compilable after clearMethodState()");
         }
-
+        // Make method not (OSR-)compilable (depending on testCase.isOsr())
         makeNotCompilable();
         if (isCompilable()) {
             throw new RuntimeException(method + " must be not compilable");
         }
-
+        // Try to (OSR-)compile method
         compile();
-        checkNotCompiled();
+        // Method should not be (OSR-)compiled
+        checkNotCompiled(testCase.isOsr());
         if (isCompilable()) {
             throw new RuntimeException(method + " must be not compilable");
         }
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/test/runtime/RedefineTests/RedefineAnnotations.java	Fri Nov 07 09:22:57 2014 -0800
@@ -0,0 +1,410 @@
+/*
+ * Copyright (c) 2014, Oracle and/or its affiliates. All rights reserved.
+ * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
+ *
+ * This code is free software; you can redistribute it and/or modify it
+ * under the terms of the GNU General Public License version 2 only, as
+ * published by the Free Software Foundation.
+ *
+ * This code is distributed in the hope that it will be useful, but WITHOUT
+ * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ * version 2 for more details (a copy is included in the LICENSE file that
+ * accompanied this code).
+ *
+ * You should have received a copy of the GNU General Public License version
+ * 2 along with this work; if not, write to the Free Software Foundation,
+ * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
+ *
+ * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
+ * or visit www.oracle.com if you need additional information or have any
+ * questions.
+ */
+
+/*
+ * @test
+ * @library /testlibrary
+ * @summary Test that type annotations are retained after a retransform
+ * @run main RedefineAnnotations buildagent
+ * @run main/othervm -javaagent:redefineagent.jar RedefineAnnotations
+ */
+
+import static com.oracle.java.testlibrary.Asserts.assertTrue;
+import java.io.FileNotFoundException;
+import java.io.PrintWriter;
+import java.lang.NoSuchFieldException;
+import java.lang.NoSuchMethodException;
+import java.lang.RuntimeException;
+import java.lang.annotation.Annotation;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+import java.lang.instrument.ClassFileTransformer;
+import java.lang.instrument.IllegalClassFormatException;
+import java.lang.instrument.Instrumentation;
+import java.lang.instrument.UnmodifiableClassException;
+import java.lang.reflect.AnnotatedArrayType;
+import java.lang.reflect.AnnotatedParameterizedType;
+import java.lang.reflect.AnnotatedType;
+import java.lang.reflect.AnnotatedWildcardType;
+import java.lang.reflect.Executable;
+import java.lang.reflect.TypeVariable;
+import java.security.ProtectionDomain;
+import java.util.Arrays;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import jdk.internal.org.objectweb.asm.ClassReader;
+import jdk.internal.org.objectweb.asm.ClassVisitor;
+import jdk.internal.org.objectweb.asm.ClassWriter;
+import jdk.internal.org.objectweb.asm.FieldVisitor;
+import static jdk.internal.org.objectweb.asm.Opcodes.ASM5;
+
+@Retention(RetentionPolicy.RUNTIME)
+@Target(ElementType.TYPE_USE)
+@interface TestAnn {
+    String site();
+}
+
+public class RedefineAnnotations {
+    static Instrumentation inst;
+    public static void premain(String agentArgs, Instrumentation inst) {
+        RedefineAnnotations.inst = inst;
+    }
+
+    static class Transformer implements ClassFileTransformer {
+
+        public byte[] asm(ClassLoader loader, String className,
+                Class<?> classBeingRedefined,
+                ProtectionDomain protectionDomain, byte[] classfileBuffer)
+            throws IllegalClassFormatException {
+
+            ClassWriter cw = new ClassWriter(0);
+            ClassVisitor cv = new ReAddDummyFieldsClassVisitor(ASM5, cw) { };
+            ClassReader cr = new ClassReader(classfileBuffer);
+            cr.accept(cv, 0);
+            return cw.toByteArray();
+        }
+
+        public class ReAddDummyFieldsClassVisitor extends ClassVisitor {
+
+            LinkedList<F> fields = new LinkedList<>();
+
+            public ReAddDummyFieldsClassVisitor(int api, ClassVisitor cv) {
+                super(api, cv);
+            }
+
+            @Override public FieldVisitor visitField(int access, String name,
+                    String desc, String signature, Object value) {
+                if (name.startsWith("dummy")) {
+                    // Remove dummy field
+                    fields.addLast(new F(access, name, desc, signature, value));
+                    return null;
+                }
+                return cv.visitField(access, name, desc, signature, value);
+            }
+
+            @Override public void visitEnd() {
+                F f;
+                while ((f = fields.pollFirst()) != null) {
+                    // Re-add dummy fields
+                    cv.visitField(f.access, f.name, f.desc, f.signature, f.value);
+                }
+            }
+
+            private class F {
+                private int access;
+                private String name;
+                private String desc;
+                private String signature;
+                private Object value;
+                F(int access, String name, String desc, String signature, Object value) {
+                    this.access = access;
+                    this.name = name;
+                    this.desc = desc;
+                    this.signature = signature;
+                    this.value = value;
+                }
+            }
+        }
+
+        @Override public byte[] transform(ClassLoader loader, String className,
+                Class<?> classBeingRedefined,
+                ProtectionDomain protectionDomain, byte[] classfileBuffer)
+            throws IllegalClassFormatException {
+
+            if (className.contains("TypeAnnotatedTestClass")) {
+                try {
+                    // Here we remove and re-add the dummy fields. This shuffles the constant pool
+                    return asm(loader, className, classBeingRedefined, protectionDomain, classfileBuffer);
+                } catch (Throwable e) {
+                    // The retransform native code that called this method does not propagate
+                    // exceptions. Instead of getting an uninformative generic error, catch
+                    // problems here, print them, and exit.
+                    e.printStackTrace();
+                    System.exit(1);
+                }
+            }
+            return null;
+        }
+    }
+
+    private static void buildAgent() {
+        try {
+            ClassFileInstaller.main("RedefineAnnotations");
+        } catch (Exception e) {
+            throw new RuntimeException("Could not write agent classfile", e);
+        }
+
+        try {
+            PrintWriter pw = new PrintWriter("MANIFEST.MF");
+            pw.println("Premain-Class: RedefineAnnotations");
+            pw.println("Agent-Class: RedefineAnnotations");
+            pw.println("Can-Retransform-Classes: true");
+            pw.close();
+        } catch (FileNotFoundException e) {
+            throw new RuntimeException("Could not write manifest file for the agent", e);
+        }
+
+        sun.tools.jar.Main jarTool = new sun.tools.jar.Main(System.out, System.err, "jar");
+        if (!jarTool.run(new String[] { "-cmf", "MANIFEST.MF", "redefineagent.jar", "RedefineAnnotations.class" })) {
+            throw new RuntimeException("Could not write the agent jar file");
+        }
+    }
+
+    public static void main(String argv[]) throws NoSuchFieldException, NoSuchMethodException {
+        if (argv.length == 1 && argv[0].equals("buildagent")) {
+            buildAgent();
+            return;
+        }
+
+        if (inst == null) {
+            throw new RuntimeException("Instrumentation object was null");
+        }
+
+        RedefineAnnotations test = new RedefineAnnotations();
+        test.testTransformAndVerify();
+    }
+
+    // Class type annotations
+    private Annotation classTypeParameterTA;
+    private Annotation extendsTA;
+    private Annotation implementsTA;
+
+    // Field type annotations
+    private Annotation fieldTA;
+    private Annotation innerTA;
+    private Annotation[] arrayTA = new Annotation[4];
+    private Annotation[] mapTA = new Annotation[5];
+
+    // Method type annotations
+    private Annotation returnTA, methodTypeParameterTA, formalParameterTA, throwsTA;
+
+    private void testTransformAndVerify()
+        throws NoSuchFieldException, NoSuchMethodException {
+
+        Class<TypeAnnotatedTestClass> c = TypeAnnotatedTestClass.class;
+        Class<?> myClass = c;
+
+        /*
+         * Verify that the expected annotations are where they should be before transform.
+         */
+        verifyClassTypeAnnotations(c);
+        verifyFieldTypeAnnotations(c);
+        verifyMethodTypeAnnotations(c);
+
+        try {
+            inst.addTransformer(new Transformer(), true);
+            inst.retransformClasses(myClass);
+        } catch (UnmodifiableClassException e) {
+            throw new RuntimeException(e);
+        }
+
+        /*
+         * Verify that the expected annotations are where they should be after transform.
+         * Also verify that before and after are equal.
+         */
+        verifyClassTypeAnnotations(c);
+        verifyFieldTypeAnnotations(c);
+        verifyMethodTypeAnnotations(c);
+    }
+
+    private void verifyClassTypeAnnotations(Class c) {
+        Annotation anno;
+
+        anno = c.getTypeParameters()[0].getAnnotations()[0];
+        verifyTestAnn(classTypeParameterTA, anno, "classTypeParameter");
+        classTypeParameterTA = anno;
+
+        anno = c.getAnnotatedSuperclass().getAnnotations()[0];
+        verifyTestAnn(extendsTA, anno, "extends");
+        extendsTA = anno;
+
+        anno = c.getAnnotatedInterfaces()[0].getAnnotations()[0];
+        verifyTestAnn(implementsTA, anno, "implements");
+        implementsTA = anno;
+    }
+
+    private void verifyFieldTypeAnnotations(Class c)
+        throws NoSuchFieldException, NoSuchMethodException {
+
+        verifyBasicFieldTypeAnnotations(c);
+        verifyInnerFieldTypeAnnotations(c);
+        verifyArrayFieldTypeAnnotations(c);
+        verifyMapFieldTypeAnnotations(c);
+    }
+
+    private void verifyBasicFieldTypeAnnotations(Class c)
+        throws NoSuchFieldException, NoSuchMethodException {
+
+        Annotation anno = c.getDeclaredField("typeAnnotatedBoolean").getAnnotatedType().getAnnotations()[0];
+        verifyTestAnn(fieldTA, anno, "field");
+        fieldTA = anno;
+    }
+
+    private void verifyInnerFieldTypeAnnotations(Class c)
+        throws NoSuchFieldException, NoSuchMethodException {
+
+        AnnotatedType at = c.getDeclaredField("typeAnnotatedInner").getAnnotatedType();
+        Annotation anno = at.getAnnotations()[0];
+        verifyTestAnn(innerTA, anno, "inner");
+        innerTA = anno;
+    }
+
+    private void verifyArrayFieldTypeAnnotations(Class c)
+        throws NoSuchFieldException, NoSuchMethodException {
+
+        Annotation anno;
+        AnnotatedType at;
+
+        at = c.getDeclaredField("typeAnnotatedArray").getAnnotatedType();
+        anno = at.getAnnotations()[0];
+        verifyTestAnn(arrayTA[0], anno, "array1");
+        arrayTA[0] = anno;
+
+        for (int i = 1; i <= 3; i++) {
+            at = ((AnnotatedArrayType) at).getAnnotatedGenericComponentType();
+            anno = at.getAnnotations()[0];
+            verifyTestAnn(arrayTA[i], anno, "array" + (i + 1));
+            arrayTA[i] = anno;
+        }
+    }
+
+    private void verifyMapFieldTypeAnnotations(Class c)
+        throws NoSuchFieldException, NoSuchMethodException {
+
+        Annotation anno;
+        AnnotatedType atBase;
+        AnnotatedType atParameter;
+        atBase = c.getDeclaredField("typeAnnotatedMap").getAnnotatedType();
+
+        anno = atBase.getAnnotations()[0];
+        verifyTestAnn(mapTA[0], anno, "map1");
+        mapTA[0] = anno;
+
+        atParameter =
+            ((AnnotatedParameterizedType) atBase).
+            getAnnotatedActualTypeArguments()[0];
+        anno = ((AnnotatedWildcardType) atParameter).getAnnotations()[0];
+        verifyTestAnn(mapTA[1], anno, "map2");
+        mapTA[1] = anno;
+
+        anno =
+            ((AnnotatedWildcardType) atParameter).
+            getAnnotatedUpperBounds()[0].getAnnotations()[0];
+        verifyTestAnn(mapTA[2], anno, "map3");
+        mapTA[2] = anno;
+
+        atParameter =
+            ((AnnotatedParameterizedType) atBase).
+            getAnnotatedActualTypeArguments()[1];
+        anno = ((AnnotatedParameterizedType) atParameter).getAnnotations()[0];
+        verifyTestAnn(mapTA[3], anno, "map4");
+        mapTA[3] = anno;
+
+        anno =
+            ((AnnotatedParameterizedType) atParameter).
+            getAnnotatedActualTypeArguments()[0].getAnnotations()[0];
+        verifyTestAnn(mapTA[4], anno, "map5");
+        mapTA[4] = anno;
+    }
+
+    private void verifyMethodTypeAnnotations(Class c)
+        throws NoSuchFieldException, NoSuchMethodException {
+        Annotation anno;
+        Executable typeAnnotatedMethod =
+            c.getDeclaredMethod("typeAnnotatedMethod", TypeAnnotatedTestClass.class);
+
+        anno = typeAnnotatedMethod.getAnnotatedReturnType().getAnnotations()[0];
+        verifyTestAnn(returnTA, anno, "return");
+        returnTA = anno;
+
+        anno = typeAnnotatedMethod.getTypeParameters()[0].getAnnotations()[0];
+        verifyTestAnn(methodTypeParameterTA, anno, "methodTypeParameter");
+        methodTypeParameterTA = anno;
+
+        anno = typeAnnotatedMethod.getAnnotatedParameterTypes()[0].getAnnotations()[0];
+        verifyTestAnn(formalParameterTA, anno, "formalParameter");
+        formalParameterTA = anno;
+
+        anno = typeAnnotatedMethod.getAnnotatedExceptionTypes()[0].getAnnotations()[0];
+        verifyTestAnn(throwsTA, anno, "throws");
+        throwsTA = anno;
+    }
+
+    private static void verifyTestAnn(Annotation verifyAgainst, Annotation anno, String expectedSite) {
+        verifyTestAnnSite(anno, expectedSite);
+
+        // When called before the transform, verifyAgainst will be null; when
+        // called after the transform, it is the annotation captured before it.
+        if (verifyAgainst != null) {
+            assertTrue(anno.equals(verifyAgainst),
+                       "Annotations do not match before and after." +
+                       " Before: \"" + verifyAgainst + "\", After: \"" + anno + "\"");
+        }
+    }
+
+    private static void verifyTestAnnSite(Annotation testAnn, String expectedSite) {
+        String expectedAnn = "@TestAnn(site=" + expectedSite + ")";
+        assertTrue(testAnn.toString().equals(expectedAnn),
+                   "Expected \"" + expectedAnn + "\", got \"" + testAnn + "\"");
+    }
+
+    public static class TypeAnnotatedTestClass <@TestAnn(site="classTypeParameter") S,T>
+            extends @TestAnn(site="extends") Thread
+            implements @TestAnn(site="implements") Runnable {
+
+        public @TestAnn(site="field") boolean typeAnnotatedBoolean;
+
+        public
+            RedefineAnnotations.
+            @TestAnn(site="inner") TypeAnnotatedTestClass
+            typeAnnotatedInner;
+
+        public
+            @TestAnn(site="array4") boolean
+            @TestAnn(site="array1") []
+            @TestAnn(site="array2") []
+            @TestAnn(site="array3") []
+            typeAnnotatedArray;
+
+        public @TestAnn(site="map1") Map
+            <@TestAnn(site="map2") ? extends @TestAnn(site="map3") String,
+            @TestAnn(site="map4")  List<@TestAnn(site="map5")  Object>> typeAnnotatedMap;
+
+        public int dummy1;
+        public int dummy2;
+        public int dummy3;
+
+        @TestAnn(site="return") <@TestAnn(site="methodTypeParameter") U,V> Class
+            typeAnnotatedMethod(@TestAnn(site="formalParameter") TypeAnnotatedTestClass arg)
+            throws @TestAnn(site="throws") ClassNotFoundException {
+
+            @TestAnn(site="local_variable_type") int foo = 0;
+            throw new ClassNotFoundException();
+        }
+
+        public void run() {}
+    }
+}
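The verification methods in the test above exercise the java.lang.reflect.AnnotatedType API (AnnotatedArrayType, AnnotatedParameterizedType, AnnotatedWildcardType) to read type annotations reflectively before and after class redefinition. The following is a minimal standalone sketch of the same traversal pattern; the Site annotation and the data field are illustrative and are not part of this changeset:

import java.lang.annotation.*;
import java.lang.reflect.AnnotatedArrayType;
import java.lang.reflect.AnnotatedType;

public class AnnotatedTypeWalk {
    @Target(ElementType.TYPE_USE)
    @Retention(RetentionPolicy.RUNTIME)
    @interface Site { String value(); }

    // Illustrative field: one annotation per array dimension, one on the element type.
    public @Site("element") String @Site("dim1") [] @Site("dim2") [] data;

    public static void main(String[] args) throws Exception {
        AnnotatedType at = AnnotatedTypeWalk.class.getDeclaredField("data").getAnnotatedType();
        // Walk from the outermost array dimension down to the element type,
        // mirroring verifyArrayFieldTypeAnnotations in the test above.
        while (at instanceof AnnotatedArrayType) {
            System.out.println(at.getType() + " -> " + at.getAnnotations()[0]);
            at = ((AnnotatedArrayType) at).getAnnotatedGenericComponentType();
        }
        System.out.println(at.getType() + " -> " + at.getAnnotations()[0]);
    }
}

Repeatedly calling getAnnotatedGenericComponentType() until the element type is reached is the same pattern the test uses to check the array1 through array4 sites.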
--- a/test/testlibrary/whitebox/sun/hotspot/WhiteBox.java	Wed Nov 05 12:54:08 2014 -0800
+++ b/test/testlibrary/whitebox/sun/hotspot/WhiteBox.java	Fri Nov 07 09:22:57 2014 -0800
@@ -30,6 +30,7 @@
 import java.util.function.Function;
 import java.util.stream.Stream;
 import java.security.BasicPermission;
+import java.net.URL;
 
 import sun.hotspot.parser.DiagnosticCommand;
 
@@ -84,6 +85,11 @@
   }
   private native boolean isClassAlive0(String name);
 
+  // Resource/Class Lookup Cache
+  public native boolean classKnownToNotExist(ClassLoader loader, String name);
+  public native URL[] getLookupCacheURLs(ClassLoader loader);
+  public native int[] getLookupCacheMatches(ClassLoader loader, String name);
+
   // G1
   public native boolean g1InConcurrentMark();
   public native boolean g1IsHumongous(Object o);
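The three WhiteBox hooks added above expose the class-loader resource lookup cache backed by JVM_KnownToNotExist, JVM_GetResourceLookupCacheURLs and JVM_GetResourceLookupCache. A hedged usage sketch, assuming the usual WhiteBox test setup (-Xbootclasspath/a:. -XX:+UnlockDiagnosticVMOptions -XX:+WhiteBoxAPI); the class and resource names, and the slash-separated name format, are assumptions for illustration only:

import java.net.URL;
import sun.hotspot.WhiteBox;

public class LookupCacheProbe {
    public static void main(String[] args) {
        WhiteBox wb = WhiteBox.getWhiteBox();
        ClassLoader appLoader = LookupCacheProbe.class.getClassLoader();

        // URLs covered by the lookup cache (presumably null or empty when no
        // cache is active for this loader).
        URL[] urls = wb.getLookupCacheURLs(appLoader);
        System.out.println("cache URLs: " + (urls == null ? "none" : urls.length));

        // Whether the cache can prove the named class is absent from this loader.
        boolean missing = wb.classKnownToNotExist(appLoader, "com/example/DoesNotExist");
        System.out.println("known to not exist: " + missing);

        // Indices of cache entries that may contain the named resource
        // (exact semantics follow the native implementation).
        int[] matches = wb.getLookupCacheMatches(appLoader, "com/example/DoesNotExist.class");
        System.out.println("matches: " + (matches == null ? "none" : matches.length));
    }
}

A test built on these hooks can skip its cache-specific assertions when getLookupCacheURLs reports that no cache is in use for the loader under test.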