   ( arrayOopDesc::header_size(T_DOUBLE) * HeapWordSize \
     + ((julong)max_jint * sizeof(double)) )
 
-
 #define UNSAFE_ENTRY(result_type, header) \
   JVM_ENTRY(static result_type, header)
 
 #define UNSAFE_LEAF(result_type, header) \
   JVM_LEAF(static result_type, header)
 
+// Note that scoped accesses (cf. scopedMemoryAccess.cpp) can install
+// an async handshake on the entry to an Unsafe method. When that happens,
+// it is expected that we are not allowed to touch the underlying memory
+// that might have gotten unmapped. Therefore, we check at the entry
+// to unsafe functions whether we have such async exception conditions,
+// and return immediately if that is the case.
+//
+// We also use NoSafepointVerifier to block potential safepoints.
+// It would be problematic if an async exception handshake were installed later on
+// during another safepoint in the function, but before the memory access happens,
+// as the memory will be freed after the handshake is installed. We must notice
+// the installed handshake and return early before doing the memory access to prevent
+// accesses to freed memory.
+//
+// Note also that we MUST do a scoped memory access in the VM (or Java) thread
+// state, since we rely on a handshake to check for threads that are accessing
+// scoped memory, and we need the handshaking thread to wait until we get to a
+// safepoint, in order to make sure we are not in the middle of accessing memory
+// that is about to be freed (i.e. there can be no UNSAFE_LEAF_SCOPED).
+#define UNSAFE_ENTRY_SCOPED(result_type, header) \
+  JVM_ENTRY(static result_type, header) \
+  if (thread->has_async_exception_condition()) {return (result_type)0;} \
+  NoSafepointVerifier nsv;
+
 #define UNSAFE_END JVM_END
 
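As a rough sketch (not from the patch itself): an accessor declared with the new macro, such as the Unsafe_GetInt stamped out by DEFINE_GETSETOOP(jint, Int) below, boils down to something like the following once JVM_ENTRY is expanded. The explicit thread lookup and the elided state transition only approximate the real JVM_ENTRY plumbing.

    // Sketch of UNSAFE_ENTRY_SCOPED(jint, Unsafe_GetInt(...)) { ... } UNSAFE_END
    // after macro expansion (JVM_ENTRY / JVM_END details elided).
    static jint Unsafe_GetInt(JNIEnv *env, jobject unsafe, jobject obj, jlong offset) {
      JavaThread* thread = JavaThread::thread_from_jni_environment(env);
      // ... JVM_ENTRY transition from native to VM state elided ...
      if (thread->has_async_exception_condition()) {
        // A scoped-access handshake is pending: the backing memory may already be
        // unmapped, so bail out with a dummy value without touching it.
        return (jint)0;
      }
      NoSafepointVerifier nsv; // no safepoint below, so no handshake can be installed mid-access
      return MemoryAccess<jint>(thread, obj, offset).get();
    }

The hunks below simply switch each such stamped-out accessor, and the raw memory operations, from UNSAFE_ENTRY to UNSAFE_ENTRY_SCOPED.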
@@ -279,11 +302,11 @@ UNSAFE_ENTRY(jobject, Unsafe_GetUncompressedObject(JNIEnv *env, jobject unsafe,
 
 #define DEFINE_GETSETOOP(java_type, Type) \
 \
-UNSAFE_ENTRY(java_type, Unsafe_Get##Type(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) { \
+UNSAFE_ENTRY_SCOPED(java_type, Unsafe_Get##Type(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) { \
   return MemoryAccess<java_type>(thread, obj, offset).get(); \
 } UNSAFE_END \
 \
-UNSAFE_ENTRY(void, Unsafe_Put##Type(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, java_type x)) { \
+UNSAFE_ENTRY_SCOPED(void, Unsafe_Put##Type(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, java_type x)) { \
   MemoryAccess<java_type>(thread, obj, offset).put(x); \
 } UNSAFE_END \
 \
@@ -302,11 +325,11 @@ DEFINE_GETSETOOP(jdouble, Double);
 
 #define DEFINE_GETSETOOP_VOLATILE(java_type, Type) \
 \
-UNSAFE_ENTRY(java_type, Unsafe_Get##Type##Volatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) { \
+UNSAFE_ENTRY_SCOPED(java_type, Unsafe_Get##Type##Volatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) { \
   return MemoryAccess<java_type>(thread, obj, offset).get_volatile(); \
 } UNSAFE_END \
 \
-UNSAFE_ENTRY(void, Unsafe_Put##Type##Volatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, java_type x)) { \
+UNSAFE_ENTRY_SCOPED(void, Unsafe_Put##Type##Volatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, java_type x)) { \
   MemoryAccess<java_type>(thread, obj, offset).put_volatile(x); \
 } UNSAFE_END \
 \
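For concreteness, a sketch of what DEFINE_GETSETOOP_VOLATILE(jlong, Long) now stamps out (assuming the invocation list further down in unsafe.cpp is unchanged), showing that every generated accessor picks up the scoped entry and its async-condition check:

    // Approximate expansion of DEFINE_GETSETOOP_VOLATILE(jlong, Long) after this change:
    UNSAFE_ENTRY_SCOPED(jlong, Unsafe_GetLongVolatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset)) {
      return MemoryAccess<jlong>(thread, obj, offset).get_volatile();
    } UNSAFE_END

    UNSAFE_ENTRY_SCOPED(void, Unsafe_PutLongVolatile(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong x)) {
      MemoryAccess<jlong>(thread, obj, offset).put_volatile(x);
    } UNSAFE_END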
@@ -362,7 +385,7 @@ UNSAFE_LEAF(void, Unsafe_FreeMemory0(JNIEnv *env, jobject unsafe, jlong addr)) {
   os::free(p);
 } UNSAFE_END
 
-UNSAFE_ENTRY(void, Unsafe_SetMemory0(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong size, jbyte value)) {
+UNSAFE_ENTRY_SCOPED(void, Unsafe_SetMemory0(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong size, jbyte value)) {
   size_t sz = (size_t)size;
 
   oop base = JNIHandles::resolve(obj);
@@ -371,7 +394,7 @@ UNSAFE_ENTRY(void, Unsafe_SetMemory0(JNIEnv *env, jobject unsafe, jobject obj, j
   Copy::fill_to_memory_atomic(p, sz, value);
 } UNSAFE_END
 
-UNSAFE_ENTRY(void, Unsafe_CopyMemory0(JNIEnv *env, jobject unsafe, jobject srcObj, jlong srcOffset, jobject dstObj, jlong dstOffset, jlong size)) {
+UNSAFE_ENTRY_SCOPED(void, Unsafe_CopyMemory0(JNIEnv *env, jobject unsafe, jobject srcObj, jlong srcOffset, jobject dstObj, jlong dstOffset, jlong size)) {
   size_t sz = (size_t)size;
 
   oop srcp = JNIHandles::resolve(srcObj);
@@ -390,39 +413,19 @@ UNSAFE_ENTRY(void, Unsafe_CopyMemory0(JNIEnv *env, jobject unsafe, jobject srcOb
   }
 } UNSAFE_END
 
-// This function is a leaf since if the source and destination are both in native memory
-// the copy may potentially be very large, and we don't want to disable GC if we can avoid it.
-// If either source or destination (or both) are on the heap, the function will enter VM using
-// JVM_ENTRY_FROM_LEAF
-UNSAFE_LEAF(void, Unsafe_CopySwapMemory0(JNIEnv *env, jobject unsafe, jobject srcObj, jlong srcOffset, jobject dstObj, jlong dstOffset, jlong size, jlong elemSize)) {
+UNSAFE_ENTRY_SCOPED(void, Unsafe_CopySwapMemory0(JNIEnv *env, jobject unsafe, jobject srcObj, jlong srcOffset, jobject dstObj, jlong dstOffset, jlong size, jlong elemSize)) {
   size_t sz = (size_t)size;
   size_t esz = (size_t)elemSize;
 
-  if (srcObj == nullptr && dstObj == nullptr) {
-    // Both src & dst are in native memory
-    address src = (address)srcOffset;
-    address dst = (address)dstOffset;
-
-    {
-      JavaThread* thread = JavaThread::thread_from_jni_environment(env);
-      GuardUnsafeAccess guard(thread);
-      Copy::conjoint_swap(src, dst, sz, esz);
-    }
-  } else {
-    // At least one of src/dst are on heap, transition to VM to access raw pointers
-
-    JVM_ENTRY_FROM_LEAF(env, void, Unsafe_CopySwapMemory0) {
-      oop srcp = JNIHandles::resolve(srcObj);
-      oop dstp = JNIHandles::resolve(dstObj);
+  oop srcp = JNIHandles::resolve(srcObj);
+  oop dstp = JNIHandles::resolve(dstObj);
 
-      address src = (address)index_oop_from_field_offset_long(srcp, srcOffset);
-      address dst = (address)index_oop_from_field_offset_long(dstp, dstOffset);
+  address src = (address)index_oop_from_field_offset_long(srcp, srcOffset);
+  address dst = (address)index_oop_from_field_offset_long(dstp, dstOffset);
 
-      {
-        GuardUnsafeAccess guard(thread);
-        Copy::conjoint_swap(src, dst, sz, esz);
-      }
-    } JVM_END
+  {
+    GuardUnsafeAccess guard(thread);
+    Copy::conjoint_swap(src, dst, sz, esz);
   }
 } UNSAFE_END
 
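With no UNSAFE_LEAF_SCOPED possible (see the comment added above), the copy-and-swap path always enters VM state, and the old native-vs-heap split collapses: for a null base object, index_oop_from_field_offset_long already degenerates to the raw native address. A rough sketch of that behaviour (the real helper is defined earlier in unsafe.cpp and additionally sanity-checks offsets for non-null bases):

    // Sketch only: approximate behaviour of index_oop_from_field_offset_long.
    static inline void* index_oop_from_field_offset_long_sketch(oop p, jlong field_offset) {
      if (p == nullptr) {
        // Null base: the "offset" is an absolute native address (covers the old
        // "both src & dst are in native memory" case).
        return (void*)(address)field_offset;
      }
      // Non-null base: address of the field/element inside the heap object.
      return cast_from_oop<address>(p) + field_offset;
    }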
@@ -718,13 +721,13 @@ UNSAFE_ENTRY(jobject, Unsafe_CompareAndExchangeReference(JNIEnv *env, jobject un
   return JNIHandles::make_local(THREAD, res);
 } UNSAFE_END
 
-UNSAFE_ENTRY(jint, Unsafe_CompareAndExchangeInt(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jint e, jint x)) {
+UNSAFE_ENTRY_SCOPED(jint, Unsafe_CompareAndExchangeInt(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jint e, jint x)) {
   oop p = JNIHandles::resolve(obj);
   volatile jint* addr = (volatile jint*)index_oop_from_field_offset_long(p, offset);
   return Atomic::cmpxchg(addr, e, x);
 } UNSAFE_END
 
-UNSAFE_ENTRY(jlong, Unsafe_CompareAndExchangeLong(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong e, jlong x)) {
+UNSAFE_ENTRY_SCOPED(jlong, Unsafe_CompareAndExchangeLong(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong e, jlong x)) {
   oop p = JNIHandles::resolve(obj);
   volatile jlong* addr = (volatile jlong*)index_oop_from_field_offset_long(p, offset);
   return Atomic::cmpxchg(addr, e, x);
@@ -739,13 +742,13 @@ UNSAFE_ENTRY(jboolean, Unsafe_CompareAndSetReference(JNIEnv *env, jobject unsafe
   return ret == e;
 } UNSAFE_END
 
-UNSAFE_ENTRY(jboolean, Unsafe_CompareAndSetInt(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jint e, jint x)) {
+UNSAFE_ENTRY_SCOPED(jboolean, Unsafe_CompareAndSetInt(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jint e, jint x)) {
   oop p = JNIHandles::resolve(obj);
   volatile jint* addr = (volatile jint*)index_oop_from_field_offset_long(p, offset);
   return Atomic::cmpxchg(addr, e, x) == e;
 } UNSAFE_END
 
-UNSAFE_ENTRY(jboolean, Unsafe_CompareAndSetLong(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong e, jlong x)) {
+UNSAFE_ENTRY_SCOPED(jboolean, Unsafe_CompareAndSetLong(JNIEnv *env, jobject unsafe, jobject obj, jlong offset, jlong e, jlong x)) {
   oop p = JNIHandles::resolve(obj);
   volatile jlong* addr = (volatile jlong*)index_oop_from_field_offset_long(p, offset);
   return Atomic::cmpxchg(addr, e, x) == e;