
Commit 819de07

8363998: Implement Compressed Class Pointers for 32-bit
Reviewed-by: rkennke, coleenp
1 parent f40381e commit 819de07

17 files changed, +151 -73 lines


src/hotspot/cpu/arm/c1_FrameMap_arm.hpp

Lines changed: 1 addition & 3 deletions
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2008, 2019, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2008, 2025, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -95,8 +95,6 @@
   }

   static int adjust_reg_range(int range) {
-    // Reduce the number of available regs (to free Rheap_base) in case of compressed oops
-    if (UseCompressedOops || UseCompressedClassPointers) return range - 1;
     return range;
   }

src/hotspot/cpu/arm/c1_LIRAssembler_arm.cpp

Lines changed: 4 additions & 16 deletions
@@ -2229,16 +2229,9 @@ void LIR_Assembler::emit_arraycopy(LIR_OpArrayCopy* op) {
     // We don't know the array types are compatible
     if (basic_type != T_OBJECT) {
       // Simple test for basic type arrays
-      if (UseCompressedClassPointers) {
-        // We don't need decode because we just need to compare
-        __ ldr_u32(tmp, Address(src, oopDesc::klass_offset_in_bytes()));
-        __ ldr_u32(tmp2, Address(dst, oopDesc::klass_offset_in_bytes()));
-        __ cmp_32(tmp, tmp2);
-      } else {
-        __ load_klass(tmp, src);
-        __ load_klass(tmp2, dst);
-        __ cmp(tmp, tmp2);
-      }
+      __ load_klass(tmp, src);
+      __ load_klass(tmp2, dst);
+      __ cmp(tmp, tmp2);
       __ b(*stub->entry(), ne);
     } else {
       // For object arrays, if src is a sub class of dst then we can
@@ -2461,12 +2454,7 @@ void LIR_Assembler::emit_load_klass(LIR_OpLoadKlass* op) {
   if (info != nullptr) {
     add_debug_info_for_null_check_here(info);
   }
-
-  if (UseCompressedClassPointers) { // On 32 bit arm??
-    __ ldr_u32(result, Address(obj, oopDesc::klass_offset_in_bytes()));
-  } else {
-    __ ldr(result, Address(obj, oopDesc::klass_offset_in_bytes()));
-  }
+  __ ldr(result, Address(obj, oopDesc::klass_offset_in_bytes()));
 }

 void LIR_Assembler::emit_profile_call(LIR_OpProfileCall* op) {

src/hotspot/share/cds/metaspaceShared.cpp

Lines changed: 1 addition & 1 deletion
@@ -245,7 +245,7 @@ static bool shared_base_too_high(char* specified_base, char* aligned_base, size_
 static char* compute_shared_base(size_t cds_max) {
   char* specified_base = (char*)SharedBaseAddress;
   size_t alignment = MetaspaceShared::core_region_alignment();
-  if (UseCompressedClassPointers) {
+  if (UseCompressedClassPointers && CompressedKlassPointers::needs_class_space()) {
     alignment = MAX2(alignment, Metaspace::reserve_alignment());
   }

src/hotspot/share/memory/metaspace.cpp

Lines changed: 8 additions & 4 deletions
@@ -834,14 +834,20 @@ void Metaspace::global_initialize() {

   }

-#endif // _LP64
+#else
+  // +UseCompressedClassPointers on 32-bit: does not need class space. Klass can live wherever.
+  if (UseCompressedClassPointers) {
+    const address start = (address)os::vm_min_address(); // but not in the zero page
+    const address end = (address)CompressedKlassPointers::max_klass_range_size();
+    CompressedKlassPointers::initialize(start, end - start);
+  }
+#endif // __LP64

   // Initialize non-class virtual space list, and its chunk manager:
   MetaspaceContext::initialize_nonclass_space_context();

   _tracer = new MetaspaceTracer();

-#ifdef _LP64
   if (UseCompressedClassPointers) {
     // Note: "cds" would be a better fit but keep this for backward compatibility.
     LogTarget(Info, gc, metaspace) lt;
@@ -852,8 +858,6 @@
       CompressedKlassPointers::print_mode(&ls);
     }
   }
-#endif
-
 }

 void Metaspace::post_initialize() {
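
For context (not part of the commit): on 32-bit, the call above hands CompressedKlassPointers::initialize() essentially the whole user address space. A rough, stand-alone arithmetic sketch of that range, with an assumed 4 KB page size and an assumed 64 KB os::vm_min_address() purely for illustration:

// Back-of-the-envelope sketch only -- not HotSpot code. Page size and minimum
// user address are assumptions; HotSpot queries both from its OS layer.
#include <cstdint>
#include <cstdio>

int main() {
  const uint64_t page_size      = 4 * 1024;    // assumed os::vm_page_size()
  const uint64_t vm_min_address = 64 * 1024;   // assumed os::vm_min_address()
  // max_klass_range_size() on 32-bit: UINT_MAX aligned down to a page boundary.
  const uint64_t end   = (UINT32_MAX / page_size) * page_size;  // 0xFFFFF000 with 4K pages
  const uint64_t start = vm_min_address;
  printf("initialize(start=0x%08llx, len=0x%08llx)\n",
         (unsigned long long)start, (unsigned long long)(end - start));
  return 0;
}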

src/hotspot/share/memory/metaspace/virtualSpaceNode.cpp

Lines changed: 10 additions & 0 deletions
@@ -260,6 +260,16 @@ VirtualSpaceNode* VirtualSpaceNode::create_node(size_t word_size,
   if (!rs.is_reserved()) {
     vm_exit_out_of_memory(word_size * BytesPerWord, OOM_MMAP_ERROR, "Failed to reserve memory for metaspace");
   }
+
+#ifndef _LP64
+  // On 32-bit, with +UseCompressedClassPointers, the whole address space is the encoding range. We therefore
+  // don't need a class space. However, as a pragmatic workaround for pesty overflow problems on 32-bit, we leave
+  // a small area at the end of the address space out of the encoding range. We just assume no Klass will ever live
+  // there (it won't, for no OS we support on 32-bit has user-addressable memory up there).
+  assert(!UseCompressedClassPointers ||
+         rs.end() <= (char*)CompressedKlassPointers::max_klass_range_size(), "Weirdly high address");
+#endif // _LP64
+
   MemTracker::record_virtual_memory_tag(rs, mtMetaspace);
   assert_is_aligned(rs.base(), chunklevel::MAX_CHUNK_BYTE_SIZE);
   InternalStats::inc_num_vsnodes_births();
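
A side note on the "overflow problems" the new comment mentions (this illustration is not from the commit): if the encoding range ran all the way to 2^32, any start-plus-size end computation done in a 32-bit size_t would wrap to zero; capping the range below the top page keeps such sums representable. A minimal demonstration, emulating a 32-bit size_t with uint32_t and assumed addresses:

#include <cstdint>
#include <cstdio>

int main() {
  typedef uint32_t size32_t;                      // stand-in for size_t on a 32-bit VM

  const size32_t start  = 0x00010000u;            // some low mapping base (assumed)
  const size32_t full   = 0u - start;             // a range reaching 2^32 -- wraps in 32 bits
  const size32_t capped = 0xFFFFF000u - start;    // a range capped below the top page

  printf("start + full   = 0x%08x  (wrapped to zero -- bogus end address)\n",
         (unsigned)(size32_t)(start + full));
  printf("start + capped = 0x%08x  (valid end address)\n",
         (unsigned)(size32_t)(start + capped));
  return 0;
}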

src/hotspot/share/oops/compressedKlass.cpp

Lines changed: 37 additions & 14 deletions
@@ -44,24 +44,33 @@ narrowKlass CompressedKlassPointers::_lowest_valid_narrow_klass_id = (narrowKlas
 narrowKlass CompressedKlassPointers::_highest_valid_narrow_klass_id = (narrowKlass)-1;
 size_t CompressedKlassPointers::_protection_zone_size = 0;

-#ifdef _LP64
-
 size_t CompressedKlassPointers::max_klass_range_size() {
-  // We disallow klass range sizes larger than 4GB even if the encoding
-  // range would allow for a larger Klass range (e.g. Base=zero, shift=3 -> 32GB).
-  // That is because many CPU-specific compiler decodings do not want the
-  // shifted narrow Klass to spill over into the third quadrant of the 64-bit target
-  // address, e.g. to use a 16-bit move for a simplified base addition.
-  return MIN2(4 * G, max_encoding_range_size());
+#ifdef _LP64
+  const size_t encoding_allows = nth_bit(narrow_klass_pointer_bits() + max_shift());
+  constexpr size_t cap = 4 * G;
+  return MIN2(encoding_allows, cap);
+#else
+  // 32-bit: only 32-bit "narrow" Klass pointers allowed. If we ever support smaller narrow
+  // Klass pointers here, coding needs to be revised.
+  // We keep one page safety zone free to guard against size_t overflows on 32-bit. In practice
+  // this is irrelevant because these upper address space parts are not user-addressable on
+  // any of our 32-bit platforms.
+  return align_down(UINT_MAX, os::vm_page_size());
+#endif
 }

 void CompressedKlassPointers::pre_initialize() {
   if (UseCompactObjectHeaders) {
     _narrow_klass_pointer_bits = narrow_klass_pointer_bits_coh;
     _max_shift = max_shift_coh;
   } else {
+#ifdef _LP64
     _narrow_klass_pointer_bits = narrow_klass_pointer_bits_noncoh;
     _max_shift = max_shift_noncoh;
+#else
+    _narrow_klass_pointer_bits = 32;
+    _max_shift = 0;
+#endif
   }
 }

@@ -84,6 +93,10 @@ void CompressedKlassPointers::sanity_check_after_initialization() {
   ASSERT_HERE(_base != (address)-1);
   ASSERT_HERE(_shift != -1);

+  // We should need a class space if address space is larger than what narrowKlass can address
+  const bool should_need_class_space = (BytesPerWord * BitsPerByte) > narrow_klass_pointer_bits();
+  ASSERT_HERE(should_need_class_space == needs_class_space());
+
   const size_t klass_align = klass_alignment_in_bytes();

   // must be aligned enough hold 64-bit data
@@ -96,7 +109,9 @@

   // Check that Klass range is fully engulfed in the encoding range
   const address encoding_start = _base;
-  const address encoding_end = (address)(p2u(_base) + (uintptr_t)nth_bit(narrow_klass_pointer_bits() + _shift));
+  const address encoding_end = (address)
+      LP64_ONLY(p2u(_base) + (uintptr_t)nth_bit(narrow_klass_pointer_bits() + _shift))
+      NOT_LP64(max_klass_range_size());
   ASSERT_HERE_2(_klass_range_start >= _base && _klass_range_end <= encoding_end,
                 "Resulting encoding range does not fully cover the class range");

@@ -239,6 +254,7 @@ void CompressedKlassPointers::initialize(address addr, size_t len) {

   } else {

+#ifdef _LP64
     // Traditional (non-compact) header mode
     const uintptr_t unscaled_max = nth_bit(narrow_klass_pointer_bits());
     const uintptr_t zerobased_max = nth_bit(narrow_klass_pointer_bits() + max_shift());
@@ -250,6 +266,7 @@
     address const end = addr + len;
     _base = (end <= (address)unscaled_max) ? nullptr : addr;
 #else
+
     // We try, in order of preference:
     // -unscaled (base=0 shift=0)
     // -zero-based (base=0 shift>0)
@@ -270,11 +287,19 @@
     }
   }
 #endif // AARCH64
+#else
+    // 32-bit "compressed class pointer" mode
+    _base = nullptr;
+    _shift = 0;
+    // as our "protection zone", we just assume the lowest protected parts of
+    // the user address space.
+    _protection_zone_size = os::vm_min_address();
+#endif // LP64
   }

   calc_lowest_highest_narrow_klass_id();

-  // Initialize klass decode mode and check compability with decode instructions
+  // Initialize JIT-specific decoding settings
   if (!set_klass_decode_mode()) {

     // Give fatal error if this is a specified address
@@ -288,9 +313,8 @@
               p2i(_base), _shift);
     }
   }
-#ifdef ASSERT
-  sanity_check_after_initialization();
-#endif
+
+  DEBUG_ONLY(sanity_check_after_initialization();)
 }

 void CompressedKlassPointers::print_mode(outputStream* st) {
@@ -341,4 +365,3 @@ bool CompressedKlassPointers::is_in_protection_zone(address addr) {
          (addr >= base() && addr < base() + _protection_zone_size) : false;
 }

-#endif // _LP64
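
To make the 32-bit branch above concrete: with _base = nullptr and _shift = 0, a narrowKlass is simply the 32-bit Klass address itself, so encoding and decoding degenerate to identity conversions. A minimal stand-alone sketch of that scheme (stand-in types and values, not the HotSpot implementation):

#include <cassert>
#include <cstdint>

struct Klass;                       // opaque stand-in for HotSpot's Klass
typedef uint32_t narrowKlass32;     // stand-in for narrowKlass on 32-bit

// Degenerate scheme chosen above for 32-bit: base = nullptr, shift = 0.
static const uintptr_t base  = 0;
static const int       shift = 0;

static narrowKlass32 encode(const Klass* k) {
  return (narrowKlass32)(((uintptr_t)k - base) >> shift);   // effectively (uint32_t)k
}

static const Klass* decode(narrowKlass32 nk) {
  return (const Klass*)(base + ((uintptr_t)nk << shift));   // effectively (Klass*)nk
}

int main() {
  const Klass* k = (const Klass*)(uintptr_t)0x4a000010u;    // arbitrary aligned address
  assert(decode(encode(k)) == k);   // lossless round trip on a 32-bit target
  return 0;
}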

src/hotspot/share/oops/compressedKlass.hpp

Lines changed: 10 additions & 4 deletions
@@ -143,6 +143,7 @@ class CompressedKlassPointers : public AllStatic {
   static char* reserve_address_space_for_unscaled_encoding(size_t size, bool aslr);
   static char* reserve_address_space_for_zerobased_encoding(size_t size, bool aslr);
   static char* reserve_address_space_for_16bit_move(size_t size, bool aslr);
+
   static void calc_lowest_highest_narrow_klass_id();

 #ifdef ASSERT
@@ -187,12 +188,16 @@
   // The maximum possible shift; the actual shift employed later can be smaller (see initialize())
   static int max_shift() { check_init(_max_shift); return _max_shift; }

-  // Returns the maximum encoding range, given the current geometry (narrow klass bit size and shift)
-  static size_t max_encoding_range_size() { return nth_bit(narrow_klass_pointer_bits() + max_shift()); }
-
-  // Returns the maximum allowed klass range size.
+  // Returns the maximum allowed klass range size. It is calculated from the length of the encoding range
+  // resulting from the current encoding settings (base, shift), capped to a certain max. value.
   static size_t max_klass_range_size();

+  // On 64-bit, we need the class space to confine Klass structures to the encoding range, which is determined
+  // by bit size of narrowKlass IDs and the shift. On 32-bit, we support compressed class pointer only
+  // "pro-forma": narrowKlass have the same size as addresses (32 bits), and therefore the encoding range is
+  // equal to the address space size. Here, we don't need a class space.
+  static constexpr bool needs_class_space() { return LP64_ONLY(true) NOT_LP64(false); }
+
   // Reserve a range of memory that is to contain Klass strucutures which are referenced by narrow Klass IDs.
   // If optimize_for_zero_base is true, the implementation will attempt to reserve optimized for zero-based encoding.
   static char* reserve_address_space_for_compressed_classes(size_t size, bool aslr, bool optimize_for_zero_base);
@@ -201,6 +206,7 @@
   // set this encoding scheme. Used by CDS at runtime to re-instate the scheme used to pre-compute klass ids for
   // archived heap objects. In this case, we don't have the freedom to choose base and shift; they are handed to
   // us from CDS.
+  // Note: CDS with +UCCP for 32-bit currently unsupported.
   static void initialize_for_given_encoding(address addr, size_t len, address requested_base, int requested_shift);

   // Given an address range [addr, addr+len) which the encoding is supposed to
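
A hedged usage sketch for the new needs_class_space() query: shared code can gate class-space-only work on the query rather than on #ifdef _LP64, in the spirit of the metaspaceShared.cpp hunk earlier in this commit. Everything below is a stand-alone approximation with placeholder names, not HotSpot code:

#include <cstdio>

namespace CompressedKlassPointersSketch {
  // Mirrors the declaration above: true on 64-bit targets, false on 32-bit ones.
  constexpr bool needs_class_space() { return sizeof(void*) == 8; }
}

static const bool UseCompressedClassPointersFlag = true;   // placeholder for the VM flag

int main() {
  if (UseCompressedClassPointersFlag && CompressedKlassPointersSketch::needs_class_space()) {
    printf("64-bit: reserve a class space so Klass structures stay in the encoding range\n");
  } else {
    printf("32-bit (or flag off): Klass structures may live anywhere, no class space\n");
  }
  return 0;
}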

src/hotspot/share/oops/compressedKlass.inline.hpp

Lines changed: 4 additions & 0 deletions
@@ -99,8 +99,12 @@ inline bool CompressedKlassPointers::is_valid_narrow_klass_id(narrowKlass nk) {
 }

 inline address CompressedKlassPointers::encoding_range_end() {
+#ifdef _LP64
   const int max_bits = narrow_klass_pointer_bits() + _shift;
   return (address)((uintptr_t)_base + nth_bit(max_bits));
+#else
+  return (address)SIZE_MAX;
+#endif
 }

 #endif // SHARE_OOPS_COMPRESSEDKLASS_INLINE_HPP

src/hotspot/share/oops/objLayout.cpp

Lines changed: 10 additions & 0 deletions
@@ -32,6 +32,7 @@ int ObjLayout::_oop_base_offset_in_bytes = 0;
 bool ObjLayout::_oop_has_klass_gap = false;

 void ObjLayout::initialize() {
+#ifdef _LP64
   assert(_klass_mode == Undefined, "ObjLayout initialized twice");
   if (UseCompactObjectHeaders) {
     _klass_mode = Compact;
@@ -46,4 +47,13 @@
     _oop_base_offset_in_bytes = sizeof(markWord) + sizeof(Klass*);
     _oop_has_klass_gap = false;
   }
+#else
+  assert(_klass_mode == Undefined, "ObjLayout initialized twice");
+  assert(!UseCompactObjectHeaders, "COH unsupported on 32-bit");
+  // We support +-UseCompressedClassPointers on 32-bit, but the layout
+  // is exactly the same as it was with uncompressed klass pointers
+  _klass_mode = UseCompressedClassPointers ? Compressed : Uncompressed;
+  _oop_base_offset_in_bytes = sizeof(markWord) + sizeof(Klass*);
+  _oop_has_klass_gap = false;
+#endif
 }
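
As the new 32-bit branch spells out, the object header layout is identical in both klass modes, because a narrowKlass occupies the same four bytes a Klass* would. A minimal layout sketch assuming a 32-bit target (stand-in types, not HotSpot's):

#include <cstddef>
#include <cstdint>

struct MarkWord32 { uint32_t bits; };   // stand-in for markWord on a 32-bit target

struct ObjHeader32 {
  MarkWord32 mark;                      // offset 0
  uint32_t   klass;                     // offset 4: holds a Klass* or a narrowKlass -- same 32 bits
};                                      // instance fields follow at offset 8; no klass gap either way

int main() {
  // Corresponds to _oop_base_offset_in_bytes = sizeof(markWord) + sizeof(Klass*) on 32-bit.
  return (offsetof(ObjHeader32, klass) == 4 && sizeof(ObjHeader32) == 8) ? 0 : 1;
}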

src/hotspot/share/oops/objLayout.inline.hpp

Lines changed: 0 additions & 4 deletions
@@ -38,11 +38,7 @@ inline ObjLayout::Mode ObjLayout::klass_mode() {
     assert(_klass_mode == Uncompressed, "Klass mode does not match flags");
   }
 #endif
-#ifdef _LP64
   return _klass_mode;
-#else
-  return Uncompressed;
-#endif
 }

 #endif // SHARE_OOPS_OBJLAYOUT_INLINE_HPP
