3535#include "gc/g1/g1Trace.hpp"
3636#include "gc/g1/g1YoungGCAllocationFailureInjector.inline.hpp"
3737#include "gc/shared/continuationGCSupport.inline.hpp"
38+ #include "gc/shared/partialArraySplitter.inline.hpp"
3839#include "gc/shared/partialArrayState.hpp"
39- #include "gc/shared/partialArrayTaskStepper.inline.hpp"
40+ #include "gc/shared/partialArrayTaskStats.hpp"
4041#include "gc/shared/stringdedup/stringDedup.hpp"
4142#include "gc/shared/taskqueue.inline.hpp"
4243#include "memory/allocation.inline.hpp"
@@ -80,8 +81,7 @@ G1ParScanThreadState::G1ParScanThreadState(G1CollectedHeap* g1h,
8081 _surviving_young_words(nullptr ),
8182 _surviving_words_length(collection_set->young_region_length () + 1),
8283 _old_gen_is_full(false ),
83- _partial_array_state_allocator(g1h->partial_array_state_manager ()),
84- _partial_array_stepper(num_workers, ParGCArrayScanChunk),
84+ _partial_array_splitter(g1h->partial_array_state_manager (), num_workers),
8585 _string_dedup_requests(),
8686 _max_num_optional_regions(collection_set->optional_region_length ()),
8787 _numa(g1h->numa ()),
@@ -169,9 +169,12 @@ void G1ParScanThreadState::verify_task(oop* task) const {
169169}
170170
171171void G1ParScanThreadState::verify_task (PartialArrayState* task) const {
172- // Must be in the collection set--it's already been copied.
173- oop p = task->source ();
174- assert (_g1h->is_in_cset (p), " p=" PTR_FORMAT, p2i (p));
172+ assert (task != nullptr , " invariant" );
173+ // Source isn't used for processing, so not recorded in task.
174+ assert (task->source () == nullptr , " invariant" );
175+ oop p = task->destination ();
176+ assert (_g1h->is_in_reserved (p),
177+ " task=" PTR_FORMAT " dest=" PTR_FORMAT, p2i (task), p2i (p));
175178}
176179
177180void G1ParScanThreadState::verify_task (ScannerTask task) const {
@@ -222,38 +225,17 @@ void G1ParScanThreadState::do_oop_evac(T* p) {
222225}
223226
224227MAYBE_INLINE_EVACUATION
225- void G1ParScanThreadState::do_partial_array (PartialArrayState* state) {
226- oop to_obj = state->destination ();
227-
228- #ifdef ASSERT
229- oop from_obj = state->source ();
230- assert (_g1h->is_in_reserved (from_obj), " must be in heap." );
231- assert (from_obj->is_forwarded (), " must be forwarded" );
232- assert (from_obj != to_obj, " should not be chunking self-forwarded objects" );
233- assert (to_obj->is_objArray (), " must be obj array" );
234- #endif // ASSERT
235-
236- objArrayOop to_array = objArrayOop (to_obj);
237-
238- // Claim a chunk and get number of additional tasks to enqueue.
239- PartialArrayTaskStepper::Step step = _partial_array_stepper.next (state);
240- // Push any additional partial scan tasks needed. Pushed before processing
241- // the claimed chunk to allow other workers to steal while we're processing.
242- if (step._ncreate > 0 ) {
243- state->add_references (step._ncreate );
244- for (uint i = 0 ; i < step._ncreate ; ++i) {
245- push_on_queue (ScannerTask (state));
246- }
247- }
248-
228+ void G1ParScanThreadState::do_partial_array (PartialArrayState* state, bool stolen) {
229+ // Access state before release by claim().
230+ objArrayOop to_array = objArrayOop (state->destination ());
231+ PartialArraySplitter::Claim claim =
232+ _partial_array_splitter.claim (state, _task_queue, stolen);
249233 G1HeapRegionAttr dest_attr = _g1h->region_attr (to_array);
250234 G1SkipCardEnqueueSetter x (&_scanner, dest_attr.is_new_survivor ());
251235 // Process claimed task.
252236 to_array->oop_iterate_range (&_scanner,
253- checked_cast<int >(step._index ),
254- checked_cast<int >(step._index + _partial_array_stepper.chunk_size ()));
255- // Release reference to the state, now that we're done with it.
256- _partial_array_state_allocator.release (state);
237+ checked_cast<int >(claim._start ),
238+ checked_cast<int >(claim._end ));
257239}
258240
259241MAYBE_INLINE_EVACUATION
@@ -265,27 +247,10 @@ void G1ParScanThreadState::start_partial_objarray(G1HeapRegionAttr dest_attr,
265247 assert (to_obj->is_objArray (), " precondition" );
266248
267249 objArrayOop to_array = objArrayOop (to_obj);
268-
269250 size_t array_length = to_array->length ();
270- PartialArrayTaskStepper::Step step = _partial_array_stepper.start (array_length);
271-
272- // Push any needed partial scan tasks. Pushed before processing the
273- // initial chunk to allow other workers to steal while we're processing.
274- if (step._ncreate > 0 ) {
275- assert (step._index < array_length, " invariant" );
276- assert (((array_length - step._index ) % _partial_array_stepper.chunk_size ()) == 0 ,
277- " invariant" );
278- PartialArrayState* state =
279- _partial_array_state_allocator.allocate (from_obj, to_obj,
280- step._index ,
281- array_length,
282- step._ncreate );
283- for (uint i = 0 ; i < step._ncreate ; ++i) {
284- push_on_queue (ScannerTask (state));
285- }
286- } else {
287- assert (step._index == array_length, " invariant" );
288- }
251+ size_t initial_chunk_size =
252+ // The source array is unused when processing states.
253+ _partial_array_splitter.start (_task_queue, nullptr , to_array, array_length);
289254
290255 // Skip the card enqueue iff the object (to_array) is in survivor region.
291256 // However, G1HeapRegion::is_survivor() is too expensive here.
@@ -296,18 +261,18 @@ void G1ParScanThreadState::start_partial_objarray(G1HeapRegionAttr dest_attr,
296261 // Process the initial chunk. No need to process the type in the
297262 // klass, as it will already be handled by processing the built-in
298263 // module.
299- to_array->oop_iterate_range (&_scanner, 0 , checked_cast<int >(step._index ));
264+ to_array->oop_iterate_range (&_scanner, 0 , checked_cast<int >(initial_chunk_size ));
300265}
301266
302267MAYBE_INLINE_EVACUATION
303- void G1ParScanThreadState::dispatch_task (ScannerTask task) {
268+ void G1ParScanThreadState::dispatch_task (ScannerTask task, bool stolen ) {
304269 verify_task (task);
305270 if (task.is_narrow_oop_ptr ()) {
306271 do_oop_evac (task.to_narrow_oop_ptr ());
307272 } else if (task.is_oop_ptr ()) {
308273 do_oop_evac (task.to_oop_ptr ());
309274 } else {
310- do_partial_array (task.to_partial_array_state ());
275+ do_partial_array (task.to_partial_array_state (), stolen );
311276 }
312277}
313278
@@ -320,11 +285,11 @@ void G1ParScanThreadState::trim_queue_to_threshold(uint threshold) {
320285 do {
321286 while (_task_queue->pop_overflow (task)) {
322287 if (!_task_queue->try_push_to_taskqueue (task)) {
323- dispatch_task (task);
288+ dispatch_task (task, false );
324289 }
325290 }
326291 while (_task_queue->pop_local (task, threshold)) {
327- dispatch_task (task);
292+ dispatch_task (task, false );
328293 }
329294 } while (!_task_queue->overflow_empty ());
330295}
@@ -333,7 +298,7 @@ ATTRIBUTE_FLATTEN
333298void G1ParScanThreadState::steal_and_trim_queue (G1ScannerTasksQueueSet* task_queues) {
334299 ScannerTask stolen_task;
335300 while (task_queues->steal (_worker_id, stolen_task)) {
336- dispatch_task (stolen_task);
301+ dispatch_task (stolen_task, true );
337302 // Processing stolen task may have added tasks to our queue.
338303 trim_queue ();
339304 }
@@ -717,6 +682,14 @@ void G1ParScanThreadState::update_numa_stats(uint node_index) {
717682 }
718683}
719684
685+ #if TASKQUEUE_STATS
686+
687+ PartialArrayTaskStats* G1ParScanThreadState::partial_array_task_stats () {
688+ return _partial_array_splitter.stats ();
689+ }
690+
691+ #endif // TASKQUEUE_STATS
692+
720693G1ParScanThreadStateSet::G1ParScanThreadStateSet (G1CollectedHeap* g1h,
721694 uint num_workers,
722695 G1CollectionSet* collection_set,
@@ -744,3 +717,15 @@ G1ParScanThreadStateSet::~G1ParScanThreadStateSet() {
744717 FREE_C_HEAP_ARRAY (size_t , _surviving_young_words_total);
745718 FREE_C_HEAP_ARRAY (BufferNodeList, _rdc_buffers);
746719}
720+
721+ #if TASKQUEUE_STATS
722+
723+ void G1ParScanThreadStateSet::print_partial_array_task_stats () {
724+ auto get_stats = [&](uint i) {
725+ return state_for_worker (i)->partial_array_task_stats ();
726+ };
727+ PartialArrayTaskStats::log_set (_num_workers, get_stats,
728+ " Partial Array Task Stats" );
729+ }
730+
731+ #endif // TASKQUEUE_STATS
0 commit comments