This repository was archived by the owner on Sep 24, 2025. It is now read-only.

Commit 8cdd5f8

update comments
1 parent 8d77d8f commit 8cdd5f8

File tree

2 files changed: 3 additions & 10 deletions


rollout.py

Lines changed: 2 additions & 5 deletions
@@ -10,10 +10,6 @@
 MAX_ITERATIONS=2
 
 logging.basicConfig(
-    # Example of format string
-    # 2022-06-29 11:22:26,152: rank0[822018][MainThread]: INFO: composer.trainer.trainer: Using precision Precision.FP32
-    # Including the PID and thread name to help with debugging dataloader workers and callbacks that spawn background
-    # threads / processes
     format=
     f'[ROLLOUT]%(asctime)s: rank{dist.get_global_rank()}[%(process)d][%(threadName)s]: %(levelname)s: %(name)s: %(message)s',
 )
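Both scripts configure logging the same way; the train.py diff below differs only in its [TRAIN] prefix. Note that the format= argument is an ordinary f-string, so dist.get_global_rank() is evaluated once, when basicConfig runs, and that rank is baked into every later record. A minimal self-contained sketch of the pattern; get_global_rank here is a hypothetical stand-in for the repo's dist module, and level= is assumed only so the example prints:

import logging

def get_global_rank() -> int:
    # Hypothetical stand-in for dist.get_global_rank(); a real setup would
    # query the process group after torch.distributed is initialized.
    return 0

logging.basicConfig(
    # The f-string is formatted once, here, so the rank is fixed per process.
    format=f'[ROLLOUT]%(asctime)s: rank{get_global_rank()}[%(process)d][%(threadName)s]: %(levelname)s: %(name)s: %(message)s',
    level=logging.INFO,  # assumed for the example; the repo may set this elsewhere
)

log = logging.getLogger(__name__)
log.info("configured")
# Emits something like:
# [ROLLOUT]2022-06-29 11:22:26,152: rank0[822018][MainThread]: INFO: __main__: configured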
@@ -70,8 +66,9 @@
     # TODO: start generating rollouts for the experience buffer
 
     # Send the experience buffer to the train agent.
+    # We do not block here. We can continue generating rollouts while the experience buffer is being sent.
     experience_buffer = torch.tensor([20+i])
-    experience_buffer_work = torch.distributed.broadcast(group=experience_buffer_group, src=1,tensor=experience_buffer, async_op=True) # don't block, send it off and continue generating rollouts
+    experience_buffer_work = torch.distributed.broadcast(group=experience_buffer_group, src=1,tensor=experience_buffer, async_op=True)
     log.info(f"Sent experience buffer {experience_buffer}")
 
     log.info(f"Completed iteration {i + 1}/{MAX_ITERATIONS}")

train.py

Lines changed: 1 addition & 5 deletions
@@ -12,10 +12,6 @@
 
 
 logging.basicConfig(
-    # Example of format string
-    # 2022-06-29 11:22:26,152: rank0[822018][MainThread]: INFO: composer.trainer.trainer: Using precision Precision.FP32
-    # Including the PID and thread name to help with debugging dataloader workers and callbacks that spawn background
-    # threads / processes
     format=
     f'[TRAIN]%(asctime)s: rank{dist.get_global_rank()}[%(process)d][%(threadName)s]: %(levelname)s: %(name)s: %(message)s',
 )
@@ -71,7 +67,7 @@
     # TODO: distribute the experience results to each of the training ranks
     # TODO: train the model
 
-    # simulate "long training!""
+    # simulate "long" training!
     import time
     time.sleep(20)
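train.py is the other end of that collective; the time.sleep(20) stands in for a real training step. A hypothetical sketch of the receiving side, assuming the train agent joins the same experience_buffer_group and the rollout agent is global rank 1 (as in the rollout.py diff):

import torch
import torch.distributed

def receive_experience_buffer(experience_buffer_group) -> torch.Tensor:
    # Allocate a buffer matching the sender's shape and dtype
    # (torch.tensor([20+i]) on the rollout side is a one-element int64 tensor).
    experience_buffer = torch.empty(1, dtype=torch.int64)
    # Blocking form: returns once rank 1's data has been copied into the buffer.
    torch.distributed.broadcast(tensor=experience_buffer, src=1, group=experience_buffer_group)
    return experience_buffer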
7773
