Skip to content

Commit 705e0cf

Browse files
committed
Removed S3 instructions in favour of S3 documentation
1 parent (80ef70e) · commit 705e0cf

File tree

2 files changed

+8
-28
lines changed

2 files changed

+8
-28
lines changed

demo-notebooks/guided-demos/3_pytorch_lightning_demo.ipynb

+7
Original file line numberDiff line numberDiff line change
@@ -91,6 +91,13 @@
9191
"cluster.details()"
9292
]
9393
},
94+
{
95+
"cell_type": "markdown",
96+
"metadata": {},
97+
"source": [
98+
"Note: For this example external S3 compatible storage is required. Please refer to our [documentation](https://github.com/project-codeflare/codeflare-sdk/blob/main/docs/s3-compatible-storage.md) for steps on how to configure this training script."
99+
]
100+
},
94101
{
95102
"cell_type": "code",
96103
"execution_count": null,

demo-notebooks/guided-demos/pytorch_lightning.py

+1-28
Original file line numberDiff line numberDiff line change
@@ -14,32 +14,7 @@
1414
# Based on https://docs.ray.io/en/latest/train/getting-started-pytorch-lightning.html
1515

1616
"""
17-
# For S3 persistent storage replace the following environment variables with your AWS credentials then uncomment the S3 run_config
18-
# See here for information on how to set up an S3 bucket https://docs.aws.amazon.com/AmazonS3/latest/userguide/creating-bucket.html
19-
20-
os.environ["AWS_ACCESS_KEY_ID"] = "XXXXXXXX"
21-
os.environ["AWS_SECRET_ACCESS_KEY"] = "XXXXXXXX"
22-
os.environ["AWS_DEFAULT_REGION"] = "XXXXXXXX"
23-
"""
24-
25-
"""
26-
# For Minio persistent storage uncomment the following code and fill in the name, password and API URL then uncomment the minio run_config.
27-
# See here for information on how to set up a minio bucket https://ai-on-openshift.io/tools-and-applications/minio/minio/
28-
29-
def get_minio_run_config():
30-
import s3fs
31-
import pyarrow.fs
32-
33-
s3_fs = s3fs.S3FileSystem(
34-
key = os.getenv('MINIO_ACCESS_KEY', "XXXXX"),
35-
secret = os.getenv('MINIO_SECRET_ACCESS_KEY', "XXXXX"),
36-
endpoint_url = os.getenv('MINIO_URL', "XXXXX")
37-
)
38-
39-
custom_fs = pyarrow.fs.PyFileSystem(pyarrow.fs.FSSpecHandler(s3_fs))
40-
41-
run_config = ray.train.RunConfig(storage_path='training', storage_filesystem=custom_fs)
42-
return run_config
17+
Note: This example requires an S3 compatible storage bucket for distributed training. Please visit our documentation for more information -> https://github.com/project-codeflare/codeflare-sdk/blob/main/docs/s3-compatible-storage.md
4318
"""
4419

4520

@@ -110,8 +85,6 @@ def train_func():
11085
trainer = TorchTrainer(
11186
train_func,
11287
scaling_config=scaling_config,
113-
# run_config = ray.train.RunConfig(storage_path="s3://BUCKET_NAME/SUB_PATH/", name="unique_run_name") # Uncomment and update the S3 URI for S3 persistent storage.
114-
# run_config=get_minio_run_config(), # Uncomment for minio persistent storage.
11588
)
11689
result: ray.train.Result = trainer.fit()
11790

0 commit comments

Comments (0)