Skip to content

Commit 2c1f149

Browse files
authored
quick-start: use anonymous access (`anon=True`) for GCP buckets (#751)
This change mitigates issue #740.
1 parent b1ce093 commit 2c1f149

File tree

1 file changed

+6
-6
lines changed

1 file changed

+6
-6
lines changed

docs/quick-start.md

+6-6
Original file line number | Diff line number | Diff line change
@@ -39,8 +39,8 @@ using JSON metadata:
3939
``` py
4040
from datachain import Column, DataChain
4141

42-
meta = DataChain.from_json("gs://datachain-demo/dogs-and-cats/*json", object_name="meta")
43-
images = DataChain.from_storage("gs://datachain-demo/dogs-and-cats/*jpg")
42+
meta = DataChain.from_json("gs://datachain-demo/dogs-and-cats/*json", object_name="meta", anon=True)
43+
images = DataChain.from_storage("gs://datachain-demo/dogs-and-cats/*jpg", anon=True)
4444

4545
images_id = images.map(id=lambda file: file.path.split('.')[-2])
4646
annotated = images_id.merge(meta, on="id", right_on="meta.id")
@@ -78,7 +78,7 @@ def is_positive_dialogue_ending(file) -> bool:
7878

7979
chain = (
8080
DataChain.from_storage("gs://datachain-demo/chatbot-KiT/",
81-
object_name="file", type="text")
81+
object_name="file", type="text", anon=True)
8282
.settings(parallel=8, cache=True)
8383
.map(is_positive=is_positive_dialogue_ending)
8484
.save("file_response")
@@ -132,7 +132,7 @@ def eval_dialogue(file: File) -> bool:
132132
return result.lower().startswith("success")
133133

134134
chain = (
135-
DataChain.from_storage("gs://datachain-demo/chatbot-KiT/", object_name="file")
135+
DataChain.from_storage("gs://datachain-demo/chatbot-KiT/", object_name="file", anon=True)
136136
.map(is_success=eval_dialogue)
137137
.save("mistral_files")
138138
)
@@ -177,7 +177,7 @@ def eval_dialog(file: File) -> ChatCompletionResponse:
177177
{"role": "user", "content": file.read()}])
178178

179179
chain = (
180-
DataChain.from_storage("gs://datachain-demo/chatbot-KiT/", object_name="file")
180+
DataChain.from_storage("gs://datachain-demo/chatbot-KiT/", object_name="file", anon=True)
181181
.settings(parallel=4, cache=True)
182182
.map(response=eval_dialog)
183183
.map(status=lambda response: response.choices[0].message.content.lower()[:7])
@@ -273,7 +273,7 @@ from datachain import C, DataChain
273273
processor = CLIPProcessor.from_pretrained("openai/clip-vit-base-patch32")
274274

275275
chain = (
276-
DataChain.from_storage("gs://datachain-demo/dogs-and-cats/", type="image")
276+
DataChain.from_storage("gs://datachain-demo/dogs-and-cats/", type="image", anon=True)
277277
.map(label=lambda name: name.split(".")[0], params=["file.name"])
278278
.select("file", "label").to_pytorch(
279279
transform=processor.image_processor,

0 commit comments

Comments
 (0)