diff --git a/.github/workflows/samples_tutorials_flow_deploy_azure_app_service.yml b/.github/workflows/samples_tutorials_flow_deploy_azure_app_service.yml
new file mode 100644
index 00000000000..5834f7a19a3
--- /dev/null
+++ b/.github/workflows/samples_tutorials_flow_deploy_azure_app_service.yml
@@ -0,0 +1,89 @@
+# This code is autogenerated.
+# Code is generated by running custom script: python3 readme.py
+# Any manual changes to this file may cause incorrect behavior.
+# Any manual changes will be overwritten if the code is regenerated.
+
+name: samples_tutorials_flow_deploy_azure_app_service
+on:
+  schedule:
+    - cron: "48 20 * * *"  # Every day starting at 4:48 BJT
+  pull_request:
+    branches: [ main ]
+    paths: [ examples/**, .github/workflows/samples_tutorials_flow_deploy_azure_app_service.yml ]
+  workflow_dispatch:
+
+jobs:
+  samples_readme_ci:
+    runs-on: ubuntu-latest
+    steps:
+    - name: Checkout repository
+      uses: actions/checkout@v3
+    - name: Setup Python 3.9 environment
+      uses: actions/setup-python@v4
+      with:
+        python-version: "3.9"
+    - name: Generate config.json
+      run: echo ${{ secrets.TEST_WORKSPACE_CONFIG_JSON }} > ${{ github.workspace }}/examples/config.json
+    - name: Prepare requirements
+      working-directory: examples
+      run: |
+        if [[ -e requirements.txt ]]; then
+          python -m pip install --upgrade pip
+          pip install -r requirements.txt
+        fi
+    - name: Prepare dev requirements
+      working-directory: examples
+      run: |
+        python -m pip install --upgrade pip
+        pip install -r dev_requirements.txt
+    - name: Refine .env file
+      working-directory: examples/tutorials/flow-deploy/azure-app-service
+      run: |
+        AOAI_API_KEY=${{ secrets.AOAI_API_KEY_TEST }}
+        AOAI_API_ENDPOINT=${{ secrets.AOAI_API_ENDPOINT_TEST }}
+        AOAI_API_ENDPOINT=$(echo ${AOAI_API_ENDPOINT//\//\\/})
+        if [[ -e .env.example ]]; then
+          echo "env replacement"
+          sed -i -e "s//$AOAI_API_KEY/g" -e "s//$AOAI_API_ENDPOINT/g" .env.example
+          mv .env.example .env
+        fi
+    - name: Create run.yml
+      working-directory: examples/tutorials/flow-deploy/azure-app-service
+      run: |
+        gpt_base=${{ secrets.AOAI_API_ENDPOINT_TEST }}
+        gpt_base=$(echo ${gpt_base//\//\\/})
+        if [[ -e run.yml ]]; then
+          sed -i -e "s/\${azure_open_ai_connection.api_key}/${{ secrets.AOAI_API_KEY_TEST }}/g" -e "s/\${azure_open_ai_connection.api_base}/$gpt_base/g" run.yml
+        fi
+    - name: Azure Login
+      uses: azure/login@v1
+      with:
+        creds: ${{ secrets.AZURE_CREDENTIALS }}
+    - name: Extract Steps examples/tutorials/flow-deploy/azure-app-service/README.md
+      working-directory: ${{ github.workspace }}
+      run: |
+        python scripts/readme/extract_steps_from_readme.py -f examples/tutorials/flow-deploy/azure-app-service/README.md -o examples/tutorials/flow-deploy/azure-app-service
+    - name: Cat script
+      working-directory: examples/tutorials/flow-deploy/azure-app-service
+      run: |
+        cat bash_script.sh
+    - name: Run scripts
+      working-directory: examples/tutorials/flow-deploy/azure-app-service
+      run: |
+        export aoai_api_key=${{secrets.AOAI_API_KEY_TEST }}
+        export aoai_api_endpoint=${{ secrets.AOAI_API_ENDPOINT_TEST }}
+        export test_workspace_sub_id=${{ secrets.TEST_WORKSPACE_SUB_ID }}
+        export test_workspace_rg=${{ secrets.TEST_WORKSPACE_RG }}
+        export test_workspace_name=${{ secrets.TEST_WORKSPACE_NAME }}
+        bash bash_script.sh
+    - name: Pip List for Debug
+      if : ${{ always() }}
+      working-directory: examples/tutorials/flow-deploy/azure-app-service
+      run: |
+        pip list
+    - name: Upload artifact
+      if: ${{ always() }}
+      uses: actions/upload-artifact@v3
+      with:
+        name: artifact
+        path: examples/tutorials/flow-deploy/azure-app-service/bash_script.sh
\ No newline at end of file
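Both generated workflows rely on the same `sed` trick in their "Refine .env file" and "Create run.yml" steps: the endpoint secret is a URL, so its forward slashes have to be escaped before the value can sit inside an `s/.../.../g` expression, which is what the `${AOAI_API_ENDPOINT//\//\\/}` expansion does. A minimal standalone sketch of the pattern follows; the placeholder token and file names here are invented for illustration and are not the ones the workflows actually substitute.

```bash
#!/usr/bin/env bash
# Hypothetical endpoint value; in the workflows it comes from a repository secret.
endpoint="https://my-aoai-resource.openai.azure.com/"

# Escape every "/" so it cannot be read as a sed delimiter.
escaped_endpoint=${endpoint//\//\\/}

# Substitute a made-up placeholder token in a template file, then promote it to .env.
sed -i -e "s/__AOAI_API_ENDPOINT__/${escaped_endpoint}/g" .env.example
mv .env.example .env
```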
diff --git a/.github/workflows/samples_tutorials_flow_deploy_docker.yml b/.github/workflows/samples_tutorials_flow_deploy_docker.yml
new file mode 100644
index 00000000000..66da874aee6
--- /dev/null
+++ b/.github/workflows/samples_tutorials_flow_deploy_docker.yml
@@ -0,0 +1,89 @@
+# This code is autogenerated.
+# Code is generated by running custom script: python3 readme.py
+# Any manual changes to this file may cause incorrect behavior.
+# Any manual changes will be overwritten if the code is regenerated.
+
+name: samples_tutorials_flow_deploy_docker
+on:
+  schedule:
+    - cron: "53 20 * * *"  # Every day starting at 4:53 BJT
+  pull_request:
+    branches: [ main ]
+    paths: [ examples/**, .github/workflows/samples_tutorials_flow_deploy_docker.yml ]
+  workflow_dispatch:
+
+jobs:
+  samples_readme_ci:
+    runs-on: ubuntu-latest
+    steps:
+    - name: Checkout repository
+      uses: actions/checkout@v3
+    - name: Setup Python 3.9 environment
+      uses: actions/setup-python@v4
+      with:
+        python-version: "3.9"
+    - name: Generate config.json
+      run: echo ${{ secrets.TEST_WORKSPACE_CONFIG_JSON }} > ${{ github.workspace }}/examples/config.json
+    - name: Prepare requirements
+      working-directory: examples
+      run: |
+        if [[ -e requirements.txt ]]; then
+          python -m pip install --upgrade pip
+          pip install -r requirements.txt
+        fi
+    - name: Prepare dev requirements
+      working-directory: examples
+      run: |
+        python -m pip install --upgrade pip
+        pip install -r dev_requirements.txt
+    - name: Refine .env file
+      working-directory: examples/tutorials/flow-deploy/docker
+      run: |
+        AOAI_API_KEY=${{ secrets.AOAI_API_KEY_TEST }}
+        AOAI_API_ENDPOINT=${{ secrets.AOAI_API_ENDPOINT_TEST }}
+        AOAI_API_ENDPOINT=$(echo ${AOAI_API_ENDPOINT//\//\\/})
+        if [[ -e .env.example ]]; then
+          echo "env replacement"
+          sed -i -e "s//$AOAI_API_KEY/g" -e "s//$AOAI_API_ENDPOINT/g" .env.example
+          mv .env.example .env
+        fi
+    - name: Create run.yml
+      working-directory: examples/tutorials/flow-deploy/docker
+      run: |
+        gpt_base=${{ secrets.AOAI_API_ENDPOINT_TEST }}
+        gpt_base=$(echo ${gpt_base//\//\\/})
+        if [[ -e run.yml ]]; then
+          sed -i -e "s/\${azure_open_ai_connection.api_key}/${{ secrets.AOAI_API_KEY_TEST }}/g" -e "s/\${azure_open_ai_connection.api_base}/$gpt_base/g" run.yml
+        fi
+    - name: Azure Login
+      uses: azure/login@v1
+      with:
+        creds: ${{ secrets.AZURE_CREDENTIALS }}
+    - name: Extract Steps examples/tutorials/flow-deploy/docker/README.md
+      working-directory: ${{ github.workspace }}
+      run: |
+        python scripts/readme/extract_steps_from_readme.py -f examples/tutorials/flow-deploy/docker/README.md -o examples/tutorials/flow-deploy/docker
+    - name: Cat script
+      working-directory: examples/tutorials/flow-deploy/docker
+      run: |
+        cat bash_script.sh
+    - name: Run scripts
+      working-directory: examples/tutorials/flow-deploy/docker
+      run: |
+        export aoai_api_key=${{secrets.AOAI_API_KEY_TEST }}
+        export aoai_api_endpoint=${{ secrets.AOAI_API_ENDPOINT_TEST }}
+        export test_workspace_sub_id=${{ secrets.TEST_WORKSPACE_SUB_ID }}
+        export test_workspace_rg=${{ secrets.TEST_WORKSPACE_RG }}
+        export test_workspace_name=${{ secrets.TEST_WORKSPACE_NAME }}
+        bash bash_script.sh
+    - name: Pip List for Debug
+      if : ${{ always() }}
+      working-directory: examples/tutorials/flow-deploy/docker
+      run: |
+        pip list
+    - name: Upload artifact
+      if: ${{ always() }}
+      uses: actions/upload-artifact@v3
+      with:
+        name: artifact
+        path: examples/tutorials/flow-deploy/docker/bash_script.sh
\ No newline at end of file
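The "Extract Steps" stage in both workflows turns each tutorial README into the `bash_script.sh` that the later "Cat script" and "Run scripts" steps print and execute. The real `scripts/readme/extract_steps_from_readme.py` is template-driven, but the core idea is roughly the following; this is a simplified sketch, not the actual script, and the regex and output layout are assumptions.

```python
import re
from pathlib import Path

# Simplified idea: pull every fenced ```bash block out of a tutorial README
# and concatenate the snippets into one runnable script.
readme_text = Path("examples/tutorials/flow-deploy/docker/README.md").read_text()
bash_blocks = re.findall(r"```bash\n(.*?)```", readme_text, flags=re.DOTALL)

script = "#!/usr/bin/env bash\nset -e\n\n" + "\n".join(bash_blocks)
Path("examples/tutorials/flow-deploy/docker/bash_script.sh").write_text(script)
```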
diff --git a/examples/README.md b/examples/README.md
index afa8b79064a..8d0cd66ea63 100644
--- a/examples/README.md
+++ b/examples/README.md
@@ -27,6 +27,8 @@
 | path | status | description |
 ------|--------|-------------
 | [chat-with-pdf](tutorials/e2e-development/chat-with-pdf.md) | [![samples_tutorials_e2e_development_chat_with_pdf](https://github.com/microsoft/promptflow/actions/workflows/samples_tutorials_e2e_development_chat_with_pdf.yml/badge.svg?branch=main)](https://github.com/microsoft/promptflow/actions/workflows/samples_tutorials_e2e_development_chat_with_pdf.yml) | Retrieval Augmented Generation (or RAG) has become a prevalent pattern to build intelligent application with Large Language Models (or LLMs) since it can infuse external knowledge into the model, which is not trained with those up-to-date or proprietary information |
+| [azure-app-service](tutorials/flow-deploy/azure-app-service/README.md) | [![samples_tutorials_flow_deploy_azure_app_service](https://github.com/microsoft/promptflow/actions/workflows/samples_tutorials_flow_deploy_azure_app_service.yml/badge.svg?branch=main)](https://github.com/microsoft/promptflow/actions/workflows/samples_tutorials_flow_deploy_azure_app_service.yml) | This example demos how to deploy a flow using Azure App Service |
+| [docker](tutorials/flow-deploy/docker/README.md) | [![samples_tutorials_flow_deploy_docker](https://github.com/microsoft/promptflow/actions/workflows/samples_tutorials_flow_deploy_docker.yml/badge.svg?branch=main)](https://github.com/microsoft/promptflow/actions/workflows/samples_tutorials_flow_deploy_docker.yml) | This example demos how to deploy a flow as a docker app |
 
 
 ### Flows ([flows](flows))
@@ -75,11 +77,6 @@
 | [connections](connections/README.md) | [![samples_connections](https://github.com/microsoft/promptflow/actions/workflows/samples_connections.yml/badge.svg?branch=main)](https://github.com/microsoft/promptflow/actions/workflows/samples_connections.yml) | This folder contains example `YAML` files for creating `connection` using `pf` cli |
 
 
-### Flow Deploy ([tutorials/flow-deploy/](tutorials/flow-deploy/))
-
-| path | status | description |
-------|--------|-------------
-| [deploy.md](tutorials/flow-deploy/deploy.md) | | deploy flow as endpoint
 
 
 ## SDK examples
diff --git a/examples/configuration.ipynb b/examples/configuration.ipynb
index 8f3e40a3e1a..c993738e4aa 100644
--- a/examples/configuration.ipynb
+++ b/examples/configuration.ipynb
@@ -12,17 +12,14 @@
     "---\n",
     "\n",
     "**Requirements** - In order to benefit from this tutorial, you will need:\n",
-    "- A basic understanding of Machine Learning\n",
     "- An Azure account with an active subscription. [Create an account for free](https://azure.microsoft.com/free/?WT.mc_id=A261C142F)\n",
     "- An Azure ML workspace\n",
     "- A python environment\n",
-    "- Install dependent packages for samples via `pip install -r requirements-azure.txt`\n",
-    "- Installed Azure Machine Learning Python SDK v2 - [install instructions](../README.md) - check the getting started section\n",
+    "- Install dependent packages for samples via `pip install -r requirements.txt`\n",
     "\n",
     "**Learning Objectives** - By the end of this tutorial, you should be able to:\n",
     "- Connect to your AML workspace from the Python SDK using different auth credentials\n",
     "- Create workspace config file\n",
-    "- Create Compute clusters which required by jobs notebooks. [Check this notebook to create a compute cluster](../resources/compute/compute.ipynb)\n",
     "\n",
     "**Motivations** - This notebook covers the scenario that user define components using yaml then use these components to build pipeline."
    ]
  }
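The "Generate config.json" workflow step above writes a workspace config into `examples/config.json`, which is the file `configuration.ipynb` uses to connect to the workspace. For orientation, the usual shape of that connection with the Azure ML Python SDK v2 looks roughly like this; the path and the config contents are assumed values, and the notebook's own code may differ.

```python
from azure.ai.ml import MLClient
from azure.identity import DefaultAzureCredential

# config.json typically carries subscription_id, resource_group and workspace_name,
# e.g. {"subscription_id": "...", "resource_group": "...", "workspace_name": "..."}.
ml_client = MLClient.from_config(
    credential=DefaultAzureCredential(),
    path="examples/config.json",
)
print(ml_client.workspace_name)
```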
diff --git a/examples/connections/azure_openai.yml b/examples/connections/azure_openai.yml
index 89e930a5e86..acfccefff4b 100644
--- a/examples/connections/azure_openai.yml
+++ b/examples/connections/azure_openai.yml
@@ -1,6 +1,6 @@
 $schema: https://azuremlschemas.azureedge.net/promptflow/latest/AzureOpenAIConnection.schema.json
 name: open_ai_connection
 type: azure_open_ai
-api_key: ""
+api_key: ""
 api_base: "aoai-api-endpoint"
 api_type: "azure"
diff --git a/examples/tutorials/e2e-development/chat-with-pdf.md b/examples/tutorials/e2e-development/chat-with-pdf.md
index 4e0fde5c209..f7e846fbc70 100644
--- a/examples/tutorials/e2e-development/chat-with-pdf.md
+++ b/examples/tutorials/e2e-development/chat-with-pdf.md
@@ -309,7 +309,7 @@ the other choices, please refer to [flow deploy docs](https://microsoft.github.i
 
 Use the command below to build a flow as docker format app:
 
-```shell
+```bash
 pf flow build --source . --output build --format docker
 ```
 
diff --git a/examples/tutorials/flow-deploy/azure-app-service/README.md b/examples/tutorials/flow-deploy/azure-app-service/README.md
index dff26305ab2..7f757a025c6 100644
--- a/examples/tutorials/flow-deploy/azure-app-service/README.md
+++ b/examples/tutorials/flow-deploy/azure-app-service/README.md
@@ -1,19 +1,26 @@
 # Deploy flow using Azure App Service
 
+This example demos how to deploy a flow using Azure App Service.
+
 [Azure App Service](https://learn.microsoft.com/azure/app-service/) is an HTTP-based service for hosting web applications, REST APIs, and mobile back ends.
 The scripts (`deploy.sh` for bash and `deploy.ps1` for powershell) under this folder are here to help deploy the docker image to Azure App Service.
 
-This example demos how to deploy [web-classification](../../flows/standard/web-classification/README.md) deploy a flow using Azure App Service.
+We will use [web-classification](../../flows/standard/web-classification/README.md) as the example in this tutorial.
 
 ## Build a flow as docker format app
 
+Note that all dependent connections must be created before building the docker image.
+```bash
+# create connection if not created before
+pf connection create --file ../../../connections/azure_openai.yml --set api_key= api_base= --name open_ai_connection
+```
+
 Use the command below to build a flow as docker format app:
 
 ```bash
-pf flow build --source ../../flows/standard/web-classification --output build --format docker
+pf flow build --source ../../../flows/standard/web-classification --output build --format docker
 ```
 
-Note that all dependent connections must be created before building as docker.
 
 ## Deploy with Azure App Service
 
 The two scripts will do the following things:
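The azure-app-service README defers the actual deployment to `deploy.sh` / `deploy.ps1`, which are not part of this diff. For orientation, deploying a locally built flow image to App Service generally involves steps along these lines; this is a hedged sketch with invented resource names, and the real scripts may differ in order and options.

```bash
#!/usr/bin/env bash
# All names below are placeholders for illustration.
RG=my-rg
ACR=myflowacr
PLAN=my-plan
APP=web-classification-app

# Build and push the image produced from the "build" folder to a container registry.
az acr build --registry "$ACR" --image web-classification-serve:latest build

# Create a Linux App Service plan and a web app that runs the image.
az appservice plan create --name "$PLAN" --resource-group "$RG" --is-linux --sku B1
az webapp create --name "$APP" --resource-group "$RG" --plan "$PLAN" \
  --deployment-container-image-name "$ACR.azurecr.io/web-classification-serve:latest"

# Pass the connection's API key to the container as an app setting.
az webapp config appsettings set --name "$APP" --resource-group "$RG" \
  --settings OPEN_AI_CONNECTION_API_KEY=<your-api-key>
```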
diff --git a/examples/tutorials/flow-deploy/docker/README.md b/examples/tutorials/flow-deploy/docker/README.md
index fbd2a567b6a..069518e28c3 100644
--- a/examples/tutorials/flow-deploy/docker/README.md
+++ b/examples/tutorials/flow-deploy/docker/README.md
@@ -1,17 +1,22 @@
 # Deploy a flow using Docker
 
-This example demos how to deploy [web-classification](../../flows/standard/web-classification/README.md) as a docker app.
+This example demos how to deploy a flow as a docker app.
+We will use [web-classification](../../flows/standard/web-classification/README.md) as the example in this tutorial.
 
 ## Build a flow as docker format app
 
+Note that all dependent connections must be created before building the docker image.
+```bash
+# create connection if not created before
+pf connection create --file ../../../connections/azure_openai.yml --set api_key= api_base= --name open_ai_connection
+```
+
 Use the command below to build a flow as docker format app:
 
 ```bash
-pf flow build --source ../../flows/standard/web-classification --output build --format docker
+pf flow build --source ../../../flows/standard/web-classification --output build --format docker
 ```
 
-Note that all dependent connections must be created before building as docker.
-
 ## Deploy with Docker
 
 ### Build Docker image
@@ -19,7 +24,7 @@ Like other Dockerfile, you need to build the image first. You can tag the image
 
 Run the command below to build image:
 
-```bash
+```shell
 docker build build -t web-classification-serve
 ```
 
@@ -43,7 +48,7 @@ You'll need to set up the environment variables in the container to make the con
 
 You can run the docker image directly set via below commands:
 
-```bash
+```shell
 # The started service will listen on port 8080.You can map the port to any port on the host machine as you want.
 docker run -p 8080:8080 -e OPEN_AI_CONNECTION_API_KEY= web-classification-serve
 ```
@@ -51,6 +56,6 @@ docker run -p 8080:8080 -e OPEN_AI_CONNECTION_API_KEY= web-classif
 
 ### Test the endpoint
 After start the service, you can use curl to test it:
-```bash
+```shell
 curl http://localhost:8080/score --data '{"url":"https://play.google.com/store/apps/details?id=com.twitter.android"}' -X POST -H "Content-Type: application/json"
 ```
\ No newline at end of file
diff --git a/examples/tutorials/run-management/cloud-run-management.ipynb b/examples/tutorials/run-management/cloud-run-management.ipynb
index 36fe6394598..3b0d88057fe 100644
--- a/examples/tutorials/run-management/cloud-run-management.ipynb
+++ b/examples/tutorials/run-management/cloud-run-management.ipynb
@@ -218,7 +218,7 @@
     "base_run = pf.runs.create_or_update(\n",
     "    run=run,\n",
     "    runtime=runtime,\n",
-    ")\n"
+    ")"
    ]
   },
   {
@@ -300,7 +300,7 @@
     "    connections={\n",
     "        \"classify_with_llm\": {\"connection\": \"azure_open_ai_connection\"},\n",
     "        \"summarize_text_content\": {\"connection\": \"azure_open_ai_connection\"},\n",
-    "    }\n",
+    "    },\n",
     ")\n",
     "\n",
     "base_run = pf.runs.create_or_update(\n",
diff --git a/scripts/readme/ghactions_driver/readme_templates/README.md.jinja2 b/scripts/readme/ghactions_driver/readme_templates/README.md.jinja2
index b28f91a6c2b..f4d85b54524 100644
--- a/scripts/readme/ghactions_driver/readme_templates/README.md.jinja2
+++ b/scripts/readme/ghactions_driver/readme_templates/README.md.jinja2
@@ -58,11 +58,6 @@
 {% for connection in connections.readmes %}| [{{ connection.name }}]({{ connection.path }}) | [![{{connection.pipeline_name}}](https://github.com/microsoft/promptflow/actions/workflows/{{connection.yaml_name}}/badge.svg?branch={{branch}})](https://github.com/microsoft/promptflow/actions/workflows/{{connection.yaml_name}}) | {{ connection.description }} |
 {% endfor %}
 
-### Flow Deploy ([tutorials/flow-deploy/](tutorials/flow-deploy/))
-
-| path | status | description |
-------|--------|-------------
-| [deploy.md](tutorials/flow-deploy/deploy.md) | | deploy flow as endpoint
 
 
 ## SDK examples
diff --git a/scripts/readme/readme.py b/scripts/readme/readme.py
index 1586ed44ae6..1d7075ae318 100644
--- a/scripts/readme/readme.py
+++ b/scripts/readme/readme.py
@@ -276,6 +276,7 @@ def write_readme(workflow_telemetrys, readme_telemetrys):
         "examples/flows/**/README.md",
         "examples/connections/**/README.md",
         "examples/tutorials/**/chat*.md",
+        "examples/tutorials/**/README.md",
     ]
     readme_telemetrys = []
     readme_generator.main(input_glob_readme, readme_telemetrys)
diff --git a/scripts/readme/readme_generator.py b/scripts/readme/readme_generator.py
index 9170761906a..9e04132bc45 100644
--- a/scripts/readme/readme_generator.py
+++ b/scripts/readme/readme_generator.py
@@ -23,6 +23,9 @@ def no_readme_generation_filter(item: Path, index, array) -> bool:
     If there is no steps in the readme, then no generation
     """
     try:
+        if 'build' in str(item):  # skip build folder
+            return False
+
         full_text = readme_parser(item.relative_to(ReadmeStepsManage.git_base_dir()))
         if full_text == "":
             return False
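The new glob in `readme.py` picks up every `examples/tutorials/**/README.md`, and the `'build' in str(item)` guard in `readme_generator.py` keeps the generator away from the `build` output folders created by `pf flow build --output build`, whose generated contents could otherwise match the new glob. In isolation the effect is roughly the following; this is an illustrative sketch, not the generator's actual code.

```python
from pathlib import Path

# Collect tutorial READMEs, but ignore anything that sits inside a generated
# "build" folder such as examples/tutorials/flow-deploy/docker/build/.
candidates = Path("examples/tutorials").glob("**/README.md")
tutorial_readmes = [p for p in candidates if "build" not in p.parts]

for readme in tutorial_readmes:
    print(readme)
```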
"examples/flows/**/README.md", "examples/connections/**/README.md", "examples/tutorials/**/chat*.md", + "examples/tutorials/**/README.md", ] readme_telemetrys = [] readme_generator.main(input_glob_readme, readme_telemetrys) diff --git a/scripts/readme/readme_generator.py b/scripts/readme/readme_generator.py index 9170761906a..9e04132bc45 100644 --- a/scripts/readme/readme_generator.py +++ b/scripts/readme/readme_generator.py @@ -23,6 +23,9 @@ def no_readme_generation_filter(item: Path, index, array) -> bool: If there is no steps in the readme, then no generation """ try: + if 'build' in str(item): # skip build folder + return False + full_text = readme_parser(item.relative_to(ReadmeStepsManage.git_base_dir())) if full_text == "": return False diff --git a/src/promptflow/CHANGELOG.md b/src/promptflow/CHANGELOG.md index 89f9bc8f1d0..39887837839 100644 --- a/src/promptflow/CHANGELOG.md +++ b/src/promptflow/CHANGELOG.md @@ -1,15 +1,15 @@ # Release History -## v0.1.0b4 (upcoming) +## 0.1.0b4 (2023.09.04) ### Features added -- **pf flow validate**: validate a flow definition file +- Support `pf flow build` commands -## v0.1.0b3 (2023.08.30) +## 0.1.0b3 (2023.08.30) - Minor bug fixes. -## v0.1.0b2 (2022.08.29) +## 0.1.0b2 (2023.08.29) - First preview version with major CLI & SDK features. @@ -21,6 +21,6 @@ - **pfazure run**: create/list/stream/show/show-details/show-metrics/visualize -## v0.1.0b1 (2023.07.20) +## 0.1.0b1 (2023.07.20) - Stub version in Pypi.