diff --git a/README.md b/README.md
index 0f6f19eea..a8e948fb8 100644
--- a/README.md
+++ b/README.md
@@ -79,6 +79,7 @@ This project consists of several microservices:
 - **Customers Service**: Manages customer data.
 - **Vets Service**: Handles information about veterinarians.
 - **Visits Service**: Manages pet visit records.
+- **GenAI Service**: Provides a chatbot interface to the application.
 - **API Gateway**: Routes client requests to the appropriate services.
 - **Config Server**: Centralized configuration management for all services.
 - **Discovery Server**: Eureka-based service registry.
@@ -102,16 +103,33 @@ Spring Petclinic integrates a Chatbot that allows you to interact with the appli
 3. Is there an owner named Betty?
 4. Which owners have dogs?
 5. Add a dog for Betty. Its name is Moopsie.
-6. Create a new owner
+6. Create a new owner.
+
+![Screenshot of the chat dialog](docs/spring-ai.png)
+
+The `spring-petclinic-genai-service` microservice currently supports **OpenAI** (default) or **Azure OpenAI** as the LLM provider.
+To start the microservice, perform the following steps:
+
+1. Decide which provider you want to use. By default, the `spring-ai-openai-spring-boot-starter` dependency is enabled.
+   You can change it to `spring-ai-azure-openai-spring-boot-starter` in the `pom.xml`.
+2. Create an OpenAI API key or an Azure OpenAI resource in your Azure Portal.
+   Refer to [OpenAI's quickstart](https://platform.openai.com/docs/quickstart) or [Azure's documentation](https://learn.microsoft.com/en-us/azure/ai-services/openai/) for further information on how to obtain these.
+   You only need to populate the provider you're using - either `openai` or `azure-openai`.
+   If you don't have your own OpenAI API key, don't worry!
+   You can temporarily use the `demo` key, which OpenAI provides free of charge for demonstration purposes.
+   This `demo` key has a quota, is limited to the `gpt-4o-mini` model, and is intended solely for demonstration use.
+   With your own OpenAI account, you can test the `gpt-4o` model by changing the `model` property in `application.yml` (for Azure OpenAI, the equivalent property is `deployment-name`).
+3. Export your API keys and endpoint as environment variables:
+   * either OpenAI:
+     ```bash
+     export OPENAI_API_KEY="your_api_key_here"
+     ```
+   * or Azure OpenAI:
+     ```bash
+     export AZURE_OPENAI_ENDPOINT="https://your_resource.openai.azure.com"
+     export AZURE_OPENAI_KEY="your_api_key_here"
+     ```
-![alt text](docs/spring-ai.png)
-
-This Microservice currently supports OpenAI or Azure's OpenAI as the LLM provider.
-In order to enable Spring AI, perform the following steps:
-
-1. Decide which provider you want to use. By default, the `spring-ai-azure-openai-spring-boot-starter` dependency is enabled. You can change it to `spring-ai-openai-spring-boot-starter`in `pom.xml`.
-2. Copy `src/main/resources/creds-template.yaml` into `src/main/resources/creds.yaml`, and edit its contents with your API key and API endpoint. Refer to OpenAI's or Azure's documentation for further information on how to obtain these. You only need to populate the provider you're using - either openai, or azure-openai.
-3. Boot the `spring-petclinic-genai-service` microservice.
 
 ## In case you find a bug/suggested improvement for Spring Petclinic Microservices
 
 Our issue tracker is available here: https://github.com/spring-petclinic/spring-petclinic-microservices/issues
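For reference, the new README steps boil down to a short shell session. This is a minimal sketch, assuming the default OpenAI starter, the Maven wrapper at the repository root, and that the config and discovery servers are already running; the `demo` fallback key comes from the `application.yml` change later in this diff.

```bash
# Minimal sketch of the documented setup, assuming the OpenAI provider.
# If OPENAI_API_KEY is not exported, application.yml falls back to the "demo" key.
export OPENAI_API_KEY="your_api_key_here"

# Boot the GenAI microservice from its module directory with the repository's Maven wrapper.
cd spring-petclinic-genai-service
../mvnw spring-boot:run
```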
diff --git a/spring-petclinic-genai-service/pom.xml b/spring-petclinic-genai-service/pom.xml
index b1f8e1dd4..b3a1e4b06 100644
--- a/spring-petclinic-genai-service/pom.xml
+++ b/spring-petclinic-genai-service/pom.xml
@@ -17,14 +17,15 @@
     <docker.image.exposed.port>8081</docker.image.exposed.port>
     <docker.image.dockerfile.dir>${basedir}/../docker</docker.image.dockerfile.dir>
-    <spring-ai.version>1.0.0-M3</spring-ai.version>
+    <spring-ai.version>1.0.0-M4</spring-ai.version>
   </properties>

   <dependencies>
     <dependency>
       <groupId>org.springframework.ai</groupId>
-      <artifactId>spring-ai-azure-openai-spring-boot-starter</artifactId>
+      <artifactId>spring-ai-openai-spring-boot-starter</artifactId>
     </dependency>
+
     <dependency>
       <groupId>org.springframework.boot</groupId>
@@ -168,7 +169,7 @@
-
+
       <id>buildDocker</id>
diff --git a/spring-petclinic-genai-service/src/main/java/org/springframework/samples/petclinic/genai/PetclinicChatClient.java b/spring-petclinic-genai-service/src/main/java/org/springframework/samples/petclinic/genai/PetclinicChatClient.java
index 1c0666360..7aecf24de 100644
--- a/spring-petclinic-genai-service/src/main/java/org/springframework/samples/petclinic/genai/PetclinicChatClient.java
+++ b/spring-petclinic-genai-service/src/main/java/org/springframework/samples/petclinic/genai/PetclinicChatClient.java
@@ -37,7 +37,7 @@ public PetclinicChatClient(ChatClient.Builder builder, ChatMemory chatMemory) {
 				you don't know the answer, then ask the user a followup question to try and clarify the question they are asking.
 				If you do know the answer, provide the answer but do not provide any additional followup questions.
 				When dealing with vets, if the user is unsure about the returned results, explain that there may be additional data that was not returned.
-				Only if the user is asking about the total number of all vets, answer that there are a lot and ask for some additional criteria.
+				Only if the user is asking about the total number of all vets, answer that there are a lot and ask for some additional criteria. For owners, pets or visits - provide the correct data.
 				""")
 			.defaultAdvisors(
@@ -45,13 +45,14 @@ public PetclinicChatClient(ChatClient.Builder builder, ChatMemory chatMemory) {
 				new MessageChatMemoryAdvisor(chatMemory, DEFAULT_CHAT_MEMORY_CONVERSATION_ID, 10), // CHAT MEMORY
 				new SimpleLoggerAdvisor()
 				)
+			.defaultFunctions("listOwners", "addOwnerToPetclinic", "addPetToOwner", "listVets")
 			.build();
 	}
 
 	@PostMapping("/chatclient")
 	public String exchange(@RequestBody String query) {
 		try {
-			//All chatbot messages go through this endpoint
+			//All chatbot messages go through this endpoint
 			//and are passed to the LLM
 			return
 			this.chatClient
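With the functions registered on the `ChatClient` and the controller exposing `POST /chatclient`, the chatbot can be smoke-tested directly once the service is up. The sketch below is illustrative: the port is an assumption, so substitute whatever `server.port` your configuration assigns. A prompt like this one should lead the model to call the `listOwners` function.

```bash
# Hypothetical smoke test of the chat endpoint; the port (8084) is an assumption.
curl -X POST "http://localhost:8084/chatclient" \
     -H "Content-Type: text/plain" \
     -d "Which owners have dogs?"
```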
""") .defaultAdvisors( @@ -45,13 +45,14 @@ public PetclinicChatClient(ChatClient.Builder builder, ChatMemory chatMemory) { new MessageChatMemoryAdvisor(chatMemory, DEFAULT_CHAT_MEMORY_CONVERSATION_ID, 10), // CHAT MEMORY new SimpleLoggerAdvisor() ) + .defaultFunctions("listOwners", "addOwnerToPetclinic", "addPetToOwner", "listVets") .build(); } @PostMapping("/chatclient") public String exchange(@RequestBody String query) { try { - //All chatbot messages go through this endpoint + //All chatbot messages go through this endpoint //and are passed to the LLM return this.chatClient diff --git a/spring-petclinic-genai-service/src/main/resources/application.yml b/spring-petclinic-genai-service/src/main/resources/application.yml index d7d21ecab..5c42ad1ce 100644 --- a/spring-petclinic-genai-service/src/main/resources/application.yml +++ b/spring-petclinic-genai-service/src/main/resources/application.yml @@ -7,17 +7,27 @@ spring: active: production config: import: optional:configserver:${CONFIG_SERVER_URL:http://localhost:8888/},optional:classpath:/creds.yaml - #These apply when using spring-ai-azure-openai-spring-boot-starter ai: chat: client: enabled: true + # These apply when using spring-ai-azure-openai-spring-boot-starter azure: openai: + api-key: ${AZURE_OPENAI_KEY} + endpoint: ${AZURE_OPENAI_ENDPOINT} chat: options: - functions: listOwners,addOwnerToPetclinic,addPetToOwner,listVets temperature: 0.7 + deployment-name: gpt-4o + # These apply when using spring-ai-openai-spring-boot-starter + openai: + api-key: ${OPENAI_API_KEY:demo} + chat: + options: + temperature: 0.7 + model: gpt-4o-mini + logging: level: @@ -40,4 +50,4 @@ server: eureka: client: serviceUrl: - defaultZone: http://discovery-server:8761/eureka/ \ No newline at end of file + defaultZone: http://discovery-server:8761/eureka/ diff --git a/spring-petclinic-genai-service/src/main/resources/creds-template.yaml b/spring-petclinic-genai-service/src/main/resources/creds-template.yaml deleted file mode 100644 index efa0c15c7..000000000 --- a/spring-petclinic-genai-service/src/main/resources/creds-template.yaml +++ /dev/null @@ -1,17 +0,0 @@ -spring: - ai: - #These parameters only apply when using the spring-ai-azure-openai-spring-boot-starter dependency: - azure: - openai: - api-key: "" - endpoint: "" - chat: - options: - deployment-name: "gpt-4o" - #These parameters only apply when using the spring-ai-openai-spring-boot-starter dependency: - openai: - api-key: "" - endpoint: "" - chat: - options: - deployment-name: "gpt-4o" \ No newline at end of file