diff --git a/caikit_huggingface_demo/app.py b/caikit_huggingface_demo/app.py index dbda740..9cc37d4 100755 --- a/caikit_huggingface_demo/app.py +++ b/caikit_huggingface_demo/app.py @@ -53,7 +53,7 @@ def _get_module_models(model_manager=None) -> dict: """ if model_manager: model_modules = { - k: v.module().metadata["module_id"] + k: v.model().MODULE_ID for (k, v) in model_manager.loaded_models.items() } else: @@ -125,7 +125,7 @@ def start_frontend(backend, inference_service): module_models = _get_module_models(model_manager) # Channel and stub is for client port = ( - get_config().runtime.port if not backend else backend.port + get_config().runtime.grpc.port if not backend else backend.port ) # Using the actual port when we have a backend target = f"localhost:{port}" channel = grpc.insecure_channel(target) @@ -146,9 +146,7 @@ def main() -> int: if backend: print("▶️ Starting the backend Caikit inference server...") - with RuntimeGRPCServer( - inference_service=inference_service, training_service=None - ) as backend: + with RuntimeGRPCServer() as backend: if frontend: start_frontend(backend, inference_service) # and wait for termination else: diff --git a/caikit_huggingface_demo/client/app.py b/caikit_huggingface_demo/client/app.py index c1804b5..344939a 100644 --- a/caikit_huggingface_demo/client/app.py +++ b/caikit_huggingface_demo/client/app.py @@ -67,8 +67,10 @@ def get_frontend( reflection_db = ProtoReflectionDescriptorDatabase(channel) desc_pool = DescriptorPool(reflection_db) services = [ - x for x in reflection_db.get_services() if x.startswith("caikit.runtime.") - ] + x for x in reflection_db.get_services() if x.startswith("caikit.runtime.") and not( + x.endswith("InfoService") or x.endswith("TrainingService") + ) and not ( + x.startswith("caikit.runtime.training"))] if len(services) != 1: print(f"Error: Expected 1 caikit.runtime service, but found {len(services)}.") service_name = services[0] diff --git 
a/caikit_huggingface_demo/runtime/config/config.yml b/caikit_huggingface_demo/runtime/config/config.yml index 231410b..ad04d87 100644 --- a/caikit_huggingface_demo/runtime/config/config.yml +++ b/caikit_huggingface_demo/runtime/config/config.yml @@ -1,8 +1,5 @@ runtime: - # The runtime library (or libraries) whose models we want to serve using Caikit Runtime. This should - # be a snake case string, e.g., caikit_nlp or caikit_cv. library: runtime local_models_dir: models - # Service exposure options - port: 8085 - find_available_port: True + # grpc: + # port: 8085 diff --git a/requirements.txt b/requirements.txt index ca86aae..8aec6d3 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,4 @@ -caikit[all]==0.10.1 +caikit[all]==0.25.6 requests==2.31.0 Pillow==9.5.0 click==8.1.3