Error related to Json schema #3

Open
arnaudstiegler opened this issue Mar 7, 2025 · 5 comments

arnaudstiegler commented Mar 7, 2025

I tried your MCP server with the langchain-mcp-adapters and Bedrock, and it seems to hit some issues with the JSON schema. I can provide code to reproduce if needed.

During handling of the above exception, another exception occurred:

  + Exception Group Traceback (most recent call last):
  |   File "/var/tmp/pants-sandbox-8mnatw/./.cache/pex_root/venvs/0/f9ca2de32d9a1d54dc847fe3e4bc2ad0247ac5bf/af1edc4b2527cae6660df81238db6fc4fb583aa3/pex", line 334, in <module>
  |     boot(
  |   File "/var/tmp/pants-sandbox-8mnatw/./.cache/pex_root/venvs/0/f9ca2de32d9a1d54dc847fe3e4bc2ad0247ac5bf/af1edc4b2527cae6660df81238db6fc4fb583aa3/pex", line 317, in boot
  |     runpy.run_module(module_name, run_name="__main__", alter_sys=True)
  |   File "<frozen runpy>", line 226, in run_module
  |   File "<frozen runpy>", line 98, in _run_module_code
  |   File "<frozen runpy>", line 88, in _run_code
  |   File "/var/tmp/pants-sandbox-8mnatw/./olympus/mcp/client.py", line 176, in <module>
  |     asyncio.run(main())
  |   File "/home/arnaud/.pyenv/versions/3.12.7/lib/python3.12/asyncio/runners.py", line 194, in run
  |     return runner.run(main)
  |            ^^^^^^^^^^^^^^^^
  |   File "/home/arnaud/.pyenv/versions/3.12.7/lib/python3.12/asyncio/runners.py", line 118, in run
  |     return self._loop.run_until_complete(task)
  |            ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  |   File "/home/arnaud/.pyenv/versions/3.12.7/lib/python3.12/asyncio/base_events.py", line 687, in run_until_complete
  |     return future.result()
  |            ^^^^^^^^^^^^^^^
  |   File "/var/tmp/pants-sandbox-8mnatw/./olympus/mcp/client.py", line 151, in main
  |     async with stdio_client(server_params) as (read, write):
  |                ^^^^^^^^^^^^^^^^^^^^^^^^^^^
  |   File "/home/arnaud/.pyenv/versions/3.12.7/lib/python3.12/contextlib.py", line 231, in __aexit__
  |     await self.gen.athrow(value)
  |   File "/home/arnaud/.cache/pants/named_caches/pex_root/venvs/0/s/df58132c/venv/lib/python3.12/site-packages/mcp/client/stdio.py", line 148, in stdio_client
  |     anyio.create_task_group() as tg,
  |     ^^^^^^^^^^^^^^^^^^^^^^^^^
  |   File "/home/arnaud/.cache/pants/named_caches/pex_root/venvs/0/s/df58132c/venv/lib/python3.12/site-packages/anyio/_backends/_asyncio.py", line 767, in __aexit__
  |     raise BaseExceptionGroup(
  | ExceptionGroup: unhandled errors in a TaskGroup (1 sub-exception)
  +-+---------------- 1 ----------------
    | Traceback (most recent call last):
    |   File "/var/tmp/pants-sandbox-8mnatw/./olympus/mcp/client.py", line 169, in main
    |     agent_response = await agent.ainvoke({"messages": INSTANCE_PROMPT})
    |                      ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    |   File "/home/arnaud/.cache/pants/named_caches/pex_root/venvs/0/s/df58132c/venv/lib/python3.12/site-packages/langgraph/pregel/__init__.py", line 2420, in ainvoke
    |     async for chunk in self.astream(
    |   File "/home/arnaud/.cache/pants/named_caches/pex_root/venvs/0/s/df58132c/venv/lib/python3.12/site-packages/langgraph/pregel/__init__.py", line 2305, in astream
    |     async for _ in runner.atick(
    |   File "/home/arnaud/.cache/pants/named_caches/pex_root/venvs/0/s/df58132c/venv/lib/python3.12/site-packages/langgraph/pregel/runner.py", line 444, in atick
    |     await arun_with_retry(
    |   File "/home/arnaud/.cache/pants/named_caches/pex_root/venvs/0/s/df58132c/venv/lib/python3.12/site-packages/langgraph/pregel/retry.py", line 128, in arun_with_retry
    |     return await task.proc.ainvoke(task.input, config)
    |            ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    |   File "/home/arnaud/.cache/pants/named_caches/pex_root/venvs/0/s/df58132c/venv/lib/python3.12/site-packages/langgraph/utils/runnable.py", line 583, in ainvoke
    |     input = await step.ainvoke(input, config, **kwargs)
    |             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    |   File "/home/arnaud/.cache/pants/named_caches/pex_root/venvs/0/s/df58132c/venv/lib/python3.12/site-packages/langgraph/utils/runnable.py", line 359, in ainvoke
    |     ret = await asyncio.create_task(coro, context=context)
    |           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    |   File "/home/arnaud/.cache/pants/named_caches/pex_root/venvs/0/s/df58132c/venv/lib/python3.12/site-packages/langgraph/prebuilt/chat_agent_executor.py", line 681, in acall_model
    |     response = cast(AIMessage, await model_runnable.ainvoke(state, config))
    |                                ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    |   File "/home/arnaud/.cache/pants/named_caches/pex_root/venvs/0/s/df58132c/venv/lib/python3.12/site-packages/langchain_core/runnables/base.py", line 3071, in ainvoke
    |     input = await asyncio.create_task(part(), context=context)  # type: ignore
    |             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    |   File "/home/arnaud/.cache/pants/named_caches/pex_root/venvs/0/s/df58132c/venv/lib/python3.12/site-packages/langchain_core/runnables/base.py", line 5377, in ainvoke
    |     return await self.bound.ainvoke(
    |            ^^^^^^^^^^^^^^^^^^^^^^^^^
    |   File "/home/arnaud/.cache/pants/named_caches/pex_root/venvs/0/s/df58132c/venv/lib/python3.12/site-packages/langchain_core/language_models/chat_models.py", line 306, in ainvoke
    |     llm_result = await self.agenerate_prompt(
    |                  ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    |   File "/home/arnaud/.cache/pants/named_caches/pex_root/venvs/0/s/df58132c/venv/lib/python3.12/site-packages/langchain_core/language_models/chat_models.py", line 871, in agenerate_prompt
    |     return await self.agenerate(
    |            ^^^^^^^^^^^^^^^^^^^^^
    |   File "/home/arnaud/.cache/pants/named_caches/pex_root/venvs/0/s/df58132c/venv/lib/python3.12/site-packages/langchain_core/language_models/chat_models.py", line 831, in agenerate
    |     raise exceptions[0]
    |   File "/home/arnaud/.cache/pants/named_caches/pex_root/venvs/0/s/df58132c/venv/lib/python3.12/site-packages/langchain_core/language_models/chat_models.py", line 999, in _agenerate_with_cache
    |     result = await self._agenerate(
    |              ^^^^^^^^^^^^^^^^^^^^^^
    |   File "/home/arnaud/.cache/pants/named_caches/pex_root/venvs/0/s/df58132c/venv/lib/python3.12/site-packages/langchain_core/language_models/chat_models.py", line 1039, in _agenerate
    |     return await run_in_executor(
    |            ^^^^^^^^^^^^^^^^^^^^^^
    |   File "/home/arnaud/.cache/pants/named_caches/pex_root/venvs/0/s/df58132c/venv/lib/python3.12/site-packages/langchain_core/runnables/config.py", line 588, in run_in_executor
    |     return await asyncio.get_running_loop().run_in_executor(
    |            ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    |   File "/home/arnaud/.pyenv/versions/3.12.7/lib/python3.12/concurrent/futures/thread.py", line 58, in run
    |     result = self.fn(*self.args, **self.kwargs)
    |              ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    |   File "/home/arnaud/.cache/pants/named_caches/pex_root/venvs/0/s/df58132c/venv/lib/python3.12/site-packages/langchain_core/runnables/config.py", line 579, in wrapper
    |     return func(*args, **kwargs)
    |            ^^^^^^^^^^^^^^^^^^^^^
    |   File "/home/arnaud/.cache/pants/named_caches/pex_root/venvs/0/s/df58132c/venv/lib/python3.12/site-packages/langchain_aws/chat_models/bedrock.py", line 685, in _generate
    |     completion, tool_calls, llm_output = self._prepare_input_and_invoke(
    |                                          ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    |   File "/home/arnaud/.cache/pants/named_caches/pex_root/venvs/0/s/df58132c/venv/lib/python3.12/site-packages/langchain_aws/llms/bedrock.py", line 964, in _prepare_input_and_invoke
    |     raise e
    |   File "/home/arnaud/.cache/pants/named_caches/pex_root/venvs/0/s/df58132c/venv/lib/python3.12/site-packages/langchain_aws/llms/bedrock.py", line 949, in _prepare_input_and_invoke
    |     response = self.client.invoke_model(**request_options)
    |                ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    |   File "/home/arnaud/.cache/pants/named_caches/pex_root/venvs/0/s/df58132c/venv/lib/python3.12/site-packages/botocore/client.py", line 570, in _api_call
    |     return self._make_api_call(operation_name, kwargs)
    |            ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    |   File "/home/arnaud/.cache/pants/named_caches/pex_root/venvs/0/s/df58132c/venv/lib/python3.12/site-packages/botocore/context.py", line 124, in wrapper
    |     return func(*args, **kwargs)
    |            ^^^^^^^^^^^^^^^^^^^^^
    |   File "/home/arnaud/.cache/pants/named_caches/pex_root/venvs/0/s/df58132c/venv/lib/python3.12/site-packages/botocore/client.py", line 1031, in _make_api_call
    |     raise error_class(parsed_response, operation_name)
    | botocore.errorfactory.ValidationException: An error occurred (ValidationException) when calling the InvokeModel operation: tools.1.custom.input_schema: JSON schema is invalid. It must match JSON Schema draft 2020-12 (https://json-schema.org/draft/2020-12). Learn more about tool use at https://docs.anthropic.com/en/docs/tool-use.
    | During task with name 'agent' and id 'f6d6fcc6-07ab-e381-e38f-b231929bc7d8'
    | 
    | During handling of the above exception, another exception occurred:
    | 
    | Exception Group Traceback (most recent call last):
    |   File "/home/arnaud/.cache/pants/named_caches/pex_root/venvs/0/s/df58132c/venv/lib/python3.12/site-packages/mcp/client/stdio.py", line 153, in stdio_client
    |     yield read_stream, write_stream
    |   File "/var/tmp/pants-sandbox-8mnatw/./olympus/mcp/client.py", line 152, in main
    |     async with ClientSession(read, write) as session:
    |                ^^^^^^^^^^^^^^^^^^^^^^^^^^
    |   File "/home/arnaud/.cache/pants/named_caches/pex_root/venvs/0/s/df58132c/venv/lib/python3.12/site-packages/mcp/shared/session.py", line 197, in __aexit__
    |     return await self._task_group.__aexit__(exc_type, exc_val, exc_tb)
    |            ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    |   File "/home/arnaud/.cache/pants/named_caches/pex_root/venvs/0/s/df58132c/venv/lib/python3.12/site-packages/anyio/_backends/_asyncio.py", line 767, in __aexit__
    |     raise BaseExceptionGroup(
    | ExceptionGroup: unhandled errors in a TaskGroup (1 sub-exception)
    +-+---------------- 1 ----------------
      | Traceback (most recent call last):
      |   File "/var/tmp/pants-sandbox-8mnatw/./olympus/mcp/client.py", line 169, in main
      |     agent_response = await agent.ainvoke({"messages": INSTANCE_PROMPT})
      |                      ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
      |   File "/home/arnaud/.cache/pants/named_caches/pex_root/venvs/0/s/df58132c/venv/lib/python3.12/site-packages/langgraph/pregel/__init__.py", line 2420, in ainvoke
      |     async for chunk in self.astream(
      |   File "/home/arnaud/.cache/pants/named_caches/pex_root/venvs/0/s/df58132c/venv/lib/python3.12/site-packages/langgraph/pregel/__init__.py", line 2305, in astream
      |     async for _ in runner.atick(
      |   File "/home/arnaud/.cache/pants/named_caches/pex_root/venvs/0/s/df58132c/venv/lib/python3.12/site-packages/langgraph/pregel/runner.py", line 444, in atick
      |     await arun_with_retry(
      |   File "/home/arnaud/.cache/pants/named_caches/pex_root/venvs/0/s/df58132c/venv/lib/python3.12/site-packages/langgraph/pregel/retry.py", line 128, in arun_with_retry
      |     return await task.proc.ainvoke(task.input, config)
      |            ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
      |   File "/home/arnaud/.cache/pants/named_caches/pex_root/venvs/0/s/df58132c/venv/lib/python3.12/site-packages/langgraph/utils/runnable.py", line 583, in ainvoke
      |     input = await step.ainvoke(input, config, **kwargs)
      |             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
      |   File "/home/arnaud/.cache/pants/named_caches/pex_root/venvs/0/s/df58132c/venv/lib/python3.12/site-packages/langgraph/utils/runnable.py", line 359, in ainvoke
      |     ret = await asyncio.create_task(coro, context=context)
      |           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
      |   File "/home/arnaud/.cache/pants/named_caches/pex_root/venvs/0/s/df58132c/venv/lib/python3.12/site-packages/langgraph/prebuilt/chat_agent_executor.py", line 681, in acall_model
      |     response = cast(AIMessage, await model_runnable.ainvoke(state, config))
      |                                ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
      |   File "/home/arnaud/.cache/pants/named_caches/pex_root/venvs/0/s/df58132c/venv/lib/python3.12/site-packages/langchain_core/runnables/base.py", line 3071, in ainvoke
      |     input = await asyncio.create_task(part(), context=context)  # type: ignore
      |             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
      |   File "/home/arnaud/.cache/pants/named_caches/pex_root/venvs/0/s/df58132c/venv/lib/python3.12/site-packages/langchain_core/runnables/base.py", line 5377, in ainvoke
      |     return await self.bound.ainvoke(
      |            ^^^^^^^^^^^^^^^^^^^^^^^^^
      |   File "/home/arnaud/.cache/pants/named_caches/pex_root/venvs/0/s/df58132c/venv/lib/python3.12/site-packages/langchain_core/language_models/chat_models.py", line 306, in ainvoke
      |     llm_result = await self.agenerate_prompt(
      |                  ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
      |   File "/home/arnaud/.cache/pants/named_caches/pex_root/venvs/0/s/df58132c/venv/lib/python3.12/site-packages/langchain_core/language_models/chat_models.py", line 871, in agenerate_prompt
      |     return await self.agenerate(
      |            ^^^^^^^^^^^^^^^^^^^^^
      |   File "/home/arnaud/.cache/pants/named_caches/pex_root/venvs/0/s/df58132c/venv/lib/python3.12/site-packages/langchain_core/language_models/chat_models.py", line 831, in agenerate
      |     raise exceptions[0]
      |   File "/home/arnaud/.cache/pants/named_caches/pex_root/venvs/0/s/df58132c/venv/lib/python3.12/site-packages/langchain_core/language_models/chat_models.py", line 999, in _agenerate_with_cache
      |     result = await self._agenerate(
      |              ^^^^^^^^^^^^^^^^^^^^^^
      |   File "/home/arnaud/.cache/pants/named_caches/pex_root/venvs/0/s/df58132c/venv/lib/python3.12/site-packages/langchain_core/language_models/chat_models.py", line 1039, in _agenerate
      |     return await run_in_executor(
      |            ^^^^^^^^^^^^^^^^^^^^^^
      |   File "/home/arnaud/.cache/pants/named_caches/pex_root/venvs/0/s/df58132c/venv/lib/python3.12/site-packages/langchain_core/runnables/config.py", line 588, in run_in_executor
      |     return await asyncio.get_running_loop().run_in_executor(
      |            ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
      |   File "/home/arnaud/.pyenv/versions/3.12.7/lib/python3.12/concurrent/futures/thread.py", line 58, in run
      |     result = self.fn(*self.args, **self.kwargs)
      |              ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
      |   File "/home/arnaud/.cache/pants/named_caches/pex_root/venvs/0/s/df58132c/venv/lib/python3.12/site-packages/langchain_core/runnables/config.py", line 579, in wrapper
      |     return func(*args, **kwargs)
      |            ^^^^^^^^^^^^^^^^^^^^^
      |   File "/home/arnaud/.cache/pants/named_caches/pex_root/venvs/0/s/df58132c/venv/lib/python3.12/site-packages/langchain_aws/chat_models/bedrock.py", line 685, in _generate
      |     completion, tool_calls, llm_output = self._prepare_input_and_invoke(
      |                                          ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
      |   File "/home/arnaud/.cache/pants/named_caches/pex_root/venvs/0/s/df58132c/venv/lib/python3.12/site-packages/langchain_aws/llms/bedrock.py", line 964, in _prepare_input_and_invoke
      |     raise e
      |   File "/home/arnaud/.cache/pants/named_caches/pex_root/venvs/0/s/df58132c/venv/lib/python3.12/site-packages/langchain_aws/llms/bedrock.py", line 949, in _prepare_input_and_invoke
      |     response = self.client.invoke_model(**request_options)
      |                ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
      |   File "/home/arnaud/.cache/pants/named_caches/pex_root/venvs/0/s/df58132c/venv/lib/python3.12/site-packages/botocore/client.py", line 570, in _api_call
      |     return self._make_api_call(operation_name, kwargs)
      |            ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
      |   File "/home/arnaud/.cache/pants/named_caches/pex_root/venvs/0/s/df58132c/venv/lib/python3.12/site-packages/botocore/context.py", line 124, in wrapper
      |     return func(*args, **kwargs)
      |            ^^^^^^^^^^^^^^^^^^^^^
      |   File "/home/arnaud/.cache/pants/named_caches/pex_root/venvs/0/s/df58132c/venv/lib/python3.12/site-packages/botocore/client.py", line 1031, in _make_api_call
      |     raise error_class(parsed_response, operation_name)
      | botocore.errorfactory.ValidationException: An error occurred (ValidationException) when calling the InvokeModel operation: tools.1.custom.input_schema: JSON schema is invalid. It must match JSON Schema draft 2020-12 (https://json-schema.org/draft/2020-12). Learn more about tool use at https://docs.anthropic.com/en/docs/tool-use.
      | During task with name 'agent' and id 'f6d6fcc6-07ab-e381-e38f-b231929bc7d8'
      +------------------------------------

arnaudstiegler commented Mar 7, 2025

This is probably related to using langchain-mcp-adapters. When I inspect the tools, the args_schema for each tool doesn't appear to be correctly generated.

For instance, for the first tool:

name='lsp_info' 
description='Returns information about the the LSP tools available. This is useful for debugging which programming languages are supported.' 
args_schema={'type': 'object'} 
response_format='content_and_artifact' 
coroutine=<function convert_mcp_tool_to_langchain_tool.<locals>.call_tool at 0x7fecb46eb880>
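
For reference, the Anthropic tool-use API expects each input_schema to be an object schema that declares its properties, so a bare {'type': 'object'} may be exactly what Bedrock is rejecting here. A rough workaround sketch (untested; it assumes args_schema stays a plain dict on the LangChain tool objects, as in the dump above) would be to patch the schemas before handing the tools to the agent:

# Untested workaround sketch: normalize empty tool schemas before building the agent.
# Assumes `tools` comes from load_mcp_tools(session) and args_schema is a plain dict.
for tool in tools:
    schema = tool.args_schema or {}
    if isinstance(schema, dict) and "properties" not in schema:
        # Declare an explicit (empty) properties map so the schema is a complete
        # draft 2020-12 object schema rather than a bare {'type': 'object'}.
        tool.args_schema = {**schema, "type": "object", "properties": {}}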


jonrad commented Mar 9, 2025

Thanks for the details!
These tools are all a bit finicky when it comes to things like whitespace and formatting, so I'm not surprised.

Can you provide details on how you're running lsp-mcp? That is, are you using Docker or the command line, and what args are you passing?
In addition, do you have a simple way to repro? I don't have a langchain project handy, though I'm sure I can dust one off somewhere if you can't share yours.


jonrad commented Mar 9, 2025

Also, please try with the latest version. I just made some updates to get this working with Cursor that may or may not have helped.

@arnaudstiegler

I tested the new version ("docker.io/jonrad/lsp-mcp:0.3.0") and I'm now seeing a Node-related issue:

node:events:496
      throw er; // Unhandled 'error' event
      ^

Error: spawn uvx ENOENT
    at ChildProcess._handle.onexit (node:internal/child_process:285:19)
    at onErrorNT (node:internal/child_process:483:16)
    at process.processTicksAndRejections (node:internal/process/task_queues:82:21)
Emitted 'error' event on ChildProcess instance at:
    at ChildProcess._handle.onexit (node:internal/child_process:291:12)
    at onErrorNT (node:internal/child_process:483:16)
    at process.processTicksAndRejections (node:internal/process/task_queues:82:21) {
  errno: -2,
  code: 'ENOENT',
  syscall: 'spawn uvx',
  path: 'uvx',
  spawnargs: [ '--from', 'python-lsp-server', 'pylsp' ]
}

Node.js v20.18.3
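
From the spawnargs above, it looks like the server is shelling out to uvx --from python-lsp-server pylsp, and the ENOENT suggests uvx isn't on PATH inside the container. A quick way to confirm that (just a sketch, assuming the image ships a POSIX shell and Docker is available locally):

import subprocess

# Check whether `uvx` resolves inside the image; prints its path or a "not found" note.
result = subprocess.run(
    ["docker", "run", "--rm", "--entrypoint", "sh",
     "docker.io/jonrad/lsp-mcp:0.3.0", "-c", "command -v uvx"],
    capture_output=True,
    text=True,
)
print(result.stdout.strip() or "uvx not found on PATH inside the image")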

Here is the code to reproduce:

import asyncio
import logging
import os
import tempfile

import boto3
from langchain_aws import ChatBedrock
from langchain_mcp_adapters.tools import load_mcp_tools
from langgraph.prebuilt import create_react_agent
from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client

logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)

# Placeholder: the original prompt isn't included in the issue.
PROMPT = 'You are a coding assistant exploring a repository.'


async def main():
    bedrock_client = boto3.client(  # type: ignore
        service_name='bedrock-runtime',
        region_name='us-west-2',
        aws_access_key_id=os.getenv('BEDROCK_MACHINA_ACCESS_KEY'),
        aws_secret_access_key=os.getenv('BEDROCK_MACHINA_SECRET_KEY'),
    )
    model = ChatBedrock(
        model='us.anthropic.claude-3-7-sonnet-20250219-v1:0',
        region_name='us-east-1',
        client=bedrock_client,
        streaming=False,
    )

    with tempfile.TemporaryDirectory() as tempdir:
        # Clone a sample repository for the LSP server to work against.
        clone_process = await asyncio.create_subprocess_shell(
            'git clone git@github.com:scikit-learn/scikit-learn.git',
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE,
            cwd=tempdir,
        )
        await clone_process.communicate()

        print('Starting MCP server')
        server_params = StdioServerParameters(
            command="docker",
            args=["run", "-i", "--rm", "-v", f"{tempdir}:/workspace", "docker.io/jonrad/lsp-mcp:0.3.0"],
        )

        async with stdio_client(server_params) as (read, write):
            async with ClientSession(read, write) as session:
                await session.initialize()

                tools = await load_mcp_tools(session)

                agent = create_react_agent(
                    model,
                    tools,
                    prompt=PROMPT,
                )

                agent_response = await agent.ainvoke({'messages': 'Just explore the codebase and tell me what you see'})
                print(agent_response['messages'][-1].content)
                return agent_response['messages'][-1].content


if __name__ == '__main__':
    asyncio.run(main())


jonrad commented Mar 31, 2025

This should be resolved in docker.io/jonrad/lsp-mcp:0.3.1
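
If it helps, picking up the fix in the repro script above should only require bumping the image tag in the server parameters (sketch; everything else unchanged):

# Point the repro at the patched image.
server_params = StdioServerParameters(
    command="docker",
    args=["run", "-i", "--rm", "-v", f"{tempdir}:/workspace", "docker.io/jonrad/lsp-mcp:0.3.1"],
)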
