diff --git a/.github/labeler.yml b/.github/labeler.yml
index a853e8ab95..82af8c96ee 100644
--- a/.github/labeler.yml
+++ b/.github/labeler.yml
@@ -64,10 +64,14 @@ integration:docling:
- any-glob-to-any-file: "integrations/docling/**/*"
- any-glob-to-any-file: ".github/workflows/docling.yml"
integration:dspy:
- changed-files:
- any-glob-to-any-file: "integrations/dspy/**/*"
- any-glob-to-any-file: ".github/workflows/dspy.yml"
+integration:e2b:
+ - changed-files:
+ - any-glob-to-any-file: "integrations/e2b/**/*"
+ - any-glob-to-any-file: ".github/workflows/e2b.yml"
integration:elasticsearch:
- changed-files:
diff --git a/.github/workflows/CI_coverage_comment.yml b/.github/workflows/CI_coverage_comment.yml
index 763d02316b..553523b24c 100644
--- a/.github/workflows/CI_coverage_comment.yml
+++ b/.github/workflows/CI_coverage_comment.yml
@@ -16,6 +16,7 @@ on:
- "Test / cometapi"
- "Test / deepeval"
- "Test / dspy"
+ - "Test / e2b"
- "Test / elasticsearch"
- "Test / faiss"
- "Test / fastembed"
diff --git a/.github/workflows/e2b.yml b/.github/workflows/e2b.yml
new file mode 100644
index 0000000000..7430bb4fd8
--- /dev/null
+++ b/.github/workflows/e2b.yml
@@ -0,0 +1,141 @@
+# This workflow comes from https://github.com/ofek/hatch-mypyc
+# https://github.com/ofek/hatch-mypyc/blob/5a198c0ba8660494d02716cfc9d79ce4adfb1442/.github/workflows/test.yml
+name: Test / e2b
+
+on:
+ schedule:
+ - cron: "0 0 * * *"
+ pull_request:
+ paths:
+ - "integrations/e2b/**"
+ - "!integrations/e2b/*.md"
+ - ".github/workflows/e2b.yml"
+ push:
+ branches:
+ - main
+ paths:
+ - "integrations/e2b/**"
+ - "!integrations/e2b/*.md"
+ - ".github/workflows/e2b.yml"
+
+defaults:
+ run:
+ working-directory: integrations/e2b
+
+concurrency:
+ group: e2b-${{ github.head_ref || github.sha }}
+ cancel-in-progress: true
+
+env:
+ PYTHONUNBUFFERED: "1"
+ FORCE_COLOR: "1"
+ E2B_API_KEY: ${{ secrets.E2B_API_KEY }}
+ TEST_MATRIX_OS: '["ubuntu-latest", "windows-latest", "macos-latest"]'
+ TEST_MATRIX_PYTHON: '["3.10", "3.14"]'
+
+jobs:
+ compute-test-matrix:
+ runs-on: ubuntu-slim
+ defaults:
+ run:
+ working-directory: .
+ outputs:
+ os: ${{ steps.set.outputs.os }}
+ python-version: ${{ steps.set.outputs.python-version }}
+ steps:
+ - id: set
+ run: |
+ echo 'os=${{ github.event_name == 'push' && '["ubuntu-latest"]' || env.TEST_MATRIX_OS }}' >> "$GITHUB_OUTPUT"
+ echo 'python-version=${{ github.event_name == 'push' && '["3.10"]' || env.TEST_MATRIX_PYTHON }}' >> "$GITHUB_OUTPUT"
+
+ run:
+ name: Python ${{ matrix.python-version }} on ${{ startsWith(matrix.os, 'macos-') && 'macOS' || startsWith(matrix.os, 'windows-') && 'Windows' || 'Linux' }}
+ needs: compute-test-matrix
+ permissions:
+ contents: write
+ pull-requests: write
+ runs-on: ${{ matrix.os }}
+ strategy:
+ fail-fast: false
+ matrix:
+ os: ${{ fromJSON(needs.compute-test-matrix.outputs.os) }}
+ python-version: ${{ fromJSON(needs.compute-test-matrix.outputs.python-version) }}
+
+ steps:
+ - name: Support longpaths
+ if: matrix.os == 'windows-latest'
+ working-directory: .
+ run: git config --system core.longpaths true
+
+ - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
+
+ - name: Set up Python ${{ matrix.python-version }}
+ uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
+ with:
+ python-version: ${{ matrix.python-version }}
+
+ - name: Install Hatch
+ run: pip install --upgrade hatch
+ - name: Lint
+ if: matrix.python-version == '3.10' && runner.os == 'Linux'
+ run: hatch run fmt-check && hatch run test:types
+
+ - name: Run unit tests
+ run: hatch run test:unit-cov-retry
+
+ # On PR: posts coverage comment (directly on same-repo PRs; via artifact for fork PRs). On push to main: stores coverage baseline on data branch.
+ - name: Store unit tests coverage
+ id: coverage_comment
+ if: matrix.python-version == '3.10' && runner.os == 'Linux' && github.event_name != 'schedule'
+ uses: py-cov-action/python-coverage-comment-action@7188638f871f721a365d644f505d1ff3df20d683 # v3.40
+ with:
+ GITHUB_TOKEN: ${{ github.token }}
+ COVERAGE_PATH: integrations/e2b
+ SUBPROJECT_ID: e2b
+ MINIMUM_GREEN: 90
+ MINIMUM_ORANGE: 60
+
+ - name: Upload coverage comment to be posted
+ if: matrix.python-version == '3.10' && runner.os == 'Linux' && github.event_name == 'pull_request' && steps.coverage_comment.outputs.COMMENT_FILE_WRITTEN == 'true'
+ uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
+ with:
+ name: coverage-comment-e2b
+ path: python-coverage-comment-action-e2b.txt
+
+ - name: Run integration tests
+ if: env.E2B_API_KEY != ''
+ run: hatch run test:integration-cov-append-retry
+
+ - name: Store combined coverage
+ if: github.event_name == 'push'
+ uses: py-cov-action/python-coverage-comment-action@7188638f871f721a365d644f505d1ff3df20d683 # v3.40
+ with:
+ GITHUB_TOKEN: ${{ github.token }}
+ COVERAGE_PATH: integrations/e2b
+ SUBPROJECT_ID: e2b-combined
+ MINIMUM_GREEN: 90
+ MINIMUM_ORANGE: 60
+
+ - name: Run unit tests with lowest direct dependencies
+ if: github.event_name != 'push'
+ run: |
+ hatch run uv pip compile pyproject.toml --resolution lowest-direct --output-file requirements_lowest_direct.txt
+ hatch -e test env run -- uv pip install -r requirements_lowest_direct.txt
+ hatch run test:unit
+
+ - name: Nightly - run unit tests with Haystack main branch
+ if: github.event_name == 'schedule'
+ run: |
+ hatch env prune
+ hatch -e test env run -- uv pip install git+https://github.com/deepset-ai/haystack.git@main
+ hatch run test:unit
+
+
+ notify-slack-on-failure:
+ needs: run
+ if: failure() && github.event_name == 'schedule'
+ runs-on: ubuntu-slim
+ steps:
+ - uses: deepset-ai/notify-slack-action@3cda73b77a148f16f703274198e7771340cf862b # v1
+ with:
+ slack-webhook-url: ${{ secrets.SLACK_WEBHOOK_URL_NOTIFICATIONS }}
diff --git a/README.md b/README.md
index 8f3de61ed7..7b5521ac75 100644
--- a/README.md
+++ b/README.md
@@ -39,6 +39,7 @@ Please check out our [Contribution Guidelines](CONTRIBUTING.md) for all the deta
| [deepeval-haystack](integrations/deepeval/) | Evaluator | [](https://pypi.org/project/deepeval-haystack) | [](https://github.com/deepset-ai/haystack-core-integrations/actions/workflows/deepeval.yml) | [](https://htmlpreview.github.io/?https://github.com/deepset-ai/haystack-core-integrations/blob/python-coverage-comment-action-data-deepeval/htmlcov/index.html) | [](https://htmlpreview.github.io/?https://github.com/deepset-ai/haystack-core-integrations/blob/python-coverage-comment-action-data-deepeval-combined/htmlcov/index.html) |
| [docling-haystack](integrations/docling/) | Converter | [](https://pypi.org/project/docling-haystack) | [](https://github.com/deepset-ai/haystack-core-integrations/actions/workflows/docling.yml) | [](https://htmlpreview.github.io/?https://github.com/deepset-ai/haystack-core-integrations/blob/python-coverage-comment-action-data-docling/htmlcov/index.html) | [](https://htmlpreview.github.io/?https://github.com/deepset-ai/haystack-core-integrations/blob/python-coverage-comment-action-data-docling-combined/htmlcov/index.html) |
| [dspy-haystack](integrations/dspy/) | Generator | [](https://pypi.org/project/dspy-haystack) | [](https://github.com/deepset-ai/haystack-core-integrations/actions/workflows/dspy.yml) | [](https://htmlpreview.github.io/?https://github.com/deepset-ai/haystack-core-integrations/blob/python-coverage-comment-action-data-dspy/htmlcov/index.html) | [](https://htmlpreview.github.io/?https://github.com/deepset-ai/haystack-core-integrations/blob/python-coverage-comment-action-data-dspy-combined/htmlcov/index.html) |
+| [e2b-haystack](integrations/e2b/) | Tool | [](https://pypi.org/project/e2b-haystack) | [](https://github.com/deepset-ai/haystack-core-integrations/actions/workflows/e2b.yml) | [](https://htmlpreview.github.io/?https://github.com/deepset-ai/haystack-core-integrations/blob/python-coverage-comment-action-data-e2b/htmlcov/index.html) | [](https://htmlpreview.github.io/?https://github.com/deepset-ai/haystack-core-integrations/blob/python-coverage-comment-action-data-e2b-combined/htmlcov/index.html) |
| [elasticsearch-haystack](integrations/elasticsearch/) | Document Store | [](https://pypi.org/project/elasticsearch-haystack) | [](https://github.com/deepset-ai/haystack-core-integrations/actions/workflows/elasticsearch.yml) | [](https://htmlpreview.github.io/?https://github.com/deepset-ai/haystack-core-integrations/blob/python-coverage-comment-action-data-elasticsearch/htmlcov/index.html) | [](https://htmlpreview.github.io/?https://github.com/deepset-ai/haystack-core-integrations/blob/python-coverage-comment-action-data-elasticsearch-combined/htmlcov/index.html) |
| [faiss-haystack](integrations/faiss/) | Document Store | [](https://pypi.org/project/faiss-haystack) | [](https://github.com/deepset-ai/haystack-core-integrations/actions/workflows/faiss.yml) | [](https://htmlpreview.github.io/?https://github.com/deepset-ai/haystack-core-integrations/blob/python-coverage-comment-action-data-faiss/htmlcov/index.html) | [](https://htmlpreview.github.io/?https://github.com/deepset-ai/haystack-core-integrations/blob/python-coverage-comment-action-data-faiss-combined/htmlcov/index.html) |
| [fastembed-haystack](integrations/fastembed/) | Embedder, Ranker | [](https://pypi.org/project/fastembed-haystack/) | [](https://github.com/deepset-ai/haystack-core-integrations/actions/workflows/fastembed.yml) | [](https://htmlpreview.github.io/?https://github.com/deepset-ai/haystack-core-integrations/blob/python-coverage-comment-action-data-fastembed/htmlcov/index.html) | [](https://htmlpreview.github.io/?https://github.com/deepset-ai/haystack-core-integrations/blob/python-coverage-comment-action-data-fastembed-combined/htmlcov/index.html) |
diff --git a/integrations/e2b/LICENSE.txt b/integrations/e2b/LICENSE.txt
new file mode 100644
index 0000000000..6134ab324f
--- /dev/null
+++ b/integrations/e2b/LICENSE.txt
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright 2023-present deepset GmbH
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/integrations/e2b/README.md b/integrations/e2b/README.md
new file mode 100644
index 0000000000..cf1bac6ec6
--- /dev/null
+++ b/integrations/e2b/README.md
@@ -0,0 +1,74 @@
+# e2b-haystack
+
+[](https://pypi.org/project/e2b-haystack)
+[](https://pypi.org/project/e2b-haystack)
+
+- [Changelog](https://github.com/deepset-ai/haystack-core-integrations/blob/main/integrations/e2b/CHANGELOG.md)
+
+---
+
+**e2b-haystack** provides [E2B](https://e2b.dev/) cloud sandbox tools for
+[Haystack](https://haystack.deepset.ai/) agents. It exposes four tools that
+operate inside a shared sandbox environment:
+
+| Tool | Description |
+|------|-------------|
+| `RunBashCommandTool` | Execute bash commands |
+| `ReadFileTool` | Read file contents |
+| `WriteFileTool` | Write files |
+| `ListDirectoryTool` | List directory contents |
+
+All tools share a single `E2BSandbox` instance so the agent can write a file in
+one step and read or execute it in the next.
+
+## Installation
+
+```bash
+pip install e2b-haystack
+```
+
+## Usage
+
+Set the `E2B_API_KEY` environment variable (get one at <https://e2b.dev/>).
+
+### Quick start with `E2BToolset`
+
+The simplest way to use all four tools together:
+
+```python
+from haystack.components.agents import Agent
+from haystack.components.generators.chat import OpenAIChatGenerator
+
+from haystack_integrations.tools.e2b import E2BToolset
+
+agent = Agent(
+ chat_generator=OpenAIChatGenerator(model="gpt-4o"),
+ tools=E2BToolset(),
+)
+```
+
+### Using individual tools
+
+For more control, create an `E2BSandbox` and pass it to the tools you need:
+
+```python
+from haystack_integrations.tools.e2b import (
+ E2BSandbox,
+ RunBashCommandTool,
+ ReadFileTool,
+ WriteFileTool,
+ ListDirectoryTool,
+)
+
+sandbox = E2BSandbox()
+tools = [
+ RunBashCommandTool(sandbox=sandbox),
+ ReadFileTool(sandbox=sandbox),
+ WriteFileTool(sandbox=sandbox),
+ ListDirectoryTool(sandbox=sandbox),
+]
+```
+
+## Contributing
+
+Refer to the general [Contribution Guidelines](https://github.com/deepset-ai/haystack-core-integrations/blob/main/CONTRIBUTING.md).
diff --git a/integrations/e2b/examples/e2b_agent_example.py b/integrations/e2b/examples/e2b_agent_example.py
new file mode 100644
index 0000000000..b18ef1f5ca
--- /dev/null
+++ b/integrations/e2b/examples/e2b_agent_example.py
@@ -0,0 +1,97 @@
+# SPDX-FileCopyrightText: 2022-present deepset GmbH
+#
+# SPDX-License-Identifier: Apache-2.0
+
+"""
+Example: Haystack Agent with E2B sandbox tools.
+
+Demonstrates that all four tools (run_bash_command, read_file, write_file,
+list_directory) share the same sandbox instance, so the agent can write a
+file in one step and read it back / execute it in the next.
+
+Requirements:
+ pip install e2b-haystack openai
+
+Environment variables:
+ E2B_API_KEY - your E2B API key
+ OPENAI_API_KEY - your OpenAI API key (or swap the generator below)
+"""
+
+import sys
+
+from haystack.components.agents import Agent
+from haystack.components.generators.chat import OpenAIChatGenerator
+from haystack.dataclasses import ChatMessage
+
+from haystack_integrations.tools.e2b import (
+ E2BSandbox,
+ ListDirectoryTool,
+ ReadFileTool,
+ RunBashCommandTool,
+ WriteFileTool,
+)
+
+# ---------------------------------------------------------------------------
+# Example queries that exercise cross-tool state sharing:
+# 1. The agent writes a Python script to the sandbox filesystem.
+# 2. It executes the script via bash and captures stdout.
+# 3. It reads the output file back (or lists a directory) to verify results.
+# ---------------------------------------------------------------------------
+EXAMPLE_QUERIES = [
+ # Simple: purely bash-based data wrangling
+ ("Generate the first 10 Fibonacci numbers using a bash one-liner and show me the results."),
+ # Cross-tool: write -> execute -> read
+ (
+ "Write a Python script to /tmp/primes.py that prints all prime numbers "
+ "up to 50, run it, and then read the file back so I can see both the "
+ "script and its output."
+ ),
+ # Multi-step: write -> list -> bash
+ (
+ "Create a directory /tmp/workspace, write three small text files into it "
+ "with different content, list the directory to confirm they exist, and "
+ "then use bash to count the total number of words across all three files."
+ ),
+]
+
+
+def run(query: str, model: str = "gpt-4o-mini") -> None:
+ print("\n" + "=" * 70)
+ print(f"Query: {query}")
+ print("=" * 70)
+
+ # One sandbox passed to each tool class - they all share the same live sandbox process.
+ sandbox = E2BSandbox()
+ sandbox.warm_up()
+ tools = [
+ RunBashCommandTool(sandbox=sandbox),
+ ReadFileTool(sandbox=sandbox),
+ WriteFileTool(sandbox=sandbox),
+ ListDirectoryTool(sandbox=sandbox),
+ ]
+
+ agent = Agent(
+ chat_generator=OpenAIChatGenerator(model=model),
+ tools=tools,
+ system_prompt=(
+ "You are a helpful coding assistant with access to a live Linux sandbox. "
+ "Use the available tools freely to explore, write files, and run commands. "
+ "All tools operate inside the same sandbox environment, so files written "
+ "with write_file are immediately available to run_bash_command and read_file."
+ ),
+ max_agent_steps=15,
+ )
+
+ result = agent.run(messages=[ChatMessage.from_user(query)])
+ print("\n--- Agent response ---")
+ print(result["last_message"].text)
+
+
+if __name__ == "__main__":
+ # Run a specific query index (0/1/2) or all of them by default.
+ if len(sys.argv) > 1:
+ idx = int(sys.argv[1])
+ run(EXAMPLE_QUERIES[idx])
+ else:
+ for query in EXAMPLE_QUERIES:
+ run(query)
diff --git a/integrations/e2b/examples/e2b_pipeline_example.py b/integrations/e2b/examples/e2b_pipeline_example.py
new file mode 100644
index 0000000000..7ccd7010a8
--- /dev/null
+++ b/integrations/e2b/examples/e2b_pipeline_example.py
@@ -0,0 +1,101 @@
+# SPDX-FileCopyrightText: 2022-present deepset GmbH
+#
+# SPDX-License-Identifier: Apache-2.0
+
+"""
+Example: Haystack Pipeline with an Agent and E2B sandbox tools.
+
+Demonstrates that a Pipeline containing an Agent with E2BToolset can be:
+ 1. Serialised to YAML
+ 2. Written to disk
+ 3. Loaded back from YAML with full sandbox config intact
+
+All four tools (run_bash_command, read_file, write_file, list_directory)
+share a single E2BSandbox after the round-trip, so the agent operates
+in one live sandbox environment.
+
+Requirements:
+ pip install e2b-haystack openai
+
+Environment variables:
+ E2B_API_KEY - your E2B API key
+ OPENAI_API_KEY - your OpenAI API key
+"""
+
+import tempfile
+from pathlib import Path
+
+from haystack.components.agents import Agent
+from haystack.components.generators.chat import OpenAIChatGenerator
+from haystack.core.pipeline import Pipeline
+from haystack.dataclasses import ChatMessage
+
+from haystack_integrations.tools.e2b import E2BToolset
+
+
+def build_pipeline() -> Pipeline:
+ agent = Agent(
+ chat_generator=OpenAIChatGenerator(model="gpt-4o-mini"),
+ tools=E2BToolset(sandbox_template="base", timeout=120),
+ system_prompt=(
+ "You are a helpful coding assistant with access to a live Linux sandbox. "
+ "Use the available tools freely to explore, write files, and run commands."
+ ),
+ max_agent_steps=10,
+ )
+ pipeline = Pipeline()
+ pipeline.add_component("agent", agent)
+ return pipeline
+
+
+def roundtrip_yaml(pipeline: Pipeline) -> Pipeline:
+ """Serialise to YAML, save to a temp file, load it back."""
+ yaml_str = pipeline.dumps()
+
+ with tempfile.NamedTemporaryFile(mode="w", suffix=".yaml", delete=False) as f:
+ f.write(yaml_str)
+ yaml_path = Path(f.name)
+
+ print(f"Pipeline YAML written to {yaml_path}\n")
+ print(yaml_str)
+ print("---\n")
+
+ return Pipeline.loads(yaml_path.read_text())
+
+
+def verify_roundtrip(original: Pipeline, restored: Pipeline) -> None:
+ """Check that the restored pipeline has the same structure."""
+ orig_agent: Agent = original.get_component("agent")
+ rest_agent: Agent = restored.get_component("agent")
+
+ orig_ts: E2BToolset = orig_agent.tools # type: ignore[assignment]
+ rest_ts: E2BToolset = rest_agent.tools # type: ignore[assignment]
+
+ assert type(rest_ts).__name__ == "E2BToolset", "Toolset type mismatch"
+ assert [t.name for t in rest_ts] == [t.name for t in orig_ts], "Tool names mismatch"
+ assert rest_ts.sandbox.sandbox_template == orig_ts.sandbox.sandbox_template
+ assert rest_ts.sandbox.timeout == orig_ts.sandbox.timeout
+
+ sandbox_ids = {id(t._e2b_sandbox) for t in rest_ts}
+ assert len(sandbox_ids) == 1, "Tools should share a single sandbox after round-trip"
+
+ print("All assertions passed: YAML round-trip preserves pipeline structure.\n")
+
+
+def run_agent(pipeline: Pipeline, query: str) -> None:
+ """Run the agent with a query (requires live API keys)."""
+ print(f"Query: {query}\n")
+ result = pipeline.run(data={"agent": {"messages": [ChatMessage.from_user(query)]}})
+ print("--- Agent response ---")
+ print(result["agent"]["last_message"].text)
+
+
+if __name__ == "__main__":
+ pipeline = build_pipeline()
+ restored = roundtrip_yaml(pipeline)
+ verify_roundtrip(pipeline, restored)
+
+ run_agent(
+ restored,
+ "Write a Python one-liner to /tmp/hello.py that prints 'Hello from E2B!', run it, then show me the output.",
+ )
diff --git a/integrations/e2b/pydoc/config_docusaurus.yml b/integrations/e2b/pydoc/config_docusaurus.yml
new file mode 100644
index 0000000000..f4d3052265
--- /dev/null
+++ b/integrations/e2b/pydoc/config_docusaurus.yml
@@ -0,0 +1,14 @@
+loaders:
+ - modules:
+ - haystack_integrations.tools.e2b.e2b_sandbox
+ - haystack_integrations.tools.e2b.sandbox_toolset
+ search_path: [../src]
+processors:
+ - type: filter
+ documented_only: true
+ skip_empty_modules: true
+renderer:
+ description: E2B integration for Haystack
+ id: integrations-e2b
+ filename: e2b.md
+ title: E2B
diff --git a/integrations/e2b/pyproject.toml b/integrations/e2b/pyproject.toml
new file mode 100644
index 0000000000..f54eb36143
--- /dev/null
+++ b/integrations/e2b/pyproject.toml
@@ -0,0 +1,167 @@
+[build-system]
+requires = ["hatchling", "hatch-vcs"]
+build-backend = "hatchling.build"
+
+[project]
+name = "e2b-haystack"
+dynamic = ["version"]
+description = "Haystack integration for E2B cloud sandboxes"
+readme = "README.md"
+requires-python = ">=3.10"
+license = "Apache-2.0"
+keywords = ["E2B", "Haystack", "sandbox", "code execution", "tools"]
+authors = [{ name = "deepset GmbH", email = "info@deepset.ai" }]
+classifiers = [
+ "License :: OSI Approved :: Apache Software License",
+ "Development Status :: 4 - Beta",
+ "Programming Language :: Python",
+ "Programming Language :: Python :: 3.10",
+ "Programming Language :: Python :: 3.11",
+ "Programming Language :: Python :: 3.12",
+ "Programming Language :: Python :: 3.13",
+ "Programming Language :: Python :: 3.14",
+ "Programming Language :: Python :: Implementation :: CPython",
+ "Programming Language :: Python :: Implementation :: PyPy",
+]
+dependencies = ["haystack-ai>=2.19.0", "e2b>=2.0.0"]
+
+[project.urls]
+Documentation = "https://github.com/deepset-ai/haystack-core-integrations/tree/main/integrations/e2b#readme"
+Issues = "https://github.com/deepset-ai/haystack-core-integrations/issues"
+Source = "https://github.com/deepset-ai/haystack-core-integrations/tree/main/integrations/e2b"
+
+[tool.hatch.build.targets.wheel]
+packages = ["src/haystack_integrations"]
+
+[tool.hatch.version]
+source = "vcs"
+tag-pattern = 'integrations\/e2b-v(?P<version>.*)'
+
+[tool.hatch.version.raw-options]
+root = "../.."
+git_describe_command = 'git describe --tags --match="integrations/e2b-v[0-9]*"'
+
+[tool.hatch.envs.default]
+installer = "uv"
+dependencies = ["haystack-pydoc-tools", "ruff"]
+
+[tool.hatch.envs.default.scripts]
+docs = ["haystack-pydoc pydoc/config_docusaurus.yml"]
+fmt = "ruff check --fix {args}; ruff format {args}"
+fmt-check = "ruff check {args} && ruff format --check {args}"
+
+[tool.hatch.envs.test]
+dependencies = [
+ "pytest",
+ "pytest-asyncio",
+ "pytest-cov",
+ "pytest-rerunfailures",
+ "mypy",
+ "pip",
+ "e2b>=2.0.0",
+]
+
+[tool.hatch.envs.test.scripts]
+unit = 'pytest -m "not integration" {args:tests}'
+integration = 'pytest -m "integration" {args:tests}'
+all = 'pytest {args:tests}'
+unit-cov-retry = 'pytest --cov=haystack_integrations --reruns 3 --reruns-delay 30 -x -m "not integration" {args:tests}'
+integration-cov-append-retry = 'pytest --cov=haystack_integrations --cov-append --reruns 3 --reruns-delay 30 -x -m "integration" {args:tests}'
+types = "mypy -p haystack_integrations.tools.e2b {args}"
+
+[tool.mypy]
+install_types = true
+non_interactive = true
+check_untyped_defs = true
+disallow_incomplete_defs = true
+
+[[tool.mypy.overrides]]
+module = ["e2b.*"]
+ignore_missing_imports = true
+
+[tool.ruff]
+line-length = 120
+
+[tool.ruff.lint]
+select = [
+ "A",
+ "ANN",
+ "ARG",
+ "B",
+ "C",
+ "D102", # Missing docstring in public method
+ "D103", # Missing docstring in public function
+ "D205", # 1 blank line required between summary line and description
+ "D209", # Closing triple quotes go to new line
+ "D213", # summary lines must be positioned on the second physical line of the docstring
+ "D417", # Missing argument descriptions in the docstring
+ "D419", # Docstring is empty
+ "DTZ",
+ "E",
+ "EM",
+ "F",
+ "I",
+ "ICN",
+ "ISC",
+ "N",
+ "PLC",
+ "PLE",
+ "PLR",
+ "PLW",
+ "Q",
+ "RUF",
+ "S",
+ "T",
+ "TID",
+ "UP",
+ "W",
+ "YTT",
+]
+ignore = [
+ # Allow non-abstract empty methods in abstract base classes
+ "B027",
+ # Allow function calls in argument defaults (common Haystack pattern for Secret.from_env_var)
+ "B008",
+ # Ignore checks for possible passwords
+ "S105",
+ "S106",
+ "S107",
+ # Ignore complexity
+ "C901",
+ "PLR0911",
+ "PLR0912",
+ "PLR0913",
+ "PLR0915",
+ # Allow `Any` type - used legitimately for dynamic types and SDK boundaries
+ "ANN401",
+]
+
+[tool.ruff.lint.isort]
+known-first-party = ["haystack_integrations"]
+
+[tool.ruff.lint.flake8-tidy-imports]
+ban-relative-imports = "parents"
+
+[tool.ruff.lint.per-file-ignores]
+# Tests can use magic values, assertions, relative imports, and don't need type annotations
+"tests/**/*" = ["PLR2004", "S101", "S108", "TID252", "D", "ANN"]
+"examples/**/*" = ["T201", "E501", "ANN", "D", "S101"]
+
+[tool.coverage.run]
+source = ["haystack_integrations"]
+branch = true
+parallel = false
+relative_files = true
+
+[tool.coverage.report]
+omit = ["*/tests/*", "*/__init__.py"]
+show_missing = true
+exclude_lines = ["no cov", "if __name__ == .__main__.:", "if TYPE_CHECKING:"]
+
+[tool.pytest.ini_options]
+addopts = "--strict-markers"
+markers = [
+ "integration: integration tests",
+]
+log_cli = true
+asyncio_default_fixture_loop_scope = "function"
diff --git a/integrations/e2b/src/haystack_integrations/tools/e2b/__init__.py b/integrations/e2b/src/haystack_integrations/tools/e2b/__init__.py
new file mode 100644
index 0000000000..d339acc187
--- /dev/null
+++ b/integrations/e2b/src/haystack_integrations/tools/e2b/__init__.py
@@ -0,0 +1,19 @@
+# SPDX-FileCopyrightText: 2022-present deepset GmbH
+#
+# SPDX-License-Identifier: Apache-2.0
+
+from haystack_integrations.tools.e2b.bash_tool import RunBashCommandTool
+from haystack_integrations.tools.e2b.e2b_sandbox import E2BSandbox
+from haystack_integrations.tools.e2b.list_directory_tool import ListDirectoryTool
+from haystack_integrations.tools.e2b.read_file_tool import ReadFileTool
+from haystack_integrations.tools.e2b.sandbox_toolset import E2BToolset
+from haystack_integrations.tools.e2b.write_file_tool import WriteFileTool
+
+__all__ = [
+ "E2BSandbox",
+ "E2BToolset",
+ "ListDirectoryTool",
+ "ReadFileTool",
+ "RunBashCommandTool",
+ "WriteFileTool",
+]
diff --git a/integrations/e2b/src/haystack_integrations/tools/e2b/bash_tool.py b/integrations/e2b/src/haystack_integrations/tools/e2b/bash_tool.py
new file mode 100644
index 0000000000..4a126aed17
--- /dev/null
+++ b/integrations/e2b/src/haystack_integrations/tools/e2b/bash_tool.py
@@ -0,0 +1,94 @@
+# SPDX-FileCopyrightText: 2022-present deepset GmbH
+#
+# SPDX-License-Identifier: Apache-2.0
+
+from typing import Any
+
+from haystack.core.serialization import generate_qualified_class_name
+from haystack.tools import Tool
+
+from haystack_integrations.tools.e2b.e2b_sandbox import E2BSandbox
+
+
+class RunBashCommandTool(Tool):
+    """
+    A :class:`~haystack.tools.Tool` that executes bash commands inside an E2B sandbox.
+
+    Pass the same :class:`E2BSandbox` instance to multiple tool classes so they
+    all operate in the same live sandbox environment.
+
+    ### Usage example
+
+    ```python
+    from haystack_integrations.tools.e2b import E2BSandbox, RunBashCommandTool, ReadFileTool
+
+    sandbox = E2BSandbox()
+    agent = Agent(
+        chat_generator=...,
+        tools=[
+            RunBashCommandTool(sandbox=sandbox),
+            ReadFileTool(sandbox=sandbox),
+        ],
+    )
+    ```
+    """
+
+    def __init__(self, sandbox: E2BSandbox) -> None:
+        """
+        Create a RunBashCommandTool.
+
+        :param sandbox: The :class:`E2BSandbox` instance that will execute commands.
+        """
+
+        # Closure over `sandbox`: the Tool API takes a plain function, so the
+        # sandbox handle is captured here rather than passed on every call.
+        def run_bash_command(command: str, timeout: int = 60) -> str:
+            # _require_sandbox() raises RuntimeError if warm_up() was never called.
+            sb = sandbox._require_sandbox()
+            try:
+                result = sb.commands.run(command, timeout=timeout)
+                return f"exit_code: {result.exit_code}\nstdout:\n{result.stdout}\nstderr:\n{result.stderr}"
+            except Exception as e:
+                # e2b raises CommandExitException for non-zero exit codes. That exception
+                # carries exit_code/stdout/stderr attributes — treat it as a valid result
+                # rather than an error so the LLM can see and react to the exit status.
+                if hasattr(e, "exit_code"):
+                    stdout = getattr(e, "stdout", "")
+                    stderr = getattr(e, "stderr", "")
+                    return f"exit_code: {e.exit_code}\nstdout:\n{stdout}\nstderr:\n{stderr}"  # type: ignore[union-attr]
+                # Anything without an exit_code attribute is treated as a genuine failure.
+                msg = f"Failed to run bash command: {e}"
+                raise RuntimeError(msg) from e
+
+        super().__init__(
+            name="run_bash_command",
+            description=(
+                "Execute a bash command inside the E2B sandbox and return the combined stdout, "
+                "stderr, and exit code. Use this to run shell scripts, install packages, compile "
+                "code, or perform any system-level operation."
+            ),
+            parameters={
+                "type": "object",
+                "properties": {
+                    "command": {"type": "string", "description": "The bash command to execute."},
+                    "timeout": {
+                        "type": "integer",
+                        "description": (
+                            "Maximum number of seconds to wait for the command to finish. Defaults to 60 seconds."
+                        ),
+                    },
+                },
+                "required": ["command"],
+            },
+            function=run_bash_command,
+        )
+        # Kept so to_dict()/from_dict() can round-trip the sandbox configuration.
+        self._e2b_sandbox = sandbox
+
+    def to_dict(self) -> dict[str, Any]:
+        """Serialize this tool to a dictionary."""
+        return {
+            "type": generate_qualified_class_name(type(self)),
+            "data": {"sandbox": self._e2b_sandbox.to_dict()},
+        }
+
+    @classmethod
+    def from_dict(cls, data: dict[str, Any]) -> "RunBashCommandTool":
+        """Deserialize a RunBashCommandTool from a dictionary."""
+        sandbox = E2BSandbox.from_dict(data["data"]["sandbox"])
+        return cls(sandbox=sandbox)
diff --git a/integrations/e2b/src/haystack_integrations/tools/e2b/e2b_sandbox.py b/integrations/e2b/src/haystack_integrations/tools/e2b/e2b_sandbox.py
new file mode 100644
index 0000000000..c00aa3fc1e
--- /dev/null
+++ b/integrations/e2b/src/haystack_integrations/tools/e2b/e2b_sandbox.py
@@ -0,0 +1,185 @@
+# SPDX-FileCopyrightText: 2022-present deepset GmbH
+#
+# SPDX-License-Identifier: Apache-2.0
+
+from typing import Any
+
+from haystack import logging
+from haystack.core.serialization import generate_qualified_class_name
+from haystack.lazy_imports import LazyImport
+from haystack.utils import Secret, deserialize_secrets_inplace
+
+with LazyImport(message="Run 'pip install e2b'") as e2b_import:
+ from e2b import Sandbox
+
+logger = logging.getLogger(__name__)
+
+
+class E2BSandbox:
+    """
+    Manages the lifecycle of an E2B cloud sandbox.
+
+    Instantiate this class and pass it to one or more E2B tool classes
+    (``RunBashCommandTool``, ``ReadFileTool``, ``WriteFileTool``,
+    ``ListDirectoryTool``) to share a single sandbox environment across all
+    tools. All tools that receive the same ``E2BSandbox`` instance operate
+    inside the same live sandbox process.
+
+    ### Usage example
+
+    ```python
+    from haystack.components.generators.chat import OpenAIChatGenerator
+    from haystack.components.agents import Agent
+
+    from haystack_integrations.tools.e2b import (
+        E2BSandbox,
+        RunBashCommandTool,
+        ReadFileTool,
+        WriteFileTool,
+        ListDirectoryTool,
+    )
+
+    sandbox = E2BSandbox()
+    agent = Agent(
+        chat_generator=OpenAIChatGenerator(model="gpt-4o"),
+        tools=[
+            RunBashCommandTool(sandbox=sandbox),
+            ReadFileTool(sandbox=sandbox),
+            WriteFileTool(sandbox=sandbox),
+            ListDirectoryTool(sandbox=sandbox),
+        ],
+    )
+    ```
+
+    Lifecycle is handled automatically by the Agent's pipeline. If you use the
+    tools standalone, call :meth:`warm_up` before the first tool invocation:
+
+    ```python
+    sandbox.warm_up()
+    # ... use tools ...
+    sandbox.close()
+    ```
+    """
+
+    def __init__(
+        self,
+        api_key: Secret | None = None,
+        sandbox_template: str = "base",
+        timeout: int = 120,
+        environment_vars: dict[str, str] | None = None,
+    ) -> None:
+        """
+        Create an E2BSandbox instance.
+
+        :param api_key: E2B API key. Defaults to ``Secret.from_env_var("E2B_API_KEY")``.
+        :param sandbox_template: E2B sandbox template name. Defaults to ``"base"``.
+        :param timeout: Sandbox inactivity timeout in seconds. Defaults to ``120``.
+        :param environment_vars: Optional environment variables to inject into the sandbox.
+        """
+        self.api_key = api_key or Secret.from_env_var("E2B_API_KEY")
+        self.sandbox_template = sandbox_template
+        self.timeout = timeout
+        self.environment_vars = environment_vars or {}
+        # Live e2b Sandbox handle; created lazily in warm_up(), reset to None in close().
+        self._sandbox: Any = None
+
+    # ------------------------------------------------------------------
+    # Lifecycle
+    # ------------------------------------------------------------------
+
+    def warm_up(self) -> None:
+        """
+        Establish the connection to the E2B sandbox.
+
+        Idempotent -- calling it multiple times has no effect if the sandbox is
+        already running.
+
+        :raises RuntimeError: If the E2B sandbox cannot be created.
+        """
+        # Already running -- nothing to do (this is what makes warm_up idempotent).
+        if self._sandbox is not None:
+            return
+
+        # Fails fast with an install hint if the e2b package is missing.
+        e2b_import.check()
+        resolved_key = self.api_key.resolve_value()
+        try:
+            logger.info(
+                "Starting E2B sandbox (template={template}, timeout={timeout}s)",
+                template=self.sandbox_template,
+                timeout=self.timeout,
+            )
+            self._sandbox = Sandbox.create(
+                api_key=resolved_key,
+                template=self.sandbox_template,
+                timeout=self.timeout,
+                # An empty mapping is normalized to None before being handed to the SDK.
+                envs=self.environment_vars if self.environment_vars else None,
+            )
+            logger.info("E2B sandbox started (id={sandbox_id})", sandbox_id=self._sandbox.sandbox_id)
+        except Exception as e:
+            msg = f"Failed to start E2B sandbox: {e}"
+            raise RuntimeError(msg) from e
+
+    def close(self) -> None:
+        """
+        Shut down the E2B sandbox and release all associated resources.
+
+        Call this when you are done to avoid leaving idle sandboxes running.
+        """
+        if self._sandbox is None:
+            return
+        try:
+            self._sandbox.kill()
+            logger.info("E2B sandbox closed")
+        except Exception as e:
+            # Best-effort shutdown: a failed kill is logged, never raised.
+            logger.warning("Failed to close E2B sandbox: {error}", error=e)
+        finally:
+            # Always drop the handle so a later warm_up() starts a fresh sandbox.
+            self._sandbox = None
+
+    # ------------------------------------------------------------------
+    # Serialisation
+    # ------------------------------------------------------------------
+
+    def to_dict(self) -> dict[str, Any]:
+        """
+        Serialize the sandbox configuration to a dictionary.
+
+        Only the configuration is serialized; the live sandbox handle (if any)
+        is not part of the output.
+
+        :returns: Dictionary containing the serialised configuration.
+        """
+        return {
+            "type": generate_qualified_class_name(type(self)),
+            "data": {
+                "api_key": self.api_key.to_dict(),
+                "sandbox_template": self.sandbox_template,
+                "timeout": self.timeout,
+                "environment_vars": self.environment_vars,
+            },
+        }
+
+    @classmethod
+    def from_dict(cls, data: dict[str, Any]) -> "E2BSandbox":
+        """
+        Deserialize an :class:`E2BSandbox` from a dictionary.
+
+        :param data: Dictionary created by :meth:`to_dict`.
+        :returns: A new :class:`E2BSandbox` instance ready to be warmed up.
+        """
+        inner = data["data"]
+        # Mutates `inner` in place, turning the serialized secret back into a Secret object.
+        deserialize_secrets_inplace(inner, keys=["api_key"])
+        return cls(
+            api_key=inner["api_key"],
+            sandbox_template=inner.get("sandbox_template", "base"),
+            timeout=inner.get("timeout", 120),
+            environment_vars=inner.get("environment_vars", {}),
+        )
+
+    # ------------------------------------------------------------------
+    # Internal helpers (used by the tool classes)
+    # ------------------------------------------------------------------
+
+    def _require_sandbox(self) -> "Sandbox":
+        """Return the active sandbox or raise a helpful error."""
+        if self._sandbox is None:
+            msg = (
+                "E2B sandbox is not running. Call warm_up() before using the tools, "
+                "or add the sandbox to a Haystack pipeline/agent which calls warm_up() automatically."
+            )
+            raise RuntimeError(msg)
+        return self._sandbox
diff --git a/integrations/e2b/src/haystack_integrations/tools/e2b/list_directory_tool.py b/integrations/e2b/src/haystack_integrations/tools/e2b/list_directory_tool.py
new file mode 100644
index 0000000000..3cdca34854
--- /dev/null
+++ b/integrations/e2b/src/haystack_integrations/tools/e2b/list_directory_tool.py
@@ -0,0 +1,81 @@
+# SPDX-FileCopyrightText: 2022-present deepset GmbH
+#
+# SPDX-License-Identifier: Apache-2.0
+
+from typing import Any
+
+from haystack.core.serialization import generate_qualified_class_name
+from haystack.tools import Tool
+
+from haystack_integrations.tools.e2b.e2b_sandbox import E2BSandbox
+
+
+class ListDirectoryTool(Tool):
+ """
+ A :class:`~haystack.tools.Tool` that lists directory contents in an E2B sandbox.
+
+ Pass the same :class:`E2BSandbox` instance to multiple tool classes so they
+ all operate in the same live sandbox environment.
+
+ ### Usage example
+
+ ```python
+ from haystack_integrations.tools.e2b import E2BSandbox, ListDirectoryTool
+
+ sandbox = E2BSandbox()
+ agent = Agent(chat_generator=..., tools=[ListDirectoryTool(sandbox=sandbox)])
+ ```
+ """
+
+ def __init__(self, sandbox: E2BSandbox) -> None:
+ """
+ Create a ListDirectoryTool.
+
+ :param sandbox: The :class:`E2BSandbox` instance to list directories from.
+ """
+
+ def list_directory(path: str) -> str:
+ sb = sandbox._require_sandbox()
+ try:
+ entries = sb.files.list(path)
+ lines = []
+ for entry in entries:
+ name = entry.name
+ if getattr(entry, "is_dir", False) or getattr(entry, "type", "") == "dir":
+ name = name + "/"
+ lines.append(name)
+ return "\n".join(lines) if lines else "(empty directory)"
+ except Exception as e:
+ msg = f"Failed to list directory '{path}': {e}"
+ raise RuntimeError(msg) from e
+
+ super().__init__(
+ name="list_directory",
+ description=(
+ "List the files and subdirectories inside a directory in the E2B sandbox "
+ "filesystem. Returns a newline-separated list of names with a trailing '/' "
+ "appended to subdirectory names."
+ ),
+ parameters={
+ "type": "object",
+ "properties": {
+ "path": {"type": "string", "description": "Absolute or relative path of the directory to list."}
+ },
+ "required": ["path"],
+ },
+ function=list_directory,
+ )
+ self._e2b_sandbox = sandbox
+
+ def to_dict(self) -> dict[str, Any]:
+ """Serialize this tool to a dictionary."""
+ return {
+ "type": generate_qualified_class_name(type(self)),
+ "data": {"sandbox": self._e2b_sandbox.to_dict()},
+ }
+
+ @classmethod
+ def from_dict(cls, data: dict[str, Any]) -> "ListDirectoryTool":
+ """Deserialize a ListDirectoryTool from a dictionary."""
+ sandbox = E2BSandbox.from_dict(data["data"]["sandbox"])
+ return cls(sandbox=sandbox)
diff --git a/integrations/e2b/src/haystack_integrations/tools/e2b/py.typed b/integrations/e2b/src/haystack_integrations/tools/e2b/py.typed
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/integrations/e2b/src/haystack_integrations/tools/e2b/read_file_tool.py b/integrations/e2b/src/haystack_integrations/tools/e2b/read_file_tool.py
new file mode 100644
index 0000000000..8ca8aa5b46
--- /dev/null
+++ b/integrations/e2b/src/haystack_integrations/tools/e2b/read_file_tool.py
@@ -0,0 +1,76 @@
+# SPDX-FileCopyrightText: 2022-present deepset GmbH
+#
+# SPDX-License-Identifier: Apache-2.0
+
+from typing import Any
+
+from haystack.core.serialization import generate_qualified_class_name
+from haystack.tools import Tool
+
+from haystack_integrations.tools.e2b.e2b_sandbox import E2BSandbox
+
+
+class ReadFileTool(Tool):
+ """
+ A :class:`~haystack.tools.Tool` that reads files from an E2B sandbox filesystem.
+
+ Pass the same :class:`E2BSandbox` instance to multiple tool classes so they
+ all operate in the same live sandbox environment.
+
+ ### Usage example
+
+ ```python
+ from haystack_integrations.tools.e2b import E2BSandbox, ReadFileTool
+
+ sandbox = E2BSandbox()
+ agent = Agent(chat_generator=..., tools=[ReadFileTool(sandbox=sandbox)])
+ ```
+ """
+
+ def __init__(self, sandbox: E2BSandbox) -> None:
+ """
+ Create a ReadFileTool.
+
+ :param sandbox: The :class:`E2BSandbox` instance to read files from.
+ """
+
+ def read_file(path: str) -> str:
+ sb = sandbox._require_sandbox()
+ try:
+ content = sb.files.read(path)
+ if isinstance(content, bytes):
+ return content.decode("utf-8", errors="replace")
+ return str(content)
+ except Exception as e:
+ msg = f"Failed to read file '{path}': {e}"
+ raise RuntimeError(msg) from e
+
+ super().__init__(
+ name="read_file",
+ description=(
+ "Read the text content of a file from the E2B sandbox filesystem and return it "
+ "as a string. The file must exist; use list_directory to verify paths first."
+ ),
+ parameters={
+ "type": "object",
+ "properties": {
+ "path": {"type": "string", "description": "Absolute or relative path of the file to read."}
+ },
+ "required": ["path"],
+ },
+ function=read_file,
+ )
+ self._e2b_sandbox = sandbox
+
+ def to_dict(self) -> dict[str, Any]:
+ """Serialize this tool to a dictionary."""
+ return {
+ "type": generate_qualified_class_name(type(self)),
+ "data": {"sandbox": self._e2b_sandbox.to_dict()},
+ }
+
+ @classmethod
+ def from_dict(cls, data: dict[str, Any]) -> "ReadFileTool":
+ """Deserialize a ReadFileTool from a dictionary."""
+ sandbox = E2BSandbox.from_dict(data["data"]["sandbox"])
+ return cls(sandbox=sandbox)
diff --git a/integrations/e2b/src/haystack_integrations/tools/e2b/sandbox_toolset.py b/integrations/e2b/src/haystack_integrations/tools/e2b/sandbox_toolset.py
new file mode 100644
index 0000000000..09acf3f85d
--- /dev/null
+++ b/integrations/e2b/src/haystack_integrations/tools/e2b/sandbox_toolset.py
@@ -0,0 +1,97 @@
+# SPDX-FileCopyrightText: 2022-present deepset GmbH
+#
+# SPDX-License-Identifier: Apache-2.0
+
+from typing import Any
+
+from haystack.core.serialization import generate_qualified_class_name
+from haystack.tools import Toolset
+from haystack.utils import Secret, deserialize_secrets_inplace
+
+from haystack_integrations.tools.e2b.bash_tool import RunBashCommandTool
+from haystack_integrations.tools.e2b.e2b_sandbox import E2BSandbox
+from haystack_integrations.tools.e2b.list_directory_tool import ListDirectoryTool
+from haystack_integrations.tools.e2b.read_file_tool import ReadFileTool
+from haystack_integrations.tools.e2b.write_file_tool import WriteFileTool
+
+
+class E2BToolset(Toolset):
+ """
+ A :class:`~haystack.tools.Toolset` that bundles all E2B sandbox tools.
+
+ All tools in the set share a single :class:`E2BSandbox` instance so they
+ operate inside the same live sandbox process. The toolset owns the sandbox
+ lifecycle: calling :meth:`warm_up` starts the sandbox, and serialisation
+ round-trips preserve the shared-sandbox relationship.
+
+ ### Usage example
+
+ ```python
+ from haystack.components.generators.chat import OpenAIChatGenerator
+ from haystack.components.agents import Agent
+
+ from haystack_integrations.tools.e2b import E2BToolset
+
+ agent = Agent(
+ chat_generator=OpenAIChatGenerator(model="gpt-4o"),
+ tools=E2BToolset(),
+ )
+ ```
+ """
+
+ def __init__(
+ self,
+ api_key: Secret | None = None,
+ sandbox_template: str = "base",
+ timeout: int = 120,
+ environment_vars: dict[str, str] | None = None,
+ ) -> None:
+ """
+ Create an E2BToolset.
+
+ :param api_key: E2B API key. Defaults to ``Secret.from_env_var("E2B_API_KEY")``.
+ :param sandbox_template: E2B sandbox template name. Defaults to ``"base"``.
+ :param timeout: Sandbox inactivity timeout in seconds. Defaults to ``120``.
+ :param environment_vars: Optional environment variables to inject into the sandbox.
+ """
+ self.sandbox = E2BSandbox(
+ api_key=api_key,
+ sandbox_template=sandbox_template,
+ timeout=timeout,
+ environment_vars=environment_vars,
+ )
+ super().__init__(
+ tools=[
+ RunBashCommandTool(sandbox=self.sandbox),
+ ReadFileTool(sandbox=self.sandbox),
+ WriteFileTool(sandbox=self.sandbox),
+ ListDirectoryTool(sandbox=self.sandbox),
+ ]
+ )
+
+ def warm_up(self) -> None:
+ """Start the shared E2B sandbox (idempotent)."""
+ self.sandbox.warm_up()
+
+ def close(self) -> None:
+ """Shut down the shared E2B sandbox and release cloud resources."""
+ self.sandbox.close()
+
+ def to_dict(self) -> dict[str, Any]:
+ """Serialize this toolset to a dictionary."""
+ return {
+ "type": generate_qualified_class_name(type(self)),
+ "data": self.sandbox.to_dict()["data"],
+ }
+
+ @classmethod
+ def from_dict(cls, data: dict[str, Any]) -> "E2BToolset":
+ """Deserialize an E2BToolset from a dictionary."""
+ inner = data["data"]
+ deserialize_secrets_inplace(inner, keys=["api_key"])
+ return cls(
+ api_key=inner["api_key"],
+ sandbox_template=inner.get("sandbox_template", "base"),
+ timeout=inner.get("timeout", 120),
+ environment_vars=inner.get("environment_vars", {}),
+ )
diff --git a/integrations/e2b/src/haystack_integrations/tools/e2b/write_file_tool.py b/integrations/e2b/src/haystack_integrations/tools/e2b/write_file_tool.py
new file mode 100644
index 0000000000..62086f6fe1
--- /dev/null
+++ b/integrations/e2b/src/haystack_integrations/tools/e2b/write_file_tool.py
@@ -0,0 +1,76 @@
+# SPDX-FileCopyrightText: 2022-present deepset GmbH
+#
+# SPDX-License-Identifier: Apache-2.0
+
+from typing import Any
+
+from haystack.core.serialization import generate_qualified_class_name
+from haystack.tools import Tool
+
+from haystack_integrations.tools.e2b.e2b_sandbox import E2BSandbox
+
+
+class WriteFileTool(Tool):
+ """
+ A :class:`~haystack.tools.Tool` that writes files to an E2B sandbox filesystem.
+
+ Pass the same :class:`E2BSandbox` instance to multiple tool classes so they
+ all operate in the same live sandbox environment.
+
+ ### Usage example
+
+ ```python
+ from haystack_integrations.tools.e2b import E2BSandbox, WriteFileTool
+
+ sandbox = E2BSandbox()
+ agent = Agent(chat_generator=..., tools=[WriteFileTool(sandbox=sandbox)])
+ ```
+ """
+
+ def __init__(self, sandbox: E2BSandbox) -> None:
+ """
+ Create a WriteFileTool.
+
+ :param sandbox: The :class:`E2BSandbox` instance to write files to.
+ """
+
+ def write_file(path: str, content: str) -> str:
+ sb = sandbox._require_sandbox()
+ try:
+ sb.files.write(path, content)
+ return f"File written successfully: {path}"
+ except Exception as e:
+ msg = f"Failed to write file '{path}': {e}"
+ raise RuntimeError(msg) from e
+
+ super().__init__(
+ name="write_file",
+ description=(
+ "Write text content to a file in the E2B sandbox filesystem. "
+ "Parent directories are created automatically if they do not exist. "
+ "Existing files are overwritten."
+ ),
+ parameters={
+ "type": "object",
+ "properties": {
+ "path": {"type": "string", "description": "Absolute or relative path of the file to write."},
+ "content": {"type": "string", "description": "Text content to write into the file."},
+ },
+ "required": ["path", "content"],
+ },
+ function=write_file,
+ )
+ self._e2b_sandbox = sandbox
+
+ def to_dict(self) -> dict[str, Any]:
+ """Serialize this tool to a dictionary."""
+ return {
+ "type": generate_qualified_class_name(type(self)),
+ "data": {"sandbox": self._e2b_sandbox.to_dict()},
+ }
+
+ @classmethod
+ def from_dict(cls, data: dict[str, Any]) -> "WriteFileTool":
+ """Deserialize a WriteFileTool from a dictionary."""
+ sandbox = E2BSandbox.from_dict(data["data"]["sandbox"])
+ return cls(sandbox=sandbox)
diff --git a/integrations/e2b/src/haystack_integrations/tools/py.typed b/integrations/e2b/src/haystack_integrations/tools/py.typed
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/integrations/e2b/tests/__init__.py b/integrations/e2b/tests/__init__.py
new file mode 100644
index 0000000000..c1764a6e03
--- /dev/null
+++ b/integrations/e2b/tests/__init__.py
@@ -0,0 +1,3 @@
+# SPDX-FileCopyrightText: 2022-present deepset GmbH
+#
+# SPDX-License-Identifier: Apache-2.0
diff --git a/integrations/e2b/tests/test_integration.py b/integrations/e2b/tests/test_integration.py
new file mode 100644
index 0000000000..2a2b56a3e2
--- /dev/null
+++ b/integrations/e2b/tests/test_integration.py
@@ -0,0 +1,117 @@
+# SPDX-FileCopyrightText: 2022-present deepset GmbH
+#
+# SPDX-License-Identifier: Apache-2.0
+
+"""
+Integration tests for the E2B sandbox tools.
+
+These tests require a valid E2B_API_KEY environment variable and will
+spin up a real cloud sandbox on each run.
+"""
+
+import pytest
+
+from haystack_integrations.tools.e2b import (
+ E2BSandbox,
+ E2BToolset,
+ ListDirectoryTool,
+ ReadFileTool,
+ RunBashCommandTool,
+ WriteFileTool,
+)
+
+
+@pytest.fixture(scope="module")
+def sandbox():
+ """Shared sandbox for the module — spun up once, torn down after all tests."""
+ sb = E2BSandbox()
+ sb.warm_up()
+ yield sb
+ sb.close()
+
+
+@pytest.mark.integration
+class TestRunBashCommandToolIntegration:
+    """Exercises RunBashCommandTool against the live, module-shared sandbox fixture."""
+
+    def test_echo_command(self, sandbox):
+        # Happy path: stdout is captured and exit_code 0 appears in the result string.
+        tool = RunBashCommandTool(sandbox=sandbox)
+        result = tool.invoke(command="echo 'hello from e2b'")
+        assert "hello from e2b" in result
+        assert "exit_code: 0" in result
+
+    def test_exit_code_nonzero(self, sandbox):
+        # Non-zero exits are reported as results, not raised as errors.
+        tool = RunBashCommandTool(sandbox=sandbox)
+        result = tool.invoke(command="exit 42")
+        assert "exit_code: 42" in result
+
+    def test_stderr_captured(self, sandbox):
+        # Output written to stderr is included in the returned string.
+        tool = RunBashCommandTool(sandbox=sandbox)
+        result = tool.invoke(command="echo error_msg >&2")
+        assert "error_msg" in result
+
+
+@pytest.mark.integration
+class TestWriteAndReadFileToolIntegration:
+    """Round-trips file content through WriteFileTool and ReadFileTool on a live sandbox."""
+
+    def test_write_then_read(self, sandbox):
+        write_tool = WriteFileTool(sandbox=sandbox)
+        read_tool = ReadFileTool(sandbox=sandbox)
+
+        write_result = write_tool.invoke(path="/tmp/test_haystack.txt", content="haystack e2b integration")
+        assert "/tmp/test_haystack.txt" in write_result
+
+        # Reading through a second tool instance shows both tools hit the same sandbox.
+        read_result = read_tool.invoke(path="/tmp/test_haystack.txt")
+        assert read_result == "haystack e2b integration"
+
+    def test_write_creates_parent_dirs(self, sandbox):
+        write_tool = WriteFileTool(sandbox=sandbox)
+        read_tool = ReadFileTool(sandbox=sandbox)
+
+        # Nested path presumably absent in the fresh sandbox; the write must create it.
+        write_tool.invoke(path="/tmp/e2b_test_dir/nested/file.txt", content="nested content")
+        result = read_tool.invoke(path="/tmp/e2b_test_dir/nested/file.txt")
+        assert result == "nested content"
+
+
+@pytest.mark.integration
+class TestListDirectoryToolIntegration:
+    """Exercises ListDirectoryTool against the live, module-shared sandbox fixture."""
+
+    def test_list_tmp(self, sandbox):
+        tool = ListDirectoryTool(sandbox=sandbox)
+        result = tool.invoke(path="/tmp")
+        # /tmp always exists and is listable; result is a newline-separated string or "(empty directory)"
+        assert isinstance(result, str)
+
+    def test_lists_written_file(self, sandbox):
+        write_tool = WriteFileTool(sandbox=sandbox)
+        list_tool = ListDirectoryTool(sandbox=sandbox)
+
+        # Write a file first, then confirm listing its parent directory shows it.
+        write_tool.invoke(path="/tmp/e2b_list_test/myfile.txt", content="data")
+        result = list_tool.invoke(path="/tmp/e2b_list_test")
+        assert "myfile.txt" in result
+
+
+@pytest.mark.integration
+class TestE2BToolsetIntegration:
+    """End-to-end checks of E2BToolset lifecycle and shared-sandbox behavior."""
+
+    def test_toolset_warm_up_and_close(self):
+        ts = E2BToolset()
+        ts.warm_up()
+        # Verify sandbox is live by running a command through the bash tool
+        bash_tool = next(t for t in ts if t.name == "run_bash_command")
+        result = bash_tool.invoke(command="echo 'toolset ok'")
+        assert "toolset ok" in result
+        ts.close()
+
+    def test_all_tools_share_sandbox(self):
+        ts = E2BToolset()
+        ts.warm_up()
+
+        # Look tools up by name rather than position so the test survives reordering.
+        write_tool = next(t for t in ts if t.name == "write_file")
+        read_tool = next(t for t in ts if t.name == "read_file")
+        bash_tool = next(t for t in ts if t.name == "run_bash_command")
+
+        # Write via write_file, read back via bash — proves shared sandbox
+        write_tool.invoke(path="/tmp/shared_test.txt", content="shared sandbox state")
+        bash_result = bash_tool.invoke(command="cat /tmp/shared_test.txt")
+        assert "shared sandbox state" in bash_result
+
+        read_result = read_tool.invoke(path="/tmp/shared_test.txt")
+        assert read_result == "shared sandbox state"
+
+        ts.close()
diff --git a/integrations/e2b/tests/test_sandbox_toolset.py b/integrations/e2b/tests/test_sandbox_toolset.py
new file mode 100644
index 0000000000..e392cf8ec4
--- /dev/null
+++ b/integrations/e2b/tests/test_sandbox_toolset.py
@@ -0,0 +1,433 @@
+# SPDX-FileCopyrightText: 2022-present deepset GmbH
+#
+# SPDX-License-Identifier: Apache-2.0
+
+from unittest.mock import MagicMock, patch
+
+import pytest
+from haystack.tools.errors import ToolInvocationError
+from haystack.utils import Secret
+
+from haystack_integrations.tools.e2b.bash_tool import RunBashCommandTool
+from haystack_integrations.tools.e2b.e2b_sandbox import E2BSandbox
+from haystack_integrations.tools.e2b.list_directory_tool import ListDirectoryTool
+from haystack_integrations.tools.e2b.read_file_tool import ReadFileTool
+from haystack_integrations.tools.e2b.sandbox_toolset import E2BToolset
+from haystack_integrations.tools.e2b.write_file_tool import WriteFileTool
+
+# ---------------------------------------------------------------------------
+# Helpers
+# ---------------------------------------------------------------------------
+
+
+def _make_sandbox(**kwargs) -> E2BSandbox:
+    """Create an E2BSandbox with a dummy API key for testing."""
+    defaults = {"api_key": Secret.from_token("test-api-key")}  # token secret: no env-var lookup needed in unit tests
+    defaults.update(kwargs)  # caller-supplied kwargs override the default api_key or add other ctor args
+    return E2BSandbox(**defaults)
+
+
+def _make_sandbox_mock() -> MagicMock:
+    """Return a MagicMock that mimics the e2b Sandbox object."""
+    sandbox = MagicMock()
+    sandbox.sandbox_id = "sandbox-test-123"  # concrete id so any code reading sandbox_id gets a real string
+    return sandbox
+
+
+def _sandbox_with_mock() -> tuple[E2BSandbox, MagicMock]:
+    """Return an E2BSandbox that already has a mocked underlying sandbox."""
+    sb = _make_sandbox()
+    mock = _make_sandbox_mock()
+    sb._sandbox = mock  # inject directly, bypassing warm_up() and any network call it would make
+    return sb, mock  # both returned so tests can assert on the mock's call record
+
+
+# ---------------------------------------------------------------------------
+# E2BSandbox -- initialisation
+# ---------------------------------------------------------------------------
+
+
+class TestE2BSandboxInit:  # constructor defaults and parameter plumbing (no sandbox started)
+    def test_class_defaults(self):
+        """Verify the real class defaults, not values set by a helper."""
+        sandbox = E2BSandbox(api_key=Secret.from_token("test-api-key"))  # deliberately not via _make_sandbox
+        assert sandbox.sandbox_template == "base"
+        assert sandbox.timeout == 120
+        assert sandbox.environment_vars == {}
+        assert sandbox._sandbox is None  # nothing is created until warm_up()
+
+    def test_custom_parameters(self):  # every ctor arg lands on the matching attribute unchanged
+        sandbox = _make_sandbox(
+            sandbox_template="my-template",
+            timeout=600,
+            environment_vars={"FOO": "bar"},
+        )
+        assert sandbox.sandbox_template == "my-template"
+        assert sandbox.timeout == 600
+        assert sandbox.environment_vars == {"FOO": "bar"}
+
+
+# ---------------------------------------------------------------------------
+# E2BSandbox -- warm_up
+# ---------------------------------------------------------------------------
+
+
+class TestE2BSandboxWarmUp:  # warm_up() behavior with the e2b SDK fully mocked out
+    @patch("haystack_integrations.tools.e2b.e2b_sandbox.e2b_import")
+    @patch("haystack_integrations.tools.e2b.e2b_sandbox.Sandbox.create")
+    def test_warm_up_creates_sandbox(self, mock_sandbox_create, mock_e2b_import):  # patch decorators apply bottom-up: Sandbox.create -> first mock arg
+        mock_e2b_import.check.return_value = None  # pretend the optional e2b dependency is installed
+        mock_instance = _make_sandbox_mock()
+        mock_sandbox_create.return_value = mock_instance
+
+        sb = _make_sandbox(sandbox_template="base", timeout=120)
+        sb.warm_up()
+
+        mock_sandbox_create.assert_called_once_with(
+            api_key="test-api-key",  # Secret is resolved to a plain string before the SDK call
+            template="base",
+            timeout=120,
+            envs=None,  # empty environment_vars dict is passed through as None — TODO confirm intended mapping
+        )
+        assert sb._sandbox is mock_instance  # identity: the created object is stored as-is
+
+    @patch("haystack_integrations.tools.e2b.e2b_sandbox.e2b_import")
+    @patch("haystack_integrations.tools.e2b.e2b_sandbox.Sandbox.create")
+    def test_warm_up_passes_environment_vars(self, mock_sandbox_create, mock_e2b_import):
+        mock_e2b_import.check.return_value = None
+        mock_sandbox_create.return_value = _make_sandbox_mock()
+
+        sb = _make_sandbox(environment_vars={"MY_VAR": "value"})
+        sb.warm_up()
+
+        _, kwargs = mock_sandbox_create.call_args  # only the envs kwarg matters here
+        assert kwargs["envs"] == {"MY_VAR": "value"}
+
+    @patch("haystack_integrations.tools.e2b.e2b_sandbox.e2b_import")
+    @patch("haystack_integrations.tools.e2b.e2b_sandbox.Sandbox.create")
+    def test_warm_up_is_idempotent(self, mock_sandbox_create, mock_e2b_import):  # second warm_up must not spawn a second sandbox
+        mock_e2b_import.check.return_value = None
+        mock_sandbox_create.return_value = _make_sandbox_mock()
+
+        sb = _make_sandbox()
+        sb.warm_up()
+        sb.warm_up()
+
+        mock_sandbox_create.assert_called_once()
+
+    @patch("haystack_integrations.tools.e2b.e2b_sandbox.e2b_import")
+    @patch("haystack_integrations.tools.e2b.e2b_sandbox.Sandbox.create")
+    def test_warm_up_raises_on_sandbox_error(self, mock_sandbox_create, mock_e2b_import):  # SDK failures surface as RuntimeError
+        mock_e2b_import.check.return_value = None
+        mock_sandbox_create.side_effect = Exception("connection refused")
+
+        sb = _make_sandbox()
+        with pytest.raises(RuntimeError, match="Failed to start E2B sandbox"):
+            sb.warm_up()
+
+
+# ---------------------------------------------------------------------------
+# E2BSandbox -- close
+# ---------------------------------------------------------------------------
+
+
+class TestE2BSandboxClose:  # close() must be safe in every state: unstarted, running, and failing
+    def test_close_without_warm_up_is_noop(self):
+        sb = _make_sandbox()
+        sb.close()  # no sandbox yet — must not raise
+        assert sb._sandbox is None
+
+    def test_close_kills_sandbox(self):
+        sb, mock = _sandbox_with_mock()
+        sb.close()
+        mock.kill.assert_called_once()  # underlying sandbox is terminated via kill()
+        assert sb._sandbox is None  # reference is dropped so a later warm_up starts fresh
+
+    def test_close_clears_sandbox_on_kill_error(self):
+        sb, mock = _sandbox_with_mock()
+        mock.kill.side_effect = Exception("kill failed")
+        sb.close()  # must not raise
+        assert sb._sandbox is None  # reference cleared even when kill() blew up
+
+
+# ---------------------------------------------------------------------------
+# E2BSandbox -- serialisation
+# ---------------------------------------------------------------------------
+
+
+class TestE2BSandboxSerialisation:  # to_dict / from_dict round-trip behavior
+    def _make_env_sandbox(self, **kwargs) -> E2BSandbox:  # env-var secret here, not a token — presumably token secrets cannot be serialised; confirm against Secret docs
+        defaults = {"api_key": Secret.from_env_var("E2B_API_KEY")}
+        defaults.update(kwargs)
+        return E2BSandbox(**defaults)
+
+    def test_to_dict_contains_expected_keys(self):
+        sb = self._make_env_sandbox(sandbox_template="my-template", timeout=600)
+        data = sb.to_dict()
+
+        assert "type" in data  # Haystack-style envelope: {"type": ..., "data": {...}}
+        assert "data" in data
+        assert data["data"]["sandbox_template"] == "my-template"
+        assert data["data"]["timeout"] == 600
+
+    def test_to_dict_does_not_include_sandbox_instance(self):
+        sb = self._make_env_sandbox()
+        sb._sandbox = _make_sandbox_mock()  # simulate a live sandbox; it must never leak into serialised state
+        data = sb.to_dict()
+
+        assert "_sandbox" not in data["data"]
+        assert "sandbox" not in data["data"]
+
+    def test_from_dict_round_trip(self):
+        original = self._make_env_sandbox(
+            sandbox_template="custom",
+            timeout=900,
+            environment_vars={"KEY": "value"},
+        )
+        data = original.to_dict()
+        restored = E2BSandbox.from_dict(data)
+
+        assert restored.sandbox_template == "custom"
+        assert restored.timeout == 900
+        assert restored.environment_vars == {"KEY": "value"}
+        assert restored._sandbox is None  # deserialisation never resurrects a running sandbox
+
+    def test_to_dict_type_is_qualified_class_name(self):
+        sb = self._make_env_sandbox()
+        data = sb.to_dict()
+        assert "E2BSandbox" in data["type"]  # substring check: exact module path is not pinned
+
+
+# ---------------------------------------------------------------------------
+# Tool classes -- structure
+# ---------------------------------------------------------------------------
+
+
+class TestToolClasses:  # static structure of the four tools and of E2BToolset (no sandbox started)
+    def test_run_bash_command_tool_name_and_schema(self):
+        sb = _make_sandbox()
+        tool = RunBashCommandTool(sandbox=sb)
+        assert tool.name == "run_bash_command"
+        assert tool.description  # non-empty description is required for LLM tool use
+        assert "command" in tool.parameters["required"]  # JSON-schema style parameters dict
+
+    def test_read_file_tool_name_and_schema(self):
+        sb = _make_sandbox()
+        tool = ReadFileTool(sandbox=sb)
+        assert tool.name == "read_file"
+        assert tool.description
+        assert "path" in tool.parameters["required"]
+
+    def test_write_file_tool_name_and_schema(self):
+        sb = _make_sandbox()
+        tool = WriteFileTool(sandbox=sb)
+        assert tool.name == "write_file"
+        assert tool.description
+        assert "path" in tool.parameters["required"]
+        assert "content" in tool.parameters["required"]  # write needs both path and content
+
+    def test_list_directory_tool_name_and_schema(self):
+        sb = _make_sandbox()
+        tool = ListDirectoryTool(sandbox=sb)
+        assert tool.name == "list_directory"
+        assert tool.description
+        assert "path" in tool.parameters["required"]
+
+    def test_tool_stores_sandbox_reference(self):
+        sb = _make_sandbox()
+        tool = RunBashCommandTool(sandbox=sb)
+        assert tool._e2b_sandbox is sb  # identity, not equality: tool holds the exact instance
+
+    def test_e2b_toolset_contains_four_tools(self):
+        ts = E2BToolset(api_key=Secret.from_token("test-api-key"))
+        assert len(ts) == 4
+        names = {t.name for t in ts}  # toolset supports len() and iteration
+        assert names == {"run_bash_command", "read_file", "write_file", "list_directory"}
+
+    def test_e2b_toolset_has_correct_tool_types(self):
+        ts = E2BToolset(api_key=Secret.from_token("test-api-key"))
+        tool_types = {type(t) for t in ts}
+        assert tool_types == {RunBashCommandTool, ReadFileTool, WriteFileTool, ListDirectoryTool}
+
+    def test_e2b_toolset_shares_same_sandbox(self):
+        ts = E2BToolset(api_key=Secret.from_token("test-api-key"))
+        assert all(t._e2b_sandbox is ts.sandbox for t in ts)  # one E2BSandbox shared by all four tools
+
+        mock = _make_sandbox_mock()
+        mock.commands.run.return_value = MagicMock(exit_code=0, stdout="ok", stderr="")
+        ts.sandbox._sandbox = mock  # inject mock on the shared sandbox; every tool should see it
+
+        bash_tool = next(t for t in ts if t.name == "run_bash_command")
+        bash_tool.invoke(command="echo ok")
+
+        mock.commands.run.assert_called_once()  # proves the tool routed through the shared sandbox
+
+    def test_e2b_toolset_default_api_key(self):
+        """E2BToolset uses E2B_API_KEY env var when api_key is omitted."""
+        ts = E2BToolset()  # NOTE(review): assumes the default Secret is built without resolving the env var — confirm this passes when E2B_API_KEY is unset
+        assert ts.sandbox.api_key is not None
+
+    def test_tools_from_same_sandbox_share_state(self):
+        """Tools instantiated with the same sandbox share state."""
+        sb = _make_sandbox()
+        bash_tool = RunBashCommandTool(sandbox=sb)
+        read_tool = ReadFileTool(sandbox=sb)
+        assert bash_tool._e2b_sandbox is read_tool._e2b_sandbox  # same instance, so sandbox state is shared
+
+
+# ---------------------------------------------------------------------------
+# RunBashCommandTool behaviour
+# ---------------------------------------------------------------------------
+
+
+class TestRunBashCommandTool:  # invoke() behavior with the underlying sandbox mocked
+    def test_returns_formatted_output(self):
+        sb, mock = _sandbox_with_mock()
+        mock_result = MagicMock(exit_code=0, stdout="hello world\n", stderr="")  # mirrors the e2b command result shape
+        mock.commands.run.return_value = mock_result
+        tool = RunBashCommandTool(sandbox=sb)
+
+        output = tool.invoke(command="echo hello world")
+
+        assert "exit_code: 0" in output  # output embeds the exit code as text
+        assert "hello world" in output
+        mock.commands.run.assert_called_once_with("echo hello world", timeout=60)  # 60s is the tool's default timeout
+
+    def test_passes_custom_timeout(self):
+        sb, mock = _sandbox_with_mock()
+        mock.commands.run.return_value = MagicMock(exit_code=0, stdout="", stderr="")
+        tool = RunBashCommandTool(sandbox=sb)
+
+        tool.invoke(command="sleep 5", timeout=30)
+
+        mock.commands.run.assert_called_once_with("sleep 5", timeout=30)  # caller-supplied timeout overrides the default
+
+    def test_raises_when_no_sandbox(self):
+        sb = _make_sandbox()  # no _sandbox injected — warm_up never ran
+        tool = RunBashCommandTool(sandbox=sb)
+        with pytest.raises(ToolInvocationError, match="E2B sandbox is not running"):
+            tool.invoke(command="ls")
+
+    def test_wraps_sandbox_exception(self):
+        sb, mock = _sandbox_with_mock()
+        mock.commands.run.side_effect = Exception("timeout")
+        tool = RunBashCommandTool(sandbox=sb)
+        with pytest.raises(ToolInvocationError, match="Failed to run bash command"):  # raw SDK errors are wrapped
+            tool.invoke(command="sleep 1000")
+
+
+# ---------------------------------------------------------------------------
+# ReadFileTool behaviour
+# ---------------------------------------------------------------------------
+
+
+class TestReadFileTool:  # invoke() behavior for file reads, sandbox mocked
+    def test_returns_string(self):
+        sb, mock = _sandbox_with_mock()
+        mock.files.read.return_value = "file content"
+        tool = ReadFileTool(sandbox=sb)
+
+        result = tool.invoke(path="/some/file.txt")
+
+        assert result == "file content"  # text reads pass straight through
+        mock.files.read.assert_called_once_with("/some/file.txt")
+
+    def test_decodes_bytes(self):
+        sb, mock = _sandbox_with_mock()
+        mock.files.read.return_value = b"binary content"  # SDK may return bytes
+        tool = ReadFileTool(sandbox=sb)
+
+        result = tool.invoke(path="/binary.bin")
+
+        assert result == "binary content"  # bytes are decoded to str before returning
+
+    def test_raises_when_no_sandbox(self):
+        sb = _make_sandbox()  # warm_up never ran
+        tool = ReadFileTool(sandbox=sb)
+        with pytest.raises(ToolInvocationError, match="E2B sandbox is not running"):
+            tool.invoke(path="/some/file.txt")
+
+    def test_wraps_sandbox_exception(self):
+        sb, mock = _sandbox_with_mock()
+        mock.files.read.side_effect = Exception("file not found")
+        tool = ReadFileTool(sandbox=sb)
+        with pytest.raises(ToolInvocationError, match="Failed to read file"):  # raw SDK errors are wrapped
+            tool.invoke(path="/nonexistent.txt")
+
+
+# ---------------------------------------------------------------------------
+# WriteFileTool behaviour
+# ---------------------------------------------------------------------------
+
+
+class TestWriteFileTool:  # invoke() behavior for file writes, sandbox mocked
+    def test_returns_confirmation(self):
+        sb, mock = _sandbox_with_mock()
+        tool = WriteFileTool(sandbox=sb)
+
+        result = tool.invoke(path="/output/result.txt", content="hello")
+
+        assert "/output/result.txt" in result  # confirmation message mentions the written path
+        mock.files.write.assert_called_once_with("/output/result.txt", "hello")
+
+    def test_raises_when_no_sandbox(self):
+        sb = _make_sandbox()  # warm_up never ran
+        tool = WriteFileTool(sandbox=sb)
+        with pytest.raises(ToolInvocationError, match="E2B sandbox is not running"):
+            tool.invoke(path="/some/path.txt", content="content")
+
+    def test_wraps_sandbox_exception(self):
+        sb, mock = _sandbox_with_mock()
+        mock.files.write.side_effect = Exception("permission denied")
+        tool = WriteFileTool(sandbox=sb)
+        with pytest.raises(ToolInvocationError, match="Failed to write file"):  # raw SDK errors are wrapped
+            tool.invoke(path="/protected/file.txt", content="data")
+
+
+# ---------------------------------------------------------------------------
+# ListDirectoryTool behaviour
+# ---------------------------------------------------------------------------
+
+
+class TestListDirectoryTool:  # invoke() behavior for directory listings, sandbox mocked
+    def _make_entry(self, name: str, is_dir: bool = False) -> MagicMock:  # build one fake e2b directory entry
+        entry = MagicMock()
+        entry.name = name  # assigned post-construction: MagicMock(name=...) treats `name` specially
+        entry.is_dir = is_dir
+        return entry
+
+    def test_returns_names(self):
+        sb, mock = _sandbox_with_mock()
+        mock.files.list.return_value = [
+            self._make_entry("file.txt"),
+            self._make_entry("subdir", is_dir=True),
+        ]
+        tool = ListDirectoryTool(sandbox=sb)
+
+        result = tool.invoke(path="/home/user")
+
+        assert "file.txt" in result
+        assert "subdir/" in result  # directories are suffixed with "/" in the listing
+        mock.files.list.assert_called_once_with("/home/user")
+
+    def test_empty_directory(self):
+        sb, mock = _sandbox_with_mock()
+        mock.files.list.return_value = []
+        tool = ListDirectoryTool(sandbox=sb)
+
+        result = tool.invoke(path="/empty")
+
+        assert result == "(empty directory)"  # sentinel string, not an empty string
+
+    def test_raises_when_no_sandbox(self):
+        sb = _make_sandbox()  # warm_up never ran
+        tool = ListDirectoryTool(sandbox=sb)
+        with pytest.raises(ToolInvocationError, match="E2B sandbox is not running"):
+            tool.invoke(path="/home")
+
+    def test_wraps_sandbox_exception(self):
+        sb, mock = _sandbox_with_mock()
+        mock.files.list.side_effect = Exception("not a directory")
+        tool = ListDirectoryTool(sandbox=sb)
+        with pytest.raises(ToolInvocationError, match="Failed to list directory"):  # raw SDK errors are wrapped
+            tool.invoke(path="/nonexistent")