Co-authored-by: Bastien <bastien.baret@gmail.com>
Co-authored-by: Laure Hugo <201583486+laure0303@users.noreply.github.com>
Co-authored-by: Michel Thomazo <51709227+michelTho@users.noreply.github.com>
Co-authored-by: Paul Cacheux <paul.cacheux@mistral.ai>
Co-authored-by: Val <102326092+vdeva@users.noreply.github.com>
Co-authored-by: Mistral Vibe <vibe@mistral.ai>
This commit is contained in:
Clément Drouin
2026-04-03 15:56:50 +02:00
committed by GitHub
parent 9c1c32e058
commit 90763daf81
61 changed files with 6046 additions and 694 deletions

View File

@@ -14,46 +14,69 @@ jobs:
contents: read
outputs:
matrix: ${{ steps.set-matrix.outputs.matrix }}
smoke_matrix: ${{ steps.set-matrix.outputs.smoke_matrix }}
steps:
- name: Set matrix
id: set-matrix
run: |
# Linux: manylinux_2_28 containers → binary works on glibc >= 2.28 (RHEL 8+, Ubuntu 20.04+)
if [[ "${{ github.repository }}" == "mistralai/mistral-vibe" ]]; then
matrix='{
"include": [
{"runner": "ubuntu-22.04", "os": "linux", "arch": "x86_64"},
{"runner": "ubuntu-22.04-arm", "os": "linux", "arch": "aarch64"},
{"runner": "ubuntu-22.04", "os": "linux", "arch": "x86_64", "container": "quay.io/pypa/manylinux_2_28_x86_64"},
{"runner": "ubuntu-22.04-arm", "os": "linux", "arch": "aarch64", "container": "quay.io/pypa/manylinux_2_28_aarch64"},
{"runner": "macos-15-intel", "os": "darwin", "arch": "x86_64"},
{"runner": "macos-14", "os": "darwin", "arch": "aarch64"},
{"runner": "windows-2022", "os": "windows", "arch": "x86_64"},
{"runner": "windows-11-arm", "os": "windows", "arch": "aarch64"}
]
}'
smoke_matrix='{
"include": [
{"runner": "ubuntu-24.04", "os": "linux", "arch": "x86_64"},
{"runner": "ubuntu-24.04", "os": "linux", "arch": "x86_64", "tag": "old-glibc", "container": "almalinux:8"},
{"runner": "ubuntu-24.04-arm", "os": "linux", "arch": "aarch64"},
{"runner": "macos-15", "os": "darwin", "arch": "aarch64"},
{"runner": "macos-15-intel", "os": "darwin", "arch": "x86_64"},
{"runner": "windows-latest", "os": "windows", "arch": "x86_64"},
{"runner": "windows-11-arm", "os": "windows", "arch": "aarch64"}
]
}'
else # skip ARM Linux/Windows (runners not available on non public repos)
matrix='{
"include": [
{"runner": "ubuntu-22.04", "os": "linux", "arch": "x86_64"},
{"runner": "ubuntu-22.04", "os": "linux", "arch": "x86_64", "container": "quay.io/pypa/manylinux_2_28_x86_64"},
{"runner": "macos-15-intel", "os": "darwin", "arch": "x86_64"},
{"runner": "macos-14", "os": "darwin", "arch": "aarch64"},
{"runner": "windows-2022", "os": "windows", "arch": "x86_64"}
]
}'
smoke_matrix='{
"include": [
{"runner": "ubuntu-24.04", "os": "linux", "arch": "x86_64"},
{"runner": "ubuntu-24.04", "os": "linux", "arch": "x86_64", "tag": "old-glibc", "container": "almalinux:8"},
{"runner": "macos-15", "os": "darwin", "arch": "aarch64"},
{"runner": "macos-15-intel", "os": "darwin", "arch": "x86_64"},
{"runner": "windows-latest", "os": "windows", "arch": "x86_64"}
]
}'
fi
echo "matrix=$(echo $matrix | jq -c .)" >> $GITHUB_OUTPUT
echo "smoke_matrix=$(echo $smoke_matrix | jq -c .)" >> $GITHUB_OUTPUT
build-and-upload:
needs: configure
name: ${{ matrix.os }}-${{ matrix.arch }}
name: "Build and upload: ${{ matrix.os }}-${{ matrix.arch }}"
permissions:
contents: read
strategy:
matrix: ${{ fromJSON(needs.configure.outputs.matrix) }}
runs-on: ${{ matrix.runner }}
container: ${{ matrix.container || '' }}
steps:
- name: echo
run: echo github.repository=${{ github.repository }}
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5
@@ -65,44 +88,115 @@ jobs:
cache-dependency-glob: "uv.lock"
- name: Set up Python
if: ${{ matrix.os != 'linux' }}
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5
with:
python-version: "3.12"
- name: Install Python (Linux)
if: ${{ matrix.os == 'linux' }}
run: |
uv python install 3.12
# Install patchelf >= 0.18 (yum/EPEL8 ships 0.12 which lacks --clear-execstack)
PATCHELF_VERSION=0.18.0
curl -sL "https://github.com/NixOS/patchelf/releases/download/${PATCHELF_VERSION}/patchelf-${PATCHELF_VERSION}-$(uname -m).tar.gz" \
| tar xz -C /usr/local
# python-build-standalone ships libpython with GNU_STACK RWE (executable stack)
# which is rejected by hardened Linux kernels — clear it with patchelf
find "$(uv python dir)" -name 'libpython*.so*' -exec patchelf --clear-execstack {} \;
- name: Sync dependencies
run: uv sync --no-dev --group build
- name: Build with PyInstaller
run: uv run --no-dev --group build pyinstaller vibe-acp.spec
- name: Get package version with uv (Unix)
id: get_version_unix
if: ${{ matrix.os != 'windows' }}
run: python -c "import subprocess; version = subprocess.check_output(['uv', 'version']).decode().split()[1]; print(f'version={version}')" >> $GITHUB_OUTPUT
- name: Clear executable stack on bundled libraries
if: ${{ matrix.os == 'linux' }}
run: |
find dist/vibe-acp-dir/_internal -name '*.so*' -type f -print0 \
| xargs -0 -I{} patchelf --clear-execstack {}
patchelf --clear-execstack dist/vibe-acp-dir/vibe-acp || true
- name: Get package version with uv (Windows)
id: get_version_windows
if: ${{ matrix.os == 'windows' }}
shell: pwsh
run: python -c "import subprocess; version = subprocess.check_output(['uv', 'version']).decode().split()[1]; print(f'version={version}')" >> $env:GITHUB_OUTPUT
- name: Smoke test bundled binary (Unix)
if: ${{ matrix.os != 'windows' }}
run: ./dist/vibe-acp-dir/vibe-acp --version
- name: Smoke test bundled binary (Windows)
if: ${{ matrix.os == 'windows' }}
shell: pwsh
run: .\dist\vibe-acp-dir\vibe-acp.exe --version
- name: Get package version
id: get_version
shell: bash
run: echo "version=$(uv version | cut -d' ' -f2)" >> $GITHUB_OUTPUT
- name: Upload binary as artifact
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5
with:
name: vibe-acp-${{ matrix.os }}-${{ matrix.arch }}-${{ matrix.os == 'windows' && steps.get_version_windows.outputs.version || steps.get_version_unix.outputs.version }}
name: vibe-acp-${{ matrix.os }}-${{ matrix.arch }}-${{ steps.get_version.outputs.version }}
path: dist/vibe-acp-dir/
nix-build:
needs: configure
name: "Nix build and upload: ${{ matrix.os }}-${{ matrix.arch }}"
permissions:
contents: read
strategy:
matrix: ${{ fromJSON(needs.configure.outputs.matrix) }}
runs-on: ${{ matrix.runner }}
steps:
- name: Checkout repository
if: matrix.os != 'windows'
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5
- name: Install Nix
if: matrix.os != 'windows'
uses: cachix/install-nix-action@4e002c8ec80594ecd40e759629461e26c8abed15 # v31
- name: Build with Nix
if: matrix.os != 'windows'
shell: bash
run: |
nix build .#
- name: Nix Smoke Test
if: matrix.os != 'windows'
shell: bash
run: |
nix run .# -- --version
smoke-test:
needs: [configure, build-and-upload]
name: "Test: ${{ matrix.os }}-${{ matrix.arch }}${{ matrix.tag && format('-{0}', matrix.tag) || '' }}"
permissions:
contents: read
strategy:
matrix: ${{ fromJSON(needs.configure.outputs.smoke_matrix) }}
runs-on: ${{ matrix.runner }}
container: ${{ matrix.container || '' }}
steps:
- name: Checkout repository
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5
- name: Set up Python
if: ${{ !matrix.container }}
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5
with:
python-version: "3.12"
- name: Set up Python (container)
if: ${{ matrix.container }}
run: yum install -y python3.11 python3.11-pip
- name: Install smoke test deps
run: ${{ matrix.container && 'python3.11' || 'python' }} -m pip install agent-client-protocol==0.9.0
- name: Download artifact
uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5
with:
pattern: vibe-acp-${{ matrix.os }}-${{ matrix.arch }}-*
merge-multiple: true
path: dist/vibe-acp-dir
- name: Run smoke tests
run: ${{ matrix.container && 'python3.11' || 'python' }} tests/acp/smoke_binary.py dist/vibe-acp-dir
attach-to-release:
needs: build-and-upload
needs: [build-and-upload, smoke-test]
runs-on: ubuntu-latest
if: github.event_name == 'release'
permissions:
@@ -130,32 +224,3 @@ jobs:
files: release-assets/*.zip
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
nix-build:
needs: configure
name: "Nix: ${{ matrix.os }}-${{ matrix.arch }}"
permissions:
contents: read
strategy:
matrix: ${{ fromJSON(needs.configure.outputs.matrix) }}
runs-on: ${{ matrix.runner }}
steps:
- name: Checkout repository
if: matrix.os != 'windows'
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5
- name: Install Nix
if: matrix.os != 'windows'
uses: cachix/install-nix-action@4e002c8ec80594ecd40e759629461e26c8abed15 # v31
- name: Build with Nix
if: matrix.os != 'windows'
shell: bash
run: |
nix build .#
- name: Nix Smoke Test
if: matrix.os != 'windows'
shell: bash
run: |
nix run .# -- --version

2
.vscode/launch.json vendored
View File

@@ -1,5 +1,5 @@
{
"version": "2.7.2",
"version": "2.7.3",
"configurations": [
{
"name": "ACP Server",

View File

@@ -124,7 +124,7 @@ guidelines:
- title: "Use uv for All Commands"
description: >
We use uv to manage our python environment. You should never try to run bare python commands.
We use uv to manage our python environment. You should never try to run bare python commands.
Always run commands using `uv` instead of invoking `python` or `pip` directly.
For example, use `uv add package` and `uv run script.py` rather than `pip install package` or `python script.py`.
This practice helps avoid environment drift and leverages modern Python packaging best practices.

View File

@@ -5,6 +5,13 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [2.7.3] - 2026-04-03
### Added
- `/data-retention` slash command to view Mistral AI's data retention notice and privacy settings
## [2.7.2] - 2026-04-01
### Added

View File

@@ -1,7 +1,7 @@
id = "mistral-vibe"
name = "Mistral Vibe"
description = "Mistral's open-source coding assistant"
version = "2.7.2"
version = "2.7.3"
schema_version = 1
authors = ["Mistral AI"]
repository = "https://github.com/mistralai/mistral-vibe"
@@ -11,25 +11,25 @@ name = "Mistral Vibe"
icon = "./icons/mistral_vibe.svg"
[agent_servers.mistral-vibe.targets.darwin-aarch64]
archive = "https://github.com/mistralai/mistral-vibe/releases/download/v2.7.2/vibe-acp-darwin-aarch64-2.7.2.zip"
archive = "https://github.com/mistralai/mistral-vibe/releases/download/v2.7.3/vibe-acp-darwin-aarch64-2.7.3.zip"
cmd = "./vibe-acp"
[agent_servers.mistral-vibe.targets.darwin-x86_64]
archive = "https://github.com/mistralai/mistral-vibe/releases/download/v2.7.2/vibe-acp-darwin-x86_64-2.7.2.zip"
archive = "https://github.com/mistralai/mistral-vibe/releases/download/v2.7.3/vibe-acp-darwin-x86_64-2.7.3.zip"
cmd = "./vibe-acp"
[agent_servers.mistral-vibe.targets.linux-aarch64]
archive = "https://github.com/mistralai/mistral-vibe/releases/download/v2.7.2/vibe-acp-linux-aarch64-2.7.2.zip"
archive = "https://github.com/mistralai/mistral-vibe/releases/download/v2.7.3/vibe-acp-linux-aarch64-2.7.3.zip"
cmd = "./vibe-acp"
[agent_servers.mistral-vibe.targets.linux-x86_64]
archive = "https://github.com/mistralai/mistral-vibe/releases/download/v2.7.2/vibe-acp-linux-x86_64-2.7.2.zip"
archive = "https://github.com/mistralai/mistral-vibe/releases/download/v2.7.3/vibe-acp-linux-x86_64-2.7.3.zip"
cmd = "./vibe-acp"
[agent_servers.mistral-vibe.targets.windows-aarch64]
archive = "https://github.com/mistralai/mistral-vibe/releases/download/v2.7.2/vibe-acp-windows-aarch64-2.7.2.zip"
archive = "https://github.com/mistralai/mistral-vibe/releases/download/v2.7.3/vibe-acp-windows-aarch64-2.7.3.zip"
cmd = "./vibe-acp.exe"
[agent_servers.mistral-vibe.targets.windows-x86_64]
archive = "https://github.com/mistralai/mistral-vibe/releases/download/v2.7.2/vibe-acp-windows-x86_64-2.7.2.zip"
archive = "https://github.com/mistralai/mistral-vibe/releases/download/v2.7.3/vibe-acp-windows-x86_64-2.7.3.zip"
cmd = "./vibe-acp.exe"

View File

@@ -1,6 +1,6 @@
[project]
name = "mistral-vibe"
version = "2.7.2"
version = "2.7.3"
description = "Minimal CLI coding agent by Mistral"
readme = "README.md"
requires-python = ">=3.12"

284
tests/acp/smoke_binary.py Normal file
View File

@@ -0,0 +1,284 @@
#!/usr/bin/env python3
"""Smoke tests for the built vibe-acp binary.
Usage: python tests/acp/smoke_binary.py <binary-dir>
Tests:
1. --version exits successfully
2. ACP initialize handshake returns expected agent info
3. (Linux) No ELF binaries require executable stack (GNU_STACK RWE)
"""
from __future__ import annotations
import asyncio
import asyncio.subprocess as aio_subprocess
import contextlib
import os
from pathlib import Path
import platform
import struct
import subprocess
import sys
import tempfile
from typing import Any, NoReturn
from acp import PROTOCOL_VERSION, Client, RequestError, connect_to_agent
from acp.schema import ClientCapabilities, Implementation
class _SmokeClient(Client):
    """Bare-bones ACP client used only for the initialize handshake.

    Ignores session updates and declines every optional capability with a
    "method not found" error, since the smoke test never exercises them.
    """

    def on_connect(self, conn: Any) -> None:
        pass

    async def session_update(self, *_args: Any, **_kwargs: Any) -> None:
        pass

    @staticmethod
    def _unsupported(method: str) -> NoReturn:
        # Single place that produces the standard rejection for
        # capabilities this client does not implement.
        raise RequestError.method_not_found(method)

    async def request_permission(self, *args: Any, **kwargs: Any) -> Any:
        self._unsupported("session/request_permission")

    async def write_text_file(self, *args: Any, **kwargs: Any) -> Any:
        self._unsupported("fs/write_text_file")

    async def read_text_file(self, *args: Any, **kwargs: Any) -> Any:
        self._unsupported("fs/read_text_file")

    async def create_terminal(self, *args: Any, **kwargs: Any) -> Any:
        self._unsupported("terminal/create")

    async def terminal_output(self, *args: Any, **kwargs: Any) -> Any:
        self._unsupported("terminal/output")

    async def release_terminal(self, *args: Any, **kwargs: Any) -> Any:
        self._unsupported("terminal/release")

    async def wait_for_terminal_exit(self, *args: Any, **kwargs: Any) -> Any:
        self._unsupported("terminal/wait_for_exit")

    async def kill_terminal(self, *args: Any, **kwargs: Any) -> Any:
        self._unsupported("terminal/kill")

    async def ext_method(self, method: str, params: dict[str, Any]) -> dict[str, Any]:
        self._unsupported(method)

    async def ext_notification(self, method: str, params: dict[str, Any]) -> None:
        self._unsupported(method)
async def _terminate(proc: asyncio.subprocess.Process) -> None:
if proc.returncode is None:
with contextlib.suppress(ProcessLookupError):
proc.terminate()
with contextlib.suppress(TimeoutError):
await asyncio.wait_for(proc.wait(), timeout=5)
if proc.returncode is None:
with contextlib.suppress(ProcessLookupError):
proc.kill()
await proc.wait()
def _fail(msg: str) -> NoReturn:
print(f"FAIL: {msg}", file=sys.stderr)
sys.exit(1)
def test_version(binary: Path) -> None:
result = subprocess.run(
[str(binary), "--version"], capture_output=True, text=True, timeout=30
)
if result.returncode != 0:
_fail(
f"--version exited with code {result.returncode}\nstderr: {result.stderr}"
)
print(f"PASS: --version -> {result.stdout.strip()}")
async def test_acp_initialize(binary: Path) -> None:
    """Check 2: the binary completes an ACP initialize handshake.

    Spawns the binary with stdio pipes, performs `initialize`, and verifies
    the protocol version and the advertised agent name. The child is always
    terminated, even on failure or timeout.
    """
    with tempfile.TemporaryDirectory() as tmp:
        vibe_home = Path(tmp) / ".vibe"
        env = os.environ.copy()
        # Isolate the agent's config/state dir and supply a dummy key so the
        # test never touches real user credentials.
        env["VIBE_HOME"] = str(vibe_home)
        env["MISTRAL_API_KEY"] = "smoke-test-mock-key"
        proc = await asyncio.create_subprocess_exec(
            str(binary),
            stdin=aio_subprocess.PIPE,
            stdout=aio_subprocess.PIPE,
            stderr=aio_subprocess.PIPE,
            env=env,
        )
        try:
            assert proc.stdin is not None
            assert proc.stdout is not None
            conn = connect_to_agent(_SmokeClient(), proc.stdin, proc.stdout)
            # Bounded wait: a hung binary fails the test instead of blocking CI.
            resp = await asyncio.wait_for(
                conn.initialize(
                    protocol_version=PROTOCOL_VERSION,
                    client_capabilities=ClientCapabilities(),
                    client_info=Implementation(
                        name="smoke-test", title="Smoke Test", version="0.0.0"
                    ),
                ),
                timeout=15,
            )
            if resp.protocol_version != PROTOCOL_VERSION:
                _fail(
                    f"protocol version mismatch: {resp.protocol_version} != {PROTOCOL_VERSION}"
                )
            if resp.agent_info is None:
                _fail("agent_info is None")
            if resp.agent_info.name != "@mistralai/mistral-vibe":
                _fail(f"unexpected agent name: {resp.agent_info.name}")
            print(
                f"PASS: ACP initialize -> {resp.agent_info.name} v{resp.agent_info.version}"
            )
        finally:
            # Always reap the child, even when an assertion or timeout fires.
            await _terminate(proc)
# ---------------------------------------------------------------------------
# Executable-stack detection (Linux only)
# ---------------------------------------------------------------------------
_PT_GNU_STACK = 0x6474E551
_PF_X = 0x1
def _has_executable_stack(filepath: Path) -> bool | None:
"""Check if an ELF binary has executable stack (GNU_STACK with PF_X).
Returns True if execstack is set, False if clear, None if not an ELF file.
"""
try:
with open(filepath, "rb") as f:
magic = f.read(4)
if magic != b"\x7fELF":
return None
ei_class = f.read(1)[0] # 1 = 32-bit, 2 = 64-bit
ei_data = f.read(1)[0] # 1 = LE, 2 = BE
if ei_data == 1:
endian = "<"
elif ei_data == 2:
endian = ">"
else:
return None
if ei_class == 2: # 64-bit
f.seek(32)
(e_phoff,) = struct.unpack(f"{endian}Q", f.read(8))
f.seek(54)
(e_phentsize,) = struct.unpack(f"{endian}H", f.read(2))
(e_phnum,) = struct.unpack(f"{endian}H", f.read(2))
for i in range(e_phnum):
f.seek(e_phoff + i * e_phentsize)
(p_type,) = struct.unpack(f"{endian}I", f.read(4))
(p_flags,) = struct.unpack(f"{endian}I", f.read(4))
if p_type == _PT_GNU_STACK:
return bool(p_flags & _PF_X)
elif ei_class == 1: # 32-bit
f.seek(28)
(e_phoff,) = struct.unpack(f"{endian}I", f.read(4))
f.seek(42)
(e_phentsize,) = struct.unpack(f"{endian}H", f.read(2))
(e_phnum,) = struct.unpack(f"{endian}H", f.read(2))
for i in range(e_phnum):
off = e_phoff + i * e_phentsize
f.seek(off)
(p_type,) = struct.unpack(f"{endian}I", f.read(4))
# 32-bit phdr: p_flags is at offset 24 within the entry
f.seek(off + 24)
(p_flags,) = struct.unpack(f"{endian}I", f.read(4))
if p_type == _PT_GNU_STACK:
return bool(p_flags & _PF_X)
# No GNU_STACK header → no executable stack requirement
return False
except (OSError, struct.error):
return None
def test_no_executable_stack(binary_dir: Path) -> None:
    """Check 3 (Linux only): no bundled ELF requires executable stack.

    Executable stack (GNU_STACK RWE) is rejected by hardened Linux kernels
    (SELinux enforcing on Fedora, RHEL, etc.). Symptom:
        [PYI-9483:ERROR] Failed to load Python shared library
        '.../libpython3.12.so.1.0': cannot enable executable stack
        as shared object requires: Invalid argument
    """
    if platform.system() != "Linux":
        print("SKIP: executable stack check (not Linux)")
        return
    internal_dir = binary_dir / "_internal"
    if not internal_dir.exists():
        _fail(f"_internal directory not found at {internal_dir}")
    violations: list[Path] = []
    checked = 0
    # Check main binary + everything under _internal/
    candidates = [binary_dir / "vibe-acp"]
    candidates.extend(internal_dir.rglob("*"))
    for filepath in candidates:
        if not filepath.is_file():
            continue
        result = _has_executable_stack(filepath)
        if result is None:
            continue  # not ELF
        checked += 1
        if result:
            violations.append(filepath)
    if violations:
        # Build one aggregated failure message listing every offender.
        lines = [
            f"Found {len(violations)} ELF file(s) with executable stack "
            f"(GNU_STACK RWE) out of {checked} checked.",
            "",
            "These will FAIL on SELinux-enforcing systems (Fedora, RHEL, hardened kernels):",
        ]
        for v in violations:
            lines.append(f"  - {v.relative_to(binary_dir)}")
        lines.append("")
        lines.append("Fix: run 'patchelf --clear-execstack' on these files.")
        _fail("\n".join(lines))
    print(f"PASS: no executable stack in {checked} ELF files")
def main() -> None:
    """Entry point: locate the binary under argv[1] and run all checks."""
    if len(sys.argv) != 2:
        print(f"Usage: {sys.argv[0]} <binary-dir>")
        sys.exit(1)
    binary_dir = Path(sys.argv[1])
    binary_name = "vibe-acp.exe" if platform.system() == "Windows" else "vibe-acp"
    binary = binary_dir / binary_name
    if not binary.exists():
        _fail(f"binary not found at {binary}")
    if platform.system() != "Windows":
        # Restore the executable bit — presumably lost during artifact
        # transfer; NOTE(review): confirm against the CI download step.
        binary.chmod(0o755)
    print(f"Testing binary: {binary}\n")
    test_version(binary)
    test_no_executable_stack(binary_dir)
    asyncio.run(test_acp_initialize(binary))
    print("\nAll smoke tests passed!")

View File

@@ -28,7 +28,7 @@ class TestACPInitialize:
session_capabilities=SessionCapabilities(list=SessionListCapabilities()),
)
assert response.agent_info == Implementation(
name="@mistralai/mistral-vibe", title="Mistral Vibe", version="2.7.2"
name="@mistralai/mistral-vibe", title="Mistral Vibe", version="2.7.3"
)
assert response.auth_methods == []
@@ -52,7 +52,7 @@ class TestACPInitialize:
session_capabilities=SessionCapabilities(list=SessionListCapabilities()),
)
assert response.agent_info == Implementation(
name="@mistralai/mistral-vibe", title="Mistral Vibe", version="2.7.2"
name="@mistralai/mistral-vibe", title="Mistral Vibe", version="2.7.3"
)
assert response.auth_methods is not None

View File

@@ -1,5 +1,6 @@
from __future__ import annotations
import asyncio
from pathlib import Path
from unittest.mock import patch
@@ -38,8 +39,6 @@ class TestAvailableCommandsUpdate:
async def test_available_commands_sent_on_new_session(
self, acp_agent_loop: VibeAcpAgentLoop
) -> None:
import asyncio
await acp_agent_loop.new_session(cwd=str(Path.cwd()), mcp_servers=[])
await asyncio.sleep(0)
@@ -57,6 +56,27 @@ class TestAvailableCommandsUpdate:
assert proxy_cmd is not None
assert "proxy" in proxy_cmd.description.lower()
@pytest.mark.asyncio
async def test_data_retention_command_sent_on_new_session(
self, acp_agent_loop: VibeAcpAgentLoop
) -> None:
await acp_agent_loop.new_session(cwd=str(Path.cwd()), mcp_servers=[])
await asyncio.sleep(0)
updates = _get_fake_client(acp_agent_loop)._session_updates
available_commands_updates = [
u for u in updates if isinstance(u.update, AvailableCommandsUpdate)
]
assert len(available_commands_updates) == 1
update = available_commands_updates[0].update
data_retention_cmd = next(
(c for c in update.available_commands if c.name == "data-retention"), None
)
assert data_retention_cmd is not None
assert "data retention" in data_retention_cmd.description.lower()
class TestProxySetupCommand:
@pytest.mark.asyncio
@@ -363,3 +383,36 @@ class TestProxySetupMessageId:
assert env_file.exists()
env_content = env_file.read_text()
assert "HTTP_PROXY" in env_content
class TestDataRetentionCommand:
    """The /data-retention slash command emits the data-retention notice."""

    @pytest.mark.asyncio
    async def test_data_retention_returns_notice(
        self, acp_agent_loop: VibeAcpAgentLoop
    ) -> None:
        session_response = await acp_agent_loop.new_session(
            cwd=str(Path.cwd()), mcp_servers=[]
        )
        session_id = session_response.session_id
        # Drop updates emitted during session creation so only the command's
        # own output is inspected below.
        _get_fake_client(acp_agent_loop)._session_updates.clear()
        response = await acp_agent_loop.prompt(
            prompt=[TextContentBlock(type="text", text="/data-retention")],
            session_id=session_id,
        )
        assert response.stop_reason == "end_turn"
        assert response.user_message_id is not None
        updates = _get_fake_client(acp_agent_loop)._session_updates
        message_updates = [
            u for u in updates if isinstance(u.update, AgentMessageChunk)
        ]
        assert len(message_updates) == 1
        chunk = message_updates[0].update
        assert chunk.message_id is not None
        content = chunk.content.text
        assert "Your Data Helps Improve Mistral AI" in content
        assert "https://admin.mistral.ai/plateforme/privacy" in content

View File

@@ -11,6 +11,7 @@ class TestCommandRegistry:
assert registry.get_command_name("/model") == "model"
assert registry.get_command_name("/clear") == "clear"
assert registry.get_command_name("/exit") == "exit"
assert registry.get_command_name("/data-retention") == "data-retention"
def test_get_command_name_normalizes_input(self) -> None:
registry = CommandRegistry()
@@ -60,3 +61,9 @@ class TestCommandRegistry:
cmd = registry.find_command("/resume")
assert cmd is not None
assert cmd.handler == "_show_session_picker"
def test_data_retention_command_registration(self) -> None:
registry = CommandRegistry()
cmd = registry.find_command("/data-retention")
assert cmd is not None
assert cmd.handler == "_show_data_retention"

View File

@@ -0,0 +1,286 @@
from __future__ import annotations
from unittest.mock import AsyncMock, MagicMock, patch
import pytest
from vibe.cli.textual_ui.remote.remote_session_manager import RemoteSessionManager
from vibe.core.tools.builtins.ask_user_question import AskUserQuestionArgs
from vibe.core.types import WaitingForInputEvent
@pytest.fixture
def manager() -> RemoteSessionManager:
    """Fresh, unattached RemoteSessionManager for each test."""
    return RemoteSessionManager()
class TestProperties:
    """Default values of the manager's read-only properties when inactive."""

    def test_is_active_false_by_default(self, manager: RemoteSessionManager) -> None:
        assert manager.is_active is False

    def test_is_terminated_false_when_inactive(
        self, manager: RemoteSessionManager
    ) -> None:
        assert manager.is_terminated is False

    def test_is_waiting_for_input_false_when_inactive(
        self, manager: RemoteSessionManager
    ) -> None:
        assert manager.is_waiting_for_input is False

    def test_has_pending_input_false_by_default(
        self, manager: RemoteSessionManager
    ) -> None:
        assert manager.has_pending_input is False

    def test_session_id_none_when_inactive(self, manager: RemoteSessionManager) -> None:
        assert manager.session_id is None
class TestAttachDetach:
    """Attach/detach lifecycle: activation, cleanup, session replacement."""

    @pytest.mark.asyncio
    async def test_attach_activates_manager(
        self, manager: RemoteSessionManager
    ) -> None:
        with patch(
            "vibe.cli.textual_ui.remote.remote_session_manager.RemoteEventsSource"
        ) as MockSource:
            mock_source = MagicMock()
            mock_source.session_id = "test-session-id"
            MockSource.return_value = mock_source
            config = MagicMock()
            await manager.attach(session_id="test-session-id", config=config)
            assert manager.is_active is True
            assert manager.session_id == "test-session-id"

    @pytest.mark.asyncio
    async def test_detach_cleans_up(self, manager: RemoteSessionManager) -> None:
        with patch(
            "vibe.cli.textual_ui.remote.remote_session_manager.RemoteEventsSource"
        ) as MockSource:
            # AsyncMock so the awaited close() call can be recorded.
            mock_source = AsyncMock()
            mock_source.session_id = "test-id"
            MockSource.return_value = mock_source
            config = MagicMock()
            await manager.attach(session_id="test-id", config=config)
            await manager.detach()
            assert manager.is_active is False
            assert manager.session_id is None
            assert manager.has_pending_input is False
            mock_source.close.assert_called_once()

    @pytest.mark.asyncio
    async def test_attach_detaches_previous_session(
        self, manager: RemoteSessionManager
    ) -> None:
        with patch(
            "vibe.cli.textual_ui.remote.remote_session_manager.RemoteEventsSource"
        ) as MockSource:
            first_source = AsyncMock()
            second_source = MagicMock()
            second_source.session_id = "second-id"
            MockSource.side_effect = [first_source, second_source]
            config = MagicMock()
            await manager.attach(session_id="first-id", config=config)
            await manager.attach(session_id="second-id", config=config)
            # Re-attaching must close the first source before switching over.
            first_source.close.assert_called_once()
            assert manager.session_id == "second-id"
class TestValidateInput:
    """validate_input(): None when input is accepted, warning text otherwise."""

    @pytest.mark.asyncio
    async def test_returns_none_when_waiting_for_input(
        self, manager: RemoteSessionManager
    ) -> None:
        with patch(
            "vibe.cli.textual_ui.remote.remote_session_manager.RemoteEventsSource"
        ) as MockSource:
            mock_source = MagicMock()
            mock_source.is_terminated = False
            mock_source.is_waiting_for_input = True
            MockSource.return_value = mock_source
            config = MagicMock()
            await manager.attach(session_id="id", config=config)
            assert manager.validate_input() is None

    @pytest.mark.asyncio
    async def test_returns_warning_when_terminated(
        self, manager: RemoteSessionManager
    ) -> None:
        with patch(
            "vibe.cli.textual_ui.remote.remote_session_manager.RemoteEventsSource"
        ) as MockSource:
            mock_source = MagicMock()
            mock_source.is_terminated = True
            MockSource.return_value = mock_source
            config = MagicMock()
            await manager.attach(session_id="id", config=config)
            result = manager.validate_input()
            assert result is not None
            assert "ended" in result

    @pytest.mark.asyncio
    async def test_returns_warning_when_not_waiting_for_input(
        self, manager: RemoteSessionManager
    ) -> None:
        with patch(
            "vibe.cli.textual_ui.remote.remote_session_manager.RemoteEventsSource"
        ) as MockSource:
            mock_source = MagicMock()
            mock_source.is_terminated = False
            mock_source.is_waiting_for_input = False
            MockSource.return_value = mock_source
            config = MagicMock()
            await manager.attach(session_id="id", config=config)
            result = manager.validate_input()
            assert result is not None
            assert "not waiting" in result
class TestSendPrompt:
    """send_prompt behavior when inactive and on transport failure."""

    @pytest.mark.asyncio
    async def test_raises_when_inactive_and_required(
        self, manager: RemoteSessionManager
    ) -> None:
        with pytest.raises(RuntimeError, match="No active remote session"):
            await manager.send_prompt("hello")

    @pytest.mark.asyncio
    async def test_returns_silently_when_inactive_and_not_required(
        self, manager: RemoteSessionManager
    ) -> None:
        # Must be a no-op rather than raising.
        await manager.send_prompt("hello", require_source=False)

    @pytest.mark.asyncio
    async def test_restores_pending_on_error(
        self, manager: RemoteSessionManager
    ) -> None:
        with patch(
            "vibe.cli.textual_ui.remote.remote_session_manager.RemoteEventsSource"
        ) as MockSource:
            mock_source = AsyncMock()
            mock_source.send_prompt.side_effect = Exception("connection error")
            MockSource.return_value = mock_source
            config = MagicMock()
            await manager.attach(session_id="id", config=config)
            event = WaitingForInputEvent(task_id="t1", label="test")
            manager.set_pending_input(event)
            with pytest.raises(Exception, match="connection error"):
                await manager.send_prompt("hello")
            # The pending-input marker must survive a failed send.
            assert manager.has_pending_input is True
class TestPendingInput:
    """set_pending_input / cancel_pending_input toggle has_pending_input."""

    def test_set_and_cancel_pending_input(self, manager: RemoteSessionManager) -> None:
        event = WaitingForInputEvent(task_id="t1", label="test")
        manager.set_pending_input(event)
        assert manager.has_pending_input is True
        manager.cancel_pending_input()
        assert manager.has_pending_input is False
class TestBuildQuestionArgs:
    """Mapping WaitingForInputEvent answers into AskUserQuestionArgs."""

    def test_returns_none_with_no_predefined_answers(
        self, manager: RemoteSessionManager
    ) -> None:
        event = WaitingForInputEvent(task_id="t1", label="test")
        assert manager.build_question_args(event) is None

    def test_returns_none_with_one_predefined_answer(
        self, manager: RemoteSessionManager
    ) -> None:
        # A single option is not a real choice, so no question is built.
        event = WaitingForInputEvent(
            task_id="t1", label="test", predefined_answers=["only one"]
        )
        assert manager.build_question_args(event) is None

    def test_returns_args_with_two_predefined_answers(
        self, manager: RemoteSessionManager
    ) -> None:
        event = WaitingForInputEvent(
            task_id="t1", label="Pick one", predefined_answers=["yes", "no"]
        )
        result = manager.build_question_args(event)
        assert result is not None
        assert isinstance(result, AskUserQuestionArgs)
        assert len(result.questions) == 1
        assert result.questions[0].question == "Pick one"
        assert len(result.questions[0].options) == 2

    def test_caps_at_four_predefined_answers(
        self, manager: RemoteSessionManager
    ) -> None:
        event = WaitingForInputEvent(
            task_id="t1",
            label="Pick",
            predefined_answers=["a", "b", "c", "d", "e", "f"],
        )
        result = manager.build_question_args(event)
        assert result is not None
        assert len(result.questions[0].options) == 4

    def test_uses_default_question_when_no_label(
        self, manager: RemoteSessionManager
    ) -> None:
        event = WaitingForInputEvent(task_id="t1", predefined_answers=["a", "b"])
        result = manager.build_question_args(event)
        assert result is not None
        assert result.questions[0].question == "Choose an answer"
class TestBuildTerminalMessage:
    """Terminal message (type, text) pairs for each end-of-session state."""

    def test_completed_when_no_source(self, manager: RemoteSessionManager) -> None:
        msg_type, text = manager.build_terminal_message()
        assert msg_type == "info"
        assert "completed" in text

    @pytest.mark.asyncio
    async def test_failed_state(self, manager: RemoteSessionManager) -> None:
        with patch(
            "vibe.cli.textual_ui.remote.remote_session_manager.RemoteEventsSource"
        ) as MockSource:
            mock_source = MagicMock()
            mock_source.is_failed = True
            mock_source.is_canceled = False
            MockSource.return_value = mock_source
            config = MagicMock()
            await manager.attach(session_id="id", config=config)
            msg_type, text = manager.build_terminal_message()
            assert msg_type == "error"
            assert "failed" in text.lower()

    @pytest.mark.asyncio
    async def test_canceled_state(self, manager: RemoteSessionManager) -> None:
        with patch(
            "vibe.cli.textual_ui.remote.remote_session_manager.RemoteEventsSource"
        ) as MockSource:
            mock_source = MagicMock()
            mock_source.is_failed = False
            mock_source.is_canceled = True
            MockSource.return_value = mock_source
            config = MagicMock()
            await manager.attach(session_id="id", config=config)
            msg_type, text = manager.build_terminal_message()
            assert msg_type == "warning"
            assert "canceled" in text.lower()

View File

@@ -8,29 +8,33 @@ from vibe.cli.textual_ui.widgets.session_picker import (
SessionPickerApp,
_format_relative_time,
)
from vibe.core.session.session_loader import SessionInfo
from vibe.core.session.resume_sessions import ResumeSessionInfo
@pytest.fixture
def sample_sessions() -> list[SessionInfo]:
def sample_sessions() -> list[ResumeSessionInfo]:
return [
SessionInfo(
ResumeSessionInfo(
session_id="session-a",
source="local",
cwd="/test",
title="Session A",
end_time=(datetime.now(UTC) - timedelta(minutes=5)).isoformat(),
),
SessionInfo(
ResumeSessionInfo(
session_id="session-b",
source="local",
cwd="/test",
title="Session B",
end_time=(datetime.now(UTC) - timedelta(hours=1)).isoformat(),
),
SessionInfo(
ResumeSessionInfo(
session_id="session-c",
source="remote",
cwd="/test",
title="Session C",
end_time=(datetime.now(UTC) - timedelta(days=1)).isoformat(),
status="RUNNING",
),
]
@@ -38,9 +42,9 @@ def sample_sessions() -> list[SessionInfo]:
@pytest.fixture
def sample_latest_messages() -> dict[str, str]:
return {
"session-a": "Help me fix this bug",
"session-b": "Refactor the authentication module",
"session-c": "Add unit tests for the API",
"local:session-a": "Help me fix this bug",
"local:session-b": "Refactor the authentication module",
"remote:session-c": "Add unit tests for the API",
}
@@ -86,7 +90,9 @@ class TestFormatRelativeTime:
class TestSessionPickerAppInit:
def test_init_sets_properties(
self, sample_sessions: list[SessionInfo], sample_latest_messages: dict[str, str]
self,
sample_sessions: list[ResumeSessionInfo],
sample_latest_messages: dict[str, str],
) -> None:
picker = SessionPickerApp(
sessions=sample_sessions, latest_messages=sample_latest_messages
@@ -103,13 +109,20 @@ class TestSessionPickerAppInit:
class TestSessionPickerMessages:
def test_session_selected_stores_session_id(self) -> None:
msg = SessionPickerApp.SessionSelected("test-session-id")
def test_session_selected_stores_option_id(self) -> None:
msg = SessionPickerApp.SessionSelected(
"local:test-session-id", "local", "test-session-id"
)
assert msg.option_id == "local:test-session-id"
assert msg.source == "local"
assert msg.session_id == "test-session-id"
def test_session_selected_with_full_uuid(self) -> None:
session_id = "abc12345-6789-0123-4567-89abcdef0123"
msg = SessionPickerApp.SessionSelected(session_id)
option_id = f"remote:{session_id}"
msg = SessionPickerApp.SessionSelected(option_id, "remote", session_id)
assert msg.option_id == option_id
assert msg.source == "remote"
assert msg.session_id == session_id
def test_cancelled_can_be_instantiated(self) -> None:

View File

View File

@@ -0,0 +1,283 @@
from __future__ import annotations
from unittest.mock import AsyncMock, MagicMock, patch
import pytest
from tests.conftest import build_test_vibe_config
from vibe.core.agent_loop import AgentLoopStateError
from vibe.core.nuage.exceptions import ErrorCode, WorkflowsException
from vibe.core.nuage.remote_events_source import RemoteEventsSource
from vibe.core.nuage.streaming import StreamEvent
_SESSION_ID = "test-session"
def _make_source(**kwargs) -> RemoteEventsSource:
    """Build a RemoteEventsSource against a minimal test config.

    ``enabled_tools`` is consumed here (it belongs to the config); every
    other keyword is forwarded to the RemoteEventsSource constructor.
    """
    tools = kwargs.pop("enabled_tools", [])
    cfg = build_test_vibe_config(enabled_tools=tools)
    return RemoteEventsSource(session_id=_SESSION_ID, config=cfg, **kwargs)
def _make_retryable_exc(msg: str) -> WorkflowsException:
    # GET_EVENTS_STREAM_ERROR is the code the source checks when deciding
    # whether a disconnect is retryable (see TestIsRetryableStreamDisconnect).
    return WorkflowsException(message=msg, code=ErrorCode.GET_EVENTS_STREAM_ERROR)


def _make_stream_event(
    broker_sequence: int | None = None, data: dict | None = None
) -> StreamEvent:
    # Minimal StreamEvent; broker_sequence=None models events with no cursor.
    return StreamEvent(data=data or {}, broker_sequence=broker_sequence)
class TestIsRetryableStreamDisconnect:
    """Tests for RemoteEventsSource._is_retryable_stream_disconnect.

    Only GET_EVENTS_STREAM_ERROR exceptions whose message matches a known
    transient-disconnect phrase are treated as retryable; both the code and
    the message must match.
    """

    def test_peer_closed_connection(self) -> None:
        source = _make_source()
        exc = _make_retryable_exc("Peer closed connection without response")
        assert source._is_retryable_stream_disconnect(exc) is True

    def test_incomplete_chunked_read(self) -> None:
        source = _make_source()
        exc = _make_retryable_exc("Incomplete chunked read during streaming")
        assert source._is_retryable_stream_disconnect(exc) is True

    def test_non_retryable_message(self) -> None:
        # Matching error code but an unrecognized message: not retryable.
        source = _make_source()
        exc = WorkflowsException(
            message="some other error", code=ErrorCode.GET_EVENTS_STREAM_ERROR
        )
        assert source._is_retryable_stream_disconnect(exc) is False

    def test_wrong_error_code(self) -> None:
        # Recognized message under a different error code: not retryable.
        source = _make_source()
        exc = WorkflowsException(
            message="peer closed connection",
            code=ErrorCode.POST_EXECUTIONS_SIGNALS_ERROR,
        )
        assert source._is_retryable_stream_disconnect(exc) is False
async def _async_gen_from_list(items):
    # Async generator over a plain iterable.
    # NOTE(review): not referenced within this chunk — possibly leftover or
    # used further down the file; confirm before removing.
    for item in items:
        yield item


async def _async_gen_raise(items, exc):
    # Like _async_gen_from_list, but raises `exc` after the last item to
    # simulate a stream that dies mid-flight.
    # NOTE(review): also unreferenced in this chunk — confirm usage.
    for item in items:
        yield item
    raise exc
class _FakeStream:
def __init__(self, payloads=None, exc=None):
self._payloads = payloads or []
self._exc = exc
self._closed = False
def __aiter__(self):
return self._iterate().__aiter__()
async def _iterate(self):
for p in self._payloads:
yield p
if self._exc is not None:
raise self._exc
async def aclose(self):
self._closed = True
class TestStreamRemoteEventsRetry:
    """Retry behaviour of RemoteEventsSource._stream_remote_events.

    Each test swaps the client's ``stream_events`` for a factory returning a
    _FakeStream and patches ``asyncio.sleep`` so backoff does not slow tests.
    """

    @pytest.mark.asyncio
    async def test_retries_on_retryable_disconnect(self) -> None:
        source = _make_source()
        exc = _make_retryable_exc("peer closed connection")
        call_count = 0

        def make_stream(_params):
            # Count reconnect attempts; every attempt dies immediately.
            nonlocal call_count
            call_count += 1
            return _FakeStream(exc=exc)

        mock_client = MagicMock()
        mock_client.stream_events = make_stream
        source._client = mock_client
        with patch("asyncio.sleep", new_callable=AsyncMock):
            events = [e async for e in source._stream_remote_events()]
        assert events == []
        assert call_count == 4  # 1 initial + 3 retries

    @pytest.mark.asyncio
    async def test_stops_after_max_retry_count(self) -> None:
        source = _make_source()
        exc = _make_retryable_exc("incomplete chunked read")
        call_count = 0

        def make_stream(_params):
            nonlocal call_count
            call_count += 1
            return _FakeStream(exc=exc)

        mock_client = MagicMock()
        mock_client.stream_events = make_stream
        source._client = mock_client
        with patch("asyncio.sleep", new_callable=AsyncMock):
            events = [e async for e in source._stream_remote_events()]
        assert events == []
        assert call_count == 4

    @pytest.mark.asyncio
    async def test_resets_retry_count_on_successful_event(self) -> None:
        # A successfully received event must reset the retry counter,
        # allowing more total reconnects than the bare maximum.
        source = _make_source()
        exc = _make_retryable_exc("peer closed connection")
        successful_event = _make_stream_event(broker_sequence=0, data={})
        call_count = 0

        def make_stream(_params):
            nonlocal call_count
            call_count += 1
            # First two attempts deliver one good event before disconnecting.
            if call_count <= 2:
                return _FakeStream(payloads=[successful_event], exc=exc)
            return _FakeStream(exc=exc)

        mock_client = MagicMock()
        mock_client.stream_events = make_stream
        source._client = mock_client
        with (
            patch("asyncio.sleep", new_callable=AsyncMock),
            patch.object(source, "_normalize_stream_event", return_value=None),
        ):
            events = [e async for e in source._stream_remote_events()]
        assert events == []
        # call 1: success + exc -> retry_count = 1
        # call 2: success (reset) + exc -> retry_count = 1
        # call 3: exc -> retry_count = 2
        # call 4: exc -> retry_count = 3
        # call 5: exc -> retry_count = 4 > 3 -> break
        assert call_count == 5

    @pytest.mark.asyncio
    async def test_non_retryable_raises_agent_loop_state_error(self) -> None:
        # Non-retryable codes surface to the caller as AgentLoopStateError.
        source = _make_source()
        exc = WorkflowsException(
            message="something bad", code=ErrorCode.TEMPORAL_CONNECTION_ERROR
        )
        mock_client = MagicMock()
        mock_client.stream_events = lambda _: _FakeStream(exc=exc)
        source._client = mock_client
        with pytest.raises(AgentLoopStateError):
            async for _ in source._stream_remote_events():
                pass
class TestStreamRemoteEventsIdleBoundary:
    """stop_on_idle_boundary controls whether streaming halts at idle events."""

    @pytest.mark.asyncio
    async def test_stops_on_idle_boundary(self) -> None:
        # One raw event, normalized to workflow_event and consumed into a
        # sentinel; with the flag set, _is_idle_boundary stops the stream.
        source = _make_source()
        event_data = _make_stream_event(broker_sequence=0, data={})
        sentinel_event = MagicMock()
        mock_client = MagicMock()
        mock_client.stream_events = lambda _: _FakeStream(payloads=[event_data])
        source._client = mock_client
        workflow_event = MagicMock()
        with (
            patch.object(
                source, "_normalize_stream_event", return_value=workflow_event
            ),
            patch.object(
                source, "_consume_workflow_event", return_value=[sentinel_event]
            ),
            patch.object(source, "_is_idle_boundary", return_value=True) as mock_idle,
        ):
            events = [
                e
                async for e in source._stream_remote_events(stop_on_idle_boundary=True)
            ]
        # The event that triggered the boundary is still yielded.
        assert events == [sentinel_event]
        mock_idle.assert_called_once_with(workflow_event)

    @pytest.mark.asyncio
    async def test_continues_past_idle_boundary_when_disabled(self) -> None:
        # With the flag off, both events flow even though every event would
        # qualify as an idle boundary.
        source = _make_source()
        event1 = _make_stream_event(broker_sequence=0, data={})
        event2 = _make_stream_event(broker_sequence=1, data={})
        sentinel1 = MagicMock()
        sentinel2 = MagicMock()
        mock_client = MagicMock()
        mock_client.stream_events = lambda _: _FakeStream(payloads=[event1, event2])
        source._client = mock_client
        workflow_event = MagicMock()
        call_count = 0

        def consume_side_effect(_evt):
            nonlocal call_count
            call_count += 1
            return [sentinel1] if call_count == 1 else [sentinel2]

        with (
            patch.object(
                source, "_normalize_stream_event", return_value=workflow_event
            ),
            patch.object(
                source, "_consume_workflow_event", side_effect=consume_side_effect
            ),
            patch.object(source, "_is_idle_boundary", return_value=True),
        ):
            events = [
                e
                async for e in source._stream_remote_events(stop_on_idle_boundary=False)
            ]
        assert events == [sentinel1, sentinel2]
class TestBrokerSequenceTracking:
    """_next_start_seq (the resume cursor) must track received events."""

    @pytest.mark.asyncio
    async def test_next_start_seq_updated(self) -> None:
        # Cursor advances to last broker_sequence + 1.
        source = _make_source()
        assert source._next_start_seq == 0
        event1 = _make_stream_event(broker_sequence=5, data={})
        event2 = _make_stream_event(broker_sequence=10, data={})
        mock_client = MagicMock()
        mock_client.stream_events = lambda _: _FakeStream(payloads=[event1, event2])
        source._client = mock_client
        with patch.object(source, "_normalize_stream_event", return_value=None):
            events = [e async for e in source._stream_remote_events()]
        assert events == []
        assert source._next_start_seq == 11

    @pytest.mark.asyncio
    async def test_none_broker_sequence_not_updated(self) -> None:
        # Events without a broker_sequence leave the cursor untouched.
        source = _make_source()
        source._next_start_seq = 5
        event = _make_stream_event(broker_sequence=None, data={})
        mock_client = MagicMock()
        mock_client.stream_events = lambda _: _FakeStream(payloads=[event])
        source._client = mock_client
        with patch.object(source, "_normalize_stream_event", return_value=None):
            events = [e async for e in source._stream_remote_events()]
        assert events == []
        assert source._next_start_seq == 5

View File

@@ -0,0 +1,206 @@
from __future__ import annotations
import json
from unittest.mock import AsyncMock, patch
import pytest
from vibe.core.nuage.client import WorkflowsClient
from vibe.core.nuage.exceptions import ErrorCode, WorkflowsException
from vibe.core.nuage.streaming import StreamEvent, StreamEventsQueryParams
def _make_client() -> WorkflowsClient:
    # URL and key are placeholders; every test replaces the HTTP layer with
    # mocks, so nothing is ever contacted.
    return WorkflowsClient(base_url="http://localhost:8080", api_key="test-key")
def _valid_event_payload() -> dict:
return {
"stream": "test-stream",
"timestamp_unix_nano": 1000000,
"data": {"key": "value"},
}
class TestParseSSEData:
    """Tests for WorkflowsClient._parse_sse_data."""

    def test_valid_json_returns_stream_event(self) -> None:
        client = _make_client()
        payload = _valid_event_payload()
        result = client._parse_sse_data(json.dumps(payload), event_type=None)
        assert isinstance(result, StreamEvent)
        assert result.stream == "test-stream"
        assert result.data == {"key": "value"}

    def test_error_event_type_raises(self) -> None:
        # An SSE "event: error" line forces a WorkflowsException regardless
        # of the payload contents.
        client = _make_client()
        payload = {"some": "data"}
        with pytest.raises(WorkflowsException) as exc_info:
            client._parse_sse_data(json.dumps(payload), event_type="error")
        assert exc_info.value.code == ErrorCode.GET_EVENTS_STREAM_ERROR
        assert "Stream error from server" in exc_info.value.message

    def test_error_key_in_json_raises(self) -> None:
        # Even without an error event type, an "error" key in the body raises,
        # and the server-provided message is propagated.
        client = _make_client()
        payload = {"error": "something went wrong"}
        with pytest.raises(WorkflowsException) as exc_info:
            client._parse_sse_data(json.dumps(payload), event_type=None)
        assert exc_info.value.code == ErrorCode.GET_EVENTS_STREAM_ERROR
        assert "something went wrong" in exc_info.value.message

    def test_error_event_type_with_non_dict_parsed(self) -> None:
        # Error events whose body is not a dict still surface the raw body.
        client = _make_client()
        with pytest.raises(WorkflowsException) as exc_info:
            client._parse_sse_data(json.dumps("a plain string"), event_type="error")
        assert "a plain string" in exc_info.value.message

    def test_malformed_json_raises(self) -> None:
        # Malformed payloads propagate the decoder error to the caller.
        client = _make_client()
        with pytest.raises(json.JSONDecodeError):
            client._parse_sse_data("{not valid json", event_type=None)
class TestIterSSEEvents:
    """Tests for WorkflowsClient._iter_sse_events (SSE line-protocol parsing)."""

    @pytest.mark.asyncio
    async def test_parses_data_lines(self) -> None:
        client = _make_client()
        payload = _valid_event_payload()
        lines = [f"data: {json.dumps(payload)}"]
        response = AsyncMock()
        response.aiter_lines = _async_line_iter(lines)
        events = [e async for e in client._iter_sse_events(response)]
        assert len(events) == 1
        assert events[0].stream == "test-stream"

    @pytest.mark.asyncio
    async def test_skips_empty_lines_and_comments(self) -> None:
        # SSE comments start with ":"; blank lines are delimiters — neither
        # should produce an event.
        client = _make_client()
        payload = _valid_event_payload()
        lines = ["", ": this is a comment", f"data: {json.dumps(payload)}", ""]
        response = AsyncMock()
        response.aiter_lines = _async_line_iter(lines)
        events = [e async for e in client._iter_sse_events(response)]
        assert len(events) == 1

    @pytest.mark.asyncio
    async def test_parses_event_type_and_passes_to_parse(self) -> None:
        # An "event: error" line makes the following data line raise.
        client = _make_client()
        payload = {"error": "server broke"}
        lines = ["event: error", f"data: {json.dumps(payload)}"]
        response = AsyncMock()
        response.aiter_lines = _async_line_iter(lines)
        with pytest.raises(WorkflowsException) as exc_info:
            _ = [e async for e in client._iter_sse_events(response)]
        assert exc_info.value.code == ErrorCode.GET_EVENTS_STREAM_ERROR

    @pytest.mark.asyncio
    async def test_resets_event_type_after_data_line(self) -> None:
        # The event type applies only to the next data line, then resets to
        # None — verified via the args passed to _parse_sse_data.
        client = _make_client()
        payload = _valid_event_payload()
        lines = [
            "event: custom_type",
            f"data: {json.dumps(payload)}",
            f"data: {json.dumps(payload)}",
        ]
        response = AsyncMock()
        response.aiter_lines = _async_line_iter(lines)
        with patch.object(
            client, "_parse_sse_data", wraps=client._parse_sse_data
        ) as mock_parse:
            events = [e async for e in client._iter_sse_events(response)]
        assert len(events) == 2
        assert mock_parse.call_args_list[0].args[1] == "custom_type"
        assert mock_parse.call_args_list[1].args[1] is None

    @pytest.mark.asyncio
    async def test_skips_non_data_non_event_lines(self) -> None:
        # "id:" and "retry:" fields are valid SSE but ignored here.
        client = _make_client()
        payload = _valid_event_payload()
        lines = ["id: 123", "retry: 5000", f"data: {json.dumps(payload)}"]
        response = AsyncMock()
        response.aiter_lines = _async_line_iter(lines)
        events = [e async for e in client._iter_sse_events(response)]
        assert len(events) == 1

    @pytest.mark.asyncio
    async def test_parse_failure_logs_warning_and_continues(self) -> None:
        # One bad payload is logged and skipped; the stream keeps going.
        client = _make_client()
        payload = _valid_event_payload()
        lines = ["data: {not valid json}", f"data: {json.dumps(payload)}"]
        response = AsyncMock()
        response.aiter_lines = _async_line_iter(lines)
        with patch("vibe.core.nuage.client.logger") as mock_logger:
            events = [e async for e in client._iter_sse_events(response)]
        assert len(events) == 1
        mock_logger.warning.assert_called_once()
def _setup_mock_client(client: WorkflowsClient, mock_response: AsyncMock) -> None:
    """Replace the client's HTTP layer so ``stream(...)`` yields mock_response."""
    mock_stream = AsyncMock()
    mock_stream.__aenter__ = AsyncMock(return_value=mock_response)
    mock_stream.__aexit__ = AsyncMock(return_value=False)
    mock_http = AsyncMock()
    # Plain lambda rather than AsyncMock: stream() must be a synchronous call
    # that returns an async context manager — presumably mirroring httpx;
    # confirm against WorkflowsClient's HTTP backend.
    mock_http.stream = lambda *args, **kwargs: mock_stream
    client._client = mock_http
class TestStreamEvents:
    """End-to-end tests for WorkflowsClient.stream_events over a mocked HTTP layer."""

    @pytest.mark.asyncio
    async def test_yields_stream_events(self) -> None:
        client = _make_client()
        payload = _valid_event_payload()
        lines = [f"data: {json.dumps(payload)}"]
        mock_response = AsyncMock()
        mock_response.raise_for_status = lambda: None
        mock_response.aiter_lines = _async_line_iter(lines)
        _setup_mock_client(client, mock_response)
        params = StreamEventsQueryParams(workflow_exec_id="wf-1", start_seq=0)
        events = [e async for e in client.stream_events(params)]
        assert len(events) == 1
        assert isinstance(events[0], StreamEvent)

    @pytest.mark.asyncio
    async def test_reraises_workflows_exception(self) -> None:
        # WorkflowsException from the SSE layer passes through unchanged.
        client = _make_client()
        mock_response = AsyncMock()
        mock_response.raise_for_status = lambda: None
        mock_response.aiter_lines = _async_line_iter([
            "event: error",
            'data: {"error": "stream error"}',
        ])
        _setup_mock_client(client, mock_response)
        params = StreamEventsQueryParams(workflow_exec_id="wf-1")
        with pytest.raises(WorkflowsException) as exc_info:
            _ = [e async for e in client.stream_events(params)]
        assert exc_info.value.code == ErrorCode.GET_EVENTS_STREAM_ERROR

    @pytest.mark.asyncio
    async def test_wraps_other_exceptions_in_workflows_exception(self) -> None:
        # Any other failure (here: raise_for_status) is wrapped with the
        # stream error code and a descriptive message.
        client = _make_client()
        mock_response = AsyncMock()
        mock_response.raise_for_status.side_effect = RuntimeError("connection lost")
        _setup_mock_client(client, mock_response)
        params = StreamEventsQueryParams(workflow_exec_id="wf-1")
        with pytest.raises(WorkflowsException) as exc_info:
            _ = [e async for e in client.stream_events(params)]
        assert exc_info.value.code == ErrorCode.GET_EVENTS_STREAM_ERROR
        assert "Failed to stream events" in exc_info.value.message
def _async_line_iter(lines: list[str]):
async def _iter():
for line in lines:
yield line
return _iter

View File

@@ -0,0 +1,95 @@
from __future__ import annotations
from unittest.mock import AsyncMock, patch
import pytest
from vibe.core.teleport.errors import ServiceTeleportError
from vibe.core.teleport.nuage import GitHubPublicData, GitHubStatus, NuageClient
@pytest.fixture
def client() -> NuageClient:
    # Placeholder endpoint/credentials: every test mocks the network calls.
    return NuageClient(
        base_url="https://test.example.com", api_key="test-key", workflow_id="wf-1"
    )
class TestWaitForGithubConnection:
    """Tests for NuageClient.wait_for_github_connection's polling loop."""

    @pytest.mark.asyncio
    async def test_returns_immediately_when_connected(
        self, client: NuageClient
    ) -> None:
        # CONNECTED on the first poll: exactly one status request, no sleep.
        connected = GitHubPublicData(status=GitHubStatus.CONNECTED)
        client.get_github_integration = AsyncMock(return_value=connected)
        result = await client.wait_for_github_connection("exec-1")
        assert result.connected is True
        client.get_github_integration.assert_called_once_with("exec-1")

    @pytest.mark.asyncio
    async def test_polls_until_connected(self, client: NuageClient) -> None:
        # PENDING -> WAITING_FOR_OAUTH -> CONNECTED: three polls total.
        pending = GitHubPublicData(status=GitHubStatus.PENDING)
        waiting = GitHubPublicData(
            status=GitHubStatus.WAITING_FOR_OAUTH, oauth_url="https://github.com/auth"
        )
        connected = GitHubPublicData(status=GitHubStatus.CONNECTED)
        client.get_github_integration = AsyncMock(
            side_effect=[pending, waiting, connected]
        )
        with patch("vibe.core.teleport.nuage.asyncio.sleep", new_callable=AsyncMock):
            result = await client.wait_for_github_connection("exec-1")
        assert result.connected is True
        assert client.get_github_integration.call_count == 3

    @pytest.mark.asyncio
    async def test_raises_on_error_status(self, client: NuageClient) -> None:
        # ERROR surfaces the server-provided error text.
        error_data = GitHubPublicData(
            status=GitHubStatus.ERROR, error="App not installed"
        )
        client.get_github_integration = AsyncMock(return_value=error_data)
        with pytest.raises(ServiceTeleportError, match="App not installed"):
            await client.wait_for_github_connection("exec-1")

    @pytest.mark.asyncio
    async def test_raises_on_oauth_timeout(self, client: NuageClient) -> None:
        timeout_data = GitHubPublicData(status=GitHubStatus.OAUTH_TIMEOUT)
        client.get_github_integration = AsyncMock(return_value=timeout_data)
        with pytest.raises(ServiceTeleportError, match="oauth_timeout"):
            await client.wait_for_github_connection("exec-1")

    @pytest.mark.asyncio
    async def test_raises_on_timeout(self, client: NuageClient) -> None:
        # Drive time.monotonic past the deadline (0.0 start, 601.0 > 600.0)
        # while the status stays PENDING.
        pending = GitHubPublicData(status=GitHubStatus.PENDING)
        client.get_github_integration = AsyncMock(return_value=pending)
        monotonic_values = iter([0.0, 0.0, 601.0])
        with (
            patch(
                "vibe.core.teleport.nuage.time.monotonic",
                side_effect=lambda: next(monotonic_values),
            ),
            patch("vibe.core.teleport.nuage.asyncio.sleep", new_callable=AsyncMock),
            pytest.raises(ServiceTeleportError, match="timed out"),
        ):
            await client.wait_for_github_connection("exec-1", timeout=600.0)

    @pytest.mark.asyncio
    async def test_sleeps_with_correct_interval(self, client: NuageClient) -> None:
        # One sleep between the two polls, never longer than the interval.
        pending = GitHubPublicData(status=GitHubStatus.PENDING)
        connected = GitHubPublicData(status=GitHubStatus.CONNECTED)
        client.get_github_integration = AsyncMock(side_effect=[pending, connected])
        with patch(
            "vibe.core.teleport.nuage.asyncio.sleep", new_callable=AsyncMock
        ) as mock_sleep:
            await client.wait_for_github_connection("exec-1", interval=5.0)
        mock_sleep.assert_called_once()
        sleep_duration = mock_sleep.call_args[0][0]
        assert sleep_duration <= 5.0

View File

@@ -0,0 +1,726 @@
from __future__ import annotations
from typing import Any
from tests.conftest import build_test_vibe_config
from vibe.core.nuage.events import (
CustomTaskCanceled,
CustomTaskCanceledAttributes,
CustomTaskCompleted,
CustomTaskCompletedAttributes,
CustomTaskInProgress,
CustomTaskInProgressAttributes,
CustomTaskStarted,
CustomTaskStartedAttributes,
JSONPatchAdd,
JSONPatchAppend,
JSONPatchPayload,
JSONPatchReplace,
JSONPayload,
)
from vibe.core.nuage.remote_events_source import RemoteEventsSource
from vibe.core.types import (
AssistantEvent,
ReasoningEvent,
Role,
ToolCallEvent,
ToolResultEvent,
ToolStreamEvent,
UserMessageEvent,
WaitingForInputEvent,
)
_EXEC_ID = "session-123"
def _make_loop(enabled_tools: list[str] | None = None) -> RemoteEventsSource:
    # Fresh consumer per test so task/tool bookkeeping never leaks across cases.
    config = build_test_vibe_config(enabled_tools=enabled_tools or [])
    return RemoteEventsSource(session_id=_EXEC_ID, config=config)
def _started(
    task_id: str, task_type: str, payload: dict[str, Any]
) -> CustomTaskStarted:
    """Build a CustomTaskStarted event wrapping ``payload`` as a JSON payload."""
    return CustomTaskStarted(
        event_id=f"evt-{task_id}-start",
        workflow_exec_id=_EXEC_ID,
        attributes=CustomTaskStartedAttributes(
            custom_task_id=task_id,
            custom_task_type=task_type,
            payload=JSONPayload(value=payload),
        ),
    )
def _completed(
    task_id: str, task_type: str, payload: dict[str, Any]
) -> CustomTaskCompleted:
    """Build a CustomTaskCompleted event wrapping ``payload`` as a JSON payload."""
    return CustomTaskCompleted(
        event_id=f"evt-{task_id}-done",
        workflow_exec_id=_EXEC_ID,
        attributes=CustomTaskCompletedAttributes(
            custom_task_id=task_id,
            custom_task_type=task_type,
            payload=JSONPayload(value=payload),
        ),
    )
def _in_progress(
    task_id: str, task_type: str, patches: list[Any]
) -> CustomTaskInProgress:
    """Build a CustomTaskInProgress event carrying a JSON-patch payload.

    Unlike the started/completed builders, progress events deliver a list of
    JSON patches rather than a full payload value.
    """
    return CustomTaskInProgress(
        event_id=f"evt-{task_id}-progress",
        workflow_exec_id=_EXEC_ID,
        attributes=CustomTaskInProgressAttributes(
            custom_task_id=task_id,
            custom_task_type=task_type,
            payload=JSONPatchPayload(value=patches),
        ),
    )
def _canceled(task_id: str, task_type: str, reason: str = "") -> CustomTaskCanceled:
    """Build a CustomTaskCanceled event with an optional cancellation reason."""
    return CustomTaskCanceled(
        event_id=f"evt-{task_id}-cancel",
        workflow_exec_id=_EXEC_ID,
        attributes=CustomTaskCanceledAttributes(
            custom_task_id=task_id, custom_task_type=task_type, reason=reason
        ),
    )
def test_consume_wait_for_input_event_emits_waiting_event() -> None:
    """A wait_for_input start yields the prompt text plus a waiting event."""
    loop = _make_loop()
    event = _started(
        "wait-task-1",
        "wait_for_input",
        {
            "task_id": "wait-task-1",
            "input_schema": {"title": "ChatInput"},
            "label": "What next?",
        },
    )
    emitted_events = loop._consume_workflow_event(event)
    assert len(emitted_events) == 2
    assistant_event = emitted_events[0]
    waiting_event = emitted_events[1]
    assert isinstance(assistant_event, AssistantEvent)
    assert assistant_event.content == "What next?"
    assert isinstance(waiting_event, WaitingForInputEvent)
    assert waiting_event.task_id == "wait-task-1"
    assert waiting_event.label == "What next?"
    # No schema examples were given, so no predefined answers are offered.
    assert waiting_event.predefined_answers is None


def test_consume_agent_input_keeps_repeated_text_across_distinct_turns() -> None:
    """Identical user text in two separate turns is recorded twice, not deduped."""
    loop = _make_loop()
    first_event = _completed(
        "input-1", "AgentInputState", {"input": {"message": [{"text": "continue"}]}}
    )
    second_event = _completed(
        "input-2", "AgentInputState", {"input": {"message": [{"text": "continue"}]}}
    )
    # Input events produce no UI events, only message history entries.
    assert loop._consume_workflow_event(first_event) == []
    assert loop._consume_workflow_event(second_event) == []
    assert [msg.content for msg in loop.messages if msg.role == Role.user] == [
        "continue",
        "continue",
    ]
def test_wait_for_input_emits_predefined_answers_and_user_message() -> None:
    """Schema examples become predefined answers; completion becomes a user message."""
    loop = _make_loop()
    started = _started(
        "wait-task-1",
        "wait_for_input",
        {
            "input_schema": {
                "title": "ChatInput",
                "properties": {
                    "message": {
                        "examples": [
                            [{"type": "text", "text": "Python"}],
                            [{"type": "text", "text": "JavaScript"}],
                            [{"type": "text", "text": "Other"}],
                        ]
                    }
                },
            },
            "label": "Which language?",
        },
    )
    completed = _completed(
        "wait-task-1",
        "wait_for_input",
        {"input": {"message": [{"type": "text", "text": "Python"}]}},
    )
    started_events = loop._consume_workflow_event(started)
    completed_events = loop._consume_workflow_event(completed)
    assistant_event = next(
        event for event in started_events if isinstance(event, AssistantEvent)
    )
    waiting_event = next(
        event for event in started_events if isinstance(event, WaitingForInputEvent)
    )
    assert assistant_event.content == "Which language?"
    # NOTE(review): only the first two examples surface as predefined answers;
    # presumably the trailing example is treated as the free-form option — confirm.
    assert waiting_event.predefined_answers == ["Python", "JavaScript"]
    user_event = next(
        event for event in completed_events if isinstance(event, UserMessageEvent)
    )
    assert user_event.content == "Python"
def test_tool_events_update_stats_and_messages() -> None:
    """A tool start/complete pair updates stats and appends a tool message."""
    loop = _make_loop(enabled_tools=["todo"])
    started = _started(
        "tool-task-1",
        "AgentToolCallState",
        {"name": "todo", "tool_call_id": "call-1", "kwargs": {"action": "read"}},
    )
    completed = _completed(
        "tool-task-1",
        "AgentToolCallState",
        {
            "name": "todo",
            "tool_call_id": "call-1",
            "kwargs": {"action": "read"},
            "output": {"total_count": 0},
        },
    )
    started_events = loop._consume_workflow_event(started)
    completed_events = loop._consume_workflow_event(completed)
    assert any(isinstance(event, ToolCallEvent) for event in started_events)
    result_event = next(
        event for event in completed_events if isinstance(event, ToolResultEvent)
    )
    assert result_event.error is None
    assert result_event.cancelled is False
    assert result_event.tool_call_id == "call-1"
    # Remote tool calls count as auto-agreed and successful.
    assert loop.stats.tool_calls_agreed == 1
    assert loop.stats.tool_calls_succeeded == 1
    assert loop.stats.tool_calls_failed == 0
    tool_messages = [msg for msg in loop.messages if msg.role == Role.tool]
    assert len(tool_messages) == 1
    assert tool_messages[0].tool_call_id == "call-1"
def test_ask_user_question_tool_emits_assistant_question() -> None:
    """ask_user_question surfaces its question text as an assistant message."""
    loop = _make_loop(enabled_tools=["ask_user_question"])
    started = _started(
        "tool-task-question",
        "AgentToolCallState",
        {
            "name": "ask_user_question",
            "tool_call_id": "call-question",
            "kwargs": {
                "questions": [
                    {
                        "question": "Which file type should I create?",
                        "options": [{"label": "Python"}, {"label": "JavaScript"}],
                    }
                ]
            },
        },
    )
    events = loop._consume_workflow_event(started)
    assistant_event = next(
        event for event in events if isinstance(event, AssistantEvent)
    )
    tool_call_event = next(
        event for event in events if isinstance(event, ToolCallEvent)
    )
    assert assistant_event.content == "Which file type should I create?"
    assert tool_call_event.tool_call_id == "call-question"
def test_ask_user_question_wait_for_input_completion_emits_tool_result() -> None:
    """Answering the follow-up wait_for_input closes the pending question tool call."""
    loop = _make_loop(enabled_tools=["ask_user_question"])
    ask_started = _started(
        "tool-task-question",
        "AgentToolCallState",
        {
            "name": "ask_user_question",
            "tool_call_id": "call-question",
            "kwargs": {
                "questions": [{"question": "Which type of file?", "options": []}]
            },
        },
    )
    wait_started = _started(
        "wait-task-1",
        "wait_for_input",
        {"input_schema": {"title": "ChatInput"}, "label": "Which type of file?"},
    )
    wait_completed = _completed(
        "wait-task-1",
        "wait_for_input",
        {
            "input_schema": {"title": "ChatInput"},
            "label": "Which type of file?",
            "input": {"message": [{"type": "text", "text": "Python"}]},
        },
    )
    loop._consume_workflow_event(ask_started)
    loop._consume_workflow_event(wait_started)
    completed_events = loop._consume_workflow_event(wait_completed)
    # The wait completion yields the tool result for the pending question call…
    tool_result = next(
        (e for e in completed_events if isinstance(e, ToolResultEvent)), None
    )
    assert tool_result is not None
    assert tool_result.tool_call_id == "call-question"
    # …plus the user's chosen answer as a user message.
    user_message = next(e for e in completed_events if isinstance(e, UserMessageEvent))
    assert user_message.content == "Python"
def test_working_events_without_tool_call_id_render_remote_progress_row() -> None:
    """Working tasks with no tool linkage render a synthetic tool row on completion."""
    loop = _make_loop()
    started = _started(
        "working-1",
        "working",
        {"title": "Creating sandbox", "content": "initializing", "toolUIState": None},
    )
    completed = _completed(
        "working-1",
        "working",
        {
            "title": "Creating sandbox",
            "content": "sandbox created",
            "toolUIState": None,
        },
    )
    started_events = loop._consume_workflow_event(started)
    completed_events = loop._consume_workflow_event(completed)
    # Nothing is rendered at start; the full row appears only on completion.
    assert started_events == []
    assert any(isinstance(event, ToolCallEvent) for event in completed_events)
    assert any(isinstance(event, ToolStreamEvent) for event in completed_events)
    result_event = next(
        event for event in completed_events if isinstance(event, ToolResultEvent)
    )
    # The working task's title/id double as tool name and tool-call id.
    assert result_event.tool_name == "Creating sandbox"
    assert result_event.tool_call_id == "working-1"
def test_working_events_with_tool_call_id_wait_for_real_tool_call() -> None:
    """A working task naming a toolCallId owns the row; the real tool event adds none."""
    loop = _make_loop()
    working_started = _started(
        "working-tool-1",
        "working",
        {
            "title": "Executing write_file",
            "content": "writing file",
            "toolUIState": {"toolCallId": "call-write"},
        },
    )
    tool_started = _started(
        "tool-task-1",
        "AgentToolCallState",
        {
            "name": "write_file",
            "tool_call_id": "call-write",
            "kwargs": {
                "path": "hello_world.js",
                "content": "console.log('Hello, World!');",
            },
        },
    )
    working_events = loop._consume_workflow_event(working_started)
    tool_events = loop._consume_workflow_event(tool_started)
    # The working event renders the call row immediately…
    assert any(isinstance(event, ToolCallEvent) for event in working_events)
    assert any(isinstance(event, ToolStreamEvent) for event in working_events)
    # …so the real tool-call event must not emit a duplicate row.
    assert not any(isinstance(event, ToolCallEvent) for event in tool_events)
    assert not any(isinstance(event, ToolResultEvent) for event in tool_events)
def test_working_task_promoted_to_real_tool_call_does_not_create_duplicate_row() -> (
    None
):
    """Promoting a working row to a real tool call emits exactly one call/result pair."""
    loop = _make_loop(enabled_tools=["write_file"])
    working_started = _started(
        "working-tool-1",
        "working",
        {
            "title": "Writing file",
            "content": '# hello.py\n\nprint("Hello, World!")',
            "toolUIState": None,
        },
    )
    # Completion of the same working task now carries a toolCallId, promoting
    # it to a real tool call.
    working_promoted = _completed(
        "working-tool-1",
        "working",
        {
            "title": "Executing write_file",
            "content": "",
            "toolUIState": {
                "type": "file",
                "toolCallId": "call-write",
                "operations": [
                    {
                        "type": "create",
                        "uri": "/workspace/hello.py",
                        "content": 'print("Hello, World!")',
                    }
                ],
            },
        },
    )
    agent_tool_completed = _completed(
        "tool-task-1",
        "AgentToolCallState",
        {
            "name": "write_file",
            "tool_call_id": "call-write",
            "kwargs": {"path": "hello.py", "content": 'print("Hello, World!")'},
            "output": {
                "path": "/workspace/hello.py",
                "bytes_written": 22,
                "file_existed": False,
                "content": 'print("Hello, World!")',
            },
        },
    )
    assert loop._consume_workflow_event(working_started) == []
    promoted_events = loop._consume_workflow_event(working_promoted)
    # Promotion renders exactly one call row and its result, no stream chunk.
    assert len([e for e in promoted_events if isinstance(e, ToolCallEvent)]) == 1
    assert not any(isinstance(e, ToolStreamEvent) for e in promoted_events)
    assert any(isinstance(e, ToolResultEvent) for e in promoted_events)
    # The later AgentToolCallState completion must not duplicate the row.
    completed_events = loop._consume_workflow_event(agent_tool_completed)
    assert not any(isinstance(e, ToolCallEvent) for e in completed_events)
    assert not any(isinstance(e, ToolResultEvent) for e in completed_events)
def test_idle_boundary_waits_for_open_tool_results() -> None:
    """An idle candidate is not a boundary while a tool call is still open."""
    loop = _make_loop(enabled_tools=["write_file"])
    working_started = _started(
        "working-tool-1",
        "working",
        {
            "title": "Executing write_file",
            "content": "writing file",
            "toolUIState": {"toolCallId": "call-write"},
        },
    )
    # An AgentInputState completion with input=None marks a potential idle point.
    idle_candidate = _completed("input-task-1", "AgentInputState", {"input": None})
    tool_completed = _completed(
        "tool-task-1",
        "AgentToolCallState",
        {
            "name": "write_file",
            "tool_call_id": "call-write",
            "kwargs": {
                "path": "hello_world.js",
                "content": "console.log('Hello, World!');",
            },
            "output": {
                "path": "/workspace/hello_world.js",
                "bytes_written": 29,
                "file_existed": False,
                "content": "console.log('Hello, World!');",
            },
        },
    )
    idle_after_tool = _completed("input-task-2", "AgentInputState", {"input": None})
    working_events = loop._consume_workflow_event(working_started)
    assert any(isinstance(event, ToolCallEvent) for event in working_events)
    loop._consume_workflow_event(idle_candidate)
    # call-write is still open, so this idle candidate is not a boundary yet.
    assert loop._is_idle_boundary(idle_candidate) is False
    tool_events = loop._consume_workflow_event(tool_completed)
    assert not any(isinstance(event, ToolCallEvent) for event in tool_events)
    assert any(isinstance(event, ToolResultEvent) for event in tool_events)
    loop._consume_workflow_event(idle_after_tool)
    # With the tool call closed, the next idle candidate is a real boundary.
    assert loop._is_idle_boundary(idle_after_tool) is True
def test_send_user_message_tool_is_not_rendered() -> None:
    """send_user_message tool-call tasks must produce no render events at all."""
    loop = _make_loop()
    call_payload = {
        "name": "send_user_message",
        "tool_call_id": "call-send",
        "kwargs": {"message": "hello"},
    }
    started = _started(
        "tool-task-send-user-message", "AgentToolCallState", dict(call_payload)
    )
    completed = _completed(
        "tool-task-send-user-message",
        "AgentToolCallState",
        {**call_payload, "output": {"success": True, "error": None}},
    )
    assert loop._consume_workflow_event(started) == []
    assert loop._consume_workflow_event(completed) == []
def test_send_user_message_working_events_are_not_rendered() -> None:
    """Working-state updates for send_user_message are likewise suppressed."""
    loop = _make_loop()
    ui_state = {
        "title": "Executing send_user_message",
        "content": "Hello!",
        "toolUIState": {"toolCallId": "call-send-working"},
    }
    started = _started("working-send-user-message", "working", dict(ui_state))
    completed = _completed("working-send-user-message", "working", dict(ui_state))
    assert loop._consume_workflow_event(started) == []
    assert loop._consume_workflow_event(completed) == []
def test_remote_bash_uses_known_tool_display_even_when_disabled_locally() -> None:
    """A remote bash tool call renders with the known bash tool class even
    though only write_file is enabled locally."""
    loop = _make_loop(enabled_tools=["write_file"])
    started = _started(
        "tool-task-bash",
        "AgentToolCallState",
        {
            "name": "bash",
            "tool_call_id": "call-bash",
            "kwargs": {"command": "cat hello.py | wc -c"},
        },
    )
    completed = _completed(
        "tool-task-bash",
        "AgentToolCallState",
        {
            "name": "bash",
            "tool_call_id": "call-bash",
            "kwargs": {"command": "cat hello.py | wc -c"},
            "output": {
                "command": "cat hello.py | wc -c",
                "stdout": "22\n",
                "stderr": "",
                "returncode": 0,
            },
        },
    )
    started_events = loop._consume_workflow_event(started)
    completed_events = loop._consume_workflow_event(completed)
    tool_call_event = next(
        event for event in started_events if isinstance(event, ToolCallEvent)
    )
    result_event = next(
        event for event in completed_events if isinstance(event, ToolResultEvent)
    )
    # The call is typed with the real bash tool class, not a generic fallback,
    # and its kwargs are parsed into a typed args object.
    assert tool_call_event.tool_name == "bash"
    assert tool_call_event.tool_class.get_name() == "bash"
    assert tool_call_event.args is not None
    assert tool_call_event.args.command == "cat hello.py | wc -c"  # type: ignore[attr-defined]
    # The output payload is parsed into typed fields on the result event.
    assert result_event.result is not None
    assert result_event.result.command == "cat hello.py | wc -c"  # type: ignore[attr-defined]
    assert result_event.result.stdout == "22\n"  # type: ignore[attr-defined]
def test_canceled_tool_marks_cancelled_and_failed_stats() -> None:
    """A canceled tool task yields a cancelled ToolResultEvent and counts as failed."""
    loop = _make_loop(enabled_tools=["todo"])
    pending_call = {
        "name": "todo",
        "tool_call_id": "call-2",
        "kwargs": {"action": "read"},
    }
    loop._task_state["tool-task-2"] = pending_call
    canceled = _canceled(
        "tool-task-2", "AgentToolCallState", reason="user interrupted tool"
    )
    emitted = loop._consume_workflow_event(canceled)
    result_event = next(e for e in emitted if isinstance(e, ToolResultEvent))
    assert result_event.cancelled is True
    assert result_event.error == "Canceled: user interrupted tool"
    assert loop.stats.tool_calls_failed == 1
    assert loop.stats.tool_calls_succeeded == 0
def test_working_thinking_type_emits_assistant_events() -> None:
    """'thinking'-typed working tasks emit exactly one ReasoningEvent on
    progress, and nothing on start or completion."""
    loop = _make_loop()
    initial_state = {
        "type": "thinking",
        "title": "Thinking",
        "content": "",
        "toolUIState": None,
    }
    started = _started("thinking-1", "working", initial_state)
    in_progress = _in_progress(
        "thinking-1", "working", [JSONPatchAppend(path="/content", value="Hello!")]
    )
    completed = _completed(
        "thinking-1", "working", {**initial_state, "content": "Hello!"}
    )
    assert loop._consume_workflow_event(started) == []
    progress_events = loop._consume_workflow_event(in_progress)
    assert len(progress_events) == 1
    only_event = progress_events[0]
    assert isinstance(only_event, ReasoningEvent)
    assert only_event.content == "Hello!"
    assert loop._consume_workflow_event(completed) == []
def test_working_bash_progress_without_tool_call_id_streams_command_output() -> None:
    """A working task that gains a command-type toolUIState via JSON patches —
    with no toolCallId — is surfaced as a bash ToolCallEvent plus a
    ToolStreamEvent keyed by the task id."""
    loop = _make_loop(enabled_tools=["write_file"])
    # Starts as a generic tool-type working task with no tool UI state yet.
    started = _started(
        "working-bash-1",
        "working",
        {"type": "tool", "title": "Planning", "content": "", "toolUIState": None},
    )
    # Progress patches attach a command result and retitle the task to bash.
    in_progress = _in_progress(
        "working-bash-1",
        "working",
        [
            JSONPatchAdd(
                path="/toolUIState",
                value={
                    "type": "command",
                    "command": "ls -la /workspace",
                    "result": {
                        "status": "success",
                        "output": "total 4\ndrwxrwxrwx 2 root root 4096 Mar 20 10:18 .\ndrwxr-xr-x 1 root root 80 Mar 20 10:18 ..\n",
                    },
                },
            ),
            JSONPatchReplace(path="/title", value="Executing bash"),
            JSONPatchReplace(path="/content", value=""),
        ],
    )
    started_events = loop._consume_workflow_event(started)
    progress_events = loop._consume_workflow_event(in_progress)
    assert started_events == []
    tool_call_event = next(
        event for event in progress_events if isinstance(event, ToolCallEvent)
    )
    tool_stream_event = next(
        event for event in progress_events if isinstance(event, ToolStreamEvent)
    )
    # With no toolCallId available, the working task id serves as the call id.
    assert tool_call_event.tool_name == "bash"
    assert tool_call_event.tool_class.get_name() == "bash"
    assert tool_call_event.tool_call_id == "working-bash-1"
    assert tool_stream_event.tool_name == "bash"
    assert tool_stream_event.tool_call_id == "working-bash-1"
    # The stream message includes both the command and its captured output.
    assert "command: ls -la /workspace" in tool_stream_event.message
    assert "total 4" in tool_stream_event.message
    assert "drwxrwxrwx 2 root root 4096" in tool_stream_event.message
def test_working_completed_with_tool_call_id_emits_tool_result() -> None:
    """Completing a working task that carries a toolCallId emits exactly one
    successful ToolResultEvent bound to that call id."""
    loop = _make_loop(enabled_tools=["write_file"])
    # Start: opens tool call "call-write-solo" (ToolCallEvent asserted below).
    working_started = _started(
        "working-tool-1",
        "working",
        {
            "title": "Executing write_file",
            "content": "",
            "toolUIState": {"toolCallId": "call-write-solo"},
        },
    )
    # Completion: file-type tool UI state with a single create operation.
    working_completed = _completed(
        "working-tool-1",
        "working",
        {
            "title": "Executing write_file",
            "content": "",
            "toolUIState": {
                "type": "file",
                "toolCallId": "call-write-solo",
                "operations": [
                    {
                        "type": "create",
                        "uri": "/workspace/hello.py",
                        "content": 'print("Hello, World!")',
                    }
                ],
            },
        },
    )
    started_events = loop._consume_workflow_event(working_started)
    assert any(isinstance(e, ToolCallEvent) for e in started_events)
    completed_events = loop._consume_workflow_event(working_completed)
    result_events = [e for e in completed_events if isinstance(e, ToolResultEvent)]
    # Exactly one result, error-free, keyed by the original toolCallId.
    assert len(result_events) == 1
    assert result_events[0].error is None
    assert result_events[0].tool_call_id == "call-write-solo"
def test_working_completed_with_tool_call_id_emits_error_result() -> None:
    """A failing completion for a working task with a toolCallId yields exactly
    one ToolResultEvent carrying an error."""
    loop = _make_loop(enabled_tools=["write_file"])
    call_id = "call-write-err"
    working_started = _started(
        "working-tool-2",
        "working",
        {
            "title": "Executing write_file",
            "content": "",
            "toolUIState": {"toolCallId": call_id},
        },
    )
    working_completed = _completed(
        "working-tool-2",
        "working",
        {
            "title": "Executing write_file",
            "content": "Error: File exists. Set overwrite=True.",
            "toolUIState": {
                "type": "file",
                "toolCallId": call_id,
                "operations": [],
            },
        },
    )
    loop._consume_workflow_event(working_started)
    result_events = [
        e
        for e in loop._consume_workflow_event(working_completed)
        if isinstance(e, ToolResultEvent)
    ]
    assert len(result_events) == 1
    errored = result_events[0]
    assert errored.error is not None
    assert errored.tool_call_id == call_id

View File

@@ -0,0 +1,68 @@
from __future__ import annotations
import pytest
from vibe.core.teleport.nuage import TeleportSession
from vibe.core.teleport.teleport import TeleportService
@pytest.fixture
def teleport_service() -> TeleportService:
    # Allocate via __new__ to skip __init__ entirely — the tests below only
    # exercise the helper _get_last_user_message, which needs no constructor state.
    return TeleportService.__new__(TeleportService)
def test_returns_last_user_message(teleport_service: TeleportService) -> None:
    """The most recent user-role message wins over earlier ones."""
    history = [
        {"role": "user", "content": "first"},
        {"role": "assistant", "content": "reply"},
        {"role": "user", "content": "second"},
    ]
    session = TeleportSession(messages=history)
    assert teleport_service._get_last_user_message(session) == "second"
def test_returns_none_when_no_user_messages(teleport_service: TeleportService) -> None:
    """System and assistant messages alone yield None."""
    non_user_history = [
        {"role": "system", "content": "system prompt"},
        {"role": "assistant", "content": "hello"},
    ]
    session = TeleportSession(messages=non_user_history)
    assert teleport_service._get_last_user_message(session) is None
def test_returns_none_for_empty_messages(teleport_service: TeleportService) -> None:
    """An empty message history yields None."""
    empty_session = TeleportSession(messages=[])
    assert teleport_service._get_last_user_message(empty_session) is None
def test_skips_non_string_content(teleport_service: TeleportService) -> None:
    """Structured (list-of-blocks) content is not treated as a user message."""
    block_content = [{"type": "text", "text": "block content"}]
    session = TeleportSession(
        messages=[{"role": "user", "content": block_content}]
    )
    assert teleport_service._get_last_user_message(session) is None
def test_skips_empty_string_content(teleport_service: TeleportService) -> None:
    """An empty-string user message is ignored."""
    empty_message = {"role": "user", "content": ""}
    session = TeleportSession(messages=[empty_message])
    assert teleport_service._get_last_user_message(session) is None
def test_skips_non_string_returns_earlier_string(
    teleport_service: TeleportService,
) -> None:
    """When the newest message is non-string, the latest string message is returned."""
    older = {"role": "user", "content": "valid message"}
    newer = {"role": "user", "content": [{"type": "text", "text": "block"}]}
    session = TeleportSession(messages=[older, newer])
    assert teleport_service._get_last_user_message(session) == "valid message"
def test_skips_missing_content_key(teleport_service: TeleportService) -> None:
    """A user message lacking a 'content' key is skipped entirely."""
    keyless_message = {"role": "user"}
    session = TeleportSession(messages=[keyless_message])
    assert teleport_service._get_last_user_message(session) is None

View File

@@ -5,73 +5,73 @@ from unittest.mock import AsyncMock, MagicMock
import httpx
import pytest
from vibe.core.auth import EncryptedPayload
from vibe.core.teleport.errors import ServiceTeleportError
from vibe.core.teleport.nuage import (
CreateLeChatThreadInput,
GitRepoConfig,
ChatAssistantParams,
GitHubParams,
NuageClient,
VibeNewSandbox,
VibeSandboxConfig,
WorkflowIntegrations,
WorkflowParams,
)
class TestNuageModels:
def test_git_repo_config_defaults(self) -> None:
config = GitRepoConfig(url="https://github.com/owner/repo.git")
assert config.url == "https://github.com/owner/repo.git"
assert config.branch is None
assert config.commit is None
def test_github_params_defaults(self) -> None:
params = GitHubParams(repo="owner/repo")
assert params.repo == "owner/repo"
assert params.branch is None
assert params.commit is None
assert params.pr_number is None
assert params.teleported_diffs is None
def test_git_repo_config_with_values(self) -> None:
config = GitRepoConfig(
url="https://github.com/owner/repo.git", branch="main", commit="abc123"
def test_github_params_with_values(self) -> None:
params = GitHubParams(
repo="owner/repo",
branch="main",
commit="abc123",
pr_number=42,
teleported_diffs=b"base64data",
)
assert config.branch == "main"
assert config.commit == "abc123"
assert params.repo == "owner/repo"
assert params.branch == "main"
assert params.commit == "abc123"
assert params.pr_number == 42
assert params.teleported_diffs == b"base64data"
    def test_vibe_sandbox_config_defaults(self) -> None:
        # A bare VibeSandboxConfig carries no git repository configuration.
        config = VibeSandboxConfig()
        assert config.git_repo is None
def test_chat_assistant_params(self) -> None:
params = ChatAssistantParams(
user_message="test message", project_name="test-project"
)
assert params.user_message == "test message"
assert params.project_name == "test-project"
    def test_vibe_new_sandbox_defaults(self) -> None:
        # Defaults: type discriminator "new", no git repo config, no diffs attached.
        sandbox = VibeNewSandbox()
        assert sandbox.type == "new"
        assert sandbox.config.git_repo is None
        assert sandbox.teleported_diffs is None
def test_workflow_integrations(self) -> None:
integrations = WorkflowIntegrations(
github=GitHubParams(repo="owner/repo"),
chat_assistant=ChatAssistantParams(user_message="test"),
)
assert integrations.github is not None
assert integrations.chat_assistant is not None
def test_workflow_params_serialization(self) -> None:
params = WorkflowParams(
prompt="test prompt",
sandbox=VibeNewSandbox(
config=VibeSandboxConfig(
git_repo=GitRepoConfig(
url="https://github.com/owner/repo.git",
branch="main",
commit="abc123",
)
integrations=WorkflowIntegrations(
github=GitHubParams(
repo="owner/repo",
branch="main",
commit="abc123",
pr_number=42,
teleported_diffs=b"base64data",
),
teleported_diffs=b"base64data",
chat_assistant=ChatAssistantParams(user_message="test"),
),
)
data = params.model_dump()
assert data["prompt"] == "test prompt"
assert data["sandbox"]["type"] == "new"
assert data["sandbox"]["config"]["git_repo"]["url"] == (
"https://github.com/owner/repo.git"
)
assert data["sandbox"]["teleported_diffs"] == b"base64data"
def test_create_le_chat_thread_input(self) -> None:
input_data = CreateLeChatThreadInput(
encrypted_api_key={"key": "value"},
user_message="test message",
project_name="test-project",
)
assert input_data.encrypted_api_key == {"key": "value"}
assert input_data.user_message == "test message"
assert input_data.project_name == "test-project"
assert data["integrations"]["github"]["repo"] == "owner/repo"
assert data["integrations"]["github"]["pr_number"] == 42
assert data["integrations"]["github"]["teleported_diffs"] == b"base64data"
class TestNuageClientContextManager:
@@ -116,7 +116,7 @@ class TestNuageClientStartWorkflow:
mock_response.json.return_value = {"execution_id": "exec-123"}
mock_client.post = AsyncMock(return_value=mock_response)
params = WorkflowParams(prompt="test", sandbox=VibeNewSandbox())
params = WorkflowParams(prompt="test", integrations=WorkflowIntegrations())
execution_id = await nuage.start_workflow(params)
assert execution_id == "exec-123"
@@ -133,25 +133,12 @@ class TestNuageClientStartWorkflow:
mock_response.text = "Internal Server Error"
mock_client.post = AsyncMock(return_value=mock_response)
params = WorkflowParams(prompt="test", sandbox=VibeNewSandbox())
params = WorkflowParams(prompt="test", integrations=WorkflowIntegrations())
with pytest.raises(ServiceTeleportError, match="Nuage workflow trigger failed"):
await nuage.start_workflow(params)
@pytest.mark.asyncio
async def test_start_workflow_unauthorized_hint(
self, nuage: NuageClient, mock_client: MagicMock
) -> None:
mock_response = MagicMock()
mock_response.is_success = False
mock_response.text = "Unauthorized"
mock_client.post = AsyncMock(return_value=mock_response)
params = WorkflowParams(prompt="test", sandbox=VibeNewSandbox())
with pytest.raises(ServiceTeleportError, match="STAGING_MISTRAL_API_KEY"):
await nuage.start_workflow(params)
class TestNuageClientSendGithubToken:
class TestNuageClientGetGitHubIntegration:
@pytest.fixture
def mock_client(self) -> MagicMock:
return MagicMock(spec=httpx.AsyncClient)
@@ -163,48 +150,43 @@ class TestNuageClientSendGithubToken:
)
@pytest.mark.asyncio
async def test_send_github_token_success(
self,
nuage: NuageClient,
mock_client: MagicMock,
monkeypatch: pytest.MonkeyPatch,
async def test_get_github_integration_connected(
self, nuage: NuageClient, mock_client: MagicMock
) -> None:
query_response = MagicMock()
query_response.is_success = True
query_response.json.return_value = {
"result": {
"public_key": (
"-----BEGIN PUBLIC KEY-----\n"
"MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0Z3VS5JJcds3xfn/"
"ygWyf8TFXQNZ0XsLOqXB1Mi2+bKPFv1WfhECTxJ3c6SXl0p1sGyWTFxRV8u/"
"bKqYZ0E6VZ6YRTRPFiGq0kkONjVBFxOQ8Y0jeT0d9e0Y3E3MWDL8tQ0Nz9v8"
"5Y7gC8F1m/dEbBwPjCJQV0Dg0z3gZDO8RCG0GrBoLO0b+NNqL8FXPPDXQ1l4"
"FGnYM0gZ1rCU7Y/zTN1wI4sCQ0GJQPDA1hWB8KRJl5x0ZDXE3rRwT1E8c+Fn"
"ZFV1nN0C6zxF7GpVY3FVWXS4PA0FH+8C1+TnYgBL7xS0o+LF6PgjGT5F3CXD"
"BZmYSxKL+EsVVGT5EuYbJE9TxVwIDAQAB\n"
"-----END PUBLIC KEY-----"
)
}
mock_response = MagicMock()
mock_response.is_success = True
mock_response.json.return_value = {
"result": {"status": "connected", "oauth_url": None, "error": None}
}
mock_client.post = AsyncMock(return_value=mock_response)
signal_response = MagicMock()
signal_response.is_success = True
result = await nuage.get_github_integration("exec-123")
mock_client.post = AsyncMock(side_effect=[query_response, signal_response])
encrypted = EncryptedPayload(
encrypted_key="enc_key", nonce="nonce", ciphertext="cipher"
)
monkeypatch.setattr(
"vibe.core.teleport.nuage.encrypt", lambda _token, _key: encrypted
)
await nuage.send_github_token("exec-123", "ghp_token")
assert mock_client.post.call_count == 2
assert result.connected is True
assert result.oauth_url is None
@pytest.mark.asyncio
async def test_query_public_key_failure(
async def test_get_github_integration_waiting_for_oauth(
self, nuage: NuageClient, mock_client: MagicMock
) -> None:
mock_response = MagicMock()
mock_response.is_success = True
mock_response.json.return_value = {
"result": {
"status": "waiting_for_oauth",
"oauth_url": "https://github.com/login/oauth",
"error": None,
}
}
mock_client.post = AsyncMock(return_value=mock_response)
result = await nuage.get_github_integration("exec-123")
assert result.connected is False
assert result.oauth_url == "https://github.com/login/oauth"
@pytest.mark.asyncio
async def test_get_github_integration_failure(
self, nuage: NuageClient, mock_client: MagicMock
) -> None:
mock_response = MagicMock()
@@ -212,26 +194,13 @@ class TestNuageClientSendGithubToken:
mock_response.text = "Not found"
mock_client.post = AsyncMock(return_value=mock_response)
with pytest.raises(ServiceTeleportError, match="Failed to get public key"):
await nuage._query_public_key("exec-123")
@pytest.mark.asyncio
async def test_signal_encrypted_token_failure(
self, nuage: NuageClient, mock_client: MagicMock
) -> None:
mock_response = MagicMock()
mock_response.is_success = False
mock_response.text = "Signal failed"
mock_client.post = AsyncMock(return_value=mock_response)
encrypted = EncryptedPayload(
encrypted_key="enc_key", nonce="nonce", ciphertext="cipher"
)
with pytest.raises(ServiceTeleportError, match="Failed to send GitHub token"):
await nuage._signal_encrypted_token("exec-123", encrypted)
with pytest.raises(
ServiceTeleportError, match="Failed to get GitHub integration"
):
await nuage.get_github_integration("exec-123")
class TestNuageClientCreateLeChatThread:
class TestNuageClientGetChatAssistantUrl:
@pytest.fixture
def mock_client(self) -> MagicMock:
return MagicMock(spec=httpx.AsyncClient)
@@ -243,90 +212,32 @@ class TestNuageClientCreateLeChatThread:
)
@pytest.mark.asyncio
async def test_create_le_chat_thread_success(
self,
nuage: NuageClient,
mock_client: MagicMock,
monkeypatch: pytest.MonkeyPatch,
async def test_get_chat_assistant_url_success(
self, nuage: NuageClient, mock_client: MagicMock
) -> None:
query_response = MagicMock()
query_response.is_success = True
query_response.json.return_value = {
"result": {
"public_key": (
"-----BEGIN PUBLIC KEY-----\n"
"MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0Z3VS5JJcds3xfn/"
"ygWyf8TFXQNZ0XsLOqXB1Mi2+bKPFv1WfhECTxJ3c6SXl0p1sGyWTFxRV8u/"
"bKqYZ0E6VZ6YRTRPFiGq0kkONjVBFxOQ8Y0jeT0d9e0Y3E3MWDL8tQ0Nz9v8"
"5Y7gC8F1m/dEbBwPjCJQV0Dg0z3gZDO8RCG0GrBoLO0b+NNqL8FXPPDXQ1l4"
"FGnYM0gZ1rCU7Y/zTN1wI4sCQ0GJQPDA1hWB8KRJl5x0ZDXE3rRwT1E8c+Fn"
"ZFV1nN0C6zxF7GpVY3FVWXS4PA0FH+8C1+TnYgBL7xS0o+LF6PgjGT5F3CXD"
"BZmYSxKL+EsVVGT5EuYbJE9TxVwIDAQAB\n"
"-----END PUBLIC KEY-----"
)
}
}
update_response = MagicMock()
update_response.is_success = True
update_response.json.return_value = {
mock_response = MagicMock()
mock_response.is_success = True
mock_response.json.return_value = {
"result": {"chat_url": "https://chat.example.com/thread/123"}
}
mock_client.post = AsyncMock(return_value=mock_response)
mock_client.post = AsyncMock(side_effect=[query_response, update_response])
encrypted = EncryptedPayload(
encrypted_key="enc_key", nonce="nonce", ciphertext="cipher"
)
monkeypatch.setattr(
"vibe.core.teleport.nuage.encrypt", lambda _token, _key: encrypted
)
url = await nuage.create_le_chat_thread("exec-123", "test message")
url = await nuage.get_chat_assistant_url("exec-123")
assert url == "https://chat.example.com/thread/123"
@pytest.mark.asyncio
async def test_create_le_chat_thread_failure(
self,
nuage: NuageClient,
mock_client: MagicMock,
monkeypatch: pytest.MonkeyPatch,
async def test_get_chat_assistant_url_failure(
self, nuage: NuageClient, mock_client: MagicMock
) -> None:
query_response = MagicMock()
query_response.is_success = True
query_response.json.return_value = {
"result": {
"public_key": (
"-----BEGIN PUBLIC KEY-----\n"
"MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0Z3VS5JJcds3xfn/"
"ygWyf8TFXQNZ0XsLOqXB1Mi2+bKPFv1WfhECTxJ3c6SXl0p1sGyWTFxRV8u/"
"bKqYZ0E6VZ6YRTRPFiGq0kkONjVBFxOQ8Y0jeT0d9e0Y3E3MWDL8tQ0Nz9v8"
"5Y7gC8F1m/dEbBwPjCJQV0Dg0z3gZDO8RCG0GrBoLO0b+NNqL8FXPPDXQ1l4"
"FGnYM0gZ1rCU7Y/zTN1wI4sCQ0GJQPDA1hWB8KRJl5x0ZDXE3rRwT1E8c+Fn"
"ZFV1nN0C6zxF7GpVY3FVWXS4PA0FH+8C1+TnYgBL7xS0o+LF6PgjGT5F3CXD"
"BZmYSxKL+EsVVGT5EuYbJE9TxVwIDAQAB\n"
"-----END PUBLIC KEY-----"
)
}
}
update_response = MagicMock()
update_response.is_success = False
update_response.text = "Failed to create thread"
mock_client.post = AsyncMock(side_effect=[query_response, update_response])
encrypted = EncryptedPayload(
encrypted_key="enc_key", nonce="nonce", ciphertext="cipher"
)
monkeypatch.setattr(
"vibe.core.teleport.nuage.encrypt", lambda _token, _key: encrypted
)
mock_response = MagicMock()
mock_response.is_success = False
mock_response.text = "Failed"
mock_client.post = AsyncMock(return_value=mock_response)
with pytest.raises(
ServiceTeleportError, match="Failed to create Le Chat thread"
ServiceTeleportError, match="Failed to get chat assistant integration"
):
await nuage.create_le_chat_thread("exec-123", "test message")
await nuage.get_chat_assistant_url("exec-123")
class TestNuageClientHeaders:

View File

@@ -16,18 +16,19 @@ from vibe.core.teleport.errors import (
ServiceTeleportNotSupportedError,
)
from vibe.core.teleport.git import GitRepoInfo
from vibe.core.teleport.nuage import TeleportSession
from vibe.core.teleport.teleport import _NUAGE_EXECUTION_URL_TEMPLATE, TeleportService
from vibe.core.teleport.nuage import GitHubStatus, TeleportSession
from vibe.core.teleport.teleport import TeleportService
from vibe.core.teleport.types import (
TeleportAuthCompleteEvent,
TeleportAuthRequiredEvent,
TeleportCheckingGitEvent,
TeleportCompleteEvent,
TeleportFetchingUrlEvent,
TeleportPushingEvent,
TeleportPushRequiredEvent,
TeleportPushResponseEvent,
TeleportSendingGithubTokenEvent,
TeleportStartingWorkflowEvent,
TeleportWaitingForGitHubEvent,
)
@@ -68,7 +69,7 @@ class TestTeleportServiceCompressDiff:
service._compress_diff(large_diff, max_size=100)
class TestTeleportServiceBuildSandbox:
class TestTeleportServiceBuildGitHubParams:
@pytest.fixture
def service(self, tmp_path: Path) -> TeleportService:
mock_session_logger = MagicMock()
@@ -80,7 +81,7 @@ class TestTeleportServiceBuildSandbox:
workdir=tmp_path,
)
def test_builds_sandbox_from_git_info(self, service: TeleportService) -> None:
def test_builds_params_from_git_info(self, service: TeleportService) -> None:
git_info = GitRepoInfo(
remote_url="https://github.com/owner/repo.git",
owner="owner",
@@ -89,14 +90,12 @@ class TestTeleportServiceBuildSandbox:
commit="abc123",
diff="",
)
sandbox = service._build_sandbox(git_info)
params = service._build_github_params(git_info)
assert sandbox.type == "new"
assert sandbox.config.git_repo is not None
assert sandbox.config.git_repo.url == "https://github.com/owner/repo.git"
assert sandbox.config.git_repo.branch == "main"
assert sandbox.config.git_repo.commit == "abc123"
assert sandbox.teleported_diffs is None
assert params.repo == "owner/repo"
assert params.branch == "main"
assert params.commit == "abc123"
assert params.teleported_diffs is None
def test_includes_compressed_diff(self, service: TeleportService) -> None:
git_info = GitRepoInfo(
@@ -107,9 +106,9 @@ class TestTeleportServiceBuildSandbox:
commit="abc123",
diff="diff content",
)
sandbox = service._build_sandbox(git_info)
params = service._build_github_params(git_info)
assert sandbox.teleported_diffs is not None
assert params.teleported_diffs is not None
class TestTeleportServiceValidateConfig:
@@ -122,9 +121,7 @@ class TestTeleportServiceValidateConfig:
nuage_api_key="",
workdir=tmp_path,
)
with pytest.raises(
ServiceTeleportError, match="STAGING_MISTRAL_API_KEY not set"
):
with pytest.raises(ServiceTeleportError, match="MISTRAL_API_KEY not set"):
service._validate_config()
def test_passes_when_api_key_set(self, tmp_path: Path) -> None:
@@ -138,6 +135,21 @@ class TestTeleportServiceValidateConfig:
)
service._validate_config()
def test_uses_custom_env_var_name_in_error(self, tmp_path: Path) -> None:
mock_session_logger = MagicMock()
mock_config = MagicMock()
mock_config.nuage_api_key_env_var = "CUSTOM_API_KEY"
service = TeleportService(
session_logger=mock_session_logger,
nuage_base_url="https://api.example.com",
nuage_workflow_id="workflow-id",
nuage_api_key="",
workdir=tmp_path,
vibe_config=mock_config,
)
with pytest.raises(ServiceTeleportError, match="CUSTOM_API_KEY not set"):
service._validate_config()
class TestTeleportServiceCheckSupported:
@pytest.fixture
@@ -213,6 +225,8 @@ class TestTeleportServiceExecute:
nuage_api_key="api-key",
workdir=tmp_path,
)
service._git.fetch = AsyncMock()
service._git.is_branch_pushed = AsyncMock(return_value=True)
return service
@pytest.fixture
@@ -226,20 +240,32 @@ class TestTeleportServiceExecute:
diff="",
)
@pytest.fixture
def mock_github_connected(self) -> MagicMock:
github_data = MagicMock()
github_data.connected = True
github_data.oauth_url = None
github_data.status = GitHubStatus.CONNECTED
return github_data
@pytest.mark.asyncio
async def test_execute_happy_path_commit_pushed_with_token(
self, service: TeleportService, git_info: GitRepoInfo
async def test_execute_happy_path_github_already_connected(
self,
service: TeleportService,
git_info: GitRepoInfo,
mock_github_connected: MagicMock,
) -> None:
service._git.get_info = AsyncMock(return_value=git_info)
service._git.is_commit_pushed = AsyncMock(return_value=True)
mock_github_auth = MagicMock()
mock_github_auth.get_valid_token = AsyncMock(return_value="ghp_existing_token")
service._github_auth = mock_github_auth
mock_nuage = MagicMock()
mock_nuage.start_workflow = AsyncMock(return_value="exec-123")
mock_nuage.send_github_token = AsyncMock()
mock_nuage.get_github_integration = AsyncMock(
return_value=mock_github_connected
)
mock_nuage.get_chat_assistant_url = AsyncMock(
return_value="https://chat.example.com/123"
)
service._nuage = mock_nuage
session = TeleportSession()
@@ -250,29 +276,31 @@ class TestTeleportServiceExecute:
assert isinstance(events[0], TeleportCheckingGitEvent)
assert isinstance(events[1], TeleportStartingWorkflowEvent)
assert isinstance(events[2], TeleportSendingGithubTokenEvent)
assert isinstance(events[3], TeleportCompleteEvent)
expected_url = _NUAGE_EXECUTION_URL_TEMPLATE.format(
workflow_id="workflow-id", execution_id="exec-123"
)
assert events[3].url == expected_url
assert isinstance(events[2], TeleportWaitingForGitHubEvent)
assert isinstance(events[3], TeleportFetchingUrlEvent)
assert isinstance(events[4], TeleportCompleteEvent)
assert events[4].url == "https://chat.example.com/123"
@pytest.mark.asyncio
async def test_execute_requires_push_and_user_approves(
self, service: TeleportService, git_info: GitRepoInfo
self,
service: TeleportService,
git_info: GitRepoInfo,
mock_github_connected: MagicMock,
) -> None:
service._git.get_info = AsyncMock(return_value=git_info)
service._git.is_commit_pushed = AsyncMock(return_value=False)
service._git.get_unpushed_commit_count = AsyncMock(return_value=3)
service._git.push_current_branch = AsyncMock(return_value=True)
mock_github_auth = MagicMock()
mock_github_auth.get_valid_token = AsyncMock(return_value="ghp_token")
service._github_auth = mock_github_auth
mock_nuage = MagicMock()
mock_nuage.start_workflow = AsyncMock(return_value="exec-123")
mock_nuage.send_github_token = AsyncMock()
mock_nuage.get_github_integration = AsyncMock(
return_value=mock_github_connected
)
mock_nuage.get_chat_assistant_url = AsyncMock(
return_value="https://chat.example.com/123"
)
service._nuage = mock_nuage
session = TeleportSession()
@@ -315,25 +343,29 @@ class TestTeleportServiceExecute:
await gen.asend(TeleportPushResponseEvent(approved=False))
@pytest.mark.asyncio
async def test_execute_requires_auth_flow(
async def test_execute_requires_oauth_flow(
self, service: TeleportService, git_info: GitRepoInfo
) -> None:
service._git.get_info = AsyncMock(return_value=git_info)
service._git.is_commit_pushed = AsyncMock(return_value=True)
mock_handle = MagicMock()
mock_handle.info.user_code = "ABC-123"
mock_handle.info.verification_uri = "https://github.com/login/device"
github_pending = MagicMock()
github_pending.connected = False
github_pending.oauth_url = "https://github.com/login/oauth"
github_pending.status = GitHubStatus.WAITING_FOR_OAUTH
mock_github_auth = MagicMock()
mock_github_auth.get_valid_token = AsyncMock(return_value=None)
mock_github_auth.start_device_flow = AsyncMock(return_value=mock_handle)
mock_github_auth.wait_for_token = AsyncMock(return_value="ghp_new_token")
service._github_auth = mock_github_auth
github_connected = MagicMock()
github_connected.connected = True
github_connected.oauth_url = None
github_connected.status = GitHubStatus.CONNECTED
mock_nuage = MagicMock()
mock_nuage.start_workflow = AsyncMock(return_value="exec-123")
mock_nuage.send_github_token = AsyncMock()
mock_nuage.get_github_integration = AsyncMock(return_value=github_pending)
mock_nuage.wait_for_github_connection = AsyncMock(return_value=github_connected)
mock_nuage.get_chat_assistant_url = AsyncMock(
return_value="https://chat.example.com/123"
)
service._nuage = mock_nuage
session = TeleportSession()
@@ -343,35 +375,42 @@ class TestTeleportServiceExecute:
events.append(event)
assert isinstance(events[0], TeleportCheckingGitEvent)
assert isinstance(events[1], TeleportAuthRequiredEvent)
assert events[1].user_code == "ABC-123"
assert isinstance(events[2], TeleportAuthCompleteEvent)
assert isinstance(events[3], TeleportStartingWorkflowEvent)
assert isinstance(events[1], TeleportStartingWorkflowEvent)
assert isinstance(events[2], TeleportWaitingForGitHubEvent)
assert isinstance(events[3], TeleportAuthRequiredEvent)
assert events[3].oauth_url == "https://github.com/login/oauth"
assert isinstance(events[4], TeleportAuthCompleteEvent)
assert isinstance(events[-1], TeleportCompleteEvent)
@pytest.mark.asyncio
async def test_execute_uses_default_prompt_when_none(
self, service: TeleportService, git_info: GitRepoInfo
self,
service: TeleportService,
git_info: GitRepoInfo,
mock_github_connected: MagicMock,
) -> None:
service._git.get_info = AsyncMock(return_value=git_info)
service._git.is_commit_pushed = AsyncMock(return_value=True)
mock_github_auth = MagicMock()
mock_github_auth.get_valid_token = AsyncMock(return_value="ghp_token")
service._github_auth = mock_github_auth
mock_nuage = MagicMock()
mock_nuage.start_workflow = AsyncMock(return_value="exec-123")
mock_nuage.send_github_token = AsyncMock()
mock_nuage.get_github_integration = AsyncMock(
return_value=mock_github_connected
)
mock_nuage.get_chat_assistant_url = AsyncMock(
return_value="https://chat.example.com/123"
)
service._nuage = mock_nuage
session = TeleportSession()
session = TeleportSession(
messages=[{"role": "user", "content": "help me refactor"}]
)
gen = service.execute(None, session)
async for _ in gen:
pass
call_args = mock_nuage.start_workflow.call_args
assert "continue where you left off" in call_args[0][0].prompt
assert "teleported" in call_args[0][0].prompt.lower()
class TestTeleportServiceContextManager:
@@ -388,7 +427,6 @@ class TestTeleportServiceContextManager:
assert service._client is None
async with service:
assert service._client is not None
assert service._github_auth is not None
assert service._nuage is not None
assert service._client is None

View File

@@ -0,0 +1,121 @@
from __future__ import annotations
from unittest.mock import AsyncMock, MagicMock, patch
import pytest
from vibe.core.session.resume_sessions import (
SHORT_SESSION_ID_LEN,
list_remote_resume_sessions,
short_session_id,
)
class TestShortSessionId:
    """Tests for short_session_id truncation behavior for local and remote ids."""

    def test_local_shortens_to_first_chars(self) -> None:
        """Local ids keep their leading SHORT_SESSION_ID_LEN characters."""
        sid = "abcdef1234567890"
        result = short_session_id(sid)
        assert result == sid[:SHORT_SESSION_ID_LEN]
        assert len(result) == SHORT_SESSION_ID_LEN

    def test_local_is_default(self) -> None:
        """Omitting `source` behaves exactly like source="local"."""
        sid = "abcdef1234567890"
        assert short_session_id(sid) == short_session_id(sid, source="local")

    def test_remote_shortens_to_last_chars(self) -> None:
        """Remote ids keep their trailing SHORT_SESSION_ID_LEN characters."""
        sid = "abcdef1234567890"
        result = short_session_id(sid, source="remote")
        assert result == sid[-SHORT_SESSION_ID_LEN:]
        assert len(result) == SHORT_SESSION_ID_LEN

    def test_returns_full_id_when_shorter_than_limit(self) -> None:
        """Ids already shorter than the limit are returned unchanged."""
        sid = "abc"
        # Guard the premise: without this, a small SHORT_SESSION_ID_LEN would
        # make the test vacuous rather than exercising the short-id path.
        assert len(sid) < SHORT_SESSION_ID_LEN
        assert short_session_id(sid) == "abc"
        assert short_session_id(sid, source="remote") == "abc"

    def test_empty_string(self) -> None:
        """The empty id round-trips unchanged for both sources."""
        assert short_session_id("") == ""
        assert short_session_id("", source="remote") == ""
class TestListRemoteResumeSessions:
    """list_remote_resume_sessions: config gating and active-status filtering."""

    @staticmethod
    def _nuage_config(*, enabled, api_key) -> MagicMock:
        # Minimal config stub carrying only the fields the function gates on.
        config = MagicMock()
        config.nuage_enabled = enabled
        config.nuage_api_key = api_key
        return config

    @pytest.mark.asyncio
    async def test_returns_empty_when_nuage_disabled(self) -> None:
        config = self._nuage_config(enabled=False, api_key="key")
        assert await list_remote_resume_sessions(config) == []

    @pytest.mark.asyncio
    async def test_returns_empty_when_no_api_key(self) -> None:
        config = self._nuage_config(enabled=True, api_key=None)
        assert await list_remote_resume_sessions(config) == []

    @pytest.mark.asyncio
    async def test_returns_empty_when_both_missing(self) -> None:
        config = self._nuage_config(enabled=False, api_key=None)
        assert await list_remote_resume_sessions(config) == []

    @pytest.mark.asyncio
    async def test_filters_only_active_statuses(self) -> None:
        from datetime import datetime

        from vibe.core.nuage.workflow import (
            WorkflowExecutionListResponse,
            WorkflowExecutionStatus,
            WorkflowExecutionWithoutResultResponse,
        )

        def _execution(exec_id, status, end_time):
            # All fixtures share the same workflow name and start time.
            return WorkflowExecutionWithoutResultResponse(
                workflow_name="vibe",
                execution_id=exec_id,
                status=status,
                start_time=datetime(2026, 1, 1),
                end_time=end_time,
            )

        listing = WorkflowExecutionListResponse(
            executions=[
                _execution("exec-running", WorkflowExecutionStatus.RUNNING, None),
                _execution(
                    "exec-completed",
                    WorkflowExecutionStatus.COMPLETED,
                    datetime(2026, 1, 2),
                ),
                _execution(
                    "exec-retrying",
                    WorkflowExecutionStatus.RETRYING_AFTER_ERROR,
                    None,
                ),
            ]
        )

        config = self._nuage_config(enabled=True, api_key="test-key")
        config.nuage_base_url = "https://test.example.com"
        config.api_timeout = 30
        config.nuage_workflow_id = "workflow-1"

        with patch(
            "vibe.core.session.resume_sessions.WorkflowsClient"
        ) as client_cls:
            workflows_client = AsyncMock()
            workflows_client.get_workflow_runs.return_value = listing
            client_cls.return_value.__aenter__ = AsyncMock(
                return_value=workflows_client
            )
            client_cls.return_value.__aexit__ = AsyncMock(return_value=False)
            sessions = await list_remote_resume_sessions(config)

        # Only RUNNING and RETRYING_AFTER_ERROR count as resumable.
        assert len(sessions) == 2
        assert {s.session_id for s in sessions} == {"exec-running", "exec-retrying"}
        assert all(s.source == "remote" for s in sessions)

View File

@@ -0,0 +1,202 @@
<svg class="rich-terminal" viewBox="0 0 1238 928.4" xmlns="http://www.w3.org/2000/svg">
<!-- Generated with Rich https://www.textualize.io -->
<style>
@font-face {
font-family: "Fira Code";
src: local("FiraCode-Regular"),
url("https://cdnjs.cloudflare.com/ajax/libs/firacode/6.2.0/woff2/FiraCode-Regular.woff2") format("woff2"),
url("https://cdnjs.cloudflare.com/ajax/libs/firacode/6.2.0/woff/FiraCode-Regular.woff") format("woff");
font-style: normal;
font-weight: 400;
}
@font-face {
font-family: "Fira Code";
src: local("FiraCode-Bold"),
url("https://cdnjs.cloudflare.com/ajax/libs/firacode/6.2.0/woff2/FiraCode-Bold.woff2") format("woff2"),
url("https://cdnjs.cloudflare.com/ajax/libs/firacode/6.2.0/woff/FiraCode-Bold.woff") format("woff");
font-style: bold;
font-weight: 700;
}
.terminal-matrix {
font-family: Fira Code, monospace;
font-size: 20px;
line-height: 24.4px;
font-variant-east-asian: full-width;
}
.terminal-title {
font-size: 18px;
font-weight: bold;
font-family: arial;
}
.terminal-r1 { fill: #c5c8c6 }
.terminal-r2 { fill: #ff8205;font-weight: bold }
.terminal-r3 { fill: #68a0b3 }
.terminal-r4 { fill: #9a9b99 }
.terminal-r5 { fill: #608ab1;text-decoration: underline; }
.terminal-r6 { fill: #c5c8c6;text-decoration: underline; }
</style>
<defs>
<clipPath id="terminal-clip-terminal">
<rect x="0" y="0" width="1219.0" height="877.4" />
</clipPath>
<clipPath id="terminal-line-0">
<rect x="0" y="1.5" width="1220" height="24.65"/>
</clipPath>
<clipPath id="terminal-line-1">
<rect x="0" y="25.9" width="1220" height="24.65"/>
</clipPath>
<clipPath id="terminal-line-2">
<rect x="0" y="50.3" width="1220" height="24.65"/>
</clipPath>
<clipPath id="terminal-line-3">
<rect x="0" y="74.7" width="1220" height="24.65"/>
</clipPath>
<clipPath id="terminal-line-4">
<rect x="0" y="99.1" width="1220" height="24.65"/>
</clipPath>
<clipPath id="terminal-line-5">
<rect x="0" y="123.5" width="1220" height="24.65"/>
</clipPath>
<clipPath id="terminal-line-6">
<rect x="0" y="147.9" width="1220" height="24.65"/>
</clipPath>
<clipPath id="terminal-line-7">
<rect x="0" y="172.3" width="1220" height="24.65"/>
</clipPath>
<clipPath id="terminal-line-8">
<rect x="0" y="196.7" width="1220" height="24.65"/>
</clipPath>
<clipPath id="terminal-line-9">
<rect x="0" y="221.1" width="1220" height="24.65"/>
</clipPath>
<clipPath id="terminal-line-10">
<rect x="0" y="245.5" width="1220" height="24.65"/>
</clipPath>
<clipPath id="terminal-line-11">
<rect x="0" y="269.9" width="1220" height="24.65"/>
</clipPath>
<clipPath id="terminal-line-12">
<rect x="0" y="294.3" width="1220" height="24.65"/>
</clipPath>
<clipPath id="terminal-line-13">
<rect x="0" y="318.7" width="1220" height="24.65"/>
</clipPath>
<clipPath id="terminal-line-14">
<rect x="0" y="343.1" width="1220" height="24.65"/>
</clipPath>
<clipPath id="terminal-line-15">
<rect x="0" y="367.5" width="1220" height="24.65"/>
</clipPath>
<clipPath id="terminal-line-16">
<rect x="0" y="391.9" width="1220" height="24.65"/>
</clipPath>
<clipPath id="terminal-line-17">
<rect x="0" y="416.3" width="1220" height="24.65"/>
</clipPath>
<clipPath id="terminal-line-18">
<rect x="0" y="440.7" width="1220" height="24.65"/>
</clipPath>
<clipPath id="terminal-line-19">
<rect x="0" y="465.1" width="1220" height="24.65"/>
</clipPath>
<clipPath id="terminal-line-20">
<rect x="0" y="489.5" width="1220" height="24.65"/>
</clipPath>
<clipPath id="terminal-line-21">
<rect x="0" y="513.9" width="1220" height="24.65"/>
</clipPath>
<clipPath id="terminal-line-22">
<rect x="0" y="538.3" width="1220" height="24.65"/>
</clipPath>
<clipPath id="terminal-line-23">
<rect x="0" y="562.7" width="1220" height="24.65"/>
</clipPath>
<clipPath id="terminal-line-24">
<rect x="0" y="587.1" width="1220" height="24.65"/>
</clipPath>
<clipPath id="terminal-line-25">
<rect x="0" y="611.5" width="1220" height="24.65"/>
</clipPath>
<clipPath id="terminal-line-26">
<rect x="0" y="635.9" width="1220" height="24.65"/>
</clipPath>
<clipPath id="terminal-line-27">
<rect x="0" y="660.3" width="1220" height="24.65"/>
</clipPath>
<clipPath id="terminal-line-28">
<rect x="0" y="684.7" width="1220" height="24.65"/>
</clipPath>
<clipPath id="terminal-line-29">
<rect x="0" y="709.1" width="1220" height="24.65"/>
</clipPath>
<clipPath id="terminal-line-30">
<rect x="0" y="733.5" width="1220" height="24.65"/>
</clipPath>
<clipPath id="terminal-line-31">
<rect x="0" y="757.9" width="1220" height="24.65"/>
</clipPath>
<clipPath id="terminal-line-32">
<rect x="0" y="782.3" width="1220" height="24.65"/>
</clipPath>
<clipPath id="terminal-line-33">
<rect x="0" y="806.7" width="1220" height="24.65"/>
</clipPath>
<clipPath id="terminal-line-34">
<rect x="0" y="831.1" width="1220" height="24.65"/>
</clipPath>
</defs>
<rect fill="#292929" stroke="rgba(255,255,255,0.35)" stroke-width="1" x="1" y="1" width="1236" height="926.4" rx="8"/><text class="terminal-title" fill="#c5c8c6" text-anchor="middle" x="618" y="27">DataRetentionTestApp</text>
<g transform="translate(26,22)">
<circle cx="0" cy="0" r="7" fill="#ff5f57"/>
<circle cx="22" cy="0" r="7" fill="#febc2e"/>
<circle cx="44" cy="0" r="7" fill="#28c840"/>
</g>
<g transform="translate(9, 41)" clip-path="url(#terminal-clip-terminal)">
<g class="terminal-matrix">
<text class="terminal-r1" x="1220" y="20" textLength="12.2" clip-path="url(#terminal-line-0)">
</text><text class="terminal-r1" x="1220" y="44.4" textLength="12.2" clip-path="url(#terminal-line-1)">
</text><text class="terminal-r1" x="1220" y="68.8" textLength="12.2" clip-path="url(#terminal-line-2)">
</text><text class="terminal-r1" x="1220" y="93.2" textLength="12.2" clip-path="url(#terminal-line-3)">
</text><text class="terminal-r1" x="1220" y="117.6" textLength="12.2" clip-path="url(#terminal-line-4)">
</text><text class="terminal-r1" x="1220" y="142" textLength="12.2" clip-path="url(#terminal-line-5)">
</text><text class="terminal-r1" x="1220" y="166.4" textLength="12.2" clip-path="url(#terminal-line-6)">
</text><text class="terminal-r1" x="1220" y="190.8" textLength="12.2" clip-path="url(#terminal-line-7)">
</text><text class="terminal-r1" x="1220" y="215.2" textLength="12.2" clip-path="url(#terminal-line-8)">
</text><text class="terminal-r1" x="1220" y="239.6" textLength="12.2" clip-path="url(#terminal-line-9)">
</text><text class="terminal-r1" x="1220" y="264" textLength="12.2" clip-path="url(#terminal-line-10)">
</text><text class="terminal-r1" x="1220" y="288.4" textLength="12.2" clip-path="url(#terminal-line-11)">
</text><text class="terminal-r1" x="1220" y="312.8" textLength="12.2" clip-path="url(#terminal-line-12)">
</text><text class="terminal-r1" x="1220" y="337.2" textLength="12.2" clip-path="url(#terminal-line-13)">
</text><text class="terminal-r1" x="1220" y="361.6" textLength="12.2" clip-path="url(#terminal-line-14)">
</text><text class="terminal-r1" x="1220" y="386" textLength="12.2" clip-path="url(#terminal-line-15)">
</text><text class="terminal-r1" x="1220" y="410.4" textLength="12.2" clip-path="url(#terminal-line-16)">
</text><text class="terminal-r1" x="1220" y="434.8" textLength="12.2" clip-path="url(#terminal-line-17)">
</text><text class="terminal-r1" x="0" y="459.2" textLength="134.2" clip-path="url(#terminal-line-18)">&#160;&#160;⡠⣒⠄&#160;&#160;⡔⢄⠔⡄</text><text class="terminal-r2" x="170.8" y="459.2" textLength="146.4" clip-path="url(#terminal-line-18)">Mistral&#160;Vibe</text><text class="terminal-r1" x="317.2" y="459.2" textLength="122" clip-path="url(#terminal-line-18)">&#160;v0.0.0&#160;·&#160;</text><text class="terminal-r3" x="439.2" y="459.2" textLength="183" clip-path="url(#terminal-line-18)">devstral-latest</text><text class="terminal-r1" x="622.2" y="459.2" textLength="256.2" clip-path="url(#terminal-line-18)">&#160;·&#160;[Subscription]&#160;Pro</text><text class="terminal-r1" x="1220" y="459.2" textLength="12.2" clip-path="url(#terminal-line-18)">
</text><text class="terminal-r1" x="0" y="483.6" textLength="134.2" clip-path="url(#terminal-line-19)">&#160;⢸⠸⣀⡔⢉⠱⣃⡢⣂⡣</text><text class="terminal-r1" x="170.8" y="483.6" textLength="414.8" clip-path="url(#terminal-line-19)">1&#160;model&#160;·&#160;0&#160;MCP&#160;servers&#160;·&#160;0&#160;skills</text><text class="terminal-r1" x="1220" y="483.6" textLength="12.2" clip-path="url(#terminal-line-19)">
</text><text class="terminal-r1" x="0" y="508" textLength="134.2" clip-path="url(#terminal-line-20)">&#160;&#160;⠉⠒⠣⠤⠵⠤⠬⠮⠆</text><text class="terminal-r1" x="170.8" y="508" textLength="61" clip-path="url(#terminal-line-20)">Type&#160;</text><text class="terminal-r3" x="231.8" y="508" textLength="61" clip-path="url(#terminal-line-20)">/help</text><text class="terminal-r1" x="292.8" y="508" textLength="256.2" clip-path="url(#terminal-line-20)">&#160;for&#160;more&#160;information</text><text class="terminal-r1" x="1220" y="508" textLength="12.2" clip-path="url(#terminal-line-20)">
</text><text class="terminal-r1" x="1220" y="532.4" textLength="12.2" clip-path="url(#terminal-line-21)">
</text><text class="terminal-r4" x="24.4" y="556.8" textLength="12.2" clip-path="url(#terminal-line-22)"></text><text class="terminal-r5" x="48.8" y="556.8" textLength="414.8" clip-path="url(#terminal-line-22)">Your&#160;Data&#160;Helps&#160;Improve&#160;Mistral&#160;AI</text><text class="terminal-r1" x="1220" y="556.8" textLength="12.2" clip-path="url(#terminal-line-22)">
</text><text class="terminal-r4" x="24.4" y="581.2" textLength="12.2" clip-path="url(#terminal-line-23)"></text><text class="terminal-r1" x="48.8" y="581.2" textLength="1159" clip-path="url(#terminal-line-23)">At&#160;Mistral&#160;AI,&#160;we&#x27;re&#160;committed&#160;to&#160;delivering&#160;the&#160;best&#160;possible&#160;experience.&#160;When&#160;you&#160;use&#160;Mistral</text><text class="terminal-r1" x="1220" y="581.2" textLength="12.2" clip-path="url(#terminal-line-23)">
</text><text class="terminal-r4" x="24.4" y="605.6" textLength="12.2" clip-path="url(#terminal-line-24)"></text><text class="terminal-r1" x="48.8" y="605.6" textLength="1159" clip-path="url(#terminal-line-24)">models&#160;on&#160;our&#160;API,&#160;your&#160;interactions&#160;may&#160;be&#160;collected&#160;to&#160;improve&#160;our&#160;models,&#160;ensuring&#160;they&#160;stay</text><text class="terminal-r1" x="1220" y="605.6" textLength="12.2" clip-path="url(#terminal-line-24)">
</text><text class="terminal-r4" x="24.4" y="630" textLength="12.2" clip-path="url(#terminal-line-25)"></text><text class="terminal-r1" x="48.8" y="630" textLength="439.2" clip-path="url(#terminal-line-25)">cutting-edge,&#160;accurate,&#160;and&#160;helpful.</text><text class="terminal-r1" x="1220" y="630" textLength="12.2" clip-path="url(#terminal-line-25)">
</text><text class="terminal-r4" x="24.4" y="654.4" textLength="12.2" clip-path="url(#terminal-line-26)"></text><text class="terminal-r1" x="1220" y="654.4" textLength="12.2" clip-path="url(#terminal-line-26)">
</text><text class="terminal-r4" x="24.4" y="678.8" textLength="12.2" clip-path="url(#terminal-line-27)"></text><text class="terminal-r1" x="48.8" y="678.8" textLength="317.2" clip-path="url(#terminal-line-27)">Manage&#160;your&#160;data&#160;settings&#160;</text><text class="terminal-r6" x="366" y="678.8" textLength="48.8" clip-path="url(#terminal-line-27)">here</text><text class="terminal-r1" x="1220" y="678.8" textLength="12.2" clip-path="url(#terminal-line-27)">
</text><text class="terminal-r1" x="1220" y="703.2" textLength="12.2" clip-path="url(#terminal-line-28)">
</text><text class="terminal-r1" x="1220" y="727.6" textLength="12.2" clip-path="url(#terminal-line-29)">
</text><text class="terminal-r4" x="0" y="752" textLength="1220" clip-path="url(#terminal-line-30)">┌────────────────────────────────────────────────────────────────────────────────────────&#160;default&#160;─┐</text><text class="terminal-r1" x="1220" y="752" textLength="12.2" clip-path="url(#terminal-line-30)">
</text><text class="terminal-r4" x="0" y="776.4" textLength="12.2" clip-path="url(#terminal-line-31)"></text><text class="terminal-r2" x="24.4" y="776.4" textLength="12.2" clip-path="url(#terminal-line-31)">&gt;</text><text class="terminal-r4" x="1207.8" y="776.4" textLength="12.2" clip-path="url(#terminal-line-31)"></text><text class="terminal-r1" x="1220" y="776.4" textLength="12.2" clip-path="url(#terminal-line-31)">
</text><text class="terminal-r4" x="0" y="800.8" textLength="12.2" clip-path="url(#terminal-line-32)"></text><text class="terminal-r4" x="1207.8" y="800.8" textLength="12.2" clip-path="url(#terminal-line-32)"></text><text class="terminal-r1" x="1220" y="800.8" textLength="12.2" clip-path="url(#terminal-line-32)">
</text><text class="terminal-r4" x="0" y="825.2" textLength="12.2" clip-path="url(#terminal-line-33)"></text><text class="terminal-r4" x="1207.8" y="825.2" textLength="12.2" clip-path="url(#terminal-line-33)"></text><text class="terminal-r1" x="1220" y="825.2" textLength="12.2" clip-path="url(#terminal-line-33)">
</text><text class="terminal-r4" x="0" y="849.6" textLength="1220" clip-path="url(#terminal-line-34)">└──────────────────────────────────────────────────────────────────────────────────────────────────┘</text><text class="terminal-r1" x="1220" y="849.6" textLength="12.2" clip-path="url(#terminal-line-34)">
</text><text class="terminal-r4" x="0" y="874" textLength="158.6" clip-path="url(#terminal-line-35)">/test/workdir</text><text class="terminal-r4" x="1012.6" y="874" textLength="207.4" clip-path="url(#terminal-line-35)">0%&#160;of&#160;200k&#160;tokens</text>
</g>
</g>
</svg>

After

Width:  |  Height:  |  Size: 15 KiB

View File

@@ -0,0 +1,23 @@
from __future__ import annotations
from textual.pilot import Pilot
from tests.snapshots.base_snapshot_test_app import BaseSnapshotTestApp
from tests.snapshots.snap_compare import SnapCompare
class DataRetentionTestApp(BaseSnapshotTestApp):
    """Snapshot harness app that opens the data-retention notice on mount."""

    async def on_mount(self) -> None:
        # Run the base app's mount sequence first, then surface the
        # data-retention notice so the snapshot captures it.
        await super().on_mount()
        await self._show_data_retention()
def test_snapshot_data_retention(snap_compare: SnapCompare) -> None:
    """The data-retention notice renders identically to the stored snapshot."""

    async def _settle(pilot: Pilot) -> None:
        # Give the notice time to finish mounting before the capture.
        await pilot.pause(0.2)

    matches = snap_compare(
        "test_ui_snapshot_data_retention.py:DataRetentionTestApp",
        terminal_size=(100, 36),
        run_before=_settle,
    )
    assert matches

View File

@@ -141,3 +141,38 @@ def test_run_programmatic_ignores_system_messages_in_previous(
assert spy.emitted[1][1] == "Continue our previous discussion."
assert spy.emitted[2][1] == "Let's move on to practical examples."
assert spy.emitted[3][1] == "Understood."
def test_run_programmatic_teleport_ignored_when_nuage_disabled(
    monkeypatch: pytest.MonkeyPatch,
) -> None:
    """With nuage disabled, teleport=True is ignored and the run proceeds normally."""
    formatter = SpyStreamingFormatter()
    monkeypatch.setattr(
        "vibe.core.programmatic.create_formatter",
        lambda *_args, **_kwargs: formatter,
    )

    def make_backend(provider, **kwargs):
        # Fake backend that streams a single assistant chunk.
        return FakeBackend([mock_llm_chunk(content="Normal response.")])

    with mock_backend_factory(Backend.MISTRAL, make_backend):
        config = build_test_vibe_config(
            system_prompt_id="tests",
            include_project_context=False,
            include_prompt_detail=False,
            include_model_info=False,
            include_commit_signature=False,
            nuage_enabled=False,
        )
        run_programmatic(
            config=config,
            prompt="Hello",
            output_format=OutputFormat.STREAMING,
            teleport=True,
        )

    emitted_roles = [role for role, _ in formatter.emitted]
    assert emitted_roles == [Role.system, Role.user, Role.assistant]
    assert formatter.emitted[2][1] == "Normal response."

152
uv.lock generated
View File

@@ -3,7 +3,7 @@ revision = 3
requires-python = ">=3.12"
[options]
exclude-newer = "2026-03-25T16:07:22.482308Z"
exclude-newer = "2026-03-27T13:28:45.258057Z"
exclude-newer-span = "P7D"
[options.exclude-newer-package]
@@ -77,20 +77,20 @@ wheels = [
[[package]]
name = "cachetools"
version = "7.0.1"
version = "7.0.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/d4/07/56595285564e90777d758ebd383d6b0b971b87729bbe2184a849932a3736/cachetools-7.0.1.tar.gz", hash = "sha256:e31e579d2c5b6e2944177a0397150d312888ddf4e16e12f1016068f0c03b8341", size = 36126, upload-time = "2026-02-10T22:24:05.03Z" }
sdist = { url = "https://files.pythonhosted.org/packages/6c/c7/342b33cc6877eebc6c9bb45cb9f78e170e575839699f6f3cc96050176431/cachetools-7.0.2.tar.gz", hash = "sha256:7e7f09a4ca8b791d8bb4864afc71e9c17e607a28e6839ca1a644253c97dbeae0", size = 36983, upload-time = "2026-03-02T19:45:16.926Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/ed/9e/5faefbf9db1db466d633735faceda1f94aa99ce506ac450d232536266b32/cachetools-7.0.1-py3-none-any.whl", hash = "sha256:8f086515c254d5664ae2146d14fc7f65c9a4bce75152eb247e5a9c5e6d7b2ecf", size = 13484, upload-time = "2026-02-10T22:24:03.741Z" },
{ url = "https://files.pythonhosted.org/packages/ef/04/4b6968e77c110f12da96fdbfcb39c6557c2e5e81bd7afcf8ed893d5bc588/cachetools-7.0.2-py3-none-any.whl", hash = "sha256:938dcad184827c5e94928c4fd5526e2b46692b7fb1ae94472da9131d0299343c", size = 13793, upload-time = "2026-03-02T19:45:15.495Z" },
]
[[package]]
name = "certifi"
version = "2026.1.4"
version = "2026.2.25"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/e0/2d/a891ca51311197f6ad14a7ef42e2399f36cf2f9bd44752b3dc4eab60fdc5/certifi-2026.1.4.tar.gz", hash = "sha256:ac726dd470482006e014ad384921ed6438c457018f4b3d204aea4281258b2120", size = 154268, upload-time = "2026-01-04T02:42:41.825Z" }
sdist = { url = "https://files.pythonhosted.org/packages/af/2d/7bf41579a8986e348fa033a31cdd0e4121114f6bce2457e8876010b092dd/certifi-2026.2.25.tar.gz", hash = "sha256:e887ab5cee78ea814d3472169153c2d12cd43b14bd03329a39a9c6e2e80bfba7", size = 155029, upload-time = "2026-02-25T02:54:17.342Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e6/ad/3cc14f097111b4de0040c83a525973216457bbeeb63739ef1ed275c1c021/certifi-2026.1.4-py3-none-any.whl", hash = "sha256:9943707519e4add1115f44c2bc244f782c0249876bf51b6599fee1ffbedd685c", size = 152900, upload-time = "2026-01-04T02:42:40.15Z" },
{ url = "https://files.pythonhosted.org/packages/9a/3c/c17fb3ca2d9c3acff52e30b309f538586f9f5b9c9cf454f3845fc9af4881/certifi-2026.2.25-py3-none-any.whl", hash = "sha256:027692e4402ad994f1c42e52a4997a9763c646b73e4096e4d5d6db8af1d6f0fa", size = 153684, upload-time = "2026-02-25T02:54:15.766Z" },
]
[[package]]
@@ -352,11 +352,11 @@ wheels = [
[[package]]
name = "filelock"
version = "3.24.3"
version = "3.25.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/73/92/a8e2479937ff39185d20dd6a851c1a63e55849e447a55e798cc2e1f49c65/filelock-3.24.3.tar.gz", hash = "sha256:011a5644dc937c22699943ebbfc46e969cdde3e171470a6e40b9533e5a72affa", size = 37935, upload-time = "2026-02-19T00:48:20.543Z" }
sdist = { url = "https://files.pythonhosted.org/packages/77/18/a1fd2231c679dcb9726204645721b12498aeac28e1ad0601038f94b42556/filelock-3.25.0.tar.gz", hash = "sha256:8f00faf3abf9dc730a1ffe9c354ae5c04e079ab7d3a683b7c32da5dd05f26af3", size = 40158, upload-time = "2026-03-01T15:08:45.916Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/9c/0f/5d0c71a1aefeb08efff26272149e07ab922b64f46c63363756224bd6872e/filelock-3.24.3-py3-none-any.whl", hash = "sha256:426e9a4660391f7f8a810d71b0555bce9008b0a1cc342ab1f6947d37639e002d", size = 24331, upload-time = "2026-02-19T00:48:18.465Z" },
{ url = "https://files.pythonhosted.org/packages/f9/0b/de6f54d4a8bedfe8645c41497f3c18d749f0bd3218170c667bf4b81d0cdd/filelock-3.25.0-py3-none-any.whl", hash = "sha256:5ccf8069f7948f494968fc0713c10e5c182a9c9d9eef3a636307a20c2490f047", size = 26427, upload-time = "2026-03-01T15:08:44.593Z" },
]
[[package]]
@@ -478,11 +478,11 @@ wheels = [
[[package]]
name = "identify"
version = "2.6.16"
version = "2.6.17"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/5b/8d/e8b97e6bd3fb6fb271346f7981362f1e04d6a7463abd0de79e1fda17c067/identify-2.6.16.tar.gz", hash = "sha256:846857203b5511bbe94d5a352a48ef2359532bc8f6727b5544077a0dcfb24980", size = 99360, upload-time = "2026-01-12T18:58:58.201Z" }
sdist = { url = "https://files.pythonhosted.org/packages/57/84/376a3b96e5a8d33a7aa2c5b3b31a4b3c364117184bf0b17418055f6ace66/identify-2.6.17.tar.gz", hash = "sha256:f816b0b596b204c9fdf076ded172322f2723cf958d02f9c3587504834c8ff04d", size = 99579, upload-time = "2026-03-01T20:04:12.702Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/b8/58/40fbbcefeda82364720eba5cf2270f98496bdfa19ea75b4cccae79c698e6/identify-2.6.16-py2.py3-none-any.whl", hash = "sha256:391ee4d77741d994189522896270b787aed8670389bfd60f326d677d64a6dfb0", size = 99202, upload-time = "2026-01-12T18:58:56.627Z" },
{ url = "https://files.pythonhosted.org/packages/40/66/71c1227dff78aaeb942fed29dd5651f2aec166cc7c9aeea3e8b26a539b7d/identify-2.6.17-py2.py3-none-any.whl", hash = "sha256:be5f8412d5ed4b20f2bd41a65f920990bdccaa6a4a18a08f1eefdcd0bdd885f0", size = 99382, upload-time = "2026-03-01T20:04:11.439Z" },
]
[[package]]
@@ -624,14 +624,14 @@ wheels = [
[[package]]
name = "linkify-it-py"
version = "2.0.3"
version = "2.1.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "uc-micro-py" },
]
sdist = { url = "https://files.pythonhosted.org/packages/2a/ae/bb56c6828e4797ba5a4821eec7c43b8bf40f69cda4d4f5f8c8a2810ec96a/linkify-it-py-2.0.3.tar.gz", hash = "sha256:68cda27e162e9215c17d786649d1da0021a451bdc436ef9e0fa0ba5234b9b048", size = 27946, upload-time = "2024-02-04T14:48:04.179Z" }
sdist = { url = "https://files.pythonhosted.org/packages/2e/c9/06ea13676ef354f0af6169587ae292d3e2406e212876a413bf9eece4eb23/linkify_it_py-2.1.0.tar.gz", hash = "sha256:43360231720999c10e9328dc3691160e27a718e280673d444c38d7d3aaa3b98b", size = 29158, upload-time = "2026-03-01T07:48:47.683Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/04/1e/b832de447dee8b582cac175871d2f6c3d5077cc56d5575cadba1fd1cccfa/linkify_it_py-2.0.3-py3-none-any.whl", hash = "sha256:6bcbc417b0ac14323382aef5c5192c0075bf8a9d6b41820a2b66371eac6b6d79", size = 19820, upload-time = "2024-02-04T14:48:02.496Z" },
{ url = "https://files.pythonhosted.org/packages/b4/de/88b3be5c31b22333b3ca2f6ff1de4e863d8fe45aaea7485f591970ec1d3e/linkify_it_py-2.1.0-py3-none-any.whl", hash = "sha256:0d252c1594ecba2ecedc444053db5d3a9b7ec1b0dd929c8f1d74dce89f86c05e", size = 19878, upload-time = "2026-03-01T07:48:46.098Z" },
]
[[package]]
@@ -787,7 +787,7 @@ wheels = [
[[package]]
name = "mistral-vibe"
version = "2.7.2"
version = "2.7.3"
source = { editable = "." }
dependencies = [
{ name = "agent-client-protocol" },
@@ -1316,15 +1316,15 @@ wheels = [
[[package]]
name = "pyinstaller-hooks-contrib"
version = "2026.1"
version = "2026.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "packaging" },
{ name = "setuptools" },
]
sdist = { url = "https://files.pythonhosted.org/packages/95/eb/e1dd9a5348e4cf348471c0e5fd617d948779bc3199cf4edb134d8fceca91/pyinstaller_hooks_contrib-2026.1.tar.gz", hash = "sha256:a5f0891a1e81e92406ab917d9e76adfd7a2b68415ee2e35c950a7b3910bc361b", size = 171504, upload-time = "2026-02-18T13:01:15.711Z" }
sdist = { url = "https://files.pythonhosted.org/packages/6b/90/f3b30d72b89ab5b8f3ef714db94d09c7c263cce6562b4c7c636d99630695/pyinstaller_hooks_contrib-2026.2.tar.gz", hash = "sha256:cbd1eb00b5d13301b1cce602e1fffb17f0c531c0391f0a87a383d376be68a186", size = 171884, upload-time = "2026-03-02T23:07:01.221Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/7f/69/12bafee3cc485d977f596e0d803d7c6fb147430fc35dfe505730aa3a28dd/pyinstaller_hooks_contrib-2026.1-py3-none-any.whl", hash = "sha256:66ad4888ba67de6f3cfd7ef554f9dd1a4389e2eb19f84d7129a5a6818e3f2180", size = 452841, upload-time = "2026-02-18T13:01:14.471Z" },
{ url = "https://files.pythonhosted.org/packages/34/3b/1efef5ff4d4d150f646b873e963437c0b800cb375a37df01fefab149f4d9/pyinstaller_hooks_contrib-2026.2-py3-none-any.whl", hash = "sha256:fc29f0481b58adf78ce9c1d9cf135fe96f38c708f74b2aa0670ef93e59578ab9", size = 453939, upload-time = "2026-03-02T23:06:59.469Z" },
]
[[package]]
@@ -1446,12 +1446,25 @@ wheels = [
]
[[package]]
name = "python-dotenv"
version = "1.2.1"
name = "python-discovery"
version = "1.1.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/f0/26/19cadc79a718c5edbec86fd4919a6b6d3f681039a2f6d66d14be94e75fb9/python_dotenv-1.2.1.tar.gz", hash = "sha256:42667e897e16ab0d66954af0e60a9caa94f0fd4ecf3aaf6d2d260eec1aa36ad6", size = 44221, upload-time = "2025-10-26T15:12:10.434Z" }
dependencies = [
{ name = "filelock" },
{ name = "platformdirs" },
]
sdist = { url = "https://files.pythonhosted.org/packages/82/bb/93a3e83bdf9322c7e21cafd092e56a4a17c4d8ef4277b6eb01af1a540a6f/python_discovery-1.1.0.tar.gz", hash = "sha256:447941ba1aed8cc2ab7ee3cb91be5fc137c5bdbb05b7e6ea62fbdcb66e50b268", size = 55674, upload-time = "2026-02-26T09:42:49.668Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/14/1b/a298b06749107c305e1fe0f814c6c74aea7b2f1e10989cb30f544a1b3253/python_dotenv-1.2.1-py3-none-any.whl", hash = "sha256:b81ee9561e9ca4004139c6cbba3a238c32b03e4894671e181b671e8cb8425d61", size = 21230, upload-time = "2025-10-26T15:12:09.109Z" },
{ url = "https://files.pythonhosted.org/packages/06/54/82a6e2ef37f0f23dccac604b9585bdcbd0698604feb64807dcb72853693e/python_discovery-1.1.0-py3-none-any.whl", hash = "sha256:a162893b8809727f54594a99ad2179d2ede4bf953e12d4c7abc3cc9cdbd1437b", size = 30687, upload-time = "2026-02-26T09:42:48.548Z" },
]
[[package]]
name = "python-dotenv"
version = "1.2.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/82/ed/0301aeeac3e5353ef3d94b6ec08bbcabd04a72018415dcb29e588514bba8/python_dotenv-1.2.2.tar.gz", hash = "sha256:2c371a91fbd7ba082c2c1dc1f8bf89ca22564a087c2c287cd9b662adde799cf3", size = 50135, upload-time = "2026-03-01T16:00:26.196Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/0b/d7/1959b9648791274998a9c3526f6d0ec8fd2233e4d4acce81bbae76b44b2a/python_dotenv-1.2.2-py3-none-any.whl", hash = "sha256:1d8214789a24de455a8b8bd8ae6fe3c6b69a5e3d64aa8a8e5d68e694bbcb285a", size = 22101, upload-time = "2026-03-01T16:00:25.09Z" },
]
[[package]]
@@ -1718,27 +1731,27 @@ wheels = [
[[package]]
name = "ruff"
version = "0.15.2"
version = "0.15.4"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/06/04/eab13a954e763b0606f460443fcbf6bb5a0faf06890ea3754ff16523dce5/ruff-0.15.2.tar.gz", hash = "sha256:14b965afee0969e68bb871eba625343b8673375f457af4abe98553e8bbb98342", size = 4558148, upload-time = "2026-02-19T22:32:20.271Z" }
sdist = { url = "https://files.pythonhosted.org/packages/da/31/d6e536cdebb6568ae75a7f00e4b4819ae0ad2640c3604c305a0428680b0c/ruff-0.15.4.tar.gz", hash = "sha256:3412195319e42d634470cc97aa9803d07e9d5c9223b99bcb1518f0c725f26ae1", size = 4569550, upload-time = "2026-02-26T20:04:14.959Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/2f/70/3a4dc6d09b13cb3e695f28307e5d889b2e1a66b7af9c5e257e796695b0e6/ruff-0.15.2-py3-none-linux_armv6l.whl", hash = "sha256:120691a6fdae2f16d65435648160f5b81a9625288f75544dc40637436b5d3c0d", size = 10430565, upload-time = "2026-02-19T22:32:41.824Z" },
{ url = "https://files.pythonhosted.org/packages/71/0b/bb8457b56185ece1305c666dc895832946d24055be90692381c31d57466d/ruff-0.15.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:a89056d831256099658b6bba4037ac6dd06f49d194199215befe2bb10457ea5e", size = 10820354, upload-time = "2026-02-19T22:32:07.366Z" },
{ url = "https://files.pythonhosted.org/packages/2d/c1/e0532d7f9c9e0b14c46f61b14afd563298b8b83f337b6789ddd987e46121/ruff-0.15.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:e36dee3a64be0ebd23c86ffa3aa3fd3ac9a712ff295e192243f814a830b6bd87", size = 10170767, upload-time = "2026-02-19T22:32:13.188Z" },
{ url = "https://files.pythonhosted.org/packages/47/e8/da1aa341d3af017a21c7a62fb5ec31d4e7ad0a93ab80e3a508316efbcb23/ruff-0.15.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9fb47b6d9764677f8c0a193c0943ce9a05d6763523f132325af8a858eadc2b9", size = 10529591, upload-time = "2026-02-19T22:32:02.547Z" },
{ url = "https://files.pythonhosted.org/packages/93/74/184fbf38e9f3510231fbc5e437e808f0b48c42d1df9434b208821efcd8d6/ruff-0.15.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f376990f9d0d6442ea9014b19621d8f2aaf2b8e39fdbfc79220b7f0c596c9b80", size = 10260771, upload-time = "2026-02-19T22:32:36.938Z" },
{ url = "https://files.pythonhosted.org/packages/05/ac/605c20b8e059a0bc4b42360414baa4892ff278cec1c91fff4be0dceedefd/ruff-0.15.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2dcc987551952d73cbf5c88d9fdee815618d497e4df86cd4c4824cc59d5dd75f", size = 11045791, upload-time = "2026-02-19T22:32:31.642Z" },
{ url = "https://files.pythonhosted.org/packages/fd/52/db6e419908f45a894924d410ac77d64bdd98ff86901d833364251bd08e22/ruff-0.15.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:42a47fd785cbe8c01b9ff45031af875d101b040ad8f4de7bbb716487c74c9a77", size = 11879271, upload-time = "2026-02-19T22:32:29.305Z" },
{ url = "https://files.pythonhosted.org/packages/3e/d8/7992b18f2008bdc9231d0f10b16df7dda964dbf639e2b8b4c1b4e91b83af/ruff-0.15.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cbe9f49354866e575b4c6943856989f966421870e85cd2ac94dccb0a9dcb2fea", size = 11303707, upload-time = "2026-02-19T22:32:22.492Z" },
{ url = "https://files.pythonhosted.org/packages/d7/02/849b46184bcfdd4b64cde61752cc9a146c54759ed036edd11857e9b8443b/ruff-0.15.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7a672c82b5f9887576087d97be5ce439f04bbaf548ee987b92d3a7dede41d3a", size = 11149151, upload-time = "2026-02-19T22:32:44.234Z" },
{ url = "https://files.pythonhosted.org/packages/70/04/f5284e388bab60d1d3b99614a5a9aeb03e0f333847e2429bebd2aaa1feec/ruff-0.15.2-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:72ecc64f46f7019e2bcc3cdc05d4a7da958b629a5ab7033195e11a438403d956", size = 11091132, upload-time = "2026-02-19T22:32:24.691Z" },
{ url = "https://files.pythonhosted.org/packages/fa/ae/88d844a21110e14d92cf73d57363fab59b727ebeabe78009b9ccb23500af/ruff-0.15.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:8dcf243b15b561c655c1ef2f2b0050e5d50db37fe90115507f6ff37d865dc8b4", size = 10504717, upload-time = "2026-02-19T22:32:26.75Z" },
{ url = "https://files.pythonhosted.org/packages/64/27/867076a6ada7f2b9c8292884ab44d08fd2ba71bd2b5364d4136f3cd537e1/ruff-0.15.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:dab6941c862c05739774677c6273166d2510d254dac0695c0e3f5efa1b5585de", size = 10263122, upload-time = "2026-02-19T22:32:10.036Z" },
{ url = "https://files.pythonhosted.org/packages/e7/ef/faf9321d550f8ebf0c6373696e70d1758e20ccdc3951ad7af00c0956be7c/ruff-0.15.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:1b9164f57fc36058e9a6806eb92af185b0697c9fe4c7c52caa431c6554521e5c", size = 10735295, upload-time = "2026-02-19T22:32:39.227Z" },
{ url = "https://files.pythonhosted.org/packages/2f/55/e8089fec62e050ba84d71b70e7834b97709ca9b7aba10c1a0b196e493f97/ruff-0.15.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:80d24fcae24d42659db7e335b9e1531697a7102c19185b8dc4a028b952865fd8", size = 11241641, upload-time = "2026-02-19T22:32:34.617Z" },
{ url = "https://files.pythonhosted.org/packages/23/01/1c30526460f4d23222d0fabd5888868262fd0e2b71a00570ca26483cd993/ruff-0.15.2-py3-none-win32.whl", hash = "sha256:fd5ff9e5f519a7e1bd99cbe8daa324010a74f5e2ebc97c6242c08f26f3714f6f", size = 10507885, upload-time = "2026-02-19T22:32:15.635Z" },
{ url = "https://files.pythonhosted.org/packages/5c/10/3d18e3bbdf8fc50bbb4ac3cc45970aa5a9753c5cb51bf9ed9a3cd8b79fa3/ruff-0.15.2-py3-none-win_amd64.whl", hash = "sha256:d20014e3dfa400f3ff84830dfb5755ece2de45ab62ecea4af6b7262d0fb4f7c5", size = 11623725, upload-time = "2026-02-19T22:32:04.947Z" },
{ url = "https://files.pythonhosted.org/packages/6d/78/097c0798b1dab9f8affe73da9642bb4500e098cb27fd8dc9724816ac747b/ruff-0.15.2-py3-none-win_arm64.whl", hash = "sha256:cabddc5822acdc8f7b5527b36ceac55cc51eec7b1946e60181de8fe83ca8876e", size = 10941649, upload-time = "2026-02-19T22:32:18.108Z" },
{ url = "https://files.pythonhosted.org/packages/f2/82/c11a03cfec3a4d26a0ea1e571f0f44be5993b923f905eeddfc397c13d360/ruff-0.15.4-py3-none-linux_armv6l.whl", hash = "sha256:a1810931c41606c686bae8b5b9a8072adac2f611bb433c0ba476acba17a332e0", size = 10453333, upload-time = "2026-02-26T20:04:20.093Z" },
{ url = "https://files.pythonhosted.org/packages/ce/5d/6a1f271f6e31dffb31855996493641edc3eef8077b883eaf007a2f1c2976/ruff-0.15.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:5a1632c66672b8b4d3e1d1782859e98d6e0b4e70829530666644286600a33992", size = 10853356, upload-time = "2026-02-26T20:04:05.808Z" },
{ url = "https://files.pythonhosted.org/packages/b1/d8/0fab9f8842b83b1a9c2bf81b85063f65e93fb512e60effa95b0be49bfc54/ruff-0.15.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:a4386ba2cd6c0f4ff75252845906acc7c7c8e1ac567b7bc3d373686ac8c222ba", size = 10187434, upload-time = "2026-02-26T20:03:54.656Z" },
{ url = "https://files.pythonhosted.org/packages/85/cc/cc220fd9394eff5db8d94dec199eec56dd6c9f3651d8869d024867a91030/ruff-0.15.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2496488bdfd3732747558b6f95ae427ff066d1fcd054daf75f5a50674411e75", size = 10535456, upload-time = "2026-02-26T20:03:52.738Z" },
{ url = "https://files.pythonhosted.org/packages/fa/0f/bced38fa5cf24373ec767713c8e4cadc90247f3863605fb030e597878661/ruff-0.15.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3f1c4893841ff2d54cbda1b2860fa3260173df5ddd7b95d370186f8a5e66a4ac", size = 10287772, upload-time = "2026-02-26T20:04:08.138Z" },
{ url = "https://files.pythonhosted.org/packages/2b/90/58a1802d84fed15f8f281925b21ab3cecd813bde52a8ca033a4de8ab0e7a/ruff-0.15.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:820b8766bd65503b6c30aaa6331e8ef3a6e564f7999c844e9a547c40179e440a", size = 11049051, upload-time = "2026-02-26T20:04:03.53Z" },
{ url = "https://files.pythonhosted.org/packages/d2/ac/b7ad36703c35f3866584564dc15f12f91cb1a26a897dc2fd13d7cb3ae1af/ruff-0.15.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9fb74bab47139c1751f900f857fa503987253c3ef89129b24ed375e72873e85", size = 11890494, upload-time = "2026-02-26T20:04:10.497Z" },
{ url = "https://files.pythonhosted.org/packages/93/3d/3eb2f47a39a8b0da99faf9c54d3eb24720add1e886a5309d4d1be73a6380/ruff-0.15.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f80c98765949c518142b3a50a5db89343aa90f2c2bf7799de9986498ae6176db", size = 11326221, upload-time = "2026-02-26T20:04:12.84Z" },
{ url = "https://files.pythonhosted.org/packages/ff/90/bf134f4c1e5243e62690e09d63c55df948a74084c8ac3e48a88468314da6/ruff-0.15.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:451a2e224151729b3b6c9ffb36aed9091b2996fe4bdbd11f47e27d8f2e8888ec", size = 11168459, upload-time = "2026-02-26T20:04:00.969Z" },
{ url = "https://files.pythonhosted.org/packages/b5/e5/a64d27688789b06b5d55162aafc32059bb8c989c61a5139a36e1368285eb/ruff-0.15.4-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:a8f157f2e583c513c4f5f896163a93198297371f34c04220daf40d133fdd4f7f", size = 11104366, upload-time = "2026-02-26T20:03:48.099Z" },
{ url = "https://files.pythonhosted.org/packages/f1/f6/32d1dcb66a2559763fc3027bdd65836cad9eb09d90f2ed6a63d8e9252b02/ruff-0.15.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:917cc68503357021f541e69b35361c99387cdbbf99bd0ea4aa6f28ca99ff5338", size = 10510887, upload-time = "2026-02-26T20:03:45.771Z" },
{ url = "https://files.pythonhosted.org/packages/ff/92/22d1ced50971c5b6433aed166fcef8c9343f567a94cf2b9d9089f6aa80fe/ruff-0.15.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:e9737c8161da79fd7cfec19f1e35620375bd8b2a50c3e77fa3d2c16f574105cc", size = 10285939, upload-time = "2026-02-26T20:04:22.42Z" },
{ url = "https://files.pythonhosted.org/packages/e6/f4/7c20aec3143837641a02509a4668fb146a642fd1211846634edc17eb5563/ruff-0.15.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:291258c917539e18f6ba40482fe31d6f5ac023994ee11d7bdafd716f2aab8a68", size = 10765471, upload-time = "2026-02-26T20:03:58.924Z" },
{ url = "https://files.pythonhosted.org/packages/d0/09/6d2f7586f09a16120aebdff8f64d962d7c4348313c77ebb29c566cefc357/ruff-0.15.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:3f83c45911da6f2cd5936c436cf86b9f09f09165f033a99dcf7477e34041cbc3", size = 11263382, upload-time = "2026-02-26T20:04:24.424Z" },
{ url = "https://files.pythonhosted.org/packages/1b/fa/2ef715a1cd329ef47c1a050e10dee91a9054b7ce2fcfdd6a06d139afb7ec/ruff-0.15.4-py3-none-win32.whl", hash = "sha256:65594a2d557d4ee9f02834fcdf0a28daa8b3b9f6cb2cb93846025a36db47ef22", size = 10506664, upload-time = "2026-02-26T20:03:50.56Z" },
{ url = "https://files.pythonhosted.org/packages/d0/a8/c688ef7e29983976820d18710f955751d9f4d4eb69df658af3d006e2ba3e/ruff-0.15.4-py3-none-win_amd64.whl", hash = "sha256:04196ad44f0df220c2ece5b0e959c2f37c777375ec744397d21d15b50a75264f", size = 11651048, upload-time = "2026-02-26T20:04:17.191Z" },
{ url = "https://files.pythonhosted.org/packages/3e/0a/9e1be9035b37448ce2e68c978f0591da94389ade5a5abafa4cf99985d1b2/ruff-0.15.4-py3-none-win_arm64.whl", hash = "sha256:60d5177e8cfc70e51b9c5fad936c634872a74209f934c1e79107d11787ad5453", size = 10966776, upload-time = "2026-02-26T20:03:56.908Z" },
]
[[package]]
@@ -1799,24 +1812,24 @@ wheels = [
[[package]]
name = "soupsieve"
version = "2.8.1"
version = "2.8.3"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/89/23/adf3796d740536d63a6fbda113d07e60c734b6ed5d3058d1e47fc0495e47/soupsieve-2.8.1.tar.gz", hash = "sha256:4cf733bc50fa805f5df4b8ef4740fc0e0fa6218cf3006269afd3f9d6d80fd350", size = 117856, upload-time = "2025-12-18T13:50:34.655Z" }
sdist = { url = "https://files.pythonhosted.org/packages/7b/ae/2d9c981590ed9999a0d91755b47fc74f74de286b0f5cee14c9269041e6c4/soupsieve-2.8.3.tar.gz", hash = "sha256:3267f1eeea4251fb42728b6dfb746edc9acaffc4a45b27e19450b676586e8349", size = 118627, upload-time = "2026-01-20T04:27:02.457Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/48/f3/b67d6ea49ca9154453b6d70b34ea22f3996b9fa55da105a79d8732227adc/soupsieve-2.8.1-py3-none-any.whl", hash = "sha256:a11fe2a6f3d76ab3cf2de04eb339c1be5b506a8a47f2ceb6d139803177f85434", size = 36710, upload-time = "2025-12-18T13:50:33.267Z" },
{ url = "https://files.pythonhosted.org/packages/46/2c/1462b1d0a634697ae9e55b3cecdcb64788e8b7d63f54d923fcd0bb140aed/soupsieve-2.8.3-py3-none-any.whl", hash = "sha256:ed64f2ba4eebeab06cc4962affce381647455978ffc1e36bb79a545b91f45a95", size = 37016, upload-time = "2026-01-20T04:27:01.012Z" },
]
[[package]]
name = "sse-starlette"
version = "3.2.0"
version = "3.3.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "anyio" },
{ name = "starlette" },
]
sdist = { url = "https://files.pythonhosted.org/packages/8b/8d/00d280c03ffd39aaee0e86ec81e2d3b9253036a0f93f51d10503adef0e65/sse_starlette-3.2.0.tar.gz", hash = "sha256:8127594edfb51abe44eac9c49e59b0b01f1039d0c7461c6fd91d4e03b70da422", size = 27253, upload-time = "2026-01-17T13:11:05.62Z" }
sdist = { url = "https://files.pythonhosted.org/packages/5a/9f/c3695c2d2d4ef70072c3a06992850498b01c6bc9be531950813716b426fa/sse_starlette-3.3.2.tar.gz", hash = "sha256:678fca55a1945c734d8472a6cad186a55ab02840b4f6786f5ee8770970579dcd", size = 32326, upload-time = "2026-02-28T11:24:34.36Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/96/7f/832f015020844a8b8f7a9cbc103dd76ba8e3875004c41e08440ea3a2b41a/sse_starlette-3.2.0-py3-none-any.whl", hash = "sha256:5876954bd51920fc2cd51baee47a080eb88a37b5b784e615abb0b283f801cdbf", size = 12763, upload-time = "2026-01-17T13:11:03.775Z" },
{ url = "https://files.pythonhosted.org/packages/61/28/8cb142d3fe80c4a2d8af54ca0b003f47ce0ba920974e7990fa6e016402d1/sse_starlette-3.3.2-py3-none-any.whl", hash = "sha256:5c3ea3dad425c601236726af2f27689b74494643f57017cafcb6f8c9acfbb862", size = 14270, upload-time = "2026-02-28T11:24:32.984Z" },
]
[[package]]
@@ -2030,28 +2043,28 @@ wheels = [
[[package]]
name = "typos"
version = "1.43.5"
version = "1.44.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/aa/10/f3e13aece5b053745242aecbcdfa05110d65d5fb8d3ea25b05540ea97540/typos-1.43.5.tar.gz", hash = "sha256:aa445c5eaf0e32095c3183dda96e0fa8ffcabbfd796721c75d7a8a68e9cd0d75", size = 1806801, upload-time = "2026-02-16T15:34:17.41Z" }
sdist = { url = "https://files.pythonhosted.org/packages/db/12/6049f719f30e5066bb5059a24413cbd91f79fa9aa7d71517e4e620abdee0/typos-1.44.0.tar.gz", hash = "sha256:8e1046d02f2fcea6df907b34b90556e4acafd9b287ad70ab27d2c06489f5df43", size = 1817247, upload-time = "2026-02-27T16:37:09.584Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/f0/3d/698e7b760f68558048407718d4abb0bcc9baf9cfb7d1b9d68bcf60417031/typos-1.43.5-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:733221c09d6914c001d2ac16f630773dc36ff384df9ab128c9e93b019eeedf2f", size = 3474242, upload-time = "2026-02-16T15:34:03.26Z" },
{ url = "https://files.pythonhosted.org/packages/92/08/3b09c26a9e476399f9b2c4ab42df4d079b900d3818d5f47a7e9c04d81192/typos-1.43.5-py3-none-macosx_11_0_arm64.whl", hash = "sha256:b1d2f86b458917fdc91b89f47101498adc7338361e1ed0bed6e4325d0e674aca", size = 3370863, upload-time = "2026-02-16T15:34:04.839Z" },
{ url = "https://files.pythonhosted.org/packages/67/c6/64e75469439f78c75ed23a1c3712ce68e10293402da912c453f5b432ee9f/typos-1.43.5-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d75091c9519224f2f74964b6ca5133abfba9be44ce49f12a981e27c29bcef97", size = 8176174, upload-time = "2026-02-16T15:34:06.627Z" },
{ url = "https://files.pythonhosted.org/packages/b5/0f/60a3cc327cd5d5ab49f615c451ac24e59dedea2e33d69b3d54a6e9749c76/typos-1.43.5-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:839dfa2bc802b02460097b21f7d93d0fa87d5f76adb83049072211a7279f5ebe", size = 7326066, upload-time = "2026-02-16T15:34:08.002Z" },
{ url = "https://files.pythonhosted.org/packages/75/6e/54ee65eb9a00f398d0767a10f73ea430ec072ff3a1fea40a3f3573ce7a61/typos-1.43.5-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0825a4d77d66726e86fac03c6cf2efc11b0a5c112b2156dcaeb61a24a807166", size = 7701673, upload-time = "2026-02-16T15:34:09.324Z" },
{ url = "https://files.pythonhosted.org/packages/d4/9b/b5a6dc3fadece24dbf6e9a528ed0b9d19d22e55681d5330c1c693cb8bcfe/typos-1.43.5-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:7ee0a71ac21820868686ad52cb26a3e08197b09ceedf04d1736cb8472061665b", size = 7053904, upload-time = "2026-02-16T15:34:11.065Z" },
{ url = "https://files.pythonhosted.org/packages/45/e1/9369b16d0797d4601a874e454825a3742b9df43941aca90d89448750ce2f/typos-1.43.5-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:09615c8f64e656940533aa025a9e8dc5c93e5cf6bfbedb059cdcafde75cb6d72", size = 8126680, upload-time = "2026-02-16T15:34:12.549Z" },
{ url = "https://files.pythonhosted.org/packages/18/6f/dd5c786e860954a31fb5d2831a4d95e5f5b019cc6c558225fb6d38edc6c1/typos-1.43.5-py3-none-win32.whl", hash = "sha256:4c865b1b6149acbdaed9d548282ccfad526e1380de19d189e7ac675cd4869156", size = 3129847, upload-time = "2026-02-16T15:34:14.309Z" },
{ url = "https://files.pythonhosted.org/packages/07/35/d496189b38ec92c0946e13e63fa630699fd785f6d765132dad0c3efe3b55/typos-1.43.5-py3-none-win_amd64.whl", hash = "sha256:10009e9a3702da037803b075efc94fa5328bd93af357fb6a2f284c0dbcc77f30", size = 3310650, upload-time = "2026-02-16T15:34:15.617Z" },
{ url = "https://files.pythonhosted.org/packages/53/d1/db308b654e8ecb41df0b26610fb7970436effb318fd75dd0187f67c73e2c/typos-1.44.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:bf4241c469c14b7213a5ce2cf2a0692be21641be1a247dc126bec3f982195d6c", size = 3481848, upload-time = "2026-02-27T16:36:55.129Z" },
{ url = "https://files.pythonhosted.org/packages/67/9a/c04f8993bf96ee00921693f59d789c957263ad83d87f2b9cf550315105e4/typos-1.44.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:28332344a2939f20707ad0265cc10da5b930015c75920725cdd6db9ab9bdb18b", size = 3380731, upload-time = "2026-02-27T16:36:57.1Z" },
{ url = "https://files.pythonhosted.org/packages/f3/81/c6938251220335960d0322df26b6cb8b6a920b399cf71f8e00905946c582/typos-1.44.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95b91c20c914a0f728d3d0ad21e0fee3fd0bd0b7514d66b1d42f6047ebbb8646", size = 8191469, upload-time = "2026-02-27T16:36:58.887Z" },
{ url = "https://files.pythonhosted.org/packages/30/5b/4806b29068d85bd11230b20fa412b6159e70ee9cf0e153fdcafb71d9f468/typos-1.44.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd7a3d55466896336230679bf87484074b5912f555a8c8cf6ea88c084bf5b28d", size = 7336904, upload-time = "2026-02-27T16:37:00.535Z" },
{ url = "https://files.pythonhosted.org/packages/a3/91/54d735f2e792a20aedad46d33e0ec848afc4b0faa401dd356a656e24ef89/typos-1.44.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d78c219d802b124a9b90d7327665e6de20e3aba5f4ff31fb76e25c338a9475d", size = 7711695, upload-time = "2026-02-27T16:37:02.233Z" },
{ url = "https://files.pythonhosted.org/packages/03/2e/10baf07d7af76915dee47139e959c40fdbd821ad4c5530b055432da98cc3/typos-1.44.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:b3796090bdf1531cc3abd34a597d0ca5aef5c40841e49f9aeda58f1ab950e060", size = 7065163, upload-time = "2026-02-27T16:37:03.691Z" },
{ url = "https://files.pythonhosted.org/packages/e3/b0/e9eb53fdd512971fbf4a60d70402365242b99063fc92afff0294aab00791/typos-1.44.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:c6aea9c04fb9759efe94bddf8af4156707bc82de3b9b58118530f2c9818352c9", size = 8145921, upload-time = "2026-02-27T16:37:05.191Z" },
{ url = "https://files.pythonhosted.org/packages/e8/80/256939188c954219a9541cf7f7aa91212bcfb33efd61db2f4cb5ccc43376/typos-1.44.0-py3-none-win32.whl", hash = "sha256:4af31b78e38e9720be009b725334108a3c4684bfc820ca8309f7ce54d72c303d", size = 3138754, upload-time = "2026-02-27T16:37:06.699Z" },
{ url = "https://files.pythonhosted.org/packages/98/53/cc43bbfd5e003ebdba987e352b1f3f7fc069be5c61ee472983e689661a79/typos-1.44.0-py3-none-win_amd64.whl", hash = "sha256:3624055a8f04d9c40faf20ff0fcce9dbf4f8e2987c39680358901cc8634228c0", size = 3318795, upload-time = "2026-02-27T16:37:08.196Z" },
]
[[package]]
name = "uc-micro-py"
version = "1.0.3"
version = "2.0.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/91/7a/146a99696aee0609e3712f2b44c6274566bc368dfe8375191278045186b8/uc-micro-py-1.0.3.tar.gz", hash = "sha256:d321b92cff673ec58027c04015fcaa8bb1e005478643ff4a500882eaab88c48a", size = 6043, upload-time = "2024-02-09T16:52:01.654Z" }
sdist = { url = "https://files.pythonhosted.org/packages/78/67/9a363818028526e2d4579334460df777115bdec1bb77c08f9db88f6389f2/uc_micro_py-2.0.0.tar.gz", hash = "sha256:c53691e495c8db60e16ffc4861a35469b0ba0821fe409a8a7a0a71864d33a811", size = 6611, upload-time = "2026-03-01T06:31:27.526Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/37/87/1f677586e8ac487e29672e4b17455758fce261de06a0d086167bb760361a/uc_micro_py-1.0.3-py3-none-any.whl", hash = "sha256:db1dffff340817673d7b466ec86114a9dc0e9d4d9b5ba229d9d60e5c12600cd5", size = 6229, upload-time = "2024-02-09T16:52:00.371Z" },
{ url = "https://files.pythonhosted.org/packages/61/73/d21edf5b204d1467e06500080a50f79d49ef2b997c79123a536d4a17d97c/uc_micro_py-2.0.0-py3-none-any.whl", hash = "sha256:3603a3859af53e5a39bc7677713c78ea6589ff188d70f4fee165db88e22b242c", size = 6383, upload-time = "2026-03-01T06:31:26.257Z" },
]
[[package]]
@@ -2078,25 +2091,26 @@ wheels = [
[[package]]
name = "virtualenv"
version = "20.38.0"
version = "21.1.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "distlib" },
{ name = "filelock" },
{ name = "platformdirs" },
{ name = "python-discovery" },
]
sdist = { url = "https://files.pythonhosted.org/packages/d2/03/a94d404ca09a89a7301a7008467aed525d4cdeb9186d262154dd23208709/virtualenv-20.38.0.tar.gz", hash = "sha256:94f39b1abaea5185bf7ea5a46702b56f1d0c9aa2f41a6c2b8b0af4ddc74c10a7", size = 5864558, upload-time = "2026-02-19T07:48:02.385Z" }
sdist = { url = "https://files.pythonhosted.org/packages/2f/c9/18d4b36606d6091844daa3bd93cf7dc78e6f5da21d9f21d06c221104b684/virtualenv-21.1.0.tar.gz", hash = "sha256:1990a0188c8f16b6b9cf65c9183049007375b26aad415514d377ccacf1e4fb44", size = 5840471, upload-time = "2026-02-27T08:49:29.702Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/42/d7/394801755d4c8684b655d35c665aea7836ec68320304f62ab3c94395b442/virtualenv-20.38.0-py3-none-any.whl", hash = "sha256:d6e78e5889de3a4742df2d3d44e779366325a90cf356f15621fddace82431794", size = 5837778, upload-time = "2026-02-19T07:47:59.778Z" },
{ url = "https://files.pythonhosted.org/packages/78/55/896b06bf93a49bec0f4ae2a6f1ed12bd05c8860744ac3a70eda041064e4d/virtualenv-21.1.0-py3-none-any.whl", hash = "sha256:164f5e14c5587d170cf98e60378eb91ea35bf037be313811905d3a24ea33cc07", size = 5825072, upload-time = "2026-02-27T08:49:27.516Z" },
]
[[package]]
name = "vulture"
version = "2.14"
version = "2.15"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/8e/25/925f35db758a0f9199113aaf61d703de891676b082bd7cf73ea01d6000f7/vulture-2.14.tar.gz", hash = "sha256:cb8277902a1138deeab796ec5bef7076a6e0248ca3607a3f3dee0b6d9e9b8415", size = 58823, upload-time = "2024-12-08T17:39:43.319Z" }
sdist = { url = "https://files.pythonhosted.org/packages/59/c6/4f147b621b4c0899eb1770f98113334bb706ebd251ac2be979316b1985fa/vulture-2.15.tar.gz", hash = "sha256:f9d8b4ce29c69950d323f21dceab4a4d6c694403dffbed7713c4691057e561fe", size = 52438, upload-time = "2026-03-04T21:41:39.096Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/a0/56/0cc15b8ff2613c1d5c3dc1f3f576ede1c43868c1bc2e5ccaa2d4bcd7974d/vulture-2.14-py2.py3-none-any.whl", hash = "sha256:d9a90dba89607489548a49d557f8bac8112bd25d3cbc8aeef23e860811bd5ed9", size = 28915, upload-time = "2024-12-08T17:39:40.573Z" },
{ url = "https://files.pythonhosted.org/packages/1c/f3/07cf122e145bc6df976030e9935123124c3fcb5044cf407b5e71e85821b4/vulture-2.15-py3-none-any.whl", hash = "sha256:a3d8ebef918694326620eb128fa783486c8d285b23381c2b457d864ac056ef8d", size = 26895, upload-time = "2026-03-04T21:41:39.878Z" },
]
[[package]]

View File

@@ -3,14 +3,16 @@
# Build: uv run --group build pyinstaller vibe-acp.spec
# Output: dist/vibe-acp-dir/vibe-acp (+ dist/vibe-acp-dir/_internal/)
from PyInstaller.utils.hooks import collect_all
from PyInstaller.utils.hooks import collect_all, collect_submodules
# Collect all dependencies (including hidden imports and binaries) from builtins modules
core_builtins_deps = collect_all('vibe.core.tools.builtins')
acp_builtins_deps = collect_all('vibe.acp.tools.builtins')
# Extract hidden imports and binaries, filtering to ensure only strings are in hiddenimports
hidden_imports = ["truststore"]
# rich lazily loads Unicode width tables via importlib.import_module() at runtime,
# which PyInstaller's static analysis cannot discover.
hidden_imports = ["truststore"] + collect_submodules("rich._unicode_data")
for item in core_builtins_deps[2] + acp_builtins_deps[2]:
if isinstance(item, str):
hidden_imports.append(item)

View File

@@ -3,4 +3,4 @@ from __future__ import annotations
from pathlib import Path
VIBE_ROOT = Path(__file__).parent
__version__ = "2.7.2"
__version__ = "2.7.3"

View File

@@ -100,6 +100,7 @@ from vibe.core.config import (
VibeConfig,
load_dotenv_values,
)
from vibe.core.data_retention import DATA_RETENTION_MESSAGE
from vibe.core.proxy_setup import (
ProxySetupError,
parse_proxy_command,
@@ -476,6 +477,11 @@ class VibeAcpAgentLoop(AcpAgent):
description="Install the Lean 4 agent (leanstral)",
input=AvailableCommandInput(root=UnstructuredCommandInput(hint="")),
),
AvailableCommand(
name="data-retention",
description="Show data retention information",
input=AvailableCommandInput(root=UnstructuredCommandInput(hint="")),
),
AvailableCommand(
name="unleanstall",
description="Uninstall the Lean 4 agent",
@@ -545,6 +551,19 @@ class VibeAcpAgentLoop(AcpAgent):
)
return PromptResponse(stop_reason="end_turn", user_message_id=message_id)
async def _handle_data_retention_command(
self, session_id: str, message_id: str
) -> PromptResponse:
await self.client.session_update(
session_id=session_id,
update=AgentMessageChunk(
session_update="agent_message_chunk",
content=TextContentBlock(type="text", text=DATA_RETENTION_MESSAGE),
message_id=str(uuid4()),
),
)
return PromptResponse(stop_reason="end_turn", user_message_id=message_id)
async def _handle_unleanstall_command(
self, session_id: str, message_id: str
) -> PromptResponse:
@@ -772,6 +791,11 @@ class VibeAcpAgentLoop(AcpAgent):
if text_prompt.strip().lower().startswith("/leanstall"):
return await self._handle_leanstall_command(session_id, resolved_message_id)
if text_prompt.strip().lower().startswith("/data-retention"):
return await self._handle_data_retention_command(
session_id, resolved_message_id
)
async def agent_loop_task() -> None:
async for update in self._run_agent_loop(
session, text_prompt, resolved_message_id

View File

@@ -9,7 +9,7 @@ import tomli_w
from vibe import __version__
from vibe.cli.textual_ui.app import StartupOptions, run_textual_ui
from vibe.core.agent_loop import AgentLoop
from vibe.core.agent_loop import AgentLoop, TeleportError
from vibe.core.agents.models import BuiltinAgentName
from vibe.core.config import (
MissingAPIKeyError,
@@ -176,12 +176,13 @@ def run_cli(args: argparse.Namespace) -> None:
try:
final_response = run_programmatic(
config=config,
prompt=programmatic_prompt,
prompt=programmatic_prompt or "",
max_turns=args.max_turns,
max_price=args.max_price,
output_format=output_format,
previous_messages=loaded_session[0] if loaded_session else None,
agent_name=initial_agent_name,
teleport=args.teleport and config.nuage_enabled,
)
if final_response:
print(final_response)
@@ -189,6 +190,9 @@ def run_cli(args: argparse.Namespace) -> None:
except ConversationLimitException as e:
print(e, file=sys.stderr)
sys.exit(1)
except TeleportError as e:
print(f"Teleport error: {e}", file=sys.stderr)
sys.exit(1)
except RuntimeError as e:
print(f"Error: {e}", file=sys.stderr)
sys.exit(1)

View File

@@ -105,6 +105,11 @@ class CommandRegistry:
description="Rewind to a previous message",
handler="_start_rewind_mode",
),
"data-retention": Command(
aliases=frozenset(["/data-retention"]),
description="Show data retention information",
handler="_show_data_retention",
),
}
for command in excluded_commands:

View File

@@ -11,6 +11,7 @@ import subprocess
import time
from typing import Any, ClassVar, assert_never, cast
from weakref import WeakKeyDictionary
import webbrowser
from pydantic import BaseModel
from rich import print as rprint
@@ -46,6 +47,7 @@ from vibe.cli.textual_ui.notifications import (
NotificationPort,
TextualNotificationAdapter,
)
from vibe.cli.textual_ui.remote import RemoteSessionManager, is_progress_event
from vibe.cli.textual_ui.session_exit import print_session_resume_message
from vibe.cli.textual_ui.widgets.approval_app import ApprovalApp
from vibe.cli.textual_ui.widgets.banner.banner import Banner
@@ -109,20 +111,28 @@ from vibe.core.audio_player.audio_player import AudioPlayer
from vibe.core.audio_recorder import AudioRecorder
from vibe.core.autocompletion.path_prompt_adapter import render_path_prompt
from vibe.core.config import VibeConfig
from vibe.core.data_retention import DATA_RETENTION_MESSAGE
from vibe.core.logger import logger
from vibe.core.paths import HISTORY_FILE
from vibe.core.rewind import RewindError
from vibe.core.session.resume_sessions import (
ResumeSessionInfo,
list_local_resume_sessions,
list_remote_resume_sessions,
short_session_id,
)
from vibe.core.session.session_loader import SessionLoader
from vibe.core.teleport.types import (
TeleportAuthCompleteEvent,
TeleportAuthRequiredEvent,
TeleportCheckingGitEvent,
TeleportCompleteEvent,
TeleportFetchingUrlEvent,
TeleportPushingEvent,
TeleportPushRequiredEvent,
TeleportPushResponseEvent,
TeleportSendingGithubTokenEvent,
TeleportStartingWorkflowEvent,
TeleportWaitingForGitHubEvent,
)
from vibe.core.tools.builtins.ask_user_question import (
AskUserQuestionArgs,
@@ -136,9 +146,11 @@ from vibe.core.types import (
AgentStats,
ApprovalResponse,
Backend,
BaseEvent,
LLMMessage,
RateLimitError,
Role,
WaitingForInputEvent,
)
from vibe.core.utils import (
CancellationReason,
@@ -302,6 +314,7 @@ class VibeApp(App): # noqa: PLR0904
self._agent_running = False
self._interrupt_requested = False
self._agent_task: asyncio.Task | None = None
self._remote_manager = RemoteSessionManager()
self._loading_widget: LoadingWidget | None = None
self._pending_approval: asyncio.Future | None = None
@@ -398,6 +411,7 @@ class VibeApp(App): # noqa: PLR0904
mount_callback=self._mount_and_scroll,
get_tools_collapsed=lambda: self._tools_collapsed,
on_profile_changed=self._on_profile_changed,
is_remote=self._remote_manager.is_active,
)
self._chat_input_container = self.query_one(ChatInputContainer)
@@ -512,11 +526,21 @@ class VibeApp(App): # noqa: PLR0904
await self._remove_loading_widget()
async def on_question_app_answered(self, message: QuestionApp.Answered) -> None:
if self._remote_manager.has_pending_input and self._remote_manager.is_active:
result = AskUserQuestionResult(answers=message.answers, cancelled=False)
await self._handle_remote_answer(result)
return
if self._pending_question and not self._pending_question.done():
result = AskUserQuestionResult(answers=message.answers, cancelled=False)
self._pending_question.set_result(result)
async def on_question_app_cancelled(self, message: QuestionApp.Cancelled) -> None:
if self._remote_manager.has_pending_input:
self._remote_manager.cancel_pending_input()
await self._switch_to_input_app()
return
if self._pending_question and not self._pending_question.done():
result = AskUserQuestionResult(answers=[], cancelled=True)
self._pending_question.set_result(result)
@@ -554,6 +578,21 @@ class VibeApp(App): # noqa: PLR0904
await self._switch_to_input_app()
await self._switch_to_model_picker_app()
async def _ensure_loading_widget(self, status: str = "Generating") -> None:
if self._loading_widget and self._loading_widget.parent:
self._loading_widget.set_status(status)
return
loading_area = self._cached_loading_area
if loading_area is None:
try:
loading_area = self.query_one("#loading-area-content")
except Exception:
return
loading = LoadingWidget(status=status)
self._loading_widget = loading
await loading_area.mount(loading)
async def on_config_app_config_closed(
self, message: ConfigApp.ConfigClosed
) -> None:
@@ -755,6 +794,10 @@ class VibeApp(App): # noqa: PLR0904
)
async def _handle_user_message(self, message: str) -> None:
if self._remote_manager.is_active:
await self._handle_remote_user_message(message)
return
# message_index is where the user message will land in agent_loop.messages
# (checkpoint is created in agent_loop.act())
message_index = len(self.agent_loop.messages)
@@ -765,10 +808,46 @@ class VibeApp(App): # noqa: PLR0904
self._feedback_bar.maybe_show()
if not self._agent_running:
await self._remote_manager.stop_stream()
await self._remove_loading_widget()
self._agent_task = asyncio.create_task(
self._handle_agent_loop_turn(message)
)
async def _handle_remote_user_message(self, message: str) -> None:
warning = self._remote_manager.validate_input()
if warning:
await self._mount_and_scroll(WarningMessage(warning))
return
try:
await self._remote_manager.send_prompt(message)
except Exception as e:
await self._mount_and_scroll(
ErrorMessage(
f"Failed to send message: {e}", collapsed=self._tools_collapsed
)
)
return
await self._ensure_loading_widget()
async def _handle_remote_waiting_input(self, event: WaitingForInputEvent) -> None:
self._remote_manager.set_pending_input(event)
if question_args := self._remote_manager.build_question_args(event):
await self._switch_to_question_app(question_args)
return
await self._switch_to_input_app()
async def _handle_remote_answer(self, result: AskUserQuestionResult) -> None:
if result.cancelled or not result.answers:
self._remote_manager.cancel_pending_input()
await self._switch_to_input_app()
return
await self._remote_manager.send_prompt(
result.answers[0].answer, require_source=False
)
await self._switch_to_input_app()
await self._ensure_loading_widget()
def _reset_ui_state(self) -> None:
self._windowing.reset()
self._tool_call_map = None
@@ -881,13 +960,8 @@ class VibeApp(App): # noqa: PLR0904
async def _handle_agent_loop_turn(self, prompt: str) -> None:
self._agent_running = True
loading_area = self._cached_loading_area or self.query_one(
"#loading-area-content"
)
loading = LoadingWidget()
self._loading_widget = loading
await loading_area.mount(loading)
await self._remove_loading_widget()
await self._ensure_loading_widget()
try:
rendered_prompt = render_path_prompt(prompt, base_dir=Path.cwd())
@@ -895,6 +969,12 @@ class VibeApp(App): # noqa: PLR0904
self._narrator_manager.on_turn_start(rendered_prompt)
async for event in self.agent_loop.act(rendered_prompt):
self._narrator_manager.on_turn_event(event)
if isinstance(event, WaitingForInputEvent):
await self._remove_loading_widget()
if self._remote_manager.is_active:
await self._handle_remote_waiting_input(event)
elif self._loading_widget is None and is_progress_event(event):
await self._ensure_loading_widget()
if self.event_handler:
await self.event_handler.handle_event(
event,
@@ -972,32 +1052,47 @@ class VibeApp(App): # noqa: PLR0904
teleport_msg = TeleportMessage()
await self._mount_and_scroll(teleport_msg)
if self._remote_manager.is_active:
await loading.remove()
await self._mount_and_scroll(
ErrorMessage(
"Teleport is not available for remote sessions.",
collapsed=self._tools_collapsed,
)
)
return
try:
gen = self.agent_loop.teleport_to_vibe_nuage(prompt)
async for event in gen:
match event:
case TeleportCheckingGitEvent():
teleport_msg.set_status("Checking git status...")
case TeleportPushRequiredEvent(unpushed_count=count):
teleport_msg.set_status("Preparing workspace...")
case TeleportPushRequiredEvent(
unpushed_count=count, branch_not_pushed=branch_not_pushed
):
await loading.remove()
response = await self._ask_push_approval(count)
response = await self._ask_push_approval(
count, branch_not_pushed
)
await loading_area.mount(loading)
teleport_msg.set_status("Teleporting...")
await gen.asend(response)
next_event = await gen.asend(response)
if isinstance(next_event, TeleportPushingEvent):
teleport_msg.set_status("Syncing with remote...")
case TeleportPushingEvent():
teleport_msg.set_status("Pushing to remote...")
case TeleportAuthRequiredEvent(
user_code=code, verification_uri=uri
):
teleport_msg.set_status(
f"GitHub auth required. Code: {code} (copied)\nOpen: {uri}"
)
case TeleportAuthCompleteEvent():
teleport_msg.set_status("GitHub authenticated.")
teleport_msg.set_status("Syncing with remote...")
case TeleportStartingWorkflowEvent():
teleport_msg.set_status("Starting Nuage workflow...")
case TeleportSendingGithubTokenEvent():
teleport_msg.set_status("Sending encrypted GitHub token...")
teleport_msg.set_status("Teleporting...")
case TeleportWaitingForGitHubEvent():
teleport_msg.set_status("Connecting to GitHub...")
case TeleportAuthRequiredEvent(oauth_url=url):
webbrowser.open(url)
teleport_msg.set_status("Authorizing GitHub...")
case TeleportAuthCompleteEvent():
teleport_msg.set_status("GitHub authorized")
case TeleportFetchingUrlEvent():
teleport_msg.set_status("Finalizing...")
case TeleportCompleteEvent(url=url):
teleport_msg.set_complete(url)
except TeleportError as e:
@@ -1009,14 +1104,20 @@ class VibeApp(App): # noqa: PLR0904
if loading.parent:
await loading.remove()
async def _ask_push_approval(self, count: int) -> TeleportPushResponseEvent:
word = f"commit{'s' if count != 1 else ''}"
async def _ask_push_approval(
self, count: int, branch_not_pushed: bool
) -> TeleportPushResponseEvent:
if branch_not_pushed:
question = "Your branch doesn't exist on remote. Push to continue?"
else:
word = f"commit{'s' if count != 1 else ''}"
question = f"You have {count} unpushed {word}. Push to continue?"
push_label = "Push and continue"
result = await self._user_input_callback(
AskUserQuestionArgs(
questions=[
Question(
question=f"You have {count} unpushed {word}. Push to continue?",
question=question,
header="Push",
options=[Choice(label=push_label), Choice(label="Cancel")],
hide_other=True,
@@ -1105,20 +1206,41 @@ class VibeApp(App): # noqa: PLR0904
return
await self._switch_to_proxy_setup_app()
async def _show_data_retention(self) -> None:
await self._mount_and_scroll(UserCommandMessage(DATA_RETENTION_MESSAGE))
async def _show_session_picker(self) -> None:
session_config = self.config.session_logging
if not session_config.enabled:
await self._mount_and_scroll(
ErrorMessage(
"Session logging is disabled in configuration.",
collapsed=self._tools_collapsed,
)
)
return
cwd = str(Path.cwd())
raw_sessions = SessionLoader.list_sessions(session_config, cwd=cwd)
local_sessions = (
list_local_resume_sessions(self.config, cwd)
if self.config.session_logging.enabled
else []
)
remote_list_timeout = max(float(self.config.api_timeout), 10.0)
remote_error: str | None = None
await self._ensure_loading_widget("Loading sessions")
try:
remote_sessions = await asyncio.wait_for(
list_remote_resume_sessions(self.config), timeout=remote_list_timeout
)
except TimeoutError:
remote_sessions = []
remote_error = (
"Timed out while listing remote sessions "
f"after {remote_list_timeout:.0f}s."
)
except Exception as e:
remote_sessions = []
remote_error = f"Failed to list remote sessions: {e}"
finally:
await self._remove_loading_widget()
if remote_error is not None:
await self._mount_and_scroll(
ErrorMessage(remote_error, collapsed=self._tools_collapsed)
)
raw_sessions = [*local_sessions, *remote_sessions]
if not raw_sessions:
await self._mount_and_scroll(
@@ -1126,16 +1248,20 @@ class VibeApp(App): # noqa: PLR0904
)
return
sessions = sorted(
raw_sessions, key=lambda s: s.get("end_time") or "", reverse=True
)
sessions = sorted(raw_sessions, key=lambda s: s.end_time or "", reverse=True)
latest_messages = {
s["session_id"]: SessionLoader.get_first_user_message(
s["session_id"], session_config
s.option_id: SessionLoader.get_first_user_message(
s.session_id, self.config.session_logging
)
for s in sessions
if s.source == "local"
}
for session in sessions:
if session.source == "remote":
latest_messages[session.option_id] = (
f"{session.title or 'Remote workflow'} ({(session.status or 'RUNNING').lower()})"
)
picker = SessionPickerApp(sessions=sessions, latest_messages=latest_messages)
await self._switch_from_input(picker)
@@ -1144,53 +1270,21 @@ class VibeApp(App): # noqa: PLR0904
self, event: SessionPickerApp.SessionSelected
) -> None:
await self._switch_to_input_app()
session_config = self.config.session_logging
session_path = SessionLoader.find_session_by_id(
event.session_id, session_config
session = ResumeSessionInfo(
session_id=event.session_id,
source=event.source,
cwd="",
title=None,
end_time=None,
)
if not session_path:
await self._mount_and_scroll(
ErrorMessage(
f"Session `{event.session_id[:8]}` not found.",
collapsed=self._tools_collapsed,
)
)
return
try:
loaded_messages, _ = SessionLoader.load_session(session_path)
current_system_messages = [
msg for msg in self.agent_loop.messages if msg.role == Role.system
]
non_system_messages = [
msg for msg in loaded_messages if msg.role != Role.system
]
self.agent_loop.session_id = event.session_id
self.agent_loop.session_logger.resume_existing_session(
event.session_id, session_path
)
self.agent_loop.messages.reset(
current_system_messages + non_system_messages
)
self._reset_ui_state()
await self._load_more.hide()
messages_area = self._cached_messages_area or self.query_one("#messages")
await messages_area.remove_children()
await self._resume_history_from_messages()
await self._mount_and_scroll(
UserCommandMessage(f"Resumed session `{event.session_id[:8]}`")
)
except ValueError as e:
if event.source == "local":
await self._resume_local_session(session)
elif event.source == "remote":
await self._resume_remote_session(session)
else:
raise ValueError(f"Unknown session source: {event.source}")
except Exception as e:
await self._mount_and_scroll(
ErrorMessage(
f"Failed to load session: {e}", collapsed=self._tools_collapsed
@@ -1204,6 +1298,113 @@ class VibeApp(App): # noqa: PLR0904
await self._mount_and_scroll(UserCommandMessage("Resume cancelled."))
async def _resume_local_session(self, session: ResumeSessionInfo) -> None:
await self._remote_manager.detach()
session_config = self.config.session_logging
session_path = SessionLoader.find_session_by_id(
session.session_id, session_config
)
if not session_path:
raise ValueError(
f"Session `{short_session_id(session.session_id)}` not found."
)
loaded_messages, _ = SessionLoader.load_session(session_path)
if self._chat_input_container:
self._chat_input_container.set_custom_border(None)
current_system_messages = [
msg for msg in self.agent_loop.messages if msg.role == Role.system
]
non_system_messages = [
msg for msg in loaded_messages if msg.role != Role.system
]
self.agent_loop.session_id = session.session_id
self.agent_loop.session_logger.resume_existing_session(
session.session_id, session_path
)
self.agent_loop.messages.reset(current_system_messages + non_system_messages)
self._refresh_profile_widgets()
self._reset_ui_state()
await self._load_more.hide()
messages_area = self._cached_messages_area or self.query_one("#messages")
await messages_area.remove_children()
if self.event_handler:
self.event_handler.is_remote = False
await self._resume_history_from_messages()
await self._mount_and_scroll(
UserCommandMessage(
f"Resumed session `{short_session_id(session.session_id)}`"
)
)
async def _resume_remote_session(self, session: ResumeSessionInfo) -> None:
await self._remote_manager.attach(
session_id=session.session_id, config=self.config
)
self._refresh_profile_widgets()
if self._chat_input_container:
self._chat_input_container.set_custom_border(None)
self._reset_ui_state()
await self._load_more.hide()
messages_area = self._cached_messages_area or self.query_one("#messages")
await messages_area.remove_children()
if self.event_handler:
self.event_handler.is_remote = True
self._remote_manager.start_stream(self)
async def on_remote_event(
self, event: BaseEvent, loading_active: bool, loading_widget: Any
) -> None:
if self.event_handler:
await self.event_handler.handle_event(
event, loading_active=loading_active, loading_widget=loading_widget
)
async def on_remote_waiting_input(self, event: WaitingForInputEvent) -> None:
await self._handle_remote_waiting_input(event)
async def on_remote_user_message_cleared_input(self) -> None:
await self._switch_to_input_app()
async def on_remote_stream_error(self, error: str) -> None:
await self._mount_and_scroll(
ErrorMessage(error, collapsed=self._tools_collapsed)
)
async def on_remote_stream_ended(self, msg_type: str, text: str) -> None:
if msg_type == "error":
widget = ErrorMessage(text, collapsed=self._tools_collapsed)
elif msg_type == "warning":
widget = WarningMessage(text)
else:
widget = UserCommandMessage(text)
await self._mount_and_scroll(widget)
if self._chat_input_container:
self._chat_input_container.set_custom_border("Remote session ended")
async def on_remote_finalize_streaming(self) -> None:
if self.event_handler:
await self.event_handler.finalize_streaming()
async def remove_loading(self) -> None:
await self._remove_loading_widget()
async def ensure_loading(self, status: str = "Generating") -> None:
await self._ensure_loading_widget(status)
@property
def loading_widget(self) -> LoadingWidget | None:
return self._loading_widget
async def _reload_config(self) -> None:
try:
self._reset_ui_state()
@@ -1254,6 +1455,13 @@ class VibeApp(App): # noqa: PLR0904
async def _clear_history(self) -> None:
try:
self._reset_ui_state()
if self._remote_manager.is_active:
await self._remote_manager.detach()
self._refresh_profile_widgets()
if self.event_handler:
self.event_handler.is_remote = False
if self._chat_input_container:
self._chat_input_container.set_custom_border(None)
await self.agent_loop.clear_history()
if self.event_handler:
await self.event_handler.finalize_streaming()
@@ -1348,6 +1556,8 @@ class VibeApp(App): # noqa: PLR0904
self.event_handler.current_compact = None
def _get_session_resume_info(self) -> str | None:
if self._remote_manager.is_active:
return None
if not self.agent_loop.session_logger.enabled:
return None
if not self.agent_loop.session_logger.session_id:
@@ -1358,7 +1568,7 @@ class VibeApp(App): # noqa: PLR0904
)
if session_path is None:
return None
return self.agent_loop.session_logger.session_id[:8]
return short_session_id(self.agent_loop.session_logger.session_id)
async def _exit_app(self) -> None:
await self._narrator_manager.close()
@@ -1918,6 +2128,14 @@ class VibeApp(App): # noqa: PLR0904
if self._chat_input_container:
self._chat_input_container.set_safety(profile.safety)
self._chat_input_container.set_agent_name(profile.display_name.lower())
if self._remote_manager.is_active:
session_id = self._remote_manager.session_id
self._chat_input_container.set_custom_border(
f"Remote session {short_session_id(session_id, source='remote') if session_id else ''}",
ChatInputContainer.REMOTE_BORDER_CLASS,
)
else:
self._chat_input_container.set_custom_border(None)
async def _cycle_agent(self) -> None:
new_profile = self.agent_loop.agent_manager.next_agent(
@@ -1965,6 +2183,7 @@ class VibeApp(App): # noqa: PLR0904
def action_force_quit(self) -> None:
if self._agent_task and not self._agent_task.done():
self._agent_task.cancel()
self._remote_manager.cancel_stream_task()
self._narrator_manager.cancel()
self.exit(result=self._get_session_resume_info())

View File

@@ -118,6 +118,11 @@ TextArea > .text-area--cursor {
border: solid $mistral_orange;
border-title-color: $mistral_orange;
}
&.border-remote {
border: solid $mistral_orange;
border-title-color: $mistral_orange;
}
}
#input-body {

View File

@@ -4,7 +4,11 @@ from collections.abc import Callable
from typing import TYPE_CHECKING
from vibe.cli.textual_ui.widgets.compact import CompactMessage
from vibe.cli.textual_ui.widgets.messages import AssistantMessage, ReasoningMessage
from vibe.cli.textual_ui.widgets.messages import (
AssistantMessage,
ReasoningMessage,
UserMessage,
)
from vibe.cli.textual_ui.widgets.no_markup_static import NoMarkupStatic
from vibe.cli.textual_ui.widgets.tools import ToolCallMessage, ToolResultMessage
from vibe.core.tools.ui import ToolUIDataAdapter
@@ -19,6 +23,7 @@ from vibe.core.types import (
ToolResultEvent,
ToolStreamEvent,
UserMessageEvent,
WaitingForInputEvent,
)
from vibe.core.utils import TaggedText
@@ -32,10 +37,12 @@ class EventHandler:
mount_callback: Callable,
get_tools_collapsed: Callable[[], bool],
on_profile_changed: Callable[[], None] | None = None,
is_remote: bool = False,
) -> None:
self.mount_callback = mount_callback
self.get_tools_collapsed = get_tools_collapsed
self.on_profile_changed = on_profile_changed
self.is_remote = is_remote
self.tool_calls: dict[str, ToolCallMessage] = {}
self.current_compact: CompactMessage | None = None
self.current_streaming_message: AssistantMessage | None = None
@@ -72,6 +79,10 @@ class EventHandler:
self.on_profile_changed()
case UserMessageEvent():
await self.finalize_streaming()
if self.is_remote:
await self.mount_callback(UserMessage(event.content))
case WaitingForInputEvent():
await self.finalize_streaming()
case _:
await self.finalize_streaming()
await self._handle_unknown_event(event)

View File

@@ -0,0 +1,8 @@
"""Public surface of the textual-ui remote-session package.

Re-exports the remote session helpers so callers can import them from the
package root rather than the implementation module.
"""

from __future__ import annotations

from vibe.cli.textual_ui.remote.remote_session_manager import (
    RemoteSessionManager,
    is_progress_event,
)

__all__ = ["RemoteSessionManager", "is_progress_event"]

View File

@@ -0,0 +1,216 @@
from __future__ import annotations
import asyncio
from typing import Any, Protocol
from vibe.core.config import VibeConfig
from vibe.core.nuage.remote_events_source import RemoteEventsSource
from vibe.core.tools.builtins.ask_user_question import (
AskUserQuestionArgs,
Choice,
Question,
)
from vibe.core.types import (
AssistantEvent,
BaseEvent,
ReasoningEvent,
ToolCallEvent,
ToolStreamEvent,
UserMessageEvent,
WaitingForInputEvent,
)
# A WaitingForInputEvent is rendered as a multiple-choice question UI only when
# it carries at least this many predefined answers (see build_question_args).
_MIN_QUESTION_OPTIONS = 2
# At most this many predefined answers are surfaced as selectable choices;
# extras are truncated (see build_question_args).
_MAX_QUESTION_OPTIONS = 4
class RemoteSessionUI(Protocol):
    """Structural interface the UI must implement for RemoteSessionManager.

    RemoteSessionManager._consume_stream drives these callbacks while it
    consumes the remote event stream; the concrete implementer is the app
    that owns the chat widgets.
    """

    # Forward a remote event to the UI's event handler, together with the
    # current loading-indicator state.
    async def on_remote_event(
        self, event: BaseEvent, loading_active: bool, loading_widget: Any
    ) -> None: ...
    # The remote session is waiting for user input for the given event.
    async def on_remote_waiting_input(self, event: WaitingForInputEvent) -> None: ...
    # A remote UserMessageEvent resolved the pending input request.
    async def on_remote_user_message_cleared_input(self) -> None: ...
    # The stream raised an unexpected error; show it to the user.
    async def on_remote_stream_error(self, error: str) -> None: ...
    # The stream reached a terminal state; msg_type is "info"/"warning"/"error".
    async def on_remote_stream_ended(self, msg_type: str, text: str) -> None: ...
    # Remove the loading indicator, if present.
    async def remove_loading(self) -> None: ...
    # Ensure a loading indicator is shown with the given status text.
    async def ensure_loading(self, status: str = "Generating") -> None: ...
    @property
    def loading_widget(self) -> Any: ...
def is_progress_event(event: object) -> bool:
    """Return True if *event* represents in-flight generation progress.

    Progress events (assistant text, reasoning, tool calls, tool output
    streaming) are the ones that warrant showing a loading indicator.
    """
    progress_types = (
        AssistantEvent,
        ReasoningEvent,
        ToolCallEvent,
        ToolStreamEvent,
    )
    return isinstance(event, progress_types)
class RemoteSessionManager:
    """Owns the lifecycle of an attached remote session.

    Wraps a RemoteEventsSource plus the asyncio task that consumes its event
    stream, and tracks whether the remote side is currently waiting for user
    input. All UI interaction goes through the RemoteSessionUI protocol.
    """

    def __init__(self) -> None:
        # Connection to the remote session; None when no session is attached.
        self._events_source: RemoteEventsSource | None = None
        # Background task running _consume_stream; None when not streaming.
        self._stream_task: asyncio.Task | None = None
        # The WaitingForInputEvent the remote side is blocked on, if any.
        self._pending_waiting_input: WaitingForInputEvent | None = None

    @property
    def is_active(self) -> bool:
        """True when a remote session is attached."""
        return self._events_source is not None

    @property
    def is_terminated(self) -> bool:
        """True when the attached session has reached a terminal state."""
        if self._events_source is None:
            return False
        return self._events_source.is_terminated

    @property
    def is_waiting_for_input(self) -> bool:
        """True when the remote side reports it is waiting for user input."""
        if self._events_source is None:
            return False
        return self._events_source.is_waiting_for_input

    @property
    def has_pending_input(self) -> bool:
        """True when a WaitingForInputEvent is pending a local answer."""
        return self._pending_waiting_input is not None

    @property
    def session_id(self) -> str | None:
        """Identifier of the attached session, or None when detached."""
        if self._events_source is None:
            return None
        return self._events_source.session_id

    async def attach(self, session_id: str, config: VibeConfig) -> None:
        """Attach to the remote session `session_id`, detaching any current one."""
        await self.detach()
        self._events_source = RemoteEventsSource(session_id=session_id, config=config)

    async def detach(self) -> None:
        """Stop streaming, close the events source, and clear pending input."""
        await self._stop_stream()
        if self._events_source is not None:
            await self._events_source.close()
            self._events_source = None
        self._pending_waiting_input = None

    def validate_input(self) -> str | None:
        """Return a user-facing warning if input cannot be sent now, else None."""
        if self.is_terminated:
            return (
                "Remote session has ended. Use /clear to start a new session"
                " or /resume to attach to another."
            )
        if not self.is_waiting_for_input:
            return (
                "Remote session is not waiting for input. Please wait for the"
                " current task to complete."
            )
        return None

    async def send_prompt(self, message: str, *, require_source: bool = True) -> None:
        """Send `message` to the remote session.

        Raises RuntimeError when no session is attached and `require_source`
        is True; silently returns otherwise. The pending-input marker is
        cleared optimistically and restored if the send fails, so a failed
        send leaves the "waiting for input" state intact.
        """
        if self._events_source is None:
            if require_source:
                raise RuntimeError("No active remote session")
            return
        saved_pending = self._pending_waiting_input
        self._pending_waiting_input = None
        try:
            await self._events_source.send_prompt(message)
        except Exception:
            # Restore the pending input so the user can retry answering.
            self._pending_waiting_input = saved_pending
            raise

    def cancel_pending_input(self) -> None:
        """Discard the pending WaitingForInputEvent without answering it."""
        self._pending_waiting_input = None

    def build_question_args(
        self, event: WaitingForInputEvent
    ) -> AskUserQuestionArgs | None:
        """Convert a waiting-for-input event into question-app arguments.

        Returns None when the event carries fewer than _MIN_QUESTION_OPTIONS
        predefined answers (free-form input is used instead). At most
        _MAX_QUESTION_OPTIONS answers become choices.
        """
        if (
            not event.predefined_answers
            or len(event.predefined_answers) < _MIN_QUESTION_OPTIONS
        ):
            return None
        question = event.label or "Choose an answer"
        return AskUserQuestionArgs(
            questions=[
                Question(
                    question=question,
                    options=[
                        Choice(label=answer)
                        for answer in event.predefined_answers[:_MAX_QUESTION_OPTIONS]
                    ],
                )
            ]
        )

    def set_pending_input(self, event: WaitingForInputEvent) -> None:
        """Record `event` as the input request awaiting a local answer."""
        self._pending_waiting_input = event

    def start_stream(self, ui: RemoteSessionUI) -> None:
        """Start consuming remote events in a background task.

        No-op when no session is attached or a stream task is already running.
        """
        if self._events_source is None:
            return
        if self._stream_task and not self._stream_task.done():
            return
        self._stream_task = asyncio.create_task(
            self._consume_stream(ui), name="remote-session-stream"
        )

    async def stop_stream(self) -> None:
        """Cancel the stream task and wait for it to finish."""
        await self._stop_stream()

    def build_terminal_message(self) -> tuple[str, str]:
        """Return (message_type, text) describing how the session ended.

        message_type is "info", "warning", or "error"; defaults to a completed
        message when no source is attached or no failure/cancel flag is set.
        """
        if self._events_source is None:
            return ("info", "Remote session completed")
        if self._events_source.is_failed:
            return ("error", "Remote session failed")
        if self._events_source.is_canceled:
            return ("warning", "Remote session was canceled")
        return ("info", "Remote session completed")

    def cancel_stream_task(self) -> None:
        """Request cancellation of the stream task without awaiting it.

        Synchronous variant for shutdown paths (e.g. force quit) where
        awaiting is not possible.
        """
        if self._stream_task and not self._stream_task.done():
            self._stream_task.cancel()

    async def _stop_stream(self) -> None:
        """Cancel the stream task, await its termination, and clear it."""
        if self._stream_task is None or self._stream_task.done():
            self._stream_task = None
            return
        self._stream_task.cancel()
        try:
            await self._stream_task
        except asyncio.CancelledError:
            # Expected result of the cancel() above.
            pass
        self._stream_task = None

    async def _consume_stream(self, ui: RemoteSessionUI) -> None:
        """Consume the remote event stream and drive the UI callbacks.

        Runs as the body of the background stream task. Every event is
        forwarded to `ui.on_remote_event`; waiting-for-input and user-message
        events additionally update the pending-input state and the loading
        indicator. On exit (normal, error, or cancellation) the UI is
        finalized and, if the session reached a terminal state, a terminal
        message is shown.
        """
        events_source = self._events_source
        if events_source is None:
            return
        await ui.ensure_loading("Generating")
        try:
            async for event in events_source.attach():
                if isinstance(event, WaitingForInputEvent):
                    # Remote side is blocked on input: drop the spinner and
                    # hand the request to the UI.
                    await ui.remove_loading()
                    self._pending_waiting_input = event
                    await ui.on_remote_waiting_input(event)
                elif (
                    isinstance(event, UserMessageEvent)
                    and self._pending_waiting_input is not None
                ):
                    # A user message (possibly from another client) resolved
                    # the pending input request.
                    self._pending_waiting_input = None
                    await ui.on_remote_user_message_cleared_input()
                elif ui.loading_widget is None and is_progress_event(event):
                    # Generation resumed while no spinner is shown.
                    await ui.ensure_loading()
                await ui.on_remote_event(
                    event,
                    loading_active=ui.loading_widget is not None,
                    loading_widget=ui.loading_widget,
                )
        except asyncio.CancelledError:
            # Propagate so task cancellation semantics stay intact.
            raise
        except Exception as e:
            await ui.on_remote_stream_error(f"Remote stream stopped: {e}")
        finally:
            await ui.on_remote_finalize_streaming()
            await ui.remove_loading()
            self._stream_task = None
            self._pending_waiting_input = None
            if events_source.is_terminated:
                msg_type, text = self.build_terminal_message()
                await ui.on_remote_stream_ended(msg_type, text)

View File

@@ -30,6 +30,7 @@ SAFETY_BORDER_CLASSES: dict[AgentSafety, str] = {
class ChatInputContainer(Vertical):
ID_INPUT_BOX = "input-box"
REMOTE_BORDER_CLASS = "border-remote"
class Submitted(Message):
def __init__(self, value: str) -> None:
@@ -59,6 +60,8 @@ class ChatInputContainer(Vertical):
)
self._nuage_enabled = nuage_enabled
self._voice_manager = voice_manager
self._custom_border_label: str | None = None
self._custom_border_class: str | None = None
self._completion_manager = MultiCompletionManager([
SlashCommandController(CommandCompleter(self._get_slash_entries), self),
@@ -86,9 +89,9 @@ class ChatInputContainer(Vertical):
self._completion_popup = CompletionPopup()
yield self._completion_popup
border_class = SAFETY_BORDER_CLASSES.get(self._safety, "")
border_class = self._get_border_class()
with Vertical(id=self.ID_INPUT_BOX, classes=border_class) as input_box:
input_box.border_title = self._agent_name
input_box.border_title = self._get_border_title()
self._body = ChatInputBody(
history_file=self._history_file,
id="input-body",
@@ -195,23 +198,43 @@ class ChatInputContainer(Vertical):
def set_safety(self, safety: AgentSafety) -> None:
self._safety = safety
self._apply_input_box_chrome()
def set_agent_name(self, name: str) -> None:
self._agent_name = name
self._apply_input_box_chrome()
def set_custom_border(
self, label: str | None, border_class: str | None = None
) -> None:
self._custom_border_label = label
self._custom_border_class = border_class
self._apply_input_box_chrome()
def _get_border_class(self) -> str:
if self._custom_border_class is not None:
return self._custom_border_class
if self._custom_border_label is not None:
return ""
return SAFETY_BORDER_CLASSES.get(self._safety, "")
def _get_border_title(self) -> str:
if self._custom_border_label is not None:
return self._custom_border_label
return self._agent_name
def _apply_input_box_chrome(self) -> None:
try:
input_box = self.get_widget_by_id(self.ID_INPUT_BOX)
except Exception:
return
input_box.remove_class(self.REMOTE_BORDER_CLASS)
for border_class in SAFETY_BORDER_CLASSES.values():
input_box.remove_class(border_class)
if safety in SAFETY_BORDER_CLASSES:
input_box.add_class(SAFETY_BORDER_CLASSES[safety])
border_class = self._get_border_class()
if border_class:
input_box.add_class(border_class)
def set_agent_name(self, name: str) -> None:
self._agent_name = name
try:
input_box = self.get_widget_by_id(self.ID_INPUT_BOX)
input_box.border_title = name
except Exception:
pass
input_box.border_title = self._get_border_title()

View File

@@ -78,6 +78,7 @@ class LoadingWidget(SpinnerMixin, Static):
self.current_color_index = 0
self._color_direction = 1
self.transition_progress = 0
self._indicator_widget: Static | None = None
self._status_widget: Static | None = None
self.hint_widget: Static | None = None
self.start_time: float | None = None

View File

@@ -1,7 +1,7 @@
from __future__ import annotations
from datetime import UTC, datetime
from typing import TYPE_CHECKING, Any, ClassVar
from typing import Any, ClassVar, cast
from rich.text import Text
from textual.app import ComposeResult
@@ -12,9 +12,11 @@ from textual.widgets import OptionList
from textual.widgets.option_list import Option
from vibe.cli.textual_ui.widgets.no_markup_static import NoMarkupStatic
if TYPE_CHECKING:
from vibe.core.session.session_loader import SessionInfo
from vibe.core.session.resume_sessions import (
ResumeSessionInfo,
ResumeSessionSource,
short_session_id,
)
_SECONDS_PER_MINUTE = 60
_SECONDS_PER_HOUR = 3600
@@ -46,12 +48,15 @@ def _format_relative_time(iso_time: str | None) -> str:
return "unknown"
def _build_option_text(session: SessionInfo, message: str) -> Text:
def _build_option_text(session: ResumeSessionInfo, message: str) -> Text:
text = Text(no_wrap=True)
time_str = _format_relative_time(session.get("end_time"))
session_id = session["session_id"][:8]
time_str = _format_relative_time(session.end_time)
session_id = short_session_id(session.session_id, source=session.source)
source = session.source
text.append(f"{time_str:10}", style="dim")
text.append(" ")
text.append(f"{source:6}", style="cyan")
text.append(" ")
text.append(f"{session_id} ", style="dim")
text.append(message)
return text
@@ -67,7 +72,15 @@ class SessionPickerApp(Container):
]
class SessionSelected(Message):
def __init__(self, session_id: str) -> None:
option_id: str
source: ResumeSessionSource
session_id: str
def __init__(
self, option_id: str, source: ResumeSessionSource, session_id: str
) -> None:
self.option_id = option_id
self.source = source
self.session_id = session_id
super().__init__()
@@ -76,7 +89,7 @@ class SessionPickerApp(Container):
def __init__(
self,
sessions: list[SessionInfo],
sessions: list[ResumeSessionInfo],
latest_messages: dict[str, str],
**kwargs: Any,
) -> None:
@@ -89,9 +102,9 @@ class SessionPickerApp(Container):
Option(
_build_option_text(
session,
self._latest_messages.get(session["session_id"], "(empty session)"),
self._latest_messages.get(session.option_id, "(empty session)"),
),
id=session["session_id"],
id=session.option_id,
)
for session in self._sessions
]
@@ -106,7 +119,15 @@ class SessionPickerApp(Container):
def on_option_list_option_selected(self, event: OptionList.OptionSelected) -> None:
if event.option.id:
self.post_message(self.SessionSelected(event.option.id))
option_id = event.option.id
source, _, session_id = option_id.partition(":")
self.post_message(
self.SessionSelected(
option_id=option_id,
source=cast(ResumeSessionSource, source),
session_id=session_id,
)
)
def action_cancel(self) -> None:
self.post_message(self.Cancelled())

View File

@@ -76,9 +76,7 @@ class ToolCallMessage(StatusMessage):
self._stream_widget.display = True
def stop_spinning(self, success: bool = True) -> None:
"""Stop the spinner and hide the stream widget."""
if self._stream_widget:
self._stream_widget.display = False
"""Stop the spinner while keeping stream row stable to avoid layout jumps."""
super().stop_spinning(success)
def set_result_text(self, text: str) -> None:

View File

@@ -161,18 +161,19 @@ class AgentLoop:
entrypoint_metadata: EntrypointMetadata | None = None,
) -> None:
self._base_config = config
self._max_turns = max_turns
self._max_price = max_price
self._plan_session = PlanSession()
self.mcp_registry = MCPRegistry()
self.agent_manager = AgentManager(
lambda: self._base_config, initial_agent=agent_name
)
self.mcp_registry = MCPRegistry()
self.tool_manager = ToolManager(
lambda: self.config, mcp_registry=self.mcp_registry
)
self.skill_manager = SkillManager(lambda: self.config)
self.message_observer = message_observer
self._max_turns = max_turns
self._max_price = max_price
self._plan_session = PlanSession()
self.format_handler = APIToolFormatHandler()
self.backend_factory = lambda: backend or self._select_backend()
@@ -181,7 +182,6 @@ class AgentLoop:
backend_getter=lambda: self.backend, config_getter=lambda: self.config
)
self.message_observer = message_observer
self.enable_streaming = enable_streaming
self.middleware_pipeline = MiddlewarePipeline()
self._setup_middleware()
@@ -193,6 +193,11 @@ class AgentLoop:
self.messages = MessageList(initial=[system_message], observer=message_observer)
self.stats = AgentStats()
self.approval_callback: ApprovalCallback | None = None
self.user_input_callback: UserInputCallback | None = None
self.entrypoint_metadata = entrypoint_metadata
self.session_id = str(uuid4())
try:
active_model = config.get_active_model()
self.stats.input_price_per_million = active_model.input_price
@@ -200,11 +205,6 @@ class AgentLoop:
except ValueError:
pass
self.approval_callback: ApprovalCallback | None = None
self.user_input_callback: UserInputCallback | None = None
self.entrypoint_metadata = entrypoint_metadata
self.session_id = str(uuid4())
self._current_user_message_id: str | None = None
self._is_user_prompt_call: bool = False
@@ -245,6 +245,16 @@ class AgentLoop:
def auto_approve(self) -> bool:
return self.config.auto_approve
def refresh_config(self) -> None:
self._base_config = VibeConfig.load()
self.agent_manager.invalidate_config()
def set_approval_callback(self, callback: ApprovalCallback) -> None:
self.approval_callback = callback
def set_user_input_callback(self, callback: UserInputCallback) -> None:
self.user_input_callback = callback
def set_tool_permission(
self, tool_name: str, permission: ToolPermission, save_permanently: bool = False
) -> None:
@@ -291,10 +301,6 @@ class AgentLoop:
tool_name, ToolPermission.ALWAYS, save_permanently=save_permanently
)
def refresh_config(self) -> None:
self._base_config = VibeConfig.load()
self.agent_manager.invalidate_config()
def emit_new_session_telemetry(self) -> None:
entrypoint = (
self.entrypoint_metadata.agent_entrypoint
@@ -346,7 +352,7 @@ class AgentLoop:
async def act(
self, msg: str, client_message_id: str | None = None
) -> AsyncGenerator[BaseEvent]:
) -> AsyncGenerator[BaseEvent, None]:
self._clean_message_history()
self.rewind_manager.create_checkpoint()
try:
@@ -376,6 +382,7 @@ class AgentLoop:
nuage_workflow_id=self.config.nuage_workflow_id,
nuage_api_key=self.config.nuage_api_key,
nuage_task_queue=self.config.nuage_task_queue,
vibe_config=self._base_config,
)
return self._teleport_service
@@ -1130,12 +1137,6 @@ class AgentLoop:
self.session_id = str(uuid4())
self.session_logger.reset_session(self.session_id)
def set_approval_callback(self, callback: ApprovalCallback) -> None:
self.approval_callback = callback
def set_user_input_callback(self, callback: UserInputCallback) -> None:
self.user_input_callback = callback
async def clear_history(self) -> None:
await self.session_logger.save_interaction(
self.messages,

View File

@@ -396,13 +396,12 @@ class VibeConfig(BaseSettings):
api_timeout: float = 720.0
auto_compact_threshold: int = 200_000
# TODO(vibe-nuage): remove exclude=True once the feature is publicly available
nuage_enabled: bool = Field(default=False, exclude=True)
nuage_base_url: str = Field(default="https://api.globalaegis.net", exclude=True)
nuage_base_url: str = Field(default="https://api.mistral.ai", exclude=True)
nuage_workflow_id: str = Field(default="__shared-nuage-workflow", exclude=True)
nuage_task_queue: str | None = Field(default="shared-vibe-nuage", exclude=True)
# TODO(vibe-nuage): change default value to MISTRAL_API_KEY once prod has shared vibe-nuage workers
nuage_api_key_env_var: str = Field(default="STAGING_MISTRAL_API_KEY", exclude=True)
nuage_api_key_env_var: str = Field(default="MISTRAL_API_KEY", exclude=True)
nuage_project_name: str = Field(default="Vibe", exclude=True)
# TODO(otel): remove exclude=True once the feature is publicly available
enable_otel: bool = Field(default=False, exclude=True)

View File

@@ -0,0 +1,7 @@
from __future__ import annotations
# User-facing Markdown notice telling users that API interactions may be
# collected to improve Mistral models, with a link to the privacy settings.
# This string is displayed verbatim in the client; treat wording as product copy.
DATA_RETENTION_MESSAGE = """## Your Data Helps Improve Mistral AI
At Mistral AI, we're committed to delivering the best possible experience. When you use Mistral models on our API, your interactions may be collected to improve our models, ensuring they stay cutting-edge, accurate, and helpful.
Manage your data settings [here](https://admin.mistral.ai/plateforme/privacy)"""

View File

View File

@@ -0,0 +1,26 @@
from __future__ import annotations

from typing import Any

from pydantic import BaseModel, ConfigDict

# Name of the workflow "update" handler used to submit user input to a
# running remote agent (see RemoteEventsSource.send_prompt).
_SUBMIT_INPUT_UPDATE_NAME = "__submit_input"


class AgentCompletionState(BaseModel):
    """Final output of a remote agent run."""

    # Visible assistant text.
    content: str = ""
    # Model reasoning trace, when the backend provides one.
    reasoning_content: str = ""


class InterruptSignal(BaseModel):
    """Payload for interrupting a running workflow with a new prompt."""

    prompt: str


class ChatInputModel(BaseModel):
    """JSON schema marker for plain chat input (schema title "ChatInput")."""

    model_config = ConfigDict(title="ChatInput", extra="forbid")
    # Content chunks, e.g. {"type": "text", "text": ...}.
    message: list[Any]


class SubmitInputModel(BaseModel):
    """Answer to a pending input request, keyed by the waiting task's id."""

    task_id: str
    input: Any

195
vibe/core/nuage/client.py Normal file
View File

@@ -0,0 +1,195 @@
from __future__ import annotations
from collections.abc import AsyncGenerator
import json
from typing import Any
import httpx
from pydantic import BaseModel
from vibe.core.logger import logger
from vibe.core.nuage.exceptions import ErrorCode, WorkflowsException
from vibe.core.nuage.streaming import StreamEvent, StreamEventsQueryParams
from vibe.core.nuage.workflow import (
SignalWorkflowResponse,
UpdateWorkflowResponse,
WorkflowExecutionListResponse,
)
class WorkflowsClient:
    """Async HTTP client for the Nuage workflows REST API.

    Wraps an ``httpx.AsyncClient`` and exposes SSE event streaming, workflow
    signals/updates, and execution listing under ``{base_url}/v1/workflows``.
    Usable as an async context manager; used bare, the underlying client is
    created lazily on first request and must be closed via ``__aexit__``.
    """

    def __init__(
        self, base_url: str, api_key: str | None = None, timeout: float = 60.0
    ) -> None:
        # Strip trailing slash so _api_url never produces "//".
        self._base_url = base_url.rstrip("/")
        self._api_key = api_key
        self._timeout = timeout
        self._client: httpx.AsyncClient | None = None
        # Whether this instance is responsible for closing _client.
        # NOTE(review): currently always True (both creation paths set it);
        # kept presumably for future injection of an external client.
        self._owns_client = True

    async def __aenter__(self) -> WorkflowsClient:
        headers: dict[str, str] = {}
        if self._api_key:
            headers["Authorization"] = f"Bearer {self._api_key}"
        self._client = httpx.AsyncClient(timeout=self._timeout, headers=headers)
        return self

    async def __aexit__(
        self,
        exc_type: type[BaseException] | None,
        exc_val: BaseException | None,
        exc_tb: Any,
    ) -> None:
        # Close only clients we created; always drop the reference.
        if self._owns_client and self._client:
            await self._client.aclose()
        self._client = None

    @property
    def _http_client(self) -> httpx.AsyncClient:
        # Lazily create the client when used outside "async with".
        if self._client is None:
            headers: dict[str, str] = {}
            if self._api_key:
                headers["Authorization"] = f"Bearer {self._api_key}"
            self._client = httpx.AsyncClient(timeout=self._timeout, headers=headers)
            self._owns_client = True
        return self._client

    def _api_url(self, endpoint: str) -> str:
        # endpoint is expected to start with "/" (all call sites do).
        return f"{self._base_url}/v1/workflows{endpoint}"

    def _parse_sse_data(
        self, raw_data: str, event_type: str | None
    ) -> StreamEvent | None:
        """Decode one SSE data payload into a StreamEvent.

        Raises WorkflowsException for server-signalled errors; json/validation
        errors propagate and are handled (logged and skipped) by the caller.
        """
        parsed = json.loads(raw_data)
        # Server errors arrive either as an "error" SSE event type or as a
        # JSON object with an "error" key.
        if event_type == "error" or (isinstance(parsed, dict) and "error" in parsed):
            error_msg = (
                parsed.get("error", "Unknown stream error")
                if isinstance(parsed, dict)
                else str(parsed)
            )
            raise WorkflowsException(
                message=f"Stream error from server: {error_msg}",
                code=ErrorCode.GET_EVENTS_STREAM_ERROR,
            )
        return StreamEvent.model_validate(parsed)

    async def stream_events(
        self, params: StreamEventsQueryParams
    ) -> AsyncGenerator[StreamEvent, None]:
        """Stream workflow events over SSE from GET /events/stream.

        WorkflowsException is re-raised as-is; any other failure is wrapped
        with code GET_EVENTS_STREAM_ERROR.
        """
        endpoint = "/events/stream"
        # exclude_none keeps optional params out of the query string.
        query = params.model_dump(exclude_none=True)
        try:
            async with self._http_client.stream(
                "GET", self._api_url(endpoint), params=query
            ) as response:
                response.raise_for_status()
                async for event in self._iter_sse_events(response):
                    yield event
        except WorkflowsException:
            raise
        except Exception as exc:
            raise WorkflowsException.from_api_client_error(
                exc,
                message="Failed to stream events",
                code=ErrorCode.GET_EVENTS_STREAM_ERROR,
            ) from exc

    async def _iter_sse_events(
        self, response: httpx.Response
    ) -> AsyncGenerator[StreamEvent, None]:
        """Parse raw SSE lines ("event:" / "data:") into StreamEvents.

        Only single-line data fields are handled; blank lines and ":" comment
        lines are skipped. Malformed payloads are logged and dropped rather
        than aborting the stream.
        """
        event_type: str | None = None
        async for line in response.aiter_lines():
            if line is None or line == "" or line.startswith(":"):
                continue
            if line.startswith("event:"):
                event_type = line[len("event:") :].strip()
                continue
            if not line.startswith("data:"):
                continue
            raw_data = line[len("data:") :].strip()
            try:
                event = self._parse_sse_data(raw_data, event_type)
                if event:
                    yield event
            except WorkflowsException:
                raise
            except Exception:
                # Bad JSON or schema mismatch: skip this event, keep streaming.
                logger.warning(
                    "Failed to parse SSE event",
                    exc_info=True,
                    extra={"event_data": raw_data},
                )
            finally:
                # event_type applies to exactly one data payload.
                event_type = None

    async def signal_workflow(
        self, execution_id: str, signal_name: str, input_data: BaseModel | None = None
    ) -> SignalWorkflowResponse:
        """Send a fire-and-forget signal to a workflow execution."""
        endpoint = f"/executions/{execution_id}/signals"
        try:
            input_data_dict = input_data.model_dump(mode="json") if input_data else {}
            request_body = {"name": signal_name, "input": input_data_dict}
            response = await self._http_client.post(
                self._api_url(endpoint),
                json=request_body,
                headers={"Content-Type": "application/json"},
            )
            response.raise_for_status()
            return SignalWorkflowResponse.model_validate(response.json())
        except WorkflowsException:
            raise
        except Exception as exc:
            raise WorkflowsException.from_api_client_error(
                exc,
                message="Failed to signal workflow",
                code=ErrorCode.POST_EXECUTIONS_SIGNALS_ERROR,
            ) from exc

    async def update_workflow(
        self, execution_id: str, update_name: str, input_data: BaseModel | None = None
    ) -> UpdateWorkflowResponse:
        """Invoke a workflow "update" handler and return its result."""
        endpoint = f"/executions/{execution_id}/updates"
        try:
            input_data_dict = input_data.model_dump(mode="json") if input_data else {}
            request_body = {"name": update_name, "input": input_data_dict}
            response = await self._http_client.post(
                self._api_url(endpoint),
                json=request_body,
                headers={"Content-Type": "application/json"},
            )
            response.raise_for_status()
            return UpdateWorkflowResponse.model_validate(response.json())
        except WorkflowsException:
            raise
        except Exception as exc:
            raise WorkflowsException.from_api_client_error(
                exc,
                message="Failed to update workflow",
                code=ErrorCode.POST_EXECUTIONS_UPDATES_ERROR,
            ) from exc

    async def get_workflow_runs(
        self,
        workflow_identifier: str | None = None,
        page_size: int = 50,
        next_page_token: str | None = None,
    ) -> WorkflowExecutionListResponse:
        """List workflow executions, optionally filtered and paginated."""
        params: dict[str, Any] = {"page_size": page_size}
        if workflow_identifier:
            params["workflow_identifier"] = workflow_identifier
        if next_page_token:
            params["next_page_token"] = next_page_token
        endpoint = "/runs"
        try:
            response = await self._http_client.get(
                self._api_url(endpoint), params=params
            )
            response.raise_for_status()
            return WorkflowExecutionListResponse.model_validate(response.json())
        except Exception as exc:
            raise WorkflowsException.from_api_client_error(
                exc,
                message="Failed to get workflow runs",
                code=ErrorCode.GET_EXECUTIONS_ERROR,
            ) from exc

227
vibe/core/nuage/events.py Normal file
View File

@@ -0,0 +1,227 @@
from __future__ import annotations
from enum import StrEnum
from typing import Annotated, Any, Literal
from pydantic import BaseModel, Discriminator, Field, Tag
class WorkflowEventType(StrEnum):
    """Discriminator tags for events emitted on the workflow event stream."""

    # Terminal workflow-level outcomes.
    WORKFLOW_EXECUTION_COMPLETED = "WORKFLOW_EXECUTION_COMPLETED"
    WORKFLOW_EXECUTION_FAILED = "WORKFLOW_EXECUTION_FAILED"
    WORKFLOW_EXECUTION_CANCELED = "WORKFLOW_EXECUTION_CANCELED"
    # Lifecycle of custom tasks running inside a workflow.
    CUSTOM_TASK_STARTED = "CUSTOM_TASK_STARTED"
    CUSTOM_TASK_IN_PROGRESS = "CUSTOM_TASK_IN_PROGRESS"
    CUSTOM_TASK_COMPLETED = "CUSTOM_TASK_COMPLETED"
    CUSTOM_TASK_FAILED = "CUSTOM_TASK_FAILED"
    CUSTOM_TASK_TIMED_OUT = "CUSTOM_TASK_TIMED_OUT"
    CUSTOM_TASK_CANCELED = "CUSTOM_TASK_CANCELED"
class JSONPatchBase(BaseModel):
path: str
value: Any = None
class JSONPatchAdd(JSONPatchBase):
op: Literal["add"] = "add"
class JSONPatchReplace(JSONPatchBase):
op: Literal["replace"] = "replace"
class JSONPatchRemove(JSONPatchBase):
op: Literal["remove"] = "remove"
class JSONPatchAppend(JSONPatchBase):
op: Literal["append"] = "append"
value: str = ""
JSONPatch = Annotated[
Annotated[JSONPatchAppend, Tag("append")]
| Annotated[JSONPatchAdd, Tag("add")]
| Annotated[JSONPatchReplace, Tag("replace")]
| Annotated[JSONPatchRemove, Tag("remove")],
Discriminator("op"),
]
class JSONPatchPayload(BaseModel):
type: Literal["json_patch"] = "json_patch"
value: list[JSONPatch] = Field(default_factory=list)
class JSONPayload(BaseModel):
type: Literal["json"] = "json"
value: Any = None
Payload = Annotated[
Annotated[JSONPayload, Tag("json")]
| Annotated[JSONPatchPayload, Tag("json_patch")],
Discriminator("type"),
]
class Failure(BaseModel):
message: str
class BaseEvent(BaseModel):
event_id: str
event_timestamp: int = 0
root_workflow_exec_id: str = ""
parent_workflow_exec_id: str | None = None
workflow_exec_id: str = ""
workflow_run_id: str = ""
workflow_name: str = ""
class WorkflowExecutionFailedAttributes(BaseModel):
task_id: str = ""
failure: Failure
class WorkflowExecutionCanceledAttributes(BaseModel):
task_id: str = ""
reason: str | None = None
class WorkflowExecutionCompletedAttributes(BaseModel):
task_id: str = ""
result: JSONPayload = Field(default_factory=lambda: JSONPayload(value=None))
class CustomTaskStartedAttributes(BaseModel):
custom_task_id: str
custom_task_type: str
payload: JSONPayload = Field(default_factory=lambda: JSONPayload(value=None))
class CustomTaskInProgressAttributes(BaseModel):
custom_task_id: str
custom_task_type: str
payload: Payload
class CustomTaskCompletedAttributes(BaseModel):
custom_task_id: str
custom_task_type: str
payload: JSONPayload = Field(default_factory=lambda: JSONPayload(value=None))
class CustomTaskFailedAttributes(BaseModel):
custom_task_id: str
custom_task_type: str
failure: Failure
class CustomTaskTimedOutAttributes(BaseModel):
custom_task_id: str
custom_task_type: str
timeout_type: str | None = None
class CustomTaskCanceledAttributes(BaseModel):
custom_task_id: str
custom_task_type: str
reason: str | None = None
class WorkflowExecutionCompleted(BaseEvent):
event_type: Literal[WorkflowEventType.WORKFLOW_EXECUTION_COMPLETED] = (
WorkflowEventType.WORKFLOW_EXECUTION_COMPLETED
)
attributes: WorkflowExecutionCompletedAttributes
class WorkflowExecutionFailed(BaseEvent):
event_type: Literal[WorkflowEventType.WORKFLOW_EXECUTION_FAILED] = (
WorkflowEventType.WORKFLOW_EXECUTION_FAILED
)
attributes: WorkflowExecutionFailedAttributes
class WorkflowExecutionCanceled(BaseEvent):
event_type: Literal[WorkflowEventType.WORKFLOW_EXECUTION_CANCELED] = (
WorkflowEventType.WORKFLOW_EXECUTION_CANCELED
)
attributes: WorkflowExecutionCanceledAttributes
class CustomTaskStarted(BaseEvent):
event_type: Literal[WorkflowEventType.CUSTOM_TASK_STARTED] = (
WorkflowEventType.CUSTOM_TASK_STARTED
)
attributes: CustomTaskStartedAttributes
class CustomTaskInProgress(BaseEvent):
event_type: Literal[WorkflowEventType.CUSTOM_TASK_IN_PROGRESS] = (
WorkflowEventType.CUSTOM_TASK_IN_PROGRESS
)
attributes: CustomTaskInProgressAttributes
class CustomTaskCompleted(BaseEvent):
event_type: Literal[WorkflowEventType.CUSTOM_TASK_COMPLETED] = (
WorkflowEventType.CUSTOM_TASK_COMPLETED
)
attributes: CustomTaskCompletedAttributes
class CustomTaskFailed(BaseEvent):
event_type: Literal[WorkflowEventType.CUSTOM_TASK_FAILED] = (
WorkflowEventType.CUSTOM_TASK_FAILED
)
attributes: CustomTaskFailedAttributes
class CustomTaskTimedOut(BaseEvent):
event_type: Literal[WorkflowEventType.CUSTOM_TASK_TIMED_OUT] = (
WorkflowEventType.CUSTOM_TASK_TIMED_OUT
)
attributes: CustomTaskTimedOutAttributes
class CustomTaskCanceled(BaseEvent):
event_type: Literal[WorkflowEventType.CUSTOM_TASK_CANCELED] = (
WorkflowEventType.CUSTOM_TASK_CANCELED
)
attributes: CustomTaskCanceledAttributes
def _get_event_type_discriminator(v: Any) -> str:
    """Return the event_type tag for pydantic's Discriminator.

    Accepts either a raw dict (pre-validation) or a model instance
    (post-validation); enum values are unwrapped to their string form.
    """
    if isinstance(v, dict):
        raw = v.get("event_type", "")
    else:
        raw = getattr(v, "event_type", "")
    return raw.value if isinstance(raw, WorkflowEventType) else str(raw)
WorkflowEvent = Annotated[
Annotated[
WorkflowExecutionCompleted, Tag(WorkflowEventType.WORKFLOW_EXECUTION_COMPLETED)
]
| Annotated[
WorkflowExecutionFailed, Tag(WorkflowEventType.WORKFLOW_EXECUTION_FAILED)
]
| Annotated[
WorkflowExecutionCanceled, Tag(WorkflowEventType.WORKFLOW_EXECUTION_CANCELED)
]
| Annotated[CustomTaskStarted, Tag(WorkflowEventType.CUSTOM_TASK_STARTED)]
| Annotated[CustomTaskInProgress, Tag(WorkflowEventType.CUSTOM_TASK_IN_PROGRESS)]
| Annotated[CustomTaskCompleted, Tag(WorkflowEventType.CUSTOM_TASK_COMPLETED)]
| Annotated[CustomTaskFailed, Tag(WorkflowEventType.CUSTOM_TASK_FAILED)]
| Annotated[CustomTaskTimedOut, Tag(WorkflowEventType.CUSTOM_TASK_TIMED_OUT)]
| Annotated[CustomTaskCanceled, Tag(WorkflowEventType.CUSTOM_TASK_CANCELED)],
Discriminator(_get_event_type_discriminator),
]

View File

@@ -0,0 +1,46 @@
from __future__ import annotations
from enum import StrEnum
from http import HTTPStatus
import httpx
class ErrorCode(StrEnum):
TEMPORAL_CONNECTION_ERROR = "temporal_connection_error"
GET_EVENTS_STREAM_ERROR = "get_events_stream_error"
POST_EXECUTIONS_SIGNALS_ERROR = "post_executions_signals_error"
POST_EXECUTIONS_UPDATES_ERROR = "post_executions_updates_error"
GET_EXECUTIONS_ERROR = "get_executions_error"
class WorkflowsException(Exception):
def __init__(
self,
message: str,
status: HTTPStatus = HTTPStatus.INTERNAL_SERVER_ERROR,
code: ErrorCode = ErrorCode.TEMPORAL_CONNECTION_ERROR,
) -> None:
self.status = status
self.message = message
self.code = code
def __str__(self) -> str:
return f"{self.message} (code={self.code}, status={self.status.value})"
@classmethod
def from_api_client_error(
cls,
exc: Exception,
message: str = "HTTP request failed",
code: ErrorCode = ErrorCode.TEMPORAL_CONNECTION_ERROR,
) -> WorkflowsException:
status = HTTPStatus.INTERNAL_SERVER_ERROR
if isinstance(exc, httpx.HTTPStatusError):
try:
status = HTTPStatus(exc.response.status_code)
except ValueError:
pass
if isinstance(exc, httpx.ConnectError | httpx.TimeoutException):
status = HTTPStatus.BAD_GATEWAY
return cls(message=f"{message}: {exc}", code=code, status=status)

View File

@@ -0,0 +1,202 @@
from __future__ import annotations
import asyncio
from collections.abc import AsyncGenerator
from typing import Any
from pydantic import TypeAdapter, ValidationError
from vibe.core.agent_loop import AgentLoopStateError
from vibe.core.config import VibeConfig
from vibe.core.nuage.agent_models import (
_SUBMIT_INPUT_UPDATE_NAME,
ChatInputModel,
SubmitInputModel,
)
from vibe.core.nuage.client import WorkflowsClient
from vibe.core.nuage.events import WorkflowEvent
from vibe.core.nuage.exceptions import ErrorCode, WorkflowsException
from vibe.core.nuage.remote_workflow_event_translator import (
PendingInputRequest,
RemoteWorkflowEventTranslator,
)
from vibe.core.nuage.streaming import StreamEventsQueryParams
from vibe.core.nuage.workflow import WorkflowExecutionStatus
from vibe.core.tools.manager import ToolManager
from vibe.core.types import AgentStats, BaseEvent, LLMMessage, Role
_RETRYABLE_STREAM_ERRORS = ("peer closed connection", "incomplete chunked read")
_WORKFLOW_EVENT_ADAPTER = TypeAdapter(WorkflowEvent)
class RemoteEventsSource:
    """Attaches to a remote Nuage workflow execution as an event source.

    Streams workflow events via WorkflowsClient, translates them into local
    agent BaseEvents through RemoteWorkflowEventTranslator, accumulates the
    reconstructed message history in ``self.messages``, and forwards user
    input back to the workflow as an update.
    """

    def __init__(self, session_id: str, config: VibeConfig) -> None:
        # session_id doubles as the remote workflow execution id.
        self.session_id = session_id
        self._config = config
        self.messages: list[LLMMessage] = []
        self.stats = AgentStats()
        self._tool_manager = ToolManager(lambda: config)
        # Next broker sequence to request; advanced as events arrive.
        self._next_start_seq = 0
        self._client: WorkflowsClient | None = None
        self._translator = RemoteWorkflowEventTranslator(
            # NOTE(review): reaches into ToolManager's private _available map.
            available_tools=self._tool_manager._available,
            stats=self.stats,
            merge_message=self._merge_message,
        )

    @property
    def is_waiting_for_input(self) -> bool:
        # True while the remote workflow has an unanswered input request.
        return self._translator.pending_input_request is not None

    @property
    def _pending_input_request(self) -> PendingInputRequest | None:
        return self._translator.pending_input_request

    @_pending_input_request.setter
    def _pending_input_request(self, value: PendingInputRequest | None) -> None:
        self._translator.pending_input_request = value

    @property
    def _task_state(self) -> dict[str, dict[str, Any]]:
        return self._translator.task_state

    @property
    def is_terminated(self) -> bool:
        # Any recorded final status (completed/failed/canceled) means done.
        return self._translator.last_status is not None

    @property
    def is_failed(self) -> bool:
        return self._translator.last_status == WorkflowExecutionStatus.FAILED

    @property
    def is_canceled(self) -> bool:
        return self._translator.last_status == WorkflowExecutionStatus.CANCELED

    @property
    def client(self) -> WorkflowsClient:
        # Lazily build the API client from the Nuage config.
        if self._client is None:
            self._client = WorkflowsClient(
                base_url=self._config.nuage_base_url,
                api_key=self._config.nuage_api_key,
                timeout=self._config.api_timeout,
            )
        return self._client

    async def close(self) -> None:
        """Release the underlying HTTP client, if one was created."""
        if self._client is not None:
            await self._client.__aexit__(None, None, None)
            self._client = None

    async def attach(self) -> AsyncGenerator[BaseEvent, None]:
        """Stream all remote events until the workflow terminates or the
        stream ends, then flush any tool calls left open by the translator."""
        async for event in self._stream_remote_events(stop_on_idle_boundary=False):
            yield event
        for event in self._translator.flush_open_tool_calls():
            yield event

    async def send_prompt(self, msg: str) -> None:
        """Answer the pending input request with a plain-text chat message.

        No-op when nothing is pending; raises AgentLoopStateError when the
        workflow expects structured (non-chat) input this UI cannot provide.
        """
        pending = self._translator.pending_input_request
        if pending is None:
            return
        if not self._is_chat_input_request(pending):
            raise AgentLoopStateError(
                "Remote workflow is waiting for structured input that this UI does not support."
            )
        await self.client.update_workflow(
            self.session_id,
            _SUBMIT_INPUT_UPDATE_NAME,
            SubmitInputModel(
                task_id=pending.task_id,
                input={"message": [{"type": "text", "text": msg}]},
            ),
        )
        # Request answered; clear it so is_waiting_for_input flips to False.
        self._translator.pending_input_request = None

    def _is_chat_input_request(self, request: PendingInputRequest) -> bool:
        # Chat inputs are identified by the JSON-schema title ("ChatInput").
        title = request.input_schema.get("title")
        return title == ChatInputModel.model_config.get("title")

    async def _stream_remote_events(
        self, stop_on_idle_boundary: bool = True
    ) -> AsyncGenerator[BaseEvent]:
        """Core streaming loop with resume-on-disconnect.

        Reconnects (up to 3 times with linear backoff) on retryable stream
        disconnects, resuming from the last seen broker sequence; any other
        WorkflowsException is surfaced as AgentLoopStateError.
        """
        retry_count = 0
        max_retry_count = 3
        done = False
        while not done:
            params = StreamEventsQueryParams(
                workflow_exec_id=self.session_id, start_seq=self._next_start_seq
            )
            stream = self.client.stream_events(params)
            try:
                async for payload in stream:
                    # Successful receipt resets the retry budget.
                    retry_count = 0
                    if payload.broker_sequence is not None:
                        # Resume point for the next (re)connection.
                        self._next_start_seq = payload.broker_sequence + 1
                    event = self._normalize_stream_event(payload.data)
                    if event is None:
                        continue
                    for emitted_event in self._consume_workflow_event(event):
                        yield emitted_event
                    if self.is_terminated:
                        done = True
                        break
                    if stop_on_idle_boundary and self._is_idle_boundary(event):
                        done = True
                        break
                else:
                    # Stream ended cleanly without a terminal event: stop.
                    break
            except WorkflowsException as exc:
                if self._is_retryable_stream_disconnect(exc):
                    retry_count += 1
                    if retry_count > max_retry_count:
                        break
                    await asyncio.sleep(0.2 * retry_count)
                    continue
                raise AgentLoopStateError(str(exc)) from exc
            finally:
                # Always dispose of the generator (closes the HTTP stream).
                await stream.aclose()

    def _normalize_stream_event(
        self, event: WorkflowEvent | dict[str, Any]
    ) -> WorkflowEvent | None:
        """Coerce raw dict payloads into WorkflowEvents; drop unknown shapes."""
        if not isinstance(event, dict):
            return event
        try:
            return _WORKFLOW_EVENT_ADAPTER.validate_python(event)
        except ValidationError:
            return None

    def _consume_workflow_event(self, event: WorkflowEvent) -> list[BaseEvent]:
        # Translation (and message-history side effects) live in the translator.
        return self._translator.consume_workflow_event(event)

    def _is_retryable_stream_disconnect(self, exc: WorkflowsException) -> bool:
        # Only transport-level stream drops are retried, matched by message.
        if exc.code != ErrorCode.GET_EVENTS_STREAM_ERROR:
            return False
        msg = str(exc).lower()
        return any(needle in msg for needle in _RETRYABLE_STREAM_ERRORS)

    def _is_idle_boundary(self, event: WorkflowEvent) -> bool:
        return self._translator.is_idle_boundary(event)

    def _merge_message(self, message: LLMMessage) -> None:
        """Append to history, coalescing consecutive assistant chunks that
        share a message_id into a single message."""
        if not self.messages:
            self.messages.append(message)
            return
        last_message = self.messages[-1]
        if (
            last_message.role == message.role
            and last_message.message_id == message.message_id
            and message.role == Role.assistant
        ):
            # LLMMessage supports "+" for chunk concatenation.
            self.messages[-1] = last_message + message
            return
        self.messages.append(message)

View File

@@ -0,0 +1,137 @@
from __future__ import annotations
from typing import Any, Literal
from pydantic import BaseModel, ConfigDict
class BaseUIState(BaseModel):
toolCallId: str = ""
class FileOperation(BaseModel):
type: str = ""
uri: str = ""
content: str = ""
class FileUIState(BaseUIState):
type: Literal["file"] = "file"
operations: list[FileOperation] = []
class CommandResult(BaseModel):
status: str = ""
output: str = ""
class CommandUIState(BaseUIState):
type: Literal["command"] = "command"
command: str = ""
result: CommandResult | None = None
class GenericToolResult(BaseModel):
status: str = ""
error: str | None = None
class GenericToolUIState(BaseUIState):
model_config = ConfigDict(extra="allow")
type: Literal["generic_tool"] = "generic_tool"
arguments: dict[str, Any] = {}
result: GenericToolResult | None = None
AnyToolUIState = FileUIState | CommandUIState | GenericToolUIState
def parse_tool_ui_state(raw: dict[str, Any]) -> AnyToolUIState | None:
ui_type = raw.get("type")
if ui_type == "file":
return FileUIState.model_validate(raw)
if ui_type == "command":
return CommandUIState.model_validate(raw)
if ui_type == "generic_tool":
return GenericToolUIState.model_validate(raw)
return None
class WorkingState(BaseModel):
title: str = ""
content: str = ""
type: str = ""
toolUIState: dict[str, Any] | None = None
class ContentChunk(BaseModel):
type: str = ""
text: str = ""
class AssistantMessageState(BaseModel):
contentChunks: list[ContentChunk] = []
class AgentToolCallState(BaseModel):
model_config = ConfigDict(extra="allow")
name: str = ""
tool_call_id: str = ""
kwargs: dict[str, Any] = {}
output: Any = None
class MessageSchema(BaseModel):
model_config = ConfigDict(extra="allow")
examples: list[Any] = []
class InputSchemaProperties(BaseModel):
model_config = ConfigDict(extra="allow")
message: MessageSchema | None = None
class InputSchema(BaseModel):
model_config = ConfigDict(extra="allow")
properties: InputSchemaProperties | None = None
class PredefinedAnswersState(BaseModel):
model_config = ConfigDict(extra="allow")
input_schema: InputSchema | None = None
class WaitForInputInput(BaseModel):
model_config = ConfigDict(extra="allow")
message: Any = None
class WaitForInputPayload(BaseModel):
model_config = ConfigDict(extra="allow")
input: WaitForInputInput | None = None
class AskUserQuestion(BaseModel):
question: str
class AskUserQuestionArgs(BaseModel):
model_config = ConfigDict(extra="allow")
questions: list[AskUserQuestion] = []
class PendingInputRequest(BaseModel):
task_id: str
input_schema: dict[str, Any]
label: str | None = None
class RemoteToolArgs(BaseModel):
model_config = ConfigDict(extra="allow")
summary: str | None = None
content: str | None = None
class RemoteToolResult(BaseModel):
model_config = ConfigDict(extra="allow")
message: str | None = None

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,32 @@
from __future__ import annotations
import time
from typing import Any
from pydantic import BaseModel, Field
from vibe.core.nuage.events import WorkflowEvent
class StreamEventWorkflowContext(BaseModel):
    """Identifies which workflow execution a stream event belongs to."""

    namespace: str = ""
    workflow_name: str = ""
    workflow_exec_id: str = ""
    parent_workflow_exec_id: str | None = None
    root_workflow_exec_id: str | None = None


class StreamEvent(BaseModel):
    """One SSE payload delivered by the /events/stream endpoint."""

    stream: str = ""
    # Defaults to local receive time when the producer sends no timestamp.
    timestamp_unix_nano: int = Field(default_factory=lambda: time.time_ns())
    # Parsed workflow event, or a raw dict when the payload does not match a
    # known event shape (consumers re-validate; see RemoteEventsSource).
    data: WorkflowEvent | dict[str, Any]
    workflow_context: StreamEventWorkflowContext = Field(
        default_factory=StreamEventWorkflowContext
    )
    metadata: dict[str, Any] = Field(default_factory=dict)
    # Broker-assigned sequence number; consumers resume from last_seen + 1.
    broker_sequence: int | None = None


class StreamEventsQueryParams(BaseModel):
    """Query parameters for /events/stream: target execution and resume point."""

    workflow_exec_id: str = ""
    # First broker sequence to deliver (0 = from the beginning).
    start_seq: int = 0

View File

@@ -0,0 +1,45 @@
from __future__ import annotations
from datetime import datetime
from enum import StrEnum
from typing import Any
from pydantic import BaseModel, Field
class WorkflowExecutionStatus(StrEnum):
    """Lifecycle states reported for a workflow execution."""

    RUNNING = "RUNNING"
    COMPLETED = "COMPLETED"
    FAILED = "FAILED"
    CANCELED = "CANCELED"
    TERMINATED = "TERMINATED"
    CONTINUED_AS_NEW = "CONTINUED_AS_NEW"
    TIMED_OUT = "TIMED_OUT"
    RETRYING_AFTER_ERROR = "RETRYING_AFTER_ERROR"


class WorkflowExecutionWithoutResultResponse(BaseModel):
    """Summary of one execution from the /runs listing (no result payload)."""

    workflow_name: str
    execution_id: str
    parent_execution_id: str | None = None
    root_execution_id: str = ""
    status: WorkflowExecutionStatus | None = None
    start_time: datetime
    # None while the execution is still running.
    end_time: datetime | None = None
    total_duration_ms: int | None = None


class WorkflowExecutionListResponse(BaseModel):
    """Page of executions; next_page_token is presumably absent on the last
    page (see WorkflowsClient.get_workflow_runs) — confirm with the API."""

    executions: list[WorkflowExecutionWithoutResultResponse] = Field(
        default_factory=list
    )
    next_page_token: str | None = None


class SignalWorkflowResponse(BaseModel):
    """Acknowledgement returned after posting a signal."""

    message: str = "Signal accepted"


class UpdateWorkflowResponse(BaseModel):
    """Result of invoking a workflow update handler."""

    update_name: str = ""
    result: Any = None

View File

@@ -5,6 +5,17 @@ import json
import sys
from typing import TextIO
from vibe.core.teleport.types import (
TeleportAuthCompleteEvent,
TeleportAuthRequiredEvent,
TeleportCheckingGitEvent,
TeleportCompleteEvent,
TeleportFetchingUrlEvent,
TeleportPushingEvent,
TeleportPushRequiredEvent,
TeleportStartingWorkflowEvent,
TeleportWaitingForGitHubEvent,
)
from vibe.core.types import AssistantEvent, BaseEvent, LLMMessage, OutputFormat
@@ -36,9 +47,31 @@ class TextOutputFormatter(OutputFormatter):
def on_message_added(self, message: LLMMessage) -> None:
self._messages.append(message)
def _print(self, text: str) -> None:
print(text, file=self.stream)
def on_event(self, event: BaseEvent) -> None:
if isinstance(event, AssistantEvent):
self._final_response = event.content
match event:
case AssistantEvent():
self._final_response = event.content
case TeleportCheckingGitEvent():
self._print("Preparing workspace...")
case TeleportPushRequiredEvent(unpushed_count=count):
self._print(f"Pushing {count} commit(s)...")
case TeleportPushingEvent():
self._print("Syncing with remote...")
case TeleportStartingWorkflowEvent():
self._print("Teleporting...")
case TeleportWaitingForGitHubEvent():
self._print("Connecting to GitHub...")
case TeleportAuthRequiredEvent(oauth_url=url):
self._print(f"Open to authorize GitHub: {url}")
case TeleportAuthCompleteEvent():
self._print("GitHub authorized")
case TeleportFetchingUrlEvent():
self._print("Finalizing...")
case TeleportCompleteEvent():
self._final_response = event.url
def finalize(self) -> str | None:
return self._final_response

View File

@@ -3,11 +3,15 @@ from __future__ import annotations
import asyncio
from vibe import __version__
from vibe.core.agent_loop import AgentLoop
from vibe.core.agent_loop import AgentLoop, TeleportError
from vibe.core.agents.models import BuiltinAgentName
from vibe.core.config import VibeConfig
from vibe.core.logger import logger
from vibe.core.output_formatters import create_formatter
from vibe.core.teleport.types import (
TeleportPushRequiredEvent,
TeleportPushResponseEvent,
)
from vibe.core.types import (
AssistantEvent,
ClientMetadata,
@@ -18,6 +22,8 @@ from vibe.core.types import (
)
from vibe.core.utils import ConversationLimitException
__all__ = ["TeleportError", "run_programmatic"]
_DEFAULT_CLIENT_METADATA = ClientMetadata(name="vibe_programmatic", version=__version__)
@@ -30,6 +36,7 @@ def run_programmatic(
previous_messages: list[LLMMessage] | None = None,
agent_name: str = BuiltinAgentName.AUTO_APPROVE,
client_metadata: ClientMetadata = _DEFAULT_CLIENT_METADATA,
teleport: bool = False,
) -> str | None:
formatter = create_formatter(output_format)
@@ -62,10 +69,23 @@ def run_programmatic(
agent_loop.emit_new_session_telemetry()
async for event in agent_loop.act(prompt):
formatter.on_event(event)
if isinstance(event, AssistantEvent) and event.stopped_by_middleware:
raise ConversationLimitException(event.content)
if teleport and config.nuage_enabled:
gen = agent_loop.teleport_to_vibe_nuage(prompt or None)
async for event in gen:
formatter.on_event(event)
if isinstance(event, TeleportPushRequiredEvent):
next_event = await gen.asend(
TeleportPushResponseEvent(approved=True)
)
formatter.on_event(next_event)
else:
async for event in agent_loop.act(prompt):
formatter.on_event(event)
if (
isinstance(event, AssistantEvent)
and event.stopped_by_middleware
):
raise ConversationLimitException(event.content)
return formatter.finalize()
finally:

View File

@@ -0,0 +1,93 @@
from __future__ import annotations
from dataclasses import dataclass
from typing import Literal
from vibe.core.config import VibeConfig
from vibe.core.logger import logger
from vibe.core.nuage.client import WorkflowsClient
from vibe.core.nuage.workflow import WorkflowExecutionStatus
from vibe.core.session.session_loader import SessionLoader
ResumeSessionSource = Literal["local", "remote"]
# Number of characters kept when abbreviating a session id for display.
SHORT_SESSION_ID_LEN = 8


def short_session_id(session_id: str, source: ResumeSessionSource = "local") -> str:
    """Abbreviate a session id to 8 characters for display.

    Remote ids keep the trailing 8 characters (presumably because remote
    execution ids share a common prefix); local ids keep the leading 8.
    """
    keep = SHORT_SESSION_ID_LEN
    return session_id[-keep:] if source == "remote" else session_id[:keep]
_ACTIVE_STATUSES = {
WorkflowExecutionStatus.RUNNING,
WorkflowExecutionStatus.RETRYING_AFTER_ERROR,
}
@dataclass(frozen=True)
class ResumeSessionInfo:
    """A resumable session — a local session log or a remote Nuage execution."""

    session_id: str
    source: ResumeSessionSource
    # Working directory of the session; empty string for remote sessions.
    cwd: str
    title: str | None
    # ISO-8601 timestamp (remote running sessions fall back to start_time).
    end_time: str | None
    # Execution status; only populated for remote sessions.
    status: str | None = None

    @property
    def option_id(self) -> str:
        # Unique picker id combining source and session id, e.g. "remote:<id>".
        return f"{self.source}:{self.session_id}"
def list_local_resume_sessions(
    config: VibeConfig, cwd: str | None
) -> list[ResumeSessionInfo]:
    """Return resume entries for sessions logged on this machine,
    optionally filtered to a working directory."""
    sessions: list[ResumeSessionInfo] = []
    for record in SessionLoader.list_sessions(config.session_logging, cwd=cwd):
        sessions.append(
            ResumeSessionInfo(
                session_id=record["session_id"],
                source="local",
                cwd=record["cwd"],
                title=record.get("title"),
                end_time=record.get("end_time"),
            )
        )
    return sessions
async def list_remote_resume_sessions(config: VibeConfig) -> list[ResumeSessionInfo]:
    """Return resume entries for active remote Nuage workflow executions.

    Only executions whose status is in _ACTIVE_STATUSES (RUNNING or
    RETRYING_AFTER_ERROR) are listed. Returns [] when Nuage is disabled or no
    API key is configured. Only the first page (50 runs) is fetched.
    """
    if not config.nuage_enabled or not config.nuage_api_key:
        logger.debug("Remote resume listing skipped: missing Nuage configuration")
        return []
    async with WorkflowsClient(
        base_url=config.nuage_base_url,
        api_key=config.nuage_api_key,
        timeout=config.api_timeout,
    ) as client:
        response = await client.get_workflow_runs(
            workflow_identifier=config.nuage_workflow_id, page_size=50
        )
        sessions: list[ResumeSessionInfo] = []
        for execution in response.executions:
            if execution.status not in _ACTIVE_STATUSES:
                continue
            sessions.append(
                ResumeSessionInfo(
                    session_id=execution.execution_id,
                    source="remote",
                    # Remote executions have no local working directory.
                    cwd="",
                    title="Vibe Nuage",
                    end_time=(
                        execution.end_time.isoformat()
                        if execution.end_time
                        else execution.start_time.isoformat()
                    ),
                    status=execution.status,
                )
            )
        logger.debug("Remote resume listing filtered sessions: %d", len(sessions))
        return sessions

View File

@@ -76,11 +76,29 @@ class GitRepository:
diff=diff,
)
async def is_commit_pushed(self, commit: str, remote: str = "origin") -> bool:
async def fetch(self, remote: str = "origin") -> None:
    """Fetch `remote` so subsequent ref/containment checks see fresh remote state."""
    repo = self._repo_or_raise()
    await self._fetch(repo, remote)
async def is_commit_pushed(
    self, commit: str, remote: str = "origin", *, fetch: bool = True
) -> bool:
    """Return True when `commit` is contained in a branch on `remote`.

    Pass fetch=False to skip the network round-trip when the caller has
    already fetched (e.g. via `fetch()`).
    """
    repo = self._repo_or_raise()
    if fetch:
        await self._fetch(repo, remote)
    return await self._branch_contains(repo, commit, remote)
async def is_branch_pushed(
    self, remote: str = "origin", *, fetch: bool = True
) -> bool:
    """Return True when the active branch has a matching ref on `remote`.

    A detached HEAD trivially returns True. Pass fetch=False when the
    caller has already fetched the remote.
    """
    repo = self._repo_or_raise()
    if repo.head.is_detached:
        return True  # Detached HEAD doesn't have a branch to check
    branch = repo.active_branch.name
    if fetch:
        await self._fetch(repo, remote)
    return await self._ref_exists(repo, f"{remote}/{branch}")
async def get_unpushed_commit_count(self, remote: str = "origin") -> int:
repo = self._repo_or_raise()
@@ -179,7 +197,13 @@ class GitRepository:
async def _push(self, repo: Repo, branch: str, remote: str) -> bool:
    """Push `branch` to `remote` with upstream tracking.

    Returns False on timeout, invalid remote, git command failure, or when
    any per-ref push result carries the ERROR flag; True otherwise.
    (Removed a residual pre-change `push(branch)` call that duplicated the
    push and discarded its result.)
    """
    try:
        result = await self._executor.run(
            lambda: repo.remote(remote).push(branch, set_upstream=True)
        )
        # GitPython does not raise for per-ref rejections; inspect PushInfo flags.
        for info in result:
            if info.flags & info.ERROR:
                return False
        return True
    except (TimeoutError, ValueError, GitCommandError):
        return False

View File

@@ -1,67 +1,96 @@
from __future__ import annotations
from dataclasses import asdict
import asyncio
from enum import StrEnum, auto
import time
import types
from typing import Any
import httpx
from pydantic import BaseModel, Field
from pydantic import BaseModel, Field, ValidationError
from vibe.core.auth import EncryptedPayload, encrypt
from vibe.core.teleport.errors import ServiceTeleportError
class GitRepoConfig(BaseModel):
    """Git repository locator for a sandbox."""

    url: str


class GitHubParams(BaseModel):
    """GitHub integration parameters; `repo` is "owner/name"."""

    repo: str | None = None
    branch: str | None = None
    commit: str | None = None


class VibeSandboxConfig(BaseModel):
    git_repo: GitRepoConfig | None = None


class VibeNewSandbox(BaseModel):
    """Request for a fresh sandbox, optionally seeded from a repo or diffs."""

    type: str = "new"
    config: VibeSandboxConfig = Field(default_factory=VibeSandboxConfig)
    pr_number: int | None = None
    teleported_diffs: bytes | None = None


class ChatAssistantParams(BaseModel):
    """Parameters for the chat-assistant integration (Le Chat thread creation)."""

    create_thread: bool = False
    user_message: str | None = None
    project_name: str | None = None


class TeleportSession(BaseModel):
    """Serialized session: free-form metadata plus message dicts."""

    metadata: dict[str, Any] = Field(default_factory=dict)
    messages: list[dict[str, Any]] = Field(default_factory=list)


class WorkflowIntegrations(BaseModel):
    github: GitHubParams | None = None
    chat_assistant: ChatAssistantParams | None = None


class VibeAgent(BaseModel):
    polymorphic_type: str = "vibe_agent"
    name: str = "vibe-agent"
    vibe_config: dict[str, Any] | None = None
    session: TeleportSession | None = None


class WorkflowConfig(BaseModel):
    agent: VibeAgent = Field(default_factory=VibeAgent)
class WorkflowParams(BaseModel):
    """Top-level parameters for triggering a vibe workflow run.

    NOTE(review): `sandbox` is the legacy field; the newer flow builds the
    run from `config` + `integrations` and omits it, so it must be optional
    here or validation fails for those callers. Confirm against the workflow
    API schema.
    """

    prompt: str
    sandbox: VibeNewSandbox | None = None
    session: TeleportSession | None = None
    config: WorkflowConfig = Field(default_factory=WorkflowConfig)
    integrations: WorkflowIntegrations = Field(default_factory=WorkflowIntegrations)
class WorkflowExecuteResponse(BaseModel):
    """Response from triggering a workflow run."""

    execution_id: str


class PublicKeyResult(BaseModel):
    public_key: str


class GitHubStatus(StrEnum):
    """Lifecycle states of the GitHub integration (values are lower-case names via auto())."""

    PENDING = auto()
    WAITING_FOR_OAUTH = auto()
    CONNECTED = auto()
    OAUTH_TIMEOUT = auto()
    ERROR = auto()


class QueryResponse(BaseModel):
    result: PublicKeyResult
class GitHubPublicData(BaseModel):
    """Publicly exposed state of the workflow's GitHub integration."""

    status: GitHubStatus
    oauth_url: str | None = None
    error: str | None = None
    working_branch: str | None = None
    repo: str | None = None

    @property
    def connected(self) -> bool:
        """True once the integration reports CONNECTED."""
        return self.status == GitHubStatus.CONNECTED

    @property
    def is_error(self) -> bool:
        """True for the terminal failure states (OAuth timeout or error)."""
        failure_states = {GitHubStatus.OAUTH_TIMEOUT, GitHubStatus.ERROR}
        return self.status in failure_states
class CreateLeChatThreadInput(BaseModel):
    """Payload for the create_le_chat_thread update."""

    encrypted_api_key: dict[str, str]
    user_message: str
    project_name: str | None = None


class CreateLeChatThreadOutput(BaseModel):
    """Result of the create_le_chat_thread update.

    NOTE(review): the rendered source left this class with an empty body
    (a syntax error); `chat_url` is reconstructed from UpdateResponse's
    usage and ChatAssistantPublicData — confirm against the committed file.
    """

    chat_url: str


class ChatAssistantPublicData(BaseModel):
    """Public data exposed by the chat_assistant integration."""

    chat_url: str


class UpdateResponse(BaseModel):
    result: CreateLeChatThreadOutput


class GetChatAssistantIntegrationResponse(BaseModel):
    result: ChatAssistantPublicData


class GetGitHubIntegrationResponse(BaseModel):
    result: GitHubPublicData
class NuageClient:
@@ -122,64 +151,62 @@ class NuageClient:
)
if not response.is_success:
error_msg = f"Nuage workflow trigger failed: {response.text}"
# TODO(vibe-nuage): remove this once prod has shared vibe-nuage workers
if "Unauthorized" in response.text or "unauthorized" in response.text:
error_msg += (
"\n\nHint: This version uses Mistral staging environment. "
"Set STAGING_MISTRAL_API_KEY from https://console.globalaegis.net/"
)
raise ServiceTeleportError(error_msg)
result = WorkflowExecuteResponse.model_validate(response.json())
return result.execution_id
async def send_github_token(self, execution_id: str, token: str) -> None:
    """Encrypt `token` with the execution's public key and signal it to the workflow."""
    public_key_pem = await self._query_public_key(execution_id)
    encrypted = encrypt(token, public_key_pem)
    await self._signal_encrypted_token(execution_id, encrypted)
async def _query_public_key(self, execution_id: str) -> bytes:
    """Query the execution for its public key; return it as UTF-8 bytes."""
    endpoint = f"{self._base_url}/v1/workflows/executions/{execution_id}/queries"
    response = await self._http_client.post(
        endpoint,
        headers=self._headers(),
        json={"name": "get_public_key", "input": {}},
    )
    if not response.is_success:
        raise ServiceTeleportError(f"Failed to get public key: {response.text}")
    parsed = QueryResponse.model_validate(response.json())
    return parsed.result.public_key.encode("utf-8")
async def _signal_encrypted_token(
    self, execution_id: str, encrypted: EncryptedPayload
) -> None:
    """Deliver the encrypted token to the execution via a `github_token` signal."""
    response = await self._http_client.post(
        f"{self._base_url}/v1/workflows/executions/{execution_id}/signals",
        headers=self._headers(),
        json={"name": "github_token", "input": {"payload": asdict(encrypted)}},
    )
    if not response.is_success:
        raise ServiceTeleportError(f"Failed to send GitHub token: {response.text}")
async def get_github_integration(self, execution_id: str) -> GitHubPublicData:
    """Fetch the GitHub integration's public data via a `get_integration` update.

    Raises ServiceTeleportError on an unsuccessful HTTP response, or — when
    the payload fails validation — with the error/status the server reported.

    NOTE(review): reconstructed from interleaved pre/post-change revisions in
    the rendered source (duplicate `json=` kwargs and leftover
    create_le_chat_thread lines made the region unparsable) — confirm
    against the committed version.
    """
    response = await self._http_client.post(
        f"{self._base_url}/v1/workflows/executions/{execution_id}/updates",
        headers=self._headers(),
        json={"name": "get_integration", "input": {"integration_id": "github"}},
    )
    if not response.is_success:
        raise ServiceTeleportError(
            f"Failed to get GitHub integration: {response.text}"
        )
    try:
        result = GetGitHubIntegrationResponse.model_validate(response.json())
    except ValidationError as e:
        # Surface the server-reported error/status instead of a raw
        # pydantic validation trace.
        data = response.json()
        error = data.get("result", {}).get("error")
        status = data.get("result", {}).get("status")
        raise ServiceTeleportError(
            f"GitHub integration error: {error or status}"
        ) from e
    return result.result
async def wait_for_github_connection(
    self, execution_id: str, timeout: float = 600.0, interval: float = 2.0
) -> GitHubPublicData:
    """Poll the GitHub integration until connected, failed, or timed out.

    Polls every `interval` seconds (never sleeping past the deadline) for up
    to `timeout` seconds. Returns the integration data once connected.
    Raises ServiceTeleportError on integration failure or on timeout.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        github_data = await self.get_github_integration(execution_id)
        if github_data.connected:
            return github_data
        if github_data.is_error:
            raise ServiceTeleportError(
                github_data.error
                or f"GitHub integration failed: {github_data.status.value}"
            )
        # Cap the sleep so the last wait ends exactly at the deadline.
        remaining = deadline - time.monotonic()
        if remaining <= 0:
            break
        await asyncio.sleep(min(interval, remaining))
    raise ServiceTeleportError("GitHub connection timed out")
async def get_chat_assistant_url(self, execution_id: str) -> str:
    """Fetch the chat URL exposed by the chat_assistant integration."""
    endpoint = f"{self._base_url}/v1/workflows/executions/{execution_id}/updates"
    payload = {
        "name": "get_integration",
        "input": {"integration_id": "chat_assistant"},
    }
    response = await self._http_client.post(
        endpoint, headers=self._headers(), json=payload
    )
    if not response.is_success:
        raise ServiceTeleportError(
            f"Failed to get chat assistant integration: {response.text}"
        )
    parsed = GetChatAssistantIntegrationResponse.model_validate(response.json())
    return parsed.result.chat_url

View File

@@ -1,5 +1,6 @@
from __future__ import annotations
import asyncio
import base64
from collections.abc import AsyncGenerator
from pathlib import Path
@@ -8,16 +9,18 @@ import types
import httpx
import zstandard
from vibe.core.auth.github import GitHubAuthProvider
from vibe.core.config import VibeConfig
from vibe.core.session.session_logger import SessionLogger
from vibe.core.teleport.errors import ServiceTeleportError
from vibe.core.teleport.git import GitRepoInfo, GitRepository
from vibe.core.teleport.nuage import (
GitRepoConfig,
ChatAssistantParams,
GitHubParams,
NuageClient,
TeleportSession,
VibeNewSandbox,
VibeSandboxConfig,
VibeAgent,
WorkflowConfig,
WorkflowIntegrations,
WorkflowParams,
)
from vibe.core.teleport.types import (
@@ -25,18 +28,17 @@ from vibe.core.teleport.types import (
TeleportAuthRequiredEvent,
TeleportCheckingGitEvent,
TeleportCompleteEvent,
TeleportFetchingUrlEvent,
TeleportPushingEvent,
TeleportPushRequiredEvent,
TeleportPushResponseEvent,
TeleportSendEvent,
TeleportSendingGithubTokenEvent,
TeleportStartingWorkflowEvent,
TeleportWaitingForGitHubEvent,
TeleportYieldEvent,
)
# TODO(vibe-nuage): update URL once prod has shared vibe-nuage workers
_NUAGE_EXECUTION_URL_TEMPLATE = "https://console.globalaegis.net/build/workflows/{workflow_id}?tab=executions&executionId={execution_id}"
_DEFAULT_TELEPORT_PROMPT = "please continue where you left off"
_DEFAULT_TELEPORT_PROMPT = "Your session has been teleported on a remote workspace. Changes of workspace has been automatically teleported. External workspace changes has NOT been teleported. Environment variables has NOT been teleported. Please continue where you left off."
class TeleportService:
@@ -49,6 +51,7 @@ class TeleportService:
workdir: Path | None = None,
*,
nuage_task_queue: str | None = None,
vibe_config: VibeConfig | None = None,
client: httpx.AsyncClient | None = None,
timeout: float = 60.0,
) -> None:
@@ -57,17 +60,19 @@ class TeleportService:
self._nuage_workflow_id = nuage_workflow_id
self._nuage_api_key = nuage_api_key
self._nuage_task_queue = nuage_task_queue
self._nuage_project_name = (
vibe_config.nuage_project_name if vibe_config else "Vibe"
)
self._vibe_config = vibe_config
self._git = GitRepository(workdir)
self._client = client
self._owns_client = client is None
self._timeout = timeout
self._github_auth: GitHubAuthProvider | None = None
self._nuage: NuageClient | None = None
async def __aenter__(self) -> TeleportService:
if self._client is None:
self._client = httpx.AsyncClient(timeout=httpx.Timeout(self._timeout))
self._github_auth = GitHubAuthProvider(client=self._client)
self._nuage = NuageClient(
self._nuage_base_url,
self._nuage_api_key,
@@ -96,12 +101,6 @@ class TeleportService:
self._owns_client = True
return self._client
@property
def _github_auth_provider(self) -> GitHubAuthProvider:
    """Lazily construct and cache the GitHub auth provider on first access."""
    if self._github_auth is None:
        self._github_auth = GitHubAuthProvider(client=self._http_client)
    return self._github_auth
@property
def _nuage_client(self) -> NuageClient:
if self._nuage is None:
@@ -123,56 +122,76 @@ class TeleportService:
async def execute(
    self, prompt: str | None, session: TeleportSession
) -> AsyncGenerator[TeleportYieldEvent, TeleportSendEvent]:
    """Teleport `session` to a remote Nuage workspace.

    Runs as a bidirectional generator: verifies git state is pushed (asking
    the caller to approve a push when it is not), starts the remote
    workflow, waits for the GitHub integration to connect (yielding an
    OAuth URL when one is required), then yields the chat-assistant URL in
    a TeleportCompleteEvent.

    NOTE(review): this body was reconstructed from interleaved pre/post-change
    revisions in the rendered source (duplicate keyword arguments, duplicate
    raise statements, and an orphan `if` branch made it unparsable) —
    confirm against the committed version.
    """
    if prompt:
        lechat_user_message = prompt
    else:
        # No explicit prompt: reuse the last user message for the chat
        # thread and fall back to the canonical teleport prompt.
        last_user_message = self._get_last_user_message(session)
        if not last_user_message:
            raise ServiceTeleportError(
                "No prompt provided and no user message found in session."
            )
        lechat_user_message = f"{last_user_message} (continue)"
        prompt = _DEFAULT_TELEPORT_PROMPT
    self._validate_config()
    git_info = await self._git.get_info()
    yield TeleportCheckingGitEvent()
    # Single fetch up front; both checks below then skip their own fetch.
    await self._git.fetch()
    commit_pushed, branch_pushed = await asyncio.gather(
        self._git.is_commit_pushed(git_info.commit, fetch=False),
        self._git.is_branch_pushed(fetch=False),
    )
    if not commit_pushed or not branch_pushed:
        unpushed_count = await self._git.get_unpushed_commit_count()
        response = yield TeleportPushRequiredEvent(
            unpushed_count=max(1, unpushed_count),
            branch_not_pushed=not branch_pushed,
        )
        if (
            not isinstance(response, TeleportPushResponseEvent)
            or not response.approved
        ):
            raise ServiceTeleportError("Teleport cancelled: changes not pushed.")
        yield TeleportPushingEvent()
        await self._push_or_fail()
    yield TeleportStartingWorkflowEvent()
    execution_id = await self._nuage_client.start_workflow(
        WorkflowParams(
            prompt=prompt,
            config=WorkflowConfig(
                agent=VibeAgent(
                    vibe_config=self._vibe_config.model_dump()
                    if self._vibe_config
                    else None,
                    session=session,
                )
            ),
            integrations=WorkflowIntegrations(
                github=self._build_github_params(git_info),
                chat_assistant=ChatAssistantParams(
                    create_thread=True,
                    user_message=lechat_user_message,
                    project_name=self._nuage_project_name,
                ),
            ),
        )
    )
    yield TeleportWaitingForGitHubEvent()
    github_data = await self._nuage_client.get_github_integration(execution_id)
    if not github_data.connected:
        if github_data.oauth_url:
            yield TeleportAuthRequiredEvent(oauth_url=github_data.oauth_url)
        await self._nuage_client.wait_for_github_connection(execution_id)
        yield TeleportAuthCompleteEvent()
    yield TeleportFetchingUrlEvent()
    chat_url = await self._nuage_client.get_chat_assistant_url(execution_id)
    yield TeleportCompleteEvent(url=chat_url)
@@ -181,22 +200,19 @@ class TeleportService:
raise ServiceTeleportError("Failed to push current branch to remote.")
def _validate_config(self) -> None:
    """Ensure a Nuage API key is configured before starting a teleport.

    Raises ServiceTeleportError naming the expected environment variable
    (from vibe_config when available, else MISTRAL_API_KEY).

    NOTE(review): reconstructed — the rendered source interleaved the old
    and new raise statements, leaving unbalanced parentheses.
    """
    if not self._nuage_api_key:
        env_var = (
            self._vibe_config.nuage_api_key_env_var
            if self._vibe_config
            else "MISTRAL_API_KEY"
        )
        raise ServiceTeleportError(f"{env_var} not set.")
def _build_sandbox(self, git_info: GitRepoInfo) -> VibeNewSandbox:
    """Legacy sandbox payload seeded from the repo's remote URL/branch/commit.

    NOTE(review): reconstructed — the rendered source cut this method's
    closing parenthesis when interleaving it with _build_github_params.
    The visible GitRepoConfig model declares only `url`; branch/commit are
    passed as in the original — confirm the model accepts them.
    """
    return VibeNewSandbox(
        config=VibeSandboxConfig(
            git_repo=GitRepoConfig(
                url=git_info.remote_url,
                branch=git_info.branch,
                commit=git_info.commit,
            )
        ),
    )

def _build_github_params(self, git_info: GitRepoInfo) -> GitHubParams:
    """GitHub integration params: "owner/name" repo, branch, commit, compressed diff.

    NOTE(review): `teleported_diffs` is not declared on the visible
    GitHubParams model — confirm the model in the committed file carries it.
    """
    return GitHubParams(
        repo=f"{git_info.owner}/{git_info.repo}",
        branch=git_info.branch,
        commit=git_info.commit,
        teleported_diffs=self._compress_diff(git_info.diff or ""),
    )
@@ -210,3 +226,11 @@ class TeleportService:
"Diff too large to teleport. Please commit and push your changes first."
)
return encoded
def _get_last_user_message(self, session: TeleportSession) -> str | None:
    """Return the most recent non-empty string user message, or None."""
    user_contents = (
        msg.get("content")
        for msg in reversed(session.messages)
        if msg.get("role") == "user"
    )
    for content in user_contents:
        if isinstance(content, str) and content:
            return content
    return None

View File

@@ -4,8 +4,7 @@ from vibe.core.types import BaseEvent
class TeleportAuthRequiredEvent(BaseEvent):
    """GitHub authentication is needed to proceed.

    All fields default to None so the event can be built from either the
    device-flow path (user_code + verification_uri) or the integration
    OAuth path (oauth_url); the rendered source constructs it both ways,
    so requiring any single field would break one of the call sites.
    """

    user_code: str | None = None
    verification_uri: str | None = None
    oauth_url: str | None = None
class TeleportAuthCompleteEvent(BaseEvent):
@@ -22,6 +21,7 @@ class TeleportCheckingGitEvent(BaseEvent):
class TeleportPushRequiredEvent(BaseEvent):
    """Asks the caller to approve pushing local changes before teleport proceeds."""

    # Floored at 1 by the emitter so the count is never shown as zero.
    unpushed_count: int = 1
    # True when the current branch itself has no counterpart on the remote.
    branch_not_pushed: bool = False
class TeleportPushResponseEvent(BaseEvent):
@@ -32,7 +32,11 @@ class TeleportPushingEvent(BaseEvent):
pass
class TeleportSendingGithubTokenEvent(BaseEvent):
    # NOTE(review): the rendered source left this class with an empty body
    # (a syntax error); a `pass` body is restored. The event-union in this
    # file replaces it with TeleportWaitingForGitHubEvent — it appears to be
    # legacy; confirm whether it can be dropped.
    pass


class TeleportWaitingForGitHubEvent(BaseEvent):
    """Waiting for the remote workflow's GitHub integration to connect."""

    pass


class TeleportFetchingUrlEvent(BaseEvent):
    """Fetching the final chat-assistant URL from the workflow."""

    pass
@@ -47,7 +51,8 @@ type TeleportYieldEvent = (
| TeleportPushRequiredEvent
| TeleportPushingEvent
| TeleportStartingWorkflowEvent
| TeleportSendingGithubTokenEvent
| TeleportWaitingForGitHubEvent
| TeleportFetchingUrlEvent
| TeleportCompleteEvent
)

View File

@@ -397,6 +397,12 @@ class ToolStreamEvent(BaseEvent):
tool_call_id: str
class WaitingForInputEvent(BaseEvent):
    """Signals that task `task_id` is waiting for user input."""

    task_id: str
    # Optional human-readable label for the input request.
    label: str | None = None
    # Optional canned choices the UI may offer instead of free-form input.
    predefined_answers: list[str] | None = None
class CompactStartEvent(BaseEvent):
    """Emitted when context compaction starts.

    NOTE(review): field semantics inferred from names — presumably the token
    count in context at the time compaction began and the limit that
    triggered it; confirm against the emitter.
    """

    current_context_tokens: int
    threshold: int

View File

@@ -1,2 +1,3 @@
# What's new in v2.7.3
- **Rewind mode**: Added navigation and forking of conversation history with /rewind
- **Data retention command**: Added a `/data-retention` slash command to view Mistral AI's data retention notice and privacy settings