From ecd74904cd6f8901bc094caf94060a5d4eb4ad52 Mon Sep 17 00:00:00 2001 From: Devin Deng Date: Sat, 7 Mar 2026 04:45:53 +0000 Subject: [PATCH 01/10] sandbox api --- hyperbrowser/client/async_client.py | 3 + .../client/managers/async_manager/sandbox.py | 1376 ++++++++++++++++ .../client/managers/sync_manager/sandbox.py | 1445 +++++++++++++++++ hyperbrowser/client/sync.py | 3 + hyperbrowser/exceptions.py | 16 +- hyperbrowser/models/__init__.py | 115 ++ hyperbrowser/models/sandbox.py | 453 ++++++ hyperbrowser/sandbox_common.py | 218 +++ poetry.lock | 257 ++- pyproject.toml | 2 + tests/.env.example | 3 + tests/conftest.py | 6 + tests/helpers/__init__.py | 1 + tests/helpers/config.py | 43 + tests/helpers/errors.py | 117 ++ tests/helpers/http.py | 45 + tests/helpers/sandbox.py | 99 ++ tests/sandbox/e2e/test_async_files.py | 237 +++ tests/sandbox/e2e/test_async_lifecycle.py | 144 ++ tests/sandbox/e2e/test_async_process.py | 146 ++ .../sandbox/e2e/test_async_terminal_smoke.py | 133 ++ tests/sandbox/e2e/test_files.py | 230 +++ tests/sandbox/e2e/test_lifecycle.py | 141 ++ tests/sandbox/e2e/test_process.py | 142 ++ tests/sandbox/e2e/test_terminal_smoke.py | 129 ++ 25 files changed, 5497 insertions(+), 7 deletions(-) create mode 100644 hyperbrowser/client/managers/async_manager/sandbox.py create mode 100644 hyperbrowser/client/managers/sync_manager/sandbox.py create mode 100644 hyperbrowser/models/sandbox.py create mode 100644 hyperbrowser/sandbox_common.py create mode 100644 tests/.env.example create mode 100644 tests/conftest.py create mode 100644 tests/helpers/__init__.py create mode 100644 tests/helpers/config.py create mode 100644 tests/helpers/errors.py create mode 100644 tests/helpers/http.py create mode 100644 tests/helpers/sandbox.py create mode 100644 tests/sandbox/e2e/test_async_files.py create mode 100644 tests/sandbox/e2e/test_async_lifecycle.py create mode 100644 tests/sandbox/e2e/test_async_process.py create mode 100644 
tests/sandbox/e2e/test_async_terminal_smoke.py create mode 100644 tests/sandbox/e2e/test_files.py create mode 100644 tests/sandbox/e2e/test_lifecycle.py create mode 100644 tests/sandbox/e2e/test_process.py create mode 100644 tests/sandbox/e2e/test_terminal_smoke.py diff --git a/hyperbrowser/client/async_client.py b/hyperbrowser/client/async_client.py index 338021b6..bd23a541 100644 --- a/hyperbrowser/client/async_client.py +++ b/hyperbrowser/client/async_client.py @@ -9,6 +9,7 @@ from .managers.async_manager.extension import ExtensionManager from .managers.async_manager.extract import ExtractManager from .managers.async_manager.profile import ProfileManager +from .managers.async_manager.sandbox import SandboxManager from .managers.async_manager.scrape import ScrapeManager from .managers.async_manager.session import SessionManager from .managers.async_manager.team import TeamManager @@ -26,6 +27,7 @@ def __init__( timeout: Optional[int] = 30, ): super().__init__(AsyncTransport, config, api_key, base_url) + self.timeout = timeout or 30 self.transport.client.timeout = timeout self.sessions = SessionManager(self) self.web = WebManager(self) @@ -37,6 +39,7 @@ def __init__( self.agents = Agents(self) self.team = TeamManager(self) self.computer_action = ComputerActionManager(self) + self.sandboxes = SandboxManager(self) async def close(self) -> None: await self.transport.close() diff --git a/hyperbrowser/client/managers/async_manager/sandbox.py b/hyperbrowser/client/managers/async_manager/sandbox.py new file mode 100644 index 00000000..b302a7bb --- /dev/null +++ b/hyperbrowser/client/managers/async_manager/sandbox.py @@ -0,0 +1,1376 @@ +import base64 +import json +import socket +from datetime import datetime, timedelta, timezone +from typing import AsyncIterator, Dict, Optional, Union +from urllib.parse import urlencode + +import httpx +from websockets.asyncio.client import connect as async_ws_connect +from websockets.exceptions import ConnectionClosed + +from 
....exceptions import HyperbrowserError +from ....models.sandbox import ( + CreateSandboxParams, + SandboxDetail, + SandboxExecParams, + SandboxFileChmodParams, + SandboxFileChownParams, + SandboxFileDeleteParams, + SandboxFileEntry, + SandboxFileListResponse, + SandboxFileMoveCopyResult, + SandboxFileMutationResult, + SandboxFileReadResult, + SandboxFileTransferResult, + SandboxFileWatchDoneEvent, + SandboxFileWatchEventMessage, + SandboxFileWatchStatus, + SandboxListParams, + SandboxListResponse, + SandboxPresignFileParams, + SandboxPresignedUrl, + SandboxProcessExitEvent, + SandboxProcessListResponse, + SandboxProcessOutputEvent, + SandboxProcessResult, + SandboxProcessStdinParams, + SandboxProcessSummary, + SandboxRuntimeSession, + SandboxTerminalCreateParams, + SandboxTerminalExitEvent, + SandboxTerminalOutputEvent, + SandboxTerminalStatus, + SandboxTerminalWaitParams, + StartSandboxFromSnapshotParams, +) +from ....models.session import BasicResponse +from ....sandbox_common import ( + RUNTIME_SESSION_REFRESH_BUFFER_MS, + RuntimeConnection, + build_headers, + ensure_response_ok, + normalize_network_error, + parse_json_response, + resolve_runtime_transport_target, + to_websocket_transport_target, +) +from ..sync_manager.sandbox import _build_query_path, _copy_model, _normalize_websocket_error + +DEFAULT_PROCESS_KILL_WAIT_SECONDS = 5.0 +DEFAULT_TERMINAL_KILL_WAIT_SECONDS = 5.0 + + +def _expires_within_buffer(expires_at): + if expires_at is None: + return False + if expires_at.tzinfo is None: + expires_at = expires_at.replace(tzinfo=timezone.utc) + threshold = datetime.now(timezone.utc) + timedelta( + milliseconds=RUNTIME_SESSION_REFRESH_BUFFER_MS + ) + return expires_at <= threshold + + +class RuntimeTransport: + def __init__(self, resolve_connection, timeout: float = 30.0): + self._resolve_connection = resolve_connection + self._timeout = timeout + + async def request_json( + self, + path: str, + *, + method: str = "GET", + params: Optional[Dict[str, object]] = 
None, + json_body: Optional[Dict[str, object]] = None, + content: Optional[Union[str, bytes]] = None, + headers: Optional[Dict[str, str]] = None, + ): + response = await self._request( + path, + method=method, + params=params, + json_body=json_body, + content=content, + headers=headers, + ) + return parse_json_response(response, "runtime") + + async def request_bytes( + self, + path: str, + *, + method: str = "GET", + params: Optional[Dict[str, object]] = None, + headers: Optional[Dict[str, str]] = None, + ) -> bytes: + response = await self._request(path, method=method, params=params, headers=headers) + return response.content + + async def stream_sse( + self, path: str, params: Optional[Dict[str, object]] = None + ) -> AsyncIterator[Dict[str, object]]: + client, response = await self._open_stream(path, params=params) + event_name = "message" + event_id = None + data_lines = [] + + def flush_event(): + nonlocal event_name, event_id, data_lines + if not data_lines and event_name == "message" and event_id is None: + return None + + raw_data = "\n".join(data_lines) + data = raw_data + if raw_data: + try: + data = json.loads(raw_data) + except json.JSONDecodeError: + data = raw_data + + event = { + "event": event_name, + "data": data, + "id": event_id, + } + event_name = "message" + event_id = None + data_lines = [] + return event + + try: + async for line in response.aiter_lines(): + if line == "": + event = flush_event() + if event is not None: + yield event + continue + + if line.startswith(":"): + continue + + if ":" in line: + field, value = line.split(":", 1) + value = value.lstrip(" ") + else: + field, value = line, "" + + if field == "event": + event_name = value or "message" + elif field == "data": + data_lines.append(value) + elif field == "id": + event_id = value + + trailing = flush_event() + if trailing is not None: + yield trailing + finally: + await response.aclose() + await client.aclose() + + async def _request( + self, + path: str, + *, + method: str 
= "GET", + params: Optional[Dict[str, object]] = None, + json_body: Optional[Dict[str, object]] = None, + content: Optional[Union[str, bytes]] = None, + headers: Optional[Dict[str, str]] = None, + allow_refresh: bool = True, + ) -> httpx.Response: + connection = await self._resolve_connection(False) + response = await self._send( + connection, + path, + method=method, + params=params, + json_body=json_body, + content=content, + headers=headers, + ) + + if response.status_code == 401 and allow_refresh: + await response.aclose() + refreshed = await self._resolve_connection(True) + retry = await self._send( + refreshed, + path, + method=method, + params=params, + json_body=json_body, + content=content, + headers=headers, + ) + return ensure_response_ok(retry, "runtime") + + return ensure_response_ok(response, "runtime") + + async def _open_stream( + self, + path: str, + *, + params: Optional[Dict[str, object]] = None, + allow_refresh: bool = True, + ): + connection = await self._resolve_connection(False) + client, response = await self._send_stream(connection, path, params=params) + if response.status_code == 401 and allow_refresh: + await response.aclose() + await client.aclose() + refreshed = await self._resolve_connection(True) + client, response = await self._send_stream(refreshed, path, params=params) + + if not response.is_success: + await response.aread() + ensure_response_ok(response, "runtime") + return client, response + + async def _send( + self, + connection: RuntimeConnection, + path: str, + *, + method: str, + params: Optional[Dict[str, object]], + json_body: Optional[Dict[str, object]], + content: Optional[Union[str, bytes]], + headers: Optional[Dict[str, str]], + ) -> httpx.Response: + request_path = _build_query_path(path, params) + target = resolve_runtime_transport_target(connection.base_url, request_path) + merged_headers = build_headers(connection.token, headers, target.host_header) + client = httpx.AsyncClient(timeout=self._timeout) + + try: + 
response = await client.request( + method, + target.url, + headers=merged_headers, + json=json_body, + content=content, + ) + except BaseException as error: + await client.aclose() + raise normalize_network_error( + error, + "runtime", + "Unknown runtime request error", + ) + + await response.aread() + await client.aclose() + return response + + async def _send_stream( + self, + connection: RuntimeConnection, + path: str, + *, + params: Optional[Dict[str, object]], + ): + request_path = _build_query_path(path, params) + target = resolve_runtime_transport_target(connection.base_url, request_path) + headers = build_headers( + connection.token, + {"Accept": "text/event-stream"}, + target.host_header, + ) + client = httpx.AsyncClient(timeout=self._timeout) + + try: + request = client.build_request("GET", target.url, headers=headers) + response = await client.send(request, stream=True) + return client, response + except BaseException as error: + await client.aclose() + raise normalize_network_error( + error, + "runtime", + "Unknown runtime request error", + ) + + +class SandboxProcessHandle: + def __init__(self, transport: RuntimeTransport, summary: SandboxProcessSummary): + self._transport = transport + self._summary = summary + + @property + def id(self) -> str: + return self._summary.id + + @property + def status(self) -> str: + return self._summary.status + + def to_dict(self): + return self._summary.model_dump() + + def to_json(self): + return self.to_dict() + + async def refresh(self) -> "SandboxProcessHandle": + payload = await self._transport.request_json(f"/sandbox/processes/{self.id}") + self._summary = SandboxProcessSummary(**payload["process"]) + return self + + async def wait( + self, + timeout_ms: Optional[int] = None, + timeout_sec: Optional[int] = None, + ) -> SandboxProcessResult: + payload = await self._transport.request_json( + f"/sandbox/processes/{self.id}/wait", + method="POST", + json_body={ + "timeoutMs": timeout_ms, + "timeout_sec": timeout_sec, 
+ }, + headers={"content-type": "application/json"}, + ) + result = SandboxProcessResult(**payload["result"]) + self._summary = SandboxProcessSummary( + id=result.id, + status=result.status, + command=self._summary.command, + args=self._summary.args, + cwd=self._summary.cwd, + pid=self._summary.pid, + exit_code=result.exit_code, + started_at=result.started_at, + completed_at=result.completed_at, + ) + return result + + async def signal(self, signal: str) -> None: + payload = await self._transport.request_json( + f"/sandbox/processes/{self.id}/signal", + method="POST", + json_body={"signal": signal}, + headers={"content-type": "application/json"}, + ) + self._summary = SandboxProcessSummary(**payload["process"]) + + async def kill( + self, + timeout_ms: Optional[int] = None, + timeout_sec: Optional[int] = None, + ) -> SandboxProcessResult: + payload = await self._transport.request_json( + f"/sandbox/processes/{self.id}", + method="DELETE", + ) + self._summary = SandboxProcessSummary(**payload["process"]) + if timeout_ms is None and timeout_sec is None: + timeout_ms = int(DEFAULT_PROCESS_KILL_WAIT_SECONDS * 1000) + return await self.wait(timeout_ms=timeout_ms, timeout_sec=timeout_sec) + + async def write_stdin( + self, + data: Optional[Union[str, bytes, bytearray, SandboxProcessStdinParams]] = None, + *, + encoding: Optional[str] = None, + eof: Optional[bool] = None, + ) -> None: + if isinstance(data, SandboxProcessStdinParams): + params = data + else: + params = SandboxProcessStdinParams(data=data, encoding=encoding, eof=eof) + + payload: Dict[str, object] = {"eof": params.eof} + if params.data is not None: + if isinstance(params.data, str): + payload["data"] = params.data + payload["encoding"] = params.encoding or "utf8" + else: + payload["data"] = base64.b64encode(bytes(params.data)).decode("ascii") + payload["encoding"] = "base64" + + await self._transport.request_json( + f"/sandbox/processes/{self.id}/stdin", + method="POST", + json_body=payload, + 
headers={"content-type": "application/json"}, + ) + + async def stream(self, from_seq: Optional[int] = None) -> AsyncIterator[object]: + params = {"from_seq": from_seq} if from_seq and from_seq > 0 else None + async for event in self._transport.stream_sse( + f"/sandbox/processes/{self.id}/stream", + params=params, + ): + event_type = event["event"] + data = event["data"] + if event_type == "output": + yield SandboxProcessOutputEvent( + type=data["stream"], + seq=data["seq"], + data=data["data"], + timestamp=data["timestamp"], + ) + elif event_type == "done": + yield SandboxProcessExitEvent( + type="exit", + result=SandboxProcessResult(**data), + ) + + async def result(self) -> SandboxProcessResult: + return await self.wait() + + +class SandboxProcessesApi: + def __init__(self, transport: RuntimeTransport): + self._transport = transport + + async def exec( + self, input: Union[SandboxExecParams, Dict[str, object]] + ) -> SandboxProcessResult: + params = input if isinstance(input, SandboxExecParams) else SandboxExecParams(**input) + payload = await self._transport.request_json( + "/sandbox/exec", + method="POST", + json_body=params.model_dump(exclude_none=True, by_alias=True), + headers={"content-type": "application/json"}, + ) + return SandboxProcessResult(**payload["result"]) + + async def start( + self, input: Union[SandboxExecParams, Dict[str, object]] + ) -> SandboxProcessHandle: + params = input if isinstance(input, SandboxExecParams) else SandboxExecParams(**input) + payload = await self._transport.request_json( + "/sandbox/processes", + method="POST", + json_body=params.model_dump(exclude_none=True, by_alias=True), + headers={"content-type": "application/json"}, + ) + return SandboxProcessHandle( + self._transport, + SandboxProcessSummary(**payload["process"]), + ) + + async def get(self, process_id: str) -> SandboxProcessHandle: + payload = await self._transport.request_json(f"/sandbox/processes/{process_id}") + return SandboxProcessHandle( + 
self._transport, + SandboxProcessSummary(**payload["process"]), + ) + + async def list( + self, + *, + status=None, + limit: Optional[int] = None, + cursor: Optional[Union[str, int]] = None, + created_after: Optional[int] = None, + created_before: Optional[int] = None, + ) -> SandboxProcessListResponse: + normalized_status = None + if isinstance(status, list): + normalized_status = ",".join(status) if status else None + else: + normalized_status = status + + payload = await self._transport.request_json( + "/sandbox/processes", + params={ + "status": normalized_status, + "limit": limit, + "cursor": cursor, + "created_after": created_after, + "created_before": created_before, + }, + ) + return SandboxProcessListResponse(**payload) + + +class SandboxFileWatchHandle: + def __init__(self, transport: RuntimeTransport, get_connection_info, status): + self._transport = transport + self._get_connection_info = get_connection_info + self._status = status + + @property + def id(self) -> str: + return self._status.id + + @property + def current(self) -> SandboxFileWatchStatus: + return _copy_model(self._status) + + def to_dict(self): + return self._status.model_dump() + + def to_json(self): + return self.to_dict() + + async def refresh(self, include_events: bool = False) -> "SandboxFileWatchHandle": + params = {"includeEvents": True} if include_events else None + payload = await self._transport.request_json( + f"/sandbox/files/watch/{self.id}", + params=params, + ) + self._status = SandboxFileWatchStatus(**payload["watch"]) + return self + + async def stop(self) -> None: + await self._transport.request_json( + f"/sandbox/files/watch/{self.id}", + method="DELETE", + ) + self._status = self._status.model_copy( + update={ + "active": False, + "stopped_at": self._status.stopped_at or int(datetime.now().timestamp() * 1000), + } + ) + + async def events( + self, + *, + cursor: Optional[int] = None, + route: str = "ws", + ) -> AsyncIterator[object]: + connection = await 
self._get_connection_info() + query = urlencode( + [ + ("sessionId", connection.sandbox_id), + *([("cursor", str(cursor))] if cursor is not None else []), + ] + ) + target = to_websocket_transport_target( + connection.base_url, + f"/sandbox/files/watch/{self.id}/{route}?{query}", + ) + headers = build_headers(connection.token, host_header=target.host_header) + connect_kwargs = {} + if target.connect_host is not None and target.connect_port is not None: + sock = socket.create_connection( + (target.connect_host, target.connect_port), + timeout=self._transport._timeout, + ) + sock.setblocking(False) + connect_kwargs["sock"] = sock + try: + websocket = await async_ws_connect( + target.url, + additional_headers=headers, + open_timeout=self._transport._timeout, + **connect_kwargs, + ) + except BaseException as error: + raise _normalize_websocket_error(error) + + try: + while True: + try: + message = await websocket.recv() + except ConnectionClosed: + break + + if isinstance(message, bytes): + message = message.decode("utf-8") + parsed = json.loads(message) + if parsed["type"] == "event": + event = SandboxFileWatchEventMessage( + type="event", + event=parsed["event"], + ) + self._status = self._status.model_copy( + update={ + "oldest_seq": self._status.oldest_seq or event.event.seq, + "last_seq": max(self._status.last_seq, event.event.seq), + } + ) + yield event + elif parsed["type"] == "done": + self._status = SandboxFileWatchStatus(**parsed["status"]) + yield SandboxFileWatchDoneEvent(type="done", status=self.current) + break + except GeneratorExit: + raise + except BaseException as error: + raise _normalize_websocket_error(error) + finally: + await websocket.close() + + +class SandboxFilesApi: + def __init__(self, transport: RuntimeTransport, get_connection_info): + self._transport = transport + self._get_connection_info = get_connection_info + + async def list( + self, + path: str, + *, + recursive: Optional[bool] = None, + limit: Optional[int] = None, + cursor: 
Optional[int] = None, + ) -> SandboxFileListResponse: + payload = await self._transport.request_json( + "/sandbox/files", + params={ + "path": path, + "recursive": recursive, + "limit": limit, + "cursor": cursor, + }, + ) + return SandboxFileListResponse(**payload) + + async def stat(self, path: str): + payload = await self._transport.request_json( + "/sandbox/files/stat", + params={"path": path}, + ) + return SandboxFileEntry(**payload["file"]) + + async def exists(self, path: str) -> bool: + try: + await self.stat(path) + return True + except HyperbrowserError as error: + if error.status_code == 404: + return False + if "not found" in str(error).lower() or "no such file" in str(error).lower(): + return False + raise + + async def read( + self, + path: str, + *, + offset: Optional[int] = None, + length: Optional[int] = None, + encoding: str = "utf8", + ) -> SandboxFileReadResult: + payload = await self._transport.request_json( + "/sandbox/files/read", + method="POST", + json_body={ + "path": path, + "offset": offset, + "length": length, + "encoding": encoding, + }, + headers={"content-type": "application/json"}, + ) + return SandboxFileReadResult(**payload) + + async def read_text( + self, + path: str, + *, + offset: Optional[int] = None, + length: Optional[int] = None, + ) -> str: + return (await self.read(path, offset=offset, length=length, encoding="utf8")).content + + async def read_bytes( + self, + path: str, + *, + offset: Optional[int] = None, + length: Optional[int] = None, + ) -> bytes: + result = await self.read(path, offset=offset, length=length, encoding="base64") + return base64.b64decode(result.content) + + async def write_text( + self, + path: str, + data: str, + *, + append: Optional[bool] = None, + mode: Optional[str] = None, + ): + return await self._write( + path, + data, + append=append, + mode=mode, + encoding="utf8", + ) + + async def write_bytes( + self, + path: str, + data: bytes, + *, + append: Optional[bool] = None, + mode: Optional[str] 
= None, + ): + return await self._write( + path, + base64.b64encode(data).decode("ascii"), + append=append, + mode=mode, + encoding="base64", + ) + + async def upload(self, path: str, data: Union[str, bytes, bytearray]): + body = data.encode("utf-8") if isinstance(data, str) else bytes(data) + payload = await self._transport.request_json( + "/sandbox/files/upload", + method="PUT", + params={"path": path}, + content=body, + ) + return SandboxFileTransferResult(**payload) + + async def download(self, path: str) -> bytes: + return await self._transport.request_bytes( + "/sandbox/files/download", + params={"path": path}, + ) + + async def delete(self, path: str, *, recursive: Optional[bool] = None): + payload = await self._transport.request_json( + "/sandbox/files/delete", + method="POST", + json_body=SandboxFileDeleteParams( + path=path, + recursive=recursive, + ).model_dump(exclude_none=True), + headers={"content-type": "application/json"}, + ) + return SandboxFileMutationResult(**payload) + + async def mkdir( + self, + path: str, + *, + parents: Optional[bool] = None, + mode: Optional[str] = None, + ): + payload = await self._transport.request_json( + "/sandbox/files/mkdir", + method="POST", + json_body={ + "path": path, + "parents": parents, + "mode": mode, + }, + headers={"content-type": "application/json"}, + ) + return SandboxFileMutationResult(**payload) + + async def move( + self, + *, + source: str, + destination: str, + overwrite: Optional[bool] = None, + ) -> SandboxFileMoveCopyResult: + payload = await self._transport.request_json( + "/sandbox/files/move", + method="POST", + json_body={ + "from": source, + "to": destination, + "overwrite": overwrite, + }, + headers={"content-type": "application/json"}, + ) + return SandboxFileMoveCopyResult(**payload) + + async def copy( + self, + *, + source: str, + destination: str, + recursive: Optional[bool] = None, + overwrite: Optional[bool] = None, + ) -> SandboxFileMoveCopyResult: + payload = await 
self._transport.request_json( + "/sandbox/files/copy", + method="POST", + json_body={ + "from": source, + "to": destination, + "recursive": recursive, + "overwrite": overwrite, + }, + headers={"content-type": "application/json"}, + ) + return SandboxFileMoveCopyResult(**payload) + + async def chmod(self, *, path: str, mode: str, recursive: Optional[bool] = None): + payload = await self._transport.request_json( + "/sandbox/files/chmod", + method="POST", + json_body=SandboxFileChmodParams( + path=path, + mode=mode, + recursive=recursive, + ).model_dump(exclude_none=True), + headers={"content-type": "application/json"}, + ) + return SandboxFileMutationResult(**payload) + + async def chown( + self, + *, + path: str, + uid: Optional[int] = None, + gid: Optional[int] = None, + recursive: Optional[bool] = None, + ): + payload = await self._transport.request_json( + "/sandbox/files/chown", + method="POST", + json_body=SandboxFileChownParams( + path=path, + uid=uid, + gid=gid, + recursive=recursive, + ).model_dump(exclude_none=True), + headers={"content-type": "application/json"}, + ) + return SandboxFileMutationResult(**payload) + + async def watch(self, path: str, *, recursive: Optional[bool] = None): + payload = await self._transport.request_json( + "/sandbox/files/watch", + method="POST", + json_body={ + "path": path, + "recursive": recursive, + }, + headers={"content-type": "application/json"}, + ) + return SandboxFileWatchHandle( + self._transport, + self._get_connection_info, + SandboxFileWatchStatus(**payload["watch"]), + ) + + async def get_watch( + self, watch_id: str, include_events: bool = False + ) -> SandboxFileWatchHandle: + payload = await self._transport.request_json( + f"/sandbox/files/watch/{watch_id}", + params={"includeEvents": True} if include_events else None, + ) + return SandboxFileWatchHandle( + self._transport, + self._get_connection_info, + SandboxFileWatchStatus(**payload["watch"]), + ) + + async def upload_url( + self, + path: str, + *, + 
expires_in_seconds: Optional[int] = None, + one_time: Optional[bool] = None, + ) -> SandboxPresignedUrl: + payload = await self._transport.request_json( + "/sandbox/files/presign-upload", + method="POST", + json_body=SandboxPresignFileParams( + path=path, + expires_in_seconds=expires_in_seconds, + one_time=one_time, + ).model_dump(exclude_none=True, by_alias=True), + headers={"content-type": "application/json"}, + ) + return SandboxPresignedUrl(**payload) + + async def download_url( + self, + path: str, + *, + expires_in_seconds: Optional[int] = None, + one_time: Optional[bool] = None, + ) -> SandboxPresignedUrl: + payload = await self._transport.request_json( + "/sandbox/files/presign-download", + method="POST", + json_body=SandboxPresignFileParams( + path=path, + expires_in_seconds=expires_in_seconds, + one_time=one_time, + ).model_dump(exclude_none=True, by_alias=True), + headers={"content-type": "application/json"}, + ) + return SandboxPresignedUrl(**payload) + + async def _write( + self, + path: str, + data: str, + *, + append: Optional[bool] = None, + mode: Optional[str] = None, + encoding: str, + ): + payload = await self._transport.request_json( + "/sandbox/files/write", + method="POST", + json_body={ + "path": path, + "data": data, + "append": append, + "mode": mode, + "encoding": encoding, + }, + headers={"content-type": "application/json"}, + ) + return SandboxFileTransferResult(**payload) + + +class SandboxTerminalConnection: + def __init__(self, websocket): + self._websocket = websocket + + async def events(self) -> AsyncIterator[object]: + while True: + try: + message = await self._websocket.recv() + except ConnectionClosed: + break + + if isinstance(message, bytes): + message = message.decode("utf-8") + parsed = json.loads(message) + if parsed["type"] == "output": + raw = base64.b64decode(parsed["data"]) + yield SandboxTerminalOutputEvent( + type="output", + seq=parsed["seq"], + data=raw.decode("utf-8", errors="replace"), + raw=raw, + 
timestamp=parsed["timestamp"], + ) + elif parsed["type"] == "exit": + yield SandboxTerminalExitEvent( + type="exit", + status=SandboxTerminalStatus(**parsed["status"]), + ) + + async def write(self, data: Union[str, bytes, bytearray]) -> None: + payload = { + "type": "input", + "data": data if isinstance(data, str) else base64.b64encode(bytes(data)).decode("ascii"), + } + if not isinstance(data, str): + payload["encoding"] = "base64" + await self._websocket.send(json.dumps(payload)) + + async def resize(self, rows: int, cols: int) -> None: + await self._websocket.send( + json.dumps( + { + "type": "resize", + "rows": rows, + "cols": cols, + } + ) + ) + + async def close(self) -> None: + await self._websocket.close() + + +class SandboxTerminalHandle: + def __init__(self, transport: RuntimeTransport, get_connection_info, status): + self._transport = transport + self._get_connection_info = get_connection_info + self._status = status + + @property + def id(self) -> str: + return self._status.id + + @property + def current(self) -> SandboxTerminalStatus: + return _copy_model(self._status) + + def to_dict(self): + return self._status.model_dump() + + def to_json(self): + return self.to_dict() + + async def refresh(self, include_output: bool = False) -> "SandboxTerminalHandle": + payload = await self._transport.request_json( + f"/sandbox/pty/{self.id}", + params={"includeOutput": True} if include_output else None, + ) + self._status = SandboxTerminalStatus(**payload["pty"]) + return self + + async def wait( + self, + timeout_ms: Optional[int] = None, + include_output: Optional[bool] = None, + ) -> SandboxTerminalStatus: + payload = await self._transport.request_json( + f"/sandbox/pty/{self.id}/wait", + method="POST", + json_body=SandboxTerminalWaitParams( + timeout_ms=timeout_ms, + include_output=include_output, + ).model_dump(exclude_none=True, by_alias=True), + headers={"content-type": "application/json"}, + ) + self._status = SandboxTerminalStatus(**payload["pty"]) + 
return self.current + + async def signal(self, signal: Optional[str] = None) -> SandboxTerminalStatus: + payload = await self._transport.request_json( + f"/sandbox/pty/{self.id}/kill", + method="POST", + json_body={"signal": signal}, + headers={"content-type": "application/json"}, + ) + self._status = SandboxTerminalStatus(**payload["pty"]) + return self.current + + async def kill( + self, + signal: Optional[str] = None, + *, + timeout_ms: Optional[int] = None, + ) -> SandboxTerminalStatus: + await self.signal(signal) + if timeout_ms is None: + timeout_ms = int(DEFAULT_TERMINAL_KILL_WAIT_SECONDS * 1000) + return await self.wait(timeout_ms=timeout_ms) + + async def resize(self, rows: int, cols: int) -> SandboxTerminalStatus: + payload = await self._transport.request_json( + f"/sandbox/pty/{self.id}/resize", + method="POST", + json_body={"rows": rows, "cols": cols}, + headers={"content-type": "application/json"}, + ) + self._status = SandboxTerminalStatus(**payload["pty"]) + return self.current + + async def attach(self) -> SandboxTerminalConnection: + connection = await self._get_connection_info() + target = to_websocket_transport_target( + connection.base_url, + f"/sandbox/pty/{self.id}/ws?sessionId={connection.sandbox_id}", + ) + headers = build_headers(connection.token, host_header=target.host_header) + connect_kwargs = {} + if target.connect_host is not None and target.connect_port is not None: + sock = socket.create_connection( + (target.connect_host, target.connect_port), + timeout=self._transport._timeout, + ) + sock.setblocking(False) + connect_kwargs["sock"] = sock + + try: + websocket = await async_ws_connect( + target.url, + additional_headers=headers, + open_timeout=self._transport._timeout, + **connect_kwargs, + ) + except BaseException as error: + raise _normalize_websocket_error(error) + + return SandboxTerminalConnection(websocket) + + +class SandboxTerminalApi: + def __init__(self, transport: RuntimeTransport, get_connection_info): + self._transport 
= transport + self._get_connection_info = get_connection_info + + async def create( + self, + input: Union[SandboxTerminalCreateParams, Dict[str, object]], + ) -> SandboxTerminalHandle: + params = ( + input + if isinstance(input, SandboxTerminalCreateParams) + else SandboxTerminalCreateParams(**input) + ) + payload = await self._transport.request_json( + "/sandbox/pty", + method="POST", + json_body=params.model_dump(exclude_none=True, by_alias=True), + headers={"content-type": "application/json"}, + ) + return SandboxTerminalHandle( + self._transport, + self._get_connection_info, + SandboxTerminalStatus(**payload["pty"]), + ) + + async def get( + self, terminal_id: str, include_output: bool = False + ) -> SandboxTerminalHandle: + payload = await self._transport.request_json( + f"/sandbox/pty/{terminal_id}", + params={"includeOutput": True} if include_output else None, + ) + return SandboxTerminalHandle( + self._transport, + self._get_connection_info, + SandboxTerminalStatus(**payload["pty"]), + ) + + +class SandboxHandle: + def __init__(self, service: "SandboxManager", detail: SandboxDetail): + self._service = service + self._detail = detail + self._runtime_session = self._to_runtime_session(detail) + self._transport = RuntimeTransport( + self._resolve_runtime_connection, + service.runtime_timeout, + ) + self.processes = SandboxProcessesApi(self._transport) + self.files = SandboxFilesApi(self._transport, self._resolve_runtime_socket_info) + self.terminal = SandboxTerminalApi( + self._transport, + self._resolve_runtime_socket_info, + ) + self.pty = self.terminal + + @property + def id(self) -> str: + return self._detail.id + + @property + def status(self) -> str: + return self._detail.status + + @property + def region(self): + return self._detail.region + + @property + def runtime(self): + return self._detail.runtime + + @property + def token_expires_at(self): + return self._detail.token_expires_at + + @property + def session_url(self) -> str: + return 
self._detail.session_url + + def to_dict(self): + return self._detail.model_dump() + + def to_json(self): + return self.to_dict() + + async def info(self) -> SandboxDetail: + detail = await self._service.get_detail(self.id) + self._hydrate(detail) + return _copy_model(self._detail) + + async def refresh(self) -> "SandboxHandle": + await self.info() + return self + + async def connect(self) -> "SandboxHandle": + await self.create_runtime_session(force_refresh=True) + return self + + async def stop(self) -> BasicResponse: + response = await self._service.stop(self.id) + self._clear_runtime_session("closed") + return response + + async def create_runtime_session( + self, force_refresh: bool = False + ) -> SandboxRuntimeSession: + self._assert_runtime_available() + if ( + not force_refresh + and self._runtime_session is not None + and not _expires_within_buffer(self._runtime_session.token_expires_at) + ): + return _copy_model(self._runtime_session) + + session = await self._service.get_runtime_session(self.id) + self._apply_runtime_session(session) + return _copy_model(session) + + async def exec(self, input: Union[str, SandboxExecParams, Dict[str, object]]): + if isinstance(input, str): + params = SandboxExecParams(command=input) + elif isinstance(input, SandboxExecParams): + params = input + else: + params = SandboxExecParams(**input) + return await self.processes.exec(params) + + async def get_process(self, process_id: str) -> SandboxProcessHandle: + return await self.processes.get(process_id) + + def _hydrate(self, detail: SandboxDetail) -> None: + self._detail = detail + self._runtime_session = self._to_runtime_session(detail) + + async def _resolve_runtime_connection( + self, force_refresh: bool = False + ) -> RuntimeConnection: + session = await self.create_runtime_session(force_refresh=force_refresh) + return RuntimeConnection( + sandbox_id=self.id, + base_url=session.runtime.base_url, + token=session.token, + ) + + async def _resolve_runtime_socket_info(self) 
-> RuntimeConnection: + session = await self.create_runtime_session() + return RuntimeConnection( + sandbox_id=self.id, + base_url=session.runtime.base_url, + token=session.token, + ) + + def _apply_runtime_session(self, session: SandboxRuntimeSession) -> None: + self._runtime_session = _copy_model(session) + self._detail = self._detail.model_copy( + update={ + "status": session.status, + "region": session.region, + "runtime": session.runtime, + "token": session.token, + "token_expires_at": session.token_expires_at, + } + ) + + def _clear_runtime_session(self, status: Optional[str] = None) -> None: + self._runtime_session = None + self._detail = self._detail.model_copy( + update={ + "status": status or self._detail.status, + "token": None, + "token_expires_at": None, + } + ) + + def _assert_runtime_available(self) -> None: + if self._detail.status in {"closed", "error"}: + raise HyperbrowserError( + f"Sandbox {self.id} is not running", + status_code=409, + code="sandbox_not_running", + retryable=False, + service="runtime", + ) + + @staticmethod + def _to_runtime_session(detail: SandboxDetail) -> Optional[SandboxRuntimeSession]: + if not detail.token: + return None + return SandboxRuntimeSession( + sandbox_id=detail.id, + status=detail.status, + region=detail.region, + token=detail.token, + token_expires_at=detail.token_expires_at, + runtime=detail.runtime, + ) + + +class SandboxManager: + def __init__(self, client): + self._client = client + self.runtime_timeout = getattr(client, "timeout", 30) + + async def create(self, params: CreateSandboxParams) -> SandboxHandle: + detail = await self._create_detail(params) + return self.attach(detail) + + async def start_from_snapshot( + self, params: StartSandboxFromSnapshotParams + ) -> SandboxHandle: + detail = await self._start_from_snapshot_detail(params) + return self.attach(detail) + + async def get(self, sandbox_id: str) -> SandboxHandle: + return self.attach(await self.get_detail(sandbox_id)) + + async def 
connect(self, sandbox_id: str) -> SandboxHandle:
        """Fetch the sandbox by id, then eagerly establish a runtime session."""
        sandbox = await self.get(sandbox_id)
        await sandbox.connect()
        return sandbox

    async def list(
        self, params: Optional[SandboxListParams] = None
    ) -> SandboxListResponse:
        """List sandboxes; an omitted filter defaults to empty list params."""
        payload = await self._request(
            "GET",
            "/sandboxes",
            params=(params or SandboxListParams()).model_dump(
                exclude_none=True, by_alias=True
            ),
        )
        return SandboxListResponse(**payload)

    async def stop(self, sandbox_id: str) -> BasicResponse:
        """Stop a sandbox via the control-plane API."""
        payload = await self._request("POST", f"/sandboxes/{sandbox_id}/stop")
        return BasicResponse(**payload)

    async def get_runtime_session(self, sandbox_id: str) -> SandboxRuntimeSession:
        """Mint a runtime session (token + runtime endpoint) for a sandbox."""
        payload = await self._request("POST", f"/sandboxes/{sandbox_id}/runtime-session")
        return SandboxRuntimeSession(**payload)

    async def get_detail(self, sandbox_id: str) -> SandboxDetail:
        """Fetch the raw sandbox detail record."""
        payload = await self._request("GET", f"/sandboxes/{sandbox_id}")
        return SandboxDetail(**payload)

    def attach(self, detail: SandboxDetail) -> SandboxHandle:
        """Wrap an already-fetched detail in a handle; no network call."""
        return SandboxHandle(self, detail)

    async def _create_detail(self, params: CreateSandboxParams) -> SandboxDetail:
        # POST /sandboxes with camelCase body (by_alias) and Nones stripped.
        payload = await self._request(
            "POST",
            "/sandboxes",
            data=params.model_dump(exclude_none=True, by_alias=True),
        )
        return SandboxDetail(**payload)

    async def _start_from_snapshot_detail(
        self, params: StartSandboxFromSnapshotParams
    ) -> SandboxDetail:
        # Same shape as _create_detail but against the snapshot-start endpoint.
        payload = await self._request(
            "POST",
            "/sandboxes/startFromSnapshot",
            data=params.model_dump(exclude_none=True, by_alias=True),
        )
        return SandboxDetail(**payload)

    async def _request(
        self,
        method: str,
        path: str,
        *,
        params: Optional[Dict[str, object]] = None,
        data: Optional[Dict[str, object]] = None,
    ):
        """Issue a control-plane request, normalizing transport errors.

        None-valued query params are dropped before sending; HTTP and JSON
        errors are surfaced via ensure_response_ok / parse_json_response.
        """
        try:
            response = await self._client.transport.client.request(
                method,
                self._client._build_url(path),
                params={k: v for k, v in (params or {}).items() if v is not None},
                json=data,
            )
        except BaseException as error:
            raise
normalize_network_error( + error, + "control", + "Unknown error occurred", + ) + + ensure_response_ok(response, "control") + return parse_json_response(response, "control") diff --git a/hyperbrowser/client/managers/sync_manager/sandbox.py b/hyperbrowser/client/managers/sync_manager/sandbox.py new file mode 100644 index 00000000..823a444f --- /dev/null +++ b/hyperbrowser/client/managers/sync_manager/sandbox.py @@ -0,0 +1,1445 @@ +import base64 +import json +import socket +from datetime import datetime, timedelta, timezone +from typing import Dict, Iterator, Optional, Union +from urllib.parse import urlencode + +import httpx +from websockets.exceptions import ConnectionClosed +from websockets.sync.client import connect as sync_ws_connect + +from ....exceptions import HyperbrowserError +from ....models.sandbox import ( + CreateSandboxParams, + SandboxDetail, + SandboxExecParams, + SandboxFileChmodParams, + SandboxFileChownParams, + SandboxFileEntry, + SandboxFileDeleteParams, + SandboxFileListResponse, + SandboxFileMoveCopyResult, + SandboxFileMutationResult, + SandboxFileReadResult, + SandboxFileTransferResult, + SandboxFileWatchDoneEvent, + SandboxFileWatchEventMessage, + SandboxFileWatchStatus, + SandboxListParams, + SandboxListResponse, + SandboxPresignFileParams, + SandboxPresignedUrl, + SandboxProcessExitEvent, + SandboxProcessListResponse, + SandboxProcessOutputEvent, + SandboxProcessResult, + SandboxProcessStdinParams, + SandboxProcessSummary, + SandboxRuntimeSession, + SandboxTerminalCreateParams, + SandboxTerminalExitEvent, + SandboxTerminalOutputEvent, + SandboxTerminalStatus, + SandboxTerminalWaitParams, + StartSandboxFromSnapshotParams, +) +from ....models.session import BasicResponse +from ....sandbox_common import ( + RUNTIME_SESSION_REFRESH_BUFFER_MS, + RuntimeConnection, + build_headers, + ensure_response_ok, + normalize_network_error, + parse_error_payload, + parse_json_response, + resolve_runtime_transport_target, + to_websocket_transport_target, +) 

DEFAULT_PROCESS_KILL_WAIT_SECONDS = 5.0
DEFAULT_TERMINAL_KILL_WAIT_SECONDS = 5.0


def _copy_model(model):
    """Deep-copy a pydantic model so callers can't mutate cached state."""
    return model.model_copy(deep=True)


def _expires_within_buffer(expires_at: Optional[datetime]) -> bool:
    """True when *expires_at* falls inside the token-refresh buffer window.

    Naive datetimes are assumed to be UTC.
    """
    if expires_at is None:
        return False
    if expires_at.tzinfo is None:
        expires_at = expires_at.replace(tzinfo=timezone.utc)
    threshold = datetime.now(timezone.utc) + timedelta(
        milliseconds=RUNTIME_SESSION_REFRESH_BUFFER_MS
    )
    return expires_at <= threshold


def _build_query_path(path: str, params: Optional[Dict[str, object]] = None) -> str:
    """Append url-encoded *params* to *path*, skipping None-valued entries."""
    if not params:
        return path

    filtered = [
        (key, str(value)) for key, value in params.items() if value is not None
    ]

    if not filtered:
        return path

    return f"{path}?{urlencode(filtered)}"


def _normalize_websocket_error(error: BaseException) -> HyperbrowserError:
    """Convert any websocket failure into a HyperbrowserError.

    Handles three shapes in order: errors that already are HyperbrowserError,
    errors carrying an HTTP response (failed handshake), and errors exposing a
    bare status_code; anything else falls through to the generic network
    normalizer.
    """
    if isinstance(error, HyperbrowserError):
        return error

    response = getattr(error, "response", None)
    if response is not None:
        status_code = getattr(response, "status_code", None)
        headers = getattr(response, "headers", {}) or {}
        body = getattr(response, "body", b"")
        # Normalize the body to text regardless of buffer type.
        if isinstance(body, memoryview):
            body = body.tobytes()
        if isinstance(body, bytearray):
            body = bytes(body)
        if isinstance(body, bytes):
            raw_text = body.decode("utf-8", errors="replace")
        elif isinstance(body, str):
            raw_text = body
        else:
            raw_text = ""

        message, code, details = parse_error_payload(
            raw_text,
            f"Runtime websocket request failed: {status_code or 0}",
        )
        # Fix: the previous isinstance(headers, dict) branch executed identical
        # code on both arms; a single .get() lookup covers plain dicts and
        # header-mapping objects alike.
        request_id = headers.get("x-request-id") or headers.get("request-id")

        return HyperbrowserError(
            message,
            status_code=status_code,
            code=code,
            request_id=request_id,
            retryable=bool(status_code in {429, 502, 503, 504}),
            service="runtime",
            details=details,
            cause=error,
            original_error=error if isinstance(error, Exception) else None,
        )

    status_code = getattr(error, "status_code", None)
    headers = getattr(error, "headers", None)
    if status_code is not None:
        request_id = None
        if headers is not None:
            request_id = headers.get("x-request-id") or headers.get("request-id")
        return HyperbrowserError(
            f"Runtime websocket request failed: {status_code}",
            status_code=status_code,
            request_id=request_id,
            retryable=bool(status_code in {429, 502, 503, 504}),
            service="runtime",
            cause=error,
            original_error=error if isinstance(error, Exception) else None,
        )

    return normalize_network_error(
        error,
        "runtime",
        "Unknown runtime websocket error",
    )


class RuntimeTransport:
    """Thin HTTP/SSE client for the sandbox runtime.

    Resolves the runtime connection lazily via *resolve_connection* and (in
    _request / _open_stream, below) retries once with a refreshed token on
    HTTP 401.
    """

    def __init__(self, resolve_connection, timeout: float = 30.0):
        self._resolve_connection = resolve_connection
        self._timeout = timeout

    def request_json(
        self,
        path: str,
        *,
        method: str = "GET",
        params: Optional[Dict[str, object]] = None,
        json_body: Optional[Dict[str, object]] = None,
        content: Optional[Union[str, bytes]] = None,
        headers: Optional[Dict[str, str]] = None,
    ):
        """Issue a runtime request and parse the JSON response body."""
        response = self._request(
            path,
            method=method,
            params=params,
            json_body=json_body,
            content=content,
            headers=headers,
        )
        return parse_json_response(response, "runtime")

    def request_bytes(
        self,
        path: str,
        *,
        method: str = "GET",
        params: Optional[Dict[str, object]] = None,
        headers: Optional[Dict[str, str]] = None,
    ) -> bytes:
        """Issue a runtime request and return the raw response body."""
        response = self._request(path, method=method, params=params, headers=headers)
        return response.content

    def stream_sse(
        self, path: str, params: Optional[Dict[str, object]] = None
    ) -> Iterator[Dict[str, object]]:
        """Stream server-sent events as {"event", "data", "id"} dicts.

        Data payloads are JSON-decoded when possible, otherwise yielded as the
        raw string.
        """
        client, response = self._open_stream(path, params=params)
        event_name = "message"
        event_id = None
        data_lines = []

        def flush_event():
            # Emit the accumulated SSE fields as one event, then reset state.
            nonlocal event_name, event_id, data_lines
            if not data_lines and event_name == "message" and event_id is None:
                return None
raw_data = "\n".join(data_lines) + data = raw_data + if raw_data: + try: + data = json.loads(raw_data) + except json.JSONDecodeError: + data = raw_data + + event = { + "event": event_name, + "data": data, + "id": event_id, + } + event_name = "message" + event_id = None + data_lines = [] + return event + + try: + for line in response.iter_lines(): + if line == "": + event = flush_event() + if event is not None: + yield event + continue + + if line.startswith(":"): + continue + + if ":" in line: + field, value = line.split(":", 1) + value = value.lstrip(" ") + else: + field, value = line, "" + + if field == "event": + event_name = value or "message" + elif field == "data": + data_lines.append(value) + elif field == "id": + event_id = value + + trailing = flush_event() + if trailing is not None: + yield trailing + finally: + response.close() + client.close() + + def _request( + self, + path: str, + *, + method: str = "GET", + params: Optional[Dict[str, object]] = None, + json_body: Optional[Dict[str, object]] = None, + content: Optional[Union[str, bytes]] = None, + headers: Optional[Dict[str, str]] = None, + allow_refresh: bool = True, + ) -> httpx.Response: + connection = self._resolve_connection(False) + response = self._send( + connection, + path, + method=method, + params=params, + json_body=json_body, + content=content, + headers=headers, + ) + + if response.status_code == 401 and allow_refresh: + response.close() + refreshed = self._resolve_connection(True) + retry = self._send( + refreshed, + path, + method=method, + params=params, + json_body=json_body, + content=content, + headers=headers, + ) + return ensure_response_ok(retry, "runtime") + + return ensure_response_ok(response, "runtime") + + def _open_stream( + self, + path: str, + *, + params: Optional[Dict[str, object]] = None, + allow_refresh: bool = True, + ): + connection = self._resolve_connection(False) + client, response = self._send_stream(connection, path, params=params) + if response.status_code 
== 401 and allow_refresh: + response.close() + client.close() + refreshed = self._resolve_connection(True) + client, response = self._send_stream(refreshed, path, params=params) + + if not response.is_success: + response.read() + ensure_response_ok(response, "runtime") + return client, response + + def _send( + self, + connection: RuntimeConnection, + path: str, + *, + method: str, + params: Optional[Dict[str, object]], + json_body: Optional[Dict[str, object]], + content: Optional[Union[str, bytes]], + headers: Optional[Dict[str, str]], + ) -> httpx.Response: + request_path = _build_query_path(path, params) + target = resolve_runtime_transport_target(connection.base_url, request_path) + merged_headers = build_headers(connection.token, headers, target.host_header) + client = httpx.Client(timeout=self._timeout) + + try: + response = client.request( + method, + target.url, + headers=merged_headers, + json=json_body, + content=content, + ) + except BaseException as error: + client.close() + raise normalize_network_error( + error, + "runtime", + "Unknown runtime request error", + ) + + response.read() + client.close() + return response + + def _send_stream( + self, + connection: RuntimeConnection, + path: str, + *, + params: Optional[Dict[str, object]], + ): + request_path = _build_query_path(path, params) + target = resolve_runtime_transport_target(connection.base_url, request_path) + headers = build_headers( + connection.token, + {"Accept": "text/event-stream"}, + target.host_header, + ) + client = httpx.Client(timeout=self._timeout) + + try: + request = client.build_request("GET", target.url, headers=headers) + response = client.send(request, stream=True) + return client, response + except BaseException as error: + client.close() + raise normalize_network_error( + error, + "runtime", + "Unknown runtime request error", + ) + + +class SandboxProcessHandle: + def __init__(self, transport: RuntimeTransport, summary: SandboxProcessSummary): + self._transport = transport + 
        self._summary = summary

    @property
    def id(self) -> str:
        # Runtime-assigned process identifier.
        return self._summary.id

    @property
    def status(self) -> str:
        # Last-known lifecycle status (updated by refresh()/wait()/signal()).
        return self._summary.status

    def to_dict(self):
        """Return the cached process summary as a plain dict."""
        return self._summary.model_dump()

    def to_json(self):
        """Alias of to_dict()."""
        return self.to_dict()

    def refresh(self) -> "SandboxProcessHandle":
        """Re-fetch the process summary from the runtime and return self."""
        payload = self._transport.request_json(f"/sandbox/processes/{self.id}")
        self._summary = SandboxProcessSummary(**payload["process"])
        return self

    def wait(self, timeout_ms: Optional[int] = None, timeout_sec: Optional[int] = None):
        """Block until the process exits and return its SandboxProcessResult.

        Folds the exit information back into the cached summary.
        """
        # NOTE(review): "timeoutMs" is camelCase while "timeout_sec" is
        # snake_case in the same JSON body — verify against the runtime API
        # which key spelling it actually expects for seconds.
        payload = self._transport.request_json(
            f"/sandbox/processes/{self.id}/wait",
            method="POST",
            json_body={
                "timeoutMs": timeout_ms,
                "timeout_sec": timeout_sec,
            },
            headers={"content-type": "application/json"},
        )
        result = SandboxProcessResult(**payload["result"])
        # Merge the terminal result into the summary; command/args/cwd/pid are
        # not part of the result payload, so the cached values are kept.
        self._summary = SandboxProcessSummary(
            id=result.id,
            status=result.status,
            command=self._summary.command,
            args=self._summary.args,
            cwd=self._summary.cwd,
            pid=self._summary.pid,
            exit_code=result.exit_code,
            started_at=result.started_at,
            completed_at=result.completed_at,
        )
        return result

    def signal(self, signal: str) -> None:
        """Send *signal* (e.g. "SIGTERM") without waiting for exit."""
        payload = self._transport.request_json(
            f"/sandbox/processes/{self.id}/signal",
            method="POST",
            json_body={"signal": signal},
            headers={"content-type": "application/json"},
        )
        self._summary = SandboxProcessSummary(**payload["process"])

    def kill(
        self,
        timeout_ms: Optional[int] = None,
        timeout_sec: Optional[int] = None,
    ) -> SandboxProcessResult:
        """Terminate the process (DELETE), then wait for it to exit.

        When no timeout is supplied, waits DEFAULT_PROCESS_KILL_WAIT_SECONDS.
        """
        payload = self._transport.request_json(
            f"/sandbox/processes/{self.id}",
            method="DELETE",
        )
        self._summary = SandboxProcessSummary(**payload["process"])
        if timeout_ms is None and timeout_sec is None:
            timeout_ms = int(DEFAULT_PROCESS_KILL_WAIT_SECONDS * 1000)
        return self.wait(timeout_ms=timeout_ms, timeout_sec=timeout_sec)

    def write_stdin(
        self,
        data: Optional[Union[str, bytes, bytearray, SandboxProcessStdinParams]] =
None, + *, + encoding: Optional[str] = None, + eof: Optional[bool] = None, + ) -> None: + if isinstance(data, SandboxProcessStdinParams): + params = data + else: + params = SandboxProcessStdinParams(data=data, encoding=encoding, eof=eof) + + payload: Dict[str, object] = {"eof": params.eof} + if params.data is not None: + if isinstance(params.data, str): + payload["data"] = params.data + payload["encoding"] = params.encoding or "utf8" + else: + payload["data"] = base64.b64encode(bytes(params.data)).decode("ascii") + payload["encoding"] = "base64" + + self._transport.request_json( + f"/sandbox/processes/{self.id}/stdin", + method="POST", + json_body=payload, + headers={"content-type": "application/json"}, + ) + + def stream(self, from_seq: Optional[int] = None): + params = {"from_seq": from_seq} if from_seq and from_seq > 0 else None + for event in self._transport.stream_sse( + f"/sandbox/processes/{self.id}/stream", + params=params, + ): + event_type = event["event"] + data = event["data"] + if event_type == "output": + yield SandboxProcessOutputEvent( + type=data["stream"], + seq=data["seq"], + data=data["data"], + timestamp=data["timestamp"], + ) + elif event_type == "done": + yield SandboxProcessExitEvent( + type="exit", + result=SandboxProcessResult(**data), + ) + + def result(self) -> SandboxProcessResult: + return self.wait() + + +class SandboxProcessesApi: + def __init__(self, transport: RuntimeTransport): + self._transport = transport + + def exec(self, input: Union[SandboxExecParams, Dict[str, object]]) -> SandboxProcessResult: + params = input if isinstance(input, SandboxExecParams) else SandboxExecParams(**input) + payload = self._transport.request_json( + "/sandbox/exec", + method="POST", + json_body=params.model_dump(exclude_none=True, by_alias=True), + headers={"content-type": "application/json"}, + ) + return SandboxProcessResult(**payload["result"]) + + def start(self, input: Union[SandboxExecParams, Dict[str, object]]) -> SandboxProcessHandle: + 
params = input if isinstance(input, SandboxExecParams) else SandboxExecParams(**input) + payload = self._transport.request_json( + "/sandbox/processes", + method="POST", + json_body=params.model_dump(exclude_none=True, by_alias=True), + headers={"content-type": "application/json"}, + ) + return SandboxProcessHandle( + self._transport, + SandboxProcessSummary(**payload["process"]), + ) + + def get(self, process_id: str) -> SandboxProcessHandle: + payload = self._transport.request_json(f"/sandbox/processes/{process_id}") + return SandboxProcessHandle( + self._transport, + SandboxProcessSummary(**payload["process"]), + ) + + def list( + self, + *, + status=None, + limit: Optional[int] = None, + cursor: Optional[Union[str, int]] = None, + created_after: Optional[int] = None, + created_before: Optional[int] = None, + ) -> SandboxProcessListResponse: + normalized_status = None + if isinstance(status, list): + normalized_status = ",".join(status) if status else None + else: + normalized_status = status + + payload = self._transport.request_json( + "/sandbox/processes", + params={ + "status": normalized_status, + "limit": limit, + "cursor": cursor, + "created_after": created_after, + "created_before": created_before, + }, + ) + return SandboxProcessListResponse(**payload) + + +class SandboxFileWatchHandle: + def __init__(self, transport: RuntimeTransport, get_connection_info, status): + self._transport = transport + self._get_connection_info = get_connection_info + self._status = status + + @property + def id(self) -> str: + return self._status.id + + @property + def current(self) -> SandboxFileWatchStatus: + return _copy_model(self._status) + + def to_dict(self): + return self._status.model_dump() + + def to_json(self): + return self.to_dict() + + def refresh(self, include_events: bool = False) -> "SandboxFileWatchHandle": + params = {"includeEvents": True} if include_events else None + payload = self._transport.request_json( + f"/sandbox/files/watch/{self.id}", + 
params=params, + ) + self._status = SandboxFileWatchStatus(**payload["watch"]) + return self + + def stop(self) -> None: + self._transport.request_json( + f"/sandbox/files/watch/{self.id}", + method="DELETE", + ) + self._status = self._status.model_copy( + update={ + "active": False, + "stopped_at": self._status.stopped_at or int(datetime.now().timestamp() * 1000), + } + ) + + def events( + self, + *, + cursor: Optional[int] = None, + route: str = "ws", + ): + connection = self._get_connection_info() + query = urlencode( + [ + ("sessionId", connection.sandbox_id), + *([("cursor", str(cursor))] if cursor is not None else []), + ] + ) + target = to_websocket_transport_target( + connection.base_url, + f"/sandbox/files/watch/{self.id}/{route}?{query}", + ) + headers = build_headers(connection.token, host_header=target.host_header) + connect_kwargs = {} + if target.connect_host is not None and target.connect_port is not None: + connect_kwargs["sock"] = socket.create_connection( + (target.connect_host, target.connect_port), + timeout=self._transport._timeout, + ) + try: + websocket = sync_ws_connect( + target.url, + additional_headers=headers, + open_timeout=self._transport._timeout, + **connect_kwargs, + ) + except BaseException as error: + raise _normalize_websocket_error(error) + + try: + while True: + try: + message = websocket.recv() + except ConnectionClosed: + break + + if isinstance(message, bytes): + message = message.decode("utf-8") + parsed = json.loads(message) + if parsed["type"] == "event": + event = SandboxFileWatchEventMessage( + type="event", + event=parsed["event"], + ) + self._status = self._status.model_copy( + update={ + "oldest_seq": self._status.oldest_seq or event.event.seq, + "last_seq": max(self._status.last_seq, event.event.seq), + } + ) + yield event + elif parsed["type"] == "done": + self._status = SandboxFileWatchStatus(**parsed["status"]) + yield SandboxFileWatchDoneEvent(type="done", status=self.current) + break + except GeneratorExit: + 
raise + except BaseException as error: + raise _normalize_websocket_error(error) + finally: + websocket.close() + + +class SandboxFilesApi: + def __init__(self, transport: RuntimeTransport, get_connection_info): + self._transport = transport + self._get_connection_info = get_connection_info + + def list( + self, + path: str, + *, + recursive: Optional[bool] = None, + limit: Optional[int] = None, + cursor: Optional[int] = None, + ) -> SandboxFileListResponse: + payload = self._transport.request_json( + "/sandbox/files", + params={ + "path": path, + "recursive": recursive, + "limit": limit, + "cursor": cursor, + }, + ) + return SandboxFileListResponse(**payload) + + def stat(self, path: str): + payload = self._transport.request_json( + "/sandbox/files/stat", + params={"path": path}, + ) + return SandboxFileEntry(**payload["file"]) + + def exists(self, path: str) -> bool: + try: + self.stat(path) + return True + except HyperbrowserError as error: + if error.status_code == 404: + return False + if "not found" in str(error).lower() or "no such file" in str(error).lower(): + return False + raise + + def read( + self, + path: str, + *, + offset: Optional[int] = None, + length: Optional[int] = None, + encoding: str = "utf8", + ) -> SandboxFileReadResult: + payload = self._transport.request_json( + "/sandbox/files/read", + method="POST", + json_body={ + "path": path, + "offset": offset, + "length": length, + "encoding": encoding, + }, + headers={"content-type": "application/json"}, + ) + return SandboxFileReadResult(**payload) + + def read_text( + self, + path: str, + *, + offset: Optional[int] = None, + length: Optional[int] = None, + ) -> str: + return self.read(path, offset=offset, length=length, encoding="utf8").content + + def read_bytes( + self, + path: str, + *, + offset: Optional[int] = None, + length: Optional[int] = None, + ) -> bytes: + result = self.read(path, offset=offset, length=length, encoding="base64") + return base64.b64decode(result.content) + + def 
write_text( + self, + path: str, + data: str, + *, + append: Optional[bool] = None, + mode: Optional[str] = None, + ): + return self._write( + path, + data, + append=append, + mode=mode, + encoding="utf8", + ) + + def write_bytes( + self, + path: str, + data: bytes, + *, + append: Optional[bool] = None, + mode: Optional[str] = None, + ): + return self._write( + path, + base64.b64encode(data).decode("ascii"), + append=append, + mode=mode, + encoding="base64", + ) + + def upload(self, path: str, data: Union[str, bytes, bytearray]): + body = data.encode("utf-8") if isinstance(data, str) else bytes(data) + payload = self._transport.request_json( + "/sandbox/files/upload", + method="PUT", + params={"path": path}, + content=body, + ) + return SandboxFileTransferResult(**payload) + + def download(self, path: str) -> bytes: + return self._transport.request_bytes( + "/sandbox/files/download", + params={"path": path}, + ) + + def delete(self, path: str, *, recursive: Optional[bool] = None): + payload = self._transport.request_json( + "/sandbox/files/delete", + method="POST", + json_body=SandboxFileDeleteParams( + path=path, + recursive=recursive, + ).model_dump(exclude_none=True), + headers={"content-type": "application/json"}, + ) + return SandboxFileMutationResult(**payload) + + def mkdir( + self, + path: str, + *, + parents: Optional[bool] = None, + mode: Optional[str] = None, + ): + payload = self._transport.request_json( + "/sandbox/files/mkdir", + method="POST", + json_body={ + "path": path, + "parents": parents, + "mode": mode, + }, + headers={"content-type": "application/json"}, + ) + return SandboxFileMutationResult(**payload) + + def move( + self, + *, + source: str, + destination: str, + overwrite: Optional[bool] = None, + ) -> SandboxFileMoveCopyResult: + payload = self._transport.request_json( + "/sandbox/files/move", + method="POST", + json_body={ + "from": source, + "to": destination, + "overwrite": overwrite, + }, + headers={"content-type": 
"application/json"}, + ) + return SandboxFileMoveCopyResult(**payload) + + def copy( + self, + *, + source: str, + destination: str, + recursive: Optional[bool] = None, + overwrite: Optional[bool] = None, + ) -> SandboxFileMoveCopyResult: + payload = self._transport.request_json( + "/sandbox/files/copy", + method="POST", + json_body={ + "from": source, + "to": destination, + "recursive": recursive, + "overwrite": overwrite, + }, + headers={"content-type": "application/json"}, + ) + return SandboxFileMoveCopyResult(**payload) + + def chmod(self, *, path: str, mode: str, recursive: Optional[bool] = None): + payload = self._transport.request_json( + "/sandbox/files/chmod", + method="POST", + json_body=SandboxFileChmodParams( + path=path, + mode=mode, + recursive=recursive, + ).model_dump(exclude_none=True), + headers={"content-type": "application/json"}, + ) + return SandboxFileMutationResult(**payload) + + def chown( + self, + *, + path: str, + uid: Optional[int] = None, + gid: Optional[int] = None, + recursive: Optional[bool] = None, + ): + payload = self._transport.request_json( + "/sandbox/files/chown", + method="POST", + json_body=SandboxFileChownParams( + path=path, + uid=uid, + gid=gid, + recursive=recursive, + ).model_dump(exclude_none=True), + headers={"content-type": "application/json"}, + ) + return SandboxFileMutationResult(**payload) + + def watch(self, path: str, *, recursive: Optional[bool] = None): + payload = self._transport.request_json( + "/sandbox/files/watch", + method="POST", + json_body={ + "path": path, + "recursive": recursive, + }, + headers={"content-type": "application/json"}, + ) + return SandboxFileWatchHandle( + self._transport, + self._get_connection_info, + SandboxFileWatchStatus(**payload["watch"]), + ) + + def get_watch( + self, watch_id: str, include_events: bool = False + ) -> SandboxFileWatchHandle: + payload = self._transport.request_json( + f"/sandbox/files/watch/{watch_id}", + params={"includeEvents": True} if include_events 
else None, + ) + return SandboxFileWatchHandle( + self._transport, + self._get_connection_info, + SandboxFileWatchStatus(**payload["watch"]), + ) + + def upload_url( + self, + path: str, + *, + expires_in_seconds: Optional[int] = None, + one_time: Optional[bool] = None, + ) -> SandboxPresignedUrl: + payload = self._transport.request_json( + "/sandbox/files/presign-upload", + method="POST", + json_body=SandboxPresignFileParams( + path=path, + expires_in_seconds=expires_in_seconds, + one_time=one_time, + ).model_dump(exclude_none=True, by_alias=True), + headers={"content-type": "application/json"}, + ) + return SandboxPresignedUrl(**payload) + + def download_url( + self, + path: str, + *, + expires_in_seconds: Optional[int] = None, + one_time: Optional[bool] = None, + ) -> SandboxPresignedUrl: + payload = self._transport.request_json( + "/sandbox/files/presign-download", + method="POST", + json_body=SandboxPresignFileParams( + path=path, + expires_in_seconds=expires_in_seconds, + one_time=one_time, + ).model_dump(exclude_none=True, by_alias=True), + headers={"content-type": "application/json"}, + ) + return SandboxPresignedUrl(**payload) + + def _write( + self, + path: str, + data: str, + *, + append: Optional[bool] = None, + mode: Optional[str] = None, + encoding: str, + ): + payload = self._transport.request_json( + "/sandbox/files/write", + method="POST", + json_body={ + "path": path, + "data": data, + "append": append, + "mode": mode, + "encoding": encoding, + }, + headers={"content-type": "application/json"}, + ) + return SandboxFileTransferResult(**payload) + + +class SandboxTerminalConnection: + def __init__(self, websocket): + self._websocket = websocket + + def events(self): + while True: + try: + message = self._websocket.recv() + except ConnectionClosed: + break + + if isinstance(message, bytes): + message = message.decode("utf-8") + parsed = json.loads(message) + if parsed["type"] == "output": + raw = base64.b64decode(parsed["data"]) + yield 
SandboxTerminalOutputEvent( + type="output", + seq=parsed["seq"], + data=raw.decode("utf-8", errors="replace"), + raw=raw, + timestamp=parsed["timestamp"], + ) + elif parsed["type"] == "exit": + yield SandboxTerminalExitEvent( + type="exit", + status=SandboxTerminalStatus(**parsed["status"]), + ) + + def write(self, data: Union[str, bytes, bytearray]) -> None: + payload = { + "type": "input", + "data": data if isinstance(data, str) else base64.b64encode(bytes(data)).decode("ascii"), + } + if not isinstance(data, str): + payload["encoding"] = "base64" + self._websocket.send(json.dumps(payload)) + + def resize(self, rows: int, cols: int) -> None: + self._websocket.send( + json.dumps( + { + "type": "resize", + "rows": rows, + "cols": cols, + } + ) + ) + + def close(self) -> None: + self._websocket.close() + + +class SandboxTerminalHandle: + def __init__(self, transport: RuntimeTransport, get_connection_info, status): + self._transport = transport + self._get_connection_info = get_connection_info + self._status = status + + @property + def id(self) -> str: + return self._status.id + + @property + def current(self) -> SandboxTerminalStatus: + return _copy_model(self._status) + + def to_dict(self): + return self._status.model_dump() + + def to_json(self): + return self.to_dict() + + def refresh(self, include_output: bool = False) -> "SandboxTerminalHandle": + payload = self._transport.request_json( + f"/sandbox/pty/{self.id}", + params={"includeOutput": True} if include_output else None, + ) + self._status = SandboxTerminalStatus(**payload["pty"]) + return self + + def wait( + self, + timeout_ms: Optional[int] = None, + include_output: Optional[bool] = None, + ) -> SandboxTerminalStatus: + payload = self._transport.request_json( + f"/sandbox/pty/{self.id}/wait", + method="POST", + json_body=SandboxTerminalWaitParams( + timeout_ms=timeout_ms, + include_output=include_output, + ).model_dump(exclude_none=True, by_alias=True), + headers={"content-type": "application/json"}, 
+ ) + self._status = SandboxTerminalStatus(**payload["pty"]) + return self.current + + def signal(self, signal: Optional[str] = None) -> SandboxTerminalStatus: + payload = self._transport.request_json( + f"/sandbox/pty/{self.id}/kill", + method="POST", + json_body={"signal": signal}, + headers={"content-type": "application/json"}, + ) + self._status = SandboxTerminalStatus(**payload["pty"]) + return self.current + + def kill( + self, + signal: Optional[str] = None, + *, + timeout_ms: Optional[int] = None, + ) -> SandboxTerminalStatus: + self.signal(signal) + if timeout_ms is None: + timeout_ms = int(DEFAULT_TERMINAL_KILL_WAIT_SECONDS * 1000) + return self.wait(timeout_ms=timeout_ms) + + def resize(self, rows: int, cols: int) -> SandboxTerminalStatus: + payload = self._transport.request_json( + f"/sandbox/pty/{self.id}/resize", + method="POST", + json_body={"rows": rows, "cols": cols}, + headers={"content-type": "application/json"}, + ) + self._status = SandboxTerminalStatus(**payload["pty"]) + return self.current + + def attach(self) -> SandboxTerminalConnection: + connection = self._get_connection_info() + target = to_websocket_transport_target( + connection.base_url, + f"/sandbox/pty/{self.id}/ws?sessionId={connection.sandbox_id}", + ) + headers = build_headers(connection.token, host_header=target.host_header) + connect_kwargs = {} + if target.connect_host is not None and target.connect_port is not None: + connect_kwargs["sock"] = socket.create_connection( + (target.connect_host, target.connect_port), + timeout=self._transport._timeout, + ) + + try: + websocket = sync_ws_connect( + target.url, + additional_headers=headers, + open_timeout=self._transport._timeout, + **connect_kwargs, + ) + except BaseException as error: + raise _normalize_websocket_error(error) + + return SandboxTerminalConnection(websocket) + + +class SandboxTerminalApi: + def __init__(self, transport: RuntimeTransport, get_connection_info): + self._transport = transport + 
class SandboxTerminalApi:
    """Entry points for creating and looking up sandbox PTYs."""

    def __init__(self, transport: RuntimeTransport, get_connection_info):
        self._transport = transport
        self._get_connection_info = get_connection_info

    def create(
        self,
        input: Union[SandboxTerminalCreateParams, Dict[str, object]],
    ) -> SandboxTerminalHandle:
        """Spawn a new PTY and return a handle to it."""
        if isinstance(input, SandboxTerminalCreateParams):
            create_params = input
        else:
            create_params = SandboxTerminalCreateParams(**input)
        payload = self._transport.request_json(
            "/sandbox/pty",
            method="POST",
            json_body=create_params.model_dump(exclude_none=True, by_alias=True),
            headers={"content-type": "application/json"},
        )
        return self._wrap(payload)

    def get(self, terminal_id: str, include_output: bool = False) -> SandboxTerminalHandle:
        """Look up an existing PTY by id."""
        payload = self._transport.request_json(
            f"/sandbox/pty/{terminal_id}",
            params={"includeOutput": True} if include_output else None,
        )
        return self._wrap(payload)

    def _wrap(self, payload) -> SandboxTerminalHandle:
        # Shared handle construction for create/get responses.
        return SandboxTerminalHandle(
            self._transport,
            self._get_connection_info,
            SandboxTerminalStatus(**payload["pty"]),
        )


class SandboxHandle:
    """Live view of one sandbox: metadata plus process/file/terminal APIs."""

    def __init__(self, service: "SandboxManager", detail: SandboxDetail):
        self._service = service
        self._detail = detail
        self._runtime_session = self._to_runtime_session(detail)
        self._transport = RuntimeTransport(
            self._resolve_runtime_connection,
            service.runtime_timeout,
        )
        self.processes = SandboxProcessesApi(self._transport)
        self.files = SandboxFilesApi(self._transport, self._resolve_runtime_socket_info)
        self.terminal = SandboxTerminalApi(
            self._transport,
            self._resolve_runtime_socket_info,
        )
        # Alias kept for parity with the PTY terminology used by the API.
        self.pty = self.terminal

    @property
    def id(self) -> str:
        return self._detail.id

    @property
    def status(self) -> str:
        return self._detail.status

    @property
    def region(self):
        return self._detail.region

    @property
    def runtime(self):
        return self._detail.runtime

    @property
    def token_expires_at(self):
        return self._detail.token_expires_at

    @property
    def session_url(self) -> str:
        return self._detail.session_url

    def to_dict(self):
        return self._detail.model_dump()

    def to_json(self):
        return self.to_dict()

    def info(self) -> SandboxDetail:
        """Fetch fresh detail from the control plane and return a copy."""
        latest = self._service.get_detail(self.id)
        self._hydrate(latest)
        return _copy_model(self._detail)

    def refresh(self) -> "SandboxHandle":
        """Like info(), but fluent."""
        self.info()
        return self

    def connect(self) -> "SandboxHandle":
        """Force a fresh runtime session (new token) for this sandbox."""
        self.create_runtime_session(force_refresh=True)
        return self

    def stop(self) -> BasicResponse:
        """Stop the sandbox and drop any cached runtime session."""
        result = self._service.stop(self.id)
        self._clear_runtime_session("closed")
        return result

    def create_runtime_session(
        self, force_refresh: bool = False
    ) -> SandboxRuntimeSession:
        """Return a valid runtime session, reusing the cache when fresh enough."""
        self._assert_runtime_available()
        cached = self._runtime_session
        if (
            not force_refresh
            and cached is not None
            and not _expires_within_buffer(cached.token_expires_at)
        ):
            return _copy_model(cached)

        session = self._service.get_runtime_session(self.id)
        self._apply_runtime_session(session)
        return _copy_model(session)

    def exec(self, input: Union[str, SandboxExecParams, Dict[str, object]]):
        """Run a command; accepts a bare command string, params, or a dict."""
        if isinstance(input, SandboxExecParams):
            exec_params = input
        elif isinstance(input, str):
            exec_params = SandboxExecParams(command=input)
        else:
            exec_params = SandboxExecParams(**input)
        return self.processes.exec(exec_params)

    def get_process(self, process_id: str) -> SandboxProcessHandle:
        return self.processes.get(process_id)

    def _hydrate(self, detail: SandboxDetail) -> None:
        # Replace cached detail and rebuild the derived runtime session.
        self._detail = detail
        self._runtime_session = self._to_runtime_session(detail)

    def _resolve_runtime_connection(self, force_refresh: bool = False) -> RuntimeConnection:
        session = self.create_runtime_session(force_refresh=force_refresh)
        return RuntimeConnection(
            sandbox_id=self.id,
            base_url=session.runtime.base_url,
            token=session.token,
        )

    def _resolve_runtime_socket_info(self) -> RuntimeConnection:
        session = self.create_runtime_session()
        return RuntimeConnection(
            sandbox_id=self.id,
            base_url=session.runtime.base_url,
            token=session.token,
        )

    def _apply_runtime_session(self, session: SandboxRuntimeSession) -> None:
        # Keep the cached detail in sync with the freshly issued session.
        self._runtime_session = _copy_model(session)
        self._detail = self._detail.model_copy(
            update={
                "status": session.status,
                "region": session.region,
                "runtime": session.runtime,
                "token": session.token,
                "token_expires_at": session.token_expires_at,
            }
        )

    def _clear_runtime_session(self, status: Optional[str] = None) -> None:
        # Drop token material; optionally transition the cached status.
        self._runtime_session = None
        self._detail = self._detail.model_copy(
            update={
                "status": status or self._detail.status,
                "token": None,
                "token_expires_at": None,
            }
        )

    def _assert_runtime_available(self) -> None:
        if self._detail.status in {"closed", "error"}:
            raise HyperbrowserError(
                f"Sandbox {self.id} is not running",
                status_code=409,
                code="sandbox_not_running",
                retryable=False,
                service="runtime",
            )

    @staticmethod
    def _to_runtime_session(detail: SandboxDetail) -> Optional[SandboxRuntimeSession]:
        # A detail without a token cannot seed a runtime session cache.
        if not detail.token:
            return None
        return SandboxRuntimeSession(
            sandbox_id=detail.id,
            status=detail.status,
            region=detail.region,
            token=detail.token,
            token_expires_at=detail.token_expires_at,
            runtime=detail.runtime,
        )
class SandboxManager:
    """Control-plane client for sandbox CRUD and runtime-session issuance."""

    def __init__(self, client):
        self._client = client
        self.runtime_timeout = getattr(client, "timeout", 30)

    def create(self, params: CreateSandboxParams) -> SandboxHandle:
        """Create a sandbox and return an attached handle."""
        return self.attach(self._create_detail(params))

    def start_from_snapshot(
        self, params: StartSandboxFromSnapshotParams
    ) -> SandboxHandle:
        """Start a sandbox from a snapshot and return an attached handle."""
        return self.attach(self._start_from_snapshot_detail(params))

    def get(self, sandbox_id: str) -> SandboxHandle:
        """Fetch sandbox detail and wrap it in a handle."""
        return self.attach(self.get_detail(sandbox_id))

    def connect(self, sandbox_id: str) -> SandboxHandle:
        """Fetch a handle and eagerly establish a runtime session."""
        handle = self.get(sandbox_id)
        handle.connect()
        return handle

    def list(
        self, params: Optional[SandboxListParams] = None
    ) -> SandboxListResponse:
        """List sandboxes, applying any non-None filters."""
        query = (params or SandboxListParams()).model_dump(
            exclude_none=True, by_alias=True
        )
        payload = self._request("GET", "/sandboxes", params=query)
        return SandboxListResponse(**payload)

    def stop(self, sandbox_id: str) -> BasicResponse:
        payload = self._request("POST", f"/sandboxes/{sandbox_id}/stop")
        return BasicResponse(**payload)

    def get_runtime_session(self, sandbox_id: str) -> SandboxRuntimeSession:
        payload = self._request("POST", f"/sandboxes/{sandbox_id}/runtime-session")
        return SandboxRuntimeSession(**payload)

    def get_detail(self, sandbox_id: str) -> SandboxDetail:
        payload = self._request("GET", f"/sandboxes/{sandbox_id}")
        return SandboxDetail(**payload)

    def attach(self, detail: SandboxDetail) -> SandboxHandle:
        """Wrap an already-fetched detail in a handle."""
        return SandboxHandle(self, detail)

    def _create_detail(self, params: CreateSandboxParams) -> SandboxDetail:
        payload = self._request(
            "POST",
            "/sandboxes",
            data=params.model_dump(exclude_none=True, by_alias=True),
        )
        return SandboxDetail(**payload)

    def _start_from_snapshot_detail(
        self, params: StartSandboxFromSnapshotParams
    ) -> SandboxDetail:
        payload = self._request(
            "POST",
            "/sandboxes/startFromSnapshot",
            data=params.model_dump(exclude_none=True, by_alias=True),
        )
        return SandboxDetail(**payload)

    def _request(
        self,
        method: str,
        path: str,
        *,
        params: Optional[Dict[str, object]] = None,
        data: Optional[Dict[str, object]] = None,
    ):
        """Issue a control-plane request and return the parsed JSON body.

        Network failures are normalized into HyperbrowserError; non-2xx
        responses raise via ensure_response_ok.
        """
        query = {
            key: value for key, value in (params or {}).items() if value is not None
        }
        try:
            response = self._client.transport.client.request(
                method,
                self._client._build_url(path),
                params=query,
                json=data,
            )
        except BaseException as error:
            raise normalize_network_error(
                error,
                "control",
                "Unknown error occurred",
            )

        ensure_response_ok(response, "control")
        return parse_json_response(response, "control")
b/hyperbrowser/client/sync.py @@ -9,6 +9,7 @@ from .managers.sync_manager.extension import ExtensionManager from .managers.sync_manager.extract import ExtractManager from .managers.sync_manager.profile import ProfileManager +from .managers.sync_manager.sandbox import SandboxManager from .managers.sync_manager.scrape import ScrapeManager from .managers.sync_manager.session import SessionManager from .managers.sync_manager.team import TeamManager @@ -26,6 +27,7 @@ def __init__( timeout: Optional[int] = 30, ): super().__init__(SyncTransport, config, api_key, base_url) + self.timeout = timeout or 30 self.transport.client.timeout = timeout self.sessions = SessionManager(self) self.web = WebManager(self) @@ -37,6 +39,7 @@ def __init__( self.agents = Agents(self) self.team = TeamManager(self) self.computer_action = ComputerActionManager(self) + self.sandboxes = SandboxManager(self) def close(self) -> None: self.transport.close() diff --git a/hyperbrowser/exceptions.py b/hyperbrowser/exceptions.py index 906a138a..e07b7996 100644 --- a/hyperbrowser/exceptions.py +++ b/hyperbrowser/exceptions.py @@ -1,5 +1,7 @@ # exceptions.py -from typing import Optional, Any +from typing import Any, Literal, Optional + +HyperbrowserService = Literal["control", "runtime"] class HyperbrowserError(Exception): @@ -11,11 +13,23 @@ def __init__( status_code: Optional[int] = None, response: Optional[Any] = None, original_error: Optional[Exception] = None, + code: Optional[str] = None, + request_id: Optional[str] = None, + retryable: bool = False, + service: Optional[HyperbrowserService] = None, + details: Optional[Any] = None, + cause: Optional[Any] = None, ): super().__init__(message) self.status_code = status_code self.response = response self.original_error = original_error + self.code = code + self.request_id = request_id + self.retryable = retryable + self.service = service + self.details = details + self.cause = cause if cause is not None else original_error def __str__(self) -> str: 
"""Custom string representation to show a cleaner error message""" diff --git a/hyperbrowser/models/__init__.py b/hyperbrowser/models/__init__.py index ecd4c34e..06039127 100644 --- a/hyperbrowser/models/__init__.py +++ b/hyperbrowser/models/__init__.py @@ -235,6 +235,64 @@ ImageCaptchaParam, UpdateSessionProfileParams, ) +from .sandbox import ( + SandboxStatus, + SandboxRegion, + SandboxRuntimeTarget, + Sandbox, + SandboxDetail, + SandboxRuntimeSession, + CreateSandboxParams, + StartSandboxFromSnapshotParams, + SandboxListParams, + SandboxListResponse, + SandboxProcessStatus, + SandboxExecParams, + SandboxProcessSummary, + SandboxProcessResult, + SandboxProcessListParams, + SandboxProcessListResponse, + SandboxProcessWaitParams, + SandboxProcessStdinParams, + SandboxProcessOutputEvent, + SandboxProcessExitEvent, + SandboxProcessStreamEvent, + SandboxFileEntry, + SandboxFileListParams, + SandboxFileListResponse, + SandboxFileReadParams, + SandboxFileReadResult, + SandboxFileWriteTextParams, + SandboxFileWriteBytesParams, + SandboxFileWriteResult, + SandboxFileUploadParams, + SandboxFileDeleteParams, + SandboxFileMkdirParams, + SandboxFileMoveParams, + SandboxFileCopyParams, + SandboxFileChmodParams, + SandboxFileChownParams, + SandboxFileMutationResult, + SandboxFileTransferResult, + SandboxFileMoveCopyResult, + SandboxFileWatchParams, + SandboxFileWatchEvent, + SandboxFileWatchStatus, + SandboxFileWatchRoute, + SandboxFileWatchEventsParams, + SandboxFileWatchEventMessage, + SandboxFileWatchDoneEvent, + SandboxFileWatchStreamEvent, + SandboxPresignFileParams, + SandboxPresignedUrl, + SandboxTerminalCreateParams, + SandboxTerminalStatus, + SandboxTerminalWaitParams, + SandboxTerminalKillParams, + SandboxTerminalOutputEvent, + SandboxTerminalExitEvent, + SandboxTerminalEvent, +) from .team import TeamCreditInfo __all__ = [ @@ -403,6 +461,63 @@ "UploadFileResponse", "ImageCaptchaParam", "UpdateSessionProfileParams", + # sandbox + "SandboxStatus", + "SandboxRegion", + 
"SandboxRuntimeTarget", + "Sandbox", + "SandboxDetail", + "SandboxRuntimeSession", + "CreateSandboxParams", + "StartSandboxFromSnapshotParams", + "SandboxListParams", + "SandboxListResponse", + "SandboxProcessStatus", + "SandboxExecParams", + "SandboxProcessSummary", + "SandboxProcessResult", + "SandboxProcessListParams", + "SandboxProcessListResponse", + "SandboxProcessWaitParams", + "SandboxProcessStdinParams", + "SandboxProcessOutputEvent", + "SandboxProcessExitEvent", + "SandboxProcessStreamEvent", + "SandboxFileEntry", + "SandboxFileListParams", + "SandboxFileListResponse", + "SandboxFileReadParams", + "SandboxFileReadResult", + "SandboxFileWriteTextParams", + "SandboxFileWriteBytesParams", + "SandboxFileWriteResult", + "SandboxFileUploadParams", + "SandboxFileDeleteParams", + "SandboxFileMkdirParams", + "SandboxFileMoveParams", + "SandboxFileCopyParams", + "SandboxFileChmodParams", + "SandboxFileChownParams", + "SandboxFileMutationResult", + "SandboxFileTransferResult", + "SandboxFileMoveCopyResult", + "SandboxFileWatchParams", + "SandboxFileWatchEvent", + "SandboxFileWatchStatus", + "SandboxFileWatchRoute", + "SandboxFileWatchEventsParams", + "SandboxFileWatchEventMessage", + "SandboxFileWatchDoneEvent", + "SandboxFileWatchStreamEvent", + "SandboxPresignFileParams", + "SandboxPresignedUrl", + "SandboxTerminalCreateParams", + "SandboxTerminalStatus", + "SandboxTerminalWaitParams", + "SandboxTerminalKillParams", + "SandboxTerminalOutputEvent", + "SandboxTerminalExitEvent", + "SandboxTerminalEvent", # team "TeamCreditInfo", # computer action diff --git a/hyperbrowser/models/sandbox.py b/hyperbrowser/models/sandbox.py new file mode 100644 index 00000000..a9882e0c --- /dev/null +++ b/hyperbrowser/models/sandbox.py @@ -0,0 +1,453 @@ +from datetime import datetime +from typing import Dict, List, Literal, Optional, Union + +from pydantic import BaseModel, ConfigDict, Field, field_validator, model_validator + +from .consts import SessionRegion +from .session import 
BasicResponse, SessionLaunchState, SessionStatus

# Sandboxes reuse the session status vocabulary from the control plane.
SandboxStatus = SessionStatus
SandboxRegion = Literal[
    "us-central",
    "asia-south",
    "us-dev",
    "europe-west",
    "us-west",
    "us-east",
    "us",
]
SandboxProcessStatus = Literal[
    "queued",
    "running",
    "exited",
    "failed",
    "killed",
    "timed_out",
]
SandboxFileWatchRoute = Literal["ws", "stream"]
SandboxFileEncoding = Literal["utf8", "base64"]


class SandboxBaseModel(BaseModel):
    """Base for sandbox models: accept snake_case names and camelCase aliases."""

    model_config = ConfigDict(populate_by_name=True)


def _parse_optional_int(value):
    """Coerce API numeric strings to int; treat None/'' as missing."""
    if value is None or isinstance(value, int):
        return value
    if isinstance(value, str):
        if value.strip() == "":
            return None
        return int(value)
    return value


def _parse_optional_datetime(value):
    """Map None/'' to None; actual datetime parsing is left to pydantic."""
    return None if value in (None, "") else value


class SandboxRuntimeTarget(SandboxBaseModel):
    """Where runtime (data-plane) requests for a sandbox should be sent."""

    transport: Literal["regional_proxy"]
    host: str
    base_url: str = Field(alias="baseUrl")


class Sandbox(SandboxBaseModel):
    """Control-plane record for a sandbox."""

    id: str
    team_id: str = Field(alias="teamId")
    status: SandboxStatus
    end_time: Optional[int] = Field(default=None, alias="endTime")
    start_time: Optional[int] = Field(default=None, alias="startTime")
    created_at: datetime = Field(alias="createdAt")
    updated_at: datetime = Field(alias="updatedAt")
    close_reason: Optional[str] = Field(default=None, alias="closeReason")
    data_consumed: Optional[int] = Field(default=None, alias="dataConsumed")
    proxy_data_consumed: Optional[int] = Field(
        default=None, alias="proxyDataConsumed"
    )
    usage_type: Optional[str] = Field(default=None, alias="usageType")
    job_id: Optional[str] = Field(default=None, alias="jobId")
    launch_state: Optional[SessionLaunchState] = Field(
        default=None, alias="launchState"
    )
    credits_used: Optional[float] = Field(default=None, alias="creditsUsed")
    region: SandboxRegion
    session_url: str = Field(alias="sessionUrl")
    duration: int
    proxy_bytes_used: Optional[int] = Field(default=None, alias="proxyBytesUsed")
    runtime: SandboxRuntimeTarget

    # The API may serialize these counters as strings or empty strings.
    @field_validator(
        "end_time",
        "start_time",
        "data_consumed",
        "proxy_data_consumed",
        "proxy_bytes_used",
        mode="before",
    )
    @classmethod
    def parse_optional_int_fields(cls, value):
        return _parse_optional_int(value)


class SandboxDetail(Sandbox):
    """Sandbox record enriched with runtime token material (when issued)."""

    token: Optional[str] = None
    token_expires_at: Optional[datetime] = Field(default=None, alias="tokenExpiresAt")

    @field_validator("token_expires_at", mode="before")
    @classmethod
    def parse_token_expires_at(cls, value):
        return _parse_optional_datetime(value)


class SandboxRuntimeSession(SandboxBaseModel):
    """Short-lived credentials + routing for talking to the sandbox runtime."""

    sandbox_id: str = Field(alias="sandboxId")
    status: SandboxStatus
    region: SandboxRegion
    token: str
    token_expires_at: Optional[datetime] = Field(default=None, alias="tokenExpiresAt")
    runtime: SandboxRuntimeTarget

    @field_validator("token_expires_at", mode="before")
    @classmethod
    def parse_token_expires_at(cls, value):
        return _parse_optional_datetime(value)


class CreateSandboxParams(SandboxBaseModel):
    """Parameters for creating a sandbox from a snapshot reference."""

    sandbox_name: str = Field(alias="sandboxName")
    region: Optional[SandboxRegion] = None
    enable_recording: Optional[bool] = Field(default=None, alias="enableRecording")
    timeout_minutes: Optional[int] = Field(default=None, alias="timeoutMinutes")
    snapshot_id: Optional[str] = Field(default=None, alias="snapshotId")
    snapshot_name: Optional[str] = Field(default=None, alias="snapshotName")
    snapshot_namespace: Optional[str] = Field(
        default=None, alias="snapshotNamespace"
    )

    @model_validator(mode="after")
    def validate_snapshot_selector(self):
        # Exactly one selector: both-set and neither-set are equally invalid.
        if bool(self.snapshot_id) == bool(self.snapshot_name):
            raise ValueError("Exactly one of snapshot_id or snapshot_name is required")
        return self


class StartSandboxFromSnapshotParams(CreateSandboxParams):
    """Alias of CreateSandboxParams for the startFromSnapshot endpoint."""


class SandboxListParams(SandboxBaseModel):
    status: Optional[SandboxStatus] = None
    page: Optional[int] = None
    limit: Optional[int] = None
    search: Optional[str] = None


class SandboxListResponse(SandboxBaseModel):
    sandboxes: List[Sandbox]
    total_count: int = Field(alias="totalCount")
    page: int
    per_page: int = Field(alias="perPage")


class SandboxExecParams(SandboxBaseModel):
    """One-shot command execution parameters."""

    command: str
    args: Optional[List[str]] = None
    cwd: Optional[str] = None
    env: Optional[Dict[str, str]] = None
    timeout_ms: Optional[int] = Field(default=None, alias="timeoutMs")
    timeout_sec: Optional[int] = Field(default=None, alias="timeoutSec")
    use_shell: Optional[bool] = Field(default=None, alias="useShell")


class SandboxProcessSummary(SandboxBaseModel):
    # NOTE: the process endpoints use snake_case wire names, unlike the
    # camelCase used elsewhere; aliases are kept explicit for that reason.
    id: str
    status: SandboxProcessStatus
    command: str
    args: Optional[List[str]] = None
    cwd: str
    pid: Optional[int] = None
    exit_code: Optional[int] = Field(default=None, alias="exit_code")
    started_at: int = Field(alias="started_at")
    completed_at: Optional[int] = Field(default=None, alias="completed_at")


class SandboxProcessResult(SandboxBaseModel):
    id: str
    status: SandboxProcessStatus
    exit_code: Optional[int] = Field(default=None, alias="exit_code")
    stdout: str
    stderr: str
    started_at: int = Field(alias="started_at")
    completed_at: Optional[int] = Field(default=None, alias="completed_at")
    error: Optional[str] = None


class SandboxProcessListParams(SandboxBaseModel):
    status: Optional[Union[SandboxProcessStatus, List[SandboxProcessStatus]]] = None
    limit: Optional[int] = None
    cursor: Optional[Union[str, int]] = None
    created_after: Optional[int] = Field(default=None, alias="created_after")
    created_before: Optional[int] = Field(default=None, alias="created_before")


class SandboxProcessListResponse(SandboxBaseModel):
    data: List[SandboxProcessSummary]
    next_cursor: Optional[str] = Field(default=None, alias="next_cursor")


class SandboxProcessWaitParams(SandboxBaseModel):
    timeout_ms: Optional[int] = Field(default=None, alias="timeoutMs")
    timeout_sec: Optional[int] = Field(default=None, alias="timeoutSec")
class SandboxProcessStdinParams(SandboxBaseModel):
    """Stdin chunk for a running process; eof closes the stream."""

    data: Optional[Union[str, bytes]] = None
    encoding: Optional[SandboxFileEncoding] = None
    eof: Optional[bool] = None


class SandboxProcessOutputEvent(SandboxBaseModel):
    type: Literal["stdout", "stderr", "system"]
    seq: int
    data: str
    timestamp: int


class SandboxProcessExitEvent(SandboxBaseModel):
    type: Literal["exit"]
    result: SandboxProcessResult


SandboxProcessStreamEvent = Union[SandboxProcessOutputEvent, SandboxProcessExitEvent]


class SandboxFileEntry(SandboxBaseModel):
    path: str
    name: str
    type: str
    size: int
    mode: str
    mod_time: int = Field(alias="modTime")


class SandboxFileListParams(SandboxBaseModel):
    path: str
    recursive: Optional[bool] = None
    limit: Optional[int] = None
    cursor: Optional[int] = None


class SandboxFileListResponse(SandboxBaseModel):
    path: str
    entries: List[SandboxFileEntry]
    limit: int
    cursor: int
    recursive: bool
    next_cursor: Optional[int] = Field(default=None, alias="nextCursor")


class SandboxFileReadParams(SandboxBaseModel):
    path: str
    offset: Optional[int] = None
    length: Optional[int] = None
    encoding: Optional[SandboxFileEncoding] = None


class SandboxFileReadResult(SandboxBaseModel):
    content: str
    encoding: SandboxFileEncoding
    bytes_read: int = Field(alias="bytesRead")
    truncated: bool
    content_type: Optional[str] = Field(default=None, alias="contentType")


class SandboxFileWriteTextParams(SandboxBaseModel):
    path: str
    data: str
    append: Optional[bool] = None
    mode: Optional[str] = None


class SandboxFileWriteBytesParams(SandboxBaseModel):
    path: str
    data: bytes
    append: Optional[bool] = None
    mode: Optional[str] = None


class SandboxFileWriteResult(SandboxBaseModel):
    bytes_written: int = Field(alias="bytesWritten")
    path: str


class SandboxFileUploadParams(SandboxBaseModel):
    path: str
    data: Union[bytes, str]


class SandboxFileDeleteParams(SandboxBaseModel):
    path: str
    recursive: Optional[bool] = None


class SandboxFileMkdirParams(SandboxBaseModel):
    path: str
    parents: Optional[bool] = None
    mode: Optional[str] = None


class SandboxFileMoveParams(SandboxBaseModel):
    source: str
    destination: str
    overwrite: Optional[bool] = None


class SandboxFileCopyParams(SandboxBaseModel):
    source: str
    destination: str
    recursive: Optional[bool] = None
    overwrite: Optional[bool] = None


class SandboxFileChmodParams(SandboxBaseModel):
    path: str
    mode: str
    recursive: Optional[bool] = None


class SandboxFileChownParams(SandboxBaseModel):
    path: str
    uid: Optional[int] = None
    gid: Optional[int] = None
    recursive: Optional[bool] = None


class SandboxFileMutationResult(SandboxBaseModel):
    path: str


class SandboxFileTransferResult(SandboxBaseModel):
    path: str
    bytes_written: int = Field(alias="bytesWritten")


class SandboxFileMoveCopyResult(SandboxBaseModel):
    # "from" is a Python keyword, hence the alias.
    from_path: str = Field(alias="from")
    to: str


class SandboxFileWatchParams(SandboxBaseModel):
    path: str
    recursive: Optional[bool] = None


class SandboxFileWatchEvent(SandboxBaseModel):
    seq: int
    path: str
    op: str
    timestamp: int


class SandboxFileWatchStatus(SandboxBaseModel):
    id: str
    path: str
    recursive: bool
    active: bool
    error: Optional[str] = None
    created_at: int = Field(alias="createdAt")
    stopped_at: Optional[int] = Field(default=None, alias="stoppedAt")
    oldest_seq: int = Field(default=0, alias="oldestSeq")
    last_seq: int = Field(default=0, alias="lastSeq")
    event_count: int = Field(default=0, alias="eventCount")
    events: Optional[List[SandboxFileWatchEvent]] = None


class SandboxFileWatchEventsParams(SandboxBaseModel):
    cursor: Optional[int] = None
    route: Optional[SandboxFileWatchRoute] = None


class SandboxFileWatchEventMessage(SandboxBaseModel):
    type: Literal["event"]
    event: SandboxFileWatchEvent


class SandboxFileWatchDoneEvent(SandboxBaseModel):
    type: Literal["done"]
    status: SandboxFileWatchStatus


SandboxFileWatchStreamEvent = Union[
    SandboxFileWatchEventMessage,
    SandboxFileWatchDoneEvent,
]


class SandboxPresignFileParams(SandboxBaseModel):
    path: str
    expires_in_seconds: Optional[int] = Field(default=None, alias="expiresInSeconds")
    one_time: Optional[bool] = Field(default=None, alias="oneTime")


class SandboxPresignedUrl(SandboxBaseModel):
    token: str
    path: str
    method: str
    expires_at: int = Field(alias="expiresAt")
    url: str


class SandboxTerminalCreateParams(SandboxBaseModel):
    command: str
    args: Optional[List[str]] = None
    cwd: Optional[str] = None
    env: Optional[Dict[str, str]] = None
    use_shell: Optional[bool] = Field(default=None, alias="useShell")
    rows: Optional[int] = None
    cols: Optional[int] = None
    timeout_ms: Optional[int] = Field(default=None, alias="timeoutMs")


class SandboxTerminalStatus(SandboxBaseModel):
    id: str
    command: str
    args: Optional[List[str]] = None
    cwd: str
    pid: Optional[int] = None
    running: bool
    exit_code: Optional[int] = Field(default=None, alias="exitCode")
    error: Optional[str] = None
    timed_out: Optional[bool] = Field(default=None, alias="timedOut")
    rows: int
    cols: int
    started_at: int = Field(alias="startedAt")
    finished_at: Optional[int] = Field(default=None, alias="finishedAt")


class SandboxTerminalWaitParams(SandboxBaseModel):
    timeout_ms: Optional[int] = Field(default=None, alias="timeoutMs")
    include_output: Optional[bool] = Field(default=None, alias="includeOutput")


class SandboxTerminalKillParams(SandboxBaseModel):
    signal: Optional[str] = None
    timeout_ms: Optional[int] = Field(default=None, alias="timeoutMs")


class SandboxTerminalOutputEvent(SandboxBaseModel):
    type: Literal["output"]
    seq: int
    data: str
    raw: bytes
    timestamp: int


class SandboxTerminalExitEvent(SandboxBaseModel):
    type: Literal["exit"]
    status: SandboxTerminalStatus


SandboxTerminalEvent = Union[SandboxTerminalOutputEvent, SandboxTerminalExitEvent]


# --- hyperbrowser/sandbox_common.py -----------------------------------------
import json
import os
from dataclasses import dataclass
from typing import Any, Dict, Optional, Tuple
from urllib.parse import urljoin, urlsplit, urlunsplit

import httpx

from .exceptions import HyperbrowserError, HyperbrowserService

# Status codes for which a retry is reasonable.
RETRYABLE_STATUS_CODES = {429, 502, 503, 504}
# Refresh runtime tokens this long before their expiry.
RUNTIME_SESSION_REFRESH_BUFFER_MS = 60_000
# Optional dev-only override for where runtime traffic is actually dialed.
REGIONAL_PROXY_DEV_HOST = os.environ.get("REGIONAL_PROXY_DEV_HOST", "").strip()


@dataclass(frozen=True)
class RuntimeConnection:
    """Resolved routing + auth for one sandbox's runtime."""

    sandbox_id: str
    base_url: str
    token: str


@dataclass(frozen=True)
class RuntimeTransportTarget:
    """A concrete URL to dial, with optional Host/socket overrides for dev."""

    url: str
    host_header: Optional[str] = None
    connect_host: Optional[str] = None
    connect_port: Optional[int] = None


def get_request_id(response: "httpx.Response") -> Optional[str]:
    """Pull the request id from either known response header."""
    return response.headers.get("x-request-id") or response.headers.get("request-id")


def is_retryable_network_error(error: BaseException) -> bool:
    """True for transient transport failures worth retrying."""
    return isinstance(
        error,
        (
            httpx.TimeoutException,
            httpx.NetworkError,
            httpx.RemoteProtocolError,
            httpx.ProxyError,
            httpx.ReadError,
            httpx.WriteError,
            httpx.PoolTimeout,
        ),
    )


def parse_error_payload(
    raw_text: str, fallback_message: str
) -> Tuple[str, Optional[str], Any]:
    """Extract (message, code, details) from an error body.

    Non-JSON bodies become the message themselves; empty bodies fall back.
    """
    if not raw_text:
        return fallback_message, None, None

    try:
        parsed = json.loads(raw_text)
    except json.JSONDecodeError:
        return raw_text, None, raw_text

    if isinstance(parsed, dict):
        message = parsed.get("message") or parsed.get("error") or fallback_message
        raw_code = parsed.get("code")
        code = raw_code if isinstance(raw_code, str) else None
        return message, code, parsed

    return fallback_message, None, parsed
def ensure_response_ok(
    response: "httpx.Response",
    service: "HyperbrowserService",
    default_message: Optional[str] = None,
) -> "httpx.Response":
    """Return the response when it is 2xx; otherwise raise HyperbrowserError.

    The error carries the parsed body message/code, request id, service, and
    a retryable flag derived from the status code.
    """
    if response.is_success:
        return response

    fallback = default_message or (
        f"Request failed: {response.status_code} {response.reason_phrase}"
    )
    message, code, details = parse_error_payload(response.text, fallback)
    raise HyperbrowserError(
        message,
        status_code=response.status_code,
        response=response,
        code=code,
        request_id=get_request_id(response),
        retryable=response.status_code in RETRYABLE_STATUS_CODES,
        service=service,
        details=details,
    )


def parse_json_response(
    response: "httpx.Response",
    service: "HyperbrowserService",
    default_message: str = "Failed to parse JSON response",
) -> Any:
    """Decode a JSON body; empty bodies yield {}; bad JSON raises."""
    if not response.content:
        return {}

    try:
        return response.json()
    except json.JSONDecodeError as error:
        raise HyperbrowserError(
            default_message,
            status_code=response.status_code,
            response=response,
            request_id=get_request_id(response),
            retryable=False,
            service=service,
            cause=error,
        )


def has_scheme(value: str) -> bool:
    """True when the string already carries an explicit URL scheme."""
    return "://" in value


def resolve_runtime_transport_target(
    base_url: str, path: str
) -> "RuntimeTransportTarget":
    """Join base_url and path; apply the dev host override when configured.

    With an override, the request is sent to the override netloc while the
    original runtime host is preserved as the Host header.
    """
    root = base_url if base_url.endswith("/") else f"{base_url}/"
    url = urljoin(root, path.lstrip("/"))

    if not REGIONAL_PROXY_DEV_HOST:
        return RuntimeTransportTarget(url=url)

    override_raw = (
        REGIONAL_PROXY_DEV_HOST
        if has_scheme(REGIONAL_PROXY_DEV_HOST)
        else f"{urlsplit(url).scheme}://{REGIONAL_PROXY_DEV_HOST}"
    )
    original = urlsplit(url)
    override = urlsplit(override_raw)
    rewritten = urlunsplit(
        (
            override.scheme or original.scheme,
            override.netloc or original.netloc,
            original.path,
            original.query,
            original.fragment,
        )
    )
    runtime_host = urlsplit(base_url).netloc
    return RuntimeTransportTarget(url=rewritten, host_header=runtime_host)
def to_websocket_transport_target(
    base_url: str, path: str
) -> "RuntimeTransportTarget":
    """Build the ws(s):// target for a runtime path, honoring the dev override.

    The URL keeps the real runtime host; when REGIONAL_PROXY_DEV_HOST is set,
    connect_host/connect_port tell the caller where to actually dial.
    """
    root = base_url if base_url.endswith("/") else f"{base_url}/"
    url = urljoin(root, path.lstrip("/"))
    parts = urlsplit(url)
    # http(s) -> ws(s); any other scheme is passed through unchanged.
    scheme = {"https": "wss", "http": "ws"}.get(parts.scheme, parts.scheme)
    websocket_url = urlunsplit(
        (scheme, parts.netloc, parts.path, parts.query, parts.fragment)
    )

    if not REGIONAL_PROXY_DEV_HOST:
        return RuntimeTransportTarget(url=websocket_url)

    override = urlsplit(
        REGIONAL_PROXY_DEV_HOST
        if has_scheme(REGIONAL_PROXY_DEV_HOST)
        else f"{parts.scheme}://{REGIONAL_PROXY_DEV_HOST}"
    )
    connect_port = override.port
    if connect_port is None:
        if override.scheme in {"https", "wss"}:
            connect_port = 443
        elif override.scheme in {"http", "ws"}:
            connect_port = 80

    return RuntimeTransportTarget(
        url=websocket_url,
        connect_host=override.hostname,
        connect_port=connect_port,
    )


def normalize_network_error(
    error: BaseException,
    service: "HyperbrowserService",
    default_message: str,
) -> "HyperbrowserError":
    """Wrap a transport-level failure in HyperbrowserError.

    Existing HyperbrowserErrors pass through untouched; the retryable flag is
    derived from the underlying exception type.
    """
    if isinstance(error, HyperbrowserError):
        return error

    message = str(error) or default_message
    return HyperbrowserError(
        message,
        retryable=is_retryable_network_error(error),
        service=service,
        cause=error,
        original_error=error if isinstance(error, Exception) else None,
    )


def build_headers(
    token: str,
    extra_headers: Optional[Dict[str, str]] = None,
    host_header: Optional[str] = None,
) -> Dict[str, str]:
    """Assemble runtime request headers.

    Bearer auth is always set. extra_headers are copied with None values
    dropped and values coerced to str. host_header is applied only when the
    caller did not already provide a Host header.
    """
    headers: Dict[str, str] = {
        "Authorization": f"Bearer {token}",
    }
    if extra_headers:
        for key, value in extra_headers.items():
            if value is not None:
                headers[key] = str(value)
    # Fix: HTTP header names are case-insensitive, but the original only
    # checked the exact spellings "Host"/"host", so e.g. "HOST" produced a
    # duplicate Host header. Detect any capitalization.
    if host_header and not any(key.lower() == "host" for key in headers):
        headers["Host"] = host_header
    return headers
changed by hand. +# This file is automatically @generated by Poetry 2.3.2 and should not be changed by hand. [[package]] name = "annotated-types" @@ -6,6 +6,7 @@ version = "0.7.0" description = "Reusable constraint types to use with typing.Annotated" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, @@ -20,6 +21,7 @@ version = "4.5.2" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "anyio-4.5.2-py3-none-any.whl", hash = "sha256:c011ee36bc1e8ba40e5a81cb9df91925c218fe9b778554e0b56a21e1b5d4716f"}, {file = "anyio-4.5.2.tar.gz", hash = "sha256:23009af4ed04ce05991845451e11ef02fc7c5ed29179ac9a420e5ad0ac7ddc5b"}, @@ -33,7 +35,7 @@ typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} [package.extras] doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21.0b1)"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "truststore (>=0.9.1) ; python_version >= \"3.10\"", "uvloop (>=0.21.0b1) ; platform_python_implementation == \"CPython\" and platform_system != \"Windows\""] trio = ["trio (>=0.26.1)"] [[package]] @@ -42,17 +44,33 @@ version = "2024.8.30" description = "Python package for providing Mozilla's CA Bundle." 
optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, ] +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["dev"] +markers = "sys_platform == \"win32\"" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + [[package]] name = "exceptiongroup" version = "1.2.2" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" +groups = ["main", "dev"] +markers = "python_version < \"3.11\"" files = [ {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, @@ -67,6 +85,7 @@ version = "0.14.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, @@ -78,6 +97,7 @@ version = "1.0.7" description = "A minimal low-level HTTP client." 
optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd"}, {file = "httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c"}, @@ -99,6 +119,7 @@ version = "0.28.1" description = "The next generation HTTP client." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad"}, {file = "httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc"}, @@ -111,7 +132,7 @@ httpcore = "==1.*" idna = "*" [package.extras] -brotli = ["brotli", "brotlicffi"] +brotli = ["brotli ; platform_python_implementation == \"CPython\"", "brotlicffi ; platform_python_implementation != \"CPython\""] cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] @@ -123,6 +144,7 @@ version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, @@ -131,23 +153,65 @@ files = [ [package.extras] all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] +[[package]] +name = "iniconfig" +version = "2.1.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760"}, + {file = "iniconfig-2.1.0.tar.gz", hash = 
"sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"}, +] + [[package]] name = "jsonref" version = "1.1.0" description = "jsonref is a library for automatic dereferencing of JSON Reference objects for Python." optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "jsonref-1.1.0-py3-none-any.whl", hash = "sha256:590dc7773df6c21cbf948b5dac07a72a251db28b0238ceecce0a2abfa8ec30a9"}, {file = "jsonref-1.1.0.tar.gz", hash = "sha256:32fe8e1d85af0fdefbebce950af85590b22b60f9e95443176adbde4e1ecea552"}, ] +[[package]] +name = "packaging" +version = "26.0" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "packaging-26.0-py3-none-any.whl", hash = "sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529"}, + {file = "packaging-26.0.tar.gz", hash = "sha256:00243ae351a257117b6a241061796684b084ed1c516a08c48a3f7e147a9d80b4"}, +] + +[[package]] +name = "pluggy" +version = "1.5.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + [[package]] name = "pydantic" version = "2.10.6" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "pydantic-2.10.6-py3-none-any.whl", hash = "sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584"}, {file = "pydantic-2.10.6.tar.gz", hash = "sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236"}, @@ -160,7 +224,7 @@ typing-extensions = ">=4.12.2" [package.extras] email = 
["email-validator (>=2.0.0)"] -timezone = ["tzdata"] +timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""] [[package]] name = "pydantic-core" @@ -168,6 +232,7 @@ version = "2.27.2" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa"}, {file = "pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c"}, @@ -274,12 +339,36 @@ files = [ [package.dependencies] typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" +[[package]] +name = "pytest" +version = "8.3.5" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820"}, + {file = "pytest-8.3.5.tar.gz", hash = "sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=1.5,<2" +tomli = {version = ">=1", markers = "python_version < \"3.11\""} + +[package.extras] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + [[package]] name = "ruff" version = "0.3.7" description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "ruff-0.3.7-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:0e8377cccb2f07abd25e84fc5b2cbe48eeb0fea9f1719cad7caedb061d70e5ce"}, {file = "ruff-0.3.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:15a4d1cc1e64e556fa0d67bfd388fed416b7f3b26d5d1c3e7d192c897e39ba4b"}, @@ -306,23 +395,179 @@ version = "1.3.1" description = "Sniff out which async library your code is running under" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, ] +[[package]] +name = "tomli" +version = "2.4.0" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +markers = "python_version < \"3.11\"" +files = [ + {file = "tomli-2.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b5ef256a3fd497d4973c11bf142e9ed78b150d36f5773f1ca6088c230ffc5867"}, + {file = "tomli-2.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5572e41282d5268eb09a697c89a7bee84fae66511f87533a6f88bd2f7b652da9"}, + {file = "tomli-2.4.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:551e321c6ba03b55676970b47cb1b73f14a0a4dce6a3e1a9458fd6d921d72e95"}, + {file = "tomli-2.4.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5e3f639a7a8f10069d0e15408c0b96a2a828cfdec6fca05296ebcdcc28ca7c76"}, + {file = "tomli-2.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1b168f2731796b045128c45982d3a4874057626da0e2ef1fdd722848b741361d"}, + {file = "tomli-2.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:133e93646ec4300d651839d382d63edff11d8978be23da4cc106f5a18b7d0576"}, + {file = 
"tomli-2.4.0-cp311-cp311-win32.whl", hash = "sha256:b6c78bdf37764092d369722d9946cb65b8767bfa4110f902a1b2542d8d173c8a"}, + {file = "tomli-2.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:d3d1654e11d724760cdb37a3d7691f0be9db5fbdaef59c9f532aabf87006dbaa"}, + {file = "tomli-2.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:cae9c19ed12d4e8f3ebf46d1a75090e4c0dc16271c5bce1c833ac168f08fb614"}, + {file = "tomli-2.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:920b1de295e72887bafa3ad9f7a792f811847d57ea6b1215154030cf131f16b1"}, + {file = "tomli-2.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7d6d9a4aee98fac3eab4952ad1d73aee87359452d1c086b5ceb43ed02ddb16b8"}, + {file = "tomli-2.4.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:36b9d05b51e65b254ea6c2585b59d2c4cb91c8a3d91d0ed0f17591a29aaea54a"}, + {file = "tomli-2.4.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1c8a885b370751837c029ef9bc014f27d80840e48bac415f3412e6593bbc18c1"}, + {file = "tomli-2.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8768715ffc41f0008abe25d808c20c3d990f42b6e2e58305d5da280ae7d1fa3b"}, + {file = "tomli-2.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7b438885858efd5be02a9a133caf5812b8776ee0c969fea02c45e8e3f296ba51"}, + {file = "tomli-2.4.0-cp312-cp312-win32.whl", hash = "sha256:0408e3de5ec77cc7f81960c362543cbbd91ef883e3138e81b729fc3eea5b9729"}, + {file = "tomli-2.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:685306e2cc7da35be4ee914fd34ab801a6acacb061b6a7abca922aaf9ad368da"}, + {file = "tomli-2.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:5aa48d7c2356055feef06a43611fc401a07337d5b006be13a30f6c58f869e3c3"}, + {file = "tomli-2.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:84d081fbc252d1b6a982e1870660e7330fb8f90f676f6e78b052ad4e64714bf0"}, + {file = "tomli-2.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:9a08144fa4cba33db5255f9b74f0b89888622109bd2776148f2597447f92a94e"}, + {file = "tomli-2.4.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c73add4bb52a206fd0c0723432db123c0c75c280cbd67174dd9d2db228ebb1b4"}, + {file = "tomli-2.4.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1fb2945cbe303b1419e2706e711b7113da57b7db31ee378d08712d678a34e51e"}, + {file = "tomli-2.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bbb1b10aa643d973366dc2cb1ad94f99c1726a02343d43cbc011edbfac579e7c"}, + {file = "tomli-2.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4cbcb367d44a1f0c2be408758b43e1ffb5308abe0ea222897d6bfc8e8281ef2f"}, + {file = "tomli-2.4.0-cp313-cp313-win32.whl", hash = "sha256:7d49c66a7d5e56ac959cb6fc583aff0651094ec071ba9ad43df785abc2320d86"}, + {file = "tomli-2.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:3cf226acb51d8f1c394c1b310e0e0e61fecdd7adcb78d01e294ac297dd2e7f87"}, + {file = "tomli-2.4.0-cp313-cp313-win_arm64.whl", hash = "sha256:d20b797a5c1ad80c516e41bc1fb0443ddb5006e9aaa7bda2d71978346aeb9132"}, + {file = "tomli-2.4.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:26ab906a1eb794cd4e103691daa23d95c6919cc2fa9160000ac02370cc9dd3f6"}, + {file = "tomli-2.4.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:20cedb4ee43278bc4f2fee6cb50daec836959aadaf948db5172e776dd3d993fc"}, + {file = "tomli-2.4.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:39b0b5d1b6dd03684b3fb276407ebed7090bbec989fa55838c98560c01113b66"}, + {file = "tomli-2.4.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a26d7ff68dfdb9f87a016ecfd1e1c2bacbe3108f4e0f8bcd2228ef9a766c787d"}, + {file = "tomli-2.4.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:20ffd184fb1df76a66e34bd1b36b4a4641bd2b82954befa32fe8163e79f1a702"}, + {file = 
"tomli-2.4.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:75c2f8bbddf170e8effc98f5e9084a8751f8174ea6ccf4fca5398436e0320bc8"}, + {file = "tomli-2.4.0-cp314-cp314-win32.whl", hash = "sha256:31d556d079d72db7c584c0627ff3a24c5d3fb4f730221d3444f3efb1b2514776"}, + {file = "tomli-2.4.0-cp314-cp314-win_amd64.whl", hash = "sha256:43e685b9b2341681907759cf3a04e14d7104b3580f808cfde1dfdb60ada85475"}, + {file = "tomli-2.4.0-cp314-cp314-win_arm64.whl", hash = "sha256:3d895d56bd3f82ddd6faaff993c275efc2ff38e52322ea264122d72729dca2b2"}, + {file = "tomli-2.4.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:5b5807f3999fb66776dbce568cc9a828544244a8eb84b84b9bafc080c99597b9"}, + {file = "tomli-2.4.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c084ad935abe686bd9c898e62a02a19abfc9760b5a79bc29644463eaf2840cb0"}, + {file = "tomli-2.4.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0f2e3955efea4d1cfbcb87bc321e00dc08d2bcb737fd1d5e398af111d86db5df"}, + {file = "tomli-2.4.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0e0fe8a0b8312acf3a88077a0802565cb09ee34107813bba1c7cd591fa6cfc8d"}, + {file = "tomli-2.4.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:413540dce94673591859c4c6f794dfeaa845e98bf35d72ed59636f869ef9f86f"}, + {file = "tomli-2.4.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:0dc56fef0e2c1c470aeac5b6ca8cc7b640bb93e92d9803ddaf9ea03e198f5b0b"}, + {file = "tomli-2.4.0-cp314-cp314t-win32.whl", hash = "sha256:d878f2a6707cc9d53a1be1414bbb419e629c3d6e67f69230217bb663e76b5087"}, + {file = "tomli-2.4.0-cp314-cp314t-win_amd64.whl", hash = "sha256:2add28aacc7425117ff6364fe9e06a183bb0251b03f986df0e78e974047571fd"}, + {file = "tomli-2.4.0-cp314-cp314t-win_arm64.whl", hash = "sha256:2b1e3b80e1d5e52e40e9b924ec43d81570f0e7d09d11081b797bc4692765a3d4"}, + {file = "tomli-2.4.0-py3-none-any.whl", hash = 
"sha256:1f776e7d669ebceb01dee46484485f43a4048746235e683bcdffacdf1fb4785a"}, + {file = "tomli-2.4.0.tar.gz", hash = "sha256:aa89c3f6c277dd275d8e243ad24f3b5e701491a860d5121f2cdd399fbb31fc9c"}, +] + [[package]] name = "typing-extensions" version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] +[[package]] +name = "websockets" +version = "13.1" +description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "websockets-13.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f48c749857f8fb598fb890a75f540e3221d0976ed0bf879cf3c7eef34151acee"}, + {file = "websockets-13.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c7e72ce6bda6fb9409cc1e8164dd41d7c91466fb599eb047cfda72fe758a34a7"}, + {file = "websockets-13.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f779498eeec470295a2b1a5d97aa1bc9814ecd25e1eb637bd9d1c73a327387f6"}, + {file = "websockets-13.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4676df3fe46956fbb0437d8800cd5f2b6d41143b6e7e842e60554398432cf29b"}, + {file = "websockets-13.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7affedeb43a70351bb811dadf49493c9cfd1ed94c9c70095fd177e9cc1541fa"}, + {file = "websockets-13.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1971e62d2caa443e57588e1d82d15f663b29ff9dfe7446d9964a4b6f12c1e700"}, + {file = "websockets-13.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:5f2e75431f8dc4a47f31565a6e1355fb4f2ecaa99d6b89737527ea917066e26c"}, + {file = "websockets-13.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:58cf7e75dbf7e566088b07e36ea2e3e2bd5676e22216e4cad108d4df4a7402a0"}, + {file = "websockets-13.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c90d6dec6be2c7d03378a574de87af9b1efea77d0c52a8301dd831ece938452f"}, + {file = "websockets-13.1-cp310-cp310-win32.whl", hash = "sha256:730f42125ccb14602f455155084f978bd9e8e57e89b569b4d7f0f0c17a448ffe"}, + {file = "websockets-13.1-cp310-cp310-win_amd64.whl", hash = "sha256:5993260f483d05a9737073be197371940c01b257cc45ae3f1d5d7adb371b266a"}, + {file = "websockets-13.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:61fc0dfcda609cda0fc9fe7977694c0c59cf9d749fbb17f4e9483929e3c48a19"}, + {file = "websockets-13.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ceec59f59d092c5007e815def4ebb80c2de330e9588e101cf8bd94c143ec78a5"}, + {file = "websockets-13.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c1dca61c6db1166c48b95198c0b7d9c990b30c756fc2923cc66f68d17dc558fd"}, + {file = "websockets-13.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:308e20f22c2c77f3f39caca508e765f8725020b84aa963474e18c59accbf4c02"}, + {file = "websockets-13.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62d516c325e6540e8a57b94abefc3459d7dab8ce52ac75c96cad5549e187e3a7"}, + {file = "websockets-13.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87c6e35319b46b99e168eb98472d6c7d8634ee37750d7693656dc766395df096"}, + {file = "websockets-13.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5f9fee94ebafbc3117c30be1844ed01a3b177bb6e39088bc6b2fa1dc15572084"}, + {file = "websockets-13.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:7c1e90228c2f5cdde263253fa5db63e6653f1c00e7ec64108065a0b9713fa1b3"}, + {file = 
"websockets-13.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6548f29b0e401eea2b967b2fdc1c7c7b5ebb3eeb470ed23a54cd45ef078a0db9"}, + {file = "websockets-13.1-cp311-cp311-win32.whl", hash = "sha256:c11d4d16e133f6df8916cc5b7e3e96ee4c44c936717d684a94f48f82edb7c92f"}, + {file = "websockets-13.1-cp311-cp311-win_amd64.whl", hash = "sha256:d04f13a1d75cb2b8382bdc16ae6fa58c97337253826dfe136195b7f89f661557"}, + {file = "websockets-13.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:9d75baf00138f80b48f1eac72ad1535aac0b6461265a0bcad391fc5aba875cfc"}, + {file = "websockets-13.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9b6f347deb3dcfbfde1c20baa21c2ac0751afaa73e64e5b693bb2b848efeaa49"}, + {file = "websockets-13.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:de58647e3f9c42f13f90ac7e5f58900c80a39019848c5547bc691693098ae1bd"}, + {file = "websockets-13.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1b54689e38d1279a51d11e3467dd2f3a50f5f2e879012ce8f2d6943f00e83f0"}, + {file = "websockets-13.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf1781ef73c073e6b0f90af841aaf98501f975d306bbf6221683dd594ccc52b6"}, + {file = "websockets-13.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d23b88b9388ed85c6faf0e74d8dec4f4d3baf3ecf20a65a47b836d56260d4b9"}, + {file = "websockets-13.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3c78383585f47ccb0fcf186dcb8a43f5438bd7d8f47d69e0b56f71bf431a0a68"}, + {file = "websockets-13.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d6d300f8ec35c24025ceb9b9019ae9040c1ab2f01cddc2bcc0b518af31c75c14"}, + {file = "websockets-13.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a9dcaf8b0cc72a392760bb8755922c03e17a5a54e08cca58e8b74f6902b433cf"}, + {file = "websockets-13.1-cp312-cp312-win32.whl", hash = 
"sha256:2f85cf4f2a1ba8f602298a853cec8526c2ca42a9a4b947ec236eaedb8f2dc80c"}, + {file = "websockets-13.1-cp312-cp312-win_amd64.whl", hash = "sha256:38377f8b0cdeee97c552d20cf1865695fcd56aba155ad1b4ca8779a5b6ef4ac3"}, + {file = "websockets-13.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a9ab1e71d3d2e54a0aa646ab6d4eebfaa5f416fe78dfe4da2839525dc5d765c6"}, + {file = "websockets-13.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b9d7439d7fab4dce00570bb906875734df13d9faa4b48e261c440a5fec6d9708"}, + {file = "websockets-13.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:327b74e915cf13c5931334c61e1a41040e365d380f812513a255aa804b183418"}, + {file = "websockets-13.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:325b1ccdbf5e5725fdcb1b0e9ad4d2545056479d0eee392c291c1bf76206435a"}, + {file = "websockets-13.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:346bee67a65f189e0e33f520f253d5147ab76ae42493804319b5716e46dddf0f"}, + {file = "websockets-13.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91a0fa841646320ec0d3accdff5b757b06e2e5c86ba32af2e0815c96c7a603c5"}, + {file = "websockets-13.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:18503d2c5f3943e93819238bf20df71982d193f73dcecd26c94514f417f6b135"}, + {file = "websockets-13.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:a9cd1af7e18e5221d2878378fbc287a14cd527fdd5939ed56a18df8a31136bb2"}, + {file = "websockets-13.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:70c5be9f416aa72aab7a2a76c90ae0a4fe2755c1816c153c1a2bcc3333ce4ce6"}, + {file = "websockets-13.1-cp313-cp313-win32.whl", hash = "sha256:624459daabeb310d3815b276c1adef475b3e6804abaf2d9d2c061c319f7f187d"}, + {file = "websockets-13.1-cp313-cp313-win_amd64.whl", hash = "sha256:c518e84bb59c2baae725accd355c8dc517b4a3ed8db88b4bc93c78dae2974bf2"}, + {file = 
"websockets-13.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c7934fd0e920e70468e676fe7f1b7261c1efa0d6c037c6722278ca0228ad9d0d"}, + {file = "websockets-13.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:149e622dc48c10ccc3d2760e5f36753db9cacf3ad7bc7bbbfd7d9c819e286f23"}, + {file = "websockets-13.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a569eb1b05d72f9bce2ebd28a1ce2054311b66677fcd46cf36204ad23acead8c"}, + {file = "websockets-13.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95df24ca1e1bd93bbca51d94dd049a984609687cb2fb08a7f2c56ac84e9816ea"}, + {file = "websockets-13.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8dbb1bf0c0a4ae8b40bdc9be7f644e2f3fb4e8a9aca7145bfa510d4a374eeb7"}, + {file = "websockets-13.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:035233b7531fb92a76beefcbf479504db8c72eb3bff41da55aecce3a0f729e54"}, + {file = "websockets-13.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:e4450fc83a3df53dec45922b576e91e94f5578d06436871dce3a6be38e40f5db"}, + {file = "websockets-13.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:463e1c6ec853202dd3657f156123d6b4dad0c546ea2e2e38be2b3f7c5b8e7295"}, + {file = "websockets-13.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:6d6855bbe70119872c05107e38fbc7f96b1d8cb047d95c2c50869a46c65a8e96"}, + {file = "websockets-13.1-cp38-cp38-win32.whl", hash = "sha256:204e5107f43095012b00f1451374693267adbb832d29966a01ecc4ce1db26faf"}, + {file = "websockets-13.1-cp38-cp38-win_amd64.whl", hash = "sha256:485307243237328c022bc908b90e4457d0daa8b5cf4b3723fd3c4a8012fce4c6"}, + {file = "websockets-13.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9b37c184f8b976f0c0a231a5f3d6efe10807d41ccbe4488df8c74174805eea7d"}, + {file = "websockets-13.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:163e7277e1a0bd9fb3c8842a71661ad19c6aa7bb3d6678dc7f89b17fbcc4aeb7"}, + {file = "websockets-13.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4b889dbd1342820cc210ba44307cf75ae5f2f96226c0038094455a96e64fb07a"}, + {file = "websockets-13.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:586a356928692c1fed0eca68b4d1c2cbbd1ca2acf2ac7e7ebd3b9052582deefa"}, + {file = "websockets-13.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7bd6abf1e070a6b72bfeb71049d6ad286852e285f146682bf30d0296f5fbadfa"}, + {file = "websockets-13.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2aad13a200e5934f5a6767492fb07151e1de1d6079c003ab31e1823733ae79"}, + {file = "websockets-13.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:df01aea34b6e9e33572c35cd16bae5a47785e7d5c8cb2b54b2acdb9678315a17"}, + {file = "websockets-13.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:e54affdeb21026329fb0744ad187cf812f7d3c2aa702a5edb562b325191fcab6"}, + {file = "websockets-13.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9ef8aa8bdbac47f4968a5d66462a2a0935d044bf35c0e5a8af152d58516dbeb5"}, + {file = "websockets-13.1-cp39-cp39-win32.whl", hash = "sha256:deeb929efe52bed518f6eb2ddc00cc496366a14c726005726ad62c2dd9017a3c"}, + {file = "websockets-13.1-cp39-cp39-win_amd64.whl", hash = "sha256:7c65ffa900e7cc958cd088b9a9157a8141c991f8c53d11087e6fb7277a03f81d"}, + {file = "websockets-13.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:5dd6da9bec02735931fccec99d97c29f47cc61f644264eb995ad6c0c27667238"}, + {file = "websockets-13.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:2510c09d8e8df777177ee3d40cd35450dc169a81e747455cc4197e63f7e7bfe5"}, + {file = "websockets-13.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1c3cf67185543730888b20682fb186fc8d0fa6f07ccc3ef4390831ab4b388d9"}, + 
{file = "websockets-13.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bcc03c8b72267e97b49149e4863d57c2d77f13fae12066622dc78fe322490fe6"}, + {file = "websockets-13.1-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:004280a140f220c812e65f36944a9ca92d766b6cc4560be652a0a3883a79ed8a"}, + {file = "websockets-13.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:e2620453c075abeb0daa949a292e19f56de518988e079c36478bacf9546ced23"}, + {file = "websockets-13.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9156c45750b37337f7b0b00e6248991a047be4aa44554c9886fe6bdd605aab3b"}, + {file = "websockets-13.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:80c421e07973a89fbdd93e6f2003c17d20b69010458d3a8e37fb47874bd67d51"}, + {file = "websockets-13.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82d0ba76371769d6a4e56f7e83bb8e81846d17a6190971e38b5de108bde9b0d7"}, + {file = "websockets-13.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e9875a0143f07d74dc5e1ded1c4581f0d9f7ab86c78994e2ed9e95050073c94d"}, + {file = "websockets-13.1-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a11e38ad8922c7961447f35c7b17bffa15de4d17c70abd07bfbe12d6faa3e027"}, + {file = "websockets-13.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:4059f790b6ae8768471cddb65d3c4fe4792b0ab48e154c9f0a04cefaabcd5978"}, + {file = "websockets-13.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:25c35bf84bf7c7369d247f0b8cfa157f989862c49104c5cf85cb5436a641d93e"}, + {file = "websockets-13.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:83f91d8a9bb404b8c2c41a707ac7f7f75b9442a0a876df295de27251a856ad09"}, + {file = 
"websockets-13.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7a43cfdcddd07f4ca2b1afb459824dd3c6d53a51410636a2c7fc97b9a8cf4842"}, + {file = "websockets-13.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:48a2ef1381632a2f0cb4efeff34efa97901c9fbc118e01951ad7cfc10601a9bb"}, + {file = "websockets-13.1-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:459bf774c754c35dbb487360b12c5727adab887f1622b8aed5755880a21c4a20"}, + {file = "websockets-13.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:95858ca14a9f6fa8413d29e0a585b31b278388aa775b8a81fa24830123874678"}, + {file = "websockets-13.1-py3-none-any.whl", hash = "sha256:a9a396a6ad26130cdae92ae10c36af09d9bfe6cafe69670fd3b6da9b07b4044f"}, + {file = "websockets-13.1.tar.gz", hash = "sha256:a3b3366087c1bc0a2795111edcadddb8b3b59509d5db5d7ea3fdd69f954a8878"}, +] + [metadata] -lock-version = "2.0" +lock-version = "2.1" python-versions = "^3.8" -content-hash = "7b62e1f0f4a0585712cb1026052e7baaa2c123a0bf48fd94a4e97ca405148ea2" +content-hash = "811874a8f5ef40c48d5591893d71256c97ace59a0ef0d84bc7c7dde6782efb95" diff --git a/pyproject.toml b/pyproject.toml index aa2cd7a9..26bfe62f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -14,10 +14,12 @@ python = "^3.8" pydantic = ">=2.0,<3" httpx = ">=0.23.0,<1" jsonref = ">=1.1.0" +websockets = ">=13,<16" [tool.poetry.group.dev.dependencies] ruff = "^0.3.0" +pytest = "^8.3.0" [build-system] diff --git a/tests/.env.example b/tests/.env.example new file mode 100644 index 00000000..70db0662 --- /dev/null +++ b/tests/.env.example @@ -0,0 +1,3 @@ +HYPERBROWSER_API_KEY= +HYPERBROWSER_BASE_URL=http://localhost:8080 +REGIONAL_PROXY_DEV_HOST=127.0.0.1:8090 diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 00000000..af7e4799 --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,6 @@ +import pytest + + 
+@pytest.fixture +def anyio_backend(): + return "asyncio" diff --git a/tests/helpers/__init__.py b/tests/helpers/__init__.py new file mode 100644 index 00000000..8b137891 --- /dev/null +++ b/tests/helpers/__init__.py @@ -0,0 +1 @@ + diff --git a/tests/helpers/config.py b/tests/helpers/config.py new file mode 100644 index 00000000..ef7748f6 --- /dev/null +++ b/tests/helpers/config.py @@ -0,0 +1,43 @@ +import os +from pathlib import Path + +from hyperbrowser import Hyperbrowser + +TESTS_DIR = Path(__file__).resolve().parent.parent +ENV_PATH = TESTS_DIR / ".env" + + +def _load_env() -> None: + if not ENV_PATH.exists(): + return + + for raw_line in ENV_PATH.read_text().splitlines(): + line = raw_line.strip() + if not line or line.startswith("#") or "=" not in line: + continue + key, value = line.split("=", 1) + os.environ.setdefault(key.strip(), value.strip()) + + +_load_env() + +API_KEY = os.environ.get("HYPERBROWSER_API_KEY", "") +BASE_URL = os.environ.get("HYPERBROWSER_BASE_URL", "http://localhost:8080") +REGIONAL_PROXY_DEV_HOST = os.environ.get("REGIONAL_PROXY_DEV_HOST", "") +DEFAULT_SNAPSHOT_NAME = "receiverStarted-ubuntu-24-node" + + +def create_client() -> Hyperbrowser: + if not API_KEY: + raise RuntimeError( + "Set HYPERBROWSER_API_KEY in tests/.env before running sandbox e2e tests" + ) + + return Hyperbrowser(api_key=API_KEY, base_url=BASE_URL) + + +def make_test_name(prefix: str) -> str: + import random + import time + + return f"{prefix}-{int(time.time() * 1000)}-{random.randrange(16**6):06x}" diff --git a/tests/helpers/errors.py b/tests/helpers/errors.py new file mode 100644 index 00000000..1ebd7d58 --- /dev/null +++ b/tests/helpers/errors.py @@ -0,0 +1,117 @@ +from typing import Callable, Iterable, Optional + +from hyperbrowser.exceptions import HyperbrowserError + + +def _normalize_messages( + value: Optional[Iterable[str]], single: Optional[str] +): + if single is not None: + return [single] + if value is None: + return [] + return list(value) + + +def 
expect_hyperbrowser_error( + label: str, + action: Callable[[], object], + *, + status_code: Optional[int] = None, + code: Optional[str] = None, + service: Optional[str] = None, + retryable: Optional[bool] = None, + message_includes: Optional[str] = None, + message_includes_many: Optional[Iterable[str]] = None, + message_includes_any: Optional[Iterable[str]] = None, +): + try: + action() + except HyperbrowserError as error: + assert "Unknown error occurred" not in str(error), ( + f"{label}: unexpected generic error message {error!r}" + ) + + if status_code is not None: + assert error.status_code == status_code, ( + f"{label}: expected status_code={status_code}, " + f"got {error.status_code}" + ) + if code is not None: + assert error.code == code, f"{label}: expected code={code}, got {error.code}" + if service is not None: + assert error.service == service, ( + f"{label}: expected service={service}, got {error.service}" + ) + if retryable is not None: + assert error.retryable == retryable, ( + f"{label}: expected retryable={retryable}, got {error.retryable}" + ) + + for text in _normalize_messages(message_includes_many, message_includes): + assert text in str(error), ( + f"{label}: expected error message to include {text!r}, " + f"got {str(error)!r}" + ) + + if message_includes_any: + assert any(text in str(error) for text in message_includes_any), ( + f"{label}: expected error message to include one of " + f"{list(message_includes_any)!r}, got {str(error)!r}" + ) + + return error + + raise AssertionError(f"{label}: expected HyperbrowserError, but call succeeded") + + +async def expect_hyperbrowser_error_async( + label: str, + action, + *, + status_code: Optional[int] = None, + code: Optional[str] = None, + service: Optional[str] = None, + retryable: Optional[bool] = None, + message_includes: Optional[str] = None, + message_includes_many: Optional[Iterable[str]] = None, + message_includes_any: Optional[Iterable[str]] = None, +): + try: + await action() + except 
HyperbrowserError as error: + assert "Unknown error occurred" not in str(error), ( + f"{label}: unexpected generic error message {error!r}" + ) + + if status_code is not None: + assert error.status_code == status_code, ( + f"{label}: expected status_code={status_code}, " + f"got {error.status_code}" + ) + if code is not None: + assert error.code == code, f"{label}: expected code={code}, got {error.code}" + if service is not None: + assert error.service == service, ( + f"{label}: expected service={service}, got {error.service}" + ) + if retryable is not None: + assert error.retryable == retryable, ( + f"{label}: expected retryable={retryable}, got {error.retryable}" + ) + + for text in _normalize_messages(message_includes_many, message_includes): + assert text in str(error), ( + f"{label}: expected error message to include {text!r}, " + f"got {str(error)!r}" + ) + + if message_includes_any: + assert any(text in str(error) for text in message_includes_any), ( + f"{label}: expected error message to include one of " + f"{list(message_includes_any)!r}, got {str(error)!r}" + ) + + return error + + raise AssertionError(f"{label}: expected HyperbrowserError, but call succeeded") diff --git a/tests/helpers/http.py b/tests/helpers/http.py new file mode 100644 index 00000000..6c202667 --- /dev/null +++ b/tests/helpers/http.py @@ -0,0 +1,45 @@ +from urllib.parse import urlsplit, urlunsplit + +import httpx + +from tests.helpers.config import REGIONAL_PROXY_DEV_HOST + + +def _has_scheme(value: str) -> bool: + return "://" in value + + +def _resolve_signed_url_target(input_url: str): + original = urlsplit(input_url) + if not REGIONAL_PROXY_DEV_HOST: + return input_url, None + + override = urlsplit( + REGIONAL_PROXY_DEV_HOST + if _has_scheme(REGIONAL_PROXY_DEV_HOST) + else f"{original.scheme}://{REGIONAL_PROXY_DEV_HOST}" + ) + rewritten = urlunsplit( + ( + override.scheme or original.scheme, + override.netloc or original.netloc, + original.path, + original.query, + 
original.fragment, + ) + ) + return rewritten, original.netloc + + +def fetch_signed_url( + input_url: str, + *, + method: str = "GET", + body=None, + headers=None, +) -> httpx.Response: + url, host_header = _resolve_signed_url_target(input_url) + request_headers = dict(headers or {}) + if host_header and "Host" not in request_headers and "host" not in request_headers: + request_headers["Host"] = host_header + return httpx.request(method, url, headers=request_headers, content=body, timeout=30) diff --git a/tests/helpers/sandbox.py b/tests/helpers/sandbox.py new file mode 100644 index 00000000..0d07b00c --- /dev/null +++ b/tests/helpers/sandbox.py @@ -0,0 +1,99 @@ +import time + +from hyperbrowser.exceptions import HyperbrowserError +from hyperbrowser.models import CreateSandboxParams + +from tests.helpers.config import DEFAULT_SNAPSHOT_NAME, make_test_name + + +def default_sandbox_params(prefix: str) -> CreateSandboxParams: + return CreateSandboxParams( + sandbox_name=make_test_name(prefix), + snapshot_name=DEFAULT_SNAPSHOT_NAME, + ) + + +def stop_sandbox_if_running(sandbox) -> None: + if sandbox is None: + return + + try: + sandbox.stop() + except HyperbrowserError as error: + if error.status_code in {404, 409}: + return + raise + + +def wait_for_runtime_ready( + sandbox, + *, + attempts: int = 5, + delay_seconds: float = 0.25, +) -> None: + last_error = None + + for attempt in range(1, attempts + 1): + try: + result = sandbox.exec("true") + if result.exit_code == 0: + return + last_error = RuntimeError( + f"runtime readiness probe exited with code {result.exit_code}" + ) + except HyperbrowserError as error: + if error.service == "runtime" and error.retryable: + last_error = error + else: + raise + + if attempt < attempts: + time.sleep(delay_seconds * attempt) + + if isinstance(last_error, Exception): + raise last_error + raise RuntimeError("sandbox runtime did not become ready") + + +async def stop_sandbox_if_running_async(sandbox) -> None: + if sandbox is None: 
+ return + + try: + await sandbox.stop() + except HyperbrowserError as error: + if error.status_code in {404, 409}: + return + raise + + +async def wait_for_runtime_ready_async( + sandbox, + *, + attempts: int = 5, + delay_seconds: float = 0.25, +) -> None: + import asyncio + + last_error = None + + for attempt in range(1, attempts + 1): + try: + result = await sandbox.exec("true") + if result.exit_code == 0: + return + last_error = RuntimeError( + f"runtime readiness probe exited with code {result.exit_code}" + ) + except HyperbrowserError as error: + if error.service == "runtime" and error.retryable: + last_error = error + else: + raise + + if attempt < attempts: + await asyncio.sleep(delay_seconds * attempt) + + if isinstance(last_error, Exception): + raise last_error + raise RuntimeError("sandbox runtime did not become ready") diff --git a/tests/sandbox/e2e/test_async_files.py b/tests/sandbox/e2e/test_async_files.py new file mode 100644 index 00000000..1edb5c7c --- /dev/null +++ b/tests/sandbox/e2e/test_async_files.py @@ -0,0 +1,237 @@ +import asyncio + +import pytest + +from hyperbrowser import AsyncHyperbrowser + +from tests.helpers.config import make_test_name +from tests.helpers.errors import expect_hyperbrowser_error_async +from tests.helpers.http import fetch_signed_url +from tests.helpers.sandbox import ( + default_sandbox_params, + stop_sandbox_if_running_async, + wait_for_runtime_ready_async, +) + + +async def _next_watch_event(watch, *, route="ws", cursor=None): + async for event in watch.events(route=route, cursor=cursor): + if event.type == "event": + return event.event + raise RuntimeError("watch stream ended before an event was received") + + +async def _wait_for_watch_buffer_rollover(watch, *, attempts=20, delay_seconds=0.1): + for _ in range(attempts): + refreshed = await watch.refresh() + if refreshed.current.oldest_seq > 1: + return refreshed + await asyncio.sleep(delay_seconds) + raise RuntimeError("watch buffer did not roll over before 
timeout") + + +@pytest.mark.anyio +async def test_async_sandbox_files_e2e(): + client = AsyncHyperbrowser() + sandbox = None + base_dir = f"/tmp/{make_test_name('py-async-files')}" + + try: + sandbox = await client.sandboxes.create(default_sandbox_params("py-async-files")) + await wait_for_runtime_ready_async(sandbox) + + assert await sandbox.files.exists(f"{base_dir}/missing.txt") is False + + result = await sandbox.files.mkdir(base_dir, parents=True) + assert result.path == base_dir + + await sandbox.files.write_text(f"{base_dir}/hello.txt", "hello from sdk files") + content = await sandbox.files.read_text(f"{base_dir}/hello.txt") + assert content == "hello from sdk files" + + chunk = await sandbox.files.read_text( + f"{base_dir}/hello.txt", offset=6, length=4 + ) + assert chunk == "from" + + result = await sandbox.files.read( + f"{base_dir}/hello.txt", + offset=0, + length=5, + encoding="utf8", + ) + assert result.content == "hello" + assert result.encoding == "utf8" + assert result.bytes_read == 5 + assert result.truncated is True + + source = bytes([0, 1, 2, 3, 4]) + await sandbox.files.write_bytes(f"{base_dir}/bytes.bin", source) + content = await sandbox.files.read_bytes(f"{base_dir}/bytes.bin") + assert content == source + + stat = await sandbox.files.stat(f"{base_dir}/hello.txt") + assert stat.name == "hello.txt" + + listing = await sandbox.files.list(base_dir) + assert any(entry.name == "hello.txt" for entry in listing.entries) + + uploaded = await sandbox.files.upload(f"{base_dir}/upload.txt", "uploaded from sdk") + assert uploaded.bytes_written > 0 + + downloaded = await sandbox.files.download(f"{base_dir}/upload.txt") + assert downloaded.decode("utf-8") == "uploaded from sdk" + + moved = await sandbox.files.move( + source=f"{base_dir}/hello.txt", + destination=f"{base_dir}/hello-moved.txt", + ) + assert moved.to == f"{base_dir}/hello-moved.txt" + + copied = await sandbox.files.copy( + source=f"{base_dir}/hello-moved.txt", + 
destination=f"{base_dir}/hello-copy.txt", + ) + assert copied.to == f"{base_dir}/hello-copy.txt" + + await sandbox.files.chmod(path=f"{base_dir}/hello-copy.txt", mode="0640") + stat = await sandbox.files.stat(f"{base_dir}/hello-copy.txt") + assert "640" in stat.mode + + try: + await expect_hyperbrowser_error_async( + "file chown", + lambda: sandbox.files.chown( + path=f"{base_dir}/hello-copy.txt", + uid=0, + gid=0, + ), + status_code=400, + service="runtime", + retryable=False, + message_includes_any=["operation", "permission"], + ) + except AssertionError as error: + if "expected HyperbrowserError, but call succeeded" not in str(error): + raise + stat = await sandbox.files.stat(f"{base_dir}/hello-copy.txt") + assert stat.name == "hello-copy.txt" + + watch = await sandbox.files.watch(base_dir, recursive=False) + try: + await sandbox.files.write_text(f"{base_dir}/watch.txt", "watch me") + event = await _next_watch_event(watch, route="stream") + assert "watch.txt" in event.path + + fetched = await sandbox.files.get_watch(watch.id, True) + assert fetched.id == watch.id + assert fetched.current.path == base_dir + finally: + await watch.stop() + + watch = await sandbox.files.watch(base_dir, recursive=False) + try: + await sandbox.files.write_text(f"{base_dir}/watch-refresh-1.txt", "one") + refreshed = await watch.refresh(True) + assert refreshed.current.last_seq > 0 + assert refreshed.current.oldest_seq > 0 + assert any( + "watch-refresh-1.txt" in event.path + for event in (refreshed.current.events or []) + ) + + await sandbox.files.write_text(f"{base_dir}/watch-refresh-2.txt", "two") + event = await _next_watch_event( + watch, + route="ws", + cursor=refreshed.current.last_seq, + ) + assert "watch-refresh-2.txt" in event.path + assert watch.current.last_seq >= event.seq + finally: + await watch.stop() + + watch = await sandbox.files.watch(base_dir, recursive=False) + try: + burst = await sandbox.exec( + { + "command": "bash", + "args": [ + "-lc", + f'for i in $(seq 1 
1200); do echo x > "{base_dir}/overflow-$i.txt"; rm -f "{base_dir}/overflow-$i.txt"; done', + ], + } + ) + assert burst.exit_code == 0 + + rolled = await _wait_for_watch_buffer_rollover(watch) + assert rolled.current.oldest_seq > 1 + + await expect_hyperbrowser_error_async( + "watch replay window expired", + lambda: anext(watch.events(route="ws", cursor=0)), + status_code=410, + code="replay_window_expired", + service="runtime", + retryable=False, + message_includes="Replay window expired", + ) + finally: + await watch.stop() + + upload = await sandbox.files.upload_url( + f"{base_dir}/presign-upload.txt", + one_time=True, + ) + assert upload.path == f"{base_dir}/presign-upload.txt" + assert upload.url + assert upload.method == "PUT" + + upload_response = fetch_signed_url( + upload.url, + method=upload.method, + body="presigned upload body", + ) + assert upload_response.status_code == 200 + + uploaded_body = await sandbox.files.read_text(f"{base_dir}/presign-upload.txt") + assert uploaded_body == "presigned upload body" + + download = await sandbox.files.download_url( + f"{base_dir}/presign-upload.txt", + one_time=True, + ) + assert download.path == f"{base_dir}/presign-upload.txt" + assert download.method == "GET" + + download_response = fetch_signed_url(download.url, method=download.method) + assert download_response.status_code == 200 + assert download_response.text == "presigned upload body" + + deleted_file = await sandbox.files.delete(f"{base_dir}/hello-copy.txt") + assert deleted_file.path == f"{base_dir}/hello-copy.txt" + + deleted_dir = await sandbox.files.delete(base_dir, recursive=True) + assert deleted_dir.path == base_dir + assert await sandbox.files.exists(base_dir) is False + + await expect_hyperbrowser_error_async( + "missing file read", + lambda: sandbox.files.read_text(f"{base_dir}/still-missing.txt"), + status_code=404, + service="runtime", + retryable=False, + message_includes_any=["not found", "no such file"], + ) + + await 
expect_hyperbrowser_error_async( + "missing file delete", + lambda: sandbox.files.delete(f"{base_dir}/still-missing.txt"), + status_code=404, + service="runtime", + retryable=False, + message_includes_any=["not found", "no such file"], + ) + finally: + await stop_sandbox_if_running_async(sandbox) + await client.close() diff --git a/tests/sandbox/e2e/test_async_lifecycle.py b/tests/sandbox/e2e/test_async_lifecycle.py new file mode 100644 index 00000000..76dd7f1d --- /dev/null +++ b/tests/sandbox/e2e/test_async_lifecycle.py @@ -0,0 +1,144 @@ +from datetime import datetime, timedelta, timezone +from uuid import uuid4 + +import pytest + +from hyperbrowser import AsyncHyperbrowser +from hyperbrowser.models import SandboxListParams, SandboxRuntimeSession + +from tests.helpers.errors import expect_hyperbrowser_error_async +from tests.helpers.sandbox import ( + default_sandbox_params, + stop_sandbox_if_running_async, + wait_for_runtime_ready_async, +) + + +@pytest.mark.anyio +async def test_async_sandbox_lifecycle_e2e(): + client = AsyncHyperbrowser() + sandbox = None + stale_handle = None + secondary = None + + try: + sandbox = await client.sandboxes.create(default_sandbox_params("py-async-lifecycle")) + stale_handle = await client.sandboxes.get(sandbox.id) + await wait_for_runtime_ready_async(sandbox) + + assert sandbox.to_dict()["token"] + assert sandbox.runtime.base_url + assert sandbox.token_expires_at is not None + + session = await sandbox.create_runtime_session() + assert session.token + assert session.sandbox_id == sandbox.id + assert session.runtime.base_url == sandbox.runtime.base_url + + info = await sandbox.info() + assert info.id == sandbox.id + await sandbox.refresh() + assert sandbox.status == "active" + + await sandbox.connect() + assert sandbox.status == "active" + + original_create_runtime_session = sandbox.create_runtime_session + valid_session = await original_create_runtime_session(force_refresh=True) + invalid_jwt = 
"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.e30.invalid-signature" + refresh_count = 0 + + async def patched_create_runtime_session(force_refresh: bool = False): + nonlocal refresh_count + if force_refresh: + refresh_count += 1 + return await original_create_runtime_session(force_refresh=True) + + return SandboxRuntimeSession( + sandbox_id=valid_session.sandbox_id, + status=valid_session.status, + region=valid_session.region, + token=invalid_jwt, + token_expires_at=datetime.now(timezone.utc) + timedelta(hours=1), + runtime=valid_session.runtime, + ) + + sandbox.create_runtime_session = patched_create_runtime_session + try: + result = await sandbox.exec("echo runtime-refresh-ok") + assert result.exit_code == 0 + assert "runtime-refresh-ok" in result.stdout + assert refresh_count > 0 + assert sandbox.to_dict()["token"] + assert sandbox.to_dict()["token"] != invalid_jwt + finally: + sandbox.create_runtime_session = original_create_runtime_session + + listing = await client.sandboxes.list( + SandboxListParams(search=sandbox.id, limit=20) + ) + assert any(entry.id == sandbox.id for entry in listing.sandboxes) + + response = await sandbox.stop() + assert response.success is True + assert sandbox.status == "closed" + + await expect_hyperbrowser_error_async( + "stopped sandbox connect", + lambda: sandbox.connect(), + status_code=409, + code="sandbox_not_running", + service="runtime", + retryable=False, + message_includes="not running", + ) + + await expect_hyperbrowser_error_async( + "stopped sandbox exec", + lambda: sandbox.exec("echo should-not-run"), + status_code=409, + code="sandbox_not_running", + service="runtime", + retryable=False, + message_includes="not running", + ) + + await expect_hyperbrowser_error_async( + "stale sandbox connect", + lambda: stale_handle.connect(), + status_code=409, + service="control", + retryable=False, + message_includes="Sandbox is not running", + ) + + await expect_hyperbrowser_error_async( + "stopped sandbox reconnect", + lambda: 
client.sandboxes.connect(sandbox.id), + status_code=409, + code="sandbox_not_running", + service="runtime", + retryable=False, + message_includes="not running", + ) + + await expect_hyperbrowser_error_async( + "missing sandbox get", + lambda: client.sandboxes.get(str(uuid4())), + status_code=404, + service="control", + retryable=False, + message_includes="not found", + ) + + secondary = await client.sandboxes.start_from_snapshot( + default_sandbox_params("py-async-secondary") + ) + response = await secondary.stop() + assert response.success is True + assert secondary.status == "closed" + finally: + await stop_sandbox_if_running_async(sandbox) + await stop_sandbox_if_running_async(stale_handle) + await stop_sandbox_if_running_async(secondary) + await client.close() diff --git a/tests/sandbox/e2e/test_async_process.py b/tests/sandbox/e2e/test_async_process.py new file mode 100644 index 00000000..4df9a3b3 --- /dev/null +++ b/tests/sandbox/e2e/test_async_process.py @@ -0,0 +1,146 @@ +import pytest + +from hyperbrowser import AsyncHyperbrowser + +from tests.helpers.errors import expect_hyperbrowser_error_async +from tests.helpers.sandbox import ( + default_sandbox_params, + stop_sandbox_if_running_async, + wait_for_runtime_ready_async, +) + + +async def _collect_process_stream(events): + output = [] + async for event in events: + output.append(event) + if event.type == "exit": + break + return output + + +@pytest.mark.anyio +async def test_async_sandbox_process_e2e(): + client = AsyncHyperbrowser() + sandbox = None + + try: + sandbox = await client.sandboxes.create(default_sandbox_params("py-async-process")) + await wait_for_runtime_ready_async(sandbox) + + result = await sandbox.exec("echo process-exec-ok") + assert result.exit_code == 0 + assert "process-exec-ok" in result.stdout + + result = await sandbox.exec( + { + "command": "bash", + "args": ["-lc", "echo process-exec-fail 1>&2; exit 7"], + } + ) + assert result.exit_code == 7 + assert "process-exec-fail" in 
result.stderr + + stdin_process = await sandbox.processes.start( + { + "command": "bash", + "args": ["-lc", "read line; echo stdout:$line; echo stderr:$line 1>&2"], + } + ) + fetched = await sandbox.get_process(stdin_process.id) + assert fetched.id == stdin_process.id + + listing = await sandbox.processes.list(limit=20) + assert any(entry.id == stdin_process.id for entry in listing.data) + + await stdin_process.write_stdin("sdk-stdin\n", eof=True) + result = await stdin_process.wait() + assert result.exit_code == 0 + assert "stdout:sdk-stdin" in result.stdout + assert "stderr:sdk-stdin" in result.stderr + + running_process = await sandbox.processes.start( + {"command": "bash", "args": ["-lc", "sleep 30"]} + ) + refreshed = await running_process.refresh() + assert refreshed.status in {"queued", "running"} + result = await running_process.kill() + assert result.status not in {"queued", "running"} + + streamed = await sandbox.processes.start( + { + "command": "bash", + "args": ["-lc", "echo stream-out; echo stream-err 1>&2"], + } + ) + events = await _collect_process_stream(streamed.stream()) + assert any( + event.type == "stdout" and "stream-out" in event.data for event in events + ) + assert any( + event.type == "stderr" and "stream-err" in event.data for event in events + ) + assert any(event.type == "exit" for event in events) + + result_process = await sandbox.processes.start( + {"command": "bash", "args": ["-lc", "echo result-alias-ok"]} + ) + result = await result_process.result() + assert result.exit_code == 0 + assert "result-alias-ok" in result.stdout + + noisy_process = await sandbox.processes.start( + { + "command": "bash", + "args": [ + "-lc", + 'yes "process-replay-window-overflow-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" | head -n 120000', + ], + } + ) + result = await noisy_process.result() + assert len(result.stdout) > 3 * 1024 * 1024 + + await expect_hyperbrowser_error_async( + "process replay window expired", + lambda: 
_collect_process_stream(noisy_process.stream(1)), + status_code=410, + code="replay_window_expired", + service="runtime", + retryable=False, + message_includes="Replay window expired", + ) + + timeout_process = await sandbox.processes.start( + {"command": "bash", "args": ["-lc", "sleep 10"]} + ) + await expect_hyperbrowser_error_async( + "process wait timeout", + lambda: timeout_process.wait(timeout_ms=100), + status_code=408, + service="runtime", + retryable=False, + message_includes="timed out", + ) + await timeout_process.signal("TERM") + result = await timeout_process.wait(timeout_ms=3000) + assert result.status in {"exited", "failed", "killed", "timed_out"} + + kill_process = await sandbox.processes.start( + {"command": "bash", "args": ["-lc", "sleep 30"]} + ) + result = await kill_process.kill() + assert result.status not in {"queued", "running"} + assert kill_process.status not in {"queued", "running"} + + await expect_hyperbrowser_error_async( + "missing process get", + lambda: sandbox.get_process("proc_missing"), + status_code=404, + service="runtime", + retryable=False, + message_includes="not found", + ) + finally: + await stop_sandbox_if_running_async(sandbox) + await client.close() diff --git a/tests/sandbox/e2e/test_async_terminal_smoke.py b/tests/sandbox/e2e/test_async_terminal_smoke.py new file mode 100644 index 00000000..cfc7c20d --- /dev/null +++ b/tests/sandbox/e2e/test_async_terminal_smoke.py @@ -0,0 +1,133 @@ +import pytest + +from hyperbrowser import AsyncHyperbrowser + +from tests.helpers.errors import expect_hyperbrowser_error_async +from tests.helpers.sandbox import ( + default_sandbox_params, + stop_sandbox_if_running_async, + wait_for_runtime_ready_async, +) + + +async def _collect_terminal_session(connection): + output = "" + exit_code = None + + async for event in connection.events(): + if event.type == "output": + output += event.data + continue + exit_code = event.status.exit_code + break + + return output, exit_code + + 
+@pytest.mark.anyio +async def test_async_sandbox_terminal_e2e(): + client = AsyncHyperbrowser() + sandbox = None + + try: + sandbox = await client.sandboxes.create(default_sandbox_params("py-async-terminal")) + await wait_for_runtime_ready_async(sandbox) + + assert sandbox.pty is sandbox.terminal + + terminal = await sandbox.terminal.create( + { + "command": "bash", + "args": ["-l"], + "rows": 24, + "cols": 80, + } + ) + fetched = await sandbox.terminal.get(terminal.id) + assert fetched.id == terminal.id + + connection = await terminal.attach() + try: + await terminal.resize(30, 100) + await connection.write("pwd\n") + await connection.write("echo terminal-smoke-ok\n") + await connection.write("exit\n") + + output, exit_code = await _collect_terminal_session(connection) + assert "terminal-smoke-ok" in output + assert exit_code == 0 + finally: + await connection.close() + + status = await terminal.wait(timeout_ms=2000) + assert status.running is False + assert status.exit_code == 0 + + terminal = await sandbox.terminal.create( + { + "command": "bash", + "args": ["-l"], + "rows": 24, + "cols": 80, + } + ) + connection = await terminal.attach() + try: + await connection.resize(32, 110) + refreshed = await terminal.refresh() + assert refreshed.current.rows == 32 + assert refreshed.current.cols == 110 + + await connection.write("exit\n") + _, exit_code = await _collect_terminal_session(connection) + assert exit_code == 0 + finally: + await connection.close() + + status = await terminal.wait(timeout_ms=2000) + assert status.running is False + + timeout_terminal = await sandbox.pty.create( + { + "command": "bash", + "args": ["-lc", "sleep 10"], + "rows": 24, + "cols": 80, + } + ) + await expect_hyperbrowser_error_async( + "terminal wait timeout", + lambda: timeout_terminal.wait(timeout_ms=100), + status_code=408, + service="runtime", + retryable=False, + message_includes="timed out", + ) + + await timeout_terminal.signal("TERM") + status = await 
timeout_terminal.wait(timeout_ms=3000) + assert status.running is False + + kill_terminal = await sandbox.pty.create( + { + "command": "bash", + "args": ["-lc", "sleep 30"], + "rows": 24, + "cols": 80, + } + ) + status = await kill_terminal.kill() + assert status.running is False + assert kill_terminal.current.running is False + + await expect_hyperbrowser_error_async( + "missing terminal get", + lambda: sandbox.terminal.get("pty_missing"), + status_code=404, + service="runtime", + retryable=False, + message_includes="not found", + ) + finally: + await stop_sandbox_if_running_async(sandbox) + await client.close() diff --git a/tests/sandbox/e2e/test_files.py b/tests/sandbox/e2e/test_files.py new file mode 100644 index 00000000..fded4a88 --- /dev/null +++ b/tests/sandbox/e2e/test_files.py @@ -0,0 +1,230 @@ +import time + +from tests.helpers.config import create_client, make_test_name +from tests.helpers.errors import expect_hyperbrowser_error +from tests.helpers.http import fetch_signed_url +from tests.helpers.sandbox import ( + default_sandbox_params, + stop_sandbox_if_running, + wait_for_runtime_ready, +) + +client = create_client() + + +def _next_watch_event(watch, *, route="ws", cursor=None): + for event in watch.events(route=route, cursor=cursor): + if event.type == "event": + return event.event + raise RuntimeError("watch stream ended before an event was received") + + +def _wait_for_watch_buffer_rollover(watch, *, attempts=20, delay_seconds=0.1): + for _ in range(attempts): + refreshed = watch.refresh() + if refreshed.current.oldest_seq > 1: + return refreshed + time.sleep(delay_seconds) + raise RuntimeError("watch buffer did not roll over before timeout") + + +def test_sandbox_files_e2e(): + sandbox = None + base_dir = f"/tmp/{make_test_name('py-sdk-files')}" + + try: + sandbox = client.sandboxes.create(default_sandbox_params("py-sdk-files")) + wait_for_runtime_ready(sandbox) + + assert sandbox.files.exists(f"{base_dir}/missing.txt") is False + + result = 
sandbox.files.mkdir(base_dir, parents=True) + assert result.path == base_dir + + sandbox.files.write_text(f"{base_dir}/hello.txt", "hello from sdk files") + content = sandbox.files.read_text(f"{base_dir}/hello.txt") + assert content == "hello from sdk files" + + chunk = sandbox.files.read_text(f"{base_dir}/hello.txt", offset=6, length=4) + assert chunk == "from" + + result = sandbox.files.read( + f"{base_dir}/hello.txt", + offset=0, + length=5, + encoding="utf8", + ) + assert result.content == "hello" + assert result.encoding == "utf8" + assert result.bytes_read == 5 + assert result.truncated is True + + source = bytes([0, 1, 2, 3, 4]) + sandbox.files.write_bytes(f"{base_dir}/bytes.bin", source) + content = sandbox.files.read_bytes(f"{base_dir}/bytes.bin") + assert content == source + + stat = sandbox.files.stat(f"{base_dir}/hello.txt") + assert stat.name == "hello.txt" + + listing = sandbox.files.list(base_dir) + assert any(entry.name == "hello.txt" for entry in listing.entries) + + uploaded = sandbox.files.upload(f"{base_dir}/upload.txt", "uploaded from sdk") + assert uploaded.bytes_written > 0 + + downloaded = sandbox.files.download(f"{base_dir}/upload.txt") + assert downloaded.decode("utf-8") == "uploaded from sdk" + + moved = sandbox.files.move( + source=f"{base_dir}/hello.txt", + destination=f"{base_dir}/hello-moved.txt", + ) + assert moved.to == f"{base_dir}/hello-moved.txt" + + copied = sandbox.files.copy( + source=f"{base_dir}/hello-moved.txt", + destination=f"{base_dir}/hello-copy.txt", + ) + assert copied.to == f"{base_dir}/hello-copy.txt" + + sandbox.files.chmod(path=f"{base_dir}/hello-copy.txt", mode="0640") + stat = sandbox.files.stat(f"{base_dir}/hello-copy.txt") + assert "640" in stat.mode + + try: + expect_hyperbrowser_error( + "file chown", + lambda: sandbox.files.chown( + path=f"{base_dir}/hello-copy.txt", + uid=0, + gid=0, + ), + status_code=400, + service="runtime", + retryable=False, + message_includes_any=["operation", "permission"], + ) + 
except AssertionError as error: + if "expected HyperbrowserError, but call succeeded" not in str(error): + raise + stat = sandbox.files.stat(f"{base_dir}/hello-copy.txt") + assert stat.name == "hello-copy.txt" + + watch = sandbox.files.watch(base_dir, recursive=False) + try: + sandbox.files.write_text(f"{base_dir}/watch.txt", "watch me") + event = _next_watch_event(watch, route="stream") + assert "watch.txt" in event.path + + fetched = sandbox.files.get_watch(watch.id, True) + assert fetched.id == watch.id + assert fetched.current.path == base_dir + finally: + watch.stop() + + watch = sandbox.files.watch(base_dir, recursive=False) + try: + sandbox.files.write_text(f"{base_dir}/watch-refresh-1.txt", "one") + refreshed = watch.refresh(True) + assert refreshed.current.last_seq > 0 + assert refreshed.current.oldest_seq > 0 + assert any( + "watch-refresh-1.txt" in event.path + for event in (refreshed.current.events or []) + ) + + sandbox.files.write_text(f"{base_dir}/watch-refresh-2.txt", "two") + event = _next_watch_event( + watch, + route="ws", + cursor=refreshed.current.last_seq, + ) + assert "watch-refresh-2.txt" in event.path + assert watch.current.last_seq >= event.seq + finally: + watch.stop() + + watch = sandbox.files.watch(base_dir, recursive=False) + try: + burst = sandbox.exec( + { + "command": "bash", + "args": [ + "-lc", + f'for i in $(seq 1 1200); do echo x > "{base_dir}/overflow-$i.txt"; rm -f "{base_dir}/overflow-$i.txt"; done', + ], + } + ) + assert burst.exit_code == 0 + + rolled = _wait_for_watch_buffer_rollover(watch) + assert rolled.current.oldest_seq > 1 + + expect_hyperbrowser_error( + "watch replay window expired", + lambda: next(watch.events(route="ws", cursor=0)), + status_code=410, + code="replay_window_expired", + service="runtime", + retryable=False, + message_includes="Replay window expired", + ) + finally: + watch.stop() + + upload = sandbox.files.upload_url( + f"{base_dir}/presign-upload.txt", + one_time=True, + ) + assert upload.path == 
f"{base_dir}/presign-upload.txt" + assert upload.url + assert upload.method == "PUT" + + upload_response = fetch_signed_url( + upload.url, + method=upload.method, + body="presigned upload body", + ) + assert upload_response.status_code == 200 + + uploaded_body = sandbox.files.read_text(f"{base_dir}/presign-upload.txt") + assert uploaded_body == "presigned upload body" + + download = sandbox.files.download_url( + f"{base_dir}/presign-upload.txt", + one_time=True, + ) + assert download.path == f"{base_dir}/presign-upload.txt" + assert download.method == "GET" + + download_response = fetch_signed_url(download.url, method=download.method) + assert download_response.status_code == 200 + assert download_response.text == "presigned upload body" + + deleted_file = sandbox.files.delete(f"{base_dir}/hello-copy.txt") + assert deleted_file.path == f"{base_dir}/hello-copy.txt" + + deleted_dir = sandbox.files.delete(base_dir, recursive=True) + assert deleted_dir.path == base_dir + assert sandbox.files.exists(base_dir) is False + + expect_hyperbrowser_error( + "missing file read", + lambda: sandbox.files.read_text(f"{base_dir}/still-missing.txt"), + status_code=404, + service="runtime", + retryable=False, + message_includes_any=["not found", "no such file"], + ) + + expect_hyperbrowser_error( + "missing file delete", + lambda: sandbox.files.delete(f"{base_dir}/still-missing.txt"), + status_code=404, + service="runtime", + retryable=False, + message_includes_any=["not found", "no such file"], + ) + finally: + stop_sandbox_if_running(sandbox) diff --git a/tests/sandbox/e2e/test_lifecycle.py b/tests/sandbox/e2e/test_lifecycle.py new file mode 100644 index 00000000..176c11ed --- /dev/null +++ b/tests/sandbox/e2e/test_lifecycle.py @@ -0,0 +1,141 @@ +from datetime import datetime, timedelta, timezone +from uuid import uuid4 + +from hyperbrowser.models import SandboxListParams, SandboxRuntimeSession + +from tests.helpers.config import create_client +from tests.helpers.errors import 
expect_hyperbrowser_error +from tests.helpers.sandbox import ( + default_sandbox_params, + stop_sandbox_if_running, + wait_for_runtime_ready, +) + +client = create_client() + + +def test_sandbox_lifecycle_e2e(): + sandbox = None + stale_handle = None + secondary = None + + try: + sandbox = client.sandboxes.create(default_sandbox_params("py-sdk-lifecycle")) + stale_handle = client.sandboxes.get(sandbox.id) + wait_for_runtime_ready(sandbox) + + assert sandbox.to_dict()["token"] + assert sandbox.runtime.base_url + assert sandbox.token_expires_at is not None + + session = sandbox.create_runtime_session() + assert session.token + assert session.sandbox_id == sandbox.id + assert session.runtime.base_url == sandbox.runtime.base_url + + info = sandbox.info() + assert info.id == sandbox.id + sandbox.refresh() + assert sandbox.status == "active" + + sandbox.connect() + assert sandbox.status == "active" + + original_create_runtime_session = sandbox.create_runtime_session + valid_session = original_create_runtime_session(force_refresh=True) + invalid_jwt = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.e30.invalid-signature" + refresh_count = 0 + + def patched_create_runtime_session(force_refresh: bool = False): + nonlocal refresh_count + if force_refresh: + refresh_count += 1 + return original_create_runtime_session(force_refresh=True) + + return SandboxRuntimeSession( + sandbox_id=valid_session.sandbox_id, + status=valid_session.status, + region=valid_session.region, + token=invalid_jwt, + token_expires_at=datetime.now(timezone.utc) + timedelta(hours=1), + runtime=valid_session.runtime, + ) + + sandbox.create_runtime_session = patched_create_runtime_session + try: + result = sandbox.exec("echo runtime-refresh-ok") + assert result.exit_code == 0 + assert "runtime-refresh-ok" in result.stdout + assert refresh_count > 0 + assert sandbox.to_dict()["token"] + assert sandbox.to_dict()["token"] != invalid_jwt + finally: + sandbox.create_runtime_session = original_create_runtime_session + + 
listing = client.sandboxes.list( + SandboxListParams(search=sandbox.id, limit=20) + ) + assert any(entry.id == sandbox.id for entry in listing.sandboxes) + + response = sandbox.stop() + assert response.success is True + assert sandbox.status == "closed" + + expect_hyperbrowser_error( + "stopped sandbox connect", + lambda: sandbox.connect(), + status_code=409, + code="sandbox_not_running", + service="runtime", + retryable=False, + message_includes="not running", + ) + + expect_hyperbrowser_error( + "stopped sandbox exec", + lambda: sandbox.exec("echo should-not-run"), + status_code=409, + code="sandbox_not_running", + service="runtime", + retryable=False, + message_includes="not running", + ) + + expect_hyperbrowser_error( + "stale sandbox connect", + lambda: stale_handle.connect(), + status_code=409, + service="control", + retryable=False, + message_includes="Sandbox is not running", + ) + + expect_hyperbrowser_error( + "stopped sandbox reconnect", + lambda: client.sandboxes.connect(sandbox.id), + status_code=409, + code="sandbox_not_running", + service="runtime", + retryable=False, + message_includes="not running", + ) + + expect_hyperbrowser_error( + "missing sandbox get", + lambda: client.sandboxes.get(str(uuid4())), + status_code=404, + service="control", + retryable=False, + message_includes="not found", + ) + + secondary = client.sandboxes.start_from_snapshot( + default_sandbox_params("py-sdk-secondary") + ) + response = secondary.stop() + assert response.success is True + assert secondary.status == "closed" + finally: + stop_sandbox_if_running(sandbox) + stop_sandbox_if_running(stale_handle) + stop_sandbox_if_running(secondary) diff --git a/tests/sandbox/e2e/test_process.py b/tests/sandbox/e2e/test_process.py new file mode 100644 index 00000000..74ffaa9f --- /dev/null +++ b/tests/sandbox/e2e/test_process.py @@ -0,0 +1,142 @@ +from tests.helpers.config import create_client +from tests.helpers.errors import expect_hyperbrowser_error +from tests.helpers.sandbox 
import ( + default_sandbox_params, + stop_sandbox_if_running, + wait_for_runtime_ready, +) + +client = create_client() + + +def _collect_process_stream(events): + output = [] + for event in events: + output.append(event) + if event.type == "exit": + break + return output + + +def test_sandbox_process_e2e(): + sandbox = None + + try: + sandbox = client.sandboxes.create(default_sandbox_params("py-sdk-process")) + wait_for_runtime_ready(sandbox) + + result = sandbox.exec("echo process-exec-ok") + assert result.exit_code == 0 + assert "process-exec-ok" in result.stdout + + result = sandbox.exec( + { + "command": "bash", + "args": ["-lc", "echo process-exec-fail 1>&2; exit 7"], + } + ) + assert result.exit_code == 7 + assert "process-exec-fail" in result.stderr + + stdin_process = sandbox.processes.start( + { + "command": "bash", + "args": ["-lc", "read line; echo stdout:$line; echo stderr:$line 1>&2"], + } + ) + fetched = sandbox.get_process(stdin_process.id) + assert fetched.id == stdin_process.id + + listing = sandbox.processes.list(limit=20) + assert any(entry.id == stdin_process.id for entry in listing.data) + + stdin_process.write_stdin("sdk-stdin\n", eof=True) + result = stdin_process.wait() + assert result.exit_code == 0 + assert "stdout:sdk-stdin" in result.stdout + assert "stderr:sdk-stdin" in result.stderr + + running_process = sandbox.processes.start( + {"command": "bash", "args": ["-lc", "sleep 30"]} + ) + refreshed = running_process.refresh() + assert refreshed.status in {"queued", "running"} + result = running_process.kill() + assert result.status not in {"queued", "running"} + + streamed = sandbox.processes.start( + { + "command": "bash", + "args": ["-lc", "echo stream-out; echo stream-err 1>&2"], + } + ) + events = _collect_process_stream(streamed.stream()) + assert any( + event.type == "stdout" and "stream-out" in event.data for event in events + ) + assert any( + event.type == "stderr" and "stream-err" in event.data for event in events + ) + assert 
any(event.type == "exit" for event in events) + + result_process = sandbox.processes.start( + {"command": "bash", "args": ["-lc", "echo result-alias-ok"]} + ) + result = result_process.result() + assert result.exit_code == 0 + assert "result-alias-ok" in result.stdout + + noisy_process = sandbox.processes.start( + { + "command": "bash", + "args": [ + "-lc", + 'yes "process-replay-window-overflow-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" | head -n 120000', + ], + } + ) + result = noisy_process.result() + assert len(result.stdout) > 3 * 1024 * 1024 + + expect_hyperbrowser_error( + "process replay window expired", + lambda: _collect_process_stream(noisy_process.stream(1)), + status_code=410, + code="replay_window_expired", + service="runtime", + retryable=False, + message_includes="Replay window expired", + ) + + timeout_process = sandbox.processes.start( + {"command": "bash", "args": ["-lc", "sleep 10"]} + ) + expect_hyperbrowser_error( + "process wait timeout", + lambda: timeout_process.wait(timeout_ms=100), + status_code=408, + service="runtime", + retryable=False, + message_includes="timed out", + ) + timeout_process.signal("TERM") + result = timeout_process.wait(timeout_ms=3000) + assert result.status in {"exited", "failed", "killed", "timed_out"} + + kill_process = sandbox.processes.start( + {"command": "bash", "args": ["-lc", "sleep 30"]} + ) + result = kill_process.kill() + assert result.status not in {"queued", "running"} + assert kill_process.status not in {"queued", "running"} + + expect_hyperbrowser_error( + "missing process get", + lambda: sandbox.get_process("proc_missing"), + status_code=404, + service="runtime", + retryable=False, + message_includes="not found", + ) + finally: + stop_sandbox_if_running(sandbox) diff --git a/tests/sandbox/e2e/test_terminal_smoke.py b/tests/sandbox/e2e/test_terminal_smoke.py new file mode 100644 index 00000000..901b68de --- /dev/null +++ b/tests/sandbox/e2e/test_terminal_smoke.py @@ -0,0 +1,129 @@ 
+from tests.helpers.config import create_client +from tests.helpers.errors import expect_hyperbrowser_error +from tests.helpers.sandbox import ( + default_sandbox_params, + stop_sandbox_if_running, + wait_for_runtime_ready, +) + +client = create_client() + + +def _collect_terminal_session(connection): + output = "" + exit_code = None + + for event in connection.events(): + if event.type == "output": + output += event.data + continue + exit_code = event.status.exit_code + break + + return output, exit_code + + +def test_sandbox_terminal_e2e(): + sandbox = None + + try: + sandbox = client.sandboxes.create(default_sandbox_params("py-sdk-terminal")) + wait_for_runtime_ready(sandbox) + + assert sandbox.pty is sandbox.terminal + + terminal = sandbox.terminal.create( + { + "command": "bash", + "args": ["-l"], + "rows": 24, + "cols": 80, + } + ) + fetched = sandbox.terminal.get(terminal.id) + assert fetched.id == terminal.id + + connection = terminal.attach() + try: + terminal.resize(30, 100) + connection.write("pwd\n") + connection.write("echo terminal-smoke-ok\n") + connection.write("exit\n") + + output, exit_code = _collect_terminal_session(connection) + assert "terminal-smoke-ok" in output + assert exit_code == 0 + finally: + connection.close() + + status = terminal.wait(timeout_ms=2000) + assert status.running is False + assert status.exit_code == 0 + + terminal = sandbox.terminal.create( + { + "command": "bash", + "args": ["-l"], + "rows": 24, + "cols": 80, + } + ) + connection = terminal.attach() + try: + connection.resize(32, 110) + refreshed = terminal.refresh() + assert refreshed.current.rows == 32 + assert refreshed.current.cols == 110 + + connection.write("exit\n") + _, exit_code = _collect_terminal_session(connection) + assert exit_code == 0 + finally: + connection.close() + + status = terminal.wait(timeout_ms=2000) + assert status.running is False + + timeout_terminal = sandbox.pty.create( + { + "command": "bash", + "args": ["-lc", "sleep 10"], + "rows": 24, 
+ "cols": 80, + } + ) + expect_hyperbrowser_error( + "terminal wait timeout", + lambda: timeout_terminal.wait(timeout_ms=100), + status_code=408, + service="runtime", + retryable=False, + message_includes="timed out", + ) + + timeout_terminal.signal("TERM") + status = timeout_terminal.wait(timeout_ms=3000) + assert status.running is False + + kill_terminal = sandbox.pty.create( + { + "command": "bash", + "args": ["-lc", "sleep 30"], + "rows": 24, + "cols": 80, + } + ) + status = kill_terminal.kill() + assert status.running is False + assert kill_terminal.current.running is False + + expect_hyperbrowser_error( + "missing terminal get", + lambda: sandbox.terminal.get("pty_missing"), + status_code=404, + service="runtime", + retryable=False, + message_includes="not found", + ) + finally: + stop_sandbox_if_running(sandbox) From c9a391e041fd8bc48b506b81254c36209000399a Mon Sep 17 00:00:00 2001 From: Devin Deng Date: Wed, 11 Mar 2026 21:49:09 +0000 Subject: [PATCH 02/10] update tests --- hyperbrowser/client/async_client.py | 9 +- hyperbrowser/client/base.py | 2 + .../client/managers/async_manager/sandbox.py | 619 +++++++++++---- .../client/managers/sync_manager/sandbox.py | 716 ++++++++++++++---- hyperbrowser/client/sync.py | 9 +- hyperbrowser/config.py | 1 + hyperbrowser/models/__init__.py | 36 + hyperbrowser/models/sandbox.py | 193 ++++- hyperbrowser/sandbox_common.py | 26 +- tests/helpers/config.py | 46 +- tests/helpers/http.py | 47 +- tests/helpers/sandbox.py | 5 +- tests/sandbox/e2e/test_async_expose.py | 134 ++++ tests/sandbox/e2e/test_async_files.py | 684 +++++++++++++---- tests/sandbox/e2e/test_async_lifecycle.py | 190 ++++- tests/sandbox/e2e/test_async_process.py | 5 +- tests/sandbox/e2e/test_async_sudo.py | 69 ++ .../sandbox/e2e/test_async_terminal_smoke.py | 105 ++- tests/sandbox/e2e/test_expose.py | 130 ++++ tests/sandbox/e2e/test_files.py | 667 ++++++++++++---- tests/sandbox/e2e/test_lifecycle.py | 186 ++++- tests/sandbox/e2e/test_runtime_transport.py | 44 
++ tests/sandbox/e2e/test_sudo.py | 66 ++ tests/sandbox/e2e/test_terminal_smoke.py | 88 +++ 24 files changed, 3343 insertions(+), 734 deletions(-) create mode 100644 tests/sandbox/e2e/test_async_expose.py create mode 100644 tests/sandbox/e2e/test_async_sudo.py create mode 100644 tests/sandbox/e2e/test_expose.py create mode 100644 tests/sandbox/e2e/test_runtime_transport.py create mode 100644 tests/sandbox/e2e/test_sudo.py diff --git a/hyperbrowser/client/async_client.py b/hyperbrowser/client/async_client.py index bd23a541..a6c9b3d2 100644 --- a/hyperbrowser/client/async_client.py +++ b/hyperbrowser/client/async_client.py @@ -25,8 +25,15 @@ def __init__( api_key: Optional[str] = None, base_url: Optional[str] = None, timeout: Optional[int] = 30, + runtime_proxy_override: Optional[str] = None, ): - super().__init__(AsyncTransport, config, api_key, base_url) + super().__init__( + AsyncTransport, + config, + api_key, + base_url, + runtime_proxy_override, + ) self.timeout = timeout or 30 self.transport.client.timeout = timeout self.sessions = SessionManager(self) diff --git a/hyperbrowser/client/base.py b/hyperbrowser/client/base.py index 69ffc76e..ac6ac227 100644 --- a/hyperbrowser/client/base.py +++ b/hyperbrowser/client/base.py @@ -15,6 +15,7 @@ def __init__( config: Optional[ClientConfig] = None, api_key: Optional[str] = None, base_url: Optional[str] = None, + runtime_proxy_override: Optional[str] = None, ): if config is None: config = ClientConfig( @@ -30,6 +31,7 @@ def __init__( "HYPERBROWSER_BASE_URL", "https://api.hyperbrowser.ai" ) ), + runtime_proxy_override=runtime_proxy_override, ) if not config.api_key: diff --git a/hyperbrowser/client/managers/async_manager/sandbox.py b/hyperbrowser/client/managers/async_manager/sandbox.py index b302a7bb..0f086793 100644 --- a/hyperbrowser/client/managers/async_manager/sandbox.py +++ b/hyperbrowser/client/managers/async_manager/sandbox.py @@ -1,8 +1,12 @@ +import asyncio import base64 +import io +import inspect import json 
+import posixpath import socket from datetime import datetime, timedelta, timezone -from typing import AsyncIterator, Dict, Optional, Union +from typing import AsyncIterator, Callable, Dict, List, Optional, Union from urllib.parse import urlencode import httpx @@ -16,18 +20,20 @@ SandboxExecParams, SandboxFileChmodParams, SandboxFileChownParams, + SandboxFileCopyParams, SandboxFileDeleteParams, - SandboxFileEntry, - SandboxFileListResponse, - SandboxFileMoveCopyResult, - SandboxFileMutationResult, + SandboxFileInfo, SandboxFileReadResult, + SandboxFileSystemEvent, + SandboxFileWriteEntry, SandboxFileTransferResult, SandboxFileWatchDoneEvent, SandboxFileWatchEventMessage, SandboxFileWatchStatus, - SandboxListParams, - SandboxListResponse, + SandboxMemorySnapshotParams, + SandboxMemorySnapshotResult, + SandboxExposeParams, + SandboxExposeResult, SandboxPresignFileParams, SandboxPresignedUrl, SandboxProcessExitEvent, @@ -55,7 +61,20 @@ resolve_runtime_transport_target, to_websocket_transport_target, ) -from ..sync_manager.sandbox import _build_query_path, _copy_model, _normalize_websocket_error +from ..sync_manager.sandbox import ( + DEFAULT_WATCH_TIMEOUT_MS, + _build_sandbox_exposed_url, + _build_query_path, + _copy_model, + _encode_write_data, + _normalize_event_type, + _normalize_file_info, + _normalize_terminal_output_chunk, + _normalize_terminal_status, + _normalize_websocket_error, + _normalize_write_info, + _relative_watch_name, +) DEFAULT_PROCESS_KILL_WAIT_SECONDS = 5.0 DEFAULT_TERMINAL_KILL_WAIT_SECONDS = 5.0 @@ -73,9 +92,15 @@ def _expires_within_buffer(expires_at): class RuntimeTransport: - def __init__(self, resolve_connection, timeout: float = 30.0): + def __init__( + self, + resolve_connection, + timeout: float = 30.0, + runtime_proxy_override: Optional[str] = None, + ): self._resolve_connection = resolve_connection self._timeout = timeout + self._runtime_proxy_override = runtime_proxy_override async def request_json( self, @@ -240,7 +265,11 @@ async def 
_send( headers: Optional[Dict[str, str]], ) -> httpx.Response: request_path = _build_query_path(path, params) - target = resolve_runtime_transport_target(connection.base_url, request_path) + target = resolve_runtime_transport_target( + connection.base_url, + request_path, + self._runtime_proxy_override, + ) merged_headers = build_headers(connection.token, headers, target.host_header) client = httpx.AsyncClient(timeout=self._timeout) @@ -272,7 +301,11 @@ async def _send_stream( params: Optional[Dict[str, object]], ): request_path = _build_query_path(path, params) - target = resolve_runtime_transport_target(connection.base_url, request_path) + target = resolve_runtime_transport_target( + connection.base_url, + request_path, + self._runtime_proxy_override, + ) headers = build_headers( connection.token, {"Accept": "text/event-stream"}, @@ -488,10 +521,17 @@ async def list( class SandboxFileWatchHandle: - def __init__(self, transport: RuntimeTransport, get_connection_info, status): + def __init__( + self, + transport: RuntimeTransport, + get_connection_info, + status, + runtime_proxy_override: Optional[str] = None, + ): self._transport = transport self._get_connection_info = get_connection_info self._status = status + self._runtime_proxy_override = runtime_proxy_override @property def id(self) -> str: @@ -544,6 +584,7 @@ async def events( target = to_websocket_transport_target( connection.base_url, f"/sandbox/files/watch/{self.id}/{route}?{query}", + self._runtime_proxy_override, ) headers = build_headers(connection.token, host_header=target.host_header) connect_kwargs = {} @@ -598,40 +639,126 @@ async def events( await websocket.close() +class SandboxWatchDirHandle: + def __init__( + self, + watch: SandboxFileWatchHandle, + on_event: Callable[[SandboxFileSystemEvent], object], + *, + on_exit: Optional[Callable[[Optional[BaseException]], object]] = None, + timeout_ms: Optional[int] = None, + ): + self._watch = watch + self._root_path = watch.current.path + 
self._on_event = on_event + self._on_exit = on_exit + self._stop_requested = False + self._exit_notified = False + self._task = asyncio.create_task(self._run()) + effective_timeout = DEFAULT_WATCH_TIMEOUT_MS if timeout_ms is None else timeout_ms + self._timeout_task = ( + asyncio.create_task(self._auto_stop(effective_timeout)) + if effective_timeout > 0 + else None + ) + + async def stop(self) -> None: + if self._stop_requested: + return + self._stop_requested = True + + if self._timeout_task is not None: + self._timeout_task.cancel() + self._timeout_task = None + + try: + await self._watch.stop() + except HyperbrowserError as error: + if error.status_code not in {404, 409}: + raise + + if asyncio.current_task() is not self._task: + await self._task + + async def _auto_stop(self, timeout_ms: int) -> None: + try: + await asyncio.sleep(timeout_ms / 1000.0) + await self.stop() + except asyncio.CancelledError: + return + + async def _run(self) -> None: + exit_error = None + try: + async for message in self._watch.events(): + event_type = _normalize_event_type(message.event.op) + if not event_type: + continue + result = self._on_event( + SandboxFileSystemEvent( + type=event_type, + name=_relative_watch_name(self._root_path, message.event.path), + ) + ) + if inspect.isawaitable(result): + await result + except BaseException as error: + exit_error = error + finally: + if self._timeout_task is not None: + self._timeout_task.cancel() + self._timeout_task = None + if not self._exit_notified: + self._exit_notified = True + if self._on_exit is not None: + result = self._on_exit(exit_error) + if inspect.isawaitable(result): + await result + + class SandboxFilesApi: - def __init__(self, transport: RuntimeTransport, get_connection_info): + def __init__( + self, + transport: RuntimeTransport, + get_connection_info, + runtime_proxy_override: Optional[str] = None, + ): self._transport = transport self._get_connection_info = get_connection_info + self._runtime_proxy_override = 
runtime_proxy_override async def list( self, path: str, *, - recursive: Optional[bool] = None, - limit: Optional[int] = None, - cursor: Optional[int] = None, - ) -> SandboxFileListResponse: + depth: Optional[int] = None, + ) -> List[SandboxFileInfo]: + depth = 1 if depth is None else depth + if depth < 1: + raise ValueError("depth should be at least one") + payload = await self._transport.request_json( "/sandbox/files", params={ "path": path, - "recursive": recursive, - "limit": limit, - "cursor": cursor, + "depth": depth, }, ) - return SandboxFileListResponse(**payload) + return [_normalize_file_info(entry) for entry in payload.get("entries", [])] - async def stat(self, path: str): + async def get_info(self, path: str) -> SandboxFileInfo: payload = await self._transport.request_json( "/sandbox/files/stat", params={"path": path}, ) - return SandboxFileEntry(**payload["file"]) + return _normalize_file_info(payload["file"]) + + async def stat(self, path: str) -> SandboxFileInfo: + return await self.get_info(path) async def exists(self, path: str) -> bool: try: - await self.stat(path) + await self.get_info(path) return True except HyperbrowserError as error: if error.status_code == 404: @@ -646,20 +773,30 @@ async def read( *, offset: Optional[int] = None, length: Optional[int] = None, - encoding: str = "utf8", - ) -> SandboxFileReadResult: - payload = await self._transport.request_json( - "/sandbox/files/read", - method="POST", - json_body={ - "path": path, - "offset": offset, - "length": length, - "encoding": encoding, - }, - headers={"content-type": "application/json"}, + format: str = "text", + ): + if format == "text": + return ( + await self._read_wire( + path, + offset=offset, + length=length, + encoding="utf8", + ) + ).content + + response = await self._read_wire( + path, + offset=offset, + length=length, + encoding="base64", ) - return SandboxFileReadResult(**payload) + content = base64.b64decode(response.content) + if format in {"bytes", "blob"}: + return 
content + if format == "stream": + return io.BytesIO(content) + raise ValueError("format should be one of: text, bytes, blob, stream") async def read_text( self, @@ -668,7 +805,7 @@ async def read_text( offset: Optional[int] = None, length: Optional[int] = None, ) -> str: - return (await self.read(path, offset=offset, length=length, encoding="utf8")).content + return await self.read(path, offset=offset, length=length, format="text") async def read_bytes( self, @@ -677,8 +814,51 @@ async def read_bytes( offset: Optional[int] = None, length: Optional[int] = None, ) -> bytes: - result = await self.read(path, offset=offset, length=length, encoding="base64") - return base64.b64decode(result.content) + return await self.read(path, offset=offset, length=length, format="bytes") + + async def write( + self, + path_or_files: Union[str, List[Union[SandboxFileWriteEntry, Dict[str, object]]]], + data: Optional[Union[str, bytes, bytearray]] = None, + ): + if isinstance(path_or_files, str): + if data is None: + raise ValueError("Path and data are required") + payload = await self._transport.request_json( + "/sandbox/files/write", + method="POST", + json_body={ + "path": path_or_files, + **_encode_write_data(data), + }, + headers={"content-type": "application/json"}, + ) + return _normalize_write_info(payload["files"][0]) + + if not path_or_files: + return [] + + encoded_files = [] + for entry in path_or_files: + normalized = ( + entry + if isinstance(entry, SandboxFileWriteEntry) + else SandboxFileWriteEntry(**entry) + ) + encoded_files.append( + { + "path": normalized.path, + **_encode_write_data(normalized.data), + } + ) + + payload = await self._transport.request_json( + "/sandbox/files/write", + method="POST", + json_body={"files": encoded_files}, + headers={"content-type": "application/json"}, + ) + return [_normalize_write_info(entry) for entry in payload.get("files", [])] async def write_text( self, @@ -688,7 +868,7 @@ async def write_text( append: Optional[bool] = None, 
mode: Optional[str] = None, ): - return await self._write( + return await self._write_single( path, data, append=append, @@ -704,7 +884,7 @@ async def write_bytes( append: Optional[bool] = None, mode: Optional[str] = None, ): - return await self._write( + return await self._write_single( path, base64.b64encode(data).decode("ascii"), append=append, @@ -728,17 +908,24 @@ async def download(self, path: str) -> bytes: params={"path": path}, ) - async def delete(self, path: str, *, recursive: Optional[bool] = None): + async def make_dir( + self, + path: str, + *, + parents: Optional[bool] = None, + mode: Optional[str] = None, + ) -> bool: payload = await self._transport.request_json( - "/sandbox/files/delete", + "/sandbox/files/mkdir", method="POST", - json_body=SandboxFileDeleteParams( - path=path, - recursive=recursive, - ).model_dump(exclude_none=True), + json_body={ + "path": path, + "parents": parents, + "mode": mode, + }, headers={"content-type": "application/json"}, ) - return SandboxFileMutationResult(**payload) + return bool(payload.get("created")) async def mkdir( self, @@ -746,18 +933,20 @@ async def mkdir( *, parents: Optional[bool] = None, mode: Optional[str] = None, - ): + ) -> bool: + return await self.make_dir(path, parents=parents, mode=mode) + + async def rename(self, old_path: str, new_path: str) -> SandboxFileInfo: payload = await self._transport.request_json( - "/sandbox/files/mkdir", + "/sandbox/files/move", method="POST", json_body={ - "path": path, - "parents": parents, - "mode": mode, + "from": old_path, + "to": new_path, }, headers={"content-type": "application/json"}, ) - return SandboxFileMutationResult(**payload) + return _normalize_file_info(payload["entry"]) async def move( self, @@ -765,73 +954,109 @@ async def move( source: str, destination: str, overwrite: Optional[bool] = None, - ) -> SandboxFileMoveCopyResult: - payload = await self._transport.request_json( - "/sandbox/files/move", + ) -> SandboxFileInfo: + return await 
self.rename(source, destination) + + async def remove(self, path: str, *, recursive: Optional[bool] = None) -> None: + await self._transport.request_json( + "/sandbox/files/delete", method="POST", - json_body={ - "from": source, - "to": destination, - "overwrite": overwrite, - }, + json_body=SandboxFileDeleteParams( + path=path, + recursive=recursive, + ).model_dump(exclude_none=True), headers={"content-type": "application/json"}, ) - return SandboxFileMoveCopyResult(**payload) + + async def delete(self, path: str, *, recursive: Optional[bool] = None) -> None: + await self.remove(path, recursive=recursive) async def copy( self, + params: Optional[Union[SandboxFileCopyParams, Dict[str, object]]] = None, *, - source: str, - destination: str, + source: Optional[str] = None, + destination: Optional[str] = None, recursive: Optional[bool] = None, overwrite: Optional[bool] = None, - ) -> SandboxFileMoveCopyResult: + ) -> SandboxFileInfo: + if params is None: + normalized = SandboxFileCopyParams( + source=source, + destination=destination, + recursive=recursive, + overwrite=overwrite, + ) + elif isinstance(params, SandboxFileCopyParams): + normalized = params + else: + normalized = SandboxFileCopyParams(**params) + payload = await self._transport.request_json( "/sandbox/files/copy", method="POST", json_body={ - "from": source, - "to": destination, - "recursive": recursive, - "overwrite": overwrite, + "from": normalized.source, + "to": normalized.destination, + "recursive": normalized.recursive, + "overwrite": normalized.overwrite, }, headers={"content-type": "application/json"}, ) - return SandboxFileMoveCopyResult(**payload) + return _normalize_file_info(payload["entry"]) - async def chmod(self, *, path: str, mode: str, recursive: Optional[bool] = None): - payload = await self._transport.request_json( + async def chmod( + self, + params: Optional[Union[SandboxFileChmodParams, Dict[str, object]]] = None, + *, + path: Optional[str] = None, + mode: Optional[str] = None, + 
recursive: Optional[bool] = None, + ) -> None: + normalized = ( + params + if isinstance(params, SandboxFileChmodParams) + else SandboxFileChmodParams( + **(params or {"path": path, "mode": mode, "recursive": recursive}) + ) + ) + await self._transport.request_json( "/sandbox/files/chmod", method="POST", - json_body=SandboxFileChmodParams( - path=path, - mode=mode, - recursive=recursive, - ).model_dump(exclude_none=True), + json_body=normalized.model_dump(exclude_none=True), headers={"content-type": "application/json"}, ) - return SandboxFileMutationResult(**payload) async def chown( self, + params: Optional[Union[SandboxFileChownParams, Dict[str, object]]] = None, *, - path: str, + path: Optional[str] = None, uid: Optional[int] = None, gid: Optional[int] = None, recursive: Optional[bool] = None, - ): - payload = await self._transport.request_json( + ) -> None: + normalized = ( + params + if isinstance(params, SandboxFileChownParams) + else SandboxFileChownParams( + **( + params + or { + "path": path, + "uid": uid, + "gid": gid, + "recursive": recursive, + } + ) + ) + ) + await self._transport.request_json( "/sandbox/files/chown", method="POST", - json_body=SandboxFileChownParams( - path=path, - uid=uid, - gid=gid, - recursive=recursive, - ).model_dump(exclude_none=True), + json_body=normalized.model_dump(exclude_none=True), headers={"content-type": "application/json"}, ) - return SandboxFileMutationResult(**payload) async def watch(self, path: str, *, recursive: Optional[bool] = None): payload = await self._transport.request_json( @@ -847,6 +1072,23 @@ async def watch(self, path: str, *, recursive: Optional[bool] = None): self._transport, self._get_connection_info, SandboxFileWatchStatus(**payload["watch"]), + self._runtime_proxy_override, + ) + + async def watch_dir( + self, + path: str, + on_event: Callable[[SandboxFileSystemEvent], object], + *, + recursive: Optional[bool] = None, + timeout_ms: Optional[int] = None, + on_exit: 
Optional[Callable[[Optional[BaseException]], object]] = None, + ) -> SandboxWatchDirHandle: + return SandboxWatchDirHandle( + await self.watch(path, recursive=recursive), + on_event, + on_exit=on_exit, + timeout_ms=timeout_ms, ) async def get_watch( @@ -860,6 +1102,7 @@ async def get_watch( self._transport, self._get_connection_info, SandboxFileWatchStatus(**payload["watch"]), + self._runtime_proxy_override, ) async def upload_url( @@ -900,7 +1143,28 @@ async def download_url( ) return SandboxPresignedUrl(**payload) - async def _write( + async def _read_wire( + self, + path: str, + *, + offset: Optional[int] = None, + length: Optional[int] = None, + encoding: str, + ) -> SandboxFileReadResult: + payload = await self._transport.request_json( + "/sandbox/files/read", + method="POST", + json_body={ + "path": path, + "offset": offset, + "length": length, + "encoding": encoding, + }, + headers={"content-type": "application/json"}, + ) + return SandboxFileReadResult(**payload) + + async def _write_single( self, path: str, data: str, @@ -921,7 +1185,7 @@ async def _write( }, headers={"content-type": "application/json"}, ) - return SandboxFileTransferResult(**payload) + return _normalize_write_info(payload["files"][0]) class SandboxTerminalConnection: @@ -939,18 +1203,15 @@ async def events(self) -> AsyncIterator[object]: message = message.decode("utf-8") parsed = json.loads(message) if parsed["type"] == "output": - raw = base64.b64decode(parsed["data"]) + normalized = _normalize_terminal_output_chunk(parsed) yield SandboxTerminalOutputEvent( type="output", - seq=parsed["seq"], - data=raw.decode("utf-8", errors="replace"), - raw=raw, - timestamp=parsed["timestamp"], + **normalized, ) elif parsed["type"] == "exit": yield SandboxTerminalExitEvent( type="exit", - status=SandboxTerminalStatus(**parsed["status"]), + status=_normalize_terminal_status(parsed["status"]), ) async def write(self, data: Union[str, bytes, bytearray]) -> None: @@ -978,10 +1239,17 @@ async def 
close(self) -> None: class SandboxTerminalHandle: - def __init__(self, transport: RuntimeTransport, get_connection_info, status): + def __init__( + self, + transport: RuntimeTransport, + get_connection_info, + status, + runtime_proxy_override: Optional[str] = None, + ): self._transport = transport self._get_connection_info = get_connection_info self._status = status + self._runtime_proxy_override = runtime_proxy_override @property def id(self) -> str: @@ -1002,7 +1270,7 @@ async def refresh(self, include_output: bool = False) -> "SandboxTerminalHandle" f"/sandbox/pty/{self.id}", params={"includeOutput": True} if include_output else None, ) - self._status = SandboxTerminalStatus(**payload["pty"]) + self._status = _normalize_terminal_status(payload["pty"]) return self async def wait( @@ -1019,7 +1287,7 @@ async def wait( ).model_dump(exclude_none=True, by_alias=True), headers={"content-type": "application/json"}, ) - self._status = SandboxTerminalStatus(**payload["pty"]) + self._status = _normalize_terminal_status(payload["pty"]) return self.current async def signal(self, signal: Optional[str] = None) -> SandboxTerminalStatus: @@ -1029,7 +1297,7 @@ async def signal(self, signal: Optional[str] = None) -> SandboxTerminalStatus: json_body={"signal": signal}, headers={"content-type": "application/json"}, ) - self._status = SandboxTerminalStatus(**payload["pty"]) + self._status = _normalize_terminal_status(payload["pty"]) return self.current async def kill( @@ -1050,7 +1318,7 @@ async def resize(self, rows: int, cols: int) -> SandboxTerminalStatus: json_body={"rows": rows, "cols": cols}, headers={"content-type": "application/json"}, ) - self._status = SandboxTerminalStatus(**payload["pty"]) + self._status = _normalize_terminal_status(payload["pty"]) return self.current async def attach(self) -> SandboxTerminalConnection: @@ -1058,6 +1326,7 @@ async def attach(self) -> SandboxTerminalConnection: target = to_websocket_transport_target( connection.base_url, 
f"/sandbox/pty/{self.id}/ws?sessionId={connection.sandbox_id}", + self._runtime_proxy_override, ) headers = build_headers(connection.token, host_header=target.host_header) connect_kwargs = {} @@ -1083,9 +1352,15 @@ async def attach(self) -> SandboxTerminalConnection: class SandboxTerminalApi: - def __init__(self, transport: RuntimeTransport, get_connection_info): + def __init__( + self, + transport: RuntimeTransport, + get_connection_info, + runtime_proxy_override: Optional[str] = None, + ): self._transport = transport self._get_connection_info = get_connection_info + self._runtime_proxy_override = runtime_proxy_override async def create( self, @@ -1105,7 +1380,8 @@ async def create( return SandboxTerminalHandle( self._transport, self._get_connection_info, - SandboxTerminalStatus(**payload["pty"]), + _normalize_terminal_status(payload["pty"]), + self._runtime_proxy_override, ) async def get( @@ -1118,7 +1394,8 @@ async def get( return SandboxTerminalHandle( self._transport, self._get_connection_info, - SandboxTerminalStatus(**payload["pty"]), + _normalize_terminal_status(payload["pty"]), + self._runtime_proxy_override, ) @@ -1130,12 +1407,18 @@ def __init__(self, service: "SandboxManager", detail: SandboxDetail): self._transport = RuntimeTransport( self._resolve_runtime_connection, service.runtime_timeout, + service.runtime_proxy_override, ) self.processes = SandboxProcessesApi(self._transport) - self.files = SandboxFilesApi(self._transport, self._resolve_runtime_socket_info) + self.files = SandboxFilesApi( + self._transport, + self._resolve_runtime_socket_info, + service.runtime_proxy_override, + ) self.terminal = SandboxTerminalApi( self._transport, self._resolve_runtime_socket_info, + service.runtime_proxy_override, ) self.pty = self.terminal @@ -1187,6 +1470,29 @@ async def stop(self) -> BasicResponse: self._clear_runtime_session("closed") return response + async def create_memory_snapshot( + self, params: Optional[Union[SandboxMemorySnapshotParams, Dict[str, 
object]]] = None + ) -> SandboxMemorySnapshotResult: + normalized = ( + params + if isinstance(params, SandboxMemorySnapshotParams) + else SandboxMemorySnapshotParams(**(params or {})) + ) + return await self._service.create_memory_snapshot(self.id, normalized) + + async def expose( + self, params: Union[SandboxExposeParams, Dict[str, object]] + ) -> SandboxExposeResult: + normalized = ( + params + if isinstance(params, SandboxExposeParams) + else SandboxExposeParams(**params) + ) + return await self._service.expose(self.id, normalized, runtime=self.runtime) + + def get_exposed_url(self, port: int) -> str: + return _build_sandbox_exposed_url(self.runtime, port) + async def create_runtime_session( self, force_refresh: bool = False ) -> SandboxRuntimeSession: @@ -1198,9 +1504,17 @@ async def create_runtime_session( ): return _copy_model(self._runtime_session) - session = await self._service.get_runtime_session(self.id) - self._apply_runtime_session(session) - return _copy_model(session) + detail = await self._service.get_detail(self.id) + self._hydrate(detail) + if self._runtime_session is None: + raise HyperbrowserError( + f"Sandbox {self.id} is not running", + status_code=409, + code="sandbox_not_running", + retryable=False, + service="runtime", + ) + return _copy_model(self._runtime_session) async def exec(self, input: Union[str, SandboxExecParams, Dict[str, object]]): if isinstance(input, str): @@ -1286,16 +1600,25 @@ class SandboxManager: def __init__(self, client): self._client = client self.runtime_timeout = getattr(client, "timeout", 30) + self.runtime_proxy_override = getattr( + client.config, + "runtime_proxy_override", + None, + ) - async def create(self, params: CreateSandboxParams) -> SandboxHandle: - detail = await self._create_detail(params) + async def create( + self, params: Union[CreateSandboxParams, Dict[str, object]] + ) -> SandboxHandle: + normalized = ( + params if isinstance(params, CreateSandboxParams) else CreateSandboxParams(**params) + ) + 
detail = await self._create_detail(normalized) return self.attach(detail) async def start_from_snapshot( - self, params: StartSandboxFromSnapshotParams + self, params: Union[StartSandboxFromSnapshotParams, Dict[str, object]] ) -> SandboxHandle: - detail = await self._start_from_snapshot_detail(params) - return self.attach(detail) + return await self.create(params) async def get(self, sandbox_id: str) -> SandboxHandle: return self.attach(await self.get_detail(sandbox_id)) @@ -1305,47 +1628,67 @@ async def connect(self, sandbox_id: str) -> SandboxHandle: await sandbox.connect() return sandbox - async def list( - self, params: Optional[SandboxListParams] = None - ) -> SandboxListResponse: - payload = await self._request( - "GET", - "/sandboxes", - params=(params or SandboxListParams()).model_dump( - exclude_none=True, by_alias=True - ), - ) - return SandboxListResponse(**payload) - async def stop(self, sandbox_id: str) -> BasicResponse: - payload = await self._request("POST", f"/sandboxes/{sandbox_id}/stop") + payload = await self._request("PUT", f"/sandbox/{sandbox_id}/stop") return BasicResponse(**payload) async def get_runtime_session(self, sandbox_id: str) -> SandboxRuntimeSession: - payload = await self._request("POST", f"/sandboxes/{sandbox_id}/runtime-session") - return SandboxRuntimeSession(**payload) + detail = await self.get_detail(sandbox_id) + session = SandboxHandle._to_runtime_session(detail) + if session is None: + raise HyperbrowserError( + f"Sandbox {sandbox_id} is not running", + status_code=409, + code="sandbox_not_running", + retryable=False, + service="runtime", + ) + return session async def get_detail(self, sandbox_id: str) -> SandboxDetail: - payload = await self._request("GET", f"/sandboxes/{sandbox_id}") + payload = await self._request("GET", f"/sandbox/{sandbox_id}") return SandboxDetail(**payload) def attach(self, detail: SandboxDetail) -> SandboxHandle: return SandboxHandle(self, detail) - async def _create_detail(self, params: 
CreateSandboxParams) -> SandboxDetail: + async def create_memory_snapshot( + self, + sandbox_id: str, + params: Optional[SandboxMemorySnapshotParams] = None, + ) -> SandboxMemorySnapshotResult: payload = await self._request( "POST", - "/sandboxes", + f"/sandbox/{sandbox_id}/snapshot", + data=(params or SandboxMemorySnapshotParams()).model_dump( + exclude_none=True, by_alias=True + ), + ) + return SandboxMemorySnapshotResult(**payload) + + async def expose( + self, + sandbox_id: str, + params: SandboxExposeParams, + *, + runtime=None, + ) -> SandboxExposeResult: + payload = await self._request( + "POST", + f"/sandbox/{sandbox_id}/expose", data=params.model_dump(exclude_none=True, by_alias=True), ) - return SandboxDetail(**payload) + target_runtime = runtime or (await self.get_detail(sandbox_id)).runtime + return SandboxExposeResult( + port=payload["port"], + auth=payload["auth"], + url=_build_sandbox_exposed_url(target_runtime, payload["port"]), + ) - async def _start_from_snapshot_detail( - self, params: StartSandboxFromSnapshotParams - ) -> SandboxDetail: + async def _create_detail(self, params: CreateSandboxParams) -> SandboxDetail: payload = await self._request( "POST", - "/sandboxes/startFromSnapshot", + "/sandbox", data=params.model_dump(exclude_none=True, by_alias=True), ) return SandboxDetail(**payload) diff --git a/hyperbrowser/client/managers/sync_manager/sandbox.py b/hyperbrowser/client/managers/sync_manager/sandbox.py index 823a444f..79cba4a3 100644 --- a/hyperbrowser/client/managers/sync_manager/sandbox.py +++ b/hyperbrowser/client/managers/sync_manager/sandbox.py @@ -1,9 +1,12 @@ import base64 +import io import json +import posixpath import socket +import threading from datetime import datetime, timedelta, timezone -from typing import Dict, Iterator, Optional, Union -from urllib.parse import urlencode +from typing import Callable, Dict, Iterator, List, Optional, Union +from urllib.parse import urlencode, urlsplit, urlunsplit import httpx from 
websockets.exceptions import ConnectionClosed @@ -16,18 +19,23 @@ SandboxExecParams, SandboxFileChmodParams, SandboxFileChownParams, - SandboxFileEntry, + SandboxFileCopyParams, SandboxFileDeleteParams, - SandboxFileListResponse, - SandboxFileMoveCopyResult, - SandboxFileMutationResult, + SandboxFileInfo, + SandboxFileListOptions, + SandboxFileReadOptions, SandboxFileReadResult, + SandboxFileSystemEvent, + SandboxFileWriteEntry, + SandboxFileWriteInfo, SandboxFileTransferResult, SandboxFileWatchDoneEvent, SandboxFileWatchEventMessage, SandboxFileWatchStatus, - SandboxListParams, - SandboxListResponse, + SandboxMemorySnapshotParams, + SandboxMemorySnapshotResult, + SandboxExposeParams, + SandboxExposeResult, SandboxPresignFileParams, SandboxPresignedUrl, SandboxProcessExitEvent, @@ -59,12 +67,34 @@ DEFAULT_PROCESS_KILL_WAIT_SECONDS = 5.0 DEFAULT_TERMINAL_KILL_WAIT_SECONDS = 5.0 +DEFAULT_WATCH_TIMEOUT_MS = 60_000 def _copy_model(model): return model.model_copy(deep=True) +def _build_sandbox_exposed_url(runtime, port: int) -> str: + parsed = urlsplit(runtime.base_url) + hostname = parsed.hostname + if not hostname: + return runtime.base_url.rstrip("/") + + exposed_host = f"{port}-{hostname}" + netloc = exposed_host + if parsed.port: + netloc = f"{netloc}:{parsed.port}" + if parsed.username: + credentials = parsed.username + if parsed.password: + credentials = f"{credentials}:{parsed.password}" + netloc = f"{credentials}@{netloc}" + + return urlunsplit( + (parsed.scheme, netloc, parsed.path, parsed.query, parsed.fragment) + ).rstrip("/") + + def _expires_within_buffer(expires_at: Optional[datetime]) -> bool: if expires_at is None: return False @@ -157,10 +187,90 @@ def _normalize_websocket_error(error: BaseException) -> HyperbrowserError: ) +def _normalize_file_type(value: Optional[str]) -> Optional[str]: + if not value: + return None + return "dir" if value in {"dir", "directory"} else "file" + + +def _normalize_file_info(entry: Dict[str, object]) -> SandboxFileInfo: 
+ normalized = dict(entry) + normalized["type"] = _normalize_file_type(normalized.get("type")) + return SandboxFileInfo(**normalized) + + +def _normalize_write_info(entry: Dict[str, object]) -> SandboxFileWriteInfo: + normalized = dict(entry) + normalized["type"] = _normalize_file_type(normalized.get("type")) + return SandboxFileWriteInfo(**normalized) + + +def _normalize_event_type(operation: str) -> Optional[str]: + lower = operation.lower() + if "chmod" in lower: + return "chmod" + if "create" in lower: + return "create" + if "remove" in lower or "delete" in lower: + return "remove" + if "rename" in lower: + return "rename" + if "write" in lower: + return "write" + return None + + +def _relative_watch_name(root: str, absolute_path: str) -> str: + relative = posixpath.relpath(absolute_path, root) + if relative in {"", "."}: + return posixpath.basename(absolute_path) + return relative + + +def _encode_write_data(data: Union[str, bytes, bytearray]) -> Dict[str, str]: + if isinstance(data, str): + return { + "data": data, + "encoding": "utf8", + } + return { + "data": base64.b64encode(bytes(data)).decode("ascii"), + "encoding": "base64", + } + + +def _normalize_terminal_output_chunk(entry: Dict[str, object]) -> Dict[str, object]: + raw = base64.b64decode(entry["data"]) + return { + "seq": entry["seq"], + "data": raw.decode("utf-8", errors="replace"), + "raw": raw, + "timestamp": entry["timestamp"], + } + + +def _normalize_terminal_status(entry: Dict[str, object]) -> SandboxTerminalStatus: + normalized = dict(entry) + output = normalized.get("output") + if isinstance(output, list): + normalized["output"] = [ + _normalize_terminal_output_chunk(chunk) + for chunk in output + if isinstance(chunk, dict) + ] + return SandboxTerminalStatus(**normalized) + + class RuntimeTransport: - def __init__(self, resolve_connection, timeout: float = 30.0): + def __init__( + self, + resolve_connection, + timeout: float = 30.0, + runtime_proxy_override: Optional[str] = None, + ): 
self._resolve_connection = resolve_connection self._timeout = timeout + self._runtime_proxy_override = runtime_proxy_override def request_json( self, @@ -325,7 +435,11 @@ def _send( headers: Optional[Dict[str, str]], ) -> httpx.Response: request_path = _build_query_path(path, params) - target = resolve_runtime_transport_target(connection.base_url, request_path) + target = resolve_runtime_transport_target( + connection.base_url, + request_path, + self._runtime_proxy_override, + ) merged_headers = build_headers(connection.token, headers, target.host_header) client = httpx.Client(timeout=self._timeout) @@ -357,7 +471,11 @@ def _send_stream( params: Optional[Dict[str, object]], ): request_path = _build_query_path(path, params) - target = resolve_runtime_transport_target(connection.base_url, request_path) + target = resolve_runtime_transport_target( + connection.base_url, + request_path, + self._runtime_proxy_override, + ) headers = build_headers( connection.token, {"Accept": "text/event-stream"}, @@ -565,10 +683,17 @@ def list( class SandboxFileWatchHandle: - def __init__(self, transport: RuntimeTransport, get_connection_info, status): + def __init__( + self, + transport: RuntimeTransport, + get_connection_info, + status, + runtime_proxy_override: Optional[str] = None, + ): self._transport = transport self._get_connection_info = get_connection_info self._status = status + self._runtime_proxy_override = runtime_proxy_override @property def id(self) -> str: @@ -621,6 +746,7 @@ def events( target = to_websocket_transport_target( connection.base_url, f"/sandbox/files/watch/{self.id}/{route}?{query}", + self._runtime_proxy_override, ) headers = build_headers(connection.token, host_header=target.host_header) connect_kwargs = {} @@ -673,40 +799,89 @@ def events( websocket.close() -class SandboxFilesApi: - def __init__(self, transport: RuntimeTransport, get_connection_info): - self._transport = transport - self._get_connection_info = get_connection_info - - def list( +class 
SandboxWatchDirHandle: + def __init__( self, - path: str, + watch: SandboxFileWatchHandle, + on_event: Callable[[SandboxFileSystemEvent], object], *, - recursive: Optional[bool] = None, - limit: Optional[int] = None, - cursor: Optional[int] = None, - ) -> SandboxFileListResponse: - payload = self._transport.request_json( - "/sandbox/files", - params={ - "path": path, - "recursive": recursive, - "limit": limit, - "cursor": cursor, - }, - ) - return SandboxFileListResponse(**payload) + on_exit: Optional[Callable[[Optional[BaseException]], object]] = None, + timeout_ms: Optional[int] = None, + ): + self._watch = watch + self._root_path = watch.current.path + self._on_event = on_event + self._on_exit = on_exit + self._thread = threading.Thread(target=self._run, daemon=True) + self._timer = None + self._stopped = threading.Event() + self._exit_notified = False + + effective_timeout = DEFAULT_WATCH_TIMEOUT_MS if timeout_ms is None else timeout_ms + if effective_timeout > 0: + self._timer = threading.Timer(effective_timeout / 1000.0, self.stop) + self._timer.daemon = True + self._timer.start() + + self._thread.start() + + def stop(self) -> None: + if self._stopped.is_set(): + return + self._stopped.set() + + if self._timer is not None: + self._timer.cancel() + self._timer = None + + try: + self._watch.stop() + except HyperbrowserError as error: + if error.status_code not in {404, 409}: + raise + + if threading.current_thread() is not self._thread: + self._thread.join() + + def _run(self) -> None: + exit_error = None + try: + for message in self._watch.events(): + event_type = _normalize_event_type(message.event.op) + if not event_type: + continue + self._on_event( + SandboxFileSystemEvent( + type=event_type, + name=_relative_watch_name(self._root_path, message.event.path), + ) + ) + except BaseException as error: + exit_error = error + finally: + if self._timer is not None: + self._timer.cancel() + self._timer = None + if not self._exit_notified: + self._exit_notified = 
True + if self._on_exit is not None: + self._on_exit(exit_error) - def stat(self, path: str): - payload = self._transport.request_json( - "/sandbox/files/stat", - params={"path": path}, - ) - return SandboxFileEntry(**payload["file"]) + +class SandboxFilesApi: + def __init__( + self, + transport: RuntimeTransport, + get_connection_info, + runtime_proxy_override: Optional[str] = None, + ): + self._transport = transport + self._get_connection_info = get_connection_info + self._runtime_proxy_override = runtime_proxy_override def exists(self, path: str) -> bool: try: - self.stat(path) + self.get_info(path) return True except HyperbrowserError as error: if error.status_code == 404: @@ -715,26 +890,53 @@ def exists(self, path: str) -> bool: return False raise - def read( + def get_info(self, path: str) -> SandboxFileInfo: + payload = self._transport.request_json( + "/sandbox/files/stat", + params={"path": path}, + ) + return _normalize_file_info(payload["file"]) + + def stat(self, path: str) -> SandboxFileInfo: + return self.get_info(path) + + def list( self, path: str, *, - offset: Optional[int] = None, - length: Optional[int] = None, - encoding: str = "utf8", - ) -> SandboxFileReadResult: + depth: Optional[int] = None, + ) -> List[SandboxFileInfo]: + depth = 1 if depth is None else depth + if depth < 1: + raise ValueError("depth should be at least one") + payload = self._transport.request_json( - "/sandbox/files/read", - method="POST", - json_body={ + "/sandbox/files", + params={ "path": path, - "offset": offset, - "length": length, - "encoding": encoding, + "depth": depth, }, - headers={"content-type": "application/json"}, ) - return SandboxFileReadResult(**payload) + return [_normalize_file_info(entry) for entry in payload.get("entries", [])] + + def read( + self, + path: str, + *, + offset: Optional[int] = None, + length: Optional[int] = None, + format: str = "text", + ): + if format == "text": + return self._read_wire(path, offset=offset, length=length, 
encoding="utf8").content + + response = self._read_wire(path, offset=offset, length=length, encoding="base64") + content = base64.b64decode(response.content) + if format in {"bytes", "blob"}: + return content + if format == "stream": + return io.BytesIO(content) + raise ValueError("format should be one of: text, bytes, blob, stream") def read_text( self, @@ -743,7 +945,7 @@ def read_text( offset: Optional[int] = None, length: Optional[int] = None, ) -> str: - return self.read(path, offset=offset, length=length, encoding="utf8").content + return self.read(path, offset=offset, length=length, format="text") def read_bytes( self, @@ -752,8 +954,51 @@ def read_bytes( offset: Optional[int] = None, length: Optional[int] = None, ) -> bytes: - result = self.read(path, offset=offset, length=length, encoding="base64") - return base64.b64decode(result.content) + return self.read(path, offset=offset, length=length, format="bytes") + + def write( + self, + path_or_files: Union[str, List[Union[SandboxFileWriteEntry, Dict[str, object]]]], + data: Optional[Union[str, bytes, bytearray]] = None, + ): + if isinstance(path_or_files, str): + if data is None: + raise ValueError("Path and data are required") + payload = self._transport.request_json( + "/sandbox/files/write", + method="POST", + json_body={ + "path": path_or_files, + **_encode_write_data(data), + }, + headers={"content-type": "application/json"}, + ) + return _normalize_write_info(payload["files"][0]) + + if not path_or_files: + return [] + + encoded_files = [] + for entry in path_or_files: + normalized = ( + entry + if isinstance(entry, SandboxFileWriteEntry) + else SandboxFileWriteEntry(**entry) + ) + encoded_files.append( + { + "path": normalized.path, + **_encode_write_data(normalized.data), + } + ) + + payload = self._transport.request_json( + "/sandbox/files/write", + method="POST", + json_body={"files": encoded_files}, + headers={"content-type": "application/json"}, + ) + return [_normalize_write_info(entry) for 
entry in payload.get("files", [])] def write_text( self, @@ -763,7 +1008,7 @@ def write_text( append: Optional[bool] = None, mode: Optional[str] = None, ): - return self._write( + return self._write_single( path, data, append=append, @@ -779,7 +1024,7 @@ def write_bytes( append: Optional[bool] = None, mode: Optional[str] = None, ): - return self._write( + return self._write_single( path, base64.b64encode(data).decode("ascii"), append=append, @@ -803,17 +1048,24 @@ def download(self, path: str) -> bytes: params={"path": path}, ) - def delete(self, path: str, *, recursive: Optional[bool] = None): + def make_dir( + self, + path: str, + *, + parents: Optional[bool] = None, + mode: Optional[str] = None, + ) -> bool: payload = self._transport.request_json( - "/sandbox/files/delete", + "/sandbox/files/mkdir", method="POST", - json_body=SandboxFileDeleteParams( - path=path, - recursive=recursive, - ).model_dump(exclude_none=True), + json_body={ + "path": path, + "parents": parents, + "mode": mode, + }, headers={"content-type": "application/json"}, ) - return SandboxFileMutationResult(**payload) + return bool(payload.get("created")) def mkdir( self, @@ -821,18 +1073,20 @@ def mkdir( *, parents: Optional[bool] = None, mode: Optional[str] = None, - ): + ) -> bool: + return self.make_dir(path, parents=parents, mode=mode) + + def rename(self, old_path: str, new_path: str) -> SandboxFileInfo: payload = self._transport.request_json( - "/sandbox/files/mkdir", + "/sandbox/files/move", method="POST", json_body={ - "path": path, - "parents": parents, - "mode": mode, + "from": old_path, + "to": new_path, }, headers={"content-type": "application/json"}, ) - return SandboxFileMutationResult(**payload) + return _normalize_file_info(payload["entry"]) def move( self, @@ -840,73 +1094,109 @@ def move( source: str, destination: str, overwrite: Optional[bool] = None, - ) -> SandboxFileMoveCopyResult: - payload = self._transport.request_json( - "/sandbox/files/move", + ) -> SandboxFileInfo: + 
return self.rename(source, destination) + + def remove(self, path: str, *, recursive: Optional[bool] = None) -> None: + self._transport.request_json( + "/sandbox/files/delete", method="POST", - json_body={ - "from": source, - "to": destination, - "overwrite": overwrite, - }, + json_body=SandboxFileDeleteParams( + path=path, + recursive=recursive, + ).model_dump(exclude_none=True), headers={"content-type": "application/json"}, ) - return SandboxFileMoveCopyResult(**payload) + + def delete(self, path: str, *, recursive: Optional[bool] = None) -> None: + self.remove(path, recursive=recursive) def copy( self, + params: Optional[Union[SandboxFileCopyParams, Dict[str, object]]] = None, *, - source: str, - destination: str, + source: Optional[str] = None, + destination: Optional[str] = None, recursive: Optional[bool] = None, overwrite: Optional[bool] = None, - ) -> SandboxFileMoveCopyResult: + ) -> SandboxFileInfo: + if params is None: + normalized = SandboxFileCopyParams( + source=source, + destination=destination, + recursive=recursive, + overwrite=overwrite, + ) + elif isinstance(params, SandboxFileCopyParams): + normalized = params + else: + normalized = SandboxFileCopyParams(**params) + payload = self._transport.request_json( "/sandbox/files/copy", method="POST", json_body={ - "from": source, - "to": destination, - "recursive": recursive, - "overwrite": overwrite, + "from": normalized.source, + "to": normalized.destination, + "recursive": normalized.recursive, + "overwrite": normalized.overwrite, }, headers={"content-type": "application/json"}, ) - return SandboxFileMoveCopyResult(**payload) + return _normalize_file_info(payload["entry"]) - def chmod(self, *, path: str, mode: str, recursive: Optional[bool] = None): - payload = self._transport.request_json( + def chmod( + self, + params: Optional[Union[SandboxFileChmodParams, Dict[str, object]]] = None, + *, + path: Optional[str] = None, + mode: Optional[str] = None, + recursive: Optional[bool] = None, + ) -> None: + 
normalized = ( + params + if isinstance(params, SandboxFileChmodParams) + else SandboxFileChmodParams( + **(params or {"path": path, "mode": mode, "recursive": recursive}) + ) + ) + self._transport.request_json( "/sandbox/files/chmod", method="POST", - json_body=SandboxFileChmodParams( - path=path, - mode=mode, - recursive=recursive, - ).model_dump(exclude_none=True), + json_body=normalized.model_dump(exclude_none=True), headers={"content-type": "application/json"}, ) - return SandboxFileMutationResult(**payload) def chown( self, + params: Optional[Union[SandboxFileChownParams, Dict[str, object]]] = None, *, - path: str, + path: Optional[str] = None, uid: Optional[int] = None, gid: Optional[int] = None, recursive: Optional[bool] = None, - ): - payload = self._transport.request_json( + ) -> None: + normalized = ( + params + if isinstance(params, SandboxFileChownParams) + else SandboxFileChownParams( + **( + params + or { + "path": path, + "uid": uid, + "gid": gid, + "recursive": recursive, + } + ) + ) + ) + self._transport.request_json( "/sandbox/files/chown", method="POST", - json_body=SandboxFileChownParams( - path=path, - uid=uid, - gid=gid, - recursive=recursive, - ).model_dump(exclude_none=True), + json_body=normalized.model_dump(exclude_none=True), headers={"content-type": "application/json"}, ) - return SandboxFileMutationResult(**payload) def watch(self, path: str, *, recursive: Optional[bool] = None): payload = self._transport.request_json( @@ -922,6 +1212,23 @@ def watch(self, path: str, *, recursive: Optional[bool] = None): self._transport, self._get_connection_info, SandboxFileWatchStatus(**payload["watch"]), + self._runtime_proxy_override, + ) + + def watch_dir( + self, + path: str, + on_event: Callable[[SandboxFileSystemEvent], object], + *, + recursive: Optional[bool] = None, + timeout_ms: Optional[int] = None, + on_exit: Optional[Callable[[Optional[BaseException]], object]] = None, + ) -> SandboxWatchDirHandle: + return SandboxWatchDirHandle( + 
self.watch(path, recursive=recursive), + on_event, + on_exit=on_exit, + timeout_ms=timeout_ms, ) def get_watch( @@ -935,6 +1242,7 @@ def get_watch( self._transport, self._get_connection_info, SandboxFileWatchStatus(**payload["watch"]), + self._runtime_proxy_override, ) def upload_url( @@ -975,7 +1283,28 @@ def download_url( ) return SandboxPresignedUrl(**payload) - def _write( + def _read_wire( + self, + path: str, + *, + offset: Optional[int] = None, + length: Optional[int] = None, + encoding: str, + ) -> SandboxFileReadResult: + payload = self._transport.request_json( + "/sandbox/files/read", + method="POST", + json_body={ + "path": path, + "offset": offset, + "length": length, + "encoding": encoding, + }, + headers={"content-type": "application/json"}, + ) + return SandboxFileReadResult(**payload) + + def _write_single( self, path: str, data: str, @@ -996,7 +1325,7 @@ def _write( }, headers={"content-type": "application/json"}, ) - return SandboxFileTransferResult(**payload) + return _normalize_write_info(payload["files"][0]) class SandboxTerminalConnection: @@ -1014,18 +1343,15 @@ def events(self): message = message.decode("utf-8") parsed = json.loads(message) if parsed["type"] == "output": - raw = base64.b64decode(parsed["data"]) + normalized = _normalize_terminal_output_chunk(parsed) yield SandboxTerminalOutputEvent( type="output", - seq=parsed["seq"], - data=raw.decode("utf-8", errors="replace"), - raw=raw, - timestamp=parsed["timestamp"], + **normalized, ) elif parsed["type"] == "exit": yield SandboxTerminalExitEvent( type="exit", - status=SandboxTerminalStatus(**parsed["status"]), + status=_normalize_terminal_status(parsed["status"]), ) def write(self, data: Union[str, bytes, bytearray]) -> None: @@ -1053,10 +1379,17 @@ def close(self) -> None: class SandboxTerminalHandle: - def __init__(self, transport: RuntimeTransport, get_connection_info, status): + def __init__( + self, + transport: RuntimeTransport, + get_connection_info, + status, + 
runtime_proxy_override: Optional[str] = None, + ): self._transport = transport self._get_connection_info = get_connection_info self._status = status + self._runtime_proxy_override = runtime_proxy_override @property def id(self) -> str: @@ -1077,7 +1410,7 @@ def refresh(self, include_output: bool = False) -> "SandboxTerminalHandle": f"/sandbox/pty/{self.id}", params={"includeOutput": True} if include_output else None, ) - self._status = SandboxTerminalStatus(**payload["pty"]) + self._status = _normalize_terminal_status(payload["pty"]) return self def wait( @@ -1094,7 +1427,7 @@ def wait( ).model_dump(exclude_none=True, by_alias=True), headers={"content-type": "application/json"}, ) - self._status = SandboxTerminalStatus(**payload["pty"]) + self._status = _normalize_terminal_status(payload["pty"]) return self.current def signal(self, signal: Optional[str] = None) -> SandboxTerminalStatus: @@ -1104,7 +1437,7 @@ def signal(self, signal: Optional[str] = None) -> SandboxTerminalStatus: json_body={"signal": signal}, headers={"content-type": "application/json"}, ) - self._status = SandboxTerminalStatus(**payload["pty"]) + self._status = _normalize_terminal_status(payload["pty"]) return self.current def kill( @@ -1125,7 +1458,7 @@ def resize(self, rows: int, cols: int) -> SandboxTerminalStatus: json_body={"rows": rows, "cols": cols}, headers={"content-type": "application/json"}, ) - self._status = SandboxTerminalStatus(**payload["pty"]) + self._status = _normalize_terminal_status(payload["pty"]) return self.current def attach(self) -> SandboxTerminalConnection: @@ -1133,6 +1466,7 @@ def attach(self) -> SandboxTerminalConnection: target = to_websocket_transport_target( connection.base_url, f"/sandbox/pty/{self.id}/ws?sessionId={connection.sandbox_id}", + self._runtime_proxy_override, ) headers = build_headers(connection.token, host_header=target.host_header) connect_kwargs = {} @@ -1156,9 +1490,15 @@ def attach(self) -> SandboxTerminalConnection: class SandboxTerminalApi: - 
def __init__(self, transport: RuntimeTransport, get_connection_info): + def __init__( + self, + transport: RuntimeTransport, + get_connection_info, + runtime_proxy_override: Optional[str] = None, + ): self._transport = transport self._get_connection_info = get_connection_info + self._runtime_proxy_override = runtime_proxy_override def create( self, @@ -1178,7 +1518,8 @@ def create( return SandboxTerminalHandle( self._transport, self._get_connection_info, - SandboxTerminalStatus(**payload["pty"]), + _normalize_terminal_status(payload["pty"]), + self._runtime_proxy_override, ) def get(self, terminal_id: str, include_output: bool = False) -> SandboxTerminalHandle: @@ -1189,7 +1530,8 @@ def get(self, terminal_id: str, include_output: bool = False) -> SandboxTerminal return SandboxTerminalHandle( self._transport, self._get_connection_info, - SandboxTerminalStatus(**payload["pty"]), + _normalize_terminal_status(payload["pty"]), + self._runtime_proxy_override, ) @@ -1201,12 +1543,18 @@ def __init__(self, service: "SandboxManager", detail: SandboxDetail): self._transport = RuntimeTransport( self._resolve_runtime_connection, service.runtime_timeout, + service.runtime_proxy_override, ) self.processes = SandboxProcessesApi(self._transport) - self.files = SandboxFilesApi(self._transport, self._resolve_runtime_socket_info) + self.files = SandboxFilesApi( + self._transport, + self._resolve_runtime_socket_info, + service.runtime_proxy_override, + ) self.terminal = SandboxTerminalApi( self._transport, self._resolve_runtime_socket_info, + service.runtime_proxy_override, ) self.pty = self.terminal @@ -1258,6 +1606,29 @@ def stop(self) -> BasicResponse: self._clear_runtime_session("closed") return response + def create_memory_snapshot( + self, params: Optional[Union[SandboxMemorySnapshotParams, Dict[str, object]]] = None + ) -> SandboxMemorySnapshotResult: + normalized = ( + params + if isinstance(params, SandboxMemorySnapshotParams) + else SandboxMemorySnapshotParams(**(params or 
{})) + ) + return self._service.create_memory_snapshot(self.id, normalized) + + def expose( + self, params: Union[SandboxExposeParams, Dict[str, object]] + ) -> SandboxExposeResult: + normalized = ( + params + if isinstance(params, SandboxExposeParams) + else SandboxExposeParams(**params) + ) + return self._service.expose(self.id, normalized, runtime=self.runtime) + + def get_exposed_url(self, port: int) -> str: + return _build_sandbox_exposed_url(self.runtime, port) + def create_runtime_session( self, force_refresh: bool = False ) -> SandboxRuntimeSession: @@ -1269,9 +1640,17 @@ def create_runtime_session( ): return _copy_model(self._runtime_session) - session = self._service.get_runtime_session(self.id) - self._apply_runtime_session(session) - return _copy_model(session) + detail = self._service.get_detail(self.id) + self._hydrate(detail) + if self._runtime_session is None: + raise HyperbrowserError( + f"Sandbox {self.id} is not running", + status_code=409, + code="sandbox_not_running", + retryable=False, + service="runtime", + ) + return _copy_model(self._runtime_session) def exec(self, input: Union[str, SandboxExecParams, Dict[str, object]]): if isinstance(input, str): @@ -1355,16 +1734,25 @@ class SandboxManager: def __init__(self, client): self._client = client self.runtime_timeout = getattr(client, "timeout", 30) + self.runtime_proxy_override = getattr( + client.config, + "runtime_proxy_override", + None, + ) - def create(self, params: CreateSandboxParams) -> SandboxHandle: - detail = self._create_detail(params) + def create(self, params: Union[CreateSandboxParams, Dict[str, object]]) -> SandboxHandle: + normalized = ( + params + if isinstance(params, CreateSandboxParams) + else CreateSandboxParams(**params) + ) + detail = self._create_detail(normalized) return self.attach(detail) def start_from_snapshot( - self, params: StartSandboxFromSnapshotParams + self, params: Union[StartSandboxFromSnapshotParams, Dict[str, object]] ) -> SandboxHandle: - detail = 
self._start_from_snapshot_detail(params) - return self.attach(detail) + return self.create(params) def get(self, sandbox_id: str) -> SandboxHandle: return self.attach(self.get_detail(sandbox_id)) @@ -1374,47 +1762,67 @@ def connect(self, sandbox_id: str) -> SandboxHandle: sandbox.connect() return sandbox - def list( - self, params: Optional[SandboxListParams] = None - ) -> SandboxListResponse: - payload = self._request( - "GET", - "/sandboxes", - params=(params or SandboxListParams()).model_dump( - exclude_none=True, by_alias=True - ), - ) - return SandboxListResponse(**payload) - def stop(self, sandbox_id: str) -> BasicResponse: - payload = self._request("POST", f"/sandboxes/{sandbox_id}/stop") + payload = self._request("PUT", f"/sandbox/{sandbox_id}/stop") return BasicResponse(**payload) def get_runtime_session(self, sandbox_id: str) -> SandboxRuntimeSession: - payload = self._request("POST", f"/sandboxes/{sandbox_id}/runtime-session") - return SandboxRuntimeSession(**payload) + detail = self.get_detail(sandbox_id) + session = SandboxHandle._to_runtime_session(detail) + if session is None: + raise HyperbrowserError( + f"Sandbox {sandbox_id} is not running", + status_code=409, + code="sandbox_not_running", + retryable=False, + service="runtime", + ) + return session def get_detail(self, sandbox_id: str) -> SandboxDetail: - payload = self._request("GET", f"/sandboxes/{sandbox_id}") + payload = self._request("GET", f"/sandbox/{sandbox_id}") return SandboxDetail(**payload) def attach(self, detail: SandboxDetail) -> SandboxHandle: return SandboxHandle(self, detail) - def _create_detail(self, params: CreateSandboxParams) -> SandboxDetail: + def create_memory_snapshot( + self, + sandbox_id: str, + params: Optional[SandboxMemorySnapshotParams] = None, + ) -> SandboxMemorySnapshotResult: + payload = self._request( + "POST", + f"/sandbox/{sandbox_id}/snapshot", + data=(params or SandboxMemorySnapshotParams()).model_dump( + exclude_none=True, by_alias=True + ), + ) + return 
SandboxMemorySnapshotResult(**payload) + + def expose( + self, + sandbox_id: str, + params: SandboxExposeParams, + *, + runtime=None, + ) -> SandboxExposeResult: payload = self._request( "POST", - "/sandboxes", + f"/sandbox/{sandbox_id}/expose", data=params.model_dump(exclude_none=True, by_alias=True), ) - return SandboxDetail(**payload) + target_runtime = runtime or self.get_detail(sandbox_id).runtime + return SandboxExposeResult( + port=payload["port"], + auth=payload["auth"], + url=_build_sandbox_exposed_url(target_runtime, payload["port"]), + ) - def _start_from_snapshot_detail( - self, params: StartSandboxFromSnapshotParams - ) -> SandboxDetail: + def _create_detail(self, params: CreateSandboxParams) -> SandboxDetail: payload = self._request( "POST", - "/sandboxes/startFromSnapshot", + "/sandbox", data=params.model_dump(exclude_none=True, by_alias=True), ) return SandboxDetail(**payload) diff --git a/hyperbrowser/client/sync.py b/hyperbrowser/client/sync.py index 1fa82ee4..f08e42d3 100644 --- a/hyperbrowser/client/sync.py +++ b/hyperbrowser/client/sync.py @@ -25,8 +25,15 @@ def __init__( api_key: Optional[str] = None, base_url: Optional[str] = None, timeout: Optional[int] = 30, + runtime_proxy_override: Optional[str] = None, ): - super().__init__(SyncTransport, config, api_key, base_url) + super().__init__( + SyncTransport, + config, + api_key, + base_url, + runtime_proxy_override, + ) self.timeout = timeout or 30 self.transport.client.timeout = timeout self.sessions = SessionManager(self) diff --git a/hyperbrowser/config.py b/hyperbrowser/config.py index c055ab17..e1ec8f20 100644 --- a/hyperbrowser/config.py +++ b/hyperbrowser/config.py @@ -9,6 +9,7 @@ class ClientConfig: api_key: str base_url: str = "https://api.hyperbrowser.ai" + runtime_proxy_override: Optional[str] = None @classmethod def from_env(cls) -> "ClientConfig": diff --git a/hyperbrowser/models/__init__.py b/hyperbrowser/models/__init__.py index 06039127..8135e4fd 100644 --- 
a/hyperbrowser/models/__init__.py +++ b/hyperbrowser/models/__init__.py @@ -246,6 +246,10 @@ StartSandboxFromSnapshotParams, SandboxListParams, SandboxListResponse, + SandboxMemorySnapshotParams, + SandboxMemorySnapshotResult, + SandboxExposeParams, + SandboxExposeResult, SandboxProcessStatus, SandboxExecParams, SandboxProcessSummary, @@ -257,16 +261,27 @@ SandboxProcessOutputEvent, SandboxProcessExitEvent, SandboxProcessStreamEvent, + SandboxFileType, + SandboxFileReadFormat, + SandboxFileInfo, + SandboxFileWriteInfo, SandboxFileEntry, + SandboxFileListOptions, SandboxFileListParams, SandboxFileListResponse, + SandboxFileReadOptions, SandboxFileReadParams, SandboxFileReadResult, + SandboxFileWriteEntry, + SandboxFileTextWriteOptions, + SandboxFileBytesWriteOptions, SandboxFileWriteTextParams, SandboxFileWriteBytesParams, SandboxFileWriteResult, SandboxFileUploadParams, + SandboxFileRemoveOptions, SandboxFileDeleteParams, + SandboxFileMakeDirOptions, SandboxFileMkdirParams, SandboxFileMoveParams, SandboxFileCopyParams, @@ -283,9 +298,12 @@ SandboxFileWatchEventMessage, SandboxFileWatchDoneEvent, SandboxFileWatchStreamEvent, + SandboxFileSystemEventType, + SandboxFileSystemEvent, SandboxPresignFileParams, SandboxPresignedUrl, SandboxTerminalCreateParams, + SandboxTerminalOutputChunk, SandboxTerminalStatus, SandboxTerminalWaitParams, SandboxTerminalKillParams, @@ -472,6 +490,10 @@ "StartSandboxFromSnapshotParams", "SandboxListParams", "SandboxListResponse", + "SandboxMemorySnapshotParams", + "SandboxMemorySnapshotResult", + "SandboxExposeParams", + "SandboxExposeResult", "SandboxProcessStatus", "SandboxExecParams", "SandboxProcessSummary", @@ -483,16 +505,27 @@ "SandboxProcessOutputEvent", "SandboxProcessExitEvent", "SandboxProcessStreamEvent", + "SandboxFileType", + "SandboxFileReadFormat", + "SandboxFileInfo", + "SandboxFileWriteInfo", "SandboxFileEntry", + "SandboxFileListOptions", "SandboxFileListParams", "SandboxFileListResponse", + "SandboxFileReadOptions", 
"SandboxFileReadParams", "SandboxFileReadResult", + "SandboxFileWriteEntry", + "SandboxFileTextWriteOptions", + "SandboxFileBytesWriteOptions", "SandboxFileWriteTextParams", "SandboxFileWriteBytesParams", "SandboxFileWriteResult", "SandboxFileUploadParams", + "SandboxFileRemoveOptions", "SandboxFileDeleteParams", + "SandboxFileMakeDirOptions", "SandboxFileMkdirParams", "SandboxFileMoveParams", "SandboxFileCopyParams", @@ -509,9 +542,12 @@ "SandboxFileWatchEventMessage", "SandboxFileWatchDoneEvent", "SandboxFileWatchStreamEvent", + "SandboxFileSystemEventType", + "SandboxFileSystemEvent", "SandboxPresignFileParams", "SandboxPresignedUrl", "SandboxTerminalCreateParams", + "SandboxTerminalOutputChunk", "SandboxTerminalStatus", "SandboxTerminalWaitParams", "SandboxTerminalKillParams", diff --git a/hyperbrowser/models/sandbox.py b/hyperbrowser/models/sandbox.py index a9882e0c..d3735b54 100644 --- a/hyperbrowser/models/sandbox.py +++ b/hyperbrowser/models/sandbox.py @@ -1,10 +1,9 @@ -from datetime import datetime -from typing import Dict, List, Literal, Optional, Union +from datetime import datetime, timezone +from typing import Callable, Dict, Iterable, List, Literal, Optional, Union from pydantic import BaseModel, ConfigDict, Field, field_validator, model_validator -from .consts import SessionRegion -from .session import BasicResponse, SessionLaunchState, SessionStatus +from .session import SessionLaunchState, SessionStatus SandboxStatus = SessionStatus SandboxRegion = Literal[ @@ -24,12 +23,17 @@ "killed", "timed_out", ] -SandboxFileWatchRoute = Literal["ws", "stream"] +SandboxFileType = Literal["file", "dir"] SandboxFileEncoding = Literal["utf8", "base64"] +SandboxFileReadFormat = Literal["text", "bytes", "blob", "stream"] +SandboxFileWatchRoute = Literal["ws", "stream"] +SandboxFileSystemEventType = Literal["chmod", "create", "remove", "rename", "write"] -class SandboxBaseModel(BaseModel): - model_config = ConfigDict(populate_by_name=True) +def 
_parse_optional_datetime(value): + if value in (None, ""): + return None + return value def _parse_optional_int(value): @@ -42,12 +46,22 @@ def _parse_optional_int(value): return value -def _parse_optional_datetime(value): +def _parse_optional_datetime_from_millis(value): if value in (None, ""): return None + if isinstance(value, datetime): + if value.tzinfo is None: + return value.replace(tzinfo=timezone.utc) + return value + if isinstance(value, (int, float)): + return datetime.fromtimestamp(float(value) / 1000.0, tz=timezone.utc) return value +class SandboxBaseModel(BaseModel): + model_config = ConfigDict(populate_by_name=True) + + class SandboxRuntimeTarget(SandboxBaseModel): transport: Literal["regional_proxy"] host: str @@ -117,20 +131,29 @@ def parse_token_expires_at(cls, value): class CreateSandboxParams(SandboxBaseModel): - sandbox_name: str = Field(alias="sandboxName") + sandbox_name: Optional[str] = Field(default=None, alias="sandboxName") + snapshot_name: Optional[str] = Field(default=None, alias="snapshotName") + snapshot_id: Optional[str] = Field(default=None, alias="snapshotId") + image_name: Optional[str] = Field(default=None, alias="imageName") + image_id: Optional[str] = Field(default=None, alias="imageId") region: Optional[SandboxRegion] = None enable_recording: Optional[bool] = Field(default=None, alias="enableRecording") timeout_minutes: Optional[int] = Field(default=None, alias="timeoutMinutes") - snapshot_id: Optional[str] = Field(default=None, alias="snapshotId") - snapshot_name: Optional[str] = Field(default=None, alias="snapshotName") - snapshot_namespace: Optional[str] = Field( - default=None, alias="snapshotNamespace" - ) @model_validator(mode="after") - def validate_snapshot_selector(self): - if bool(self.snapshot_id) == bool(self.snapshot_name): - raise ValueError("Exactly one of snapshot_id or snapshot_name is required") + def validate_launch_source(self): + source_count = sum( + bool(value) + for value in [self.sandbox_name, 
self.snapshot_name, self.image_name] + ) + if source_count != 1: + raise ValueError( + "Provide exactly one start source: sandbox_name, snapshot_name, or image_name" + ) + if self.snapshot_id and not self.snapshot_name: + raise ValueError("snapshot_id requires snapshot_name") + if self.image_id and not self.image_name: + raise ValueError("image_id requires image_name") return self @@ -152,6 +175,31 @@ class SandboxListResponse(SandboxBaseModel): per_page: int = Field(alias="perPage") +class SandboxMemorySnapshotParams(SandboxBaseModel): + snapshot_name: Optional[str] = Field(default=None, alias="snapshotName") + + +class SandboxMemorySnapshotResult(SandboxBaseModel): + snapshot_name: str = Field(alias="snapshotName") + snapshot_id: str = Field(alias="snapshotId") + namespace: str + status: str + image_name: str = Field(alias="imageName") + image_id: str = Field(alias="imageId") + image_namespace: str = Field(alias="imageNamespace") + + +class SandboxExposeParams(SandboxBaseModel): + port: int + auth: Optional[bool] = None + + +class SandboxExposeResult(SandboxBaseModel): + port: int + auth: bool + url: str + + class SandboxExecParams(SandboxBaseModel): command: str args: Optional[List[str]] = None @@ -189,8 +237,8 @@ class SandboxProcessListParams(SandboxBaseModel): status: Optional[Union[SandboxProcessStatus, List[SandboxProcessStatus]]] = None limit: Optional[int] = None cursor: Optional[Union[str, int]] = None - created_after: Optional[int] = Field(default=None, alias="created_after") - created_before: Optional[int] = Field(default=None, alias="created_before") + created_after: Optional[int] = Field(default=None, alias="createdAfter") + created_before: Optional[int] = Field(default=None, alias="createdBefore") class SandboxProcessListResponse(SandboxBaseModel): @@ -224,29 +272,52 @@ class SandboxProcessExitEvent(SandboxBaseModel): SandboxProcessStreamEvent = Union[SandboxProcessOutputEvent, SandboxProcessExitEvent] -class SandboxFileEntry(SandboxBaseModel): 
+class SandboxFileInfo(SandboxBaseModel): path: str name: str - type: str + type: SandboxFileType size: int - mode: str - mod_time: int = Field(alias="modTime") + mode: int + permissions: str + owner: str + group: str + modified_time: Optional[datetime] = Field(default=None, alias="modifiedTime") + symlink_target: Optional[str] = Field(default=None, alias="symlinkTarget") + + @field_validator("modified_time", mode="before") + @classmethod + def parse_modified_time(cls, value): + return _parse_optional_datetime_from_millis(value) + + +class SandboxFileWriteInfo(SandboxBaseModel): + path: str + name: str + type: Optional[SandboxFileType] = None + + +SandboxFileEntry = SandboxFileInfo + + +class SandboxFileListOptions(SandboxBaseModel): + depth: Optional[int] = None class SandboxFileListParams(SandboxBaseModel): path: str - recursive: Optional[bool] = None - limit: Optional[int] = None - cursor: Optional[int] = None + depth: Optional[int] = None class SandboxFileListResponse(SandboxBaseModel): path: str - entries: List[SandboxFileEntry] - limit: int - cursor: int - recursive: bool - next_cursor: Optional[int] = Field(default=None, alias="nextCursor") + depth: int + entries: List[SandboxFileInfo] + + +class SandboxFileReadOptions(SandboxBaseModel): + offset: Optional[int] = None + length: Optional[int] = None + format: Optional[SandboxFileReadFormat] = None class SandboxFileReadParams(SandboxBaseModel): @@ -264,6 +335,24 @@ class SandboxFileReadResult(SandboxBaseModel): content_type: Optional[str] = Field(default=None, alias="contentType") +SandboxFileWriteData = Union[str, bytes] + + +class SandboxFileWriteEntry(SandboxBaseModel): + path: str + data: SandboxFileWriteData + + +class SandboxFileTextWriteOptions(SandboxBaseModel): + append: Optional[bool] = None + mode: Optional[str] = None + + +class SandboxFileBytesWriteOptions(SandboxBaseModel): + append: Optional[bool] = None + mode: Optional[str] = None + + class SandboxFileWriteTextParams(SandboxBaseModel): path: 
str data: str @@ -279,8 +368,7 @@ class SandboxFileWriteBytesParams(SandboxBaseModel): class SandboxFileWriteResult(SandboxBaseModel): - bytes_written: int = Field(alias="bytesWritten") - path: str + files: List[SandboxFileWriteInfo] class SandboxFileUploadParams(SandboxBaseModel): @@ -288,11 +376,25 @@ class SandboxFileUploadParams(SandboxBaseModel): data: Union[bytes, str] +class SandboxFileTransferResult(SandboxBaseModel): + path: str + bytes_written: int = Field(alias="bytesWritten") + + +class SandboxFileRemoveOptions(SandboxBaseModel): + recursive: Optional[bool] = None + + class SandboxFileDeleteParams(SandboxBaseModel): path: str recursive: Optional[bool] = None +class SandboxFileMakeDirOptions(SandboxBaseModel): + parents: Optional[bool] = None + mode: Optional[str] = None + + class SandboxFileMkdirParams(SandboxBaseModel): path: str parents: Optional[bool] = None @@ -327,16 +429,11 @@ class SandboxFileChownParams(SandboxBaseModel): class SandboxFileMutationResult(SandboxBaseModel): path: str - - -class SandboxFileTransferResult(SandboxBaseModel): - path: str - bytes_written: int = Field(alias="bytesWritten") + created: Optional[bool] = None class SandboxFileMoveCopyResult(SandboxBaseModel): - from_path: str = Field(alias="from") - to: str + entry: SandboxFileInfo class SandboxFileWatchParams(SandboxBaseModel): @@ -386,6 +483,14 @@ class SandboxFileWatchDoneEvent(SandboxBaseModel): ] +class SandboxFileSystemEvent(SandboxBaseModel): + type: SandboxFileSystemEventType + name: str + + +SandboxWatchDirExitCallback = Callable[[Optional[BaseException]], object] + + class SandboxPresignFileParams(SandboxBaseModel): path: str expires_in_seconds: Optional[int] = Field(default=None, alias="expiresInSeconds") @@ -411,6 +516,13 @@ class SandboxTerminalCreateParams(SandboxBaseModel): timeout_ms: Optional[int] = Field(default=None, alias="timeoutMs") +class SandboxTerminalOutputChunk(SandboxBaseModel): + seq: int + data: str + raw: bytes + timestamp: int + + class 
SandboxTerminalStatus(SandboxBaseModel): id: str command: str @@ -425,6 +537,7 @@ class SandboxTerminalStatus(SandboxBaseModel): cols: int started_at: int = Field(alias="startedAt") finished_at: Optional[int] = Field(default=None, alias="finishedAt") + output: Optional[List[SandboxTerminalOutputChunk]] = None class SandboxTerminalWaitParams(SandboxBaseModel): diff --git a/hyperbrowser/sandbox_common.py b/hyperbrowser/sandbox_common.py index f60ca1f3..a600dd71 100644 --- a/hyperbrowser/sandbox_common.py +++ b/hyperbrowser/sandbox_common.py @@ -1,5 +1,4 @@ import json -import os from dataclasses import dataclass from typing import Any, Dict, Optional, Tuple from urllib.parse import urljoin, urlsplit, urlunsplit @@ -10,7 +9,6 @@ RETRYABLE_STATUS_CODES = {429, 502, 503, 504} RUNTIME_SESSION_REFRESH_BUFFER_MS = 60_000 -REGIONAL_PROXY_DEV_HOST = os.environ.get("REGIONAL_PROXY_DEV_HOST", "").strip() @dataclass(frozen=True) @@ -119,18 +117,20 @@ def has_scheme(value: str) -> bool: def resolve_runtime_transport_target( - base_url: str, path: str + base_url: str, + path: str, + runtime_proxy_override: Optional[str] = None, ) -> RuntimeTransportTarget: normalized_base = base_url if base_url.endswith("/") else f"{base_url}/" url = urljoin(normalized_base, path.lstrip("/")) - if not REGIONAL_PROXY_DEV_HOST: + if not runtime_proxy_override: return RuntimeTransportTarget(url=url) override_raw = ( - REGIONAL_PROXY_DEV_HOST - if has_scheme(REGIONAL_PROXY_DEV_HOST) - else f"{urlsplit(url).scheme}://{REGIONAL_PROXY_DEV_HOST}" + runtime_proxy_override + if has_scheme(runtime_proxy_override) + else f"{urlsplit(url).scheme}://{runtime_proxy_override}" ) original = urlsplit(url) override = urlsplit(override_raw) @@ -148,7 +148,9 @@ def resolve_runtime_transport_target( def to_websocket_transport_target( - base_url: str, path: str + base_url: str, + path: str, + runtime_proxy_override: Optional[str] = None, ) -> RuntimeTransportTarget: normalized_base = base_url if base_url.endswith("/") 
else f"{base_url}/" url = urljoin(normalized_base, path.lstrip("/")) @@ -162,13 +164,13 @@ def to_websocket_transport_target( (scheme, parts.netloc, parts.path, parts.query, parts.fragment) ) - if not REGIONAL_PROXY_DEV_HOST: + if not runtime_proxy_override: return RuntimeTransportTarget(url=websocket_url) override = urlsplit( - REGIONAL_PROXY_DEV_HOST - if has_scheme(REGIONAL_PROXY_DEV_HOST) - else f"{parts.scheme}://{REGIONAL_PROXY_DEV_HOST}" + runtime_proxy_override + if has_scheme(runtime_proxy_override) + else f"{parts.scheme}://{runtime_proxy_override}" ) connect_port = override.port if connect_port is None: diff --git a/tests/helpers/config.py b/tests/helpers/config.py index ef7748f6..ab136345 100644 --- a/tests/helpers/config.py +++ b/tests/helpers/config.py @@ -1,22 +1,27 @@ import os from pathlib import Path -from hyperbrowser import Hyperbrowser +from hyperbrowser import AsyncHyperbrowser, Hyperbrowser TESTS_DIR = Path(__file__).resolve().parent.parent -ENV_PATH = TESTS_DIR / ".env" +ENV_PATHS = ( + TESTS_DIR / ".env", + TESTS_DIR.parent / ".env", +) def _load_env() -> None: - if not ENV_PATH.exists(): - return - - for raw_line in ENV_PATH.read_text().splitlines(): - line = raw_line.strip() - if not line or line.startswith("#") or "=" not in line: + for env_path in ENV_PATHS: + if not env_path.exists(): continue - key, value = line.split("=", 1) - os.environ.setdefault(key.strip(), value.strip()) + + for raw_line in env_path.read_text().splitlines(): + line = raw_line.strip() + if not line or line.startswith("#") or "=" not in line: + continue + key, value = line.split("=", 1) + os.environ.setdefault(key.strip(), value.strip()) + return _load_env() @@ -24,7 +29,7 @@ def _load_env() -> None: API_KEY = os.environ.get("HYPERBROWSER_API_KEY", "") BASE_URL = os.environ.get("HYPERBROWSER_BASE_URL", "http://localhost:8080") REGIONAL_PROXY_DEV_HOST = os.environ.get("REGIONAL_PROXY_DEV_HOST", "") -DEFAULT_SNAPSHOT_NAME = "receiverStarted-ubuntu-24-node" 
+DEFAULT_IMAGE_NAME = os.environ.get("HYPERBROWSER_DEFAULT_IMAGE_NAME", "node") def create_client() -> Hyperbrowser: @@ -33,7 +38,24 @@ def create_client() -> Hyperbrowser: "Set HYPERBROWSER_API_KEY in tests/.env before running sandbox e2e tests" ) - return Hyperbrowser(api_key=API_KEY, base_url=BASE_URL) + return Hyperbrowser( + api_key=API_KEY, + base_url=BASE_URL, + runtime_proxy_override=REGIONAL_PROXY_DEV_HOST or None, + ) + + +def create_async_client() -> AsyncHyperbrowser: + if not API_KEY: + raise RuntimeError( + "Set HYPERBROWSER_API_KEY in tests/.env before running sandbox e2e tests" + ) + + return AsyncHyperbrowser( + api_key=API_KEY, + base_url=BASE_URL, + runtime_proxy_override=REGIONAL_PROXY_DEV_HOST or None, + ) def make_test_name(prefix: str) -> str: diff --git a/tests/helpers/http.py b/tests/helpers/http.py index 6c202667..3926ba3b 100644 --- a/tests/helpers/http.py +++ b/tests/helpers/http.py @@ -2,7 +2,7 @@ import httpx -from tests.helpers.config import REGIONAL_PROXY_DEV_HOST +from tests.helpers.config import API_KEY, BASE_URL, REGIONAL_PROXY_DEV_HOST def _has_scheme(value: str) -> bool: @@ -43,3 +43,48 @@ def fetch_signed_url( if host_header and "Host" not in request_headers and "host" not in request_headers: request_headers["Host"] = host_header return httpx.request(method, url, headers=request_headers, content=body, timeout=30) + + +def fetch_runtime_url( + input_url: str, + *, + method: str = "GET", + body=None, + headers=None, +) -> httpx.Response: + return fetch_signed_url( + input_url, + method=method, + body=body, + headers=headers, + ) + + +def get_image_by_name(image_name: str): + response = httpx.get( + f"{BASE_URL}/api/images", + headers={"Authorization": f"Bearer {API_KEY}"}, + timeout=30, + ) + response.raise_for_status() + payload = response.json() + images = payload.get("data", {}).get("images") or payload.get("images") or [] + image = next((entry for entry in images if entry.get("imageName") == image_name), None) + if image is 
None: + raise RuntimeError(f"custom image {image_name!r} not found in /api/images") + return image + + +async def get_image_by_name_async(image_name: str): + async with httpx.AsyncClient(timeout=30) as client: + response = await client.get( + f"{BASE_URL}/api/images", + headers={"Authorization": f"Bearer {API_KEY}"}, + ) + response.raise_for_status() + payload = response.json() + images = payload.get("data", {}).get("images") or payload.get("images") or [] + image = next((entry for entry in images if entry.get("imageName") == image_name), None) + if image is None: + raise RuntimeError(f"custom image {image_name!r} not found in /api/images") + return image diff --git a/tests/helpers/sandbox.py b/tests/helpers/sandbox.py index 0d07b00c..dd525640 100644 --- a/tests/helpers/sandbox.py +++ b/tests/helpers/sandbox.py @@ -3,13 +3,12 @@ from hyperbrowser.exceptions import HyperbrowserError from hyperbrowser.models import CreateSandboxParams -from tests.helpers.config import DEFAULT_SNAPSHOT_NAME, make_test_name +from tests.helpers.config import DEFAULT_IMAGE_NAME def default_sandbox_params(prefix: str) -> CreateSandboxParams: return CreateSandboxParams( - sandbox_name=make_test_name(prefix), - snapshot_name=DEFAULT_SNAPSHOT_NAME, + image_name=DEFAULT_IMAGE_NAME, ) diff --git a/tests/sandbox/e2e/test_async_expose.py b/tests/sandbox/e2e/test_async_expose.py new file mode 100644 index 00000000..d48ecdc3 --- /dev/null +++ b/tests/sandbox/e2e/test_async_expose.py @@ -0,0 +1,134 @@ +import asyncio + +import pytest + +from tests.helpers.config import create_async_client +from tests.helpers.errors import expect_hyperbrowser_error_async +from tests.helpers.http import fetch_runtime_url +from tests.helpers.sandbox import ( + default_sandbox_params, + stop_sandbox_if_running_async, + wait_for_runtime_ready_async, +) + +HTTP_PORT = 3210 + + +async def _wait_for_http_response(url: str, *, headers=None, predicate, attempts: int = 15): + last_status = 0 + last_body = "" + + for attempt 
in range(1, attempts + 1): + try: + response = await asyncio.to_thread(fetch_runtime_url, url, headers=headers) + body = response.text + last_status = response.status_code + last_body = body + if predicate(response.status_code, body): + return response.status_code, body + except Exception as error: # pragma: no cover - network edge in e2e + last_body = str(error) + + if attempt < attempts: + await asyncio.sleep(0.2 * attempt) + + raise AssertionError( + f"did not receive expected response for {url}; " + f"last status={last_status}, last body={last_body!r}" + ) + + +@pytest.mark.anyio +async def test_async_sandbox_expose_e2e(): + client = create_async_client() + sandbox = None + server_process = None + + try: + sandbox = await client.sandboxes.create(default_sandbox_params("py-async-expose")) + await wait_for_runtime_ready_async(sandbox) + + server_process = await sandbox.processes.start( + { + "command": "node", + "args": [ + "-e", + " ".join( + [ + "const http = require('http');", + f"const port = {HTTP_PORT};", + "const server = http.createServer((req, res) => {", + " res.writeHead(200, {'content-type': 'text/plain'});", + " res.end(`sdk-exposed:${req.method}:${req.url}`);", + "});", + "server.listen(port, '0.0.0.0', () => {", + " console.log(`listening:${port}`);", + "});", + "process.on('SIGTERM', () => server.close(() => process.exit(0)));", + "process.on('SIGINT', () => server.close(() => process.exit(0)));", + ] + ), + ], + } + ) + + token = sandbox.to_dict()["token"] + assert token + await _wait_for_http_response( + sandbox.get_exposed_url(HTTP_PORT), + headers={"Authorization": f"Bearer {token}"}, + predicate=lambda status, _: status == 403, + ) + + await expect_hyperbrowser_error_async( + "reserved receiver port expose", + lambda: sandbox.expose({"port": 4001}), + status_code=400, + service="control", + retryable=False, + message_includes="cannot be exposed", + ) + + exposure = await sandbox.expose({"port": HTTP_PORT, "auth": False}) + assert 
exposure.port == HTTP_PORT + assert exposure.auth is False + assert exposure.url == sandbox.get_exposed_url(HTTP_PORT) + + status, body = await _wait_for_http_response( + exposure.url, + predicate=lambda response_status, response_body: ( + response_status == 200 and "sdk-exposed:GET:/" in response_body + ), + ) + assert status == 200 + assert "sdk-exposed:GET:/" in body + + exposure = await sandbox.expose({"port": HTTP_PORT, "auth": True}) + assert exposure.auth is True + + status, _ = await _wait_for_http_response( + exposure.url, + predicate=lambda response_status, _: response_status == 401, + ) + assert status == 401 + + await sandbox.refresh() + token = sandbox.to_dict()["token"] + assert token + status, body = await _wait_for_http_response( + exposure.url, + headers={"Authorization": f"Bearer {token}"}, + predicate=lambda response_status, response_body: ( + response_status == 200 and "sdk-exposed:GET:/" in response_body + ), + ) + assert status == 200 + assert "sdk-exposed:GET:/" in body + finally: + if server_process is not None: + try: + await server_process.kill() + except Exception: + pass + await stop_sandbox_if_running_async(sandbox) + await client.close() diff --git a/tests/sandbox/e2e/test_async_files.py b/tests/sandbox/e2e/test_async_files.py index 1edb5c7c..85c08eca 100644 --- a/tests/sandbox/e2e/test_async_files.py +++ b/tests/sandbox/e2e/test_async_files.py @@ -2,9 +2,7 @@ import pytest -from hyperbrowser import AsyncHyperbrowser - -from tests.helpers.config import make_test_name +from tests.helpers.config import create_async_client, make_test_name from tests.helpers.errors import expect_hyperbrowser_error_async from tests.helpers.http import fetch_signed_url from tests.helpers.sandbox import ( @@ -14,25 +12,49 @@ ) -async def _next_watch_event(watch, *, route="ws", cursor=None): - async for event in watch.events(route=route, cursor=cursor): - if event.type == "event": - return event.event - raise RuntimeError("watch stream ended before an event 
was received") - - -async def _wait_for_watch_buffer_rollover(watch, *, attempts=20, delay_seconds=0.1): - for _ in range(attempts): - refreshed = await watch.refresh() - if refreshed.current.oldest_seq > 1: - return refreshed - await asyncio.sleep(delay_seconds) - raise RuntimeError("watch buffer did not roll over before timeout") +def _read_stream_text(stream) -> str: + return stream.read().decode("utf-8") + + +async def _await_future(future: asyncio.Future, timeout: float = 10.0): + return await asyncio.wait_for(future, timeout=timeout) + + +async def _create_parent_symlink_escape_fixture(sandbox, base_dir: str, name: str): + allowed_dir = f"{base_dir}/{name}" + outside_dir = f"/var/tmp/{make_test_name(name)}" + outside_file = f"{outside_dir}/secret.txt" + link_dir = f"{allowed_dir}/evil" + escaped_file = f"{link_dir}/secret.txt" + setup = await sandbox.exec( + { + "command": "bash", + "args": [ + "-lc", + " && ".join( + [ + f'mkdir -p "{allowed_dir}"', + f'mkdir -p "{outside_dir}"', + f'printf "outside secret" > "{outside_file}"', + f'ln -sfn "{outside_dir}" "{link_dir}"', + ] + ), + ], + } + ) + assert setup.exit_code == 0 + return { + "allowed_dir": allowed_dir, + "outside_dir": outside_dir, + "outside_file": outside_file, + "link_dir": link_dir, + "escaped_file": escaped_file, + } @pytest.mark.anyio async def test_async_sandbox_files_e2e(): - client = AsyncHyperbrowser() + client = create_async_client() sandbox = None base_dir = f"/tmp/{make_test_name('py-async-files')}" @@ -42,70 +64,258 @@ async def test_async_sandbox_files_e2e(): assert await sandbox.files.exists(f"{base_dir}/missing.txt") is False - result = await sandbox.files.mkdir(base_dir, parents=True) - assert result.path == base_dir - - await sandbox.files.write_text(f"{base_dir}/hello.txt", "hello from sdk files") - content = await sandbox.files.read_text(f"{base_dir}/hello.txt") - assert content == "hello from sdk files" - - chunk = await sandbox.files.read_text( - f"{base_dir}/hello.txt", 
offset=6, length=4 + path = f"{base_dir}/dirs/root" + assert await sandbox.files.make_dir(path) is True + assert await sandbox.files.make_dir(path) is False + + info_path = f"{base_dir}/info/hello.txt" + await sandbox.files.write_text(info_path, "hello from sdk files") + info = await sandbox.files.get_info(info_path) + assert info.name == "hello.txt" + assert info.path == info_path + assert info.type == "file" + assert info.size == len("hello from sdk files") + assert info.mode == 0o644 + assert info.permissions == "-rw-r--r--" + assert info.owner + assert info.group + assert info.modified_time is not None + + list_dir = f"{base_dir}/list" + await sandbox.files.make_dir(f"{list_dir}/nested/inner", parents=True) + await sandbox.files.write_text(f"{list_dir}/root.txt", "root") + await sandbox.files.write_text(f"{list_dir}/nested/child.txt", "child") + await sandbox.files.write_text( + f"{list_dir}/nested/inner/grandchild.txt", "grandchild" ) - assert chunk == "from" - result = await sandbox.files.read( - f"{base_dir}/hello.txt", - offset=0, - length=5, - encoding="utf8", + depth_one = await sandbox.files.list(list_dir, depth=1) + assert [entry.name for entry in depth_one] == ["nested", "root.txt"] + assert [entry.type for entry in depth_one] == ["dir", "file"] + + depth_two = await sandbox.files.list(list_dir, depth=2) + assert [entry.path for entry in depth_two] == [ + f"{list_dir}/nested", + f"{list_dir}/nested/child.txt", + f"{list_dir}/nested/inner", + f"{list_dir}/root.txt", + ] + + symlink_dir = f"{base_dir}/list-symlink" + target = f"{symlink_dir}/target.txt" + link = f"{symlink_dir}/link.txt" + await sandbox.files.make_dir(symlink_dir) + await sandbox.files.write_text(target, "payload") + result = await sandbox.exec( + {"command": "bash", "args": ["-lc", f'ln -sfn "{target}" "{link}"']} + ) + assert result.exit_code == 0 + link_entry = next( + entry + for entry in await sandbox.files.list(symlink_dir, depth=1) + if entry.path == link + ) + assert 
link_entry.symlink_target == target + + symlink_target = f"{base_dir}/symlink/target.txt" + symlink_link = f"{base_dir}/symlink/link.txt" + await sandbox.files.write_text(symlink_target, "target") + result = await sandbox.exec( + { + "command": "bash", + "args": [ + "-lc", + f'mkdir -p "{base_dir}/symlink" && ln -sfn "{symlink_target}" "{symlink_link}"', + ], + } + ) + assert result.exit_code == 0 + assert (await sandbox.files.get_info(symlink_link)).symlink_target == symlink_target + + broken_target = f"{base_dir}/symlink-broken/missing-target.txt" + broken_link = f"{base_dir}/symlink-broken/link.txt" + result = await sandbox.exec( + { + "command": "bash", + "args": [ + "-lc", + f'mkdir -p "{base_dir}/symlink-broken" && ln -sfn "{broken_target}" "{broken_link}"', + ], + } + ) + assert result.exit_code == 0 + assert await sandbox.files.exists(broken_link) is True + assert (await sandbox.files.get_info(broken_link)).symlink_target == broken_target + + read_path = f"{base_dir}/read/readme.txt" + await sandbox.files.write_text(read_path, "hello from sdk files") + assert await sandbox.files.read(read_path) == "hello from sdk files" + assert await sandbox.files.read(read_path, format="text", offset=6, length=4) == "from" + assert await sandbox.files.read(read_path, format="bytes") == b"hello from sdk files" + assert await sandbox.files.read(read_path, format="blob") == b"hello from sdk files" + assert _read_stream_text(await sandbox.files.read(read_path, format="stream")) == "hello from sdk files" + + single = await sandbox.files.write(f"{base_dir}/write/single.txt", "single file") + assert single.name == "single.txt" + assert single.path == f"{base_dir}/write/single.txt" + assert await sandbox.files.read_text(single.path) == "single file" + + batch = await sandbox.files.write( + [ + {"path": f"{base_dir}/write/batch-a.txt", "data": "batch-a"}, + {"path": f"{base_dir}/write/batch-b.bin", "data": bytes([1, 2, 3, 4])}, + ] + ) + assert [entry.name for entry in batch] == 
["batch-a.txt", "batch-b.bin"] + assert await sandbox.files.read_text(f"{base_dir}/write/batch-a.txt") == "batch-a" + assert await sandbox.files.read_bytes(f"{base_dir}/write/batch-b.bin") == bytes( + [1, 2, 3, 4] ) - assert result.content == "hello" - assert result.encoding == "utf8" - assert result.bytes_read == 5 - assert result.truncated is True - - source = bytes([0, 1, 2, 3, 4]) - await sandbox.files.write_bytes(f"{base_dir}/bytes.bin", source) - content = await sandbox.files.read_bytes(f"{base_dir}/bytes.bin") - assert content == source - stat = await sandbox.files.stat(f"{base_dir}/hello.txt") - assert stat.name == "hello.txt" + text_path = f"{base_dir}/write-options/text.txt" + await sandbox.files.write_text(text_path, "hello", mode="0640") + await sandbox.files.write_text(text_path, " world", append=True) + assert await sandbox.files.read_text(text_path) == "hello world" + assert (await sandbox.files.get_info(text_path)).mode == 0o640 - listing = await sandbox.files.list(base_dir) - assert any(entry.name == "hello.txt" for entry in listing.entries) + bytes_path = f"{base_dir}/write-options/bytes.bin" + await sandbox.files.write_bytes(bytes_path, bytes([1, 2]), mode="0600") + await sandbox.files.write_bytes(bytes_path, bytes([3]), append=True) + assert await sandbox.files.read_bytes(bytes_path) == bytes([1, 2, 3]) - uploaded = await sandbox.files.upload(f"{base_dir}/upload.txt", "uploaded from sdk") + transfer_path = f"{base_dir}/transfer/upload.txt" + uploaded = await sandbox.files.upload(transfer_path, "uploaded from sdk") assert uploaded.bytes_written > 0 - - downloaded = await sandbox.files.download(f"{base_dir}/upload.txt") - assert downloaded.decode("utf-8") == "uploaded from sdk" - - moved = await sandbox.files.move( - source=f"{base_dir}/hello.txt", - destination=f"{base_dir}/hello-moved.txt", + assert (await sandbox.files.download(transfer_path)).decode("utf-8") == "uploaded from sdk" + + file_path = f"{base_dir}/rename/hello.txt" + renamed_path = 
f"{base_dir}/rename/hello-renamed.txt" + await sandbox.files.write_text(file_path, "rename me") + renamed = await sandbox.files.rename(file_path, renamed_path) + assert renamed.path == renamed_path + assert await sandbox.files.exists(file_path) is False + assert await sandbox.files.read_text(renamed_path) == "rename me" + + link_path = f"{base_dir}/rename/hello-link.txt" + copied_link_path = f"{base_dir}/rename/hello-link-copy.txt" + renamed_link_path = f"{base_dir}/rename/hello-link-renamed.txt" + result = await sandbox.exec( + { + "command": "bash", + "args": ["-lc", f'ln -sfn "{renamed_path}" "{link_path}"'], + } ) - assert moved.to == f"{base_dir}/hello-moved.txt" - - copied = await sandbox.files.copy( - source=f"{base_dir}/hello-moved.txt", - destination=f"{base_dir}/hello-copy.txt", + assert result.exit_code == 0 + copied_link = await sandbox.files.copy(source=link_path, destination=copied_link_path) + assert copied_link.path == copied_link_path + assert (await sandbox.files.get_info(copied_link_path)).symlink_target == renamed_path + renamed_link = await sandbox.files.rename(copied_link_path, renamed_link_path) + assert renamed_link.path == renamed_link_path + assert (await sandbox.files.get_info(renamed_link_path)).symlink_target == renamed_path + + target_dir = f"{base_dir}/rename-dir/target-dir" + link_dir = f"{base_dir}/rename-dir/link-dir" + renamed_link_dir = f"{base_dir}/rename-dir/link-dir-renamed" + await sandbox.files.make_dir(target_dir) + await sandbox.files.write_text(f"{target_dir}/child.txt", "child") + result = await sandbox.exec( + { + "command": "bash", + "args": ["-lc", f'ln -sfn "{target_dir}" "{link_dir}"'], + } + ) + assert result.exit_code == 0 + renamed = await sandbox.files.rename(link_dir, renamed_link_dir) + assert renamed.path == renamed_link_dir + assert (await sandbox.files.get_info(renamed_link_dir)).symlink_target == target_dir + assert [entry.path for entry in await sandbox.files.list(renamed_link_dir, depth=1)] == [ + 
f"{target_dir}/child.txt" + ] + + source_dir = f"{base_dir}/copy-tree/source" + nested_dir = f"{source_dir}/nested" + nested_target = f"{nested_dir}/target.txt" + destination_dir = f"{base_dir}/copy-tree/destination" + await sandbox.files.make_dir(nested_dir) + await sandbox.files.write_text(nested_target, "payload") + result = await sandbox.exec( + { + "command": "bash", + "args": ["-lc", f'cd "{nested_dir}" && ln -sfn "target.txt" "link.txt"'], + } + ) + assert result.exit_code == 0 + await sandbox.files.copy(source=source_dir, destination=destination_dir, recursive=True) + copied_target = f"{destination_dir}/nested/target.txt" + copied_link = f"{destination_dir}/nested/link.txt" + assert await sandbox.files.read_text(copied_target) == "payload" + assert (await sandbox.files.get_info(copied_link)).symlink_target == copied_target + + loop_dir = f"{base_dir}/loop-list" + loop_nested_dir = f"{loop_dir}/nested" + await sandbox.files.make_dir(loop_nested_dir) + await sandbox.files.write_text(f"{loop_nested_dir}/child.txt", "payload") + result = await sandbox.exec( + { + "command": "bash", + "args": ["-lc", f'cd "{loop_nested_dir}" && ln -sfn .. loop'], + } + ) + assert result.exit_code == 0 + loop_entries = await sandbox.files.list(loop_dir, depth=4) + loop_paths = [entry.path for entry in loop_entries] + assert f"{loop_nested_dir}/loop" in loop_paths + assert not any("/loop/" in path for path in loop_paths) + assert (await sandbox.files.get_info(f"{loop_nested_dir}/loop")).symlink_target == loop_dir + + source_dir = f"{base_dir}/loop-copy/source" + nested_dir = f"{source_dir}/nested" + await sandbox.files.make_dir(nested_dir) + await sandbox.files.write_text(f"{nested_dir}/child.txt", "payload") + result = await sandbox.exec( + { + "command": "bash", + "args": ["-lc", f'cd "{nested_dir}" && ln -sfn .. 
loop'], + } + ) + assert result.exit_code == 0 + destination_dir = f"{base_dir}/loop-copy/destination" + await sandbox.files.copy(source=source_dir, destination=destination_dir, recursive=True) + copied_loop = f"{destination_dir}/nested/loop" + assert (await sandbox.files.get_info(copied_loop)).symlink_target == destination_dir + assert not any( + "/loop/" in entry.path + for entry in await sandbox.files.list(destination_dir, depth=4) ) - assert copied.to == f"{base_dir}/hello-copy.txt" - - await sandbox.files.chmod(path=f"{base_dir}/hello-copy.txt", mode="0640") - stat = await sandbox.files.stat(f"{base_dir}/hello-copy.txt") - assert "640" in stat.mode + source = f"{base_dir}/copy-overwrite/source.txt" + existing_target = f"{base_dir}/copy-overwrite/existing-target.txt" + destination_link = f"{base_dir}/copy-overwrite/destination-link.txt" + await sandbox.files.write_text(source, "source payload") + await sandbox.files.write_text(existing_target, "existing target") + result = await sandbox.exec( + { + "command": "bash", + "args": [ + "-lc", + f'mkdir -p "{base_dir}/copy-overwrite" && ln -sfn "{existing_target}" "{destination_link}"', + ], + } + ) + assert result.exit_code == 0 + await sandbox.files.copy(source=source, destination=destination_link, overwrite=True) + assert await sandbox.files.read_text(destination_link) == "source payload" + assert await sandbox.files.read_text(existing_target) == "existing target" + assert (await sandbox.files.get_info(destination_link)).symlink_target is None + + chmod_path = f"{base_dir}/chmod/file.txt" + await sandbox.files.write_text(chmod_path, "chmod me") + await sandbox.files.chmod(path=chmod_path, mode="0640") + assert (await sandbox.files.get_info(chmod_path)).mode == 0o640 try: await expect_hyperbrowser_error_async( "file chown", - lambda: sandbox.files.chown( - path=f"{base_dir}/hello-copy.txt", - uid=0, - gid=0, - ), + lambda: sandbox.files.chown(path=chmod_path, uid=0, gid=0), status_code=400, service="runtime", 
retryable=False, @@ -114,124 +324,294 @@ async def test_async_sandbox_files_e2e(): except AssertionError as error: if "expected HyperbrowserError, but call succeeded" not in str(error): raise - stat = await sandbox.files.stat(f"{base_dir}/hello-copy.txt") - assert stat.name == "hello-copy.txt" - - watch = await sandbox.files.watch(base_dir, recursive=False) - try: - await sandbox.files.write_text(f"{base_dir}/watch.txt", "watch me") - event = await _next_watch_event(watch, route="stream") - assert "watch.txt" in event.path - - fetched = await sandbox.files.get_watch(watch.id, True) - assert fetched.id == watch.id - assert fetched.current.path == base_dir - finally: - await watch.stop() + assert (await sandbox.files.get_info(chmod_path)).name == "file.txt" + + remove_path = f"{base_dir}/remove/file.txt" + await sandbox.files.write_text(remove_path, "remove me") + await sandbox.files.remove(remove_path) + assert await sandbox.files.exists(remove_path) is False + await sandbox.files.remove(remove_path) + await sandbox.files.remove(f"{base_dir}/remove", recursive=True) + assert await sandbox.files.exists(f"{base_dir}/remove") is False + + target = f"{base_dir}/remove-link/target.txt" + link = f"{base_dir}/remove-link/link.txt" + await sandbox.files.write_text(target, "keep me") + result = await sandbox.exec( + { + "command": "bash", + "args": ["-lc", f'mkdir -p "{base_dir}/remove-link" && ln -sfn "{target}" "{link}"'], + } + ) + assert result.exit_code == 0 + await sandbox.files.remove(link) + assert await sandbox.files.exists(link) is False + assert await sandbox.files.read_text(target) == "keep me" + + target_dir = f"{base_dir}/remove-recursive/target-dir" + target_file = f"{target_dir}/child.txt" + link_dir = f"{base_dir}/remove-recursive/link-dir" + await sandbox.files.make_dir(target_dir) + await sandbox.files.write_text(target_file, "keep tree") + result = await sandbox.exec( + { + "command": "bash", + "args": [ + "-lc", + f'mkdir -p "{base_dir}/remove-recursive" 
&& ln -sfn "{target_dir}" "{link_dir}"', + ], + } + ) + assert result.exit_code == 0 + await sandbox.files.remove(link_dir, recursive=True) + assert await sandbox.files.exists(link_dir) is False + assert await sandbox.files.read_text(target_file) == "keep tree" + + link = f"{base_dir}/escape/file-link" + result = await sandbox.exec( + { + "command": "bash", + "args": ["-lc", f'mkdir -p "{base_dir}/escape" && ln -sfn /etc/hosts "{link}"'], + } + ) + assert result.exit_code == 0 + text = await sandbox.files.read_text(link) + assert "localhost" in text + assert "localhost" in (await sandbox.files.download(link)).decode("utf-8") - watch = await sandbox.files.watch(base_dir, recursive=False) + fixture = await _create_parent_symlink_escape_fixture( + sandbox, base_dir, "parent-escape-read" + ) + assert await sandbox.files.read_text(fixture["escaped_file"]) == "outside secret" + assert (await sandbox.files.download(fixture["escaped_file"])).decode("utf-8") == "outside secret" + assert [entry.path for entry in await sandbox.files.list(fixture["link_dir"], depth=1)] == [ + f'{fixture["outside_dir"]}/secret.txt' + ] + seen = asyncio.get_running_loop().create_future() + + async def on_parent_event(event): + if event.type == "write" and event.name == "fresh.txt" and not seen.done(): + seen.set_result(event.name) + + handle = await sandbox.files.watch_dir(fixture["link_dir"], on_parent_event) try: - await sandbox.files.write_text(f"{base_dir}/watch-refresh-1.txt", "one") - refreshed = await watch.refresh(True) - assert refreshed.current.last_seq > 0 - assert refreshed.current.oldest_seq > 0 - assert any( - "watch-refresh-1.txt" in event.path - for event in (refreshed.current.events or []) - ) - - await sandbox.files.write_text(f"{base_dir}/watch-refresh-2.txt", "two") - event = await _next_watch_event( - watch, - route="ws", - cursor=refreshed.current.last_seq, + await sandbox.files.write_text( + f'{fixture["outside_dir"]}/fresh.txt', "watch parent link" ) - assert 
"watch-refresh-2.txt" in event.path - assert watch.current.last_seq >= event.seq + assert await _await_future(seen) == "fresh.txt" finally: - await watch.stop() + await handle.stop() - watch = await sandbox.files.watch(base_dir, recursive=False) - try: - burst = await sandbox.exec( - { - "command": "bash", - "args": [ - "-lc", - f'for i in $(seq 1 1200); do echo x > "{base_dir}/overflow-$i.txt"; rm -f "{base_dir}/overflow-$i.txt"; done', - ], - } - ) - assert burst.exit_code == 0 + fixture = await _create_parent_symlink_escape_fixture( + sandbox, base_dir, "parent-escape-mutate" + ) + info = await sandbox.files.get_info(fixture["escaped_file"]) + assert info.type == "file" + assert info.size == len("outside secret") + copied = await sandbox.files.copy( + source=fixture["escaped_file"], + destination=f"{base_dir}/parent-escape-mutate/copied.txt", + ) + assert copied.path == f"{base_dir}/parent-escape-mutate/copied.txt" + assert await sandbox.files.read_text(copied.path) == "outside secret" + renamed = await sandbox.files.rename( + fixture["escaped_file"], + f"{base_dir}/parent-escape-mutate/renamed.txt", + ) + assert renamed.path == f"{base_dir}/parent-escape-mutate/renamed.txt" + assert await sandbox.files.exists(fixture["outside_file"]) is False + assert await sandbox.files.read_text(renamed.path) == "outside secret" + await sandbox.files.write_text(fixture["escaped_file"], "remove me") + await sandbox.files.remove(fixture["escaped_file"]) + outside_read = await sandbox.exec( + { + "command": "bash", + "args": [ + "-lc", + f'if [ -e "{fixture["outside_file"]}" ]; then cat "{fixture["outside_file"]}"; else printf "__MISSING__"; fi', + ], + } + ) + assert outside_read.exit_code == 0 + assert outside_read.stdout.strip() == "__MISSING__" + + target_dir = f"/var/tmp/{make_test_name('watch-outside-target')}" + target_file = f"{target_dir}/child.txt" + link = f"{base_dir}/escape/dir-link" + result = await sandbox.exec( + { + "command": "bash", + "args": [ + "-lc", + 
f'mkdir -p "{base_dir}/escape" "{target_dir}" && printf "child" > "{target_file}" && ln -sfn "{target_dir}" "{link}"', + ], + } + ) + assert result.exit_code == 0 + assert [entry.path for entry in await sandbox.files.list(link, depth=1)] == [target_file] + seen = asyncio.get_running_loop().create_future() - rolled = await _wait_for_watch_buffer_rollover(watch) - assert rolled.current.oldest_seq > 1 + async def on_link_event(event): + if event.type == "write" and event.name == "file.txt" and not seen.done(): + seen.set_result(event.name) - await expect_hyperbrowser_error_async( - "watch replay window expired", - lambda: anext(watch.events(route="ws", cursor=0)), - status_code=410, - code="replay_window_expired", - service="runtime", - retryable=False, - message_includes="Replay window expired", + handle = await sandbox.files.watch_dir(link, on_link_event) + try: + await sandbox.files.write_text(f"{target_dir}/file.txt", "watch through link") + assert await _await_future(seen) == "file.txt" + finally: + await handle.stop() + + watch_dir = f"{base_dir}/watch" + await sandbox.files.make_dir(f"{watch_dir}/nested", parents=True) + direct_future = asyncio.get_running_loop().create_future() + recursive_future = asyncio.get_running_loop().create_future() + + async def on_direct(event): + if event.type == "write" and event.name == "direct.txt" and not direct_future.done(): + direct_future.set_result(event.name) + + async def on_recursive(event): + if ( + event.type == "write" + and event.name == "nested/recursive.txt" + and not recursive_future.done() + ): + recursive_future.set_result(event.name) + + direct_handle = await sandbox.files.watch_dir(watch_dir, on_direct) + recursive_handle = await sandbox.files.watch_dir( + watch_dir, + on_recursive, + recursive=True, + ) + try: + await sandbox.files.write_text(f"{watch_dir}/direct.txt", "watch me") + await sandbox.files.write_text( + f"{watch_dir}/nested/recursive.txt", "watch me too" ) + assert await 
_await_future(direct_future) == "direct.txt" + assert await _await_future(recursive_future) == "nested/recursive.txt" finally: - await watch.stop() + await direct_handle.stop() + await recursive_handle.stop() - upload = await sandbox.files.upload_url( - f"{base_dir}/presign-upload.txt", - one_time=True, + await expect_hyperbrowser_error_async( + "watch missing directory", + lambda: sandbox.files.watch_dir(f"{base_dir}/watch-missing", lambda event: None), + status_code=404, + service="runtime", + retryable=False, + message_includes_any=["not found", "no such file"], + ) + + invalid_file_path = f"{base_dir}/watch-invalid/file.txt" + await sandbox.files.write_text(invalid_file_path, "not a directory") + await expect_hyperbrowser_error_async( + "watch file path", + lambda: sandbox.files.watch_dir(invalid_file_path, lambda event: None), + status_code=400, + service="runtime", + retryable=False, + message_includes="not a directory", ) - assert upload.path == f"{base_dir}/presign-upload.txt" - assert upload.url - assert upload.method == "PUT" - upload_response = fetch_signed_url( + path = f"{base_dir}/presign/file.txt" + upload = await sandbox.files.upload_url(path, one_time=True) + assert upload.path == path + assert upload.method == "PUT" + upload_response = await asyncio.to_thread( + fetch_signed_url, upload.url, method=upload.method, body="presigned upload body", ) assert upload_response.status_code == 200 + assert await sandbox.files.read_text(path) == "presigned upload body" - uploaded_body = await sandbox.files.read_text(f"{base_dir}/presign-upload.txt") - assert uploaded_body == "presigned upload body" - - download = await sandbox.files.download_url( - f"{base_dir}/presign-upload.txt", - one_time=True, - ) - assert download.path == f"{base_dir}/presign-upload.txt" + download = await sandbox.files.download_url(path, one_time=True) + assert download.path == path assert download.method == "GET" - - download_response = fetch_signed_url(download.url, 
method=download.method) + download_response = await asyncio.to_thread( + fetch_signed_url, + download.url, + method=download.method, + ) assert download_response.status_code == 200 assert download_response.text == "presigned upload body" - deleted_file = await sandbox.files.delete(f"{base_dir}/hello-copy.txt") - assert deleted_file.path == f"{base_dir}/hello-copy.txt" - - deleted_dir = await sandbox.files.delete(base_dir, recursive=True) - assert deleted_dir.path == base_dir - assert await sandbox.files.exists(base_dir) is False - + path = f"{base_dir}/presign-race/upload.txt" + upload = await sandbox.files.upload_url(path, one_time=True) + first, second = await asyncio.gather( + asyncio.to_thread( + fetch_signed_url, + upload.url, + method=upload.method, + body="first body", + ), + asyncio.to_thread( + fetch_signed_url, + upload.url, + method=upload.method, + body="second body", + ), + ) + assert sorted([first.status_code, second.status_code]) == [200, 401] + assert await sandbox.files.read_text(path) in {"first body", "second body"} + + path = f"{base_dir}/presign-race/download.txt" + await sandbox.files.write_text(path, "download once") + download = await sandbox.files.download_url(path, one_time=True) + first, second = await asyncio.gather( + asyncio.to_thread(fetch_signed_url, download.url, method=download.method), + asyncio.to_thread(fetch_signed_url, download.url, method=download.method), + ) + assert sorted([first.status_code, second.status_code]) == [200, 401] + assert "download once" in {first.text, second.text} + + source = f"{base_dir}/rename-race/source.txt" + left = f"{base_dir}/rename-race/left.txt" + right = f"{base_dir}/rename-race/right.txt" + await sandbox.files.write_text(source, "race") + results = await asyncio.gather( + sandbox.files.rename(source, left), + sandbox.files.rename(source, right), + return_exceptions=True, + ) + fulfilled = [result for result in results if not isinstance(result, Exception)] + rejected = [result for result in 
results if isinstance(result, Exception)] + assert len(fulfilled) == 1 + assert len(rejected) == 1 await expect_hyperbrowser_error_async( - "missing file read", - lambda: sandbox.files.read_text(f"{base_dir}/still-missing.txt"), + "rename race failure", + lambda: (_async_raise(rejected[0])), status_code=404, service="runtime", retryable=False, message_includes_any=["not found", "no such file"], ) + winner_path = left if await sandbox.files.exists(left) else right + assert await sandbox.files.read_text(winner_path) == "race" await expect_hyperbrowser_error_async( - "missing file delete", - lambda: sandbox.files.delete(f"{base_dir}/still-missing.txt"), + "missing file read", + lambda: sandbox.files.read_text(f"{base_dir}/still-missing.txt"), status_code=404, service="runtime", retryable=False, message_includes_any=["not found", "no such file"], ) + + try: + await sandbox.files.list(base_dir, depth=0) + except ValueError as error: + assert "depth should be at least one" in str(error) + else: + raise AssertionError("expected invalid depth to fail locally") finally: await stop_sandbox_if_running_async(sandbox) await client.close() + + +async def _async_raise(error): + raise error diff --git a/tests/sandbox/e2e/test_async_lifecycle.py b/tests/sandbox/e2e/test_async_lifecycle.py index 76dd7f1d..5e85a39f 100644 --- a/tests/sandbox/e2e/test_async_lifecycle.py +++ b/tests/sandbox/e2e/test_async_lifecycle.py @@ -1,39 +1,76 @@ +import asyncio from datetime import datetime, timedelta, timezone from uuid import uuid4 import pytest -from hyperbrowser import AsyncHyperbrowser -from hyperbrowser.models import SandboxListParams, SandboxRuntimeSession +from hyperbrowser.exceptions import HyperbrowserError +from hyperbrowser.models import SandboxRuntimeSession +from tests.helpers.config import DEFAULT_IMAGE_NAME, create_async_client from tests.helpers.errors import expect_hyperbrowser_error_async +from tests.helpers.http import get_image_by_name_async from tests.helpers.sandbox import 
( default_sandbox_params, stop_sandbox_if_running_async, wait_for_runtime_ready_async, ) +CUSTOM_IMAGE_NAME = "node" +SNAPSHOT_CREATE_RETRY_DELAY_SECONDS = 0.5 +SNAPSHOT_CREATE_RETRY_TIMEOUT_SECONDS = 60 + + +async def _create_sandbox_with_snapshot_retry(client, params): + deadline = asyncio.get_running_loop().time() + SNAPSHOT_CREATE_RETRY_TIMEOUT_SECONDS + last_error = None + + while asyncio.get_running_loop().time() < deadline: + try: + return await client.sandboxes.create(params) + except HyperbrowserError as error: + is_snapshot_catalog_race = ( + error.status_code == 404 + # retry only the snapshot-catalog propagation race; other 404s are real failures + and "snapshot not found" in str(error).lower() + ) + if not is_snapshot_catalog_race: + raise + last_error = error + await asyncio.sleep(SNAPSHOT_CREATE_RETRY_DELAY_SECONDS) + + if isinstance(last_error, Exception): + raise last_error + raise RuntimeError("snapshot create retry failed") + @pytest.mark.anyio async def test_async_sandbox_lifecycle_e2e(): - client = AsyncHyperbrowser() + client = create_async_client() sandbox = None stale_handle = None secondary = None + image_sandbox = None + custom_image_sandbox = None + custom_snapshot_sandbox = None + memory_snapshot = None + custom_image_memory_snapshot = None + custom_image = None try: sandbox = await client.sandboxes.create(default_sandbox_params("py-async-lifecycle")) stale_handle = await client.sandboxes.get(sandbox.id) + custom_image = await get_image_by_name_async(CUSTOM_IMAGE_NAME) await wait_for_runtime_ready_async(sandbox) - assert sandbox.to_dict()["token"] + detail = sandbox.to_dict() + assert detail["token"] assert sandbox.runtime.base_url assert sandbox.token_expires_at is not None - session = await sandbox.create_runtime_session() - assert session.token - assert session.sandbox_id == sandbox.id - assert session.runtime.base_url == sandbox.runtime.base_url + stale_detail = stale_handle.to_dict() + assert stale_detail["token"] + assert stale_handle.runtime.base_url == sandbox.runtime.base_url
info = await sandbox.info() assert info.id == sandbox.id @@ -43,27 +80,41 @@ async def test_async_sandbox_lifecycle_e2e(): await sandbox.connect() assert sandbox.status == "active" - original_create_runtime_session = sandbox.create_runtime_session - valid_session = await original_create_runtime_session(force_refresh=True) + memory_snapshot = await sandbox.create_memory_snapshot() + assert memory_snapshot.snapshot_name + assert memory_snapshot.snapshot_id + assert memory_snapshot.namespace + assert memory_snapshot.status + assert memory_snapshot.image_name + assert memory_snapshot.image_id + assert memory_snapshot.image_namespace + + valid_detail = await sandbox.info() invalid_jwt = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.e30.invalid-signature" refresh_count = 0 + original_get_detail = sandbox._service.get_detail + + sandbox._runtime_session = SandboxRuntimeSession( + sandbox_id=sandbox.id, + status=valid_detail.status, + region=valid_detail.region, + token=invalid_jwt, + token_expires_at=datetime.now(timezone.utc) + timedelta(hours=1), + runtime=valid_detail.runtime, + ) + sandbox._detail = valid_detail.model_copy( + update={ + "token": invalid_jwt, + "token_expires_at": sandbox._runtime_session.token_expires_at, + } + ) - async def patched_create_runtime_session(force_refresh: bool = False): + async def patched_get_detail(sandbox_id: str): nonlocal refresh_count - if force_refresh: - refresh_count += 1 - return await original_create_runtime_session(force_refresh=True) - - return SandboxRuntimeSession( - sandbox_id=valid_session.sandbox_id, - status=valid_session.status, - region=valid_session.region, - token=invalid_jwt, - token_expires_at=datetime.now(timezone.utc) + timedelta(hours=1), - runtime=valid_session.runtime, - ) + refresh_count += 1 + return await original_get_detail(sandbox_id) - sandbox.create_runtime_session = patched_create_runtime_session + sandbox._service.get_detail = patched_get_detail try: result = await sandbox.exec("echo runtime-refresh-ok") 
assert result.exit_code == 0 @@ -72,17 +123,84 @@ async def patched_create_runtime_session(force_refresh: bool = False): assert sandbox.to_dict()["token"] assert sandbox.to_dict()["token"] != invalid_jwt finally: - sandbox.create_runtime_session = original_create_runtime_session + sandbox._service.get_detail = original_get_detail + + image_sandbox = await client.sandboxes.create({"imageName": DEFAULT_IMAGE_NAME}) + assert image_sandbox.id + assert image_sandbox.status == "active" + response = await image_sandbox.stop() + assert response.success is True + assert image_sandbox.status == "closed" + + custom_image_sandbox = await client.sandboxes.create( + { + "imageName": custom_image["imageName"], + "imageId": custom_image["id"], + } + ) + assert custom_image_sandbox.id + assert custom_image_sandbox.status == "active" + await wait_for_runtime_ready_async(custom_image_sandbox) + + custom_image_memory_snapshot = await custom_image_sandbox.create_memory_snapshot() + assert custom_image_memory_snapshot.image_name == custom_image["imageName"] + assert custom_image_memory_snapshot.image_id == custom_image["id"] + assert custom_image_memory_snapshot.image_namespace == custom_image["namespace"] + + custom_snapshot_sandbox = await _create_sandbox_with_snapshot_retry( + client, + { + "snapshotName": custom_image_memory_snapshot.snapshot_name, + "snapshotId": custom_image_memory_snapshot.snapshot_id, + }, + ) + assert custom_snapshot_sandbox.id + assert custom_snapshot_sandbox.status == "active" + response = await custom_snapshot_sandbox.stop() + assert response.success is True + assert custom_snapshot_sandbox.status == "closed" + + await expect_hyperbrowser_error_async( + "mismatched image selector", + lambda: client.sandboxes.create( + { + "imageName": custom_image["imageName"], + "imageId": str(uuid4()), + } + ), + status_code=404, + service="control", + retryable=False, + message_includes_any=["image not found", "not found"], + ) - listing = await client.sandboxes.list( - 
SandboxListParams(search=sandbox.id, limit=20) + await expect_hyperbrowser_error_async( + "mismatched snapshot selector", + lambda: client.sandboxes.create( + { + "snapshotName": memory_snapshot.snapshot_name, + "snapshotId": str(uuid4()), + } + ), + status_code=404, + service="control", + retryable=False, + message_includes_any=["snapshot not found", "not found"], ) - assert any(entry.id == sandbox.id for entry in listing.sandboxes) response = await sandbox.stop() assert response.success is True assert sandbox.status == "closed" + await expect_hyperbrowser_error_async( + "stopped sandbox memory snapshot", + lambda: sandbox.create_memory_snapshot(), + status_code=409, + service="control", + retryable=False, + message_includes="Sandbox is not running", + ) + await expect_hyperbrowser_error_async( "stopped sandbox connect", lambda: sandbox.connect(), @@ -107,9 +225,10 @@ async def patched_create_runtime_session(force_refresh: bool = False): "stale sandbox connect", lambda: stale_handle.connect(), status_code=409, - service="control", + code="sandbox_not_running", + service="runtime", retryable=False, - message_includes="Sandbox is not running", + message_includes="not running", ) await expect_hyperbrowser_error_async( @@ -131,8 +250,12 @@ async def patched_create_runtime_session(force_refresh: bool = False): message_includes="not found", ) - secondary = await client.sandboxes.start_from_snapshot( - default_sandbox_params("py-async-secondary") + secondary = await _create_sandbox_with_snapshot_retry( + client, + { + "snapshotName": memory_snapshot.snapshot_name, + "snapshotId": memory_snapshot.snapshot_id, + }, ) response = await secondary.stop() assert response.success is True @@ -141,4 +264,7 @@ async def patched_create_runtime_session(force_refresh: bool = False): await stop_sandbox_if_running_async(sandbox) await stop_sandbox_if_running_async(stale_handle) await stop_sandbox_if_running_async(secondary) + await stop_sandbox_if_running_async(image_sandbox) + await 
stop_sandbox_if_running_async(custom_image_sandbox) + await stop_sandbox_if_running_async(custom_snapshot_sandbox) await client.close() diff --git a/tests/sandbox/e2e/test_async_process.py b/tests/sandbox/e2e/test_async_process.py index 4df9a3b3..44401d8b 100644 --- a/tests/sandbox/e2e/test_async_process.py +++ b/tests/sandbox/e2e/test_async_process.py @@ -1,7 +1,6 @@ import pytest -from hyperbrowser import AsyncHyperbrowser - +from tests.helpers.config import create_async_client from tests.helpers.errors import expect_hyperbrowser_error_async from tests.helpers.sandbox import ( default_sandbox_params, @@ -21,7 +20,7 @@ async def _collect_process_stream(events): @pytest.mark.anyio async def test_async_sandbox_process_e2e(): - client = AsyncHyperbrowser() + client = create_async_client() sandbox = None try: diff --git a/tests/sandbox/e2e/test_async_sudo.py b/tests/sandbox/e2e/test_async_sudo.py new file mode 100644 index 00000000..fa2e92e2 --- /dev/null +++ b/tests/sandbox/e2e/test_async_sudo.py @@ -0,0 +1,69 @@ +import pytest + +from tests.helpers.config import create_async_client +from tests.helpers.sandbox import ( + default_sandbox_params, + stop_sandbox_if_running_async, + wait_for_runtime_ready_async, +) + + +@pytest.mark.anyio +async def test_async_sandbox_sudo_e2e(): + client = create_async_client() + sandbox = None + + try: + sandbox = await client.sandboxes.create(default_sandbox_params("py-async-sudo")) + await wait_for_runtime_ready_async(sandbox) + + path = "/tmp/sdk-sudo-check.txt" + + runtime_user = await sandbox.exec( + { + "command": "bash", + "args": ["-lc", "whoami && id -u && id -g"], + } + ) + assert runtime_user.exit_code == 0 + assert "ubuntu" in runtime_user.stdout + assert "1000" in runtime_user.stdout + + direct_chown = await sandbox.exec( + { + "command": "bash", + "args": [ + "-lc", + " && ".join( + [ + f'printf "sudo-check" > "{path}"', + f'chown root:root "{path}"', + ] + ), + ], + } + ) + assert direct_chown.exit_code != 0 + assert 
"operation not permitted" in direct_chown.stderr.lower() + + sudo_result = await sandbox.exec( + { + "command": "bash", + "args": [ + "-lc", + " && ".join( + [ + "sudo -n whoami", + f'sudo -n chown root:root "{path}"', + f"stat -c '%U:%G' \"{path}\"", + ] + ), + ], + } + ) + assert sudo_result.exit_code == 0 + assert "root" in sudo_result.stdout + assert "root:root" in sudo_result.stdout + finally: + await stop_sandbox_if_running_async(sandbox) + await client.close() diff --git a/tests/sandbox/e2e/test_async_terminal_smoke.py b/tests/sandbox/e2e/test_async_terminal_smoke.py index cfc7c20d..c986c7f3 100644 --- a/tests/sandbox/e2e/test_async_terminal_smoke.py +++ b/tests/sandbox/e2e/test_async_terminal_smoke.py @@ -1,7 +1,8 @@ -import pytest +import asyncio -from hyperbrowser import AsyncHyperbrowser +import pytest +from tests.helpers.config import create_async_client from tests.helpers.errors import expect_hyperbrowser_error_async from tests.helpers.sandbox import ( default_sandbox_params, @@ -24,9 +25,39 @@ async def _collect_terminal_session(connection): return output, exit_code +def _terminal_status_output(status) -> str: + return "".join(chunk.data for chunk in ((status.output if status else None) or [])) + + +def _terminal_status_raw_output(status) -> str: + return b"".join(chunk.raw for chunk in ((status.output if status else None) or [])).decode( + "utf-8" + ) + + +async def _wait_for_terminal_status_output( + read_status, + marker: str, + timeout_seconds: float = 5.0, +): + deadline = asyncio.get_running_loop().time() + timeout_seconds + last_status = None + + while asyncio.get_running_loop().time() < deadline: + last_status = await read_status() + if marker in _terminal_status_output(last_status): + return last_status + await asyncio.sleep(0.1) + + raise AssertionError( + f"timed out waiting for terminal output {marker!r}; " + f"last output={_terminal_status_output(last_status)!r}" + ) + + @pytest.mark.anyio async def test_async_sandbox_terminal_e2e(): - 
client = AsyncHyperbrowser() + client = create_async_client() sandbox = None try: @@ -87,6 +118,66 @@ async def test_async_sandbox_terminal_e2e(): status = await terminal.wait(timeout_ms=2000) assert status.running is False + marker = "terminal-get-output" + terminal = await sandbox.terminal.create( + { + "command": "bash", + "args": ["-lc", f"printf '{marker}' && sleep 1"], + "rows": 24, + "cols": 80, + } + ) + without_output = await sandbox.terminal.get(terminal.id) + assert without_output.current.output is None + fetched = await _wait_for_terminal_status_output( + lambda: _get_terminal_status(sandbox, terminal.id, include_output=True), + marker, + ) + assert marker in _terminal_status_output(fetched) + assert marker in _terminal_status_raw_output(fetched) + assert fetched.output + status = await terminal.wait(timeout_ms=2000) + assert status.running is False + assert status.exit_code == 0 + + marker = "terminal-refresh-output" + terminal = await sandbox.terminal.create( + { + "command": "bash", + "args": ["-lc", f"printf '{marker}' && sleep 1"], + "rows": 24, + "cols": 80, + } + ) + without_output = await terminal.refresh() + assert without_output.current.output is None + refreshed = await _wait_for_terminal_status_output( + lambda: _refresh_terminal_status(terminal, include_output=True), + marker, + ) + assert marker in _terminal_status_output(refreshed) + assert marker in _terminal_status_raw_output(refreshed) + assert refreshed.output + status = await terminal.wait(timeout_ms=2000) + assert status.running is False + assert status.exit_code == 0 + + marker = "terminal-wait-output" + terminal = await sandbox.terminal.create( + { + "command": "bash", + "args": ["-lc", f"printf '{marker}'"], + "rows": 24, + "cols": 80, + } + ) + status = await terminal.wait(timeout_ms=2000, include_output=True) + assert status.running is False + assert status.exit_code == 0 + assert marker in _terminal_status_output(status) + assert marker in _terminal_status_raw_output(status) + 
assert status.output + timeout_terminal = await sandbox.pty.create( { "command": "bash", @@ -131,3 +222,11 @@ async def test_async_sandbox_terminal_e2e(): finally: await stop_sandbox_if_running_async(sandbox) await client.close() + + +async def _get_terminal_status(sandbox, terminal_id: str, *, include_output: bool = False): + return (await sandbox.terminal.get(terminal_id, include_output=include_output)).current + + +async def _refresh_terminal_status(terminal, *, include_output: bool = False): + return (await terminal.refresh(include_output=include_output)).current diff --git a/tests/sandbox/e2e/test_expose.py b/tests/sandbox/e2e/test_expose.py new file mode 100644 index 00000000..e2956df7 --- /dev/null +++ b/tests/sandbox/e2e/test_expose.py @@ -0,0 +1,130 @@ +import time + +from tests.helpers.config import create_client +from tests.helpers.errors import expect_hyperbrowser_error +from tests.helpers.http import fetch_runtime_url +from tests.helpers.sandbox import ( + default_sandbox_params, + stop_sandbox_if_running, + wait_for_runtime_ready, +) + +client = create_client() +HTTP_PORT = 3210 + + +def _wait_for_http_response(url: str, *, headers=None, predicate, attempts: int = 15): + last_status = 0 + last_body = "" + + for attempt in range(1, attempts + 1): + try: + response = fetch_runtime_url(url, headers=headers) + body = response.text + last_status = response.status_code + last_body = body + if predicate(response.status_code, body): + return response.status_code, body + except Exception as error: # pragma: no cover - network edge in e2e + last_body = str(error) + + if attempt < attempts: + time.sleep(0.2 * attempt) + + raise AssertionError( + f"did not receive expected response for {url}; " + f"last status={last_status}, last body={last_body!r}" + ) + + +def test_sandbox_expose_e2e(): + sandbox = None + server_process = None + + try: + sandbox = client.sandboxes.create(default_sandbox_params("py-sdk-expose")) + wait_for_runtime_ready(sandbox) + + 
server_process = sandbox.processes.start( + { + "command": "node", + "args": [ + "-e", + " ".join( + [ + "const http = require('http');", + f"const port = {HTTP_PORT};", + "const server = http.createServer((req, res) => {", + " res.writeHead(200, {'content-type': 'text/plain'});", + " res.end(`sdk-exposed:${req.method}:${req.url}`);", + "});", + "server.listen(port, '0.0.0.0', () => {", + " console.log(`listening:${port}`);", + "});", + "process.on('SIGTERM', () => server.close(() => process.exit(0)));", + "process.on('SIGINT', () => server.close(() => process.exit(0)));", + ] + ), + ], + } + ) + + token = sandbox.to_dict()["token"] + assert token + _wait_for_http_response( + sandbox.get_exposed_url(HTTP_PORT), + headers={"Authorization": f"Bearer {token}"}, + predicate=lambda status, _: status == 403, + ) + + expect_hyperbrowser_error( + "reserved receiver port expose", + lambda: sandbox.expose({"port": 4001}), + status_code=400, + service="control", + retryable=False, + message_includes="cannot be exposed", + ) + + exposure = sandbox.expose({"port": HTTP_PORT, "auth": False}) + assert exposure.port == HTTP_PORT + assert exposure.auth is False + assert exposure.url == sandbox.get_exposed_url(HTTP_PORT) + + status, body = _wait_for_http_response( + exposure.url, + predicate=lambda response_status, response_body: ( + response_status == 200 and "sdk-exposed:GET:/" in response_body + ), + ) + assert status == 200 + assert "sdk-exposed:GET:/" in body + + exposure = sandbox.expose({"port": HTTP_PORT, "auth": True}) + assert exposure.auth is True + + status, _ = _wait_for_http_response( + exposure.url, + predicate=lambda response_status, _: response_status == 401, + ) + assert status == 401 + + sandbox.refresh() + token = sandbox.to_dict()["token"] + assert token + status, body = _wait_for_http_response( + exposure.url, + headers={"Authorization": f"Bearer {token}"}, + predicate=lambda response_status, response_body: ( + response_status == 200 and "sdk-exposed:GET:/" in 
response_body + ), + ) + assert status == 200 + assert "sdk-exposed:GET:/" in body + finally: + if server_process is not None: + try: + server_process.kill() + except Exception: + pass + stop_sandbox_if_running(sandbox) diff --git a/tests/sandbox/e2e/test_files.py b/tests/sandbox/e2e/test_files.py index fded4a88..36585f86 100644 --- a/tests/sandbox/e2e/test_files.py +++ b/tests/sandbox/e2e/test_files.py @@ -1,4 +1,5 @@ -import time +from concurrent.futures import ThreadPoolExecutor +from queue import Empty, Queue from tests.helpers.config import create_client, make_test_name from tests.helpers.errors import expect_hyperbrowser_error @@ -12,20 +13,47 @@ client = create_client() -def _next_watch_event(watch, *, route="ws", cursor=None): - for event in watch.events(route=route, cursor=cursor): - if event.type == "event": - return event.event - raise RuntimeError("watch stream ended before an event was received") +def _read_stream_text(stream) -> str: + return stream.read().decode("utf-8") -def _wait_for_watch_buffer_rollover(watch, *, attempts=20, delay_seconds=0.1): - for _ in range(attempts): - refreshed = watch.refresh() - if refreshed.current.oldest_seq > 1: - return refreshed - time.sleep(delay_seconds) - raise RuntimeError("watch buffer did not roll over before timeout") +def _await_queue_value(queue: Queue, timeout: float = 10.0): + try: + return queue.get(timeout=timeout) + except Empty as error: + raise AssertionError("timed out waiting for watch event") from error + + +def _create_parent_symlink_escape_fixture(sandbox, base_dir: str, name: str): + allowed_dir = f"{base_dir}/{name}" + outside_dir = f"/var/tmp/{make_test_name(name)}" + outside_file = f"{outside_dir}/secret.txt" + link_dir = f"{allowed_dir}/evil" + escaped_file = f"{link_dir}/secret.txt" + setup = sandbox.exec( + { + "command": "bash", + "args": [ + "-lc", + " && ".join( + [ + f'mkdir -p "{allowed_dir}"', + f'mkdir -p "{outside_dir}"', + f'printf "outside secret" > "{outside_file}"', + f'ln 
-sfn "{outside_dir}" "{link_dir}"', + ] + ), + ], + } + ) + assert setup.exit_code == 0 + return { + "allowed_dir": allowed_dir, + "outside_dir": outside_dir, + "outside_file": outside_file, + "link_dir": link_dir, + "escaped_file": escaped_file, + } def test_sandbox_files_e2e(): @@ -38,68 +66,250 @@ def test_sandbox_files_e2e(): assert sandbox.files.exists(f"{base_dir}/missing.txt") is False - result = sandbox.files.mkdir(base_dir, parents=True) - assert result.path == base_dir - - sandbox.files.write_text(f"{base_dir}/hello.txt", "hello from sdk files") - content = sandbox.files.read_text(f"{base_dir}/hello.txt") - assert content == "hello from sdk files" - - chunk = sandbox.files.read_text(f"{base_dir}/hello.txt", offset=6, length=4) - assert chunk == "from" - - result = sandbox.files.read( - f"{base_dir}/hello.txt", - offset=0, - length=5, - encoding="utf8", + path = f"{base_dir}/dirs/root" + assert sandbox.files.make_dir(path) is True + assert sandbox.files.make_dir(path) is False + + info_path = f"{base_dir}/info/hello.txt" + sandbox.files.write_text(info_path, "hello from sdk files") + info = sandbox.files.get_info(info_path) + assert info.name == "hello.txt" + assert info.path == info_path + assert info.type == "file" + assert info.size == len("hello from sdk files") + assert info.mode == 0o644 + assert info.permissions == "-rw-r--r--" + assert info.owner + assert info.group + assert info.modified_time is not None + + list_dir = f"{base_dir}/list" + sandbox.files.make_dir(f"{list_dir}/nested/inner", parents=True) + sandbox.files.write_text(f"{list_dir}/root.txt", "root") + sandbox.files.write_text(f"{list_dir}/nested/child.txt", "child") + sandbox.files.write_text(f"{list_dir}/nested/inner/grandchild.txt", "grandchild") + + depth_one = sandbox.files.list(list_dir, depth=1) + assert [entry.name for entry in depth_one] == ["nested", "root.txt"] + assert [entry.type for entry in depth_one] == ["dir", "file"] + + depth_two = sandbox.files.list(list_dir, 
depth=2) + assert [entry.path for entry in depth_two] == [ + f"{list_dir}/nested", + f"{list_dir}/nested/child.txt", + f"{list_dir}/nested/inner", + f"{list_dir}/root.txt", + ] + + symlink_dir = f"{base_dir}/list-symlink" + target = f"{symlink_dir}/target.txt" + link = f"{symlink_dir}/link.txt" + sandbox.files.make_dir(symlink_dir) + sandbox.files.write_text(target, "payload") + result = sandbox.exec( + {"command": "bash", "args": ["-lc", f'ln -sfn "{target}" "{link}"']} ) - assert result.content == "hello" - assert result.encoding == "utf8" - assert result.bytes_read == 5 - assert result.truncated is True - - source = bytes([0, 1, 2, 3, 4]) - sandbox.files.write_bytes(f"{base_dir}/bytes.bin", source) - content = sandbox.files.read_bytes(f"{base_dir}/bytes.bin") - assert content == source - - stat = sandbox.files.stat(f"{base_dir}/hello.txt") - assert stat.name == "hello.txt" - - listing = sandbox.files.list(base_dir) - assert any(entry.name == "hello.txt" for entry in listing.entries) - - uploaded = sandbox.files.upload(f"{base_dir}/upload.txt", "uploaded from sdk") + assert result.exit_code == 0 + link_entry = next( + entry for entry in sandbox.files.list(symlink_dir, depth=1) if entry.path == link + ) + assert link_entry.symlink_target == target + + symlink_target = f"{base_dir}/symlink/target.txt" + symlink_link = f"{base_dir}/symlink/link.txt" + sandbox.files.write_text(symlink_target, "target") + result = sandbox.exec( + { + "command": "bash", + "args": [ + "-lc", + f'mkdir -p "{base_dir}/symlink" && ln -sfn "{symlink_target}" "{symlink_link}"', + ], + } + ) + assert result.exit_code == 0 + assert sandbox.files.get_info(symlink_link).symlink_target == symlink_target + + broken_target = f"{base_dir}/symlink-broken/missing-target.txt" + broken_link = f"{base_dir}/symlink-broken/link.txt" + result = sandbox.exec( + { + "command": "bash", + "args": [ + "-lc", + f'mkdir -p "{base_dir}/symlink-broken" && ln -sfn "{broken_target}" "{broken_link}"', + ], + } + ) + 
assert result.exit_code == 0 + assert sandbox.files.exists(broken_link) is True + assert sandbox.files.get_info(broken_link).symlink_target == broken_target + + read_path = f"{base_dir}/read/readme.txt" + sandbox.files.write_text(read_path, "hello from sdk files") + assert sandbox.files.read(read_path) == "hello from sdk files" + assert sandbox.files.read(read_path, format="text", offset=6, length=4) == "from" + assert sandbox.files.read(read_path, format="bytes") == b"hello from sdk files" + assert sandbox.files.read(read_path, format="blob") == b"hello from sdk files" + assert _read_stream_text(sandbox.files.read(read_path, format="stream")) == "hello from sdk files" + + single = sandbox.files.write(f"{base_dir}/write/single.txt", "single file") + assert single.name == "single.txt" + assert single.path == f"{base_dir}/write/single.txt" + assert sandbox.files.read_text(single.path) == "single file" + + batch = sandbox.files.write( + [ + {"path": f"{base_dir}/write/batch-a.txt", "data": "batch-a"}, + {"path": f"{base_dir}/write/batch-b.bin", "data": bytes([1, 2, 3, 4])}, + ] + ) + assert [entry.name for entry in batch] == ["batch-a.txt", "batch-b.bin"] + assert sandbox.files.read_text(f"{base_dir}/write/batch-a.txt") == "batch-a" + assert sandbox.files.read_bytes(f"{base_dir}/write/batch-b.bin") == bytes([1, 2, 3, 4]) + + text_path = f"{base_dir}/write-options/text.txt" + sandbox.files.write_text(text_path, "hello", mode="0640") + sandbox.files.write_text(text_path, " world", append=True) + assert sandbox.files.read_text(text_path) == "hello world" + assert sandbox.files.get_info(text_path).mode == 0o640 + + bytes_path = f"{base_dir}/write-options/bytes.bin" + sandbox.files.write_bytes(bytes_path, bytes([1, 2]), mode="0600") + sandbox.files.write_bytes(bytes_path, bytes([3]), append=True) + assert sandbox.files.read_bytes(bytes_path) == bytes([1, 2, 3]) + + transfer_path = f"{base_dir}/transfer/upload.txt" + uploaded = sandbox.files.upload(transfer_path, "uploaded 
from sdk") assert uploaded.bytes_written > 0 - - downloaded = sandbox.files.download(f"{base_dir}/upload.txt") - assert downloaded.decode("utf-8") == "uploaded from sdk" - - moved = sandbox.files.move( - source=f"{base_dir}/hello.txt", - destination=f"{base_dir}/hello-moved.txt", + assert sandbox.files.download(transfer_path).decode("utf-8") == "uploaded from sdk" + + file_path = f"{base_dir}/rename/hello.txt" + renamed_path = f"{base_dir}/rename/hello-renamed.txt" + sandbox.files.write_text(file_path, "rename me") + renamed = sandbox.files.rename(file_path, renamed_path) + assert renamed.path == renamed_path + assert sandbox.files.exists(file_path) is False + assert sandbox.files.read_text(renamed_path) == "rename me" + + link_path = f"{base_dir}/rename/hello-link.txt" + copied_link_path = f"{base_dir}/rename/hello-link-copy.txt" + renamed_link_path = f"{base_dir}/rename/hello-link-renamed.txt" + result = sandbox.exec( + { + "command": "bash", + "args": ["-lc", f'ln -sfn "{renamed_path}" "{link_path}"'], + } ) - assert moved.to == f"{base_dir}/hello-moved.txt" - - copied = sandbox.files.copy( - source=f"{base_dir}/hello-moved.txt", - destination=f"{base_dir}/hello-copy.txt", + assert result.exit_code == 0 + copied_link = sandbox.files.copy(source=link_path, destination=copied_link_path) + assert copied_link.path == copied_link_path + assert sandbox.files.get_info(copied_link_path).symlink_target == renamed_path + renamed_link = sandbox.files.rename(copied_link_path, renamed_link_path) + assert renamed_link.path == renamed_link_path + assert sandbox.files.get_info(renamed_link_path).symlink_target == renamed_path + + target_dir = f"{base_dir}/rename-dir/target-dir" + link_dir = f"{base_dir}/rename-dir/link-dir" + renamed_link_dir = f"{base_dir}/rename-dir/link-dir-renamed" + sandbox.files.make_dir(target_dir) + sandbox.files.write_text(f"{target_dir}/child.txt", "child") + result = sandbox.exec( + { + "command": "bash", + "args": ["-lc", f'ln -sfn "{target_dir}" 
"{link_dir}"'], + } ) - assert copied.to == f"{base_dir}/hello-copy.txt" - - sandbox.files.chmod(path=f"{base_dir}/hello-copy.txt", mode="0640") - stat = sandbox.files.stat(f"{base_dir}/hello-copy.txt") - assert "640" in stat.mode - + assert result.exit_code == 0 + renamed = sandbox.files.rename(link_dir, renamed_link_dir) + assert renamed.path == renamed_link_dir + assert sandbox.files.get_info(renamed_link_dir).symlink_target == target_dir + assert [entry.path for entry in sandbox.files.list(renamed_link_dir, depth=1)] == [ + f"{target_dir}/child.txt" + ] + + source_dir = f"{base_dir}/copy-tree/source" + nested_dir = f"{source_dir}/nested" + nested_target = f"{nested_dir}/target.txt" + nested_link = f"{nested_dir}/link.txt" + destination_dir = f"{base_dir}/copy-tree/destination" + sandbox.files.make_dir(nested_dir) + sandbox.files.write_text(nested_target, "payload") + result = sandbox.exec( + { + "command": "bash", + "args": ["-lc", f'cd "{nested_dir}" && ln -sfn "target.txt" "link.txt"'], + } + ) + assert result.exit_code == 0 + sandbox.files.copy(source=source_dir, destination=destination_dir, recursive=True) + copied_target = f"{destination_dir}/nested/target.txt" + copied_link = f"{destination_dir}/nested/link.txt" + assert sandbox.files.read_text(copied_target) == "payload" + assert sandbox.files.get_info(copied_link).symlink_target == copied_target + + loop_dir = f"{base_dir}/loop-list" + loop_nested_dir = f"{loop_dir}/nested" + sandbox.files.make_dir(loop_nested_dir) + sandbox.files.write_text(f"{loop_nested_dir}/child.txt", "payload") + result = sandbox.exec( + { + "command": "bash", + "args": ["-lc", f'cd "{loop_nested_dir}" && ln -sfn .. 
loop'], + } + ) + assert result.exit_code == 0 + loop_entries = sandbox.files.list(loop_dir, depth=4) + loop_paths = [entry.path for entry in loop_entries] + assert f"{loop_nested_dir}/loop" in loop_paths + assert not any("/loop/" in path for path in loop_paths) + assert sandbox.files.get_info(f"{loop_nested_dir}/loop").symlink_target == loop_dir + + source_dir = f"{base_dir}/loop-copy/source" + nested_dir = f"{source_dir}/nested" + sandbox.files.make_dir(nested_dir) + sandbox.files.write_text(f"{nested_dir}/child.txt", "payload") + result = sandbox.exec( + { + "command": "bash", + "args": ["-lc", f'cd "{nested_dir}" && ln -sfn .. loop'], + } + ) + assert result.exit_code == 0 + destination_dir = f"{base_dir}/loop-copy/destination" + sandbox.files.copy(source=source_dir, destination=destination_dir, recursive=True) + copied_loop = f"{destination_dir}/nested/loop" + assert sandbox.files.get_info(copied_loop).symlink_target == destination_dir + assert not any("/loop/" in entry.path for entry in sandbox.files.list(destination_dir, depth=4)) + + source = f"{base_dir}/copy-overwrite/source.txt" + existing_target = f"{base_dir}/copy-overwrite/existing-target.txt" + destination_link = f"{base_dir}/copy-overwrite/destination-link.txt" + sandbox.files.write_text(source, "source payload") + sandbox.files.write_text(existing_target, "existing target") + result = sandbox.exec( + { + "command": "bash", + "args": [ + "-lc", + f'mkdir -p "{base_dir}/copy-overwrite" && ln -sfn "{existing_target}" "{destination_link}"', + ], + } + ) + assert result.exit_code == 0 + sandbox.files.copy(source=source, destination=destination_link, overwrite=True) + assert sandbox.files.read_text(destination_link) == "source payload" + assert sandbox.files.read_text(existing_target) == "existing target" + assert sandbox.files.get_info(destination_link).symlink_target is None + + chmod_path = f"{base_dir}/chmod/file.txt" + sandbox.files.write_text(chmod_path, "chmod me") + 
sandbox.files.chmod(path=chmod_path, mode="0640") + assert sandbox.files.get_info(chmod_path).mode == 0o640 try: expect_hyperbrowser_error( "file chown", - lambda: sandbox.files.chown( - path=f"{base_dir}/hello-copy.txt", - uid=0, - gid=0, - ), + lambda: sandbox.files.chown(path=chmod_path, uid=0, gid=0), status_code=400, service="runtime", retryable=False, @@ -108,123 +318,278 @@ def test_sandbox_files_e2e(): except AssertionError as error: if "expected HyperbrowserError, but call succeeded" not in str(error): raise - stat = sandbox.files.stat(f"{base_dir}/hello-copy.txt") - assert stat.name == "hello-copy.txt" - - watch = sandbox.files.watch(base_dir, recursive=False) + assert sandbox.files.get_info(chmod_path).name == "file.txt" + + remove_path = f"{base_dir}/remove/file.txt" + sandbox.files.write_text(remove_path, "remove me") + sandbox.files.remove(remove_path) + assert sandbox.files.exists(remove_path) is False + sandbox.files.remove(remove_path) + sandbox.files.remove(f"{base_dir}/remove", recursive=True) + assert sandbox.files.exists(f"{base_dir}/remove") is False + + target = f"{base_dir}/remove-link/target.txt" + link = f"{base_dir}/remove-link/link.txt" + sandbox.files.write_text(target, "keep me") + result = sandbox.exec( + { + "command": "bash", + "args": ["-lc", f'mkdir -p "{base_dir}/remove-link" && ln -sfn "{target}" "{link}"'], + } + ) + assert result.exit_code == 0 + sandbox.files.remove(link) + assert sandbox.files.exists(link) is False + assert sandbox.files.read_text(target) == "keep me" + + target_dir = f"{base_dir}/remove-recursive/target-dir" + target_file = f"{target_dir}/child.txt" + link_dir = f"{base_dir}/remove-recursive/link-dir" + sandbox.files.make_dir(target_dir) + sandbox.files.write_text(target_file, "keep tree") + result = sandbox.exec( + { + "command": "bash", + "args": [ + "-lc", + f'mkdir -p "{base_dir}/remove-recursive" && ln -sfn "{target_dir}" "{link_dir}"', + ], + } + ) + assert result.exit_code == 0 + 
sandbox.files.remove(link_dir, recursive=True) + assert sandbox.files.exists(link_dir) is False + assert sandbox.files.read_text(target_file) == "keep tree" + + link = f"{base_dir}/escape/file-link" + result = sandbox.exec( + { + "command": "bash", + "args": ["-lc", f'mkdir -p "{base_dir}/escape" && ln -sfn /etc/hosts "{link}"'], + } + ) + assert result.exit_code == 0 + text = sandbox.files.read_text(link) + assert "localhost" in text + assert "localhost" in sandbox.files.download(link).decode("utf-8") + + fixture = _create_parent_symlink_escape_fixture(sandbox, base_dir, "parent-escape-read") + assert sandbox.files.read_text(fixture["escaped_file"]) == "outside secret" + assert sandbox.files.download(fixture["escaped_file"]).decode("utf-8") == "outside secret" + assert [entry.path for entry in sandbox.files.list(fixture["link_dir"], depth=1)] == [ + f'{fixture["outside_dir"]}/secret.txt' + ] + seen = Queue(maxsize=1) + handle = sandbox.files.watch_dir( + fixture["link_dir"], + lambda event: seen.put_nowait(event.name) + if event.type == "write" and event.name == "fresh.txt" + else None, + ) try: - sandbox.files.write_text(f"{base_dir}/watch.txt", "watch me") - event = _next_watch_event(watch, route="stream") - assert "watch.txt" in event.path - - fetched = sandbox.files.get_watch(watch.id, True) - assert fetched.id == watch.id - assert fetched.current.path == base_dir + sandbox.files.write_text(f'{fixture["outside_dir"]}/fresh.txt', "watch parent link") + assert _await_queue_value(seen) == "fresh.txt" finally: - watch.stop() + handle.stop() - watch = sandbox.files.watch(base_dir, recursive=False) + fixture = _create_parent_symlink_escape_fixture(sandbox, base_dir, "parent-escape-mutate") + info = sandbox.files.get_info(fixture["escaped_file"]) + assert info.type == "file" + assert info.size == len("outside secret") + copied = sandbox.files.copy( + source=fixture["escaped_file"], + destination=f"{base_dir}/parent-escape-mutate/copied.txt", + ) + assert copied.path 
== f"{base_dir}/parent-escape-mutate/copied.txt" + assert sandbox.files.read_text(copied.path) == "outside secret" + renamed = sandbox.files.rename( + fixture["escaped_file"], + f"{base_dir}/parent-escape-mutate/renamed.txt", + ) + assert renamed.path == f"{base_dir}/parent-escape-mutate/renamed.txt" + assert sandbox.files.exists(fixture["outside_file"]) is False + assert sandbox.files.read_text(renamed.path) == "outside secret" + sandbox.files.write_text(fixture["escaped_file"], "remove me") + sandbox.files.remove(fixture["escaped_file"]) + outside_read = sandbox.exec( + { + "command": "bash", + "args": [ + "-lc", + f'if [ -e "{fixture["outside_file"]}" ]; then cat "{fixture["outside_file"]}"; else printf "__MISSING__"; fi', + ], + } + ) + assert outside_read.exit_code == 0 + assert outside_read.stdout.strip() == "__MISSING__" + + target_dir = f"/var/tmp/{make_test_name('watch-outside-target')}" + target_file = f"{target_dir}/child.txt" + link = f"{base_dir}/escape/dir-link" + result = sandbox.exec( + { + "command": "bash", + "args": [ + "-lc", + f'mkdir -p "{base_dir}/escape" "{target_dir}" && printf "child" > "{target_file}" && ln -sfn "{target_dir}" "{link}"', + ], + } + ) + assert result.exit_code == 0 + assert [entry.path for entry in sandbox.files.list(link, depth=1)] == [target_file] + seen = Queue(maxsize=1) + handle = sandbox.files.watch_dir( + link, + lambda event: seen.put_nowait(event.name) + if event.type == "write" and event.name == "file.txt" + else None, + ) try: - sandbox.files.write_text(f"{base_dir}/watch-refresh-1.txt", "one") - refreshed = watch.refresh(True) - assert refreshed.current.last_seq > 0 - assert refreshed.current.oldest_seq > 0 - assert any( - "watch-refresh-1.txt" in event.path - for event in (refreshed.current.events or []) - ) - - sandbox.files.write_text(f"{base_dir}/watch-refresh-2.txt", "two") - event = _next_watch_event( - watch, - route="ws", - cursor=refreshed.current.last_seq, - ) - assert "watch-refresh-2.txt" in 
event.path - assert watch.current.last_seq >= event.seq + sandbox.files.write_text(f"{target_dir}/file.txt", "watch through link") + assert _await_queue_value(seen) == "file.txt" finally: - watch.stop() - - watch = sandbox.files.watch(base_dir, recursive=False) + handle.stop() + + watch_dir = f"{base_dir}/watch" + sandbox.files.make_dir(f"{watch_dir}/nested", parents=True) + direct_event = Queue(maxsize=1) + recursive_event = Queue(maxsize=1) + direct_handle = sandbox.files.watch_dir( + watch_dir, + lambda event: direct_event.put_nowait(event.name) + if event.type == "write" and event.name == "direct.txt" + else None, + ) + recursive_handle = sandbox.files.watch_dir( + watch_dir, + lambda event: recursive_event.put_nowait(event.name) + if event.type == "write" and event.name == "nested/recursive.txt" + else None, + recursive=True, + ) try: - burst = sandbox.exec( - { - "command": "bash", - "args": [ - "-lc", - f'for i in $(seq 1 1200); do echo x > "{base_dir}/overflow-$i.txt"; rm -f "{base_dir}/overflow-$i.txt"; done', - ], - } - ) - assert burst.exit_code == 0 - - rolled = _wait_for_watch_buffer_rollover(watch) - assert rolled.current.oldest_seq > 1 - - expect_hyperbrowser_error( - "watch replay window expired", - lambda: next(watch.events(route="ws", cursor=0)), - status_code=410, - code="replay_window_expired", - service="runtime", - retryable=False, - message_includes="Replay window expired", - ) + sandbox.files.write_text(f"{watch_dir}/direct.txt", "watch me") + sandbox.files.write_text(f"{watch_dir}/nested/recursive.txt", "watch me too") + assert _await_queue_value(direct_event) == "direct.txt" + assert _await_queue_value(recursive_event) == "nested/recursive.txt" finally: - watch.stop() + direct_handle.stop() + recursive_handle.stop() - upload = sandbox.files.upload_url( - f"{base_dir}/presign-upload.txt", - one_time=True, + expect_hyperbrowser_error( + "watch missing directory", + lambda: sandbox.files.watch_dir(f"{base_dir}/watch-missing", lambda event: 
None), + status_code=404, + service="runtime", + retryable=False, + message_includes_any=["not found", "no such file"], ) - assert upload.path == f"{base_dir}/presign-upload.txt" - assert upload.url - assert upload.method == "PUT" + invalid_file_path = f"{base_dir}/watch-invalid/file.txt" + sandbox.files.write_text(invalid_file_path, "not a directory") + expect_hyperbrowser_error( + "watch file path", + lambda: sandbox.files.watch_dir(invalid_file_path, lambda event: None), + status_code=400, + service="runtime", + retryable=False, + message_includes="not a directory", + ) + + path = f"{base_dir}/presign/file.txt" + upload = sandbox.files.upload_url(path, one_time=True) + assert upload.path == path + assert upload.method == "PUT" upload_response = fetch_signed_url( upload.url, method=upload.method, body="presigned upload body", ) assert upload_response.status_code == 200 + assert sandbox.files.read_text(path) == "presigned upload body" - uploaded_body = sandbox.files.read_text(f"{base_dir}/presign-upload.txt") - assert uploaded_body == "presigned upload body" - - download = sandbox.files.download_url( - f"{base_dir}/presign-upload.txt", - one_time=True, - ) - assert download.path == f"{base_dir}/presign-upload.txt" + download = sandbox.files.download_url(path, one_time=True) + assert download.path == path assert download.method == "GET" - download_response = fetch_signed_url(download.url, method=download.method) assert download_response.status_code == 200 assert download_response.text == "presigned upload body" - deleted_file = sandbox.files.delete(f"{base_dir}/hello-copy.txt") - assert deleted_file.path == f"{base_dir}/hello-copy.txt" - - deleted_dir = sandbox.files.delete(base_dir, recursive=True) - assert deleted_dir.path == base_dir - assert sandbox.files.exists(base_dir) is False - + path = f"{base_dir}/presign-race/upload.txt" + upload = sandbox.files.upload_url(path, one_time=True) + with ThreadPoolExecutor(max_workers=2) as executor: + first_future = 
executor.submit( + fetch_signed_url, + upload.url, + method=upload.method, + body="first body", + ) + second_future = executor.submit( + fetch_signed_url, + upload.url, + method=upload.method, + body="second body", + ) + first = first_future.result() + second = second_future.result() + assert sorted([first.status_code, second.status_code]) == [200, 401] + assert sandbox.files.read_text(path) in {"first body", "second body"} + + path = f"{base_dir}/presign-race/download.txt" + sandbox.files.write_text(path, "download once") + download = sandbox.files.download_url(path, one_time=True) + with ThreadPoolExecutor(max_workers=2) as executor: + first_future = executor.submit(fetch_signed_url, download.url, method=download.method) + second_future = executor.submit(fetch_signed_url, download.url, method=download.method) + first = first_future.result() + second = second_future.result() + assert sorted([first.status_code, second.status_code]) == [200, 401] + assert "download once" in {first.text, second.text} + + source = f"{base_dir}/rename-race/source.txt" + left = f"{base_dir}/rename-race/left.txt" + right = f"{base_dir}/rename-race/right.txt" + sandbox.files.write_text(source, "race") + with ThreadPoolExecutor(max_workers=2) as executor: + futures = [ + executor.submit(sandbox.files.rename, source, left), + executor.submit(sandbox.files.rename, source, right), + ] + results = [] + for future in futures: + try: + results.append(("fulfilled", future.result())) + except Exception as error: # pragma: no cover - exercised in e2e + results.append(("rejected", error)) + fulfilled = [result for result in results if result[0] == "fulfilled"] + rejected = [result for result in results if result[0] == "rejected"] + assert len(fulfilled) == 1 + assert len(rejected) == 1 expect_hyperbrowser_error( - "missing file read", - lambda: sandbox.files.read_text(f"{base_dir}/still-missing.txt"), + "rename race failure", + lambda: (_ for _ in ()).throw(rejected[0][1]), status_code=404, 
service="runtime", retryable=False, message_includes_any=["not found", "no such file"], ) + winner_path = left if sandbox.files.exists(left) else right + assert sandbox.files.read_text(winner_path) == "race" expect_hyperbrowser_error( - "missing file delete", - lambda: sandbox.files.delete(f"{base_dir}/still-missing.txt"), + "missing file read", + lambda: sandbox.files.read_text(f"{base_dir}/still-missing.txt"), status_code=404, service="runtime", retryable=False, message_includes_any=["not found", "no such file"], ) + + try: + sandbox.files.list(base_dir, depth=0) + except ValueError as error: + assert "depth should be at least one" in str(error) + else: + raise AssertionError("expected invalid depth to fail locally") finally: stop_sandbox_if_running(sandbox) diff --git a/tests/sandbox/e2e/test_lifecycle.py b/tests/sandbox/e2e/test_lifecycle.py index 176c11ed..e7e5bdc3 100644 --- a/tests/sandbox/e2e/test_lifecycle.py +++ b/tests/sandbox/e2e/test_lifecycle.py @@ -1,10 +1,13 @@ +import time from datetime import datetime, timedelta, timezone from uuid import uuid4 -from hyperbrowser.models import SandboxListParams, SandboxRuntimeSession +from hyperbrowser.exceptions import HyperbrowserError +from hyperbrowser.models import SandboxRuntimeSession -from tests.helpers.config import create_client +from tests.helpers.config import DEFAULT_IMAGE_NAME, create_client from tests.helpers.errors import expect_hyperbrowser_error +from tests.helpers.http import get_image_by_name from tests.helpers.sandbox import ( default_sandbox_params, stop_sandbox_if_running, @@ -13,25 +16,59 @@ client = create_client() +CUSTOM_IMAGE_NAME = "node" +SNAPSHOT_CREATE_RETRY_DELAY_SECONDS = 0.5 +SNAPSHOT_CREATE_RETRY_TIMEOUT_SECONDS = 60 + + +def _create_sandbox_with_snapshot_retry(params): + deadline = time.monotonic() + SNAPSHOT_CREATE_RETRY_TIMEOUT_SECONDS + last_error = None + + while time.monotonic() < deadline: + try: + return client.sandboxes.create(params) + except HyperbrowserError as 
error: + is_snapshot_catalog_race = ( + error.status_code == 404 + and isinstance(str(error), str) + and "snapshot not found" in str(error).lower() + ) + if not is_snapshot_catalog_race: + raise + last_error = error + time.sleep(SNAPSHOT_CREATE_RETRY_DELAY_SECONDS) + + if isinstance(last_error, Exception): + raise last_error + raise RuntimeError("snapshot create retry failed") + def test_sandbox_lifecycle_e2e(): sandbox = None stale_handle = None secondary = None + image_sandbox = None + custom_image_sandbox = None + custom_snapshot_sandbox = None + memory_snapshot = None + custom_image_memory_snapshot = None + custom_image = None try: sandbox = client.sandboxes.create(default_sandbox_params("py-sdk-lifecycle")) stale_handle = client.sandboxes.get(sandbox.id) + custom_image = get_image_by_name(CUSTOM_IMAGE_NAME) wait_for_runtime_ready(sandbox) - assert sandbox.to_dict()["token"] + detail = sandbox.to_dict() + assert detail["token"] assert sandbox.runtime.base_url assert sandbox.token_expires_at is not None - session = sandbox.create_runtime_session() - assert session.token - assert session.sandbox_id == sandbox.id - assert session.runtime.base_url == sandbox.runtime.base_url + stale_detail = stale_handle.to_dict() + assert stale_detail["token"] + assert stale_handle.runtime.base_url == sandbox.runtime.base_url info = sandbox.info() assert info.id == sandbox.id @@ -41,27 +78,41 @@ def test_sandbox_lifecycle_e2e(): sandbox.connect() assert sandbox.status == "active" - original_create_runtime_session = sandbox.create_runtime_session - valid_session = original_create_runtime_session(force_refresh=True) + memory_snapshot = sandbox.create_memory_snapshot() + assert memory_snapshot.snapshot_name + assert memory_snapshot.snapshot_id + assert memory_snapshot.namespace + assert memory_snapshot.status + assert memory_snapshot.image_name + assert memory_snapshot.image_id + assert memory_snapshot.image_namespace + + valid_detail = sandbox.info() invalid_jwt = 
"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.e30.invalid-signature" refresh_count = 0 + original_get_detail = sandbox._service.get_detail + + sandbox._runtime_session = SandboxRuntimeSession( + sandbox_id=sandbox.id, + status=valid_detail.status, + region=valid_detail.region, + token=invalid_jwt, + token_expires_at=datetime.now(timezone.utc) + timedelta(hours=1), + runtime=valid_detail.runtime, + ) + sandbox._detail = valid_detail.model_copy( + update={ + "token": invalid_jwt, + "token_expires_at": sandbox._runtime_session.token_expires_at, + } + ) - def patched_create_runtime_session(force_refresh: bool = False): + def patched_get_detail(sandbox_id: str): nonlocal refresh_count - if force_refresh: - refresh_count += 1 - return original_create_runtime_session(force_refresh=True) - - return SandboxRuntimeSession( - sandbox_id=valid_session.sandbox_id, - status=valid_session.status, - region=valid_session.region, - token=invalid_jwt, - token_expires_at=datetime.now(timezone.utc) + timedelta(hours=1), - runtime=valid_session.runtime, - ) + refresh_count += 1 + return original_get_detail(sandbox_id) - sandbox.create_runtime_session = patched_create_runtime_session + sandbox._service.get_detail = patched_get_detail try: result = sandbox.exec("echo runtime-refresh-ok") assert result.exit_code == 0 @@ -70,17 +121,83 @@ def patched_create_runtime_session(force_refresh: bool = False): assert sandbox.to_dict()["token"] assert sandbox.to_dict()["token"] != invalid_jwt finally: - sandbox.create_runtime_session = original_create_runtime_session + sandbox._service.get_detail = original_get_detail + + image_sandbox = client.sandboxes.create({"imageName": DEFAULT_IMAGE_NAME}) + assert image_sandbox.id + assert image_sandbox.status == "active" + response = image_sandbox.stop() + assert response.success is True + assert image_sandbox.status == "closed" + + custom_image_sandbox = client.sandboxes.create( + { + "imageName": custom_image["imageName"], + "imageId": custom_image["id"], + } + ) 
+ assert custom_image_sandbox.id + assert custom_image_sandbox.status == "active" + wait_for_runtime_ready(custom_image_sandbox) + + custom_image_memory_snapshot = custom_image_sandbox.create_memory_snapshot() + assert custom_image_memory_snapshot.image_name == custom_image["imageName"] + assert custom_image_memory_snapshot.image_id == custom_image["id"] + assert custom_image_memory_snapshot.image_namespace == custom_image["namespace"] + + custom_snapshot_sandbox = _create_sandbox_with_snapshot_retry( + { + "snapshotName": custom_image_memory_snapshot.snapshot_name, + "snapshotId": custom_image_memory_snapshot.snapshot_id, + } + ) + assert custom_snapshot_sandbox.id + assert custom_snapshot_sandbox.status == "active" + response = custom_snapshot_sandbox.stop() + assert response.success is True + assert custom_snapshot_sandbox.status == "closed" + + expect_hyperbrowser_error( + "mismatched image selector", + lambda: client.sandboxes.create( + { + "imageName": custom_image["imageName"], + "imageId": str(uuid4()), + } + ), + status_code=404, + service="control", + retryable=False, + message_includes_any=["image not found", "not found"], + ) - listing = client.sandboxes.list( - SandboxListParams(search=sandbox.id, limit=20) + expect_hyperbrowser_error( + "mismatched snapshot selector", + lambda: client.sandboxes.create( + { + "snapshotName": memory_snapshot.snapshot_name, + "snapshotId": str(uuid4()), + } + ), + status_code=404, + service="control", + retryable=False, + message_includes_any=["snapshot not found", "not found"], ) - assert any(entry.id == sandbox.id for entry in listing.sandboxes) response = sandbox.stop() assert response.success is True assert sandbox.status == "closed" + expect_hyperbrowser_error( + "stopped sandbox memory snapshot", + lambda: sandbox.create_memory_snapshot(), + status_code=409, + service="control", + retryable=False, + message_includes="Sandbox is not running", + ) + expect_hyperbrowser_error( "stopped sandbox connect", lambda: 
sandbox.connect(), @@ -105,9 +222,10 @@ def patched_create_runtime_session(force_refresh: bool = False): "stale sandbox connect", lambda: stale_handle.connect(), status_code=409, - service="control", + code="sandbox_not_running", + service="runtime", retryable=False, - message_includes="Sandbox is not running", + message_includes="not running", ) expect_hyperbrowser_error( @@ -129,8 +247,11 @@ def patched_create_runtime_session(force_refresh: bool = False): message_includes="not found", ) - secondary = client.sandboxes.start_from_snapshot( - default_sandbox_params("py-sdk-secondary") + secondary = _create_sandbox_with_snapshot_retry( + { + "snapshotName": memory_snapshot.snapshot_name, + "snapshotId": memory_snapshot.snapshot_id, + } ) response = secondary.stop() assert response.success is True @@ -139,3 +260,6 @@ def patched_create_runtime_session(force_refresh: bool = False): stop_sandbox_if_running(sandbox) stop_sandbox_if_running(stale_handle) stop_sandbox_if_running(secondary) + stop_sandbox_if_running(image_sandbox) + stop_sandbox_if_running(custom_image_sandbox) + stop_sandbox_if_running(custom_snapshot_sandbox) diff --git a/tests/sandbox/e2e/test_runtime_transport.py b/tests/sandbox/e2e/test_runtime_transport.py new file mode 100644 index 00000000..5727a6d7 --- /dev/null +++ b/tests/sandbox/e2e/test_runtime_transport.py @@ -0,0 +1,44 @@ +from hyperbrowser.sandbox_common import ( + resolve_runtime_transport_target, + to_websocket_transport_target, +) + + +def test_runtime_transport_target_ignores_ambient_proxy_without_explicit_override( + monkeypatch, +): + monkeypatch.setenv("REGIONAL_PROXY_DEV_HOST", "http://127.0.0.1:8090") + + target = resolve_runtime_transport_target( + "https://session.example.dev:8443", + "/sandbox/exec?foo=bar", + ) + + assert target.url == "https://session.example.dev:8443/sandbox/exec?foo=bar" + assert target.host_header is None + + +def test_runtime_transport_target_applies_explicit_proxy_override(): + target = 
resolve_runtime_transport_target( + "https://session.example.dev:8443", + "/sandbox/exec?foo=bar", + "http://127.0.0.1:8090", + ) + + assert target.url == "http://127.0.0.1:8090/sandbox/exec?foo=bar" + assert target.host_header == "session.example.dev:8443" + + +def test_runtime_websocket_target_applies_explicit_proxy_override(): + target = to_websocket_transport_target( + "https://session.example.dev:8443", + "/sandbox/pty/pty_123/ws?sessionId=sandbox_123", + "http://127.0.0.1:8090", + ) + + assert ( + target.url + == "wss://session.example.dev:8443/sandbox/pty/pty_123/ws?sessionId=sandbox_123" + ) + assert target.connect_host == "127.0.0.1" + assert target.connect_port == 8090 diff --git a/tests/sandbox/e2e/test_sudo.py b/tests/sandbox/e2e/test_sudo.py new file mode 100644 index 00000000..9ba02369 --- /dev/null +++ b/tests/sandbox/e2e/test_sudo.py @@ -0,0 +1,66 @@ +from tests.helpers.config import create_client +from tests.helpers.sandbox import ( + default_sandbox_params, + stop_sandbox_if_running, + wait_for_runtime_ready, +) + +client = create_client() + + +def test_sandbox_sudo_e2e(): + sandbox = None + + try: + sandbox = client.sandboxes.create(default_sandbox_params("py-sdk-sudo")) + wait_for_runtime_ready(sandbox) + + path = "/tmp/sdk-sudo-check.txt" + + runtime_user = sandbox.exec( + { + "command": "bash", + "args": ["-lc", "whoami && id -u && id -g"], + } + ) + assert runtime_user.exit_code == 0 + assert "ubuntu" in runtime_user.stdout + assert "1000" in runtime_user.stdout + + direct_chown = sandbox.exec( + { + "command": "bash", + "args": [ + "-lc", + " && ".join( + [ + f'printf "sudo-check" > "{path}"', + f'chown root:root "{path}"', + ] + ), + ], + } + ) + assert direct_chown.exit_code != 0 + assert "operation not permitted" in direct_chown.stderr.lower() + + sudo_result = sandbox.exec( + { + "command": "bash", + "args": [ + "-lc", + " && ".join( + [ + "sudo -n whoami", + f'sudo -n chown root:root "{path}"', + f"stat -c '%U:%G' \"{path}\"", + ] + ), 
+ ], + } + ) + assert sudo_result.exit_code == 0 + assert "root" in sudo_result.stdout + assert "root:root" in sudo_result.stdout + finally: + stop_sandbox_if_running(sandbox) diff --git a/tests/sandbox/e2e/test_terminal_smoke.py b/tests/sandbox/e2e/test_terminal_smoke.py index 901b68de..34f681e7 100644 --- a/tests/sandbox/e2e/test_terminal_smoke.py +++ b/tests/sandbox/e2e/test_terminal_smoke.py @@ -1,3 +1,5 @@ +import time + from tests.helpers.config import create_client from tests.helpers.errors import expect_hyperbrowser_error from tests.helpers.sandbox import ( @@ -23,6 +25,32 @@ def _collect_terminal_session(connection): return output, exit_code +def _terminal_status_output(status) -> str: + return "".join(chunk.data for chunk in ((status.output if status else None) or [])) + + +def _terminal_status_raw_output(status) -> str: + return b"".join(chunk.raw for chunk in ((status.output if status else None) or [])).decode( + "utf-8" + ) + + +def _wait_for_terminal_status_output(read_status, marker: str, timeout_seconds: float = 5.0): + deadline = time.monotonic() + timeout_seconds + last_status = None + + while time.monotonic() < deadline: + last_status = read_status() + if marker in _terminal_status_output(last_status): + return last_status + time.sleep(0.1) + + raise AssertionError( + f"timed out waiting for terminal output {marker!r}; " + f"last output={_terminal_status_output(last_status)!r}" + ) + + def test_sandbox_terminal_e2e(): sandbox = None @@ -84,6 +112,66 @@ def test_sandbox_terminal_e2e(): status = terminal.wait(timeout_ms=2000) assert status.running is False + marker = "terminal-get-output" + terminal = sandbox.terminal.create( + { + "command": "bash", + "args": ["-lc", f"printf '{marker}' && sleep 1"], + "rows": 24, + "cols": 80, + } + ) + without_output = sandbox.terminal.get(terminal.id) + assert without_output.current.output is None + fetched = _wait_for_terminal_status_output( + lambda: sandbox.terminal.get(terminal.id, 
include_output=True).current, + marker, + ) + assert marker in _terminal_status_output(fetched) + assert marker in _terminal_status_raw_output(fetched) + assert fetched.output + status = terminal.wait(timeout_ms=2000) + assert status.running is False + assert status.exit_code == 0 + + marker = "terminal-refresh-output" + terminal = sandbox.terminal.create( + { + "command": "bash", + "args": ["-lc", f"printf '{marker}' && sleep 1"], + "rows": 24, + "cols": 80, + } + ) + without_output = terminal.refresh() + assert without_output.current.output is None + refreshed = _wait_for_terminal_status_output( + lambda: terminal.refresh(include_output=True).current, + marker, + ) + assert marker in _terminal_status_output(refreshed) + assert marker in _terminal_status_raw_output(refreshed) + assert refreshed.output + status = terminal.wait(timeout_ms=2000) + assert status.running is False + assert status.exit_code == 0 + + marker = "terminal-wait-output" + terminal = sandbox.terminal.create( + { + "command": "bash", + "args": ["-lc", f"printf '{marker}'"], + "rows": 24, + "cols": 80, + } + ) + status = terminal.wait(timeout_ms=2000, include_output=True) + assert status.running is False + assert status.exit_code == 0 + assert marker in _terminal_status_output(status) + assert marker in _terminal_status_raw_output(status) + assert status.output + timeout_terminal = sandbox.pty.create( { "command": "bash", From a1068aed0e4664a72610785eac4a55b36dbc54c9 Mon Sep 17 00:00:00 2001 From: Devin Deng Date: Thu, 12 Mar 2026 04:58:06 +0000 Subject: [PATCH 03/10] remove sandbox name --- hyperbrowser/models/sandbox.py | 14 +++------ tests/test_create_sandbox_params.py | 47 +++++++++++++++++++++++++++++ uv.lock | 3 ++ 3 files changed, 55 insertions(+), 9 deletions(-) create mode 100644 tests/test_create_sandbox_params.py create mode 100644 uv.lock diff --git a/hyperbrowser/models/sandbox.py b/hyperbrowser/models/sandbox.py index d3735b54..894e0835 100644 --- a/hyperbrowser/models/sandbox.py 
+++ b/hyperbrowser/models/sandbox.py @@ -131,7 +131,6 @@ def parse_token_expires_at(cls, value): class CreateSandboxParams(SandboxBaseModel): - sandbox_name: Optional[str] = Field(default=None, alias="sandboxName") snapshot_name: Optional[str] = Field(default=None, alias="snapshotName") snapshot_id: Optional[str] = Field(default=None, alias="snapshotId") image_name: Optional[str] = Field(default=None, alias="imageName") @@ -142,18 +141,15 @@ class CreateSandboxParams(SandboxBaseModel): @model_validator(mode="after") def validate_launch_source(self): - source_count = sum( - bool(value) - for value in [self.sandbox_name, self.snapshot_name, self.image_name] - ) - if source_count != 1: - raise ValueError( - "Provide exactly one start source: sandbox_name, snapshot_name, or image_name" - ) if self.snapshot_id and not self.snapshot_name: raise ValueError("snapshot_id requires snapshot_name") if self.image_id and not self.image_name: raise ValueError("image_id requires image_name") + source_count = sum(bool(value) for value in [self.snapshot_name, self.image_name]) + if source_count != 1: + raise ValueError( + "Provide exactly one start source: snapshot_name or image_name" + ) return self diff --git a/tests/test_create_sandbox_params.py b/tests/test_create_sandbox_params.py new file mode 100644 index 00000000..1d9dd00f --- /dev/null +++ b/tests/test_create_sandbox_params.py @@ -0,0 +1,47 @@ +import pytest +from pydantic import ValidationError + +from hyperbrowser.models import CreateSandboxParams + + +def test_create_sandbox_params_accepts_image_source(): + params = CreateSandboxParams(image_name="node") + + assert params.model_dump(by_alias=True, exclude_none=True) == {"imageName": "node"} + + +def test_create_sandbox_params_accepts_snapshot_source(): + params = CreateSandboxParams(snapshot_name="snap", snapshot_id="snap-id") + + assert params.model_dump(by_alias=True, exclude_none=True) == { + "snapshotName": "snap", + "snapshotId": "snap-id", + } + + 
+@pytest.mark.parametrize( + "payload", + [ + {"sandboxName": "legacy"}, + {"sandbox_name": "legacy"}, + ], +) +def test_create_sandbox_params_rejects_legacy_sandbox_name(payload): + with pytest.raises( + ValidationError, + match="Provide exactly one start source: snapshot_name or image_name", + ): + CreateSandboxParams(**payload) + + +def test_create_sandbox_params_rejects_multiple_sources(): + with pytest.raises( + ValidationError, + match="Provide exactly one start source: snapshot_name or image_name", + ): + CreateSandboxParams(image_name="node", snapshot_name="snap") + + +def test_create_sandbox_params_requires_snapshot_name_for_snapshot_id(): + with pytest.raises(ValidationError, match="snapshot_id requires snapshot_name"): + CreateSandboxParams(snapshot_id="snap-id") diff --git a/uv.lock b/uv.lock new file mode 100644 index 00000000..7518fc90 --- /dev/null +++ b/uv.lock @@ -0,0 +1,3 @@ +version = 1 +revision = 3 +requires-python = ">=3.12" From e0de3189d25633f74746f5ca8f75230185f00660 Mon Sep 17 00:00:00 2001 From: Devin Deng Date: Thu, 12 Mar 2026 05:39:17 +0000 Subject: [PATCH 04/10] fix tests --- tests/sandbox/e2e/test_async_lifecycle.py | 3 +++ tests/sandbox/e2e/test_lifecycle.py | 3 +++ 2 files changed, 6 insertions(+) diff --git a/tests/sandbox/e2e/test_async_lifecycle.py b/tests/sandbox/e2e/test_async_lifecycle.py index 5e85a39f..067dbb21 100644 --- a/tests/sandbox/e2e/test_async_lifecycle.py +++ b/tests/sandbox/e2e/test_async_lifecycle.py @@ -89,6 +89,9 @@ async def test_async_sandbox_lifecycle_e2e(): assert memory_snapshot.image_id assert memory_snapshot.image_namespace + # Snapshot creation can briefly disrupt the next fast exec on the same handle. 
+ await wait_for_runtime_ready_async(sandbox) + valid_detail = await sandbox.info() invalid_jwt = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.e30.invalid-signature" refresh_count = 0 diff --git a/tests/sandbox/e2e/test_lifecycle.py b/tests/sandbox/e2e/test_lifecycle.py index e7e5bdc3..bd097e87 100644 --- a/tests/sandbox/e2e/test_lifecycle.py +++ b/tests/sandbox/e2e/test_lifecycle.py @@ -87,6 +87,9 @@ def test_sandbox_lifecycle_e2e(): assert memory_snapshot.image_id assert memory_snapshot.image_namespace + # Snapshot creation can briefly disrupt the next fast exec on the same handle. + wait_for_runtime_ready(sandbox) + valid_detail = sandbox.info() invalid_jwt = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.e30.invalid-signature" refresh_count = 0 From 61fd2371a0fb1a9cef543ad8381e86163659f2ce Mon Sep 17 00:00:00 2001 From: Devin Deng Date: Thu, 12 Mar 2026 06:25:08 +0000 Subject: [PATCH 05/10] refactor, bump version --- .../client/managers/async_manager/sandbox.py | 1425 +-------------- .../async_manager/sandboxes/__init__.py | 33 + .../async_manager/sandboxes/sandbox_files.py | 718 ++++++++ .../sandboxes/sandbox_processes.py | 217 +++ .../sandboxes/sandbox_terminal.py | 238 +++ .../sandboxes/sandbox_transport.py | 251 +++ .../client/managers/sandboxes/__init__.py | 31 + .../client/managers/sandboxes/shared.py | 210 +++ .../client/managers/sync_manager/sandbox.py | 1571 +---------------- .../sync_manager/sandboxes/__init__.py | 33 + .../sync_manager/sandboxes/sandbox_files.py | 699 ++++++++ .../sandboxes/sandbox_processes.py | 213 +++ .../sandboxes/sandbox_terminal.py | 236 +++ .../sandboxes/sandbox_transport.py | 249 +++ pyproject.toml | 2 +- tests/sandbox/e2e/test_async_lifecycle.py | 8 +- 16 files changed, 3231 insertions(+), 2903 deletions(-) create mode 100644 hyperbrowser/client/managers/async_manager/sandboxes/__init__.py create mode 100644 hyperbrowser/client/managers/async_manager/sandboxes/sandbox_files.py create mode 100644 
hyperbrowser/client/managers/async_manager/sandboxes/sandbox_processes.py create mode 100644 hyperbrowser/client/managers/async_manager/sandboxes/sandbox_terminal.py create mode 100644 hyperbrowser/client/managers/async_manager/sandboxes/sandbox_transport.py create mode 100644 hyperbrowser/client/managers/sandboxes/__init__.py create mode 100644 hyperbrowser/client/managers/sandboxes/shared.py create mode 100644 hyperbrowser/client/managers/sync_manager/sandboxes/__init__.py create mode 100644 hyperbrowser/client/managers/sync_manager/sandboxes/sandbox_files.py create mode 100644 hyperbrowser/client/managers/sync_manager/sandboxes/sandbox_processes.py create mode 100644 hyperbrowser/client/managers/sync_manager/sandboxes/sandbox_terminal.py create mode 100644 hyperbrowser/client/managers/sync_manager/sandboxes/sandbox_transport.py diff --git a/hyperbrowser/client/managers/async_manager/sandbox.py b/hyperbrowser/client/managers/async_manager/sandbox.py index 0f086793..62f04cb7 100644 --- a/hyperbrowser/client/managers/async_manager/sandbox.py +++ b/hyperbrowser/client/managers/async_manager/sandbox.py @@ -1,1402 +1,64 @@ -import asyncio -import base64 -import io -import inspect -import json -import posixpath -import socket -from datetime import datetime, timedelta, timezone -from typing import AsyncIterator, Callable, Dict, List, Optional, Union -from urllib.parse import urlencode - -import httpx -from websockets.asyncio.client import connect as async_ws_connect -from websockets.exceptions import ConnectionClosed +from typing import Dict, Optional, Union from ....exceptions import HyperbrowserError from ....models.sandbox import ( CreateSandboxParams, SandboxDetail, SandboxExecParams, - SandboxFileChmodParams, - SandboxFileChownParams, - SandboxFileCopyParams, - SandboxFileDeleteParams, - SandboxFileInfo, - SandboxFileReadResult, - SandboxFileSystemEvent, - SandboxFileWriteEntry, - SandboxFileTransferResult, - SandboxFileWatchDoneEvent, - 
SandboxFileWatchEventMessage, - SandboxFileWatchStatus, - SandboxMemorySnapshotParams, - SandboxMemorySnapshotResult, SandboxExposeParams, SandboxExposeResult, - SandboxPresignFileParams, - SandboxPresignedUrl, - SandboxProcessExitEvent, - SandboxProcessListResponse, - SandboxProcessOutputEvent, - SandboxProcessResult, - SandboxProcessStdinParams, - SandboxProcessSummary, + SandboxMemorySnapshotParams, + SandboxMemorySnapshotResult, SandboxRuntimeSession, - SandboxTerminalCreateParams, - SandboxTerminalExitEvent, - SandboxTerminalOutputEvent, - SandboxTerminalStatus, - SandboxTerminalWaitParams, StartSandboxFromSnapshotParams, ) from ....models.session import BasicResponse from ....sandbox_common import ( - RUNTIME_SESSION_REFRESH_BUFFER_MS, RuntimeConnection, - build_headers, ensure_response_ok, normalize_network_error, parse_json_response, - resolve_runtime_transport_target, - to_websocket_transport_target, ) -from ..sync_manager.sandbox import ( - DEFAULT_WATCH_TIMEOUT_MS, +from ..sandboxes.shared import ( _build_sandbox_exposed_url, - _build_query_path, _copy_model, - _encode_write_data, - _normalize_event_type, - _normalize_file_info, - _normalize_terminal_output_chunk, - _normalize_terminal_status, - _normalize_websocket_error, - _normalize_write_info, - _relative_watch_name, + _expires_within_buffer, ) - -DEFAULT_PROCESS_KILL_WAIT_SECONDS = 5.0 -DEFAULT_TERMINAL_KILL_WAIT_SECONDS = 5.0 - - -def _expires_within_buffer(expires_at): - if expires_at is None: - return False - if expires_at.tzinfo is None: - expires_at = expires_at.replace(tzinfo=timezone.utc) - threshold = datetime.now(timezone.utc) + timedelta( - milliseconds=RUNTIME_SESSION_REFRESH_BUFFER_MS - ) - return expires_at <= threshold - - -class RuntimeTransport: - def __init__( - self, - resolve_connection, - timeout: float = 30.0, - runtime_proxy_override: Optional[str] = None, - ): - self._resolve_connection = resolve_connection - self._timeout = timeout - self._runtime_proxy_override = 
runtime_proxy_override - - async def request_json( - self, - path: str, - *, - method: str = "GET", - params: Optional[Dict[str, object]] = None, - json_body: Optional[Dict[str, object]] = None, - content: Optional[Union[str, bytes]] = None, - headers: Optional[Dict[str, str]] = None, - ): - response = await self._request( - path, - method=method, - params=params, - json_body=json_body, - content=content, - headers=headers, - ) - return parse_json_response(response, "runtime") - - async def request_bytes( - self, - path: str, - *, - method: str = "GET", - params: Optional[Dict[str, object]] = None, - headers: Optional[Dict[str, str]] = None, - ) -> bytes: - response = await self._request(path, method=method, params=params, headers=headers) - return response.content - - async def stream_sse( - self, path: str, params: Optional[Dict[str, object]] = None - ) -> AsyncIterator[Dict[str, object]]: - client, response = await self._open_stream(path, params=params) - event_name = "message" - event_id = None - data_lines = [] - - def flush_event(): - nonlocal event_name, event_id, data_lines - if not data_lines and event_name == "message" and event_id is None: - return None - - raw_data = "\n".join(data_lines) - data = raw_data - if raw_data: - try: - data = json.loads(raw_data) - except json.JSONDecodeError: - data = raw_data - - event = { - "event": event_name, - "data": data, - "id": event_id, - } - event_name = "message" - event_id = None - data_lines = [] - return event - - try: - async for line in response.aiter_lines(): - if line == "": - event = flush_event() - if event is not None: - yield event - continue - - if line.startswith(":"): - continue - - if ":" in line: - field, value = line.split(":", 1) - value = value.lstrip(" ") - else: - field, value = line, "" - - if field == "event": - event_name = value or "message" - elif field == "data": - data_lines.append(value) - elif field == "id": - event_id = value - - trailing = flush_event() - if trailing is not None: - 
yield trailing - finally: - await response.aclose() - await client.aclose() - - async def _request( - self, - path: str, - *, - method: str = "GET", - params: Optional[Dict[str, object]] = None, - json_body: Optional[Dict[str, object]] = None, - content: Optional[Union[str, bytes]] = None, - headers: Optional[Dict[str, str]] = None, - allow_refresh: bool = True, - ) -> httpx.Response: - connection = await self._resolve_connection(False) - response = await self._send( - connection, - path, - method=method, - params=params, - json_body=json_body, - content=content, - headers=headers, - ) - - if response.status_code == 401 and allow_refresh: - await response.aclose() - refreshed = await self._resolve_connection(True) - retry = await self._send( - refreshed, - path, - method=method, - params=params, - json_body=json_body, - content=content, - headers=headers, - ) - return ensure_response_ok(retry, "runtime") - - return ensure_response_ok(response, "runtime") - - async def _open_stream( - self, - path: str, - *, - params: Optional[Dict[str, object]] = None, - allow_refresh: bool = True, - ): - connection = await self._resolve_connection(False) - client, response = await self._send_stream(connection, path, params=params) - if response.status_code == 401 and allow_refresh: - await response.aclose() - await client.aclose() - refreshed = await self._resolve_connection(True) - client, response = await self._send_stream(refreshed, path, params=params) - - if not response.is_success: - await response.aread() - ensure_response_ok(response, "runtime") - return client, response - - async def _send( - self, - connection: RuntimeConnection, - path: str, - *, - method: str, - params: Optional[Dict[str, object]], - json_body: Optional[Dict[str, object]], - content: Optional[Union[str, bytes]], - headers: Optional[Dict[str, str]], - ) -> httpx.Response: - request_path = _build_query_path(path, params) - target = resolve_runtime_transport_target( - connection.base_url, - request_path, 
- self._runtime_proxy_override, - ) - merged_headers = build_headers(connection.token, headers, target.host_header) - client = httpx.AsyncClient(timeout=self._timeout) - - try: - response = await client.request( - method, - target.url, - headers=merged_headers, - json=json_body, - content=content, - ) - except BaseException as error: - await client.aclose() - raise normalize_network_error( - error, - "runtime", - "Unknown runtime request error", - ) - - await response.aread() - await client.aclose() - return response - - async def _send_stream( - self, - connection: RuntimeConnection, - path: str, - *, - params: Optional[Dict[str, object]], - ): - request_path = _build_query_path(path, params) - target = resolve_runtime_transport_target( - connection.base_url, - request_path, - self._runtime_proxy_override, - ) - headers = build_headers( - connection.token, - {"Accept": "text/event-stream"}, - target.host_header, - ) - client = httpx.AsyncClient(timeout=self._timeout) - - try: - request = client.build_request("GET", target.url, headers=headers) - response = await client.send(request, stream=True) - return client, response - except BaseException as error: - await client.aclose() - raise normalize_network_error( - error, - "runtime", - "Unknown runtime request error", - ) - - -class SandboxProcessHandle: - def __init__(self, transport: RuntimeTransport, summary: SandboxProcessSummary): - self._transport = transport - self._summary = summary - - @property - def id(self) -> str: - return self._summary.id - - @property - def status(self) -> str: - return self._summary.status - - def to_dict(self): - return self._summary.model_dump() - - def to_json(self): - return self.to_dict() - - async def refresh(self) -> "SandboxProcessHandle": - payload = await self._transport.request_json(f"/sandbox/processes/{self.id}") - self._summary = SandboxProcessSummary(**payload["process"]) - return self - - async def wait( - self, - timeout_ms: Optional[int] = None, - timeout_sec: 
Optional[int] = None, - ) -> SandboxProcessResult: - payload = await self._transport.request_json( - f"/sandbox/processes/{self.id}/wait", - method="POST", - json_body={ - "timeoutMs": timeout_ms, - "timeout_sec": timeout_sec, - }, - headers={"content-type": "application/json"}, - ) - result = SandboxProcessResult(**payload["result"]) - self._summary = SandboxProcessSummary( - id=result.id, - status=result.status, - command=self._summary.command, - args=self._summary.args, - cwd=self._summary.cwd, - pid=self._summary.pid, - exit_code=result.exit_code, - started_at=result.started_at, - completed_at=result.completed_at, - ) - return result - - async def signal(self, signal: str) -> None: - payload = await self._transport.request_json( - f"/sandbox/processes/{self.id}/signal", - method="POST", - json_body={"signal": signal}, - headers={"content-type": "application/json"}, - ) - self._summary = SandboxProcessSummary(**payload["process"]) - - async def kill( - self, - timeout_ms: Optional[int] = None, - timeout_sec: Optional[int] = None, - ) -> SandboxProcessResult: - payload = await self._transport.request_json( - f"/sandbox/processes/{self.id}", - method="DELETE", - ) - self._summary = SandboxProcessSummary(**payload["process"]) - if timeout_ms is None and timeout_sec is None: - timeout_ms = int(DEFAULT_PROCESS_KILL_WAIT_SECONDS * 1000) - return await self.wait(timeout_ms=timeout_ms, timeout_sec=timeout_sec) - - async def write_stdin( - self, - data: Optional[Union[str, bytes, bytearray, SandboxProcessStdinParams]] = None, - *, - encoding: Optional[str] = None, - eof: Optional[bool] = None, - ) -> None: - if isinstance(data, SandboxProcessStdinParams): - params = data - else: - params = SandboxProcessStdinParams(data=data, encoding=encoding, eof=eof) - - payload: Dict[str, object] = {"eof": params.eof} - if params.data is not None: - if isinstance(params.data, str): - payload["data"] = params.data - payload["encoding"] = params.encoding or "utf8" - else: - 
payload["data"] = base64.b64encode(bytes(params.data)).decode("ascii") - payload["encoding"] = "base64" - - await self._transport.request_json( - f"/sandbox/processes/{self.id}/stdin", - method="POST", - json_body=payload, - headers={"content-type": "application/json"}, - ) - - async def stream(self, from_seq: Optional[int] = None) -> AsyncIterator[object]: - params = {"from_seq": from_seq} if from_seq and from_seq > 0 else None - async for event in self._transport.stream_sse( - f"/sandbox/processes/{self.id}/stream", - params=params, - ): - event_type = event["event"] - data = event["data"] - if event_type == "output": - yield SandboxProcessOutputEvent( - type=data["stream"], - seq=data["seq"], - data=data["data"], - timestamp=data["timestamp"], - ) - elif event_type == "done": - yield SandboxProcessExitEvent( - type="exit", - result=SandboxProcessResult(**data), - ) - - async def result(self) -> SandboxProcessResult: - return await self.wait() - - -class SandboxProcessesApi: - def __init__(self, transport: RuntimeTransport): - self._transport = transport - - async def exec( - self, input: Union[SandboxExecParams, Dict[str, object]] - ) -> SandboxProcessResult: - params = input if isinstance(input, SandboxExecParams) else SandboxExecParams(**input) - payload = await self._transport.request_json( - "/sandbox/exec", - method="POST", - json_body=params.model_dump(exclude_none=True, by_alias=True), - headers={"content-type": "application/json"}, - ) - return SandboxProcessResult(**payload["result"]) - - async def start( - self, input: Union[SandboxExecParams, Dict[str, object]] - ) -> SandboxProcessHandle: - params = input if isinstance(input, SandboxExecParams) else SandboxExecParams(**input) - payload = await self._transport.request_json( - "/sandbox/processes", - method="POST", - json_body=params.model_dump(exclude_none=True, by_alias=True), - headers={"content-type": "application/json"}, - ) - return SandboxProcessHandle( - self._transport, - 
SandboxProcessSummary(**payload["process"]), - ) - - async def get(self, process_id: str) -> SandboxProcessHandle: - payload = await self._transport.request_json(f"/sandbox/processes/{process_id}") - return SandboxProcessHandle( - self._transport, - SandboxProcessSummary(**payload["process"]), - ) - - async def list( - self, - *, - status=None, - limit: Optional[int] = None, - cursor: Optional[Union[str, int]] = None, - created_after: Optional[int] = None, - created_before: Optional[int] = None, - ) -> SandboxProcessListResponse: - normalized_status = None - if isinstance(status, list): - normalized_status = ",".join(status) if status else None - else: - normalized_status = status - - payload = await self._transport.request_json( - "/sandbox/processes", - params={ - "status": normalized_status, - "limit": limit, - "cursor": cursor, - "created_after": created_after, - "created_before": created_before, - }, - ) - return SandboxProcessListResponse(**payload) - - -class SandboxFileWatchHandle: - def __init__( - self, - transport: RuntimeTransport, - get_connection_info, - status, - runtime_proxy_override: Optional[str] = None, - ): - self._transport = transport - self._get_connection_info = get_connection_info - self._status = status - self._runtime_proxy_override = runtime_proxy_override - - @property - def id(self) -> str: - return self._status.id - - @property - def current(self) -> SandboxFileWatchStatus: - return _copy_model(self._status) - - def to_dict(self): - return self._status.model_dump() - - def to_json(self): - return self.to_dict() - - async def refresh(self, include_events: bool = False) -> "SandboxFileWatchHandle": - params = {"includeEvents": True} if include_events else None - payload = await self._transport.request_json( - f"/sandbox/files/watch/{self.id}", - params=params, - ) - self._status = SandboxFileWatchStatus(**payload["watch"]) - return self - - async def stop(self) -> None: - await self._transport.request_json( - 
f"/sandbox/files/watch/{self.id}", - method="DELETE", - ) - self._status = self._status.model_copy( - update={ - "active": False, - "stopped_at": self._status.stopped_at or int(datetime.now().timestamp() * 1000), - } - ) - - async def events( - self, - *, - cursor: Optional[int] = None, - route: str = "ws", - ) -> AsyncIterator[object]: - connection = await self._get_connection_info() - query = urlencode( - [ - ("sessionId", connection.sandbox_id), - *([("cursor", str(cursor))] if cursor is not None else []), - ] - ) - target = to_websocket_transport_target( - connection.base_url, - f"/sandbox/files/watch/{self.id}/{route}?{query}", - self._runtime_proxy_override, - ) - headers = build_headers(connection.token, host_header=target.host_header) - connect_kwargs = {} - if target.connect_host is not None and target.connect_port is not None: - sock = socket.create_connection( - (target.connect_host, target.connect_port), - timeout=self._transport._timeout, - ) - sock.setblocking(False) - connect_kwargs["sock"] = sock - try: - websocket = await async_ws_connect( - target.url, - additional_headers=headers, - open_timeout=self._transport._timeout, - **connect_kwargs, - ) - except BaseException as error: - raise _normalize_websocket_error(error) - - try: - while True: - try: - message = await websocket.recv() - except ConnectionClosed: - break - - if isinstance(message, bytes): - message = message.decode("utf-8") - parsed = json.loads(message) - if parsed["type"] == "event": - event = SandboxFileWatchEventMessage( - type="event", - event=parsed["event"], - ) - self._status = self._status.model_copy( - update={ - "oldest_seq": self._status.oldest_seq or event.event.seq, - "last_seq": max(self._status.last_seq, event.event.seq), - } - ) - yield event - elif parsed["type"] == "done": - self._status = SandboxFileWatchStatus(**parsed["status"]) - yield SandboxFileWatchDoneEvent(type="done", status=self.current) - break - except GeneratorExit: - raise - except BaseException as 
error: - raise _normalize_websocket_error(error) - finally: - await websocket.close() - - -class SandboxWatchDirHandle: - def __init__( - self, - watch: SandboxFileWatchHandle, - on_event: Callable[[SandboxFileSystemEvent], object], - *, - on_exit: Optional[Callable[[Optional[BaseException]], object]] = None, - timeout_ms: Optional[int] = None, - ): - self._watch = watch - self._root_path = watch.current.path - self._on_event = on_event - self._on_exit = on_exit - self._stop_requested = False - self._exit_notified = False - self._task = asyncio.create_task(self._run()) - effective_timeout = DEFAULT_WATCH_TIMEOUT_MS if timeout_ms is None else timeout_ms - self._timeout_task = ( - asyncio.create_task(self._auto_stop(effective_timeout)) - if effective_timeout > 0 - else None - ) - - async def stop(self) -> None: - if self._stop_requested: - return - self._stop_requested = True - - if self._timeout_task is not None: - self._timeout_task.cancel() - self._timeout_task = None - - try: - await self._watch.stop() - except HyperbrowserError as error: - if error.status_code not in {404, 409}: - raise - - if asyncio.current_task() is not self._task: - await self._task - - async def _auto_stop(self, timeout_ms: int) -> None: - try: - await asyncio.sleep(timeout_ms / 1000.0) - await self.stop() - except asyncio.CancelledError: - return - - async def _run(self) -> None: - exit_error = None - try: - async for message in self._watch.events(): - event_type = _normalize_event_type(message.event.op) - if not event_type: - continue - result = self._on_event( - SandboxFileSystemEvent( - type=event_type, - name=_relative_watch_name(self._root_path, message.event.path), - ) - ) - if inspect.isawaitable(result): - await result - except BaseException as error: - exit_error = error - finally: - if self._timeout_task is not None: - self._timeout_task.cancel() - self._timeout_task = None - if not self._exit_notified: - self._exit_notified = True - if self._on_exit is not None: - result = 
self._on_exit(exit_error) - if inspect.isawaitable(result): - await result - - -class SandboxFilesApi: - def __init__( - self, - transport: RuntimeTransport, - get_connection_info, - runtime_proxy_override: Optional[str] = None, - ): - self._transport = transport - self._get_connection_info = get_connection_info - self._runtime_proxy_override = runtime_proxy_override - - async def list( - self, - path: str, - *, - depth: Optional[int] = None, - ) -> List[SandboxFileInfo]: - depth = 1 if depth is None else depth - if depth < 1: - raise ValueError("depth should be at least one") - - payload = await self._transport.request_json( - "/sandbox/files", - params={ - "path": path, - "depth": depth, - }, - ) - return [_normalize_file_info(entry) for entry in payload.get("entries", [])] - - async def get_info(self, path: str) -> SandboxFileInfo: - payload = await self._transport.request_json( - "/sandbox/files/stat", - params={"path": path}, - ) - return _normalize_file_info(payload["file"]) - - async def stat(self, path: str) -> SandboxFileInfo: - return await self.get_info(path) - - async def exists(self, path: str) -> bool: - try: - await self.get_info(path) - return True - except HyperbrowserError as error: - if error.status_code == 404: - return False - if "not found" in str(error).lower() or "no such file" in str(error).lower(): - return False - raise - - async def read( - self, - path: str, - *, - offset: Optional[int] = None, - length: Optional[int] = None, - format: str = "text", - ): - if format == "text": - return ( - await self._read_wire( - path, - offset=offset, - length=length, - encoding="utf8", - ) - ).content - - response = await self._read_wire( - path, - offset=offset, - length=length, - encoding="base64", - ) - content = base64.b64decode(response.content) - if format in {"bytes", "blob"}: - return content - if format == "stream": - return io.BytesIO(content) - raise ValueError("format should be one of: text, bytes, blob, stream") - - async def read_text( 
- self, - path: str, - *, - offset: Optional[int] = None, - length: Optional[int] = None, - ) -> str: - return await self.read(path, offset=offset, length=length, format="text") - - async def read_bytes( - self, - path: str, - *, - offset: Optional[int] = None, - length: Optional[int] = None, - ) -> bytes: - return await self.read(path, offset=offset, length=length, format="bytes") - - async def write( - self, - path_or_files: Union[str, List[Union[SandboxFileWriteEntry, Dict[str, object]]]], - data: Optional[Union[str, bytes, bytearray]] = None, - ): - if isinstance(path_or_files, str): - if data is None: - raise ValueError("Path and data are required") - payload = await self._transport.request_json( - "/sandbox/files/write", - method="POST", - json_body={ - "path": path_or_files, - **_encode_write_data(data), - }, - headers={"content-type": "application/json"}, - ) - return _normalize_write_info(payload["files"][0]) - - if not path_or_files: - return [] - - encoded_files = [] - for entry in path_or_files: - normalized = ( - entry - if isinstance(entry, SandboxFileWriteEntry) - else SandboxFileWriteEntry(**entry) - ) - encoded_files.append( - { - "path": normalized.path, - **_encode_write_data(normalized.data), - } - ) - - payload = await self._transport.request_json( - "/sandbox/files/write", - method="POST", - json_body={"files": encoded_files}, - headers={"content-type": "application/json"}, - ) - return [_normalize_write_info(entry) for entry in payload.get("files", [])] - - async def write_text( - self, - path: str, - data: str, - *, - append: Optional[bool] = None, - mode: Optional[str] = None, - ): - return await self._write_single( - path, - data, - append=append, - mode=mode, - encoding="utf8", - ) - - async def write_bytes( - self, - path: str, - data: bytes, - *, - append: Optional[bool] = None, - mode: Optional[str] = None, - ): - return await self._write_single( - path, - base64.b64encode(data).decode("ascii"), - append=append, - mode=mode, - 
encoding="base64", - ) - - async def upload(self, path: str, data: Union[str, bytes, bytearray]): - body = data.encode("utf-8") if isinstance(data, str) else bytes(data) - payload = await self._transport.request_json( - "/sandbox/files/upload", - method="PUT", - params={"path": path}, - content=body, - ) - return SandboxFileTransferResult(**payload) - - async def download(self, path: str) -> bytes: - return await self._transport.request_bytes( - "/sandbox/files/download", - params={"path": path}, - ) - - async def make_dir( - self, - path: str, - *, - parents: Optional[bool] = None, - mode: Optional[str] = None, - ) -> bool: - payload = await self._transport.request_json( - "/sandbox/files/mkdir", - method="POST", - json_body={ - "path": path, - "parents": parents, - "mode": mode, - }, - headers={"content-type": "application/json"}, - ) - return bool(payload.get("created")) - - async def mkdir( - self, - path: str, - *, - parents: Optional[bool] = None, - mode: Optional[str] = None, - ) -> bool: - return await self.make_dir(path, parents=parents, mode=mode) - - async def rename(self, old_path: str, new_path: str) -> SandboxFileInfo: - payload = await self._transport.request_json( - "/sandbox/files/move", - method="POST", - json_body={ - "from": old_path, - "to": new_path, - }, - headers={"content-type": "application/json"}, - ) - return _normalize_file_info(payload["entry"]) - - async def move( - self, - *, - source: str, - destination: str, - overwrite: Optional[bool] = None, - ) -> SandboxFileInfo: - return await self.rename(source, destination) - - async def remove(self, path: str, *, recursive: Optional[bool] = None) -> None: - await self._transport.request_json( - "/sandbox/files/delete", - method="POST", - json_body=SandboxFileDeleteParams( - path=path, - recursive=recursive, - ).model_dump(exclude_none=True), - headers={"content-type": "application/json"}, - ) - - async def delete(self, path: str, *, recursive: Optional[bool] = None) -> None: - await 
self.remove(path, recursive=recursive) - - async def copy( - self, - params: Optional[Union[SandboxFileCopyParams, Dict[str, object]]] = None, - *, - source: Optional[str] = None, - destination: Optional[str] = None, - recursive: Optional[bool] = None, - overwrite: Optional[bool] = None, - ) -> SandboxFileInfo: - if params is None: - normalized = SandboxFileCopyParams( - source=source, - destination=destination, - recursive=recursive, - overwrite=overwrite, - ) - elif isinstance(params, SandboxFileCopyParams): - normalized = params - else: - normalized = SandboxFileCopyParams(**params) - - payload = await self._transport.request_json( - "/sandbox/files/copy", - method="POST", - json_body={ - "from": normalized.source, - "to": normalized.destination, - "recursive": normalized.recursive, - "overwrite": normalized.overwrite, - }, - headers={"content-type": "application/json"}, - ) - return _normalize_file_info(payload["entry"]) - - async def chmod( - self, - params: Optional[Union[SandboxFileChmodParams, Dict[str, object]]] = None, - *, - path: Optional[str] = None, - mode: Optional[str] = None, - recursive: Optional[bool] = None, - ) -> None: - normalized = ( - params - if isinstance(params, SandboxFileChmodParams) - else SandboxFileChmodParams( - **(params or {"path": path, "mode": mode, "recursive": recursive}) - ) - ) - await self._transport.request_json( - "/sandbox/files/chmod", - method="POST", - json_body=normalized.model_dump(exclude_none=True), - headers={"content-type": "application/json"}, - ) - - async def chown( - self, - params: Optional[Union[SandboxFileChownParams, Dict[str, object]]] = None, - *, - path: Optional[str] = None, - uid: Optional[int] = None, - gid: Optional[int] = None, - recursive: Optional[bool] = None, - ) -> None: - normalized = ( - params - if isinstance(params, SandboxFileChownParams) - else SandboxFileChownParams( - **( - params - or { - "path": path, - "uid": uid, - "gid": gid, - "recursive": recursive, - } - ) - ) - ) - await 
self._transport.request_json( - "/sandbox/files/chown", - method="POST", - json_body=normalized.model_dump(exclude_none=True), - headers={"content-type": "application/json"}, - ) - - async def watch(self, path: str, *, recursive: Optional[bool] = None): - payload = await self._transport.request_json( - "/sandbox/files/watch", - method="POST", - json_body={ - "path": path, - "recursive": recursive, - }, - headers={"content-type": "application/json"}, - ) - return SandboxFileWatchHandle( - self._transport, - self._get_connection_info, - SandboxFileWatchStatus(**payload["watch"]), - self._runtime_proxy_override, - ) - - async def watch_dir( - self, - path: str, - on_event: Callable[[SandboxFileSystemEvent], object], - *, - recursive: Optional[bool] = None, - timeout_ms: Optional[int] = None, - on_exit: Optional[Callable[[Optional[BaseException]], object]] = None, - ) -> SandboxWatchDirHandle: - return SandboxWatchDirHandle( - await self.watch(path, recursive=recursive), - on_event, - on_exit=on_exit, - timeout_ms=timeout_ms, - ) - - async def get_watch( - self, watch_id: str, include_events: bool = False - ) -> SandboxFileWatchHandle: - payload = await self._transport.request_json( - f"/sandbox/files/watch/{watch_id}", - params={"includeEvents": True} if include_events else None, - ) - return SandboxFileWatchHandle( - self._transport, - self._get_connection_info, - SandboxFileWatchStatus(**payload["watch"]), - self._runtime_proxy_override, - ) - - async def upload_url( - self, - path: str, - *, - expires_in_seconds: Optional[int] = None, - one_time: Optional[bool] = None, - ) -> SandboxPresignedUrl: - payload = await self._transport.request_json( - "/sandbox/files/presign-upload", - method="POST", - json_body=SandboxPresignFileParams( - path=path, - expires_in_seconds=expires_in_seconds, - one_time=one_time, - ).model_dump(exclude_none=True, by_alias=True), - headers={"content-type": "application/json"}, - ) - return SandboxPresignedUrl(**payload) - - async def 
download_url( - self, - path: str, - *, - expires_in_seconds: Optional[int] = None, - one_time: Optional[bool] = None, - ) -> SandboxPresignedUrl: - payload = await self._transport.request_json( - "/sandbox/files/presign-download", - method="POST", - json_body=SandboxPresignFileParams( - path=path, - expires_in_seconds=expires_in_seconds, - one_time=one_time, - ).model_dump(exclude_none=True, by_alias=True), - headers={"content-type": "application/json"}, - ) - return SandboxPresignedUrl(**payload) - - async def _read_wire( - self, - path: str, - *, - offset: Optional[int] = None, - length: Optional[int] = None, - encoding: str, - ) -> SandboxFileReadResult: - payload = await self._transport.request_json( - "/sandbox/files/read", - method="POST", - json_body={ - "path": path, - "offset": offset, - "length": length, - "encoding": encoding, - }, - headers={"content-type": "application/json"}, - ) - return SandboxFileReadResult(**payload) - - async def _write_single( - self, - path: str, - data: str, - *, - append: Optional[bool] = None, - mode: Optional[str] = None, - encoding: str, - ): - payload = await self._transport.request_json( - "/sandbox/files/write", - method="POST", - json_body={ - "path": path, - "data": data, - "append": append, - "mode": mode, - "encoding": encoding, - }, - headers={"content-type": "application/json"}, - ) - return _normalize_write_info(payload["files"][0]) - - -class SandboxTerminalConnection: - def __init__(self, websocket): - self._websocket = websocket - - async def events(self) -> AsyncIterator[object]: - while True: - try: - message = await self._websocket.recv() - except ConnectionClosed: - break - - if isinstance(message, bytes): - message = message.decode("utf-8") - parsed = json.loads(message) - if parsed["type"] == "output": - normalized = _normalize_terminal_output_chunk(parsed) - yield SandboxTerminalOutputEvent( - type="output", - **normalized, - ) - elif parsed["type"] == "exit": - yield SandboxTerminalExitEvent( - 
type="exit", - status=_normalize_terminal_status(parsed["status"]), - ) - - async def write(self, data: Union[str, bytes, bytearray]) -> None: - payload = { - "type": "input", - "data": data if isinstance(data, str) else base64.b64encode(bytes(data)).decode("ascii"), - } - if not isinstance(data, str): - payload["encoding"] = "base64" - await self._websocket.send(json.dumps(payload)) - - async def resize(self, rows: int, cols: int) -> None: - await self._websocket.send( - json.dumps( - { - "type": "resize", - "rows": rows, - "cols": cols, - } - ) - ) - - async def close(self) -> None: - await self._websocket.close() - - -class SandboxTerminalHandle: - def __init__( - self, - transport: RuntimeTransport, - get_connection_info, - status, - runtime_proxy_override: Optional[str] = None, - ): - self._transport = transport - self._get_connection_info = get_connection_info - self._status = status - self._runtime_proxy_override = runtime_proxy_override - - @property - def id(self) -> str: - return self._status.id - - @property - def current(self) -> SandboxTerminalStatus: - return _copy_model(self._status) - - def to_dict(self): - return self._status.model_dump() - - def to_json(self): - return self.to_dict() - - async def refresh(self, include_output: bool = False) -> "SandboxTerminalHandle": - payload = await self._transport.request_json( - f"/sandbox/pty/{self.id}", - params={"includeOutput": True} if include_output else None, - ) - self._status = _normalize_terminal_status(payload["pty"]) - return self - - async def wait( - self, - timeout_ms: Optional[int] = None, - include_output: Optional[bool] = None, - ) -> SandboxTerminalStatus: - payload = await self._transport.request_json( - f"/sandbox/pty/{self.id}/wait", - method="POST", - json_body=SandboxTerminalWaitParams( - timeout_ms=timeout_ms, - include_output=include_output, - ).model_dump(exclude_none=True, by_alias=True), - headers={"content-type": "application/json"}, - ) - self._status = 
_normalize_terminal_status(payload["pty"]) - return self.current - - async def signal(self, signal: Optional[str] = None) -> SandboxTerminalStatus: - payload = await self._transport.request_json( - f"/sandbox/pty/{self.id}/kill", - method="POST", - json_body={"signal": signal}, - headers={"content-type": "application/json"}, - ) - self._status = _normalize_terminal_status(payload["pty"]) - return self.current - - async def kill( - self, - signal: Optional[str] = None, - *, - timeout_ms: Optional[int] = None, - ) -> SandboxTerminalStatus: - await self.signal(signal) - if timeout_ms is None: - timeout_ms = int(DEFAULT_TERMINAL_KILL_WAIT_SECONDS * 1000) - return await self.wait(timeout_ms=timeout_ms) - - async def resize(self, rows: int, cols: int) -> SandboxTerminalStatus: - payload = await self._transport.request_json( - f"/sandbox/pty/{self.id}/resize", - method="POST", - json_body={"rows": rows, "cols": cols}, - headers={"content-type": "application/json"}, - ) - self._status = _normalize_terminal_status(payload["pty"]) - return self.current - - async def attach(self) -> SandboxTerminalConnection: - connection = await self._get_connection_info() - target = to_websocket_transport_target( - connection.base_url, - f"/sandbox/pty/{self.id}/ws?sessionId={connection.sandbox_id}", - self._runtime_proxy_override, - ) - headers = build_headers(connection.token, host_header=target.host_header) - connect_kwargs = {} - if target.connect_host is not None and target.connect_port is not None: - sock = socket.create_connection( - (target.connect_host, target.connect_port), - timeout=self._transport._timeout, - ) - sock.setblocking(False) - connect_kwargs["sock"] = sock - - try: - websocket = await async_ws_connect( - target.url, - additional_headers=headers, - open_timeout=self._transport._timeout, - **connect_kwargs, - ) - except BaseException as error: - raise _normalize_websocket_error(error) - - return SandboxTerminalConnection(websocket) - - -class SandboxTerminalApi: - def 
__init__( - self, - transport: RuntimeTransport, - get_connection_info, - runtime_proxy_override: Optional[str] = None, - ): - self._transport = transport - self._get_connection_info = get_connection_info - self._runtime_proxy_override = runtime_proxy_override - - async def create( - self, - input: Union[SandboxTerminalCreateParams, Dict[str, object]], - ) -> SandboxTerminalHandle: - params = ( - input - if isinstance(input, SandboxTerminalCreateParams) - else SandboxTerminalCreateParams(**input) - ) - payload = await self._transport.request_json( - "/sandbox/pty", - method="POST", - json_body=params.model_dump(exclude_none=True, by_alias=True), - headers={"content-type": "application/json"}, - ) - return SandboxTerminalHandle( - self._transport, - self._get_connection_info, - _normalize_terminal_status(payload["pty"]), - self._runtime_proxy_override, - ) - - async def get( - self, terminal_id: str, include_output: bool = False - ) -> SandboxTerminalHandle: - payload = await self._transport.request_json( - f"/sandbox/pty/{terminal_id}", - params={"includeOutput": True} if include_output else None, - ) - return SandboxTerminalHandle( - self._transport, - self._get_connection_info, - _normalize_terminal_status(payload["pty"]), - self._runtime_proxy_override, - ) +from .sandboxes.sandbox_files import ( + DEFAULT_WATCH_TIMEOUT_MS, + SandboxFileWatchHandle, + SandboxFilesApi, + SandboxWatchDirHandle, +) +from .sandboxes.sandbox_processes import ( + DEFAULT_PROCESS_KILL_WAIT_SECONDS, + SandboxProcessHandle, + SandboxProcessesApi, +) +from .sandboxes.sandbox_terminal import ( + DEFAULT_TERMINAL_KILL_WAIT_SECONDS, + SandboxTerminalApi, + SandboxTerminalConnection, + SandboxTerminalHandle, +) +from .sandboxes.sandbox_transport import RuntimeTransport + +__all__ = [ + "DEFAULT_PROCESS_KILL_WAIT_SECONDS", + "DEFAULT_TERMINAL_KILL_WAIT_SECONDS", + "DEFAULT_WATCH_TIMEOUT_MS", + "RuntimeTransport", + "SandboxFileWatchHandle", + "SandboxFilesApi", + "SandboxHandle", + 
"SandboxManager", + "SandboxProcessHandle", + "SandboxProcessesApi", + "SandboxTerminalApi", + "SandboxTerminalConnection", + "SandboxTerminalHandle", + "SandboxWatchDirHandle", +] class SandboxHandle: @@ -1471,7 +133,8 @@ async def stop(self) -> BasicResponse: return response async def create_memory_snapshot( - self, params: Optional[Union[SandboxMemorySnapshotParams, Dict[str, object]]] = None + self, + params: Optional[Union[SandboxMemorySnapshotParams, Dict[str, object]]] = None, ) -> SandboxMemorySnapshotResult: normalized = ( params @@ -1610,7 +273,9 @@ async def create( self, params: Union[CreateSandboxParams, Dict[str, object]] ) -> SandboxHandle: normalized = ( - params if isinstance(params, CreateSandboxParams) else CreateSandboxParams(**params) + params + if isinstance(params, CreateSandboxParams) + else CreateSandboxParams(**params) ) detail = await self._create_detail(normalized) return self.attach(detail) diff --git a/hyperbrowser/client/managers/async_manager/sandboxes/__init__.py b/hyperbrowser/client/managers/async_manager/sandboxes/__init__.py new file mode 100644 index 00000000..e9684afe --- /dev/null +++ b/hyperbrowser/client/managers/async_manager/sandboxes/__init__.py @@ -0,0 +1,33 @@ +from .sandbox_files import ( + DEFAULT_WATCH_TIMEOUT_MS, + SandboxFileWatchHandle, + SandboxFilesApi, + SandboxWatchDirHandle, +) +from .sandbox_processes import ( + DEFAULT_PROCESS_KILL_WAIT_SECONDS, + SandboxProcessHandle, + SandboxProcessesApi, +) +from .sandbox_terminal import ( + DEFAULT_TERMINAL_KILL_WAIT_SECONDS, + SandboxTerminalApi, + SandboxTerminalConnection, + SandboxTerminalHandle, +) +from .sandbox_transport import RuntimeTransport + +__all__ = [ + "DEFAULT_PROCESS_KILL_WAIT_SECONDS", + "DEFAULT_TERMINAL_KILL_WAIT_SECONDS", + "DEFAULT_WATCH_TIMEOUT_MS", + "RuntimeTransport", + "SandboxFileWatchHandle", + "SandboxFilesApi", + "SandboxProcessHandle", + "SandboxProcessesApi", + "SandboxTerminalApi", + "SandboxTerminalConnection", + 
"SandboxTerminalHandle", + "SandboxWatchDirHandle", +] diff --git a/hyperbrowser/client/managers/async_manager/sandboxes/sandbox_files.py b/hyperbrowser/client/managers/async_manager/sandboxes/sandbox_files.py new file mode 100644 index 00000000..68fdd89e --- /dev/null +++ b/hyperbrowser/client/managers/async_manager/sandboxes/sandbox_files.py @@ -0,0 +1,718 @@ +import asyncio +import base64 +import inspect +import io +import json +import socket +from datetime import datetime +from typing import AsyncIterator, Callable, Dict, List, Optional, Union +from urllib.parse import urlencode + +from websockets.asyncio.client import connect as async_ws_connect +from websockets.exceptions import ConnectionClosed + +from .....exceptions import HyperbrowserError +from .....models.sandbox import ( + SandboxFileChmodParams, + SandboxFileChownParams, + SandboxFileCopyParams, + SandboxFileDeleteParams, + SandboxFileInfo, + SandboxFileReadResult, + SandboxFileSystemEvent, + SandboxFileWriteEntry, + SandboxFileTransferResult, + SandboxFileWatchDoneEvent, + SandboxFileWatchEventMessage, + SandboxFileWatchStatus, + SandboxPresignFileParams, + SandboxPresignedUrl, +) +from .....sandbox_common import build_headers, to_websocket_transport_target +from ...sandboxes.shared import ( + DEFAULT_WATCH_TIMEOUT_MS, + _copy_model, + _encode_write_data, + _normalize_event_type, + _normalize_file_info, + _normalize_websocket_error, + _normalize_write_info, + _relative_watch_name, +) +from .sandbox_transport import RuntimeTransport + + +class SandboxFileWatchHandle: + def __init__( + self, + transport: RuntimeTransport, + get_connection_info, + status, + runtime_proxy_override: Optional[str] = None, + ): + self._transport = transport + self._get_connection_info = get_connection_info + self._status = status + self._runtime_proxy_override = runtime_proxy_override + + @property + def id(self) -> str: + return self._status.id + + @property + def current(self) -> SandboxFileWatchStatus: + return 
_copy_model(self._status) + + def to_dict(self): + return self._status.model_dump() + + def to_json(self): + return self.to_dict() + + async def refresh(self, include_events: bool = False) -> "SandboxFileWatchHandle": + params = {"includeEvents": True} if include_events else None + payload = await self._transport.request_json( + f"/sandbox/files/watch/{self.id}", + params=params, + ) + self._status = SandboxFileWatchStatus(**payload["watch"]) + return self + + async def stop(self) -> None: + await self._transport.request_json( + f"/sandbox/files/watch/{self.id}", + method="DELETE", + ) + self._status = self._status.model_copy( + update={ + "active": False, + "stopped_at": self._status.stopped_at + or int(datetime.now().timestamp() * 1000), + } + ) + + async def events( + self, + *, + cursor: Optional[int] = None, + route: str = "ws", + ) -> AsyncIterator[object]: + connection = await self._get_connection_info() + query = urlencode( + [ + ("sessionId", connection.sandbox_id), + *([("cursor", str(cursor))] if cursor is not None else []), + ] + ) + target = to_websocket_transport_target( + connection.base_url, + f"/sandbox/files/watch/{self.id}/{route}?{query}", + self._runtime_proxy_override, + ) + headers = build_headers(connection.token, host_header=target.host_header) + connect_kwargs = {} + if target.connect_host is not None and target.connect_port is not None: + sock = socket.create_connection( + (target.connect_host, target.connect_port), + timeout=self._transport._timeout, + ) + sock.setblocking(False) + connect_kwargs["sock"] = sock + try: + websocket = await async_ws_connect( + target.url, + additional_headers=headers, + open_timeout=self._transport._timeout, + **connect_kwargs, + ) + except BaseException as error: + raise _normalize_websocket_error(error) + + try: + while True: + try: + message = await websocket.recv() + except ConnectionClosed: + break + + if isinstance(message, bytes): + message = message.decode("utf-8") + parsed = json.loads(message) + 
if parsed["type"] == "event": + event = SandboxFileWatchEventMessage( + type="event", + event=parsed["event"], + ) + self._status = self._status.model_copy( + update={ + "oldest_seq": self._status.oldest_seq or event.event.seq, + "last_seq": max(self._status.last_seq, event.event.seq), + } + ) + yield event + elif parsed["type"] == "done": + self._status = SandboxFileWatchStatus(**parsed["status"]) + yield SandboxFileWatchDoneEvent(type="done", status=self.current) + break + except GeneratorExit: + raise + except BaseException as error: + raise _normalize_websocket_error(error) + finally: + await websocket.close() + + +class SandboxWatchDirHandle: + def __init__( + self, + watch: SandboxFileWatchHandle, + on_event: Callable[[SandboxFileSystemEvent], object], + *, + on_exit: Optional[Callable[[Optional[BaseException]], object]] = None, + timeout_ms: Optional[int] = None, + ): + self._watch = watch + self._root_path = watch.current.path + self._on_event = on_event + self._on_exit = on_exit + self._stop_requested = False + self._exit_notified = False + self._task = asyncio.create_task(self._run()) + effective_timeout = ( + DEFAULT_WATCH_TIMEOUT_MS if timeout_ms is None else timeout_ms + ) + self._timeout_task = ( + asyncio.create_task(self._auto_stop(effective_timeout)) + if effective_timeout > 0 + else None + ) + + async def stop(self) -> None: + if self._stop_requested: + return + self._stop_requested = True + + if self._timeout_task is not None: + self._timeout_task.cancel() + self._timeout_task = None + + try: + await self._watch.stop() + except HyperbrowserError as error: + if error.status_code not in {404, 409}: + raise + + if asyncio.current_task() is not self._task: + await self._task + + async def _auto_stop(self, timeout_ms: int) -> None: + try: + await asyncio.sleep(timeout_ms / 1000.0) + await self.stop() + except asyncio.CancelledError: + return + + async def _run(self) -> None: + exit_error = None + try: + async for message in self._watch.events(): + 
event_type = _normalize_event_type(message.event.op) + if not event_type: + continue + result = self._on_event( + SandboxFileSystemEvent( + type=event_type, + name=_relative_watch_name(self._root_path, message.event.path), + ) + ) + if inspect.isawaitable(result): + await result + except BaseException as error: + exit_error = error + finally: + if self._timeout_task is not None: + self._timeout_task.cancel() + self._timeout_task = None + if not self._exit_notified: + self._exit_notified = True + if self._on_exit is not None: + result = self._on_exit(exit_error) + if inspect.isawaitable(result): + await result + + +class SandboxFilesApi: + def __init__( + self, + transport: RuntimeTransport, + get_connection_info, + runtime_proxy_override: Optional[str] = None, + ): + self._transport = transport + self._get_connection_info = get_connection_info + self._runtime_proxy_override = runtime_proxy_override + + async def list( + self, + path: str, + *, + depth: Optional[int] = None, + ) -> List[SandboxFileInfo]: + depth = 1 if depth is None else depth + if depth < 1: + raise ValueError("depth should be at least one") + + payload = await self._transport.request_json( + "/sandbox/files", + params={ + "path": path, + "depth": depth, + }, + ) + return [_normalize_file_info(entry) for entry in payload.get("entries", [])] + + async def get_info(self, path: str) -> SandboxFileInfo: + payload = await self._transport.request_json( + "/sandbox/files/stat", + params={"path": path}, + ) + return _normalize_file_info(payload["file"]) + + async def stat(self, path: str) -> SandboxFileInfo: + return await self.get_info(path) + + async def exists(self, path: str) -> bool: + try: + await self.get_info(path) + return True + except HyperbrowserError as error: + if error.status_code == 404: + return False + if ( + "not found" in str(error).lower() + or "no such file" in str(error).lower() + ): + return False + raise + + async def read( + self, + path: str, + *, + offset: Optional[int] = None, 
+ length: Optional[int] = None, + format: str = "text", + ): + if format == "text": + return ( + await self._read_wire( + path, + offset=offset, + length=length, + encoding="utf8", + ) + ).content + + response = await self._read_wire( + path, + offset=offset, + length=length, + encoding="base64", + ) + content = base64.b64decode(response.content) + if format in {"bytes", "blob"}: + return content + if format == "stream": + return io.BytesIO(content) + raise ValueError("format should be one of: text, bytes, blob, stream") + + async def read_text( + self, + path: str, + *, + offset: Optional[int] = None, + length: Optional[int] = None, + ) -> str: + return await self.read(path, offset=offset, length=length, format="text") + + async def read_bytes( + self, + path: str, + *, + offset: Optional[int] = None, + length: Optional[int] = None, + ) -> bytes: + return await self.read(path, offset=offset, length=length, format="bytes") + + async def write( + self, + path_or_files: Union[ + str, List[Union[SandboxFileWriteEntry, Dict[str, object]]] + ], + data: Optional[Union[str, bytes, bytearray]] = None, + ): + if isinstance(path_or_files, str): + if data is None: + raise ValueError("Path and data are required") + payload = await self._transport.request_json( + "/sandbox/files/write", + method="POST", + json_body={ + "path": path_or_files, + **_encode_write_data(data), + }, + headers={"content-type": "application/json"}, + ) + return _normalize_write_info(payload["files"][0]) + + if not path_or_files: + return [] + + encoded_files = [] + for entry in path_or_files: + normalized = ( + entry + if isinstance(entry, SandboxFileWriteEntry) + else SandboxFileWriteEntry(**entry) + ) + encoded_files.append( + { + "path": normalized.path, + **_encode_write_data(normalized.data), + } + ) + + payload = await self._transport.request_json( + "/sandbox/files/write", + method="POST", + json_body={"files": encoded_files}, + headers={"content-type": "application/json"}, + ) + return 
[_normalize_write_info(entry) for entry in payload.get("files", [])] + + async def write_text( + self, + path: str, + data: str, + *, + append: Optional[bool] = None, + mode: Optional[str] = None, + ): + return await self._write_single( + path, + data, + append=append, + mode=mode, + encoding="utf8", + ) + + async def write_bytes( + self, + path: str, + data: bytes, + *, + append: Optional[bool] = None, + mode: Optional[str] = None, + ): + return await self._write_single( + path, + base64.b64encode(data).decode("ascii"), + append=append, + mode=mode, + encoding="base64", + ) + + async def upload(self, path: str, data: Union[str, bytes, bytearray]): + body = data.encode("utf-8") if isinstance(data, str) else bytes(data) + payload = await self._transport.request_json( + "/sandbox/files/upload", + method="PUT", + params={"path": path}, + content=body, + ) + return SandboxFileTransferResult(**payload) + + async def download(self, path: str) -> bytes: + return await self._transport.request_bytes( + "/sandbox/files/download", + params={"path": path}, + ) + + async def make_dir( + self, + path: str, + *, + parents: Optional[bool] = None, + mode: Optional[str] = None, + ) -> bool: + payload = await self._transport.request_json( + "/sandbox/files/mkdir", + method="POST", + json_body={ + "path": path, + "parents": parents, + "mode": mode, + }, + headers={"content-type": "application/json"}, + ) + return bool(payload.get("created")) + + async def mkdir( + self, + path: str, + *, + parents: Optional[bool] = None, + mode: Optional[str] = None, + ) -> bool: + return await self.make_dir(path, parents=parents, mode=mode) + + async def rename(self, old_path: str, new_path: str) -> SandboxFileInfo: + payload = await self._transport.request_json( + "/sandbox/files/move", + method="POST", + json_body={ + "from": old_path, + "to": new_path, + }, + headers={"content-type": "application/json"}, + ) + return _normalize_file_info(payload["entry"]) + + async def move( + self, + *, + source: 
str, + destination: str, + overwrite: Optional[bool] = None, + ) -> SandboxFileInfo: + return await self.rename(source, destination) + + async def remove(self, path: str, *, recursive: Optional[bool] = None) -> None: + await self._transport.request_json( + "/sandbox/files/delete", + method="POST", + json_body=SandboxFileDeleteParams( + path=path, + recursive=recursive, + ).model_dump(exclude_none=True), + headers={"content-type": "application/json"}, + ) + + async def delete(self, path: str, *, recursive: Optional[bool] = None) -> None: + await self.remove(path, recursive=recursive) + + async def copy( + self, + params: Optional[Union[SandboxFileCopyParams, Dict[str, object]]] = None, + *, + source: Optional[str] = None, + destination: Optional[str] = None, + recursive: Optional[bool] = None, + overwrite: Optional[bool] = None, + ) -> SandboxFileInfo: + if params is None: + normalized = SandboxFileCopyParams( + source=source, + destination=destination, + recursive=recursive, + overwrite=overwrite, + ) + elif isinstance(params, SandboxFileCopyParams): + normalized = params + else: + normalized = SandboxFileCopyParams(**params) + + payload = await self._transport.request_json( + "/sandbox/files/copy", + method="POST", + json_body={ + "from": normalized.source, + "to": normalized.destination, + "recursive": normalized.recursive, + "overwrite": normalized.overwrite, + }, + headers={"content-type": "application/json"}, + ) + return _normalize_file_info(payload["entry"]) + + async def chmod( + self, + params: Optional[Union[SandboxFileChmodParams, Dict[str, object]]] = None, + *, + path: Optional[str] = None, + mode: Optional[str] = None, + recursive: Optional[bool] = None, + ) -> None: + normalized = ( + params + if isinstance(params, SandboxFileChmodParams) + else SandboxFileChmodParams( + **(params or {"path": path, "mode": mode, "recursive": recursive}) + ) + ) + await self._transport.request_json( + "/sandbox/files/chmod", + method="POST", + 
json_body=normalized.model_dump(exclude_none=True), + headers={"content-type": "application/json"}, + ) + + async def chown( + self, + params: Optional[Union[SandboxFileChownParams, Dict[str, object]]] = None, + *, + path: Optional[str] = None, + uid: Optional[int] = None, + gid: Optional[int] = None, + recursive: Optional[bool] = None, + ) -> None: + normalized = ( + params + if isinstance(params, SandboxFileChownParams) + else SandboxFileChownParams( + **( + params + or { + "path": path, + "uid": uid, + "gid": gid, + "recursive": recursive, + } + ) + ) + ) + await self._transport.request_json( + "/sandbox/files/chown", + method="POST", + json_body=normalized.model_dump(exclude_none=True), + headers={"content-type": "application/json"}, + ) + + async def watch(self, path: str, *, recursive: Optional[bool] = None): + payload = await self._transport.request_json( + "/sandbox/files/watch", + method="POST", + json_body={ + "path": path, + "recursive": recursive, + }, + headers={"content-type": "application/json"}, + ) + return SandboxFileWatchHandle( + self._transport, + self._get_connection_info, + SandboxFileWatchStatus(**payload["watch"]), + self._runtime_proxy_override, + ) + + async def watch_dir( + self, + path: str, + on_event: Callable[[SandboxFileSystemEvent], object], + *, + recursive: Optional[bool] = None, + timeout_ms: Optional[int] = None, + on_exit: Optional[Callable[[Optional[BaseException]], object]] = None, + ) -> SandboxWatchDirHandle: + return SandboxWatchDirHandle( + await self.watch(path, recursive=recursive), + on_event, + on_exit=on_exit, + timeout_ms=timeout_ms, + ) + + async def get_watch( + self, watch_id: str, include_events: bool = False + ) -> SandboxFileWatchHandle: + payload = await self._transport.request_json( + f"/sandbox/files/watch/{watch_id}", + params={"includeEvents": True} if include_events else None, + ) + return SandboxFileWatchHandle( + self._transport, + self._get_connection_info, + 
SandboxFileWatchStatus(**payload["watch"]), + self._runtime_proxy_override, + ) + + async def upload_url( + self, + path: str, + *, + expires_in_seconds: Optional[int] = None, + one_time: Optional[bool] = None, + ) -> SandboxPresignedUrl: + payload = await self._transport.request_json( + "/sandbox/files/presign-upload", + method="POST", + json_body=SandboxPresignFileParams( + path=path, + expires_in_seconds=expires_in_seconds, + one_time=one_time, + ).model_dump(exclude_none=True, by_alias=True), + headers={"content-type": "application/json"}, + ) + return SandboxPresignedUrl(**payload) + + async def download_url( + self, + path: str, + *, + expires_in_seconds: Optional[int] = None, + one_time: Optional[bool] = None, + ) -> SandboxPresignedUrl: + payload = await self._transport.request_json( + "/sandbox/files/presign-download", + method="POST", + json_body=SandboxPresignFileParams( + path=path, + expires_in_seconds=expires_in_seconds, + one_time=one_time, + ).model_dump(exclude_none=True, by_alias=True), + headers={"content-type": "application/json"}, + ) + return SandboxPresignedUrl(**payload) + + async def _read_wire( + self, + path: str, + *, + offset: Optional[int] = None, + length: Optional[int] = None, + encoding: str, + ) -> SandboxFileReadResult: + payload = await self._transport.request_json( + "/sandbox/files/read", + method="POST", + json_body={ + "path": path, + "offset": offset, + "length": length, + "encoding": encoding, + }, + headers={"content-type": "application/json"}, + ) + return SandboxFileReadResult(**payload) + + async def _write_single( + self, + path: str, + data: str, + *, + append: Optional[bool] = None, + mode: Optional[str] = None, + encoding: str, + ): + payload = await self._transport.request_json( + "/sandbox/files/write", + method="POST", + json_body={ + "path": path, + "data": data, + "append": append, + "mode": mode, + "encoding": encoding, + }, + headers={"content-type": "application/json"}, + ) + return 
_normalize_write_info(payload["files"][0]) diff --git a/hyperbrowser/client/managers/async_manager/sandboxes/sandbox_processes.py b/hyperbrowser/client/managers/async_manager/sandboxes/sandbox_processes.py new file mode 100644 index 00000000..1b2c0303 --- /dev/null +++ b/hyperbrowser/client/managers/async_manager/sandboxes/sandbox_processes.py @@ -0,0 +1,217 @@ +import base64 +from typing import AsyncIterator, Dict, Optional, Union + +from .....models.sandbox import ( + SandboxExecParams, + SandboxProcessExitEvent, + SandboxProcessListResponse, + SandboxProcessOutputEvent, + SandboxProcessResult, + SandboxProcessStdinParams, + SandboxProcessSummary, +) +from .sandbox_transport import RuntimeTransport + +DEFAULT_PROCESS_KILL_WAIT_SECONDS = 5.0 + + +class SandboxProcessHandle: + def __init__(self, transport: RuntimeTransport, summary: SandboxProcessSummary): + self._transport = transport + self._summary = summary + + @property + def id(self) -> str: + return self._summary.id + + @property + def status(self) -> str: + return self._summary.status + + def to_dict(self): + return self._summary.model_dump() + + def to_json(self): + return self.to_dict() + + async def refresh(self) -> "SandboxProcessHandle": + payload = await self._transport.request_json(f"/sandbox/processes/{self.id}") + self._summary = SandboxProcessSummary(**payload["process"]) + return self + + async def wait( + self, + timeout_ms: Optional[int] = None, + timeout_sec: Optional[int] = None, + ) -> SandboxProcessResult: + payload = await self._transport.request_json( + f"/sandbox/processes/{self.id}/wait", + method="POST", + json_body={ + "timeoutMs": timeout_ms, + "timeout_sec": timeout_sec, + }, + headers={"content-type": "application/json"}, + ) + result = SandboxProcessResult(**payload["result"]) + self._summary = SandboxProcessSummary( + id=result.id, + status=result.status, + command=self._summary.command, + args=self._summary.args, + cwd=self._summary.cwd, + pid=self._summary.pid, + 
exit_code=result.exit_code, + started_at=result.started_at, + completed_at=result.completed_at, + ) + return result + + async def signal(self, signal: str) -> None: + payload = await self._transport.request_json( + f"/sandbox/processes/{self.id}/signal", + method="POST", + json_body={"signal": signal}, + headers={"content-type": "application/json"}, + ) + self._summary = SandboxProcessSummary(**payload["process"]) + + async def kill( + self, + timeout_ms: Optional[int] = None, + timeout_sec: Optional[int] = None, + ) -> SandboxProcessResult: + payload = await self._transport.request_json( + f"/sandbox/processes/{self.id}", + method="DELETE", + ) + self._summary = SandboxProcessSummary(**payload["process"]) + if timeout_ms is None and timeout_sec is None: + timeout_ms = int(DEFAULT_PROCESS_KILL_WAIT_SECONDS * 1000) + return await self.wait(timeout_ms=timeout_ms, timeout_sec=timeout_sec) + + async def write_stdin( + self, + data: Optional[Union[str, bytes, bytearray, SandboxProcessStdinParams]] = None, + *, + encoding: Optional[str] = None, + eof: Optional[bool] = None, + ) -> None: + if isinstance(data, SandboxProcessStdinParams): + params = data + else: + params = SandboxProcessStdinParams(data=data, encoding=encoding, eof=eof) + + payload: Dict[str, object] = {"eof": params.eof} + if params.data is not None: + if isinstance(params.data, str): + payload["data"] = params.data + payload["encoding"] = params.encoding or "utf8" + else: + payload["data"] = base64.b64encode(bytes(params.data)).decode("ascii") + payload["encoding"] = "base64" + + await self._transport.request_json( + f"/sandbox/processes/{self.id}/stdin", + method="POST", + json_body=payload, + headers={"content-type": "application/json"}, + ) + + async def stream(self, from_seq: Optional[int] = None) -> AsyncIterator[object]: + params = {"from_seq": from_seq} if from_seq and from_seq > 0 else None + async for event in self._transport.stream_sse( + f"/sandbox/processes/{self.id}/stream", + params=params, 
+ ): + event_type = event["event"] + data = event["data"] + if event_type == "output": + yield SandboxProcessOutputEvent( + type=data["stream"], + seq=data["seq"], + data=data["data"], + timestamp=data["timestamp"], + ) + elif event_type == "done": + yield SandboxProcessExitEvent( + type="exit", + result=SandboxProcessResult(**data), + ) + + async def result(self) -> SandboxProcessResult: + return await self.wait() + + +class SandboxProcessesApi: + def __init__(self, transport: RuntimeTransport): + self._transport = transport + + async def exec( + self, input: Union[SandboxExecParams, Dict[str, object]] + ) -> SandboxProcessResult: + params = ( + input + if isinstance(input, SandboxExecParams) + else SandboxExecParams(**input) + ) + payload = await self._transport.request_json( + "/sandbox/exec", + method="POST", + json_body=params.model_dump(exclude_none=True, by_alias=True), + headers={"content-type": "application/json"}, + ) + return SandboxProcessResult(**payload["result"]) + + async def start( + self, input: Union[SandboxExecParams, Dict[str, object]] + ) -> SandboxProcessHandle: + params = ( + input + if isinstance(input, SandboxExecParams) + else SandboxExecParams(**input) + ) + payload = await self._transport.request_json( + "/sandbox/processes", + method="POST", + json_body=params.model_dump(exclude_none=True, by_alias=True), + headers={"content-type": "application/json"}, + ) + return SandboxProcessHandle( + self._transport, + SandboxProcessSummary(**payload["process"]), + ) + + async def get(self, process_id: str) -> SandboxProcessHandle: + payload = await self._transport.request_json(f"/sandbox/processes/{process_id}") + return SandboxProcessHandle( + self._transport, + SandboxProcessSummary(**payload["process"]), + ) + + async def list( + self, + *, + status=None, + limit: Optional[int] = None, + cursor: Optional[Union[str, int]] = None, + created_after: Optional[int] = None, + created_before: Optional[int] = None, + ) -> SandboxProcessListResponse: + 
normalized_status = None + if isinstance(status, list): + normalized_status = ",".join(status) if status else None + else: + normalized_status = status + + payload = await self._transport.request_json( + "/sandbox/processes", + params={ + "status": normalized_status, + "limit": limit, + "cursor": cursor, + "created_after": created_after, + "created_before": created_before, + }, + ) + return SandboxProcessListResponse(**payload) diff --git a/hyperbrowser/client/managers/async_manager/sandboxes/sandbox_terminal.py b/hyperbrowser/client/managers/async_manager/sandboxes/sandbox_terminal.py new file mode 100644 index 00000000..4bfcd318 --- /dev/null +++ b/hyperbrowser/client/managers/async_manager/sandboxes/sandbox_terminal.py @@ -0,0 +1,238 @@ +import base64 +import json +import socket +from typing import AsyncIterator, Dict, Optional, Union + +from websockets.asyncio.client import connect as async_ws_connect +from websockets.exceptions import ConnectionClosed + +from .....models.sandbox import ( + SandboxTerminalCreateParams, + SandboxTerminalExitEvent, + SandboxTerminalOutputEvent, + SandboxTerminalStatus, + SandboxTerminalWaitParams, +) +from .....sandbox_common import build_headers, to_websocket_transport_target +from ...sandboxes.shared import ( + _copy_model, + _normalize_terminal_output_chunk, + _normalize_terminal_status, + _normalize_websocket_error, +) +from .sandbox_transport import RuntimeTransport + +DEFAULT_TERMINAL_KILL_WAIT_SECONDS = 5.0 + + +class SandboxTerminalConnection: + def __init__(self, websocket): + self._websocket = websocket + + async def events(self) -> AsyncIterator[object]: + while True: + try: + message = await self._websocket.recv() + except ConnectionClosed: + break + + if isinstance(message, bytes): + message = message.decode("utf-8") + parsed = json.loads(message) + if parsed["type"] == "output": + normalized = _normalize_terminal_output_chunk(parsed) + yield SandboxTerminalOutputEvent( + type="output", + **normalized, + ) + elif 
parsed["type"] == "exit": + yield SandboxTerminalExitEvent( + type="exit", + status=_normalize_terminal_status(parsed["status"]), + ) + + async def write(self, data: Union[str, bytes, bytearray]) -> None: + payload: Dict[str, object] = { + "type": "input", + "data": data + if isinstance(data, str) + else base64.b64encode(bytes(data)).decode("ascii"), + } + if not isinstance(data, str): + payload["encoding"] = "base64" + await self._websocket.send(json.dumps(payload)) + + async def resize(self, rows: int, cols: int) -> None: + await self._websocket.send( + json.dumps( + { + "type": "resize", + "rows": rows, + "cols": cols, + } + ) + ) + + async def close(self) -> None: + await self._websocket.close() + + +class SandboxTerminalHandle: + def __init__( + self, + transport: RuntimeTransport, + get_connection_info, + status, + runtime_proxy_override: Optional[str] = None, + ): + self._transport = transport + self._get_connection_info = get_connection_info + self._status = status + self._runtime_proxy_override = runtime_proxy_override + + @property + def id(self) -> str: + return self._status.id + + @property + def current(self) -> SandboxTerminalStatus: + return _copy_model(self._status) + + def to_dict(self): + return self._status.model_dump() + + def to_json(self): + return self.to_dict() + + async def refresh(self, include_output: bool = False) -> "SandboxTerminalHandle": + payload = await self._transport.request_json( + f"/sandbox/pty/{self.id}", + params={"includeOutput": True} if include_output else None, + ) + self._status = _normalize_terminal_status(payload["pty"]) + return self + + async def wait( + self, + timeout_ms: Optional[int] = None, + include_output: Optional[bool] = None, + ) -> SandboxTerminalStatus: + payload = await self._transport.request_json( + f"/sandbox/pty/{self.id}/wait", + method="POST", + json_body=SandboxTerminalWaitParams( + timeout_ms=timeout_ms, + include_output=include_output, + ).model_dump(exclude_none=True, by_alias=True), + 
headers={"content-type": "application/json"}, + ) + self._status = _normalize_terminal_status(payload["pty"]) + return self.current + + async def signal(self, signal: Optional[str] = None) -> SandboxTerminalStatus: + payload = await self._transport.request_json( + f"/sandbox/pty/{self.id}/kill", + method="POST", + json_body={"signal": signal}, + headers={"content-type": "application/json"}, + ) + self._status = _normalize_terminal_status(payload["pty"]) + return self.current + + async def kill( + self, + signal: Optional[str] = None, + *, + timeout_ms: Optional[int] = None, + ) -> SandboxTerminalStatus: + await self.signal(signal) + if timeout_ms is None: + timeout_ms = int(DEFAULT_TERMINAL_KILL_WAIT_SECONDS * 1000) + return await self.wait(timeout_ms=timeout_ms) + + async def resize(self, rows: int, cols: int) -> SandboxTerminalStatus: + payload = await self._transport.request_json( + f"/sandbox/pty/{self.id}/resize", + method="POST", + json_body={"rows": rows, "cols": cols}, + headers={"content-type": "application/json"}, + ) + self._status = _normalize_terminal_status(payload["pty"]) + return self.current + + async def attach(self) -> SandboxTerminalConnection: + connection = await self._get_connection_info() + target = to_websocket_transport_target( + connection.base_url, + f"/sandbox/pty/{self.id}/ws?sessionId={connection.sandbox_id}", + self._runtime_proxy_override, + ) + headers = build_headers(connection.token, host_header=target.host_header) + connect_kwargs = {} + if target.connect_host is not None and target.connect_port is not None: + sock = socket.create_connection( + (target.connect_host, target.connect_port), + timeout=self._transport._timeout, + ) + sock.setblocking(False) + connect_kwargs["sock"] = sock + + try: + websocket = await async_ws_connect( + target.url, + additional_headers=headers, + open_timeout=self._transport._timeout, + **connect_kwargs, + ) + except BaseException as error: + raise _normalize_websocket_error(error) + + return 
SandboxTerminalConnection(websocket) + + +class SandboxTerminalApi: + def __init__( + self, + transport: RuntimeTransport, + get_connection_info, + runtime_proxy_override: Optional[str] = None, + ): + self._transport = transport + self._get_connection_info = get_connection_info + self._runtime_proxy_override = runtime_proxy_override + + async def create( + self, + input: Union[SandboxTerminalCreateParams, Dict[str, object]], + ) -> SandboxTerminalHandle: + params = ( + input + if isinstance(input, SandboxTerminalCreateParams) + else SandboxTerminalCreateParams(**input) + ) + payload = await self._transport.request_json( + "/sandbox/pty", + method="POST", + json_body=params.model_dump(exclude_none=True, by_alias=True), + headers={"content-type": "application/json"}, + ) + return SandboxTerminalHandle( + self._transport, + self._get_connection_info, + _normalize_terminal_status(payload["pty"]), + self._runtime_proxy_override, + ) + + async def get( + self, terminal_id: str, include_output: bool = False + ) -> SandboxTerminalHandle: + payload = await self._transport.request_json( + f"/sandbox/pty/{terminal_id}", + params={"includeOutput": True} if include_output else None, + ) + return SandboxTerminalHandle( + self._transport, + self._get_connection_info, + _normalize_terminal_status(payload["pty"]), + self._runtime_proxy_override, + ) diff --git a/hyperbrowser/client/managers/async_manager/sandboxes/sandbox_transport.py b/hyperbrowser/client/managers/async_manager/sandboxes/sandbox_transport.py new file mode 100644 index 00000000..a3061759 --- /dev/null +++ b/hyperbrowser/client/managers/async_manager/sandboxes/sandbox_transport.py @@ -0,0 +1,251 @@ +import json +from typing import AsyncIterator, Dict, Optional, Union + +import httpx + +from .....sandbox_common import ( + RuntimeConnection, + build_headers, + ensure_response_ok, + normalize_network_error, + parse_json_response, + resolve_runtime_transport_target, +) +from ...sandboxes.shared import _build_query_path 
+ + +class RuntimeTransport: + def __init__( + self, + resolve_connection, + timeout: float = 30.0, + runtime_proxy_override: Optional[str] = None, + ): + self._resolve_connection = resolve_connection + self._timeout = timeout + self._runtime_proxy_override = runtime_proxy_override + + async def request_json( + self, + path: str, + *, + method: str = "GET", + params: Optional[Dict[str, object]] = None, + json_body: Optional[Dict[str, object]] = None, + content: Optional[Union[str, bytes]] = None, + headers: Optional[Dict[str, str]] = None, + ): + response = await self._request( + path, + method=method, + params=params, + json_body=json_body, + content=content, + headers=headers, + ) + return parse_json_response(response, "runtime") + + async def request_bytes( + self, + path: str, + *, + method: str = "GET", + params: Optional[Dict[str, object]] = None, + headers: Optional[Dict[str, str]] = None, + ) -> bytes: + response = await self._request( + path, method=method, params=params, headers=headers + ) + return response.content + + async def stream_sse( + self, path: str, params: Optional[Dict[str, object]] = None + ) -> AsyncIterator[Dict[str, object]]: + client, response = await self._open_stream(path, params=params) + event_name = "message" + event_id = None + data_lines = [] + + def flush_event(): + nonlocal event_name, event_id, data_lines + if not data_lines and event_name == "message" and event_id is None: + return None + + raw_data = "\n".join(data_lines) + data = raw_data + if raw_data: + try: + data = json.loads(raw_data) + except json.JSONDecodeError: + data = raw_data + + event = { + "event": event_name, + "data": data, + "id": event_id, + } + event_name = "message" + event_id = None + data_lines = [] + return event + + try: + async for line in response.aiter_lines(): + if line == "": + event = flush_event() + if event is not None: + yield event + continue + + if line.startswith(":"): + continue + + if ":" in line: + field, value = line.split(":", 1) + 
value = value.lstrip(" ") + else: + field, value = line, "" + + if field == "event": + event_name = value or "message" + elif field == "data": + data_lines.append(value) + elif field == "id": + event_id = value + + trailing = flush_event() + if trailing is not None: + yield trailing + finally: + await response.aclose() + await client.aclose() + + async def _request( + self, + path: str, + *, + method: str = "GET", + params: Optional[Dict[str, object]] = None, + json_body: Optional[Dict[str, object]] = None, + content: Optional[Union[str, bytes]] = None, + headers: Optional[Dict[str, str]] = None, + allow_refresh: bool = True, + ) -> httpx.Response: + connection = await self._resolve_connection(False) + response = await self._send( + connection, + path, + method=method, + params=params, + json_body=json_body, + content=content, + headers=headers, + ) + + if response.status_code == 401 and allow_refresh: + await response.aclose() + refreshed = await self._resolve_connection(True) + retry = await self._send( + refreshed, + path, + method=method, + params=params, + json_body=json_body, + content=content, + headers=headers, + ) + return ensure_response_ok(retry, "runtime") + + return ensure_response_ok(response, "runtime") + + async def _open_stream( + self, + path: str, + *, + params: Optional[Dict[str, object]] = None, + allow_refresh: bool = True, + ): + connection = await self._resolve_connection(False) + client, response = await self._send_stream(connection, path, params=params) + if response.status_code == 401 and allow_refresh: + await response.aclose() + await client.aclose() + refreshed = await self._resolve_connection(True) + client, response = await self._send_stream(refreshed, path, params=params) + + if not response.is_success: + await response.aread() + ensure_response_ok(response, "runtime") + return client, response + + async def _send( + self, + connection: RuntimeConnection, + path: str, + *, + method: str, + params: Optional[Dict[str, object]], + 
json_body: Optional[Dict[str, object]], + content: Optional[Union[str, bytes]], + headers: Optional[Dict[str, str]], + ) -> httpx.Response: + request_path = _build_query_path(path, params) + target = resolve_runtime_transport_target( + connection.base_url, + request_path, + self._runtime_proxy_override, + ) + merged_headers = build_headers(connection.token, headers, target.host_header) + client = httpx.AsyncClient(timeout=self._timeout) + + try: + response = await client.request( + method, + target.url, + headers=merged_headers, + json=json_body, + content=content, + ) + except BaseException as error: + await client.aclose() + raise normalize_network_error( + error, + "runtime", + "Unknown runtime request error", + ) + + await response.aread() + await client.aclose() + return response + + async def _send_stream( + self, + connection: RuntimeConnection, + path: str, + *, + params: Optional[Dict[str, object]], + ): + request_path = _build_query_path(path, params) + target = resolve_runtime_transport_target( + connection.base_url, + request_path, + self._runtime_proxy_override, + ) + headers = build_headers( + connection.token, + {"Accept": "text/event-stream"}, + target.host_header, + ) + client = httpx.AsyncClient(timeout=self._timeout) + + try: + request = client.build_request("GET", target.url, headers=headers) + response = await client.send(request, stream=True) + return client, response + except BaseException as error: + await client.aclose() + raise normalize_network_error( + error, + "runtime", + "Unknown runtime request error", + ) diff --git a/hyperbrowser/client/managers/sandboxes/__init__.py b/hyperbrowser/client/managers/sandboxes/__init__.py new file mode 100644 index 00000000..b36617a0 --- /dev/null +++ b/hyperbrowser/client/managers/sandboxes/__init__.py @@ -0,0 +1,31 @@ +from .shared import ( + DEFAULT_WATCH_TIMEOUT_MS, + _build_query_path, + _build_sandbox_exposed_url, + _copy_model, + _encode_write_data, + _expires_within_buffer, + 
_normalize_event_type, + _normalize_file_info, + _normalize_terminal_output_chunk, + _normalize_terminal_status, + _normalize_websocket_error, + _normalize_write_info, + _relative_watch_name, +) + +__all__ = [ + "DEFAULT_WATCH_TIMEOUT_MS", + "_build_query_path", + "_build_sandbox_exposed_url", + "_copy_model", + "_encode_write_data", + "_expires_within_buffer", + "_normalize_event_type", + "_normalize_file_info", + "_normalize_terminal_output_chunk", + "_normalize_terminal_status", + "_normalize_websocket_error", + "_normalize_write_info", + "_relative_watch_name", +] diff --git a/hyperbrowser/client/managers/sandboxes/shared.py b/hyperbrowser/client/managers/sandboxes/shared.py new file mode 100644 index 00000000..ca4d0e19 --- /dev/null +++ b/hyperbrowser/client/managers/sandboxes/shared.py @@ -0,0 +1,210 @@ +import base64 +import posixpath +from datetime import datetime, timedelta, timezone +from typing import Dict, Optional, Union +from urllib.parse import urlencode, urlsplit, urlunsplit + +from ....exceptions import HyperbrowserError +from ....models.sandbox import ( + SandboxFileInfo, + SandboxFileWriteInfo, + SandboxTerminalStatus, +) +from ....sandbox_common import ( + RUNTIME_SESSION_REFRESH_BUFFER_MS, + normalize_network_error, + parse_error_payload, +) + +DEFAULT_WATCH_TIMEOUT_MS = 60_000 + + +def _copy_model(model): + return model.model_copy(deep=True) + + +def _build_sandbox_exposed_url(runtime, port: int) -> str: + parsed = urlsplit(runtime.base_url) + hostname = parsed.hostname + if not hostname: + return runtime.base_url.rstrip("/") + + exposed_host = f"{port}-{hostname}" + netloc = exposed_host + if parsed.port: + netloc = f"{netloc}:{parsed.port}" + if parsed.username: + credentials = parsed.username + if parsed.password: + credentials = f"{credentials}:{parsed.password}" + netloc = f"{credentials}@{netloc}" + + return urlunsplit( + (parsed.scheme, netloc, parsed.path, parsed.query, parsed.fragment) + ).rstrip("/") + + +def 
_expires_within_buffer(expires_at: Optional[datetime]) -> bool: + if expires_at is None: + return False + if expires_at.tzinfo is None: + expires_at = expires_at.replace(tzinfo=timezone.utc) + threshold = datetime.now(timezone.utc) + timedelta( + milliseconds=RUNTIME_SESSION_REFRESH_BUFFER_MS + ) + return expires_at <= threshold + + +def _build_query_path(path: str, params: Optional[Dict[str, object]] = None) -> str: + if not params: + return path + + filtered = [] + for key, value in params.items(): + if value is None: + continue + filtered.append((key, str(value))) + + if not filtered: + return path + + return f"{path}?{urlencode(filtered)}" + + +def _normalize_websocket_error(error: BaseException) -> HyperbrowserError: + if isinstance(error, HyperbrowserError): + return error + + response = getattr(error, "response", None) + if response is not None: + status_code = getattr(response, "status_code", None) + headers = getattr(response, "headers", {}) or {} + body = getattr(response, "body", b"") + if isinstance(body, memoryview): + body = body.tobytes() + if isinstance(body, bytearray): + body = bytes(body) + if isinstance(body, bytes): + raw_text = body.decode("utf-8", errors="replace") + elif isinstance(body, str): + raw_text = body + else: + raw_text = "" + + message, code, details = parse_error_payload( + raw_text, + f"Runtime websocket request failed: {status_code or 0}", + ) + request_id = None + if isinstance(headers, dict): + request_id = headers.get("x-request-id") or headers.get("request-id") + else: + request_id = headers.get("x-request-id") or headers.get("request-id") + + return HyperbrowserError( + message, + status_code=status_code, + code=code, + request_id=request_id, + retryable=bool(status_code in {429, 502, 503, 504}), + service="runtime", + details=details, + cause=error, + original_error=error if isinstance(error, Exception) else None, + ) + + status_code = getattr(error, "status_code", None) + headers = getattr(error, "headers", None) + if 
status_code is not None: + request_id = None + if headers is not None: + request_id = headers.get("x-request-id") or headers.get("request-id") + return HyperbrowserError( + f"Runtime websocket request failed: {status_code}", + status_code=status_code, + request_id=request_id, + retryable=bool(status_code in {429, 502, 503, 504}), + service="runtime", + cause=error, + original_error=error if isinstance(error, Exception) else None, + ) + + return normalize_network_error( + error, + "runtime", + "Unknown runtime websocket error", + ) + + +def _normalize_file_type(value: Optional[str]) -> Optional[str]: + if not value: + return None + return "dir" if value in {"dir", "directory"} else "file" + + +def _normalize_file_info(entry: Dict[str, object]) -> SandboxFileInfo: + normalized = dict(entry) + normalized["type"] = _normalize_file_type(normalized.get("type")) + return SandboxFileInfo(**normalized) + + +def _normalize_write_info(entry: Dict[str, object]) -> SandboxFileWriteInfo: + normalized = dict(entry) + normalized["type"] = _normalize_file_type(normalized.get("type")) + return SandboxFileWriteInfo(**normalized) + + +def _normalize_event_type(operation: str) -> Optional[str]: + lower = operation.lower() + if "chmod" in lower: + return "chmod" + if "create" in lower: + return "create" + if "remove" in lower or "delete" in lower: + return "remove" + if "rename" in lower: + return "rename" + if "write" in lower: + return "write" + return None + + +def _relative_watch_name(root: str, absolute_path: str) -> str: + relative = posixpath.relpath(absolute_path, root) + if relative in {"", "."}: + return posixpath.basename(absolute_path) + return relative + + +def _encode_write_data(data: Union[str, bytes, bytearray]) -> Dict[str, str]: + if isinstance(data, str): + return { + "data": data, + "encoding": "utf8", + } + return { + "data": base64.b64encode(bytes(data)).decode("ascii"), + "encoding": "base64", + } + + +def _normalize_terminal_output_chunk(entry: Dict[str, object]) 
-> Dict[str, object]: + raw = base64.b64decode(entry["data"]) + return { + "seq": entry["seq"], + "data": raw.decode("utf-8", errors="replace"), + "raw": raw, + "timestamp": entry["timestamp"], + } + + +def _normalize_terminal_status(entry: Dict[str, object]) -> SandboxTerminalStatus: + normalized = dict(entry) + output = normalized.get("output") + if isinstance(output, list): + normalized["output"] = [ + _normalize_terminal_output_chunk(chunk) + for chunk in output + if isinstance(chunk, dict) + ] + return SandboxTerminalStatus(**normalized) diff --git a/hyperbrowser/client/managers/sync_manager/sandbox.py b/hyperbrowser/client/managers/sync_manager/sandbox.py index 79cba4a3..30ba7639 100644 --- a/hyperbrowser/client/managers/sync_manager/sandbox.py +++ b/hyperbrowser/client/managers/sync_manager/sandbox.py @@ -1,1538 +1,64 @@ -import base64 -import io -import json -import posixpath -import socket -import threading -from datetime import datetime, timedelta, timezone -from typing import Callable, Dict, Iterator, List, Optional, Union -from urllib.parse import urlencode, urlsplit, urlunsplit - -import httpx -from websockets.exceptions import ConnectionClosed -from websockets.sync.client import connect as sync_ws_connect +from typing import Dict, Optional, Union from ....exceptions import HyperbrowserError from ....models.sandbox import ( CreateSandboxParams, SandboxDetail, SandboxExecParams, - SandboxFileChmodParams, - SandboxFileChownParams, - SandboxFileCopyParams, - SandboxFileDeleteParams, - SandboxFileInfo, - SandboxFileListOptions, - SandboxFileReadOptions, - SandboxFileReadResult, - SandboxFileSystemEvent, - SandboxFileWriteEntry, - SandboxFileWriteInfo, - SandboxFileTransferResult, - SandboxFileWatchDoneEvent, - SandboxFileWatchEventMessage, - SandboxFileWatchStatus, - SandboxMemorySnapshotParams, - SandboxMemorySnapshotResult, SandboxExposeParams, SandboxExposeResult, - SandboxPresignFileParams, - SandboxPresignedUrl, - SandboxProcessExitEvent, - 
SandboxProcessListResponse, - SandboxProcessOutputEvent, - SandboxProcessResult, - SandboxProcessStdinParams, - SandboxProcessSummary, + SandboxMemorySnapshotParams, + SandboxMemorySnapshotResult, SandboxRuntimeSession, - SandboxTerminalCreateParams, - SandboxTerminalExitEvent, - SandboxTerminalOutputEvent, - SandboxTerminalStatus, - SandboxTerminalWaitParams, StartSandboxFromSnapshotParams, ) from ....models.session import BasicResponse from ....sandbox_common import ( - RUNTIME_SESSION_REFRESH_BUFFER_MS, RuntimeConnection, - build_headers, ensure_response_ok, normalize_network_error, - parse_error_payload, parse_json_response, - resolve_runtime_transport_target, - to_websocket_transport_target, ) - -DEFAULT_PROCESS_KILL_WAIT_SECONDS = 5.0 -DEFAULT_TERMINAL_KILL_WAIT_SECONDS = 5.0 -DEFAULT_WATCH_TIMEOUT_MS = 60_000 - - -def _copy_model(model): - return model.model_copy(deep=True) - - -def _build_sandbox_exposed_url(runtime, port: int) -> str: - parsed = urlsplit(runtime.base_url) - hostname = parsed.hostname - if not hostname: - return runtime.base_url.rstrip("/") - - exposed_host = f"{port}-{hostname}" - netloc = exposed_host - if parsed.port: - netloc = f"{netloc}:{parsed.port}" - if parsed.username: - credentials = parsed.username - if parsed.password: - credentials = f"{credentials}:{parsed.password}" - netloc = f"{credentials}@{netloc}" - - return urlunsplit( - (parsed.scheme, netloc, parsed.path, parsed.query, parsed.fragment) - ).rstrip("/") - - -def _expires_within_buffer(expires_at: Optional[datetime]) -> bool: - if expires_at is None: - return False - if expires_at.tzinfo is None: - expires_at = expires_at.replace(tzinfo=timezone.utc) - threshold = datetime.now(timezone.utc) + timedelta( - milliseconds=RUNTIME_SESSION_REFRESH_BUFFER_MS - ) - return expires_at <= threshold - - -def _build_query_path(path: str, params: Optional[Dict[str, object]] = None) -> str: - if not params: - return path - - filtered = [] - for key, value in params.items(): - if value 
is None: - continue - filtered.append((key, str(value))) - - if not filtered: - return path - - return f"{path}?{urlencode(filtered)}" - - -def _normalize_websocket_error(error: BaseException) -> HyperbrowserError: - if isinstance(error, HyperbrowserError): - return error - - response = getattr(error, "response", None) - if response is not None: - status_code = getattr(response, "status_code", None) - headers = getattr(response, "headers", {}) or {} - body = getattr(response, "body", b"") - if isinstance(body, memoryview): - body = body.tobytes() - if isinstance(body, bytearray): - body = bytes(body) - if isinstance(body, bytes): - raw_text = body.decode("utf-8", errors="replace") - elif isinstance(body, str): - raw_text = body - else: - raw_text = "" - - message, code, details = parse_error_payload( - raw_text, - f"Runtime websocket request failed: {status_code or 0}", - ) - request_id = None - if isinstance(headers, dict): - request_id = headers.get("x-request-id") or headers.get("request-id") - else: - request_id = headers.get("x-request-id") or headers.get("request-id") - - return HyperbrowserError( - message, - status_code=status_code, - code=code, - request_id=request_id, - retryable=bool(status_code in {429, 502, 503, 504}), - service="runtime", - details=details, - cause=error, - original_error=error if isinstance(error, Exception) else None, - ) - - status_code = getattr(error, "status_code", None) - headers = getattr(error, "headers", None) - if status_code is not None: - request_id = None - if headers is not None: - request_id = headers.get("x-request-id") or headers.get("request-id") - return HyperbrowserError( - f"Runtime websocket request failed: {status_code}", - status_code=status_code, - request_id=request_id, - retryable=bool(status_code in {429, 502, 503, 504}), - service="runtime", - cause=error, - original_error=error if isinstance(error, Exception) else None, - ) - - return normalize_network_error( - error, - "runtime", - "Unknown runtime 
websocket error", - ) - - -def _normalize_file_type(value: Optional[str]) -> Optional[str]: - if not value: - return None - return "dir" if value in {"dir", "directory"} else "file" - - -def _normalize_file_info(entry: Dict[str, object]) -> SandboxFileInfo: - normalized = dict(entry) - normalized["type"] = _normalize_file_type(normalized.get("type")) - return SandboxFileInfo(**normalized) - - -def _normalize_write_info(entry: Dict[str, object]) -> SandboxFileWriteInfo: - normalized = dict(entry) - normalized["type"] = _normalize_file_type(normalized.get("type")) - return SandboxFileWriteInfo(**normalized) - - -def _normalize_event_type(operation: str) -> Optional[str]: - lower = operation.lower() - if "chmod" in lower: - return "chmod" - if "create" in lower: - return "create" - if "remove" in lower or "delete" in lower: - return "remove" - if "rename" in lower: - return "rename" - if "write" in lower: - return "write" - return None - - -def _relative_watch_name(root: str, absolute_path: str) -> str: - relative = posixpath.relpath(absolute_path, root) - if relative in {"", "."}: - return posixpath.basename(absolute_path) - return relative - - -def _encode_write_data(data: Union[str, bytes, bytearray]) -> Dict[str, str]: - if isinstance(data, str): - return { - "data": data, - "encoding": "utf8", - } - return { - "data": base64.b64encode(bytes(data)).decode("ascii"), - "encoding": "base64", - } - - -def _normalize_terminal_output_chunk(entry: Dict[str, object]) -> Dict[str, object]: - raw = base64.b64decode(entry["data"]) - return { - "seq": entry["seq"], - "data": raw.decode("utf-8", errors="replace"), - "raw": raw, - "timestamp": entry["timestamp"], - } - - -def _normalize_terminal_status(entry: Dict[str, object]) -> SandboxTerminalStatus: - normalized = dict(entry) - output = normalized.get("output") - if isinstance(output, list): - normalized["output"] = [ - _normalize_terminal_output_chunk(chunk) - for chunk in output - if isinstance(chunk, dict) - ] - return 
SandboxTerminalStatus(**normalized) - - -class RuntimeTransport: - def __init__( - self, - resolve_connection, - timeout: float = 30.0, - runtime_proxy_override: Optional[str] = None, - ): - self._resolve_connection = resolve_connection - self._timeout = timeout - self._runtime_proxy_override = runtime_proxy_override - - def request_json( - self, - path: str, - *, - method: str = "GET", - params: Optional[Dict[str, object]] = None, - json_body: Optional[Dict[str, object]] = None, - content: Optional[Union[str, bytes]] = None, - headers: Optional[Dict[str, str]] = None, - ): - response = self._request( - path, - method=method, - params=params, - json_body=json_body, - content=content, - headers=headers, - ) - return parse_json_response(response, "runtime") - - def request_bytes( - self, - path: str, - *, - method: str = "GET", - params: Optional[Dict[str, object]] = None, - headers: Optional[Dict[str, str]] = None, - ) -> bytes: - response = self._request(path, method=method, params=params, headers=headers) - return response.content - - def stream_sse( - self, path: str, params: Optional[Dict[str, object]] = None - ) -> Iterator[Dict[str, object]]: - client, response = self._open_stream(path, params=params) - event_name = "message" - event_id = None - data_lines = [] - - def flush_event(): - nonlocal event_name, event_id, data_lines - if not data_lines and event_name == "message" and event_id is None: - return None - - raw_data = "\n".join(data_lines) - data = raw_data - if raw_data: - try: - data = json.loads(raw_data) - except json.JSONDecodeError: - data = raw_data - - event = { - "event": event_name, - "data": data, - "id": event_id, - } - event_name = "message" - event_id = None - data_lines = [] - return event - - try: - for line in response.iter_lines(): - if line == "": - event = flush_event() - if event is not None: - yield event - continue - - if line.startswith(":"): - continue - - if ":" in line: - field, value = line.split(":", 1) - value = 
value.lstrip(" ") - else: - field, value = line, "" - - if field == "event": - event_name = value or "message" - elif field == "data": - data_lines.append(value) - elif field == "id": - event_id = value - - trailing = flush_event() - if trailing is not None: - yield trailing - finally: - response.close() - client.close() - - def _request( - self, - path: str, - *, - method: str = "GET", - params: Optional[Dict[str, object]] = None, - json_body: Optional[Dict[str, object]] = None, - content: Optional[Union[str, bytes]] = None, - headers: Optional[Dict[str, str]] = None, - allow_refresh: bool = True, - ) -> httpx.Response: - connection = self._resolve_connection(False) - response = self._send( - connection, - path, - method=method, - params=params, - json_body=json_body, - content=content, - headers=headers, - ) - - if response.status_code == 401 and allow_refresh: - response.close() - refreshed = self._resolve_connection(True) - retry = self._send( - refreshed, - path, - method=method, - params=params, - json_body=json_body, - content=content, - headers=headers, - ) - return ensure_response_ok(retry, "runtime") - - return ensure_response_ok(response, "runtime") - - def _open_stream( - self, - path: str, - *, - params: Optional[Dict[str, object]] = None, - allow_refresh: bool = True, - ): - connection = self._resolve_connection(False) - client, response = self._send_stream(connection, path, params=params) - if response.status_code == 401 and allow_refresh: - response.close() - client.close() - refreshed = self._resolve_connection(True) - client, response = self._send_stream(refreshed, path, params=params) - - if not response.is_success: - response.read() - ensure_response_ok(response, "runtime") - return client, response - - def _send( - self, - connection: RuntimeConnection, - path: str, - *, - method: str, - params: Optional[Dict[str, object]], - json_body: Optional[Dict[str, object]], - content: Optional[Union[str, bytes]], - headers: Optional[Dict[str, str]], - ) 
-> httpx.Response: - request_path = _build_query_path(path, params) - target = resolve_runtime_transport_target( - connection.base_url, - request_path, - self._runtime_proxy_override, - ) - merged_headers = build_headers(connection.token, headers, target.host_header) - client = httpx.Client(timeout=self._timeout) - - try: - response = client.request( - method, - target.url, - headers=merged_headers, - json=json_body, - content=content, - ) - except BaseException as error: - client.close() - raise normalize_network_error( - error, - "runtime", - "Unknown runtime request error", - ) - - response.read() - client.close() - return response - - def _send_stream( - self, - connection: RuntimeConnection, - path: str, - *, - params: Optional[Dict[str, object]], - ): - request_path = _build_query_path(path, params) - target = resolve_runtime_transport_target( - connection.base_url, - request_path, - self._runtime_proxy_override, - ) - headers = build_headers( - connection.token, - {"Accept": "text/event-stream"}, - target.host_header, - ) - client = httpx.Client(timeout=self._timeout) - - try: - request = client.build_request("GET", target.url, headers=headers) - response = client.send(request, stream=True) - return client, response - except BaseException as error: - client.close() - raise normalize_network_error( - error, - "runtime", - "Unknown runtime request error", - ) - - -class SandboxProcessHandle: - def __init__(self, transport: RuntimeTransport, summary: SandboxProcessSummary): - self._transport = transport - self._summary = summary - - @property - def id(self) -> str: - return self._summary.id - - @property - def status(self) -> str: - return self._summary.status - - def to_dict(self): - return self._summary.model_dump() - - def to_json(self): - return self.to_dict() - - def refresh(self) -> "SandboxProcessHandle": - payload = self._transport.request_json(f"/sandbox/processes/{self.id}") - self._summary = SandboxProcessSummary(**payload["process"]) - return self - 
- def wait(self, timeout_ms: Optional[int] = None, timeout_sec: Optional[int] = None): - payload = self._transport.request_json( - f"/sandbox/processes/{self.id}/wait", - method="POST", - json_body={ - "timeoutMs": timeout_ms, - "timeout_sec": timeout_sec, - }, - headers={"content-type": "application/json"}, - ) - result = SandboxProcessResult(**payload["result"]) - self._summary = SandboxProcessSummary( - id=result.id, - status=result.status, - command=self._summary.command, - args=self._summary.args, - cwd=self._summary.cwd, - pid=self._summary.pid, - exit_code=result.exit_code, - started_at=result.started_at, - completed_at=result.completed_at, - ) - return result - - def signal(self, signal: str) -> None: - payload = self._transport.request_json( - f"/sandbox/processes/{self.id}/signal", - method="POST", - json_body={"signal": signal}, - headers={"content-type": "application/json"}, - ) - self._summary = SandboxProcessSummary(**payload["process"]) - - def kill( - self, - timeout_ms: Optional[int] = None, - timeout_sec: Optional[int] = None, - ) -> SandboxProcessResult: - payload = self._transport.request_json( - f"/sandbox/processes/{self.id}", - method="DELETE", - ) - self._summary = SandboxProcessSummary(**payload["process"]) - if timeout_ms is None and timeout_sec is None: - timeout_ms = int(DEFAULT_PROCESS_KILL_WAIT_SECONDS * 1000) - return self.wait(timeout_ms=timeout_ms, timeout_sec=timeout_sec) - - def write_stdin( - self, - data: Optional[Union[str, bytes, bytearray, SandboxProcessStdinParams]] = None, - *, - encoding: Optional[str] = None, - eof: Optional[bool] = None, - ) -> None: - if isinstance(data, SandboxProcessStdinParams): - params = data - else: - params = SandboxProcessStdinParams(data=data, encoding=encoding, eof=eof) - - payload: Dict[str, object] = {"eof": params.eof} - if params.data is not None: - if isinstance(params.data, str): - payload["data"] = params.data - payload["encoding"] = params.encoding or "utf8" - else: - payload["data"] = 
base64.b64encode(bytes(params.data)).decode("ascii") - payload["encoding"] = "base64" - - self._transport.request_json( - f"/sandbox/processes/{self.id}/stdin", - method="POST", - json_body=payload, - headers={"content-type": "application/json"}, - ) - - def stream(self, from_seq: Optional[int] = None): - params = {"from_seq": from_seq} if from_seq and from_seq > 0 else None - for event in self._transport.stream_sse( - f"/sandbox/processes/{self.id}/stream", - params=params, - ): - event_type = event["event"] - data = event["data"] - if event_type == "output": - yield SandboxProcessOutputEvent( - type=data["stream"], - seq=data["seq"], - data=data["data"], - timestamp=data["timestamp"], - ) - elif event_type == "done": - yield SandboxProcessExitEvent( - type="exit", - result=SandboxProcessResult(**data), - ) - - def result(self) -> SandboxProcessResult: - return self.wait() - - -class SandboxProcessesApi: - def __init__(self, transport: RuntimeTransport): - self._transport = transport - - def exec(self, input: Union[SandboxExecParams, Dict[str, object]]) -> SandboxProcessResult: - params = input if isinstance(input, SandboxExecParams) else SandboxExecParams(**input) - payload = self._transport.request_json( - "/sandbox/exec", - method="POST", - json_body=params.model_dump(exclude_none=True, by_alias=True), - headers={"content-type": "application/json"}, - ) - return SandboxProcessResult(**payload["result"]) - - def start(self, input: Union[SandboxExecParams, Dict[str, object]]) -> SandboxProcessHandle: - params = input if isinstance(input, SandboxExecParams) else SandboxExecParams(**input) - payload = self._transport.request_json( - "/sandbox/processes", - method="POST", - json_body=params.model_dump(exclude_none=True, by_alias=True), - headers={"content-type": "application/json"}, - ) - return SandboxProcessHandle( - self._transport, - SandboxProcessSummary(**payload["process"]), - ) - - def get(self, process_id: str) -> SandboxProcessHandle: - payload = 
self._transport.request_json(f"/sandbox/processes/{process_id}") - return SandboxProcessHandle( - self._transport, - SandboxProcessSummary(**payload["process"]), - ) - - def list( - self, - *, - status=None, - limit: Optional[int] = None, - cursor: Optional[Union[str, int]] = None, - created_after: Optional[int] = None, - created_before: Optional[int] = None, - ) -> SandboxProcessListResponse: - normalized_status = None - if isinstance(status, list): - normalized_status = ",".join(status) if status else None - else: - normalized_status = status - - payload = self._transport.request_json( - "/sandbox/processes", - params={ - "status": normalized_status, - "limit": limit, - "cursor": cursor, - "created_after": created_after, - "created_before": created_before, - }, - ) - return SandboxProcessListResponse(**payload) - - -class SandboxFileWatchHandle: - def __init__( - self, - transport: RuntimeTransport, - get_connection_info, - status, - runtime_proxy_override: Optional[str] = None, - ): - self._transport = transport - self._get_connection_info = get_connection_info - self._status = status - self._runtime_proxy_override = runtime_proxy_override - - @property - def id(self) -> str: - return self._status.id - - @property - def current(self) -> SandboxFileWatchStatus: - return _copy_model(self._status) - - def to_dict(self): - return self._status.model_dump() - - def to_json(self): - return self.to_dict() - - def refresh(self, include_events: bool = False) -> "SandboxFileWatchHandle": - params = {"includeEvents": True} if include_events else None - payload = self._transport.request_json( - f"/sandbox/files/watch/{self.id}", - params=params, - ) - self._status = SandboxFileWatchStatus(**payload["watch"]) - return self - - def stop(self) -> None: - self._transport.request_json( - f"/sandbox/files/watch/{self.id}", - method="DELETE", - ) - self._status = self._status.model_copy( - update={ - "active": False, - "stopped_at": self._status.stopped_at or 
int(datetime.now().timestamp() * 1000), - } - ) - - def events( - self, - *, - cursor: Optional[int] = None, - route: str = "ws", - ): - connection = self._get_connection_info() - query = urlencode( - [ - ("sessionId", connection.sandbox_id), - *([("cursor", str(cursor))] if cursor is not None else []), - ] - ) - target = to_websocket_transport_target( - connection.base_url, - f"/sandbox/files/watch/{self.id}/{route}?{query}", - self._runtime_proxy_override, - ) - headers = build_headers(connection.token, host_header=target.host_header) - connect_kwargs = {} - if target.connect_host is not None and target.connect_port is not None: - connect_kwargs["sock"] = socket.create_connection( - (target.connect_host, target.connect_port), - timeout=self._transport._timeout, - ) - try: - websocket = sync_ws_connect( - target.url, - additional_headers=headers, - open_timeout=self._transport._timeout, - **connect_kwargs, - ) - except BaseException as error: - raise _normalize_websocket_error(error) - - try: - while True: - try: - message = websocket.recv() - except ConnectionClosed: - break - - if isinstance(message, bytes): - message = message.decode("utf-8") - parsed = json.loads(message) - if parsed["type"] == "event": - event = SandboxFileWatchEventMessage( - type="event", - event=parsed["event"], - ) - self._status = self._status.model_copy( - update={ - "oldest_seq": self._status.oldest_seq or event.event.seq, - "last_seq": max(self._status.last_seq, event.event.seq), - } - ) - yield event - elif parsed["type"] == "done": - self._status = SandboxFileWatchStatus(**parsed["status"]) - yield SandboxFileWatchDoneEvent(type="done", status=self.current) - break - except GeneratorExit: - raise - except BaseException as error: - raise _normalize_websocket_error(error) - finally: - websocket.close() - - -class SandboxWatchDirHandle: - def __init__( - self, - watch: SandboxFileWatchHandle, - on_event: Callable[[SandboxFileSystemEvent], object], - *, - on_exit: 
Optional[Callable[[Optional[BaseException]], object]] = None, - timeout_ms: Optional[int] = None, - ): - self._watch = watch - self._root_path = watch.current.path - self._on_event = on_event - self._on_exit = on_exit - self._thread = threading.Thread(target=self._run, daemon=True) - self._timer = None - self._stopped = threading.Event() - self._exit_notified = False - - effective_timeout = DEFAULT_WATCH_TIMEOUT_MS if timeout_ms is None else timeout_ms - if effective_timeout > 0: - self._timer = threading.Timer(effective_timeout / 1000.0, self.stop) - self._timer.daemon = True - self._timer.start() - - self._thread.start() - - def stop(self) -> None: - if self._stopped.is_set(): - return - self._stopped.set() - - if self._timer is not None: - self._timer.cancel() - self._timer = None - - try: - self._watch.stop() - except HyperbrowserError as error: - if error.status_code not in {404, 409}: - raise - - if threading.current_thread() is not self._thread: - self._thread.join() - - def _run(self) -> None: - exit_error = None - try: - for message in self._watch.events(): - event_type = _normalize_event_type(message.event.op) - if not event_type: - continue - self._on_event( - SandboxFileSystemEvent( - type=event_type, - name=_relative_watch_name(self._root_path, message.event.path), - ) - ) - except BaseException as error: - exit_error = error - finally: - if self._timer is not None: - self._timer.cancel() - self._timer = None - if not self._exit_notified: - self._exit_notified = True - if self._on_exit is not None: - self._on_exit(exit_error) - - -class SandboxFilesApi: - def __init__( - self, - transport: RuntimeTransport, - get_connection_info, - runtime_proxy_override: Optional[str] = None, - ): - self._transport = transport - self._get_connection_info = get_connection_info - self._runtime_proxy_override = runtime_proxy_override - - def exists(self, path: str) -> bool: - try: - self.get_info(path) - return True - except HyperbrowserError as error: - if 
error.status_code == 404: - return False - if "not found" in str(error).lower() or "no such file" in str(error).lower(): - return False - raise - - def get_info(self, path: str) -> SandboxFileInfo: - payload = self._transport.request_json( - "/sandbox/files/stat", - params={"path": path}, - ) - return _normalize_file_info(payload["file"]) - - def stat(self, path: str) -> SandboxFileInfo: - return self.get_info(path) - - def list( - self, - path: str, - *, - depth: Optional[int] = None, - ) -> List[SandboxFileInfo]: - depth = 1 if depth is None else depth - if depth < 1: - raise ValueError("depth should be at least one") - - payload = self._transport.request_json( - "/sandbox/files", - params={ - "path": path, - "depth": depth, - }, - ) - return [_normalize_file_info(entry) for entry in payload.get("entries", [])] - - def read( - self, - path: str, - *, - offset: Optional[int] = None, - length: Optional[int] = None, - format: str = "text", - ): - if format == "text": - return self._read_wire(path, offset=offset, length=length, encoding="utf8").content - - response = self._read_wire(path, offset=offset, length=length, encoding="base64") - content = base64.b64decode(response.content) - if format in {"bytes", "blob"}: - return content - if format == "stream": - return io.BytesIO(content) - raise ValueError("format should be one of: text, bytes, blob, stream") - - def read_text( - self, - path: str, - *, - offset: Optional[int] = None, - length: Optional[int] = None, - ) -> str: - return self.read(path, offset=offset, length=length, format="text") - - def read_bytes( - self, - path: str, - *, - offset: Optional[int] = None, - length: Optional[int] = None, - ) -> bytes: - return self.read(path, offset=offset, length=length, format="bytes") - - def write( - self, - path_or_files: Union[str, List[Union[SandboxFileWriteEntry, Dict[str, object]]]], - data: Optional[Union[str, bytes, bytearray]] = None, - ): - if isinstance(path_or_files, str): - if data is None: - raise 
ValueError("Path and data are required") - payload = self._transport.request_json( - "/sandbox/files/write", - method="POST", - json_body={ - "path": path_or_files, - **_encode_write_data(data), - }, - headers={"content-type": "application/json"}, - ) - return _normalize_write_info(payload["files"][0]) - - if not path_or_files: - return [] - - encoded_files = [] - for entry in path_or_files: - normalized = ( - entry - if isinstance(entry, SandboxFileWriteEntry) - else SandboxFileWriteEntry(**entry) - ) - encoded_files.append( - { - "path": normalized.path, - **_encode_write_data(normalized.data), - } - ) - - payload = self._transport.request_json( - "/sandbox/files/write", - method="POST", - json_body={"files": encoded_files}, - headers={"content-type": "application/json"}, - ) - return [_normalize_write_info(entry) for entry in payload.get("files", [])] - - def write_text( - self, - path: str, - data: str, - *, - append: Optional[bool] = None, - mode: Optional[str] = None, - ): - return self._write_single( - path, - data, - append=append, - mode=mode, - encoding="utf8", - ) - - def write_bytes( - self, - path: str, - data: bytes, - *, - append: Optional[bool] = None, - mode: Optional[str] = None, - ): - return self._write_single( - path, - base64.b64encode(data).decode("ascii"), - append=append, - mode=mode, - encoding="base64", - ) - - def upload(self, path: str, data: Union[str, bytes, bytearray]): - body = data.encode("utf-8") if isinstance(data, str) else bytes(data) - payload = self._transport.request_json( - "/sandbox/files/upload", - method="PUT", - params={"path": path}, - content=body, - ) - return SandboxFileTransferResult(**payload) - - def download(self, path: str) -> bytes: - return self._transport.request_bytes( - "/sandbox/files/download", - params={"path": path}, - ) - - def make_dir( - self, - path: str, - *, - parents: Optional[bool] = None, - mode: Optional[str] = None, - ) -> bool: - payload = self._transport.request_json( - 
"/sandbox/files/mkdir", - method="POST", - json_body={ - "path": path, - "parents": parents, - "mode": mode, - }, - headers={"content-type": "application/json"}, - ) - return bool(payload.get("created")) - - def mkdir( - self, - path: str, - *, - parents: Optional[bool] = None, - mode: Optional[str] = None, - ) -> bool: - return self.make_dir(path, parents=parents, mode=mode) - - def rename(self, old_path: str, new_path: str) -> SandboxFileInfo: - payload = self._transport.request_json( - "/sandbox/files/move", - method="POST", - json_body={ - "from": old_path, - "to": new_path, - }, - headers={"content-type": "application/json"}, - ) - return _normalize_file_info(payload["entry"]) - - def move( - self, - *, - source: str, - destination: str, - overwrite: Optional[bool] = None, - ) -> SandboxFileInfo: - return self.rename(source, destination) - - def remove(self, path: str, *, recursive: Optional[bool] = None) -> None: - self._transport.request_json( - "/sandbox/files/delete", - method="POST", - json_body=SandboxFileDeleteParams( - path=path, - recursive=recursive, - ).model_dump(exclude_none=True), - headers={"content-type": "application/json"}, - ) - - def delete(self, path: str, *, recursive: Optional[bool] = None) -> None: - self.remove(path, recursive=recursive) - - def copy( - self, - params: Optional[Union[SandboxFileCopyParams, Dict[str, object]]] = None, - *, - source: Optional[str] = None, - destination: Optional[str] = None, - recursive: Optional[bool] = None, - overwrite: Optional[bool] = None, - ) -> SandboxFileInfo: - if params is None: - normalized = SandboxFileCopyParams( - source=source, - destination=destination, - recursive=recursive, - overwrite=overwrite, - ) - elif isinstance(params, SandboxFileCopyParams): - normalized = params - else: - normalized = SandboxFileCopyParams(**params) - - payload = self._transport.request_json( - "/sandbox/files/copy", - method="POST", - json_body={ - "from": normalized.source, - "to": normalized.destination, - 
"recursive": normalized.recursive, - "overwrite": normalized.overwrite, - }, - headers={"content-type": "application/json"}, - ) - return _normalize_file_info(payload["entry"]) - - def chmod( - self, - params: Optional[Union[SandboxFileChmodParams, Dict[str, object]]] = None, - *, - path: Optional[str] = None, - mode: Optional[str] = None, - recursive: Optional[bool] = None, - ) -> None: - normalized = ( - params - if isinstance(params, SandboxFileChmodParams) - else SandboxFileChmodParams( - **(params or {"path": path, "mode": mode, "recursive": recursive}) - ) - ) - self._transport.request_json( - "/sandbox/files/chmod", - method="POST", - json_body=normalized.model_dump(exclude_none=True), - headers={"content-type": "application/json"}, - ) - - def chown( - self, - params: Optional[Union[SandboxFileChownParams, Dict[str, object]]] = None, - *, - path: Optional[str] = None, - uid: Optional[int] = None, - gid: Optional[int] = None, - recursive: Optional[bool] = None, - ) -> None: - normalized = ( - params - if isinstance(params, SandboxFileChownParams) - else SandboxFileChownParams( - **( - params - or { - "path": path, - "uid": uid, - "gid": gid, - "recursive": recursive, - } - ) - ) - ) - self._transport.request_json( - "/sandbox/files/chown", - method="POST", - json_body=normalized.model_dump(exclude_none=True), - headers={"content-type": "application/json"}, - ) - - def watch(self, path: str, *, recursive: Optional[bool] = None): - payload = self._transport.request_json( - "/sandbox/files/watch", - method="POST", - json_body={ - "path": path, - "recursive": recursive, - }, - headers={"content-type": "application/json"}, - ) - return SandboxFileWatchHandle( - self._transport, - self._get_connection_info, - SandboxFileWatchStatus(**payload["watch"]), - self._runtime_proxy_override, - ) - - def watch_dir( - self, - path: str, - on_event: Callable[[SandboxFileSystemEvent], object], - *, - recursive: Optional[bool] = None, - timeout_ms: Optional[int] = None, - 
on_exit: Optional[Callable[[Optional[BaseException]], object]] = None, - ) -> SandboxWatchDirHandle: - return SandboxWatchDirHandle( - self.watch(path, recursive=recursive), - on_event, - on_exit=on_exit, - timeout_ms=timeout_ms, - ) - - def get_watch( - self, watch_id: str, include_events: bool = False - ) -> SandboxFileWatchHandle: - payload = self._transport.request_json( - f"/sandbox/files/watch/{watch_id}", - params={"includeEvents": True} if include_events else None, - ) - return SandboxFileWatchHandle( - self._transport, - self._get_connection_info, - SandboxFileWatchStatus(**payload["watch"]), - self._runtime_proxy_override, - ) - - def upload_url( - self, - path: str, - *, - expires_in_seconds: Optional[int] = None, - one_time: Optional[bool] = None, - ) -> SandboxPresignedUrl: - payload = self._transport.request_json( - "/sandbox/files/presign-upload", - method="POST", - json_body=SandboxPresignFileParams( - path=path, - expires_in_seconds=expires_in_seconds, - one_time=one_time, - ).model_dump(exclude_none=True, by_alias=True), - headers={"content-type": "application/json"}, - ) - return SandboxPresignedUrl(**payload) - - def download_url( - self, - path: str, - *, - expires_in_seconds: Optional[int] = None, - one_time: Optional[bool] = None, - ) -> SandboxPresignedUrl: - payload = self._transport.request_json( - "/sandbox/files/presign-download", - method="POST", - json_body=SandboxPresignFileParams( - path=path, - expires_in_seconds=expires_in_seconds, - one_time=one_time, - ).model_dump(exclude_none=True, by_alias=True), - headers={"content-type": "application/json"}, - ) - return SandboxPresignedUrl(**payload) - - def _read_wire( - self, - path: str, - *, - offset: Optional[int] = None, - length: Optional[int] = None, - encoding: str, - ) -> SandboxFileReadResult: - payload = self._transport.request_json( - "/sandbox/files/read", - method="POST", - json_body={ - "path": path, - "offset": offset, - "length": length, - "encoding": encoding, - }, - 
headers={"content-type": "application/json"}, - ) - return SandboxFileReadResult(**payload) - - def _write_single( - self, - path: str, - data: str, - *, - append: Optional[bool] = None, - mode: Optional[str] = None, - encoding: str, - ): - payload = self._transport.request_json( - "/sandbox/files/write", - method="POST", - json_body={ - "path": path, - "data": data, - "append": append, - "mode": mode, - "encoding": encoding, - }, - headers={"content-type": "application/json"}, - ) - return _normalize_write_info(payload["files"][0]) - - -class SandboxTerminalConnection: - def __init__(self, websocket): - self._websocket = websocket - - def events(self): - while True: - try: - message = self._websocket.recv() - except ConnectionClosed: - break - - if isinstance(message, bytes): - message = message.decode("utf-8") - parsed = json.loads(message) - if parsed["type"] == "output": - normalized = _normalize_terminal_output_chunk(parsed) - yield SandboxTerminalOutputEvent( - type="output", - **normalized, - ) - elif parsed["type"] == "exit": - yield SandboxTerminalExitEvent( - type="exit", - status=_normalize_terminal_status(parsed["status"]), - ) - - def write(self, data: Union[str, bytes, bytearray]) -> None: - payload = { - "type": "input", - "data": data if isinstance(data, str) else base64.b64encode(bytes(data)).decode("ascii"), - } - if not isinstance(data, str): - payload["encoding"] = "base64" - self._websocket.send(json.dumps(payload)) - - def resize(self, rows: int, cols: int) -> None: - self._websocket.send( - json.dumps( - { - "type": "resize", - "rows": rows, - "cols": cols, - } - ) - ) - - def close(self) -> None: - self._websocket.close() - - -class SandboxTerminalHandle: - def __init__( - self, - transport: RuntimeTransport, - get_connection_info, - status, - runtime_proxy_override: Optional[str] = None, - ): - self._transport = transport - self._get_connection_info = get_connection_info - self._status = status - self._runtime_proxy_override = 
runtime_proxy_override - - @property - def id(self) -> str: - return self._status.id - - @property - def current(self) -> SandboxTerminalStatus: - return _copy_model(self._status) - - def to_dict(self): - return self._status.model_dump() - - def to_json(self): - return self.to_dict() - - def refresh(self, include_output: bool = False) -> "SandboxTerminalHandle": - payload = self._transport.request_json( - f"/sandbox/pty/{self.id}", - params={"includeOutput": True} if include_output else None, - ) - self._status = _normalize_terminal_status(payload["pty"]) - return self - - def wait( - self, - timeout_ms: Optional[int] = None, - include_output: Optional[bool] = None, - ) -> SandboxTerminalStatus: - payload = self._transport.request_json( - f"/sandbox/pty/{self.id}/wait", - method="POST", - json_body=SandboxTerminalWaitParams( - timeout_ms=timeout_ms, - include_output=include_output, - ).model_dump(exclude_none=True, by_alias=True), - headers={"content-type": "application/json"}, - ) - self._status = _normalize_terminal_status(payload["pty"]) - return self.current - - def signal(self, signal: Optional[str] = None) -> SandboxTerminalStatus: - payload = self._transport.request_json( - f"/sandbox/pty/{self.id}/kill", - method="POST", - json_body={"signal": signal}, - headers={"content-type": "application/json"}, - ) - self._status = _normalize_terminal_status(payload["pty"]) - return self.current - - def kill( - self, - signal: Optional[str] = None, - *, - timeout_ms: Optional[int] = None, - ) -> SandboxTerminalStatus: - self.signal(signal) - if timeout_ms is None: - timeout_ms = int(DEFAULT_TERMINAL_KILL_WAIT_SECONDS * 1000) - return self.wait(timeout_ms=timeout_ms) - - def resize(self, rows: int, cols: int) -> SandboxTerminalStatus: - payload = self._transport.request_json( - f"/sandbox/pty/{self.id}/resize", - method="POST", - json_body={"rows": rows, "cols": cols}, - headers={"content-type": "application/json"}, - ) - self._status = 
_normalize_terminal_status(payload["pty"]) - return self.current - - def attach(self) -> SandboxTerminalConnection: - connection = self._get_connection_info() - target = to_websocket_transport_target( - connection.base_url, - f"/sandbox/pty/{self.id}/ws?sessionId={connection.sandbox_id}", - self._runtime_proxy_override, - ) - headers = build_headers(connection.token, host_header=target.host_header) - connect_kwargs = {} - if target.connect_host is not None and target.connect_port is not None: - connect_kwargs["sock"] = socket.create_connection( - (target.connect_host, target.connect_port), - timeout=self._transport._timeout, - ) - - try: - websocket = sync_ws_connect( - target.url, - additional_headers=headers, - open_timeout=self._transport._timeout, - **connect_kwargs, - ) - except BaseException as error: - raise _normalize_websocket_error(error) - - return SandboxTerminalConnection(websocket) - - -class SandboxTerminalApi: - def __init__( - self, - transport: RuntimeTransport, - get_connection_info, - runtime_proxy_override: Optional[str] = None, - ): - self._transport = transport - self._get_connection_info = get_connection_info - self._runtime_proxy_override = runtime_proxy_override - - def create( - self, - input: Union[SandboxTerminalCreateParams, Dict[str, object]], - ) -> SandboxTerminalHandle: - params = ( - input - if isinstance(input, SandboxTerminalCreateParams) - else SandboxTerminalCreateParams(**input) - ) - payload = self._transport.request_json( - "/sandbox/pty", - method="POST", - json_body=params.model_dump(exclude_none=True, by_alias=True), - headers={"content-type": "application/json"}, - ) - return SandboxTerminalHandle( - self._transport, - self._get_connection_info, - _normalize_terminal_status(payload["pty"]), - self._runtime_proxy_override, - ) - - def get(self, terminal_id: str, include_output: bool = False) -> SandboxTerminalHandle: - payload = self._transport.request_json( - f"/sandbox/pty/{terminal_id}", - params={"includeOutput": 
True} if include_output else None, - ) - return SandboxTerminalHandle( - self._transport, - self._get_connection_info, - _normalize_terminal_status(payload["pty"]), - self._runtime_proxy_override, - ) +from ..sandboxes.shared import ( + _build_sandbox_exposed_url, + _copy_model, + _expires_within_buffer, +) +from .sandboxes.sandbox_files import ( + DEFAULT_WATCH_TIMEOUT_MS, + SandboxFileWatchHandle, + SandboxFilesApi, + SandboxWatchDirHandle, +) +from .sandboxes.sandbox_processes import ( + DEFAULT_PROCESS_KILL_WAIT_SECONDS, + SandboxProcessHandle, + SandboxProcessesApi, +) +from .sandboxes.sandbox_terminal import ( + DEFAULT_TERMINAL_KILL_WAIT_SECONDS, + SandboxTerminalApi, + SandboxTerminalConnection, + SandboxTerminalHandle, +) +from .sandboxes.sandbox_transport import RuntimeTransport + +__all__ = [ + "DEFAULT_PROCESS_KILL_WAIT_SECONDS", + "DEFAULT_TERMINAL_KILL_WAIT_SECONDS", + "DEFAULT_WATCH_TIMEOUT_MS", + "RuntimeTransport", + "SandboxFileWatchHandle", + "SandboxFilesApi", + "SandboxHandle", + "SandboxManager", + "SandboxProcessHandle", + "SandboxProcessesApi", + "SandboxTerminalApi", + "SandboxTerminalConnection", + "SandboxTerminalHandle", + "SandboxWatchDirHandle", +] class SandboxHandle: @@ -1607,7 +133,8 @@ def stop(self) -> BasicResponse: return response def create_memory_snapshot( - self, params: Optional[Union[SandboxMemorySnapshotParams, Dict[str, object]]] = None + self, + params: Optional[Union[SandboxMemorySnapshotParams, Dict[str, object]]] = None, ) -> SandboxMemorySnapshotResult: normalized = ( params @@ -1668,7 +195,9 @@ def _hydrate(self, detail: SandboxDetail) -> None: self._detail = detail self._runtime_session = self._to_runtime_session(detail) - def _resolve_runtime_connection(self, force_refresh: bool = False) -> RuntimeConnection: + def _resolve_runtime_connection( + self, force_refresh: bool = False + ) -> RuntimeConnection: session = self.create_runtime_session(force_refresh=force_refresh) return RuntimeConnection( 
sandbox_id=self.id, @@ -1740,7 +269,9 @@ def __init__(self, client): None, ) - def create(self, params: Union[CreateSandboxParams, Dict[str, object]]) -> SandboxHandle: + def create( + self, params: Union[CreateSandboxParams, Dict[str, object]] + ) -> SandboxHandle: normalized = ( params if isinstance(params, CreateSandboxParams) diff --git a/hyperbrowser/client/managers/sync_manager/sandboxes/__init__.py b/hyperbrowser/client/managers/sync_manager/sandboxes/__init__.py new file mode 100644 index 00000000..e9684afe --- /dev/null +++ b/hyperbrowser/client/managers/sync_manager/sandboxes/__init__.py @@ -0,0 +1,33 @@ +from .sandbox_files import ( + DEFAULT_WATCH_TIMEOUT_MS, + SandboxFileWatchHandle, + SandboxFilesApi, + SandboxWatchDirHandle, +) +from .sandbox_processes import ( + DEFAULT_PROCESS_KILL_WAIT_SECONDS, + SandboxProcessHandle, + SandboxProcessesApi, +) +from .sandbox_terminal import ( + DEFAULT_TERMINAL_KILL_WAIT_SECONDS, + SandboxTerminalApi, + SandboxTerminalConnection, + SandboxTerminalHandle, +) +from .sandbox_transport import RuntimeTransport + +__all__ = [ + "DEFAULT_PROCESS_KILL_WAIT_SECONDS", + "DEFAULT_TERMINAL_KILL_WAIT_SECONDS", + "DEFAULT_WATCH_TIMEOUT_MS", + "RuntimeTransport", + "SandboxFileWatchHandle", + "SandboxFilesApi", + "SandboxProcessHandle", + "SandboxProcessesApi", + "SandboxTerminalApi", + "SandboxTerminalConnection", + "SandboxTerminalHandle", + "SandboxWatchDirHandle", +] diff --git a/hyperbrowser/client/managers/sync_manager/sandboxes/sandbox_files.py b/hyperbrowser/client/managers/sync_manager/sandboxes/sandbox_files.py new file mode 100644 index 00000000..39e05e94 --- /dev/null +++ b/hyperbrowser/client/managers/sync_manager/sandboxes/sandbox_files.py @@ -0,0 +1,699 @@ +import base64 +import io +import json +import socket +import threading +from datetime import datetime +from typing import Callable, Dict, List, Optional, Union +from urllib.parse import urlencode + +from websockets.exceptions import ConnectionClosed +from 
websockets.sync.client import connect as sync_ws_connect + +from .....exceptions import HyperbrowserError +from .....models.sandbox import ( + SandboxFileChmodParams, + SandboxFileChownParams, + SandboxFileCopyParams, + SandboxFileDeleteParams, + SandboxFileInfo, + SandboxFileReadResult, + SandboxFileSystemEvent, + SandboxFileWriteEntry, + SandboxFileTransferResult, + SandboxFileWatchDoneEvent, + SandboxFileWatchEventMessage, + SandboxFileWatchStatus, + SandboxPresignFileParams, + SandboxPresignedUrl, +) +from .....sandbox_common import build_headers, to_websocket_transport_target +from ...sandboxes.shared import ( + DEFAULT_WATCH_TIMEOUT_MS, + _copy_model, + _encode_write_data, + _normalize_event_type, + _normalize_file_info, + _normalize_websocket_error, + _normalize_write_info, + _relative_watch_name, +) +from .sandbox_transport import RuntimeTransport + + +class SandboxFileWatchHandle: + def __init__( + self, + transport: RuntimeTransport, + get_connection_info, + status, + runtime_proxy_override: Optional[str] = None, + ): + self._transport = transport + self._get_connection_info = get_connection_info + self._status = status + self._runtime_proxy_override = runtime_proxy_override + + @property + def id(self) -> str: + return self._status.id + + @property + def current(self) -> SandboxFileWatchStatus: + return _copy_model(self._status) + + def to_dict(self): + return self._status.model_dump() + + def to_json(self): + return self.to_dict() + + def refresh(self, include_events: bool = False) -> "SandboxFileWatchHandle": + params = {"includeEvents": True} if include_events else None + payload = self._transport.request_json( + f"/sandbox/files/watch/{self.id}", + params=params, + ) + self._status = SandboxFileWatchStatus(**payload["watch"]) + return self + + def stop(self) -> None: + self._transport.request_json( + f"/sandbox/files/watch/{self.id}", + method="DELETE", + ) + self._status = self._status.model_copy( + update={ + "active": False, + "stopped_at": 
self._status.stopped_at + or int(datetime.now().timestamp() * 1000), + } + ) + + def events( + self, + *, + cursor: Optional[int] = None, + route: str = "ws", + ): + connection = self._get_connection_info() + query = urlencode( + [ + ("sessionId", connection.sandbox_id), + *([("cursor", str(cursor))] if cursor is not None else []), + ] + ) + target = to_websocket_transport_target( + connection.base_url, + f"/sandbox/files/watch/{self.id}/{route}?{query}", + self._runtime_proxy_override, + ) + headers = build_headers(connection.token, host_header=target.host_header) + connect_kwargs = {} + if target.connect_host is not None and target.connect_port is not None: + connect_kwargs["sock"] = socket.create_connection( + (target.connect_host, target.connect_port), + timeout=self._transport._timeout, + ) + try: + websocket = sync_ws_connect( + target.url, + additional_headers=headers, + open_timeout=self._transport._timeout, + **connect_kwargs, + ) + except BaseException as error: + raise _normalize_websocket_error(error) + + try: + while True: + try: + message = websocket.recv() + except ConnectionClosed: + break + + if isinstance(message, bytes): + message = message.decode("utf-8") + parsed = json.loads(message) + if parsed["type"] == "event": + event = SandboxFileWatchEventMessage( + type="event", + event=parsed["event"], + ) + self._status = self._status.model_copy( + update={ + "oldest_seq": self._status.oldest_seq or event.event.seq, + "last_seq": max(self._status.last_seq, event.event.seq), + } + ) + yield event + elif parsed["type"] == "done": + self._status = SandboxFileWatchStatus(**parsed["status"]) + yield SandboxFileWatchDoneEvent(type="done", status=self.current) + break + except GeneratorExit: + raise + except BaseException as error: + raise _normalize_websocket_error(error) + finally: + websocket.close() + + +class SandboxWatchDirHandle: + def __init__( + self, + watch: SandboxFileWatchHandle, + on_event: Callable[[SandboxFileSystemEvent], object], + *, + 
on_exit: Optional[Callable[[Optional[BaseException]], object]] = None, + timeout_ms: Optional[int] = None, + ): + self._watch = watch + self._root_path = watch.current.path + self._on_event = on_event + self._on_exit = on_exit + self._thread = threading.Thread(target=self._run, daemon=True) + self._timer = None + self._stopped = threading.Event() + self._exit_notified = False + + effective_timeout = ( + DEFAULT_WATCH_TIMEOUT_MS if timeout_ms is None else timeout_ms + ) + if effective_timeout > 0: + self._timer = threading.Timer(effective_timeout / 1000.0, self.stop) + self._timer.daemon = True + self._timer.start() + + self._thread.start() + + def stop(self) -> None: + if self._stopped.is_set(): + return + self._stopped.set() + + if self._timer is not None: + self._timer.cancel() + self._timer = None + + try: + self._watch.stop() + except HyperbrowserError as error: + if error.status_code not in {404, 409}: + raise + + if threading.current_thread() is not self._thread: + self._thread.join() + + def _run(self) -> None: + exit_error = None + try: + for message in self._watch.events(): + event_type = _normalize_event_type(message.event.op) + if not event_type: + continue + self._on_event( + SandboxFileSystemEvent( + type=event_type, + name=_relative_watch_name(self._root_path, message.event.path), + ) + ) + except BaseException as error: + exit_error = error + finally: + if self._timer is not None: + self._timer.cancel() + self._timer = None + if not self._exit_notified: + self._exit_notified = True + if self._on_exit is not None: + self._on_exit(exit_error) + + +class SandboxFilesApi: + def __init__( + self, + transport: RuntimeTransport, + get_connection_info, + runtime_proxy_override: Optional[str] = None, + ): + self._transport = transport + self._get_connection_info = get_connection_info + self._runtime_proxy_override = runtime_proxy_override + + def exists(self, path: str) -> bool: + try: + self.get_info(path) + return True + except HyperbrowserError as error: + 
if error.status_code == 404: + return False + if ( + "not found" in str(error).lower() + or "no such file" in str(error).lower() + ): + return False + raise + + def get_info(self, path: str) -> SandboxFileInfo: + payload = self._transport.request_json( + "/sandbox/files/stat", + params={"path": path}, + ) + return _normalize_file_info(payload["file"]) + + def stat(self, path: str) -> SandboxFileInfo: + return self.get_info(path) + + def list( + self, + path: str, + *, + depth: Optional[int] = None, + ) -> List[SandboxFileInfo]: + depth = 1 if depth is None else depth + if depth < 1: + raise ValueError("depth should be at least one") + + payload = self._transport.request_json( + "/sandbox/files", + params={ + "path": path, + "depth": depth, + }, + ) + return [_normalize_file_info(entry) for entry in payload.get("entries", [])] + + def read( + self, + path: str, + *, + offset: Optional[int] = None, + length: Optional[int] = None, + format: str = "text", + ): + if format == "text": + return self._read_wire( + path, offset=offset, length=length, encoding="utf8" + ).content + + response = self._read_wire( + path, offset=offset, length=length, encoding="base64" + ) + content = base64.b64decode(response.content) + if format in {"bytes", "blob"}: + return content + if format == "stream": + return io.BytesIO(content) + raise ValueError("format should be one of: text, bytes, blob, stream") + + def read_text( + self, + path: str, + *, + offset: Optional[int] = None, + length: Optional[int] = None, + ) -> str: + return self.read(path, offset=offset, length=length, format="text") + + def read_bytes( + self, + path: str, + *, + offset: Optional[int] = None, + length: Optional[int] = None, + ) -> bytes: + return self.read(path, offset=offset, length=length, format="bytes") + + def write( + self, + path_or_files: Union[ + str, List[Union[SandboxFileWriteEntry, Dict[str, object]]] + ], + data: Optional[Union[str, bytes, bytearray]] = None, + ): + if isinstance(path_or_files, str): 
+ if data is None: + raise ValueError("Path and data are required") + payload = self._transport.request_json( + "/sandbox/files/write", + method="POST", + json_body={ + "path": path_or_files, + **_encode_write_data(data), + }, + headers={"content-type": "application/json"}, + ) + return _normalize_write_info(payload["files"][0]) + + if not path_or_files: + return [] + + encoded_files = [] + for entry in path_or_files: + normalized = ( + entry + if isinstance(entry, SandboxFileWriteEntry) + else SandboxFileWriteEntry(**entry) + ) + encoded_files.append( + { + "path": normalized.path, + **_encode_write_data(normalized.data), + } + ) + + payload = self._transport.request_json( + "/sandbox/files/write", + method="POST", + json_body={"files": encoded_files}, + headers={"content-type": "application/json"}, + ) + return [_normalize_write_info(entry) for entry in payload.get("files", [])] + + def write_text( + self, + path: str, + data: str, + *, + append: Optional[bool] = None, + mode: Optional[str] = None, + ): + return self._write_single( + path, + data, + append=append, + mode=mode, + encoding="utf8", + ) + + def write_bytes( + self, + path: str, + data: bytes, + *, + append: Optional[bool] = None, + mode: Optional[str] = None, + ): + return self._write_single( + path, + base64.b64encode(data).decode("ascii"), + append=append, + mode=mode, + encoding="base64", + ) + + def upload(self, path: str, data: Union[str, bytes, bytearray]): + body = data.encode("utf-8") if isinstance(data, str) else bytes(data) + payload = self._transport.request_json( + "/sandbox/files/upload", + method="PUT", + params={"path": path}, + content=body, + ) + return SandboxFileTransferResult(**payload) + + def download(self, path: str) -> bytes: + return self._transport.request_bytes( + "/sandbox/files/download", + params={"path": path}, + ) + + def make_dir( + self, + path: str, + *, + parents: Optional[bool] = None, + mode: Optional[str] = None, + ) -> bool: + payload = 
self._transport.request_json( + "/sandbox/files/mkdir", + method="POST", + json_body={ + "path": path, + "parents": parents, + "mode": mode, + }, + headers={"content-type": "application/json"}, + ) + return bool(payload.get("created")) + + def mkdir( + self, + path: str, + *, + parents: Optional[bool] = None, + mode: Optional[str] = None, + ) -> bool: + return self.make_dir(path, parents=parents, mode=mode) + + def rename(self, old_path: str, new_path: str) -> SandboxFileInfo: + payload = self._transport.request_json( + "/sandbox/files/move", + method="POST", + json_body={ + "from": old_path, + "to": new_path, + }, + headers={"content-type": "application/json"}, + ) + return _normalize_file_info(payload["entry"]) + + def move( + self, + *, + source: str, + destination: str, + overwrite: Optional[bool] = None, + ) -> SandboxFileInfo: + return self.rename(source, destination) + + def remove(self, path: str, *, recursive: Optional[bool] = None) -> None: + self._transport.request_json( + "/sandbox/files/delete", + method="POST", + json_body=SandboxFileDeleteParams( + path=path, + recursive=recursive, + ).model_dump(exclude_none=True), + headers={"content-type": "application/json"}, + ) + + def delete(self, path: str, *, recursive: Optional[bool] = None) -> None: + self.remove(path, recursive=recursive) + + def copy( + self, + params: Optional[Union[SandboxFileCopyParams, Dict[str, object]]] = None, + *, + source: Optional[str] = None, + destination: Optional[str] = None, + recursive: Optional[bool] = None, + overwrite: Optional[bool] = None, + ) -> SandboxFileInfo: + if params is None: + normalized = SandboxFileCopyParams( + source=source, + destination=destination, + recursive=recursive, + overwrite=overwrite, + ) + elif isinstance(params, SandboxFileCopyParams): + normalized = params + else: + normalized = SandboxFileCopyParams(**params) + + payload = self._transport.request_json( + "/sandbox/files/copy", + method="POST", + json_body={ + "from": normalized.source, + 
"to": normalized.destination, + "recursive": normalized.recursive, + "overwrite": normalized.overwrite, + }, + headers={"content-type": "application/json"}, + ) + return _normalize_file_info(payload["entry"]) + + def chmod( + self, + params: Optional[Union[SandboxFileChmodParams, Dict[str, object]]] = None, + *, + path: Optional[str] = None, + mode: Optional[str] = None, + recursive: Optional[bool] = None, + ) -> None: + normalized = ( + params + if isinstance(params, SandboxFileChmodParams) + else SandboxFileChmodParams( + **(params or {"path": path, "mode": mode, "recursive": recursive}) + ) + ) + self._transport.request_json( + "/sandbox/files/chmod", + method="POST", + json_body=normalized.model_dump(exclude_none=True), + headers={"content-type": "application/json"}, + ) + + def chown( + self, + params: Optional[Union[SandboxFileChownParams, Dict[str, object]]] = None, + *, + path: Optional[str] = None, + uid: Optional[int] = None, + gid: Optional[int] = None, + recursive: Optional[bool] = None, + ) -> None: + normalized = ( + params + if isinstance(params, SandboxFileChownParams) + else SandboxFileChownParams( + **( + params + or { + "path": path, + "uid": uid, + "gid": gid, + "recursive": recursive, + } + ) + ) + ) + self._transport.request_json( + "/sandbox/files/chown", + method="POST", + json_body=normalized.model_dump(exclude_none=True), + headers={"content-type": "application/json"}, + ) + + def watch(self, path: str, *, recursive: Optional[bool] = None): + payload = self._transport.request_json( + "/sandbox/files/watch", + method="POST", + json_body={ + "path": path, + "recursive": recursive, + }, + headers={"content-type": "application/json"}, + ) + return SandboxFileWatchHandle( + self._transport, + self._get_connection_info, + SandboxFileWatchStatus(**payload["watch"]), + self._runtime_proxy_override, + ) + + def watch_dir( + self, + path: str, + on_event: Callable[[SandboxFileSystemEvent], object], + *, + recursive: Optional[bool] = None, + 
timeout_ms: Optional[int] = None, + on_exit: Optional[Callable[[Optional[BaseException]], object]] = None, + ) -> SandboxWatchDirHandle: + return SandboxWatchDirHandle( + self.watch(path, recursive=recursive), + on_event, + on_exit=on_exit, + timeout_ms=timeout_ms, + ) + + def get_watch( + self, watch_id: str, include_events: bool = False + ) -> SandboxFileWatchHandle: + payload = self._transport.request_json( + f"/sandbox/files/watch/{watch_id}", + params={"includeEvents": True} if include_events else None, + ) + return SandboxFileWatchHandle( + self._transport, + self._get_connection_info, + SandboxFileWatchStatus(**payload["watch"]), + self._runtime_proxy_override, + ) + + def upload_url( + self, + path: str, + *, + expires_in_seconds: Optional[int] = None, + one_time: Optional[bool] = None, + ) -> SandboxPresignedUrl: + payload = self._transport.request_json( + "/sandbox/files/presign-upload", + method="POST", + json_body=SandboxPresignFileParams( + path=path, + expires_in_seconds=expires_in_seconds, + one_time=one_time, + ).model_dump(exclude_none=True, by_alias=True), + headers={"content-type": "application/json"}, + ) + return SandboxPresignedUrl(**payload) + + def download_url( + self, + path: str, + *, + expires_in_seconds: Optional[int] = None, + one_time: Optional[bool] = None, + ) -> SandboxPresignedUrl: + payload = self._transport.request_json( + "/sandbox/files/presign-download", + method="POST", + json_body=SandboxPresignFileParams( + path=path, + expires_in_seconds=expires_in_seconds, + one_time=one_time, + ).model_dump(exclude_none=True, by_alias=True), + headers={"content-type": "application/json"}, + ) + return SandboxPresignedUrl(**payload) + + def _read_wire( + self, + path: str, + *, + offset: Optional[int] = None, + length: Optional[int] = None, + encoding: str, + ) -> SandboxFileReadResult: + payload = self._transport.request_json( + "/sandbox/files/read", + method="POST", + json_body={ + "path": path, + "offset": offset, + "length": length, 
+ "encoding": encoding, + }, + headers={"content-type": "application/json"}, + ) + return SandboxFileReadResult(**payload) + + def _write_single( + self, + path: str, + data: str, + *, + append: Optional[bool] = None, + mode: Optional[str] = None, + encoding: str, + ): + payload = self._transport.request_json( + "/sandbox/files/write", + method="POST", + json_body={ + "path": path, + "data": data, + "append": append, + "mode": mode, + "encoding": encoding, + }, + headers={"content-type": "application/json"}, + ) + return _normalize_write_info(payload["files"][0]) diff --git a/hyperbrowser/client/managers/sync_manager/sandboxes/sandbox_processes.py b/hyperbrowser/client/managers/sync_manager/sandboxes/sandbox_processes.py new file mode 100644 index 00000000..36ec4127 --- /dev/null +++ b/hyperbrowser/client/managers/sync_manager/sandboxes/sandbox_processes.py @@ -0,0 +1,213 @@ +import base64 +from typing import Dict, Optional, Union + +from .....models.sandbox import ( + SandboxExecParams, + SandboxProcessExitEvent, + SandboxProcessListResponse, + SandboxProcessOutputEvent, + SandboxProcessResult, + SandboxProcessStdinParams, + SandboxProcessSummary, +) +from .sandbox_transport import RuntimeTransport + +DEFAULT_PROCESS_KILL_WAIT_SECONDS = 5.0 + + +class SandboxProcessHandle: + def __init__(self, transport: RuntimeTransport, summary: SandboxProcessSummary): + self._transport = transport + self._summary = summary + + @property + def id(self) -> str: + return self._summary.id + + @property + def status(self) -> str: + return self._summary.status + + def to_dict(self): + return self._summary.model_dump() + + def to_json(self): + return self.to_dict() + + def refresh(self) -> "SandboxProcessHandle": + payload = self._transport.request_json(f"/sandbox/processes/{self.id}") + self._summary = SandboxProcessSummary(**payload["process"]) + return self + + def wait(self, timeout_ms: Optional[int] = None, timeout_sec: Optional[int] = None): + payload = 
self._transport.request_json( + f"/sandbox/processes/{self.id}/wait", + method="POST", + json_body={ + "timeoutMs": timeout_ms, + "timeout_sec": timeout_sec, + }, + headers={"content-type": "application/json"}, + ) + result = SandboxProcessResult(**payload["result"]) + self._summary = SandboxProcessSummary( + id=result.id, + status=result.status, + command=self._summary.command, + args=self._summary.args, + cwd=self._summary.cwd, + pid=self._summary.pid, + exit_code=result.exit_code, + started_at=result.started_at, + completed_at=result.completed_at, + ) + return result + + def signal(self, signal: str) -> None: + payload = self._transport.request_json( + f"/sandbox/processes/{self.id}/signal", + method="POST", + json_body={"signal": signal}, + headers={"content-type": "application/json"}, + ) + self._summary = SandboxProcessSummary(**payload["process"]) + + def kill( + self, + timeout_ms: Optional[int] = None, + timeout_sec: Optional[int] = None, + ) -> SandboxProcessResult: + payload = self._transport.request_json( + f"/sandbox/processes/{self.id}", + method="DELETE", + ) + self._summary = SandboxProcessSummary(**payload["process"]) + if timeout_ms is None and timeout_sec is None: + timeout_ms = int(DEFAULT_PROCESS_KILL_WAIT_SECONDS * 1000) + return self.wait(timeout_ms=timeout_ms, timeout_sec=timeout_sec) + + def write_stdin( + self, + data: Optional[Union[str, bytes, bytearray, SandboxProcessStdinParams]] = None, + *, + encoding: Optional[str] = None, + eof: Optional[bool] = None, + ) -> None: + if isinstance(data, SandboxProcessStdinParams): + params = data + else: + params = SandboxProcessStdinParams(data=data, encoding=encoding, eof=eof) + + payload: Dict[str, object] = {"eof": params.eof} + if params.data is not None: + if isinstance(params.data, str): + payload["data"] = params.data + payload["encoding"] = params.encoding or "utf8" + else: + payload["data"] = base64.b64encode(bytes(params.data)).decode("ascii") + payload["encoding"] = "base64" + + 
self._transport.request_json( + f"/sandbox/processes/{self.id}/stdin", + method="POST", + json_body=payload, + headers={"content-type": "application/json"}, + ) + + def stream(self, from_seq: Optional[int] = None): + params = {"from_seq": from_seq} if from_seq and from_seq > 0 else None + for event in self._transport.stream_sse( + f"/sandbox/processes/{self.id}/stream", + params=params, + ): + event_type = event["event"] + data = event["data"] + if event_type == "output": + yield SandboxProcessOutputEvent( + type=data["stream"], + seq=data["seq"], + data=data["data"], + timestamp=data["timestamp"], + ) + elif event_type == "done": + yield SandboxProcessExitEvent( + type="exit", + result=SandboxProcessResult(**data), + ) + + def result(self) -> SandboxProcessResult: + return self.wait() + + +class SandboxProcessesApi: + def __init__(self, transport: RuntimeTransport): + self._transport = transport + + def exec( + self, input: Union[SandboxExecParams, Dict[str, object]] + ) -> SandboxProcessResult: + params = ( + input + if isinstance(input, SandboxExecParams) + else SandboxExecParams(**input) + ) + payload = self._transport.request_json( + "/sandbox/exec", + method="POST", + json_body=params.model_dump(exclude_none=True, by_alias=True), + headers={"content-type": "application/json"}, + ) + return SandboxProcessResult(**payload["result"]) + + def start( + self, input: Union[SandboxExecParams, Dict[str, object]] + ) -> SandboxProcessHandle: + params = ( + input + if isinstance(input, SandboxExecParams) + else SandboxExecParams(**input) + ) + payload = self._transport.request_json( + "/sandbox/processes", + method="POST", + json_body=params.model_dump(exclude_none=True, by_alias=True), + headers={"content-type": "application/json"}, + ) + return SandboxProcessHandle( + self._transport, + SandboxProcessSummary(**payload["process"]), + ) + + def get(self, process_id: str) -> SandboxProcessHandle: + payload = 
self._transport.request_json(f"/sandbox/processes/{process_id}") + return SandboxProcessHandle( + self._transport, + SandboxProcessSummary(**payload["process"]), + ) + + def list( + self, + *, + status=None, + limit: Optional[int] = None, + cursor: Optional[Union[str, int]] = None, + created_after: Optional[int] = None, + created_before: Optional[int] = None, + ) -> SandboxProcessListResponse: + normalized_status = None + if isinstance(status, list): + normalized_status = ",".join(status) if status else None + else: + normalized_status = status + + payload = self._transport.request_json( + "/sandbox/processes", + params={ + "status": normalized_status, + "limit": limit, + "cursor": cursor, + "created_after": created_after, + "created_before": created_before, + }, + ) + return SandboxProcessListResponse(**payload) diff --git a/hyperbrowser/client/managers/sync_manager/sandboxes/sandbox_terminal.py b/hyperbrowser/client/managers/sync_manager/sandboxes/sandbox_terminal.py new file mode 100644 index 00000000..ccc984e6 --- /dev/null +++ b/hyperbrowser/client/managers/sync_manager/sandboxes/sandbox_terminal.py @@ -0,0 +1,236 @@ +import base64 +import json +import socket +from typing import Dict, Optional, Union + +from websockets.exceptions import ConnectionClosed +from websockets.sync.client import connect as sync_ws_connect + +from .....models.sandbox import ( + SandboxTerminalCreateParams, + SandboxTerminalExitEvent, + SandboxTerminalOutputEvent, + SandboxTerminalStatus, + SandboxTerminalWaitParams, +) +from .....sandbox_common import build_headers, to_websocket_transport_target +from ...sandboxes.shared import ( + _copy_model, + _normalize_terminal_output_chunk, + _normalize_terminal_status, + _normalize_websocket_error, +) +from .sandbox_transport import RuntimeTransport + +DEFAULT_TERMINAL_KILL_WAIT_SECONDS = 5.0 + + +class SandboxTerminalConnection: + def __init__(self, websocket): + self._websocket = websocket + + def events(self): + while True: + try: + message 
= self._websocket.recv() + except ConnectionClosed: + break + + if isinstance(message, bytes): + message = message.decode("utf-8") + parsed = json.loads(message) + if parsed["type"] == "output": + normalized = _normalize_terminal_output_chunk(parsed) + yield SandboxTerminalOutputEvent( + type="output", + **normalized, + ) + elif parsed["type"] == "exit": + yield SandboxTerminalExitEvent( + type="exit", + status=_normalize_terminal_status(parsed["status"]), + ) + + def write(self, data: Union[str, bytes, bytearray]) -> None: + payload: Dict[str, object] = { + "type": "input", + "data": data + if isinstance(data, str) + else base64.b64encode(bytes(data)).decode("ascii"), + } + if not isinstance(data, str): + payload["encoding"] = "base64" + self._websocket.send(json.dumps(payload)) + + def resize(self, rows: int, cols: int) -> None: + self._websocket.send( + json.dumps( + { + "type": "resize", + "rows": rows, + "cols": cols, + } + ) + ) + + def close(self) -> None: + self._websocket.close() + + +class SandboxTerminalHandle: + def __init__( + self, + transport: RuntimeTransport, + get_connection_info, + status, + runtime_proxy_override: Optional[str] = None, + ): + self._transport = transport + self._get_connection_info = get_connection_info + self._status = status + self._runtime_proxy_override = runtime_proxy_override + + @property + def id(self) -> str: + return self._status.id + + @property + def current(self) -> SandboxTerminalStatus: + return _copy_model(self._status) + + def to_dict(self): + return self._status.model_dump() + + def to_json(self): + return self.to_dict() + + def refresh(self, include_output: bool = False) -> "SandboxTerminalHandle": + payload = self._transport.request_json( + f"/sandbox/pty/{self.id}", + params={"includeOutput": True} if include_output else None, + ) + self._status = _normalize_terminal_status(payload["pty"]) + return self + + def wait( + self, + timeout_ms: Optional[int] = None, + include_output: Optional[bool] = None, + ) -> 
SandboxTerminalStatus: + payload = self._transport.request_json( + f"/sandbox/pty/{self.id}/wait", + method="POST", + json_body=SandboxTerminalWaitParams( + timeout_ms=timeout_ms, + include_output=include_output, + ).model_dump(exclude_none=True, by_alias=True), + headers={"content-type": "application/json"}, + ) + self._status = _normalize_terminal_status(payload["pty"]) + return self.current + + def signal(self, signal: Optional[str] = None) -> SandboxTerminalStatus: + payload = self._transport.request_json( + f"/sandbox/pty/{self.id}/kill", + method="POST", + json_body={"signal": signal}, + headers={"content-type": "application/json"}, + ) + self._status = _normalize_terminal_status(payload["pty"]) + return self.current + + def kill( + self, + signal: Optional[str] = None, + *, + timeout_ms: Optional[int] = None, + ) -> SandboxTerminalStatus: + self.signal(signal) + if timeout_ms is None: + timeout_ms = int(DEFAULT_TERMINAL_KILL_WAIT_SECONDS * 1000) + return self.wait(timeout_ms=timeout_ms) + + def resize(self, rows: int, cols: int) -> SandboxTerminalStatus: + payload = self._transport.request_json( + f"/sandbox/pty/{self.id}/resize", + method="POST", + json_body={"rows": rows, "cols": cols}, + headers={"content-type": "application/json"}, + ) + self._status = _normalize_terminal_status(payload["pty"]) + return self.current + + def attach(self) -> SandboxTerminalConnection: + connection = self._get_connection_info() + target = to_websocket_transport_target( + connection.base_url, + f"/sandbox/pty/{self.id}/ws?sessionId={connection.sandbox_id}", + self._runtime_proxy_override, + ) + headers = build_headers(connection.token, host_header=target.host_header) + connect_kwargs = {} + if target.connect_host is not None and target.connect_port is not None: + connect_kwargs["sock"] = socket.create_connection( + (target.connect_host, target.connect_port), + timeout=self._transport._timeout, + ) + + try: + websocket = sync_ws_connect( + target.url, + 
additional_headers=headers, + open_timeout=self._transport._timeout, + **connect_kwargs, + ) + except BaseException as error: + raise _normalize_websocket_error(error) + + return SandboxTerminalConnection(websocket) + + +class SandboxTerminalApi: + def __init__( + self, + transport: RuntimeTransport, + get_connection_info, + runtime_proxy_override: Optional[str] = None, + ): + self._transport = transport + self._get_connection_info = get_connection_info + self._runtime_proxy_override = runtime_proxy_override + + def create( + self, + input: Union[SandboxTerminalCreateParams, Dict[str, object]], + ) -> SandboxTerminalHandle: + params = ( + input + if isinstance(input, SandboxTerminalCreateParams) + else SandboxTerminalCreateParams(**input) + ) + payload = self._transport.request_json( + "/sandbox/pty", + method="POST", + json_body=params.model_dump(exclude_none=True, by_alias=True), + headers={"content-type": "application/json"}, + ) + return SandboxTerminalHandle( + self._transport, + self._get_connection_info, + _normalize_terminal_status(payload["pty"]), + self._runtime_proxy_override, + ) + + def get( + self, terminal_id: str, include_output: bool = False + ) -> SandboxTerminalHandle: + payload = self._transport.request_json( + f"/sandbox/pty/{terminal_id}", + params={"includeOutput": True} if include_output else None, + ) + return SandboxTerminalHandle( + self._transport, + self._get_connection_info, + _normalize_terminal_status(payload["pty"]), + self._runtime_proxy_override, + ) diff --git a/hyperbrowser/client/managers/sync_manager/sandboxes/sandbox_transport.py b/hyperbrowser/client/managers/sync_manager/sandboxes/sandbox_transport.py new file mode 100644 index 00000000..d51d2647 --- /dev/null +++ b/hyperbrowser/client/managers/sync_manager/sandboxes/sandbox_transport.py @@ -0,0 +1,249 @@ +import json +from typing import Dict, Iterator, Optional, Union + +import httpx + +from .....sandbox_common import ( + RuntimeConnection, + build_headers, + 
ensure_response_ok, + normalize_network_error, + parse_json_response, + resolve_runtime_transport_target, +) +from ...sandboxes.shared import _build_query_path + + +class RuntimeTransport: + def __init__( + self, + resolve_connection, + timeout: float = 30.0, + runtime_proxy_override: Optional[str] = None, + ): + self._resolve_connection = resolve_connection + self._timeout = timeout + self._runtime_proxy_override = runtime_proxy_override + + def request_json( + self, + path: str, + *, + method: str = "GET", + params: Optional[Dict[str, object]] = None, + json_body: Optional[Dict[str, object]] = None, + content: Optional[Union[str, bytes]] = None, + headers: Optional[Dict[str, str]] = None, + ): + response = self._request( + path, + method=method, + params=params, + json_body=json_body, + content=content, + headers=headers, + ) + return parse_json_response(response, "runtime") + + def request_bytes( + self, + path: str, + *, + method: str = "GET", + params: Optional[Dict[str, object]] = None, + headers: Optional[Dict[str, str]] = None, + ) -> bytes: + response = self._request(path, method=method, params=params, headers=headers) + return response.content + + def stream_sse( + self, path: str, params: Optional[Dict[str, object]] = None + ) -> Iterator[Dict[str, object]]: + client, response = self._open_stream(path, params=params) + event_name = "message" + event_id = None + data_lines = [] + + def flush_event(): + nonlocal event_name, event_id, data_lines + if not data_lines and event_name == "message" and event_id is None: + return None + + raw_data = "\n".join(data_lines) + data = raw_data + if raw_data: + try: + data = json.loads(raw_data) + except json.JSONDecodeError: + data = raw_data + + event = { + "event": event_name, + "data": data, + "id": event_id, + } + event_name = "message" + event_id = None + data_lines = [] + return event + + try: + for line in response.iter_lines(): + if line == "": + event = flush_event() + if event is not None: + yield event + 
continue + + if line.startswith(":"): + continue + + if ":" in line: + field, value = line.split(":", 1) + value = value.lstrip(" ") + else: + field, value = line, "" + + if field == "event": + event_name = value or "message" + elif field == "data": + data_lines.append(value) + elif field == "id": + event_id = value + + trailing = flush_event() + if trailing is not None: + yield trailing + finally: + response.close() + client.close() + + def _request( + self, + path: str, + *, + method: str = "GET", + params: Optional[Dict[str, object]] = None, + json_body: Optional[Dict[str, object]] = None, + content: Optional[Union[str, bytes]] = None, + headers: Optional[Dict[str, str]] = None, + allow_refresh: bool = True, + ) -> httpx.Response: + connection = self._resolve_connection(False) + response = self._send( + connection, + path, + method=method, + params=params, + json_body=json_body, + content=content, + headers=headers, + ) + + if response.status_code == 401 and allow_refresh: + response.close() + refreshed = self._resolve_connection(True) + retry = self._send( + refreshed, + path, + method=method, + params=params, + json_body=json_body, + content=content, + headers=headers, + ) + return ensure_response_ok(retry, "runtime") + + return ensure_response_ok(response, "runtime") + + def _open_stream( + self, + path: str, + *, + params: Optional[Dict[str, object]] = None, + allow_refresh: bool = True, + ): + connection = self._resolve_connection(False) + client, response = self._send_stream(connection, path, params=params) + if response.status_code == 401 and allow_refresh: + response.close() + client.close() + refreshed = self._resolve_connection(True) + client, response = self._send_stream(refreshed, path, params=params) + + if not response.is_success: + response.read() + ensure_response_ok(response, "runtime") + return client, response + + def _send( + self, + connection: RuntimeConnection, + path: str, + *, + method: str, + params: Optional[Dict[str, object]], + 
json_body: Optional[Dict[str, object]], + content: Optional[Union[str, bytes]], + headers: Optional[Dict[str, str]], + ) -> httpx.Response: + request_path = _build_query_path(path, params) + target = resolve_runtime_transport_target( + connection.base_url, + request_path, + self._runtime_proxy_override, + ) + merged_headers = build_headers(connection.token, headers, target.host_header) + client = httpx.Client(timeout=self._timeout) + + try: + response = client.request( + method, + target.url, + headers=merged_headers, + json=json_body, + content=content, + ) + except BaseException as error: + client.close() + raise normalize_network_error( + error, + "runtime", + "Unknown runtime request error", + ) + + response.read() + client.close() + return response + + def _send_stream( + self, + connection: RuntimeConnection, + path: str, + *, + params: Optional[Dict[str, object]], + ): + request_path = _build_query_path(path, params) + target = resolve_runtime_transport_target( + connection.base_url, + request_path, + self._runtime_proxy_override, + ) + headers = build_headers( + connection.token, + {"Accept": "text/event-stream"}, + target.host_header, + ) + client = httpx.Client(timeout=self._timeout) + + try: + request = client.build_request("GET", target.url, headers=headers) + response = client.send(request, stream=True) + return client, response + except BaseException as error: + client.close() + raise normalize_network_error( + error, + "runtime", + "Unknown runtime request error", + ) diff --git a/pyproject.toml b/pyproject.toml index 26bfe62f..7c1f5a9d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "hyperbrowser" -version = "0.84.0" +version = "0.85.0" description = "Python SDK for hyperbrowser" authors = ["Nikhil Shahi "] license = "MIT" diff --git a/tests/sandbox/e2e/test_async_lifecycle.py b/tests/sandbox/e2e/test_async_lifecycle.py index 067dbb21..869f83a5 100644 --- a/tests/sandbox/e2e/test_async_lifecycle.py +++ 
b/tests/sandbox/e2e/test_async_lifecycle.py @@ -58,7 +58,9 @@ async def test_async_sandbox_lifecycle_e2e(): custom_image = None try: - sandbox = await client.sandboxes.create(default_sandbox_params("py-async-lifecycle")) + sandbox = await client.sandboxes.create( + default_sandbox_params("py-async-lifecycle") + ) stale_handle = await client.sandboxes.get(sandbox.id) custom_image = await get_image_by_name_async(CUSTOM_IMAGE_NAME) await wait_for_runtime_ready_async(sandbox) @@ -145,7 +147,9 @@ async def patched_get_detail(sandbox_id: str): assert custom_image_sandbox.status == "active" await wait_for_runtime_ready_async(custom_image_sandbox) - custom_image_memory_snapshot = await custom_image_sandbox.create_memory_snapshot() + custom_image_memory_snapshot = ( + await custom_image_sandbox.create_memory_snapshot() + ) assert custom_image_memory_snapshot.image_name == custom_image["imageName"] assert custom_image_memory_snapshot.image_id == custom_image["id"] assert custom_image_memory_snapshot.image_namespace == custom_image["namespace"] From 2e520952b326dd3a3d14b403674fdafe13442a4d Mon Sep 17 00:00:00 2001 From: Devin Deng Date: Thu, 12 Mar 2026 07:13:44 +0000 Subject: [PATCH 06/10] use serializaetion_alias --- hyperbrowser/models/sandbox.py | 68 ++++++++++++++--------- tests/sandbox/e2e/test_async_lifecycle.py | 24 ++++---- tests/sandbox/e2e/test_lifecycle.py | 22 ++++---- tests/test_create_sandbox_params.py | 5 ++ 4 files changed, 71 insertions(+), 48 deletions(-) diff --git a/hyperbrowser/models/sandbox.py b/hyperbrowser/models/sandbox.py index 894e0835..2e67def4 100644 --- a/hyperbrowser/models/sandbox.py +++ b/hyperbrowser/models/sandbox.py @@ -1,5 +1,5 @@ from datetime import datetime, timezone -from typing import Callable, Dict, Iterable, List, Literal, Optional, Union +from typing import Callable, Dict, List, Literal, Optional, Union from pydantic import BaseModel, ConfigDict, Field, field_validator, model_validator @@ -78,9 +78,7 @@ class 
Sandbox(SandboxBaseModel): updated_at: datetime = Field(alias="updatedAt") close_reason: Optional[str] = Field(default=None, alias="closeReason") data_consumed: Optional[int] = Field(default=None, alias="dataConsumed") - proxy_data_consumed: Optional[int] = Field( - default=None, alias="proxyDataConsumed" - ) + proxy_data_consumed: Optional[int] = Field(default=None, alias="proxyDataConsumed") usage_type: Optional[str] = Field(default=None, alias="usageType") job_id: Optional[str] = Field(default=None, alias="jobId") launch_state: Optional[SessionLaunchState] = Field( @@ -131,13 +129,19 @@ def parse_token_expires_at(cls, value): class CreateSandboxParams(SandboxBaseModel): - snapshot_name: Optional[str] = Field(default=None, alias="snapshotName") - snapshot_id: Optional[str] = Field(default=None, alias="snapshotId") - image_name: Optional[str] = Field(default=None, alias="imageName") - image_id: Optional[str] = Field(default=None, alias="imageId") + snapshot_name: Optional[str] = Field( + default=None, serialization_alias="snapshotName" + ) + snapshot_id: Optional[str] = Field(default=None, serialization_alias="snapshotId") + image_name: Optional[str] = Field(default=None, serialization_alias="imageName") + image_id: Optional[str] = Field(default=None, serialization_alias="imageId") region: Optional[SandboxRegion] = None - enable_recording: Optional[bool] = Field(default=None, alias="enableRecording") - timeout_minutes: Optional[int] = Field(default=None, alias="timeoutMinutes") + enable_recording: Optional[bool] = Field( + default=None, serialization_alias="enableRecording" + ) + timeout_minutes: Optional[int] = Field( + default=None, serialization_alias="timeoutMinutes" + ) @model_validator(mode="after") def validate_launch_source(self): @@ -145,7 +149,9 @@ def validate_launch_source(self): raise ValueError("snapshot_id requires snapshot_name") if self.image_id and not self.image_name: raise ValueError("image_id requires image_name") - source_count = 
sum(bool(value) for value in [self.snapshot_name, self.image_name]) + source_count = sum( + bool(value) for value in [self.snapshot_name, self.image_name] + ) if source_count != 1: raise ValueError( "Provide exactly one start source: snapshot_name or image_name" @@ -172,7 +178,9 @@ class SandboxListResponse(SandboxBaseModel): class SandboxMemorySnapshotParams(SandboxBaseModel): - snapshot_name: Optional[str] = Field(default=None, alias="snapshotName") + snapshot_name: Optional[str] = Field( + default=None, serialization_alias="snapshotName" + ) class SandboxMemorySnapshotResult(SandboxBaseModel): @@ -201,9 +209,9 @@ class SandboxExecParams(SandboxBaseModel): args: Optional[List[str]] = None cwd: Optional[str] = None env: Optional[Dict[str, str]] = None - timeout_ms: Optional[int] = Field(default=None, alias="timeoutMs") - timeout_sec: Optional[int] = Field(default=None, alias="timeoutSec") - use_shell: Optional[bool] = Field(default=None, alias="useShell") + timeout_ms: Optional[int] = Field(default=None, serialization_alias="timeoutMs") + timeout_sec: Optional[int] = Field(default=None, serialization_alias="timeoutSec") + use_shell: Optional[bool] = Field(default=None, serialization_alias="useShell") class SandboxProcessSummary(SandboxBaseModel): @@ -233,8 +241,12 @@ class SandboxProcessListParams(SandboxBaseModel): status: Optional[Union[SandboxProcessStatus, List[SandboxProcessStatus]]] = None limit: Optional[int] = None cursor: Optional[Union[str, int]] = None - created_after: Optional[int] = Field(default=None, alias="createdAfter") - created_before: Optional[int] = Field(default=None, alias="createdBefore") + created_after: Optional[int] = Field( + default=None, serialization_alias="createdAfter" + ) + created_before: Optional[int] = Field( + default=None, serialization_alias="createdBefore" + ) class SandboxProcessListResponse(SandboxBaseModel): @@ -243,8 +255,8 @@ class SandboxProcessListResponse(SandboxBaseModel): class 
SandboxProcessWaitParams(SandboxBaseModel): - timeout_ms: Optional[int] = Field(default=None, alias="timeoutMs") - timeout_sec: Optional[int] = Field(default=None, alias="timeoutSec") + timeout_ms: Optional[int] = Field(default=None, serialization_alias="timeoutMs") + timeout_sec: Optional[int] = Field(default=None, serialization_alias="timeoutSec") class SandboxProcessStdinParams(SandboxBaseModel): @@ -489,8 +501,10 @@ class SandboxFileSystemEvent(SandboxBaseModel): class SandboxPresignFileParams(SandboxBaseModel): path: str - expires_in_seconds: Optional[int] = Field(default=None, alias="expiresInSeconds") - one_time: Optional[bool] = Field(default=None, alias="oneTime") + expires_in_seconds: Optional[int] = Field( + default=None, serialization_alias="expiresInSeconds" + ) + one_time: Optional[bool] = Field(default=None, serialization_alias="oneTime") class SandboxPresignedUrl(SandboxBaseModel): @@ -506,10 +520,10 @@ class SandboxTerminalCreateParams(SandboxBaseModel): args: Optional[List[str]] = None cwd: Optional[str] = None env: Optional[Dict[str, str]] = None - use_shell: Optional[bool] = Field(default=None, alias="useShell") + use_shell: Optional[bool] = Field(default=None, serialization_alias="useShell") rows: Optional[int] = None cols: Optional[int] = None - timeout_ms: Optional[int] = Field(default=None, alias="timeoutMs") + timeout_ms: Optional[int] = Field(default=None, serialization_alias="timeoutMs") class SandboxTerminalOutputChunk(SandboxBaseModel): @@ -537,13 +551,15 @@ class SandboxTerminalStatus(SandboxBaseModel): class SandboxTerminalWaitParams(SandboxBaseModel): - timeout_ms: Optional[int] = Field(default=None, alias="timeoutMs") - include_output: Optional[bool] = Field(default=None, alias="includeOutput") + timeout_ms: Optional[int] = Field(default=None, serialization_alias="timeoutMs") + include_output: Optional[bool] = Field( + default=None, serialization_alias="includeOutput" + ) class SandboxTerminalKillParams(SandboxBaseModel): signal: 
Optional[str] = None - timeout_ms: Optional[int] = Field(default=None, alias="timeoutMs") + timeout_ms: Optional[int] = Field(default=None, serialization_alias="timeoutMs") class SandboxTerminalOutputEvent(SandboxBaseModel): diff --git a/tests/sandbox/e2e/test_async_lifecycle.py b/tests/sandbox/e2e/test_async_lifecycle.py index 869f83a5..17b868fd 100644 --- a/tests/sandbox/e2e/test_async_lifecycle.py +++ b/tests/sandbox/e2e/test_async_lifecycle.py @@ -130,7 +130,9 @@ async def patched_get_detail(sandbox_id: str): finally: sandbox._service.get_detail = original_get_detail - image_sandbox = await client.sandboxes.create({"imageName": DEFAULT_IMAGE_NAME}) + image_sandbox = await client.sandboxes.create( + {"image_name": DEFAULT_IMAGE_NAME} + ) assert image_sandbox.id assert image_sandbox.status == "active" response = await image_sandbox.stop() @@ -139,8 +141,8 @@ async def patched_get_detail(sandbox_id: str): custom_image_sandbox = await client.sandboxes.create( { - "imageName": custom_image["imageName"], - "imageId": custom_image["id"], + "image_name": custom_image["imageName"], + "image_id": custom_image["id"], } ) assert custom_image_sandbox.id @@ -157,8 +159,8 @@ async def patched_get_detail(sandbox_id: str): custom_snapshot_sandbox = await _create_sandbox_with_snapshot_retry( client, { - "snapshotName": custom_image_memory_snapshot.snapshot_name, - "snapshotId": custom_image_memory_snapshot.snapshot_id, + "snapshot_name": custom_image_memory_snapshot.snapshot_name, + "snapshot_id": custom_image_memory_snapshot.snapshot_id, }, ) assert custom_snapshot_sandbox.id @@ -171,8 +173,8 @@ async def patched_get_detail(sandbox_id: str): "mismatched image selector", lambda: client.sandboxes.create( { - "imageName": custom_image["imageName"], - "imageId": str(uuid4()), + "image_name": custom_image["imageName"], + "image_id": str(uuid4()), } ), status_code=404, @@ -185,8 +187,8 @@ async def patched_get_detail(sandbox_id: str): "mismatched snapshot selector", lambda: 
client.sandboxes.create( { - "snapshotName": memory_snapshot.snapshot_name, - "snapshotId": str(uuid4()), + "snapshot_name": memory_snapshot.snapshot_name, + "snapshot_id": str(uuid4()), } ), status_code=404, @@ -260,8 +262,8 @@ async def patched_get_detail(sandbox_id: str): secondary = await _create_sandbox_with_snapshot_retry( client, { - "snapshotName": memory_snapshot.snapshot_name, - "snapshotId": memory_snapshot.snapshot_id, + "snapshot_name": memory_snapshot.snapshot_name, + "snapshot_id": memory_snapshot.snapshot_id, }, ) response = await secondary.stop() diff --git a/tests/sandbox/e2e/test_lifecycle.py b/tests/sandbox/e2e/test_lifecycle.py index bd097e87..3201e3f5 100644 --- a/tests/sandbox/e2e/test_lifecycle.py +++ b/tests/sandbox/e2e/test_lifecycle.py @@ -126,7 +126,7 @@ def patched_get_detail(sandbox_id: str): finally: sandbox._service.get_detail = original_get_detail - image_sandbox = client.sandboxes.create({"imageName": DEFAULT_IMAGE_NAME}) + image_sandbox = client.sandboxes.create({"image_name": DEFAULT_IMAGE_NAME}) assert image_sandbox.id assert image_sandbox.status == "active" response = image_sandbox.stop() @@ -135,8 +135,8 @@ def patched_get_detail(sandbox_id: str): custom_image_sandbox = client.sandboxes.create( { - "imageName": custom_image["imageName"], - "imageId": custom_image["id"], + "image_name": custom_image["imageName"], + "image_id": custom_image["id"], } ) assert custom_image_sandbox.id @@ -150,8 +150,8 @@ def patched_get_detail(sandbox_id: str): custom_snapshot_sandbox = _create_sandbox_with_snapshot_retry( { - "snapshotName": custom_image_memory_snapshot.snapshot_name, - "snapshotId": custom_image_memory_snapshot.snapshot_id, + "snapshot_name": custom_image_memory_snapshot.snapshot_name, + "snapshot_id": custom_image_memory_snapshot.snapshot_id, } ) assert custom_snapshot_sandbox.id @@ -164,8 +164,8 @@ def patched_get_detail(sandbox_id: str): "mismatched image selector", lambda: client.sandboxes.create( { - "imageName": 
custom_image["imageName"], - "imageId": str(uuid4()), + "image_name": custom_image["imageName"], + "image_id": str(uuid4()), } ), status_code=404, @@ -178,8 +178,8 @@ def patched_get_detail(sandbox_id: str): "mismatched snapshot selector", lambda: client.sandboxes.create( { - "snapshotName": memory_snapshot.snapshot_name, - "snapshotId": str(uuid4()), + "snapshot_name": memory_snapshot.snapshot_name, + "snapshot_id": str(uuid4()), } ), status_code=404, @@ -252,8 +252,8 @@ def patched_get_detail(sandbox_id: str): secondary = _create_sandbox_with_snapshot_retry( { - "snapshotName": memory_snapshot.snapshot_name, - "snapshotId": memory_snapshot.snapshot_id, + "snapshot_name": memory_snapshot.snapshot_name, + "snapshot_id": memory_snapshot.snapshot_id, } ) response = secondary.stop() diff --git a/tests/test_create_sandbox_params.py b/tests/test_create_sandbox_params.py index 1d9dd00f..a79d9a10 100644 --- a/tests/test_create_sandbox_params.py +++ b/tests/test_create_sandbox_params.py @@ -19,6 +19,11 @@ def test_create_sandbox_params_accepts_snapshot_source(): } +def test_create_sandbox_params_rejects_camel_case_input(): + with pytest.raises(ValidationError, match="Provide exactly one start source"): + CreateSandboxParams(**{"imageName": "node"}) + + @pytest.mark.parametrize( "payload", [ From da6b5a8de0637c4f14b94cb121e24bc29c22d81b Mon Sep 17 00:00:00 2001 From: Devin Deng Date: Thu, 12 Mar 2026 07:29:52 +0000 Subject: [PATCH 07/10] update models --- .../client/managers/async_manager/sandbox.py | 53 ++++----- .../async_manager/sandboxes/sandbox_files.py | 64 +++++------ .../sandboxes/sandbox_processes.py | 26 ++--- .../sandboxes/sandbox_terminal.py | 11 +- .../client/managers/sync_manager/sandbox.py | 53 ++++----- .../sync_manager/sandboxes/sandbox_files.py | 64 +++++------ .../sandboxes/sandbox_processes.py | 26 ++--- .../sandboxes/sandbox_terminal.py | 11 +- tests/sandbox/e2e/test_async_expose.py | 24 ++-- tests/sandbox/e2e/test_async_lifecycle.py | 46 ++++---- 
tests/sandbox/e2e/test_async_process.py | 46 ++++---- .../sandbox/e2e/test_async_terminal_smoke.py | 104 ++++++++++-------- tests/sandbox/e2e/test_expose.py | 16 +-- tests/sandbox/e2e/test_lifecycle.py | 48 ++++---- tests/sandbox/e2e/test_process.py | 42 +++---- tests/sandbox/e2e/test_terminal_smoke.py | 96 ++++++++-------- 16 files changed, 355 insertions(+), 375 deletions(-) diff --git a/hyperbrowser/client/managers/async_manager/sandbox.py b/hyperbrowser/client/managers/async_manager/sandbox.py index 62f04cb7..6db8bc83 100644 --- a/hyperbrowser/client/managers/async_manager/sandbox.py +++ b/hyperbrowser/client/managers/async_manager/sandbox.py @@ -134,24 +134,20 @@ async def stop(self) -> BasicResponse: async def create_memory_snapshot( self, - params: Optional[Union[SandboxMemorySnapshotParams, Dict[str, object]]] = None, + params: Optional[SandboxMemorySnapshotParams] = None, ) -> SandboxMemorySnapshotResult: - normalized = ( - params - if isinstance(params, SandboxMemorySnapshotParams) - else SandboxMemorySnapshotParams(**(params or {})) - ) + if params is None: + normalized = SandboxMemorySnapshotParams() + elif isinstance(params, SandboxMemorySnapshotParams): + normalized = params + else: + raise TypeError("params must be a SandboxMemorySnapshotParams instance") return await self._service.create_memory_snapshot(self.id, normalized) - async def expose( - self, params: Union[SandboxExposeParams, Dict[str, object]] - ) -> SandboxExposeResult: - normalized = ( - params - if isinstance(params, SandboxExposeParams) - else SandboxExposeParams(**params) - ) - return await self._service.expose(self.id, normalized, runtime=self.runtime) + async def expose(self, params: SandboxExposeParams) -> SandboxExposeResult: + if not isinstance(params, SandboxExposeParams): + raise TypeError("params must be a SandboxExposeParams instance") + return await self._service.expose(self.id, params, runtime=self.runtime) def get_exposed_url(self, port: int) -> str: return 
_build_sandbox_exposed_url(self.runtime, port) @@ -179,13 +175,15 @@ async def create_runtime_session( ) return _copy_model(self._runtime_session) - async def exec(self, input: Union[str, SandboxExecParams, Dict[str, object]]): + async def exec(self, input: Union[str, SandboxExecParams]): if isinstance(input, str): params = SandboxExecParams(command=input) - elif isinstance(input, SandboxExecParams): - params = input else: - params = SandboxExecParams(**input) + if not isinstance(input, SandboxExecParams): + raise TypeError( + "input must be a command string or SandboxExecParams instance" + ) + params = input return await self.processes.exec(params) async def get_process(self, process_id: str) -> SandboxProcessHandle: @@ -269,20 +267,17 @@ def __init__(self, client): None, ) - async def create( - self, params: Union[CreateSandboxParams, Dict[str, object]] - ) -> SandboxHandle: - normalized = ( - params - if isinstance(params, CreateSandboxParams) - else CreateSandboxParams(**params) - ) - detail = await self._create_detail(normalized) + async def create(self, params: CreateSandboxParams) -> SandboxHandle: + if not isinstance(params, CreateSandboxParams): + raise TypeError("params must be a CreateSandboxParams instance") + detail = await self._create_detail(params) return self.attach(detail) async def start_from_snapshot( - self, params: Union[StartSandboxFromSnapshotParams, Dict[str, object]] + self, params: StartSandboxFromSnapshotParams ) -> SandboxHandle: + if not isinstance(params, StartSandboxFromSnapshotParams): + raise TypeError("params must be a StartSandboxFromSnapshotParams instance") return await self.create(params) async def get(self, sandbox_id: str) -> SandboxHandle: diff --git a/hyperbrowser/client/managers/async_manager/sandboxes/sandbox_files.py b/hyperbrowser/client/managers/async_manager/sandboxes/sandbox_files.py index 68fdd89e..d888be33 100644 --- a/hyperbrowser/client/managers/async_manager/sandboxes/sandbox_files.py +++ 
b/hyperbrowser/client/managers/async_manager/sandboxes/sandbox_files.py @@ -5,7 +5,7 @@ import json import socket from datetime import datetime -from typing import AsyncIterator, Callable, Dict, List, Optional, Union +from typing import AsyncIterator, Callable, List, Optional, Union from urllib.parse import urlencode from websockets.asyncio.client import connect as async_ws_connect @@ -346,9 +346,7 @@ async def read_bytes( async def write( self, - path_or_files: Union[ - str, List[Union[SandboxFileWriteEntry, Dict[str, object]]] - ], + path_or_files: Union[str, List[SandboxFileWriteEntry]], data: Optional[Union[str, bytes, bytearray]] = None, ): if isinstance(path_or_files, str): @@ -370,15 +368,12 @@ async def write( encoded_files = [] for entry in path_or_files: - normalized = ( - entry - if isinstance(entry, SandboxFileWriteEntry) - else SandboxFileWriteEntry(**entry) - ) + if not isinstance(entry, SandboxFileWriteEntry): + raise TypeError("files must contain SandboxFileWriteEntry instances") encoded_files.append( { - "path": normalized.path, - **_encode_write_data(normalized.data), + "path": entry.path, + **_encode_write_data(entry.data), } ) @@ -503,7 +498,7 @@ async def delete(self, path: str, *, recursive: Optional[bool] = None) -> None: async def copy( self, - params: Optional[Union[SandboxFileCopyParams, Dict[str, object]]] = None, + params: Optional[SandboxFileCopyParams] = None, *, source: Optional[str] = None, destination: Optional[str] = None, @@ -520,7 +515,7 @@ async def copy( elif isinstance(params, SandboxFileCopyParams): normalized = params else: - normalized = SandboxFileCopyParams(**params) + raise TypeError("params must be a SandboxFileCopyParams instance") payload = await self._transport.request_json( "/sandbox/files/copy", @@ -537,19 +532,22 @@ async def copy( async def chmod( self, - params: Optional[Union[SandboxFileChmodParams, Dict[str, object]]] = None, + params: Optional[SandboxFileChmodParams] = None, *, path: Optional[str] = None, 
mode: Optional[str] = None, recursive: Optional[bool] = None, ) -> None: - normalized = ( - params - if isinstance(params, SandboxFileChmodParams) - else SandboxFileChmodParams( - **(params or {"path": path, "mode": mode, "recursive": recursive}) + if params is None: + normalized = SandboxFileChmodParams( + path=path, + mode=mode, + recursive=recursive, ) - ) + elif isinstance(params, SandboxFileChmodParams): + normalized = params + else: + raise TypeError("params must be a SandboxFileChmodParams instance") await self._transport.request_json( "/sandbox/files/chmod", method="POST", @@ -559,28 +557,24 @@ async def chmod( async def chown( self, - params: Optional[Union[SandboxFileChownParams, Dict[str, object]]] = None, + params: Optional[SandboxFileChownParams] = None, *, path: Optional[str] = None, uid: Optional[int] = None, gid: Optional[int] = None, recursive: Optional[bool] = None, ) -> None: - normalized = ( - params - if isinstance(params, SandboxFileChownParams) - else SandboxFileChownParams( - **( - params - or { - "path": path, - "uid": uid, - "gid": gid, - "recursive": recursive, - } - ) + if params is None: + normalized = SandboxFileChownParams( + path=path, + uid=uid, + gid=gid, + recursive=recursive, ) - ) + elif isinstance(params, SandboxFileChownParams): + normalized = params + else: + raise TypeError("params must be a SandboxFileChownParams instance") await self._transport.request_json( "/sandbox/files/chown", method="POST", diff --git a/hyperbrowser/client/managers/async_manager/sandboxes/sandbox_processes.py b/hyperbrowser/client/managers/async_manager/sandboxes/sandbox_processes.py index 1b2c0303..3743f4bc 100644 --- a/hyperbrowser/client/managers/async_manager/sandboxes/sandbox_processes.py +++ b/hyperbrowser/client/managers/async_manager/sandboxes/sandbox_processes.py @@ -147,34 +147,24 @@ class SandboxProcessesApi: def __init__(self, transport: RuntimeTransport): self._transport = transport - async def exec( - self, input: 
Union[SandboxExecParams, Dict[str, object]] - ) -> SandboxProcessResult: - params = ( - input - if isinstance(input, SandboxExecParams) - else SandboxExecParams(**input) - ) + async def exec(self, input: SandboxExecParams) -> SandboxProcessResult: + if not isinstance(input, SandboxExecParams): + raise TypeError("input must be a SandboxExecParams instance") payload = await self._transport.request_json( "/sandbox/exec", method="POST", - json_body=params.model_dump(exclude_none=True, by_alias=True), + json_body=input.model_dump(exclude_none=True, by_alias=True), headers={"content-type": "application/json"}, ) return SandboxProcessResult(**payload["result"]) - async def start( - self, input: Union[SandboxExecParams, Dict[str, object]] - ) -> SandboxProcessHandle: - params = ( - input - if isinstance(input, SandboxExecParams) - else SandboxExecParams(**input) - ) + async def start(self, input: SandboxExecParams) -> SandboxProcessHandle: + if not isinstance(input, SandboxExecParams): + raise TypeError("input must be a SandboxExecParams instance") payload = await self._transport.request_json( "/sandbox/processes", method="POST", - json_body=params.model_dump(exclude_none=True, by_alias=True), + json_body=input.model_dump(exclude_none=True, by_alias=True), headers={"content-type": "application/json"}, ) return SandboxProcessHandle( diff --git a/hyperbrowser/client/managers/async_manager/sandboxes/sandbox_terminal.py b/hyperbrowser/client/managers/async_manager/sandboxes/sandbox_terminal.py index 4bfcd318..660d79ec 100644 --- a/hyperbrowser/client/managers/async_manager/sandboxes/sandbox_terminal.py +++ b/hyperbrowser/client/managers/async_manager/sandboxes/sandbox_terminal.py @@ -203,17 +203,14 @@ def __init__( async def create( self, - input: Union[SandboxTerminalCreateParams, Dict[str, object]], + input: SandboxTerminalCreateParams, ) -> SandboxTerminalHandle: - params = ( - input - if isinstance(input, SandboxTerminalCreateParams) - else 
SandboxTerminalCreateParams(**input) - ) + if not isinstance(input, SandboxTerminalCreateParams): + raise TypeError("input must be a SandboxTerminalCreateParams instance") payload = await self._transport.request_json( "/sandbox/pty", method="POST", - json_body=params.model_dump(exclude_none=True, by_alias=True), + json_body=input.model_dump(exclude_none=True, by_alias=True), headers={"content-type": "application/json"}, ) return SandboxTerminalHandle( diff --git a/hyperbrowser/client/managers/sync_manager/sandbox.py b/hyperbrowser/client/managers/sync_manager/sandbox.py index 30ba7639..bdd4fcd7 100644 --- a/hyperbrowser/client/managers/sync_manager/sandbox.py +++ b/hyperbrowser/client/managers/sync_manager/sandbox.py @@ -134,24 +134,20 @@ def stop(self) -> BasicResponse: def create_memory_snapshot( self, - params: Optional[Union[SandboxMemorySnapshotParams, Dict[str, object]]] = None, + params: Optional[SandboxMemorySnapshotParams] = None, ) -> SandboxMemorySnapshotResult: - normalized = ( - params - if isinstance(params, SandboxMemorySnapshotParams) - else SandboxMemorySnapshotParams(**(params or {})) - ) + if params is None: + normalized = SandboxMemorySnapshotParams() + elif isinstance(params, SandboxMemorySnapshotParams): + normalized = params + else: + raise TypeError("params must be a SandboxMemorySnapshotParams instance") return self._service.create_memory_snapshot(self.id, normalized) - def expose( - self, params: Union[SandboxExposeParams, Dict[str, object]] - ) -> SandboxExposeResult: - normalized = ( - params - if isinstance(params, SandboxExposeParams) - else SandboxExposeParams(**params) - ) - return self._service.expose(self.id, normalized, runtime=self.runtime) + def expose(self, params: SandboxExposeParams) -> SandboxExposeResult: + if not isinstance(params, SandboxExposeParams): + raise TypeError("params must be a SandboxExposeParams instance") + return self._service.expose(self.id, params, runtime=self.runtime) def get_exposed_url(self, port: int) 
-> str: return _build_sandbox_exposed_url(self.runtime, port) @@ -179,13 +175,15 @@ def create_runtime_session( ) return _copy_model(self._runtime_session) - def exec(self, input: Union[str, SandboxExecParams, Dict[str, object]]): + def exec(self, input: Union[str, SandboxExecParams]): if isinstance(input, str): params = SandboxExecParams(command=input) - elif isinstance(input, SandboxExecParams): - params = input else: - params = SandboxExecParams(**input) + if not isinstance(input, SandboxExecParams): + raise TypeError( + "input must be a command string or SandboxExecParams instance" + ) + params = input return self.processes.exec(params) def get_process(self, process_id: str) -> SandboxProcessHandle: @@ -269,20 +267,17 @@ def __init__(self, client): None, ) - def create( - self, params: Union[CreateSandboxParams, Dict[str, object]] - ) -> SandboxHandle: - normalized = ( - params - if isinstance(params, CreateSandboxParams) - else CreateSandboxParams(**params) - ) - detail = self._create_detail(normalized) + def create(self, params: CreateSandboxParams) -> SandboxHandle: + if not isinstance(params, CreateSandboxParams): + raise TypeError("params must be a CreateSandboxParams instance") + detail = self._create_detail(params) return self.attach(detail) def start_from_snapshot( - self, params: Union[StartSandboxFromSnapshotParams, Dict[str, object]] + self, params: StartSandboxFromSnapshotParams ) -> SandboxHandle: + if not isinstance(params, StartSandboxFromSnapshotParams): + raise TypeError("params must be a StartSandboxFromSnapshotParams instance") return self.create(params) def get(self, sandbox_id: str) -> SandboxHandle: diff --git a/hyperbrowser/client/managers/sync_manager/sandboxes/sandbox_files.py b/hyperbrowser/client/managers/sync_manager/sandboxes/sandbox_files.py index 39e05e94..b700405b 100644 --- a/hyperbrowser/client/managers/sync_manager/sandboxes/sandbox_files.py +++ b/hyperbrowser/client/managers/sync_manager/sandboxes/sandbox_files.py @@ -4,7 
+4,7 @@ import socket import threading from datetime import datetime -from typing import Callable, Dict, List, Optional, Union +from typing import Callable, List, Optional, Union from urllib.parse import urlencode from websockets.exceptions import ConnectionClosed @@ -327,9 +327,7 @@ def read_bytes( def write( self, - path_or_files: Union[ - str, List[Union[SandboxFileWriteEntry, Dict[str, object]]] - ], + path_or_files: Union[str, List[SandboxFileWriteEntry]], data: Optional[Union[str, bytes, bytearray]] = None, ): if isinstance(path_or_files, str): @@ -351,15 +349,12 @@ def write( encoded_files = [] for entry in path_or_files: - normalized = ( - entry - if isinstance(entry, SandboxFileWriteEntry) - else SandboxFileWriteEntry(**entry) - ) + if not isinstance(entry, SandboxFileWriteEntry): + raise TypeError("files must contain SandboxFileWriteEntry instances") encoded_files.append( { - "path": normalized.path, - **_encode_write_data(normalized.data), + "path": entry.path, + **_encode_write_data(entry.data), } ) @@ -484,7 +479,7 @@ def delete(self, path: str, *, recursive: Optional[bool] = None) -> None: def copy( self, - params: Optional[Union[SandboxFileCopyParams, Dict[str, object]]] = None, + params: Optional[SandboxFileCopyParams] = None, *, source: Optional[str] = None, destination: Optional[str] = None, @@ -501,7 +496,7 @@ def copy( elif isinstance(params, SandboxFileCopyParams): normalized = params else: - normalized = SandboxFileCopyParams(**params) + raise TypeError("params must be a SandboxFileCopyParams instance") payload = self._transport.request_json( "/sandbox/files/copy", @@ -518,19 +513,22 @@ def copy( def chmod( self, - params: Optional[Union[SandboxFileChmodParams, Dict[str, object]]] = None, + params: Optional[SandboxFileChmodParams] = None, *, path: Optional[str] = None, mode: Optional[str] = None, recursive: Optional[bool] = None, ) -> None: - normalized = ( - params - if isinstance(params, SandboxFileChmodParams) - else SandboxFileChmodParams( 
- **(params or {"path": path, "mode": mode, "recursive": recursive}) + if params is None: + normalized = SandboxFileChmodParams( + path=path, + mode=mode, + recursive=recursive, ) - ) + elif isinstance(params, SandboxFileChmodParams): + normalized = params + else: + raise TypeError("params must be a SandboxFileChmodParams instance") self._transport.request_json( "/sandbox/files/chmod", method="POST", @@ -540,28 +538,24 @@ def chmod( def chown( self, - params: Optional[Union[SandboxFileChownParams, Dict[str, object]]] = None, + params: Optional[SandboxFileChownParams] = None, *, path: Optional[str] = None, uid: Optional[int] = None, gid: Optional[int] = None, recursive: Optional[bool] = None, ) -> None: - normalized = ( - params - if isinstance(params, SandboxFileChownParams) - else SandboxFileChownParams( - **( - params - or { - "path": path, - "uid": uid, - "gid": gid, - "recursive": recursive, - } - ) + if params is None: + normalized = SandboxFileChownParams( + path=path, + uid=uid, + gid=gid, + recursive=recursive, ) - ) + elif isinstance(params, SandboxFileChownParams): + normalized = params + else: + raise TypeError("params must be a SandboxFileChownParams instance") self._transport.request_json( "/sandbox/files/chown", method="POST", diff --git a/hyperbrowser/client/managers/sync_manager/sandboxes/sandbox_processes.py b/hyperbrowser/client/managers/sync_manager/sandboxes/sandbox_processes.py index 36ec4127..13dfc16c 100644 --- a/hyperbrowser/client/managers/sync_manager/sandboxes/sandbox_processes.py +++ b/hyperbrowser/client/managers/sync_manager/sandboxes/sandbox_processes.py @@ -143,34 +143,24 @@ class SandboxProcessesApi: def __init__(self, transport: RuntimeTransport): self._transport = transport - def exec( - self, input: Union[SandboxExecParams, Dict[str, object]] - ) -> SandboxProcessResult: - params = ( - input - if isinstance(input, SandboxExecParams) - else SandboxExecParams(**input) - ) + def exec(self, input: SandboxExecParams) -> 
SandboxProcessResult: + if not isinstance(input, SandboxExecParams): + raise TypeError("input must be a SandboxExecParams instance") payload = self._transport.request_json( "/sandbox/exec", method="POST", - json_body=params.model_dump(exclude_none=True, by_alias=True), + json_body=input.model_dump(exclude_none=True, by_alias=True), headers={"content-type": "application/json"}, ) return SandboxProcessResult(**payload["result"]) - def start( - self, input: Union[SandboxExecParams, Dict[str, object]] - ) -> SandboxProcessHandle: - params = ( - input - if isinstance(input, SandboxExecParams) - else SandboxExecParams(**input) - ) + def start(self, input: SandboxExecParams) -> SandboxProcessHandle: + if not isinstance(input, SandboxExecParams): + raise TypeError("input must be a SandboxExecParams instance") payload = self._transport.request_json( "/sandbox/processes", method="POST", - json_body=params.model_dump(exclude_none=True, by_alias=True), + json_body=input.model_dump(exclude_none=True, by_alias=True), headers={"content-type": "application/json"}, ) return SandboxProcessHandle( diff --git a/hyperbrowser/client/managers/sync_manager/sandboxes/sandbox_terminal.py b/hyperbrowser/client/managers/sync_manager/sandboxes/sandbox_terminal.py index ccc984e6..59c858eb 100644 --- a/hyperbrowser/client/managers/sync_manager/sandboxes/sandbox_terminal.py +++ b/hyperbrowser/client/managers/sync_manager/sandboxes/sandbox_terminal.py @@ -201,17 +201,14 @@ def __init__( def create( self, - input: Union[SandboxTerminalCreateParams, Dict[str, object]], + input: SandboxTerminalCreateParams, ) -> SandboxTerminalHandle: - params = ( - input - if isinstance(input, SandboxTerminalCreateParams) - else SandboxTerminalCreateParams(**input) - ) + if not isinstance(input, SandboxTerminalCreateParams): + raise TypeError("input must be a SandboxTerminalCreateParams instance") payload = self._transport.request_json( "/sandbox/pty", method="POST", - json_body=params.model_dump(exclude_none=True, 
by_alias=True), + json_body=input.model_dump(exclude_none=True, by_alias=True), headers={"content-type": "application/json"}, ) return SandboxTerminalHandle( diff --git a/tests/sandbox/e2e/test_async_expose.py b/tests/sandbox/e2e/test_async_expose.py index d48ecdc3..0e8d69ca 100644 --- a/tests/sandbox/e2e/test_async_expose.py +++ b/tests/sandbox/e2e/test_async_expose.py @@ -2,6 +2,8 @@ import pytest +from hyperbrowser.models import SandboxExecParams, SandboxExposeParams + from tests.helpers.config import create_async_client from tests.helpers.errors import expect_hyperbrowser_error_async from tests.helpers.http import fetch_runtime_url @@ -14,7 +16,9 @@ HTTP_PORT = 3210 -async def _wait_for_http_response(url: str, *, headers=None, predicate, attempts: int = 15): +async def _wait_for_http_response( + url: str, *, headers=None, predicate, attempts: int = 15 +): last_status = 0 last_body = "" @@ -45,13 +49,15 @@ async def test_async_sandbox_expose_e2e(): server_process = None try: - sandbox = await client.sandboxes.create(default_sandbox_params("py-async-expose")) + sandbox = await client.sandboxes.create( + default_sandbox_params("py-async-expose") + ) await wait_for_runtime_ready_async(sandbox) server_process = await sandbox.processes.start( - { - "command": "node", - "args": [ + SandboxExecParams( + command="node", + args=[ "-e", " ".join( [ @@ -69,7 +75,7 @@ async def test_async_sandbox_expose_e2e(): ] ), ], - } + ) ) token = sandbox.to_dict()["token"] @@ -82,14 +88,14 @@ async def test_async_sandbox_expose_e2e(): await expect_hyperbrowser_error_async( "reserved receiver port expose", - lambda: sandbox.expose({"port": 4001}), + lambda: sandbox.expose(SandboxExposeParams(port=4001)), status_code=400, service="control", retryable=False, message_includes="cannot be exposed", ) - exposure = await sandbox.expose({"port": HTTP_PORT, "auth": False}) + exposure = await sandbox.expose(SandboxExposeParams(port=HTTP_PORT, auth=False)) assert exposure.port == HTTP_PORT assert 
exposure.auth is False assert exposure.url == sandbox.get_exposed_url(HTTP_PORT) @@ -103,7 +109,7 @@ async def test_async_sandbox_expose_e2e(): assert status == 200 assert "sdk-exposed:GET:/" in body - exposure = await sandbox.expose({"port": HTTP_PORT, "auth": True}) + exposure = await sandbox.expose(SandboxExposeParams(port=HTTP_PORT, auth=True)) assert exposure.auth is True status, _ = await _wait_for_http_response( diff --git a/tests/sandbox/e2e/test_async_lifecycle.py b/tests/sandbox/e2e/test_async_lifecycle.py index 17b868fd..074cccbb 100644 --- a/tests/sandbox/e2e/test_async_lifecycle.py +++ b/tests/sandbox/e2e/test_async_lifecycle.py @@ -5,7 +5,7 @@ import pytest from hyperbrowser.exceptions import HyperbrowserError -from hyperbrowser.models import SandboxRuntimeSession +from hyperbrowser.models import CreateSandboxParams, SandboxRuntimeSession from tests.helpers.config import DEFAULT_IMAGE_NAME, create_async_client from tests.helpers.errors import expect_hyperbrowser_error_async @@ -21,7 +21,7 @@ SNAPSHOT_CREATE_RETRY_TIMEOUT_SECONDS = 60 -async def _create_sandbox_with_snapshot_retry(client, params): +async def _create_sandbox_with_snapshot_retry(client, params: CreateSandboxParams): deadline = asyncio.get_running_loop().time() + SNAPSHOT_CREATE_RETRY_TIMEOUT_SECONDS last_error = None @@ -131,7 +131,7 @@ async def patched_get_detail(sandbox_id: str): sandbox._service.get_detail = original_get_detail image_sandbox = await client.sandboxes.create( - {"image_name": DEFAULT_IMAGE_NAME} + CreateSandboxParams(image_name=DEFAULT_IMAGE_NAME) ) assert image_sandbox.id assert image_sandbox.status == "active" @@ -140,10 +140,10 @@ async def patched_get_detail(sandbox_id: str): assert image_sandbox.status == "closed" custom_image_sandbox = await client.sandboxes.create( - { - "image_name": custom_image["imageName"], - "image_id": custom_image["id"], - } + CreateSandboxParams( + image_name=custom_image["imageName"], + image_id=custom_image["id"], + ) ) assert 
custom_image_sandbox.id assert custom_image_sandbox.status == "active" @@ -158,10 +158,10 @@ async def patched_get_detail(sandbox_id: str): custom_snapshot_sandbox = await _create_sandbox_with_snapshot_retry( client, - { - "snapshot_name": custom_image_memory_snapshot.snapshot_name, - "snapshot_id": custom_image_memory_snapshot.snapshot_id, - }, + CreateSandboxParams( + snapshot_name=custom_image_memory_snapshot.snapshot_name, + snapshot_id=custom_image_memory_snapshot.snapshot_id, + ), ) assert custom_snapshot_sandbox.id assert custom_snapshot_sandbox.status == "active" @@ -172,10 +172,10 @@ async def patched_get_detail(sandbox_id: str): await expect_hyperbrowser_error_async( "mismatched image selector", lambda: client.sandboxes.create( - { - "image_name": custom_image["imageName"], - "image_id": str(uuid4()), - } + CreateSandboxParams( + image_name=custom_image["imageName"], + image_id=str(uuid4()), + ) ), status_code=404, service="control", @@ -186,10 +186,10 @@ async def patched_get_detail(sandbox_id: str): await expect_hyperbrowser_error_async( "mismatched snapshot selector", lambda: client.sandboxes.create( - { - "snapshot_name": memory_snapshot.snapshot_name, - "snapshot_id": str(uuid4()), - } + CreateSandboxParams( + snapshot_name=memory_snapshot.snapshot_name, + snapshot_id=str(uuid4()), + ) ), status_code=404, service="control", @@ -261,10 +261,10 @@ async def patched_get_detail(sandbox_id: str): secondary = await _create_sandbox_with_snapshot_retry( client, - { - "snapshot_name": memory_snapshot.snapshot_name, - "snapshot_id": memory_snapshot.snapshot_id, - }, + CreateSandboxParams( + snapshot_name=memory_snapshot.snapshot_name, + snapshot_id=memory_snapshot.snapshot_id, + ), ) response = await secondary.stop() assert response.success is True diff --git a/tests/sandbox/e2e/test_async_process.py b/tests/sandbox/e2e/test_async_process.py index 44401d8b..6c5c4eaa 100644 --- a/tests/sandbox/e2e/test_async_process.py +++ 
b/tests/sandbox/e2e/test_async_process.py @@ -1,5 +1,7 @@ import pytest +from hyperbrowser.models import SandboxExecParams + from tests.helpers.config import create_async_client from tests.helpers.errors import expect_hyperbrowser_error_async from tests.helpers.sandbox import ( @@ -24,7 +26,9 @@ async def test_async_sandbox_process_e2e(): sandbox = None try: - sandbox = await client.sandboxes.create(default_sandbox_params("py-async-process")) + sandbox = await client.sandboxes.create( + default_sandbox_params("py-async-process") + ) await wait_for_runtime_ready_async(sandbox) result = await sandbox.exec("echo process-exec-ok") @@ -32,19 +36,19 @@ async def test_async_sandbox_process_e2e(): assert "process-exec-ok" in result.stdout result = await sandbox.exec( - { - "command": "bash", - "args": ["-lc", "echo process-exec-fail 1>&2; exit 7"], - } + SandboxExecParams( + command="bash", + args=["-lc", "echo process-exec-fail 1>&2; exit 7"], + ) ) assert result.exit_code == 7 assert "process-exec-fail" in result.stderr stdin_process = await sandbox.processes.start( - { - "command": "bash", - "args": ["-lc", "read line; echo stdout:$line; echo stderr:$line 1>&2"], - } + SandboxExecParams( + command="bash", + args=["-lc", "read line; echo stdout:$line; echo stderr:$line 1>&2"], + ) ) fetched = await sandbox.get_process(stdin_process.id) assert fetched.id == stdin_process.id @@ -59,7 +63,7 @@ async def test_async_sandbox_process_e2e(): assert "stderr:sdk-stdin" in result.stderr running_process = await sandbox.processes.start( - {"command": "bash", "args": ["-lc", "sleep 30"]} + SandboxExecParams(command="bash", args=["-lc", "sleep 30"]) ) refreshed = await running_process.refresh() assert refreshed.status in {"queued", "running"} @@ -67,10 +71,10 @@ async def test_async_sandbox_process_e2e(): assert result.status not in {"queued", "running"} streamed = await sandbox.processes.start( - { - "command": "bash", - "args": ["-lc", "echo stream-out; echo stream-err 1>&2"], - } + 
SandboxExecParams( + command="bash", + args=["-lc", "echo stream-out; echo stream-err 1>&2"], + ) ) events = await _collect_process_stream(streamed.stream()) assert any( @@ -82,20 +86,20 @@ async def test_async_sandbox_process_e2e(): assert any(event.type == "exit" for event in events) result_process = await sandbox.processes.start( - {"command": "bash", "args": ["-lc", "echo result-alias-ok"]} + SandboxExecParams(command="bash", args=["-lc", "echo result-alias-ok"]) ) result = await result_process.result() assert result.exit_code == 0 assert "result-alias-ok" in result.stdout noisy_process = await sandbox.processes.start( - { - "command": "bash", - "args": [ + SandboxExecParams( + command="bash", + args=[ "-lc", 'yes "process-replay-window-overflow-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" | head -n 120000', ], - } + ) ) result = await noisy_process.result() assert len(result.stdout) > 3 * 1024 * 1024 @@ -111,7 +115,7 @@ async def test_async_sandbox_process_e2e(): ) timeout_process = await sandbox.processes.start( - {"command": "bash", "args": ["-lc", "sleep 10"]} + SandboxExecParams(command="bash", args=["-lc", "sleep 10"]) ) await expect_hyperbrowser_error_async( "process wait timeout", @@ -126,7 +130,7 @@ async def test_async_sandbox_process_e2e(): assert result.status in {"exited", "failed", "killed", "timed_out"} kill_process = await sandbox.processes.start( - {"command": "bash", "args": ["-lc", "sleep 30"]} + SandboxExecParams(command="bash", args=["-lc", "sleep 30"]) ) result = await kill_process.kill() assert result.status not in {"queued", "running"} diff --git a/tests/sandbox/e2e/test_async_terminal_smoke.py b/tests/sandbox/e2e/test_async_terminal_smoke.py index c986c7f3..3518317f 100644 --- a/tests/sandbox/e2e/test_async_terminal_smoke.py +++ b/tests/sandbox/e2e/test_async_terminal_smoke.py @@ -2,6 +2,8 @@ import pytest +from hyperbrowser.models import SandboxTerminalCreateParams + from tests.helpers.config import 
create_async_client from tests.helpers.errors import expect_hyperbrowser_error_async from tests.helpers.sandbox import ( @@ -30,9 +32,9 @@ def _terminal_status_output(status) -> str: def _terminal_status_raw_output(status) -> str: - return b"".join(chunk.raw for chunk in ((status.output if status else None) or [])).decode( - "utf-8" - ) + return b"".join( + chunk.raw for chunk in ((status.output if status else None) or []) + ).decode("utf-8") async def _wait_for_terminal_status_output( @@ -61,18 +63,20 @@ async def test_async_sandbox_terminal_e2e(): sandbox = None try: - sandbox = await client.sandboxes.create(default_sandbox_params("py-async-terminal")) + sandbox = await client.sandboxes.create( + default_sandbox_params("py-async-terminal") + ) await wait_for_runtime_ready_async(sandbox) assert sandbox.pty is sandbox.terminal terminal = await sandbox.terminal.create( - { - "command": "bash", - "args": ["-l"], - "rows": 24, - "cols": 80, - } + SandboxTerminalCreateParams( + command="bash", + args=["-l"], + rows=24, + cols=80, + ) ) fetched = await sandbox.terminal.get(terminal.id) assert fetched.id == terminal.id @@ -95,12 +99,12 @@ async def test_async_sandbox_terminal_e2e(): assert status.exit_code == 0 terminal = await sandbox.terminal.create( - { - "command": "bash", - "args": ["-l"], - "rows": 24, - "cols": 80, - } + SandboxTerminalCreateParams( + command="bash", + args=["-l"], + rows=24, + cols=80, + ) ) connection = await terminal.attach() try: @@ -120,12 +124,12 @@ async def test_async_sandbox_terminal_e2e(): marker = "terminal-get-output" terminal = await sandbox.terminal.create( - { - "command": "bash", - "args": ["-lc", f"printf '{marker}' && sleep 1"], - "rows": 24, - "cols": 80, - } + SandboxTerminalCreateParams( + command="bash", + args=["-lc", f"printf '{marker}' && sleep 1"], + rows=24, + cols=80, + ) ) without_output = await sandbox.terminal.get(terminal.id) assert without_output.current.output is None @@ -142,12 +146,12 @@ async def 
test_async_sandbox_terminal_e2e(): marker = "terminal-refresh-output" terminal = await sandbox.terminal.create( - { - "command": "bash", - "args": ["-lc", f"printf '{marker}' && sleep 1"], - "rows": 24, - "cols": 80, - } + SandboxTerminalCreateParams( + command="bash", + args=["-lc", f"printf '{marker}' && sleep 1"], + rows=24, + cols=80, + ) ) without_output = await terminal.refresh() assert without_output.current.output is None @@ -164,12 +168,12 @@ async def test_async_sandbox_terminal_e2e(): marker = "terminal-wait-output" terminal = await sandbox.terminal.create( - { - "command": "bash", - "args": ["-lc", f"printf '{marker}'"], - "rows": 24, - "cols": 80, - } + SandboxTerminalCreateParams( + command="bash", + args=["-lc", f"printf '{marker}'"], + rows=24, + cols=80, + ) ) status = await terminal.wait(timeout_ms=2000, include_output=True) assert status.running is False @@ -179,12 +183,12 @@ async def test_async_sandbox_terminal_e2e(): assert status.output timeout_terminal = await sandbox.pty.create( - { - "command": "bash", - "args": ["-lc", "sleep 10"], - "rows": 24, - "cols": 80, - } + SandboxTerminalCreateParams( + command="bash", + args=["-lc", "sleep 10"], + rows=24, + cols=80, + ) ) await expect_hyperbrowser_error_async( "terminal wait timeout", @@ -200,12 +204,12 @@ async def test_async_sandbox_terminal_e2e(): assert status.running is False kill_terminal = await sandbox.pty.create( - { - "command": "bash", - "args": ["-lc", "sleep 30"], - "rows": 24, - "cols": 80, - } + SandboxTerminalCreateParams( + command="bash", + args=["-lc", "sleep 30"], + rows=24, + cols=80, + ) ) status = await kill_terminal.kill() assert status.running is False @@ -224,8 +228,12 @@ async def test_async_sandbox_terminal_e2e(): await client.close() -async def _get_terminal_status(sandbox, terminal_id: str, *, include_output: bool = False): - return (await sandbox.terminal.get(terminal_id, include_output=include_output)).current +async def _get_terminal_status( + sandbox, 
terminal_id: str, *, include_output: bool = False +): + return ( + await sandbox.terminal.get(terminal_id, include_output=include_output) + ).current async def _refresh_terminal_status(terminal, *, include_output: bool = False): diff --git a/tests/sandbox/e2e/test_expose.py b/tests/sandbox/e2e/test_expose.py index e2956df7..34fbcff5 100644 --- a/tests/sandbox/e2e/test_expose.py +++ b/tests/sandbox/e2e/test_expose.py @@ -1,5 +1,7 @@ import time +from hyperbrowser.models import SandboxExecParams, SandboxExposeParams + from tests.helpers.config import create_client from tests.helpers.errors import expect_hyperbrowser_error from tests.helpers.http import fetch_runtime_url @@ -46,9 +48,9 @@ def test_sandbox_expose_e2e(): wait_for_runtime_ready(sandbox) server_process = sandbox.processes.start( - { - "command": "node", - "args": [ + SandboxExecParams( + command="node", + args=[ "-e", " ".join( [ @@ -66,7 +68,7 @@ def test_sandbox_expose_e2e(): ] ), ], - } + ) ) token = sandbox.to_dict()["token"] @@ -79,14 +81,14 @@ def test_sandbox_expose_e2e(): expect_hyperbrowser_error( "reserved receiver port expose", - lambda: sandbox.expose({"port": 4001}), + lambda: sandbox.expose(SandboxExposeParams(port=4001)), status_code=400, service="control", retryable=False, message_includes="cannot be exposed", ) - exposure = sandbox.expose({"port": HTTP_PORT, "auth": False}) + exposure = sandbox.expose(SandboxExposeParams(port=HTTP_PORT, auth=False)) assert exposure.port == HTTP_PORT assert exposure.auth is False assert exposure.url == sandbox.get_exposed_url(HTTP_PORT) @@ -100,7 +102,7 @@ def test_sandbox_expose_e2e(): assert status == 200 assert "sdk-exposed:GET:/" in body - exposure = sandbox.expose({"port": HTTP_PORT, "auth": True}) + exposure = sandbox.expose(SandboxExposeParams(port=HTTP_PORT, auth=True)) assert exposure.auth is True status, _ = _wait_for_http_response( diff --git a/tests/sandbox/e2e/test_lifecycle.py b/tests/sandbox/e2e/test_lifecycle.py index 3201e3f5..b7e5ffbf 
100644 --- a/tests/sandbox/e2e/test_lifecycle.py +++ b/tests/sandbox/e2e/test_lifecycle.py @@ -3,7 +3,7 @@ from uuid import uuid4 from hyperbrowser.exceptions import HyperbrowserError -from hyperbrowser.models import SandboxRuntimeSession +from hyperbrowser.models import CreateSandboxParams, SandboxRuntimeSession from tests.helpers.config import DEFAULT_IMAGE_NAME, create_client from tests.helpers.errors import expect_hyperbrowser_error @@ -21,7 +21,7 @@ SNAPSHOT_CREATE_RETRY_TIMEOUT_SECONDS = 60 -def _create_sandbox_with_snapshot_retry(params): +def _create_sandbox_with_snapshot_retry(params: CreateSandboxParams): deadline = time.monotonic() + SNAPSHOT_CREATE_RETRY_TIMEOUT_SECONDS last_error = None @@ -126,7 +126,9 @@ def patched_get_detail(sandbox_id: str): finally: sandbox._service.get_detail = original_get_detail - image_sandbox = client.sandboxes.create({"image_name": DEFAULT_IMAGE_NAME}) + image_sandbox = client.sandboxes.create( + CreateSandboxParams(image_name=DEFAULT_IMAGE_NAME) + ) assert image_sandbox.id assert image_sandbox.status == "active" response = image_sandbox.stop() @@ -134,10 +136,10 @@ def patched_get_detail(sandbox_id: str): assert image_sandbox.status == "closed" custom_image_sandbox = client.sandboxes.create( - { - "image_name": custom_image["imageName"], - "image_id": custom_image["id"], - } + CreateSandboxParams( + image_name=custom_image["imageName"], + image_id=custom_image["id"], + ) ) assert custom_image_sandbox.id assert custom_image_sandbox.status == "active" @@ -149,10 +151,10 @@ def patched_get_detail(sandbox_id: str): assert custom_image_memory_snapshot.image_namespace == custom_image["namespace"] custom_snapshot_sandbox = _create_sandbox_with_snapshot_retry( - { - "snapshot_name": custom_image_memory_snapshot.snapshot_name, - "snapshot_id": custom_image_memory_snapshot.snapshot_id, - } + CreateSandboxParams( + snapshot_name=custom_image_memory_snapshot.snapshot_name, + snapshot_id=custom_image_memory_snapshot.snapshot_id, + ) ) 
assert custom_snapshot_sandbox.id assert custom_snapshot_sandbox.status == "active" @@ -163,10 +165,10 @@ def patched_get_detail(sandbox_id: str): expect_hyperbrowser_error( "mismatched image selector", lambda: client.sandboxes.create( - { - "image_name": custom_image["imageName"], - "image_id": str(uuid4()), - } + CreateSandboxParams( + image_name=custom_image["imageName"], + image_id=str(uuid4()), + ) ), status_code=404, service="control", @@ -177,10 +179,10 @@ def patched_get_detail(sandbox_id: str): expect_hyperbrowser_error( "mismatched snapshot selector", lambda: client.sandboxes.create( - { - "snapshot_name": memory_snapshot.snapshot_name, - "snapshot_id": str(uuid4()), - } + CreateSandboxParams( + snapshot_name=memory_snapshot.snapshot_name, + snapshot_id=str(uuid4()), + ) ), status_code=404, service="control", @@ -251,10 +253,10 @@ def patched_get_detail(sandbox_id: str): ) secondary = _create_sandbox_with_snapshot_retry( - { - "snapshot_name": memory_snapshot.snapshot_name, - "snapshot_id": memory_snapshot.snapshot_id, - } + CreateSandboxParams( + snapshot_name=memory_snapshot.snapshot_name, + snapshot_id=memory_snapshot.snapshot_id, + ) ) response = secondary.stop() assert response.success is True diff --git a/tests/sandbox/e2e/test_process.py b/tests/sandbox/e2e/test_process.py index 74ffaa9f..31c6668a 100644 --- a/tests/sandbox/e2e/test_process.py +++ b/tests/sandbox/e2e/test_process.py @@ -1,3 +1,5 @@ +from hyperbrowser.models import SandboxExecParams + from tests.helpers.config import create_client from tests.helpers.errors import expect_hyperbrowser_error from tests.helpers.sandbox import ( @@ -30,19 +32,19 @@ def test_sandbox_process_e2e(): assert "process-exec-ok" in result.stdout result = sandbox.exec( - { - "command": "bash", - "args": ["-lc", "echo process-exec-fail 1>&2; exit 7"], - } + SandboxExecParams( + command="bash", + args=["-lc", "echo process-exec-fail 1>&2; exit 7"], + ) ) assert result.exit_code == 7 assert "process-exec-fail" in 
result.stderr stdin_process = sandbox.processes.start( - { - "command": "bash", - "args": ["-lc", "read line; echo stdout:$line; echo stderr:$line 1>&2"], - } + SandboxExecParams( + command="bash", + args=["-lc", "read line; echo stdout:$line; echo stderr:$line 1>&2"], + ) ) fetched = sandbox.get_process(stdin_process.id) assert fetched.id == stdin_process.id @@ -57,7 +59,7 @@ def test_sandbox_process_e2e(): assert "stderr:sdk-stdin" in result.stderr running_process = sandbox.processes.start( - {"command": "bash", "args": ["-lc", "sleep 30"]} + SandboxExecParams(command="bash", args=["-lc", "sleep 30"]) ) refreshed = running_process.refresh() assert refreshed.status in {"queued", "running"} @@ -65,10 +67,10 @@ def test_sandbox_process_e2e(): assert result.status not in {"queued", "running"} streamed = sandbox.processes.start( - { - "command": "bash", - "args": ["-lc", "echo stream-out; echo stream-err 1>&2"], - } + SandboxExecParams( + command="bash", + args=["-lc", "echo stream-out; echo stream-err 1>&2"], + ) ) events = _collect_process_stream(streamed.stream()) assert any( @@ -80,20 +82,20 @@ def test_sandbox_process_e2e(): assert any(event.type == "exit" for event in events) result_process = sandbox.processes.start( - {"command": "bash", "args": ["-lc", "echo result-alias-ok"]} + SandboxExecParams(command="bash", args=["-lc", "echo result-alias-ok"]) ) result = result_process.result() assert result.exit_code == 0 assert "result-alias-ok" in result.stdout noisy_process = sandbox.processes.start( - { - "command": "bash", - "args": [ + SandboxExecParams( + command="bash", + args=[ "-lc", 'yes "process-replay-window-overflow-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" | head -n 120000', ], - } + ) ) result = noisy_process.result() assert len(result.stdout) > 3 * 1024 * 1024 @@ -109,7 +111,7 @@ def test_sandbox_process_e2e(): ) timeout_process = sandbox.processes.start( - {"command": "bash", "args": ["-lc", "sleep 10"]} + 
SandboxExecParams(command="bash", args=["-lc", "sleep 10"]) ) expect_hyperbrowser_error( "process wait timeout", @@ -124,7 +126,7 @@ def test_sandbox_process_e2e(): assert result.status in {"exited", "failed", "killed", "timed_out"} kill_process = sandbox.processes.start( - {"command": "bash", "args": ["-lc", "sleep 30"]} + SandboxExecParams(command="bash", args=["-lc", "sleep 30"]) ) result = kill_process.kill() assert result.status not in {"queued", "running"} diff --git a/tests/sandbox/e2e/test_terminal_smoke.py b/tests/sandbox/e2e/test_terminal_smoke.py index 34f681e7..02e8a57e 100644 --- a/tests/sandbox/e2e/test_terminal_smoke.py +++ b/tests/sandbox/e2e/test_terminal_smoke.py @@ -1,5 +1,7 @@ import time +from hyperbrowser.models import SandboxTerminalCreateParams + from tests.helpers.config import create_client from tests.helpers.errors import expect_hyperbrowser_error from tests.helpers.sandbox import ( @@ -30,12 +32,14 @@ def _terminal_status_output(status) -> str: def _terminal_status_raw_output(status) -> str: - return b"".join(chunk.raw for chunk in ((status.output if status else None) or [])).decode( - "utf-8" - ) + return b"".join( + chunk.raw for chunk in ((status.output if status else None) or []) + ).decode("utf-8") -def _wait_for_terminal_status_output(read_status, marker: str, timeout_seconds: float = 5.0): +def _wait_for_terminal_status_output( + read_status, marker: str, timeout_seconds: float = 5.0 +): deadline = time.monotonic() + timeout_seconds last_status = None @@ -61,12 +65,12 @@ def test_sandbox_terminal_e2e(): assert sandbox.pty is sandbox.terminal terminal = sandbox.terminal.create( - { - "command": "bash", - "args": ["-l"], - "rows": 24, - "cols": 80, - } + SandboxTerminalCreateParams( + command="bash", + args=["-l"], + rows=24, + cols=80, + ) ) fetched = sandbox.terminal.get(terminal.id) assert fetched.id == terminal.id @@ -89,12 +93,12 @@ def test_sandbox_terminal_e2e(): assert status.exit_code == 0 terminal = 
sandbox.terminal.create( - { - "command": "bash", - "args": ["-l"], - "rows": 24, - "cols": 80, - } + SandboxTerminalCreateParams( + command="bash", + args=["-l"], + rows=24, + cols=80, + ) ) connection = terminal.attach() try: @@ -114,12 +118,12 @@ def test_sandbox_terminal_e2e(): marker = "terminal-get-output" terminal = sandbox.terminal.create( - { - "command": "bash", - "args": ["-lc", f"printf '{marker}' && sleep 1"], - "rows": 24, - "cols": 80, - } + SandboxTerminalCreateParams( + command="bash", + args=["-lc", f"printf '{marker}' && sleep 1"], + rows=24, + cols=80, + ) ) without_output = sandbox.terminal.get(terminal.id) assert without_output.current.output is None @@ -136,12 +140,12 @@ def test_sandbox_terminal_e2e(): marker = "terminal-refresh-output" terminal = sandbox.terminal.create( - { - "command": "bash", - "args": ["-lc", f"printf '{marker}' && sleep 1"], - "rows": 24, - "cols": 80, - } + SandboxTerminalCreateParams( + command="bash", + args=["-lc", f"printf '{marker}' && sleep 1"], + rows=24, + cols=80, + ) ) without_output = terminal.refresh() assert without_output.current.output is None @@ -158,12 +162,12 @@ def test_sandbox_terminal_e2e(): marker = "terminal-wait-output" terminal = sandbox.terminal.create( - { - "command": "bash", - "args": ["-lc", f"printf '{marker}'"], - "rows": 24, - "cols": 80, - } + SandboxTerminalCreateParams( + command="bash", + args=["-lc", f"printf '{marker}'"], + rows=24, + cols=80, + ) ) status = terminal.wait(timeout_ms=2000, include_output=True) assert status.running is False @@ -173,12 +177,12 @@ def test_sandbox_terminal_e2e(): assert status.output timeout_terminal = sandbox.pty.create( - { - "command": "bash", - "args": ["-lc", "sleep 10"], - "rows": 24, - "cols": 80, - } + SandboxTerminalCreateParams( + command="bash", + args=["-lc", "sleep 10"], + rows=24, + cols=80, + ) ) expect_hyperbrowser_error( "terminal wait timeout", @@ -194,12 +198,12 @@ def test_sandbox_terminal_e2e(): assert status.running is False 
kill_terminal = sandbox.pty.create( - { - "command": "bash", - "args": ["-lc", "sleep 30"], - "rows": 24, - "cols": 80, - } + SandboxTerminalCreateParams( + command="bash", + args=["-lc", "sleep 30"], + rows=24, + cols=80, + ) ) status = kill_terminal.kill() assert status.running is False From aa4e786dac19b00bd4861e708eda299a30558078 Mon Sep 17 00:00:00 2001 From: Devin Deng Date: Thu, 12 Mar 2026 07:37:45 +0000 Subject: [PATCH 08/10] fix tests --- tests/sandbox/e2e/test_async_files.py | 272 +++++++++++++------------ tests/sandbox/e2e/test_async_sudo.py | 55 +++--- tests/sandbox/e2e/test_files.py | 273 +++++++++++++------------- tests/sandbox/e2e/test_sudo.py | 55 +++--- 4 files changed, 331 insertions(+), 324 deletions(-) diff --git a/tests/sandbox/e2e/test_async_files.py b/tests/sandbox/e2e/test_async_files.py index 85c08eca..ba2b3103 100644 --- a/tests/sandbox/e2e/test_async_files.py +++ b/tests/sandbox/e2e/test_async_files.py @@ -2,6 +2,8 @@ import pytest +from hyperbrowser.models import SandboxExecParams, SandboxFileWriteEntry + from tests.helpers.config import create_async_client, make_test_name from tests.helpers.errors import expect_hyperbrowser_error_async from tests.helpers.http import fetch_signed_url @@ -16,6 +18,10 @@ def _read_stream_text(stream) -> str: return stream.read().decode("utf-8") +def _bash_exec(command: str) -> SandboxExecParams: + return SandboxExecParams(command="bash", args=["-lc", command]) + + async def _await_future(future: asyncio.Future, timeout: float = 10.0): return await asyncio.wait_for(future, timeout=timeout) @@ -27,20 +33,16 @@ async def _create_parent_symlink_escape_fixture(sandbox, base_dir: str, name: st link_dir = f"{allowed_dir}/evil" escaped_file = f"{link_dir}/secret.txt" setup = await sandbox.exec( - { - "command": "bash", - "args": [ - "-lc", - " && ".join( - [ - f'mkdir -p "{allowed_dir}"', - f'mkdir -p "{outside_dir}"', - f'printf "outside secret" > "{outside_file}"', - f'ln -sfn "{outside_dir}" "{link_dir}"', - ] 
- ), - ], - } + _bash_exec( + " && ".join( + [ + f'mkdir -p "{allowed_dir}"', + f'mkdir -p "{outside_dir}"', + f'printf "outside secret" > "{outside_file}"', + f'ln -sfn "{outside_dir}" "{link_dir}"', + ] + ) + ) ) assert setup.exit_code == 0 return { @@ -59,7 +61,9 @@ async def test_async_sandbox_files_e2e(): base_dir = f"/tmp/{make_test_name('py-async-files')}" try: - sandbox = await client.sandboxes.create(default_sandbox_params("py-async-files")) + sandbox = await client.sandboxes.create( + default_sandbox_params("py-async-files") + ) await wait_for_runtime_ready_async(sandbox) assert await sandbox.files.exists(f"{base_dir}/missing.txt") is False @@ -106,9 +110,7 @@ async def test_async_sandbox_files_e2e(): link = f"{symlink_dir}/link.txt" await sandbox.files.make_dir(symlink_dir) await sandbox.files.write_text(target, "payload") - result = await sandbox.exec( - {"command": "bash", "args": ["-lc", f'ln -sfn "{target}" "{link}"']} - ) + result = await sandbox.exec(_bash_exec(f'ln -sfn "{target}" "{link}"')) assert result.exit_code == 0 link_entry = next( entry @@ -121,53 +123,71 @@ async def test_async_sandbox_files_e2e(): symlink_link = f"{base_dir}/symlink/link.txt" await sandbox.files.write_text(symlink_target, "target") result = await sandbox.exec( - { - "command": "bash", - "args": [ - "-lc", - f'mkdir -p "{base_dir}/symlink" && ln -sfn "{symlink_target}" "{symlink_link}"', - ], - } + _bash_exec( + f'mkdir -p "{base_dir}/symlink" && ln -sfn "{symlink_target}" "{symlink_link}"' + ) ) assert result.exit_code == 0 - assert (await sandbox.files.get_info(symlink_link)).symlink_target == symlink_target + assert ( + await sandbox.files.get_info(symlink_link) + ).symlink_target == symlink_target broken_target = f"{base_dir}/symlink-broken/missing-target.txt" broken_link = f"{base_dir}/symlink-broken/link.txt" result = await sandbox.exec( - { - "command": "bash", - "args": [ - "-lc", - f'mkdir -p "{base_dir}/symlink-broken" && ln -sfn "{broken_target}" 
"{broken_link}"', - ], - } + _bash_exec( + f'mkdir -p "{base_dir}/symlink-broken" && ln -sfn "{broken_target}" "{broken_link}"' + ) ) assert result.exit_code == 0 assert await sandbox.files.exists(broken_link) is True - assert (await sandbox.files.get_info(broken_link)).symlink_target == broken_target + assert ( + await sandbox.files.get_info(broken_link) + ).symlink_target == broken_target read_path = f"{base_dir}/read/readme.txt" await sandbox.files.write_text(read_path, "hello from sdk files") assert await sandbox.files.read(read_path) == "hello from sdk files" - assert await sandbox.files.read(read_path, format="text", offset=6, length=4) == "from" - assert await sandbox.files.read(read_path, format="bytes") == b"hello from sdk files" - assert await sandbox.files.read(read_path, format="blob") == b"hello from sdk files" - assert _read_stream_text(await sandbox.files.read(read_path, format="stream")) == "hello from sdk files" + assert ( + await sandbox.files.read(read_path, format="text", offset=6, length=4) + == "from" + ) + assert ( + await sandbox.files.read(read_path, format="bytes") + == b"hello from sdk files" + ) + assert ( + await sandbox.files.read(read_path, format="blob") + == b"hello from sdk files" + ) + assert ( + _read_stream_text(await sandbox.files.read(read_path, format="stream")) + == "hello from sdk files" + ) - single = await sandbox.files.write(f"{base_dir}/write/single.txt", "single file") + single = await sandbox.files.write( + f"{base_dir}/write/single.txt", "single file" + ) assert single.name == "single.txt" assert single.path == f"{base_dir}/write/single.txt" assert await sandbox.files.read_text(single.path) == "single file" batch = await sandbox.files.write( [ - {"path": f"{base_dir}/write/batch-a.txt", "data": "batch-a"}, - {"path": f"{base_dir}/write/batch-b.bin", "data": bytes([1, 2, 3, 4])}, + SandboxFileWriteEntry( + path=f"{base_dir}/write/batch-a.txt", + data="batch-a", + ), + SandboxFileWriteEntry( + 
path=f"{base_dir}/write/batch-b.bin", + data=bytes([1, 2, 3, 4]), + ), ] ) assert [entry.name for entry in batch] == ["batch-a.txt", "batch-b.bin"] - assert await sandbox.files.read_text(f"{base_dir}/write/batch-a.txt") == "batch-a" + assert ( + await sandbox.files.read_text(f"{base_dir}/write/batch-a.txt") == "batch-a" + ) assert await sandbox.files.read_bytes(f"{base_dir}/write/batch-b.bin") == bytes( [1, 2, 3, 4] ) @@ -186,7 +206,9 @@ async def test_async_sandbox_files_e2e(): transfer_path = f"{base_dir}/transfer/upload.txt" uploaded = await sandbox.files.upload(transfer_path, "uploaded from sdk") assert uploaded.bytes_written > 0 - assert (await sandbox.files.download(transfer_path)).decode("utf-8") == "uploaded from sdk" + assert (await sandbox.files.download(transfer_path)).decode( + "utf-8" + ) == "uploaded from sdk" file_path = f"{base_dir}/rename/hello.txt" renamed_path = f"{base_dir}/rename/hello-renamed.txt" @@ -200,37 +222,37 @@ async def test_async_sandbox_files_e2e(): copied_link_path = f"{base_dir}/rename/hello-link-copy.txt" renamed_link_path = f"{base_dir}/rename/hello-link-renamed.txt" result = await sandbox.exec( - { - "command": "bash", - "args": ["-lc", f'ln -sfn "{renamed_path}" "{link_path}"'], - } + _bash_exec(f'ln -sfn "{renamed_path}" "{link_path}"') ) assert result.exit_code == 0 - copied_link = await sandbox.files.copy(source=link_path, destination=copied_link_path) + copied_link = await sandbox.files.copy( + source=link_path, destination=copied_link_path + ) assert copied_link.path == copied_link_path - assert (await sandbox.files.get_info(copied_link_path)).symlink_target == renamed_path + assert ( + await sandbox.files.get_info(copied_link_path) + ).symlink_target == renamed_path renamed_link = await sandbox.files.rename(copied_link_path, renamed_link_path) assert renamed_link.path == renamed_link_path - assert (await sandbox.files.get_info(renamed_link_path)).symlink_target == renamed_path + assert ( + await 
sandbox.files.get_info(renamed_link_path) + ).symlink_target == renamed_path target_dir = f"{base_dir}/rename-dir/target-dir" link_dir = f"{base_dir}/rename-dir/link-dir" renamed_link_dir = f"{base_dir}/rename-dir/link-dir-renamed" await sandbox.files.make_dir(target_dir) await sandbox.files.write_text(f"{target_dir}/child.txt", "child") - result = await sandbox.exec( - { - "command": "bash", - "args": ["-lc", f'ln -sfn "{target_dir}" "{link_dir}"'], - } - ) + result = await sandbox.exec(_bash_exec(f'ln -sfn "{target_dir}" "{link_dir}"')) assert result.exit_code == 0 renamed = await sandbox.files.rename(link_dir, renamed_link_dir) assert renamed.path == renamed_link_dir - assert (await sandbox.files.get_info(renamed_link_dir)).symlink_target == target_dir - assert [entry.path for entry in await sandbox.files.list(renamed_link_dir, depth=1)] == [ - f"{target_dir}/child.txt" - ] + assert ( + await sandbox.files.get_info(renamed_link_dir) + ).symlink_target == target_dir + assert [ + entry.path for entry in await sandbox.files.list(renamed_link_dir, depth=1) + ] == [f"{target_dir}/child.txt"] source_dir = f"{base_dir}/copy-tree/source" nested_dir = f"{source_dir}/nested" @@ -239,50 +261,49 @@ async def test_async_sandbox_files_e2e(): await sandbox.files.make_dir(nested_dir) await sandbox.files.write_text(nested_target, "payload") result = await sandbox.exec( - { - "command": "bash", - "args": ["-lc", f'cd "{nested_dir}" && ln -sfn "target.txt" "link.txt"'], - } + _bash_exec(f'cd "{nested_dir}" && ln -sfn "target.txt" "link.txt"') ) assert result.exit_code == 0 - await sandbox.files.copy(source=source_dir, destination=destination_dir, recursive=True) + await sandbox.files.copy( + source=source_dir, destination=destination_dir, recursive=True + ) copied_target = f"{destination_dir}/nested/target.txt" copied_link = f"{destination_dir}/nested/link.txt" assert await sandbox.files.read_text(copied_target) == "payload" - assert (await 
sandbox.files.get_info(copied_link)).symlink_target == copied_target + assert ( + await sandbox.files.get_info(copied_link) + ).symlink_target == copied_target loop_dir = f"{base_dir}/loop-list" loop_nested_dir = f"{loop_dir}/nested" await sandbox.files.make_dir(loop_nested_dir) await sandbox.files.write_text(f"{loop_nested_dir}/child.txt", "payload") result = await sandbox.exec( - { - "command": "bash", - "args": ["-lc", f'cd "{loop_nested_dir}" && ln -sfn .. loop'], - } + _bash_exec(f'cd "{loop_nested_dir}" && ln -sfn .. loop') ) assert result.exit_code == 0 loop_entries = await sandbox.files.list(loop_dir, depth=4) loop_paths = [entry.path for entry in loop_entries] assert f"{loop_nested_dir}/loop" in loop_paths assert not any("/loop/" in path for path in loop_paths) - assert (await sandbox.files.get_info(f"{loop_nested_dir}/loop")).symlink_target == loop_dir + assert ( + await sandbox.files.get_info(f"{loop_nested_dir}/loop") + ).symlink_target == loop_dir source_dir = f"{base_dir}/loop-copy/source" nested_dir = f"{source_dir}/nested" await sandbox.files.make_dir(nested_dir) await sandbox.files.write_text(f"{nested_dir}/child.txt", "payload") - result = await sandbox.exec( - { - "command": "bash", - "args": ["-lc", f'cd "{nested_dir}" && ln -sfn .. loop'], - } - ) + result = await sandbox.exec(_bash_exec(f'cd "{nested_dir}" && ln -sfn .. 
loop')) assert result.exit_code == 0 destination_dir = f"{base_dir}/loop-copy/destination" - await sandbox.files.copy(source=source_dir, destination=destination_dir, recursive=True) + await sandbox.files.copy( + source=source_dir, destination=destination_dir, recursive=True + ) copied_loop = f"{destination_dir}/nested/loop" - assert (await sandbox.files.get_info(copied_loop)).symlink_target == destination_dir + assert ( + await sandbox.files.get_info(copied_loop) + ).symlink_target == destination_dir assert not any( "/loop/" in entry.path for entry in await sandbox.files.list(destination_dir, depth=4) @@ -294,16 +315,14 @@ async def test_async_sandbox_files_e2e(): await sandbox.files.write_text(source, "source payload") await sandbox.files.write_text(existing_target, "existing target") result = await sandbox.exec( - { - "command": "bash", - "args": [ - "-lc", - f'mkdir -p "{base_dir}/copy-overwrite" && ln -sfn "{existing_target}" "{destination_link}"', - ], - } + _bash_exec( + f'mkdir -p "{base_dir}/copy-overwrite" && ln -sfn "{existing_target}" "{destination_link}"' + ) ) assert result.exit_code == 0 - await sandbox.files.copy(source=source, destination=destination_link, overwrite=True) + await sandbox.files.copy( + source=source, destination=destination_link, overwrite=True + ) assert await sandbox.files.read_text(destination_link) == "source payload" assert await sandbox.files.read_text(existing_target) == "existing target" assert (await sandbox.files.get_info(destination_link)).symlink_target is None @@ -338,10 +357,9 @@ async def test_async_sandbox_files_e2e(): link = f"{base_dir}/remove-link/link.txt" await sandbox.files.write_text(target, "keep me") result = await sandbox.exec( - { - "command": "bash", - "args": ["-lc", f'mkdir -p "{base_dir}/remove-link" && ln -sfn "{target}" "{link}"'], - } + _bash_exec( + f'mkdir -p "{base_dir}/remove-link" && ln -sfn "{target}" "{link}"' + ) ) assert result.exit_code == 0 await sandbox.files.remove(link) @@ -354,13 
+372,9 @@ async def test_async_sandbox_files_e2e(): await sandbox.files.make_dir(target_dir) await sandbox.files.write_text(target_file, "keep tree") result = await sandbox.exec( - { - "command": "bash", - "args": [ - "-lc", - f'mkdir -p "{base_dir}/remove-recursive" && ln -sfn "{target_dir}" "{link_dir}"', - ], - } + _bash_exec( + f'mkdir -p "{base_dir}/remove-recursive" && ln -sfn "{target_dir}" "{link_dir}"' + ) ) assert result.exit_code == 0 await sandbox.files.remove(link_dir, recursive=True) @@ -369,10 +383,7 @@ async def test_async_sandbox_files_e2e(): link = f"{base_dir}/escape/file-link" result = await sandbox.exec( - { - "command": "bash", - "args": ["-lc", f'mkdir -p "{base_dir}/escape" && ln -sfn /etc/hosts "{link}"'], - } + _bash_exec(f'mkdir -p "{base_dir}/escape" && ln -sfn /etc/hosts "{link}"') ) assert result.exit_code == 0 text = await sandbox.files.read_text(link) @@ -382,11 +393,16 @@ async def test_async_sandbox_files_e2e(): fixture = await _create_parent_symlink_escape_fixture( sandbox, base_dir, "parent-escape-read" ) - assert await sandbox.files.read_text(fixture["escaped_file"]) == "outside secret" - assert (await sandbox.files.download(fixture["escaped_file"])).decode("utf-8") == "outside secret" - assert [entry.path for entry in await sandbox.files.list(fixture["link_dir"], depth=1)] == [ - f'{fixture["outside_dir"]}/secret.txt' - ] + assert ( + await sandbox.files.read_text(fixture["escaped_file"]) == "outside secret" + ) + assert (await sandbox.files.download(fixture["escaped_file"])).decode( + "utf-8" + ) == "outside secret" + assert [ + entry.path + for entry in await sandbox.files.list(fixture["link_dir"], depth=1) + ] == [f"{fixture['outside_dir']}/secret.txt"] seen = asyncio.get_running_loop().create_future() async def on_parent_event(event): @@ -396,7 +412,7 @@ async def on_parent_event(event): handle = await sandbox.files.watch_dir(fixture["link_dir"], on_parent_event) try: await sandbox.files.write_text( - 
f'{fixture["outside_dir"]}/fresh.txt', "watch parent link" + f"{fixture['outside_dir']}/fresh.txt", "watch parent link" ) assert await _await_future(seen) == "fresh.txt" finally: @@ -424,13 +440,9 @@ async def on_parent_event(event): await sandbox.files.write_text(fixture["escaped_file"], "remove me") await sandbox.files.remove(fixture["escaped_file"]) outside_read = await sandbox.exec( - { - "command": "bash", - "args": [ - "-lc", - f'if [ -e "{fixture["outside_file"]}" ]; then cat "{fixture["outside_file"]}"; else printf "__MISSING__"; fi', - ], - } + _bash_exec( + f'if [ -e "{fixture["outside_file"]}" ]; then cat "{fixture["outside_file"]}"; else printf "__MISSING__"; fi' + ) ) assert outside_read.exit_code == 0 assert outside_read.stdout.strip() == "__MISSING__" @@ -439,16 +451,14 @@ async def on_parent_event(event): target_file = f"{target_dir}/child.txt" link = f"{base_dir}/escape/dir-link" result = await sandbox.exec( - { - "command": "bash", - "args": [ - "-lc", - f'mkdir -p "{base_dir}/escape" "{target_dir}" && printf "child" > "{target_file}" && ln -sfn "{target_dir}" "{link}"', - ], - } + _bash_exec( + f'mkdir -p "{base_dir}/escape" "{target_dir}" && printf "child" > "{target_file}" && ln -sfn "{target_dir}" "{link}"' + ) ) assert result.exit_code == 0 - assert [entry.path for entry in await sandbox.files.list(link, depth=1)] == [target_file] + assert [entry.path for entry in await sandbox.files.list(link, depth=1)] == [ + target_file + ] seen = asyncio.get_running_loop().create_future() async def on_link_event(event): @@ -457,7 +467,9 @@ async def on_link_event(event): handle = await sandbox.files.watch_dir(link, on_link_event) try: - await sandbox.files.write_text(f"{target_dir}/file.txt", "watch through link") + await sandbox.files.write_text( + f"{target_dir}/file.txt", "watch through link" + ) assert await _await_future(seen) == "file.txt" finally: await handle.stop() @@ -468,7 +480,11 @@ async def on_link_event(event): recursive_future = 
asyncio.get_running_loop().create_future() async def on_direct(event): - if event.type == "write" and event.name == "direct.txt" and not direct_future.done(): + if ( + event.type == "write" + and event.name == "direct.txt" + and not direct_future.done() + ): direct_future.set_result(event.name) async def on_recursive(event): @@ -498,7 +514,9 @@ async def on_recursive(event): await expect_hyperbrowser_error_async( "watch missing directory", - lambda: sandbox.files.watch_dir(f"{base_dir}/watch-missing", lambda event: None), + lambda: sandbox.files.watch_dir( + f"{base_dir}/watch-missing", lambda event: None + ), status_code=404, service="runtime", retryable=False, @@ -584,7 +602,7 @@ async def on_recursive(event): assert len(rejected) == 1 await expect_hyperbrowser_error_async( "rename race failure", - lambda: (_async_raise(rejected[0])), + lambda: _async_raise(rejected[0]), status_code=404, service="runtime", retryable=False, diff --git a/tests/sandbox/e2e/test_async_sudo.py b/tests/sandbox/e2e/test_async_sudo.py index fa2e92e2..96b845f4 100644 --- a/tests/sandbox/e2e/test_async_sudo.py +++ b/tests/sandbox/e2e/test_async_sudo.py @@ -1,5 +1,7 @@ import pytest +from hyperbrowser.models import SandboxExecParams + from tests.helpers.config import create_async_client from tests.helpers.sandbox import ( default_sandbox_params, @@ -8,6 +10,10 @@ ) +def _bash_exec(command: str) -> SandboxExecParams: + return SandboxExecParams(command="bash", args=["-lc", command]) + + @pytest.mark.anyio async def test_async_sandbox_sudo_e2e(): client = create_async_client() @@ -19,47 +25,34 @@ async def test_async_sandbox_sudo_e2e(): path = "/tmp/sdk-sudo-check.txt" - runtime_user = await sandbox.exec( - { - "command": "bash", - "args": ["-lc", "whoami && id -u && id -g"], - } - ) + runtime_user = await sandbox.exec(_bash_exec("whoami && id -u && id -g")) assert runtime_user.exit_code == 0 assert "ubuntu" in runtime_user.stdout assert "1000" in runtime_user.stdout direct_chown = await 
sandbox.exec( - { - "command": "bash", - "args": [ - "-lc", - " && ".join( - [ - f'printf "sudo-check" > "{path}"', - f'chown root:root "{path}"', - ] - ), - ], - } + _bash_exec( + " && ".join( + [ + f'printf "sudo-check" > "{path}"', + f'chown root:root "{path}"', + ] + ) + ) ) assert direct_chown.exit_code != 0 assert "operation not permitted" in direct_chown.stderr.lower() sudo_result = await sandbox.exec( - { - "command": "bash", - "args": [ - "-lc", - " && ".join( - [ - "sudo -n whoami", - f'sudo -n chown root:root "{path}"', - f"stat -c '%U:%G' \"{path}\"", - ] - ), - ], - } + _bash_exec( + " && ".join( + [ + "sudo -n whoami", + f'sudo -n chown root:root "{path}"', + f"stat -c '%U:%G' \"{path}\"", + ] + ) + ) ) assert sudo_result.exit_code == 0 assert "root" in sudo_result.stdout diff --git a/tests/sandbox/e2e/test_files.py b/tests/sandbox/e2e/test_files.py index 36585f86..14962635 100644 --- a/tests/sandbox/e2e/test_files.py +++ b/tests/sandbox/e2e/test_files.py @@ -1,6 +1,8 @@ from concurrent.futures import ThreadPoolExecutor from queue import Empty, Queue +from hyperbrowser.models import SandboxExecParams, SandboxFileWriteEntry + from tests.helpers.config import create_client, make_test_name from tests.helpers.errors import expect_hyperbrowser_error from tests.helpers.http import fetch_signed_url @@ -13,6 +15,10 @@ client = create_client() +def _bash_exec(command: str) -> SandboxExecParams: + return SandboxExecParams(command="bash", args=["-lc", command]) + + def _read_stream_text(stream) -> str: return stream.read().decode("utf-8") @@ -31,20 +37,16 @@ def _create_parent_symlink_escape_fixture(sandbox, base_dir: str, name: str): link_dir = f"{allowed_dir}/evil" escaped_file = f"{link_dir}/secret.txt" setup = sandbox.exec( - { - "command": "bash", - "args": [ - "-lc", - " && ".join( - [ - f'mkdir -p "{allowed_dir}"', - f'mkdir -p "{outside_dir}"', - f'printf "outside secret" > "{outside_file}"', - f'ln -sfn "{outside_dir}" "{link_dir}"', - ] - ), - ], - } + 
_bash_exec( + " && ".join( + [ + f'mkdir -p "{allowed_dir}"', + f'mkdir -p "{outside_dir}"', + f'printf "outside secret" > "{outside_file}"', + f'ln -sfn "{outside_dir}" "{link_dir}"', + ] + ) + ) ) assert setup.exit_code == 0 return { @@ -87,7 +89,9 @@ def test_sandbox_files_e2e(): sandbox.files.make_dir(f"{list_dir}/nested/inner", parents=True) sandbox.files.write_text(f"{list_dir}/root.txt", "root") sandbox.files.write_text(f"{list_dir}/nested/child.txt", "child") - sandbox.files.write_text(f"{list_dir}/nested/inner/grandchild.txt", "grandchild") + sandbox.files.write_text( + f"{list_dir}/nested/inner/grandchild.txt", "grandchild" + ) depth_one = sandbox.files.list(list_dir, depth=1) assert [entry.name for entry in depth_one] == ["nested", "root.txt"] @@ -106,12 +110,12 @@ def test_sandbox_files_e2e(): link = f"{symlink_dir}/link.txt" sandbox.files.make_dir(symlink_dir) sandbox.files.write_text(target, "payload") - result = sandbox.exec( - {"command": "bash", "args": ["-lc", f'ln -sfn "{target}" "{link}"']} - ) + result = sandbox.exec(_bash_exec(f'ln -sfn "{target}" "{link}"')) assert result.exit_code == 0 link_entry = next( - entry for entry in sandbox.files.list(symlink_dir, depth=1) if entry.path == link + entry + for entry in sandbox.files.list(symlink_dir, depth=1) + if entry.path == link ) assert link_entry.symlink_target == target @@ -119,13 +123,9 @@ def test_sandbox_files_e2e(): symlink_link = f"{base_dir}/symlink/link.txt" sandbox.files.write_text(symlink_target, "target") result = sandbox.exec( - { - "command": "bash", - "args": [ - "-lc", - f'mkdir -p "{base_dir}/symlink" && ln -sfn "{symlink_target}" "{symlink_link}"', - ], - } + _bash_exec( + f'mkdir -p "{base_dir}/symlink" && ln -sfn "{symlink_target}" "{symlink_link}"' + ) ) assert result.exit_code == 0 assert sandbox.files.get_info(symlink_link).symlink_target == symlink_target @@ -133,13 +133,9 @@ def test_sandbox_files_e2e(): broken_target = f"{base_dir}/symlink-broken/missing-target.txt" 
broken_link = f"{base_dir}/symlink-broken/link.txt" result = sandbox.exec( - { - "command": "bash", - "args": [ - "-lc", - f'mkdir -p "{base_dir}/symlink-broken" && ln -sfn "{broken_target}" "{broken_link}"', - ], - } + _bash_exec( + f'mkdir -p "{base_dir}/symlink-broken" && ln -sfn "{broken_target}" "{broken_link}"' + ) ) assert result.exit_code == 0 assert sandbox.files.exists(broken_link) is True @@ -148,10 +144,15 @@ def test_sandbox_files_e2e(): read_path = f"{base_dir}/read/readme.txt" sandbox.files.write_text(read_path, "hello from sdk files") assert sandbox.files.read(read_path) == "hello from sdk files" - assert sandbox.files.read(read_path, format="text", offset=6, length=4) == "from" + assert ( + sandbox.files.read(read_path, format="text", offset=6, length=4) == "from" + ) assert sandbox.files.read(read_path, format="bytes") == b"hello from sdk files" assert sandbox.files.read(read_path, format="blob") == b"hello from sdk files" - assert _read_stream_text(sandbox.files.read(read_path, format="stream")) == "hello from sdk files" + assert ( + _read_stream_text(sandbox.files.read(read_path, format="stream")) + == "hello from sdk files" + ) single = sandbox.files.write(f"{base_dir}/write/single.txt", "single file") assert single.name == "single.txt" @@ -160,13 +161,21 @@ def test_sandbox_files_e2e(): batch = sandbox.files.write( [ - {"path": f"{base_dir}/write/batch-a.txt", "data": "batch-a"}, - {"path": f"{base_dir}/write/batch-b.bin", "data": bytes([1, 2, 3, 4])}, + SandboxFileWriteEntry( + path=f"{base_dir}/write/batch-a.txt", + data="batch-a", + ), + SandboxFileWriteEntry( + path=f"{base_dir}/write/batch-b.bin", + data=bytes([1, 2, 3, 4]), + ), ] ) assert [entry.name for entry in batch] == ["batch-a.txt", "batch-b.bin"] assert sandbox.files.read_text(f"{base_dir}/write/batch-a.txt") == "batch-a" - assert sandbox.files.read_bytes(f"{base_dir}/write/batch-b.bin") == bytes([1, 2, 3, 4]) + assert sandbox.files.read_bytes(f"{base_dir}/write/batch-b.bin") == 
bytes( + [1, 2, 3, 4] + ) text_path = f"{base_dir}/write-options/text.txt" sandbox.files.write_text(text_path, "hello", mode="0640") @@ -182,7 +191,9 @@ def test_sandbox_files_e2e(): transfer_path = f"{base_dir}/transfer/upload.txt" uploaded = sandbox.files.upload(transfer_path, "uploaded from sdk") assert uploaded.bytes_written > 0 - assert sandbox.files.download(transfer_path).decode("utf-8") == "uploaded from sdk" + assert ( + sandbox.files.download(transfer_path).decode("utf-8") == "uploaded from sdk" + ) file_path = f"{base_dir}/rename/hello.txt" renamed_path = f"{base_dir}/rename/hello-renamed.txt" @@ -195,12 +206,7 @@ def test_sandbox_files_e2e(): link_path = f"{base_dir}/rename/hello-link.txt" copied_link_path = f"{base_dir}/rename/hello-link-copy.txt" renamed_link_path = f"{base_dir}/rename/hello-link-renamed.txt" - result = sandbox.exec( - { - "command": "bash", - "args": ["-lc", f'ln -sfn "{renamed_path}" "{link_path}"'], - } - ) + result = sandbox.exec(_bash_exec(f'ln -sfn "{renamed_path}" "{link_path}"')) assert result.exit_code == 0 copied_link = sandbox.files.copy(source=link_path, destination=copied_link_path) assert copied_link.path == copied_link_path @@ -214,35 +220,28 @@ def test_sandbox_files_e2e(): renamed_link_dir = f"{base_dir}/rename-dir/link-dir-renamed" sandbox.files.make_dir(target_dir) sandbox.files.write_text(f"{target_dir}/child.txt", "child") - result = sandbox.exec( - { - "command": "bash", - "args": ["-lc", f'ln -sfn "{target_dir}" "{link_dir}"'], - } - ) + result = sandbox.exec(_bash_exec(f'ln -sfn "{target_dir}" "{link_dir}"')) assert result.exit_code == 0 renamed = sandbox.files.rename(link_dir, renamed_link_dir) assert renamed.path == renamed_link_dir assert sandbox.files.get_info(renamed_link_dir).symlink_target == target_dir - assert [entry.path for entry in sandbox.files.list(renamed_link_dir, depth=1)] == [ - f"{target_dir}/child.txt" - ] + assert [ + entry.path for entry in sandbox.files.list(renamed_link_dir, depth=1) + ] 
== [f"{target_dir}/child.txt"] source_dir = f"{base_dir}/copy-tree/source" nested_dir = f"{source_dir}/nested" nested_target = f"{nested_dir}/target.txt" - nested_link = f"{nested_dir}/link.txt" destination_dir = f"{base_dir}/copy-tree/destination" sandbox.files.make_dir(nested_dir) sandbox.files.write_text(nested_target, "payload") result = sandbox.exec( - { - "command": "bash", - "args": ["-lc", f'cd "{nested_dir}" && ln -sfn "target.txt" "link.txt"'], - } + _bash_exec(f'cd "{nested_dir}" && ln -sfn "target.txt" "link.txt"') ) assert result.exit_code == 0 - sandbox.files.copy(source=source_dir, destination=destination_dir, recursive=True) + sandbox.files.copy( + source=source_dir, destination=destination_dir, recursive=True + ) copied_target = f"{destination_dir}/nested/target.txt" copied_link = f"{destination_dir}/nested/link.txt" assert sandbox.files.read_text(copied_target) == "payload" @@ -252,35 +251,32 @@ def test_sandbox_files_e2e(): loop_nested_dir = f"{loop_dir}/nested" sandbox.files.make_dir(loop_nested_dir) sandbox.files.write_text(f"{loop_nested_dir}/child.txt", "payload") - result = sandbox.exec( - { - "command": "bash", - "args": ["-lc", f'cd "{loop_nested_dir}" && ln -sfn .. loop'], - } - ) + result = sandbox.exec(_bash_exec(f'cd "{loop_nested_dir}" && ln -sfn .. 
loop')) assert result.exit_code == 0 loop_entries = sandbox.files.list(loop_dir, depth=4) loop_paths = [entry.path for entry in loop_entries] assert f"{loop_nested_dir}/loop" in loop_paths assert not any("/loop/" in path for path in loop_paths) - assert sandbox.files.get_info(f"{loop_nested_dir}/loop").symlink_target == loop_dir + assert ( + sandbox.files.get_info(f"{loop_nested_dir}/loop").symlink_target == loop_dir + ) source_dir = f"{base_dir}/loop-copy/source" nested_dir = f"{source_dir}/nested" sandbox.files.make_dir(nested_dir) sandbox.files.write_text(f"{nested_dir}/child.txt", "payload") - result = sandbox.exec( - { - "command": "bash", - "args": ["-lc", f'cd "{nested_dir}" && ln -sfn .. loop'], - } - ) + result = sandbox.exec(_bash_exec(f'cd "{nested_dir}" && ln -sfn .. loop')) assert result.exit_code == 0 destination_dir = f"{base_dir}/loop-copy/destination" - sandbox.files.copy(source=source_dir, destination=destination_dir, recursive=True) + sandbox.files.copy( + source=source_dir, destination=destination_dir, recursive=True + ) copied_loop = f"{destination_dir}/nested/loop" assert sandbox.files.get_info(copied_loop).symlink_target == destination_dir - assert not any("/loop/" in entry.path for entry in sandbox.files.list(destination_dir, depth=4)) + assert not any( + "/loop/" in entry.path + for entry in sandbox.files.list(destination_dir, depth=4) + ) source = f"{base_dir}/copy-overwrite/source.txt" existing_target = f"{base_dir}/copy-overwrite/existing-target.txt" @@ -288,13 +284,9 @@ def test_sandbox_files_e2e(): sandbox.files.write_text(source, "source payload") sandbox.files.write_text(existing_target, "existing target") result = sandbox.exec( - { - "command": "bash", - "args": [ - "-lc", - f'mkdir -p "{base_dir}/copy-overwrite" && ln -sfn "{existing_target}" "{destination_link}"', - ], - } + _bash_exec( + f'mkdir -p "{base_dir}/copy-overwrite" && ln -sfn "{existing_target}" "{destination_link}"' + ) ) assert result.exit_code == 0 
sandbox.files.copy(source=source, destination=destination_link, overwrite=True) @@ -332,10 +324,9 @@ def test_sandbox_files_e2e(): link = f"{base_dir}/remove-link/link.txt" sandbox.files.write_text(target, "keep me") result = sandbox.exec( - { - "command": "bash", - "args": ["-lc", f'mkdir -p "{base_dir}/remove-link" && ln -sfn "{target}" "{link}"'], - } + _bash_exec( + f'mkdir -p "{base_dir}/remove-link" && ln -sfn "{target}" "{link}"' + ) ) assert result.exit_code == 0 sandbox.files.remove(link) @@ -348,13 +339,9 @@ def test_sandbox_files_e2e(): sandbox.files.make_dir(target_dir) sandbox.files.write_text(target_file, "keep tree") result = sandbox.exec( - { - "command": "bash", - "args": [ - "-lc", - f'mkdir -p "{base_dir}/remove-recursive" && ln -sfn "{target_dir}" "{link_dir}"', - ], - } + _bash_exec( + f'mkdir -p "{base_dir}/remove-recursive" && ln -sfn "{target_dir}" "{link_dir}"' + ) ) assert result.exit_code == 0 sandbox.files.remove(link_dir, recursive=True) @@ -363,36 +350,44 @@ def test_sandbox_files_e2e(): link = f"{base_dir}/escape/file-link" result = sandbox.exec( - { - "command": "bash", - "args": ["-lc", f'mkdir -p "{base_dir}/escape" && ln -sfn /etc/hosts "{link}"'], - } + _bash_exec(f'mkdir -p "{base_dir}/escape" && ln -sfn /etc/hosts "{link}"') ) assert result.exit_code == 0 text = sandbox.files.read_text(link) assert "localhost" in text assert "localhost" in sandbox.files.download(link).decode("utf-8") - fixture = _create_parent_symlink_escape_fixture(sandbox, base_dir, "parent-escape-read") + fixture = _create_parent_symlink_escape_fixture( + sandbox, base_dir, "parent-escape-read" + ) assert sandbox.files.read_text(fixture["escaped_file"]) == "outside secret" - assert sandbox.files.download(fixture["escaped_file"]).decode("utf-8") == "outside secret" - assert [entry.path for entry in sandbox.files.list(fixture["link_dir"], depth=1)] == [ - f'{fixture["outside_dir"]}/secret.txt' - ] + assert ( + 
sandbox.files.download(fixture["escaped_file"]).decode("utf-8") + == "outside secret" + ) + assert [ + entry.path for entry in sandbox.files.list(fixture["link_dir"], depth=1) + ] == [f"{fixture['outside_dir']}/secret.txt"] seen = Queue(maxsize=1) handle = sandbox.files.watch_dir( fixture["link_dir"], - lambda event: seen.put_nowait(event.name) - if event.type == "write" and event.name == "fresh.txt" - else None, + lambda event: ( + seen.put_nowait(event.name) + if event.type == "write" and event.name == "fresh.txt" + else None + ), ) try: - sandbox.files.write_text(f'{fixture["outside_dir"]}/fresh.txt', "watch parent link") + sandbox.files.write_text( + f"{fixture['outside_dir']}/fresh.txt", "watch parent link" + ) assert _await_queue_value(seen) == "fresh.txt" finally: handle.stop() - fixture = _create_parent_symlink_escape_fixture(sandbox, base_dir, "parent-escape-mutate") + fixture = _create_parent_symlink_escape_fixture( + sandbox, base_dir, "parent-escape-mutate" + ) info = sandbox.files.get_info(fixture["escaped_file"]) assert info.type == "file" assert info.size == len("outside secret") @@ -412,13 +407,9 @@ def test_sandbox_files_e2e(): sandbox.files.write_text(fixture["escaped_file"], "remove me") sandbox.files.remove(fixture["escaped_file"]) outside_read = sandbox.exec( - { - "command": "bash", - "args": [ - "-lc", - f'if [ -e "{fixture["outside_file"]}" ]; then cat "{fixture["outside_file"]}"; else printf "__MISSING__"; fi', - ], - } + _bash_exec( + f'if [ -e "{fixture["outside_file"]}" ]; then cat "{fixture["outside_file"]}"; else printf "__MISSING__"; fi' + ) ) assert outside_read.exit_code == 0 assert outside_read.stdout.strip() == "__MISSING__" @@ -427,22 +418,22 @@ def test_sandbox_files_e2e(): target_file = f"{target_dir}/child.txt" link = f"{base_dir}/escape/dir-link" result = sandbox.exec( - { - "command": "bash", - "args": [ - "-lc", - f'mkdir -p "{base_dir}/escape" "{target_dir}" && printf "child" > "{target_file}" && ln -sfn "{target_dir}" 
"{link}"', - ], - } + _bash_exec( + f'mkdir -p "{base_dir}/escape" "{target_dir}" && printf "child" > "{target_file}" && ln -sfn "{target_dir}" "{link}"' + ) ) assert result.exit_code == 0 - assert [entry.path for entry in sandbox.files.list(link, depth=1)] == [target_file] + assert [entry.path for entry in sandbox.files.list(link, depth=1)] == [ + target_file + ] seen = Queue(maxsize=1) handle = sandbox.files.watch_dir( link, - lambda event: seen.put_nowait(event.name) - if event.type == "write" and event.name == "file.txt" - else None, + lambda event: ( + seen.put_nowait(event.name) + if event.type == "write" and event.name == "file.txt" + else None + ), ) try: sandbox.files.write_text(f"{target_dir}/file.txt", "watch through link") @@ -456,20 +447,26 @@ def test_sandbox_files_e2e(): recursive_event = Queue(maxsize=1) direct_handle = sandbox.files.watch_dir( watch_dir, - lambda event: direct_event.put_nowait(event.name) - if event.type == "write" and event.name == "direct.txt" - else None, + lambda event: ( + direct_event.put_nowait(event.name) + if event.type == "write" and event.name == "direct.txt" + else None + ), ) recursive_handle = sandbox.files.watch_dir( watch_dir, - lambda event: recursive_event.put_nowait(event.name) - if event.type == "write" and event.name == "nested/recursive.txt" - else None, + lambda event: ( + recursive_event.put_nowait(event.name) + if event.type == "write" and event.name == "nested/recursive.txt" + else None + ), recursive=True, ) try: sandbox.files.write_text(f"{watch_dir}/direct.txt", "watch me") - sandbox.files.write_text(f"{watch_dir}/nested/recursive.txt", "watch me too") + sandbox.files.write_text( + f"{watch_dir}/nested/recursive.txt", "watch me too" + ) assert _await_queue_value(direct_event) == "direct.txt" assert _await_queue_value(recursive_event) == "nested/recursive.txt" finally: @@ -478,7 +475,9 @@ def test_sandbox_files_e2e(): expect_hyperbrowser_error( "watch missing directory", - lambda: 
sandbox.files.watch_dir(f"{base_dir}/watch-missing", lambda event: None), + lambda: sandbox.files.watch_dir( + f"{base_dir}/watch-missing", lambda event: None + ), status_code=404, service="runtime", retryable=False, @@ -539,8 +538,12 @@ def test_sandbox_files_e2e(): sandbox.files.write_text(path, "download once") download = sandbox.files.download_url(path, one_time=True) with ThreadPoolExecutor(max_workers=2) as executor: - first_future = executor.submit(fetch_signed_url, download.url, method=download.method) - second_future = executor.submit(fetch_signed_url, download.url, method=download.method) + first_future = executor.submit( + fetch_signed_url, download.url, method=download.method + ) + second_future = executor.submit( + fetch_signed_url, download.url, method=download.method + ) first = first_future.result() second = second_future.result() assert sorted([first.status_code, second.status_code]) == [200, 401] diff --git a/tests/sandbox/e2e/test_sudo.py b/tests/sandbox/e2e/test_sudo.py index 9ba02369..0d48f3ba 100644 --- a/tests/sandbox/e2e/test_sudo.py +++ b/tests/sandbox/e2e/test_sudo.py @@ -1,3 +1,5 @@ +from hyperbrowser.models import SandboxExecParams + from tests.helpers.config import create_client from tests.helpers.sandbox import ( default_sandbox_params, @@ -8,6 +10,10 @@ client = create_client() +def _bash_exec(command: str) -> SandboxExecParams: + return SandboxExecParams(command="bash", args=["-lc", command]) + + def test_sandbox_sudo_e2e(): sandbox = None @@ -17,47 +23,34 @@ def test_sandbox_sudo_e2e(): path = "/tmp/sdk-sudo-check.txt" - runtime_user = sandbox.exec( - { - "command": "bash", - "args": ["-lc", "whoami && id -u && id -g"], - } - ) + runtime_user = sandbox.exec(_bash_exec("whoami && id -u && id -g")) assert runtime_user.exit_code == 0 assert "ubuntu" in runtime_user.stdout assert "1000" in runtime_user.stdout direct_chown = sandbox.exec( - { - "command": "bash", - "args": [ - "-lc", - " && ".join( - [ - f'printf "sudo-check" > 
"{path}"', - f'chown root:root "{path}"', - ] - ), - ], - } + _bash_exec( + " && ".join( + [ + f'printf "sudo-check" > "{path}"', + f'chown root:root "{path}"', + ] + ) + ) ) assert direct_chown.exit_code != 0 assert "operation not permitted" in direct_chown.stderr.lower() sudo_result = sandbox.exec( - { - "command": "bash", - "args": [ - "-lc", - " && ".join( - [ - "sudo -n whoami", - f'sudo -n chown root:root "{path}"', - f"stat -c '%U:%G' \"{path}\"", - ] - ), - ], - } + _bash_exec( + " && ".join( + [ + "sudo -n whoami", + f'sudo -n chown root:root "{path}"', + f"stat -c '%U:%G' \"{path}\"", + ] + ) + ) ) assert sudo_result.exit_code == 0 assert "root" in sudo_result.stdout From a0adcd03d2127f5c5150d0917e6ea8b2768ce023 Mon Sep 17 00:00:00 2001 From: Devin Deng Date: Thu, 12 Mar 2026 08:04:58 +0000 Subject: [PATCH 09/10] fix, no more claude --- hyperbrowser/models/sandbox.py | 12 +++---- tests/test_create_sandbox_params.py | 50 ++++++++++++++++++++++++++++- 2 files changed, 53 insertions(+), 9 deletions(-) diff --git a/hyperbrowser/models/sandbox.py b/hyperbrowser/models/sandbox.py index 2e67def4..0cac9e85 100644 --- a/hyperbrowser/models/sandbox.py +++ b/hyperbrowser/models/sandbox.py @@ -210,7 +210,7 @@ class SandboxExecParams(SandboxBaseModel): cwd: Optional[str] = None env: Optional[Dict[str, str]] = None timeout_ms: Optional[int] = Field(default=None, serialization_alias="timeoutMs") - timeout_sec: Optional[int] = Field(default=None, serialization_alias="timeoutSec") + timeout_sec: Optional[int] = None use_shell: Optional[bool] = Field(default=None, serialization_alias="useShell") @@ -241,12 +241,8 @@ class SandboxProcessListParams(SandboxBaseModel): status: Optional[Union[SandboxProcessStatus, List[SandboxProcessStatus]]] = None limit: Optional[int] = None cursor: Optional[Union[str, int]] = None - created_after: Optional[int] = Field( - default=None, serialization_alias="createdAfter" - ) - created_before: Optional[int] = Field( - default=None, 
serialization_alias="createdBefore" - ) + created_after: Optional[int] = None + created_before: Optional[int] = None class SandboxProcessListResponse(SandboxBaseModel): @@ -256,7 +252,7 @@ class SandboxProcessListResponse(SandboxBaseModel): class SandboxProcessWaitParams(SandboxBaseModel): timeout_ms: Optional[int] = Field(default=None, serialization_alias="timeoutMs") - timeout_sec: Optional[int] = Field(default=None, serialization_alias="timeoutSec") + timeout_sec: Optional[int] = None class SandboxProcessStdinParams(SandboxBaseModel): diff --git a/tests/test_create_sandbox_params.py b/tests/test_create_sandbox_params.py index a79d9a10..ffe25a5b 100644 --- a/tests/test_create_sandbox_params.py +++ b/tests/test_create_sandbox_params.py @@ -1,7 +1,12 @@ import pytest from pydantic import ValidationError -from hyperbrowser.models import CreateSandboxParams +from hyperbrowser.models import ( + CreateSandboxParams, + SandboxExecParams, + SandboxProcessListParams, + SandboxProcessWaitParams, +) def test_create_sandbox_params_accepts_image_source(): @@ -50,3 +55,46 @@ def test_create_sandbox_params_rejects_multiple_sources(): def test_create_sandbox_params_requires_snapshot_name_for_snapshot_id(): with pytest.raises(ValidationError, match="snapshot_id requires snapshot_name"): CreateSandboxParams(snapshot_id="snap-id") + + +def test_sandbox_exec_params_serialize_process_timeout_sec_as_snake_case(): + params = SandboxExecParams( + command="echo hi", + timeout_ms=500, + timeout_sec=7, + use_shell=True, + ) + + assert params.model_dump(by_alias=True, exclude_none=True) == { + "command": "echo hi", + "timeoutMs": 500, + "timeout_sec": 7, + "useShell": True, + } + + +def test_sandbox_process_wait_params_serialize_timeout_sec_as_snake_case(): + params = SandboxProcessWaitParams(timeout_ms=250, timeout_sec=3) + + assert params.model_dump(by_alias=True, exclude_none=True) == { + "timeoutMs": 250, + "timeout_sec": 3, + } + + +def 
test_sandbox_process_list_params_serialize_created_filters_as_snake_case(): + params = SandboxProcessListParams( + status=["running", "exited"], + limit=10, + cursor="cursor-1", + created_after=100, + created_before=200, + ) + + assert params.model_dump(by_alias=True, exclude_none=True) == { + "status": ["running", "exited"], + "limit": 10, + "cursor": "cursor-1", + "created_after": 100, + "created_before": 200, + } From d405aca1d0048d5acc479f0f2745738a3fdde3b6 Mon Sep 17 00:00:00 2001 From: Nikhil Shahi Date: Thu, 12 Mar 2026 01:06:24 -0700 Subject: [PATCH 10/10] formatting --- hyperbrowser/models/agents/cua.py | 4 +-- hyperbrowser/models/consts.py | 5 +--- hyperbrowser/sandbox_common.py | 10 +++---- tests/helpers/errors.py | 48 ++++++++++++++++--------------- tests/helpers/http.py | 8 ++++-- uv.lock | 3 -- 6 files changed, 37 insertions(+), 41 deletions(-) delete mode 100644 uv.lock diff --git a/hyperbrowser/models/agents/cua.py b/hyperbrowser/models/agents/cua.py index f7d4472a..6562e52f 100644 --- a/hyperbrowser/models/agents/cua.py +++ b/hyperbrowser/models/agents/cua.py @@ -30,9 +30,7 @@ class StartCuaTaskParams(BaseModel): ) task: str - llm: Optional[CuaLlm] = Field( - default=None, serialization_alias="llm" - ) + llm: Optional[CuaLlm] = Field(default=None, serialization_alias="llm") session_id: Optional[str] = Field(default=None, serialization_alias="sessionId") max_failures: Optional[int] = Field(default=None, serialization_alias="maxFailures") max_steps: Optional[int] = Field(default=None, serialization_alias="maxSteps") diff --git a/hyperbrowser/models/consts.py b/hyperbrowser/models/consts.py index 85a0f5aa..0388e7ee 100644 --- a/hyperbrowser/models/consts.py +++ b/hyperbrowser/models/consts.py @@ -69,10 +69,7 @@ "claude-sonnet-4-20250514", "claude-3-7-sonnet-20250219", ] -CuaLlm = Literal[ - "computer-use-preview", - "gpt-5.4" -] +CuaLlm = Literal["computer-use-preview", "gpt-5.4"] GeminiComputerUseLlm = 
Literal["gemini-2.5-computer-use-preview-10-2025",] SessionRegion = Literal[ "us-central", diff --git a/hyperbrowser/sandbox_common.py b/hyperbrowser/sandbox_common.py index a600dd71..e0e20298 100644 --- a/hyperbrowser/sandbox_common.py +++ b/hyperbrowser/sandbox_common.py @@ -45,7 +45,9 @@ def is_retryable_network_error(error: BaseException) -> bool: ) -def parse_error_payload(raw_text: str, fallback_message: str) -> Tuple[str, Optional[str], Any]: +def parse_error_payload( + raw_text: str, fallback_message: str +) -> Tuple[str, Optional[str], Any]: if not raw_text: return fallback_message, None, None @@ -55,11 +57,7 @@ def parse_error_payload(raw_text: str, fallback_message: str) -> Tuple[str, Opti return raw_text, None, raw_text if isinstance(parsed, dict): - message = ( - parsed.get("message") - or parsed.get("error") - or fallback_message - ) + message = parsed.get("message") or parsed.get("error") or fallback_message code = parsed.get("code") if isinstance(parsed.get("code"), str) else None return message, code, parsed diff --git a/tests/helpers/errors.py b/tests/helpers/errors.py index 1ebd7d58..f874cc11 100644 --- a/tests/helpers/errors.py +++ b/tests/helpers/errors.py @@ -3,9 +3,7 @@ from hyperbrowser.exceptions import HyperbrowserError -def _normalize_messages( - value: Optional[Iterable[str]], single: Optional[str] -): +def _normalize_messages(value: Optional[Iterable[str]], single: Optional[str]): if single is not None: return [single] if value is None: @@ -28,9 +26,9 @@ def expect_hyperbrowser_error( try: action() except HyperbrowserError as error: - assert "Unknown error occurred" not in str(error), ( - f"{label}: unexpected generic error message {error!r}" - ) + assert "Unknown error occurred" not in str( + error + ), f"{label}: unexpected generic error message {error!r}" if status_code is not None: assert error.status_code == status_code, ( @@ -38,15 +36,17 @@ def expect_hyperbrowser_error( f"got {error.status_code}" ) if code is not None: - assert 
error.code == code, f"{label}: expected code={code}, got {error.code}" + assert ( + error.code == code + ), f"{label}: expected code={code}, got {error.code}" if service is not None: - assert error.service == service, ( - f"{label}: expected service={service}, got {error.service}" - ) + assert ( + error.service == service + ), f"{label}: expected service={service}, got {error.service}" if retryable is not None: - assert error.retryable == retryable, ( - f"{label}: expected retryable={retryable}, got {error.retryable}" - ) + assert ( + error.retryable == retryable + ), f"{label}: expected retryable={retryable}, got {error.retryable}" for text in _normalize_messages(message_includes_many, message_includes): assert text in str(error), ( @@ -80,9 +80,9 @@ async def expect_hyperbrowser_error_async( try: await action() except HyperbrowserError as error: - assert "Unknown error occurred" not in str(error), ( - f"{label}: unexpected generic error message {error!r}" - ) + assert "Unknown error occurred" not in str( + error + ), f"{label}: unexpected generic error message {error!r}" if status_code is not None: assert error.status_code == status_code, ( @@ -90,15 +90,17 @@ async def expect_hyperbrowser_error_async( f"got {error.status_code}" ) if code is not None: - assert error.code == code, f"{label}: expected code={code}, got {error.code}" + assert ( + error.code == code + ), f"{label}: expected code={code}, got {error.code}" if service is not None: - assert error.service == service, ( - f"{label}: expected service={service}, got {error.service}" - ) + assert ( + error.service == service + ), f"{label}: expected service={service}, got {error.service}" if retryable is not None: - assert error.retryable == retryable, ( - f"{label}: expected retryable={retryable}, got {error.retryable}" - ) + assert ( + error.retryable == retryable + ), f"{label}: expected retryable={retryable}, got {error.retryable}" for text in _normalize_messages(message_includes_many, message_includes): 
assert text in str(error), ( diff --git a/tests/helpers/http.py b/tests/helpers/http.py index 3926ba3b..ad78f7d7 100644 --- a/tests/helpers/http.py +++ b/tests/helpers/http.py @@ -69,7 +69,9 @@ def get_image_by_name(image_name: str): response.raise_for_status() payload = response.json() images = payload.get("data", {}).get("images") or payload.get("images") or [] - image = next((entry for entry in images if entry.get("imageName") == image_name), None) + image = next( + (entry for entry in images if entry.get("imageName") == image_name), None + ) if image is None: raise RuntimeError(f"custom image {image_name!r} not found in /api/images") return image @@ -84,7 +86,9 @@ async def get_image_by_name_async(image_name: str): response.raise_for_status() payload = response.json() images = payload.get("data", {}).get("images") or payload.get("images") or [] - image = next((entry for entry in images if entry.get("imageName") == image_name), None) + image = next( + (entry for entry in images if entry.get("imageName") == image_name), None + ) if image is None: raise RuntimeError(f"custom image {image_name!r} not found in /api/images") return image diff --git a/uv.lock b/uv.lock deleted file mode 100644 index 7518fc90..00000000 --- a/uv.lock +++ /dev/null @@ -1,3 +0,0 @@ -version = 1 -revision = 3 -requires-python = ">=3.12"