diff --git a/hyperbrowser/client/async_client.py b/hyperbrowser/client/async_client.py index 338021b6..a6c9b3d2 100644 --- a/hyperbrowser/client/async_client.py +++ b/hyperbrowser/client/async_client.py @@ -9,6 +9,7 @@ from .managers.async_manager.extension import ExtensionManager from .managers.async_manager.extract import ExtractManager from .managers.async_manager.profile import ProfileManager +from .managers.async_manager.sandbox import SandboxManager from .managers.async_manager.scrape import ScrapeManager from .managers.async_manager.session import SessionManager from .managers.async_manager.team import TeamManager @@ -24,8 +25,16 @@ def __init__( api_key: Optional[str] = None, base_url: Optional[str] = None, timeout: Optional[int] = 30, + runtime_proxy_override: Optional[str] = None, ): - super().__init__(AsyncTransport, config, api_key, base_url) + super().__init__( + AsyncTransport, + config, + api_key, + base_url, + runtime_proxy_override, + ) + self.timeout = timeout or 30 self.transport.client.timeout = timeout self.sessions = SessionManager(self) self.web = WebManager(self) @@ -37,6 +46,7 @@ def __init__( self.agents = Agents(self) self.team = TeamManager(self) self.computer_action = ComputerActionManager(self) + self.sandboxes = SandboxManager(self) async def close(self) -> None: await self.transport.close() diff --git a/hyperbrowser/client/base.py b/hyperbrowser/client/base.py index 69ffc76e..ac6ac227 100644 --- a/hyperbrowser/client/base.py +++ b/hyperbrowser/client/base.py @@ -15,6 +15,7 @@ def __init__( config: Optional[ClientConfig] = None, api_key: Optional[str] = None, base_url: Optional[str] = None, + runtime_proxy_override: Optional[str] = None, ): if config is None: config = ClientConfig( @@ -30,6 +31,7 @@ def __init__( "HYPERBROWSER_BASE_URL", "https://api.hyperbrowser.ai" ) ), + runtime_proxy_override=runtime_proxy_override, ) if not config.api_key: diff --git a/hyperbrowser/client/managers/async_manager/sandbox.py 
b/hyperbrowser/client/managers/async_manager/sandbox.py new file mode 100644 index 00000000..6db8bc83 --- /dev/null +++ b/hyperbrowser/client/managers/async_manager/sandbox.py @@ -0,0 +1,379 @@ +from typing import Dict, Optional, Union + +from ....exceptions import HyperbrowserError +from ....models.sandbox import ( + CreateSandboxParams, + SandboxDetail, + SandboxExecParams, + SandboxExposeParams, + SandboxExposeResult, + SandboxMemorySnapshotParams, + SandboxMemorySnapshotResult, + SandboxRuntimeSession, + StartSandboxFromSnapshotParams, +) +from ....models.session import BasicResponse +from ....sandbox_common import ( + RuntimeConnection, + ensure_response_ok, + normalize_network_error, + parse_json_response, +) +from ..sandboxes.shared import ( + _build_sandbox_exposed_url, + _copy_model, + _expires_within_buffer, +) +from .sandboxes.sandbox_files import ( + DEFAULT_WATCH_TIMEOUT_MS, + SandboxFileWatchHandle, + SandboxFilesApi, + SandboxWatchDirHandle, +) +from .sandboxes.sandbox_processes import ( + DEFAULT_PROCESS_KILL_WAIT_SECONDS, + SandboxProcessHandle, + SandboxProcessesApi, +) +from .sandboxes.sandbox_terminal import ( + DEFAULT_TERMINAL_KILL_WAIT_SECONDS, + SandboxTerminalApi, + SandboxTerminalConnection, + SandboxTerminalHandle, +) +from .sandboxes.sandbox_transport import RuntimeTransport + +__all__ = [ + "DEFAULT_PROCESS_KILL_WAIT_SECONDS", + "DEFAULT_TERMINAL_KILL_WAIT_SECONDS", + "DEFAULT_WATCH_TIMEOUT_MS", + "RuntimeTransport", + "SandboxFileWatchHandle", + "SandboxFilesApi", + "SandboxHandle", + "SandboxManager", + "SandboxProcessHandle", + "SandboxProcessesApi", + "SandboxTerminalApi", + "SandboxTerminalConnection", + "SandboxTerminalHandle", + "SandboxWatchDirHandle", +] + + +class SandboxHandle: + def __init__(self, service: "SandboxManager", detail: SandboxDetail): + self._service = service + self._detail = detail + self._runtime_session = self._to_runtime_session(detail) + self._transport = RuntimeTransport( + 
self._resolve_runtime_connection, + service.runtime_timeout, + service.runtime_proxy_override, + ) + self.processes = SandboxProcessesApi(self._transport) + self.files = SandboxFilesApi( + self._transport, + self._resolve_runtime_socket_info, + service.runtime_proxy_override, + ) + self.terminal = SandboxTerminalApi( + self._transport, + self._resolve_runtime_socket_info, + service.runtime_proxy_override, + ) + self.pty = self.terminal + + @property + def id(self) -> str: + return self._detail.id + + @property + def status(self) -> str: + return self._detail.status + + @property + def region(self): + return self._detail.region + + @property + def runtime(self): + return self._detail.runtime + + @property + def token_expires_at(self): + return self._detail.token_expires_at + + @property + def session_url(self) -> str: + return self._detail.session_url + + def to_dict(self): + return self._detail.model_dump() + + def to_json(self): + return self.to_dict() + + async def info(self) -> SandboxDetail: + detail = await self._service.get_detail(self.id) + self._hydrate(detail) + return _copy_model(self._detail) + + async def refresh(self) -> "SandboxHandle": + await self.info() + return self + + async def connect(self) -> "SandboxHandle": + await self.create_runtime_session(force_refresh=True) + return self + + async def stop(self) -> BasicResponse: + response = await self._service.stop(self.id) + self._clear_runtime_session("closed") + return response + + async def create_memory_snapshot( + self, + params: Optional[SandboxMemorySnapshotParams] = None, + ) -> SandboxMemorySnapshotResult: + if params is None: + normalized = SandboxMemorySnapshotParams() + elif isinstance(params, SandboxMemorySnapshotParams): + normalized = params + else: + raise TypeError("params must be a SandboxMemorySnapshotParams instance") + return await self._service.create_memory_snapshot(self.id, normalized) + + async def expose(self, params: SandboxExposeParams) -> SandboxExposeResult: + if not 
isinstance(params, SandboxExposeParams): + raise TypeError("params must be a SandboxExposeParams instance") + return await self._service.expose(self.id, params, runtime=self.runtime) + + def get_exposed_url(self, port: int) -> str: + return _build_sandbox_exposed_url(self.runtime, port) + + async def create_runtime_session( + self, force_refresh: bool = False + ) -> SandboxRuntimeSession: + self._assert_runtime_available() + if ( + not force_refresh + and self._runtime_session is not None + and not _expires_within_buffer(self._runtime_session.token_expires_at) + ): + return _copy_model(self._runtime_session) + + detail = await self._service.get_detail(self.id) + self._hydrate(detail) + if self._runtime_session is None: + raise HyperbrowserError( + f"Sandbox {self.id} is not running", + status_code=409, + code="sandbox_not_running", + retryable=False, + service="runtime", + ) + return _copy_model(self._runtime_session) + + async def exec(self, input: Union[str, SandboxExecParams]): + if isinstance(input, str): + params = SandboxExecParams(command=input) + else: + if not isinstance(input, SandboxExecParams): + raise TypeError( + "input must be a command string or SandboxExecParams instance" + ) + params = input + return await self.processes.exec(params) + + async def get_process(self, process_id: str) -> SandboxProcessHandle: + return await self.processes.get(process_id) + + def _hydrate(self, detail: SandboxDetail) -> None: + self._detail = detail + self._runtime_session = self._to_runtime_session(detail) + + async def _resolve_runtime_connection( + self, force_refresh: bool = False + ) -> RuntimeConnection: + session = await self.create_runtime_session(force_refresh=force_refresh) + return RuntimeConnection( + sandbox_id=self.id, + base_url=session.runtime.base_url, + token=session.token, + ) + + async def _resolve_runtime_socket_info(self) -> RuntimeConnection: + session = await self.create_runtime_session() + return RuntimeConnection( + sandbox_id=self.id, + 
base_url=session.runtime.base_url, + token=session.token, + ) + + def _apply_runtime_session(self, session: SandboxRuntimeSession) -> None: + self._runtime_session = _copy_model(session) + self._detail = self._detail.model_copy( + update={ + "status": session.status, + "region": session.region, + "runtime": session.runtime, + "token": session.token, + "token_expires_at": session.token_expires_at, + } + ) + + def _clear_runtime_session(self, status: Optional[str] = None) -> None: + self._runtime_session = None + self._detail = self._detail.model_copy( + update={ + "status": status or self._detail.status, + "token": None, + "token_expires_at": None, + } + ) + + def _assert_runtime_available(self) -> None: + if self._detail.status in {"closed", "error"}: + raise HyperbrowserError( + f"Sandbox {self.id} is not running", + status_code=409, + code="sandbox_not_running", + retryable=False, + service="runtime", + ) + + @staticmethod + def _to_runtime_session(detail: SandboxDetail) -> Optional[SandboxRuntimeSession]: + if not detail.token: + return None + return SandboxRuntimeSession( + sandbox_id=detail.id, + status=detail.status, + region=detail.region, + token=detail.token, + token_expires_at=detail.token_expires_at, + runtime=detail.runtime, + ) + + +class SandboxManager: + def __init__(self, client): + self._client = client + self.runtime_timeout = getattr(client, "timeout", 30) + self.runtime_proxy_override = getattr( + client.config, + "runtime_proxy_override", + None, + ) + + async def create(self, params: CreateSandboxParams) -> SandboxHandle: + if not isinstance(params, CreateSandboxParams): + raise TypeError("params must be a CreateSandboxParams instance") + detail = await self._create_detail(params) + return self.attach(detail) + + async def start_from_snapshot( + self, params: StartSandboxFromSnapshotParams + ) -> SandboxHandle: + if not isinstance(params, StartSandboxFromSnapshotParams): + raise TypeError("params must be a StartSandboxFromSnapshotParams 
instance") + return await self.create(params) + + async def get(self, sandbox_id: str) -> SandboxHandle: + return self.attach(await self.get_detail(sandbox_id)) + + async def connect(self, sandbox_id: str) -> SandboxHandle: + sandbox = await self.get(sandbox_id) + await sandbox.connect() + return sandbox + + async def stop(self, sandbox_id: str) -> BasicResponse: + payload = await self._request("PUT", f"/sandbox/{sandbox_id}/stop") + return BasicResponse(**payload) + + async def get_runtime_session(self, sandbox_id: str) -> SandboxRuntimeSession: + detail = await self.get_detail(sandbox_id) + session = SandboxHandle._to_runtime_session(detail) + if session is None: + raise HyperbrowserError( + f"Sandbox {sandbox_id} is not running", + status_code=409, + code="sandbox_not_running", + retryable=False, + service="runtime", + ) + return session + + async def get_detail(self, sandbox_id: str) -> SandboxDetail: + payload = await self._request("GET", f"/sandbox/{sandbox_id}") + return SandboxDetail(**payload) + + def attach(self, detail: SandboxDetail) -> SandboxHandle: + return SandboxHandle(self, detail) + + async def create_memory_snapshot( + self, + sandbox_id: str, + params: Optional[SandboxMemorySnapshotParams] = None, + ) -> SandboxMemorySnapshotResult: + payload = await self._request( + "POST", + f"/sandbox/{sandbox_id}/snapshot", + data=(params or SandboxMemorySnapshotParams()).model_dump( + exclude_none=True, by_alias=True + ), + ) + return SandboxMemorySnapshotResult(**payload) + + async def expose( + self, + sandbox_id: str, + params: SandboxExposeParams, + *, + runtime=None, + ) -> SandboxExposeResult: + payload = await self._request( + "POST", + f"/sandbox/{sandbox_id}/expose", + data=params.model_dump(exclude_none=True, by_alias=True), + ) + target_runtime = runtime or (await self.get_detail(sandbox_id)).runtime + return SandboxExposeResult( + port=payload["port"], + auth=payload["auth"], + url=_build_sandbox_exposed_url(target_runtime, payload["port"]), + 
) + + async def _create_detail(self, params: CreateSandboxParams) -> SandboxDetail: + payload = await self._request( + "POST", + "/sandbox", + data=params.model_dump(exclude_none=True, by_alias=True), + ) + return SandboxDetail(**payload) + + async def _request( + self, + method: str, + path: str, + *, + params: Optional[Dict[str, object]] = None, + data: Optional[Dict[str, object]] = None, + ): + try: + response = await self._client.transport.client.request( + method, + self._client._build_url(path), + params={k: v for k, v in (params or {}).items() if v is not None}, + json=data, + ) + except BaseException as error: + raise normalize_network_error( + error, + "control", + "Unknown error occurred", + ) + + ensure_response_ok(response, "control") + return parse_json_response(response, "control") diff --git a/hyperbrowser/client/managers/async_manager/sandboxes/__init__.py b/hyperbrowser/client/managers/async_manager/sandboxes/__init__.py new file mode 100644 index 00000000..e9684afe --- /dev/null +++ b/hyperbrowser/client/managers/async_manager/sandboxes/__init__.py @@ -0,0 +1,33 @@ +from .sandbox_files import ( + DEFAULT_WATCH_TIMEOUT_MS, + SandboxFileWatchHandle, + SandboxFilesApi, + SandboxWatchDirHandle, +) +from .sandbox_processes import ( + DEFAULT_PROCESS_KILL_WAIT_SECONDS, + SandboxProcessHandle, + SandboxProcessesApi, +) +from .sandbox_terminal import ( + DEFAULT_TERMINAL_KILL_WAIT_SECONDS, + SandboxTerminalApi, + SandboxTerminalConnection, + SandboxTerminalHandle, +) +from .sandbox_transport import RuntimeTransport + +__all__ = [ + "DEFAULT_PROCESS_KILL_WAIT_SECONDS", + "DEFAULT_TERMINAL_KILL_WAIT_SECONDS", + "DEFAULT_WATCH_TIMEOUT_MS", + "RuntimeTransport", + "SandboxFileWatchHandle", + "SandboxFilesApi", + "SandboxProcessHandle", + "SandboxProcessesApi", + "SandboxTerminalApi", + "SandboxTerminalConnection", + "SandboxTerminalHandle", + "SandboxWatchDirHandle", +] diff --git a/hyperbrowser/client/managers/async_manager/sandboxes/sandbox_files.py 
b/hyperbrowser/client/managers/async_manager/sandboxes/sandbox_files.py new file mode 100644 index 00000000..d888be33 --- /dev/null +++ b/hyperbrowser/client/managers/async_manager/sandboxes/sandbox_files.py @@ -0,0 +1,712 @@ +import asyncio +import base64 +import inspect +import io +import json +import socket +from datetime import datetime +from typing import AsyncIterator, Callable, List, Optional, Union +from urllib.parse import urlencode + +from websockets.asyncio.client import connect as async_ws_connect +from websockets.exceptions import ConnectionClosed + +from .....exceptions import HyperbrowserError +from .....models.sandbox import ( + SandboxFileChmodParams, + SandboxFileChownParams, + SandboxFileCopyParams, + SandboxFileDeleteParams, + SandboxFileInfo, + SandboxFileReadResult, + SandboxFileSystemEvent, + SandboxFileWriteEntry, + SandboxFileTransferResult, + SandboxFileWatchDoneEvent, + SandboxFileWatchEventMessage, + SandboxFileWatchStatus, + SandboxPresignFileParams, + SandboxPresignedUrl, +) +from .....sandbox_common import build_headers, to_websocket_transport_target +from ...sandboxes.shared import ( + DEFAULT_WATCH_TIMEOUT_MS, + _copy_model, + _encode_write_data, + _normalize_event_type, + _normalize_file_info, + _normalize_websocket_error, + _normalize_write_info, + _relative_watch_name, +) +from .sandbox_transport import RuntimeTransport + + +class SandboxFileWatchHandle: + def __init__( + self, + transport: RuntimeTransport, + get_connection_info, + status, + runtime_proxy_override: Optional[str] = None, + ): + self._transport = transport + self._get_connection_info = get_connection_info + self._status = status + self._runtime_proxy_override = runtime_proxy_override + + @property + def id(self) -> str: + return self._status.id + + @property + def current(self) -> SandboxFileWatchStatus: + return _copy_model(self._status) + + def to_dict(self): + return self._status.model_dump() + + def to_json(self): + return self.to_dict() + + async def 
refresh(self, include_events: bool = False) -> "SandboxFileWatchHandle": + params = {"includeEvents": True} if include_events else None + payload = await self._transport.request_json( + f"/sandbox/files/watch/{self.id}", + params=params, + ) + self._status = SandboxFileWatchStatus(**payload["watch"]) + return self + + async def stop(self) -> None: + await self._transport.request_json( + f"/sandbox/files/watch/{self.id}", + method="DELETE", + ) + self._status = self._status.model_copy( + update={ + "active": False, + "stopped_at": self._status.stopped_at + or int(datetime.now().timestamp() * 1000), + } + ) + + async def events( + self, + *, + cursor: Optional[int] = None, + route: str = "ws", + ) -> AsyncIterator[object]: + connection = await self._get_connection_info() + query = urlencode( + [ + ("sessionId", connection.sandbox_id), + *([("cursor", str(cursor))] if cursor is not None else []), + ] + ) + target = to_websocket_transport_target( + connection.base_url, + f"/sandbox/files/watch/{self.id}/{route}?{query}", + self._runtime_proxy_override, + ) + headers = build_headers(connection.token, host_header=target.host_header) + connect_kwargs = {} + if target.connect_host is not None and target.connect_port is not None: + sock = socket.create_connection( + (target.connect_host, target.connect_port), + timeout=self._transport._timeout, + ) + sock.setblocking(False) + connect_kwargs["sock"] = sock + try: + websocket = await async_ws_connect( + target.url, + additional_headers=headers, + open_timeout=self._transport._timeout, + **connect_kwargs, + ) + except BaseException as error: + raise _normalize_websocket_error(error) + + try: + while True: + try: + message = await websocket.recv() + except ConnectionClosed: + break + + if isinstance(message, bytes): + message = message.decode("utf-8") + parsed = json.loads(message) + if parsed["type"] == "event": + event = SandboxFileWatchEventMessage( + type="event", + event=parsed["event"], + ) + self._status = 
self._status.model_copy( + update={ + "oldest_seq": self._status.oldest_seq or event.event.seq, + "last_seq": max(self._status.last_seq, event.event.seq), + } + ) + yield event + elif parsed["type"] == "done": + self._status = SandboxFileWatchStatus(**parsed["status"]) + yield SandboxFileWatchDoneEvent(type="done", status=self.current) + break + except GeneratorExit: + raise + except BaseException as error: + raise _normalize_websocket_error(error) + finally: + await websocket.close() + + +class SandboxWatchDirHandle: + def __init__( + self, + watch: SandboxFileWatchHandle, + on_event: Callable[[SandboxFileSystemEvent], object], + *, + on_exit: Optional[Callable[[Optional[BaseException]], object]] = None, + timeout_ms: Optional[int] = None, + ): + self._watch = watch + self._root_path = watch.current.path + self._on_event = on_event + self._on_exit = on_exit + self._stop_requested = False + self._exit_notified = False + self._task = asyncio.create_task(self._run()) + effective_timeout = ( + DEFAULT_WATCH_TIMEOUT_MS if timeout_ms is None else timeout_ms + ) + self._timeout_task = ( + asyncio.create_task(self._auto_stop(effective_timeout)) + if effective_timeout > 0 + else None + ) + + async def stop(self) -> None: + if self._stop_requested: + return + self._stop_requested = True + + if self._timeout_task is not None: + self._timeout_task.cancel() + self._timeout_task = None + + try: + await self._watch.stop() + except HyperbrowserError as error: + if error.status_code not in {404, 409}: + raise + + if asyncio.current_task() is not self._task: + await self._task + + async def _auto_stop(self, timeout_ms: int) -> None: + try: + await asyncio.sleep(timeout_ms / 1000.0) + await self.stop() + except asyncio.CancelledError: + return + + async def _run(self) -> None: + exit_error = None + try: + async for message in self._watch.events(): + event_type = _normalize_event_type(message.event.op) + if not event_type: + continue + result = self._on_event( + 
SandboxFileSystemEvent( + type=event_type, + name=_relative_watch_name(self._root_path, message.event.path), + ) + ) + if inspect.isawaitable(result): + await result + except BaseException as error: + exit_error = error + finally: + if self._timeout_task is not None: + self._timeout_task.cancel() + self._timeout_task = None + if not self._exit_notified: + self._exit_notified = True + if self._on_exit is not None: + result = self._on_exit(exit_error) + if inspect.isawaitable(result): + await result + + +class SandboxFilesApi: + def __init__( + self, + transport: RuntimeTransport, + get_connection_info, + runtime_proxy_override: Optional[str] = None, + ): + self._transport = transport + self._get_connection_info = get_connection_info + self._runtime_proxy_override = runtime_proxy_override + + async def list( + self, + path: str, + *, + depth: Optional[int] = None, + ) -> List[SandboxFileInfo]: + depth = 1 if depth is None else depth + if depth < 1: + raise ValueError("depth should be at least one") + + payload = await self._transport.request_json( + "/sandbox/files", + params={ + "path": path, + "depth": depth, + }, + ) + return [_normalize_file_info(entry) for entry in payload.get("entries", [])] + + async def get_info(self, path: str) -> SandboxFileInfo: + payload = await self._transport.request_json( + "/sandbox/files/stat", + params={"path": path}, + ) + return _normalize_file_info(payload["file"]) + + async def stat(self, path: str) -> SandboxFileInfo: + return await self.get_info(path) + + async def exists(self, path: str) -> bool: + try: + await self.get_info(path) + return True + except HyperbrowserError as error: + if error.status_code == 404: + return False + if ( + "not found" in str(error).lower() + or "no such file" in str(error).lower() + ): + return False + raise + + async def read( + self, + path: str, + *, + offset: Optional[int] = None, + length: Optional[int] = None, + format: str = "text", + ): + if format == "text": + return ( + await 
self._read_wire( + path, + offset=offset, + length=length, + encoding="utf8", + ) + ).content + + response = await self._read_wire( + path, + offset=offset, + length=length, + encoding="base64", + ) + content = base64.b64decode(response.content) + if format in {"bytes", "blob"}: + return content + if format == "stream": + return io.BytesIO(content) + raise ValueError("format should be one of: text, bytes, blob, stream") + + async def read_text( + self, + path: str, + *, + offset: Optional[int] = None, + length: Optional[int] = None, + ) -> str: + return await self.read(path, offset=offset, length=length, format="text") + + async def read_bytes( + self, + path: str, + *, + offset: Optional[int] = None, + length: Optional[int] = None, + ) -> bytes: + return await self.read(path, offset=offset, length=length, format="bytes") + + async def write( + self, + path_or_files: Union[str, List[SandboxFileWriteEntry]], + data: Optional[Union[str, bytes, bytearray]] = None, + ): + if isinstance(path_or_files, str): + if data is None: + raise ValueError("Path and data are required") + payload = await self._transport.request_json( + "/sandbox/files/write", + method="POST", + json_body={ + "path": path_or_files, + **_encode_write_data(data), + }, + headers={"content-type": "application/json"}, + ) + return _normalize_write_info(payload["files"][0]) + + if not path_or_files: + return [] + + encoded_files = [] + for entry in path_or_files: + if not isinstance(entry, SandboxFileWriteEntry): + raise TypeError("files must contain SandboxFileWriteEntry instances") + encoded_files.append( + { + "path": entry.path, + **_encode_write_data(entry.data), + } + ) + + payload = await self._transport.request_json( + "/sandbox/files/write", + method="POST", + json_body={"files": encoded_files}, + headers={"content-type": "application/json"}, + ) + return [_normalize_write_info(entry) for entry in payload.get("files", [])] + + async def write_text( + self, + path: str, + data: str, + *, + append: 
Optional[bool] = None, + mode: Optional[str] = None, + ): + return await self._write_single( + path, + data, + append=append, + mode=mode, + encoding="utf8", + ) + + async def write_bytes( + self, + path: str, + data: bytes, + *, + append: Optional[bool] = None, + mode: Optional[str] = None, + ): + return await self._write_single( + path, + base64.b64encode(data).decode("ascii"), + append=append, + mode=mode, + encoding="base64", + ) + + async def upload(self, path: str, data: Union[str, bytes, bytearray]): + body = data.encode("utf-8") if isinstance(data, str) else bytes(data) + payload = await self._transport.request_json( + "/sandbox/files/upload", + method="PUT", + params={"path": path}, + content=body, + ) + return SandboxFileTransferResult(**payload) + + async def download(self, path: str) -> bytes: + return await self._transport.request_bytes( + "/sandbox/files/download", + params={"path": path}, + ) + + async def make_dir( + self, + path: str, + *, + parents: Optional[bool] = None, + mode: Optional[str] = None, + ) -> bool: + payload = await self._transport.request_json( + "/sandbox/files/mkdir", + method="POST", + json_body={ + "path": path, + "parents": parents, + "mode": mode, + }, + headers={"content-type": "application/json"}, + ) + return bool(payload.get("created")) + + async def mkdir( + self, + path: str, + *, + parents: Optional[bool] = None, + mode: Optional[str] = None, + ) -> bool: + return await self.make_dir(path, parents=parents, mode=mode) + + async def rename(self, old_path: str, new_path: str) -> SandboxFileInfo: + payload = await self._transport.request_json( + "/sandbox/files/move", + method="POST", + json_body={ + "from": old_path, + "to": new_path, + }, + headers={"content-type": "application/json"}, + ) + return _normalize_file_info(payload["entry"]) + + async def move( + self, + *, + source: str, + destination: str, + overwrite: Optional[bool] = None, + ) -> SandboxFileInfo: + return await self.rename(source, destination) + + async 
def remove(self, path: str, *, recursive: Optional[bool] = None) -> None: + await self._transport.request_json( + "/sandbox/files/delete", + method="POST", + json_body=SandboxFileDeleteParams( + path=path, + recursive=recursive, + ).model_dump(exclude_none=True), + headers={"content-type": "application/json"}, + ) + + async def delete(self, path: str, *, recursive: Optional[bool] = None) -> None: + await self.remove(path, recursive=recursive) + + async def copy( + self, + params: Optional[SandboxFileCopyParams] = None, + *, + source: Optional[str] = None, + destination: Optional[str] = None, + recursive: Optional[bool] = None, + overwrite: Optional[bool] = None, + ) -> SandboxFileInfo: + if params is None: + normalized = SandboxFileCopyParams( + source=source, + destination=destination, + recursive=recursive, + overwrite=overwrite, + ) + elif isinstance(params, SandboxFileCopyParams): + normalized = params + else: + raise TypeError("params must be a SandboxFileCopyParams instance") + + payload = await self._transport.request_json( + "/sandbox/files/copy", + method="POST", + json_body={ + "from": normalized.source, + "to": normalized.destination, + "recursive": normalized.recursive, + "overwrite": normalized.overwrite, + }, + headers={"content-type": "application/json"}, + ) + return _normalize_file_info(payload["entry"]) + + async def chmod( + self, + params: Optional[SandboxFileChmodParams] = None, + *, + path: Optional[str] = None, + mode: Optional[str] = None, + recursive: Optional[bool] = None, + ) -> None: + if params is None: + normalized = SandboxFileChmodParams( + path=path, + mode=mode, + recursive=recursive, + ) + elif isinstance(params, SandboxFileChmodParams): + normalized = params + else: + raise TypeError("params must be a SandboxFileChmodParams instance") + await self._transport.request_json( + "/sandbox/files/chmod", + method="POST", + json_body=normalized.model_dump(exclude_none=True), + headers={"content-type": "application/json"}, + ) + + async 
def chown( + self, + params: Optional[SandboxFileChownParams] = None, + *, + path: Optional[str] = None, + uid: Optional[int] = None, + gid: Optional[int] = None, + recursive: Optional[bool] = None, + ) -> None: + if params is None: + normalized = SandboxFileChownParams( + path=path, + uid=uid, + gid=gid, + recursive=recursive, + ) + elif isinstance(params, SandboxFileChownParams): + normalized = params + else: + raise TypeError("params must be a SandboxFileChownParams instance") + await self._transport.request_json( + "/sandbox/files/chown", + method="POST", + json_body=normalized.model_dump(exclude_none=True), + headers={"content-type": "application/json"}, + ) + + async def watch(self, path: str, *, recursive: Optional[bool] = None): + payload = await self._transport.request_json( + "/sandbox/files/watch", + method="POST", + json_body={ + "path": path, + "recursive": recursive, + }, + headers={"content-type": "application/json"}, + ) + return SandboxFileWatchHandle( + self._transport, + self._get_connection_info, + SandboxFileWatchStatus(**payload["watch"]), + self._runtime_proxy_override, + ) + + async def watch_dir( + self, + path: str, + on_event: Callable[[SandboxFileSystemEvent], object], + *, + recursive: Optional[bool] = None, + timeout_ms: Optional[int] = None, + on_exit: Optional[Callable[[Optional[BaseException]], object]] = None, + ) -> SandboxWatchDirHandle: + return SandboxWatchDirHandle( + await self.watch(path, recursive=recursive), + on_event, + on_exit=on_exit, + timeout_ms=timeout_ms, + ) + + async def get_watch( + self, watch_id: str, include_events: bool = False + ) -> SandboxFileWatchHandle: + payload = await self._transport.request_json( + f"/sandbox/files/watch/{watch_id}", + params={"includeEvents": True} if include_events else None, + ) + return SandboxFileWatchHandle( + self._transport, + self._get_connection_info, + SandboxFileWatchStatus(**payload["watch"]), + self._runtime_proxy_override, + ) + + async def upload_url( + self, + path: 
str, + *, + expires_in_seconds: Optional[int] = None, + one_time: Optional[bool] = None, + ) -> SandboxPresignedUrl: + payload = await self._transport.request_json( + "/sandbox/files/presign-upload", + method="POST", + json_body=SandboxPresignFileParams( + path=path, + expires_in_seconds=expires_in_seconds, + one_time=one_time, + ).model_dump(exclude_none=True, by_alias=True), + headers={"content-type": "application/json"}, + ) + return SandboxPresignedUrl(**payload) + + async def download_url( + self, + path: str, + *, + expires_in_seconds: Optional[int] = None, + one_time: Optional[bool] = None, + ) -> SandboxPresignedUrl: + payload = await self._transport.request_json( + "/sandbox/files/presign-download", + method="POST", + json_body=SandboxPresignFileParams( + path=path, + expires_in_seconds=expires_in_seconds, + one_time=one_time, + ).model_dump(exclude_none=True, by_alias=True), + headers={"content-type": "application/json"}, + ) + return SandboxPresignedUrl(**payload) + + async def _read_wire( + self, + path: str, + *, + offset: Optional[int] = None, + length: Optional[int] = None, + encoding: str, + ) -> SandboxFileReadResult: + payload = await self._transport.request_json( + "/sandbox/files/read", + method="POST", + json_body={ + "path": path, + "offset": offset, + "length": length, + "encoding": encoding, + }, + headers={"content-type": "application/json"}, + ) + return SandboxFileReadResult(**payload) + + async def _write_single( + self, + path: str, + data: str, + *, + append: Optional[bool] = None, + mode: Optional[str] = None, + encoding: str, + ): + payload = await self._transport.request_json( + "/sandbox/files/write", + method="POST", + json_body={ + "path": path, + "data": data, + "append": append, + "mode": mode, + "encoding": encoding, + }, + headers={"content-type": "application/json"}, + ) + return _normalize_write_info(payload["files"][0]) diff --git a/hyperbrowser/client/managers/async_manager/sandboxes/sandbox_processes.py 
import base64
from typing import AsyncIterator, Dict, Optional, Union

from .....models.sandbox import (
    SandboxExecParams,
    SandboxProcessExitEvent,
    SandboxProcessListResponse,
    SandboxProcessOutputEvent,
    SandboxProcessResult,
    SandboxProcessStdinParams,
    SandboxProcessSummary,
)
from .sandbox_transport import RuntimeTransport

# Grace period kill() grants the process to report completion by default.
DEFAULT_PROCESS_KILL_WAIT_SECONDS = 5.0


class SandboxProcessHandle:
    """Client-side handle for one process running inside a sandbox."""

    def __init__(self, transport: RuntimeTransport, summary: SandboxProcessSummary):
        self._transport = transport
        self._summary = summary

    @property
    def id(self) -> str:
        return self._summary.id

    @property
    def status(self) -> str:
        return self._summary.status

    def to_dict(self):
        return self._summary.model_dump()

    def to_json(self):
        return self.to_dict()

    async def refresh(self) -> "SandboxProcessHandle":
        """Re-fetch this process's summary from the runtime."""
        doc = await self._transport.request_json(f"/sandbox/processes/{self.id}")
        self._summary = SandboxProcessSummary(**doc["process"])
        return self

    async def wait(
        self,
        timeout_ms: Optional[int] = None,
        timeout_sec: Optional[int] = None,
    ) -> SandboxProcessResult:
        """Block until the process exits, then return its result.

        NOTE(review): the wire payload mixes "timeoutMs" (camelCase) with
        "timeout_sec" (snake_case) — looks inconsistent; confirm against the
        runtime API schema before changing either key.
        """
        doc = await self._transport.request_json(
            f"/sandbox/processes/{self.id}/wait",
            method="POST",
            json_body={
                "timeoutMs": timeout_ms,
                "timeout_sec": timeout_sec,
            },
            headers={"content-type": "application/json"},
        )
        outcome = SandboxProcessResult(**doc["result"])
        # Merge the terminal result into the cached summary, keeping the
        # launch-time fields the result payload does not carry.
        prior = self._summary
        self._summary = SandboxProcessSummary(
            id=outcome.id,
            status=outcome.status,
            command=prior.command,
            args=prior.args,
            cwd=prior.cwd,
            pid=prior.pid,
            exit_code=outcome.exit_code,
            started_at=outcome.started_at,
            completed_at=outcome.completed_at,
        )
        return outcome

    async def signal(self, signal: str) -> None:
        """Send a POSIX signal name (e.g. "SIGTERM") to the process."""
        doc = await self._transport.request_json(
            f"/sandbox/processes/{self.id}/signal",
            method="POST",
            json_body={"signal": signal},
            headers={"content-type": "application/json"},
        )
        self._summary = SandboxProcessSummary(**doc["process"])

    async def kill(
        self,
        timeout_ms: Optional[int] = None,
        timeout_sec: Optional[int] = None,
    ) -> SandboxProcessResult:
        """Terminate the process and wait (bounded) for its final result."""
        doc = await self._transport.request_json(
            f"/sandbox/processes/{self.id}",
            method="DELETE",
        )
        self._summary = SandboxProcessSummary(**doc["process"])
        if timeout_ms is None and timeout_sec is None:
            timeout_ms = int(DEFAULT_PROCESS_KILL_WAIT_SECONDS * 1000)
        return await self.wait(timeout_ms=timeout_ms, timeout_sec=timeout_sec)

    async def write_stdin(
        self,
        data: Optional[Union[str, bytes, bytearray, SandboxProcessStdinParams]] = None,
        *,
        encoding: Optional[str] = None,
        eof: Optional[bool] = None,
    ) -> None:
        """Feed data to the process's stdin; bytes are base64-encoded."""
        params = (
            data
            if isinstance(data, SandboxProcessStdinParams)
            else SandboxProcessStdinParams(data=data, encoding=encoding, eof=eof)
        )

        body: Dict[str, object] = {"eof": params.eof}
        raw = params.data
        if raw is not None:
            if isinstance(raw, str):
                body["data"] = raw
                body["encoding"] = params.encoding or "utf8"
            else:
                body["data"] = base64.b64encode(bytes(raw)).decode("ascii")
                body["encoding"] = "base64"

        await self._transport.request_json(
            f"/sandbox/processes/{self.id}/stdin",
            method="POST",
            json_body=body,
            headers={"content-type": "application/json"},
        )

    async def stream(self, from_seq: Optional[int] = None) -> AsyncIterator[object]:
        """Yield output/exit events from the process's SSE stream."""
        query = {"from_seq": from_seq} if from_seq and from_seq > 0 else None
        async for frame in self._transport.stream_sse(
            f"/sandbox/processes/{self.id}/stream",
            params=query,
        ):
            kind = frame["event"]
            body = frame["data"]
            if kind == "output":
                yield SandboxProcessOutputEvent(
                    type=body["stream"],
                    seq=body["seq"],
                    data=body["data"],
                    timestamp=body["timestamp"],
                )
            elif kind == "done":
                yield SandboxProcessExitEvent(
                    type="exit",
                    result=SandboxProcessResult(**body),
                )

    async def result(self) -> SandboxProcessResult:
        """Convenience alias: wait for and return the final result."""
        return await self.wait()


class SandboxProcessesApi:
    """Process-management surface of a sandbox runtime."""

    def __init__(self, transport: RuntimeTransport):
        self._transport = transport

    async def exec(self, input: SandboxExecParams) -> SandboxProcessResult:
        """Run a command to completion and return its result."""
        if not isinstance(input, SandboxExecParams):
            raise TypeError("input must be a SandboxExecParams instance")
        doc = await self._transport.request_json(
            "/sandbox/exec",
            method="POST",
            json_body=input.model_dump(exclude_none=True, by_alias=True),
            headers={"content-type": "application/json"},
        )
        return SandboxProcessResult(**doc["result"])

    async def start(self, input: SandboxExecParams) -> SandboxProcessHandle:
        """Launch a command without waiting; returns a process handle."""
        if not isinstance(input, SandboxExecParams):
            raise TypeError("input must be a SandboxExecParams instance")
        doc = await self._transport.request_json(
            "/sandbox/processes",
            method="POST",
            json_body=input.model_dump(exclude_none=True, by_alias=True),
            headers={"content-type": "application/json"},
        )
        return SandboxProcessHandle(
            self._transport,
            SandboxProcessSummary(**doc["process"]),
        )

    async def get(self, process_id: str) -> SandboxProcessHandle:
        """Look up an existing process by id."""
        doc = await self._transport.request_json(f"/sandbox/processes/{process_id}")
        return SandboxProcessHandle(
            self._transport,
            SandboxProcessSummary(**doc["process"]),
        )

    async def list(
        self,
        *,
        status=None,
        limit: Optional[int] = None,
        cursor: Optional[Union[str, int]] = None,
        created_after: Optional[int] = None,
        created_before: Optional[int] = None,
    ) -> SandboxProcessListResponse:
        """List processes, optionally filtered/paginated."""
        # A list of statuses is joined into a comma-separated filter string.
        if isinstance(status, list):
            status_filter = ",".join(status) if status else None
        else:
            status_filter = status

        doc = await self._transport.request_json(
            "/sandbox/processes",
            params={
                "status": status_filter,
                "limit": limit,
                "cursor": cursor,
                "created_after": created_after,
                "created_before": created_before,
            },
        )
        return SandboxProcessListResponse(**doc)
import base64
import json
import socket
from typing import AsyncIterator, Dict, Optional, Union

from websockets.asyncio.client import connect as async_ws_connect
from websockets.exceptions import ConnectionClosed

from .....models.sandbox import (
    SandboxTerminalCreateParams,
    SandboxTerminalExitEvent,
    SandboxTerminalOutputEvent,
    SandboxTerminalStatus,
    SandboxTerminalWaitParams,
)
from .....sandbox_common import build_headers, to_websocket_transport_target
from ...sandboxes.shared import (
    _copy_model,
    _normalize_terminal_output_chunk,
    _normalize_terminal_status,
    _normalize_websocket_error,
)
from .sandbox_transport import RuntimeTransport

# Grace period kill() grants the terminal to exit by default.
DEFAULT_TERMINAL_KILL_WAIT_SECONDS = 5.0


class SandboxTerminalConnection:
    """Live websocket connection to a sandbox PTY."""

    def __init__(self, websocket):
        self._websocket = websocket

    async def events(self) -> AsyncIterator[object]:
        """Yield output/exit events until the websocket closes."""
        while True:
            try:
                message = await self._websocket.recv()
            except ConnectionClosed:
                break

            if isinstance(message, bytes):
                message = message.decode("utf-8")
            parsed = json.loads(message)
            if parsed["type"] == "output":
                normalized = _normalize_terminal_output_chunk(parsed)
                yield SandboxTerminalOutputEvent(
                    type="output",
                    **normalized,
                )
            elif parsed["type"] == "exit":
                yield SandboxTerminalExitEvent(
                    type="exit",
                    status=_normalize_terminal_status(parsed["status"]),
                )

    async def write(self, data: Union[str, bytes, bytearray]) -> None:
        """Send keystrokes to the PTY; bytes are base64-encoded on the wire."""
        payload: Dict[str, object] = {
            "type": "input",
            "data": data
            if isinstance(data, str)
            else base64.b64encode(bytes(data)).decode("ascii"),
        }
        if not isinstance(data, str):
            payload["encoding"] = "base64"
        await self._websocket.send(json.dumps(payload))

    async def resize(self, rows: int, cols: int) -> None:
        """Resize the remote PTY."""
        await self._websocket.send(
            json.dumps(
                {
                    "type": "resize",
                    "rows": rows,
                    "cols": cols,
                }
            )
        )

    async def close(self) -> None:
        """Close the websocket."""
        await self._websocket.close()


class SandboxTerminalHandle:
    """Handle for one PTY session inside a sandbox."""

    def __init__(
        self,
        transport: RuntimeTransport,
        get_connection_info,
        status,
        runtime_proxy_override: Optional[str] = None,
    ):
        self._transport = transport
        self._get_connection_info = get_connection_info
        self._status = status
        self._runtime_proxy_override = runtime_proxy_override

    @property
    def id(self) -> str:
        return self._status.id

    @property
    def current(self) -> SandboxTerminalStatus:
        # Deep copy so callers cannot mutate the cached status.
        return _copy_model(self._status)

    def to_dict(self):
        return self._status.model_dump()

    def to_json(self):
        return self.to_dict()

    async def refresh(self, include_output: bool = False) -> "SandboxTerminalHandle":
        """Re-fetch the PTY status, optionally including buffered output."""
        payload = await self._transport.request_json(
            f"/sandbox/pty/{self.id}",
            params={"includeOutput": True} if include_output else None,
        )
        self._status = _normalize_terminal_status(payload["pty"])
        return self

    async def wait(
        self,
        timeout_ms: Optional[int] = None,
        include_output: Optional[bool] = None,
    ) -> SandboxTerminalStatus:
        """Block until the PTY exits (or the runtime-side timeout fires)."""
        payload = await self._transport.request_json(
            f"/sandbox/pty/{self.id}/wait",
            method="POST",
            json_body=SandboxTerminalWaitParams(
                timeout_ms=timeout_ms,
                include_output=include_output,
            ).model_dump(exclude_none=True, by_alias=True),
            headers={"content-type": "application/json"},
        )
        self._status = _normalize_terminal_status(payload["pty"])
        return self.current

    async def signal(self, signal: Optional[str] = None) -> SandboxTerminalStatus:
        """Send a signal to the PTY's process group."""
        payload = await self._transport.request_json(
            f"/sandbox/pty/{self.id}/kill",
            method="POST",
            json_body={"signal": signal},
            headers={"content-type": "application/json"},
        )
        self._status = _normalize_terminal_status(payload["pty"])
        return self.current

    async def kill(
        self,
        signal: Optional[str] = None,
        *,
        timeout_ms: Optional[int] = None,
    ) -> SandboxTerminalStatus:
        """Signal the PTY then wait (bounded) for it to exit."""
        await self.signal(signal)
        if timeout_ms is None:
            timeout_ms = int(DEFAULT_TERMINAL_KILL_WAIT_SECONDS * 1000)
        return await self.wait(timeout_ms=timeout_ms)

    async def resize(self, rows: int, cols: int) -> SandboxTerminalStatus:
        """Resize the PTY via the REST endpoint."""
        payload = await self._transport.request_json(
            f"/sandbox/pty/{self.id}/resize",
            method="POST",
            json_body={"rows": rows, "cols": cols},
            headers={"content-type": "application/json"},
        )
        self._status = _normalize_terminal_status(payload["pty"])
        return self.current

    async def attach(self) -> SandboxTerminalConnection:
        """Open the interactive websocket for this terminal.

        Fix: when the websocket handshake failed, the pre-connected socket
        (created for proxy/direct-connect targets) used to leak; it is now
        closed before the normalized error is raised.
        """
        connection = await self._get_connection_info()
        target = to_websocket_transport_target(
            connection.base_url,
            f"/sandbox/pty/{self.id}/ws?sessionId={connection.sandbox_id}",
            self._runtime_proxy_override,
        )
        headers = build_headers(connection.token, host_header=target.host_header)
        sock = None
        connect_kwargs = {}
        if target.connect_host is not None and target.connect_port is not None:
            sock = socket.create_connection(
                (target.connect_host, target.connect_port),
                timeout=self._transport._timeout,
            )
            sock.setblocking(False)
            connect_kwargs["sock"] = sock

        try:
            websocket = await async_ws_connect(
                target.url,
                additional_headers=headers,
                open_timeout=self._transport._timeout,
                **connect_kwargs,
            )
        except BaseException as error:
            if sock is not None:
                try:
                    sock.close()
                except OSError:
                    pass
            raise _normalize_websocket_error(error)

        return SandboxTerminalConnection(websocket)


class SandboxTerminalApi:
    """PTY-management surface of a sandbox runtime."""

    def __init__(
        self,
        transport: RuntimeTransport,
        get_connection_info,
        runtime_proxy_override: Optional[str] = None,
    ):
        self._transport = transport
        self._get_connection_info = get_connection_info
        self._runtime_proxy_override = runtime_proxy_override

    async def create(
        self,
        input: SandboxTerminalCreateParams,
    ) -> SandboxTerminalHandle:
        """Create a new PTY session."""
        if not isinstance(input, SandboxTerminalCreateParams):
            raise TypeError("input must be a SandboxTerminalCreateParams instance")
        payload = await self._transport.request_json(
            "/sandbox/pty",
            method="POST",
            json_body=input.model_dump(exclude_none=True, by_alias=True),
            headers={"content-type": "application/json"},
        )
        return SandboxTerminalHandle(
            self._transport,
            self._get_connection_info,
            _normalize_terminal_status(payload["pty"]),
            self._runtime_proxy_override,
        )

    async def get(
        self, terminal_id: str, include_output: bool = False
    ) -> SandboxTerminalHandle:
        """Look up an existing PTY by id."""
        payload = await self._transport.request_json(
            f"/sandbox/pty/{terminal_id}",
            params={"includeOutput": True} if include_output else None,
        )
        return SandboxTerminalHandle(
            self._transport,
            self._get_connection_info,
            _normalize_terminal_status(payload["pty"]),
            self._runtime_proxy_override,
        )
    def __init__(
        self,
        resolve_connection,
        timeout: float = 30.0,
        runtime_proxy_override: Optional[str] = None,
    ):
        # resolve_connection(force_refresh: bool) -> RuntimeConnection; awaited
        # before every request so credentials can be refreshed on 401.
        self._resolve_connection = resolve_connection
        self._timeout = timeout
        self._runtime_proxy_override = runtime_proxy_override

    async def request_json(
        self,
        path: str,
        *,
        method: str = "GET",
        params: Optional[Dict[str, object]] = None,
        json_body: Optional[Dict[str, object]] = None,
        content: Optional[Union[str, bytes]] = None,
        headers: Optional[Dict[str, str]] = None,
    ):
        """Issue a runtime request and parse the response body as JSON."""
        response = await self._request(
            path,
            method=method,
            params=params,
            json_body=json_body,
            content=content,
            headers=headers,
        )
        return parse_json_response(response, "runtime")

    async def request_bytes(
        self,
        path: str,
        *,
        method: str = "GET",
        params: Optional[Dict[str, object]] = None,
        headers: Optional[Dict[str, str]] = None,
    ) -> bytes:
        """Issue a runtime request and return the raw response body."""
        response = await self._request(
            path, method=method, params=params, headers=headers
        )
        return response.content

    async def stream_sse(
        self, path: str, params: Optional[Dict[str, object]] = None
    ) -> AsyncIterator[Dict[str, object]]:
        """Stream Server-Sent Events from ``path``.

        Yields dicts of the form ``{"event": name, "data": parsed, "id": id}``.
        ``data`` is JSON-decoded when possible, otherwise the raw text.
        The HTTP stream and client are closed when the generator finishes,
        including early exit by the consumer (the ``finally`` block).
        """
        client, response = await self._open_stream(path, params=params)
        # SSE accumulator state: an event is built up field-by-field and
        # emitted when a blank line is reached.
        event_name = "message"
        event_id = None
        data_lines = []

        def flush_event():
            # Emit the accumulated event (or None if nothing accumulated)
            # and reset the accumulator to SSE defaults.
            nonlocal event_name, event_id, data_lines
            if not data_lines and event_name == "message" and event_id is None:
                return None

            raw_data = "\n".join(data_lines)
            data = raw_data
            if raw_data:
                try:
                    data = json.loads(raw_data)
                except json.JSONDecodeError:
                    data = raw_data

            event = {
                "event": event_name,
                "data": data,
                "id": event_id,
            }
            event_name = "message"
            event_id = None
            data_lines = []
            return event

        try:
            async for line in response.aiter_lines():
                # Blank line terminates an event per the SSE format.
                if line == "":
                    event = flush_event()
                    if event is not None:
                        yield event
                    continue

                # Lines starting with ":" are SSE comments/keep-alives.
                if line.startswith(":"):
                    continue

                if ":" in line:
                    field, value = line.split(":", 1)
                    value = value.lstrip(" ")
                else:
                    field, value = line, ""

                if field == "event":
                    event_name = value or "message"
                elif field == "data":
                    data_lines.append(value)
                elif field == "id":
                    event_id = value

            # Emit any event left unterminated at end-of-stream.
            trailing = flush_event()
            if trailing is not None:
                yield trailing
        finally:
            await response.aclose()
            await client.aclose()

    async def _request(
        self,
        path: str,
        *,
        method: str = "GET",
        params: Optional[Dict[str, object]] = None,
        json_body: Optional[Dict[str, object]] = None,
        content: Optional[Union[str, bytes]] = None,
        headers: Optional[Dict[str, str]] = None,
        allow_refresh: bool = True,
    ) -> httpx.Response:
        """Send one request; on 401, refresh the connection and retry once."""
        connection = await self._resolve_connection(False)
        response = await self._send(
            connection,
            path,
            method=method,
            params=params,
            json_body=json_body,
            content=content,
            headers=headers,
        )

        if response.status_code == 401 and allow_refresh:
            await response.aclose()
            refreshed = await self._resolve_connection(True)
            retry = await self._send(
                refreshed,
                path,
                method=method,
                params=params,
                json_body=json_body,
                content=content,
                headers=headers,
            )
            return ensure_response_ok(retry, "runtime")

        return ensure_response_ok(response, "runtime")

    async def _open_stream(
        self,
        path: str,
        *,
        params: Optional[Dict[str, object]] = None,
        allow_refresh: bool = True,
    ):
        """Open a streaming GET; on 401, refresh credentials and retry once.

        Returns ``(client, response)`` — the caller owns closing both.
        """
        connection = await self._resolve_connection(False)
        client, response = await self._send_stream(connection, path, params=params)
        if response.status_code == 401 and allow_refresh:
            await response.aclose()
            await client.aclose()
            refreshed = await self._resolve_connection(True)
            client, response = await self._send_stream(refreshed, path, params=params)

        if not response.is_success:
            # Body must be read before ensure_response_ok can report it.
            await response.aread()
            ensure_response_ok(response, "runtime")
        return client, response

    async def _send(
        self,
        connection: RuntimeConnection,
        path: str,
        *,
        method: str,
        params: Optional[Dict[str, object]],
        json_body: Optional[Dict[str, object]],
        content: Optional[Union[str, bytes]],
        headers: Optional[Dict[str, str]],
    ) -> httpx.Response:
        """Send a single non-streaming request against the runtime target.

        A fresh AsyncClient is created per call and always closed; the body is
        fully read before returning so the response stays usable afterwards.
        """
        request_path = _build_query_path(path, params)
        target = resolve_runtime_transport_target(
            connection.base_url,
            request_path,
            self._runtime_proxy_override,
        )
        merged_headers = build_headers(connection.token, headers, target.host_header)
        client = httpx.AsyncClient(timeout=self._timeout)

        try:
            response = await client.request(
                method,
                target.url,
                headers=merged_headers,
                json=json_body,
                content=content,
            )
        except BaseException as error:
            await client.aclose()
            raise normalize_network_error(
                error,
                "runtime",
                "Unknown runtime request error",
            )

        await response.aread()
        await client.aclose()
        return response

    async def _send_stream(
        self,
        connection: RuntimeConnection,
        path: str,
        *,
        params: Optional[Dict[str, object]],
    ):
        """Open a streaming GET; returns ``(client, response)`` on success.

        On failure the client is closed here; on success the caller must
        close both the response and the client.
        """
        request_path = _build_query_path(path, params)
        target = resolve_runtime_transport_target(
            connection.base_url,
            request_path,
            self._runtime_proxy_override,
        )
        headers = build_headers(
            connection.token,
            {"Accept": "text/event-stream"},
            target.host_header,
        )
        client = httpx.AsyncClient(timeout=self._timeout)

        try:
            request = client.build_request("GET", target.url, headers=headers)
            response = await client.send(request, stream=True)
            return client, response
        except BaseException as error:
            await client.aclose()
            raise normalize_network_error(
                error,
                "runtime",
                "Unknown runtime request error",
            )
__all__ = [
    "DEFAULT_WATCH_TIMEOUT_MS",
    "_build_query_path",
    "_build_sandbox_exposed_url",
    "_copy_model",
    "_encode_write_data",
    "_expires_within_buffer",
    "_normalize_event_type",
    "_normalize_file_info",
    "_normalize_terminal_output_chunk",
    "_normalize_terminal_status",
    "_normalize_websocket_error",
    "_normalize_write_info",
    "_relative_watch_name",
]

import base64
import posixpath
from datetime import datetime, timedelta, timezone
from typing import Dict, Optional, Union
from urllib.parse import urlencode, urlsplit, urlunsplit

from ....exceptions import HyperbrowserError
from ....models.sandbox import (
    SandboxFileInfo,
    SandboxFileWriteInfo,
    SandboxTerminalStatus,
)
from ....sandbox_common import (
    RUNTIME_SESSION_REFRESH_BUFFER_MS,
    normalize_network_error,
    parse_error_payload,
)

# Default timeout used by directory-watch polling.
DEFAULT_WATCH_TIMEOUT_MS = 60_000


def _copy_model(model):
    # Deep copy so callers can't mutate internally cached pydantic models.
    return model.model_copy(deep=True)


def _build_sandbox_exposed_url(runtime, port: int) -> str:
    """Derive the public URL of an exposed sandbox port.

    The port is prefixed onto the runtime hostname ("<port>-<host>"),
    preserving scheme, explicit port, credentials, path, query and fragment.
    Falls back to the runtime base URL when it has no hostname.
    """
    parsed = urlsplit(runtime.base_url)
    hostname = parsed.hostname
    if not hostname:
        return runtime.base_url.rstrip("/")

    exposed_host = f"{port}-{hostname}"
    netloc = exposed_host
    if parsed.port:
        netloc = f"{netloc}:{parsed.port}"
    if parsed.username:
        credentials = parsed.username
        if parsed.password:
            credentials = f"{credentials}:{parsed.password}"
        netloc = f"{credentials}@{netloc}"

    return urlunsplit(
        (parsed.scheme, netloc, parsed.path, parsed.query, parsed.fragment)
    ).rstrip("/")


def _expires_within_buffer(expires_at: Optional[datetime]) -> bool:
    """Return True when ``expires_at`` falls inside the refresh buffer.

    Naive datetimes are assumed to be UTC.
    """
    if expires_at is None:
        return False
    if expires_at.tzinfo is None:
        expires_at = expires_at.replace(tzinfo=timezone.utc)
    threshold = datetime.now(timezone.utc) + timedelta(
        milliseconds=RUNTIME_SESSION_REFRESH_BUFFER_MS
    )
    return expires_at <= threshold
datetime.now(timezone.utc) + timedelta( + milliseconds=RUNTIME_SESSION_REFRESH_BUFFER_MS + ) + return expires_at <= threshold + + +def _build_query_path(path: str, params: Optional[Dict[str, object]] = None) -> str: + if not params: + return path + + filtered = [] + for key, value in params.items(): + if value is None: + continue + filtered.append((key, str(value))) + + if not filtered: + return path + + return f"{path}?{urlencode(filtered)}" + + +def _normalize_websocket_error(error: BaseException) -> HyperbrowserError: + if isinstance(error, HyperbrowserError): + return error + + response = getattr(error, "response", None) + if response is not None: + status_code = getattr(response, "status_code", None) + headers = getattr(response, "headers", {}) or {} + body = getattr(response, "body", b"") + if isinstance(body, memoryview): + body = body.tobytes() + if isinstance(body, bytearray): + body = bytes(body) + if isinstance(body, bytes): + raw_text = body.decode("utf-8", errors="replace") + elif isinstance(body, str): + raw_text = body + else: + raw_text = "" + + message, code, details = parse_error_payload( + raw_text, + f"Runtime websocket request failed: {status_code or 0}", + ) + request_id = None + if isinstance(headers, dict): + request_id = headers.get("x-request-id") or headers.get("request-id") + else: + request_id = headers.get("x-request-id") or headers.get("request-id") + + return HyperbrowserError( + message, + status_code=status_code, + code=code, + request_id=request_id, + retryable=bool(status_code in {429, 502, 503, 504}), + service="runtime", + details=details, + cause=error, + original_error=error if isinstance(error, Exception) else None, + ) + + status_code = getattr(error, "status_code", None) + headers = getattr(error, "headers", None) + if status_code is not None: + request_id = None + if headers is not None: + request_id = headers.get("x-request-id") or headers.get("request-id") + return HyperbrowserError( + f"Runtime websocket request 
failed: {status_code}", + status_code=status_code, + request_id=request_id, + retryable=bool(status_code in {429, 502, 503, 504}), + service="runtime", + cause=error, + original_error=error if isinstance(error, Exception) else None, + ) + + return normalize_network_error( + error, + "runtime", + "Unknown runtime websocket error", + ) + + +def _normalize_file_type(value: Optional[str]) -> Optional[str]: + if not value: + return None + return "dir" if value in {"dir", "directory"} else "file" + + +def _normalize_file_info(entry: Dict[str, object]) -> SandboxFileInfo: + normalized = dict(entry) + normalized["type"] = _normalize_file_type(normalized.get("type")) + return SandboxFileInfo(**normalized) + + +def _normalize_write_info(entry: Dict[str, object]) -> SandboxFileWriteInfo: + normalized = dict(entry) + normalized["type"] = _normalize_file_type(normalized.get("type")) + return SandboxFileWriteInfo(**normalized) + + +def _normalize_event_type(operation: str) -> Optional[str]: + lower = operation.lower() + if "chmod" in lower: + return "chmod" + if "create" in lower: + return "create" + if "remove" in lower or "delete" in lower: + return "remove" + if "rename" in lower: + return "rename" + if "write" in lower: + return "write" + return None + + +def _relative_watch_name(root: str, absolute_path: str) -> str: + relative = posixpath.relpath(absolute_path, root) + if relative in {"", "."}: + return posixpath.basename(absolute_path) + return relative + + +def _encode_write_data(data: Union[str, bytes, bytearray]) -> Dict[str, str]: + if isinstance(data, str): + return { + "data": data, + "encoding": "utf8", + } + return { + "data": base64.b64encode(bytes(data)).decode("ascii"), + "encoding": "base64", + } + + +def _normalize_terminal_output_chunk(entry: Dict[str, object]) -> Dict[str, object]: + raw = base64.b64decode(entry["data"]) + return { + "seq": entry["seq"], + "data": raw.decode("utf-8", errors="replace"), + "raw": raw, + "timestamp": entry["timestamp"], + } + 
def _normalize_terminal_status(entry: Dict[str, object]) -> SandboxTerminalStatus:
    """Build a SandboxTerminalStatus from a raw ``pty`` payload.

    Any buffered output chunks are base64-decoded; non-dict chunks are
    dropped.
    """
    data = dict(entry)
    chunks = data.get("output")
    if isinstance(chunks, list):
        decoded = []
        for chunk in chunks:
            if isinstance(chunk, dict):
                decoded.append(_normalize_terminal_output_chunk(chunk))
        data["output"] = decoded
    return SandboxTerminalStatus(**data)


from typing import Dict, Optional, Union

from ....exceptions import HyperbrowserError
from ....models.sandbox import (
    CreateSandboxParams,
    SandboxDetail,
    SandboxExecParams,
    SandboxExposeParams,
    SandboxExposeResult,
    SandboxMemorySnapshotParams,
    SandboxMemorySnapshotResult,
    SandboxRuntimeSession,
    StartSandboxFromSnapshotParams,
)
from ....models.session import BasicResponse
from ....sandbox_common import (
    RuntimeConnection,
    ensure_response_ok,
    normalize_network_error,
    parse_json_response,
)
from ..sandboxes.shared import (
    _build_sandbox_exposed_url,
    _copy_model,
    _expires_within_buffer,
)
from .sandboxes.sandbox_files import (
    DEFAULT_WATCH_TIMEOUT_MS,
    SandboxFileWatchHandle,
    SandboxFilesApi,
    SandboxWatchDirHandle,
)
from .sandboxes.sandbox_processes import (
    DEFAULT_PROCESS_KILL_WAIT_SECONDS,
    SandboxProcessHandle,
    SandboxProcessesApi,
)
from .sandboxes.sandbox_terminal import (
    DEFAULT_TERMINAL_KILL_WAIT_SECONDS,
    SandboxTerminalApi,
    SandboxTerminalConnection,
    SandboxTerminalHandle,
)
from .sandboxes.sandbox_transport import RuntimeTransport

__all__ = [
    "DEFAULT_PROCESS_KILL_WAIT_SECONDS",
    "DEFAULT_TERMINAL_KILL_WAIT_SECONDS",
    "DEFAULT_WATCH_TIMEOUT_MS",
    "RuntimeTransport",
    "SandboxFileWatchHandle",
    "SandboxFilesApi",
    "SandboxHandle",
    "SandboxManager",
    "SandboxProcessHandle",
    "SandboxProcessesApi",
    "SandboxTerminalApi",
    "SandboxTerminalConnection",
    "SandboxTerminalHandle",
    "SandboxWatchDirHandle",
]
"SandboxProcessesApi", + "SandboxTerminalApi", + "SandboxTerminalConnection", + "SandboxTerminalHandle", + "SandboxWatchDirHandle", +] + + +class SandboxHandle: + def __init__(self, service: "SandboxManager", detail: SandboxDetail): + self._service = service + self._detail = detail + self._runtime_session = self._to_runtime_session(detail) + self._transport = RuntimeTransport( + self._resolve_runtime_connection, + service.runtime_timeout, + service.runtime_proxy_override, + ) + self.processes = SandboxProcessesApi(self._transport) + self.files = SandboxFilesApi( + self._transport, + self._resolve_runtime_socket_info, + service.runtime_proxy_override, + ) + self.terminal = SandboxTerminalApi( + self._transport, + self._resolve_runtime_socket_info, + service.runtime_proxy_override, + ) + self.pty = self.terminal + + @property + def id(self) -> str: + return self._detail.id + + @property + def status(self) -> str: + return self._detail.status + + @property + def region(self): + return self._detail.region + + @property + def runtime(self): + return self._detail.runtime + + @property + def token_expires_at(self): + return self._detail.token_expires_at + + @property + def session_url(self) -> str: + return self._detail.session_url + + def to_dict(self): + return self._detail.model_dump() + + def to_json(self): + return self.to_dict() + + def info(self) -> SandboxDetail: + detail = self._service.get_detail(self.id) + self._hydrate(detail) + return _copy_model(self._detail) + + def refresh(self) -> "SandboxHandle": + self.info() + return self + + def connect(self) -> "SandboxHandle": + self.create_runtime_session(force_refresh=True) + return self + + def stop(self) -> BasicResponse: + response = self._service.stop(self.id) + self._clear_runtime_session("closed") + return response + + def create_memory_snapshot( + self, + params: Optional[SandboxMemorySnapshotParams] = None, + ) -> SandboxMemorySnapshotResult: + if params is None: + normalized = 
SandboxMemorySnapshotParams() + elif isinstance(params, SandboxMemorySnapshotParams): + normalized = params + else: + raise TypeError("params must be a SandboxMemorySnapshotParams instance") + return self._service.create_memory_snapshot(self.id, normalized) + + def expose(self, params: SandboxExposeParams) -> SandboxExposeResult: + if not isinstance(params, SandboxExposeParams): + raise TypeError("params must be a SandboxExposeParams instance") + return self._service.expose(self.id, params, runtime=self.runtime) + + def get_exposed_url(self, port: int) -> str: + return _build_sandbox_exposed_url(self.runtime, port) + + def create_runtime_session( + self, force_refresh: bool = False + ) -> SandboxRuntimeSession: + self._assert_runtime_available() + if ( + not force_refresh + and self._runtime_session is not None + and not _expires_within_buffer(self._runtime_session.token_expires_at) + ): + return _copy_model(self._runtime_session) + + detail = self._service.get_detail(self.id) + self._hydrate(detail) + if self._runtime_session is None: + raise HyperbrowserError( + f"Sandbox {self.id} is not running", + status_code=409, + code="sandbox_not_running", + retryable=False, + service="runtime", + ) + return _copy_model(self._runtime_session) + + def exec(self, input: Union[str, SandboxExecParams]): + if isinstance(input, str): + params = SandboxExecParams(command=input) + else: + if not isinstance(input, SandboxExecParams): + raise TypeError( + "input must be a command string or SandboxExecParams instance" + ) + params = input + return self.processes.exec(params) + + def get_process(self, process_id: str) -> SandboxProcessHandle: + return self.processes.get(process_id) + + def _hydrate(self, detail: SandboxDetail) -> None: + self._detail = detail + self._runtime_session = self._to_runtime_session(detail) + + def _resolve_runtime_connection( + self, force_refresh: bool = False + ) -> RuntimeConnection: + session = self.create_runtime_session(force_refresh=force_refresh) + 
return RuntimeConnection( + sandbox_id=self.id, + base_url=session.runtime.base_url, + token=session.token, + ) + + def _resolve_runtime_socket_info(self) -> RuntimeConnection: + session = self.create_runtime_session() + return RuntimeConnection( + sandbox_id=self.id, + base_url=session.runtime.base_url, + token=session.token, + ) + + def _apply_runtime_session(self, session: SandboxRuntimeSession) -> None: + self._runtime_session = _copy_model(session) + self._detail = self._detail.model_copy( + update={ + "status": session.status, + "region": session.region, + "runtime": session.runtime, + "token": session.token, + "token_expires_at": session.token_expires_at, + } + ) + + def _clear_runtime_session(self, status: Optional[str] = None) -> None: + self._runtime_session = None + self._detail = self._detail.model_copy( + update={ + "status": status or self._detail.status, + "token": None, + "token_expires_at": None, + } + ) + + def _assert_runtime_available(self) -> None: + if self._detail.status in {"closed", "error"}: + raise HyperbrowserError( + f"Sandbox {self.id} is not running", + status_code=409, + code="sandbox_not_running", + retryable=False, + service="runtime", + ) + + @staticmethod + def _to_runtime_session(detail: SandboxDetail) -> Optional[SandboxRuntimeSession]: + if not detail.token: + return None + return SandboxRuntimeSession( + sandbox_id=detail.id, + status=detail.status, + region=detail.region, + token=detail.token, + token_expires_at=detail.token_expires_at, + runtime=detail.runtime, + ) + + +class SandboxManager: + def __init__(self, client): + self._client = client + self.runtime_timeout = getattr(client, "timeout", 30) + self.runtime_proxy_override = getattr( + client.config, + "runtime_proxy_override", + None, + ) + + def create(self, params: CreateSandboxParams) -> SandboxHandle: + if not isinstance(params, CreateSandboxParams): + raise TypeError("params must be a CreateSandboxParams instance") + detail = self._create_detail(params) + return 
self.attach(detail) + + def start_from_snapshot( + self, params: StartSandboxFromSnapshotParams + ) -> SandboxHandle: + if not isinstance(params, StartSandboxFromSnapshotParams): + raise TypeError("params must be a StartSandboxFromSnapshotParams instance") + return self.create(params) + + def get(self, sandbox_id: str) -> SandboxHandle: + return self.attach(self.get_detail(sandbox_id)) + + def connect(self, sandbox_id: str) -> SandboxHandle: + sandbox = self.get(sandbox_id) + sandbox.connect() + return sandbox + + def stop(self, sandbox_id: str) -> BasicResponse: + payload = self._request("PUT", f"/sandbox/{sandbox_id}/stop") + return BasicResponse(**payload) + + def get_runtime_session(self, sandbox_id: str) -> SandboxRuntimeSession: + detail = self.get_detail(sandbox_id) + session = SandboxHandle._to_runtime_session(detail) + if session is None: + raise HyperbrowserError( + f"Sandbox {sandbox_id} is not running", + status_code=409, + code="sandbox_not_running", + retryable=False, + service="runtime", + ) + return session + + def get_detail(self, sandbox_id: str) -> SandboxDetail: + payload = self._request("GET", f"/sandbox/{sandbox_id}") + return SandboxDetail(**payload) + + def attach(self, detail: SandboxDetail) -> SandboxHandle: + return SandboxHandle(self, detail) + + def create_memory_snapshot( + self, + sandbox_id: str, + params: Optional[SandboxMemorySnapshotParams] = None, + ) -> SandboxMemorySnapshotResult: + payload = self._request( + "POST", + f"/sandbox/{sandbox_id}/snapshot", + data=(params or SandboxMemorySnapshotParams()).model_dump( + exclude_none=True, by_alias=True + ), + ) + return SandboxMemorySnapshotResult(**payload) + + def expose( + self, + sandbox_id: str, + params: SandboxExposeParams, + *, + runtime=None, + ) -> SandboxExposeResult: + payload = self._request( + "POST", + f"/sandbox/{sandbox_id}/expose", + data=params.model_dump(exclude_none=True, by_alias=True), + ) + target_runtime = runtime or self.get_detail(sandbox_id).runtime + 
return SandboxExposeResult( + port=payload["port"], + auth=payload["auth"], + url=_build_sandbox_exposed_url(target_runtime, payload["port"]), + ) + + def _create_detail(self, params: CreateSandboxParams) -> SandboxDetail: + payload = self._request( + "POST", + "/sandbox", + data=params.model_dump(exclude_none=True, by_alias=True), + ) + return SandboxDetail(**payload) + + def _request( + self, + method: str, + path: str, + *, + params: Optional[Dict[str, object]] = None, + data: Optional[Dict[str, object]] = None, + ): + try: + response = self._client.transport.client.request( + method, + self._client._build_url(path), + params={k: v for k, v in (params or {}).items() if v is not None}, + json=data, + ) + except BaseException as error: + raise normalize_network_error( + error, + "control", + "Unknown error occurred", + ) + + ensure_response_ok(response, "control") + return parse_json_response(response, "control") diff --git a/hyperbrowser/client/managers/sync_manager/sandboxes/__init__.py b/hyperbrowser/client/managers/sync_manager/sandboxes/__init__.py new file mode 100644 index 00000000..e9684afe --- /dev/null +++ b/hyperbrowser/client/managers/sync_manager/sandboxes/__init__.py @@ -0,0 +1,33 @@ +from .sandbox_files import ( + DEFAULT_WATCH_TIMEOUT_MS, + SandboxFileWatchHandle, + SandboxFilesApi, + SandboxWatchDirHandle, +) +from .sandbox_processes import ( + DEFAULT_PROCESS_KILL_WAIT_SECONDS, + SandboxProcessHandle, + SandboxProcessesApi, +) +from .sandbox_terminal import ( + DEFAULT_TERMINAL_KILL_WAIT_SECONDS, + SandboxTerminalApi, + SandboxTerminalConnection, + SandboxTerminalHandle, +) +from .sandbox_transport import RuntimeTransport + +__all__ = [ + "DEFAULT_PROCESS_KILL_WAIT_SECONDS", + "DEFAULT_TERMINAL_KILL_WAIT_SECONDS", + "DEFAULT_WATCH_TIMEOUT_MS", + "RuntimeTransport", + "SandboxFileWatchHandle", + "SandboxFilesApi", + "SandboxProcessHandle", + "SandboxProcessesApi", + "SandboxTerminalApi", + "SandboxTerminalConnection", + "SandboxTerminalHandle", + 
"SandboxWatchDirHandle", +] diff --git a/hyperbrowser/client/managers/sync_manager/sandboxes/sandbox_files.py b/hyperbrowser/client/managers/sync_manager/sandboxes/sandbox_files.py new file mode 100644 index 00000000..b700405b --- /dev/null +++ b/hyperbrowser/client/managers/sync_manager/sandboxes/sandbox_files.py @@ -0,0 +1,693 @@ +import base64 +import io +import json +import socket +import threading +from datetime import datetime +from typing import Callable, List, Optional, Union +from urllib.parse import urlencode + +from websockets.exceptions import ConnectionClosed +from websockets.sync.client import connect as sync_ws_connect + +from .....exceptions import HyperbrowserError +from .....models.sandbox import ( + SandboxFileChmodParams, + SandboxFileChownParams, + SandboxFileCopyParams, + SandboxFileDeleteParams, + SandboxFileInfo, + SandboxFileReadResult, + SandboxFileSystemEvent, + SandboxFileWriteEntry, + SandboxFileTransferResult, + SandboxFileWatchDoneEvent, + SandboxFileWatchEventMessage, + SandboxFileWatchStatus, + SandboxPresignFileParams, + SandboxPresignedUrl, +) +from .....sandbox_common import build_headers, to_websocket_transport_target +from ...sandboxes.shared import ( + DEFAULT_WATCH_TIMEOUT_MS, + _copy_model, + _encode_write_data, + _normalize_event_type, + _normalize_file_info, + _normalize_websocket_error, + _normalize_write_info, + _relative_watch_name, +) +from .sandbox_transport import RuntimeTransport + + +class SandboxFileWatchHandle: + def __init__( + self, + transport: RuntimeTransport, + get_connection_info, + status, + runtime_proxy_override: Optional[str] = None, + ): + self._transport = transport + self._get_connection_info = get_connection_info + self._status = status + self._runtime_proxy_override = runtime_proxy_override + + @property + def id(self) -> str: + return self._status.id + + @property + def current(self) -> SandboxFileWatchStatus: + return _copy_model(self._status) + + def to_dict(self): + return 
self._status.model_dump() + + def to_json(self): + return self.to_dict() + + def refresh(self, include_events: bool = False) -> "SandboxFileWatchHandle": + params = {"includeEvents": True} if include_events else None + payload = self._transport.request_json( + f"/sandbox/files/watch/{self.id}", + params=params, + ) + self._status = SandboxFileWatchStatus(**payload["watch"]) + return self + + def stop(self) -> None: + self._transport.request_json( + f"/sandbox/files/watch/{self.id}", + method="DELETE", + ) + self._status = self._status.model_copy( + update={ + "active": False, + "stopped_at": self._status.stopped_at + or int(datetime.now().timestamp() * 1000), + } + ) + + def events( + self, + *, + cursor: Optional[int] = None, + route: str = "ws", + ): + connection = self._get_connection_info() + query = urlencode( + [ + ("sessionId", connection.sandbox_id), + *([("cursor", str(cursor))] if cursor is not None else []), + ] + ) + target = to_websocket_transport_target( + connection.base_url, + f"/sandbox/files/watch/{self.id}/{route}?{query}", + self._runtime_proxy_override, + ) + headers = build_headers(connection.token, host_header=target.host_header) + connect_kwargs = {} + if target.connect_host is not None and target.connect_port is not None: + connect_kwargs["sock"] = socket.create_connection( + (target.connect_host, target.connect_port), + timeout=self._transport._timeout, + ) + try: + websocket = sync_ws_connect( + target.url, + additional_headers=headers, + open_timeout=self._transport._timeout, + **connect_kwargs, + ) + except BaseException as error: + raise _normalize_websocket_error(error) + + try: + while True: + try: + message = websocket.recv() + except ConnectionClosed: + break + + if isinstance(message, bytes): + message = message.decode("utf-8") + parsed = json.loads(message) + if parsed["type"] == "event": + event = SandboxFileWatchEventMessage( + type="event", + event=parsed["event"], + ) + self._status = self._status.model_copy( + update={ + 
"oldest_seq": self._status.oldest_seq or event.event.seq, + "last_seq": max(self._status.last_seq, event.event.seq), + } + ) + yield event + elif parsed["type"] == "done": + self._status = SandboxFileWatchStatus(**parsed["status"]) + yield SandboxFileWatchDoneEvent(type="done", status=self.current) + break + except GeneratorExit: + raise + except BaseException as error: + raise _normalize_websocket_error(error) + finally: + websocket.close() + + +class SandboxWatchDirHandle: + def __init__( + self, + watch: SandboxFileWatchHandle, + on_event: Callable[[SandboxFileSystemEvent], object], + *, + on_exit: Optional[Callable[[Optional[BaseException]], object]] = None, + timeout_ms: Optional[int] = None, + ): + self._watch = watch + self._root_path = watch.current.path + self._on_event = on_event + self._on_exit = on_exit + self._thread = threading.Thread(target=self._run, daemon=True) + self._timer = None + self._stopped = threading.Event() + self._exit_notified = False + + effective_timeout = ( + DEFAULT_WATCH_TIMEOUT_MS if timeout_ms is None else timeout_ms + ) + if effective_timeout > 0: + self._timer = threading.Timer(effective_timeout / 1000.0, self.stop) + self._timer.daemon = True + self._timer.start() + + self._thread.start() + + def stop(self) -> None: + if self._stopped.is_set(): + return + self._stopped.set() + + if self._timer is not None: + self._timer.cancel() + self._timer = None + + try: + self._watch.stop() + except HyperbrowserError as error: + if error.status_code not in {404, 409}: + raise + + if threading.current_thread() is not self._thread: + self._thread.join() + + def _run(self) -> None: + exit_error = None + try: + for message in self._watch.events(): + event_type = _normalize_event_type(message.event.op) + if not event_type: + continue + self._on_event( + SandboxFileSystemEvent( + type=event_type, + name=_relative_watch_name(self._root_path, message.event.path), + ) + ) + except BaseException as error: + exit_error = error + finally: + if 
self._timer is not None: + self._timer.cancel() + self._timer = None + if not self._exit_notified: + self._exit_notified = True + if self._on_exit is not None: + self._on_exit(exit_error) + + +class SandboxFilesApi: + def __init__( + self, + transport: RuntimeTransport, + get_connection_info, + runtime_proxy_override: Optional[str] = None, + ): + self._transport = transport + self._get_connection_info = get_connection_info + self._runtime_proxy_override = runtime_proxy_override + + def exists(self, path: str) -> bool: + try: + self.get_info(path) + return True + except HyperbrowserError as error: + if error.status_code == 404: + return False + if ( + "not found" in str(error).lower() + or "no such file" in str(error).lower() + ): + return False + raise + + def get_info(self, path: str) -> SandboxFileInfo: + payload = self._transport.request_json( + "/sandbox/files/stat", + params={"path": path}, + ) + return _normalize_file_info(payload["file"]) + + def stat(self, path: str) -> SandboxFileInfo: + return self.get_info(path) + + def list( + self, + path: str, + *, + depth: Optional[int] = None, + ) -> List[SandboxFileInfo]: + depth = 1 if depth is None else depth + if depth < 1: + raise ValueError("depth should be at least one") + + payload = self._transport.request_json( + "/sandbox/files", + params={ + "path": path, + "depth": depth, + }, + ) + return [_normalize_file_info(entry) for entry in payload.get("entries", [])] + + def read( + self, + path: str, + *, + offset: Optional[int] = None, + length: Optional[int] = None, + format: str = "text", + ): + if format == "text": + return self._read_wire( + path, offset=offset, length=length, encoding="utf8" + ).content + + response = self._read_wire( + path, offset=offset, length=length, encoding="base64" + ) + content = base64.b64decode(response.content) + if format in {"bytes", "blob"}: + return content + if format == "stream": + return io.BytesIO(content) + raise ValueError("format should be one of: text, bytes, blob, 
stream") + + def read_text( + self, + path: str, + *, + offset: Optional[int] = None, + length: Optional[int] = None, + ) -> str: + return self.read(path, offset=offset, length=length, format="text") + + def read_bytes( + self, + path: str, + *, + offset: Optional[int] = None, + length: Optional[int] = None, + ) -> bytes: + return self.read(path, offset=offset, length=length, format="bytes") + + def write( + self, + path_or_files: Union[str, List[SandboxFileWriteEntry]], + data: Optional[Union[str, bytes, bytearray]] = None, + ): + if isinstance(path_or_files, str): + if data is None: + raise ValueError("Path and data are required") + payload = self._transport.request_json( + "/sandbox/files/write", + method="POST", + json_body={ + "path": path_or_files, + **_encode_write_data(data), + }, + headers={"content-type": "application/json"}, + ) + return _normalize_write_info(payload["files"][0]) + + if not path_or_files: + return [] + + encoded_files = [] + for entry in path_or_files: + if not isinstance(entry, SandboxFileWriteEntry): + raise TypeError("files must contain SandboxFileWriteEntry instances") + encoded_files.append( + { + "path": entry.path, + **_encode_write_data(entry.data), + } + ) + + payload = self._transport.request_json( + "/sandbox/files/write", + method="POST", + json_body={"files": encoded_files}, + headers={"content-type": "application/json"}, + ) + return [_normalize_write_info(entry) for entry in payload.get("files", [])] + + def write_text( + self, + path: str, + data: str, + *, + append: Optional[bool] = None, + mode: Optional[str] = None, + ): + return self._write_single( + path, + data, + append=append, + mode=mode, + encoding="utf8", + ) + + def write_bytes( + self, + path: str, + data: bytes, + *, + append: Optional[bool] = None, + mode: Optional[str] = None, + ): + return self._write_single( + path, + base64.b64encode(data).decode("ascii"), + append=append, + mode=mode, + encoding="base64", + ) + + def upload(self, path: str, data: 
Union[str, bytes, bytearray]): + body = data.encode("utf-8") if isinstance(data, str) else bytes(data) + payload = self._transport.request_json( + "/sandbox/files/upload", + method="PUT", + params={"path": path}, + content=body, + ) + return SandboxFileTransferResult(**payload) + + def download(self, path: str) -> bytes: + return self._transport.request_bytes( + "/sandbox/files/download", + params={"path": path}, + ) + + def make_dir( + self, + path: str, + *, + parents: Optional[bool] = None, + mode: Optional[str] = None, + ) -> bool: + payload = self._transport.request_json( + "/sandbox/files/mkdir", + method="POST", + json_body={ + "path": path, + "parents": parents, + "mode": mode, + }, + headers={"content-type": "application/json"}, + ) + return bool(payload.get("created")) + + def mkdir( + self, + path: str, + *, + parents: Optional[bool] = None, + mode: Optional[str] = None, + ) -> bool: + return self.make_dir(path, parents=parents, mode=mode) + + def rename(self, old_path: str, new_path: str) -> SandboxFileInfo: + payload = self._transport.request_json( + "/sandbox/files/move", + method="POST", + json_body={ + "from": old_path, + "to": new_path, + }, + headers={"content-type": "application/json"}, + ) + return _normalize_file_info(payload["entry"]) + + def move( + self, + *, + source: str, + destination: str, + overwrite: Optional[bool] = None, + ) -> SandboxFileInfo: + return self.rename(source, destination) + + def remove(self, path: str, *, recursive: Optional[bool] = None) -> None: + self._transport.request_json( + "/sandbox/files/delete", + method="POST", + json_body=SandboxFileDeleteParams( + path=path, + recursive=recursive, + ).model_dump(exclude_none=True), + headers={"content-type": "application/json"}, + ) + + def delete(self, path: str, *, recursive: Optional[bool] = None) -> None: + self.remove(path, recursive=recursive) + + def copy( + self, + params: Optional[SandboxFileCopyParams] = None, + *, + source: Optional[str] = None, + destination: 
Optional[str] = None, + recursive: Optional[bool] = None, + overwrite: Optional[bool] = None, + ) -> SandboxFileInfo: + if params is None: + normalized = SandboxFileCopyParams( + source=source, + destination=destination, + recursive=recursive, + overwrite=overwrite, + ) + elif isinstance(params, SandboxFileCopyParams): + normalized = params + else: + raise TypeError("params must be a SandboxFileCopyParams instance") + + payload = self._transport.request_json( + "/sandbox/files/copy", + method="POST", + json_body={ + "from": normalized.source, + "to": normalized.destination, + "recursive": normalized.recursive, + "overwrite": normalized.overwrite, + }, + headers={"content-type": "application/json"}, + ) + return _normalize_file_info(payload["entry"]) + + def chmod( + self, + params: Optional[SandboxFileChmodParams] = None, + *, + path: Optional[str] = None, + mode: Optional[str] = None, + recursive: Optional[bool] = None, + ) -> None: + if params is None: + normalized = SandboxFileChmodParams( + path=path, + mode=mode, + recursive=recursive, + ) + elif isinstance(params, SandboxFileChmodParams): + normalized = params + else: + raise TypeError("params must be a SandboxFileChmodParams instance") + self._transport.request_json( + "/sandbox/files/chmod", + method="POST", + json_body=normalized.model_dump(exclude_none=True), + headers={"content-type": "application/json"}, + ) + + def chown( + self, + params: Optional[SandboxFileChownParams] = None, + *, + path: Optional[str] = None, + uid: Optional[int] = None, + gid: Optional[int] = None, + recursive: Optional[bool] = None, + ) -> None: + if params is None: + normalized = SandboxFileChownParams( + path=path, + uid=uid, + gid=gid, + recursive=recursive, + ) + elif isinstance(params, SandboxFileChownParams): + normalized = params + else: + raise TypeError("params must be a SandboxFileChownParams instance") + self._transport.request_json( + "/sandbox/files/chown", + method="POST", + 
json_body=normalized.model_dump(exclude_none=True), + headers={"content-type": "application/json"}, + ) + + def watch(self, path: str, *, recursive: Optional[bool] = None): + payload = self._transport.request_json( + "/sandbox/files/watch", + method="POST", + json_body={ + "path": path, + "recursive": recursive, + }, + headers={"content-type": "application/json"}, + ) + return SandboxFileWatchHandle( + self._transport, + self._get_connection_info, + SandboxFileWatchStatus(**payload["watch"]), + self._runtime_proxy_override, + ) + + def watch_dir( + self, + path: str, + on_event: Callable[[SandboxFileSystemEvent], object], + *, + recursive: Optional[bool] = None, + timeout_ms: Optional[int] = None, + on_exit: Optional[Callable[[Optional[BaseException]], object]] = None, + ) -> SandboxWatchDirHandle: + return SandboxWatchDirHandle( + self.watch(path, recursive=recursive), + on_event, + on_exit=on_exit, + timeout_ms=timeout_ms, + ) + + def get_watch( + self, watch_id: str, include_events: bool = False + ) -> SandboxFileWatchHandle: + payload = self._transport.request_json( + f"/sandbox/files/watch/{watch_id}", + params={"includeEvents": True} if include_events else None, + ) + return SandboxFileWatchHandle( + self._transport, + self._get_connection_info, + SandboxFileWatchStatus(**payload["watch"]), + self._runtime_proxy_override, + ) + + def upload_url( + self, + path: str, + *, + expires_in_seconds: Optional[int] = None, + one_time: Optional[bool] = None, + ) -> SandboxPresignedUrl: + payload = self._transport.request_json( + "/sandbox/files/presign-upload", + method="POST", + json_body=SandboxPresignFileParams( + path=path, + expires_in_seconds=expires_in_seconds, + one_time=one_time, + ).model_dump(exclude_none=True, by_alias=True), + headers={"content-type": "application/json"}, + ) + return SandboxPresignedUrl(**payload) + + def download_url( + self, + path: str, + *, + expires_in_seconds: Optional[int] = None, + one_time: Optional[bool] = None, + ) -> 
SandboxPresignedUrl: + payload = self._transport.request_json( + "/sandbox/files/presign-download", + method="POST", + json_body=SandboxPresignFileParams( + path=path, + expires_in_seconds=expires_in_seconds, + one_time=one_time, + ).model_dump(exclude_none=True, by_alias=True), + headers={"content-type": "application/json"}, + ) + return SandboxPresignedUrl(**payload) + + def _read_wire( + self, + path: str, + *, + offset: Optional[int] = None, + length: Optional[int] = None, + encoding: str, + ) -> SandboxFileReadResult: + payload = self._transport.request_json( + "/sandbox/files/read", + method="POST", + json_body={ + "path": path, + "offset": offset, + "length": length, + "encoding": encoding, + }, + headers={"content-type": "application/json"}, + ) + return SandboxFileReadResult(**payload) + + def _write_single( + self, + path: str, + data: str, + *, + append: Optional[bool] = None, + mode: Optional[str] = None, + encoding: str, + ): + payload = self._transport.request_json( + "/sandbox/files/write", + method="POST", + json_body={ + "path": path, + "data": data, + "append": append, + "mode": mode, + "encoding": encoding, + }, + headers={"content-type": "application/json"}, + ) + return _normalize_write_info(payload["files"][0]) diff --git a/hyperbrowser/client/managers/sync_manager/sandboxes/sandbox_processes.py b/hyperbrowser/client/managers/sync_manager/sandboxes/sandbox_processes.py new file mode 100644 index 00000000..13dfc16c --- /dev/null +++ b/hyperbrowser/client/managers/sync_manager/sandboxes/sandbox_processes.py @@ -0,0 +1,203 @@ +import base64 +from typing import Dict, Optional, Union + +from .....models.sandbox import ( + SandboxExecParams, + SandboxProcessExitEvent, + SandboxProcessListResponse, + SandboxProcessOutputEvent, + SandboxProcessResult, + SandboxProcessStdinParams, + SandboxProcessSummary, +) +from .sandbox_transport import RuntimeTransport + +DEFAULT_PROCESS_KILL_WAIT_SECONDS = 5.0 + + +class SandboxProcessHandle: + def __init__(self, 
transport: RuntimeTransport, summary: SandboxProcessSummary): + self._transport = transport + self._summary = summary + + @property + def id(self) -> str: + return self._summary.id + + @property + def status(self) -> str: + return self._summary.status + + def to_dict(self): + return self._summary.model_dump() + + def to_json(self): + return self.to_dict() + + def refresh(self) -> "SandboxProcessHandle": + payload = self._transport.request_json(f"/sandbox/processes/{self.id}") + self._summary = SandboxProcessSummary(**payload["process"]) + return self + + def wait(self, timeout_ms: Optional[int] = None, timeout_sec: Optional[int] = None): + payload = self._transport.request_json( + f"/sandbox/processes/{self.id}/wait", + method="POST", + json_body={ + "timeoutMs": timeout_ms, + "timeout_sec": timeout_sec, + }, + headers={"content-type": "application/json"}, + ) + result = SandboxProcessResult(**payload["result"]) + self._summary = SandboxProcessSummary( + id=result.id, + status=result.status, + command=self._summary.command, + args=self._summary.args, + cwd=self._summary.cwd, + pid=self._summary.pid, + exit_code=result.exit_code, + started_at=result.started_at, + completed_at=result.completed_at, + ) + return result + + def signal(self, signal: str) -> None: + payload = self._transport.request_json( + f"/sandbox/processes/{self.id}/signal", + method="POST", + json_body={"signal": signal}, + headers={"content-type": "application/json"}, + ) + self._summary = SandboxProcessSummary(**payload["process"]) + + def kill( + self, + timeout_ms: Optional[int] = None, + timeout_sec: Optional[int] = None, + ) -> SandboxProcessResult: + payload = self._transport.request_json( + f"/sandbox/processes/{self.id}", + method="DELETE", + ) + self._summary = SandboxProcessSummary(**payload["process"]) + if timeout_ms is None and timeout_sec is None: + timeout_ms = int(DEFAULT_PROCESS_KILL_WAIT_SECONDS * 1000) + return self.wait(timeout_ms=timeout_ms, timeout_sec=timeout_sec) + + def 
write_stdin( + self, + data: Optional[Union[str, bytes, bytearray, SandboxProcessStdinParams]] = None, + *, + encoding: Optional[str] = None, + eof: Optional[bool] = None, + ) -> None: + if isinstance(data, SandboxProcessStdinParams): + params = data + else: + params = SandboxProcessStdinParams(data=data, encoding=encoding, eof=eof) + + payload: Dict[str, object] = {"eof": params.eof} + if params.data is not None: + if isinstance(params.data, str): + payload["data"] = params.data + payload["encoding"] = params.encoding or "utf8" + else: + payload["data"] = base64.b64encode(bytes(params.data)).decode("ascii") + payload["encoding"] = "base64" + + self._transport.request_json( + f"/sandbox/processes/{self.id}/stdin", + method="POST", + json_body=payload, + headers={"content-type": "application/json"}, + ) + + def stream(self, from_seq: Optional[int] = None): + params = {"from_seq": from_seq} if from_seq and from_seq > 0 else None + for event in self._transport.stream_sse( + f"/sandbox/processes/{self.id}/stream", + params=params, + ): + event_type = event["event"] + data = event["data"] + if event_type == "output": + yield SandboxProcessOutputEvent( + type=data["stream"], + seq=data["seq"], + data=data["data"], + timestamp=data["timestamp"], + ) + elif event_type == "done": + yield SandboxProcessExitEvent( + type="exit", + result=SandboxProcessResult(**data), + ) + + def result(self) -> SandboxProcessResult: + return self.wait() + + +class SandboxProcessesApi: + def __init__(self, transport: RuntimeTransport): + self._transport = transport + + def exec(self, input: SandboxExecParams) -> SandboxProcessResult: + if not isinstance(input, SandboxExecParams): + raise TypeError("input must be a SandboxExecParams instance") + payload = self._transport.request_json( + "/sandbox/exec", + method="POST", + json_body=input.model_dump(exclude_none=True, by_alias=True), + headers={"content-type": "application/json"}, + ) + return SandboxProcessResult(**payload["result"]) + + def 
start(self, input: SandboxExecParams) -> SandboxProcessHandle: + if not isinstance(input, SandboxExecParams): + raise TypeError("input must be a SandboxExecParams instance") + payload = self._transport.request_json( + "/sandbox/processes", + method="POST", + json_body=input.model_dump(exclude_none=True, by_alias=True), + headers={"content-type": "application/json"}, + ) + return SandboxProcessHandle( + self._transport, + SandboxProcessSummary(**payload["process"]), + ) + + def get(self, process_id: str) -> SandboxProcessHandle: + payload = self._transport.request_json(f"/sandbox/processes/{process_id}") + return SandboxProcessHandle( + self._transport, + SandboxProcessSummary(**payload["process"]), + ) + + def list( + self, + *, + status=None, + limit: Optional[int] = None, + cursor: Optional[Union[str, int]] = None, + created_after: Optional[int] = None, + created_before: Optional[int] = None, + ) -> SandboxProcessListResponse: + normalized_status = None + if isinstance(status, list): + normalized_status = ",".join(status) if status else None + else: + normalized_status = status + + payload = self._transport.request_json( + "/sandbox/processes", + params={ + "status": normalized_status, + "limit": limit, + "cursor": cursor, + "created_after": created_after, + "created_before": created_before, + }, + ) + return SandboxProcessListResponse(**payload) diff --git a/hyperbrowser/client/managers/sync_manager/sandboxes/sandbox_terminal.py b/hyperbrowser/client/managers/sync_manager/sandboxes/sandbox_terminal.py new file mode 100644 index 00000000..59c858eb --- /dev/null +++ b/hyperbrowser/client/managers/sync_manager/sandboxes/sandbox_terminal.py @@ -0,0 +1,233 @@ +import base64 +import json +import socket +from typing import Dict, Optional, Union + +from websockets.exceptions import ConnectionClosed +from websockets.sync.client import connect as sync_ws_connect + +from .....models.sandbox import ( + SandboxTerminalCreateParams, + SandboxTerminalExitEvent, + 
SandboxTerminalOutputEvent, + SandboxTerminalStatus, + SandboxTerminalWaitParams, +) +from .....sandbox_common import build_headers, to_websocket_transport_target +from ...sandboxes.shared import ( + _copy_model, + _normalize_terminal_output_chunk, + _normalize_terminal_status, + _normalize_websocket_error, +) +from .sandbox_transport import RuntimeTransport + +DEFAULT_TERMINAL_KILL_WAIT_SECONDS = 5.0 + + +class SandboxTerminalConnection: + def __init__(self, websocket): + self._websocket = websocket + + def events(self): + while True: + try: + message = self._websocket.recv() + except ConnectionClosed: + break + + if isinstance(message, bytes): + message = message.decode("utf-8") + parsed = json.loads(message) + if parsed["type"] == "output": + normalized = _normalize_terminal_output_chunk(parsed) + yield SandboxTerminalOutputEvent( + type="output", + **normalized, + ) + elif parsed["type"] == "exit": + yield SandboxTerminalExitEvent( + type="exit", + status=_normalize_terminal_status(parsed["status"]), + ) + + def write(self, data: Union[str, bytes, bytearray]) -> None: + payload: Dict[str, object] = { + "type": "input", + "data": data + if isinstance(data, str) + else base64.b64encode(bytes(data)).decode("ascii"), + } + if not isinstance(data, str): + payload["encoding"] = "base64" + self._websocket.send(json.dumps(payload)) + + def resize(self, rows: int, cols: int) -> None: + self._websocket.send( + json.dumps( + { + "type": "resize", + "rows": rows, + "cols": cols, + } + ) + ) + + def close(self) -> None: + self._websocket.close() + + +class SandboxTerminalHandle: + def __init__( + self, + transport: RuntimeTransport, + get_connection_info, + status, + runtime_proxy_override: Optional[str] = None, + ): + self._transport = transport + self._get_connection_info = get_connection_info + self._status = status + self._runtime_proxy_override = runtime_proxy_override + + @property + def id(self) -> str: + return self._status.id + + @property + def current(self) -> 
SandboxTerminalStatus: + return _copy_model(self._status) + + def to_dict(self): + return self._status.model_dump() + + def to_json(self): + return self.to_dict() + + def refresh(self, include_output: bool = False) -> "SandboxTerminalHandle": + payload = self._transport.request_json( + f"/sandbox/pty/{self.id}", + params={"includeOutput": True} if include_output else None, + ) + self._status = _normalize_terminal_status(payload["pty"]) + return self + + def wait( + self, + timeout_ms: Optional[int] = None, + include_output: Optional[bool] = None, + ) -> SandboxTerminalStatus: + payload = self._transport.request_json( + f"/sandbox/pty/{self.id}/wait", + method="POST", + json_body=SandboxTerminalWaitParams( + timeout_ms=timeout_ms, + include_output=include_output, + ).model_dump(exclude_none=True, by_alias=True), + headers={"content-type": "application/json"}, + ) + self._status = _normalize_terminal_status(payload["pty"]) + return self.current + + def signal(self, signal: Optional[str] = None) -> SandboxTerminalStatus: + payload = self._transport.request_json( + f"/sandbox/pty/{self.id}/kill", + method="POST", + json_body={"signal": signal}, + headers={"content-type": "application/json"}, + ) + self._status = _normalize_terminal_status(payload["pty"]) + return self.current + + def kill( + self, + signal: Optional[str] = None, + *, + timeout_ms: Optional[int] = None, + ) -> SandboxTerminalStatus: + self.signal(signal) + if timeout_ms is None: + timeout_ms = int(DEFAULT_TERMINAL_KILL_WAIT_SECONDS * 1000) + return self.wait(timeout_ms=timeout_ms) + + def resize(self, rows: int, cols: int) -> SandboxTerminalStatus: + payload = self._transport.request_json( + f"/sandbox/pty/{self.id}/resize", + method="POST", + json_body={"rows": rows, "cols": cols}, + headers={"content-type": "application/json"}, + ) + self._status = _normalize_terminal_status(payload["pty"]) + return self.current + + def attach(self) -> SandboxTerminalConnection: + connection = 
self._get_connection_info() + target = to_websocket_transport_target( + connection.base_url, + f"/sandbox/pty/{self.id}/ws?sessionId={connection.sandbox_id}", + self._runtime_proxy_override, + ) + headers = build_headers(connection.token, host_header=target.host_header) + connect_kwargs = {} + if target.connect_host is not None and target.connect_port is not None: + connect_kwargs["sock"] = socket.create_connection( + (target.connect_host, target.connect_port), + timeout=self._transport._timeout, + ) + + try: + websocket = sync_ws_connect( + target.url, + additional_headers=headers, + open_timeout=self._transport._timeout, + **connect_kwargs, + ) + except BaseException as error: + raise _normalize_websocket_error(error) + + return SandboxTerminalConnection(websocket) + + +class SandboxTerminalApi: + def __init__( + self, + transport: RuntimeTransport, + get_connection_info, + runtime_proxy_override: Optional[str] = None, + ): + self._transport = transport + self._get_connection_info = get_connection_info + self._runtime_proxy_override = runtime_proxy_override + + def create( + self, + input: SandboxTerminalCreateParams, + ) -> SandboxTerminalHandle: + if not isinstance(input, SandboxTerminalCreateParams): + raise TypeError("input must be a SandboxTerminalCreateParams instance") + payload = self._transport.request_json( + "/sandbox/pty", + method="POST", + json_body=input.model_dump(exclude_none=True, by_alias=True), + headers={"content-type": "application/json"}, + ) + return SandboxTerminalHandle( + self._transport, + self._get_connection_info, + _normalize_terminal_status(payload["pty"]), + self._runtime_proxy_override, + ) + + def get( + self, terminal_id: str, include_output: bool = False + ) -> SandboxTerminalHandle: + payload = self._transport.request_json( + f"/sandbox/pty/{terminal_id}", + params={"includeOutput": True} if include_output else None, + ) + return SandboxTerminalHandle( + self._transport, + self._get_connection_info, + 
_normalize_terminal_status(payload["pty"]), + self._runtime_proxy_override, + ) diff --git a/hyperbrowser/client/managers/sync_manager/sandboxes/sandbox_transport.py b/hyperbrowser/client/managers/sync_manager/sandboxes/sandbox_transport.py new file mode 100644 index 00000000..d51d2647 --- /dev/null +++ b/hyperbrowser/client/managers/sync_manager/sandboxes/sandbox_transport.py @@ -0,0 +1,249 @@ +import json +from typing import Dict, Iterator, Optional, Union + +import httpx + +from .....sandbox_common import ( + RuntimeConnection, + build_headers, + ensure_response_ok, + normalize_network_error, + parse_json_response, + resolve_runtime_transport_target, +) +from ...sandboxes.shared import _build_query_path + + +class RuntimeTransport: + def __init__( + self, + resolve_connection, + timeout: float = 30.0, + runtime_proxy_override: Optional[str] = None, + ): + self._resolve_connection = resolve_connection + self._timeout = timeout + self._runtime_proxy_override = runtime_proxy_override + + def request_json( + self, + path: str, + *, + method: str = "GET", + params: Optional[Dict[str, object]] = None, + json_body: Optional[Dict[str, object]] = None, + content: Optional[Union[str, bytes]] = None, + headers: Optional[Dict[str, str]] = None, + ): + response = self._request( + path, + method=method, + params=params, + json_body=json_body, + content=content, + headers=headers, + ) + return parse_json_response(response, "runtime") + + def request_bytes( + self, + path: str, + *, + method: str = "GET", + params: Optional[Dict[str, object]] = None, + headers: Optional[Dict[str, str]] = None, + ) -> bytes: + response = self._request(path, method=method, params=params, headers=headers) + return response.content + + def stream_sse( + self, path: str, params: Optional[Dict[str, object]] = None + ) -> Iterator[Dict[str, object]]: + client, response = self._open_stream(path, params=params) + event_name = "message" + event_id = None + data_lines = [] + + def flush_event(): + 
nonlocal event_name, event_id, data_lines + if not data_lines and event_name == "message" and event_id is None: + return None + + raw_data = "\n".join(data_lines) + data = raw_data + if raw_data: + try: + data = json.loads(raw_data) + except json.JSONDecodeError: + data = raw_data + + event = { + "event": event_name, + "data": data, + "id": event_id, + } + event_name = "message" + event_id = None + data_lines = [] + return event + + try: + for line in response.iter_lines(): + if line == "": + event = flush_event() + if event is not None: + yield event + continue + + if line.startswith(":"): + continue + + if ":" in line: + field, value = line.split(":", 1) + value = value.lstrip(" ") + else: + field, value = line, "" + + if field == "event": + event_name = value or "message" + elif field == "data": + data_lines.append(value) + elif field == "id": + event_id = value + + trailing = flush_event() + if trailing is not None: + yield trailing + finally: + response.close() + client.close() + + def _request( + self, + path: str, + *, + method: str = "GET", + params: Optional[Dict[str, object]] = None, + json_body: Optional[Dict[str, object]] = None, + content: Optional[Union[str, bytes]] = None, + headers: Optional[Dict[str, str]] = None, + allow_refresh: bool = True, + ) -> httpx.Response: + connection = self._resolve_connection(False) + response = self._send( + connection, + path, + method=method, + params=params, + json_body=json_body, + content=content, + headers=headers, + ) + + if response.status_code == 401 and allow_refresh: + response.close() + refreshed = self._resolve_connection(True) + retry = self._send( + refreshed, + path, + method=method, + params=params, + json_body=json_body, + content=content, + headers=headers, + ) + return ensure_response_ok(retry, "runtime") + + return ensure_response_ok(response, "runtime") + + def _open_stream( + self, + path: str, + *, + params: Optional[Dict[str, object]] = None, + allow_refresh: bool = True, + ): + connection = 
self._resolve_connection(False) + client, response = self._send_stream(connection, path, params=params) + if response.status_code == 401 and allow_refresh: + response.close() + client.close() + refreshed = self._resolve_connection(True) + client, response = self._send_stream(refreshed, path, params=params) + + if not response.is_success: + response.read() + ensure_response_ok(response, "runtime") + return client, response + + def _send( + self, + connection: RuntimeConnection, + path: str, + *, + method: str, + params: Optional[Dict[str, object]], + json_body: Optional[Dict[str, object]], + content: Optional[Union[str, bytes]], + headers: Optional[Dict[str, str]], + ) -> httpx.Response: + request_path = _build_query_path(path, params) + target = resolve_runtime_transport_target( + connection.base_url, + request_path, + self._runtime_proxy_override, + ) + merged_headers = build_headers(connection.token, headers, target.host_header) + client = httpx.Client(timeout=self._timeout) + + try: + response = client.request( + method, + target.url, + headers=merged_headers, + json=json_body, + content=content, + ) + except BaseException as error: + client.close() + raise normalize_network_error( + error, + "runtime", + "Unknown runtime request error", + ) + + response.read() + client.close() + return response + + def _send_stream( + self, + connection: RuntimeConnection, + path: str, + *, + params: Optional[Dict[str, object]], + ): + request_path = _build_query_path(path, params) + target = resolve_runtime_transport_target( + connection.base_url, + request_path, + self._runtime_proxy_override, + ) + headers = build_headers( + connection.token, + {"Accept": "text/event-stream"}, + target.host_header, + ) + client = httpx.Client(timeout=self._timeout) + + try: + request = client.build_request("GET", target.url, headers=headers) + response = client.send(request, stream=True) + return client, response + except BaseException as error: + client.close() + raise 
normalize_network_error( + error, + "runtime", + "Unknown runtime request error", + ) diff --git a/hyperbrowser/client/sync.py b/hyperbrowser/client/sync.py index dd5329b0..f08e42d3 100644 --- a/hyperbrowser/client/sync.py +++ b/hyperbrowser/client/sync.py @@ -9,6 +9,7 @@ from .managers.sync_manager.extension import ExtensionManager from .managers.sync_manager.extract import ExtractManager from .managers.sync_manager.profile import ProfileManager +from .managers.sync_manager.sandbox import SandboxManager from .managers.sync_manager.scrape import ScrapeManager from .managers.sync_manager.session import SessionManager from .managers.sync_manager.team import TeamManager @@ -24,8 +25,16 @@ def __init__( api_key: Optional[str] = None, base_url: Optional[str] = None, timeout: Optional[int] = 30, + runtime_proxy_override: Optional[str] = None, ): - super().__init__(SyncTransport, config, api_key, base_url) + super().__init__( + SyncTransport, + config, + api_key, + base_url, + runtime_proxy_override, + ) + self.timeout = timeout or 30 self.transport.client.timeout = timeout self.sessions = SessionManager(self) self.web = WebManager(self) @@ -37,6 +46,7 @@ def __init__( self.agents = Agents(self) self.team = TeamManager(self) self.computer_action = ComputerActionManager(self) + self.sandboxes = SandboxManager(self) def close(self) -> None: self.transport.close() diff --git a/hyperbrowser/config.py b/hyperbrowser/config.py index c055ab17..e1ec8f20 100644 --- a/hyperbrowser/config.py +++ b/hyperbrowser/config.py @@ -9,6 +9,7 @@ class ClientConfig: api_key: str base_url: str = "https://api.hyperbrowser.ai" + runtime_proxy_override: Optional[str] = None @classmethod def from_env(cls) -> "ClientConfig": diff --git a/hyperbrowser/exceptions.py b/hyperbrowser/exceptions.py index 906a138a..e07b7996 100644 --- a/hyperbrowser/exceptions.py +++ b/hyperbrowser/exceptions.py @@ -1,5 +1,7 @@ # exceptions.py -from typing import Optional, Any +from typing import Any, Literal, Optional + 
+HyperbrowserService = Literal["control", "runtime"] class HyperbrowserError(Exception): @@ -11,11 +13,23 @@ def __init__( status_code: Optional[int] = None, response: Optional[Any] = None, original_error: Optional[Exception] = None, + code: Optional[str] = None, + request_id: Optional[str] = None, + retryable: bool = False, + service: Optional[HyperbrowserService] = None, + details: Optional[Any] = None, + cause: Optional[Any] = None, ): super().__init__(message) self.status_code = status_code self.response = response self.original_error = original_error + self.code = code + self.request_id = request_id + self.retryable = retryable + self.service = service + self.details = details + self.cause = cause if cause is not None else original_error def __str__(self) -> str: """Custom string representation to show a cleaner error message""" diff --git a/hyperbrowser/models/__init__.py b/hyperbrowser/models/__init__.py index ecd4c34e..8135e4fd 100644 --- a/hyperbrowser/models/__init__.py +++ b/hyperbrowser/models/__init__.py @@ -235,6 +235,82 @@ ImageCaptchaParam, UpdateSessionProfileParams, ) +from .sandbox import ( + SandboxStatus, + SandboxRegion, + SandboxRuntimeTarget, + Sandbox, + SandboxDetail, + SandboxRuntimeSession, + CreateSandboxParams, + StartSandboxFromSnapshotParams, + SandboxListParams, + SandboxListResponse, + SandboxMemorySnapshotParams, + SandboxMemorySnapshotResult, + SandboxExposeParams, + SandboxExposeResult, + SandboxProcessStatus, + SandboxExecParams, + SandboxProcessSummary, + SandboxProcessResult, + SandboxProcessListParams, + SandboxProcessListResponse, + SandboxProcessWaitParams, + SandboxProcessStdinParams, + SandboxProcessOutputEvent, + SandboxProcessExitEvent, + SandboxProcessStreamEvent, + SandboxFileType, + SandboxFileReadFormat, + SandboxFileInfo, + SandboxFileWriteInfo, + SandboxFileEntry, + SandboxFileListOptions, + SandboxFileListParams, + SandboxFileListResponse, + SandboxFileReadOptions, + SandboxFileReadParams, + 
SandboxFileReadResult, + SandboxFileWriteEntry, + SandboxFileTextWriteOptions, + SandboxFileBytesWriteOptions, + SandboxFileWriteTextParams, + SandboxFileWriteBytesParams, + SandboxFileWriteResult, + SandboxFileUploadParams, + SandboxFileRemoveOptions, + SandboxFileDeleteParams, + SandboxFileMakeDirOptions, + SandboxFileMkdirParams, + SandboxFileMoveParams, + SandboxFileCopyParams, + SandboxFileChmodParams, + SandboxFileChownParams, + SandboxFileMutationResult, + SandboxFileTransferResult, + SandboxFileMoveCopyResult, + SandboxFileWatchParams, + SandboxFileWatchEvent, + SandboxFileWatchStatus, + SandboxFileWatchRoute, + SandboxFileWatchEventsParams, + SandboxFileWatchEventMessage, + SandboxFileWatchDoneEvent, + SandboxFileWatchStreamEvent, + SandboxFileSystemEventType, + SandboxFileSystemEvent, + SandboxPresignFileParams, + SandboxPresignedUrl, + SandboxTerminalCreateParams, + SandboxTerminalOutputChunk, + SandboxTerminalStatus, + SandboxTerminalWaitParams, + SandboxTerminalKillParams, + SandboxTerminalOutputEvent, + SandboxTerminalExitEvent, + SandboxTerminalEvent, +) from .team import TeamCreditInfo __all__ = [ @@ -403,6 +479,81 @@ "UploadFileResponse", "ImageCaptchaParam", "UpdateSessionProfileParams", + # sandbox + "SandboxStatus", + "SandboxRegion", + "SandboxRuntimeTarget", + "Sandbox", + "SandboxDetail", + "SandboxRuntimeSession", + "CreateSandboxParams", + "StartSandboxFromSnapshotParams", + "SandboxListParams", + "SandboxListResponse", + "SandboxMemorySnapshotParams", + "SandboxMemorySnapshotResult", + "SandboxExposeParams", + "SandboxExposeResult", + "SandboxProcessStatus", + "SandboxExecParams", + "SandboxProcessSummary", + "SandboxProcessResult", + "SandboxProcessListParams", + "SandboxProcessListResponse", + "SandboxProcessWaitParams", + "SandboxProcessStdinParams", + "SandboxProcessOutputEvent", + "SandboxProcessExitEvent", + "SandboxProcessStreamEvent", + "SandboxFileType", + "SandboxFileReadFormat", + "SandboxFileInfo", + "SandboxFileWriteInfo", + 
"SandboxFileEntry", + "SandboxFileListOptions", + "SandboxFileListParams", + "SandboxFileListResponse", + "SandboxFileReadOptions", + "SandboxFileReadParams", + "SandboxFileReadResult", + "SandboxFileWriteEntry", + "SandboxFileTextWriteOptions", + "SandboxFileBytesWriteOptions", + "SandboxFileWriteTextParams", + "SandboxFileWriteBytesParams", + "SandboxFileWriteResult", + "SandboxFileUploadParams", + "SandboxFileRemoveOptions", + "SandboxFileDeleteParams", + "SandboxFileMakeDirOptions", + "SandboxFileMkdirParams", + "SandboxFileMoveParams", + "SandboxFileCopyParams", + "SandboxFileChmodParams", + "SandboxFileChownParams", + "SandboxFileMutationResult", + "SandboxFileTransferResult", + "SandboxFileMoveCopyResult", + "SandboxFileWatchParams", + "SandboxFileWatchEvent", + "SandboxFileWatchStatus", + "SandboxFileWatchRoute", + "SandboxFileWatchEventsParams", + "SandboxFileWatchEventMessage", + "SandboxFileWatchDoneEvent", + "SandboxFileWatchStreamEvent", + "SandboxFileSystemEventType", + "SandboxFileSystemEvent", + "SandboxPresignFileParams", + "SandboxPresignedUrl", + "SandboxTerminalCreateParams", + "SandboxTerminalOutputChunk", + "SandboxTerminalStatus", + "SandboxTerminalWaitParams", + "SandboxTerminalKillParams", + "SandboxTerminalOutputEvent", + "SandboxTerminalExitEvent", + "SandboxTerminalEvent", # team "TeamCreditInfo", # computer action diff --git a/hyperbrowser/models/agents/cua.py b/hyperbrowser/models/agents/cua.py index f7d4472a..6562e52f 100644 --- a/hyperbrowser/models/agents/cua.py +++ b/hyperbrowser/models/agents/cua.py @@ -30,9 +30,7 @@ class StartCuaTaskParams(BaseModel): ) task: str - llm: Optional[CuaLlm] = Field( - default=None, serialization_alias="llm" - ) + llm: Optional[CuaLlm] = Field(default=None, serialization_alias="llm") session_id: Optional[str] = Field(default=None, serialization_alias="sessionId") max_failures: Optional[int] = Field(default=None, serialization_alias="maxFailures") max_steps: Optional[int] = Field(default=None, 
serialization_alias="maxSteps") diff --git a/hyperbrowser/models/consts.py b/hyperbrowser/models/consts.py index 85a0f5aa..0388e7ee 100644 --- a/hyperbrowser/models/consts.py +++ b/hyperbrowser/models/consts.py @@ -69,10 +69,7 @@ "claude-sonnet-4-20250514", "claude-3-7-sonnet-20250219", ] -CuaLlm = Literal[ - "computer-use-preview", - "gpt-5.4" -] +CuaLlm = Literal["computer-use-preview", "gpt-5.4"] GeminiComputerUseLlm = Literal["gemini-2.5-computer-use-preview-10-2025",] SessionRegion = Literal[ "us-central", diff --git a/hyperbrowser/models/sandbox.py b/hyperbrowser/models/sandbox.py new file mode 100644 index 00000000..0cac9e85 --- /dev/null +++ b/hyperbrowser/models/sandbox.py @@ -0,0 +1,574 @@ +from datetime import datetime, timezone +from typing import Callable, Dict, List, Literal, Optional, Union + +from pydantic import BaseModel, ConfigDict, Field, field_validator, model_validator + +from .session import SessionLaunchState, SessionStatus + +SandboxStatus = SessionStatus +SandboxRegion = Literal[ + "us-central", + "asia-south", + "us-dev", + "europe-west", + "us-west", + "us-east", + "us", +] +SandboxProcessStatus = Literal[ + "queued", + "running", + "exited", + "failed", + "killed", + "timed_out", +] +SandboxFileType = Literal["file", "dir"] +SandboxFileEncoding = Literal["utf8", "base64"] +SandboxFileReadFormat = Literal["text", "bytes", "blob", "stream"] +SandboxFileWatchRoute = Literal["ws", "stream"] +SandboxFileSystemEventType = Literal["chmod", "create", "remove", "rename", "write"] + + +def _parse_optional_datetime(value): + if value in (None, ""): + return None + return value + + +def _parse_optional_int(value): + if value is None or isinstance(value, int): + return value + if isinstance(value, str) and value.strip() == "": + return None + if isinstance(value, str): + return int(value) + return value + + +def _parse_optional_datetime_from_millis(value): + if value in (None, ""): + return None + if isinstance(value, datetime): + if value.tzinfo is 
None: + return value.replace(tzinfo=timezone.utc) + return value + if isinstance(value, (int, float)): + return datetime.fromtimestamp(float(value) / 1000.0, tz=timezone.utc) + return value + + +class SandboxBaseModel(BaseModel): + model_config = ConfigDict(populate_by_name=True) + + +class SandboxRuntimeTarget(SandboxBaseModel): + transport: Literal["regional_proxy"] + host: str + base_url: str = Field(alias="baseUrl") + + +class Sandbox(SandboxBaseModel): + id: str + team_id: str = Field(alias="teamId") + status: SandboxStatus + end_time: Optional[int] = Field(default=None, alias="endTime") + start_time: Optional[int] = Field(default=None, alias="startTime") + created_at: datetime = Field(alias="createdAt") + updated_at: datetime = Field(alias="updatedAt") + close_reason: Optional[str] = Field(default=None, alias="closeReason") + data_consumed: Optional[int] = Field(default=None, alias="dataConsumed") + proxy_data_consumed: Optional[int] = Field(default=None, alias="proxyDataConsumed") + usage_type: Optional[str] = Field(default=None, alias="usageType") + job_id: Optional[str] = Field(default=None, alias="jobId") + launch_state: Optional[SessionLaunchState] = Field( + default=None, alias="launchState" + ) + credits_used: Optional[float] = Field(default=None, alias="creditsUsed") + region: SandboxRegion + session_url: str = Field(alias="sessionUrl") + duration: int + proxy_bytes_used: Optional[int] = Field(default=None, alias="proxyBytesUsed") + runtime: SandboxRuntimeTarget + + @field_validator( + "end_time", + "start_time", + "data_consumed", + "proxy_data_consumed", + "proxy_bytes_used", + mode="before", + ) + @classmethod + def parse_optional_int_fields(cls, value): + return _parse_optional_int(value) + + +class SandboxDetail(Sandbox): + token: Optional[str] = None + token_expires_at: Optional[datetime] = Field(default=None, alias="tokenExpiresAt") + + @field_validator("token_expires_at", mode="before") + @classmethod + def parse_token_expires_at(cls, value): 
+ return _parse_optional_datetime(value) + + +class SandboxRuntimeSession(SandboxBaseModel): + sandbox_id: str = Field(alias="sandboxId") + status: SandboxStatus + region: SandboxRegion + token: str + token_expires_at: Optional[datetime] = Field(default=None, alias="tokenExpiresAt") + runtime: SandboxRuntimeTarget + + @field_validator("token_expires_at", mode="before") + @classmethod + def parse_token_expires_at(cls, value): + return _parse_optional_datetime(value) + + +class CreateSandboxParams(SandboxBaseModel): + snapshot_name: Optional[str] = Field( + default=None, serialization_alias="snapshotName" + ) + snapshot_id: Optional[str] = Field(default=None, serialization_alias="snapshotId") + image_name: Optional[str] = Field(default=None, serialization_alias="imageName") + image_id: Optional[str] = Field(default=None, serialization_alias="imageId") + region: Optional[SandboxRegion] = None + enable_recording: Optional[bool] = Field( + default=None, serialization_alias="enableRecording" + ) + timeout_minutes: Optional[int] = Field( + default=None, serialization_alias="timeoutMinutes" + ) + + @model_validator(mode="after") + def validate_launch_source(self): + if self.snapshot_id and not self.snapshot_name: + raise ValueError("snapshot_id requires snapshot_name") + if self.image_id and not self.image_name: + raise ValueError("image_id requires image_name") + source_count = sum( + bool(value) for value in [self.snapshot_name, self.image_name] + ) + if source_count != 1: + raise ValueError( + "Provide exactly one start source: snapshot_name or image_name" + ) + return self + + +class StartSandboxFromSnapshotParams(CreateSandboxParams): + pass + + +class SandboxListParams(SandboxBaseModel): + status: Optional[SandboxStatus] = None + page: Optional[int] = None + limit: Optional[int] = None + search: Optional[str] = None + + +class SandboxListResponse(SandboxBaseModel): + sandboxes: List[Sandbox] + total_count: int = Field(alias="totalCount") + page: int + per_page: int = 
Field(alias="perPage") + + +class SandboxMemorySnapshotParams(SandboxBaseModel): + snapshot_name: Optional[str] = Field( + default=None, serialization_alias="snapshotName" + ) + + +class SandboxMemorySnapshotResult(SandboxBaseModel): + snapshot_name: str = Field(alias="snapshotName") + snapshot_id: str = Field(alias="snapshotId") + namespace: str + status: str + image_name: str = Field(alias="imageName") + image_id: str = Field(alias="imageId") + image_namespace: str = Field(alias="imageNamespace") + + +class SandboxExposeParams(SandboxBaseModel): + port: int + auth: Optional[bool] = None + + +class SandboxExposeResult(SandboxBaseModel): + port: int + auth: bool + url: str + + +class SandboxExecParams(SandboxBaseModel): + command: str + args: Optional[List[str]] = None + cwd: Optional[str] = None + env: Optional[Dict[str, str]] = None + timeout_ms: Optional[int] = Field(default=None, serialization_alias="timeoutMs") + timeout_sec: Optional[int] = None + use_shell: Optional[bool] = Field(default=None, serialization_alias="useShell") + + +class SandboxProcessSummary(SandboxBaseModel): + id: str + status: SandboxProcessStatus + command: str + args: Optional[List[str]] = None + cwd: str + pid: Optional[int] = None + exit_code: Optional[int] = Field(default=None, alias="exit_code") + started_at: int = Field(alias="started_at") + completed_at: Optional[int] = Field(default=None, alias="completed_at") + + +class SandboxProcessResult(SandboxBaseModel): + id: str + status: SandboxProcessStatus + exit_code: Optional[int] = Field(default=None, alias="exit_code") + stdout: str + stderr: str + started_at: int = Field(alias="started_at") + completed_at: Optional[int] = Field(default=None, alias="completed_at") + error: Optional[str] = None + + +class SandboxProcessListParams(SandboxBaseModel): + status: Optional[Union[SandboxProcessStatus, List[SandboxProcessStatus]]] = None + limit: Optional[int] = None + cursor: Optional[Union[str, int]] = None + created_after: Optional[int] = 
None + created_before: Optional[int] = None + + +class SandboxProcessListResponse(SandboxBaseModel): + data: List[SandboxProcessSummary] + next_cursor: Optional[str] = Field(default=None, alias="next_cursor") + + +class SandboxProcessWaitParams(SandboxBaseModel): + timeout_ms: Optional[int] = Field(default=None, serialization_alias="timeoutMs") + timeout_sec: Optional[int] = None + + +class SandboxProcessStdinParams(SandboxBaseModel): + data: Optional[Union[str, bytes]] = None + encoding: Optional[SandboxFileEncoding] = None + eof: Optional[bool] = None + + +class SandboxProcessOutputEvent(SandboxBaseModel): + type: Literal["stdout", "stderr", "system"] + seq: int + data: str + timestamp: int + + +class SandboxProcessExitEvent(SandboxBaseModel): + type: Literal["exit"] + result: SandboxProcessResult + + +SandboxProcessStreamEvent = Union[SandboxProcessOutputEvent, SandboxProcessExitEvent] + + +class SandboxFileInfo(SandboxBaseModel): + path: str + name: str + type: SandboxFileType + size: int + mode: int + permissions: str + owner: str + group: str + modified_time: Optional[datetime] = Field(default=None, alias="modifiedTime") + symlink_target: Optional[str] = Field(default=None, alias="symlinkTarget") + + @field_validator("modified_time", mode="before") + @classmethod + def parse_modified_time(cls, value): + return _parse_optional_datetime_from_millis(value) + + +class SandboxFileWriteInfo(SandboxBaseModel): + path: str + name: str + type: Optional[SandboxFileType] = None + + +SandboxFileEntry = SandboxFileInfo + + +class SandboxFileListOptions(SandboxBaseModel): + depth: Optional[int] = None + + +class SandboxFileListParams(SandboxBaseModel): + path: str + depth: Optional[int] = None + + +class SandboxFileListResponse(SandboxBaseModel): + path: str + depth: int + entries: List[SandboxFileInfo] + + +class SandboxFileReadOptions(SandboxBaseModel): + offset: Optional[int] = None + length: Optional[int] = None + format: Optional[SandboxFileReadFormat] = None + + 
+class SandboxFileReadParams(SandboxBaseModel): + path: str + offset: Optional[int] = None + length: Optional[int] = None + encoding: Optional[SandboxFileEncoding] = None + + +class SandboxFileReadResult(SandboxBaseModel): + content: str + encoding: SandboxFileEncoding + bytes_read: int = Field(alias="bytesRead") + truncated: bool + content_type: Optional[str] = Field(default=None, alias="contentType") + + +SandboxFileWriteData = Union[str, bytes] + + +class SandboxFileWriteEntry(SandboxBaseModel): + path: str + data: SandboxFileWriteData + + +class SandboxFileTextWriteOptions(SandboxBaseModel): + append: Optional[bool] = None + mode: Optional[str] = None + + +class SandboxFileBytesWriteOptions(SandboxBaseModel): + append: Optional[bool] = None + mode: Optional[str] = None + + +class SandboxFileWriteTextParams(SandboxBaseModel): + path: str + data: str + append: Optional[bool] = None + mode: Optional[str] = None + + +class SandboxFileWriteBytesParams(SandboxBaseModel): + path: str + data: bytes + append: Optional[bool] = None + mode: Optional[str] = None + + +class SandboxFileWriteResult(SandboxBaseModel): + files: List[SandboxFileWriteInfo] + + +class SandboxFileUploadParams(SandboxBaseModel): + path: str + data: Union[bytes, str] + + +class SandboxFileTransferResult(SandboxBaseModel): + path: str + bytes_written: int = Field(alias="bytesWritten") + + +class SandboxFileRemoveOptions(SandboxBaseModel): + recursive: Optional[bool] = None + + +class SandboxFileDeleteParams(SandboxBaseModel): + path: str + recursive: Optional[bool] = None + + +class SandboxFileMakeDirOptions(SandboxBaseModel): + parents: Optional[bool] = None + mode: Optional[str] = None + + +class SandboxFileMkdirParams(SandboxBaseModel): + path: str + parents: Optional[bool] = None + mode: Optional[str] = None + + +class SandboxFileMoveParams(SandboxBaseModel): + source: str + destination: str + overwrite: Optional[bool] = None + + +class SandboxFileCopyParams(SandboxBaseModel): + source: str + 
destination: str + recursive: Optional[bool] = None + overwrite: Optional[bool] = None + + +class SandboxFileChmodParams(SandboxBaseModel): + path: str + mode: str + recursive: Optional[bool] = None + + +class SandboxFileChownParams(SandboxBaseModel): + path: str + uid: Optional[int] = None + gid: Optional[int] = None + recursive: Optional[bool] = None + + +class SandboxFileMutationResult(SandboxBaseModel): + path: str + created: Optional[bool] = None + + +class SandboxFileMoveCopyResult(SandboxBaseModel): + entry: SandboxFileInfo + + +class SandboxFileWatchParams(SandboxBaseModel): + path: str + recursive: Optional[bool] = None + + +class SandboxFileWatchEvent(SandboxBaseModel): + seq: int + path: str + op: str + timestamp: int + + +class SandboxFileWatchStatus(SandboxBaseModel): + id: str + path: str + recursive: bool + active: bool + error: Optional[str] = None + created_at: int = Field(alias="createdAt") + stopped_at: Optional[int] = Field(default=None, alias="stoppedAt") + oldest_seq: int = Field(default=0, alias="oldestSeq") + last_seq: int = Field(default=0, alias="lastSeq") + event_count: int = Field(default=0, alias="eventCount") + events: Optional[List[SandboxFileWatchEvent]] = None + + +class SandboxFileWatchEventsParams(SandboxBaseModel): + cursor: Optional[int] = None + route: Optional[SandboxFileWatchRoute] = None + + +class SandboxFileWatchEventMessage(SandboxBaseModel): + type: Literal["event"] + event: SandboxFileWatchEvent + + +class SandboxFileWatchDoneEvent(SandboxBaseModel): + type: Literal["done"] + status: SandboxFileWatchStatus + + +SandboxFileWatchStreamEvent = Union[ + SandboxFileWatchEventMessage, + SandboxFileWatchDoneEvent, +] + + +class SandboxFileSystemEvent(SandboxBaseModel): + type: SandboxFileSystemEventType + name: str + + +SandboxWatchDirExitCallback = Callable[[Optional[BaseException]], object] + + +class SandboxPresignFileParams(SandboxBaseModel): + path: str + expires_in_seconds: Optional[int] = Field( + default=None, 
serialization_alias="expiresInSeconds" + ) + one_time: Optional[bool] = Field(default=None, serialization_alias="oneTime") + + +class SandboxPresignedUrl(SandboxBaseModel): + token: str + path: str + method: str + expires_at: int = Field(alias="expiresAt") + url: str + + +class SandboxTerminalCreateParams(SandboxBaseModel): + command: str + args: Optional[List[str]] = None + cwd: Optional[str] = None + env: Optional[Dict[str, str]] = None + use_shell: Optional[bool] = Field(default=None, serialization_alias="useShell") + rows: Optional[int] = None + cols: Optional[int] = None + timeout_ms: Optional[int] = Field(default=None, serialization_alias="timeoutMs") + + +class SandboxTerminalOutputChunk(SandboxBaseModel): + seq: int + data: str + raw: bytes + timestamp: int + + +class SandboxTerminalStatus(SandboxBaseModel): + id: str + command: str + args: Optional[List[str]] = None + cwd: str + pid: Optional[int] = None + running: bool + exit_code: Optional[int] = Field(default=None, alias="exitCode") + error: Optional[str] = None + timed_out: Optional[bool] = Field(default=None, alias="timedOut") + rows: int + cols: int + started_at: int = Field(alias="startedAt") + finished_at: Optional[int] = Field(default=None, alias="finishedAt") + output: Optional[List[SandboxTerminalOutputChunk]] = None + + +class SandboxTerminalWaitParams(SandboxBaseModel): + timeout_ms: Optional[int] = Field(default=None, serialization_alias="timeoutMs") + include_output: Optional[bool] = Field( + default=None, serialization_alias="includeOutput" + ) + + +class SandboxTerminalKillParams(SandboxBaseModel): + signal: Optional[str] = None + timeout_ms: Optional[int] = Field(default=None, serialization_alias="timeoutMs") + + +class SandboxTerminalOutputEvent(SandboxBaseModel): + type: Literal["output"] + seq: int + data: str + raw: bytes + timestamp: int + + +class SandboxTerminalExitEvent(SandboxBaseModel): + type: Literal["exit"] + status: SandboxTerminalStatus + + +SandboxTerminalEvent = 
Union[SandboxTerminalOutputEvent, SandboxTerminalExitEvent] diff --git a/hyperbrowser/sandbox_common.py b/hyperbrowser/sandbox_common.py new file mode 100644 index 00000000..e0e20298 --- /dev/null +++ b/hyperbrowser/sandbox_common.py @@ -0,0 +1,218 @@ +import json +from dataclasses import dataclass +from typing import Any, Dict, Optional, Tuple +from urllib.parse import urljoin, urlsplit, urlunsplit + +import httpx + +from .exceptions import HyperbrowserError, HyperbrowserService + +RETRYABLE_STATUS_CODES = {429, 502, 503, 504} +RUNTIME_SESSION_REFRESH_BUFFER_MS = 60_000 + + +@dataclass(frozen=True) +class RuntimeConnection: + sandbox_id: str + base_url: str + token: str + + +@dataclass(frozen=True) +class RuntimeTransportTarget: + url: str + host_header: Optional[str] = None + connect_host: Optional[str] = None + connect_port: Optional[int] = None + + +def get_request_id(response: httpx.Response) -> Optional[str]: + return response.headers.get("x-request-id") or response.headers.get("request-id") + + +def is_retryable_network_error(error: BaseException) -> bool: + return isinstance( + error, + ( + httpx.TimeoutException, + httpx.NetworkError, + httpx.RemoteProtocolError, + httpx.ProxyError, + httpx.ReadError, + httpx.WriteError, + httpx.PoolTimeout, + ), + ) + + +def parse_error_payload( + raw_text: str, fallback_message: str +) -> Tuple[str, Optional[str], Any]: + if not raw_text: + return fallback_message, None, None + + try: + parsed = json.loads(raw_text) + except json.JSONDecodeError: + return raw_text, None, raw_text + + if isinstance(parsed, dict): + message = parsed.get("message") or parsed.get("error") or fallback_message + code = parsed.get("code") if isinstance(parsed.get("code"), str) else None + return message, code, parsed + + return fallback_message, None, parsed + + +def ensure_response_ok( + response: httpx.Response, + service: HyperbrowserService, + default_message: Optional[str] = None, +) -> httpx.Response: + if response.is_success: + return 
response + + fallback = default_message or ( + f"Request failed: {response.status_code} {response.reason_phrase}" + ) + message, code, details = parse_error_payload(response.text, fallback) + raise HyperbrowserError( + message, + status_code=response.status_code, + response=response, + code=code, + request_id=get_request_id(response), + retryable=response.status_code in RETRYABLE_STATUS_CODES, + service=service, + details=details, + ) + + +def parse_json_response( + response: httpx.Response, + service: HyperbrowserService, + default_message: str = "Failed to parse JSON response", +) -> Any: + if not response.content: + return {} + + try: + return response.json() + except json.JSONDecodeError as error: + raise HyperbrowserError( + default_message, + status_code=response.status_code, + response=response, + request_id=get_request_id(response), + retryable=False, + service=service, + cause=error, + ) + + +def has_scheme(value: str) -> bool: + return "://" in value + + +def resolve_runtime_transport_target( + base_url: str, + path: str, + runtime_proxy_override: Optional[str] = None, +) -> RuntimeTransportTarget: + normalized_base = base_url if base_url.endswith("/") else f"{base_url}/" + url = urljoin(normalized_base, path.lstrip("/")) + + if not runtime_proxy_override: + return RuntimeTransportTarget(url=url) + + override_raw = ( + runtime_proxy_override + if has_scheme(runtime_proxy_override) + else f"{urlsplit(url).scheme}://{runtime_proxy_override}" + ) + original = urlsplit(url) + override = urlsplit(override_raw) + rewritten = urlunsplit( + ( + override.scheme or original.scheme, + override.netloc or original.netloc, + original.path, + original.query, + original.fragment, + ) + ) + runtime_host = urlsplit(base_url).netloc + return RuntimeTransportTarget(url=rewritten, host_header=runtime_host) + + +def to_websocket_transport_target( + base_url: str, + path: str, + runtime_proxy_override: Optional[str] = None, +) -> RuntimeTransportTarget: + normalized_base = 
base_url if base_url.endswith("/") else f"{base_url}/" + url = urljoin(normalized_base, path.lstrip("/")) + parts = urlsplit(url) + scheme = parts.scheme + if scheme == "https": + scheme = "wss" + elif scheme == "http": + scheme = "ws" + websocket_url = urlunsplit( + (scheme, parts.netloc, parts.path, parts.query, parts.fragment) + ) + + if not runtime_proxy_override: + return RuntimeTransportTarget(url=websocket_url) + + override = urlsplit( + runtime_proxy_override + if has_scheme(runtime_proxy_override) + else f"{parts.scheme}://{runtime_proxy_override}" + ) + connect_port = override.port + if connect_port is None: + if override.scheme in {"https", "wss"}: + connect_port = 443 + elif override.scheme in {"http", "ws"}: + connect_port = 80 + + return RuntimeTransportTarget( + url=websocket_url, + connect_host=override.hostname, + connect_port=connect_port, + ) + + +def normalize_network_error( + error: BaseException, + service: HyperbrowserService, + default_message: str, +) -> HyperbrowserError: + if isinstance(error, HyperbrowserError): + return error + + return HyperbrowserError( + str(error) if str(error) else default_message, + retryable=is_retryable_network_error(error), + service=service, + cause=error, + original_error=error if isinstance(error, Exception) else None, + ) + + +def build_headers( + token: str, + extra_headers: Optional[Dict[str, str]] = None, + host_header: Optional[str] = None, +) -> Dict[str, str]: + headers: Dict[str, str] = { + "Authorization": f"Bearer {token}", + } + if extra_headers: + for key, value in extra_headers.items(): + if value is not None: + headers[key] = str(value) + if host_header and "Host" not in headers and "host" not in headers: + headers["Host"] = host_header + return headers diff --git a/poetry.lock b/poetry.lock index 3718645e..f66abd98 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand. 
+# This file is automatically @generated by Poetry 2.3.2 and should not be changed by hand. [[package]] name = "annotated-types" @@ -6,6 +6,7 @@ version = "0.7.0" description = "Reusable constraint types to use with typing.Annotated" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, @@ -20,6 +21,7 @@ version = "4.5.2" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "anyio-4.5.2-py3-none-any.whl", hash = "sha256:c011ee36bc1e8ba40e5a81cb9df91925c218fe9b778554e0b56a21e1b5d4716f"}, {file = "anyio-4.5.2.tar.gz", hash = "sha256:23009af4ed04ce05991845451e11ef02fc7c5ed29179ac9a420e5ad0ac7ddc5b"}, @@ -33,7 +35,7 @@ typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} [package.extras] doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21.0b1)"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "truststore (>=0.9.1) ; python_version >= \"3.10\"", "uvloop (>=0.21.0b1) ; platform_python_implementation == \"CPython\" and platform_system != \"Windows\""] trio = ["trio (>=0.26.1)"] [[package]] @@ -42,17 +44,33 @@ version = "2024.8.30" description = "Python package for providing Mozilla's CA Bundle." 
optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, ] +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["dev"] +markers = "sys_platform == \"win32\"" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + [[package]] name = "exceptiongroup" version = "1.2.2" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" +groups = ["main", "dev"] +markers = "python_version < \"3.11\"" files = [ {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, @@ -67,6 +85,7 @@ version = "0.14.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, @@ -78,6 +97,7 @@ version = "1.0.7" description = "A minimal low-level HTTP client." 
optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd"}, {file = "httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c"}, @@ -99,6 +119,7 @@ version = "0.28.1" description = "The next generation HTTP client." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad"}, {file = "httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc"}, @@ -111,7 +132,7 @@ httpcore = "==1.*" idna = "*" [package.extras] -brotli = ["brotli", "brotlicffi"] +brotli = ["brotli ; platform_python_implementation == \"CPython\"", "brotlicffi ; platform_python_implementation != \"CPython\""] cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] @@ -123,6 +144,7 @@ version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, @@ -131,23 +153,65 @@ files = [ [package.extras] all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] +[[package]] +name = "iniconfig" +version = "2.1.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760"}, + {file = "iniconfig-2.1.0.tar.gz", hash = 
"sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"}, +] + [[package]] name = "jsonref" version = "1.1.0" description = "jsonref is a library for automatic dereferencing of JSON Reference objects for Python." optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "jsonref-1.1.0-py3-none-any.whl", hash = "sha256:590dc7773df6c21cbf948b5dac07a72a251db28b0238ceecce0a2abfa8ec30a9"}, {file = "jsonref-1.1.0.tar.gz", hash = "sha256:32fe8e1d85af0fdefbebce950af85590b22b60f9e95443176adbde4e1ecea552"}, ] +[[package]] +name = "packaging" +version = "26.0" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "packaging-26.0-py3-none-any.whl", hash = "sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529"}, + {file = "packaging-26.0.tar.gz", hash = "sha256:00243ae351a257117b6a241061796684b084ed1c516a08c48a3f7e147a9d80b4"}, +] + +[[package]] +name = "pluggy" +version = "1.5.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + [[package]] name = "pydantic" version = "2.10.6" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "pydantic-2.10.6-py3-none-any.whl", hash = "sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584"}, {file = "pydantic-2.10.6.tar.gz", hash = "sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236"}, @@ -160,7 +224,7 @@ typing-extensions = ">=4.12.2" [package.extras] email = 
["email-validator (>=2.0.0)"] -timezone = ["tzdata"] +timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""] [[package]] name = "pydantic-core" @@ -168,6 +232,7 @@ version = "2.27.2" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa"}, {file = "pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c"}, @@ -274,12 +339,36 @@ files = [ [package.dependencies] typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" +[[package]] +name = "pytest" +version = "8.3.5" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820"}, + {file = "pytest-8.3.5.tar.gz", hash = "sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=1.5,<2" +tomli = {version = ">=1", markers = "python_version < \"3.11\""} + +[package.extras] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + [[package]] name = "ruff" version = "0.3.7" description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "ruff-0.3.7-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:0e8377cccb2f07abd25e84fc5b2cbe48eeb0fea9f1719cad7caedb061d70e5ce"}, {file = "ruff-0.3.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:15a4d1cc1e64e556fa0d67bfd388fed416b7f3b26d5d1c3e7d192c897e39ba4b"}, @@ -306,23 +395,179 @@ version = "1.3.1" description = "Sniff out which async library your code is running under" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, ] +[[package]] +name = "tomli" +version = "2.4.0" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +markers = "python_version < \"3.11\"" +files = [ + {file = "tomli-2.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b5ef256a3fd497d4973c11bf142e9ed78b150d36f5773f1ca6088c230ffc5867"}, + {file = "tomli-2.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5572e41282d5268eb09a697c89a7bee84fae66511f87533a6f88bd2f7b652da9"}, + {file = "tomli-2.4.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:551e321c6ba03b55676970b47cb1b73f14a0a4dce6a3e1a9458fd6d921d72e95"}, + {file = "tomli-2.4.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5e3f639a7a8f10069d0e15408c0b96a2a828cfdec6fca05296ebcdcc28ca7c76"}, + {file = "tomli-2.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1b168f2731796b045128c45982d3a4874057626da0e2ef1fdd722848b741361d"}, + {file = "tomli-2.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:133e93646ec4300d651839d382d63edff11d8978be23da4cc106f5a18b7d0576"}, + {file = 
"tomli-2.4.0-cp311-cp311-win32.whl", hash = "sha256:b6c78bdf37764092d369722d9946cb65b8767bfa4110f902a1b2542d8d173c8a"}, + {file = "tomli-2.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:d3d1654e11d724760cdb37a3d7691f0be9db5fbdaef59c9f532aabf87006dbaa"}, + {file = "tomli-2.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:cae9c19ed12d4e8f3ebf46d1a75090e4c0dc16271c5bce1c833ac168f08fb614"}, + {file = "tomli-2.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:920b1de295e72887bafa3ad9f7a792f811847d57ea6b1215154030cf131f16b1"}, + {file = "tomli-2.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7d6d9a4aee98fac3eab4952ad1d73aee87359452d1c086b5ceb43ed02ddb16b8"}, + {file = "tomli-2.4.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:36b9d05b51e65b254ea6c2585b59d2c4cb91c8a3d91d0ed0f17591a29aaea54a"}, + {file = "tomli-2.4.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1c8a885b370751837c029ef9bc014f27d80840e48bac415f3412e6593bbc18c1"}, + {file = "tomli-2.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8768715ffc41f0008abe25d808c20c3d990f42b6e2e58305d5da280ae7d1fa3b"}, + {file = "tomli-2.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7b438885858efd5be02a9a133caf5812b8776ee0c969fea02c45e8e3f296ba51"}, + {file = "tomli-2.4.0-cp312-cp312-win32.whl", hash = "sha256:0408e3de5ec77cc7f81960c362543cbbd91ef883e3138e81b729fc3eea5b9729"}, + {file = "tomli-2.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:685306e2cc7da35be4ee914fd34ab801a6acacb061b6a7abca922aaf9ad368da"}, + {file = "tomli-2.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:5aa48d7c2356055feef06a43611fc401a07337d5b006be13a30f6c58f869e3c3"}, + {file = "tomli-2.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:84d081fbc252d1b6a982e1870660e7330fb8f90f676f6e78b052ad4e64714bf0"}, + {file = "tomli-2.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:9a08144fa4cba33db5255f9b74f0b89888622109bd2776148f2597447f92a94e"}, + {file = "tomli-2.4.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c73add4bb52a206fd0c0723432db123c0c75c280cbd67174dd9d2db228ebb1b4"}, + {file = "tomli-2.4.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1fb2945cbe303b1419e2706e711b7113da57b7db31ee378d08712d678a34e51e"}, + {file = "tomli-2.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bbb1b10aa643d973366dc2cb1ad94f99c1726a02343d43cbc011edbfac579e7c"}, + {file = "tomli-2.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4cbcb367d44a1f0c2be408758b43e1ffb5308abe0ea222897d6bfc8e8281ef2f"}, + {file = "tomli-2.4.0-cp313-cp313-win32.whl", hash = "sha256:7d49c66a7d5e56ac959cb6fc583aff0651094ec071ba9ad43df785abc2320d86"}, + {file = "tomli-2.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:3cf226acb51d8f1c394c1b310e0e0e61fecdd7adcb78d01e294ac297dd2e7f87"}, + {file = "tomli-2.4.0-cp313-cp313-win_arm64.whl", hash = "sha256:d20b797a5c1ad80c516e41bc1fb0443ddb5006e9aaa7bda2d71978346aeb9132"}, + {file = "tomli-2.4.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:26ab906a1eb794cd4e103691daa23d95c6919cc2fa9160000ac02370cc9dd3f6"}, + {file = "tomli-2.4.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:20cedb4ee43278bc4f2fee6cb50daec836959aadaf948db5172e776dd3d993fc"}, + {file = "tomli-2.4.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:39b0b5d1b6dd03684b3fb276407ebed7090bbec989fa55838c98560c01113b66"}, + {file = "tomli-2.4.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a26d7ff68dfdb9f87a016ecfd1e1c2bacbe3108f4e0f8bcd2228ef9a766c787d"}, + {file = "tomli-2.4.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:20ffd184fb1df76a66e34bd1b36b4a4641bd2b82954befa32fe8163e79f1a702"}, + {file = 
"tomli-2.4.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:75c2f8bbddf170e8effc98f5e9084a8751f8174ea6ccf4fca5398436e0320bc8"}, + {file = "tomli-2.4.0-cp314-cp314-win32.whl", hash = "sha256:31d556d079d72db7c584c0627ff3a24c5d3fb4f730221d3444f3efb1b2514776"}, + {file = "tomli-2.4.0-cp314-cp314-win_amd64.whl", hash = "sha256:43e685b9b2341681907759cf3a04e14d7104b3580f808cfde1dfdb60ada85475"}, + {file = "tomli-2.4.0-cp314-cp314-win_arm64.whl", hash = "sha256:3d895d56bd3f82ddd6faaff993c275efc2ff38e52322ea264122d72729dca2b2"}, + {file = "tomli-2.4.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:5b5807f3999fb66776dbce568cc9a828544244a8eb84b84b9bafc080c99597b9"}, + {file = "tomli-2.4.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c084ad935abe686bd9c898e62a02a19abfc9760b5a79bc29644463eaf2840cb0"}, + {file = "tomli-2.4.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0f2e3955efea4d1cfbcb87bc321e00dc08d2bcb737fd1d5e398af111d86db5df"}, + {file = "tomli-2.4.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0e0fe8a0b8312acf3a88077a0802565cb09ee34107813bba1c7cd591fa6cfc8d"}, + {file = "tomli-2.4.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:413540dce94673591859c4c6f794dfeaa845e98bf35d72ed59636f869ef9f86f"}, + {file = "tomli-2.4.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:0dc56fef0e2c1c470aeac5b6ca8cc7b640bb93e92d9803ddaf9ea03e198f5b0b"}, + {file = "tomli-2.4.0-cp314-cp314t-win32.whl", hash = "sha256:d878f2a6707cc9d53a1be1414bbb419e629c3d6e67f69230217bb663e76b5087"}, + {file = "tomli-2.4.0-cp314-cp314t-win_amd64.whl", hash = "sha256:2add28aacc7425117ff6364fe9e06a183bb0251b03f986df0e78e974047571fd"}, + {file = "tomli-2.4.0-cp314-cp314t-win_arm64.whl", hash = "sha256:2b1e3b80e1d5e52e40e9b924ec43d81570f0e7d09d11081b797bc4692765a3d4"}, + {file = "tomli-2.4.0-py3-none-any.whl", hash = 
"sha256:1f776e7d669ebceb01dee46484485f43a4048746235e683bcdffacdf1fb4785a"}, + {file = "tomli-2.4.0.tar.gz", hash = "sha256:aa89c3f6c277dd275d8e243ad24f3b5e701491a860d5121f2cdd399fbb31fc9c"}, +] + [[package]] name = "typing-extensions" version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] +[[package]] +name = "websockets" +version = "13.1" +description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "websockets-13.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f48c749857f8fb598fb890a75f540e3221d0976ed0bf879cf3c7eef34151acee"}, + {file = "websockets-13.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c7e72ce6bda6fb9409cc1e8164dd41d7c91466fb599eb047cfda72fe758a34a7"}, + {file = "websockets-13.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f779498eeec470295a2b1a5d97aa1bc9814ecd25e1eb637bd9d1c73a327387f6"}, + {file = "websockets-13.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4676df3fe46956fbb0437d8800cd5f2b6d41143b6e7e842e60554398432cf29b"}, + {file = "websockets-13.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7affedeb43a70351bb811dadf49493c9cfd1ed94c9c70095fd177e9cc1541fa"}, + {file = "websockets-13.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1971e62d2caa443e57588e1d82d15f663b29ff9dfe7446d9964a4b6f12c1e700"}, + {file = "websockets-13.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:5f2e75431f8dc4a47f31565a6e1355fb4f2ecaa99d6b89737527ea917066e26c"}, + {file = "websockets-13.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:58cf7e75dbf7e566088b07e36ea2e3e2bd5676e22216e4cad108d4df4a7402a0"}, + {file = "websockets-13.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c90d6dec6be2c7d03378a574de87af9b1efea77d0c52a8301dd831ece938452f"}, + {file = "websockets-13.1-cp310-cp310-win32.whl", hash = "sha256:730f42125ccb14602f455155084f978bd9e8e57e89b569b4d7f0f0c17a448ffe"}, + {file = "websockets-13.1-cp310-cp310-win_amd64.whl", hash = "sha256:5993260f483d05a9737073be197371940c01b257cc45ae3f1d5d7adb371b266a"}, + {file = "websockets-13.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:61fc0dfcda609cda0fc9fe7977694c0c59cf9d749fbb17f4e9483929e3c48a19"}, + {file = "websockets-13.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ceec59f59d092c5007e815def4ebb80c2de330e9588e101cf8bd94c143ec78a5"}, + {file = "websockets-13.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c1dca61c6db1166c48b95198c0b7d9c990b30c756fc2923cc66f68d17dc558fd"}, + {file = "websockets-13.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:308e20f22c2c77f3f39caca508e765f8725020b84aa963474e18c59accbf4c02"}, + {file = "websockets-13.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62d516c325e6540e8a57b94abefc3459d7dab8ce52ac75c96cad5549e187e3a7"}, + {file = "websockets-13.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87c6e35319b46b99e168eb98472d6c7d8634ee37750d7693656dc766395df096"}, + {file = "websockets-13.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5f9fee94ebafbc3117c30be1844ed01a3b177bb6e39088bc6b2fa1dc15572084"}, + {file = "websockets-13.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:7c1e90228c2f5cdde263253fa5db63e6653f1c00e7ec64108065a0b9713fa1b3"}, + {file = 
"websockets-13.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6548f29b0e401eea2b967b2fdc1c7c7b5ebb3eeb470ed23a54cd45ef078a0db9"}, + {file = "websockets-13.1-cp311-cp311-win32.whl", hash = "sha256:c11d4d16e133f6df8916cc5b7e3e96ee4c44c936717d684a94f48f82edb7c92f"}, + {file = "websockets-13.1-cp311-cp311-win_amd64.whl", hash = "sha256:d04f13a1d75cb2b8382bdc16ae6fa58c97337253826dfe136195b7f89f661557"}, + {file = "websockets-13.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:9d75baf00138f80b48f1eac72ad1535aac0b6461265a0bcad391fc5aba875cfc"}, + {file = "websockets-13.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9b6f347deb3dcfbfde1c20baa21c2ac0751afaa73e64e5b693bb2b848efeaa49"}, + {file = "websockets-13.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:de58647e3f9c42f13f90ac7e5f58900c80a39019848c5547bc691693098ae1bd"}, + {file = "websockets-13.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1b54689e38d1279a51d11e3467dd2f3a50f5f2e879012ce8f2d6943f00e83f0"}, + {file = "websockets-13.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf1781ef73c073e6b0f90af841aaf98501f975d306bbf6221683dd594ccc52b6"}, + {file = "websockets-13.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d23b88b9388ed85c6faf0e74d8dec4f4d3baf3ecf20a65a47b836d56260d4b9"}, + {file = "websockets-13.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3c78383585f47ccb0fcf186dcb8a43f5438bd7d8f47d69e0b56f71bf431a0a68"}, + {file = "websockets-13.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d6d300f8ec35c24025ceb9b9019ae9040c1ab2f01cddc2bcc0b518af31c75c14"}, + {file = "websockets-13.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a9dcaf8b0cc72a392760bb8755922c03e17a5a54e08cca58e8b74f6902b433cf"}, + {file = "websockets-13.1-cp312-cp312-win32.whl", hash = 
"sha256:2f85cf4f2a1ba8f602298a853cec8526c2ca42a9a4b947ec236eaedb8f2dc80c"}, + {file = "websockets-13.1-cp312-cp312-win_amd64.whl", hash = "sha256:38377f8b0cdeee97c552d20cf1865695fcd56aba155ad1b4ca8779a5b6ef4ac3"}, + {file = "websockets-13.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a9ab1e71d3d2e54a0aa646ab6d4eebfaa5f416fe78dfe4da2839525dc5d765c6"}, + {file = "websockets-13.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b9d7439d7fab4dce00570bb906875734df13d9faa4b48e261c440a5fec6d9708"}, + {file = "websockets-13.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:327b74e915cf13c5931334c61e1a41040e365d380f812513a255aa804b183418"}, + {file = "websockets-13.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:325b1ccdbf5e5725fdcb1b0e9ad4d2545056479d0eee392c291c1bf76206435a"}, + {file = "websockets-13.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:346bee67a65f189e0e33f520f253d5147ab76ae42493804319b5716e46dddf0f"}, + {file = "websockets-13.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91a0fa841646320ec0d3accdff5b757b06e2e5c86ba32af2e0815c96c7a603c5"}, + {file = "websockets-13.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:18503d2c5f3943e93819238bf20df71982d193f73dcecd26c94514f417f6b135"}, + {file = "websockets-13.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:a9cd1af7e18e5221d2878378fbc287a14cd527fdd5939ed56a18df8a31136bb2"}, + {file = "websockets-13.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:70c5be9f416aa72aab7a2a76c90ae0a4fe2755c1816c153c1a2bcc3333ce4ce6"}, + {file = "websockets-13.1-cp313-cp313-win32.whl", hash = "sha256:624459daabeb310d3815b276c1adef475b3e6804abaf2d9d2c061c319f7f187d"}, + {file = "websockets-13.1-cp313-cp313-win_amd64.whl", hash = "sha256:c518e84bb59c2baae725accd355c8dc517b4a3ed8db88b4bc93c78dae2974bf2"}, + {file = 
"websockets-13.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c7934fd0e920e70468e676fe7f1b7261c1efa0d6c037c6722278ca0228ad9d0d"}, + {file = "websockets-13.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:149e622dc48c10ccc3d2760e5f36753db9cacf3ad7bc7bbbfd7d9c819e286f23"}, + {file = "websockets-13.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a569eb1b05d72f9bce2ebd28a1ce2054311b66677fcd46cf36204ad23acead8c"}, + {file = "websockets-13.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95df24ca1e1bd93bbca51d94dd049a984609687cb2fb08a7f2c56ac84e9816ea"}, + {file = "websockets-13.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8dbb1bf0c0a4ae8b40bdc9be7f644e2f3fb4e8a9aca7145bfa510d4a374eeb7"}, + {file = "websockets-13.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:035233b7531fb92a76beefcbf479504db8c72eb3bff41da55aecce3a0f729e54"}, + {file = "websockets-13.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:e4450fc83a3df53dec45922b576e91e94f5578d06436871dce3a6be38e40f5db"}, + {file = "websockets-13.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:463e1c6ec853202dd3657f156123d6b4dad0c546ea2e2e38be2b3f7c5b8e7295"}, + {file = "websockets-13.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:6d6855bbe70119872c05107e38fbc7f96b1d8cb047d95c2c50869a46c65a8e96"}, + {file = "websockets-13.1-cp38-cp38-win32.whl", hash = "sha256:204e5107f43095012b00f1451374693267adbb832d29966a01ecc4ce1db26faf"}, + {file = "websockets-13.1-cp38-cp38-win_amd64.whl", hash = "sha256:485307243237328c022bc908b90e4457d0daa8b5cf4b3723fd3c4a8012fce4c6"}, + {file = "websockets-13.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9b37c184f8b976f0c0a231a5f3d6efe10807d41ccbe4488df8c74174805eea7d"}, + {file = "websockets-13.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:163e7277e1a0bd9fb3c8842a71661ad19c6aa7bb3d6678dc7f89b17fbcc4aeb7"}, + {file = "websockets-13.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4b889dbd1342820cc210ba44307cf75ae5f2f96226c0038094455a96e64fb07a"}, + {file = "websockets-13.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:586a356928692c1fed0eca68b4d1c2cbbd1ca2acf2ac7e7ebd3b9052582deefa"}, + {file = "websockets-13.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7bd6abf1e070a6b72bfeb71049d6ad286852e285f146682bf30d0296f5fbadfa"}, + {file = "websockets-13.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2aad13a200e5934f5a6767492fb07151e1de1d6079c003ab31e1823733ae79"}, + {file = "websockets-13.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:df01aea34b6e9e33572c35cd16bae5a47785e7d5c8cb2b54b2acdb9678315a17"}, + {file = "websockets-13.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:e54affdeb21026329fb0744ad187cf812f7d3c2aa702a5edb562b325191fcab6"}, + {file = "websockets-13.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9ef8aa8bdbac47f4968a5d66462a2a0935d044bf35c0e5a8af152d58516dbeb5"}, + {file = "websockets-13.1-cp39-cp39-win32.whl", hash = "sha256:deeb929efe52bed518f6eb2ddc00cc496366a14c726005726ad62c2dd9017a3c"}, + {file = "websockets-13.1-cp39-cp39-win_amd64.whl", hash = "sha256:7c65ffa900e7cc958cd088b9a9157a8141c991f8c53d11087e6fb7277a03f81d"}, + {file = "websockets-13.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:5dd6da9bec02735931fccec99d97c29f47cc61f644264eb995ad6c0c27667238"}, + {file = "websockets-13.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:2510c09d8e8df777177ee3d40cd35450dc169a81e747455cc4197e63f7e7bfe5"}, + {file = "websockets-13.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1c3cf67185543730888b20682fb186fc8d0fa6f07ccc3ef4390831ab4b388d9"}, + 
{file = "websockets-13.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bcc03c8b72267e97b49149e4863d57c2d77f13fae12066622dc78fe322490fe6"}, + {file = "websockets-13.1-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:004280a140f220c812e65f36944a9ca92d766b6cc4560be652a0a3883a79ed8a"}, + {file = "websockets-13.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:e2620453c075abeb0daa949a292e19f56de518988e079c36478bacf9546ced23"}, + {file = "websockets-13.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9156c45750b37337f7b0b00e6248991a047be4aa44554c9886fe6bdd605aab3b"}, + {file = "websockets-13.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:80c421e07973a89fbdd93e6f2003c17d20b69010458d3a8e37fb47874bd67d51"}, + {file = "websockets-13.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82d0ba76371769d6a4e56f7e83bb8e81846d17a6190971e38b5de108bde9b0d7"}, + {file = "websockets-13.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e9875a0143f07d74dc5e1ded1c4581f0d9f7ab86c78994e2ed9e95050073c94d"}, + {file = "websockets-13.1-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a11e38ad8922c7961447f35c7b17bffa15de4d17c70abd07bfbe12d6faa3e027"}, + {file = "websockets-13.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:4059f790b6ae8768471cddb65d3c4fe4792b0ab48e154c9f0a04cefaabcd5978"}, + {file = "websockets-13.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:25c35bf84bf7c7369d247f0b8cfa157f989862c49104c5cf85cb5436a641d93e"}, + {file = "websockets-13.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:83f91d8a9bb404b8c2c41a707ac7f7f75b9442a0a876df295de27251a856ad09"}, + {file = 
"websockets-13.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7a43cfdcddd07f4ca2b1afb459824dd3c6d53a51410636a2c7fc97b9a8cf4842"}, + {file = "websockets-13.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:48a2ef1381632a2f0cb4efeff34efa97901c9fbc118e01951ad7cfc10601a9bb"}, + {file = "websockets-13.1-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:459bf774c754c35dbb487360b12c5727adab887f1622b8aed5755880a21c4a20"}, + {file = "websockets-13.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:95858ca14a9f6fa8413d29e0a585b31b278388aa775b8a81fa24830123874678"}, + {file = "websockets-13.1-py3-none-any.whl", hash = "sha256:a9a396a6ad26130cdae92ae10c36af09d9bfe6cafe69670fd3b6da9b07b4044f"}, + {file = "websockets-13.1.tar.gz", hash = "sha256:a3b3366087c1bc0a2795111edcadddb8b3b59509d5db5d7ea3fdd69f954a8878"}, +] + [metadata] -lock-version = "2.0" +lock-version = "2.1" python-versions = "^3.8" -content-hash = "7b62e1f0f4a0585712cb1026052e7baaa2c123a0bf48fd94a4e97ca405148ea2" +content-hash = "811874a8f5ef40c48d5591893d71256c97ace59a0ef0d84bc7c7dde6782efb95" diff --git a/pyproject.toml b/pyproject.toml index aa2cd7a9..7c1f5a9d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "hyperbrowser" -version = "0.84.0" +version = "0.85.0" description = "Python SDK for hyperbrowser" authors = ["Nikhil Shahi "] license = "MIT" @@ -14,10 +14,12 @@ python = "^3.8" pydantic = ">=2.0,<3" httpx = ">=0.23.0,<1" jsonref = ">=1.1.0" +websockets = ">=13,<16" [tool.poetry.group.dev.dependencies] ruff = "^0.3.0" +pytest = "^8.3.0" [build-system] diff --git a/tests/.env.example b/tests/.env.example new file mode 100644 index 00000000..70db0662 --- /dev/null +++ b/tests/.env.example @@ -0,0 +1,3 @@ +HYPERBROWSER_API_KEY= +HYPERBROWSER_BASE_URL=http://localhost:8080 
+REGIONAL_PROXY_DEV_HOST=127.0.0.1:8090 diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 00000000..af7e4799 --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,6 @@ +import pytest + + +@pytest.fixture +def anyio_backend(): + return "asyncio" diff --git a/tests/helpers/__init__.py b/tests/helpers/__init__.py new file mode 100644 index 00000000..8b137891 --- /dev/null +++ b/tests/helpers/__init__.py @@ -0,0 +1 @@ + diff --git a/tests/helpers/config.py b/tests/helpers/config.py new file mode 100644 index 00000000..ab136345 --- /dev/null +++ b/tests/helpers/config.py @@ -0,0 +1,65 @@ +import os +from pathlib import Path + +from hyperbrowser import AsyncHyperbrowser, Hyperbrowser + +TESTS_DIR = Path(__file__).resolve().parent.parent +ENV_PATHS = ( + TESTS_DIR / ".env", + TESTS_DIR.parent / ".env", +) + + +def _load_env() -> None: + for env_path in ENV_PATHS: + if not env_path.exists(): + continue + + for raw_line in env_path.read_text().splitlines(): + line = raw_line.strip() + if not line or line.startswith("#") or "=" not in line: + continue + key, value = line.split("=", 1) + os.environ.setdefault(key.strip(), value.strip()) + return + + +_load_env() + +API_KEY = os.environ.get("HYPERBROWSER_API_KEY", "") +BASE_URL = os.environ.get("HYPERBROWSER_BASE_URL", "http://localhost:8080") +REGIONAL_PROXY_DEV_HOST = os.environ.get("REGIONAL_PROXY_DEV_HOST", "") +DEFAULT_IMAGE_NAME = os.environ.get("HYPERBROWSER_DEFAULT_IMAGE_NAME", "node") + + +def create_client() -> Hyperbrowser: + if not API_KEY: + raise RuntimeError( + "Set HYPERBROWSER_API_KEY in tests/.env before running sandbox e2e tests" + ) + + return Hyperbrowser( + api_key=API_KEY, + base_url=BASE_URL, + runtime_proxy_override=REGIONAL_PROXY_DEV_HOST or None, + ) + + +def create_async_client() -> AsyncHyperbrowser: + if not API_KEY: + raise RuntimeError( + "Set HYPERBROWSER_API_KEY in tests/.env before running sandbox e2e tests" + ) + + return AsyncHyperbrowser( + api_key=API_KEY, + 
base_url=BASE_URL, + runtime_proxy_override=REGIONAL_PROXY_DEV_HOST or None, + ) + + +def make_test_name(prefix: str) -> str: + import random + import time + + return f"{prefix}-{int(time.time() * 1000)}-{random.randrange(16**6):06x}" diff --git a/tests/helpers/errors.py b/tests/helpers/errors.py new file mode 100644 index 00000000..f874cc11 --- /dev/null +++ b/tests/helpers/errors.py @@ -0,0 +1,119 @@ +from typing import Callable, Iterable, Optional + +from hyperbrowser.exceptions import HyperbrowserError + + +def _normalize_messages(value: Optional[Iterable[str]], single: Optional[str]): + if single is not None: + return [single] + if value is None: + return [] + return list(value) + + +def expect_hyperbrowser_error( + label: str, + action: Callable[[], object], + *, + status_code: Optional[int] = None, + code: Optional[str] = None, + service: Optional[str] = None, + retryable: Optional[bool] = None, + message_includes: Optional[str] = None, + message_includes_many: Optional[Iterable[str]] = None, + message_includes_any: Optional[Iterable[str]] = None, +): + try: + action() + except HyperbrowserError as error: + assert "Unknown error occurred" not in str( + error + ), f"{label}: unexpected generic error message {error!r}" + + if status_code is not None: + assert error.status_code == status_code, ( + f"{label}: expected status_code={status_code}, " + f"got {error.status_code}" + ) + if code is not None: + assert ( + error.code == code + ), f"{label}: expected code={code}, got {error.code}" + if service is not None: + assert ( + error.service == service + ), f"{label}: expected service={service}, got {error.service}" + if retryable is not None: + assert ( + error.retryable == retryable + ), f"{label}: expected retryable={retryable}, got {error.retryable}" + + for text in _normalize_messages(message_includes_many, message_includes): + assert text in str(error), ( + f"{label}: expected error message to include {text!r}, " + f"got {str(error)!r}" + ) + + if 
message_includes_any: + assert any(text in str(error) for text in message_includes_any), ( + f"{label}: expected error message to include one of " + f"{list(message_includes_any)!r}, got {str(error)!r}" + ) + + return error + + raise AssertionError(f"{label}: expected HyperbrowserError, but call succeeded") + + +async def expect_hyperbrowser_error_async( + label: str, + action, + *, + status_code: Optional[int] = None, + code: Optional[str] = None, + service: Optional[str] = None, + retryable: Optional[bool] = None, + message_includes: Optional[str] = None, + message_includes_many: Optional[Iterable[str]] = None, + message_includes_any: Optional[Iterable[str]] = None, +): + try: + await action() + except HyperbrowserError as error: + assert "Unknown error occurred" not in str( + error + ), f"{label}: unexpected generic error message {error!r}" + + if status_code is not None: + assert error.status_code == status_code, ( + f"{label}: expected status_code={status_code}, " + f"got {error.status_code}" + ) + if code is not None: + assert ( + error.code == code + ), f"{label}: expected code={code}, got {error.code}" + if service is not None: + assert ( + error.service == service + ), f"{label}: expected service={service}, got {error.service}" + if retryable is not None: + assert ( + error.retryable == retryable + ), f"{label}: expected retryable={retryable}, got {error.retryable}" + + for text in _normalize_messages(message_includes_many, message_includes): + assert text in str(error), ( + f"{label}: expected error message to include {text!r}, " + f"got {str(error)!r}" + ) + + if message_includes_any: + assert any(text in str(error) for text in message_includes_any), ( + f"{label}: expected error message to include one of " + f"{list(message_includes_any)!r}, got {str(error)!r}" + ) + + return error + + raise AssertionError(f"{label}: expected HyperbrowserError, but call succeeded") diff --git a/tests/helpers/http.py b/tests/helpers/http.py new file mode 100644 index 
00000000..ad78f7d7 --- /dev/null +++ b/tests/helpers/http.py @@ -0,0 +1,94 @@ +from urllib.parse import urlsplit, urlunsplit + +import httpx + +from tests.helpers.config import API_KEY, BASE_URL, REGIONAL_PROXY_DEV_HOST + + +def _has_scheme(value: str) -> bool: + return "://" in value + + +def _resolve_signed_url_target(input_url: str): + original = urlsplit(input_url) + if not REGIONAL_PROXY_DEV_HOST: + return input_url, None + + override = urlsplit( + REGIONAL_PROXY_DEV_HOST + if _has_scheme(REGIONAL_PROXY_DEV_HOST) + else f"{original.scheme}://{REGIONAL_PROXY_DEV_HOST}" + ) + rewritten = urlunsplit( + ( + override.scheme or original.scheme, + override.netloc or original.netloc, + original.path, + original.query, + original.fragment, + ) + ) + return rewritten, original.netloc + + +def fetch_signed_url( + input_url: str, + *, + method: str = "GET", + body=None, + headers=None, +) -> httpx.Response: + url, host_header = _resolve_signed_url_target(input_url) + request_headers = dict(headers or {}) + if host_header and "Host" not in request_headers and "host" not in request_headers: + request_headers["Host"] = host_header + return httpx.request(method, url, headers=request_headers, content=body, timeout=30) + + +def fetch_runtime_url( + input_url: str, + *, + method: str = "GET", + body=None, + headers=None, +) -> httpx.Response: + return fetch_signed_url( + input_url, + method=method, + body=body, + headers=headers, + ) + + +def get_image_by_name(image_name: str): + response = httpx.get( + f"{BASE_URL}/api/images", + headers={"Authorization": f"Bearer {API_KEY}"}, + timeout=30, + ) + response.raise_for_status() + payload = response.json() + images = payload.get("data", {}).get("images") or payload.get("images") or [] + image = next( + (entry for entry in images if entry.get("imageName") == image_name), None + ) + if image is None: + raise RuntimeError(f"custom image {image_name!r} not found in /api/images") + return image + + +async def 
get_image_by_name_async(image_name: str): + async with httpx.AsyncClient(timeout=30) as client: + response = await client.get( + f"{BASE_URL}/api/images", + headers={"Authorization": f"Bearer {API_KEY}"}, + ) + response.raise_for_status() + payload = response.json() + images = payload.get("data", {}).get("images") or payload.get("images") or [] + image = next( + (entry for entry in images if entry.get("imageName") == image_name), None + ) + if image is None: + raise RuntimeError(f"custom image {image_name!r} not found in /api/images") + return image diff --git a/tests/helpers/sandbox.py b/tests/helpers/sandbox.py new file mode 100644 index 00000000..dd525640 --- /dev/null +++ b/tests/helpers/sandbox.py @@ -0,0 +1,98 @@ +import time + +from hyperbrowser.exceptions import HyperbrowserError +from hyperbrowser.models import CreateSandboxParams + +from tests.helpers.config import DEFAULT_IMAGE_NAME + + +def default_sandbox_params(prefix: str) -> CreateSandboxParams: + return CreateSandboxParams( + image_name=DEFAULT_IMAGE_NAME, + ) + + +def stop_sandbox_if_running(sandbox) -> None: + if sandbox is None: + return + + try: + sandbox.stop() + except HyperbrowserError as error: + if error.status_code in {404, 409}: + return + raise + + +def wait_for_runtime_ready( + sandbox, + *, + attempts: int = 5, + delay_seconds: float = 0.25, +) -> None: + last_error = None + + for attempt in range(1, attempts + 1): + try: + result = sandbox.exec("true") + if result.exit_code == 0: + return + last_error = RuntimeError( + f"runtime readiness probe exited with code {result.exit_code}" + ) + except HyperbrowserError as error: + if error.service == "runtime" and error.retryable: + last_error = error + else: + raise + + if attempt < attempts: + time.sleep(delay_seconds * attempt) + + if isinstance(last_error, Exception): + raise last_error + raise RuntimeError("sandbox runtime did not become ready") + + +async def stop_sandbox_if_running_async(sandbox) -> None: + if sandbox is None: + 
return + + try: + await sandbox.stop() + except HyperbrowserError as error: + if error.status_code in {404, 409}: + return + raise + + +async def wait_for_runtime_ready_async( + sandbox, + *, + attempts: int = 5, + delay_seconds: float = 0.25, +) -> None: + import asyncio + + last_error = None + + for attempt in range(1, attempts + 1): + try: + result = await sandbox.exec("true") + if result.exit_code == 0: + return + last_error = RuntimeError( + f"runtime readiness probe exited with code {result.exit_code}" + ) + except HyperbrowserError as error: + if error.service == "runtime" and error.retryable: + last_error = error + else: + raise + + if attempt < attempts: + await asyncio.sleep(delay_seconds * attempt) + + if isinstance(last_error, Exception): + raise last_error + raise RuntimeError("sandbox runtime did not become ready") diff --git a/tests/sandbox/e2e/test_async_expose.py b/tests/sandbox/e2e/test_async_expose.py new file mode 100644 index 00000000..0e8d69ca --- /dev/null +++ b/tests/sandbox/e2e/test_async_expose.py @@ -0,0 +1,140 @@ +import asyncio + +import pytest + +from hyperbrowser.models import SandboxExecParams, SandboxExposeParams + +from tests.helpers.config import create_async_client +from tests.helpers.errors import expect_hyperbrowser_error_async +from tests.helpers.http import fetch_runtime_url +from tests.helpers.sandbox import ( + default_sandbox_params, + stop_sandbox_if_running_async, + wait_for_runtime_ready_async, +) + +HTTP_PORT = 3210 + + +async def _wait_for_http_response( + url: str, *, headers=None, predicate, attempts: int = 15 +): + last_status = 0 + last_body = "" + + for attempt in range(1, attempts + 1): + try: + response = await asyncio.to_thread(fetch_runtime_url, url, headers=headers) + body = response.text + last_status = response.status_code + last_body = body + if predicate(response.status_code, body): + return response.status_code, body + except Exception as error: # pragma: no cover - network edge in e2e + last_body = 
str(error) + + if attempt < attempts: + await asyncio.sleep(0.2 * attempt) + + raise AssertionError( + f"did not receive expected response for {url}; " + f"last status={last_status}, last body={last_body!r}" + ) + + +@pytest.mark.anyio +async def test_async_sandbox_expose_e2e(): + client = create_async_client() + sandbox = None + server_process = None + + try: + sandbox = await client.sandboxes.create( + default_sandbox_params("py-async-expose") + ) + await wait_for_runtime_ready_async(sandbox) + + server_process = await sandbox.processes.start( + SandboxExecParams( + command="node", + args=[ + "-e", + " ".join( + [ + "const http = require('http');", + f"const port = {HTTP_PORT};", + "const server = http.createServer((req, res) => {", + " res.writeHead(200, {'content-type': 'text/plain'});", + " res.end(`sdk-exposed:${req.method}:${req.url}`);", + "});", + "server.listen(port, '0.0.0.0', () => {", + " console.log(`listening:${port}`);", + "});", + "process.on('SIGTERM', () => server.close(() => process.exit(0)));", + "process.on('SIGINT', () => server.close(() => process.exit(0)));", + ] + ), + ], + ) + ) + + token = sandbox.to_dict()["token"] + assert token + await _wait_for_http_response( + sandbox.get_exposed_url(HTTP_PORT), + headers={"Authorization": f"Bearer {token}"}, + predicate=lambda status, _: status == 403, + ) + + await expect_hyperbrowser_error_async( + "reserved receiver port expose", + lambda: sandbox.expose(SandboxExposeParams(port=4001)), + status_code=400, + service="control", + retryable=False, + message_includes="cannot be exposed", + ) + + exposure = await sandbox.expose(SandboxExposeParams(port=HTTP_PORT, auth=False)) + assert exposure.port == HTTP_PORT + assert exposure.auth is False + assert exposure.url == sandbox.get_exposed_url(HTTP_PORT) + + status, body = await _wait_for_http_response( + exposure.url, + predicate=lambda response_status, response_body: ( + response_status == 200 and "sdk-exposed:GET:/" in response_body + ), + ) + assert 
status == 200 + assert "sdk-exposed:GET:/" in body + + exposure = await sandbox.expose(SandboxExposeParams(port=HTTP_PORT, auth=True)) + assert exposure.auth is True + + status, _ = await _wait_for_http_response( + exposure.url, + predicate=lambda response_status, _: response_status == 401, + ) + assert status == 401 + + await sandbox.refresh() + token = sandbox.to_dict()["token"] + assert token + status, body = await _wait_for_http_response( + exposure.url, + headers={"Authorization": f"Bearer {token}"}, + predicate=lambda response_status, response_body: ( + response_status == 200 and "sdk-exposed:GET:/" in response_body + ), + ) + assert status == 200 + assert "sdk-exposed:GET:/" in body + finally: + if server_process is not None: + try: + await server_process.kill() + except Exception: + pass + await stop_sandbox_if_running_async(sandbox) + await client.close() diff --git a/tests/sandbox/e2e/test_async_files.py b/tests/sandbox/e2e/test_async_files.py new file mode 100644 index 00000000..ba2b3103 --- /dev/null +++ b/tests/sandbox/e2e/test_async_files.py @@ -0,0 +1,635 @@ +import asyncio + +import pytest + +from hyperbrowser.models import SandboxExecParams, SandboxFileWriteEntry + +from tests.helpers.config import create_async_client, make_test_name +from tests.helpers.errors import expect_hyperbrowser_error_async +from tests.helpers.http import fetch_signed_url +from tests.helpers.sandbox import ( + default_sandbox_params, + stop_sandbox_if_running_async, + wait_for_runtime_ready_async, +) + + +def _read_stream_text(stream) -> str: + return stream.read().decode("utf-8") + + +def _bash_exec(command: str) -> SandboxExecParams: + return SandboxExecParams(command="bash", args=["-lc", command]) + + +async def _await_future(future: asyncio.Future, timeout: float = 10.0): + return await asyncio.wait_for(future, timeout=timeout) + + +async def _create_parent_symlink_escape_fixture(sandbox, base_dir: str, name: str): + allowed_dir = f"{base_dir}/{name}" + outside_dir = 
f"/var/tmp/{make_test_name(name)}" + outside_file = f"{outside_dir}/secret.txt" + link_dir = f"{allowed_dir}/evil" + escaped_file = f"{link_dir}/secret.txt" + setup = await sandbox.exec( + _bash_exec( + " && ".join( + [ + f'mkdir -p "{allowed_dir}"', + f'mkdir -p "{outside_dir}"', + f'printf "outside secret" > "{outside_file}"', + f'ln -sfn "{outside_dir}" "{link_dir}"', + ] + ) + ) + ) + assert setup.exit_code == 0 + return { + "allowed_dir": allowed_dir, + "outside_dir": outside_dir, + "outside_file": outside_file, + "link_dir": link_dir, + "escaped_file": escaped_file, + } + + +@pytest.mark.anyio +async def test_async_sandbox_files_e2e(): + client = create_async_client() + sandbox = None + base_dir = f"/tmp/{make_test_name('py-async-files')}" + + try: + sandbox = await client.sandboxes.create( + default_sandbox_params("py-async-files") + ) + await wait_for_runtime_ready_async(sandbox) + + assert await sandbox.files.exists(f"{base_dir}/missing.txt") is False + + path = f"{base_dir}/dirs/root" + assert await sandbox.files.make_dir(path) is True + assert await sandbox.files.make_dir(path) is False + + info_path = f"{base_dir}/info/hello.txt" + await sandbox.files.write_text(info_path, "hello from sdk files") + info = await sandbox.files.get_info(info_path) + assert info.name == "hello.txt" + assert info.path == info_path + assert info.type == "file" + assert info.size == len("hello from sdk files") + assert info.mode == 0o644 + assert info.permissions == "-rw-r--r--" + assert info.owner + assert info.group + assert info.modified_time is not None + + list_dir = f"{base_dir}/list" + await sandbox.files.make_dir(f"{list_dir}/nested/inner", parents=True) + await sandbox.files.write_text(f"{list_dir}/root.txt", "root") + await sandbox.files.write_text(f"{list_dir}/nested/child.txt", "child") + await sandbox.files.write_text( + f"{list_dir}/nested/inner/grandchild.txt", "grandchild" + ) + + depth_one = await sandbox.files.list(list_dir, depth=1) + assert [entry.name for 
entry in depth_one] == ["nested", "root.txt"] + assert [entry.type for entry in depth_one] == ["dir", "file"] + + depth_two = await sandbox.files.list(list_dir, depth=2) + assert [entry.path for entry in depth_two] == [ + f"{list_dir}/nested", + f"{list_dir}/nested/child.txt", + f"{list_dir}/nested/inner", + f"{list_dir}/root.txt", + ] + + symlink_dir = f"{base_dir}/list-symlink" + target = f"{symlink_dir}/target.txt" + link = f"{symlink_dir}/link.txt" + await sandbox.files.make_dir(symlink_dir) + await sandbox.files.write_text(target, "payload") + result = await sandbox.exec(_bash_exec(f'ln -sfn "{target}" "{link}"')) + assert result.exit_code == 0 + link_entry = next( + entry + for entry in await sandbox.files.list(symlink_dir, depth=1) + if entry.path == link + ) + assert link_entry.symlink_target == target + + symlink_target = f"{base_dir}/symlink/target.txt" + symlink_link = f"{base_dir}/symlink/link.txt" + await sandbox.files.write_text(symlink_target, "target") + result = await sandbox.exec( + _bash_exec( + f'mkdir -p "{base_dir}/symlink" && ln -sfn "{symlink_target}" "{symlink_link}"' + ) + ) + assert result.exit_code == 0 + assert ( + await sandbox.files.get_info(symlink_link) + ).symlink_target == symlink_target + + broken_target = f"{base_dir}/symlink-broken/missing-target.txt" + broken_link = f"{base_dir}/symlink-broken/link.txt" + result = await sandbox.exec( + _bash_exec( + f'mkdir -p "{base_dir}/symlink-broken" && ln -sfn "{broken_target}" "{broken_link}"' + ) + ) + assert result.exit_code == 0 + assert await sandbox.files.exists(broken_link) is True + assert ( + await sandbox.files.get_info(broken_link) + ).symlink_target == broken_target + + read_path = f"{base_dir}/read/readme.txt" + await sandbox.files.write_text(read_path, "hello from sdk files") + assert await sandbox.files.read(read_path) == "hello from sdk files" + assert ( + await sandbox.files.read(read_path, format="text", offset=6, length=4) + == "from" + ) + assert ( + await 
sandbox.files.read(read_path, format="bytes") + == b"hello from sdk files" + ) + assert ( + await sandbox.files.read(read_path, format="blob") + == b"hello from sdk files" + ) + assert ( + _read_stream_text(await sandbox.files.read(read_path, format="stream")) + == "hello from sdk files" + ) + + single = await sandbox.files.write( + f"{base_dir}/write/single.txt", "single file" + ) + assert single.name == "single.txt" + assert single.path == f"{base_dir}/write/single.txt" + assert await sandbox.files.read_text(single.path) == "single file" + + batch = await sandbox.files.write( + [ + SandboxFileWriteEntry( + path=f"{base_dir}/write/batch-a.txt", + data="batch-a", + ), + SandboxFileWriteEntry( + path=f"{base_dir}/write/batch-b.bin", + data=bytes([1, 2, 3, 4]), + ), + ] + ) + assert [entry.name for entry in batch] == ["batch-a.txt", "batch-b.bin"] + assert ( + await sandbox.files.read_text(f"{base_dir}/write/batch-a.txt") == "batch-a" + ) + assert await sandbox.files.read_bytes(f"{base_dir}/write/batch-b.bin") == bytes( + [1, 2, 3, 4] + ) + + text_path = f"{base_dir}/write-options/text.txt" + await sandbox.files.write_text(text_path, "hello", mode="0640") + await sandbox.files.write_text(text_path, " world", append=True) + assert await sandbox.files.read_text(text_path) == "hello world" + assert (await sandbox.files.get_info(text_path)).mode == 0o640 + + bytes_path = f"{base_dir}/write-options/bytes.bin" + await sandbox.files.write_bytes(bytes_path, bytes([1, 2]), mode="0600") + await sandbox.files.write_bytes(bytes_path, bytes([3]), append=True) + assert await sandbox.files.read_bytes(bytes_path) == bytes([1, 2, 3]) + + transfer_path = f"{base_dir}/transfer/upload.txt" + uploaded = await sandbox.files.upload(transfer_path, "uploaded from sdk") + assert uploaded.bytes_written > 0 + assert (await sandbox.files.download(transfer_path)).decode( + "utf-8" + ) == "uploaded from sdk" + + file_path = f"{base_dir}/rename/hello.txt" + renamed_path = 
f"{base_dir}/rename/hello-renamed.txt" + await sandbox.files.write_text(file_path, "rename me") + renamed = await sandbox.files.rename(file_path, renamed_path) + assert renamed.path == renamed_path + assert await sandbox.files.exists(file_path) is False + assert await sandbox.files.read_text(renamed_path) == "rename me" + + link_path = f"{base_dir}/rename/hello-link.txt" + copied_link_path = f"{base_dir}/rename/hello-link-copy.txt" + renamed_link_path = f"{base_dir}/rename/hello-link-renamed.txt" + result = await sandbox.exec( + _bash_exec(f'ln -sfn "{renamed_path}" "{link_path}"') + ) + assert result.exit_code == 0 + copied_link = await sandbox.files.copy( + source=link_path, destination=copied_link_path + ) + assert copied_link.path == copied_link_path + assert ( + await sandbox.files.get_info(copied_link_path) + ).symlink_target == renamed_path + renamed_link = await sandbox.files.rename(copied_link_path, renamed_link_path) + assert renamed_link.path == renamed_link_path + assert ( + await sandbox.files.get_info(renamed_link_path) + ).symlink_target == renamed_path + + target_dir = f"{base_dir}/rename-dir/target-dir" + link_dir = f"{base_dir}/rename-dir/link-dir" + renamed_link_dir = f"{base_dir}/rename-dir/link-dir-renamed" + await sandbox.files.make_dir(target_dir) + await sandbox.files.write_text(f"{target_dir}/child.txt", "child") + result = await sandbox.exec(_bash_exec(f'ln -sfn "{target_dir}" "{link_dir}"')) + assert result.exit_code == 0 + renamed = await sandbox.files.rename(link_dir, renamed_link_dir) + assert renamed.path == renamed_link_dir + assert ( + await sandbox.files.get_info(renamed_link_dir) + ).symlink_target == target_dir + assert [ + entry.path for entry in await sandbox.files.list(renamed_link_dir, depth=1) + ] == [f"{target_dir}/child.txt"] + + source_dir = f"{base_dir}/copy-tree/source" + nested_dir = f"{source_dir}/nested" + nested_target = f"{nested_dir}/target.txt" + destination_dir = f"{base_dir}/copy-tree/destination" + await 
sandbox.files.make_dir(nested_dir) + await sandbox.files.write_text(nested_target, "payload") + result = await sandbox.exec( + _bash_exec(f'cd "{nested_dir}" && ln -sfn "target.txt" "link.txt"') + ) + assert result.exit_code == 0 + await sandbox.files.copy( + source=source_dir, destination=destination_dir, recursive=True + ) + copied_target = f"{destination_dir}/nested/target.txt" + copied_link = f"{destination_dir}/nested/link.txt" + assert await sandbox.files.read_text(copied_target) == "payload" + assert ( + await sandbox.files.get_info(copied_link) + ).symlink_target == copied_target + + loop_dir = f"{base_dir}/loop-list" + loop_nested_dir = f"{loop_dir}/nested" + await sandbox.files.make_dir(loop_nested_dir) + await sandbox.files.write_text(f"{loop_nested_dir}/child.txt", "payload") + result = await sandbox.exec( + _bash_exec(f'cd "{loop_nested_dir}" && ln -sfn .. loop') + ) + assert result.exit_code == 0 + loop_entries = await sandbox.files.list(loop_dir, depth=4) + loop_paths = [entry.path for entry in loop_entries] + assert f"{loop_nested_dir}/loop" in loop_paths + assert not any("/loop/" in path for path in loop_paths) + assert ( + await sandbox.files.get_info(f"{loop_nested_dir}/loop") + ).symlink_target == loop_dir + + source_dir = f"{base_dir}/loop-copy/source" + nested_dir = f"{source_dir}/nested" + await sandbox.files.make_dir(nested_dir) + await sandbox.files.write_text(f"{nested_dir}/child.txt", "payload") + result = await sandbox.exec(_bash_exec(f'cd "{nested_dir}" && ln -sfn .. 
loop')) + assert result.exit_code == 0 + destination_dir = f"{base_dir}/loop-copy/destination" + await sandbox.files.copy( + source=source_dir, destination=destination_dir, recursive=True + ) + copied_loop = f"{destination_dir}/nested/loop" + assert ( + await sandbox.files.get_info(copied_loop) + ).symlink_target == destination_dir + assert not any( + "/loop/" in entry.path + for entry in await sandbox.files.list(destination_dir, depth=4) + ) + + source = f"{base_dir}/copy-overwrite/source.txt" + existing_target = f"{base_dir}/copy-overwrite/existing-target.txt" + destination_link = f"{base_dir}/copy-overwrite/destination-link.txt" + await sandbox.files.write_text(source, "source payload") + await sandbox.files.write_text(existing_target, "existing target") + result = await sandbox.exec( + _bash_exec( + f'mkdir -p "{base_dir}/copy-overwrite" && ln -sfn "{existing_target}" "{destination_link}"' + ) + ) + assert result.exit_code == 0 + await sandbox.files.copy( + source=source, destination=destination_link, overwrite=True + ) + assert await sandbox.files.read_text(destination_link) == "source payload" + assert await sandbox.files.read_text(existing_target) == "existing target" + assert (await sandbox.files.get_info(destination_link)).symlink_target is None + + chmod_path = f"{base_dir}/chmod/file.txt" + await sandbox.files.write_text(chmod_path, "chmod me") + await sandbox.files.chmod(path=chmod_path, mode="0640") + assert (await sandbox.files.get_info(chmod_path)).mode == 0o640 + try: + await expect_hyperbrowser_error_async( + "file chown", + lambda: sandbox.files.chown(path=chmod_path, uid=0, gid=0), + status_code=400, + service="runtime", + retryable=False, + message_includes_any=["operation", "permission"], + ) + except AssertionError as error: + if "expected HyperbrowserError, but call succeeded" not in str(error): + raise + assert (await sandbox.files.get_info(chmod_path)).name == "file.txt" + + remove_path = f"{base_dir}/remove/file.txt" + await 
sandbox.files.write_text(remove_path, "remove me") + await sandbox.files.remove(remove_path) + assert await sandbox.files.exists(remove_path) is False + await sandbox.files.remove(remove_path) + await sandbox.files.remove(f"{base_dir}/remove", recursive=True) + assert await sandbox.files.exists(f"{base_dir}/remove") is False + + target = f"{base_dir}/remove-link/target.txt" + link = f"{base_dir}/remove-link/link.txt" + await sandbox.files.write_text(target, "keep me") + result = await sandbox.exec( + _bash_exec( + f'mkdir -p "{base_dir}/remove-link" && ln -sfn "{target}" "{link}"' + ) + ) + assert result.exit_code == 0 + await sandbox.files.remove(link) + assert await sandbox.files.exists(link) is False + assert await sandbox.files.read_text(target) == "keep me" + + target_dir = f"{base_dir}/remove-recursive/target-dir" + target_file = f"{target_dir}/child.txt" + link_dir = f"{base_dir}/remove-recursive/link-dir" + await sandbox.files.make_dir(target_dir) + await sandbox.files.write_text(target_file, "keep tree") + result = await sandbox.exec( + _bash_exec( + f'mkdir -p "{base_dir}/remove-recursive" && ln -sfn "{target_dir}" "{link_dir}"' + ) + ) + assert result.exit_code == 0 + await sandbox.files.remove(link_dir, recursive=True) + assert await sandbox.files.exists(link_dir) is False + assert await sandbox.files.read_text(target_file) == "keep tree" + + link = f"{base_dir}/escape/file-link" + result = await sandbox.exec( + _bash_exec(f'mkdir -p "{base_dir}/escape" && ln -sfn /etc/hosts "{link}"') + ) + assert result.exit_code == 0 + text = await sandbox.files.read_text(link) + assert "localhost" in text + assert "localhost" in (await sandbox.files.download(link)).decode("utf-8") + + fixture = await _create_parent_symlink_escape_fixture( + sandbox, base_dir, "parent-escape-read" + ) + assert ( + await sandbox.files.read_text(fixture["escaped_file"]) == "outside secret" + ) + assert (await sandbox.files.download(fixture["escaped_file"])).decode( + "utf-8" + ) == 
"outside secret" + assert [ + entry.path + for entry in await sandbox.files.list(fixture["link_dir"], depth=1) + ] == [f"{fixture['outside_dir']}/secret.txt"] + seen = asyncio.get_running_loop().create_future() + + async def on_parent_event(event): + if event.type == "write" and event.name == "fresh.txt" and not seen.done(): + seen.set_result(event.name) + + handle = await sandbox.files.watch_dir(fixture["link_dir"], on_parent_event) + try: + await sandbox.files.write_text( + f"{fixture['outside_dir']}/fresh.txt", "watch parent link" + ) + assert await _await_future(seen) == "fresh.txt" + finally: + await handle.stop() + + fixture = await _create_parent_symlink_escape_fixture( + sandbox, base_dir, "parent-escape-mutate" + ) + info = await sandbox.files.get_info(fixture["escaped_file"]) + assert info.type == "file" + assert info.size == len("outside secret") + copied = await sandbox.files.copy( + source=fixture["escaped_file"], + destination=f"{base_dir}/parent-escape-mutate/copied.txt", + ) + assert copied.path == f"{base_dir}/parent-escape-mutate/copied.txt" + assert await sandbox.files.read_text(copied.path) == "outside secret" + renamed = await sandbox.files.rename( + fixture["escaped_file"], + f"{base_dir}/parent-escape-mutate/renamed.txt", + ) + assert renamed.path == f"{base_dir}/parent-escape-mutate/renamed.txt" + assert await sandbox.files.exists(fixture["outside_file"]) is False + assert await sandbox.files.read_text(renamed.path) == "outside secret" + await sandbox.files.write_text(fixture["escaped_file"], "remove me") + await sandbox.files.remove(fixture["escaped_file"]) + outside_read = await sandbox.exec( + _bash_exec( + f'if [ -e "{fixture["outside_file"]}" ]; then cat "{fixture["outside_file"]}"; else printf "__MISSING__"; fi' + ) + ) + assert outside_read.exit_code == 0 + assert outside_read.stdout.strip() == "__MISSING__" + + target_dir = f"/var/tmp/{make_test_name('watch-outside-target')}" + target_file = f"{target_dir}/child.txt" + link = 
f"{base_dir}/escape/dir-link" + result = await sandbox.exec( + _bash_exec( + f'mkdir -p "{base_dir}/escape" "{target_dir}" && printf "child" > "{target_file}" && ln -sfn "{target_dir}" "{link}"' + ) + ) + assert result.exit_code == 0 + assert [entry.path for entry in await sandbox.files.list(link, depth=1)] == [ + target_file + ] + seen = asyncio.get_running_loop().create_future() + + async def on_link_event(event): + if event.type == "write" and event.name == "file.txt" and not seen.done(): + seen.set_result(event.name) + + handle = await sandbox.files.watch_dir(link, on_link_event) + try: + await sandbox.files.write_text( + f"{target_dir}/file.txt", "watch through link" + ) + assert await _await_future(seen) == "file.txt" + finally: + await handle.stop() + + watch_dir = f"{base_dir}/watch" + await sandbox.files.make_dir(f"{watch_dir}/nested", parents=True) + direct_future = asyncio.get_running_loop().create_future() + recursive_future = asyncio.get_running_loop().create_future() + + async def on_direct(event): + if ( + event.type == "write" + and event.name == "direct.txt" + and not direct_future.done() + ): + direct_future.set_result(event.name) + + async def on_recursive(event): + if ( + event.type == "write" + and event.name == "nested/recursive.txt" + and not recursive_future.done() + ): + recursive_future.set_result(event.name) + + direct_handle = await sandbox.files.watch_dir(watch_dir, on_direct) + recursive_handle = await sandbox.files.watch_dir( + watch_dir, + on_recursive, + recursive=True, + ) + try: + await sandbox.files.write_text(f"{watch_dir}/direct.txt", "watch me") + await sandbox.files.write_text( + f"{watch_dir}/nested/recursive.txt", "watch me too" + ) + assert await _await_future(direct_future) == "direct.txt" + assert await _await_future(recursive_future) == "nested/recursive.txt" + finally: + await direct_handle.stop() + await recursive_handle.stop() + + await expect_hyperbrowser_error_async( + "watch missing directory", + lambda: 
sandbox.files.watch_dir( + f"{base_dir}/watch-missing", lambda event: None + ), + status_code=404, + service="runtime", + retryable=False, + message_includes_any=["not found", "no such file"], + ) + + invalid_file_path = f"{base_dir}/watch-invalid/file.txt" + await sandbox.files.write_text(invalid_file_path, "not a directory") + await expect_hyperbrowser_error_async( + "watch file path", + lambda: sandbox.files.watch_dir(invalid_file_path, lambda event: None), + status_code=400, + service="runtime", + retryable=False, + message_includes="not a directory", + ) + + path = f"{base_dir}/presign/file.txt" + upload = await sandbox.files.upload_url(path, one_time=True) + assert upload.path == path + assert upload.method == "PUT" + upload_response = await asyncio.to_thread( + fetch_signed_url, + upload.url, + method=upload.method, + body="presigned upload body", + ) + assert upload_response.status_code == 200 + assert await sandbox.files.read_text(path) == "presigned upload body" + + download = await sandbox.files.download_url(path, one_time=True) + assert download.path == path + assert download.method == "GET" + download_response = await asyncio.to_thread( + fetch_signed_url, + download.url, + method=download.method, + ) + assert download_response.status_code == 200 + assert download_response.text == "presigned upload body" + + path = f"{base_dir}/presign-race/upload.txt" + upload = await sandbox.files.upload_url(path, one_time=True) + first, second = await asyncio.gather( + asyncio.to_thread( + fetch_signed_url, + upload.url, + method=upload.method, + body="first body", + ), + asyncio.to_thread( + fetch_signed_url, + upload.url, + method=upload.method, + body="second body", + ), + ) + assert sorted([first.status_code, second.status_code]) == [200, 401] + assert await sandbox.files.read_text(path) in {"first body", "second body"} + + path = f"{base_dir}/presign-race/download.txt" + await sandbox.files.write_text(path, "download once") + download = await 
sandbox.files.download_url(path, one_time=True) + first, second = await asyncio.gather( + asyncio.to_thread(fetch_signed_url, download.url, method=download.method), + asyncio.to_thread(fetch_signed_url, download.url, method=download.method), + ) + assert sorted([first.status_code, second.status_code]) == [200, 401] + assert "download once" in {first.text, second.text} + + source = f"{base_dir}/rename-race/source.txt" + left = f"{base_dir}/rename-race/left.txt" + right = f"{base_dir}/rename-race/right.txt" + await sandbox.files.write_text(source, "race") + results = await asyncio.gather( + sandbox.files.rename(source, left), + sandbox.files.rename(source, right), + return_exceptions=True, + ) + fulfilled = [result for result in results if not isinstance(result, Exception)] + rejected = [result for result in results if isinstance(result, Exception)] + assert len(fulfilled) == 1 + assert len(rejected) == 1 + await expect_hyperbrowser_error_async( + "rename race failure", + lambda: _async_raise(rejected[0]), + status_code=404, + service="runtime", + retryable=False, + message_includes_any=["not found", "no such file"], + ) + winner_path = left if await sandbox.files.exists(left) else right + assert await sandbox.files.read_text(winner_path) == "race" + + await expect_hyperbrowser_error_async( + "missing file read", + lambda: sandbox.files.read_text(f"{base_dir}/still-missing.txt"), + status_code=404, + service="runtime", + retryable=False, + message_includes_any=["not found", "no such file"], + ) + + try: + await sandbox.files.list(base_dir, depth=0) + except ValueError as error: + assert "depth should be at least one" in str(error) + else: + raise AssertionError("expected invalid depth to fail locally") + finally: + await stop_sandbox_if_running_async(sandbox) + await client.close() + + +async def _async_raise(error): + raise error diff --git a/tests/sandbox/e2e/test_async_lifecycle.py b/tests/sandbox/e2e/test_async_lifecycle.py new file mode 100644 index 
00000000..074cccbb --- /dev/null +++ b/tests/sandbox/e2e/test_async_lifecycle.py @@ -0,0 +1,279 @@ +import asyncio +from datetime import datetime, timedelta, timezone +from uuid import uuid4 + +import pytest + +from hyperbrowser.exceptions import HyperbrowserError +from hyperbrowser.models import CreateSandboxParams, SandboxRuntimeSession + +from tests.helpers.config import DEFAULT_IMAGE_NAME, create_async_client +from tests.helpers.errors import expect_hyperbrowser_error_async +from tests.helpers.http import get_image_by_name_async +from tests.helpers.sandbox import ( + default_sandbox_params, + stop_sandbox_if_running_async, + wait_for_runtime_ready_async, +) + +CUSTOM_IMAGE_NAME = "node" +SNAPSHOT_CREATE_RETRY_DELAY_SECONDS = 0.5 +SNAPSHOT_CREATE_RETRY_TIMEOUT_SECONDS = 60 + + +async def _create_sandbox_with_snapshot_retry(client, params: CreateSandboxParams): + deadline = asyncio.get_running_loop().time() + SNAPSHOT_CREATE_RETRY_TIMEOUT_SECONDS + last_error = None + + while asyncio.get_running_loop().time() < deadline: + try: + return await client.sandboxes.create(params) + except HyperbrowserError as error: + is_snapshot_catalog_race = ( + error.status_code == 404 + and isinstance(str(error), str) + and "snapshot not found" in str(error).lower() + ) + if not is_snapshot_catalog_race: + raise + last_error = error + await asyncio.sleep(SNAPSHOT_CREATE_RETRY_DELAY_SECONDS) + + if isinstance(last_error, Exception): + raise last_error + raise RuntimeError("snapshot create retry failed") + + +@pytest.mark.anyio +async def test_async_sandbox_lifecycle_e2e(): + client = create_async_client() + sandbox = None + stale_handle = None + secondary = None + image_sandbox = None + custom_image_sandbox = None + custom_snapshot_sandbox = None + memory_snapshot = None + custom_image_memory_snapshot = None + custom_image = None + + try: + sandbox = await client.sandboxes.create( + default_sandbox_params("py-async-lifecycle") + ) + stale_handle = await 
client.sandboxes.get(sandbox.id) + custom_image = await get_image_by_name_async(CUSTOM_IMAGE_NAME) + await wait_for_runtime_ready_async(sandbox) + + detail = sandbox.to_dict() + assert detail["token"] + assert sandbox.runtime.base_url + assert sandbox.token_expires_at is not None + + stale_detail = stale_handle.to_dict() + assert stale_detail["token"] + assert stale_handle.runtime.base_url == sandbox.runtime.base_url + + info = await sandbox.info() + assert info.id == sandbox.id + await sandbox.refresh() + assert sandbox.status == "active" + + await sandbox.connect() + assert sandbox.status == "active" + + memory_snapshot = await sandbox.create_memory_snapshot() + assert memory_snapshot.snapshot_name + assert memory_snapshot.snapshot_id + assert memory_snapshot.namespace + assert memory_snapshot.status + assert memory_snapshot.image_name + assert memory_snapshot.image_id + assert memory_snapshot.image_namespace + + # Snapshot creation can briefly disrupt the next fast exec on the same handle. 
+ await wait_for_runtime_ready_async(sandbox) + + valid_detail = await sandbox.info() + invalid_jwt = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.e30.invalid-signature" + refresh_count = 0 + original_get_detail = sandbox._service.get_detail + + sandbox._runtime_session = SandboxRuntimeSession( + sandbox_id=sandbox.id, + status=valid_detail.status, + region=valid_detail.region, + token=invalid_jwt, + token_expires_at=datetime.now(timezone.utc) + timedelta(hours=1), + runtime=valid_detail.runtime, + ) + sandbox._detail = valid_detail.model_copy( + update={ + "token": invalid_jwt, + "token_expires_at": sandbox._runtime_session.token_expires_at, + } + ) + + async def patched_get_detail(sandbox_id: str): + nonlocal refresh_count + refresh_count += 1 + return await original_get_detail(sandbox_id) + + sandbox._service.get_detail = patched_get_detail + try: + result = await sandbox.exec("echo runtime-refresh-ok") + assert result.exit_code == 0 + assert "runtime-refresh-ok" in result.stdout + assert refresh_count > 0 + assert sandbox.to_dict()["token"] + assert sandbox.to_dict()["token"] != invalid_jwt + finally: + sandbox._service.get_detail = original_get_detail + + image_sandbox = await client.sandboxes.create( + CreateSandboxParams(image_name=DEFAULT_IMAGE_NAME) + ) + assert image_sandbox.id + assert image_sandbox.status == "active" + response = await image_sandbox.stop() + assert response.success is True + assert image_sandbox.status == "closed" + + custom_image_sandbox = await client.sandboxes.create( + CreateSandboxParams( + image_name=custom_image["imageName"], + image_id=custom_image["id"], + ) + ) + assert custom_image_sandbox.id + assert custom_image_sandbox.status == "active" + await wait_for_runtime_ready_async(custom_image_sandbox) + + custom_image_memory_snapshot = ( + await custom_image_sandbox.create_memory_snapshot() + ) + assert custom_image_memory_snapshot.image_name == custom_image["imageName"] + assert custom_image_memory_snapshot.image_id == 
custom_image["id"] + assert custom_image_memory_snapshot.image_namespace == custom_image["namespace"] + + custom_snapshot_sandbox = await _create_sandbox_with_snapshot_retry( + client, + CreateSandboxParams( + snapshot_name=custom_image_memory_snapshot.snapshot_name, + snapshot_id=custom_image_memory_snapshot.snapshot_id, + ), + ) + assert custom_snapshot_sandbox.id + assert custom_snapshot_sandbox.status == "active" + response = await custom_snapshot_sandbox.stop() + assert response.success is True + assert custom_snapshot_sandbox.status == "closed" + + await expect_hyperbrowser_error_async( + "mismatched image selector", + lambda: client.sandboxes.create( + CreateSandboxParams( + image_name=custom_image["imageName"], + image_id=str(uuid4()), + ) + ), + status_code=404, + service="control", + retryable=False, + message_includes_any=["image not found", "not found"], + ) + + await expect_hyperbrowser_error_async( + "mismatched snapshot selector", + lambda: client.sandboxes.create( + CreateSandboxParams( + snapshot_name=memory_snapshot.snapshot_name, + snapshot_id=str(uuid4()), + ) + ), + status_code=404, + service="control", + retryable=False, + message_includes_any=["snapshot not found", "not found"], + ) + + response = await sandbox.stop() + assert response.success is True + assert sandbox.status == "closed" + + await expect_hyperbrowser_error_async( + "stopped sandbox memory snapshot", + lambda: sandbox.create_memory_snapshot(), + status_code=409, + service="control", + retryable=False, + message_includes="Sandbox is not running", + ) + + await expect_hyperbrowser_error_async( + "stopped sandbox connect", + lambda: sandbox.connect(), + status_code=409, + code="sandbox_not_running", + service="runtime", + retryable=False, + message_includes="not running", + ) + + await expect_hyperbrowser_error_async( + "stopped sandbox exec", + lambda: sandbox.exec("echo should-not-run"), + status_code=409, + code="sandbox_not_running", + service="runtime", + retryable=False, + 
message_includes="not running", + ) + + await expect_hyperbrowser_error_async( + "stale sandbox connect", + lambda: stale_handle.connect(), + status_code=409, + code="sandbox_not_running", + service="runtime", + retryable=False, + message_includes="not running", + ) + + await expect_hyperbrowser_error_async( + "stopped sandbox reconnect", + lambda: client.sandboxes.connect(sandbox.id), + status_code=409, + code="sandbox_not_running", + service="runtime", + retryable=False, + message_includes="not running", + ) + + await expect_hyperbrowser_error_async( + "missing sandbox get", + lambda: client.sandboxes.get(str(uuid4())), + status_code=404, + service="control", + retryable=False, + message_includes="not found", + ) + + secondary = await _create_sandbox_with_snapshot_retry( + client, + CreateSandboxParams( + snapshot_name=memory_snapshot.snapshot_name, + snapshot_id=memory_snapshot.snapshot_id, + ), + ) + response = await secondary.stop() + assert response.success is True + assert secondary.status == "closed" + finally: + await stop_sandbox_if_running_async(sandbox) + await stop_sandbox_if_running_async(stale_handle) + await stop_sandbox_if_running_async(secondary) + await stop_sandbox_if_running_async(image_sandbox) + await stop_sandbox_if_running_async(custom_image_sandbox) + await stop_sandbox_if_running_async(custom_snapshot_sandbox) + await client.close() diff --git a/tests/sandbox/e2e/test_async_process.py b/tests/sandbox/e2e/test_async_process.py new file mode 100644 index 00000000..6c5c4eaa --- /dev/null +++ b/tests/sandbox/e2e/test_async_process.py @@ -0,0 +1,149 @@ +import pytest + +from hyperbrowser.models import SandboxExecParams + +from tests.helpers.config import create_async_client +from tests.helpers.errors import expect_hyperbrowser_error_async +from tests.helpers.sandbox import ( + default_sandbox_params, + stop_sandbox_if_running_async, + wait_for_runtime_ready_async, +) + + +async def _collect_process_stream(events): + output = [] + async for 
event in events: + output.append(event) + if event.type == "exit": + break + return output + + +@pytest.mark.anyio +async def test_async_sandbox_process_e2e(): + client = create_async_client() + sandbox = None + + try: + sandbox = await client.sandboxes.create( + default_sandbox_params("py-async-process") + ) + await wait_for_runtime_ready_async(sandbox) + + result = await sandbox.exec("echo process-exec-ok") + assert result.exit_code == 0 + assert "process-exec-ok" in result.stdout + + result = await sandbox.exec( + SandboxExecParams( + command="bash", + args=["-lc", "echo process-exec-fail 1>&2; exit 7"], + ) + ) + assert result.exit_code == 7 + assert "process-exec-fail" in result.stderr + + stdin_process = await sandbox.processes.start( + SandboxExecParams( + command="bash", + args=["-lc", "read line; echo stdout:$line; echo stderr:$line 1>&2"], + ) + ) + fetched = await sandbox.get_process(stdin_process.id) + assert fetched.id == stdin_process.id + + listing = await sandbox.processes.list(limit=20) + assert any(entry.id == stdin_process.id for entry in listing.data) + + await stdin_process.write_stdin("sdk-stdin\n", eof=True) + result = await stdin_process.wait() + assert result.exit_code == 0 + assert "stdout:sdk-stdin" in result.stdout + assert "stderr:sdk-stdin" in result.stderr + + running_process = await sandbox.processes.start( + SandboxExecParams(command="bash", args=["-lc", "sleep 30"]) + ) + refreshed = await running_process.refresh() + assert refreshed.status in {"queued", "running"} + result = await running_process.kill() + assert result.status not in {"queued", "running"} + + streamed = await sandbox.processes.start( + SandboxExecParams( + command="bash", + args=["-lc", "echo stream-out; echo stream-err 1>&2"], + ) + ) + events = await _collect_process_stream(streamed.stream()) + assert any( + event.type == "stdout" and "stream-out" in event.data for event in events + ) + assert any( + event.type == "stderr" and "stream-err" in event.data for event 
in events + ) + assert any(event.type == "exit" for event in events) + + result_process = await sandbox.processes.start( + SandboxExecParams(command="bash", args=["-lc", "echo result-alias-ok"]) + ) + result = await result_process.result() + assert result.exit_code == 0 + assert "result-alias-ok" in result.stdout + + noisy_process = await sandbox.processes.start( + SandboxExecParams( + command="bash", + args=[ + "-lc", + 'yes "process-replay-window-overflow-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" | head -n 120000', + ], + ) + ) + result = await noisy_process.result() + assert len(result.stdout) > 3 * 1024 * 1024 + + await expect_hyperbrowser_error_async( + "process replay window expired", + lambda: _collect_process_stream(noisy_process.stream(1)), + status_code=410, + code="replay_window_expired", + service="runtime", + retryable=False, + message_includes="Replay window expired", + ) + + timeout_process = await sandbox.processes.start( + SandboxExecParams(command="bash", args=["-lc", "sleep 10"]) + ) + await expect_hyperbrowser_error_async( + "process wait timeout", + lambda: timeout_process.wait(timeout_ms=100), + status_code=408, + service="runtime", + retryable=False, + message_includes="timed out", + ) + await timeout_process.signal("TERM") + result = await timeout_process.wait(timeout_ms=3000) + assert result.status in {"exited", "failed", "killed", "timed_out"} + + kill_process = await sandbox.processes.start( + SandboxExecParams(command="bash", args=["-lc", "sleep 30"]) + ) + result = await kill_process.kill() + assert result.status not in {"queued", "running"} + assert kill_process.status not in {"queued", "running"} + + await expect_hyperbrowser_error_async( + "missing process get", + lambda: sandbox.get_process("proc_missing"), + status_code=404, + service="runtime", + retryable=False, + message_includes="not found", + ) + finally: + await stop_sandbox_if_running_async(sandbox) + await client.close() diff --git 
a/tests/sandbox/e2e/test_async_sudo.py b/tests/sandbox/e2e/test_async_sudo.py new file mode 100644 index 00000000..96b845f4 --- /dev/null +++ b/tests/sandbox/e2e/test_async_sudo.py @@ -0,0 +1,62 @@ +import pytest + +from hyperbrowser.models import SandboxExecParams + +from tests.helpers.config import create_async_client +from tests.helpers.sandbox import ( + default_sandbox_params, + stop_sandbox_if_running_async, + wait_for_runtime_ready_async, +) + + +def _bash_exec(command: str) -> SandboxExecParams: + return SandboxExecParams(command="bash", args=["-lc", command]) + + +@pytest.mark.anyio +async def test_async_sandbox_sudo_e2e(): + client = create_async_client() + sandbox = None + + try: + sandbox = await client.sandboxes.create(default_sandbox_params("py-async-sudo")) + await wait_for_runtime_ready_async(sandbox) + + path = "/tmp/sdk-sudo-check.txt" + + runtime_user = await sandbox.exec(_bash_exec("whoami && id -u && id -g")) + assert runtime_user.exit_code == 0 + assert "ubuntu" in runtime_user.stdout + assert "1000" in runtime_user.stdout + + direct_chown = await sandbox.exec( + _bash_exec( + " && ".join( + [ + f'printf "sudo-check" > "{path}"', + f'chown root:root "{path}"', + ] + ) + ) + ) + assert direct_chown.exit_code != 0 + assert "operation not permitted" in direct_chown.stderr.lower() + + sudo_result = await sandbox.exec( + _bash_exec( + " && ".join( + [ + "sudo -n whoami", + f'sudo -n chown root:root "{path}"', + f"stat -c '%U:%G' \"{path}\"", + ] + ) + ) + ) + assert sudo_result.exit_code == 0 + assert "root" in sudo_result.stdout + assert "root:root" in sudo_result.stdout + finally: + await stop_sandbox_if_running_async(sandbox) + await client.close() diff --git a/tests/sandbox/e2e/test_async_terminal_smoke.py b/tests/sandbox/e2e/test_async_terminal_smoke.py new file mode 100644 index 00000000..3518317f --- /dev/null +++ b/tests/sandbox/e2e/test_async_terminal_smoke.py @@ -0,0 +1,240 @@ +import asyncio + +import pytest + +from hyperbrowser.models 
import SandboxTerminalCreateParams + +from tests.helpers.config import create_async_client +from tests.helpers.errors import expect_hyperbrowser_error_async +from tests.helpers.sandbox import ( + default_sandbox_params, + stop_sandbox_if_running_async, + wait_for_runtime_ready_async, +) + + +async def _collect_terminal_session(connection): + output = "" + exit_code = None + + async for event in connection.events(): + if event.type == "output": + output += event.data + continue + exit_code = event.status.exit_code + break + + return output, exit_code + + +def _terminal_status_output(status) -> str: + return "".join(chunk.data for chunk in ((status.output if status else None) or [])) + + +def _terminal_status_raw_output(status) -> str: + return b"".join( + chunk.raw for chunk in ((status.output if status else None) or []) + ).decode("utf-8") + + +async def _wait_for_terminal_status_output( + read_status, + marker: str, + timeout_seconds: float = 5.0, +): + deadline = asyncio.get_running_loop().time() + timeout_seconds + last_status = None + + while asyncio.get_running_loop().time() < deadline: + last_status = await read_status() + if marker in _terminal_status_output(last_status): + return last_status + await asyncio.sleep(0.1) + + raise AssertionError( + f"timed out waiting for terminal output {marker!r}; " + f"last output={_terminal_status_output(last_status)!r}" + ) + + +@pytest.mark.anyio +async def test_async_sandbox_terminal_e2e(): + client = create_async_client() + sandbox = None + + try: + sandbox = await client.sandboxes.create( + default_sandbox_params("py-async-terminal") + ) + await wait_for_runtime_ready_async(sandbox) + + assert sandbox.pty is sandbox.terminal + + terminal = await sandbox.terminal.create( + SandboxTerminalCreateParams( + command="bash", + args=["-l"], + rows=24, + cols=80, + ) + ) + fetched = await sandbox.terminal.get(terminal.id) + assert fetched.id == terminal.id + + connection = await terminal.attach() + try: + await 
terminal.resize(30, 100) + await connection.write("pwd\n") + await connection.write("echo terminal-smoke-ok\n") + await connection.write("exit\n") + + output, exit_code = await _collect_terminal_session(connection) + assert "terminal-smoke-ok" in output + assert exit_code == 0 + finally: + await connection.close() + + status = await terminal.wait(timeout_ms=2000) + assert status.running is False + assert status.exit_code == 0 + + terminal = await sandbox.terminal.create( + SandboxTerminalCreateParams( + command="bash", + args=["-l"], + rows=24, + cols=80, + ) + ) + connection = await terminal.attach() + try: + await connection.resize(32, 110) + refreshed = await terminal.refresh() + assert refreshed.current.rows == 32 + assert refreshed.current.cols == 110 + + await connection.write("exit\n") + _, exit_code = await _collect_terminal_session(connection) + assert exit_code == 0 + finally: + await connection.close() + + status = await terminal.wait(timeout_ms=2000) + assert status.running is False + + marker = "terminal-get-output" + terminal = await sandbox.terminal.create( + SandboxTerminalCreateParams( + command="bash", + args=["-lc", f"printf '{marker}' && sleep 1"], + rows=24, + cols=80, + ) + ) + without_output = await sandbox.terminal.get(terminal.id) + assert without_output.current.output is None + fetched = await _wait_for_terminal_status_output( + lambda: _get_terminal_status(sandbox, terminal.id, include_output=True), + marker, + ) + assert marker in _terminal_status_output(fetched) + assert marker in _terminal_status_raw_output(fetched) + assert fetched.output + status = await terminal.wait(timeout_ms=2000) + assert status.running is False + assert status.exit_code == 0 + + marker = "terminal-refresh-output" + terminal = await sandbox.terminal.create( + SandboxTerminalCreateParams( + command="bash", + args=["-lc", f"printf '{marker}' && sleep 1"], + rows=24, + cols=80, + ) + ) + without_output = await terminal.refresh() + assert 
without_output.current.output is None + refreshed = await _wait_for_terminal_status_output( + lambda: _refresh_terminal_status(terminal, include_output=True), + marker, + ) + assert marker in _terminal_status_output(refreshed) + assert marker in _terminal_status_raw_output(refreshed) + assert refreshed.output + status = await terminal.wait(timeout_ms=2000) + assert status.running is False + assert status.exit_code == 0 + + marker = "terminal-wait-output" + terminal = await sandbox.terminal.create( + SandboxTerminalCreateParams( + command="bash", + args=["-lc", f"printf '{marker}'"], + rows=24, + cols=80, + ) + ) + status = await terminal.wait(timeout_ms=2000, include_output=True) + assert status.running is False + assert status.exit_code == 0 + assert marker in _terminal_status_output(status) + assert marker in _terminal_status_raw_output(status) + assert status.output + + timeout_terminal = await sandbox.pty.create( + SandboxTerminalCreateParams( + command="bash", + args=["-lc", "sleep 10"], + rows=24, + cols=80, + ) + ) + await expect_hyperbrowser_error_async( + "terminal wait timeout", + lambda: timeout_terminal.wait(timeout_ms=100), + status_code=408, + service="runtime", + retryable=False, + message_includes="timed out", + ) + + await timeout_terminal.signal("TERM") + status = await timeout_terminal.wait(timeout_ms=3000) + assert status.running is False + + kill_terminal = await sandbox.pty.create( + SandboxTerminalCreateParams( + command="bash", + args=["-lc", "sleep 30"], + rows=24, + cols=80, + ) + ) + status = await kill_terminal.kill() + assert status.running is False + assert kill_terminal.current.running is False + + await expect_hyperbrowser_error_async( + "missing terminal get", + lambda: sandbox.terminal.get("pty_missing"), + status_code=404, + service="runtime", + retryable=False, + message_includes="not found", + ) + finally: + await stop_sandbox_if_running_async(sandbox) + await client.close() + + +async def _get_terminal_status( + sandbox, 
terminal_id: str, *, include_output: bool = False +): + return ( + await sandbox.terminal.get(terminal_id, include_output=include_output) + ).current + + +async def _refresh_terminal_status(terminal, *, include_output: bool = False): + return (await terminal.refresh(include_output=include_output)).current diff --git a/tests/sandbox/e2e/test_expose.py b/tests/sandbox/e2e/test_expose.py new file mode 100644 index 00000000..34fbcff5 --- /dev/null +++ b/tests/sandbox/e2e/test_expose.py @@ -0,0 +1,132 @@ +import time + +from hyperbrowser.models import SandboxExecParams, SandboxExposeParams + +from tests.helpers.config import create_client +from tests.helpers.errors import expect_hyperbrowser_error +from tests.helpers.http import fetch_runtime_url +from tests.helpers.sandbox import ( + default_sandbox_params, + stop_sandbox_if_running, + wait_for_runtime_ready, +) + +client = create_client() +HTTP_PORT = 3210 + + +def _wait_for_http_response(url: str, *, headers=None, predicate, attempts: int = 15): + last_status = 0 + last_body = "" + + for attempt in range(1, attempts + 1): + try: + response = fetch_runtime_url(url, headers=headers) + body = response.text + last_status = response.status_code + last_body = body + if predicate(response.status_code, body): + return response.status_code, body + except Exception as error: # pragma: no cover - network edge in e2e + last_body = str(error) + + if attempt < attempts: + time.sleep(0.2 * attempt) + + raise AssertionError( + f"did not receive expected response for {url}; " + f"last status={last_status}, last body={last_body!r}" + ) + + +def test_sandbox_expose_e2e(): + sandbox = None + server_process = None + + try: + sandbox = client.sandboxes.create(default_sandbox_params("py-sdk-expose")) + wait_for_runtime_ready(sandbox) + + server_process = sandbox.processes.start( + SandboxExecParams( + command="node", + args=[ + "-e", + " ".join( + [ + "const http = require('http');", + f"const port = {HTTP_PORT};", + "const server = 
http.createServer((req, res) => {", + " res.writeHead(200, {'content-type': 'text/plain'});", + " res.end(`sdk-exposed:${req.method}:${req.url}`);", + "});", + "server.listen(port, '0.0.0.0', () => {", + " console.log(`listening:${port}`);", + "});", + "process.on('SIGTERM', () => server.close(() => process.exit(0)));", + "process.on('SIGINT', () => server.close(() => process.exit(0)));", + ] + ), + ], + ) + ) + + token = sandbox.to_dict()["token"] + assert token + _wait_for_http_response( + sandbox.get_exposed_url(HTTP_PORT), + headers={"Authorization": f"Bearer {token}"}, + predicate=lambda status, _: status == 403, + ) + + expect_hyperbrowser_error( + "reserved receiver port expose", + lambda: sandbox.expose(SandboxExposeParams(port=4001)), + status_code=400, + service="control", + retryable=False, + message_includes="cannot be exposed", + ) + + exposure = sandbox.expose(SandboxExposeParams(port=HTTP_PORT, auth=False)) + assert exposure.port == HTTP_PORT + assert exposure.auth is False + assert exposure.url == sandbox.get_exposed_url(HTTP_PORT) + + status, body = _wait_for_http_response( + exposure.url, + predicate=lambda response_status, response_body: ( + response_status == 200 and "sdk-exposed:GET:/" in response_body + ), + ) + assert status == 200 + assert "sdk-exposed:GET:/" in body + + exposure = sandbox.expose(SandboxExposeParams(port=HTTP_PORT, auth=True)) + assert exposure.auth is True + + status, _ = _wait_for_http_response( + exposure.url, + predicate=lambda response_status, _: response_status == 401, + ) + assert status == 401 + + sandbox.refresh() + token = sandbox.to_dict()["token"] + assert token + status, body = _wait_for_http_response( + exposure.url, + headers={"Authorization": f"Bearer {token}"}, + predicate=lambda response_status, response_body: ( + response_status == 200 and "sdk-exposed:GET:/" in response_body + ), + ) + assert status == 200 + assert "sdk-exposed:GET:/" in body + finally: + if server_process is not None: + try: + 
server_process.kill() + except Exception: + pass + stop_sandbox_if_running(sandbox) diff --git a/tests/sandbox/e2e/test_files.py b/tests/sandbox/e2e/test_files.py new file mode 100644 index 00000000..14962635 --- /dev/null +++ b/tests/sandbox/e2e/test_files.py @@ -0,0 +1,598 @@ +from concurrent.futures import ThreadPoolExecutor +from queue import Empty, Queue + +from hyperbrowser.models import SandboxExecParams, SandboxFileWriteEntry + +from tests.helpers.config import create_client, make_test_name +from tests.helpers.errors import expect_hyperbrowser_error +from tests.helpers.http import fetch_signed_url +from tests.helpers.sandbox import ( + default_sandbox_params, + stop_sandbox_if_running, + wait_for_runtime_ready, +) + +client = create_client() + + +def _bash_exec(command: str) -> SandboxExecParams: + return SandboxExecParams(command="bash", args=["-lc", command]) + + +def _read_stream_text(stream) -> str: + return stream.read().decode("utf-8") + + +def _await_queue_value(queue: Queue, timeout: float = 10.0): + try: + return queue.get(timeout=timeout) + except Empty as error: + raise AssertionError("timed out waiting for watch event") from error + + +def _create_parent_symlink_escape_fixture(sandbox, base_dir: str, name: str): + allowed_dir = f"{base_dir}/{name}" + outside_dir = f"/var/tmp/{make_test_name(name)}" + outside_file = f"{outside_dir}/secret.txt" + link_dir = f"{allowed_dir}/evil" + escaped_file = f"{link_dir}/secret.txt" + setup = sandbox.exec( + _bash_exec( + " && ".join( + [ + f'mkdir -p "{allowed_dir}"', + f'mkdir -p "{outside_dir}"', + f'printf "outside secret" > "{outside_file}"', + f'ln -sfn "{outside_dir}" "{link_dir}"', + ] + ) + ) + ) + assert setup.exit_code == 0 + return { + "allowed_dir": allowed_dir, + "outside_dir": outside_dir, + "outside_file": outside_file, + "link_dir": link_dir, + "escaped_file": escaped_file, + } + + +def test_sandbox_files_e2e(): + sandbox = None + base_dir = f"/tmp/{make_test_name('py-sdk-files')}" + + try: + 
sandbox = client.sandboxes.create(default_sandbox_params("py-sdk-files")) + wait_for_runtime_ready(sandbox) + + assert sandbox.files.exists(f"{base_dir}/missing.txt") is False + + path = f"{base_dir}/dirs/root" + assert sandbox.files.make_dir(path) is True + assert sandbox.files.make_dir(path) is False + + info_path = f"{base_dir}/info/hello.txt" + sandbox.files.write_text(info_path, "hello from sdk files") + info = sandbox.files.get_info(info_path) + assert info.name == "hello.txt" + assert info.path == info_path + assert info.type == "file" + assert info.size == len("hello from sdk files") + assert info.mode == 0o644 + assert info.permissions == "-rw-r--r--" + assert info.owner + assert info.group + assert info.modified_time is not None + + list_dir = f"{base_dir}/list" + sandbox.files.make_dir(f"{list_dir}/nested/inner", parents=True) + sandbox.files.write_text(f"{list_dir}/root.txt", "root") + sandbox.files.write_text(f"{list_dir}/nested/child.txt", "child") + sandbox.files.write_text( + f"{list_dir}/nested/inner/grandchild.txt", "grandchild" + ) + + depth_one = sandbox.files.list(list_dir, depth=1) + assert [entry.name for entry in depth_one] == ["nested", "root.txt"] + assert [entry.type for entry in depth_one] == ["dir", "file"] + + depth_two = sandbox.files.list(list_dir, depth=2) + assert [entry.path for entry in depth_two] == [ + f"{list_dir}/nested", + f"{list_dir}/nested/child.txt", + f"{list_dir}/nested/inner", + f"{list_dir}/root.txt", + ] + + symlink_dir = f"{base_dir}/list-symlink" + target = f"{symlink_dir}/target.txt" + link = f"{symlink_dir}/link.txt" + sandbox.files.make_dir(symlink_dir) + sandbox.files.write_text(target, "payload") + result = sandbox.exec(_bash_exec(f'ln -sfn "{target}" "{link}"')) + assert result.exit_code == 0 + link_entry = next( + entry + for entry in sandbox.files.list(symlink_dir, depth=1) + if entry.path == link + ) + assert link_entry.symlink_target == target + + symlink_target = f"{base_dir}/symlink/target.txt" + 
symlink_link = f"{base_dir}/symlink/link.txt" + sandbox.files.write_text(symlink_target, "target") + result = sandbox.exec( + _bash_exec( + f'mkdir -p "{base_dir}/symlink" && ln -sfn "{symlink_target}" "{symlink_link}"' + ) + ) + assert result.exit_code == 0 + assert sandbox.files.get_info(symlink_link).symlink_target == symlink_target + + broken_target = f"{base_dir}/symlink-broken/missing-target.txt" + broken_link = f"{base_dir}/symlink-broken/link.txt" + result = sandbox.exec( + _bash_exec( + f'mkdir -p "{base_dir}/symlink-broken" && ln -sfn "{broken_target}" "{broken_link}"' + ) + ) + assert result.exit_code == 0 + assert sandbox.files.exists(broken_link) is True + assert sandbox.files.get_info(broken_link).symlink_target == broken_target + + read_path = f"{base_dir}/read/readme.txt" + sandbox.files.write_text(read_path, "hello from sdk files") + assert sandbox.files.read(read_path) == "hello from sdk files" + assert ( + sandbox.files.read(read_path, format="text", offset=6, length=4) == "from" + ) + assert sandbox.files.read(read_path, format="bytes") == b"hello from sdk files" + assert sandbox.files.read(read_path, format="blob") == b"hello from sdk files" + assert ( + _read_stream_text(sandbox.files.read(read_path, format="stream")) + == "hello from sdk files" + ) + + single = sandbox.files.write(f"{base_dir}/write/single.txt", "single file") + assert single.name == "single.txt" + assert single.path == f"{base_dir}/write/single.txt" + assert sandbox.files.read_text(single.path) == "single file" + + batch = sandbox.files.write( + [ + SandboxFileWriteEntry( + path=f"{base_dir}/write/batch-a.txt", + data="batch-a", + ), + SandboxFileWriteEntry( + path=f"{base_dir}/write/batch-b.bin", + data=bytes([1, 2, 3, 4]), + ), + ] + ) + assert [entry.name for entry in batch] == ["batch-a.txt", "batch-b.bin"] + assert sandbox.files.read_text(f"{base_dir}/write/batch-a.txt") == "batch-a" + assert sandbox.files.read_bytes(f"{base_dir}/write/batch-b.bin") == bytes( + [1, 2, 
3, 4] + ) + + text_path = f"{base_dir}/write-options/text.txt" + sandbox.files.write_text(text_path, "hello", mode="0640") + sandbox.files.write_text(text_path, " world", append=True) + assert sandbox.files.read_text(text_path) == "hello world" + assert sandbox.files.get_info(text_path).mode == 0o640 + + bytes_path = f"{base_dir}/write-options/bytes.bin" + sandbox.files.write_bytes(bytes_path, bytes([1, 2]), mode="0600") + sandbox.files.write_bytes(bytes_path, bytes([3]), append=True) + assert sandbox.files.read_bytes(bytes_path) == bytes([1, 2, 3]) + + transfer_path = f"{base_dir}/transfer/upload.txt" + uploaded = sandbox.files.upload(transfer_path, "uploaded from sdk") + assert uploaded.bytes_written > 0 + assert ( + sandbox.files.download(transfer_path).decode("utf-8") == "uploaded from sdk" + ) + + file_path = f"{base_dir}/rename/hello.txt" + renamed_path = f"{base_dir}/rename/hello-renamed.txt" + sandbox.files.write_text(file_path, "rename me") + renamed = sandbox.files.rename(file_path, renamed_path) + assert renamed.path == renamed_path + assert sandbox.files.exists(file_path) is False + assert sandbox.files.read_text(renamed_path) == "rename me" + + link_path = f"{base_dir}/rename/hello-link.txt" + copied_link_path = f"{base_dir}/rename/hello-link-copy.txt" + renamed_link_path = f"{base_dir}/rename/hello-link-renamed.txt" + result = sandbox.exec(_bash_exec(f'ln -sfn "{renamed_path}" "{link_path}"')) + assert result.exit_code == 0 + copied_link = sandbox.files.copy(source=link_path, destination=copied_link_path) + assert copied_link.path == copied_link_path + assert sandbox.files.get_info(copied_link_path).symlink_target == renamed_path + renamed_link = sandbox.files.rename(copied_link_path, renamed_link_path) + assert renamed_link.path == renamed_link_path + assert sandbox.files.get_info(renamed_link_path).symlink_target == renamed_path + + target_dir = f"{base_dir}/rename-dir/target-dir" + link_dir = f"{base_dir}/rename-dir/link-dir" + renamed_link_dir = 
f"{base_dir}/rename-dir/link-dir-renamed" + sandbox.files.make_dir(target_dir) + sandbox.files.write_text(f"{target_dir}/child.txt", "child") + result = sandbox.exec(_bash_exec(f'ln -sfn "{target_dir}" "{link_dir}"')) + assert result.exit_code == 0 + renamed = sandbox.files.rename(link_dir, renamed_link_dir) + assert renamed.path == renamed_link_dir + assert sandbox.files.get_info(renamed_link_dir).symlink_target == target_dir + assert [ + entry.path for entry in sandbox.files.list(renamed_link_dir, depth=1) + ] == [f"{target_dir}/child.txt"] + + source_dir = f"{base_dir}/copy-tree/source" + nested_dir = f"{source_dir}/nested" + nested_target = f"{nested_dir}/target.txt" + destination_dir = f"{base_dir}/copy-tree/destination" + sandbox.files.make_dir(nested_dir) + sandbox.files.write_text(nested_target, "payload") + result = sandbox.exec( + _bash_exec(f'cd "{nested_dir}" && ln -sfn "target.txt" "link.txt"') + ) + assert result.exit_code == 0 + sandbox.files.copy( + source=source_dir, destination=destination_dir, recursive=True + ) + copied_target = f"{destination_dir}/nested/target.txt" + copied_link = f"{destination_dir}/nested/link.txt" + assert sandbox.files.read_text(copied_target) == "payload" + assert sandbox.files.get_info(copied_link).symlink_target == copied_target + + loop_dir = f"{base_dir}/loop-list" + loop_nested_dir = f"{loop_dir}/nested" + sandbox.files.make_dir(loop_nested_dir) + sandbox.files.write_text(f"{loop_nested_dir}/child.txt", "payload") + result = sandbox.exec(_bash_exec(f'cd "{loop_nested_dir}" && ln -sfn .. 
loop')) + assert result.exit_code == 0 + loop_entries = sandbox.files.list(loop_dir, depth=4) + loop_paths = [entry.path for entry in loop_entries] + assert f"{loop_nested_dir}/loop" in loop_paths + assert not any("/loop/" in path for path in loop_paths) + assert ( + sandbox.files.get_info(f"{loop_nested_dir}/loop").symlink_target == loop_dir + ) + + source_dir = f"{base_dir}/loop-copy/source" + nested_dir = f"{source_dir}/nested" + sandbox.files.make_dir(nested_dir) + sandbox.files.write_text(f"{nested_dir}/child.txt", "payload") + result = sandbox.exec(_bash_exec(f'cd "{nested_dir}" && ln -sfn .. loop')) + assert result.exit_code == 0 + destination_dir = f"{base_dir}/loop-copy/destination" + sandbox.files.copy( + source=source_dir, destination=destination_dir, recursive=True + ) + copied_loop = f"{destination_dir}/nested/loop" + assert sandbox.files.get_info(copied_loop).symlink_target == destination_dir + assert not any( + "/loop/" in entry.path + for entry in sandbox.files.list(destination_dir, depth=4) + ) + + source = f"{base_dir}/copy-overwrite/source.txt" + existing_target = f"{base_dir}/copy-overwrite/existing-target.txt" + destination_link = f"{base_dir}/copy-overwrite/destination-link.txt" + sandbox.files.write_text(source, "source payload") + sandbox.files.write_text(existing_target, "existing target") + result = sandbox.exec( + _bash_exec( + f'mkdir -p "{base_dir}/copy-overwrite" && ln -sfn "{existing_target}" "{destination_link}"' + ) + ) + assert result.exit_code == 0 + sandbox.files.copy(source=source, destination=destination_link, overwrite=True) + assert sandbox.files.read_text(destination_link) == "source payload" + assert sandbox.files.read_text(existing_target) == "existing target" + assert sandbox.files.get_info(destination_link).symlink_target is None + + chmod_path = f"{base_dir}/chmod/file.txt" + sandbox.files.write_text(chmod_path, "chmod me") + sandbox.files.chmod(path=chmod_path, mode="0640") + assert 
sandbox.files.get_info(chmod_path).mode == 0o640 + try: + expect_hyperbrowser_error( + "file chown", + lambda: sandbox.files.chown(path=chmod_path, uid=0, gid=0), + status_code=400, + service="runtime", + retryable=False, + message_includes_any=["operation", "permission"], + ) + except AssertionError as error: + if "expected HyperbrowserError, but call succeeded" not in str(error): + raise + assert sandbox.files.get_info(chmod_path).name == "file.txt" + + remove_path = f"{base_dir}/remove/file.txt" + sandbox.files.write_text(remove_path, "remove me") + sandbox.files.remove(remove_path) + assert sandbox.files.exists(remove_path) is False + sandbox.files.remove(remove_path) + sandbox.files.remove(f"{base_dir}/remove", recursive=True) + assert sandbox.files.exists(f"{base_dir}/remove") is False + + target = f"{base_dir}/remove-link/target.txt" + link = f"{base_dir}/remove-link/link.txt" + sandbox.files.write_text(target, "keep me") + result = sandbox.exec( + _bash_exec( + f'mkdir -p "{base_dir}/remove-link" && ln -sfn "{target}" "{link}"' + ) + ) + assert result.exit_code == 0 + sandbox.files.remove(link) + assert sandbox.files.exists(link) is False + assert sandbox.files.read_text(target) == "keep me" + + target_dir = f"{base_dir}/remove-recursive/target-dir" + target_file = f"{target_dir}/child.txt" + link_dir = f"{base_dir}/remove-recursive/link-dir" + sandbox.files.make_dir(target_dir) + sandbox.files.write_text(target_file, "keep tree") + result = sandbox.exec( + _bash_exec( + f'mkdir -p "{base_dir}/remove-recursive" && ln -sfn "{target_dir}" "{link_dir}"' + ) + ) + assert result.exit_code == 0 + sandbox.files.remove(link_dir, recursive=True) + assert sandbox.files.exists(link_dir) is False + assert sandbox.files.read_text(target_file) == "keep tree" + + link = f"{base_dir}/escape/file-link" + result = sandbox.exec( + _bash_exec(f'mkdir -p "{base_dir}/escape" && ln -sfn /etc/hosts "{link}"') + ) + assert result.exit_code == 0 + text = 
sandbox.files.read_text(link) + assert "localhost" in text + assert "localhost" in sandbox.files.download(link).decode("utf-8") + + fixture = _create_parent_symlink_escape_fixture( + sandbox, base_dir, "parent-escape-read" + ) + assert sandbox.files.read_text(fixture["escaped_file"]) == "outside secret" + assert ( + sandbox.files.download(fixture["escaped_file"]).decode("utf-8") + == "outside secret" + ) + assert [ + entry.path for entry in sandbox.files.list(fixture["link_dir"], depth=1) + ] == [f"{fixture['outside_dir']}/secret.txt"] + seen = Queue(maxsize=1) + handle = sandbox.files.watch_dir( + fixture["link_dir"], + lambda event: ( + seen.put_nowait(event.name) + if event.type == "write" and event.name == "fresh.txt" + else None + ), + ) + try: + sandbox.files.write_text( + f"{fixture['outside_dir']}/fresh.txt", "watch parent link" + ) + assert _await_queue_value(seen) == "fresh.txt" + finally: + handle.stop() + + fixture = _create_parent_symlink_escape_fixture( + sandbox, base_dir, "parent-escape-mutate" + ) + info = sandbox.files.get_info(fixture["escaped_file"]) + assert info.type == "file" + assert info.size == len("outside secret") + copied = sandbox.files.copy( + source=fixture["escaped_file"], + destination=f"{base_dir}/parent-escape-mutate/copied.txt", + ) + assert copied.path == f"{base_dir}/parent-escape-mutate/copied.txt" + assert sandbox.files.read_text(copied.path) == "outside secret" + renamed = sandbox.files.rename( + fixture["escaped_file"], + f"{base_dir}/parent-escape-mutate/renamed.txt", + ) + assert renamed.path == f"{base_dir}/parent-escape-mutate/renamed.txt" + assert sandbox.files.exists(fixture["outside_file"]) is False + assert sandbox.files.read_text(renamed.path) == "outside secret" + sandbox.files.write_text(fixture["escaped_file"], "remove me") + sandbox.files.remove(fixture["escaped_file"]) + outside_read = sandbox.exec( + _bash_exec( + f'if [ -e "{fixture["outside_file"]}" ]; then cat "{fixture["outside_file"]}"; else printf 
"__MISSING__"; fi' + ) + ) + assert outside_read.exit_code == 0 + assert outside_read.stdout.strip() == "__MISSING__" + + target_dir = f"/var/tmp/{make_test_name('watch-outside-target')}" + target_file = f"{target_dir}/child.txt" + link = f"{base_dir}/escape/dir-link" + result = sandbox.exec( + _bash_exec( + f'mkdir -p "{base_dir}/escape" "{target_dir}" && printf "child" > "{target_file}" && ln -sfn "{target_dir}" "{link}"' + ) + ) + assert result.exit_code == 0 + assert [entry.path for entry in sandbox.files.list(link, depth=1)] == [ + target_file + ] + seen = Queue(maxsize=1) + handle = sandbox.files.watch_dir( + link, + lambda event: ( + seen.put_nowait(event.name) + if event.type == "write" and event.name == "file.txt" + else None + ), + ) + try: + sandbox.files.write_text(f"{target_dir}/file.txt", "watch through link") + assert _await_queue_value(seen) == "file.txt" + finally: + handle.stop() + + watch_dir = f"{base_dir}/watch" + sandbox.files.make_dir(f"{watch_dir}/nested", parents=True) + direct_event = Queue(maxsize=1) + recursive_event = Queue(maxsize=1) + direct_handle = sandbox.files.watch_dir( + watch_dir, + lambda event: ( + direct_event.put_nowait(event.name) + if event.type == "write" and event.name == "direct.txt" + else None + ), + ) + recursive_handle = sandbox.files.watch_dir( + watch_dir, + lambda event: ( + recursive_event.put_nowait(event.name) + if event.type == "write" and event.name == "nested/recursive.txt" + else None + ), + recursive=True, + ) + try: + sandbox.files.write_text(f"{watch_dir}/direct.txt", "watch me") + sandbox.files.write_text( + f"{watch_dir}/nested/recursive.txt", "watch me too" + ) + assert _await_queue_value(direct_event) == "direct.txt" + assert _await_queue_value(recursive_event) == "nested/recursive.txt" + finally: + direct_handle.stop() + recursive_handle.stop() + + expect_hyperbrowser_error( + "watch missing directory", + lambda: sandbox.files.watch_dir( + f"{base_dir}/watch-missing", lambda event: None + ), + 
status_code=404, + service="runtime", + retryable=False, + message_includes_any=["not found", "no such file"], + ) + + invalid_file_path = f"{base_dir}/watch-invalid/file.txt" + sandbox.files.write_text(invalid_file_path, "not a directory") + expect_hyperbrowser_error( + "watch file path", + lambda: sandbox.files.watch_dir(invalid_file_path, lambda event: None), + status_code=400, + service="runtime", + retryable=False, + message_includes="not a directory", + ) + + path = f"{base_dir}/presign/file.txt" + upload = sandbox.files.upload_url(path, one_time=True) + assert upload.path == path + assert upload.method == "PUT" + upload_response = fetch_signed_url( + upload.url, + method=upload.method, + body="presigned upload body", + ) + assert upload_response.status_code == 200 + assert sandbox.files.read_text(path) == "presigned upload body" + + download = sandbox.files.download_url(path, one_time=True) + assert download.path == path + assert download.method == "GET" + download_response = fetch_signed_url(download.url, method=download.method) + assert download_response.status_code == 200 + assert download_response.text == "presigned upload body" + + path = f"{base_dir}/presign-race/upload.txt" + upload = sandbox.files.upload_url(path, one_time=True) + with ThreadPoolExecutor(max_workers=2) as executor: + first_future = executor.submit( + fetch_signed_url, + upload.url, + method=upload.method, + body="first body", + ) + second_future = executor.submit( + fetch_signed_url, + upload.url, + method=upload.method, + body="second body", + ) + first = first_future.result() + second = second_future.result() + assert sorted([first.status_code, second.status_code]) == [200, 401] + assert sandbox.files.read_text(path) in {"first body", "second body"} + + path = f"{base_dir}/presign-race/download.txt" + sandbox.files.write_text(path, "download once") + download = sandbox.files.download_url(path, one_time=True) + with ThreadPoolExecutor(max_workers=2) as executor: + first_future = 
executor.submit( + fetch_signed_url, download.url, method=download.method + ) + second_future = executor.submit( + fetch_signed_url, download.url, method=download.method + ) + first = first_future.result() + second = second_future.result() + assert sorted([first.status_code, second.status_code]) == [200, 401] + assert "download once" in {first.text, second.text} + + source = f"{base_dir}/rename-race/source.txt" + left = f"{base_dir}/rename-race/left.txt" + right = f"{base_dir}/rename-race/right.txt" + sandbox.files.write_text(source, "race") + with ThreadPoolExecutor(max_workers=2) as executor: + futures = [ + executor.submit(sandbox.files.rename, source, left), + executor.submit(sandbox.files.rename, source, right), + ] + results = [] + for future in futures: + try: + results.append(("fulfilled", future.result())) + except Exception as error: # pragma: no cover - exercised in e2e + results.append(("rejected", error)) + fulfilled = [result for result in results if result[0] == "fulfilled"] + rejected = [result for result in results if result[0] == "rejected"] + assert len(fulfilled) == 1 + assert len(rejected) == 1 + expect_hyperbrowser_error( + "rename race failure", + lambda: (_ for _ in ()).throw(rejected[0][1]), + status_code=404, + service="runtime", + retryable=False, + message_includes_any=["not found", "no such file"], + ) + winner_path = left if sandbox.files.exists(left) else right + assert sandbox.files.read_text(winner_path) == "race" + + expect_hyperbrowser_error( + "missing file read", + lambda: sandbox.files.read_text(f"{base_dir}/still-missing.txt"), + status_code=404, + service="runtime", + retryable=False, + message_includes_any=["not found", "no such file"], + ) + + try: + sandbox.files.list(base_dir, depth=0) + except ValueError as error: + assert "depth should be at least one" in str(error) + else: + raise AssertionError("expected invalid depth to fail locally") + finally: + stop_sandbox_if_running(sandbox) diff --git 
SNAPSHOT_CREATE_RETRY_DELAY_SECONDS = 0.5
SNAPSHOT_CREATE_RETRY_TIMEOUT_SECONDS = 60


def _create_sandbox_with_snapshot_retry(params: "CreateSandboxParams"):
    """Create a sandbox from *params*, retrying snapshot-catalog races.

    A freshly created snapshot can briefly return 404 "snapshot not found"
    before the catalog becomes consistent; those errors are retried until the
    timeout elapses.  Any other error is re-raised immediately.

    Raises the last catalog-race error if the deadline expires, or
    RuntimeError if the loop somehow exits without recording one.
    """
    deadline = time.monotonic() + SNAPSHOT_CREATE_RETRY_TIMEOUT_SECONDS
    last_error = None

    while time.monotonic() < deadline:
        try:
            return client.sandboxes.create(params)
        except HyperbrowserError as error:
            # Only the snapshot-catalog race is retryable.  (The original
            # code also checked `isinstance(str(error), str)`, which is
            # always True and has been removed.)
            is_snapshot_catalog_race = (
                error.status_code == 404
                and "snapshot not found" in str(error).lower()
            )
            if not is_snapshot_catalog_race:
                raise
            last_error = error
            time.sleep(SNAPSHOT_CREATE_RETRY_DELAY_SECONDS)

    if last_error is not None:
        raise last_error
    raise RuntimeError("snapshot create retry failed")
wait_for_runtime_ready(sandbox) + + detail = sandbox.to_dict() + assert detail["token"] + assert sandbox.runtime.base_url + assert sandbox.token_expires_at is not None + + stale_detail = stale_handle.to_dict() + assert stale_detail["token"] + assert stale_handle.runtime.base_url == sandbox.runtime.base_url + + info = sandbox.info() + assert info.id == sandbox.id + sandbox.refresh() + assert sandbox.status == "active" + + sandbox.connect() + assert sandbox.status == "active" + + memory_snapshot = sandbox.create_memory_snapshot() + assert memory_snapshot.snapshot_name + assert memory_snapshot.snapshot_id + assert memory_snapshot.namespace + assert memory_snapshot.status + assert memory_snapshot.image_name + assert memory_snapshot.image_id + assert memory_snapshot.image_namespace + + # Snapshot creation can briefly disrupt the next fast exec on the same handle. + wait_for_runtime_ready(sandbox) + + valid_detail = sandbox.info() + invalid_jwt = "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.e30.invalid-signature" + refresh_count = 0 + original_get_detail = sandbox._service.get_detail + + sandbox._runtime_session = SandboxRuntimeSession( + sandbox_id=sandbox.id, + status=valid_detail.status, + region=valid_detail.region, + token=invalid_jwt, + token_expires_at=datetime.now(timezone.utc) + timedelta(hours=1), + runtime=valid_detail.runtime, + ) + sandbox._detail = valid_detail.model_copy( + update={ + "token": invalid_jwt, + "token_expires_at": sandbox._runtime_session.token_expires_at, + } + ) + + def patched_get_detail(sandbox_id: str): + nonlocal refresh_count + refresh_count += 1 + return original_get_detail(sandbox_id) + + sandbox._service.get_detail = patched_get_detail + try: + result = sandbox.exec("echo runtime-refresh-ok") + assert result.exit_code == 0 + assert "runtime-refresh-ok" in result.stdout + assert refresh_count > 0 + assert sandbox.to_dict()["token"] + assert sandbox.to_dict()["token"] != invalid_jwt + finally: + sandbox._service.get_detail = 
original_get_detail + + image_sandbox = client.sandboxes.create( + CreateSandboxParams(image_name=DEFAULT_IMAGE_NAME) + ) + assert image_sandbox.id + assert image_sandbox.status == "active" + response = image_sandbox.stop() + assert response.success is True + assert image_sandbox.status == "closed" + + custom_image_sandbox = client.sandboxes.create( + CreateSandboxParams( + image_name=custom_image["imageName"], + image_id=custom_image["id"], + ) + ) + assert custom_image_sandbox.id + assert custom_image_sandbox.status == "active" + wait_for_runtime_ready(custom_image_sandbox) + + custom_image_memory_snapshot = custom_image_sandbox.create_memory_snapshot() + assert custom_image_memory_snapshot.image_name == custom_image["imageName"] + assert custom_image_memory_snapshot.image_id == custom_image["id"] + assert custom_image_memory_snapshot.image_namespace == custom_image["namespace"] + + custom_snapshot_sandbox = _create_sandbox_with_snapshot_retry( + CreateSandboxParams( + snapshot_name=custom_image_memory_snapshot.snapshot_name, + snapshot_id=custom_image_memory_snapshot.snapshot_id, + ) + ) + assert custom_snapshot_sandbox.id + assert custom_snapshot_sandbox.status == "active" + response = custom_snapshot_sandbox.stop() + assert response.success is True + assert custom_snapshot_sandbox.status == "closed" + + expect_hyperbrowser_error( + "mismatched image selector", + lambda: client.sandboxes.create( + CreateSandboxParams( + image_name=custom_image["imageName"], + image_id=str(uuid4()), + ) + ), + status_code=404, + service="control", + retryable=False, + message_includes_any=["image not found", "not found"], + ) + + expect_hyperbrowser_error( + "mismatched snapshot selector", + lambda: client.sandboxes.create( + CreateSandboxParams( + snapshot_name=memory_snapshot.snapshot_name, + snapshot_id=str(uuid4()), + ) + ), + status_code=404, + service="control", + retryable=False, + message_includes_any=["snapshot not found", "not found"], + ) + + response = sandbox.stop() 
+ assert response.success is True + assert sandbox.status == "closed" + + expect_hyperbrowser_error( + "stopped sandbox memory snapshot", + lambda: sandbox.create_memory_snapshot(), + status_code=409, + service="control", + retryable=False, + message_includes="Sandbox is not running", + ) + + expect_hyperbrowser_error( + "stopped sandbox connect", + lambda: sandbox.connect(), + status_code=409, + code="sandbox_not_running", + service="runtime", + retryable=False, + message_includes="not running", + ) + + expect_hyperbrowser_error( + "stopped sandbox exec", + lambda: sandbox.exec("echo should-not-run"), + status_code=409, + code="sandbox_not_running", + service="runtime", + retryable=False, + message_includes="not running", + ) + + expect_hyperbrowser_error( + "stale sandbox connect", + lambda: stale_handle.connect(), + status_code=409, + code="sandbox_not_running", + service="runtime", + retryable=False, + message_includes="not running", + ) + + expect_hyperbrowser_error( + "stopped sandbox reconnect", + lambda: client.sandboxes.connect(sandbox.id), + status_code=409, + code="sandbox_not_running", + service="runtime", + retryable=False, + message_includes="not running", + ) + + expect_hyperbrowser_error( + "missing sandbox get", + lambda: client.sandboxes.get(str(uuid4())), + status_code=404, + service="control", + retryable=False, + message_includes="not found", + ) + + secondary = _create_sandbox_with_snapshot_retry( + CreateSandboxParams( + snapshot_name=memory_snapshot.snapshot_name, + snapshot_id=memory_snapshot.snapshot_id, + ) + ) + response = secondary.stop() + assert response.success is True + assert secondary.status == "closed" + finally: + stop_sandbox_if_running(sandbox) + stop_sandbox_if_running(stale_handle) + stop_sandbox_if_running(secondary) + stop_sandbox_if_running(image_sandbox) + stop_sandbox_if_running(custom_image_sandbox) + stop_sandbox_if_running(custom_snapshot_sandbox) diff --git a/tests/sandbox/e2e/test_process.py 
b/tests/sandbox/e2e/test_process.py new file mode 100644 index 00000000..31c6668a --- /dev/null +++ b/tests/sandbox/e2e/test_process.py @@ -0,0 +1,144 @@ +from hyperbrowser.models import SandboxExecParams + +from tests.helpers.config import create_client +from tests.helpers.errors import expect_hyperbrowser_error +from tests.helpers.sandbox import ( + default_sandbox_params, + stop_sandbox_if_running, + wait_for_runtime_ready, +) + +client = create_client() + + +def _collect_process_stream(events): + output = [] + for event in events: + output.append(event) + if event.type == "exit": + break + return output + + +def test_sandbox_process_e2e(): + sandbox = None + + try: + sandbox = client.sandboxes.create(default_sandbox_params("py-sdk-process")) + wait_for_runtime_ready(sandbox) + + result = sandbox.exec("echo process-exec-ok") + assert result.exit_code == 0 + assert "process-exec-ok" in result.stdout + + result = sandbox.exec( + SandboxExecParams( + command="bash", + args=["-lc", "echo process-exec-fail 1>&2; exit 7"], + ) + ) + assert result.exit_code == 7 + assert "process-exec-fail" in result.stderr + + stdin_process = sandbox.processes.start( + SandboxExecParams( + command="bash", + args=["-lc", "read line; echo stdout:$line; echo stderr:$line 1>&2"], + ) + ) + fetched = sandbox.get_process(stdin_process.id) + assert fetched.id == stdin_process.id + + listing = sandbox.processes.list(limit=20) + assert any(entry.id == stdin_process.id for entry in listing.data) + + stdin_process.write_stdin("sdk-stdin\n", eof=True) + result = stdin_process.wait() + assert result.exit_code == 0 + assert "stdout:sdk-stdin" in result.stdout + assert "stderr:sdk-stdin" in result.stderr + + running_process = sandbox.processes.start( + SandboxExecParams(command="bash", args=["-lc", "sleep 30"]) + ) + refreshed = running_process.refresh() + assert refreshed.status in {"queued", "running"} + result = running_process.kill() + assert result.status not in {"queued", "running"} + + 
streamed = sandbox.processes.start( + SandboxExecParams( + command="bash", + args=["-lc", "echo stream-out; echo stream-err 1>&2"], + ) + ) + events = _collect_process_stream(streamed.stream()) + assert any( + event.type == "stdout" and "stream-out" in event.data for event in events + ) + assert any( + event.type == "stderr" and "stream-err" in event.data for event in events + ) + assert any(event.type == "exit" for event in events) + + result_process = sandbox.processes.start( + SandboxExecParams(command="bash", args=["-lc", "echo result-alias-ok"]) + ) + result = result_process.result() + assert result.exit_code == 0 + assert "result-alias-ok" in result.stdout + + noisy_process = sandbox.processes.start( + SandboxExecParams( + command="bash", + args=[ + "-lc", + 'yes "process-replay-window-overflow-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" | head -n 120000', + ], + ) + ) + result = noisy_process.result() + assert len(result.stdout) > 3 * 1024 * 1024 + + expect_hyperbrowser_error( + "process replay window expired", + lambda: _collect_process_stream(noisy_process.stream(1)), + status_code=410, + code="replay_window_expired", + service="runtime", + retryable=False, + message_includes="Replay window expired", + ) + + timeout_process = sandbox.processes.start( + SandboxExecParams(command="bash", args=["-lc", "sleep 10"]) + ) + expect_hyperbrowser_error( + "process wait timeout", + lambda: timeout_process.wait(timeout_ms=100), + status_code=408, + service="runtime", + retryable=False, + message_includes="timed out", + ) + timeout_process.signal("TERM") + result = timeout_process.wait(timeout_ms=3000) + assert result.status in {"exited", "failed", "killed", "timed_out"} + + kill_process = sandbox.processes.start( + SandboxExecParams(command="bash", args=["-lc", "sleep 30"]) + ) + result = kill_process.kill() + assert result.status not in {"queued", "running"} + assert kill_process.status not in {"queued", "running"} + + expect_hyperbrowser_error( 
+ "missing process get", + lambda: sandbox.get_process("proc_missing"), + status_code=404, + service="runtime", + retryable=False, + message_includes="not found", + ) + finally: + stop_sandbox_if_running(sandbox) diff --git a/tests/sandbox/e2e/test_runtime_transport.py b/tests/sandbox/e2e/test_runtime_transport.py new file mode 100644 index 00000000..5727a6d7 --- /dev/null +++ b/tests/sandbox/e2e/test_runtime_transport.py @@ -0,0 +1,44 @@ +from hyperbrowser.sandbox_common import ( + resolve_runtime_transport_target, + to_websocket_transport_target, +) + + +def test_runtime_transport_target_ignores_ambient_proxy_without_explicit_override( + monkeypatch, +): + monkeypatch.setenv("REGIONAL_PROXY_DEV_HOST", "http://127.0.0.1:8090") + + target = resolve_runtime_transport_target( + "https://session.example.dev:8443", + "/sandbox/exec?foo=bar", + ) + + assert target.url == "https://session.example.dev:8443/sandbox/exec?foo=bar" + assert target.host_header is None + + +def test_runtime_transport_target_applies_explicit_proxy_override(): + target = resolve_runtime_transport_target( + "https://session.example.dev:8443", + "/sandbox/exec?foo=bar", + "http://127.0.0.1:8090", + ) + + assert target.url == "http://127.0.0.1:8090/sandbox/exec?foo=bar" + assert target.host_header == "session.example.dev:8443" + + +def test_runtime_websocket_target_applies_explicit_proxy_override(): + target = to_websocket_transport_target( + "https://session.example.dev:8443", + "/sandbox/pty/pty_123/ws?sessionId=sandbox_123", + "http://127.0.0.1:8090", + ) + + assert ( + target.url + == "wss://session.example.dev:8443/sandbox/pty/pty_123/ws?sessionId=sandbox_123" + ) + assert target.connect_host == "127.0.0.1" + assert target.connect_port == 8090 diff --git a/tests/sandbox/e2e/test_sudo.py b/tests/sandbox/e2e/test_sudo.py new file mode 100644 index 00000000..0d48f3ba --- /dev/null +++ b/tests/sandbox/e2e/test_sudo.py @@ -0,0 +1,59 @@ +from hyperbrowser.models import SandboxExecParams + +from 
tests.helpers.config import create_client +from tests.helpers.sandbox import ( + default_sandbox_params, + stop_sandbox_if_running, + wait_for_runtime_ready, +) + +client = create_client() + + +def _bash_exec(command: str) -> SandboxExecParams: + return SandboxExecParams(command="bash", args=["-lc", command]) + + +def test_sandbox_sudo_e2e(): + sandbox = None + + try: + sandbox = client.sandboxes.create(default_sandbox_params("py-sdk-sudo")) + wait_for_runtime_ready(sandbox) + + path = "/tmp/sdk-sudo-check.txt" + + runtime_user = sandbox.exec(_bash_exec("whoami && id -u && id -g")) + assert runtime_user.exit_code == 0 + assert "ubuntu" in runtime_user.stdout + assert "1000" in runtime_user.stdout + + direct_chown = sandbox.exec( + _bash_exec( + " && ".join( + [ + f'printf "sudo-check" > "{path}"', + f'chown root:root "{path}"', + ] + ) + ) + ) + assert direct_chown.exit_code != 0 + assert "operation not permitted" in direct_chown.stderr.lower() + + sudo_result = sandbox.exec( + _bash_exec( + " && ".join( + [ + "sudo -n whoami", + f'sudo -n chown root:root "{path}"', + f"stat -c '%U:%G' \"{path}\"", + ] + ) + ) + ) + assert sudo_result.exit_code == 0 + assert "root" in sudo_result.stdout + assert "root:root" in sudo_result.stdout + finally: + stop_sandbox_if_running(sandbox) diff --git a/tests/sandbox/e2e/test_terminal_smoke.py b/tests/sandbox/e2e/test_terminal_smoke.py new file mode 100644 index 00000000..02e8a57e --- /dev/null +++ b/tests/sandbox/e2e/test_terminal_smoke.py @@ -0,0 +1,221 @@ +import time + +from hyperbrowser.models import SandboxTerminalCreateParams + +from tests.helpers.config import create_client +from tests.helpers.errors import expect_hyperbrowser_error +from tests.helpers.sandbox import ( + default_sandbox_params, + stop_sandbox_if_running, + wait_for_runtime_ready, +) + +client = create_client() + + +def _collect_terminal_session(connection): + output = "" + exit_code = None + + for event in connection.events(): + if event.type == "output": + 
output += event.data + continue + exit_code = event.status.exit_code + break + + return output, exit_code + + +def _terminal_status_output(status) -> str: + return "".join(chunk.data for chunk in ((status.output if status else None) or [])) + + +def _terminal_status_raw_output(status) -> str: + return b"".join( + chunk.raw for chunk in ((status.output if status else None) or []) + ).decode("utf-8") + + +def _wait_for_terminal_status_output( + read_status, marker: str, timeout_seconds: float = 5.0 +): + deadline = time.monotonic() + timeout_seconds + last_status = None + + while time.monotonic() < deadline: + last_status = read_status() + if marker in _terminal_status_output(last_status): + return last_status + time.sleep(0.1) + + raise AssertionError( + f"timed out waiting for terminal output {marker!r}; " + f"last output={_terminal_status_output(last_status)!r}" + ) + + +def test_sandbox_terminal_e2e(): + sandbox = None + + try: + sandbox = client.sandboxes.create(default_sandbox_params("py-sdk-terminal")) + wait_for_runtime_ready(sandbox) + + assert sandbox.pty is sandbox.terminal + + terminal = sandbox.terminal.create( + SandboxTerminalCreateParams( + command="bash", + args=["-l"], + rows=24, + cols=80, + ) + ) + fetched = sandbox.terminal.get(terminal.id) + assert fetched.id == terminal.id + + connection = terminal.attach() + try: + terminal.resize(30, 100) + connection.write("pwd\n") + connection.write("echo terminal-smoke-ok\n") + connection.write("exit\n") + + output, exit_code = _collect_terminal_session(connection) + assert "terminal-smoke-ok" in output + assert exit_code == 0 + finally: + connection.close() + + status = terminal.wait(timeout_ms=2000) + assert status.running is False + assert status.exit_code == 0 + + terminal = sandbox.terminal.create( + SandboxTerminalCreateParams( + command="bash", + args=["-l"], + rows=24, + cols=80, + ) + ) + connection = terminal.attach() + try: + connection.resize(32, 110) + refreshed = terminal.refresh() + assert 
refreshed.current.rows == 32 + assert refreshed.current.cols == 110 + + connection.write("exit\n") + _, exit_code = _collect_terminal_session(connection) + assert exit_code == 0 + finally: + connection.close() + + status = terminal.wait(timeout_ms=2000) + assert status.running is False + + marker = "terminal-get-output" + terminal = sandbox.terminal.create( + SandboxTerminalCreateParams( + command="bash", + args=["-lc", f"printf '{marker}' && sleep 1"], + rows=24, + cols=80, + ) + ) + without_output = sandbox.terminal.get(terminal.id) + assert without_output.current.output is None + fetched = _wait_for_terminal_status_output( + lambda: sandbox.terminal.get(terminal.id, include_output=True).current, + marker, + ) + assert marker in _terminal_status_output(fetched) + assert marker in _terminal_status_raw_output(fetched) + assert fetched.output + status = terminal.wait(timeout_ms=2000) + assert status.running is False + assert status.exit_code == 0 + + marker = "terminal-refresh-output" + terminal = sandbox.terminal.create( + SandboxTerminalCreateParams( + command="bash", + args=["-lc", f"printf '{marker}' && sleep 1"], + rows=24, + cols=80, + ) + ) + without_output = terminal.refresh() + assert without_output.current.output is None + refreshed = _wait_for_terminal_status_output( + lambda: terminal.refresh(include_output=True).current, + marker, + ) + assert marker in _terminal_status_output(refreshed) + assert marker in _terminal_status_raw_output(refreshed) + assert refreshed.output + status = terminal.wait(timeout_ms=2000) + assert status.running is False + assert status.exit_code == 0 + + marker = "terminal-wait-output" + terminal = sandbox.terminal.create( + SandboxTerminalCreateParams( + command="bash", + args=["-lc", f"printf '{marker}'"], + rows=24, + cols=80, + ) + ) + status = terminal.wait(timeout_ms=2000, include_output=True) + assert status.running is False + assert status.exit_code == 0 + assert marker in _terminal_status_output(status) + assert marker 
in _terminal_status_raw_output(status) + assert status.output + + timeout_terminal = sandbox.pty.create( + SandboxTerminalCreateParams( + command="bash", + args=["-lc", "sleep 10"], + rows=24, + cols=80, + ) + ) + expect_hyperbrowser_error( + "terminal wait timeout", + lambda: timeout_terminal.wait(timeout_ms=100), + status_code=408, + service="runtime", + retryable=False, + message_includes="timed out", + ) + + timeout_terminal.signal("TERM") + status = timeout_terminal.wait(timeout_ms=3000) + assert status.running is False + + kill_terminal = sandbox.pty.create( + SandboxTerminalCreateParams( + command="bash", + args=["-lc", "sleep 30"], + rows=24, + cols=80, + ) + ) + status = kill_terminal.kill() + assert status.running is False + assert kill_terminal.current.running is False + + expect_hyperbrowser_error( + "missing terminal get", + lambda: sandbox.terminal.get("pty_missing"), + status_code=404, + service="runtime", + retryable=False, + message_includes="not found", + ) + finally: + stop_sandbox_if_running(sandbox) diff --git a/tests/test_create_sandbox_params.py b/tests/test_create_sandbox_params.py new file mode 100644 index 00000000..ffe25a5b --- /dev/null +++ b/tests/test_create_sandbox_params.py @@ -0,0 +1,100 @@ +import pytest +from pydantic import ValidationError + +from hyperbrowser.models import ( + CreateSandboxParams, + SandboxExecParams, + SandboxProcessListParams, + SandboxProcessWaitParams, +) + + +def test_create_sandbox_params_accepts_image_source(): + params = CreateSandboxParams(image_name="node") + + assert params.model_dump(by_alias=True, exclude_none=True) == {"imageName": "node"} + + +def test_create_sandbox_params_accepts_snapshot_source(): + params = CreateSandboxParams(snapshot_name="snap", snapshot_id="snap-id") + + assert params.model_dump(by_alias=True, exclude_none=True) == { + "snapshotName": "snap", + "snapshotId": "snap-id", + } + + +def test_create_sandbox_params_rejects_camel_case_input(): + with pytest.raises(ValidationError, 
match="Provide exactly one start source"): + CreateSandboxParams(**{"imageName": "node"}) + + +@pytest.mark.parametrize( + "payload", + [ + {"sandboxName": "legacy"}, + {"sandbox_name": "legacy"}, + ], +) +def test_create_sandbox_params_rejects_legacy_sandbox_name(payload): + with pytest.raises( + ValidationError, + match="Provide exactly one start source: snapshot_name or image_name", + ): + CreateSandboxParams(**payload) + + +def test_create_sandbox_params_rejects_multiple_sources(): + with pytest.raises( + ValidationError, + match="Provide exactly one start source: snapshot_name or image_name", + ): + CreateSandboxParams(image_name="node", snapshot_name="snap") + + +def test_create_sandbox_params_requires_snapshot_name_for_snapshot_id(): + with pytest.raises(ValidationError, match="snapshot_id requires snapshot_name"): + CreateSandboxParams(snapshot_id="snap-id") + + +def test_sandbox_exec_params_serialize_process_timeout_sec_as_snake_case(): + params = SandboxExecParams( + command="echo hi", + timeout_ms=500, + timeout_sec=7, + use_shell=True, + ) + + assert params.model_dump(by_alias=True, exclude_none=True) == { + "command": "echo hi", + "timeoutMs": 500, + "timeout_sec": 7, + "useShell": True, + } + + +def test_sandbox_process_wait_params_serialize_timeout_sec_as_snake_case(): + params = SandboxProcessWaitParams(timeout_ms=250, timeout_sec=3) + + assert params.model_dump(by_alias=True, exclude_none=True) == { + "timeoutMs": 250, + "timeout_sec": 3, + } + + +def test_sandbox_process_list_params_serialize_created_filters_as_snake_case(): + params = SandboxProcessListParams( + status=["running", "exited"], + limit=10, + cursor="cursor-1", + created_after=100, + created_before=200, + ) + + assert params.model_dump(by_alias=True, exclude_none=True) == { + "status": ["running", "exited"], + "limit": 10, + "cursor": "cursor-1", + "created_after": 100, + "created_before": 200, + }